summaryrefslogtreecommitdiffstats
path: root/testing/web-platform/tests/tools/manifest
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-19 00:47:55 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-19 00:47:55 +0000
commit26a029d407be480d791972afb5975cf62c9360a6 (patch)
treef435a8308119effd964b339f76abb83a57c29483 /testing/web-platform/tests/tools/manifest
parentInitial commit. (diff)
downloadfirefox-26a029d407be480d791972afb5975cf62c9360a6.tar.xz
firefox-26a029d407be480d791972afb5975cf62c9360a6.zip
Adding upstream version 124.0.1.upstream/124.0.1
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'testing/web-platform/tests/tools/manifest')
-rw-r--r--testing/web-platform/tests/tools/manifest/XMLParser.py131
-rw-r--r--testing/web-platform/tests/tools/manifest/__init__.py1
-rw-r--r--testing/web-platform/tests/tools/manifest/catalog/xhtml.dtd2125
-rw-r--r--testing/web-platform/tests/tools/manifest/commands.json30
-rw-r--r--testing/web-platform/tests/tools/manifest/download.py191
-rw-r--r--testing/web-platform/tests/tools/manifest/item.py376
-rw-r--r--testing/web-platform/tests/tools/manifest/jsonlib.py125
-rw-r--r--testing/web-platform/tests/tools/manifest/log.py9
-rw-r--r--testing/web-platform/tests/tools/manifest/manifest.py428
-rw-r--r--testing/web-platform/tests/tools/manifest/mputil.py14
-rw-r--r--testing/web-platform/tests/tools/manifest/requirements.txt1
-rw-r--r--testing/web-platform/tests/tools/manifest/sourcefile.py1083
-rw-r--r--testing/web-platform/tests/tools/manifest/spec.py84
-rw-r--r--testing/web-platform/tests/tools/manifest/testpaths.py98
-rw-r--r--testing/web-platform/tests/tools/manifest/tests/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/manifest/tests/test_XMLParser.py56
-rw-r--r--testing/web-platform/tests/tools/manifest/tests/test_item.py160
-rw-r--r--testing/web-platform/tests/tools/manifest/tests/test_manifest.py337
-rw-r--r--testing/web-platform/tests/tools/manifest/tests/test_sourcefile.py962
-rw-r--r--testing/web-platform/tests/tools/manifest/tests/test_utils.py15
-rw-r--r--testing/web-platform/tests/tools/manifest/typedata.py300
-rwxr-xr-xtesting/web-platform/tests/tools/manifest/update.py96
-rw-r--r--testing/web-platform/tests/tools/manifest/utils.py72
-rw-r--r--testing/web-platform/tests/tools/manifest/vcs.py305
24 files changed, 6999 insertions, 0 deletions
diff --git a/testing/web-platform/tests/tools/manifest/XMLParser.py b/testing/web-platform/tests/tools/manifest/XMLParser.py
new file mode 100644
index 0000000000..8dcdb45007
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/XMLParser.py
@@ -0,0 +1,131 @@
+from collections import OrderedDict
+from typing import Dict, List, Optional, Text, Union
+from os.path import dirname, join
+from xml.parsers import expat
+import xml.etree.ElementTree as etree # noqa: N813
+
+
+# Directory containing the bundled DTD catalog (catalog/xhtml.dtd), which
+# XMLParser._external opens to resolve well-known XHTML public identifiers
+# without network access.
+_catalog = join(dirname(__file__), "catalog")
+
+def _wrap_error(e: expat.error) -> etree.ParseError:
+ err = etree.ParseError(e)
+ err.code = e.code
+ err.position = e.lineno, e.offset
+ raise err
+
+_names: Dict[Text, Text] = {}
+def _fixname(key: Text) -> Text:
+ try:
+ name = _names[key]
+ except KeyError:
+ name = key
+ if "}" in name:
+ name = "{" + name
+ _names[key] = name
+ return name
+
+
+# expat's numeric error code for an undefined entity reference; attached to
+# the error raised by XMLParser._skipped.
+_undefined_entity_code: int = expat.errors.codes[expat.errors.XML_ERROR_UNDEFINED_ENTITY]
+
+
class XMLParser:
    """
    An XML parser with support for XHTML DTDs and all Python-supported encodings

    This implements the API defined by
    xml.etree.ElementTree.XMLParser, but supports XHTML DTDs
    (therefore allowing XHTML entities) and supports all encodings
    Python does, rather than just those supported by expat.
    """
    def __init__(self, encoding: Optional[Text] = None) -> None:
        # The "}" separator makes expat report namespaced names as
        # "uri}local"; _fixname later rewrites that to ElementTree's
        # "{uri}local" form.
        self._parser = expat.ParserCreate(encoding, "}")
        self._target = etree.TreeBuilder()
        # parser settings
        self._parser.buffer_text = True
        self._parser.ordered_attributes = True
        self._parser.SetParamEntityParsing(expat.XML_PARAM_ENTITY_PARSING_UNLESS_STANDALONE)
        # parser callbacks
        self._parser.XmlDeclHandler = self._xml_decl
        self._parser.StartElementHandler = self._start
        self._parser.EndElementHandler = self._end
        self._parser.CharacterDataHandler = self._data
        self._parser.ExternalEntityRefHandler = self._external
        self._parser.SkippedEntityHandler = self._skipped
        # used for our horrible re-encoding hack: raw input bytes are
        # buffered until the first start tag so that, if expat rejects the
        # declared encoding, the document can be re-decoded in Python and
        # re-fed as UTF-8 (see feed()).
        self._fed_data: Optional[List[bytes]] = []
        self._read_encoding: Optional[Text] = None

    def _xml_decl(self, version: Text, encoding: Optional[Text], standalone: int) -> None:
        # Remember the encoding declared in the XML declaration so feed()
        # can re-decode the buffered bytes if expat cannot handle it.
        self._read_encoding = encoding

    def _start(self, tag: Text, attrib_in: List[str]) -> etree.Element:
        assert isinstance(tag, str)
        # The first element means the encoding worked, so the re-encoding
        # buffer is no longer needed.
        self._fed_data = None
        tag = _fixname(tag)
        attrib: Dict[Union[bytes, Text], Union[bytes, Text]] = OrderedDict()
        if attrib_in:
            # With ordered_attributes set, expat delivers attributes as a
            # flat [name1, value1, name2, value2, ...] list.
            for i in range(0, len(attrib_in), 2):
                attrib[_fixname(attrib_in[i])] = attrib_in[i+1]
        return self._target.start(tag, attrib)

    def _data(self, text: Text) -> None:
        self._target.data(text)

    def _end(self, tag: Text) -> etree.Element:
        return self._target.end(_fixname(tag))

    def _external(self, context: Text, base: Optional[Text], system_id: Optional[Text], public_id: Optional[Text]) -> bool:
        # Resolve well-known XHTML/MathML public identifiers against the
        # bundled DTD so XHTML entities are defined without network access.
        if public_id in {
            "-//W3C//DTD XHTML 1.0 Transitional//EN",
            "-//W3C//DTD XHTML 1.1//EN",
            "-//W3C//DTD XHTML 1.0 Strict//EN",
            "-//W3C//DTD XHTML 1.0 Frameset//EN",
            "-//W3C//DTD XHTML Basic 1.0//EN",
            "-//W3C//DTD XHTML 1.1 plus MathML 2.0//EN",
            "-//W3C//DTD XHTML 1.1 plus MathML 2.0 plus SVG 1.1//EN",
            "-//W3C//DTD MathML 2.0//EN",
            "-//WAPFORUM//DTD XHTML Mobile 1.0//EN"
        }:
            parser = self._parser.ExternalEntityParserCreate(context)
            with open(join(_catalog, "xhtml.dtd"), "rb") as fp:
                try:
                    parser.ParseFile(fp)
                except expat.error:
                    return False

        # Unrecognized public ids are reported as handled so parsing
        # continues without their entity definitions (undefined entities
        # then surface via _skipped).
        return True

    def _skipped(self, name: Text, is_parameter_entity: bool) -> None:
        # Turn an unresolved entity reference into an error carrying the
        # undefined-entity code and the current parse position.
        err = expat.error("undefined entity %s: line %d, column %d" %
                          (name, self._parser.ErrorLineNumber,
                           self._parser.ErrorColumnNumber))
        err.code = _undefined_entity_code
        err.lineno = self._parser.ErrorLineNumber
        err.offset = self._parser.ErrorColumnNumber
        raise err

    def feed(self, data: bytes) -> None:
        """Feed a chunk of document bytes to the parser.

        Raises etree.ParseError (via _wrap_error) on malformed input.  If
        expat cannot handle the document's declared encoding, the bytes fed
        so far are re-decoded in Python, transcoded to UTF-8, and re-fed to
        a fresh expat parser.
        """
        if self._fed_data is not None:
            self._fed_data.append(data)
        try:
            self._parser.Parse(data, False)
        except expat.error as v:
            _wrap_error(v)
        except ValueError as e:
            if e.args[0] == 'multi-byte encodings are not supported':
                assert self._read_encoding is not None
                assert self._fed_data is not None
                xml = b"".join(self._fed_data).decode(self._read_encoding).encode("utf-8")
                new_parser = XMLParser("utf-8")
                self._parser = new_parser._parser
                self._target = new_parser._target
                self._fed_data = None
                self.feed(xml)
            else:
                # Any other ValueError is a real failure; previously this
                # was silently swallowed, hiding errors from callers.
                raise

    def close(self) -> etree.Element:
        """Finish parsing and return the root element of the built tree."""
        try:
            self._parser.Parse("", True)
        except expat.error as v:
            _wrap_error(v)
        tree = self._target.close()
        return tree
diff --git a/testing/web-platform/tests/tools/manifest/__init__.py b/testing/web-platform/tests/tools/manifest/__init__.py
new file mode 100644
index 0000000000..8c8f189070
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/__init__.py
@@ -0,0 +1 @@
+from . import item, manifest, sourcefile, update # noqa: F401
diff --git a/testing/web-platform/tests/tools/manifest/catalog/xhtml.dtd b/testing/web-platform/tests/tools/manifest/catalog/xhtml.dtd
new file mode 100644
index 0000000000..4307b1c2c4
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/catalog/xhtml.dtd
@@ -0,0 +1,2125 @@
+<!ENTITY Tab "&#x9;">
+<!ENTITY NewLine "&#xA;">
+<!ENTITY excl "&#x21;">
+<!ENTITY quot "&#x22;">
+<!ENTITY QUOT "&#x22;">
+<!ENTITY num "&#x23;">
+<!ENTITY dollar "&#x24;">
+<!ENTITY percnt "&#x25;">
+<!ENTITY amp "&#x26;#x26;">
+<!ENTITY AMP "&#x26;#x26;">
+<!ENTITY apos "&#x27;">
+<!ENTITY lpar "&#x28;">
+<!ENTITY rpar "&#x29;">
+<!ENTITY ast "&#x2A;">
+<!ENTITY midast "&#x2A;">
+<!ENTITY plus "&#x2B;">
+<!ENTITY comma "&#x2C;">
+<!ENTITY period "&#x2E;">
+<!ENTITY sol "&#x2F;">
+<!ENTITY colon "&#x3A;">
+<!ENTITY semi "&#x3B;">
+<!ENTITY lt "&#x26;#x3C;">
+<!ENTITY LT "&#x26;#x3C;">
+<!ENTITY nvlt "&#x26;#x3C;&#x20D2;">
+<!ENTITY equals "&#x3D;">
+<!ENTITY bne "&#x3D;&#x20E5;">
+<!ENTITY gt "&#x3E;">
+<!ENTITY GT "&#x3E;">
+<!ENTITY nvgt "&#x3E;&#x20D2;">
+<!ENTITY quest "&#x3F;">
+<!ENTITY commat "&#x40;">
+<!ENTITY lsqb "&#x5B;">
+<!ENTITY lbrack "&#x5B;">
+<!ENTITY bsol "&#x5C;">
+<!ENTITY rsqb "&#x5D;">
+<!ENTITY rbrack "&#x5D;">
+<!ENTITY Hat "&#x5E;">
+<!ENTITY lowbar "&#x5F;">
+<!ENTITY UnderBar "&#x5F;">
+<!ENTITY grave "&#x60;">
+<!ENTITY DiacriticalGrave "&#x60;">
+<!ENTITY fjlig "&#x66;&#x6A;">
+<!ENTITY lcub "&#x7B;">
+<!ENTITY lbrace "&#x7B;">
+<!ENTITY verbar "&#x7C;">
+<!ENTITY vert "&#x7C;">
+<!ENTITY VerticalLine "&#x7C;">
+<!ENTITY rcub "&#x7D;">
+<!ENTITY rbrace "&#x7D;">
+<!ENTITY nbsp "&#xA0;">
+<!ENTITY NonBreakingSpace "&#xA0;">
+<!ENTITY iexcl "&#xA1;">
+<!ENTITY cent "&#xA2;">
+<!ENTITY pound "&#xA3;">
+<!ENTITY curren "&#xA4;">
+<!ENTITY yen "&#xA5;">
+<!ENTITY brvbar "&#xA6;">
+<!ENTITY sect "&#xA7;">
+<!ENTITY Dot "&#xA8;">
+<!ENTITY die "&#xA8;">
+<!ENTITY DoubleDot "&#xA8;">
+<!ENTITY uml "&#xA8;">
+<!ENTITY copy "&#xA9;">
+<!ENTITY COPY "&#xA9;">
+<!ENTITY ordf "&#xAA;">
+<!ENTITY laquo "&#xAB;">
+<!ENTITY not "&#xAC;">
+<!ENTITY shy "&#xAD;">
+<!ENTITY reg "&#xAE;">
+<!ENTITY circledR "&#xAE;">
+<!ENTITY REG "&#xAE;">
+<!ENTITY macr "&#xAF;">
+<!ENTITY strns "&#xAF;">
+<!ENTITY deg "&#xB0;">
+<!ENTITY plusmn "&#xB1;">
+<!ENTITY pm "&#xB1;">
+<!ENTITY PlusMinus "&#xB1;">
+<!ENTITY sup2 "&#xB2;">
+<!ENTITY sup3 "&#xB3;">
+<!ENTITY acute "&#xB4;">
+<!ENTITY DiacriticalAcute "&#xB4;">
+<!ENTITY micro "&#xB5;">
+<!ENTITY para "&#xB6;">
+<!ENTITY middot "&#xB7;">
+<!ENTITY centerdot "&#xB7;">
+<!ENTITY CenterDot "&#xB7;">
+<!ENTITY cedil "&#xB8;">
+<!ENTITY Cedilla "&#xB8;">
+<!ENTITY sup1 "&#xB9;">
+<!ENTITY ordm "&#xBA;">
+<!ENTITY raquo "&#xBB;">
+<!ENTITY frac14 "&#xBC;">
+<!ENTITY frac12 "&#xBD;">
+<!ENTITY half "&#xBD;">
+<!ENTITY frac34 "&#xBE;">
+<!ENTITY iquest "&#xBF;">
+<!ENTITY Agrave "&#xC0;">
+<!ENTITY Aacute "&#xC1;">
+<!ENTITY Acirc "&#xC2;">
+<!ENTITY Atilde "&#xC3;">
+<!ENTITY Auml "&#xC4;">
+<!ENTITY Aring "&#xC5;">
+<!ENTITY angst "&#xC5;">
+<!ENTITY AElig "&#xC6;">
+<!ENTITY Ccedil "&#xC7;">
+<!ENTITY Egrave "&#xC8;">
+<!ENTITY Eacute "&#xC9;">
+<!ENTITY Ecirc "&#xCA;">
+<!ENTITY Euml "&#xCB;">
+<!ENTITY Igrave "&#xCC;">
+<!ENTITY Iacute "&#xCD;">
+<!ENTITY Icirc "&#xCE;">
+<!ENTITY Iuml "&#xCF;">
+<!ENTITY ETH "&#xD0;">
+<!ENTITY Ntilde "&#xD1;">
+<!ENTITY Ograve "&#xD2;">
+<!ENTITY Oacute "&#xD3;">
+<!ENTITY Ocirc "&#xD4;">
+<!ENTITY Otilde "&#xD5;">
+<!ENTITY Ouml "&#xD6;">
+<!ENTITY times "&#xD7;">
+<!ENTITY Oslash "&#xD8;">
+<!ENTITY Ugrave "&#xD9;">
+<!ENTITY Uacute "&#xDA;">
+<!ENTITY Ucirc "&#xDB;">
+<!ENTITY Uuml "&#xDC;">
+<!ENTITY Yacute "&#xDD;">
+<!ENTITY THORN "&#xDE;">
+<!ENTITY szlig "&#xDF;">
+<!ENTITY agrave "&#xE0;">
+<!ENTITY aacute "&#xE1;">
+<!ENTITY acirc "&#xE2;">
+<!ENTITY atilde "&#xE3;">
+<!ENTITY auml "&#xE4;">
+<!ENTITY aring "&#xE5;">
+<!ENTITY aelig "&#xE6;">
+<!ENTITY ccedil "&#xE7;">
+<!ENTITY egrave "&#xE8;">
+<!ENTITY eacute "&#xE9;">
+<!ENTITY ecirc "&#xEA;">
+<!ENTITY euml "&#xEB;">
+<!ENTITY igrave "&#xEC;">
+<!ENTITY iacute "&#xED;">
+<!ENTITY icirc "&#xEE;">
+<!ENTITY iuml "&#xEF;">
+<!ENTITY eth "&#xF0;">
+<!ENTITY ntilde "&#xF1;">
+<!ENTITY ograve "&#xF2;">
+<!ENTITY oacute "&#xF3;">
+<!ENTITY ocirc "&#xF4;">
+<!ENTITY otilde "&#xF5;">
+<!ENTITY ouml "&#xF6;">
+<!ENTITY divide "&#xF7;">
+<!ENTITY div "&#xF7;">
+<!ENTITY oslash "&#xF8;">
+<!ENTITY ugrave "&#xF9;">
+<!ENTITY uacute "&#xFA;">
+<!ENTITY ucirc "&#xFB;">
+<!ENTITY uuml "&#xFC;">
+<!ENTITY yacute "&#xFD;">
+<!ENTITY thorn "&#xFE;">
+<!ENTITY yuml "&#xFF;">
+<!ENTITY Amacr "&#x100;">
+<!ENTITY amacr "&#x101;">
+<!ENTITY Abreve "&#x102;">
+<!ENTITY abreve "&#x103;">
+<!ENTITY Aogon "&#x104;">
+<!ENTITY aogon "&#x105;">
+<!ENTITY Cacute "&#x106;">
+<!ENTITY cacute "&#x107;">
+<!ENTITY Ccirc "&#x108;">
+<!ENTITY ccirc "&#x109;">
+<!ENTITY Cdot "&#x10A;">
+<!ENTITY cdot "&#x10B;">
+<!ENTITY Ccaron "&#x10C;">
+<!ENTITY ccaron "&#x10D;">
+<!ENTITY Dcaron "&#x10E;">
+<!ENTITY dcaron "&#x10F;">
+<!ENTITY Dstrok "&#x110;">
+<!ENTITY dstrok "&#x111;">
+<!ENTITY Emacr "&#x112;">
+<!ENTITY emacr "&#x113;">
+<!ENTITY Edot "&#x116;">
+<!ENTITY edot "&#x117;">
+<!ENTITY Eogon "&#x118;">
+<!ENTITY eogon "&#x119;">
+<!ENTITY Ecaron "&#x11A;">
+<!ENTITY ecaron "&#x11B;">
+<!ENTITY Gcirc "&#x11C;">
+<!ENTITY gcirc "&#x11D;">
+<!ENTITY Gbreve "&#x11E;">
+<!ENTITY gbreve "&#x11F;">
+<!ENTITY Gdot "&#x120;">
+<!ENTITY gdot "&#x121;">
+<!ENTITY Gcedil "&#x122;">
+<!ENTITY Hcirc "&#x124;">
+<!ENTITY hcirc "&#x125;">
+<!ENTITY Hstrok "&#x126;">
+<!ENTITY hstrok "&#x127;">
+<!ENTITY Itilde "&#x128;">
+<!ENTITY itilde "&#x129;">
+<!ENTITY Imacr "&#x12A;">
+<!ENTITY imacr "&#x12B;">
+<!ENTITY Iogon "&#x12E;">
+<!ENTITY iogon "&#x12F;">
+<!ENTITY Idot "&#x130;">
+<!ENTITY imath "&#x131;">
+<!ENTITY inodot "&#x131;">
+<!ENTITY IJlig "&#x132;">
+<!ENTITY ijlig "&#x133;">
+<!ENTITY Jcirc "&#x134;">
+<!ENTITY jcirc "&#x135;">
+<!ENTITY Kcedil "&#x136;">
+<!ENTITY kcedil "&#x137;">
+<!ENTITY kgreen "&#x138;">
+<!ENTITY Lacute "&#x139;">
+<!ENTITY lacute "&#x13A;">
+<!ENTITY Lcedil "&#x13B;">
+<!ENTITY lcedil "&#x13C;">
+<!ENTITY Lcaron "&#x13D;">
+<!ENTITY lcaron "&#x13E;">
+<!ENTITY Lmidot "&#x13F;">
+<!ENTITY lmidot "&#x140;">
+<!ENTITY Lstrok "&#x141;">
+<!ENTITY lstrok "&#x142;">
+<!ENTITY Nacute "&#x143;">
+<!ENTITY nacute "&#x144;">
+<!ENTITY Ncedil "&#x145;">
+<!ENTITY ncedil "&#x146;">
+<!ENTITY Ncaron "&#x147;">
+<!ENTITY ncaron "&#x148;">
+<!ENTITY napos "&#x149;">
+<!ENTITY ENG "&#x14A;">
+<!ENTITY eng "&#x14B;">
+<!ENTITY Omacr "&#x14C;">
+<!ENTITY omacr "&#x14D;">
+<!ENTITY Odblac "&#x150;">
+<!ENTITY odblac "&#x151;">
+<!ENTITY OElig "&#x152;">
+<!ENTITY oelig "&#x153;">
+<!ENTITY Racute "&#x154;">
+<!ENTITY racute "&#x155;">
+<!ENTITY Rcedil "&#x156;">
+<!ENTITY rcedil "&#x157;">
+<!ENTITY Rcaron "&#x158;">
+<!ENTITY rcaron "&#x159;">
+<!ENTITY Sacute "&#x15A;">
+<!ENTITY sacute "&#x15B;">
+<!ENTITY Scirc "&#x15C;">
+<!ENTITY scirc "&#x15D;">
+<!ENTITY Scedil "&#x15E;">
+<!ENTITY scedil "&#x15F;">
+<!ENTITY Scaron "&#x160;">
+<!ENTITY scaron "&#x161;">
+<!ENTITY Tcedil "&#x162;">
+<!ENTITY tcedil "&#x163;">
+<!ENTITY Tcaron "&#x164;">
+<!ENTITY tcaron "&#x165;">
+<!ENTITY Tstrok "&#x166;">
+<!ENTITY tstrok "&#x167;">
+<!ENTITY Utilde "&#x168;">
+<!ENTITY utilde "&#x169;">
+<!ENTITY Umacr "&#x16A;">
+<!ENTITY umacr "&#x16B;">
+<!ENTITY Ubreve "&#x16C;">
+<!ENTITY ubreve "&#x16D;">
+<!ENTITY Uring "&#x16E;">
+<!ENTITY uring "&#x16F;">
+<!ENTITY Udblac "&#x170;">
+<!ENTITY udblac "&#x171;">
+<!ENTITY Uogon "&#x172;">
+<!ENTITY uogon "&#x173;">
+<!ENTITY Wcirc "&#x174;">
+<!ENTITY wcirc "&#x175;">
+<!ENTITY Ycirc "&#x176;">
+<!ENTITY ycirc "&#x177;">
+<!ENTITY Yuml "&#x178;">
+<!ENTITY Zacute "&#x179;">
+<!ENTITY zacute "&#x17A;">
+<!ENTITY Zdot "&#x17B;">
+<!ENTITY zdot "&#x17C;">
+<!ENTITY Zcaron "&#x17D;">
+<!ENTITY zcaron "&#x17E;">
+<!ENTITY fnof "&#x192;">
+<!ENTITY imped "&#x1B5;">
+<!ENTITY gacute "&#x1F5;">
+<!ENTITY jmath "&#x237;">
+<!ENTITY circ "&#x2C6;">
+<!ENTITY caron "&#x2C7;">
+<!ENTITY Hacek "&#x2C7;">
+<!ENTITY breve "&#x2D8;">
+<!ENTITY Breve "&#x2D8;">
+<!ENTITY dot "&#x2D9;">
+<!ENTITY DiacriticalDot "&#x2D9;">
+<!ENTITY ring "&#x2DA;">
+<!ENTITY ogon "&#x2DB;">
+<!ENTITY tilde "&#x2DC;">
+<!ENTITY DiacriticalTilde "&#x2DC;">
+<!ENTITY dblac "&#x2DD;">
+<!ENTITY DiacriticalDoubleAcute "&#x2DD;">
+<!ENTITY DownBreve "&#x311;">
+<!ENTITY Alpha "&#x391;">
+<!ENTITY Beta "&#x392;">
+<!ENTITY Gamma "&#x393;">
+<!ENTITY Delta "&#x394;">
+<!ENTITY Epsilon "&#x395;">
+<!ENTITY Zeta "&#x396;">
+<!ENTITY Eta "&#x397;">
+<!ENTITY Theta "&#x398;">
+<!ENTITY Iota "&#x399;">
+<!ENTITY Kappa "&#x39A;">
+<!ENTITY Lambda "&#x39B;">
+<!ENTITY Mu "&#x39C;">
+<!ENTITY Nu "&#x39D;">
+<!ENTITY Xi "&#x39E;">
+<!ENTITY Omicron "&#x39F;">
+<!ENTITY Pi "&#x3A0;">
+<!ENTITY Rho "&#x3A1;">
+<!ENTITY Sigma "&#x3A3;">
+<!ENTITY Tau "&#x3A4;">
+<!ENTITY Upsilon "&#x3A5;">
+<!ENTITY Phi "&#x3A6;">
+<!ENTITY Chi "&#x3A7;">
+<!ENTITY Psi "&#x3A8;">
+<!ENTITY Omega "&#x3A9;">
+<!ENTITY ohm "&#x3A9;">
+<!ENTITY alpha "&#x3B1;">
+<!ENTITY beta "&#x3B2;">
+<!ENTITY gamma "&#x3B3;">
+<!ENTITY delta "&#x3B4;">
+<!ENTITY epsi "&#x3B5;">
+<!ENTITY epsilon "&#x3B5;">
+<!ENTITY zeta "&#x3B6;">
+<!ENTITY eta "&#x3B7;">
+<!ENTITY theta "&#x3B8;">
+<!ENTITY iota "&#x3B9;">
+<!ENTITY kappa "&#x3BA;">
+<!ENTITY lambda "&#x3BB;">
+<!ENTITY mu "&#x3BC;">
+<!ENTITY nu "&#x3BD;">
+<!ENTITY xi "&#x3BE;">
+<!ENTITY omicron "&#x3BF;">
+<!ENTITY pi "&#x3C0;">
+<!ENTITY rho "&#x3C1;">
+<!ENTITY sigmav "&#x3C2;">
+<!ENTITY varsigma "&#x3C2;">
+<!ENTITY sigmaf "&#x3C2;">
+<!ENTITY sigma "&#x3C3;">
+<!ENTITY tau "&#x3C4;">
+<!ENTITY upsi "&#x3C5;">
+<!ENTITY upsilon "&#x3C5;">
+<!ENTITY phi "&#x3C6;">
+<!ENTITY chi "&#x3C7;">
+<!ENTITY psi "&#x3C8;">
+<!ENTITY omega "&#x3C9;">
+<!ENTITY thetav "&#x3D1;">
+<!ENTITY vartheta "&#x3D1;">
+<!ENTITY thetasym "&#x3D1;">
+<!ENTITY Upsi "&#x3D2;">
+<!ENTITY upsih "&#x3D2;">
+<!ENTITY straightphi "&#x3D5;">
+<!ENTITY phiv "&#x3D5;">
+<!ENTITY varphi "&#x3D5;">
+<!ENTITY piv "&#x3D6;">
+<!ENTITY varpi "&#x3D6;">
+<!ENTITY Gammad "&#x3DC;">
+<!ENTITY gammad "&#x3DD;">
+<!ENTITY digamma "&#x3DD;">
+<!ENTITY kappav "&#x3F0;">
+<!ENTITY varkappa "&#x3F0;">
+<!ENTITY rhov "&#x3F1;">
+<!ENTITY varrho "&#x3F1;">
+<!ENTITY epsiv "&#x3F5;">
+<!ENTITY straightepsilon "&#x3F5;">
+<!ENTITY varepsilon "&#x3F5;">
+<!ENTITY bepsi "&#x3F6;">
+<!ENTITY backepsilon "&#x3F6;">
+<!ENTITY IOcy "&#x401;">
+<!ENTITY DJcy "&#x402;">
+<!ENTITY GJcy "&#x403;">
+<!ENTITY Jukcy "&#x404;">
+<!ENTITY DScy "&#x405;">
+<!ENTITY Iukcy "&#x406;">
+<!ENTITY YIcy "&#x407;">
+<!ENTITY Jsercy "&#x408;">
+<!ENTITY LJcy "&#x409;">
+<!ENTITY NJcy "&#x40A;">
+<!ENTITY TSHcy "&#x40B;">
+<!ENTITY KJcy "&#x40C;">
+<!ENTITY Ubrcy "&#x40E;">
+<!ENTITY DZcy "&#x40F;">
+<!ENTITY Acy "&#x410;">
+<!ENTITY Bcy "&#x411;">
+<!ENTITY Vcy "&#x412;">
+<!ENTITY Gcy "&#x413;">
+<!ENTITY Dcy "&#x414;">
+<!ENTITY IEcy "&#x415;">
+<!ENTITY ZHcy "&#x416;">
+<!ENTITY Zcy "&#x417;">
+<!ENTITY Icy "&#x418;">
+<!ENTITY Jcy "&#x419;">
+<!ENTITY Kcy "&#x41A;">
+<!ENTITY Lcy "&#x41B;">
+<!ENTITY Mcy "&#x41C;">
+<!ENTITY Ncy "&#x41D;">
+<!ENTITY Ocy "&#x41E;">
+<!ENTITY Pcy "&#x41F;">
+<!ENTITY Rcy "&#x420;">
+<!ENTITY Scy "&#x421;">
+<!ENTITY Tcy "&#x422;">
+<!ENTITY Ucy "&#x423;">
+<!ENTITY Fcy "&#x424;">
+<!ENTITY KHcy "&#x425;">
+<!ENTITY TScy "&#x426;">
+<!ENTITY CHcy "&#x427;">
+<!ENTITY SHcy "&#x428;">
+<!ENTITY SHCHcy "&#x429;">
+<!ENTITY HARDcy "&#x42A;">
+<!ENTITY Ycy "&#x42B;">
+<!ENTITY SOFTcy "&#x42C;">
+<!ENTITY Ecy "&#x42D;">
+<!ENTITY YUcy "&#x42E;">
+<!ENTITY YAcy "&#x42F;">
+<!ENTITY acy "&#x430;">
+<!ENTITY bcy "&#x431;">
+<!ENTITY vcy "&#x432;">
+<!ENTITY gcy "&#x433;">
+<!ENTITY dcy "&#x434;">
+<!ENTITY iecy "&#x435;">
+<!ENTITY zhcy "&#x436;">
+<!ENTITY zcy "&#x437;">
+<!ENTITY icy "&#x438;">
+<!ENTITY jcy "&#x439;">
+<!ENTITY kcy "&#x43A;">
+<!ENTITY lcy "&#x43B;">
+<!ENTITY mcy "&#x43C;">
+<!ENTITY ncy "&#x43D;">
+<!ENTITY ocy "&#x43E;">
+<!ENTITY pcy "&#x43F;">
+<!ENTITY rcy "&#x440;">
+<!ENTITY scy "&#x441;">
+<!ENTITY tcy "&#x442;">
+<!ENTITY ucy "&#x443;">
+<!ENTITY fcy "&#x444;">
+<!ENTITY khcy "&#x445;">
+<!ENTITY tscy "&#x446;">
+<!ENTITY chcy "&#x447;">
+<!ENTITY shcy "&#x448;">
+<!ENTITY shchcy "&#x449;">
+<!ENTITY hardcy "&#x44A;">
+<!ENTITY ycy "&#x44B;">
+<!ENTITY softcy "&#x44C;">
+<!ENTITY ecy "&#x44D;">
+<!ENTITY yucy "&#x44E;">
+<!ENTITY yacy "&#x44F;">
+<!ENTITY iocy "&#x451;">
+<!ENTITY djcy "&#x452;">
+<!ENTITY gjcy "&#x453;">
+<!ENTITY jukcy "&#x454;">
+<!ENTITY dscy "&#x455;">
+<!ENTITY iukcy "&#x456;">
+<!ENTITY yicy "&#x457;">
+<!ENTITY jsercy "&#x458;">
+<!ENTITY ljcy "&#x459;">
+<!ENTITY njcy "&#x45A;">
+<!ENTITY tshcy "&#x45B;">
+<!ENTITY kjcy "&#x45C;">
+<!ENTITY ubrcy "&#x45E;">
+<!ENTITY dzcy "&#x45F;">
+<!ENTITY ensp "&#x2002;">
+<!ENTITY emsp "&#x2003;">
+<!ENTITY emsp13 "&#x2004;">
+<!ENTITY emsp14 "&#x2005;">
+<!ENTITY numsp "&#x2007;">
+<!ENTITY puncsp "&#x2008;">
+<!ENTITY thinsp "&#x2009;">
+<!ENTITY ThinSpace "&#x2009;">
+<!ENTITY hairsp "&#x200A;">
+<!ENTITY VeryThinSpace "&#x200A;">
+<!ENTITY ZeroWidthSpace "&#x200B;">
+<!ENTITY NegativeVeryThinSpace "&#x200B;">
+<!ENTITY NegativeThinSpace "&#x200B;">
+<!ENTITY NegativeMediumSpace "&#x200B;">
+<!ENTITY NegativeThickSpace "&#x200B;">
+<!ENTITY zwnj "&#x200C;">
+<!ENTITY zwj "&#x200D;">
+<!ENTITY lrm "&#x200E;">
+<!ENTITY rlm "&#x200F;">
+<!ENTITY hyphen "&#x2010;">
+<!ENTITY dash "&#x2010;">
+<!ENTITY ndash "&#x2013;">
+<!ENTITY mdash "&#x2014;">
+<!ENTITY horbar "&#x2015;">
+<!ENTITY Verbar "&#x2016;">
+<!ENTITY Vert "&#x2016;">
+<!ENTITY lsquo "&#x2018;">
+<!ENTITY OpenCurlyQuote "&#x2018;">
+<!ENTITY rsquo "&#x2019;">
+<!ENTITY rsquor "&#x2019;">
+<!ENTITY CloseCurlyQuote "&#x2019;">
+<!ENTITY lsquor "&#x201A;">
+<!ENTITY sbquo "&#x201A;">
+<!ENTITY ldquo "&#x201C;">
+<!ENTITY OpenCurlyDoubleQuote "&#x201C;">
+<!ENTITY rdquo "&#x201D;">
+<!ENTITY rdquor "&#x201D;">
+<!ENTITY CloseCurlyDoubleQuote "&#x201D;">
+<!ENTITY ldquor "&#x201E;">
+<!ENTITY bdquo "&#x201E;">
+<!ENTITY dagger "&#x2020;">
+<!ENTITY Dagger "&#x2021;">
+<!ENTITY ddagger "&#x2021;">
+<!ENTITY bull "&#x2022;">
+<!ENTITY bullet "&#x2022;">
+<!ENTITY nldr "&#x2025;">
+<!ENTITY hellip "&#x2026;">
+<!ENTITY mldr "&#x2026;">
+<!ENTITY permil "&#x2030;">
+<!ENTITY pertenk "&#x2031;">
+<!ENTITY prime "&#x2032;">
+<!ENTITY Prime "&#x2033;">
+<!ENTITY tprime "&#x2034;">
+<!ENTITY bprime "&#x2035;">
+<!ENTITY backprime "&#x2035;">
+<!ENTITY lsaquo "&#x2039;">
+<!ENTITY rsaquo "&#x203A;">
+<!ENTITY oline "&#x203E;">
+<!ENTITY OverBar "&#x203E;">
+<!ENTITY caret "&#x2041;">
+<!ENTITY hybull "&#x2043;">
+<!ENTITY frasl "&#x2044;">
+<!ENTITY bsemi "&#x204F;">
+<!ENTITY qprime "&#x2057;">
+<!ENTITY MediumSpace "&#x205F;">
+<!ENTITY ThickSpace "&#x205F;&#x200A;">
+<!ENTITY NoBreak "&#x2060;">
+<!ENTITY ApplyFunction "&#x2061;">
+<!ENTITY af "&#x2061;">
+<!ENTITY InvisibleTimes "&#x2062;">
+<!ENTITY it "&#x2062;">
+<!ENTITY InvisibleComma "&#x2063;">
+<!ENTITY ic "&#x2063;">
+<!ENTITY euro "&#x20AC;">
+<!ENTITY tdot "&#x20DB;">
+<!ENTITY TripleDot "&#x20DB;">
+<!ENTITY DotDot "&#x20DC;">
+<!ENTITY Copf "&#x2102;">
+<!ENTITY complexes "&#x2102;">
+<!ENTITY incare "&#x2105;">
+<!ENTITY gscr "&#x210A;">
+<!ENTITY hamilt "&#x210B;">
+<!ENTITY HilbertSpace "&#x210B;">
+<!ENTITY Hscr "&#x210B;">
+<!ENTITY Hfr "&#x210C;">
+<!ENTITY Poincareplane "&#x210C;">
+<!ENTITY quaternions "&#x210D;">
+<!ENTITY Hopf "&#x210D;">
+<!ENTITY planckh "&#x210E;">
+<!ENTITY planck "&#x210F;">
+<!ENTITY hbar "&#x210F;">
+<!ENTITY plankv "&#x210F;">
+<!ENTITY hslash "&#x210F;">
+<!ENTITY Iscr "&#x2110;">
+<!ENTITY imagline "&#x2110;">
+<!ENTITY image "&#x2111;">
+<!ENTITY Im "&#x2111;">
+<!ENTITY imagpart "&#x2111;">
+<!ENTITY Ifr "&#x2111;">
+<!ENTITY Lscr "&#x2112;">
+<!ENTITY lagran "&#x2112;">
+<!ENTITY Laplacetrf "&#x2112;">
+<!ENTITY ell "&#x2113;">
+<!ENTITY Nopf "&#x2115;">
+<!ENTITY naturals "&#x2115;">
+<!ENTITY numero "&#x2116;">
+<!ENTITY copysr "&#x2117;">
+<!ENTITY weierp "&#x2118;">
+<!ENTITY wp "&#x2118;">
+<!ENTITY Popf "&#x2119;">
+<!ENTITY primes "&#x2119;">
+<!ENTITY rationals "&#x211A;">
+<!ENTITY Qopf "&#x211A;">
+<!ENTITY Rscr "&#x211B;">
+<!ENTITY realine "&#x211B;">
+<!ENTITY real "&#x211C;">
+<!ENTITY Re "&#x211C;">
+<!ENTITY realpart "&#x211C;">
+<!ENTITY Rfr "&#x211C;">
+<!ENTITY reals "&#x211D;">
+<!ENTITY Ropf "&#x211D;">
+<!ENTITY rx "&#x211E;">
+<!ENTITY trade "&#x2122;">
+<!ENTITY TRADE "&#x2122;">
+<!ENTITY integers "&#x2124;">
+<!ENTITY Zopf "&#x2124;">
+<!ENTITY mho "&#x2127;">
+<!ENTITY Zfr "&#x2128;">
+<!ENTITY zeetrf "&#x2128;">
+<!ENTITY iiota "&#x2129;">
+<!ENTITY bernou "&#x212C;">
+<!ENTITY Bernoullis "&#x212C;">
+<!ENTITY Bscr "&#x212C;">
+<!ENTITY Cfr "&#x212D;">
+<!ENTITY Cayleys "&#x212D;">
+<!ENTITY escr "&#x212F;">
+<!ENTITY Escr "&#x2130;">
+<!ENTITY expectation "&#x2130;">
+<!ENTITY Fscr "&#x2131;">
+<!ENTITY Fouriertrf "&#x2131;">
+<!ENTITY phmmat "&#x2133;">
+<!ENTITY Mellintrf "&#x2133;">
+<!ENTITY Mscr "&#x2133;">
+<!ENTITY order "&#x2134;">
+<!ENTITY orderof "&#x2134;">
+<!ENTITY oscr "&#x2134;">
+<!ENTITY alefsym "&#x2135;">
+<!ENTITY aleph "&#x2135;">
+<!ENTITY beth "&#x2136;">
+<!ENTITY gimel "&#x2137;">
+<!ENTITY daleth "&#x2138;">
+<!ENTITY CapitalDifferentialD "&#x2145;">
+<!ENTITY DD "&#x2145;">
+<!ENTITY DifferentialD "&#x2146;">
+<!ENTITY dd "&#x2146;">
+<!ENTITY ExponentialE "&#x2147;">
+<!ENTITY exponentiale "&#x2147;">
+<!ENTITY ee "&#x2147;">
+<!ENTITY ImaginaryI "&#x2148;">
+<!ENTITY ii "&#x2148;">
+<!ENTITY frac13 "&#x2153;">
+<!ENTITY frac23 "&#x2154;">
+<!ENTITY frac15 "&#x2155;">
+<!ENTITY frac25 "&#x2156;">
+<!ENTITY frac35 "&#x2157;">
+<!ENTITY frac45 "&#x2158;">
+<!ENTITY frac16 "&#x2159;">
+<!ENTITY frac56 "&#x215A;">
+<!ENTITY frac18 "&#x215B;">
+<!ENTITY frac38 "&#x215C;">
+<!ENTITY frac58 "&#x215D;">
+<!ENTITY frac78 "&#x215E;">
+<!ENTITY larr "&#x2190;">
+<!ENTITY leftarrow "&#x2190;">
+<!ENTITY LeftArrow "&#x2190;">
+<!ENTITY slarr "&#x2190;">
+<!ENTITY ShortLeftArrow "&#x2190;">
+<!ENTITY uarr "&#x2191;">
+<!ENTITY uparrow "&#x2191;">
+<!ENTITY UpArrow "&#x2191;">
+<!ENTITY ShortUpArrow "&#x2191;">
+<!ENTITY rarr "&#x2192;">
+<!ENTITY rightarrow "&#x2192;">
+<!ENTITY RightArrow "&#x2192;">
+<!ENTITY srarr "&#x2192;">
+<!ENTITY ShortRightArrow "&#x2192;">
+<!ENTITY darr "&#x2193;">
+<!ENTITY downarrow "&#x2193;">
+<!ENTITY DownArrow "&#x2193;">
+<!ENTITY ShortDownArrow "&#x2193;">
+<!ENTITY harr "&#x2194;">
+<!ENTITY leftrightarrow "&#x2194;">
+<!ENTITY LeftRightArrow "&#x2194;">
+<!ENTITY varr "&#x2195;">
+<!ENTITY updownarrow "&#x2195;">
+<!ENTITY UpDownArrow "&#x2195;">
+<!ENTITY nwarr "&#x2196;">
+<!ENTITY UpperLeftArrow "&#x2196;">
+<!ENTITY nwarrow "&#x2196;">
+<!ENTITY nearr "&#x2197;">
+<!ENTITY UpperRightArrow "&#x2197;">
+<!ENTITY nearrow "&#x2197;">
+<!ENTITY searr "&#x2198;">
+<!ENTITY searrow "&#x2198;">
+<!ENTITY LowerRightArrow "&#x2198;">
+<!ENTITY swarr "&#x2199;">
+<!ENTITY swarrow "&#x2199;">
+<!ENTITY LowerLeftArrow "&#x2199;">
+<!ENTITY nlarr "&#x219A;">
+<!ENTITY nleftarrow "&#x219A;">
+<!ENTITY nrarr "&#x219B;">
+<!ENTITY nrightarrow "&#x219B;">
+<!ENTITY rarrw "&#x219D;">
+<!ENTITY rightsquigarrow "&#x219D;">
+<!ENTITY nrarrw "&#x219D;&#x338;">
+<!ENTITY Larr "&#x219E;">
+<!ENTITY twoheadleftarrow "&#x219E;">
+<!ENTITY Uarr "&#x219F;">
+<!ENTITY Rarr "&#x21A0;">
+<!ENTITY twoheadrightarrow "&#x21A0;">
+<!ENTITY Darr "&#x21A1;">
+<!ENTITY larrtl "&#x21A2;">
+<!ENTITY leftarrowtail "&#x21A2;">
+<!ENTITY rarrtl "&#x21A3;">
+<!ENTITY rightarrowtail "&#x21A3;">
+<!ENTITY LeftTeeArrow "&#x21A4;">
+<!ENTITY mapstoleft "&#x21A4;">
+<!ENTITY UpTeeArrow "&#x21A5;">
+<!ENTITY mapstoup "&#x21A5;">
+<!ENTITY map "&#x21A6;">
+<!ENTITY RightTeeArrow "&#x21A6;">
+<!ENTITY mapsto "&#x21A6;">
+<!ENTITY DownTeeArrow "&#x21A7;">
+<!ENTITY mapstodown "&#x21A7;">
+<!ENTITY larrhk "&#x21A9;">
+<!ENTITY hookleftarrow "&#x21A9;">
+<!ENTITY rarrhk "&#x21AA;">
+<!ENTITY hookrightarrow "&#x21AA;">
+<!ENTITY larrlp "&#x21AB;">
+<!ENTITY looparrowleft "&#x21AB;">
+<!ENTITY rarrlp "&#x21AC;">
+<!ENTITY looparrowright "&#x21AC;">
+<!ENTITY harrw "&#x21AD;">
+<!ENTITY leftrightsquigarrow "&#x21AD;">
+<!ENTITY nharr "&#x21AE;">
+<!ENTITY nleftrightarrow "&#x21AE;">
+<!ENTITY lsh "&#x21B0;">
+<!ENTITY Lsh "&#x21B0;">
+<!ENTITY rsh "&#x21B1;">
+<!ENTITY Rsh "&#x21B1;">
+<!ENTITY ldsh "&#x21B2;">
+<!ENTITY rdsh "&#x21B3;">
+<!ENTITY crarr "&#x21B5;">
+<!ENTITY cularr "&#x21B6;">
+<!ENTITY curvearrowleft "&#x21B6;">
+<!ENTITY curarr "&#x21B7;">
+<!ENTITY curvearrowright "&#x21B7;">
+<!ENTITY olarr "&#x21BA;">
+<!ENTITY circlearrowleft "&#x21BA;">
+<!ENTITY orarr "&#x21BB;">
+<!ENTITY circlearrowright "&#x21BB;">
+<!ENTITY lharu "&#x21BC;">
+<!ENTITY LeftVector "&#x21BC;">
+<!ENTITY leftharpoonup "&#x21BC;">
+<!ENTITY lhard "&#x21BD;">
+<!ENTITY leftharpoondown "&#x21BD;">
+<!ENTITY DownLeftVector "&#x21BD;">
+<!ENTITY uharr "&#x21BE;">
+<!ENTITY upharpoonright "&#x21BE;">
+<!ENTITY RightUpVector "&#x21BE;">
+<!ENTITY uharl "&#x21BF;">
+<!ENTITY upharpoonleft "&#x21BF;">
+<!ENTITY LeftUpVector "&#x21BF;">
+<!ENTITY rharu "&#x21C0;">
+<!ENTITY RightVector "&#x21C0;">
+<!ENTITY rightharpoonup "&#x21C0;">
+<!ENTITY rhard "&#x21C1;">
+<!ENTITY rightharpoondown "&#x21C1;">
+<!ENTITY DownRightVector "&#x21C1;">
+<!ENTITY dharr "&#x21C2;">
+<!ENTITY RightDownVector "&#x21C2;">
+<!ENTITY downharpoonright "&#x21C2;">
+<!ENTITY dharl "&#x21C3;">
+<!ENTITY LeftDownVector "&#x21C3;">
+<!ENTITY downharpoonleft "&#x21C3;">
+<!ENTITY rlarr "&#x21C4;">
+<!ENTITY rightleftarrows "&#x21C4;">
+<!ENTITY RightArrowLeftArrow "&#x21C4;">
+<!ENTITY udarr "&#x21C5;">
+<!ENTITY UpArrowDownArrow "&#x21C5;">
+<!ENTITY lrarr "&#x21C6;">
+<!ENTITY leftrightarrows "&#x21C6;">
+<!ENTITY LeftArrowRightArrow "&#x21C6;">
+<!ENTITY llarr "&#x21C7;">
+<!ENTITY leftleftarrows "&#x21C7;">
+<!ENTITY uuarr "&#x21C8;">
+<!ENTITY upuparrows "&#x21C8;">
+<!ENTITY rrarr "&#x21C9;">
+<!ENTITY rightrightarrows "&#x21C9;">
+<!ENTITY ddarr "&#x21CA;">
+<!ENTITY downdownarrows "&#x21CA;">
+<!ENTITY lrhar "&#x21CB;">
+<!ENTITY ReverseEquilibrium "&#x21CB;">
+<!ENTITY leftrightharpoons "&#x21CB;">
+<!ENTITY rlhar "&#x21CC;">
+<!ENTITY rightleftharpoons "&#x21CC;">
+<!ENTITY Equilibrium "&#x21CC;">
+<!ENTITY nlArr "&#x21CD;">
+<!ENTITY nLeftarrow "&#x21CD;">
+<!ENTITY nhArr "&#x21CE;">
+<!ENTITY nLeftrightarrow "&#x21CE;">
+<!ENTITY nrArr "&#x21CF;">
+<!ENTITY nRightarrow "&#x21CF;">
+<!ENTITY lArr "&#x21D0;">
+<!ENTITY Leftarrow "&#x21D0;">
+<!ENTITY DoubleLeftArrow "&#x21D0;">
+<!ENTITY uArr "&#x21D1;">
+<!ENTITY Uparrow "&#x21D1;">
+<!ENTITY DoubleUpArrow "&#x21D1;">
+<!ENTITY rArr "&#x21D2;">
+<!ENTITY Rightarrow "&#x21D2;">
+<!ENTITY Implies "&#x21D2;">
+<!ENTITY DoubleRightArrow "&#x21D2;">
+<!ENTITY dArr "&#x21D3;">
+<!ENTITY Downarrow "&#x21D3;">
+<!ENTITY DoubleDownArrow "&#x21D3;">
+<!ENTITY hArr "&#x21D4;">
+<!ENTITY Leftrightarrow "&#x21D4;">
+<!ENTITY DoubleLeftRightArrow "&#x21D4;">
+<!ENTITY iff "&#x21D4;">
+<!ENTITY vArr "&#x21D5;">
+<!ENTITY Updownarrow "&#x21D5;">
+<!ENTITY DoubleUpDownArrow "&#x21D5;">
+<!ENTITY nwArr "&#x21D6;">
+<!ENTITY neArr "&#x21D7;">
+<!ENTITY seArr "&#x21D8;">
+<!ENTITY swArr "&#x21D9;">
+<!ENTITY lAarr "&#x21DA;">
+<!ENTITY Lleftarrow "&#x21DA;">
+<!ENTITY rAarr "&#x21DB;">
+<!ENTITY Rrightarrow "&#x21DB;">
+<!ENTITY zigrarr "&#x21DD;">
+<!ENTITY larrb "&#x21E4;">
+<!ENTITY LeftArrowBar "&#x21E4;">
+<!ENTITY rarrb "&#x21E5;">
+<!ENTITY RightArrowBar "&#x21E5;">
+<!ENTITY duarr "&#x21F5;">
+<!ENTITY DownArrowUpArrow "&#x21F5;">
+<!ENTITY loarr "&#x21FD;">
+<!ENTITY roarr "&#x21FE;">
+<!ENTITY hoarr "&#x21FF;">
+<!ENTITY forall "&#x2200;">
+<!ENTITY ForAll "&#x2200;">
+<!ENTITY comp "&#x2201;">
+<!ENTITY complement "&#x2201;">
+<!ENTITY part "&#x2202;">
+<!ENTITY PartialD "&#x2202;">
+<!ENTITY npart "&#x2202;&#x338;">
+<!ENTITY exist "&#x2203;">
+<!ENTITY Exists "&#x2203;">
+<!ENTITY nexist "&#x2204;">
+<!ENTITY NotExists "&#x2204;">
+<!ENTITY nexists "&#x2204;">
+<!ENTITY empty "&#x2205;">
+<!ENTITY emptyset "&#x2205;">
+<!ENTITY emptyv "&#x2205;">
+<!ENTITY varnothing "&#x2205;">
+<!ENTITY nabla "&#x2207;">
+<!ENTITY Del "&#x2207;">
+<!ENTITY isin "&#x2208;">
+<!ENTITY isinv "&#x2208;">
+<!ENTITY Element "&#x2208;">
+<!ENTITY in "&#x2208;">
+<!ENTITY notin "&#x2209;">
+<!ENTITY NotElement "&#x2209;">
+<!ENTITY notinva "&#x2209;">
+<!ENTITY niv "&#x220B;">
+<!ENTITY ReverseElement "&#x220B;">
+<!ENTITY ni "&#x220B;">
+<!ENTITY SuchThat "&#x220B;">
+<!ENTITY notni "&#x220C;">
+<!ENTITY notniva "&#x220C;">
+<!ENTITY NotReverseElement "&#x220C;">
+<!ENTITY prod "&#x220F;">
+<!ENTITY Product "&#x220F;">
+<!ENTITY coprod "&#x2210;">
+<!ENTITY Coproduct "&#x2210;">
+<!ENTITY sum "&#x2211;">
+<!ENTITY Sum "&#x2211;">
+<!ENTITY minus "&#x2212;">
+<!ENTITY mnplus "&#x2213;">
+<!ENTITY mp "&#x2213;">
+<!ENTITY MinusPlus "&#x2213;">
+<!ENTITY plusdo "&#x2214;">
+<!ENTITY dotplus "&#x2214;">
+<!ENTITY setmn "&#x2216;">
+<!ENTITY setminus "&#x2216;">
+<!ENTITY Backslash "&#x2216;">
+<!ENTITY ssetmn "&#x2216;">
+<!ENTITY smallsetminus "&#x2216;">
+<!ENTITY lowast "&#x2217;">
+<!ENTITY compfn "&#x2218;">
+<!ENTITY SmallCircle "&#x2218;">
+<!ENTITY radic "&#x221A;">
+<!ENTITY Sqrt "&#x221A;">
+<!ENTITY prop "&#x221D;">
+<!ENTITY propto "&#x221D;">
+<!ENTITY Proportional "&#x221D;">
+<!ENTITY vprop "&#x221D;">
+<!ENTITY varpropto "&#x221D;">
+<!ENTITY infin "&#x221E;">
+<!ENTITY angrt "&#x221F;">
+<!ENTITY ang "&#x2220;">
+<!ENTITY angle "&#x2220;">
+<!ENTITY nang "&#x2220;&#x20D2;">
+<!ENTITY angmsd "&#x2221;">
+<!ENTITY measuredangle "&#x2221;">
+<!ENTITY angsph "&#x2222;">
+<!ENTITY mid "&#x2223;">
+<!ENTITY VerticalBar "&#x2223;">
+<!ENTITY smid "&#x2223;">
+<!ENTITY shortmid "&#x2223;">
+<!ENTITY nmid "&#x2224;">
+<!ENTITY NotVerticalBar "&#x2224;">
+<!ENTITY nsmid "&#x2224;">
+<!ENTITY nshortmid "&#x2224;">
+<!ENTITY par "&#x2225;">
+<!ENTITY parallel "&#x2225;">
+<!ENTITY DoubleVerticalBar "&#x2225;">
+<!ENTITY spar "&#x2225;">
+<!ENTITY shortparallel "&#x2225;">
+<!ENTITY npar "&#x2226;">
+<!ENTITY nparallel "&#x2226;">
+<!ENTITY NotDoubleVerticalBar "&#x2226;">
+<!ENTITY nspar "&#x2226;">
+<!ENTITY nshortparallel "&#x2226;">
+<!ENTITY and "&#x2227;">
+<!ENTITY wedge "&#x2227;">
+<!ENTITY or "&#x2228;">
+<!ENTITY vee "&#x2228;">
+<!ENTITY cap "&#x2229;">
+<!ENTITY caps "&#x2229;&#xFE00;">
+<!ENTITY cup "&#x222A;">
+<!ENTITY cups "&#x222A;&#xFE00;">
+<!ENTITY int "&#x222B;">
+<!ENTITY Integral "&#x222B;">
+<!ENTITY Int "&#x222C;">
+<!ENTITY tint "&#x222D;">
+<!ENTITY iiint "&#x222D;">
+<!ENTITY conint "&#x222E;">
+<!ENTITY oint "&#x222E;">
+<!ENTITY ContourIntegral "&#x222E;">
+<!ENTITY Conint "&#x222F;">
+<!ENTITY DoubleContourIntegral "&#x222F;">
+<!ENTITY Cconint "&#x2230;">
+<!ENTITY cwint "&#x2231;">
+<!ENTITY cwconint "&#x2232;">
+<!ENTITY ClockwiseContourIntegral "&#x2232;">
+<!ENTITY awconint "&#x2233;">
+<!ENTITY CounterClockwiseContourIntegral "&#x2233;">
+<!ENTITY there4 "&#x2234;">
+<!ENTITY therefore "&#x2234;">
+<!ENTITY Therefore "&#x2234;">
+<!ENTITY becaus "&#x2235;">
+<!ENTITY because "&#x2235;">
+<!ENTITY Because "&#x2235;">
+<!ENTITY ratio "&#x2236;">
+<!ENTITY Colon "&#x2237;">
+<!ENTITY Proportion "&#x2237;">
+<!ENTITY minusd "&#x2238;">
+<!ENTITY dotminus "&#x2238;">
+<!ENTITY mDDot "&#x223A;">
+<!ENTITY homtht "&#x223B;">
+<!ENTITY sim "&#x223C;">
+<!ENTITY Tilde "&#x223C;">
+<!ENTITY thksim "&#x223C;">
+<!ENTITY thicksim "&#x223C;">
+<!ENTITY nvsim "&#x223C;&#x20D2;">
+<!ENTITY bsim "&#x223D;">
+<!ENTITY backsim "&#x223D;">
+<!ENTITY race "&#x223D;&#x331;">
+<!ENTITY ac "&#x223E;">
+<!ENTITY mstpos "&#x223E;">
+<!ENTITY acE "&#x223E;&#x333;">
+<!ENTITY acd "&#x223F;">
+<!ENTITY wreath "&#x2240;">
+<!ENTITY VerticalTilde "&#x2240;">
+<!ENTITY wr "&#x2240;">
+<!ENTITY nsim "&#x2241;">
+<!ENTITY NotTilde "&#x2241;">
+<!ENTITY esim "&#x2242;">
+<!ENTITY EqualTilde "&#x2242;">
+<!ENTITY eqsim "&#x2242;">
+<!ENTITY NotEqualTilde "&#x2242;&#x338;">
+<!ENTITY nesim "&#x2242;&#x338;">
+<!ENTITY sime "&#x2243;">
+<!ENTITY TildeEqual "&#x2243;">
+<!ENTITY simeq "&#x2243;">
+<!ENTITY nsime "&#x2244;">
+<!ENTITY nsimeq "&#x2244;">
+<!ENTITY NotTildeEqual "&#x2244;">
+<!ENTITY cong "&#x2245;">
+<!ENTITY TildeFullEqual "&#x2245;">
+<!ENTITY simne "&#x2246;">
+<!ENTITY ncong "&#x2247;">
+<!ENTITY NotTildeFullEqual "&#x2247;">
+<!ENTITY asymp "&#x2248;">
+<!ENTITY ap "&#x2248;">
+<!ENTITY TildeTilde "&#x2248;">
+<!ENTITY approx "&#x2248;">
+<!ENTITY thkap "&#x2248;">
+<!ENTITY thickapprox "&#x2248;">
+<!ENTITY nap "&#x2249;">
+<!ENTITY NotTildeTilde "&#x2249;">
+<!ENTITY napprox "&#x2249;">
+<!ENTITY ape "&#x224A;">
+<!ENTITY approxeq "&#x224A;">
+<!ENTITY apid "&#x224B;">
+<!ENTITY napid "&#x224B;&#x338;">
+<!ENTITY bcong "&#x224C;">
+<!ENTITY backcong "&#x224C;">
+<!ENTITY asympeq "&#x224D;">
+<!ENTITY CupCap "&#x224D;">
+<!ENTITY nvap "&#x224D;&#x20D2;">
+<!ENTITY bump "&#x224E;">
+<!ENTITY HumpDownHump "&#x224E;">
+<!ENTITY Bumpeq "&#x224E;">
+<!ENTITY NotHumpDownHump "&#x224E;&#x338;">
+<!ENTITY nbump "&#x224E;&#x338;">
+<!ENTITY bumpe "&#x224F;">
+<!ENTITY HumpEqual "&#x224F;">
+<!ENTITY bumpeq "&#x224F;">
+<!ENTITY nbumpe "&#x224F;&#x338;">
+<!ENTITY NotHumpEqual "&#x224F;&#x338;">
+<!ENTITY esdot "&#x2250;">
+<!ENTITY DotEqual "&#x2250;">
+<!ENTITY doteq "&#x2250;">
+<!ENTITY nedot "&#x2250;&#x338;">
+<!ENTITY eDot "&#x2251;">
+<!ENTITY doteqdot "&#x2251;">
+<!ENTITY efDot "&#x2252;">
+<!ENTITY fallingdotseq "&#x2252;">
+<!ENTITY erDot "&#x2253;">
+<!ENTITY risingdotseq "&#x2253;">
+<!ENTITY colone "&#x2254;">
+<!ENTITY coloneq "&#x2254;">
+<!ENTITY Assign "&#x2254;">
+<!ENTITY ecolon "&#x2255;">
+<!ENTITY eqcolon "&#x2255;">
+<!ENTITY ecir "&#x2256;">
+<!ENTITY eqcirc "&#x2256;">
+<!ENTITY cire "&#x2257;">
+<!ENTITY circeq "&#x2257;">
+<!ENTITY wedgeq "&#x2259;">
+<!ENTITY veeeq "&#x225A;">
+<!ENTITY trie "&#x225C;">
+<!ENTITY triangleq "&#x225C;">
+<!ENTITY equest "&#x225F;">
+<!ENTITY questeq "&#x225F;">
+<!ENTITY ne "&#x2260;">
+<!ENTITY NotEqual "&#x2260;">
+<!ENTITY equiv "&#x2261;">
+<!ENTITY Congruent "&#x2261;">
+<!ENTITY bnequiv "&#x2261;&#x20E5;">
+<!ENTITY nequiv "&#x2262;">
+<!ENTITY NotCongruent "&#x2262;">
+<!ENTITY le "&#x2264;">
+<!ENTITY leq "&#x2264;">
+<!ENTITY nvle "&#x2264;&#x20D2;">
+<!ENTITY ge "&#x2265;">
+<!ENTITY GreaterEqual "&#x2265;">
+<!ENTITY geq "&#x2265;">
+<!ENTITY nvge "&#x2265;&#x20D2;">
+<!ENTITY lE "&#x2266;">
+<!ENTITY LessFullEqual "&#x2266;">
+<!ENTITY leqq "&#x2266;">
+<!ENTITY nlE "&#x2266;&#x338;">
+<!ENTITY nleqq "&#x2266;&#x338;">
+<!ENTITY gE "&#x2267;">
+<!ENTITY GreaterFullEqual "&#x2267;">
+<!ENTITY geqq "&#x2267;">
+<!ENTITY ngE "&#x2267;&#x338;">
+<!ENTITY ngeqq "&#x2267;&#x338;">
+<!ENTITY NotGreaterFullEqual "&#x2267;&#x338;">
+<!ENTITY lnE "&#x2268;">
+<!ENTITY lneqq "&#x2268;">
+<!ENTITY lvnE "&#x2268;&#xFE00;">
+<!ENTITY lvertneqq "&#x2268;&#xFE00;">
+<!ENTITY gnE "&#x2269;">
+<!ENTITY gneqq "&#x2269;">
+<!ENTITY gvnE "&#x2269;&#xFE00;">
+<!ENTITY gvertneqq "&#x2269;&#xFE00;">
+<!ENTITY Lt "&#x226A;">
+<!ENTITY NestedLessLess "&#x226A;">
+<!ENTITY ll "&#x226A;">
+<!ENTITY nLtv "&#x226A;&#x338;">
+<!ENTITY NotLessLess "&#x226A;&#x338;">
+<!ENTITY nLt "&#x226A;&#x20D2;">
+<!ENTITY Gt "&#x226B;">
+<!ENTITY NestedGreaterGreater "&#x226B;">
+<!ENTITY gg "&#x226B;">
+<!ENTITY nGtv "&#x226B;&#x338;">
+<!ENTITY NotGreaterGreater "&#x226B;&#x338;">
+<!ENTITY nGt "&#x226B;&#x20D2;">
+<!ENTITY twixt "&#x226C;">
+<!ENTITY between "&#x226C;">
+<!ENTITY NotCupCap "&#x226D;">
+<!ENTITY nlt "&#x226E;">
+<!ENTITY NotLess "&#x226E;">
+<!ENTITY nless "&#x226E;">
+<!ENTITY ngt "&#x226F;">
+<!ENTITY NotGreater "&#x226F;">
+<!ENTITY ngtr "&#x226F;">
+<!ENTITY nle "&#x2270;">
+<!ENTITY NotLessEqual "&#x2270;">
+<!ENTITY nleq "&#x2270;">
+<!ENTITY nge "&#x2271;">
+<!ENTITY NotGreaterEqual "&#x2271;">
+<!ENTITY ngeq "&#x2271;">
+<!ENTITY lsim "&#x2272;">
+<!ENTITY LessTilde "&#x2272;">
+<!ENTITY lesssim "&#x2272;">
+<!ENTITY gsim "&#x2273;">
+<!ENTITY gtrsim "&#x2273;">
+<!ENTITY GreaterTilde "&#x2273;">
+<!ENTITY nlsim "&#x2274;">
+<!ENTITY NotLessTilde "&#x2274;">
+<!ENTITY ngsim "&#x2275;">
+<!ENTITY NotGreaterTilde "&#x2275;">
+<!ENTITY lg "&#x2276;">
+<!ENTITY lessgtr "&#x2276;">
+<!ENTITY LessGreater "&#x2276;">
+<!ENTITY gl "&#x2277;">
+<!ENTITY gtrless "&#x2277;">
+<!ENTITY GreaterLess "&#x2277;">
+<!ENTITY ntlg "&#x2278;">
+<!ENTITY NotLessGreater "&#x2278;">
+<!ENTITY ntgl "&#x2279;">
+<!ENTITY NotGreaterLess "&#x2279;">
+<!ENTITY pr "&#x227A;">
+<!ENTITY Precedes "&#x227A;">
+<!ENTITY prec "&#x227A;">
+<!ENTITY sc "&#x227B;">
+<!ENTITY Succeeds "&#x227B;">
+<!ENTITY succ "&#x227B;">
+<!ENTITY prcue "&#x227C;">
+<!ENTITY PrecedesSlantEqual "&#x227C;">
+<!ENTITY preccurlyeq "&#x227C;">
+<!ENTITY sccue "&#x227D;">
+<!ENTITY SucceedsSlantEqual "&#x227D;">
+<!ENTITY succcurlyeq "&#x227D;">
+<!ENTITY prsim "&#x227E;">
+<!ENTITY precsim "&#x227E;">
+<!ENTITY PrecedesTilde "&#x227E;">
+<!ENTITY scsim "&#x227F;">
+<!ENTITY succsim "&#x227F;">
+<!ENTITY SucceedsTilde "&#x227F;">
+<!ENTITY NotSucceedsTilde "&#x227F;&#x338;">
+<!ENTITY npr "&#x2280;">
+<!ENTITY nprec "&#x2280;">
+<!ENTITY NotPrecedes "&#x2280;">
+<!ENTITY nsc "&#x2281;">
+<!ENTITY nsucc "&#x2281;">
+<!ENTITY NotSucceeds "&#x2281;">
+<!ENTITY sub "&#x2282;">
+<!ENTITY subset "&#x2282;">
+<!ENTITY vnsub "&#x2282;&#x20D2;">
+<!ENTITY nsubset "&#x2282;&#x20D2;">
+<!ENTITY NotSubset "&#x2282;&#x20D2;">
+<!ENTITY sup "&#x2283;">
+<!ENTITY supset "&#x2283;">
+<!ENTITY Superset "&#x2283;">
+<!ENTITY vnsup "&#x2283;&#x20D2;">
+<!ENTITY nsupset "&#x2283;&#x20D2;">
+<!ENTITY NotSuperset "&#x2283;&#x20D2;">
+<!ENTITY nsub "&#x2284;">
+<!ENTITY nsup "&#x2285;">
+<!ENTITY sube "&#x2286;">
+<!ENTITY SubsetEqual "&#x2286;">
+<!ENTITY subseteq "&#x2286;">
+<!ENTITY supe "&#x2287;">
+<!ENTITY supseteq "&#x2287;">
+<!ENTITY SupersetEqual "&#x2287;">
+<!ENTITY nsube "&#x2288;">
+<!ENTITY nsubseteq "&#x2288;">
+<!ENTITY NotSubsetEqual "&#x2288;">
+<!ENTITY nsupe "&#x2289;">
+<!ENTITY nsupseteq "&#x2289;">
+<!ENTITY NotSupersetEqual "&#x2289;">
+<!ENTITY subne "&#x228A;">
+<!ENTITY subsetneq "&#x228A;">
+<!ENTITY vsubne "&#x228A;&#xFE00;">
+<!ENTITY varsubsetneq "&#x228A;&#xFE00;">
+<!ENTITY supne "&#x228B;">
+<!ENTITY supsetneq "&#x228B;">
+<!ENTITY vsupne "&#x228B;&#xFE00;">
+<!ENTITY varsupsetneq "&#x228B;&#xFE00;">
+<!ENTITY cupdot "&#x228D;">
+<!ENTITY uplus "&#x228E;">
+<!ENTITY UnionPlus "&#x228E;">
+<!ENTITY sqsub "&#x228F;">
+<!ENTITY SquareSubset "&#x228F;">
+<!ENTITY sqsubset "&#x228F;">
+<!ENTITY NotSquareSubset "&#x228F;&#x338;">
+<!ENTITY sqsup "&#x2290;">
+<!ENTITY SquareSuperset "&#x2290;">
+<!ENTITY sqsupset "&#x2290;">
+<!ENTITY NotSquareSuperset "&#x2290;&#x338;">
+<!ENTITY sqsube "&#x2291;">
+<!ENTITY SquareSubsetEqual "&#x2291;">
+<!ENTITY sqsubseteq "&#x2291;">
+<!ENTITY sqsupe "&#x2292;">
+<!ENTITY SquareSupersetEqual "&#x2292;">
+<!ENTITY sqsupseteq "&#x2292;">
+<!ENTITY sqcap "&#x2293;">
+<!ENTITY SquareIntersection "&#x2293;">
+<!ENTITY sqcaps "&#x2293;&#xFE00;">
+<!ENTITY sqcup "&#x2294;">
+<!ENTITY SquareUnion "&#x2294;">
+<!ENTITY sqcups "&#x2294;&#xFE00;">
+<!ENTITY oplus "&#x2295;">
+<!ENTITY CirclePlus "&#x2295;">
+<!ENTITY ominus "&#x2296;">
+<!ENTITY CircleMinus "&#x2296;">
+<!ENTITY otimes "&#x2297;">
+<!ENTITY CircleTimes "&#x2297;">
+<!ENTITY osol "&#x2298;">
+<!ENTITY odot "&#x2299;">
+<!ENTITY CircleDot "&#x2299;">
+<!ENTITY ocir "&#x229A;">
+<!ENTITY circledcirc "&#x229A;">
+<!ENTITY oast "&#x229B;">
+<!ENTITY circledast "&#x229B;">
+<!ENTITY odash "&#x229D;">
+<!ENTITY circleddash "&#x229D;">
+<!ENTITY plusb "&#x229E;">
+<!ENTITY boxplus "&#x229E;">
+<!ENTITY minusb "&#x229F;">
+<!ENTITY boxminus "&#x229F;">
+<!ENTITY timesb "&#x22A0;">
+<!ENTITY boxtimes "&#x22A0;">
+<!ENTITY sdotb "&#x22A1;">
+<!ENTITY dotsquare "&#x22A1;">
+<!ENTITY vdash "&#x22A2;">
+<!ENTITY RightTee "&#x22A2;">
+<!ENTITY dashv "&#x22A3;">
+<!ENTITY LeftTee "&#x22A3;">
+<!ENTITY top "&#x22A4;">
+<!ENTITY DownTee "&#x22A4;">
+<!ENTITY bottom "&#x22A5;">
+<!ENTITY bot "&#x22A5;">
+<!ENTITY perp "&#x22A5;">
+<!ENTITY UpTee "&#x22A5;">
+<!ENTITY models "&#x22A7;">
+<!ENTITY vDash "&#x22A8;">
+<!ENTITY DoubleRightTee "&#x22A8;">
+<!ENTITY Vdash "&#x22A9;">
+<!ENTITY Vvdash "&#x22AA;">
+<!ENTITY VDash "&#x22AB;">
+<!ENTITY nvdash "&#x22AC;">
+<!ENTITY nvDash "&#x22AD;">
+<!ENTITY nVdash "&#x22AE;">
+<!ENTITY nVDash "&#x22AF;">
+<!ENTITY prurel "&#x22B0;">
+<!ENTITY vltri "&#x22B2;">
+<!ENTITY vartriangleleft "&#x22B2;">
+<!ENTITY LeftTriangle "&#x22B2;">
+<!ENTITY vrtri "&#x22B3;">
+<!ENTITY vartriangleright "&#x22B3;">
+<!ENTITY RightTriangle "&#x22B3;">
+<!ENTITY ltrie "&#x22B4;">
+<!ENTITY trianglelefteq "&#x22B4;">
+<!ENTITY LeftTriangleEqual "&#x22B4;">
+<!ENTITY nvltrie "&#x22B4;&#x20D2;">
+<!ENTITY rtrie "&#x22B5;">
+<!ENTITY trianglerighteq "&#x22B5;">
+<!ENTITY RightTriangleEqual "&#x22B5;">
+<!ENTITY nvrtrie "&#x22B5;&#x20D2;">
+<!ENTITY origof "&#x22B6;">
+<!ENTITY imof "&#x22B7;">
+<!ENTITY mumap "&#x22B8;">
+<!ENTITY multimap "&#x22B8;">
+<!ENTITY hercon "&#x22B9;">
+<!ENTITY intcal "&#x22BA;">
+<!ENTITY intercal "&#x22BA;">
+<!ENTITY veebar "&#x22BB;">
+<!ENTITY barvee "&#x22BD;">
+<!ENTITY angrtvb "&#x22BE;">
+<!ENTITY lrtri "&#x22BF;">
+<!ENTITY xwedge "&#x22C0;">
+<!ENTITY Wedge "&#x22C0;">
+<!ENTITY bigwedge "&#x22C0;">
+<!ENTITY xvee "&#x22C1;">
+<!ENTITY Vee "&#x22C1;">
+<!ENTITY bigvee "&#x22C1;">
+<!ENTITY xcap "&#x22C2;">
+<!ENTITY Intersection "&#x22C2;">
+<!ENTITY bigcap "&#x22C2;">
+<!ENTITY xcup "&#x22C3;">
+<!ENTITY Union "&#x22C3;">
+<!ENTITY bigcup "&#x22C3;">
+<!ENTITY diam "&#x22C4;">
+<!ENTITY diamond "&#x22C4;">
+<!ENTITY Diamond "&#x22C4;">
+<!ENTITY sdot "&#x22C5;">
+<!ENTITY sstarf "&#x22C6;">
+<!ENTITY Star "&#x22C6;">
+<!ENTITY divonx "&#x22C7;">
+<!ENTITY divideontimes "&#x22C7;">
+<!ENTITY bowtie "&#x22C8;">
+<!ENTITY ltimes "&#x22C9;">
+<!ENTITY rtimes "&#x22CA;">
+<!ENTITY lthree "&#x22CB;">
+<!ENTITY leftthreetimes "&#x22CB;">
+<!ENTITY rthree "&#x22CC;">
+<!ENTITY rightthreetimes "&#x22CC;">
+<!ENTITY bsime "&#x22CD;">
+<!ENTITY backsimeq "&#x22CD;">
+<!ENTITY cuvee "&#x22CE;">
+<!ENTITY curlyvee "&#x22CE;">
+<!ENTITY cuwed "&#x22CF;">
+<!ENTITY curlywedge "&#x22CF;">
+<!ENTITY Sub "&#x22D0;">
+<!ENTITY Subset "&#x22D0;">
+<!ENTITY Sup "&#x22D1;">
+<!ENTITY Supset "&#x22D1;">
+<!ENTITY Cap "&#x22D2;">
+<!ENTITY Cup "&#x22D3;">
+<!ENTITY fork "&#x22D4;">
+<!ENTITY pitchfork "&#x22D4;">
+<!ENTITY epar "&#x22D5;">
+<!ENTITY ltdot "&#x22D6;">
+<!ENTITY lessdot "&#x22D6;">
+<!ENTITY gtdot "&#x22D7;">
+<!ENTITY gtrdot "&#x22D7;">
+<!ENTITY Ll "&#x22D8;">
+<!ENTITY nLl "&#x22D8;&#x338;">
+<!ENTITY Gg "&#x22D9;">
+<!ENTITY ggg "&#x22D9;">
+<!ENTITY nGg "&#x22D9;&#x338;">
+<!ENTITY leg "&#x22DA;">
+<!ENTITY LessEqualGreater "&#x22DA;">
+<!ENTITY lesseqgtr "&#x22DA;">
+<!ENTITY lesg "&#x22DA;&#xFE00;">
+<!ENTITY gel "&#x22DB;">
+<!ENTITY gtreqless "&#x22DB;">
+<!ENTITY GreaterEqualLess "&#x22DB;">
+<!ENTITY gesl "&#x22DB;&#xFE00;">
+<!ENTITY cuepr "&#x22DE;">
+<!ENTITY curlyeqprec "&#x22DE;">
+<!ENTITY cuesc "&#x22DF;">
+<!ENTITY curlyeqsucc "&#x22DF;">
+<!ENTITY nprcue "&#x22E0;">
+<!ENTITY NotPrecedesSlantEqual "&#x22E0;">
+<!ENTITY nsccue "&#x22E1;">
+<!ENTITY NotSucceedsSlantEqual "&#x22E1;">
+<!ENTITY nsqsube "&#x22E2;">
+<!ENTITY NotSquareSubsetEqual "&#x22E2;">
+<!ENTITY nsqsupe "&#x22E3;">
+<!ENTITY NotSquareSupersetEqual "&#x22E3;">
+<!ENTITY lnsim "&#x22E6;">
+<!ENTITY gnsim "&#x22E7;">
+<!ENTITY prnsim "&#x22E8;">
+<!ENTITY precnsim "&#x22E8;">
+<!ENTITY scnsim "&#x22E9;">
+<!ENTITY succnsim "&#x22E9;">
+<!ENTITY nltri "&#x22EA;">
+<!ENTITY ntriangleleft "&#x22EA;">
+<!ENTITY NotLeftTriangle "&#x22EA;">
+<!ENTITY nrtri "&#x22EB;">
+<!ENTITY ntriangleright "&#x22EB;">
+<!ENTITY NotRightTriangle "&#x22EB;">
+<!ENTITY nltrie "&#x22EC;">
+<!ENTITY ntrianglelefteq "&#x22EC;">
+<!ENTITY NotLeftTriangleEqual "&#x22EC;">
+<!ENTITY nrtrie "&#x22ED;">
+<!ENTITY ntrianglerighteq "&#x22ED;">
+<!ENTITY NotRightTriangleEqual "&#x22ED;">
+<!ENTITY vellip "&#x22EE;">
+<!ENTITY ctdot "&#x22EF;">
+<!ENTITY utdot "&#x22F0;">
+<!ENTITY dtdot "&#x22F1;">
+<!ENTITY disin "&#x22F2;">
+<!ENTITY isinsv "&#x22F3;">
+<!ENTITY isins "&#x22F4;">
+<!ENTITY isindot "&#x22F5;">
+<!ENTITY notindot "&#x22F5;&#x338;">
+<!ENTITY notinvc "&#x22F6;">
+<!ENTITY notinvb "&#x22F7;">
+<!ENTITY isinE "&#x22F9;">
+<!ENTITY notinE "&#x22F9;&#x338;">
+<!ENTITY nisd "&#x22FA;">
+<!ENTITY xnis "&#x22FB;">
+<!ENTITY nis "&#x22FC;">
+<!ENTITY notnivc "&#x22FD;">
+<!ENTITY notnivb "&#x22FE;">
+<!ENTITY barwed "&#x2305;">
+<!ENTITY barwedge "&#x2305;">
+<!ENTITY Barwed "&#x2306;">
+<!ENTITY doublebarwedge "&#x2306;">
+<!ENTITY lceil "&#x2308;">
+<!ENTITY LeftCeiling "&#x2308;">
+<!ENTITY rceil "&#x2309;">
+<!ENTITY RightCeiling "&#x2309;">
+<!ENTITY lfloor "&#x230A;">
+<!ENTITY LeftFloor "&#x230A;">
+<!ENTITY rfloor "&#x230B;">
+<!ENTITY RightFloor "&#x230B;">
+<!ENTITY drcrop "&#x230C;">
+<!ENTITY dlcrop "&#x230D;">
+<!ENTITY urcrop "&#x230E;">
+<!ENTITY ulcrop "&#x230F;">
+<!ENTITY bnot "&#x2310;">
+<!ENTITY profline "&#x2312;">
+<!ENTITY profsurf "&#x2313;">
+<!ENTITY telrec "&#x2315;">
+<!ENTITY target "&#x2316;">
+<!ENTITY ulcorn "&#x231C;">
+<!ENTITY ulcorner "&#x231C;">
+<!ENTITY urcorn "&#x231D;">
+<!ENTITY urcorner "&#x231D;">
+<!ENTITY dlcorn "&#x231E;">
+<!ENTITY llcorner "&#x231E;">
+<!ENTITY drcorn "&#x231F;">
+<!ENTITY lrcorner "&#x231F;">
+<!ENTITY frown "&#x2322;">
+<!ENTITY sfrown "&#x2322;">
+<!ENTITY smile "&#x2323;">
+<!ENTITY ssmile "&#x2323;">
+<!ENTITY cylcty "&#x232D;">
+<!ENTITY profalar "&#x232E;">
+<!ENTITY topbot "&#x2336;">
+<!ENTITY ovbar "&#x233D;">
+<!ENTITY solbar "&#x233F;">
+<!ENTITY angzarr "&#x237C;">
+<!ENTITY lmoust "&#x23B0;">
+<!ENTITY lmoustache "&#x23B0;">
+<!ENTITY rmoust "&#x23B1;">
+<!ENTITY rmoustache "&#x23B1;">
+<!ENTITY tbrk "&#x23B4;">
+<!ENTITY OverBracket "&#x23B4;">
+<!ENTITY bbrk "&#x23B5;">
+<!ENTITY UnderBracket "&#x23B5;">
+<!ENTITY bbrktbrk "&#x23B6;">
+<!ENTITY OverParenthesis "&#x23DC;">
+<!ENTITY UnderParenthesis "&#x23DD;">
+<!ENTITY OverBrace "&#x23DE;">
+<!ENTITY UnderBrace "&#x23DF;">
+<!ENTITY trpezium "&#x23E2;">
+<!ENTITY elinters "&#x23E7;">
+<!ENTITY blank "&#x2423;">
+<!ENTITY oS "&#x24C8;">
+<!ENTITY circledS "&#x24C8;">
+<!ENTITY boxh "&#x2500;">
+<!ENTITY HorizontalLine "&#x2500;">
+<!ENTITY boxv "&#x2502;">
+<!ENTITY boxdr "&#x250C;">
+<!ENTITY boxdl "&#x2510;">
+<!ENTITY boxur "&#x2514;">
+<!ENTITY boxul "&#x2518;">
+<!ENTITY boxvr "&#x251C;">
+<!ENTITY boxvl "&#x2524;">
+<!ENTITY boxhd "&#x252C;">
+<!ENTITY boxhu "&#x2534;">
+<!ENTITY boxvh "&#x253C;">
+<!ENTITY boxH "&#x2550;">
+<!ENTITY boxV "&#x2551;">
+<!ENTITY boxdR "&#x2552;">
+<!ENTITY boxDr "&#x2553;">
+<!ENTITY boxDR "&#x2554;">
+<!ENTITY boxdL "&#x2555;">
+<!ENTITY boxDl "&#x2556;">
+<!ENTITY boxDL "&#x2557;">
+<!ENTITY boxuR "&#x2558;">
+<!ENTITY boxUr "&#x2559;">
+<!ENTITY boxUR "&#x255A;">
+<!ENTITY boxuL "&#x255B;">
+<!ENTITY boxUl "&#x255C;">
+<!ENTITY boxUL "&#x255D;">
+<!ENTITY boxvR "&#x255E;">
+<!ENTITY boxVr "&#x255F;">
+<!ENTITY boxVR "&#x2560;">
+<!ENTITY boxvL "&#x2561;">
+<!ENTITY boxVl "&#x2562;">
+<!ENTITY boxVL "&#x2563;">
+<!ENTITY boxHd "&#x2564;">
+<!ENTITY boxhD "&#x2565;">
+<!ENTITY boxHD "&#x2566;">
+<!ENTITY boxHu "&#x2567;">
+<!ENTITY boxhU "&#x2568;">
+<!ENTITY boxHU "&#x2569;">
+<!ENTITY boxvH "&#x256A;">
+<!ENTITY boxVh "&#x256B;">
+<!ENTITY boxVH "&#x256C;">
+<!ENTITY uhblk "&#x2580;">
+<!ENTITY lhblk "&#x2584;">
+<!ENTITY block "&#x2588;">
+<!ENTITY blk14 "&#x2591;">
+<!ENTITY blk12 "&#x2592;">
+<!ENTITY blk34 "&#x2593;">
+<!ENTITY squ "&#x25A1;">
+<!ENTITY square "&#x25A1;">
+<!ENTITY Square "&#x25A1;">
+<!ENTITY squf "&#x25AA;">
+<!ENTITY squarf "&#x25AA;">
+<!ENTITY blacksquare "&#x25AA;">
+<!ENTITY FilledVerySmallSquare "&#x25AA;">
+<!ENTITY EmptyVerySmallSquare "&#x25AB;">
+<!ENTITY rect "&#x25AD;">
+<!ENTITY marker "&#x25AE;">
+<!ENTITY fltns "&#x25B1;">
+<!ENTITY xutri "&#x25B3;">
+<!ENTITY bigtriangleup "&#x25B3;">
+<!ENTITY utrif "&#x25B4;">
+<!ENTITY blacktriangle "&#x25B4;">
+<!ENTITY utri "&#x25B5;">
+<!ENTITY triangle "&#x25B5;">
+<!ENTITY rtrif "&#x25B8;">
+<!ENTITY blacktriangleright "&#x25B8;">
+<!ENTITY rtri "&#x25B9;">
+<!ENTITY triangleright "&#x25B9;">
+<!ENTITY xdtri "&#x25BD;">
+<!ENTITY bigtriangledown "&#x25BD;">
+<!ENTITY dtrif "&#x25BE;">
+<!ENTITY blacktriangledown "&#x25BE;">
+<!ENTITY dtri "&#x25BF;">
+<!ENTITY triangledown "&#x25BF;">
+<!ENTITY ltrif "&#x25C2;">
+<!ENTITY blacktriangleleft "&#x25C2;">
+<!ENTITY ltri "&#x25C3;">
+<!ENTITY triangleleft "&#x25C3;">
+<!ENTITY loz "&#x25CA;">
+<!ENTITY lozenge "&#x25CA;">
+<!ENTITY cir "&#x25CB;">
+<!ENTITY tridot "&#x25EC;">
+<!ENTITY xcirc "&#x25EF;">
+<!ENTITY bigcirc "&#x25EF;">
+<!ENTITY ultri "&#x25F8;">
+<!ENTITY urtri "&#x25F9;">
+<!ENTITY lltri "&#x25FA;">
+<!ENTITY EmptySmallSquare "&#x25FB;">
+<!ENTITY FilledSmallSquare "&#x25FC;">
+<!ENTITY starf "&#x2605;">
+<!ENTITY bigstar "&#x2605;">
+<!ENTITY star "&#x2606;">
+<!ENTITY phone "&#x260E;">
+<!ENTITY female "&#x2640;">
+<!ENTITY male "&#x2642;">
+<!ENTITY spades "&#x2660;">
+<!ENTITY spadesuit "&#x2660;">
+<!ENTITY clubs "&#x2663;">
+<!ENTITY clubsuit "&#x2663;">
+<!ENTITY hearts "&#x2665;">
+<!ENTITY heartsuit "&#x2665;">
+<!ENTITY diams "&#x2666;">
+<!ENTITY diamondsuit "&#x2666;">
+<!ENTITY sung "&#x266A;">
+<!ENTITY flat "&#x266D;">
+<!ENTITY natur "&#x266E;">
+<!ENTITY natural "&#x266E;">
+<!ENTITY sharp "&#x266F;">
+<!ENTITY check "&#x2713;">
+<!ENTITY checkmark "&#x2713;">
+<!ENTITY cross "&#x2717;">
+<!ENTITY malt "&#x2720;">
+<!ENTITY maltese "&#x2720;">
+<!ENTITY sext "&#x2736;">
+<!ENTITY VerticalSeparator "&#x2758;">
+<!ENTITY lbbrk "&#x2772;">
+<!ENTITY rbbrk "&#x2773;">
+<!ENTITY bsolhsub "&#x27C8;">
+<!ENTITY suphsol "&#x27C9;">
+<!ENTITY lobrk "&#x27E6;">
+<!ENTITY LeftDoubleBracket "&#x27E6;">
+<!ENTITY robrk "&#x27E7;">
+<!ENTITY RightDoubleBracket "&#x27E7;">
+<!ENTITY lang "&#x27E8;">
+<!ENTITY LeftAngleBracket "&#x27E8;">
+<!ENTITY langle "&#x27E8;">
+<!ENTITY rang "&#x27E9;">
+<!ENTITY RightAngleBracket "&#x27E9;">
+<!ENTITY rangle "&#x27E9;">
+<!ENTITY Lang "&#x27EA;">
+<!ENTITY Rang "&#x27EB;">
+<!ENTITY loang "&#x27EC;">
+<!ENTITY roang "&#x27ED;">
+<!ENTITY xlarr "&#x27F5;">
+<!ENTITY longleftarrow "&#x27F5;">
+<!ENTITY LongLeftArrow "&#x27F5;">
+<!ENTITY xrarr "&#x27F6;">
+<!ENTITY longrightarrow "&#x27F6;">
+<!ENTITY LongRightArrow "&#x27F6;">
+<!ENTITY xharr "&#x27F7;">
+<!ENTITY longleftrightarrow "&#x27F7;">
+<!ENTITY LongLeftRightArrow "&#x27F7;">
+<!ENTITY xlArr "&#x27F8;">
+<!ENTITY Longleftarrow "&#x27F8;">
+<!ENTITY DoubleLongLeftArrow "&#x27F8;">
+<!ENTITY xrArr "&#x27F9;">
+<!ENTITY Longrightarrow "&#x27F9;">
+<!ENTITY DoubleLongRightArrow "&#x27F9;">
+<!ENTITY xhArr "&#x27FA;">
+<!ENTITY Longleftrightarrow "&#x27FA;">
+<!ENTITY DoubleLongLeftRightArrow "&#x27FA;">
+<!ENTITY xmap "&#x27FC;">
+<!ENTITY longmapsto "&#x27FC;">
+<!ENTITY dzigrarr "&#x27FF;">
+<!ENTITY nvlArr "&#x2902;">
+<!ENTITY nvrArr "&#x2903;">
+<!ENTITY nvHarr "&#x2904;">
+<!ENTITY Map "&#x2905;">
+<!ENTITY lbarr "&#x290C;">
+<!ENTITY rbarr "&#x290D;">
+<!ENTITY bkarow "&#x290D;">
+<!ENTITY lBarr "&#x290E;">
+<!ENTITY rBarr "&#x290F;">
+<!ENTITY dbkarow "&#x290F;">
+<!ENTITY RBarr "&#x2910;">
+<!ENTITY drbkarow "&#x2910;">
+<!ENTITY DDotrahd "&#x2911;">
+<!ENTITY UpArrowBar "&#x2912;">
+<!ENTITY DownArrowBar "&#x2913;">
+<!ENTITY Rarrtl "&#x2916;">
+<!ENTITY latail "&#x2919;">
+<!ENTITY ratail "&#x291A;">
+<!ENTITY lAtail "&#x291B;">
+<!ENTITY rAtail "&#x291C;">
+<!ENTITY larrfs "&#x291D;">
+<!ENTITY rarrfs "&#x291E;">
+<!ENTITY larrbfs "&#x291F;">
+<!ENTITY rarrbfs "&#x2920;">
+<!ENTITY nwarhk "&#x2923;">
+<!ENTITY nearhk "&#x2924;">
+<!ENTITY searhk "&#x2925;">
+<!ENTITY hksearow "&#x2925;">
+<!ENTITY swarhk "&#x2926;">
+<!ENTITY hkswarow "&#x2926;">
+<!ENTITY nwnear "&#x2927;">
+<!ENTITY nesear "&#x2928;">
+<!ENTITY toea "&#x2928;">
+<!ENTITY seswar "&#x2929;">
+<!ENTITY tosa "&#x2929;">
+<!ENTITY swnwar "&#x292A;">
+<!ENTITY rarrc "&#x2933;">
+<!ENTITY nrarrc "&#x2933;&#x338;">
+<!ENTITY cudarrr "&#x2935;">
+<!ENTITY ldca "&#x2936;">
+<!ENTITY rdca "&#x2937;">
+<!ENTITY cudarrl "&#x2938;">
+<!ENTITY larrpl "&#x2939;">
+<!ENTITY curarrm "&#x293C;">
+<!ENTITY cularrp "&#x293D;">
+<!ENTITY rarrpl "&#x2945;">
+<!ENTITY harrcir "&#x2948;">
+<!ENTITY Uarrocir "&#x2949;">
+<!ENTITY lurdshar "&#x294A;">
+<!ENTITY ldrushar "&#x294B;">
+<!ENTITY LeftRightVector "&#x294E;">
+<!ENTITY RightUpDownVector "&#x294F;">
+<!ENTITY DownLeftRightVector "&#x2950;">
+<!ENTITY LeftUpDownVector "&#x2951;">
+<!ENTITY LeftVectorBar "&#x2952;">
+<!ENTITY RightVectorBar "&#x2953;">
+<!ENTITY RightUpVectorBar "&#x2954;">
+<!ENTITY RightDownVectorBar "&#x2955;">
+<!ENTITY DownLeftVectorBar "&#x2956;">
+<!ENTITY DownRightVectorBar "&#x2957;">
+<!ENTITY LeftUpVectorBar "&#x2958;">
+<!ENTITY LeftDownVectorBar "&#x2959;">
+<!ENTITY LeftTeeVector "&#x295A;">
+<!ENTITY RightTeeVector "&#x295B;">
+<!ENTITY RightUpTeeVector "&#x295C;">
+<!ENTITY RightDownTeeVector "&#x295D;">
+<!ENTITY DownLeftTeeVector "&#x295E;">
+<!ENTITY DownRightTeeVector "&#x295F;">
+<!ENTITY LeftUpTeeVector "&#x2960;">
+<!ENTITY LeftDownTeeVector "&#x2961;">
+<!ENTITY lHar "&#x2962;">
+<!ENTITY uHar "&#x2963;">
+<!ENTITY rHar "&#x2964;">
+<!ENTITY dHar "&#x2965;">
+<!ENTITY luruhar "&#x2966;">
+<!ENTITY ldrdhar "&#x2967;">
+<!ENTITY ruluhar "&#x2968;">
+<!ENTITY rdldhar "&#x2969;">
+<!ENTITY lharul "&#x296A;">
+<!ENTITY llhard "&#x296B;">
+<!ENTITY rharul "&#x296C;">
+<!ENTITY lrhard "&#x296D;">
+<!ENTITY udhar "&#x296E;">
+<!ENTITY UpEquilibrium "&#x296E;">
+<!ENTITY duhar "&#x296F;">
+<!ENTITY ReverseUpEquilibrium "&#x296F;">
+<!ENTITY RoundImplies "&#x2970;">
+<!ENTITY erarr "&#x2971;">
+<!ENTITY simrarr "&#x2972;">
+<!ENTITY larrsim "&#x2973;">
+<!ENTITY rarrsim "&#x2974;">
+<!ENTITY rarrap "&#x2975;">
+<!ENTITY ltlarr "&#x2976;">
+<!ENTITY gtrarr "&#x2978;">
+<!ENTITY subrarr "&#x2979;">
+<!ENTITY suplarr "&#x297B;">
+<!ENTITY lfisht "&#x297C;">
+<!ENTITY rfisht "&#x297D;">
+<!ENTITY ufisht "&#x297E;">
+<!ENTITY dfisht "&#x297F;">
+<!ENTITY lopar "&#x2985;">
+<!ENTITY ropar "&#x2986;">
+<!ENTITY lbrke "&#x298B;">
+<!ENTITY rbrke "&#x298C;">
+<!ENTITY lbrkslu "&#x298D;">
+<!ENTITY rbrksld "&#x298E;">
+<!ENTITY lbrksld "&#x298F;">
+<!ENTITY rbrkslu "&#x2990;">
+<!ENTITY langd "&#x2991;">
+<!ENTITY rangd "&#x2992;">
+<!ENTITY lparlt "&#x2993;">
+<!ENTITY rpargt "&#x2994;">
+<!ENTITY gtlPar "&#x2995;">
+<!ENTITY ltrPar "&#x2996;">
+<!ENTITY vzigzag "&#x299A;">
+<!ENTITY vangrt "&#x299C;">
+<!ENTITY angrtvbd "&#x299D;">
+<!ENTITY ange "&#x29A4;">
+<!ENTITY range "&#x29A5;">
+<!ENTITY dwangle "&#x29A6;">
+<!ENTITY uwangle "&#x29A7;">
+<!ENTITY angmsdaa "&#x29A8;">
+<!ENTITY angmsdab "&#x29A9;">
+<!ENTITY angmsdac "&#x29AA;">
+<!ENTITY angmsdad "&#x29AB;">
+<!ENTITY angmsdae "&#x29AC;">
+<!ENTITY angmsdaf "&#x29AD;">
+<!ENTITY angmsdag "&#x29AE;">
+<!ENTITY angmsdah "&#x29AF;">
+<!ENTITY bemptyv "&#x29B0;">
+<!ENTITY demptyv "&#x29B1;">
+<!ENTITY cemptyv "&#x29B2;">
+<!ENTITY raemptyv "&#x29B3;">
+<!ENTITY laemptyv "&#x29B4;">
+<!ENTITY ohbar "&#x29B5;">
+<!ENTITY omid "&#x29B6;">
+<!ENTITY opar "&#x29B7;">
+<!ENTITY operp "&#x29B9;">
+<!ENTITY olcross "&#x29BB;">
+<!ENTITY odsold "&#x29BC;">
+<!ENTITY olcir "&#x29BE;">
+<!ENTITY ofcir "&#x29BF;">
+<!ENTITY olt "&#x29C0;">
+<!ENTITY ogt "&#x29C1;">
+<!ENTITY cirscir "&#x29C2;">
+<!ENTITY cirE "&#x29C3;">
+<!ENTITY solb "&#x29C4;">
+<!ENTITY bsolb "&#x29C5;">
+<!ENTITY boxbox "&#x29C9;">
+<!ENTITY trisb "&#x29CD;">
+<!ENTITY rtriltri "&#x29CE;">
+<!ENTITY LeftTriangleBar "&#x29CF;">
+<!ENTITY NotLeftTriangleBar "&#x29CF;&#x338;">
+<!ENTITY RightTriangleBar "&#x29D0;">
+<!ENTITY NotRightTriangleBar "&#x29D0;&#x338;">
+<!ENTITY iinfin "&#x29DC;">
+<!ENTITY infintie "&#x29DD;">
+<!ENTITY nvinfin "&#x29DE;">
+<!ENTITY eparsl "&#x29E3;">
+<!ENTITY smeparsl "&#x29E4;">
+<!ENTITY eqvparsl "&#x29E5;">
+<!ENTITY lozf "&#x29EB;">
+<!ENTITY blacklozenge "&#x29EB;">
+<!ENTITY RuleDelayed "&#x29F4;">
+<!ENTITY dsol "&#x29F6;">
+<!ENTITY xodot "&#x2A00;">
+<!ENTITY bigodot "&#x2A00;">
+<!ENTITY xoplus "&#x2A01;">
+<!ENTITY bigoplus "&#x2A01;">
+<!ENTITY xotime "&#x2A02;">
+<!ENTITY bigotimes "&#x2A02;">
+<!ENTITY xuplus "&#x2A04;">
+<!ENTITY biguplus "&#x2A04;">
+<!ENTITY xsqcup "&#x2A06;">
+<!ENTITY bigsqcup "&#x2A06;">
+<!ENTITY qint "&#x2A0C;">
+<!ENTITY iiiint "&#x2A0C;">
+<!ENTITY fpartint "&#x2A0D;">
+<!ENTITY cirfnint "&#x2A10;">
+<!ENTITY awint "&#x2A11;">
+<!ENTITY rppolint "&#x2A12;">
+<!ENTITY scpolint "&#x2A13;">
+<!ENTITY npolint "&#x2A14;">
+<!ENTITY pointint "&#x2A15;">
+<!ENTITY quatint "&#x2A16;">
+<!ENTITY intlarhk "&#x2A17;">
+<!ENTITY pluscir "&#x2A22;">
+<!ENTITY plusacir "&#x2A23;">
+<!ENTITY simplus "&#x2A24;">
+<!ENTITY plusdu "&#x2A25;">
+<!ENTITY plussim "&#x2A26;">
+<!ENTITY plustwo "&#x2A27;">
+<!ENTITY mcomma "&#x2A29;">
+<!ENTITY minusdu "&#x2A2A;">
+<!ENTITY loplus "&#x2A2D;">
+<!ENTITY roplus "&#x2A2E;">
+<!ENTITY Cross "&#x2A2F;">
+<!ENTITY timesd "&#x2A30;">
+<!ENTITY timesbar "&#x2A31;">
+<!ENTITY smashp "&#x2A33;">
+<!ENTITY lotimes "&#x2A34;">
+<!ENTITY rotimes "&#x2A35;">
+<!ENTITY otimesas "&#x2A36;">
+<!ENTITY Otimes "&#x2A37;">
+<!ENTITY odiv "&#x2A38;">
+<!ENTITY triplus "&#x2A39;">
+<!ENTITY triminus "&#x2A3A;">
+<!ENTITY tritime "&#x2A3B;">
+<!ENTITY iprod "&#x2A3C;">
+<!ENTITY intprod "&#x2A3C;">
+<!ENTITY amalg "&#x2A3F;">
+<!ENTITY capdot "&#x2A40;">
+<!ENTITY ncup "&#x2A42;">
+<!ENTITY ncap "&#x2A43;">
+<!ENTITY capand "&#x2A44;">
+<!ENTITY cupor "&#x2A45;">
+<!ENTITY cupcap "&#x2A46;">
+<!ENTITY capcup "&#x2A47;">
+<!ENTITY cupbrcap "&#x2A48;">
+<!ENTITY capbrcup "&#x2A49;">
+<!ENTITY cupcup "&#x2A4A;">
+<!ENTITY capcap "&#x2A4B;">
+<!ENTITY ccups "&#x2A4C;">
+<!ENTITY ccaps "&#x2A4D;">
+<!ENTITY ccupssm "&#x2A50;">
+<!ENTITY And "&#x2A53;">
+<!ENTITY Or "&#x2A54;">
+<!ENTITY andand "&#x2A55;">
+<!ENTITY oror "&#x2A56;">
+<!ENTITY orslope "&#x2A57;">
+<!ENTITY andslope "&#x2A58;">
+<!ENTITY andv "&#x2A5A;">
+<!ENTITY orv "&#x2A5B;">
+<!ENTITY andd "&#x2A5C;">
+<!ENTITY ord "&#x2A5D;">
+<!ENTITY wedbar "&#x2A5F;">
+<!ENTITY sdote "&#x2A66;">
+<!ENTITY simdot "&#x2A6A;">
+<!ENTITY congdot "&#x2A6D;">
+<!ENTITY ncongdot "&#x2A6D;&#x338;">
+<!ENTITY easter "&#x2A6E;">
+<!ENTITY apacir "&#x2A6F;">
+<!ENTITY apE "&#x2A70;">
+<!ENTITY napE "&#x2A70;&#x338;">
+<!ENTITY eplus "&#x2A71;">
+<!ENTITY pluse "&#x2A72;">
+<!ENTITY Esim "&#x2A73;">
+<!ENTITY Colone "&#x2A74;">
+<!ENTITY Equal "&#x2A75;">
+<!ENTITY eDDot "&#x2A77;">
+<!ENTITY ddotseq "&#x2A77;">
+<!ENTITY equivDD "&#x2A78;">
+<!ENTITY ltcir "&#x2A79;">
+<!ENTITY gtcir "&#x2A7A;">
+<!ENTITY ltquest "&#x2A7B;">
+<!ENTITY gtquest "&#x2A7C;">
+<!ENTITY les "&#x2A7D;">
+<!ENTITY LessSlantEqual "&#x2A7D;">
+<!ENTITY leqslant "&#x2A7D;">
+<!ENTITY nles "&#x2A7D;&#x338;">
+<!ENTITY NotLessSlantEqual "&#x2A7D;&#x338;">
+<!ENTITY nleqslant "&#x2A7D;&#x338;">
+<!ENTITY ges "&#x2A7E;">
+<!ENTITY GreaterSlantEqual "&#x2A7E;">
+<!ENTITY geqslant "&#x2A7E;">
+<!ENTITY nges "&#x2A7E;&#x338;">
+<!ENTITY NotGreaterSlantEqual "&#x2A7E;&#x338;">
+<!ENTITY ngeqslant "&#x2A7E;&#x338;">
+<!ENTITY lesdot "&#x2A7F;">
+<!ENTITY gesdot "&#x2A80;">
+<!ENTITY lesdoto "&#x2A81;">
+<!ENTITY gesdoto "&#x2A82;">
+<!ENTITY lesdotor "&#x2A83;">
+<!ENTITY gesdotol "&#x2A84;">
+<!ENTITY lap "&#x2A85;">
+<!ENTITY lessapprox "&#x2A85;">
+<!ENTITY gap "&#x2A86;">
+<!ENTITY gtrapprox "&#x2A86;">
+<!ENTITY lne "&#x2A87;">
+<!ENTITY lneq "&#x2A87;">
+<!ENTITY gne "&#x2A88;">
+<!ENTITY gneq "&#x2A88;">
+<!ENTITY lnap "&#x2A89;">
+<!ENTITY lnapprox "&#x2A89;">
+<!ENTITY gnap "&#x2A8A;">
+<!ENTITY gnapprox "&#x2A8A;">
+<!ENTITY lEg "&#x2A8B;">
+<!ENTITY lesseqqgtr "&#x2A8B;">
+<!ENTITY gEl "&#x2A8C;">
+<!ENTITY gtreqqless "&#x2A8C;">
+<!ENTITY lsime "&#x2A8D;">
+<!ENTITY gsime "&#x2A8E;">
+<!ENTITY lsimg "&#x2A8F;">
+<!ENTITY gsiml "&#x2A90;">
+<!ENTITY lgE "&#x2A91;">
+<!ENTITY glE "&#x2A92;">
+<!ENTITY lesges "&#x2A93;">
+<!ENTITY gesles "&#x2A94;">
+<!ENTITY els "&#x2A95;">
+<!ENTITY eqslantless "&#x2A95;">
+<!ENTITY egs "&#x2A96;">
+<!ENTITY eqslantgtr "&#x2A96;">
+<!ENTITY elsdot "&#x2A97;">
+<!ENTITY egsdot "&#x2A98;">
+<!ENTITY el "&#x2A99;">
+<!ENTITY eg "&#x2A9A;">
+<!ENTITY siml "&#x2A9D;">
+<!ENTITY simg "&#x2A9E;">
+<!ENTITY simlE "&#x2A9F;">
+<!ENTITY simgE "&#x2AA0;">
+<!ENTITY LessLess "&#x2AA1;">
+<!ENTITY NotNestedLessLess "&#x2AA1;&#x338;">
+<!ENTITY GreaterGreater "&#x2AA2;">
+<!ENTITY NotNestedGreaterGreater "&#x2AA2;&#x338;">
+<!ENTITY glj "&#x2AA4;">
+<!ENTITY gla "&#x2AA5;">
+<!ENTITY ltcc "&#x2AA6;">
+<!ENTITY gtcc "&#x2AA7;">
+<!ENTITY lescc "&#x2AA8;">
+<!ENTITY gescc "&#x2AA9;">
+<!ENTITY smt "&#x2AAA;">
+<!ENTITY lat "&#x2AAB;">
+<!ENTITY smte "&#x2AAC;">
+<!ENTITY smtes "&#x2AAC;&#xFE00;">
+<!ENTITY late "&#x2AAD;">
+<!ENTITY lates "&#x2AAD;&#xFE00;">
+<!ENTITY bumpE "&#x2AAE;">
+<!ENTITY pre "&#x2AAF;">
+<!ENTITY preceq "&#x2AAF;">
+<!ENTITY PrecedesEqual "&#x2AAF;">
+<!ENTITY npre "&#x2AAF;&#x338;">
+<!ENTITY npreceq "&#x2AAF;&#x338;">
+<!ENTITY NotPrecedesEqual "&#x2AAF;&#x338;">
+<!ENTITY sce "&#x2AB0;">
+<!ENTITY succeq "&#x2AB0;">
+<!ENTITY SucceedsEqual "&#x2AB0;">
+<!ENTITY nsce "&#x2AB0;&#x338;">
+<!ENTITY nsucceq "&#x2AB0;&#x338;">
+<!ENTITY NotSucceedsEqual "&#x2AB0;&#x338;">
+<!ENTITY prE "&#x2AB3;">
+<!ENTITY scE "&#x2AB4;">
+<!ENTITY prnE "&#x2AB5;">
+<!ENTITY precneqq "&#x2AB5;">
+<!ENTITY scnE "&#x2AB6;">
+<!ENTITY succneqq "&#x2AB6;">
+<!ENTITY prap "&#x2AB7;">
+<!ENTITY precapprox "&#x2AB7;">
+<!ENTITY scap "&#x2AB8;">
+<!ENTITY succapprox "&#x2AB8;">
+<!ENTITY prnap "&#x2AB9;">
+<!ENTITY precnapprox "&#x2AB9;">
+<!ENTITY scnap "&#x2ABA;">
+<!ENTITY succnapprox "&#x2ABA;">
+<!ENTITY Pr "&#x2ABB;">
+<!ENTITY Sc "&#x2ABC;">
+<!ENTITY subdot "&#x2ABD;">
+<!ENTITY supdot "&#x2ABE;">
+<!ENTITY subplus "&#x2ABF;">
+<!ENTITY supplus "&#x2AC0;">
+<!ENTITY submult "&#x2AC1;">
+<!ENTITY supmult "&#x2AC2;">
+<!ENTITY subedot "&#x2AC3;">
+<!ENTITY supedot "&#x2AC4;">
+<!ENTITY subE "&#x2AC5;">
+<!ENTITY subseteqq "&#x2AC5;">
+<!ENTITY nsubE "&#x2AC5;&#x338;">
+<!ENTITY nsubseteqq "&#x2AC5;&#x338;">
+<!ENTITY supE "&#x2AC6;">
+<!ENTITY supseteqq "&#x2AC6;">
+<!ENTITY nsupE "&#x2AC6;&#x338;">
+<!ENTITY nsupseteqq "&#x2AC6;&#x338;">
+<!ENTITY subsim "&#x2AC7;">
+<!ENTITY supsim "&#x2AC8;">
+<!ENTITY subnE "&#x2ACB;">
+<!ENTITY subsetneqq "&#x2ACB;">
+<!ENTITY vsubnE "&#x2ACB;&#xFE00;">
+<!ENTITY varsubsetneqq "&#x2ACB;&#xFE00;">
+<!ENTITY supnE "&#x2ACC;">
+<!ENTITY supsetneqq "&#x2ACC;">
+<!ENTITY vsupnE "&#x2ACC;&#xFE00;">
+<!ENTITY varsupsetneqq "&#x2ACC;&#xFE00;">
+<!ENTITY csub "&#x2ACF;">
+<!ENTITY csup "&#x2AD0;">
+<!ENTITY csube "&#x2AD1;">
+<!ENTITY csupe "&#x2AD2;">
+<!ENTITY subsup "&#x2AD3;">
+<!ENTITY supsub "&#x2AD4;">
+<!ENTITY subsub "&#x2AD5;">
+<!ENTITY supsup "&#x2AD6;">
+<!ENTITY suphsub "&#x2AD7;">
+<!ENTITY supdsub "&#x2AD8;">
+<!ENTITY forkv "&#x2AD9;">
+<!ENTITY topfork "&#x2ADA;">
+<!ENTITY mlcp "&#x2ADB;">
+<!ENTITY Dashv "&#x2AE4;">
+<!ENTITY DoubleLeftTee "&#x2AE4;">
+<!ENTITY Vdashl "&#x2AE6;">
+<!ENTITY Barv "&#x2AE7;">
+<!ENTITY vBar "&#x2AE8;">
+<!ENTITY vBarv "&#x2AE9;">
+<!ENTITY Vbar "&#x2AEB;">
+<!ENTITY Not "&#x2AEC;">
+<!ENTITY bNot "&#x2AED;">
+<!ENTITY rnmid "&#x2AEE;">
+<!ENTITY cirmid "&#x2AEF;">
+<!ENTITY midcir "&#x2AF0;">
+<!ENTITY topcir "&#x2AF1;">
+<!ENTITY nhpar "&#x2AF2;">
+<!ENTITY parsim "&#x2AF3;">
+<!ENTITY parsl "&#x2AFD;">
+<!ENTITY nparsl "&#x2AFD;&#x20E5;">
+<!ENTITY fflig "&#xFB00;">
+<!ENTITY filig "&#xFB01;">
+<!ENTITY fllig "&#xFB02;">
+<!ENTITY ffilig "&#xFB03;">
+<!ENTITY ffllig "&#xFB04;">
+<!ENTITY Ascr "&#x1D49C;">
+<!ENTITY Cscr "&#x1D49E;">
+<!ENTITY Dscr "&#x1D49F;">
+<!ENTITY Gscr "&#x1D4A2;">
+<!ENTITY Jscr "&#x1D4A5;">
+<!ENTITY Kscr "&#x1D4A6;">
+<!ENTITY Nscr "&#x1D4A9;">
+<!ENTITY Oscr "&#x1D4AA;">
+<!ENTITY Pscr "&#x1D4AB;">
+<!ENTITY Qscr "&#x1D4AC;">
+<!ENTITY Sscr "&#x1D4AE;">
+<!ENTITY Tscr "&#x1D4AF;">
+<!ENTITY Uscr "&#x1D4B0;">
+<!ENTITY Vscr "&#x1D4B1;">
+<!ENTITY Wscr "&#x1D4B2;">
+<!ENTITY Xscr "&#x1D4B3;">
+<!ENTITY Yscr "&#x1D4B4;">
+<!ENTITY Zscr "&#x1D4B5;">
+<!ENTITY ascr "&#x1D4B6;">
+<!ENTITY bscr "&#x1D4B7;">
+<!ENTITY cscr "&#x1D4B8;">
+<!ENTITY dscr "&#x1D4B9;">
+<!ENTITY fscr "&#x1D4BB;">
+<!ENTITY hscr "&#x1D4BD;">
+<!ENTITY iscr "&#x1D4BE;">
+<!ENTITY jscr "&#x1D4BF;">
+<!ENTITY kscr "&#x1D4C0;">
+<!ENTITY lscr "&#x1D4C1;">
+<!ENTITY mscr "&#x1D4C2;">
+<!ENTITY nscr "&#x1D4C3;">
+<!ENTITY pscr "&#x1D4C5;">
+<!ENTITY qscr "&#x1D4C6;">
+<!ENTITY rscr "&#x1D4C7;">
+<!ENTITY sscr "&#x1D4C8;">
+<!ENTITY tscr "&#x1D4C9;">
+<!ENTITY uscr "&#x1D4CA;">
+<!ENTITY vscr "&#x1D4CB;">
+<!ENTITY wscr "&#x1D4CC;">
+<!ENTITY xscr "&#x1D4CD;">
+<!ENTITY yscr "&#x1D4CE;">
+<!ENTITY zscr "&#x1D4CF;">
+<!ENTITY Afr "&#x1D504;">
+<!ENTITY Bfr "&#x1D505;">
+<!ENTITY Dfr "&#x1D507;">
+<!ENTITY Efr "&#x1D508;">
+<!ENTITY Ffr "&#x1D509;">
+<!ENTITY Gfr "&#x1D50A;">
+<!ENTITY Jfr "&#x1D50D;">
+<!ENTITY Kfr "&#x1D50E;">
+<!ENTITY Lfr "&#x1D50F;">
+<!ENTITY Mfr "&#x1D510;">
+<!ENTITY Nfr "&#x1D511;">
+<!ENTITY Ofr "&#x1D512;">
+<!ENTITY Pfr "&#x1D513;">
+<!ENTITY Qfr "&#x1D514;">
+<!ENTITY Sfr "&#x1D516;">
+<!ENTITY Tfr "&#x1D517;">
+<!ENTITY Ufr "&#x1D518;">
+<!ENTITY Vfr "&#x1D519;">
+<!ENTITY Wfr "&#x1D51A;">
+<!ENTITY Xfr "&#x1D51B;">
+<!ENTITY Yfr "&#x1D51C;">
+<!ENTITY afr "&#x1D51E;">
+<!ENTITY bfr "&#x1D51F;">
+<!ENTITY cfr "&#x1D520;">
+<!ENTITY dfr "&#x1D521;">
+<!ENTITY efr "&#x1D522;">
+<!ENTITY ffr "&#x1D523;">
+<!ENTITY gfr "&#x1D524;">
+<!ENTITY hfr "&#x1D525;">
+<!ENTITY ifr "&#x1D526;">
+<!ENTITY jfr "&#x1D527;">
+<!ENTITY kfr "&#x1D528;">
+<!ENTITY lfr "&#x1D529;">
+<!ENTITY mfr "&#x1D52A;">
+<!ENTITY nfr "&#x1D52B;">
+<!ENTITY ofr "&#x1D52C;">
+<!ENTITY pfr "&#x1D52D;">
+<!ENTITY qfr "&#x1D52E;">
+<!ENTITY rfr "&#x1D52F;">
+<!ENTITY sfr "&#x1D530;">
+<!ENTITY tfr "&#x1D531;">
+<!ENTITY ufr "&#x1D532;">
+<!ENTITY vfr "&#x1D533;">
+<!ENTITY wfr "&#x1D534;">
+<!ENTITY xfr "&#x1D535;">
+<!ENTITY yfr "&#x1D536;">
+<!ENTITY zfr "&#x1D537;">
+<!ENTITY Aopf "&#x1D538;">
+<!ENTITY Bopf "&#x1D539;">
+<!ENTITY Dopf "&#x1D53B;">
+<!ENTITY Eopf "&#x1D53C;">
+<!ENTITY Fopf "&#x1D53D;">
+<!ENTITY Gopf "&#x1D53E;">
+<!ENTITY Iopf "&#x1D540;">
+<!ENTITY Jopf "&#x1D541;">
+<!ENTITY Kopf "&#x1D542;">
+<!ENTITY Lopf "&#x1D543;">
+<!ENTITY Mopf "&#x1D544;">
+<!ENTITY Oopf "&#x1D546;">
+<!ENTITY Sopf "&#x1D54A;">
+<!ENTITY Topf "&#x1D54B;">
+<!ENTITY Uopf "&#x1D54C;">
+<!ENTITY Vopf "&#x1D54D;">
+<!ENTITY Wopf "&#x1D54E;">
+<!ENTITY Xopf "&#x1D54F;">
+<!ENTITY Yopf "&#x1D550;">
+<!ENTITY aopf "&#x1D552;">
+<!ENTITY bopf "&#x1D553;">
+<!ENTITY copf "&#x1D554;">
+<!ENTITY dopf "&#x1D555;">
+<!ENTITY eopf "&#x1D556;">
+<!ENTITY fopf "&#x1D557;">
+<!ENTITY gopf "&#x1D558;">
+<!ENTITY hopf "&#x1D559;">
+<!ENTITY iopf "&#x1D55A;">
+<!ENTITY jopf "&#x1D55B;">
+<!ENTITY kopf "&#x1D55C;">
+<!ENTITY lopf "&#x1D55D;">
+<!ENTITY mopf "&#x1D55E;">
+<!ENTITY nopf "&#x1D55F;">
+<!ENTITY oopf "&#x1D560;">
+<!ENTITY popf "&#x1D561;">
+<!ENTITY qopf "&#x1D562;">
+<!ENTITY ropf "&#x1D563;">
+<!ENTITY sopf "&#x1D564;">
+<!ENTITY topf "&#x1D565;">
+<!ENTITY uopf "&#x1D566;">
+<!ENTITY vopf "&#x1D567;">
+<!ENTITY wopf "&#x1D568;">
+<!ENTITY xopf "&#x1D569;">
+<!ENTITY yopf "&#x1D56A;">
+<!ENTITY zopf "&#x1D56B;">
diff --git a/testing/web-platform/tests/tools/manifest/commands.json b/testing/web-platform/tests/tools/manifest/commands.json
new file mode 100644
index 0000000000..cef6d22473
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/commands.json
@@ -0,0 +1,30 @@
+{
+ "manifest": {
+ "path": "update.py",
+ "script": "run",
+ "parser": "create_parser",
+ "help": "Update the MANIFEST.json file",
+ "virtualenv": false
+ },
+ "manifest-download": {
+ "path": "download.py",
+ "script": "run",
+ "parser": "create_parser",
+ "help": "Download recent pregenerated MANIFEST.json file",
+ "virtualenv": false
+ },
+ "test-paths": {
+ "path": "testpaths.py",
+ "script": "run",
+ "parser": "create_parser",
+ "help": "Print test paths given a set of test ids",
+ "virtualenv": false
+ },
+ "spec": {
+ "path": "spec.py",
+ "script": "run",
+ "parser": "create_parser",
+ "help": "Update the SPEC_MANIFEST.json file",
+ "virtualenv": false
+ }
+}
diff --git a/testing/web-platform/tests/tools/manifest/download.py b/testing/web-platform/tests/tools/manifest/download.py
new file mode 100644
index 0000000000..8527fb232a
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/download.py
@@ -0,0 +1,191 @@
+import argparse
+import bz2
+import gzip
+import json
+import io
+import os
+from datetime import datetime, timedelta
+from typing import Any, Callable, List, Optional, Text
+from urllib.request import urlopen
+
+try:
+ import zstandard
+except ImportError:
+ zstandard = None
+
+from .utils import git
+
+from . import log
+
+
+here = os.path.dirname(__file__)
+
+wpt_root = os.path.abspath(os.path.join(here, os.pardir, os.pardir))
+logger = log.get_logger()
+
+
def abs_path(path: Text) -> Text:
    """Expand a leading ``~`` in *path* and return its absolute form."""
    expanded = os.path.expanduser(path)
    return os.path.abspath(expanded)
+
+
def should_download(manifest_path: Text, rebuild_time: timedelta = timedelta(days=5)) -> bool:
    """Return True when the manifest at *manifest_path* is absent or stale.

    A file whose mtime is older than *rebuild_time* (default five days)
    counts as stale and should be re-downloaded.
    """
    if os.path.exists(manifest_path):
        cutoff = datetime.now() - rebuild_time
        modified = datetime.fromtimestamp(os.path.getmtime(manifest_path))
        if modified >= cutoff:
            logger.info("Skipping manifest download because existing file is recent")
            return False
    return True
+
+
def merge_pr_tags(repo_root: Text, max_count: int = 50) -> List[Text]:
    """Collect ``merge_pr_*`` tag names reachable from the last *max_count* commits.

    Returns an empty list when *repo_root* is not a usable git checkout.
    """
    gitfunc = git(repo_root)
    if gitfunc is None:
        return []
    found: List[Text] = []
    output = gitfunc("log", "--format=%D", "--max-count=%s" % max_count)
    for line in output.split("\n"):
        for ref in line.split(", "):
            if ref.startswith("tag: merge_pr_"):
                # Strip the leading "tag: " decoration, keeping the tag name.
                found.append(ref[5:])
    return found
+
+
def score_name(name: Text) -> Optional[int]:
    """Score how much we like each filename, lower wins, None rejects"""
    # Accept both ways of naming the manifest asset, even though
    # there's no longer a reason to include the commit sha.
    if not name.startswith(("MANIFEST-", "MANIFEST.")):
        return None
    if zstandard and name.endswith("json.zst"):
        return 1
    if name.endswith(".json.bz2"):
        return 2
    if name.endswith(".json.gz"):
        return 3
    return None
+
+
def github_url(tags: List[Text]) -> Optional[List[Text]]:
    """Find download URLs for manifest assets attached to a GitHub release.

    Tries each tag in *tags* in order until one maps to a release on the
    web-platform-tests/wpt repository, then returns that release's asset
    URLs sorted best-first (ranking via score_name).  Returns None when no
    tag yields a release or the release payload is not valid JSON.
    """
    for tag in tags:
        url = "https://api.github.com/repos/web-platform-tests/wpt/releases/tags/%s" % tag
        try:
            resp = urlopen(url)
        except Exception:
            # Network-level failure: try the next (older) tag.
            logger.warning("Fetching %s failed" % url)
            continue

        if resp.code != 200:
            logger.warning("Fetching %s failed; got HTTP status %d" % (url, resp.code))
            continue

        try:
            release = json.load(resp.fp)
        except ValueError:
            # Malformed response body: give up entirely rather than trying
            # older tags (unlike the network-error cases above, which continue).
            logger.warning("Response was not valid JSON")
            return None

        # Rank this release's assets; unrecognised names are skipped
        # (score_name returns None for them), lower scores sort first.
        candidates = []
        for item in release["assets"]:
            score = score_name(item["name"])
            if score is not None:
                candidates.append((score, item["browser_download_url"]))

        return [item[1] for item in sorted(candidates)]

    return None
+
+
def download_manifest(
    manifest_path: Text,
    tags_func: Callable[[], List[Text]],
    url_func: Callable[[List[Text]], Optional[List[Text]]],
    force: bool = False
) -> bool:
    """Download and decompress a pregenerated manifest to *manifest_path*.

    :param manifest_path: Destination file for the decompressed manifest.
    :param tags_func: Zero-argument callable returning candidate git tags.
    :param url_func: Maps the tag list to candidate download URLs (best
        first), or None when no release is found.
    :param force: Download even when the existing manifest file is recent.
    :return: True if a manifest was downloaded and written, else False.
    """
    if not force and not should_download(manifest_path):
        return False

    tags = tags_func()

    urls = url_func(tags)
    if not urls:
        logger.warning("No generated manifest found")
        return False

    # Try each candidate URL in preference order; `break` on the first one
    # that downloads and decompresses, otherwise the for/else returns False.
    for url in urls:
        logger.info("Downloading manifest from %s" % url)
        try:
            resp = urlopen(url)
        except Exception:
            logger.warning("Downloading pregenerated manifest failed")
            continue

        if resp.code != 200:
            logger.warning("Downloading pregenerated manifest failed; got HTTP status %d" %
                           resp.code)
            continue

        # The decompression scheme is chosen by the URL's file extension.
        if url.endswith(".zst"):
            if not zstandard:
                # zstandard support is optional; skip zstd assets without it.
                continue
            try:
                dctx = zstandard.ZstdDecompressor()
                decompressed = dctx.decompress(resp.read())
            except OSError:
                logger.warning("Failed to decompress downloaded file")
                continue
        elif url.endswith(".bz2"):
            try:
                decompressed = bz2.decompress(resp.read())
            except OSError:
                logger.warning("Failed to decompress downloaded file")
                continue
        elif url.endswith(".gz"):
            fileobj = io.BytesIO(resp.read())
            try:
                with gzip.GzipFile(fileobj=fileobj) as gzf:
                    data = gzf.read()
                decompressed = data
            except OSError:
                logger.warning("Failed to decompress downloaded file")
                continue
        else:
            logger.warning("Unknown file extension: %s" % url)
            continue
        break
    else:
        # Every candidate URL failed to download or decompress.
        return False

    try:
        with open(manifest_path, "wb") as f:
            f.write(decompressed)
    except Exception:
        logger.warning("Failed to write manifest")
        return False
    logger.info("Manifest downloaded")
    return True
+
+
def create_parser() -> argparse.ArgumentParser:
    """Build the command-line parser for the manifest-download command."""
    parser = argparse.ArgumentParser()
    parser.add_argument("-p", "--path", type=abs_path,
                        help="Path to manifest file.")
    parser.add_argument("--tests-root", type=abs_path, default=wpt_root,
                        help="Path to root of tests.")
    parser.add_argument("--force", action="store_true",
                        help="Always download, even if the existing manifest is recent")
    return parser
+
+
def download_from_github(path: Text, tests_root: Text, force: bool = False) -> bool:
    """Fetch a pregenerated manifest for *tests_root* from GitHub into *path*."""
    def tags() -> List[Text]:
        return merge_pr_tags(tests_root)

    return download_manifest(path, tags, github_url, force=force)
+
+
def run(**kwargs: Any) -> int:
    """Command entry point; returns 0 on success and 1 on failure."""
    path = kwargs["path"]
    if path is None:
        # Default to MANIFEST.json at the root of the tests checkout.
        path = os.path.join(kwargs["tests_root"], "MANIFEST.json")
    ok = download_from_github(path, kwargs["tests_root"], kwargs["force"])
    if ok:
        return 0
    return 1
diff --git a/testing/web-platform/tests/tools/manifest/item.py b/testing/web-platform/tests/tools/manifest/item.py
new file mode 100644
index 0000000000..86f7bd6020
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/item.py
@@ -0,0 +1,376 @@
import os.path
from abc import ABCMeta, abstractmethod, abstractproperty
from inspect import isabstract
from typing import (Any, Dict, Hashable, List, Optional, Sequence, Text, Tuple, Type,
                    TYPE_CHECKING, Union, cast)
from urllib.parse import urljoin, urlparse, parse_qs

from .utils import to_os_path
+
+if TYPE_CHECKING:
+ from .manifest import Manifest
+
+Fuzzy = Dict[Optional[Tuple[str, str, str]], List[int]]
+PageRanges = Dict[str, List[int]]
+item_types: Dict[str, Type["ManifestItem"]] = {}
+
+
class ManifestItemMeta(ABCMeta):
    """Custom metaclass that registers all the subclasses in the
    item_types dictionary according to the value of their item_type
    attribute, and otherwise behaves like an ABCMeta."""

    def __new__(cls: Type["ManifestItemMeta"], name: str, bases: Tuple[type], attrs: Dict[str, Any]) -> "ManifestItemMeta":
        inst = super().__new__(cls, name, bases, attrs)
        # Classes that still have unimplemented abstract members (such as
        # the ManifestItem base itself) carry no usable item_type and are
        # not registered.
        if isabstract(inst):
            return inst

        assert issubclass(inst, ManifestItem)
        # On a concrete subclass item_type is a plain string class
        # attribute that satisfies the abstract property.
        item_type = cast(str, inst.item_type)

        item_types[item_type] = inst

        return inst
+
+
class ManifestItem(metaclass=ManifestItemMeta):
    """Abstract base class for a single manifest entry.

    Concrete subclasses provide ``item_type`` (a plain string class
    attribute, under which ManifestItemMeta registers the class) and
    ``id``.
    """

    __slots__ = ("_tests_root", "path")

    def __init__(self, tests_root: Text, path: Text) -> None:
        """
        :param tests_root: Absolute path of the tests root directory.
        :param path: Source file path relative to tests_root, using the
            native os.path.sep separator.
        """
        self._tests_root = tests_root
        self.path = path

    # ``property`` stacked on ``abstractmethod`` replaces the deprecated
    # ``abstractproperty`` decorator; the behaviour is identical.
    @property
    @abstractmethod
    def id(self) -> Text:
        """The test's id (usually its url)"""
        pass

    @property
    @abstractmethod
    def item_type(self) -> str:
        """The item's type"""
        pass

    @property
    def path_parts(self) -> Tuple[Text, ...]:
        """The relative path split into its individual components."""
        return tuple(self.path.split(os.path.sep))

    def key(self) -> Hashable:
        """A unique identifier for the test"""
        return (self.item_type, self.id)

    def __eq__(self, other: Any) -> bool:
        # Items compare equal whenever their keys do, regardless of class.
        if not hasattr(other, "key"):
            return False
        return bool(self.key() == other.key())

    def __hash__(self) -> int:
        return hash(self.key())

    def __repr__(self) -> str:
        return f"<{self.__module__}.{self.__class__.__name__} id={self.id!r}, path={self.path!r}>"

    def to_json(self) -> Tuple[Any, ...]:
        """Serialize to the tuple form stored in the manifest JSON."""
        return ()

    @classmethod
    def from_json(cls,
                  manifest: "Manifest",
                  path: Text,
                  obj: Any
                  ) -> "ManifestItem":
        """Deserialize an item from its to_json() representation.

        :param manifest: The manifest being loaded; provides tests_root.
        :param path: The item's path with "/" separators, as stored in JSON.
        :param obj: The serialized extra data (unused by the base class).
        """
        path = to_os_path(path)
        tests_root = manifest.tests_root
        assert tests_root is not None
        return cls(tests_root, path)
+
+
class URLManifestItem(ManifestItem):
    """A manifest item addressed by a URL derived from its file path."""

    __slots__ = ("url_base", "_url", "_extras", "_flags")

    def __init__(self,
                 tests_root: Text,
                 path: Text,
                 url_base: Text,
                 url: Optional[Text],
                 **extras: Any
                 ) -> None:
        """
        :param url_base: Mount point for the tests; must start with "/".
        :param url: URL relative to url_base (must not start with "/"),
            or None to derive the URL from *path*.
        :param extras: Additional per-item data, kept in ``self._extras``.
        """
        super().__init__(tests_root, path)
        assert url_base[0] == "/"
        self.url_base = url_base
        assert url is None or url[0] != "/"
        self._url = url
        self._extras = extras
        parsed_url = urlparse(self.url)
        # Flags are the dotted infixes of the filename (e.g.
        # "foo.https.sub.html" contributes {"https", "sub"}) combined with
        # any "wpt_flags" query parameters found in the URL.
        self._flags = (set(parsed_url.path.rsplit("/", 1)[1].split(".")[1:-1]) |
                       set(parse_qs(parsed_url.query).get("wpt_flags", [])))

    @property
    def id(self) -> Text:
        """The test's id: its URL."""
        return self.url

    @property
    def url(self) -> Text:
        # Fall back to the file path (with "/" separators) when no
        # explicit relative URL was provided.
        rel_url = self._url or self.path.replace(os.path.sep, "/")
        # we can outperform urljoin, because we know we just have path relative URLs
        if self.url_base == "/":
            return "/" + rel_url
        return urljoin(self.url_base, rel_url)

    @property
    def https(self) -> bool:
        # The serviceworker flags are treated as implying https.
        return "https" in self._flags or "serviceworker" in self._flags or "serviceworker-module" in self._flags

    @property
    def h2(self) -> bool:
        return "h2" in self._flags

    @property
    def subdomain(self) -> bool:
        # Note: this is currently hard-coded to check for `www`, rather than
        # all possible valid subdomains. It can be extended if needed.
        return "www" in self._flags

    def to_json(self) -> Tuple[Optional[Text], Dict[Any, Any]]:
        # Store None when the URL is derivable from the path, which keeps
        # the serialized manifest smaller.
        rel_url = None if self._url == self.path.replace(os.path.sep, "/") else self._url
        rv: Tuple[Optional[Text], Dict[Any, Any]] = (rel_url, {})
        return rv

    @classmethod
    def from_json(cls,
                  manifest: "Manifest",
                  path: Text,
                  obj: Tuple[Text, Dict[Any, Any]]
                  ) -> "URLManifestItem":
        """Reconstruct an item from its to_json() representation."""
        path = to_os_path(path)
        url, extras = obj
        tests_root = manifest.tests_root
        assert tests_root is not None
        return cls(tests_root,
                   path,
                   manifest.url_base,
                   url,
                   **extras)
+
+
class TestharnessTest(URLManifestItem):
    """Manifest entry for a testharness test."""

    __slots__ = ()

    item_type = "testharness"

    @property
    def timeout(self) -> Optional[Text]:
        return self._extras.get("timeout")

    @property
    def pac(self) -> Optional[Text]:
        return self._extras.get("pac")

    @property
    def testdriver(self) -> Optional[Text]:
        return self._extras.get("testdriver")

    @property
    def jsshell(self) -> Optional[Text]:
        return self._extras.get("jsshell")

    @property
    def script_metadata(self) -> Optional[List[Tuple[Text, Text]]]:
        return self._extras.get("script_metadata")

    def to_json(self) -> Tuple[Optional[Text], Dict[Text, Any]]:
        serialized = super().to_json()
        extras = serialized[-1]
        if self.timeout is not None:
            extras["timeout"] = self.timeout
        if self.pac is not None:
            extras["pac"] = self.pac
        if self.testdriver:
            extras["testdriver"] = self.testdriver
        if self.jsshell:
            extras["jsshell"] = True
        metadata = self.script_metadata
        if metadata:
            extras["script_metadata"] = [(key, value) for key, value in metadata]
        return serialized
+
+
class RefTest(URLManifestItem):
    """Manifest entry for a reference (reftest) test."""

    __slots__ = ("references",)

    item_type = "reftest"

    def __init__(self,
                 tests_root: Text,
                 path: Text,
                 url_base: Text,
                 url: Optional[Text],
                 references: Optional[List[Tuple[Text, Text]]] = None,
                 **extras: Any
                 ):
        """
        :param references: List of (Text, Text) tuples describing the
            test's references; defaults to an empty list.
        """
        super().__init__(tests_root, path, url_base, url, **extras)
        if references is None:
            self.references: List[Tuple[Text, Text]] = []
        else:
            self.references = references

    @property
    def timeout(self) -> Optional[Text]:
        return self._extras.get("timeout")

    @property
    def viewport_size(self) -> Optional[Text]:
        return self._extras.get("viewport_size")

    @property
    def dpi(self) -> Optional[Text]:
        return self._extras.get("dpi")

    @property
    def fuzzy(self) -> Fuzzy:
        """Fuzzy-match data, normalised to a dict.

        The serialized form stores the mapping as a list of key/value
        pairs (see to_json); convert it back here, turning each 3-element
        key sequence into a tuple (None stands for the wildcard key).
        """
        fuzzy: Union[Fuzzy, List[Tuple[Optional[Sequence[Text]], List[int]]]] = self._extras.get("fuzzy", {})
        if not isinstance(fuzzy, list):
            return fuzzy

        rv: Fuzzy = {}
        for k, v in fuzzy:  # type: Tuple[Optional[Sequence[Text]], List[int]]
            if k is None:
                key: Optional[Tuple[Text, Text, Text]] = None
            else:
                # mypy types this as Tuple[Text, ...]
                assert len(k) == 3
                key = tuple(k)  # type: ignore
            rv[key] = v
        return rv

    def to_json(self) -> Tuple[Optional[Text], List[Tuple[Text, Text]], Dict[Text, Any]]:  # type: ignore
        # NOTE(review): unlike URLManifestItem.to_json, _url is compared to
        # the raw path here without replacing os.path.sep with "/" —
        # presumably equivalent on POSIX; confirm behaviour on Windows.
        rel_url = None if self._url == self.path else self._url
        rv: Tuple[Optional[Text], List[Tuple[Text, Text]], Dict[Text, Any]] = (rel_url, self.references, {})
        extras = rv[-1]
        if self.timeout is not None:
            extras["timeout"] = self.timeout
        if self.viewport_size is not None:
            extras["viewport_size"] = self.viewport_size
        if self.dpi is not None:
            extras["dpi"] = self.dpi
        if self.fuzzy:
            # Tuple keys cannot appear in JSON objects; store the mapping
            # as a list of pairs instead (inverse of the fuzzy property).
            extras["fuzzy"] = list(self.fuzzy.items())
        return rv

    @classmethod
    def from_json(cls,  # type: ignore
                  manifest: "Manifest",
                  path: Text,
                  obj: Tuple[Text, List[Tuple[Text, Text]], Dict[Any, Any]]
                  ) -> "RefTest":
        """Reconstruct a RefTest from its to_json() representation."""
        tests_root = manifest.tests_root
        assert tests_root is not None
        path = to_os_path(path)
        url, references, extras = obj
        return cls(tests_root,
                   path,
                   manifest.url_base,
                   url,
                   references,
                   **extras)
+
+
class PrintRefTest(RefTest):
    """Manifest entry for a print reftest."""

    # No re-declared slots here: ``references`` is inherited from RefTest.
    # Re-listing a slot already defined by a base class creates a second
    # descriptor that shadows the base slot and wastes per-instance space.
    __slots__ = ()

    item_type = "print-reftest"

    @property
    def page_ranges(self) -> PageRanges:
        """Page-range data from the extras, or an empty mapping."""
        return self._extras.get("page_ranges", {})

    def to_json(self):  # type: ignore
        rv = super().to_json()
        if self.page_ranges:
            rv[-1]["page_ranges"] = self.page_ranges
        return rv
+
+
class ManualTest(URLManifestItem):
    """Manifest entry with item type "manual"."""

    __slots__ = ()

    item_type = "manual"
+
+
class ConformanceCheckerTest(URLManifestItem):
    """Manifest entry with item type "conformancechecker"."""

    __slots__ = ()

    item_type = "conformancechecker"
+
+
class VisualTest(URLManifestItem):
    """Manifest entry with item type "visual"."""

    __slots__ = ()

    item_type = "visual"
+
+
class CrashTest(URLManifestItem):
    """Manifest entry with item type "crashtest"."""

    __slots__ = ()

    item_type = "crashtest"

    @property
    def timeout(self) -> Optional[Text]:
        # Always None: crashtests carry no per-test timeout metadata here.
        return None
+
+
class WebDriverSpecTest(URLManifestItem):
    """Manifest entry with item type "wdspec"."""

    __slots__ = ()

    item_type = "wdspec"

    @property
    def timeout(self) -> Optional[Text]:
        return self._extras.get("timeout")

    def to_json(self) -> Tuple[Optional[Text], Dict[Text, Any]]:
        serialized = super().to_json()
        timeout = self.timeout
        if timeout is not None:
            serialized[-1]["timeout"] = timeout
        return serialized
+
+
class SupportFile(ManifestItem):
    """Manifest entry with item type "support" (non-test file)."""

    __slots__ = ()

    item_type = "support"

    @property
    def id(self) -> Text:
        # Support files have no URL; the OS-relative path is the id.
        return self.path
+
+
class SpecItem(ManifestItem):
    """Manifest entry recording spec links associated with a file."""

    __slots__ = ("specs",)

    item_type = "spec"

    def __init__(self,
                 tests_root: Text,
                 path: Text,
                 specs: List[Text]
                 ) -> None:
        super().__init__(tests_root, path)
        self.specs = specs

    @property
    def id(self) -> Text:
        return self.path

    def to_json(self) -> Tuple[Optional[Text], Dict[Text, Any]]:
        # Serialize each spec under a numbered "spec_link<N>" key.
        extras: Dict[Any, Any] = {}
        for index, spec in enumerate(self.specs, start=1):
            extras[f"spec_link{index}"] = spec
        rv: Tuple[Optional[Text], Dict[Any, Any]] = (None, extras)
        return rv

    @classmethod
    def from_json(cls,
                  manifest: "Manifest",
                  path: Text,
                  obj: Any
                  ) -> "ManifestItem":
        """Not properly implemented and is not used."""
        return cls("/", "", [])
diff --git a/testing/web-platform/tests/tools/manifest/jsonlib.py b/testing/web-platform/tests/tools/manifest/jsonlib.py
new file mode 100644
index 0000000000..0f70cf1e17
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/jsonlib.py
@@ -0,0 +1,125 @@
"""JSON helpers that use ujson when available, stdlib json otherwise.

ujson is faster than the stdlib json module but is an optional
dependency, so every helper here has a stdlib fallback.

Two families of dump helpers exist:

* ``dump_local``/``dumps_local`` for JSON consumed on the local machine,
  where the exact byte-level output does not matter.
* ``dump_dist``/``dumps_dist`` for distributed JSON that must round-trip
  identically regardless of backend (sorted keys, fixed layout).
"""
import re
import json
from typing import Any, AnyStr, Callable, Dict, IO, Text


# Complete public API.  (Previously this listed "dump_local" twice and
# omitted loads/dumps_local, so star-imports missed those helpers.)
__all__ = ["load", "loads", "dump_local", "dumps_local", "dump_dist", "dumps_dist"]


try:
    import ujson
except ImportError:
    has_ujson = False
else:
    has_ujson = True

#
# load
#

if has_ujson:
    load: Callable[[IO[AnyStr]], Any] = ujson.load

else:
    load = json.load


#
# loads
#

if has_ujson:
    loads: Callable[[AnyStr], Any] = ujson.loads

else:
    loads = json.loads


#
# dump/dumps_local options for some libraries
#
_ujson_dump_local_kwargs: Dict[str, Any] = {
    'ensure_ascii': False,
    'escape_forward_slashes': False,
    'indent': 1,
    'reject_bytes': True,
}


_json_dump_local_kwargs: Dict[str, Any] = {
    'ensure_ascii': False,
    'indent': 1,
    'separators': (',', ': '),
}


#
# dump_local (for local, non-distributed usage of JSON)
#

if has_ujson:
    def dump_local(obj: Any, fp: IO[str]) -> None:
        """Write *obj* as JSON to *fp* (local-use formatting)."""
        return ujson.dump(obj, fp, **_ujson_dump_local_kwargs)

else:
    def dump_local(obj: Any, fp: IO[str]) -> None:
        """Write *obj* as JSON to *fp* (local-use formatting)."""
        return json.dump(obj, fp, **_json_dump_local_kwargs)


#
# dumps_local (for local, non-distributed usage of JSON)
#

if has_ujson:
    def dumps_local(obj: Any) -> Text:
        """Return *obj* serialized as JSON text (local-use formatting)."""
        return ujson.dumps(obj, **_ujson_dump_local_kwargs)

else:
    def dumps_local(obj: Any) -> Text:
        """Return *obj* serialized as JSON text (local-use formatting)."""
        return json.dumps(obj, **_json_dump_local_kwargs)


#
# dump/dumps_dist (for distributed usage of JSON where files should safely roundtrip)
#

_ujson_dump_dist_kwargs: Dict[str, Any] = {
    'sort_keys': True,
    'indent': 1,
    'reject_bytes': True,
    'escape_forward_slashes': False,
}


_json_dump_dist_kwargs: Dict[str, Any] = {
    'sort_keys': True,
    'indent': 1,
    'separators': (',', ': '),
}


if has_ujson:
    if ujson.dumps([], indent=1) == "[]":
        # optimistically see if https://github.com/ultrajson/ultrajson/issues/429 is fixed
        def _ujson_fixup(s: str) -> str:
            return s
    else:
        # Collapse the whitespace ujson emits inside empty containers so
        # that the output matches what stdlib json produces ("[]"/"{}").
        _ujson_fixup_re = re.compile(r"([\[{])[\n\x20]+([}\]])")

        def _ujson_fixup(s: str) -> str:
            return _ujson_fixup_re.sub(
                lambda m: m.group(1) + m.group(2),
                s
            )

    def dump_dist(obj: Any, fp: IO[str]) -> None:
        """Write *obj* to *fp* as distribution-stable JSON (sorted keys)."""
        fp.write(_ujson_fixup(ujson.dumps(obj, **_ujson_dump_dist_kwargs)))

    def dumps_dist(obj: Any) -> Text:
        """Return *obj* as distribution-stable JSON text (sorted keys)."""
        return _ujson_fixup(ujson.dumps(obj, **_ujson_dump_dist_kwargs))
else:
    def dump_dist(obj: Any, fp: IO[str]) -> None:
        """Write *obj* to *fp* as distribution-stable JSON (sorted keys)."""
        json.dump(obj, fp, **_json_dump_dist_kwargs)

    def dumps_dist(obj: Any) -> Text:
        """Return *obj* as distribution-stable JSON text (sorted keys)."""
        return json.dumps(obj, **_json_dump_dist_kwargs)
diff --git a/testing/web-platform/tests/tools/manifest/log.py b/testing/web-platform/tests/tools/manifest/log.py
new file mode 100644
index 0000000000..7881381733
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/log.py
@@ -0,0 +1,9 @@
"""Shared logging setup for the manifest package."""
import logging

# Single logger instance shared by all manifest modules.
logger = logging.getLogger("manifest")


def enable_debug_logging() -> None:
    """Switch the shared manifest logger to DEBUG verbosity."""
    logger.setLevel(logging.DEBUG)


def get_logger() -> logging.Logger:
    """Return the shared "manifest" logger instance."""
    return logger
diff --git a/testing/web-platform/tests/tools/manifest/manifest.py b/testing/web-platform/tests/tools/manifest/manifest.py
new file mode 100644
index 0000000000..959978f528
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/manifest.py
@@ -0,0 +1,428 @@
+import os
+from atomicwrites import atomic_write
+from copy import deepcopy
+from logging import Logger
+from multiprocessing import Pool
+from typing import (Any, Callable, Container, Dict, IO, Iterator, Iterable, Optional, Set, Text, Tuple, Type,
+ Union)
+
+from . import jsonlib
+from . import vcs
+from .item import (ConformanceCheckerTest,
+ CrashTest,
+ ManifestItem,
+ ManualTest,
+ PrintRefTest,
+ RefTest,
+ SpecItem,
+ SupportFile,
+ TestharnessTest,
+ VisualTest,
+ WebDriverSpecTest)
+from .log import get_logger
+from .mputil import max_parallelism
+from .sourcefile import SourceFile
+from .typedata import TypeData
+
+
# Serialization format version; bump whenever the JSON layout changes.
CURRENT_VERSION: int = 8


class ManifestError(Exception):
    """Base error for malformed or unloadable manifests."""


class ManifestVersionMismatch(ManifestError):
    """Raised when a serialized manifest's version is not CURRENT_VERSION."""


class InvalidCacheError(Exception):
    """Raised when the on-disk cache contradicts the manifest contents."""
+
+
# Map from manifest item type name (as stored in serialized manifests and
# used as keys of Manifest._data) to the ManifestItem subclass implementing it.
item_classes: Dict[Text, Type[ManifestItem]] = {"testharness": TestharnessTest,
                                                "reftest": RefTest,
                                                "print-reftest": PrintRefTest,
                                                "crashtest": CrashTest,
                                                "manual": ManualTest,
                                                "wdspec": WebDriverSpecTest,
                                                "conformancechecker": ConformanceCheckerTest,
                                                "visual": VisualTest,
                                                "spec": SpecItem,
                                                "support": SupportFile}
+
+
def compute_manifest_items(source_file: "SourceFile") -> "Optional[Tuple[Tuple[Text, ...], Text, Set[ManifestItem], Text]]":
    """Compute the manifest entry for a single source file.

    Returns (relative path components, item type, set of manifest items,
    file hash) for the given file.
    """
    path_parts = source_file.rel_path_parts
    item_type, items = source_file.manifest_items()
    return path_parts, item_type, set(items), source_file.hash
+
+
def compute_manifest_spec_items(source_file: "SourceFile") -> "Optional[Tuple[Tuple[Text, ...], Text, Set[ManifestItem], Text]]":
    """Compute the spec-manifest entry for a single source file.

    Returns None when the file yields no spec items, otherwise
    (relative path components, item type, set of manifest items, file hash).
    """
    spec = source_file.manifest_spec_items()
    if not spec:
        return None

    item_type, items = spec
    return source_file.rel_path_parts, item_type, set(items), source_file.hash
+
+
ManifestDataType = Dict[Any, TypeData]


class ManifestData(ManifestDataType):
    """Dict of test type name -> TypeData; the key set is fixed at
    construction time to exactly the types in ``item_classes``."""

    def __init__(self, manifest: "Manifest") -> None:
        """Dictionary subclass containing a TypeData instance for each test type,
        keyed by type name"""
        # `initialized` guards __setitem__: keys may only be inserted here.
        self.initialized: bool = False
        for key, value in item_classes.items():
            self[key] = TypeData(manifest, value)
        self.initialized = True
        self.json_obj: None = None

    def __setitem__(self, key: Text, value: TypeData) -> None:
        # The set of test types is fixed after construction; reject any
        # later insertion attempt.
        if self.initialized:
            raise AttributeError
        dict.__setitem__(self, key, value)

    def paths(self) -> Set[Text]:
        """Get a list of all paths containing test items
        without actually constructing all the items"""
        rv: Set[Text] = set()
        for item_data in self.values():
            for item in item_data:
                # `item` is a tuple of path components; rejoin with os.sep.
                rv.add(os.path.sep.join(item))
        return rv

    def type_by_path(self) -> Dict[Tuple[Text, ...], Text]:
        """Map each relative path (tuple of components) to its test type."""
        rv = {}
        for item_type, item_data in self.items():
            for item in item_data:
                rv[item] = item_type
        return rv
+
+
class Manifest:
    """In-memory representation of the test manifest.

    Holds a ManifestData mapping test type -> TypeData and provides
    iteration, incremental update from a source tree, and JSON
    (de)serialization.
    """

    def __init__(self, tests_root: Text, url_base: Text = "/") -> None:
        assert url_base is not None
        self._data: ManifestData = ManifestData(self)
        self.tests_root: Text = tests_root
        self.url_base: Text = url_base

    def __iter__(self) -> Iterator[Tuple[Text, Text, Set[ManifestItem]]]:
        return self.itertypes()

    def itertypes(self, *types: Text) -> Iterator[Tuple[Text, Text, Set[ManifestItem]]]:
        """Yield (test type, relative path, set of tests) triples, optionally
        restricted to the given test types."""
        for item_type in (types or sorted(self._data.keys())):
            for path in self._data[item_type]:
                rel_path = os.sep.join(path)
                tests = self._data[item_type][path]
                yield item_type, rel_path, tests

    def iterpath(self, path: Text) -> Iterable[ManifestItem]:
        """Yield every test item, of any type, defined by the file at `path`."""
        tpath = tuple(path.split(os.path.sep))

        for type_tests in self._data.values():
            i = type_tests.get(tpath, set())
            assert i is not None
            yield from i

    def iterdir(self, dir_name: Text) -> Iterable[ManifestItem]:
        """Yield every test item defined by files under directory `dir_name`."""
        tpath = tuple(dir_name.split(os.path.sep))
        tpath_len = len(tpath)

        for type_tests in self._data.values():
            for path, tests in type_tests.items():
                # Component-wise prefix match selects the subtree.
                if path[:tpath_len] == tpath:
                    yield from tests

    def update(self, tree: Iterable[Tuple[Text, Optional[Text], bool]], parallel: bool = True,
               update_func: Callable[..., Any] = compute_manifest_items) -> bool:
        """Update the manifest given an iterable of items that make up the updated manifest.

        The iterable must generate tuples of the form (path, file_hash, updated),
        where `updated` is True for paths whose entries must be (re)computed and
        False for paths that are unchanged (for which file_hash may be None).
        This unusual API is designed as an optimisation meaning that SourceFile
        items need not be constructed in the case we are not updating a path,
        but the absence of an item from the iterator may be used to remove
        defunct entries from the manifest.

        Returns True if the manifest changed, False otherwise."""

        logger = get_logger()

        changed = False

        # Create local variable references to these dicts so we avoid the
        # attribute access in the hot loop below
        data = self._data

        types = data.type_by_path()
        # Paths from the old manifest not yet seen in `tree`; whatever is left
        # at the end is defunct and gets deleted.
        remaining_manifest_paths = set(types)

        to_update = []

        for path, file_hash, updated in tree:
            path_parts = tuple(path.split(os.path.sep))
            is_new = path_parts not in remaining_manifest_paths

            if not updated and is_new:
                # This is kind of a bandaid; if we ended up here the cache
                # was invalid but we've been using it anyway. That's obviously
                # bad; we should fix the underlying issue that we sometimes
                # use an invalid cache. But at least this fixes the immediate
                # problem
                raise InvalidCacheError

            if not updated:
                remaining_manifest_paths.remove(path_parts)
            else:
                assert self.tests_root is not None
                source_file = SourceFile(self.tests_root,
                                         path,
                                         self.url_base,
                                         file_hash)

                hash_changed: bool = False

                if not is_new:
                    if file_hash is None:
                        file_hash = source_file.hash
                    remaining_manifest_paths.remove(path_parts)
                    old_type = types[path_parts]
                    old_hash = data[old_type].hashes[path_parts]
                    if old_hash != file_hash:
                        # Content changed: drop the stale entry; it is
                        # recomputed below.
                        hash_changed = True
                        del data[old_type][path_parts]

                if is_new or hash_changed:
                    to_update.append(source_file)

        if to_update:
            logger.debug("Computing manifest update for %s items" % len(to_update))
            changed = True

        # 25 items was derived experimentally (2020-01) to be approximately the
        # point at which it is quicker to create a Pool and parallelize update.
        pool = None
        processes = max_parallelism()
        if parallel and len(to_update) > 25 and processes > 1:
            pool = Pool(processes)

            # chunksize set > 1 when more than 10000 tests, because
            # chunking is a net-gain once we get to very large numbers
            # of items (again, experimentally, 2020-01)
            chunksize = max(1, len(to_update) // 10000)
            logger.debug("Doing a multiprocessed update. "
                         "Processes: %s, chunksize: %s" % (processes, chunksize))
            results: Iterator[Optional[Tuple[Tuple[Text, ...],
                                             Text,
                                             Set[ManifestItem], Text]]] = pool.imap_unordered(
                update_func,
                to_update,
                chunksize=chunksize)
        else:
            results = map(update_func, to_update)

        for result in results:
            if not result:
                continue
            rel_path_parts, new_type, manifest_items, file_hash = result
            data[new_type][rel_path_parts] = manifest_items
            data[new_type].hashes[rel_path_parts] = file_hash

        # Make sure to terminate the Pool, to avoid hangs on Python 3.
        # https://docs.python.org/3/library/multiprocessing.html#multiprocessing.pool.Pool
        if pool is not None:
            pool.terminate()

        if remaining_manifest_paths:
            # Anything not mentioned by `tree` no longer exists: purge it
            # from every test type.
            changed = True
            for rel_path_parts in remaining_manifest_paths:
                for test_data in data.values():
                    if rel_path_parts in test_data:
                        del test_data[rel_path_parts]

        return changed

    def to_json(self, caller_owns_obj: bool = True) -> Dict[Text, Any]:
        """Dump a manifest into a object which can be serialized as JSON

        If caller_owns_obj is False, then the return value remains
        owned by the manifest; it is _vitally important_ that _no_
        (even read) operation is done on the manifest, as otherwise
        objects within the object graph rooted at the return value can
        be mutated. This essentially makes this mode very dangerous
        and only to be used under extreme care.

        """
        # Empty TypeData instances are omitted from the serialized form.
        out_items = {
            test_type: type_paths.to_json()
            for test_type, type_paths in self._data.items() if type_paths
        }

        if caller_owns_obj:
            out_items = deepcopy(out_items)

        rv: Dict[Text, Any] = {"url_base": self.url_base,
                               "items": out_items,
                               "version": CURRENT_VERSION}
        return rv

    @classmethod
    def from_json(cls,
                  tests_root: Text,
                  obj: Dict[Text, Any],
                  types: Optional[Container[Text]] = None,
                  callee_owns_obj: bool = False) -> "Manifest":
        """Load a manifest from a JSON object

        This loads a manifest for a given local test_root path from an
        object obj, potentially partially loading it to only load the
        types given by types.

        If callee_owns_obj is True, then ownership of obj transfers
        to this function when called, and the caller must never mutate
        the obj or anything referred to in the object graph rooted at
        obj.

        :raises ManifestVersionMismatch: if obj's version != CURRENT_VERSION
        :raises ManifestError: if obj contains an unknown test type
        """
        version = obj.get("version")
        if version != CURRENT_VERSION:
            raise ManifestVersionMismatch

        self = cls(tests_root, url_base=obj.get("url_base", "/"))
        # NOTE(review): hasattr checks for an `items` *attribute* (true for
        # every dict), not the "items" key; a missing key surfaces as a
        # KeyError below rather than ManifestError — confirm intended.
        if not hasattr(obj, "items"):
            raise ManifestError

        for test_type, type_paths in obj["items"].items():
            if test_type not in item_classes:
                raise ManifestError

            if types and test_type not in types:
                continue

            if not callee_owns_obj:
                type_paths = deepcopy(type_paths)

            self._data[test_type].set_json(type_paths)

        return self
+
+
def load(tests_root: Text, manifest: Union[IO[bytes], Text], types: Optional[Container[Text]] = None) -> Optional[Manifest]:
    """Load a manifest from a path or open file, without updating it.

    Deprecated: prefer load_and_update.
    """
    log = get_logger()
    log.warning("Prefer load_and_update instead")
    return _load(log, tests_root, manifest, types)
+
+
# Per-path cache of successfully loaded manifests (see allow_cached below).
__load_cache: Dict[Text, Manifest] = {}


def _load(logger: Logger,
          tests_root: Text,
          manifest: Union[IO[bytes], Text],
          types: Optional[Container[Text]] = None,
          allow_cached: bool = True
          ) -> Optional[Manifest]:
    """Load a Manifest from a path or an open (binary) file.

    Returns None when the file is missing, unreadable, or unparseable;
    successful loads are memoized per path when allow_cached is True.
    """
    manifest_path = (manifest if isinstance(manifest, str)
                     else manifest.name)
    if allow_cached and manifest_path in __load_cache:
        return __load_cache[manifest_path]

    if isinstance(manifest, str):
        if os.path.exists(manifest):
            logger.debug("Opening manifest at %s" % manifest)
        else:
            logger.debug("Creating new manifest at %s" % manifest)
        try:
            with open(manifest, encoding="utf-8") as f:
                rv = Manifest.from_json(tests_root,
                                        jsonlib.load(f),
                                        types=types,
                                        callee_owns_obj=True)
        except OSError:
            # Missing or unreadable file: caller creates a fresh manifest.
            return None
        except ValueError:
            logger.warning("%r may be corrupted", manifest)
            return None
    else:
        rv = Manifest.from_json(tests_root,
                                jsonlib.load(manifest),
                                types=types,
                                callee_owns_obj=True)

    if allow_cached:
        __load_cache[manifest_path] = rv
    return rv
+
+
def load_and_update(tests_root: Text,
                    manifest_path: Text,
                    url_base: Text,
                    update: bool = True,
                    rebuild: bool = False,
                    metadata_path: Optional[Text] = None,
                    cache_root: Optional[Text] = None,
                    working_copy: bool = True,
                    types: Optional[Container[Text]] = None,
                    write_manifest: bool = True,
                    allow_cached: bool = True,
                    parallel: bool = True
                    ) -> Manifest:
    """Load the manifest at manifest_path, rebuilding from tests_root when it
    is missing, has a stale version, or a different url_base; optionally
    update it from the source tree and write the result back to disk.

    Note: metadata_path is accepted for API compatibility but unused here.
    """

    logger = get_logger()

    manifest = None
    if not rebuild:
        try:
            manifest = _load(logger,
                             tests_root,
                             manifest_path,
                             types=types,
                             allow_cached=allow_cached)
        except ManifestVersionMismatch:
            logger.info("Manifest version changed, rebuilding")
        except ManifestError:
            logger.warning("Failed to load manifest, rebuilding")

    if manifest is not None and manifest.url_base != url_base:
        logger.info("Manifest url base did not match, rebuilding")
        manifest = None

    if manifest is None:
        manifest = Manifest(tests_root, url_base)
        rebuild = True
        update = True

    if rebuild or update:
        logger.info("Updating manifest")
        # Retry once: an InvalidCacheError on the first pass forces a full
        # rebuild on the second.
        for retry in range(2):
            try:
                tree = vcs.get_tree(tests_root, manifest, manifest_path, cache_root,
                                    working_copy, rebuild)
                changed = manifest.update(tree, parallel)
                break
            except InvalidCacheError:
                logger.warning("Manifest cache was invalid, doing a complete rebuild")
                rebuild = True
        else:
            # If we didn't break there was an error
            # NOTE(review): this bare `raise` executes outside any active
            # except block, so it raises RuntimeError("No active exception to
            # re-raise") rather than the original error — confirm intended.
            raise
        if write_manifest and changed:
            write(manifest, manifest_path)
        tree.dump_caches()

    return manifest
+
+
def write(manifest: Manifest, manifest_path: Text) -> None:
    """Atomically serialize `manifest` as JSON to `manifest_path`.

    Creates the parent directory if needed. The output is "dist" JSON
    (sorted keys, stable separators via jsonlib.dump_dist) followed by a
    trailing newline.
    """
    dir_name = os.path.dirname(manifest_path)
    if dir_name:
        # exist_ok avoids the TOCTOU race of the exists()/makedirs() pattern
        # when several processes write the manifest concurrently; the guard
        # also avoids os.makedirs("") raising for a bare filename.
        os.makedirs(dir_name, exist_ok=True)
    with atomic_write(manifest_path, overwrite=True) as f:
        jsonlib.dump_dist(manifest.to_json(caller_owns_obj=True), f)
        f.write("\n")
diff --git a/testing/web-platform/tests/tools/manifest/mputil.py b/testing/web-platform/tests/tools/manifest/mputil.py
new file mode 100644
index 0000000000..fc9d5ac94c
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/mputil.py
@@ -0,0 +1,14 @@
+import multiprocessing
+import sys
+
def max_parallelism() -> int:
    """Return the number of processes to use for parallel manifest work."""
    cpus = multiprocessing.cpu_count()
    if sys.platform != 'win32':
        return cpus
    # On Python 3 on Windows, using >= MAXIMUM_WAIT_OBJECTS processes
    # causes a crash in the multiprocessing module. Whilst this enum
    # can technically have any value, it is usually 64. For safety,
    # restrict manifest regeneration to 56 processes on Windows.
    #
    # See https://bugs.python.org/issue26903 and https://bugs.python.org/issue40263
    return min(cpus, 56)
diff --git a/testing/web-platform/tests/tools/manifest/requirements.txt b/testing/web-platform/tests/tools/manifest/requirements.txt
new file mode 100644
index 0000000000..d7c173723e
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/requirements.txt
@@ -0,0 +1 @@
+zstandard==0.21.0
diff --git a/testing/web-platform/tests/tools/manifest/sourcefile.py b/testing/web-platform/tests/tools/manifest/sourcefile.py
new file mode 100644
index 0000000000..23aa7f491f
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/sourcefile.py
@@ -0,0 +1,1083 @@
+import hashlib
+import re
+import os
+from collections import deque
+from fnmatch import fnmatch
+from io import BytesIO
+from typing import (Any, BinaryIO, Callable, Deque, Dict, Iterable, List, Optional, Pattern,
+ Set, Text, Tuple, Union, cast)
+from urllib.parse import urljoin
+
+try:
+ from xml.etree import cElementTree as ElementTree
+except ImportError:
+ from xml.etree import ElementTree as ElementTree # type: ignore
+
+import html5lib
+
+from . import XMLParser
+from .item import (ConformanceCheckerTest,
+ CrashTest,
+ ManifestItem,
+ ManualTest,
+ PrintRefTest,
+ RefTest,
+ SpecItem,
+ SupportFile,
+ TestharnessTest,
+ VisualTest,
+ WebDriverSpecTest)
+from .utils import cached_property
+
+# Cannot do `from ..metadata.webfeatures.schema import WEB_FEATURES_YML_FILENAME`
+# because relative import beyond toplevel throws *ImportError*!
+from metadata.webfeatures.schema import WEB_FEATURES_YML_FILENAME # type: ignore
+
# Filename pattern for webdriver spec (wdspec) tests.
wd_pattern = "*.py"
# "// META: key=value" headers at the top of .js tests.
js_meta_re = re.compile(br"//\s*META:\s*(\w*)=(.*)$")
# "# META: key=value" headers at the top of wdspec .py tests.
python_meta_re = re.compile(br"#\s*META:\s*(\w*)=(.*)$")

# Names like "foo-ref", "notref2-bar" mark reference files (see name_is_reference).
reference_file_re = re.compile(r'(^|[\-_])(not)?ref[0-9]*([\-_]|$)')

# html5lib's space characters joined into a single string.
space_chars: Text = "".join(html5lib.constants.spaceCharacters)
+
+
def replace_end(s: Text, old: Text, new: Text) -> Text:
    """
    Given a string `s` that ends with `old`, replace that occurrence of `old`
    with `new`.
    """
    assert s.endswith(old)
    # Slice by explicit length difference: the previous `s[:-len(old)]` form
    # evaluated to `s[:0]` when `old` was empty, discarding all of `s`.
    return s[:len(s) - len(old)] + new
+
+
def read_script_metadata(f: BinaryIO, regexp: Pattern[bytes]) -> Iterable[Tuple[Text, Text]]:
    """
    Yields any metadata (pairs of strings) from the file-like object `f`,
    as specified according to a supplied regexp.

    `regexp` - Regexp containing two groups containing the metadata name and
    value.

    Reading stops at the first line that does not match, so only a leading
    run of metadata lines is considered.
    """
    for line in f:
        assert isinstance(line, bytes), line
        match = regexp.match(line)
        if match is None:
            return
        key, value = match.groups()
        yield key.decode("utf8"), value.decode("utf8")
+
+
# Known ".any.js" global variants. Per-variant options:
#   "suffix"   - filename suffix of the generated test (defaults to
#                ".any.<variant>.html", see global_suffixes)
#   "longhand" - set of variants this shorthand keyword expands to
#                (see get_any_variants)
#   "force_https" - presumably forces the variant onto https — not used in
#                   this file; confirm against the consumer before relying on it.
_any_variants: Dict[Text, Dict[Text, Any]] = {
    "window": {"suffix": ".any.html"},
    "serviceworker": {"force_https": True},
    "serviceworker-module": {"force_https": True},
    "sharedworker": {},
    "sharedworker-module": {},
    "dedicatedworker": {"suffix": ".any.worker.html"},
    "dedicatedworker-module": {"suffix": ".any.worker-module.html"},
    "worker": {"longhand": {"dedicatedworker", "sharedworker", "serviceworker"}},
    "worker-module": {},
    "shadowrealm": {},
    "jsshell": {"suffix": ".any.js"},
}
+
+
def get_any_variants(item: Text) -> Set[Text]:
    """
    Returns a set of variants (strings) defined by the given keyword.

    Unknown keywords contribute no variants; shorthand keywords expand to
    their "longhand" set.
    """
    assert isinstance(item, str), item

    try:
        variant = _any_variants[item]
    except KeyError:
        return set()

    return variant.get("longhand", {item})
+
+
def get_default_any_variants() -> Set[Text]:
    """
    Returns the set of variants (strings) used when a test declares none.
    """
    return {"window", "dedicatedworker"}
+
+
def parse_variants(value: Text) -> Set[Text]:
    """
    Returns a set of variants (strings) defined by a comma-separated value.

    An empty value yields the default variant set.
    """
    assert isinstance(value, str), value

    if value == "":
        return get_default_any_variants()

    # Renamed from `globals`, which shadowed the builtin of the same name.
    variants: Set[Text] = set()
    for item in value.split(","):
        variants |= get_any_variants(item.strip())
    return variants
+
+
def global_suffixes(value: Text) -> Set[Tuple[Text, bool]]:
    """
    Yields tuples of the relevant filename suffix (a string) and whether the
    variant is intended to run in a JS shell, for the variants defined by the
    given comma-separated value.
    """
    assert isinstance(value, str), value

    suffixes: Set[Tuple[Text, bool]] = set()
    for global_type in parse_variants(value):
        spec = _any_variants[global_type]
        # Variants without an explicit suffix use ".any.<variant>.html".
        suffix = spec.get("suffix", ".any.%s.html" % global_type)
        suffixes.add((suffix, global_type == "jsshell"))
    return suffixes
+
+
def global_variant_url(url: Text, suffix: Text) -> Text:
    """
    Returns a url created from the given url and suffix (all strings).
    """
    url = url.replace(".any.", ".")
    # If the url must be loaded over https or h2, the scheme flag lives in
    # the suffix (e.g. ".https.any.js") rather than in the stem, so strip
    # whichever flag the suffix carries.
    for flag in (".https.", ".h2."):
        if flag in url and suffix.startswith(flag):
            url = url.replace(flag, ".")
            break
    return replace_end(url, ".js", suffix)
+
+
def _parse_html(f: BinaryIO) -> ElementTree.Element:
    """Parse HTML bytes with html5lib and return the etree root element."""
    doc = html5lib.parse(f, treebuilder="etree", useChardet=False)
    return cast(ElementTree.Element, doc)

def _parse_xml(f: BinaryIO) -> ElementTree.Element:
    """Parse XML bytes, retrying with the lenient local XMLParser on failure."""
    try:
        # raises ValueError with an unsupported encoding,
        # ParseError when there's an undefined entity
        return ElementTree.parse(f).getroot()
    except (ValueError, ElementTree.ParseError):
        f.seek(0)
        return ElementTree.parse(f, XMLParser.XMLParser()).getroot() # type: ignore
+
+
class SourceFile:
    """A file in the test source tree, plus the logic to classify it and
    extract manifest metadata (type flags, timeouts, references, variants)."""

    # Parser used for each value `markup_type` can return.
    parsers: Dict[Text, Callable[[BinaryIO], ElementTree.Element]] = {"html":_parse_html,
                                                                      "xhtml":_parse_xml,
                                                                      "svg":_parse_xml}

    # Top-level directories that never contain tests.
    root_dir_non_test = {"common"}

    # Directory names marking their subtree as non-test at any depth.
    dir_non_test = {"resources",
                    "support",
                    "tools"}

    # Specific path prefixes (as component tuples) that are non-test.
    dir_path_non_test: Set[Tuple[Text, ...]] = {("css21", "archive"),
                                                ("css", "CSS2", "archive"),
                                                ("css", "common")}
+
    def __init__(self, tests_root: Text,
                 rel_path: Text,
                 url_base: Text,
                 hash: Optional[Text] = None,
                 contents: Optional[bytes] = None) -> None:
        """Object representing a file in a source tree.

        :param tests_root: Path to the root of the source tree
        :param rel_path: File path relative to tests_root
        :param url_base: Base URL used when converting file paths to urls
        :param hash: Precomputed file hash, or ``None`` to compute lazily.
        :param contents: Byte array of the contents of the file or ``None``.
        """

        assert not os.path.isabs(rel_path), rel_path
        if os.name == "nt":
            # do slash normalization on Windows
            rel_path = rel_path.replace("/", "\\")

        dir_path, filename = os.path.split(rel_path)
        name, ext = os.path.splitext(filename)

        # "foo-manual.html" -> type_flag "manual"; drives the name_is_* checks.
        type_flag = None
        if "-" in name:
            type_flag = name.rsplit("-", 1)[1].split(".")[0]

        # Dotted flags between base name and extension,
        # e.g. "test.https.any.js" -> ["https", "any"].
        meta_flags = name.split(".")[1:]

        self.tests_root: Text = tests_root
        self.rel_path: Text = rel_path
        self.dir_path: Text = dir_path
        self.filename: Text = filename
        self.name: Text = name
        self.ext: Text = ext
        self.type_flag: Optional[Text] = type_flag
        self.meta_flags: Union[List[bytes], List[Text]] = meta_flags
        self.url_base = url_base
        self.contents = contents
        self.items_cache: Optional[Tuple[Text, List[ManifestItem]]] = None
        self._hash = hash

    def __getstate__(self) -> Dict[str, Any]:
        # Remove computed properties if we pickle this class
        rv = self.__dict__.copy()

        if "__cached_properties__" in rv:
            cached_properties = rv["__cached_properties__"]
            # Drop each cached value along with the bookkeeping set itself.
            rv = {key:value for key, value in rv.items() if key not in cached_properties}
            del rv["__cached_properties__"]
        return rv
+
    def name_prefix(self, prefix: Text) -> bool:
        """Check if the filename starts with a given prefix

        :param prefix: The prefix to check"""
        return self.name.startswith(prefix)

    def is_dir(self) -> bool:
        """Return whether this file represents a directory."""
        if self.contents is not None:
            # In-memory contents always describe a file, never a directory.
            return False

        # NOTE(review): this checks rel_path, not tests_root/rel_path, so it
        # only works when the process cwd is tests_root — confirm intended.
        return os.path.isdir(self.rel_path)

    def open(self) -> BinaryIO:
        """
        Return either
        * the contents specified in the constructor, if any;
        * a File object opened for reading the file contents.
        """
        if self.contents is not None:
            file_obj: BinaryIO = BytesIO(self.contents)
        else:
            file_obj = open(self.path, 'rb')
        return file_obj
+
    @cached_property
    def rel_path_parts(self) -> Tuple[Text, ...]:
        # Path components of rel_path, e.g. ("css", "foo", "bar.html").
        return tuple(self.rel_path.split(os.path.sep))

    @cached_property
    def path(self) -> Text:
        # Full path: tests_root joined with the relative path.
        return os.path.join(self.tests_root, self.rel_path)

    @cached_property
    def rel_url(self) -> Text:
        assert not os.path.isabs(self.rel_path), self.rel_path
        # URL form of the relative path (always forward slashes).
        return self.rel_path.replace(os.sep, "/")

    @cached_property
    def url(self) -> Text:
        # Test URL: rel_url resolved against url_base.
        return urljoin(self.url_base, self.rel_url)

    @cached_property
    def hash(self) -> Text:
        # Lazily computed SHA-1 over "blob <len>\0<content>" — the same
        # framing git uses for blob objects, so it matches git's file hash.
        if not self._hash:
            with self.open() as f:
                content = f.read()

            data = b"".join((b"blob ", b"%d" % len(content), b"\0", content))
            self._hash = str(hashlib.sha1(data).hexdigest())

        return self._hash
+
    def in_non_test_dir(self) -> bool:
        """Return True if this file lives somewhere that never holds tests:
        the repository root, a known non-test root/dir name, or one of the
        explicit non-test path prefixes."""
        if self.dir_path == "":
            return True

        parts = self.rel_path_parts

        if (parts[0] in self.root_dir_non_test or
            any(item in self.dir_non_test for item in parts) or
            any(parts[:len(path)] == path for path in self.dir_path_non_test)):
            return True
        return False

    def in_conformance_checker_dir(self) -> bool:
        """Return True if this file is under the conformance-checkers tree."""
        return self.rel_path_parts[0] == "conformance-checkers"
+
    @property
    def name_is_non_test(self) -> bool:
        """Check if the file name matches the conditions for the file to
        be a non-test file"""
        return (self.is_dir() or
                self.name_prefix("MANIFEST") or
                self.filename == "META.yml" or
                self.filename == WEB_FEATURES_YML_FILENAME or
                self.filename.startswith(".") or
                self.filename.endswith(".headers") or
                self.filename.endswith(".ini") or
                self.in_non_test_dir())

    @property
    def name_is_conformance(self) -> bool:
        """File is a conformance-checker test proper ("-is-valid"/"-no-valid")."""
        return (self.in_conformance_checker_dir() and
                self.type_flag in ("is-valid", "no-valid"))

    @property
    def name_is_conformance_support(self) -> bool:
        """Any file under conformance-checkers/ counts as support."""
        return self.in_conformance_checker_dir()

    @property
    def name_is_manual(self) -> bool:
        """Check if the file name matches the conditions for the file to
        be a manual test file"""
        return self.type_flag == "manual"

    @property
    def name_is_visual(self) -> bool:
        """Check if the file name matches the conditions for the file to
        be a visual test file"""
        return self.type_flag == "visual"

    @property
    def name_is_multi_global(self) -> bool:
        """Check if the file name matches the conditions for the file to
        be a multi-global js test file"""
        return "any" in self.meta_flags and self.ext == ".js"

    @property
    def name_is_worker(self) -> bool:
        """Check if the file name matches the conditions for the file to
        be a worker js test file"""
        return "worker" in self.meta_flags and self.ext == ".js"

    @property
    def name_is_window(self) -> bool:
        """Check if the file name matches the conditions for the file to
        be a window js test file"""
        return "window" in self.meta_flags and self.ext == ".js"

    @property
    def name_is_webdriver(self) -> bool:
        """Check if the file name matches the conditions for the file to
        be a webdriver spec test file"""
        # wdspec tests are in subdirectories of /webdriver excluding __init__.py
        # files.
        rel_path_parts = self.rel_path_parts
        return (((rel_path_parts[0] == "webdriver" and len(rel_path_parts) > 1) or
                 (rel_path_parts[:2] == ("infrastructure", "webdriver") and
                  len(rel_path_parts) > 2)) and
                self.filename not in ("__init__.py", "conftest.py") and
                fnmatch(self.filename, wd_pattern))

    @property
    def name_is_reference(self) -> bool:
        """Check if the file name matches the conditions for the file to
        be a reference file (not a reftest)"""
        return "/reference/" in self.url or bool(reference_file_re.search(self.name))

    @property
    def name_is_crashtest(self) -> bool:
        """Markup file flagged "-crash" or living in a crashtests directory."""
        return (self.markup_type is not None and
                (self.type_flag == "crash" or "crashtests" in self.dir_path.split(os.path.sep)))

    @property
    def name_is_tentative(self) -> bool:
        """Check if the file name matches the conditions for the file to be a
        tentative file.

        See https://web-platform-tests.org/writing-tests/file-names.html#test-features"""
        return "tentative" in self.meta_flags or "tentative" in self.dir_path.split(os.path.sep)

    @property
    def name_is_print_reftest(self) -> bool:
        """Markup file flagged "-print" or living in a print directory."""
        return (self.markup_type is not None and
                (self.type_flag == "print" or "print" in self.dir_path.split(os.path.sep)))
+
    @property
    def markup_type(self) -> Optional[Text]:
        """Return the type of markup contained in a file, based on its extension,
        or None if it doesn't contain markup"""
        ext = self.ext

        if not ext:
            return None
        if ext[0] == ".":
            ext = ext[1:]
        if ext in ["html", "htm"]:
            return "html"
        if ext in ["xhtml", "xht", "xml"]:
            return "xhtml"
        if ext == "svg":
            return "svg"
        # Non-markup extension (e.g. .js, .py).
        return None
+
    @cached_property
    def root(self) -> Optional[ElementTree.Element]:
        """Return an ElementTree Element for the root node of the file if it contains
        markup, or None if it does not"""
        if not self.markup_type:
            return None

        parser = self.parsers[self.markup_type]

        with self.open() as f:
            try:
                tree = parser(f)
            except Exception:
                # Unparseable markup is treated the same as no markup.
                return None

        return tree
+
    @cached_property
    def timeout_nodes(self) -> List[ElementTree.Element]:
        """List of ElementTree Elements corresponding to nodes in a test that
        specify timeouts"""
        assert self.root is not None
        return self.root.findall(".//{http://www.w3.org/1999/xhtml}meta[@name='timeout']")

    @cached_property
    def pac_nodes(self) -> List[ElementTree.Element]:
        """List of ElementTree Elements corresponding to nodes in a test that
        specify PAC (proxy auto-config)"""
        assert self.root is not None
        return self.root.findall(".//{http://www.w3.org/1999/xhtml}meta[@name='pac']")

    @cached_property
    def script_metadata(self) -> Optional[List[Tuple[Text, Text]]]:
        """Parsed "META:" header pairs for script tests, or None for file
        types that carry no script metadata."""
        if self.name_is_worker or self.name_is_multi_global or self.name_is_window:
            regexp = js_meta_re
        elif self.name_is_webdriver:
            regexp = python_meta_re
        else:
            return None

        with self.open() as f:
            return list(read_script_metadata(f, regexp))
+
    @cached_property
    def timeout(self) -> Optional[Text]:
        """The timeout of a test or reference file. "long" if the file has an extended timeout
        or None otherwise"""
        if self.script_metadata:
            if any(m == ("timeout", "long") for m in self.script_metadata):
                return "long"

        if self.root is None:
            return None

        if self.timeout_nodes:
            # Only the first <meta name=timeout> is consulted.
            timeout_str: Optional[Text] = self.timeout_nodes[0].attrib.get("content", None)
            if timeout_str and timeout_str.lower() == "long":
                return "long"

        return None

    @cached_property
    def pac(self) -> Optional[Text]:
        """The PAC (proxy config) of a test or reference file. A URL or null"""
        if self.script_metadata:
            for (meta, content) in self.script_metadata:
                if meta == 'pac':
                    return content

        if self.root is None:
            return None

        if self.pac_nodes:
            # Only the first <meta name=pac> is consulted.
            return self.pac_nodes[0].attrib.get("content", None)

        return None
+
    @cached_property
    def viewport_nodes(self) -> List[ElementTree.Element]:
        """List of ElementTree Elements corresponding to nodes in a test that
        specify viewport sizes"""
        assert self.root is not None
        return self.root.findall(".//{http://www.w3.org/1999/xhtml}meta[@name='viewport-size']")

    @cached_property
    def viewport_size(self) -> Optional[Text]:
        """The viewport size of a test or reference file"""
        if self.root is None:
            return None

        if not self.viewport_nodes:
            return None

        # Only the first <meta name=viewport-size> is consulted.
        return self.viewport_nodes[0].attrib.get("content", None)

    @cached_property
    def dpi_nodes(self) -> List[ElementTree.Element]:
        """List of ElementTree Elements corresponding to nodes in a test that
        specify device pixel ratios"""
        assert self.root is not None
        return self.root.findall(".//{http://www.w3.org/1999/xhtml}meta[@name='device-pixel-ratio']")

    @cached_property
    def dpi(self) -> Optional[Text]:
        """The device pixel ratio of a test or reference file"""
        if self.root is None:
            return None

        if not self.dpi_nodes:
            return None

        # Only the first <meta name=device-pixel-ratio> is consulted.
        return self.dpi_nodes[0].attrib.get("content", None)
+
    def parse_ref_keyed_meta(self, node: ElementTree.Element) -> Tuple[Optional[Tuple[Text, Text, Text]], Text]:
        """Parse a <meta> content value that may be keyed by a reference.

        Content is either "value" or "ref-url:value". Returns (key, value),
        where key is None or (test url, absolute ref url, ref type).

        :raises ValueError: if the url part does not match one of this file's
            references with type "==" or "!=".
        """
        item: Text = node.attrib.get("content", "")

        parts = item.rsplit(":", 1)
        if len(parts) == 1:
            key: Optional[Tuple[Text, Text, Text]] = None
            value = parts[0]
        else:
            key_part = urljoin(self.url, parts[0])
            reftype = None
            for ref in self.references:  # type: Tuple[Text, Text]
                if ref[0] == key_part:
                    reftype = ref[1]
                    break
            if reftype not in ("==", "!="):
                raise ValueError("Key %s doesn't correspond to a reference" % key_part)
            key = (self.url, key_part, reftype)
            value = parts[1]

        return key, value
+
+
    @cached_property
    def fuzzy_nodes(self) -> List[ElementTree.Element]:
        """List of ElementTree Elements corresponding to nodes in a test that
        specify reftest fuzziness"""
        assert self.root is not None
        return self.root.findall(".//{http://www.w3.org/1999/xhtml}meta[@name='fuzzy']")


    @cached_property
    def fuzzy(self) -> Dict[Optional[Tuple[Text, Text, Text]], List[List[int]]]:
        """Parsed <meta name=fuzzy> content.

        Maps a reference key from parse_ref_keyed_meta (or None for unkeyed
        values) to [[maxDifference min,max], [totalPixels min,max]].

        :raises ValueError: on malformed values, unknown or duplicate
            argument names.
        """
        rv: Dict[Optional[Tuple[Text, Text, Text]], List[List[int]]] = {}
        if self.root is None:
            return rv

        if not self.fuzzy_nodes:
            return rv

        args = ["maxDifference", "totalPixels"]

        for node in self.fuzzy_nodes:
            key, value = self.parse_ref_keyed_meta(node)
            # Value is "maxDifference;totalPixels", each range "N" or
            # "min-max", optionally prefixed "name=".
            ranges = value.split(";")
            if len(ranges) != 2:
                raise ValueError("Malformed fuzzy value %s" % value)
            arg_values: Dict[Text, List[int]] = {}
            positional_args: Deque[List[int]] = deque()
            for range_str_value in ranges:  # type: Text
                name: Optional[Text] = None
                if "=" in range_str_value:
                    name, range_str_value = (part.strip()
                                             for part in range_str_value.split("=", 1))
                    if name not in args:
                        raise ValueError("%s is not a valid fuzzy property" % name)
                    if arg_values.get(name):
                        raise ValueError("Got multiple values for argument %s" % name)
                if "-" in range_str_value:
                    range_min, range_max = range_str_value.split("-")
                else:
                    range_min = range_str_value
                    range_max = range_str_value
                try:
                    range_value = [int(x.strip()) for x in (range_min, range_max)]
                except ValueError:
                    raise ValueError("Fuzzy value %s must be a range of integers" %
                                     range_str_value)
                if name is None:
                    positional_args.append(range_value)
                else:
                    arg_values[name] = range_value
            rv[key] = []
            for arg_name in args:
                if arg_values.get(arg_name):
                    arg_value = arg_values.pop(arg_name)
                else:
                    # Unnamed ranges fill remaining slots in declared order.
                    arg_value = positional_args.popleft()
                rv[key].append(arg_value)
            assert len(arg_values) == 0 and len(positional_args) == 0
        return rv
+
    @cached_property
    def page_ranges_nodes(self) -> List[ElementTree.Element]:
        """List of ElementTree Elements corresponding to nodes in a test that
        specify print-reftest page ranges"""
        assert self.root is not None
        return self.root.findall(".//{http://www.w3.org/1999/xhtml}meta[@name='reftest-pages']")

    @cached_property
    def page_ranges(self) -> Dict[Text, List[List[Optional[int]]]]:
        """Parsed <meta name=reftest-pages> content, keyed by url.

        Each value is a list of page selections: [n] for a single page or
        [start, end] for a range (either bound may be None for open ranges).

        :raises ValueError: for malformed or duplicate values."""
        rv: Dict[Text, List[List[Optional[int]]]] = {}
        for node in self.page_ranges_nodes:
            key_data, value = self.parse_ref_keyed_meta(node)
            # Just key by url
            if key_data is None:
                key = self.url
            else:
                key = key_data[1]
            if key in rv:
                raise ValueError("Duplicate page-ranges value")
            rv[key] = []
            for range_str in value.split(","):
                range_str = range_str.strip()
                if "-" in range_str:
                    range_parts_str = [item.strip() for item in range_str.split("-")]
                    try:
                        # Empty bounds ("3-", "-5") become None (open range).
                        range_parts = [int(item) if item else None for item in range_parts_str]
                    except ValueError:
                        raise ValueError("Malformed page-range value %s" % range_str)
                    if any(item == 0 for item in range_parts):
                        raise ValueError("Malformed page-range value %s" % range_str)
                else:
                    try:
                        range_parts = [int(range_str)]
                    except ValueError:
                        raise ValueError("Malformed page-range value %s" % range_str)
                rv[key].append(range_parts)
        return rv
+
+ @cached_property
+ def testharness_nodes(self) -> List[ElementTree.Element]:
+ """List of ElementTree Elements corresponding to nodes representing a
+ testharness.js script"""
+ assert self.root is not None
+ return self.root.findall(".//{http://www.w3.org/1999/xhtml}script[@src='/resources/testharness.js']")
+
+ @cached_property
+ def content_is_testharness(self) -> Optional[bool]:
+ """Boolean indicating whether the file content represents a
+ testharness.js test"""
+ if self.root is None:
+ return None
+ return bool(self.testharness_nodes)
+
+ @cached_property
+ def variant_nodes(self) -> List[ElementTree.Element]:
+ """List of ElementTree Elements corresponding to nodes representing a
+ test variant"""
+ assert self.root is not None
+ return self.root.findall(".//{http://www.w3.org/1999/xhtml}meta[@name='variant']")
+
+ @cached_property
+ def test_variants(self) -> List[Text]:
+ rv: List[Text] = []
+ if self.ext == ".js":
+ script_metadata = self.script_metadata
+ assert script_metadata is not None
+ for (key, value) in script_metadata:
+ if key == "variant":
+ rv.append(value)
+ else:
+ for element in self.variant_nodes:
+ if "content" in element.attrib:
+ variant: Text = element.attrib["content"]
+ rv.append(variant)
+
+ for variant in rv:
+ if variant != "":
+ if variant[0] not in ("#", "?"):
+ raise ValueError("Non-empty variant must start with either a ? or a #")
+ if len(variant) == 1 or (variant[0] == "?" and variant[1] == "#"):
+ raise ValueError("Variants must not have empty fragment or query " +
+ "(omit the empty part instead)")
+
+ if not rv:
+ rv = [""]
+
+ return rv
+
+ @cached_property
+ def testdriver_nodes(self) -> List[ElementTree.Element]:
+ """List of ElementTree Elements corresponding to nodes representing a
+ testdriver.js script"""
+ assert self.root is not None
+ return self.root.findall(".//{http://www.w3.org/1999/xhtml}script[@src='/resources/testdriver.js']")
+
    @cached_property
    def has_testdriver(self) -> Optional[bool]:
        """Boolean indicating whether the file content includes a
        testdriver.js script; None when the file could not be parsed.

        (The original docstring said "testharness.js test" — a copy-paste
        error; this property checks testdriver_nodes.)"""
        if self.root is None:
            return None
        return bool(self.testdriver_nodes)
+
+ @cached_property
+ def reftest_nodes(self) -> List[ElementTree.Element]:
+ """List of ElementTree Elements corresponding to nodes representing a
+ to a reftest <link>"""
+ if self.root is None:
+ return []
+
+ match_links = self.root.findall(".//{http://www.w3.org/1999/xhtml}link[@rel='match']")
+ mismatch_links = self.root.findall(".//{http://www.w3.org/1999/xhtml}link[@rel='mismatch']")
+ return match_links + mismatch_links
+
+ @cached_property
+ def references(self) -> List[Tuple[Text, Text]]:
+ """List of (ref_url, relation) tuples for any reftest references specified in
+ the file"""
+ rv: List[Tuple[Text, Text]] = []
+ rel_map = {"match": "==", "mismatch": "!="}
+ for item in self.reftest_nodes:
+ if "href" in item.attrib:
+ ref_url = urljoin(self.url, item.attrib["href"].strip(space_chars))
+ ref_type = rel_map[item.attrib["rel"]]
+ rv.append((ref_url, ref_type))
+ return rv
+
+ @cached_property
+ def content_is_ref_node(self) -> bool:
+ """Boolean indicating whether the file is a non-leaf node in a reftest
+ graph (i.e. if it contains any <link rel=[mis]match>"""
+ return bool(self.references)
+
+ @cached_property
+ def css_flag_nodes(self) -> List[ElementTree.Element]:
+ """List of ElementTree Elements corresponding to nodes representing a
+ flag <meta>"""
+ if self.root is None:
+ return []
+ return self.root.findall(".//{http://www.w3.org/1999/xhtml}meta[@name='flags']")
+
+ @cached_property
+ def css_flags(self) -> Set[Text]:
+ """Set of flags specified in the file"""
+ rv: Set[Text] = set()
+ for item in self.css_flag_nodes:
+ if "content" in item.attrib:
+ for flag in item.attrib["content"].split():
+ rv.add(flag)
+ return rv
+
+ @cached_property
+ def content_is_css_manual(self) -> Optional[bool]:
+ """Boolean indicating whether the file content represents a
+ CSS WG-style manual test"""
+ if self.root is None:
+ return None
+ # return True if the intersection between the two sets is non-empty
+ return bool(self.css_flags & {"animated", "font", "history", "interact", "paged", "speech", "userstyle"})
+
+ @cached_property
+ def spec_link_nodes(self) -> List[ElementTree.Element]:
+ """List of ElementTree Elements corresponding to nodes representing a
+ <link rel=help>, used to point to specs"""
+ if self.root is None:
+ return []
+ return self.root.findall(".//{http://www.w3.org/1999/xhtml}link[@rel='help']")
+
+ @cached_property
+ def spec_links(self) -> Set[Text]:
+ """Set of spec links specified in the file"""
+ rv: Set[Text] = set()
+ for item in self.spec_link_nodes:
+ if "href" in item.attrib:
+ rv.add(item.attrib["href"].strip(space_chars))
+ return rv
+
+ @cached_property
+ def content_is_css_visual(self) -> Optional[bool]:
+ """Boolean indicating whether the file content represents a
+ CSS WG-style visual test"""
+ if self.root is None:
+ return None
+ return bool(self.ext in {'.xht', '.html', '.xhtml', '.htm', '.xml', '.svg'} and
+ self.spec_links)
+
+ @property
+ def type(self) -> Text:
+ possible_types = self.possible_types
+ if len(possible_types) == 1:
+ return possible_types.pop()
+
+ rv, _ = self.manifest_items()
+ return rv
+
    @property
    def possible_types(self) -> Set[Text]:
        """Determines the set of possible types without reading the file.

        The order of the name-based checks below is significant: earlier,
        more specific filename conventions win over later ones.  Only when
        the name is inconclusive does the result widen to several candidate
        types, which manifest_items() then narrows by reading the content.
        """

        # A previous manifest_items() call already fixed the type exactly.
        if self.items_cache:
            return {self.items_cache[0]}

        if self.name_is_non_test:
            return {SupportFile.item_type}

        if self.name_is_manual:
            return {ManualTest.item_type}

        if self.name_is_conformance:
            return {ConformanceCheckerTest.item_type}

        if self.name_is_conformance_support:
            return {SupportFile.item_type}

        if self.name_is_webdriver:
            return {WebDriverSpecTest.item_type}

        if self.name_is_visual:
            return {VisualTest.item_type}

        if self.name_is_crashtest:
            return {CrashTest.item_type}

        if self.name_is_print_reftest:
            return {PrintRefTest.item_type}

        if self.name_is_multi_global:
            return {TestharnessTest.item_type}

        if self.name_is_worker:
            return {TestharnessTest.item_type}

        if self.name_is_window:
            return {TestharnessTest.item_type}

        # Non-markup files that matched no naming convention can only be
        # support files.
        if self.markup_type is None:
            return {SupportFile.item_type}

        # Markup content must be read to decide; references can never be
        # manual or visual tests.
        if not self.name_is_reference:
            return {ManualTest.item_type,
                    TestharnessTest.item_type,
                    RefTest.item_type,
                    VisualTest.item_type,
                    SupportFile.item_type}

        return {TestharnessTest.item_type,
                RefTest.item_type,
                SupportFile.item_type}
+
    def manifest_items(self) -> Tuple[Text, List[ManifestItem]]:
        """List of manifest items corresponding to the file. There is typically one
        per test, but in the case of reftests a node may have corresponding manifest
        items without being a test itself.

        Returns an (item_type, items) tuple and caches it on the instance,
        so each SourceFile is classified at most once.
        """

        if self.items_cache:
            return self.items_cache

        # If the parse tree was not already cached before this call, drop all
        # cached properties afterwards so the (potentially large) tree is not
        # pinned in memory just for classification.
        drop_cached = "root" not in self.__dict__

        if self.name_is_non_test:
            rv: Tuple[Text, List[ManifestItem]] = ("support", [
                SupportFile(
                    self.tests_root,
                    self.rel_path
                )])

        elif self.name_is_manual:
            rv = ManualTest.item_type, [
                ManualTest(
                    self.tests_root,
                    self.rel_path,
                    self.url_base,
                    self.rel_url
                )]

        elif self.name_is_conformance:
            rv = ConformanceCheckerTest.item_type, [
                ConformanceCheckerTest(
                    self.tests_root,
                    self.rel_path,
                    self.url_base,
                    self.rel_url
                )]

        elif self.name_is_conformance_support:
            rv = "support", [
                SupportFile(
                    self.tests_root,
                    self.rel_path
                )]

        elif self.name_is_webdriver:
            rv = WebDriverSpecTest.item_type, [
                WebDriverSpecTest(
                    self.tests_root,
                    self.rel_path,
                    self.url_base,
                    self.rel_url,
                    timeout=self.timeout
                )]

        elif self.name_is_visual:
            rv = VisualTest.item_type, [
                VisualTest(
                    self.tests_root,
                    self.rel_path,
                    self.url_base,
                    self.rel_url
                )]

        elif self.name_is_crashtest:
            rv = CrashTest.item_type, [
                CrashTest(
                    self.tests_root,
                    self.rel_path,
                    self.url_base,
                    self.rel_url
                )]

        elif self.name_is_print_reftest:
            references = self.references
            if not references:
                raise ValueError("%s detected as print reftest but doesn't have any refs" %
                                 self.path)
            rv = PrintRefTest.item_type, [
                PrintRefTest(
                    self.tests_root,
                    self.rel_path,
                    self.url_base,
                    self.rel_url,
                    references=references,
                    timeout=self.timeout,
                    viewport_size=self.viewport_size,
                    fuzzy=self.fuzzy,
                    page_ranges=self.page_ranges,
                )]

        elif self.name_is_multi_global:
            # Pick up the first "global" script-metadata entry, which selects
            # which global scopes (window, worker, ...) the test expands to.
            globals = ""
            script_metadata = self.script_metadata
            assert script_metadata is not None
            for (key, value) in script_metadata:
                if key == "global":
                    globals = value
                    break

            # One test per (global scope suffix, variant) combination.
            tests: List[ManifestItem] = [
                TestharnessTest(
                    self.tests_root,
                    self.rel_path,
                    self.url_base,
                    global_variant_url(self.rel_url, suffix) + variant,
                    timeout=self.timeout,
                    pac=self.pac,
                    jsshell=jsshell,
                    script_metadata=self.script_metadata
                )
                for (suffix, jsshell) in sorted(global_suffixes(globals))
                for variant in self.test_variants
            ]
            rv = TestharnessTest.item_type, tests

        elif self.name_is_worker:
            # .worker.js files are served wrapped in a generated .worker.html.
            test_url = replace_end(self.rel_url, ".worker.js", ".worker.html")
            tests = [
                TestharnessTest(
                    self.tests_root,
                    self.rel_path,
                    self.url_base,
                    test_url + variant,
                    timeout=self.timeout,
                    pac=self.pac,
                    script_metadata=self.script_metadata
                )
                for variant in self.test_variants
            ]
            rv = TestharnessTest.item_type, tests

        elif self.name_is_window:
            # .window.js files are served wrapped in a generated .window.html.
            test_url = replace_end(self.rel_url, ".window.js", ".window.html")
            tests = [
                TestharnessTest(
                    self.tests_root,
                    self.rel_path,
                    self.url_base,
                    test_url + variant,
                    timeout=self.timeout,
                    pac=self.pac,
                    script_metadata=self.script_metadata
                )
                for variant in self.test_variants
            ]
            rv = TestharnessTest.item_type, tests

        elif self.content_is_css_manual and not self.name_is_reference:
            rv = ManualTest.item_type, [
                ManualTest(
                    self.tests_root,
                    self.rel_path,
                    self.url_base,
                    self.rel_url
                )]

        elif self.content_is_testharness:
            rv = TestharnessTest.item_type, []
            testdriver = self.has_testdriver
            for variant in self.test_variants:
                url = self.rel_url + variant
                rv[1].append(TestharnessTest(
                    self.tests_root,
                    self.rel_path,
                    self.url_base,
                    url,
                    timeout=self.timeout,
                    pac=self.pac,
                    testdriver=testdriver,
                    script_metadata=self.script_metadata
                ))

        elif self.content_is_ref_node:
            rv = RefTest.item_type, []
            for variant in self.test_variants:
                url = self.rel_url + variant
                # Each variant compares against the same references, with the
                # variant suffix propagated onto the reference URLs.
                rv[1].append(RefTest(
                    self.tests_root,
                    self.rel_path,
                    self.url_base,
                    url,
                    references=[
                        (ref[0] + variant, ref[1])
                        for ref in self.references
                    ],
                    timeout=self.timeout,
                    viewport_size=self.viewport_size,
                    dpi=self.dpi,
                    fuzzy=self.fuzzy
                ))

        elif self.content_is_css_visual and not self.name_is_reference:
            rv = VisualTest.item_type, [
                VisualTest(
                    self.tests_root,
                    self.rel_path,
                    self.url_base,
                    self.rel_url
                )]

        else:
            rv = "support", [
                SupportFile(
                    self.tests_root,
                    self.rel_path
                )]

        # The chosen type must be consistent with the name-based prediction,
        # and items must be unique.
        assert rv[0] in self.possible_types
        assert len(rv[1]) == len(set(rv[1]))

        self.items_cache = rv

        # Free the parse tree and everything derived from it (see drop_cached
        # above); __cached_properties__ tracks which names were memoised.
        if drop_cached and "__cached_properties__" in self.__dict__:
            cached_properties = self.__dict__["__cached_properties__"]
            for prop in cached_properties:
                if prop in self.__dict__:
                    del self.__dict__[prop]
            del self.__dict__["__cached_properties__"]

        return rv
+
+ def manifest_spec_items(self) -> Optional[Tuple[Text, List[ManifestItem]]]:
+ specs = list(self.spec_links)
+ if not specs:
+ return None
+ rv: Tuple[Text, List[ManifestItem]] = (SpecItem.item_type, [
+ SpecItem(
+ self.tests_root,
+ self.rel_path,
+ specs
+ )])
+ return rv
diff --git a/testing/web-platform/tests/tools/manifest/spec.py b/testing/web-platform/tests/tools/manifest/spec.py
new file mode 100644
index 0000000000..5148fceb3e
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/spec.py
@@ -0,0 +1,84 @@
+#!/usr/bin/env python3
+import argparse
+import os
+from typing import Any, Optional, Text
+
+from . import vcs
+from .manifest import compute_manifest_spec_items, InvalidCacheError, Manifest, write
+from .log import get_logger, enable_debug_logging
+
+
+here = os.path.dirname(__file__)
+
+wpt_root = os.path.abspath(os.path.join(here, os.pardir, os.pardir))
+
+logger = get_logger()
+
+
def update_spec(tests_root: Text,
                manifest_path: Text,
                url_base: Text,
                cache_root: Optional[Text] = None,
                working_copy: bool = True,
                parallel: bool = True
                ) -> None:
    """Rebuild the spec manifest for tests_root and write it to manifest_path
    if anything changed.  Returns early (logging an error) when the on-disk
    manifest cache is invalid."""
    spec_manifest = Manifest(tests_root, url_base)

    logger.info("Updating SPEC_MANIFEST")
    try:
        tree = vcs.get_tree(tests_root, spec_manifest, manifest_path, cache_root,
                            working_copy, True)
        changed = spec_manifest.update(tree, parallel, compute_manifest_spec_items)
    except InvalidCacheError:
        logger.error("Manifest cache in spec.py was invalid.")
        return

    if changed:
        write(spec_manifest, manifest_path)
    # Persist tree caches even when the manifest itself was unchanged.
    tree.dump_caches()
+
+
def update_from_cli(**kwargs: Any) -> None:
    """Forward parsed CLI arguments to update_spec."""
    tests_root = kwargs["tests_root"]
    assert tests_root is not None
    update_spec(tests_root,
                kwargs["path"],
                kwargs["url_base"],
                cache_root=kwargs["cache_root"],
                parallel=kwargs["parallel"])
+
+
def abs_path(path: str) -> str:
    """Expand a leading ~ and return *path* as an absolute path."""
    expanded = os.path.expanduser(path)
    return os.path.abspath(expanded)
+
+
def create_parser() -> argparse.ArgumentParser:
    """Build the argument parser for the spec-manifest update command."""
    parser = argparse.ArgumentParser()
    # (flags, options) pairs, added in the order they appear in --help.
    arg_specs = [
        (("-v", "--verbose"),
         dict(dest="verbose", action="store_true", default=False,
              help="Turn on verbose logging")),
        (("-p", "--path"),
         dict(type=abs_path, help="Path to manifest file.")),
        (("--tests-root",),
         dict(type=abs_path, default=wpt_root, help="Path to root of tests.")),
        (("--url-base",),
         dict(action="store", default="/",
              help="Base url to use as the mount point for tests in this manifest.")),
        (("--cache-root",),
         dict(action="store", default=os.path.join(wpt_root, ".wptcache"),
              help="Path in which to store any caches (default <tests_root>/.wptcache/)")),
        (("--no-parallel",),
         dict(dest="parallel", action="store_false", default=True,
              help="Do not parallelize building the manifest")),
    ]
    for flags, options in arg_specs:
        parser.add_argument(*flags, **options)
    return parser
+
+
def run(*args: Any, **kwargs: Any) -> None:
    """Entry point: default the manifest path, enable debug logging when
    requested, then run the update."""
    if kwargs["path"] is None:
        default_path = os.path.join(kwargs["tests_root"], "SPEC_MANIFEST.json")
        kwargs["path"] = default_path
    if kwargs["verbose"]:
        enable_debug_logging()
    update_from_cli(**kwargs)
diff --git a/testing/web-platform/tests/tools/manifest/testpaths.py b/testing/web-platform/tests/tools/manifest/testpaths.py
new file mode 100644
index 0000000000..2fa5393826
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/testpaths.py
@@ -0,0 +1,98 @@
+import argparse
+import json
+import os
+from collections import defaultdict
+from typing import Any, Dict, Iterable, List, Text
+
+from .manifest import load_and_update, Manifest
+from .log import get_logger
+
+wpt_root = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
+
+logger = get_logger()
+
+
def abs_path(path: str) -> str:
    """Return *path* with ~ expanded, as an absolute path."""
    with_home = os.path.expanduser(path)
    return os.path.abspath(with_home)
+
+
def create_parser() -> argparse.ArgumentParser:
    """Build the argument parser for the testpaths command."""
    parser = argparse.ArgumentParser()
    # (flags, options) pairs, added in the order they appear in --help.
    arg_specs = [
        (("-p", "--path"),
         dict(type=abs_path, help="Path to manifest file.")),
        (("--src-root",),
         dict(type=abs_path, default=None, help="Path to root of sourcetree.")),
        (("--tests-root",),
         dict(type=abs_path, default=wpt_root, help="Path to root of tests.")),
        (("--no-update",),
         dict(dest="update", action="store_false", default=True,
              help="Don't update manifest before continuing")),
        (("-r", "--rebuild"),
         dict(action="store_true", default=False,
              help="Force a full rebuild of the manifest.")),
        (("--url-base",),
         dict(action="store", default="/",
              help="Base url to use as the mount point for tests in this manifest.")),
        (("--cache-root",),
         dict(action="store", default=os.path.join(wpt_root, ".wptcache"),
              help="Path in which to store any caches (default <tests_root>/.wptcache/)")),
        (("--json",),
         dict(action="store_true", default=False,
              help="Output as JSON")),
        (("test_ids",),
         dict(action="store", nargs="+",
              help="Test ids for which to get paths")),
    ]
    for flags, options in arg_specs:
        parser.add_argument(*flags, **options)
    return parser
+
+
def get_path_id_map(src_root: Text, tests_root: Text, manifest_file: Manifest, test_ids: Iterable[Text]) -> Dict[Text, List[Text]]:
    """Map test file paths (relative to src_root) to the subset of test_ids
    their tests match, resolving ids to files via the manifest."""
    wanted = set(test_ids)
    path_id_map: Dict[Text, List[Text]] = defaultdict(list)

    # Paths only need re-rooting when src_root and tests_root differ.
    needs_rel_path = src_root != tests_root

    for _item_type, path, tests in manifest_file:
        matching = [test.id for test in tests if test.id in wanted]
        if not matching:
            continue
        if needs_rel_path:
            out_path = os.path.relpath(os.path.join(tests_root, path), src_root)
        else:
            out_path = path
        path_id_map[out_path].extend(matching)
    return path_id_map
+
+
def get_paths(**kwargs: Any) -> Dict[Text, List[Text]]:
    """Load (and optionally update) the manifest, then map the requested
    test ids to their source paths."""
    tests_root = kwargs["tests_root"]
    assert tests_root is not None

    manifest_path = kwargs["path"]
    if manifest_path is None:
        manifest_path = os.path.join(kwargs["tests_root"], "MANIFEST.json")

    src_root = kwargs["src_root"]
    if src_root is None:
        src_root = tests_root

    manifest_file = load_and_update(tests_root,
                                    manifest_path,
                                    kwargs["url_base"],
                                    update=kwargs["update"],
                                    rebuild=kwargs["rebuild"],
                                    cache_root=kwargs["cache_root"])

    return get_path_id_map(src_root, tests_root, manifest_file, kwargs["test_ids"])
+
+
def write_output(path_id_map: Dict[Text, List[Text]], as_json: bool) -> None:
    """Print the path->ids map, either as a single JSON object or as a
    path-per-line listing with indented test ids."""
    if as_json:
        print(json.dumps(path_id_map))
        return
    for path in sorted(path_id_map):
        print(path)
        for test_id in sorted(path_id_map[path]):
            print(" " + test_id)
+
+
def run(**kwargs: Any) -> None:
    """CLI entry point: compute the path->id map and print it."""
    mapping = get_paths(**kwargs)
    write_output(mapping, as_json=kwargs["json"])
diff --git a/testing/web-platform/tests/tools/manifest/tests/__init__.py b/testing/web-platform/tests/tools/manifest/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/tests/__init__.py
diff --git a/testing/web-platform/tests/tools/manifest/tests/test_XMLParser.py b/testing/web-platform/tests/tools/manifest/tests/test_XMLParser.py
new file mode 100644
index 0000000000..d2d349d11e
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/tests/test_XMLParser.py
@@ -0,0 +1,56 @@
+# mypy: allow-untyped-defs
+
+from xml.etree.ElementTree import ParseError
+
+import pytest
+
+from ..XMLParser import XMLParser
+
+
@pytest.mark.parametrize("s", [
    '<foo>&nbsp;</foo>',
    '<!DOCTYPE foo><foo>&nbsp;</foo>',
    '<!DOCTYPE foo PUBLIC "fake" "id"><foo>&nbsp;</foo>',
    '<!DOCTYPE foo PUBLIC "fake" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"><foo>&nbsp;</foo>',
    '<!DOCTYPE foo PUBLIC "fake-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"><foo>&nbsp;</foo>'
])
def test_undefined_entity(s):
    # &nbsp; is not a built-in XML entity; without a recognised XHTML public
    # id the parser must reject it as undefined.
    with pytest.raises(ParseError):
        p = XMLParser()
        p.feed(s)
        p.close()
+
+
@pytest.mark.parametrize("s", [
    '<!DOCTYPE foo PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"><foo>&nbsp;</foo>'
])
def test_defined_entity(s):
    # With the genuine XHTML public id, &nbsp; resolves to U+00A0.
    p = XMLParser()
    p.feed(s)
    d = p.close()
    assert d.tag == "foo"
    assert d.text == "\u00A0"
+
+
def test_pi():
    # Processing instructions are ignored and produce no child nodes.
    p = XMLParser()
    p.feed('<foo><?foo bar?></foo>')
    d = p.close()
    assert d.tag == "foo"
    assert len(d) == 0
+
+
def test_comment():
    # Comments are ignored and produce no child nodes.
    p = XMLParser()
    p.feed('<foo><!-- data --></foo>')
    d = p.close()
    assert d.tag == "foo"
    assert len(d) == 0
+
+
def test_unsupported_encoding():
    # Shift-JIS is not natively supported by expat; the parser must still
    # decode the declared encoding and yield correct text.
    p = XMLParser()
    p.feed("<?xml version='1.0' encoding='Shift-JIS'?><foo>\u3044</foo>".encode("shift-jis"))
    d = p.close()
    assert d.tag == "foo"
    assert d.text == "\u3044"
diff --git a/testing/web-platform/tests/tools/manifest/tests/test_item.py b/testing/web-platform/tests/tools/manifest/tests/test_item.py
new file mode 100644
index 0000000000..7640e9262c
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/tests/test_item.py
@@ -0,0 +1,160 @@
+# mypy: allow-untyped-defs
+
+import inspect
+import json
+
+import pytest
+
+from ..manifest import Manifest
+# Prevent pytest from treating TestharnessTest as a test class
+from ..item import TestharnessTest as HarnessTest
+from ..item import RefTest, item_types
+
+
@pytest.mark.parametrize("path", [
    "a.https.c",
    "a.b.https.c",
    "a.https.b.c",
    "a.b.https.c.d",
    "a.serviceworker.c",
    "a.b.serviceworker.c",
    "a.serviceworker.b.c",
    "a.b.serviceworker.c.d",
])
def test_url_https(path):
    # A ".https." or ".serviceworker." component anywhere inside the filename
    # (not at either end) marks the test as requiring HTTPS.
    m = HarnessTest("/foo", "bar/" + path, "/", "bar/" + path)

    assert m.https is True
+
+
@pytest.mark.parametrize("path", [
    "https",
    "a.https",
    "a.b.https",
    "https.a",
    "https.a.b",
    "a.bhttps.c",
    "a.httpsb.c",
    "serviceworker",
    "a.serviceworker",
    "a.b.serviceworker",
    "serviceworker.a",
    "serviceworker.a.b",
    "a.bserviceworker.c",
    "a.serviceworkerb.c",
])
def test_url_not_https(path):
    # The keyword must be a whole dot-delimited interior component; prefixes,
    # suffixes and substrings do not count.
    m = HarnessTest("/foo", "bar/" + path, "/", "bar/" + path)

    assert m.https is False
+
+
@pytest.mark.parametrize("path", [
    "a.www.c",
    "a.b.www.c",
    "a.www.b.c",
    "a.b.www.c.d",
    "a.https.www.c",
    "a.b.https.www.c",
    "a.https.www.b.c",
    "a.b.https.www.c.d",
])
def test_url_subdomain(path):
    # A ".www." interior component (optionally combined with ".https.")
    # marks the test as requiring a subdomain.
    m = HarnessTest("/foo", "bar/" + path, "/", "bar/" + path)

    assert m.subdomain is True
+
+
@pytest.mark.parametrize("path", [
    "www",
    "a.www",
    "a.b.www",
    "www.a",
    "www.a.b",
    "a.bwwww.c",
    "a.wwwwb.c",
])
def test_url_not_subdomain(path):
    # "www" at either end of the name, or embedded in a longer component,
    # must not trigger the subdomain flag.
    m = HarnessTest("/foo", "bar/" + path, "/", "bar/" + path)

    assert m.subdomain is False
+
+
@pytest.mark.parametrize("fuzzy", [
    {('/foo/test.html', '/foo/ref.html', '=='): [[1, 1], [200, 200]]},
    {('/foo/test.html', '/foo/ref.html', '=='): [[0, 1], [100, 200]]},
    {None: [[0, 1], [100, 200]]},
    {None: [[1, 1], [200, 200]]},
])
def test_reftest_fuzzy(fuzzy):
    # Fuzzy metadata (keyed and unkeyed) must survive a to_json/from_json
    # round trip, including the JSON round trip that turns tuples into lists.
    t = RefTest('/',
                'foo/test.html',
                '/',
                'foo/test.html',
                [('/foo/ref.html', '==')],
                fuzzy=fuzzy)
    assert fuzzy == t.fuzzy

    json_obj = t.to_json()

    m = Manifest("/", "/")
    t2 = RefTest.from_json(m, t.path, json_obj)
    assert fuzzy == t2.fuzzy

    # test the roundtrip case, given tuples become lists
    roundtrip = json.loads(json.dumps(json_obj))
    t3 = RefTest.from_json(m, t.path, roundtrip)
    assert fuzzy == t3.fuzzy
+
+
@pytest.mark.parametrize("fuzzy", [
    {('/foo/test.html', '/foo/ref-2.html', '=='): [[0, 1], [100, 200]]},
    {None: [[1, 1], [200, 200]], ('/foo/test.html', '/foo/ref-2.html', '=='): [[0, 1], [100, 200]]},
])
def test_reftest_fuzzy_multi(fuzzy):
    # Same round-trip property as test_reftest_fuzzy, but with multiple
    # references so per-reference fuzzy keys are exercised.
    t = RefTest('/',
                'foo/test.html',
                '/',
                'foo/test.html',
                [('/foo/ref-1.html', '=='), ('/foo/ref-2.html', '==')],
                fuzzy=fuzzy)
    assert fuzzy == t.fuzzy

    json_obj = t.to_json()

    m = Manifest("/", "/")
    t2 = RefTest.from_json(m, t.path, json_obj)
    assert fuzzy == t2.fuzzy

    # test the roundtrip case, given tuples become lists
    roundtrip = json.loads(json.dumps(json_obj))
    t3 = RefTest.from_json(m, t.path, roundtrip)
    assert fuzzy == t3.fuzzy
+
+
def test_item_types():
    # Every registered item type maps a string name to a concrete
    # (non-abstract) item class.
    for key, value in item_types.items():
        assert isinstance(key, str)
        assert not inspect.isabstract(value)
+
+
def test_wpt_flags():
    # wpt_flags query parameters toggle the subdomain/https/h2 properties
    # independently, and repeated parameters combine.
    m1 = HarnessTest("/foo", "bar", "/", "bar" + "?wpt_flags=www")
    assert m1.subdomain is True
    assert m1.https is False
    assert m1.h2 is False

    m2 = HarnessTest("/foo", "bar", "/", "bar" + "?wpt_flags=https")
    assert m2.subdomain is False
    assert m2.https is True
    assert m2.h2 is False

    m3 = HarnessTest("/foo", "bar", "/", "bar" + "?wpt_flags=h2")
    assert m3.subdomain is False
    assert m3.https is False
    assert m3.h2 is True

    m4 = HarnessTest("/foo", "bar", "/", "bar" + "?wpt_flags=https&wpt_flags=www")
    assert m4.subdomain is True
    assert m4.https is True
    assert m4.h2 is False
diff --git a/testing/web-platform/tests/tools/manifest/tests/test_manifest.py b/testing/web-platform/tests/tools/manifest/tests/test_manifest.py
new file mode 100644
index 0000000000..fc2314b835
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/tests/test_manifest.py
@@ -0,0 +1,337 @@
+# mypy: ignore-errors
+
+import os
+import sys
+from unittest import mock
+
+import hypothesis as h
+import hypothesis.strategies as hs
+import pytest
+
+from .. import manifest, sourcefile, item, utils
+
+from typing import Any, Type
+
+
def SourceFileWithTest(path: str, hash: str, cls: Type[item.ManifestItem], **kwargs: Any) -> sourcefile.SourceFile:
    # Build a mock SourceFile whose manifest_items() returns a single test of
    # the given item class for `path`.
    rel_path_parts = tuple(path.split(os.path.sep))
    s = mock.Mock(rel_path=path,
                  rel_path_parts=rel_path_parts,
                  hash=hash)
    if cls == item.SupportFile:
        # SupportFile takes no URL arguments.
        test = cls("/foobar", path)
    else:
        assert issubclass(cls, item.URLManifestItem)
        test = cls("/foobar", path, "/", utils.from_os_path(path), **kwargs)
    s.manifest_items = mock.Mock(return_value=(cls.item_type, [test]))
    return s  # type: ignore
+
+
def SourceFileWithTests(path: str, hash: str, cls: Type[item.URLManifestItem], variants: Any) -> sourcefile.SourceFile:
    # Like SourceFileWithTest, but yields one test per (url, kwargs) variant.
    rel_path_parts = tuple(path.split(os.path.sep))
    s = mock.Mock(rel_path=path,
                  rel_path_parts=rel_path_parts,
                  hash=hash)
    tests = [cls("/foobar", path, "/", item[0], **item[1]) for item in variants]
    s.manifest_items = mock.Mock(return_value=(cls.item_type, tests))
    return s  # type: ignore
+
+
def tree_and_sourcefile_mocks(source_files):
    # From (source_file, hash, updated) triples, build the tree iterable that
    # Manifest.update expects plus a SourceFile factory that returns the
    # corresponding mock for each path.
    paths_dict = {}
    tree = []
    for source_file, file_hash, updated in source_files:
        paths_dict[source_file.rel_path] = source_file
        tree.append([source_file.rel_path, file_hash, updated])

    def MockSourceFile(tests_root, path, url_base, file_hash):
        return paths_dict[path]

    return tree, MockSourceFile
+
+
@hs.composite
def sourcefile_strategy(draw):
    # Hypothesis strategy producing a mock source file with a random item
    # class and hash; reftest-like classes get a single reference.
    item_classes = [item.TestharnessTest, item.RefTest, item.PrintRefTest,
                    item.ManualTest, item.WebDriverSpecTest,
                    item.ConformanceCheckerTest, item.SupportFile]
    cls = draw(hs.sampled_from(item_classes))

    path = "a"
    rel_path_parts = tuple(path.split(os.path.sep))
    hash = draw(hs.text(alphabet="0123456789abcdef", min_size=40, max_size=40))
    s = mock.Mock(rel_path=path,
                  rel_path_parts=rel_path_parts,
                  hash=hash)

    if cls in (item.RefTest, item.PrintRefTest):
        ref_path = "b"
        ref_eq = draw(hs.sampled_from(["==", "!="]))
        test = cls("/foobar", path, "/", utils.from_os_path(path), references=[(utils.from_os_path(ref_path), ref_eq)])
    elif cls is item.SupportFile:
        test = cls("/foobar", path)
    else:
        test = cls("/foobar", path, "/", "foobar")

    s.manifest_items = mock.Mock(return_value=(cls.item_type, [test]))
    return s
+
+
@hs.composite
def manifest_tree(draw):
    # Hypothesis strategy: generate a random directory tree of mock source
    # files and flatten it into a list, fixing up rel_path/rel_path_parts and
    # giving RefTests references (possibly chaining to earlier reftest URLs).
    names = hs.text(alphabet=hs.characters(blacklist_characters="\0/\\:*\"?<>|"), min_size=1)
    tree = hs.recursive(sourcefile_strategy(),
                        lambda children: hs.dictionaries(names, children, min_size=1),
                        max_leaves=10)

    generated_root = draw(tree)
    h.assume(isinstance(generated_root, dict))

    reftest_urls = []
    output = []
    # Depth-first walk of the generated dict-of-dicts tree.
    stack = [((k,), v) for k, v in generated_root.items()]
    while stack:
        path, node = stack.pop()
        if isinstance(node, dict):
            stack.extend((path + (k,), v) for k, v in node.items())
        else:
            rel_path = os.path.sep.join(path)
            node.rel_path = rel_path
            node.rel_path_parts = tuple(path)
            for test_item in node.manifest_items.return_value[1]:
                test_item.path = rel_path
                if isinstance(test_item, item.RefTest):
                    # Allow references to earlier reftests so reference
                    # chains get generated.
                    if reftest_urls:
                        possible_urls = hs.sampled_from(reftest_urls) | names
                    else:
                        possible_urls = names
                    reference = hs.tuples(hs.sampled_from(["==", "!="]),
                                          possible_urls)
                    references = hs.lists(reference, min_size=1, unique=True)
                    test_item.references = draw(references)
                    reftest_urls.append(test_item.url)
            output.append(node)

    return output
+
+
@pytest.mark.skipif(sys.version_info[:3] in ((3, 10, 10), (3, 11, 2)),
                    reason="https://github.com/python/cpython/issues/102126")
@h.given(manifest_tree())
# FIXME: Workaround for https://github.com/web-platform-tests/wpt/issues/22758
@h.settings(suppress_health_check=(h.HealthCheck.too_slow,))
@h.example([SourceFileWithTest("a", "0"*40, item.ConformanceCheckerTest)])
def test_manifest_to_json(s):
    # A manifest built from any generated tree must round-trip losslessly
    # through to_json()/from_json().
    m = manifest.Manifest("")

    tree, sourcefile_mock = tree_and_sourcefile_mocks((item, None, True) for item in s)
    with mock.patch("tools.manifest.manifest.SourceFile", side_effect=sourcefile_mock):
        assert m.update(tree) is True

    json_str = m.to_json()
    loaded = manifest.Manifest.from_json("/", json_str)

    assert list(loaded) == list(m)

    assert loaded.to_json() == json_str
+
+
@pytest.mark.skipif(sys.version_info[:3] in ((3, 10, 10), (3, 11, 2)),
                    reason="https://github.com/python/cpython/issues/102126")
@h.given(manifest_tree())
# FIXME: Workaround for https://github.com/web-platform-tests/wpt/issues/22758
@h.settings(suppress_health_check=(h.HealthCheck.too_slow,))
@h.example([SourceFileWithTest("a", "0"*40, item.TestharnessTest)])
@h.example([SourceFileWithTest("a", "0"*40, item.RefTest, references=[("/aa", "==")])])
def test_manifest_idempotent(s):
    # Updating twice with the same tree must report no change the second
    # time and leave the manifest contents identical.
    m = manifest.Manifest("")

    tree, sourcefile_mock = tree_and_sourcefile_mocks((item, None, True) for item in s)
    with mock.patch("tools.manifest.manifest.SourceFile", side_effect=sourcefile_mock):
        assert m.update(tree) is True

    m1 = list(m)

    with mock.patch("tools.manifest.manifest.SourceFile", side_effect=sourcefile_mock):
        assert m.update(tree) is False

    assert list(m) == m1
+
+
def test_manifest_to_json_forwardslash():
    # OS-specific path separators must be serialised as nested JSON objects
    # (i.e. forward-slash path components), not embedded separators.
    m = manifest.Manifest("")

    s = SourceFileWithTest("a" + os.path.sep + "b", "0"*40, item.TestharnessTest)

    tree, sourcefile_mock = tree_and_sourcefile_mocks([(s, None, True)])
    with mock.patch("tools.manifest.manifest.SourceFile", side_effect=sourcefile_mock):
        assert m.update(tree) is True

    assert m.to_json() == {
        'version': 8,
        'url_base': '/',
        'items': {
            'testharness': {'a': {'b': [
                '0000000000000000000000000000000000000000',
                (None, {})
            ]}},
        }
    }
+
+
def test_reftest_computation_chain():
    # A reftest whose reference is itself a reftest must still be listed as
    # a reftest in its own right.
    m = manifest.Manifest("")

    s1 = SourceFileWithTest("test1", "0"*40, item.RefTest, references=[("/test2", "==")])
    s2 = SourceFileWithTest("test2", "0"*40, item.RefTest, references=[("/test3", "==")])

    tree, sourcefile_mock = tree_and_sourcefile_mocks([(s1, None, True), (s2, None, True)])
    with mock.patch("tools.manifest.manifest.SourceFile", side_effect=sourcefile_mock):
        m.update(tree)

    test1 = s1.manifest_items()[1][0]
    test2 = s2.manifest_items()[1][0]

    assert list(m) == [("reftest", test1.path, {test1}),
                       ("reftest", test2.path, {test2})]
+
+
def test_iterpath():
    # iterpath yields every test (all variants) for a given source path, and
    # nothing for unknown paths.
    m = manifest.Manifest("")

    sources = [SourceFileWithTest("test1", "0"*40, item.RefTest, references=[("/test1-ref", "==")]),
               SourceFileWithTests("test2", "1"*40, item.TestharnessTest, [("test2-1.html", {}),
                                                                          ("test2-2.html", {})]),
               SourceFileWithTest("test3", "0"*40, item.TestharnessTest)]
    tree, sourcefile_mock = tree_and_sourcefile_mocks((item, None, True) for item in sources)
    assert len(tree) == len(sources)
    with mock.patch("tools.manifest.manifest.SourceFile", side_effect=sourcefile_mock):
        m.update(tree)

    assert {item.url for item in m.iterpath("test2")} == {"/test2-1.html",
                                                          "/test2-2.html"}
    assert set(m.iterpath("missing")) == set()
+
+
def test_no_update():
    """Files flagged as not-updated keep their previous manifest entries."""
    m = manifest.Manifest("")

    s1 = SourceFileWithTest("test1", "0"*40, item.TestharnessTest)
    s2 = SourceFileWithTest("test2", "0"*40, item.TestharnessTest)

    tree, sourcefile_mock = tree_and_sourcefile_mocks((item, None, True) for item in [s1, s2])
    with mock.patch("tools.manifest.manifest.SourceFile", side_effect=sourcefile_mock):
        m.update(tree)

    test1 = s1.manifest_items()[1][0]
    test2 = s2.manifest_items()[1][0]

    assert list(m) == [("testharness", test1.path, {test1}),
                       ("testharness", test2.path, {test2})]

    s1_1 = SourceFileWithTest("test1", "1"*40, item.ManualTest)

    # s2 is passed with update=False: its old entry must survive untouched
    # while s1 is re-parsed as a manual test.
    tree, sourcefile_mock = tree_and_sourcefile_mocks([(s1_1, None, True), (s2, None, False)])
    with mock.patch("tools.manifest.manifest.SourceFile", side_effect=sourcefile_mock):
        m.update(tree)

    test1_1 = s1_1.manifest_items()[1][0]

    assert list(m) == [("manual", test1_1.path, {test1_1}),
                       ("testharness", test2.path, {test2})]
+
+
def test_no_update_delete():
    """A file absent from a later update tree is dropped from the manifest."""
    m = manifest.Manifest("")

    s1 = SourceFileWithTest("test1", "0"*40, item.TestharnessTest)
    s2 = SourceFileWithTest("test2", "0"*40, item.TestharnessTest)

    tree, sourcefile_mock = tree_and_sourcefile_mocks([(s1, None, True), (s2, None, True)])
    with mock.patch("tools.manifest.manifest.SourceFile", side_effect=sourcefile_mock):
        m.update(tree)

    test1 = s1.manifest_items()[1][0]

    # Second tree only contains s1 (unchanged); s2's entry must be deleted.
    tree, sourcefile_mock = tree_and_sourcefile_mocks([(s1, None, False)])
    with mock.patch("tools.manifest.manifest.SourceFile", side_effect=sourcefile_mock):
        m.update(tree)

    assert list(m) == [("testharness", test1.path, {test1})]
+
+
def test_update_from_json():
    """A manifest round-tripped through JSON can still be incrementally updated."""
    m = manifest.Manifest("")

    s1 = SourceFileWithTest("test1", "0"*40, item.TestharnessTest)
    s2 = SourceFileWithTest("test2", "0"*40, item.TestharnessTest)

    tree, sourcefile_mock = tree_and_sourcefile_mocks([(s1, None, True), (s2, None, True)])
    with mock.patch("tools.manifest.manifest.SourceFile", side_effect=sourcefile_mock):
        m.update(tree)

    # NOTE: to_json() returns the JSON-compatible dict, not a string.
    json_str = m.to_json()
    m = manifest.Manifest.from_json("/", json_str)

    # Updating the reloaded manifest with only s1 should drop s2.
    tree, sourcefile_mock = tree_and_sourcefile_mocks([(s1, None, True)])
    with mock.patch("tools.manifest.manifest.SourceFile", side_effect=sourcefile_mock):
        m.update(tree)

    test1 = s1.manifest_items()[1][0]

    assert list(m) == [("testharness", test1.path, {test1})]
+
+
def test_update_from_json_modified():
    """Extra metadata (timeout, pac) added after a JSON reload is serialized."""
    # Create the original manifest
    m = manifest.Manifest("")
    s1 = SourceFileWithTest("test1", "0"*40, item.TestharnessTest)
    tree, sourcefile_mock = tree_and_sourcefile_mocks([(s1, None, True)])
    with mock.patch("tools.manifest.manifest.SourceFile", side_effect=sourcefile_mock):
        m.update(tree)
    # NOTE: despite the name, json_str is the JSON-compatible dict.
    json_str = m.to_json()

    # Reload it from JSON
    m = manifest.Manifest.from_json("/", json_str)

    # Update it with timeout="long"
    s2 = SourceFileWithTest("test1", "1"*40, item.TestharnessTest, timeout="long", pac="proxy.pac")
    tree, sourcefile_mock = tree_and_sourcefile_mocks([(s2, None, True)])
    with mock.patch("tools.manifest.manifest.SourceFile", side_effect=sourcefile_mock):
        m.update(tree)
    json_str = m.to_json()
    assert json_str == {
        'items': {'testharness': {'test1': [
            "1"*40,
            (None, {'timeout': 'long', 'pac': 'proxy.pac'})
        ]}},
        'url_base': '/',
        'version': 8
    }
+
def test_manifest_spec_to_json():
    """Spec items produced via compute_manifest_spec_items serialize correctly."""
    m = manifest.Manifest("")

    rel_path = os.path.sep.join(["a", "b"])
    file_hash = "0" * 40  # avoid shadowing the builtin `hash`
    source = mock.Mock(rel_path=rel_path,
                       rel_path_parts=tuple(rel_path.split(os.path.sep)),
                       hash=file_hash)
    spec = item.SpecItem("/foobar", rel_path, ["specA"])
    source.manifest_spec_items = mock.Mock(return_value=(item.SpecItem.item_type, [spec]))

    tree, sourcefile_mock = tree_and_sourcefile_mocks([(source, None, True)])
    with mock.patch("tools.manifest.manifest.SourceFile", side_effect=sourcefile_mock):
        assert m.update(tree, True, manifest.compute_manifest_spec_items) is True

    expected = {
        'version': 8,
        'url_base': '/',
        'items': {
            'spec': {'a': {'b': [
                file_hash,
                (None, {'spec_link1': 'specA'})
            ]}},
        }
    }
    assert m.to_json() == expected
diff --git a/testing/web-platform/tests/tools/manifest/tests/test_sourcefile.py b/testing/web-platform/tests/tools/manifest/tests/test_sourcefile.py
new file mode 100644
index 0000000000..8a9d8c36ee
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/tests/test_sourcefile.py
@@ -0,0 +1,962 @@
+# mypy: allow-untyped-defs
+
+import os
+
+import pytest
+
+from io import BytesIO
+from ...lint.lint import check_global_metadata
+from ..sourcefile import SourceFile, read_script_metadata, js_meta_re, python_meta_re
+
+
def create(filename, contents=b""):
    """Build a SourceFile rooted at "/" for *filename* with the given bytes."""
    assert isinstance(contents, bytes)
    return SourceFile("/", filename, "/", contents=contents)
+
+
def items(s):
    """Return (item_type, url) pairs for *s*'s manifest items.

    Support files produce no test items, so they map to an empty list.
    """
    item_type, manifest_items = s.manifest_items()
    if item_type == "support":
        return []
    return [(item_type, entry.url) for entry in manifest_items]
+
+
@pytest.mark.parametrize("rel_path", [
    ".gitignore",
    ".travis.yml",
    "MANIFEST.json",
    "tools/test.html",
    "resources/test.html",
    "common/test.html",
    "support/test.html",
    "css21/archive/test.html",
    "conformance-checkers/test.html",
    "conformance-checkers/README.md",
    "conformance-checkers/html/Makefile",
    "conformance-checkers/html/test.html",
    "foo/tools/test.html",
    "foo/resources/test.html",
    "foo/support/test.html",
    "foo/foo-manual.html.headers",
    "crashtests/foo.html.ini",
    "css/common/test.html",
    "css/CSS2/archive/test.html",
    "css/WEB_FEATURES.yml",
    "css/META.yml",
])
def test_name_is_non_test(rel_path):
    """Paths in known non-test locations must not be classified as tests."""
    s = create(rel_path)
    assert s.name_is_non_test or s.name_is_conformance_support

    assert not s.content_is_testharness

    assert items(s) == []
+
+
@pytest.mark.parametrize("rel_path", [
    "foo/common/test.html",
    "foo/conformance-checkers/test.html",
    "foo/_certs/test.html",
    "foo/css21/archive/test.html",
    "foo/CSS2/archive/test.html",
    "css/css21/archive/test.html",
    "foo/test-support.html",
])
def test_not_name_is_non_test(rel_path):
    """Nested look-alike paths must not be excluded by name alone."""
    s = create(rel_path)
    assert not (s.name_is_non_test or s.name_is_conformance_support)
    # We aren't actually asserting what type of test these are, just their
    # name doesn't prohibit them from being tests.
+
+
@pytest.mark.parametrize("rel_path", [
    "foo/foo-manual.html",
    "html/test-manual.html",
    "html/test-manual.xhtml",
    "html/test-manual.https.html",
    "html/test-manual.https.xhtml"
])
def test_name_is_manual(rel_path):
    """Filenames with a -manual suffix are classified as manual tests."""
    s = create(rel_path)
    assert not s.name_is_non_test
    assert s.name_is_manual

    assert not s.content_is_testharness

    assert items(s) == [("manual", "/" + rel_path)]


@pytest.mark.parametrize("rel_path", [
    "html/test-visual.html",
    "html/test-visual.xhtml",
])
def test_name_is_visual(rel_path):
    """Filenames with a -visual suffix are classified as visual tests."""
    s = create(rel_path)
    assert not s.name_is_non_test
    assert s.name_is_visual

    assert not s.content_is_testharness

    assert items(s) == [("visual", "/" + rel_path)]
+
+
@pytest.mark.parametrize("rel_path", [
    "css-namespaces-3/reftest/ref-lime-1.xml",
    "css21/reference/pass_if_box_ahem.html",
    "css21/csswg-issues/submitted/css2.1/reference/ref-green-box-100x100.xht",
    "selectors-3/selectors-empty-001-ref.xml",
    "css21/text/text-indent-wrap-001-notref-block-margin.xht",
    "css21/text/text-indent-wrap-001-notref-block-margin.xht",
    "css21/css-e-notation-ref-1.html",
    "css21/floats/floats-placement-vertical-004-ref2.xht",
    "css21/box/rtl-linebreak-notref1.xht",
    "css21/box/rtl-linebreak-notref2.xht",
    "html/canvas/element/drawing-images-to-the-canvas/drawimage_html_image_5_ref.html",
    "html/canvas/element/line-styles/lineto_ref.html",
    "html/rendering/non-replaced-elements/the-fieldset-element-0/ref.html"
])
def test_name_is_reference(rel_path):
    """Reference-file naming patterns (ref/notref, reference/ dirs) yield no items."""
    s = create(rel_path)
    assert not s.name_is_non_test
    assert s.name_is_reference

    assert not s.content_is_testharness

    assert items(s) == []


def test_name_is_tentative():
    """Both a .tentative. infix and a tentative/ directory mark a test tentative."""
    s = create("css/css-ui/appearance-revert-001.tentative.html")
    assert s.name_is_tentative

    s = create("css/css-ui/tentative/appearance-revert-001.html")
    assert s.name_is_tentative

    s = create("css/css-ui/appearance-revert-001.html")
    assert not s.name_is_tentative
+
+
@pytest.mark.parametrize("rel_path", [
    "webdriver/tests/foo.py",
    "webdriver/tests/print/foo.py",
    "webdriver/tests/foo-crash.py",
    "webdriver/tests/foo-visual.py",
])
def test_name_is_webdriver(rel_path):
    """Python files under webdriver/ are wdspec tests, overriding other suffixes."""
    s = create(rel_path)
    assert s.name_is_webdriver

    item_type, items = s.manifest_items()
    assert item_type == "wdspec"
+
+
def test_worker():
    """A .worker.js file maps to a single generated .worker.html testharness URL."""
    s = create("html/test.worker.js")
    assert not s.name_is_non_test
    assert not s.name_is_manual
    assert not s.name_is_visual
    assert not s.name_is_multi_global
    assert s.name_is_worker
    assert not s.name_is_window
    assert not s.name_is_reference

    assert not s.content_is_testharness

    item_type, items = s.manifest_items()
    assert item_type == "testharness"

    expected_urls = [
        "/html/test.worker.html",
    ]
    assert len(items) == len(expected_urls)

    for item, url in zip(items, expected_urls):
        assert item.url == url
        assert item.timeout is None


def test_window():
    """A .window.js file maps to a single generated .window.html testharness URL."""
    s = create("html/test.window.js")
    assert not s.name_is_non_test
    assert not s.name_is_manual
    assert not s.name_is_visual
    assert not s.name_is_multi_global
    assert not s.name_is_worker
    assert s.name_is_window
    assert not s.name_is_reference

    assert not s.content_is_testharness

    item_type, items = s.manifest_items()
    assert item_type == "testharness"

    expected_urls = [
        "/html/test.window.html",
    ]
    assert len(items) == len(expected_urls)

    for item, url in zip(items, expected_urls):
        assert item.url == url
        assert item.timeout is None
+
+
def test_worker_long_timeout():
    """A `// META: timeout=long` comment propagates to generated worker items."""
    contents = b"""// META: timeout=long
importScripts('/resources/testharness.js')
test()"""

    metadata = list(read_script_metadata(BytesIO(contents), js_meta_re))
    assert metadata == [("timeout", "long")]

    s = create("html/test.worker.js", contents=contents)
    assert s.name_is_worker

    item_type, items = s.manifest_items()
    assert item_type == "testharness"

    for item in items:
        assert item.timeout == "long"


def test_window_long_timeout():
    """A `// META: timeout=long` comment propagates to generated window items."""
    contents = b"""// META: timeout=long
test()"""

    metadata = list(read_script_metadata(BytesIO(contents), js_meta_re))
    assert metadata == [("timeout", "long")]

    s = create("html/test.window.js", contents=contents)
    assert s.name_is_window

    item_type, items = s.manifest_items()
    assert item_type == "testharness"

    for item in items:
        assert item.timeout == "long"
+
+
def test_worker_with_variants():
    """Each `// META: variant=` line yields one generated worker URL."""
    contents = b"""// META: variant=?default
// META: variant=?wss
test()"""

    s = create("html/test.worker.js", contents=contents)
    assert not s.name_is_non_test
    assert not s.name_is_manual
    assert not s.name_is_visual
    assert not s.name_is_multi_global
    assert s.name_is_worker
    assert not s.name_is_window
    assert not s.name_is_reference

    assert not s.content_is_testharness

    item_type, items = s.manifest_items()
    assert item_type == "testharness"

    expected_urls = [
        "/html/test.worker.html" + suffix
        for suffix in ["?default", "?wss"]
    ]
    assert len(items) == len(expected_urls)

    for item, url in zip(items, expected_urls):
        assert item.url == url
        assert item.timeout is None


def test_window_with_variants():
    """Each `// META: variant=` line yields one generated window URL."""
    contents = b"""// META: variant=?default
// META: variant=?wss
test()"""

    s = create("html/test.window.js", contents=contents)
    assert not s.name_is_non_test
    assert not s.name_is_manual
    assert not s.name_is_visual
    assert not s.name_is_multi_global
    assert not s.name_is_worker
    assert s.name_is_window
    assert not s.name_is_reference

    assert not s.content_is_testharness

    item_type, items = s.manifest_items()
    assert item_type == "testharness"

    expected_urls = [
        "/html/test.window.html" + suffix
        for suffix in ["?default", "?wss"]
    ]
    assert len(items) == len(expected_urls)

    for item, url in zip(items, expected_urls):
        assert item.url == url
        assert item.timeout is None
+
+
def test_python_long_timeout():
    """A `# META: timeout=long` comment in a wdspec .py file sets item timeouts."""
    contents = b"""# META: timeout=long

"""

    metadata = list(read_script_metadata(BytesIO(contents),
                                         python_meta_re))
    assert metadata == [("timeout", "long")]

    s = create("webdriver/test.py", contents=contents)
    assert s.name_is_webdriver

    item_type, items = s.manifest_items()
    assert item_type == "wdspec"

    for item in items:
        assert item.timeout == "long"
+
+
def test_multi_global():
    """A .any.js file defaults to window and dedicated-worker generated URLs."""
    s = create("html/test.any.js")
    assert not s.name_is_non_test
    assert not s.name_is_manual
    assert not s.name_is_visual
    assert s.name_is_multi_global
    assert not s.name_is_worker
    assert not s.name_is_reference

    assert not s.content_is_testharness

    item_type, items = s.manifest_items()
    assert item_type == "testharness"

    expected_urls = [
        "/html/test.any.html",
        "/html/test.any.worker.html",
    ]
    assert len(items) == len(expected_urls)

    for item, url in zip(items, expected_urls):
        assert item.url == url
        assert item.timeout is None


def test_multi_global_long_timeout():
    """A `// META: timeout=long` comment propagates to all .any.js variants."""
    contents = b"""// META: timeout=long
importScripts('/resources/testharness.js')
test()"""

    metadata = list(read_script_metadata(BytesIO(contents), js_meta_re))
    assert metadata == [("timeout", "long")]

    s = create("html/test.any.js", contents=contents)
    assert s.name_is_multi_global

    item_type, items = s.manifest_items()
    assert item_type == "testharness"

    for item in items:
        assert item.timeout == "long"
+
+
@pytest.mark.parametrize("input,expected", [
    (b"window", {"window"}),
    (b"sharedworker", {"sharedworker"}),
    (b"sharedworker,serviceworker", {"serviceworker", "sharedworker"}),
    # "worker" is a shorthand expanding to all three worker scopes.
    (b"worker", {"dedicatedworker", "serviceworker", "sharedworker"}),
])
def test_multi_global_with_custom_globals(input, expected):
    """`// META: global=` selects which scope URLs an .any.js test generates."""
    contents = b"""// META: global=%s
test()""" % input

    assert list(check_global_metadata(input)) == []

    s = create("html/test.any.js", contents=contents)
    assert not s.name_is_non_test
    assert not s.name_is_manual
    assert not s.name_is_visual
    assert s.name_is_multi_global
    assert not s.name_is_worker
    assert not s.name_is_reference

    assert not s.content_is_testharness

    item_type, items = s.manifest_items()
    assert item_type == "testharness"

    urls = {
        "dedicatedworker": "/html/test.any.worker.html",
        "serviceworker": "/html/test.any.serviceworker.html",
        "sharedworker": "/html/test.any.sharedworker.html",
        "window": "/html/test.any.html",
    }

    expected_urls = sorted(urls[ty] for ty in expected)
    assert len(items) == len(expected_urls)

    for item, url in zip(items, expected_urls):
        assert item.url == url
        assert item.jsshell is False
        assert item.timeout is None
+
+
def test_multi_global_with_jsshell_globals():
    """The jsshell global keeps the raw .js URL and sets the jsshell flag."""
    contents = b"""// META: global=window,dedicatedworker,jsshell
test()"""

    s = create("html/test.any.js", contents=contents)
    assert not s.name_is_non_test
    assert not s.name_is_manual
    assert not s.name_is_visual
    assert s.name_is_multi_global
    assert not s.name_is_worker
    assert not s.name_is_reference

    assert not s.content_is_testharness

    item_type, items = s.manifest_items()
    assert item_type == "testharness"

    expected = [
        ("/html/test.any.html", False),
        ("/html/test.any.js", True),
        ("/html/test.any.worker.html", False),
    ]
    assert len(items) == len(expected)

    for item, (url, jsshell) in zip(items, expected):
        assert item.url == url
        assert item.jsshell == jsshell
        assert item.timeout is None
+
+
def test_multi_global_with_variants():
    """global= and variant= metadata combine as a cross product of URLs."""
    contents = b"""// META: global=window,worker
// META: variant=?default
// META: variant=?wss
test()"""

    s = create("html/test.any.js", contents=contents)
    assert not s.name_is_non_test
    assert not s.name_is_manual
    assert not s.name_is_visual
    assert s.name_is_multi_global
    assert not s.name_is_worker
    assert not s.name_is_reference

    assert not s.content_is_testharness

    item_type, items = s.manifest_items()
    assert item_type == "testharness"

    urls = {
        "dedicatedworker": "/html/test.any.worker.html",
        "serviceworker": "/html/test.any.serviceworker.html",
        "sharedworker": "/html/test.any.sharedworker.html",
        "window": "/html/test.any.html",
    }

    expected_urls = sorted(
        urls[ty] + suffix
        for ty in ["dedicatedworker", "serviceworker", "sharedworker", "window"]
        for suffix in ["?default", "?wss"]
    )
    assert len(items) == len(expected_urls)

    for item, url in zip(items, expected_urls):
        assert item.url == url
        assert item.timeout is None
+
+
@pytest.mark.parametrize("input,expected", [
    (b"""//META: foo=bar\n""", [("foo", "bar")]),
    (b"""// META: foo=bar\n""", [("foo", "bar")]),
    (b"""//  META: foo=bar\n""", [("foo", "bar")]),
    # Metadata must start on the first line; anything before it ends parsing.
    (b"""\n// META: foo=bar\n""", []),
    (b""" // META: foo=bar\n""", []),
    (b"""// META: foo=bar\n// META: baz=quux\n""", [("foo", "bar"), ("baz", "quux")]),
    (b"""// META: foo=bar\n\n// META: baz=quux\n""", [("foo", "bar")]),
    (b"""// META: foo=bar\n// Start of the test\n// META: baz=quux\n""", [("foo", "bar")]),
    # Malformed entries (no key=value) are ignored.
    (b"""// META:\n""", []),
    (b"""// META: foobar\n""", []),
])
def test_script_metadata(input, expected):
    """read_script_metadata parses only the leading run of META comments."""
    metadata = read_script_metadata(BytesIO(input), js_meta_re)
    assert list(metadata) == expected
+
+
@pytest.mark.parametrize("ext", ["htm", "html"])
def test_testharness(ext):
    """A root-relative testharness.js script makes the file a testharness test."""
    content = b"<script src=/resources/testharness.js></script>"

    filename = "html/test." + ext
    s = create(filename, content)

    assert not s.name_is_non_test
    assert not s.name_is_manual
    assert not s.name_is_visual
    assert not s.name_is_multi_global
    assert not s.name_is_worker
    assert not s.name_is_reference

    assert s.content_is_testharness

    assert items(s) == [("testharness", "/" + filename)]
+
+
@pytest.mark.parametrize("variant", ["", "?foo", "#bar", "?foo#bar"])
def test_testharness_variant(variant):
    """Valid <meta name=variant> values are collected in document order.

    The original version assigned to ``s.test_variants`` instead of
    asserting on it, so the test could never fail.
    """
    content = (b"<meta name=variant content=\"%s\">" % variant.encode("utf-8") +
               b"<meta name=variant content=\"?fixed\">" +
               b"<script src=/resources/testharness.js></script>")

    filename = "html/test.html"
    s = create(filename, content)

    # This was `s.test_variants = [...]` — a no-op assignment, not a check.
    assert s.test_variants == [variant, "?fixed"]
+
+
@pytest.mark.parametrize("variant", ["?", "#", "?#bar"])
def test_testharness_variant_invalid(variant):
    """Variants that are a bare separator (or start with ?#) raise ValueError."""
    content = (b"<meta name=variant content=\"%s\">" % variant.encode("utf-8") +
               b"<meta name=variant content=\"?fixed\">" +
               b"<script src=/resources/testharness.js></script>")

    filename = "html/test.html"
    s = create(filename, content)

    with pytest.raises(ValueError):
        s.test_variants
+
+
def test_reftest_variant():
    """Reftest variants apply the same query suffix to test and reference URLs."""
    content = (b"<meta name=variant content=\"?first\">" +
               b"<meta name=variant content=\"?second\">" +
               b"<link rel=\"match\" href=\"ref.html\">")

    s = create("html/test.html", contents=content)
    assert not s.name_is_non_test
    assert not s.name_is_manual
    assert not s.name_is_visual
    assert not s.name_is_worker
    assert not s.name_is_reference

    item_type, items = s.manifest_items()
    assert item_type == "reftest"

    actual_tests = [
        {"url": item.url, "refs": item.references}
        for item in items
    ]

    expected_tests = [
        {
            "url": "/html/test.html?first",
            "refs": [("/html/ref.html?first", "==")],
        },
        {
            "url": "/html/test.html?second",
            "refs": [("/html/ref.html?second", "==")],
        },
    ]

    assert actual_tests == expected_tests
+
+
@pytest.mark.parametrize("ext", ["htm", "html"])
def test_relative_testharness(ext):
    """A relative path to testharness.js does NOT mark the file as a test."""
    content = b"<script src=../resources/testharness.js></script>"

    filename = "html/test." + ext
    s = create(filename, content)

    assert not s.name_is_non_test
    assert not s.name_is_manual
    assert not s.name_is_visual
    assert not s.name_is_multi_global
    assert not s.name_is_worker
    assert not s.name_is_reference

    assert not s.content_is_testharness

    assert items(s) == []
+
+
@pytest.mark.parametrize("ext", ["xhtml", "xht", "xml"])
def test_testharness_xhtml(ext):
    """XML-parsed documents with root-relative testharness.js are tests."""
    content = b"""
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
</head>
<body/>
</html>
"""

    filename = "html/test." + ext
    s = create(filename, content)

    assert not s.name_is_non_test
    assert not s.name_is_manual
    assert not s.name_is_visual
    assert not s.name_is_multi_global
    assert not s.name_is_worker
    assert not s.name_is_reference

    assert s.content_is_testharness

    assert items(s) == [("testharness", "/" + filename)]


@pytest.mark.parametrize("ext", ["xhtml", "xht", "xml"])
def test_relative_testharness_xhtml(ext):
    """Relative testharness.js paths in XML documents do not make a test."""
    content = b"""
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<script src="../resources/testharness.js"></script>
<script src="../resources/testharnessreport.js"></script>
</head>
<body/>
</html>
"""

    filename = "html/test." + ext
    s = create(filename, content)

    assert not s.name_is_non_test
    assert not s.name_is_manual
    assert not s.name_is_visual
    assert not s.name_is_multi_global
    assert not s.name_is_worker
    assert not s.name_is_reference

    assert not s.content_is_testharness

    assert items(s) == []
+
+
def test_testharness_svg():
    """SVG files including testharness.js via the XHTML namespace are tests."""
    content = b"""\
<?xml version="1.0" encoding="UTF-8"?>
<svg xmlns="http://www.w3.org/2000/svg"
     xmlns:h="http://www.w3.org/1999/xhtml"
     version="1.1"
     width="100%" height="100%" viewBox="0 0 400 400">
<title>Null test</title>
<h:script src="/resources/testharness.js"/>
<h:script src="/resources/testharnessreport.js"/>
</svg>
"""

    filename = "html/test.svg"
    s = create(filename, content)

    assert not s.name_is_non_test
    assert not s.name_is_manual
    assert not s.name_is_visual
    assert not s.name_is_multi_global
    assert not s.name_is_worker
    assert not s.name_is_reference

    # The SVG parsed successfully (we have a document root).
    assert s.root is not None
    assert s.content_is_testharness

    assert items(s) == [("testharness", "/" + filename)]


def test_relative_testharness_svg():
    """Relative testharness.js paths in SVG do not make a test."""
    content = b"""\
<?xml version="1.0" encoding="UTF-8"?>
<svg xmlns="http://www.w3.org/2000/svg"
     xmlns:h="http://www.w3.org/1999/xhtml"
     version="1.1"
     width="100%" height="100%" viewBox="0 0 400 400">
<title>Null test</title>
<h:script src="../resources/testharness.js"/>
<h:script src="../resources/testharnessreport.js"/>
</svg>
"""

    filename = "html/test.svg"
    s = create(filename, content)

    assert not s.name_is_non_test
    assert not s.name_is_manual
    assert not s.name_is_visual
    assert not s.name_is_multi_global
    assert not s.name_is_worker
    assert not s.name_is_reference

    assert s.root is not None
    assert not s.content_is_testharness

    assert items(s) == []
+
+
@pytest.mark.parametrize("filename", ["test", "test.test"])
def test_testharness_ext(filename):
    """Files without a recognized markup extension are never parsed as tests."""
    content = b"<script src=/resources/testharness.js></script>"

    s = create("html/" + filename, content)

    assert not s.name_is_non_test
    assert not s.name_is_manual
    assert not s.name_is_visual
    assert not s.name_is_multi_global
    assert not s.name_is_worker
    assert not s.name_is_reference

    # No parse is attempted, so there is no document root.
    assert not s.root
    assert not s.content_is_testharness

    assert items(s) == []
+
+
@pytest.mark.parametrize("ext", ["htm", "html"])
def test_testdriver(ext):
    """A root-relative testdriver.js script sets has_testdriver."""
    content = b"<script src=/resources/testdriver.js></script>"

    filename = "html/test." + ext
    s = create(filename, content)

    assert s.has_testdriver


@pytest.mark.parametrize("ext", ["htm", "html"])
def test_relative_testdriver(ext):
    """A relative testdriver.js path does not set has_testdriver."""
    content = b"<script src=../resources/testdriver.js></script>"

    filename = "html/test." + ext
    s = create(filename, content)

    assert not s.has_testdriver
+
+
@pytest.mark.parametrize("ext", ["htm", "html"])
def test_reftest(ext):
    """A <link rel=match> makes the file a reftest node."""
    content = b"<link rel=match href=ref.html>"

    filename = "foo/test." + ext
    s = create(filename, content)

    assert not s.name_is_non_test
    assert not s.name_is_manual
    assert not s.name_is_visual
    assert not s.name_is_multi_global
    assert not s.name_is_worker
    assert not s.name_is_reference
    assert not s.content_is_testharness

    assert s.content_is_ref_node

    assert items(s) == [("reftest", "/" + filename)]
+
+
@pytest.mark.parametrize("ext", ["xht", "html", "xhtml", "htm", "xml", "svg"])
def test_css_visual(ext):
    """A CSS spec help link without match/testharness makes a visual test."""
    content = b"""
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<link rel="help" href="http://www.w3.org/TR/CSS21/box.html#bidi-box-model"/>
</head>
<body></body>
</html>
"""

    filename = "html/test." + ext
    s = create(filename, content)

    assert not s.name_is_non_test
    assert not s.name_is_manual
    assert not s.name_is_visual
    assert not s.name_is_multi_global
    assert not s.name_is_worker
    assert not s.name_is_reference
    assert not s.content_is_testharness
    assert not s.content_is_ref_node

    assert s.content_is_css_visual

    assert items(s) == [("visual", "/" + filename)]
+
+
@pytest.mark.parametrize("ext", ["xht", "xhtml", "xml"])
def test_xhtml_with_entity(ext):
    """XHTML using a DTD entity (&nbsp;) still parses to a document root."""
    content = b"""
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
          "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html  xmlns="http://www.w3.org/1999/xhtml">
&nbsp;
</html>
"""

    filename = "html/test." + ext
    s = create(filename, content)

    assert s.root is not None

    assert items(s) == []
+
+
def test_no_parse():
    """Unparseable content (a lone U+FFFF) must classify as nothing, not crash."""
    s = create("foo/bar.xml", "\uFFFF".encode("utf-8"))

    assert not s.name_is_non_test
    assert not s.name_is_manual
    assert not s.name_is_visual
    assert not s.name_is_multi_global
    assert not s.name_is_worker
    assert not s.name_is_reference
    assert not s.content_is_testharness
    assert not s.content_is_ref_node
    assert not s.content_is_css_visual

    assert items(s) == []
+
+
@pytest.mark.parametrize("input,expected", [
    ("aA", "aA"),
    # On Windows forward slashes are normalized to backslashes.
    ("a/b", "a/b" if os.name != "nt" else "a\\b"),
    ("a\\b", "a\\b")
])
def test_relpath_normalized(input, expected):
    """rel_path is normalized to the native OS path separator."""
    s = create(input, b"")
    assert s.rel_path == expected
+
+
@pytest.mark.parametrize("url", [b"ref.html",
                                 b"\x20ref.html",
                                 b"ref.html\x20",
                                 b"\x09\x0a\x0c\x0d\x20ref.html\x09\x0a\x0c\x0d\x20"])
def test_reftest_url_whitespace(url):
    """ASCII whitespace around a reference href is stripped before resolving."""
    content = b"<link rel=match href='%s'>" % url
    s = create("foo/test.html", content)
    assert s.references == [("/foo/ref.html", "==")]


@pytest.mark.parametrize("url", [b"http://example.com/",
                                 b"\x20http://example.com/",
                                 b"http://example.com/\x20",
                                 b"\x09\x0a\x0c\x0d\x20http://example.com/\x09\x0a\x0c\x0d\x20"])
def test_spec_links_whitespace(url):
    """ASCII whitespace around a help link href is stripped."""
    content = b"<link rel=help href='%s'>" % url
    s = create("foo/test.html", content)
    assert s.spec_links == {"http://example.com/"}
+
+
@pytest.mark.parametrize("input,expected", [
    (b"""<link rel="help" title="Intel" href="foo">\n""", ["foo"]),
    (b"""<link rel=help title="Intel" href="foo">\n""", ["foo"]),
    (b"""<link rel=help href="foo" >\n""", ["foo"]),
    # Only rel=help links count as spec links.
    (b"""<link rel="author" href="foo">\n""", []),
    (b"""<link href="foo">\n""", []),
    (b"""<link rel="help" href="foo">\n<link rel="help" href="bar">\n""", ["foo", "bar"]),
    (b"""<link rel="help" href="foo">\n<script>\n""", ["foo"]),
    (b"""random\n""", []),
])
def test_spec_links_complex(input, expected):
    """spec_links collects the hrefs of rel=help links in various markup forms."""
    s = create("foo/test.html", input)
    assert s.spec_links == set(expected)
+
+
def test_url_base():
    """A non-root url_base is prefixed to every generated multi-global URL."""
    contents = b"""// META: global=window,worker
// META: variant=?default
// META: variant=?wss
test()"""

    s = SourceFile("/", "html/test.any.js", "/_fake_base/", contents=contents)
    item_type, items = s.manifest_items()

    assert item_type == "testharness"

    assert [item.url for item in items] == ['/_fake_base/html/test.any.html?default',
                                            '/_fake_base/html/test.any.html?wss',
                                            '/_fake_base/html/test.any.serviceworker.html?default',
                                            '/_fake_base/html/test.any.serviceworker.html?wss',
                                            '/_fake_base/html/test.any.sharedworker.html?default',
                                            '/_fake_base/html/test.any.sharedworker.html?wss',
                                            '/_fake_base/html/test.any.worker.html?default',
                                            '/_fake_base/html/test.any.worker.html?wss']

    assert items[0].url_base == "/_fake_base/"
+
+
@pytest.mark.parametrize("fuzzy, expected", [
    # "ref:differences;pixels" keyed to a specific (test, ref, relation) tuple.
    (b"ref.html:1;200", {("/foo/test.html", "/foo/ref.html", "=="): [[1, 1], [200, 200]]}),
    (b"ref.html:0-1;100-200", {("/foo/test.html", "/foo/ref.html", "=="): [[0, 1], [100, 200]]}),
    # Without a ref prefix the key is None (applies to all references).
    (b"0-1;100-200", {None: [[0,1], [100, 200]]}),
    (b"maxDifference=1;totalPixels=200", {None: [[1, 1], [200, 200]]}),
    (b"totalPixels=200;maxDifference=1", {None: [[1, 1], [200, 200]]}),
    (b"totalPixels=200;1", {None: [[1, 1], [200, 200]]}),
    (b"maxDifference=1;200", {None: [[1, 1], [200, 200]]}),])
def test_reftest_fuzzy(fuzzy, expected):
    """<meta name=fuzzy> parses into {key: [[minDiff,maxDiff],[minPix,maxPix]]}."""
    content = b"""<link rel=match href=ref.html>
<meta name=fuzzy content="%s">
""" % fuzzy

    s = create("foo/test.html", content)

    assert s.content_is_ref_node
    assert s.fuzzy == expected

@pytest.mark.parametrize("fuzzy, expected", [
    ([b"1;200"], {None: [[1, 1], [200, 200]]}),
    ([b"ref-2.html:0-1;100-200"], {("/foo/test.html", "/foo/ref-2.html", "=="): [[0, 1], [100, 200]]}),
    ([b"1;200", b"ref-2.html:0-1;100-200"],
     {None: [[1, 1], [200, 200]],
      ("/foo/test.html", "/foo/ref-2.html", "=="): [[0,1], [100, 200]]})])
def test_reftest_fuzzy_multi(fuzzy, expected):
    """Multiple fuzzy metas combine: a default entry plus per-reference entries."""
    content = b"""<link rel=match href=ref-1.html>
<link rel=match href=ref-2.html>
"""
    for item in fuzzy:
        content += b'\n<meta name=fuzzy content="%s">' % item

    s = create("foo/test.html", content)

    assert s.content_is_ref_node
    assert s.fuzzy == expected
+
@pytest.mark.parametrize("pac, expected", [
    (b"proxy.pac", "proxy.pac")])
def test_pac(pac, expected):
    """<meta name=pac> content is exposed as the pac property."""
    content = b"""
<meta name=pac content="%s">
""" % pac

    s = create("foo/test.html", content)
    assert s.pac == expected
+
@pytest.mark.parametrize("page_ranges, expected", [
    (b"1-2", [[1, 2]]),
    (b"1-1,3-4", [[1, 1], [3, 4]]),
    (b"1,3", [[1], [3]]),
    # Open-ended ranges keep None on the missing side.
    (b"2-", [[2, None]]),
    (b"-2", [[None, 2]]),
    (b"-2,2-", [[None, 2], [2, None]]),
    (b"1,6-7,8", [[1], [6, 7], [8]])])
def test_page_ranges(page_ranges, expected):
    """<meta name=reftest-pages> parses into per-test print page ranges."""
    content = b"""<link rel=match href=ref.html>
<meta name=reftest-pages content="%s">
""" % page_ranges

    s = create("foo/test-print.html", content)

    assert s.page_ranges == {"/foo/test-print.html": expected}


@pytest.mark.parametrize("page_ranges", [b"a", b"1-a", b"1=2", b"1-2:2-3"])
def test_page_ranges_invalid(page_ranges):
    """Non-numeric or malformed page ranges raise ValueError."""
    content = b"""<link rel=match href=ref.html>
<meta name=reftest-pages content="%s">
""" % page_ranges

    s = create("foo/test-print.html", content)
    with pytest.raises(ValueError):
        s.page_ranges
+
+
def test_hash():
    """SourceFile.hash matches the git-style blob SHA-1 of the contents."""
    s = SourceFile("/", "foo", "/", contents=b"Hello, World!")
    # b45ef6... is `git hash-object` of the 13-byte string "Hello, World!".
    assert "b45ef6fec89518d314f546fd6c3025367b721684" == s.hash
diff --git a/testing/web-platform/tests/tools/manifest/tests/test_utils.py b/testing/web-platform/tests/tools/manifest/tests/test_utils.py
new file mode 100644
index 0000000000..e8cf1ad689
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/tests/test_utils.py
@@ -0,0 +1,15 @@
+# mypy: allow-untyped-defs
+
+import os
+import subprocess
+from unittest import mock
+
+from .. import utils
+
+
def test_git_for_path_no_git():
    """utils.git returns None when invoking git fails (CalledProcessError)."""
    this_dir = os.path.dirname(__file__)
    with mock.patch(
            "subprocess.check_output",
            side_effect=subprocess.CalledProcessError(1, "foo")):
        assert utils.git(this_dir) is None
diff --git a/testing/web-platform/tests/tools/manifest/typedata.py b/testing/web-platform/tests/tools/manifest/typedata.py
new file mode 100644
index 0000000000..746a42c98c
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/typedata.py
@@ -0,0 +1,300 @@
+from typing import (Any, Dict, Iterator, List, Optional, MutableMapping, Set, Text, Tuple,
+ Type, TYPE_CHECKING, Union)
+
+from .item import ManifestItem
+
+if TYPE_CHECKING:
+ # avoid actually importing these, they're only used by type comments
+ from .manifest import Manifest
+
+TypeDataType = MutableMapping[Tuple[str, ...], Set[ManifestItem]]
+PathHashType = MutableMapping[Tuple[str, ...], str]
+
+
class TypeData(TypeDataType):
    def __init__(self, m: "Manifest", type_cls: Type[ManifestItem]) -> None:
        """Dict-like object containing the TestItems for each test type.

        Loading an actual Item class for each test is unnecessarily
        slow, so this class allows lazy-loading of the test
        items. When the manifest is loaded we store the raw json
        corresponding to the test type, and only create an Item
        subclass when the test is accessed. In order to remain
        API-compatible with consumers that depend on getting an Item
        from iteration, we do eagerly load all items when iterating
        over the class."""
        self._manifest = m
        self._type_cls: Type[ManifestItem] = type_cls
        # Raw JSON subtree as loaded from disk; entries migrate from here
        # into self._data (as sets of ManifestItem) on first access.
        self._json_data: Dict[Text, Any] = {}
        # Deserialized items, keyed by nested path segments.
        self._data: Dict[Text, Any] = {}
        # Per-path file hashes, populated as items are deserialized.
        self._hashes: Dict[Tuple[Text, ...], Text] = {}
        self.hashes = PathHash(self)

    def _delete_node(self, data: Dict[Text, Any], key: Tuple[Text, ...]) -> None:
        """delete a path from a Dict data with a given key"""
        path = []
        node = data
        for pathseg in key[:-1]:
            path.append((node, pathseg))
            node = node[pathseg]
            if not isinstance(node, dict):
                raise KeyError(key)

        del node[key[-1]]
        # Prune now-empty ancestor dicts so lookups don't see ghost paths.
        while path:
            node, pathseg = path.pop()
            if len(node[pathseg]) == 0:
                del node[pathseg]
            else:
                break

    def __getitem__(self, key: Tuple[Text, ...]) -> Set[ManifestItem]:
        # Fast path: the key was already deserialized into self._data.
        node: Union[Dict[Text, Any], Set[ManifestItem], List[Any]] = self._data
        for pathseg in key:
            if isinstance(node, dict) and pathseg in node:
                node = node[pathseg]
            else:
                break
        else:
            if isinstance(node, set):
                return node
            else:
                raise KeyError(key)

        # Slow path: locate the raw JSON entry for the key.
        node = self._json_data
        found = False
        for pathseg in key:
            if isinstance(node, dict) and pathseg in node:
                node = node[pathseg]
            else:
                break
        else:
            found = True

        if not found:
            raise KeyError(key)

        if not isinstance(node, list):
            raise KeyError(key)

        # JSON leaf layout is [hash, test1, test2, ...]; remember the hash.
        self._hashes[key] = node[0]

        data = set()
        path = "/".join(key)
        for test in node[1:]:
            manifest_item = self._type_cls.from_json(self._manifest, path, test)
            data.add(manifest_item)

        # Install the deserialized set into self._data ...
        node = self._data
        assert isinstance(node, dict)
        for pathseg in key[:-1]:
            node = node.setdefault(pathseg, {})
            assert isinstance(node, dict)
        assert key[-1] not in node
        node[key[-1]] = data

        # ... and drop the raw JSON so each key lives in exactly one place.
        self._delete_node(self._json_data, key)

        return data

    def __setitem__(self, key: Tuple[Text, ...], value: Set[ManifestItem]) -> None:
        # A key assignment supersedes any raw JSON entry for the same path.
        try:
            self._delete_node(self._json_data, key)
        except KeyError:
            pass

        node = self._data
        for i, pathseg in enumerate(key[:-1]):
            node = node.setdefault(pathseg, {})
            if not isinstance(node, dict):
                raise KeyError(f"{key!r} is a child of a test ({key[:i+1]!r})")
        node[key[-1]] = value

    def __delitem__(self, key: Tuple[Text, ...]) -> None:
        try:
            self._delete_node(self._data, key)
        except KeyError:
            # Not deserialized yet; remove the raw JSON entry instead.
            self._delete_node(self._json_data, key)
        else:
            try:
                del self._hashes[key]
            except KeyError:
                pass

    def __iter__(self) -> Iterator[Tuple[Text, ...]]:
        """Iterator over keys in the TypeData in codepoint order"""
        # Walk both trees in lockstep; a key may be in either one.
        data_node: Optional[Union[Dict[Text, Any], Set[ManifestItem]]] = self._data
        json_node: Optional[Union[Dict[Text, Any], List[Any]]] = self._json_data
        path: Tuple[Text, ...] = tuple()
        stack = [(data_node, json_node, path)]
        while stack:
            data_node, json_node, path = stack.pop()
            if isinstance(data_node, set) or isinstance(json_node, list):
                # Leaf: a deserialized set or a raw JSON test list.
                assert data_node is None or json_node is None
                yield path
            else:
                assert data_node is None or isinstance(data_node, dict)
                assert json_node is None or isinstance(json_node, dict)

                keys: Set[Text] = set()
                if data_node is not None:
                    keys |= set(iter(data_node))
                if json_node is not None:
                    keys |= set(iter(json_node))

                # reverse=True so the LIFO stack yields in ascending order.
                for key in sorted(keys, reverse=True):
                    stack.append((data_node.get(key) if data_node is not None else None,
                                  json_node.get(key) if json_node is not None else None,
                                  path + (key,)))

    def __len__(self) -> int:
        # Count leaves in both trees; keys never exist in both at once.
        count = 0

        stack: List[Union[Dict[Text, Any], Set[ManifestItem]]] = [self._data]
        while stack:
            v = stack.pop()
            if isinstance(v, set):
                count += 1
            else:
                stack.extend(v.values())

        json_stack: List[Union[Dict[Text, Any], List[Any]]] = [self._json_data]
        while json_stack:
            json_v = json_stack.pop()
            if isinstance(json_v, list):
                count += 1
            else:
                json_stack.extend(json_v.values())

        return count

    def __nonzero__(self) -> bool:
        return bool(self._data) or bool(self._json_data)

    __bool__ = __nonzero__

    def __contains__(self, key: Any) -> bool:
        # we provide our own impl of this to avoid calling __getitem__ and generating items for
        # those in self._json_data
        node = self._data
        for pathseg in key:
            if pathseg in node:
                node = node[pathseg]
            else:
                break
        else:
            return bool(isinstance(node, set))

        node = self._json_data
        for pathseg in key:
            if pathseg in node:
                node = node[pathseg]
            else:
                break
        else:
            return bool(isinstance(node, list))

        return False

    def clear(self) -> None:
        # much, much simpler/quicker than that defined in MutableMapping
        self._json_data.clear()
        self._data.clear()
        self._hashes.clear()

    def set_json(self, json_data: Dict[Text, Any]) -> None:
        """Provide the object with a raw JSON blob

        Note that this object graph is assumed to be owned by the TypeData
        object after the call, so the caller must not mutate any part of the
        graph.
        """
        if self._json_data:
            raise ValueError("set_json call when JSON data is not empty")

        self._json_data = json_data

    def to_json(self) -> Dict[Text, Any]:
        """Convert the current data to JSON

        Note that the returned object may contain references to the internal
        data structures, and is only guaranteed to be valid until the next
        __getitem__, __setitem__, __delitem__ call, so the caller must not
        mutate any part of the returned object graph.

        """
        json_rv = self._json_data.copy()

        def safe_sorter(element: Tuple[str,str]) -> Tuple[str,str]:
            """key function to sort lists with None values."""
            if element and not element[0]:
                return ("", element[1])
            else:
                return element

        # Merge deserialized items back over the raw JSON copy.
        stack: List[Tuple[Dict[Text, Any], Dict[Text, Any], Tuple[Text, ...]]] = [(self._data, json_rv, tuple())]
        while stack:
            data_node, json_node, par_full_key = stack.pop()
            for k, v in data_node.items():
                full_key = par_full_key + (k,)
                if isinstance(v, set):
                    assert k not in json_node
                    # Leaf layout on disk is [hash, sorted test json ...].
                    json_node[k] = [self._hashes.get(
                        full_key)] + [t for t in sorted((test.to_json() for test in v), key=safe_sorter)]
                else:
                    json_node[k] = json_node.get(k, {}).copy()
                    stack.append((v, json_node[k], full_key))

        return json_rv
+
+
class PathHash(PathHashType):
    """Mapping view exposing the per-path file hashes of a TypeData.

    Hashes live either in TypeData._hashes (for deserialized entries) or
    as element 0 of the raw JSON leaf list (for not-yet-loaded entries);
    this class hides that split from callers.
    """

    def __init__(self, data: TypeData) -> None:
        self._data = data

    def __getitem__(self, k: Tuple[Text, ...]) -> Text:
        if k not in self._data:
            raise KeyError

        if k in self._data._hashes:
            return self._data._hashes[k]

        # Not deserialized yet: read the hash straight from the JSON leaf.
        node = self._data._json_data
        for pathseg in k:
            if pathseg in node:
                node = node[pathseg]
            else:
                break
        else:
            return node[0]  # type: ignore

        # The membership check above guarantees one of the two sources hit.
        assert False, "unreachable"
        raise KeyError

    def __setitem__(self, k: Tuple[Text, ...], v: Text) -> None:
        if k not in self._data:
            raise KeyError

        if k in self._data._hashes:
            self._data._hashes[k] = v

        # If the raw JSON leaf still exists, update the stored hash there.
        node = self._data._json_data
        for pathseg in k:
            if pathseg in node:
                node = node[pathseg]
            else:
                break
        else:
            node[0] = v  # type: ignore
            return

        self._data._hashes[k] = v

    def __delitem__(self, k: Tuple[Text, ...]) -> None:
        # Hash keys must mirror the underlying test data exactly.
        raise ValueError("keys here must match underlying data")

    def __iter__(self) -> Iterator[Tuple[Text, ...]]:
        return iter(self._data)

    def __len__(self) -> int:
        return len(self._data)
diff --git a/testing/web-platform/tests/tools/manifest/update.py b/testing/web-platform/tests/tools/manifest/update.py
new file mode 100755
index 0000000000..fef0b96b86
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/update.py
@@ -0,0 +1,96 @@
+#!/usr/bin/env python3
+import argparse
+import os
+from typing import Any, Optional, TYPE_CHECKING
+
+from . import manifest
+from . import vcs
+from .log import get_logger, enable_debug_logging
+from .download import download_from_github
+if TYPE_CHECKING:
+ from .manifest import Manifest # avoid cyclic import
+
+
+here = os.path.dirname(__file__)
+
+wpt_root = os.path.abspath(os.path.join(here, os.pardir, os.pardir))
+
+logger = get_logger()
+
+
def update(tests_root: str,
           manifest: "Manifest",
           manifest_path: Optional[str] = None,
           working_copy: bool = True,
           cache_root: Optional[str] = None,
           rebuild: bool = False,
           parallel: bool = True
           ) -> bool:
    """Update *manifest* in place from the tree rooted at *tests_root*.

    Deprecated: prefer manifest.load_and_update.
    Returns whether the manifest changed.
    """
    # NB: the `manifest` parameter shadows the `manifest` module import.
    logger.warning("Deprecated; use manifest.load_and_update instead")
    logger.info("Updating manifest")

    tree = vcs.get_tree(tests_root, manifest, manifest_path, cache_root,
                        working_copy, rebuild)
    return manifest.update(tree, parallel)
+
+
def update_from_cli(**kwargs: Any) -> None:
    """CLI entry point: optionally fetch a cached manifest, then update."""
    tests_root = kwargs["tests_root"]
    manifest_path = kwargs["path"]
    assert tests_root is not None

    # A full rebuild makes a downloaded manifest useless, so skip it then.
    if not kwargs["rebuild"] and kwargs["download"]:
        download_from_github(manifest_path, tests_root)

    manifest.load_and_update(tests_root,
                             manifest_path,
                             kwargs["url_base"],
                             update=True,
                             rebuild=kwargs["rebuild"],
                             cache_root=kwargs["cache_root"],
                             parallel=kwargs["parallel"])
+
+
def abs_path(path: str) -> str:
    """Expand a leading ~ and return the absolute form of *path*."""
    expanded = os.path.expanduser(path)
    return os.path.abspath(expanded)
+
+
def create_parser() -> argparse.ArgumentParser:
    """Build the argument parser for the manifest-update CLI."""
    parser = argparse.ArgumentParser()
    add = parser.add_argument
    add("-v", "--verbose", dest="verbose", action="store_true", default=False,
        help="Turn on verbose logging")
    add("-p", "--path", type=abs_path, help="Path to manifest file.")
    add("--tests-root", type=abs_path, default=wpt_root,
        help="Path to root of tests.")
    add("-r", "--rebuild", action="store_true", default=False,
        help="Force a full rebuild of the manifest.")
    add("--url-base", action="store", default="/",
        help="Base url to use as the mount point for tests in this manifest.")
    add("--no-download", dest="download", action="store_false", default=True,
        help="Never attempt to download the manifest.")
    add("--cache-root", action="store",
        default=os.path.join(wpt_root, ".wptcache"),
        help="Path in which to store any caches (default <tests_root>/.wptcache/)")
    add("--no-parallel", dest="parallel", action="store_false", default=True,
        help="Do not parallelize building the manifest")
    return parser
+
+
def run(*args: Any, **kwargs: Any) -> None:
    """Fill in argument defaults and dispatch to update_from_cli."""
    # Default the manifest location to MANIFEST.json under the tests root.
    manifest_path = kwargs["path"]
    if manifest_path is None:
        manifest_path = os.path.join(kwargs["tests_root"], "MANIFEST.json")
    kwargs["path"] = manifest_path
    if kwargs["verbose"]:
        enable_debug_logging()
    update_from_cli(**kwargs)
+
+
def main() -> None:
    """Parse command-line arguments and run the manifest update."""
    parsed = create_parser().parse_args()
    run(**vars(parsed))
diff --git a/testing/web-platform/tests/tools/manifest/utils.py b/testing/web-platform/tests/tools/manifest/utils.py
new file mode 100644
index 0000000000..7ccd3afb71
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/utils.py
@@ -0,0 +1,72 @@
+import os
+import subprocess
+import sys
+from typing import Any, Callable, Generic, Optional, Text, TypeVar
+T = TypeVar("T")
+
+
def rel_path_to_url(rel_path: Text, url_base: Text = "/") -> Text:
    """Map a relative filesystem path to a URL beneath *url_base*."""
    assert not os.path.isabs(rel_path), rel_path
    base = url_base
    # Normalise the base to start and end with a single "/".
    if base[0] != "/":
        base = "/" + base
    if base[-1] != "/":
        base = base + "/"
    return base + rel_path.replace(os.sep, "/")
+
+
def from_os_path(path: Text) -> Text:
    """Convert an OS-separated path to a "/"-separated manifest path."""
    assert os.path.sep == "/" or sys.platform == "win32"
    if os.path.sep == "/":
        converted = path
    else:
        converted = path.replace(os.path.sep, "/")
    # A remaining backslash means the input was not a valid OS path here.
    if "\\" in converted:
        raise ValueError("path contains \\ when separator is %s" % os.path.sep)
    return converted
+
+
def to_os_path(path: Text) -> Text:
    """Convert a "/"-separated manifest path to an OS-separated path."""
    assert os.path.sep == "/" or sys.platform == "win32"
    # Manifest paths are normalised and must never contain backslashes.
    if "\\" in path:
        raise ValueError("normalised path contains \\")
    return path if os.path.sep == "/" else path.replace("/", os.path.sep)
+
+
def git(path: Text) -> Optional[Callable[..., Text]]:
    """Return a callable running git subcommands in *path*, or None.

    The callable takes a subcommand name plus arguments and returns the
    decoded combined output. None is returned when *path* is not inside
    a git checkout (or git cannot be invoked at all).
    """
    def gitfunc(cmd: Text, *args: Text) -> Text:
        invocation = ["git", cmd] + list(args)
        try:
            output = subprocess.check_output(invocation, cwd=path,
                                             stderr=subprocess.STDOUT)
        except Exception as e:
            if sys.platform != "win32" or not isinstance(e, WindowsError):
                raise
            # git may be installed as a batch wrapper on Windows; retry.
            invocation[0] = "git.bat"
            output = subprocess.check_output(invocation, cwd=path,
                                             stderr=subprocess.STDOUT)
        return output.decode('utf8')

    try:
        # Probe with a command that fails outside a git repository.
        gitfunc("rev-parse", "--show-toplevel")
    except (subprocess.CalledProcessError, OSError):
        return None
    return gitfunc
+
+
class cached_property(Generic[T]):
    """Non-data descriptor that computes a value once per instance.

    The first access stores the result in the instance __dict__, which
    then shadows the descriptor on all later lookups, and records the
    attribute name in the instance's ``__cached_properties__`` set.
    """

    def __init__(self, func: Callable[[Any], T]) -> None:
        self.func = func
        self.__doc__ = getattr(func, "__doc__")
        self.name = func.__name__

    def __get__(self, obj: Any, cls: Optional[type] = None) -> T:
        if obj is None:
            # Class-level access: expose the descriptor itself.
            return self  # type: ignore

        # The instance dict shadows this non-data descriptor after the
        # first call, so we can assert this runs at most once.
        assert self.name not in obj.__dict__
        value = obj.__dict__[self.name] = self.func(obj)
        obj.__dict__.setdefault("__cached_properties__", set()).add(self.name)
        return value
diff --git a/testing/web-platform/tests/tools/manifest/vcs.py b/testing/web-platform/tests/tools/manifest/vcs.py
new file mode 100644
index 0000000000..7b6b73d877
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/vcs.py
@@ -0,0 +1,305 @@
+import abc
+import os
+import stat
+from collections import deque
+from os import stat_result
+from typing import (Any, Dict, Iterable, Iterator, List, MutableMapping, Optional, Set, Text, Tuple,
+ TYPE_CHECKING)
+
+from . import jsonlib
+from .utils import git
+
+# Cannot do `from ..gitignore import gitignore` because
+# relative import beyond toplevel throws *ImportError*!
+from gitignore import gitignore # type: ignore
+
+
+if TYPE_CHECKING:
+ from .manifest import Manifest # avoid cyclic import
+
+GitIgnoreCacheType = MutableMapping[bytes, bool]
+
+
def get_tree(tests_root: Text,
             manifest: "Manifest",
             manifest_path: Optional[Text],
             cache_root: Optional[Text],
             working_copy: bool = True,
             rebuild: bool = False) -> "FileSystem":
    """Build a FileSystem tree for *tests_root*, creating the cache dir.

    Raises ValueError for working_copy=False, which is unsupported.
    """
    if cache_root is None:
        cache_root = os.path.join(tests_root, ".wptcache")
    if not os.path.exists(cache_root):
        try:
            os.makedirs(cache_root)
        except OSError:
            # Can't create a cache dir; run without an on-disk cache.
            cache_root = None

    if not working_copy:
        raise ValueError("working_copy=False unsupported")

    return FileSystem(tests_root,
                      manifest.url_base,
                      manifest_path=manifest_path,
                      cache_path=cache_root,
                      rebuild=rebuild)
+
+
class GitHasher:
    """Computes per-file git object ids for files under a path."""

    def __init__(self, path: Text) -> None:
        # self.git is None when `path` is not inside a git repository.
        self.git = git(path)

    def _local_changes(self) -> Set[Text]:
        """get a set of files which have changed between HEAD and working copy"""
        assert self.git is not None
        # note that git runs the command with tests_root as the cwd, which may
        # not be the root of the git repo (e.g., within a browser repo)
        #
        # `git diff-index --relative` without a path still compares all tracked
        # files before non-WPT files are filtered out, which can be slow in
        # vendor repos. Explicitly pass the CWD (i.e., `tests_root`) as a path
        # argument to avoid unnecessary diffing.
        cmd = ["diff-index", "--relative", "--no-renames", "--name-only", "-z", "HEAD", os.curdir]
        data = self.git(*cmd)
        # Output is NUL-separated; the trailing NUL yields one empty entry,
        # which is harmless since no real path is the empty string.
        return set(data.split("\0"))

    def hash_cache(self) -> Dict[Text, Optional[Text]]:
        """
        A dict of rel_path -> current git object id if the working tree matches HEAD else None
        """
        hash_cache: Dict[Text, Optional[Text]] = {}

        if self.git is None:
            return hash_cache

        # note that git runs the command with tests_root as the cwd, which may
        # not be the root of the git repo (e.g., within a browser repo)
        cmd = ["ls-tree", "-r", "-z", "HEAD"]
        local_changes = self._local_changes()
        for result in self.git(*cmd).split("\0")[:-1]:  # type: Text
            # Each ls-tree record is "<mode> <type> <object>\t<path>";
            # split(" ", 3)[2] extracts the object id.
            data, rel_path = result.rsplit("\t", 1)
            hash_cache[rel_path] = None if rel_path in local_changes else data.split(" ", 3)[2]

        return hash_cache
+
+
+
class FileSystem:
    """Iterable over files under tests_root with change detection.

    Iterating yields (rel_path, file_hash, changed) triples; paths that
    the mtime cache reports as unchanged are yielded with no hash and
    changed=False.
    """

    def __init__(self,
                 tests_root: Text,
                 url_base: Text,
                 cache_path: Optional[Text],
                 manifest_path: Optional[Text] = None,
                 rebuild: bool = False) -> None:
        self.tests_root = tests_root
        self.url_base = url_base
        self.ignore_cache = None
        self.mtime_cache = None
        tests_root_bytes = tests_root.encode("utf8")
        if cache_path is not None:
            if manifest_path is not None:
                self.mtime_cache = MtimeCache(cache_path, tests_root, manifest_path, rebuild)
            if gitignore.has_ignore(tests_root_bytes):
                self.ignore_cache = GitIgnoreCache(cache_path, tests_root, rebuild)
        self.path_filter = gitignore.PathFilter(tests_root_bytes,
                                                extras=[b".git/"],
                                                cache=self.ignore_cache)
        # Named `hasher` to avoid shadowing the imported `git` helper.
        hasher = GitHasher(tests_root)
        self.hash_cache = hasher.hash_cache()

    def __iter__(self) -> Iterator[Tuple[Text, Optional[Text], bool]]:
        mtime_cache = self.mtime_cache
        filtered = self.path_filter(walk(self.tests_root.encode("utf8")))
        for dirpath, dirnames, filenames in filtered:
            for filename, path_stat in filenames:
                rel_path = os.path.join(dirpath, filename).decode("utf8")
                if mtime_cache is not None and not mtime_cache.updated(rel_path, path_stat):
                    # Unchanged since the previous run; skip rehashing.
                    yield rel_path, None, False
                else:
                    yield rel_path, self.hash_cache.get(rel_path, None), True

    def dump_caches(self) -> None:
        """Flush any modified caches to disk."""
        for cache in (self.mtime_cache, self.ignore_cache):
            if cache is not None:
                cache.dump()
+
+
class CacheFile(metaclass=abc.ABCMeta):
    """Base class for JSON-backed caches stored in a cache directory.

    Subclasses must define ``file_name`` and may override ``check_valid``
    to invalidate stale data on load.
    """

    def __init__(self, cache_root: Text, tests_root: Text, rebuild: bool = False) -> None:
        self.tests_root = tests_root
        if not os.path.exists(cache_root):
            os.makedirs(cache_root)
        self.path = os.path.join(cache_root, self.file_name)
        # True whenever self.data diverges from what is on disk.
        self.modified = False
        self.data = self.load(rebuild)

    # `abc.abstractproperty` has been deprecated since Python 3.3; the
    # supported spelling is stacking @property with @abc.abstractmethod.
    @property
    @abc.abstractmethod
    def file_name(self) -> Text:
        pass

    def dump(self) -> None:
        """Write the cache back to disk, if it has been modified."""
        if not self.modified:
            return
        with open(self.path, 'w') as f:
            jsonlib.dump_local(self.data, f)

    def load(self, rebuild: bool = False) -> Dict[Text, Any]:
        """Read and validate the cache; return {} on any failure or rebuild."""
        data: Dict[Text, Any] = {}
        try:
            if not rebuild:
                with open(self.path) as f:
                    try:
                        data = jsonlib.load(f)
                    except ValueError:
                        # Corrupt or partial JSON: start from empty.
                        pass
                data = self.check_valid(data)
        except OSError:
            pass
        return data

    def check_valid(self, data: Dict[Text, Any]) -> Dict[Text, Any]:
        """Check if the cached data is valid and return an updated copy of the
        cache containing only data that can be used."""
        return data
+
+
class MtimeCache(CacheFile):
    """Cache of rel_path -> mtime, used to skip unchanged files."""

    file_name = "mtime.json"

    def __init__(self, cache_root: Text, tests_root: Text, manifest_path: Text, rebuild: bool = False) -> None:
        self.manifest_path = manifest_path
        super().__init__(cache_root, tests_root, rebuild)

    def updated(self, rel_path: Text, stat: stat_result) -> bool:
        """Return a boolean indicating whether the file changed since the cache was last updated.

        This implicitly updates the cache with the new mtime data."""
        mtime = stat.st_mtime
        if mtime != self.data.get(rel_path):
            self.modified = True
            self.data[rel_path] = mtime
            return True
        return False

    def check_valid(self, data: Dict[Any, Any]) -> Dict[Any, Any]:
        # Invalidate the whole cache if it was built for a different tests
        # root, or if the manifest file has changed or disappeared since
        # the cache was written.
        if data.get("/tests_root") != self.tests_root:
            self.modified = True
        else:
            if self.manifest_path is not None and os.path.exists(self.manifest_path):
                mtime = os.path.getmtime(self.manifest_path)
                if data.get("/manifest_path") != [self.manifest_path, mtime]:
                    self.modified = True
            else:
                self.modified = True
        if self.modified:
            data = {}
            data["/tests_root"] = self.tests_root
        return data

    def dump(self) -> None:
        # Record the manifest path and mtime so check_valid can detect
        # staleness on the next load.
        if self.manifest_path is None:
            raise ValueError
        if not os.path.exists(self.manifest_path):
            return
        mtime = os.path.getmtime(self.manifest_path)
        self.data["/manifest_path"] = [self.manifest_path, mtime]
        self.data["/tests_root"] = self.tests_root
        super().dump()
+
+
class GitIgnoreCache(CacheFile, GitIgnoreCacheType):
    """Persisted mapping from byte paths to their gitignore decisions.

    Keys are stored as UTF-8 text in the JSON file but presented to
    callers as bytes, matching the gitignore PathFilter cache protocol.
    """

    file_name = "gitignore2.json"

    def check_valid(self, data: Dict[Any, Any]) -> Dict[Any, Any]:
        ignore_path = os.path.join(self.tests_root, ".gitignore")
        mtime = os.path.getmtime(ignore_path)
        # Any edit to .gitignore invalidates every cached decision.
        if data.get("/gitignore_file") != [ignore_path, mtime]:
            self.modified = True
            data = {}
        data["/gitignore_file"] = [ignore_path, mtime]
        return data

    def __contains__(self, key: Any) -> bool:
        try:
            text_key = key.decode("utf-8")
        except Exception:
            # Non-bytes (or undecodable) keys can never be present.
            return False
        return text_key in self.data

    def __getitem__(self, key: bytes) -> bool:
        value = self.data[key.decode("utf-8")]
        assert isinstance(value, bool)
        return value

    def __setitem__(self, key: bytes, value: bool) -> None:
        text_key = key.decode("utf-8")
        if self.data.get(text_key) != value:
            self.modified = True
            self.data[text_key] = value

    def __delitem__(self, key: bytes) -> None:
        del self.data[key.decode("utf-8")]

    def __iter__(self) -> Iterator[bytes]:
        return (text_key.encode("utf-8") for text_key in self.data)

    def __len__(self) -> int:
        return len(self.data)
+
+
def walk(root: bytes) -> Iterable[Tuple[bytes, List[Tuple[bytes, stat_result]], List[Tuple[bytes, stat_result]]]]:
    """Re-implementation of os.walk yielding (dirpath, dirnames, filenames).

    This has a similar interface to os.walk, with the important
    difference that instead of lists of names it yields lists of
    (name, stat) tuples, where stat is the result of os.stat for the
    entry, so callers can reuse the stat data. The dirpath is always
    given relative to the root, with the root itself reported as the
    empty string. Unreadable directories and entries are silently
    skipped, and the traversal is iterative, not recursive."""
    root = os.path.abspath(root)
    pending = deque([(root, b"")])

    while pending:
        dir_path, rel_path = pending.popleft()
        try:
            entries = os.listdir(dir_path)
        except OSError:
            # Directory vanished or is unreadable; skip it.
            continue

        dirs: List[Tuple[bytes, stat_result]] = []
        files: List[Tuple[bytes, stat_result]] = []
        for entry in entries:
            full = os.path.join(dir_path, entry)
            try:
                entry_stat = os.stat(full)
            except OSError:
                continue
            if stat.S_ISDIR(entry_stat.st_mode):
                dirs.append((entry, entry_stat))
            else:
                files.append((entry, entry_stat))

        yield rel_path, dirs, files

        for entry, entry_stat in dirs:
            child = os.path.join(dir_path, entry)
            # NOTE(review): os.stat follows symlinks, so S_ISLNK on its
            # st_mode is effectively always false here and symlinked
            # directories ARE descended into; os.lstat would be needed
            # to actually skip them — confirm intent before changing.
            if not stat.S_ISLNK(entry_stat.st_mode):
                pending.append((child, os.path.relpath(child, root)))