diff options
author | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-04-19 00:47:55 +0000 |
---|---|---|
committer | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-04-19 00:47:55 +0000 |
commit | 26a029d407be480d791972afb5975cf62c9360a6 (patch) | |
tree | f435a8308119effd964b339f76abb83a57c29483 /testing/web-platform/tests/tools/manifest | |
parent | Initial commit. (diff) | |
download | firefox-26a029d407be480d791972afb5975cf62c9360a6.tar.xz firefox-26a029d407be480d791972afb5975cf62c9360a6.zip |
Adding upstream version 124.0.1.upstream/124.0.1
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'testing/web-platform/tests/tools/manifest')
24 files changed, 6999 insertions, 0 deletions
diff --git a/testing/web-platform/tests/tools/manifest/XMLParser.py b/testing/web-platform/tests/tools/manifest/XMLParser.py new file mode 100644 index 0000000000..8dcdb45007 --- /dev/null +++ b/testing/web-platform/tests/tools/manifest/XMLParser.py @@ -0,0 +1,131 @@ +from collections import OrderedDict +from typing import Dict, List, Optional, Text, Union +from os.path import dirname, join +from xml.parsers import expat +import xml.etree.ElementTree as etree # noqa: N813 + + +_catalog = join(dirname(__file__), "catalog") + +def _wrap_error(e: expat.error) -> etree.ParseError: + err = etree.ParseError(e) + err.code = e.code + err.position = e.lineno, e.offset + raise err + +_names: Dict[Text, Text] = {} +def _fixname(key: Text) -> Text: + try: + name = _names[key] + except KeyError: + name = key + if "}" in name: + name = "{" + name + _names[key] = name + return name + + +_undefined_entity_code: int = expat.errors.codes[expat.errors.XML_ERROR_UNDEFINED_ENTITY] + + +class XMLParser: + """ + An XML parser with support for XHTML DTDs and all Python-supported encodings + + This implements the API defined by + xml.etree.ElementTree.XMLParser, but supports XHTML DTDs + (therefore allowing XHTML entities) and supports all encodings + Python does, rather than just those supported by expat. 
+ """ + def __init__(self, encoding: Optional[Text] = None) -> None: + self._parser = expat.ParserCreate(encoding, "}") + self._target = etree.TreeBuilder() + # parser settings + self._parser.buffer_text = True + self._parser.ordered_attributes = True + self._parser.SetParamEntityParsing(expat.XML_PARAM_ENTITY_PARSING_UNLESS_STANDALONE) + # parser callbacks + self._parser.XmlDeclHandler = self._xml_decl + self._parser.StartElementHandler = self._start + self._parser.EndElementHandler = self._end + self._parser.CharacterDataHandler = self._data + self._parser.ExternalEntityRefHandler = self._external + self._parser.SkippedEntityHandler = self._skipped + # used for our horrible re-encoding hack + self._fed_data: Optional[List[bytes]] = [] + self._read_encoding: Optional[Text] = None + + def _xml_decl(self, version: Text, encoding: Optional[Text], standalone: int) -> None: + self._read_encoding = encoding + + def _start(self, tag: Text, attrib_in: List[str]) -> etree.Element: + assert isinstance(tag, str) + self._fed_data = None + tag = _fixname(tag) + attrib: Dict[Union[bytes, Text], Union[bytes, Text]] = OrderedDict() + if attrib_in: + for i in range(0, len(attrib_in), 2): + attrib[_fixname(attrib_in[i])] = attrib_in[i+1] + return self._target.start(tag, attrib) + + def _data(self, text: Text) -> None: + self._target.data(text) + + def _end(self, tag: Text) -> etree.Element: + return self._target.end(_fixname(tag)) + + def _external(self, context: Text, base: Optional[Text], system_id: Optional[Text], public_id: Optional[Text]) -> bool: + if public_id in { + "-//W3C//DTD XHTML 1.0 Transitional//EN", + "-//W3C//DTD XHTML 1.1//EN", + "-//W3C//DTD XHTML 1.0 Strict//EN", + "-//W3C//DTD XHTML 1.0 Frameset//EN", + "-//W3C//DTD XHTML Basic 1.0//EN", + "-//W3C//DTD XHTML 1.1 plus MathML 2.0//EN", + "-//W3C//DTD XHTML 1.1 plus MathML 2.0 plus SVG 1.1//EN", + "-//W3C//DTD MathML 2.0//EN", + "-//WAPFORUM//DTD XHTML Mobile 1.0//EN" + }: + parser = 
self._parser.ExternalEntityParserCreate(context) + with open(join(_catalog, "xhtml.dtd"), "rb") as fp: + try: + parser.ParseFile(fp) + except expat.error: + return False + + return True + + def _skipped(self, name: Text, is_parameter_entity: bool) -> None: + err = expat.error("undefined entity %s: line %d, column %d" % + (name, self._parser.ErrorLineNumber, + self._parser.ErrorColumnNumber)) + err.code = _undefined_entity_code + err.lineno = self._parser.ErrorLineNumber + err.offset = self._parser.ErrorColumnNumber + raise err + + def feed(self, data: bytes) -> None: + if self._fed_data is not None: + self._fed_data.append(data) + try: + self._parser.Parse(data, False) + except expat.error as v: + _wrap_error(v) + except ValueError as e: + if e.args[0] == 'multi-byte encodings are not supported': + assert self._read_encoding is not None + assert self._fed_data is not None + xml = b"".join(self._fed_data).decode(self._read_encoding).encode("utf-8") + new_parser = XMLParser("utf-8") + self._parser = new_parser._parser + self._target = new_parser._target + self._fed_data = None + self.feed(xml) + + def close(self) -> etree.Element: + try: + self._parser.Parse("", True) + except expat.error as v: + _wrap_error(v) + tree = self._target.close() + return tree diff --git a/testing/web-platform/tests/tools/manifest/__init__.py b/testing/web-platform/tests/tools/manifest/__init__.py new file mode 100644 index 0000000000..8c8f189070 --- /dev/null +++ b/testing/web-platform/tests/tools/manifest/__init__.py @@ -0,0 +1 @@ +from . import item, manifest, sourcefile, update # noqa: F401 diff --git a/testing/web-platform/tests/tools/manifest/catalog/xhtml.dtd b/testing/web-platform/tests/tools/manifest/catalog/xhtml.dtd new file mode 100644 index 0000000000..4307b1c2c4 --- /dev/null +++ b/testing/web-platform/tests/tools/manifest/catalog/xhtml.dtd @@ -0,0 +1,2125 @@ +<!ENTITY Tab "	"> +<!ENTITY NewLine "
"> +<!ENTITY excl "!"> +<!ENTITY quot """> +<!ENTITY QUOT """> +<!ENTITY num "#"> +<!ENTITY dollar "$"> +<!ENTITY percnt "%"> +<!ENTITY amp "&#x26;"> +<!ENTITY AMP "&#x26;"> +<!ENTITY apos "'"> +<!ENTITY lpar "("> +<!ENTITY rpar ")"> +<!ENTITY ast "*"> +<!ENTITY midast "*"> +<!ENTITY plus "+"> +<!ENTITY comma ","> +<!ENTITY period "."> +<!ENTITY sol "/"> +<!ENTITY colon ":"> +<!ENTITY semi ";"> +<!ENTITY lt "&#x3C;"> +<!ENTITY LT "&#x3C;"> +<!ENTITY nvlt "&#x3C;⃒"> +<!ENTITY equals "="> +<!ENTITY bne "=⃥"> +<!ENTITY gt ">"> +<!ENTITY GT ">"> +<!ENTITY nvgt ">⃒"> +<!ENTITY quest "?"> +<!ENTITY commat "@"> +<!ENTITY lsqb "["> +<!ENTITY lbrack "["> +<!ENTITY bsol "\"> +<!ENTITY rsqb "]"> +<!ENTITY rbrack "]"> +<!ENTITY Hat "^"> +<!ENTITY lowbar "_"> +<!ENTITY UnderBar "_"> +<!ENTITY grave "`"> +<!ENTITY DiacriticalGrave "`"> +<!ENTITY fjlig "fj"> +<!ENTITY lcub "{"> +<!ENTITY lbrace "{"> +<!ENTITY verbar "|"> +<!ENTITY vert "|"> +<!ENTITY VerticalLine "|"> +<!ENTITY rcub "}"> +<!ENTITY rbrace "}"> +<!ENTITY nbsp " "> +<!ENTITY NonBreakingSpace " "> +<!ENTITY iexcl "¡"> +<!ENTITY cent "¢"> +<!ENTITY pound "£"> +<!ENTITY curren "¤"> +<!ENTITY yen "¥"> +<!ENTITY brvbar "¦"> +<!ENTITY sect "§"> +<!ENTITY Dot "¨"> +<!ENTITY die "¨"> +<!ENTITY DoubleDot "¨"> +<!ENTITY uml "¨"> +<!ENTITY copy "©"> +<!ENTITY COPY "©"> +<!ENTITY ordf "ª"> +<!ENTITY laquo "«"> +<!ENTITY not "¬"> +<!ENTITY shy "­"> +<!ENTITY reg "®"> +<!ENTITY circledR "®"> +<!ENTITY REG "®"> +<!ENTITY macr "¯"> +<!ENTITY strns "¯"> +<!ENTITY deg "°"> +<!ENTITY plusmn "±"> +<!ENTITY pm "±"> +<!ENTITY PlusMinus "±"> +<!ENTITY sup2 "²"> +<!ENTITY sup3 "³"> +<!ENTITY acute "´"> +<!ENTITY DiacriticalAcute "´"> +<!ENTITY micro "µ"> +<!ENTITY para "¶"> +<!ENTITY middot "·"> +<!ENTITY centerdot "·"> +<!ENTITY CenterDot "·"> +<!ENTITY cedil "¸"> +<!ENTITY Cedilla "¸"> +<!ENTITY sup1 "¹"> +<!ENTITY ordm "º"> +<!ENTITY raquo "»"> +<!ENTITY frac14 "¼"> +<!ENTITY frac12 "½"> +<!ENTITY half "½"> +<!ENTITY frac34 "¾"> 
+<!ENTITY iquest "¿"> +<!ENTITY Agrave "À"> +<!ENTITY Aacute "Á"> +<!ENTITY Acirc "Â"> +<!ENTITY Atilde "Ã"> +<!ENTITY Auml "Ä"> +<!ENTITY Aring "Å"> +<!ENTITY angst "Å"> +<!ENTITY AElig "Æ"> +<!ENTITY Ccedil "Ç"> +<!ENTITY Egrave "È"> +<!ENTITY Eacute "É"> +<!ENTITY Ecirc "Ê"> +<!ENTITY Euml "Ë"> +<!ENTITY Igrave "Ì"> +<!ENTITY Iacute "Í"> +<!ENTITY Icirc "Î"> +<!ENTITY Iuml "Ï"> +<!ENTITY ETH "Ð"> +<!ENTITY Ntilde "Ñ"> +<!ENTITY Ograve "Ò"> +<!ENTITY Oacute "Ó"> +<!ENTITY Ocirc "Ô"> +<!ENTITY Otilde "Õ"> +<!ENTITY Ouml "Ö"> +<!ENTITY times "×"> +<!ENTITY Oslash "Ø"> +<!ENTITY Ugrave "Ù"> +<!ENTITY Uacute "Ú"> +<!ENTITY Ucirc "Û"> +<!ENTITY Uuml "Ü"> +<!ENTITY Yacute "Ý"> +<!ENTITY THORN "Þ"> +<!ENTITY szlig "ß"> +<!ENTITY agrave "à"> +<!ENTITY aacute "á"> +<!ENTITY acirc "â"> +<!ENTITY atilde "ã"> +<!ENTITY auml "ä"> +<!ENTITY aring "å"> +<!ENTITY aelig "æ"> +<!ENTITY ccedil "ç"> +<!ENTITY egrave "è"> +<!ENTITY eacute "é"> +<!ENTITY ecirc "ê"> +<!ENTITY euml "ë"> +<!ENTITY igrave "ì"> +<!ENTITY iacute "í"> +<!ENTITY icirc "î"> +<!ENTITY iuml "ï"> +<!ENTITY eth "ð"> +<!ENTITY ntilde "ñ"> +<!ENTITY ograve "ò"> +<!ENTITY oacute "ó"> +<!ENTITY ocirc "ô"> +<!ENTITY otilde "õ"> +<!ENTITY ouml "ö"> +<!ENTITY divide "÷"> +<!ENTITY div "÷"> +<!ENTITY oslash "ø"> +<!ENTITY ugrave "ù"> +<!ENTITY uacute "ú"> +<!ENTITY ucirc "û"> +<!ENTITY uuml "ü"> +<!ENTITY yacute "ý"> +<!ENTITY thorn "þ"> +<!ENTITY yuml "ÿ"> +<!ENTITY Amacr "Ā"> +<!ENTITY amacr "ā"> +<!ENTITY Abreve "Ă"> +<!ENTITY abreve "ă"> +<!ENTITY Aogon "Ą"> +<!ENTITY aogon "ą"> +<!ENTITY Cacute "Ć"> +<!ENTITY cacute "ć"> +<!ENTITY Ccirc "Ĉ"> +<!ENTITY ccirc "ĉ"> +<!ENTITY Cdot "Ċ"> +<!ENTITY cdot "ċ"> +<!ENTITY Ccaron "Č"> +<!ENTITY ccaron "č"> +<!ENTITY Dcaron "Ď"> +<!ENTITY dcaron "ď"> +<!ENTITY Dstrok "Đ"> +<!ENTITY dstrok "đ"> +<!ENTITY Emacr "Ē"> +<!ENTITY emacr "ē"> +<!ENTITY Edot "Ė"> +<!ENTITY edot "ė"> +<!ENTITY Eogon "Ę"> +<!ENTITY eogon "ę"> +<!ENTITY Ecaron "Ě"> +<!ENTITY ecaron "ě"> +<!ENTITY Gcirc "Ĝ"> 
+<!ENTITY gcirc "ĝ"> +<!ENTITY Gbreve "Ğ"> +<!ENTITY gbreve "ğ"> +<!ENTITY Gdot "Ġ"> +<!ENTITY gdot "ġ"> +<!ENTITY Gcedil "Ģ"> +<!ENTITY Hcirc "Ĥ"> +<!ENTITY hcirc "ĥ"> +<!ENTITY Hstrok "Ħ"> +<!ENTITY hstrok "ħ"> +<!ENTITY Itilde "Ĩ"> +<!ENTITY itilde "ĩ"> +<!ENTITY Imacr "Ī"> +<!ENTITY imacr "ī"> +<!ENTITY Iogon "Į"> +<!ENTITY iogon "į"> +<!ENTITY Idot "İ"> +<!ENTITY imath "ı"> +<!ENTITY inodot "ı"> +<!ENTITY IJlig "IJ"> +<!ENTITY ijlig "ij"> +<!ENTITY Jcirc "Ĵ"> +<!ENTITY jcirc "ĵ"> +<!ENTITY Kcedil "Ķ"> +<!ENTITY kcedil "ķ"> +<!ENTITY kgreen "ĸ"> +<!ENTITY Lacute "Ĺ"> +<!ENTITY lacute "ĺ"> +<!ENTITY Lcedil "Ļ"> +<!ENTITY lcedil "ļ"> +<!ENTITY Lcaron "Ľ"> +<!ENTITY lcaron "ľ"> +<!ENTITY Lmidot "Ŀ"> +<!ENTITY lmidot "ŀ"> +<!ENTITY Lstrok "Ł"> +<!ENTITY lstrok "ł"> +<!ENTITY Nacute "Ń"> +<!ENTITY nacute "ń"> +<!ENTITY Ncedil "Ņ"> +<!ENTITY ncedil "ņ"> +<!ENTITY Ncaron "Ň"> +<!ENTITY ncaron "ň"> +<!ENTITY napos "ʼn"> +<!ENTITY ENG "Ŋ"> +<!ENTITY eng "ŋ"> +<!ENTITY Omacr "Ō"> +<!ENTITY omacr "ō"> +<!ENTITY Odblac "Ő"> +<!ENTITY odblac "ő"> +<!ENTITY OElig "Œ"> +<!ENTITY oelig "œ"> +<!ENTITY Racute "Ŕ"> +<!ENTITY racute "ŕ"> +<!ENTITY Rcedil "Ŗ"> +<!ENTITY rcedil "ŗ"> +<!ENTITY Rcaron "Ř"> +<!ENTITY rcaron "ř"> +<!ENTITY Sacute "Ś"> +<!ENTITY sacute "ś"> +<!ENTITY Scirc "Ŝ"> +<!ENTITY scirc "ŝ"> +<!ENTITY Scedil "Ş"> +<!ENTITY scedil "ş"> +<!ENTITY Scaron "Š"> +<!ENTITY scaron "š"> +<!ENTITY Tcedil "Ţ"> +<!ENTITY tcedil "ţ"> +<!ENTITY Tcaron "Ť"> +<!ENTITY tcaron "ť"> +<!ENTITY Tstrok "Ŧ"> +<!ENTITY tstrok "ŧ"> +<!ENTITY Utilde "Ũ"> +<!ENTITY utilde "ũ"> +<!ENTITY Umacr "Ū"> +<!ENTITY umacr "ū"> +<!ENTITY Ubreve "Ŭ"> +<!ENTITY ubreve "ŭ"> +<!ENTITY Uring "Ů"> +<!ENTITY uring "ů"> +<!ENTITY Udblac "Ű"> +<!ENTITY udblac "ű"> +<!ENTITY Uogon "Ų"> +<!ENTITY uogon "ų"> +<!ENTITY Wcirc "Ŵ"> +<!ENTITY wcirc "ŵ"> +<!ENTITY Ycirc "Ŷ"> +<!ENTITY ycirc "ŷ"> +<!ENTITY Yuml "Ÿ"> +<!ENTITY Zacute "Ź"> +<!ENTITY zacute "ź"> +<!ENTITY Zdot "Ż"> +<!ENTITY zdot "ż"> +<!ENTITY Zcaron 
"Ž"> +<!ENTITY zcaron "ž"> +<!ENTITY fnof "ƒ"> +<!ENTITY imped "Ƶ"> +<!ENTITY gacute "ǵ"> +<!ENTITY jmath "ȷ"> +<!ENTITY circ "ˆ"> +<!ENTITY caron "ˇ"> +<!ENTITY Hacek "ˇ"> +<!ENTITY breve "˘"> +<!ENTITY Breve "˘"> +<!ENTITY dot "˙"> +<!ENTITY DiacriticalDot "˙"> +<!ENTITY ring "˚"> +<!ENTITY ogon "˛"> +<!ENTITY tilde "˜"> +<!ENTITY DiacriticalTilde "˜"> +<!ENTITY dblac "˝"> +<!ENTITY DiacriticalDoubleAcute "˝"> +<!ENTITY DownBreve "̑"> +<!ENTITY Alpha "Α"> +<!ENTITY Beta "Β"> +<!ENTITY Gamma "Γ"> +<!ENTITY Delta "Δ"> +<!ENTITY Epsilon "Ε"> +<!ENTITY Zeta "Ζ"> +<!ENTITY Eta "Η"> +<!ENTITY Theta "Θ"> +<!ENTITY Iota "Ι"> +<!ENTITY Kappa "Κ"> +<!ENTITY Lambda "Λ"> +<!ENTITY Mu "Μ"> +<!ENTITY Nu "Ν"> +<!ENTITY Xi "Ξ"> +<!ENTITY Omicron "Ο"> +<!ENTITY Pi "Π"> +<!ENTITY Rho "Ρ"> +<!ENTITY Sigma "Σ"> +<!ENTITY Tau "Τ"> +<!ENTITY Upsilon "Υ"> +<!ENTITY Phi "Φ"> +<!ENTITY Chi "Χ"> +<!ENTITY Psi "Ψ"> +<!ENTITY Omega "Ω"> +<!ENTITY ohm "Ω"> +<!ENTITY alpha "α"> +<!ENTITY beta "β"> +<!ENTITY gamma "γ"> +<!ENTITY delta "δ"> +<!ENTITY epsi "ε"> +<!ENTITY epsilon "ε"> +<!ENTITY zeta "ζ"> +<!ENTITY eta "η"> +<!ENTITY theta "θ"> +<!ENTITY iota "ι"> +<!ENTITY kappa "κ"> +<!ENTITY lambda "λ"> +<!ENTITY mu "μ"> +<!ENTITY nu "ν"> +<!ENTITY xi "ξ"> +<!ENTITY omicron "ο"> +<!ENTITY pi "π"> +<!ENTITY rho "ρ"> +<!ENTITY sigmav "ς"> +<!ENTITY varsigma "ς"> +<!ENTITY sigmaf "ς"> +<!ENTITY sigma "σ"> +<!ENTITY tau "τ"> +<!ENTITY upsi "υ"> +<!ENTITY upsilon "υ"> +<!ENTITY phi "φ"> +<!ENTITY chi "χ"> +<!ENTITY psi "ψ"> +<!ENTITY omega "ω"> +<!ENTITY thetav "ϑ"> +<!ENTITY vartheta "ϑ"> +<!ENTITY thetasym "ϑ"> +<!ENTITY Upsi "ϒ"> +<!ENTITY upsih "ϒ"> +<!ENTITY straightphi "ϕ"> +<!ENTITY phiv "ϕ"> +<!ENTITY varphi "ϕ"> +<!ENTITY piv "ϖ"> +<!ENTITY varpi "ϖ"> +<!ENTITY Gammad "Ϝ"> +<!ENTITY gammad "ϝ"> +<!ENTITY digamma "ϝ"> +<!ENTITY kappav "ϰ"> +<!ENTITY varkappa "ϰ"> +<!ENTITY rhov "ϱ"> +<!ENTITY varrho "ϱ"> +<!ENTITY epsiv "ϵ"> +<!ENTITY straightepsilon "ϵ"> +<!ENTITY varepsilon "ϵ"> +<!ENTITY 
bepsi "϶"> +<!ENTITY backepsilon "϶"> +<!ENTITY IOcy "Ё"> +<!ENTITY DJcy "Ђ"> +<!ENTITY GJcy "Ѓ"> +<!ENTITY Jukcy "Є"> +<!ENTITY DScy "Ѕ"> +<!ENTITY Iukcy "І"> +<!ENTITY YIcy "Ї"> +<!ENTITY Jsercy "Ј"> +<!ENTITY LJcy "Љ"> +<!ENTITY NJcy "Њ"> +<!ENTITY TSHcy "Ћ"> +<!ENTITY KJcy "Ќ"> +<!ENTITY Ubrcy "Ў"> +<!ENTITY DZcy "Џ"> +<!ENTITY Acy "А"> +<!ENTITY Bcy "Б"> +<!ENTITY Vcy "В"> +<!ENTITY Gcy "Г"> +<!ENTITY Dcy "Д"> +<!ENTITY IEcy "Е"> +<!ENTITY ZHcy "Ж"> +<!ENTITY Zcy "З"> +<!ENTITY Icy "И"> +<!ENTITY Jcy "Й"> +<!ENTITY Kcy "К"> +<!ENTITY Lcy "Л"> +<!ENTITY Mcy "М"> +<!ENTITY Ncy "Н"> +<!ENTITY Ocy "О"> +<!ENTITY Pcy "П"> +<!ENTITY Rcy "Р"> +<!ENTITY Scy "С"> +<!ENTITY Tcy "Т"> +<!ENTITY Ucy "У"> +<!ENTITY Fcy "Ф"> +<!ENTITY KHcy "Х"> +<!ENTITY TScy "Ц"> +<!ENTITY CHcy "Ч"> +<!ENTITY SHcy "Ш"> +<!ENTITY SHCHcy "Щ"> +<!ENTITY HARDcy "Ъ"> +<!ENTITY Ycy "Ы"> +<!ENTITY SOFTcy "Ь"> +<!ENTITY Ecy "Э"> +<!ENTITY YUcy "Ю"> +<!ENTITY YAcy "Я"> +<!ENTITY acy "а"> +<!ENTITY bcy "б"> +<!ENTITY vcy "в"> +<!ENTITY gcy "г"> +<!ENTITY dcy "д"> +<!ENTITY iecy "е"> +<!ENTITY zhcy "ж"> +<!ENTITY zcy "з"> +<!ENTITY icy "и"> +<!ENTITY jcy "й"> +<!ENTITY kcy "к"> +<!ENTITY lcy "л"> +<!ENTITY mcy "м"> +<!ENTITY ncy "н"> +<!ENTITY ocy "о"> +<!ENTITY pcy "п"> +<!ENTITY rcy "р"> +<!ENTITY scy "с"> +<!ENTITY tcy "т"> +<!ENTITY ucy "у"> +<!ENTITY fcy "ф"> +<!ENTITY khcy "х"> +<!ENTITY tscy "ц"> +<!ENTITY chcy "ч"> +<!ENTITY shcy "ш"> +<!ENTITY shchcy "щ"> +<!ENTITY hardcy "ъ"> +<!ENTITY ycy "ы"> +<!ENTITY softcy "ь"> +<!ENTITY ecy "э"> +<!ENTITY yucy "ю"> +<!ENTITY yacy "я"> +<!ENTITY iocy "ё"> +<!ENTITY djcy "ђ"> +<!ENTITY gjcy "ѓ"> +<!ENTITY jukcy "є"> +<!ENTITY dscy "ѕ"> +<!ENTITY iukcy "і"> +<!ENTITY yicy "ї"> +<!ENTITY jsercy "ј"> +<!ENTITY ljcy "љ"> +<!ENTITY njcy "њ"> +<!ENTITY tshcy "ћ"> +<!ENTITY kjcy "ќ"> +<!ENTITY ubrcy "ў"> +<!ENTITY dzcy "џ"> +<!ENTITY ensp " "> +<!ENTITY emsp " "> +<!ENTITY emsp13 " "> +<!ENTITY emsp14 " "> +<!ENTITY numsp " "> +<!ENTITY puncsp " "> +<!ENTITY 
thinsp " "> +<!ENTITY ThinSpace " "> +<!ENTITY hairsp " "> +<!ENTITY VeryThinSpace " "> +<!ENTITY ZeroWidthSpace "​"> +<!ENTITY NegativeVeryThinSpace "​"> +<!ENTITY NegativeThinSpace "​"> +<!ENTITY NegativeMediumSpace "​"> +<!ENTITY NegativeThickSpace "​"> +<!ENTITY zwnj "‌"> +<!ENTITY zwj "‍"> +<!ENTITY lrm "‎"> +<!ENTITY rlm "‏"> +<!ENTITY hyphen "‐"> +<!ENTITY dash "‐"> +<!ENTITY ndash "–"> +<!ENTITY mdash "—"> +<!ENTITY horbar "―"> +<!ENTITY Verbar "‖"> +<!ENTITY Vert "‖"> +<!ENTITY lsquo "‘"> +<!ENTITY OpenCurlyQuote "‘"> +<!ENTITY rsquo "’"> +<!ENTITY rsquor "’"> +<!ENTITY CloseCurlyQuote "’"> +<!ENTITY lsquor "‚"> +<!ENTITY sbquo "‚"> +<!ENTITY ldquo "“"> +<!ENTITY OpenCurlyDoubleQuote "“"> +<!ENTITY rdquo "”"> +<!ENTITY rdquor "”"> +<!ENTITY CloseCurlyDoubleQuote "”"> +<!ENTITY ldquor "„"> +<!ENTITY bdquo "„"> +<!ENTITY dagger "†"> +<!ENTITY Dagger "‡"> +<!ENTITY ddagger "‡"> +<!ENTITY bull "•"> +<!ENTITY bullet "•"> +<!ENTITY nldr "‥"> +<!ENTITY hellip "…"> +<!ENTITY mldr "…"> +<!ENTITY permil "‰"> +<!ENTITY pertenk "‱"> +<!ENTITY prime "′"> +<!ENTITY Prime "″"> +<!ENTITY tprime "‴"> +<!ENTITY bprime "‵"> +<!ENTITY backprime "‵"> +<!ENTITY lsaquo "‹"> +<!ENTITY rsaquo "›"> +<!ENTITY oline "‾"> +<!ENTITY OverBar "‾"> +<!ENTITY caret "⁁"> +<!ENTITY hybull "⁃"> +<!ENTITY frasl "⁄"> +<!ENTITY bsemi "⁏"> +<!ENTITY qprime "⁗"> +<!ENTITY MediumSpace " "> +<!ENTITY ThickSpace "  "> +<!ENTITY NoBreak "⁠"> +<!ENTITY ApplyFunction "⁡"> +<!ENTITY af "⁡"> +<!ENTITY InvisibleTimes "⁢"> +<!ENTITY it "⁢"> +<!ENTITY InvisibleComma "⁣"> +<!ENTITY ic "⁣"> +<!ENTITY euro "€"> +<!ENTITY tdot "⃛"> +<!ENTITY TripleDot "⃛"> +<!ENTITY DotDot "⃜"> +<!ENTITY Copf "ℂ"> +<!ENTITY complexes "ℂ"> +<!ENTITY incare "℅"> +<!ENTITY gscr "ℊ"> +<!ENTITY hamilt "ℋ"> +<!ENTITY HilbertSpace "ℋ"> +<!ENTITY Hscr "ℋ"> +<!ENTITY Hfr "ℌ"> +<!ENTITY Poincareplane "ℌ"> +<!ENTITY quaternions "ℍ"> +<!ENTITY Hopf "ℍ"> +<!ENTITY planckh "ℎ"> +<!ENTITY planck "ℏ"> +<!ENTITY hbar "ℏ"> +<!ENTITY plankv "ℏ"> 
+<!ENTITY hslash "ℏ"> +<!ENTITY Iscr "ℐ"> +<!ENTITY imagline "ℐ"> +<!ENTITY image "ℑ"> +<!ENTITY Im "ℑ"> +<!ENTITY imagpart "ℑ"> +<!ENTITY Ifr "ℑ"> +<!ENTITY Lscr "ℒ"> +<!ENTITY lagran "ℒ"> +<!ENTITY Laplacetrf "ℒ"> +<!ENTITY ell "ℓ"> +<!ENTITY Nopf "ℕ"> +<!ENTITY naturals "ℕ"> +<!ENTITY numero "№"> +<!ENTITY copysr "℗"> +<!ENTITY weierp "℘"> +<!ENTITY wp "℘"> +<!ENTITY Popf "ℙ"> +<!ENTITY primes "ℙ"> +<!ENTITY rationals "ℚ"> +<!ENTITY Qopf "ℚ"> +<!ENTITY Rscr "ℛ"> +<!ENTITY realine "ℛ"> +<!ENTITY real "ℜ"> +<!ENTITY Re "ℜ"> +<!ENTITY realpart "ℜ"> +<!ENTITY Rfr "ℜ"> +<!ENTITY reals "ℝ"> +<!ENTITY Ropf "ℝ"> +<!ENTITY rx "℞"> +<!ENTITY trade "™"> +<!ENTITY TRADE "™"> +<!ENTITY integers "ℤ"> +<!ENTITY Zopf "ℤ"> +<!ENTITY mho "℧"> +<!ENTITY Zfr "ℨ"> +<!ENTITY zeetrf "ℨ"> +<!ENTITY iiota "℩"> +<!ENTITY bernou "ℬ"> +<!ENTITY Bernoullis "ℬ"> +<!ENTITY Bscr "ℬ"> +<!ENTITY Cfr "ℭ"> +<!ENTITY Cayleys "ℭ"> +<!ENTITY escr "ℯ"> +<!ENTITY Escr "ℰ"> +<!ENTITY expectation "ℰ"> +<!ENTITY Fscr "ℱ"> +<!ENTITY Fouriertrf "ℱ"> +<!ENTITY phmmat "ℳ"> +<!ENTITY Mellintrf "ℳ"> +<!ENTITY Mscr "ℳ"> +<!ENTITY order "ℴ"> +<!ENTITY orderof "ℴ"> +<!ENTITY oscr "ℴ"> +<!ENTITY alefsym "ℵ"> +<!ENTITY aleph "ℵ"> +<!ENTITY beth "ℶ"> +<!ENTITY gimel "ℷ"> +<!ENTITY daleth "ℸ"> +<!ENTITY CapitalDifferentialD "ⅅ"> +<!ENTITY DD "ⅅ"> +<!ENTITY DifferentialD "ⅆ"> +<!ENTITY dd "ⅆ"> +<!ENTITY ExponentialE "ⅇ"> +<!ENTITY exponentiale "ⅇ"> +<!ENTITY ee "ⅇ"> +<!ENTITY ImaginaryI "ⅈ"> +<!ENTITY ii "ⅈ"> +<!ENTITY frac13 "⅓"> +<!ENTITY frac23 "⅔"> +<!ENTITY frac15 "⅕"> +<!ENTITY frac25 "⅖"> +<!ENTITY frac35 "⅗"> +<!ENTITY frac45 "⅘"> +<!ENTITY frac16 "⅙"> +<!ENTITY frac56 "⅚"> +<!ENTITY frac18 "⅛"> +<!ENTITY frac38 "⅜"> +<!ENTITY frac58 "⅝"> +<!ENTITY frac78 "⅞"> +<!ENTITY larr "←"> +<!ENTITY leftarrow "←"> +<!ENTITY LeftArrow "←"> +<!ENTITY slarr "←"> +<!ENTITY ShortLeftArrow "←"> +<!ENTITY uarr "↑"> +<!ENTITY uparrow "↑"> +<!ENTITY UpArrow "↑"> +<!ENTITY ShortUpArrow "↑"> +<!ENTITY rarr "→"> +<!ENTITY rightarrow 
"→"> +<!ENTITY RightArrow "→"> +<!ENTITY srarr "→"> +<!ENTITY ShortRightArrow "→"> +<!ENTITY darr "↓"> +<!ENTITY downarrow "↓"> +<!ENTITY DownArrow "↓"> +<!ENTITY ShortDownArrow "↓"> +<!ENTITY harr "↔"> +<!ENTITY leftrightarrow "↔"> +<!ENTITY LeftRightArrow "↔"> +<!ENTITY varr "↕"> +<!ENTITY updownarrow "↕"> +<!ENTITY UpDownArrow "↕"> +<!ENTITY nwarr "↖"> +<!ENTITY UpperLeftArrow "↖"> +<!ENTITY nwarrow "↖"> +<!ENTITY nearr "↗"> +<!ENTITY UpperRightArrow "↗"> +<!ENTITY nearrow "↗"> +<!ENTITY searr "↘"> +<!ENTITY searrow "↘"> +<!ENTITY LowerRightArrow "↘"> +<!ENTITY swarr "↙"> +<!ENTITY swarrow "↙"> +<!ENTITY LowerLeftArrow "↙"> +<!ENTITY nlarr "↚"> +<!ENTITY nleftarrow "↚"> +<!ENTITY nrarr "↛"> +<!ENTITY nrightarrow "↛"> +<!ENTITY rarrw "↝"> +<!ENTITY rightsquigarrow "↝"> +<!ENTITY nrarrw "↝̸"> +<!ENTITY Larr "↞"> +<!ENTITY twoheadleftarrow "↞"> +<!ENTITY Uarr "↟"> +<!ENTITY Rarr "↠"> +<!ENTITY twoheadrightarrow "↠"> +<!ENTITY Darr "↡"> +<!ENTITY larrtl "↢"> +<!ENTITY leftarrowtail "↢"> +<!ENTITY rarrtl "↣"> +<!ENTITY rightarrowtail "↣"> +<!ENTITY LeftTeeArrow "↤"> +<!ENTITY mapstoleft "↤"> +<!ENTITY UpTeeArrow "↥"> +<!ENTITY mapstoup "↥"> +<!ENTITY map "↦"> +<!ENTITY RightTeeArrow "↦"> +<!ENTITY mapsto "↦"> +<!ENTITY DownTeeArrow "↧"> +<!ENTITY mapstodown "↧"> +<!ENTITY larrhk "↩"> +<!ENTITY hookleftarrow "↩"> +<!ENTITY rarrhk "↪"> +<!ENTITY hookrightarrow "↪"> +<!ENTITY larrlp "↫"> +<!ENTITY looparrowleft "↫"> +<!ENTITY rarrlp "↬"> +<!ENTITY looparrowright "↬"> +<!ENTITY harrw "↭"> +<!ENTITY leftrightsquigarrow "↭"> +<!ENTITY nharr "↮"> +<!ENTITY nleftrightarrow "↮"> +<!ENTITY lsh "↰"> +<!ENTITY Lsh "↰"> +<!ENTITY rsh "↱"> +<!ENTITY Rsh "↱"> +<!ENTITY ldsh "↲"> +<!ENTITY rdsh "↳"> +<!ENTITY crarr "↵"> +<!ENTITY cularr "↶"> +<!ENTITY curvearrowleft "↶"> +<!ENTITY curarr "↷"> +<!ENTITY curvearrowright "↷"> +<!ENTITY olarr "↺"> +<!ENTITY circlearrowleft "↺"> +<!ENTITY orarr "↻"> +<!ENTITY circlearrowright "↻"> +<!ENTITY lharu "↼"> +<!ENTITY LeftVector "↼"> +<!ENTITY 
leftharpoonup "↼"> +<!ENTITY lhard "↽"> +<!ENTITY leftharpoondown "↽"> +<!ENTITY DownLeftVector "↽"> +<!ENTITY uharr "↾"> +<!ENTITY upharpoonright "↾"> +<!ENTITY RightUpVector "↾"> +<!ENTITY uharl "↿"> +<!ENTITY upharpoonleft "↿"> +<!ENTITY LeftUpVector "↿"> +<!ENTITY rharu "⇀"> +<!ENTITY RightVector "⇀"> +<!ENTITY rightharpoonup "⇀"> +<!ENTITY rhard "⇁"> +<!ENTITY rightharpoondown "⇁"> +<!ENTITY DownRightVector "⇁"> +<!ENTITY dharr "⇂"> +<!ENTITY RightDownVector "⇂"> +<!ENTITY downharpoonright "⇂"> +<!ENTITY dharl "⇃"> +<!ENTITY LeftDownVector "⇃"> +<!ENTITY downharpoonleft "⇃"> +<!ENTITY rlarr "⇄"> +<!ENTITY rightleftarrows "⇄"> +<!ENTITY RightArrowLeftArrow "⇄"> +<!ENTITY udarr "⇅"> +<!ENTITY UpArrowDownArrow "⇅"> +<!ENTITY lrarr "⇆"> +<!ENTITY leftrightarrows "⇆"> +<!ENTITY LeftArrowRightArrow "⇆"> +<!ENTITY llarr "⇇"> +<!ENTITY leftleftarrows "⇇"> +<!ENTITY uuarr "⇈"> +<!ENTITY upuparrows "⇈"> +<!ENTITY rrarr "⇉"> +<!ENTITY rightrightarrows "⇉"> +<!ENTITY ddarr "⇊"> +<!ENTITY downdownarrows "⇊"> +<!ENTITY lrhar "⇋"> +<!ENTITY ReverseEquilibrium "⇋"> +<!ENTITY leftrightharpoons "⇋"> +<!ENTITY rlhar "⇌"> +<!ENTITY rightleftharpoons "⇌"> +<!ENTITY Equilibrium "⇌"> +<!ENTITY nlArr "⇍"> +<!ENTITY nLeftarrow "⇍"> +<!ENTITY nhArr "⇎"> +<!ENTITY nLeftrightarrow "⇎"> +<!ENTITY nrArr "⇏"> +<!ENTITY nRightarrow "⇏"> +<!ENTITY lArr "⇐"> +<!ENTITY Leftarrow "⇐"> +<!ENTITY DoubleLeftArrow "⇐"> +<!ENTITY uArr "⇑"> +<!ENTITY Uparrow "⇑"> +<!ENTITY DoubleUpArrow "⇑"> +<!ENTITY rArr "⇒"> +<!ENTITY Rightarrow "⇒"> +<!ENTITY Implies "⇒"> +<!ENTITY DoubleRightArrow "⇒"> +<!ENTITY dArr "⇓"> +<!ENTITY Downarrow "⇓"> +<!ENTITY DoubleDownArrow "⇓"> +<!ENTITY hArr "⇔"> +<!ENTITY Leftrightarrow "⇔"> +<!ENTITY DoubleLeftRightArrow "⇔"> +<!ENTITY iff "⇔"> +<!ENTITY vArr "⇕"> +<!ENTITY Updownarrow "⇕"> +<!ENTITY DoubleUpDownArrow "⇕"> +<!ENTITY nwArr "⇖"> +<!ENTITY neArr "⇗"> +<!ENTITY seArr "⇘"> +<!ENTITY swArr "⇙"> +<!ENTITY lAarr "⇚"> +<!ENTITY Lleftarrow "⇚"> +<!ENTITY rAarr "⇛"> 
+<!ENTITY Rrightarrow "⇛"> +<!ENTITY zigrarr "⇝"> +<!ENTITY larrb "⇤"> +<!ENTITY LeftArrowBar "⇤"> +<!ENTITY rarrb "⇥"> +<!ENTITY RightArrowBar "⇥"> +<!ENTITY duarr "⇵"> +<!ENTITY DownArrowUpArrow "⇵"> +<!ENTITY loarr "⇽"> +<!ENTITY roarr "⇾"> +<!ENTITY hoarr "⇿"> +<!ENTITY forall "∀"> +<!ENTITY ForAll "∀"> +<!ENTITY comp "∁"> +<!ENTITY complement "∁"> +<!ENTITY part "∂"> +<!ENTITY PartialD "∂"> +<!ENTITY npart "∂̸"> +<!ENTITY exist "∃"> +<!ENTITY Exists "∃"> +<!ENTITY nexist "∄"> +<!ENTITY NotExists "∄"> +<!ENTITY nexists "∄"> +<!ENTITY empty "∅"> +<!ENTITY emptyset "∅"> +<!ENTITY emptyv "∅"> +<!ENTITY varnothing "∅"> +<!ENTITY nabla "∇"> +<!ENTITY Del "∇"> +<!ENTITY isin "∈"> +<!ENTITY isinv "∈"> +<!ENTITY Element "∈"> +<!ENTITY in "∈"> +<!ENTITY notin "∉"> +<!ENTITY NotElement "∉"> +<!ENTITY notinva "∉"> +<!ENTITY niv "∋"> +<!ENTITY ReverseElement "∋"> +<!ENTITY ni "∋"> +<!ENTITY SuchThat "∋"> +<!ENTITY notni "∌"> +<!ENTITY notniva "∌"> +<!ENTITY NotReverseElement "∌"> +<!ENTITY prod "∏"> +<!ENTITY Product "∏"> +<!ENTITY coprod "∐"> +<!ENTITY Coproduct "∐"> +<!ENTITY sum "∑"> +<!ENTITY Sum "∑"> +<!ENTITY minus "−"> +<!ENTITY mnplus "∓"> +<!ENTITY mp "∓"> +<!ENTITY MinusPlus "∓"> +<!ENTITY plusdo "∔"> +<!ENTITY dotplus "∔"> +<!ENTITY setmn "∖"> +<!ENTITY setminus "∖"> +<!ENTITY Backslash "∖"> +<!ENTITY ssetmn "∖"> +<!ENTITY smallsetminus "∖"> +<!ENTITY lowast "∗"> +<!ENTITY compfn "∘"> +<!ENTITY SmallCircle "∘"> +<!ENTITY radic "√"> +<!ENTITY Sqrt "√"> +<!ENTITY prop "∝"> +<!ENTITY propto "∝"> +<!ENTITY Proportional "∝"> +<!ENTITY vprop "∝"> +<!ENTITY varpropto "∝"> +<!ENTITY infin "∞"> +<!ENTITY angrt "∟"> +<!ENTITY ang "∠"> +<!ENTITY angle "∠"> +<!ENTITY nang "∠⃒"> +<!ENTITY angmsd "∡"> +<!ENTITY measuredangle "∡"> +<!ENTITY angsph "∢"> +<!ENTITY mid "∣"> +<!ENTITY VerticalBar "∣"> +<!ENTITY smid "∣"> +<!ENTITY shortmid "∣"> +<!ENTITY nmid "∤"> +<!ENTITY NotVerticalBar "∤"> +<!ENTITY nsmid "∤"> +<!ENTITY nshortmid "∤"> +<!ENTITY par "∥"> +<!ENTITY parallel "∥"> 
+<!ENTITY DoubleVerticalBar "∥"> +<!ENTITY spar "∥"> +<!ENTITY shortparallel "∥"> +<!ENTITY npar "∦"> +<!ENTITY nparallel "∦"> +<!ENTITY NotDoubleVerticalBar "∦"> +<!ENTITY nspar "∦"> +<!ENTITY nshortparallel "∦"> +<!ENTITY and "∧"> +<!ENTITY wedge "∧"> +<!ENTITY or "∨"> +<!ENTITY vee "∨"> +<!ENTITY cap "∩"> +<!ENTITY caps "∩︀"> +<!ENTITY cup "∪"> +<!ENTITY cups "∪︀"> +<!ENTITY int "∫"> +<!ENTITY Integral "∫"> +<!ENTITY Int "∬"> +<!ENTITY tint "∭"> +<!ENTITY iiint "∭"> +<!ENTITY conint "∮"> +<!ENTITY oint "∮"> +<!ENTITY ContourIntegral "∮"> +<!ENTITY Conint "∯"> +<!ENTITY DoubleContourIntegral "∯"> +<!ENTITY Cconint "∰"> +<!ENTITY cwint "∱"> +<!ENTITY cwconint "∲"> +<!ENTITY ClockwiseContourIntegral "∲"> +<!ENTITY awconint "∳"> +<!ENTITY CounterClockwiseContourIntegral "∳"> +<!ENTITY there4 "∴"> +<!ENTITY therefore "∴"> +<!ENTITY Therefore "∴"> +<!ENTITY becaus "∵"> +<!ENTITY because "∵"> +<!ENTITY Because "∵"> +<!ENTITY ratio "∶"> +<!ENTITY Colon "∷"> +<!ENTITY Proportion "∷"> +<!ENTITY minusd "∸"> +<!ENTITY dotminus "∸"> +<!ENTITY mDDot "∺"> +<!ENTITY homtht "∻"> +<!ENTITY sim "∼"> +<!ENTITY Tilde "∼"> +<!ENTITY thksim "∼"> +<!ENTITY thicksim "∼"> +<!ENTITY nvsim "∼⃒"> +<!ENTITY bsim "∽"> +<!ENTITY backsim "∽"> +<!ENTITY race "∽̱"> +<!ENTITY ac "∾"> +<!ENTITY mstpos "∾"> +<!ENTITY acE "∾̳"> +<!ENTITY acd "∿"> +<!ENTITY wreath "≀"> +<!ENTITY VerticalTilde "≀"> +<!ENTITY wr "≀"> +<!ENTITY nsim "≁"> +<!ENTITY NotTilde "≁"> +<!ENTITY esim "≂"> +<!ENTITY EqualTilde "≂"> +<!ENTITY eqsim "≂"> +<!ENTITY NotEqualTilde "≂̸"> +<!ENTITY nesim "≂̸"> +<!ENTITY sime "≃"> +<!ENTITY TildeEqual "≃"> +<!ENTITY simeq "≃"> +<!ENTITY nsime "≄"> +<!ENTITY nsimeq "≄"> +<!ENTITY NotTildeEqual "≄"> +<!ENTITY cong "≅"> +<!ENTITY TildeFullEqual "≅"> +<!ENTITY simne "≆"> +<!ENTITY ncong "≇"> +<!ENTITY NotTildeFullEqual "≇"> +<!ENTITY asymp "≈"> +<!ENTITY ap "≈"> +<!ENTITY TildeTilde "≈"> +<!ENTITY approx "≈"> +<!ENTITY thkap "≈"> +<!ENTITY thickapprox "≈"> +<!ENTITY nap "≉"> +<!ENTITY 
NotTildeTilde "≉"> +<!ENTITY napprox "≉"> +<!ENTITY ape "≊"> +<!ENTITY approxeq "≊"> +<!ENTITY apid "≋"> +<!ENTITY napid "≋̸"> +<!ENTITY bcong "≌"> +<!ENTITY backcong "≌"> +<!ENTITY asympeq "≍"> +<!ENTITY CupCap "≍"> +<!ENTITY nvap "≍⃒"> +<!ENTITY bump "≎"> +<!ENTITY HumpDownHump "≎"> +<!ENTITY Bumpeq "≎"> +<!ENTITY NotHumpDownHump "≎̸"> +<!ENTITY nbump "≎̸"> +<!ENTITY bumpe "≏"> +<!ENTITY HumpEqual "≏"> +<!ENTITY bumpeq "≏"> +<!ENTITY nbumpe "≏̸"> +<!ENTITY NotHumpEqual "≏̸"> +<!ENTITY esdot "≐"> +<!ENTITY DotEqual "≐"> +<!ENTITY doteq "≐"> +<!ENTITY nedot "≐̸"> +<!ENTITY eDot "≑"> +<!ENTITY doteqdot "≑"> +<!ENTITY efDot "≒"> +<!ENTITY fallingdotseq "≒"> +<!ENTITY erDot "≓"> +<!ENTITY risingdotseq "≓"> +<!ENTITY colone "≔"> +<!ENTITY coloneq "≔"> +<!ENTITY Assign "≔"> +<!ENTITY ecolon "≕"> +<!ENTITY eqcolon "≕"> +<!ENTITY ecir "≖"> +<!ENTITY eqcirc "≖"> +<!ENTITY cire "≗"> +<!ENTITY circeq "≗"> +<!ENTITY wedgeq "≙"> +<!ENTITY veeeq "≚"> +<!ENTITY trie "≜"> +<!ENTITY triangleq "≜"> +<!ENTITY equest "≟"> +<!ENTITY questeq "≟"> +<!ENTITY ne "≠"> +<!ENTITY NotEqual "≠"> +<!ENTITY equiv "≡"> +<!ENTITY Congruent "≡"> +<!ENTITY bnequiv "≡⃥"> +<!ENTITY nequiv "≢"> +<!ENTITY NotCongruent "≢"> +<!ENTITY le "≤"> +<!ENTITY leq "≤"> +<!ENTITY nvle "≤⃒"> +<!ENTITY ge "≥"> +<!ENTITY GreaterEqual "≥"> +<!ENTITY geq "≥"> +<!ENTITY nvge "≥⃒"> +<!ENTITY lE "≦"> +<!ENTITY LessFullEqual "≦"> +<!ENTITY leqq "≦"> +<!ENTITY nlE "≦̸"> +<!ENTITY nleqq "≦̸"> +<!ENTITY gE "≧"> +<!ENTITY GreaterFullEqual "≧"> +<!ENTITY geqq "≧"> +<!ENTITY ngE "≧̸"> +<!ENTITY ngeqq "≧̸"> +<!ENTITY NotGreaterFullEqual "≧̸"> +<!ENTITY lnE "≨"> +<!ENTITY lneqq "≨"> +<!ENTITY lvnE "≨︀"> +<!ENTITY lvertneqq "≨︀"> +<!ENTITY gnE "≩"> +<!ENTITY gneqq "≩"> +<!ENTITY gvnE "≩︀"> +<!ENTITY gvertneqq "≩︀"> +<!ENTITY Lt "≪"> +<!ENTITY NestedLessLess "≪"> +<!ENTITY ll "≪"> +<!ENTITY nLtv "≪̸"> +<!ENTITY NotLessLess "≪̸"> +<!ENTITY nLt "≪⃒"> +<!ENTITY Gt "≫"> +<!ENTITY NestedGreaterGreater "≫"> +<!ENTITY gg "≫"> +<!ENTITY 
nGtv "≫̸"> +<!ENTITY NotGreaterGreater "≫̸"> +<!ENTITY nGt "≫⃒"> +<!ENTITY twixt "≬"> +<!ENTITY between "≬"> +<!ENTITY NotCupCap "≭"> +<!ENTITY nlt "≮"> +<!ENTITY NotLess "≮"> +<!ENTITY nless "≮"> +<!ENTITY ngt "≯"> +<!ENTITY NotGreater "≯"> +<!ENTITY ngtr "≯"> +<!ENTITY nle "≰"> +<!ENTITY NotLessEqual "≰"> +<!ENTITY nleq "≰"> +<!ENTITY nge "≱"> +<!ENTITY NotGreaterEqual "≱"> +<!ENTITY ngeq "≱"> +<!ENTITY lsim "≲"> +<!ENTITY LessTilde "≲"> +<!ENTITY lesssim "≲"> +<!ENTITY gsim "≳"> +<!ENTITY gtrsim "≳"> +<!ENTITY GreaterTilde "≳"> +<!ENTITY nlsim "≴"> +<!ENTITY NotLessTilde "≴"> +<!ENTITY ngsim "≵"> +<!ENTITY NotGreaterTilde "≵"> +<!ENTITY lg "≶"> +<!ENTITY lessgtr "≶"> +<!ENTITY LessGreater "≶"> +<!ENTITY gl "≷"> +<!ENTITY gtrless "≷"> +<!ENTITY GreaterLess "≷"> +<!ENTITY ntlg "≸"> +<!ENTITY NotLessGreater "≸"> +<!ENTITY ntgl "≹"> +<!ENTITY NotGreaterLess "≹"> +<!ENTITY pr "≺"> +<!ENTITY Precedes "≺"> +<!ENTITY prec "≺"> +<!ENTITY sc "≻"> +<!ENTITY Succeeds "≻"> +<!ENTITY succ "≻"> +<!ENTITY prcue "≼"> +<!ENTITY PrecedesSlantEqual "≼"> +<!ENTITY preccurlyeq "≼"> +<!ENTITY sccue "≽"> +<!ENTITY SucceedsSlantEqual "≽"> +<!ENTITY succcurlyeq "≽"> +<!ENTITY prsim "≾"> +<!ENTITY precsim "≾"> +<!ENTITY PrecedesTilde "≾"> +<!ENTITY scsim "≿"> +<!ENTITY succsim "≿"> +<!ENTITY SucceedsTilde "≿"> +<!ENTITY NotSucceedsTilde "≿̸"> +<!ENTITY npr "⊀"> +<!ENTITY nprec "⊀"> +<!ENTITY NotPrecedes "⊀"> +<!ENTITY nsc "⊁"> +<!ENTITY nsucc "⊁"> +<!ENTITY NotSucceeds "⊁"> +<!ENTITY sub "⊂"> +<!ENTITY subset "⊂"> +<!ENTITY vnsub "⊂⃒"> +<!ENTITY nsubset "⊂⃒"> +<!ENTITY NotSubset "⊂⃒"> +<!ENTITY sup "⊃"> +<!ENTITY supset "⊃"> +<!ENTITY Superset "⊃"> +<!ENTITY vnsup "⊃⃒"> +<!ENTITY nsupset "⊃⃒"> +<!ENTITY NotSuperset "⊃⃒"> +<!ENTITY nsub "⊄"> +<!ENTITY nsup "⊅"> +<!ENTITY sube "⊆"> +<!ENTITY SubsetEqual "⊆"> +<!ENTITY subseteq "⊆"> +<!ENTITY supe "⊇"> +<!ENTITY supseteq "⊇"> +<!ENTITY SupersetEqual "⊇"> +<!ENTITY nsube "⊈"> +<!ENTITY nsubseteq "⊈"> +<!ENTITY NotSubsetEqual "⊈"> +<!ENTITY 
nsupe "⊉"> +<!ENTITY nsupseteq "⊉"> +<!ENTITY NotSupersetEqual "⊉"> +<!ENTITY subne "⊊"> +<!ENTITY subsetneq "⊊"> +<!ENTITY vsubne "⊊︀"> +<!ENTITY varsubsetneq "⊊︀"> +<!ENTITY supne "⊋"> +<!ENTITY supsetneq "⊋"> +<!ENTITY vsupne "⊋︀"> +<!ENTITY varsupsetneq "⊋︀"> +<!ENTITY cupdot "⊍"> +<!ENTITY uplus "⊎"> +<!ENTITY UnionPlus "⊎"> +<!ENTITY sqsub "⊏"> +<!ENTITY SquareSubset "⊏"> +<!ENTITY sqsubset "⊏"> +<!ENTITY NotSquareSubset "⊏̸"> +<!ENTITY sqsup "⊐"> +<!ENTITY SquareSuperset "⊐"> +<!ENTITY sqsupset "⊐"> +<!ENTITY NotSquareSuperset "⊐̸"> +<!ENTITY sqsube "⊑"> +<!ENTITY SquareSubsetEqual "⊑"> +<!ENTITY sqsubseteq "⊑"> +<!ENTITY sqsupe "⊒"> +<!ENTITY SquareSupersetEqual "⊒"> +<!ENTITY sqsupseteq "⊒"> +<!ENTITY sqcap "⊓"> +<!ENTITY SquareIntersection "⊓"> +<!ENTITY sqcaps "⊓︀"> +<!ENTITY sqcup "⊔"> +<!ENTITY SquareUnion "⊔"> +<!ENTITY sqcups "⊔︀"> +<!ENTITY oplus "⊕"> +<!ENTITY CirclePlus "⊕"> +<!ENTITY ominus "⊖"> +<!ENTITY CircleMinus "⊖"> +<!ENTITY otimes "⊗"> +<!ENTITY CircleTimes "⊗"> +<!ENTITY osol "⊘"> +<!ENTITY odot "⊙"> +<!ENTITY CircleDot "⊙"> +<!ENTITY ocir "⊚"> +<!ENTITY circledcirc "⊚"> +<!ENTITY oast "⊛"> +<!ENTITY circledast "⊛"> +<!ENTITY odash "⊝"> +<!ENTITY circleddash "⊝"> +<!ENTITY plusb "⊞"> +<!ENTITY boxplus "⊞"> +<!ENTITY minusb "⊟"> +<!ENTITY boxminus "⊟"> +<!ENTITY timesb "⊠"> +<!ENTITY boxtimes "⊠"> +<!ENTITY sdotb "⊡"> +<!ENTITY dotsquare "⊡"> +<!ENTITY vdash "⊢"> +<!ENTITY RightTee "⊢"> +<!ENTITY dashv "⊣"> +<!ENTITY LeftTee "⊣"> +<!ENTITY top "⊤"> +<!ENTITY DownTee "⊤"> +<!ENTITY bottom "⊥"> +<!ENTITY bot "⊥"> +<!ENTITY perp "⊥"> +<!ENTITY UpTee "⊥"> +<!ENTITY models "⊧"> +<!ENTITY vDash "⊨"> +<!ENTITY DoubleRightTee "⊨"> +<!ENTITY Vdash "⊩"> +<!ENTITY Vvdash "⊪"> +<!ENTITY VDash "⊫"> +<!ENTITY nvdash "⊬"> +<!ENTITY nvDash "⊭"> +<!ENTITY nVdash "⊮"> +<!ENTITY nVDash "⊯"> +<!ENTITY prurel "⊰"> +<!ENTITY vltri "⊲"> +<!ENTITY vartriangleleft "⊲"> +<!ENTITY LeftTriangle "⊲"> +<!ENTITY vrtri "⊳"> +<!ENTITY vartriangleright "⊳"> +<!ENTITY 
RightTriangle "⊳"> +<!ENTITY ltrie "⊴"> +<!ENTITY trianglelefteq "⊴"> +<!ENTITY LeftTriangleEqual "⊴"> +<!ENTITY nvltrie "⊴⃒"> +<!ENTITY rtrie "⊵"> +<!ENTITY trianglerighteq "⊵"> +<!ENTITY RightTriangleEqual "⊵"> +<!ENTITY nvrtrie "⊵⃒"> +<!ENTITY origof "⊶"> +<!ENTITY imof "⊷"> +<!ENTITY mumap "⊸"> +<!ENTITY multimap "⊸"> +<!ENTITY hercon "⊹"> +<!ENTITY intcal "⊺"> +<!ENTITY intercal "⊺"> +<!ENTITY veebar "⊻"> +<!ENTITY barvee "⊽"> +<!ENTITY angrtvb "⊾"> +<!ENTITY lrtri "⊿"> +<!ENTITY xwedge "⋀"> +<!ENTITY Wedge "⋀"> +<!ENTITY bigwedge "⋀"> +<!ENTITY xvee "⋁"> +<!ENTITY Vee "⋁"> +<!ENTITY bigvee "⋁"> +<!ENTITY xcap "⋂"> +<!ENTITY Intersection "⋂"> +<!ENTITY bigcap "⋂"> +<!ENTITY xcup "⋃"> +<!ENTITY Union "⋃"> +<!ENTITY bigcup "⋃"> +<!ENTITY diam "⋄"> +<!ENTITY diamond "⋄"> +<!ENTITY Diamond "⋄"> +<!ENTITY sdot "⋅"> +<!ENTITY sstarf "⋆"> +<!ENTITY Star "⋆"> +<!ENTITY divonx "⋇"> +<!ENTITY divideontimes "⋇"> +<!ENTITY bowtie "⋈"> +<!ENTITY ltimes "⋉"> +<!ENTITY rtimes "⋊"> +<!ENTITY lthree "⋋"> +<!ENTITY leftthreetimes "⋋"> +<!ENTITY rthree "⋌"> +<!ENTITY rightthreetimes "⋌"> +<!ENTITY bsime "⋍"> +<!ENTITY backsimeq "⋍"> +<!ENTITY cuvee "⋎"> +<!ENTITY curlyvee "⋎"> +<!ENTITY cuwed "⋏"> +<!ENTITY curlywedge "⋏"> +<!ENTITY Sub "⋐"> +<!ENTITY Subset "⋐"> +<!ENTITY Sup "⋑"> +<!ENTITY Supset "⋑"> +<!ENTITY Cap "⋒"> +<!ENTITY Cup "⋓"> +<!ENTITY fork "⋔"> +<!ENTITY pitchfork "⋔"> +<!ENTITY epar "⋕"> +<!ENTITY ltdot "⋖"> +<!ENTITY lessdot "⋖"> +<!ENTITY gtdot "⋗"> +<!ENTITY gtrdot "⋗"> +<!ENTITY Ll "⋘"> +<!ENTITY nLl "⋘̸"> +<!ENTITY Gg "⋙"> +<!ENTITY ggg "⋙"> +<!ENTITY nGg "⋙̸"> +<!ENTITY leg "⋚"> +<!ENTITY LessEqualGreater "⋚"> +<!ENTITY lesseqgtr "⋚"> +<!ENTITY lesg "⋚︀"> +<!ENTITY gel "⋛"> +<!ENTITY gtreqless "⋛"> +<!ENTITY GreaterEqualLess "⋛"> +<!ENTITY gesl "⋛︀"> +<!ENTITY cuepr "⋞"> +<!ENTITY curlyeqprec "⋞"> +<!ENTITY cuesc "⋟"> +<!ENTITY curlyeqsucc "⋟"> +<!ENTITY nprcue "⋠"> +<!ENTITY NotPrecedesSlantEqual "⋠"> +<!ENTITY nsccue "⋡"> +<!ENTITY NotSucceedsSlantEqual 
"⋡"> +<!ENTITY nsqsube "⋢"> +<!ENTITY NotSquareSubsetEqual "⋢"> +<!ENTITY nsqsupe "⋣"> +<!ENTITY NotSquareSupersetEqual "⋣"> +<!ENTITY lnsim "⋦"> +<!ENTITY gnsim "⋧"> +<!ENTITY prnsim "⋨"> +<!ENTITY precnsim "⋨"> +<!ENTITY scnsim "⋩"> +<!ENTITY succnsim "⋩"> +<!ENTITY nltri "⋪"> +<!ENTITY ntriangleleft "⋪"> +<!ENTITY NotLeftTriangle "⋪"> +<!ENTITY nrtri "⋫"> +<!ENTITY ntriangleright "⋫"> +<!ENTITY NotRightTriangle "⋫"> +<!ENTITY nltrie "⋬"> +<!ENTITY ntrianglelefteq "⋬"> +<!ENTITY NotLeftTriangleEqual "⋬"> +<!ENTITY nrtrie "⋭"> +<!ENTITY ntrianglerighteq "⋭"> +<!ENTITY NotRightTriangleEqual "⋭"> +<!ENTITY vellip "⋮"> +<!ENTITY ctdot "⋯"> +<!ENTITY utdot "⋰"> +<!ENTITY dtdot "⋱"> +<!ENTITY disin "⋲"> +<!ENTITY isinsv "⋳"> +<!ENTITY isins "⋴"> +<!ENTITY isindot "⋵"> +<!ENTITY notindot "⋵̸"> +<!ENTITY notinvc "⋶"> +<!ENTITY notinvb "⋷"> +<!ENTITY isinE "⋹"> +<!ENTITY notinE "⋹̸"> +<!ENTITY nisd "⋺"> +<!ENTITY xnis "⋻"> +<!ENTITY nis "⋼"> +<!ENTITY notnivc "⋽"> +<!ENTITY notnivb "⋾"> +<!ENTITY barwed "⌅"> +<!ENTITY barwedge "⌅"> +<!ENTITY Barwed "⌆"> +<!ENTITY doublebarwedge "⌆"> +<!ENTITY lceil "⌈"> +<!ENTITY LeftCeiling "⌈"> +<!ENTITY rceil "⌉"> +<!ENTITY RightCeiling "⌉"> +<!ENTITY lfloor "⌊"> +<!ENTITY LeftFloor "⌊"> +<!ENTITY rfloor "⌋"> +<!ENTITY RightFloor "⌋"> +<!ENTITY drcrop "⌌"> +<!ENTITY dlcrop "⌍"> +<!ENTITY urcrop "⌎"> +<!ENTITY ulcrop "⌏"> +<!ENTITY bnot "⌐"> +<!ENTITY profline "⌒"> +<!ENTITY profsurf "⌓"> +<!ENTITY telrec "⌕"> +<!ENTITY target "⌖"> +<!ENTITY ulcorn "⌜"> +<!ENTITY ulcorner "⌜"> +<!ENTITY urcorn "⌝"> +<!ENTITY urcorner "⌝"> +<!ENTITY dlcorn "⌞"> +<!ENTITY llcorner "⌞"> +<!ENTITY drcorn "⌟"> +<!ENTITY lrcorner "⌟"> +<!ENTITY frown "⌢"> +<!ENTITY sfrown "⌢"> +<!ENTITY smile "⌣"> +<!ENTITY ssmile "⌣"> +<!ENTITY cylcty "⌭"> +<!ENTITY profalar "⌮"> +<!ENTITY topbot "⌶"> +<!ENTITY ovbar "⌽"> +<!ENTITY solbar "⌿"> +<!ENTITY angzarr "⍼"> +<!ENTITY lmoust "⎰"> +<!ENTITY lmoustache "⎰"> +<!ENTITY rmoust "⎱"> +<!ENTITY rmoustache "⎱"> +<!ENTITY tbrk 
"⎴"> +<!ENTITY OverBracket "⎴"> +<!ENTITY bbrk "⎵"> +<!ENTITY UnderBracket "⎵"> +<!ENTITY bbrktbrk "⎶"> +<!ENTITY OverParenthesis "⏜"> +<!ENTITY UnderParenthesis "⏝"> +<!ENTITY OverBrace "⏞"> +<!ENTITY UnderBrace "⏟"> +<!ENTITY trpezium "⏢"> +<!ENTITY elinters "⏧"> +<!ENTITY blank "␣"> +<!ENTITY oS "Ⓢ"> +<!ENTITY circledS "Ⓢ"> +<!ENTITY boxh "─"> +<!ENTITY HorizontalLine "─"> +<!ENTITY boxv "│"> +<!ENTITY boxdr "┌"> +<!ENTITY boxdl "┐"> +<!ENTITY boxur "└"> +<!ENTITY boxul "┘"> +<!ENTITY boxvr "├"> +<!ENTITY boxvl "┤"> +<!ENTITY boxhd "┬"> +<!ENTITY boxhu "┴"> +<!ENTITY boxvh "┼"> +<!ENTITY boxH "═"> +<!ENTITY boxV "║"> +<!ENTITY boxdR "╒"> +<!ENTITY boxDr "╓"> +<!ENTITY boxDR "╔"> +<!ENTITY boxdL "╕"> +<!ENTITY boxDl "╖"> +<!ENTITY boxDL "╗"> +<!ENTITY boxuR "╘"> +<!ENTITY boxUr "╙"> +<!ENTITY boxUR "╚"> +<!ENTITY boxuL "╛"> +<!ENTITY boxUl "╜"> +<!ENTITY boxUL "╝"> +<!ENTITY boxvR "╞"> +<!ENTITY boxVr "╟"> +<!ENTITY boxVR "╠"> +<!ENTITY boxvL "╡"> +<!ENTITY boxVl "╢"> +<!ENTITY boxVL "╣"> +<!ENTITY boxHd "╤"> +<!ENTITY boxhD "╥"> +<!ENTITY boxHD "╦"> +<!ENTITY boxHu "╧"> +<!ENTITY boxhU "╨"> +<!ENTITY boxHU "╩"> +<!ENTITY boxvH "╪"> +<!ENTITY boxVh "╫"> +<!ENTITY boxVH "╬"> +<!ENTITY uhblk "▀"> +<!ENTITY lhblk "▄"> +<!ENTITY block "█"> +<!ENTITY blk14 "░"> +<!ENTITY blk12 "▒"> +<!ENTITY blk34 "▓"> +<!ENTITY squ "□"> +<!ENTITY square "□"> +<!ENTITY Square "□"> +<!ENTITY squf "▪"> +<!ENTITY squarf "▪"> +<!ENTITY blacksquare "▪"> +<!ENTITY FilledVerySmallSquare "▪"> +<!ENTITY EmptyVerySmallSquare "▫"> +<!ENTITY rect "▭"> +<!ENTITY marker "▮"> +<!ENTITY fltns "▱"> +<!ENTITY xutri "△"> +<!ENTITY bigtriangleup "△"> +<!ENTITY utrif "▴"> +<!ENTITY blacktriangle "▴"> +<!ENTITY utri "▵"> +<!ENTITY triangle "▵"> +<!ENTITY rtrif "▸"> +<!ENTITY blacktriangleright "▸"> +<!ENTITY rtri "▹"> +<!ENTITY triangleright "▹"> +<!ENTITY xdtri "▽"> +<!ENTITY bigtriangledown "▽"> +<!ENTITY dtrif "▾"> +<!ENTITY blacktriangledown "▾"> +<!ENTITY dtri "▿"> +<!ENTITY triangledown "▿"> 
+<!ENTITY ltrif "◂"> +<!ENTITY blacktriangleleft "◂"> +<!ENTITY ltri "◃"> +<!ENTITY triangleleft "◃"> +<!ENTITY loz "◊"> +<!ENTITY lozenge "◊"> +<!ENTITY cir "○"> +<!ENTITY tridot "◬"> +<!ENTITY xcirc "◯"> +<!ENTITY bigcirc "◯"> +<!ENTITY ultri "◸"> +<!ENTITY urtri "◹"> +<!ENTITY lltri "◺"> +<!ENTITY EmptySmallSquare "◻"> +<!ENTITY FilledSmallSquare "◼"> +<!ENTITY starf "★"> +<!ENTITY bigstar "★"> +<!ENTITY star "☆"> +<!ENTITY phone "☎"> +<!ENTITY female "♀"> +<!ENTITY male "♂"> +<!ENTITY spades "♠"> +<!ENTITY spadesuit "♠"> +<!ENTITY clubs "♣"> +<!ENTITY clubsuit "♣"> +<!ENTITY hearts "♥"> +<!ENTITY heartsuit "♥"> +<!ENTITY diams "♦"> +<!ENTITY diamondsuit "♦"> +<!ENTITY sung "♪"> +<!ENTITY flat "♭"> +<!ENTITY natur "♮"> +<!ENTITY natural "♮"> +<!ENTITY sharp "♯"> +<!ENTITY check "✓"> +<!ENTITY checkmark "✓"> +<!ENTITY cross "✗"> +<!ENTITY malt "✠"> +<!ENTITY maltese "✠"> +<!ENTITY sext "✶"> +<!ENTITY VerticalSeparator "❘"> +<!ENTITY lbbrk "❲"> +<!ENTITY rbbrk "❳"> +<!ENTITY bsolhsub "⟈"> +<!ENTITY suphsol "⟉"> +<!ENTITY lobrk "⟦"> +<!ENTITY LeftDoubleBracket "⟦"> +<!ENTITY robrk "⟧"> +<!ENTITY RightDoubleBracket "⟧"> +<!ENTITY lang "⟨"> +<!ENTITY LeftAngleBracket "⟨"> +<!ENTITY langle "⟨"> +<!ENTITY rang "⟩"> +<!ENTITY RightAngleBracket "⟩"> +<!ENTITY rangle "⟩"> +<!ENTITY Lang "⟪"> +<!ENTITY Rang "⟫"> +<!ENTITY loang "⟬"> +<!ENTITY roang "⟭"> +<!ENTITY xlarr "⟵"> +<!ENTITY longleftarrow "⟵"> +<!ENTITY LongLeftArrow "⟵"> +<!ENTITY xrarr "⟶"> +<!ENTITY longrightarrow "⟶"> +<!ENTITY LongRightArrow "⟶"> +<!ENTITY xharr "⟷"> +<!ENTITY longleftrightarrow "⟷"> +<!ENTITY LongLeftRightArrow "⟷"> +<!ENTITY xlArr "⟸"> +<!ENTITY Longleftarrow "⟸"> +<!ENTITY DoubleLongLeftArrow "⟸"> +<!ENTITY xrArr "⟹"> +<!ENTITY Longrightarrow "⟹"> +<!ENTITY DoubleLongRightArrow "⟹"> +<!ENTITY xhArr "⟺"> +<!ENTITY Longleftrightarrow "⟺"> +<!ENTITY DoubleLongLeftRightArrow "⟺"> +<!ENTITY xmap "⟼"> +<!ENTITY longmapsto "⟼"> +<!ENTITY dzigrarr "⟿"> +<!ENTITY nvlArr "⤂"> +<!ENTITY nvrArr "⤃"> 
+<!ENTITY nvHarr "⤄"> +<!ENTITY Map "⤅"> +<!ENTITY lbarr "⤌"> +<!ENTITY rbarr "⤍"> +<!ENTITY bkarow "⤍"> +<!ENTITY lBarr "⤎"> +<!ENTITY rBarr "⤏"> +<!ENTITY dbkarow "⤏"> +<!ENTITY RBarr "⤐"> +<!ENTITY drbkarow "⤐"> +<!ENTITY DDotrahd "⤑"> +<!ENTITY UpArrowBar "⤒"> +<!ENTITY DownArrowBar "⤓"> +<!ENTITY Rarrtl "⤖"> +<!ENTITY latail "⤙"> +<!ENTITY ratail "⤚"> +<!ENTITY lAtail "⤛"> +<!ENTITY rAtail "⤜"> +<!ENTITY larrfs "⤝"> +<!ENTITY rarrfs "⤞"> +<!ENTITY larrbfs "⤟"> +<!ENTITY rarrbfs "⤠"> +<!ENTITY nwarhk "⤣"> +<!ENTITY nearhk "⤤"> +<!ENTITY searhk "⤥"> +<!ENTITY hksearow "⤥"> +<!ENTITY swarhk "⤦"> +<!ENTITY hkswarow "⤦"> +<!ENTITY nwnear "⤧"> +<!ENTITY nesear "⤨"> +<!ENTITY toea "⤨"> +<!ENTITY seswar "⤩"> +<!ENTITY tosa "⤩"> +<!ENTITY swnwar "⤪"> +<!ENTITY rarrc "⤳"> +<!ENTITY nrarrc "⤳̸"> +<!ENTITY cudarrr "⤵"> +<!ENTITY ldca "⤶"> +<!ENTITY rdca "⤷"> +<!ENTITY cudarrl "⤸"> +<!ENTITY larrpl "⤹"> +<!ENTITY curarrm "⤼"> +<!ENTITY cularrp "⤽"> +<!ENTITY rarrpl "⥅"> +<!ENTITY harrcir "⥈"> +<!ENTITY Uarrocir "⥉"> +<!ENTITY lurdshar "⥊"> +<!ENTITY ldrushar "⥋"> +<!ENTITY LeftRightVector "⥎"> +<!ENTITY RightUpDownVector "⥏"> +<!ENTITY DownLeftRightVector "⥐"> +<!ENTITY LeftUpDownVector "⥑"> +<!ENTITY LeftVectorBar "⥒"> +<!ENTITY RightVectorBar "⥓"> +<!ENTITY RightUpVectorBar "⥔"> +<!ENTITY RightDownVectorBar "⥕"> +<!ENTITY DownLeftVectorBar "⥖"> +<!ENTITY DownRightVectorBar "⥗"> +<!ENTITY LeftUpVectorBar "⥘"> +<!ENTITY LeftDownVectorBar "⥙"> +<!ENTITY LeftTeeVector "⥚"> +<!ENTITY RightTeeVector "⥛"> +<!ENTITY RightUpTeeVector "⥜"> +<!ENTITY RightDownTeeVector "⥝"> +<!ENTITY DownLeftTeeVector "⥞"> +<!ENTITY DownRightTeeVector "⥟"> +<!ENTITY LeftUpTeeVector "⥠"> +<!ENTITY LeftDownTeeVector "⥡"> +<!ENTITY lHar "⥢"> +<!ENTITY uHar "⥣"> +<!ENTITY rHar "⥤"> +<!ENTITY dHar "⥥"> +<!ENTITY luruhar "⥦"> +<!ENTITY ldrdhar "⥧"> +<!ENTITY ruluhar "⥨"> +<!ENTITY rdldhar "⥩"> +<!ENTITY lharul "⥪"> +<!ENTITY llhard "⥫"> +<!ENTITY rharul "⥬"> +<!ENTITY lrhard "⥭"> +<!ENTITY udhar "⥮"> 
+<!ENTITY UpEquilibrium "⥮"> +<!ENTITY duhar "⥯"> +<!ENTITY ReverseUpEquilibrium "⥯"> +<!ENTITY RoundImplies "⥰"> +<!ENTITY erarr "⥱"> +<!ENTITY simrarr "⥲"> +<!ENTITY larrsim "⥳"> +<!ENTITY rarrsim "⥴"> +<!ENTITY rarrap "⥵"> +<!ENTITY ltlarr "⥶"> +<!ENTITY gtrarr "⥸"> +<!ENTITY subrarr "⥹"> +<!ENTITY suplarr "⥻"> +<!ENTITY lfisht "⥼"> +<!ENTITY rfisht "⥽"> +<!ENTITY ufisht "⥾"> +<!ENTITY dfisht "⥿"> +<!ENTITY lopar "⦅"> +<!ENTITY ropar "⦆"> +<!ENTITY lbrke "⦋"> +<!ENTITY rbrke "⦌"> +<!ENTITY lbrkslu "⦍"> +<!ENTITY rbrksld "⦎"> +<!ENTITY lbrksld "⦏"> +<!ENTITY rbrkslu "⦐"> +<!ENTITY langd "⦑"> +<!ENTITY rangd "⦒"> +<!ENTITY lparlt "⦓"> +<!ENTITY rpargt "⦔"> +<!ENTITY gtlPar "⦕"> +<!ENTITY ltrPar "⦖"> +<!ENTITY vzigzag "⦚"> +<!ENTITY vangrt "⦜"> +<!ENTITY angrtvbd "⦝"> +<!ENTITY ange "⦤"> +<!ENTITY range "⦥"> +<!ENTITY dwangle "⦦"> +<!ENTITY uwangle "⦧"> +<!ENTITY angmsdaa "⦨"> +<!ENTITY angmsdab "⦩"> +<!ENTITY angmsdac "⦪"> +<!ENTITY angmsdad "⦫"> +<!ENTITY angmsdae "⦬"> +<!ENTITY angmsdaf "⦭"> +<!ENTITY angmsdag "⦮"> +<!ENTITY angmsdah "⦯"> +<!ENTITY bemptyv "⦰"> +<!ENTITY demptyv "⦱"> +<!ENTITY cemptyv "⦲"> +<!ENTITY raemptyv "⦳"> +<!ENTITY laemptyv "⦴"> +<!ENTITY ohbar "⦵"> +<!ENTITY omid "⦶"> +<!ENTITY opar "⦷"> +<!ENTITY operp "⦹"> +<!ENTITY olcross "⦻"> +<!ENTITY odsold "⦼"> +<!ENTITY olcir "⦾"> +<!ENTITY ofcir "⦿"> +<!ENTITY olt "⧀"> +<!ENTITY ogt "⧁"> +<!ENTITY cirscir "⧂"> +<!ENTITY cirE "⧃"> +<!ENTITY solb "⧄"> +<!ENTITY bsolb "⧅"> +<!ENTITY boxbox "⧉"> +<!ENTITY trisb "⧍"> +<!ENTITY rtriltri "⧎"> +<!ENTITY LeftTriangleBar "⧏"> +<!ENTITY NotLeftTriangleBar "⧏̸"> +<!ENTITY RightTriangleBar "⧐"> +<!ENTITY NotRightTriangleBar "⧐̸"> +<!ENTITY iinfin "⧜"> +<!ENTITY infintie "⧝"> +<!ENTITY nvinfin "⧞"> +<!ENTITY eparsl "⧣"> +<!ENTITY smeparsl "⧤"> +<!ENTITY eqvparsl "⧥"> +<!ENTITY lozf "⧫"> +<!ENTITY blacklozenge "⧫"> +<!ENTITY RuleDelayed "⧴"> +<!ENTITY dsol "⧶"> +<!ENTITY xodot "⨀"> +<!ENTITY bigodot "⨀"> +<!ENTITY xoplus "⨁"> +<!ENTITY bigoplus "⨁"> 
+<!ENTITY xotime "⨂"> +<!ENTITY bigotimes "⨂"> +<!ENTITY xuplus "⨄"> +<!ENTITY biguplus "⨄"> +<!ENTITY xsqcup "⨆"> +<!ENTITY bigsqcup "⨆"> +<!ENTITY qint "⨌"> +<!ENTITY iiiint "⨌"> +<!ENTITY fpartint "⨍"> +<!ENTITY cirfnint "⨐"> +<!ENTITY awint "⨑"> +<!ENTITY rppolint "⨒"> +<!ENTITY scpolint "⨓"> +<!ENTITY npolint "⨔"> +<!ENTITY pointint "⨕"> +<!ENTITY quatint "⨖"> +<!ENTITY intlarhk "⨗"> +<!ENTITY pluscir "⨢"> +<!ENTITY plusacir "⨣"> +<!ENTITY simplus "⨤"> +<!ENTITY plusdu "⨥"> +<!ENTITY plussim "⨦"> +<!ENTITY plustwo "⨧"> +<!ENTITY mcomma "⨩"> +<!ENTITY minusdu "⨪"> +<!ENTITY loplus "⨭"> +<!ENTITY roplus "⨮"> +<!ENTITY Cross "⨯"> +<!ENTITY timesd "⨰"> +<!ENTITY timesbar "⨱"> +<!ENTITY smashp "⨳"> +<!ENTITY lotimes "⨴"> +<!ENTITY rotimes "⨵"> +<!ENTITY otimesas "⨶"> +<!ENTITY Otimes "⨷"> +<!ENTITY odiv "⨸"> +<!ENTITY triplus "⨹"> +<!ENTITY triminus "⨺"> +<!ENTITY tritime "⨻"> +<!ENTITY iprod "⨼"> +<!ENTITY intprod "⨼"> +<!ENTITY amalg "⨿"> +<!ENTITY capdot "⩀"> +<!ENTITY ncup "⩂"> +<!ENTITY ncap "⩃"> +<!ENTITY capand "⩄"> +<!ENTITY cupor "⩅"> +<!ENTITY cupcap "⩆"> +<!ENTITY capcup "⩇"> +<!ENTITY cupbrcap "⩈"> +<!ENTITY capbrcup "⩉"> +<!ENTITY cupcup "⩊"> +<!ENTITY capcap "⩋"> +<!ENTITY ccups "⩌"> +<!ENTITY ccaps "⩍"> +<!ENTITY ccupssm "⩐"> +<!ENTITY And "⩓"> +<!ENTITY Or "⩔"> +<!ENTITY andand "⩕"> +<!ENTITY oror "⩖"> +<!ENTITY orslope "⩗"> +<!ENTITY andslope "⩘"> +<!ENTITY andv "⩚"> +<!ENTITY orv "⩛"> +<!ENTITY andd "⩜"> +<!ENTITY ord "⩝"> +<!ENTITY wedbar "⩟"> +<!ENTITY sdote "⩦"> +<!ENTITY simdot "⩪"> +<!ENTITY congdot "⩭"> +<!ENTITY ncongdot "⩭̸"> +<!ENTITY easter "⩮"> +<!ENTITY apacir "⩯"> +<!ENTITY apE "⩰"> +<!ENTITY napE "⩰̸"> +<!ENTITY eplus "⩱"> +<!ENTITY pluse "⩲"> +<!ENTITY Esim "⩳"> +<!ENTITY Colone "⩴"> +<!ENTITY Equal "⩵"> +<!ENTITY eDDot "⩷"> +<!ENTITY ddotseq "⩷"> +<!ENTITY equivDD "⩸"> +<!ENTITY ltcir "⩹"> +<!ENTITY gtcir "⩺"> +<!ENTITY ltquest "⩻"> +<!ENTITY gtquest "⩼"> +<!ENTITY les "⩽"> +<!ENTITY LessSlantEqual "⩽"> +<!ENTITY leqslant "⩽"> 
+<!ENTITY nles "⩽̸"> +<!ENTITY NotLessSlantEqual "⩽̸"> +<!ENTITY nleqslant "⩽̸"> +<!ENTITY ges "⩾"> +<!ENTITY GreaterSlantEqual "⩾"> +<!ENTITY geqslant "⩾"> +<!ENTITY nges "⩾̸"> +<!ENTITY NotGreaterSlantEqual "⩾̸"> +<!ENTITY ngeqslant "⩾̸"> +<!ENTITY lesdot "⩿"> +<!ENTITY gesdot "⪀"> +<!ENTITY lesdoto "⪁"> +<!ENTITY gesdoto "⪂"> +<!ENTITY lesdotor "⪃"> +<!ENTITY gesdotol "⪄"> +<!ENTITY lap "⪅"> +<!ENTITY lessapprox "⪅"> +<!ENTITY gap "⪆"> +<!ENTITY gtrapprox "⪆"> +<!ENTITY lne "⪇"> +<!ENTITY lneq "⪇"> +<!ENTITY gne "⪈"> +<!ENTITY gneq "⪈"> +<!ENTITY lnap "⪉"> +<!ENTITY lnapprox "⪉"> +<!ENTITY gnap "⪊"> +<!ENTITY gnapprox "⪊"> +<!ENTITY lEg "⪋"> +<!ENTITY lesseqqgtr "⪋"> +<!ENTITY gEl "⪌"> +<!ENTITY gtreqqless "⪌"> +<!ENTITY lsime "⪍"> +<!ENTITY gsime "⪎"> +<!ENTITY lsimg "⪏"> +<!ENTITY gsiml "⪐"> +<!ENTITY lgE "⪑"> +<!ENTITY glE "⪒"> +<!ENTITY lesges "⪓"> +<!ENTITY gesles "⪔"> +<!ENTITY els "⪕"> +<!ENTITY eqslantless "⪕"> +<!ENTITY egs "⪖"> +<!ENTITY eqslantgtr "⪖"> +<!ENTITY elsdot "⪗"> +<!ENTITY egsdot "⪘"> +<!ENTITY el "⪙"> +<!ENTITY eg "⪚"> +<!ENTITY siml "⪝"> +<!ENTITY simg "⪞"> +<!ENTITY simlE "⪟"> +<!ENTITY simgE "⪠"> +<!ENTITY LessLess "⪡"> +<!ENTITY NotNestedLessLess "⪡̸"> +<!ENTITY GreaterGreater "⪢"> +<!ENTITY NotNestedGreaterGreater "⪢̸"> +<!ENTITY glj "⪤"> +<!ENTITY gla "⪥"> +<!ENTITY ltcc "⪦"> +<!ENTITY gtcc "⪧"> +<!ENTITY lescc "⪨"> +<!ENTITY gescc "⪩"> +<!ENTITY smt "⪪"> +<!ENTITY lat "⪫"> +<!ENTITY smte "⪬"> +<!ENTITY smtes "⪬︀"> +<!ENTITY late "⪭"> +<!ENTITY lates "⪭︀"> +<!ENTITY bumpE "⪮"> +<!ENTITY pre "⪯"> +<!ENTITY preceq "⪯"> +<!ENTITY PrecedesEqual "⪯"> +<!ENTITY npre "⪯̸"> +<!ENTITY npreceq "⪯̸"> +<!ENTITY NotPrecedesEqual "⪯̸"> +<!ENTITY sce "⪰"> +<!ENTITY succeq "⪰"> +<!ENTITY SucceedsEqual "⪰"> +<!ENTITY nsce "⪰̸"> +<!ENTITY nsucceq "⪰̸"> +<!ENTITY NotSucceedsEqual "⪰̸"> +<!ENTITY prE "⪳"> +<!ENTITY scE "⪴"> +<!ENTITY prnE "⪵"> +<!ENTITY precneqq "⪵"> +<!ENTITY scnE "⪶"> +<!ENTITY succneqq "⪶"> +<!ENTITY prap "⪷"> +<!ENTITY precapprox 
"⪷"> +<!ENTITY scap "⪸"> +<!ENTITY succapprox "⪸"> +<!ENTITY prnap "⪹"> +<!ENTITY precnapprox "⪹"> +<!ENTITY scnap "⪺"> +<!ENTITY succnapprox "⪺"> +<!ENTITY Pr "⪻"> +<!ENTITY Sc "⪼"> +<!ENTITY subdot "⪽"> +<!ENTITY supdot "⪾"> +<!ENTITY subplus "⪿"> +<!ENTITY supplus "⫀"> +<!ENTITY submult "⫁"> +<!ENTITY supmult "⫂"> +<!ENTITY subedot "⫃"> +<!ENTITY supedot "⫄"> +<!ENTITY subE "⫅"> +<!ENTITY subseteqq "⫅"> +<!ENTITY nsubE "⫅̸"> +<!ENTITY nsubseteqq "⫅̸"> +<!ENTITY supE "⫆"> +<!ENTITY supseteqq "⫆"> +<!ENTITY nsupE "⫆̸"> +<!ENTITY nsupseteqq "⫆̸"> +<!ENTITY subsim "⫇"> +<!ENTITY supsim "⫈"> +<!ENTITY subnE "⫋"> +<!ENTITY subsetneqq "⫋"> +<!ENTITY vsubnE "⫋︀"> +<!ENTITY varsubsetneqq "⫋︀"> +<!ENTITY supnE "⫌"> +<!ENTITY supsetneqq "⫌"> +<!ENTITY vsupnE "⫌︀"> +<!ENTITY varsupsetneqq "⫌︀"> +<!ENTITY csub "⫏"> +<!ENTITY csup "⫐"> +<!ENTITY csube "⫑"> +<!ENTITY csupe "⫒"> +<!ENTITY subsup "⫓"> +<!ENTITY supsub "⫔"> +<!ENTITY subsub "⫕"> +<!ENTITY supsup "⫖"> +<!ENTITY suphsub "⫗"> +<!ENTITY supdsub "⫘"> +<!ENTITY forkv "⫙"> +<!ENTITY topfork "⫚"> +<!ENTITY mlcp "⫛"> +<!ENTITY Dashv "⫤"> +<!ENTITY DoubleLeftTee "⫤"> +<!ENTITY Vdashl "⫦"> +<!ENTITY Barv "⫧"> +<!ENTITY vBar "⫨"> +<!ENTITY vBarv "⫩"> +<!ENTITY Vbar "⫫"> +<!ENTITY Not "⫬"> +<!ENTITY bNot "⫭"> +<!ENTITY rnmid "⫮"> +<!ENTITY cirmid "⫯"> +<!ENTITY midcir "⫰"> +<!ENTITY topcir "⫱"> +<!ENTITY nhpar "⫲"> +<!ENTITY parsim "⫳"> +<!ENTITY parsl "⫽"> +<!ENTITY nparsl "⫽⃥"> +<!ENTITY fflig "ff"> +<!ENTITY filig "fi"> +<!ENTITY fllig "fl"> +<!ENTITY ffilig "ffi"> +<!ENTITY ffllig "ffl"> +<!ENTITY Ascr "𝒜"> +<!ENTITY Cscr "𝒞"> +<!ENTITY Dscr "𝒟"> +<!ENTITY Gscr "𝒢"> +<!ENTITY Jscr "𝒥"> +<!ENTITY Kscr "𝒦"> +<!ENTITY Nscr "𝒩"> +<!ENTITY Oscr "𝒪"> +<!ENTITY Pscr "𝒫"> +<!ENTITY Qscr "𝒬"> +<!ENTITY Sscr "𝒮"> +<!ENTITY Tscr "𝒯"> +<!ENTITY Uscr "𝒰"> +<!ENTITY Vscr "𝒱"> +<!ENTITY Wscr "𝒲"> +<!ENTITY Xscr "𝒳"> +<!ENTITY Yscr "𝒴"> +<!ENTITY Zscr "𝒵"> +<!ENTITY ascr "𝒶"> +<!ENTITY bscr "𝒷"> +<!ENTITY cscr "𝒸"> +<!ENTITY dscr "𝒹"> 
+<!ENTITY fscr "𝒻"> +<!ENTITY hscr "𝒽"> +<!ENTITY iscr "𝒾"> +<!ENTITY jscr "𝒿"> +<!ENTITY kscr "𝓀"> +<!ENTITY lscr "𝓁"> +<!ENTITY mscr "𝓂"> +<!ENTITY nscr "𝓃"> +<!ENTITY pscr "𝓅"> +<!ENTITY qscr "𝓆"> +<!ENTITY rscr "𝓇"> +<!ENTITY sscr "𝓈"> +<!ENTITY tscr "𝓉"> +<!ENTITY uscr "𝓊"> +<!ENTITY vscr "𝓋"> +<!ENTITY wscr "𝓌"> +<!ENTITY xscr "𝓍"> +<!ENTITY yscr "𝓎"> +<!ENTITY zscr "𝓏"> +<!ENTITY Afr "𝔄"> +<!ENTITY Bfr "𝔅"> +<!ENTITY Dfr "𝔇"> +<!ENTITY Efr "𝔈"> +<!ENTITY Ffr "𝔉"> +<!ENTITY Gfr "𝔊"> +<!ENTITY Jfr "𝔍"> +<!ENTITY Kfr "𝔎"> +<!ENTITY Lfr "𝔏"> +<!ENTITY Mfr "𝔐"> +<!ENTITY Nfr "𝔑"> +<!ENTITY Ofr "𝔒"> +<!ENTITY Pfr "𝔓"> +<!ENTITY Qfr "𝔔"> +<!ENTITY Sfr "𝔖"> +<!ENTITY Tfr "𝔗"> +<!ENTITY Ufr "𝔘"> +<!ENTITY Vfr "𝔙"> +<!ENTITY Wfr "𝔚"> +<!ENTITY Xfr "𝔛"> +<!ENTITY Yfr "𝔜"> +<!ENTITY afr "𝔞"> +<!ENTITY bfr "𝔟"> +<!ENTITY cfr "𝔠"> +<!ENTITY dfr "𝔡"> +<!ENTITY efr "𝔢"> +<!ENTITY ffr "𝔣"> +<!ENTITY gfr "𝔤"> +<!ENTITY hfr "𝔥"> +<!ENTITY ifr "𝔦"> +<!ENTITY jfr "𝔧"> +<!ENTITY kfr "𝔨"> +<!ENTITY lfr "𝔩"> +<!ENTITY mfr "𝔪"> +<!ENTITY nfr "𝔫"> +<!ENTITY ofr "𝔬"> +<!ENTITY pfr "𝔭"> +<!ENTITY qfr "𝔮"> +<!ENTITY rfr "𝔯"> +<!ENTITY sfr "𝔰"> +<!ENTITY tfr "𝔱"> +<!ENTITY ufr "𝔲"> +<!ENTITY vfr "𝔳"> +<!ENTITY wfr "𝔴"> +<!ENTITY xfr "𝔵"> +<!ENTITY yfr "𝔶"> +<!ENTITY zfr "𝔷"> +<!ENTITY Aopf "𝔸"> +<!ENTITY Bopf "𝔹"> +<!ENTITY Dopf "𝔻"> +<!ENTITY Eopf "𝔼"> +<!ENTITY Fopf "𝔽"> +<!ENTITY Gopf "𝔾"> +<!ENTITY Iopf "𝕀"> +<!ENTITY Jopf "𝕁"> +<!ENTITY Kopf "𝕂"> +<!ENTITY Lopf "𝕃"> +<!ENTITY Mopf "𝕄"> +<!ENTITY Oopf "𝕆"> +<!ENTITY Sopf "𝕊"> +<!ENTITY Topf "𝕋"> +<!ENTITY Uopf "𝕌"> +<!ENTITY Vopf "𝕍"> +<!ENTITY Wopf "𝕎"> +<!ENTITY Xopf "𝕏"> +<!ENTITY Yopf "𝕐"> +<!ENTITY aopf "𝕒"> +<!ENTITY bopf "𝕓"> +<!ENTITY copf "𝕔"> +<!ENTITY dopf "𝕕"> +<!ENTITY eopf "𝕖"> +<!ENTITY fopf "𝕗"> +<!ENTITY gopf "𝕘"> +<!ENTITY hopf "𝕙"> +<!ENTITY iopf "𝕚"> +<!ENTITY jopf "𝕛"> +<!ENTITY kopf "𝕜"> +<!ENTITY lopf "𝕝"> +<!ENTITY mopf "𝕞"> +<!ENTITY nopf "𝕟"> +<!ENTITY oopf "𝕠"> +<!ENTITY popf "𝕡"> +<!ENTITY qopf "𝕢"> 
+<!ENTITY ropf "𝕣"> +<!ENTITY sopf "𝕤"> +<!ENTITY topf "𝕥"> +<!ENTITY uopf "𝕦"> +<!ENTITY vopf "𝕧"> +<!ENTITY wopf "𝕨"> +<!ENTITY xopf "𝕩"> +<!ENTITY yopf "𝕪"> +<!ENTITY zopf "𝕫"> diff --git a/testing/web-platform/tests/tools/manifest/commands.json b/testing/web-platform/tests/tools/manifest/commands.json new file mode 100644 index 0000000000..cef6d22473 --- /dev/null +++ b/testing/web-platform/tests/tools/manifest/commands.json @@ -0,0 +1,30 @@ +{ + "manifest": { + "path": "update.py", + "script": "run", + "parser": "create_parser", + "help": "Update the MANIFEST.json file", + "virtualenv": false + }, + "manifest-download": { + "path": "download.py", + "script": "run", + "parser": "create_parser", + "help": "Download recent pregenerated MANIFEST.json file", + "virtualenv": false + }, + "test-paths": { + "path": "testpaths.py", + "script": "run", + "parser": "create_parser", + "help": "Print test paths given a set of test ids", + "virtualenv": false + }, + "spec": { + "path": "spec.py", + "script": "run", + "parser": "create_parser", + "help": "Update the SPEC_MANIFEST.json file", + "virtualenv": false + } +} diff --git a/testing/web-platform/tests/tools/manifest/download.py b/testing/web-platform/tests/tools/manifest/download.py new file mode 100644 index 0000000000..8527fb232a --- /dev/null +++ b/testing/web-platform/tests/tools/manifest/download.py @@ -0,0 +1,191 @@ +import argparse +import bz2 +import gzip +import json +import io +import os +from datetime import datetime, timedelta +from typing import Any, Callable, List, Optional, Text +from urllib.request import urlopen + +try: + import zstandard +except ImportError: + zstandard = None + +from .utils import git + +from . 
import log + + +here = os.path.dirname(__file__) + +wpt_root = os.path.abspath(os.path.join(here, os.pardir, os.pardir)) +logger = log.get_logger() + + +def abs_path(path: Text) -> Text: + return os.path.abspath(os.path.expanduser(path)) + + +def should_download(manifest_path: Text, rebuild_time: timedelta = timedelta(days=5)) -> bool: + if not os.path.exists(manifest_path): + return True + mtime = datetime.fromtimestamp(os.path.getmtime(manifest_path)) + if mtime < datetime.now() - rebuild_time: + return True + logger.info("Skipping manifest download because existing file is recent") + return False + + +def merge_pr_tags(repo_root: Text, max_count: int = 50) -> List[Text]: + gitfunc = git(repo_root) + tags: List[Text] = [] + if gitfunc is None: + return tags + for line in gitfunc("log", "--format=%D", "--max-count=%s" % max_count).split("\n"): + for ref in line.split(", "): + if ref.startswith("tag: merge_pr_"): + tags.append(ref[5:]) + return tags + + +def score_name(name: Text) -> Optional[int]: + """Score how much we like each filename, lower wins, None rejects""" + + # Accept both ways of naming the manifest asset, even though + # there's no longer a reason to include the commit sha. 
+ if name.startswith("MANIFEST-") or name.startswith("MANIFEST."): + if zstandard and name.endswith("json.zst"): + return 1 + if name.endswith(".json.bz2"): + return 2 + if name.endswith(".json.gz"): + return 3 + return None + + +def github_url(tags: List[Text]) -> Optional[List[Text]]: + for tag in tags: + url = "https://api.github.com/repos/web-platform-tests/wpt/releases/tags/%s" % tag + try: + resp = urlopen(url) + except Exception: + logger.warning("Fetching %s failed" % url) + continue + + if resp.code != 200: + logger.warning("Fetching %s failed; got HTTP status %d" % (url, resp.code)) + continue + + try: + release = json.load(resp.fp) + except ValueError: + logger.warning("Response was not valid JSON") + return None + + candidates = [] + for item in release["assets"]: + score = score_name(item["name"]) + if score is not None: + candidates.append((score, item["browser_download_url"])) + + return [item[1] for item in sorted(candidates)] + + return None + + +def download_manifest( + manifest_path: Text, + tags_func: Callable[[], List[Text]], + url_func: Callable[[List[Text]], Optional[List[Text]]], + force: bool = False +) -> bool: + if not force and not should_download(manifest_path): + return False + + tags = tags_func() + + urls = url_func(tags) + if not urls: + logger.warning("No generated manifest found") + return False + + for url in urls: + logger.info("Downloading manifest from %s" % url) + try: + resp = urlopen(url) + except Exception: + logger.warning("Downloading pregenerated manifest failed") + continue + + if resp.code != 200: + logger.warning("Downloading pregenerated manifest failed; got HTTP status %d" % + resp.code) + continue + + if url.endswith(".zst"): + if not zstandard: + continue + try: + dctx = zstandard.ZstdDecompressor() + decompressed = dctx.decompress(resp.read()) + except OSError: + logger.warning("Failed to decompress downloaded file") + continue + elif url.endswith(".bz2"): + try: + decompressed = bz2.decompress(resp.read()) + 
except OSError: + logger.warning("Failed to decompress downloaded file") + continue + elif url.endswith(".gz"): + fileobj = io.BytesIO(resp.read()) + try: + with gzip.GzipFile(fileobj=fileobj) as gzf: + data = gzf.read() + decompressed = data + except OSError: + logger.warning("Failed to decompress downloaded file") + continue + else: + logger.warning("Unknown file extension: %s" % url) + continue + break + else: + return False + + try: + with open(manifest_path, "wb") as f: + f.write(decompressed) + except Exception: + logger.warning("Failed to write manifest") + return False + logger.info("Manifest downloaded") + return True + + +def create_parser() -> argparse.ArgumentParser: + parser = argparse.ArgumentParser() + parser.add_argument( + "-p", "--path", type=abs_path, help="Path to manifest file.") + parser.add_argument( + "--tests-root", type=abs_path, default=wpt_root, help="Path to root of tests.") + parser.add_argument( + "--force", action="store_true", + help="Always download, even if the existing manifest is recent") + return parser + + +def download_from_github(path: Text, tests_root: Text, force: bool = False) -> bool: + return download_manifest(path, lambda: merge_pr_tags(tests_root), github_url, + force=force) + + +def run(**kwargs: Any) -> int: + if kwargs["path"] is None: + path = os.path.join(kwargs["tests_root"], "MANIFEST.json") + else: + path = kwargs["path"] + success = download_from_github(path, kwargs["tests_root"], kwargs["force"]) + return 0 if success else 1 diff --git a/testing/web-platform/tests/tools/manifest/item.py b/testing/web-platform/tests/tools/manifest/item.py new file mode 100644 index 0000000000..86f7bd6020 --- /dev/null +++ b/testing/web-platform/tests/tools/manifest/item.py @@ -0,0 +1,376 @@ +import os.path +from abc import ABCMeta, abstractproperty +from inspect import isabstract +from typing import (Any, Dict, Hashable, List, Optional, Sequence, Text, Tuple, Type, + TYPE_CHECKING, Union, cast) +from urllib.parse import 
urljoin, urlparse, parse_qs + +from .utils import to_os_path + +if TYPE_CHECKING: + from .manifest import Manifest + +Fuzzy = Dict[Optional[Tuple[str, str, str]], List[int]] +PageRanges = Dict[str, List[int]] +item_types: Dict[str, Type["ManifestItem"]] = {} + + +class ManifestItemMeta(ABCMeta): + """Custom metaclass that registers all the subclasses in the + item_types dictionary according to the value of their item_type + attribute, and otherwise behaves like an ABCMeta.""" + + def __new__(cls: Type["ManifestItemMeta"], name: str, bases: Tuple[type], attrs: Dict[str, Any]) -> "ManifestItemMeta": + inst = super().__new__(cls, name, bases, attrs) + if isabstract(inst): + return inst + + assert issubclass(inst, ManifestItem) + item_type = cast(str, inst.item_type) + + item_types[item_type] = inst + + return inst + + +class ManifestItem(metaclass=ManifestItemMeta): + __slots__ = ("_tests_root", "path") + + def __init__(self, tests_root: Text, path: Text) -> None: + self._tests_root = tests_root + self.path = path + + @abstractproperty + def id(self) -> Text: + """The test's id (usually its url)""" + pass + + @abstractproperty + def item_type(self) -> str: + """The item's type""" + pass + + @property + def path_parts(self) -> Tuple[Text, ...]: + return tuple(self.path.split(os.path.sep)) + + def key(self) -> Hashable: + """A unique identifier for the test""" + return (self.item_type, self.id) + + def __eq__(self, other: Any) -> bool: + if not hasattr(other, "key"): + return False + return bool(self.key() == other.key()) + + def __hash__(self) -> int: + return hash(self.key()) + + def __repr__(self) -> str: + return f"<{self.__module__}.{self.__class__.__name__} id={self.id!r}, path={self.path!r}>" + + def to_json(self) -> Tuple[Any, ...]: + return () + + @classmethod + def from_json(cls, + manifest: "Manifest", + path: Text, + obj: Any + ) -> "ManifestItem": + path = to_os_path(path) + tests_root = manifest.tests_root + assert tests_root is not None + return 
cls(tests_root, path) + + +class URLManifestItem(ManifestItem): + __slots__ = ("url_base", "_url", "_extras", "_flags") + + def __init__(self, + tests_root: Text, + path: Text, + url_base: Text, + url: Optional[Text], + **extras: Any + ) -> None: + super().__init__(tests_root, path) + assert url_base[0] == "/" + self.url_base = url_base + assert url is None or url[0] != "/" + self._url = url + self._extras = extras + parsed_url = urlparse(self.url) + self._flags = (set(parsed_url.path.rsplit("/", 1)[1].split(".")[1:-1]) | + set(parse_qs(parsed_url.query).get("wpt_flags", []))) + + @property + def id(self) -> Text: + return self.url + + @property + def url(self) -> Text: + rel_url = self._url or self.path.replace(os.path.sep, "/") + # we can outperform urljoin, because we know we just have path relative URLs + if self.url_base == "/": + return "/" + rel_url + return urljoin(self.url_base, rel_url) + + @property + def https(self) -> bool: + return "https" in self._flags or "serviceworker" in self._flags or "serviceworker-module" in self._flags + + @property + def h2(self) -> bool: + return "h2" in self._flags + + @property + def subdomain(self) -> bool: + # Note: this is currently hard-coded to check for `www`, rather than + # all possible valid subdomains. It can be extended if needed. 
+ return "www" in self._flags + + def to_json(self) -> Tuple[Optional[Text], Dict[Any, Any]]: + rel_url = None if self._url == self.path.replace(os.path.sep, "/") else self._url + rv: Tuple[Optional[Text], Dict[Any, Any]] = (rel_url, {}) + return rv + + @classmethod + def from_json(cls, + manifest: "Manifest", + path: Text, + obj: Tuple[Text, Dict[Any, Any]] + ) -> "URLManifestItem": + path = to_os_path(path) + url, extras = obj + tests_root = manifest.tests_root + assert tests_root is not None + return cls(tests_root, + path, + manifest.url_base, + url, + **extras) + + +class TestharnessTest(URLManifestItem): + __slots__ = () + + item_type = "testharness" + + @property + def timeout(self) -> Optional[Text]: + return self._extras.get("timeout") + + @property + def pac(self) -> Optional[Text]: + return self._extras.get("pac") + + @property + def testdriver(self) -> Optional[Text]: + return self._extras.get("testdriver") + + @property + def jsshell(self) -> Optional[Text]: + return self._extras.get("jsshell") + + @property + def script_metadata(self) -> Optional[List[Tuple[Text, Text]]]: + return self._extras.get("script_metadata") + + def to_json(self) -> Tuple[Optional[Text], Dict[Text, Any]]: + rv = super().to_json() + if self.timeout is not None: + rv[-1]["timeout"] = self.timeout + if self.pac is not None: + rv[-1]["pac"] = self.pac + if self.testdriver: + rv[-1]["testdriver"] = self.testdriver + if self.jsshell: + rv[-1]["jsshell"] = True + if self.script_metadata: + rv[-1]["script_metadata"] = [(k, v) for (k,v) in self.script_metadata] + return rv + + +class RefTest(URLManifestItem): + __slots__ = ("references",) + + item_type = "reftest" + + def __init__(self, + tests_root: Text, + path: Text, + url_base: Text, + url: Optional[Text], + references: Optional[List[Tuple[Text, Text]]] = None, + **extras: Any + ): + super().__init__(tests_root, path, url_base, url, **extras) + if references is None: + self.references: List[Tuple[Text, Text]] = [] + else: + 
self.references = references + + @property + def timeout(self) -> Optional[Text]: + return self._extras.get("timeout") + + @property + def viewport_size(self) -> Optional[Text]: + return self._extras.get("viewport_size") + + @property + def dpi(self) -> Optional[Text]: + return self._extras.get("dpi") + + @property + def fuzzy(self) -> Fuzzy: + fuzzy: Union[Fuzzy, List[Tuple[Optional[Sequence[Text]], List[int]]]] = self._extras.get("fuzzy", {}) + if not isinstance(fuzzy, list): + return fuzzy + + rv: Fuzzy = {} + for k, v in fuzzy: # type: Tuple[Optional[Sequence[Text]], List[int]] + if k is None: + key: Optional[Tuple[Text, Text, Text]] = None + else: + # mypy types this as Tuple[Text, ...] + assert len(k) == 3 + key = tuple(k) # type: ignore + rv[key] = v + return rv + + def to_json(self) -> Tuple[Optional[Text], List[Tuple[Text, Text]], Dict[Text, Any]]: # type: ignore + rel_url = None if self._url == self.path else self._url + rv: Tuple[Optional[Text], List[Tuple[Text, Text]], Dict[Text, Any]] = (rel_url, self.references, {}) + extras = rv[-1] + if self.timeout is not None: + extras["timeout"] = self.timeout + if self.viewport_size is not None: + extras["viewport_size"] = self.viewport_size + if self.dpi is not None: + extras["dpi"] = self.dpi + if self.fuzzy: + extras["fuzzy"] = list(self.fuzzy.items()) + return rv + + @classmethod + def from_json(cls, # type: ignore + manifest: "Manifest", + path: Text, + obj: Tuple[Text, List[Tuple[Text, Text]], Dict[Any, Any]] + ) -> "RefTest": + tests_root = manifest.tests_root + assert tests_root is not None + path = to_os_path(path) + url, references, extras = obj + return cls(tests_root, + path, + manifest.url_base, + url, + references, + **extras) + + +class PrintRefTest(RefTest): + __slots__ = ("references",) + + item_type = "print-reftest" + + @property + def page_ranges(self) -> PageRanges: + return self._extras.get("page_ranges", {}) + + def to_json(self): # type: ignore + rv = super().to_json() + if 
self.page_ranges: + rv[-1]["page_ranges"] = self.page_ranges + return rv + + +class ManualTest(URLManifestItem): + __slots__ = () + + item_type = "manual" + + +class ConformanceCheckerTest(URLManifestItem): + __slots__ = () + + item_type = "conformancechecker" + + +class VisualTest(URLManifestItem): + __slots__ = () + + item_type = "visual" + + +class CrashTest(URLManifestItem): + __slots__ = () + + item_type = "crashtest" + + @property + def timeout(self) -> Optional[Text]: + return None + + +class WebDriverSpecTest(URLManifestItem): + __slots__ = () + + item_type = "wdspec" + + @property + def timeout(self) -> Optional[Text]: + return self._extras.get("timeout") + + def to_json(self) -> Tuple[Optional[Text], Dict[Text, Any]]: + rv = super().to_json() + if self.timeout is not None: + rv[-1]["timeout"] = self.timeout + return rv + + +class SupportFile(ManifestItem): + __slots__ = () + + item_type = "support" + + @property + def id(self) -> Text: + return self.path + + +class SpecItem(ManifestItem): + __slots__ = ("specs") + + item_type = "spec" + + def __init__(self, + tests_root: Text, + path: Text, + specs: List[Text] + ) -> None: + super().__init__(tests_root, path) + self.specs = specs + + @property + def id(self) -> Text: + return self.path + + def to_json(self) -> Tuple[Optional[Text], Dict[Text, Any]]: + rv: Tuple[Optional[Text], Dict[Any, Any]] = (None, {}) + for i in range(len(self.specs)): + spec_key = f"spec_link{i+1}" + rv[-1][spec_key] = self.specs[i] + return rv + + @classmethod + def from_json(cls, + manifest: "Manifest", + path: Text, + obj: Any + ) -> "ManifestItem": + """Not properly implemented and is not used.""" + return cls("/", "", []) diff --git a/testing/web-platform/tests/tools/manifest/jsonlib.py b/testing/web-platform/tests/tools/manifest/jsonlib.py new file mode 100644 index 0000000000..0f70cf1e17 --- /dev/null +++ b/testing/web-platform/tests/tools/manifest/jsonlib.py @@ -0,0 +1,125 @@ +import re +import json +from typing import Any, 
import json
import logging
import re
from typing import Any, AnyStr, Callable, Dict, IO, Text


# Fix: the previous list contained "dump_local" twice and omitted "loads"
# and "dumps_local", so `from jsonlib import *` silently hid two public
# helpers.
__all__ = ["load", "loads", "dump_local", "dumps_local", "dump_dist", "dumps_dist"]


# Prefer ujson when available (it is substantially faster); every public
# helper below has a stdlib-json fallback.
try:
    import ujson
except ImportError:
    has_ujson = False
else:
    has_ujson = True

#
# load
#

if has_ujson:
    load: Callable[[IO[AnyStr]], Any] = ujson.load

else:
    load = json.load


#
# loads
#

if has_ujson:
    loads: Callable[[AnyStr], Any] = ujson.loads

else:
    loads = json.loads


#
# dump/dumps_local options for some libraries
#
_ujson_dump_local_kwargs: Dict[str, Any] = {
    'ensure_ascii': False,
    'escape_forward_slashes': False,
    'indent': 1,
    'reject_bytes': True,
}


_json_dump_local_kwargs: Dict[str, Any] = {
    'ensure_ascii': False,
    'indent': 1,
    'separators': (',', ': '),
}


#
# dump_local (for local, non-distributed usage of JSON)
#

if has_ujson:
    def dump_local(obj: Any, fp: IO[str]) -> None:
        """Serialize obj to the open text file fp (ujson backend)."""
        return ujson.dump(obj, fp, **_ujson_dump_local_kwargs)

else:
    def dump_local(obj: Any, fp: IO[str]) -> None:
        """Serialize obj to the open text file fp (stdlib backend)."""
        return json.dump(obj, fp, **_json_dump_local_kwargs)


#
# dumps_local (for local, non-distributed usage of JSON)
#

if has_ujson:
    def dumps_local(obj: Any) -> Text:
        """Serialize obj to a str (ujson backend)."""
        return ujson.dumps(obj, **_ujson_dump_local_kwargs)

else:
    def dumps_local(obj: Any) -> Text:
        """Serialize obj to a str (stdlib backend)."""
        return json.dumps(obj, **_json_dump_local_kwargs)


#
# dump/dumps_dist (for distributed usage of JSON where files should safely roundtrip)
#

_ujson_dump_dist_kwargs: Dict[str, Any] = {
    'sort_keys': True,
    'indent': 1,
    'reject_bytes': True,
    'escape_forward_slashes': False,
}


_json_dump_dist_kwargs: Dict[str, Any] = {
    'sort_keys': True,
    'indent': 1,
    'separators': (',', ': '),
}


if has_ujson:
    if ujson.dumps([], indent=1) == "[]":
        # optimistically see if https://github.com/ultrajson/ultrajson/issues/429 is fixed
        def _ujson_fixup(s: str) -> str:
            return s
    else:
        _ujson_fixup_re = re.compile(r"([\[{])[\n\x20]+([}\]])")

        def _ujson_fixup(s: str) -> str:
            # ujson pads empty containers like "[\n ]"; collapse them to
            # "[]" so output matches the stdlib encoder byte-for-byte.
            return _ujson_fixup_re.sub(
                lambda m: m.group(1) + m.group(2),
                s
            )

    def dump_dist(obj: Any, fp: IO[str]) -> None:
        fp.write(_ujson_fixup(ujson.dumps(obj, **_ujson_dump_dist_kwargs)))

    def dumps_dist(obj: Any) -> Text:
        return _ujson_fixup(ujson.dumps(obj, **_ujson_dump_dist_kwargs))
else:
    def dump_dist(obj: Any, fp: IO[str]) -> None:
        json.dump(obj, fp, **_json_dump_dist_kwargs)

    def dumps_dist(obj: Any) -> Text:
        return json.dumps(obj, **_json_dump_dist_kwargs)


# --- log.py -----------------------------------------------------------------

# Shared logger for the whole manifest package.
logger = logging.getLogger("manifest")


def enable_debug_logging() -> None:
    """Switch the shared manifest logger to DEBUG verbosity."""
    logger.setLevel(logging.DEBUG)


def get_logger() -> logging.Logger:
    """Return the shared "manifest" logger."""
    return logger
import vcs +from .item import (ConformanceCheckerTest, + CrashTest, + ManifestItem, + ManualTest, + PrintRefTest, + RefTest, + SpecItem, + SupportFile, + TestharnessTest, + VisualTest, + WebDriverSpecTest) +from .log import get_logger +from .mputil import max_parallelism +from .sourcefile import SourceFile +from .typedata import TypeData + + +CURRENT_VERSION: int = 8 + + +class ManifestError(Exception): + pass + + +class ManifestVersionMismatch(ManifestError): + pass + + +class InvalidCacheError(Exception): + pass + + +item_classes: Dict[Text, Type[ManifestItem]] = {"testharness": TestharnessTest, + "reftest": RefTest, + "print-reftest": PrintRefTest, + "crashtest": CrashTest, + "manual": ManualTest, + "wdspec": WebDriverSpecTest, + "conformancechecker": ConformanceCheckerTest, + "visual": VisualTest, + "spec": SpecItem, + "support": SupportFile} + + +def compute_manifest_items(source_file: SourceFile) -> Optional[Tuple[Tuple[Text, ...], Text, Set[ManifestItem], Text]]: + rel_path_parts = source_file.rel_path_parts + new_type, manifest_items = source_file.manifest_items() + file_hash = source_file.hash + return rel_path_parts, new_type, set(manifest_items), file_hash + + +def compute_manifest_spec_items(source_file: SourceFile) -> Optional[Tuple[Tuple[Text, ...], Text, Set[ManifestItem], Text]]: + spec_tuple = source_file.manifest_spec_items() + if not spec_tuple: + return None + + new_type, manifest_items = spec_tuple + rel_path_parts = source_file.rel_path_parts + file_hash = source_file.hash + return rel_path_parts, new_type, set(manifest_items), file_hash + + +ManifestDataType = Dict[Any, TypeData] + + +class ManifestData(ManifestDataType): + def __init__(self, manifest: "Manifest") -> None: + """Dictionary subclass containing a TypeData instance for each test type, + keyed by type name""" + self.initialized: bool = False + for key, value in item_classes.items(): + self[key] = TypeData(manifest, value) + self.initialized = True + self.json_obj: None = None + + 
    def __setitem__(self, key: Text, value: TypeData) -> None:
        # The key set is frozen once __init__ completes: only the fixed set
        # of test types may ever be present.
        if self.initialized:
            raise AttributeError
        dict.__setitem__(self, key, value)

    def paths(self) -> Set[Text]:
        """Get a list of all paths containing test items
        without actually constructing all the items"""
        rv: Set[Text] = set()
        for item_data in self.values():
            for item in item_data:
                rv.add(os.path.sep.join(item))
        return rv

    def type_by_path(self) -> Dict[Tuple[Text, ...], Text]:
        # Map each path-parts tuple to the name of the test type it holds.
        rv = {}
        for item_type, item_data in self.items():
            for item in item_data:
                rv[item] = item_type
        return rv


class Manifest:
    """In-memory representation of the test manifest for one tests tree."""

    def __init__(self, tests_root: Text, url_base: Text = "/") -> None:
        assert url_base is not None
        self._data: ManifestData = ManifestData(self)
        self.tests_root: Text = tests_root
        self.url_base: Text = url_base

    def __iter__(self) -> Iterator[Tuple[Text, Text, Set[ManifestItem]]]:
        return self.itertypes()

    def itertypes(self, *types: Text) -> Iterator[Tuple[Text, Text, Set[ManifestItem]]]:
        """Yield (item type, relative path, tests) for the given types
        (all types, sorted, when none are given)."""
        for item_type in (types or sorted(self._data.keys())):
            for path in self._data[item_type]:
                rel_path = os.sep.join(path)
                tests = self._data[item_type][path]
                yield item_type, rel_path, tests

    def iterpath(self, path: Text) -> Iterable[ManifestItem]:
        """Yield every item defined in the file at `path`."""
        tpath = tuple(path.split(os.path.sep))

        for type_tests in self._data.values():
            i = type_tests.get(tpath, set())
            assert i is not None
            yield from i

    def iterdir(self, dir_name: Text) -> Iterable[ManifestItem]:
        """Yield every item under the directory `dir_name` (prefix match on
        path components)."""
        tpath = tuple(dir_name.split(os.path.sep))
        tpath_len = len(tpath)

        for type_tests in self._data.values():
            for path, tests in type_tests.items():
                if path[:tpath_len] == tpath:
                    yield from tests

    def update(self, tree: Iterable[Tuple[Text, Optional[Text], bool]], parallel: bool = True,
               update_func: Callable[..., Any] = compute_manifest_items) -> bool:
        """Update the manifest given an iterable of items that make up the updated manifest.

        The iterable must either generate tuples of the form (SourceFile, True) for paths
        that are to be updated, or (path, False) for items that are not to be updated. This
        unusual API is designed as an optimisation meaning that SourceFile items need not be
        constructed in the case we are not updating a path, but the absence of an item from
        the iterator may be used to remove defunct entries from the manifest.

        Returns True when the manifest changed."""

        logger = get_logger()

        changed = False

        # Create local variable references to these dicts so we avoid the
        # attribute access in the hot loop below
        data = self._data

        types = data.type_by_path()
        # Paths seen in the old manifest but not (yet) in the new tree;
        # whatever is left at the end is defunct and gets removed.
        remaining_manifest_paths = set(types)

        to_update = []

        for path, file_hash, updated in tree:
            path_parts = tuple(path.split(os.path.sep))
            is_new = path_parts not in remaining_manifest_paths

            if not updated and is_new:
                # This is kind of a bandaid; if we ended up here the cache
                # was invalid but we've been using it anyway. That's obviously
                # bad; we should fix the underlying issue that we sometimes
                # use an invalid cache. But at least this fixes the immediate
                # problem
                raise InvalidCacheError

            if not updated:
                remaining_manifest_paths.remove(path_parts)
            else:
                assert self.tests_root is not None
                source_file = SourceFile(self.tests_root,
                                         path,
                                         self.url_base,
                                         file_hash)

                hash_changed: bool = False

                if not is_new:
                    if file_hash is None:
                        file_hash = source_file.hash
                    remaining_manifest_paths.remove(path_parts)
                    old_type = types[path_parts]
                    old_hash = data[old_type].hashes[path_parts]
                    if old_hash != file_hash:
                        # Content changed: drop the stale entry; it will be
                        # recomputed below.
                        hash_changed = True
                        del data[old_type][path_parts]

                if is_new or hash_changed:
                    to_update.append(source_file)

        if to_update:
            logger.debug("Computing manifest update for %s items" % len(to_update))
            changed = True

            # 25 items was derived experimentally (2020-01) to be approximately the
            # point at which it is quicker to create a Pool and parallelize update.
            pool = None
            processes = max_parallelism()
            if parallel and len(to_update) > 25 and processes > 1:
                pool = Pool(processes)

                # chunksize set > 1 when more than 10000 tests, because
                # chunking is a net-gain once we get to very large numbers
                # of items (again, experimentally, 2020-01)
                chunksize = max(1, len(to_update) // 10000)
                logger.debug("Doing a multiprocessed update. "
                             "Processes: %s, chunksize: %s" % (processes, chunksize))
                results: Iterator[Optional[Tuple[Tuple[Text, ...],
                                                 Text,
                                                 Set[ManifestItem], Text]]] = pool.imap_unordered(
                    update_func,
                    to_update,
                    chunksize=chunksize)
            else:
                results = map(update_func, to_update)

            for result in results:
                if not result:
                    continue
                rel_path_parts, new_type, manifest_items, file_hash = result
                data[new_type][rel_path_parts] = manifest_items
                data[new_type].hashes[rel_path_parts] = file_hash

            # Make sure to terminate the Pool, to avoid hangs on Python 3.
            # https://docs.python.org/3/library/multiprocessing.html#multiprocessing.pool.Pool
            if pool is not None:
                pool.terminate()

        if remaining_manifest_paths:
            # Anything not mentioned by the tree iterator no longer exists.
            changed = True
            for rel_path_parts in remaining_manifest_paths:
                for test_data in data.values():
                    if rel_path_parts in test_data:
                        del test_data[rel_path_parts]

        return changed

    def to_json(self, caller_owns_obj: bool = True) -> Dict[Text, Any]:
        """Dump a manifest into a object which can be serialized as JSON

        If caller_owns_obj is False, then the return value remains
        owned by the manifest; it is _vitally important_ that _no_
        (even read) operation is done on the manifest, as otherwise
        objects within the object graph rooted at the return value can
        be mutated. This essentially makes this mode very dangerous
        and only to be used under extreme care.

        """
        out_items = {
            test_type: type_paths.to_json()
            for test_type, type_paths in self._data.items() if type_paths
        }

        if caller_owns_obj:
            out_items = deepcopy(out_items)

        rv: Dict[Text, Any] = {"url_base": self.url_base,
                               "items": out_items,
                               "version": CURRENT_VERSION}
        return rv

    @classmethod
    def from_json(cls,
                  tests_root: Text,
                  obj: Dict[Text, Any],
                  types: Optional[Container[Text]] = None,
                  callee_owns_obj: bool = False) -> "Manifest":
        """Load a manifest from a JSON object

        This loads a manifest for a given local test_root path from an
        object obj, potentially partially loading it to only load the
        types given by types.

        If callee_owns_obj is True, then ownership of obj transfers
        to this function when called, and the caller must never mutate
        the obj or anything referred to in the object graph rooted at
        obj.

        """
        version = obj.get("version")
        if version != CURRENT_VERSION:
            raise ManifestVersionMismatch

        self = cls(tests_root, url_base=obj.get("url_base", "/"))
        # NOTE(review): hasattr(obj, "items") tests for the dict .items
        # *method*, which every dict has; a manifest missing the "items"
        # *key* therefore raises KeyError below instead of ManifestError —
        # confirm whether `"items" not in obj` was intended.
        if not hasattr(obj, "items"):
            raise ManifestError

        for test_type, type_paths in obj["items"].items():
            if test_type not in item_classes:
                raise ManifestError

            if types and test_type not in types:
                continue

            if not callee_owns_obj:
                # We don't own obj, so defensively copy before storing.
                type_paths = deepcopy(type_paths)

            self._data[test_type].set_json(type_paths)

        return self


def load(tests_root: Text, manifest: Union[IO[bytes], Text], types: Optional[Container[Text]] = None) -> Optional[Manifest]:
    """Deprecated entry point; kept for backwards compatibility."""
    logger = get_logger()

    logger.warning("Prefer load_and_update instead")
    return _load(logger, tests_root, manifest, types)


# Cache of already-loaded manifests, keyed by manifest file path.
__load_cache: Dict[Text, Manifest] = {}
def _load(logger: Logger,
          tests_root: Text,
          manifest: Union[IO[bytes], Text],
          types: Optional[Container[Text]] = None,
          allow_cached: bool = True
          ) -> Optional[Manifest]:
    """Load a manifest from a path or an open file.

    Returns None when the file is missing or cannot be parsed; results are
    memoized in __load_cache keyed by path unless allow_cached is False."""
    manifest_path = (manifest if isinstance(manifest, str)
                     else manifest.name)
    if allow_cached and manifest_path in __load_cache:
        return __load_cache[manifest_path]

    if isinstance(manifest, str):
        if os.path.exists(manifest):
            logger.debug("Opening manifest at %s" % manifest)
        else:
            logger.debug("Creating new manifest at %s" % manifest)
        try:
            with open(manifest, encoding="utf-8") as f:
                rv = Manifest.from_json(tests_root,
                                        jsonlib.load(f),
                                        types=types,
                                        callee_owns_obj=True)
        except OSError:
            # Missing/unreadable file: signal the caller to build afresh.
            return None
        except ValueError:
            logger.warning("%r may be corrupted", manifest)
            return None
    else:
        rv = Manifest.from_json(tests_root,
                                jsonlib.load(manifest),
                                types=types,
                                callee_owns_obj=True)

    if allow_cached:
        __load_cache[manifest_path] = rv
    return rv


def load_and_update(tests_root: Text,
                    manifest_path: Text,
                    url_base: Text,
                    update: bool = True,
                    rebuild: bool = False,
                    metadata_path: Optional[Text] = None,
                    cache_root: Optional[Text] = None,
                    working_copy: bool = True,
                    types: Optional[Container[Text]] = None,
                    write_manifest: bool = True,
                    allow_cached: bool = True,
                    parallel: bool = True
                    ) -> Manifest:
    """Load the manifest at manifest_path, rebuilding or updating it from
    the source tree as required, and (optionally) write it back to disk."""

    logger = get_logger()

    manifest = None
    if not rebuild:
        try:
            manifest = _load(logger,
                             tests_root,
                             manifest_path,
                             types=types,
                             allow_cached=allow_cached)
        except ManifestVersionMismatch:
            logger.info("Manifest version changed, rebuilding")
        except ManifestError:
            logger.warning("Failed to load manifest, rebuilding")

    if manifest is not None and manifest.url_base != url_base:
        logger.info("Manifest url base did not match, rebuilding")
        manifest = None

    if manifest is None:
        manifest = Manifest(tests_root, url_base)
        rebuild = True
        update = True

    if rebuild or update:
        logger.info("Updating manifest")
        # One retry: an InvalidCacheError forces a full rebuild on pass two.
        for retry in range(2):
            try:
                tree = vcs.get_tree(tests_root, manifest, manifest_path, cache_root,
                                    working_copy, rebuild)
                changed = manifest.update(tree, parallel)
                break
            except InvalidCacheError:
                logger.warning("Manifest cache was invalid, doing a complete rebuild")
                rebuild = True
        else:
            # If we didn't break there was an error
            # NOTE(review): this bare `raise` executes with no active
            # exception (the except clause above has already exited), so it
            # raises RuntimeError("No active exception to re-raise") rather
            # than re-raising InvalidCacheError — confirm intent.
            raise
        if write_manifest and changed:
            write(manifest, manifest_path)
        tree.dump_caches()

    return manifest


def write(manifest: Manifest, manifest_path: Text) -> None:
    """Atomically serialize `manifest` to `manifest_path` as JSON."""
    dir_name = os.path.dirname(manifest_path)
    if not os.path.exists(dir_name):
        os.makedirs(dir_name)
    with atomic_write(manifest_path, overwrite=True) as f:
        # Use ',' instead of the default ', ' separator to prevent trailing
        # spaces: https://docs.python.org/2/library/json.html#json.dump
        jsonlib.dump_dist(manifest.to_json(caller_owns_obj=True), f)
        f.write("\n")
+ # + # See https://bugs.python.org/issue26903 and https://bugs.python.org/issue40263 + cpu_count = min(cpu_count, 56) + return cpu_count diff --git a/testing/web-platform/tests/tools/manifest/requirements.txt b/testing/web-platform/tests/tools/manifest/requirements.txt new file mode 100644 index 0000000000..d7c173723e --- /dev/null +++ b/testing/web-platform/tests/tools/manifest/requirements.txt @@ -0,0 +1 @@ +zstandard==0.21.0 diff --git a/testing/web-platform/tests/tools/manifest/sourcefile.py b/testing/web-platform/tests/tools/manifest/sourcefile.py new file mode 100644 index 0000000000..23aa7f491f --- /dev/null +++ b/testing/web-platform/tests/tools/manifest/sourcefile.py @@ -0,0 +1,1083 @@ +import hashlib +import re +import os +from collections import deque +from fnmatch import fnmatch +from io import BytesIO +from typing import (Any, BinaryIO, Callable, Deque, Dict, Iterable, List, Optional, Pattern, + Set, Text, Tuple, Union, cast) +from urllib.parse import urljoin + +try: + from xml.etree import cElementTree as ElementTree +except ImportError: + from xml.etree import ElementTree as ElementTree # type: ignore + +import html5lib + +from . import XMLParser +from .item import (ConformanceCheckerTest, + CrashTest, + ManifestItem, + ManualTest, + PrintRefTest, + RefTest, + SpecItem, + SupportFile, + TestharnessTest, + VisualTest, + WebDriverSpecTest) +from .utils import cached_property + +# Cannot do `from ..metadata.webfeatures.schema import WEB_FEATURES_YML_FILENAME` +# because relative import beyond toplevel throws *ImportError*! 
# Cannot do `from ..metadata.webfeatures.schema import WEB_FEATURES_YML_FILENAME`
# because relative import beyond toplevel throws *ImportError*!
from metadata.webfeatures.schema import WEB_FEATURES_YML_FILENAME  # type: ignore

# Filename pattern identifying wdspec (webdriver spec) test files.
wd_pattern = "*.py"
# Leading "// META: key=value" (JS) / "# META: key=value" (Python) headers.
js_meta_re = re.compile(br"//\s*META:\s*(\w*)=(.*)$")
python_meta_re = re.compile(br"#\s*META:\s*(\w*)=(.*)$")

# Filenames like "foo-ref.html" or "bar-notref2.html" denote reference files.
reference_file_re = re.compile(r'(^|[\-_])(not)?ref[0-9]*([\-_]|$)')

space_chars: Text = "".join(html5lib.constants.spaceCharacters)


def replace_end(s: Text, old: Text, new: Text) -> Text:
    """
    Given a string `s` that ends with `old`, replace that occurrence of `old`
    with `new`.
    """
    assert s.endswith(old)
    return s[:-len(old)] + new


def read_script_metadata(f: BinaryIO, regexp: Pattern[bytes]) -> Iterable[Tuple[Text, Text]]:
    """
    Yields any metadata (pairs of strings) from the file-like object `f`,
    as specified according to a supplied regexp.

    `regexp` - Regexp containing two groups containing the metadata name and
    value.
    """
    for line in f:
        assert isinstance(line, bytes), line
        m = regexp.match(line)
        if not m:
            # Metadata only appears in the leading comment block, so stop at
            # the first non-matching line.
            break

        yield (m.groups()[0].decode("utf8"), m.groups()[1].decode("utf8"))


# Known "global" variants for .any.js tests.  "suffix" overrides the generated
# filename suffix, "force_https" marks variants requiring a secure context,
# and "longhand" expands a shorthand keyword to several variants.
_any_variants: Dict[Text, Dict[Text, Any]] = {
    "window": {"suffix": ".any.html"},
    "serviceworker": {"force_https": True},
    "serviceworker-module": {"force_https": True},
    "sharedworker": {},
    "sharedworker-module": {},
    "dedicatedworker": {"suffix": ".any.worker.html"},
    "dedicatedworker-module": {"suffix": ".any.worker-module.html"},
    "worker": {"longhand": {"dedicatedworker", "sharedworker", "serviceworker"}},
    "worker-module": {},
    "shadowrealm": {},
    "jsshell": {"suffix": ".any.js"},
}


def get_any_variants(item: Text) -> Set[Text]:
    """
    Returns a set of variants (strings) defined by the given keyword.
    """
    assert isinstance(item, str), item

    variant = _any_variants.get(item)
    if variant is None:
        return set()

    return variant.get("longhand", {item})


def get_default_any_variants() -> Set[Text]:
    """
    Returns a set of variants (strings) that will be used by default.
    """
    # Idiom fix: plain set literal (was wrapped in a redundant set() call).
    return {"window", "dedicatedworker"}


def parse_variants(value: Text) -> Set[Text]:
    """
    Returns a set of variants (strings) defined by a comma-separated value.
    """
    assert isinstance(value, str), value

    if value == "":
        return get_default_any_variants()

    # Renamed from `globals`, which shadowed the builtin of the same name.
    variants = set()
    for item in value.split(","):
        item = item.strip()
        variants |= get_any_variants(item)
    return variants


def global_suffixes(value: Text) -> Set[Tuple[Text, bool]]:
    """
    Yields tuples of the relevant filename suffix (a string) and whether the
    variant is intended to run in a JS shell, for the variants defined by the
    given comma-separated value.
    """
    assert isinstance(value, str), value

    rv = set()

    global_types = parse_variants(value)
    for global_type in global_types:
        variant = _any_variants[global_type]
        suffix = variant.get("suffix", ".any.%s.html" % global_type)
        rv.add((suffix, global_type == "jsshell"))

    return rv


def global_variant_url(url: Text, suffix: Text) -> Text:
    """
    Returns a url created from the given url and suffix (all strings).
    """
    url = url.replace(".any.", ".")
    # If the url must be loaded over https, ensure that it will have
    # the form .https.any.js
    if ".https." in url and suffix.startswith(".https."):
        url = url.replace(".https.", ".")
    elif ".h2." in url and suffix.startswith(".h2."):
        url = url.replace(".h2.", ".")
    return replace_end(url, ".js", suffix)
in url and suffix.startswith(".h2."): + url = url.replace(".h2.", ".") + return replace_end(url, ".js", suffix) + + +def _parse_html(f: BinaryIO) -> ElementTree.Element: + doc = html5lib.parse(f, treebuilder="etree", useChardet=False) + return cast(ElementTree.Element, doc) + +def _parse_xml(f: BinaryIO) -> ElementTree.Element: + try: + # raises ValueError with an unsupported encoding, + # ParseError when there's an undefined entity + return ElementTree.parse(f).getroot() + except (ValueError, ElementTree.ParseError): + f.seek(0) + return ElementTree.parse(f, XMLParser.XMLParser()).getroot() # type: ignore + + +class SourceFile: + parsers: Dict[Text, Callable[[BinaryIO], ElementTree.Element]] = {"html":_parse_html, + "xhtml":_parse_xml, + "svg":_parse_xml} + + root_dir_non_test = {"common"} + + dir_non_test = {"resources", + "support", + "tools"} + + dir_path_non_test: Set[Tuple[Text, ...]] = {("css21", "archive"), + ("css", "CSS2", "archive"), + ("css", "common")} + + def __init__(self, tests_root: Text, + rel_path: Text, + url_base: Text, + hash: Optional[Text] = None, + contents: Optional[bytes] = None) -> None: + """Object representing a file in a source tree. + + :param tests_root: Path to the root of the source tree + :param rel_path_str: File path relative to tests_root + :param url_base: Base URL used when converting file paths to urls + :param contents: Byte array of the contents of the file or ``None``. 
+ """ + + assert not os.path.isabs(rel_path), rel_path + if os.name == "nt": + # do slash normalization on Windows + rel_path = rel_path.replace("/", "\\") + + dir_path, filename = os.path.split(rel_path) + name, ext = os.path.splitext(filename) + + type_flag = None + if "-" in name: + type_flag = name.rsplit("-", 1)[1].split(".")[0] + + meta_flags = name.split(".")[1:] + + self.tests_root: Text = tests_root + self.rel_path: Text = rel_path + self.dir_path: Text = dir_path + self.filename: Text = filename + self.name: Text = name + self.ext: Text = ext + self.type_flag: Optional[Text] = type_flag + self.meta_flags: Union[List[bytes], List[Text]] = meta_flags + self.url_base = url_base + self.contents = contents + self.items_cache: Optional[Tuple[Text, List[ManifestItem]]] = None + self._hash = hash + + def __getstate__(self) -> Dict[str, Any]: + # Remove computed properties if we pickle this class + rv = self.__dict__.copy() + + if "__cached_properties__" in rv: + cached_properties = rv["__cached_properties__"] + rv = {key:value for key, value in rv.items() if key not in cached_properties} + del rv["__cached_properties__"] + return rv + + def name_prefix(self, prefix: Text) -> bool: + """Check if the filename starts with a given prefix + + :param prefix: The prefix to check""" + return self.name.startswith(prefix) + + def is_dir(self) -> bool: + """Return whether this file represents a directory.""" + if self.contents is not None: + return False + + return os.path.isdir(self.rel_path) + + def open(self) -> BinaryIO: + """ + Return either + * the contents specified in the constructor, if any; + * a File object opened for reading the file contents. 
+ """ + if self.contents is not None: + file_obj: BinaryIO = BytesIO(self.contents) + else: + file_obj = open(self.path, 'rb') + return file_obj + + @cached_property + def rel_path_parts(self) -> Tuple[Text, ...]: + return tuple(self.rel_path.split(os.path.sep)) + + @cached_property + def path(self) -> Text: + return os.path.join(self.tests_root, self.rel_path) + + @cached_property + def rel_url(self) -> Text: + assert not os.path.isabs(self.rel_path), self.rel_path + return self.rel_path.replace(os.sep, "/") + + @cached_property + def url(self) -> Text: + return urljoin(self.url_base, self.rel_url) + + @cached_property + def hash(self) -> Text: + if not self._hash: + with self.open() as f: + content = f.read() + + data = b"".join((b"blob ", b"%d" % len(content), b"\0", content)) + self._hash = str(hashlib.sha1(data).hexdigest()) + + return self._hash + + def in_non_test_dir(self) -> bool: + if self.dir_path == "": + return True + + parts = self.rel_path_parts + + if (parts[0] in self.root_dir_non_test or + any(item in self.dir_non_test for item in parts) or + any(parts[:len(path)] == path for path in self.dir_path_non_test)): + return True + return False + + def in_conformance_checker_dir(self) -> bool: + return self.rel_path_parts[0] == "conformance-checkers" + + @property + def name_is_non_test(self) -> bool: + """Check if the file name matches the conditions for the file to + be a non-test file""" + return (self.is_dir() or + self.name_prefix("MANIFEST") or + self.filename == "META.yml" or + self.filename == WEB_FEATURES_YML_FILENAME or + self.filename.startswith(".") or + self.filename.endswith(".headers") or + self.filename.endswith(".ini") or + self.in_non_test_dir()) + + @property + def name_is_conformance(self) -> bool: + return (self.in_conformance_checker_dir() and + self.type_flag in ("is-valid", "no-valid")) + + @property + def name_is_conformance_support(self) -> bool: + return self.in_conformance_checker_dir() + + @property + def name_is_manual(self) 
-> bool: + """Check if the file name matches the conditions for the file to + be a manual test file""" + return self.type_flag == "manual" + + @property + def name_is_visual(self) -> bool: + """Check if the file name matches the conditions for the file to + be a visual test file""" + return self.type_flag == "visual" + + @property + def name_is_multi_global(self) -> bool: + """Check if the file name matches the conditions for the file to + be a multi-global js test file""" + return "any" in self.meta_flags and self.ext == ".js" + + @property + def name_is_worker(self) -> bool: + """Check if the file name matches the conditions for the file to + be a worker js test file""" + return "worker" in self.meta_flags and self.ext == ".js" + + @property + def name_is_window(self) -> bool: + """Check if the file name matches the conditions for the file to + be a window js test file""" + return "window" in self.meta_flags and self.ext == ".js" + + @property + def name_is_webdriver(self) -> bool: + """Check if the file name matches the conditions for the file to + be a webdriver spec test file""" + # wdspec tests are in subdirectories of /webdriver excluding __init__.py + # files. 
+ rel_path_parts = self.rel_path_parts + return (((rel_path_parts[0] == "webdriver" and len(rel_path_parts) > 1) or + (rel_path_parts[:2] == ("infrastructure", "webdriver") and + len(rel_path_parts) > 2)) and + self.filename not in ("__init__.py", "conftest.py") and + fnmatch(self.filename, wd_pattern)) + + @property + def name_is_reference(self) -> bool: + """Check if the file name matches the conditions for the file to + be a reference file (not a reftest)""" + return "/reference/" in self.url or bool(reference_file_re.search(self.name)) + + @property + def name_is_crashtest(self) -> bool: + return (self.markup_type is not None and + (self.type_flag == "crash" or "crashtests" in self.dir_path.split(os.path.sep))) + + @property + def name_is_tentative(self) -> bool: + """Check if the file name matches the conditions for the file to be a + tentative file. + + See https://web-platform-tests.org/writing-tests/file-names.html#test-features""" + return "tentative" in self.meta_flags or "tentative" in self.dir_path.split(os.path.sep) + + @property + def name_is_print_reftest(self) -> bool: + return (self.markup_type is not None and + (self.type_flag == "print" or "print" in self.dir_path.split(os.path.sep))) + + @property + def markup_type(self) -> Optional[Text]: + """Return the type of markup contained in a file, based on its extension, + or None if it doesn't contain markup""" + ext = self.ext + + if not ext: + return None + if ext[0] == ".": + ext = ext[1:] + if ext in ["html", "htm"]: + return "html" + if ext in ["xhtml", "xht", "xml"]: + return "xhtml" + if ext == "svg": + return "svg" + return None + + @cached_property + def root(self) -> Optional[ElementTree.Element]: + """Return an ElementTree Element for the root node of the file if it contains + markup, or None if it does not""" + if not self.markup_type: + return None + + parser = self.parsers[self.markup_type] + + with self.open() as f: + try: + tree = parser(f) + except Exception: + return None + + return 
tree + + @cached_property + def timeout_nodes(self) -> List[ElementTree.Element]: + """List of ElementTree Elements corresponding to nodes in a test that + specify timeouts""" + assert self.root is not None + return self.root.findall(".//{http://www.w3.org/1999/xhtml}meta[@name='timeout']") + + @cached_property + def pac_nodes(self) -> List[ElementTree.Element]: + """List of ElementTree Elements corresponding to nodes in a test that + specify PAC (proxy auto-config)""" + assert self.root is not None + return self.root.findall(".//{http://www.w3.org/1999/xhtml}meta[@name='pac']") + + @cached_property + def script_metadata(self) -> Optional[List[Tuple[Text, Text]]]: + if self.name_is_worker or self.name_is_multi_global or self.name_is_window: + regexp = js_meta_re + elif self.name_is_webdriver: + regexp = python_meta_re + else: + return None + + with self.open() as f: + return list(read_script_metadata(f, regexp)) + + @cached_property + def timeout(self) -> Optional[Text]: + """The timeout of a test or reference file. "long" if the file has an extended timeout + or None otherwise""" + if self.script_metadata: + if any(m == ("timeout", "long") for m in self.script_metadata): + return "long" + + if self.root is None: + return None + + if self.timeout_nodes: + timeout_str: Optional[Text] = self.timeout_nodes[0].attrib.get("content", None) + if timeout_str and timeout_str.lower() == "long": + return "long" + + return None + + @cached_property + def pac(self) -> Optional[Text]: + """The PAC (proxy config) of a test or reference file. 
A URL or null""" + if self.script_metadata: + for (meta, content) in self.script_metadata: + if meta == 'pac': + return content + + if self.root is None: + return None + + if self.pac_nodes: + return self.pac_nodes[0].attrib.get("content", None) + + return None + + @cached_property + def viewport_nodes(self) -> List[ElementTree.Element]: + """List of ElementTree Elements corresponding to nodes in a test that + specify viewport sizes""" + assert self.root is not None + return self.root.findall(".//{http://www.w3.org/1999/xhtml}meta[@name='viewport-size']") + + @cached_property + def viewport_size(self) -> Optional[Text]: + """The viewport size of a test or reference file""" + if self.root is None: + return None + + if not self.viewport_nodes: + return None + + return self.viewport_nodes[0].attrib.get("content", None) + + @cached_property + def dpi_nodes(self) -> List[ElementTree.Element]: + """List of ElementTree Elements corresponding to nodes in a test that + specify device pixel ratios""" + assert self.root is not None + return self.root.findall(".//{http://www.w3.org/1999/xhtml}meta[@name='device-pixel-ratio']") + + @cached_property + def dpi(self) -> Optional[Text]: + """The device pixel ratio of a test or reference file""" + if self.root is None: + return None + + if not self.dpi_nodes: + return None + + return self.dpi_nodes[0].attrib.get("content", None) + + def parse_ref_keyed_meta(self, node: ElementTree.Element) -> Tuple[Optional[Tuple[Text, Text, Text]], Text]: + item: Text = node.attrib.get("content", "") + + parts = item.rsplit(":", 1) + if len(parts) == 1: + key: Optional[Tuple[Text, Text, Text]] = None + value = parts[0] + else: + key_part = urljoin(self.url, parts[0]) + reftype = None + for ref in self.references: # type: Tuple[Text, Text] + if ref[0] == key_part: + reftype = ref[1] + break + if reftype not in ("==", "!="): + raise ValueError("Key %s doesn't correspond to a reference" % key_part) + key = (self.url, key_part, reftype) + value = 
parts[1] + + return key, value + + + @cached_property + def fuzzy_nodes(self) -> List[ElementTree.Element]: + """List of ElementTree Elements corresponding to nodes in a test that + specify reftest fuzziness""" + assert self.root is not None + return self.root.findall(".//{http://www.w3.org/1999/xhtml}meta[@name='fuzzy']") + + + @cached_property + def fuzzy(self) -> Dict[Optional[Tuple[Text, Text, Text]], List[List[int]]]: + rv: Dict[Optional[Tuple[Text, Text, Text]], List[List[int]]] = {} + if self.root is None: + return rv + + if not self.fuzzy_nodes: + return rv + + args = ["maxDifference", "totalPixels"] + + for node in self.fuzzy_nodes: + key, value = self.parse_ref_keyed_meta(node) + ranges = value.split(";") + if len(ranges) != 2: + raise ValueError("Malformed fuzzy value %s" % value) + arg_values: Dict[Text, List[int]] = {} + positional_args: Deque[List[int]] = deque() + for range_str_value in ranges: # type: Text + name: Optional[Text] = None + if "=" in range_str_value: + name, range_str_value = (part.strip() + for part in range_str_value.split("=", 1)) + if name not in args: + raise ValueError("%s is not a valid fuzzy property" % name) + if arg_values.get(name): + raise ValueError("Got multiple values for argument %s" % name) + if "-" in range_str_value: + range_min, range_max = range_str_value.split("-") + else: + range_min = range_str_value + range_max = range_str_value + try: + range_value = [int(x.strip()) for x in (range_min, range_max)] + except ValueError: + raise ValueError("Fuzzy value %s must be a range of integers" % + range_str_value) + if name is None: + positional_args.append(range_value) + else: + arg_values[name] = range_value + rv[key] = [] + for arg_name in args: + if arg_values.get(arg_name): + arg_value = arg_values.pop(arg_name) + else: + arg_value = positional_args.popleft() + rv[key].append(arg_value) + assert len(arg_values) == 0 and len(positional_args) == 0 + return rv + + @cached_property + def page_ranges_nodes(self) -> 
List[ElementTree.Element]: + """List of ElementTree Elements corresponding to nodes in a test that + specify print-reftest """ + assert self.root is not None + return self.root.findall(".//{http://www.w3.org/1999/xhtml}meta[@name='reftest-pages']") + + @cached_property + def page_ranges(self) -> Dict[Text, List[List[Optional[int]]]]: + """List of ElementTree Elements corresponding to nodes in a test that + specify print-reftest page ranges""" + rv: Dict[Text, List[List[Optional[int]]]] = {} + for node in self.page_ranges_nodes: + key_data, value = self.parse_ref_keyed_meta(node) + # Just key by url + if key_data is None: + key = self.url + else: + key = key_data[1] + if key in rv: + raise ValueError("Duplicate page-ranges value") + rv[key] = [] + for range_str in value.split(","): + range_str = range_str.strip() + if "-" in range_str: + range_parts_str = [item.strip() for item in range_str.split("-")] + try: + range_parts = [int(item) if item else None for item in range_parts_str] + except ValueError: + raise ValueError("Malformed page-range value %s" % range_str) + if any(item == 0 for item in range_parts): + raise ValueError("Malformed page-range value %s" % range_str) + else: + try: + range_parts = [int(range_str)] + except ValueError: + raise ValueError("Malformed page-range value %s" % range_str) + rv[key].append(range_parts) + return rv + + @cached_property + def testharness_nodes(self) -> List[ElementTree.Element]: + """List of ElementTree Elements corresponding to nodes representing a + testharness.js script""" + assert self.root is not None + return self.root.findall(".//{http://www.w3.org/1999/xhtml}script[@src='/resources/testharness.js']") + + @cached_property + def content_is_testharness(self) -> Optional[bool]: + """Boolean indicating whether the file content represents a + testharness.js test""" + if self.root is None: + return None + return bool(self.testharness_nodes) + + @cached_property + def variant_nodes(self) -> List[ElementTree.Element]: + 
"""List of ElementTree Elements corresponding to nodes representing a + test variant""" + assert self.root is not None + return self.root.findall(".//{http://www.w3.org/1999/xhtml}meta[@name='variant']") + + @cached_property + def test_variants(self) -> List[Text]: + rv: List[Text] = [] + if self.ext == ".js": + script_metadata = self.script_metadata + assert script_metadata is not None + for (key, value) in script_metadata: + if key == "variant": + rv.append(value) + else: + for element in self.variant_nodes: + if "content" in element.attrib: + variant: Text = element.attrib["content"] + rv.append(variant) + + for variant in rv: + if variant != "": + if variant[0] not in ("#", "?"): + raise ValueError("Non-empty variant must start with either a ? or a #") + if len(variant) == 1 or (variant[0] == "?" and variant[1] == "#"): + raise ValueError("Variants must not have empty fragment or query " + + "(omit the empty part instead)") + + if not rv: + rv = [""] + + return rv + + @cached_property + def testdriver_nodes(self) -> List[ElementTree.Element]: + """List of ElementTree Elements corresponding to nodes representing a + testdriver.js script""" + assert self.root is not None + return self.root.findall(".//{http://www.w3.org/1999/xhtml}script[@src='/resources/testdriver.js']") + + @cached_property + def has_testdriver(self) -> Optional[bool]: + """Boolean indicating whether the file content represents a + testharness.js test""" + if self.root is None: + return None + return bool(self.testdriver_nodes) + + @cached_property + def reftest_nodes(self) -> List[ElementTree.Element]: + """List of ElementTree Elements corresponding to nodes representing a + to a reftest <link>""" + if self.root is None: + return [] + + match_links = self.root.findall(".//{http://www.w3.org/1999/xhtml}link[@rel='match']") + mismatch_links = self.root.findall(".//{http://www.w3.org/1999/xhtml}link[@rel='mismatch']") + return match_links + mismatch_links + + @cached_property + def 
references(self) -> List[Tuple[Text, Text]]: + """List of (ref_url, relation) tuples for any reftest references specified in + the file""" + rv: List[Tuple[Text, Text]] = [] + rel_map = {"match": "==", "mismatch": "!="} + for item in self.reftest_nodes: + if "href" in item.attrib: + ref_url = urljoin(self.url, item.attrib["href"].strip(space_chars)) + ref_type = rel_map[item.attrib["rel"]] + rv.append((ref_url, ref_type)) + return rv + + @cached_property + def content_is_ref_node(self) -> bool: + """Boolean indicating whether the file is a non-leaf node in a reftest + graph (i.e. if it contains any <link rel=[mis]match>""" + return bool(self.references) + + @cached_property + def css_flag_nodes(self) -> List[ElementTree.Element]: + """List of ElementTree Elements corresponding to nodes representing a + flag <meta>""" + if self.root is None: + return [] + return self.root.findall(".//{http://www.w3.org/1999/xhtml}meta[@name='flags']") + + @cached_property + def css_flags(self) -> Set[Text]: + """Set of flags specified in the file""" + rv: Set[Text] = set() + for item in self.css_flag_nodes: + if "content" in item.attrib: + for flag in item.attrib["content"].split(): + rv.add(flag) + return rv + + @cached_property + def content_is_css_manual(self) -> Optional[bool]: + """Boolean indicating whether the file content represents a + CSS WG-style manual test""" + if self.root is None: + return None + # return True if the intersection between the two sets is non-empty + return bool(self.css_flags & {"animated", "font", "history", "interact", "paged", "speech", "userstyle"}) + + @cached_property + def spec_link_nodes(self) -> List[ElementTree.Element]: + """List of ElementTree Elements corresponding to nodes representing a + <link rel=help>, used to point to specs""" + if self.root is None: + return [] + return self.root.findall(".//{http://www.w3.org/1999/xhtml}link[@rel='help']") + + @cached_property + def spec_links(self) -> Set[Text]: + """Set of spec links specified in 
the file""" + rv: Set[Text] = set() + for item in self.spec_link_nodes: + if "href" in item.attrib: + rv.add(item.attrib["href"].strip(space_chars)) + return rv + + @cached_property + def content_is_css_visual(self) -> Optional[bool]: + """Boolean indicating whether the file content represents a + CSS WG-style visual test""" + if self.root is None: + return None + return bool(self.ext in {'.xht', '.html', '.xhtml', '.htm', '.xml', '.svg'} and + self.spec_links) + + @property + def type(self) -> Text: + possible_types = self.possible_types + if len(possible_types) == 1: + return possible_types.pop() + + rv, _ = self.manifest_items() + return rv + + @property + def possible_types(self) -> Set[Text]: + """Determines the set of possible types without reading the file""" + + if self.items_cache: + return {self.items_cache[0]} + + if self.name_is_non_test: + return {SupportFile.item_type} + + if self.name_is_manual: + return {ManualTest.item_type} + + if self.name_is_conformance: + return {ConformanceCheckerTest.item_type} + + if self.name_is_conformance_support: + return {SupportFile.item_type} + + if self.name_is_webdriver: + return {WebDriverSpecTest.item_type} + + if self.name_is_visual: + return {VisualTest.item_type} + + if self.name_is_crashtest: + return {CrashTest.item_type} + + if self.name_is_print_reftest: + return {PrintRefTest.item_type} + + if self.name_is_multi_global: + return {TestharnessTest.item_type} + + if self.name_is_worker: + return {TestharnessTest.item_type} + + if self.name_is_window: + return {TestharnessTest.item_type} + + if self.markup_type is None: + return {SupportFile.item_type} + + if not self.name_is_reference: + return {ManualTest.item_type, + TestharnessTest.item_type, + RefTest.item_type, + VisualTest.item_type, + SupportFile.item_type} + + return {TestharnessTest.item_type, + RefTest.item_type, + SupportFile.item_type} + + def manifest_items(self) -> Tuple[Text, List[ManifestItem]]: + """List of manifest items corresponding to the 
file. There is typically one + per test, but in the case of reftests a node may have corresponding manifest + items without being a test itself.""" + + if self.items_cache: + return self.items_cache + + drop_cached = "root" not in self.__dict__ + + if self.name_is_non_test: + rv: Tuple[Text, List[ManifestItem]] = ("support", [ + SupportFile( + self.tests_root, + self.rel_path + )]) + + elif self.name_is_manual: + rv = ManualTest.item_type, [ + ManualTest( + self.tests_root, + self.rel_path, + self.url_base, + self.rel_url + )] + + elif self.name_is_conformance: + rv = ConformanceCheckerTest.item_type, [ + ConformanceCheckerTest( + self.tests_root, + self.rel_path, + self.url_base, + self.rel_url + )] + + elif self.name_is_conformance_support: + rv = "support", [ + SupportFile( + self.tests_root, + self.rel_path + )] + + elif self.name_is_webdriver: + rv = WebDriverSpecTest.item_type, [ + WebDriverSpecTest( + self.tests_root, + self.rel_path, + self.url_base, + self.rel_url, + timeout=self.timeout + )] + + elif self.name_is_visual: + rv = VisualTest.item_type, [ + VisualTest( + self.tests_root, + self.rel_path, + self.url_base, + self.rel_url + )] + + elif self.name_is_crashtest: + rv = CrashTest.item_type, [ + CrashTest( + self.tests_root, + self.rel_path, + self.url_base, + self.rel_url + )] + + elif self.name_is_print_reftest: + references = self.references + if not references: + raise ValueError("%s detected as print reftest but doesn't have any refs" % + self.path) + rv = PrintRefTest.item_type, [ + PrintRefTest( + self.tests_root, + self.rel_path, + self.url_base, + self.rel_url, + references=references, + timeout=self.timeout, + viewport_size=self.viewport_size, + fuzzy=self.fuzzy, + page_ranges=self.page_ranges, + )] + + elif self.name_is_multi_global: + globals = "" + script_metadata = self.script_metadata + assert script_metadata is not None + for (key, value) in script_metadata: + if key == "global": + globals = value + break + + tests: List[ManifestItem] 
= [ + TestharnessTest( + self.tests_root, + self.rel_path, + self.url_base, + global_variant_url(self.rel_url, suffix) + variant, + timeout=self.timeout, + pac=self.pac, + jsshell=jsshell, + script_metadata=self.script_metadata + ) + for (suffix, jsshell) in sorted(global_suffixes(globals)) + for variant in self.test_variants + ] + rv = TestharnessTest.item_type, tests + + elif self.name_is_worker: + test_url = replace_end(self.rel_url, ".worker.js", ".worker.html") + tests = [ + TestharnessTest( + self.tests_root, + self.rel_path, + self.url_base, + test_url + variant, + timeout=self.timeout, + pac=self.pac, + script_metadata=self.script_metadata + ) + for variant in self.test_variants + ] + rv = TestharnessTest.item_type, tests + + elif self.name_is_window: + test_url = replace_end(self.rel_url, ".window.js", ".window.html") + tests = [ + TestharnessTest( + self.tests_root, + self.rel_path, + self.url_base, + test_url + variant, + timeout=self.timeout, + pac=self.pac, + script_metadata=self.script_metadata + ) + for variant in self.test_variants + ] + rv = TestharnessTest.item_type, tests + + elif self.content_is_css_manual and not self.name_is_reference: + rv = ManualTest.item_type, [ + ManualTest( + self.tests_root, + self.rel_path, + self.url_base, + self.rel_url + )] + + elif self.content_is_testharness: + rv = TestharnessTest.item_type, [] + testdriver = self.has_testdriver + for variant in self.test_variants: + url = self.rel_url + variant + rv[1].append(TestharnessTest( + self.tests_root, + self.rel_path, + self.url_base, + url, + timeout=self.timeout, + pac=self.pac, + testdriver=testdriver, + script_metadata=self.script_metadata + )) + + elif self.content_is_ref_node: + rv = RefTest.item_type, [] + for variant in self.test_variants: + url = self.rel_url + variant + rv[1].append(RefTest( + self.tests_root, + self.rel_path, + self.url_base, + url, + references=[ + (ref[0] + variant, ref[1]) + for ref in self.references + ], + timeout=self.timeout, + 
viewport_size=self.viewport_size, + dpi=self.dpi, + fuzzy=self.fuzzy + )) + + elif self.content_is_css_visual and not self.name_is_reference: + rv = VisualTest.item_type, [ + VisualTest( + self.tests_root, + self.rel_path, + self.url_base, + self.rel_url + )] + + else: + rv = "support", [ + SupportFile( + self.tests_root, + self.rel_path + )] + + assert rv[0] in self.possible_types + assert len(rv[1]) == len(set(rv[1])) + + self.items_cache = rv + + if drop_cached and "__cached_properties__" in self.__dict__: + cached_properties = self.__dict__["__cached_properties__"] + for prop in cached_properties: + if prop in self.__dict__: + del self.__dict__[prop] + del self.__dict__["__cached_properties__"] + + return rv + + def manifest_spec_items(self) -> Optional[Tuple[Text, List[ManifestItem]]]: + specs = list(self.spec_links) + if not specs: + return None + rv: Tuple[Text, List[ManifestItem]] = (SpecItem.item_type, [ + SpecItem( + self.tests_root, + self.rel_path, + specs + )]) + return rv diff --git a/testing/web-platform/tests/tools/manifest/spec.py b/testing/web-platform/tests/tools/manifest/spec.py new file mode 100644 index 0000000000..5148fceb3e --- /dev/null +++ b/testing/web-platform/tests/tools/manifest/spec.py @@ -0,0 +1,84 @@ +#!/usr/bin/env python3 +import argparse +import os +from typing import Any, Optional, Text + +from . 
import vcs +from .manifest import compute_manifest_spec_items, InvalidCacheError, Manifest, write +from .log import get_logger, enable_debug_logging + + +here = os.path.dirname(__file__) + +wpt_root = os.path.abspath(os.path.join(here, os.pardir, os.pardir)) + +logger = get_logger() + + +def update_spec(tests_root: Text, + manifest_path: Text, + url_base: Text, + cache_root: Optional[Text] = None, + working_copy: bool = True, + parallel: bool = True + ) -> None: + + manifest = Manifest(tests_root, url_base) + + logger.info("Updating SPEC_MANIFEST") + try: + tree = vcs.get_tree(tests_root, manifest, manifest_path, cache_root, + working_copy, True) + changed = manifest.update(tree, parallel, compute_manifest_spec_items) + except InvalidCacheError: + logger.error("Manifest cache in spec.py was invalid.") + return + + if changed: + write(manifest, manifest_path) + tree.dump_caches() + + +def update_from_cli(**kwargs: Any) -> None: + tests_root = kwargs["tests_root"] + path = kwargs["path"] + assert tests_root is not None + + update_spec(tests_root, + path, + kwargs["url_base"], + cache_root=kwargs["cache_root"], + parallel=kwargs["parallel"]) + + +def abs_path(path: str) -> str: + return os.path.abspath(os.path.expanduser(path)) + + +def create_parser() -> argparse.ArgumentParser: + parser = argparse.ArgumentParser() + parser.add_argument( + "-v", "--verbose", dest="verbose", action="store_true", default=False, + help="Turn on verbose logging") + parser.add_argument( + "-p", "--path", type=abs_path, help="Path to manifest file.") + parser.add_argument( + "--tests-root", type=abs_path, default=wpt_root, help="Path to root of tests.") + parser.add_argument( + "--url-base", action="store", default="/", + help="Base url to use as the mount point for tests in this manifest.") + parser.add_argument( + "--cache-root", action="store", default=os.path.join(wpt_root, ".wptcache"), + help="Path in which to store any caches (default <tests_root>/.wptcache/)") + 
parser.add_argument( + "--no-parallel", dest="parallel", action="store_false", default=True, + help="Do not parallelize building the manifest") + return parser + + +def run(*args: Any, **kwargs: Any) -> None: + if kwargs["path"] is None: + kwargs["path"] = os.path.join(kwargs["tests_root"], "SPEC_MANIFEST.json") + if kwargs["verbose"]: + enable_debug_logging() + update_from_cli(**kwargs) diff --git a/testing/web-platform/tests/tools/manifest/testpaths.py b/testing/web-platform/tests/tools/manifest/testpaths.py new file mode 100644 index 0000000000..2fa5393826 --- /dev/null +++ b/testing/web-platform/tests/tools/manifest/testpaths.py @@ -0,0 +1,98 @@ +import argparse +import json +import os +from collections import defaultdict +from typing import Any, Dict, Iterable, List, Text + +from .manifest import load_and_update, Manifest +from .log import get_logger + +wpt_root = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir)) + +logger = get_logger() + + +def abs_path(path: str) -> str: + return os.path.abspath(os.path.expanduser(path)) + + +def create_parser() -> argparse.ArgumentParser: + parser = argparse.ArgumentParser() + parser.add_argument( + "-p", "--path", type=abs_path, help="Path to manifest file.") + parser.add_argument( + "--src-root", type=abs_path, default=None, help="Path to root of sourcetree.") + parser.add_argument( + "--tests-root", type=abs_path, default=wpt_root, help="Path to root of tests.") + parser.add_argument( + "--no-update", dest="update", action="store_false", default=True, + help="Don't update manifest before continuing") + parser.add_argument( + "-r", "--rebuild", action="store_true", default=False, + help="Force a full rebuild of the manifest.") + parser.add_argument( + "--url-base", action="store", default="/", + help="Base url to use as the mount point for tests in this manifest.") + parser.add_argument( + "--cache-root", action="store", default=os.path.join(wpt_root, ".wptcache"), + help="Path in which to 
store any caches (default <tests_root>/.wptcache/)") + parser.add_argument( + "--json", action="store_true", default=False, + help="Output as JSON") + parser.add_argument( + "test_ids", action="store", nargs="+", + help="Test ids for which to get paths") + return parser + + +def get_path_id_map(src_root: Text, tests_root: Text, manifest_file: Manifest, test_ids: Iterable[Text]) -> Dict[Text, List[Text]]: + test_ids = set(test_ids) + path_id_map: Dict[Text, List[Text]] = defaultdict(list) + + compute_rel_path = src_root != tests_root + + for item_type, path, tests in manifest_file: + for test in tests: + if test.id in test_ids: + if compute_rel_path: + rel_path = os.path.relpath(os.path.join(tests_root, path), + src_root) + else: + rel_path = path + path_id_map[rel_path].append(test.id) + return path_id_map + + +def get_paths(**kwargs: Any) -> Dict[Text, List[Text]]: + tests_root = kwargs["tests_root"] + assert tests_root is not None + path = kwargs["path"] + if path is None: + path = os.path.join(kwargs["tests_root"], "MANIFEST.json") + src_root = kwargs["src_root"] + if src_root is None: + src_root = tests_root + + manifest_file = load_and_update(tests_root, + path, + kwargs["url_base"], + update=kwargs["update"], + rebuild=kwargs["rebuild"], + cache_root=kwargs["cache_root"]) + + return get_path_id_map(src_root, tests_root, manifest_file, kwargs["test_ids"]) + + +def write_output(path_id_map: Dict[Text, List[Text]], as_json: bool) -> None: + if as_json: + print(json.dumps(path_id_map)) + else: + for path, test_ids in sorted(path_id_map.items()): + print(path) + for test_id in sorted(test_ids): + print(" " + test_id) + + +def run(**kwargs: Any) -> None: + path_id_map = get_paths(**kwargs) + write_output(path_id_map, as_json=kwargs["json"]) diff --git a/testing/web-platform/tests/tools/manifest/tests/__init__.py b/testing/web-platform/tests/tools/manifest/tests/__init__.py new file mode 100644 index 0000000000..e69de29bb2 --- /dev/null +++ 
b/testing/web-platform/tests/tools/manifest/tests/__init__.py diff --git a/testing/web-platform/tests/tools/manifest/tests/test_XMLParser.py b/testing/web-platform/tests/tools/manifest/tests/test_XMLParser.py new file mode 100644 index 0000000000..d2d349d11e --- /dev/null +++ b/testing/web-platform/tests/tools/manifest/tests/test_XMLParser.py @@ -0,0 +1,56 @@ +# mypy: allow-untyped-defs + +from xml.etree.ElementTree import ParseError + +import pytest + +from ..XMLParser import XMLParser + + +@pytest.mark.parametrize("s", [ + '<foo> </foo>', + '<!DOCTYPE foo><foo> </foo>', + '<!DOCTYPE foo PUBLIC "fake" "id"><foo> </foo>', + '<!DOCTYPE foo PUBLIC "fake" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"><foo> </foo>', + '<!DOCTYPE foo PUBLIC "fake-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"><foo> </foo>' +]) +def test_undefined_entity(s): + with pytest.raises(ParseError): + p = XMLParser() + p.feed(s) + p.close() + + +@pytest.mark.parametrize("s", [ + '<!DOCTYPE foo PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"><foo> </foo>' +]) +def test_defined_entity(s): + p = XMLParser() + p.feed(s) + d = p.close() + assert d.tag == "foo" + assert d.text == "\u00A0" + + +def test_pi(): + p = XMLParser() + p.feed('<foo><?foo bar?></foo>') + d = p.close() + assert d.tag == "foo" + assert len(d) == 0 + + +def test_comment(): + p = XMLParser() + p.feed('<foo><!-- data --></foo>') + d = p.close() + assert d.tag == "foo" + assert len(d) == 0 + + +def test_unsupported_encoding(): + p = XMLParser() + p.feed("<?xml version='1.0' encoding='Shift-JIS'?><foo>\u3044</foo>".encode("shift-jis")) + d = p.close() + assert d.tag == "foo" + assert d.text == "\u3044" diff --git a/testing/web-platform/tests/tools/manifest/tests/test_item.py b/testing/web-platform/tests/tools/manifest/tests/test_item.py new file mode 100644 index 0000000000..7640e9262c --- /dev/null +++ 
b/testing/web-platform/tests/tools/manifest/tests/test_item.py @@ -0,0 +1,160 @@ +# mypy: allow-untyped-defs + +import inspect +import json + +import pytest + +from ..manifest import Manifest +# Prevent pytest from treating TestharnessTest as a test class +from ..item import TestharnessTest as HarnessTest +from ..item import RefTest, item_types + + +@pytest.mark.parametrize("path", [ + "a.https.c", + "a.b.https.c", + "a.https.b.c", + "a.b.https.c.d", + "a.serviceworker.c", + "a.b.serviceworker.c", + "a.serviceworker.b.c", + "a.b.serviceworker.c.d", +]) +def test_url_https(path): + m = HarnessTest("/foo", "bar/" + path, "/", "bar/" + path) + + assert m.https is True + + +@pytest.mark.parametrize("path", [ + "https", + "a.https", + "a.b.https", + "https.a", + "https.a.b", + "a.bhttps.c", + "a.httpsb.c", + "serviceworker", + "a.serviceworker", + "a.b.serviceworker", + "serviceworker.a", + "serviceworker.a.b", + "a.bserviceworker.c", + "a.serviceworkerb.c", +]) +def test_url_not_https(path): + m = HarnessTest("/foo", "bar/" + path, "/", "bar/" + path) + + assert m.https is False + + +@pytest.mark.parametrize("path", [ + "a.www.c", + "a.b.www.c", + "a.www.b.c", + "a.b.www.c.d", + "a.https.www.c", + "a.b.https.www.c", + "a.https.www.b.c", + "a.b.https.www.c.d", +]) +def test_url_subdomain(path): + m = HarnessTest("/foo", "bar/" + path, "/", "bar/" + path) + + assert m.subdomain is True + + +@pytest.mark.parametrize("path", [ + "www", + "a.www", + "a.b.www", + "www.a", + "www.a.b", + "a.bwwww.c", + "a.wwwwb.c", +]) +def test_url_not_subdomain(path): + m = HarnessTest("/foo", "bar/" + path, "/", "bar/" + path) + + assert m.subdomain is False + + +@pytest.mark.parametrize("fuzzy", [ + {('/foo/test.html', '/foo/ref.html', '=='): [[1, 1], [200, 200]]}, + {('/foo/test.html', '/foo/ref.html', '=='): [[0, 1], [100, 200]]}, + {None: [[0, 1], [100, 200]]}, + {None: [[1, 1], [200, 200]]}, +]) +def test_reftest_fuzzy(fuzzy): + t = RefTest('/', + 'foo/test.html', + '/', + 
'foo/test.html', + [('/foo/ref.html', '==')], + fuzzy=fuzzy) + assert fuzzy == t.fuzzy + + json_obj = t.to_json() + + m = Manifest("/", "/") + t2 = RefTest.from_json(m, t.path, json_obj) + assert fuzzy == t2.fuzzy + + # test the roundtrip case, given tuples become lists + roundtrip = json.loads(json.dumps(json_obj)) + t3 = RefTest.from_json(m, t.path, roundtrip) + assert fuzzy == t3.fuzzy + + +@pytest.mark.parametrize("fuzzy", [ + {('/foo/test.html', '/foo/ref-2.html', '=='): [[0, 1], [100, 200]]}, + {None: [[1, 1], [200, 200]], ('/foo/test.html', '/foo/ref-2.html', '=='): [[0, 1], [100, 200]]}, +]) +def test_reftest_fuzzy_multi(fuzzy): + t = RefTest('/', + 'foo/test.html', + '/', + 'foo/test.html', + [('/foo/ref-1.html', '=='), ('/foo/ref-2.html', '==')], + fuzzy=fuzzy) + assert fuzzy == t.fuzzy + + json_obj = t.to_json() + + m = Manifest("/", "/") + t2 = RefTest.from_json(m, t.path, json_obj) + assert fuzzy == t2.fuzzy + + # test the roundtrip case, given tuples become lists + roundtrip = json.loads(json.dumps(json_obj)) + t3 = RefTest.from_json(m, t.path, roundtrip) + assert fuzzy == t3.fuzzy + + +def test_item_types(): + for key, value in item_types.items(): + assert isinstance(key, str) + assert not inspect.isabstract(value) + + +def test_wpt_flags(): + m1 = HarnessTest("/foo", "bar", "/", "bar" + "?wpt_flags=www") + assert m1.subdomain is True + assert m1.https is False + assert m1.h2 is False + + m2 = HarnessTest("/foo", "bar", "/", "bar" + "?wpt_flags=https") + assert m2.subdomain is False + assert m2.https is True + assert m2.h2 is False + + m3 = HarnessTest("/foo", "bar", "/", "bar" + "?wpt_flags=h2") + assert m3.subdomain is False + assert m3.https is False + assert m3.h2 is True + + m4 = HarnessTest("/foo", "bar", "/", "bar" + "?wpt_flags=https&wpt_flags=www") + assert m4.subdomain is True + assert m4.https is True + assert m4.h2 is False diff --git a/testing/web-platform/tests/tools/manifest/tests/test_manifest.py 
b/testing/web-platform/tests/tools/manifest/tests/test_manifest.py new file mode 100644 index 0000000000..fc2314b835 --- /dev/null +++ b/testing/web-platform/tests/tools/manifest/tests/test_manifest.py @@ -0,0 +1,337 @@ +# mypy: ignore-errors + +import os +import sys +from unittest import mock + +import hypothesis as h +import hypothesis.strategies as hs +import pytest + +from .. import manifest, sourcefile, item, utils + +from typing import Any, Type + + +def SourceFileWithTest(path: str, hash: str, cls: Type[item.ManifestItem], **kwargs: Any) -> sourcefile.SourceFile: + rel_path_parts = tuple(path.split(os.path.sep)) + s = mock.Mock(rel_path=path, + rel_path_parts=rel_path_parts, + hash=hash) + if cls == item.SupportFile: + test = cls("/foobar", path) + else: + assert issubclass(cls, item.URLManifestItem) + test = cls("/foobar", path, "/", utils.from_os_path(path), **kwargs) + s.manifest_items = mock.Mock(return_value=(cls.item_type, [test])) + return s # type: ignore + + +def SourceFileWithTests(path: str, hash: str, cls: Type[item.URLManifestItem], variants: Any) -> sourcefile.SourceFile: + rel_path_parts = tuple(path.split(os.path.sep)) + s = mock.Mock(rel_path=path, + rel_path_parts=rel_path_parts, + hash=hash) + tests = [cls("/foobar", path, "/", item[0], **item[1]) for item in variants] + s.manifest_items = mock.Mock(return_value=(cls.item_type, tests)) + return s # type: ignore + + +def tree_and_sourcefile_mocks(source_files): + paths_dict = {} + tree = [] + for source_file, file_hash, updated in source_files: + paths_dict[source_file.rel_path] = source_file + tree.append([source_file.rel_path, file_hash, updated]) + + def MockSourceFile(tests_root, path, url_base, file_hash): + return paths_dict[path] + + return tree, MockSourceFile + + +@hs.composite +def sourcefile_strategy(draw): + item_classes = [item.TestharnessTest, item.RefTest, item.PrintRefTest, + item.ManualTest, item.WebDriverSpecTest, + item.ConformanceCheckerTest, item.SupportFile] + cls = 
draw(hs.sampled_from(item_classes)) + + path = "a" + rel_path_parts = tuple(path.split(os.path.sep)) + hash = draw(hs.text(alphabet="0123456789abcdef", min_size=40, max_size=40)) + s = mock.Mock(rel_path=path, + rel_path_parts=rel_path_parts, + hash=hash) + + if cls in (item.RefTest, item.PrintRefTest): + ref_path = "b" + ref_eq = draw(hs.sampled_from(["==", "!="])) + test = cls("/foobar", path, "/", utils.from_os_path(path), references=[(utils.from_os_path(ref_path), ref_eq)]) + elif cls is item.SupportFile: + test = cls("/foobar", path) + else: + test = cls("/foobar", path, "/", "foobar") + + s.manifest_items = mock.Mock(return_value=(cls.item_type, [test])) + return s + + +@hs.composite +def manifest_tree(draw): + names = hs.text(alphabet=hs.characters(blacklist_characters="\0/\\:*\"?<>|"), min_size=1) + tree = hs.recursive(sourcefile_strategy(), + lambda children: hs.dictionaries(names, children, min_size=1), + max_leaves=10) + + generated_root = draw(tree) + h.assume(isinstance(generated_root, dict)) + + reftest_urls = [] + output = [] + stack = [((k,), v) for k, v in generated_root.items()] + while stack: + path, node = stack.pop() + if isinstance(node, dict): + stack.extend((path + (k,), v) for k, v in node.items()) + else: + rel_path = os.path.sep.join(path) + node.rel_path = rel_path + node.rel_path_parts = tuple(path) + for test_item in node.manifest_items.return_value[1]: + test_item.path = rel_path + if isinstance(test_item, item.RefTest): + if reftest_urls: + possible_urls = hs.sampled_from(reftest_urls) | names + else: + possible_urls = names + reference = hs.tuples(hs.sampled_from(["==", "!="]), + possible_urls) + references = hs.lists(reference, min_size=1, unique=True) + test_item.references = draw(references) + reftest_urls.append(test_item.url) + output.append(node) + + return output + + +@pytest.mark.skipif(sys.version_info[:3] in ((3, 10, 10), (3, 11, 2)), + reason="https://github.com/python/cpython/issues/102126") +@h.given(manifest_tree()) +# 
FIXME: Workaround for https://github.com/web-platform-tests/wpt/issues/22758 +@h.settings(suppress_health_check=(h.HealthCheck.too_slow,)) +@h.example([SourceFileWithTest("a", "0"*40, item.ConformanceCheckerTest)]) +def test_manifest_to_json(s): + m = manifest.Manifest("") + + tree, sourcefile_mock = tree_and_sourcefile_mocks((item, None, True) for item in s) + with mock.patch("tools.manifest.manifest.SourceFile", side_effect=sourcefile_mock): + assert m.update(tree) is True + + json_str = m.to_json() + loaded = manifest.Manifest.from_json("/", json_str) + + assert list(loaded) == list(m) + + assert loaded.to_json() == json_str + + +@pytest.mark.skipif(sys.version_info[:3] in ((3, 10, 10), (3, 11, 2)), + reason="https://github.com/python/cpython/issues/102126") +@h.given(manifest_tree()) +# FIXME: Workaround for https://github.com/web-platform-tests/wpt/issues/22758 +@h.settings(suppress_health_check=(h.HealthCheck.too_slow,)) +@h.example([SourceFileWithTest("a", "0"*40, item.TestharnessTest)]) +@h.example([SourceFileWithTest("a", "0"*40, item.RefTest, references=[("/aa", "==")])]) +def test_manifest_idempotent(s): + m = manifest.Manifest("") + + tree, sourcefile_mock = tree_and_sourcefile_mocks((item, None, True) for item in s) + with mock.patch("tools.manifest.manifest.SourceFile", side_effect=sourcefile_mock): + assert m.update(tree) is True + + m1 = list(m) + + with mock.patch("tools.manifest.manifest.SourceFile", side_effect=sourcefile_mock): + assert m.update(tree) is False + + assert list(m) == m1 + + +def test_manifest_to_json_forwardslash(): + m = manifest.Manifest("") + + s = SourceFileWithTest("a" + os.path.sep + "b", "0"*40, item.TestharnessTest) + + tree, sourcefile_mock = tree_and_sourcefile_mocks([(s, None, True)]) + with mock.patch("tools.manifest.manifest.SourceFile", side_effect=sourcefile_mock): + assert m.update(tree) is True + + assert m.to_json() == { + 'version': 8, + 'url_base': '/', + 'items': { + 'testharness': {'a': {'b': [ + 
'0000000000000000000000000000000000000000', + (None, {}) + ]}}, + } + } + + +def test_reftest_computation_chain(): + m = manifest.Manifest("") + + s1 = SourceFileWithTest("test1", "0"*40, item.RefTest, references=[("/test2", "==")]) + s2 = SourceFileWithTest("test2", "0"*40, item.RefTest, references=[("/test3", "==")]) + + tree, sourcefile_mock = tree_and_sourcefile_mocks([(s1, None, True), (s2, None, True)]) + with mock.patch("tools.manifest.manifest.SourceFile", side_effect=sourcefile_mock): + m.update(tree) + + test1 = s1.manifest_items()[1][0] + test2 = s2.manifest_items()[1][0] + + assert list(m) == [("reftest", test1.path, {test1}), + ("reftest", test2.path, {test2})] + + +def test_iterpath(): + m = manifest.Manifest("") + + sources = [SourceFileWithTest("test1", "0"*40, item.RefTest, references=[("/test1-ref", "==")]), + SourceFileWithTests("test2", "1"*40, item.TestharnessTest, [("test2-1.html", {}), + ("test2-2.html", {})]), + SourceFileWithTest("test3", "0"*40, item.TestharnessTest)] + tree, sourcefile_mock = tree_and_sourcefile_mocks((item, None, True) for item in sources) + assert len(tree) == len(sources) + with mock.patch("tools.manifest.manifest.SourceFile", side_effect=sourcefile_mock): + m.update(tree) + + assert {item.url for item in m.iterpath("test2")} == {"/test2-1.html", + "/test2-2.html"} + assert set(m.iterpath("missing")) == set() + + +def test_no_update(): + m = manifest.Manifest("") + + s1 = SourceFileWithTest("test1", "0"*40, item.TestharnessTest) + s2 = SourceFileWithTest("test2", "0"*40, item.TestharnessTest) + + tree, sourcefile_mock = tree_and_sourcefile_mocks((item, None, True) for item in [s1, s2]) + with mock.patch("tools.manifest.manifest.SourceFile", side_effect=sourcefile_mock): + m.update(tree) + + test1 = s1.manifest_items()[1][0] + test2 = s2.manifest_items()[1][0] + + assert list(m) == [("testharness", test1.path, {test1}), + ("testharness", test2.path, {test2})] + + s1_1 = SourceFileWithTest("test1", "1"*40, 
item.ManualTest) + + tree, sourcefile_mock = tree_and_sourcefile_mocks([(s1_1, None, True), (s2, None, False)]) + with mock.patch("tools.manifest.manifest.SourceFile", side_effect=sourcefile_mock): + m.update(tree) + + test1_1 = s1_1.manifest_items()[1][0] + + assert list(m) == [("manual", test1_1.path, {test1_1}), + ("testharness", test2.path, {test2})] + + +def test_no_update_delete(): + m = manifest.Manifest("") + + s1 = SourceFileWithTest("test1", "0"*40, item.TestharnessTest) + s2 = SourceFileWithTest("test2", "0"*40, item.TestharnessTest) + + tree, sourcefile_mock = tree_and_sourcefile_mocks([(s1, None, True), (s2, None, True)]) + with mock.patch("tools.manifest.manifest.SourceFile", side_effect=sourcefile_mock): + m.update(tree) + + test1 = s1.manifest_items()[1][0] + + tree, sourcefile_mock = tree_and_sourcefile_mocks([(s1, None, False)]) + with mock.patch("tools.manifest.manifest.SourceFile", side_effect=sourcefile_mock): + m.update(tree) + + assert list(m) == [("testharness", test1.path, {test1})] + + +def test_update_from_json(): + m = manifest.Manifest("") + + s1 = SourceFileWithTest("test1", "0"*40, item.TestharnessTest) + s2 = SourceFileWithTest("test2", "0"*40, item.TestharnessTest) + + tree, sourcefile_mock = tree_and_sourcefile_mocks([(s1, None, True), (s2, None, True)]) + with mock.patch("tools.manifest.manifest.SourceFile", side_effect=sourcefile_mock): + m.update(tree) + + json_str = m.to_json() + m = manifest.Manifest.from_json("/", json_str) + + tree, sourcefile_mock = tree_and_sourcefile_mocks([(s1, None, True)]) + with mock.patch("tools.manifest.manifest.SourceFile", side_effect=sourcefile_mock): + m.update(tree) + + test1 = s1.manifest_items()[1][0] + + assert list(m) == [("testharness", test1.path, {test1})] + + +def test_update_from_json_modified(): + # Create the original manifest + m = manifest.Manifest("") + s1 = SourceFileWithTest("test1", "0"*40, item.TestharnessTest) + tree, sourcefile_mock = tree_and_sourcefile_mocks([(s1, None, 
True)]) + with mock.patch("tools.manifest.manifest.SourceFile", side_effect=sourcefile_mock): + m.update(tree) + json_str = m.to_json() + + # Reload it from JSON + m = manifest.Manifest.from_json("/", json_str) + + # Update it with timeout="long" + s2 = SourceFileWithTest("test1", "1"*40, item.TestharnessTest, timeout="long", pac="proxy.pac") + tree, sourcefile_mock = tree_and_sourcefile_mocks([(s2, None, True)]) + with mock.patch("tools.manifest.manifest.SourceFile", side_effect=sourcefile_mock): + m.update(tree) + json_str = m.to_json() + assert json_str == { + 'items': {'testharness': {'test1': [ + "1"*40, + (None, {'timeout': 'long', 'pac': 'proxy.pac'}) + ]}}, + 'url_base': '/', + 'version': 8 + } + +def test_manifest_spec_to_json(): + m = manifest.Manifest("") + + path = "a" + os.path.sep + "b" + hash = "0"*40 + rel_path_parts = tuple(path.split(os.path.sep)) + s = mock.Mock(rel_path=path, + rel_path_parts=rel_path_parts, + hash=hash) + spec = item.SpecItem("/foobar", path, ["specA"]) + s.manifest_spec_items = mock.Mock(return_value=(item.SpecItem.item_type, [spec])) + + tree, sourcefile_mock = tree_and_sourcefile_mocks([(s, None, True)]) + with mock.patch("tools.manifest.manifest.SourceFile", side_effect=sourcefile_mock): + assert m.update(tree, True, manifest.compute_manifest_spec_items) is True + + assert m.to_json() == { + 'version': 8, + 'url_base': '/', + 'items': { + 'spec': {'a': {'b': [ + '0000000000000000000000000000000000000000', + (None, {'spec_link1': 'specA'}) + ]}}, + } + } diff --git a/testing/web-platform/tests/tools/manifest/tests/test_sourcefile.py b/testing/web-platform/tests/tools/manifest/tests/test_sourcefile.py new file mode 100644 index 0000000000..8a9d8c36ee --- /dev/null +++ b/testing/web-platform/tests/tools/manifest/tests/test_sourcefile.py @@ -0,0 +1,962 @@ +# mypy: allow-untyped-defs + +import os + +import pytest + +from io import BytesIO +from ...lint.lint import check_global_metadata +from ..sourcefile import SourceFile, 
read_script_metadata, js_meta_re, python_meta_re + + +def create(filename, contents=b""): + assert isinstance(contents, bytes) + return SourceFile("/", filename, "/", contents=contents) + + +def items(s): + item_type, items = s.manifest_items() + if item_type == "support": + return [] + else: + return [(item_type, item.url) for item in items] + + +@pytest.mark.parametrize("rel_path", [ + ".gitignore", + ".travis.yml", + "MANIFEST.json", + "tools/test.html", + "resources/test.html", + "common/test.html", + "support/test.html", + "css21/archive/test.html", + "conformance-checkers/test.html", + "conformance-checkers/README.md", + "conformance-checkers/html/Makefile", + "conformance-checkers/html/test.html", + "foo/tools/test.html", + "foo/resources/test.html", + "foo/support/test.html", + "foo/foo-manual.html.headers", + "crashtests/foo.html.ini", + "css/common/test.html", + "css/CSS2/archive/test.html", + "css/WEB_FEATURES.yml", + "css/META.yml", +]) +def test_name_is_non_test(rel_path): + s = create(rel_path) + assert s.name_is_non_test or s.name_is_conformance_support + + assert not s.content_is_testharness + + assert items(s) == [] + + +@pytest.mark.parametrize("rel_path", [ + "foo/common/test.html", + "foo/conformance-checkers/test.html", + "foo/_certs/test.html", + "foo/css21/archive/test.html", + "foo/CSS2/archive/test.html", + "css/css21/archive/test.html", + "foo/test-support.html", +]) +def test_not_name_is_non_test(rel_path): + s = create(rel_path) + assert not (s.name_is_non_test or s.name_is_conformance_support) + # We aren't actually asserting what type of test these are, just their + # name doesn't prohibit them from being tests. 
+ + +@pytest.mark.parametrize("rel_path", [ + "foo/foo-manual.html", + "html/test-manual.html", + "html/test-manual.xhtml", + "html/test-manual.https.html", + "html/test-manual.https.xhtml" +]) +def test_name_is_manual(rel_path): + s = create(rel_path) + assert not s.name_is_non_test + assert s.name_is_manual + + assert not s.content_is_testharness + + assert items(s) == [("manual", "/" + rel_path)] + + +@pytest.mark.parametrize("rel_path", [ + "html/test-visual.html", + "html/test-visual.xhtml", +]) +def test_name_is_visual(rel_path): + s = create(rel_path) + assert not s.name_is_non_test + assert s.name_is_visual + + assert not s.content_is_testharness + + assert items(s) == [("visual", "/" + rel_path)] + + +@pytest.mark.parametrize("rel_path", [ + "css-namespaces-3/reftest/ref-lime-1.xml", + "css21/reference/pass_if_box_ahem.html", + "css21/csswg-issues/submitted/css2.1/reference/ref-green-box-100x100.xht", + "selectors-3/selectors-empty-001-ref.xml", + "css21/text/text-indent-wrap-001-notref-block-margin.xht", + "css21/text/text-indent-wrap-001-notref-block-margin.xht", + "css21/css-e-notation-ref-1.html", + "css21/floats/floats-placement-vertical-004-ref2.xht", + "css21/box/rtl-linebreak-notref1.xht", + "css21/box/rtl-linebreak-notref2.xht", + "html/canvas/element/drawing-images-to-the-canvas/drawimage_html_image_5_ref.html", + "html/canvas/element/line-styles/lineto_ref.html", + "html/rendering/non-replaced-elements/the-fieldset-element-0/ref.html" +]) +def test_name_is_reference(rel_path): + s = create(rel_path) + assert not s.name_is_non_test + assert s.name_is_reference + + assert not s.content_is_testharness + + assert items(s) == [] + + +def test_name_is_tentative(): + s = create("css/css-ui/appearance-revert-001.tentative.html") + assert s.name_is_tentative + + s = create("css/css-ui/tentative/appearance-revert-001.html") + assert s.name_is_tentative + + s = create("css/css-ui/appearance-revert-001.html") + assert not s.name_is_tentative + + 
+@pytest.mark.parametrize("rel_path", [ + "webdriver/tests/foo.py", + "webdriver/tests/print/foo.py", + "webdriver/tests/foo-crash.py", + "webdriver/tests/foo-visual.py", +]) +def test_name_is_webdriver(rel_path): + s = create(rel_path) + assert s.name_is_webdriver + + item_type, items = s.manifest_items() + assert item_type == "wdspec" + + +def test_worker(): + s = create("html/test.worker.js") + assert not s.name_is_non_test + assert not s.name_is_manual + assert not s.name_is_visual + assert not s.name_is_multi_global + assert s.name_is_worker + assert not s.name_is_window + assert not s.name_is_reference + + assert not s.content_is_testharness + + item_type, items = s.manifest_items() + assert item_type == "testharness" + + expected_urls = [ + "/html/test.worker.html", + ] + assert len(items) == len(expected_urls) + + for item, url in zip(items, expected_urls): + assert item.url == url + assert item.timeout is None + + +def test_window(): + s = create("html/test.window.js") + assert not s.name_is_non_test + assert not s.name_is_manual + assert not s.name_is_visual + assert not s.name_is_multi_global + assert not s.name_is_worker + assert s.name_is_window + assert not s.name_is_reference + + assert not s.content_is_testharness + + item_type, items = s.manifest_items() + assert item_type == "testharness" + + expected_urls = [ + "/html/test.window.html", + ] + assert len(items) == len(expected_urls) + + for item, url in zip(items, expected_urls): + assert item.url == url + assert item.timeout is None + + +def test_worker_long_timeout(): + contents = b"""// META: timeout=long +importScripts('/resources/testharness.js') +test()""" + + metadata = list(read_script_metadata(BytesIO(contents), js_meta_re)) + assert metadata == [("timeout", "long")] + + s = create("html/test.worker.js", contents=contents) + assert s.name_is_worker + + item_type, items = s.manifest_items() + assert item_type == "testharness" + + for item in items: + assert item.timeout == "long" + + +def 
test_window_long_timeout(): + contents = b"""// META: timeout=long +test()""" + + metadata = list(read_script_metadata(BytesIO(contents), js_meta_re)) + assert metadata == [("timeout", "long")] + + s = create("html/test.window.js", contents=contents) + assert s.name_is_window + + item_type, items = s.manifest_items() + assert item_type == "testharness" + + for item in items: + assert item.timeout == "long" + + +def test_worker_with_variants(): + contents = b"""// META: variant=?default +// META: variant=?wss +test()""" + + s = create("html/test.worker.js", contents=contents) + assert not s.name_is_non_test + assert not s.name_is_manual + assert not s.name_is_visual + assert not s.name_is_multi_global + assert s.name_is_worker + assert not s.name_is_window + assert not s.name_is_reference + + assert not s.content_is_testharness + + item_type, items = s.manifest_items() + assert item_type == "testharness" + + expected_urls = [ + "/html/test.worker.html" + suffix + for suffix in ["?default", "?wss"] + ] + assert len(items) == len(expected_urls) + + for item, url in zip(items, expected_urls): + assert item.url == url + assert item.timeout is None + + +def test_window_with_variants(): + contents = b"""// META: variant=?default +// META: variant=?wss +test()""" + + s = create("html/test.window.js", contents=contents) + assert not s.name_is_non_test + assert not s.name_is_manual + assert not s.name_is_visual + assert not s.name_is_multi_global + assert not s.name_is_worker + assert s.name_is_window + assert not s.name_is_reference + + assert not s.content_is_testharness + + item_type, items = s.manifest_items() + assert item_type == "testharness" + + expected_urls = [ + "/html/test.window.html" + suffix + for suffix in ["?default", "?wss"] + ] + assert len(items) == len(expected_urls) + + for item, url in zip(items, expected_urls): + assert item.url == url + assert item.timeout is None + + +def test_python_long_timeout(): + contents = b"""# META: timeout=long + +""" + + 
metadata = list(read_script_metadata(BytesIO(contents), + python_meta_re)) + assert metadata == [("timeout", "long")] + + s = create("webdriver/test.py", contents=contents) + assert s.name_is_webdriver + + item_type, items = s.manifest_items() + assert item_type == "wdspec" + + for item in items: + assert item.timeout == "long" + + +def test_multi_global(): + s = create("html/test.any.js") + assert not s.name_is_non_test + assert not s.name_is_manual + assert not s.name_is_visual + assert s.name_is_multi_global + assert not s.name_is_worker + assert not s.name_is_reference + + assert not s.content_is_testharness + + item_type, items = s.manifest_items() + assert item_type == "testharness" + + expected_urls = [ + "/html/test.any.html", + "/html/test.any.worker.html", + ] + assert len(items) == len(expected_urls) + + for item, url in zip(items, expected_urls): + assert item.url == url + assert item.timeout is None + + +def test_multi_global_long_timeout(): + contents = b"""// META: timeout=long +importScripts('/resources/testharness.js') +test()""" + + metadata = list(read_script_metadata(BytesIO(contents), js_meta_re)) + assert metadata == [("timeout", "long")] + + s = create("html/test.any.js", contents=contents) + assert s.name_is_multi_global + + item_type, items = s.manifest_items() + assert item_type == "testharness" + + for item in items: + assert item.timeout == "long" + + +@pytest.mark.parametrize("input,expected", [ + (b"window", {"window"}), + (b"sharedworker", {"sharedworker"}), + (b"sharedworker,serviceworker", {"serviceworker", "sharedworker"}), + (b"worker", {"dedicatedworker", "serviceworker", "sharedworker"}), +]) +def test_multi_global_with_custom_globals(input, expected): + contents = b"""// META: global=%s +test()""" % input + + assert list(check_global_metadata(input)) == [] + + s = create("html/test.any.js", contents=contents) + assert not s.name_is_non_test + assert not s.name_is_manual + assert not s.name_is_visual + assert 
s.name_is_multi_global + assert not s.name_is_worker + assert not s.name_is_reference + + assert not s.content_is_testharness + + item_type, items = s.manifest_items() + assert item_type == "testharness" + + urls = { + "dedicatedworker": "/html/test.any.worker.html", + "serviceworker": "/html/test.any.serviceworker.html", + "sharedworker": "/html/test.any.sharedworker.html", + "window": "/html/test.any.html", + } + + expected_urls = sorted(urls[ty] for ty in expected) + assert len(items) == len(expected_urls) + + for item, url in zip(items, expected_urls): + assert item.url == url + assert item.jsshell is False + assert item.timeout is None + + +def test_multi_global_with_jsshell_globals(): + contents = b"""// META: global=window,dedicatedworker,jsshell +test()""" + + s = create("html/test.any.js", contents=contents) + assert not s.name_is_non_test + assert not s.name_is_manual + assert not s.name_is_visual + assert s.name_is_multi_global + assert not s.name_is_worker + assert not s.name_is_reference + + assert not s.content_is_testharness + + item_type, items = s.manifest_items() + assert item_type == "testharness" + + expected = [ + ("/html/test.any.html", False), + ("/html/test.any.js", True), + ("/html/test.any.worker.html", False), + ] + assert len(items) == len(expected) + + for item, (url, jsshell) in zip(items, expected): + assert item.url == url + assert item.jsshell == jsshell + assert item.timeout is None + + +def test_multi_global_with_variants(): + contents = b"""// META: global=window,worker +// META: variant=?default +// META: variant=?wss +test()""" + + s = create("html/test.any.js", contents=contents) + assert not s.name_is_non_test + assert not s.name_is_manual + assert not s.name_is_visual + assert s.name_is_multi_global + assert not s.name_is_worker + assert not s.name_is_reference + + assert not s.content_is_testharness + + item_type, items = s.manifest_items() + assert item_type == "testharness" + + urls = { + "dedicatedworker": 
"/html/test.any.worker.html", + "serviceworker": "/html/test.any.serviceworker.html", + "sharedworker": "/html/test.any.sharedworker.html", + "window": "/html/test.any.html", + } + + expected_urls = sorted( + urls[ty] + suffix + for ty in ["dedicatedworker", "serviceworker", "sharedworker", "window"] + for suffix in ["?default", "?wss"] + ) + assert len(items) == len(expected_urls) + + for item, url in zip(items, expected_urls): + assert item.url == url + assert item.timeout is None + + +@pytest.mark.parametrize("input,expected", [ + (b"""//META: foo=bar\n""", [("foo", "bar")]), + (b"""// META: foo=bar\n""", [("foo", "bar")]), + (b"""// META: foo=bar\n""", [("foo", "bar")]), + (b"""\n// META: foo=bar\n""", []), + (b""" // META: foo=bar\n""", []), + (b"""// META: foo=bar\n// META: baz=quux\n""", [("foo", "bar"), ("baz", "quux")]), + (b"""// META: foo=bar\n\n// META: baz=quux\n""", [("foo", "bar")]), + (b"""// META: foo=bar\n// Start of the test\n// META: baz=quux\n""", [("foo", "bar")]), + (b"""// META:\n""", []), + (b"""// META: foobar\n""", []), +]) +def test_script_metadata(input, expected): + metadata = read_script_metadata(BytesIO(input), js_meta_re) + assert list(metadata) == expected + + +@pytest.mark.parametrize("ext", ["htm", "html"]) +def test_testharness(ext): + content = b"<script src=/resources/testharness.js></script>" + + filename = "html/test." 
+ ext + s = create(filename, content) + + assert not s.name_is_non_test + assert not s.name_is_manual + assert not s.name_is_visual + assert not s.name_is_multi_global + assert not s.name_is_worker + assert not s.name_is_reference + + assert s.content_is_testharness + + assert items(s) == [("testharness", "/" + filename)] + + +@pytest.mark.parametrize("variant", ["", "?foo", "#bar", "?foo#bar"]) +def test_testharness_variant(variant): + content = (b"<meta name=variant content=\"%s\">" % variant.encode("utf-8") + + b"<meta name=variant content=\"?fixed\">" + + b"<script src=/resources/testharness.js></script>") + + filename = "html/test.html" + s = create(filename, content) + + s.test_variants = [variant, "?fixed"] + + +@pytest.mark.parametrize("variant", ["?", "#", "?#bar"]) +def test_testharness_variant_invalid(variant): + content = (b"<meta name=variant content=\"%s\">" % variant.encode("utf-8") + + b"<meta name=variant content=\"?fixed\">" + + b"<script src=/resources/testharness.js></script>") + + filename = "html/test.html" + s = create(filename, content) + + with pytest.raises(ValueError): + s.test_variants + + +def test_reftest_variant(): + content = (b"<meta name=variant content=\"?first\">" + + b"<meta name=variant content=\"?second\">" + + b"<link rel=\"match\" href=\"ref.html\">") + + s = create("html/test.html", contents=content) + assert not s.name_is_non_test + assert not s.name_is_manual + assert not s.name_is_visual + assert not s.name_is_worker + assert not s.name_is_reference + + item_type, items = s.manifest_items() + assert item_type == "reftest" + + actual_tests = [ + {"url": item.url, "refs": item.references} + for item in items + ] + + expected_tests = [ + { + "url": "/html/test.html?first", + "refs": [("/html/ref.html?first", "==")], + }, + { + "url": "/html/test.html?second", + "refs": [("/html/ref.html?second", "==")], + }, + ] + + assert actual_tests == expected_tests + + +@pytest.mark.parametrize("ext", ["htm", "html"]) +def 
test_relative_testharness(ext): + content = b"<script src=../resources/testharness.js></script>" + + filename = "html/test." + ext + s = create(filename, content) + + assert not s.name_is_non_test + assert not s.name_is_manual + assert not s.name_is_visual + assert not s.name_is_multi_global + assert not s.name_is_worker + assert not s.name_is_reference + + assert not s.content_is_testharness + + assert items(s) == [] + + +@pytest.mark.parametrize("ext", ["xhtml", "xht", "xml"]) +def test_testharness_xhtml(ext): + content = b""" +<html xmlns="http://www.w3.org/1999/xhtml"> +<head> +<script src="/resources/testharness.js"></script> +<script src="/resources/testharnessreport.js"></script> +</head> +<body/> +</html> +""" + + filename = "html/test." + ext + s = create(filename, content) + + assert not s.name_is_non_test + assert not s.name_is_manual + assert not s.name_is_visual + assert not s.name_is_multi_global + assert not s.name_is_worker + assert not s.name_is_reference + + assert s.content_is_testharness + + assert items(s) == [("testharness", "/" + filename)] + + +@pytest.mark.parametrize("ext", ["xhtml", "xht", "xml"]) +def test_relative_testharness_xhtml(ext): + content = b""" +<html xmlns="http://www.w3.org/1999/xhtml"> +<head> +<script src="../resources/testharness.js"></script> +<script src="../resources/testharnessreport.js"></script> +</head> +<body/> +</html> +""" + + filename = "html/test." 
+ ext + s = create(filename, content) + + assert not s.name_is_non_test + assert not s.name_is_manual + assert not s.name_is_visual + assert not s.name_is_multi_global + assert not s.name_is_worker + assert not s.name_is_reference + + assert not s.content_is_testharness + + assert items(s) == [] + + +def test_testharness_svg(): + content = b"""\ +<?xml version="1.0" encoding="UTF-8"?> +<svg xmlns="http://www.w3.org/2000/svg" + xmlns:h="http://www.w3.org/1999/xhtml" + version="1.1" + width="100%" height="100%" viewBox="0 0 400 400"> +<title>Null test</title> +<h:script src="/resources/testharness.js"/> +<h:script src="/resources/testharnessreport.js"/> +</svg> +""" + + filename = "html/test.svg" + s = create(filename, content) + + assert not s.name_is_non_test + assert not s.name_is_manual + assert not s.name_is_visual + assert not s.name_is_multi_global + assert not s.name_is_worker + assert not s.name_is_reference + + assert s.root is not None + assert s.content_is_testharness + + assert items(s) == [("testharness", "/" + filename)] + + +def test_relative_testharness_svg(): + content = b"""\ +<?xml version="1.0" encoding="UTF-8"?> +<svg xmlns="http://www.w3.org/2000/svg" + xmlns:h="http://www.w3.org/1999/xhtml" + version="1.1" + width="100%" height="100%" viewBox="0 0 400 400"> +<title>Null test</title> +<h:script src="../resources/testharness.js"/> +<h:script src="../resources/testharnessreport.js"/> +</svg> +""" + + filename = "html/test.svg" + s = create(filename, content) + + assert not s.name_is_non_test + assert not s.name_is_manual + assert not s.name_is_visual + assert not s.name_is_multi_global + assert not s.name_is_worker + assert not s.name_is_reference + + assert s.root is not None + assert not s.content_is_testharness + + assert items(s) == [] + + +@pytest.mark.parametrize("filename", ["test", "test.test"]) +def test_testharness_ext(filename): + content = b"<script src=/resources/testharness.js></script>" + + s = create("html/" + filename, content) + 
+ assert not s.name_is_non_test + assert not s.name_is_manual + assert not s.name_is_visual + assert not s.name_is_multi_global + assert not s.name_is_worker + assert not s.name_is_reference + + assert not s.root + assert not s.content_is_testharness + + assert items(s) == [] + + +@pytest.mark.parametrize("ext", ["htm", "html"]) +def test_testdriver(ext): + content = b"<script src=/resources/testdriver.js></script>" + + filename = "html/test." + ext + s = create(filename, content) + + assert s.has_testdriver + + +@pytest.mark.parametrize("ext", ["htm", "html"]) +def test_relative_testdriver(ext): + content = b"<script src=../resources/testdriver.js></script>" + + filename = "html/test." + ext + s = create(filename, content) + + assert not s.has_testdriver + + +@pytest.mark.parametrize("ext", ["htm", "html"]) +def test_reftest(ext): + content = b"<link rel=match href=ref.html>" + + filename = "foo/test." + ext + s = create(filename, content) + + assert not s.name_is_non_test + assert not s.name_is_manual + assert not s.name_is_visual + assert not s.name_is_multi_global + assert not s.name_is_worker + assert not s.name_is_reference + assert not s.content_is_testharness + + assert s.content_is_ref_node + + assert items(s) == [("reftest", "/" + filename)] + + +@pytest.mark.parametrize("ext", ["xht", "html", "xhtml", "htm", "xml", "svg"]) +def test_css_visual(ext): + content = b""" +<html xmlns="http://www.w3.org/1999/xhtml"> +<head> +<link rel="help" href="http://www.w3.org/TR/CSS21/box.html#bidi-box-model"/> +</head> +<body></body> +</html> +""" + + filename = "html/test." 
+ ext + s = create(filename, content) + + assert not s.name_is_non_test + assert not s.name_is_manual + assert not s.name_is_visual + assert not s.name_is_multi_global + assert not s.name_is_worker + assert not s.name_is_reference + assert not s.content_is_testharness + assert not s.content_is_ref_node + + assert s.content_is_css_visual + + assert items(s) == [("visual", "/" + filename)] + + +@pytest.mark.parametrize("ext", ["xht", "xhtml", "xml"]) +def test_xhtml_with_entity(ext): + content = b""" +<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" + "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"> +<html xmlns="http://www.w3.org/1999/xhtml"> + +</html> +""" + + filename = "html/test." + ext + s = create(filename, content) + + assert s.root is not None + + assert items(s) == [] + + +def test_no_parse(): + s = create("foo/bar.xml", "\uFFFF".encode("utf-8")) + + assert not s.name_is_non_test + assert not s.name_is_manual + assert not s.name_is_visual + assert not s.name_is_multi_global + assert not s.name_is_worker + assert not s.name_is_reference + assert not s.content_is_testharness + assert not s.content_is_ref_node + assert not s.content_is_css_visual + + assert items(s) == [] + + +@pytest.mark.parametrize("input,expected", [ + ("aA", "aA"), + ("a/b", "a/b" if os.name != "nt" else "a\\b"), + ("a\\b", "a\\b") +]) +def test_relpath_normalized(input, expected): + s = create(input, b"") + assert s.rel_path == expected + + +@pytest.mark.parametrize("url", [b"ref.html", + b"\x20ref.html", + b"ref.html\x20", + b"\x09\x0a\x0c\x0d\x20ref.html\x09\x0a\x0c\x0d\x20"]) +def test_reftest_url_whitespace(url): + content = b"<link rel=match href='%s'>" % url + s = create("foo/test.html", content) + assert s.references == [("/foo/ref.html", "==")] + + +@pytest.mark.parametrize("url", [b"http://example.com/", + b"\x20http://example.com/", + b"http://example.com/\x20", + b"\x09\x0a\x0c\x0d\x20http://example.com/\x09\x0a\x0c\x0d\x20"]) +def test_spec_links_whitespace(url): 
+ content = b"<link rel=help href='%s'>" % url + s = create("foo/test.html", content) + assert s.spec_links == {"http://example.com/"} + + +@pytest.mark.parametrize("input,expected", [ + (b"""<link rel="help" title="Intel" href="foo">\n""", ["foo"]), + (b"""<link rel=help title="Intel" href="foo">\n""", ["foo"]), + (b"""<link rel=help href="foo" >\n""", ["foo"]), + (b"""<link rel="author" href="foo">\n""", []), + (b"""<link href="foo">\n""", []), + (b"""<link rel="help" href="foo">\n<link rel="help" href="bar">\n""", ["foo", "bar"]), + (b"""<link rel="help" href="foo">\n<script>\n""", ["foo"]), + (b"""random\n""", []), +]) +def test_spec_links_complex(input, expected): + s = create("foo/test.html", input) + assert s.spec_links == set(expected) + + +def test_url_base(): + contents = b"""// META: global=window,worker +// META: variant=?default +// META: variant=?wss +test()""" + + s = SourceFile("/", "html/test.any.js", "/_fake_base/", contents=contents) + item_type, items = s.manifest_items() + + assert item_type == "testharness" + + assert [item.url for item in items] == ['/_fake_base/html/test.any.html?default', + '/_fake_base/html/test.any.html?wss', + '/_fake_base/html/test.any.serviceworker.html?default', + '/_fake_base/html/test.any.serviceworker.html?wss', + '/_fake_base/html/test.any.sharedworker.html?default', + '/_fake_base/html/test.any.sharedworker.html?wss', + '/_fake_base/html/test.any.worker.html?default', + '/_fake_base/html/test.any.worker.html?wss'] + + assert items[0].url_base == "/_fake_base/" + + +@pytest.mark.parametrize("fuzzy, expected", [ + (b"ref.html:1;200", {("/foo/test.html", "/foo/ref.html", "=="): [[1, 1], [200, 200]]}), + (b"ref.html:0-1;100-200", {("/foo/test.html", "/foo/ref.html", "=="): [[0, 1], [100, 200]]}), + (b"0-1;100-200", {None: [[0,1], [100, 200]]}), + (b"maxDifference=1;totalPixels=200", {None: [[1, 1], [200, 200]]}), + (b"totalPixels=200;maxDifference=1", {None: [[1, 1], [200, 200]]}), + (b"totalPixels=200;1", {None: 
@pytest.mark.parametrize("page_ranges, expected", [
    (b"1-2", [[1, 2]]),
    (b"1-1,3-4", [[1, 1], [3, 4]]),
    (b"1,3", [[1], [3]]),
    (b"2-", [[2, None]]),
    (b"-2", [[None, 2]]),
    (b"-2,2-", [[None, 2], [2, None]]),
    (b"1,6-7,8", [[1], [6, 7], [8]])])
def test_page_ranges(page_ranges, expected):
    """reftest-pages meta values are parsed into lists of page ranges."""
    # Same bytes as a triple-quoted template, assembled explicitly.
    content = (b"<link rel=match href=ref.html>\n"
               b'<meta name=reftest-pages content="%s">\n' % page_ranges)

    s = create("foo/test-print.html", content)

    # Ranges are keyed by the test URL.
    assert s.page_ranges == {"/foo/test-print.html": expected}
def test_git_for_path_no_git():
    """utils.git returns None when every git invocation fails."""
    this_dir = os.path.dirname(__file__)
    failure = subprocess.CalledProcessError(1, "foo")
    # Simulate "not a git checkout": check_output always raises.
    with mock.patch("subprocess.check_output", side_effect=failure):
        assert utils.git(this_dir) is None
In order to remain + API-compatible with consumers that depend on getting an Item + from iteration, we do egerly load all items when iterating + over the class.""" + self._manifest = m + self._type_cls: Type[ManifestItem] = type_cls + self._json_data: Dict[Text, Any] = {} + self._data: Dict[Text, Any] = {} + self._hashes: Dict[Tuple[Text, ...], Text] = {} + self.hashes = PathHash(self) + + def _delete_node(self, data: Dict[Text, Any], key: Tuple[Text, ...]) -> None: + """delete a path from a Dict data with a given key""" + path = [] + node = data + for pathseg in key[:-1]: + path.append((node, pathseg)) + node = node[pathseg] + if not isinstance(node, dict): + raise KeyError(key) + + del node[key[-1]] + while path: + node, pathseg = path.pop() + if len(node[pathseg]) == 0: + del node[pathseg] + else: + break + + def __getitem__(self, key: Tuple[Text, ...]) -> Set[ManifestItem]: + node: Union[Dict[Text, Any], Set[ManifestItem], List[Any]] = self._data + for pathseg in key: + if isinstance(node, dict) and pathseg in node: + node = node[pathseg] + else: + break + else: + if isinstance(node, set): + return node + else: + raise KeyError(key) + + node = self._json_data + found = False + for pathseg in key: + if isinstance(node, dict) and pathseg in node: + node = node[pathseg] + else: + break + else: + found = True + + if not found: + raise KeyError(key) + + if not isinstance(node, list): + raise KeyError(key) + + self._hashes[key] = node[0] + + data = set() + path = "/".join(key) + for test in node[1:]: + manifest_item = self._type_cls.from_json(self._manifest, path, test) + data.add(manifest_item) + + node = self._data + assert isinstance(node, dict) + for pathseg in key[:-1]: + node = node.setdefault(pathseg, {}) + assert isinstance(node, dict) + assert key[-1] not in node + node[key[-1]] = data + + self._delete_node(self._json_data, key) + + return data + + def __setitem__(self, key: Tuple[Text, ...], value: Set[ManifestItem]) -> None: + try: + 
    def __delitem__(self, key: Tuple[Text, ...]) -> None:
        """Delete *key* from whichever store currently holds it.

        The materialised store (``self._data``) is tried first; only if
        the key is absent there is the raw JSON store consulted, so a
        KeyError from the second call propagates when the key exists in
        neither store.
        """
        try:
            self._delete_node(self._data, key)
        except KeyError:
            # Not materialised: remove the raw JSON entry instead
            # (its hash lives inside that node, so nothing else to drop).
            self._delete_node(self._json_data, key)
        else:
            # Removed from the materialised store: also drop the cached
            # hash recorded when the item was loaded, if any.
            try:
                del self._hashes[key]
            except KeyError:
                pass
    def __contains__(self, key: Any) -> bool:
        """Return True if *key* names a leaf in either store.

        Checks the materialised data first, then the raw JSON data,
        without triggering the lazy materialisation __getitem__ performs.
        """
        # we provide our own impl of this to avoid calling __getitem__ and generating items for
        # those in self._json_data
        node = self._data
        for pathseg in key:
            if pathseg in node:
                node = node[pathseg]
            else:
                break
        else:
            # Walked the whole key: it is a test only if we ended on a
            # leaf (a set of materialised items), not an interior dict.
            return bool(isinstance(node, set))

        node = self._json_data
        for pathseg in key:
            if pathseg in node:
                node = node[pathseg]
            else:
                break
        else:
            # In the raw JSON store a leaf is a list ([hash, test...]).
            return bool(isinstance(node, list))

        return False
    def __getitem__(self, k: Tuple[Text, ...]) -> Text:
        """Return the stored hash for path-key *k*.

        Raises KeyError if *k* is not a known test path in the
        underlying TypeData.
        """
        if k not in self._data:
            raise KeyError

        # Hash recorded when the item was materialised from JSON.
        if k in self._data._hashes:
            return self._data._hashes[k]

        # Otherwise the item is still in raw JSON form; its leaf node is
        # [hash, test, ...], so the hash is element 0.
        node = self._data._json_data
        for pathseg in k:
            if pathseg in node:
                node = node[pathseg]
            else:
                break
        else:
            return node[0]  # type: ignore

        # `k in self._data` above guarantees one of the two lookups hits.
        assert False, "unreachable"
        # Kept so a KeyError is still raised if asserts are stripped (-O).
        raise KeyError
def abs_path(path: str) -> str:
    """Return *path* as an absolute path, expanding a leading ``~``
    to the user's home directory first."""
    expanded = os.path.expanduser(path)
    return os.path.abspath(expanded)
def rel_path_to_url(rel_path: Text, url_base: Text = "/") -> Text:
    """Map an OS-style relative path onto a URL under *url_base*.

    OS path separators are rewritten to "/" and *url_base* is normalised
    to begin and end with "/" so the two halves join by concatenation.

    :param rel_path: relative filesystem path; must not be absolute.
    :param url_base: mount point for the tests (default: server root).
    :raises AssertionError: if *rel_path* is absolute.
    """
    assert not os.path.isabs(rel_path), rel_path
    # startswith/endswith instead of url_base[0]/url_base[-1]: the
    # indexing form raised IndexError for an empty base, which now
    # degenerates to the root "/".
    if not url_base.startswith("/"):
        url_base = "/" + url_base
    if not url_base.endswith("/"):
        url_base += "/"
    return url_base + rel_path.replace(os.sep, "/")
class cached_property(Generic[T]):
    """Descriptor that computes an attribute once and stores the result
    in the instance's ``__dict__``, where it shadows the descriptor on
    subsequent accesses. Cached names are recorded in the instance's
    ``__cached_properties__`` set."""

    def __init__(self, func: Callable[[Any], T]) -> None:
        self.func = func
        self.__doc__ = getattr(func, "__doc__")
        self.name = func.__name__

    def __get__(self, obj: Any, cls: Optional[type] = None) -> T:
        # Accessed on the class itself: expose the descriptor.
        if obj is None:
            return self  # type: ignore

        # The instance attribute would shadow this descriptor, so a
        # second call here means something went wrong.
        assert self.name not in obj.__dict__
        value = self.func(obj)
        obj.__dict__[self.name] = value
        obj.__dict__.setdefault("__cached_properties__", set()).add(self.name)
        return value
def get_tree(tests_root: Text,
             manifest: "Manifest",
             manifest_path: Optional[Text],
             cache_root: Optional[Text],
             working_copy: bool = True,
             rebuild: bool = False) -> "FileSystem":
    """Build a FileSystem tree for *tests_root*.

    :param tests_root: root directory of the tests.
    :param manifest: Manifest whose url_base is used for the tree.
    :param manifest_path: path of the manifest file (enables the mtime cache).
    :param cache_root: directory for caches; defaults to
        ``<tests_root>/.wptcache``. Caching is best-effort: if the
        directory cannot be created we fall back to no caching.
    :param working_copy: must be True; only working copies are supported.
    :param rebuild: if True, ignore any existing caches.
    :raises ValueError: when working_copy is False.
    """
    if cache_root is None:
        cache_root = os.path.join(tests_root, ".wptcache")
    if not os.path.exists(cache_root):
        try:
            os.makedirs(cache_root)
        except OSError:
            # Could not create the cache dir; run uncached.
            cache_root = None

    if not working_copy:
        raise ValueError("working_copy=False unsupported")

    # Previously this kept a vestigial `tree = None` / `if tree is None`
    # dance; only one tree type exists, so construct it directly.
    return FileSystem(tests_root,
                      manifest.url_base,
                      manifest_path=manifest_path,
                      cache_path=cache_root,
                      rebuild=rebuild)
+ cmd = ["diff-index", "--relative", "--no-renames", "--name-only", "-z", "HEAD", os.curdir] + data = self.git(*cmd) + return set(data.split("\0")) + + def hash_cache(self) -> Dict[Text, Optional[Text]]: + """ + A dict of rel_path -> current git object id if the working tree matches HEAD else None + """ + hash_cache: Dict[Text, Optional[Text]] = {} + + if self.git is None: + return hash_cache + + # note that git runs the command with tests_root as the cwd, which may + # not be the root of the git repo (e.g., within a browser repo) + cmd = ["ls-tree", "-r", "-z", "HEAD"] + local_changes = self._local_changes() + for result in self.git(*cmd).split("\0")[:-1]: # type: Text + data, rel_path = result.rsplit("\t", 1) + hash_cache[rel_path] = None if rel_path in local_changes else data.split(" ", 3)[2] + + return hash_cache + + + +class FileSystem: + def __init__(self, + tests_root: Text, + url_base: Text, + cache_path: Optional[Text], + manifest_path: Optional[Text] = None, + rebuild: bool = False) -> None: + self.tests_root = tests_root + self.url_base = url_base + self.ignore_cache = None + self.mtime_cache = None + tests_root_bytes = tests_root.encode("utf8") + if cache_path is not None: + if manifest_path is not None: + self.mtime_cache = MtimeCache(cache_path, tests_root, manifest_path, rebuild) + if gitignore.has_ignore(tests_root_bytes): + self.ignore_cache = GitIgnoreCache(cache_path, tests_root, rebuild) + self.path_filter = gitignore.PathFilter(tests_root_bytes, + extras=[b".git/"], + cache=self.ignore_cache) + git = GitHasher(tests_root) + self.hash_cache = git.hash_cache() + + def __iter__(self) -> Iterator[Tuple[Text, Optional[Text], bool]]: + mtime_cache = self.mtime_cache + for dirpath, dirnames, filenames in self.path_filter( + walk(self.tests_root.encode("utf8"))): + for filename, path_stat in filenames: + path = os.path.join(dirpath, filename).decode("utf8") + if mtime_cache is None or mtime_cache.updated(path, path_stat): + file_hash = 
class CacheFile(metaclass=abc.ABCMeta):
    """Base class for JSON-backed caches stored in a cache directory.

    Subclasses must supply ``file_name`` and may override ``check_valid``
    to invalidate stale data on load.
    """

    def __init__(self, cache_root: Text, tests_root: Text, rebuild: bool = False) -> None:
        self.tests_root = tests_root
        if not os.path.exists(cache_root):
            os.makedirs(cache_root)
        self.path = os.path.join(cache_root, self.file_name)
        # Only written back to disk when True; see dump().
        self.modified = False
        self.data = self.load(rebuild)

    # abc.abstractproperty has been deprecated since Python 3.3;
    # stacking @property over @abc.abstractmethod is the modern form.
    @property
    @abc.abstractmethod
    def file_name(self) -> Text:
        """Basename of the file backing this cache."""

    def dump(self) -> None:
        """Write the cache to disk, if it changed since loading."""
        if not self.modified:
            return
        with open(self.path, 'w') as f:
            jsonlib.dump_local(self.data, f)

    def load(self, rebuild: bool = False) -> Dict[Text, Any]:
        """Read and validate the cache file.

        Returns {} when the file is missing, unreadable, unparseable,
        or when *rebuild* is set; otherwise the data after check_valid.
        """
        data: Dict[Text, Any] = {}
        try:
            if not rebuild:
                with open(self.path) as f:
                    try:
                        data = jsonlib.load(f)
                    except ValueError:
                        # Corrupt cache: start over with empty data.
                        pass
                data = self.check_valid(data)
        except OSError:
            pass
        return data

    def check_valid(self, data: Dict[Text, Any]) -> Dict[Text, Any]:
        """Check if the cached data is valid and return an updated copy of the
        cache containing only data that can be used."""
        return data
    def check_valid(self, data: Dict[Any, Any]) -> Dict[Any, Any]:
        """Invalidate the cache when it belongs to a different tree or
        when the manifest file changed (or is missing) since it was written.

        Returns the (possibly emptied) data with the current tests_root
        stamped back in.
        """
        if data.get("/tests_root") != self.tests_root:
            # Cache was written for a different checkout.
            self.modified = True
        else:
            if self.manifest_path is not None and os.path.exists(self.manifest_path):
                mtime = os.path.getmtime(self.manifest_path)
                # Stale if the manifest was rewritten since the cache dump.
                if data.get("/manifest_path") != [self.manifest_path, mtime]:
                    self.modified = True
            else:
                self.modified = True
        if self.modified:
            # Any invalidation discards all cached mtimes.
            data = {}
        data["/tests_root"] = self.tests_root
        return data
def walk(root: bytes) -> Iterable[Tuple[bytes, List[Tuple[bytes, stat_result]], List[Tuple[bytes, stat_result]]]]:
    """Re-implementation of os.walk. Returns an iterator over
    (dirpath, dirnames, filenames), with some semantic differences
    to os.walk.

    This has a similar interface to os.walk, with the important difference
    that instead of lists of filenames and directory names, it yields
    lists of tuples of the form [(name, stat)] where stat is the result of
    os.stat for the file. That allows reusing the same stat data in the
    caller. It also always returns the dirpath relative to the root, with
    the root itself being returned as the empty string.

    Unlike os.walk the implementation is not recursive."""

    # Bind hot callables to locals: these run once per directory entry,
    # and local lookups are cheaper than attribute lookups.
    get_stat = os.stat
    is_dir = stat.S_ISDIR
    is_link = stat.S_ISLNK
    join = os.path.join
    listdir = os.listdir
    relpath = os.path.relpath

    root = os.path.abspath(root)
    stack = deque([(root, b"")])

    while stack:
        dir_path, rel_path = stack.popleft()
        try:
            # Directories that vanish or are unreadable are skipped.
            names = listdir(dir_path)
        except OSError:
            continue

        dirs, non_dirs = [], []
        for name in names:
            path = join(dir_path, name)
            try:
                path_stat = get_stat(path)
            except OSError:
                # Entries that cannot be stat'd (e.g. broken symlinks)
                # are dropped.
                continue
            if is_dir(path_stat.st_mode):
                dirs.append((name, path_stat))
            else:
                non_dirs.append((name, path_stat))

        yield rel_path, dirs, non_dirs
        for name, path_stat in dirs:
            new_path = join(dir_path, name)
            # NOTE(review): path_stat comes from os.stat, which follows
            # symlinks, so S_ISLNK is presumably never set on st_mode and
            # this guard looks ineffective (os.lstat may have been
            # intended to avoid descending into symlinked dirs) — confirm.
            if not is_link(path_stat.st_mode):
                stack.append((new_path, relpath(new_path, root)))