| author | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-06-12 05:35:29 +0000 |
|---|---|---|
| committer | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-06-12 05:35:29 +0000 |
| commit | 59203c63bb777a3bacec32fb8830fba33540e809 (patch) | |
| tree | 58298e711c0ff0575818c30485b44a2f21bf28a0 /devtools/client/shared/output-parser.js | |
| parent | Adding upstream version 126.0.1. (diff) | |
| download | firefox-59203c63bb777a3bacec32fb8830fba33540e809.tar.xz, firefox-59203c63bb777a3bacec32fb8830fba33540e809.zip | |
Adding upstream version 127.0.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'devtools/client/shared/output-parser.js')
-rw-r--r-- | devtools/client/shared/output-parser.js | 316 |
1 file changed, 180 insertions, 136 deletions
diff --git a/devtools/client/shared/output-parser.js b/devtools/client/shared/output-parser.js
index bd514096b3..5202ae3a8e 100644
--- a/devtools/client/shared/output-parser.js
+++ b/devtools/client/shared/output-parser.js
@@ -8,7 +8,9 @@ const {
   angleUtils,
 } = require("resource://devtools/client/shared/css-angle.js");
 const { colorUtils } = require("resource://devtools/shared/css/color.js");
-const { getCSSLexer } = require("resource://devtools/shared/css/lexer.js");
+const {
+  InspectorCSSParserWrapper,
+} = require("resource://devtools/shared/css/lexer.js");
 const {
   appendText,
 } = require("resource://devtools/client/inspector/shared/utils.js");
@@ -67,6 +69,13 @@ const BACKDROP_FILTER_ENABLED = Services.prefs.getBoolPref(
 );
 const HTML_NS = "http://www.w3.org/1999/xhtml";
 
+// This regexp matches a URL token. It puts the "url(", any
+// leading whitespace, and any opening quote into |leader|; the
+// URL text itself into |body|, and any trailing quote, trailing
+// whitespace, and the ")" into |trailer|.
+const URL_REGEX =
+  /^(?<leader>url\([ \t\r\n\f]*(["']?))(?<body>.*?)(?<trailer>\2[ \t\r\n\f]*\))$/i;
+
 // Very long text properties should be truncated using CSS to avoid creating
 // extremely tall propertyvalue containers. 5000 characters is an arbitrary
 // limit. Assuming an average ruleview can hold 50 characters per line, this
@@ -175,7 +184,7 @@ class OutputParser {
    * @param {Boolean} stopAtComma
    *        If true, stop at a comma.
    * @return {Object}
-   *         An object of the form {tokens, functionData, sawComma, sawVariable}.
+   *         An object of the form {tokens, functionData, sawComma, sawVariable, depth}.
    *         |tokens| is a list of the non-comment, non-whitespace tokens
    *         that were seen. The stopping token (paren or comma) will not
    *         be included.
@@ -184,6 +193,7 @@
    *         not be included.
    *         |sawComma| is true if the stop was due to a comma, or false otherwise.
    *         |sawVariable| is true if a variable was seen while parsing the text.
+   *         |depth| is the number of unclosed parenthesis remaining when we return.
    */
   #parseMatchingParens(text, tokenStream, options, stopAtComma) {
     let depth = 1;
@@ -196,24 +206,22 @@
       if (!token) {
         break;
       }
-      if (token.tokenType === "comment") {
+      if (token.tokenType === "Comment") {
         continue;
       }
 
-      if (token.tokenType === "symbol") {
-        if (stopAtComma && depth === 1 && token.text === ",") {
-          return { tokens, functionData, sawComma: true, sawVariable };
-        } else if (token.text === "(") {
-          ++depth;
-        } else if (token.text === ")") {
-          --depth;
-          if (depth === 0) {
-            break;
-          }
-          }
+      if (stopAtComma && depth === 1 && token.tokenType === "Comma") {
+        return { tokens, functionData, sawComma: true, sawVariable, depth };
+      } else if (token.tokenType === "ParenthesisBlock") {
+        ++depth;
+      } else if (token.tokenType === "CloseParenthesis") {
+        --depth;
+        if (depth === 0) {
+          break;
         }
       } else if (
-        token.tokenType === "function" &&
-        token.text === "var" &&
+        token.tokenType === "Function" &&
+        token.value === "var" &&
         options.getVariableValue
       ) {
         sawVariable = true;
@@ -224,24 +232,24 @@
           options
         );
         functionData.push({ node, value, fallbackValue });
-      } else if (token.tokenType === "function") {
+      } else if (token.tokenType === "Function") {
        ++depth;
       }
 
       if (
-        token.tokenType !== "function" ||
-        token.text !== "var" ||
+        token.tokenType !== "Function" ||
+        token.value !== "var" ||
         !options.getVariableValue
       ) {
         functionData.push(text.substring(token.startOffset, token.endOffset));
       }
 
-      if (token.tokenType !== "whitespace") {
+      if (token.tokenType !== "WhiteSpace") {
         tokens.push(token);
       }
     }
 
-    return { tokens, functionData, sawComma: false, sawVariable };
+    return { tokens, functionData, sawComma: false, sawVariable, depth };
   }
 
   /**
@@ -389,19 +397,23 @@
       }
 
       const lowerCaseTokenText = token.text?.toLowerCase();
-      if (token.tokenType === "comment") {
+      if (token.tokenType === "Comment") {
        // This doesn't change spaceNeeded, because we didn't emit
        // anything to the output.
         continue;
       }
 
       switch (token.tokenType) {
-        case "function": {
-          const isColorTakingFunction =
-            COLOR_TAKING_FUNCTIONS.includes(lowerCaseTokenText);
+        case "Function": {
+          const functionName = token.value;
+          const lowerCaseFunctionName = functionName.toLowerCase();
+
+          const isColorTakingFunction = COLOR_TAKING_FUNCTIONS.includes(
+            lowerCaseFunctionName
+          );
           if (
             isColorTakingFunction ||
-            ANGLE_TAKING_FUNCTIONS.includes(lowerCaseTokenText)
+            ANGLE_TAKING_FUNCTIONS.includes(lowerCaseFunctionName)
           ) {
             // The function can accept a color or an angle argument, and we know
             // it isn't special in some other way. So, we let it
@@ -414,10 +426,13 @@
             outerMostFunctionTakesColor = isColorTakingFunction;
           }
           if (isColorTakingFunction) {
-            colorFunctions.push({ parenDepth, functionName: token.text });
+            colorFunctions.push({ parenDepth, functionName });
           }
           ++parenDepth;
-        } else if (lowerCaseTokenText === "var" && options.getVariableValue) {
+        } else if (
+          lowerCaseFunctionName === "var" &&
+          options.getVariableValue
+        ) {
           const { node: variableNode, value } = this.#parseVariable(
             token,
             text,
@@ -434,20 +449,17 @@
              this.#parsed.push(variableNode);
            }
          } else {
-            const { functionData, sawVariable } = this.#parseMatchingParens(
-              text,
-              tokenStream,
-              options
-            );
-
-            const functionName = text.substring(
-              token.startOffset,
-              token.endOffset
-            );
+            const {
+              functionData,
+              sawVariable,
+              tokens: functionArgTokens,
+              depth,
+            } = this.#parseMatchingParens(text, tokenStream, options);
 
            if (sawVariable) {
              const computedFunctionText =
                functionName +
+                "(" +
                functionData
                  .map(data => {
                    if (typeof data === "string") {
@@ -466,6 +478,7 @@
                colorFunction: colorFunctions.at(-1)?.functionName,
                valueParts: [
                  functionName,
+                  "(",
                  ...functionData.map(data => data.node || data),
                  ")",
                ],
@@ -473,7 +486,7 @@
            } else {
              // If function contains variable, we need to add both strings
              // and nodes.
-              this.#appendTextNode(functionName);
+              this.#appendTextNode(functionName + "(");
              for (const data of functionData) {
                if (typeof data === "string") {
                  this.#appendTextNode(data);
@@ -486,16 +499,40 @@
            } else {
              // If no variable in function, join the text together and add
              // to DOM accordingly.
-              const functionText = functionName + functionData.join("") + ")";
+              const functionText =
+                functionName +
+                "(" +
+                functionData.join("") +
+                // only append closing parenthesis if the authored text actually had it
+                // In such case, we should probably indicate that there's a "syntax error"
+                // See Bug 1891461.
+                (depth == 0 ? ")" : "");
+
+              if (lowerCaseFunctionName === "url" && options.urlClass) {
+                // url() with quoted strings are not mapped as UnquotedUrl,
+                // instead, we get a "Function" token with "url" function name,
+                // and later, a "QuotedString" token, which contains the actual URL.
+                let url;
+                for (const argToken of functionArgTokens) {
+                  if (argToken.tokenType === "QuotedString") {
+                    url = argToken.value;
+                    break;
+                  }
+                }
 
-              if (
+                if (url !== undefined) {
+                  this.#appendURL(functionText, url, options);
+                } else {
+                  this.#appendTextNode(functionText);
+                }
+              } else if (
                options.expectCubicBezier &&
-                lowerCaseTokenText === "cubic-bezier"
+                lowerCaseFunctionName === "cubic-bezier"
              ) {
                this.#appendCubicBezier(functionText, options);
              } else if (
                options.expectLinearEasing &&
-                lowerCaseTokenText === "linear"
+                lowerCaseFunctionName === "linear"
              ) {
                this.#appendLinear(functionText, options);
              } else if (
@@ -508,7 +545,7 @@
                });
              } else if (
                options.expectShape &&
-                BASIC_SHAPE_FUNCTIONS.includes(lowerCaseTokenText)
+                BASIC_SHAPE_FUNCTIONS.includes(lowerCaseFunctionName)
              ) {
                this.#appendShape(functionText, options);
              } else {
@@ -519,7 +556,7 @@
          break;
        }
 
-        case "ident":
+        case "Ident":
          if (
            options.expectCubicBezier &&
            BEZIER_KEYWORDS.includes(lowerCaseTokenText)
@@ -553,8 +590,8 @@
          }
          break;
 
-        case "id":
-        case "hash": {
+        case "IDHash":
+        case "Hash": {
          const original = text.substring(token.startOffset, token.endOffset);
          if (colorOK() && InspectorUtils.isValidCSSColor(original)) {
            if (spaceNeeded) {
@@ -571,7 +608,7 @@
            }
            break;
          }
-        case "dimension":
+        case "Dimension":
          const value = text.substring(token.startOffset, token.endOffset);
          if (angleOK(value)) {
            this.#appendAngle(value, options);
@@ -579,16 +616,16 @@
          }
          break;
 
-        case "url":
-        case "bad_url":
+        case "UnquotedUrl":
+        case "BadUrl":
          this.#appendURL(
            text.substring(token.startOffset, token.endOffset),
-            token.text,
+            token.value,
            options
          );
          break;
 
-        case "string":
+        case "QuotedString":
          if (options.expectFont) {
            fontFamilyNameParts.push(
              text.substring(token.startOffset, token.endOffset)
@@ -600,7 +637,7 @@
          }
          break;
 
-        case "whitespace":
+        case "WhiteSpace":
          if (options.expectFont) {
            fontFamilyNameParts.push(" ");
          } else {
@@ -610,32 +647,44 @@
          }
          break;
 
-        case "symbol":
-          if (token.text === "(") {
-            ++parenDepth;
-          } else if (token.text === ")") {
-            --parenDepth;
+        case "ParenthesisBlock":
+          ++parenDepth;
+          this.#appendTextNode(
+            text.substring(token.startOffset, token.endOffset)
+          );
+          break;
 
-            if (colorFunctions.at(-1)?.parenDepth == parenDepth) {
-              colorFunctions.pop();
-            }
+        case "CloseParenthesis":
+          --parenDepth;
 
-            if (stopAtCloseParen && parenDepth === 0) {
-              done = true;
-              break;
-            }
+          if (colorFunctions.at(-1)?.parenDepth == parenDepth) {
+            colorFunctions.pop();
+          }
 
-            if (parenDepth === 0) {
-              outerMostFunctionTakesColor = false;
-            }
-          } else if (
-            (token.text === "," || token.text === "!") &&
+          if (stopAtCloseParen && parenDepth === 0) {
+            done = true;
+            break;
+          }
+
+          if (parenDepth === 0) {
+            outerMostFunctionTakesColor = false;
+          }
+          this.#appendTextNode(
+            text.substring(token.startOffset, token.endOffset)
+          );
+          break;
+
+        case "Comma":
+        case "Delim":
+          if (
+            (token.tokenType === "Comma" || token.text === "!") &&
            options.expectFont &&
            fontFamilyNameParts.length !== 0
          ) {
            this.#appendFontFamily(fontFamilyNameParts.join(""), options);
            fontFamilyNameParts = [];
          }
+        // falls through
 
        default:
          this.#appendTextNode(
@@ -647,15 +696,15 @@
      // If this token might possibly introduce token pasting when
      // color-cycling, require a space.
      spaceNeeded =
-        token.tokenType === "ident" ||
-        token.tokenType === "at" ||
-        token.tokenType === "id" ||
-        token.tokenType === "hash" ||
-        token.tokenType === "number" ||
-        token.tokenType === "dimension" ||
-        token.tokenType === "percentage" ||
-        token.tokenType === "dimension";
-      previousWasBang = token.tokenType === "symbol" && token.text === "!";
+        token.tokenType === "Ident" ||
+        token.tokenType === "AtKeyword" ||
+        token.tokenType === "IDHash" ||
+        token.tokenType === "Hash" ||
+        token.tokenType === "Number" ||
+        token.tokenType === "Dimension" ||
+        token.tokenType === "Percentage" ||
+        token.tokenType === "Dimension";
+      previousWasBang = token.tokenType === "Delim" && token.text === "!";
    }
 
    if (options.expectFont && fontFamilyNameParts.length !== 0) {
@@ -686,7 +735,7 @@
    text = text.trim();
    this.#parsed.length = 0;
 
-    const tokenStream = getCSSLexer(text);
+    const tokenStream = new InspectorCSSParserWrapper(text);
    return this.#doParse(text, options, tokenStream, false);
  }
 
@@ -884,7 +933,7 @@
   */
  // eslint-disable-next-line complexity
  #addPolygonPointNodes(coords, container) {
-    const tokenStream = getCSSLexer(coords);
+    const tokenStream = new InspectorCSSParserWrapper(coords);
    let token = tokenStream.nextToken();
    let coord = "";
    let i = 0;
@@ -897,7 +946,7 @@
    });
 
    while (token) {
-      if (token.tokenType === "symbol" && token.text === ",") {
+      if (token.tokenType === "Comma") {
        // Comma separating coordinate pairs; add coordNode to container and reset vars
        if (!isXCoord) {
          // Y coord not added to coordNode yet
@@ -933,19 +982,19 @@
          class: "ruleview-shape-point",
          "data-point": `${i}`,
        });
-      } else if (token.tokenType === "symbol" && token.text === "(") {
+      } else if (token.tokenType === "ParenthesisBlock") {
        depth++;
        coord += coords.substring(token.startOffset, token.endOffset);
-      } else if (token.tokenType === "symbol" && token.text === ")") {
+      } else if (token.tokenType === "CloseParenthesis") {
        depth--;
        coord += coords.substring(token.startOffset, token.endOffset);
-      } else if (token.tokenType === "whitespace" && coord === "") {
+      } else if (token.tokenType === "WhiteSpace" && coord === "") {
        // Whitespace at beginning of coord; add to container
        appendText(
          container,
          coords.substring(token.startOffset, token.endOffset)
        );
-      } else if (token.tokenType === "whitespace" && depth === 0) {
+      } else if (token.tokenType === "WhiteSpace" && depth === 0) {
        // Whitespace signifying end of coord
        const node = this.#createNode(
          "span",
@@ -964,10 +1013,10 @@
        coord = "";
        isXCoord = !isXCoord;
      } else if (
-        token.tokenType === "number" ||
-        token.tokenType === "dimension" ||
-        token.tokenType === "percentage" ||
-        token.tokenType === "function"
+        token.tokenType === "Number" ||
+        token.tokenType === "Dimension" ||
+        token.tokenType === "Percentage" ||
+        token.tokenType === "Function"
      ) {
        if (isXCoord && coord && depth === 0) {
          // Whitespace is not necessary between x/y coords.
@@ -986,11 +1035,11 @@
        }
 
        coord += coords.substring(token.startOffset, token.endOffset);
-        if (token.tokenType === "function") {
+        if (token.tokenType === "Function") {
          depth++;
        }
      } else if (
-        token.tokenType === "ident" &&
+        token.tokenType === "Ident" &&
        (token.text === "nonzero" || token.text === "evenodd")
      ) {
        // A fill-rule (nonzero or evenodd).
@@ -1034,7 +1083,7 @@
   */
  // eslint-disable-next-line complexity
  #addCirclePointNodes(coords, container) {
-    const tokenStream = getCSSLexer(coords);
+    const tokenStream = new InspectorCSSParserWrapper(coords);
    let token = tokenStream.nextToken();
    let depth = 0;
    let coord = "";
@@ -1044,20 +1093,20 @@
      "data-point": "center",
    });
    while (token) {
-      if (token.tokenType === "symbol" && token.text === "(") {
+      if (token.tokenType === "ParenthesisBlock") {
        depth++;
        coord += coords.substring(token.startOffset, token.endOffset);
-      } else if (token.tokenType === "symbol" && token.text === ")") {
+      } else if (token.tokenType === "CloseParenthesis") {
        depth--;
        coord += coords.substring(token.startOffset, token.endOffset);
-      } else if (token.tokenType === "whitespace" && coord === "") {
+      } else if (token.tokenType === "WhiteSpace" && coord === "") {
        // Whitespace at beginning of coord; add to container
        appendText(
          container,
          coords.substring(token.startOffset, token.endOffset)
        );
      } else if (
-        token.tokenType === "whitespace" &&
+        token.tokenType === "WhiteSpace" &&
        point === "radius" &&
        depth === 0
      ) {
@@ -1078,7 +1127,7 @@
        point = "cx";
        coord = "";
        depth = 0;
-      } else if (token.tokenType === "whitespace" && depth === 0) {
+      } else if (token.tokenType === "WhiteSpace" && depth === 0) {
        // Whitespace signifying end of cx/cy
        const node = this.#createNode(
          "span",
@@ -1097,7 +1146,7 @@
        point = point === "cx" ? "cy" : "cx";
        coord = "";
        depth = 0;
-      } else if (token.tokenType === "ident" && token.text === "at") {
+      } else if (token.tokenType === "Ident" && token.text === "at") {
        // "at"; Add radius to container if not already done so
        if (point === "radius" && coord) {
          const node = this.#createNode(
@@ -1118,10 +1167,10 @@
        coord = "";
        depth = 0;
      } else if (
-        token.tokenType === "number" ||
-        token.tokenType === "dimension" ||
-        token.tokenType === "percentage" ||
-        token.tokenType === "function"
+        token.tokenType === "Number" ||
+        token.tokenType === "Dimension" ||
+        token.tokenType === "Percentage" ||
+        token.tokenType === "Function"
      ) {
        if (point === "cx" && coord && depth === 0) {
          // Center coords don't require whitespace between x/y. So if current point is
@@ -1142,7 +1191,7 @@
        }
 
        coord += coords.substring(token.startOffset, token.endOffset);
-        if (token.tokenType === "function") {
+        if (token.tokenType === "Function") {
          depth++;
        }
      } else {
@@ -1195,7 +1244,7 @@
   */
  // eslint-disable-next-line complexity
  #addEllipsePointNodes(coords, container) {
-    const tokenStream = getCSSLexer(coords);
+    const tokenStream = new InspectorCSSParserWrapper(coords);
    let token = tokenStream.nextToken();
    let depth = 0;
    let coord = "";
@@ -1205,19 +1254,19 @@
      "data-point": "center",
    });
    while (token) {
-      if (token.tokenType === "symbol" && token.text === "(") {
+      if (token.tokenType === "ParenthesisBlock") {
        depth++;
        coord += coords.substring(token.startOffset, token.endOffset);
-      } else if (token.tokenType === "symbol" && token.text === ")") {
+      } else if (token.tokenType === "CloseParenthesis") {
        depth--;
        coord += coords.substring(token.startOffset, token.endOffset);
-      } else if (token.tokenType === "whitespace" && coord === "") {
+      } else if (token.tokenType === "WhiteSpace" && coord === "") {
        // Whitespace at beginning of coord; add to container
        appendText(
          container,
          coords.substring(token.startOffset, token.endOffset)
        );
-      } else if (token.tokenType === "whitespace" && depth === 0) {
+      } else if (token.tokenType === "WhiteSpace" && depth === 0) {
        if (point === "rx" || point === "ry") {
          // Whitespace signifying end of rx/ry
          const node = this.#createNode(
@@ -1256,7 +1305,7 @@
          coord = "";
          depth = 0;
        }
-      } else if (token.tokenType === "ident" && token.text === "at") {
+      } else if (token.tokenType === "Ident" && token.text === "at") {
        // "at"; Add radius to container if not already done so
        if (point === "ry" && coord) {
          const node = this.#createNode(
@@ -1277,10 +1326,10 @@
        coord = "";
        depth = 0;
      } else if (
-        token.tokenType === "number" ||
-        token.tokenType === "dimension" ||
-        token.tokenType === "percentage" ||
-        token.tokenType === "function"
+        token.tokenType === "Number" ||
+        token.tokenType === "Dimension" ||
+        token.tokenType === "Percentage" ||
+        token.tokenType === "Function"
      ) {
        if (point === "rx" && coord && depth === 0) {
          // Radius coords don't require whitespace between x/y.
@@ -1313,7 +1362,7 @@
        }
 
        coord += coords.substring(token.startOffset, token.endOffset);
-        if (token.tokenType === "function") {
+        if (token.tokenType === "Function") {
          depth++;
        }
      } else {
@@ -1366,7 +1415,7 @@
  // eslint-disable-next-line complexity
  #addInsetPointNodes(coords, container) {
    const insetPoints = ["top", "right", "bottom", "left"];
-    const tokenStream = getCSSLexer(coords);
+    const tokenStream = new InspectorCSSParserWrapper(coords);
    let token = tokenStream.nextToken();
    let depth = 0;
    let coord = "";
@@ -1383,16 +1432,16 @@
      if (round) {
        // Everything that comes after "round" should just be plain text
        otherText[i].push(coords.substring(token.startOffset, token.endOffset));
-      } else if (token.tokenType === "symbol" && token.text === "(") {
+      } else if (token.tokenType === "ParenthesisBlock") {
        depth++;
        coord += coords.substring(token.startOffset, token.endOffset);
-      } else if (token.tokenType === "symbol" && token.text === ")") {
+      } else if (token.tokenType === "CloseParenthesis") {
        depth--;
        coord += coords.substring(token.startOffset, token.endOffset);
-      } else if (token.tokenType === "whitespace" && coord === "") {
+      } else if (token.tokenType === "WhiteSpace" && coord === "") {
        // Whitespace at beginning of coord; add to container
        otherText[i].push(coords.substring(token.startOffset, token.endOffset));
-      } else if (token.tokenType === "whitespace" && depth === 0) {
+      } else if (token.tokenType === "WhiteSpace" && depth === 0) {
        // Whitespace signifying end of coord; create node and push to nodes
        const node = this.#createNode(
          "span",
@@ -1407,10 +1456,10 @@
        otherText[i] = [coords.substring(token.startOffset, token.endOffset)];
        depth = 0;
      } else if (
-        token.tokenType === "number" ||
-        token.tokenType === "dimension" ||
-        token.tokenType === "percentage" ||
-        token.tokenType === "function"
+        token.tokenType === "Number" ||
+        token.tokenType === "Dimension" ||
+        token.tokenType === "Percentage" ||
+        token.tokenType === "Function"
      ) {
        if (coord && depth === 0) {
          // Inset coords don't require whitespace between each coord.
@@ -1428,10 +1477,10 @@
        }
 
        coord += coords.substring(token.startOffset, token.endOffset);
-        if (token.tokenType === "function") {
+        if (token.tokenType === "Function") {
          depth++;
        }
-      } else if (token.tokenType === "ident" && token.text === "round") {
+      } else if (token.tokenType === "Ident" && token.text === "round") {
        if (coord && depth === 0) {
          // Whitespace is not necessary before "round"; create a new node for the coord
          const node = this.#createNode(
@@ -1730,13 +1779,15 @@
   */
  #sanitizeURL(url) {
    // Re-lex the URL and add any needed termination characters.
-    const urlTokenizer = getCSSLexer(url);
+    const urlTokenizer = new InspectorCSSParserWrapper(url, {
+      trackEOFChars: true,
+    });
    // Just read until EOF; there will only be a single token.
    while (urlTokenizer.nextToken()) {
      // Nothing.
    }
 
-    return urlTokenizer.performEOFFixup(url, true);
+    return urlTokenizer.performEOFFixup(url);
  }
 
  /**
@@ -1756,14 +1807,7 @@
      // leave the termination characters. This isn't strictly
      // "as-authored", but it makes a bit more sense.
      match = this.#sanitizeURL(match);
-      // This regexp matches a URL token. It puts the "url(", any
-      // leading whitespace, and any opening quote into |leader|; the
-      // URL text itself into |body|, and any trailing quote, trailing
-      // whitespace, and the ")" into |trailer|. We considered adding
-      // functionality for this to CSSLexer, in some way, but this
-      // seemed simpler on the whole.
-      const urlParts =
-        /^(url\([ \t\r\n\f]*(["']?))(.*?)(\2[ \t\r\n\f]*\))$/i.exec(match);
+      const urlParts = URL_REGEX.exec(match);
 
      // Bail out if that didn't match anything.
      if (!urlParts) {
@@ -1771,7 +1815,7 @@
        return;
      }
 
-      const [, leader, , body, trailer] = urlParts;
+      const { leader, body, trailer } = urlParts.groups;
 
      this.#appendTextNode(leader);
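
Most of the churn above is mechanical: the token type names used with the old `getCSSLexer` are replaced by the names `InspectorCSSParserWrapper` emits, and `token.text` gives way to `token.value` where only the function name or URL body is wanted. A hedged summary of the correspondences actually exercised in this patch (not an exhaustive list of the new lexer's token types):

```js
// Old getCSSLexer name -> new InspectorCSSParserWrapper name, as seen in this diff.
const TOKEN_TYPE_MAP = {
  comment: "Comment",
  whitespace: "WhiteSpace",
  ident: "Ident",
  function: "Function",
  at: "AtKeyword",
  id: "IDHash",
  hash: "Hash",
  number: "Number",
  dimension: "Dimension",
  percentage: "Percentage",
  string: "QuotedString",
  url: "UnquotedUrl",
  bad_url: "BadUrl",
};

// The old catch-all "symbol" token is split into dedicated types:
// "(" -> "ParenthesisBlock", ")" -> "CloseParenthesis", "," -> "Comma",
// and remaining single characters such as "!" -> "Delim".
```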
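With the new lexer, a `Function` token covers both the name and its opening parenthesis, which is why the patch increments `parenDepth` on `Function`, re-appends the `"("` by hand when rebuilding `functionText`, and only emits a closing `")"` when `#parseMatchingParens` reports `depth === 0`. A minimal sketch of that bookkeeping, assuming the Firefox-internal lexer module is available and tokenizes as described in the patch:

```js
const {
  InspectorCSSParserWrapper,
} = require("resource://devtools/shared/css/lexer.js");

// Count parentheses left unclosed at the end of `text`. A "Function" token
// already swallows its "(", so it bumps the depth just like a bare "(" does.
function unclosedParenDepth(text) {
  const lexer = new InspectorCSSParserWrapper(text);
  let depth = 0;
  let token;
  while ((token = lexer.nextToken())) {
    if (
      token.tokenType === "Function" ||
      token.tokenType === "ParenthesisBlock"
    ) {
      depth++;
    } else if (token.tokenType === "CloseParenthesis") {
      depth--;
    }
  }
  return depth;
}

// unclosedParenDepth("linear-gradient(red, blue)") -> 0: safe to append ")".
// unclosedParenDepth("var(--x, calc(1px + 2px")   -> 2: the authored text never
// closed its parentheses, so the patch deliberately does not add a ")" for it.
```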
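The url() branch changes shape because only unquoted URLs still arrive as a single `UnquotedUrl` token; `url("…")` now comes through as a `Function` token whose `value` is `"url"`, followed by a `QuotedString` argument. A sketch of pulling the URL text back out, under the same assumption the patch makes that `QuotedString.value` carries the string contents:

```js
const {
  InspectorCSSParserWrapper,
} = require("resource://devtools/shared/css/lexer.js");

// Return the value of the first QuotedString token in `cssText`, if any.
function firstQuotedStringValue(cssText) {
  const lexer = new InspectorCSSParserWrapper(cssText);
  let token;
  while ((token = lexer.nextToken())) {
    if (token.tokenType === "QuotedString") {
      return token.value;
    }
  }
  return undefined;
}

// firstQuotedStringValue(`url("img/bg.png")`) -> "img/bg.png" (quoted form)
// firstQuotedStringValue(`url(img/bg.png)`)   -> undefined (single UnquotedUrl token)
```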
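Hoisting the regexp to a module-level `URL_REGEX` with named groups also lets `#appendURL` destructure `urlParts.groups` instead of counting positional captures. A standalone sketch of what the groups capture (the example URL is made up):

```js
const URL_REGEX =
  /^(?<leader>url\([ \t\r\n\f]*(["']?))(?<body>.*?)(?<trailer>\2[ \t\r\n\f]*\))$/i;

const match = URL_REGEX.exec(`url( "chrome://example/skin/icon.svg" )`);
if (match) {
  // Named groups replace the old positional destructuring
  // `const [, leader, , body, trailer] = urlParts;`.
  const { leader, body, trailer } = match.groups;
  console.log(leader);  // 'url( "'
  console.log(body);    // 'chrome://example/skin/icon.svg'
  console.log(trailer); // '" )'
}
```

Note that `\2` still refers to the optional quote by number; adding named groups does not change the numbering of the plain `(["']?)` capture.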
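Finally, `#sanitizeURL` reflects two small API differences in the wrapper: EOF-character tracking is opt-in via the constructor options, and `performEOFFixup` now takes only the original string. A sketch of the resulting flow, assuming (as the original comment suggests) that the fixup appends whatever terminators an unterminated `url(` still needs:

```js
const {
  InspectorCSSParserWrapper,
} = require("resource://devtools/shared/css/lexer.js");

// Re-lex `url` and let the wrapper append any missing termination characters.
function sanitizeURL(url) {
  const tokenizer = new InspectorCSSParserWrapper(url, { trackEOFChars: true });
  while (tokenizer.nextToken()) {
    // Drain the stream; we only care about the EOF bookkeeping it builds up.
  }
  return tokenizer.performEOFFixup(url);
}

// e.g. sanitizeURL(`url("foo`) would be expected to close the quote and the
// parenthesis before #appendURL runs URL_REGEX over the result.
```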