Diffstat (limited to 'devtools/shared/inspector/css-logic.js')
 devtools/shared/inspector/css-logic.js | 37 ++++++++++++++++++-------------------
 1 file changed, 18 insertions(+), 19 deletions(-)
diff --git a/devtools/shared/inspector/css-logic.js b/devtools/shared/inspector/css-logic.js
index a716827f17..6cf9aea42c 100644
--- a/devtools/shared/inspector/css-logic.js
+++ b/devtools/shared/inspector/css-logic.js
@@ -17,7 +17,7 @@ const MAX_DATA_URL_LENGTH = 40;
loader.lazyRequireGetter(
this,
- "getCSSLexer",
+ "InspectorCSSParserWrapper",
"resource://devtools/shared/css/lexer.js",
true
);
@@ -290,7 +290,7 @@ function prettifyCSS(text, ruleCount) {
// minified file.
let indent = "";
let indentLevel = 0;
- const tokens = getCSSLexer(text);
+ const lexer = new InspectorCSSParserWrapper(text);
// List of mappings of token positions from original source to prettified source.
const mappings = [];
// Line and column offsets used to shift the token positions after prettyfication.
@@ -307,15 +307,15 @@ function prettifyCSS(text, ruleCount) {
// seen. This function also updates |pushbackToken|.
const readUntilSignificantToken = () => {
while (true) {
- const token = tokens.nextToken();
- if (!token || token.tokenType !== "whitespace") {
+ const token = lexer.nextToken();
+ if (!token || token.tokenType !== "WhiteSpace") {
pushbackToken = token;
return token;
}
// Saw whitespace. Before committing to it, check the next
// token.
- const nextToken = tokens.nextToken();
- if (!nextToken || nextToken.tokenType !== "comment") {
+ const nextToken = lexer.nextToken();
+ if (!nextToken || nextToken.tokenType !== "Comment") {
pushbackToken = nextToken;
return token;
}
@@ -355,15 +355,15 @@ function prettifyCSS(text, ruleCount) {
token = pushbackToken;
pushbackToken = undefined;
} else {
- token = tokens.nextToken();
+ token = lexer.nextToken();
}
if (!token) {
endIndex = text.length;
break;
}
- const line = tokens.lineNumber;
- const column = tokens.columnNumber;
+ const line = lexer.lineNumber;
+ const column = lexer.columnNumber;
mappings.push({
original: {
line,
@@ -377,17 +377,17 @@ function prettifyCSS(text, ruleCount) {
// Shift the column offset for the next token by the current token's length.
columnOffset += token.endOffset - token.startOffset;
- if (token.tokenType === "at") {
+ if (token.tokenType === "AtKeyword") {
isInAtRuleDefinition = true;
}
// A "}" symbol must be inserted later, to deal with indentation
// and newline.
- if (token.tokenType === "symbol" && token.text === "}") {
+ if (token.tokenType === "CloseCurlyBracket") {
isInSelector = true;
isCloseBrace = true;
break;
- } else if (token.tokenType === "symbol" && token.text === "{") {
+ } else if (token.tokenType === "CurlyBracketBlock") {
if (isInAtRuleDefinition) {
isInAtRuleDefinition = false;
} else {
@@ -396,7 +396,7 @@ function prettifyCSS(text, ruleCount) {
break;
}
- if (token.tokenType !== "whitespace") {
+ if (token.tokenType !== "WhiteSpace") {
anyNonWS = true;
}
@@ -405,20 +405,19 @@ function prettifyCSS(text, ruleCount) {
}
endIndex = token.endOffset;
- if (token.tokenType === "symbol" && token.text === ";") {
+ if (token.tokenType === "Semicolon") {
break;
}
if (
- token.tokenType === "symbol" &&
- token.text === "," &&
+ token.tokenType === "Comma" &&
isInSelector &&
!isInAtRuleDefinition
) {
break;
}
- lastWasWS = token.tokenType === "whitespace";
+ lastWasWS = token.tokenType === "WhiteSpace";
}
return token;
};
@@ -474,7 +473,7 @@ function prettifyCSS(text, ruleCount) {
break;
}
- if (token.tokenType === "symbol" && token.text === "{") {
+ if (token.tokenType === "CurlyBracketBlock") {
if (!lastWasWS) {
result += " ";
columnOffset++;
@@ -503,7 +502,7 @@ function prettifyCSS(text, ruleCount) {
ruleCount !== null &&
pushbackToken &&
token &&
- token.tokenType === "whitespace" &&
+ token.tokenType === "WhiteSpace" &&
/\n/g.test(text.substring(token.startOffset, token.endOffset))
) {
return { result: originalText, mappings: [] };
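
Note: the diff above switches prettifyCSS from the old getCSSLexer tokenizer to the InspectorCSSParserWrapper exported by resource://devtools/shared/css/lexer.js. Token types become CamelCase variant names ("WhiteSpace", "Comment", "AtKeyword", "CurlyBracketBlock", "CloseCurlyBracket", "Semicolon", "Comma"), so the old "symbol"-plus-token.text comparisons collapse into single tokenType checks. Below is a minimal sketch of how the new wrapper is consumed, assuming the DevTools CommonJS loader context; the listSignificantTokens helper is hypothetical and only exercises the API surface visible in the diff (nextToken, lineNumber, columnNumber, startOffset/endOffset).

const {
  InspectorCSSParserWrapper,
} = require("resource://devtools/shared/css/lexer.js");

// Hypothetical helper: collect every non-whitespace token with its position,
// using only the wrapper API that prettifyCSS relies on above.
function listSignificantTokens(cssText) {
  const lexer = new InspectorCSSParserWrapper(cssText);
  const tokens = [];
  let token;
  while ((token = lexer.nextToken())) {
    // Mirror the "WhiteSpace" (formerly "whitespace") check from the diff.
    if (token.tokenType === "WhiteSpace") {
      continue;
    }
    tokens.push({
      type: token.tokenType, // e.g. "AtKeyword", "CurlyBracketBlock", "Semicolon"
      text: cssText.substring(token.startOffset, token.endOffset),
      line: lexer.lineNumber,
      column: lexer.columnNumber,
    });
  }
  return tokens;
}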