Diffstat (limited to 'vendor/minifier/src')
-rw-r--r--  vendor/minifier/src/css/mod.rs     2
-rw-r--r--  vendor/minifier/src/css/tests.rs   7
-rw-r--r--  vendor/minifier/src/css/token.rs   8
-rw-r--r--  vendor/minifier/src/js/token.rs   35
-rw-r--r--  vendor/minifier/src/js/utils.rs   22
5 files changed, 31 insertions(+), 43 deletions(-)
diff --git a/vendor/minifier/src/css/mod.rs b/vendor/minifier/src/css/mod.rs
index 224ad8126..70ac41037 100644
--- a/vendor/minifier/src/css/mod.rs
+++ b/vendor/minifier/src/css/mod.rs
@@ -18,7 +18,7 @@ mod token;
/// let css_minified = minify(css).expect("minification failed");
/// assert_eq!(&css_minified.to_string(), ".foo>p{color:red;}");
/// ```
-pub fn minify<'a>(content: &'a str) -> Result<Minified<'a>, &'static str> {
+pub fn minify(content: &str) -> Result<Minified<'_>, &'static str> {
token::tokenize(content).map(Minified)
}
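(The change above only elides the explicit lifetime; call sites are unaffected. A minimal usage sketch, mirroring the doc example in the hunk, with the input string assumed from that example:)

    let css = ".foo > p { color: red; }";
    let css_minified = minifier::css::minify(css).expect("minification failed");
    assert_eq!(&css_minified.to_string(), ".foo>p{color:red;}");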
diff --git a/vendor/minifier/src/css/tests.rs b/vendor/minifier/src/css/tests.rs
index dd696afde..2a3459e1e 100644
--- a/vendor/minifier/src/css/tests.rs
+++ b/vendor/minifier/src/css/tests.rs
@@ -200,6 +200,13 @@ fn check_calc() {
}
#[test]
+fn check_container() {
+ let s = "@container rustdoc (min-width: 1250px) { .foo { width: 100px; } }";
+ let expected = "@container rustdoc (min-width:1250px){.foo{width:100px;}}";
+ assert_eq!(minify(s).expect("minify failed").to_string(), expected);
+}
+
+#[test]
fn check_spaces() {
let s = ".line-numbers .line-highlighted { color: #0a042f !important; }";
let expected = ".line-numbers .line-highlighted{color:#0a042f !important;}";
diff --git a/vendor/minifier/src/css/token.rs b/vendor/minifier/src/css/token.rs
index 58e416fcd..467bed0fa 100644
--- a/vendor/minifier/src/css/token.rs
+++ b/vendor/minifier/src/css/token.rs
@@ -427,7 +427,7 @@ fn fill_other<'a>(
}
#[allow(clippy::comparison_chain)]
-pub(super) fn tokenize<'a>(source: &'a str) -> Result<Tokens<'a>, &'static str> {
+pub(super) fn tokenize(source: &str) -> Result<Tokens<'_>, &'static str> {
let mut v = Vec::with_capacity(1000);
let mut iterator = source.char_indices().peekable();
let mut start = 0;
@@ -579,6 +579,7 @@ fn clean_tokens(mut v: Vec<Token<'_>>) -> Vec<Token<'_>> {
// Index of the previous retained token, if there is one.
let mut ip: Option<usize> = None;
let mut is_in_calc = false;
+ let mut is_in_container = false;
let mut paren = 0;
// A vector of bools indicating which elements are to be retained.
let mut b = Vec::with_capacity(v.len());
@@ -594,6 +595,9 @@ fn clean_tokens(mut v: Vec<Token<'_>>) -> Vec<Token<'_>> {
paren += 1;
}
}
+ if v[i] == Token::SelectorElement(SelectorElement::Media("container")) {
+ is_in_container = true;
+ }
let mut retain = true;
if v[i].is_useless() {
@@ -609,6 +613,8 @@ fn clean_tokens(mut v: Vec<Token<'_>>) -> Vec<Token<'_>> {
// retain the space after "and", "or" or "not"
} else if is_in_calc && v[ip.unwrap()].is_useless() {
retain = false;
+ } else if is_in_container && matches!(v[ip.unwrap()], Token::Other(_)) {
+ // retain spaces between keywords in container queryes
} else if !is_in_calc
&& ((ip.is_some() && {
let prev = &v[ip.unwrap()];
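(In effect, whitespace between plain `Other` tokens is now kept while inside an `@container` rule, so the container name stays separated from its size query. A small illustration, reusing the test case added above:)

    let s = "@container rustdoc (min-width: 1250px) { .foo { width: 100px; } }";
    let out = minifier::css::minify(s).expect("minify failed").to_string();
    assert_eq!(out, "@container rustdoc (min-width:1250px){.foo{width:100px;}}");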
diff --git a/vendor/minifier/src/js/token.rs b/vendor/minifier/src/js/token.rs
index 251394cf6..3888e7076 100644
--- a/vendor/minifier/src/js/token.rs
+++ b/vendor/minifier/src/js/token.rs
@@ -821,12 +821,7 @@ fn handle_equal_sign(v: &mut Vec<Token<'_>>, c: ReservedChar) -> bool {
true
}
-fn check_if_number<'a>(
- iterator: &mut MyPeekable<'_>,
- start: usize,
- pos: usize,
- source: &'a str,
-) -> bool {
+fn check_if_number(iterator: &mut MyPeekable<'_>, start: usize, pos: usize, source: &str) -> bool {
if source[start..pos].find('.').is_some() {
return false;
} else if u64::from_str(&source[start..pos]).is_ok() {
@@ -1064,32 +1059,12 @@ impl<'a> Tokens<'a> {
}
}
-pub struct IntoIterTokens<'a> {
- inner: Tokens<'a>,
-}
-
impl<'a> IntoIterator for Tokens<'a> {
- type Item = (Token<'a>, Option<&'a Token<'a>>);
- type IntoIter = IntoIterTokens<'a>;
+ type Item = Token<'a>;
+ type IntoIter = std::vec::IntoIter<Token<'a>>;
- fn into_iter(mut self) -> Self::IntoIter {
- self.0.reverse();
- IntoIterTokens { inner: self }
- }
-}
-
-impl<'a> Iterator for IntoIterTokens<'a> {
- type Item = (Token<'a>, Option<&'a Token<'a>>);
-
- fn next(&mut self) -> Option<Self::Item> {
- if self.inner.0.is_empty() {
- None
- } else {
- let ret = self.inner.0.pop().expect("pop() failed");
- // FIXME once generic traits' types are stabilized, use a second
- // lifetime instead of transmute!
- Some((ret, unsafe { std::mem::transmute(self.inner.0.last()) }))
- }
+ fn into_iter(self) -> Self::IntoIter {
+ self.0.into_iter()
}
}
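(With `IntoIterTokens` removed, iterating `Tokens` now yields owned `Token` values directly; callers that relied on the old `(token, next)` pairs can recover the look-ahead with `peekable()`, which is exactly what the reworked `js/utils.rs` test below does. A rough migration sketch, assuming the crate's public `simple_minify` entry point:)

    let source = "var x = null;";
    // `into_iter()` now returns std::vec::IntoIter<Token<'_>>
    let mut iter = minifier::js::simple_minify(source).into_iter().peekable();
    while let Some(token) = iter.next() {
        // `peek()` stands in for the old tuple's second field (the next token, if any)
        let next = iter.peek();
        let _ = (token, next);
    }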
diff --git a/vendor/minifier/src/js/utils.rs b/vendor/minifier/src/js/utils.rs
index db765e804..f19d26ebb 100644
--- a/vendor/minifier/src/js/utils.rs
+++ b/vendor/minifier/src/js/utils.rs
@@ -559,17 +559,17 @@ var n = null;
"#;
let expected_result = "var x=['a','b',N,'d',{'x':N,'e':N,'z':'w'}];var n=N;";
- let res: Tokens = crate::js::simple_minify(source)
- .into_iter()
- .filter(|(x, next)| crate::js::clean_token(x, next))
- .map(|(t, _)| {
- if t == Token::Keyword(Keyword::Null) {
+ let mut iter = crate::js::simple_minify(source).into_iter().peekable();
+ let mut tokens = Vec::new();
+ while let Some(token) = iter.next() {
+ if crate::js::clean_token(&token, &iter.peek()) {
+ tokens.push(if token == Token::Keyword(Keyword::Null) {
Token::Other("N")
} else {
- t
- }
- })
- .collect::<Vec<_>>()
- .into();
- assert_eq!(res.to_string(), expected_result);
+ token
+ });
+ }
+ }
+ let tokens: Tokens = tokens.into();
+ assert_eq!(tokens.to_string(), expected_result);
}