summaryrefslogtreecommitdiffstats
path: root/third_party/rust/askama_shared
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-21 11:44:51 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-21 11:44:51 +0000
commit9e3c08db40b8916968b9f30096c7be3f00ce9647 (patch)
treea68f146d7fa01f0134297619fbe7e33db084e0aa /third_party/rust/askama_shared
parentInitial commit. (diff)
downloadthunderbird-9e3c08db40b8916968b9f30096c7be3f00ce9647.tar.xz
thunderbird-9e3c08db40b8916968b9f30096c7be3f00ce9647.zip
Adding upstream version 1:115.7.0.upstream/1%115.7.0upstream
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'third_party/rust/askama_shared')
-rw-r--r--third_party/rust/askama_shared/.cargo-checksum.json1
-rw-r--r--third_party/rust/askama_shared/Cargo.toml83
-rw-r--r--third_party/rust/askama_shared/LICENSE-APACHE25
-rw-r--r--third_party/rust/askama_shared/LICENSE-MIT25
-rw-r--r--third_party/rust/askama_shared/README.md9
-rw-r--r--third_party/rust/askama_shared/src/error.rs95
-rw-r--r--third_party/rust/askama_shared/src/filters/json.rs44
-rw-r--r--third_party/rust/askama_shared/src/filters/mod.rs675
-rw-r--r--third_party/rust/askama_shared/src/filters/yaml.rs34
-rw-r--r--third_party/rust/askama_shared/src/generator.rs1895
-rw-r--r--third_party/rust/askama_shared/src/helpers/mod.rs48
-rw-r--r--third_party/rust/askama_shared/src/heritage.rs125
-rw-r--r--third_party/rust/askama_shared/src/input.rs336
-rw-r--r--third_party/rust/askama_shared/src/lib.rs538
-rw-r--r--third_party/rust/askama_shared/src/parser.rs1799
-rw-r--r--third_party/rust/askama_shared/templates/a.html1
-rw-r--r--third_party/rust/askama_shared/templates/b.html1
-rw-r--r--third_party/rust/askama_shared/templates/sub/b.html1
-rw-r--r--third_party/rust/askama_shared/templates/sub/c.html1
-rw-r--r--third_party/rust/askama_shared/templates/sub/sub1/d.html1
20 files changed, 5737 insertions, 0 deletions
diff --git a/third_party/rust/askama_shared/.cargo-checksum.json b/third_party/rust/askama_shared/.cargo-checksum.json
new file mode 100644
index 0000000000..737b78f1da
--- /dev/null
+++ b/third_party/rust/askama_shared/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"Cargo.toml":"d843e6077028802df1970bc4934bb5bd517bd028a1892a610f8a984a084a641c","LICENSE-APACHE":"df20e0180764bf5bd76f74d47bc9e8c0069a666401629c390003a1d5eba99c92","LICENSE-MIT":"df20e0180764bf5bd76f74d47bc9e8c0069a666401629c390003a1d5eba99c92","README.md":"dbea023a90feb38fd85bb365b453e919b3990d2b1413396b00d46b70c4a855e8","src/error.rs":"e0337c3fb6e9c8babe42d07888c23d2a5ba7fa08670e833c900e41e6d131020d","src/filters/json.rs":"ce662c9835d82dfce51e7a61216f5c527b31592686b7853f72eafc60e82a4651","src/filters/mod.rs":"4115fc70613750a3a5a957ee2700f866793a940f30ce6a06a409e83534b78baf","src/filters/yaml.rs":"90b69e1d29dbed5fccb40c2f868ebf1deb1f7dbb3ced2fcab9bf244a52924e1d","src/generator.rs":"557b10f7aa2567771479243ad46ba885a72c9d97564e2792bbba15225a634b8b","src/helpers/mod.rs":"76e0422acd4ccba7b1735d6ab7622a93f6ec5a2fa89531111d877266784d5334","src/heritage.rs":"a363ef47b061c642d258b849ce7d1644f2c94376a49d6999d955abb0c8f7a685","src/input.rs":"7ccac91c5fa48da23e8ca142a5b392cae73e0e02940c25d9dda0733106f95bc9","src/lib.rs":"bac88b35b3ebd9aa3f1e9f761b1f0c6fc9155c6a714fbd4e81d2dfc34a542645","src/parser.rs":"4604fc4a18ab5e73d6da17e46486ac555907e3874ace372e3aa0c6dba8107fc0","templates/a.html":"b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c","templates/b.html":"7d865e959b2466918c9863afca942d0fb89d7c9ac0c99bafc3749504ded97730","templates/sub/b.html":"7d865e959b2466918c9863afca942d0fb89d7c9ac0c99bafc3749504ded97730","templates/sub/c.html":"bf07a7fbb825fc0aae7bf4a1177b2b31fcf8a3feeaf7092761e18c859ee52a9c","templates/sub/sub1/d.html":"86b0c5a1e2b73b08fd54c727f4458649ed9fe3ad1b6e8ac9460c070113509a1e"},"package":"bf722b94118a07fcbc6640190f247334027685d4e218b794dbfe17c32bf38ed0"} \ No newline at end of file
diff --git a/third_party/rust/askama_shared/Cargo.toml b/third_party/rust/askama_shared/Cargo.toml
new file mode 100644
index 0000000000..58629debea
--- /dev/null
+++ b/third_party/rust/askama_shared/Cargo.toml
@@ -0,0 +1,83 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies.
+#
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
+
+[package]
+edition = "2018"
+name = "askama_shared"
+version = "0.12.2"
+description = "Shared code for Askama"
+homepage = "https://github.com/djc/askama"
+readme = "README.md"
+license = "MIT/Apache-2.0"
+repository = "https://github.com/djc/askama"
+[package.metadata.docs.rs]
+features = ["config", "humansize", "num-traits", "json", "yaml", "percent-encoding"]
+[dependencies.askama_escape]
+version = "0.10.3"
+
+[dependencies.comrak]
+version = "0.12"
+optional = true
+default-features = false
+
+[dependencies.humansize]
+version = "1.1.0"
+optional = true
+
+[dependencies.mime]
+version = "0.3"
+
+[dependencies.mime_guess]
+version = "2"
+
+[dependencies.nom]
+version = "7"
+
+[dependencies.num-traits]
+version = "0.2.6"
+optional = true
+
+[dependencies.percent-encoding]
+version = "2.1.0"
+optional = true
+
+[dependencies.proc-macro2]
+version = "1"
+
+[dependencies.quote]
+version = "1"
+
+[dependencies.serde]
+version = "1.0"
+features = ["derive"]
+optional = true
+
+[dependencies.serde_json]
+version = "1.0"
+optional = true
+
+[dependencies.serde_yaml]
+version = "0.8"
+optional = true
+
+[dependencies.syn]
+version = "1"
+
+[dependencies.toml]
+version = "0.5"
+optional = true
+
+[features]
+config = ["serde", "toml"]
+default = ["config", "humansize", "num-traits", "percent-encoding"]
+json = ["serde", "serde_json", "askama_escape/json"]
+markdown = ["comrak"]
+yaml = ["serde", "serde_yaml"]
diff --git a/third_party/rust/askama_shared/LICENSE-APACHE b/third_party/rust/askama_shared/LICENSE-APACHE
new file mode 100644
index 0000000000..c765f2e75a
--- /dev/null
+++ b/third_party/rust/askama_shared/LICENSE-APACHE
@@ -0,0 +1,25 @@
+Copyright (c) 2017-2020 Dirkjan Ochtman
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
diff --git a/third_party/rust/askama_shared/LICENSE-MIT b/third_party/rust/askama_shared/LICENSE-MIT
new file mode 100644
index 0000000000..c765f2e75a
--- /dev/null
+++ b/third_party/rust/askama_shared/LICENSE-MIT
@@ -0,0 +1,25 @@
+Copyright (c) 2017-2020 Dirkjan Ochtman
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
diff --git a/third_party/rust/askama_shared/README.md b/third_party/rust/askama_shared/README.md
new file mode 100644
index 0000000000..21b4a962fb
--- /dev/null
+++ b/third_party/rust/askama_shared/README.md
@@ -0,0 +1,9 @@
+# askama_shared: shared code for the Askama templating engine
+
+[![Documentation](https://docs.rs/askama_shared/badge.svg)](https://docs.rs/askama_shared/)
+[![Latest version](https://img.shields.io/crates/v/askama_shared.svg)](https://crates.io/crates/askama_shared)
+[![Build Status](https://github.com/djc/askama/workflows/CI/badge.svg)](https://github.com/djc/askama/actions?query=workflow%3ACI)
+[![Chat](https://badges.gitter.im/gitterHQ/gitter.svg)](https://gitter.im/djc/askama)
+
+This crate contains helper code used by the [Askama](https://github.com/djc/askama)
+templating engine.
diff --git a/third_party/rust/askama_shared/src/error.rs b/third_party/rust/askama_shared/src/error.rs
new file mode 100644
index 0000000000..98f2703323
--- /dev/null
+++ b/third_party/rust/askama_shared/src/error.rs
@@ -0,0 +1,95 @@
+use std::fmt::{self, Display};
+
+pub type Result<I, E = Error> = ::std::result::Result<I, E>;
+
+/// askama error type
+///
+/// # Feature Interaction
+///
+/// If the feature `serde_json` is enabled an
+/// additional error variant `Json` is added.
+///
+/// # Why not `failure`/`error-chain`?
+///
+/// Error from `error-chain` are not `Sync` which
+/// can lead to problems e.g. when this is used
+/// by a crate which use `failure`. Implementing
+/// `Fail` on the other hand prevents the implementation
+/// of `std::error::Error` until specialization lands
+/// on stable. While errors impl. `Fail` can be
+/// converted to a type impl. `std::error::Error`
+/// using a adapter the benefits `failure` would
+/// bring to this crate are small, which is why
+/// `std::error::Error` was used.
+///
+#[non_exhaustive]
+#[derive(Debug)]
+pub enum Error {
+ /// formatting error
+ Fmt(fmt::Error),
+
+ /// an error raised by using `?` in a template
+ Custom(Box<dyn std::error::Error + Send + Sync>),
+
+ /// json conversion error
+ #[cfg(feature = "serde_json")]
+ Json(::serde_json::Error),
+
+ /// yaml conversion error
+ #[cfg(feature = "serde_yaml")]
+ Yaml(::serde_yaml::Error),
+}
+
+impl std::error::Error for Error {
+ fn cause(&self) -> Option<&dyn std::error::Error> {
+ match *self {
+ Error::Fmt(ref err) => err.source(),
+ Error::Custom(ref err) => Some(err.as_ref()),
+ #[cfg(feature = "serde_json")]
+ Error::Json(ref err) => err.source(),
+ #[cfg(feature = "serde_yaml")]
+ Error::Yaml(ref err) => err.source(),
+ }
+ }
+}
+
+impl Display for Error {
+ fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ Error::Fmt(err) => write!(formatter, "formatting error: {}", err),
+ Error::Custom(err) => write!(formatter, "{}", err),
+ #[cfg(feature = "serde_json")]
+ Error::Json(err) => write!(formatter, "json conversion error: {}", err),
+ #[cfg(feature = "serde_yaml")]
+ Error::Yaml(err) => write!(formatter, "yaml conversion error: {}", err),
+ }
+ }
+}
+
+impl From<fmt::Error> for Error {
+ fn from(err: fmt::Error) -> Self {
+ Error::Fmt(err)
+ }
+}
+
+#[cfg(feature = "serde_json")]
+impl From<::serde_json::Error> for Error {
+ fn from(err: ::serde_json::Error) -> Self {
+ Error::Json(err)
+ }
+}
+
+#[cfg(feature = "serde_yaml")]
+impl From<::serde_yaml::Error> for Error {
+ fn from(err: ::serde_yaml::Error) -> Self {
+ Error::Yaml(err)
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::Error;
+
+ trait AssertSendSyncStatic: Send + Sync + 'static {}
+ impl AssertSendSyncStatic for Error {}
+}
diff --git a/third_party/rust/askama_shared/src/filters/json.rs b/third_party/rust/askama_shared/src/filters/json.rs
new file mode 100644
index 0000000000..e94e50c1be
--- /dev/null
+++ b/third_party/rust/askama_shared/src/filters/json.rs
@@ -0,0 +1,44 @@
+use crate::error::{Error, Result};
+use askama_escape::JsonEscapeBuffer;
+use serde::Serialize;
+use serde_json::to_writer_pretty;
+
+/// Serialize to JSON (requires `json` feature)
+///
+/// The generated string does not contain ampersands `&`, chevrons `< >`, or apostrophes `'`.
+/// To use it in a `<script>` you can combine it with the safe filter:
+///
+/// ``` html
+/// <script>
+/// var data = {{data|json|safe}};
+/// </script>
+/// ```
+///
+/// To use it in HTML attributes, you can either use it in quotation marks `"{{data|json}}"` as is,
+/// or in apostrophes with the (optional) safe filter `'{{data|json|safe}}'`.
+/// In HTML texts the output of e.g. `<pre>{{data|json|safe}}</pre>` is safe, too.
+pub fn json<S: Serialize>(s: S) -> Result<String> {
+ let mut writer = JsonEscapeBuffer::new();
+ to_writer_pretty(&mut writer, &s).map_err(Error::from)?;
+ Ok(writer.finish())
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn test_json() {
+ assert_eq!(json(true).unwrap(), "true");
+ assert_eq!(json("foo").unwrap(), r#""foo""#);
+ assert_eq!(json(&true).unwrap(), "true");
+ assert_eq!(json(&"foo").unwrap(), r#""foo""#);
+ assert_eq!(
+ json(&vec!["foo", "bar"]).unwrap(),
+ r#"[
+ "foo",
+ "bar"
+]"#
+ );
+ }
+}
diff --git a/third_party/rust/askama_shared/src/filters/mod.rs b/third_party/rust/askama_shared/src/filters/mod.rs
new file mode 100644
index 0000000000..9fb10d2fec
--- /dev/null
+++ b/third_party/rust/askama_shared/src/filters/mod.rs
@@ -0,0 +1,675 @@
+//! Module for built-in filter functions
+//!
+//! Contains all the built-in filter functions for use in templates.
+//! You can define your own filters, as well.
+//! For more information, read the [book](https://djc.github.io/askama/filters.html).
+#![allow(clippy::trivially_copy_pass_by_ref)]
+
+use std::fmt;
+
+#[cfg(feature = "serde_json")]
+mod json;
+#[cfg(feature = "serde_json")]
+pub use self::json::json;
+
+#[cfg(feature = "serde_yaml")]
+mod yaml;
+#[cfg(feature = "serde_yaml")]
+pub use self::yaml::yaml;
+
+#[allow(unused_imports)]
+use crate::error::Error::Fmt;
+use askama_escape::{Escaper, MarkupDisplay};
+#[cfg(feature = "humansize")]
+use humansize::{file_size_opts, FileSize};
+#[cfg(feature = "num-traits")]
+use num_traits::{cast::NumCast, Signed};
+#[cfg(feature = "percent-encoding")]
+use percent_encoding::{utf8_percent_encode, AsciiSet, NON_ALPHANUMERIC};
+
+use super::Result;
+
+#[cfg(feature = "percent-encoding")]
+// Urlencode char encoding set. Only the characters in the unreserved set don't
+// have any special purpose in any part of a URI and can be safely left
+// unencoded as specified in https://tools.ietf.org/html/rfc3986.html#section-2.3
+const URLENCODE_STRICT_SET: &AsciiSet = &NON_ALPHANUMERIC
+ .remove(b'_')
+ .remove(b'.')
+ .remove(b'-')
+ .remove(b'~');
+
+#[cfg(feature = "percent-encoding")]
+// Same as URLENCODE_STRICT_SET, but preserves forward slashes for encoding paths
+const URLENCODE_SET: &AsciiSet = &URLENCODE_STRICT_SET.remove(b'/');
+
+// This is used by the code generator to decide whether a named filter is part of
+// Askama or should refer to a local `filters` module. It should contain all the
+// filters shipped with Askama, even the optional ones (since optional inclusion
+// in the const vector based on features seems impossible right now).
+pub const BUILT_IN_FILTERS: &[&str] = &[
+ "abs",
+ "capitalize",
+ "center",
+ "e",
+ "escape",
+ "filesizeformat",
+ "fmt",
+ "format",
+ "indent",
+ "into_f64",
+ "into_isize",
+ "join",
+ "linebreaks",
+ "linebreaksbr",
+ "paragraphbreaks",
+ "lower",
+ "lowercase",
+ "safe",
+ "trim",
+ "truncate",
+ "upper",
+ "uppercase",
+ "urlencode",
+ "urlencode_strict",
+ "wordcount",
+ // optional features, reserve the names anyway:
+ "json",
+ "markdown",
+ "yaml",
+];
+
+/// Marks a string (or other `Display` type) as safe
+///
+/// Use this is you want to allow markup in an expression, or if you know
+/// that the expression's contents don't need to be escaped.
+///
+/// Askama will automatically insert the first (`Escaper`) argument,
+/// so this filter only takes a single argument of any type that implements
+/// `Display`.
+pub fn safe<E, T>(e: E, v: T) -> Result<MarkupDisplay<E, T>>
+where
+ E: Escaper,
+ T: fmt::Display,
+{
+ Ok(MarkupDisplay::new_safe(v, e))
+}
+
+/// Escapes `&`, `<` and `>` in strings
+///
+/// Askama will automatically insert the first (`Escaper`) argument,
+/// so this filter only takes a single argument of any type that implements
+/// `Display`.
+pub fn escape<E, T>(e: E, v: T) -> Result<MarkupDisplay<E, T>>
+where
+ E: Escaper,
+ T: fmt::Display,
+{
+ Ok(MarkupDisplay::new_unsafe(v, e))
+}
+
+#[cfg(feature = "humansize")]
+/// Returns adequate string representation (in KB, ..) of number of bytes
+pub fn filesizeformat<B: FileSize>(b: &B) -> Result<String> {
+ b.file_size(file_size_opts::DECIMAL)
+ .map_err(|_| Fmt(fmt::Error))
+}
+
+#[cfg(feature = "percent-encoding")]
+/// Percent-encodes the argument for safe use in URI; does not encode `/`.
+///
+/// This should be safe for all parts of URI (paths segments, query keys, query
+/// values). In the rare case that the server can't deal with forward slashes in
+/// the query string, use [`urlencode_strict`], which encodes them as well.
+///
+/// Encodes all characters except ASCII letters, digits, and `_.-~/`. In other
+/// words, encodes all characters which are not in the unreserved set,
+/// as specified by [RFC3986](https://tools.ietf.org/html/rfc3986#section-2.3),
+/// with the exception of `/`.
+///
+/// ```none,ignore
+/// <a href="/metro{{ "/stations/Château d'Eau"|urlencode }}">Station</a>
+/// <a href="/page?text={{ "look, unicode/emojis ✨"|urlencode }}">Page</a>
+/// ```
+///
+/// To encode `/` as well, see [`urlencode_strict`](./fn.urlencode_strict.html).
+///
+/// [`urlencode_strict`]: ./fn.urlencode_strict.html
+pub fn urlencode<T: fmt::Display>(s: T) -> Result<String> {
+ let s = s.to_string();
+ Ok(utf8_percent_encode(&s, URLENCODE_SET).to_string())
+}
+
+#[cfg(feature = "percent-encoding")]
+/// Percent-encodes the argument for safe use in URI; encodes `/`.
+///
+/// Use this filter for encoding query keys and values in the rare case that
+/// the server can't process them unencoded.
+///
+/// Encodes all characters except ASCII letters, digits, and `_.-~`. In other
+/// words, encodes all characters which are not in the unreserved set,
+/// as specified by [RFC3986](https://tools.ietf.org/html/rfc3986#section-2.3).
+///
+/// ```none,ignore
+/// <a href="/page?text={{ "look, unicode/emojis ✨"|urlencode_strict }}">Page</a>
+/// ```
+///
+/// If you want to preserve `/`, see [`urlencode`](./fn.urlencode.html).
+pub fn urlencode_strict<T: fmt::Display>(s: T) -> Result<String> {
+ let s = s.to_string();
+ Ok(utf8_percent_encode(&s, URLENCODE_STRICT_SET).to_string())
+}
+
+/// Formats arguments according to the specified format
+///
+/// The *second* argument to this filter must be a string literal (as in normal
+/// Rust). The two arguments are passed through to the `format!()`
+/// [macro](https://doc.rust-lang.org/stable/std/macro.format.html) by
+/// the Askama code generator, but the order is swapped to support filter
+/// composition.
+///
+/// ```ignore
+/// {{ value | fmt("{:?}") }}
+/// ```
+///
+/// Compare with [format](./fn.format.html).
+pub fn fmt() {}
+
+/// Formats arguments according to the specified format
+///
+/// The first argument to this filter must be a string literal (as in normal
+/// Rust). All arguments are passed through to the `format!()`
+/// [macro](https://doc.rust-lang.org/stable/std/macro.format.html) by
+/// the Askama code generator.
+///
+/// ```ignore
+/// {{ "{:?}{:?}" | format(value, other_value) }}
+/// ```
+///
+/// Compare with [fmt](./fn.fmt.html).
+pub fn format() {}
+
+/// Replaces line breaks in plain text with appropriate HTML
+///
+/// A single newline becomes an HTML line break `<br>` and a new line
+/// followed by a blank line becomes a paragraph break `<p>`.
+pub fn linebreaks<T: fmt::Display>(s: T) -> Result<String> {
+ let s = s.to_string();
+ let linebroken = s.replace("\n\n", "</p><p>").replace('\n', "<br/>");
+
+ Ok(format!("<p>{}</p>", linebroken))
+}
+
+/// Converts all newlines in a piece of plain text to HTML line breaks
+pub fn linebreaksbr<T: fmt::Display>(s: T) -> Result<String> {
+ let s = s.to_string();
+ Ok(s.replace('\n', "<br/>"))
+}
+
+/// Replaces only paragraph breaks in plain text with appropriate HTML
+///
+/// A new line followed by a blank line becomes a paragraph break `<p>`.
+/// Paragraph tags only wrap content; empty paragraphs are removed.
+/// No `<br/>` tags are added.
+pub fn paragraphbreaks<T: fmt::Display>(s: T) -> Result<String> {
+ let s = s.to_string();
+ let linebroken = s.replace("\n\n", "</p><p>").replace("<p></p>", "");
+
+ Ok(format!("<p>{}</p>", linebroken))
+}
+
+/// Converts to lowercase
+pub fn lower<T: fmt::Display>(s: T) -> Result<String> {
+ let s = s.to_string();
+ Ok(s.to_lowercase())
+}
+
+/// Alias for the `lower()` filter
+pub fn lowercase<T: fmt::Display>(s: T) -> Result<String> {
+ lower(s)
+}
+
+/// Converts to uppercase
+pub fn upper<T: fmt::Display>(s: T) -> Result<String> {
+ let s = s.to_string();
+ Ok(s.to_uppercase())
+}
+
+/// Alias for the `upper()` filter
+pub fn uppercase<T: fmt::Display>(s: T) -> Result<String> {
+ upper(s)
+}
+
+/// Strip leading and trailing whitespace
+pub fn trim<T: fmt::Display>(s: T) -> Result<String> {
+ let s = s.to_string();
+ Ok(s.trim().to_owned())
+}
+
+/// Limit string length, appends '...' if truncated
+pub fn truncate<T: fmt::Display>(s: T, len: usize) -> Result<String> {
+ let mut s = s.to_string();
+ if s.len() > len {
+ let mut real_len = len;
+ while !s.is_char_boundary(real_len) {
+ real_len += 1;
+ }
+ s.truncate(real_len);
+ s.push_str("...");
+ }
+ Ok(s)
+}
+
+/// Indent lines with `width` spaces
+pub fn indent<T: fmt::Display>(s: T, width: usize) -> Result<String> {
+ let s = s.to_string();
+
+ let mut indented = String::new();
+
+ for (i, c) in s.char_indices() {
+ indented.push(c);
+
+ if c == '\n' && i < s.len() - 1 {
+ for _ in 0..width {
+ indented.push(' ');
+ }
+ }
+ }
+
+ Ok(indented)
+}
+
+#[cfg(feature = "num-traits")]
+/// Casts number to f64
+pub fn into_f64<T>(number: T) -> Result<f64>
+where
+ T: NumCast,
+{
+ number.to_f64().ok_or(Fmt(fmt::Error))
+}
+
+#[cfg(feature = "num-traits")]
+/// Casts number to isize
+pub fn into_isize<T>(number: T) -> Result<isize>
+where
+ T: NumCast,
+{
+ number.to_isize().ok_or(Fmt(fmt::Error))
+}
+
+/// Joins iterable into a string separated by provided argument
+pub fn join<T, I, S>(input: I, separator: S) -> Result<String>
+where
+ T: fmt::Display,
+ I: Iterator<Item = T>,
+ S: AsRef<str>,
+{
+ let separator: &str = separator.as_ref();
+
+ let mut rv = String::new();
+
+ for (num, item) in input.enumerate() {
+ if num > 0 {
+ rv.push_str(separator);
+ }
+
+ rv.push_str(&format!("{}", item));
+ }
+
+ Ok(rv)
+}
+
+#[cfg(feature = "num-traits")]
+/// Absolute value
+pub fn abs<T>(number: T) -> Result<T>
+where
+ T: Signed,
+{
+ Ok(number.abs())
+}
+
+/// Capitalize a value. The first character will be uppercase, all others lowercase.
+pub fn capitalize<T: fmt::Display>(s: T) -> Result<String> {
+ let mut s = s.to_string();
+
+ match s.get_mut(0..1).map(|s| {
+ s.make_ascii_uppercase();
+ &*s
+ }) {
+ None => Ok(s),
+ _ => {
+ s.get_mut(1..).map(|s| {
+ s.make_ascii_lowercase();
+ &*s
+ });
+ Ok(s)
+ }
+ }
+}
+
+/// Centers the value in a field of a given width
+pub fn center(src: &dyn fmt::Display, dst_len: usize) -> Result<String> {
+ let src = src.to_string();
+ let len = src.len();
+
+ if dst_len <= len {
+ Ok(src)
+ } else {
+ let diff = dst_len - len;
+ let mid = diff / 2;
+ let r = diff % 2;
+ let mut buf = String::with_capacity(dst_len);
+
+ for _ in 0..mid {
+ buf.push(' ');
+ }
+
+ buf.push_str(&src);
+
+ for _ in 0..mid + r {
+ buf.push(' ');
+ }
+
+ Ok(buf)
+ }
+}
+
+/// Count the words in that string
+pub fn wordcount<T: fmt::Display>(s: T) -> Result<usize> {
+ let s = s.to_string();
+
+ Ok(s.split_whitespace().count())
+}
+
+#[cfg(feature = "markdown")]
+pub fn markdown<E, S>(
+ e: E,
+ s: S,
+ options: Option<&comrak::ComrakOptions>,
+) -> Result<MarkupDisplay<E, String>>
+where
+ E: Escaper,
+ S: AsRef<str>,
+{
+ use comrak::{
+ markdown_to_html, ComrakExtensionOptions, ComrakOptions, ComrakParseOptions,
+ ComrakRenderOptions,
+ };
+
+ const DEFAULT_OPTIONS: ComrakOptions = ComrakOptions {
+ extension: ComrakExtensionOptions {
+ strikethrough: true,
+ tagfilter: true,
+ table: true,
+ autolink: true,
+ // default:
+ tasklist: false,
+ superscript: false,
+ header_ids: None,
+ footnotes: false,
+ description_lists: false,
+ front_matter_delimiter: None,
+ },
+ parse: ComrakParseOptions {
+ // default:
+ smart: false,
+ default_info_string: None,
+ },
+ render: ComrakRenderOptions {
+ unsafe_: false,
+ escape: true,
+ // default:
+ hardbreaks: false,
+ github_pre_lang: false,
+ width: 0,
+ },
+ };
+
+ let s = markdown_to_html(s.as_ref(), options.unwrap_or(&DEFAULT_OPTIONS));
+ Ok(MarkupDisplay::new_safe(s, e))
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ #[cfg(feature = "num-traits")]
+ use std::f64::INFINITY;
+
+ #[cfg(feature = "humansize")]
+ #[test]
+ fn test_filesizeformat() {
+ assert_eq!(filesizeformat(&0).unwrap(), "0 B");
+ assert_eq!(filesizeformat(&999u64).unwrap(), "999 B");
+ assert_eq!(filesizeformat(&1000i32).unwrap(), "1 KB");
+ assert_eq!(filesizeformat(&1023).unwrap(), "1.02 KB");
+ assert_eq!(filesizeformat(&1024usize).unwrap(), "1.02 KB");
+ }
+
+ #[cfg(feature = "percent-encoding")]
+ #[test]
+ fn test_urlencoding() {
+ // Unreserved (https://tools.ietf.org/html/rfc3986.html#section-2.3)
+ // alpha / digit
+ assert_eq!(urlencode(&"AZaz09").unwrap(), "AZaz09");
+ assert_eq!(urlencode_strict(&"AZaz09").unwrap(), "AZaz09");
+ // other
+ assert_eq!(urlencode(&"_.-~").unwrap(), "_.-~");
+ assert_eq!(urlencode_strict(&"_.-~").unwrap(), "_.-~");
+
+ // Reserved (https://tools.ietf.org/html/rfc3986.html#section-2.2)
+ // gen-delims
+ assert_eq!(urlencode(&":/?#[]@").unwrap(), "%3A/%3F%23%5B%5D%40");
+ assert_eq!(
+ urlencode_strict(&":/?#[]@").unwrap(),
+ "%3A%2F%3F%23%5B%5D%40"
+ );
+ // sub-delims
+ assert_eq!(
+ urlencode(&"!$&'()*+,;=").unwrap(),
+ "%21%24%26%27%28%29%2A%2B%2C%3B%3D"
+ );
+ assert_eq!(
+ urlencode_strict(&"!$&'()*+,;=").unwrap(),
+ "%21%24%26%27%28%29%2A%2B%2C%3B%3D"
+ );
+
+ // Other
+ assert_eq!(
+ urlencode(&"žŠďŤňĚáÉóŮ").unwrap(),
+ "%C5%BE%C5%A0%C4%8F%C5%A4%C5%88%C4%9A%C3%A1%C3%89%C3%B3%C5%AE"
+ );
+ assert_eq!(
+ urlencode_strict(&"žŠďŤňĚáÉóŮ").unwrap(),
+ "%C5%BE%C5%A0%C4%8F%C5%A4%C5%88%C4%9A%C3%A1%C3%89%C3%B3%C5%AE"
+ );
+
+ // Ferris
+ assert_eq!(urlencode(&"🦀").unwrap(), "%F0%9F%A6%80");
+ assert_eq!(urlencode_strict(&"🦀").unwrap(), "%F0%9F%A6%80");
+ }
+
+ #[test]
+ fn test_linebreaks() {
+ assert_eq!(
+ linebreaks(&"Foo\nBar Baz").unwrap(),
+ "<p>Foo<br/>Bar Baz</p>"
+ );
+ assert_eq!(
+ linebreaks(&"Foo\nBar\n\nBaz").unwrap(),
+ "<p>Foo<br/>Bar</p><p>Baz</p>"
+ );
+ }
+
+ #[test]
+ fn test_linebreaksbr() {
+ assert_eq!(linebreaksbr(&"Foo\nBar").unwrap(), "Foo<br/>Bar");
+ assert_eq!(
+ linebreaksbr(&"Foo\nBar\n\nBaz").unwrap(),
+ "Foo<br/>Bar<br/><br/>Baz"
+ );
+ }
+
+ #[test]
+ fn test_paragraphbreaks() {
+ assert_eq!(
+ paragraphbreaks(&"Foo\nBar Baz").unwrap(),
+ "<p>Foo\nBar Baz</p>"
+ );
+ assert_eq!(
+ paragraphbreaks(&"Foo\nBar\n\nBaz").unwrap(),
+ "<p>Foo\nBar</p><p>Baz</p>"
+ );
+ assert_eq!(
+ paragraphbreaks(&"Foo\n\n\n\n\nBar\n\nBaz").unwrap(),
+ "<p>Foo</p><p>\nBar</p><p>Baz</p>"
+ );
+ }
+
+ #[test]
+ fn test_lower() {
+ assert_eq!(lower(&"Foo").unwrap(), "foo");
+ assert_eq!(lower(&"FOO").unwrap(), "foo");
+ assert_eq!(lower(&"FooBar").unwrap(), "foobar");
+ assert_eq!(lower(&"foo").unwrap(), "foo");
+ }
+
+ #[test]
+ fn test_upper() {
+ assert_eq!(upper(&"Foo").unwrap(), "FOO");
+ assert_eq!(upper(&"FOO").unwrap(), "FOO");
+ assert_eq!(upper(&"FooBar").unwrap(), "FOOBAR");
+ assert_eq!(upper(&"foo").unwrap(), "FOO");
+ }
+
+ #[test]
+ fn test_trim() {
+ assert_eq!(trim(&" Hello\tworld\t").unwrap(), "Hello\tworld");
+ }
+
+ #[test]
+ fn test_truncate() {
+ assert_eq!(truncate(&"hello", 2).unwrap(), "he...");
+ let a = String::from("您好");
+ assert_eq!(a.len(), 6);
+ assert_eq!(String::from("您").len(), 3);
+ assert_eq!(truncate(&"您好", 1).unwrap(), "您...");
+ assert_eq!(truncate(&"您好", 2).unwrap(), "您...");
+ assert_eq!(truncate(&"您好", 3).unwrap(), "您...");
+ assert_eq!(truncate(&"您好", 4).unwrap(), "您好...");
+ assert_eq!(truncate(&"您好", 6).unwrap(), "您好");
+ assert_eq!(truncate(&"您好", 7).unwrap(), "您好");
+ let s = String::from("🤚a🤚");
+ assert_eq!(s.len(), 9);
+ assert_eq!(String::from("🤚").len(), 4);
+ assert_eq!(truncate(&"🤚a🤚", 1).unwrap(), "🤚...");
+ assert_eq!(truncate(&"🤚a🤚", 2).unwrap(), "🤚...");
+ assert_eq!(truncate(&"🤚a🤚", 3).unwrap(), "🤚...");
+ assert_eq!(truncate(&"🤚a🤚", 4).unwrap(), "🤚...");
+ assert_eq!(truncate(&"🤚a🤚", 5).unwrap(), "🤚a...");
+ assert_eq!(truncate(&"🤚a🤚", 6).unwrap(), "🤚a🤚...");
+ assert_eq!(truncate(&"🤚a🤚", 9).unwrap(), "🤚a🤚");
+ assert_eq!(truncate(&"🤚a🤚", 10).unwrap(), "🤚a🤚");
+ }
+
+ #[test]
+ fn test_indent() {
+ assert_eq!(indent(&"hello", 2).unwrap(), "hello");
+ assert_eq!(indent(&"hello\n", 2).unwrap(), "hello\n");
+ assert_eq!(indent(&"hello\nfoo", 2).unwrap(), "hello\n foo");
+ assert_eq!(
+ indent(&"hello\nfoo\n bar", 4).unwrap(),
+ "hello\n foo\n bar"
+ );
+ }
+
+ #[cfg(feature = "num-traits")]
+ #[test]
+ #[allow(clippy::float_cmp)]
+ fn test_into_f64() {
+ assert_eq!(into_f64(1).unwrap(), 1.0_f64);
+ assert_eq!(into_f64(1.9).unwrap(), 1.9_f64);
+ assert_eq!(into_f64(-1.9).unwrap(), -1.9_f64);
+ assert_eq!(into_f64(INFINITY as f32).unwrap(), INFINITY);
+ assert_eq!(into_f64(-INFINITY as f32).unwrap(), -INFINITY);
+ }
+
+ #[cfg(feature = "num-traits")]
+ #[test]
+ fn test_into_isize() {
+ assert_eq!(into_isize(1).unwrap(), 1_isize);
+ assert_eq!(into_isize(1.9).unwrap(), 1_isize);
+ assert_eq!(into_isize(-1.9).unwrap(), -1_isize);
+ assert_eq!(into_isize(1.5_f64).unwrap(), 1_isize);
+ assert_eq!(into_isize(-1.5_f64).unwrap(), -1_isize);
+ match into_isize(INFINITY) {
+ Err(Fmt(fmt::Error)) => {}
+ _ => panic!("Should return error of type Err(Fmt(fmt::Error))"),
+ };
+ }
+
+ #[allow(clippy::needless_borrow)]
+ #[test]
+ fn test_join() {
+ assert_eq!(
+ join((&["hello", "world"]).iter(), ", ").unwrap(),
+ "hello, world"
+ );
+ assert_eq!(join((&["hello"]).iter(), ", ").unwrap(), "hello");
+
+ let empty: &[&str] = &[];
+ assert_eq!(join(empty.iter(), ", ").unwrap(), "");
+
+ let input: Vec<String> = vec!["foo".into(), "bar".into(), "bazz".into()];
+ assert_eq!(join(input.iter(), ":").unwrap(), "foo:bar:bazz");
+
+ let input: &[String] = &["foo".into(), "bar".into()];
+ assert_eq!(join(input.iter(), ":").unwrap(), "foo:bar");
+
+ let real: String = "blah".into();
+ let input: Vec<&str> = vec![&real];
+ assert_eq!(join(input.iter(), ";").unwrap(), "blah");
+
+ assert_eq!(
+ join((&&&&&["foo", "bar"]).iter(), ", ").unwrap(),
+ "foo, bar"
+ );
+ }
+
+ #[cfg(feature = "num-traits")]
+ #[test]
+ #[allow(clippy::float_cmp)]
+ fn test_abs() {
+ assert_eq!(abs(1).unwrap(), 1);
+ assert_eq!(abs(-1).unwrap(), 1);
+ assert_eq!(abs(1.0).unwrap(), 1.0);
+ assert_eq!(abs(-1.0).unwrap(), 1.0);
+ assert_eq!(abs(1.0_f64).unwrap(), 1.0_f64);
+ assert_eq!(abs(-1.0_f64).unwrap(), 1.0_f64);
+ }
+
+ #[test]
+ fn test_capitalize() {
+ assert_eq!(capitalize(&"foo").unwrap(), "Foo".to_string());
+ assert_eq!(capitalize(&"f").unwrap(), "F".to_string());
+ assert_eq!(capitalize(&"fO").unwrap(), "Fo".to_string());
+ assert_eq!(capitalize(&"").unwrap(), "".to_string());
+ assert_eq!(capitalize(&"FoO").unwrap(), "Foo".to_string());
+ assert_eq!(capitalize(&"foO BAR").unwrap(), "Foo bar".to_string());
+ }
+
+ #[test]
+ fn test_center() {
+ assert_eq!(center(&"f", 3).unwrap(), " f ".to_string());
+ assert_eq!(center(&"f", 4).unwrap(), " f ".to_string());
+ assert_eq!(center(&"foo", 1).unwrap(), "foo".to_string());
+ assert_eq!(center(&"foo bar", 8).unwrap(), "foo bar ".to_string());
+ }
+
+ #[test]
+ fn test_wordcount() {
+ assert_eq!(wordcount(&"").unwrap(), 0);
+ assert_eq!(wordcount(&" \n\t").unwrap(), 0);
+ assert_eq!(wordcount(&"foo").unwrap(), 1);
+ assert_eq!(wordcount(&"foo bar").unwrap(), 2);
+ }
+}
diff --git a/third_party/rust/askama_shared/src/filters/yaml.rs b/third_party/rust/askama_shared/src/filters/yaml.rs
new file mode 100644
index 0000000000..d71e6303e4
--- /dev/null
+++ b/third_party/rust/askama_shared/src/filters/yaml.rs
@@ -0,0 +1,34 @@
+use crate::error::{Error, Result};
+use askama_escape::{Escaper, MarkupDisplay};
+use serde::Serialize;
+
+/// Serialize to YAML (requires `serde_yaml` feature)
+///
+/// ## Errors
+///
+/// This will panic if `S`'s implementation of `Serialize` decides to fail,
+/// or if `T` contains a map with non-string keys.
+pub fn yaml<E: Escaper, S: Serialize>(e: E, s: S) -> Result<MarkupDisplay<E, String>> {
+ match serde_yaml::to_string(&s) {
+ Ok(s) => Ok(MarkupDisplay::new_safe(s, e)),
+ Err(e) => Err(Error::from(e)),
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use askama_escape::Html;
+
+ #[test]
+ fn test_yaml() {
+ assert_eq!(yaml(Html, true).unwrap().to_string(), "---\ntrue");
+ assert_eq!(yaml(Html, "foo").unwrap().to_string(), "---\nfoo");
+ assert_eq!(yaml(Html, &true).unwrap().to_string(), "---\ntrue");
+ assert_eq!(yaml(Html, &"foo").unwrap().to_string(), "---\nfoo");
+ assert_eq!(
+ yaml(Html, &vec!["foo", "bar"]).unwrap().to_string(),
+ "---\n- foo\n- bar"
+ );
+ }
+}
diff --git a/third_party/rust/askama_shared/src/generator.rs b/third_party/rust/askama_shared/src/generator.rs
new file mode 100644
index 0000000000..99971392dc
--- /dev/null
+++ b/third_party/rust/askama_shared/src/generator.rs
@@ -0,0 +1,1895 @@
+use super::{get_template_source, CompileError, Integrations};
+use crate::filters;
+use crate::heritage::{Context, Heritage};
+use crate::input::{Source, TemplateInput};
+use crate::parser::{parse, Cond, CondTest, Expr, Loop, Node, Target, When, Ws};
+
+use proc_macro2::Span;
+
+use quote::{quote, ToTokens};
+
+use std::collections::HashMap;
+use std::path::Path;
+use std::{cmp, hash, mem, str};
+
+pub fn generate<S: std::hash::BuildHasher>(
+ input: &TemplateInput<'_>,
+ contexts: &HashMap<&Path, Context<'_>, S>,
+ heritage: Option<&Heritage<'_>>,
+ integrations: Integrations,
+) -> Result<String, CompileError> {
+ Generator::new(input, contexts, heritage, integrations, MapChain::new())
+ .build(&contexts[input.path.as_path()])
+}
+
+struct Generator<'a, S: std::hash::BuildHasher> {
+ // The template input state: original struct AST and attributes
+ input: &'a TemplateInput<'a>,
+ // All contexts, keyed by the package-relative template path
+ contexts: &'a HashMap<&'a Path, Context<'a>, S>,
+ // The heritage contains references to blocks and their ancestry
+ heritage: Option<&'a Heritage<'a>>,
+ // What integrations need to be generated
+ integrations: Integrations,
+ // Variables accessible directly from the current scope (not redirected to context)
+ locals: MapChain<'a, &'a str, LocalMeta>,
+ // Suffix whitespace from the previous literal. Will be flushed to the
+ // output buffer unless suppressed by whitespace suppression on the next
+ // non-literal.
+ next_ws: Option<&'a str>,
+ // Whitespace suppression from the previous non-literal. Will be used to
+ // determine whether to flush prefix whitespace from the next literal.
+ skip_ws: bool,
+ // If currently in a block, this will contain the name of a potential parent block
+ super_block: Option<(&'a str, usize)>,
+ // buffer for writable
+ buf_writable: Vec<Writable<'a>>,
+ // Counter for write! hash named arguments
+ named: usize,
+}
+
+impl<'a, S: std::hash::BuildHasher> Generator<'a, S> {
+ fn new<'n>(
+ input: &'n TemplateInput<'_>,
+ contexts: &'n HashMap<&'n Path, Context<'n>, S>,
+ heritage: Option<&'n Heritage<'_>>,
+ integrations: Integrations,
+ locals: MapChain<'n, &'n str, LocalMeta>,
+ ) -> Generator<'n, S> {
+ Generator {
+ input,
+ contexts,
+ heritage,
+ integrations,
+ locals,
+ next_ws: None,
+ skip_ws: false,
+ super_block: None,
+ buf_writable: vec![],
+ named: 0,
+ }
+ }
+
+ fn child(&mut self) -> Generator<'_, S> {
+ let locals = MapChain::with_parent(&self.locals);
+ Self::new(
+ self.input,
+ self.contexts,
+ self.heritage,
+ self.integrations,
+ locals,
+ )
+ }
+
+ // Takes a Context and generates the relevant implementations.
+ fn build(mut self, ctx: &'a Context<'_>) -> Result<String, CompileError> {
+ let mut buf = Buffer::new(0);
+ if !ctx.blocks.is_empty() {
+ if let Some(parent) = self.input.parent {
+ self.deref_to_parent(&mut buf, parent)?;
+ }
+ };
+
+ self.impl_template(ctx, &mut buf)?;
+ self.impl_display(&mut buf)?;
+
+ if self.integrations.actix {
+ self.impl_actix_web_responder(&mut buf)?;
+ }
+ if self.integrations.axum {
+ self.impl_axum_into_response(&mut buf)?;
+ }
+ if self.integrations.gotham {
+ self.impl_gotham_into_response(&mut buf)?;
+ }
+ if self.integrations.mendes {
+ self.impl_mendes_responder(&mut buf)?;
+ }
+ if self.integrations.rocket {
+ self.impl_rocket_responder(&mut buf)?;
+ }
+ if self.integrations.tide {
+ self.impl_tide_integrations(&mut buf)?;
+ }
+ if self.integrations.warp {
+ self.impl_warp_reply(&mut buf)?;
+ }
+ Ok(buf.buf)
+ }
+
+ // Implement `Template` for the given context struct.
+ fn impl_template(
+ &mut self,
+ ctx: &'a Context<'_>,
+ buf: &mut Buffer,
+ ) -> Result<(), CompileError> {
+ self.write_header(buf, "::askama::Template", None)?;
+ buf.writeln(
+ "fn render_into(&self, writer: &mut (impl ::std::fmt::Write + ?Sized)) -> \
+ ::askama::Result<()> {",
+ )?;
+
+ // Make sure the compiler understands that the generated code depends on the template files.
+ for path in self.contexts.keys() {
+ // Skip the fake path of templates defined in rust source.
+ let path_is_valid = match self.input.source {
+ Source::Path(_) => true,
+ Source::Source(_) => path != &self.input.path,
+ };
+ if path_is_valid {
+ let path = path.to_str().unwrap();
+ buf.writeln(
+ &quote! {
+ include_bytes!(#path);
+ }
+ .to_string(),
+ )?;
+ }
+ }
+
+ let size_hint = if let Some(heritage) = self.heritage {
+ self.handle(heritage.root, heritage.root.nodes, buf, AstLevel::Top)
+ } else {
+ self.handle(ctx, ctx.nodes, buf, AstLevel::Top)
+ }?;
+
+ self.flush_ws(Ws(false, false));
+ buf.writeln("::askama::Result::Ok(())")?;
+ buf.writeln("}")?;
+
+ buf.writeln("const EXTENSION: ::std::option::Option<&'static ::std::primitive::str> = ")?;
+ buf.writeln(&format!("{:?}", self.input.extension()))?;
+ buf.writeln(";")?;
+
+ buf.writeln("const SIZE_HINT: ::std::primitive::usize = ")?;
+ buf.writeln(&format!("{}", size_hint))?;
+ buf.writeln(";")?;
+
+ buf.writeln("const MIME_TYPE: &'static ::std::primitive::str = ")?;
+ buf.writeln(&format!("{:?}", &self.input.mime_type))?;
+ buf.writeln(";")?;
+
+ buf.writeln("}")?;
+ Ok(())
+ }
+
+ // Implement `Deref<Parent>` for an inheriting context struct.
+ fn deref_to_parent(
+ &mut self,
+ buf: &mut Buffer,
+ parent_type: &syn::Type,
+ ) -> Result<(), CompileError> {
+ self.write_header(buf, "::std::ops::Deref", None)?;
+ buf.writeln(&format!(
+ "type Target = {};",
+ parent_type.into_token_stream()
+ ))?;
+ buf.writeln("#[inline]")?;
+ buf.writeln("fn deref(&self) -> &Self::Target {")?;
+ buf.writeln("&self._parent")?;
+ buf.writeln("}")?;
+ buf.writeln("}")
+ }
+
+ // Implement `Display` for the given context struct.
+ fn impl_display(&mut self, buf: &mut Buffer) -> Result<(), CompileError> {
+ self.write_header(buf, "::std::fmt::Display", None)?;
+ buf.writeln("#[inline]")?;
+ buf.writeln("fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {")?;
+ buf.writeln("::askama::Template::render_into(self, f).map_err(|_| ::std::fmt::Error {})")?;
+ buf.writeln("}")?;
+ buf.writeln("}")
+ }
+
+ // Implement Actix-web's `Responder`.
+ fn impl_actix_web_responder(&mut self, buf: &mut Buffer) -> Result<(), CompileError> {
+ self.write_header(buf, "::askama_actix::actix_web::Responder", None)?;
+ buf.writeln("type Body = ::askama_actix::actix_web::body::BoxBody;")?;
+ buf.writeln("#[inline]")?;
+ buf.writeln(
+ "fn respond_to(self, _req: &::askama_actix::actix_web::HttpRequest) \
+ -> ::askama_actix::actix_web::HttpResponse<Self::Body> {",
+ )?;
+ buf.writeln("<Self as ::askama_actix::TemplateToResponse>::to_response(&self)")?;
+ buf.writeln("}")?;
+ buf.writeln("}")
+ }
+
+ // Implement Axum's `IntoResponse`.
+ fn impl_axum_into_response(&mut self, buf: &mut Buffer) -> Result<(), CompileError> {
+ self.write_header(buf, "::askama_axum::IntoResponse", None)?;
+ buf.writeln("#[inline]")?;
+ buf.writeln(
+ "fn into_response(self)\
+ -> ::askama_axum::Response<::askama_axum::BoxBody> {",
+ )?;
+ let ext = self.input.extension().unwrap_or("txt");
+ buf.writeln(&format!("::askama_axum::into_response(&self, {:?})", ext))?;
+ buf.writeln("}")?;
+ buf.writeln("}")
+ }
+
+ // Implement gotham's `IntoResponse`.
+ fn impl_gotham_into_response(&mut self, buf: &mut Buffer) -> Result<(), CompileError> {
+ self.write_header(buf, "::askama_gotham::IntoResponse", None)?;
+ buf.writeln("#[inline]")?;
+ buf.writeln(
+ "fn into_response(self, _state: &::askama_gotham::State)\
+ -> ::askama_gotham::Response<::askama_gotham::Body> {",
+ )?;
+ let ext = self.input.extension().unwrap_or("txt");
+ buf.writeln(&format!("::askama_gotham::respond(&self, {:?})", ext))?;
+ buf.writeln("}")?;
+ buf.writeln("}")
+ }
+
+ // Implement mendes' `Responder`.
+ fn impl_mendes_responder(&mut self, buf: &mut Buffer) -> Result<(), CompileError> {
+ let param = syn::parse_str("A: ::mendes::Application").unwrap();
+
+ let mut generics = self.input.ast.generics.clone();
+ generics.params.push(param);
+ let (_, orig_ty_generics, _) = self.input.ast.generics.split_for_impl();
+ let (impl_generics, _, where_clause) = generics.split_for_impl();
+
+ let mut where_clause = match where_clause {
+ Some(clause) => clause.clone(),
+ None => syn::WhereClause {
+ where_token: syn::Token![where](Span::call_site()),
+ predicates: syn::punctuated::Punctuated::new(),
+ },
+ };
+
+ where_clause
+ .predicates
+ .push(syn::parse_str("A::ResponseBody: From<String>").unwrap());
+ where_clause
+ .predicates
+ .push(syn::parse_str("A::Error: From<::askama_mendes::Error>").unwrap());
+
+ buf.writeln(
+ format!(
+ "{} {} for {} {} {{",
+ quote!(impl#impl_generics),
+ "::mendes::application::IntoResponse<A>",
+ self.input.ast.ident,
+ quote!(#orig_ty_generics #where_clause),
+ )
+ .as_ref(),
+ )?;
+
+ buf.writeln(
+ "fn into_response(self, app: &A, req: &::mendes::http::request::Parts) \
+ -> ::mendes::http::Response<A::ResponseBody> {",
+ )?;
+
+ buf.writeln(&format!(
+ "::askama_mendes::into_response(app, req, &self, {:?})",
+ self.input.extension()
+ ))?;
+ buf.writeln("}")?;
+ buf.writeln("}")?;
+ Ok(())
+ }
+
+ // Implement Rocket's `Responder`.
+ fn impl_rocket_responder(&mut self, buf: &mut Buffer) -> Result<(), CompileError> {
+ let lifetime = syn::Lifetime::new("'askama", Span::call_site());
+ let param = syn::GenericParam::Lifetime(syn::LifetimeDef::new(lifetime));
+ self.write_header(
+ buf,
+ "::askama_rocket::Responder<'askama>",
+ Some(vec![param]),
+ )?;
+
+ buf.writeln("#[inline]")?;
+ buf.writeln(
+ "fn respond_to(self, _: &::askama_rocket::Request) \
+ -> ::askama_rocket::Result<'askama> {",
+ )?;
+ let ext = self.input.extension().unwrap_or("txt");
+ buf.writeln(&format!("::askama_rocket::respond(&self, {:?})", ext))?;
+
+ buf.writeln("}")?;
+ buf.writeln("}")?;
+ Ok(())
+ }
+
+ fn impl_tide_integrations(&mut self, buf: &mut Buffer) -> Result<(), CompileError> {
+ let ext = self.input.extension().unwrap_or("txt");
+
+ self.write_header(
+ buf,
+ "::std::convert::TryInto<::askama_tide::tide::Body>",
+ None,
+ )?;
+ buf.writeln(
+ "type Error = ::askama_tide::askama::Error;\n\
+ #[inline]\n\
+ fn try_into(self) -> ::askama_tide::askama::Result<::askama_tide::tide::Body> {",
+ )?;
+ buf.writeln(&format!("::askama_tide::try_into_body(&self, {:?})", &ext))?;
+ buf.writeln("}")?;
+ buf.writeln("}")?;
+
+ buf.writeln("#[allow(clippy::from_over_into)]")?;
+ self.write_header(buf, "Into<::askama_tide::tide::Response>", None)?;
+ buf.writeln("#[inline]")?;
+ buf.writeln("fn into(self) -> ::askama_tide::tide::Response {")?;
+ buf.writeln(&format!("::askama_tide::into_response(&self, {:?})", ext))?;
+ buf.writeln("}\n}")
+ }
+
+ fn impl_warp_reply(&mut self, buf: &mut Buffer) -> Result<(), CompileError> {
+ self.write_header(buf, "::askama_warp::warp::reply::Reply", None)?;
+ buf.writeln("#[inline]")?;
+ buf.writeln("fn into_response(self) -> ::askama_warp::warp::reply::Response {")?;
+ let ext = self.input.extension().unwrap_or("txt");
+ buf.writeln(&format!("::askama_warp::reply(&self, {:?})", ext))?;
+ buf.writeln("}")?;
+ buf.writeln("}")
+ }
+
+ // Writes header for the `impl` for `TraitFromPathName` or `Template`
+ // for the given context struct.
+ fn write_header(
+ &mut self,
+ buf: &mut Buffer,
+ target: &str,
+ params: Option<Vec<syn::GenericParam>>,
+ ) -> Result<(), CompileError> {
+ let mut generics = self.input.ast.generics.clone();
+ if let Some(params) = params {
+ for param in params {
+ generics.params.push(param);
+ }
+ }
+ let (_, orig_ty_generics, _) = self.input.ast.generics.split_for_impl();
+ let (impl_generics, _, where_clause) = generics.split_for_impl();
+ buf.writeln(
+ format!(
+ "{} {} for {}{} {{",
+ quote!(impl#impl_generics),
+ target,
+ self.input.ast.ident,
+ quote!(#orig_ty_generics #where_clause),
+ )
+ .as_ref(),
+ )
+ }
+
+ /* Helper methods for handling node types */
+
+ fn handle(
+ &mut self,
+ ctx: &'a Context<'_>,
+ nodes: &'a [Node<'_>],
+ buf: &mut Buffer,
+ level: AstLevel,
+ ) -> Result<usize, CompileError> {
+ let mut size_hint = 0;
+ for n in nodes {
+ match *n {
+ Node::Lit(lws, val, rws) => {
+ self.visit_lit(lws, val, rws);
+ }
+ Node::Comment(ws) => {
+ self.write_comment(ws);
+ }
+ Node::Expr(ws, ref val) => {
+ self.write_expr(ws, val);
+ }
+ Node::LetDecl(ws, ref var) => {
+ self.write_let_decl(buf, ws, var)?;
+ }
+ Node::Let(ws, ref var, ref val) => {
+ self.write_let(buf, ws, var, val)?;
+ }
+ Node::Cond(ref conds, ws) => {
+ self.write_cond(ctx, buf, conds, ws)?;
+ }
+ Node::Match(ws1, ref expr, ref arms, ws2) => {
+ self.write_match(ctx, buf, ws1, expr, arms, ws2)?;
+ }
+ Node::Loop(ref loop_block) => {
+ self.write_loop(ctx, buf, loop_block)?;
+ }
+ Node::BlockDef(ws1, name, _, ws2) => {
+ self.write_block(buf, Some(name), Ws(ws1.0, ws2.1))?;
+ }
+ Node::Include(ws, path) => {
+ size_hint += self.handle_include(ctx, buf, ws, path)?;
+ }
+ Node::Call(ws, scope, name, ref args) => {
+ size_hint += self.write_call(ctx, buf, ws, scope, name, args)?;
+ }
+ Node::Macro(_, ref m) => {
+ if level != AstLevel::Top {
+ return Err("macro blocks only allowed at the top level".into());
+ }
+ self.flush_ws(m.ws1);
+ self.prepare_ws(m.ws2);
+ }
+ Node::Raw(ws1, lws, val, rws, ws2) => {
+ self.handle_ws(ws1);
+ self.visit_lit(lws, val, rws);
+ self.handle_ws(ws2);
+ }
+ Node::Import(ws, _, _) => {
+ if level != AstLevel::Top {
+ return Err("import blocks only allowed at the top level".into());
+ }
+ self.handle_ws(ws);
+ }
+ Node::Extends(_) => {
+ if level != AstLevel::Top {
+ return Err("extend blocks only allowed at the top level".into());
+ }
+ // No whitespace handling: child template top-level is not used,
+ // except for the blocks defined in it.
+ }
+ Node::Break(ws) => {
+ self.handle_ws(ws);
+ self.write_buf_writable(buf)?;
+ buf.writeln("break;")?;
+ }
+ Node::Continue(ws) => {
+ self.handle_ws(ws);
+ self.write_buf_writable(buf)?;
+ buf.writeln("continue;")?;
+ }
+ }
+ }
+
+ if AstLevel::Top == level {
+ size_hint += self.write_buf_writable(buf)?;
+ }
+ Ok(size_hint)
+ }
+
+ fn write_cond(
+ &mut self,
+ ctx: &'a Context<'_>,
+ buf: &mut Buffer,
+ conds: &'a [Cond<'_>],
+ ws: Ws,
+ ) -> Result<usize, CompileError> {
+ let mut flushed = 0;
+ let mut arm_sizes = Vec::new();
+ let mut has_else = false;
+ for (i, &(cws, ref cond, ref nodes)) in conds.iter().enumerate() {
+ self.handle_ws(cws);
+ flushed += self.write_buf_writable(buf)?;
+ if i > 0 {
+ self.locals.pop();
+ }
+
+ self.locals.push();
+ let mut arm_size = 0;
+ if let Some(CondTest { target, expr }) = cond {
+ if i == 0 {
+ buf.write("if ");
+ } else {
+ buf.dedent()?;
+ buf.write("} else if ");
+ }
+
+ if let Some(target) = target {
+ let mut expr_buf = Buffer::new(0);
+ self.visit_expr(&mut expr_buf, expr)?;
+ buf.write("let ");
+ self.visit_target(buf, true, true, target);
+ buf.write(" = &(");
+ buf.write(&expr_buf.buf);
+ buf.write(")");
+ } else {
+ // The following syntax `*(&(...) as &bool)` is used to
+ // trigger Rust's automatic dereferencing, to coerce
+ // e.g. `&&&&&bool` to `bool`. First `&(...) as &bool`
+ // coerces e.g. `&&&bool` to `&bool`. Then `*(&bool)`
+ // finally dereferences it to `bool`.
+ buf.write("*(&(");
+ let expr_code = self.visit_expr_root(expr)?;
+ buf.write(&expr_code);
+ buf.write(") as &bool)");
+ }
+ } else {
+ buf.dedent()?;
+ buf.write("} else");
+ has_else = true;
+ }
+
+ buf.writeln(" {")?;
+
+ arm_size += self.handle(ctx, nodes, buf, AstLevel::Nested)?;
+ arm_sizes.push(arm_size);
+ }
+ self.handle_ws(ws);
+ flushed += self.write_buf_writable(buf)?;
+ buf.writeln("}")?;
+
+ self.locals.pop();
+
+ if !has_else {
+ arm_sizes.push(0);
+ }
+ Ok(flushed + median(&mut arm_sizes))
+ }
+
+ #[allow(clippy::too_many_arguments)]
+ fn write_match(
+ &mut self,
+ ctx: &'a Context<'_>,
+ buf: &mut Buffer,
+ ws1: Ws,
+ expr: &Expr<'_>,
+ arms: &'a [When<'_>],
+ ws2: Ws,
+ ) -> Result<usize, CompileError> {
+ self.flush_ws(ws1);
+ let flushed = self.write_buf_writable(buf)?;
+ let mut arm_sizes = Vec::new();
+
+ let expr_code = self.visit_expr_root(expr)?;
+ buf.writeln(&format!("match &{} {{", expr_code))?;
+
+ let mut arm_size = 0;
+ for (i, arm) in arms.iter().enumerate() {
+ let &(ws, ref target, ref body) = arm;
+ self.handle_ws(ws);
+
+ if i > 0 {
+ arm_sizes.push(arm_size + self.write_buf_writable(buf)?);
+
+ buf.writeln("}")?;
+ self.locals.pop();
+ }
+
+ self.locals.push();
+ self.visit_target(buf, true, true, target);
+ buf.writeln(" => {")?;
+
+ arm_size = self.handle(ctx, body, buf, AstLevel::Nested)?;
+ }
+
+ self.handle_ws(ws2);
+ arm_sizes.push(arm_size + self.write_buf_writable(buf)?);
+ buf.writeln("}")?;
+ self.locals.pop();
+
+ buf.writeln("}")?;
+
+ Ok(flushed + median(&mut arm_sizes))
+ }
+
+ #[allow(clippy::too_many_arguments)]
+ fn write_loop(
+ &mut self,
+ ctx: &'a Context<'_>,
+ buf: &mut Buffer,
+ loop_block: &'a Loop<'_>,
+ ) -> Result<usize, CompileError> {
+ self.handle_ws(loop_block.ws1);
+ self.locals.push();
+
+ let expr_code = self.visit_expr_root(&loop_block.iter)?;
+
+ let flushed = self.write_buf_writable(buf)?;
+ buf.writeln("{")?;
+ buf.writeln("let mut _did_loop = false;")?;
+ match loop_block.iter {
+ Expr::Range(_, _, _) => buf.writeln(&format!("let _iter = {};", expr_code)),
+ Expr::Array(..) => buf.writeln(&format!("let _iter = {}.iter();", expr_code)),
+ // If `iter` is a call then we assume it's something that returns
+ // an iterator. If not then the user can explicitly add the needed
+ // call without issues.
+ Expr::Call(..) | Expr::Index(..) => {
+ buf.writeln(&format!("let _iter = ({}).into_iter();", expr_code))
+ }
+ // If accessing `self` then it most likely needs to be
+ // borrowed, to prevent an attempt of moving.
+ _ if expr_code.starts_with("self.") => {
+ buf.writeln(&format!("let _iter = (&{}).into_iter();", expr_code))
+ }
+ // If accessing a field then it most likely needs to be
+ // borrowed, to prevent an attempt of moving.
+ Expr::Attr(..) => buf.writeln(&format!("let _iter = (&{}).into_iter();", expr_code)),
+ // Otherwise, we borrow `iter` assuming that it implements `IntoIterator`.
+ _ => buf.writeln(&format!("let _iter = ({}).into_iter();", expr_code)),
+ }?;
+ if let Some(cond) = &loop_block.cond {
+ self.locals.push();
+ buf.write("let _iter = _iter.filter(|");
+ self.visit_target(buf, true, true, &loop_block.var);
+ buf.write("| -> bool {");
+ self.visit_expr(buf, cond)?;
+ buf.writeln("});")?;
+ self.locals.pop();
+ }
+
+ self.locals.push();
+ buf.write("for (");
+ self.visit_target(buf, true, true, &loop_block.var);
+ buf.writeln(", _loop_item) in ::askama::helpers::TemplateLoop::new(_iter) {")?;
+
+ buf.writeln("_did_loop = true;")?;
+ let mut size_hint1 = self.handle(ctx, &loop_block.body, buf, AstLevel::Nested)?;
+ self.handle_ws(loop_block.ws2);
+ size_hint1 += self.write_buf_writable(buf)?;
+ self.locals.pop();
+ buf.writeln("}")?;
+
+ buf.writeln("if !_did_loop {")?;
+ self.locals.push();
+ let mut size_hint2 = self.handle(ctx, &loop_block.else_block, buf, AstLevel::Nested)?;
+ self.handle_ws(loop_block.ws3);
+ size_hint2 += self.write_buf_writable(buf)?;
+ self.locals.pop();
+ buf.writeln("}")?;
+
+ buf.writeln("}")?;
+
+ Ok(flushed + ((size_hint1 * 3) + size_hint2) / 2)
+ }
+
+ fn write_call(
+ &mut self,
+ ctx: &'a Context<'_>,
+ buf: &mut Buffer,
+ ws: Ws,
+ scope: Option<&str>,
+ name: &str,
+ args: &[Expr<'_>],
+ ) -> Result<usize, CompileError> {
+ if name == "super" {
+ return self.write_block(buf, None, ws);
+ }
+
+ let (def, own_ctx) = match scope {
+ Some(s) => {
+ let path = ctx.imports.get(s).ok_or_else(|| {
+ CompileError::from(format!("no import found for scope {:?}", s))
+ })?;
+ let mctx = self.contexts.get(path.as_path()).ok_or_else(|| {
+ CompileError::from(format!("context for {:?} not found", path))
+ })?;
+ let def = mctx.macros.get(name).ok_or_else(|| {
+ CompileError::from(format!("macro {:?} not found in scope {:?}", name, s))
+ })?;
+ (def, mctx)
+ }
+ None => {
+ let def = ctx
+ .macros
+ .get(name)
+ .ok_or_else(|| CompileError::from(format!("macro {:?} not found", name)))?;
+ (def, ctx)
+ }
+ };
+
+ self.flush_ws(ws); // Cannot handle_ws() here: whitespace from macro definition comes first
+ self.locals.push();
+ self.write_buf_writable(buf)?;
+ buf.writeln("{")?;
+ self.prepare_ws(def.ws1);
+
+ let mut names = Buffer::new(0);
+ let mut values = Buffer::new(0);
+ let mut is_first_variable = true;
+ for (i, arg) in def.args.iter().enumerate() {
+ let expr = args.get(i).ok_or_else(|| {
+ CompileError::from(format!("macro {:?} takes more than {} arguments", name, i))
+ })?;
+
+ match expr {
+ // If `expr` is already a form of variable then
+ // don't reintroduce a new variable. This is
+ // to avoid moving non-copyable values.
+ Expr::Var(name) => {
+ let var = self.locals.resolve_or_self(name);
+ self.locals.insert(arg, LocalMeta::with_ref(var));
+ }
+ Expr::Attr(obj, attr) => {
+ let mut attr_buf = Buffer::new(0);
+ self.visit_attr(&mut attr_buf, obj, attr)?;
+
+ let var = self.locals.resolve(&attr_buf.buf).unwrap_or(attr_buf.buf);
+ self.locals.insert(arg, LocalMeta::with_ref(var));
+ }
+ // Everything else still needs to become variables,
+ // to avoid having the same logic be executed
+ // multiple times, e.g. in the case of macro
+ // parameters being used multiple times.
+ _ => {
+ if is_first_variable {
+ is_first_variable = false
+ } else {
+ names.write(", ");
+ values.write(", ");
+ }
+ names.write(arg);
+
+ values.write("(");
+ values.write(&self.visit_expr_root(expr)?);
+ values.write(")");
+ self.locals.insert_with_default(arg);
+ }
+ }
+ }
+
+ debug_assert_eq!(names.buf.is_empty(), values.buf.is_empty());
+ if !names.buf.is_empty() {
+ buf.writeln(&format!("let ({}) = ({});", names.buf, values.buf))?;
+ }
+
+ let mut size_hint = self.handle(own_ctx, &def.nodes, buf, AstLevel::Nested)?;
+
+ self.flush_ws(def.ws2);
+ size_hint += self.write_buf_writable(buf)?;
+ buf.writeln("}")?;
+ self.locals.pop();
+ self.prepare_ws(ws);
+ Ok(size_hint)
+ }
+
+ fn handle_include(
+ &mut self,
+ ctx: &'a Context<'_>,
+ buf: &mut Buffer,
+ ws: Ws,
+ path: &str,
+ ) -> Result<usize, CompileError> {
+ self.flush_ws(ws);
+ self.write_buf_writable(buf)?;
+ let path = self
+ .input
+ .config
+ .find_template(path, Some(&self.input.path))?;
+ let src = get_template_source(&path)?;
+ let nodes = parse(&src, self.input.syntax)?;
+
+ // Make sure the compiler understands that the generated code depends on the template file.
+ {
+ let path = path.to_str().unwrap();
+ buf.writeln(
+ &quote! {
+ include_bytes!(#path);
+ }
+ .to_string(),
+ )?;
+ }
+
+ let size_hint = {
+ // Since nodes must not outlive the Generator, we instantiate
+ // a nested Generator here to handle the include's nodes.
+ let mut gen = self.child();
+ let mut size_hint = gen.handle(ctx, &nodes, buf, AstLevel::Nested)?;
+ size_hint += gen.write_buf_writable(buf)?;
+ size_hint
+ };
+ self.prepare_ws(ws);
+ Ok(size_hint)
+ }
+
+ fn write_let_decl(
+ &mut self,
+ buf: &mut Buffer,
+ ws: Ws,
+ var: &'a Target<'_>,
+ ) -> Result<(), CompileError> {
+ self.handle_ws(ws);
+ self.write_buf_writable(buf)?;
+ buf.write("let ");
+ self.visit_target(buf, false, true, var);
+ buf.writeln(";")
+ }
+
+ fn is_shadowing_variable(&self, var: &Target<'a>) -> Result<bool, CompileError> {
+ match var {
+ Target::Name(name) => {
+ let name = normalize_identifier(name);
+ match self.locals.get(&name) {
+ // declares a new variable
+ None => Ok(false),
+ // an initialized variable gets shadowed
+ Some(meta) if meta.initialized => Ok(true),
+ // initializes a variable that was introduced in a LetDecl before
+ _ => Ok(false),
+ }
+ }
+ Target::Tuple(_, targets) => {
+ for target in targets {
+ match self.is_shadowing_variable(target) {
+ Ok(false) => continue,
+ outcome => return outcome,
+ }
+ }
+ Ok(false)
+ }
+ Target::Struct(_, named_targets) => {
+ for (_, target) in named_targets {
+ match self.is_shadowing_variable(target) {
+ Ok(false) => continue,
+ outcome => return outcome,
+ }
+ }
+ Ok(false)
+ }
+ _ => Err("literals are not allowed on the left-hand side of an assignment".into()),
+ }
+ }
+
+ fn write_let(
+ &mut self,
+ buf: &mut Buffer,
+ ws: Ws,
+ var: &'a Target<'_>,
+ val: &Expr<'_>,
+ ) -> Result<(), CompileError> {
+ self.handle_ws(ws);
+ let mut expr_buf = Buffer::new(0);
+ self.visit_expr(&mut expr_buf, val)?;
+
+ let shadowed = self.is_shadowing_variable(var)?;
+ if shadowed {
+ // Need to flush the buffer if the variable is being shadowed,
+ // to ensure the old variable is used.
+ self.write_buf_writable(buf)?;
+ }
+ if shadowed
+ || !matches!(var, &Target::Name(_))
+ || matches!(var, Target::Name(name) if self.locals.get(name).is_none())
+ {
+ buf.write("let ");
+ }
+
+ self.visit_target(buf, true, true, var);
+ buf.writeln(&format!(" = {};", &expr_buf.buf))
+ }
+
+ // If `name` is `Some`, this is a call to a block definition, and we have to find
+ // the first block for that name from the ancestry chain. If name is `None`, this
+ // is from a `super()` call, and we can get the name from `self.super_block`.
+ fn write_block(
+ &mut self,
+ buf: &mut Buffer,
+ name: Option<&'a str>,
+ outer: Ws,
+ ) -> Result<usize, CompileError> {
+ // Flush preceding whitespace according to the outer WS spec
+ self.flush_ws(outer);
+
+ let prev_block = self.super_block;
+ let cur = match (name, prev_block) {
+ // The top-level context contains a block definition
+ (Some(cur_name), None) => (cur_name, 0),
+ // A block definition contains a block definition of the same name
+ (Some(cur_name), Some((prev_name, _))) if cur_name == prev_name => {
+ return Err(format!("cannot define recursive blocks ({})", cur_name).into());
+ }
+ // A block definition contains a definition of another block
+ (Some(cur_name), Some((_, _))) => (cur_name, 0),
+ // `super()` was called inside a block
+ (None, Some((prev_name, gen))) => (prev_name, gen + 1),
+ // `super()` is called from outside a block
+ (None, None) => return Err("cannot call 'super()' outside block".into()),
+ };
+ self.super_block = Some(cur);
+
+ // Get the block definition from the heritage chain
+ let heritage = self
+ .heritage
+ .as_ref()
+ .ok_or_else(|| CompileError::from("no block ancestors available"))?;
+ let (ctx, def) = heritage.blocks[cur.0].get(cur.1).ok_or_else(|| {
+ CompileError::from(match name {
+ None => format!("no super() block found for block '{}'", cur.0),
+ Some(name) => format!("no block found for name '{}'", name),
+ })
+ })?;
+
+ // Get the nodes and whitespace suppression data from the block definition
+ let (ws1, nodes, ws2) = if let Node::BlockDef(ws1, _, nodes, ws2) = def {
+ (ws1, nodes, ws2)
+ } else {
+ unreachable!()
+ };
+
+ // Handle inner whitespace suppression spec and process block nodes
+ self.prepare_ws(*ws1);
+ self.locals.push();
+ let size_hint = self.handle(ctx, nodes, buf, AstLevel::Block)?;
+
+ if !self.locals.is_current_empty() {
+ // Need to flush the buffer before popping the variable stack
+ self.write_buf_writable(buf)?;
+ }
+
+ self.locals.pop();
+ self.flush_ws(*ws2);
+
+ // Restore original block context and set whitespace suppression for
+ // succeeding whitespace according to the outer WS spec
+ self.super_block = prev_block;
+ self.prepare_ws(outer);
+ Ok(size_hint)
+ }
+
+ fn write_expr(&mut self, ws: Ws, s: &'a Expr<'a>) {
+ self.handle_ws(ws);
+ self.buf_writable.push(Writable::Expr(s));
+ }
+
+ // Write expression buffer and empty
+ fn write_buf_writable(&mut self, buf: &mut Buffer) -> Result<usize, CompileError> {
+ if self.buf_writable.is_empty() {
+ return Ok(0);
+ }
+
+ if self
+ .buf_writable
+ .iter()
+ .all(|w| matches!(w, Writable::Lit(_)))
+ {
+ let mut buf_lit = Buffer::new(0);
+ for s in mem::take(&mut self.buf_writable) {
+ if let Writable::Lit(s) = s {
+ buf_lit.write(s);
+ };
+ }
+ buf.writeln(&format!("writer.write_str({:#?})?;", &buf_lit.buf))?;
+ return Ok(buf_lit.buf.len());
+ }
+
+ let mut size_hint = 0;
+ let mut buf_format = Buffer::new(0);
+ let mut buf_expr = Buffer::new(buf.indent + 1);
+ let mut expr_cache = HashMap::with_capacity(self.buf_writable.len());
+ for s in mem::take(&mut self.buf_writable) {
+ match s {
+ Writable::Lit(s) => {
+ buf_format.write(&s.replace('{', "{{").replace('}', "}}"));
+ size_hint += s.len();
+ }
+ Writable::Expr(s) => {
+ use self::DisplayWrap::*;
+ let mut expr_buf = Buffer::new(0);
+ let wrapped = self.visit_expr(&mut expr_buf, s)?;
+ let expression = match wrapped {
+ Wrapped => expr_buf.buf,
+ Unwrapped => format!(
+ "::askama::MarkupDisplay::new_unsafe(&({}), {})",
+ expr_buf.buf, self.input.escaper
+ ),
+ };
+
+ use std::collections::hash_map::Entry;
+ let id = match expr_cache.entry(expression.clone()) {
+ Entry::Occupied(e) => *e.get(),
+ Entry::Vacant(e) => {
+ let id = self.named;
+ self.named += 1;
+
+ buf_expr.write(&format!("expr{} = ", id));
+ buf_expr.write("&");
+ buf_expr.write(&expression);
+ buf_expr.writeln(",")?;
+
+ e.insert(id);
+ id
+ }
+ };
+
+ buf_format.write(&format!("{{expr{}}}", id));
+ size_hint += 3;
+ }
+ }
+ }
+
+ buf.writeln("::std::write!(")?;
+ buf.indent();
+ buf.writeln("writer,")?;
+ buf.writeln(&format!("{:#?},", &buf_format.buf))?;
+ buf.writeln(buf_expr.buf.trim())?;
+ buf.dedent()?;
+ buf.writeln(")?;")?;
+ Ok(size_hint)
+ }
+
+ fn visit_lit(&mut self, lws: &'a str, val: &'a str, rws: &'a str) {
+ assert!(self.next_ws.is_none());
+ if !lws.is_empty() {
+ if self.skip_ws {
+ self.skip_ws = false;
+ } else if val.is_empty() {
+ assert!(rws.is_empty());
+ self.next_ws = Some(lws);
+ } else {
+ self.buf_writable.push(Writable::Lit(lws));
+ }
+ }
+
+ if !val.is_empty() {
+ self.buf_writable.push(Writable::Lit(val));
+ }
+
+ if !rws.is_empty() {
+ self.next_ws = Some(rws);
+ }
+ }
+
+ fn write_comment(&mut self, ws: Ws) {
+ self.handle_ws(ws);
+ }
+
+ /* Visitor methods for expression types */
+
+ fn visit_expr_root(&mut self, expr: &Expr<'_>) -> Result<String, CompileError> {
+ let mut buf = Buffer::new(0);
+ self.visit_expr(&mut buf, expr)?;
+ Ok(buf.buf)
+ }
+
+    /// Dispatches on the expression AST node to the matching `visit_*` method.
+    ///
+    /// Returns whether the generated code is already wrapped in an
+    /// escaping/`Display` adapter (`DisplayWrap::Wrapped`) or not.
+    fn visit_expr(
+        &mut self,
+        buf: &mut Buffer,
+        expr: &Expr<'_>,
+    ) -> Result<DisplayWrap, CompileError> {
+        Ok(match *expr {
+            Expr::BoolLit(s) => self.visit_bool_lit(buf, s),
+            Expr::NumLit(s) => self.visit_num_lit(buf, s),
+            Expr::StrLit(s) => self.visit_str_lit(buf, s),
+            Expr::CharLit(s) => self.visit_char_lit(buf, s),
+            Expr::Var(s) => self.visit_var(buf, s),
+            Expr::Path(ref path) => self.visit_path(buf, path),
+            Expr::Array(ref elements) => self.visit_array(buf, elements)?,
+            Expr::Attr(ref obj, name) => self.visit_attr(buf, obj, name)?,
+            Expr::Index(ref obj, ref key) => self.visit_index(buf, obj, key)?,
+            Expr::Filter(name, ref args) => self.visit_filter(buf, name, args)?,
+            Expr::Unary(op, ref inner) => self.visit_unary(buf, op, inner)?,
+            Expr::BinOp(op, ref left, ref right) => self.visit_binop(buf, op, left, right)?,
+            Expr::Range(op, ref left, ref right) => self.visit_range(buf, op, left, right)?,
+            Expr::Group(ref inner) => self.visit_group(buf, inner)?,
+            Expr::Call(ref obj, ref args) => self.visit_call(buf, obj, args)?,
+            Expr::RustMacro(name, args) => self.visit_rust_macro(buf, name, args),
+            Expr::Try(ref expr) => self.visit_try(buf, expr.as_ref())?,
+            Expr::Tuple(ref exprs) => self.visit_tuple(buf, exprs)?,
+        })
+    }
+
+    /// Generates code for the template `?` operator: the inner expression's
+    /// error is mapped into `askama::shared::Error::Custom` before `?`
+    /// propagates it out of the render function.
+    fn visit_try(
+        &mut self,
+        buf: &mut Buffer,
+        expr: &Expr<'_>,
+    ) -> Result<DisplayWrap, CompileError> {
+        buf.write("::core::result::Result::map_err(");
+        self.visit_expr(buf, expr)?;
+        buf.write(", |err| ::askama::shared::Error::Custom(::core::convert::Into::into(err)))?");
+        Ok(DisplayWrap::Unwrapped)
+    }
+
+    /// Emits a Rust macro invocation verbatim as `name!(args)`.
+    fn visit_rust_macro(&mut self, buf: &mut Buffer, name: &str, args: &str) -> DisplayWrap {
+        buf.write(name);
+        buf.write("!(");
+        buf.write(args);
+        buf.write(")");
+
+        DisplayWrap::Unwrapped
+    }
+
+    /// Stub used when the `markdown` cargo feature is disabled: using the
+    /// filter is a compile-time error.
+    #[cfg(not(feature = "markdown"))]
+    fn _visit_markdown_filter(
+        &mut self,
+        _buf: &mut Buffer,
+        _args: &[Expr<'_>],
+    ) -> Result<DisplayWrap, CompileError> {
+        Err("the `markdown` filter requires the `markdown` feature to be enabled".into())
+    }
+
+    /// Generates a call to `::askama::filters::markdown`, passing the active
+    /// escaper plus an optional second `options` argument.
+    #[cfg(feature = "markdown")]
+    fn _visit_markdown_filter(
+        &mut self,
+        buf: &mut Buffer,
+        args: &[Expr<'_>],
+    ) -> Result<DisplayWrap, CompileError> {
+        let (md, options) = match args {
+            [md] => (md, None),
+            [md, options] => (md, Some(options)),
+            _ => return Err("markdown filter expects no more than one option argument".into()),
+        };
+
+        buf.write(&format!(
+            "::askama::filters::markdown({}, ",
+            self.input.escaper
+        ));
+        self.visit_expr(buf, md)?;
+        match options {
+            Some(options) => {
+                buf.write(", ::core::option::Option::Some(");
+                self.visit_expr(buf, options)?;
+                buf.write(")");
+            }
+            None => buf.write(", ::core::option::Option::None"),
+        }
+        buf.write(")?");
+
+        Ok(DisplayWrap::Wrapped)
+    }
+
+    /// Generates a filter call.
+    ///
+    /// Filters with bespoke codegen (`escape`/`e`, `format`, `fmt`, `join`,
+    /// `markdown`) are handled by dedicated helpers; `tojson` is aliased to
+    /// `json`. Remaining names resolve to `::askama::filters::*` for built-in
+    /// filters, or to a user-supplied `filters::*` module otherwise.
+    fn visit_filter(
+        &mut self,
+        buf: &mut Buffer,
+        mut name: &str,
+        args: &[Expr<'_>],
+    ) -> Result<DisplayWrap, CompileError> {
+        if matches!(name, "escape" | "e") {
+            self._visit_escape_filter(buf, args)?;
+            return Ok(DisplayWrap::Wrapped);
+        } else if name == "format" {
+            self._visit_format_filter(buf, args)?;
+            return Ok(DisplayWrap::Unwrapped);
+        } else if name == "fmt" {
+            self._visit_fmt_filter(buf, args)?;
+            return Ok(DisplayWrap::Unwrapped);
+        } else if name == "join" {
+            self._visit_join_filter(buf, args)?;
+            return Ok(DisplayWrap::Unwrapped);
+        } else if name == "markdown" {
+            return self._visit_markdown_filter(buf, args);
+        }
+
+        if name == "tojson" {
+            name = "json";
+        }
+
+        // Feature-gated filters fail at compile time when the feature is off.
+        #[cfg(not(feature = "json"))]
+        if name == "json" {
+            return Err("the `json` filter requires the `serde-json` feature to be enabled".into());
+        }
+        #[cfg(not(feature = "yaml"))]
+        if name == "yaml" {
+            return Err("the `yaml` filter requires the `serde-yaml` feature to be enabled".into());
+        }
+
+        // These two take the escaper as an extra first argument and yield
+        // already-escaped (wrapped) output.
+        const FILTERS: [&str; 2] = ["safe", "yaml"];
+        if FILTERS.contains(&name) {
+            buf.write(&format!(
+                "::askama::filters::{}({}, ",
+                name, self.input.escaper
+            ));
+        } else if filters::BUILT_IN_FILTERS.contains(&name) {
+            buf.write(&format!("::askama::filters::{}(", name));
+        } else {
+            buf.write(&format!("filters::{}(", name));
+        }
+
+        self._visit_args(buf, args)?;
+        buf.write(")?");
+        Ok(match FILTERS.contains(&name) {
+            true => DisplayWrap::Wrapped,
+            false => DisplayWrap::Unwrapped,
+        })
+    }
+
+    /// Generates a call to `::askama::filters::escape`.
+    ///
+    /// Accepts at most one extra argument: a string literal naming an escaper
+    /// from the configuration; otherwise the template's default escaper is
+    /// used.
+    fn _visit_escape_filter(
+        &mut self,
+        buf: &mut Buffer,
+        args: &[Expr<'_>],
+    ) -> Result<(), CompileError> {
+        if args.len() > 2 {
+            return Err("only two arguments allowed to escape filter".into());
+        }
+        let opt_escaper = match args.get(1) {
+            Some(Expr::StrLit(name)) => Some(*name),
+            Some(_) => return Err("invalid escaper type for escape filter".into()),
+            None => None,
+        };
+        let escaper = match opt_escaper {
+            Some(name) => self
+                .input
+                .config
+                .escapers
+                .iter()
+                .find_map(|(escapers, escaper)| escapers.contains(name).then(|| escaper))
+                .ok_or_else(|| CompileError::from("invalid escaper for escape filter"))?,
+            None => self.input.escaper,
+        };
+        buf.write("::askama::filters::escape(");
+        buf.write(escaper);
+        buf.write(", ");
+        self._visit_args(buf, &args[..1])?;
+        buf.write(")?");
+        Ok(())
+    }
+
+    /// Generates `format!(<fmt-literal>, <rest of args>)`; the first argument
+    /// must be a string literal serving as the format string.
+    fn _visit_format_filter(
+        &mut self,
+        buf: &mut Buffer,
+        args: &[Expr<'_>],
+    ) -> Result<(), CompileError> {
+        buf.write("format!(");
+        if let Some(Expr::StrLit(v)) = args.first() {
+            self.visit_str_lit(buf, v);
+            if args.len() > 1 {
+                buf.write(", ");
+            }
+        } else {
+            return Err("invalid expression type for format filter".into());
+        }
+        self._visit_args(buf, &args[1..])?;
+        buf.write(")");
+        Ok(())
+    }
+
+    /// Generates `format!(<fmt-literal>, <value>)` for the `fmt` filter: the
+    /// value is the first argument, the format string the second (the
+    /// reverse of the `format` filter).
+    fn _visit_fmt_filter(
+        &mut self,
+        buf: &mut Buffer,
+        args: &[Expr<'_>],
+    ) -> Result<(), CompileError> {
+        buf.write("format!(");
+        if let Some(Expr::StrLit(v)) = args.get(1) {
+            self.visit_str_lit(buf, v);
+            buf.write(", ");
+        } else {
+            return Err("invalid expression type for fmt filter".into());
+        }
+        self._visit_args(buf, &args[0..1])?;
+        if args.len() > 2 {
+            return Err("only two arguments allowed to fmt filter".into());
+        }
+        buf.write(")");
+        Ok(())
+    }
+
+    // Force type coercion on first argument to `join` filter (see #39):
+    // the iterable is taken by reference and run through `.into_iter()`
+    // before being handed to `::askama::filters::join`.
+    fn _visit_join_filter(
+        &mut self,
+        buf: &mut Buffer,
+        args: &[Expr<'_>],
+    ) -> Result<(), CompileError> {
+        buf.write("::askama::filters::join((&");
+        for (i, arg) in args.iter().enumerate() {
+            if i > 0 {
+                buf.write(", &");
+            }
+            self.visit_expr(buf, arg)?;
+            if i == 0 {
+                buf.write(").into_iter()");
+            }
+        }
+        buf.write(")?");
+        Ok(())
+    }
+
+    /// Emits a comma-separated argument list.
+    ///
+    /// Non-`Copy` arguments are wrapped as `&(...)` so the generated call
+    /// borrows instead of moving; calls on non-path receivers are wrapped in
+    /// a block to keep the generated expression well-formed.
+    fn _visit_args(&mut self, buf: &mut Buffer, args: &[Expr<'_>]) -> Result<(), CompileError> {
+        if args.is_empty() {
+            return Ok(());
+        }
+
+        for (i, arg) in args.iter().enumerate() {
+            if i > 0 {
+                buf.write(", ");
+            }
+
+            let borrow = !arg.is_copyable();
+            if borrow {
+                buf.write("&(");
+            }
+
+            match arg {
+                Expr::Call(left, _) if !matches!(left.as_ref(), Expr::Path(_)) => {
+                    buf.writeln("{")?;
+                    self.visit_expr(buf, arg)?;
+                    buf.writeln("}")?;
+                }
+                _ => {
+                    self.visit_expr(buf, arg)?;
+                }
+            }
+
+            if borrow {
+                buf.write(")");
+            }
+        }
+        Ok(())
+    }
+
+    /// Generates attribute access `obj.attr`.
+    ///
+    /// The pseudo-variable `loop` is special-cased: its `index`, `index0`,
+    /// `first` and `last` attributes map onto the generated `_loop_item`
+    /// (`index` is 1-based, `index0` 0-based); any other attribute is an
+    /// error. Other attribute names are normalized to raw identifiers where
+    /// they clash with Rust keywords.
+    fn visit_attr(
+        &mut self,
+        buf: &mut Buffer,
+        obj: &Expr<'_>,
+        attr: &str,
+    ) -> Result<DisplayWrap, CompileError> {
+        if let Expr::Var(name) = *obj {
+            if name == "loop" {
+                if attr == "index" {
+                    buf.write("(_loop_item.index + 1)");
+                    return Ok(DisplayWrap::Unwrapped);
+                } else if attr == "index0" {
+                    buf.write("_loop_item.index");
+                    return Ok(DisplayWrap::Unwrapped);
+                } else if attr == "first" {
+                    buf.write("_loop_item.first");
+                    return Ok(DisplayWrap::Unwrapped);
+                } else if attr == "last" {
+                    buf.write("_loop_item.last");
+                    return Ok(DisplayWrap::Unwrapped);
+                } else {
+                    return Err("unknown loop variable".into());
+                }
+            }
+        }
+        self.visit_expr(buf, obj)?;
+        buf.write(&format!(".{}", normalize_identifier(attr)));
+        Ok(DisplayWrap::Unwrapped)
+    }
+
+    /// Generates indexing `&obj[key]` (borrowed so the result need not be
+    /// `Copy`).
+    fn visit_index(
+        &mut self,
+        buf: &mut Buffer,
+        obj: &Expr<'_>,
+        key: &Expr<'_>,
+    ) -> Result<DisplayWrap, CompileError> {
+        buf.write("&");
+        self.visit_expr(buf, obj)?;
+        buf.write("[");
+        self.visit_expr(buf, key)?;
+        buf.write("]");
+        Ok(DisplayWrap::Unwrapped)
+    }
+
+    /// Generates a call expression.
+    ///
+    /// `loop.cycle(arr)` is special-cased into an inline block that indexes
+    /// the array by `_loop_item.index % len` (with a runtime error on an
+    /// empty array). A bare variable callee is resolved through the local
+    /// scope chain, falling back to a field on `self`; any other callee is
+    /// generated as-is.
+    fn visit_call(
+        &mut self,
+        buf: &mut Buffer,
+        left: &Expr<'_>,
+        args: &[Expr<'_>],
+    ) -> Result<DisplayWrap, CompileError> {
+        match left {
+            Expr::Attr(left, method) if **left == Expr::Var("loop") => match *method {
+                "cycle" => match args {
+                    [arg] => {
+                        if matches!(arg, Expr::Array(arr) if arr.is_empty()) {
+                            // Caught at compile time when the literal is empty;
+                            // the generated `_len == 0` check covers the rest.
+                            return Err("loop.cycle(…) cannot use an empty array".into());
+                        }
+                        buf.write("({");
+                        buf.write("let _cycle = &(");
+                        self.visit_expr(buf, arg)?;
+                        buf.writeln(");")?;
+                        buf.writeln("let _len = _cycle.len();")?;
+                        buf.writeln("if _len == 0 {")?;
+                        buf.writeln("return ::core::result::Result::Err(::askama::Error::Fmt(::core::fmt::Error));")?;
+                        buf.writeln("}")?;
+                        buf.writeln("_cycle[_loop_item.index % _len]")?;
+                        buf.writeln("})")?;
+                    }
+                    _ => return Err("loop.cycle(…) expects exactly one argument".into()),
+                },
+                s => return Err(format!("unknown loop method: {:?}", s).into()),
+            },
+            left => {
+                match left {
+                    Expr::Var(name) => match self.locals.resolve(name) {
+                        Some(resolved) => buf.write(&resolved),
+                        None => buf.write(&format!("(&self.{})", normalize_identifier(name))),
+                    },
+                    left => {
+                        self.visit_expr(buf, left)?;
+                    }
+                }
+
+                buf.write("(");
+                self._visit_args(buf, args)?;
+                buf.write(")");
+            }
+        }
+        Ok(DisplayWrap::Unwrapped)
+    }
+
+    /// Generates a prefix unary expression: `<op><inner>`.
+    fn visit_unary(
+        &mut self,
+        buf: &mut Buffer,
+        op: &str,
+        inner: &Expr<'_>,
+    ) -> Result<DisplayWrap, CompileError> {
+        buf.write(op);
+        self.visit_expr(buf, inner)?;
+        Ok(DisplayWrap::Unwrapped)
+    }
+
+    /// Generates a range expression; either bound may be absent
+    /// (e.g. `..n`, `n..`, `a..=b`).
+    fn visit_range(
+        &mut self,
+        buf: &mut Buffer,
+        op: &str,
+        left: &Option<Box<Expr<'_>>>,
+        right: &Option<Box<Expr<'_>>>,
+    ) -> Result<DisplayWrap, CompileError> {
+        if let Some(left) = left {
+            self.visit_expr(buf, left)?;
+        }
+        buf.write(op);
+        if let Some(right) = right {
+            self.visit_expr(buf, right)?;
+        }
+        Ok(DisplayWrap::Unwrapped)
+    }
+
+    /// Generates an infix binary expression: `<left> <op> <right>`.
+    fn visit_binop(
+        &mut self,
+        buf: &mut Buffer,
+        op: &str,
+        left: &Expr<'_>,
+        right: &Expr<'_>,
+    ) -> Result<DisplayWrap, CompileError> {
+        self.visit_expr(buf, left)?;
+        buf.write(&format!(" {} ", op));
+        self.visit_expr(buf, right)?;
+        Ok(DisplayWrap::Unwrapped)
+    }
+
+    /// Generates a parenthesized expression: `(<inner>)`.
+    fn visit_group(
+        &mut self,
+        buf: &mut Buffer,
+        inner: &Expr<'_>,
+    ) -> Result<DisplayWrap, CompileError> {
+        buf.write("(");
+        self.visit_expr(buf, inner)?;
+        buf.write(")");
+        Ok(DisplayWrap::Unwrapped)
+    }
+
+    /// Generates a tuple literal; every element gets a trailing comma so a
+    /// one-element tuple is still a tuple.
+    fn visit_tuple(
+        &mut self,
+        buf: &mut Buffer,
+        exprs: &[Expr<'_>],
+    ) -> Result<DisplayWrap, CompileError> {
+        buf.write("(");
+        for (index, expr) in exprs.iter().enumerate() {
+            if index > 0 {
+                buf.write(" ");
+            }
+            self.visit_expr(buf, expr)?;
+            buf.write(",");
+        }
+        buf.write(")");
+        Ok(DisplayWrap::Unwrapped)
+    }
+
+    /// Generates an array literal: `[a, b, …]`.
+    fn visit_array(
+        &mut self,
+        buf: &mut Buffer,
+        elements: &[Expr<'_>],
+    ) -> Result<DisplayWrap, CompileError> {
+        buf.write("[");
+        for (i, el) in elements.iter().enumerate() {
+            if i > 0 {
+                buf.write(", ");
+            }
+            self.visit_expr(buf, el)?;
+        }
+        buf.write("]");
+        Ok(DisplayWrap::Unwrapped)
+    }
+
+    /// Generates a `::`-joined path from its segments.
+    fn visit_path(&mut self, buf: &mut Buffer, path: &[&str]) -> DisplayWrap {
+        for (i, part) in path.iter().enumerate() {
+            if i > 0 {
+                buf.write("::");
+            }
+            buf.write(part);
+        }
+        DisplayWrap::Unwrapped
+    }
+
+    /// Generates a variable reference, resolved through the local scope
+    /// chain with fallback to `self.<name>`; `self` itself passes through
+    /// untouched.
+    fn visit_var(&mut self, buf: &mut Buffer, s: &str) -> DisplayWrap {
+        if s == "self" {
+            buf.write(s);
+            return DisplayWrap::Unwrapped;
+        }
+
+        buf.write(normalize_identifier(&self.locals.resolve_or_self(s)));
+        DisplayWrap::Unwrapped
+    }
+
+    /// Emits a boolean literal verbatim.
+    fn visit_bool_lit(&mut self, buf: &mut Buffer, s: &str) -> DisplayWrap {
+        buf.write(s);
+        DisplayWrap::Unwrapped
+    }
+
+    /// Emits a string literal, re-wrapped in double quotes.
+    fn visit_str_lit(&mut self, buf: &mut Buffer, s: &str) -> DisplayWrap {
+        buf.write(&format!("\"{}\"", s));
+        DisplayWrap::Unwrapped
+    }
+
+    /// Emits a character literal, re-wrapped in single quotes.
+    fn visit_char_lit(&mut self, buf: &mut Buffer, s: &str) -> DisplayWrap {
+        buf.write(&format!("'{}'", s));
+        DisplayWrap::Unwrapped
+    }
+
+    /// Emits a numeric literal verbatim.
+    fn visit_num_lit(&mut self, buf: &mut Buffer, s: &str) -> DisplayWrap {
+        buf.write(s);
+        DisplayWrap::Unwrapped
+    }
+
+    /// Generates a binding pattern (`let`/`match` target), registering any
+    /// introduced names in the local scope chain.
+    ///
+    /// `initialized` marks whether the binding carries a value at this point;
+    /// `first_level` is true only for the outermost pattern, where literal
+    /// patterns must be prefixed with `&` to match against a reference.
+    fn visit_target(
+        &mut self,
+        buf: &mut Buffer,
+        initialized: bool,
+        first_level: bool,
+        target: &Target<'a>,
+    ) {
+        match target {
+            Target::Name("_") => {
+                buf.write("_");
+            }
+            Target::Name(name) => {
+                let name = normalize_identifier(name);
+                match initialized {
+                    true => self.locals.insert(name, LocalMeta::initialized()),
+                    false => self.locals.insert_with_default(name),
+                }
+                buf.write(name);
+            }
+            Target::Tuple(path, targets) => {
+                buf.write(&path.join("::"));
+                buf.write("(");
+                for target in targets {
+                    self.visit_target(buf, initialized, false, target);
+                    buf.write(",");
+                }
+                buf.write(")");
+            }
+            Target::Struct(path, targets) => {
+                buf.write(&path.join("::"));
+                buf.write(" { ");
+                for (name, target) in targets {
+                    buf.write(normalize_identifier(name));
+                    buf.write(": ");
+                    self.visit_target(buf, initialized, false, target);
+                    buf.write(",");
+                }
+                buf.write(" }");
+            }
+            Target::Path(path) => {
+                self.visit_path(buf, path);
+            }
+            Target::StrLit(s) => {
+                if first_level {
+                    buf.write("&");
+                }
+                self.visit_str_lit(buf, s);
+            }
+            Target::NumLit(s) => {
+                if first_level {
+                    buf.write("&");
+                }
+                self.visit_num_lit(buf, s);
+            }
+            Target::CharLit(s) => {
+                if first_level {
+                    buf.write("&");
+                }
+                self.visit_char_lit(buf, s);
+            }
+            Target::BoolLit(s) => {
+                if first_level {
+                    buf.write("&");
+                }
+                buf.write(s);
+            }
+        }
+    }
+
+ /* Helper methods for dealing with whitespace nodes */
+
+    // Combines `flush_ws()` and `prepare_ws()` to handle both trailing whitespace from the
+    // preceding literal and leading whitespace from the succeeding literal.
+    fn handle_ws(&mut self, ws: Ws) {
+        self.flush_ws(ws);
+        self.prepare_ws(ws);
+    }
+
+    // If the previous literal left some trailing whitespace in `next_ws` and
+    // the prefix whitespace suppressor from the given argument is not set,
+    // flush that whitespace. In either case, `next_ws` is reset to `None`
+    // (no trailing whitespace).
+    fn flush_ws(&mut self, ws: Ws) {
+        if self.next_ws.is_some() && !ws.0 {
+            let val = self.next_ws.unwrap();
+            if !val.is_empty() {
+                self.buf_writable.push(Writable::Lit(val));
+            }
+        }
+        self.next_ws = None;
+    }
+
+    // Sets `skip_ws` to match the suffix whitespace suppressor from the given
+    // argument, to determine whether to suppress leading whitespace from the
+    // next literal.
+    fn prepare_ws(&mut self, ws: Ws) {
+        self.skip_ws = ws.1;
+    }
+}
+
+/// An indentation-aware output buffer for the generated Rust source.
+struct Buffer {
+    // The buffer to generate the code into
+    buf: String,
+    // The current level of indentation (in spaces)
+    indent: u8,
+    // Whether the output buffer is currently at the start of a line
+    start: bool,
+}
+
+impl Buffer {
+    /// Creates an empty buffer starting at the given indentation level.
+    fn new(indent: u8) -> Self {
+        Self {
+            buf: String::new(),
+            indent,
+            start: true,
+        }
+    }
+
+    /// Writes `s` followed by a newline, auto-adjusting indentation: a lone
+    /// `"}"` dedents before writing, and any line ending in `{` indents the
+    /// following lines. Fails only if a dedent underflows.
+    fn writeln(&mut self, s: &str) -> Result<(), CompileError> {
+        if s == "}" {
+            self.dedent()?;
+        }
+        if !s.is_empty() {
+            self.write(s);
+        }
+        self.buf.push('\n');
+        if s.ends_with('{') {
+            self.indent();
+        }
+        self.start = true;
+        Ok(())
+    }
+
+    /// Appends `s`, first emitting 4 spaces per indent level when at the
+    /// start of a line.
+    fn write(&mut self, s: &str) {
+        if self.start {
+            for _ in 0..(self.indent * 4) {
+                self.buf.push(' ');
+            }
+            self.start = false;
+        }
+        self.buf.push_str(s);
+    }
+
+    /// Increases the indentation level by one.
+    fn indent(&mut self) {
+        self.indent += 1;
+    }
+
+    /// Decreases the indentation level by one; errors on underflow, which
+    /// indicates unbalanced generated braces.
+    fn dedent(&mut self) -> Result<(), CompileError> {
+        if self.indent == 0 {
+            return Err("dedent() called while indentation == 0".into());
+        }
+        self.indent -= 1;
+        Ok(())
+    }
+}
+
+/// Per-local-variable metadata tracked in the scope chain.
+#[derive(Clone, Default)]
+struct LocalMeta {
+    // When set, the generated expression this name resolves to instead of
+    // the name itself.
+    refs: Option<String>,
+    // Whether the binding carries a value at its point of declaration.
+    initialized: bool,
+}
+
+impl LocalMeta {
+    /// Metadata for a plain, initialized binding.
+    fn initialized() -> Self {
+        Self {
+            refs: None,
+            initialized: true,
+        }
+    }
+
+    /// Metadata for a binding that resolves to the given expression.
+    fn with_ref(refs: String) -> Self {
+        Self {
+            refs: Some(refs),
+            initialized: true,
+        }
+    }
+}
+
+// type SetChain<'a, T> = MapChain<'a, T, ()>;
+
+/// A stack of scopes with an optional parent chain, used to model nested
+/// template scopes (loops, blocks, macro calls) during code generation.
+#[derive(Debug)]
+struct MapChain<'a, K, V>
+where
+    K: cmp::Eq + hash::Hash,
+{
+    parent: Option<&'a MapChain<'a, K, V>>,
+    scopes: Vec<HashMap<K, V>>,
+}
+
+impl<'a, K: 'a, V: 'a> MapChain<'a, K, V>
+where
+    K: cmp::Eq + hash::Hash,
+{
+    /// Creates a root chain with a single empty scope and no parent.
+    fn new() -> MapChain<'a, K, V> {
+        MapChain {
+            parent: None,
+            scopes: vec![HashMap::new()],
+        }
+    }
+
+    /// Creates a child chain that falls back to `parent` on lookup misses.
+    fn with_parent<'p>(parent: &'p MapChain<'_, K, V>) -> MapChain<'p, K, V> {
+        MapChain {
+            parent: Some(parent),
+            scopes: vec![HashMap::new()],
+        }
+    }
+
+    /// Iterates the scopes innermost-first and returns the first value found
+    /// for `key`, falling back to the parent chain.
+    fn get(&self, key: &K) -> Option<&V> {
+        let scopes = self.scopes.iter().rev();
+        scopes
+            .filter_map(|set| set.get(key))
+            .next()
+            .or_else(|| self.parent.and_then(|set| set.get(key)))
+    }
+
+    /// Whether the innermost scope has no entries.
+    fn is_current_empty(&self) -> bool {
+        self.scopes.last().unwrap().is_empty()
+    }
+
+    /// Inserts into the innermost scope.
+    fn insert(&mut self, key: K, val: V) {
+        self.scopes.last_mut().unwrap().insert(key, val);
+
+        // Note that if `insert` returns `Some` then it implies
+        // an identifier is reused. For e.g. `{% macro f(a, a) %}`
+        // and `{% let (a, a) = ... %}` then this results in a
+        // generated template, which when compiled fails with the
+        // compile error "identifier `a` used more than once".
+    }
+
+    /// Inserts `V::default()` for `key` into the innermost scope.
+    fn insert_with_default(&mut self, key: K)
+    where
+        V: Default,
+    {
+        self.insert(key, V::default());
+    }
+
+    /// Opens a new innermost scope.
+    fn push(&mut self) {
+        self.scopes.push(HashMap::new());
+    }
+
+    /// Closes the innermost scope; the root scope must never be popped.
+    fn pop(&mut self) {
+        self.scopes.pop().unwrap();
+        assert!(!self.scopes.is_empty());
+    }
+}
+
+impl MapChain<'_, &str, LocalMeta> {
+    /// Resolves a template variable to its generated expression: the stored
+    /// `refs` expression if the local carries one, else the (normalized)
+    /// name itself; `None` when the name is not a known local.
+    fn resolve(&self, name: &str) -> Option<String> {
+        let name = normalize_identifier(name);
+        self.get(&name).map(|meta| match &meta.refs {
+            Some(expr) => expr.clone(),
+            None => name.to_string(),
+        })
+    }
+
+    /// Like `resolve`, but unknown names fall back to a `self.<name>` field
+    /// access.
+    fn resolve_or_self(&self, name: &str) -> String {
+        let name = normalize_identifier(name);
+        self.resolve(name)
+            .unwrap_or_else(|| format!("self.{}", name))
+    }
+}
+
+/// Returns the median of `sizes`, sorting the slice in place.
+/// Panics (index out of bounds) when `sizes` is empty.
+fn median(sizes: &mut [usize]) -> usize {
+    sizes.sort_unstable();
+    if sizes.len() % 2 == 1 {
+        sizes[sizes.len() / 2]
+    } else {
+        (sizes[sizes.len() / 2 - 1] + sizes[sizes.len() / 2]) / 2
+    }
+}
+
+/// Where in the template AST the generator currently is; `Top` and `Block`
+/// allow constructs that `Nested` positions do not.
+#[derive(Clone, PartialEq)]
+enum AstLevel {
+    Top,
+    Block,
+    Nested,
+}
+
+impl Copy for AstLevel {}
+
+/// Whether a generated expression is already wrapped in an escaping
+/// `Display` adapter.
+#[derive(Clone)]
+enum DisplayWrap {
+    Wrapped,
+    Unwrapped,
+}
+
+impl Copy for DisplayWrap {}
+
+/// A pending output item: either a literal chunk or an expression still to
+/// be rendered.
+#[derive(Debug)]
+enum Writable<'a> {
+    Lit(&'a str),
+    Expr(&'a Expr<'a>),
+}
+
+// Identifiers to be replaced with raw identifiers, so as to avoid
+// collisions between template syntax and Rust's syntax. In particular
+// [Rust keywords](https://doc.rust-lang.org/reference/keywords.html)
+// should be replaced, since they're not reserved words in Askama
+// syntax but have a high probability of causing problems in the
+// generated code.
+//
+// This list excludes the Rust keywords *self*, *Self*, and *super*
+// because they are not allowed to be raw identifiers, and *loop*
+// because it's used something like a keyword in the template
+// language.
+/// Keyword-to-raw-identifier mapping consulted by `normalize_identifier`.
+static USE_RAW: [(&str, &str); 47] = [
+    ("as", "r#as"),
+    ("break", "r#break"),
+    ("const", "r#const"),
+    ("continue", "r#continue"),
+    ("crate", "r#crate"),
+    ("else", "r#else"),
+    ("enum", "r#enum"),
+    ("extern", "r#extern"),
+    ("false", "r#false"),
+    ("fn", "r#fn"),
+    ("for", "r#for"),
+    ("if", "r#if"),
+    ("impl", "r#impl"),
+    ("in", "r#in"),
+    ("let", "r#let"),
+    ("match", "r#match"),
+    ("mod", "r#mod"),
+    ("move", "r#move"),
+    ("mut", "r#mut"),
+    ("pub", "r#pub"),
+    ("ref", "r#ref"),
+    ("return", "r#return"),
+    ("static", "r#static"),
+    ("struct", "r#struct"),
+    ("trait", "r#trait"),
+    ("true", "r#true"),
+    ("type", "r#type"),
+    ("unsafe", "r#unsafe"),
+    ("use", "r#use"),
+    ("where", "r#where"),
+    ("while", "r#while"),
+    ("async", "r#async"),
+    ("await", "r#await"),
+    ("dyn", "r#dyn"),
+    ("abstract", "r#abstract"),
+    ("become", "r#become"),
+    ("box", "r#box"),
+    ("do", "r#do"),
+    ("final", "r#final"),
+    ("macro", "r#macro"),
+    ("override", "r#override"),
+    ("priv", "r#priv"),
+    ("typeof", "r#typeof"),
+    ("unsized", "r#unsized"),
+    ("virtual", "r#virtual"),
+    ("yield", "r#yield"),
+    ("try", "r#try"),
+];
+
+/// Replaces a Rust keyword with its `r#`-prefixed raw form; any other
+/// identifier is returned unchanged.
+fn normalize_identifier(ident: &str) -> &str {
+    if let Some(word) = USE_RAW.iter().find(|x| x.0 == ident) {
+        word.1
+    } else {
+        ident
+    }
+}
diff --git a/third_party/rust/askama_shared/src/helpers/mod.rs b/third_party/rust/askama_shared/src/helpers/mod.rs
new file mode 100644
index 0000000000..79a1ada206
--- /dev/null
+++ b/third_party/rust/askama_shared/src/helpers/mod.rs
@@ -0,0 +1,48 @@
+use std::iter::{Enumerate, Peekable};
+
+/// Iterator adapter backing `{% for %}` loops: pairs each item with a
+/// `LoopItem` describing its position.
+pub struct TemplateLoop<I>
+where
+    I: Iterator,
+{
+    iter: Peekable<Enumerate<I>>,
+}
+
+impl<I> TemplateLoop<I>
+where
+    I: Iterator,
+{
+    /// Wraps `iter` with position tracking.
+    #[inline]
+    pub fn new(iter: I) -> Self {
+        TemplateLoop {
+            iter: iter.enumerate().peekable(),
+        }
+    }
+}
+
+impl<I> Iterator for TemplateLoop<I>
+where
+    I: Iterator,
+{
+    type Item = (<I as Iterator>::Item, LoopItem);
+
+    // `last` is computed by peeking one element ahead of the current item.
+    #[inline]
+    fn next(&mut self) -> Option<(<I as Iterator>::Item, LoopItem)> {
+        self.iter.next().map(|(index, item)| {
+            (
+                item,
+                LoopItem {
+                    index,
+                    first: index == 0,
+                    last: self.iter.peek().is_none(),
+                },
+            )
+        })
+    }
+}
+
+/// Position of one item inside a template `for` loop (`index` is 0-based).
+#[derive(Copy, Clone)]
+pub struct LoopItem {
+    pub index: usize,
+    pub first: bool,
+    pub last: bool,
+}
diff --git a/third_party/rust/askama_shared/src/heritage.rs b/third_party/rust/askama_shared/src/heritage.rs
new file mode 100644
index 0000000000..8dd97e2e69
--- /dev/null
+++ b/third_party/rust/askama_shared/src/heritage.rs
@@ -0,0 +1,125 @@
+use std::collections::HashMap;
+use std::path::{Path, PathBuf};
+
+use crate::parser::{Expr, Loop, Macro, Node};
+use crate::{CompileError, Config};
+
+/// The resolved inheritance chain of a template: its root-most context plus
+/// every block definition along the `extends` chain.
+pub struct Heritage<'a> {
+    pub root: &'a Context<'a>,
+    pub blocks: BlockAncestry<'a>,
+}
+
+impl Heritage<'_> {
+    /// Walks the `extends` chain starting at `ctx`, collecting block
+    /// definitions child-first (so index 0 is the most-derived override) and
+    /// recording the final, base-most context as `root`.
+    pub fn new<'n, S: std::hash::BuildHasher>(
+        mut ctx: &'n Context<'n>,
+        contexts: &'n HashMap<&'n Path, Context<'n>, S>,
+    ) -> Heritage<'n> {
+        let mut blocks: BlockAncestry<'n> = ctx
+            .blocks
+            .iter()
+            .map(|(name, def)| (*name, vec![(ctx, *def)]))
+            .collect();
+
+        while let Some(ref path) = ctx.extends {
+            // All extended paths are expected to be present in `contexts`;
+            // a missing entry panics here.
+            ctx = &contexts[path.as_path()];
+            for (name, def) in &ctx.blocks {
+                blocks.entry(name).or_insert_with(Vec::new).push((ctx, def));
+            }
+        }
+
+        Heritage { root: ctx, blocks }
+    }
+}
+
+/// Block name -> its definitions along the inheritance chain, most-derived
+/// first.
+type BlockAncestry<'a> = HashMap<&'a str, Vec<(&'a Context<'a>, &'a Node<'a>)>>;
+
+/// Per-template metadata extracted from a parsed AST: extends target,
+/// block/macro definitions, and import scopes.
+pub struct Context<'a> {
+    pub nodes: &'a [Node<'a>],
+    pub extends: Option<PathBuf>,
+    pub blocks: HashMap<&'a str, &'a Node<'a>>,
+    pub macros: HashMap<&'a str, &'a Macro<'a>>,
+    pub imports: HashMap<&'a str, PathBuf>,
+}
+
+impl Context<'_> {
+    /// Scans a parsed template for structural nodes.
+    ///
+    /// `extends`, `macro` and `import` are only legal at the top level;
+    /// block definitions are collected at any depth by iteratively pushing
+    /// nested node lists (block bodies, conditional branches, loop bodies,
+    /// match arms) onto a work stack.
+    pub fn new<'n>(
+        config: &Config<'_>,
+        path: &Path,
+        nodes: &'n [Node<'n>],
+    ) -> Result<Context<'n>, CompileError> {
+        let mut extends = None;
+        let mut blocks = Vec::new();
+        let mut macros = HashMap::new();
+        let mut imports = HashMap::new();
+        let mut nested = vec![nodes];
+        // True only while processing the outermost node list.
+        let mut top = true;
+
+        while let Some(nodes) = nested.pop() {
+            for n in nodes {
+                match n {
+                    Node::Extends(Expr::StrLit(extends_path)) if top => match extends {
+                        Some(_) => return Err("multiple extend blocks found".into()),
+                        None => {
+                            extends = Some(config.find_template(extends_path, Some(path))?);
+                        }
+                    },
+                    Node::Macro(name, m) if top => {
+                        macros.insert(*name, m);
+                    }
+                    Node::Import(_, import_path, scope) if top => {
+                        let path = config.find_template(import_path, Some(path))?;
+                        imports.insert(*scope, path);
+                    }
+                    Node::Extends(_) | Node::Macro(_, _) | Node::Import(_, _, _) if !top => {
+                        return Err(
+                            "extends, macro or import blocks not allowed below top level".into(),
+                        );
+                    }
+                    def @ Node::BlockDef(_, _, _, _) => {
+                        blocks.push(def);
+                        if let Node::BlockDef(_, _, nodes, _) = def {
+                            nested.push(nodes);
+                        }
+                    }
+                    Node::Cond(branches, _) => {
+                        for (_, _, nodes) in branches {
+                            nested.push(nodes);
+                        }
+                    }
+                    Node::Loop(Loop {
+                        body, else_block, ..
+                    }) => {
+                        nested.push(body);
+                        nested.push(else_block);
+                    }
+                    Node::Match(_, _, arms, _) => {
+                        for (_, _, arm) in arms {
+                            nested.push(arm);
+                        }
+                    }
+                    _ => {}
+                }
+            }
+            top = false;
+        }
+
+        // Re-key the collected block definitions by block name.
+        let blocks: HashMap<_, _> = blocks
+            .iter()
+            .map(|def| {
+                if let Node::BlockDef(_, name, _, _) = def {
+                    (*name, *def)
+                } else {
+                    unreachable!()
+                }
+            })
+            .collect();
+
+        Ok(Context {
+            nodes,
+            extends,
+            blocks,
+            macros,
+            imports,
+        })
+    }
+}
diff --git a/third_party/rust/askama_shared/src/input.rs b/third_party/rust/askama_shared/src/input.rs
new file mode 100644
index 0000000000..f7eac2336a
--- /dev/null
+++ b/third_party/rust/askama_shared/src/input.rs
@@ -0,0 +1,336 @@
+use crate::{CompileError, Config, Syntax};
+
+use std::path::{Path, PathBuf};
+use std::str::FromStr;
+
+use mime::Mime;
+use quote::ToTokens;
+
+/// All metadata the code generator needs about one `#[derive(Template)]`
+/// struct, recovered from its `#[template(...)]` attribute and the config.
+pub struct TemplateInput<'a> {
+    pub ast: &'a syn::DeriveInput,
+    pub config: &'a Config<'a>,
+    pub syntax: &'a Syntax<'a>,
+    pub source: Source,
+    pub print: Print,
+    pub escaper: &'a str,
+    pub ext: Option<String>,
+    pub mime_type: String,
+    pub parent: Option<&'a syn::Type>,
+    pub path: PathBuf,
+}
+
+impl TemplateInput<'_> {
+    /// Extract the template metadata from the `DeriveInput` structure. This
+    /// mostly recovers the data for the `TemplateInput` fields from the
+    /// `template()` attribute list fields; it also finds the type of the
+    /// `_parent` field, if any.
+    pub fn new<'n>(
+        ast: &'n syn::DeriveInput,
+        config: &'n Config<'_>,
+    ) -> Result<TemplateInput<'n>, CompileError> {
+        // Check that an attribute called `template()` exists once and that it is
+        // the proper type (list).
+        let mut template_args = None;
+        for attr in &ast.attrs {
+            let ident = match attr.path.get_ident() {
+                Some(ident) => ident,
+                None => continue,
+            };
+
+            if ident == "template" {
+                if template_args.is_some() {
+                    return Err("duplicated 'template' attribute".into());
+                }
+
+                match attr.parse_meta() {
+                    Ok(syn::Meta::List(syn::MetaList { nested, .. })) => {
+                        template_args = Some(nested);
+                    }
+                    Ok(_) => return Err("'template' attribute must be a list".into()),
+                    Err(e) => return Err(format!("unable to parse attribute: {}", e).into()),
+                }
+            }
+        }
+        let template_args =
+            template_args.ok_or_else(|| CompileError::from("no attribute 'template' found"))?;
+
+        // Loop over the meta attributes and find everything that we
+        // understand. Return a CompileError if something is not right.
+        // `source` contains an enum that can represent `path` or `source`.
+        let mut source = None;
+        let mut print = Print::None;
+        let mut escaping = None;
+        let mut ext = None;
+        let mut syntax = None;
+        for item in template_args {
+            let pair = match item {
+                syn::NestedMeta::Meta(syn::Meta::NameValue(ref pair)) => pair,
+                _ => {
+                    return Err(format!(
+                        "unsupported attribute argument {:?}",
+                        item.to_token_stream()
+                    )
+                    .into())
+                }
+            };
+            let ident = match pair.path.get_ident() {
+                Some(ident) => ident,
+                None => unreachable!("not possible in syn::Meta::NameValue(…)"),
+            };
+
+            if ident == "path" {
+                if let syn::Lit::Str(ref s) = pair.lit {
+                    if source.is_some() {
+                        return Err("must specify 'source' or 'path', not both".into());
+                    }
+                    source = Some(Source::Path(s.value()));
+                } else {
+                    return Err("template path must be string literal".into());
+                }
+            } else if ident == "source" {
+                if let syn::Lit::Str(ref s) = pair.lit {
+                    if source.is_some() {
+                        return Err("must specify 'source' or 'path', not both".into());
+                    }
+                    source = Some(Source::Source(s.value()));
+                } else {
+                    return Err("template source must be string literal".into());
+                }
+            } else if ident == "print" {
+                if let syn::Lit::Str(ref s) = pair.lit {
+                    print = s.value().parse()?;
+                } else {
+                    return Err("print value must be string literal".into());
+                }
+            } else if ident == "escape" {
+                if let syn::Lit::Str(ref s) = pair.lit {
+                    escaping = Some(s.value());
+                } else {
+                    return Err("escape value must be string literal".into());
+                }
+            } else if ident == "ext" {
+                if let syn::Lit::Str(ref s) = pair.lit {
+                    ext = Some(s.value());
+                } else {
+                    return Err("ext value must be string literal".into());
+                }
+            } else if ident == "syntax" {
+                if let syn::Lit::Str(ref s) = pair.lit {
+                    syntax = Some(s.value())
+                } else {
+                    return Err("syntax value must be string literal".into());
+                }
+            } else {
+                return Err(format!("unsupported attribute key {:?} found", ident).into());
+            }
+        }
+
+        // Validate the `source` and `ext` value together, since they are
+        // related. In case `source` was used instead of `path`, the value
+        // of `ext` is merged into a synthetic `path` value here.
+        // NOTE(review): this panics (rather than returning CompileError)
+        // when neither 'path' nor 'source' was given.
+        let source = source.expect("template path or source not found in attributes");
+        let path = match (&source, &ext) {
+            (&Source::Path(ref path), _) => config.find_template(path, None)?,
+            (&Source::Source(_), Some(ext)) => PathBuf::from(format!("{}.{}", ast.ident, ext)),
+            (&Source::Source(_), None) => {
+                return Err("must include 'ext' attribute when using 'source' attribute".into())
+            }
+        };
+
+        // Check to see if a `_parent` field was defined on the context
+        // struct, and store the type for it for use in the code generator.
+        let parent = match ast.data {
+            syn::Data::Struct(syn::DataStruct {
+                fields: syn::Fields::Named(ref fields),
+                ..
+            }) => fields
+                .named
+                .iter()
+                .find(|f| f.ident.as_ref().filter(|name| *name == "_parent").is_some())
+                .map(|f| &f.ty),
+            _ => None,
+        };
+
+        // `_parent` still works but is deprecated; warn on stderr at
+        // compile time.
+        if parent.is_some() {
+            eprint!(
+                "  --> in struct {}\n   = use of deprecated field '_parent'\n",
+                ast.ident
+            );
+        }
+
+        // Validate syntax
+        let syntax = syntax.map_or_else(
+            || Ok(config.syntaxes.get(config.default_syntax).unwrap()),
+            |s| {
+                config
+                    .syntaxes
+                    .get(&s)
+                    .ok_or_else(|| CompileError::from(format!("attribute syntax {} not exist", s)))
+            },
+        )?;
+
+        // Match extension against defined output formats
+
+        let escaping = escaping.unwrap_or_else(|| {
+            path.extension()
+                .map(|s| s.to_str().unwrap())
+                .unwrap_or("")
+                .to_string()
+        });
+
+        let mut escaper = None;
+        for (extensions, path) in &config.escapers {
+            if extensions.contains(&escaping) {
+                escaper = Some(path);
+                break;
+            }
+        }
+
+        let escaper = escaper.ok_or_else(|| {
+            CompileError::from(format!("no escaper defined for extension '{}'", escaping))
+        })?;
+
+        let mime_type =
+            extension_to_mime_type(ext_default_to_path(ext.as_deref(), &path).unwrap_or("txt"))
+                .to_string();
+
+        Ok(TemplateInput {
+            ast,
+            config,
+            syntax,
+            source,
+            print,
+            escaper,
+            ext,
+            mime_type,
+            parent,
+            path,
+        })
+    }
+
+    /// The effective extension: the explicit `ext` attribute if given,
+    /// otherwise derived from the template path.
+    #[inline]
+    pub fn extension(&self) -> Option<&str> {
+        ext_default_to_path(self.ext.as_deref(), &self.path)
+    }
+}
+
+/// Returns `ext` when given, falling back to the extension of `path`.
+#[inline]
+pub fn ext_default_to_path<'a>(ext: Option<&'a str>, path: &'a Path) -> Option<&'a str> {
+    ext.or_else(|| extension(path))
+}
+
+/// Extracts the relevant extension from a template path.
+///
+/// Jinja-style suffixes (`.j2`, `.jinja`, `.jinja2`) are skipped so that
+/// e.g. `page.html.j2` resolves to `html`; if nothing precedes them, the
+/// jinja suffix itself is returned.
+fn extension(path: &Path) -> Option<&str> {
+    let ext = path.extension().map(|s| s.to_str().unwrap())?;
+
+    const JINJA_EXTENSIONS: [&str; 3] = ["j2", "jinja", "jinja2"];
+    if JINJA_EXTENSIONS.contains(&ext) {
+        Path::new(path.file_stem().unwrap())
+            .extension()
+            .map(|s| s.to_str().unwrap())
+            .or(Some(ext))
+    } else {
+        Some(ext)
+    }
+}
+
+/// Where the template text comes from: a file path or an inline `source`
+/// string from the attribute.
+pub enum Source {
+    Path(String),
+    Source(String),
+}
+
+/// What the `print` attribute asks the derive macro to dump during
+/// compilation.
+#[derive(PartialEq)]
+pub enum Print {
+    All,
+    Ast,
+    Code,
+    None,
+}
+
+impl FromStr for Print {
+    type Err = CompileError;
+
+    /// Parses the `print` attribute value; anything but
+    /// `all`/`ast`/`code`/`none` is a compile error.
+    fn from_str(s: &str) -> Result<Print, Self::Err> {
+        use self::Print::*;
+        Ok(match s {
+            "all" => All,
+            "ast" => Ast,
+            "code" => Code,
+            "none" => None,
+            v => return Err(format!("invalid value for print option: {}", v,).into()),
+        })
+    }
+}
+
+/// Guesses a MIME type from a file extension, upgrading known text types to
+/// their UTF-8 variants; unknown extensions fall back to
+/// `application/octet-stream`.
+#[doc(hidden)]
+pub fn extension_to_mime_type(ext: &str) -> Mime {
+    let basic_type = mime_guess::from_ext(ext).first_or_octet_stream();
+    for (simple, utf_8) in &TEXT_TYPES {
+        if &basic_type == simple {
+            return utf_8.clone();
+        }
+    }
+    basic_type
+}
+
+/// Text MIME types paired with their `charset=utf-8` variants.
+const TEXT_TYPES: [(Mime, Mime); 6] = [
+    (mime::TEXT_PLAIN, mime::TEXT_PLAIN_UTF_8),
+    (mime::TEXT_HTML, mime::TEXT_HTML_UTF_8),
+    (mime::TEXT_CSS, mime::TEXT_CSS_UTF_8),
+    (mime::TEXT_CSV, mime::TEXT_CSV_UTF_8),
+    (
+        mime::TEXT_TAB_SEPARATED_VALUES,
+        mime::TEXT_TAB_SEPARATED_VALUES_UTF_8,
+    ),
+    (
+        mime::APPLICATION_JAVASCRIPT,
+        mime::APPLICATION_JAVASCRIPT_UTF_8,
+    ),
+];
+
+// Unit tests for `extension`, covering single, double, jinja-suffixed and
+// jinja-only extensions.
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_ext() {
+        assert_eq!(extension(Path::new("foo-bar.txt")), Some("txt"));
+        assert_eq!(extension(Path::new("foo-bar.html")), Some("html"));
+        assert_eq!(extension(Path::new("foo-bar.unknown")), Some("unknown"));
+
+        assert_eq!(extension(Path::new("foo/bar/baz.txt")), Some("txt"));
+        assert_eq!(extension(Path::new("foo/bar/baz.html")), Some("html"));
+        assert_eq!(extension(Path::new("foo/bar/baz.unknown")), Some("unknown"));
+    }
+
+    #[test]
+    fn test_double_ext() {
+        assert_eq!(extension(Path::new("foo-bar.html.txt")), Some("txt"));
+        assert_eq!(extension(Path::new("foo-bar.txt.html")), Some("html"));
+        assert_eq!(extension(Path::new("foo-bar.txt.unknown")), Some("unknown"));
+
+        assert_eq!(extension(Path::new("foo/bar/baz.html.txt")), Some("txt"));
+        assert_eq!(extension(Path::new("foo/bar/baz.txt.html")), Some("html"));
+        assert_eq!(
+            extension(Path::new("foo/bar/baz.txt.unknown")),
+            Some("unknown")
+        );
+    }
+
+    #[test]
+    fn test_skip_jinja_ext() {
+        assert_eq!(extension(Path::new("foo-bar.html.j2")), Some("html"));
+        assert_eq!(extension(Path::new("foo-bar.html.jinja")), Some("html"));
+        assert_eq!(extension(Path::new("foo-bar.html.jinja2")), Some("html"));
+
+        assert_eq!(extension(Path::new("foo/bar/baz.txt.j2")), Some("txt"));
+        assert_eq!(extension(Path::new("foo/bar/baz.txt.jinja")), Some("txt"));
+        assert_eq!(extension(Path::new("foo/bar/baz.txt.jinja2")), Some("txt"));
+    }
+
+    #[test]
+    fn test_only_jinja_ext() {
+        assert_eq!(extension(Path::new("foo-bar.j2")), Some("j2"));
+        assert_eq!(extension(Path::new("foo-bar.jinja")), Some("jinja"));
+        assert_eq!(extension(Path::new("foo-bar.jinja2")), Some("jinja2"));
+    }
+}
diff --git a/third_party/rust/askama_shared/src/lib.rs b/third_party/rust/askama_shared/src/lib.rs
new file mode 100644
index 0000000000..994662c5f4
--- /dev/null
+++ b/third_party/rust/askama_shared/src/lib.rs
@@ -0,0 +1,538 @@
+#![cfg_attr(feature = "cargo-clippy", allow(unused_parens))]
+#![forbid(unsafe_code)]
+#![deny(elided_lifetimes_in_paths)]
+#![deny(unreachable_pub)]
+
+use std::borrow::Cow;
+use std::collections::{BTreeMap, HashSet};
+use std::convert::TryFrom;
+use std::path::{Path, PathBuf};
+use std::{env, fmt, fs};
+
+use proc_macro2::{Span, TokenStream};
+#[cfg(feature = "serde")]
+use serde::Deserialize;
+
+pub use crate::input::extension_to_mime_type;
+pub use askama_escape::MarkupDisplay;
+
+mod error;
+pub use crate::error::{Error, Result};
+pub mod filters;
+#[doc(hidden)]
+pub mod generator;
+pub mod helpers;
+#[doc(hidden)]
+pub mod heritage;
+#[doc(hidden)]
+pub mod input;
+#[doc(hidden)]
+pub mod parser;
+
+/// Fully-resolved configuration (built-in defaults merged with the optional
+/// `askama.toml`) that drives template lookup and code generation.
+#[derive(Debug)]
+pub struct Config<'a> {
+    /// Directories searched for template files, in order.
+    pub dirs: Vec<PathBuf>,
+    /// All known delimiter syntaxes, keyed by name.
+    pub syntaxes: BTreeMap<String, Syntax<'a>>,
+    /// Name of the syntax used when a template does not choose one.
+    pub default_syntax: &'a str,
+    /// Escaper type paths paired with the file extensions they handle.
+    pub escapers: Vec<(HashSet<String>, String)>,
+}
+
+impl Config<'_> {
+    /// Parses configuration from the (possibly empty) contents of
+    /// `askama.toml` and merges it with the built-in defaults.
+    ///
+    /// Fails when the TOML is invalid, a syntax name is defined twice, or
+    /// the requested default syntax does not exist.
+    pub fn new(s: &str) -> std::result::Result<Config<'_>, CompileError> {
+        // All paths are resolved relative to the crate being compiled.
+        let root = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap());
+        let default_dirs = vec![root.join("templates")];
+
+        // The "default" syntax is always present; user syntaxes are added below.
+        let mut syntaxes = BTreeMap::new();
+        syntaxes.insert(DEFAULT_SYNTAX_NAME.to_string(), Syntax::default());
+
+        let raw = if s.is_empty() {
+            RawConfig::default()
+        } else {
+            RawConfig::from_toml_str(s)?
+        };
+
+        let (dirs, default_syntax) = match raw.general {
+            Some(General {
+                dirs,
+                default_syntax,
+            }) => (
+                dirs.map_or(default_dirs, |v| {
+                    v.into_iter().map(|dir| root.join(dir)).collect()
+                }),
+                default_syntax.unwrap_or(DEFAULT_SYNTAX_NAME),
+            ),
+            None => (default_dirs, DEFAULT_SYNTAX_NAME),
+        };
+
+        if let Some(raw_syntaxes) = raw.syntax {
+            for raw_s in raw_syntaxes {
+                let name = raw_s.name;
+
+                // `insert` returning Some means the name was already taken
+                // (either user-defined twice, or shadowing "default").
+                if syntaxes
+                    .insert(name.to_string(), Syntax::try_from(raw_s)?)
+                    .is_some()
+                {
+                    return Err(format!("syntax \"{}\" is already defined", name).into());
+                }
+            }
+        }
+
+        if !syntaxes.contains_key(default_syntax) {
+            return Err(format!("default syntax \"{}\" not found", default_syntax).into());
+        }
+
+        // User-configured escapers come first so they take precedence over
+        // the built-in defaults during extension lookup.
+        let mut escapers = Vec::new();
+        if let Some(configured) = raw.escaper {
+            for escaper in configured {
+                escapers.push((
+                    escaper
+                        .extensions
+                        .iter()
+                        .map(|ext| (*ext).to_string())
+                        .collect(),
+                    escaper.path.to_string(),
+                ));
+            }
+        }
+        for (extensions, path) in DEFAULT_ESCAPERS {
+            escapers.push((str_set(extensions), (*path).to_string()));
+        }
+
+        Ok(Config {
+            dirs,
+            syntaxes,
+            default_syntax,
+            escapers,
+        })
+    }
+
+    /// Locates `path` on disk: first as a sibling of `start_at` (the
+    /// including/extending template, if any), then in each configured
+    /// template directory in order.
+    pub fn find_template(
+        &self,
+        path: &str,
+        start_at: Option<&Path>,
+    ) -> std::result::Result<PathBuf, CompileError> {
+        if let Some(root) = start_at {
+            let relative = root.with_file_name(path);
+            if relative.exists() {
+                return Ok(relative);
+            }
+        }
+
+        for dir in &self.dirs {
+            let rooted = dir.join(path);
+            if rooted.exists() {
+                return Ok(rooted);
+            }
+        }
+
+        Err(format!(
+            "template {:?} not found in directories {:?}",
+            path, self.dirs
+        )
+        .into())
+    }
+}
+
+/// Delimiter strings for one template syntax. Each delimiter is exactly two
+/// characters long (validated in `TryFrom<RawSyntax>`).
+#[derive(Debug)]
+pub struct Syntax<'a> {
+    pub block_start: &'a str,
+    pub block_end: &'a str,
+    pub expr_start: &'a str,
+    pub expr_end: &'a str,
+    pub comment_start: &'a str,
+    pub comment_end: &'a str,
+}
+
+impl Default for Syntax<'_> {
+    /// Returns the stock Jinja-style delimiters: `{% %}`, `{{ }}`, `{# #}`.
+    fn default() -> Self {
+        let (block_start, block_end) = ("{%", "%}");
+        let (expr_start, expr_end) = ("{{", "}}");
+        let (comment_start, comment_end) = ("{#", "#}");
+        Self {
+            block_start,
+            block_end,
+            expr_start,
+            expr_end,
+            comment_start,
+            comment_end,
+        }
+    }
+}
+
+impl<'a> TryFrom<RawSyntax<'a>> for Syntax<'a> {
+    type Error = CompileError;
+
+    /// Builds a `Syntax` from raw TOML values, falling back to the default
+    /// delimiter for every field that was not configured.
+    ///
+    /// Validation: every delimiter must be exactly two bytes long, and the
+    /// block/comment/expression *start* delimiters must share at least one
+    /// character in the same position.
+    fn try_from(raw: RawSyntax<'a>) -> std::result::Result<Self, Self::Error> {
+        let default = Self::default();
+        let syntax = Self {
+            block_start: raw.block_start.unwrap_or(default.block_start),
+            block_end: raw.block_end.unwrap_or(default.block_end),
+            expr_start: raw.expr_start.unwrap_or(default.expr_start),
+            expr_end: raw.expr_end.unwrap_or(default.expr_end),
+            comment_start: raw.comment_start.unwrap_or(default.comment_start),
+            comment_end: raw.comment_end.unwrap_or(default.comment_end),
+        };
+
+        if syntax.block_start.len() != 2
+            || syntax.block_end.len() != 2
+            || syntax.expr_start.len() != 2
+            || syntax.expr_end.len() != 2
+            || syntax.comment_start.len() != 2
+            || syntax.comment_end.len() != 2
+        {
+            return Err("length of delimiters must be two".into());
+        }
+
+        let bs = syntax.block_start.as_bytes()[0];
+        let be = syntax.block_start.as_bytes()[1];
+        let cs = syntax.comment_start.as_bytes()[0];
+        let ce = syntax.comment_start.as_bytes()[1];
+        // BUG FIX: `es`/`ee` previously read from `block_start` again, which
+        // made the check below compare block_start against itself and never
+        // actually validate `expr_start`.
+        let es = syntax.expr_start.as_bytes()[0];
+        let ee = syntax.expr_start.as_bytes()[1];
+        if !((bs == cs && bs == es) || (be == ce && be == ee)) {
+            return Err(format!("bad delimiters block_start: {}, comment_start: {}, expr_start: {}, needs one of the two characters in common", syntax.block_start, syntax.comment_start, syntax.expr_start).into());
+        }
+
+        Ok(syntax)
+    }
+}
+
+/// Direct deserialization target for `askama.toml`; all sections optional.
+#[cfg_attr(feature = "serde", derive(Deserialize))]
+#[derive(Default)]
+struct RawConfig<'d> {
+    #[cfg_attr(feature = "serde", serde(borrow))]
+    general: Option<General<'d>>,
+    syntax: Option<Vec<RawSyntax<'d>>>,
+    escaper: Option<Vec<RawEscaper<'d>>>,
+}
+
+impl RawConfig<'_> {
+    /// Parses raw configuration from TOML text (requires the `config` feature).
+    #[cfg(feature = "config")]
+    fn from_toml_str(s: &str) -> std::result::Result<RawConfig<'_>, CompileError> {
+        match toml::from_str(s) {
+            Ok(raw) => Ok(raw),
+            Err(err) => Err(format!("invalid TOML in {}: {}", CONFIG_FILE_NAME, err).into()),
+        }
+    }
+
+    /// Without the `config` feature any non-empty configuration is an error.
+    #[cfg(not(feature = "config"))]
+    fn from_toml_str(_: &str) -> std::result::Result<RawConfig<'_>, CompileError> {
+        Err("TOML support not available".into())
+    }
+}
+
+/// The `[general]` section of `askama.toml`.
+#[cfg_attr(feature = "serde", derive(Deserialize))]
+struct General<'a> {
+    #[cfg_attr(feature = "serde", serde(borrow))]
+    dirs: Option<Vec<&'a str>>,
+    default_syntax: Option<&'a str>,
+}
+
+/// One `[[syntax]]` table: a named set of optional delimiter overrides.
+#[cfg_attr(feature = "serde", derive(Deserialize))]
+struct RawSyntax<'a> {
+    name: &'a str,
+    block_start: Option<&'a str>,
+    block_end: Option<&'a str>,
+    expr_start: Option<&'a str>,
+    expr_end: Option<&'a str>,
+    comment_start: Option<&'a str>,
+    comment_end: Option<&'a str>,
+}
+
+/// One `[[escaper]]` table: an escaper type path plus the extensions it handles.
+#[cfg_attr(feature = "serde", derive(Deserialize))]
+struct RawEscaper<'a> {
+    path: &'a str,
+    extensions: Vec<&'a str>,
+}
+
+/// Reads `askama.toml` from the crate root, returning an empty string when
+/// the file does not exist (absence of configuration is not an error).
+pub fn read_config_file() -> std::result::Result<String, CompileError> {
+    let manifest_dir = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap());
+    let config_path = manifest_dir.join(CONFIG_FILE_NAME);
+    if !config_path.exists() {
+        return Ok("".to_string());
+    }
+    fs::read_to_string(&config_path)
+        .map_err(|_| format!("unable to read {:?}", config_path.to_str().unwrap()).into())
+}
+
+/// Collects the string representation of every value in `vals` into a set.
+fn str_set<T>(vals: &[T]) -> HashSet<String>
+where
+    T: ToString,
+{
+    let mut set = HashSet::new();
+    for val in vals {
+        set.insert(val.to_string());
+    }
+    set
+}
+
+/// Reads a template file into a `String`, dropping a single trailing newline
+/// so that it does not leak into the rendered output.
+#[allow(clippy::match_wild_err_arm)]
+pub fn get_template_source(tpl_path: &Path) -> std::result::Result<String, CompileError> {
+    let mut source = match fs::read_to_string(tpl_path) {
+        Ok(source) => source,
+        Err(_) => {
+            return Err(format!(
+                "unable to open template file '{}'",
+                tpl_path.to_str().unwrap()
+            )
+            .into())
+        }
+    };
+    if source.ends_with('\n') {
+        let _ = source.pop();
+    }
+    Ok(source)
+}
+
+/// Flags for which web-framework integrations are enabled at compile time;
+/// consumed by the code generator.
+#[derive(Clone, Copy, Debug)]
+pub struct Integrations {
+    pub actix: bool,
+    pub axum: bool,
+    pub gotham: bool,
+    pub mendes: bool,
+    pub rocket: bool,
+    pub tide: bool,
+    pub warp: bool,
+}
+
+// Name of the optional per-crate configuration file.
+static CONFIG_FILE_NAME: &str = "askama.toml";
+// Name of the built-in syntax that is always registered.
+static DEFAULT_SYNTAX_NAME: &str = "default";
+// Built-in escaper table: (extensions, escaper type path). Appended after
+// user-configured escapers, so users can override these mappings.
+static DEFAULT_ESCAPERS: &[(&[&str], &str)] = &[
+    (&["html", "htm", "xml"], "::askama::Html"),
+    (&["md", "none", "txt", "yml", ""], "::askama::Text"),
+    (&["j2", "jinja", "jinja2"], "::askama::Html"),
+];
+
+/// An error produced while compiling a template, carrying a message and the
+/// proc-macro span it should be reported at.
+#[derive(Debug, Clone)]
+pub struct CompileError {
+    msg: Cow<'static, str>,
+    span: Span,
+}
+
+impl CompileError {
+    /// Creates a new error with message `s` attached to `span`.
+    pub fn new<S: Into<Cow<'static, str>>>(s: S, span: Span) -> Self {
+        let msg = s.into();
+        CompileError { msg, span }
+    }
+
+    /// Converts the error into a `compile_error!` token stream emitted at
+    /// the recorded span.
+    pub fn to_compile_error(self) -> TokenStream {
+        let CompileError { msg, span } = self;
+        syn::Error::new(span, msg).to_compile_error()
+    }
+}
+
+impl std::error::Error for CompileError {}
+
+// Display shows only the message; the span is used by `to_compile_error`.
+impl fmt::Display for CompileError {
+    #[inline]
+    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt.write_str(&self.msg)
+    }
+}
+
+// Both conversions default the span to the macro call site.
+impl From<&'static str> for CompileError {
+    #[inline]
+    fn from(s: &'static str) -> Self {
+        Self::new(s, Span::call_site())
+    }
+}
+
+impl From<String> for CompileError {
+    #[inline]
+    fn from(s: String) -> Self {
+        Self::new(s, Span::call_site())
+    }
+}
+
+// Unit tests for configuration parsing and template lookup. Several tests
+// rely on the fixture templates under `templates/` in this crate.
+#[cfg(test)]
+#[allow(clippy::blacklisted_name)]
+mod tests {
+    use super::*;
+    use std::env;
+    use std::path::{Path, PathBuf};
+
+    // `templates/b.html` contains the single word "bar" (trailing newline
+    // stripped by get_template_source).
+    #[test]
+    fn get_source() {
+        let path = Config::new("")
+            .and_then(|config| config.find_template("b.html", None))
+            .unwrap();
+        assert_eq!(get_template_source(&path).unwrap(), "bar");
+    }
+
+    #[test]
+    fn test_default_config() {
+        let mut root = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap());
+        root.push("templates");
+        let config = Config::new("").unwrap();
+        assert_eq!(config.dirs, vec![root]);
+    }
+
+    #[cfg(feature = "config")]
+    #[test]
+    fn test_config_dirs() {
+        let mut root = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap());
+        root.push("tpl");
+        let config = Config::new("[general]\ndirs = [\"tpl\"]").unwrap();
+        assert_eq!(config.dirs, vec![root]);
+    }
+
+    // Asserts that `actual` is `expected` resolved under `templates/`.
+    fn assert_eq_rooted(actual: &Path, expected: &str) {
+        let mut root = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap());
+        root.push("templates");
+        let mut inner = PathBuf::new();
+        inner.push(expected);
+        assert_eq!(actual.strip_prefix(root).unwrap(), inner);
+    }
+
+    #[test]
+    fn find_absolute() {
+        let config = Config::new("").unwrap();
+        let root = config.find_template("a.html", None).unwrap();
+        let path = config.find_template("sub/b.html", Some(&root)).unwrap();
+        assert_eq_rooted(&path, "sub/b.html");
+    }
+
+    #[test]
+    #[should_panic]
+    fn find_relative_nonexistent() {
+        let config = Config::new("").unwrap();
+        let root = config.find_template("a.html", None).unwrap();
+        config.find_template("c.html", Some(&root)).unwrap();
+    }
+
+    #[test]
+    fn find_relative() {
+        let config = Config::new("").unwrap();
+        let root = config.find_template("sub/b.html", None).unwrap();
+        let path = config.find_template("c.html", Some(&root)).unwrap();
+        assert_eq_rooted(&path, "sub/c.html");
+    }
+
+    #[test]
+    fn find_relative_sub() {
+        let config = Config::new("").unwrap();
+        let root = config.find_template("sub/b.html", None).unwrap();
+        let path = config.find_template("sub1/d.html", Some(&root)).unwrap();
+        assert_eq_rooted(&path, "sub/sub1/d.html");
+    }
+
+    // Unconfigured delimiter fields must fall back to the defaults.
+    #[cfg(feature = "config")]
+    #[test]
+    fn add_syntax() {
+        let raw_config = r#"
+        [general]
+        default_syntax = "foo"
+
+        [[syntax]]
+        name = "foo"
+        block_start = "{<"
+
+        [[syntax]]
+        name = "bar"
+        expr_start = "{!"
+        "#;
+
+        let default_syntax = Syntax::default();
+        let config = Config::new(raw_config).unwrap();
+        assert_eq!(config.default_syntax, "foo");
+
+        let foo = config.syntaxes.get("foo").unwrap();
+        assert_eq!(foo.block_start, "{<");
+        assert_eq!(foo.block_end, default_syntax.block_end);
+        assert_eq!(foo.expr_start, default_syntax.expr_start);
+        assert_eq!(foo.expr_end, default_syntax.expr_end);
+        assert_eq!(foo.comment_start, default_syntax.comment_start);
+        assert_eq!(foo.comment_end, default_syntax.comment_end);
+
+        let bar = config.syntaxes.get("bar").unwrap();
+        assert_eq!(bar.block_start, default_syntax.block_start);
+        assert_eq!(bar.block_end, default_syntax.block_end);
+        assert_eq!(bar.expr_start, "{!");
+        assert_eq!(bar.expr_end, default_syntax.expr_end);
+        assert_eq!(bar.comment_start, default_syntax.comment_start);
+        assert_eq!(bar.comment_end, default_syntax.comment_end);
+    }
+
+    // Same as add_syntax, but using inline-array TOML syntax.
+    #[cfg(feature = "config")]
+    #[test]
+    fn add_syntax_two() {
+        let raw_config = r#"
+        syntax = [{ name = "foo", block_start = "{<" },
+                  { name = "bar", expr_start = "{!" } ]
+
+        [general]
+        default_syntax = "foo"
+        "#;
+
+        let default_syntax = Syntax::default();
+        let config = Config::new(raw_config).unwrap();
+        assert_eq!(config.default_syntax, "foo");
+
+        let foo = config.syntaxes.get("foo").unwrap();
+        assert_eq!(foo.block_start, "{<");
+        assert_eq!(foo.block_end, default_syntax.block_end);
+        assert_eq!(foo.expr_start, default_syntax.expr_start);
+        assert_eq!(foo.expr_end, default_syntax.expr_end);
+        assert_eq!(foo.comment_start, default_syntax.comment_start);
+        assert_eq!(foo.comment_end, default_syntax.comment_end);
+
+        let bar = config.syntaxes.get("bar").unwrap();
+        assert_eq!(bar.block_start, default_syntax.block_start);
+        assert_eq!(bar.block_end, default_syntax.block_end);
+        assert_eq!(bar.expr_start, "{!");
+        assert_eq!(bar.expr_end, default_syntax.expr_end);
+        assert_eq!(bar.comment_start, default_syntax.comment_start);
+        assert_eq!(bar.comment_end, default_syntax.comment_end);
+    }
+
+    // Redefining the built-in "default" syntax is rejected.
+    #[cfg(feature = "toml")]
+    #[should_panic]
+    #[test]
+    fn use_default_at_syntax_name() {
+        let raw_config = r#"
+        syntax = [{ name = "default" }]
+        "#;
+
+        let _config = Config::new(raw_config).unwrap();
+    }
+
+    #[cfg(feature = "toml")]
+    #[should_panic]
+    #[test]
+    fn duplicated_syntax_name_on_list() {
+        let raw_config = r#"
+        syntax = [{ name = "foo", block_start = "~<" },
+                  { name = "foo", block_start = "%%" } ]
+        "#;
+
+        let _config = Config::new(raw_config).unwrap();
+    }
+
+    #[cfg(feature = "toml")]
+    #[should_panic]
+    #[test]
+    fn is_not_exist_default_syntax() {
+        let raw_config = r#"
+        [general]
+        default_syntax = "foo"
+        "#;
+
+        let _config = Config::new(raw_config).unwrap();
+    }
+
+    // User escapers must precede the built-in defaults in the lookup order.
+    #[cfg(feature = "config")]
+    #[test]
+    fn escape_modes() {
+        let config = Config::new(
+            r#"
+            [[escaper]]
+            path = "::askama::Js"
+            extensions = ["js"]
+            "#,
+        )
+        .unwrap();
+        assert_eq!(
+            config.escapers,
+            vec![
+                (str_set(&["js"]), "::askama::Js".into()),
+                (str_set(&["html", "htm", "xml"]), "::askama::Html".into()),
+                (
+                    str_set(&["md", "none", "txt", "yml", ""]),
+                    "::askama::Text".into()
+                ),
+                (str_set(&["j2", "jinja", "jinja2"]), "::askama::Html".into()),
+            ]
+        );
+    }
+}
diff --git a/third_party/rust/askama_shared/src/parser.rs b/third_party/rust/askama_shared/src/parser.rs
new file mode 100644
index 0000000000..900e71ad7f
--- /dev/null
+++ b/third_party/rust/askama_shared/src/parser.rs
@@ -0,0 +1,1799 @@
+use std::cell::Cell;
+use std::str;
+
+use nom::branch::alt;
+use nom::bytes::complete::{escaped, is_not, tag, take_till, take_until};
+use nom::character::complete::{anychar, char, digit1};
+use nom::combinator::{complete, consumed, cut, eof, map, not, opt, peek, recognize, value};
+use nom::error::{Error, ErrorKind};
+use nom::multi::{fold_many0, many0, many1, separated_list0, separated_list1};
+use nom::sequence::{delimited, pair, preceded, terminated, tuple};
+use nom::{self, error_position, AsChar, IResult, InputTakeAtPosition};
+
+use crate::{CompileError, Syntax};
+
+/// One node of the parsed template AST. The `Ws` values record whether
+/// whitespace should be trimmed before/after the node.
+#[derive(Debug, PartialEq)]
+pub enum Node<'a> {
+    /// Literal text, split as (leading ws, trimmed content, trailing ws).
+    Lit(&'a str, &'a str, &'a str),
+    Comment(Ws),
+    Expr(Ws, Expr<'a>),
+    /// Macro call: (ws, optional import scope, macro name, arguments).
+    Call(Ws, Option<&'a str>, &'a str, Vec<Expr<'a>>),
+    LetDecl(Ws, Target<'a>),
+    Let(Ws, Target<'a>, Expr<'a>),
+    Cond(Vec<Cond<'a>>, Ws),
+    Match(Ws, Expr<'a>, Vec<When<'a>>, Ws),
+    Loop(Loop<'a>),
+    Extends(Expr<'a>),
+    BlockDef(Ws, &'a str, Vec<Node<'a>>, Ws),
+    Include(Ws, &'a str),
+    Import(Ws, &'a str, &'a str),
+    Macro(&'a str, Macro<'a>),
+    Raw(Ws, &'a str, &'a str, &'a str, Ws),
+    Break(Ws),
+    Continue(Ws),
+}
+
+/// A `{% for %}` loop, including the optional condition and `else` block.
+#[derive(Debug, PartialEq)]
+pub struct Loop<'a> {
+    pub ws1: Ws,
+    /// Loop variable binding.
+    pub var: Target<'a>,
+    /// Expression being iterated.
+    pub iter: Expr<'a>,
+    /// Optional filter condition on each iteration.
+    pub cond: Option<Expr<'a>>,
+    pub body: Vec<Node<'a>>,
+    pub ws2: Ws,
+    /// Rendered when the iterator produced no items.
+    pub else_block: Vec<Node<'a>>,
+    pub ws3: Ws,
+}
+
+/// An expression inside a template. Literal variants keep the original
+/// source slice rather than a parsed value.
+#[derive(Debug, PartialEq)]
+pub enum Expr<'a> {
+    BoolLit(&'a str),
+    NumLit(&'a str),
+    StrLit(&'a str),
+    CharLit(&'a str),
+    Var(&'a str),
+    Path(Vec<&'a str>),
+    Array(Vec<Expr<'a>>),
+    Attr(Box<Expr<'a>>, &'a str),
+    Index(Box<Expr<'a>>, Box<Expr<'a>>),
+    /// Filter application: (filter name, args; the filtered value is args[0]).
+    Filter(&'a str, Vec<Expr<'a>>),
+    Unary(&'a str, Box<Expr<'a>>),
+    BinOp(&'a str, Box<Expr<'a>>, Box<Expr<'a>>),
+    /// Range: (".." or "..=", optional lower bound, optional upper bound).
+    Range(&'a str, Option<Box<Expr<'a>>>, Option<Box<Expr<'a>>>),
+    Group(Box<Expr<'a>>),
+    Tuple(Vec<Expr<'a>>),
+    Call(Box<Expr<'a>>, Vec<Expr<'a>>),
+    /// Rust macro invocation: (macro name, raw unparsed argument text).
+    RustMacro(&'a str, &'a str),
+    Try(Box<Expr<'a>>),
+}
+
+impl Expr<'_> {
+    /// Returns `true` if enough assumptions can be made,
+    /// to determine that `self` is copyable.
+    pub fn is_copyable(&self) -> bool {
+        self.is_copyable_within_op(false)
+    }
+
+    // `within_op` is true when this expression is an operand of a
+    // `Unary`/`BinOp`, which loosens the assumptions (see comment below).
+    fn is_copyable_within_op(&self, within_op: bool) -> bool {
+        use Expr::*;
+        match self {
+            BoolLit(_) | NumLit(_) | StrLit(_) | CharLit(_) => true,
+            Unary(.., expr) => expr.is_copyable_within_op(true),
+            BinOp(_, lhs, rhs) => {
+                lhs.is_copyable_within_op(true) && rhs.is_copyable_within_op(true)
+            }
+            Range(..) => true,
+            // The result of a call likely doesn't need to be borrowed,
+            // as in that case the call is more likely to return a
+            // reference in the first place then.
+            Call(..) | Path(..) => true,
+            // If the `expr` is within a `Unary` or `BinOp` then
+            // an assumption can be made that the operand is copy.
+            // If not, then the value is moved and adding `.clone()`
+            // will solve that issue. However, if the operand is
+            // implicitly borrowed, then it's likely not even possible
+            // to get the template to compile.
+            _ => within_op && self.is_attr_self(),
+        }
+    }
+
+    /// Returns `true` if this is an `Attr` where the `obj` is `"self"`.
+    pub fn is_attr_self(&self) -> bool {
+        match self {
+            Expr::Attr(obj, _) if matches!(obj.as_ref(), Expr::Var("self")) => true,
+            Expr::Attr(obj, _) if matches!(obj.as_ref(), Expr::Attr(..)) => obj.is_attr_self(),
+            _ => false,
+        }
+    }
+}
+
+/// One `{% when %}` arm of a match: (ws, pattern, body).
+pub type When<'a> = (Ws, Target<'a>, Vec<Node<'a>>);
+
+/// A `{% macro %}` definition.
+#[derive(Debug, PartialEq)]
+pub struct Macro<'a> {
+    pub ws1: Ws,
+    pub args: Vec<&'a str>,
+    pub nodes: Vec<Node<'a>>,
+    pub ws2: Ws,
+}
+
+/// A binding pattern, used in `let`, `for` and `when`.
+#[derive(Debug, PartialEq)]
+pub enum Target<'a> {
+    Name(&'a str),
+    /// Tuple pattern: (optional enum-variant path, element patterns).
+    Tuple(Vec<&'a str>, Vec<Target<'a>>),
+    /// Struct pattern: (path, (field name, field pattern) pairs).
+    Struct(Vec<&'a str>, Vec<(&'a str, Target<'a>)>),
+    NumLit(&'a str),
+    StrLit(&'a str),
+    CharLit(&'a str),
+    BoolLit(&'a str),
+    Path(Vec<&'a str>),
+}
+
+/// Whitespace suppression flags: (trim before, trim after).
+#[derive(Clone, Copy, Debug, PartialEq)]
+pub struct Ws(pub bool, pub bool);
+
+/// One branch of an if/else-if/else chain: (ws, test or None for `else`, body).
+pub type Cond<'a> = (Ws, Option<CondTest<'a>>, Vec<Node<'a>>);
+
+/// An `if` test, optionally an `if let` with a binding target.
+#[derive(Debug, PartialEq)]
+pub struct CondTest<'a> {
+    pub target: Option<Target<'a>>,
+    pub expr: Expr<'a>,
+}
+
+/// Returns `true` for the ASCII whitespace characters the parser skips.
+fn is_ws(c: char) -> bool {
+    c == ' ' || c == '\t' || c == '\r' || c == '\n'
+}
+
+/// Inverse of `is_ws`.
+fn not_ws(c: char) -> bool {
+    !is_ws(c)
+}
+
+/// Wraps `inner` so that surrounding whitespace is consumed and discarded.
+fn ws<'a, O>(
+    inner: impl FnMut(&'a str) -> IResult<&'a str, O>,
+) -> impl FnMut(&'a str) -> IResult<&'a str, O> {
+    delimited(take_till(not_ws), inner, take_till(not_ws))
+}
+
+/// Splits literal text into (leading whitespace, trimmed content, trailing
+/// whitespace) and wraps the three slices in a `Node::Lit`.
+fn split_ws_parts(s: &str) -> Node<'_> {
+    let without_leading = s.trim_start_matches(is_ws);
+    let leading_len = s.len() - without_leading.len();
+    let content = without_leading.trim_end_matches(is_ws);
+    Node::Lit(&s[..leading_len], content, &without_leading[content.len()..])
+}
+
+/// Skips input until `end` was found, but does not consume it.
+/// Returns tuple that would be returned when parsing `end`.
+fn skip_till<'a, O>(
+    end: impl FnMut(&'a str) -> IResult<&'a str, O>,
+) -> impl FnMut(&'a str) -> IResult<&'a str, (&'a str, O)> {
+    enum Next<O> {
+        IsEnd(O),
+        NotEnd(char),
+    }
+    // Either `end` matches at the current position, or we consume one char
+    // and try again; `anychar` failing (EOF) propagates as the error.
+    let mut next = alt((map(end, Next::IsEnd), map(anychar, Next::NotEnd)));
+    move |start: &'a str| {
+        let mut i = start;
+        loop {
+            let (j, is_end) = next(i)?;
+            match is_end {
+                // `i` still points *at* the match; `j` is the rest after it.
+                Next::IsEnd(lookahead) => return Ok((i, (j, lookahead))),
+                Next::NotEnd(_) => i = j,
+            }
+        }
+    }
+}
+
+/// Shared parser state: the active delimiter syntax, plus the current
+/// `{% for %}` nesting depth (used to validate break/continue).
+struct State<'a> {
+    syntax: &'a Syntax<'a>,
+    loop_depth: Cell<usize>,
+}
+
+/// Consumes literal template text up to (but not including) the next block,
+/// comment or expression start delimiter, producing a `Node::Lit`.
+fn take_content<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
+    let p_start = alt((
+        tag(s.syntax.block_start),
+        tag(s.syntax.comment_start),
+        tag(s.syntax.expr_start),
+    ));
+
+    let (i, _) = not(eof)(i)?;
+    let (i, content) = opt(recognize(skip_till(p_start)))(i)?;
+    let (i, content) = match content {
+        Some("") => {
+            // {block,comment,expr}_start follows immediately.
+            return Err(nom::Err::Error(error_position!(i, ErrorKind::TakeUntil)));
+        }
+        Some(content) => (i, content),
+        None => ("", i), // there is no {block,comment,expr}_start: take everything
+    };
+    Ok((i, split_ws_parts(content)))
+}
+
+/// Parses an identifier: a start character followed by an optional tail.
+fn identifier(input: &str) -> IResult<&str, &str> {
+    recognize(pair(identifier_start, opt(identifier_tail)))(input)
+}
+
+// First character: ASCII letter, underscore, or any non-ASCII char (>= U+0080).
+fn identifier_start(s: &str) -> IResult<&str, &str> {
+    s.split_at_position1_complete(
+        |c| !(c.is_alpha() || c == '_' || c >= '\u{0080}'),
+        nom::error::ErrorKind::Alpha,
+    )
+}
+
+// Remaining characters: additionally allow ASCII digits.
+fn identifier_tail(s: &str) -> IResult<&str, &str> {
+    s.split_at_position1_complete(
+        |c| !(c.is_alphanum() || c == '_' || c >= '\u{0080}'),
+        nom::error::ErrorKind::Alpha,
+    )
+}
+
+/// Matches the literal keyword `false` or `true`.
+fn bool_lit(i: &str) -> IResult<&str, &str> {
+    alt((tag("false"), tag("true")))(i)
+}
+
+/// Boolean literal in expression position.
+fn expr_bool_lit(i: &str) -> IResult<&str, Expr<'_>> {
+    let (i, lit) = bool_lit(i)?;
+    Ok((i, Expr::BoolLit(lit)))
+}
+
+/// Boolean literal in pattern (match-arm) position.
+fn variant_bool_lit(i: &str) -> IResult<&str, Target<'_>> {
+    let (i, lit) = bool_lit(i)?;
+    Ok((i, Target::BoolLit(lit)))
+}
+
+/// Number literal: digits with an optional single fractional part.
+fn num_lit(i: &str) -> IResult<&str, &str> {
+    recognize(pair(digit1, opt(pair(char('.'), digit1))))(i)
+}
+
+fn expr_num_lit(i: &str) -> IResult<&str, Expr<'_>> {
+    map(num_lit, Expr::NumLit)(i)
+}
+
+/// Array literal: `[expr, expr, ...]` (at least one element).
+fn expr_array_lit(i: &str) -> IResult<&str, Expr<'_>> {
+    delimited(
+        ws(char('[')),
+        map(separated_list1(ws(char(',')), expr_any), Expr::Array),
+        ws(char(']')),
+    )(i)
+}
+
+fn variant_num_lit(i: &str) -> IResult<&str, Target<'_>> {
+    map(num_lit, Target::NumLit)(i)
+}
+
+/// String literal contents between double quotes; backslash escapes any
+/// following character. Returns the inner slice without the quotes.
+fn str_lit(i: &str) -> IResult<&str, &str> {
+    let (i, s) = delimited(
+        char('"'),
+        opt(escaped(is_not("\\\""), '\\', anychar)),
+        char('"'),
+    )(i)?;
+    // `opt` yields None for the empty string `""`.
+    Ok((i, s.unwrap_or_default()))
+}
+
+fn expr_str_lit(i: &str) -> IResult<&str, Expr<'_>> {
+    map(str_lit, Expr::StrLit)(i)
+}
+
+fn variant_str_lit(i: &str) -> IResult<&str, Target<'_>> {
+    map(str_lit, Target::StrLit)(i)
+}
+
+/// Char literal contents between single quotes, same escaping as `str_lit`.
+fn char_lit(i: &str) -> IResult<&str, &str> {
+    let (i, s) = delimited(
+        char('\''),
+        opt(escaped(is_not("\\\'"), '\\', anychar)),
+        char('\''),
+    )(i)?;
+    Ok((i, s.unwrap_or_default()))
+}
+
+fn expr_char_lit(i: &str) -> IResult<&str, Expr<'_>> {
+    map(char_lit, Expr::CharLit)(i)
+}
+
+fn variant_char_lit(i: &str) -> IResult<&str, Target<'_>> {
+    map(char_lit, Target::CharLit)(i)
+}
+
+/// A bare identifier used as a variable reference.
+fn expr_var(i: &str) -> IResult<&str, Expr<'_>> {
+    let (i, name) = identifier(i)?;
+    Ok((i, Expr::Var(name)))
+}
+
+/// Parses a Rust path such as `foo::bar::Baz` or `::std::mem::swap`, with a
+/// fallback: a lone identifier containing an uppercase character (e.g.
+/// `None` or a constant) is also treated as a single-segment path.
+fn path(i: &str) -> IResult<&str, Vec<&str>> {
+    // A leading `::` contributes an empty first segment.
+    let root = opt(value("", ws(tag("::"))));
+    let tail = separated_list1(ws(tag("::")), identifier);
+
+    match tuple((root, identifier, ws(tag("::")), tail))(i) {
+        Ok((i, (root, start, _, rest))) => {
+            let mut path = Vec::new();
+            path.extend(root);
+            path.push(start);
+            path.extend(rest);
+            Ok((i, path))
+        }
+        Err(err) => {
+            if let Ok((i, name)) = identifier(i) {
+                // The returned identifier can be assumed to be path if:
+                // - Contains both a lowercase and uppercase character, i.e. a type name like `None`
+                // - Doesn't contain any lowercase characters, i.e. it's a constant
+                // In short, if it contains any uppercase characters it's a path.
+                if name.contains(char::is_uppercase) {
+                    return Ok((i, vec![name]));
+                }
+            }
+
+            // If `identifier()` fails then just return the original error
+            Err(err)
+        }
+    }
+}
+
+/// A Rust path (e.g. `foo::bar::Baz`) in expression position.
+fn expr_path(i: &str) -> IResult<&str, Expr<'_>> {
+    map(path, Expr::Path)(i)
+}
+
+/// One field of a struct pattern: `name` (shorthand) or `name: pattern`.
+fn named_target(i: &str) -> IResult<&str, (&str, Target<'_>)> {
+    let (i, (src, target)) = pair(identifier, opt(preceded(ws(char(':')), target)))(i)?;
+    // Shorthand `name` binds the field to a variable of the same name.
+    Ok((i, (src, target.unwrap_or(Target::Name(src)))))
+}
+
+/// Any literal usable as a match pattern (string, char, number, bool).
+fn variant_lit(i: &str) -> IResult<&str, Target<'_>> {
+    alt((
+        variant_str_lit,
+        variant_char_lit,
+        variant_num_lit,
+        variant_bool_lit,
+    ))(i)
+}
+
+/// Parses a binding pattern: a literal, a tuple, a (possibly tuple- or
+/// struct-like) enum variant, a path, or a plain name.
+fn target(i: &str) -> IResult<&str, Target<'_>> {
+    let mut opt_opening_paren = map(opt(ws(char('('))), |o| o.is_some());
+    let mut opt_closing_paren = map(opt(ws(char(')'))), |o| o.is_some());
+    let mut opt_opening_brace = map(opt(ws(char('{'))), |o| o.is_some());
+
+    let (i, lit) = opt(variant_lit)(i)?;
+    if let Some(lit) = lit {
+        return Ok((i, lit));
+    }
+
+    // match tuples and unused parentheses
+    let (i, target_is_tuple) = opt_opening_paren(i)?;
+    if target_is_tuple {
+        let (i, is_empty_tuple) = opt_closing_paren(i)?;
+        if is_empty_tuple {
+            return Ok((i, Target::Tuple(Vec::new(), Vec::new())));
+        }
+
+        let (i, first_target) = target(i)?;
+        let (i, is_unused_paren) = opt_closing_paren(i)?;
+        if is_unused_paren {
+            // `(pattern)` — parentheses around a single pattern, not a tuple.
+            return Ok((i, first_target));
+        }
+
+        let mut targets = vec![first_target];
+        let (i, _) = cut(tuple((
+            fold_many0(
+                preceded(ws(char(',')), target),
+                || (),
+                |_, target| {
+                    targets.push(target);
+                },
+            ),
+            opt(ws(char(','))),
+            ws(cut(char(')'))),
+        )))(i)?;
+        return Ok((i, Target::Tuple(Vec::new(), targets)));
+    }
+
+    // match structs
+    let (i, path) = opt(path)(i)?;
+    if let Some(path) = path {
+        // `with` is an optional keyword between the variant path and its
+        // fields; remember the position so a bare path can backtrack.
+        let i_before_matching_with = i;
+        let (i, _) = opt(ws(tag("with")))(i)?;
+
+        let (i, is_unnamed_struct) = opt_opening_paren(i)?;
+        if is_unnamed_struct {
+            let (i, targets) = alt((
+                map(char(')'), |_| Vec::new()),
+                terminated(
+                    cut(separated_list1(ws(char(',')), target)),
+                    pair(opt(ws(char(','))), ws(cut(char(')')))),
+                ),
+            ))(i)?;
+            return Ok((i, Target::Tuple(path, targets)));
+        }
+
+        let (i, is_named_struct) = opt_opening_brace(i)?;
+        if is_named_struct {
+            let (i, targets) = alt((
+                map(char('}'), |_| Vec::new()),
+                terminated(
+                    cut(separated_list1(ws(char(',')), named_target)),
+                    pair(opt(ws(char(','))), ws(cut(char('}')))),
+                ),
+            ))(i)?;
+            return Ok((i, Target::Struct(path, targets)));
+        }
+
+        return Ok((i_before_matching_with, Target::Path(path)));
+    }
+
+    // neither literal nor struct nor path
+    map(identifier, Target::Name)(i)
+}
+
+/// Parenthesized, comma-separated argument list of parsed expressions.
+fn arguments(i: &str) -> IResult<&str, Vec<Expr<'_>>> {
+    delimited(
+        ws(char('(')),
+        separated_list0(char(','), ws(expr_any)),
+        ws(char(')')),
+    )(i)
+}
+
+/// Raw (unparsed) argument text of a Rust macro call, balancing parentheses.
+fn macro_arguments(i: &str) -> IResult<&str, &str> {
+    delimited(char('('), recognize(nested_parenthesis), char(')'))(i)
+}
+
+/// Scans forward over balanced parentheses (ignoring parens inside string
+/// literals), stopping just before the `)` that closes the outermost level.
+// NOTE(review): `escaped` is only reset when the *next* char is not `\`,
+// and the first condition processes any non-paren char even inside strings —
+// this mirrors the original behavior; confirm against upstream before changing.
+fn nested_parenthesis(i: &str) -> IResult<&str, ()> {
+    let mut nested = 0;
+    let mut last = 0;
+    let mut in_str = false;
+    let mut escaped = false;
+
+    for (i, b) in i.chars().enumerate() {
+        if !(b == '(' || b == ')') || !in_str {
+            match b {
+                '(' => nested += 1,
+                ')' => {
+                    if nested == 0 {
+                        // Unmatched `)`: this closes the caller's delimiter.
+                        last = i;
+                        break;
+                    }
+                    nested -= 1;
+                }
+                '"' => {
+                    if in_str {
+                        if !escaped {
+                            in_str = false;
+                        }
+                    } else {
+                        in_str = true;
+                    }
+                }
+                '\\' => {
+                    escaped = !escaped;
+                }
+                _ => (),
+            }
+        }
+
+        if escaped && b != '\\' {
+            escaped = false;
+        }
+    }
+
+    if nested == 0 {
+        Ok((&i[last..], ()))
+    } else {
+        Err(nom::Err::Error(error_position!(
+            i,
+            ErrorKind::SeparatedNonEmptyList
+        )))
+    }
+}
+
+/// Parses a macro definition's parameter list: `(a, b, c)`.
+fn parameters(i: &str) -> IResult<&str, Vec<&str>> {
+    let param_list = separated_list0(char(','), ws(identifier));
+    delimited(ws(char('(')), param_list, ws(char(')')))(i)
+}
+
+/// Parses `(...)`: the empty tuple `()`, a parenthesized group `(expr)`, or
+/// a tuple `(a, b, ...)` — distinguished by a lookahead for a comma.
+fn expr_group(i: &str) -> IResult<&str, Expr<'_>> {
+    let (i, expr) = preceded(ws(char('(')), opt(expr_any))(i)?;
+    let expr = match expr {
+        Some(expr) => expr,
+        None => {
+            // `()` is the empty tuple.
+            let (i, _) = char(')')(i)?;
+            return Ok((i, Expr::Tuple(vec![])));
+        }
+    };
+
+    let (i, comma) = ws(opt(peek(char(','))))(i)?;
+    if comma.is_none() {
+        // No comma: plain parenthesized expression.
+        let (i, _) = char(')')(i)?;
+        return Ok((i, Expr::Group(Box::new(expr))));
+    }
+
+    let mut exprs = vec![expr];
+    let (i, _) = fold_many0(
+        preceded(char(','), ws(expr_any)),
+        || (),
+        |_, expr| {
+            exprs.push(expr);
+        },
+    )(i)?;
+    let (i, _) = pair(ws(opt(char(','))), char(')'))(i)?;
+    Ok((i, Expr::Tuple(exprs)))
+}
+
+/// Parses an atomic expression. The alternatives are ordered: literals
+/// first, paths before macros before plain variables, groups last.
+fn expr_single(i: &str) -> IResult<&str, Expr<'_>> {
+    alt((
+        expr_bool_lit,
+        expr_num_lit,
+        expr_str_lit,
+        expr_char_lit,
+        expr_path,
+        expr_rust_macro,
+        expr_array_lit,
+        expr_var,
+        expr_group,
+    ))(i)
+}
+
+/// A postfix operation applied to an expression: `.attr`, `[index]`,
+/// `(args)` or `?`.
+enum Suffix<'a> {
+    Attr(&'a str),
+    Index(Expr<'a>),
+    Call(Vec<Expr<'a>>),
+    Try,
+}
+
+/// `.name` or `.0` attribute access; the `not(char('.'))` guard keeps `..`
+/// (range syntax) from being parsed as an attribute.
+fn expr_attr(i: &str) -> IResult<&str, Suffix<'_>> {
+    map(
+        preceded(
+            ws(pair(char('.'), not(char('.')))),
+            cut(alt((num_lit, identifier))),
+        ),
+        Suffix::Attr,
+    )(i)
+}
+
+/// `[expr]` index access.
+fn expr_index(i: &str) -> IResult<&str, Suffix<'_>> {
+    map(
+        preceded(ws(char('[')), cut(terminated(expr_any, ws(char(']'))))),
+        Suffix::Index,
+    )(i)
+}
+
+/// `(args)` call suffix.
+fn expr_call(i: &str) -> IResult<&str, Suffix<'_>> {
+    map(arguments, Suffix::Call)(i)
+}
+
+/// `?` try suffix (leading whitespace allowed).
+fn expr_try(i: &str) -> IResult<&str, Suffix<'_>> {
+    map(preceded(take_till(not_ws), char('?')), |_| Suffix::Try)(i)
+}
+
+/// Parses one `|name` or `|name(args)` filter application.
+fn filter(i: &str) -> IResult<&str, (&str, Option<Vec<Expr<'_>>>)> {
+    let (i, _) = char('|')(i)?;
+    let (i, fname) = ws(identifier)(i)?;
+    let (i, args) = opt(arguments)(i)?;
+    Ok((i, (fname, args)))
+}
+
+/// Parses an expression followed by any number of filters, wrapping each
+/// filter around the result so far with the value as its first argument.
+fn expr_filtered(i: &str) -> IResult<&str, Expr<'_>> {
+    let (i, (obj, filters)) = tuple((expr_prefix, many0(filter)))(i)?;
+
+    let filtered = filters.into_iter().fold(obj, |inner, (fname, args)| {
+        let mut call_args = args.unwrap_or_default();
+        call_args.insert(0, inner);
+        Expr::Filter(fname, call_args)
+    });
+
+    Ok((i, filtered))
+}
+
+/// Parses prefix operators (`!`, `-`); the rightmost operator binds tightest.
+fn expr_prefix(i: &str) -> IResult<&str, Expr<'_>> {
+    let (rest, (unary_ops, inner)) = pair(many0(ws(alt((tag("!"), tag("-"))))), expr_suffix)(i)?;
+    let mut result = inner;
+    for unary_op in unary_ops.iter().rev() {
+        result = Expr::Unary(unary_op, Box::new(result));
+    }
+    Ok((rest, result))
+}
+
+/// Parses an atomic expression followed by any chain of postfix operations
+/// (`.attr`, `[i]`, `(args)`, `?`), applied left to right.
+fn expr_suffix(i: &str) -> IResult<&str, Expr<'_>> {
+    let (mut i, mut expr) = expr_single(i)?;
+    loop {
+        let (j, suffix) = opt(alt((expr_attr, expr_index, expr_call, expr_try)))(i)?;
+        i = j;
+        match suffix {
+            Some(Suffix::Attr(attr)) => expr = Expr::Attr(expr.into(), attr),
+            Some(Suffix::Index(index)) => expr = Expr::Index(expr.into(), index.into()),
+            Some(Suffix::Call(args)) => expr = Expr::Call(expr.into(), args),
+            Some(Suffix::Try) => expr = Expr::Try(expr.into()),
+            None => break,
+        }
+    }
+    Ok((i, expr))
+}
+
+/// Parses a Rust macro invocation such as `name!(...)`, keeping the raw
+/// argument text unparsed.
+fn expr_rust_macro(i: &str) -> IResult<&str, Expr<'_>> {
+    let (i, mname) = identifier(i)?;
+    let (i, _) = char('!')(i)?;
+    let (i, args) = macro_arguments(i)?;
+    Ok((i, Expr::RustMacro(mname, args)))
+}
+
+// Generates one precedence layer of the binary-operator parser: `$name`
+// parses `$inner (op $inner)*` left-associatively, folding into
+// `Expr::BinOp`. The second arm accepts multiple operator tokens.
+macro_rules! expr_prec_layer {
+    ( $name:ident, $inner:ident, $op:expr ) => {
+        fn $name(i: &str) -> IResult<&str, Expr<'_>> {
+            let (i, left) = $inner(i)?;
+            let (i, right) = many0(pair(
+                ws(tag($op)),
+                $inner,
+            ))(i)?;
+            Ok((
+                i,
+                right.into_iter().fold(left, |left, (op, right)| {
+                    Expr::BinOp(op, Box::new(left), Box::new(right))
+                }),
+            ))
+        }
+    };
+    ( $name:ident, $inner:ident, $( $op:expr ),+ ) => {
+        fn $name(i: &str) -> IResult<&str, Expr<'_>> {
+            let (i, left) = $inner(i)?;
+            let (i, right) = many0(pair(
+                ws(alt(($( tag($op) ),+,))),
+                $inner,
+            ))(i)?;
+            Ok((
+                i,
+                right.into_iter().fold(left, |left, (op, right)| {
+                    Expr::BinOp(op, Box::new(left), Box::new(right))
+                }),
+            ))
+        }
+    }
+}
+
+// Operator precedence chain, tightest binding first. Note the operator
+// ordering within a layer matters for `alt`: ">=" before ">", "<=" before "<".
+expr_prec_layer!(expr_muldivmod, expr_filtered, "*", "/", "%");
+expr_prec_layer!(expr_addsub, expr_muldivmod, "+", "-");
+expr_prec_layer!(expr_shifts, expr_addsub, ">>", "<<");
+expr_prec_layer!(expr_band, expr_shifts, "&");
+expr_prec_layer!(expr_bxor, expr_band, "^");
+expr_prec_layer!(expr_bor, expr_bxor, "|");
+expr_prec_layer!(expr_compare, expr_bor, "==", "!=", ">=", ">", "<=", "<");
+expr_prec_layer!(expr_and, expr_compare, "&&");
+expr_prec_layer!(expr_or, expr_and, "||");
+
+/// Top-level expression parser: any expression, optionally a range
+/// (`..`/`..=`) with either bound missing.
+fn expr_any(i: &str) -> IResult<&str, Expr<'_>> {
+    let range_right = |i| pair(ws(alt((tag("..="), tag("..")))), opt(expr_or))(i);
+    alt((
+        // Range with no lower bound, e.g. `..n` or `..`.
+        map(range_right, |(op, right)| {
+            Expr::Range(op, None, right.map(Box::new))
+        }),
+        // Plain expression, optionally followed by a range operator.
+        map(
+            pair(expr_or, opt(range_right)),
+            |(left, right)| match right {
+                Some((op, right)) => Expr::Range(op, Some(Box::new(left)), right.map(Box::new)),
+                None => left,
+            },
+        ),
+    ))(i)
+}
+
+/// Parses a `{{ expr }}` output node; the optional `-` markers request
+/// whitespace trimming before/after.
+fn expr_node<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
+    let mut p = tuple((
+        |i| tag_expr_start(i, s),
+        cut(tuple((opt(char('-')), ws(expr_any), opt(char('-')), |i| {
+            tag_expr_end(i, s)
+        }))),
+    ));
+    let (i, (_, (pws, expr, nws, _))) = p(i)?;
+    Ok((i, Node::Expr(Ws(pws.is_some(), nws.is_some()), expr)))
+}
+
+/// Parses the inside of a `{% call [scope::]name(args) %}` block tag.
+fn block_call(i: &str) -> IResult<&str, Node<'_>> {
+    let mut p = tuple((
+        opt(char('-')),
+        ws(tag("call")),
+        cut(tuple((
+            // Optional `scope::` prefix naming an imported macro namespace.
+            opt(tuple((ws(identifier), ws(tag("::"))))),
+            ws(identifier),
+            ws(arguments),
+            opt(char('-')),
+        ))),
+    ));
+    let (i, (pws, _, (scope, name, args, nws))) = p(i)?;
+    let scope = scope.map(|(scope, _)| scope);
+    Ok((
+        i,
+        Node::Call(Ws(pws.is_some(), nws.is_some()), scope, name, args),
+    ))
+}
+
+fn cond_if(i: &str) -> IResult<&str, CondTest<'_>> {
+ let mut p = preceded(
+ ws(tag("if")),
+ cut(tuple((
+ opt(delimited(
+ ws(alt((tag("let"), tag("set")))),
+ ws(target),
+ ws(char('=')),
+ )),
+ ws(expr_any),
+ ))),
+ );
+ let (i, (target, expr)) = p(i)?;
+ Ok((i, CondTest { target, expr }))
+}
+
+fn cond_block<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Cond<'a>> {
+ let mut p = tuple((
+ |i| tag_block_start(i, s),
+ opt(char('-')),
+ ws(tag("else")),
+ cut(tuple((
+ opt(cond_if),
+ opt(char('-')),
+ |i| tag_block_end(i, s),
+ cut(|i| parse_template(i, s)),
+ ))),
+ ));
+ let (i, (_, pws, _, (cond, nws, _, block))) = p(i)?;
+ Ok((i, (Ws(pws.is_some(), nws.is_some()), cond, block)))
+}
+
+fn block_if<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
+ let mut p = tuple((
+ opt(char('-')),
+ cond_if,
+ cut(tuple((
+ opt(char('-')),
+ |i| tag_block_end(i, s),
+ cut(tuple((
+ |i| parse_template(i, s),
+ many0(|i| cond_block(i, s)),
+ cut(tuple((
+ |i| tag_block_start(i, s),
+ opt(char('-')),
+ ws(tag("endif")),
+ opt(char('-')),
+ ))),
+ ))),
+ ))),
+ ));
+ let (i, (pws1, cond, (nws1, _, (block, elifs, (_, pws2, _, nws2))))) = p(i)?;
+
+ let mut res = vec![(Ws(pws1.is_some(), nws1.is_some()), Some(cond), block)];
+ res.extend(elifs);
+ Ok((i, Node::Cond(res, Ws(pws2.is_some(), nws2.is_some()))))
+}
+
+fn match_else_block<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, When<'a>> {
+ let mut p = tuple((
+ |i| tag_block_start(i, s),
+ opt(char('-')),
+ ws(tag("else")),
+ cut(tuple((
+ opt(char('-')),
+ |i| tag_block_end(i, s),
+ cut(|i| parse_template(i, s)),
+ ))),
+ ));
+ let (i, (_, pws, _, (nws, _, block))) = p(i)?;
+ Ok((
+ i,
+ (Ws(pws.is_some(), nws.is_some()), Target::Name("_"), block),
+ ))
+}
+
+fn when_block<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, When<'a>> {
+ let mut p = tuple((
+ |i| tag_block_start(i, s),
+ opt(char('-')),
+ ws(tag("when")),
+ cut(tuple((
+ ws(target),
+ opt(char('-')),
+ |i| tag_block_end(i, s),
+ cut(|i| parse_template(i, s)),
+ ))),
+ ));
+ let (i, (_, pws, _, (target, nws, _, block))) = p(i)?;
+ Ok((i, (Ws(pws.is_some(), nws.is_some()), target, block)))
+}
+
+fn block_match<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
+ let mut p = tuple((
+ opt(char('-')),
+ ws(tag("match")),
+ cut(tuple((
+ ws(expr_any),
+ opt(char('-')),
+ |i| tag_block_end(i, s),
+ cut(tuple((
+ ws(many0(ws(value((), |i| block_comment(i, s))))),
+ many1(|i| when_block(i, s)),
+ cut(tuple((
+ opt(|i| match_else_block(i, s)),
+ cut(tuple((
+ ws(|i| tag_block_start(i, s)),
+ opt(char('-')),
+ ws(tag("endmatch")),
+ opt(char('-')),
+ ))),
+ ))),
+ ))),
+ ))),
+ ));
+ let (i, (pws1, _, (expr, nws1, _, (_, arms, (else_arm, (_, pws2, _, nws2)))))) = p(i)?;
+
+ let mut arms = arms;
+ if let Some(arm) = else_arm {
+ arms.push(arm);
+ }
+
+ Ok((
+ i,
+ Node::Match(
+ Ws(pws1.is_some(), nws1.is_some()),
+ expr,
+ arms,
+ Ws(pws2.is_some(), nws2.is_some()),
+ ),
+ ))
+}
+
+fn block_let(i: &str) -> IResult<&str, Node<'_>> {
+ let mut p = tuple((
+ opt(char('-')),
+ ws(alt((tag("let"), tag("set")))),
+ cut(tuple((
+ ws(target),
+ opt(tuple((ws(char('=')), ws(expr_any)))),
+ opt(char('-')),
+ ))),
+ ));
+ let (i, (pws, _, (var, val, nws))) = p(i)?;
+
+ Ok((
+ i,
+ if let Some((_, val)) = val {
+ Node::Let(Ws(pws.is_some(), nws.is_some()), var, val)
+ } else {
+ Node::LetDecl(Ws(pws.is_some(), nws.is_some()), var)
+ },
+ ))
+}
+
+fn parse_loop_content<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Vec<Node<'a>>> {
+ s.loop_depth.set(s.loop_depth.get() + 1);
+ let result = parse_template(i, s);
+ s.loop_depth.set(s.loop_depth.get() - 1);
+ result
+}
+
+fn block_for<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
+ let if_cond = preceded(ws(tag("if")), cut(ws(expr_any)));
+ let else_block = |i| {
+ let mut p = preceded(
+ ws(tag("else")),
+ cut(tuple((
+ opt(tag("-")),
+ delimited(
+ |i| tag_block_end(i, s),
+ |i| parse_template(i, s),
+ |i| tag_block_start(i, s),
+ ),
+ opt(tag("-")),
+ ))),
+ );
+ let (i, (pws, nodes, nws)) = p(i)?;
+ Ok((i, (pws.is_some(), nodes, nws.is_some())))
+ };
+ let mut p = tuple((
+ opt(char('-')),
+ ws(tag("for")),
+ cut(tuple((
+ ws(target),
+ ws(tag("in")),
+ cut(tuple((
+ ws(expr_any),
+ opt(if_cond),
+ opt(char('-')),
+ |i| tag_block_end(i, s),
+ cut(tuple((
+ |i| parse_loop_content(i, s),
+ cut(tuple((
+ |i| tag_block_start(i, s),
+ opt(char('-')),
+ opt(else_block),
+ ws(tag("endfor")),
+ opt(char('-')),
+ ))),
+ ))),
+ ))),
+ ))),
+ ));
+ let (i, (pws1, _, (var, _, (iter, cond, nws1, _, (body, (_, pws2, else_block, _, nws2)))))) =
+ p(i)?;
+ let (nws3, else_block, pws3) = else_block.unwrap_or_default();
+ Ok((
+ i,
+ Node::Loop(Loop {
+ ws1: Ws(pws1.is_some(), nws1.is_some()),
+ var,
+ iter,
+ cond,
+ body,
+ ws2: Ws(pws2.is_some(), nws3),
+ else_block,
+ ws3: Ws(pws3, nws2.is_some()),
+ }),
+ ))
+}
+
+fn block_extends(i: &str) -> IResult<&str, Node<'_>> {
+ let (i, (_, name)) = tuple((ws(tag("extends")), ws(expr_str_lit)))(i)?;
+ Ok((i, Node::Extends(name)))
+}
+
+fn block_block<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
+ let mut start = tuple((
+ opt(char('-')),
+ ws(tag("block")),
+ cut(tuple((ws(identifier), opt(char('-')), |i| {
+ tag_block_end(i, s)
+ }))),
+ ));
+ let (i, (pws1, _, (name, nws1, _))) = start(i)?;
+
+ let mut end = cut(tuple((
+ |i| parse_template(i, s),
+ cut(tuple((
+ |i| tag_block_start(i, s),
+ opt(char('-')),
+ ws(tag("endblock")),
+ cut(tuple((opt(ws(tag(name))), opt(char('-'))))),
+ ))),
+ )));
+ let (i, (contents, (_, pws2, _, (_, nws2)))) = end(i)?;
+
+ Ok((
+ i,
+ Node::BlockDef(
+ Ws(pws1.is_some(), nws1.is_some()),
+ name,
+ contents,
+ Ws(pws2.is_some(), nws2.is_some()),
+ ),
+ ))
+}
+
+fn block_include(i: &str) -> IResult<&str, Node<'_>> {
+ let mut p = tuple((
+ opt(char('-')),
+ ws(tag("include")),
+ cut(pair(ws(str_lit), opt(char('-')))),
+ ));
+ let (i, (pws, _, (name, nws))) = p(i)?;
+ Ok((i, Node::Include(Ws(pws.is_some(), nws.is_some()), name)))
+}
+
+fn block_import(i: &str) -> IResult<&str, Node<'_>> {
+ let mut p = tuple((
+ opt(char('-')),
+ ws(tag("import")),
+ cut(tuple((
+ ws(str_lit),
+ ws(tag("as")),
+ cut(pair(ws(identifier), opt(char('-')))),
+ ))),
+ ));
+ let (i, (pws, _, (name, _, (scope, nws)))) = p(i)?;
+ Ok((
+ i,
+ Node::Import(Ws(pws.is_some(), nws.is_some()), name, scope),
+ ))
+}
+
+fn block_macro<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
+ let mut p = tuple((
+ opt(char('-')),
+ ws(tag("macro")),
+ cut(tuple((
+ ws(identifier),
+ ws(parameters),
+ opt(char('-')),
+ |i| tag_block_end(i, s),
+ cut(tuple((
+ |i| parse_template(i, s),
+ cut(tuple((
+ |i| tag_block_start(i, s),
+ opt(char('-')),
+ ws(tag("endmacro")),
+ opt(char('-')),
+ ))),
+ ))),
+ ))),
+ ));
+
+ let (i, (pws1, _, (name, params, nws1, _, (contents, (_, pws2, _, nws2))))) = p(i)?;
+ assert_ne!(name, "super", "invalid macro name 'super'");
+
+ Ok((
+ i,
+ Node::Macro(
+ name,
+ Macro {
+ ws1: Ws(pws1.is_some(), nws1.is_some()),
+ args: params,
+ nodes: contents,
+ ws2: Ws(pws2.is_some(), nws2.is_some()),
+ },
+ ),
+ ))
+}
+
+fn block_raw<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
+ let endraw = tuple((
+ |i| tag_block_start(i, s),
+ opt(char('-')),
+ ws(tag("endraw")),
+ opt(char('-')),
+ peek(|i| tag_block_end(i, s)),
+ ));
+
+ let mut p = tuple((
+ opt(char('-')),
+ ws(tag("raw")),
+ cut(tuple((
+ opt(char('-')),
+ |i| tag_block_end(i, s),
+ consumed(skip_till(endraw)),
+ ))),
+ ));
+
+ let (_, (pws1, _, (nws1, _, (contents, (i, (_, pws2, _, nws2, _)))))) = p(i)?;
+ let (lws, val, rws) = match split_ws_parts(contents) {
+ Node::Lit(lws, val, rws) => (lws, val, rws),
+ _ => unreachable!(),
+ };
+ let ws1 = Ws(pws1.is_some(), nws1.is_some());
+ let ws2 = Ws(pws2.is_some(), nws2.is_some());
+ Ok((i, Node::Raw(ws1, lws, val, rws, ws2)))
+}
+
+fn break_statement<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
+ let mut p = tuple((opt(char('-')), ws(tag("break")), opt(char('-'))));
+ let (j, (pws, _, nws)) = p(i)?;
+ if s.loop_depth.get() == 0 {
+ return Err(nom::Err::Failure(error_position!(i, ErrorKind::Tag)));
+ }
+ Ok((j, Node::Break(Ws(pws.is_some(), nws.is_some()))))
+}
+
+fn continue_statement<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
+ let mut p = tuple((opt(char('-')), ws(tag("continue")), opt(char('-'))));
+ let (j, (pws, _, nws)) = p(i)?;
+ if s.loop_depth.get() == 0 {
+ return Err(nom::Err::Failure(error_position!(i, ErrorKind::Tag)));
+ }
+ Ok((j, Node::Continue(Ws(pws.is_some(), nws.is_some()))))
+}
+
+fn block_node<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
+ let mut p = tuple((
+ |i| tag_block_start(i, s),
+ alt((
+ block_call,
+ block_let,
+ |i| block_if(i, s),
+ |i| block_for(i, s),
+ |i| block_match(i, s),
+ block_extends,
+ block_include,
+ block_import,
+ |i| block_block(i, s),
+ |i| block_macro(i, s),
+ |i| block_raw(i, s),
+ |i| break_statement(i, s),
+ |i| continue_statement(i, s),
+ )),
+ cut(|i| tag_block_end(i, s)),
+ ));
+ let (i, (_, contents, _)) = p(i)?;
+ Ok((i, contents))
+}
+
+fn block_comment_body<'a>(mut i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> {
+ let mut level = 0;
+ loop {
+ let (end, tail) = take_until(s.syntax.comment_end)(i)?;
+ match take_until::<_, _, Error<_>>(s.syntax.comment_start)(i) {
+ Ok((start, _)) if start.as_ptr() < end.as_ptr() => {
+ level += 1;
+ i = &start[2..];
+ }
+ _ if level > 0 => {
+ level -= 1;
+ i = &end[2..];
+ }
+ _ => return Ok((end, tail)),
+ }
+ }
+}
+
+fn block_comment<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
+ let mut p = tuple((
+ |i| tag_comment_start(i, s),
+ cut(tuple((
+ opt(char('-')),
+ |i| block_comment_body(i, s),
+ |i| tag_comment_end(i, s),
+ ))),
+ ));
+ let (i, (_, (pws, tail, _))) = p(i)?;
+ Ok((i, Node::Comment(Ws(pws.is_some(), tail.ends_with('-')))))
+}
+
+fn parse_template<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Vec<Node<'a>>> {
+ many0(alt((
+ complete(|i| take_content(i, s)),
+ complete(|i| block_comment(i, s)),
+ complete(|i| expr_node(i, s)),
+ complete(|i| block_node(i, s)),
+ )))(i)
+}
+
+fn tag_block_start<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> {
+ tag(s.syntax.block_start)(i)
+}
+fn tag_block_end<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> {
+ tag(s.syntax.block_end)(i)
+}
+fn tag_comment_start<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> {
+ tag(s.syntax.comment_start)(i)
+}
+fn tag_comment_end<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> {
+ tag(s.syntax.comment_end)(i)
+}
+fn tag_expr_start<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> {
+ tag(s.syntax.expr_start)(i)
+}
+fn tag_expr_end<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> {
+ tag(s.syntax.expr_end)(i)
+}
+
+pub fn parse<'a>(src: &'a str, syntax: &'a Syntax<'a>) -> Result<Vec<Node<'a>>, CompileError> {
+ let state = State {
+ syntax,
+ loop_depth: Cell::new(0),
+ };
+ match parse_template(src, &state) {
+ Ok((left, res)) => {
+ if !left.is_empty() {
+ Err(format!("unable to parse template:\n\n{:?}", left).into())
+ } else {
+ Ok(res)
+ }
+ }
+
+ Err(nom::Err::Error(err)) | Err(nom::Err::Failure(err)) => {
+ let nom::error::Error { input, .. } = err;
+ let offset = src.len() - input.len();
+ let (source_before, source_after) = src.split_at(offset);
+
+ let source_after = match source_after.char_indices().enumerate().take(41).last() {
+ Some((40, (i, _))) => format!("{:?}...", &source_after[..i]),
+ _ => format!("{:?}", source_after),
+ };
+
+ let (row, last_line) = source_before.lines().enumerate().last().unwrap();
+ let column = last_line.chars().count();
+
+ let msg = format!(
+ "problems parsing template source at row {}, column {} near:\n{}",
+ row + 1,
+ column,
+ source_after,
+ );
+ Err(msg.into())
+ }
+
+ Err(nom::Err::Incomplete(_)) => Err("parsing incomplete".into()),
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::{Expr, Node, Ws};
+ use crate::Syntax;
+
+ fn check_ws_split(s: &str, res: &(&str, &str, &str)) {
+ match super::split_ws_parts(s) {
+ Node::Lit(lws, s, rws) => {
+ assert_eq!(lws, res.0);
+ assert_eq!(s, res.1);
+ assert_eq!(rws, res.2);
+ }
+ _ => {
+ panic!("fail");
+ }
+ }
+ }
+
+ #[test]
+ fn test_ws_splitter() {
+ check_ws_split("", &("", "", ""));
+ check_ws_split("a", &("", "a", ""));
+ check_ws_split("\ta", &("\t", "a", ""));
+ check_ws_split("b\n", &("", "b", "\n"));
+ check_ws_split(" \t\r\n", &(" \t\r\n", "", ""));
+ }
+
+ #[test]
+ #[should_panic]
+ fn test_invalid_block() {
+ super::parse("{% extend \"blah\" %}", &Syntax::default()).unwrap();
+ }
+
+ #[test]
+ fn test_parse_filter() {
+ use Expr::*;
+ let syntax = Syntax::default();
+ assert_eq!(
+ super::parse("{{ strvar|e }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(false, false),
+ Filter("e", vec![Var("strvar")]),
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ 2|abs }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(false, false),
+ Filter("abs", vec![NumLit("2")]),
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ -2|abs }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(false, false),
+ Filter("abs", vec![Unary("-", NumLit("2").into())]),
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ (1 - 2)|abs }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(false, false),
+ Filter(
+ "abs",
+ vec![Group(
+ BinOp("-", NumLit("1").into(), NumLit("2").into()).into()
+ )]
+ ),
+ )],
+ );
+ }
+
+ #[test]
+ fn test_parse_numbers() {
+ let syntax = Syntax::default();
+ assert_eq!(
+ super::parse("{{ 2 }}", &syntax).unwrap(),
+ vec![Node::Expr(Ws(false, false), Expr::NumLit("2"),)],
+ );
+ assert_eq!(
+ super::parse("{{ 2.5 }}", &syntax).unwrap(),
+ vec![Node::Expr(Ws(false, false), Expr::NumLit("2.5"),)],
+ );
+ }
+
+ #[test]
+ fn test_parse_var() {
+ let s = Syntax::default();
+
+ assert_eq!(
+ super::parse("{{ foo }}", &s).unwrap(),
+ vec![Node::Expr(Ws(false, false), Expr::Var("foo"))],
+ );
+ assert_eq!(
+ super::parse("{{ foo_bar }}", &s).unwrap(),
+ vec![Node::Expr(Ws(false, false), Expr::Var("foo_bar"))],
+ );
+
+ assert_eq!(
+ super::parse("{{ none }}", &s).unwrap(),
+ vec![Node::Expr(Ws(false, false), Expr::Var("none"))],
+ );
+ }
+
+ #[test]
+ fn test_parse_const() {
+ let s = Syntax::default();
+
+ assert_eq!(
+ super::parse("{{ FOO }}", &s).unwrap(),
+ vec![Node::Expr(Ws(false, false), Expr::Path(vec!["FOO"]))],
+ );
+ assert_eq!(
+ super::parse("{{ FOO_BAR }}", &s).unwrap(),
+ vec![Node::Expr(Ws(false, false), Expr::Path(vec!["FOO_BAR"]))],
+ );
+
+ assert_eq!(
+ super::parse("{{ NONE }}", &s).unwrap(),
+ vec![Node::Expr(Ws(false, false), Expr::Path(vec!["NONE"]))],
+ );
+ }
+
+ #[test]
+ fn test_parse_path() {
+ let s = Syntax::default();
+
+ assert_eq!(
+ super::parse("{{ None }}", &s).unwrap(),
+ vec![Node::Expr(Ws(false, false), Expr::Path(vec!["None"]))],
+ );
+ assert_eq!(
+ super::parse("{{ Some(123) }}", &s).unwrap(),
+ vec![Node::Expr(
+ Ws(false, false),
+ Expr::Call(
+ Box::new(Expr::Path(vec!["Some"])),
+ vec![Expr::NumLit("123")]
+ ),
+ )],
+ );
+
+ assert_eq!(
+ super::parse("{{ Ok(123) }}", &s).unwrap(),
+ vec![Node::Expr(
+ Ws(false, false),
+ Expr::Call(Box::new(Expr::Path(vec!["Ok"])), vec![Expr::NumLit("123")]),
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ Err(123) }}", &s).unwrap(),
+ vec![Node::Expr(
+ Ws(false, false),
+ Expr::Call(Box::new(Expr::Path(vec!["Err"])), vec![Expr::NumLit("123")]),
+ )],
+ );
+ }
+
+ #[test]
+ fn test_parse_var_call() {
+ assert_eq!(
+ super::parse("{{ function(\"123\", 3) }}", &Syntax::default()).unwrap(),
+ vec![Node::Expr(
+ Ws(false, false),
+ Expr::Call(
+ Box::new(Expr::Var("function")),
+ vec![Expr::StrLit("123"), Expr::NumLit("3")]
+ ),
+ )],
+ );
+ }
+
+ #[test]
+ fn test_parse_path_call() {
+ let s = Syntax::default();
+
+ assert_eq!(
+ super::parse("{{ Option::None }}", &s).unwrap(),
+ vec![Node::Expr(
+ Ws(false, false),
+ Expr::Path(vec!["Option", "None"])
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ Option::Some(123) }}", &s).unwrap(),
+ vec![Node::Expr(
+ Ws(false, false),
+ Expr::Call(
+ Box::new(Expr::Path(vec!["Option", "Some"])),
+ vec![Expr::NumLit("123")],
+ ),
+ )],
+ );
+
+ assert_eq!(
+ super::parse("{{ self::function(\"123\", 3) }}", &s).unwrap(),
+ vec![Node::Expr(
+ Ws(false, false),
+ Expr::Call(
+ Box::new(Expr::Path(vec!["self", "function"])),
+ vec![Expr::StrLit("123"), Expr::NumLit("3")],
+ ),
+ )],
+ );
+ }
+
+ #[test]
+ fn test_parse_root_path() {
+ let syntax = Syntax::default();
+ assert_eq!(
+ super::parse("{{ std::string::String::new() }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(false, false),
+ Expr::Call(
+ Box::new(Expr::Path(vec!["std", "string", "String", "new"])),
+ vec![]
+ ),
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ ::std::string::String::new() }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(false, false),
+ Expr::Call(
+ Box::new(Expr::Path(vec!["", "std", "string", "String", "new"])),
+ vec![]
+ ),
+ )],
+ );
+ }
+
+ #[test]
+ fn change_delimiters_parse_filter() {
+ let syntax = Syntax {
+ expr_start: "{~",
+ expr_end: "~}",
+ ..Syntax::default()
+ };
+
+ super::parse("{~ strvar|e ~}", &syntax).unwrap();
+ }
+
+ #[test]
+ fn test_precedence() {
+ use Expr::*;
+ let syntax = Syntax::default();
+ assert_eq!(
+ super::parse("{{ a + b == c }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(false, false),
+ BinOp(
+ "==",
+ BinOp("+", Var("a").into(), Var("b").into()).into(),
+ Var("c").into(),
+ )
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ a + b * c - d / e }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(false, false),
+ BinOp(
+ "-",
+ BinOp(
+ "+",
+ Var("a").into(),
+ BinOp("*", Var("b").into(), Var("c").into()).into(),
+ )
+ .into(),
+ BinOp("/", Var("d").into(), Var("e").into()).into(),
+ )
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ a * (b + c) / -d }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(false, false),
+ BinOp(
+ "/",
+ BinOp(
+ "*",
+ Var("a").into(),
+ Group(BinOp("+", Var("b").into(), Var("c").into()).into()).into()
+ )
+ .into(),
+ Unary("-", Var("d").into()).into()
+ )
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ a || b && c || d && e }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(false, false),
+ BinOp(
+ "||",
+ BinOp(
+ "||",
+ Var("a").into(),
+ BinOp("&&", Var("b").into(), Var("c").into()).into(),
+ )
+ .into(),
+ BinOp("&&", Var("d").into(), Var("e").into()).into(),
+ )
+ )],
+ );
+ }
+
+ #[test]
+ fn test_associativity() {
+ use Expr::*;
+ let syntax = Syntax::default();
+ assert_eq!(
+ super::parse("{{ a + b + c }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(false, false),
+ BinOp(
+ "+",
+ BinOp("+", Var("a").into(), Var("b").into()).into(),
+ Var("c").into()
+ )
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ a * b * c }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(false, false),
+ BinOp(
+ "*",
+ BinOp("*", Var("a").into(), Var("b").into()).into(),
+ Var("c").into()
+ )
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ a && b && c }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(false, false),
+ BinOp(
+ "&&",
+ BinOp("&&", Var("a").into(), Var("b").into()).into(),
+ Var("c").into()
+ )
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ a + b - c + d }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(false, false),
+ BinOp(
+ "+",
+ BinOp(
+ "-",
+ BinOp("+", Var("a").into(), Var("b").into()).into(),
+ Var("c").into()
+ )
+ .into(),
+ Var("d").into()
+ )
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ a == b != c > d > e == f }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(false, false),
+ BinOp(
+ "==",
+ BinOp(
+ ">",
+ BinOp(
+ ">",
+ BinOp(
+ "!=",
+ BinOp("==", Var("a").into(), Var("b").into()).into(),
+ Var("c").into()
+ )
+ .into(),
+ Var("d").into()
+ )
+ .into(),
+ Var("e").into()
+ )
+ .into(),
+ Var("f").into()
+ )
+ )],
+ );
+ }
+
+ #[test]
+ fn test_odd_calls() {
+ use Expr::*;
+ let syntax = Syntax::default();
+ assert_eq!(
+ super::parse("{{ a[b](c) }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(false, false),
+ Call(
+ Box::new(Index(Box::new(Var("a")), Box::new(Var("b")))),
+ vec![Var("c")],
+ ),
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ (a + b)(c) }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(false, false),
+ Call(
+ Box::new(Group(Box::new(BinOp(
+ "+",
+ Box::new(Var("a")),
+ Box::new(Var("b"))
+ )))),
+ vec![Var("c")],
+ ),
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ a + b(c) }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(false, false),
+ BinOp(
+ "+",
+ Box::new(Var("a")),
+ Box::new(Call(Box::new(Var("b")), vec![Var("c")])),
+ ),
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ (-a)(b) }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(false, false),
+ Call(
+ Box::new(Group(Box::new(Unary("-", Box::new(Var("a")))))),
+ vec![Var("b")],
+ ),
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ -a(b) }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(false, false),
+ Unary("-", Box::new(Call(Box::new(Var("a")), vec![Var("b")])),),
+ )],
+ );
+ }
+
+ #[test]
+ fn test_parse_comments() {
+ let s = &Syntax::default();
+
+ assert_eq!(
+ super::parse("{##}", s).unwrap(),
+ vec![Node::Comment(Ws(false, false))],
+ );
+ assert_eq!(
+ super::parse("{#- #}", s).unwrap(),
+ vec![Node::Comment(Ws(true, false))],
+ );
+ assert_eq!(
+ super::parse("{# -#}", s).unwrap(),
+ vec![Node::Comment(Ws(false, true))],
+ );
+ assert_eq!(
+ super::parse("{#--#}", s).unwrap(),
+ vec![Node::Comment(Ws(true, true))],
+ );
+
+ assert_eq!(
+ super::parse("{#- foo\n bar -#}", s).unwrap(),
+ vec![Node::Comment(Ws(true, true))],
+ );
+ assert_eq!(
+ super::parse("{#- foo\n {#- bar\n -#} baz -#}", s).unwrap(),
+ vec![Node::Comment(Ws(true, true))],
+ );
+ assert_eq!(
+ super::parse("{# foo {# bar #} {# {# baz #} qux #} #}", s).unwrap(),
+ vec![Node::Comment(Ws(false, false))],
+ );
+ }
+
+ #[test]
+ fn test_parse_tuple() {
+ use super::Expr::*;
+ let syntax = Syntax::default();
+ assert_eq!(
+ super::parse("{{ () }}", &syntax).unwrap(),
+ vec![Node::Expr(Ws(false, false), Tuple(vec![]),)],
+ );
+ assert_eq!(
+ super::parse("{{ (1) }}", &syntax).unwrap(),
+ vec![Node::Expr(Ws(false, false), Group(Box::new(NumLit("1"))),)],
+ );
+ assert_eq!(
+ super::parse("{{ (1,) }}", &syntax).unwrap(),
+ vec![Node::Expr(Ws(false, false), Tuple(vec![NumLit("1")]),)],
+ );
+ assert_eq!(
+ super::parse("{{ (1, ) }}", &syntax).unwrap(),
+ vec![Node::Expr(Ws(false, false), Tuple(vec![NumLit("1")]),)],
+ );
+ assert_eq!(
+ super::parse("{{ (1 ,) }}", &syntax).unwrap(),
+ vec![Node::Expr(Ws(false, false), Tuple(vec![NumLit("1")]),)],
+ );
+ assert_eq!(
+ super::parse("{{ (1 , ) }}", &syntax).unwrap(),
+ vec![Node::Expr(Ws(false, false), Tuple(vec![NumLit("1")]),)],
+ );
+ assert_eq!(
+ super::parse("{{ (1, 2) }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(false, false),
+ Tuple(vec![NumLit("1"), NumLit("2")]),
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ (1, 2,) }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(false, false),
+ Tuple(vec![NumLit("1"), NumLit("2")]),
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ (1, 2, 3) }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(false, false),
+ Tuple(vec![NumLit("1"), NumLit("2"), NumLit("3")]),
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ ()|abs }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(false, false),
+ Filter("abs", vec![Tuple(vec![])]),
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ () | abs }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(false, false),
+ BinOp("|", Box::new(Tuple(vec![])), Box::new(Var("abs"))),
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ (1)|abs }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(false, false),
+ Filter("abs", vec![Group(Box::new(NumLit("1")))]),
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ (1) | abs }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(false, false),
+ BinOp(
+ "|",
+ Box::new(Group(Box::new(NumLit("1")))),
+ Box::new(Var("abs"))
+ ),
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ (1,)|abs }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(false, false),
+ Filter("abs", vec![Tuple(vec![NumLit("1")])]),
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ (1,) | abs }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(false, false),
+ BinOp(
+ "|",
+ Box::new(Tuple(vec![NumLit("1")])),
+ Box::new(Var("abs"))
+ ),
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ (1, 2)|abs }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(false, false),
+ Filter("abs", vec![Tuple(vec![NumLit("1"), NumLit("2")])]),
+ )],
+ );
+ assert_eq!(
+ super::parse("{{ (1, 2) | abs }}", &syntax).unwrap(),
+ vec![Node::Expr(
+ Ws(false, false),
+ BinOp(
+ "|",
+ Box::new(Tuple(vec![NumLit("1"), NumLit("2")])),
+ Box::new(Var("abs"))
+ ),
+ )],
+ );
+ }
+}
diff --git a/third_party/rust/askama_shared/templates/a.html b/third_party/rust/askama_shared/templates/a.html
new file mode 100644
index 0000000000..257cc5642c
--- /dev/null
+++ b/third_party/rust/askama_shared/templates/a.html
@@ -0,0 +1 @@
+foo
diff --git a/third_party/rust/askama_shared/templates/b.html b/third_party/rust/askama_shared/templates/b.html
new file mode 100644
index 0000000000..5716ca5987
--- /dev/null
+++ b/third_party/rust/askama_shared/templates/b.html
@@ -0,0 +1 @@
+bar
diff --git a/third_party/rust/askama_shared/templates/sub/b.html b/third_party/rust/askama_shared/templates/sub/b.html
new file mode 100644
index 0000000000..5716ca5987
--- /dev/null
+++ b/third_party/rust/askama_shared/templates/sub/b.html
@@ -0,0 +1 @@
+bar
diff --git a/third_party/rust/askama_shared/templates/sub/c.html b/third_party/rust/askama_shared/templates/sub/c.html
new file mode 100644
index 0000000000..76018072e0
--- /dev/null
+++ b/third_party/rust/askama_shared/templates/sub/c.html
@@ -0,0 +1 @@
+baz
diff --git a/third_party/rust/askama_shared/templates/sub/sub1/d.html b/third_party/rust/askama_shared/templates/sub/sub1/d.html
new file mode 100644
index 0000000000..fa11a6a9c5
--- /dev/null
+++ b/third_party/rust/askama_shared/templates/sub/sub1/d.html
@@ -0,0 +1 @@
+echo