Diffstat (limited to 'src/tools/rust-analyzer/crates/test-utils')
-rw-r--r--  src/tools/rust-analyzer/crates/test-utils/Cargo.toml            |  19
-rw-r--r--  src/tools/rust-analyzer/crates/test-utils/src/assert_linear.rs  | 112
-rw-r--r--  src/tools/rust-analyzer/crates/test-utils/src/bench_fixture.rs  |  45
-rw-r--r--  src/tools/rust-analyzer/crates/test-utils/src/fixture.rs        | 409
-rw-r--r--  src/tools/rust-analyzer/crates/test-utils/src/lib.rs            | 500
-rw-r--r--  src/tools/rust-analyzer/crates/test-utils/src/minicore.rs       | 669
6 files changed, 1754 insertions(+), 0 deletions(-)
diff --git a/src/tools/rust-analyzer/crates/test-utils/Cargo.toml b/src/tools/rust-analyzer/crates/test-utils/Cargo.toml
new file mode 100644
index 000000000..cceafe04e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/test-utils/Cargo.toml
@@ -0,0 +1,19 @@
+[package]
+name = "test-utils"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+# Avoid adding deps here; this crate is widely used in tests and should compile fast!
+dissimilar = "1.0.4"
+text-size = "1.1.0"
+rustc-hash = "1.1.0"
+
+stdx = { path = "../stdx", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
diff --git a/src/tools/rust-analyzer/crates/test-utils/src/assert_linear.rs b/src/tools/rust-analyzer/crates/test-utils/src/assert_linear.rs
new file mode 100644
index 000000000..24502ddb4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/test-utils/src/assert_linear.rs
@@ -0,0 +1,112 @@
+//! Checks that a set of measurements looks like a linear function rather than
+//! like a quadratic function. Algorithm:
+//!
+//! 1. Linearly scale input to be in [0; 1)
+//! 2. Using linear regression, compute the best linear function approximating
+//! the input.
+//! 3. Compute RMSE and maximal absolute error.
+//! 4. Check that errors are within tolerances and that the constant term is not
+//! too negative.
+//!
+//! Ideally, we should use a proper "model selection" to directly compare
+//! quadratic and linear models, but that sounds rather complicated:
+//!
+//! https://stats.stackexchange.com/questions/21844/selecting-best-model-based-on-linear-quadratic-and-cubic-fit-of-data
+//!
+//! We might get false positives on a VM, but never false negatives. So, if the
+//! first round fails, we repeat the ordeal three more times and fail only if
+//! every time there's a fault.
+use stdx::format_to;
+
+#[derive(Default)]
+pub struct AssertLinear {
+ rounds: Vec<Round>,
+}
+
+#[derive(Default)]
+struct Round {
+ samples: Vec<(f64, f64)>,
+ plot: String,
+ linear: bool,
+}
+
+impl AssertLinear {
+ pub fn next_round(&mut self) -> bool {
+ if let Some(round) = self.rounds.last_mut() {
+ round.finish();
+ }
+ if self.rounds.iter().any(|it| it.linear) || self.rounds.len() == 4 {
+ return false;
+ }
+ self.rounds.push(Round::default());
+ true
+ }
+
+ pub fn sample(&mut self, x: f64, y: f64) {
+ self.rounds.last_mut().unwrap().samples.push((x, y));
+ }
+}
+
+impl Drop for AssertLinear {
+ fn drop(&mut self) {
+ assert!(!self.rounds.is_empty());
+ if self.rounds.iter().all(|it| !it.linear) {
+ for round in &self.rounds {
+ eprintln!("\n{}", round.plot);
+ }
+ panic!("Doesn't look linear!");
+ }
+ }
+}
+
+impl Round {
+ fn finish(&mut self) {
+ let (mut xs, mut ys): (Vec<_>, Vec<_>) = self.samples.iter().copied().unzip();
+ normalize(&mut xs);
+ normalize(&mut ys);
+ let xy = xs.iter().copied().zip(ys.iter().copied());
+
+ // Linear regression: finding a and b to fit y = a + b*x.
+
+ let mean_x = mean(&xs);
+ let mean_y = mean(&ys);
+
+ let b = {
+ let mut num = 0.0;
+ let mut denom = 0.0;
+ for (x, y) in xy.clone() {
+ num += (x - mean_x) * (y - mean_y);
+ denom += (x - mean_x).powi(2);
+ }
+ num / denom
+ };
+
+ let a = mean_y - b * mean_x;
+
+ self.plot = format!("y_pred = {:.3} + {:.3} * x\n\nx y y_pred\n", a, b);
+
+ let mut se = 0.0;
+ let mut max_error = 0.0f64;
+ for (x, y) in xy {
+ let y_pred = a + b * x;
+ se += (y - y_pred).powi(2);
+ max_error = max_error.max((y_pred - y).abs());
+
+ format_to!(self.plot, "{:.3} {:.3} {:.3}\n", x, y, y_pred);
+ }
+
+ let rmse = (se / xs.len() as f64).sqrt();
+ format_to!(self.plot, "\nrmse = {:.3} max error = {:.3}", rmse, max_error);
+
+ self.linear = rmse < 0.05 && max_error < 0.1 && a > -0.1;
+
+ fn normalize(xs: &mut Vec<f64>) {
+ let max = xs.iter().copied().max_by(|a, b| a.partial_cmp(b).unwrap()).unwrap();
+ xs.iter_mut().for_each(|it| *it /= max);
+ }
+
+ fn mean(xs: &[f64]) -> f64 {
+ xs.iter().copied().sum::<f64>() / (xs.len() as f64)
+ }
+ }
+}
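
The intended driving pattern is a loop over `next_round` with `sample` calls inside; a minimal sketch, assuming a hypothetical `measure_ms` workload:

```rust
use test_utils::AssertLinear;

// Hedged sketch: `measure_ms` is a hypothetical closure that runs the workload
// at size `n` and returns the elapsed time in milliseconds.
fn assert_workload_is_linear(measure_ms: impl Fn(u64) -> f64) {
    let mut al = AssertLinear::default();
    // `next_round` finishes the previous round and allows up to four retries,
    // stopping early as soon as one round looks linear.
    while al.next_round() {
        for i in 1..=10u64 {
            let n = i * 10_000;
            al.sample(n as f64, measure_ms(n));
        }
    }
    // Dropping `al` prints the collected plots and panics if no round was linear.
}
```
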
diff --git a/src/tools/rust-analyzer/crates/test-utils/src/bench_fixture.rs b/src/tools/rust-analyzer/crates/test-utils/src/bench_fixture.rs
new file mode 100644
index 000000000..979156263
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/test-utils/src/bench_fixture.rs
@@ -0,0 +1,45 @@
+//! Generates large snippets of Rust code for usage in the benchmarks.
+
+use std::fs;
+
+use stdx::format_to;
+
+use crate::project_root;
+
+pub fn big_struct() -> String {
+ let n = 1_000;
+ big_struct_n(n)
+}
+
+pub fn big_struct_n(n: u32) -> String {
+ let mut buf = "pub struct RegisterBlock {".to_string();
+ for i in 0..n {
+ format_to!(buf, " /// Doc comment for {}.\n", i);
+ format_to!(buf, " pub s{}: S{},\n", i, i);
+ }
+ buf.push_str("}\n\n");
+ for i in 0..n {
+ format_to!(
+ buf,
+ "
+
+#[repr(transparent)]
+struct S{} {{
+ field: u32,
+}}",
+ i
+ );
+ }
+
+ buf
+}
+
+pub fn glorious_old_parser() -> String {
+ let path = project_root().join("bench_data/glorious_old_parser");
+ fs::read_to_string(&path).unwrap()
+}
+
+pub fn numerous_macro_rules() -> String {
+ let path = project_root().join("bench_data/numerous_macro_rules");
+ fs::read_to_string(&path).unwrap()
+}
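
These generators are meant to feed the benchmarking helpers in this crate's root (`skip_slow_tests`, `bench`); a hedged sketch with a stand-in workload:

```rust
use test_utils::{bench, bench_fixture, skip_slow_tests};

#[test]
fn benchmark_big_struct_fixture() {
    if skip_slow_tests() {
        return;
    }
    let data = bench_fixture::big_struct();

    // The guard returned by `bench` prints the elapsed time when dropped.
    let _b = bench("scan big struct");
    // Stand-in workload; a real benchmark would parse or analyze `data`.
    let lines = data.lines().count();
    assert!(lines > 1_000);
}
```
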
diff --git a/src/tools/rust-analyzer/crates/test-utils/src/fixture.rs b/src/tools/rust-analyzer/crates/test-utils/src/fixture.rs
new file mode 100644
index 000000000..8c806e792
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/test-utils/src/fixture.rs
@@ -0,0 +1,409 @@
+//! Defines `Fixture` -- a convenient way to describe the initial state of
+//! the rust-analyzer database from a single string.
+//!
+//! Fixtures are strings containing rust source code with optional metadata.
+//! A fixture without metadata is parsed into a single source file.
+//! Use this to test functionality local to one file.
+//!
+//! Simple Example:
+//! ```
+//! r#"
+//! fn main() {
+//! println!("Hello World")
+//! }
+//! "#
+//! ```
+//!
+//! Metadata can be added to a fixture after a `//-` comment.
+//! The basic form is specifying filenames,
+//! which is also how to define multiple files in a single test fixture
+//!
+//! Example using two files in the same crate:
+//! ```
+//! "
+//! //- /main.rs
+//! mod foo;
+//! fn main() {
+//! foo::bar();
+//! }
+//!
+//! //- /foo.rs
+//! pub fn bar() {}
+//! "
+//! ```
+//!
+//! Example using two crates with one file each, with one crate depending on the other:
+//! ```
+//! r#"
+//! //- /main.rs crate:a deps:b
+//! fn main() {
+//! b::foo();
+//! }
+//! //- /lib.rs crate:b
+//! pub fn b() {
+//! println!("Hello World")
+//! }
+//! "#
+//! ```
+//!
+//! Metadata allows specifying all settings and variables
+//! that are available in a real rust project:
+//! - crate names via `crate:cratename`
+//! - dependencies via `deps:dep1,dep2`
+//! - configuration settings via `cfg:dbg=false,opt_level=2`
+//! - environment variables via `env:PATH=/bin,RUST_LOG=debug`
+//!
+//! Example using all available metadata:
+//! ```
+//! "
+//! //- /lib.rs crate:foo deps:bar,baz cfg:foo=a,bar=b env:OUTDIR=path/to,OTHER=foo
+//! fn insert_source_code_here() {}
+//! "
+//! ```
+
+use rustc_hash::FxHashMap;
+use stdx::trim_indent;
+
+#[derive(Debug, Eq, PartialEq)]
+pub struct Fixture {
+ pub path: String,
+ pub text: String,
+ pub krate: Option<String>,
+ pub deps: Vec<String>,
+ pub extern_prelude: Option<Vec<String>>,
+ pub cfg_atoms: Vec<String>,
+ pub cfg_key_values: Vec<(String, String)>,
+ pub edition: Option<String>,
+ pub env: FxHashMap<String, String>,
+ pub introduce_new_source_root: Option<String>,
+}
+
+pub struct MiniCore {
+ activated_flags: Vec<String>,
+ valid_flags: Vec<String>,
+}
+
+impl Fixture {
+ /// Parses text which looks like this:
+ ///
+ /// ```not_rust
+ /// //- some meta
+ /// line 1
+ /// line 2
+ /// //- other meta
+ /// ```
+ ///
+ /// A fixture can also start with a proc_macros and minicore declaration (in that order):
+ ///
+ /// ```
+ /// //- proc_macros: identity
+ /// //- minicore: sized
+ /// ```
+ ///
+ /// That will include predefined proc macros and a subset of `libcore` in the fixture; see
+ /// `minicore.rs` for what's available.
+ pub fn parse(ra_fixture: &str) -> (Option<MiniCore>, Vec<String>, Vec<Fixture>) {
+ let fixture = trim_indent(ra_fixture);
+ let mut fixture = fixture.as_str();
+ let mut mini_core = None;
+ let mut res: Vec<Fixture> = Vec::new();
+ let mut test_proc_macros = vec![];
+
+ if fixture.starts_with("//- proc_macros:") {
+ let first_line = fixture.split_inclusive('\n').next().unwrap();
+ test_proc_macros = first_line
+ .strip_prefix("//- proc_macros:")
+ .unwrap()
+ .split(',')
+ .map(|it| it.trim().to_string())
+ .collect();
+ fixture = &fixture[first_line.len()..];
+ }
+
+ if fixture.starts_with("//- minicore:") {
+ let first_line = fixture.split_inclusive('\n').next().unwrap();
+ mini_core = Some(MiniCore::parse(first_line));
+ fixture = &fixture[first_line.len()..];
+ }
+
+ let default = if fixture.contains("//-") { None } else { Some("//- /main.rs") };
+
+ for (ix, line) in default.into_iter().chain(fixture.split_inclusive('\n')).enumerate() {
+ if line.contains("//-") {
+ assert!(
+ line.starts_with("//-"),
+ "Metadata line {} has invalid indentation. \
+ All metadata lines need to have the same indentation.\n\
+ The offending line: {:?}",
+ ix,
+ line
+ );
+ }
+
+ if line.starts_with("//-") {
+ let meta = Fixture::parse_meta_line(line);
+ res.push(meta);
+ } else {
+ if line.starts_with("// ")
+ && line.contains(':')
+ && !line.contains("::")
+ && !line.contains('.')
+ && line.chars().all(|it| !it.is_uppercase())
+ {
+ panic!("looks like invalid metadata line: {:?}", line);
+ }
+
+ if let Some(entry) = res.last_mut() {
+ entry.text.push_str(line);
+ }
+ }
+ }
+
+ (mini_core, test_proc_macros, res)
+ }
+
+ //- /lib.rs crate:foo deps:bar,baz cfg:foo=a,bar=b env:OUTDIR=path/to,OTHER=foo
+ fn parse_meta_line(meta: &str) -> Fixture {
+ assert!(meta.starts_with("//-"));
+ let meta = meta["//-".len()..].trim();
+ let components = meta.split_ascii_whitespace().collect::<Vec<_>>();
+
+ let path = components[0].to_string();
+ assert!(path.starts_with('/'), "fixture path does not start with `/`: {:?}", path);
+
+ let mut krate = None;
+ let mut deps = Vec::new();
+ let mut extern_prelude = None;
+ let mut edition = None;
+ let mut cfg_atoms = Vec::new();
+ let mut cfg_key_values = Vec::new();
+ let mut env = FxHashMap::default();
+ let mut introduce_new_source_root = None;
+ for component in components[1..].iter() {
+ let (key, value) = component
+ .split_once(':')
+ .unwrap_or_else(|| panic!("invalid meta line: {:?}", meta));
+ match key {
+ "crate" => krate = Some(value.to_string()),
+ "deps" => deps = value.split(',').map(|it| it.to_string()).collect(),
+ "extern-prelude" => {
+ if value.is_empty() {
+ extern_prelude = Some(Vec::new());
+ } else {
+ extern_prelude =
+ Some(value.split(',').map(|it| it.to_string()).collect::<Vec<_>>());
+ }
+ }
+ "edition" => edition = Some(value.to_string()),
+ "cfg" => {
+ for entry in value.split(',') {
+ match entry.split_once('=') {
+ Some((k, v)) => cfg_key_values.push((k.to_string(), v.to_string())),
+ None => cfg_atoms.push(entry.to_string()),
+ }
+ }
+ }
+ "env" => {
+ for key in value.split(',') {
+ if let Some((k, v)) = key.split_once('=') {
+ env.insert(k.into(), v.into());
+ }
+ }
+ }
+ "new_source_root" => introduce_new_source_root = Some(value.to_string()),
+ _ => panic!("bad component: {:?}", component),
+ }
+ }
+
+ for prelude_dep in extern_prelude.iter().flatten() {
+ assert!(
+ deps.contains(prelude_dep),
+ "extern-prelude {:?} must be a subset of deps {:?}",
+ extern_prelude,
+ deps
+ );
+ }
+
+ Fixture {
+ path,
+ text: String::new(),
+ krate,
+ deps,
+ extern_prelude,
+ cfg_atoms,
+ cfg_key_values,
+ edition,
+ env,
+ introduce_new_source_root,
+ }
+ }
+}
+
+impl MiniCore {
+ fn has_flag(&self, flag: &str) -> bool {
+ self.activated_flags.iter().any(|it| it == flag)
+ }
+
+ #[track_caller]
+ fn assert_valid_flag(&self, flag: &str) {
+ if !self.valid_flags.iter().any(|it| it == flag) {
+ panic!("invalid flag: {:?}, valid flags: {:?}", flag, self.valid_flags);
+ }
+ }
+
+ fn parse(line: &str) -> MiniCore {
+ let mut res = MiniCore { activated_flags: Vec::new(), valid_flags: Vec::new() };
+
+ let line = line.strip_prefix("//- minicore:").unwrap().trim();
+ for entry in line.split(", ") {
+ if res.has_flag(entry) {
+ panic!("duplicate minicore flag: {:?}", entry);
+ }
+ res.activated_flags.push(entry.to_string());
+ }
+
+ res
+ }
+
+ /// Strips the parts of minicore.rs that are gated behind inactive flags.
+ ///
+ /// This is probably over-engineered to support flag dependencies.
+ pub fn source_code(mut self) -> String {
+ let mut buf = String::new();
+ let raw_mini_core = include_str!("./minicore.rs");
+ let mut lines = raw_mini_core.split_inclusive('\n');
+
+ let mut parsing_flags = false;
+ let mut implications = Vec::new();
+
+ // Parse `//!` preamble and extract flags and dependencies.
+ for line in lines.by_ref() {
+ let line = match line.strip_prefix("//!") {
+ Some(it) => it,
+ None => {
+ assert!(line.trim().is_empty());
+ break;
+ }
+ };
+
+ if parsing_flags {
+ let (flag, deps) = line.split_once(':').unwrap();
+ let flag = flag.trim();
+ self.valid_flags.push(flag.to_string());
+ for dep in deps.split(", ") {
+ let dep = dep.trim();
+ if !dep.is_empty() {
+ self.assert_valid_flag(dep);
+ implications.push((flag, dep));
+ }
+ }
+ }
+
+ if line.contains("Available flags:") {
+ parsing_flags = true;
+ }
+ }
+
+ for flag in &self.activated_flags {
+ self.assert_valid_flag(flag);
+ }
+
+ // Fixed point loop to compute transitive closure of flags.
+ loop {
+ let mut changed = false;
+ for &(u, v) in &implications {
+ if self.has_flag(u) && !self.has_flag(v) {
+ self.activated_flags.push(v.to_string());
+ changed = true;
+ }
+ }
+ if !changed {
+ break;
+ }
+ }
+
+ let mut active_regions = Vec::new();
+ let mut seen_regions = Vec::new();
+ for line in lines {
+ let trimmed = line.trim();
+ if let Some(region) = trimmed.strip_prefix("// region:") {
+ active_regions.push(region);
+ continue;
+ }
+ if let Some(region) = trimmed.strip_prefix("// endregion:") {
+ let prev = active_regions.pop().unwrap();
+ assert_eq!(prev, region);
+ continue;
+ }
+
+ let mut line_region = false;
+ if let Some(idx) = trimmed.find("// :") {
+ line_region = true;
+ active_regions.push(&trimmed[idx + "// :".len()..]);
+ }
+
+ let mut keep = true;
+ for &region in &active_regions {
+ assert!(
+ !region.starts_with(' '),
+ "region marker starts with a space: {:?}",
+ region
+ );
+ self.assert_valid_flag(region);
+ seen_regions.push(region);
+ keep &= self.has_flag(region);
+ }
+
+ if keep {
+ buf.push_str(line);
+ }
+ if line_region {
+ active_regions.pop().unwrap();
+ }
+ }
+
+ for flag in &self.valid_flags {
+ if !seen_regions.iter().any(|it| it == flag) {
+ panic!("unused minicore flag: {:?}", flag);
+ }
+ }
+ buf
+ }
+}
+
+#[test]
+#[should_panic]
+fn parse_fixture_checks_further_indented_metadata() {
+ Fixture::parse(
+ r"
+ //- /lib.rs
+ mod bar;
+
+ fn foo() {}
+ //- /bar.rs
+ pub fn baz() {}
+ ",
+ );
+}
+
+#[test]
+fn parse_fixture_gets_full_meta() {
+ let (mini_core, proc_macros, parsed) = Fixture::parse(
+ r#"
+//- proc_macros: identity
+//- minicore: coerce_unsized
+//- /lib.rs crate:foo deps:bar,baz cfg:foo=a,bar=b,atom env:OUTDIR=path/to,OTHER=foo
+mod m;
+"#,
+ );
+ assert_eq!(proc_macros, vec!["identity".to_string()]);
+ assert_eq!(mini_core.unwrap().activated_flags, vec!["coerce_unsized".to_string()]);
+ assert_eq!(1, parsed.len());
+
+ let meta = &parsed[0];
+ assert_eq!("mod m;\n", meta.text);
+
+ assert_eq!("foo", meta.krate.as_ref().unwrap());
+ assert_eq!("/lib.rs", meta.path);
+ assert_eq!(2, meta.env.len());
+}
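
The flag list in `minicore.rs` declares dependencies between flags (`iterators: iterator, fn` and `iterator: option`), and `MiniCore::source_code` closes over them transitively; a sketch of what that buys a fixture, with a hypothetical test name:

```rust
use test_utils::Fixture;

#[test]
fn minicore_flags_are_closed_transitively() {
    let (mini_core, _proc_macros, _files) = Fixture::parse(
        r#"
//- minicore: iterators
//- /main.rs
fn main() {}
"#,
    );
    // `iterators` transitively activates `iterator`, `fn`, and `option`,
    // so the expanded source keeps the corresponding regions.
    let core_src = mini_core.unwrap().source_code();
    assert!(core_src.contains("pub trait Iterator"));
    assert!(core_src.contains("pub enum Option"));
}
```
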
diff --git a/src/tools/rust-analyzer/crates/test-utils/src/lib.rs b/src/tools/rust-analyzer/crates/test-utils/src/lib.rs
new file mode 100644
index 000000000..8a9cfb6c2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/test-utils/src/lib.rs
@@ -0,0 +1,500 @@
+//! Assorted testing utilities.
+//!
+//! Most notable things are:
+//!
+//! * Rich text comparison, which outputs a diff.
+//! * Extracting markup (mainly, `$0` markers) out of fixture strings.
+//! * marks (see the eponymous module).
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+mod assert_linear;
+pub mod bench_fixture;
+mod fixture;
+
+use std::{
+ collections::BTreeMap,
+ env, fs,
+ path::{Path, PathBuf},
+};
+
+use profile::StopWatch;
+use stdx::is_ci;
+use text_size::{TextRange, TextSize};
+
+pub use dissimilar::diff as __diff;
+pub use rustc_hash::FxHashMap;
+
+pub use crate::{
+ assert_linear::AssertLinear,
+ fixture::{Fixture, MiniCore},
+};
+
+pub const CURSOR_MARKER: &str = "$0";
+pub const ESCAPED_CURSOR_MARKER: &str = "\\$0";
+
+/// Asserts that two strings are equal; otherwise it displays a rich diff between them.
+///
+/// The diff shows changes from the "original" left string to the "actual" right string.
+///
+/// All arguments from the 3rd one onwards are passed to the
+/// `eprintln!()` macro in case of text inequality.
+#[macro_export]
+macro_rules! assert_eq_text {
+ ($left:expr, $right:expr) => {
+ assert_eq_text!($left, $right,)
+ };
+ ($left:expr, $right:expr, $($tt:tt)*) => {{
+ let left = $left;
+ let right = $right;
+ if left != right {
+ if left.trim() == right.trim() {
+ std::eprintln!("Left:\n{:?}\n\nRight:\n{:?}\n\nWhitespace difference\n", left, right);
+ } else {
+ let diff = $crate::__diff(left, right);
+ std::eprintln!("Left:\n{}\n\nRight:\n{}\n\nDiff:\n{}\n", left, right, $crate::format_diff(diff));
+ }
+ std::eprintln!($($tt)*);
+ panic!("text differs");
+ }
+ }};
+}
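
A hedged usage sketch of the macro: everything after the two compared strings is forwarded to `eprintln!` before the panic, which helps identify the failing case:

```rust
use test_utils::assert_eq_text;

#[test]
fn formatting_is_stable() {
    // Hypothetical expected/actual values; on mismatch a colored diff is printed.
    let expected = "fn main() {}\n";
    let actual = "fn main() {}\n";
    assert_eq_text!(expected, actual, "while checking case {:?}", "empty main");
}
```
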
+
+/// Infallible version of `try_extract_offset()`.
+pub fn extract_offset(text: &str) -> (TextSize, String) {
+ match try_extract_offset(text) {
+ None => panic!("text should contain cursor marker"),
+ Some(result) => result,
+ }
+}
+
+/// Returns the offset of the first occurrence of `$0` marker and the copy of `text`
+/// without the marker.
+fn try_extract_offset(text: &str) -> Option<(TextSize, String)> {
+ let cursor_pos = text.find(CURSOR_MARKER)?;
+ let mut new_text = String::with_capacity(text.len() - CURSOR_MARKER.len());
+ new_text.push_str(&text[..cursor_pos]);
+ new_text.push_str(&text[cursor_pos + CURSOR_MARKER.len()..]);
+ let cursor_pos = TextSize::from(cursor_pos as u32);
+ Some((cursor_pos, new_text))
+}
+
+/// Infallible version of `try_extract_range()`.
+pub fn extract_range(text: &str) -> (TextRange, String) {
+ match try_extract_range(text) {
+ None => panic!("text should contain cursor marker"),
+ Some(result) => result,
+ }
+}
+
+/// Returns `TextRange` between the first two markers `$0...$0` and the copy
+/// of `text` without both of these markers.
+fn try_extract_range(text: &str) -> Option<(TextRange, String)> {
+ let (start, text) = try_extract_offset(text)?;
+ let (end, text) = try_extract_offset(&text)?;
+ Some((TextRange::new(start, end), text))
+}
+
+#[derive(Clone, Copy)]
+pub enum RangeOrOffset {
+ Range(TextRange),
+ Offset(TextSize),
+}
+
+impl RangeOrOffset {
+ pub fn expect_offset(self) -> TextSize {
+ match self {
+ RangeOrOffset::Offset(it) => it,
+ RangeOrOffset::Range(_) => panic!("expected an offset but got a range instead"),
+ }
+ }
+ pub fn expect_range(self) -> TextRange {
+ match self {
+ RangeOrOffset::Range(it) => it,
+ RangeOrOffset::Offset(_) => panic!("expected a range but got an offset"),
+ }
+ }
+ pub fn range_or_empty(self) -> TextRange {
+ match self {
+ RangeOrOffset::Range(range) => range,
+ RangeOrOffset::Offset(offset) => TextRange::empty(offset),
+ }
+ }
+}
+
+impl From<RangeOrOffset> for TextRange {
+ fn from(selection: RangeOrOffset) -> Self {
+ match selection {
+ RangeOrOffset::Range(it) => it,
+ RangeOrOffset::Offset(it) => TextRange::empty(it),
+ }
+ }
+}
+
+/// Extracts `TextRange` or `TextSize` depending on the number of `$0` markers
+/// found in `text`.
+///
+/// # Panics
+/// Panics if no `$0` marker is present in the `text`.
+pub fn extract_range_or_offset(text: &str) -> (RangeOrOffset, String) {
+ if let Some((range, text)) = try_extract_range(text) {
+ return (RangeOrOffset::Range(range), text);
+ }
+ let (offset, text) = extract_offset(text);
+ (RangeOrOffset::Offset(offset), text)
+}
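
A hedged sketch of the `$0` marker helpers above: a single marker yields an offset, a pair yields a range, and the returned string has the markers stripped (assumes `text-size` is available for the assertions):

```rust
use test_utils::{extract_offset, extract_range};
use text_size::{TextRange, TextSize};

#[test]
fn cursor_marker_helpers() {
    // A single `$0` marks an offset.
    let (offset, text) = extract_offset("fn main() { let x = $01; }");
    assert_eq!(offset, TextSize::from(20));
    assert_eq!(text, "fn main() { let x = 1; }");

    // A pair of `$0` markers delimits a range.
    let (range, text) = extract_range("fn $0main$0() {}");
    assert_eq!(range, TextRange::new(TextSize::from(3), TextSize::from(7)));
    assert_eq!(text, "fn main() {}");
}
```
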
+
+/// Extracts ranges marked with `<tag> </tag>` pairs from the `text`.
+pub fn extract_tags(mut text: &str, tag: &str) -> (Vec<(TextRange, Option<String>)>, String) {
+ let open = format!("<{}", tag);
+ let close = format!("</{}>", tag);
+ let mut ranges = Vec::new();
+ let mut res = String::new();
+ let mut stack = Vec::new();
+ loop {
+ match text.find('<') {
+ None => {
+ res.push_str(text);
+ break;
+ }
+ Some(i) => {
+ res.push_str(&text[..i]);
+ text = &text[i..];
+ if text.starts_with(&open) {
+ let close_open = text.find('>').unwrap();
+ let attr = text[open.len()..close_open].trim();
+ let attr = if attr.is_empty() { None } else { Some(attr.to_string()) };
+ text = &text[close_open + '>'.len_utf8()..];
+ let from = TextSize::of(&res);
+ stack.push((from, attr));
+ } else if text.starts_with(&close) {
+ text = &text[close.len()..];
+ let (from, attr) =
+ stack.pop().unwrap_or_else(|| panic!("unmatched </{}>", tag));
+ let to = TextSize::of(&res);
+ ranges.push((TextRange::new(from, to), attr));
+ } else {
+ res.push('<');
+ text = &text['<'.len_utf8()..];
+ }
+ }
+ }
+ }
+ assert!(stack.is_empty(), "unmatched <{}>", tag);
+ ranges.sort_by_key(|r| (r.0.start(), r.0.end()));
+ (ranges, res)
+}
+#[test]
+fn test_extract_tags() {
+ let (tags, text) = extract_tags(r#"<tag fn>fn <tag>main</tag>() {}</tag>"#, "tag");
+ let actual = tags.into_iter().map(|(range, attr)| (&text[range], attr)).collect::<Vec<_>>();
+ assert_eq!(actual, vec![("fn main() {}", Some("fn".into())), ("main", None),]);
+}
+
+/// Inserts `$0` marker into the `text` at `offset`.
+pub fn add_cursor(text: &str, offset: TextSize) -> String {
+ let offset: usize = offset.into();
+ let mut res = String::new();
+ res.push_str(&text[..offset]);
+ res.push_str("$0");
+ res.push_str(&text[offset..]);
+ res
+}
+
+/// Extracts `//^^^ some text` annotations.
+///
+/// A run of `^^^` can be arbitrarily long and points to the corresponding range
+/// in the line above.
+///
+/// The `// ^file text` syntax can be used to attach `text` to the entirety of
+/// the file.
+///
+/// Multiline string values are supported:
+///
+/// // ^^^ first line
+/// // | second line
+///
+/// Trailing whitespace is sometimes desired but usually stripped by the editor
+/// if at the end of a line, or incorrectly sized if followed by another
+/// annotation. In those cases the annotation can be explicitly ended with the
+/// `$` character.
+///
+/// // ^^^ trailing-ws-wanted $
+///
+/// Annotations point to the last line that actually was long enough for the
+/// range, not counting annotations themselves. So overlapping annotations are
+/// possible:
+/// ```no_run
+/// // stuff other stuff
+/// // ^^ 'st'
+/// // ^^^^^ 'stuff'
+/// // ^^^^^^^^^^^ 'other stuff'
+/// ```
+pub fn extract_annotations(text: &str) -> Vec<(TextRange, String)> {
+ let mut res = Vec::new();
+ // map from line length to beginning of last line that had that length
+ let mut line_start_map = BTreeMap::new();
+ let mut line_start: TextSize = 0.into();
+ let mut prev_line_annotations: Vec<(TextSize, usize)> = Vec::new();
+ for line in text.split_inclusive('\n') {
+ let mut this_line_annotations = Vec::new();
+ let line_length = if let Some((prefix, suffix)) = line.split_once("//") {
+ let ss_len = TextSize::of("//");
+ let annotation_offset = TextSize::of(prefix) + ss_len;
+ for annotation in extract_line_annotations(suffix.trim_end_matches('\n')) {
+ match annotation {
+ LineAnnotation::Annotation { mut range, content, file } => {
+ range += annotation_offset;
+ this_line_annotations.push((range.end(), res.len()));
+ let range = if file {
+ TextRange::up_to(TextSize::of(text))
+ } else {
+ let line_start = line_start_map.range(range.end()..).next().unwrap();
+
+ range + line_start.1
+ };
+ res.push((range, content));
+ }
+ LineAnnotation::Continuation { mut offset, content } => {
+ offset += annotation_offset;
+ let &(_, idx) = prev_line_annotations
+ .iter()
+ .find(|&&(off, _idx)| off == offset)
+ .unwrap();
+ res[idx].1.push('\n');
+ res[idx].1.push_str(&content);
+ res[idx].1.push('\n');
+ }
+ }
+ }
+ annotation_offset
+ } else {
+ TextSize::of(line)
+ };
+
+ line_start_map = line_start_map.split_off(&line_length);
+ line_start_map.insert(line_length, line_start);
+
+ line_start += TextSize::of(line);
+
+ prev_line_annotations = this_line_annotations;
+ }
+
+ res
+}
+
+enum LineAnnotation {
+ Annotation { range: TextRange, content: String, file: bool },
+ Continuation { offset: TextSize, content: String },
+}
+
+fn extract_line_annotations(mut line: &str) -> Vec<LineAnnotation> {
+ let mut res = Vec::new();
+ let mut offset: TextSize = 0.into();
+ let marker: fn(char) -> bool = if line.contains('^') { |c| c == '^' } else { |c| c == '|' };
+ while let Some(idx) = line.find(marker) {
+ offset += TextSize::try_from(idx).unwrap();
+ line = &line[idx..];
+
+ let mut len = line.chars().take_while(|&it| it == '^').count();
+ let mut continuation = false;
+ if len == 0 {
+ assert!(line.starts_with('|'));
+ continuation = true;
+ len = 1;
+ }
+ let range = TextRange::at(offset, len.try_into().unwrap());
+ let line_no_caret = &line[len..];
+ let end_marker = line_no_caret.find(|c| c == '$');
+ let next = line_no_caret.find(marker).map_or(line.len(), |it| it + len);
+
+ let cond = |end_marker| {
+ end_marker < next
+ && (line_no_caret[end_marker + 1..].is_empty()
+ || line_no_caret[end_marker + 1..]
+ .strip_prefix(|c: char| c.is_whitespace() || c == '^')
+ .is_some())
+ };
+ let mut content = match end_marker {
+ Some(end_marker) if cond(end_marker) => &line_no_caret[..end_marker],
+ _ => line_no_caret[..next - len].trim_end(),
+ };
+
+ let mut file = false;
+ if !continuation && content.starts_with("file") {
+ file = true;
+ content = &content["file".len()..];
+ }
+
+ let content = content.trim_start().to_string();
+
+ let annotation = if continuation {
+ LineAnnotation::Continuation { offset: range.end(), content }
+ } else {
+ LineAnnotation::Annotation { range, content, file }
+ };
+ res.push(annotation);
+
+ line = &line[next..];
+ offset += TextSize::try_from(next).unwrap();
+ }
+
+ res
+}
+
+#[test]
+fn test_extract_annotations_1() {
+ let text = stdx::trim_indent(
+ r#"
+fn main() {
+ let (x, y) = (9, 2);
+ //^ def ^ def
+ zoo + 1
+} //^^^ type:
+ // | i32
+
+// ^file
+ "#,
+ );
+ let res = extract_annotations(&text)
+ .into_iter()
+ .map(|(range, ann)| (&text[range], ann))
+ .collect::<Vec<_>>();
+
+ assert_eq!(
+ res[..3],
+ [("x", "def".into()), ("y", "def".into()), ("zoo", "type:\ni32\n".into())]
+ );
+ assert_eq!(res[3].0.len(), 115);
+}
+
+#[test]
+fn test_extract_annotations_2() {
+ let text = stdx::trim_indent(
+ r#"
+fn main() {
+ (x, y);
+ //^ a
+ // ^ b
+ //^^^^^^^^ c
+}"#,
+ );
+ let res = extract_annotations(&text)
+ .into_iter()
+ .map(|(range, ann)| (&text[range], ann))
+ .collect::<Vec<_>>();
+
+ assert_eq!(res, [("x", "a".into()), ("y", "b".into()), ("(x, y)", "c".into())]);
+}
+
+/// Returns `false` if slow tests should not run, otherwise returns `true` and
+/// also creates a file at `./target/.slow_tests_cookie` which serves as a flag
+/// that slow tests did run.
+pub fn skip_slow_tests() -> bool {
+ let should_skip = (std::env::var("CI").is_err() && std::env::var("RUN_SLOW_TESTS").is_err())
+ || std::env::var("SKIP_SLOW_TESTS").is_ok();
+ if should_skip {
+ eprintln!("ignoring slow test");
+ } else {
+ let path = project_root().join("./target/.slow_tests_cookie");
+ fs::write(&path, ".").unwrap();
+ }
+ should_skip
+}
+
+/// Returns the path to the root directory of the `rust-analyzer` project.
+pub fn project_root() -> PathBuf {
+ let dir = env!("CARGO_MANIFEST_DIR");
+ PathBuf::from(dir).parent().unwrap().parent().unwrap().to_owned()
+}
+
+pub fn format_diff(chunks: Vec<dissimilar::Chunk<'_>>) -> String {
+ let mut buf = String::new();
+ for chunk in chunks {
+ let formatted = match chunk {
+ dissimilar::Chunk::Equal(text) => text.into(),
+ dissimilar::Chunk::Delete(text) => format!("\x1b[41m{}\x1b[0m", text),
+ dissimilar::Chunk::Insert(text) => format!("\x1b[42m{}\x1b[0m", text),
+ };
+ buf.push_str(&formatted);
+ }
+ buf
+}
+
+/// Utility for writing benchmark tests.
+///
+/// A benchmark test looks like this:
+///
+/// ```
+/// #[test]
+/// fn benchmark_foo() {
+/// if skip_slow_tests() { return; }
+///
+/// let data = bench_fixture::some_fixture();
+/// let analysis = some_setup();
+///
+/// let hash = {
+/// let _b = bench("foo");
+/// actual_work(analysis)
+/// };
+/// assert_eq!(hash, 92);
+/// }
+/// ```
+///
+/// * We skip benchmarks by default, to save time.
+/// Ideal benchmark time is 800 -- 1500 ms in debug.
+/// * We don't count preparation as part of the benchmark.
+/// * The benchmark itself returns some kind of numeric hash.
+/// The hash is used as a sanity check that some code is actually run.
+/// Otherwise, it's too easy to win the benchmark by just doing nothing.
+pub fn bench(label: &'static str) -> impl Drop {
+ struct Bencher {
+ sw: StopWatch,
+ label: &'static str,
+ }
+
+ impl Drop for Bencher {
+ fn drop(&mut self) {
+ eprintln!("{}: {}", self.label, self.sw.elapsed());
+ }
+ }
+
+ Bencher { sw: StopWatch::start(), label }
+}
+
+/// Checks that the `file` has the specified `contents`. If that is not the
+/// case, updates the file and then fails the test.
+#[track_caller]
+pub fn ensure_file_contents(file: &Path, contents: &str) {
+ if let Err(()) = try_ensure_file_contents(file, contents) {
+ panic!("Some files were not up-to-date");
+ }
+}
+
+/// Checks that the `file` has the specified `contents`. If that is not the
+/// case, updates the file and returns an error.
+pub fn try_ensure_file_contents(file: &Path, contents: &str) -> Result<(), ()> {
+ match std::fs::read_to_string(file) {
+ Ok(old_contents) if normalize_newlines(&old_contents) == normalize_newlines(contents) => {
+ return Ok(());
+ }
+ _ => (),
+ }
+ let display_path = file.strip_prefix(&project_root()).unwrap_or(file);
+ eprintln!(
+ "\n\x1b[31;1merror\x1b[0m: {} was not up-to-date, updating\n",
+ display_path.display()
+ );
+ if is_ci() {
+ eprintln!(" NOTE: run `cargo test` locally and commit the updated files\n");
+ }
+ if let Some(parent) = file.parent() {
+ let _ = std::fs::create_dir_all(parent);
+ }
+ std::fs::write(file, contents).unwrap();
+ Err(())
+}
+
+fn normalize_newlines(s: &str) -> String {
+ s.replace("\r\n", "\n")
+}
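
A hedged sketch of the self-updating test pattern that `ensure_file_contents` supports; the generator and target path here are hypothetical:

```rust
use test_utils::{ensure_file_contents, project_root};

#[test]
fn generated_docs_are_fresh() {
    // Hypothetical codegen step producing some derived text.
    let generated = String::from("<!-- generated, do not edit -->\n");
    // Hypothetical target path inside the repository.
    let target = project_root().join("docs/generated.md");
    // If the file is stale, this rewrites it and fails the test, so the next
    // run passes and the updated file can be committed.
    ensure_file_contents(&target, &generated);
}
```
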
diff --git a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs
new file mode 100644
index 000000000..f48d1ec66
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs
@@ -0,0 +1,669 @@
+//! This is a fixture we use for tests that need lang items.
+//!
+//! We want to include the minimal subset of core for each test, so this file
+//! supports "conditional compilation". Tests use the following syntax to include minicore:
+//!
+//! //- minicore: flag1, flag2
+//!
+//! We then strip all the code marked with other flags.
+//!
+//! Available flags:
+//! sized:
+//! unsize: sized
+//! coerce_unsized: unsize
+//! slice:
+//! range:
+//! deref: sized
+//! deref_mut: deref
+//! index: sized
+//! fn:
+//! try:
+//! pin:
+//! future: pin
+//! option:
+//! result:
+//! iterator: option
+//! iterators: iterator, fn
+//! default: sized
+//! hash:
+//! clone: sized
+//! copy: clone
+//! from: sized
+//! eq: sized
+//! ord: eq, option
+//! derive:
+//! fmt: result
+//! bool_impl: option, fn
+//! add:
+//! as_ref: sized
+//! drop:
+
+pub mod marker {
+ // region:sized
+ #[lang = "sized"]
+ #[fundamental]
+ #[rustc_specialization_trait]
+ pub trait Sized {}
+ // endregion:sized
+
+ // region:unsize
+ #[lang = "unsize"]
+ pub trait Unsize<T: ?Sized> {}
+ // endregion:unsize
+
+ // region:copy
+ #[lang = "copy"]
+ pub trait Copy: Clone {}
+ // region:derive
+ #[rustc_builtin_macro]
+ pub macro Copy($item:item) {}
+ // endregion:derive
+
+ mod copy_impls {
+ use super::Copy;
+
+ macro_rules! impl_copy {
+ ($($t:ty)*) => {
+ $(
+ impl Copy for $t {}
+ )*
+ }
+ }
+
+ impl_copy! {
+ usize u8 u16 u32 u64 u128
+ isize i8 i16 i32 i64 i128
+ f32 f64
+ bool char
+ }
+
+ impl<T: ?Sized> Copy for *const T {}
+ impl<T: ?Sized> Copy for *mut T {}
+ impl<T: ?Sized> Copy for &T {}
+ }
+ // endregion:copy
+}
+
+// region:default
+pub mod default {
+ pub trait Default: Sized {
+ fn default() -> Self;
+ }
+ // region:derive
+ #[rustc_builtin_macro]
+ pub macro Default($item:item) {}
+ // endregion:derive
+}
+// endregion:default
+
+// region:hash
+pub mod hash {
+ pub trait Hasher {}
+
+ pub trait Hash {
+ fn hash<H: Hasher>(&self, state: &mut H);
+ }
+}
+// endregion:hash
+
+// region:clone
+pub mod clone {
+ #[lang = "clone"]
+ pub trait Clone: Sized {
+ fn clone(&self) -> Self;
+ }
+ // region:derive
+ #[rustc_builtin_macro]
+ pub macro Clone($item:item) {}
+ // endregion:derive
+}
+// endregion:clone
+
+pub mod convert {
+ // region:from
+ pub trait From<T>: Sized {
+ fn from(_: T) -> Self;
+ }
+ pub trait Into<T>: Sized {
+ fn into(self) -> T;
+ }
+
+ impl<T, U> Into<U> for T
+ where
+ U: From<T>,
+ {
+ fn into(self) -> U {
+ U::from(self)
+ }
+ }
+
+ impl<T> From<T> for T {
+ fn from(t: T) -> T {
+ t
+ }
+ }
+ // endregion:from
+
+ // region:as_ref
+ pub trait AsRef<T: ?Sized> {
+ fn as_ref(&self) -> &T;
+ }
+ // endregion:as_ref
+}
+
+pub mod ops {
+ // region:coerce_unsized
+ mod unsize {
+ use crate::marker::Unsize;
+
+ #[lang = "coerce_unsized"]
+ pub trait CoerceUnsized<T: ?Sized> {}
+
+ impl<'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a mut U> for &'a mut T {}
+ impl<'a, 'b: 'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a U> for &'b mut T {}
+ impl<'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*mut U> for &'a mut T {}
+ impl<'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*const U> for &'a mut T {}
+
+ impl<'a, 'b: 'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a U> for &'b T {}
+ impl<'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*const U> for &'a T {}
+
+ impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*mut U> for *mut T {}
+ impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*const U> for *mut T {}
+ impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*const U> for *const T {}
+ }
+ pub use self::unsize::CoerceUnsized;
+ // endregion:coerce_unsized
+
+ // region:deref
+ mod deref {
+ #[lang = "deref"]
+ pub trait Deref {
+ #[lang = "deref_target"]
+ type Target: ?Sized;
+ fn deref(&self) -> &Self::Target;
+ }
+ // region:deref_mut
+ #[lang = "deref_mut"]
+ pub trait DerefMut: Deref {
+ fn deref_mut(&mut self) -> &mut Self::Target;
+ }
+ // endregion:deref_mut
+ }
+ pub use self::deref::{
+ Deref,
+ DerefMut, // :deref_mut
+ };
+ // endregion:deref
+
+ // region:drop
+ #[lang = "drop"]
+ pub trait Drop {
+ fn drop(&mut self);
+ }
+ // endregion:drop
+
+ // region:index
+ mod index {
+ #[lang = "index"]
+ pub trait Index<Idx: ?Sized> {
+ type Output: ?Sized;
+ fn index(&self, index: Idx) -> &Self::Output;
+ }
+ #[lang = "index_mut"]
+ pub trait IndexMut<Idx: ?Sized>: Index<Idx> {
+ fn index_mut(&mut self, index: Idx) -> &mut Self::Output;
+ }
+
+ // region:slice
+ impl<T, I> Index<I> for [T]
+ where
+ I: SliceIndex<[T]>,
+ {
+ type Output = I::Output;
+ fn index(&self, index: I) -> &I::Output {
+ loop {}
+ }
+ }
+ impl<T, I> IndexMut<I> for [T]
+ where
+ I: SliceIndex<[T]>,
+ {
+ fn index_mut(&mut self, index: I) -> &mut I::Output {
+ loop {}
+ }
+ }
+
+ pub unsafe trait SliceIndex<T: ?Sized> {
+ type Output: ?Sized;
+ }
+ unsafe impl<T> SliceIndex<[T]> for usize {
+ type Output = T;
+ }
+ // endregion:slice
+ }
+ pub use self::index::{Index, IndexMut};
+ // endregion:index
+
+ // region:drop
+ pub mod mem {
+ pub fn drop<T>(_x: T) {}
+ }
+ // endregion:drop
+
+ // region:range
+ mod range {
+ #[lang = "RangeFull"]
+ pub struct RangeFull;
+
+ #[lang = "Range"]
+ pub struct Range<Idx> {
+ pub start: Idx,
+ pub end: Idx,
+ }
+
+ #[lang = "RangeFrom"]
+ pub struct RangeFrom<Idx> {
+ pub start: Idx,
+ }
+
+ #[lang = "RangeTo"]
+ pub struct RangeTo<Idx> {
+ pub end: Idx,
+ }
+
+ #[lang = "RangeInclusive"]
+ pub struct RangeInclusive<Idx> {
+ pub(crate) start: Idx,
+ pub(crate) end: Idx,
+ pub(crate) exhausted: bool,
+ }
+
+ #[lang = "RangeToInclusive"]
+ pub struct RangeToInclusive<Idx> {
+ pub end: Idx,
+ }
+ }
+ pub use self::range::{Range, RangeFrom, RangeFull, RangeTo};
+ pub use self::range::{RangeInclusive, RangeToInclusive};
+ // endregion:range
+
+ // region:fn
+ mod function {
+ #[lang = "fn"]
+ #[fundamental]
+ pub trait Fn<Args>: FnMut<Args> {}
+
+ #[lang = "fn_mut"]
+ #[fundamental]
+ pub trait FnMut<Args>: FnOnce<Args> {}
+
+ #[lang = "fn_once"]
+ #[fundamental]
+ pub trait FnOnce<Args> {
+ #[lang = "fn_once_output"]
+ type Output;
+ }
+ }
+ pub use self::function::{Fn, FnMut, FnOnce};
+ // endregion:fn
+ // region:try
+ mod try_ {
+ pub enum ControlFlow<B, C = ()> {
+ Continue(C),
+ Break(B),
+ }
+ pub trait FromResidual<R = Self::Residual> {
+ #[lang = "from_residual"]
+ fn from_residual(residual: R) -> Self;
+ }
+ #[lang = "try"]
+ pub trait Try: FromResidual<Self::Residual> {
+ type Output;
+ type Residual;
+ #[lang = "from_output"]
+ fn from_output(output: Self::Output) -> Self;
+ #[lang = "branch"]
+ fn branch(self) -> ControlFlow<Self::Residual, Self::Output>;
+ }
+
+ impl<B, C> Try for ControlFlow<B, C> {
+ type Output = C;
+ type Residual = ControlFlow<B, convert::Infallible>;
+ fn from_output(output: Self::Output) -> Self {}
+ fn branch(self) -> ControlFlow<Self::Residual, Self::Output> {}
+ }
+
+ impl<B, C> FromResidual for ControlFlow<B, C> {
+ fn from_residual(residual: ControlFlow<B, convert::Infallible>) -> Self {}
+ }
+ }
+ pub use self::try_::{ControlFlow, FromResidual, Try};
+ // endregion:try
+
+ // region:add
+ #[lang = "add"]
+ pub trait Add<Rhs = Self> {
+ type Output;
+ fn add(self, rhs: Rhs) -> Self::Output;
+ }
+ // endregion:add
+}
+
+// region:eq
+pub mod cmp {
+ #[lang = "eq"]
+ pub trait PartialEq<Rhs: ?Sized = Self> {
+ fn eq(&self, other: &Rhs) -> bool;
+ fn ne(&self, other: &Rhs) -> bool {
+ !self.eq(other)
+ }
+ }
+
+ pub trait Eq: PartialEq<Self> {}
+
+ // region:derive
+ #[rustc_builtin_macro]
+ pub macro PartialEq($item:item) {}
+ #[rustc_builtin_macro]
+ pub macro Eq($item:item) {}
+ // endregion:derive
+
+ // region:ord
+ #[lang = "partial_ord"]
+ pub trait PartialOrd<Rhs: ?Sized = Self>: PartialEq<Rhs> {
+ fn partial_cmp(&self, other: &Rhs) -> Option<Ordering>;
+ }
+
+ pub trait Ord: Eq + PartialOrd<Self> {
+ fn cmp(&self, other: &Self) -> Ordering;
+ }
+
+ pub enum Ordering {
+ Less = -1,
+ Equal = 0,
+ Greater = 1,
+ }
+
+ // region:derive
+ #[rustc_builtin_macro]
+ pub macro PartialOrd($item:item) {}
+ #[rustc_builtin_macro]
+ pub macro Ord($item:item) {}
+ // endregion:derive
+
+ // endregion:ord
+}
+// endregion:eq
+
+// region:fmt
+pub mod fmt {
+ pub struct Error;
+ pub type Result = crate::result::Result<(), Error>;
+ pub struct Formatter<'a>;
+ pub trait Debug {
+ fn fmt(&self, f: &mut Formatter<'_>) -> Result;
+ }
+}
+// endregion:fmt
+
+// region:slice
+pub mod slice {
+ #[lang = "slice"]
+ impl<T> [T] {
+ pub fn len(&self) -> usize {
+ loop {}
+ }
+ }
+}
+// endregion:slice
+
+// region:option
+pub mod option {
+ pub enum Option<T> {
+ #[lang = "None"]
+ None,
+ #[lang = "Some"]
+ Some(T),
+ }
+
+ impl<T> Option<T> {
+ pub const fn unwrap(self) -> T {
+ match self {
+ Some(val) => val,
+ None => panic!("called `Option::unwrap()` on a `None` value"),
+ }
+ }
+ }
+}
+// endregion:option
+
+// region:result
+pub mod result {
+ pub enum Result<T, E> {
+ #[lang = "Ok"]
+ Ok(T),
+ #[lang = "Err"]
+ Err(E),
+ }
+}
+// endregion:result
+
+// region:pin
+pub mod pin {
+ #[lang = "pin"]
+ #[fundamental]
+ pub struct Pin<P> {
+ pointer: P,
+ }
+}
+// endregion:pin
+
+// region:future
+pub mod future {
+ use crate::{
+ pin::Pin,
+ task::{Context, Poll},
+ };
+
+ #[lang = "future_trait"]
+ pub trait Future {
+ type Output;
+ #[lang = "poll"]
+ fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output>;
+ }
+}
+pub mod task {
+ pub enum Poll<T> {
+ #[lang = "Ready"]
+ Ready(T),
+ #[lang = "Pending"]
+ Pending,
+ }
+
+ pub struct Context<'a> {
+ waker: &'a (),
+ }
+}
+// endregion:future
+
+// region:iterator
+pub mod iter {
+ // region:iterators
+ mod adapters {
+ pub struct Take<I> {
+ iter: I,
+ n: usize,
+ }
+ impl<I> Iterator for Take<I>
+ where
+ I: Iterator,
+ {
+ type Item = <I as Iterator>::Item;
+
+ fn next(&mut self) -> Option<<I as Iterator>::Item> {
+ loop {}
+ }
+ }
+
+ pub struct FilterMap<I, F> {
+ iter: I,
+ f: F,
+ }
+ impl<B, I: Iterator, F> Iterator for FilterMap<I, F>
+ where
+ F: FnMut(I::Item) -> Option<B>,
+ {
+ type Item = B;
+
+ #[inline]
+ fn next(&mut self) -> Option<B> {
+ loop {}
+ }
+ }
+ }
+ pub use self::adapters::{Take, FilterMap};
+
+ mod sources {
+ mod repeat {
+ pub fn repeat<T>(elt: T) -> Repeat<T> {
+ loop {}
+ }
+
+ pub struct Repeat<A> {
+ element: A,
+ }
+
+ impl<A> Iterator for Repeat<A> {
+ type Item = A;
+
+ fn next(&mut self) -> Option<A> {
+ loop {}
+ }
+ }
+ }
+ pub use self::repeat::{repeat, Repeat};
+ }
+ pub use self::sources::{repeat, Repeat};
+ // endregion:iterators
+
+ mod traits {
+ mod iterator {
+ use super::super::Take;
+
+ pub trait Iterator {
+ type Item;
+ #[lang = "next"]
+ fn next(&mut self) -> Option<Self::Item>;
+ fn nth(&mut self, n: usize) -> Option<Self::Item> {
+ loop {}
+ }
+ fn by_ref(&mut self) -> &mut Self
+ where
+ Self: Sized,
+ {
+ self
+ }
+ // region:iterators
+ fn take(self, n: usize) -> crate::iter::Take<Self> {
+ loop {}
+ }
+ fn filter_map<B, F>(self, f: F) -> crate::iter::FilterMap<Self, F>
+ where
+ Self: Sized,
+ F: FnMut(Self::Item) -> Option<B>,
+ {
+ loop {}
+ }
+ // endregion:iterators
+ }
+ impl<I: Iterator + ?Sized> Iterator for &mut I {
+ type Item = I::Item;
+ fn next(&mut self) -> Option<I::Item> {
+ (**self).next()
+ }
+ }
+ }
+ pub use self::iterator::Iterator;
+
+ mod collect {
+ pub trait IntoIterator {
+ type Item;
+ type IntoIter: Iterator<Item = Self::Item>;
+ #[lang = "into_iter"]
+ fn into_iter(self) -> Self::IntoIter;
+ }
+ impl<I: Iterator> IntoIterator for I {
+ type Item = I::Item;
+ type IntoIter = I;
+ fn into_iter(self) -> I {
+ self
+ }
+ }
+ }
+ pub use self::collect::IntoIterator;
+ }
+ pub use self::traits::{IntoIterator, Iterator};
+}
+// endregion:iterator
+
+// region:derive
+mod macros {
+ pub(crate) mod builtin {
+ #[rustc_builtin_macro]
+ pub macro derive($item:item) {
+ /* compiler built-in */
+ }
+ }
+}
+// endregion:derive
+
+// region:bool_impl
+#[lang = "bool"]
+impl bool {
+ pub fn then<T, F: FnOnce() -> T>(self, f: F) -> Option<T> {
+ if self {
+ Some(f())
+ } else {
+ None
+ }
+ }
+}
+// endregion:bool_impl
+
+pub mod prelude {
+ pub mod v1 {
+ pub use crate::{
+ clone::Clone, // :clone
+ cmp::{Eq, PartialEq}, // :eq
+ cmp::{Ord, PartialOrd}, // :ord
+ convert::AsRef, // :as_ref
+ convert::{From, Into}, // :from
+ default::Default, // :default
+ iter::{IntoIterator, Iterator}, // :iterator
+ macros::builtin::derive, // :derive
+ marker::Copy, // :copy
+ marker::Sized, // :sized
+ mem::drop, // :drop
+ ops::Drop, // :drop
+ ops::{Fn, FnMut, FnOnce}, // :fn
+ option::Option::{self, None, Some}, // :option
+ result::Result::{self, Err, Ok}, // :result
+ };
+ }
+
+ pub mod rust_2015 {
+ pub use super::v1::*;
+ }
+
+ pub mod rust_2018 {
+ pub use super::v1::*;
+ }
+
+ pub mod rust_2021 {
+ pub use super::v1::*;
+ }
+}
+
+#[prelude_import]
+#[allow(unused)]
+use prelude::v1::*;