summaryrefslogtreecommitdiffstats
path: root/vendor/compiletest_rs
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-17 12:02:58 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-17 12:02:58 +0000
commit698f8c2f01ea549d77d7dc3338a12e04c11057b9 (patch)
tree173a775858bd501c378080a10dca74132f05bc50 /vendor/compiletest_rs
parentInitial commit. (diff)
downloadrustc-698f8c2f01ea549d77d7dc3338a12e04c11057b9.tar.xz
rustc-698f8c2f01ea549d77d7dc3338a12e04c11057b9.zip
Adding upstream version 1.64.0+dfsg1.upstream/1.64.0+dfsg1
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'vendor/compiletest_rs')
-rw-r--r--vendor/compiletest_rs/.cargo-checksum.json1
-rw-r--r--vendor/compiletest_rs/Cargo.toml83
-rw-r--r--vendor/compiletest_rs/LICENSE-APACHE201
-rw-r--r--vendor/compiletest_rs/LICENSE-MIT25
-rw-r--r--vendor/compiletest_rs/README.md121
-rw-r--r--vendor/compiletest_rs/build.rs8
-rw-r--r--vendor/compiletest_rs/src/common.rs425
-rw-r--r--vendor/compiletest_rs/src/errors.rs182
-rw-r--r--vendor/compiletest_rs/src/header.rs709
-rw-r--r--vendor/compiletest_rs/src/json.rs263
-rw-r--r--vendor/compiletest_rs/src/lib.rs458
-rw-r--r--vendor/compiletest_rs/src/raise_fd_limit.rs69
-rw-r--r--vendor/compiletest_rs/src/read2.rs208
-rw-r--r--vendor/compiletest_rs/src/runtest.rs2894
-rw-r--r--vendor/compiletest_rs/src/uidiff.rs73
-rw-r--r--vendor/compiletest_rs/src/util.rs134
-rw-r--r--vendor/compiletest_rs/tests/bless.rs85
-rw-r--r--vendor/compiletest_rs/tests/test_support/mod.rs105
18 files changed, 6044 insertions, 0 deletions
diff --git a/vendor/compiletest_rs/.cargo-checksum.json b/vendor/compiletest_rs/.cargo-checksum.json
new file mode 100644
index 000000000..8533fd089
--- /dev/null
+++ b/vendor/compiletest_rs/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"Cargo.toml":"e66bd0207f0ea860188e04376705d67a4658e78295bcaf579b2a89dbd9088d54","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"0621878e61f0d0fda054bcbe02df75192c28bde1ecc8289cbd86aeba2dd72720","README.md":"e0655e85239c6e887c0f35e1b97fb2574127ea61c28e3deec93c20b560f47892","build.rs":"905784fbb4a75c17279a88fd476e7221104adf00b4dd745a4a66bd41cac8d77a","src/common.rs":"df14f9c75c6a341c5bbc90c4b386c20f66164196f3b4b2ffc19b454d6b658e2b","src/errors.rs":"88a36d1726d90b11bf9272b1439dd52cd4fdf2e70e6e0cb6cfb323f76d84e3ff","src/header.rs":"04e405d100c46dd82139705e8b833e03d19c597edf3a5f7fee92ad9cbd12c201","src/json.rs":"e278c4c3c32c60e840a250b0979926e2ac333dcbbbaae5ab6fcf01ca05bef625","src/lib.rs":"ac780a6ad8d9eb84bd0c6bd7c56fab64a98ef29b9133f1b4fb2a845b58c839bc","src/raise_fd_limit.rs":"b0840fe16df9edaae0571499e105db47768050fcd62729d1e56295f2db34318e","src/read2.rs":"fb9caa84bec8c9c3d2c1ee39a4a6698c461fdc77aec086c74fd7738a6acf4155","src/runtest.rs":"9dc12384637275790ca5a030f79e7ea72be991ff8d5365193e48895a30db6454","src/uidiff.rs":"2f612bd4ed76d68c0f6b3eb8dda5adf14a3e6ceef261c88c810ac6b6857e2d34","src/util.rs":"9cda9e803ffe5e1b5d4c74cf7da7bd51e28a8d1b09bda3d31e01c0f648c3262d","tests/bless.rs":"29031f02666102ab8cde316b9206bf1e770275f9fa6e0172754dcad859a52b5b","tests/test_support/mod.rs":"21ec96cca07cf95b6e3333917a5158a6c47356940063f77d52503b97fd3d8b79"},"package":"262134ef87408da1ddfe45e33daa0ca43b75286d6b1076446e602d264cf9847e"} \ No newline at end of file
diff --git a/vendor/compiletest_rs/Cargo.toml b/vendor/compiletest_rs/Cargo.toml
new file mode 100644
index 000000000..9403548b8
--- /dev/null
+++ b/vendor/compiletest_rs/Cargo.toml
@@ -0,0 +1,83 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies.
+#
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
+
+[package]
+name = "compiletest_rs"
+version = "0.8.0"
+authors = [
+ "The Rust Project Developers",
+ "Thomas Bracht Laumann Jespersen <laumann.thomas@gmail.com>",
+ "Manish Goregaokar <manishsmail@gmail.com>",
+]
+description = "The compiletest utility from the Rust compiler as a standalone testing harness"
+readme = "README.md"
+keywords = [
+ "compiletest",
+ "test",
+ "plugin",
+]
+license = "Apache-2.0/MIT"
+repository = "https://github.com/laumann/compiletest-rs"
+
+[lib]
+name = "compiletest_rs"
+
+[dependencies.diff]
+version = "0.1.10"
+
+[dependencies.filetime]
+version = "0.2"
+
+[dependencies.getopts]
+version = "0.2"
+
+[dependencies.lazy_static]
+version = "1.4"
+
+[dependencies.log]
+version = "0.4"
+
+[dependencies.regex]
+version = "1.0"
+
+[dependencies.rustfix]
+version = "0.6"
+
+[dependencies.serde]
+version = "1.0"
+
+[dependencies.serde_derive]
+version = "1.0"
+
+[dependencies.serde_json]
+version = "1.0"
+
+[dependencies.tempfile]
+version = "3.0"
+optional = true
+
+[dependencies.tester]
+version = "0.9"
+
+[features]
+rustc = []
+stable = []
+tmp = ["tempfile"]
+
+[target."cfg(unix)".dependencies.libc]
+version = "0.2"
+
+[target."cfg(windows)".dependencies.miow]
+version = "0.3"
+
+[target."cfg(windows)".dependencies.winapi]
+version = "0.3"
+features = ["winerror"]
diff --git a/vendor/compiletest_rs/LICENSE-APACHE b/vendor/compiletest_rs/LICENSE-APACHE
new file mode 100644
index 000000000..16fe87b06
--- /dev/null
+++ b/vendor/compiletest_rs/LICENSE-APACHE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/vendor/compiletest_rs/LICENSE-MIT b/vendor/compiletest_rs/LICENSE-MIT
new file mode 100644
index 000000000..25597d583
--- /dev/null
+++ b/vendor/compiletest_rs/LICENSE-MIT
@@ -0,0 +1,25 @@
+Copyright (c) 2010 The Rust Project Developers
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
diff --git a/vendor/compiletest_rs/README.md b/vendor/compiletest_rs/README.md
new file mode 100644
index 000000000..bf7cac194
--- /dev/null
+++ b/vendor/compiletest_rs/README.md
@@ -0,0 +1,121 @@
+compiletest-rs
+==============
+
+This project is an attempt at extracting [the `compiletest` utility][upstream]
+from the Rust compiler.
+
+The `compiletest` utility is useful for library and plugin developers, who want
+to include test programs that should fail to compile, issue warnings or
+otherwise produce compile-time output.
+
+To use in your project
+----------------------
+To use `compiletest-rs` in your application, add the following to `Cargo.toml`
+
+```toml
+[dev-dependencies]
+compiletest_rs = "0.7"
+```
+
+By default, `compiletest-rs` should be able to run on both stable, beta and
+nightly channels of rust. We use the [`tester`][tester] fork of Rust's builtin
+`test` crate, so that we don't have to require nightly. If you are running nightly
+and want to use Rust's `test` crate directly, you need to have the rustc development
+libraries installed (which you can get by running `rustup component add rustc-dev
+--toolchain nightly`). Once you have the rustc development libraries installed, you
+can use the `rustc` feature to make compiletest use them instead of the `tester`
+crate.
+
+```toml
+[dev-dependencies]
+compiletest_rs = { version = "0.7", features = [ "rustc" ] }
+```
+
+Create a `tests` folder in the root folder of your project. Create a test file
+with something like the following:
+
+```rust
+extern crate compiletest_rs as compiletest;
+
+use std::path::PathBuf;
+
+fn run_mode(mode: &'static str) {
+ let mut config = compiletest::Config::default();
+
+ config.mode = mode.parse().expect("Invalid mode");
+ config.src_base = PathBuf::from(format!("tests/{}", mode));
+ config.link_deps(); // Populate config.target_rustcflags with dependencies on the path
+ config.clean_rmeta(); // If your tests import the parent crate, this helps with E0464
+
+ compiletest::run_tests(&config);
+}
+
+#[test]
+fn compile_test() {
+ run_mode("compile-fail");
+ run_mode("run-pass");
+}
+
+```
+
+Each mode corresponds to a folder with the same name in the `tests` folder. That
+is for the `compile-fail` mode the test runner looks for the
+`tests/compile-fail` folder.
+
+Adding flags to the Rust compiler is a matter of assigning the correct field in
+the config. The most common flag to populate is the
+`target_rustcflags` to include the link dependencies on the path.
+
+```rust
+// NOTE! This is the manual way of adding flags
+config.target_rustcflags = Some("-L target/debug".to_string());
+```
+
+This is useful (and necessary) for library development. Note that other
+secondary library dependencies may have their build artifacts placed in
+different (non-obvious) locations and these locations must also be
+added.
+
+For convenience, `Config` provides a `link_deps()` method that
+populates `target_rustcflags` with all the dependencies found in the
+`PATH` variable (which is OS specific). For most cases, it should be
+sufficient to do:
+
+```rust
+let mut config = compiletest::Config::default();
+config.link_deps();
+```
+
+Note that `link_deps()` should not be used if any of the added paths contain
+spaces, as these are currently not handled correctly.
+
+Example
+-------
+See the `test-project` folder for a complete working example using the
+`compiletest-rs` utility. Simply `cd test-project` and `cargo test` to see the
+tests run. The annotation syntax is documented in the [rustc-guide][tests].
+
+TODO
+----
+ - The `run-pass` mode is strictly not necessary since it's baked right into
+ Cargo, but I haven't bothered to take it out
+
+Contributing
+------------
+
+Thank you for your interest in improving this utility! Please consider
+submitting your patch to [the upstream source][src] instead, as it will
+be incorporated into this repo in due time. Still, there are some supporting
+files that are specific to this repo, for example:
+
+- src/lib.rs
+- src/uidiff.rs
+- test-project/
+
+If you are unsure, open a pull request anyway and we would be glad to help!
+
+
+[upstream]: https://github.com/rust-lang/rust/tree/master/src/tools/compiletest
+[src]: https://github.com/rust-lang/rust/tree/master/src/tools/compiletest/src
+[tests]: https://rustc-dev-guide.rust-lang.org/tests/adding.html#header-commands-configuring-rustc
+[tester]: https://crates.io/crates/tester
diff --git a/vendor/compiletest_rs/build.rs b/vendor/compiletest_rs/build.rs
new file mode 100644
index 000000000..c72d9e55a
--- /dev/null
+++ b/vendor/compiletest_rs/build.rs
@@ -0,0 +1,8 @@
+use std::env;
+
+pub fn main() {
+ if env::var("CARGO_FEATURE_RUSTC").is_err() {
+ println!("cargo:rustc-env=TARGET={}", env::var("TARGET").unwrap());
+ println!("cargo:rustc-env=HOST={}", env::var("HOST").unwrap());
+ }
+}
diff --git a/vendor/compiletest_rs/src/common.rs b/vendor/compiletest_rs/src/common.rs
new file mode 100644
index 000000000..17eaa5447
--- /dev/null
+++ b/vendor/compiletest_rs/src/common.rs
@@ -0,0 +1,425 @@
+// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+pub use self::Mode::*;
+
+use std::env;
+use std::fmt;
+use std::fs::{read_dir, remove_file};
+use std::str::FromStr;
+use std::path::PathBuf;
+#[cfg(feature = "rustc")]
+use rustc_session;
+
+use test::ColorConfig;
+use runtest::dylib_env_var;
+
+#[derive(Clone, Copy, PartialEq, Debug)]
+pub enum Mode {
+ CompileFail,
+ ParseFail,
+ RunFail,
+ RunPass,
+ RunPassValgrind,
+ Pretty,
+ DebugInfoGdb,
+ DebugInfoLldb,
+ Codegen,
+ Rustdoc,
+ CodegenUnits,
+ Incremental,
+ RunMake,
+ Ui,
+ MirOpt,
+ Assembly,
+}
+
+impl Mode {
+ pub fn disambiguator(self) -> &'static str {
+ // Run-pass and pretty run-pass tests could run concurrently, and if they do,
+ // they need to keep their output segregated. Same is true for debuginfo tests that
+ // can be run both on gdb and lldb.
+ match self {
+ Pretty => ".pretty",
+ DebugInfoGdb => ".gdb",
+ DebugInfoLldb => ".lldb",
+ _ => "",
+ }
+ }
+}
+
+impl FromStr for Mode {
+ type Err = ();
+ fn from_str(s: &str) -> Result<Mode, ()> {
+ match s {
+ "compile-fail" => Ok(CompileFail),
+ "parse-fail" => Ok(ParseFail),
+ "run-fail" => Ok(RunFail),
+ "run-pass" => Ok(RunPass),
+ "run-pass-valgrind" => Ok(RunPassValgrind),
+ "pretty" => Ok(Pretty),
+ "debuginfo-lldb" => Ok(DebugInfoLldb),
+ "debuginfo-gdb" => Ok(DebugInfoGdb),
+ "codegen" => Ok(Codegen),
+ "rustdoc" => Ok(Rustdoc),
+ "codegen-units" => Ok(CodegenUnits),
+ "incremental" => Ok(Incremental),
+ "run-make" => Ok(RunMake),
+ "ui" => Ok(Ui),
+ "mir-opt" => Ok(MirOpt),
+ "assembly" => Ok(Assembly),
+ _ => Err(()),
+ }
+ }
+}
+
+impl fmt::Display for Mode {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ fmt::Display::fmt(match *self {
+ CompileFail => "compile-fail",
+ ParseFail => "parse-fail",
+ RunFail => "run-fail",
+ RunPass => "run-pass",
+ RunPassValgrind => "run-pass-valgrind",
+ Pretty => "pretty",
+ DebugInfoGdb => "debuginfo-gdb",
+ DebugInfoLldb => "debuginfo-lldb",
+ Codegen => "codegen",
+ Rustdoc => "rustdoc",
+ CodegenUnits => "codegen-units",
+ Incremental => "incremental",
+ RunMake => "run-make",
+ Ui => "ui",
+ MirOpt => "mir-opt",
+ Assembly => "assembly",
+ },
+ f)
+ }
+}
+
+#[derive(Clone)]
+pub struct Config {
+ /// `true` to overwrite stderr/stdout/fixed files instead of complaining about changes in output.
+ pub bless: bool,
+
+ /// The library paths required for running the compiler
+ pub compile_lib_path: PathBuf,
+
+ /// The library paths required for running compiled programs
+ pub run_lib_path: PathBuf,
+
+ /// The rustc executable
+ pub rustc_path: PathBuf,
+
+ /// The rustdoc executable
+ pub rustdoc_path: Option<PathBuf>,
+
+ /// The python executable to use for LLDB
+ pub lldb_python: String,
+
+ /// The python executable to use for htmldocck
+ pub docck_python: String,
+
+ /// The llvm FileCheck binary path
+ pub llvm_filecheck: Option<PathBuf>,
+
+ /// The valgrind path
+ pub valgrind_path: Option<String>,
+
+ /// Whether to fail if we can't run run-pass-valgrind tests under valgrind
+ /// (or, alternatively, to silently run them like regular run-pass tests).
+ pub force_valgrind: bool,
+
+ /// The directory containing the tests to run
+ pub src_base: PathBuf,
+
+ /// The directory where programs should be built
+ pub build_base: PathBuf,
+
+ /// The name of the stage being built (stage1, etc)
+ pub stage_id: String,
+
+ /// The test mode, compile-fail, run-fail, run-pass
+ pub mode: Mode,
+
+ /// Run ignored tests
+ pub run_ignored: bool,
+
+ /// Only run tests that match these filters
+ pub filters: Vec<String>,
+
+ /// Exactly match the filter, rather than a substring
+ pub filter_exact: bool,
+
+ /// Write out a parseable log of tests that were run
+ pub logfile: Option<PathBuf>,
+
+ /// A command line to prefix program execution with,
+ /// for running under valgrind
+ pub runtool: Option<String>,
+
+ /// Flags to pass to the compiler when building for the host
+ pub host_rustcflags: Option<String>,
+
+ /// Flags to pass to the compiler when building for the target
+ pub target_rustcflags: Option<String>,
+
+ /// Target system to be tested
+ pub target: String,
+
+ /// Host triple for the compiler being invoked
+ pub host: String,
+
+ /// Path to / name of the GDB executable
+ pub gdb: Option<String>,
+
+ /// Version of GDB, encoded as ((major * 1000) + minor) * 1000 + patch
+ pub gdb_version: Option<u32>,
+
+ /// Whether GDB has native rust support
+ pub gdb_native_rust: bool,
+
+ /// Version of LLDB
+ pub lldb_version: Option<String>,
+
+ /// Version of LLVM
+ pub llvm_version: Option<String>,
+
+ /// Is LLVM a system LLVM
+ pub system_llvm: bool,
+
+ /// Path to the android tools
+ pub android_cross_path: PathBuf,
+
+ /// Extra parameter to run adb on arm-linux-androideabi
+ pub adb_path: String,
+
+ /// Extra parameter to run test suite on arm-linux-androideabi
+ pub adb_test_dir: String,
+
+ /// status whether android device available or not
+ pub adb_device_status: bool,
+
+ /// the path containing LLDB's Python module
+ pub lldb_python_dir: Option<String>,
+
+ /// Explain what's going on
+ pub verbose: bool,
+
+ /// Print one character per test instead of one line
+ pub quiet: bool,
+
+ /// Whether to use colors in test.
+ pub color: ColorConfig,
+
+ /// where to find the remote test client process, if we're using it
+ pub remote_test_client: Option<PathBuf>,
+
+ /// If true, this will generate a coverage file with UI test files that run `MachineApplicable`
+ /// diagnostics but are missing `run-rustfix` annotations. The generated coverage file is
+ /// created in `/<build_base>/rustfix_missing_coverage.txt`
+ pub rustfix_coverage: bool,
+
+ /// The default Rust edition
+ pub edition: Option<String>,
+
+ // Configuration for various run-make tests frobbing things like C compilers
+ // or querying about various LLVM component information.
+ pub cc: String,
+ pub cxx: String,
+ pub cflags: String,
+ pub ar: String,
+ pub linker: Option<String>,
+ pub llvm_components: String,
+ pub llvm_cxxflags: String,
+ pub nodejs: Option<String>,
+}
+
+#[derive(Clone)]
+pub struct TestPaths {
+ pub file: PathBuf, // e.g., compile-test/foo/bar/baz.rs
+ pub base: PathBuf, // e.g., compile-test, auxiliary
+ pub relative_dir: PathBuf, // e.g., foo/bar
+}
+
+/// Used by `ui` tests to generate things like `foo.stderr` from `foo.rs`.
+pub fn expected_output_path(
+ testpaths: &TestPaths,
+ revision: Option<&str>,
+ kind: &str,
+) -> PathBuf {
+ assert!(UI_EXTENSIONS.contains(&kind));
+ let mut parts = Vec::new();
+
+ if let Some(x) = revision {
+ parts.push(x);
+ }
+ parts.push(kind);
+
+ let extension = parts.join(".");
+ testpaths.file.with_extension(extension)
+}
+
+pub const UI_EXTENSIONS: &[&str] = &[UI_STDERR, UI_STDOUT, UI_FIXED];
+pub const UI_STDERR: &str = "stderr";
+pub const UI_STDOUT: &str = "stdout";
+pub const UI_FIXED: &str = "fixed";
+
+impl Config {
+ /// Add rustc flags to link with the crate's dependencies in addition to the crate itself
+ pub fn link_deps(&mut self) {
+ let varname = dylib_env_var();
+
+ // Dependencies can be found in the environment variable. Throw everything there into the
+ // link flags
+ let lib_paths = env::var(varname).unwrap_or_else(|err| match err {
+ env::VarError::NotPresent => String::new(),
+ err => panic!("can't get {} environment variable: {}", varname, err),
+ });
+
+ // Append to current flags if any are set, otherwise make new String
+ let mut flags = self.target_rustcflags.take().unwrap_or_else(String::new);
+ if !lib_paths.is_empty() {
+ for p in env::split_paths(&lib_paths) {
+ flags += " -L ";
+ flags += p.to_str().unwrap(); // Can't fail. We already know this is unicode
+ }
+ }
+
+ self.target_rustcflags = Some(flags);
+ }
+
+ /// Remove rmeta files from target `deps` directory
+ ///
+ /// These files are created by `cargo check`, and conflict with
+ /// `cargo build` rlib files, causing E0464 for tests which use
+ /// the parent crate.
+ pub fn clean_rmeta(&self) {
+ if self.target_rustcflags.is_some() {
+ for directory in self.target_rustcflags
+ .as_ref()
+ .unwrap()
+ .split_whitespace()
+ .filter(|s| s.ends_with("/deps"))
+ {
+ if let Ok(mut entries) = read_dir(directory) {
+ while let Some(Ok(entry)) = entries.next() {
+ if entry.file_name().to_string_lossy().ends_with(".rmeta") {
+ let _ = remove_file(entry.path());
+ }
+ }
+ }
+ }
+ }
+ }
+
+ #[cfg(feature = "tmp")]
+ pub fn tempdir(mut self) -> ConfigWithTemp {
+ let tmp = tempfile::Builder::new().prefix("compiletest").tempdir()
+ .expect("failed to create temporary directory");
+ self.build_base = tmp.path().to_owned();
+ config_tempdir::ConfigWithTemp {
+ config: self,
+ tempdir: tmp,
+ }
+ }
+}
+
+#[cfg(feature = "tmp")]
+mod config_tempdir {
+ use tempfile;
+ use std::ops;
+
+ pub struct ConfigWithTemp {
+ pub config: super::Config,
+ pub tempdir: tempfile::TempDir,
+ }
+
+ impl ops::Deref for ConfigWithTemp {
+ type Target = super::Config;
+
+ fn deref(&self) -> &Self::Target {
+ &self.config
+ }
+ }
+
+ impl ops::DerefMut for ConfigWithTemp {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.config
+ }
+ }
+}
+
+#[cfg(feature = "tmp")]
+pub use self::config_tempdir::ConfigWithTemp;
+
+
+impl Default for Config {
+ fn default() -> Config {
+ #[cfg(feature = "rustc")]
+ let platform = rustc_session::config::host_triple().to_string();
+
+ Config {
+ bless: false,
+ compile_lib_path: PathBuf::from(""),
+ run_lib_path: PathBuf::from(""),
+ rustc_path: PathBuf::from("rustc"),
+ rustdoc_path: None,
+ lldb_python: "python".to_owned(),
+ docck_python: "docck-python".to_owned(),
+ valgrind_path: None,
+ force_valgrind: false,
+ llvm_filecheck: None,
+ src_base: PathBuf::from("tests/run-pass"),
+ build_base: env::temp_dir(),
+ stage_id: "stage-id".to_owned(),
+ mode: Mode::RunPass,
+ run_ignored: false,
+ filters: vec![],
+ filter_exact: false,
+ logfile: None,
+ runtool: None,
+ host_rustcflags: None,
+ target_rustcflags: None,
+ #[cfg(feature = "rustc")]
+ target: platform.clone(),
+ #[cfg(not(feature = "rustc"))]
+ target: env!("TARGET").to_string(),
+ #[cfg(feature = "rustc")]
+ host: platform.clone(),
+ #[cfg(not(feature = "rustc"))]
+ host: env!("HOST").to_string(),
+ rustfix_coverage: false,
+ gdb: None,
+ gdb_version: None,
+ gdb_native_rust: false,
+ lldb_version: None,
+ llvm_version: None,
+ system_llvm: false,
+ android_cross_path: PathBuf::from("android-cross-path"),
+ adb_path: "adb-path".to_owned(),
+ adb_test_dir: "adb-test-dir/target".to_owned(),
+ adb_device_status: false,
+ lldb_python_dir: None,
+ verbose: false,
+ quiet: false,
+ color: ColorConfig::AutoColor,
+ remote_test_client: None,
+ cc: "cc".to_string(),
+ cxx: "cxx".to_string(),
+ cflags: "cflags".to_string(),
+ ar: "ar".to_string(),
+ linker: None,
+ llvm_components: "llvm-components".to_string(),
+ llvm_cxxflags: "llvm-cxxflags".to_string(),
+ nodejs: None,
+ edition: None,
+ }
+ }
+}
diff --git a/vendor/compiletest_rs/src/errors.rs b/vendor/compiletest_rs/src/errors.rs
new file mode 100644
index 000000000..251dd4d5e
--- /dev/null
+++ b/vendor/compiletest_rs/src/errors.rs
@@ -0,0 +1,182 @@
+// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+use self::WhichLine::*;
+
+use std::fmt;
+use std::fs::File;
+use std::io::BufReader;
+use std::io::prelude::*;
+use std::path::Path;
+use std::str::FromStr;
+
/// The kind of compiler diagnostic an expected-error annotation refers to.
#[derive(Clone, Debug, PartialEq)]
pub enum ErrorKind {
    Help,
    Error,
    Note,
    Suggestion,
    Warning,
}

impl FromStr for ErrorKind {
    type Err = ();

    /// Parses a kind from the leading word of an annotation, e.g. `ERROR`
    /// or `warning:`. Matching is case-insensitive and anything after the
    /// first `:` is ignored; unknown words yield `Err(())`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let upper = s.to_uppercase();
        let head = upper.split(':').next().unwrap();
        match head {
            "HELP" => Ok(ErrorKind::Help),
            "ERROR" => Ok(ErrorKind::Error),
            "NOTE" => Ok(ErrorKind::Note),
            "SUGGESTION" => Ok(ErrorKind::Suggestion),
            "WARN" | "WARNING" => Ok(ErrorKind::Warning),
            _ => Err(()),
        }
    }
}

impl fmt::Display for ErrorKind {
    /// Renders the kind as the lowercase phrase used in test-failure output.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let text = match *self {
            ErrorKind::Help => "help message",
            ErrorKind::Error => "error",
            ErrorKind::Note => "note",
            ErrorKind::Suggestion => "suggestion",
            ErrorKind::Warning => "warning",
        };
        write!(f, "{}", text)
    }
}
+
/// A single expected diagnostic parsed from a `//~` annotation in a test file.
#[derive(Debug)]
pub struct Error {
    // 1-based source line the diagnostic is expected on.
    pub line_num: usize,
    /// What kind of message we expect (e.g. warning, error, suggestion).
    /// `None` if not specified or unknown message kind.
    pub kind: Option<ErrorKind>,
    pub msg: String,
}

/// How an annotation locates the source line it applies to.
#[derive(PartialEq, Debug)]
enum WhichLine {
    // Plain `//~`: the annotation's own line.
    ThisLine,
    // `//~|`: reuse the target line of the previous non-follow annotation.
    FollowPrevious(usize),
    // `//~^^...`: this many lines above the annotation.
    AdjustBackward(usize),
}
+
+/// Looks for either "//~| KIND MESSAGE" or "//~^^... KIND MESSAGE"
+/// The former is a "follow" that inherits its target from the preceding line;
+/// the latter is an "adjusts" that goes that many lines up.
+///
+/// Goal is to enable tests both like: //~^^^ ERROR go up three
+/// and also //~^ ERROR message one for the preceding line, and
+/// //~| ERROR message two for that same line.
+///
+/// If cfg is not None (i.e., in an incremental test), then we look
+/// for `//[X]~` instead, where `X` is the current `cfg`.
pub fn load_errors(testfile: &Path, cfg: Option<&str>) -> Vec<Error> {
    // Panics if the test file cannot be opened or a line is not UTF-8.
    let rdr = BufReader::new(File::open(testfile).unwrap());

    // `last_nonfollow_error` tracks the most recently seen
    // line with an error template that did not use the
    // follow-syntax, "//~| ...".
    //
    // (pnkfelix could not find an easy way to compose Iterator::scan
    // and Iterator::filter_map to pass along this information into
    // `parse_expected`. So instead I am storing that state here and
    // updating it in the map callback below.)
    let mut last_nonfollow_error = None;

    // Revision-specific annotations look like `//[rev]~`; plain ones `//~`.
    let tag = match cfg {
        Some(rev) => format!("//[{}]~", rev),
        None => "//~".to_string(),
    };

    rdr.lines()
        .enumerate()
        .filter_map(|(line_num, line)| {
            // Line numbers are 1-based in annotations, hence `+ 1`.
            parse_expected(last_nonfollow_error, line_num + 1, &line.unwrap(), &tag)
                .map(|(which, error)| {
                    match which {
                        FollowPrevious(_) => {}
                        // Any non-follow annotation becomes the new anchor
                        // for subsequent `//~|` lines.
                        _ => last_nonfollow_error = Some(error.line_num),
                    }
                    error
                })
        })
        .collect()
}
+
/// Parses one source line for an expected-error annotation. `tag` is the
/// marker to look for (`//~` or `//[rev]~`). Returns the targeting mode
/// and the parsed `Error`, or `None` if the line carries no annotation.
///
/// NOTE(review): if the tag is the very last thing on the line, the
/// `.next().unwrap()` below panics — this assumes an annotation is always
/// followed by at least one character.
fn parse_expected(last_nonfollow_error: Option<usize>,
                  line_num: usize,
                  line: &str,
                  tag: &str)
                  -> Option<(WhichLine, Error)> {
    let start = match line.find(tag) {
        Some(i) => i,
        None => return None,
    };
    // `//~|` is "follow"; otherwise count `^`s to know how far up to adjust.
    let (follow, adjusts) = if line[start + tag.len()..].chars().next().unwrap() == '|' {
        (true, 0)
    } else {
        (false, line[start + tag.len()..].chars().take_while(|c| *c == '^').count())
    };
    // First byte after the tag and any `|`/`^` modifiers.
    let kind_start = start + tag.len() + adjusts + (follow as usize);
    let (kind, msg);
    match line[kind_start..]
              .split_whitespace()
              .next()
              .expect("Encountered unexpected empty comment")
              .parse::<ErrorKind>() {
        Ok(k) => {
            // If we find `//~ ERROR foo` or something like that:
            kind = Some(k);
            // Skip leading whitespace, then the kind word itself; the rest
            // (including its leading space) is the message.
            let letters = line[kind_start..].chars();
            msg = letters.skip_while(|c| c.is_whitespace())
                .skip_while(|c| !c.is_whitespace())
                .collect::<String>();
        }
        Err(_) => {
            // Otherwise we found `//~ foo`:
            kind = None;
            let letters = line[kind_start..].chars();
            msg = letters.skip_while(|c| c.is_whitespace())
                .collect::<String>();
        }
    }
    let msg = msg.trim().to_owned();

    let (which, line_num) = if follow {
        assert_eq!(adjusts, 0, "use either //~| or //~^, not both.");
        // `//~|` inherits its target from the last non-follow annotation.
        let line_num = last_nonfollow_error.expect("encountered //~| without \
                                                    preceding //~^ line.");
        (FollowPrevious(line_num), line_num)
    } else {
        let which = if adjusts > 0 {
            AdjustBackward(adjusts)
        } else {
            ThisLine
        };
        let line_num = line_num - adjusts;
        (which, line_num)
    };

    debug!("line={} tag={:?} which={:?} kind={:?} msg={:?}",
           line_num,
           tag,
           which,
           kind,
           msg);
    Some((which,
          Error {
              line_num,
              kind,
              msg,
          }))
}
diff --git a/vendor/compiletest_rs/src/header.rs b/vendor/compiletest_rs/src/header.rs
new file mode 100644
index 000000000..28f3f9de7
--- /dev/null
+++ b/vendor/compiletest_rs/src/header.rs
@@ -0,0 +1,709 @@
+// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use std::env;
+use std::fs::File;
+use std::io::BufReader;
+use std::io::prelude::*;
+use std::path::{Path, PathBuf};
+
+use common::Config;
+use common;
+use util;
+
+use extract_gdb_version;
+
+/// Properties which must be known very early, before actually running
+/// the test.
pub struct EarlyProps {
    // Whether the test should be skipped on the current configuration.
    pub ignore: bool,
    // Whether the test is expected to fail (`should-fail` directive).
    pub should_fail: bool,
    // Names of `aux-build` dependencies to compile first.
    pub aux: Vec<String>,
}
+
impl EarlyProps {
    /// Parses the early properties of `testfile` from its header comments:
    /// evaluates `ignore-*`/`only-*` directives plus debugger- and
    /// LLVM-version based ignores, and collects `aux-build` dependencies.
    pub fn from_file(config: &Config, testfile: &Path) -> Self {
        let mut props = EarlyProps {
            ignore: false,
            should_fail: false,
            aux: Vec::new(),
        };

        iter_header(testfile,
                    None,
                    &mut |ln| {
            // `ignore-<name>` matching the current config turns ignore on;
            // it is sticky and never turned back off.
            props.ignore =
                props.ignore ||
                config.parse_cfg_name_directive(ln, "ignore");

            // `only-<name>`: ignore unless the directive matches the config.
            if config.has_cfg_prefix(ln, "only") {
                props.ignore = match config.parse_cfg_name_directive(ln, "only") {
                    true => props.ignore,
                    false => true,
                };
            }

            props.ignore =
                props.ignore ||
                ignore_gdb(config, ln) ||
                ignore_lldb(config, ln) ||
                ignore_llvm(config, ln);

            if let Some(s) = config.parse_aux_build(ln) {
                props.aux.push(s);
            }

            props.should_fail = props.should_fail || config.parse_name_directive(ln, "should-fail");
        });

        return props;

        // Evaluates `min-gdb-version` / `ignore-gdb-version` directives
        // against the configured GDB version (DebugInfoGdb mode only).
        fn ignore_gdb(config: &Config, line: &str) -> bool {
            if config.mode != common::DebugInfoGdb {
                return false;
            }

            if let Some(actual_version) = config.gdb_version {
                if line.starts_with("min-gdb-version") {
                    let (start_ver, end_ver) = extract_gdb_version_range(line);

                    if start_ver != end_ver {
                        panic!("Expected single GDB version")
                    }
                    // Ignore if actual version is smaller the minimum required
                    // version
                    actual_version < start_ver
                } else if line.starts_with("ignore-gdb-version") {
                    let (min_version, max_version) = extract_gdb_version_range(line);

                    if max_version < min_version {
                        panic!("Malformed GDB version range: max < min")
                    }

                    actual_version >= min_version && actual_version <= max_version
                } else {
                    false
                }
            } else {
                false
            }
        }

        // Takes a directive of the form "ignore-gdb-version <version1> [- <version2>]",
        // returns the numeric representation of <version1> and <version2> as
        // tuple: (<version1> as u32, <version2> as u32)
        // If the <version2> part is omitted, the second component of the tuple
        // is the same as <version1>.
        fn extract_gdb_version_range(line: &str) -> (u32, u32) {
            const ERROR_MESSAGE: &'static str = "Malformed GDB version directive";

            let range_components = line.split(&[' ', '-'][..])
                                       .filter(|word| !word.is_empty())
                                       .map(extract_gdb_version)
                                       .skip_while(Option::is_none)
                                       .take(3) // 3 or more = invalid, so take at most 3.
                                       .collect::<Vec<Option<u32>>>();

            match range_components.len() {
                1 => {
                    let v = range_components[0].unwrap();
                    (v, v)
                }
                2 => {
                    let v_min = range_components[0].unwrap();
                    let v_max = range_components[1].expect(ERROR_MESSAGE);
                    (v_min, v_max)
                }
                _ => panic!("{}", ERROR_MESSAGE),
            }
        }

        // Evaluates `min-lldb-version` against the configured LLDB version
        // (DebugInfoLldb mode only).
        fn ignore_lldb(config: &Config, line: &str) -> bool {
            if config.mode != common::DebugInfoLldb {
                return false;
            }

            if let Some(ref actual_version) = config.lldb_version {
                if line.starts_with("min-lldb-version") {
                    let min_version = line.trim_end()
                        .rsplit(' ')
                        .next()
                        .expect("Malformed lldb version directive");
                    // Ignore if actual version is smaller the minimum required
                    // version
                    lldb_version_to_int(actual_version) < lldb_version_to_int(min_version)
                } else {
                    false
                }
            } else {
                false
            }
        }

        // Evaluates `no-system-llvm`, `min-llvm-version` and
        // `min-system-llvm-version` against the configured LLVM.
        // NOTE(review): version strings are compared lexicographically
        // (`&str` `<`), which misorders e.g. "9.0" vs "10.0".
        fn ignore_llvm(config: &Config, line: &str) -> bool {
            if config.system_llvm && line.starts_with("no-system-llvm") {
                return true;
            }
            if let Some(ref actual_version) = config.llvm_version {
                if line.starts_with("min-llvm-version") {
                    let min_version = line.trim_end()
                        .rsplit(' ')
                        .next()
                        .expect("Malformed llvm version directive");
                    // Ignore if actual version is smaller the minimum required
                    // version
                    &actual_version[..] < min_version
                } else if line.starts_with("min-system-llvm-version") {
                    let min_version = line.trim_end()
                        .rsplit(' ')
                        .next()
                        .expect("Malformed llvm version directive");
                    // Ignore if using system LLVM and actual version
                    // is smaller the minimum required version
                    !(config.system_llvm && &actual_version[..] < min_version)
                } else {
                    false
                }
            } else {
                false
            }
        }
    }
}
+
/// Per-test directives parsed from header comments of a test file,
/// controlling how it is compiled, run, and checked.
#[derive(Clone, Debug)]
pub struct TestProps {
    // Lines that should be expected, in order, on standard out
    pub error_patterns: Vec<String>,
    // Extra flags to pass to the compiler
    pub compile_flags: Vec<String>,
    // Extra flags to pass when the compiled code is run (such as --bench)
    pub run_flags: Option<String>,
    // If present, the name of a file that this test should match when
    // pretty-printed
    pub pp_exact: Option<PathBuf>,
    // Other crates that should be compiled (typically from the same
    // directory as the test, but for backwards compatibility reasons
    // we also check the auxiliary directory)
    pub aux_builds: Vec<String>,
    // Environment settings to use for compiling
    pub rustc_env: Vec<(String, String)>,
    // Environment settings to use during execution
    pub exec_env: Vec<(String, String)>,
    // Lines to check if they appear in the expected debugger output
    pub check_lines: Vec<String>,
    // Build documentation for all specified aux-builds as well
    pub build_aux_docs: bool,
    // Flag to force a crate to be built with the host architecture
    pub force_host: bool,
    // Check stdout for error-pattern output as well as stderr
    pub check_stdout: bool,
    // Don't force a --crate-type=dylib flag on the command line
    pub no_prefer_dynamic: bool,
    // Run --pretty expanded when running pretty printing tests
    pub pretty_expanded: bool,
    // Which pretty mode are we testing with, default to 'normal'
    pub pretty_mode: String,
    // Only compare pretty output and don't try compiling
    pub pretty_compare_only: bool,
    // Patterns which must not appear in the output of a cfail test.
    pub forbid_output: Vec<String>,
    // Revisions to test for incremental compilation.
    pub revisions: Vec<String>,
    // Directory (if any) to use for incremental compilation. This is
    // not set by end-users; rather it is set by the incremental
    // testing harness and used when generating compilation
    // arguments. (In particular, it propagates to the aux-builds.)
    pub incremental_dir: Option<PathBuf>,
    // Specifies that a cfail test must actually compile without errors.
    pub must_compile_successfully: bool,
    // rustdoc will test the output of the `--test` option
    pub check_test_line_numbers_match: bool,
    // The test must be compiled and run successfully. Only used in UI tests for
    // now.
    pub run_pass: bool,
    // customized normalization rules
    pub normalize_stdout: Vec<(String, String)>,
    pub normalize_stderr: Vec<(String, String)>,
    // Apply `rustfix` to the compiler suggestions and check the result.
    pub run_rustfix: bool,
    // Restrict rustfix to machine-applicable suggestions only.
    pub rustfix_only_machine_applicable: bool,
    // Expected output type for assembly tests (`assembly-output` directive).
    pub assembly_output: Option<String>,
}
+
impl TestProps {
    /// Returns a `TestProps` with every directive at its default
    /// (empty/false/None) value.
    pub fn new() -> Self {
        TestProps {
            error_patterns: vec![],
            compile_flags: vec![],
            run_flags: None,
            pp_exact: None,
            aux_builds: vec![],
            revisions: vec![],
            rustc_env: vec![],
            exec_env: vec![],
            check_lines: vec![],
            build_aux_docs: false,
            force_host: false,
            check_stdout: false,
            no_prefer_dynamic: false,
            pretty_expanded: false,
            pretty_mode: "normal".to_string(),
            pretty_compare_only: false,
            forbid_output: vec![],
            incremental_dir: None,
            must_compile_successfully: false,
            check_test_line_numbers_match: false,
            run_pass: false,
            normalize_stdout: vec![],
            normalize_stderr: vec![],
            run_rustfix: false,
            rustfix_only_machine_applicable: false,
            assembly_output: None,
        }
    }

    /// Loads properties for an aux-build file, carrying over the few
    /// properties (currently `incremental_dir`) that an aux build inherits
    /// from its parent test.
    pub fn from_aux_file(&self,
                         testfile: &Path,
                         cfg: Option<&str>,
                         config: &Config)
                         -> Self {
        let mut props = TestProps::new();

        // copy over select properties to the aux build:
        props.incremental_dir = self.incremental_dir.clone();
        props.load_from(testfile, cfg, config);

        props
    }

    /// Parses all directives from `testfile` into a fresh `TestProps`.
    pub fn from_file(testfile: &Path, cfg: Option<&str>, config: &Config) -> Self {
        let mut props = TestProps::new();
        props.load_from(testfile, cfg, config);
        props
    }

    /// Load properties from `testfile` into `props`. If a property is
    /// tied to a particular revision `foo` (indicated by writing
    /// `//[foo]`), then the property is ignored unless `cfg` is
    /// `Some("foo")`.
    fn load_from(&mut self,
                 testfile: &Path,
                 cfg: Option<&str>,
                 config: &Config) {
        // Tracks whether the file set its own `edition` directive, so the
        // config-wide default edition (appended below) doesn't override it.
        let mut has_edition = false;
        iter_header(testfile,
                    cfg,
                    &mut |ln| {
            if let Some(ep) = config.parse_error_pattern(ln) {
                self.error_patterns.push(ep);
            }

            if let Some(flags) = config.parse_compile_flags(ln) {
                self.compile_flags.extend(flags.split_whitespace()
                    .map(|s| s.to_owned()));
            }

            if let Some(edition) = config.parse_edition(ln) {
                self.compile_flags.push(format!("--edition={}", edition));
                has_edition = true;
            }

            if let Some(r) = config.parse_revisions(ln) {
                self.revisions.extend(r);
            }

            // Boolean/optional directives are "first one wins": only read
            // the directive if the property is still at its default.
            if self.run_flags.is_none() {
                self.run_flags = config.parse_run_flags(ln);
            }

            if self.pp_exact.is_none() {
                self.pp_exact = config.parse_pp_exact(ln, testfile);
            }

            if !self.build_aux_docs {
                self.build_aux_docs = config.parse_build_aux_docs(ln);
            }

            if !self.force_host {
                self.force_host = config.parse_force_host(ln);
            }

            if !self.check_stdout {
                self.check_stdout = config.parse_check_stdout(ln);
            }

            if !self.no_prefer_dynamic {
                self.no_prefer_dynamic = config.parse_no_prefer_dynamic(ln);
            }

            if !self.pretty_expanded {
                self.pretty_expanded = config.parse_pretty_expanded(ln);
            }

            if let Some(m) = config.parse_pretty_mode(ln) {
                self.pretty_mode = m;
            }

            if !self.pretty_compare_only {
                self.pretty_compare_only = config.parse_pretty_compare_only(ln);
            }

            if let Some(ab) = config.parse_aux_build(ln) {
                self.aux_builds.push(ab);
            }

            if let Some(ee) = config.parse_env(ln, "exec-env") {
                self.exec_env.push(ee);
            }

            if let Some(ee) = config.parse_env(ln, "rustc-env") {
                self.rustc_env.push(ee);
            }

            if let Some(cl) = config.parse_check_line(ln) {
                self.check_lines.push(cl);
            }

            if let Some(of) = config.parse_forbid_output(ln) {
                self.forbid_output.push(of);
            }

            if !self.must_compile_successfully {
                self.must_compile_successfully = config.parse_must_compile_successfully(ln);
            }

            if !self.check_test_line_numbers_match {
                self.check_test_line_numbers_match = config.parse_check_test_line_numbers_match(ln);
            }

            if !self.run_pass {
                self.run_pass = config.parse_run_pass(ln);
            }

            if let Some(rule) = config.parse_custom_normalization(ln, "normalize-stdout") {
                self.normalize_stdout.push(rule);
            }
            if let Some(rule) = config.parse_custom_normalization(ln, "normalize-stderr") {
                self.normalize_stderr.push(rule);
            }

            if !self.run_rustfix {
                self.run_rustfix = config.parse_run_rustfix(ln);
            }

            if !self.rustfix_only_machine_applicable {
                self.rustfix_only_machine_applicable =
                    config.parse_rustfix_only_machine_applicable(ln);
            }

            if self.assembly_output.is_none() {
                self.assembly_output = config.parse_assembly_output(ln);
            }
        });

        // Propagate selected harness env vars into the test's environment,
        // unless the file already set them via `exec-env`.
        for key in &["RUST_TEST_NOCAPTURE", "RUST_TEST_THREADS"] {
            if let Ok(val) = env::var(key) {
                if self.exec_env.iter().find(|&&(ref x, _)| x == key).is_none() {
                    self.exec_env.push(((*key).to_owned(), val))
                }
            }
        }

        // Fall back to the config-wide edition when the file has none.
        if let (Some(edition), false) = (&config.edition, has_edition) {
            self.compile_flags.push(format!("--edition={}", edition));
        }
    }
}
+
/// Scans the header comments of `testfile`, invoking `it` once for each
/// directive line (the text after the leading `//`, left-trimmed).
///
/// Lines of the form `//[foo] ...` are revision-specific: `it` is only
/// invoked for them when `cfg` is `Some("foo")`. Scanning stops at the
/// first line starting with `fn` or `mod`, on the assumption that all
/// directives appear before the first item in the file. Directories are
/// silently skipped.
///
/// # Panics
///
/// Panics if `testfile` cannot be opened, a line is not valid UTF-8, or a
/// revision directive is malformed (`//[` without a closing `]`).
fn iter_header(testfile: &Path, cfg: Option<&str>, it: &mut dyn FnMut(&str)) {
    if testfile.is_dir() {
        return;
    }
    let rdr = BufReader::new(File::open(testfile).unwrap());
    for ln in rdr.lines() {
        // Assume that any directives will be found before the first
        // module or function. This doesn't seem to be an optimization
        // with a warm page cache. Maybe with a cold one.
        let ln = ln.unwrap();
        let ln = ln.trim();
        if ln.starts_with("fn") || ln.starts_with("mod") {
            return;
        } else if ln.starts_with("//[") {
            // A comment like `//[foo]` is specific to revision `foo`.
            if let Some(close_brace) = ln.find(']') {
                let lncfg = &ln[3..close_brace];
                if cfg == Some(lncfg) {
                    it(ln[(close_brace + 1)..].trim_start());
                }
            } else {
                panic!("malformed condition directive: expected `//[foo]`, found `{}`",
                       ln)
            }
        } else if ln.starts_with("//") {
            it(ln[2..].trim_start());
        }
    }
}
+
impl Config {
    // Each `parse_*` helper below recognizes one header directive; the
    // name-value variants return `Some(value)` when the line matches and
    // `None` otherwise, the name-only variants return a bool.

    fn parse_error_pattern(&self, line: &str) -> Option<String> {
        self.parse_name_value_directive(line, "error-pattern")
    }

    fn parse_forbid_output(&self, line: &str) -> Option<String> {
        self.parse_name_value_directive(line, "forbid-output")
    }

    fn parse_aux_build(&self, line: &str) -> Option<String> {
        self.parse_name_value_directive(line, "aux-build")
            .map(|r| r.trim().to_string())
    }

    fn parse_compile_flags(&self, line: &str) -> Option<String> {
        self.parse_name_value_directive(line, "compile-flags")
    }

    fn parse_revisions(&self, line: &str) -> Option<Vec<String>> {
        self.parse_name_value_directive(line, "revisions")
            .map(|r| r.split_whitespace().map(|t| t.to_string()).collect())
    }

    fn parse_run_flags(&self, line: &str) -> Option<String> {
        self.parse_name_value_directive(line, "run-flags")
    }

    fn parse_check_line(&self, line: &str) -> Option<String> {
        self.parse_name_value_directive(line, "check")
    }

    fn parse_force_host(&self, line: &str) -> bool {
        self.parse_name_directive(line, "force-host")
    }

    fn parse_build_aux_docs(&self, line: &str) -> bool {
        self.parse_name_directive(line, "build-aux-docs")
    }

    fn parse_check_stdout(&self, line: &str) -> bool {
        self.parse_name_directive(line, "check-stdout")
    }

    fn parse_no_prefer_dynamic(&self, line: &str) -> bool {
        self.parse_name_directive(line, "no-prefer-dynamic")
    }

    fn parse_pretty_expanded(&self, line: &str) -> bool {
        self.parse_name_directive(line, "pretty-expanded")
    }

    fn parse_pretty_mode(&self, line: &str) -> Option<String> {
        self.parse_name_value_directive(line, "pretty-mode")
    }

    fn parse_pretty_compare_only(&self, line: &str) -> bool {
        self.parse_name_directive(line, "pretty-compare-only")
    }

    fn parse_must_compile_successfully(&self, line: &str) -> bool {
        self.parse_name_directive(line, "must-compile-successfully")
    }

    fn parse_check_test_line_numbers_match(&self, line: &str) -> bool {
        self.parse_name_directive(line, "check-test-line-numbers-match")
    }

    fn parse_run_pass(&self, line: &str) -> bool {
        self.parse_name_directive(line, "run-pass")
    }

    fn parse_assembly_output(&self, line: &str) -> Option<String> {
        self.parse_name_value_directive(line, "assembly-output")
            .map(|r| r.trim().to_string())
    }

    // Parses `exec-env`/`rustc-env` style directives into a (key, value)
    // pair; a bare `FOO` (no `=`) yields an empty value.
    fn parse_env(&self, line: &str, name: &str) -> Option<(String, String)> {
        self.parse_name_value_directive(line, name).map(|nv| {
            // nv is either FOO or FOO=BAR
            let mut strs: Vec<String> = nv.splitn(2, '=')
                .map(str::to_owned)
                .collect();

            match strs.len() {
                1 => (strs.pop().unwrap(), "".to_owned()),
                2 => {
                    let end = strs.pop().unwrap();
                    (strs.pop().unwrap(), end)
                }
                n => panic!("Expected 1 or 2 strings, not {}", n),
            }
        })
    }

    // `pp-exact:<file>` names a reference file; a bare `pp-exact` means
    // the test file itself is the reference.
    fn parse_pp_exact(&self, line: &str, testfile: &Path) -> Option<PathBuf> {
        if let Some(s) = self.parse_name_value_directive(line, "pp-exact") {
            Some(PathBuf::from(&s))
        } else if self.parse_name_directive(line, "pp-exact") {
            testfile.file_name().map(PathBuf::from)
        } else {
            None
        }
    }

    // Parses `normalize-stdout-*`/`normalize-stderr-*` rules of the form
    // `<prefix>-<cfg>: "<from>" -> "<to>"` into a (from, to) pair.
    fn parse_custom_normalization(&self, mut line: &str, prefix: &str) -> Option<(String, String)> {
        if self.parse_cfg_name_directive(line, prefix) {
            let from = match parse_normalization_string(&mut line) {
                Some(s) => s,
                None => return None,
            };
            let to = match parse_normalization_string(&mut line) {
                Some(s) => s,
                None => return None,
            };
            Some((from, to))
        } else {
            None
        }
    }

    /// Parses a name-value directive which contains config-specific information, e.g. `ignore-x86`
    /// or `normalize-stderr-32bit`. Returns `true` if the line matches it.
    fn parse_cfg_name_directive(&self, line: &str, prefix: &str) -> bool {
        if line.starts_with(prefix) && line.as_bytes().get(prefix.len()) == Some(&b'-') {
            let name = line[prefix.len()+1 ..].split(&[':', ' '][..]).next().unwrap();

            // The name matches when it names any facet of the current
            // configuration: OS, arch, pointer width, stage, env, full
            // target, debugger/pretty mode, or cross-compilation.
            name == "test" ||
            util::matches_os(&self.target, name) ||             // target
            name == util::get_arch(&self.target) ||             // architecture
            name == util::get_pointer_width(&self.target) ||    // pointer width
            name == self.stage_id.split('-').next().unwrap() || // stage
            Some(name) == util::get_env(&self.target) ||        // env
            self.target.ends_with(name) ||                      // target and env
            match self.mode {
                common::DebugInfoGdb => name == "gdb",
                common::DebugInfoLldb => name == "lldb",
                common::Pretty => name == "pretty",
                _ => false,
            } ||
            (self.target != self.host && name == "cross-compile")
        } else {
            false
        }
    }

    fn has_cfg_prefix(&self, line: &str, prefix: &str) -> bool {
        // returns whether this line contains this prefix or not. For prefix
        // "ignore", returns true if line says "ignore-x86_64", "ignore-arch",
        // "ignore-android" etc.
        line.starts_with(prefix) && line.as_bytes().get(prefix.len()) == Some(&b'-')
    }

    fn parse_name_directive(&self, line: &str, directive: &str) -> bool {
        // Ensure the directive is a whole word. Do not match "ignore-x86" when
        // the line says "ignore-x86_64".
        line.starts_with(directive) && match line.as_bytes().get(directive.len()) {
            None | Some(&b' ') | Some(&b':') => true,
            _ => false
        }
    }

    /// Parses `directive:<value>` lines, expanding `{{cwd}}`-style
    /// placeholders in the value.
    pub fn parse_name_value_directive(&self, line: &str, directive: &str) -> Option<String> {
        let colon = directive.len();
        if line.starts_with(directive) && line.as_bytes().get(colon) == Some(&b':') {
            let value = line[(colon + 1) ..].to_owned();
            debug!("{}: {}", directive, value);
            Some(expand_variables(value, self))
        } else {
            None
        }
    }

    /// Walks up from `src_base` looking for a directory containing
    /// `src/etc/lldb_batchmode.py`, i.e. a rust source checkout root.
    pub fn find_rust_src_root(&self) -> Option<PathBuf> {
        let mut path = self.src_base.clone();
        let path_postfix = Path::new("src/etc/lldb_batchmode.py");

        while path.pop() {
            if path.join(&path_postfix).is_file() {
                return Some(path);
            }
        }

        None
    }

    fn parse_run_rustfix(&self, line: &str) -> bool {
        self.parse_name_directive(line, "run-rustfix")
    }

    fn parse_rustfix_only_machine_applicable(&self, line: &str) -> bool {
        self.parse_name_directive(line, "rustfix-only-machine-applicable")
    }

    fn parse_edition(&self, line: &str) -> Option<String> {
        self.parse_name_value_directive(line, "edition")
    }
}
+
/// Converts an LLDB version string such as `"350"` into an integer for
/// numeric comparison.
///
/// # Panics
///
/// Panics when the string is not a plain decimal integer.
pub fn lldb_version_to_int(version_string: &str) -> isize {
    let error_string = format!(
        "Encountered LLDB version string with unexpected format: {}",
        version_string
    );
    let parsed: Result<isize, _> = version_string.parse();
    parsed.expect(&error_string)
}
+
+fn expand_variables(mut value: String, config: &Config) -> String {
+ const CWD: &'static str = "{{cwd}}";
+ const SRC_BASE: &'static str = "{{src-base}}";
+ const BUILD_BASE: &'static str = "{{build-base}}";
+
+ if value.contains(CWD) {
+ let cwd = env::current_dir().unwrap();
+ value = value.replace(CWD, &cwd.to_string_lossy());
+ }
+
+ if value.contains(SRC_BASE) {
+ value = value.replace(SRC_BASE, &config.src_base.to_string_lossy());
+ }
+
+ if value.contains(BUILD_BASE) {
+ value = value.replace(BUILD_BASE, &config.build_base.to_string_lossy());
+ }
+
+ value
+}
+
+/// Finds the next quoted string `"..."` in `line`, and extract the content from it. Move the `line`
+/// variable after the end of the quoted string.
+///
+/// # Examples
+///
+/// ```ignore
+/// let mut s = "normalize-stderr-32bit: \"something (32 bits)\" -> \"something ($WORD bits)\".";
+/// let first = parse_normalization_string(&mut s);
+/// assert_eq!(first, Some("something (32 bits)".to_owned()));
+/// assert_eq!(s, " -> \"something ($WORD bits)\".");
+/// ```
/// Extracts the content of the next double-quoted string in `line` and
/// advances `line` past its closing quote. Returns `None` when no complete
/// quoted string remains.
///
/// # Examples
///
/// ```ignore
/// let mut s = "normalize-stderr-32bit: \"something (32 bits)\" -> \"something ($WORD bits)\".";
/// let first = parse_normalization_string(&mut s);
/// assert_eq!(first, Some("something (32 bits)".to_owned()));
/// assert_eq!(s, " -> \"something ($WORD bits)\".");
/// ```
fn parse_normalization_string(line: &mut &str) -> Option<String> {
    // FIXME support escapes in strings.
    let open = line.find('"')?;
    let rest = &line[open + 1..];
    let close = rest.find('"')?;
    let result = rest[..close].to_owned();
    *line = &rest[close + 1..];
    Some(result)
}
diff --git a/vendor/compiletest_rs/src/json.rs b/vendor/compiletest_rs/src/json.rs
new file mode 100644
index 000000000..6f9e2ff10
--- /dev/null
+++ b/vendor/compiletest_rs/src/json.rs
@@ -0,0 +1,263 @@
+// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use errors::{Error, ErrorKind};
+use serde_json;
+use std::str::FromStr;
+use std::path::Path;
+use runtest::ProcRes;
+
+// These structs are a subset of the ones found in
+// `syntax::json`.
+
/// A compiler diagnostic deserialized from `--error-format=json` output.
#[derive(Deserialize)]
struct Diagnostic {
    message: String,
    // Error code, e.g. E0308, when the diagnostic has one.
    code: Option<DiagnosticCode>,
    // Severity as emitted by the compiler ("error", "warning", ...).
    level: String,
    spans: Vec<DiagnosticSpan>,
    // Sub-diagnostics (notes, helps, suggestions).
    children: Vec<Diagnostic>,
    // Human-readable rendering of the whole diagnostic, if provided.
    rendered: Option<String>,
}

/// A source span attached to a diagnostic, with optional macro-expansion
/// backtrace information.
#[derive(Deserialize, Clone)]
struct DiagnosticSpan {
    file_name: String,
    // 1-based line/column coordinates.
    line_start: usize,
    line_end: usize,
    column_start: usize,
    column_end: usize,
    is_primary: bool,
    label: Option<String>,
    suggested_replacement: Option<String>,
    // Present when this span originates from a macro expansion.
    expansion: Option<Box<DiagnosticSpanMacroExpansion>>,
}

impl DiagnosticSpan {
    /// Returns the deepest source span in the macro call stack with a given file name.
    /// This is either the supplied span, or the span for some macro callsite that expanded to it.
    fn first_callsite_in_file(&self, file_name: &str) -> &DiagnosticSpan {
        if self.file_name == file_name {
            self
        } else {
            // Recurse into the expansion chain; fall back to `self` when no
            // frame lies in `file_name`.
            self.expansion
                .as_ref()
                .map(|origin| origin.span.first_callsite_in_file(file_name))
                .unwrap_or(self)
        }
    }
}

#[derive(Deserialize, Clone)]
struct DiagnosticSpanMacroExpansion {
    /// span where macro was applied to generate this code
    span: DiagnosticSpan,

    /// name of macro that was applied (e.g., "foo!" or "#[derive(Eq)]")
    macro_decl_name: String,
}

#[derive(Deserialize, Clone)]
struct DiagnosticCode {
    /// The code itself.
    code: String,
    /// An explanation for the code.
    explanation: Option<String>,
}
+
/// Collects the human-readable (`rendered`) form of every JSON diagnostic
/// in `output`, passing non-JSON lines (such as ICE backtraces) through
/// unchanged. Aborts via `proc_res.fatal` if a JSON-looking line fails to
/// deserialize.
pub fn extract_rendered(output: &str, proc_res: &ProcRes) -> String {
    output
        .lines()
        .filter_map(|line| {
            if line.starts_with('{') {
                match serde_json::from_str::<Diagnostic>(line) {
                    Ok(diagnostic) => diagnostic.rendered,
                    Err(error) => {
                        // `fatal` diverges, so no value is produced here.
                        proc_res.fatal(Some(&format!(
                            "failed to decode compiler output as json: \
                             `{}`\nline: {}\noutput: {}",
                            error, line, output
                        )));
                    }
                }
            } else {
                // preserve non-JSON lines, such as ICEs
                Some(format!("{}\n", line))
            }
        })
        .collect()
}
+
+pub fn parse_output(file_name: &str, output: &str, proc_res: &ProcRes) -> Vec<Error> {
+ output.lines()
+ .flat_map(|line| parse_line(file_name, line, output, proc_res))
+ .collect()
+}
+
/// Parses one line of compiler output. Returns the expected errors encoded
/// by the JSON diagnostic on that line, or an empty vec for non-JSON lines.
/// Aborts via `proc_res.fatal` when a JSON-looking line fails to parse.
fn parse_line(file_name: &str, line: &str, output: &str, proc_res: &ProcRes) -> Vec<Error> {
    // The compiler sometimes intermingles non-JSON stuff into the
    // output. This hack just skips over such lines. Yuck.
    if line.starts_with('{') {
        match serde_json::from_str::<Diagnostic>(line) {
            Ok(diagnostic) => {
                let mut expected_errors = vec![];
                push_expected_errors(&mut expected_errors, &diagnostic, &[], file_name);
                expected_errors
            }
            Err(error) => {
                proc_res.fatal(Some(&format!("failed to decode compiler output as json: \
                                              `{}`\noutput: {}\nline: {}",
                                             error,
                                             line,
                                             output)));
            }
        }
    } else {
        vec![]
    }
}
+
/// Recursively flattens one JSON `Diagnostic` (and its children) into
/// `expected_errors`, keeping only spans that resolve into `file_name`.
/// `default_spans` supplies the parent's primary spans for sub-diagnostics
/// that carry no span of their own.
fn push_expected_errors(expected_errors: &mut Vec<Error>,
                        diagnostic: &Diagnostic,
                        default_spans: &[&DiagnosticSpan],
                        file_name: &str) {
    // In case of macro expansions, we need to get the span of the callsite
    let spans_info_in_this_file: Vec<_> = diagnostic
        .spans
        .iter()
        .map(|span| (span.is_primary, span.first_callsite_in_file(file_name)))
        .filter(|(_, span)| Path::new(&span.file_name) == Path::new(&file_name))
        .collect();

    let spans_in_this_file: Vec<_> = spans_info_in_this_file.iter()
        .map(|(_, span)| span)
        .collect();

    let primary_spans: Vec<_> = spans_info_in_this_file.iter()
        .filter(|(is_primary, _)| *is_primary)
        .map(|(_, span)| span)
        .take(1) // sometimes we have more than one showing up in the json; pick first
        .cloned()
        .collect();
    let primary_spans = if primary_spans.is_empty() {
        // subdiagnostics often don't have a span of their own;
        // inherit the span from the parent in that case
        default_spans
    } else {
        &primary_spans
    };

    // We break the output into multiple lines, and then append the
    // [E123] to every line in the output. This may be overkill. The
    // intention was to match existing tests that do things like "//|
    // found `i32` [E123]" and expect to match that somewhere, and yet
    // also ensure that `//~ ERROR E123` *always* works. The
    // assumption is that these multi-line error messages are on their
    // way out anyhow.
    let with_code = |span: &DiagnosticSpan, text: &str| {
        match diagnostic.code {
            Some(ref code) =>
                // FIXME(#33000) -- it'd be better to use a dedicated
                // UI harness than to include the line/col number like
                // this, but some current tests rely on it.
                //
                // Note: Do NOT include the filename. These can easily
                // cause false matches where the expected message
                // appears in the filename, and hence the message
                // changes but the test still passes.
                format!("{}:{}: {}:{}: {} [{}]",
                        span.line_start, span.column_start,
                        span.line_end, span.column_end,
                        text, code.code.clone()),
            None =>
                // FIXME(#33000) -- it'd be better to use a dedicated UI harness
                format!("{}:{}: {}:{}: {}",
                        span.line_start, span.column_start,
                        span.line_end, span.column_end,
                        text),
        }
    };

    // Convert multi-line messages into multiple expected
    // errors. We expect to replace these with something
    // more structured shortly anyhow.
    let mut message_lines = diagnostic.message.lines();
    if let Some(first_line) = message_lines.next() {
        for span in primary_spans {
            let msg = with_code(span, first_line);
            let kind = ErrorKind::from_str(&diagnostic.level).ok();
            expected_errors.push(Error {
                line_num: span.line_start,
                kind,
                msg,
            });
        }
    }
    // Continuation lines carry no kind of their own.
    for next_line in message_lines {
        for span in primary_spans {
            expected_errors.push(Error {
                line_num: span.line_start,
                kind: None,
                msg: with_code(span, next_line),
            });
        }
    }

    // If the message has a suggestion, register that.
    for span in primary_spans {
        if let Some(ref suggested_replacement) = span.suggested_replacement {
            for (index, line) in suggested_replacement.lines().enumerate() {
                expected_errors.push(Error {
                    line_num: span.line_start + index,
                    kind: Some(ErrorKind::Suggestion),
                    msg: line.to_string(),
                });
            }
        }
    }

    // Add notes for the backtrace
    for span in primary_spans {
        for frame in &span.expansion {
            push_backtrace(expected_errors, frame, file_name);
        }
    }

    // Add notes for any labels that appear in the message.
    for span in spans_in_this_file.iter()
                                  .filter(|span| span.label.is_some()) {
        expected_errors.push(Error {
            line_num: span.line_start,
            kind: Some(ErrorKind::Note),
            msg: span.label.clone().unwrap(),
        });
    }

    // Flatten out the children.
    for child in &diagnostic.children {
        push_expected_errors(expected_errors, child, primary_spans, file_name);
    }
}
+
+fn push_backtrace(expected_errors: &mut Vec<Error>,
+ expansion: &DiagnosticSpanMacroExpansion,
+ file_name: &str) {
+ if Path::new(&expansion.span.file_name) == Path::new(&file_name) {
+ expected_errors.push(Error {
+ line_num: expansion.span.line_start,
+ kind: Some(ErrorKind::Note),
+ msg: format!("in this expansion of {}", expansion.macro_decl_name),
+ });
+ }
+
+ for previous_expansion in &expansion.span.expansion {
+ push_backtrace(expected_errors, previous_expansion, file_name);
+ }
+}
diff --git a/vendor/compiletest_rs/src/lib.rs b/vendor/compiletest_rs/src/lib.rs
new file mode 100644
index 000000000..edfc0208d
--- /dev/null
+++ b/vendor/compiletest_rs/src/lib.rs
@@ -0,0 +1,458 @@
+// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![crate_type = "lib"]
+#![cfg_attr(feature = "rustc", feature(rustc_private))]
+#![cfg_attr(feature = "rustc", feature(test))]
+#![deny(unused_imports)]
+
+#[cfg(feature = "rustc")]
+extern crate rustc_session;
+
+#[cfg(unix)]
+extern crate libc;
+#[cfg(feature = "rustc")]
+extern crate test;
+#[cfg(not(feature = "rustc"))]
+extern crate tester as test;
+
+#[cfg(feature = "tmp")]
+extern crate tempfile;
+
+#[macro_use]
+extern crate log;
+extern crate diff;
+extern crate filetime;
+extern crate regex;
+extern crate serde_json;
+#[macro_use]
+extern crate serde_derive;
+extern crate rustfix;
+
+use common::{DebugInfoGdb, DebugInfoLldb, Pretty};
+use common::{Mode, TestPaths};
+use std::env;
+use std::ffi::OsString;
+use std::fs;
+use std::io;
+use std::path::{Path, PathBuf};
+
+use self::header::EarlyProps;
+
+pub mod common;
+pub mod errors;
+pub mod header;
+mod json;
+mod read2;
+pub mod runtest;
+pub mod uidiff;
+pub mod util;
+
+pub use common::Config;
+
+pub fn run_tests(config: &Config) {
+ if config.target.contains("android") {
+ if let DebugInfoGdb = config.mode {
+ println!(
+ "{} debug-info test uses tcp 5039 port.\
+ please reserve it",
+ config.target
+ );
+ }
+
+ // android debug-info test uses remote debugger
+ // so, we test 1 thread at once.
+ // also trying to isolate problems with adb_run_wrapper.sh ilooping
+ env::set_var("RUST_TEST_THREADS", "1");
+ }
+
+ if let DebugInfoLldb = config.mode {
+ // Some older versions of LLDB seem to have problems with multiple
+ // instances running in parallel, so only run one test task at a
+ // time.
+ env::set_var("RUST_TEST_TASKS", "1");
+ }
+
+ // If we want to collect rustfix coverage information,
+ // we first make sure that the coverage file does not exist.
+ // It will be created later on.
+ if config.rustfix_coverage {
+ let mut coverage_file_path = config.build_base.clone();
+ coverage_file_path.push("rustfix_missing_coverage.txt");
+ if coverage_file_path.exists() {
+ if let Err(e) = fs::remove_file(&coverage_file_path) {
+ panic!(
+ "Could not delete {} due to {}",
+ coverage_file_path.display(),
+ e
+ )
+ }
+ }
+ }
+ let opts = test_opts(config);
+ let tests = make_tests(config);
+ // sadly osx needs some file descriptor limits raised for running tests in
+ // parallel (especially when we have lots and lots of child processes).
+ // For context, see #8904
+ // unsafe { raise_fd_limit::raise_fd_limit(); }
+ // Prevent issue #21352 UAC blocking .exe containing 'patch' etc. on Windows
+ // If #11207 is resolved (adding manifest to .exe) this becomes unnecessary
+ env::set_var("__COMPAT_LAYER", "RunAsInvoker");
+ let res = test::run_tests_console(&opts, tests.into_iter().collect());
+ match res {
+ Ok(true) => {}
+ Ok(false) => panic!("Some tests failed"),
+ Err(e) => {
+ println!("I/O failure during tests: {:?}", e);
+ }
+ }
+}
+
+pub fn test_opts(config: &Config) -> test::TestOpts {
+ test::TestOpts {
+ filters: config.filters.clone(),
+ filter_exact: config.filter_exact,
+ exclude_should_panic: false,
+ force_run_in_process: false,
+ run_ignored: if config.run_ignored {
+ test::RunIgnored::Yes
+ } else {
+ test::RunIgnored::No
+ },
+ format: if config.quiet {
+ test::OutputFormat::Terse
+ } else {
+ test::OutputFormat::Pretty
+ },
+ logfile: config.logfile.clone(),
+ run_tests: true,
+ bench_benchmarks: true,
+ nocapture: match env::var("RUST_TEST_NOCAPTURE") {
+ Ok(val) => &val != "0",
+ Err(_) => false,
+ },
+ color: test::AutoColor,
+ test_threads: None,
+ skip: vec![],
+ list: false,
+ options: test::Options::new(),
+ time_options: None,
+ #[cfg(feature = "rustc")]
+ shuffle: false,
+ #[cfg(feature = "rustc")]
+ shuffle_seed: None,
+ }
+}
+
+pub fn make_tests(config: &Config) -> Vec<test::TestDescAndFn> {
+ debug!("making tests from {:?}", config.src_base.display());
+ let mut tests = Vec::new();
+ collect_tests_from_dir(
+ config,
+ &config.src_base,
+ &config.src_base,
+ &PathBuf::new(),
+ &mut tests,
+ )
+ .unwrap();
+ tests
+}
+
+fn collect_tests_from_dir(
+ config: &Config,
+ base: &Path,
+ dir: &Path,
+ relative_dir_path: &Path,
+ tests: &mut Vec<test::TestDescAndFn>,
+) -> io::Result<()> {
+ // Ignore directories that contain a file
+ // `compiletest-ignore-dir`.
+ for file in fs::read_dir(dir)? {
+ let file = file?;
+ let name = file.file_name();
+ if name == *"compiletest-ignore-dir" {
+ return Ok(());
+ }
+ if name == *"Makefile" && config.mode == Mode::RunMake {
+ let paths = TestPaths {
+ file: dir.to_path_buf(),
+ base: base.to_path_buf(),
+ relative_dir: relative_dir_path.parent().unwrap().to_path_buf(),
+ };
+ tests.push(make_test(config, &paths));
+ return Ok(());
+ }
+ }
+
+ // If we find a test foo/bar.rs, we have to build the
+ // output directory `$build/foo` so we can write
+ // `$build/foo/bar` into it. We do this *now* in this
+ // sequential loop because otherwise, if we do it in the
+ // tests themselves, they race for the privilege of
+ // creating the directories and sometimes fail randomly.
+ let build_dir = config.build_base.join(&relative_dir_path);
+ fs::create_dir_all(&build_dir).unwrap();
+
+ // Add each `.rs` file as a test, and recurse further on any
+ // subdirectories we find, except for `aux` directories.
+ let dirs = fs::read_dir(dir)?;
+ for file in dirs {
+ let file = file?;
+ let file_path = file.path();
+ let file_name = file.file_name();
+ if is_test(&file_name) {
+ debug!("found test file: {:?}", file_path.display());
+ // output directory `$build/foo` so we can write
+ // `$build/foo/bar` into it. We do this *now* in this
+ // sequential loop because otherwise, if we do it in the
+ // tests themselves, they race for the privilege of
+ // creating the directories and sometimes fail randomly.
+ let build_dir = config.build_base.join(&relative_dir_path);
+ fs::create_dir_all(&build_dir).unwrap();
+
+ let paths = TestPaths {
+ file: file_path,
+ base: base.to_path_buf(),
+ relative_dir: relative_dir_path.to_path_buf(),
+ };
+ tests.push(make_test(config, &paths))
+ } else if file_path.is_dir() {
+ let relative_file_path = relative_dir_path.join(file.file_name());
+ if &file_name == "auxiliary" {
+ // `aux` directories contain other crates used for
+ // cross-crate tests. Don't search them for tests, but
+ // do create a directory in the build dir for them,
+ // since we will dump intermediate output in there
+ // sometimes.
+ let build_dir = config.build_base.join(&relative_file_path);
+ fs::create_dir_all(&build_dir).unwrap();
+ } else {
+ debug!("found directory: {:?}", file_path.display());
+ collect_tests_from_dir(config, base, &file_path, &relative_file_path, tests)?;
+ }
+ } else {
+ debug!("found other file/directory: {:?}", file_path.display());
+ }
+ }
+ Ok(())
+}
+
+pub fn is_test(file_name: &OsString) -> bool {
+ let file_name = file_name.to_str().unwrap();
+
+ if !file_name.ends_with(".rs") {
+ return false;
+ }
+
+ // `.`, `#`, and `~` are common temp-file prefixes.
+ let invalid_prefixes = &[".", "#", "~"];
+ !invalid_prefixes.iter().any(|p| file_name.starts_with(p))
+}
+
+pub fn make_test(config: &Config, testpaths: &TestPaths) -> test::TestDescAndFn {
+ let early_props = EarlyProps::from_file(config, &testpaths.file);
+
+ // The `should-fail` annotation doesn't apply to pretty tests,
+ // since we run the pretty printer across all tests by default.
+ // If desired, we could add a `should-fail-pretty` annotation.
+ let should_panic = match config.mode {
+ Pretty => test::ShouldPanic::No,
+ _ => {
+ if early_props.should_fail {
+ test::ShouldPanic::Yes
+ } else {
+ test::ShouldPanic::No
+ }
+ }
+ };
+
+ test::TestDescAndFn {
+ desc: test::TestDesc {
+ name: make_test_name(config, testpaths),
+ ignore: early_props.ignore,
+ should_panic: should_panic,
+ #[cfg(not(feature = "rustc"))]
+ allow_fail: false,
+ #[cfg(feature = "rustc")]
+ compile_fail: false,
+ #[cfg(feature = "rustc")]
+ no_run: false,
+ test_type: test::TestType::IntegrationTest,
+ #[cfg(feature = "rustc")]
+ ignore_message: None,
+ },
+ testfn: make_test_closure(config, testpaths),
+ }
+}
+
+fn stamp(config: &Config, testpaths: &TestPaths) -> PathBuf {
+ let stamp_name = format!(
+ "{}-{}.stamp",
+ testpaths.file.file_name().unwrap().to_str().unwrap(),
+ config.stage_id
+ );
+ config
+ .build_base
+ .canonicalize()
+ .unwrap_or_else(|_| config.build_base.clone())
+ .join(stamp_name)
+}
+
+pub fn make_test_name(config: &Config, testpaths: &TestPaths) -> test::TestName {
+ // Convert a complete path to something like
+ //
+ // run-pass/foo/bar/baz.rs
+ let path = PathBuf::from(config.src_base.file_name().unwrap())
+ .join(&testpaths.relative_dir)
+ .join(&testpaths.file.file_name().unwrap());
+ test::DynTestName(format!("[{}] {}", config.mode, path.display()))
+}
+
+pub fn make_test_closure(config: &Config, testpaths: &TestPaths) -> test::TestFn {
+ let config = config.clone();
+ let testpaths = testpaths.clone();
+ test::DynTestFn(Box::new(move || {
+ let config = config.clone(); // FIXME: why is this needed?
+ runtest::run(config, &testpaths)
+ }))
+}
+
+fn extract_gdb_version(full_version_line: &str) -> Option<u32> {
+ let full_version_line = full_version_line.trim();
+
+ // GDB versions look like this: "major.minor.patch?.yyyymmdd?", with both
+ // of the ? sections being optional
+
+ // We will parse up to 3 digits for minor and patch, ignoring the date
+ // We limit major to 1 digit, otherwise, on openSUSE, we parse the openSUSE version
+
+ // don't start parsing in the middle of a number
+ let mut prev_was_digit = false;
+ for (pos, c) in full_version_line.char_indices() {
+ if prev_was_digit || !c.is_digit(10) {
+ prev_was_digit = c.is_digit(10);
+ continue;
+ }
+
+ prev_was_digit = true;
+
+ let line = &full_version_line[pos..];
+
+ let next_split = match line.find(|c: char| !c.is_digit(10)) {
+ Some(idx) => idx,
+ None => continue, // no minor version
+ };
+
+ if line.as_bytes()[next_split] != b'.' {
+ continue; // no minor version
+ }
+
+ let major = &line[..next_split];
+ let line = &line[next_split + 1..];
+
+ let (minor, patch) = match line.find(|c: char| !c.is_digit(10)) {
+ Some(idx) => {
+ if line.as_bytes()[idx] == b'.' {
+ let patch = &line[idx + 1..];
+
+ let patch_len = patch
+ .find(|c: char| !c.is_digit(10))
+ .unwrap_or_else(|| patch.len());
+ let patch = &patch[..patch_len];
+ let patch = if patch_len > 3 || patch_len == 0 {
+ None
+ } else {
+ Some(patch)
+ };
+
+ (&line[..idx], patch)
+ } else {
+ (&line[..idx], None)
+ }
+ }
+ None => (line, None),
+ };
+
+ if major.len() != 1 || minor.is_empty() {
+ continue;
+ }
+
+ let major: u32 = major.parse().unwrap();
+ let minor: u32 = minor.parse().unwrap();
+ let patch: u32 = patch.unwrap_or("0").parse().unwrap();
+
+ return Some(((major * 1000) + minor) * 1000 + patch);
+ }
+
+ None
+}
+
+#[allow(dead_code)]
+fn extract_lldb_version(full_version_line: Option<String>) -> Option<String> {
+ // Extract the major LLDB version from the given version string.
+ // LLDB version strings are different for Apple and non-Apple platforms.
+ // At the moment, this function only supports the Apple variant, which looks
+ // like this:
+ //
+ // LLDB-179.5 (older versions)
+ // lldb-300.2.51 (new versions)
+ //
+ // We are only interested in the major version number, so this function
+ // will return `Some("179")` and `Some("300")` respectively.
+
+ if let Some(ref full_version_line) = full_version_line {
+ if !full_version_line.trim().is_empty() {
+ let full_version_line = full_version_line.trim();
+
+ for (pos, l) in full_version_line.char_indices() {
+ if l != 'l' && l != 'L' {
+ continue;
+ }
+ if pos + 5 >= full_version_line.len() {
+ continue;
+ }
+ let l = full_version_line[pos + 1..].chars().next().unwrap();
+ if l != 'l' && l != 'L' {
+ continue;
+ }
+ let d = full_version_line[pos + 2..].chars().next().unwrap();
+ if d != 'd' && d != 'D' {
+ continue;
+ }
+ let b = full_version_line[pos + 3..].chars().next().unwrap();
+ if b != 'b' && b != 'B' {
+ continue;
+ }
+ let dash = full_version_line[pos + 4..].chars().next().unwrap();
+ if dash != '-' {
+ continue;
+ }
+
+ let vers = full_version_line[pos + 5..]
+ .chars()
+ .take_while(|c| c.is_digit(10))
+ .collect::<String>();
+ if !vers.is_empty() {
+ return Some(vers);
+ }
+ }
+ println!(
+ "Could not extract LLDB version from line '{}'",
+ full_version_line
+ );
+ }
+ }
+ None
+}
+
+#[allow(dead_code)]
+fn is_blacklisted_lldb_version(version: &str) -> bool {
+ version == "350"
+}
diff --git a/vendor/compiletest_rs/src/raise_fd_limit.rs b/vendor/compiletest_rs/src/raise_fd_limit.rs
new file mode 100644
index 000000000..fcc5a727c
--- /dev/null
+++ b/vendor/compiletest_rs/src/raise_fd_limit.rs
@@ -0,0 +1,69 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+/// darwin_fd_limit exists to work around an issue where launchctl on macOS
+/// defaults the rlimit maxfiles to 256/unlimited. The default soft limit of 256
+/// ends up being far too low for our multithreaded scheduler testing, depending
+/// on the number of cores available.
+///
+/// This fixes issue #7772.
+#[cfg(any(target_os = "macos", target_os = "ios"))]
+#[allow(non_camel_case_types)]
+pub unsafe fn raise_fd_limit() {
+ use libc;
+ use std::cmp;
+ use std::io;
+ use std::mem::size_of_val;
+ use std::ptr::null_mut;
+
+ static CTL_KERN: libc::c_int = 1;
+ static KERN_MAXFILESPERPROC: libc::c_int = 29;
+
+ // The strategy here is to fetch the current resource limits, read the
+ // kern.maxfilesperproc sysctl value, and bump the soft resource limit for
+ // maxfiles up to the sysctl value.
+
+ // Fetch the kern.maxfilesperproc value
+ let mut mib: [libc::c_int; 2] = [CTL_KERN, KERN_MAXFILESPERPROC];
+ let mut maxfiles: libc::c_int = 0;
+ let mut size: libc::size_t = size_of_val(&maxfiles) as libc::size_t;
+ if libc::sysctl(&mut mib[0],
+ 2,
+ &mut maxfiles as *mut _ as *mut _,
+ &mut size,
+ null_mut(),
+ 0) != 0 {
+ let err = io::Error::last_os_error();
+ panic!("raise_fd_limit: error calling sysctl: {}", err);
+ }
+
+ // Fetch the current resource limits
+ let mut rlim = libc::rlimit {
+ rlim_cur: 0,
+ rlim_max: 0,
+ };
+ if libc::getrlimit(libc::RLIMIT_NOFILE, &mut rlim) != 0 {
+ let err = io::Error::last_os_error();
+ panic!("raise_fd_limit: error calling getrlimit: {}", err);
+ }
+
+ // Bump the soft limit to the smaller of kern.maxfilesperproc and the hard
+ // limit
+ rlim.rlim_cur = cmp::min(maxfiles as libc::rlim_t, rlim.rlim_max);
+
+ // Set our newly-increased resource limit
+ if libc::setrlimit(libc::RLIMIT_NOFILE, &rlim) != 0 {
+ let err = io::Error::last_os_error();
+ panic!("raise_fd_limit: error calling setrlimit: {}", err);
+ }
+}
+
+#[cfg(not(any(target_os = "macos", target_os = "ios")))]
+pub unsafe fn raise_fd_limit() {}
diff --git a/vendor/compiletest_rs/src/read2.rs b/vendor/compiletest_rs/src/read2.rs
new file mode 100644
index 000000000..664572521
--- /dev/null
+++ b/vendor/compiletest_rs/src/read2.rs
@@ -0,0 +1,208 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// FIXME: This is a complete copy of `cargo/src/cargo/util/read2.rs`
+// Consider unify the read2() in libstd, cargo and this to prevent further code duplication.
+
+pub use self::imp::read2;
+
+#[cfg(not(any(unix, windows)))]
+mod imp {
+ use std::io::{self, Read};
+ use std::process::{ChildStdout, ChildStderr};
+
+ pub fn read2(out_pipe: ChildStdout,
+ err_pipe: ChildStderr,
+ data: &mut FnMut(bool, &mut Vec<u8>, bool)) -> io::Result<()> {
+ let mut buffer = Vec::new();
+ out_pipe.read_to_end(&mut buffer)?;
+ data(true, &mut buffer, true);
+ buffer.clear();
+ err_pipe.read_to_end(&mut buffer)?;
+ data(false, &mut buffer, true);
+ Ok(())
+ }
+}
+
+#[cfg(unix)]
+mod imp {
+ use std::io::prelude::*;
+ use std::io;
+ use std::mem;
+ use std::os::unix::prelude::*;
+ use std::process::{ChildStdout, ChildStderr};
+ use libc;
+
+ pub fn read2(mut out_pipe: ChildStdout,
+ mut err_pipe: ChildStderr,
+ data: &mut dyn FnMut(bool, &mut Vec<u8>, bool)) -> io::Result<()> {
+ unsafe {
+ libc::fcntl(out_pipe.as_raw_fd(), libc::F_SETFL, libc::O_NONBLOCK);
+ libc::fcntl(err_pipe.as_raw_fd(), libc::F_SETFL, libc::O_NONBLOCK);
+ }
+
+ let mut out_done = false;
+ let mut err_done = false;
+ let mut out = Vec::new();
+ let mut err = Vec::new();
+
+ let mut fds: [libc::pollfd; 2] = unsafe { mem::zeroed() };
+ fds[0].fd = out_pipe.as_raw_fd();
+ fds[0].events = libc::POLLIN;
+ fds[1].fd = err_pipe.as_raw_fd();
+ fds[1].events = libc::POLLIN;
+ loop {
+ // wait for either pipe to become readable using `select`
+ let r = unsafe { libc::poll(fds.as_mut_ptr(), 2, -1) };
+ if r == -1 {
+ let err = io::Error::last_os_error();
+ if err.kind() == io::ErrorKind::Interrupted {
+ continue
+ }
+ return Err(err)
+ }
+
+ // Read as much as we can from each pipe, ignoring EWOULDBLOCK or
+ // EAGAIN. If we hit EOF, then this will happen because the underlying
+ // reader will return Ok(0), in which case we'll see `Ok` ourselves. In
+ // this case we flip the other fd back into blocking mode and read
+ // whatever's leftover on that file descriptor.
+ let handle = |res: io::Result<_>| {
+ match res {
+ Ok(_) => Ok(true),
+ Err(e) => {
+ if e.kind() == io::ErrorKind::WouldBlock {
+ Ok(false)
+ } else {
+ Err(e)
+ }
+ }
+ }
+ };
+ if !out_done && fds[0].revents != 0 && handle(out_pipe.read_to_end(&mut out))? {
+ out_done = true;
+ }
+ data(true, &mut out, out_done);
+ if !err_done && fds[1].revents != 0 && handle(err_pipe.read_to_end(&mut err))? {
+ err_done = true;
+ }
+ data(false, &mut err, err_done);
+
+ if out_done && err_done {
+ return Ok(())
+ }
+ }
+ }
+}
+
+#[cfg(windows)]
+mod imp {
+ extern crate miow;
+ extern crate winapi;
+
+ use std::io;
+ use std::os::windows::prelude::*;
+ use std::process::{ChildStdout, ChildStderr};
+ use std::slice;
+
+ use self::miow::iocp::{CompletionPort, CompletionStatus};
+ use self::miow::pipe::NamedPipe;
+ use self::miow::Overlapped;
+ use self::winapi::shared::winerror::ERROR_BROKEN_PIPE;
+
+ struct Pipe<'a> {
+ dst: &'a mut Vec<u8>,
+ overlapped: Overlapped,
+ pipe: NamedPipe,
+ done: bool,
+ }
+
+ pub fn read2(out_pipe: ChildStdout,
+ err_pipe: ChildStderr,
+ data: &mut FnMut(bool, &mut Vec<u8>, bool)) -> io::Result<()> {
+ let mut out = Vec::new();
+ let mut err = Vec::new();
+
+ let port = CompletionPort::new(1)?;
+ port.add_handle(0, &out_pipe)?;
+ port.add_handle(1, &err_pipe)?;
+
+ unsafe {
+ let mut out_pipe = Pipe::new(out_pipe, &mut out);
+ let mut err_pipe = Pipe::new(err_pipe, &mut err);
+
+ out_pipe.read()?;
+ err_pipe.read()?;
+
+ let mut status = [CompletionStatus::zero(), CompletionStatus::zero()];
+
+ while !out_pipe.done || !err_pipe.done {
+ for status in port.get_many(&mut status, None)? {
+ if status.token() == 0 {
+ out_pipe.complete(status);
+ data(true, out_pipe.dst, out_pipe.done);
+ out_pipe.read()?;
+ } else {
+ err_pipe.complete(status);
+ data(false, err_pipe.dst, err_pipe.done);
+ err_pipe.read()?;
+ }
+ }
+ }
+
+ Ok(())
+ }
+ }
+
+ impl<'a> Pipe<'a> {
+ unsafe fn new<P: IntoRawHandle>(p: P, dst: &'a mut Vec<u8>) -> Pipe<'a> {
+ Pipe {
+ dst: dst,
+ pipe: NamedPipe::from_raw_handle(p.into_raw_handle()),
+ overlapped: Overlapped::zero(),
+ done: false,
+ }
+ }
+
+ unsafe fn read(&mut self) -> io::Result<()> {
+ let dst = slice_to_end(self.dst);
+ match self.pipe.read_overlapped(dst, self.overlapped.raw()) {
+ Ok(_) => Ok(()),
+ Err(e) => {
+ if e.raw_os_error() == Some(ERROR_BROKEN_PIPE as i32) {
+ self.done = true;
+ Ok(())
+ } else {
+ Err(e)
+ }
+ }
+ }
+ }
+
+ unsafe fn complete(&mut self, status: &CompletionStatus) {
+ let prev = self.dst.len();
+ self.dst.set_len(prev + status.bytes_transferred() as usize);
+ if status.bytes_transferred() == 0 {
+ self.done = true;
+ }
+ }
+ }
+
+ unsafe fn slice_to_end(v: &mut Vec<u8>) -> &mut [u8] {
+ if v.capacity() == 0 {
+ v.reserve(16);
+ }
+ if v.capacity() == v.len() {
+ v.reserve(1);
+ }
+ slice::from_raw_parts_mut(v.as_mut_ptr().offset(v.len() as isize),
+ v.capacity() - v.len())
+ }
+}
diff --git a/vendor/compiletest_rs/src/runtest.rs b/vendor/compiletest_rs/src/runtest.rs
new file mode 100644
index 000000000..c52551488
--- /dev/null
+++ b/vendor/compiletest_rs/src/runtest.rs
@@ -0,0 +1,2894 @@
+// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use common::{Config, TestPaths};
+use common::{expected_output_path, UI_FIXED, UI_STDERR, UI_STDOUT};
+use common::{CompileFail, ParseFail, Pretty, RunFail, RunPass, RunPassValgrind};
+use common::{Codegen, DebugInfoLldb, DebugInfoGdb, Rustdoc, CodegenUnits};
+use common::{Incremental, RunMake, Ui, MirOpt, Assembly};
+use diff;
+use errors::{self, ErrorKind, Error};
+use filetime::FileTime;
+use json;
+use regex::Regex;
+use rustfix::{apply_suggestions, get_suggestions_from_json, Filter};
+use header::TestProps;
+use crate::util::{logv, PathBufExt};
+
+use std::collections::HashMap;
+use std::collections::HashSet;
+use std::env;
+use std::ffi::OsString;
+use std::fs::{self, File, create_dir_all, OpenOptions};
+use std::fmt;
+use std::io::prelude::*;
+use std::io::{self, BufReader};
+use std::path::{Path, PathBuf};
+use std::process::{Command, Output, ExitStatus, Stdio, Child};
+use std::str;
+
+use extract_gdb_version;
+
+/// The name of the environment variable that holds dynamic library locations.
+pub fn dylib_env_var() -> &'static str {
+ if cfg!(windows) {
+ "PATH"
+ } else if cfg!(target_os = "macos") {
+ "DYLD_LIBRARY_PATH"
+ } else if cfg!(target_os = "haiku") {
+ "LIBRARY_PATH"
+ } else {
+ "LD_LIBRARY_PATH"
+ }
+}
+
+pub fn run(config: Config, testpaths: &TestPaths) {
+ match &*config.target {
+
+ "arm-linux-androideabi" | "armv7-linux-androideabi" | "aarch64-linux-android" => {
+ if !config.adb_device_status {
+ panic!("android device not available");
+ }
+ }
+
+ _ => {
+ // android has its own gdb handling
+ if config.mode == DebugInfoGdb && config.gdb.is_none() {
+ panic!("gdb not available but debuginfo gdb debuginfo test requested");
+ }
+ }
+ }
+
+ if config.verbose {
+ // We're going to be dumping a lot of info. Start on a new line.
+ print!("\n\n");
+ }
+ debug!("running {:?}", testpaths.file.display());
+ let base_props = TestProps::from_file(&testpaths.file, None, &config);
+
+ let base_cx = TestCx { config: &config,
+ props: &base_props,
+ testpaths,
+ revision: None };
+ base_cx.init_all();
+
+ if base_props.revisions.is_empty() {
+ base_cx.run_revision()
+ } else {
+ for revision in &base_props.revisions {
+ let revision_props = TestProps::from_file(&testpaths.file,
+ Some(revision),
+ &config);
+ let rev_cx = TestCx {
+ config: &config,
+ props: &revision_props,
+ testpaths,
+ revision: Some(revision)
+ };
+ rev_cx.run_revision();
+ }
+ }
+
+ base_cx.complete_all();
+
+ File::create(::stamp(&config, testpaths)).unwrap();
+}
+
+struct TestCx<'test> {
+ config: &'test Config,
+ props: &'test TestProps,
+ testpaths: &'test TestPaths,
+ revision: Option<&'test str>
+}
+
+struct DebuggerCommands {
+ commands: Vec<String>,
+ check_lines: Vec<String>,
+ breakpoint_lines: Vec<usize>,
+}
+
+impl<'test> TestCx<'test> {
+ /// invoked once before any revisions have been processed
+ fn init_all(&self) {
+ assert!(self.revision.is_none(), "init_all invoked for a revision");
+ if let Incremental = self.config.mode {
+ self.init_incremental_test()
+ }
+ }
+
+ /// Code executed for each revision in turn (or, if there are no
+ /// revisions, exactly once, with revision == None).
+ fn run_revision(&self) {
+ match self.config.mode {
+ CompileFail |
+ ParseFail => self.run_cfail_test(),
+ RunFail => self.run_rfail_test(),
+ RunPass => self.run_rpass_test(),
+ RunPassValgrind => self.run_valgrind_test(),
+ Pretty => self.run_pretty_test(),
+ DebugInfoGdb => self.run_debuginfo_gdb_test(),
+ DebugInfoLldb => self.run_debuginfo_lldb_test(),
+ Codegen => self.run_codegen_test(),
+ Rustdoc => self.run_rustdoc_test(),
+ CodegenUnits => self.run_codegen_units_test(),
+ Incremental => self.run_incremental_test(),
+ RunMake => self.run_rmake_test(),
+ Ui => self.run_ui_test(),
+ MirOpt => self.run_mir_opt_test(),
+ Assembly => self.run_assembly_test(),
+ }
+ }
+
+ /// Invoked after all revisions have executed.
+ fn complete_all(&self) {
+ assert!(self.revision.is_none(), "init_all invoked for a revision");
+ }
+
+ fn run_cfail_test(&self) {
+ let proc_res = self.compile_test();
+
+ if self.props.must_compile_successfully {
+ if !proc_res.status.success() {
+ self.fatal_proc_rec(
+ "test compilation failed although it shouldn't!",
+ &proc_res);
+ }
+ } else {
+ if proc_res.status.success() {
+ self.fatal_proc_rec(
+ &format!("{} test compiled successfully!", self.config.mode)[..],
+ &proc_res);
+ }
+
+ self.check_correct_failure_status(&proc_res);
+ }
+
+ let output_to_check = self.get_output(&proc_res);
+ let expected_errors = errors::load_errors(&self.testpaths.file, self.revision);
+ if !expected_errors.is_empty() {
+ if !self.props.error_patterns.is_empty() {
+ self.fatal("both error pattern and expected errors specified");
+ }
+ self.check_expected_errors(expected_errors, &proc_res);
+ } else {
+ self.check_error_patterns(&output_to_check, &proc_res);
+ }
+
+ self.check_no_compiler_crash(&proc_res);
+ self.check_forbid_output(&output_to_check, &proc_res);
+ }
+
+ fn run_rfail_test(&self) {
+ let proc_res = self.compile_test();
+
+ if !proc_res.status.success() {
+ self.fatal_proc_rec("compilation failed!", &proc_res);
+ }
+
+ let proc_res = self.exec_compiled_test();
+
+ // The value our Makefile configures valgrind to return on failure
+ const VALGRIND_ERR: i32 = 100;
+ if proc_res.status.code() == Some(VALGRIND_ERR) {
+ self.fatal_proc_rec("run-fail test isn't valgrind-clean!", &proc_res);
+ }
+
+ let output_to_check = self.get_output(&proc_res);
+ self.check_correct_failure_status(&proc_res);
+ self.check_error_patterns(&output_to_check, &proc_res);
+ }
+
+ fn get_output(&self, proc_res: &ProcRes) -> String {
+ if self.props.check_stdout {
+ format!("{}{}", proc_res.stdout, proc_res.stderr)
+ } else {
+ proc_res.stderr.clone()
+ }
+ }
+
+ fn check_correct_failure_status(&self, proc_res: &ProcRes) {
+ // The value the rust runtime returns on failure
+ const RUST_ERR: i32 = 1;
+ if proc_res.status.code() != Some(RUST_ERR) {
+ self.fatal_proc_rec(
+ &format!("failure produced the wrong error: {}",
+ proc_res.status),
+ proc_res);
+ }
+ }
+
+ fn run_rpass_test(&self) {
+ let proc_res = self.compile_test();
+
+ if !proc_res.status.success() {
+ self.fatal_proc_rec("compilation failed!", &proc_res);
+ }
+
+ // FIXME(#41968): Move this check to tidy?
+ let expected_errors = errors::load_errors(&self.testpaths.file, self.revision);
+ assert!(expected_errors.is_empty(),
+ "run-pass tests with expected warnings should be moved to ui/");
+
+ let proc_res = self.exec_compiled_test();
+
+ if !proc_res.status.success() {
+ self.fatal_proc_rec("test run failed!", &proc_res);
+ }
+ }
+
+ fn run_valgrind_test(&self) {
+ assert!(self.revision.is_none(), "revisions not relevant here");
+
+ if self.config.valgrind_path.is_none() {
+ assert!(!self.config.force_valgrind);
+ return self.run_rpass_test();
+ }
+
+ let mut proc_res = self.compile_test();
+
+ if !proc_res.status.success() {
+ self.fatal_proc_rec("compilation failed!", &proc_res);
+ }
+
+ let mut new_config = self.config.clone();
+ new_config.runtool = new_config.valgrind_path.clone();
+ let new_cx = TestCx { config: &new_config, ..*self };
+ proc_res = new_cx.exec_compiled_test();
+
+ if !proc_res.status.success() {
+ self.fatal_proc_rec("test run failed!", &proc_res);
+ }
+ }
+
+ fn run_pretty_test(&self) {
+ if self.props.pp_exact.is_some() {
+ logv(self.config, "testing for exact pretty-printing".to_owned());
+ } else {
+ logv(self.config, "testing for converging pretty-printing".to_owned());
+ }
+
+ let rounds = match self.props.pp_exact { Some(_) => 1, None => 2 };
+
+ let mut src = String::new();
+ File::open(&self.testpaths.file).unwrap().read_to_string(&mut src).unwrap();
+ let mut srcs = vec![src];
+
+ let mut round = 0;
+ while round < rounds {
+ logv(self.config, format!("pretty-printing round {} revision {:?}",
+ round, self.revision));
+ let proc_res = self.print_source(srcs[round].to_owned(), &self.props.pretty_mode);
+
+ if !proc_res.status.success() {
+ self.fatal_proc_rec(&format!("pretty-printing failed in round {} revision {:?}",
+ round, self.revision),
+ &proc_res);
+ }
+
+ let ProcRes{ stdout, .. } = proc_res;
+ srcs.push(stdout);
+ round += 1;
+ }
+
+ let mut expected = match self.props.pp_exact {
+ Some(ref file) => {
+ let filepath = self.testpaths.file.parent().unwrap().join(file);
+ let mut s = String::new();
+ File::open(&filepath).unwrap().read_to_string(&mut s).unwrap();
+ s
+ }
+ None => { srcs[srcs.len() - 2].clone() }
+ };
+ let mut actual = srcs[srcs.len() - 1].clone();
+
+ if self.props.pp_exact.is_some() {
+ // Now we have to care about line endings
+ let cr = "\r".to_owned();
+ actual = actual.replace(&cr, "").to_owned();
+ expected = expected.replace(&cr, "").to_owned();
+ }
+
+ self.compare_source(&expected, &actual);
+
+ // If we're only making sure that the output matches then just stop here
+ if self.props.pretty_compare_only { return; }
+
+ // Finally, let's make sure it actually appears to remain valid code
+ let proc_res = self.typecheck_source(actual);
+ if !proc_res.status.success() {
+ self.fatal_proc_rec("pretty-printed source does not typecheck", &proc_res);
+ }
+
+ if !self.props.pretty_expanded { return }
+
+ // additionally, run `--pretty expanded` and try to build it.
+ let proc_res = self.print_source(srcs[round].clone(), "expanded");
+ if !proc_res.status.success() {
+ self.fatal_proc_rec("pretty-printing (expanded) failed", &proc_res);
+ }
+
+ let ProcRes{ stdout: expanded_src, .. } = proc_res;
+ let proc_res = self.typecheck_source(expanded_src);
+ if !proc_res.status.success() {
+ self.fatal_proc_rec(
+ "pretty-printed source (expanded) does not typecheck",
+ &proc_res);
+ }
+ }
+
+ fn print_source(&self, src: String, pretty_type: &str) -> ProcRes {
+ let aux_dir = self.aux_output_dir_name();
+
+ let mut rustc = Command::new(&self.config.rustc_path);
+ rustc.arg("-")
+ .args(&["-Z", &format!("unpretty={}", pretty_type)])
+ .args(&["--target", &self.config.target])
+ .arg("-L").arg(&aux_dir)
+ .args(self.split_maybe_args(&self.config.target_rustcflags))
+ .args(&self.props.compile_flags)
+ .envs(self.props.exec_env.clone());
+
+ self.compose_and_run(rustc,
+ self.config.compile_lib_path.to_str().unwrap(),
+ Some(aux_dir.to_str().unwrap()),
+ Some(src))
+ }
+
+ fn compare_source(&self,
+ expected: &str,
+ actual: &str) {
+ if expected != actual {
+ self.error("pretty-printed source does not match expected source");
+ println!("\n\
+expected:\n\
+------------------------------------------\n\
+{}\n\
+------------------------------------------\n\
+actual:\n\
+------------------------------------------\n\
+{}\n\
+------------------------------------------\n\
+\n",
+ expected, actual);
+ panic!();
+ }
+ }
+
+ fn typecheck_source(&self, src: String) -> ProcRes {
+ let mut rustc = Command::new(&self.config.rustc_path);
+
+ let out_dir = self.output_base_name().with_extension("pretty-out");
+ let _ = fs::remove_dir_all(&out_dir);
+ create_dir_all(&out_dir).unwrap();
+
+ let target = if self.props.force_host {
+ &*self.config.host
+ } else {
+ &*self.config.target
+ };
+
+ let aux_dir = self.aux_output_dir_name();
+
+ rustc.arg("-")
+ .arg("-Zno-trans")
+ .arg("--out-dir").arg(&out_dir)
+ .arg(&format!("--target={}", target))
+ .arg("-L").arg(&self.config.build_base)
+ .arg("-L").arg(aux_dir);
+
+ if let Some(revision) = self.revision {
+ rustc.args(&["--cfg", revision]);
+ }
+
+ rustc.args(self.split_maybe_args(&self.config.target_rustcflags));
+ rustc.args(&self.props.compile_flags);
+
+ self.compose_and_run_compiler(rustc, Some(src))
+ }
+
+ fn run_debuginfo_gdb_test(&self) {
+ assert!(self.revision.is_none(), "revisions not relevant here");
+
+ let config = Config {
+ target_rustcflags: self.cleanup_debug_info_options(&self.config.target_rustcflags),
+ host_rustcflags: self.cleanup_debug_info_options(&self.config.host_rustcflags),
+ .. self.config.clone()
+ };
+
+ let test_cx = TestCx {
+ config: &config,
+ ..*self
+ };
+
+ test_cx.run_debuginfo_gdb_test_no_opt();
+ }
+
    /// Core of the GDB debuginfo test: compiles the test file, builds a GDB
    /// command script from the test's `gdb-command`/`gdb-check` directives
    /// and `#break` markers, runs GDB (remotely via adb+gdbserver on Android
    /// targets, locally otherwise), and checks the captured output.
    fn run_debuginfo_gdb_test_no_opt(&self) {
        // Directive prefixes depend on GDB's Rust support: "gdbr" lines apply
        // only to rust-aware GDB, "gdbg" only to generic GDB, "gdb" to both.
        let prefixes = if self.config.gdb_native_rust {
            // GDB with Rust
            static PREFIXES: &'static [&'static str] = &["gdb", "gdbr"];
            println!("NOTE: compiletest thinks it is using GDB with native rust support");
            PREFIXES
        } else {
            // Generic GDB
            static PREFIXES: &'static [&'static str] = &["gdb", "gdbg"];
            println!("NOTE: compiletest thinks it is using GDB without native rust support");
            PREFIXES
        };

        let DebuggerCommands {
            commands,
            check_lines,
            breakpoint_lines
        } = self.parse_debugger_commands(prefixes);
        let mut cmds = commands.join("\n");

        // compile test file (it should have 'compile-flags:-g' in the header)
        let compiler_run_result = self.compile_test();
        if !compiler_run_result.status.success() {
            self.fatal_proc_rec("compilation failed!", &compiler_run_result);
        }

        let exe_file = self.make_exe_name();

        let debugger_run_result;
        match &*self.config.target {
            "arm-linux-androideabi" |
            "armv7-linux-androideabi" |
            "aarch64-linux-android" => {

                // Android: the binary runs under gdbserver on the device; GDB
                // attaches over a forwarded TCP port, so "run" becomes "continue".
                cmds = cmds.replace("run", "continue");

                let tool_path = match self.config.android_cross_path.to_str() {
                    Some(x) => x.to_owned(),
                    None => self.fatal("cannot find android cross path")
                };

                // write debugger script
                let mut script_str = String::with_capacity(2048);
                script_str.push_str(&format!("set charset {}\n", Self::charset()));
                script_str.push_str(&format!("set sysroot {}\n", tool_path));
                script_str.push_str(&format!("file {}\n", exe_file.to_str().unwrap()));
                script_str.push_str("target remote :5039\n");
                script_str.push_str(&format!("set solib-search-path \
                                              ./{}/stage2/lib/rustlib/{}/lib/\n",
                                             self.config.host, self.config.target));
                // One `break file:line` per "#break" marker in the test.
                for line in &breakpoint_lines {
                    script_str.push_str(&format!("break {:?}:{}\n",
                                                 self.testpaths.file.file_name()
                                                     .unwrap()
                                                     .to_string_lossy(),
                                                 *line)[..]);
                }
                script_str.push_str(&cmds);
                script_str.push_str("\nquit\n");

                debug!("script_str = {}", script_str);
                self.dump_output_file(&script_str, "debugger.script");

                let adb_path = &self.config.adb_path;

                // Upload the test executable to the device.
                Command::new(adb_path)
                    .arg("push")
                    .arg(&exe_file)
                    .arg(&self.config.adb_test_dir)
                    .status()
                    .expect(&format!("failed to exec `{:?}`", adb_path));

                // Forward the fixed gdbserver port (5039) to the device.
                Command::new(adb_path)
                    .args(&["forward", "tcp:5039", "tcp:5039"])
                    .status()
                    .expect(&format!("failed to exec `{:?}`", adb_path));

                let adb_arg = format!("export LD_LIBRARY_PATH={}; \
                                       gdbserver{} :5039 {}/{}",
                                      self.config.adb_test_dir.clone(),
                                      if self.config.target.contains("aarch64")
                                      {"64"} else {""},
                                      self.config.adb_test_dir.clone(),
                                      exe_file.file_name().unwrap().to_str()
                                          .unwrap());

                debug!("adb arg: {}", adb_arg);
                let mut adb = Command::new(adb_path)
                    .args(&["shell", &adb_arg])
                    .stdout(Stdio::piped())
                    .stderr(Stdio::inherit())
                    .spawn()
                    .expect(&format!("failed to exec `{:?}`", adb_path));

                // Wait for the gdbserver to print out "Listening on port ..."
                // at which point we know that it's started and then we can
                // execute the debugger below.
                let mut stdout = BufReader::new(adb.stdout.take().unwrap());
                let mut line = String::new();
                loop {
                    line.truncate(0);
                    stdout.read_line(&mut line).unwrap();
                    if line.starts_with("Listening on port 5039") {
                        break
                    }
                }
                drop(stdout);

                let debugger_script = self.make_out_name("debugger.script");
                // FIXME (#9639): This needs to handle non-utf8 paths
                let debugger_opts =
                    vec!["-quiet".to_owned(),
                         "-batch".to_owned(),
                         "-nx".to_owned(),
                         format!("-command={}", debugger_script.to_str().unwrap())];

                let mut gdb_path = tool_path;
                gdb_path.push_str("/bin/gdb");
                let Output {
                    status,
                    stdout,
                    stderr
                } = Command::new(&gdb_path)
                    .args(&debugger_opts)
                    .output()
                    .expect(&format!("failed to exec `{:?}`", gdb_path));
                // NOTE(review): the logged cmdline names the cross gdb
                // ("<target>-gdb") while the binary actually executed above is
                // gdb_path — presumably intentional for log readability.
                let cmdline = {
                    let mut gdb = Command::new(&format!("{}-gdb", self.config.target));
                    gdb.args(&debugger_opts);
                    let cmdline = self.make_cmdline(&gdb, "");
                    logv(self.config, format!("executing {}", cmdline));
                    cmdline
                };

                debugger_run_result = ProcRes {
                    status,
                    stdout: String::from_utf8(stdout).unwrap(),
                    stderr: String::from_utf8(stderr).unwrap(),
                    cmdline,
                };
                // gdbserver may already have exited; a failed kill is fine.
                if adb.kill().is_err() {
                    println!("Adb process is already finished.");
                }
            }

            _ => {
                // Host targets: run GDB directly, with the Rust pretty-printer
                // scripts from the rust source tree made available to it.
                let rust_src_root = self.config.find_rust_src_root().expect(
                    "Could not find Rust source root",
                );
                let rust_pp_module_rel_path = Path::new("./src/etc");
                let rust_pp_module_abs_path = rust_src_root.join(rust_pp_module_rel_path)
                    .to_str()
                    .unwrap()
                    .to_owned();
                // write debugger script
                let mut script_str = String::with_capacity(2048);
                script_str.push_str(&format!("set charset {}\n", Self::charset()));
                script_str.push_str("show version\n");

                match self.config.gdb_version {
                    Some(version) => {
                        println!("NOTE: compiletest thinks it is using GDB version {}",
                                 version);

                        if version > extract_gdb_version("7.4").unwrap() {
                            // Add the directory containing the pretty printers to
                            // GDB's script auto loading safe path
                            script_str.push_str(
                                &format!("add-auto-load-safe-path {}\n",
                                         rust_pp_module_abs_path.replace(r"\", r"\\"))
                            );
                        }
                    }
                    _ => {
                        println!("NOTE: compiletest does not know which version of \
                                  GDB it is using");
                    }
                }

                // The following line actually doesn't have to do anything with
                // pretty printing, it just tells GDB to print values on one line:
                script_str.push_str("set print pretty off\n");

                // Add the pretty printer directory to GDB's source-file search path
                script_str.push_str(&format!("directory {}\n",
                                             rust_pp_module_abs_path));

                // Load the target executable
                script_str.push_str(&format!("file {}\n",
                                             exe_file.to_str().unwrap()
                                                 .replace(r"\", r"\\")));

                // Force GDB to print values in the Rust format.
                if self.config.gdb_native_rust {
                    script_str.push_str("set language rust\n");
                }

                // Add line breakpoints
                for line in &breakpoint_lines {
                    script_str.push_str(&format!("break '{}':{}\n",
                                                 self.testpaths.file.file_name().unwrap()
                                                     .to_string_lossy(),
                                                 *line));
                }

                script_str.push_str(&cmds);
                script_str.push_str("\nquit\n");

                debug!("script_str = {}", script_str);
                self.dump_output_file(&script_str, "debugger.script");

                let debugger_script = self.make_out_name("debugger.script");

                // FIXME (#9639): This needs to handle non-utf8 paths
                let debugger_opts =
                    vec!["-quiet".to_owned(),
                         "-batch".to_owned(),
                         "-nx".to_owned(),
                         format!("-command={}", debugger_script.to_str().unwrap())];

                let mut gdb = Command::new(self.config.gdb.as_ref().unwrap());
                gdb.args(&debugger_opts)
                    .env("PYTHONPATH", rust_pp_module_abs_path);

                debugger_run_result =
                    self.compose_and_run(gdb,
                                         self.config.run_lib_path.to_str().unwrap(),
                                         None,
                                         None);
            }
        }

        if !debugger_run_result.status.success() {
            self.fatal_proc_rec("gdb failed to execute", &debugger_run_result);
        }

        self.check_debugger_output(&debugger_run_result, &check_lines);
    }
+
+ fn run_debuginfo_lldb_test(&self) {
+ assert!(self.revision.is_none(), "revisions not relevant here");
+
+ if self.config.lldb_python_dir.is_none() {
+ self.fatal("Can't run LLDB test because LLDB's python path is not set.");
+ }
+
+ let config = Config {
+ target_rustcflags: self.cleanup_debug_info_options(&self.config.target_rustcflags),
+ host_rustcflags: self.cleanup_debug_info_options(&self.config.host_rustcflags),
+ .. self.config.clone()
+ };
+
+
+ let test_cx = TestCx {
+ config: &config,
+ ..*self
+ };
+
+ test_cx.run_debuginfo_lldb_test_no_opt();
+ }
+
    /// Core of the LLDB debuginfo test: compiles the test, builds an LLDB
    /// command script from the test's `lldb-command`/`lldb-check` directives
    /// (plus Rust formatter setup and `#break` breakpoints), executes it via
    /// `lldb_batchmode.py`, and checks the captured output.
    fn run_debuginfo_lldb_test_no_opt(&self) {
        // compile test file (it should have 'compile-flags:-g' in the header)
        let compile_result = self.compile_test();
        if !compile_result.status.success() {
            self.fatal_proc_rec("compilation failed!", &compile_result);
        }

        let exe_file = self.make_exe_name();

        match self.config.lldb_version {
            Some(ref version) => {
                println!("NOTE: compiletest thinks it is using LLDB version {}",
                         version);
            }
            _ => {
                println!("NOTE: compiletest does not know which version of \
                          LLDB it is using");
            }
        }

        // Parse debugger commands etc from test files
        let DebuggerCommands {
            commands,
            check_lines,
            breakpoint_lines,
            ..
        } = self.parse_debugger_commands(&["lldb"]);

        // Write debugger script:
        // We don't want to hang when calling `quit` while the process is still running
        let mut script_str = String::from("settings set auto-confirm true\n");

        // Make LLDB emit its version, so we have it documented in the test output
        script_str.push_str("version\n");

        // Switch LLDB into "Rust mode" by loading the Rust value formatters
        // shipped in the rust source tree.
        let rust_src_root = self.config.find_rust_src_root().expect(
            "Could not find Rust source root",
        );
        let rust_pp_module_rel_path = Path::new("./src/etc/lldb_rust_formatters.py");
        let rust_pp_module_abs_path = rust_src_root.join(rust_pp_module_rel_path)
            .to_str()
            .unwrap()
            .to_owned();

        script_str.push_str(&format!("command script import {}\n",
                                     &rust_pp_module_abs_path[..])[..]);
        script_str.push_str("type summary add --no-value ");
        script_str.push_str("--python-function lldb_rust_formatters.print_val ");
        script_str.push_str("-x \".*\" --category Rust\n");
        script_str.push_str("type category enable Rust\n");

        // Set breakpoints on every line that contains the string "#break"
        let source_file_name = self.testpaths.file.file_name().unwrap().to_string_lossy();
        for line in &breakpoint_lines {
            script_str.push_str(&format!("breakpoint set --file '{}' --line {}\n",
                                         source_file_name,
                                         line));
        }

        // Append the other commands
        for line in &commands {
            script_str.push_str(line);
            script_str.push_str("\n");
        }

        // Finally, quit the debugger
        script_str.push_str("\nquit\n");

        // Write the script into a file
        debug!("script_str = {}", script_str);
        self.dump_output_file(&script_str, "debugger.script");
        let debugger_script = self.make_out_name("debugger.script");

        // Let LLDB execute the script via lldb_batchmode.py
        let debugger_run_result = self.run_lldb(&exe_file,
                                                &debugger_script,
                                                &rust_src_root);

        if !debugger_run_result.status.success() {
            self.fatal_proc_rec("Error while running LLDB", &debugger_run_result);
        }

        self.check_debugger_output(&debugger_run_result, &check_lines);
    }
+
+ fn run_lldb(&self,
+ test_executable: &Path,
+ debugger_script: &Path,
+ rust_src_root: &Path)
+ -> ProcRes {
+ // Prepare the lldb_batchmode which executes the debugger script
+ let lldb_script_path = rust_src_root.join("src/etc/lldb_batchmode.py");
+ self.cmd2procres(Command::new(&self.config.lldb_python)
+ .arg(&lldb_script_path)
+ .arg(test_executable)
+ .arg(debugger_script)
+ .env("PYTHONPATH",
+ self.config.lldb_python_dir.as_ref().unwrap()))
+ }
+
+ fn cmd2procres(&self, cmd: &mut Command) -> ProcRes {
+ let (status, out, err) = match cmd.output() {
+ Ok(Output { status, stdout, stderr }) => {
+ (status,
+ String::from_utf8(stdout).unwrap(),
+ String::from_utf8(stderr).unwrap())
+ },
+ Err(e) => {
+ self.fatal(&format!("Failed to setup Python process for \
+ LLDB script: {}", e))
+ }
+ };
+
+ self.dump_output(&out, &err);
+ ProcRes {
+ status,
+ stdout: out,
+ stderr: err,
+ cmdline: format!("{:?}", cmd)
+ }
+ }
+
+ fn parse_debugger_commands(&self, debugger_prefixes: &[&str]) -> DebuggerCommands {
+ let directives = debugger_prefixes.iter().map(|prefix| (
+ format!("{}-command", prefix),
+ format!("{}-check", prefix),
+ )).collect::<Vec<_>>();
+
+ let mut breakpoint_lines = vec![];
+ let mut commands = vec![];
+ let mut check_lines = vec![];
+ let mut counter = 1;
+ let reader = BufReader::new(File::open(&self.testpaths.file).unwrap());
+ for line in reader.lines() {
+ match line {
+ Ok(line) => {
+ if line.contains("#break") {
+ breakpoint_lines.push(counter);
+ }
+
+ for &(ref command_directive, ref check_directive) in &directives {
+ self.config.parse_name_value_directive(
+ &line,
+ command_directive).map(|cmd| {
+ commands.push(cmd)
+ });
+
+ self.config.parse_name_value_directive(
+ &line,
+ check_directive).map(|cmd| {
+ check_lines.push(cmd)
+ });
+ }
+ }
+ Err(e) => {
+ self.fatal(&format!("Error while parsing debugger commands: {}", e))
+ }
+ }
+ counter += 1;
+ }
+
+ DebuggerCommands {
+ commands,
+ check_lines,
+ breakpoint_lines,
+ }
+ }
+
+ fn cleanup_debug_info_options(&self, options: &Option<String>) -> Option<String> {
+ if options.is_none() {
+ return None;
+ }
+
+ // Remove options that are either unwanted (-O) or may lead to duplicates due to RUSTFLAGS.
+ let options_to_remove = [
+ "-O".to_owned(),
+ "-g".to_owned(),
+ "--debuginfo".to_owned()
+ ];
+ let new_options =
+ self.split_maybe_args(options).into_iter()
+ .filter(|x| !options_to_remove.contains(x))
+ .collect::<Vec<String>>();
+
+ Some(new_options.join(" "))
+ }
+
    /// Verify that every expected check line appears, in order, in the
    /// debugger's stdout. Check lines may contain "[...]" wildcards (see
    /// `check_single_line` below). Fails the test naming the first expected
    /// line that was never matched.
    fn check_debugger_output(&self, debugger_run_result: &ProcRes, check_lines: &[String]) {
        let num_check_lines = check_lines.len();

        // Walk stdout once, advancing through the expected lines in order.
        let mut check_line_index = 0;
        for line in debugger_run_result.stdout.lines() {
            if check_line_index >= num_check_lines {
                break;
            }

            if check_single_line(line, &(check_lines[check_line_index])[..]) {
                check_line_index += 1;
            }
        }
        // Output exhausted before every expected line matched -> failure.
        if check_line_index != num_check_lines && num_check_lines > 0 {
            self.fatal_proc_rec(&format!("line not found in debugger output: {}",
                                         check_lines[check_line_index]),
                                debugger_run_result);
        }

+ fn check_single_line(line: &str, check_line: &str) -> bool {
+ // Allow check lines to leave parts unspecified (e.g., uninitialized
+ // bits in the wrong case of an enum) with the notation "[...]".
+ let line = line.trim();
+ let check_line = check_line.trim();
+ let can_start_anywhere = check_line.starts_with("[...]");
+ let can_end_anywhere = check_line.ends_with("[...]");
+
+ let check_fragments: Vec<&str> = check_line.split("[...]")
+ .filter(|frag| !frag.is_empty())
+ .collect();
+ if check_fragments.is_empty() {
+ return true;
+ }
+
+ let (mut rest, first_fragment) = if can_start_anywhere {
+ match line.find(check_fragments[0]) {
+ Some(pos) => (&line[pos + check_fragments[0].len() ..], 1),
+ None => return false
+ }
+ } else {
+ (line, 0)
+ };
+
+ for current_fragment in &check_fragments[first_fragment..] {
+ match rest.find(current_fragment) {
+ Some(pos) => {
+ rest = &rest[pos + current_fragment.len() .. ];
+ }
+ None => return false
+ }
+ }
+
+ if !can_end_anywhere && !rest.is_empty() {
+ return false;
+ }
+
+ true
+ }
+ }
+
+ fn check_error_patterns(&self,
+ output_to_check: &str,
+ proc_res: &ProcRes) {
+ if self.props.error_patterns.is_empty() {
+ if self.props.must_compile_successfully {
+ return
+ } else {
+ self.fatal(&format!("no error pattern specified in {:?}",
+ self.testpaths.file.display()));
+ }
+ }
+ let mut next_err_idx = 0;
+ let mut next_err_pat = self.props.error_patterns[next_err_idx].trim();
+ let mut done = false;
+ for line in output_to_check.lines() {
+ if line.contains(next_err_pat) {
+ debug!("found error pattern {}", next_err_pat);
+ next_err_idx += 1;
+ if next_err_idx == self.props.error_patterns.len() {
+ debug!("found all error patterns");
+ done = true;
+ break;
+ }
+ next_err_pat = self.props.error_patterns[next_err_idx].trim();
+ }
+ }
+ if done { return; }
+
+ let missing_patterns = &self.props.error_patterns[next_err_idx..];
+ if missing_patterns.len() == 1 {
+ self.fatal_proc_rec(
+ &format!("error pattern '{}' not found!", missing_patterns[0]),
+ proc_res);
+ } else {
+ for pattern in missing_patterns {
+ self.error(&format!("error pattern '{}' not found!", *pattern));
+ }
+ self.fatal_proc_rec("multiple error patterns not found", proc_res);
+ }
+ }
+
+ fn check_no_compiler_crash(&self, proc_res: &ProcRes) {
+ for line in proc_res.stderr.lines() {
+ if line.contains("error: internal compiler error") {
+ self.fatal_proc_rec("compiler encountered internal error", proc_res);
+ }
+ }
+ }
+
+ fn check_forbid_output(&self,
+ output_to_check: &str,
+ proc_res: &ProcRes) {
+ for pat in &self.props.forbid_output {
+ if output_to_check.contains(pat) {
+ self.fatal_proc_rec("forbidden pattern found in compiler output", proc_res);
+ }
+ }
+ }
+
    /// Match the compiler's JSON diagnostics against the expected-error
    /// annotations parsed from the test file. Every expected error must be
    /// matched by a diagnostic, and every diagnostic that
    /// `is_unexpected_compiler_message` flags must have been expected;
    /// otherwise the test fails with a summary of both lists.
    fn check_expected_errors(&self,
                             expected_errors: Vec<errors::Error>,
                             proc_res: &ProcRes) {
        // A test that expects hard errors must also have failed to compile.
        if proc_res.status.success() &&
            expected_errors.iter().any(|x| x.kind == Some(ErrorKind::Error)) {
            self.fatal_proc_rec("process did not return an error status", proc_res);
        }

        // On Windows, keep all '\' path separators to match the paths reported in the JSON output
        // from the compiler
        let os_file_name = self.testpaths.file.display().to_string();

        let file_name =
            format!("{}", self.testpaths.file.display())
                .replace(r"\", "/"); // on windows, translate all '\' path separators to '/'

        // If the testcase being checked contains at least one expected "help"
        // message, then we'll ensure that all "help" messages are expected.
        // Otherwise, all "help" messages reported by the compiler will be ignored.
        // This logic also applies to "note" messages.
        let expect_help = expected_errors.iter().any(|ee| ee.kind == Some(ErrorKind::Help));
        let expect_note = expected_errors.iter().any(|ee| ee.kind == Some(ErrorKind::Note));

        // Parse the JSON output from the compiler and extract out the messages.
        let actual_errors = json::parse_output(&os_file_name, &proc_res.stderr, proc_res);
        let mut unexpected = Vec::new();
        // found[i] marks expected_errors[i] as consumed, so each expectation
        // matches at most one actual diagnostic.
        let mut found = vec![false; expected_errors.len()];
        for actual_error in &actual_errors {
            // An expectation matches when line numbers agree, kinds agree
            // (or the expectation is kind-less), and the expected message is
            // a substring of the actual one.
            let opt_index =
                expected_errors
                    .iter()
                    .enumerate()
                    .position(|(index, expected_error)| {
                        !found[index] &&
                            actual_error.line_num == expected_error.line_num &&
                            (expected_error.kind.is_none() ||
                                actual_error.kind == expected_error.kind) &&
                            actual_error.msg.contains(&expected_error.msg)
                    });

            match opt_index {
                Some(index) => {
                    // found a match, everybody is happy
                    assert!(!found[index]);
                    found[index] = true;
                }

                None => {
                    if self.is_unexpected_compiler_message(actual_error, expect_help, expect_note) {
                        self.error(
                            &format!("{}:{}: unexpected {}: '{}'",
                                     file_name,
                                     actual_error.line_num,
                                     actual_error.kind.as_ref()
                                         .map_or(String::from("message"),
                                                 |k| k.to_string()),
                                     actual_error.msg));
                        unexpected.push(actual_error);
                    }
                }
            }
        }

        let mut not_found = Vec::new();
        // anything not yet found is a problem
        for (index, expected_error) in expected_errors.iter().enumerate() {
            if !found[index] {
                self.error(
                    &format!("{}:{}: expected {} not found: {}",
                             file_name,
                             expected_error.line_num,
                             expected_error.kind.as_ref()
                                 .map_or("message".into(),
                                         |k| k.to_string()),
                             expected_error.msg));
                not_found.push(expected_error);
            }
        }

        // Summarize and fail if either list is non-empty.
        if !unexpected.is_empty() || !not_found.is_empty() {
            self.error(
                &format!("{} unexpected errors found, {} expected errors not found",
                         unexpected.len(), not_found.len()));
            println!("status: {}\ncommand: {}",
                     proc_res.status, proc_res.cmdline);
            if !unexpected.is_empty() {
                println!("unexpected errors (from JSON output): {:#?}\n", unexpected);
            }
            if !not_found.is_empty() {
                println!("not found errors (from test file): {:#?}\n", not_found);
            }
            panic!();
        }
    }
+
+ /// Returns true if we should report an error about `actual_error`,
+ /// which did not match any of the expected error. We always require
+ /// errors/warnings to be explicitly listed, but only require
+ /// helps/notes if there are explicit helps/notes given.
+ fn is_unexpected_compiler_message(&self,
+ actual_error: &Error,
+ expect_help: bool,
+ expect_note: bool)
+ -> bool {
+ match actual_error.kind {
+ Some(ErrorKind::Help) => expect_help,
+ Some(ErrorKind::Note) => expect_note,
+ Some(ErrorKind::Error) |
+ Some(ErrorKind::Warning) => true,
+ Some(ErrorKind::Suggestion) |
+ None => false
+ }
+ }
+
+ fn compile_test(&self) -> ProcRes {
+ let allow_unused = match self.config.mode {
+ CompileFail | Ui => {
+ // compile-fail and ui tests tend to have tons of unused code as
+ // it's just testing various pieces of the compile, but we don't
+ // want to actually assert warnings about all this code. Instead
+ // let's just ignore unused code warnings by defaults and tests
+ // can turn it back on if needed.
+ AllowUnused::Yes
+ }
+ _ => AllowUnused::No
+ };
+
+ let mut rustc = self.make_compile_args(
+ &self.testpaths.file, TargetLocation::ThisFile(self.make_exe_name()), allow_unused);
+
+ rustc.arg("-L").arg(&self.aux_output_dir_name());
+
+ self.compose_and_run_compiler(rustc, None)
+ }
+
+ fn document(&self, out_dir: &Path) -> ProcRes {
+ if self.props.build_aux_docs {
+ for rel_ab in &self.props.aux_builds {
+ let aux_testpaths = self.compute_aux_test_paths(rel_ab);
+ let aux_props = self.props.from_aux_file(&aux_testpaths.file,
+ self.revision,
+ self.config);
+ let aux_cx = TestCx {
+ config: self.config,
+ props: &aux_props,
+ testpaths: &aux_testpaths,
+ revision: self.revision
+ };
+ let auxres = aux_cx.document(out_dir);
+ if !auxres.status.success() {
+ return auxres;
+ }
+ }
+ }
+
+ let aux_dir = self.aux_output_dir_name();
+
+ let rustdoc_path = self.config.rustdoc_path.as_ref().expect("--rustdoc-path passed");
+ let mut rustdoc = Command::new(rustdoc_path);
+
+ rustdoc.arg("-L").arg(aux_dir)
+ .arg("-o").arg(out_dir)
+ .arg(&self.testpaths.file)
+ .args(&self.props.compile_flags);
+ if let Some(ref linker) = self.config.linker {
+ rustdoc.arg("--linker").arg(linker).arg("-Z").arg("unstable-options");
+ }
+
+ self.compose_and_run_compiler(rustdoc, None)
+ }
+
    /// Run the compiled test binary: through the configured
    /// remote-test-client (which uploads the binary plus any aux libraries
    /// to a remote/emulated device) when one is set, otherwise directly on
    /// the host.
    fn exec_compiled_test(&self) -> ProcRes {
        let env = &self.props.exec_env;

        match &*self.config.target {
            // This is pretty similar to below, we're transforming:
            //
            //      program arg1 arg2
            //
            // into
            //
            //      remote-test-client run program:support-lib.so arg1 arg2
            //
            // The test-client program will upload `program` to the emulator
            // along with all other support libraries listed (in this case
            // `support-lib.so`. It will then execute the program on the
            // emulator with the arguments specified (in the environment we give
            // the process) and then report back the same result.
            _ if self.config.remote_test_client.is_some() => {
                let aux_dir = self.aux_output_dir_name();
                let ProcArgs { mut prog, args } = self.make_run_args();
                // Append every aux-build artifact as a colon-separated
                // support library for the client to upload.
                if let Ok(entries) = aux_dir.read_dir() {
                    for entry in entries {
                        let entry = entry.unwrap();
                        if !entry.path().is_file() {
                            continue
                        }
                        prog.push_str(":");
                        prog.push_str(entry.path().to_str().unwrap());
                    }
                }
                let mut test_client = Command::new(
                    self.config.remote_test_client.as_ref().unwrap());
                test_client
                    .args(&["run", &prog])
                    .args(args)
                    .envs(env.clone());
                self.compose_and_run(test_client,
                                     self.config.run_lib_path.to_str().unwrap(),
                                     Some(aux_dir.to_str().unwrap()),
                                     None)
            }
            _ => {
                // Host execution: run the binary from its output directory.
                let aux_dir = self.aux_output_dir_name();
                let ProcArgs { prog, args } = self.make_run_args();
                let mut program = Command::new(&prog);
                program.args(args)
                    .current_dir(&self.output_base_name().parent().unwrap())
                    .envs(env.clone());
                self.compose_and_run(program,
                                     self.config.run_lib_path.to_str().unwrap(),
                                     Some(aux_dir.to_str().unwrap()),
                                     None)
            }
        }
    }
+
+ /// For each `aux-build: foo/bar` annotation, we check to find the
+ /// file in a `aux` directory relative to the test itself.
+ fn compute_aux_test_paths(&self, rel_ab: &str) -> TestPaths {
+ let test_ab = self.testpaths.file
+ .parent()
+ .expect("test file path has no parent")
+ .join("auxiliary")
+ .join(rel_ab);
+ if !test_ab.exists() {
+ self.fatal(&format!("aux-build `{}` source not found", test_ab.display()))
+ }
+
+ TestPaths {
+ file: test_ab,
+ base: self.testpaths.base.clone(),
+ relative_dir: self.testpaths.relative_dir
+ .join("auxiliary")
+ .join(rel_ab)
+ .parent()
+ .expect("aux-build path has no parent")
+ .to_path_buf()
+ }
+ }
+
    /// Build every `aux-build` dependency into the aux output directory,
    /// then run the main compiler invocation with the test's `rustc_env`
    /// applied and, optionally, `input` piped to its stdin.
    fn compose_and_run_compiler(&self, mut rustc: Command, input: Option<String>) -> ProcRes {
        if !self.props.aux_builds.is_empty() {
            create_dir_all(&self.aux_output_dir_name()).unwrap();
        }

        let aux_dir = self.aux_output_dir_name();

        for rel_ab in &self.props.aux_builds {
            let aux_testpaths = self.compute_aux_test_paths(rel_ab);
            let aux_props = self.props.from_aux_file(&aux_testpaths.file,
                                                     self.revision,
                                                     self.config);
            // Only the directory of this path matters; rustc picks the
            // actual file name (see make_lib_name).
            let aux_output = {
                let f = self.make_lib_name(&self.testpaths.file);
                let parent = f.parent().unwrap();
                TargetLocation::ThisDirectory(parent.to_path_buf())
            };
            let aux_cx = TestCx {
                config: self.config,
                props: &aux_props,
                testpaths: &aux_testpaths,
                revision: self.revision
            };
            let mut aux_rustc = aux_cx.make_compile_args(&aux_testpaths.file, aux_output, AllowUnused::No);

            // Decide the crate type for the auxiliary crate.
            let crate_type = if aux_props.no_prefer_dynamic {
                None
            } else if (self.config.target.contains("musl") && !aux_props.force_host) ||
                self.config.target.contains("wasm32") ||
                self.config.target.contains("emscripten") {
                // We primarily compile all auxiliary libraries as dynamic libraries
                // to avoid code size bloat and large binaries as much as possible
                // for the test suite (otherwise including libstd statically in all
                // executables takes up quite a bit of space).
                //
                // For targets like MUSL or Emscripten, however, there is no support for
                // dynamic libraries so we just go back to building a normal library. Note,
                // however, that for MUSL if the library is built with `force_host` then
                // it's ok to be a dylib as the host should always support dylibs.
                Some("lib")
            } else {
                Some("dylib")
            };

            if let Some(crate_type) = crate_type {
                aux_rustc.args(&["--crate-type", crate_type]);
            }

            aux_rustc.arg("-L").arg(&aux_dir);

            let auxres = aux_cx.compose_and_run(aux_rustc,
                                                aux_cx.config.compile_lib_path.to_str().unwrap(),
                                                Some(aux_dir.to_str().unwrap()),
                                                None);
            if !auxres.status.success() {
                self.fatal_proc_rec(
                    &format!("auxiliary build of {:?} failed to compile: ",
                             aux_testpaths.file.display()),
                    &auxres);
            }
        }

        rustc.envs(self.props.rustc_env.clone());
        self.compose_and_run(rustc,
                             self.config.compile_lib_path.to_str().unwrap(),
                             Some(aux_dir.to_str().unwrap()),
                             input)
    }
+
+ fn compose_and_run(&self,
+ mut command: Command,
+ lib_path: &str,
+ aux_path: Option<&str>,
+ input: Option<String>) -> ProcRes {
+ let cmdline =
+ {
+ let cmdline = self.make_cmdline(&command, lib_path);
+ logv(self.config, format!("executing {}", cmdline));
+ cmdline
+ };
+
+ command
+ .stdout(Stdio::piped())
+ .stderr(Stdio::piped())
+ .stdin(Stdio::piped());
+
+ // Need to be sure to put both the lib_path and the aux path in the dylib
+ // search path for the child.
+ let mut path = env::split_paths(&env::var_os(dylib_env_var()).unwrap_or(OsString::new()))
+ .collect::<Vec<_>>();
+ if let Some(p) = aux_path {
+ path.insert(0, PathBuf::from(p))
+ }
+ path.insert(0, PathBuf::from(lib_path));
+
+ // Add the new dylib search path var
+ let newpath = env::join_paths(&path).unwrap();
+ command.env(dylib_env_var(), newpath);
+
+ let mut child = command.spawn().expect(&format!("failed to exec `{:?}`", &command));
+ if let Some(input) = input {
+ child.stdin.as_mut().unwrap().write_all(input.as_bytes()).unwrap();
+ }
+
+ let Output { status, stdout, stderr } = read2_abbreviated(child)
+ .expect("failed to read output");
+
+ let result = ProcRes {
+ status,
+ stdout: String::from_utf8_lossy(&stdout).into_owned(),
+ stderr: String::from_utf8_lossy(&stderr).into_owned(),
+ cmdline,
+ };
+
+ self.dump_output(&result.stdout, &result.stderr);
+
+ result
+ }
+
+ fn make_compile_args(&self, input_file: &Path, output_file: TargetLocation, allow_unused: AllowUnused) -> Command {
+ let mut rustc = Command::new(&self.config.rustc_path);
+ rustc.arg(input_file)
+ .arg("-L").arg(&self.config.build_base);
+
+ // Optionally prevent default --target if specified in test compile-flags.
+ let custom_target = self.props.compile_flags
+ .iter()
+ .fold(false, |acc, x| acc || x.starts_with("--target"));
+
+ if !custom_target {
+ let target = if self.props.force_host {
+ &*self.config.host
+ } else {
+ &*self.config.target
+ };
+
+ rustc.arg(&format!("--target={}", target));
+ }
+
+ if let Some(revision) = self.revision {
+ rustc.args(&["--cfg", revision]);
+ }
+
+ if let Some(ref incremental_dir) = self.props.incremental_dir {
+ rustc.args(&["-Z", &format!("incremental={}", incremental_dir.display())]);
+ rustc.args(&["-Z", "incremental-verify-ich"]);
+ rustc.args(&["-Z", "incremental-queries"]);
+ }
+
+ match self.config.mode {
+ CompileFail |
+ ParseFail |
+ Incremental => {
+ // If we are extracting and matching errors in the new
+ // fashion, then you want JSON mode. Old-skool error
+ // patterns still match the raw compiler output.
+ if self.props.error_patterns.is_empty() {
+ rustc.args(&["--error-format", "json"]);
+ }
+ }
+ MirOpt => {
+ rustc.args(&[
+ "-Zdump-mir=all",
+ "-Zmir-opt-level=3",
+ "-Zdump-mir-exclude-pass-number"]);
+
+ let mir_dump_dir = self.get_mir_dump_dir();
+ let _ = fs::remove_dir_all(&mir_dump_dir);
+ create_dir_all(mir_dump_dir.as_path()).unwrap();
+ let mut dir_opt = "-Zdump-mir-dir=".to_string();
+ dir_opt.push_str(mir_dump_dir.to_str().unwrap());
+ debug!("dir_opt: {:?}", dir_opt);
+
+ rustc.arg(dir_opt);
+ }
+ RunPass | Ui => {
+ if !self
+ .props
+ .compile_flags
+ .iter()
+ .any(|s| s.starts_with("--error-format"))
+ {
+ rustc.args(&["--error-format", "json"]);
+ }
+ }
+ RunFail |
+ RunPassValgrind |
+ Pretty |
+ DebugInfoGdb |
+ DebugInfoLldb |
+ Codegen |
+ Rustdoc |
+ RunMake |
+ CodegenUnits |
+ Assembly => {
+ // do not use JSON output
+ }
+ }
+
+
+ if self.config.target == "wasm32-unknown-unknown" {
+ // rustc.arg("-g"); // get any backtrace at all on errors
+ } else if !self.props.no_prefer_dynamic {
+ rustc.args(&["-C", "prefer-dynamic"]);
+ }
+
+ match output_file {
+ TargetLocation::ThisFile(path) => {
+ rustc.arg("-o").arg(path);
+ }
+ TargetLocation::ThisDirectory(path) => {
+ rustc.arg("--out-dir").arg(path);
+ }
+ }
+
+ // Add `-A unused` before `config` flags and in-test (`props`) flags, so that they can
+ // overwrite this.
+ if let AllowUnused::Yes = allow_unused {
+ rustc.args(&["-A", "unused"]);
+ }
+
+ if self.props.force_host {
+ rustc.args(self.split_maybe_args(&self.config.host_rustcflags));
+ } else {
+ rustc.args(self.split_maybe_args(&self.config.target_rustcflags));
+ }
+ if let Some(ref linker) = self.config.linker {
+ rustc.arg(format!("-Clinker={}", linker));
+ }
+
+ rustc.args(&self.props.compile_flags);
+
+ rustc
+ }
+
+ fn make_lib_name(&self, auxfile: &Path) -> PathBuf {
+ // what we return here is not particularly important, as it
+ // happens; rustc ignores everything except for the directory.
+ let auxname = self.output_testname(auxfile);
+ self.aux_output_dir_name().join(&auxname)
+ }
+
+ fn make_exe_name(&self) -> PathBuf {
+ let mut f = self.output_base_name();
+ // FIXME: This is using the host architecture exe suffix, not target!
+ if self.config.target.contains("emscripten") {
+ let mut fname = f.file_name().unwrap().to_os_string();
+ fname.push(".js");
+ f.set_file_name(&fname);
+ } else if self.config.target.contains("spirv") {
+ let mut fname = f.file_name().unwrap().to_os_string();
+ fname.push(".spv");
+ f.set_file_name(&fname);
+ } else if self.config.target.contains("wasm32") {
+ let mut fname = f.file_name().unwrap().to_os_string();
+ fname.push(".wasm");
+ f.set_file_name(&fname);
+ } else if !env::consts::EXE_SUFFIX.is_empty() {
+ let mut fname = f.file_name().unwrap().to_os_string();
+ fname.push(env::consts::EXE_SUFFIX);
+ f.set_file_name(&fname);
+ }
+ f
+ }
+
    /// Builds the command line used to execute the compiled test binary.
    ///
    /// The argument vector is assembled in order: optional run tool
    /// (e.g. valgrind), optional NodeJS interpreter (emscripten/wasm32
    /// targets), the executable itself, then any `// run-flags:` from the
    /// test header. The first element becomes the program to spawn.
    fn make_run_args(&self) -> ProcArgs {
        // If we've got another tool to run under (valgrind),
        // then split apart its command
        let mut args = self.split_maybe_args(&self.config.runtool);

        // If this is emscripten, then run tests under nodejs
        if self.config.target.contains("emscripten") {
            if let Some(ref p) = self.config.nodejs {
                args.push(p.clone());
            } else {
                self.fatal("no NodeJS binary found (--nodejs)");
            }
        }

        // If this is otherwise wasm, then run tests under nodejs with our
        // shim script
        if self.config.target.contains("wasm32") {
            if let Some(ref p) = self.config.nodejs {
                args.push(p.clone());
            } else {
                self.fatal("no NodeJS binary found (--nodejs)");
            }

            // Walk up from the test suite directory to the repository root
            // to locate the wasm32 shim.
            let src = self.config.src_base
                .parent().unwrap() // chop off `run-pass`
                .parent().unwrap() // chop off `test`
                .parent().unwrap(); // chop off `src`
            args.push(src.join("src/etc/wasm32-shim.js").display().to_string());
        }

        let exe_file = self.make_exe_name();

        // FIXME (#9639): This needs to handle non-utf8 paths
        args.push(exe_file.to_str().unwrap().to_owned());

        // Add the arguments in the run_flags directive
        args.extend(self.split_maybe_args(&self.props.run_flags));

        // The first collected token (run tool, node, or the exe itself)
        // is the program; everything after it is its argv.
        let prog = args.remove(0);
        ProcArgs {
            prog,
            args,
        }
    }
+
+ fn split_maybe_args(&self, argstr: &Option<String>) -> Vec<String> {
+ match *argstr {
+ Some(ref s) => {
+ s
+ .split(' ')
+ .filter_map(|s| {
+ if s.chars().all(|c| c.is_whitespace()) {
+ None
+ } else {
+ Some(s.to_owned())
+ }
+ }).collect()
+ }
+ None => Vec::new()
+ }
+ }
+
    /// Renders `command` as a human-readable string for diagnostics,
    /// prefixing the dynamic-library search path assignment on platforms
    /// (i.e. Windows) where it has to be adjusted to run the test.
    fn make_cmdline(&self, command: &Command, libpath: &str) -> String {
        use util;

        // Linux and mac don't require adjusting the library search path
        if cfg!(unix) {
            format!("{:?}", command)
        } else {
            // Build the LD_LIBRARY_PATH variable as it would be seen on the command line
            // for diagnostic purposes
            fn lib_path_cmd_prefix(path: &str) -> String {
                format!("{}=\"{}\"", util::lib_path_env_var(), util::make_new_path(path))
            }

            format!("{} {:?}", lib_path_cmd_prefix(libpath), command)
        }
    }
+
+ fn dump_output(&self, out: &str, err: &str) {
+ let revision = if let Some(r) = self.revision {
+ format!("{}.", r)
+ } else {
+ String::new()
+ };
+
+ self.dump_output_file(out, &format!("{}out", revision));
+ self.dump_output_file(err, &format!("{}err", revision));
+ self.maybe_dump_to_stdout(out, err);
+ }
+
+ fn dump_output_file(&self,
+ out: &str,
+ extension: &str) {
+ let outfile = self.make_out_name(extension);
+ File::create(&outfile).unwrap().write_all(out.as_bytes()).unwrap();
+ }
+
+ fn make_out_name(&self, extension: &str) -> PathBuf {
+ self.output_base_name().with_extension(extension)
+ }
+
+ fn aux_output_dir_name(&self) -> PathBuf {
+ let f = self.output_base_name();
+ let mut fname = f.file_name().unwrap().to_os_string();
+ fname.push(&format!("{}.aux", self.config.mode.disambiguator()));
+ f.with_file_name(&fname)
+ }
+
+ fn output_testname(&self, filepath: &Path) -> PathBuf {
+ PathBuf::from(filepath.file_stem().unwrap())
+ }
+
+ /// Given a test path like `compile-fail/foo/bar.rs` Returns a name like
+ /// `<output>/foo/bar-stage1`
+ fn output_base_name(&self) -> PathBuf {
+ let dir = self.config.build_base.join(&self.testpaths.relative_dir);
+
+ // Note: The directory `dir` is created during `collect_tests_from_dir`
+ dir
+ .join(&self.output_testname(&self.testpaths.file))
+ .with_extension(&self.config.stage_id)
+ }
+
+ fn maybe_dump_to_stdout(&self, out: &str, err: &str) {
+ if self.config.verbose {
+ println!("------{}------------------------------", "stdout");
+ println!("{}", out);
+ println!("------{}------------------------------", "stderr");
+ println!("{}", err);
+ println!("------------------------------------------");
+ }
+ }
+
+ fn error(&self, err: &str) {
+ match self.revision {
+ Some(rev) => println!("\nerror in revision `{}`: {}", rev, err),
+ None => println!("\nerror: {}", err)
+ }
+ }
+
+ fn fatal(&self, err: &str) -> ! {
+ self.error(err); panic!();
+ }
+
    /// Reports `err` together with the full process record `proc_res`
    /// (command line, status, stdout, stderr) and aborts the test.
    /// On Windows incremental tests this first attempts to dump open file
    /// handles as a debugging aid (see `try_print_open_handles`).
    fn fatal_proc_rec(&self, err: &str, proc_res: &ProcRes) -> ! {
        self.try_print_open_handles();
        self.error(err);
        proc_res.fatal(None);
    }
+
    // This function is a poor man's attempt to debug rust-lang/rust#38620, if
    // that's closed then this should be deleted
    //
    // This is a very "opportunistic" debugging attempt, so we ignore all
    // errors here.
    //
    // It only does anything on Windows, in Incremental mode, and requires
    // the Sysinternals `handle.exe` utility to be on PATH; every failure
    // mode (spawn error, missing stem) silently returns.
    fn try_print_open_handles(&self) {
        if !cfg!(windows) {
            return
        }
        if self.config.mode != Incremental {
            return
        }

        let filename = match self.testpaths.file.file_stem() {
            Some(path) => path,
            None => return,
        };

        // `handle.exe -a -u <stem> -nobanner` lists which processes hold
        // handles whose name contains the test's file stem.
        let mut cmd = Command::new("handle.exe");
        cmd.arg("-a").arg("-u");
        cmd.arg(filename);
        cmd.arg("-nobanner");
        cmd.stdout(Stdio::piped());
        cmd.stderr(Stdio::piped());
        let output = match cmd.spawn().and_then(read2_abbreviated) {
            Ok(output) => output,
            Err(_) => return,
        };
        println!("---------------------------------------------------");
        println!("ran extra command to debug rust-lang/rust#38620: ");
        println!("{:?}", cmd);
        println!("result: {}", output.status);
        println!("--- stdout ----------------------------------------");
        println!("{}", String::from_utf8_lossy(&output.stdout));
        println!("--- stderr ----------------------------------------");
        println!("{}", String::from_utf8_lossy(&output.stderr));
        println!("---------------------------------------------------");
    }
+
+ // codegen tests (using FileCheck)
+
+ fn compile_test_and_save_ir(&self) -> ProcRes {
+ let aux_dir = self.aux_output_dir_name();
+
+ let output_file = TargetLocation::ThisDirectory(
+ self.output_base_name().parent().unwrap().to_path_buf());
+ let mut rustc = self.make_compile_args(&self.testpaths.file, output_file, AllowUnused::No);
+ rustc.arg("-L").arg(aux_dir)
+ .arg("--emit=llvm-ir");
+
+ self.compose_and_run_compiler(rustc, None)
+ }
+
+ fn compile_test_and_save_assembly(&self) -> (ProcRes, PathBuf) {
+ // This works with both `--emit asm` (as default output name for the assembly)
+ // and `ptx-linker` because the latter can write output at requested location.
+ let output_path = self.output_base_name().with_extension("s");
+
+ let output_file = TargetLocation::ThisFile(output_path.clone());
+ let mut rustc = self.make_compile_args(&self.testpaths.file, output_file, AllowUnused::No);
+
+ rustc.arg("-L").arg(self.aux_output_dir_name());
+
+ match self.props.assembly_output.as_ref().map(AsRef::as_ref) {
+ Some("emit-asm") => {
+ rustc.arg("--emit=asm");
+ }
+
+ Some("ptx-linker") => {
+ // No extra flags needed.
+ }
+
+ Some(_) => self.fatal("unknown 'assembly-output' header"),
+ None => self.fatal("missing 'assembly-output' header"),
+ }
+
+ (self.compose_and_run_compiler(rustc, None), output_path)
+ }
+
+ fn verify_with_filecheck(&self, output: &Path) -> ProcRes {
+ let mut filecheck = Command::new(self.config.llvm_filecheck.as_ref().unwrap());
+ filecheck.arg("--input-file").arg(output)
+ .arg(&self.testpaths.file);
+ self.compose_and_run(filecheck, "", None, None)
+ }
+
+ fn run_codegen_test(&self) {
+ assert!(self.revision.is_none(), "revisions not relevant here");
+
+ if self.config.llvm_filecheck.is_none() {
+ self.fatal("missing --llvm-filecheck");
+ }
+
+ let proc_res = self.compile_test_and_save_ir();
+ if !proc_res.status.success() {
+ self.fatal_proc_rec("compilation failed!", &proc_res);
+ }
+
+ let output_path = self.output_base_name().with_extension("ll");
+ let proc_res = self.verify_with_filecheck(&output_path);
+ if !proc_res.status.success() {
+ self.fatal_proc_rec("verification with 'FileCheck' failed", &proc_res);
+ }
+ }
+
+ fn run_assembly_test(&self) {
+ if self.config.llvm_filecheck.is_none() {
+ self.fatal("missing --llvm-filecheck");
+ }
+
+ let (proc_res, output_path) = self.compile_test_and_save_assembly();
+ if !proc_res.status.success() {
+ self.fatal_proc_rec("compilation failed!", &proc_res);
+ }
+
+ let proc_res = self.verify_with_filecheck(&output_path);
+ if !proc_res.status.success() {
+ self.fatal_proc_rec("verification with 'FileCheck' failed", &proc_res);
+ }
+ }
+
+ fn charset() -> &'static str {
+ // FreeBSD 10.1 defaults to GDB 6.1.1 which doesn't support "auto" charset
+ if cfg!(target_os = "bitrig") {
+ "auto"
+ } else if cfg!(target_os = "freebsd") {
+ "ISO-8859-1"
+ } else {
+ "UTF-8"
+ }
+ }
+
    /// Runs a rustdoc test: documents the crate into a fresh output
    /// directory, then validates the result either by matching doctest
    /// line numbers (when the `check-test-line-numbers-match` header is
    /// set) or by running the external `htmldocck.py` checker.
    fn run_rustdoc_test(&self) {
        assert!(self.revision.is_none(), "revisions not relevant here");

        // Start from a clean output directory each run.
        let out_dir = self.output_base_name();
        let _ = fs::remove_dir_all(&out_dir);
        create_dir_all(&out_dir).unwrap();

        let proc_res = self.document(&out_dir);
        if !proc_res.status.success() {
            self.fatal_proc_rec("rustdoc failed!", &proc_res);
        }

        if self.props.check_test_line_numbers_match {
            self.check_rustdoc_test_option(proc_res);
        } else {
            // htmldocck.py lives in the rust source tree, so we need its root.
            let root = self.config.find_rust_src_root().unwrap();
            let res = self.cmd2procres(
                Command::new(&self.config.docck_python)
                    .arg(root.join("src/etc/htmldocck.py"))
                    .arg(out_dir)
                    .arg(&self.testpaths.file),
            );
            if !res.status.success() {
                self.fatal_proc_rec("htmldocck failed!", &res);
            }
        }
    }
+
    /// Scans a source file for doctest openings and returns the 1-based
    /// line numbers of every opening ``` fence found inside `///` doc
    /// comments (closing fences are tracked via the `ignore` flag and not
    /// reported). When `other_files` is provided, `mod foo;` declarations
    /// are recorded there (module name only) instead, so the caller can
    /// scan those files too.
    fn get_lines<P: AsRef<Path>>(&self, path: &P,
                                 mut other_files: Option<&mut Vec<String>>) -> Vec<usize> {
        let mut file = fs::File::open(path)
            .expect("markdown_test_output_check_entry File::open failed");
        let mut content = String::new();
        file.read_to_string(&mut content)
            .expect("markdown_test_output_check_entry read_to_string failed");
        // `ignore` is true while we are inside a fenced code block, so the
        // closing fence is not counted as a new doctest.
        let mut ignore = false;
        content.lines()
            .enumerate()
            .filter_map(|(line_nb, line)| {
                if (line.trim_start().starts_with("pub mod ") ||
                    line.trim_start().starts_with("mod ")) &&
                   line.ends_with(';') {
                    // A module declaration: remember the referenced file
                    // (text after the last "mod ", minus the semicolon).
                    if let Some(ref mut other_files) = other_files {
                        other_files.push(line.rsplit("mod ")
                            .next()
                            .unwrap()
                            .replace(";", ""));
                    }
                    None
                } else {
                    // Only the part after `///` matters for fence detection.
                    let sline = line.split("///").last().unwrap_or("");
                    let line = sline.trim_start();
                    if line.starts_with("```") {
                        if ignore {
                            ignore = false;
                            None
                        } else {
                            ignore = true;
                            // rustdoc reports doctests by 1-based line.
                            Some(line_nb + 1)
                        }
                    } else {
                        None
                    }
                }
            })
            .collect()
    }
+
    /// Verifies that every doctest rustdoc reported ran at the exact line
    /// where this harness found a ``` fence (see `get_lines`), for the
    /// main test file and any `mod`-referenced sibling files.
    ///
    /// Fails the test if rustdoc ran a doctest at an unexpected line, if
    /// no doctest ran at all, or if some expected fence was never
    /// exercised.
    fn check_rustdoc_test_option(&self, res: ProcRes) {
        let mut other_files = Vec::new();
        // Map of (cwd-relative, /-normalized) file path -> expected doctest lines.
        let mut files: HashMap<String, Vec<usize>> = HashMap::new();
        let cwd = env::current_dir().unwrap();
        files.insert(self.testpaths.file.strip_prefix(&cwd)
                         .unwrap_or(&self.testpaths.file)
                         .to_str()
                         .unwrap()
                         .replace('\\', "/"),
                     self.get_lines(&self.testpaths.file, Some(&mut other_files)));
        for other_file in other_files {
            // Sibling modules are assumed to live next to the test file as
            // `<mod_name>.rs`.
            let mut path = self.testpaths.file.clone();
            path.set_file_name(&format!("{}.rs", other_file));
            files.insert(path.strip_prefix(&cwd)
                             .unwrap_or(&path)
                             .to_str()
                             .unwrap()
                             .replace('\\', "/"),
                         self.get_lines(&path, None));
        }

        let mut tested = 0;
        // NOTE: the loop body is empty; all the work happens inside
        // `inspect`, which parses lines of the form
        // `test <path> - <name> (line N) ... ok` and removes each matched
        // line number from the expectations map.
        for _ in res.stdout.split('\n')
            .filter(|s| s.starts_with("test "))
            .inspect(|s| {
                let tmp: Vec<&str> = s.split(" - ").collect();
                if tmp.len() == 2 {
                    let path = tmp[0].rsplit("test ").next().unwrap();
                    if let Some(ref mut v) = files.get_mut(
                            &path.replace('\\', "/")) {
                        tested += 1;
                        // Extract N from "(line N)"; fall back to 0 on any
                        // parse failure so the binary_search below misses.
                        let mut iter = tmp[1].split("(line ");
                        iter.next();
                        let line = iter.next()
                            .unwrap_or(")")
                            .split(')')
                            .next()
                            .unwrap_or("0")
                            .parse()
                            .unwrap_or(0);
                        if let Ok(pos) = v.binary_search(&line) {
                            v.remove(pos);
                        } else {
                            self.fatal_proc_rec(
                                &format!("Not found doc test: \"{}\" in \"{}\":{:?}",
                                         s, path, v),
                                &res);
                        }
                    }
                }
            }) {}
        if tested == 0 {
            self.fatal_proc_rec(&format!("No test has been found... {:?}", files), &res);
        } else {
            // Any line numbers left over were fences rustdoc never ran.
            for (entry, v) in &files {
                if !v.is_empty() {
                    self.fatal_proc_rec(&format!("Not found test at line{} \"{}\":{:?}",
                                                 if v.len() > 1 { "s" } else { "" }, entry, v),
                                        &res);
                }
            }
        }
    }
+
    /// Runs a codegen-units test: compiles the test, parses the
    /// `TRANS_ITEM` lines the compiler printed to stdout, and compares
    /// them against the expected items encoded as error annotations in the
    /// test source. Reports items that are missing, unexpected, or
    /// assigned to the wrong codegen unit(s), and panics if any
    /// discrepancy was found.
    fn run_codegen_units_test(&self) {
        assert!(self.revision.is_none(), "revisions not relevant here");

        let proc_res = self.compile_test();

        if !proc_res.status.success() {
            self.fatal_proc_rec("compilation failed!", &proc_res);
        }

        self.check_no_compiler_crash(&proc_res);

        const PREFIX: &'static str = "TRANS_ITEM ";
        const CGU_MARKER: &'static str = "@@";

        // Items the compiler actually produced, from its stdout.
        let actual: Vec<TransItem> = proc_res
            .stdout
            .lines()
            .filter(|line| line.starts_with(PREFIX))
            .map(str_to_trans_item)
            .collect();

        // Items the test expects, from `//~ TRANS_ITEM ...` annotations.
        let expected: Vec<TransItem> = errors::load_errors(&self.testpaths.file, None)
            .iter()
            .map(|e| str_to_trans_item(&e.msg[..]))
            .collect();

        let mut missing = Vec::new();
        let mut wrong_cgus = Vec::new();

        for expected_item in &expected {
            let actual_item_with_same_name = actual.iter()
                .find(|ti| ti.name == expected_item.name);

            if let Some(actual_item) = actual_item_with_same_name {
                // An empty expected CGU set means "don't care which unit".
                if !expected_item.codegen_units.is_empty() &&
                   // Also check for codegen units
                   expected_item.codegen_units != actual_item.codegen_units {
                    wrong_cgus.push((expected_item.clone(), actual_item.clone()));
                }
            } else {
                missing.push(expected_item.string.clone());
            }
        }

        // Items the compiler produced that no annotation asked for.
        let unexpected: Vec<_> =
            actual.iter()
                .filter(|acgu| !expected.iter().any(|ecgu| acgu.name == ecgu.name))
                .map(|acgu| acgu.string.clone())
                .collect();

        if !missing.is_empty() {
            missing.sort();

            println!("\nThese items should have been contained but were not:\n");

            for item in &missing {
                println!("{}", item);
            }

            println!("\n");
        }

        if !unexpected.is_empty() {
            let sorted = {
                let mut sorted = unexpected.clone();
                sorted.sort();
                sorted
            };

            println!("\nThese items were contained but should not have been:\n");

            for item in sorted {
                println!("{}", item);
            }

            println!("\n");
        }

        if !wrong_cgus.is_empty() {
            wrong_cgus.sort_by_key(|pair| pair.0.name.clone());
            println!("\nThe following items were assigned to wrong codegen units:\n");

            for &(ref expected_item, ref actual_item) in &wrong_cgus {
                println!("{}", expected_item.name);
                println!(" expected: {}", codegen_units_to_str(&expected_item.codegen_units));
                println!(" actual: {}", codegen_units_to_str(&actual_item.codegen_units));
                println!("");
            }
        }

        if !(missing.is_empty() && unexpected.is_empty() && wrong_cgus.is_empty())
        {
            panic!();
        }

        // A single translation item: its name, the codegen units it landed
        // in, and the original full line (for diagnostics).
        #[derive(Clone, Eq, PartialEq)]
        struct TransItem {
            name: String,
            codegen_units: HashSet<String>,
            string: String,
        }

        // [TRANS_ITEM] name [@@ (cgu)+]
        fn str_to_trans_item(s: &str) -> TransItem {
            let s = if s.starts_with(PREFIX) {
                (&s[PREFIX.len()..]).trim()
            } else {
                s.trim()
            };

            let full_string = format!("{}{}", PREFIX, s.trim().to_owned());

            let parts: Vec<&str> = s.split(CGU_MARKER)
                .map(str::trim)
                .filter(|s| !s.is_empty())
                .collect();

            let name = parts[0].trim();

            // Everything after the `@@` marker is a space-separated list
            // of codegen unit names.
            let cgus = if parts.len() > 1 {
                let cgus_str = parts[1];

                cgus_str.split(' ')
                    .map(str::trim)
                    .filter(|s| !s.is_empty())
                    .map(str::to_owned)
                    .collect()
            }
            else {
                HashSet::new()
            };

            TransItem {
                name: name.to_owned(),
                codegen_units: cgus,
                string: full_string,
            }
        }

        // Renders a CGU set as a sorted, space-separated string.
        fn codegen_units_to_str(cgus: &HashSet<String>) -> String
        {
            let mut cgus: Vec<_> = cgus.iter().collect();
            cgus.sort();

            let mut string = String::new();
            for cgu in cgus {
                string.push_str(&cgu[..]);
                string.push_str(" ");
            }

            string
        }
    }
+
    /// One-time setup for an incremental test: (re)creates an empty
    /// incremental work-product directory before any revision runs.
    fn init_incremental_test(&self) {
        // (See `run_incremental_test` for an overview of how incremental tests work.)

        // Before any of the revisions have executed, create the
        // incremental workproduct directory. Delete any old
        // incremental work products that may be there from prior
        // runs.
        let incremental_dir = self.incremental_dir();
        if incremental_dir.exists() {
            // Canonicalizing the path will convert it to the //?/ format
            // on Windows, which enables paths longer than 260 character
            let canonicalized = incremental_dir.canonicalize().unwrap();
            fs::remove_dir_all(canonicalized).unwrap();
        }
        fs::create_dir_all(&incremental_dir).unwrap();

        if self.config.verbose {
            print!("init_incremental_test: incremental_dir={}", incremental_dir.display());
        }
    }
+
    /// Runs one revision of an incremental test, dispatching to the
    /// rpass/rfail/cfail runner selected by the revision name's prefix.
    fn run_incremental_test(&self) {
        // Basic plan for a test incremental/foo/bar.rs:
        // - load list of revisions rpass1, cfail2, rpass3
        //   - each should begin with `rpass`, `cfail`, or `cfail`
        // - if `rpass`, expect compile and execution to succeed
        // - if `cfail`, expect compilation to fail
        // - if `rfail`, expect execution to fail
        // - create a directory build/foo/bar.incremental
        // - compile foo/bar.rs with -Z incremental=.../foo/bar.incremental and -C rpass1
        //   - because name of revision starts with "rpass", expect success
        // - compile foo/bar.rs with -Z incremental=.../foo/bar.incremental and -C cfail2
        //   - because name of revision starts with "cfail", expect an error
        //   - load expected errors as usual, but filter for those that end in `[rfail2]`
        // - compile foo/bar.rs with -Z incremental=.../foo/bar.incremental and -C rpass3
        //   - because name of revision starts with "rpass", expect success
        // - execute build/foo/bar.exe and save output
        //
        // FIXME -- use non-incremental mode as an oracle? That doesn't apply
        // to #[rustc_dirty] and clean tests I guess

        let revision = self.revision.expect("incremental tests require a list of revisions");

        // Incremental workproduct directory should have already been created.
        let incremental_dir = self.incremental_dir();
        assert!(incremental_dir.exists(), "init_incremental_test failed to create incremental dir");

        // Add an extra flag pointing at the incremental directory.
        let mut revision_props = self.props.clone();
        revision_props.incremental_dir = Some(incremental_dir);

        // Run this revision under a context whose props carry the
        // incremental dir; everything else is shared with `self`.
        let revision_cx = TestCx {
            config: self.config,
            props: &revision_props,
            testpaths: self.testpaths,
            revision: self.revision,
        };

        if self.config.verbose {
            print!("revision={:?} revision_props={:#?}", revision, revision_props);
        }

        if revision.starts_with("rpass") {
            revision_cx.run_rpass_test();
        } else if revision.starts_with("rfail") {
            revision_cx.run_rfail_test();
        } else if revision.starts_with("cfail") {
            revision_cx.run_cfail_test();
        } else {
            revision_cx.fatal(
                "revision name must begin with rpass, rfail, or cfail");
        }
    }
+
+ /// Directory where incremental work products are stored.
+ fn incremental_dir(&self) -> PathBuf {
+ self.output_base_name().with_extension("inc")
+ }
+
    /// Runs a run-make test by invoking `make` (or `gmake` on the BSDs) in
    /// the test directory with a long list of environment variables that
    /// the Makefiles rely on (compiler paths, tmpdir, rpath dirs, etc.).
    /// Skipped entirely when cross-compiling.
    fn run_rmake_test(&self) {
        // FIXME(#11094): we should fix these tests
        if self.config.host != self.config.target {
            return
        }

        let cwd = env::current_dir().unwrap();
        // Walk up from the suite dir to the repository root.
        let src_root = self.config.src_base.parent().unwrap()
            .parent().unwrap()
            .parent().unwrap();
        let src_root = cwd.join(&src_root);

        // Fresh scratch directory for the Makefile's $(TMPDIR).
        let tmpdir = cwd.join(self.output_base_name());
        if tmpdir.exists() {
            self.aggressive_rm_rf(&tmpdir).unwrap();
        }
        create_dir_all(&tmpdir).unwrap();

        // GNU make is `gmake` on the BSD family.
        let host = &self.config.host;
        let make = if host.contains("bitrig") || host.contains("dragonfly") ||
            host.contains("freebsd") || host.contains("netbsd") ||
            host.contains("openbsd") {
            "gmake"
        } else {
            "make"
        };

        let mut cmd = Command::new(make);
        cmd.current_dir(&self.testpaths.file)
            .stdout(Stdio::piped())
            .stderr(Stdio::piped())
            .env("TARGET", &self.config.target)
            .env("PYTHON", &self.config.docck_python)
            .env("S", src_root)
            .env("RUST_BUILD_STAGE", &self.config.stage_id)
            .env("RUSTC", cwd.join(&self.config.rustc_path))
            .env("RUSTDOC",
                 cwd.join(&self.config.rustdoc_path.as_ref().expect("--rustdoc-path passed")))
            .env("TMPDIR", &tmpdir)
            .env("LD_LIB_PATH_ENVVAR", dylib_env_var())
            .env("HOST_RPATH_DIR", cwd.join(&self.config.compile_lib_path))
            .env("TARGET_RPATH_DIR", cwd.join(&self.config.run_lib_path))
            .env("LLVM_COMPONENTS", &self.config.llvm_components)
            .env("LLVM_CXXFLAGS", &self.config.llvm_cxxflags);

        if let Some(ref linker) = self.config.linker {
            cmd.env("RUSTC_LINKER", linker);
        }

        // We don't want RUSTFLAGS set from the outside to interfere with
        // compiler flags set in the test cases:
        cmd.env_remove("RUSTFLAGS");

        if self.config.bless {
            cmd.env("RUSTC_BLESS_TEST", "--bless");
            // Assume this option is active if the environment variable is "defined", with _any_ value.
            // As an example, a `Makefile` can use this option by:
            //
            //   ifdef RUSTC_BLESS_TEST
            //       cp "$(TMPDIR)"/actual_something.ext expected_something.ext
            //   else
            //       $(DIFF) expected_something.ext "$(TMPDIR)"/actual_something.ext
            //   endif
        }

        if self.config.target.contains("msvc") {
            // We need to pass a path to `lib.exe`, so assume that `cc` is `cl.exe`
            // and that `lib.exe` lives next to it.
            let lib = Path::new(&self.config.cc).parent().unwrap().join("lib.exe");

            // MSYS doesn't like passing flags of the form `/foo` as it thinks it's
            // a path and instead passes `C:\msys64\foo`, so convert all
            // `/`-arguments to MSVC here to `-` arguments.
            let cflags = self.config.cflags.split(' ').map(|s| s.replace("/", "-"))
                .collect::<Vec<_>>().join(" ");

            cmd.env("IS_MSVC", "1")
                .env("IS_WINDOWS", "1")
                .env("MSVC_LIB", format!("'{}' -nologo", lib.display()))
                .env("CC", format!("'{}' {}", self.config.cc, cflags))
                .env("CXX", &self.config.cxx);
        } else {
            cmd.env("CC", format!("{} {}", self.config.cc, self.config.cflags))
                .env("CXX", format!("{} {}", self.config.cxx, self.config.cflags))
                .env("AR", &self.config.ar);

            if self.config.target.contains("windows") {
                cmd.env("IS_WINDOWS", "1");
            }
        }

        let output = cmd.spawn().and_then(read2_abbreviated).expect("failed to spawn `make`");
        if !output.status.success() {
            let res = ProcRes {
                status: output.status,
                stdout: String::from_utf8_lossy(&output.stdout).into_owned(),
                stderr: String::from_utf8_lossy(&output.stderr).into_owned(),
                cmdline: format!("{:?}", cmd),
            };
            self.fatal_proc_rec("make failed", &res);
        }
    }
+
+ fn aggressive_rm_rf(&self, path: &Path) -> io::Result<()> {
+ for e in path.read_dir()? {
+ let entry = e?;
+ let path = entry.path();
+ if entry.file_type()?.is_dir() {
+ self.aggressive_rm_rf(&path)?;
+ } else {
+ // Remove readonly files as well on windows (by default we can't)
+ fs::remove_file(&path).or_else(|e| {
+ if cfg!(windows) && e.kind() == io::ErrorKind::PermissionDenied {
+ let mut meta = entry.metadata()?.permissions();
+ meta.set_readonly(false);
+ fs::set_permissions(&path, meta)?;
+ fs::remove_file(&path)
+ } else {
+ Err(e)
+ }
+ })?;
+ }
+ }
+ fs::remove_dir(path)
+ }
+
    /// Runs a UI test: compiles the test, normalizes and diffs the
    /// stdout/stderr against the checked-in `.stdout`/`.stderr` snapshots,
    /// optionally applies and re-checks rustfix suggestions (`.fixed`),
    /// and, for run-pass UI tests, executes the produced binary.
    fn run_ui_test(&self) {
        // if the user specified a format in the ui test
        // print the output to the stderr file, otherwise extract
        // the rendered error messages from json and print them
        let explicit = self
            .props
            .compile_flags
            .iter()
            .any(|s| s.contains("--error-format"));
        let proc_res = self.compile_test();

        let expected_stderr_path = self.expected_output_path(UI_STDERR);
        let expected_stderr = self.load_expected_output(&expected_stderr_path);

        let expected_stdout_path = self.expected_output_path(UI_STDOUT);
        let expected_stdout = self.load_expected_output(&expected_stdout_path);

        let expected_fixed_path = self.expected_output_path(UI_FIXED);
        let expected_fixed = self.load_expected_output(&expected_fixed_path);

        let normalized_stdout =
            self.normalize_output(&proc_res.stdout, &self.props.normalize_stdout);

        // Without an explicit format the compiler emitted JSON; pull the
        // human-rendered messages out of it before comparing.
        let stderr = if explicit {
            proc_res.stderr.clone()
        } else {
            json::extract_rendered(&proc_res.stderr, &proc_res)
        };

        let normalized_stderr =
            self.normalize_output(&stderr, &self.props.normalize_stderr);

        let mut errors = 0;
        errors += self.compare_output(UI_STDOUT, &normalized_stdout, &expected_stdout);
        errors += self.compare_output(UI_STDERR, &normalized_stderr, &expected_stderr);


        if self.config.rustfix_coverage {
            // Find out which tests have `MachineApplicable` suggestions but are missing
            // `run-rustfix` or `run-rustfix-only-machine-applicable` headers.
            //
            // This will return an empty `Vec` in case the executed test file has a
            // `compile-flags: --error-format=xxxx` header with a value other than `json`.
            let suggestions = get_suggestions_from_json(
                &proc_res.stderr,
                &HashSet::new(),
                Filter::MachineApplicableOnly
            ).unwrap_or_default();
            if suggestions.len() > 0
                && !self.props.run_rustfix
                && !self.props.rustfix_only_machine_applicable {
                // Record the offending test in a shared coverage file.
                let mut coverage_file_path = self.config.build_base.clone();
                coverage_file_path.push("rustfix_missing_coverage.txt");
                debug!("coverage_file_path: {}", coverage_file_path.display());

                let mut file = OpenOptions::new()
                    .create(true)
                    .append(true)
                    .open(coverage_file_path.as_path())
                    .expect("could not create or open file");

                if let Err(_) = writeln!(file, "{}", self.testpaths.file.display()) {
                    panic!("couldn't write to {}", coverage_file_path.display());
                }
            }
        }

        if self.props.run_rustfix {
            // Apply suggestions from lints to the code itself
            let unfixed_code = self
                .load_expected_output_from_path(&self.testpaths.file)
                .expect("Could not load output from path");
            let suggestions = get_suggestions_from_json(
                &proc_res.stderr,
                &HashSet::new(),
                if self.props.rustfix_only_machine_applicable {
                    Filter::MachineApplicableOnly
                } else {
                    Filter::Everything
                },
            ).expect("Could not retrieve suggestions from JSON");
            let fixed_code = apply_suggestions(&unfixed_code, &suggestions).expect(&format!(
                "failed to apply suggestions for {:?} with rustfix",
                self.testpaths.file
            ));

            errors += self.compare_output(UI_FIXED, &fixed_code, &expected_fixed);
        } else if !expected_fixed.is_empty() {
            panic!(
                "the `// run-rustfix` directive wasn't found but a `*.fixed` \
                 file was found"
            );
        }

        if errors > 0 {
            println!("To update references, rerun the tests and pass the `--bless` flag");
            let relative_path_to_file =
                self.testpaths.relative_dir.join(self.testpaths.file.file_name().unwrap());
            println!(
                "To only update this specific test, also pass `--test-args {}`",
                relative_path_to_file.display(),
            );
            self.fatal_proc_rec(
                &format!("{} errors occurred comparing output.", errors),
                &proc_res,
            );
        }

        // UI tests marked run-pass must also execute successfully.
        if self.props.run_pass {
            let proc_res = self.exec_compiled_test();

            if !proc_res.status.success() {
                self.fatal_proc_rec("test run failed!", &proc_res);
            }
        }

        if self.props.run_rustfix {
            // And finally, compile the fixed code and make sure it both
            // succeeds and has no diagnostics.
            let mut rustc = self.make_compile_args(
                &self.testpaths.file.with_extension(UI_FIXED),
                TargetLocation::ThisFile(self.make_exe_name()),
                AllowUnused::No,
            );
            rustc.arg("-L").arg(&self.aux_output_dir_name());
            let res = self.compose_and_run_compiler(rustc, None);
            if !res.status.success() {
                self.fatal_proc_rec("failed to compile fixed code", &res);
            }
            if !res.stderr.is_empty() && !self.props.rustfix_only_machine_applicable {
                self.fatal_proc_rec("fixed code is still producing diagnostics", &res);
            }
        }
    }
+
+ fn run_mir_opt_test(&self) {
+ let proc_res = self.compile_test();
+
+ if !proc_res.status.success() {
+ self.fatal_proc_rec("compilation failed!", &proc_res);
+ }
+
+ let proc_res = self.exec_compiled_test();
+
+ if !proc_res.status.success() {
+ self.fatal_proc_rec("test run failed!", &proc_res);
+ }
+ self.check_mir_dump();
+ }
+
+ fn check_mir_dump(&self) {
+ let mut test_file_contents = String::new();
+ fs::File::open(self.testpaths.file.clone()).unwrap()
+ .read_to_string(&mut test_file_contents)
+ .unwrap();
+ if let Some(idx) = test_file_contents.find("// END RUST SOURCE") {
+ let (_, tests_text) = test_file_contents.split_at(idx + "// END_RUST SOURCE".len());
+ let tests_text_str = String::from(tests_text);
+ let mut curr_test : Option<&str> = None;
+ let mut curr_test_contents = vec![ExpectedLine::Elision];
+ for l in tests_text_str.lines() {
+ debug!("line: {:?}", l);
+ if l.starts_with("// START ") {
+ let (_, t) = l.split_at("// START ".len());
+ curr_test = Some(t);
+ } else if l.starts_with("// END") {
+ let (_, t) = l.split_at("// END ".len());
+ if Some(t) != curr_test {
+ panic!("mismatched START END test name");
+ }
+ self.compare_mir_test_output(curr_test.unwrap(), &curr_test_contents);
+ curr_test = None;
+ curr_test_contents.clear();
+ curr_test_contents.push(ExpectedLine::Elision);
+ } else if l.is_empty() {
+ // ignore
+ } else if l.starts_with("//") && l.split_at("//".len()).1.trim() == "..." {
+ curr_test_contents.push(ExpectedLine::Elision)
+ } else if l.starts_with("// ") {
+ let (_, test_content) = l.split_at("// ".len());
+ curr_test_contents.push(ExpectedLine::Text(test_content));
+ }
+ }
+ }
+ }
+
+ fn check_mir_test_timestamp(&self, test_name: &str, output_file: &Path) {
+ let t = |file| FileTime::from_last_modification_time(&fs::metadata(file).unwrap());
+ let source_file = &self.testpaths.file;
+ let output_time = t(output_file);
+ let source_time = t(source_file);
+ if source_time > output_time {
+ debug!("source file time: {:?} output file time: {:?}", source_time, output_time);
+ panic!("test source file `{}` is newer than potentially stale output file `{}`.",
+ source_file.display(), test_name);
+ }
+ }
+
    /// Compares the MIR dump file named `test_name` against the expected
    /// lines parsed from the test source (see `check_mir_dump`).
    ///
    /// Expected `Text` lines must match dump lines consecutively (after
    /// normalization); an `Elision` allows skipping any number of dump
    /// lines until the next expected line matches. Panics with a detailed
    /// report on the first mismatch.
    fn compare_mir_test_output(&self, test_name: &str, expected_content: &[ExpectedLine<&str>]) {
        let mut output_file = PathBuf::new();
        output_file.push(self.get_mir_dump_dir());
        output_file.push(test_name);
        debug!("comparing the contests of: {:?}", output_file);
        debug!("with: {:?}", expected_content);
        if !output_file.exists() {
            panic!("Output file `{}` from test does not exist",
                   output_file.into_os_string().to_string_lossy());
        }
        // Refuse to compare against a dump older than the test source.
        self.check_mir_test_timestamp(test_name, &output_file);

        let mut dumped_file = fs::File::open(output_file.clone()).unwrap();
        let mut dumped_string = String::new();
        dumped_file.read_to_string(&mut dumped_string).unwrap();
        // Both streams are filtered of empty lines before matching.
        let mut dumped_lines = dumped_string.lines().filter(|l| !l.is_empty());
        let mut expected_lines = expected_content.iter().filter(|&l| {
            if let &ExpectedLine::Text(l) = l {
                !l.is_empty()
            } else {
                true
            }
        }).peekable();

        // Lines are compared after whitespace/comment normalization.
        let compare = |expected_line, dumped_line| {
            let e_norm = normalize_mir_line(expected_line);
            let d_norm = normalize_mir_line(dumped_line);
            debug!("found: {:?}", d_norm);
            debug!("expected: {:?}", e_norm);
            e_norm == d_norm
        };

        // Panics with the full expected and actual contents for context.
        let error = |expected_line, extra_msg| {
            let normalize_all = dumped_string.lines()
                .map(nocomment_mir_line)
                .filter(|l| !l.is_empty())
                .collect::<Vec<_>>()
                .join("\n");
            let f = |l: &ExpectedLine<_>| match l {
                &ExpectedLine::Elision => "... (elided)".into(),
                &ExpectedLine::Text(t) => t
            };
            let expected_content = expected_content.iter()
                .map(|l| f(l))
                .collect::<Vec<_>>()
                .join("\n");
            panic!("Did not find expected line, error: {}\n\
                    Actual Line: {:?}\n\
                    Expected:\n{}\n\
                    Actual:\n{}",
                   extra_msg,
                   expected_line,
                   expected_content,
                   normalize_all);
        };

        // We expect each non-empty line to appear consecutively, non-consecutive lines
        // must be separated by at least one Elision
        //
        // `start_block_line` tracks the most recent `bbN: {`-style line so
        // mismatch reports can name the enclosing block.
        let mut start_block_line = None;
        while let Some(dumped_line) = dumped_lines.next() {
            match expected_lines.next() {
                Some(&ExpectedLine::Text(expected_line)) => {
                    let normalized_expected_line = normalize_mir_line(expected_line);
                    if normalized_expected_line.contains(":{") {
                        start_block_line = Some(expected_line);
                    }

                    if !compare(expected_line, dumped_line) {
                        error!("{:?}", start_block_line);
                        error(expected_line,
                              format!("Mismatch in lines\nCurrnt block: {}\nExpected Line: {:?}",
                                      start_block_line.unwrap_or("None"), dumped_line));
                    }
                },
                Some(&ExpectedLine::Elision) => {
                    // skip any number of elisions in a row.
                    while let Some(&&ExpectedLine::Elision) = expected_lines.peek() {
                        expected_lines.next();
                    }
                    // Consume dump lines until the next expected text line
                    // matches, or the dump runs out.
                    if let Some(&ExpectedLine::Text(expected_line)) = expected_lines.next() {
                        let mut found = compare(expected_line, dumped_line);
                        if found {
                            continue;
                        }
                        while let Some(dumped_line) = dumped_lines.next() {
                            found = compare(expected_line, dumped_line);
                            if found {
                                break;
                            }
                        }
                        if !found {
                            error(expected_line, "ran out of mir dump to match against".into());
                        }
                    }
                },
                None => {},
            }
        }
    }
+
+    /// Directory that MIR dump output for this test lands in:
+    /// `<build_base>/<relative_dir>/<file stem of the test file>`.
+    fn get_mir_dump_dir(&self) -> PathBuf {
+        debug!("input_file: {:?}", self.testpaths.file);
+        self.config.build_base
+            .join(&self.testpaths.relative_dir)
+            .join(self.testpaths.file.file_stem().unwrap())
+    }
+
+    /// Normalizes compiler `output` so it can be compared against checked-in
+    /// expected output: machine-specific paths become placeholders ($DIR,
+    /// $SRC_DIR, $TEST_BUILD_DIR), Windows path separators and line endings
+    /// are canonicalized, tabs are made visible, and finally the test's own
+    /// `custom_rules` (regex, replacement) pairs are applied in order.
+    fn normalize_output(&self, output: &str, custom_rules: &[(String, String)]) -> String {
+        let cflags = self.props.compile_flags.join(" ");
+        // JSON output embeds paths inside string literals, where backslashes
+        // are escaped — path matching below must double them up to compensate.
+        let json = cflags.contains("--error-format json")
+            || cflags.contains("--error-format pretty-json")
+            || cflags.contains("--error-format=json")
+            || cflags.contains("--error-format=pretty-json")
+            || cflags.contains("--output-format json")
+            || cflags.contains("--output-format=json");
+
+        let mut normalized = output.to_string();
+
+        // Replaces every occurrence of `from` (as displayed) with `to`.
+        let mut normalize_path = |from: &Path, to: &str| {
+            let mut from = from.display().to_string();
+            if json {
+                from = from.replace("\\", "\\\\");
+            }
+            normalized = normalized.replace(&from, to);
+        };
+
+        let parent_dir = self.testpaths.file.parent().unwrap();
+        normalize_path(parent_dir, "$DIR");
+
+        if let Ok(src_dir_str) = std::env::var("CARGO_MANIFEST_DIR") {
+            let src_dir = Path::new(&src_dir_str);
+            normalize_path(src_dir, "$SRC_DIR");
+        }
+
+        // Paths into the build directory
+        let test_build_dir = &self.config.build_base;
+        normalize_path(test_build_dir, "$TEST_BUILD_DIR");
+
+        if json {
+            // escaped newlines in json strings should be readable
+            // in the stderr files. There's no point in being correct,
+            // since only humans process the stderr files.
+            // Thus we just turn escaped newlines back into newlines.
+            normalized = normalized.replace("\\n", "\n");
+        }
+
+        normalized = normalized.replace("\\\\", "\\") // denormalize for paths on windows
+              .replace("\\", "/") // normalize for paths on windows
+              .replace("\r\n", "\n") // normalize for linebreaks on windows
+              .replace("\t", "\\t"); // makes tabs visible
+        for rule in custom_rules {
+            let re = Regex::new(&rule.0).expect("bad regex in custom normalization rule");
+            normalized = re.replace_all(&normalized, &rule.1[..]).into_owned();
+        }
+        normalized
+    }
+
+    /// Returns the path of the expected-output reference file for `kind`
+    /// (e.g. "stderr"), taking the current revision into account.
+    ///
+    /// NOTE(review): the original fell back to re-calling the free function
+    /// `expected_output_path` with *identical* arguments when the path did
+    /// not exist, which can never yield a different result; that redundant
+    /// branch is removed.
+    fn expected_output_path(&self, kind: &str) -> PathBuf {
+        expected_output_path(&self.testpaths, self.revision, kind)
+    }
+
+    /// Loads the expected output from `path`, returning an empty string when
+    /// the file does not exist. Aborts the test (via `fatal`) if the file
+    /// exists but cannot be read.
+    fn load_expected_output(&self, path: &Path) -> String {
+        if !path.exists() {
+            return String::new();
+        }
+
+        // Use fs::read_to_string for consistency with
+        // `load_expected_output_from_path` below.
+        fs::read_to_string(path).unwrap_or_else(|e| {
+            self.fatal(&format!("failed to load expected output from `{}`: {}",
+                                path.display(), e))
+        })
+    }
+
+ fn load_expected_output_from_path(&self, path: &Path) -> Result<String, String> {
+ fs::read_to_string(path).map_err(|err| {
+ format!("failed to load expected output from `{}`: {}", path.display(), err)
+ })
+ }
+
+    /// Removes `file` from disk; a nonexistent file is treated as already
+    /// deleted (removing it would error), so this is safe to call blindly.
+    fn delete_file(&self, file: &PathBuf) {
+        if !file.exists() {
+            return;
+        }
+        match fs::remove_file(file) {
+            Ok(()) => {}
+            Err(e) => self.fatal(&format!("failed to delete `{}`: {}", file.display(), e,)),
+        }
+    }
+
+    /// Compares normalized `actual` output of the given `kind` ("stderr",
+    /// "stdout", ...) against the `expected` reference content.
+    ///
+    /// Returns the number of mismatches (0 or 1). The actual output is
+    /// always written beside the build products; with `--bless` the
+    /// checked-in expected-output file is overwritten as well and the
+    /// mismatch is not counted as an error.
+    fn compare_output(&self, kind: &str, actual: &str, expected: &str) -> usize {
+        if actual == expected {
+            return 0;
+        }
+
+        // Without --bless, show either the whole output (no reference file
+        // yet) or a line diff against the reference.
+        if !self.config.bless {
+            if expected.is_empty() {
+                println!("normalized {}:\n{}\n", kind, actual);
+            } else {
+                println!("diff of {}:\n", kind);
+                for diff in diff::lines(expected, actual) {
+                    match diff {
+                        diff::Result::Left(l) => println!("-{}", l),
+                        diff::Result::Both(l, _) => println!(" {}", l),
+                        diff::Result::Right(r) => println!("+{}", r),
+                    }
+                }
+            }
+        }
+
+        let output_file = self
+            .output_base_name()
+            .with_extra_extension(self.revision.unwrap_or(""))
+            .with_extra_extension(kind);
+
+        // Always save the actual output; when blessing, also (over)write the
+        // expected-output file.
+        let mut files = vec![output_file];
+        if self.config.bless {
+            files.push(expected_output_path(
+                self.testpaths,
+                self.revision,
+                kind,
+            ));
+        }
+
+        for output_file in &files {
+            // An empty actual output means the reference file should go away.
+            if actual.is_empty() {
+                self.delete_file(output_file);
+            } else if let Err(err) = fs::write(&output_file, &actual) {
+                self.fatal(&format!(
+                    "failed to write {} to `{}`: {}",
+                    kind,
+                    output_file.display(),
+                    err,
+                ));
+            }
+        }
+
+        println!("\nThe actual {0} differed from the expected {0}.", kind);
+        for output_file in files {
+            println!("Actual {} saved to {}", kind, output_file.display());
+        }
+        if self.config.bless { 0 } else { 1 }
+    }
+}
+
+/// Program plus argument list used to spawn a subprocess.
+struct ProcArgs {
+    prog: String, // executable to invoke
+    args: Vec<String>, // arguments passed to it
+}
+
+/// Captured result of a finished subprocess: exit status, decoded output
+/// streams, and the command line that was run (kept for diagnostics).
+pub struct ProcRes {
+    status: ExitStatus,
+    stdout: String,
+    stderr: String,
+    cmdline: String,
+}
+
+impl ProcRes {
+    /// Prints `err` (if any) followed by the captured status, command line,
+    /// stdout and stderr, then aborts the current test.
+    pub fn fatal(&self, err: Option<&str>) -> ! {
+        if let Some(e) = err {
+            println!("\nerror: {}", e);
+        }
+        print!("\
+            status: {}\n\
+            command: {}\n\
+            stdout:\n\
+            ------------------------------------------\n\
+            {}\n\
+            ------------------------------------------\n\
+            stderr:\n\
+            ------------------------------------------\n\
+            {}\n\
+            ------------------------------------------\n\
+            \n",
+               self.status, self.cmdline, self.stdout,
+               self.stderr);
+        // Use resume_unwind instead of panic!() to prevent a panic message + backtrace from
+        // compiletest, which is unnecessary noise.
+        std::panic::resume_unwind(Box::new(()));
+    }
+}
+
+/// Where compiler output should be placed: an exact output file, or a
+/// directory in which the compiler chooses the file name.
+enum TargetLocation {
+    ThisFile(PathBuf),
+    ThisDirectory(PathBuf),
+}
+
+/// One line of an expected MIR dump: literal text to be matched, or an
+/// elision standing for any number of unmatched lines in between.
+#[derive(Clone, PartialEq, Eq)]
+enum ExpectedLine<T: AsRef<str>> {
+    Elision,
+    Text(T)
+}
+
+/// Two-state flag controlling whether "unused" diagnostics are tolerated.
+// NOTE(review): the call sites are outside this chunk — confirm exactly
+// which compilation step consults this flag.
+enum AllowUnused {
+    Yes,
+    No,
+}
+
+impl<T> fmt::Debug for ExpectedLine<T>
+where
+    T: AsRef<str> + fmt::Debug
+{
+    /// Renders `Text` lines via their own `Debug` impl, and an `Elision`
+    /// as the literal `"..." (Elision)`.
+    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+        match self {
+            &ExpectedLine::Text(ref t) => write!(formatter, "{:?}", t),
+            &ExpectedLine::Elision => write!(formatter, "\"...\" (Elision)"),
+        }
+    }
+}
+
+/// Normalizes a MIR line for comparison: strips any trailing `//` comment,
+/// then removes every whitespace character.
+fn normalize_mir_line(line: &str) -> String {
+    nocomment_mir_line(line)
+        .chars()
+        .filter(|c| !c.is_whitespace())
+        .collect()
+}
+
+/// Returns `line` with any trailing `//` comment removed; trailing
+/// whitespace left in front of the comment is trimmed too. Lines without a
+/// comment are returned unchanged.
+fn nocomment_mir_line(line: &str) -> &str {
+    match line.find("//") {
+        Some(idx) => line[..idx].trim_end(),
+        None => line,
+    }
+}
+
+/// Waits for `child` to finish while capturing stdout and stderr, abbreviating
+/// each stream once it exceeds `HEAD_LEN + TAIL_LEN` bytes: the first
+/// `HEAD_LEN` and the most recent `TAIL_LEN` bytes are kept, with a marker
+/// noting how many bytes were skipped in between.
+fn read2_abbreviated(mut child: Child) -> io::Result<Output> {
+    use std::mem::replace;
+    use read2::read2;
+
+    const HEAD_LEN: usize = 160 * 1024;
+    const TAIL_LEN: usize = 256 * 1024;
+
+    enum ProcOutput {
+        // Entire output retained so far (still under the size limit).
+        Full(Vec<u8>),
+        // Limit exceeded: `head` holds the first HEAD_LEN bytes, `tail` the
+        // last TAIL_LEN bytes seen, `skipped` counts the bytes in between.
+        Abbreviated {
+            head: Vec<u8>,
+            skipped: usize,
+            tail: Box<[u8]>,
+        }
+    }
+
+    impl ProcOutput {
+        fn extend(&mut self, data: &[u8]) {
+            let new_self = match *self {
+                ProcOutput::Full(ref mut bytes) => {
+                    bytes.extend_from_slice(data);
+                    let new_len = bytes.len();
+                    if new_len <= HEAD_LEN + TAIL_LEN {
+                        return;
+                    }
+                    // Crossed the limit: split into head + tail and switch
+                    // to the abbreviated representation.
+                    let tail = bytes.split_off(new_len - TAIL_LEN).into_boxed_slice();
+                    let head = replace(bytes, Vec::new());
+                    let skipped = new_len - HEAD_LEN - TAIL_LEN;
+                    ProcOutput::Abbreviated { head, skipped, tail }
+                }
+                ProcOutput::Abbreviated { ref mut skipped, ref mut tail, .. } => {
+                    *skipped += data.len();
+                    if data.len() <= TAIL_LEN {
+                        // Overwrite the oldest bytes with the new data, then
+                        // rotate so `tail` again holds the last TAIL_LEN
+                        // bytes in order.
+                        tail[..data.len()].copy_from_slice(data);
+                        tail.rotate_left(data.len());
+                    } else {
+                        // New chunk alone fills the tail.
+                        tail.copy_from_slice(&data[(data.len() - TAIL_LEN)..]);
+                    }
+                    return;
+                }
+            };
+            *self = new_self;
+        }
+
+        // Reassembles head + "<<<<<< SKIPPED N BYTES >>>>>>" marker + tail
+        // (or just the bytes, if never abbreviated).
+        fn into_bytes(self) -> Vec<u8> {
+            match self {
+                ProcOutput::Full(bytes) => bytes,
+                ProcOutput::Abbreviated { mut head, skipped, tail } => {
+                    write!(&mut head, "\n\n<<<<<< SKIPPED {} BYTES >>>>>>\n\n", skipped).unwrap();
+                    head.extend_from_slice(&tail);
+                    head
+                }
+            }
+        }
+    }
+
+    let mut stdout = ProcOutput::Full(Vec::new());
+    let mut stderr = ProcOutput::Full(Vec::new());
+
+    // Close stdin so the child never blocks waiting for input.
+    drop(child.stdin.take());
+    read2(child.stdout.take().unwrap(), child.stderr.take().unwrap(), &mut |is_stdout, data, _| {
+        if is_stdout { &mut stdout } else { &mut stderr }.extend(data);
+        data.clear();
+    })?;
+    let status = child.wait()?;
+
+    Ok(Output {
+        status,
+        stdout: stdout.into_bytes(),
+        stderr: stderr.into_bytes(),
+    })
+}
diff --git a/vendor/compiletest_rs/src/uidiff.rs b/vendor/compiletest_rs/src/uidiff.rs
new file mode 100644
index 000000000..fca01029c
--- /dev/null
+++ b/vendor/compiletest_rs/src/uidiff.rs
@@ -0,0 +1,73 @@
+// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Code for checking whether the output of the compiler matches what is
+//! expected.
+
+/// Produces a human-readable, line-by-line diff of `actual` against
+/// `expected`. Lines are compared with `lines_match`, so `[..]` wildcards
+/// in `expected` are honoured; one formatted entry is returned per
+/// differing line.
+pub fn diff_lines(actual: &str, expected: &str) -> Vec<String> {
+    // mega simplistic diff algorithm that just prints the things added/removed
+    let mut diffs = Vec::new();
+    for (i, (a, e)) in zip_all(actual.lines(), expected.lines()).enumerate() {
+        let entry = match (a, e) {
+            (Some(a), Some(e)) if lines_match(e, a) => continue,
+            (Some(a), Some(e)) => format!("{:3} - |{}|\n + |{}|\n", i, e, a),
+            (Some(a), None) => format!("{:3} -\n + |{}|\n", i, a),
+            (None, Some(e)) => format!("{:3} - |{}|\n +\n", i, e),
+            (None, None) => panic!("Cannot get here"),
+        };
+        diffs.push(entry);
+    }
+    diffs
+}
+
+/// Checks whether `actual` matches `expected`, where `expected` may contain
+/// `[..]` wildcards each standing for any (possibly empty) run of
+/// characters. The first literal part must match at the very start; the
+/// match must consume all of `actual` unless the pattern ends in `[..]`.
+fn lines_match(expected: &str, mut actual: &str) -> bool {
+    let mut first = true;
+    for part in expected.split("[..]") {
+        match actual.find(part) {
+            Some(j) if first && j != 0 => return false,
+            Some(j) => actual = &actual[j + part.len()..],
+            None => return false,
+        }
+        first = false;
+    }
+    actual.is_empty() || expected.ends_with("[..]")
+}
+
+/// Iterator adapter pairing two iterators for as long as *either* has items
+/// (see `zip_all`); the exhausted side is padded with `None`.
+struct ZipAll<I1: Iterator, I2: Iterator> {
+    first: I1,
+    second: I2,
+}
+
+impl<T, I1: Iterator<Item = T>, I2: Iterator<Item = T>> Iterator for ZipAll<I1, I2> {
+    type Item = (Option<T>, Option<T>);
+
+    /// Yields a pair per step until *both* underlying iterators are done;
+    /// a shorter side contributes `None`.
+    fn next(&mut self) -> Option<(Option<T>, Option<T>)> {
+        match (self.first.next(), self.second.next()) {
+            (None, None) => None,
+            pair => Some(pair),
+        }
+    }
+}
+
+/// Zips two iterators without truncation: iteration continues until both
+/// sides are exhausted, with `None` filling in for the shorter one.
+fn zip_all<T, I1: Iterator<Item = T>, I2: Iterator<Item = T>>(a: I1, b: I2) -> ZipAll<I1, I2> {
+    ZipAll { first: a, second: b }
+}
diff --git a/vendor/compiletest_rs/src/util.rs b/vendor/compiletest_rs/src/util.rs
new file mode 100644
index 000000000..1290fe24e
--- /dev/null
+++ b/vendor/compiletest_rs/src/util.rs
@@ -0,0 +1,134 @@
+// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use std::env;
+use common::Config;
+use std::ffi::OsStr;
+use std::path::PathBuf;
+
+/// Conversion table from triple OS name to Rust SYSNAME
+/// (the name used by ignore-/only- directives in test headers).
+const OS_TABLE: &'static [(&'static str, &'static str)] = &[
+    ("android", "android"),
+    ("bitrig", "bitrig"),
+    ("darwin", "macos"),
+    ("dragonfly", "dragonfly"),
+    ("freebsd", "freebsd"),
+    ("haiku", "haiku"),
+    ("ios", "ios"),
+    ("linux", "linux"),
+    ("mingw32", "windows"),
+    ("netbsd", "netbsd"),
+    ("openbsd", "openbsd"),
+    ("win32", "windows"),
+    ("windows", "windows"),
+    ("solaris", "solaris"),
+    ("emscripten", "emscripten"),
+];
+
+/// Conversion table from triple architecture name to Rust arch SYSNAME
+/// (companion to `OS_TABLE` above).
+const ARCH_TABLE: &'static [(&'static str, &'static str)] = &[
+    ("aarch64", "aarch64"),
+    ("amd64", "x86_64"),
+    ("arm", "arm"),
+    ("arm64", "aarch64"),
+    ("hexagon", "hexagon"),
+    ("i386", "x86"),
+    ("i586", "x86"),
+    ("i686", "x86"),
+    ("loongarch64", "loongarch64"),
+    ("mips", "mips"),
+    ("msp430", "msp430"),
+    ("nvptx64", "nvptx64"),
+    ("powerpc", "powerpc"),
+    ("powerpc64", "powerpc64"),
+    ("s390x", "s390x"),
+    ("sparc", "sparc"),
+    ("x86_64", "x86_64"),
+    ("xcore", "xcore"),
+    ("asmjs", "asmjs"),
+    ("wasm32", "wasm32"),
+];
+
+/// Returns true when `name` (from an ignore-/only- directive) matches the
+/// OS component of target `triple`, via the first `OS_TABLE` entry whose
+/// key is a substring of the triple.
+pub fn matches_os(triple: &str, name: &str) -> bool {
+    // For the wasm32 bare target we ignore anything also ignored on emscripten
+    // and then we also recognize `wasm32-bare` as the os for the target
+    if triple == "wasm32-unknown-unknown" {
+        return name == "emscripten" || name == "wasm32-bare"
+    }
+    OS_TABLE
+        .iter()
+        .find(|&&(triple_os, _)| triple.contains(triple_os))
+        .map_or(false, |&(_, os)| os == name)
+}
+/// Maps the architecture component of `triple` to its Rust arch name via
+/// `ARCH_TABLE`, falling back to the triple's first dash-separated
+/// component when no table entry matches.
+pub fn get_arch(triple: &str) -> &str {
+    let known = ARCH_TABLE
+        .iter()
+        .find(|&&(key, _)| triple.contains(key))
+        .map(|&(_, arch)| arch);
+    match known {
+        Some(arch) => arch,
+        None => triple.split('-').next().unwrap(),
+    }
+}
+
+/// Returns the environment/ABI (fourth dash-separated) component of a
+/// target triple, e.g. `gnu` for `x86_64-unknown-linux-gnu`, or `None`
+/// when the triple has fewer than four components.
+pub fn get_env(triple: &str) -> Option<&str> {
+    let mut components = triple.split('-');
+    components.nth(3)
+}
+
+/// Guesses the pointer width of `triple`: 64-bit when the triple mentions
+/// "64" (except the x32 ABI, `*gnux32`) or starts with `s390x`; otherwise
+/// 32-bit.
+pub fn get_pointer_width(triple: &str) -> &'static str {
+    let is_64bit = triple.starts_with("s390x")
+        || (triple.contains("64") && !triple.ends_with("gnux32"));
+    if is_64bit { "64bit" } else { "32bit" }
+}
+
+/// Prepends `path` to the current value of the library-search environment
+/// variable; only valid on Windows (asserted).
+pub fn make_new_path(path: &str) -> String {
+    assert!(cfg!(windows));
+    // Windows just uses PATH as the library search path, so we have to
+    // maintain the current value while adding our own
+    if let Ok(curr) = env::var(lib_path_env_var()) {
+        format!("{}{}{}", path, path_div(), curr)
+    } else {
+        path.to_owned()
+    }
+}
+
+/// Name of the environment variable used as the library search path here
+/// (plain `PATH`; see `make_new_path` above for the Windows rationale).
+pub fn lib_path_env_var() -> &'static str {
+    "PATH"
+}
+/// Separator between entries of the PATH-style variable (Windows `;`).
+fn path_div() -> &'static str {
+    ";"
+}
+
+/// Logs `s` through the `debug!` macro and, when the config is verbose,
+/// echoes it to stdout as well.
+pub fn logv(config: &Config, s: String) {
+    debug!("{}", s);
+    if config.verbose {
+        println!("{}", s);
+    }
+}
+
+/// Extension helpers for `PathBuf` used throughout the test runner.
+pub trait PathBufExt {
+    /// Append an extension to the path, even if it already has one.
+    fn with_extra_extension<S: AsRef<OsStr>>(&self, extension: S) -> PathBuf;
+}
+
+impl PathBufExt for PathBuf {
+    /// Appends `extension` after any existing extension; an empty extension
+    /// leaves the path unchanged, and a leading `.` is added when missing.
+    fn with_extra_extension<S: AsRef<OsStr>>(&self, extension: S) -> PathBuf {
+        let ext = extension.as_ref();
+        if ext.is_empty() {
+            return self.clone();
+        }
+        let mut fname = self.file_name().unwrap().to_os_string();
+        if !ext.to_str().unwrap().starts_with('.') {
+            fname.push(".");
+        }
+        fname.push(ext);
+        self.with_file_name(fname)
+    }
+}
diff --git a/vendor/compiletest_rs/tests/bless.rs b/vendor/compiletest_rs/tests/bless.rs
new file mode 100644
index 000000000..9ee26f4a6
--- /dev/null
+++ b/vendor/compiletest_rs/tests/bless.rs
@@ -0,0 +1,85 @@
+//! Tests for the `bless` option
+
+extern crate compiletest_rs as compiletest;
+
+mod test_support;
+use test_support::{testsuite, TestsuiteBuilder, GLOBAL_ROOT};
+use compiletest::Config;
+
+/// Creates a fresh testsuite directory for `mode` plus a matching `Config`
+/// pointing at it, with build artifacts under the shared global root.
+fn setup(mode: &str) -> (Config, TestsuiteBuilder) {
+    let builder = testsuite(mode);
+    let mut config = Config::default();
+    config.mode = mode.parse().expect("Invalid mode");
+    config.src_base = builder.root.clone();
+    config.build_base = GLOBAL_ROOT.join("build_base");
+    (config, builder)
+}
+
+/// `--bless` must create a missing reference (.stderr) file from scratch,
+/// and the blessed suite must then pass without `--bless`.
+#[test]
+fn test_bless_new_file() {
+    let (mut config, builder) = setup("ui");
+    config.bless = true;
+
+    // A UI test with no pre-existing .stderr reference file.
+    builder.mk_file(
+        "foobar.rs",
+        r#"
+        #[warn(unused_variables)]
+        fn main() {
+            let abc = "foobar";
+        }
+        "#,
+    );
+    compiletest::run_tests(&config);
+
+    // Blessing should cause the stderr to be created directly
+    assert!(builder.file_contents("foobar.stderr").contains("unused variable"));
+
+    // And a second run of the tests, with blessing disabled should work just fine
+    config.bless = false;
+    compiletest::run_tests(&config);
+}
+
+/// `--bless` must overwrite a stale reference (.stderr) file with the
+/// compiler's current output, after which the suite passes unblessed.
+#[test]
+fn test_bless_update_file() {
+    let (mut config, builder) = setup("ui");
+    config.bless = true;
+
+    builder.mk_file(
+        "foobar2.rs",
+        r#"
+        #[warn(unused_variables)]
+        fn main() {
+            let abc = "foobar_update";
+        }
+        "#,
+    );
+    // A deliberately outdated reference file (mentions "foobar", not
+    // "foobar_update") that blessing must replace.
+    builder.mk_file(
+        "foobar2.stderr",
+        r#"
+        warning: unused variable: `abc`
+          --> $DIR/foobar2.rs:4:27
+           |
+         4 |             let abc = "foobar";
+           |                 ^^^ help: if this is intentional, prefix it with an underscore: `_abc`
+           |
+        note: the lint level is defined here
+          --> $DIR/foobar2.rs:2:26
+           |
+         2 |         #[warn(unused_variables)]
+           |                ^^^^^^^^^^^^^^^^
+
+        warning: 1 warning emitted
+        "#,
+    );
+    compiletest::run_tests(&config);
+
+    // Blessing should cause the stderr to be created directly
+    assert!(builder.file_contents("foobar2.stderr").contains("unused variable"));
+    assert!(builder.file_contents("foobar2.stderr").contains("foobar_update"));
+
+    // And a second run of the tests, with blessing disabled should work just fine
+    config.bless = false;
+    compiletest::run_tests(&config);
+}
diff --git a/vendor/compiletest_rs/tests/test_support/mod.rs b/vendor/compiletest_rs/tests/test_support/mod.rs
new file mode 100644
index 000000000..c6962af86
--- /dev/null
+++ b/vendor/compiletest_rs/tests/test_support/mod.rs
@@ -0,0 +1,105 @@
+//! Provides a simple way to set up compiletest sample testsuites used in testing.
+//!
+//! Inspired by cargo's `cargo-test-support` crate:
+//! https://github.com/rust-lang/cargo/tree/master/crates/cargo-test-support
+use std::env;
+use std::fs;
+use std::path::{Path, PathBuf};
+use std::cell::RefCell;
+use std::sync::atomic::{AtomicUsize, Ordering};
+
+
+/// Name of the scratch directory (under the target dir) that holds all
+/// integration-test suites; see `GLOBAL_ROOT` below.
+static COMPILETEST_INTEGRATION_TEST_DIR: &str = "cit";
+
+thread_local! {
+    // Per-thread id of the testsuite currently being built; assigned in
+    // `TestsuiteBuilder::new`.
+    static TEST_ID: RefCell<Option<usize>> = RefCell::new(None);
+}
+
+lazy_static::lazy_static! {
+    /// Shared scratch root for all integration tests: derived from the test
+    /// executable's location (`<cargo target dir>/cit`), created on first use.
+    pub static ref GLOBAL_ROOT: PathBuf = {
+        let mut path = env::current_exe().unwrap();
+        path.pop(); // chop off exe name
+        path.pop(); // chop off 'deps' part
+        path.pop(); // chop off 'debug'
+
+        path.push(COMPILETEST_INTEGRATION_TEST_DIR);
+        path.mkdir_p();
+        path
+    };
+}
+
+/// Creates and initializes (cleans + recreates) a fresh testsuite root for
+/// `mode`, returning its builder.
+pub fn testsuite(mode: &str) -> TestsuiteBuilder {
+    let suite = TestsuiteBuilder::new(mode);
+    suite.build();
+    suite
+}
+
+/// Handle to a generated testsuite directory; `root` is the per-test,
+/// per-mode source directory that fixture files are written into.
+pub struct TestsuiteBuilder {
+    pub root: PathBuf,
+}
+
+impl TestsuiteBuilder {
+    /// Allocates a process-unique id (recorded for the current thread) and
+    /// creates the suite root `<GLOBAL_ROOT>/id<N>/<mode>`.
+    pub fn new(mode: &str) -> Self {
+        static NEXT_ID: AtomicUsize = AtomicUsize::new(0);
+
+        let id = NEXT_ID.fetch_add(1, Ordering::Relaxed);
+        TEST_ID.with(|n| *n.borrow_mut() = Some(id));
+        let root = GLOBAL_ROOT.join(format!("id{}", TEST_ID.with(|n|n.borrow().unwrap()))).join(mode);
+        root.mkdir_p();
+
+        Self {
+            root,
+        }
+    }
+
+
+    /// Creates a new file to be used for the integration test
+    /// (`path` is relative to the suite root; `body` is written verbatim).
+    pub fn mk_file(&self, path: &str, body: &str) {
+        self.root.mkdir_p();
+        fs::write(self.root.join(&path), &body)
+            .unwrap_or_else(|e| panic!("could not create file {}: {}", path, e));
+    }
+
+    /// Returns the contents of the file (`name` relative to the suite root).
+    pub fn file_contents(&self, name: &str) -> String {
+        fs::read_to_string(self.root.join(name)).expect("Unable to read file")
+    }
+
+    // Sets up a new testsuite root directory
+    fn build(&self) {
+        // Cleanup before we run the next test
+        self.rm_root();
+
+        // Create the new directory
+        self.root.mkdir_p();
+    }
+
+    /// Deletes the root directory and all its contents
+    fn rm_root(&self) {
+        self.root.rm_rf();
+    }
+}
+
+/// Small filesystem helpers used by the test-support code.
+pub trait PathExt {
+    /// Removes the file or directory tree at `self`, panicking on failure.
+    fn rm_rf(&self);
+    /// Creates `self` and all missing parents, panicking on failure.
+    fn mkdir_p(&self);
+}
+
+impl PathExt for Path {
+    /// Recursively removes the file or directory at `self`.
+    ///
+    /// A nonexistent path is treated as already removed (no-op) rather than
+    /// panicking — `fs::remove_file` on a missing path errors — so cleanup
+    /// can run unconditionally (cf. `delete_file` in runtest.rs).
+    fn rm_rf(&self) {
+        if !self.exists() {
+            return;
+        }
+        if self.is_dir() {
+            if let Err(e) = fs::remove_dir_all(self) {
+                panic!("failed to remove {:?}: {:?}", self, e)
+            }
+        } else {
+            if let Err(e) = fs::remove_file(self) {
+                panic!("failed to remove {:?}: {:?}", self, e)
+            }
+        }
+    }
+
+    /// Creates the directory and all missing parents.
+    fn mkdir_p(&self) {
+        fs::create_dir_all(self)
+            .unwrap_or_else(|e| panic!("failed to mkdir_p {}: {}", self.display(), e))
+    }
+}