author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 12:19:13 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 12:19:13 +0000
commit     218caa410aa38c29984be31a5229b9fa717560ee (patch)
tree       c54bd55eeb6e4c508940a30e94c0032fbd45d677 /tests/ui/drop
parent     Releasing progress-linux version 1.67.1+dfsg1-1~progress7.99u1. (diff)
download   rustc-218caa410aa38c29984be31a5229b9fa717560ee.tar.xz
           rustc-218caa410aa38c29984be31a5229b9fa717560ee.zip
Merging upstream version 1.68.2+dfsg1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'tests/ui/drop')
-rw-r--r-- tests/ui/drop/auxiliary/dropck_eyepatch_extern_crate.rs | 50
-rw-r--r-- tests/ui/drop/auxiliary/inline_dtor.rs | 8
-rw-r--r-- tests/ui/drop/auxiliary/issue-10028.rs | 9
-rw-r--r-- tests/ui/drop/drop-foreign-fundamental.rs | 23
-rw-r--r-- tests/ui/drop/drop-foreign-fundamental.stderr | 9
-rw-r--r-- tests/ui/drop/drop-if-let-binding.rs | 7
-rw-r--r-- tests/ui/drop/drop-on-empty-block-exit.rs | 10
-rw-r--r-- tests/ui/drop/drop-on-ret.rs | 15
-rw-r--r-- tests/ui/drop/drop-struct-as-object.rs | 36
-rw-r--r-- tests/ui/drop/drop-trait-enum.rs | 94
-rw-r--r-- tests/ui/drop/drop-trait-generic.rs | 15
-rw-r--r-- tests/ui/drop/drop-trait.rs | 15
-rw-r--r-- tests/ui/drop/drop-uninhabited-enum.rs | 14
-rw-r--r-- tests/ui/drop/drop-with-type-ascription-1.rs | 8
-rw-r--r-- tests/ui/drop/drop-with-type-ascription-2.rs | 8
-rw-r--r-- tests/ui/drop/drop_order.rs | 292
-rw-r--r-- tests/ui/drop/dropck-eyepatch-extern-crate.rs | 39
-rw-r--r-- tests/ui/drop/dropck-eyepatch-reorder.rs | 79
-rw-r--r-- tests/ui/drop/dropck-eyepatch.rs | 102
-rw-r--r-- tests/ui/drop/dropck_legal_cycles.rs | 1183
-rw-r--r-- tests/ui/drop/dynamic-drop-async.rs | 332
-rw-r--r-- tests/ui/drop/dynamic-drop.rs | 520
-rw-r--r-- tests/ui/drop/issue-100276.rs | 12
-rw-r--r-- tests/ui/drop/issue-10028.rs | 21
-rw-r--r-- tests/ui/drop/issue-103107.rs | 37
-rw-r--r-- tests/ui/drop/issue-17718-const-destructors.rs | 10
-rw-r--r-- tests/ui/drop/issue-21486.rs | 77
-rw-r--r-- tests/ui/drop/issue-23338-ensure-param-drop-order.rs | 162
-rw-r--r-- tests/ui/drop/issue-2734.rs | 22
-rw-r--r-- tests/ui/drop/issue-30018-nopanic.rs | 103
-rw-r--r-- tests/ui/drop/issue-35546.rs | 20
-rw-r--r-- tests/ui/drop/issue-48962.rs | 34
-rw-r--r-- tests/ui/drop/issue-90752-raw-ptr-shenanigans.rs | 41
-rw-r--r-- tests/ui/drop/issue-90752.rs | 32
-rw-r--r-- tests/ui/drop/no-drop-flag-size.rs | 15
-rw-r--r-- tests/ui/drop/nondrop-cycle.rs | 31
-rw-r--r-- tests/ui/drop/repeat-drop-2.rs | 15
-rw-r--r-- tests/ui/drop/repeat-drop-2.stderr | 41
-rw-r--r-- tests/ui/drop/repeat-drop.rs | 118
-rw-r--r-- tests/ui/drop/terminate-in-initializer.rs | 34
-rw-r--r-- tests/ui/drop/use_inline_dtor.rs | 10
41 files changed, 3703 insertions, 0 deletions
diff --git a/tests/ui/drop/auxiliary/dropck_eyepatch_extern_crate.rs b/tests/ui/drop/auxiliary/dropck_eyepatch_extern_crate.rs
new file mode 100644
index 000000000..270d5de7a
--- /dev/null
+++ b/tests/ui/drop/auxiliary/dropck_eyepatch_extern_crate.rs
@@ -0,0 +1,50 @@
+#![feature(dropck_eyepatch)]
+
+// The point of this test is to illustrate that the `#[may_dangle]`
+// attribute specifically allows, in the context of a type
+// implementing `Drop`, a generic parameter to be instantiated with a
+// lifetime that does not strictly outlive the owning type itself,
+// and that this attribute's effects are preserved when importing
+// the type from another crate.
+//
+// See also dropck-eyepatch.rs for more information about the general
+// structure of the test.
+
+use std::cell::RefCell;
+
+pub trait Foo { fn foo(&self, _: &str); }
+
+pub struct Dt<A: Foo>(pub &'static str, pub A);
+pub struct Dr<'a, B:'a+Foo>(pub &'static str, pub &'a B);
+pub struct Pt<A,B: Foo>(pub &'static str, pub A, pub B);
+pub struct Pr<'a, 'b, B:'a+'b+Foo>(pub &'static str, pub &'a B, pub &'b B);
+pub struct St<A: Foo>(pub &'static str, pub A);
+pub struct Sr<'a, B:'a+Foo>(pub &'static str, pub &'a B);
+
+impl<A: Foo> Drop for Dt<A> {
+ fn drop(&mut self) { println!("drop {}", self.0); self.1.foo(self.0); }
+}
+impl<'a, B: Foo> Drop for Dr<'a, B> {
+ fn drop(&mut self) { println!("drop {}", self.0); self.1.foo(self.0); }
+}
+unsafe impl<#[may_dangle] A, B: Foo> Drop for Pt<A, B> {
+ // (unsafe to access self.1 due to #[may_dangle] on A)
+ fn drop(&mut self) { println!("drop {}", self.0); self.2.foo(self.0); }
+}
+unsafe impl<#[may_dangle] 'a, 'b, B: Foo> Drop for Pr<'a, 'b, B> {
+ // (unsafe to access self.1 due to #[may_dangle] on 'a)
+ fn drop(&mut self) { println!("drop {}", self.0); self.2.foo(self.0); }
+}
+
+impl Foo for RefCell<String> {
+ fn foo(&self, s: &str) {
+ let s2 = format!("{}|{}", *self.borrow(), s);
+ *self.borrow_mut() = s2;
+ }
+}
+
+impl<'a, T:Foo> Foo for &'a T {
+ fn foo(&self, s: &str) {
+ (*self).foo(s);
+ }
+}
diff --git a/tests/ui/drop/auxiliary/inline_dtor.rs b/tests/ui/drop/auxiliary/inline_dtor.rs
new file mode 100644
index 000000000..5eee89fdc
--- /dev/null
+++ b/tests/ui/drop/auxiliary/inline_dtor.rs
@@ -0,0 +1,8 @@
+#![crate_name="inline_dtor"]
+
+pub struct Foo;
+
+impl Drop for Foo {
+ #[inline]
+ fn drop(&mut self) {}
+}
diff --git a/tests/ui/drop/auxiliary/issue-10028.rs b/tests/ui/drop/auxiliary/issue-10028.rs
new file mode 100644
index 000000000..135f26f40
--- /dev/null
+++ b/tests/ui/drop/auxiliary/issue-10028.rs
@@ -0,0 +1,9 @@
+pub struct ZeroLengthThingWithDestructor;
+impl Drop for ZeroLengthThingWithDestructor {
+ fn drop(&mut self) {}
+}
+impl ZeroLengthThingWithDestructor {
+ pub fn new() -> ZeroLengthThingWithDestructor {
+ ZeroLengthThingWithDestructor
+ }
+}
diff --git a/tests/ui/drop/drop-foreign-fundamental.rs b/tests/ui/drop/drop-foreign-fundamental.rs
new file mode 100644
index 000000000..c43df40d6
--- /dev/null
+++ b/tests/ui/drop/drop-foreign-fundamental.rs
@@ -0,0 +1,23 @@
+use std::ops::Deref;
+use std::pin::Pin;
+
+struct Whatever<T>(T);
+
+impl<T> Deref for Whatever<T> {
+ type Target = T;
+
+ fn deref(&self) -> &T {
+ &self.0
+ }
+}
+
+struct A;
+
+impl Drop for Pin<Whatever<A>> {
+ //~^ ERROR the `Drop` trait may only be implemented for local structs, enums, and unions
+ fn drop(&mut self) {}
+}
+
+fn main() {
+ let x = Pin::new(Whatever(1.0f32));
+}
diff --git a/tests/ui/drop/drop-foreign-fundamental.stderr b/tests/ui/drop/drop-foreign-fundamental.stderr
new file mode 100644
index 000000000..fbd1ba085
--- /dev/null
+++ b/tests/ui/drop/drop-foreign-fundamental.stderr
@@ -0,0 +1,9 @@
+error[E0120]: the `Drop` trait may only be implemented for local structs, enums, and unions
+ --> $DIR/drop-foreign-fundamental.rs:16:15
+ |
+LL | impl Drop for Pin<Whatever<A>> {
+ | ^^^^^^^^^^^^^^^^ must be a struct, enum, or union in the current crate
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0120`.
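
Aside on the error above (an illustrative sketch, not part of the patch): E0120 is reported because `Drop` may only be implemented directly on a struct, enum, or union defined in the current crate, and `Pin` is defined in `std`, even though its `#[fundamental]` status admits other trait impls through coherence. The usual way to attach drop behavior in such a situation is a local newtype around the foreign type, roughly:

    use std::pin::Pin;

    struct Whatever<T>(T);

    // A local wrapper may implement Drop, while `Pin<Whatever<A>>` may not.
    struct Wrapper(Pin<Whatever<f32>>);

    impl Drop for Wrapper {
        fn drop(&mut self) {
            println!("dropping the wrapper");
        }
    }

    fn main() {}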
diff --git a/tests/ui/drop/drop-if-let-binding.rs b/tests/ui/drop/drop-if-let-binding.rs
new file mode 100644
index 000000000..9c1ac4e0c
--- /dev/null
+++ b/tests/ui/drop/drop-if-let-binding.rs
@@ -0,0 +1,7 @@
+// build-pass
+// regression test for issue #88307
+// compile-flags: -C opt-level=s
+
+fn main() {
+ if let Some(_val) = Option::<String>::None {}
+}
diff --git a/tests/ui/drop/drop-on-empty-block-exit.rs b/tests/ui/drop/drop-on-empty-block-exit.rs
new file mode 100644
index 000000000..ef3a90a53
--- /dev/null
+++ b/tests/ui/drop/drop-on-empty-block-exit.rs
@@ -0,0 +1,10 @@
+// run-pass
+// pretty-expanded FIXME #23616
+#![allow(non_camel_case_types)]
+
+enum t { foo(Box<isize>), }
+
+pub fn main() {
+ let tt = t::foo(Box::new(10));
+ match tt { t::foo(_z) => { } }
+}
diff --git a/tests/ui/drop/drop-on-ret.rs b/tests/ui/drop/drop-on-ret.rs
new file mode 100644
index 000000000..290e274f3
--- /dev/null
+++ b/tests/ui/drop/drop-on-ret.rs
@@ -0,0 +1,15 @@
+// run-pass
+
+
+
+// pretty-expanded FIXME #23616
+
+fn f() -> isize {
+ if true {
+ let _s: String = "should not leak".to_string();
+ return 1;
+ }
+ return 0;
+}
+
+pub fn main() { f(); }
diff --git a/tests/ui/drop/drop-struct-as-object.rs b/tests/ui/drop/drop-struct-as-object.rs
new file mode 100644
index 000000000..377027a4f
--- /dev/null
+++ b/tests/ui/drop/drop-struct-as-object.rs
@@ -0,0 +1,36 @@
+// run-pass
+#![allow(unused_variables)]
+#![allow(non_upper_case_globals)]
+
+// Test that destructor on a struct runs successfully after the struct
+// is boxed and converted to an object.
+
+static mut value: usize = 0;
+
+struct Cat {
+ name : usize,
+}
+
+trait Dummy {
+ fn get(&self) -> usize;
+}
+
+impl Dummy for Cat {
+ fn get(&self) -> usize { self.name }
+}
+
+impl Drop for Cat {
+ fn drop(&mut self) {
+ unsafe { value = self.name; }
+ }
+}
+
+pub fn main() {
+ {
+ let x = Box::new(Cat {name: 22});
+ let nyan: Box<dyn Dummy> = x as Box<dyn Dummy>;
+ }
+ unsafe {
+ assert_eq!(value, 22);
+ }
+}
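
As a side note (a standalone sketch, not part of the patch), the property exercised above, that dropping a `Box<dyn Trait>` runs the concrete type's destructor through the vtable, can also be observed without `static mut`, for example with a shared `Cell` flag:

    use std::cell::Cell;
    use std::rc::Rc;

    trait Dummy {}

    struct Cat {
        dropped: Rc<Cell<bool>>,
    }

    impl Dummy for Cat {}

    impl Drop for Cat {
        fn drop(&mut self) {
            self.dropped.set(true);
        }
    }

    fn main() {
        let flag = Rc::new(Cell::new(false));
        let nyan: Box<dyn Dummy> = Box::new(Cat { dropped: flag.clone() });
        drop(nyan); // dynamic drop: Cat::drop runs through the vtable
        assert!(flag.get());
    }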
diff --git a/tests/ui/drop/drop-trait-enum.rs b/tests/ui/drop/drop-trait-enum.rs
new file mode 100644
index 000000000..d2b77650a
--- /dev/null
+++ b/tests/ui/drop/drop-trait-enum.rs
@@ -0,0 +1,94 @@
+// run-pass
+#![allow(dead_code)]
+#![allow(unused_assignments)]
+#![allow(unused_variables)]
+// ignore-emscripten no threads support
+// needs-unwind
+
+use std::thread;
+use std::sync::mpsc::{channel, Sender};
+
+#[derive(PartialEq, Debug)]
+enum Message {
+ Dropped,
+ DestructorRan
+}
+
+struct SendOnDrop {
+ sender: Sender<Message>
+}
+
+impl Drop for SendOnDrop {
+ fn drop(&mut self) {
+ self.sender.send(Message::Dropped).unwrap();
+ }
+}
+
+enum Foo {
+ SimpleVariant(Sender<Message>),
+ NestedVariant(Box<usize>, SendOnDrop, Sender<Message>),
+ FailingVariant { on_drop: SendOnDrop }
+}
+
+impl Drop for Foo {
+ fn drop(&mut self) {
+ match self {
+ &mut Foo::SimpleVariant(ref mut sender) => {
+ sender.send(Message::DestructorRan).unwrap();
+ }
+ &mut Foo::NestedVariant(_, _, ref mut sender) => {
+ sender.send(Message::DestructorRan).unwrap();
+ }
+ &mut Foo::FailingVariant { .. } => {
+ panic!("Failed");
+ }
+ }
+ }
+}
+
+pub fn main() {
+ let (sender, receiver) = channel();
+ {
+ let v = Foo::SimpleVariant(sender);
+ }
+ assert_eq!(receiver.recv().unwrap(), Message::DestructorRan);
+ assert_eq!(receiver.recv().ok(), None);
+
+ let (sender, receiver) = channel();
+ {
+ let v = Foo::NestedVariant(Box::new(42), SendOnDrop { sender: sender.clone() }, sender);
+ }
+ assert_eq!(receiver.recv().unwrap(), Message::DestructorRan);
+ assert_eq!(receiver.recv().unwrap(), Message::Dropped);
+ assert_eq!(receiver.recv().ok(), None);
+
+ let (sender, receiver) = channel();
+ let t = thread::spawn(move|| {
+ let v = Foo::FailingVariant { on_drop: SendOnDrop { sender: sender } };
+ });
+ assert_eq!(receiver.recv().unwrap(), Message::Dropped);
+ assert_eq!(receiver.recv().ok(), None);
+ drop(t.join());
+
+ let (sender, receiver) = channel();
+ let t = {
+ thread::spawn(move|| {
+ let mut v = Foo::NestedVariant(Box::new(42), SendOnDrop {
+ sender: sender.clone()
+ }, sender.clone());
+ v = Foo::NestedVariant(Box::new(42),
+ SendOnDrop { sender: sender.clone() },
+ sender.clone());
+ v = Foo::SimpleVariant(sender.clone());
+ v = Foo::FailingVariant { on_drop: SendOnDrop { sender: sender } };
+ })
+ };
+ assert_eq!(receiver.recv().unwrap(), Message::DestructorRan);
+ assert_eq!(receiver.recv().unwrap(), Message::Dropped);
+ assert_eq!(receiver.recv().unwrap(), Message::DestructorRan);
+ assert_eq!(receiver.recv().unwrap(), Message::Dropped);
+ assert_eq!(receiver.recv().unwrap(), Message::DestructorRan);
+ assert_eq!(receiver.recv().unwrap(), Message::Dropped);
+ assert_eq!(receiver.recv().ok(), None);
+ drop(t.join());
+}
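
A note on the expected message order in the `NestedVariant` cases above: `DestructorRan` is received before `Dropped` because a value's own `Drop::drop` runs before the destructors of its fields. A minimal standalone sketch of that ordering (not part of the patch):

    struct Inner;
    impl Drop for Inner {
        fn drop(&mut self) {
            println!("inner field dropped second");
        }
    }

    struct Outer(Inner);
    impl Drop for Outer {
        fn drop(&mut self) {
            println!("outer dropped first");
        }
    }

    fn main() {
        // Prints "outer dropped first", then "inner field dropped second".
        let _x = Outer(Inner);
    }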
diff --git a/tests/ui/drop/drop-trait-generic.rs b/tests/ui/drop/drop-trait-generic.rs
new file mode 100644
index 000000000..cdefb680c
--- /dev/null
+++ b/tests/ui/drop/drop-trait-generic.rs
@@ -0,0 +1,15 @@
+// run-pass
+#![allow(dead_code)]
+struct S<T> {
+ x: T
+}
+
+impl<T> ::std::ops::Drop for S<T> {
+ fn drop(&mut self) {
+ println!("bye");
+ }
+}
+
+pub fn main() {
+ let _x = S { x: 1 };
+}
diff --git a/tests/ui/drop/drop-trait.rs b/tests/ui/drop/drop-trait.rs
new file mode 100644
index 000000000..d93f77180
--- /dev/null
+++ b/tests/ui/drop/drop-trait.rs
@@ -0,0 +1,15 @@
+// run-pass
+#![allow(dead_code)]
+struct Foo {
+ x: isize
+}
+
+impl Drop for Foo {
+ fn drop(&mut self) {
+ println!("bye");
+ }
+}
+
+pub fn main() {
+ let _x: Foo = Foo { x: 3 };
+}
diff --git a/tests/ui/drop/drop-uninhabited-enum.rs b/tests/ui/drop/drop-uninhabited-enum.rs
new file mode 100644
index 000000000..b3566f685
--- /dev/null
+++ b/tests/ui/drop/drop-uninhabited-enum.rs
@@ -0,0 +1,14 @@
+// run-pass
+#![allow(dead_code)]
+#![allow(unused_variables)]
+// pretty-expanded FIXME #23616
+
+enum Foo { }
+
+impl Drop for Foo {
+ fn drop(&mut self) { }
+}
+
+fn foo(x: Foo) { }
+
+fn main() { }
diff --git a/tests/ui/drop/drop-with-type-ascription-1.rs b/tests/ui/drop/drop-with-type-ascription-1.rs
new file mode 100644
index 000000000..e5a1a48df
--- /dev/null
+++ b/tests/ui/drop/drop-with-type-ascription-1.rs
@@ -0,0 +1,8 @@
+// run-pass
+
+fn main() {
+ let foo = "hello".to_string();
+ let foo: Vec<&str> = foo.split_whitespace().collect();
+ let invalid_string = &foo[0];
+ assert_eq!(*invalid_string, "hello");
+}
diff --git a/tests/ui/drop/drop-with-type-ascription-2.rs b/tests/ui/drop/drop-with-type-ascription-2.rs
new file mode 100644
index 000000000..fb70ad48e
--- /dev/null
+++ b/tests/ui/drop/drop-with-type-ascription-2.rs
@@ -0,0 +1,8 @@
+// run-pass
+
+fn main() {
+ let args = vec!["foobie", "asdf::asdf"];
+ let arr: Vec<&str> = args[1].split("::").collect();
+ assert_eq!(arr[0], "asdf");
+ assert_eq!(arr[0], "asdf");
+}
diff --git a/tests/ui/drop/drop_order.rs b/tests/ui/drop/drop_order.rs
new file mode 100644
index 000000000..5ce1fd54a
--- /dev/null
+++ b/tests/ui/drop/drop_order.rs
@@ -0,0 +1,292 @@
+// run-pass
+// compile-flags: -Z validate-mir
+#![feature(let_chains)]
+
+use std::cell::RefCell;
+use std::convert::TryInto;
+
+#[derive(Default)]
+struct DropOrderCollector(RefCell<Vec<u32>>);
+
+struct LoudDrop<'a>(&'a DropOrderCollector, u32);
+
+impl Drop for LoudDrop<'_> {
+ fn drop(&mut self) {
+ println!("{}", self.1);
+ self.0.0.borrow_mut().push(self.1);
+ }
+}
+
+impl DropOrderCollector {
+ fn option_loud_drop(&self, n: u32) -> Option<LoudDrop> {
+ Some(LoudDrop(self, n))
+ }
+
+ fn loud_drop(&self, n: u32) -> LoudDrop {
+ LoudDrop(self, n)
+ }
+
+ fn print(&self, n: u32) {
+ println!("{}", n);
+ self.0.borrow_mut().push(n)
+ }
+
+ fn if_(&self) {
+ if self.option_loud_drop(1).is_some() {
+ self.print(2);
+ }
+
+ if self.option_loud_drop(3).is_none() {
+ unreachable!();
+ } else if self.option_loud_drop(4).is_some() {
+ self.print(5);
+ }
+
+ if {
+ if self.option_loud_drop(6).is_some() && self.option_loud_drop(7).is_some() {
+ self.loud_drop(8);
+ true
+ } else {
+ false
+ }
+ } {
+ self.print(9);
+ }
+ }
+
+ fn if_let(&self) {
+ if let None = self.option_loud_drop(2) {
+ unreachable!();
+ } else {
+ self.print(1);
+ }
+
+ if let Some(_) = self.option_loud_drop(4) {
+ self.print(3);
+ }
+
+ if let Some(_d) = self.option_loud_drop(6) {
+ self.print(5);
+ }
+ }
+
+ fn match_(&self) {
+ match self.option_loud_drop(2) {
+ _any => self.print(1),
+ }
+
+ match self.option_loud_drop(4) {
+ _ => self.print(3),
+ }
+
+ match self.option_loud_drop(6) {
+ Some(_) => self.print(5),
+ _ => unreachable!(),
+ }
+
+ match {
+ let _ = self.loud_drop(7);
+ let _d = self.loud_drop(9);
+ self.print(8);
+ ()
+ } {
+ () => self.print(10),
+ }
+
+ match {
+ match self.option_loud_drop(14) {
+ _ => {
+ self.print(11);
+ self.option_loud_drop(13)
+ }
+ }
+ } {
+ _ => self.print(12),
+ }
+
+ match {
+ loop {
+ break match self.option_loud_drop(16) {
+ _ => {
+ self.print(15);
+ self.option_loud_drop(18)
+ }
+ };
+ }
+ } {
+ _ => self.print(17),
+ }
+ }
+
+ fn and_chain(&self) {
+ // issue-103107
+ if self.option_loud_drop(1).is_some() // 1
+ && self.option_loud_drop(2).is_some() // 2
+ && self.option_loud_drop(3).is_some() // 3
+ && self.option_loud_drop(4).is_some() // 4
+ && self.option_loud_drop(5).is_some() // 5
+ {
+ self.print(6); // 6
+ }
+
+ let _ = self.option_loud_drop(7).is_some() // 1
+ && self.option_loud_drop(8).is_some() // 2
+ && self.option_loud_drop(9).is_some(); // 3
+ self.print(10); // 4
+
+ // Test associativity
+ if self.option_loud_drop(11).is_some() // 1
+ && (self.option_loud_drop(12).is_some() // 2
+ && self.option_loud_drop(13).is_some() // 3
+ && self.option_loud_drop(14).is_some()) // 4
+ && self.option_loud_drop(15).is_some() // 5
+ {
+ self.print(16); // 6
+ }
+ }
+
+ fn or_chain(&self) {
+ // issue-103107
+ if self.option_loud_drop(1).is_none() // 1
+ || self.option_loud_drop(2).is_none() // 2
+ || self.option_loud_drop(3).is_none() // 3
+ || self.option_loud_drop(4).is_none() // 4
+ || self.option_loud_drop(5).is_some() // 5
+ {
+ self.print(6); // 6
+ }
+
+ let _ = self.option_loud_drop(7).is_none() // 1
+ || self.option_loud_drop(8).is_none() // 2
+ || self.option_loud_drop(9).is_none(); // 3
+ self.print(10); // 4
+
+ // Test associativity
+ if self.option_loud_drop(11).is_none() // 1
+ || (self.option_loud_drop(12).is_none() // 2
+ || self.option_loud_drop(13).is_none() // 3
+ || self.option_loud_drop(14).is_none()) // 4
+ || self.option_loud_drop(15).is_some() // 5
+ {
+ self.print(16); // 6
+ }
+ }
+
+ fn mixed_and_or_chain(&self) {
+ // issue-103107
+ if self.option_loud_drop(1).is_none() // 1
+ || self.option_loud_drop(2).is_none() // 2
+ || self.option_loud_drop(3).is_some() // 3
+ && self.option_loud_drop(4).is_some() // 4
+ && self.option_loud_drop(5).is_none() // 5
+ || self.option_loud_drop(6).is_none() // 6
+ || self.option_loud_drop(7).is_some() // 7
+ {
+ self.print(8); // 8
+ }
+ }
+
+ fn let_chain(&self) {
+ // take the "then" branch
+ if self.option_loud_drop(1).is_some() // 1
+ && self.option_loud_drop(2).is_some() // 2
+ && let Some(_d) = self.option_loud_drop(4) { // 4
+ self.print(3); // 3
+ }
+
+ // take the "else" branch
+ if self.option_loud_drop(5).is_some() // 1
+ && self.option_loud_drop(6).is_some() // 2
+ && let None = self.option_loud_drop(8) { // 4
+ unreachable!();
+ } else {
+ self.print(7); // 3
+ }
+
+ // let exprs interspersed
+ if self.option_loud_drop(9).is_some() // 1
+ && let Some(_d) = self.option_loud_drop(13) // 5
+ && self.option_loud_drop(10).is_some() // 2
+ && let Some(_e) = self.option_loud_drop(12) { // 4
+ self.print(11); // 3
+ }
+
+ // let exprs first
+ if let Some(_d) = self.option_loud_drop(18) // 5
+ && let Some(_e) = self.option_loud_drop(17) // 4
+ && self.option_loud_drop(14).is_some() // 1
+ && self.option_loud_drop(15).is_some() { // 2
+ self.print(16); // 3
+ }
+
+ // let exprs last
+ if self.option_loud_drop(19).is_some() // 1
+ && self.option_loud_drop(20).is_some() // 2
+ && let Some(_d) = self.option_loud_drop(23) // 5
+ && let Some(_e) = self.option_loud_drop(22) { // 4
+ self.print(21); // 3
+ }
+ }
+
+ fn while_(&self) {
+ let mut v = self.option_loud_drop(4);
+ while let Some(_d) = v
+ && self.option_loud_drop(1).is_some()
+ && self.option_loud_drop(2).is_some() {
+ self.print(3);
+ v = None;
+ }
+ }
+
+ fn assert_sorted(self) {
+ assert!(
+ self.0
+ .into_inner()
+ .into_iter()
+ .enumerate()
+ .all(|(idx, item)| idx + 1 == item.try_into().unwrap())
+ );
+ }
+}
+
+fn main() {
+ println!("-- if --");
+ let collector = DropOrderCollector::default();
+ collector.if_();
+ collector.assert_sorted();
+
+ println!("-- and chain --");
+ let collector = DropOrderCollector::default();
+ collector.and_chain();
+ collector.assert_sorted();
+
+ println!("-- or chain --");
+ let collector = DropOrderCollector::default();
+ collector.or_chain();
+ collector.assert_sorted();
+
+ println!("-- mixed and/or chain --");
+ let collector = DropOrderCollector::default();
+ collector.mixed_and_or_chain();
+ collector.assert_sorted();
+
+ println!("-- if let --");
+ let collector = DropOrderCollector::default();
+ collector.if_let();
+ collector.assert_sorted();
+
+ println!("-- match --");
+ let collector = DropOrderCollector::default();
+ collector.match_();
+ collector.assert_sorted();
+
+ println!("-- let chain --");
+ let collector = DropOrderCollector::default();
+ collector.let_chain();
+ collector.assert_sorted();
+
+ println!("-- while --");
+ let collector = DropOrderCollector::default();
+ collector.while_();
+ collector.assert_sorted();
+}
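
The `match_` cases above depend on the scrutinee temporary outliving the arm body (for example, the `option_loud_drop(2)` temporary is dropped only after `print(1)` has run). A minimal standalone sketch of that rule (not part of the patch):

    struct Loud(&'static str);

    impl Drop for Loud {
        fn drop(&mut self) {
            println!("drop {}", self.0);
        }
    }

    fn main() {
        // The temporary created for the scrutinee lives until the end of the
        // enclosing statement, so "arm" prints before "drop scrutinee".
        match Some(Loud("scrutinee")) {
            _ => println!("arm"),
        }
    }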
diff --git a/tests/ui/drop/dropck-eyepatch-extern-crate.rs b/tests/ui/drop/dropck-eyepatch-extern-crate.rs
new file mode 100644
index 000000000..fecfd5edf
--- /dev/null
+++ b/tests/ui/drop/dropck-eyepatch-extern-crate.rs
@@ -0,0 +1,39 @@
+// run-pass
+// aux-build:dropck_eyepatch_extern_crate.rs
+
+extern crate dropck_eyepatch_extern_crate as other;
+
+use other::{Dt,Dr,Pt,Pr,St,Sr};
+
+fn main() {
+ use std::cell::RefCell;
+
+ struct CheckOnDrop(RefCell<String>, &'static str);
+ impl Drop for CheckOnDrop {
+ fn drop(&mut self) { assert_eq!(*self.0.borrow(), self.1); }
+ }
+
+ let c_long;
+ let (c, dt, dr, pt, pr, st, sr)
+ : (CheckOnDrop, Dt<_>, Dr<_>, Pt<_, _>, Pr<_>, St<_>, Sr<_>);
+ c_long = CheckOnDrop(RefCell::new("c_long".to_string()),
+ "c_long|pr|pt|dr|dt");
+ c = CheckOnDrop(RefCell::new("c".to_string()),
+ "c");
+
+ // No error: sufficiently long-lived state can be referenced in dtors
+ dt = Dt("dt", &c_long.0);
+ dr = Dr("dr", &c_long.0);
+
+ // No error: Drop impl asserts .1 (A and &'a _) are not accessed
+ pt = Pt("pt", &c.0, &c_long.0);
+ pr = Pr("pr", &c.0, &c_long.0);
+
+ // No error: St and Sr have no destructor.
+ st = St("st", &c.0);
+ sr = Sr("sr", &c.0);
+
+ println!("{:?}", (dt.0, dr.0, pt.0, pr.0, st.0, sr.0));
+ assert_eq!(*c_long.0.borrow(), "c_long");
+ assert_eq!(*c.0.borrow(), "c");
+}
diff --git a/tests/ui/drop/dropck-eyepatch-reorder.rs b/tests/ui/drop/dropck-eyepatch-reorder.rs
new file mode 100644
index 000000000..0d7af3d4f
--- /dev/null
+++ b/tests/ui/drop/dropck-eyepatch-reorder.rs
@@ -0,0 +1,79 @@
+// run-pass
+#![feature(dropck_eyepatch)]
+
+// The point of this test is to test uses of `#[may_dangle]` attribute
+// where the formal declaration order (in the impl generics) does not
+// match the actual usage order (in the type instantiation).
+//
+// See also dropck-eyepatch.rs for more information about the general
+// structure of the test.
+
+trait Foo { fn foo(&self, _: &str); }
+
+struct Dt<A: Foo>(&'static str, A);
+struct Dr<'a, B:'a+Foo>(&'static str, &'a B);
+struct Pt<A: Foo, B: Foo>(&'static str, #[allow(unused_tuple_struct_fields)] A, B);
+struct Pr<'a, 'b, B:'a+'b+Foo>(&'static str, #[allow(unused_tuple_struct_fields)] &'a B, &'b B);
+struct St<A: Foo>(&'static str, #[allow(unused_tuple_struct_fields)] A);
+struct Sr<'a, B:'a+Foo>(&'static str, #[allow(unused_tuple_struct_fields)] &'a B);
+
+impl<A: Foo> Drop for Dt<A> {
+ fn drop(&mut self) { println!("drop {}", self.0); self.1.foo(self.0); }
+}
+impl<'a, B: Foo> Drop for Dr<'a, B> {
+ fn drop(&mut self) { println!("drop {}", self.0); self.1.foo(self.0); }
+}
+unsafe impl<B: Foo, #[may_dangle] A: Foo> Drop for Pt<A, B> {
+ // (unsafe to access self.1 due to #[may_dangle] on A)
+ fn drop(&mut self) { println!("drop {}", self.0); self.2.foo(self.0); }
+}
+unsafe impl<'b, #[may_dangle] 'a, B: Foo> Drop for Pr<'a, 'b, B> {
+ // (unsafe to access self.1 due to #[may_dangle] on 'a)
+ fn drop(&mut self) { println!("drop {}", self.0); self.2.foo(self.0); }
+}
+
+fn main() {
+ use std::cell::RefCell;
+
+ impl Foo for RefCell<String> {
+ fn foo(&self, s: &str) {
+ let s2 = format!("{}|{}", *self.borrow(), s);
+ *self.borrow_mut() = s2;
+ }
+ }
+
+ impl<'a, T:Foo> Foo for &'a T {
+ fn foo(&self, s: &str) {
+ (*self).foo(s);
+ }
+ }
+
+ struct CheckOnDrop(RefCell<String>, &'static str);
+ impl Drop for CheckOnDrop {
+ fn drop(&mut self) { assert_eq!(*self.0.borrow(), self.1); }
+ }
+
+ let c_long;
+ let (c, dt, dr, pt, pr, st, sr)
+ : (CheckOnDrop, Dt<_>, Dr<_>, Pt<_, _>, Pr<_>, St<_>, Sr<_>);
+ c_long = CheckOnDrop(RefCell::new("c_long".to_string()),
+ "c_long|pr|pt|dr|dt");
+ c = CheckOnDrop(RefCell::new("c".to_string()),
+ "c");
+
+ // No error: sufficiently long-lived state can be referenced in dtors
+ dt = Dt("dt", &c_long.0);
+ dr = Dr("dr", &c_long.0);
+
+ // No error: Drop impl asserts .1 (A and &'a _) are not accessed
+ pt = Pt("pt", &c.0, &c_long.0);
+ pr = Pr("pr", &c.0, &c_long.0);
+
+ // No error: St and Sr have no destructor.
+ st = St("st", &c.0);
+ sr = Sr("sr", &c.0);
+
+ println!("{:?}", (dt.0, dr.0, pt.0, pr.0, st.0, sr.0));
+ assert_eq!(*c_long.0.borrow(), "c_long");
+ assert_eq!(*c.0.borrow(), "c");
+}
diff --git a/tests/ui/drop/dropck-eyepatch.rs b/tests/ui/drop/dropck-eyepatch.rs
new file mode 100644
index 000000000..3c4840d5c
--- /dev/null
+++ b/tests/ui/drop/dropck-eyepatch.rs
@@ -0,0 +1,102 @@
+// run-pass
+#![feature(dropck_eyepatch)]
+
+// The point of this test is to illustrate that the `#[may_dangle]`
+// attribute specifically allows, in the context of a type
+// implementing `Drop`, a generic parameter to be instantiated with a
+// lifetime that does not strictly outlive the owning type itself.
+//
+// Here we test that a model use of `#[may_dangle]` will compile and run.
+//
+// The illustration is made concrete by comparison with two variations
+// on the type with `#[may_dangle]`:
+//
+// 1. an analogous type that does not implement `Drop` (and thus
+// should exhibit maximal flexibility with respect to dropck), and
+//
+// 2. an analogous type that does not use `#[may_dangle]` (and thus
+// should exhibit the standard limitations imposed by dropck).
+//
+// The types in this file follow a pattern, {D,P,S}{t,r}, where:
+//
+// - D means "I implement Drop"
+//
+// - P means "I implement Drop but guarantee my (first) parameter is
+// pure, i.e., not accessed from the destructor"; no other parameters
+// are pure.
+//
+// - S means "I do not implement Drop"
+//
+// - t suffix is used when the first generic is a type
+//
+// - r suffix is used when the first generic is a lifetime.
+
+trait Foo { fn foo(&self, _: &str); }
+
+struct Dt<A: Foo>(&'static str, A);
+struct Dr<'a, B:'a+Foo>(&'static str, &'a B);
+struct Pt<A,B: Foo>(&'static str, #[allow(unused_tuple_struct_fields)] A, B);
+struct Pr<'a, 'b, B:'a+'b+Foo>(&'static str, #[allow(unused_tuple_struct_fields)] &'a B, &'b B);
+struct St<A: Foo>(&'static str, #[allow(unused_tuple_struct_fields)] A);
+struct Sr<'a, B:'a+Foo>(&'static str, #[allow(unused_tuple_struct_fields)] &'a B);
+
+impl<A: Foo> Drop for Dt<A> {
+ fn drop(&mut self) { println!("drop {}", self.0); self.1.foo(self.0); }
+}
+impl<'a, B: Foo> Drop for Dr<'a, B> {
+ fn drop(&mut self) { println!("drop {}", self.0); self.1.foo(self.0); }
+}
+unsafe impl<#[may_dangle] A, B: Foo> Drop for Pt<A, B> {
+ // (unsafe to access self.1 due to #[may_dangle] on A)
+ fn drop(&mut self) { println!("drop {}", self.0); self.2.foo(self.0); }
+}
+unsafe impl<#[may_dangle] 'a, 'b, B: Foo> Drop for Pr<'a, 'b, B> {
+ // (unsafe to access self.1 due to #[may_dangle] on 'a)
+ fn drop(&mut self) { println!("drop {}", self.0); self.2.foo(self.0); }
+}
+
+fn main() {
+ use std::cell::RefCell;
+
+ impl Foo for RefCell<String> {
+ fn foo(&self, s: &str) {
+ let s2 = format!("{}|{}", *self.borrow(), s);
+ *self.borrow_mut() = s2;
+ }
+ }
+
+ impl<'a, T:Foo> Foo for &'a T {
+ fn foo(&self, s: &str) {
+ (*self).foo(s);
+ }
+ }
+
+ struct CheckOnDrop(RefCell<String>, &'static str);
+ impl Drop for CheckOnDrop {
+ fn drop(&mut self) { assert_eq!(*self.0.borrow(), self.1); }
+ }
+
+ let c_long;
+ let (c, dt, dr, pt, pr, st, sr)
+ : (CheckOnDrop, Dt<_>, Dr<_>, Pt<_, _>, Pr<_>, St<_>, Sr<_>);
+ c_long = CheckOnDrop(RefCell::new("c_long".to_string()),
+ "c_long|pr|pt|dr|dt");
+ c = CheckOnDrop(RefCell::new("c".to_string()),
+ "c");
+
+ // No error: sufficiently long-lived state can be referenced in dtors
+ dt = Dt("dt", &c_long.0);
+ dr = Dr("dr", &c_long.0);
+
+ // No error: Drop impl asserts .1 (A and &'a _) are not accessed
+ pt = Pt("pt", &c.0, &c_long.0);
+ pr = Pr("pr", &c.0, &c_long.0);
+
+ // No error: St and Sr have no destructor.
+ st = St("st", &c.0);
+ sr = Sr("sr", &c.0);
+
+ println!("{:?}", (dt.0, dr.0, pt.0, pr.0, st.0, sr.0));
+ assert_eq!(*c_long.0.borrow(), "c_long");
+ assert_eq!(*c.0.borrow(), "c");
+}
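
For context, the kind of code that dropck rejects without the eyepatch, and that `#[may_dangle]` on a lifetime parameter (as on `Pr` above) is meant to permit when the destructor promises not to touch the borrow, looks roughly like this sketch (not part of the patch; it is expected to be rejected as written):

    struct PrintOnDrop<'a>(&'a str);

    impl<'a> Drop for PrintOnDrop<'a> {
        fn drop(&mut self) {
            // The destructor reads the borrowed data...
            println!("dropping: {}", self.0);
        }
    }

    fn main() {
        let d;
        let s = String::from("short-lived");
        d = PrintOnDrop(&s);
        // ...so dropck reports error[E0597]: `s` does not live long enough.
        // `s` is dropped before `d`, whose Drop impl may still use the borrow.
    }

With `unsafe impl<#[may_dangle] 'a> Drop for PrintOnDrop<'a>` and a destructor body that does not read `self.0`, the same `main` is accepted; that is the pattern the `Pt`/`Pr` types in this test exercise.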
diff --git a/tests/ui/drop/dropck_legal_cycles.rs b/tests/ui/drop/dropck_legal_cycles.rs
new file mode 100644
index 000000000..6a0fe7784
--- /dev/null
+++ b/tests/ui/drop/dropck_legal_cycles.rs
@@ -0,0 +1,1183 @@
+// run-pass
+// This test exercises cases where cyclic structure is legal,
+// including when the cycles go through data-structures such
+// as `Vec` or `TypedArena`.
+//
+// The intent is to cover as many such cases as possible, ensuring
+// that if the compiler did not complain circa Rust 1.x (1.2 as of
+// this writing), then it will continue to not complain in the future.
+//
+// Note that while some of the tests are only exercising using the
+// given collection as a "backing store" for a set of nodes that hold
+// the actual cycle (and thus the cycle does not go through the
+// collection itself in such cases), in general we *do* want to make
+// sure to have at least one example exercising a cycle that goes
+// through the collection, for every collection type that supports
+// this.
+
+// HIGH LEVEL DESCRIPTION OF THE TEST ARCHITECTURE
+// -----------------------------------------------
+//
+// We pick a data structure and want to make a cyclic construction
+// from it. Each test of interest is labelled starting with "Cycle N:
+// { ... }" where N is the test number and the "..." is filled in with
+// a graphviz-style description of the graph structure that the
+// author believes is being made. So "{ a -> b, b -> (c,d), (c,d) -> e }"
+// describes a line connected to a diamond:
+//
+// c
+// / \
+// a - b e
+// \ /
+// d
+//
+// (Note that the above directed graph is actually acyclic.)
+//
+// The different graph structures are often composed of different data
+// types. Some may be built atop `Vec`, others atop `HashMap`, etc.
+//
+// For each graph structure, we actually *confirm* that a cycle exists
+// (as a safe-guard against a test author accidentally leaving it out)
+// by traversing each graph and "proving" that a cycle exists within it.
+//
+// To do this, while trying to keep the code uniform (despite working
+// with different underlying collection and smart-pointer types), we
+// have a standard traversal API:
+//
+// 1. every node in the graph carries a `mark` (a u32, init'ed to 0).
+//
+// 2. every node provides a method to visit its children
+//
+// 3. a traversal attempts to visit the nodes of the graph and prove that
+// it sees the same node twice. It does this by setting the mark of each
+// node to a fresh non-zero value, and if it sees the current mark, it
+// "knows" that it must have found a cycle, and stops attempting further
+// traversal.
+//
+// 4. each traversal is controlled by a bit-string that tells it which child
+// to visit when it can take different paths. As a simple example,
+// in a binary tree, 0 could mean "left" (and 1, "right"), so that
+// "00010" means "left, left, left, right, left". (In general it will
+// read as many bits as it needs to choose one child.)
+//
+// The graphs in this test are all meant to be very small, and thus
+// short bitstrings of less than 64 bits should always suffice.
+//
+// (An earlier version of this test infrastructure simply had any
+// given traversal visit all children it encountered, in a
+// depth-first manner; one problem with this approach is that an
+// acyclic graph can still have sharing, which would then be treated
+// as a repeat mark and reported as a detected cycle.)
+//
+// The traversal code is a little more complicated because it has been
+// programmed in a somewhat defensive manner. For example it also has
+// a max threshold for the number of nodes it will visit, to guard
+// against scenarios where the nodes are not correctly setting their
+// mark when asked. There are various other methods not discussed here
+// that are for aiding debugging the test when it runs, such as the
+// `name` method that all nodes provide.
+//
+// So each test:
+//
+// 1. allocates the nodes in the graph,
+//
+// 2. sets up the links in the graph,
+//
+// 3. clones the "ContextData"
+//
+// 4. chooses a new current mark value for this test
+//
+// 5. initiates a traversal, potentially from multiple starting points
+// (aka "roots"), with a given control-string (potentially a
+// different string for each root). if it does start from a
+// distinct root, then such a test should also increment the
+// current mark value, so that this traversal is considered
+// distinct from the prior one on this graph structure.
+//
+// Note that most of the tests work with the default control string
+// of all-zeroes.
+//
+// 6. assert that the context confirms that it actually saw a cycle (since a traversal
+// might have terminated, e.g., on a tree structure that contained no cycles).
+
+use std::cell::{Cell, RefCell};
+use std::cmp::Ordering;
+use std::collections::BinaryHeap;
+use std::collections::HashMap;
+use std::collections::LinkedList;
+use std::collections::VecDeque;
+use std::collections::btree_map::BTreeMap;
+use std::collections::btree_set::BTreeSet;
+use std::hash::{Hash, Hasher};
+use std::rc::Rc;
+use std::sync::{Arc, RwLock, Mutex};
+
+const PRINT: bool = false;
+
+pub fn main() {
+ let c_orig = ContextData {
+ curr_depth: 0,
+ max_depth: 3,
+ visited: 0,
+ max_visits: 1000,
+ skipped: 0,
+ curr_mark: 0,
+ saw_prev_marked: false,
+ control_bits: 0,
+ };
+
+ // SANITY CHECK FOR TEST SUITE (thus unnumbered)
+ // Not a cycle: { v[0] -> (v[1], v[2]), v[1] -> v[3], v[2] -> v[3] };
+ let v: Vec<S2> = vec![Named::new("s0"),
+ Named::new("s1"),
+ Named::new("s2"),
+ Named::new("s3")];
+ v[0].next.set((Some(&v[1]), Some(&v[2])));
+ v[1].next.set((Some(&v[3]), None));
+ v[2].next.set((Some(&v[3]), None));
+ v[3].next.set((None, None));
+
+ let mut c = c_orig.clone();
+ c.curr_mark = 10;
+ assert!(!c.saw_prev_marked);
+ v[0].descend_into_self(&mut c);
+ assert!(!c.saw_prev_marked); // <-- different from below, b/c acyclic above
+
+ if PRINT { println!(); }
+
+ // Cycle 1: { v[0] -> v[1], v[1] -> v[0] };
+ // does not exercise `v` itself
+ let v: Vec<S> = vec![Named::new("s0"),
+ Named::new("s1")];
+ v[0].next.set(Some(&v[1]));
+ v[1].next.set(Some(&v[0]));
+
+ let mut c = c_orig.clone();
+ c.curr_mark = 10;
+ assert!(!c.saw_prev_marked);
+ v[0].descend_into_self(&mut c);
+ assert!(c.saw_prev_marked);
+
+ if PRINT { println!(); }
+
+ // Cycle 2: { v[0] -> v, v[1] -> v }
+ let v: V = Named::new("v");
+ v.contents[0].set(Some(&v));
+ v.contents[1].set(Some(&v));
+
+ let mut c = c_orig.clone();
+ c.curr_mark = 20;
+ assert!(!c.saw_prev_marked);
+ v.descend_into_self(&mut c);
+ assert!(c.saw_prev_marked);
+
+ if PRINT { println!(); }
+
+ // Cycle 3: { hk0 -> hv0, hv0 -> hk0, hk1 -> hv1, hv1 -> hk1 };
+ // does not exercise `h` itself
+
+ let mut h: HashMap<H,H> = HashMap::new();
+ h.insert(Named::new("hk0"), Named::new("hv0"));
+ h.insert(Named::new("hk1"), Named::new("hv1"));
+ for (key, val) in h.iter() {
+ val.next.set(Some(key));
+ key.next.set(Some(val));
+ }
+
+ let mut c = c_orig.clone();
+ c.curr_mark = 30;
+ for (key, _) in h.iter() {
+ c.curr_mark += 1;
+ c.saw_prev_marked = false;
+ key.descend_into_self(&mut c);
+ assert!(c.saw_prev_marked);
+ }
+
+ if PRINT { println!(); }
+
+ // Cycle 4: { h -> (hmk0,hmv0,hmk1,hmv1), {hmk0,hmv0,hmk1,hmv1} -> h }
+
+ let mut h: HashMap<HM,HM> = HashMap::new();
+ h.insert(Named::new("hmk0"), Named::new("hmv0"));
+ h.insert(Named::new("hmk0"), Named::new("hmv0"));
+ for (key, val) in h.iter() {
+ val.contents.set(Some(&h));
+ key.contents.set(Some(&h));
+ }
+
+ let mut c = c_orig.clone();
+ c.max_depth = 2;
+ c.curr_mark = 40;
+ for (key, _) in h.iter() {
+ c.curr_mark += 1;
+ c.saw_prev_marked = false;
+ key.descend_into_self(&mut c);
+ assert!(c.saw_prev_marked);
+ // break;
+ }
+
+ if PRINT { println!(); }
+
+ // Cycle 5: { vd[0] -> vd[1], vd[1] -> vd[0] };
+ // does not exercise vd itself
+ let mut vd: VecDeque<S> = VecDeque::new();
+ vd.push_back(Named::new("d0"));
+ vd.push_back(Named::new("d1"));
+ vd[0].next.set(Some(&vd[1]));
+ vd[1].next.set(Some(&vd[0]));
+
+ let mut c = c_orig.clone();
+ c.curr_mark = 50;
+ assert!(!c.saw_prev_marked);
+ vd[0].descend_into_self(&mut c);
+ assert!(c.saw_prev_marked);
+
+ if PRINT { println!(); }
+
+ // Cycle 6: { vd -> (vd0, vd1), {vd0, vd1} -> vd }
+ let mut vd: VecDeque<VD> = VecDeque::new();
+ vd.push_back(Named::new("vd0"));
+ vd.push_back(Named::new("vd1"));
+ vd[0].contents.set(Some(&vd));
+ vd[1].contents.set(Some(&vd));
+
+ let mut c = c_orig.clone();
+ c.curr_mark = 60;
+ assert!(!c.saw_prev_marked);
+ vd[0].descend_into_self(&mut c);
+ assert!(c.saw_prev_marked);
+
+ if PRINT { println!(); }
+
+ // Cycle 7: { vm -> (vm0, vm1), {vm0, vm1} -> vm }
+ let mut vm: HashMap<usize, VM> = HashMap::new();
+ vm.insert(0, Named::new("vm0"));
+ vm.insert(1, Named::new("vm1"));
+ vm[&0].contents.set(Some(&vm));
+ vm[&1].contents.set(Some(&vm));
+
+ let mut c = c_orig.clone();
+ c.curr_mark = 70;
+ assert!(!c.saw_prev_marked);
+ vm[&0].descend_into_self(&mut c);
+ assert!(c.saw_prev_marked);
+
+ if PRINT { println!(); }
+
+ // Cycle 8: { ll -> (ll0, ll1), {ll0, ll1} -> ll }
+ let mut ll: LinkedList<LL> = LinkedList::new();
+ ll.push_back(Named::new("ll0"));
+ ll.push_back(Named::new("ll1"));
+ for e in &ll {
+ e.contents.set(Some(&ll));
+ }
+
+ let mut c = c_orig.clone();
+ c.curr_mark = 80;
+ for e in &ll {
+ c.curr_mark += 1;
+ c.saw_prev_marked = false;
+ e.descend_into_self(&mut c);
+ assert!(c.saw_prev_marked);
+ // break;
+ }
+
+ if PRINT { println!(); }
+
+ // Cycle 9: { bh -> (bh0, bh1), {bh0, bh1} -> bh }
+ let mut bh: BinaryHeap<BH> = BinaryHeap::new();
+ bh.push(Named::new("bh0"));
+ bh.push(Named::new("bh1"));
+ for b in bh.iter() {
+ b.contents.set(Some(&bh));
+ }
+
+ let mut c = c_orig.clone();
+ c.curr_mark = 90;
+ for b in &bh {
+ c.curr_mark += 1;
+ c.saw_prev_marked = false;
+ b.descend_into_self(&mut c);
+ assert!(c.saw_prev_marked);
+ // break;
+ }
+
+ if PRINT { println!(); }
+
+ // Cycle 10: { btm -> (btk0, btv1), {bt0, bt1} -> btm }
+ let mut btm: BTreeMap<BTM, BTM> = BTreeMap::new();
+ btm.insert(Named::new("btk0"), Named::new("btv0"));
+ btm.insert(Named::new("btk1"), Named::new("btv1"));
+ for (k, v) in btm.iter() {
+ k.contents.set(Some(&btm));
+ v.contents.set(Some(&btm));
+ }
+
+ let mut c = c_orig.clone();
+ c.curr_mark = 100;
+ for (k, _) in &btm {
+ c.curr_mark += 1;
+ c.saw_prev_marked = false;
+ k.descend_into_self(&mut c);
+ assert!(c.saw_prev_marked);
+ // break;
+ }
+
+ if PRINT { println!(); }
+
+ // Cycle 10: { bts -> (bts0, bts1), {bts0, bts1} -> bts }
+ let mut bts: BTreeSet<BTS> = BTreeSet::new();
+ bts.insert(Named::new("bts0"));
+ bts.insert(Named::new("bts1"));
+ for v in bts.iter() {
+ v.contents.set(Some(&bts));
+ }
+
+ let mut c = c_orig.clone();
+ c.curr_mark = 100;
+ for b in &bts {
+ c.curr_mark += 1;
+ c.saw_prev_marked = false;
+ b.descend_into_self(&mut c);
+ assert!(c.saw_prev_marked);
+ // break;
+ }
+
+ if PRINT { println!(); }
+
+ // Cycle 11: { rc0 -> (rc1, rc2), rc1 -> (), rc2 -> rc0 }
+ let (rc0, rc1, rc2): (RCRC, RCRC, RCRC);
+ rc0 = RCRC::new("rcrc0");
+ rc1 = RCRC::new("rcrc1");
+ rc2 = RCRC::new("rcrc2");
+ rc0.0.borrow_mut().children.0 = Some(&rc1);
+ rc0.0.borrow_mut().children.1 = Some(&rc2);
+ rc2.0.borrow_mut().children.0 = Some(&rc0);
+
+ let mut c = c_orig.clone();
+ c.control_bits = 0b1;
+ c.curr_mark = 110;
+ assert!(!c.saw_prev_marked);
+ rc0.descend_into_self(&mut c);
+ assert!(c.saw_prev_marked);
+
+ if PRINT { println!(); }
+
+ // We want to take the previous Rc case and generalize it to Arc.
+ //
+ // We can use refcells if we're single-threaded (as this test is).
+ // If one were to generalize these constructions to a
+ // multi-threaded context, then it might seem like we could choose
+ // between either an RwLock or a Mutex to hold the owned arcs on
+ // each node.
+ //
+ // Part of the point of this test is to actually confirm that the
+ // cycle exists by traversing it. We can do that just fine with an
+ // RwLock (since we can grab the child pointers in read-only
+ // mode), but we cannot lock a std::sync::Mutex to guard reading
+ // from each node via the same pattern, since once you hit the
+ // cycle, you'll be trying to acquire the same lock twice.
+ // (We deal with this by exiting the traversal early if try_lock fails.)
+
+ // Cycle 12: { arc0 -> (arc1, arc2), arc1 -> (), arc2 -> arc0 }, refcells
+ let (arc0, arc1, arc2): (ARCRC, ARCRC, ARCRC);
+ arc0 = ARCRC::new("arcrc0");
+ arc1 = ARCRC::new("arcrc1");
+ arc2 = ARCRC::new("arcrc2");
+ arc0.0.borrow_mut().children.0 = Some(&arc1);
+ arc0.0.borrow_mut().children.1 = Some(&arc2);
+ arc2.0.borrow_mut().children.0 = Some(&arc0);
+
+ let mut c = c_orig.clone();
+ c.control_bits = 0b1;
+ c.curr_mark = 110;
+ assert!(!c.saw_prev_marked);
+ arc0.descend_into_self(&mut c);
+ assert!(c.saw_prev_marked);
+
+ if PRINT { println!(); }
+
+ // Cycle 13: { arc0 -> (arc1, arc2), arc1 -> (), arc2 -> arc0 }, rwlocks
+ let (arc0, arc1, arc2): (ARCRW, ARCRW, ARCRW);
+ arc0 = ARCRW::new("arcrw0");
+ arc1 = ARCRW::new("arcrw1");
+ arc2 = ARCRW::new("arcrw2");
+ arc0.0.write().unwrap().children.0 = Some(&arc1);
+ arc0.0.write().unwrap().children.1 = Some(&arc2);
+ arc2.0.write().unwrap().children.0 = Some(&arc0);
+
+ let mut c = c_orig.clone();
+ c.control_bits = 0b1;
+ c.curr_mark = 110;
+ assert!(!c.saw_prev_marked);
+ arc0.descend_into_self(&mut c);
+ assert!(c.saw_prev_marked);
+
+ if PRINT { println!(); }
+
+ // Cycle 14: { arc0 -> (arc1, arc2), arc1 -> (), arc2 -> arc0 }, mutexes
+ let (arc0, arc1, arc2): (ARCM, ARCM, ARCM);
+ arc0 = ARCM::new("arcm0");
+ arc1 = ARCM::new("arcm1");
+ arc2 = ARCM::new("arcm2");
+ arc0.1.lock().unwrap().children.0 = Some(&arc1);
+ arc0.1.lock().unwrap().children.1 = Some(&arc2);
+ arc2.1.lock().unwrap().children.0 = Some(&arc0);
+
+ let mut c = c_orig.clone();
+ c.control_bits = 0b1;
+ c.curr_mark = 110;
+ assert!(!c.saw_prev_marked);
+ arc0.descend_into_self(&mut c);
+ assert!(c.saw_prev_marked);
+}
+
+trait Named {
+ fn new(_: &'static str) -> Self;
+ fn name(&self) -> &str;
+}
+
+trait Marked<M> {
+ fn mark(&self) -> M;
+ fn set_mark(&self, mark: M);
+}
+
+struct S<'a> {
+ name: &'static str,
+ mark: Cell<u32>,
+ next: Cell<Option<&'a S<'a>>>,
+}
+
+impl<'a> Named for S<'a> {
+ fn new(name: &'static str) -> S<'a> {
+ S { name: name, mark: Cell::new(0), next: Cell::new(None) }
+ }
+ fn name(&self) -> &str { self.name }
+}
+
+impl<'a> Marked<u32> for S<'a> {
+ fn mark(&self) -> u32 { self.mark.get() }
+ fn set_mark(&self, mark: u32) { self.mark.set(mark); }
+}
+
+struct S2<'a> {
+ name: &'static str,
+ mark: Cell<u32>,
+ next: Cell<(Option<&'a S2<'a>>, Option<&'a S2<'a>>)>,
+}
+
+impl<'a> Named for S2<'a> {
+ fn new(name: &'static str) -> S2<'a> {
+ S2 { name: name, mark: Cell::new(0), next: Cell::new((None, None)) }
+ }
+ fn name(&self) -> &str { self.name }
+}
+
+impl<'a> Marked<u32> for S2<'a> {
+ fn mark(&self) -> u32 { self.mark.get() }
+ fn set_mark(&self, mark: u32) {
+ self.mark.set(mark);
+ }
+}
+
+struct V<'a> {
+ name: &'static str,
+ mark: Cell<u32>,
+ contents: Vec<Cell<Option<&'a V<'a>>>>,
+}
+
+impl<'a> Named for V<'a> {
+ fn new(name: &'static str) -> V<'a> {
+ V { name: name,
+ mark: Cell::new(0),
+ contents: vec![Cell::new(None), Cell::new(None)]
+ }
+ }
+ fn name(&self) -> &str { self.name }
+}
+
+impl<'a> Marked<u32> for V<'a> {
+ fn mark(&self) -> u32 { self.mark.get() }
+ fn set_mark(&self, mark: u32) { self.mark.set(mark); }
+}
+
+#[derive(Eq)]
+struct H<'a> {
+ name: &'static str,
+ mark: Cell<u32>,
+ next: Cell<Option<&'a H<'a>>>,
+}
+
+impl<'a> Named for H<'a> {
+ fn new(name: &'static str) -> H<'a> {
+ H { name: name, mark: Cell::new(0), next: Cell::new(None) }
+ }
+ fn name(&self) -> &str { self.name }
+}
+
+impl<'a> Marked<u32> for H<'a> {
+ fn mark(&self) -> u32 { self.mark.get() }
+ fn set_mark(&self, mark: u32) { self.mark.set(mark); }
+}
+
+impl<'a> PartialEq for H<'a> {
+ fn eq(&self, rhs: &H<'a>) -> bool {
+ self.name == rhs.name
+ }
+}
+
+impl<'a> Hash for H<'a> {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.name.hash(state)
+ }
+}
+
+#[derive(Eq)]
+struct HM<'a> {
+ name: &'static str,
+ mark: Cell<u32>,
+ contents: Cell<Option<&'a HashMap<HM<'a>, HM<'a>>>>,
+}
+
+impl<'a> Named for HM<'a> {
+ fn new(name: &'static str) -> HM<'a> {
+ HM { name: name,
+ mark: Cell::new(0),
+ contents: Cell::new(None)
+ }
+ }
+ fn name(&self) -> &str { self.name }
+}
+
+impl<'a> Marked<u32> for HM<'a> {
+ fn mark(&self) -> u32 { self.mark.get() }
+ fn set_mark(&self, mark: u32) { self.mark.set(mark); }
+}
+
+impl<'a> PartialEq for HM<'a> {
+ fn eq(&self, rhs: &HM<'a>) -> bool {
+ self.name == rhs.name
+ }
+}
+
+impl<'a> Hash for HM<'a> {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.name.hash(state)
+ }
+}
+
+
+struct VD<'a> {
+ name: &'static str,
+ mark: Cell<u32>,
+ contents: Cell<Option<&'a VecDeque<VD<'a>>>>,
+}
+
+impl<'a> Named for VD<'a> {
+ fn new(name: &'static str) -> VD<'a> {
+ VD { name: name,
+ mark: Cell::new(0),
+ contents: Cell::new(None)
+ }
+ }
+ fn name(&self) -> &str { self.name }
+}
+
+impl<'a> Marked<u32> for VD<'a> {
+ fn mark(&self) -> u32 { self.mark.get() }
+ fn set_mark(&self, mark: u32) { self.mark.set(mark); }
+}
+
+struct VM<'a> {
+ name: &'static str,
+ mark: Cell<u32>,
+ contents: Cell<Option<&'a HashMap<usize, VM<'a>>>>,
+}
+
+impl<'a> Named for VM<'a> {
+ fn new(name: &'static str) -> VM<'a> {
+ VM { name: name,
+ mark: Cell::new(0),
+ contents: Cell::new(None)
+ }
+ }
+ fn name(&self) -> &str { self.name }
+}
+
+impl<'a> Marked<u32> for VM<'a> {
+ fn mark(&self) -> u32 { self.mark.get() }
+ fn set_mark(&self, mark: u32) { self.mark.set(mark); }
+}
+
+struct LL<'a> {
+ name: &'static str,
+ mark: Cell<u32>,
+ contents: Cell<Option<&'a LinkedList<LL<'a>>>>,
+}
+
+impl<'a> Named for LL<'a> {
+ fn new(name: &'static str) -> LL<'a> {
+ LL { name: name,
+ mark: Cell::new(0),
+ contents: Cell::new(None)
+ }
+ }
+ fn name(&self) -> &str { self.name }
+}
+
+impl<'a> Marked<u32> for LL<'a> {
+ fn mark(&self) -> u32 { self.mark.get() }
+ fn set_mark(&self, mark: u32) { self.mark.set(mark); }
+}
+
+struct BH<'a> {
+ name: &'static str,
+ mark: Cell<u32>,
+ contents: Cell<Option<&'a BinaryHeap<BH<'a>>>>,
+}
+
+impl<'a> Named for BH<'a> {
+ fn new(name: &'static str) -> BH<'a> {
+ BH { name: name,
+ mark: Cell::new(0),
+ contents: Cell::new(None)
+ }
+ }
+ fn name(&self) -> &str { self.name }
+}
+
+impl<'a> Marked<u32> for BH<'a> {
+ fn mark(&self) -> u32 { self.mark.get() }
+ fn set_mark(&self, mark: u32) { self.mark.set(mark); }
+}
+
+impl<'a> Eq for BH<'a> { }
+
+impl<'a> PartialEq for BH<'a> {
+ fn eq(&self, rhs: &BH<'a>) -> bool {
+ self.name == rhs.name
+ }
+}
+
+impl<'a> PartialOrd for BH<'a> {
+ fn partial_cmp(&self, rhs: &BH<'a>) -> Option<Ordering> {
+ Some(self.cmp(rhs))
+ }
+}
+
+impl<'a> Ord for BH<'a> {
+ fn cmp(&self, rhs: &BH<'a>) -> Ordering {
+ self.name.cmp(rhs.name)
+ }
+}
+
+struct BTM<'a> {
+ name: &'static str,
+ mark: Cell<u32>,
+ contents: Cell<Option<&'a BTreeMap<BTM<'a>, BTM<'a>>>>,
+}
+
+impl<'a> Named for BTM<'a> {
+ fn new(name: &'static str) -> BTM<'a> {
+ BTM { name: name,
+ mark: Cell::new(0),
+ contents: Cell::new(None)
+ }
+ }
+ fn name(&self) -> &str { self.name }
+}
+
+impl<'a> Marked<u32> for BTM<'a> {
+ fn mark(&self) -> u32 { self.mark.get() }
+ fn set_mark(&self, mark: u32) { self.mark.set(mark); }
+}
+
+impl<'a> Eq for BTM<'a> { }
+
+impl<'a> PartialEq for BTM<'a> {
+ fn eq(&self, rhs: &BTM<'a>) -> bool {
+ self.name == rhs.name
+ }
+}
+
+impl<'a> PartialOrd for BTM<'a> {
+ fn partial_cmp(&self, rhs: &BTM<'a>) -> Option<Ordering> {
+ Some(self.cmp(rhs))
+ }
+}
+
+impl<'a> Ord for BTM<'a> {
+ fn cmp(&self, rhs: &BTM<'a>) -> Ordering {
+ self.name.cmp(rhs.name)
+ }
+}
+
+struct BTS<'a> {
+ name: &'static str,
+ mark: Cell<u32>,
+ contents: Cell<Option<&'a BTreeSet<BTS<'a>>>>,
+}
+
+impl<'a> Named for BTS<'a> {
+ fn new(name: &'static str) -> BTS<'a> {
+ BTS { name: name,
+ mark: Cell::new(0),
+ contents: Cell::new(None)
+ }
+ }
+ fn name(&self) -> &str { self.name }
+}
+
+impl<'a> Marked<u32> for BTS<'a> {
+ fn mark(&self) -> u32 { self.mark.get() }
+ fn set_mark(&self, mark: u32) { self.mark.set(mark); }
+}
+
+impl<'a> Eq for BTS<'a> { }
+
+impl<'a> PartialEq for BTS<'a> {
+ fn eq(&self, rhs: &BTS<'a>) -> bool {
+ self.name == rhs.name
+ }
+}
+
+impl<'a> PartialOrd for BTS<'a> {
+ fn partial_cmp(&self, rhs: &BTS<'a>) -> Option<Ordering> {
+ Some(self.cmp(rhs))
+ }
+}
+
+impl<'a> Ord for BTS<'a> {
+ fn cmp(&self, rhs: &BTS<'a>) -> Ordering {
+ self.name.cmp(rhs.name)
+ }
+}
+
+#[derive(Clone)]
+struct RCRCData<'a> {
+ name: &'static str,
+ mark: Cell<u32>,
+ children: (Option<&'a RCRC<'a>>, Option<&'a RCRC<'a>>),
+}
+#[derive(Clone)]
+struct RCRC<'a>(Rc<RefCell<RCRCData<'a>>>);
+
+impl<'a> Named for RCRC<'a> {
+ fn new(name: &'static str) -> Self {
+ RCRC(Rc::new(RefCell::new(RCRCData {
+ name: name, mark: Cell::new(0), children: (None, None), })))
+ }
+ fn name(&self) -> &str { self.0.borrow().name }
+}
+
+impl<'a> Marked<u32> for RCRC<'a> {
+ fn mark(&self) -> u32 { self.0.borrow().mark.get() }
+ fn set_mark(&self, mark: u32) { self.0.borrow().mark.set(mark); }
+}
+
+impl<'a> Children<'a> for RCRC<'a> {
+ fn count_children(&self) -> usize { 2 }
+ fn descend_one_child<C>(&self, context: &mut C, index: usize)
+ where C: Context + PrePost<Self>, Self: Sized
+ {
+ let children = &self.0.borrow().children;
+ let child = match index {
+ 0 => if let Some(child) = children.0 { child } else { return; },
+ 1 => if let Some(child) = children.1 { child } else { return; },
+ _ => panic!("bad children"),
+ };
+ // println!("S2 {} descending into child {} at index {}", self.name, child.name, index);
+ child.descend_into_self(context);
+ }
+}
+#[derive(Clone)]
+struct ARCRCData<'a> {
+ name: &'static str,
+ mark: Cell<u32>,
+ children: (Option<&'a ARCRC<'a>>, Option<&'a ARCRC<'a>>),
+}
+#[derive(Clone)]
+struct ARCRC<'a>(Arc<RefCell<ARCRCData<'a>>>);
+
+impl<'a> Named for ARCRC<'a> {
+ fn new(name: &'static str) -> Self {
+ ARCRC(Arc::new(RefCell::new(ARCRCData {
+ name: name, mark: Cell::new(0), children: (None, None), })))
+ }
+ fn name(&self) -> &str { self.0.borrow().name }
+}
+
+impl<'a> Marked<u32> for ARCRC<'a> {
+ fn mark(&self) -> u32 { self.0.borrow().mark.get() }
+ fn set_mark(&self, mark: u32) { self.0.borrow().mark.set(mark); }
+}
+
+impl<'a> Children<'a> for ARCRC<'a> {
+ fn count_children(&self) -> usize { 2 }
+ fn descend_one_child<C>(&self, context: &mut C, index: usize)
+ where C: Context + PrePost<Self>, Self: Sized
+ {
+ let children = &self.0.borrow().children;
+ match index {
+ 0 => if let Some(ref child) = children.0 {
+ child.descend_into_self(context);
+ },
+ 1 => if let Some(ref child) = children.1 {
+ child.descend_into_self(context);
+ },
+ _ => panic!("bad children!"),
+ }
+ }
+}
+
+#[derive(Clone)]
+struct ARCMData<'a> {
+ mark: Cell<u32>,
+ children: (Option<&'a ARCM<'a>>, Option<&'a ARCM<'a>>),
+}
+
+#[derive(Clone)]
+struct ARCM<'a>(&'static str, Arc<Mutex<ARCMData<'a>>>);
+
+impl<'a> Named for ARCM<'a> {
+ fn new(name: &'static str) -> Self {
+ ARCM(name, Arc::new(Mutex::new(ARCMData {
+ mark: Cell::new(0), children: (None, None), })))
+ }
+ fn name(&self) -> &str { self.0 }
+}
+
+impl<'a> Marked<u32> for ARCM<'a> {
+ fn mark(&self) -> u32 { self.1.lock().unwrap().mark.get() }
+ fn set_mark(&self, mark: u32) { self.1.lock().unwrap().mark.set(mark); }
+}
+
+impl<'a> Children<'a> for ARCM<'a> {
+ fn count_children(&self) -> usize { 2 }
+ fn descend_one_child<C>(&self, context: &mut C, index: usize)
+ where C: Context + PrePost<Self>, Self: Sized
+ {
+ let ref children = if let Ok(data) = self.1.try_lock() {
+ data.children
+ } else { return; };
+ match index {
+ 0 => if let Some(ref child) = children.0 {
+ child.descend_into_self(context);
+ },
+ 1 => if let Some(ref child) = children.1 {
+ child.descend_into_self(context);
+ },
+ _ => panic!("bad children!"),
+ }
+ }
+}
+
+#[derive(Clone)]
+struct ARCRWData<'a> {
+ name: &'static str,
+ mark: Cell<u32>,
+ children: (Option<&'a ARCRW<'a>>, Option<&'a ARCRW<'a>>),
+}
+
+#[derive(Clone)]
+struct ARCRW<'a>(Arc<RwLock<ARCRWData<'a>>>);
+
+impl<'a> Named for ARCRW<'a> {
+ fn new(name: &'static str) -> Self {
+ ARCRW(Arc::new(RwLock::new(ARCRWData {
+ name: name, mark: Cell::new(0), children: (None, None), })))
+ }
+ fn name(&self) -> &str { self.0.read().unwrap().name }
+}
+
+impl<'a> Marked<u32> for ARCRW<'a> {
+ fn mark(&self) -> u32 { self.0.read().unwrap().mark.get() }
+ fn set_mark(&self, mark: u32) { self.0.read().unwrap().mark.set(mark); }
+}
+
+impl<'a> Children<'a> for ARCRW<'a> {
+ fn count_children(&self) -> usize { 2 }
+ fn descend_one_child<C>(&self, context: &mut C, index: usize)
+ where C: Context + PrePost<Self>, Self: Sized
+ {
+ let children = &self.0.read().unwrap().children;
+ match index {
+ 0 => if let Some(ref child) = children.0 {
+ child.descend_into_self(context);
+ },
+ 1 => if let Some(ref child) = children.1 {
+ child.descend_into_self(context);
+ },
+ _ => panic!("bad children!"),
+ }
+ }
+}
+
+trait Context {
+ fn next_index(&mut self, len: usize) -> usize;
+ fn should_act(&self) -> bool;
+ fn increase_visited(&mut self);
+ fn increase_skipped(&mut self);
+ fn increase_depth(&mut self);
+ fn decrease_depth(&mut self);
+}
+
+trait PrePost<T> {
+ fn pre(&mut self, _: &T);
+ fn post(&mut self, _: &T);
+ fn hit_limit(&mut self, _: &T);
+}
+
+trait Children<'a> {
+ fn count_children(&self) -> usize;
+ fn descend_one_child<C>(&self, context: &mut C, index: usize)
+ where C: Context + PrePost<Self>, Self: Sized;
+
+ fn next_child<C>(&self, context: &mut C)
+ where C: Context + PrePost<Self>, Self: Sized
+ {
+ let index = context.next_index(self.count_children());
+ self.descend_one_child(context, index);
+ }
+
+ fn descend_into_self<C>(&self, context: &mut C)
+ where C: Context + PrePost<Self>, Self: Sized
+ {
+ context.pre(self);
+ if context.should_act() {
+ context.increase_visited();
+ context.increase_depth();
+ self.next_child(context);
+ context.decrease_depth();
+ } else {
+ context.hit_limit(self);
+ context.increase_skipped();
+ }
+ context.post(self);
+ }
+
+ fn descend<'b, C>(&self, c: &Cell<Option<&'b Self>>, context: &mut C)
+ where C: Context + PrePost<Self>, Self: Sized
+ {
+ if let Some(r) = c.get() {
+ r.descend_into_self(context);
+ }
+ }
+}
+
+impl<'a> Children<'a> for S<'a> {
+ fn count_children(&self) -> usize { 1 }
+ fn descend_one_child<C>(&self, context: &mut C, _: usize)
+ where C: Context + PrePost<Self>, Self: Sized {
+ self.descend(&self.next, context);
+ }
+}
+
+impl<'a> Children<'a> for S2<'a> {
+ fn count_children(&self) -> usize { 2 }
+ fn descend_one_child<C>(&self, context: &mut C, index: usize)
+ where C: Context + PrePost<Self>, Self: Sized
+ {
+ let children = self.next.get();
+ let child = match index {
+ 0 => if let Some(child) = children.0 { child } else { return; },
+ 1 => if let Some(child) = children.1 { child } else { return; },
+ _ => panic!("bad children"),
+ };
+ // println!("S2 {} descending into child {} at index {}", self.name, child.name, index);
+ child.descend_into_self(context);
+ }
+}
+
+impl<'a> Children<'a> for V<'a> {
+ fn count_children(&self) -> usize { self.contents.len() }
+ fn descend_one_child<C>(&self, context: &mut C, index: usize)
+ where C: Context + PrePost<Self>, Self: Sized
+ {
+ if let Some(child) = self.contents[index].get() {
+ child.descend_into_self(context);
+ }
+ }
+}
+
+impl<'a> Children<'a> for H<'a> {
+ fn count_children(&self) -> usize { 1 }
+ fn descend_one_child<C>(&self, context: &mut C, _: usize)
+ where C: Context + PrePost<Self>, Self: Sized
+ {
+ self.descend(&self.next, context);
+ }
+}
+
+impl<'a> Children<'a> for HM<'a> {
+ fn count_children(&self) -> usize {
+ if let Some(m) = self.contents.get() { 2 * m.iter().count() } else { 0 }
+ }
+ fn descend_one_child<C>(&self, context: &mut C, index: usize)
+ where C: Context + PrePost<Self>, Self: Sized
+ {
+ if let Some(ref hm) = self.contents.get() {
+ if let Some((k, v)) = hm.iter().nth(index / 2) {
+ [k, v][index % 2].descend_into_self(context);
+ }
+ }
+ }
+}
+
+impl<'a> Children<'a> for VD<'a> {
+ fn count_children(&self) -> usize {
+ if let Some(d) = self.contents.get() { d.iter().count() } else { 0 }
+ }
+ fn descend_one_child<C>(&self, context: &mut C, index: usize)
+ where C: Context + PrePost<Self>, Self: Sized
+ {
+ if let Some(ref vd) = self.contents.get() {
+ if let Some(r) = vd.iter().nth(index) {
+ r.descend_into_self(context);
+ }
+ }
+ }
+}
+
+impl<'a> Children<'a> for VM<'a> {
+ fn count_children(&self) -> usize {
+ if let Some(m) = self.contents.get() { m.iter().count() } else { 0 }
+ }
+ fn descend_one_child<C>(&self, context: &mut C, index: usize)
+ where C: Context + PrePost<VM<'a>>
+ {
+ if let Some(ref vd) = self.contents.get() {
+ if let Some((_idx, r)) = vd.iter().nth(index) {
+ r.descend_into_self(context);
+ }
+ }
+ }
+}
+
+impl<'a> Children<'a> for LL<'a> {
+ fn count_children(&self) -> usize {
+ if let Some(l) = self.contents.get() { l.iter().count() } else { 0 }
+ }
+ fn descend_one_child<C>(&self, context: &mut C, index: usize)
+ where C: Context + PrePost<LL<'a>>
+ {
+ if let Some(ref ll) = self.contents.get() {
+ if let Some(r) = ll.iter().nth(index) {
+ r.descend_into_self(context);
+ }
+ }
+ }
+}
+
+impl<'a> Children<'a> for BH<'a> {
+ fn count_children(&self) -> usize {
+ if let Some(h) = self.contents.get() { h.iter().count() } else { 0 }
+ }
+ fn descend_one_child<C>(&self, context: &mut C, index: usize)
+ where C: Context + PrePost<BH<'a>>
+ {
+ if let Some(ref bh) = self.contents.get() {
+ if let Some(r) = bh.iter().nth(index) {
+ r.descend_into_self(context);
+ }
+ }
+ }
+}
+
+impl<'a> Children<'a> for BTM<'a> {
+ fn count_children(&self) -> usize {
+ if let Some(m) = self.contents.get() { 2 * m.iter().count() } else { 0 }
+ }
+ fn descend_one_child<C>(&self, context: &mut C, index: usize)
+ where C: Context + PrePost<BTM<'a>>
+ {
+ if let Some(ref bh) = self.contents.get() {
+ if let Some((k, v)) = bh.iter().nth(index / 2) {
+ [k, v][index % 2].descend_into_self(context);
+ }
+ }
+ }
+}
+
+impl<'a> Children<'a> for BTS<'a> {
+ fn count_children(&self) -> usize {
+ if let Some(s) = self.contents.get() { s.iter().count() } else { 0 }
+ }
+ fn descend_one_child<C>(&self, context: &mut C, index: usize)
+ where C: Context + PrePost<BTS<'a>>
+ {
+ if let Some(ref bh) = self.contents.get() {
+ if let Some(r) = bh.iter().nth(index) {
+ r.descend_into_self(context);
+ }
+ }
+ }
+}
+
+#[derive(Copy, Clone)]
+struct ContextData {
+ curr_depth: usize,
+ max_depth: usize,
+ visited: usize,
+ max_visits: usize,
+ skipped: usize,
+ curr_mark: u32,
+ saw_prev_marked: bool,
+ control_bits: u64,
+}
+
+impl Context for ContextData {
+ fn next_index(&mut self, len: usize) -> usize {
+ if len < 2 { return 0; }
+ let mut pow2 = len.next_power_of_two();
+ let _pow2_orig = pow2;
+ let mut idx = 0;
+ let mut bits = self.control_bits;
+ while pow2 > 1 {
+ idx = (idx << 1) | (bits & 1) as usize;
+ bits = bits >> 1;
+ pow2 = pow2 >> 1;
+ }
+ idx = idx % len;
+ // println!("next_index({} [{:b}]) says {}, pre(bits): {:b} post(bits): {:b}",
+ // len, _pow2_orig, idx, self.control_bits, bits);
+ self.control_bits = bits;
+ return idx;
+ }
+ fn should_act(&self) -> bool {
+ self.curr_depth < self.max_depth && self.visited < self.max_visits
+ }
+ fn increase_visited(&mut self) { self.visited += 1; }
+ fn increase_skipped(&mut self) { self.skipped += 1; }
+ fn increase_depth(&mut self) { self.curr_depth += 1; }
+ fn decrease_depth(&mut self) { self.curr_depth -= 1; }
+}
+
+impl<T:Named+Marked<u32>> PrePost<T> for ContextData {
+ fn pre(&mut self, t: &T) {
+ for _ in 0..self.curr_depth {
+ if PRINT { print!(" "); }
+ }
+ if PRINT { println!("prev {}", t.name()); }
+ if t.mark() == self.curr_mark {
+ for _ in 0..self.curr_depth {
+ if PRINT { print!(" "); }
+ }
+ if PRINT { println!("(probably previously marked)"); }
+ self.saw_prev_marked = true;
+ }
+ t.set_mark(self.curr_mark);
+ }
+ fn post(&mut self, t: &T) {
+ for _ in 0..self.curr_depth {
+ if PRINT { print!(" "); }
+ }
+ if PRINT { println!("post {}", t.name()); }
+ }
+ fn hit_limit(&mut self, t: &T) {
+ for _ in 0..self.curr_depth {
+ if PRINT { print!(" "); }
+ }
+ if PRINT { println!("LIMIT {}", t.name()); }
+ }
+}
diff --git a/tests/ui/drop/dynamic-drop-async.rs b/tests/ui/drop/dynamic-drop-async.rs
new file mode 100644
index 000000000..8f1cc6691
--- /dev/null
+++ b/tests/ui/drop/dynamic-drop-async.rs
@@ -0,0 +1,332 @@
+// Test that values are not leaked in async functions, even in the cases where:
+// * Dropping one of the values panics while running the future.
+// * The future is dropped at one of its suspend points.
+// * Dropping one of the values panics while dropping the future.
+
+// run-pass
+// needs-unwind
+// edition:2018
+
+#![allow(unused)]
+
+use std::{
+ cell::{Cell, RefCell},
+ future::Future,
+ marker::Unpin,
+ panic,
+ pin::Pin,
+ ptr,
+ rc::Rc,
+ task::{Context, Poll, RawWaker, RawWakerVTable, Waker},
+};
+
+struct InjectedFailure;
+
+struct Defer<T> {
+ ready: bool,
+ value: Option<T>,
+}
+
+impl<T: Unpin> Future for Defer<T> {
+ type Output = T;
+ fn poll(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> {
+ if self.ready {
+ Poll::Ready(self.value.take().unwrap())
+ } else {
+ self.ready = true;
+ Poll::Pending
+ }
+ }
+}
+
+/// Allocator tracks the creation and destruction of `Ptr`s.
+/// The `failing_op`-th operation will panic.
+struct Allocator {
+ data: RefCell<Vec<bool>>,
+ failing_op: usize,
+ cur_ops: Cell<usize>,
+}
+
+impl panic::UnwindSafe for Allocator {}
+impl panic::RefUnwindSafe for Allocator {}
+
+impl Drop for Allocator {
+ fn drop(&mut self) {
+ let data = self.data.borrow();
+ if data.iter().any(|d| *d) {
+ panic!("missing free: {:?}", data);
+ }
+ }
+}
+
+impl Allocator {
+ fn new(failing_op: usize) -> Self {
+ Allocator { failing_op, cur_ops: Cell::new(0), data: RefCell::new(vec![]) }
+ }
+ fn alloc(&self) -> impl Future<Output = Ptr<'_>> + '_ {
+ self.fallible_operation();
+
+ let mut data = self.data.borrow_mut();
+
+ let addr = data.len();
+ data.push(true);
+ Defer { ready: false, value: Some(Ptr(addr, self)) }
+ }
+ fn fallible_operation(&self) {
+ self.cur_ops.set(self.cur_ops.get() + 1);
+
+ if self.cur_ops.get() == self.failing_op {
+ panic::panic_any(InjectedFailure);
+ }
+ }
+}
+
+// Type that tracks whether it was dropped and can panic when it's created or
+// destroyed.
+struct Ptr<'a>(usize, &'a Allocator);
+impl<'a> Drop for Ptr<'a> {
+ fn drop(&mut self) {
+ match self.1.data.borrow_mut()[self.0] {
+ false => panic!("double free at index {:?}", self.0),
+ ref mut d => *d = false,
+ }
+
+ self.1.fallible_operation();
+ }
+}
+
+async fn dynamic_init(a: Rc<Allocator>, c: bool) {
+ let _x;
+ if c {
+ _x = Some(a.alloc().await);
+ }
+}
+
+async fn dynamic_drop(a: Rc<Allocator>, c: bool) {
+ let x = a.alloc().await;
+ if c {
+ Some(x)
+ } else {
+ None
+ };
+}
+
+struct TwoPtrs<'a>(Ptr<'a>, Ptr<'a>);
+async fn struct_dynamic_drop(a: Rc<Allocator>, c0: bool, c1: bool, c: bool) {
+ for i in 0..2 {
+ let x;
+ let y;
+ if (c0 && i == 0) || (c1 && i == 1) {
+ x = (a.alloc().await, a.alloc().await, a.alloc().await);
+ y = TwoPtrs(a.alloc().await, a.alloc().await);
+ if c {
+ drop(x.1);
+ a.alloc().await;
+ drop(y.0);
+ a.alloc().await;
+ }
+ }
+ }
+}
+
+async fn field_assignment(a: Rc<Allocator>, c0: bool) {
+ let mut x = (TwoPtrs(a.alloc().await, a.alloc().await), a.alloc().await);
+
+ x.1 = a.alloc().await;
+ x.1 = a.alloc().await;
+
+ let f = (x.0).0;
+ a.alloc().await;
+ if c0 {
+ (x.0).0 = f;
+ }
+ a.alloc().await;
+}
+
+async fn assignment(a: Rc<Allocator>, c0: bool, c1: bool) {
+ let mut _v = a.alloc().await;
+ let mut _w = a.alloc().await;
+ if c0 {
+ drop(_v);
+ }
+ _v = _w;
+ if c1 {
+ _w = a.alloc().await;
+ }
+}
+
+async fn array_simple(a: Rc<Allocator>) {
+ let _x = [a.alloc().await, a.alloc().await, a.alloc().await, a.alloc().await];
+}
+
+async fn vec_simple(a: Rc<Allocator>) {
+ let _x = vec![a.alloc().await, a.alloc().await, a.alloc().await, a.alloc().await];
+}
+
+async fn mixed_drop_and_nondrop(a: Rc<Allocator>) {
+ // check that destructor panics handle drop
+ // and non-drop blocks in the same scope correctly.
+ //
+ // Surprisingly enough, this used to not work.
+ let (x, y, z);
+ x = a.alloc().await;
+ y = 5;
+ z = a.alloc().await;
+}
+
+#[allow(unreachable_code)]
+async fn vec_unreachable(a: Rc<Allocator>) {
+ let _x = vec![a.alloc().await, a.alloc().await, a.alloc().await, return];
+}
+
+async fn slice_pattern_one_of(a: Rc<Allocator>, i: usize) {
+ let array = [a.alloc().await, a.alloc().await, a.alloc().await, a.alloc().await];
+ let _x = match i {
+ 0 => {
+ let [a, ..] = array;
+ a
+ }
+ 1 => {
+ let [_, a, ..] = array;
+ a
+ }
+ 2 => {
+ let [_, _, a, _] = array;
+ a
+ }
+ 3 => {
+ let [_, _, _, a] = array;
+ a
+ }
+ _ => panic!("unmatched"),
+ };
+ a.alloc().await;
+}
+
+async fn subslice_pattern_from_end_with_drop(a: Rc<Allocator>, arg: bool, arg2: bool) {
+ let arr = [a.alloc().await, a.alloc().await, a.alloc().await, a.alloc().await, a.alloc().await];
+ if arg2 {
+ drop(arr);
+ return;
+ }
+
+ if arg {
+ let [.., _x, _] = arr;
+ } else {
+ let [_, _y @ ..] = arr;
+ }
+ a.alloc().await;
+}
+
+async fn subslice_pattern_reassign(a: Rc<Allocator>) {
+ let mut ar = [a.alloc().await, a.alloc().await, a.alloc().await];
+ let [_, _, _x] = ar;
+ ar = [a.alloc().await, a.alloc().await, a.alloc().await];
+ let [_, _y @ ..] = ar;
+ a.alloc().await;
+}
+
+async fn move_ref_pattern(a: Rc<Allocator>) {
+ let mut tup = (a.alloc().await, a.alloc().await, a.alloc().await, a.alloc().await);
+ let (ref _a, ref mut _b, _c, mut _d) = tup;
+ a.alloc().await;
+}
+
+fn run_test<F, G>(cx: &mut Context<'_>, ref f: F)
+where
+ F: Fn(Rc<Allocator>) -> G,
+ G: Future<Output = ()>,
+{
+ for polls in 0.. {
+ // Run without any panics to find which operations happen after the
+ // penultimate `poll`.
+ let first_alloc = Rc::new(Allocator::new(usize::MAX));
+ let mut fut = Box::pin(f(first_alloc.clone()));
+ let mut ops_before_last_poll = 0;
+ let mut completed = false;
+ for _ in 0..polls {
+ ops_before_last_poll = first_alloc.cur_ops.get();
+ if let Poll::Ready(()) = fut.as_mut().poll(cx) {
+ completed = true;
+ }
+ }
+ drop(fut);
+
+ // Start at `ops_before_last_poll` so that we will always be able to
+ // `poll` the expected number of times.
+ for failing_op in ops_before_last_poll..first_alloc.cur_ops.get() {
+ let alloc = Rc::new(Allocator::new(failing_op + 1));
+ let f = &f;
+ let cx = &mut *cx;
+ let result = panic::catch_unwind(panic::AssertUnwindSafe(move || {
+ let mut fut = Box::pin(f(alloc));
+ for _ in 0..polls {
+ let _ = fut.as_mut().poll(cx);
+ }
+ drop(fut);
+ }));
+ match result {
+ Ok(..) => panic!("test executed more ops on first call"),
+ Err(e) => {
+ if e.downcast_ref::<InjectedFailure>().is_none() {
+ panic::resume_unwind(e);
+ }
+ }
+ }
+ }
+
+ if completed {
+ break;
+ }
+ }
+}
+
+fn clone_waker(data: *const ()) -> RawWaker {
+ RawWaker::new(data, &RawWakerVTable::new(clone_waker, drop, drop, drop))
+}
+
+fn main() {
+ let waker = unsafe { Waker::from_raw(clone_waker(ptr::null())) };
+ let context = &mut Context::from_waker(&waker);
+
+ run_test(context, |a| dynamic_init(a, false));
+ run_test(context, |a| dynamic_init(a, true));
+ run_test(context, |a| dynamic_drop(a, false));
+ run_test(context, |a| dynamic_drop(a, true));
+
+ run_test(context, |a| assignment(a, false, false));
+ run_test(context, |a| assignment(a, false, true));
+ run_test(context, |a| assignment(a, true, false));
+ run_test(context, |a| assignment(a, true, true));
+
+ run_test(context, |a| array_simple(a));
+ run_test(context, |a| vec_simple(a));
+ run_test(context, |a| vec_unreachable(a));
+
+ run_test(context, |a| struct_dynamic_drop(a, false, false, false));
+ run_test(context, |a| struct_dynamic_drop(a, false, false, true));
+ run_test(context, |a| struct_dynamic_drop(a, false, true, false));
+ run_test(context, |a| struct_dynamic_drop(a, false, true, true));
+ run_test(context, |a| struct_dynamic_drop(a, true, false, false));
+ run_test(context, |a| struct_dynamic_drop(a, true, false, true));
+ run_test(context, |a| struct_dynamic_drop(a, true, true, false));
+ run_test(context, |a| struct_dynamic_drop(a, true, true, true));
+
+ run_test(context, |a| field_assignment(a, false));
+ run_test(context, |a| field_assignment(a, true));
+
+ run_test(context, |a| mixed_drop_and_nondrop(a));
+
+ run_test(context, |a| slice_pattern_one_of(a, 0));
+ run_test(context, |a| slice_pattern_one_of(a, 1));
+ run_test(context, |a| slice_pattern_one_of(a, 2));
+ run_test(context, |a| slice_pattern_one_of(a, 3));
+
+ run_test(context, |a| subslice_pattern_from_end_with_drop(a, true, true));
+ run_test(context, |a| subslice_pattern_from_end_with_drop(a, true, false));
+ run_test(context, |a| subslice_pattern_from_end_with_drop(a, false, true));
+ run_test(context, |a| subslice_pattern_from_end_with_drop(a, false, false));
+ run_test(context, |a| subslice_pattern_reassign(a));
+
+ run_test(context, |a| move_ref_pattern(a));
+}
diff --git a/tests/ui/drop/dynamic-drop.rs b/tests/ui/drop/dynamic-drop.rs
new file mode 100644
index 000000000..9e51d3ada
--- /dev/null
+++ b/tests/ui/drop/dynamic-drop.rs
@@ -0,0 +1,520 @@
+// run-pass
+// needs-unwind
+
+#![feature(generators, generator_trait)]
+
+#![allow(unused_assignments)]
+#![allow(unused_variables)]
+
+use std::cell::{Cell, RefCell};
+use std::mem::ManuallyDrop;
+use std::ops::Generator;
+use std::panic;
+use std::pin::Pin;
+
+struct InjectedFailure;
+
+struct Allocator {
+ data: RefCell<Vec<bool>>,
+ failing_op: usize,
+ cur_ops: Cell<usize>,
+}
+
+impl panic::UnwindSafe for Allocator {}
+impl panic::RefUnwindSafe for Allocator {}
+
+impl Drop for Allocator {
+ fn drop(&mut self) {
+ let data = self.data.borrow();
+ if data.iter().any(|d| *d) {
+ panic!("missing free: {:?}", data);
+ }
+ }
+}
+
+impl Allocator {
+ fn new(failing_op: usize) -> Self {
+ Allocator {
+ failing_op: failing_op,
+ cur_ops: Cell::new(0),
+ data: RefCell::new(vec![])
+ }
+ }
+ fn alloc(&self) -> Ptr<'_> {
+ self.cur_ops.set(self.cur_ops.get() + 1);
+
+ if self.cur_ops.get() == self.failing_op {
+ panic::panic_any(InjectedFailure);
+ }
+
+ let mut data = self.data.borrow_mut();
+ let addr = data.len();
+ data.push(true);
+ Ptr(addr, self)
+ }
+ // FIXME(#47949) Any use of this indicates a bug in rustc: we should never
+ // be leaking values in the cases here.
+ //
+ // Creates a `Ptr<'_>` and checks that the allocated value is leaked if the
+ // `failing_op` is in the list of exceptions.
+ fn alloc_leaked(&self, exceptions: Vec<usize>) -> Ptr<'_> {
+ let ptr = self.alloc();
+
+ if exceptions.iter().any(|operation| *operation == self.failing_op) {
+ let mut data = self.data.borrow_mut();
+ data[ptr.0] = false;
+ }
+ ptr
+ }
+}
+
+struct Ptr<'a>(usize, &'a Allocator);
+impl<'a> Drop for Ptr<'a> {
+ fn drop(&mut self) {
+ match self.1.data.borrow_mut()[self.0] {
+ false => {
+ panic!("double free at index {:?}", self.0)
+ }
+ ref mut d => *d = false
+ }
+
+ self.1.cur_ops.set(self.1.cur_ops.get()+1);
+
+ if self.1.cur_ops.get() == self.1.failing_op {
+ panic::panic_any(InjectedFailure);
+ }
+ }
+}
+
+fn dynamic_init(a: &Allocator, c: bool) {
+ let _x;
+ if c {
+ _x = Some(a.alloc());
+ }
+}
+
+fn dynamic_drop(a: &Allocator, c: bool) {
+ let x = a.alloc();
+ if c {
+ Some(x)
+ } else {
+ None
+ };
+}
+
+struct TwoPtrs<'a>(Ptr<'a>, #[allow(unused_tuple_struct_fields)] Ptr<'a>);
+fn struct_dynamic_drop(a: &Allocator, c0: bool, c1: bool, c: bool) {
+ for i in 0..2 {
+ let x;
+ let y;
+ if (c0 && i == 0) || (c1 && i == 1) {
+ x = (a.alloc(), a.alloc(), a.alloc());
+ y = TwoPtrs(a.alloc(), a.alloc());
+ if c {
+ drop(x.1);
+ drop(y.0);
+ }
+ }
+ }
+}
+
+fn field_assignment(a: &Allocator, c0: bool) {
+ let mut x = (TwoPtrs(a.alloc(), a.alloc()), a.alloc());
+
+ x.1 = a.alloc();
+ x.1 = a.alloc();
+
+ let f = (x.0).0;
+ if c0 {
+ (x.0).0 = f;
+ }
+}
+
+fn assignment2(a: &Allocator, c0: bool, c1: bool) {
+ let mut _v = a.alloc();
+ let mut _w = a.alloc();
+ if c0 {
+ drop(_v);
+ }
+ _v = _w;
+ if c1 {
+ _w = a.alloc();
+ }
+}
+
+fn assignment1(a: &Allocator, c0: bool) {
+ let mut _v = a.alloc();
+ let mut _w = a.alloc();
+ if c0 {
+ drop(_v);
+ }
+ _v = _w;
+}
+
+union Boxy<T> {
+ a: ManuallyDrop<T>,
+ b: ManuallyDrop<T>,
+}
+
+fn union1(a: &Allocator) {
+ unsafe {
+ let mut u = Boxy { a: ManuallyDrop::new(a.alloc()) };
+ *u.b = a.alloc(); // drops first alloc
+ drop(ManuallyDrop::into_inner(u.a));
+ }
+}
+
+fn array_simple(a: &Allocator) {
+ let _x = [a.alloc(), a.alloc(), a.alloc(), a.alloc()];
+}
+
+fn vec_simple(a: &Allocator) {
+ let _x = vec![a.alloc(), a.alloc(), a.alloc(), a.alloc()];
+}
+
+fn generator(a: &Allocator, run_count: usize) {
+ assert!(run_count < 4);
+
+ let mut gen = || {
+ (a.alloc(),
+ yield a.alloc(),
+ a.alloc(),
+ yield a.alloc()
+ );
+ };
+ for _ in 0..run_count {
+ Pin::new(&mut gen).resume(());
+ }
+}
+
+fn mixed_drop_and_nondrop(a: &Allocator) {
+ // check that destructor panics handle drop
+ // and non-drop blocks in the same scope correctly.
+ //
+ // Surprisingly enough, this used to not work.
+ let (x, y, z);
+ x = a.alloc();
+ y = 5;
+ z = a.alloc();
+}
+
+#[allow(unreachable_code)]
+fn vec_unreachable(a: &Allocator) {
+ let _x = vec![a.alloc(), a.alloc(), a.alloc(), return];
+}
+
+fn slice_pattern_first(a: &Allocator) {
+ let[_x, ..] = [a.alloc(), a.alloc(), a.alloc()];
+}
+
+fn slice_pattern_middle(a: &Allocator) {
+ let[_, _x, _] = [a.alloc(), a.alloc(), a.alloc()];
+}
+
+fn slice_pattern_two(a: &Allocator) {
+ let[_x, _, _y, _] = [a.alloc(), a.alloc(), a.alloc(), a.alloc()];
+}
+
+fn slice_pattern_last(a: &Allocator) {
+ let[.., _y] = [a.alloc(), a.alloc(), a.alloc(), a.alloc()];
+}
+
+fn slice_pattern_one_of(a: &Allocator, i: usize) {
+ let array = [a.alloc(), a.alloc(), a.alloc(), a.alloc()];
+ let _x = match i {
+ 0 => { let [a, ..] = array; a }
+ 1 => { let [_, a, ..] = array; a }
+ 2 => { let [_, _, a, _] = array; a }
+ 3 => { let [_, _, _, a] = array; a }
+ _ => panic!("unmatched"),
+ };
+}
+
+fn subslice_pattern_from_end(a: &Allocator, arg: bool) {
+ let a = [a.alloc(), a.alloc(), a.alloc()];
+ if arg {
+ let[.., _x, _] = a;
+ } else {
+ let[_, _y @ ..] = a;
+ }
+}
+
+fn subslice_pattern_from_end_with_drop(a: &Allocator, arg: bool, arg2: bool) {
+ let a = [a.alloc(), a.alloc(), a.alloc(), a.alloc(), a.alloc()];
+ if arg2 {
+ drop(a);
+ return;
+ }
+
+ if arg {
+ let[.., _x, _] = a;
+ } else {
+ let[_, _y @ ..] = a;
+ }
+}
+
+fn slice_pattern_reassign(a: &Allocator) {
+ let mut ar = [a.alloc(), a.alloc()];
+ let[_, _x] = ar;
+ ar = [a.alloc(), a.alloc()];
+ let[.., _y] = ar;
+}
+
+fn subslice_pattern_reassign(a: &Allocator) {
+ let mut ar = [a.alloc(), a.alloc(), a.alloc()];
+ let[_, _, _x] = ar;
+ ar = [a.alloc(), a.alloc(), a.alloc()];
+ let[_, _y @ ..] = ar;
+}
+
+fn index_field_mixed_ends(a: &Allocator) {
+ let ar = [(a.alloc(), a.alloc()), (a.alloc(), a.alloc())];
+ let[(_x, _), ..] = ar;
+ let[(_, _y), _] = ar;
+ let[_, (_, _w)] = ar;
+ let[.., (_z, _)] = ar;
+}
+
+fn subslice_mixed_min_lengths(a: &Allocator, c: i32) {
+ let ar = [(a.alloc(), a.alloc()), (a.alloc(), a.alloc())];
+ match c {
+ 0 => { let[_x, ..] = ar; }
+ 1 => { let[_x, _, ..] = ar; }
+ 2 => { let[_x, _] = ar; }
+ 3 => { let[(_x, _), _, ..] = ar; }
+ 4 => { let[.., (_x, _)] = ar; }
+ 5 => { let[.., (_x, _), _] = ar; }
+ 6 => { let [_y @ ..] = ar; }
+ _ => { let [_y @ .., _] = ar; }
+ }
+}
+
+fn bindings_after_at_dynamic_init_move(a: &Allocator, c: bool) {
+ let foo = if c { Some(a.alloc()) } else { None };
+ let _x;
+
+ if let bar @ Some(_) = foo {
+ _x = bar;
+ }
+}
+
+fn bindings_after_at_dynamic_init_ref(a: &Allocator, c: bool) {
+ let foo = if c { Some(a.alloc()) } else { None };
+ let _x;
+
+ if let bar @ Some(_baz) = &foo {
+ _x = bar;
+ }
+}
+
+fn bindings_after_at_dynamic_drop_move(a: &Allocator, c: bool) {
+ let foo = if c { Some(a.alloc()) } else { None };
+
+ if let bar @ Some(_) = foo {
+ bar
+ } else {
+ None
+ };
+}
+
+fn bindings_after_at_dynamic_drop_ref(a: &Allocator, c: bool) {
+ let foo = if c { Some(a.alloc()) } else { None };
+
+ if let bar @ Some(_baz) = &foo {
+ bar
+ } else {
+ &None
+ };
+}
+
+fn move_ref_pattern(a: &Allocator) {
+ let mut tup = (a.alloc(), a.alloc(), a.alloc(), a.alloc());
+ let (ref _a, ref mut _b, _c, mut _d) = tup;
+}
+
+fn panic_after_return(a: &Allocator) -> Ptr<'_> {
+ // Panic in the drop of `p` or `q` can leak
+ let exceptions = vec![8, 9];
+ a.alloc();
+ let p = a.alloc();
+ {
+ a.alloc();
+ let p = a.alloc();
+ // FIXME (#47949) We leak values when we panic in a destructor after
+ // evaluating an expression with `rustc_mir::build::Builder::into`.
+ a.alloc_leaked(exceptions)
+ }
+}
+
+fn panic_after_return_expr(a: &Allocator) -> Ptr<'_> {
+ // Panic in the drop of `p` or `q` can leak
+ let exceptions = vec![8, 9];
+ a.alloc();
+ let p = a.alloc();
+ {
+ a.alloc();
+ let q = a.alloc();
+ // FIXME (#47949)
+ return a.alloc_leaked(exceptions);
+ }
+}
+
+fn panic_after_init(a: &Allocator) {
+ // Panic in the drop of `r` can leak
+ let exceptions = vec![8];
+ a.alloc();
+ let p = a.alloc();
+ let q = {
+ a.alloc();
+ let r = a.alloc();
+ // FIXME (#47949)
+ a.alloc_leaked(exceptions)
+ };
+}
+
+fn panic_after_init_temp(a: &Allocator) {
+ // Panic in the drop of `r` can leak
+ let exceptions = vec![8];
+ a.alloc();
+ let p = a.alloc();
+ {
+ a.alloc();
+ let r = a.alloc();
+ // FIXME (#47949)
+ a.alloc_leaked(exceptions)
+ };
+}
+
+fn panic_after_init_by_loop(a: &Allocator) {
+ // Panic in the drop of `r` can leak
+ let exceptions = vec![8];
+ a.alloc();
+ let p = a.alloc();
+ let q = loop {
+ a.alloc();
+ let r = a.alloc();
+ // FIXME (#47949)
+ break a.alloc_leaked(exceptions);
+ };
+}
+
+fn run_test<F>(mut f: F)
+ where F: FnMut(&Allocator)
+{
+ let first_alloc = Allocator::new(usize::MAX);
+ f(&first_alloc);
+
+ for failing_op in 1..first_alloc.cur_ops.get()+1 {
+ let alloc = Allocator::new(failing_op);
+ let alloc = &alloc;
+ let f = panic::AssertUnwindSafe(&mut f);
+ let result = panic::catch_unwind(move || {
+ f.0(alloc);
+ });
+ match result {
+ Ok(..) => panic!("test executed {} ops but now {}",
+ first_alloc.cur_ops.get(), alloc.cur_ops.get()),
+ Err(e) => {
+ if e.downcast_ref::<InjectedFailure>().is_none() {
+ panic::resume_unwind(e);
+ }
+ }
+ }
+ }
+}
+
+fn run_test_nopanic<F>(mut f: F)
+ where F: FnMut(&Allocator)
+{
+ let first_alloc = Allocator::new(usize::MAX);
+ f(&first_alloc);
+}
+
+fn main() {
+ run_test(|a| dynamic_init(a, false));
+ run_test(|a| dynamic_init(a, true));
+ run_test(|a| dynamic_drop(a, false));
+ run_test(|a| dynamic_drop(a, true));
+
+ run_test(|a| assignment2(a, false, false));
+ run_test(|a| assignment2(a, false, true));
+ run_test(|a| assignment2(a, true, false));
+ run_test(|a| assignment2(a, true, true));
+
+ run_test(|a| assignment1(a, false));
+ run_test(|a| assignment1(a, true));
+
+ run_test(|a| array_simple(a));
+ run_test(|a| vec_simple(a));
+ run_test(|a| vec_unreachable(a));
+
+ run_test(|a| struct_dynamic_drop(a, false, false, false));
+ run_test(|a| struct_dynamic_drop(a, false, false, true));
+ run_test(|a| struct_dynamic_drop(a, false, true, false));
+ run_test(|a| struct_dynamic_drop(a, false, true, true));
+ run_test(|a| struct_dynamic_drop(a, true, false, false));
+ run_test(|a| struct_dynamic_drop(a, true, false, true));
+ run_test(|a| struct_dynamic_drop(a, true, true, false));
+ run_test(|a| struct_dynamic_drop(a, true, true, true));
+
+ run_test(|a| field_assignment(a, false));
+ run_test(|a| field_assignment(a, true));
+
+ run_test(|a| generator(a, 0));
+ run_test(|a| generator(a, 1));
+ run_test(|a| generator(a, 2));
+ run_test(|a| generator(a, 3));
+
+ run_test(|a| mixed_drop_and_nondrop(a));
+
+ run_test(|a| slice_pattern_first(a));
+ run_test(|a| slice_pattern_middle(a));
+ run_test(|a| slice_pattern_two(a));
+ run_test(|a| slice_pattern_last(a));
+ run_test(|a| slice_pattern_one_of(a, 0));
+ run_test(|a| slice_pattern_one_of(a, 1));
+ run_test(|a| slice_pattern_one_of(a, 2));
+ run_test(|a| slice_pattern_one_of(a, 3));
+
+ run_test(|a| subslice_pattern_from_end(a, true));
+ run_test(|a| subslice_pattern_from_end(a, false));
+ run_test(|a| subslice_pattern_from_end_with_drop(a, true, true));
+ run_test(|a| subslice_pattern_from_end_with_drop(a, true, false));
+ run_test(|a| subslice_pattern_from_end_with_drop(a, false, true));
+ run_test(|a| subslice_pattern_from_end_with_drop(a, false, false));
+ run_test(|a| slice_pattern_reassign(a));
+ run_test(|a| subslice_pattern_reassign(a));
+
+ run_test(|a| index_field_mixed_ends(a));
+ run_test(|a| subslice_mixed_min_lengths(a, 0));
+ run_test(|a| subslice_mixed_min_lengths(a, 1));
+ run_test(|a| subslice_mixed_min_lengths(a, 2));
+ run_test(|a| subslice_mixed_min_lengths(a, 3));
+ run_test(|a| subslice_mixed_min_lengths(a, 4));
+ run_test(|a| subslice_mixed_min_lengths(a, 5));
+ run_test(|a| subslice_mixed_min_lengths(a, 6));
+ run_test(|a| subslice_mixed_min_lengths(a, 7));
+
+ run_test(|a| move_ref_pattern(a));
+
+ run_test(|a| {
+ panic_after_return(a);
+ });
+ run_test(|a| {
+ panic_after_return_expr(a);
+ });
+ run_test(|a| panic_after_init(a));
+ run_test(|a| panic_after_init_temp(a));
+ run_test(|a| panic_after_init_by_loop(a));
+
+ run_test(|a| bindings_after_at_dynamic_init_move(a, true));
+ run_test(|a| bindings_after_at_dynamic_init_move(a, false));
+ run_test(|a| bindings_after_at_dynamic_init_ref(a, true));
+ run_test(|a| bindings_after_at_dynamic_init_ref(a, false));
+ run_test(|a| bindings_after_at_dynamic_drop_move(a, true));
+ run_test(|a| bindings_after_at_dynamic_drop_move(a, false));
+ run_test(|a| bindings_after_at_dynamic_drop_ref(a, true));
+ run_test(|a| bindings_after_at_dynamic_drop_ref(a, false));
+
+ run_test_nopanic(|a| union1(a));
+}
diff --git a/tests/ui/drop/issue-100276.rs b/tests/ui/drop/issue-100276.rs
new file mode 100644
index 000000000..6401a8d14
--- /dev/null
+++ b/tests/ui/drop/issue-100276.rs
@@ -0,0 +1,12 @@
+// check-pass
+// compile-flags: -Z validate-mir
+#![feature(let_chains)]
+
+fn let_chains(entry: std::io::Result<std::fs::DirEntry>) {
+ if let Ok(entry) = entry
+ && let Some(s) = entry.file_name().to_str()
+ && s.contains("")
+ {}
+}
+
+fn main() {}
diff --git a/tests/ui/drop/issue-10028.rs b/tests/ui/drop/issue-10028.rs
new file mode 100644
index 000000000..1692470e8
--- /dev/null
+++ b/tests/ui/drop/issue-10028.rs
@@ -0,0 +1,21 @@
+// run-pass
+#![allow(dead_code)]
+// aux-build:issue-10028.rs
+
+// pretty-expanded FIXME #23616
+
+extern crate issue_10028 as issue10028;
+
+use issue10028::ZeroLengthThingWithDestructor;
+
+struct Foo {
+ zero_length_thing: ZeroLengthThingWithDestructor
+}
+
+fn make_foo() -> Foo {
+ Foo { zero_length_thing: ZeroLengthThingWithDestructor::new() }
+}
+
+fn main() {
+ let _f:Foo = make_foo();
+}
diff --git a/tests/ui/drop/issue-103107.rs b/tests/ui/drop/issue-103107.rs
new file mode 100644
index 000000000..5f4475956
--- /dev/null
+++ b/tests/ui/drop/issue-103107.rs
@@ -0,0 +1,37 @@
+// check-pass
+// compile-flags: -Z validate-mir
+
+struct Foo<'a>(&'a mut u32);
+
+impl<'a> Drop for Foo<'a> {
+ fn drop(&mut self) {
+ *self.0 = 0;
+ }
+}
+
+fn and() {
+ let mut foo = 0;
+ // This used to compile also before the fix
+ if true && *Foo(&mut foo).0 == 0 && ({ foo = 0; true}) {}
+
+ // This used to fail before the fix
+ if *Foo(&mut foo).0 == 0 && ({ foo = 0; true}) {}
+
+ println!("{foo}");
+}
+
+fn or() {
+ let mut foo = 0;
+ // This used to compile also before the fix
+ if false || *Foo(&mut foo).0 == 1 || ({ foo = 0; true}) {}
+
+ // This used to fail before the fix
+ if *Foo(&mut foo).0 == 1 || ({ foo = 0; true}) {}
+
+ println!("{foo}");
+}
+
+fn main() {
+ and();
+ or();
+}
diff --git a/tests/ui/drop/issue-17718-const-destructors.rs b/tests/ui/drop/issue-17718-const-destructors.rs
new file mode 100644
index 000000000..c9a729c7b
--- /dev/null
+++ b/tests/ui/drop/issue-17718-const-destructors.rs
@@ -0,0 +1,10 @@
+// check-pass
+#![allow(dead_code)]
+struct A;
+impl Drop for A {
+ fn drop(&mut self) {}
+}
+
+const FOO: A = A;
+
+fn main() {}
diff --git a/tests/ui/drop/issue-21486.rs b/tests/ui/drop/issue-21486.rs
new file mode 100644
index 000000000..46d6ccd56
--- /dev/null
+++ b/tests/ui/drop/issue-21486.rs
@@ -0,0 +1,77 @@
+// run-pass
+#![allow(unreachable_code)]
+// Issue #21486: Make sure that all structures are dropped, even when
+// created via FRU and control-flow breaks in the middle of
+// construction.
+
+use std::sync::atomic::{Ordering, AtomicUsize};
+
+#[derive(Debug)]
+struct Noisy(u8);
+impl Drop for Noisy {
+ fn drop(&mut self) {
+ // println!("splat #{}", self.0);
+ event(self.0);
+ }
+}
+
+#[allow(dead_code)]
+#[derive(Debug)]
+struct Foo { n0: Noisy, n1: Noisy }
+impl Foo {
+ fn vals(&self) -> (u8, u8) { (self.n0.0, self.n1.0) }
+}
+
+fn leak_1_ret() -> Foo {
+ let _old_foo = Foo { n0: Noisy(1), n1: Noisy(2) };
+ Foo { n0: { return Foo { n0: Noisy(3), n1: Noisy(4) } },
+ .._old_foo
+ };
+}
+
+fn leak_2_ret() -> Foo {
+ let _old_foo = Foo { n0: Noisy(1), n1: Noisy(2) };
+ Foo { n1: { return Foo { n0: Noisy(3), n1: Noisy(4) } },
+ .._old_foo
+ };
+}
+
+// In this case, the control flow break happens *before* we construct
+// `Foo(Noisy(1),Noisy(2))`, so there should be no record of it in the
+// event log.
+fn leak_3_ret() -> Foo {
+ let _old_foo = || Foo { n0: Noisy(1), n1: Noisy(2) };
+ Foo { n1: { return Foo { n0: Noisy(3), n1: Noisy(4) } },
+ .._old_foo()
+ };
+}
+
+pub fn main() {
+ reset_log();
+ assert_eq!(leak_1_ret().vals(), (3,4));
+ assert_eq!(0x01_02_03_04, event_log());
+
+ reset_log();
+ assert_eq!(leak_2_ret().vals(), (3,4));
+ assert_eq!(0x01_02_03_04, event_log());
+
+ reset_log();
+ assert_eq!(leak_3_ret().vals(), (3,4));
+ assert_eq!(0x03_04, event_log());
+}
+
+static LOG: AtomicUsize = AtomicUsize::new(0);
+
+fn reset_log() {
+ LOG.store(0, Ordering::SeqCst);
+}
+
+fn event_log() -> usize {
+ LOG.load(Ordering::SeqCst)
+}
+
+fn event(tag: u8) {
+ let old_log = LOG.load(Ordering::SeqCst);
+ let new_log = (old_log << 8) + tag as usize;
+ LOG.store(new_log, Ordering::SeqCst);
+}
diff --git a/tests/ui/drop/issue-23338-ensure-param-drop-order.rs b/tests/ui/drop/issue-23338-ensure-param-drop-order.rs
new file mode 100644
index 000000000..a99f260dd
--- /dev/null
+++ b/tests/ui/drop/issue-23338-ensure-param-drop-order.rs
@@ -0,0 +1,162 @@
+// run-pass
+#![allow(non_upper_case_globals)]
+
+// This test is ensuring that parameters are indeed dropped after
+// temporaries in a fn body.
+
+use std::cell::RefCell;
+
+use self::d::D;
+
+pub fn main() {
+ let log = RefCell::new(vec![]);
+ d::println("created empty log");
+ test(&log);
+
+ assert_eq!(&log.borrow()[..],
+ [
+ // created empty log
+ // +-- Make D(da_0, 0)
+ // | +-- Make D(de_1, 1)
+ // | | calling foo
+ // | | entered foo
+ // | | +-- Make D(de_2, 2)
+ // | | | +-- Make D(da_1, 3)
+ // | | | | +-- Make D(de_3, 4)
+ // | | | | | +-- Make D(de_4, 5)
+ 3, // | | | +-- Drop D(da_1, 3)
+ // | | | | |
+ 4, // | | | +-- Drop D(de_3, 4)
+ // | | | |
+ // | | | | eval tail of foo
+ // | | | +-- Make D(de_5, 6)
+ // | | | | +-- Make D(de_6, 7)
+ 5, // | | | | | +-- Drop D(de_4, 5)
+ // | | | | |
+ 2, // | | +-- Drop D(de_2, 2)
+ // | | | |
+ 6, // | | +-- Drop D(de_5, 6)
+ // | | |
+ 1, // | +-- Drop D(de_1, 1)
+ // | |
+ 0, // +-- Drop D(da_0, 0)
+ // |
+ // | result D(de_6, 7)
+ 7 // +-- Drop D(de_6, 7)
+
+ ]);
+}
+
+fn test<'a>(log: d::Log<'a>) {
+ let da = D::new("da", 0, log);
+ let de = D::new("de", 1, log);
+ d::println("calling foo");
+ let result = foo(da, de);
+ d::println(&format!("result {}", result));
+}
+
+fn foo<'a>(da0: D<'a>, de1: D<'a>) -> D<'a> {
+ d::println("entered foo");
+ let de2 = de1.incr(); // creates D(de_2, 2)
+ let de4 = {
+ let _da1 = da0.incr(); // creates D(da_1, 3)
+ de2.incr().incr() // creates D(de_3, 4) and D(de_4, 5)
+ };
+ d::println("eval tail of foo");
+ de4.incr().incr() // creates D(de_5, 6) and D(de_6, 7)
+}
+
+// This module provides simultaneous printouts of the dynamic extents
+// of all of the D values, in addition to logging the order that each
+// is dropped.
+
+const PREF_INDENT: u32 = 16;
+
+pub mod d {
+ #![allow(unused_parens)]
+ use std::fmt;
+ use std::mem;
+ use std::cell::RefCell;
+
+ static mut counter: u32 = 0;
+ static mut trails: u64 = 0;
+
+ pub type Log<'a> = &'a RefCell<Vec<u32>>;
+
+ pub fn current_width() -> u32 {
+ unsafe { max_width() - trails.leading_zeros() }
+ }
+
+ pub fn max_width() -> u32 {
+ unsafe {
+ (mem::size_of_val(&trails)*8) as u32
+ }
+ }
+
+ pub fn indent_println(my_trails: u32, s: &str) {
+ let mut indent: String = String::new();
+ for i in 0..my_trails {
+ unsafe {
+ if trails & (1 << i) != 0 {
+ indent = indent + "| ";
+ } else {
+ indent = indent + " ";
+ }
+ }
+ }
+ println!("{}{}", indent, s);
+ }
+
+ pub fn println(s: &str) {
+ indent_println(super::PREF_INDENT, s);
+ }
+
+ fn first_avail() -> u32 {
+ unsafe {
+ for i in 0..64 {
+ if trails & (1 << i) == 0 {
+ return i;
+ }
+ }
+ }
+ panic!("exhausted trails");
+ }
+
+ pub struct D<'a> {
+ name: &'static str, i: u32, uid: u32, trail: u32, log: Log<'a>
+ }
+
+ impl<'a> fmt::Display for D<'a> {
+ fn fmt(&self, w: &mut fmt::Formatter) -> fmt::Result {
+ write!(w, "D({}_{}, {})", self.name, self.i, self.uid)
+ }
+ }
+
+ impl<'a> D<'a> {
+ pub fn new(name: &'static str, i: u32, log: Log<'a>) -> D<'a> {
+ unsafe {
+ let trail = first_avail();
+ let ctr = counter;
+ counter += 1;
+ trails |= (1 << trail);
+ let ret = D {
+ name: name, i: i, log: log, uid: ctr, trail: trail
+ };
+ indent_println(trail, &format!("+-- Make {}", ret));
+ ret
+ }
+ }
+ pub fn incr(&self) -> D<'a> {
+ D::new(self.name, self.i + 1, self.log)
+ }
+ }
+
+ impl<'a> Drop for D<'a> {
+ fn drop(&mut self) {
+ unsafe { trails &= !(1 << self.trail); };
+ self.log.borrow_mut().push(self.uid);
+ indent_println(self.trail, &format!("+-- Drop {}", self));
+ indent_println(::PREF_INDENT, "");
+ }
+ }
+}
diff --git a/tests/ui/drop/issue-2734.rs b/tests/ui/drop/issue-2734.rs
new file mode 100644
index 000000000..df4f394dc
--- /dev/null
+++ b/tests/ui/drop/issue-2734.rs
@@ -0,0 +1,22 @@
+// run-pass
+#![allow(dead_code)]
+#![allow(non_camel_case_types)]
+
+// pretty-expanded FIXME #23616
+
+trait hax {
+ fn dummy(&self) { }
+}
+impl<A> hax for A { }
+
+fn perform_hax<T: 'static>(x: Box<T>) -> Box<dyn hax+'static> {
+ Box::new(x) as Box<dyn hax+'static>
+}
+
+fn deadcode() {
+ perform_hax(Box::new("deadcode".to_string()));
+}
+
+pub fn main() {
+ let _ = perform_hax(Box::new(42));
+}
diff --git a/tests/ui/drop/issue-30018-nopanic.rs b/tests/ui/drop/issue-30018-nopanic.rs
new file mode 100644
index 000000000..291bab273
--- /dev/null
+++ b/tests/ui/drop/issue-30018-nopanic.rs
@@ -0,0 +1,103 @@
+// run-pass
+#![allow(unreachable_code)]
+// More thorough regression test for Issues #30018 and #30822. This
+// attempts to explore different ways that array element construction
+// (for both scratch arrays and non-scratch ones) interacts with
+// breaks in the control-flow, in terms of the order of evaluation of
+// the destructors (which may change; see RFC Issue 744) and the
+// number of times that the destructor runs for each value (which
+// should never exceed 1; this latter case is what #30822 is about).
+
+use std::cell::RefCell;
+
+struct D<'a>(&'a RefCell<Vec<i32>>, i32);
+
+impl<'a> Drop for D<'a> {
+ fn drop(&mut self) {
+ println!("Dropping D({})", self.1);
+ (self.0).borrow_mut().push(self.1);
+ }
+}
+
+fn main() {
+ println!("Start");
+ break_during_elem();
+ break_after_whole();
+ println!("Finis");
+}
+
+fn break_during_elem() {
+ let log = &RefCell::new(Vec::new());
+
+ // CASE 1: Fixed-size array itself is stored in _r slot.
+ loop {
+ let _r = [D(log, 10),
+ D(log, 11),
+ { D(log, 12); break; },
+ D(log, 13)];
+ }
+ assert_eq!(&log.borrow()[..], &[12, 11, 10]);
+ log.borrow_mut().clear();
+
+ // CASE 2: Slice (borrow of array) is stored in _r slot.
+ // This is the case that is actually being reported in #30018.
+ loop {
+ let _r = &[D(log, 20),
+ D(log, 21),
+ { D(log, 22); break; },
+ D(log, 23)];
+ }
+ assert_eq!(&log.borrow()[..], &[22, 21, 20]);
+ log.borrow_mut().clear();
+
+ // CASE 3: (Borrow of) slice-index of array is stored in _r slot.
+ loop {
+ let _r = &[D(log, 30),
+ D(log, 31),
+ { D(log, 32); break; },
+ D(log, 33)][..];
+ }
+ assert_eq!(&log.borrow()[..], &[32, 31, 30]);
+ log.borrow_mut().clear();
+}
+
+// The purpose of these functions is to test what happens when we
+// panic after an array has been constructed in its entirety.
+//
+// It is meant to act as proof that we still need to continue
+// scheduling the destruction of an array even after we've scheduled
+// drops for its elements during construction; the latter is tested by
+// `fn break_during_elem()`.
+fn break_after_whole() {
+ let log = &RefCell::new(Vec::new());
+
+ // CASE 1: Fixed-size array itself is stored in _r slot.
+ loop {
+ let _r = [D(log, 10),
+ D(log, 11),
+ D(log, 12)];
+ break;
+ }
+ assert_eq!(&log.borrow()[..], &[10, 11, 12]);
+ log.borrow_mut().clear();
+
+ // CASE 2: Slice (borrow of array) is stored in _r slot.
+ loop {
+ let _r = &[D(log, 20),
+ D(log, 21),
+ D(log, 22)];
+ break;
+ }
+ assert_eq!(&log.borrow()[..], &[20, 21, 22]);
+ log.borrow_mut().clear();
+
+ // CASE 3: (Borrow of) slice-index of array is stored in _r slot.
+ loop {
+ let _r = &[D(log, 30),
+ D(log, 31),
+ D(log, 32)][..];
+ break;
+ }
+ assert_eq!(&log.borrow()[..], &[30, 31, 32]);
+ log.borrow_mut().clear();
+}
diff --git a/tests/ui/drop/issue-35546.rs b/tests/ui/drop/issue-35546.rs
new file mode 100644
index 000000000..004679a62
--- /dev/null
+++ b/tests/ui/drop/issue-35546.rs
@@ -0,0 +1,20 @@
+// build-pass
+#![allow(dead_code)]
+// Regression test for #35546. Check that we are able to codegen
+// this. Before we had problems because of the drop glue signature
+// around dropping a trait object (specifically, when dropping the
+// `value` field of `Node<Send>`).
+
+struct Node<T: ?Sized + Send> {
+ next: Option<Box<Node<dyn Send>>>,
+ value: T,
+}
+
+fn clear(head: &mut Option<Box<Node<dyn Send>>>) {
+ match head.take() {
+ Some(node) => *head = node.next,
+ None => (),
+ }
+}
+
+fn main() {}
diff --git a/tests/ui/drop/issue-48962.rs b/tests/ui/drop/issue-48962.rs
new file mode 100644
index 000000000..80d815379
--- /dev/null
+++ b/tests/ui/drop/issue-48962.rs
@@ -0,0 +1,34 @@
+// run-pass
+#![allow(unused_must_use)]
+// Test that we are able to reinitialize a box after its referent has been moved out
+static mut ORDER: [usize; 3] = [0, 0, 0];
+static mut INDEX: usize = 0;
+
+struct Dropee (usize);
+
+impl Drop for Dropee {
+ fn drop(&mut self) {
+ unsafe {
+ ORDER[INDEX] = self.0;
+ INDEX = INDEX + 1;
+ }
+ }
+}
+
+fn add_sentintel() {
+ unsafe {
+ ORDER[INDEX] = 2;
+ INDEX = INDEX + 1;
+ }
+}
+
+fn main() {
+ let mut x = Box::new(Dropee(1));
+ *x; // move out from `*x`
+ add_sentintel();
+ *x = Dropee(3); // re-initialize `*x`
+ {x}; // drop value
+ unsafe {
+ assert_eq!(ORDER, [1, 2, 3]);
+ }
+}
diff --git a/tests/ui/drop/issue-90752-raw-ptr-shenanigans.rs b/tests/ui/drop/issue-90752-raw-ptr-shenanigans.rs
new file mode 100644
index 000000000..4e67b3594
--- /dev/null
+++ b/tests/ui/drop/issue-90752-raw-ptr-shenanigans.rs
@@ -0,0 +1,41 @@
+// run-pass
+
+use std::cell::RefCell;
+
+struct S<'a>(i32, &'a RefCell<Vec<i32>>);
+
+impl<'a> Drop for S<'a> {
+ fn drop(&mut self) {
+ self.1.borrow_mut().push(self.0);
+ }
+}
+
+fn test(drops: &RefCell<Vec<i32>>) {
+ let mut foo = None;
+ let pfoo: *mut _ = &mut foo;
+
+ match foo {
+ None => (),
+ _ => return,
+ }
+
+ // Both S(0) and S(1) should be dropped, but aren't.
+ unsafe { *pfoo = Some((S(0, drops), S(1, drops))); }
+
+ match foo {
+ Some((_x, _)) => {}
+ _ => {}
+ }
+}
+
+fn main() {
+ let drops = RefCell::new(Vec::new());
+ test(&drops);
+
+ // Ideally, we want this...
+ //assert_eq!(*drops.borrow(), &[0, 1]);
+
+ // But the delayed access through the raw pointer confuses drop elaboration,
+ // causing S(1) to be leaked.
+ assert_eq!(*drops.borrow(), &[0]);
+}
diff --git a/tests/ui/drop/issue-90752.rs b/tests/ui/drop/issue-90752.rs
new file mode 100644
index 000000000..4395e45e7
--- /dev/null
+++ b/tests/ui/drop/issue-90752.rs
@@ -0,0 +1,32 @@
+// run-pass
+
+use std::cell::RefCell;
+
+struct S<'a>(i32, &'a RefCell<Vec<i32>>);
+
+impl<'a> Drop for S<'a> {
+ fn drop(&mut self) {
+ self.1.borrow_mut().push(self.0);
+ }
+}
+
+fn test(drops: &RefCell<Vec<i32>>) {
+ let mut foo = None;
+ match foo {
+ None => (),
+ _ => return,
+ }
+
+ *(&mut foo) = Some((S(0, drops), S(1, drops))); // Both S(0) and S(1) should be dropped
+
+ match foo {
+ Some((_x, _)) => {}
+ _ => {}
+ }
+}
+
+fn main() {
+ let drops = RefCell::new(Vec::new());
+ test(&drops);
+ assert_eq!(*drops.borrow(), &[0, 1]);
+}
diff --git a/tests/ui/drop/no-drop-flag-size.rs b/tests/ui/drop/no-drop-flag-size.rs
new file mode 100644
index 000000000..103e70ef6
--- /dev/null
+++ b/tests/ui/drop/no-drop-flag-size.rs
@@ -0,0 +1,15 @@
+// run-pass
+#![allow(dead_code)]
+use std::mem::size_of;
+
+struct Test<T> {
+ a: T
+}
+
+impl<T> Drop for Test<T> {
+ fn drop(&mut self) { }
+}
+
+pub fn main() {
+ assert_eq!(size_of::<isize>(), size_of::<Test<isize>>());
+}
diff --git a/tests/ui/drop/nondrop-cycle.rs b/tests/ui/drop/nondrop-cycle.rs
new file mode 100644
index 000000000..29070f917
--- /dev/null
+++ b/tests/ui/drop/nondrop-cycle.rs
@@ -0,0 +1,31 @@
+// run-pass
+// pretty-expanded FIXME #23616
+
+use std::cell::Cell;
+
+struct C<'a> {
+ p: Cell<Option<&'a C<'a>>>,
+}
+
+impl<'a> C<'a> {
+ fn new() -> C<'a> { C { p: Cell::new(None) } }
+}
+
+fn f1() {
+ let (c1, c2) = (C::new(), C::new());
+ c1.p.set(Some(&c2));
+ c2.p.set(Some(&c1));
+}
+
+fn f2() {
+ let (c1, c2);
+ c1 = C::new();
+ c2 = C::new();
+ c1.p.set(Some(&c2));
+ c2.p.set(Some(&c1));
+}
+
+fn main() {
+ f1();
+ f2();
+}
diff --git a/tests/ui/drop/repeat-drop-2.rs b/tests/ui/drop/repeat-drop-2.rs
new file mode 100644
index 000000000..3cfacea5e
--- /dev/null
+++ b/tests/ui/drop/repeat-drop-2.rs
@@ -0,0 +1,15 @@
+fn borrowck_catch() {
+ let foo = String::new();
+ let _bar = foo;
+ let _baz = [foo; 0]; //~ ERROR use of moved value: `foo` [E0382]
+}
+
+const _: [String; 0] = [String::new(); 0];
+//~^ ERROR destructor of `String` cannot be evaluated at compile-time [E0493]
+
+fn must_be_init() {
+ let x: u8;
+ let _ = [x; 0]; //~ ERROR E0381
+}
+
+fn main() {}
diff --git a/tests/ui/drop/repeat-drop-2.stderr b/tests/ui/drop/repeat-drop-2.stderr
new file mode 100644
index 000000000..f030228f7
--- /dev/null
+++ b/tests/ui/drop/repeat-drop-2.stderr
@@ -0,0 +1,41 @@
+error[E0382]: use of moved value: `foo`
+ --> $DIR/repeat-drop-2.rs:4:17
+ |
+LL | let foo = String::new();
+ | --- move occurs because `foo` has type `String`, which does not implement the `Copy` trait
+LL | let _bar = foo;
+ | --- value moved here
+LL | let _baz = [foo; 0];
+ | ^^^ value used here after move
+ |
+help: consider cloning the value if the performance cost is acceptable
+ |
+LL | let _bar = foo.clone();
+ | ++++++++
+
+error[E0493]: destructor of `String` cannot be evaluated at compile-time
+ --> $DIR/repeat-drop-2.rs:7:25
+ |
+LL | const _: [String; 0] = [String::new(); 0];
+ | -^^^^^^^^^^^^^----
+ | ||
+ | |the destructor for this type cannot be evaluated in constants
+ | value is dropped here
+
+error[E0381]: used binding `x` isn't initialized
+ --> $DIR/repeat-drop-2.rs:12:14
+ |
+LL | let x: u8;
+ | - binding declared here but left uninitialized
+LL | let _ = [x; 0];
+ | ^ `x` used here but it isn't initialized
+ |
+help: consider assigning a value
+ |
+LL | let x: u8 = 0;
+ | +++
+
+error: aborting due to 3 previous errors
+
+Some errors have detailed explanations: E0381, E0382, E0493.
+For more information about an error, try `rustc --explain E0381`.
diff --git a/tests/ui/drop/repeat-drop.rs b/tests/ui/drop/repeat-drop.rs
new file mode 100644
index 000000000..8fd46ecaf
--- /dev/null
+++ b/tests/ui/drop/repeat-drop.rs
@@ -0,0 +1,118 @@
+// run-pass
+// needs-unwind
+
+static mut CHECK: usize = 0;
+
+struct DropChecker(usize);
+
+impl Drop for DropChecker {
+ fn drop(&mut self) {
+ unsafe {
+ if CHECK != self.0 - 1 {
+ panic!("Found {}, should have found {}", CHECK, self.0 - 1);
+ }
+ CHECK = self.0;
+ }
+ }
+}
+
+macro_rules! check_drops {
+ ($l:literal) => {
+ unsafe { assert_eq!(CHECK, $l) }
+ };
+}
+
+struct DropPanic;
+
+impl Drop for DropPanic {
+ fn drop(&mut self) {
+ panic!()
+ }
+}
+
+fn value_zero() {
+ unsafe { CHECK = 0 };
+ let foo = DropChecker(1);
+ let v: [DropChecker; 0] = [foo; 0];
+ check_drops!(1);
+ std::mem::drop(v);
+ check_drops!(1);
+}
+
+fn value_one() {
+ unsafe { CHECK = 0 };
+ let foo = DropChecker(1);
+ let v: [DropChecker; 1] = [foo; 1];
+ check_drops!(0);
+ std::mem::drop(v);
+ check_drops!(1);
+}
+
+const DROP_CHECKER: DropChecker = DropChecker(1);
+
+fn const_zero() {
+ unsafe { CHECK = 0 };
+ let v: [DropChecker; 0] = [DROP_CHECKER; 0];
+ check_drops!(0);
+ std::mem::drop(v);
+ check_drops!(0);
+}
+
+fn const_one() {
+ unsafe { CHECK = 0 };
+ let v: [DropChecker; 1] = [DROP_CHECKER; 1];
+ check_drops!(0);
+ std::mem::drop(v);
+ check_drops!(1);
+}
+
+fn const_generic_zero<const N: usize>() {
+ unsafe { CHECK = 0 };
+ let v: [DropChecker; N] = [DROP_CHECKER; N];
+ check_drops!(0);
+ std::mem::drop(v);
+ check_drops!(0);
+}
+
+fn const_generic_one<const N: usize>() {
+ unsafe { CHECK = 0 };
+ let v: [DropChecker; N] = [DROP_CHECKER; N];
+ check_drops!(0);
+ std::mem::drop(v);
+ check_drops!(1);
+}
+
+// Make sure that things are allowed to promote as expected
+
+fn allow_promote() {
+ unsafe { CHECK = 0 };
+ let foo = DropChecker(1);
+ let v: &'static [DropChecker; 0] = &[foo; 0];
+ check_drops!(1);
+ std::mem::drop(v);
+ check_drops!(1);
+}
+
+// Verify that unwinding in the drop causes the right things to drop in the right order
+fn on_unwind() {
+ unsafe { CHECK = 0 };
+ std::panic::catch_unwind(|| {
+ let panic = DropPanic;
+ let _local = DropChecker(2);
+ let _v = (DropChecker(1), [panic; 0]);
+ std::process::abort();
+ })
+ .unwrap_err();
+ check_drops!(2);
+}
+
+fn main() {
+ value_zero();
+ value_one();
+ const_zero();
+ const_one();
+ const_generic_zero::<0>();
+ const_generic_one::<1>();
+ allow_promote();
+ on_unwind();
+}
diff --git a/tests/ui/drop/terminate-in-initializer.rs b/tests/ui/drop/terminate-in-initializer.rs
new file mode 100644
index 000000000..66f267aa7
--- /dev/null
+++ b/tests/ui/drop/terminate-in-initializer.rs
@@ -0,0 +1,34 @@
+// run-pass
+// needs-unwind
+// ignore-emscripten no threads support
+
+// Issue #787
+// Don't try to clean up uninitialized locals
+
+
+use std::thread;
+
+fn test_break() { loop { let _x: Box<isize> = break; } }
+
+fn test_cont() { let mut i = 0; while i < 1 { i += 1; let _x: Box<isize> = continue; } }
+
+fn test_ret() { let _x: Box<isize> = return; }
+
+fn test_panic() {
+ fn f() { let _x: Box<isize> = panic!(); }
+ thread::spawn(move|| f() ).join().unwrap_err();
+}
+
+fn test_panic_indirect() {
+ fn f() -> ! { panic!(); }
+ fn g() { let _x: Box<isize> = f(); }
+ thread::spawn(move|| g() ).join().unwrap_err();
+}
+
+pub fn main() {
+ test_break();
+ test_cont();
+ test_ret();
+ test_panic();
+ test_panic_indirect();
+}
diff --git a/tests/ui/drop/use_inline_dtor.rs b/tests/ui/drop/use_inline_dtor.rs
new file mode 100644
index 000000000..ac916de46
--- /dev/null
+++ b/tests/ui/drop/use_inline_dtor.rs
@@ -0,0 +1,10 @@
+// run-pass
+// aux-build:inline_dtor.rs
+
+// pretty-expanded FIXME #23616
+
+extern crate inline_dtor;
+
+pub fn main() {
+ let _x = inline_dtor::Foo;
+}