author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 12:19:13 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 12:19:13 +0000
commit     218caa410aa38c29984be31a5229b9fa717560ee (patch)
tree       c54bd55eeb6e4c508940a30e94c0032fbd45d677 /compiler/rustc_data_structures/src
parent     Releasing progress-linux version 1.67.1+dfsg1-1~progress7.99u1. (diff)
download   rustc-218caa410aa38c29984be31a5229b9fa717560ee.tar.xz
           rustc-218caa410aa38c29984be31a5229b9fa717560ee.zip
Merging upstream version 1.68.2+dfsg1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'compiler/rustc_data_structures/src')
-rw-r--r--  compiler/rustc_data_structures/src/base_n.rs                     |   2
-rw-r--r--  compiler/rustc_data_structures/src/fingerprint.rs                |   1
-rw-r--r--  compiler/rustc_data_structures/src/frozen.rs                     |   2
-rw-r--r--  compiler/rustc_data_structures/src/fx.rs                         |   4
-rw-r--r--  compiler/rustc_data_structures/src/graph/dominators/mod.rs       |  17
-rw-r--r--  compiler/rustc_data_structures/src/graph/implementation/tests.rs |   8
-rw-r--r--  compiler/rustc_data_structures/src/graph/iterate/mod.rs          |   8
-rw-r--r--  compiler/rustc_data_structures/src/graph/scc/mod.rs              |  15
-rw-r--r--  compiler/rustc_data_structures/src/graph/scc/tests.rs            |   2
-rw-r--r--  compiler/rustc_data_structures/src/graph/vec_graph/mod.rs        |   2
-rw-r--r--  compiler/rustc_data_structures/src/lib.rs                        |   1
-rw-r--r--  compiler/rustc_data_structures/src/obligation_forest/graphviz.rs |   4
-rw-r--r--  compiler/rustc_data_structures/src/owning_ref/mod.rs             |   5
-rw-r--r--  compiler/rustc_data_structures/src/owning_ref/tests.rs           |   4
-rw-r--r--  compiler/rustc_data_structures/src/profiling.rs                  |  14
-rw-r--r--  compiler/rustc_data_structures/src/small_c_str.rs                |   6
-rw-r--r--  compiler/rustc_data_structures/src/sorted_map.rs                 |  31
-rw-r--r--  compiler/rustc_data_structures/src/sorted_map/index_map.rs       |   5
-rw-r--r--  compiler/rustc_data_structures/src/sorted_map/tests.rs           |   2
-rw-r--r--  compiler/rustc_data_structures/src/sso/either_iter.rs            |   2
-rw-r--r--  compiler/rustc_data_structures/src/sso/map.rs                    |   1
-rw-r--r--  compiler/rustc_data_structures/src/sso/set.rs                    |   1
-rw-r--r--  compiler/rustc_data_structures/src/stable_hasher.rs              |   2
-rw-r--r--  compiler/rustc_data_structures/src/steal.rs                      |   5
-rw-r--r--  compiler/rustc_data_structures/src/sync.rs                       |   2
-rw-r--r--  compiler/rustc_data_structures/src/tagged_ptr/drop.rs            |   2
-rw-r--r--  compiler/rustc_data_structures/src/tiny_list.rs                  |  17
-rw-r--r--  compiler/rustc_data_structures/src/tiny_list/tests.rs            |   2
-rw-r--r--  compiler/rustc_data_structures/src/transitive_relation.rs        |   4
-rw-r--r--  compiler/rustc_data_structures/src/unord.rs                      | 237
-rw-r--r--  compiler/rustc_data_structures/src/vec_map.rs                    |   4
31 files changed, 313 insertions(+), 99 deletions(-)
diff --git a/compiler/rustc_data_structures/src/base_n.rs b/compiler/rustc_data_structures/src/base_n.rs
index 3c7bea271..4567759c0 100644
--- a/compiler/rustc_data_structures/src/base_n.rs
+++ b/compiler/rustc_data_structures/src/base_n.rs
@@ -9,7 +9,7 @@ pub const MAX_BASE: usize = 64;
pub const ALPHANUMERIC_ONLY: usize = 62;
pub const CASE_INSENSITIVE: usize = 36;
-const BASE_64: &[u8; MAX_BASE as usize] =
+const BASE_64: &[u8; MAX_BASE] =
b"0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ@$";
#[inline]
diff --git a/compiler/rustc_data_structures/src/fingerprint.rs b/compiler/rustc_data_structures/src/fingerprint.rs
index d98f4e43f..b6e866f15 100644
--- a/compiler/rustc_data_structures/src/fingerprint.rs
+++ b/compiler/rustc_data_structures/src/fingerprint.rs
@@ -1,6 +1,5 @@
use crate::stable_hasher;
use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
-use std::convert::TryInto;
use std::hash::{Hash, Hasher};
#[cfg(test)]
diff --git a/compiler/rustc_data_structures/src/frozen.rs b/compiler/rustc_data_structures/src/frozen.rs
index c81e1b124..731905746 100644
--- a/compiler/rustc_data_structures/src/frozen.rs
+++ b/compiler/rustc_data_structures/src/frozen.rs
@@ -36,7 +36,7 @@
//! ```
//!
//! `Frozen` impls `Deref`, so we can ergonomically call methods on `Bar`, but it doesn't `impl
-//! DerefMut`.  Now calling `foo.compute.mutate()` will result in a compile-time error stating that
+//! DerefMut`. Now calling `foo.compute.mutate()` will result in a compile-time error stating that
//! `mutate` requires a mutable reference but we don't have one.
//!
//! # Caveats
diff --git a/compiler/rustc_data_structures/src/fx.rs b/compiler/rustc_data_structures/src/fx.rs
index 0d0c51b68..9fce0e1e6 100644
--- a/compiler/rustc_data_structures/src/fx.rs
+++ b/compiler/rustc_data_structures/src/fx.rs
@@ -11,8 +11,8 @@ pub type IndexEntry<'a, K, V> = indexmap::map::Entry<'a, K, V>;
#[macro_export]
macro_rules! define_id_collections {
($map_name:ident, $set_name:ident, $entry_name:ident, $key:ty) => {
- pub type $map_name<T> = $crate::fx::FxHashMap<$key, T>;
- pub type $set_name = $crate::fx::FxHashSet<$key>;
+ pub type $map_name<T> = $crate::unord::UnordMap<$key, T>;
+ pub type $set_name = $crate::unord::UnordSet<$key>;
pub type $entry_name<'a, T> = $crate::fx::StdEntry<'a, $key, T>;
};
}
diff --git a/compiler/rustc_data_structures/src/graph/dominators/mod.rs b/compiler/rustc_data_structures/src/graph/dominators/mod.rs
index 00913a483..471457f61 100644
--- a/compiler/rustc_data_structures/src/graph/dominators/mod.rs
+++ b/compiler/rustc_data_structures/src/graph/dominators/mod.rs
@@ -22,7 +22,7 @@ struct PreOrderFrame<Iter> {
}
rustc_index::newtype_index! {
- struct PreorderIndex { .. }
+ struct PreorderIndex {}
}
pub fn dominators<G: ControlFlowGraph>(graph: G) -> Dominators<G::Node> {
@@ -135,7 +135,10 @@ pub fn dominators<G: ControlFlowGraph>(graph: G) -> Dominators<G::Node> {
// This loop computes the semi[w] for w.
semi[w] = w;
for v in graph.predecessors(pre_order_to_real[w]) {
- let v = real_to_pre_order[v].unwrap();
+ // Reachable vertices may have unreachable predecessors, so ignore any of them
+ let Some(v) = real_to_pre_order[v] else {
+ continue
+ };
// eval returns a vertex x from which semi[x] is minimum among
// vertices semi[v] +> x *> v.
@@ -268,21 +271,17 @@ pub struct Dominators<N: Idx> {
}
impl<Node: Idx> Dominators<Node> {
- pub fn dummy() -> Self {
- Self { post_order_rank: IndexVec::new(), immediate_dominators: IndexVec::new() }
- }
-
pub fn is_reachable(&self, node: Node) -> bool {
self.immediate_dominators[node].is_some()
}
pub fn immediate_dominator(&self, node: Node) -> Node {
- assert!(self.is_reachable(node), "node {:?} is not reachable", node);
+ assert!(self.is_reachable(node), "node {node:?} is not reachable");
self.immediate_dominators[node].unwrap()
}
pub fn dominators(&self, node: Node) -> Iter<'_, Node> {
- assert!(self.is_reachable(node), "node {:?} is not reachable", node);
+ assert!(self.is_reachable(node), "node {node:?} is not reachable");
Iter { dominators: self, node: Some(node) }
}
@@ -296,7 +295,7 @@ impl<Node: Idx> Dominators<Node> {
/// of two unrelated nodes will also be consistent, but otherwise the order has no
/// meaning.) This method cannot be used to determine if either Node dominates the other.
pub fn rank_partial_cmp(&self, lhs: Node, rhs: Node) -> Option<Ordering> {
- self.post_order_rank[lhs].partial_cmp(&self.post_order_rank[rhs])
+ self.post_order_rank[rhs].partial_cmp(&self.post_order_rank[lhs])
}
}
diff --git a/compiler/rustc_data_structures/src/graph/implementation/tests.rs b/compiler/rustc_data_structures/src/graph/implementation/tests.rs
index e4e4d0d44..dc1ce1747 100644
--- a/compiler/rustc_data_structures/src/graph/implementation/tests.rs
+++ b/compiler/rustc_data_structures/src/graph/implementation/tests.rs
@@ -70,8 +70,8 @@ fn test_adjacent_edges<N: PartialEq + Debug, E: PartialEq + Debug>(
"counter={:?} expected={:?} edge_index={:?} edge={:?}",
counter, expected_incoming[counter], edge_index, edge
);
- match expected_incoming[counter] {
- (ref e, ref n) => {
+ match &expected_incoming[counter] {
+ (e, n) => {
assert!(e == &edge.data);
assert!(n == graph.node_data(edge.source()));
assert!(start_index == edge.target);
@@ -88,8 +88,8 @@ fn test_adjacent_edges<N: PartialEq + Debug, E: PartialEq + Debug>(
"counter={:?} expected={:?} edge_index={:?} edge={:?}",
counter, expected_outgoing[counter], edge_index, edge
);
- match expected_outgoing[counter] {
- (ref e, ref n) => {
+ match &expected_outgoing[counter] {
+ (e, n) => {
assert!(e == &edge.data);
assert!(start_index == edge.source);
assert!(n == graph.node_data(edge.target));
diff --git a/compiler/rustc_data_structures/src/graph/iterate/mod.rs b/compiler/rustc_data_structures/src/graph/iterate/mod.rs
index 57007611a..8a9af300c 100644
--- a/compiler/rustc_data_structures/src/graph/iterate/mod.rs
+++ b/compiler/rustc_data_structures/src/graph/iterate/mod.rs
@@ -317,12 +317,12 @@ where
_node: G::Node,
_prior_status: Option<NodeStatus>,
) -> ControlFlow<Self::BreakVal> {
- ControlFlow::CONTINUE
+ ControlFlow::Continue(())
}
/// Called after all nodes reachable from this one have been examined.
fn node_settled(&mut self, _node: G::Node) -> ControlFlow<Self::BreakVal> {
- ControlFlow::CONTINUE
+ ControlFlow::Continue(())
}
/// Behave as if no edges exist from `source` to `target`.
@@ -346,8 +346,8 @@ where
prior_status: Option<NodeStatus>,
) -> ControlFlow<Self::BreakVal> {
match prior_status {
- Some(NodeStatus::Visited) => ControlFlow::BREAK,
- _ => ControlFlow::CONTINUE,
+ Some(NodeStatus::Visited) => ControlFlow::Break(()),
+ _ => ControlFlow::Continue(()),
}
}
}
diff --git a/compiler/rustc_data_structures/src/graph/scc/mod.rs b/compiler/rustc_data_structures/src/graph/scc/mod.rs
index 7099ca7eb..c8e66eb67 100644
--- a/compiler/rustc_data_structures/src/graph/scc/mod.rs
+++ b/compiler/rustc_data_structures/src/graph/scc/mod.rs
@@ -9,7 +9,6 @@ use crate::fx::FxHashSet;
use crate::graph::vec_graph::VecGraph;
use crate::graph::{DirectedGraph, GraphSuccessors, WithNumEdges, WithNumNodes, WithSuccessors};
use rustc_index::vec::{Idx, IndexVec};
-use std::cmp::Ord;
use std::ops::Range;
#[cfg(test)]
@@ -234,10 +233,9 @@ where
.map(G::Node::new)
.map(|node| match this.start_walk_from(node) {
WalkReturn::Complete { scc_index } => scc_index,
- WalkReturn::Cycle { min_depth } => panic!(
- "`start_walk_node({:?})` returned cycle with depth {:?}",
- node, min_depth
- ),
+ WalkReturn::Cycle { min_depth } => {
+ panic!("`start_walk_node({node:?})` returned cycle with depth {min_depth:?}")
+ }
})
.collect();
@@ -273,8 +271,7 @@ where
NodeState::NotVisited => return None,
NodeState::InCycleWith { parent } => panic!(
- "`find_state` returned `InCycleWith({:?})`, which ought to be impossible",
- parent
+ "`find_state` returned `InCycleWith({parent:?})`, which ought to be impossible"
),
})
}
@@ -370,7 +367,7 @@ where
previous_node = previous;
}
// Only InCycleWith nodes were added to the reverse linked list.
- other => panic!("Invalid previous link while compressing cycle: {:?}", other),
+ other => panic!("Invalid previous link while compressing cycle: {other:?}"),
}
debug!("find_state: parent_state = {:?}", node_state);
@@ -395,7 +392,7 @@ where
// NotVisited can not be part of a cycle since it should
// have instead gotten explored.
NodeState::NotVisited | NodeState::InCycleWith { .. } => {
- panic!("invalid parent state: {:?}", node_state)
+ panic!("invalid parent state: {node_state:?}")
}
}
}
diff --git a/compiler/rustc_data_structures/src/graph/scc/tests.rs b/compiler/rustc_data_structures/src/graph/scc/tests.rs
index 9940fee60..820a70fc8 100644
--- a/compiler/rustc_data_structures/src/graph/scc/tests.rs
+++ b/compiler/rustc_data_structures/src/graph/scc/tests.rs
@@ -84,7 +84,7 @@ fn test_find_state_2() {
// 0 -> 1 -> 2 -> 1
//
// and at this point detect a cycle. The state of 2 will thus be
- // `InCycleWith { 1 }`.  We will then visit the 1 -> 3 edge, which
+ // `InCycleWith { 1 }`. We will then visit the 1 -> 3 edge, which
// will attempt to visit 0 as well, thus going to the state
// `InCycleWith { 0 }`. Finally, node 1 will complete; the lowest
// depth of any successor was 3 which had depth 0, and thus it
diff --git a/compiler/rustc_data_structures/src/graph/vec_graph/mod.rs b/compiler/rustc_data_structures/src/graph/vec_graph/mod.rs
index e8efbd09a..94232bb76 100644
--- a/compiler/rustc_data_structures/src/graph/vec_graph/mod.rs
+++ b/compiler/rustc_data_structures/src/graph/vec_graph/mod.rs
@@ -1,5 +1,3 @@
-use std::cmp::Ord;
-
use crate::graph::{DirectedGraph, GraphSuccessors, WithNumEdges, WithNumNodes, WithSuccessors};
use rustc_index::vec::{Idx, IndexVec};
diff --git a/compiler/rustc_data_structures/src/lib.rs b/compiler/rustc_data_structures/src/lib.rs
index 3a2000233..954e84c30 100644
--- a/compiler/rustc_data_structures/src/lib.rs
+++ b/compiler/rustc_data_structures/src/lib.rs
@@ -11,7 +11,6 @@
#![feature(associated_type_bounds)]
#![feature(auto_traits)]
#![feature(cell_leak)]
-#![feature(control_flow_enum)]
#![feature(extend_one)]
#![feature(hash_raw_entry)]
#![feature(hasher_prefixfree_extras)]
diff --git a/compiler/rustc_data_structures/src/obligation_forest/graphviz.rs b/compiler/rustc_data_structures/src/obligation_forest/graphviz.rs
index 3a268e4b4..4b6aa1165 100644
--- a/compiler/rustc_data_structures/src/obligation_forest/graphviz.rs
+++ b/compiler/rustc_data_structures/src/obligation_forest/graphviz.rs
@@ -30,7 +30,7 @@ impl<O: ForestObligation> ObligationForest<O> {
let counter = COUNTER.fetch_add(1, Ordering::AcqRel);
- let file_path = dir.as_ref().join(format!("{:010}_{}.gv", counter, description));
+ let file_path = dir.as_ref().join(format!("{counter:010}_{description}.gv"));
let mut gv_file = BufWriter::new(File::create(file_path).unwrap());
@@ -47,7 +47,7 @@ impl<'a, O: ForestObligation + 'a> dot::Labeller<'a> for &'a ObligationForest<O>
}
fn node_id(&self, index: &Self::Node) -> dot::Id<'_> {
- dot::Id::new(format!("obligation_{}", index)).unwrap()
+ dot::Id::new(format!("obligation_{index}")).unwrap()
}
fn node_label(&self, index: &Self::Node) -> dot::LabelText<'_> {
diff --git a/compiler/rustc_data_structures/src/owning_ref/mod.rs b/compiler/rustc_data_structures/src/owning_ref/mod.rs
index 980a540cc..d1d92b905 100644
--- a/compiler/rustc_data_structures/src/owning_ref/mod.rs
+++ b/compiler/rustc_data_structures/src/owning_ref/mod.rs
@@ -867,11 +867,9 @@ where
/////////////////////////////////////////////////////////////////////////////
use std::borrow::Borrow;
-use std::cmp::{Eq, Ord, Ordering, PartialEq, PartialOrd};
-use std::convert::From;
+use std::cmp::Ordering;
use std::fmt::{self, Debug};
use std::hash::{Hash, Hasher};
-use std::marker::{Send, Sync};
impl<O, T: ?Sized> Deref for OwningRef<O, T> {
type Target = T;
@@ -1096,7 +1094,6 @@ where
// std types integration and convenience type defs
/////////////////////////////////////////////////////////////////////////////
-use std::boxed::Box;
use std::cell::{Ref, RefCell, RefMut};
use std::rc::Rc;
use std::sync::Arc;
diff --git a/compiler/rustc_data_structures/src/owning_ref/tests.rs b/compiler/rustc_data_structures/src/owning_ref/tests.rs
index 320c03d51..a9b187c4c 100644
--- a/compiler/rustc_data_structures/src/owning_ref/tests.rs
+++ b/compiler/rustc_data_structures/src/owning_ref/tests.rs
@@ -3,7 +3,7 @@
mod owning_ref {
use super::super::OwningRef;
use super::super::{BoxRef, Erased, ErasedBoxRef, RcRef};
- use std::cmp::{Ord, Ordering, PartialEq, PartialOrd};
+ use std::cmp::Ordering;
use std::collections::hash_map::DefaultHasher;
use std::collections::HashMap;
use std::hash::{Hash, Hasher};
@@ -368,7 +368,7 @@ mod owning_handle {
mod owning_ref_mut {
use super::super::BoxRef;
use super::super::{BoxRefMut, Erased, ErasedBoxRefMut, OwningRefMut};
- use std::cmp::{Ord, Ordering, PartialEq, PartialOrd};
+ use std::cmp::Ordering;
use std::collections::hash_map::DefaultHasher;
use std::collections::HashMap;
use std::hash::{Hash, Hasher};
diff --git a/compiler/rustc_data_structures/src/profiling.rs b/compiler/rustc_data_structures/src/profiling.rs
index aa7a01eed..393f17390 100644
--- a/compiler/rustc_data_structures/src/profiling.rs
+++ b/compiler/rustc_data_structures/src/profiling.rs
@@ -86,7 +86,6 @@ use crate::fx::FxHashMap;
use std::borrow::Borrow;
use std::collections::hash_map::Entry;
-use std::convert::Into;
use std::error::Error;
use std::fs;
use std::path::Path;
@@ -206,10 +205,7 @@ impl SelfProfilerRef {
/// VerboseTimingGuard returned from this call is dropped. In addition to recording
/// a measureme event, "verbose" generic activities also print a timing entry to
/// stderr if the compiler is invoked with -Ztime-passes.
- pub fn verbose_generic_activity<'a>(
- &'a self,
- event_label: &'static str,
- ) -> VerboseTimingGuard<'a> {
+ pub fn verbose_generic_activity(&self, event_label: &'static str) -> VerboseTimingGuard<'_> {
let message =
if self.print_verbose_generic_activities { Some(event_label.to_owned()) } else { None };
@@ -217,11 +213,11 @@ impl SelfProfilerRef {
}
/// Like `verbose_generic_activity`, but with an extra arg.
- pub fn verbose_generic_activity_with_arg<'a, A>(
- &'a self,
+ pub fn verbose_generic_activity_with_arg<A>(
+ &self,
event_label: &'static str,
event_arg: A,
- ) -> VerboseTimingGuard<'a>
+ ) -> VerboseTimingGuard<'_>
where
A: Borrow<str> + Into<String>,
{
@@ -549,7 +545,7 @@ impl SelfProfiler {
// length can behave as a source of entropy for heap addresses, when
// ASLR is disabled and the heap is otherwise deterministic.
let pid: u32 = process::id();
- let filename = format!("{}-{:07}.rustc_profile", crate_name, pid);
+ let filename = format!("{crate_name}-{pid:07}.rustc_profile");
let path = output_directory.join(&filename);
let profiler =
Profiler::with_counter(&path, measureme::counters::Counter::by_name(counter_name)?)?;
diff --git a/compiler/rustc_data_structures/src/small_c_str.rs b/compiler/rustc_data_structures/src/small_c_str.rs
index 3a8ab8ff9..719e4e3d9 100644
--- a/compiler/rustc_data_structures/src/small_c_str.rs
+++ b/compiler/rustc_data_structures/src/small_c_str.rs
@@ -30,7 +30,7 @@ impl SmallCStr {
SmallVec::from_vec(data)
};
if let Err(e) = ffi::CStr::from_bytes_with_nul(&data) {
- panic!("The string \"{}\" cannot be converted into a CStr: {}", s, e);
+ panic!("The string \"{s}\" cannot be converted into a CStr: {e}");
}
SmallCStr { data }
}
@@ -39,7 +39,7 @@ impl SmallCStr {
pub fn new_with_nul(s: &str) -> SmallCStr {
let b = s.as_bytes();
if let Err(e) = ffi::CStr::from_bytes_with_nul(b) {
- panic!("The string \"{}\" cannot be converted into a CStr: {}", s, e);
+ panic!("The string \"{s}\" cannot be converted into a CStr: {e}");
}
SmallCStr { data: SmallVec::from_slice(s.as_bytes()) }
}
@@ -74,7 +74,7 @@ impl<'a> FromIterator<&'a str> for SmallCStr {
iter.into_iter().flat_map(|s| s.as_bytes()).copied().collect::<SmallVec<_>>();
data.push(0);
if let Err(e) = ffi::CStr::from_bytes_with_nul(&data) {
- panic!("The iterator {:?} cannot be converted into a CStr: {}", data, e);
+ panic!("The iterator {data:?} cannot be converted into a CStr: {e}");
}
Self { data }
}
diff --git a/compiler/rustc_data_structures/src/sorted_map.rs b/compiler/rustc_data_structures/src/sorted_map.rs
index d13313dfd..9409057d4 100644
--- a/compiler/rustc_data_structures/src/sorted_map.rs
+++ b/compiler/rustc_data_structures/src/sorted_map.rs
@@ -1,7 +1,6 @@
use crate::stable_hasher::{HashStable, StableHasher, StableOrd};
use std::borrow::Borrow;
-use std::cmp::Ordering;
-use std::iter::FromIterator;
+use std::fmt::Debug;
use std::mem;
use std::ops::{Bound, Index, IndexMut, RangeBounds};
@@ -17,7 +16,7 @@ pub use index_map::SortedIndexMultiMap;
/// stores data in a more compact way. It also supports accessing contiguous
/// ranges of elements as a slice, and slices of already sorted elements can be
/// inserted efficiently.
-#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Encodable, Decodable)]
+#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Encodable, Decodable)]
pub struct SortedMap<K, V> {
data: Vec<(K, V)>,
}
@@ -127,13 +126,13 @@ impl<K: Ord, V> SortedMap<K, V> {
/// Iterate over the keys, sorted
#[inline]
pub fn keys(&self) -> impl Iterator<Item = &K> + ExactSizeIterator + DoubleEndedIterator {
- self.data.iter().map(|&(ref k, _)| k)
+ self.data.iter().map(|(k, _)| k)
}
/// Iterate over values, sorted by key
#[inline]
pub fn values(&self) -> impl Iterator<Item = &V> + ExactSizeIterator + DoubleEndedIterator {
- self.data.iter().map(|&(_, ref v)| v)
+ self.data.iter().map(|(_, v)| v)
}
#[inline]
@@ -171,7 +170,7 @@ impl<K: Ord, V> SortedMap<K, V> {
where
F: Fn(&mut K),
{
- self.data.iter_mut().map(|&mut (ref mut k, _)| k).for_each(f);
+ self.data.iter_mut().map(|(k, _)| k).for_each(f);
}
/// Inserts a presorted range of elements into the map. If the range can be
@@ -223,7 +222,7 @@ impl<K: Ord, V> SortedMap<K, V> {
K: Borrow<Q>,
Q: Ord + ?Sized,
{
- self.data.binary_search_by(|&(ref x, _)| x.borrow().cmp(key))
+ self.data.binary_search_by(|(x, _)| x.borrow().cmp(key))
}
#[inline]
@@ -232,10 +231,10 @@ impl<K: Ord, V> SortedMap<K, V> {
R: RangeBounds<K>,
{
let start = match range.start_bound() {
- Bound::Included(ref k) => match self.lookup_index_for(k) {
+ Bound::Included(k) => match self.lookup_index_for(k) {
Ok(index) | Err(index) => index,
},
- Bound::Excluded(ref k) => match self.lookup_index_for(k) {
+ Bound::Excluded(k) => match self.lookup_index_for(k) {
Ok(index) => index + 1,
Err(index) => index,
},
@@ -243,11 +242,11 @@ impl<K: Ord, V> SortedMap<K, V> {
};
let end = match range.end_bound() {
- Bound::Included(ref k) => match self.lookup_index_for(k) {
+ Bound::Included(k) => match self.lookup_index_for(k) {
Ok(index) => index + 1,
Err(index) => index,
},
- Bound::Excluded(ref k) => match self.lookup_index_for(k) {
+ Bound::Excluded(k) => match self.lookup_index_for(k) {
Ok(index) | Err(index) => index,
},
Bound::Unbounded => self.data.len(),
@@ -301,8 +300,8 @@ impl<K: Ord, V> FromIterator<(K, V)> for SortedMap<K, V> {
fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self {
let mut data: Vec<(K, V)> = iter.into_iter().collect();
- data.sort_unstable_by(|&(ref k1, _), &(ref k2, _)| k1.cmp(k2));
- data.dedup_by(|&mut (ref k1, _), &mut (ref k2, _)| k1.cmp(k2) == Ordering::Equal);
+ data.sort_unstable_by(|(k1, _), (k2, _)| k1.cmp(k2));
+ data.dedup_by(|(k1, _), (k2, _)| k1 == k2);
SortedMap { data }
}
@@ -315,5 +314,11 @@ impl<K: HashStable<CTX> + StableOrd, V: HashStable<CTX>, CTX> HashStable<CTX> fo
}
}
+impl<K: Debug, V: Debug> Debug for SortedMap<K, V> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ f.debug_map().entries(self.data.iter().map(|(a, b)| (a, b))).finish()
+ }
+}
+
#[cfg(test)]
mod tests;
diff --git a/compiler/rustc_data_structures/src/sorted_map/index_map.rs b/compiler/rustc_data_structures/src/sorted_map/index_map.rs
index c2f0ae328..814e7c7fb 100644
--- a/compiler/rustc_data_structures/src/sorted_map/index_map.rs
+++ b/compiler/rustc_data_structures/src/sorted_map/index_map.rs
@@ -1,7 +1,6 @@
//! A variant of `SortedMap` that preserves insertion order.
use std::hash::{Hash, Hasher};
-use std::iter::FromIterator;
use crate::stable_hasher::{HashStable, StableHasher};
use rustc_index::vec::{Idx, IndexVec};
@@ -64,13 +63,13 @@ impl<I: Idx, K: Ord, V> SortedIndexMultiMap<I, K, V> {
/// Returns an iterator over the items in the map in insertion order.
#[inline]
pub fn iter(&self) -> impl '_ + DoubleEndedIterator<Item = (&K, &V)> {
- self.items.iter().map(|(ref k, ref v)| (k, v))
+ self.items.iter().map(|(k, v)| (k, v))
}
/// Returns an iterator over the items in the map in insertion order along with their indices.
#[inline]
pub fn iter_enumerated(&self) -> impl '_ + DoubleEndedIterator<Item = (I, (&K, &V))> {
- self.items.iter_enumerated().map(|(i, (ref k, ref v))| (i, (k, v)))
+ self.items.iter_enumerated().map(|(i, (k, v))| (i, (k, v)))
}
/// Returns the item in the map with the given index.
diff --git a/compiler/rustc_data_structures/src/sorted_map/tests.rs b/compiler/rustc_data_structures/src/sorted_map/tests.rs
index 1e977d709..3cc250862 100644
--- a/compiler/rustc_data_structures/src/sorted_map/tests.rs
+++ b/compiler/rustc_data_structures/src/sorted_map/tests.rs
@@ -6,7 +6,7 @@ fn test_sorted_index_multi_map() {
let set: SortedIndexMultiMap<usize, _, _> = entries.iter().copied().collect();
// Insertion order is preserved.
- assert!(entries.iter().map(|(ref k, ref v)| (k, v)).eq(set.iter()));
+ assert!(entries.iter().map(|(k, v)| (k, v)).eq(set.iter()));
// Indexing
for (i, expect) in entries.iter().enumerate() {
diff --git a/compiler/rustc_data_structures/src/sso/either_iter.rs b/compiler/rustc_data_structures/src/sso/either_iter.rs
index 131eeef45..bca6c0955 100644
--- a/compiler/rustc_data_structures/src/sso/either_iter.rs
+++ b/compiler/rustc_data_structures/src/sso/either_iter.rs
@@ -1,7 +1,5 @@
use std::fmt;
-use std::iter::ExactSizeIterator;
use std::iter::FusedIterator;
-use std::iter::Iterator;
/// Iterator which may contain instance of
/// one of two specific implementations.
diff --git a/compiler/rustc_data_structures/src/sso/map.rs b/compiler/rustc_data_structures/src/sso/map.rs
index ec6a62016..7cdac5819 100644
--- a/compiler/rustc_data_structures/src/sso/map.rs
+++ b/compiler/rustc_data_structures/src/sso/map.rs
@@ -3,7 +3,6 @@ use crate::fx::FxHashMap;
use arrayvec::ArrayVec;
use std::fmt;
use std::hash::Hash;
-use std::iter::FromIterator;
use std::ops::Index;
// For pointer-sized arguments arrays
diff --git a/compiler/rustc_data_structures/src/sso/set.rs b/compiler/rustc_data_structures/src/sso/set.rs
index 406f0270d..a4b401389 100644
--- a/compiler/rustc_data_structures/src/sso/set.rs
+++ b/compiler/rustc_data_structures/src/sso/set.rs
@@ -1,6 +1,5 @@
use std::fmt;
use std::hash::Hash;
-use std::iter::FromIterator;
use super::map::SsoHashMap;
diff --git a/compiler/rustc_data_structures/src/stable_hasher.rs b/compiler/rustc_data_structures/src/stable_hasher.rs
index 1a728f82f..ae4836645 100644
--- a/compiler/rustc_data_structures/src/stable_hasher.rs
+++ b/compiler/rustc_data_structures/src/stable_hasher.rs
@@ -223,7 +223,7 @@ pub trait ToStableHashKey<HCX> {
/// stable across compilation session boundaries. More formally:
///
/// ```txt
-/// Ord::cmp(a1, b1) == Ord:cmp(a2, b2)
+/// Ord::cmp(a1, b1) == Ord::cmp(a2, b2)
/// where a2 = decode(encode(a1, context1), context2)
/// b2 = decode(encode(b1, context1), context2)
/// ```
diff --git a/compiler/rustc_data_structures/src/steal.rs b/compiler/rustc_data_structures/src/steal.rs
index a3ece6550..9a0fd5267 100644
--- a/compiler/rustc_data_structures/src/steal.rs
+++ b/compiler/rustc_data_structures/src/steal.rs
@@ -41,6 +41,11 @@ impl<T> Steal<T> {
}
#[track_caller]
+ pub fn get_mut(&mut self) -> &mut T {
+ self.value.get_mut().as_mut().expect("attempt to read from stolen value")
+ }
+
+ #[track_caller]
pub fn steal(&self) -> T {
let value_ref = &mut *self.value.try_write().expect("stealing value which is locked");
let value = value_ref.take();
diff --git a/compiler/rustc_data_structures/src/sync.rs b/compiler/rustc_data_structures/src/sync.rs
index e4f47b22a..ed5341c40 100644
--- a/compiler/rustc_data_structures/src/sync.rs
+++ b/compiler/rustc_data_structures/src/sync.rs
@@ -138,7 +138,7 @@ cfg_if! {
}
}
- pub use std::iter::Iterator as ParallelIterator;
+ pub use Iterator as ParallelIterator;
pub fn par_iter<T: IntoIterator>(t: T) -> T::IntoIter {
t.into_iter()
diff --git a/compiler/rustc_data_structures/src/tagged_ptr/drop.rs b/compiler/rustc_data_structures/src/tagged_ptr/drop.rs
index d44ccd368..b0315c93d 100644
--- a/compiler/rustc_data_structures/src/tagged_ptr/drop.rs
+++ b/compiler/rustc_data_structures/src/tagged_ptr/drop.rs
@@ -76,7 +76,7 @@ where
fn drop(&mut self) {
// No need to drop the tag, as it's Copy
unsafe {
- std::mem::drop(P::from_usize(self.raw.pointer_raw()));
+ drop(P::from_usize(self.raw.pointer_raw()));
}
}
}
diff --git a/compiler/rustc_data_structures/src/tiny_list.rs b/compiler/rustc_data_structures/src/tiny_list.rs
index 9b07f8684..11a408f21 100644
--- a/compiler/rustc_data_structures/src/tiny_list.rs
+++ b/compiler/rustc_data_structures/src/tiny_list.rs
@@ -37,9 +37,9 @@ impl<T: PartialEq> TinyList<T> {
#[inline]
pub fn remove(&mut self, data: &T) -> bool {
- self.head = match self.head {
- Some(ref mut head) if head.data == *data => head.next.take().map(|x| *x),
- Some(ref mut head) => return head.remove_next(data),
+ self.head = match &mut self.head {
+ Some(head) if head.data == *data => head.next.take().map(|x| *x),
+ Some(head) => return head.remove_next(data),
None => return false,
};
true
@@ -48,7 +48,7 @@ impl<T: PartialEq> TinyList<T> {
#[inline]
pub fn contains(&self, data: &T) -> bool {
let mut elem = self.head.as_ref();
- while let Some(ref e) = elem {
+ while let Some(e) = elem {
if &e.data == data {
return true;
}
@@ -65,15 +65,14 @@ struct Element<T> {
}
impl<T: PartialEq> Element<T> {
- fn remove_next(&mut self, data: &T) -> bool {
- let mut n = self;
+ fn remove_next(mut self: &mut Self, data: &T) -> bool {
loop {
- match n.next {
+ match self.next {
Some(ref mut next) if next.data == *data => {
- n.next = next.next.take();
+ self.next = next.next.take();
return true;
}
- Some(ref mut next) => n = next,
+ Some(ref mut next) => self = next,
None => return false,
}
}
diff --git a/compiler/rustc_data_structures/src/tiny_list/tests.rs b/compiler/rustc_data_structures/src/tiny_list/tests.rs
index c0334d2e2..4b95e62be 100644
--- a/compiler/rustc_data_structures/src/tiny_list/tests.rs
+++ b/compiler/rustc_data_structures/src/tiny_list/tests.rs
@@ -6,7 +6,7 @@ use test::{black_box, Bencher};
impl<T> TinyList<T> {
fn len(&self) -> usize {
let (mut elem, mut count) = (self.head.as_ref(), 0);
- while let Some(ref e) = elem {
+ while let Some(e) = elem {
count += 1;
elem = e.next.as_deref();
}
diff --git a/compiler/rustc_data_structures/src/transitive_relation.rs b/compiler/rustc_data_structures/src/transitive_relation.rs
index cf6162038..cd391fe35 100644
--- a/compiler/rustc_data_structures/src/transitive_relation.rs
+++ b/compiler/rustc_data_structures/src/transitive_relation.rs
@@ -199,7 +199,7 @@ impl<T: Eq + Hash + Copy> TransitiveRelation<T> {
/// Viewing the relation as a graph, computes the "mutual
/// immediate postdominator" of a set of points (if one
/// exists). See `postdom_upper_bound` for details.
- pub fn mutual_immediate_postdominator<'a>(&'a self, mut mubs: Vec<T>) -> Option<T> {
+ pub fn mutual_immediate_postdominator(&self, mut mubs: Vec<T>) -> Option<T> {
loop {
match mubs.len() {
0 => return None,
@@ -250,7 +250,7 @@ impl<T: Eq + Hash + Copy> TransitiveRelation<T> {
// values. So here is what we do:
//
// 1. Find the vector `[X | a < X && b < X]` of all values
- // `X` where `a < X` and `b < X`.  In terms of the
+ // `X` where `a < X` and `b < X`. In terms of the
// graph, this means all values reachable from both `a`
// and `b`. Note that this vector is also a set, but we
// use the term vector because the order matters
diff --git a/compiler/rustc_data_structures/src/unord.rs b/compiler/rustc_data_structures/src/unord.rs
index c015f1232..f35f18e51 100644
--- a/compiler/rustc_data_structures/src/unord.rs
+++ b/compiler/rustc_data_structures/src/unord.rs
@@ -6,13 +6,15 @@ use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::SmallVec;
use std::{
borrow::Borrow,
+ collections::hash_map::Entry,
hash::Hash,
iter::{Product, Sum},
+ ops::Index,
};
use crate::{
fingerprint::Fingerprint,
- stable_hasher::{HashStable, StableHasher, ToStableHashKey},
+ stable_hasher::{HashStable, StableHasher, StableOrd, ToStableHashKey},
};
/// `UnordItems` is the order-less version of `Iterator`. It only contains methods
@@ -38,17 +40,17 @@ impl<T, I: Iterator<Item = T>> UnordItems<T, I> {
}
#[inline]
- pub fn all<U, F: Fn(T) -> bool>(mut self, f: F) -> bool {
+ pub fn all<F: Fn(T) -> bool>(mut self, f: F) -> bool {
self.0.all(f)
}
#[inline]
- pub fn any<U, F: Fn(T) -> bool>(mut self, f: F) -> bool {
+ pub fn any<F: Fn(T) -> bool>(mut self, f: F) -> bool {
self.0.any(f)
}
#[inline]
- pub fn filter<U, F: Fn(&T) -> bool>(self, f: F) -> UnordItems<T, impl Iterator<Item = T>> {
+ pub fn filter<F: Fn(&T) -> bool>(self, f: F) -> UnordItems<T, impl Iterator<Item = T>> {
UnordItems(self.0.filter(f))
}
@@ -96,6 +98,15 @@ impl<T, I: Iterator<Item = T>> UnordItems<T, I> {
pub fn count(self) -> usize {
self.0.count()
}
+
+ #[inline]
+ pub fn flat_map<U, F, O>(self, f: F) -> UnordItems<O, impl Iterator<Item = O>>
+ where
+ U: IntoIterator<Item = O>,
+ F: Fn(T) -> U,
+ {
+ UnordItems(self.0.flat_map(f))
+ }
}
impl<'a, T: Clone + 'a, I: Iterator<Item = &'a T>> UnordItems<&'a T, I> {
@@ -147,6 +158,7 @@ pub struct UnordSet<V: Eq + Hash> {
}
impl<V: Eq + Hash> Default for UnordSet<V> {
+ #[inline]
fn default() -> Self {
Self { inner: FxHashSet::default() }
}
@@ -178,6 +190,15 @@ impl<V: Eq + Hash> UnordSet<V> {
}
#[inline]
+ pub fn remove<Q: ?Sized>(&mut self, k: &Q) -> bool
+ where
+ V: Borrow<Q>,
+ Q: Hash + Eq,
+ {
+ self.inner.remove(k)
+ }
+
+ #[inline]
pub fn items<'a>(&'a self) -> UnordItems<&'a V, impl Iterator<Item = &'a V>> {
UnordItems(self.inner.iter())
}
@@ -187,20 +208,75 @@ impl<V: Eq + Hash> UnordSet<V> {
UnordItems(self.inner.into_iter())
}
+ /// Returns the items of this set in stable sort order (as defined by `ToStableHashKey`).
+ ///
+ /// The `cache_sort_key` parameter controls if [slice::sort_by_cached_key] or
+ /// [slice::sort_unstable_by_key] will be used for sorting the vec. Use
+ /// `cache_sort_key` when the [ToStableHashKey::to_stable_hash_key] implementation
+ /// for `V` is expensive (e.g. a `DefId -> DefPathHash` lookup).
+ #[inline]
+ pub fn to_sorted<HCX>(&self, hcx: &HCX, cache_sort_key: bool) -> Vec<&V>
+ where
+ V: ToStableHashKey<HCX>,
+ {
+ to_sorted_vec(hcx, self.inner.iter(), cache_sort_key, |&x| x)
+ }
+
+ /// Returns the items of this set in stable sort order (as defined by
+ /// `StableOrd`). This method is much more efficient than
+ /// `into_sorted` because it does not need to transform keys to their
+ /// `ToStableHashKey` equivalent.
+ #[inline]
+ pub fn to_sorted_stable_ord(&self) -> Vec<V>
+ where
+ V: Ord + StableOrd + Copy,
+ {
+ let mut items: Vec<V> = self.inner.iter().copied().collect();
+ items.sort_unstable();
+ items
+ }
+
+ /// Returns the items of this set in stable sort order (as defined by `ToStableHashKey`).
+ ///
+ /// The `cache_sort_key` parameter controls if [slice::sort_by_cached_key] or
+ /// [slice::sort_unstable_by_key] will be used for sorting the vec. Use
+ /// `cache_sort_key` when the [ToStableHashKey::to_stable_hash_key] implementation
+ /// for `V` is expensive (e.g. a `DefId -> DefPathHash` lookup).
+ #[inline]
+ pub fn into_sorted<HCX>(self, hcx: &HCX, cache_sort_key: bool) -> Vec<V>
+ where
+ V: ToStableHashKey<HCX>,
+ {
+ to_sorted_vec(hcx, self.inner.into_iter(), cache_sort_key, |x| x)
+ }
+
// We can safely extend this UnordSet from a set of unordered values because that
// won't expose the internal ordering anywhere.
#[inline]
pub fn extend<I: Iterator<Item = V>>(&mut self, items: UnordItems<V, I>) {
self.inner.extend(items.0)
}
+
+ #[inline]
+ pub fn clear(&mut self) {
+ self.inner.clear();
+ }
}
impl<V: Hash + Eq> Extend<V> for UnordSet<V> {
+ #[inline]
fn extend<T: IntoIterator<Item = V>>(&mut self, iter: T) {
self.inner.extend(iter)
}
}
+impl<V: Hash + Eq> FromIterator<V> for UnordSet<V> {
+ #[inline]
+ fn from_iter<T: IntoIterator<Item = V>>(iter: T) -> Self {
+ UnordSet { inner: FxHashSet::from_iter(iter) }
+ }
+}
+
impl<HCX, V: Hash + Eq + HashStable<HCX>> HashStable<HCX> for UnordSet<V> {
#[inline]
fn hash_stable(&self, hcx: &mut HCX, hasher: &mut StableHasher) {
@@ -223,17 +299,33 @@ pub struct UnordMap<K: Eq + Hash, V> {
}
impl<K: Eq + Hash, V> Default for UnordMap<K, V> {
+ #[inline]
fn default() -> Self {
Self { inner: FxHashMap::default() }
}
}
impl<K: Hash + Eq, V> Extend<(K, V)> for UnordMap<K, V> {
+ #[inline]
fn extend<T: IntoIterator<Item = (K, V)>>(&mut self, iter: T) {
self.inner.extend(iter)
}
}
+impl<K: Hash + Eq, V> FromIterator<(K, V)> for UnordMap<K, V> {
+ #[inline]
+ fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self {
+ UnordMap { inner: FxHashMap::from_iter(iter) }
+ }
+}
+
+impl<K: Hash + Eq, V, I: Iterator<Item = (K, V)>> From<UnordItems<(K, V), I>> for UnordMap<K, V> {
+ #[inline]
+ fn from(items: UnordItems<(K, V), I>) -> Self {
+ UnordMap { inner: FxHashMap::from_iter(items.0) }
+ }
+}
+
impl<K: Eq + Hash, V> UnordMap<K, V> {
#[inline]
pub fn len(&self) -> usize {
@@ -255,6 +347,43 @@ impl<K: Eq + Hash, V> UnordMap<K, V> {
}
#[inline]
+ pub fn is_empty(&self) -> bool {
+ self.inner.is_empty()
+ }
+
+ #[inline]
+ pub fn entry(&mut self, key: K) -> Entry<'_, K, V> {
+ self.inner.entry(key)
+ }
+
+ #[inline]
+ pub fn get<Q: ?Sized>(&self, k: &Q) -> Option<&V>
+ where
+ K: Borrow<Q>,
+ Q: Hash + Eq,
+ {
+ self.inner.get(k)
+ }
+
+ #[inline]
+ pub fn get_mut<Q: ?Sized>(&mut self, k: &Q) -> Option<&mut V>
+ where
+ K: Borrow<Q>,
+ Q: Hash + Eq,
+ {
+ self.inner.get_mut(k)
+ }
+
+ #[inline]
+ pub fn remove<Q: ?Sized>(&mut self, k: &Q) -> Option<V>
+ where
+ K: Borrow<Q>,
+ Q: Hash + Eq,
+ {
+ self.inner.remove(k)
+ }
+
+ #[inline]
pub fn items<'a>(&'a self) -> UnordItems<(&'a K, &'a V), impl Iterator<Item = (&'a K, &'a V)>> {
UnordItems(self.inner.iter())
}
@@ -270,6 +399,77 @@ impl<K: Eq + Hash, V> UnordMap<K, V> {
pub fn extend<I: Iterator<Item = (K, V)>>(&mut self, items: UnordItems<(K, V), I>) {
self.inner.extend(items.0)
}
+
+ /// Returns the entries of this map in stable sort order (as defined by `ToStableHashKey`).
+ ///
+ /// The `cache_sort_key` parameter controls if [slice::sort_by_cached_key] or
+ /// [slice::sort_unstable_by_key] will be used for sorting the vec. Use
+ /// `cache_sort_key` when the [ToStableHashKey::to_stable_hash_key] implementation
+ /// for `K` is expensive (e.g. a `DefId -> DefPathHash` lookup).
+ #[inline]
+ pub fn to_sorted<HCX>(&self, hcx: &HCX, cache_sort_key: bool) -> Vec<(&K, &V)>
+ where
+ K: ToStableHashKey<HCX>,
+ {
+ to_sorted_vec(hcx, self.inner.iter(), cache_sort_key, |&(k, _)| k)
+ }
+
+ /// Returns the entries of this map in stable sort order (as defined by `StableOrd`).
+ /// This method can be much more efficient than `into_sorted` because it does not need
+ /// to transform keys to their `ToStableHashKey` equivalent.
+ #[inline]
+ pub fn to_sorted_stable_ord(&self) -> Vec<(K, &V)>
+ where
+ K: Ord + StableOrd + Copy,
+ {
+ let mut items: Vec<(K, &V)> = self.inner.iter().map(|(&k, v)| (k, v)).collect();
+ items.sort_unstable_by_key(|&(k, _)| k);
+ items
+ }
+
+ /// Returns the entries of this map in stable sort order (as defined by `ToStableHashKey`).
+ ///
+ /// The `cache_sort_key` parameter controls if [slice::sort_by_cached_key] or
+ /// [slice::sort_unstable_by_key] will be used for sorting the vec. Use
+ /// `cache_sort_key` when the [ToStableHashKey::to_stable_hash_key] implementation
+ /// for `K` is expensive (e.g. a `DefId -> DefPathHash` lookup).
+ #[inline]
+ pub fn into_sorted<HCX>(self, hcx: &HCX, cache_sort_key: bool) -> Vec<(K, V)>
+ where
+ K: ToStableHashKey<HCX>,
+ {
+ to_sorted_vec(hcx, self.inner.into_iter(), cache_sort_key, |(k, _)| k)
+ }
+
+ /// Returns the values of this map in stable sort order (as defined by K's
+ /// `ToStableHashKey` implementation).
+ ///
+ /// The `cache_sort_key` parameter controls if [slice::sort_by_cached_key] or
+ /// [slice::sort_unstable_by_key] will be used for sorting the vec. Use
+ /// `cache_sort_key` when the [ToStableHashKey::to_stable_hash_key] implementation
+ /// for `K` is expensive (e.g. a `DefId -> DefPathHash` lookup).
+ #[inline]
+ pub fn values_sorted<HCX>(&self, hcx: &HCX, cache_sort_key: bool) -> impl Iterator<Item = &V>
+ where
+ K: ToStableHashKey<HCX>,
+ {
+ to_sorted_vec(hcx, self.inner.iter(), cache_sort_key, |&(k, _)| k)
+ .into_iter()
+ .map(|(_, v)| v)
+ }
+}
+
+impl<K, Q: ?Sized, V> Index<&Q> for UnordMap<K, V>
+where
+ K: Eq + Hash + Borrow<Q>,
+ Q: Eq + Hash,
+{
+ type Output = V;
+
+ #[inline]
+ fn index(&self, key: &Q) -> &V {
+ &self.inner[key]
+ }
}
impl<HCX, K: Hash + Eq + HashStable<HCX>, V: HashStable<HCX>> HashStable<HCX> for UnordMap<K, V> {
@@ -311,7 +511,7 @@ impl<V> UnordBag<V> {
}
#[inline]
- pub fn items<'a>(&'a self) -> UnordItems<&'a V, impl Iterator<Item = &'a V>> {
+ pub fn items(&self) -> UnordItems<&V, impl Iterator<Item = &V>> {
UnordItems(self.inner.iter())
}
@@ -334,6 +534,12 @@ impl<T> Extend<T> for UnordBag<T> {
}
}
+impl<T, I: Iterator<Item = T>> From<UnordItems<T, I>> for UnordBag<T> {
+ fn from(value: UnordItems<T, I>) -> Self {
+ UnordBag { inner: Vec::from_iter(value.0) }
+ }
+}
+
impl<HCX, V: Hash + Eq + HashStable<HCX>> HashStable<HCX> for UnordBag<V> {
#[inline]
fn hash_stable(&self, hcx: &mut HCX, hasher: &mut StableHasher) {
@@ -341,6 +547,27 @@ impl<HCX, V: Hash + Eq + HashStable<HCX>> HashStable<HCX> for UnordBag<V> {
}
}
+#[inline]
+fn to_sorted_vec<HCX, T, K, I>(
+ hcx: &HCX,
+ iter: I,
+ cache_sort_key: bool,
+ extract_key: fn(&T) -> &K,
+) -> Vec<T>
+where
+ I: Iterator<Item = T>,
+ K: ToStableHashKey<HCX>,
+{
+ let mut items: Vec<T> = iter.collect();
+ if cache_sort_key {
+ items.sort_by_cached_key(|x| extract_key(x).to_stable_hash_key(hcx));
+ } else {
+ items.sort_unstable_by_key(|x| extract_key(x).to_stable_hash_key(hcx));
+ }
+
+ items
+}
+
fn hash_iter_order_independent<
HCX,
T: HashStable<HCX>,
diff --git a/compiler/rustc_data_structures/src/vec_map.rs b/compiler/rustc_data_structures/src/vec_map.rs
index 86be0bd87..d1a99bcae 100644
--- a/compiler/rustc_data_structures/src/vec_map.rs
+++ b/compiler/rustc_data_structures/src/vec_map.rs
@@ -1,6 +1,5 @@
use std::borrow::Borrow;
use std::fmt::Debug;
-use std::iter::FromIterator;
use std::slice::Iter;
use std::vec::IntoIter;
@@ -72,8 +71,7 @@ where
// This should return just one element, otherwise it's a bug
assert!(
filter.next().is_none(),
- "Collection {:#?} should have just one matching element",
- self
+ "Collection {self:#?} should have just one matching element"
);
Some(value)
}