From 1376c5a617be5c25655d0d7cb63e3beaa5a6e026 Mon Sep 17 00:00:00 2001
From: Daniel Baumann
Date: Wed, 17 Apr 2024 14:20:39 +0200
Subject: Merging upstream version 1.70.0+dfsg1.

Signed-off-by: Daniel Baumann
---
 compiler/rustc_query_system/src/dep_graph/graph.rs | 456 ++++++++++++---------
 1 file changed, 262 insertions(+), 194 deletions(-)

(limited to 'compiler/rustc_query_system/src/dep_graph/graph.rs')

diff --git a/compiler/rustc_query_system/src/dep_graph/graph.rs b/compiler/rustc_query_system/src/dep_graph/graph.rs
index 59e0c3597..a9a2e6dd0 100644
--- a/compiler/rustc_query_system/src/dep_graph/graph.rs
+++ b/compiler/rustc_query_system/src/dep_graph/graph.rs
@@ -6,7 +6,6 @@ use rustc_data_structures::sharded::{self, Sharded};
 use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
 use rustc_data_structures::steal::Steal;
 use rustc_data_structures::sync::{AtomicU32, AtomicU64, Lock, Lrc, Ordering};
-use rustc_data_structures::OnDrop;
 use rustc_index::vec::IndexVec;
 use rustc_serialize::opaque::{FileEncodeResult, FileEncoder};
 use smallvec::{smallvec, SmallVec};
@@ -54,6 +53,11 @@ impl From<DepNodeIndex> for QueryInvocationId {
     }
 }
 
+pub struct MarkFrame<'a> {
+    index: SerializedDepNodeIndex,
+    parent: Option<&'a MarkFrame<'a>>,
+}
+
 #[derive(PartialEq)]
 pub enum DepNodeColor {
     Red,
@@ -70,7 +74,7 @@ impl DepNodeColor {
     }
 }
 
-struct DepGraphData<K: DepKind> {
+pub struct DepGraphData<K: DepKind> {
     /// The new encoding of the dependency graph, optimized for red/green
     /// tracking. The `current` field is the dependency graph of only the
     /// current compilation session: We don't merge the previous dep-graph into
@@ -139,7 +143,7 @@ impl<K: DepKind> DepGraph<K> {
         assert_eq!(_green_node_index, DepNodeIndex::SINGLETON_DEPENDENCYLESS_ANON_NODE);
 
         // Instantiate a dependency-less red node only once for anonymous queries.
-        let (_red_node_index, _prev_and_index) = current.intern_node(
+        let (red_node_index, red_node_prev_index_and_color) = current.intern_node(
             profiler,
             &prev_graph,
             DepNode { kind: DepKind::RED, hash: Fingerprint::ZERO.into() },
@@ -147,8 +151,21 @@ impl<K: DepKind> DepGraph<K> {
             None,
             false,
         );
-        assert_eq!(_red_node_index, DepNodeIndex::FOREVER_RED_NODE);
-        assert!(matches!(_prev_and_index, None | Some((_, DepNodeColor::Red))));
+        assert_eq!(red_node_index, DepNodeIndex::FOREVER_RED_NODE);
+        match red_node_prev_index_and_color {
+            None => {
+                // This is expected when we have no previous compilation session.
+                assert!(prev_graph_node_count == 0);
+            }
+            Some((prev_red_node_index, DepNodeColor::Red)) => {
+                assert_eq!(prev_red_node_index.as_usize(), red_node_index.as_usize());
+                colors.insert(prev_red_node_index, DepNodeColor::Red);
+            }
+            Some((_, DepNodeColor::Green(_))) => {
+                // There must be a logic error somewhere if we hit this branch.
+                panic!("DepNodeIndex::FOREVER_RED_NODE evaluated to DepNodeColor::Green")
+            }
+        }
 
         DepGraph {
             data: Some(Lrc::new(DepGraphData {
@@ -168,6 +185,11 @@ impl<K: DepKind> DepGraph<K> {
         DepGraph { data: None, virtual_dep_node_index: Lrc::new(AtomicU32::new(0)) }
     }
 
+    #[inline]
+    pub fn data(&self) -> Option<&DepGraphData<K>> {
+        self.data.as_deref()
+    }
+
     /// Returns `true` if we are actually building the full dep-graph, and `false` otherwise.
     #[inline]
     pub fn is_fully_enabled(&self) -> bool {
@@ -252,6 +274,38 @@ impl<K: DepKind> DepGraph<K> {
         K::with_deps(TaskDepsRef::Forbid, op)
     }
 
+    #[inline(always)]
+    pub fn with_task<Ctxt: HasDepContext<DepKind = K>, A: Debug, R>(
+        &self,
+        key: DepNode<K>,
+        cx: Ctxt,
+        arg: A,
+        task: fn(Ctxt, A) -> R,
+        hash_result: Option<fn(&mut StableHashingContext<'_>, &R) -> Fingerprint>,
+    ) -> (R, DepNodeIndex) {
+        match self.data() {
+            Some(data) => data.with_task(key, cx, arg, task, hash_result),
+            None => (task(cx, arg), self.next_virtual_depnode_index()),
+        }
+    }
+
+    pub fn with_anon_task<Tcx: DepContext<DepKind = K>, OP, R>(
+        &self,
+        cx: Tcx,
+        dep_kind: K,
+        op: OP,
+    ) -> (R, DepNodeIndex)
+    where
+        OP: FnOnce() -> R,
+    {
+        match self.data() {
+            Some(data) => data.with_anon_task(cx, dep_kind, op),
+            None => (op(), self.next_virtual_depnode_index()),
+        }
+    }
+}
+
+impl<K: DepKind> DepGraphData<K> {
     /// Starts a new dep-graph task. Dep-graph tasks are specified
     /// using a free function (`task`) and **not** a closure -- this
     /// is intentional because we want to exercise tight control over
@@ -288,29 +342,6 @@ impl<K: DepKind> DepGraph<K> {
         task: fn(Ctxt, A) -> R,
         hash_result: Option<fn(&mut StableHashingContext<'_>, &R) -> Fingerprint>,
     ) -> (R, DepNodeIndex) {
-        if self.is_fully_enabled() {
-            self.with_task_impl(key, cx, arg, task, hash_result)
-        } else {
-            // Incremental compilation is turned off. We just execute the task
-            // without tracking. We still provide a dep-node index that uniquely
-            // identifies the task so that we have a cheap way of referring to
-            // the query for self-profiling.
-            (task(cx, arg), self.next_virtual_depnode_index())
-        }
-    }
-
-    #[inline(always)]
-    fn with_task_impl<Ctxt: HasDepContext<DepKind = K>, A: Debug, R>(
-        &self,
-        key: DepNode<K>,
-        cx: Ctxt,
-        arg: A,
-        task: fn(Ctxt, A) -> R,
-        hash_result: Option<fn(&mut StableHashingContext<'_>, &R) -> Fingerprint>,
-    ) -> (R, DepNodeIndex) {
-        // This function is only called when the graph is enabled.
-        let data = self.data.as_ref().unwrap();
-
         // If the following assertion triggers, it can have two reasons:
         // 1. Something is wrong with DepNode creation, either here or
         //    in `DepGraph::try_mark_green()`.
@@ -335,10 +366,8 @@ impl<K: DepKind> DepGraph<K> {
             }))
         };
 
-        let task_deps_ref = match &task_deps {
-            Some(deps) => TaskDepsRef::Allow(deps),
-            None => TaskDepsRef::Ignore,
-        };
+        let task_deps_ref =
+            task_deps.as_ref().map(TaskDepsRef::Allow).unwrap_or(TaskDepsRef::EvalAlways);
 
         let result = K::with_deps(task_deps_ref, || task(cx, arg));
         let edges = task_deps.map_or_else(|| smallvec![], |lock| lock.into_inner().reads);
@@ -351,9 +380,9 @@ impl<K: DepKind> DepGraph<K> {
         let print_status = cfg!(debug_assertions) && dcx.sess().opts.unstable_opts.dep_tasks;
 
         // Intern the new `DepNode`.
-        let (dep_node_index, prev_and_color) = data.current.intern_node(
+        let (dep_node_index, prev_and_color) = self.current.intern_node(
             dcx.profiler(),
-            &data.previous,
+            &self.previous,
             key,
             edges,
             current_fingerprint,
@@ -364,12 +393,12 @@ impl<K: DepKind> DepGraph<K> {
 
         if let Some((prev_index, color)) = prev_and_color {
             debug_assert!(
-                data.colors.get(prev_index).is_none(),
+                self.colors.get(prev_index).is_none(),
                 "DepGraph::with_task() - Duplicate DepNodeColor \
                             insertion for {key:?}"
             );
 
-            data.colors.insert(prev_index, color);
+            self.colors.insert(prev_index, color);
         }
 
         (result, dep_node_index)
@@ -388,63 +417,66 @@ impl<K: DepKind> DepGraph<K> {
     {
         debug_assert!(!cx.is_eval_always(dep_kind));
 
-        if let Some(ref data) = self.data {
-            let task_deps = Lock::new(TaskDeps::default());
-            let result = K::with_deps(TaskDepsRef::Allow(&task_deps), op);
-            let task_deps = task_deps.into_inner();
-            let task_deps = task_deps.reads;
-
-            let dep_node_index = match task_deps.len() {
-                0 => {
-                    // Because the dep-node id of anon nodes is computed from the sets of its
-                    // dependencies we already know what the ID of this dependency-less node is
-                    // going to be (i.e. equal to the precomputed
-                    // `SINGLETON_DEPENDENCYLESS_ANON_NODE`). As a consequence we can skip creating
-                    // a `StableHasher` and sending the node through interning.
-                    DepNodeIndex::SINGLETON_DEPENDENCYLESS_ANON_NODE
-                }
-                1 => {
-                    // When there is only one dependency, don't bother creating a node.
-                    task_deps[0]
-                }
-                _ => {
-                    // The dep node indices are hashed here instead of hashing the dep nodes of the
-                    // dependencies. These indices may refer to different nodes per session, but this isn't
-                    // a problem here because we that ensure the final dep node hash is per session only by
-                    // combining it with the per session random number `anon_id_seed`. This hash only need
-                    // to map the dependencies to a single value on a per session basis.
-                    let mut hasher = StableHasher::new();
-                    task_deps.hash(&mut hasher);
-
-                    let target_dep_node = DepNode {
-                        kind: dep_kind,
-                        // Fingerprint::combine() is faster than sending Fingerprint
-                        // through the StableHasher (at least as long as StableHasher
-                        // is so slow).
-                        hash: data.current.anon_id_seed.combine(hasher.finish()).into(),
-                    };
-
-                    data.current.intern_new_node(
-                        cx.profiler(),
-                        target_dep_node,
-                        task_deps,
-                        Fingerprint::ZERO,
-                    )
-                }
-            };
-
-            (result, dep_node_index)
-        } else {
-            (op(), self.next_virtual_depnode_index())
-        }
+        let task_deps = Lock::new(TaskDeps::default());
+        let result = K::with_deps(TaskDepsRef::Allow(&task_deps), op);
+        let task_deps = task_deps.into_inner();
+        let task_deps = task_deps.reads;
+
+        let dep_node_index = match task_deps.len() {
+            0 => {
+                // Because the dep-node id of anon nodes is computed from the sets of its
+                // dependencies we already know what the ID of this dependency-less node is
+                // going to be (i.e. equal to the precomputed
+                // `SINGLETON_DEPENDENCYLESS_ANON_NODE`). As a consequence we can skip creating
+                // a `StableHasher` and sending the node through interning.
+                DepNodeIndex::SINGLETON_DEPENDENCYLESS_ANON_NODE
+            }
+            1 => {
+                // When there is only one dependency, don't bother creating a node.
+                task_deps[0]
+            }
+            _ => {
+                // The dep node indices are hashed here instead of hashing the dep nodes of the
+                // dependencies. These indices may refer to different nodes per session, but this
+                // isn't a problem here because we ensure that the final dep node hash is
+                // per-session only by combining it with the per-session random number
+                // `anon_id_seed`. This hash only needs to map the dependencies to a single
+                // value on a per-session basis.
+                let mut hasher = StableHasher::new();
+                task_deps.hash(&mut hasher);
+
+                let target_dep_node = DepNode {
+                    kind: dep_kind,
+                    // Fingerprint::combine() is faster than sending Fingerprint
+                    // through the StableHasher (at least as long as StableHasher
+                    // is so slow).
+                    hash: self.current.anon_id_seed.combine(hasher.finish()).into(),
+                };
+
+                self.current.intern_new_node(
+                    cx.profiler(),
+                    target_dep_node,
+                    task_deps,
+                    Fingerprint::ZERO,
+                )
+            }
+        };
+
+        (result, dep_node_index)
     }
+}
 
+impl<K: DepKind> DepGraph<K> {
     #[inline]
     pub fn read_index(&self, dep_node_index: DepNodeIndex) {
         if let Some(ref data) = self.data {
             K::read_deps(|task_deps| {
                 let mut task_deps = match task_deps {
                     TaskDepsRef::Allow(deps) => deps.lock(),
+                    TaskDepsRef::EvalAlways => {
+                        // We don't need to record dependencies of eval_always
+                        // queries. They are re-evaluated unconditionally anyway.
+                        return;
+                    }
                     TaskDepsRef::Ignore => return,
                     TaskDepsRef::Forbid => {
                         panic!("Illegal read of: {dep_node_index:?}")
@@ -519,22 +551,38 @@ impl<K: DepKind> DepGraph<K> {
         // value to an existing node.
        //
         // For sanity, we still check that the loaded stable hash and the new one match.
-        if let Some(dep_node_index) = self.dep_node_index_of_opt(&node) {
-            let _current_fingerprint =
-                crate::query::incremental_verify_ich(cx, result, &node, hash_result);
+        if let Some(prev_index) = data.previous.node_to_index_opt(&node) {
+            let dep_node_index = data.current.prev_index_to_index.lock()[prev_index];
+            if let Some(dep_node_index) = dep_node_index {
+                crate::query::incremental_verify_ich(
+                    cx,
+                    data,
+                    result,
+                    prev_index,
+                    hash_result,
+                    |value| format!("{:?}", value),
+                );
 
-            #[cfg(debug_assertions)]
-            if hash_result.is_some() {
-                data.current.record_edge(dep_node_index, node, _current_fingerprint);
-            }
+                #[cfg(debug_assertions)]
+                if hash_result.is_some() {
+                    data.current.record_edge(
+                        dep_node_index,
+                        node,
+                        data.prev_fingerprint_of(prev_index),
+                    );
+                }
 
-            return dep_node_index;
+                return dep_node_index;
+            }
         }
 
         let mut edges = SmallVec::new();
         K::read_deps(|task_deps| match task_deps {
             TaskDepsRef::Allow(deps) => edges.extend(deps.lock().reads.iter().copied()),
-            TaskDepsRef::Ignore => {} // During HIR lowering, we have no dependencies.
+            TaskDepsRef::EvalAlways => {
+                edges.push(DepNodeIndex::FOREVER_RED_NODE);
+            }
+            TaskDepsRef::Ignore => {}
             TaskDepsRef::Forbid => {
                 panic!("Cannot summarize when dependencies are not recorded.")
             }
@@ -577,32 +625,63 @@ impl<K: DepKind> DepGraph<K> {
             self.next_virtual_depnode_index()
         }
     }
+}
 
+impl<K: DepKind> DepGraphData<K> {
     #[inline]
-    pub fn dep_node_index_of(&self, dep_node: &DepNode<K>) -> DepNodeIndex {
-        self.dep_node_index_of_opt(dep_node).unwrap()
+    pub fn dep_node_index_of_opt(&self, dep_node: &DepNode<K>) -> Option<DepNodeIndex> {
+        if let Some(prev_index) = self.previous.node_to_index_opt(dep_node) {
+            self.current.prev_index_to_index.lock()[prev_index]
+        } else {
+            self.current
+                .new_node_to_index
+                .get_shard_by_value(dep_node)
+                .lock()
+                .get(dep_node)
+                .copied()
+        }
     }
 
     #[inline]
-    pub fn dep_node_index_of_opt(&self, dep_node: &DepNode<K>) -> Option<DepNodeIndex> {
-        let data = self.data.as_ref().unwrap();
-        let current = &data.current;
+    pub fn dep_node_exists(&self, dep_node: &DepNode<K>) -> bool {
+        self.dep_node_index_of_opt(dep_node).is_some()
+    }
 
-        if let Some(prev_index) = data.previous.node_to_index_opt(dep_node) {
-            current.prev_index_to_index.lock()[prev_index]
+    fn node_color(&self, dep_node: &DepNode<K>) -> Option<DepNodeColor> {
+        if let Some(prev_index) = self.previous.node_to_index_opt(dep_node) {
+            self.colors.get(prev_index)
         } else {
-            current.new_node_to_index.get_shard_by_value(dep_node).lock().get(dep_node).copied()
+            // This is a node that did not exist in the previous compilation session.
+            None
         }
     }
 
+    /// Returns true if the given node has been marked as green during the
+    /// current compilation session. Used in various assertions
     #[inline]
-    pub fn dep_node_exists(&self, dep_node: &DepNode<K>) -> bool {
-        self.data.is_some() && self.dep_node_index_of_opt(dep_node).is_some()
+    pub fn is_index_green(&self, prev_index: SerializedDepNodeIndex) -> bool {
+        self.colors.get(prev_index).map_or(false, |c| c.is_green())
+    }
+
+    #[inline]
+    pub fn prev_fingerprint_of(&self, prev_index: SerializedDepNodeIndex) -> Fingerprint {
+        self.previous.fingerprint_by_index(prev_index)
+    }
+
+    #[inline]
+    pub fn prev_node_of(&self, prev_index: SerializedDepNodeIndex) -> DepNode<K> {
+        self.previous.index_to_node(prev_index)
+    }
+
+    pub fn mark_debug_loaded_from_disk(&self, dep_node: DepNode<K>) {
+        self.debug_loaded_from_disk.lock().insert(dep_node);
    }
+}
 
+impl<K: DepKind> DepGraph<K> {
     #[inline]
-    pub fn prev_fingerprint_of(&self, dep_node: &DepNode<K>) -> Option<Fingerprint> {
-        self.data.as_ref().unwrap().previous.fingerprint_of(dep_node)
+    pub fn dep_node_exists(&self, dep_node: &DepNode<K>) -> bool {
+        self.data.as_ref().map_or(false, |data| data.dep_node_exists(dep_node))
     }
 
     /// Checks whether a previous work product exists for `v` and, if
@@ -617,10 +696,6 @@ impl<K: DepKind> DepGraph<K> {
         &self.data.as_ref().unwrap().previous_work_products
     }
 
-    pub fn mark_debug_loaded_from_disk(&self, dep_node: DepNode<K>) {
-        self.data.as_ref().unwrap().debug_loaded_from_disk.lock().insert(dep_node);
-    }
-
     pub fn debug_was_loaded_from_disk(&self, dep_node: DepNode<K>) -> bool {
         self.data.as_ref().unwrap().debug_loaded_from_disk.lock().contains(&dep_node)
     }
@@ -645,17 +720,22 @@ impl<K: DepKind> DepGraph<K> {
 
     fn node_color(&self, dep_node: &DepNode<K>) -> Option<DepNodeColor> {
         if let Some(ref data) = self.data {
-            if let Some(prev_index) = data.previous.node_to_index_opt(dep_node) {
-                return data.colors.get(prev_index);
-            } else {
-                // This is a node that did not exist in the previous compilation session.
-                return None;
-            }
+            return data.node_color(dep_node);
         }
 
         None
     }
 
+    pub fn try_mark_green<Qcx: QueryContext<DepKind = K>>(
+        &self,
+        qcx: Qcx,
+        dep_node: &DepNode<K>,
+    ) -> Option<(SerializedDepNodeIndex, DepNodeIndex)> {
+        self.data().and_then(|data| data.try_mark_green(qcx, dep_node))
+    }
+}
+
+impl<K: DepKind> DepGraphData<K> {
     /// Try to mark a node index for the node dep_node.
     ///
     /// A node will have an index, when it's already been marked green, or when we can mark it
@@ -668,43 +748,33 @@ impl<K: DepKind> DepGraph<K> {
     ) -> Option<(SerializedDepNodeIndex, DepNodeIndex)> {
         debug_assert!(!qcx.dep_context().is_eval_always(dep_node.kind));
 
-        // Return None if the dep graph is disabled
-        let data = self.data.as_ref()?;
-
         // Return None if the dep node didn't exist in the previous session
-        let prev_index = data.previous.node_to_index_opt(dep_node)?;
+        let prev_index = self.previous.node_to_index_opt(dep_node)?;
 
-        match data.colors.get(prev_index) {
-            Some(DepNodeColor::Green(dep_node_index)) => return Some((prev_index, dep_node_index)),
-            Some(DepNodeColor::Red) => return None,
-            None => {}
+        match self.colors.get(prev_index) {
+            Some(DepNodeColor::Green(dep_node_index)) => Some((prev_index, dep_node_index)),
+            Some(DepNodeColor::Red) => None,
+            None => {
+                // This DepNode and the corresponding query invocation existed
+                // in the previous compilation session too, so we can try to
+                // mark it as green by recursively marking all of its
+                // dependencies green.
+                self.try_mark_previous_green(qcx, prev_index, &dep_node, None)
+                    .map(|dep_node_index| (prev_index, dep_node_index))
+            }
         }
-
-        let backtrace = backtrace_printer(qcx.dep_context().sess(), data, prev_index);
-
-        // This DepNode and the corresponding query invocation existed
-        // in the previous compilation session too, so we can try to
-        // mark it as green by recursively marking all of its
-        // dependencies green.
-        let ret = self
-            .try_mark_previous_green(qcx, data, prev_index, &dep_node)
-            .map(|dep_node_index| (prev_index, dep_node_index));
-
-        // We succeeded, no backtrace.
-        backtrace.disable();
-        return ret;
     }
 
-    #[instrument(skip(self, qcx, data, parent_dep_node_index), level = "debug")]
+    #[instrument(skip(self, qcx, parent_dep_node_index, frame), level = "debug")]
     fn try_mark_parent_green<Qcx: QueryContext<DepKind = K>>(
         &self,
         qcx: Qcx,
-        data: &DepGraphData<K>,
         parent_dep_node_index: SerializedDepNodeIndex,
         dep_node: &DepNode<K>,
+        frame: Option<&MarkFrame<'_>>,
     ) -> Option<()> {
-        let dep_dep_node_color = data.colors.get(parent_dep_node_index);
-        let dep_dep_node = &data.previous.index_to_node(parent_dep_node_index);
+        let dep_dep_node_color = self.colors.get(parent_dep_node_index);
+        let dep_dep_node = &self.previous.index_to_node(parent_dep_node_index);
 
         match dep_dep_node_color {
             Some(DepNodeColor::Green(_)) => {
@@ -734,7 +804,7 @@ impl<K: DepKind> DepGraph<K> {
                 );
 
                 let node_index =
-                    self.try_mark_previous_green(qcx, data, parent_dep_node_index, dep_dep_node);
+                    self.try_mark_previous_green(qcx, parent_dep_node_index, dep_dep_node, frame);
 
                 if node_index.is_some() {
                     debug!("managed to MARK dependency {dep_dep_node:?} as green",);
@@ -744,13 +814,13 @@ impl<K: DepKind> DepGraph<K> {
                 // We failed to mark it green, so we try to force the query.
                 debug!("trying to force dependency {dep_dep_node:?}");
-                if !qcx.dep_context().try_force_from_dep_node(*dep_dep_node) {
+                if !qcx.dep_context().try_force_from_dep_node(*dep_dep_node, frame) {
                     // The DepNode could not be forced.
                     debug!("dependency {dep_dep_node:?} could not be forced");
                     return None;
                 }
 
-                let dep_dep_node_color = data.colors.get(parent_dep_node_index);
+                let dep_dep_node_color = self.colors.get(parent_dep_node_index);
 
                 match dep_dep_node_color {
                     Some(DepNodeColor::Green(_)) => {
@@ -783,32 +853,31 @@ impl<K: DepKind> DepGraph<K> {
     }
 
     /// Try to mark a dep-node which existed in the previous compilation session as green.
-    #[instrument(skip(self, qcx, data, prev_dep_node_index), level = "debug")]
+    #[instrument(skip(self, qcx, prev_dep_node_index, frame), level = "debug")]
     fn try_mark_previous_green<Qcx: QueryContext<DepKind = K>>(
         &self,
         qcx: Qcx,
-        data: &DepGraphData<K>,
         prev_dep_node_index: SerializedDepNodeIndex,
         dep_node: &DepNode<K>,
+        frame: Option<&MarkFrame<'_>>,
     ) -> Option<DepNodeIndex> {
+        let frame = MarkFrame { index: prev_dep_node_index, parent: frame };
+
         #[cfg(not(parallel_compiler))]
         {
             debug_assert!(!self.dep_node_exists(dep_node));
-            debug_assert!(data.colors.get(prev_dep_node_index).is_none());
+            debug_assert!(self.colors.get(prev_dep_node_index).is_none());
         }
 
         // We never try to mark eval_always nodes as green
         debug_assert!(!qcx.dep_context().is_eval_always(dep_node.kind));
 
-        debug_assert_eq!(data.previous.index_to_node(prev_dep_node_index), *dep_node);
+        debug_assert_eq!(self.previous.index_to_node(prev_dep_node_index), *dep_node);
 
-        let prev_deps = data.previous.edge_targets_from(prev_dep_node_index);
+        let prev_deps = self.previous.edge_targets_from(prev_dep_node_index);
 
         for &dep_dep_node_index in prev_deps {
-            let backtrace = backtrace_printer(qcx.dep_context().sess(), data, dep_dep_node_index);
-            let success = self.try_mark_parent_green(qcx, data, dep_dep_node_index, dep_node);
-            backtrace.disable();
-            success?;
+            self.try_mark_parent_green(qcx, dep_dep_node_index, dep_node, Some(&frame))?;
         }
 
         // If we got here without hitting a `return` that means that all
@@ -819,9 +888,9 @@ impl<K: DepKind> DepGraph<K> {
 
         // We allocate an entry for the node in the current dependency graph and
         // add all the appropriate edges imported from the previous graph
-        let dep_node_index = data.current.promote_node_and_deps_to_current(
+        let dep_node_index = self.current.promote_node_and_deps_to_current(
             qcx.dep_context().profiler(),
-            &data.previous,
+            &self.previous,
             prev_dep_node_index,
         );
 
@@ -833,20 +902,20 @@ impl<K: DepKind> DepGraph<K> {
 
         #[cfg(not(parallel_compiler))]
         debug_assert!(
-            data.colors.get(prev_dep_node_index).is_none(),
+            self.colors.get(prev_dep_node_index).is_none(),
             "DepGraph::try_mark_previous_green() - Duplicate DepNodeColor \
                       insertion for {dep_node:?}"
         );
 
         if !side_effects.is_empty() {
-            self.with_query_deserialization(|| {
-                self.emit_side_effects(qcx, data, dep_node_index, side_effects)
+            qcx.dep_context().dep_graph().with_query_deserialization(|| {
+                self.emit_side_effects(qcx, dep_node_index, side_effects)
             });
         }
 
         // ... and finally storing a "Green" entry in the color map.
         // Multiple threads can all write the same color here
-        data.colors.insert(prev_dep_node_index, DepNodeColor::Green(dep_node_index));
+        self.colors.insert(prev_dep_node_index, DepNodeColor::Green(dep_node_index));
 
         debug!("successfully marked {dep_node:?} as green");
         Some(dep_node_index)
@@ -859,11 +928,10 @@ impl<K: DepKind> DepGraph<K> {
     fn emit_side_effects<Qcx: QueryContext<DepKind = K>>(
         &self,
         qcx: Qcx,
-        data: &DepGraphData<K>,
         dep_node_index: DepNodeIndex,
         side_effects: QuerySideEffects,
     ) {
-        let mut processed = data.processed_side_effects.lock();
+        let mut processed = self.processed_side_effects.lock();
 
         if processed.insert(dep_node_index) {
             // We were the first to insert the node in the set so this thread
@@ -879,7 +947,9 @@ impl<K: DepKind> DepGraph<K> {
             }
         }
     }
+}
 
+impl<K: DepKind> DepGraph<K> {
     /// Returns true if the given node has been marked as red during the
     /// current compilation session. Used in various assertions
     pub fn is_red(&self, dep_node: &DepNode<K>) -> bool {
@@ -937,6 +1007,7 @@ impl<K: DepKind> DepGraph<K> {
     }
 
     pub(crate) fn next_virtual_depnode_index(&self) -> DepNodeIndex {
+        debug_assert!(self.data.is_none());
         let index = self.virtual_dep_node_index.fetch_add(1, Relaxed);
         DepNodeIndex::from_u32(index)
     }
@@ -1020,7 +1091,7 @@ pub(super) struct CurrentDepGraph<K: DepKind> {
     /// This is used to verify that fingerprints do not change between the creation of a node
     /// and its recomputation.
     #[cfg(debug_assertions)]
-    fingerprints: Lock<FxHashMap<DepNode<K>, Fingerprint>>,
+    fingerprints: Lock<IndexVec<DepNodeIndex, Option<Fingerprint>>>,
 
     /// Used to trap when a specific edge is added to the graph.
     /// This is used for debug purposes and is only active with `debug_assertions`.
@@ -1106,7 +1177,7 @@ impl<K: DepKind> CurrentDepGraph<K> {
             #[cfg(debug_assertions)]
             forbidden_edge,
             #[cfg(debug_assertions)]
-            fingerprints: Lock::new(Default::default()),
+            fingerprints: Lock::new(IndexVec::from_elem_n(None, new_node_count_estimate)),
             total_read_count: AtomicU64::new(0),
             total_duplicate_read_count: AtomicU64::new(0),
             node_intern_event_id,
@@ -1118,14 +1189,8 @@ impl<K: DepKind> CurrentDepGraph<K> {
         if let Some(forbidden_edge) = &self.forbidden_edge {
             forbidden_edge.index_to_node.lock().insert(dep_node_index, key);
         }
-        match self.fingerprints.lock().entry(key) {
-            Entry::Vacant(v) => {
-                v.insert(fingerprint);
-            }
-            Entry::Occupied(o) => {
-                assert_eq!(*o.get(), fingerprint, "Unstable fingerprints for {:?}", key);
-            }
-        }
+        let previous = *self.fingerprints.lock().get_or_insert_with(dep_node_index, || fingerprint);
+        assert_eq!(previous, fingerprint, "Unstable fingerprints for {:?}", key);
     }
 
     /// Writes the node to the current dep-graph and allocates a `DepNodeIndex` for it.
@@ -1310,10 +1375,13 @@ pub enum TaskDepsRef<'a, K: DepKind> {
     /// `TaskDeps`. This is used when executing a 'normal' query
     /// (no `eval_always` modifier)
     Allow(&'a Lock<TaskDeps<K>>),
-    /// New dependencies are ignored. This is used when
-    /// executing an `eval_always` query, since there's no
+    /// This is used when executing an `eval_always` query. We don't
     /// need to track dependencies for a query that's always
-    /// re-executed. This is also used for `dep_graph.with_ignore`
+    /// re-executed -- but we need to know that this is an `eval_always`
+    /// query in order to emit dependencies to `DepNodeIndex::FOREVER_RED_NODE`
+    /// when directly feeding other queries.
+    EvalAlways,
+    /// New dependencies are ignored. This is also used for `dep_graph.with_ignore`.
     Ignore,
     /// Any attempt to add new dependencies will cause a panic.
     /// This is used when decoding a query result from disk,
     /// to ensure that the decoding process doesn't itself
     /// require the execution of any queries.
     Forbid,
 }
@@ -1381,25 +1449,25 @@ impl DepNodeColorMap {
         }
     }
 }
 
-fn backtrace_printer<'a, K: DepKind>(
-    sess: &'a rustc_session::Session,
-    graph: &'a DepGraphData<K>,
-    node: SerializedDepNodeIndex,
-) -> OnDrop<impl Fn() + 'a> {
-    OnDrop(
-        #[inline(never)]
-        #[cold]
-        move || {
-            let node = graph.previous.index_to_node(node);
-            // Do not try to rely on DepNode's Debug implementation, since it may panic.
-            let diag = rustc_errors::Diagnostic::new(
-                rustc_errors::Level::FailureNote,
-                &format!(
-                    "encountered while trying to mark dependency green: {:?}({})",
-                    node.kind, node.hash
-                ),
-            );
-            sess.diagnostic().force_print_diagnostic(diag);
-        },
-    )
+#[inline(never)]
+#[cold]
+pub(crate) fn print_markframe_trace<K: DepKind>(
+    graph: &DepGraph<K>,
+    frame: Option<&MarkFrame<'_>>,
+) {
+    let data = graph.data.as_ref().unwrap();
+
+    eprintln!("there was a panic while trying to force a dep node");
+    eprintln!("try_mark_green dep node stack:");
+
+    let mut i = 0;
+    let mut current = frame;
+    while let Some(frame) = current {
+        let node = data.previous.index_to_node(frame.index);
+        eprintln!("#{i} {:?}", node);
+        current = frame.parent;
+        i += 1;
+    }
+
+    eprintln!("end of try_mark_green dep node stack");
 }
--
cgit v1.2.3
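
The recursion that the try_mark_green/try_mark_previous_green/try_mark_parent_green hunks
above reorganize can be summarized outside of rustc. The following is a minimal,
self-contained sketch of red/green marking over a toy dependency graph; all names here
(PrevGraph, Marker, still_valid, plain usize node indices) are invented for illustration,
and forcing, fingerprints, side effects, and parallelism are deliberately left out.

    use std::collections::HashMap;

    #[derive(Clone, Copy, PartialEq, Debug)]
    enum Color {
        Red,
        Green,
    }

    // A toy "previous session" graph: per node, its dependencies plus a flag
    // standing in for "recomputing this node reproduces last session's result"
    // (roughly what forcing checks in rustc).
    struct PrevGraph {
        deps: Vec<Vec<usize>>,
        still_valid: Vec<bool>,
    }

    struct Marker<'a> {
        prev: &'a PrevGraph,
        colors: HashMap<usize, Color>,
    }

    impl<'a> Marker<'a> {
        // Try to prove `node` green: every dependency must be green, otherwise
        // the node is red. rustc's try_mark_previous_green has the same shape,
        // additionally forcing dependencies that cannot be marked green and
        // threading a MarkFrame chain for diagnostics.
        fn try_mark_green(&mut self, node: usize) -> bool {
            if let Some(c) = self.colors.get(&node) {
                return *c == Color::Green;
            }
            let prev = self.prev;
            for &dep in &prev.deps[node] {
                if !self.try_mark_green(dep) {
                    self.colors.insert(node, Color::Red);
                    return false;
                }
            }
            // All deps are green. The toy model also consults a validity flag;
            // in rustc, a node whose deps are all green is green outright, and
            // this flag corresponds to the outcome of forcing a leaf.
            let color = if prev.still_valid[node] { Color::Green } else { Color::Red };
            self.colors.insert(node, color);
            color == Color::Green
        }
    }

    fn main() {
        // Node 2 depends on 1, which depends on 0; node 0 changed this session.
        let prev = PrevGraph {
            deps: vec![vec![], vec![0], vec![1]],
            still_valid: vec![false, true, true],
        };
        let mut marker = Marker { prev: &prev, colors: HashMap::new() };
        assert!(!marker.try_mark_green(2)); // redness propagates up from node 0
        assert_eq!(marker.colors[&0], Color::Red);
    }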
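The MarkFrame struct added at the top of the file threads a parent-pointer list through
the marking recursion so that print_markframe_trace can walk it on the failure path,
replacing the old OnDrop-based backtrace_printer. Here is a sketch of the same
borrowed-stack pattern with invented names; it is not rustc's API, just the shape of it:

    // A stack frame that borrows its parent, forming a linked list that lives
    // entirely on the call stack -- the same shape as MarkFrame in the patch.
    struct Frame<'a> {
        index: usize,
        parent: Option<&'a Frame<'a>>,
    }

    fn walk(index: usize, parent: Option<&Frame<'_>>) {
        let frame = Frame { index, parent };
        if index == 0 {
            // In rustc this would be the panic/error path, which prints the
            // chain via print_markframe_trace.
            print_trace(Some(&frame));
            return;
        }
        walk(index - 1, Some(&frame));
    }

    #[inline(never)]
    #[cold]
    fn print_trace(frame: Option<&Frame<'_>>) {
        eprintln!("dep node stack:");
        let mut i = 0;
        let mut current = frame;
        while let Some(frame) = current {
            eprintln!("#{i} node {}", frame.index);
            current = frame.parent;
            i += 1;
        }
        eprintln!("end of dep node stack");
    }

    fn main() {
        walk(3, None);
    }

Because each frame borrows its parent, the chain costs nothing to maintain on the happy
path and is only traversed in the cold diagnostic function.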
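The new TaskDepsRef::EvalAlways variant gives the read-recording code a fourth mode. A
simplified model of the four modes and how a recorded read is dispatched under each one
(the names, the u32 index type, and the bare Vec are placeholders; the real code goes
through a Lock and records into TaskDeps):

    // Simplified stand-in for the patch's TaskDepsRef modes.
    enum TaskDeps<'a> {
        // Record reads into the running task's dependency list.
        Allow(&'a mut Vec<u32>),
        // An eval_always task: reads need not be recorded because the task is
        // unconditionally re-run. (When such a result directly feeds another
        // query, the patch emits an edge to FOREVER_RED_NODE instead.)
        EvalAlways,
        // Reads are deliberately ignored (dep_graph.with_ignore).
        Ignore,
        // Reads are a bug here, e.g. while decoding a cached query result.
        Forbid,
    }

    fn record_read(deps: &mut TaskDeps<'_>, index: u32) {
        match deps {
            TaskDeps::Allow(reads) => {
                if !reads.contains(&index) {
                    reads.push(index);
                }
            }
            TaskDeps::EvalAlways | TaskDeps::Ignore => {}
            TaskDeps::Forbid => panic!("illegal read of {index}"),
        }
    }

    fn main() {
        let mut reads = Vec::new();
        let mut deps = TaskDeps::Allow(&mut reads);
        record_read(&mut deps, 7);
        record_read(&mut deps, 7); // duplicate reads are deduplicated
        record_read(&mut TaskDeps::Ignore, 9);
        assert_eq!(reads, vec![7]);
    }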
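The comment in with_anon_task explains that anonymous nodes hash the *indices* of their
dependencies and mix in a per-session random seed, because the indices themselves are only
stable within one session. A sketch of that idea using only the standard library; the
DefaultHasher and u64 values stand in for rustc's StableHasher and 128-bit Fingerprint,
and the mixing function is invented:

    use std::collections::hash_map::DefaultHasher;
    use std::hash::{Hash, Hasher};

    // Stand-in for Fingerprint::combine(); any decent mixer works for the sketch.
    fn combine(seed: u64, dep_hash: u64) -> u64 {
        seed.rotate_left(32) ^ dep_hash.wrapping_mul(0x9E3779B97F4A7C15)
    }

    // Compute a per-session id for an anonymous node from the indices of its
    // dependencies, mixing in a session-random seed as the patch describes.
    fn anon_node_hash(anon_id_seed: u64, dep_indices: &[u32]) -> u64 {
        let mut hasher = DefaultHasher::new();
        dep_indices.hash(&mut hasher);
        combine(anon_id_seed, hasher.finish())
    }

    fn main() {
        let seed = 0xDEAD_BEEF_u64; // rustc derives this randomly per session
        let a = anon_node_hash(seed, &[3, 17, 42]);
        let b = anon_node_hash(seed, &[3, 17, 42]);
        let c = anon_node_hash(seed, &[3, 18, 42]);
        assert_eq!(a, b); // same deps in the same session -> same id
        assert_ne!(a, c); // different dep set -> different id (with high probability)
    }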
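Finally, record_edge now keeps its debug-mode fingerprints in a dense
IndexVec<DepNodeIndex, Option<Fingerprint>> instead of a FxHashMap keyed by DepNode,
using get_or_insert_with to detect unstable fingerprints. A sketch of the same check
with a plain Vec<Option<u64>>; the free function here is hypothetical and only mirrors
the semantics, not rustc's IndexVec API:

    // Record a fingerprint for `index`, returning whatever was already there
    // (or the freshly inserted value on first sight).
    fn get_or_insert_with(
        fingerprints: &mut Vec<Option<u64>>,
        index: usize,
        compute: impl FnOnce() -> u64,
    ) -> u64 {
        if fingerprints.len() <= index {
            fingerprints.resize(index + 1, None);
        }
        *fingerprints[index].get_or_insert_with(compute)
    }

    fn main() {
        let mut fingerprints: Vec<Option<u64>> = Vec::new();
        let first = get_or_insert_with(&mut fingerprints, 5, || 0xABCD);
        // Re-recording the same node must reproduce the same fingerprint,
        // which is exactly what record_edge asserts in the patch.
        let second = get_or_insert_with(&mut fingerprints, 5, || 0xEEEE);
        assert_eq!(first, second, "unstable fingerprints for node 5");
    }

The dense vector trades a little memory for index-based lookups that avoid hashing the
DepNode key on every recorded edge, which matters in debug builds where this check runs
for every node.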