path: root/vendor/polonius-engine/src
author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 12:02:58 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 12:02:58 +0000
commit     698f8c2f01ea549d77d7dc3338a12e04c11057b9 (patch)
tree       173a775858bd501c378080a10dca74132f05bc50  /vendor/polonius-engine/src
parent     Initial commit. (diff)
Adding upstream version 1.64.0+dfsg1. (upstream/1.64.0+dfsg1)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'vendor/polonius-engine/src')
-rw-r--r--  vendor/polonius-engine/src/facts.rs  129
-rw-r--r--  vendor/polonius-engine/src/lib.rs  16
-rw-r--r--  vendor/polonius-engine/src/output/datafrog_opt.rs  495
-rw-r--r--  vendor/polonius-engine/src/output/initialization.rs  284
-rw-r--r--  vendor/polonius-engine/src/output/liveness.rs  170
-rw-r--r--  vendor/polonius-engine/src/output/location_insensitive.rs  156
-rw-r--r--  vendor/polonius-engine/src/output/mod.rs  614
-rw-r--r--  vendor/polonius-engine/src/output/naive.rs  299
8 files changed, 2163 insertions, 0 deletions
diff --git a/vendor/polonius-engine/src/facts.rs b/vendor/polonius-engine/src/facts.rs
new file mode 100644
index 000000000..442ba186c
--- /dev/null
+++ b/vendor/polonius-engine/src/facts.rs
@@ -0,0 +1,129 @@
+use std::fmt::Debug;
+use std::hash::Hash;
+
+/// The "facts" which are the basis of the NLL borrow analysis.
+#[derive(Clone, Debug)]
+pub struct AllFacts<T: FactTypes> {
+ /// `loan_issued_at(origin, loan, point)` indicates that the `loan` was "issued"
+ /// at the given `point`, creating a reference with the `origin`.
+ /// Effectively, `origin` may refer to data from `loan` starting at `point` (this is usually
+ /// the point *after* a borrow rvalue).
+ pub loan_issued_at: Vec<(T::Origin, T::Loan, T::Point)>,
+
+ /// `universal_region(origin)` -- this is a "free region" within the fn body
+ pub universal_region: Vec<T::Origin>,
+
+ /// `cfg_edge(point1, point2)` for each edge `point1 -> point2` in the control flow graph
+ pub cfg_edge: Vec<(T::Point, T::Point)>,
+
+ /// `loan_killed_at(loan, point)` when some prefix of the path borrowed at `loan`
+ /// is assigned at `point`.
+ /// Indicates that the path borrowed by the `loan` has changed in some way such that the loan no
+ /// longer needs to be tracked. (In particular, mutations to the path that was borrowed
+ /// no longer invalidate the loan.)
+ pub loan_killed_at: Vec<(T::Loan, T::Point)>,
+
+ /// `subset_base(origin1, origin2, point)` when we require `origin1@point: origin2@point`.
+ /// Indicates that `origin1 <= origin2` -- i.e., the set of loans in `origin1` are a subset
+ /// of those in `origin2`.
+ pub subset_base: Vec<(T::Origin, T::Origin, T::Point)>,
+
+ /// `loan_invalidated_at(point, loan)` indicates that the `loan` is invalidated by some action
+ /// taking place at `point`; if any origin that references this loan is live, this is an error.
+ pub loan_invalidated_at: Vec<(T::Point, T::Loan)>,
+
+ /// `var_used_at(var, point)` when the variable `var` is used for anything
+ /// but a drop at `point`
+ pub var_used_at: Vec<(T::Variable, T::Point)>,
+
+ /// `var_defined_at(var, point)` when the variable `var` is overwritten at `point`
+ pub var_defined_at: Vec<(T::Variable, T::Point)>,
+
+ /// `var_dropped_at(var, point)` when the variable `var` is used in a drop at `point`
+ pub var_dropped_at: Vec<(T::Variable, T::Point)>,
+
+ /// `use_of_var_derefs_origin(variable, origin)`: References with the given
+ /// `origin` may be dereferenced when the `variable` is used.
+ ///
+ /// In rustc, we generate this whenever the type of the variable includes the
+ /// given origin.
+ pub use_of_var_derefs_origin: Vec<(T::Variable, T::Origin)>,
+
+ /// `drop_of_var_derefs_origin(var, origin)` when the type of `var` includes
+ /// the `origin` and uses it when dropping
+ pub drop_of_var_derefs_origin: Vec<(T::Variable, T::Origin)>,
+
+ /// `child_path(child, parent)` when the path `child` is the direct child of
+ /// `parent`, e.g. `child_path(x.y, x)`, but not `child_path(x.y.z, x)`.
+ pub child_path: Vec<(T::Path, T::Path)>,
+
+ /// `path_is_var(path, var)` when the root path `path` starts at the variable `var`.
+ pub path_is_var: Vec<(T::Path, T::Variable)>,
+
+ /// `path_assigned_at_base(path, point)` when the `path` was initialized at point
+ /// `point`. This fact is only emitted for a prefix `path`, and not for the
+ /// implicit initialization of all of `path`'s children. E.g. a statement like
+ /// `x.y = 3` at `point` would give the fact `path_assigned_at_base(x.y, point)` (but
+ /// neither `path_assigned_at_base(x.y.z, point)` nor `path_assigned_at_base(x, point)`).
+ pub path_assigned_at_base: Vec<(T::Path, T::Point)>,
+
+ /// `path_moved_at_base(path, point)` when the `path` was moved at `point`. The
+ /// same logic is applied as for `path_assigned_at_base` above.
+ pub path_moved_at_base: Vec<(T::Path, T::Point)>,
+
+ /// `path_accessed_at_base(path, point)` when the `path` was accessed at point
+ /// `point`. The same logic as for `path_assigned_at_base` and `path_moved_at_base` applies.
+ pub path_accessed_at_base: Vec<(T::Path, T::Point)>,
+
+ /// These reflect the `'a: 'b` relations that are either declared by the user on function
+ /// declarations or inferred via implied bounds.
+ /// For example: `fn foo<'a, 'b: 'a, 'c>(x: &'c &'a u32)` would have two entries:
+ /// - one for the user-supplied subset `'b: 'a`
+ /// - and one for the `'a: 'c` implied bound from the `x` parameter,
+ /// (note that the transitive relation `'b: 'c` is not necessarily included
+ /// explicitly, but rather inferred by polonius).
+ pub known_placeholder_subset: Vec<(T::Origin, T::Origin)>,
+
+ /// `placeholder(origin, loan)` describes a placeholder `origin`, with its associated
+ /// placeholder `loan`.
+ pub placeholder: Vec<(T::Origin, T::Loan)>,
+}
+
+impl<T: FactTypes> Default for AllFacts<T> {
+ fn default() -> Self {
+ AllFacts {
+ loan_issued_at: Vec::default(),
+ universal_region: Vec::default(),
+ cfg_edge: Vec::default(),
+ loan_killed_at: Vec::default(),
+ subset_base: Vec::default(),
+ loan_invalidated_at: Vec::default(),
+ var_used_at: Vec::default(),
+ var_defined_at: Vec::default(),
+ var_dropped_at: Vec::default(),
+ use_of_var_derefs_origin: Vec::default(),
+ drop_of_var_derefs_origin: Vec::default(),
+ child_path: Vec::default(),
+ path_is_var: Vec::default(),
+ path_assigned_at_base: Vec::default(),
+ path_moved_at_base: Vec::default(),
+ path_accessed_at_base: Vec::default(),
+ known_placeholder_subset: Vec::default(),
+ placeholder: Vec::default(),
+ }
+ }
+}
+
+pub trait Atom:
+ From<usize> + Into<usize> + Copy + Clone + Debug + Eq + Ord + Hash + 'static
+{
+ fn index(self) -> usize;
+}
+
+pub trait FactTypes: Copy + Clone + Debug {
+ type Origin: Atom;
+ type Loan: Atom;
+ type Point: Atom;
+ type Variable: Atom;
+ type Path: Atom;
+}
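
As an illustration of the two traits above, here is a minimal sketch (not part of this commit's files) of how a downstream consumer might satisfy `Atom` and `FactTypes` with plain `usize` newtypes; the `index_type!` macro and the `My*` names are hypothetical.

use polonius_engine::{Atom, FactTypes};

macro_rules! index_type {
    ($name:ident) => {
        #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
        struct $name(usize);
        impl From<usize> for $name {
            fn from(i: usize) -> Self {
                $name(i)
            }
        }
        impl From<$name> for usize {
            fn from(v: $name) -> usize {
                v.0
            }
        }
        impl Atom for $name {
            fn index(self) -> usize {
                self.0
            }
        }
    };
}

index_type!(MyOrigin);
index_type!(MyLoan);
index_type!(MyPoint);
index_type!(MyVariable);
index_type!(MyPath);

// A zero-sized marker type ties the five atom types together.
#[derive(Copy, Clone, Debug)]
struct MyFacts;

impl FactTypes for MyFacts {
    type Origin = MyOrigin;
    type Loan = MyLoan;
    type Point = MyPoint;
    type Variable = MyVariable;
    type Path = MyPath;
}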
diff --git a/vendor/polonius-engine/src/lib.rs b/vendor/polonius-engine/src/lib.rs
new file mode 100644
index 000000000..0926be8f5
--- /dev/null
+++ b/vendor/polonius-engine/src/lib.rs
@@ -0,0 +1,16 @@
+/// Contains the core of the Polonius borrow checking engine.
+/// Input is fed in via AllFacts, and outputs are returned via Output
+extern crate datafrog;
+#[macro_use]
+extern crate log;
+extern crate rustc_hash;
+
+mod facts;
+mod output;
+
+// Reexports of facts
+pub use facts::AllFacts;
+pub use facts::Atom;
+pub use facts::FactTypes;
+pub use output::Algorithm;
+pub use output::Output;
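
A hypothetical sketch of driving the re-exported API end to end, assuming the `MyFacts` newtypes sketched after `facts.rs` above; the `run_borrowck` function and its logging are illustrative only, not part of this crate.

use polonius_engine::{Algorithm, AllFacts, Output};

fn run_borrowck(facts: &AllFacts<MyFacts>) {
    // `Algorithm` implements `FromStr` (case-insensitive), so a variant can be
    // selected by name, e.g. from a command-line flag.
    let algorithm: Algorithm = "hybrid".parse().unwrap();

    // `dump_enabled = false` leaves the debug-only maps in `Output` empty.
    let output = Output::compute(facts, algorithm, false);

    for (point, loans) in &output.errors {
        eprintln!("invalidated loans {:?} are live at {:?}", loans, point);
    }
    for (point, paths) in &output.move_errors {
        eprintln!("possibly-moved paths {:?} are accessed at {:?}", paths, point);
    }
}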
diff --git a/vendor/polonius-engine/src/output/datafrog_opt.rs b/vendor/polonius-engine/src/output/datafrog_opt.rs
new file mode 100644
index 000000000..da9c343cc
--- /dev/null
+++ b/vendor/polonius-engine/src/output/datafrog_opt.rs
@@ -0,0 +1,495 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use datafrog::{Iteration, Relation, RelationLeaper};
+use std::time::Instant;
+
+use crate::facts::FactTypes;
+use crate::output::{Context, Output};
+
+pub(super) fn compute<T: FactTypes>(
+ ctx: &Context<'_, T>,
+ result: &mut Output<T>,
+) -> (
+ Relation<(T::Loan, T::Point)>,
+ Relation<(T::Origin, T::Origin, T::Point)>,
+) {
+ let timer = Instant::now();
+
+ let (errors, subset_errors) = {
+ // Static inputs
+ let origin_live_on_entry_rel = &ctx.origin_live_on_entry;
+ let cfg_edge_rel = &ctx.cfg_edge;
+ let loan_killed_at = &ctx.loan_killed_at;
+ let known_placeholder_subset = &ctx.known_placeholder_subset;
+ let placeholder_origin = &ctx.placeholder_origin;
+
+ // Create a new iteration context, ...
+ let mut iteration = Iteration::new();
+
+ // `loan_invalidated_at` facts, stored ready for joins
+ let loan_invalidated_at =
+ iteration.variable::<((T::Loan, T::Point), ())>("loan_invalidated_at");
+
+ // we need `origin_live_on_entry` in both variable and relation forms,
+ // (respectively, for join and antijoin).
+ let origin_live_on_entry_var =
+ iteration.variable::<((T::Origin, T::Point), ())>("origin_live_on_entry");
+
+ // `loan_issued_at` input but organized for join
+ let loan_issued_at_op =
+ iteration.variable::<((T::Origin, T::Point), T::Loan)>("loan_issued_at_op");
+
+ // .decl subset(origin1, origin2, point)
+ //
+ // Indicates that `origin1: origin2` at `point`.
+ let subset_o1p = iteration.variable::<((T::Origin, T::Point), T::Origin)>("subset_o1p");
+
+ // .decl origin_contains_loan_on_entry(origin, loan, point)
+ //
+ // At `point`, things with `origin` may depend on data from `loan`.
+ let origin_contains_loan_on_entry_op = iteration
+ .variable::<((T::Origin, T::Point), T::Loan)>("origin_contains_loan_on_entry_op");
+
+ // .decl loan_live_at(loan, point)
+ //
+ // True if the restrictions of the `loan` need to be enforced at `point`.
+ let loan_live_at = iteration.variable::<((T::Loan, T::Point), ())>("loan_live_at");
+
+ // .decl live_to_dying_regions(origin1, origin2, point1, point2)
+ //
+ // The origins `origin1` and `origin2` are "live to dead"
+ // on the edge `point1 -> point2` if:
+ //
+ // - In `point1`, `origin1` <= `origin2`
+ // - In `point2`, `origin1` is live but `origin2` is dead.
+ //
+ // In that case, `point2` would like to add all the
+ // live things reachable from `origin2` to `origin1`.
+ //
+ let live_to_dying_regions_o2pq = iteration
+ .variable::<((T::Origin, T::Point, T::Point), T::Origin)>("live_to_dying_regions_o2pq");
+
+ // .decl dying_region_requires((origin, point1, point2), loan)
+ //
+ // The `origin` requires `loan`, but the `origin` goes dead
+ // along the edge `point1 -> point2`.
+ let dying_region_requires = iteration
+ .variable::<((T::Origin, T::Point, T::Point), T::Loan)>("dying_region_requires");
+
+ // .decl dying_can_reach_origins(origin, point1, point2)
+ //
+ // Contains dead origins where we are interested
+ // in computing the transitive closure of things they
+ // can reach.
+ //
+ // FIXME: this relation was named before renaming the `regions` atoms to `origins`, and
+ // will need to be renamed to change "_origins" to "_ascendants", "_roots", etc.
+ let dying_can_reach_origins =
+ iteration.variable::<((T::Origin, T::Point), T::Point)>("dying_can_reach_origins");
+
+ // .decl dying_can_reach(origin1, origin2, point1, point2)
+ //
+ // Indicates that `origin1`, which is dead
+ // in `point2`, can reach `origin2` in `point1`.
+ //
+ // This is effectively the transitive subset
+ // relation, but we try to limit it to origins
+ // that are dying on the edge `point1 -> point2`.
+ let dying_can_reach_o2q =
+ iteration.variable::<((T::Origin, T::Point), (T::Origin, T::Point))>("dying_can_reach");
+ let dying_can_reach_1 = iteration.variable_indistinct("dying_can_reach_1");
+
+ // .decl dying_can_reach_live(origin1, origin2, point1, point2)
+ //
+ // Indicates that, along the edge `point1 -> point2`, the dead (in `point2`)
+ // `origin1` can reach the live (in `point2`) `origin2` via a subset
+ // relation. This is a subset of the full `dying_can_reach`
+ // relation where we filter down to those cases where `origin2` is
+ // live in `point2`.
+ let dying_can_reach_live = iteration
+ .variable::<((T::Origin, T::Point, T::Point), T::Origin)>("dying_can_reach_live");
+
+ // .decl dead_borrow_region_can_reach_root((origin, point), loan)
+ //
+ // Indicates a "borrow region" `origin` at `point` which is not live on
+ // entry to `point`.
+ let dead_borrow_region_can_reach_root = iteration
+ .variable::<((T::Origin, T::Point), T::Loan)>("dead_borrow_region_can_reach_root");
+
+ // .decl dead_borrow_region_can_reach_dead((origin2, point), loan)
+ let dead_borrow_region_can_reach_dead = iteration
+ .variable::<((T::Origin, T::Point), T::Loan)>("dead_borrow_region_can_reach_dead");
+ let dead_borrow_region_can_reach_dead_1 =
+ iteration.variable_indistinct("dead_borrow_region_can_reach_dead_1");
+
+ // .decl errors(loan, point)
+ let errors = iteration.variable("errors");
+ let subset_errors = iteration.variable::<(T::Origin, T::Origin, T::Point)>("subset_errors");
+
+ let subset_placeholder =
+ iteration.variable::<(T::Origin, T::Origin, T::Point)>("subset_placeholder");
+ let subset_placeholder_o2p = iteration.variable_indistinct("subset_placeholder_o2p");
+
+ // Make "variable" versions of the relations, needed for joins.
+ loan_issued_at_op.extend(
+ ctx.loan_issued_at
+ .iter()
+ .map(|&(origin, loan, point)| ((origin, point), loan)),
+ );
+ loan_invalidated_at.extend(
+ ctx.loan_invalidated_at
+ .iter()
+ .map(|&(loan, point)| ((loan, point), ())),
+ );
+ origin_live_on_entry_var.extend(
+ origin_live_on_entry_rel
+ .iter()
+ .map(|&(origin, point)| ((origin, point), ())),
+ );
+
+ // subset(origin1, origin2, point) :-
+ // subset_base(origin1, origin2, point).
+ subset_o1p.extend(
+ ctx.subset_base
+ .iter()
+ .map(|&(origin1, origin2, point)| ((origin1, point), origin2)),
+ );
+
+ // origin_contains_loan_on_entry(origin, loan, point) :-
+ // loan_issued_at(origin, loan, point).
+ origin_contains_loan_on_entry_op.extend(
+ ctx.loan_issued_at
+ .iter()
+ .map(|&(origin, loan, point)| ((origin, point), loan)),
+ );
+
+ // .. and then start iterating rules!
+ while iteration.changed() {
+ // Cleanup step: remove symmetries
+ // - remove origins which are `subset`s of themselves
+ //
+ // FIXME: investigate whether there is a better way to do that without complicating
+ // the rules too much, because it would also require temporary variables and
+ // impact performance. Until then, the big reduction in tuples improves performance
+ // a lot, even if we're potentially adding a small number of tuples
+ // per round just to remove them in the next round.
+ subset_o1p
+ .recent
+ .borrow_mut()
+ .elements
+ .retain(|&((origin1, _), origin2)| origin1 != origin2);
+
+ subset_placeholder
+ .recent
+ .borrow_mut()
+ .elements
+ .retain(|&(origin1, origin2, _)| origin1 != origin2);
+ subset_placeholder_o2p.from_map(&subset_placeholder, |&(origin1, origin2, point)| {
+ ((origin2, point), origin1)
+ });
+
+ // live_to_dying_regions(origin1, origin2, point1, point2) :-
+ // subset(origin1, origin2, point1),
+ // cfg_edge(point1, point2),
+ // origin_live_on_entry(origin1, point2),
+ // !origin_live_on_entry(origin2, point2).
+ live_to_dying_regions_o2pq.from_leapjoin(
+ &subset_o1p,
+ (
+ cfg_edge_rel.extend_with(|&((_, point1), _)| point1),
+ origin_live_on_entry_rel.extend_with(|&((origin1, _), _)| origin1),
+ origin_live_on_entry_rel.extend_anti(|&((_, _), origin2)| origin2),
+ ),
+ |&((origin1, point1), origin2), &point2| ((origin2, point1, point2), origin1),
+ );
+
+ // dying_region_requires((origin, point1, point2), loan) :-
+ // origin_contains_loan_on_entry(origin, loan, point1),
+ // !loan_killed_at(loan, point1),
+ // cfg_edge(point1, point2),
+ // !origin_live_on_entry(origin, point2).
+ dying_region_requires.from_leapjoin(
+ &origin_contains_loan_on_entry_op,
+ (
+ loan_killed_at.filter_anti(|&((_, point1), loan)| (loan, point1)),
+ cfg_edge_rel.extend_with(|&((_, point1), _)| point1),
+ origin_live_on_entry_rel.extend_anti(|&((origin, _), _)| origin),
+ ),
+ |&((origin, point1), loan), &point2| ((origin, point1, point2), loan),
+ );
+
+ // dying_can_reach_origins(origin2, point1, point2) :-
+ // live_to_dying_regions(_, origin2, point1, point2).
+ dying_can_reach_origins.from_map(
+ &live_to_dying_regions_o2pq,
+ |&((origin2, point1, point2), _origin1)| ((origin2, point1), point2),
+ );
+
+ // dying_can_reach_origins(origin, point1, point2) :-
+ // dying_region_requires(origin, point1, point2, _loan).
+ dying_can_reach_origins.from_map(
+ &dying_region_requires,
+ |&((origin, point1, point2), _loan)| ((origin, point1), point2),
+ );
+
+ // dying_can_reach(origin1, origin2, point1, point2) :-
+ // dying_can_reach_origins(origin1, point1, point2),
+ // subset(origin1, origin2, point1).
+ dying_can_reach_o2q.from_join(
+ &dying_can_reach_origins,
+ &subset_o1p,
+ |&(origin1, point1), &point2, &origin2| ((origin2, point2), (origin1, point1)),
+ );
+
+ // dying_can_reach(origin1, origin3, point1, point2) :-
+ // dying_can_reach(origin1, origin2, point1, point2),
+ // !origin_live_on_entry(origin2, point2),
+ // subset(origin2, origin3, point1).
+ //
+ // This is the "transitive closure" rule, but
+ // note that we only apply it when the
+ // "intermediate" `origin2` is dead at `point2`.
+ dying_can_reach_1.from_antijoin(
+ &dying_can_reach_o2q,
+ &origin_live_on_entry_rel,
+ |&(origin2, point2), &(origin1, point1)| ((origin2, point1), (origin1, point2)),
+ );
+ dying_can_reach_o2q.from_join(
+ &dying_can_reach_1,
+ &subset_o1p,
+ |&(_origin2, point1), &(origin1, point2), &origin3| {
+ ((origin3, point2), (origin1, point1))
+ },
+ );
+
+ // dying_can_reach_live(origin1, origin2, point1, point2) :-
+ // dying_can_reach(origin1, origin2, point1, point2),
+ // origin_live_on_entry(origin2, point2).
+ dying_can_reach_live.from_join(
+ &dying_can_reach_o2q,
+ &origin_live_on_entry_var,
+ |&(origin2, point2), &(origin1, point1), _| ((origin1, point1, point2), origin2),
+ );
+
+ // subset(origin1, origin2, point2) :-
+ // subset(origin1, origin2, point1),
+ // cfg_edge(point1, point2),
+ // origin_live_on_entry(origin1, point2),
+ // origin_live_on_entry(origin2, point2).
+ //
+ // Carry `origin1 <= origin2` from `point1` into `point2` if both `origin1` and
+ // `origin2` are live in `point2`.
+ subset_o1p.from_leapjoin(
+ &subset_o1p,
+ (
+ cfg_edge_rel.extend_with(|&((_, point1), _)| point1),
+ origin_live_on_entry_rel.extend_with(|&((origin1, _), _)| origin1),
+ origin_live_on_entry_rel.extend_with(|&((_, _), origin2)| origin2),
+ ),
+ |&((origin1, _point1), origin2), &point2| ((origin1, point2), origin2),
+ );
+
+ // subset(origin1, origin3, point2) :-
+ // live_to_dying_regions(origin1, origin2, point1, point2),
+ // dying_can_reach_live(origin2, origin3, point1, point2).
+ subset_o1p.from_join(
+ &live_to_dying_regions_o2pq,
+ &dying_can_reach_live,
+ |&(_origin2, _point1, point2), &origin1, &origin3| ((origin1, point2), origin3),
+ );
+
+ // origin_contains_loan_on_entry(origin2, loan, point2) :-
+ // dying_region_requires(origin1, loan, point1, point2),
+ // dying_can_reach_live(origin1, origin2, point1, point2).
+ //
+ // Communicate an `origin1 contains loan` relation across
+ // an edge `point1 -> point2` where `origin1` is dead in `point2`; in
+ // that case, for each origin `origin2` live in `point2`
+ // where `origin1 <= origin2` in `point1`, we add `origin2 contains loan`
+ // to `point2`.
+ origin_contains_loan_on_entry_op.from_join(
+ &dying_region_requires,
+ &dying_can_reach_live,
+ |&(_origin1, _point1, point2), &loan, &origin2| ((origin2, point2), loan),
+ );
+
+ // origin_contains_loan_on_entry(origin, loan, point2) :-
+ // origin_contains_loan_on_entry(origin, loan, point1),
+ // !loan_killed_at(loan, point1),
+ // cfg_edge(point1, point2),
+ // origin_live_on_entry(origin, point2).
+ origin_contains_loan_on_entry_op.from_leapjoin(
+ &origin_contains_loan_on_entry_op,
+ (
+ loan_killed_at.filter_anti(|&((_, point1), loan)| (loan, point1)),
+ cfg_edge_rel.extend_with(|&((_, point1), _)| point1),
+ origin_live_on_entry_rel.extend_with(|&((origin, _), _)| origin),
+ ),
+ |&((origin, _), loan), &point2| ((origin, point2), loan),
+ );
+
+ // dead_borrow_region_can_reach_root((origin, point), loan) :-
+ // loan_issued_at(origin, loan, point),
+ // !origin_live_on_entry(origin, point).
+ dead_borrow_region_can_reach_root.from_antijoin(
+ &loan_issued_at_op,
+ &origin_live_on_entry_rel,
+ |&(origin, point), &loan| ((origin, point), loan),
+ );
+
+ // dead_borrow_region_can_reach_dead((origin, point), loan) :-
+ // dead_borrow_region_can_reach_root((origin, point), loan).
+ dead_borrow_region_can_reach_dead
+ .from_map(&dead_borrow_region_can_reach_root, |&tuple| tuple);
+
+ // dead_borrow_region_can_reach_dead((origin2, point), loan) :-
+ // dead_borrow_region_can_reach_dead(origin1, loan, point),
+ // subset(origin1, origin2, point),
+ // !origin_live_on_entry(origin2, point).
+ dead_borrow_region_can_reach_dead_1.from_join(
+ &dead_borrow_region_can_reach_dead,
+ &subset_o1p,
+ |&(_origin1, point), &loan, &origin2| ((origin2, point), loan),
+ );
+ dead_borrow_region_can_reach_dead.from_antijoin(
+ &dead_borrow_region_can_reach_dead_1,
+ &origin_live_on_entry_rel,
+ |&(origin2, point), &loan| ((origin2, point), loan),
+ );
+
+ // loan_live_at(loan, point) :-
+ // origin_contains_loan_on_entry(origin, loan, point),
+ // origin_live_on_entry(origin, point).
+ loan_live_at.from_join(
+ &origin_contains_loan_on_entry_op,
+ &origin_live_on_entry_var,
+ |&(_origin, point), &loan, _| ((loan, point), ()),
+ );
+
+ // loan_live_at(loan, point) :-
+ // dead_borrow_region_can_reach_dead(origin1, loan, point),
+ // subset(origin1, origin2, point),
+ // origin_live_on_entry(origin2, point).
+ //
+ // NB: the datafrog code below uses
+ // `dead_borrow_region_can_reach_dead_1`, which is equal
+ // to `dead_borrow_region_can_reach_dead` and `subset`
+ // joined together.
+ loan_live_at.from_join(
+ &dead_borrow_region_can_reach_dead_1,
+ &origin_live_on_entry_var,
+ |&(_origin2, point), &loan, _| ((loan, point), ()),
+ );
+
+ // errors(loan, point) :-
+ // loan_invalidated_at(loan, point),
+ // loan_live_at(loan, point).
+ errors.from_join(
+ &loan_invalidated_at,
+ &loan_live_at,
+ |&(loan, point), _, _| (loan, point),
+ );
+
+ // subset_placeholder(Origin1, Origin2, Point) :-
+ // subset(Origin1, Origin2, Point),
+ // placeholder_origin(Origin1).
+ subset_placeholder.from_leapjoin(
+ &subset_o1p,
+ (
+ placeholder_origin.extend_with(|&((origin1, _point), _origin2)| origin1),
+ // remove symmetries:
+ datafrog::ValueFilter::from(|&((origin1, _point), origin2), _| {
+ origin1 != origin2
+ }),
+ ),
+ |&((origin1, point), origin2), _| (origin1, origin2, point),
+ );
+
+ // We compute the transitive closure of the placeholder origins, so we
+ // maintain the invariant from the rule above that `Origin1` is a placeholder origin.
+ //
+ // subset_placeholder(Origin1, Origin3, Point) :-
+ // subset_placeholder(Origin1, Origin2, Point),
+ // subset(Origin2, Origin3, Point).
+ subset_placeholder.from_join(
+ &subset_placeholder_o2p,
+ &subset_o1p,
+ |&(_origin2, point), &origin1, &origin3| (origin1, origin3, point),
+ );
+
+ // subset_error(Origin1, Origin2, Point) :-
+ // subset_placeholder(Origin1, Origin2, Point),
+ // placeholder_origin(Origin2),
+ // !known_placeholder_subset(Origin1, Origin2).
+ subset_errors.from_leapjoin(
+ &subset_placeholder,
+ (
+ placeholder_origin.extend_with(|&(_origin1, origin2, _point)| origin2),
+ known_placeholder_subset
+ .filter_anti(|&(origin1, origin2, _point)| (origin1, origin2)),
+ // remove symmetries:
+ datafrog::ValueFilter::from(|&(origin1, origin2, _point), _| {
+ origin1 != origin2
+ }),
+ ),
+ |&(origin1, origin2, point), _| (origin1, origin2, point),
+ );
+ }
+
+ if result.dump_enabled {
+ let subset_o1p = subset_o1p.complete();
+ assert!(
+ subset_o1p
+ .iter()
+ .filter(|&((origin1, _), origin2)| origin1 == origin2)
+ .count()
+ == 0,
+ "unwanted subset symmetries"
+ );
+ for &((origin1, location), origin2) in subset_o1p.iter() {
+ result
+ .subset
+ .entry(location)
+ .or_default()
+ .entry(origin1)
+ .or_default()
+ .insert(origin2);
+ }
+
+ let origin_contains_loan_on_entry_op = origin_contains_loan_on_entry_op.complete();
+ for &((origin, location), loan) in origin_contains_loan_on_entry_op.iter() {
+ result
+ .origin_contains_loan_at
+ .entry(location)
+ .or_default()
+ .entry(origin)
+ .or_default()
+ .insert(loan);
+ }
+
+ let loan_live_at = loan_live_at.complete();
+ for &((loan, location), _) in loan_live_at.iter() {
+ result.loan_live_at.entry(location).or_default().push(loan);
+ }
+ }
+
+ (errors.complete(), subset_errors.complete())
+ };
+
+ info!(
+ "analysis done: {} `errors` tuples, {} `subset_errors` tuples, {:?}",
+ errors.len(),
+ subset_errors.len(),
+ timer.elapsed()
+ );
+
+ (errors, subset_errors)
+}
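
The variant above is built entirely from datafrog's `Iteration`/`Variable`/`Relation` fixed-point machinery. As a standalone sketch of that basic join pattern, detached from any borrow-checking semantics, here is a transitive reachability computation over plain `u32` node indices (the `reachability` function is hypothetical):

use datafrog::{Iteration, Relation};

/// reachable(a, b) holds when there is a non-empty path from `a` to `b`.
fn reachability(edges: &[(u32, u32)]) -> Relation<(u32, u32)> {
    // Static input, keyed by source node, ready to be joined against.
    let edges_by_src: Relation<(u32, u32)> = edges.iter().copied().collect();

    let mut iteration = Iteration::new();
    let reachable = iteration.variable::<(u32, u32)>("reachable");
    // The same facts re-keyed by destination: `from_join` joins on the first
    // tuple element, so the key has to be the node we extend from.
    let reachable_by_dst = iteration.variable::<(u32, u32)>("reachable_by_dst");

    // reachable(a, b) :- edge(a, b).
    reachable.extend(edges.iter().copied());

    while iteration.changed() {
        reachable_by_dst.from_map(&reachable, |&(a, b)| (b, a));

        // reachable(a, c) :- reachable(a, b), edge(b, c).
        reachable.from_join(&reachable_by_dst, &edges_by_src, |&_b, &a, &c| (a, c));
    }

    reachable.complete()
}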
diff --git a/vendor/polonius-engine/src/output/initialization.rs b/vendor/polonius-engine/src/output/initialization.rs
new file mode 100644
index 000000000..30409d965
--- /dev/null
+++ b/vendor/polonius-engine/src/output/initialization.rs
@@ -0,0 +1,284 @@
+use std::time::Instant;
+
+use crate::facts::FactTypes;
+use crate::output::{InitializationContext, Output};
+
+use datafrog::{Iteration, Relation, RelationLeaper};
+
+// This represents the output of an intermediate elaboration step (step 1).
+struct TransitivePaths<T: FactTypes> {
+ path_moved_at: Relation<(T::Path, T::Point)>,
+ path_assigned_at: Relation<(T::Path, T::Point)>,
+ path_accessed_at: Relation<(T::Path, T::Point)>,
+ path_begins_with_var: Relation<(T::Path, T::Variable)>,
+}
+
+struct InitializationStatus<T: FactTypes> {
+ var_maybe_partly_initialized_on_exit: Relation<(T::Variable, T::Point)>,
+ move_error: Relation<(T::Path, T::Point)>,
+}
+
+pub(super) struct InitializationResult<T: FactTypes>(
+ pub(super) Relation<(T::Variable, T::Point)>,
+ pub(super) Relation<(T::Path, T::Point)>,
+);
+
+// Step 1: compute transitive closures of path operations. This would elaborate,
+// for example, an access to x into an access to x.f, x.f.0, etc. We do this for:
+// - access to a path
+// - initialization of a path
+// - moves of a path
+// FIXME: transitive rooting in a variable (path_begins_with_var)
+// Note that this step may not be entirely necessary!
+fn compute_transitive_paths<T: FactTypes>(
+ child_path: Vec<(T::Path, T::Path)>,
+ path_assigned_at_base: Vec<(T::Path, T::Point)>,
+ path_moved_at_base: Vec<(T::Path, T::Point)>,
+ path_accessed_at_base: Vec<(T::Path, T::Point)>,
+ path_is_var: Vec<(T::Path, T::Variable)>,
+) -> TransitivePaths<T> {
+ let mut iteration = Iteration::new();
+ let child_path: Relation<(T::Path, T::Path)> = child_path.into();
+
+ let ancestor_path = iteration.variable::<(T::Path, T::Path)>("ancestor");
+
+ // These are the actual targets:
+ let path_moved_at = iteration.variable::<(T::Path, T::Point)>("path_moved_at");
+ let path_assigned_at = iteration.variable::<(T::Path, T::Point)>("path_initialized_at");
+ let path_accessed_at = iteration.variable::<(T::Path, T::Point)>("path_accessed_at");
+ let path_begins_with_var = iteration.variable::<(T::Path, T::Variable)>("path_begins_with_var");
+
+ // ancestor_path(Parent, Child) :- child_path(Child, Parent).
+ ancestor_path.extend(child_path.iter().map(|&(child, parent)| (parent, child)));
+
+ // path_moved_at(Path, Point) :- path_moved_at_base(Path, Point).
+ path_moved_at.insert(path_moved_at_base.into());
+
+ // path_assigned_at(Path, Point) :- path_assigned_at_base(Path, Point).
+ path_assigned_at.insert(path_assigned_at_base.into());
+
+ // path_accessed_at(Path, Point) :- path_accessed_at_base(Path, Point).
+ path_accessed_at.insert(path_accessed_at_base.into());
+
+ // path_begins_with_var(Path, Var) :- path_is_var(Path, Var).
+ path_begins_with_var.insert(path_is_var.into());
+
+ while iteration.changed() {
+ // ancestor_path(Grandparent, Child) :-
+ // ancestor_path(Parent, Child),
+ // child_path(Parent, Grandparent).
+ ancestor_path.from_join(
+ &ancestor_path,
+ &child_path,
+ |&_parent, &child, &grandparent| (grandparent, child),
+ );
+
+ // moving a path moves its children
+ // path_moved_at(Child, Point) :-
+ // path_moved_at(Parent, Point),
+ // ancestor_path(Parent, Child).
+ path_moved_at.from_join(&path_moved_at, &ancestor_path, |&_parent, &p, &child| {
+ (child, p)
+ });
+
+ // initialising x at p initialises all of x's children
+ // path_assigned_at(Child, point) :-
+ // path_assigned_at(Parent, point),
+ // ancestor_path(Parent, Child).
+ path_assigned_at.from_join(&path_assigned_at, &ancestor_path, |&_parent, &p, &child| {
+ (child, p)
+ });
+
+ // accessing x at p accesses all of x's children at p (actually,
+ // accesses should be maximally precise and this shouldn't happen?)
+ // path_accessed_at(Child, point) :-
+ // path_accessed_at(Parent, point),
+ // ancestor_path(Parent, Child).
+ path_accessed_at.from_join(&path_accessed_at, &ancestor_path, |&_parent, &p, &child| {
+ (child, p)
+ });
+
+ // path_begins_with_var(Child, Var) :-
+ // path_begins_with_var(Parent, Var),
+ // ancestor_path(Parent, Child).
+ path_begins_with_var.from_join(
+ &path_begins_with_var,
+ &ancestor_path,
+ |&_parent, &var, &child| (child, var),
+ );
+ }
+
+ TransitivePaths {
+ path_assigned_at: path_assigned_at.complete(),
+ path_moved_at: path_moved_at.complete(),
+ path_accessed_at: path_accessed_at.complete(),
+ path_begins_with_var: path_begins_with_var.complete(),
+ }
+}
+
+// Step 2: Compute path initialization and deinitialization across the CFG.
+fn compute_move_errors<T: FactTypes>(
+ ctx: TransitivePaths<T>,
+ cfg_edge: &Relation<(T::Point, T::Point)>,
+ output: &mut Output<T>,
+) -> InitializationStatus<T> {
+ let mut iteration = Iteration::new();
+ // Variables
+
+ // var_maybe_partly_initialized_on_exit(var, point): Upon leaving `point`,
+ // `var` is partially initialized along some path through the CFG; that is,
+ // there has been an initialization of `var`, and `var` has not been moved on
+ // all paths through the CFG.
+ let var_maybe_partly_initialized_on_exit =
+ iteration.variable::<(T::Variable, T::Point)>("var_maybe_partly_initialized_on_exit");
+
+ // path_maybe_initialized_on_exit(path, point): Upon leaving `point`, the
+ // move path `path` is initialized for some path through the CFG.
+ let path_maybe_initialized_on_exit =
+ iteration.variable::<(T::Path, T::Point)>("path_maybe_initialized_on_exit");
+
+ // path_maybe_uninitialized_on_exit(Path, Point): There exists at least one
+ // path through the CFG to Point such that `Path` has been moved out by the
+ // time we arrive at `Point` without it being re-initialized for sure.
+ let path_maybe_uninitialized_on_exit =
+ iteration.variable::<(T::Path, T::Point)>("path_maybe_uninitialized_on_exit");
+
+ // move_error(Path, Point): There is an access to `Path` at `Point`, but
+ // `Path` is potentially moved (or never initialised).
+ let move_error = iteration.variable::<(T::Path, T::Point)>("move_error");
+
+ // Initial propagation of static relations
+
+ // path_maybe_initialized_on_exit(path, point) :- path_assigned_at(path, point).
+ path_maybe_initialized_on_exit.insert(ctx.path_assigned_at.clone());
+
+ // path_maybe_uninitialized_on_exit(path, point) :- path_moved_at(path, point).
+ path_maybe_uninitialized_on_exit.insert(ctx.path_moved_at.clone());
+
+ while iteration.changed() {
+ // path_maybe_initialized_on_exit(path, point2) :-
+ // path_maybe_initialized_on_exit(path, point1),
+ // cfg_edge(point1, point2),
+ // !path_moved_at(path, point2).
+ path_maybe_initialized_on_exit.from_leapjoin(
+ &path_maybe_initialized_on_exit,
+ (
+ cfg_edge.extend_with(|&(_path, point1)| point1),
+ ctx.path_moved_at.extend_anti(|&(path, _point1)| path),
+ ),
+ |&(path, _point1), &point2| (path, point2),
+ );
+
+ // path_maybe_uninitialized_on_exit(path, point2) :-
+ // path_maybe_uninitialized_on_exit(path, point1),
+ // cfg_edge(point1, point2),
+ // !path_assigned_at(path, point2).
+ path_maybe_uninitialized_on_exit.from_leapjoin(
+ &path_maybe_uninitialized_on_exit,
+ (
+ cfg_edge.extend_with(|&(_path, point1)| point1),
+ ctx.path_assigned_at.extend_anti(|&(path, _point1)| path),
+ ),
+ |&(path, _point1), &point2| (path, point2),
+ );
+
+ // var_maybe_partly_initialized_on_exit(var, point) :-
+ // path_maybe_initialized_on_exit(path, point),
+ // path_begins_with_var(path, var).
+ var_maybe_partly_initialized_on_exit.from_leapjoin(
+ &path_maybe_initialized_on_exit,
+ ctx.path_begins_with_var.extend_with(|&(path, _point)| path),
+ |&(_path, point), &var| (var, point),
+ );
+
+ // move_error(Path, TargetNode) :-
+ // path_maybe_uninitialized_on_exit(Path, SourceNode),
+ // cfg_edge(SourceNode, TargetNode),
+ // path_accessed_at(Path, TargetNode).
+ move_error.from_leapjoin(
+ &path_maybe_uninitialized_on_exit,
+ (
+ cfg_edge.extend_with(|&(_path, source_node)| source_node),
+ ctx.path_accessed_at
+ .extend_with(|&(path, _source_node)| path),
+ ),
+ |&(path, _source_node), &target_node| (path, target_node),
+ );
+ }
+
+ if output.dump_enabled {
+ for &(path, location) in path_maybe_initialized_on_exit.complete().iter() {
+ output
+ .path_maybe_initialized_on_exit
+ .entry(location)
+ .or_default()
+ .push(path);
+ }
+
+ for &(path, location) in path_maybe_uninitialized_on_exit.complete().iter() {
+ output
+ .path_maybe_uninitialized_on_exit
+ .entry(location)
+ .or_default()
+ .push(path);
+ }
+ }
+
+ InitializationStatus {
+ var_maybe_partly_initialized_on_exit: var_maybe_partly_initialized_on_exit.complete(),
+ move_error: move_error.complete(),
+ }
+}
+
+// Compute two things:
+//
+// - an over-approximation of the initialization of variables. This is used in
+// the origin_live_on_entry computations to determine when a drop may happen; a
+// definitely moved variable would not be actually dropped.
+// - move errors.
+//
+// The process is split into two stages:
+//
+// 1. Compute the transitive closure of path accesses. That is, accessing `f.a`
+// would access `f.a.b`, etc.
+// 2. Use this to compute both paths that may be initialized and paths that may
+// have been deinitialized, which in turn can be used to find move errors (an
+// access to a path that may be deinitialized).
+pub(super) fn compute<T: FactTypes>(
+ ctx: InitializationContext<T>,
+ cfg_edge: &Relation<(T::Point, T::Point)>,
+ output: &mut Output<T>,
+) -> InitializationResult<T> {
+ let timer = Instant::now();
+
+ let transitive_paths = compute_transitive_paths::<T>(
+ ctx.child_path,
+ ctx.path_assigned_at_base,
+ ctx.path_moved_at_base,
+ ctx.path_accessed_at_base,
+ ctx.path_is_var,
+ );
+ info!("initialization phase 1 completed: {:?}", timer.elapsed());
+
+ let InitializationStatus {
+ var_maybe_partly_initialized_on_exit,
+ move_error,
+ } = compute_move_errors::<T>(transitive_paths, cfg_edge, output);
+ info!(
+ "initialization phase 2: {} move errors in {:?}",
+ move_error.elements.len(),
+ timer.elapsed()
+ );
+
+ if output.dump_enabled {
+ for &(var, location) in var_maybe_partly_initialized_on_exit.iter() {
+ output
+ .var_maybe_partly_initialized_on_exit
+ .entry(location)
+ .or_default()
+ .push(var);
+ }
+ }
+
+ InitializationResult(var_maybe_partly_initialized_on_exit, move_error)
+}
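
To make the two steps concrete, here is a hypothetical toy input driven through the public `Output::compute` entry point (reusing the `MyFacts` newtypes sketched earlier): path 0 stands for `x`, path 1 for its field `x.f`, and the CFG has a single edge from point 0 to point 1. Moving `x` and then reading `x.f` should surface as a move error at point 1.

use polonius_engine::{Algorithm, AllFacts, Output};

fn toy_move_error() {
    let mut facts: AllFacts<MyFacts> = AllFacts::default();
    facts.cfg_edge.push((0.into(), 1.into()));
    facts.path_is_var.push((0.into(), 0.into())); // path `x` is rooted in variable `x`
    facts.child_path.push((1.into(), 0.into())); // `x.f` is a direct child of `x`
    facts.path_moved_at_base.push((0.into(), 0.into())); // `x` is moved at point 0
    facts.path_accessed_at_base.push((1.into(), 1.into())); // `x.f` is read at point 1

    let output = Output::compute(&facts, Algorithm::Naive, false);

    // Step 1 elaborates the move of `x` into a move of `x.f`; step 2 then sees
    // the later access, so we would expect a move error on `x.f` at point 1.
    assert!(!output.move_errors.is_empty());
}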
diff --git a/vendor/polonius-engine/src/output/liveness.rs b/vendor/polonius-engine/src/output/liveness.rs
new file mode 100644
index 000000000..1b4b4ce53
--- /dev/null
+++ b/vendor/polonius-engine/src/output/liveness.rs
@@ -0,0 +1,170 @@
+// Copyright 2019 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! An implementation of the origin liveness calculation logic
+
+use std::collections::BTreeSet;
+use std::time::Instant;
+
+use crate::facts::FactTypes;
+use crate::output::{LivenessContext, Output};
+
+use datafrog::{Iteration, Relation, RelationLeaper};
+
+pub(super) fn compute_live_origins<T: FactTypes>(
+ ctx: LivenessContext<T>,
+ cfg_edge: &Relation<(T::Point, T::Point)>,
+ var_maybe_partly_initialized_on_exit: Relation<(T::Variable, T::Point)>,
+ output: &mut Output<T>,
+) -> Vec<(T::Origin, T::Point)> {
+ let timer = Instant::now();
+ let mut iteration = Iteration::new();
+
+ // Relations
+ let var_defined_at: Relation<(T::Variable, T::Point)> = ctx.var_defined_at.into();
+ let cfg_edge_reverse: Relation<(T::Point, T::Point)> = cfg_edge
+ .iter()
+ .map(|&(point1, point2)| (point2, point1))
+ .collect();
+ let use_of_var_derefs_origin: Relation<(T::Variable, T::Origin)> =
+ ctx.use_of_var_derefs_origin.into();
+ let drop_of_var_derefs_origin: Relation<(T::Variable, T::Origin)> =
+ ctx.drop_of_var_derefs_origin.into();
+ let var_dropped_at: Relation<((T::Variable, T::Point), ())> = ctx
+ .var_dropped_at
+ .into_iter()
+ .map(|(var, point)| ((var, point), ()))
+ .collect();
+
+ // Variables
+
+ // `var_live_on_entry`: variable `var` is live upon entry at `point`
+ let var_live_on_entry = iteration.variable::<(T::Variable, T::Point)>("var_live_on_entry");
+ // `var_drop_live_on_entry`: variable `var` is drop-live (will be used for a drop) upon entry in `point`
+ let var_drop_live_on_entry =
+ iteration.variable::<(T::Variable, T::Point)>("var_drop_live_on_entry");
+
+ // This is what we are actually calculating:
+ let origin_live_on_entry = iteration.variable::<(T::Origin, T::Point)>("origin_live_on_entry");
+
+ // This propagates the relation `var_live_on_entry(var, point) :- var_used_at(var, point)`:
+ var_live_on_entry.insert(ctx.var_used_at.into());
+
+ // var_maybe_partly_initialized_on_entry(var, point2) :-
+ // var_maybe_partly_initialized_on_exit(var, point1),
+ // cfg_edge(point1, point2).
+ let var_maybe_partly_initialized_on_entry = Relation::from_leapjoin(
+ &var_maybe_partly_initialized_on_exit,
+ cfg_edge.extend_with(|&(_var, point1)| point1),
+ |&(var, _point1), &point2| ((var, point2), ()),
+ );
+
+ // var_drop_live_on_entry(var, point) :-
+ // var_dropped_at(var, point),
+ // var_maybe_partly_initialized_on_entry(var, point).
+ var_drop_live_on_entry.insert(Relation::from_join(
+ &var_dropped_at,
+ &var_maybe_partly_initialized_on_entry,
+ |&(var, point), _, _| (var, point),
+ ));
+
+ while iteration.changed() {
+ // origin_live_on_entry(origin, point) :-
+ // var_drop_live_on_entry(var, point),
+ // drop_of_var_derefs_origin(var, origin).
+ origin_live_on_entry.from_join(
+ &var_drop_live_on_entry,
+ &drop_of_var_derefs_origin,
+ |_var, &point, &origin| (origin, point),
+ );
+
+ // origin_live_on_entry(origin, point) :-
+ // var_live_on_entry(var, point),
+ // use_of_var_derefs_origin(var, origin).
+ origin_live_on_entry.from_join(
+ &var_live_on_entry,
+ &use_of_var_derefs_origin,
+ |_var, &point, &origin| (origin, point),
+ );
+
+ // var_live_on_entry(var, point1) :-
+ // var_live_on_entry(var, point2),
+ // cfg_edge(point1, point2),
+ // !var_defined_at(var, point1).
+ var_live_on_entry.from_leapjoin(
+ &var_live_on_entry,
+ (
+ var_defined_at.extend_anti(|&(var, _point2)| var),
+ cfg_edge_reverse.extend_with(|&(_var, point2)| point2),
+ ),
+ |&(var, _point2), &point1| (var, point1),
+ );
+
+ // var_drop_live_on_entry(Var, SourceNode) :-
+ // var_drop_live_on_entry(Var, TargetNode),
+ // cfg_edge(SourceNode, TargetNode),
+ // !var_defined_at(Var, SourceNode),
+ // var_maybe_partly_initialized_on_exit(Var, SourceNode).
+ var_drop_live_on_entry.from_leapjoin(
+ &var_drop_live_on_entry,
+ (
+ var_defined_at.extend_anti(|&(var, _target_node)| var),
+ cfg_edge_reverse.extend_with(|&(_var, target_node)| target_node),
+ var_maybe_partly_initialized_on_exit.extend_with(|&(var, _target_node)| var),
+ ),
+ |&(var, _targetnode), &source_node| (var, source_node),
+ );
+ }
+
+ let origin_live_on_entry = origin_live_on_entry.complete();
+
+ info!(
+ "compute_live_origins() completed: {} tuples, {:?}",
+ origin_live_on_entry.len(),
+ timer.elapsed(),
+ );
+
+ if output.dump_enabled {
+ let var_drop_live_on_entry = var_drop_live_on_entry.complete();
+ for &(var, location) in var_drop_live_on_entry.iter() {
+ output
+ .var_drop_live_on_entry
+ .entry(location)
+ .or_default()
+ .push(var);
+ }
+
+ let var_live_on_entry = var_live_on_entry.complete();
+ for &(var, location) in var_live_on_entry.iter() {
+ output
+ .var_live_on_entry
+ .entry(location)
+ .or_default()
+ .push(var);
+ }
+ }
+
+ origin_live_on_entry.elements
+}
+
+pub(super) fn make_universal_regions_live<T: FactTypes>(
+ origin_live_on_entry: &mut Vec<(T::Origin, T::Point)>,
+ cfg_node: &BTreeSet<T::Point>,
+ universal_regions: &[T::Origin],
+) {
+ debug!("make_universal_regions_live()");
+
+ origin_live_on_entry.reserve(universal_regions.len() * cfg_node.len());
+ for &origin in universal_regions.iter() {
+ for &point in cfg_node.iter() {
+ origin_live_on_entry.push((origin, point));
+ }
+ }
+}
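
The backward rule for `var_live_on_entry` above (walk the CFG in reverse and stop at redefinitions) is a common leapjoin shape in this crate. Here is a standalone sketch of the same `extend_with`/`extend_anti` combination over plain `u32` indices; the `backward_liveness` function and its relations are hypothetical:

use datafrog::{Iteration, Relation, RelationLeaper};

/// live(var, point) holds when `var` may still be read at `point` or later,
/// before being overwritten.
fn backward_liveness(
    cfg_edge: &[(u32, u32)],
    uses: &[(u32, u32)], // (var, point) where the variable is read
    defs: &[(u32, u32)], // (var, point) where the variable is overwritten
) -> Relation<(u32, u32)> {
    // Index edges by their *target* so the CFG can be walked backwards.
    let cfg_edge_reverse: Relation<(u32, u32)> =
        cfg_edge.iter().map(|&(p1, p2)| (p2, p1)).collect();
    // Keyed by variable, so `extend_anti` can rule out redefinition points.
    let defined_at: Relation<(u32, u32)> = defs.iter().copied().collect();

    let mut iteration = Iteration::new();
    let live_on_entry = iteration.variable::<(u32, u32)>("live_on_entry");

    // live(var, point) :- used(var, point).
    live_on_entry.extend(uses.iter().copied());

    while iteration.changed() {
        // live(var, point1) :-
        //     live(var, point2),
        //     cfg_edge(point1, point2),
        //     !defined(var, point1).
        live_on_entry.from_leapjoin(
            &live_on_entry,
            (
                cfg_edge_reverse.extend_with(|&(_var, point2)| point2),
                defined_at.extend_anti(|&(var, _point2)| var),
            ),
            |&(var, _point2), &point1| (var, point1),
        );
    }

    live_on_entry.complete()
}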
diff --git a/vendor/polonius-engine/src/output/location_insensitive.rs b/vendor/polonius-engine/src/output/location_insensitive.rs
new file mode 100644
index 000000000..83ce27757
--- /dev/null
+++ b/vendor/polonius-engine/src/output/location_insensitive.rs
@@ -0,0 +1,156 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use datafrog::{Iteration, Relation, RelationLeaper};
+use std::time::Instant;
+
+use crate::facts::FactTypes;
+use crate::output::{Context, Output};
+
+pub(super) fn compute<T: FactTypes>(
+ ctx: &Context<'_, T>,
+ result: &mut Output<T>,
+) -> (
+ Relation<(T::Loan, T::Point)>,
+ Relation<(T::Origin, T::Origin)>,
+) {
+ let timer = Instant::now();
+
+ let (potential_errors, potential_subset_errors) = {
+ // Static inputs
+ let origin_live_on_entry = &ctx.origin_live_on_entry;
+ let loan_invalidated_at = &ctx.loan_invalidated_at;
+ let placeholder_origin = &ctx.placeholder_origin;
+ let placeholder_loan = &ctx.placeholder_loan;
+ let known_contains = &ctx.known_contains;
+
+ // subset(Origin1, Origin2) :-
+ // subset_base(Origin1, Origin2, _).
+ let subset = Relation::from_iter(
+ ctx.subset_base
+ .iter()
+ .map(|&(origin1, origin2, _point)| (origin1, origin2)),
+ );
+
+ // Create a new iteration context, ...
+ let mut iteration = Iteration::new();
+
+ // .. some variables, ..
+ let origin_contains_loan_on_entry =
+ iteration.variable::<(T::Origin, T::Loan)>("origin_contains_loan_on_entry");
+
+ let potential_errors = iteration.variable::<(T::Loan, T::Point)>("potential_errors");
+ let potential_subset_errors =
+ iteration.variable::<(T::Origin, T::Origin)>("potential_subset_errors");
+
+ // load initial facts.
+
+ // origin_contains_loan_on_entry(Origin, Loan) :-
+ // loan_issued_at(Origin, Loan, _).
+ origin_contains_loan_on_entry.extend(
+ ctx.loan_issued_at
+ .iter()
+ .map(|&(origin, loan, _point)| (origin, loan)),
+ );
+
+ // origin_contains_loan_on_entry(Origin, Loan) :-
+ // placeholder_loan(Origin, Loan).
+ origin_contains_loan_on_entry.extend(
+ placeholder_loan
+ .iter()
+ .map(|&(loan, origin)| (origin, loan)),
+ );
+
+ // .. and then start iterating rules!
+ while iteration.changed() {
+ // origin_contains_loan_on_entry(Origin2, Loan) :-
+ // origin_contains_loan_on_entry(Origin1, Loan),
+ // subset(Origin1, Origin2).
+ //
+ // Note: Since `subset` is effectively a static input, this join can be ported to
+ // a leapjoin. Doing so, however, was 7% slower on `clap`.
+ origin_contains_loan_on_entry.from_join(
+ &origin_contains_loan_on_entry,
+ &subset,
+ |&_origin1, &loan, &origin2| (origin2, loan),
+ );
+
+ // loan_live_at(Loan, Point) :-
+ // origin_contains_loan_on_entry(Origin, Loan),
+ // origin_live_on_entry(Origin, Point)
+ //
+ // potential_errors(Loan, Point) :-
+ // loan_invalidated_at(Loan, Point),
+ // loan_live_at(Loan, Point).
+ //
+ // Note: we don't need to materialize `loan_live_at` here
+ // so we can inline it in the `potential_errors` relation.
+ //
+ potential_errors.from_leapjoin(
+ &origin_contains_loan_on_entry,
+ (
+ origin_live_on_entry.extend_with(|&(origin, _loan)| origin),
+ loan_invalidated_at.extend_with(|&(_origin, loan)| loan),
+ ),
+ |&(_origin, loan), &point| (loan, point),
+ );
+
+ // potential_subset_errors(Origin1, Origin2) :-
+ // placeholder(Origin1, Loan1),
+ // placeholder(Origin2, _),
+ // origin_contains_loan_on_entry(Origin2, Loan1),
+ // !known_contains(Origin2, Loan1).
+ potential_subset_errors.from_leapjoin(
+ &origin_contains_loan_on_entry,
+ (
+ known_contains.filter_anti(|&(origin2, loan1)| (origin2, loan1)),
+ placeholder_origin.filter_with(|&(origin2, _loan1)| (origin2, ())),
+ placeholder_loan.extend_with(|&(_origin2, loan1)| loan1),
+ // remove symmetries:
+ datafrog::ValueFilter::from(|&(origin2, _loan1), &origin1| origin2 != origin1),
+ ),
+ |&(origin2, _loan1), &origin1| (origin1, origin2),
+ );
+ }
+
+ if result.dump_enabled {
+ for &(origin1, origin2) in subset.iter() {
+ result
+ .subset_anywhere
+ .entry(origin1)
+ .or_default()
+ .insert(origin2);
+ }
+
+ let origin_contains_loan_on_entry = origin_contains_loan_on_entry.complete();
+ for &(origin, loan) in origin_contains_loan_on_entry.iter() {
+ result
+ .origin_contains_loan_anywhere
+ .entry(origin)
+ .or_default()
+ .insert(loan);
+ }
+ }
+
+ (
+ potential_errors.complete(),
+ potential_subset_errors.complete(),
+ )
+ };
+
+ info!(
+ "analysis done: {} `potential_errors` tuples, {} `potential_subset_errors` tuples, {:?}",
+ potential_errors.len(),
+ potential_subset_errors.len(),
+ timer.elapsed()
+ );
+
+ (potential_errors, potential_subset_errors)
+}
diff --git a/vendor/polonius-engine/src/output/mod.rs b/vendor/polonius-engine/src/output/mod.rs
new file mode 100644
index 000000000..b840e4bec
--- /dev/null
+++ b/vendor/polonius-engine/src/output/mod.rs
@@ -0,0 +1,614 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use datafrog::Relation;
+use rustc_hash::{FxHashMap, FxHashSet};
+use std::borrow::Cow;
+use std::collections::{BTreeMap, BTreeSet};
+
+use crate::facts::{AllFacts, Atom, FactTypes};
+
+mod datafrog_opt;
+mod initialization;
+mod liveness;
+mod location_insensitive;
+mod naive;
+
+#[derive(Debug, Clone, Copy)]
+pub enum Algorithm {
+ /// Simple rules, but slower to execute
+ Naive,
+
+ /// Optimized variant of the rules
+ DatafrogOpt,
+
+ /// Fast to compute, but imprecise: there can be false positives
+ /// but no false negatives. Tailored for quick "early return" situations.
+ LocationInsensitive,
+
+ /// Compares the `Naive` and `DatafrogOpt` variants to ensure they indeed
+ /// compute the same errors.
+ Compare,
+
+ /// Combination of the fast `LocationInsensitive` pre-pass, followed by
+ /// the more expensive `DatafrogOpt` variant.
+ Hybrid,
+}
+
+impl Algorithm {
+ /// Optimized variants that ought to be equivalent to "naive"
+ pub const OPTIMIZED: &'static [Algorithm] = &[Algorithm::DatafrogOpt];
+
+ pub fn variants() -> [&'static str; 5] {
+ [
+ "Naive",
+ "DatafrogOpt",
+ "LocationInsensitive",
+ "Compare",
+ "Hybrid",
+ ]
+ }
+}
+
+impl ::std::str::FromStr for Algorithm {
+ type Err = String;
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ match s.to_lowercase().as_ref() {
+ "naive" => Ok(Algorithm::Naive),
+ "datafrogopt" => Ok(Algorithm::DatafrogOpt),
+ "locationinsensitive" => Ok(Algorithm::LocationInsensitive),
+ "compare" => Ok(Algorithm::Compare),
+ "hybrid" => Ok(Algorithm::Hybrid),
+ _ => Err(String::from(
+ "valid values: Naive, DatafrogOpt, LocationInsensitive, Compare, Hybrid",
+ )),
+ }
+ }
+}
+
+#[derive(Clone, Debug)]
+pub struct Output<T: FactTypes> {
+ pub errors: FxHashMap<T::Point, Vec<T::Loan>>,
+ pub subset_errors: FxHashMap<T::Point, BTreeSet<(T::Origin, T::Origin)>>,
+ pub move_errors: FxHashMap<T::Point, Vec<T::Path>>,
+
+ pub dump_enabled: bool,
+
+ // these are just for debugging
+ pub loan_live_at: FxHashMap<T::Point, Vec<T::Loan>>,
+ pub origin_contains_loan_at: FxHashMap<T::Point, BTreeMap<T::Origin, BTreeSet<T::Loan>>>,
+ pub origin_contains_loan_anywhere: FxHashMap<T::Origin, BTreeSet<T::Loan>>,
+ pub origin_live_on_entry: FxHashMap<T::Point, Vec<T::Origin>>,
+ pub loan_invalidated_at: FxHashMap<T::Point, Vec<T::Loan>>,
+ pub subset: FxHashMap<T::Point, BTreeMap<T::Origin, BTreeSet<T::Origin>>>,
+ pub subset_anywhere: FxHashMap<T::Origin, BTreeSet<T::Origin>>,
+ pub var_live_on_entry: FxHashMap<T::Point, Vec<T::Variable>>,
+ pub var_drop_live_on_entry: FxHashMap<T::Point, Vec<T::Variable>>,
+ pub path_maybe_initialized_on_exit: FxHashMap<T::Point, Vec<T::Path>>,
+ pub path_maybe_uninitialized_on_exit: FxHashMap<T::Point, Vec<T::Path>>,
+ pub known_contains: FxHashMap<T::Origin, BTreeSet<T::Loan>>,
+ pub var_maybe_partly_initialized_on_exit: FxHashMap<T::Point, Vec<T::Variable>>,
+}
+
+/// Subset of `AllFacts` dedicated to initialization
+struct InitializationContext<T: FactTypes> {
+ child_path: Vec<(T::Path, T::Path)>,
+ path_is_var: Vec<(T::Path, T::Variable)>,
+ path_assigned_at_base: Vec<(T::Path, T::Point)>,
+ path_moved_at_base: Vec<(T::Path, T::Point)>,
+ path_accessed_at_base: Vec<(T::Path, T::Point)>,
+}
+
+/// Subset of `AllFacts` dedicated to liveness
+struct LivenessContext<T: FactTypes> {
+ var_used_at: Vec<(T::Variable, T::Point)>,
+ var_defined_at: Vec<(T::Variable, T::Point)>,
+ var_dropped_at: Vec<(T::Variable, T::Point)>,
+ use_of_var_derefs_origin: Vec<(T::Variable, T::Origin)>,
+ drop_of_var_derefs_origin: Vec<(T::Variable, T::Origin)>,
+}
+
+/// Subset of `AllFacts` dedicated to borrow checking, and data ready to use by the variants
+struct Context<'ctx, T: FactTypes> {
+ // `Relation`s used as static inputs, by all variants
+ origin_live_on_entry: Relation<(T::Origin, T::Point)>,
+ loan_invalidated_at: Relation<(T::Loan, T::Point)>,
+
+ // static inputs used via `Variable`s, by all variants
+ subset_base: &'ctx Vec<(T::Origin, T::Origin, T::Point)>,
+ loan_issued_at: &'ctx Vec<(T::Origin, T::Loan, T::Point)>,
+
+ // static inputs used by variants other than `LocationInsensitive`
+ loan_killed_at: Relation<(T::Loan, T::Point)>,
+ known_contains: Relation<(T::Origin, T::Loan)>,
+ placeholder_origin: Relation<(T::Origin, ())>,
+ placeholder_loan: Relation<(T::Loan, T::Origin)>,
+
+ // The `known_placeholder_subset` relation in the facts does not necessarily contain all the
+ // transitive subsets. The transitive closure is always needed, so this version here is fully
+ // closed over.
+ known_placeholder_subset: Relation<(T::Origin, T::Origin)>,
+
+ // While this static input is unused by `LocationInsensitive`, it's depended on by
+ // initialization and liveness, so it is already computed by the time we get to borrow checking.
+ cfg_edge: Relation<(T::Point, T::Point)>,
+
+ // Partial results possibly used by other variants as input. Not currently used.
+ #[allow(dead_code)]
+ potential_errors: Option<FxHashSet<T::Loan>>,
+ #[allow(dead_code)]
+ potential_subset_errors: Option<Relation<(T::Origin, T::Origin)>>,
+}
+
+impl<T: FactTypes> Output<T> {
+ /// All variants require the same initial preparations, done in multiple
+ /// successive steps:
+ /// - compute initialization data
+ /// - compute liveness
+ /// - prepare static inputs as shared `Relation`s
+ /// - in cases where the `LocationInsensitive` variant is run as a filtering pre-pass,
+ /// partial results can also be stored in the context, so that the following
+ /// variant can use it to prune its own input data
+ pub fn compute(all_facts: &AllFacts<T>, algorithm: Algorithm, dump_enabled: bool) -> Self {
+ let mut result = Output::new(dump_enabled);
+
+ // TODO: remove all the cloning thereafter, but that needs to be done in concert with rustc
+
+ let cfg_edge = all_facts.cfg_edge.clone().into();
+
+ // 1) Initialization
+ let initialization_ctx = InitializationContext {
+ child_path: all_facts.child_path.clone(),
+ path_is_var: all_facts.path_is_var.clone(),
+ path_assigned_at_base: all_facts.path_assigned_at_base.clone(),
+ path_moved_at_base: all_facts.path_moved_at_base.clone(),
+ path_accessed_at_base: all_facts.path_accessed_at_base.clone(),
+ };
+
+ let initialization::InitializationResult::<T>(
+ var_maybe_partly_initialized_on_exit,
+ move_errors,
+ ) = initialization::compute(initialization_ctx, &cfg_edge, &mut result);
+
+ // FIXME: move errors should prevent the computation from continuing: we can't compute
+ // liveness and analyze loans accurately when there are move errors, and should
+ // return early here.
+ for &(path, location) in move_errors.iter() {
+ result.move_errors.entry(location).or_default().push(path);
+ }
+
+ // 2) Liveness
+ let liveness_ctx = LivenessContext {
+ var_used_at: all_facts.var_used_at.clone(),
+ var_defined_at: all_facts.var_defined_at.clone(),
+ var_dropped_at: all_facts.var_dropped_at.clone(),
+ use_of_var_derefs_origin: all_facts.use_of_var_derefs_origin.clone(),
+ drop_of_var_derefs_origin: all_facts.drop_of_var_derefs_origin.clone(),
+ };
+
+ let mut origin_live_on_entry = liveness::compute_live_origins(
+ liveness_ctx,
+ &cfg_edge,
+ var_maybe_partly_initialized_on_exit,
+ &mut result,
+ );
+
+ let cfg_node = cfg_edge
+ .iter()
+ .map(|&(point1, _)| point1)
+ .chain(cfg_edge.iter().map(|&(_, point2)| point2))
+ .collect();
+
+ liveness::make_universal_regions_live::<T>(
+ &mut origin_live_on_entry,
+ &cfg_node,
+ &all_facts.universal_region,
+ );
+
+ // 3) Borrow checking
+
+ // Prepare data as datafrog relations, ready to join.
+ //
+ // Note: if rustc and polonius had more interaction, we could also delay or avoid
+ // generating some of the facts that are now always present here. For example,
+ // the `LocationInsensitive` variant doesn't use the `loan_killed_at` relation, so we could
+ // technically delay computing and passing it from rustc, when using this or the `Hybrid`
+ // variants, to after the pre-pass has made sure we actually need to compute the full
+ // analysis. If these facts happened to be recorded in separate MIR walks, we might also
+ // avoid generating those facts.
+
+ let origin_live_on_entry = origin_live_on_entry.into();
+
+ // TODO: also flip the order of this relation's arguments in rustc
+ // from `loan_invalidated_at(point, loan)` to `loan_invalidated_at(loan, point)`.
+ // to avoid this allocation.
+ let loan_invalidated_at = Relation::from_iter(
+ all_facts
+ .loan_invalidated_at
+ .iter()
+ .map(|&(point, loan)| (loan, point)),
+ );
+
+ let loan_killed_at = all_facts.loan_killed_at.clone().into();
+
+ // `known_placeholder_subset` is a list of all the `'a: 'b` subset relations the user gave:
+ // it's not required to be transitive. `known_contains` is its transitive closure: a list
+ // of all the known placeholder loans that each of these placeholder origins contains.
+ // Given the `known_placeholder_subset`s `'a: 'b` and `'b: 'c`: in the `known_contains`
+ // relation, `'a` will also contain `'c`'s placeholder loan.
+ let known_placeholder_subset = all_facts.known_placeholder_subset.clone().into();
+ let known_contains =
+ Output::<T>::compute_known_contains(&known_placeholder_subset, &all_facts.placeholder);
+
+ // Fully close over the `known_placeholder_subset` relation.
+ let known_placeholder_subset =
+ Output::<T>::compute_known_placeholder_subset(&known_placeholder_subset);
+
+ let placeholder_origin: Relation<_> = Relation::from_iter(
+ all_facts
+ .universal_region
+ .iter()
+ .map(|&origin| (origin, ())),
+ );
+
+ let placeholder_loan = Relation::from_iter(
+ all_facts
+ .placeholder
+ .iter()
+ .map(|&(origin, loan)| (loan, origin)),
+ );
+
+ // Ask the variants to compute errors in their own way
+ let mut ctx = Context {
+ origin_live_on_entry,
+ loan_invalidated_at,
+ cfg_edge,
+ subset_base: &all_facts.subset_base,
+ loan_issued_at: &all_facts.loan_issued_at,
+ loan_killed_at,
+ known_contains,
+ known_placeholder_subset,
+ placeholder_origin,
+ placeholder_loan,
+ potential_errors: None,
+ potential_subset_errors: None,
+ };
+
+ let (errors, subset_errors) = match algorithm {
+ Algorithm::LocationInsensitive => {
+ let (potential_errors, potential_subset_errors) =
+ location_insensitive::compute(&ctx, &mut result);
+
+ // Note: the error location is meaningless for a location-insensitive
+ // subset error analysis. This is acceptable here as this variant is not meant
+ // to be used directly outside of debugging; the `Hybrid` variant will
+ // take advantage of its result.
+ let potential_subset_errors: Relation<(T::Origin, T::Origin, T::Point)> =
+ Relation::from_iter(
+ potential_subset_errors
+ .into_iter()
+ .map(|&(origin1, origin2)| (origin1, origin2, 0.into())),
+ );
+
+ (potential_errors, potential_subset_errors)
+ }
+ Algorithm::Naive => naive::compute(&ctx, &mut result),
+ Algorithm::DatafrogOpt => datafrog_opt::compute(&ctx, &mut result),
+ Algorithm::Hybrid => {
+ // Execute the fast `LocationInsensitive` computation as a pre-pass:
+ // if it finds no possible errors, we don't need to do the more complex
+ // computations as they won't find errors either, and we can return early.
+ let (potential_errors, potential_subset_errors) =
+ location_insensitive::compute(&ctx, &mut result);
+
+ if potential_errors.is_empty() && potential_subset_errors.is_empty() {
+ // There are no loan errors nor subset errors, so we can return early with
+ // empty error lists and avoid doing the heavy analysis.
+ (potential_errors, Vec::new().into())
+ } else {
+ // Record these potential errors as they can be used to limit the next
+ // variant's work to only these loans.
+ ctx.potential_errors =
+ Some(potential_errors.iter().map(|&(loan, _)| loan).collect());
+ ctx.potential_subset_errors = Some(potential_subset_errors);
+
+ datafrog_opt::compute(&ctx, &mut result)
+ }
+ }
+ Algorithm::Compare => {
+ // Ensure the `Naive` and `DatafrogOpt` errors are the same
+ let (naive_errors, naive_subset_errors) = naive::compute(&ctx, &mut result);
+ let (opt_errors, _) = datafrog_opt::compute(&ctx, &mut result);
+
+ // TODO: compare illegal subset relation errors here as well?
+
+ let mut naive_errors_by_point = FxHashMap::default();
+ for &(loan, point) in naive_errors.iter() {
+ naive_errors_by_point
+ .entry(point)
+ .or_insert_with(Vec::new)
+ .push(loan);
+ }
+
+ let mut opt_errors_by_point = FxHashMap::default();
+ for &(loan, point) in opt_errors.iter() {
+ opt_errors_by_point
+ .entry(point)
+ .or_insert_with(Vec::new)
+ .push(loan);
+ }
+
+ if compare_errors(&naive_errors_by_point, &opt_errors_by_point) {
+ panic!(concat!(
+ "The errors reported by the naive algorithm differ from ",
+ "the errors reported by the optimized algorithm. ",
+ "See the error log for details."
+ ));
+ } else {
+ debug!("Naive and optimized algorithms reported the same errors.");
+ }
+
+ (naive_errors, naive_subset_errors)
+ }
+ };
+
+ // Record illegal access errors
+ for &(loan, location) in errors.iter() {
+ result.errors.entry(location).or_default().push(loan);
+ }
+
+ // Record illegal subset errors
+ for &(origin1, origin2, location) in subset_errors.iter() {
+ result
+ .subset_errors
+ .entry(location)
+ .or_default()
+ .insert((origin1, origin2));
+ }
+
+ // Record more debugging info when asked to do so
+ if dump_enabled {
+ for &(origin, location) in ctx.origin_live_on_entry.iter() {
+ result
+ .origin_live_on_entry
+ .entry(location)
+ .or_default()
+ .push(origin);
+ }
+
+ for &(origin, loan) in ctx.known_contains.iter() {
+ result
+ .known_contains
+ .entry(origin)
+ .or_default()
+ .insert(loan);
+ }
+ }
+
+ result
+ }
+
+ /// Propagates the placeholder loans transitively along the `known_placeholder_subset`
+ /// relation, so that we have the full list of placeholder loans contained by each
+ /// placeholder origin.
+ fn compute_known_contains(
+ known_placeholder_subset: &Relation<(T::Origin, T::Origin)>,
+ placeholder: &[(T::Origin, T::Loan)],
+ ) -> Relation<(T::Origin, T::Loan)> {
+ let mut iteration = datafrog::Iteration::new();
+ let known_contains = iteration.variable("known_contains");
+
+ // known_contains(Origin1, Loan1) :-
+ // placeholder(Origin1, Loan1).
+ known_contains.extend(placeholder.iter());
+
+ while iteration.changed() {
+ // known_contains(Origin2, Loan1) :-
+ // known_contains(Origin1, Loan1),
+ // known_placeholder_subset(Origin1, Origin2).
+ known_contains.from_join(
+ &known_contains,
+ known_placeholder_subset,
+ |&_origin1, &loan1, &origin2| (origin2, loan1),
+ );
+ }
+
+ known_contains.complete()
+ }
+
+ /// Computes the transitive closure of the `known_placeholder_subset` relation.
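+ ///
+ /// For example, from the base subsets `('a, 'b)` and `('b, 'c)`, the completed relation
+ /// will also contain `('a, 'c)`.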
+ fn compute_known_placeholder_subset(
+ known_placeholder_subset_base: &Relation<(T::Origin, T::Origin)>,
+ ) -> Relation<(T::Origin, T::Origin)> {
+ use datafrog::{Iteration, RelationLeaper};
+ let mut iteration = Iteration::new();
+
+ let known_placeholder_subset = iteration.variable("known_placeholder_subset");
+
+ // known_placeholder_subset(Origin1, Origin2) :-
+ // known_placeholder_subset_base(Origin1, Origin2).
+ known_placeholder_subset.extend(known_placeholder_subset_base.iter());
+
+ while iteration.changed() {
+ // known_placeholder_subset(Origin1, Origin3) :-
+ // known_placeholder_subset(Origin1, Origin2),
+ // known_placeholder_subset_base(Origin2, Origin3).
+ known_placeholder_subset.from_leapjoin(
+ &known_placeholder_subset,
+ known_placeholder_subset_base.extend_with(|&(_origin1, origin2)| origin2),
+ |&(origin1, _origin2), &origin3| (origin1, origin3),
+ );
+ }
+
+ known_placeholder_subset.complete()
+ }
+
+ fn new(dump_enabled: bool) -> Self {
+ Output {
+ errors: FxHashMap::default(),
+ subset_errors: FxHashMap::default(),
+ dump_enabled,
+ loan_live_at: FxHashMap::default(),
+ origin_contains_loan_at: FxHashMap::default(),
+ origin_contains_loan_anywhere: FxHashMap::default(),
+ origin_live_on_entry: FxHashMap::default(),
+ loan_invalidated_at: FxHashMap::default(),
+ move_errors: FxHashMap::default(),
+ subset: FxHashMap::default(),
+ subset_anywhere: FxHashMap::default(),
+ var_live_on_entry: FxHashMap::default(),
+ var_drop_live_on_entry: FxHashMap::default(),
+ path_maybe_initialized_on_exit: FxHashMap::default(),
+ path_maybe_uninitialized_on_exit: FxHashMap::default(),
+ var_maybe_partly_initialized_on_exit: FxHashMap::default(),
+ known_contains: FxHashMap::default(),
+ }
+ }
+
+ pub fn errors_at(&self, location: T::Point) -> &[T::Loan] {
+ match self.errors.get(&location) {
+ Some(v) => v,
+ None => &[],
+ }
+ }
+
+ pub fn loans_in_scope_at(&self, location: T::Point) -> &[T::Loan] {
+ match self.loan_live_at.get(&location) {
+ Some(p) => p,
+ None => &[],
+ }
+ }
+
+ pub fn origin_contains_loan_at(
+ &self,
+ location: T::Point,
+ ) -> Cow<'_, BTreeMap<T::Origin, BTreeSet<T::Loan>>> {
+ assert!(self.dump_enabled);
+ match self.origin_contains_loan_at.get(&location) {
+ Some(map) => Cow::Borrowed(map),
+ None => Cow::Owned(BTreeMap::default()),
+ }
+ }
+
+ pub fn origins_live_at(&self, location: T::Point) -> &[T::Origin] {
+ assert!(self.dump_enabled);
+ match self.origin_live_on_entry.get(&location) {
+ Some(v) => v,
+ None => &[],
+ }
+ }
+
+ pub fn subsets_at(
+ &self,
+ location: T::Point,
+ ) -> Cow<'_, BTreeMap<T::Origin, BTreeSet<T::Origin>>> {
+ assert!(self.dump_enabled);
+ match self.subset.get(&location) {
+ Some(v) => Cow::Borrowed(v),
+ None => Cow::Owned(BTreeMap::default()),
+ }
+ }
+}
+
+/// Compares the errors reported by the Naive implementation with the errors
+/// reported by the optimized implementation.
+fn compare_errors<Loan: Atom, Point: Atom>(
+ all_naive_errors: &FxHashMap<Point, Vec<Loan>>,
+ all_opt_errors: &FxHashMap<Point, Vec<Loan>>,
+) -> bool {
+ let points = all_naive_errors.keys().chain(all_opt_errors.keys());
+
+ let mut differ = false;
+ for point in points {
+ let mut naive_errors = all_naive_errors.get(&point).cloned().unwrap_or_default();
+ naive_errors.sort();
+
+ let mut opt_errors = all_opt_errors.get(&point).cloned().unwrap_or_default();
+ opt_errors.sort();
+
+ for err in naive_errors.iter() {
+ if !opt_errors.contains(err) {
+ error!(
+ "Error {0:?} at {1:?} reported by naive, but not opt.",
+ err, point
+ );
+ differ = true;
+ }
+ }
+
+ for err in opt_errors.iter() {
+ if !naive_errors.contains(err) {
+ error!(
+ "Error {0:?} at {1:?} reported by opt, but not naive.",
+ err, point
+ );
+ differ = true;
+ }
+ }
+ }
+
+ differ
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ impl Atom for usize {
+ fn index(self) -> usize {
+ self
+ }
+ }
+
+ fn compare(
+ errors1: &FxHashMap<usize, Vec<usize>>,
+ errors2: &FxHashMap<usize, Vec<usize>>,
+ ) -> bool {
+ let diff1 = compare_errors(errors1, errors2);
+ let diff2 = compare_errors(errors2, errors1);
+ assert_eq!(diff1, diff2);
+ diff1
+ }
+
+ #[test]
+ fn test_compare_errors() {
+ let empty = FxHashMap::default();
+ assert_eq!(false, compare(&empty, &empty));
+ let mut empty_vec = FxHashMap::default();
+ empty_vec.insert(1, vec![]);
+ empty_vec.insert(2, vec![]);
+ assert_eq!(false, compare(&empty, &empty_vec));
+
+ let mut singleton1 = FxHashMap::default();
+ singleton1.insert(1, vec![10]);
+ assert_eq!(false, compare(&singleton1, &singleton1));
+ let mut singleton2 = FxHashMap::default();
+ singleton2.insert(1, vec![11]);
+ assert_eq!(false, compare(&singleton2, &singleton2));
+ let mut singleton3 = FxHashMap::default();
+ singleton3.insert(2, vec![10]);
+ assert_eq!(false, compare(&singleton3, &singleton3));
+
+ assert_eq!(true, compare(&singleton1, &singleton2));
+ assert_eq!(true, compare(&singleton2, &singleton3));
+ assert_eq!(true, compare(&singleton1, &singleton3));
+
+ assert_eq!(true, compare(&empty, &singleton1));
+ assert_eq!(true, compare(&empty, &singleton2));
+ assert_eq!(true, compare(&empty, &singleton3));
+
+ let mut errors1 = FxHashMap::default();
+ errors1.insert(1, vec![11]);
+ errors1.insert(2, vec![10]);
+ assert_eq!(false, compare(&errors1, &errors1));
+ assert_eq!(true, compare(&errors1, &singleton1));
+ assert_eq!(true, compare(&errors1, &singleton2));
+ assert_eq!(true, compare(&errors1, &singleton3));
+ }
+}
diff --git a/vendor/polonius-engine/src/output/naive.rs b/vendor/polonius-engine/src/output/naive.rs
new file mode 100644
index 000000000..aa4204867
--- /dev/null
+++ b/vendor/polonius-engine/src/output/naive.rs
@@ -0,0 +1,299 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! A version of the Naive datalog analysis using Datafrog.
+
+use datafrog::{Iteration, Relation, RelationLeaper};
+use std::time::Instant;
+
+use crate::facts::FactTypes;
+use crate::output::{Context, Output};
+
+pub(super) fn compute<T: FactTypes>(
+ ctx: &Context<'_, T>,
+ result: &mut Output<T>,
+) -> (
+ Relation<(T::Loan, T::Point)>,
+ Relation<(T::Origin, T::Origin, T::Point)>,
+) {
+ let timer = Instant::now();
+
+ let (errors, subset_errors) = {
+ // Static inputs
+ let origin_live_on_entry_rel = &ctx.origin_live_on_entry;
+ let cfg_edge = &ctx.cfg_edge;
+ let loan_killed_at = &ctx.loan_killed_at;
+ let known_placeholder_subset = &ctx.known_placeholder_subset;
+ let placeholder_origin = &ctx.placeholder_origin;
+
+ // Create a new iteration context, ...
+ let mut iteration = Iteration::new();
+
+ // .. some variables, ..
+ let subset = iteration.variable::<(T::Origin, T::Origin, T::Point)>("subset");
+ let origin_contains_loan_on_entry =
+ iteration.variable::<(T::Origin, T::Loan, T::Point)>("origin_contains_loan_on_entry");
+ let loan_live_at = iteration.variable::<((T::Loan, T::Point), ())>("loan_live_at");
+
+ // `loan_invalidated_at` facts, stored ready for joins
+ let loan_invalidated_at = Relation::from_iter(
+ ctx.loan_invalidated_at
+ .iter()
+ .map(|&(loan, point)| ((loan, point), ())),
+ );
+
+ // different indices for `subset`.
+ let subset_o1p = iteration.variable_indistinct("subset_o1p");
+ let subset_o2p = iteration.variable_indistinct("subset_o2p");
+
+ // different index for `origin_contains_loan_on_entry`.
+ let origin_contains_loan_on_entry_op =
+ iteration.variable_indistinct("origin_contains_loan_on_entry_op");
+
+ // Unfortunately, we need `origin_live_on_entry` in both relation and variable forms:
+ // - `origin_live_on_entry` as a Relation, for the leapjoins in rules 3 & 6
+ // - `origin_live_on_entry` as a Variable, for the join in rule 7
+ //
+ // The leapjoins use `origin_live_on_entry` as `(Origin, Point)` tuples, while the join
+ // uses it as `((Origin, Point), ())` tuples to filter the `((Origin, Point), Loan)` tuples
+ // from `origin_contains_loan_on_entry_op`.
+ //
+ // The regular join in rule 7 could be turned into a `filter_with` leaper but that would
+ // result in a leapjoin with no `extend_*` leapers: a leapjoin that is not well-formed.
+ // Doing the filtering via an `extend_with` leaper would be extremely inefficient.
+ //
+ // Until there's an API in datafrog to handle this use-case better, we settle for the
+ // slightly less inefficient option of copying the whole static input into a Variable
+ // and using a regular join, even though the liveness information can be quite heavy
+ // (around 1M tuples on `clap`).
+ // Since this is the Naive variant, that's not a big problem, but it deserves an
+ // explanation.
+ let origin_live_on_entry_var =
+ iteration.variable::<((T::Origin, T::Point), ())>("origin_live_on_entry");
+ origin_live_on_entry_var.extend(
+ origin_live_on_entry_rel
+ .iter()
+ .map(|&(origin, point)| ((origin, point), ())),
+ );
+
+ // output relations: illegal access errors, and illegal subset relation errors
+ let errors = iteration.variable("errors");
+ let subset_errors = iteration.variable::<(T::Origin, T::Origin, T::Point)>("subset_errors");
+
+ // load initial facts:
+
+ // Rule 1: the initial subsets are the non-transitive `subset_base` static input.
+ //
+ // subset(Origin1, Origin2, Point) :-
+ // subset_base(Origin1, Origin2, Point).
+ subset.extend(ctx.subset_base.iter());
+
+ // Rule 4: the issuing origins are the ones initially containing loans.
+ //
+ // origin_contains_loan_on_entry(Origin, Loan, Point) :-
+ // loan_issued_at(Origin, Loan, Point).
+ origin_contains_loan_on_entry.extend(ctx.loan_issued_at.iter());
+
+ // .. and then start iterating rules!
+ while iteration.changed() {
+ // Cleanup step: remove symmetries
+ // - remove origins which are `subset`s of themselves
+ //
+ // FIXME: investigate whether there is a better way to do this without complicating
+ // the rules too much, because it would also require temporary variables and
+ // impact performance. Until then, the big reduction in tuples improves performance
+ // a lot, even if we're potentially adding a small number of tuples
+ // per round just to remove them in the next round.
+ subset
+ .recent
+ .borrow_mut()
+ .elements
+ .retain(|&(origin1, origin2, _)| origin1 != origin2);
+
+ // Remap fields to re-index by keys, to prepare the data needed by the rules below.
+ subset_o1p.from_map(&subset, |&(origin1, origin2, point)| {
+ ((origin1, point), origin2)
+ });
+ subset_o2p.from_map(&subset, |&(origin1, origin2, point)| {
+ ((origin2, point), origin1)
+ });
+
+ origin_contains_loan_on_entry_op
+ .from_map(&origin_contains_loan_on_entry, |&(origin, loan, point)| {
+ ((origin, point), loan)
+ });
+
+ // Rule 1: done above, as part of the static input facts setup.
+
+ // Rule 2: compute the subset transitive closure, at a given point.
+ //
+ // subset(Origin1, Origin3, Point) :-
+ // subset(Origin1, Origin2, Point),
+ // subset(Origin2, Origin3, Point).
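+ //
+ // The shared join key is the middle origin (and point): `subset_o2p` supplies
+ // `Origin1` as its value, and `subset_o1p` supplies `Origin3`.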
+ subset.from_join(
+ &subset_o2p,
+ &subset_o1p,
+ |&(_origin2, point), &origin1, &origin3| (origin1, origin3, point),
+ );
+
+ // Rule 3: propagate subsets along the CFG, according to liveness.
+ //
+ // subset(Origin1, Origin2, Point2) :-
+ // subset(Origin1, Origin2, Point1),
+ // cfg_edge(Point1, Point2),
+ // origin_live_on_entry(Origin1, Point2),
+ // origin_live_on_entry(Origin2, Point2).
+ subset.from_leapjoin(
+ &subset,
+ (
+ cfg_edge.extend_with(|&(_origin1, _origin2, point1)| point1),
+ origin_live_on_entry_rel.extend_with(|&(origin1, _origin2, _point1)| origin1),
+ origin_live_on_entry_rel.extend_with(|&(_origin1, origin2, _point1)| origin2),
+ ),
+ |&(origin1, origin2, _point1), &point2| (origin1, origin2, point2),
+ );
+
+ // Rule 4: done above as part of the static input facts setup.
+
+ // Rule 5: propagate loans within origins, at a given point, according to subsets.
+ //
+ // origin_contains_loan_on_entry(Origin2, Loan, Point) :-
+ // origin_contains_loan_on_entry(Origin1, Loan, Point),
+ // subset(Origin1, Origin2, Point).
+ origin_contains_loan_on_entry.from_join(
+ &origin_contains_loan_on_entry_op,
+ &subset_o1p,
+ |&(_origin1, point), &loan, &origin2| (origin2, loan, point),
+ );
+
+ // Rule 6: propagate loans along the CFG, according to liveness.
+ //
+ // origin_contains_loan_on_entry(Origin, Loan, Point2) :-
+ // origin_contains_loan_on_entry(Origin, Loan, Point1),
+ // !loan_killed_at(Loan, Point1),
+ // cfg_edge(Point1, Point2),
+ // origin_live_on_entry(Origin, Point2).
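+ //
+ // The `filter_anti` leaper implements the negated `!loan_killed_at(Loan, Point1)`
+ // atom: tuples whose loan is killed at `Point1` are dropped before being propagated
+ // along the CFG edge.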
+ origin_contains_loan_on_entry.from_leapjoin(
+ &origin_contains_loan_on_entry,
+ (
+ loan_killed_at.filter_anti(|&(_origin, loan, point1)| (loan, point1)),
+ cfg_edge.extend_with(|&(_origin, _loan, point1)| point1),
+ origin_live_on_entry_rel.extend_with(|&(origin, _loan, _point1)| origin),
+ ),
+ |&(origin, loan, _point1), &point2| (origin, loan, point2),
+ );
+
+ // Rule 7: compute whether a loan is live at a given point, i.e. whether it is
+ // contained in a live origin at this point.
+ //
+ // loan_live_at(Loan, Point) :-
+ // origin_contains_loan_on_entry(Origin, Loan, Point),
+ // origin_live_on_entry(Origin, Point).
+ loan_live_at.from_join(
+ &origin_contains_loan_on_entry_op,
+ &origin_live_on_entry_var,
+ |&(_origin, point), &loan, _| ((loan, point), ()),
+ );
+
+ // Rule 8: compute illegal access errors, i.e. an invalidation of a live loan.
+ //
+ // Here again, this join acts as a pure filter and could be a more efficient leapjoin.
+ // However, similarly to the `origin_live_on_entry` example described above, the
+ // leapjoin with a single `filter_with` leaper would currently not be well-formed.
+ // We don't explicitly need to materialize `loan_live_at` either, but not doing so
+ // wouldn't change the well-formedness situation, so we still materialize it (since
+ // that also helps in testing).
+ //
+ // errors(Loan, Point) :-
+ // loan_invalidated_at(Loan, Point),
+ // loan_live_at(Loan, Point).
+ errors.from_join(
+ &loan_live_at,
+ &loan_invalidated_at,
+ |&(loan, point), _, _| (loan, point),
+ );
+
+ // Rule 9: compute illegal subset relation errors, i.e. the undeclared subsets
+ // between two placeholder origins.
+ // Here as well, WF-ness prevents this join from being a filter-only leapjoin. It
+ // doesn't matter much, as `placeholder_origin` is a single-value relation.
+ //
+ // subset_error(Origin1, Origin2, Point) :-
+ // subset(Origin1, Origin2, Point),
+ // placeholder_origin(Origin1),
+ // placeholder_origin(Origin2),
+ // !known_placeholder_subset(Origin1, Origin2).
+ subset_errors.from_leapjoin(
+ &subset,
+ (
+ placeholder_origin.extend_with(|&(origin1, _origin2, _point)| origin1),
+ placeholder_origin.extend_with(|&(_origin1, origin2, _point)| origin2),
+ known_placeholder_subset
+ .filter_anti(|&(origin1, origin2, _point)| (origin1, origin2)),
+ // remove symmetries:
+ datafrog::ValueFilter::from(|&(origin1, origin2, _point), _| {
+ origin1 != origin2
+ }),
+ ),
+ |&(origin1, origin2, point), _| (origin1, origin2, point),
+ );
+ }
+
+ // Handle verbose output data
+ if result.dump_enabled {
+ let subset = subset.complete();
+ assert!(
+ subset
+ .iter()
+ .all(|&(origin1, origin2, _)| origin1 != origin2),
+ "unwanted subset symmetries"
+ );
+ for &(origin1, origin2, location) in subset.iter() {
+ result
+ .subset
+ .entry(location)
+ .or_default()
+ .entry(origin1)
+ .or_default()
+ .insert(origin2);
+ }
+
+ let origin_contains_loan_on_entry = origin_contains_loan_on_entry.complete();
+ for &(origin, loan, location) in origin_contains_loan_on_entry.iter() {
+ result
+ .origin_contains_loan_at
+ .entry(location)
+ .or_default()
+ .entry(origin)
+ .or_default()
+ .insert(loan);
+ }
+
+ let loan_live_at = loan_live_at.complete();
+ for &((loan, location), _) in loan_live_at.iter() {
+ result.loan_live_at.entry(location).or_default().push(loan);
+ }
+ }
+
+ (errors.complete(), subset_errors.complete())
+ };
+
+ info!(
+ "analysis done: {} `errors` tuples, {} `subset_errors` tuples, {:?}",
+ errors.len(),
+ subset_errors.len(),
+ timer.elapsed()
+ );
+
+ (errors, subset_errors)
+}