From 9835e2ae736235810b4ea1c162ca5e65c547e770 Mon Sep 17 00:00:00 2001 From: Daniel Baumann Date: Sat, 18 May 2024 04:49:50 +0200 Subject: Merging upstream version 1.71.1+dfsg1. Signed-off-by: Daniel Baumann --- vendor/varisat-checker/.cargo-checksum.json | 1 + vendor/varisat-checker/Cargo.toml | 54 ++ vendor/varisat-checker/LICENSE-APACHE | 201 ++++++++ vendor/varisat-checker/LICENSE-MIT | 25 + vendor/varisat-checker/README.md | 26 + vendor/varisat-checker/src/clauses.rs | 453 ++++++++++++++++ vendor/varisat-checker/src/context.rs | 46 ++ vendor/varisat-checker/src/hash.rs | 74 +++ vendor/varisat-checker/src/internal.rs | 33 ++ vendor/varisat-checker/src/lib.rs | 773 ++++++++++++++++++++++++++++ vendor/varisat-checker/src/processing.rs | 189 +++++++ vendor/varisat-checker/src/rup.rs | 203 ++++++++ vendor/varisat-checker/src/sorted_lits.rs | 56 ++ vendor/varisat-checker/src/state.rs | 645 +++++++++++++++++++++++ vendor/varisat-checker/src/tmp.rs | 8 + vendor/varisat-checker/src/transcript.rs | 133 +++++ vendor/varisat-checker/src/variables.rs | 195 +++++++ 17 files changed, 3115 insertions(+) create mode 100644 vendor/varisat-checker/.cargo-checksum.json create mode 100644 vendor/varisat-checker/Cargo.toml create mode 100644 vendor/varisat-checker/LICENSE-APACHE create mode 100644 vendor/varisat-checker/LICENSE-MIT create mode 100644 vendor/varisat-checker/README.md create mode 100644 vendor/varisat-checker/src/clauses.rs create mode 100644 vendor/varisat-checker/src/context.rs create mode 100644 vendor/varisat-checker/src/hash.rs create mode 100644 vendor/varisat-checker/src/internal.rs create mode 100644 vendor/varisat-checker/src/lib.rs create mode 100644 vendor/varisat-checker/src/processing.rs create mode 100644 vendor/varisat-checker/src/rup.rs create mode 100644 vendor/varisat-checker/src/sorted_lits.rs create mode 100644 vendor/varisat-checker/src/state.rs create mode 100644 vendor/varisat-checker/src/tmp.rs create mode 100644 
vendor/varisat-checker/src/transcript.rs create mode 100644 vendor/varisat-checker/src/variables.rs (limited to 'vendor/varisat-checker') diff --git a/vendor/varisat-checker/.cargo-checksum.json b/vendor/varisat-checker/.cargo-checksum.json new file mode 100644 index 000000000..fdc2bd82a --- /dev/null +++ b/vendor/varisat-checker/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{"Cargo.toml":"45120f1d09a8260f37c7e93c8afc570df96fac3161b2b772c2318d029d647929","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"2259cbb177661977dd5510c05a8670da3cb28aa7f4ff74c193798db496fe92d8","README.md":"b28a35365edd47295cec6a3e4a7f6af74f64c40b19e9d970bc71599f6e46a48b","src/clauses.rs":"5fcbf94f561fe867f129ed19135e223717cd2b8c5d9b338e9959ac3bb6a66c05","src/context.rs":"d732afeb3ea13335852b9602a77aa1cd32bdb25fd3065e486593ebd372021fd0","src/hash.rs":"28f0ddc880eded46cb610a5c2d7569d8ce399a83e0cbcd913b56916c5e939a3d","src/internal.rs":"31ce8adf386acf16ec4da8977cbc53737d51e06c1539c4127e2e40d28baa5944","src/lib.rs":"fa86fd787d6374efd28a0d978b439e708328c4c4c33663461a3c220389f2ae7b","src/processing.rs":"521cb1ee11c6013a5e0c428728351be7acea06034ad4111af640cb9873d4df43","src/rup.rs":"13e3f602cb82b0e5b46624b9a8d2885654994b95c38467e2d1c560e8945400ad","src/sorted_lits.rs":"8fa719cb20b7bf95dba4f79c61e6a1b3af6be090b9b1447e2f37435fa36f52a2","src/state.rs":"5b3fbec8527903b07c0c511d9105f58e8f307df773be2d77c34160313b15e2f7","src/tmp.rs":"e36934a28c62e3688538e0539b588a036341cbc6fe2343fba88330075976c4e0","src/transcript.rs":"3813033b74c5941831c05ecb31ff1cd1a28e1b3f8f4ec977eda75b8bcac82348","src/variables.rs":"097655ef6942f6d43ba31285e80f219d9fa69abe5b75f51af796ae2ae2d595af"},"package":"135c977c5913ed6e98f6b81b8e4d322211303b7d40dae773caef7ad1de6c763b"} \ No newline at end of file diff --git a/vendor/varisat-checker/Cargo.toml b/vendor/varisat-checker/Cargo.toml new file mode 100644 index 000000000..bec1e0525 --- /dev/null +++ b/vendor/varisat-checker/Cargo.toml 
@@ -0,0 +1,54 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g., crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +edition = "2018" +name = "varisat-checker" +version = "0.2.2" +authors = ["Jannis Harder "] +description = "Proof checker for proofs generate by the Varisat SAT solver" +homepage = "https://jix.one/project/varisat/" +readme = "README.md" +license = "MIT/Apache-2.0" +repository = "https://github.com/jix/varisat" +[dependencies.anyhow] +version = "1.0.32" + +[dependencies.log] +version = "0.4.6" + +[dependencies.partial_ref] +version = "0.3.1" + +[dependencies.rustc-hash] +version = "1.1.0" + +[dependencies.smallvec] +version = "1.4.2" + +[dependencies.thiserror] +version = "1.0.20" + +[dependencies.varisat-dimacs] +version = "=0.2.2" + +[dependencies.varisat-formula] +version = "=0.2.2" + +[dependencies.varisat-internal-proof] +version = "=0.2.2" +[dev-dependencies.proptest] +version = "0.10.1" + +[dev-dependencies.varisat-formula] +version = "=0.2.2" +features = ["internal-testing"] diff --git a/vendor/varisat-checker/LICENSE-APACHE b/vendor/varisat-checker/LICENSE-APACHE new file mode 100644 index 000000000..16fe87b06 --- /dev/null +++ b/vendor/varisat-checker/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the 
following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/vendor/varisat-checker/LICENSE-MIT b/vendor/varisat-checker/LICENSE-MIT new file mode 100644 index 000000000..4fd1cb2ff --- /dev/null +++ b/vendor/varisat-checker/LICENSE-MIT @@ -0,0 +1,25 @@ +Copyright (c) 2017-2019 Jannis Harder + +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/vendor/varisat-checker/README.md b/vendor/varisat-checker/README.md new file mode 100644 index 000000000..ee537014f --- /dev/null +++ b/vendor/varisat-checker/README.md @@ -0,0 +1,26 @@ +# Varisat - Checker + +Proof checker for proofs generate by the [Varisat SAT solver][crate-varisat]. + +The functionality of this crate is re-exported by the [main Varisat +crate][crate-varisat]. + +## License + +The Varisat source code is licensed under either of + + * Apache License, Version 2.0 + ([LICENSE-APACHE](LICENSE-APACHE) or + http://www.apache.org/licenses/LICENSE-2.0) + * MIT license + ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT) + +at your option. + +### Contribution + +Unless you explicitly state otherwise, any contribution intentionally submitted +for inclusion in Varisat by you, as defined in the Apache-2.0 license, shall be +dual licensed as above, without any additional terms or conditions. + +[crate-varisat]: https://crates.io/crates/varisat diff --git a/vendor/varisat-checker/src/clauses.rs b/vendor/varisat-checker/src/clauses.rs new file mode 100644 index 000000000..06f99f4fd --- /dev/null +++ b/vendor/varisat-checker/src/clauses.rs @@ -0,0 +1,453 @@ +//! Clause storage (unit and non-unit clauses). 
+use std::{convert::TryInto, mem::transmute}; + +use partial_ref::{partial, PartialRef}; +use rustc_hash::FxHashMap as HashMap; +use smallvec::SmallVec; + +use varisat_formula::{lit::LitIdx, Lit}; +use varisat_internal_proof::ClauseHash; + +use crate::{ + context::{parts::*, Context}, + processing::{process_step, CheckedProofStep}, + sorted_lits::copy_canonical, + variables::{ensure_sampling_var, ensure_var}, + CheckerError, +}; + +const INLINE_LITS: usize = 3; + +/// Literals of a clause, either inline or an index into a buffer +pub struct ClauseLits { + length: LitIdx, + inline: [LitIdx; INLINE_LITS], +} + +impl ClauseLits { + /// Create a new ClauseLits, storing them in the given buffer if necessary + fn new(lits: &[Lit], buffer: &mut Vec) -> ClauseLits { + let mut inline = [0; INLINE_LITS]; + let length = lits.len(); + + if length > INLINE_LITS { + inline[0] = buffer + .len() + .try_into() + .expect("exceeded maximal literal buffer size"); + buffer.extend(lits); + } else { + let lits = unsafe { + // Lit is a repr(transparent) wrapper of LitIdx + #[allow(clippy::transmute_ptr_to_ptr)] + transmute::<&[Lit], &[LitIdx]>(lits) + }; + inline[..length].copy_from_slice(lits); + } + + ClauseLits { + length: length as LitIdx, + inline, + } + } + + /// Returns the literals as a slice given a storage buffer + pub fn slice<'a, 'b, 'c>(&'a self, buffer: &'b [Lit]) -> &'c [Lit] + where + 'a: 'c, + 'b: 'c, + { + if self.length > INLINE_LITS as LitIdx { + &buffer[self.inline[0] as usize..][..self.length as usize] + } else { + unsafe { + // Lit is a repr(transparent) wrapper of LitIdx + #[allow(clippy::transmute_ptr_to_ptr)] + transmute::<&[LitIdx], &[Lit]>(&self.inline[..self.length as usize]) + } + } + } + + /// Literals stored in the literal buffer + fn buffer_used(&self) -> usize { + if self.length > INLINE_LITS as LitIdx { + self.length as usize + } else { + 0 + } + } +} + +/// Literals and metadata for non-unit clauses. +pub struct Clause { + /// LRAT clause id. 
+ pub id: u64, + /// How often the clause is present as irred., red. clause. + /// + /// For checking the formula is a multiset of clauses. This is necessary as the generating + /// solver might not check for duplicated clauses. + ref_count: [u32; 2], + /// Clause's literals. + pub lits: ClauseLits, +} + +/// Identifies the origin of a unit clause. +#[derive(Copy, Clone, Debug)] +pub enum UnitId { + Global(u64), + TracePos(usize), + InClause, +} + +/// Known unit clauses and metadata. +#[derive(Copy, Clone, Debug)] +pub struct UnitClause { + pub id: UnitId, + pub value: bool, +} + +/// Return type of [`store_clause`] +#[derive(Copy, Clone, PartialEq, Eq)] +pub enum StoreClauseResult { + New, + Duplicate, + NewlyIrredundant, +} + +/// Return type of [`delete_clause`] +#[derive(Copy, Clone, PartialEq, Eq)] +pub enum DeleteClauseResult { + Unchanged, + NewlyRedundant, + Removed, +} +/// Checker clause storage. +#[derive(Default)] +pub struct Clauses { + /// Next clause id to use. + pub next_clause_id: u64, + /// Literal storage for clauses, + pub literal_buffer: Vec, + /// Number of literals in the buffer which are from deleted clauses. + garbage_size: usize, + /// Stores all known non-unit clauses indexed by their hash. + pub clauses: HashMap>, + /// Stores known unit clauses and propagations during a clause check. + pub unit_clauses: Vec>, + /// This stores a conflict of input unit clauses. + /// + /// Our representation for unit clauses doesn't support conflicting units so this is used as a + /// workaround. + pub unit_conflict: Option<[u64; 2]>, +} + +impl Clauses { + /// Value of a literal if known from unit clauses. + pub fn lit_value(&self, lit: Lit) -> Option<(bool, UnitClause)> { + self.unit_clauses[lit.index()] + .map(|unit_clause| (unit_clause.value ^ lit.is_negative(), unit_clause)) + } +} + +/// Adds a clause to the checker. 
+pub fn add_clause<'a>( + mut ctx: partial!(Context<'a>, mut ClausesP, mut CheckerStateP, mut ProcessingP<'a>, mut TmpDataP, mut VariablesP, ClauseHasherP), + clause: &[Lit], +) -> Result<(), CheckerError> { + if ctx.part(CheckerStateP).unsat { + return Ok(()); + } + + let (tmp_data, mut ctx) = ctx.split_part_mut(TmpDataP); + + if copy_canonical(&mut tmp_data.tmp, clause) { + let (clauses, mut ctx) = ctx.split_part_mut(ClausesP); + process_step( + ctx.borrow(), + &CheckedProofStep::TautologicalClause { + id: clauses.next_clause_id, + clause: &tmp_data.tmp, + }, + )?; + clauses.next_clause_id += 1; + return Ok(()); + } + + for &lit in tmp_data.tmp.iter() { + ensure_sampling_var(ctx.borrow(), lit.var())?; + } + + let (id, added) = store_clause(ctx.borrow(), &tmp_data.tmp, false); + + let (clauses, mut ctx) = ctx.split_part_mut(ClausesP); + + match added { + StoreClauseResult::New => { + process_step( + ctx.borrow(), + &CheckedProofStep::AddClause { + id, + clause: &tmp_data.tmp, + }, + )?; + } + StoreClauseResult::NewlyIrredundant | StoreClauseResult::Duplicate => { + if let StoreClauseResult::NewlyIrredundant = added { + process_step( + ctx.borrow(), + &CheckedProofStep::MakeIrredundant { + id, + clause: &tmp_data.tmp, + }, + )?; + } + + process_step( + ctx.borrow(), + &CheckedProofStep::DuplicatedClause { + id: clauses.next_clause_id, + same_as_id: id, + clause: &tmp_data.tmp, + }, + )?; + // This is a duplicated clause. We want to ensure that the clause ids match the input + // order so we skip a clause id. + clauses.next_clause_id += 1; + } + } + + Ok(()) +} + +/// Adds a clause to the checker data structures. +/// +/// `lits` must be sorted and free of duplicates. +/// +/// Returns the id of the added clause and indicates whether the clause is new or changed from +/// redundant to irredundant. 
+pub fn store_clause( + mut ctx: partial!( + Context, + mut CheckerStateP, + mut ClausesP, + mut VariablesP, + ClauseHasherP + ), + lits: &[Lit], + redundant: bool, +) -> (u64, StoreClauseResult) { + for &lit in lits.iter() { + ensure_var(ctx.borrow(), lit.var()); + } + + match lits[..] { + [] => { + let id = ctx.part(ClausesP).next_clause_id; + ctx.part_mut(ClausesP).next_clause_id += 1; + + ctx.part_mut(CheckerStateP).unsat = true; + (id, StoreClauseResult::New) + } + [lit] => store_unit_clause(ctx.borrow(), lit), + _ => { + let hash = ctx.part(ClauseHasherP).clause_hash(&lits); + + let (clauses, mut ctx) = ctx.split_part_mut(ClausesP); + + let candidates = clauses.clauses.entry(hash).or_default(); + + for candidate in candidates.iter_mut() { + if candidate.lits.slice(&clauses.literal_buffer) == &lits[..] { + let result = if !redundant && candidate.ref_count[0] == 0 { + // first irredundant copy + StoreClauseResult::NewlyIrredundant + } else { + StoreClauseResult::Duplicate + }; + + let ref_count = &mut candidate.ref_count[redundant as usize]; + *ref_count = ref_count.checked_add(1).expect("ref_count overflow"); + return (candidate.id, result); + } + } + + let id = clauses.next_clause_id; + + let mut ref_count = [0, 0]; + ref_count[redundant as usize] += 1; + + candidates.push(Clause { + id, + ref_count, + lits: ClauseLits::new(&lits, &mut clauses.literal_buffer), + }); + + clauses.next_clause_id += 1; + + for &lit in lits.iter() { + ctx.part_mut(VariablesP).lit_data[lit.code()].clause_count += 1; + } + + (id, StoreClauseResult::New) + } + } +} + +/// Adds a unit clause to the checker data structures. +/// +/// Returns the id of the added clause and a boolean that is true if the clause wasn't already +/// present. +pub fn store_unit_clause( + mut ctx: partial!(Context, mut CheckerStateP, mut ClausesP), + lit: Lit, +) -> (u64, StoreClauseResult) { + match ctx.part(ClausesP).lit_value(lit) { + Some(( + true, + UnitClause { + id: UnitId::Global(id), + .. 
+ }, + )) => (id, StoreClauseResult::Duplicate), + Some(( + false, + UnitClause { + id: UnitId::Global(conflicting_id), + .. + }, + )) => { + ctx.part_mut(CheckerStateP).unsat = true; + let id = ctx.part(ClausesP).next_clause_id; + ctx.part_mut(ClausesP).unit_conflict = Some([conflicting_id, id]); + ctx.part_mut(ClausesP).next_clause_id += 1; + (id, StoreClauseResult::New) + } + Some(_) => unreachable!(), + None => { + let id = ctx.part(ClausesP).next_clause_id; + + ctx.part_mut(ClausesP).unit_clauses[lit.index()] = Some(UnitClause { + value: lit.is_positive(), + id: UnitId::Global(id), + }); + + ctx.part_mut(ClausesP).next_clause_id += 1; + + (id, StoreClauseResult::New) + } + } +} + +/// Delete a clause from the current formula. +/// +/// `lits` must be sorted and free of duplicates. +/// +/// Returns the id of the deleted clause and whether the ref_count (or irredundant ref_count) +/// became zero. +pub fn delete_clause( + mut ctx: partial!( + Context, + mut ClausesP, + mut VariablesP, + CheckerStateP, + ClauseHasherP + ), + lits: &[Lit], + redundant: bool, +) -> Result<(u64, DeleteClauseResult), CheckerError> { + if lits.len() < 2 { + return Err(CheckerError::check_failed( + ctx.part(CheckerStateP).step, + format!("delete of unit or empty clause {:?}", lits), + )); + } + + let hash = ctx.part(ClauseHasherP).clause_hash(lits); + + let clauses = ctx.part_mut(ClausesP); + + let candidates = clauses.clauses.entry(hash).or_default(); + + let mut found = false; + + let mut result = None; + + let literal_buffer = &clauses.literal_buffer; + let garbage_size = &mut clauses.garbage_size; + + candidates.retain(|candidate| { + if found || candidate.lits.slice(literal_buffer) != lits { + true + } else { + found = true; + let ref_count = &mut candidate.ref_count[redundant as usize]; + + if *ref_count == 0 { + true + } else { + *ref_count -= 1; + + if candidate.ref_count == [0, 0] { + *garbage_size += candidate.lits.buffer_used(); + result = Some((candidate.id, 
DeleteClauseResult::Removed)); + false + } else { + if !redundant && candidate.ref_count[0] == 0 { + result = Some((candidate.id, DeleteClauseResult::NewlyRedundant)); + } else { + result = Some((candidate.id, DeleteClauseResult::Unchanged)); + } + true + } + } + } + }); + + if candidates.is_empty() { + clauses.clauses.remove(&hash); + } + + if let Some((_, DeleteClauseResult::Removed)) = result { + for &lit in lits.iter() { + ctx.part_mut(VariablesP).lit_data[lit.code()].clause_count -= 1; + } + } + + if let Some(result) = result { + collect_garbage(ctx.borrow()); + return Ok(result); + } + + let msg = match (found, redundant) { + (false, _) => format!("delete of unknown clause {:?}", lits), + (_, true) => format!("delete of redundant clause {:?} which is irredundant", lits), + (_, false) => format!("delete of irredundant clause {:?} which is redundant", lits), + }; + Err(CheckerError::check_failed( + ctx.part(CheckerStateP).step, + msg, + )) +} + +/// Perform a garbage collection if required +fn collect_garbage(mut ctx: partial!(Context, mut ClausesP)) { + let clauses = ctx.part_mut(ClausesP); + if clauses.garbage_size * 2 <= clauses.literal_buffer.len() { + return; + } + + let mut new_buffer = vec![]; + + new_buffer.reserve(clauses.literal_buffer.len()); + + for (_, candidates) in clauses.clauses.iter_mut() { + for clause in candidates.iter_mut() { + let new_lits = + ClauseLits::new(clause.lits.slice(&clauses.literal_buffer), &mut new_buffer); + clause.lits = new_lits; + } + } + + clauses.literal_buffer = new_buffer; + clauses.garbage_size = 0; +} diff --git a/vendor/varisat-checker/src/context.rs b/vendor/varisat-checker/src/context.rs new file mode 100644 index 000000000..5cd7e807b --- /dev/null +++ b/vendor/varisat-checker/src/context.rs @@ -0,0 +1,46 @@ +//! Central checker data structure. 
+use partial_ref::{part, PartialRefTarget}; + +use crate::{ + clauses::Clauses, hash::ClauseHasher, processing::Processing, rup::RupCheck, + state::CheckerState, tmp::TmpData, variables::Variables, +}; + +/// Part declarations for the [`Context`] struct. +pub mod parts { + use super::*; + + part!(pub CheckerStateP: CheckerState); + part!(pub ClauseHasherP: ClauseHasher); + part!(pub ClausesP: Clauses); + part!(pub ProcessingP<'a>: Processing<'a>); + part!(pub RupCheckP: RupCheck); + part!(pub TmpDataP: TmpData); + part!(pub VariablesP: Variables); +} + +use parts::*; + +/// Central checker data structure. +/// +/// This struct contains all data kept by the checker. Most functions operating on multiple fields +/// of the context use partial references provided by the `partial_ref` crate. This documents the +/// data dependencies and makes the borrow checker happy without the overhead of passing individual +/// references. +#[derive(PartialRefTarget, Default)] +pub struct Context<'a> { + #[part(CheckerStateP)] + pub checker_state: CheckerState, + #[part(ClauseHasherP)] + pub clause_hasher: ClauseHasher, + #[part(ClausesP)] + pub clauses: Clauses, + #[part(ProcessingP<'a>)] + pub processing: Processing<'a>, + #[part(RupCheckP)] + pub rup_check: RupCheck, + #[part(TmpDataP)] + pub tmp_data: TmpData, + #[part(VariablesP)] + pub variables: Variables, +} diff --git a/vendor/varisat-checker/src/hash.rs b/vendor/varisat-checker/src/hash.rs new file mode 100644 index 000000000..40474ff8e --- /dev/null +++ b/vendor/varisat-checker/src/hash.rs @@ -0,0 +1,74 @@ +//! Computation of clause hashes. +use std::mem::take; + +use partial_ref::{partial, PartialRef}; +use rustc_hash::FxHashMap as HashMap; + +use varisat_formula::{Lit, Var}; +use varisat_internal_proof::{lit_code_hash, lit_hash, ClauseHash}; + +use crate::context::{parts::*, Context}; + +pub struct ClauseHasher { + /// How many bits are used for storing clause hashes. 
+ pub hash_bits: u32, + /// Changed solver names that are not yet reflected in the checkers current clause hashes. + pub buffered_solver_var_names: Vec<(Var, Option)>, + /// Does buffered_solver_var_names contain a new name? + /// + /// If it contains only deletes, there is no need to rehash + pub rename_in_buffered_solver_var_names: bool, + /// Current mapping from global var names to solver var names, used for hashing. + solver_var_names: HashMap, +} + +impl Default for ClauseHasher { + fn default() -> ClauseHasher { + ClauseHasher { + hash_bits: 64, + buffered_solver_var_names: vec![], + rename_in_buffered_solver_var_names: false, + solver_var_names: Default::default(), + } + } +} + +impl ClauseHasher { + /// Compute a clause hash of the current bit size + pub fn clause_hash(&self, lits: &[Lit]) -> ClauseHash { + let shift_bits = ClauseHash::max_value().count_ones() - self.hash_bits; + let mut hash = 0; + for &lit in lits.iter() { + match self.solver_var_names.get(&lit.var()) { + Some(var) => hash ^= lit_hash(var.lit(lit.is_positive())), + None => hash ^= lit_code_hash(lit.code() + Var::max_count() * 2), + } + } + hash >> shift_bits + } +} + +/// Recompute all clause hashes if necessary +pub fn rehash(mut ctx: partial!(Context, mut ClauseHasherP, mut ClausesP)) { + let (hasher, mut ctx) = ctx.split_part_mut(ClauseHasherP); + let clauses = ctx.part_mut(ClausesP); + + for (global, solver) in hasher.buffered_solver_var_names.drain(..) { + if let Some(solver) = solver { + hasher.solver_var_names.insert(global, solver); + } else { + hasher.solver_var_names.remove(&global); + } + } + hasher.rename_in_buffered_solver_var_names = false; + + let mut old_clauses = take(&mut clauses.clauses); + + for (_, mut candidates) in old_clauses.drain() { + for clause in candidates.drain(..) 
{ + let hash = hasher.clause_hash(clause.lits.slice(&clauses.literal_buffer)); + let candidates = clauses.clauses.entry(hash).or_default(); + candidates.push(clause); + } + } +} diff --git a/vendor/varisat-checker/src/internal.rs b/vendor/varisat-checker/src/internal.rs new file mode 100644 index 000000000..5c13f0a03 --- /dev/null +++ b/vendor/varisat-checker/src/internal.rs @@ -0,0 +1,33 @@ +//! Varisat internal interface used for on-the-fly checking. + +use partial_ref::{IntoPartialRefMut, PartialRef}; + +use varisat_internal_proof::ProofStep; + +use crate::{ + state::{check_step, process_unit_conflicts}, + Checker, CheckerError, +}; + +/// Varisat internal interface used for on-the-fly checking. +/// +/// This should only be used within other Varisat crates. It is not considered part of the public +/// API and may change at any time. +pub trait SelfChecker { + fn self_check_step(&mut self, step: ProofStep) -> Result<(), CheckerError>; + + fn self_check_delayed_steps(&mut self) -> Result<(), CheckerError>; +} + +impl<'a> SelfChecker for Checker<'a> { + fn self_check_step(&mut self, step: ProofStep) -> Result<(), CheckerError> { + self.ctx.checker_state.step += 1; + let mut ctx = self.ctx.into_partial_ref_mut(); + check_step(ctx.borrow(), step) + } + + fn self_check_delayed_steps(&mut self) -> Result<(), CheckerError> { + let mut ctx = self.ctx.into_partial_ref_mut(); + process_unit_conflicts(ctx.borrow()) + } +} diff --git a/vendor/varisat-checker/src/lib.rs b/vendor/varisat-checker/src/lib.rs new file mode 100644 index 000000000..22ff9f8ff --- /dev/null +++ b/vendor/varisat-checker/src/lib.rs @@ -0,0 +1,773 @@ +//! Proof checker for Varisat proofs. 
+ +use std::io; + +use anyhow::Error; +use partial_ref::{IntoPartialRefMut, PartialRef}; +use thiserror::Error; + +use varisat_dimacs::DimacsParser; +use varisat_formula::{CnfFormula, Lit}; + +pub mod internal; + +mod clauses; +mod context; +mod hash; +mod processing; +mod rup; +mod sorted_lits; +mod state; +mod tmp; +mod transcript; +mod variables; + +pub use processing::{ + CheckedProofStep, CheckedSamplingMode, CheckedUserVar, CheckerData, ProofProcessor, + ResolutionPropagations, +}; +pub use transcript::{ProofTranscriptProcessor, ProofTranscriptStep}; + +use clauses::add_clause; +use context::Context; +use state::check_proof; + +/// Possible errors while checking a varisat proof. +#[derive(Debug, Error)] +#[non_exhaustive] +pub enum CheckerError { + #[error("step {}: Unexpected end of proof file", step)] + ProofIncomplete { step: u64 }, + #[error("step {}: Error reading proof file: {}", step, cause)] + IoError { + step: u64, + #[source] + cause: io::Error, + }, + #[error("step {}: Could not parse proof step: {}", step, cause)] + ParseError { + step: u64, + #[source] + cause: Error, + }, + #[error("step {}: Checking proof failed: {}", step, msg)] + CheckFailed { + step: u64, + msg: String, + debug_step: String, + }, + #[error("Error in proof processor: {}", cause)] + ProofProcessorError { + #[source] + cause: Error, + }, +} + +impl CheckerError { + /// Generate a CheckFailed error with an empty debug_step + fn check_failed(step: u64, msg: String) -> CheckerError { + CheckerError::CheckFailed { + step, + msg, + debug_step: String::new(), + } + } +} + +/// A checker for unsatisfiability proofs in the native varisat format. +#[derive(Default)] +pub struct Checker<'a> { + ctx: Box>, +} + +impl<'a> Checker<'a> { + /// Create a new checker. + pub fn new() -> Checker<'a> { + Checker::default() + } + + /// Adds a clause to the checker. 
+ pub fn add_clause(&mut self, clause: &[Lit]) -> Result<(), CheckerError> { + let mut ctx = self.ctx.into_partial_ref_mut(); + add_clause(ctx.borrow(), clause) + } + + /// Add a formula to the checker. + pub fn add_formula(&mut self, formula: &CnfFormula) -> Result<(), CheckerError> { + for clause in formula.iter() { + self.add_clause(clause)?; + } + + Ok(()) + } + + /// Reads and adds a formula in DIMACS CNF format. + /// + /// Using this avoids creating a temporary [`CnfFormula`](varisat_formula::CnfFormula). + pub fn add_dimacs_cnf(&mut self, input: impl io::Read) -> Result<(), Error> { + let parser = DimacsParser::parse_incremental(input, |parser| { + Ok(self.add_formula(&parser.take_formula())?) + })?; + + log::info!( + "Parsed formula with {} variables and {} clauses", + parser.var_count(), + parser.clause_count() + ); + + Ok(()) + } + + /// Add a [`ProofProcessor`]. + /// + /// This has to be called before loading any clauses or checking any proofs. + pub fn add_processor(&mut self, processor: &'a mut dyn ProofProcessor) { + self.ctx.processing.processors.push(processor); + } + + /// Add a [`ProofTranscriptProcessor`]. + /// + /// This has to be called before loading any clauses or checking any proofs. + pub fn add_transcript(&mut self, processor: &'a mut dyn ProofTranscriptProcessor) { + self.ctx.processing.transcript_processors.push(processor); + } + + /// Checks a proof in the native Varisat format. + pub fn check_proof(&mut self, input: impl io::Read) -> Result<(), CheckerError> { + let mut ctx = self.ctx.into_partial_ref_mut(); + check_proof(ctx.borrow(), input) + } +} + +#[cfg(test)] +mod tests { + use super::{internal::SelfChecker, *}; + + use varisat_internal_proof::{DeleteClauseProof, ProofStep}; + + use varisat_formula::{cnf_formula, lits, Var}; + + fn expect_check_failed(result: Result<(), CheckerError>, contains: &str) { + match result { + Err(CheckerError::CheckFailed { ref msg, .. 
}) if msg.contains(contains) => (), + err => panic!("expected {:?} error but got {:?}", contains, err), + } + } + + #[test] + fn conflicting_units() { + let mut checker = Checker::new(); + + checker + .add_formula(&cnf_formula![ + 1; + -1; + ]) + .unwrap(); + + assert!(checker.ctx.checker_state.unsat); + } + + #[test] + fn invalid_delete() { + let mut checker = Checker::new(); + + checker + .add_formula(&cnf_formula![ + 1, 2, 3; + -4, 5; + ]) + .unwrap(); + + expect_check_failed( + checker.self_check_step(ProofStep::DeleteClause { + clause: &lits![-5, 4], + proof: DeleteClauseProof::Redundant, + }), + "unknown clause", + ); + } + + #[test] + fn ref_counts() { + let mut checker = Checker::new(); + + checker + .add_formula(&cnf_formula![ + 1, 2, 3; + 1, 2, 3; + 1; + ]) + .unwrap(); + + let lits = &lits![1, 2, 3][..]; + + checker + .self_check_step(ProofStep::DeleteClause { + clause: lits, + proof: DeleteClauseProof::Satisfied, + }) + .unwrap(); + + checker.add_clause(lits).unwrap(); + + checker + .self_check_step(ProofStep::DeleteClause { + clause: lits, + proof: DeleteClauseProof::Satisfied, + }) + .unwrap(); + + checker + .self_check_step(ProofStep::DeleteClause { + clause: lits, + proof: DeleteClauseProof::Satisfied, + }) + .unwrap(); + + expect_check_failed( + checker.self_check_step(ProofStep::DeleteClause { + clause: lits, + proof: DeleteClauseProof::Satisfied, + }), + "unknown clause", + ); + } + + #[test] + fn clause_not_found() { + let mut checker = Checker::new(); + checker + .add_formula(&cnf_formula![ + 1, 2, 3; + ]) + .unwrap(); + + expect_check_failed( + checker.self_check_step(ProofStep::AtClause { + redundant: false, + clause: [][..].into(), + propagation_hashes: [0][..].into(), + }), + "no clause found", + ) + } + + #[test] + fn clause_check_failed() { + let mut checker = Checker::new(); + checker + .add_formula(&cnf_formula![ + 1, 2, 3; + ]) + .unwrap(); + + expect_check_failed( + checker.self_check_step(ProofStep::AtClause { + redundant: false, + 
clause: [][..].into(), + propagation_hashes: [][..].into(), + }), + "AT check failed", + ) + } + + #[test] + fn add_derived_tautology() { + let mut checker = Checker::new(); + checker + .add_formula(&cnf_formula![ + 1, 2, 3; + ]) + .unwrap(); + + expect_check_failed( + checker.self_check_step(ProofStep::AtClause { + redundant: false, + clause: &lits![-3, 3], + propagation_hashes: &[], + }), + "tautology", + ) + } + + #[test] + fn delete_derived_tautology() { + let mut checker = Checker::new(); + checker + .add_formula(&cnf_formula![ + -3, 3; + ]) + .unwrap(); + + expect_check_failed( + checker.self_check_step(ProofStep::DeleteClause { + clause: &lits![-3, 3], + proof: DeleteClauseProof::Redundant, + }), + "tautology", + ) + } + + #[test] + fn delete_unit_clause() { + let mut checker = Checker::new(); + checker + .add_formula(&cnf_formula![ + 1; + ]) + .unwrap(); + + expect_check_failed( + checker.self_check_step(ProofStep::DeleteClause { + clause: &lits![1], + proof: DeleteClauseProof::Redundant, + }), + "delete of unit or empty clause", + ) + } + + #[test] + fn delete_clause_not_redundant() { + let mut checker = Checker::new(); + checker + .add_formula(&cnf_formula![ + 1, 2, 3; + ]) + .unwrap(); + + expect_check_failed( + checker.self_check_step(ProofStep::DeleteClause { + clause: &lits![1, 2, 3], + proof: DeleteClauseProof::Redundant, + }), + "is irredundant", + ) + } + + #[test] + fn delete_clause_not_satisfied() { + let mut checker = Checker::new(); + checker + .add_formula(&cnf_formula![ + 1, 2, 3; + -2; + 4; + ]) + .unwrap(); + + expect_check_failed( + checker.self_check_step(ProofStep::DeleteClause { + clause: &lits![1, 2, 3], + proof: DeleteClauseProof::Satisfied, + }), + "not satisfied", + ) + } + + #[test] + fn delete_clause_not_simplified() { + let mut checker = Checker::new(); + checker + .add_formula(&cnf_formula![ + 1, 2, 3; + -3, 4; + ]) + .unwrap(); + + let hashes = [ + checker.ctx.clause_hasher.clause_hash(&lits![1, 2, 3]), + 
checker.ctx.clause_hasher.clause_hash(&lits![-3, 4]), + ]; + + checker + .self_check_step(ProofStep::AtClause { + redundant: false, + clause: &lits![1, 2, 4], + propagation_hashes: &hashes[..], + }) + .unwrap(); + + expect_check_failed( + checker.self_check_step(ProofStep::DeleteClause { + clause: &lits![1, 2, 3], + proof: DeleteClauseProof::Simplified, + }), + "not subsumed", + ) + } + + #[test] + fn model_unit_conflict() { + let mut checker = Checker::new(); + checker + .add_formula(&cnf_formula![ + 1; + 2, 3; + ]) + .unwrap(); + + expect_check_failed( + checker.self_check_step(ProofStep::Model { + assignment: &lits![-1, 2, -3], + }), + "conflicts with unit clause", + ) + } + + #[test] + fn model_internal_conflict() { + let mut checker = Checker::new(); + checker + .add_formula(&cnf_formula![ + 2, 3; + ]) + .unwrap(); + + expect_check_failed( + checker.self_check_step(ProofStep::Model { + assignment: &lits![-1, 1, 2, -3], + }), + "conflicting assignment", + ) + } + + #[test] + fn model_clause_unsat() { + let mut checker = Checker::new(); + checker + .add_formula(&cnf_formula![ + 1, 2, 3; + -1, -2, 3; + 1, -2, -3; + ]) + .unwrap(); + + expect_check_failed( + checker.self_check_step(ProofStep::Model { + assignment: &lits![-1, 2, 3], + }), + "does not satisfy clause", + ) + } + #[test] + fn model_conflicts_assumptions() { + let mut checker = Checker::new(); + checker + .add_formula(&cnf_formula![ + 1, 2; + -1, 2; + ]) + .unwrap(); + + checker + .self_check_step(ProofStep::Assumptions { + assumptions: &lits![-2], + }) + .unwrap(); + + expect_check_failed( + checker.self_check_step(ProofStep::Model { + assignment: &lits![1, 2], + }), + "does not contain assumption", + ) + } + + #[test] + fn model_misses_assumption() { + let mut checker = Checker::new(); + checker + .add_formula(&cnf_formula![ + 1, 2; + -1, 2; + ]) + .unwrap(); + + checker + .self_check_step(ProofStep::Assumptions { + assumptions: &lits![-3], + }) + .unwrap(); + + expect_check_failed( + 
checker.self_check_step(ProofStep::Model { + assignment: &lits![1, 2], + }), + "does not contain assumption", + ) + } + + #[test] + fn failed_core_with_non_assumed_vars() { + let mut checker = Checker::new(); + checker + .add_formula(&cnf_formula![ + 1, 2; + -1, 2; + ]) + .unwrap(); + + checker + .self_check_step(ProofStep::Assumptions { + assumptions: &lits![-2], + }) + .unwrap(); + + expect_check_failed( + checker.self_check_step(ProofStep::FailedAssumptions { + failed_core: &lits![-2, -3], + propagation_hashes: &[], + }), + "contains non-assumed variables", + ) + } + + #[test] + fn failed_assumptions_with_missing_propagations() { + let mut checker = Checker::new(); + checker + .add_formula(&cnf_formula![ + 1, 2; + -1, 2; + -3, -2; + ]) + .unwrap(); + + checker + .self_check_step(ProofStep::Assumptions { + assumptions: &lits![3], + }) + .unwrap(); + + expect_check_failed( + checker.self_check_step(ProofStep::FailedAssumptions { + failed_core: &lits![3], + propagation_hashes: &[], + }), + "AT check failed", + ) + } + + #[test] + fn failed_assumptions_with_conflicting_assumptions() { + let mut checker = Checker::new(); + checker + .add_formula(&cnf_formula![ + 1, 2; + -1, 2; + -3, -2; + ]) + .unwrap(); + + checker + .self_check_step(ProofStep::Assumptions { + assumptions: &lits![3, -3, 4], + }) + .unwrap(); + + checker + .self_check_step(ProofStep::FailedAssumptions { + failed_core: &lits![3, -3], + propagation_hashes: &[], + }) + .unwrap(); + } + + #[test] + fn add_clause_to_non_sampling_var() { + let mut checker = Checker::new(); + + checker + .self_check_step(ProofStep::ChangeSamplingMode { + var: Var::from_dimacs(1), + sample: false, + }) + .unwrap(); + + expect_check_failed( + checker.self_check_step(ProofStep::AddClause { + clause: &lits![1, 2, 3], + }), + "not a sampling variable", + ) + } + + #[test] + fn add_clause_to_hidden_var() { + let mut checker = Checker::new(); + + checker + .self_check_step(ProofStep::UserVarName { + global: Var::from_dimacs(1), + 
user: Some(Var::from_dimacs(1)), + }) + .unwrap(); + + checker + .self_check_step(ProofStep::UserVarName { + global: Var::from_dimacs(1), + user: None, + }) + .unwrap(); + + expect_check_failed( + checker.self_check_step(ProofStep::AddClause { + clause: &lits![1, 2, 3], + }), + "not a sampling variable", + ) + } + + #[test] + fn colloding_user_vars() { + let mut checker = Checker::new(); + + checker + .self_check_step(ProofStep::UserVarName { + global: Var::from_dimacs(1), + user: Some(Var::from_dimacs(1)), + }) + .unwrap(); + + expect_check_failed( + checker.self_check_step(ProofStep::UserVarName { + global: Var::from_dimacs(2), + user: Some(Var::from_dimacs(1)), + }), + "used for two different variables", + ) + } + + #[test] + fn observe_without_setting_mode() { + let mut checker = Checker::new(); + + checker + .self_check_step(ProofStep::UserVarName { + global: Var::from_dimacs(1), + user: Some(Var::from_dimacs(1)), + }) + .unwrap(); + + checker + .self_check_step(ProofStep::UserVarName { + global: Var::from_dimacs(1), + user: None, + }) + .unwrap(); + + expect_check_failed( + checker.self_check_step(ProofStep::UserVarName { + global: Var::from_dimacs(1), + user: Some(Var::from_dimacs(1)), + }), + "still hidden", + ) + } + + #[test] + fn hide_hidden_var() { + let mut checker = Checker::new(); + + checker + .self_check_step(ProofStep::UserVarName { + global: Var::from_dimacs(1), + user: Some(Var::from_dimacs(1)), + }) + .unwrap(); + + checker + .self_check_step(ProofStep::UserVarName { + global: Var::from_dimacs(1), + user: None, + }) + .unwrap(); + + expect_check_failed( + checker.self_check_step(ProofStep::UserVarName { + global: Var::from_dimacs(1), + user: None, + }), + "no user name to remove", + ) + } + + #[test] + fn delete_user_var() { + let mut checker = Checker::new(); + + checker + .self_check_step(ProofStep::UserVarName { + global: Var::from_dimacs(1), + user: Some(Var::from_dimacs(1)), + }) + .unwrap(); + + expect_check_failed( + 
checker.self_check_step(ProofStep::DeleteVar { + var: Var::from_dimacs(1), + }), + "corresponds to user variable", + ) + } + + #[test] + fn delete_in_use_var() { + let mut checker = Checker::new(); + + checker + .self_check_step(ProofStep::UserVarName { + global: Var::from_dimacs(1), + user: Some(Var::from_dimacs(1)), + }) + .unwrap(); + + checker + .self_check_step(ProofStep::AddClause { + clause: &lits![1, 2, 3], + }) + .unwrap(); + + checker + .self_check_step(ProofStep::UserVarName { + global: Var::from_dimacs(1), + user: None, + }) + .unwrap(); + + expect_check_failed( + checker.self_check_step(ProofStep::DeleteVar { + var: Var::from_dimacs(1), + }), + "still has clauses", + ) + } + + #[test] + fn invalid_hidden_to_sample() { + let mut checker = Checker::new(); + + checker + .self_check_step(ProofStep::UserVarName { + global: Var::from_dimacs(1), + user: Some(Var::from_dimacs(1)), + }) + .unwrap(); + + checker + .self_check_step(ProofStep::UserVarName { + global: Var::from_dimacs(1), + user: None, + }) + .unwrap(); + + expect_check_failed( + checker.self_check_step(ProofStep::ChangeSamplingMode { + var: Var::from_dimacs(1), + sample: true, + }), + "cannot sample hidden variable", + ) + } +} diff --git a/vendor/varisat-checker/src/processing.rs b/vendor/varisat-checker/src/processing.rs new file mode 100644 index 000000000..038406cca --- /dev/null +++ b/vendor/varisat-checker/src/processing.rs @@ -0,0 +1,189 @@ +//! Processing of checked proof steps. +use partial_ref::{partial, PartialRef}; + +use anyhow::Error; +use varisat_formula::{Lit, Var}; + +use crate::{ + context::{parts::*, Context}, + transcript::{self, ProofTranscriptProcessor}, + variables::SamplingMode, + CheckerError, +}; + +/// A single step of a proof. +/// +/// Clauses are identified by a unique increasing id assigned by the checker. Whenever the literals +/// of a clause are included in a step, they are sorted and free of duplicates. 
+#[derive(Debug)] +pub enum CheckedProofStep<'a> { + /// Updates the corresponding user variable for a proof variable. + UserVar { + var: Var, + user_var: Option, + }, + /// A clause of the input formula. + AddClause { id: u64, clause: &'a [Lit] }, + /// A duplicated clause of the input formula. + /// + /// The checker detects duplicated clauses and will use the same id for all copies. This also + /// applies to clauses of the input formula. This step allows proof processors to identify the + /// input formula's clauses by consecutive ids. When a duplicate clause is found, an id is + /// allocated and this step is emitted. The duplicated clause is not considered part of the + /// formula and the allocated id will not be used in any other steps. + DuplicatedClause { + id: u64, + same_as_id: u64, + clause: &'a [Lit], + }, + /// A tautological clause of the input formula. + /// + /// Tautological clauses can be completely ignored. This step is only used to give an id to a + /// tautological input clause. + TautologicalClause { id: u64, clause: &'a [Lit] }, + /// Addition of an asymmetric tautology (AT). + /// + /// A clause C is an asymmetric tautology wrt. a formula F, iff unit propagation in F with the + /// negated literals of C as unit clauses leads to a conflict. The `propagations` field contains + /// clauses in the order they became unit and as last element the clause that caused a conflict. + AtClause { + id: u64, + redundant: bool, + clause: &'a [Lit], + propagations: &'a [u64], + }, + /// Deletion of a redundant clause. + DeleteClause { id: u64, clause: &'a [Lit] }, + /// Deletion of a clause that is an asymmetric tautology w.r.t the remaining irredundant + /// clauses. + DeleteAtClause { + id: u64, + keep_as_redundant: bool, + clause: &'a [Lit], + propagations: &'a [u64], + }, + /// Deletion of a resolution asymmetric tautology w.r.t the remaining irredundant caluses. + /// + /// The pivot is always a hidden variable. 
+ DeleteRatClause { + id: u64, + keep_as_redundant: bool, + clause: &'a [Lit], + pivot: Lit, + propagations: &'a ResolutionPropagations, + }, + /// Make a redundant clause irredundant. + MakeIrredundant { id: u64, clause: &'a [Lit] }, + /// A (partial) assignment that satisfies all clauses and assumptions. + Model { assignment: &'a [Lit] }, + /// Change the active set of assumptions. + Assumptions { assumptions: &'a [Lit] }, + /// Subset of assumptions incompatible with the formula. + /// + /// The proof consists of showing that the negation of the assumptions is an AT wrt. the + /// formula. + FailedAssumptions { + failed_core: &'a [Lit], + propagations: &'a [u64], + }, +} + +/// Sampling mode of a user variable. +#[derive(Debug)] +pub enum CheckedSamplingMode { + Sample, + Witness, +} + +/// Corresponding user variable for a proof variable. +#[derive(Debug)] +pub struct CheckedUserVar { + pub user_var: Var, + pub sampling_mode: CheckedSamplingMode, + pub new_var: bool, +} + +/// A list of clauses to resolve and propagations to show that the resolvent is an AT. +#[derive(Debug)] +pub struct ResolutionPropagations { + // TODO implement ResolutionPropagations +} + +/// Checker data available to proof processors. +#[derive(Copy, Clone)] +pub struct CheckerData<'a, 'b>(pub partial!('a Context<'b>, VariablesP)); + +impl<'a, 'b> CheckerData<'a, 'b> { + /// User variable corresponding to proof variable. + /// + /// Returns `None` if the proof variable is an internal or hidden variable. + pub fn user_from_proof_var(self, proof_var: Var) -> Option { + let variables = self.0.part(VariablesP); + variables + .var_data + .get(proof_var.index()) + .and_then(|var_data| { + var_data.user_var.or_else(|| { + // This is needed as yet another workaround to enable independently loading the + // initial formula and proof. + // TODO can this be solved in a better way? 
+ if var_data.sampling_mode == SamplingMode::Sample { + Some(proof_var) + } else { + None + } + }) + }) + } +} + +/// Implement to process proof steps. +pub trait ProofProcessor { + fn process_step(&mut self, step: &CheckedProofStep, data: CheckerData) -> Result<(), Error>; +} + +/// Registry of proof and transcript processors. +#[derive(Default)] +pub struct Processing<'a> { + /// Registered proof processors. + pub processors: Vec<&'a mut dyn ProofProcessor>, + /// Registered transcript processors. + pub transcript_processors: Vec<&'a mut dyn ProofTranscriptProcessor>, + /// Proof step to transcript step conversion. + transcript: transcript::Transcript, +} + +impl<'a> Processing<'a> { + /// Process a single step + pub fn step<'b>( + &mut self, + step: &CheckedProofStep<'b>, + data: CheckerData, + ) -> Result<(), CheckerError> { + for processor in self.processors.iter_mut() { + if let Err(err) = processor.process_step(step, data) { + return Err(CheckerError::ProofProcessorError { cause: err }); + } + } + if !self.transcript_processors.is_empty() { + if let Some(transcript_step) = self.transcript.transcript_step(step, data) { + for processor in self.transcript_processors.iter_mut() { + if let Err(err) = processor.process_step(&transcript_step) { + return Err(CheckerError::ProofProcessorError { cause: err }); + } + } + } + } + + Ok(()) + } +} + +/// Process a single step +pub fn process_step<'a, 'b>( + mut ctx: partial!(Context<'a>, mut ProcessingP<'a>, VariablesP), + step: &CheckedProofStep<'b>, +) -> Result<(), CheckerError> { + let (processing, mut ctx) = ctx.split_part_mut(ProcessingP); + processing.step(step, CheckerData(ctx.borrow())) +} diff --git a/vendor/varisat-checker/src/rup.rs b/vendor/varisat-checker/src/rup.rs new file mode 100644 index 000000000..e6f2b9f03 --- /dev/null +++ b/vendor/varisat-checker/src/rup.rs @@ -0,0 +1,203 @@ +//! Reverse unit propagation redundancy checks. 
+use std::ops::Range; + +use partial_ref::{partial, PartialRef}; + +use varisat_formula::{lit::LitIdx, Lit}; +use varisat_internal_proof::ClauseHash; + +use crate::{ + clauses::{UnitClause, UnitId}, + context::{parts::*, Context}, + hash::rehash, + variables::ensure_var, + CheckerError, +}; + +/// Propagation of the RUP check. +struct TraceItem { + id: u64, + edges: Range, + unused: bool, +} + +#[derive(Default)] +pub struct RupCheck { + /// Stores overwritten values in `unit_clauses` to undo assignments. + trail: Vec<(Lit, Option)>, + /// Involved clauses during the last check. + trace: Vec, + /// Edges of the trace implication graph. + trace_edges: Vec, + /// Just the ids of `trace`. + pub trace_ids: Vec, +} + +/// Check whether a clause is implied by clauses of the given hashes. +/// +/// `lits` must be sorted and free of duplicates. +pub fn check_clause_with_hashes<'a>( + mut ctx: partial!( + Context<'a>, + mut ClauseHasherP, + mut ClausesP, + mut ProcessingP<'a>, + mut RupCheckP, + mut VariablesP, + CheckerStateP, + ), + lits: &[Lit], + propagation_hashes: &[ClauseHash], +) -> Result<(), CheckerError> { + if ctx.part(ClauseHasherP).rename_in_buffered_solver_var_names { + // TODO partial rehashing? 
+ rehash(ctx.borrow()); + } + + let (rup, mut ctx) = ctx.split_part_mut(RupCheckP); + + rup.trace.clear(); + rup.trace_edges.clear(); + + let mut rup_is_unsat = false; + + assert!(rup.trail.is_empty()); + + for &lit in lits.iter() { + ensure_var(ctx.borrow(), lit.var()); + } + + let (clauses, ctx) = ctx.split_part_mut(ClausesP); + + for &lit in lits.iter() { + if let Some((true, unit)) = clauses.lit_value(lit) { + if let UnitId::Global(id) = unit.id { + rup.trace_ids.clear(); + rup.trace_ids.push(id); + return Ok(()); + } else { + unreachable!("unexpected non global unit"); + } + } + } + + // Set all lits to false + for &lit in lits.iter() { + rup.trail.push((lit, clauses.unit_clauses[lit.index()])); + + clauses.unit_clauses[lit.index()] = Some(UnitClause { + value: lit.is_negative(), + id: UnitId::InClause, + }); + } + + 'hashes: for &hash in propagation_hashes.iter() { + let candidates = match clauses.clauses.get(&hash) { + Some(candidates) if !candidates.is_empty() => candidates, + _ => { + return Err(CheckerError::check_failed( + ctx.part(CheckerStateP).step, + format!("no clause found for hash {:x}", hash), + )) + } + }; + + // Check if any clause matching the hash propagates + 'candidates: for clause in candidates.iter() { + let mut unassigned_count = 0; + let mut unassigned_lit = None; + + let range_begin = rup.trace_edges.len(); + + for &lit in clause.lits.slice(&clauses.literal_buffer).iter() { + match clauses.lit_value(lit) { + Some((true, _)) => { + continue 'candidates; + } + Some((false, unit)) => match unit.id { + UnitId::Global(id) => { + rup.trail.push((lit, clauses.unit_clauses[lit.index()])); + clauses.unit_clauses[lit.index()] = Some(UnitClause { + value: lit.is_negative(), + id: UnitId::TracePos(rup.trace.len()), + }); + + rup.trace_edges.push(rup.trace.len() as LitIdx); + + rup.trace.push(TraceItem { + id, + edges: 0..0, + unused: true, + }); + } + UnitId::TracePos(pos) => { + rup.trace_edges.push(pos as LitIdx); + } + UnitId::InClause => {} + 
}, + None => { + unassigned_count += 1; + unassigned_lit = Some(lit); + } + } + } + + let range = range_begin..rup.trace_edges.len(); + + match unassigned_lit { + None => { + rup.trace.push(TraceItem { + id: clause.id, + edges: range, + unused: false, + }); + + rup_is_unsat = true; + break 'hashes; + } + Some(lit) if unassigned_count == 1 => { + rup.trail.push((lit, clauses.unit_clauses[lit.index()])); + + clauses.unit_clauses[lit.index()] = Some(UnitClause { + value: lit.is_positive(), + id: UnitId::TracePos(rup.trace.len()), + }); + + rup.trace.push(TraceItem { + id: clause.id, + edges: range, + unused: true, + }); + } + _ => (), + } + } + } + + if rup_is_unsat && !ctx.part(ProcessingP).processors.is_empty() { + for i in (0..rup.trace.len()).rev() { + if !rup.trace[i].unused { + let edges = rup.trace[i].edges.clone(); + for &edge in rup.trace_edges[edges].iter() { + rup.trace[edge as usize].unused = false; + } + } + } + rup.trace_ids.clear(); + rup.trace_ids.extend(rup.trace.iter().map(|trace| trace.id)); + } + + // Undo temporary assignments + for (lit, value) in rup.trail.drain(..).rev() { + clauses.unit_clauses[lit.index()] = value; + } + + if rup_is_unsat { + Ok(()) + } else { + Err(CheckerError::check_failed( + ctx.part(CheckerStateP).step, + format!("AT check failed for {:?}", lits), + )) + } +} diff --git a/vendor/varisat-checker/src/sorted_lits.rs b/vendor/varisat-checker/src/sorted_lits.rs new file mode 100644 index 000000000..3c9e12e63 --- /dev/null +++ b/vendor/varisat-checker/src/sorted_lits.rs @@ -0,0 +1,56 @@ +//! Utilities for slices of sorted literals. +use std::cmp::Ordering; + +use varisat_formula::Lit; + +/// Sort literals, remove duplicates and check for tautologic clauses. 
+/// +/// Return true if the clause is a tautology +pub fn copy_canonical(target: &mut Vec, src: &[Lit]) -> bool { + target.clear(); + target.extend_from_slice(src); + target.sort(); + target.dedup(); + + let mut last = None; + + target.iter().any(|&lit| { + let tautology = last == Some(!lit); + last = Some(lit); + tautology + }) +} + +/// Test whether a set of literals is a (strict) subset of another set of literals +/// +/// Requires subset and superset to be sorted. +pub fn is_subset(mut subset: &[Lit], mut superset: &[Lit], strict: bool) -> bool { + // We set is_strict to true if we don't require a strict subset + let mut is_strict = !strict; + + while let Some((&sub_min, sub_rest)) = subset.split_first() { + if let Some((&super_min, super_rest)) = superset.split_first() { + match sub_min.cmp(&super_min) { + Ordering::Less => { + // sub_min is not in superset + return false; + } + Ordering::Greater => { + // super_min is not in subset, skip it + superset = super_rest; + is_strict = true; + } + Ordering::Equal => { + // sub_min == super_min, go to next element + superset = super_rest; + subset = sub_rest; + } + } + } else { + // sub_min is not in superset + return false; + } + } + is_strict |= !superset.is_empty(); + is_strict +} diff --git a/vendor/varisat-checker/src/state.rs b/vendor/varisat-checker/src/state.rs new file mode 100644 index 000000000..7f3e7478f --- /dev/null +++ b/vendor/varisat-checker/src/state.rs @@ -0,0 +1,645 @@ +//! Checker state and checking of proof steps. 
+ +use std::{io, mem::replace}; + +use partial_ref::{partial, PartialRef}; +use rustc_hash::FxHashSet as HashSet; + +use varisat_formula::{Lit, Var}; +use varisat_internal_proof::{binary_format::Parser, ClauseHash, DeleteClauseProof, ProofStep}; + +use crate::{ + clauses::{ + add_clause, delete_clause, store_clause, store_unit_clause, DeleteClauseResult, + StoreClauseResult, UnitClause, UnitId, + }, + context::{parts::*, Context}, + hash::rehash, + processing::{ + process_step, CheckedProofStep, CheckedSamplingMode, CheckedUserVar, ResolutionPropagations, + }, + rup::check_clause_with_hashes, + sorted_lits::{copy_canonical, is_subset}, + variables::{ + add_user_mapping, ensure_sampling_var, ensure_var, remove_user_mapping, SamplingMode, + VarData, + }, + CheckerError, +}; + +/// A checker for unsatisfiability proofs in the native varisat format. +#[derive(Default)] +pub struct CheckerState { + /// Current step number. + pub step: u64, + /// Whether unsatisfiability was proven. + pub unsat: bool, + /// Whether an end of proof step was checked. + ended: bool, + /// Last added irredundant clause id. + /// + /// Sorted and free of duplicates. + previous_irred_clause_id: Option, + /// Last added irredundant clause literals. + previous_irred_clause_lits: Vec, + /// Current assumptions, used to check FailedAssumptions and Model + assumptions: Vec, +} + +impl CheckerState { + /// Check whether a given clause is subsumed by the last added irredundant clause. + /// + /// `lits` must be sorted and free of duplicates. 
+ fn subsumed_by_previous_irred_clause(&self, lits: &[Lit]) -> bool { + if self.previous_irred_clause_id.is_none() { + return false; + } + is_subset(&self.previous_irred_clause_lits[..], lits, true) + } +} + +/// Check a single proof step +pub fn check_step<'a>( + mut ctx: partial!( + Context<'a>, + mut CheckerStateP, + mut ClauseHasherP, + mut ClausesP, + mut ProcessingP<'a>, + mut RupCheckP, + mut TmpDataP, + mut VariablesP, + ), + step: ProofStep, +) -> Result<(), CheckerError> { + let mut result = match step { + ProofStep::SolverVarName { global, solver } => { + ctx.part_mut(ClauseHasherP) + .buffered_solver_var_names + .push((global, solver)); + if solver.is_some() { + ctx.part_mut(ClauseHasherP) + .rename_in_buffered_solver_var_names = true; + } + Ok(()) + } + ProofStep::UserVarName { global, user } => { + if let Some(user) = user { + add_user_mapping(ctx.borrow(), global, user)?; + } else { + remove_user_mapping(ctx.borrow(), global)?; + } + Ok(()) + } + ProofStep::DeleteVar { var } => check_delete_var_step(ctx.borrow(), var), + ProofStep::ChangeSamplingMode { var, sample } => { + check_change_sampling_mode(ctx.borrow(), var, sample) + } + ProofStep::AddClause { clause } => add_clause(ctx.borrow(), clause), + ProofStep::AtClause { + redundant, + clause, + propagation_hashes, + } => check_at_clause_step(ctx.borrow(), redundant, clause, propagation_hashes), + ProofStep::DeleteClause { clause, proof } => { + check_delete_clause_step(ctx.borrow(), clause, proof) + } + ProofStep::UnitClauses { units } => check_unit_clauses_step(ctx.borrow(), units), + ProofStep::ChangeHashBits { bits } => { + ctx.part_mut(ClauseHasherP).hash_bits = bits; + rehash(ctx.borrow()); + Ok(()) + } + ProofStep::Model { assignment } => check_model_step(ctx.borrow(), assignment), + ProofStep::Assumptions { assumptions } => { + for &lit in assumptions.iter() { + ensure_sampling_var(ctx.borrow(), lit.var())?; + } + copy_canonical(&mut ctx.part_mut(CheckerStateP).assumptions, assumptions); + 
+ let (state, mut ctx) = ctx.split_part(CheckerStateP); + + process_step( + ctx.borrow(), + &CheckedProofStep::Assumptions { + assumptions: &state.assumptions, + }, + )?; + Ok(()) + } + ProofStep::FailedAssumptions { + failed_core, + propagation_hashes, + } => check_failed_assumptions_step(ctx.borrow(), failed_core, propagation_hashes), + ProofStep::End => { + ctx.part_mut(CheckerStateP).ended = true; + Ok(()) + } + }; + + if let Err(CheckerError::CheckFailed { + ref mut debug_step, .. + }) = result + { + *debug_step = format!("{:?}", step) + } + result +} + +/// Check a DeleteVar step +fn check_delete_var_step<'a>( + mut ctx: partial!( + Context<'a>, + mut ClausesP, + mut ProcessingP<'a>, + mut VariablesP, + CheckerStateP, + ), + var: Var, +) -> Result<(), CheckerError> { + ensure_var(ctx.borrow(), var); + if let Some(user_var) = ctx.part(VariablesP).var_data[var.index()].user_var { + return Err(CheckerError::check_failed( + ctx.part(CheckerStateP).step, + format!( + "deleted variable {:?} corresponds to user variable {:?}", + var, user_var + ), + )); + } + + for &polarity in &[false, true] { + if ctx.part(VariablesP).lit_data[var.lit(polarity).code()].clause_count > 0 { + return Err(CheckerError::check_failed( + ctx.part(CheckerStateP).step, + format!("deleted variable {:?} still has clauses", var), + )); + } + } + + if let Some(unit_clause) = ctx.part(ClausesP).unit_clauses[var.index()] { + let clause = [var.lit(unit_clause.value)]; + + let id = match unit_clause.id { + UnitId::Global(id) => id, + _ => unreachable!(), + }; + + process_step( + ctx.borrow(), + &CheckedProofStep::DeleteRatClause { + id, + keep_as_redundant: false, + clause: &clause[..], + pivot: clause[0], + propagations: &ResolutionPropagations {}, + }, + )?; + ctx.part_mut(ClausesP).unit_clauses[var.index()] = None; + } + + ctx.part_mut(VariablesP).var_data[var.index()] = VarData::default(); + + Ok(()) +} + +/// Check a ChangeSamplingMode step +fn check_change_sampling_mode<'a>( + mut ctx: 
partial!( + Context<'a>, + mut ClausesP, + mut ProcessingP<'a>, + mut VariablesP, + CheckerStateP, + ), + var: Var, + sample: bool, +) -> Result<(), CheckerError> { + ensure_var(ctx.borrow(), var); + let mut ctx_in = ctx; + let (variables, ctx) = ctx_in.split_part_mut(VariablesP); + let var_data = &mut variables.var_data[var.index()]; + let sampling_mode = if sample { + SamplingMode::Sample + } else { + SamplingMode::Witness + }; + + if var_data.sampling_mode != sampling_mode { + var_data.sampling_mode = sampling_mode; + + if let Some(user_var) = var_data.user_var { + let sampling_mode = if var_data.sampling_mode == SamplingMode::Witness { + CheckedSamplingMode::Witness + } else { + CheckedSamplingMode::Sample + }; + process_step( + ctx_in.borrow(), + &CheckedProofStep::UserVar { + var, + user_var: Some(CheckedUserVar { + user_var, + sampling_mode, + new_var: false, + }), + }, + )?; + } else if sampling_mode == SamplingMode::Sample { + return Err(CheckerError::check_failed( + ctx.part(CheckerStateP).step, + format!("cannot sample hidden variable {:?}", var), + )); + } + } + + Ok(()) +} + +/// Check an AtClause step +fn check_at_clause_step<'a>( + mut ctx: partial!( + Context<'a>, + mut CheckerStateP, + mut ClauseHasherP, + mut ClausesP, + mut ProcessingP<'a>, + mut RupCheckP, + mut TmpDataP, + mut VariablesP, + ), + redundant: bool, + clause: &[Lit], + propagation_hashes: &[ClauseHash], +) -> Result<(), CheckerError> { + let mut tmp = replace(&mut ctx.part_mut(TmpDataP).tmp, vec![]); + + if copy_canonical(&mut tmp, clause) { + return Err(CheckerError::check_failed( + ctx.part(CheckerStateP).step, + format!("clause {:?} is a tautology", tmp), + )); + } + + check_clause_with_hashes(ctx.borrow(), &tmp, &*propagation_hashes)?; + + let (id, added) = store_clause(ctx.borrow(), &tmp, redundant); + + if !redundant { + let state = ctx.part_mut(CheckerStateP); + state.previous_irred_clause_id = Some(id); + state.previous_irred_clause_lits.clear(); + 
state.previous_irred_clause_lits.extend_from_slice(&tmp); + } + + match added { + StoreClauseResult::New => { + let (rup_check, mut ctx) = ctx.split_part(RupCheckP); + process_step( + ctx.borrow(), + &CheckedProofStep::AtClause { + id, + redundant, + clause: &tmp, + propagations: &rup_check.trace_ids, + }, + )?; + } + StoreClauseResult::NewlyIrredundant => { + process_step( + ctx.borrow(), + &CheckedProofStep::MakeIrredundant { id, clause: &tmp }, + )?; + } + StoreClauseResult::Duplicate => (), + } + + ctx.part_mut(TmpDataP).tmp = tmp; + + Ok(()) +} + +/// Check a DeleteClause step +fn check_delete_clause_step<'a>( + mut ctx: partial!( + Context<'a>, + mut CheckerStateP, + mut ClausesP, + mut ProcessingP<'a>, + mut TmpDataP, + mut VariablesP, + ClauseHasherP, + ), + clause: &[Lit], + proof: DeleteClauseProof, +) -> Result<(), CheckerError> { + let mut tmp = replace(&mut ctx.part_mut(TmpDataP).tmp, vec![]); + + if copy_canonical(&mut tmp, clause) { + return Err(CheckerError::check_failed( + ctx.part(CheckerStateP).step, + format!("clause {:?} is a tautology", tmp), + )); + } + + let redundant = proof == DeleteClauseProof::Redundant; + + let mut subsumed_by = None; + + match proof { + DeleteClauseProof::Redundant => (), + DeleteClauseProof::Satisfied => { + let is_subsumed = tmp.iter().any(|&lit| { + if let Some(( + true, + UnitClause { + id: UnitId::Global(id), + .. 
+ }, + )) = ctx.part(ClausesP).lit_value(lit) + { + subsumed_by = Some(id); + true + } else { + false + } + }); + if !is_subsumed { + return Err(CheckerError::check_failed( + ctx.part(CheckerStateP).step, + format!("deleted clause {:?} is not satisfied", clause), + )); + } + } + DeleteClauseProof::Simplified => { + subsumed_by = ctx.part(CheckerStateP).previous_irred_clause_id; + if !ctx + .part(CheckerStateP) + .subsumed_by_previous_irred_clause(&tmp) + { + return Err(CheckerError::check_failed( + ctx.part(CheckerStateP).step, + format!( + "deleted clause {:?} is not subsumed by previous clause {:?}", + clause, + ctx.part(CheckerStateP).previous_irred_clause_lits + ), + )); + } + } + } + + ctx.part_mut(CheckerStateP).previous_irred_clause_id = None; + ctx.part_mut(CheckerStateP) + .previous_irred_clause_lits + .clear(); + + let (id, deleted) = delete_clause(ctx.borrow(), &tmp, redundant)?; + + if redundant { + match deleted { + DeleteClauseResult::Removed => { + process_step( + ctx.borrow(), + &CheckedProofStep::DeleteClause { id, clause: &tmp }, + )?; + } + DeleteClauseResult::Unchanged => (), + DeleteClauseResult::NewlyRedundant => unreachable!(), + } + } else { + match deleted { + DeleteClauseResult::Removed | DeleteClauseResult::NewlyRedundant => { + process_step( + ctx.borrow(), + &CheckedProofStep::DeleteAtClause { + id, + keep_as_redundant: deleted == DeleteClauseResult::NewlyRedundant, + clause: &tmp, + propagations: &[subsumed_by.unwrap()], + }, + )?; + } + DeleteClauseResult::Unchanged => (), + } + } + + ctx.part_mut(TmpDataP).tmp = tmp; + Ok(()) +} + +/// Check a UnitClauses step +fn check_unit_clauses_step<'a>( + mut ctx: partial!( + Context<'a>, + mut CheckerStateP, + mut ClauseHasherP, + mut ClausesP, + mut ProcessingP<'a>, + mut RupCheckP, + mut VariablesP, + ), + units: &[(Lit, ClauseHash)], +) -> Result<(), CheckerError> { + for &(lit, hash) in units.iter() { + ensure_var(ctx.borrow(), lit.var()); + + let clause = [lit]; + let propagation_hashes = 
[hash]; + check_clause_with_hashes(ctx.borrow(), &clause[..], &propagation_hashes[..])?; + + let (id, added) = store_unit_clause(ctx.borrow(), lit); + + match added { + StoreClauseResult::New => { + let (rup_check, mut ctx) = ctx.split_part(RupCheckP); + process_step( + ctx.borrow(), + &CheckedProofStep::AtClause { + id, + redundant: false, + clause: &clause, + propagations: &rup_check.trace_ids, + }, + )?; + } + StoreClauseResult::Duplicate => (), + StoreClauseResult::NewlyIrredundant => unreachable!(), + } + } + Ok(()) +} + +/// Check a Model step +fn check_model_step<'a>( + mut ctx: partial!(Context<'a>, mut ProcessingP<'a>, CheckerStateP, ClausesP, VariablesP), + model: &[Lit], +) -> Result<(), CheckerError> { + let mut assignments = HashSet::default(); + + for &lit in model.iter() { + if let Some((false, _)) = ctx.part(ClausesP).lit_value(lit) { + return Err(CheckerError::check_failed( + ctx.part(CheckerStateP).step, + format!("model assignment conflicts with unit clause {:?}", !lit), + )); + } + if assignments.contains(&!lit) { + return Err(CheckerError::check_failed( + ctx.part(CheckerStateP).step, + format!("model contains conflicting assignment {:?}", !lit), + )); + } + assignments.insert(lit); + } + + for &lit in ctx.part(CheckerStateP).assumptions.iter() { + if !assignments.contains(&lit) { + return Err(CheckerError::check_failed( + ctx.part(CheckerStateP).step, + format!("model does not contain assumption {:?}", lit), + )); + } + } + + for (_, candidates) in ctx.part(ClausesP).clauses.iter() { + for clause in candidates.iter() { + let lits = clause.lits.slice(&ctx.part(ClausesP).literal_buffer); + if !lits.iter().any(|lit| assignments.contains(&lit)) { + return Err(CheckerError::check_failed( + ctx.part(CheckerStateP).step, + format!("model does not satisfy clause {:?}", lits), + )); + } + } + } + + process_step(ctx.borrow(), &CheckedProofStep::Model { assignment: model })?; + + Ok(()) +} + +/// Check a FailedAssumptions step +fn 
check_failed_assumptions_step<'a>( + mut ctx: partial!( + Context<'a>, + mut ClauseHasherP, + mut ClausesP, + mut ProcessingP<'a>, + mut RupCheckP, + mut TmpDataP, + mut VariablesP, + CheckerStateP, + ), + failed_core: &[Lit], + propagation_hashes: &[ClauseHash], +) -> Result<(), CheckerError> { + let mut tmp = replace(&mut ctx.part_mut(TmpDataP).tmp, vec![]); + + let direct_conflict = copy_canonical(&mut tmp, failed_core); + + if !is_subset(&tmp, &ctx.part(CheckerStateP).assumptions, false) { + return Err(CheckerError::check_failed( + ctx.part(CheckerStateP).step, + "failed core contains non-assumed variables".to_string(), + )); + } + + if direct_conflict { + // we have x and not x among the assumptions, no need to check + ctx.part_mut(RupCheckP).trace_ids.clear(); + } else { + // invert the assumptions and check for an AT + for lit in tmp.iter_mut() { + *lit = !*lit; + } + check_clause_with_hashes(ctx.borrow(), &tmp, propagation_hashes)?; + + // we undo the inversion to report the correct checked proof step + for lit in tmp.iter_mut() { + *lit = !*lit; + } + } + + let (rup_check, mut ctx) = ctx.split_part(RupCheckP); + process_step( + ctx.borrow(), + &CheckedProofStep::FailedAssumptions { + failed_core: &tmp, + propagations: &rup_check.trace_ids, + }, + )?; + + ctx.part_mut(TmpDataP).tmp = tmp; + + Ok(()) +} + +/// Checks a proof in the native Varisat format. 
+            Err(err) => match err.downcast::<io::Error>() {
+    pub tmp: Vec<Lit>,
+ }) => None, + + Some(CheckedUserVar { + user_var, + sampling_mode: CheckedSamplingMode::Witness, + new_var: true, + }) => Some(ProofTranscriptStep::ObserveInternalVar { var: *user_var }), + + Some(CheckedUserVar { + user_var, + sampling_mode: CheckedSamplingMode::Witness, + new_var: false, + }) => Some(ProofTranscriptStep::WitnessVar { var: *user_var }), + + Some(CheckedUserVar { + user_var, + sampling_mode: CheckedSamplingMode::Sample, + new_var: false, + }) => Some(ProofTranscriptStep::SampleVar { var: *user_var }), + }, + CheckedProofStep::AddClause { clause, .. } + | CheckedProofStep::DuplicatedClause { clause, .. } + | CheckedProofStep::TautologicalClause { clause, .. } => { + self.lit_buf.clear(); + self.lit_buf.extend(clause.iter().map(|&lit| { + lit.map_var(|var| { + data.user_from_proof_var(var) + .expect("hidden variable in clause") + }) + })); + Some(ProofTranscriptStep::AddClause { + clause: &self.lit_buf, + }) + } + CheckedProofStep::AtClause { clause, .. } => { + if clause.is_empty() { + Some(ProofTranscriptStep::Unsat) + } else { + None + } + } + CheckedProofStep::Model { assignment } => { + self.lit_buf.clear(); + self.lit_buf.extend(assignment.iter().flat_map(|&lit| { + data.user_from_proof_var(lit.var()) + .map(|var| var.lit(lit.is_positive())) + })); + Some(ProofTranscriptStep::Model { + assignment: &self.lit_buf, + }) + } + CheckedProofStep::Assumptions { assumptions } => { + self.lit_buf.clear(); + self.lit_buf.extend(assumptions.iter().map(|&lit| { + lit.map_var(|var| { + data.user_from_proof_var(var) + .expect("hidden variable in assumptions") + }) + })); + Some(ProofTranscriptStep::Assume { + assumptions: &self.lit_buf, + }) + } + CheckedProofStep::FailedAssumptions { failed_core, .. 
+pub struct Variables {
+    /// Information about literals in the current formula.
+    pub lit_data: Vec<LitData>,
+    /// Information about variables in the current formula.
+    pub var_data: Vec<VarData>,
+    /// User var names in use.
+    ///
+    /// This is used to check for colliding mappings which are not allowed.
+    used_user_vars: HashSet<Var>,
+}
+pub fn ensure_sampling_var( + mut ctx: partial!(Context, mut ClausesP, mut VariablesP, CheckerStateP), + var: Var, +) -> Result<(), CheckerError> { + ensure_var(ctx.borrow(), var); + + let variables = ctx.part_mut(VariablesP); + + if variables.var_data[var.index()].sampling_mode != SamplingMode::Sample { + return Err(CheckerError::check_failed( + ctx.part(CheckerStateP).step, + format!("variable {:?} is not a sampling variable", var), + )); + } + Ok(()) +} + +/// Ensure that a variable is present. +pub fn ensure_var(mut ctx: partial!(Context, mut ClausesP, mut VariablesP), var: Var) { + let (variables, mut ctx) = ctx.split_part_mut(VariablesP); + + if variables.var_data.len() <= var.index() { + variables + .var_data + .resize(var.index() + 1, VarData::default()); + variables + .lit_data + .resize((var.index() + 1) * 2, LitData::default()); + ctx.part_mut(ClausesP) + .unit_clauses + .resize(var.index() + 1, None); + } +} + +/// Add a user/global var mapping. +pub fn add_user_mapping<'a>( + mut ctx: partial!(Context<'a>, mut ClausesP, mut ProcessingP<'a>, mut VariablesP, CheckerStateP), + global_var: Var, + user_var: Var, +) -> Result<(), CheckerError> { + ensure_var(ctx.borrow(), global_var); + + let mut ctx_in = ctx; + let (variables, ctx) = ctx_in.split_part_mut(VariablesP); + + // TODO will the first check cause problems when observing solver added variables? 
+    // sampling_mode will be Witness for a newly observed internal variable and Sample for a
+    // fresh variable
+ + if var_data.user_var.is_some() { + process_step( + ctx.borrow(), + &CheckedProofStep::UserVar { + var: global_var, + user_var: None, + }, + )?; + } else { + return Err(CheckerError::check_failed( + ctx.part(CheckerStateP).step, + format!("no user name to remove for variable {:?}", global_var), + )); + } + + let variables = ctx.part_mut(VariablesP); + + let var_data = &mut variables.var_data[global_var.index()]; + if let Some(user_var) = var_data.user_var { + variables.used_user_vars.remove(&user_var); + var_data.user_var = None; + var_data.sampling_mode = SamplingMode::Hide; + } + + Ok(()) +} -- cgit v1.2.3