summaryrefslogtreecommitdiffstats
path: root/vendor/varisat
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-05-18 02:49:42 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-05-18 02:49:42 +0000
commit837b550238aa671a591ccf282dddeab29cadb206 (patch)
tree914b6b8862bace72bd3245ca184d374b08d8a672 /vendor/varisat
parentAdding debian version 1.70.0+dfsg2-1. (diff)
downloadrustc-837b550238aa671a591ccf282dddeab29cadb206.tar.xz
rustc-837b550238aa671a591ccf282dddeab29cadb206.zip
Merging upstream version 1.71.1+dfsg1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'vendor/varisat')
-rw-r--r--vendor/varisat/.cargo-checksum.json1
-rw-r--r--vendor/varisat/Cargo.toml85
-rw-r--r--vendor/varisat/LICENSE-APACHE201
-rw-r--r--vendor/varisat/LICENSE-MIT25
-rw-r--r--vendor/varisat/README.md51
-rw-r--r--vendor/varisat/build.rs48
-rw-r--r--vendor/varisat/proptest-regressions/solver.txt7
-rw-r--r--vendor/varisat/src/analyze_conflict.rs368
-rw-r--r--vendor/varisat/src/assumptions.rs376
-rw-r--r--vendor/varisat/src/binary.rs100
-rw-r--r--vendor/varisat/src/cdcl.rs264
-rw-r--r--vendor/varisat/src/clause.rs67
-rw-r--r--vendor/varisat/src/clause/activity.rs107
-rw-r--r--vendor/varisat/src/clause/alloc.rs262
-rw-r--r--vendor/varisat/src/clause/assess.rs69
-rw-r--r--vendor/varisat/src/clause/db.rs269
-rw-r--r--vendor/varisat/src/clause/gc.rs207
-rw-r--r--vendor/varisat/src/clause/header.rs148
-rw-r--r--vendor/varisat/src/clause/reduce.rs132
-rw-r--r--vendor/varisat/src/config.rs31
-rw-r--r--vendor/varisat/src/context.rs131
-rw-r--r--vendor/varisat/src/decision.rs58
-rw-r--r--vendor/varisat/src/decision/vsids.rs346
-rw-r--r--vendor/varisat/src/glue.rs36
-rw-r--r--vendor/varisat/src/lib.rs46
-rw-r--r--vendor/varisat/src/load.rs259
-rw-r--r--vendor/varisat/src/model.rs87
-rw-r--r--vendor/varisat/src/proof.rs434
-rw-r--r--vendor/varisat/src/proof/drat.rs85
-rw-r--r--vendor/varisat/src/proof/map_step.rs128
-rw-r--r--vendor/varisat/src/prop.rs261
-rw-r--r--vendor/varisat/src/prop/assignment.rs237
-rw-r--r--vendor/varisat/src/prop/binary.rs36
-rw-r--r--vendor/varisat/src/prop/graph.rs137
-rw-r--r--vendor/varisat/src/prop/long.rs164
-rw-r--r--vendor/varisat/src/prop/watch.rs135
-rw-r--r--vendor/varisat/src/schedule.rs101
-rw-r--r--vendor/varisat/src/schedule/luby.rs52
-rw-r--r--vendor/varisat/src/solver.rs512
-rw-r--r--vendor/varisat/src/state.rs42
-rw-r--r--vendor/varisat/src/tmp.rs35
-rw-r--r--vendor/varisat/src/unit_simplify.rs198
-rw-r--r--vendor/varisat/src/variables.rs669
-rw-r--r--vendor/varisat/src/variables/data.rs70
-rw-r--r--vendor/varisat/src/variables/var_map.rs124
-rw-r--r--vendor/varisat/tests/checker.rs193
-rw-r--r--vendor/varisat/tests/cnfs.rs27
-rw-r--r--vendor/varisat/tests/cnfs/sgen1_sat_90_0.cnf217
-rw-r--r--vendor/varisat/tests/cnfs/sgen1_unsat_57_0.cnf125
49 files changed, 7763 insertions, 0 deletions
diff --git a/vendor/varisat/.cargo-checksum.json b/vendor/varisat/.cargo-checksum.json
new file mode 100644
index 000000000..5a9da2546
--- /dev/null
+++ b/vendor/varisat/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"Cargo.toml":"21ce29b910fa74cb7bc2e46fa12466dbc22afca75a4398df05121f09ab07a220","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"2259cbb177661977dd5510c05a8670da3cb28aa7f4ff74c193798db496fe92d8","README.md":"ad1aca8172568a522d6e476f3245a44b8fed8097a2c4b353bc01989c8c8a4419","build.rs":"f02c2967a2265e95e0305d66adc164082634962db16d53f91f65baa81f06e0ae","proptest-regressions/solver.txt":"62f46e379e51cf2b2b207e2605a2462bdc92a7970df823ec4fd18683135a52c7","src/analyze_conflict.rs":"aad17da2ce3cc7902cff7b3a2fe419231e4ea4536f58213a644cfa25efcd433c","src/assumptions.rs":"0e0c0374c11b64d03daaca9c955b668890cc7a8911554e3772d71edb2c7a96cc","src/binary.rs":"5d9ad1e92799338d770389dfd0b48118f4ecfc559bd13e98c19c22d0b662e243","src/cdcl.rs":"0d9eb122cfab68eb02f3498a8a7a31c3c6444d9c87beb868f0b463cd885f0cef","src/clause.rs":"9b04487436875255143df5dc1cd07229c7a6578a0d52951e9a0cce2162ce6448","src/clause/activity.rs":"d4377517ceffc65eede5d7323d1fef9a9adaadad1d406f978f61de1d8941c5d3","src/clause/alloc.rs":"f0f30f17c3b8703e4147eae374b7fe0640276002718f44b73084c59cd029f8b3","src/clause/assess.rs":"239b9be0dd566bb7295fd2015ffa9e4bc24b542f106df642835305f8dac221a0","src/clause/db.rs":"58d50caa94db1015f83674bf2426438d87f799231cc85a77749ee3420a5730f3","src/clause/gc.rs":"981b25ec3eca4b84c51df6b912c487c9780055b91f8858ec3ab23b900dab5f1c","src/clause/header.rs":"a1d16bfac19ba7b3f2326a2296b4200ca5f1d2769e6d716d623ae595d41bb3e0","src/clause/reduce.rs":"7bd26e64afde13a77113cc919c56a049cf9b9dd5df137219f441a354f3b1326f","src/config.rs":"9c706c4e7e11a04a098409c9c70dd2bb2109f4649da4a5d54a0a628fd8a7638d","src/context.rs":"5533c6a2a14d8fe6d5b1ec37ae474d40dac8456697a0bb8e14eb5bd7ed0a1e19","src/decision.rs":"5f1e17b514c43ede94978c5dc4d74a9a1a20fb127f0c621439ffd78e6c90edef","src/decision/vsids.rs":"01a6c73f830162ccff3b8e741b3a75a761b21398319edc531dfba5edebaa62f8","src/glue.rs":"c20a45d317db29f7e192ee0b263d72ac4234d0a57faec835c0081024f9c29e71","src/lib.rs
":"c840e41c370f06ef96d3bd8f6c3bcdad85a52b81c37b32bcf4f1d5440e236721","src/load.rs":"a78e4a9bc0806407ef1d47ab1089cb005749d743e7dbea7950983e710eb89ffe","src/model.rs":"5c2ca7da7afe414fdc7c51d759ee5a9d2178d30a3e667489d778b72b23da81a8","src/proof.rs":"06a5fcbe4c799a5882adeb250f97bcdfad832d6d7a041503cd89865727ad0513","src/proof/drat.rs":"c27f66a4bb27046bb152adbcd08cd555411623f524876763d8265dce5d8404c3","src/proof/map_step.rs":"9167b80d217019163ee52539c179b36b6f79999f5c2715e7459bed7116d0986b","src/prop.rs":"5fe93c2b5083bbb916e52ced937aca1c0f84cac5c039568fa4bdcf6f120cb34a","src/prop/assignment.rs":"6a208beb26d4e37a61e85bfec708b6cac0f48d01096a726f83e105a73deb941b","src/prop/binary.rs":"48b57ca120dcbcf68a5a35f9b45c665d8eee9ffe7070fad63e820a56aff1ed1b","src/prop/graph.rs":"8cc58f977a6d9cc878afbe5cb8d3bd6cf63555d8b015da989b995913832dd6c0","src/prop/long.rs":"8f6761010c8e5bf85297236fdd2af7bd6b229c1500542b475a257d8f2d7bfc57","src/prop/watch.rs":"cbe9ec16f2e983c0a9877fe63afd407b6109f0a482391ce0fca1e0e14176c441","src/schedule.rs":"bff34535fc65c1639da1699f59985c1728e81f2f7608d42328345b9f815ee6d2","src/schedule/luby.rs":"a16b68688dcb8fb3e98f05bb9383c0d9c1bad391231b7ff7e056c12d2ad4bdaa","src/solver.rs":"4b975f617e21b4fbf5aea47757c3f01ed38fc05d2fc8de61973da60a8bbcefd4","src/state.rs":"012dc415965f5c7a220aca707d8936bd1e2ad84e481e3a11dce34cf403cf90e8","src/tmp.rs":"715381a3cd8119d781582d972afb42463a252a76cf9f927807a9568f893b7527","src/unit_simplify.rs":"28606a72d7d8d0667b027f135b3ee62e61796e61971ccd4af5fd40c1b1697cb2","src/variables.rs":"d0e0d6f34212ddd8f1847ce4c4f6070294a5934e1143555f35a67e86b848679b","src/variables/data.rs":"0a4057da30251e476e40e37417d768755ca00add3e3f80992fccfec7da7f347d","src/variables/var_map.rs":"689de26a54a7c1bdb41a576faa398bcd6f83603306716ec88a000e0bb9b25783","tests/checker.rs":"102be4f4ad9fdb8e0177e266e87fad61ddbf6157c4ffe23ac0eaae89fea898dc","tests/cnfs.rs":"a32c9492cc115cf201704379c7966b1b710499cc01ec2d29a3699c1878a9ac79","tests/cnfs/sgen1_sat_90_0.cnf":"3aef
3970087c0370c01ec159a2e462887cdb7519024b772a1dc9f54168ba4607","tests/cnfs/sgen1_unsat_57_0.cnf":"888c236d8a5bda36fc3df4a61bb841a0da6ae8a735eaf20fedff2479342687de"},"package":"ebe609851d1e9196674ac295f656bd8601200a1077343d22b345013497807caf"} \ No newline at end of file
diff --git a/vendor/varisat/Cargo.toml b/vendor/varisat/Cargo.toml
new file mode 100644
index 000000000..a8030e6d5
--- /dev/null
+++ b/vendor/varisat/Cargo.toml
@@ -0,0 +1,85 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies
+#
+# If you believe there's an error in this file please file an
+# issue against the rust-lang/cargo repository. If you're
+# editing this file be aware that the upstream Cargo.toml
+# will likely look very different (and much more reasonable)
+
+[package]
+edition = "2018"
+name = "varisat"
+version = "0.2.2"
+authors = ["Jannis Harder <me@jix.one>"]
+build = "build.rs"
+description = "A CDCL based SAT solver (library)"
+homepage = "https://jix.one/project/varisat/"
+readme = "README.md"
+license = "MIT/Apache-2.0"
+repository = "https://github.com/jix/varisat"
+[dependencies.anyhow]
+version = "1.0.32"
+
+[dependencies.itoa]
+version = "0.4.4"
+
+[dependencies.leb128]
+version = "0.2.4"
+
+[dependencies.log]
+version = "0.4.6"
+
+[dependencies.ordered-float]
+version = "2.0.0"
+
+[dependencies.partial_ref]
+version = "0.3.1"
+
+[dependencies.rustc-hash]
+version = "1.1.0"
+
+[dependencies.serde]
+version = "1.0.91"
+features = ["derive"]
+
+[dependencies.thiserror]
+version = "1.0.20"
+
+[dependencies.varisat-checker]
+version = "=0.2.2"
+
+[dependencies.varisat-dimacs]
+version = "=0.2.2"
+
+[dependencies.varisat-formula]
+version = "=0.2.2"
+
+[dependencies.varisat-internal-macros]
+version = "=0.2.2"
+
+[dependencies.varisat-internal-proof]
+version = "=0.2.2"
+
+[dependencies.vec_mut_scan]
+version = "0.3.0"
+[dev-dependencies.env_logger]
+version = "0.7.1"
+
+[dev-dependencies.proptest]
+version = "0.10.1"
+
+[dev-dependencies.rand]
+version = "0.7.3"
+
+[dev-dependencies.tempfile]
+version = "3.0.8"
+
+[dev-dependencies.varisat-formula]
+version = "=0.2.2"
+features = ["proptest-strategies", "internal-testing"]
+[build-dependencies.anyhow]
+version = "1.0.32"
diff --git a/vendor/varisat/LICENSE-APACHE b/vendor/varisat/LICENSE-APACHE
new file mode 100644
index 000000000..16fe87b06
--- /dev/null
+++ b/vendor/varisat/LICENSE-APACHE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/vendor/varisat/LICENSE-MIT b/vendor/varisat/LICENSE-MIT
new file mode 100644
index 000000000..4fd1cb2ff
--- /dev/null
+++ b/vendor/varisat/LICENSE-MIT
@@ -0,0 +1,25 @@
+Copyright (c) 2017-2019 Jannis Harder
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
diff --git a/vendor/varisat/README.md b/vendor/varisat/README.md
new file mode 100644
index 000000000..ae9783ca0
--- /dev/null
+++ b/vendor/varisat/README.md
@@ -0,0 +1,51 @@
+# Varisat
+
+Varisat is a [CDCL][cdcl] based SAT solver written in rust. Given a boolean
+formula in [conjunctive normal form][cnf], it either finds a variable
+assignment that makes the formula true or finds a proof that this is
+impossible.
+
+[cdcl]: https://en.wikipedia.org/wiki/Conflict-Driven_Clause_Learning
+[cnf]: https://en.wikipedia.org/wiki/Conjunctive_normal_form
+
+This is the library version. Varisat is also available as a command line solver
+([`varisat-cli` on crates.io][crate-varisat-cli]).
+
+## Documentation
+
+ * [User Manual](https://jix.github.io/varisat/manual/0.2.1/)
+ * [Library API Documentation](https://docs.rs/varisat/0.2.2/varisat/)
+
+## Developer Documentation
+
+The internal APIs are documented using rustdoc. It can be generated using
+`cargo doc --document-private-items --all --exclude varisat-cli` or [viewed
+online (master)][dev-docs].
+
+You can also read [a series of blog posts about the development of
+varisat][blog-series].
+
+## License
+
+The Varisat source code is licensed under either of
+
+ * Apache License, Version 2.0
+ ([LICENSE-APACHE](LICENSE-APACHE) or
+ http://www.apache.org/licenses/LICENSE-2.0)
+ * MIT license
+ ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)
+
+at your option.
+
+### Contribution
+
+Unless you explicitly state otherwise, any contribution intentionally submitted
+for inclusion in Varisat by you, as defined in the Apache-2.0 license, shall be
+dual licensed as above, without any additional terms or conditions.
+
+[cdcl]: https://en.wikipedia.org/wiki/Conflict-Driven_Clause_Learning
+[cnf]: https://en.wikipedia.org/wiki/Conjunctive_normal_form
+[dev-docs]: https://jix.github.io/varisat/dev/varisat/
+[blog-series]: https://jix.one/tags/refactoring-varisat/
+[crate-varisat]: https://crates.io/crates/varisat
+[crate-varisat-cli]: https://crates.io/crates/varisat-cli
diff --git a/vendor/varisat/build.rs b/vendor/varisat/build.rs
new file mode 100644
index 000000000..f709e9045
--- /dev/null
+++ b/vendor/varisat/build.rs
@@ -0,0 +1,48 @@
+use anyhow::{ensure, Error};
+use std::{env, process::Command, str::from_utf8};
+
+fn have_drat_trim() -> Result<(), Error> {
+ println!("rerun-if-env-changed=VARISAT_HAVE_DRAT_TRIM");
+ if env::var("VARISAT_HAVE_DRAT_TRIM").is_ok() {
+ return Ok(());
+ }
+
+ let output = Command::new("drat-trim").output()?;
+ let stdout = from_utf8(&output.stdout)?;
+
+ ensure!(
+ stdout.contains("force binary proof parse mode"),
+ "no force binary proof option found"
+ );
+
+ Ok(())
+}
+
+fn have_rate() -> Result<(), Error> {
+ println!("rerun-if-env-changed=VARISAT_HAVE_RATE");
+ if env::var("VARISAT_HAVE_RATE").is_ok() {
+ return Ok(());
+ }
+
+ let _ = Command::new("rate").arg("--version").output()?;
+
+ Ok(())
+}
+
+fn main() {
+ match have_drat_trim() {
+ Ok(_) => println!("cargo:rustc-cfg=test_drat_trim"),
+ Err(err) => println!(
+ "cargo:warning=drat-trim proof checker not found, some tests will be disabled: {}",
+ err
+ ),
+ }
+
+ match have_rate() {
+ Ok(_) => println!("cargo:rustc-cfg=test_rate"),
+ Err(err) => println!(
+ "cargo:warning=rate proof checker not found, some tests will be disabled: {}",
+ err
+ ),
+ }
+}
diff --git a/vendor/varisat/proptest-regressions/solver.txt b/vendor/varisat/proptest-regressions/solver.txt
new file mode 100644
index 000000000..b86cf27f4
--- /dev/null
+++ b/vendor/varisat/proptest-regressions/solver.txt
@@ -0,0 +1,7 @@
+# Seeds for failure cases proptest has generated in the past. It is
+# automatically read and these particular cases re-run before any
+# novel cases are generated.
+#
+# It is recommended to check this file in to source control so that
+# everyone who runs the test benefits from these saved cases.
+cc 26df7e6a68ee2be7d6d6f0229b3e7f96f86cb02069e508a3b62209ecde1eb5b7 # shrinks to (enable_row, columns, formula) = ([1, 2, 3, 4, 5, 6, 7, 8], 4, 40[[17, 18], [25, 32], [27, 32], [-17, -9, -25, -1, -33], [32, 31], [34, 33], [14, 10], [40, 33], [33, 38], [11, 14], [21, 24], [37, 36], [30, 29], [31, 26], [24, 22], [39, 36], [34, 35], [13, 12], [-16, 10], [32, 30], [-37, -29, -13, -21, -5], [9, 14], [26, 30], [-12, -4, -36, -28, -20], [13, 10], [37, 34], [9, 10], [37, 39], [18, 21], [9, 13], [27, 31], [27, 26], [37, 33], [11, -16], [20, 22], [17, 19], [23, 17], [28, 25], [22, 17], [17, 20], [15, -16], [35, 36], [15, 12], [27, 25], [11, 10], [39, 33], [17, 21], [18, 24], [9, -16], [28, 31], [15, 11], [31, 30], [26, 32], [40, 37], [21, 23], [18, 22], [20, 19], [34, 36], [31, 25], [12, 10], [12, 14], [26, 25], [28, 26], [23, 20], [21, 19], [25, 30], [36, 40], [13, -16], [21, 20], [34, 39], [12, 11], [29, 25], [18, 23], [23, 24], [40, 38], [-16, 12], [33, 35], [19, 18], [15, 13], [14, -16], [33, 36], [40, 34], [20, 24], [11, 13], [13, 14], [35, 40], [40, 39], [-14, -30, -6, -22, -38], [27, 28], [29, 26], [39, 38], [34, 38], [29, 31], [19, 22], [11, 9], [-24, -8, -40, -32, 16], [22, 23], [38, 35], [14, 15], [17, 24], [-18, -26, -34, -10, -2], [19, 23], [9, 12], [-15, -7, -23, -39, -31], [37, 35], [9, 15], [10, 15], [-35, -11, -19, -3, -27], [29, 28], [28, 30], [24, 19], [29, 27], [18, 20], [27, 30], [39, 35], [32, 29], [21, 22], [28, 32], [36, 38], [37, 38]])
diff --git a/vendor/varisat/src/analyze_conflict.rs b/vendor/varisat/src/analyze_conflict.rs
new file mode 100644
index 000000000..16f918782
--- /dev/null
+++ b/vendor/varisat/src/analyze_conflict.rs
@@ -0,0 +1,368 @@
+//! Learns a new clause by analyzing a conflict.
+use std::mem::swap;
+
+use partial_ref::{partial, split_borrow, PartialRef};
+
+use vec_mut_scan::VecMutScan;
+
+use varisat_formula::{lit::LitIdx, Lit, Var};
+use varisat_internal_proof::{clause_hash, lit_hash, ClauseHash};
+
+use crate::{
+ clause::ClauseRef,
+ context::{parts::*, Context},
+ prop::{Conflict, Reason},
+};
+
+/// Temporaries for conflict analysis
+#[derive(Default)]
+pub struct AnalyzeConflict {
+ /// This is the learned clause after analysis finishes.
+ clause: Vec<Lit>,
+ /// Number of literals in the current clause at the current level.
+ current_level_count: usize,
+ /// Variables in the current clause.
+ var_flags: Vec<bool>,
+ /// Entries to clean in `var_flags`.
+ to_clean: Vec<Var>,
+ /// Clauses to bump.
+ involved: Vec<ClauseRef>,
+ /// Hashes of all involved clauses needed to proof the minimized clause.
+ clause_hashes: Vec<ClauseHash>,
+ /// Clause hashes paired with the trail depth of the propagated lit.
+ unordered_clause_hashes: Vec<(LitIdx, ClauseHash)>,
+ /// Stack for recursive minimization.
+ stack: Vec<Lit>,
+}
+
+impl AnalyzeConflict {
+ /// Update structures for a new variable count.
+ pub fn set_var_count(&mut self, count: usize) {
+ self.var_flags.resize(count, false);
+ }
+
+ /// The learned clause.
+ pub fn clause(&self) -> &[Lit] {
+ &self.clause
+ }
+
+ /// Long clauses involved in the conflict.
+ pub fn involved(&self) -> &[ClauseRef] {
+ &self.involved
+ }
+
+ /// Hashes of clauses involved in the proof of the learned clause.
+ ///
+ /// Hashes are in clause propagation order.
+ pub fn clause_hashes(&self) -> &[ClauseHash] {
+ &self.clause_hashes
+ }
+}
+
+/// Learns a new clause by analyzing a conflict.
+///
+/// Returns the lowest decision level that makes the learned clause asserting.
+pub fn analyze_conflict<'a>(
+ mut ctx: partial!(
+ Context<'a>,
+ mut AnalyzeConflictP,
+ mut VsidsP,
+ ClauseAllocP,
+ ImplGraphP,
+ ProofP<'a>,
+ TrailP,
+ ),
+ conflict: Conflict,
+) -> usize {
+ split_borrow!(lit_ctx = &(ClauseAllocP) ctx);
+
+ {
+ let analyze = ctx.part_mut(AnalyzeConflictP);
+
+ analyze.clause.clear();
+ analyze.involved.clear();
+ analyze.clause_hashes.clear();
+ analyze.unordered_clause_hashes.clear();
+ analyze.current_level_count = 0;
+ }
+
+ // We start with all the literals of the conflicted clause
+ let conflict_lits = conflict.lits(&lit_ctx);
+
+ if ctx.part(ProofP).clause_hashes_required() {
+ ctx.part_mut(AnalyzeConflictP)
+ .clause_hashes
+ .push(clause_hash(conflict_lits));
+ }
+
+ if ctx.part(TrailP).current_level() == 0 {
+ // Conflict with no decisions, generate empty clause
+ return 0;
+ }
+
+ for &lit in conflict_lits {
+ add_literal(ctx.borrow(), lit);
+ }
+
+ if let Conflict::Long(cref) = conflict {
+ ctx.part_mut(AnalyzeConflictP).involved.push(cref);
+ }
+
+ // To get rid of all but one literal of the current level, we resolve the clause with the reason
+ // for those literals. The correct order for this is reverse chronological.
+
+ split_borrow!(ctx_trail = &(TrailP) ctx);
+ // split_borrow!(ctx_analyze = &(mut AnalyzeConflictP) ctx);
+
+ for &lit in ctx_trail.part(TrailP).trail().iter().rev() {
+ let analyze = ctx.part_mut(AnalyzeConflictP);
+ let lit_present = &mut analyze.var_flags[lit.index()];
+ // Is the lit present in the current clause?
+ if *lit_present {
+ *lit_present = false;
+ analyze.current_level_count -= 1;
+ if analyze.current_level_count == 0 {
+ // lit is the last literal of the current level present in the current clause,
+ // therefore the resulting clause will assert !lit so we put in position 0
+ analyze.clause.push(!lit);
+ let end = analyze.clause.len() - 1;
+ analyze.clause.swap(0, end);
+
+ break;
+ } else {
+ // We removed the literal and now add its reason.
+ let (graph, mut ctx) = ctx.split_part(ImplGraphP);
+
+ let reason = graph.reason(lit.var());
+
+ let lits = reason.lits(&lit_ctx);
+
+ if ctx.part(ProofP).clause_hashes_required() && !reason.is_unit() {
+ let hash = clause_hash(lits) ^ lit_hash(lit);
+ ctx.part_mut(AnalyzeConflictP).clause_hashes.push(hash);
+ }
+
+ for &lit in lits {
+ add_literal(ctx.borrow(), lit);
+ }
+
+ if let Reason::Long(cref) = reason {
+ ctx.part_mut(AnalyzeConflictP).involved.push(*cref);
+ }
+ }
+ }
+ }
+
+ // This needs var_flags set and keeps some var_fags set.
+ minimize_clause(ctx.borrow());
+
+ let (analyze, mut ctx) = ctx.split_part_mut(AnalyzeConflictP);
+
+ if ctx.part(ProofP).clause_hashes_required() {
+ // Clause minimization cannot give us clause hashes in propagation order, so we need to sort
+ // them. Clauses used during minimization propagte before clauses used during initial
+ // analysis. The clauses during initial analysis are discovered in reverse propagation
+ // order. This means we can sort the minimization clauses in reverse order, append them to
+ // the initial clauses and then reverse the order of all clauses.
+ analyze
+ .unordered_clause_hashes
+ .sort_unstable_by_key(|&(depth, _)| !depth);
+
+ analyze
+ .unordered_clause_hashes
+ .dedup_by_key(|&mut (depth, _)| depth);
+
+ analyze.clause_hashes.extend(
+ analyze
+ .unordered_clause_hashes
+ .iter()
+ .map(|&(_, hash)| hash),
+ );
+ analyze.clause_hashes.reverse();
+ }
+
+ for var in analyze.to_clean.drain(..) {
+ analyze.var_flags[var.index()] = false;
+ }
+
+ // We find the highest level literal besides the asserted literal and move it into position 1.
+ // This is important to ensure the watchlist constraints are not violated on backtracking.
+ let mut backtrack_to = 0;
+
+ if analyze.clause.len() > 1 {
+ let (prefix, rest) = analyze.clause.split_at_mut(2);
+ let lit_1 = &mut prefix[1];
+ backtrack_to = ctx.part(ImplGraphP).level(lit_1.var());
+ for lit in rest.iter_mut() {
+ let lit_level = ctx.part(ImplGraphP).level(lit.var());
+ if lit_level > backtrack_to {
+ backtrack_to = lit_level;
+ swap(lit_1, lit);
+ }
+ }
+ }
+
+ ctx.part_mut(VsidsP).decay();
+
+ backtrack_to
+}
+
+/// Add a literal to the current clause.
+fn add_literal(
+ mut ctx: partial!(
+ Context,
+ mut AnalyzeConflictP,
+ mut VsidsP,
+ ImplGraphP,
+ TrailP
+ ),
+ lit: Lit,
+) {
+ let (analyze, mut ctx) = ctx.split_part_mut(AnalyzeConflictP);
+ let lit_level = ctx.part(ImplGraphP).level(lit.var());
+ // No need to add literals that are set by unit clauses or already present
+ if lit_level > 0 && !analyze.var_flags[lit.index()] {
+ ctx.part_mut(VsidsP).bump(lit.var());
+
+ analyze.var_flags[lit.index()] = true;
+ if lit_level == ctx.part(TrailP).current_level() {
+ analyze.current_level_count += 1;
+ } else {
+ analyze.clause.push(lit);
+ analyze.to_clean.push(lit.var());
+ }
+ }
+}
+
+/// A Bloom filter of levels.
+#[derive(Default)]
+struct LevelAbstraction {
+ bits: u64,
+}
+
+impl LevelAbstraction {
+ /// Add a level to the Bloom filter.
+ pub fn add(&mut self, level: usize) {
+ self.bits |= 1 << (level % 64)
+ }
+
+ /// Test whether a level could be in the Bloom filter.
+ pub fn test(&self, level: usize) -> bool {
+ self.bits & (1 << (level % 64)) != 0
+ }
+}
+
+/// Performs recursive clause minimization.
+///
+/// **Note:** Requires AnalyzeConflict's var_flags to be set for exactly the variables of the
+/// unminimized claused. This also sets some more var_flags, but lists them in to_clean.
+///
+/// This routine tries to remove some redundant literals of the learned clause. The idea is to
+/// detect literals of the learned clause that are already implied by other literals of the clause.
+///
+/// This is done by performing a DFS in the implication graph (following edges in reverse) for each
+/// literal (apart from the asserting one). The search doesn't expand literals already known to be
+/// implied by literals of the clause. When a decision literal that is not in the clause is found,
+/// it means that the literal is not redundant.
+///
+/// There are two optimizations used here: The first one is to stop the search as soon as a literal
+/// of a decision level not present in the clause is found. If the DFS would be continued it would
+/// at some point reach the decision of that level. That decision belongs to a level not in the
+/// clause and thus itself can't be in the clause. Checking whether the decision level is among the
+/// clause's decision levels is done approximately using a Bloom filter.
+///
+/// The other optimization is to avoid duplicating work during the DFS searches. When one literal is
+/// found to be redundant that means the whole search stayed within the implied literals. We
+/// remember this and will not expand any of these literals for the following DFS searches.
+///
+/// In this implementation the var_flags array here has two purposes. At the beginning it is set for
+/// all the literals of the clause. It is also used to mark the literals visited during the DFS.
+/// This allows us to combine the already-visited-check with the literal-present-in-clause check. It
+/// also allows for a neat implementation of the second optimization. When the search finds the
+/// literal to be non-redundant, we clear var_flags for the literals we visited, resetting it to the
+/// state at the beginning of the DFS. When the literal was redundant we keep it as is. This means
+/// the following DFS will not expand these literals.
+fn minimize_clause<'a>(
+ mut ctx: partial!(
+ Context<'a>,
+ mut AnalyzeConflictP,
+ mut VsidsP,
+ ClauseAllocP,
+ ImplGraphP,
+ ProofP<'a>,
+ TrailP,
+ ),
+) {
+ let (analyze, mut ctx) = ctx.split_part_mut(AnalyzeConflictP);
+ split_borrow!(lit_ctx = &(ClauseAllocP) ctx);
+ let impl_graph = ctx.part(ImplGraphP);
+
+ let mut involved_levels = LevelAbstraction::default();
+
+ for &lit in analyze.clause.iter() {
+ involved_levels.add(impl_graph.level(lit.var()));
+ }
+
+ let mut scan = VecMutScan::new(&mut analyze.clause);
+
+ // we always keep the first literal
+ scan.next();
+
+ 'next_lit: while let Some(lit) = scan.next() {
+ if impl_graph.reason(lit.var()) == &Reason::Unit {
+ continue;
+ }
+
+ // Start the DFS
+ analyze.stack.clear();
+ analyze.stack.push(!*lit);
+
+ // Used to remember which var_flags are set during this DFS
+ let top = analyze.to_clean.len();
+
+ // Used to remember which clause hashes were added during the DFS, so we can remove them in
+ // case the literal is not redundant.
+ let hashes_top = analyze.unordered_clause_hashes.len();
+
+ while let Some(lit) = analyze.stack.pop() {
+ let reason = impl_graph.reason(lit.var());
+ let lits = reason.lits(&lit_ctx);
+
+ if ctx.part(ProofP).clause_hashes_required() && !reason.is_unit() {
+ let depth = impl_graph.depth(lit.var()) as LitIdx;
+ let hash = clause_hash(lits) ^ lit_hash(lit);
+ analyze.unordered_clause_hashes.push((depth, hash));
+ }
+
+ for &reason_lit in lits {
+ let reason_level = impl_graph.level(reason_lit.var());
+
+ if !analyze.var_flags[reason_lit.index()] && reason_level > 0 {
+ // We haven't established reason_lit to be redundant, haven't visited it yet and
+ // it's not implied by unit clauses.
+
+ if impl_graph.reason(reason_lit.var()) == &Reason::Unit
+ || !involved_levels.test(reason_level)
+ {
+ // reason_lit is a decision not in the clause or in a decision level known
+ // not to be in the clause. Abort the search.
+
+ // Reset the var_flags set during _this_ DFS.
+ for lit in analyze.to_clean.drain(top..) {
+ analyze.var_flags[lit.index()] = false;
+ }
+ // Remove clauses not needed to justify the minimized clause.
+ analyze.unordered_clause_hashes.truncate(hashes_top);
+ continue 'next_lit;
+ } else {
+ analyze.var_flags[reason_lit.index()] = true;
+ analyze.to_clean.push(reason_lit.var());
+ analyze.stack.push(!reason_lit);
+ }
+ }
+ }
+ }
+
+ lit.remove();
+ }
+}
diff --git a/vendor/varisat/src/assumptions.rs b/vendor/varisat/src/assumptions.rs
new file mode 100644
index 000000000..c21d8cb7b
--- /dev/null
+++ b/vendor/varisat/src/assumptions.rs
@@ -0,0 +1,376 @@
+//! Incremental solving.
+
+use partial_ref::{partial, PartialRef};
+
+use varisat_formula::Lit;
+use varisat_internal_proof::{clause_hash, lit_hash, ClauseHash, ProofStep};
+
+use crate::{
+ context::{parts::*, Context},
+ proof,
+ prop::{enqueue_assignment, full_restart, Reason},
+ state::SatState,
+ variables,
+};
+
+/// Incremental solving.
+#[derive(Default)]
+pub struct Assumptions {
+ assumptions: Vec<Lit>,
+ failed_core: Vec<Lit>,
+ user_failed_core: Vec<Lit>,
+ assumption_levels: usize,
+ failed_propagation_hashes: Vec<ClauseHash>,
+}
+
+impl Assumptions {
+ /// Current number of decision levels used for assumptions.
+ pub fn assumption_levels(&self) -> usize {
+ self.assumption_levels
+ }
+
+ /// Resets assumption_levels to zero on a full restart.
+ pub fn full_restart(&mut self) {
+ self.assumption_levels = 0;
+ }
+
+ /// Subset of assumptions that made the formula unsatisfiable.
+ pub fn failed_core(&self) -> &[Lit] {
+ &self.failed_core
+ }
+
+ /// Subset of assumptions that made the formula unsatisfiable.
+ pub fn user_failed_core(&self) -> &[Lit] {
+ &self.user_failed_core
+ }
+
+ /// Current assumptions.
+ pub fn assumptions(&self) -> &[Lit] {
+ &self.assumptions
+ }
+}
+
+/// Return type of [`enqueue_assumption`].
+pub enum EnqueueAssumption {
+ Done,
+ Enqueued,
+ Conflict,
+}
+
+/// Change the currently active assumptions.
+///
+/// The input uses user variable names.
+pub fn set_assumptions<'a>(
+ mut ctx: partial!(
+ Context<'a>,
+ mut AnalyzeConflictP,
+ mut AssignmentP,
+ mut AssumptionsP,
+ mut BinaryClausesP,
+ mut ImplGraphP,
+ mut ProofP<'a>,
+ mut SolverStateP,
+ mut TmpFlagsP,
+ mut TrailP,
+ mut VariablesP,
+ mut VsidsP,
+ mut WatchlistsP,
+ ),
+ user_assumptions: &[Lit],
+) {
+ full_restart(ctx.borrow());
+
+ let state = ctx.part_mut(SolverStateP);
+
+ state.sat_state = match state.sat_state {
+ SatState::Unsat => SatState::Unsat,
+ SatState::Sat | SatState::UnsatUnderAssumptions | SatState::Unknown => SatState::Unknown,
+ };
+
+ let (assumptions, mut ctx_2) = ctx.split_part_mut(AssumptionsP);
+
+ for lit in assumptions.assumptions.iter() {
+ ctx_2
+ .part_mut(VariablesP)
+ .var_data_solver_mut(lit.var())
+ .assumed = false;
+ }
+
+ variables::solver_from_user_lits(
+ ctx_2.borrow(),
+ &mut assumptions.assumptions,
+ user_assumptions,
+ true,
+ );
+
+ for lit in assumptions.assumptions.iter() {
+ ctx_2
+ .part_mut(VariablesP)
+ .var_data_solver_mut(lit.var())
+ .assumed = true;
+ }
+
+ proof::add_step(
+ ctx_2.borrow(),
+ true,
+ &ProofStep::Assumptions {
+ assumptions: &assumptions.assumptions,
+ },
+ );
+}
+
+/// Enqueue another assumption if possible.
+///
+/// Returns whether an assumption was enqueued, whether no assumptions are left or whether the
+/// assumptions result in a conflict.
+pub fn enqueue_assumption<'a>(
+ mut ctx: partial!(
+ Context<'a>,
+ mut AssignmentP,
+ mut AssumptionsP,
+ mut ImplGraphP,
+ mut ProofP<'a>,
+ mut SolverStateP,
+ mut TmpFlagsP,
+ mut TrailP,
+ ClauseAllocP,
+ VariablesP,
+ ),
+) -> EnqueueAssumption {
+ while let Some(&assumption) = ctx
+ .part(AssumptionsP)
+ .assumptions
+ .get(ctx.part(TrailP).current_level())
+ {
+ match ctx.part(AssignmentP).lit_value(assumption) {
+ Some(false) => {
+ analyze_assumption_conflict(ctx.borrow(), assumption);
+ return EnqueueAssumption::Conflict;
+ }
+ Some(true) => {
+ // The next assumption is already implied by other assumptions so we can remove it.
+ let level = ctx.part(TrailP).current_level();
+ let assumptions = ctx.part_mut(AssumptionsP);
+ assumptions.assumptions.swap_remove(level);
+ }
+ None => {
+ ctx.part_mut(TrailP).new_decision_level();
+ enqueue_assignment(ctx.borrow(), assumption, Reason::Unit);
+ let (assumptions, ctx) = ctx.split_part_mut(AssumptionsP);
+ assumptions.assumption_levels = ctx.part(TrailP).current_level();
+ return EnqueueAssumption::Enqueued;
+ }
+ }
+ }
+ EnqueueAssumption::Done
+}
+
+/// Analyze a conflicting set of assumptions.
+///
+/// Compute a set of incompatible assumptions given an assumption that is incompatible with the
+/// assumptions enqueued so far.
+fn analyze_assumption_conflict<'a>(
+ mut ctx: partial!(
+ Context<'a>,
+ mut AssumptionsP,
+ mut ProofP<'a>,
+ mut SolverStateP,
+ mut TmpFlagsP,
+ ClauseAllocP,
+ ImplGraphP,
+ TrailP,
+ VariablesP,
+ ),
+ assumption: Lit,
+) {
+ let (assumptions, mut ctx) = ctx.split_part_mut(AssumptionsP);
+ let (tmp, mut ctx) = ctx.split_part_mut(TmpFlagsP);
+ let (trail, mut ctx) = ctx.split_part(TrailP);
+ let (impl_graph, mut ctx) = ctx.split_part(ImplGraphP);
+
+ let flags = &mut tmp.flags;
+
+ assumptions.failed_core.clear();
+ assumptions.failed_core.push(assumption);
+
+ assumptions.failed_propagation_hashes.clear();
+
+ flags[assumption.index()] = true;
+ let mut flag_count = 1;
+
+ for &lit in trail.trail().iter().rev() {
+ if flags[lit.index()] {
+ flags[lit.index()] = false;
+ flag_count -= 1;
+
+ match impl_graph.reason(lit.var()) {
+ Reason::Unit => {
+ if impl_graph.level(lit.var()) > 0 {
+ assumptions.failed_core.push(lit);
+ }
+ }
+ reason => {
+ let (ctx_lits, ctx) = ctx.split_borrow();
+ let reason_lits = reason.lits(&ctx_lits);
+
+ if ctx.part(ProofP).clause_hashes_required() {
+ let hash = clause_hash(reason_lits) ^ lit_hash(lit);
+ assumptions.failed_propagation_hashes.push(hash);
+ }
+
+ for &reason_lit in reason_lits {
+ if !flags[reason_lit.index()] {
+ flags[reason_lit.index()] = true;
+ flag_count += 1;
+ }
+ }
+ }
+ }
+
+ if flag_count == 0 {
+ break;
+ }
+ }
+ }
+
+ assumptions.failed_propagation_hashes.reverse();
+
+ assumptions.user_failed_core.clear();
+ assumptions
+ .user_failed_core
+ .extend(assumptions.failed_core.iter().map(|solver_lit| {
+ solver_lit
+ .map_var(|solver_var| ctx.part(VariablesP).existing_user_from_solver(solver_var))
+ }));
+
+ proof::add_step(
+ ctx.borrow(),
+ true,
+ &ProofStep::FailedAssumptions {
+ failed_core: &assumptions.failed_core,
+ propagation_hashes: &assumptions.failed_propagation_hashes,
+ },
+ );
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use proptest::{bool, prelude::*};
+
+ use partial_ref::IntoPartialRefMut;
+
+ use varisat_formula::{test::conditional_pigeon_hole, ExtendFormula, Var};
+
+ use crate::{cdcl::conflict_step, load::load_clause, solver::Solver, state::SatState};
+
+ proptest! {
+ #[test]
+ fn pigeon_hole_unsat_assumption_core_internal(
+ (enable_row, columns, formula) in conditional_pigeon_hole(1..5usize, 1..5usize),
+ chain in bool::ANY,
+ ) {
+ let mut ctx = Context::default();
+ let mut ctx = ctx.into_partial_ref_mut();
+
+ for clause in formula.iter() {
+ load_clause(ctx.borrow(), clause);
+ }
+
+ if chain {
+ for (&a, &b) in enable_row.iter().zip(enable_row.iter().skip(1)) {
+ load_clause(ctx.borrow(), &[!a, b]);
+ }
+ }
+
+ while ctx.part(SolverStateP).sat_state == SatState::Unknown {
+ conflict_step(ctx.borrow());
+ }
+
+ prop_assert_eq!(ctx.part(SolverStateP).sat_state, SatState::Sat);
+
+ set_assumptions(ctx.borrow(), &enable_row);
+
+ prop_assert_eq!(ctx.part(SolverStateP).sat_state, SatState::Unknown);
+
+ while ctx.part(SolverStateP).sat_state == SatState::Unknown {
+ conflict_step(ctx.borrow());
+ }
+
+ prop_assert_eq!(ctx.part(SolverStateP).sat_state, SatState::UnsatUnderAssumptions);
+
+ let mut candidates = ctx.part(AssumptionsP).user_failed_core().to_owned();
+ let mut core: Vec<Lit> = vec![];
+
+ loop {
+ set_assumptions(ctx.borrow(), &candidates[0..candidates.len() - 1]);
+
+ while ctx.part(SolverStateP).sat_state == SatState::Unknown {
+ conflict_step(ctx.borrow());
+ }
+
+ match ctx.part(SolverStateP).sat_state {
+ SatState::Unknown => unreachable!(),
+ SatState::Unsat => break,
+ SatState::Sat => {
+ let skipped = *candidates.last().unwrap();
+ core.push(skipped);
+ load_clause(ctx.borrow(), &[skipped]);
+ },
+ SatState::UnsatUnderAssumptions => {
+ candidates = ctx.part(AssumptionsP).user_failed_core().to_owned();
+ }
+ }
+ }
+ if chain {
+ prop_assert_eq!(core.len(), 1);
+ } else {
+ prop_assert_eq!(core.len(), columns + 1);
+ }
+ }
+
+ #[test]
+ fn pigeon_hole_unsat_assumption_core_solver(
+ (enable_row, columns, formula) in conditional_pigeon_hole(1..5usize, 1..5usize),
+ ) {
+ let mut solver = Solver::new();
+ solver.add_formula(&formula);
+
+ prop_assert_eq!(solver.solve().ok(), Some(true));
+
+ let mut assumptions = enable_row;
+
+ assumptions.push(Lit::positive(Var::from_index(formula.var_count() + 10)));
+
+ solver.assume(&assumptions);
+
+ prop_assert_eq!(solver.solve().ok(), Some(false));
+
+
+ let mut candidates = solver.failed_core().unwrap().to_owned();
+ let mut core: Vec<Lit> = vec![];
+
+ while !candidates.is_empty() {
+
+ solver.assume(&candidates[0..candidates.len() - 1]);
+
+ match solver.solve() {
+ Err(_) => unreachable!(),
+ Ok(true) => {
+ let skipped = *candidates.last().unwrap();
+ core.push(skipped);
+
+ solver.add_clause(&[skipped]);
+ solver.hide_var(skipped.var());
+ },
+ Ok(false) => {
+ candidates = solver.failed_core().unwrap().to_owned();
+ }
+ }
+ }
+
+ prop_assert_eq!(core.len(), columns + 1);
+ }
+ }
+}
diff --git a/vendor/varisat/src/binary.rs b/vendor/varisat/src/binary.rs
new file mode 100644
index 000000000..f39d04676
--- /dev/null
+++ b/vendor/varisat/src/binary.rs
@@ -0,0 +1,100 @@
+//! Binary clauses.
+
+use partial_ref::{partial, PartialRef};
+
+use varisat_formula::Lit;
+use varisat_internal_proof::{DeleteClauseProof, ProofStep};
+
+use crate::{
+ context::{parts::*, Context},
+ proof,
+};
+
+/// Binary clauses.
+#[derive(Default)]
+pub struct BinaryClauses {
+ by_lit: Vec<Vec<Lit>>,
+ count: usize,
+}
+
+impl BinaryClauses {
+ /// Update structures for a new variable count.
+ pub fn set_var_count(&mut self, count: usize) {
+ self.by_lit.resize(count * 2, vec![]);
+ }
+
+ /// Add a binary clause.
+ pub fn add_binary_clause(&mut self, lits: [Lit; 2]) {
+ for i in 0..2 {
+ self.by_lit[(!lits[i]).code()].push(lits[i ^ 1]);
+ }
+ self.count += 1;
+ }
+
+ /// Implications of a given literal
+ pub fn implied(&self, lit: Lit) -> &[Lit] {
+ &self.by_lit[lit.code()]
+ }
+
+ /// Number of binary clauses.
+ pub fn count(&self) -> usize {
+ self.count
+ }
+}
+
+/// Remove binary clauses that have an assigned literal.
+pub fn simplify_binary<'a>(
+ mut ctx: partial!(Context<'a>, mut BinaryClausesP, mut ProofP<'a>, mut SolverStateP, AssignmentP, VariablesP),
+) {
+ let (binary_clauses, mut ctx) = ctx.split_part_mut(BinaryClausesP);
+ let (assignment, mut ctx) = ctx.split_part(AssignmentP);
+
+ let mut double_count = 0;
+
+ for (code, implied) in binary_clauses.by_lit.iter_mut().enumerate() {
+ let lit = Lit::from_code(code);
+
+ if !assignment.lit_is_unk(lit) {
+ if ctx.part(ProofP).is_active() {
+ for &other_lit in implied.iter() {
+ // This check avoids deleting binary clauses twice if both literals are assigned.
+ if (!lit) < other_lit {
+ let lits = [!lit, other_lit];
+ proof::add_step(
+ ctx.borrow(),
+ true,
+ &ProofStep::DeleteClause {
+ clause: &lits[..],
+ proof: DeleteClauseProof::Satisfied,
+ },
+ );
+ }
+ }
+ }
+
+ implied.clear();
+ } else {
+ implied.retain(|&other_lit| {
+ let retain = assignment.lit_is_unk(other_lit);
+ // This check avoids deleting binary clauses twice if both literals are assigned.
+ if ctx.part(ProofP).is_active() && !retain && (!lit) < other_lit {
+ let lits = [!lit, other_lit];
+ proof::add_step(
+ ctx.borrow(),
+ true,
+ &ProofStep::DeleteClause {
+ clause: &lits[..],
+ proof: DeleteClauseProof::Satisfied,
+ },
+ );
+ }
+
+ retain
+ });
+
+ double_count += implied.len();
+ }
+ }
+
+ binary_clauses.count = double_count / 2;
+}
diff --git a/vendor/varisat/src/cdcl.rs b/vendor/varisat/src/cdcl.rs
new file mode 100644
index 000000000..88ee76d6a
--- /dev/null
+++ b/vendor/varisat/src/cdcl.rs
@@ -0,0 +1,264 @@
+//! Conflict driven clause learning.
+
+use partial_ref::{partial, PartialRef};
+
+use varisat_internal_proof::ProofStep;
+
+use crate::{
+ analyze_conflict::analyze_conflict,
+ assumptions::{enqueue_assumption, EnqueueAssumption},
+ clause::{assess_learned_clause, bump_clause, db, decay_clause_activities},
+ context::{parts::*, Context},
+ decision::make_decision,
+ model::reconstruct_global_model,
+ proof,
+ prop::{backtrack, enqueue_assignment, propagate, Conflict, Reason},
+ state::SatState,
+ unit_simplify::{prove_units, unit_simplify},
+};
+
+/// Find a conflict, learn a clause and backtrack.
+pub fn conflict_step<'a>(
+ mut ctx: partial!(
+ Context<'a>,
+ mut AnalyzeConflictP,
+ mut AssignmentP,
+ mut AssumptionsP,
+ mut BinaryClausesP,
+ mut ClauseActivityP,
+ mut ClauseAllocP,
+ mut ClauseDbP,
+ mut ImplGraphP,
+ mut ModelP,
+ mut ProofP<'a>,
+ mut SolverStateP,
+ mut TmpDataP,
+ mut TmpFlagsP,
+ mut TrailP,
+ mut VariablesP,
+ mut VsidsP,
+ mut WatchlistsP,
+ ),
+) {
+ let conflict = find_conflict(ctx.borrow());
+
+ let conflict = match conflict {
+ Ok(()) => {
+ reconstruct_global_model(ctx.borrow());
+ return;
+ }
+ Err(FoundConflict::Assumption) => {
+ ctx.part_mut(SolverStateP).sat_state = SatState::UnsatUnderAssumptions;
+ return;
+ }
+ Err(FoundConflict::Conflict(conflict)) => conflict,
+ };
+
+ let backtrack_to = analyze_conflict(ctx.borrow(), conflict);
+
+ let (analyze, mut ctx) = ctx.split_part(AnalyzeConflictP);
+
+ for &cref in analyze.involved() {
+ bump_clause(ctx.borrow(), cref);
+ }
+
+ decay_clause_activities(ctx.borrow());
+
+ backtrack(ctx.borrow(), backtrack_to);
+
+ let clause = analyze.clause();
+
+ proof::add_step(
+ ctx.borrow(),
+ true,
+ &ProofStep::AtClause {
+ redundant: clause.len() > 2,
+ clause,
+ propagation_hashes: analyze.clause_hashes(),
+ },
+ );
+
+ let reason = match clause.len() {
+ 0 => {
+ ctx.part_mut(SolverStateP).sat_state = SatState::Unsat;
+ return;
+ }
+ 1 => Reason::Unit,
+ 2 => {
+ ctx.part_mut(BinaryClausesP)
+ .add_binary_clause([clause[0], clause[1]]);
+ Reason::Binary([clause[1]])
+ }
+ _ => {
+ let header = assess_learned_clause(ctx.borrow(), clause);
+ let cref = db::add_clause(ctx.borrow(), header, clause);
+ Reason::Long(cref)
+ }
+ };
+
+ enqueue_assignment(ctx.borrow(), clause[0], reason);
+}
+
+/// Return type of [`find_conflict`].
+///
+/// Specifies whether a conflict was found during propagation or while enqueuing assumptions.
+enum FoundConflict {
+ Conflict(Conflict),
+ Assumption,
+}
+
+impl From<Conflict> for FoundConflict {
+ fn from(conflict: Conflict) -> FoundConflict {
+ FoundConflict::Conflict(conflict)
+ }
+}
+
+/// Find a conflict.
+///
+/// Returns `Err` if a conflict was found and `Ok` if a satisfying assignment was found instead.
+fn find_conflict<'a>(
+ mut ctx: partial!(
+ Context<'a>,
+ mut AssignmentP,
+ mut AssumptionsP,
+ mut BinaryClausesP,
+ mut ClauseAllocP,
+ mut ClauseDbP,
+ mut ImplGraphP,
+ mut ProofP<'a>,
+ mut SolverStateP,
+ mut TmpFlagsP,
+ mut TrailP,
+ mut VariablesP,
+ mut VsidsP,
+ mut WatchlistsP,
+ ),
+) -> Result<(), FoundConflict> {
+ loop {
+ let propagation_result = propagate(ctx.borrow());
+
+ let new_unit = prove_units(ctx.borrow());
+
+ propagation_result?;
+
+ if new_unit {
+ unit_simplify(ctx.borrow());
+ }
+
+ match enqueue_assumption(ctx.borrow()) {
+ EnqueueAssumption::Enqueued => continue,
+ EnqueueAssumption::Conflict => return Err(FoundConflict::Assumption),
+ EnqueueAssumption::Done => (),
+ }
+
+ if !make_decision(ctx.borrow()) {
+ return Ok(());
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use proptest::prelude::*;
+
+ use partial_ref::IntoPartialRefMut;
+
+ use varisat_formula::{
+ cnf_formula,
+ test::{sat_formula, sgen_unsat_formula},
+ };
+
+ use crate::{load::load_clause, state::SatState};
+
+ #[test]
+ fn level_0_unsat() {
+ let mut ctx = Context::default();
+ let mut ctx = ctx.into_partial_ref_mut();
+
+ let formula = cnf_formula![
+ 1, 2, 3;
+ -1;
+ 1, -2;
+ 2, -3;
+ ];
+
+ for clause in formula.iter() {
+ load_clause(ctx.borrow(), clause);
+ }
+
+ while ctx.part(SolverStateP).sat_state == SatState::Unknown {
+ conflict_step(ctx.borrow());
+ }
+
+ assert_eq!(ctx.part(SolverStateP).sat_state, SatState::Unsat);
+ }
+
+ proptest! {
+ #[test]
+ fn sgen_unsat(formula in sgen_unsat_formula(1..7usize)) {
+ let mut ctx = Context::default();
+ let mut ctx = ctx.into_partial_ref_mut();
+
+ for clause in formula.iter() {
+ load_clause(ctx.borrow(), clause);
+ }
+
+ while ctx.part(SolverStateP).sat_state == SatState::Unknown {
+ conflict_step(ctx.borrow());
+ }
+
+ prop_assert_eq!(ctx.part(SolverStateP).sat_state, SatState::Unsat);
+ }
+
+ #[test]
+ fn sat(formula in sat_formula(4..20usize, 10..100usize, 0.05..0.2, 0.9..1.0)) {
+ let mut ctx = Context::default();
+ let mut ctx = ctx.into_partial_ref_mut();
+
+ for clause in formula.iter() {
+ load_clause(ctx.borrow(), clause);
+ }
+
+ while ctx.part(SolverStateP).sat_state == SatState::Unknown {
+ conflict_step(ctx.borrow());
+ }
+
+ prop_assert_eq!(ctx.part(SolverStateP).sat_state, SatState::Sat);
+
+ for clause in formula.iter() {
+ prop_assert!(clause.iter().any(|&lit| ctx.part(ModelP).lit_is_true(
+ lit.map_var(|user_var| ctx
+ .part(VariablesP)
+ .global_from_user()
+ .get(user_var)
+ .expect("no existing global var for user var"))
+ )));
+ }
+ }
+
+ #[test]
+ fn sgen_unsat_incremetal_clauses(formula in sgen_unsat_formula(1..7usize)) {
+ let mut ctx = Context::default();
+ let mut ctx = ctx.into_partial_ref_mut();
+
+ let mut last_state = SatState::Sat;
+
+ for clause in formula.iter() {
+ load_clause(ctx.borrow(), clause);
+ while ctx.part(SolverStateP).sat_state == SatState::Unknown {
+ conflict_step(ctx.borrow());
+ }
+
+ if ctx.part(SolverStateP).sat_state != last_state {
+ prop_assert_eq!(ctx.part(SolverStateP).sat_state, SatState::Unsat);
+ prop_assert_eq!(last_state, SatState::Sat);
+ last_state = ctx.part(SolverStateP).sat_state;
+ }
+ }
+
+ prop_assert_eq!(last_state, SatState::Unsat);
+ }
+ }
+}
diff --git a/vendor/varisat/src/clause.rs b/vendor/varisat/src/clause.rs
new file mode 100644
index 000000000..0d3591858
--- /dev/null
+++ b/vendor/varisat/src/clause.rs
@@ -0,0 +1,67 @@
+//! Clause storage.
+use std::slice;
+
+use varisat_formula::{lit::LitIdx, Lit};
+
+pub mod activity;
+pub mod alloc;
+pub mod assess;
+pub mod db;
+pub mod gc;
+pub mod header;
+pub mod reduce;
+
+pub use activity::{bump_clause_activity, decay_clause_activities, ClauseActivity};
+pub use alloc::{ClauseAlloc, ClauseRef};
+pub use assess::{assess_learned_clause, bump_clause};
+pub use db::{ClauseDb, Tier};
+pub use gc::collect_garbage;
+pub use header::ClauseHeader;
+
+use header::HEADER_LEN;
+
+/// A clause.
+///
+/// This is stoed in a [`ClauseAlloc`] and thus must have a representation compatible with slice of
+/// [`LitIdx`] values.
+///
+/// It would be nicer to use a DST struct with two members and `repr(C)`, but while that can be
+/// declared in stable rust, it's almost impossible to work with.
+#[repr(transparent)]
+pub struct Clause {
+ data: [LitIdx],
+}
+
+impl Clause {
+ /// The clause's header
+ pub fn header(&self) -> &ClauseHeader {
+ unsafe {
+ let header_ptr = self.data.as_ptr() as *const ClauseHeader;
+ &*header_ptr
+ }
+ }
+
+ /// Mutable reference to the clause's header
+ pub fn header_mut(&mut self) -> &mut ClauseHeader {
+ unsafe {
+ let header_ptr = self.data.as_mut_ptr() as *mut ClauseHeader;
+ &mut *header_ptr
+ }
+ }
+
+ /// The clause's literals
+ pub fn lits(&self) -> &[Lit] {
+ unsafe {
+ let lit_ptr = self.data.as_ptr().add(HEADER_LEN) as *const Lit;
+ slice::from_raw_parts(lit_ptr, self.data.len() - HEADER_LEN)
+ }
+ }
+
+ /// Mutable slice of the clause's literals
+ pub fn lits_mut(&mut self) -> &mut [Lit] {
+ unsafe {
+ let lit_ptr = self.data.as_mut_ptr().add(HEADER_LEN) as *mut Lit;
+ slice::from_raw_parts_mut(lit_ptr, self.data.len() - HEADER_LEN)
+ }
+ }
+}
diff --git a/vendor/varisat/src/clause/activity.rs b/vendor/varisat/src/clause/activity.rs
new file mode 100644
index 000000000..8404c7e75
--- /dev/null
+++ b/vendor/varisat/src/clause/activity.rs
@@ -0,0 +1,107 @@
+//! Clause activity.
+use partial_ref::{partial, PartialRef};
+
+use crate::{
+ config::SolverConfig,
+ context::{parts::*, Context},
+};
+
+use super::ClauseRef;
+
+/// Clause activity.
+///
+/// The individual clause activities are stored in the clause allocator. This stores global metadata
+/// used for bumping and decaying activities.
+pub struct ClauseActivity {
+ /// The value to add on bumping.
+ bump: f32,
+ /// The inverse of the decay factor.
+ inv_decay: f32,
+}
+
+impl Default for ClauseActivity {
+ fn default() -> ClauseActivity {
+ ClauseActivity {
+ bump: 1.0,
+ inv_decay: 1.0 / SolverConfig::default().clause_activity_decay,
+ }
+ }
+}
+
+impl ClauseActivity {
+ /// Change the decay factor.
+ pub fn set_decay(&mut self, decay: f32) {
+ assert!(decay < 1.0);
+ assert!(decay > 1.0 / 16.0);
+ self.inv_decay = 1.0 / decay;
+ }
+}
+
+/// Rescale activities if any value exceeds this value.
+fn rescale_limit() -> f32 {
+ std::f32::MAX / 16.0
+}
+
+/// Increase a clause's activity.
+pub fn bump_clause_activity(
+ mut ctx: partial!(
+ Context,
+ mut ClauseActivityP,
+ mut ClauseAllocP,
+ mut ClauseDbP,
+ ),
+ cref: ClauseRef,
+) {
+ let bump = ctx.part(ClauseActivityP).bump;
+ let header = ctx.part_mut(ClauseAllocP).header_mut(cref);
+
+ let activity = header.activity() + bump;
+
+ header.set_activity(activity);
+
+ if activity > rescale_limit() {
+ rescale_clause_activities(ctx.borrow());
+ }
+}
+
+/// Rescale all values to avoid an overflow.
+fn rescale_clause_activities(
+ mut ctx: partial!(
+ Context,
+ mut ClauseActivityP,
+ mut ClauseAllocP,
+ mut ClauseDbP,
+ ),
+) {
+ let rescale_factor = 1.0 / rescale_limit();
+
+ let (db, mut ctx) = ctx.split_part_mut(ClauseDbP);
+ let (alloc, mut ctx) = ctx.split_part_mut(ClauseAllocP);
+ db.clauses.retain(|&cref| {
+ let header = alloc.header_mut(cref);
+ if header.deleted() {
+ false
+ } else {
+ let activity = header.activity() * rescale_factor;
+ header.set_activity(activity);
+ true
+ }
+ });
+ ctx.part_mut(ClauseActivityP).bump *= rescale_factor;
+}
+
+/// Decay the clause activities.
+pub fn decay_clause_activities(
+ mut ctx: partial!(
+ Context,
+ mut ClauseActivityP,
+ mut ClauseAllocP,
+ mut ClauseDbP,
+ ),
+) {
+ let activities = ctx.part_mut(ClauseActivityP);
+ activities.bump *= activities.inv_decay;
+ if activities.bump >= rescale_limit() {
+ rescale_clause_activities(ctx.borrow());
+ }
+}
diff --git a/vendor/varisat/src/clause/alloc.rs b/vendor/varisat/src/clause/alloc.rs
new file mode 100644
index 000000000..d39514040
--- /dev/null
+++ b/vendor/varisat/src/clause/alloc.rs
@@ -0,0 +1,262 @@
+//! Clause allocator.
+use std::{mem::transmute, slice};
+
+use varisat_formula::{lit::LitIdx, Lit};
+
+use super::{Clause, ClauseHeader, HEADER_LEN};
+
+/// Integer type used to store offsets into [`ClauseAlloc`]'s memory.
+type ClauseOffset = u32;
+
+/// Bump allocator for clause storage.
+///
+/// Clauses are allocated from a single contiguous buffer. Clauses cannot be freed individually. To
+/// reclaim space from deleted clauses, a new `ClauseAlloc` is created and the remaining clauses are
+/// copied over.
+///
+/// When the `ClauseAlloc`'s buffer is full, it is reallocated using the growing strategy of
+/// [`Vec`]. External references ([`ClauseRef`]) store an offset into the `ClauseAlloc`'s memory and
+/// remain valid when the buffer is grown. Clauses are aligned and the offset represents a multiple
+/// of the alignment size. This allows using 32-bit offsets while still supporting up to 16GB of
+/// clauses.
+#[derive(Default)]
+pub struct ClauseAlloc {
+ buffer: Vec<LitIdx>,
+}
+
+impl ClauseAlloc {
+    /// Create an empty clause allocator.
+ pub fn new() -> ClauseAlloc {
+ ClauseAlloc::default()
+ }
+
+ /// Create a clause allocator with preallocated capacity.
+ pub fn with_capacity(capacity: usize) -> ClauseAlloc {
+ ClauseAlloc {
+ buffer: Vec::with_capacity(capacity),
+ }
+ }
+
+ /// Allocate space for and add a new clause.
+ ///
+ /// Clauses have a minimal size of 3, as binary and unit clauses are handled separately. This is
+ /// enforced on the ClauseAlloc level to safely avoid extra bound checks when accessing the
+ /// initial literals of a clause.
+ ///
+ /// The size of the header will be set to the size of the given slice. The returned
+ /// [`ClauseRef`] can be used to access the new clause.
+ pub fn add_clause(&mut self, mut header: ClauseHeader, lits: &[Lit]) -> ClauseRef {
+ let offset = self.buffer.len();
+
+ assert!(
+ lits.len() >= 3,
+ "ClauseAlloc can only store ternary and larger clauses"
+ );
+
+ // TODO Maybe let the caller handle this?
+ assert!(
+ offset <= (ClauseRef::max_offset() as usize),
+ "Exceeded ClauseAlloc's maximal buffer size"
+ );
+
+ header.set_len(lits.len());
+
+ self.buffer.extend_from_slice(&header.data);
+
+ let lit_idx_slice = unsafe {
+ // This is safe as Lit and LitIdx have the same representation
+ slice::from_raw_parts(lits.as_ptr() as *const LitIdx, lits.len())
+ };
+
+ self.buffer.extend_from_slice(lit_idx_slice);
+
+ ClauseRef {
+ offset: offset as ClauseOffset,
+ }
+ }
+
+ /// Access the header of a clause.
+ pub fn header(&self, cref: ClauseRef) -> &ClauseHeader {
+ let offset = cref.offset as usize;
+ assert!(
+ offset as usize + HEADER_LEN <= self.buffer.len(),
+ "ClauseRef out of bounds"
+ );
+ unsafe { self.header_unchecked(cref) }
+ }
+
+ /// Mutate the header of a clause.
+ pub fn header_mut(&mut self, cref: ClauseRef) -> &mut ClauseHeader {
+ let offset = cref.offset as usize;
+ assert!(
+ offset as usize + HEADER_LEN <= self.buffer.len(),
+ "ClauseRef out of bounds"
+ );
+ unsafe { self.header_unchecked_mut(cref) }
+ }
+
+ unsafe fn header_unchecked(&self, cref: ClauseRef) -> &ClauseHeader {
+ let offset = cref.offset as usize;
+ let header_pointer = self.buffer.as_ptr().add(offset) as *const ClauseHeader;
+ &*header_pointer
+ }
+
+ /// Mutate the header of a clause without bound checks.
+ pub unsafe fn header_unchecked_mut(&mut self, cref: ClauseRef) -> &mut ClauseHeader {
+ let offset = cref.offset as usize;
+ let header_pointer = self.buffer.as_mut_ptr().add(offset) as *mut ClauseHeader;
+ &mut *header_pointer
+ }
+
+ /// Access a clause.
+ pub fn clause(&self, cref: ClauseRef) -> &Clause {
+ let header = self.header(cref);
+ let len = header.len();
+
+ // Even on 32 bit systems these additions can't overflow as we never create clause refs with
+ // an offset larger than ClauseRef::max_offset()
+
+ let lit_offset = cref.offset as usize + HEADER_LEN;
+ let lit_end = lit_offset + len;
+ assert!(lit_end <= self.buffer.len(), "ClauseRef out of bounds");
+ unsafe { self.clause_with_len_unchecked(cref, len) }
+ }
+
+ /// Mutate a clause.
+ pub fn clause_mut(&mut self, cref: ClauseRef) -> &mut Clause {
+ let header = self.header(cref);
+ let len = header.len();
+
+ // Even on 32 bit systems these additions can't overflow as we never create clause refs with
+ // an offset larger than ClauseRef::max_offset()
+
+ let lit_offset = cref.offset as usize + HEADER_LEN;
+ let lit_end = lit_offset + len;
+ assert!(lit_end <= self.buffer.len(), "ClauseRef out of bounds");
+ unsafe { self.clause_with_len_unchecked_mut(cref, len) }
+ }
+
+ /// Mutate the literals of a clause without bound checks.
+ pub unsafe fn lits_ptr_mut_unchecked(&mut self, cref: ClauseRef) -> *mut Lit {
+ let offset = cref.offset as usize;
+ self.buffer.as_ptr().add(offset + HEADER_LEN) as *mut Lit
+ }
+
+ /// Perform a manual bound check on a ClauseRef assuming a given clause length.
+ pub fn check_bounds(&self, cref: ClauseRef, len: usize) {
+ // Even on 32 bit systems these additions can't overflow as we never create clause refs with
+ // an offset larger than ClauseRef::max_offset()
+
+ let lit_offset = cref.offset as usize + HEADER_LEN;
+ let lit_end = lit_offset + len;
+ assert!(lit_end <= self.buffer.len(), "ClauseRef out of bounds");
+ }
+
+ unsafe fn clause_with_len_unchecked(&self, cref: ClauseRef, len: usize) -> &Clause {
+ let offset = cref.offset as usize;
+ #[allow(clippy::transmute_ptr_to_ptr)]
+ transmute::<&[LitIdx], &Clause>(slice::from_raw_parts(
+ self.buffer.as_ptr().add(offset),
+ len + HEADER_LEN,
+ ))
+ }
+
+ unsafe fn clause_with_len_unchecked_mut(&mut self, cref: ClauseRef, len: usize) -> &mut Clause {
+ let offset = cref.offset as usize;
+ #[allow(clippy::transmute_ptr_to_ptr)]
+ transmute::<&mut [LitIdx], &mut Clause>(slice::from_raw_parts_mut(
+ self.buffer.as_mut_ptr().add(offset),
+ len + HEADER_LEN,
+ ))
+ }
+
+ /// Current buffer size in multiples of [`LitIdx`].
+ pub fn buffer_size(&self) -> usize {
+ self.buffer.len()
+ }
+}
+
+/// Compact reference to a clause.
+///
+/// Used with [`ClauseAlloc`] to access the clause.
+#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Debug)]
+pub struct ClauseRef {
+ offset: ClauseOffset,
+}
+
+impl ClauseRef {
+ /// The largest offset supported by the ClauseAlloc
+ const fn max_offset() -> ClauseOffset {
+        // Make sure we can safely add a length to an offset without overflowing usize
+ ((usize::max_value() >> 1) & (ClauseOffset::max_value() as usize)) as ClauseOffset
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use varisat_formula::{cnf::strategy::*, CnfFormula, ExtendFormula};
+
+ use proptest::*;
+
+ proptest! {
+ #[test]
+ fn roundtrip_from_cnf_formula(input in cnf_formula(1..100usize, 0..1000, 3..30)) {
+
+ let mut clause_alloc = ClauseAlloc::new();
+ let mut clause_refs = vec![];
+
+ for clause_lits in input.iter() {
+ let header = ClauseHeader::new();
+ clause_refs.push(clause_alloc.add_clause(header, clause_lits));
+ }
+
+ let mut recovered = CnfFormula::new();
+
+ for cref in clause_refs {
+ let clause = clause_alloc.clause(cref);
+ prop_assert_eq!(clause.header().len(), clause.lits().len());
+ recovered.add_clause(clause.lits());
+ }
+
+ // Ignore difference caused by unused vars
+ recovered.set_var_count(input.var_count());
+
+ prop_assert_eq!(input, recovered);
+ }
+
+ #[test]
+ fn clause_mutation(input in cnf_formula(1..100usize, 0..1000, 3..30)) {
+
+ let mut clause_alloc = ClauseAlloc::new();
+ let mut clause_refs = vec![];
+
+ for clause_lits in input.iter() {
+ let header = ClauseHeader::new();
+ clause_refs.push(clause_alloc.add_clause(header, clause_lits));
+ }
+
+ for &cref in clause_refs.iter() {
+ let clause = clause_alloc.clause_mut(cref);
+ clause.lits_mut().reverse();
+ }
+
+ for &cref in clause_refs.iter() {
+ let clause_len = clause_alloc.clause(cref).lits().len();
+ if clause_len > 3 {
+ clause_alloc.header_mut(cref).set_len(clause_len - 1);
+ }
+ }
+
+ for (&cref, lits) in clause_refs.iter().zip(input.iter()) {
+ let expected = if lits.len() > 3 {
+ lits[1..].iter().rev()
+ } else {
+ lits.iter().rev()
+ };
+ prop_assert!(clause_alloc.clause(cref).lits().iter().eq(expected));
+ }
+ }
+ }
+}
diff --git a/vendor/varisat/src/clause/assess.rs b/vendor/varisat/src/clause/assess.rs
new file mode 100644
index 000000000..a28dacb64
--- /dev/null
+++ b/vendor/varisat/src/clause/assess.rs
@@ -0,0 +1,69 @@
+//! Clause assessment.
+use partial_ref::{partial, PartialRef};
+
+use varisat_formula::Lit;
+
+use crate::{
+ clause::{db, ClauseRef},
+ context::{parts::*, Context},
+ glue::compute_glue,
+};
+
+use super::{bump_clause_activity, ClauseHeader, Tier};
+
+/// Assess the newly learned clause and generate a clause header.
+pub fn assess_learned_clause(
+ mut ctx: partial!(Context, mut TmpFlagsP, ImplGraphP),
+ lits: &[Lit],
+) -> ClauseHeader {
+ // This is called while the clause is still in conflict, thus the computed glue level is one
+ // higher than it'll be after backtracking when the clause becomes asserting.
+ let glue = compute_glue(ctx.borrow(), lits) - 1;
+
+ let mut header = ClauseHeader::new();
+
+ header.set_glue(glue);
+ header.set_tier(select_tier(glue));
+
+ header
+}
+
+/// Compute the tier for a redundant clause with a given glue level.
+fn select_tier(glue: usize) -> Tier {
+ if glue <= 2 {
+ Tier::Core
+ } else if glue <= 6 {
+ Tier::Mid
+ } else {
+ Tier::Local
+ }
+}
+
+/// Update stats for clauses involved in the conflict.
+pub fn bump_clause(
+ mut ctx: partial!(
+ Context,
+ mut ClauseActivityP,
+ mut ClauseAllocP,
+ mut ClauseDbP,
+ mut TmpFlagsP,
+ ImplGraphP
+ ),
+ cref: ClauseRef,
+) {
+ bump_clause_activity(ctx.borrow(), cref);
+
+ let (alloc, mut ctx_2) = ctx.split_part_mut(ClauseAllocP);
+
+ let clause = alloc.clause_mut(cref);
+
+ let glue = compute_glue(ctx_2.borrow(), clause.lits());
+
+ clause.header_mut().set_active(true);
+
+ if glue < clause.header().glue() {
+ clause.header_mut().set_glue(glue);
+
+ db::set_clause_tier(ctx.borrow(), cref, select_tier(glue));
+ }
+}
diff --git a/vendor/varisat/src/clause/db.rs b/vendor/varisat/src/clause/db.rs
new file mode 100644
index 000000000..780e14557
--- /dev/null
+++ b/vendor/varisat/src/clause/db.rs
@@ -0,0 +1,269 @@
+//! Database for long clauses.
+use std::mem::transmute;
+
+use partial_ref::{partial, PartialRef};
+
+use varisat_formula::Lit;
+
+use crate::{
+ context::{parts::*, Context},
+ prop::Reason,
+};
+
+use super::{header::HEADER_LEN, ClauseAlloc, ClauseHeader, ClauseRef};
+
+/// Partitions of the clause database.
+///
+/// The long clauses are partitioned into 4 [`Tier`]s. This follows the approach described by
+/// Chanseok Oh in ["Between SAT and UNSAT: The Fundamental Difference in CDCL
+/// SAT"](https://doi.org/10.1007/978-3-319-24318-4_23), section 4.
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+#[repr(u8)]
+pub enum Tier {
+ Irred = 0,
+ Core = 1,
+ Mid = 2,
+ Local = 3,
+}
+
+impl Tier {
+ /// Total number of tiers.
+ pub const fn count() -> usize {
+ 4
+ }
+
+ /// Cast an index into the corresponding tier.
+ pub unsafe fn from_index(index: usize) -> Tier {
+ debug_assert!(index < Tier::count());
+ transmute(index as u8)
+ }
+}
+
+/// Database for long clauses.
+///
+/// Removal of clauses from the `clauses` and the `by_tier` fields can be delayed. The clause
+/// header's deleted and tier fields need to be checked when iterating over these. `by_tier` may
+/// also contain duplicate entries.
+#[derive(Default)]
+pub struct ClauseDb {
+ /// May contain deleted clauses, see above
+ pub(super) clauses: Vec<ClauseRef>,
+ /// May contain deleted and moved clauses, see above
+ pub(super) by_tier: [Vec<ClauseRef>; Tier::count()],
+ /// These counts should always be up to date
+ pub(super) count_by_tier: [usize; Tier::count()],
+ /// Size of deleted but not collected clauses
+ pub(super) garbage_size: usize,
+}
+
+impl ClauseDb {
+ /// The number of long clauses of a given tier.
+ pub fn count_by_tier(&self, tier: Tier) -> usize {
+ self.count_by_tier[tier as usize]
+ }
+}
+
+/// Add a long clause to the database.
+pub fn add_clause(
+ mut ctx: partial!(Context, mut ClauseAllocP, mut ClauseDbP, mut WatchlistsP),
+ header: ClauseHeader,
+ lits: &[Lit],
+) -> ClauseRef {
+ let tier = header.tier();
+
+ let cref = ctx.part_mut(ClauseAllocP).add_clause(header, lits);
+
+ ctx.part_mut(WatchlistsP)
+ .watch_clause(cref, [lits[0], lits[1]]);
+
+ let db = ctx.part_mut(ClauseDbP);
+
+ db.clauses.push(cref);
+ db.by_tier[tier as usize].push(cref);
+ db.count_by_tier[tier as usize] += 1;
+
+ cref
+}
+
+/// Change the tier of a long clause.
+///
+/// This is a noop for a clause already of the specified tier.
+pub fn set_clause_tier(
+ mut ctx: partial!(Context, mut ClauseAllocP, mut ClauseDbP),
+ cref: ClauseRef,
+ tier: Tier,
+) {
+ let (alloc, mut ctx) = ctx.split_part_mut(ClauseAllocP);
+ let db = ctx.part_mut(ClauseDbP);
+
+ let old_tier = alloc.header(cref).tier();
+ if old_tier != tier {
+ db.count_by_tier[old_tier as usize] -= 1;
+ db.count_by_tier[tier as usize] += 1;
+
+ alloc.header_mut(cref).set_tier(tier);
+ db.by_tier[tier as usize].push(cref);
+ }
+}
+
+/// Delete a long clause from the database.
+pub fn delete_clause(
+ mut ctx: partial!(Context, mut ClauseAllocP, mut ClauseDbP, mut WatchlistsP),
+ cref: ClauseRef,
+) {
+ // TODO Don't force a rebuild of all watchlists here
+ ctx.part_mut(WatchlistsP).disable();
+
+ let (alloc, mut ctx) = ctx.split_part_mut(ClauseAllocP);
+ let db = ctx.part_mut(ClauseDbP);
+
+ let header = alloc.header_mut(cref);
+
+ debug_assert!(
+ !header.deleted(),
+ "delete_clause for already deleted clause"
+ );
+
+ header.set_deleted(true);
+
+ db.count_by_tier[header.tier() as usize] -= 1;
+
+ db.garbage_size += header.len() + HEADER_LEN;
+}
+
+/// Delete a long clause from the database unless it is asserting.
+///
+/// Returns true if the clause was deleted.
+pub fn try_delete_clause(
+ mut ctx: partial!(
+ Context,
+ mut ClauseAllocP,
+ mut ClauseDbP,
+ mut WatchlistsP,
+ ImplGraphP,
+ AssignmentP,
+ ),
+ cref: ClauseRef,
+) -> bool {
+ let initial_lit = ctx.part(ClauseAllocP).clause(cref).lits()[0];
+ let asserting = ctx.part(AssignmentP).lit_is_true(initial_lit)
+ && ctx.part(ImplGraphP).reason(initial_lit.var()) == &Reason::Long(cref);
+
+ if !asserting {
+ delete_clause(ctx.borrow(), cref);
+ }
+ !asserting
+}
+
+/// Iterator over all long clauses.
+///
+/// This filters deleted (but uncollected) clauses on the fly.
+pub fn clauses_iter<'a>(
+ ctx: &'a partial!('a Context, ClauseAllocP, ClauseDbP),
+) -> impl Iterator<Item = ClauseRef> + 'a {
+ let alloc = ctx.part(ClauseAllocP);
+ ctx.part(ClauseDbP)
+ .clauses
+ .iter()
+ .cloned()
+ .filter(move |&cref| !alloc.header(cref).deleted())
+}
+
+/// Iterate over all and remove some long clauses.
+///
+/// Takes a closure that returns true for each clause that should be kept and false for each that
+/// should be deleted.
+pub fn filter_clauses<F>(
+ mut ctx: partial!(Context, mut ClauseAllocP, mut ClauseDbP, mut WatchlistsP),
+ mut filter: F,
+) where
+ F: FnMut(&mut ClauseAlloc, ClauseRef) -> bool,
+{
+ ctx.part_mut(WatchlistsP).disable();
+
+ let (alloc, mut ctx) = ctx.split_part_mut(ClauseAllocP);
+ let db = ctx.part_mut(ClauseDbP);
+
+ let count_by_tier = &mut db.count_by_tier;
+ let garbage_size = &mut db.garbage_size;
+
+ db.clauses.retain(|&cref| {
+ if alloc.header(cref).deleted() {
+ false
+ } else if filter(alloc, cref) {
+ true
+ } else {
+ let header = alloc.header_mut(cref);
+
+ header.set_deleted(true);
+
+ count_by_tier[header.tier() as usize] -= 1;
+
+ *garbage_size += header.len() + HEADER_LEN;
+
+ false
+ }
+ })
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use partial_ref::IntoPartialRefMut;
+
+ use varisat_formula::cnf_formula;
+
+ use crate::context::set_var_count;
+
+ #[test]
+ fn set_tiers_and_deletes() {
+ let mut ctx = Context::default();
+
+ let mut ctx = ctx.into_partial_ref_mut();
+
+ let clauses = cnf_formula![
+ 1, 2, 3;
+ 4, -5, 6;
+ -2, 3, -4;
+ -3, 5, 2, 7, 5;
+ ];
+
+ set_var_count(ctx.borrow(), clauses.var_count());
+
+ let tiers = vec![Tier::Irred, Tier::Core, Tier::Mid, Tier::Local];
+ let new_tiers = vec![Tier::Irred, Tier::Local, Tier::Local, Tier::Core];
+
+ let mut crefs = vec![];
+
+ for (clause, &tier) in clauses.iter().zip(tiers.iter()) {
+ let mut header = ClauseHeader::new();
+ header.set_tier(tier);
+ let cref = add_clause(ctx.borrow(), header, clause);
+ crefs.push(cref);
+ }
+
+ for (&cref, &tier) in crefs.iter().rev().zip(new_tiers.iter().rev()) {
+ set_clause_tier(ctx.borrow(), cref, tier);
+ }
+
+ // We only check presence, as deletion from these lists is delayed
+ assert!(ctx.part(ClauseDbP).by_tier[Tier::Irred as usize].contains(&crefs[0]));
+ assert!(ctx.part(ClauseDbP).by_tier[Tier::Core as usize].contains(&crefs[3]));
+ assert!(ctx.part(ClauseDbP).by_tier[Tier::Local as usize].contains(&crefs[1]));
+ assert!(ctx.part(ClauseDbP).by_tier[Tier::Local as usize].contains(&crefs[2]));
+
+ assert_eq!(ctx.part(ClauseDbP).count_by_tier(Tier::Irred), 1);
+ assert_eq!(ctx.part(ClauseDbP).count_by_tier(Tier::Core), 1);
+ assert_eq!(ctx.part(ClauseDbP).count_by_tier(Tier::Mid), 0);
+ assert_eq!(ctx.part(ClauseDbP).count_by_tier(Tier::Local), 2);
+
+ delete_clause(ctx.borrow(), crefs[0]);
+ delete_clause(ctx.borrow(), crefs[2]);
+
+ assert_eq!(ctx.part(ClauseDbP).count_by_tier(Tier::Irred), 0);
+ assert_eq!(ctx.part(ClauseDbP).count_by_tier(Tier::Core), 1);
+ assert_eq!(ctx.part(ClauseDbP).count_by_tier(Tier::Mid), 0);
+ assert_eq!(ctx.part(ClauseDbP).count_by_tier(Tier::Local), 1);
+ }
+}
diff --git a/vendor/varisat/src/clause/gc.rs b/vendor/varisat/src/clause/gc.rs
new file mode 100644
index 000000000..0ad116660
--- /dev/null
+++ b/vendor/varisat/src/clause/gc.rs
@@ -0,0 +1,207 @@
+//! Garbage collection of long clauses.
+use partial_ref::{partial, PartialRef};
+
+use crate::{
+ context::{parts::*, Context},
+ prop::Reason,
+};
+
+use super::{ClauseAlloc, Tier};
+
+/// Perform a garbage collection of long clauses if necessary.
+pub fn collect_garbage(
+ mut ctx: partial!(
+ Context,
+ mut ClauseAllocP,
+ mut ClauseDbP,
+ mut ImplGraphP,
+ mut WatchlistsP,
+ TrailP,
+ ),
+) {
+ let alloc = ctx.part(ClauseAllocP);
+ let db = ctx.part(ClauseDbP);
+
+ // Collecting when a fixed fraction of the allocation is garbage amortizes collection costs.
+ if db.garbage_size * 2 > alloc.buffer_size() {
+ collect_garbage_now(ctx.borrow());
+ }
+}
+
+/// Unconditionally perform a garbage collection of long clauses.
+///
+/// This needs to invalidate or update any other data structure containing references to
+/// clauses.
+fn collect_garbage_now(
+ mut ctx: partial!(
+ Context,
+ mut ClauseAllocP,
+ mut ClauseDbP,
+ mut ImplGraphP,
+ mut WatchlistsP,
+ TrailP,
+ ),
+) {
+ ctx.part_mut(WatchlistsP).disable();
+
+ mark_asserting_clauses(ctx.borrow());
+
+ let (db, mut ctx) = ctx.split_part_mut(ClauseDbP);
+ let (impl_graph, mut ctx) = ctx.split_part_mut(ImplGraphP);
+ let alloc = ctx.part_mut(ClauseAllocP);
+
+ assert!(
+ db.garbage_size <= alloc.buffer_size(),
+ "Inconsistent garbage tracking in ClauseDb"
+ );
+ let current_size = alloc.buffer_size() - db.garbage_size;
+
+ // Allocating just the current size would lead to an immediate growing when new clauses are
+ // learned, overallocating here avoids that.
+ let mut new_alloc = ClauseAlloc::with_capacity(current_size * 2);
+
+ let mut new_clauses = vec![];
+ let mut new_by_tier: [Vec<_>; Tier::count()] = Default::default();
+
+ // TODO Optimize order of clauses (benchmark this)
+
+ db.clauses.retain(|&cref| {
+ let clause = alloc.clause(cref);
+ let mut header = *clause.header();
+ if header.deleted() {
+ false
+ } else {
+ let clause_is_asserting = header.mark();
+ header.set_mark(false);
+
+ let new_cref = new_alloc.add_clause(header, clause.lits());
+
+ new_clauses.push(new_cref);
+ new_by_tier[header.tier() as usize].push(new_cref);
+
+ if clause_is_asserting {
+ let asserted_lit = clause.lits()[0];
+
+ debug_assert_eq!(impl_graph.reason(asserted_lit.var()), &Reason::Long(cref));
+ impl_graph.update_reason(asserted_lit.var(), Reason::Long(new_cref));
+ }
+ true
+ }
+ });
+
+ *ctx.part_mut(ClauseAllocP) = new_alloc;
+ db.clauses = new_clauses;
+ db.by_tier = new_by_tier;
+ db.garbage_size = 0;
+}
+
+/// Mark asserting clauses to track them through GC.
+fn mark_asserting_clauses(mut ctx: partial!(Context, mut ClauseAllocP, ImplGraphP, TrailP,)) {
+ let (trail, mut ctx) = ctx.split_part(TrailP);
+ let (alloc, ctx) = ctx.split_part_mut(ClauseAllocP);
+ let impl_graph = ctx.part(ImplGraphP);
+
+ for &lit in trail.trail().iter() {
+ if let Reason::Long(cref) = impl_graph.reason(lit.var()) {
+ alloc.header_mut(*cref).set_mark(true);
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use std::cmp::max;
+
+ use partial_ref::IntoPartialRefMut;
+ use proptest::*;
+
+ use varisat_formula::{cnf::strategy::*, Lit};
+
+ use crate::{
+ clause::{db, ClauseHeader},
+ context::set_var_count,
+ prop::enqueue_assignment,
+ };
+
+ proptest! {
+ #[test]
+ fn garbage_collection(
+ input_a in cnf_formula(2..100usize, 500..1000, 3..30),
+ input_b in cnf_formula(2..100usize, 10..500, 4..20),
+ ) {
+ let mut ctx = Context::default();
+ let mut ctx = ctx.into_partial_ref_mut();
+
+ set_var_count(ctx.borrow(), max(input_a.var_count(), input_b.var_count()));
+
+ let mut crefs_a = vec![];
+ let mut crefs_b = vec![];
+
+ for lits in input_a.iter() {
+ let header = ClauseHeader::new();
+ let cref = db::add_clause(ctx.borrow(), header, lits);
+ crefs_a.push(cref);
+ }
+
+ for lits in input_b.iter() {
+ let header = ClauseHeader::new();
+ let cref = db::add_clause(ctx.borrow(), header, lits);
+ crefs_b.push(cref);
+
+ if ctx.part(AssignmentP).lit_value(lits[0]) == None {
+ // This isn't consistent, as the clause isn't actually propagating, but that
+ // isn't checked during garbage collection
+ enqueue_assignment(ctx.borrow(), lits[0], Reason::Long(cref));
+ }
+ }
+
+ for cref in crefs_a {
+ db::delete_clause(ctx.borrow(), cref);
+ prop_assert!(ctx.part(ClauseDbP).garbage_size > 0);
+ }
+
+ let old_buffer_size = ctx.part(ClauseAllocP).buffer_size();
+
+ collect_garbage(ctx.borrow());
+
+ prop_assert!(
+ ctx.part(ClauseDbP).garbage_size * 2 < ctx.part(ClauseAllocP).buffer_size()
+ );
+
+ prop_assert!(
+ old_buffer_size > ctx.part(ClauseAllocP).buffer_size()
+ );
+
+ prop_assert!(!ctx.part(WatchlistsP).enabled());
+
+ let mut output_clauses: Vec<Vec<Lit>> = vec![];
+
+ for &cref in ctx.part(ClauseDbP).clauses.iter() {
+ let clause = ctx.part(ClauseAllocP).clause(cref);
+ if clause.header().deleted() {
+ continue;
+ }
+ prop_assert!(!clause.header().mark());
+ output_clauses.push(clause.lits().to_vec());
+ }
+
+ let mut input_clauses: Vec<Vec<Lit>> = input_b
+ .iter()
+ .map(|c| c.to_vec())
+ .collect();
+
+ output_clauses.sort();
+ input_clauses.sort();
+
+ prop_assert_eq!(input_clauses, output_clauses);
+
+ for &lit in ctx.part(TrailP).trail() {
+ if let Reason::Long(cref) = ctx.part(ImplGraphP).reason(lit.var()) {
+ prop_assert_eq!(ctx.part(ClauseAllocP).clause(*cref).lits()[0], lit)
+ }
+ }
+ }
+ }
+}
diff --git a/vendor/varisat/src/clause/header.rs b/vendor/varisat/src/clause/header.rs
new file mode 100644
index 000000000..74656e8c1
--- /dev/null
+++ b/vendor/varisat/src/clause/header.rs
@@ -0,0 +1,148 @@
+//! Metadata stored in the header of each long clause.
+use std::cmp::min;
+
+use varisat_formula::{lit::LitIdx, Var};
+
+use super::Tier;
+
+/// Length of a [`ClauseHeader`] in multiples of [`LitIdx`]
+pub(super) const HEADER_LEN: usize = 3;
+
+const TIER_WORD: usize = HEADER_LEN - 2;
+const TIER_OFFSET: usize = 0;
+const TIER_MASK: LitIdx = 0b11;
+
+const DELETED_WORD: usize = HEADER_LEN - 2;
+const DELETED_OFFSET: usize = 2;
+
+const MARK_WORD: usize = HEADER_LEN - 2;
+const MARK_OFFSET: usize = 3;
+
+const GLUE_WORD: usize = HEADER_LEN - 2;
+const GLUE_OFFSET: usize = 4;
+const GLUE_MASK: LitIdx = (1 << 6) - 1;
+
+const ACTIVE_WORD: usize = HEADER_LEN - 2;
+const ACTIVE_OFFSET: usize = 10;
+
+const ACTIVITY_WORD: usize = HEADER_LEN - 3;
+
+/// Metadata for a clause.
+///
+/// This is stored in a [`ClauseAlloc`](super::ClauseAlloc) and thus must have a representation
+/// compatible with slice of [`LitIdx`] values.
+#[repr(transparent)]
+#[derive(Copy, Clone, Default)]
+pub struct ClauseHeader {
+ pub(super) data: [LitIdx; HEADER_LEN],
+}
+
+impl ClauseHeader {
+ /// Create a new clause header with default entries.
+ pub fn new() -> ClauseHeader {
+ Self::default()
+ }
+
+ /// Length of the clause.
+ pub fn len(&self) -> usize {
+ self.data[HEADER_LEN - 1] as usize
+ }
+
+ /// Set the length of the clause.
+ ///
+ /// Must be `<= Var::max_count()` as each variable may only be present once per clause.
+ pub fn set_len(&mut self, length: usize) {
+ debug_assert!(length <= Var::max_count());
+
+ self.data[HEADER_LEN - 1] = length as LitIdx;
+ }
+
+ /// Whether the clause is marked as deleted.
+ pub fn deleted(&self) -> bool {
+ (self.data[DELETED_WORD] >> DELETED_OFFSET) & 1 != 0
+ }
+
+ /// Mark the clause as deleted.
+ pub fn set_deleted(&mut self, deleted: bool) {
+ let word = &mut self.data[DELETED_WORD];
+ *word = (*word & !(1 << DELETED_OFFSET)) | ((deleted as LitIdx) << DELETED_OFFSET);
+ }
+
+ /// Current [`Tier`] of the clause.
+ pub fn tier(&self) -> Tier {
+ unsafe { Tier::from_index(((self.data[TIER_WORD] >> TIER_OFFSET) & TIER_MASK) as usize) }
+ }
+
+ /// Set the current [`Tier`] of the clause.
+ pub fn set_tier(&mut self, tier: Tier) {
+ let word = &mut self.data[TIER_WORD];
+ *word = (*word & !(TIER_MASK << TIER_OFFSET)) | ((tier as LitIdx) << TIER_OFFSET);
+ }
+
+ /// Whether the clause is redundant.
+ pub fn redundant(&self) -> bool {
+ self.tier() != Tier::Irred
+ }
+
+ /// Whether the clause is marked.
+ ///
+ /// The mark is a temporary bit that can be set by various routines, but should always be reset
+ /// to false.
+ pub fn mark(&self) -> bool {
+ (self.data[MARK_WORD] >> MARK_OFFSET) & 1 != 0
+ }
+
+ /// Mark or unmark the clause.
+ ///
+ /// Make sure to clear the mark after use.
+ pub fn set_mark(&mut self, mark: bool) {
+ let word = &mut self.data[MARK_WORD];
+ *word = (*word & !(1 << MARK_OFFSET)) | ((mark as LitIdx) << MARK_OFFSET);
+ }
+
+ /// The clause's active flag
+ ///
+ /// This is set when a clause was involved in conflict analysis and periodically reset.
+ pub fn active(&self) -> bool {
+ (self.data[ACTIVE_WORD] >> ACTIVE_OFFSET) & 1 != 0
+ }
+
+ /// Set or reset the clause's active flag.
+ pub fn set_active(&mut self, active: bool) {
+ let word = &mut self.data[ACTIVE_WORD];
+ *word = (*word & !(1 << ACTIVE_OFFSET)) | ((active as LitIdx) << ACTIVE_OFFSET);
+ }
+
+ /// The [glue][crate::glue] level.
+ pub fn glue(&self) -> usize {
+ ((self.data[GLUE_WORD] >> GLUE_OFFSET) & GLUE_MASK) as usize
+ }
+
+ /// Update the [glue][crate::glue] level.
+ pub fn set_glue(&mut self, glue: usize) {
+ let glue = min(glue, GLUE_MASK as usize) as LitIdx;
+ let word = &mut self.data[GLUE_WORD];
+
+ *word = (*word & !(GLUE_MASK << GLUE_OFFSET)) | (glue << GLUE_OFFSET);
+ }
+
+ /// Clause [activity][crate::clause::activity].
+ pub fn activity(&self) -> f32 {
+ f32::from_bits(self.data[ACTIVITY_WORD] as u32)
+ }
+
+ /// Update clause [activity][crate::clause::activity].
+ pub fn set_activity(&mut self, activity: f32) {
+ self.data[ACTIVITY_WORD] = activity.to_bits() as LitIdx;
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn tier_mask() {
+ assert!(Tier::count() <= TIER_MASK as usize + 1);
+ }
+}
diff --git a/vendor/varisat/src/clause/reduce.rs b/vendor/varisat/src/clause/reduce.rs
new file mode 100644
index 000000000..1252ca50b
--- /dev/null
+++ b/vendor/varisat/src/clause/reduce.rs
@@ -0,0 +1,132 @@
+//! Clause database reduction.
+use std::mem::replace;
+
+use ordered_float::OrderedFloat;
+use vec_mut_scan::VecMutScan;
+
+use partial_ref::{partial, PartialRef};
+
+use varisat_internal_proof::{DeleteClauseProof, ProofStep};
+
+use crate::{
+ context::{parts::*, Context},
+ proof,
+};
+
+use super::db::{set_clause_tier, try_delete_clause, Tier};
+
+/// Remove deleted and duplicate entries from the by_tier clause lists.
+///
+/// This has the side effect of setting the mark bit on all clauses of the tier.
+pub fn dedup_and_mark_by_tier(
+ mut ctx: partial!(Context, mut ClauseAllocP, mut ClauseDbP),
+ tier: Tier,
+) {
+ let (alloc, mut ctx) = ctx.split_part_mut(ClauseAllocP);
+ let by_tier = &mut ctx.part_mut(ClauseDbP).by_tier[tier as usize];
+
+ by_tier.retain(|&cref| {
+ let header = alloc.header_mut(cref);
+ let retain = !header.deleted() && !header.mark() && header.tier() == tier;
+ if retain {
+ header.set_mark(true);
+ }
+ retain
+ })
+}
+
+/// Reduce the number of local tier clauses by deleting half of them.
+pub fn reduce_locals<'a>(
+ mut ctx: partial!(
+ Context<'a>,
+ mut ClauseAllocP,
+ mut ClauseDbP,
+ mut ProofP<'a>,
+ mut SolverStateP,
+ mut WatchlistsP,
+ AssignmentP,
+ ImplGraphP,
+ VariablesP,
+ ),
+) {
+ dedup_and_mark_by_tier(ctx.borrow(), Tier::Local);
+
+ let mut locals = replace(
+ &mut ctx.part_mut(ClauseDbP).by_tier[Tier::Local as usize],
+ vec![],
+ );
+
+ locals.sort_unstable_by_key(|&cref| {
+ (
+ OrderedFloat(ctx.part(ClauseAllocP).header(cref).activity()),
+ cref,
+ )
+ });
+
+ let mut to_delete = locals.len() / 2;
+
+ let mut scan = VecMutScan::new(&mut locals);
+
+ if to_delete > 0 {
+ while let Some(cref) = scan.next() {
+ ctx.part_mut(ClauseAllocP).header_mut(*cref).set_mark(false);
+
+ if try_delete_clause(ctx.borrow(), *cref) {
+ if ctx.part(ProofP).is_active() {
+ let (alloc, mut ctx) = ctx.split_part(ClauseAllocP);
+ let lits = alloc.clause(*cref).lits();
+ proof::add_step(
+ ctx.borrow(),
+ true,
+ &ProofStep::DeleteClause {
+ clause: lits,
+ proof: DeleteClauseProof::Redundant,
+ },
+ );
+ }
+
+ cref.remove();
+ to_delete -= 1;
+ if to_delete == 0 {
+ break;
+ }
+ }
+ }
+ }
+
+ // Make sure to clear all marks
+ while let Some(cref) = scan.next() {
+ ctx.part_mut(ClauseAllocP).header_mut(*cref).set_mark(false);
+ }
+
+ drop(scan);
+
+ ctx.part_mut(ClauseDbP).count_by_tier[Tier::Local as usize] = locals.len();
+ ctx.part_mut(ClauseDbP).by_tier[Tier::Local as usize] = locals;
+}
+
+/// Reduce the number of mid tier clauses by moving inactive ones to the local tier.
+pub fn reduce_mids(mut ctx: partial!(Context, mut ClauseAllocP, mut ClauseDbP)) {
+ dedup_and_mark_by_tier(ctx.borrow(), Tier::Mid);
+
+ let mut mids = replace(
+ &mut ctx.part_mut(ClauseDbP).by_tier[Tier::Mid as usize],
+ vec![],
+ );
+
+ mids.retain(|&cref| {
+ let header = ctx.part_mut(ClauseAllocP).header_mut(cref);
+ header.set_mark(false);
+
+ if header.active() {
+ header.set_active(false);
+ true
+ } else {
+ set_clause_tier(ctx.borrow(), cref, Tier::Local);
+ false
+ }
+ });
+
+ ctx.part_mut(ClauseDbP).count_by_tier[Tier::Mid as usize] = mids.len();
+ ctx.part_mut(ClauseDbP).by_tier[Tier::Mid as usize] = mids;
+}
diff --git a/vendor/varisat/src/config.rs b/vendor/varisat/src/config.rs
new file mode 100644
index 000000000..433b91ccf
--- /dev/null
+++ b/vendor/varisat/src/config.rs
@@ -0,0 +1,31 @@
+//! Solver configuration.
+use varisat_internal_macros::{ConfigUpdate, DocDefault};
+
+/// Configurable parameters used during solving.
+#[derive(DocDefault, ConfigUpdate)]
+pub struct SolverConfig {
+ /// Multiplicative decay for the VSIDS decision heuristic.
+ ///
+ /// [default: 0.95] [range: 0.5..1.0]
+ pub vsids_decay: f32,
+
+ /// Multiplicative decay for clause activities.
+ ///
+ /// [default: 0.999] [range: 0.5..1.0]
+ pub clause_activity_decay: f32,
+
+ /// Number of conflicts between local clause reductions.
+ ///
+ /// [default: 15000] [range: 1..]
+ pub reduce_locals_interval: u64,
+
+ /// Number of conflicts between mid clause reductions.
+ ///
+ /// [default: 10000] [range: 1..]
+ pub reduce_mids_interval: u64,
+
+ /// Scaling factor for luby sequence based restarts (number of conflicts).
+ ///
+ /// [default: 128] [range: 1..]
+ pub luby_restart_interval_scale: u64,
+}
diff --git a/vendor/varisat/src/context.rs b/vendor/varisat/src/context.rs
new file mode 100644
index 000000000..47a83c2e3
--- /dev/null
+++ b/vendor/varisat/src/context.rs
@@ -0,0 +1,131 @@
+//! Central solver data structure.
+//!
+//! This module defines the `Context` data structure which holds all data used by the solver. It
+//! also contains global notification functions that likely need to be extended when new parts are
+//! added to the solver.
+use partial_ref::{part, partial, PartialRef, PartialRefTarget};
+
+use crate::{
+ analyze_conflict::AnalyzeConflict,
+ assumptions::Assumptions,
+ binary::BinaryClauses,
+ clause::{ClauseActivity, ClauseAlloc, ClauseDb},
+ config::{SolverConfig, SolverConfigUpdate},
+ decision::vsids::Vsids,
+ model::Model,
+ proof::Proof,
+ prop::{Assignment, ImplGraph, Trail, Watchlists},
+ schedule::Schedule,
+ state::SolverState,
+ tmp::{TmpData, TmpFlags},
+ variables::Variables,
+};
+
+/// Part declarations for the [`Context`] struct.
+pub mod parts {
+ use super::*;
+
+ part!(pub AnalyzeConflictP: AnalyzeConflict);
+ part!(pub AssignmentP: Assignment);
+ part!(pub BinaryClausesP: BinaryClauses);
+ part!(pub ClauseActivityP: ClauseActivity);
+ part!(pub ClauseAllocP: ClauseAlloc);
+ part!(pub ClauseDbP: ClauseDb);
+ part!(pub ImplGraphP: ImplGraph);
+ part!(pub AssumptionsP: Assumptions);
+ part!(pub ModelP: Model);
+ part!(pub ProofP<'a>: Proof<'a>);
+ part!(pub ScheduleP: Schedule);
+ part!(pub SolverConfigP: SolverConfig);
+ part!(pub SolverStateP: SolverState);
+ part!(pub TmpDataP: TmpData);
+ part!(pub TmpFlagsP: TmpFlags);
+ part!(pub TrailP: Trail);
+ part!(pub VariablesP: Variables);
+ part!(pub VsidsP: Vsids);
+ part!(pub WatchlistsP: Watchlists);
+}
+
+use parts::*;
+
+/// Central solver data structure.
+///
+/// This struct contains all data kept by the solver. Most functions operating on multiple fields of
+/// the context use partial references provided by the `partial_ref` crate. This documents the data
+/// dependencies and makes the borrow checker happy without the overhead of passing individual
+/// references.
+#[derive(PartialRefTarget, Default)]
+pub struct Context<'a> {
+ #[part(AnalyzeConflictP)]
+ pub analyze_conflict: AnalyzeConflict,
+ #[part(AssignmentP)]
+ pub assignment: Assignment,
+ #[part(BinaryClausesP)]
+ pub binary_clauses: BinaryClauses,
+ #[part(ClauseActivityP)]
+ pub clause_activity: ClauseActivity,
+ #[part(ClauseAllocP)]
+ pub clause_alloc: ClauseAlloc,
+ #[part(ClauseDbP)]
+ pub clause_db: ClauseDb,
+ #[part(ImplGraphP)]
+ pub impl_graph: ImplGraph,
+ #[part(AssumptionsP)]
+ pub assumptions: Assumptions,
+ #[part(ModelP)]
+ pub model: Model,
+ #[part(ProofP<'a>)]
+ pub proof: Proof<'a>,
+ #[part(ScheduleP)]
+ pub schedule: Schedule,
+ #[part(SolverConfigP)]
+ pub solver_config: SolverConfig,
+ #[part(SolverStateP)]
+ pub solver_state: SolverState,
+ #[part(TmpDataP)]
+ pub tmp_data: TmpData,
+ #[part(TmpFlagsP)]
+ pub tmp_flags: TmpFlags,
+ #[part(TrailP)]
+ pub trail: Trail,
+ #[part(VariablesP)]
+ pub variables: Variables,
+ #[part(VsidsP)]
+ pub vsids: Vsids,
+ #[part(WatchlistsP)]
+ pub watchlists: Watchlists,
+}
+
+/// Update structures for a new variable count.
+pub fn set_var_count(
+ mut ctx: partial!(
+ Context,
+ mut AnalyzeConflictP,
+ mut AssignmentP,
+ mut BinaryClausesP,
+ mut ImplGraphP,
+ mut TmpFlagsP,
+ mut VsidsP,
+ mut WatchlistsP,
+ ),
+ count: usize,
+) {
+ ctx.part_mut(AnalyzeConflictP).set_var_count(count);
+ ctx.part_mut(AssignmentP).set_var_count(count);
+ ctx.part_mut(BinaryClausesP).set_var_count(count);
+ ctx.part_mut(ImplGraphP).set_var_count(count);
+ ctx.part_mut(TmpFlagsP).set_var_count(count);
+ ctx.part_mut(VsidsP).set_var_count(count);
+ ctx.part_mut(WatchlistsP).set_var_count(count);
+}
+
+/// The solver configuration has changed.
+pub fn config_changed(
+ mut ctx: partial!(Context, mut VsidsP, mut ClauseActivityP, SolverConfigP),
+ _update: &SolverConfigUpdate,
+) {
+ let (config, mut ctx) = ctx.split_part(SolverConfigP);
+ ctx.part_mut(VsidsP).set_decay(config.vsids_decay);
+ ctx.part_mut(ClauseActivityP)
+ .set_decay(config.clause_activity_decay);
+}
diff --git a/vendor/varisat/src/decision.rs b/vendor/varisat/src/decision.rs
new file mode 100644
index 000000000..a8c8ed267
--- /dev/null
+++ b/vendor/varisat/src/decision.rs
@@ -0,0 +1,58 @@
+//! Decision heuristics.
+
+use partial_ref::{partial, PartialRef};
+
+use varisat_formula::Var;
+
+use crate::{
+ context::{parts::*, Context},
+ prop::{enqueue_assignment, Reason},
+};
+
+pub mod vsids;
+
+/// Make a decision and enqueue it.
+///
+/// Returns `false` if no decision was made because all variables are assigned.
+pub fn make_decision(
+ mut ctx: partial!(
+ Context,
+ mut AssignmentP,
+ mut ImplGraphP,
+ mut TrailP,
+ mut VsidsP
+ ),
+) -> bool {
+ let (vsids, mut ctx) = ctx.split_part_mut(VsidsP);
+
+ if let Some(decision_var) = vsids.find(|&var| ctx.part(AssignmentP).var_value(var).is_none()) {
+ let decision = decision_var.lit(ctx.part(AssignmentP).last_var_value(decision_var));
+
+ ctx.part_mut(TrailP).new_decision_level();
+
+ enqueue_assignment(ctx.borrow(), decision, Reason::Unit);
+
+ true
+ } else {
+ false
+ }
+}
+
+/// Make a variable available for decisions.
+pub fn make_available(mut ctx: partial!(Context, mut VsidsP), var: Var) {
+ ctx.part_mut(VsidsP).make_available(var);
+}
+
+/// Initialize decision heuristics for a new variable.
+pub fn initialize_var(mut ctx: partial!(Context, mut VsidsP), var: Var, available: bool) {
+ ctx.part_mut(VsidsP).reset(var);
+
+ if available {
+ make_available(ctx.borrow(), var);
+ }
+}
+
+/// Remove a variable from the decision heuristics.
+pub fn remove_var(mut ctx: partial!(Context, mut VsidsP), var: Var) {
+ ctx.part_mut(VsidsP).make_unavailable(var);
+}
diff --git a/vendor/varisat/src/decision/vsids.rs b/vendor/varisat/src/decision/vsids.rs
new file mode 100644
index 000000000..e59656d1b
--- /dev/null
+++ b/vendor/varisat/src/decision/vsids.rs
@@ -0,0 +1,346 @@
+//! The VSIDS branching heuristic.
+//!
+//! The VSIDS (Variable State Independent Decaying Sum) branching heuristic keeps an activity value
+//! for each variable. For each conflict some variables are bumped, which means that their activity
+//! is increased by a constant. After bumping some variables, the activity of all variables is
+//! decayed by multiplying it with a constant below 1.
+//!
+//! When a decision is made, it branches on the variable with the highest activity among the
+//! unassigned variables.
+//!
+//! There are a few variants that differ in which variables are bumped. Varisat follows Minisat (and
+//! others) by bumping all variables in the conflict clause and all variables resolved on during
+//! conflict analysis.
+
+use ordered_float::OrderedFloat;
+
+use varisat_formula::Var;
+
+use crate::config::SolverConfig;
+
+/// The VSIDS branching heuristic.
+///
+/// As an optimization instead of decaying all activities each conflict, the bump value is divided
+/// by the decay factor each conflict. When this would cause a value to overflow all activities and
+/// the bump value are scaled down. Apart from a scaling factor that is the same for all involved
+/// values, this is equivalent to the naive implementation. As we only care about the order of
+/// activities we can ignore the scaling factor.
+pub struct Vsids {
+ /// The activity of each variable.
+ activity: Vec<OrderedFloat<f32>>,
+ /// A binary heap of the variables.
+ heap: Vec<Var>,
+ /// The position in the binary heap for each variable.
+ position: Vec<Option<usize>>,
+ /// The value to add on bumping.
+ bump: f32,
+ /// The inverse of the decay factor.
+ inv_decay: f32,
+}
+
+impl Default for Vsids {
+ fn default() -> Vsids {
+ Vsids {
+ activity: vec![],
+ heap: vec![],
+ position: vec![],
+ bump: 1.0,
+ inv_decay: 1.0 / SolverConfig::default().vsids_decay,
+ }
+ }
+}
+
+impl Vsids {
+ /// Update structures for a new variable count.
+ pub fn set_var_count(&mut self, count: usize) {
+ self.activity.resize(count, OrderedFloat(0.0));
+ self.position.resize(count, None);
+ }
+
+ /// Rescale activities if any value exceeds this value.
+ fn rescale_limit() -> f32 {
+ std::f32::MAX / 16.0
+ }
+
+ /// Change the decay factor.
+ pub fn set_decay(&mut self, decay: f32) {
+ assert!(decay < 1.0);
+ assert!(decay > 1.0 / 16.0);
+ self.inv_decay = 1.0 / decay;
+ }
+
+ /// Bump a variable by increasing its activity.
+ pub fn bump(&mut self, var: Var) {
+ let rescale = {
+ let value = &mut self.activity[var.index()];
+ value.0 += self.bump;
+ value.0 >= Self::rescale_limit()
+ };
+ if rescale {
+ self.rescale();
+ }
+ if let Some(pos) = self.position[var.index()] {
+ self.sift_up(pos);
+ }
+ }
+
+ /// Decay all variable activities.
+ pub fn decay(&mut self) {
+ self.bump *= self.inv_decay;
+ if self.bump >= Self::rescale_limit() {
+ self.rescale();
+ }
+ }
+
+ /// Rescale all values to avoid an overflow.
+ fn rescale(&mut self) {
+ let rescale_factor = 1.0 / Self::rescale_limit();
+ for activity in &mut self.activity {
+ activity.0 *= rescale_factor;
+ }
+ self.bump *= rescale_factor;
+ }
+
+ /// Reset the activity of an unavailable variable to zero.
+ ///
+ /// Panics if the variable is still available.
+ pub fn reset(&mut self, var: Var) {
+ assert!(self.position[var.index()].is_none());
+ self.activity[var.index()] = OrderedFloat(0.0);
+ }
+
+ /// Remove a variable from the heap if present.
+ pub fn make_unavailable(&mut self, var: Var) {
+ if let Some(position) = self.position[var.index()] {
+ self.heap.swap_remove(position);
+ if self.heap.len() > position {
+ let moved_var = self.heap[position];
+ self.position[moved_var.index()] = Some(position);
+ self.sift_down(position);
+ }
+ self.position[var.index()] = None;
+ }
+ }
+
+ /// Insert a variable into the heap if not already present.
+ pub fn make_available(&mut self, var: Var) {
+ if self.position[var.index()].is_none() {
+ let position = self.heap.len();
+ self.position[var.index()] = Some(position);
+ self.heap.push(var);
+ self.sift_up(position);
+ }
+ }
+
+ /// Move a variable closer to the root until the heap property is satisfied.
+ fn sift_up(&mut self, mut pos: usize) {
+ let var = self.heap[pos];
+ loop {
+ if pos == 0 {
+ return;
+ }
+ let parent_pos = (pos - 1) / 2;
+ let parent_var = self.heap[parent_pos];
+ if self.activity[parent_var.index()] >= self.activity[var.index()] {
+ return;
+ }
+ self.position[var.index()] = Some(parent_pos);
+ self.heap[parent_pos] = var;
+ self.position[parent_var.index()] = Some(pos);
+ self.heap[pos] = parent_var;
+ pos = parent_pos;
+ }
+ }
+
+ /// Move a variable away from the root until the heap property is satisfied.
+ fn sift_down(&mut self, mut pos: usize) {
+ let var = self.heap[pos];
+ loop {
+ let mut largest_pos = pos;
+ let mut largest_var = var;
+
+ let left_pos = pos * 2 + 1;
+ if left_pos < self.heap.len() {
+ let left_var = self.heap[left_pos];
+
+ if self.activity[largest_var.index()] < self.activity[left_var.index()] {
+ largest_pos = left_pos;
+ largest_var = left_var;
+ }
+ }
+
+ let right_pos = pos * 2 + 2;
+ if right_pos < self.heap.len() {
+ let right_var = self.heap[right_pos];
+
+ if self.activity[largest_var.index()] < self.activity[right_var.index()] {
+ largest_pos = right_pos;
+ largest_var = right_var;
+ }
+ }
+
+ if largest_pos == pos {
+ return;
+ }
+
+ self.position[var.index()] = Some(largest_pos);
+ self.heap[largest_pos] = var;
+ self.position[largest_var.index()] = Some(pos);
+ self.heap[pos] = largest_var;
+ pos = largest_pos;
+ }
+ }
+}
+
+impl Iterator for Vsids {
+ type Item = Var;
+
+ fn next(&mut self) -> Option<Var> {
+ if self.heap.is_empty() {
+ None
+ } else {
+ let var = self.heap.swap_remove(0);
+ if !self.heap.is_empty() {
+ let top_var = self.heap[0];
+ self.position[top_var.index()] = Some(0);
+ self.sift_down(0);
+ }
+ self.position[var.index()] = None;
+ Some(var)
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use varisat_formula::var;
+
+ #[test]
+ fn rescale_bump() {
+ let mut vsids = Vsids::default();
+ vsids.set_var_count(4);
+ vsids.set_decay(1.0 / 8.0);
+
+ for _ in 0..4 {
+ vsids.next();
+ }
+
+ for i in 0..4 {
+ for _ in 0..i {
+ vsids.bump(Var::from_index(i));
+ }
+ }
+
+ for _ in 0..41 {
+ vsids.decay();
+ }
+
+ for _ in 0..30 {
+ vsids.bump(var!(4));
+ }
+
+ // Decay is a power of two so these values are exact
+ #[allow(clippy::float_cmp)]
+ {
+ assert_eq!(vsids.activity[0].0, 0.0);
+ assert_eq!(vsids.activity[2].0, vsids.activity[1].0 * 2.0);
+ assert!(vsids.activity[3] > vsids.activity[2]);
+ }
+ }
+
+ #[test]
+ fn rescale_decay() {
+ let mut vsids = Vsids::default();
+ vsids.set_var_count(4);
+ vsids.set_decay(1.0 / 8.0);
+
+ for _ in 0..4 {
+ vsids.next();
+ }
+
+ for i in 0..4 {
+ for _ in 0..i {
+ vsids.bump(Var::from_index(i));
+ }
+ }
+
+ for _ in 0..60 {
+ vsids.decay();
+ }
+
+ // Decay is a power of two so these values are exact
+ #[allow(clippy::float_cmp)]
+ {
+ assert_eq!(vsids.activity[0].0, 0.0);
+ assert_eq!(vsids.activity[2].0, vsids.activity[1].0 * 2.0);
+ assert_eq!(vsids.activity[3].0, vsids.activity[1].0 * 3.0);
+ }
+ }
+
+ #[test]
+ fn heap_sorts() {
+ let mut vsids = Vsids::default();
+ vsids.set_var_count(8);
+
+ for _ in 0..8 {
+ vsids.next();
+ }
+
+ for i in 0..8 {
+ for _ in 0..i {
+ vsids.bump(Var::from_index(i));
+ }
+ }
+
+ for i in 0..8 {
+ vsids.make_available(Var::from_index((i * 5) % 8));
+ }
+
+ for i in (0..8).rev() {
+ assert_eq!(vsids.next(), Some(Var::from_index(i)));
+ }
+ assert_eq!(vsids.next(), None);
+ }
+
+ #[test]
+ fn heap_bump() {
+ let mut vsids = Vsids::default();
+ vsids.set_var_count(8);
+ vsids.set_decay(1.0 / 8.0);
+
+ for _ in 0..8 {
+ vsids.next();
+ }
+
+ for i in 0..8 {
+ for _ in 0..i {
+ vsids.bump(Var::from_index(i));
+ }
+ }
+
+ for i in 0..8 {
+ vsids.make_available(Var::from_index((i * 5) % 8));
+ }
+
+ for i in (0..4).rev() {
+ assert_eq!(vsids.next(), Some(Var::from_index(i + 4)));
+ }
+
+ vsids.decay();
+ vsids.decay();
+
+ for i in 0..8 {
+ for _ in 0..(8 - i) {
+ vsids.bump(Var::from_index(i));
+ }
+ }
+
+ for i in 0..4 {
+ assert_eq!(vsids.next(), Some(Var::from_index(i)));
+ }
+
+ assert_eq!(vsids.next(), None);
+ }
+}
diff --git a/vendor/varisat/src/glue.rs b/vendor/varisat/src/glue.rs
new file mode 100644
index 000000000..2036cdd99
--- /dev/null
+++ b/vendor/varisat/src/glue.rs
@@ -0,0 +1,36 @@
+//! Compute glue levels of clauses.
+//!
+//! The glue level of a propagating clause is the number of distinct decision levels of the clause's
+//! variables. This is also called the literal block distance (LBD). For each clause the smallest
+//! glue level observed is used as an indicator of how useful that clause is.
+
+use partial_ref::{partial, PartialRef};
+
+use varisat_formula::Lit;
+
+use crate::context::{parts::*, Context};
+
+/// Compute the glue level of a clause.
+pub fn compute_glue(mut ctx: partial!(Context, mut TmpFlagsP, ImplGraphP), lits: &[Lit]) -> usize {
+ let (tmp_flags, ctx) = ctx.split_part_mut(TmpFlagsP);
+ let impl_graph = ctx.part(ImplGraphP);
+ let flags = &mut tmp_flags.flags;
+
+ let mut glue = 0;
+
+ for &lit in lits {
+ let level = impl_graph.level(lit.var());
+ let flag = &mut flags[level];
+ if !*flag {
+ *flag = true;
+ glue += 1
+ }
+ }
+
+ for &lit in lits {
+ let level = impl_graph.level(lit.var());
+ flags[level] = false;
+ }
+
+ glue
+}
diff --git a/vendor/varisat/src/lib.rs b/vendor/varisat/src/lib.rs
new file mode 100644
index 000000000..340b57b6f
--- /dev/null
+++ b/vendor/varisat/src/lib.rs
@@ -0,0 +1,46 @@
+//! Varisat is a [CDCL][cdcl] based SAT solver written in rust. Given a boolean formula in
+//! [conjunctive normal form][cnf], it either finds a variable assignment that makes the formula
+//! true or finds a proof that this is impossible.
+//!
+//! In addition to this API documentation, Varisat comes with a [user manual].
+//!
+//! [cdcl]: https://en.wikipedia.org/wiki/Conflict-Driven_Clause_Learning
+//! [cnf]: https://en.wikipedia.org/wiki/Conjunctive_normal_form
+//! [user manual]: https://jix.github.io/varisat/manual/0.2.1/
+
+pub mod config;
+pub mod solver;
+
+mod analyze_conflict;
+mod assumptions;
+mod binary;
+mod cdcl;
+mod clause;
+mod context;
+mod decision;
+mod glue;
+mod load;
+mod model;
+mod proof;
+mod prop;
+mod schedule;
+mod state;
+mod tmp;
+mod unit_simplify;
+mod variables;
+
+pub use solver::{ProofFormat, Solver};
+pub use varisat_formula::{cnf, lit, CnfFormula, ExtendFormula, Lit, Var};
+
+pub mod dimacs {
+ //! DIMACS CNF parser and writer.
+ pub use varisat_dimacs::*;
+}
+
+pub mod checker {
+ //! Proof checker for Varisat proofs.
+ pub use varisat_checker::{
+ CheckedProofStep, Checker, CheckerData, CheckerError, ProofProcessor,
+ ProofTranscriptProcessor, ProofTranscriptStep,
+ };
+}
diff --git a/vendor/varisat/src/load.rs b/vendor/varisat/src/load.rs
new file mode 100644
index 000000000..43415dbfb
--- /dev/null
+++ b/vendor/varisat/src/load.rs
@@ -0,0 +1,259 @@
+//! Loading a formula into the solver.
+use vec_mut_scan::VecMutScan;
+
+use partial_ref::{partial, PartialRef};
+
+use varisat_formula::Lit;
+use varisat_internal_proof::{DeleteClauseProof, ProofStep};
+
+use crate::{
+ clause::{db, ClauseHeader, Tier},
+ context::{parts::*, Context},
+ proof,
+ prop::{assignment, full_restart, Reason},
+ state::SatState,
+ unit_simplify::resurrect_unit,
+ variables,
+};
+
+/// Adds a clause to the current formula.
+///
+/// The input uses user variable names.
+///
+/// Removes duplicated literals, ignores tautological clauses (eg. x v -x v y), handles empty
+/// clauses and dispatches among unit, binary and long clauses.
+pub fn load_clause<'a>(
+ mut ctx: partial!(
+ Context<'a>,
+ mut AnalyzeConflictP,
+ mut AssignmentP,
+ mut AssumptionsP,
+ mut BinaryClausesP,
+ mut ClauseAllocP,
+ mut ClauseDbP,
+ mut ImplGraphP,
+ mut ProofP<'a>,
+ mut SolverStateP,
+ mut TmpDataP,
+ mut TmpFlagsP,
+ mut TrailP,
+ mut VariablesP,
+ mut VsidsP,
+ mut WatchlistsP,
+ ),
+ user_lits: &[Lit],
+) {
+ match ctx.part(SolverStateP).sat_state {
+ SatState::Unsat => return,
+ SatState::Sat => {
+ ctx.part_mut(SolverStateP).sat_state = SatState::Unknown;
+ }
+ _ => {}
+ }
+
+ ctx.part_mut(SolverStateP).formula_is_empty = false;
+
+ // Restart the search when the user adds new clauses.
+ full_restart(ctx.borrow());
+
+ // Convert the clause from user to solver literals.
+ let (tmp_data, mut ctx_variables) = ctx.split_part_mut(TmpDataP);
+ variables::solver_from_user_lits(ctx_variables.borrow(), &mut tmp_data.lits, user_lits, true);
+
+ let (tmp_data, mut ctx) = ctx.split_part_mut(TmpDataP);
+
+ let lits = &mut tmp_data.lits;
+ let false_lits = &mut tmp_data.lits_2;
+
+ lits.sort_unstable();
+ lits.dedup();
+
+ proof::add_clause(ctx.borrow(), &lits);
+
+ // Detect tautological clauses
+ let mut last = None;
+
+ for &lit in lits.iter() {
+ if last == Some(!lit) {
+ return;
+ }
+ last = Some(lit);
+ }
+
+ // If we're not a unit clause the contained variables are not isolated anymore.
+ if lits.len() > 1 {
+ for &lit in lits.iter() {
+ ctx.part_mut(VariablesP)
+ .var_data_solver_mut(lit.var())
+ .isolated = false;
+ }
+ }
+
+ // Remove satisfied clauses and handle false literals.
+ //
+ // Proof generation expects us to start with the actual input clauses. If we would remove false
+ // literals we would have to generate proof steps for that. This would result in derived clauses
+ // being added during loading. If we're running proof processors on the fly, they'd see those
+ // derived clauses interspersed with the input clauses.
+ //
+ // We don't want to require each proof processor to handle derived clause additions during
+ // loading of the initial formula. Thus we need to handle clauses with false literals here.
+ false_lits.clear();
+
+ let mut lits_scan = VecMutScan::new(lits);
+
+ let mut clause_is_true = false;
+
+ // We move unassigned literals to the beginning to make sure we're going to watch unassigned
+ // literals.
+ while let Some(lit) = lits_scan.next() {
+ match ctx.part(AssignmentP).lit_value(*lit) {
+ Some(true) => {
+ clause_is_true = true;
+ break;
+ }
+ Some(false) => {
+ false_lits.push(lit.remove());
+ }
+ None => (),
+ }
+ }
+
+ drop(lits_scan);
+
+ let will_conflict = lits.is_empty();
+
+ // We resurrect any removed false literals to ensure propagation by this new clause. This is
+ // also required to eventually simplify this clause.
+ for &lit in false_lits.iter() {
+ resurrect_unit(ctx.borrow(), !lit);
+ }
+
+ lits.extend_from_slice(&false_lits);
+
+ if clause_is_true {
+ if lits.len() > 1 {
+ proof::add_step(
+ ctx.borrow(),
+ true,
+ &ProofStep::DeleteClause {
+ clause: lits,
+ proof: DeleteClauseProof::Satisfied,
+ },
+ );
+ }
+ return;
+ }
+
+ match lits[..] {
+ [] => ctx.part_mut(SolverStateP).sat_state = SatState::Unsat,
+ [lit] => {
+ if will_conflict {
+ ctx.part_mut(SolverStateP).sat_state = SatState::Unsat
+ } else {
+ assignment::enqueue_assignment(ctx.borrow(), lit, Reason::Unit)
+ }
+ }
+ [lit_0, lit_1] => {
+ ctx.part_mut(BinaryClausesP)
+ .add_binary_clause([lit_0, lit_1]);
+ }
+ _ => {
+ let mut header = ClauseHeader::new();
+ header.set_tier(Tier::Irred);
+
+ db::add_clause(ctx.borrow(), header, lits);
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use partial_ref::IntoPartialRefMut;
+
+ use varisat_formula::lits;
+
+ use crate::clause::Tier;
+
+ #[test]
+ fn unsat_on_empty_clause() {
+ let mut ctx = Context::default();
+ let mut ctx = ctx.into_partial_ref_mut();
+
+ load_clause(ctx.borrow(), &[]);
+
+ assert_eq!(ctx.part(SolverStateP).sat_state, SatState::Unsat);
+ }
+
+ #[test]
+ fn unit_clauses() {
+ let mut ctx = Context::default();
+ let mut ctx = ctx.into_partial_ref_mut();
+
+ load_clause(ctx.borrow(), &lits![1]);
+
+ assert_eq!(ctx.part(TrailP).trail().len(), 1);
+
+ load_clause(ctx.borrow(), &lits![3, -3]);
+
+ assert_eq!(ctx.part(TrailP).trail().len(), 1);
+
+ load_clause(ctx.borrow(), &lits![-2]);
+
+ assert_eq!(ctx.part(TrailP).trail().len(), 2);
+
+ load_clause(ctx.borrow(), &lits![1, 1]);
+
+ assert_eq!(ctx.part(TrailP).trail().len(), 2);
+
+ assert_eq!(ctx.part(SolverStateP).sat_state, SatState::Unknown);
+
+ load_clause(ctx.borrow(), &lits![2]);
+
+ assert_eq!(ctx.part(TrailP).trail().len(), 2);
+
+ assert_eq!(ctx.part(SolverStateP).sat_state, SatState::Unsat);
+ }
+
+ #[test]
+ fn binary_clauses() {
+ let mut ctx = Context::default();
+ let mut ctx = ctx.into_partial_ref_mut();
+
+ load_clause(ctx.borrow(), &lits![1, 2]);
+
+ assert_eq!(ctx.part(BinaryClausesP).count(), 1);
+
+ load_clause(ctx.borrow(), &lits![-1, 3, 3]);
+
+ assert_eq!(ctx.part(BinaryClausesP).count(), 2);
+
+ load_clause(ctx.borrow(), &lits![4, -4]);
+
+ assert_eq!(ctx.part(BinaryClausesP).count(), 2);
+
+ assert_eq!(ctx.part(SolverStateP).sat_state, SatState::Unknown);
+ }
+
+ #[test]
+ fn long_clauses() {
+ let mut ctx = Context::default();
+ let mut ctx = ctx.into_partial_ref_mut();
+
+ load_clause(ctx.borrow(), &lits![1, 2, 3]);
+
+ assert_eq!(ctx.part(ClauseDbP).count_by_tier(Tier::Irred), 1);
+
+ load_clause(ctx.borrow(), &lits![-2, 3, 3, 4]);
+
+ assert_eq!(ctx.part(ClauseDbP).count_by_tier(Tier::Irred), 2);
+
+ load_clause(ctx.borrow(), &lits![4, -5, 5, 2]);
+
+ assert_eq!(ctx.part(ClauseDbP).count_by_tier(Tier::Irred), 2);
+
+ assert_eq!(ctx.part(SolverStateP).sat_state, SatState::Unknown);
+ }
+}
diff --git a/vendor/varisat/src/model.rs b/vendor/varisat/src/model.rs
new file mode 100644
index 000000000..d4f269f6f
--- /dev/null
+++ b/vendor/varisat/src/model.rs
@@ -0,0 +1,87 @@
+//! Global model reconstruction
+
+use partial_ref::{partial, PartialRef};
+
+use varisat_formula::Lit;
+use varisat_internal_proof::ProofStep;
+
+use crate::{
+ context::{parts::*, Context},
+ proof,
+ state::SatState,
+};
+
+/// Global model reconstruction
+#[derive(Default)]
+pub struct Model {
+ /// Assignment of the global model.
+ ///
+ /// Whenever the solver state is SAT this must be up to date.
+ assignment: Vec<Option<bool>>,
+}
+
+impl Model {
+ /// Assignment of the global model.
+ ///
+ /// Only valid if the solver state is SAT.
+ pub fn assignment(&self) -> &[Option<bool>] {
+ &self.assignment
+ }
+
+ /// Whether a given global literal is true in the model assignment.
+ ///
+ /// Only valid if the solver state is SAT.
+ pub fn lit_is_true(&self, global_lit: Lit) -> bool {
+ self.assignment[global_lit.index()] == Some(global_lit.is_positive())
+ }
+}
+
+pub fn reconstruct_global_model<'a>(
+ mut ctx: partial!(
+ Context<'a>,
+ mut ModelP,
+ mut ProofP<'a>,
+ mut SolverStateP,
+ mut TmpDataP,
+ AssignmentP,
+ VariablesP
+ ),
+) {
+ let (variables, mut ctx) = ctx.split_part(VariablesP);
+ let (model, mut ctx) = ctx.split_part_mut(ModelP);
+ let (tmp, mut ctx) = ctx.split_part_mut(TmpDataP);
+
+ let models_in_proof = ctx.part(ProofP).models_in_proof();
+
+ tmp.lits.clear();
+
+ model.assignment.clear();
+ model.assignment.resize(variables.global_watermark(), None);
+
+ for global_var in variables.global_var_iter() {
+ let value = if let Some(solver_var) = variables.solver_from_global().get(global_var) {
+ ctx.part(AssignmentP).var_value(solver_var)
+ } else {
+ Some(variables.var_data_global(global_var).unit.unwrap_or(false))
+ };
+
+ model.assignment[global_var.index()] = value;
+
+ if models_in_proof {
+ if let Some(value) = value {
+ tmp.lits.push(global_var.lit(value))
+ }
+ }
+ }
+
+ if models_in_proof {
+ proof::add_step(
+ ctx.borrow(),
+ false,
+ &ProofStep::Model {
+ assignment: &tmp.lits,
+ },
+ );
+ }
+ ctx.part_mut(SolverStateP).sat_state = SatState::Sat;
+}
diff --git a/vendor/varisat/src/proof.rs b/vendor/varisat/src/proof.rs
new file mode 100644
index 000000000..336f7fbc2
--- /dev/null
+++ b/vendor/varisat/src/proof.rs
@@ -0,0 +1,434 @@
+//! Proof generation.
+
+use std::io::{self, sink, BufWriter, Write};
+
+use partial_ref::{partial, PartialRef};
+
+use varisat_checker::{internal::SelfChecker, Checker, CheckerError, ProofProcessor};
+use varisat_formula::{Lit, Var};
+use varisat_internal_proof::{ClauseHash, ProofStep};
+
+use crate::{
+ context::{parts::*, Context},
+ solver::SolverError,
+};
+
+mod drat;
+mod map_step;
+
+/// Proof formats that can be generated during solving.
+#[derive(Copy, Clone, Eq, PartialEq, Debug)]
+pub enum ProofFormat {
+    /// Varisat's native binary format; supports clause hashes and model steps.
+    Varisat,
+    /// Textual DRAT format.
+    Drat,
+    /// Binary (compressed) DRAT format.
+    BinaryDrat,
+}
+
+/// Number of added or removed clauses.
+///
+/// Returns `1` for a step adding a long clause, `-1` for a step deleting a long clause and `0`
+/// otherwise. Clauses with at most one literal are not counted.
+pub fn clause_count_delta(step: &ProofStep) -> isize {
+    let is_long = |clause: &[Lit]| clause.len() > 1;
+    match step {
+        ProofStep::AddClause { clause } | ProofStep::AtClause { clause, .. } => {
+            is_long(clause) as isize
+        }
+        ProofStep::DeleteClause { clause, .. } => -(is_long(clause) as isize),
+        ProofStep::SolverVarName { .. }
+        | ProofStep::UserVarName { .. }
+        | ProofStep::DeleteVar { .. }
+        | ProofStep::ChangeSamplingMode { .. }
+        | ProofStep::UnitClauses { .. }
+        | ProofStep::ChangeHashBits { .. }
+        | ProofStep::Model { .. }
+        | ProofStep::Assumptions { .. }
+        | ProofStep::FailedAssumptions { .. }
+        | ProofStep::End => 0,
+    }
+}
+
+/// Proof generation.
+pub struct Proof<'a> {
+    /// Selected output format, or `None` when no proof is being written.
+    format: Option<ProofFormat>,
+    /// Output target; an `io::sink()` until a real target is configured.
+    target: BufWriter<Box<dyn Write + 'a>>,
+    /// On-the-fly checker, present after checking was started.
+    checker: Option<Checker<'a>>,
+    /// Reusable buffers for mapping steps between solver and global vars.
+    map_step: map_step::MapStep,
+    /// How many bits are used for storing clause hashes.
+    hash_bits: u32,
+    /// How many clauses are currently in the db.
+    ///
+    /// This is used to pick a good number of hash_bits
+    clause_count: isize,
+}
+
+impl<'a> Default for Proof<'a> {
+    fn default() -> Proof<'a> {
+        Proof {
+            checker: None,
+            format: None,
+            // Discard anything written until a real target is configured.
+            target: BufWriter::new(Box::new(sink())),
+            map_step: map_step::MapStep::default(),
+            // Start with full-width hashes; adjusted later based on the clause count.
+            hash_bits: 64,
+            clause_count: 0,
+        }
+    }
+}
+
+impl<'a> Proof<'a> {
+    /// Start writing proof steps to the given target with the given format.
+    pub fn write_proof(&mut self, target: impl Write + 'a, format: ProofFormat) {
+        self.format = Some(format);
+        self.target = BufWriter::new(Box::new(target))
+    }
+
+    /// Begin checking proof steps.
+    ///
+    /// Idempotent: an already active checker is kept as is.
+    pub fn begin_checking(&mut self) {
+        if self.checker.is_none() {
+            self.checker = Some(Checker::new())
+        }
+    }
+
+    /// Add a [`ProofProcessor`].
+    ///
+    /// This implicitly starts on-the-fly checking, which the processor is attached to.
+    ///
+    /// See also [`Checker::add_processor`].
+    pub fn add_processor(&mut self, processor: &'a mut dyn ProofProcessor) {
+        self.begin_checking();
+        self.checker.as_mut().unwrap().add_processor(processor);
+    }
+
+    /// Whether proof generation is active.
+    pub fn is_active(&self) -> bool {
+        self.checker.is_some() || self.format.is_some()
+    }
+
+    /// Are we emitting or checking our native format.
+    pub fn native_format(&self) -> bool {
+        self.checker.is_some() || self.format == Some(ProofFormat::Varisat)
+    }
+
+    /// Whether clause hashes are required for steps that support them.
+    pub fn clause_hashes_required(&self) -> bool {
+        self.native_format()
+    }
+
+    /// Whether found models are included in the proof.
+    pub fn models_in_proof(&self) -> bool {
+        self.native_format()
+    }
+}
+
+/// Call when adding an external clause.
+///
+/// This is required for on the fly checking and checking of incremental solving.
+///
+/// *Note:* this function expects the clause to use solver var names.
+pub fn add_clause<'a>(
+    mut ctx: partial!(Context<'a>, mut ProofP<'a>, mut SolverStateP, VariablesP),
+    clause: &[Lit],
+) {
+    if ctx.part(SolverStateP).solver_invoked {
+        // After the solver was invoked, clause additions become explicit proof steps.
+        add_step(ctx.borrow(), true, &ProofStep::AddClause { clause })
+    } else {
+        // Before the first solve, only the checker is informed (no step is written).
+        let (variables, mut ctx) = ctx.split_part(VariablesP);
+        let proof = ctx.part_mut(ProofP);
+        if let Some(checker) = &mut proof.checker {
+            // The checker uses global var names, so the clause is mapped first.
+            let clause = proof.map_step.map_lits(clause, |var| {
+                variables
+                    .global_from_solver()
+                    .get(var)
+                    .expect("no existing global var for solver var")
+            });
+
+            let result = checker.add_clause(clause);
+            handle_self_check_result(ctx.borrow(), result);
+        }
+        // Track long clauses for hash size selection; unit clauses are not counted.
+        if clause.len() > 1 {
+            ctx.part_mut(ProofP).clause_count += 1;
+        }
+    }
+}
+
+/// Add a step to the proof.
+///
+/// Ignored when proof generation is disabled.
+///
+/// When `solver_vars` is true, all variables and literals will be converted from solver to global
+/// vars. Otherwise the proof step needs to use global vars.
+pub fn add_step<'a, 's>(
+    mut ctx: partial!(Context<'a>, mut ProofP<'a>, mut SolverStateP, VariablesP),
+    solver_vars: bool,
+    step: &'s ProofStep<'s>,
+) {
+    // Once a solver error was recorded, no further proof steps are processed.
+    if ctx.part(SolverStateP).solver_error.is_some() {
+        return;
+    }
+
+    let (variables, mut ctx) = ctx.split_part(VariablesP);
+    let proof = ctx.part_mut(ProofP);
+
+    // Maps solver vars to global vars when requested, otherwise passes vars through unchanged.
+    let map_vars = |var| {
+        if solver_vars {
+            variables
+                .global_from_solver()
+                .get(var)
+                .expect("no existing global var for solver var")
+        } else {
+            var
+        }
+    };
+
+    let io_result = match proof.format {
+        Some(ProofFormat::Varisat) => write_varisat_step(ctx.borrow(), map_vars, step),
+        Some(ProofFormat::Drat) => {
+            let step = proof.map_step.map(step, map_vars, |hash| hash);
+            drat::write_step(&mut proof.target, &step)
+        }
+        Some(ProofFormat::BinaryDrat) => {
+            let step = proof.map_step.map(step, map_vars, |hash| hash);
+            drat::write_binary_step(&mut proof.target, &step)
+        }
+        None => Ok(()),
+    };
+
+    // The checker only runs when writing succeeded.
+    if io_result.is_ok() {
+        let proof = ctx.part_mut(ProofP);
+        if let Some(checker) = &mut proof.checker {
+            let step = proof.map_step.map(step, map_vars, |hash| hash);
+            let result = checker.self_check_step(step);
+            handle_self_check_result(ctx.borrow(), result);
+        }
+    }
+
+    handle_io_errors(ctx.borrow(), io_result);
+}
+
+/// Write a step using our native format
+///
+/// Also maintains the clause count and adapts the clause hash size to it, emitting a
+/// `ChangeHashBits` step whenever the size changes.
+fn write_varisat_step<'a, 's>(
+    mut ctx: partial!(Context<'a>, mut ProofP<'a>, SolverStateP),
+    map_vars: impl Fn(Var) -> Var,
+    step: &'s ProofStep<'s>,
+) -> io::Result<()> {
+    let (proof, ctx) = ctx.split_part_mut(ProofP);
+
+    proof.clause_count += clause_count_delta(step);
+
+    let mut rehash = false;
+    // Should we change the hash size?
+    // Grow while the clause count exceeds 2^(hash_bits / 2).
+    while proof.clause_count > (1 << (proof.hash_bits / 2)) {
+        proof.hash_bits += 2;
+        rehash = true;
+    }
+    // Shrinking only happens after the solver was invoked, down to a minimum of 6 bits.
+    if ctx.part(SolverStateP).solver_invoked {
+        while proof.hash_bits > 6 && proof.clause_count * 4 < (1 << (proof.hash_bits / 2)) {
+            proof.hash_bits -= 2;
+            rehash = true;
+        }
+    }
+
+    if rehash {
+        // Announce the new hash size before emitting the step that uses it.
+        varisat_internal_proof::binary_format::write_step(
+            &mut proof.target,
+            &ProofStep::ChangeHashBits {
+                bits: proof.hash_bits,
+            },
+        )?;
+    }
+
+    // Hashes are truncated to their top `hash_bits` bits.
+    let shift_bits = ClauseHash::max_value().count_ones() - proof.hash_bits;
+
+    let map_hash = |hash| hash >> shift_bits;
+    let step = proof.map_step.map(step, map_vars, map_hash);
+
+    // NOTE(review): this is currently only reached with the Varisat format selected, making
+    // this check look redundant — confirm whether other callers are expected.
+    if proof.format == Some(ProofFormat::Varisat) {
+        varisat_internal_proof::binary_format::write_step(&mut proof.target, &step)?;
+    }
+
+    Ok(())
+}
+
+/// Flush buffers used for writing proof steps.
+pub fn flush_proof<'a>(mut ctx: partial!(Context<'a>, mut ProofP<'a>, mut SolverStateP)) {
+    // An explicit flush is needed so IO errors can be observed and reported.
+    let flush_result = ctx.part_mut(ProofP).target.flush();
+    handle_io_errors(ctx.borrow(), flush_result);
+}
+
+/// Stop writing proof steps.
+pub fn close_proof<'a>(
+    mut ctx: partial!(Context<'a>, mut ProofP<'a>, mut SolverStateP, VariablesP),
+) {
+    add_step(ctx.borrow(), true, &ProofStep::End);
+    flush_proof(ctx.borrow());
+    let proof = ctx.part_mut(ProofP);
+    proof.format = None;
+    // Swap in a sink so that any further writes are discarded.
+    proof.target = BufWriter::new(Box::new(sink()));
+}
+
+/// Called before solve returns to flush buffers and to trigger delayed unit conflict processing.
+///
+/// We flush buffers before solve returns to ensure that we can pass IO errors to the user.
+pub fn solve_finished<'a>(mut ctx: partial!(Context<'a>, mut ProofP<'a>, mut SolverStateP)) {
+    flush_proof(ctx.borrow());
+    // Checking of some steps may be deferred; process them now.
+    if let Some(checker) = &mut ctx.part_mut(ProofP).checker {
+        let result = checker.self_check_delayed_steps();
+        handle_self_check_result(ctx.borrow(), result);
+    }
+}
+
+/// Handle results of on the fly checking.
+///
+/// Panics when the proof is incorrect and aborts solving when a proof processor produced an error.
+fn handle_self_check_result<'a>(
+    mut ctx: partial!(Context<'a>, mut ProofP<'a>, mut SolverStateP),
+    result: Result<(), CheckerError>,
+) {
+    match result {
+        Err(CheckerError::ProofProcessorError { cause }) => {
+            // A processor error aborts solving and resets proof generation to a no-op state.
+            ctx.part_mut(SolverStateP).solver_error =
+                Some(SolverError::ProofProcessorError { cause });
+            *ctx.part_mut(ProofP) = Proof::default();
+        }
+        Err(err) => {
+            // Any other checker error indicates a defective proof: log details and panic.
+            log::error!("{}", err);
+            if let CheckerError::CheckFailed { debug_step, .. } = err {
+                if !debug_step.is_empty() {
+                    log::error!("failed step was {}", debug_step)
+                }
+            }
+            panic!("self check failure");
+        }
+        Ok(()) => (),
+    }
+}
+
+/// Handle io errors during proof writing.
+fn handle_io_errors<'a>(
+    mut ctx: partial!(Context<'a>, mut ProofP<'a>, mut SolverStateP),
+    result: io::Result<()>,
+) {
+    if let Err(cause) = result {
+        // Record the error and reset proof generation to a no-op state.
+        ctx.part_mut(SolverStateP).solver_error = Some(SolverError::ProofIoError { cause });
+        *ctx.part_mut(ProofP) = Proof::default();
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    use proptest::prelude::*;
+
+    use std::{fs::File, process::Command};
+
+    use tempfile::TempDir;
+
+    use varisat_dimacs::write_dimacs;
+    use varisat_formula::{test::sgen_unsat_formula, CnfFormula};
+
+    use crate::solver::Solver;
+
+    /// External DRAT checkers invoked as subprocesses; they must be on `PATH`.
+    enum Checker {
+        DratTrim,
+        Rate,
+    }
+
+    /// Solves an unsat formula while writing a DRAT proof, then runs the external checker on it.
+    fn test_drat(checker: Checker, formula: CnfFormula, binary: bool) -> Result<(), TestCaseError> {
+        let mut solver = Solver::new();
+
+        let tmp = TempDir::new()?;
+
+        let drat_proof = tmp.path().join("proof.drat");
+        let cnf_file = tmp.path().join("input.cnf");
+
+        write_dimacs(&mut File::create(&cnf_file)?, &formula)?;
+
+        let format = if binary {
+            ProofFormat::BinaryDrat
+        } else {
+            ProofFormat::Drat
+        };
+
+        solver.write_proof(File::create(&drat_proof)?, format);
+
+        solver.add_formula(&formula);
+
+        prop_assert_eq!(solver.solve().ok(), Some(false));
+
+        solver
+            .close_proof()
+            .map_err(|e| TestCaseError::fail(e.to_string()))?;
+
+        let output = match checker {
+            Checker::DratTrim => {
+                if binary {
+                    // `-i` tells drat-trim to parse the proof as binary DRAT.
+                    Command::new("drat-trim")
+                        .arg(&cnf_file)
+                        .arg(&drat_proof)
+                        .arg("-i")
+                        .output()?
+                } else {
+                    Command::new("drat-trim")
+                        .arg(&cnf_file)
+                        .arg(&drat_proof)
+                        .output()?
+                }
+            }
+            Checker::Rate => Command::new("rate")
+                .arg(&cnf_file)
+                .arg(&drat_proof)
+                .output()?,
+        };
+
+        prop_assert!(std::str::from_utf8(&output.stdout)?.contains("s VERIFIED"));
+
+        Ok(())
+    }
+
+    proptest! {
+        #[cfg_attr(not(test_drat_trim), ignore)]
+        #[test]
+        fn sgen_unsat_drat_trim(
+            formula in sgen_unsat_formula(1..7usize),
+        ) {
+            test_drat(Checker::DratTrim, formula, false)?;
+        }
+
+        #[cfg_attr(not(test_drat_trim), ignore)]
+        #[test]
+        fn sgen_unsat_binary_drat_trim(
+            formula in sgen_unsat_formula(1..7usize),
+        ) {
+            test_drat(Checker::DratTrim, formula, true)?;
+        }
+
+        #[cfg_attr(not(test_rate), ignore)]
+        #[test]
+        fn sgen_unsat_rate(
+            formula in sgen_unsat_formula(1..7usize),
+        ) {
+            test_drat(Checker::Rate, formula, false)?;
+        }
+
+        #[cfg_attr(not(test_rate), ignore)]
+        #[test]
+        fn sgen_unsat_binary_rate(
+            formula in sgen_unsat_formula(1..7usize),
+        ) {
+            test_drat(Checker::Rate, formula, true)?;
+        }
+    }
+}
diff --git a/vendor/varisat/src/proof/drat.rs b/vendor/varisat/src/proof/drat.rs
new file mode 100644
index 000000000..76382fab3
--- /dev/null
+++ b/vendor/varisat/src/proof/drat.rs
@@ -0,0 +1,85 @@
+use std::io::{self, Write};
+
+use varisat_formula::Lit;
+use varisat_internal_proof::ProofStep;
+
+/// Prepares a proof step for DRAT writing
+///
+/// Translates a native proof step into zero or more DRAT additions/deletions and passes each to
+/// `emit_drat_step(is_addition, clause)`. Steps that have no DRAT equivalent are either silently
+/// skipped (bookkeeping steps) or cause a panic (incremental additions and assumptions).
+fn drat_step(
+    step: &ProofStep,
+    mut emit_drat_step: impl FnMut(bool, &[Lit]) -> io::Result<()>,
+) -> io::Result<()> {
+    match step {
+        ProofStep::AtClause { clause, .. } => {
+            emit_drat_step(true, &clause)?;
+        }
+        ProofStep::UnitClauses { units } => {
+            // Each derived unit becomes its own one-literal addition.
+            for &(unit, _hash) in units.iter() {
+                emit_drat_step(true, &[unit])?;
+            }
+        }
+        ProofStep::DeleteClause { clause, .. } => {
+            emit_drat_step(false, &clause[..])?;
+        }
+        ProofStep::SolverVarName { .. }
+        | ProofStep::UserVarName { .. }
+        | ProofStep::DeleteVar { .. }
+        | ProofStep::ChangeSamplingMode { .. }
+        | ProofStep::ChangeHashBits { .. }
+        | ProofStep::Model { .. }
+        | ProofStep::End => (),
+        ProofStep::AddClause { .. } => {
+            // TODO allow error handling here?
+            panic!("incremental clause additions not supported by DRAT proofs");
+        }
+        ProofStep::Assumptions { .. } | ProofStep::FailedAssumptions { .. } => {
+            // TODO allow error handling here?
+            panic!("assumptions not supported by DRAT proofs");
+        }
+    }
+
+    Ok(())
+}
+
+/// Writes a proof step in DRAT format
+pub fn write_step<'s>(target: &mut impl Write, step: &'s ProofStep<'s>) -> io::Result<()> {
+    drat_step(step, |is_addition, clause| {
+        // Deletions carry a "d " prefix; additions have none.
+        if !is_addition {
+            target.write_all(b"d ")?;
+        }
+        write_literals(target, clause)
+    })
+}
+
+/// Writes a proof step in binary DRAT format
+pub fn write_binary_step<'s>(target: &mut impl Write, step: &'s ProofStep<'s>) -> io::Result<()> {
+    drat_step(step, |is_addition, clause| {
+        // Additions are prefixed with "a", deletions with "d".
+        let prefix: &[u8] = if is_addition { b"a" } else { b"d" };
+        target.write_all(prefix)?;
+        write_binary_literals(target, clause)
+    })
+}
+
+/// Writes the literals of a clause for a step in a DRAT proof.
+fn write_literals(target: &mut impl Write, literals: &[Lit]) -> io::Result<()> {
+    literals.iter().try_for_each(|&lit| {
+        itoa::write(&mut *target, lit.to_dimacs())?;
+        target.write_all(b" ")
+    })?;
+    // A zero terminates the clause.
+    target.write_all(b"0\n")
+}
+
+/// Writes the literals of a clause for a step in a binary DRAT proof.
+fn write_binary_literals(target: &mut impl Write, literals: &[Lit]) -> io::Result<()> {
+    for &lit in literals.iter() {
+        // Each literal is encoded as LEB128 of `code + 2`.
+        let encoded = lit.code() as u64 + 2;
+        leb128::write::unsigned(target, encoded)?;
+    }
+    // A zero byte terminates the clause.
+    target.write_all(&[0])
+}
diff --git a/vendor/varisat/src/proof/map_step.rs b/vendor/varisat/src/proof/map_step.rs
new file mode 100644
index 000000000..0c08e6510
--- /dev/null
+++ b/vendor/varisat/src/proof/map_step.rs
@@ -0,0 +1,128 @@
+//! Maps literals and hashes of clause steps between the solver and the checker.
+
+use varisat_formula::{Lit, Var};
+
+use super::{ClauseHash, ProofStep};
+
+/// Maps literals and hashes of clause steps between the solver and the checker.
+#[derive(Default)]
+pub struct MapStep {
+    // Reusable buffers backing the slices of mapped steps; overwritten on each call.
+    lit_buf: Vec<Lit>,
+    hash_buf: Vec<ClauseHash>,
+    unit_buf: Vec<(Lit, ClauseHash)>,
+}
+
+impl MapStep {
+    /// Map the variables of a slice of literals.
+    ///
+    /// The result is stored in an internal buffer, so it is only valid until the next call to a
+    /// method of this value.
+    pub fn map_lits(&mut self, lits: &[Lit], map_var: impl Fn(Var) -> Var) -> &[Lit] {
+        let map_var_ref = &map_var;
+        self.lit_buf.clear();
+        self.lit_buf
+            .extend(lits.iter().map(|lit| lit.map_var(map_var_ref)));
+        &self.lit_buf
+    }
+
+    /// Map the variables and clause hashes of a proof step.
+    ///
+    /// Slices in the returned step borrow from internal buffers (or from the input step), so the
+    /// result is only valid until the next call to a method of this value.
+    pub fn map<'s, 'a, 'b>(
+        &'a mut self,
+        step: &ProofStep<'b>,
+        map_var: impl Fn(Var) -> Var,
+        map_hash: impl Fn(ClauseHash) -> ClauseHash,
+    ) -> ProofStep<'s>
+    where
+        'a: 's,
+        'b: 's,
+    {
+        let map_var_ref = &map_var;
+        let map_lit = |lit: Lit| lit.map_var(map_var_ref);
+        match *step {
+            ProofStep::AddClause { clause } => {
+                self.lit_buf.clear();
+                self.lit_buf.extend(clause.iter().cloned().map(map_lit));
+                ProofStep::AddClause {
+                    clause: &self.lit_buf,
+                }
+            }
+
+            ProofStep::AtClause {
+                redundant,
+                clause,
+                propagation_hashes,
+            } => {
+                self.lit_buf.clear();
+                self.lit_buf.extend(clause.iter().cloned().map(map_lit));
+                self.hash_buf.clear();
+                self.hash_buf
+                    .extend(propagation_hashes.iter().cloned().map(map_hash));
+                ProofStep::AtClause {
+                    redundant,
+                    clause: &self.lit_buf,
+                    propagation_hashes: &self.hash_buf,
+                }
+            }
+
+            ProofStep::UnitClauses { units } => {
+                self.unit_buf.clear();
+                self.unit_buf.extend(
+                    units
+                        .iter()
+                        .map(|&(lit, hash)| (map_lit(lit), map_hash(hash))),
+                );
+                ProofStep::UnitClauses {
+                    units: &self.unit_buf,
+                }
+            }
+
+            ProofStep::DeleteClause { clause, proof } => {
+                self.lit_buf.clear();
+                self.lit_buf.extend(clause.iter().cloned().map(map_lit));
+                ProofStep::DeleteClause {
+                    clause: &self.lit_buf,
+                    proof,
+                }
+            }
+
+            ProofStep::Model { assignment } => {
+                self.lit_buf.clear();
+                self.lit_buf.extend(assignment.iter().cloned().map(map_lit));
+                ProofStep::Model {
+                    assignment: &self.lit_buf,
+                }
+            }
+
+            ProofStep::Assumptions { assumptions } => {
+                self.lit_buf.clear();
+                self.lit_buf
+                    .extend(assumptions.iter().cloned().map(map_lit));
+                ProofStep::Assumptions {
+                    assumptions: &self.lit_buf,
+                }
+            }
+
+            ProofStep::FailedAssumptions {
+                failed_core,
+                propagation_hashes,
+            } => {
+                self.lit_buf.clear();
+                self.lit_buf
+                    .extend(failed_core.iter().cloned().map(map_lit));
+                self.hash_buf.clear();
+                self.hash_buf
+                    .extend(propagation_hashes.iter().cloned().map(map_hash));
+                ProofStep::FailedAssumptions {
+                    failed_core: &self.lit_buf,
+                    propagation_hashes: &self.hash_buf,
+                }
+            }
+
+            // These steps contain neither literals nor hashes to map.
+            ProofStep::ChangeHashBits { .. } | ProofStep::End => *step,
+
+            ProofStep::SolverVarName { .. }
+            | ProofStep::UserVarName { .. }
+            | ProofStep::DeleteVar { .. }
+            | ProofStep::ChangeSamplingMode { .. } => {
+                // while these steps do contain variables, they are used to update the mapping, so
+                // they shouldn't be mapped themselves.
+                *step
+            }
+        }
+    }
+}
diff --git a/vendor/varisat/src/prop.rs b/vendor/varisat/src/prop.rs
new file mode 100644
index 000000000..517f03be2
--- /dev/null
+++ b/vendor/varisat/src/prop.rs
@@ -0,0 +1,261 @@
+//! Unit propagation.
+use partial_ref::{partial, PartialRef};
+
+use crate::context::{parts::*, Context};
+
+pub mod assignment;
+pub mod binary;
+pub mod graph;
+pub mod long;
+pub mod watch;
+
+pub use assignment::{backtrack, enqueue_assignment, full_restart, restart, Assignment, Trail};
+pub use graph::{Conflict, ImplGraph, ImplNode, Reason};
+pub use watch::{enable_watchlists, Watch, Watchlists};
+
+/// Propagate enqueued assignments.
+///
+/// Returns when all enqueued assignments are propagated, including newly propagated assignments,
+/// or if there is a conflict.
+///
+/// On conflict the first propagation that would assign the opposite value to an already assigned
+/// literal is returned.
+pub fn propagate(
+    mut ctx: partial!(
+        Context,
+        mut AssignmentP,
+        mut ClauseAllocP,
+        mut ImplGraphP,
+        mut TrailP,
+        mut WatchlistsP,
+        BinaryClausesP,
+        ClauseDbP,
+    ),
+) -> Result<(), Conflict> {
+    // Ensure the watchlists are enabled before propagating long clauses.
+    enable_watchlists(ctx.borrow());
+
+    // For each queued literal, binary clauses are propagated before long clauses.
+    while let Some(lit) = ctx.part_mut(TrailP).pop_queue() {
+        binary::propagate_binary(ctx.borrow(), lit)?;
+        long::propagate_long(ctx.borrow(), lit)?;
+    }
+    Ok(())
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    use proptest::{prelude::*, *};
+
+    use rand::{distributions::Bernoulli, seq::SliceRandom};
+
+    use partial_ref::IntoPartialRefMut;
+
+    use varisat_formula::{cnf::strategy::*, CnfFormula, Lit};
+
+    use crate::{
+        clause::{db, gc},
+        load::load_clause,
+        state::SatState,
+    };
+
+    /// Generate a random formula and list of implied literals.
+    ///
+    /// The first `vars` literals are each implied by a clause over earlier implied literals;
+    /// `extra_clauses` additional clauses are generated so that each is already satisfied.
+    pub fn prop_formula(
+        vars: impl Strategy<Value = usize>,
+        extra_vars: impl Strategy<Value = usize>,
+        extra_clauses: impl Strategy<Value = usize>,
+        density: impl Strategy<Value = f64>,
+    ) -> impl Strategy<Value = (Vec<Lit>, CnfFormula)> {
+        (vars, extra_vars, extra_clauses, density).prop_flat_map(
+            |(vars, extra_vars, extra_clauses, density)| {
+                let polarity = collection::vec(bool::ANY, vars + extra_vars);
+
+                let dist = Bernoulli::new(density).unwrap();
+
+                let lits = polarity
+                    .prop_map(|polarity| {
+                        polarity
+                            .into_iter()
+                            .enumerate()
+                            .map(|(index, polarity)| Lit::from_index(index, polarity))
+                            .collect::<Vec<_>>()
+                    })
+                    .prop_shuffle();
+
+                lits.prop_perturb(move |mut lits, mut rng| {
+                    let assigned_lits = &lits[..vars];
+
+                    let mut clauses: Vec<Vec<Lit>> = vec![];
+                    for (i, &lit) in assigned_lits.iter().enumerate() {
+                        // Build a clause that implies lit
+                        let mut clause = vec![lit];
+                        for &reason_lit in assigned_lits[..i].iter() {
+                            if rng.sample(dist) {
+                                clause.push(!reason_lit);
+                            }
+                        }
+                        clause.shuffle(&mut rng);
+                        clauses.push(clause);
+                    }
+
+                    for _ in 0..extra_clauses {
+                        // Build a clause that is satisfied
+                        let &true_lit = assigned_lits.choose(&mut rng).unwrap();
+                        let mut clause = vec![true_lit];
+                        for &other_lit in lits.iter() {
+                            if other_lit != true_lit && rng.sample(dist) {
+                                clause.push(other_lit ^ rng.gen::<bool>());
+                            }
+                        }
+                        clause.shuffle(&mut rng);
+                        clauses.push(clause);
+                    }
+
+                    clauses.shuffle(&mut rng);
+
+                    // Only return implied lits
+                    lits.drain(vars..);
+
+                    (lits, CnfFormula::from(clauses))
+                })
+            },
+        )
+    }
+
+    proptest! {
+        #[test]
+        fn propagation_no_conflict(
+            (mut lits, formula) in prop_formula(
+                2..30usize,
+                0..10usize,
+                0..20usize,
+                0.1..0.9
+            ),
+        ) {
+            let mut ctx = Context::default();
+            let mut ctx = ctx.into_partial_ref_mut();
+
+            for clause in formula.iter() {
+                load_clause(ctx.borrow(), clause);
+            }
+
+            prop_assert_eq!(ctx.part(SolverStateP).sat_state, SatState::Unknown);
+
+            let prop_result = propagate(ctx.borrow());
+
+            prop_assert_eq!(prop_result, Ok(()));
+
+            lits.sort();
+
+            let mut prop_lits = ctx.part(TrailP).trail().to_owned();
+
+            // Remap vars
+            for lit in prop_lits.iter_mut() {
+                *lit = lit.map_var(|solver_var| {
+                    ctx.part(VariablesP).existing_user_from_solver(solver_var)
+                });
+            }
+
+            prop_lits.sort();
+
+            // Propagation must derive exactly the implied literals.
+            prop_assert_eq!(prop_lits, lits);
+        }
+
+        #[test]
+        fn propagation_conflict(
+            (lits, formula) in prop_formula(
+                2..30usize,
+                0..10usize,
+                0..20usize,
+                0.1..0.9
+            ),
+            conflict_size in any::<sample::Index>(),
+        ) {
+            let mut ctx = Context::default();
+            let mut ctx = ctx.into_partial_ref_mut();
+
+            // We add the conflict clause first to make sure that it isn't simplified during loading
+
+            let conflict_size = conflict_size.index(lits.len() - 1) + 2;
+
+            let conflict_clause: Vec<_> = lits[..conflict_size].iter().map(|&lit| !lit).collect();
+
+            load_clause(ctx.borrow(), &conflict_clause);
+
+            for clause in formula.iter() {
+                load_clause(ctx.borrow(), clause);
+            }
+
+            prop_assert_eq!(ctx.part(SolverStateP).sat_state, SatState::Unknown);
+
+            let prop_result = propagate(ctx.borrow());
+
+            prop_assert!(prop_result.is_err());
+
+            let conflict = prop_result.unwrap_err();
+
+            let conflict_lits = conflict.lits(&ctx.borrow()).to_owned();
+
+            // All literals of the conflict clause must be assigned false.
+            for &lit in conflict_lits.iter() {
+                prop_assert!(ctx.part(AssignmentP).lit_is_false(lit));
+            }
+        }
+
+        #[test]
+        fn propagation_no_conflict_after_gc(
+            tmp_formula in cnf_formula(3..30usize, 30..100, 3..30),
+            (mut lits, formula) in prop_formula(
+                2..30usize,
+                0..10usize,
+                0..20usize,
+                0.1..0.9
+            ),
+        ) {
+            let mut ctx = Context::default();
+            let mut ctx = ctx.into_partial_ref_mut();
+
+            for clause in tmp_formula.iter() {
+                // Only add long clauses here
+                let mut lits = clause.to_owned();
+                lits.sort();
+                lits.dedup();
+                if lits.len() >= 3 {
+                    load_clause(ctx.borrow(), clause);
+                }
+            }
+
+            let tmp_crefs: Vec<_> = db::clauses_iter(&ctx.borrow()).collect();
+
+            for clause in formula.iter() {
+                load_clause(ctx.borrow(), clause);
+            }
+
+            for cref in tmp_crefs {
+                db::delete_clause(ctx.borrow(), cref);
+            }
+
+            gc::collect_garbage(ctx.borrow());
+
+            prop_assert_eq!(ctx.part(SolverStateP).sat_state, SatState::Unknown);
+
+            let prop_result = propagate(ctx.borrow());
+
+            prop_assert_eq!(prop_result, Ok(()));
+
+            lits.sort();
+
+            let mut prop_lits = ctx.part(TrailP).trail().to_owned();
+
+            // Remap vars
+            for lit in prop_lits.iter_mut() {
+                *lit = lit.map_var(|solver_var| {
+                    ctx.part(VariablesP).existing_user_from_solver(solver_var)
+                });
+            }
+
+            prop_lits.sort();
+
+            prop_assert_eq!(prop_lits, lits);
+        }
+    }
+}
diff --git a/vendor/varisat/src/prop/assignment.rs b/vendor/varisat/src/prop/assignment.rs
new file mode 100644
index 000000000..a87f60485
--- /dev/null
+++ b/vendor/varisat/src/prop/assignment.rs
@@ -0,0 +1,237 @@
+//! Partial assignment and backtracking.
+use partial_ref::{partial, PartialRef};
+
+use varisat_formula::{lit::LitIdx, Lit, Var};
+
+use crate::{
+ context::{parts::*, Context},
+ decision::make_available,
+};
+
+use super::Reason;
+
+/// Current partial assignment.
+#[derive(Default)]
+pub struct Assignment {
+    /// Current value per variable, indexed by variable index; `None` when unassigned.
+    assignment: Vec<Option<bool>>,
+    /// Previously assigned value per variable; `false` when never assigned.
+    last_value: Vec<bool>,
+}
+
+/// This compares two `Option<bool>` values as bytes. Workaround for bad code generation.
+///
+/// NOTE(review): soundness relies on `Option<bool>` having a defined single-byte
+/// representation where `None` maps to a distinct byte value — confirm this layout assumption.
+pub fn fast_option_eq(a: Option<bool>, b: Option<bool>) -> bool {
+    unsafe { std::mem::transmute::<_, u8>(a) == std::mem::transmute::<_, u8>(b) }
+}
+
+impl Assignment {
+    /// Update structures for a new variable count.
+    pub fn set_var_count(&mut self, count: usize) {
+        self.assignment.resize(count, None);
+        self.last_value.resize(count, false);
+    }
+
+    /// Current partial assignment as slice.
+    pub fn assignment(&self) -> &[Option<bool>] {
+        &self.assignment
+    }
+
+    /// Value assigned to a variable.
+    pub fn var_value(&self, var: Var) -> Option<bool> {
+        self.assignment[var.index()]
+    }
+
+    /// Value last assigned to a variable.
+    ///
+    /// If the variable is currently assigned this returns the previously assigned value. If the
+    /// variable was never assigned this returns false.
+    pub fn last_var_value(&self, var: Var) -> bool {
+        self.last_value[var.index()]
+    }
+
+    /// Value assigned to a literal.
+    pub fn lit_value(&self, lit: Lit) -> Option<bool> {
+        self.assignment[lit.index()].map(|b| b ^ lit.is_negative())
+    }
+
+    /// Whether the literal is assigned true.
+    pub fn lit_is_true(&self, lit: Lit) -> bool {
+        fast_option_eq(self.assignment[lit.index()], Some(lit.is_positive()))
+    }
+
+    /// Whether the literal is assigned false.
+    pub fn lit_is_false(&self, lit: Lit) -> bool {
+        fast_option_eq(self.assignment[lit.index()], Some(lit.is_negative()))
+    }
+
+    /// Whether the literal is unassigned.
+    pub fn lit_is_unk(&self, lit: Lit) -> bool {
+        fast_option_eq(self.assignment[lit.index()], None)
+    }
+
+    /// Assign true to the given literal.
+    ///
+    /// Does not update `last_value`; that happens on unassignment during backtracking.
+    pub fn assign_lit(&mut self, lit: Lit) {
+        self.assignment[lit.index()] = lit.is_positive().into()
+    }
+
+    /// Mark the given variable as unassigned.
+    pub fn unassign_var(&mut self, var: Var) {
+        self.assignment[var.index()] = None;
+    }
+
+    /// Set the assigned/unassigned value of a variable.
+    pub fn set_var(&mut self, var: Var, assignment: Option<bool>) {
+        self.assignment[var.index()] = assignment;
+    }
+}
+
+/// Decision and propagation history.
+#[derive(Default)]
+pub struct Trail {
+    /// Stack of all propagated and all enqueued assignments
+    trail: Vec<Lit>,
+    /// Next assignment in trail to propagate
+    queue_head_pos: usize,
+    /// Decision levels as trail indices.
+    decisions: Vec<LitIdx>,
+    /// Number of unit clauses removed from the trail.
+    ///
+    /// Keeps `top_level_assignment_count` accurate across calls to `clear`.
+    units_removed: usize,
+}
+
+impl Trail {
+    /// Return the next assigned literal to propagate.
+    pub fn queue_head(&self) -> Option<Lit> {
+        self.trail.get(self.queue_head_pos).copied()
+    }
+
+    /// Return the next assigned literal to propagate and remove it from the queue.
+    pub fn pop_queue(&mut self) -> Option<Lit> {
+        let next = self.queue_head()?;
+        self.queue_head_pos += 1;
+        Some(next)
+    }
+
+    /// Re-enqueue all assigned literals.
+    pub fn reset_queue(&mut self) {
+        self.queue_head_pos = 0;
+    }
+
+    /// Assigned literals in assignment order.
+    pub fn trail(&self) -> &[Lit] {
+        self.trail.as_slice()
+    }
+
+    /// Clear the trail.
+    ///
+    /// This simply removes all entries without performing any backtracking. Can only be called
+    /// with no active decisions.
+    pub fn clear(&mut self) {
+        assert!(self.decisions.is_empty());
+        // Removed entries still count towards the top level assignment count.
+        self.units_removed += self.trail.len();
+        self.queue_head_pos = 0;
+        self.trail.clear();
+    }
+
+    /// Start a new decision level.
+    ///
+    /// Does not enqueue the decision itself.
+    pub fn new_decision_level(&mut self) {
+        let level_start = self.trail.len() as LitIdx;
+        self.decisions.push(level_start)
+    }
+
+    /// Current decision level.
+    pub fn current_level(&self) -> usize {
+        self.decisions.len()
+    }
+
+    /// The number of assignments at level 0.
+    pub fn top_level_assignment_count(&self) -> usize {
+        let on_trail = match self.decisions.first() {
+            Some(&first_decision) => first_decision as usize,
+            None => self.trail.len(),
+        };
+        on_trail + self.units_removed
+    }
+
+    /// Whether all assignments are processed.
+    pub fn fully_propagated(&self) -> bool {
+        self.trail.len() == self.queue_head_pos
+    }
+}
+
+/// Enqueues the assignment of true to a literal.
+///
+/// This updates the assignment and trail, but does not perform any propagation. The literal has to
+/// be unassigned when calling this.
+pub fn enqueue_assignment(
+    mut ctx: partial!(Context, mut AssignmentP, mut ImplGraphP, mut TrailP),
+    lit: Lit,
+    reason: Reason,
+) {
+    let assignment = ctx.part_mut(AssignmentP);
+    debug_assert!(assignment.lit_value(lit) == None);
+
+    assignment.assign_lit(lit);
+
+    let (trail, mut ctx) = ctx.split_part_mut(TrailP);
+
+    trail.trail.push(lit);
+
+    // Record the implication graph node for the new assignment.
+    let node = &mut ctx.part_mut(ImplGraphP).nodes[lit.index()];
+    node.reason = reason;
+    node.level = trail.decisions.len() as LitIdx;
+    // Depth is the trail length after the push, i.e. the 1-based trail position.
+    node.depth = trail.trail.len() as LitIdx;
+}
+
+/// Undo all assignments in decision levels deeper than the given level.
+pub fn backtrack(
+    mut ctx: partial!(Context, mut AssignmentP, mut TrailP, mut VsidsP),
+    level: usize,
+) {
+    let (assignment, mut ctx) = ctx.split_part_mut(AssignmentP);
+    let (trail, mut ctx) = ctx.split_part_mut(TrailP);
+
+    // level can actually be greater than the current level. This happens when restart is called
+    // after a conflict backtracked into the assumptions, but before any assumption was reenqueued.
+    // TODO should we update assumption_levels on backtracking instead of allowing this here?
+    if level >= trail.decisions.len() {
+        return;
+    }
+
+    let new_trail_len = trail.decisions[level] as usize;
+
+    // Everything up to the new trail length is already propagated.
+    trail.queue_head_pos = new_trail_len;
+    trail.decisions.truncate(level);
+
+    let trail_end = &trail.trail[new_trail_len..];
+    for &lit in trail_end {
+        // Unassigned variables become available for future decisions again.
+        make_available(ctx.borrow(), lit.var());
+        let var_assignment = &mut assignment.assignment[lit.index()];
+        // Remember the value for phase saving before unassigning.
+        assignment.last_value[lit.index()] = *var_assignment == Some(true);
+        *var_assignment = None;
+    }
+    trail.trail.truncate(new_trail_len);
+}
+
+/// Undo all decisions and assumptions.
+pub fn full_restart(
+    mut ctx: partial!(
+        Context,
+        mut AssignmentP,
+        mut AssumptionsP,
+        mut TrailP,
+        mut VsidsP,
+    ),
+) {
+    // Clear assumptions, then undo every decision level.
+    ctx.part_mut(AssumptionsP).full_restart();
+    backtrack(ctx.borrow(), 0);
+}
+
+/// Undo all decisions.
+///
+/// Levels holding assumptions are kept assigned.
+pub fn restart(
+    mut ctx: partial!(
+        Context,
+        mut AssignmentP,
+        mut TrailP,
+        mut VsidsP,
+        AssumptionsP
+    ),
+) {
+    let level = ctx.part(AssumptionsP).assumption_levels();
+    backtrack(ctx.borrow(), level);
+}
diff --git a/vendor/varisat/src/prop/binary.rs b/vendor/varisat/src/prop/binary.rs
new file mode 100644
index 000000000..d89347c8a
--- /dev/null
+++ b/vendor/varisat/src/prop/binary.rs
@@ -0,0 +1,36 @@
+//! Propagation of binary clauses.
+use partial_ref::{partial, PartialRef};
+
+use varisat_formula::Lit;
+
+use crate::context::{parts::*, Context};
+
+use super::{enqueue_assignment, Conflict, Reason};
+
+/// Propagate all literals implied by the given literal via binary clauses.
+///
+/// On conflict return the binary clause propgating the conflicting assignment.
+pub fn propagate_binary(
+    mut ctx: partial!(
+        Context,
+        mut AssignmentP,
+        mut ImplGraphP,
+        mut TrailP,
+        BinaryClausesP,
+    ),
+    lit: Lit,
+) -> Result<(), Conflict> {
+    let (binary_clauses, mut ctx) = ctx.split_part(BinaryClausesP);
+
+    for &implied in binary_clauses.implied(lit) {
+        let assignment = ctx.part(AssignmentP);
+
+        if assignment.lit_is_false(implied) {
+            // Both literals of the binary clause (implied OR !lit) are false: conflict.
+            return Err(Conflict::Binary([implied, !lit]));
+        } else if !assignment.lit_is_true(implied) {
+            // The clause is unit under the current assignment: enqueue the implied literal.
+            enqueue_assignment(ctx.borrow(), implied, Reason::Binary([!lit]));
+        }
+    }
+
+    Ok(())
+}
diff --git a/vendor/varisat/src/prop/graph.rs b/vendor/varisat/src/prop/graph.rs
new file mode 100644
index 000000000..3a0e3a42d
--- /dev/null
+++ b/vendor/varisat/src/prop/graph.rs
@@ -0,0 +1,137 @@
+//! The implication graph.
+
+use partial_ref::{partial, PartialRef};
+
+use varisat_formula::{lit::LitIdx, Lit, Var};
+
+use crate::{
+ clause::ClauseRef,
+ context::{parts::*, Context},
+};
+
+/// Assignments that caused a propagation.
+#[derive(Copy, Clone, Eq, PartialEq, Debug)]
+pub enum Reason {
+ Unit,
+ Binary([Lit; 1]),
+ Long(ClauseRef),
+}
+
+impl Reason {
+ /// The literals that caused the propagation.
+ pub fn lits<'out, 'a, 'b>(&'a self, ctx: &'b partial!('b Context, ClauseAllocP)) -> &'out [Lit]
+ where
+ 'a: 'out,
+ 'b: 'out,
+ {
+ match self {
+ Reason::Unit => &[],
+ Reason::Binary(lit) => lit,
+ // The propagated literal is always kept at position 0
+ Reason::Long(cref) => &ctx.part(ClauseAllocP).clause(*cref).lits()[1..],
+ }
+ }
+
+ /// True if a unit clause or assumption and not a propagation.
+ pub fn is_unit(&self) -> bool {
+ match self {
+ Reason::Unit => true,
+ _ => false,
+ }
+ }
+}
+
+/// Propagation that resulted in a conflict.
+#[derive(Copy, Clone, Eq, PartialEq, Debug)]
+pub enum Conflict {
+ Binary([Lit; 2]),
+ Long(ClauseRef),
+}
+
+impl Conflict {
+ /// The literals that caused the conflict.
+ pub fn lits<'out, 'a, 'b>(&'a self, ctx: &'b partial!('b Context, ClauseAllocP)) -> &'out [Lit]
+ where
+ 'a: 'out,
+ 'b: 'out,
+ {
+ match self {
+ Conflict::Binary(lits) => lits,
+ Conflict::Long(cref) => ctx.part(ClauseAllocP).clause(*cref).lits(),
+ }
+ }
+}
+
+/// Node and incoming edges of the implication graph.
+#[derive(Copy, Clone)]
+pub struct ImplNode {
+ pub reason: Reason,
+ pub level: LitIdx,
+ /// Position in trail when assigned, `LitIdx::max_value()` is used as sentinel for removed
+ /// units.
+ pub depth: LitIdx,
+}
+
+/// The implication graph.
+///
+/// This is a DAG having all assigned variables as nodes. It has unit clauses, assumptions and
+/// decisions as sources. For each propagated assignment it has incoming edges from the literals
+/// whose assignment caused the propagation to happen.
+#[derive(Default)]
+pub struct ImplGraph {
+ /// Contains only valid data for indices of assigned variables.
+ pub nodes: Vec<ImplNode>,
+}
+
+impl ImplGraph {
+ /// Update structures for a new variable count.
+ pub fn set_var_count(&mut self, count: usize) {
+ self.nodes.resize(
+ count,
+ ImplNode {
+ reason: Reason::Unit,
+ level: 0,
+ depth: 0,
+ },
+ );
+ }
+
+ /// Get the reason for an assigned variable.
+ ///
+ /// Returns stale data if the variable isn't assigned.
+ pub fn reason(&self, var: Var) -> &Reason {
+ &self.nodes[var.index()].reason
+ }
+
+ /// Get the decision level of an assigned variable.
+ ///
+ /// Returns stale data if the variable isn't assigned.
+ pub fn level(&self, var: Var) -> usize {
+ self.nodes[var.index()].level as usize
+ }
+
+ /// Get the trail depth of an assigned variable.
+ ///
+ /// Returns stale data if the variable isn't assigned.
+ pub fn depth(&self, var: Var) -> usize {
+ self.nodes[var.index()].depth as usize
+ }
+
+ /// Updates the reason for an assigned variable.
+ ///
+ /// Make sure the reason vars are in front of the assigned variable in the trail.
+ pub fn update_reason(&mut self, var: Var, reason: Reason) {
+ self.nodes[var.index()].reason = reason
+ }
+
+ /// Updates the reason and depth of a unit clause removed from the trail.
+ pub fn update_removed_unit(&mut self, var: Var) {
+ let node = &mut self.nodes[var.index()];
+ node.reason = Reason::Unit;
+ node.depth = LitIdx::max_value();
+ }
+
+ pub fn is_removed_unit(&self, var: Var) -> bool {
+ self.nodes[var.index()].depth == LitIdx::max_value()
+ }
+}
diff --git a/vendor/varisat/src/prop/long.rs b/vendor/varisat/src/prop/long.rs
new file mode 100644
index 000000000..8fde47ec7
--- /dev/null
+++ b/vendor/varisat/src/prop/long.rs
@@ -0,0 +1,164 @@
+//! Propagation of long clauses.
+use partial_ref::{partial, PartialRef};
+
+use varisat_formula::Lit;
+
+use crate::context::{parts::*, Context};
+
+use super::{assignment::fast_option_eq, enqueue_assignment, Conflict, Reason, Watch};
+
+/// Propagate all literals implied by long clauses watched by the given literal.
+///
+/// On conflict return the clause propagating the conflicting assignment.
+///
+/// See [`prop::watch`](crate::prop::watch) for the invariants that this has to uphold.
+pub fn propagate_long(
+ mut ctx: partial!(
+ Context,
+ mut AssignmentP,
+ mut ImplGraphP,
+ mut TrailP,
+ mut WatchlistsP,
+ mut ClauseAllocP,
+ ),
+ lit: Lit,
+) -> Result<(), Conflict> {
+ // The code below is heavily optimized and replaces a much nicer but sadly slower version.
+ // Nevertheless it still performs full bound checks. Therefore this function is safe to call
+ // even when some other code violated invariants of for example the clause db.
+ unsafe {
+ let (watchlists, mut ctx) = ctx.split_part_mut(WatchlistsP);
+ let (alloc, mut ctx) = ctx.split_part_mut(ClauseAllocP);
+
+ let watch_begin;
+ let watch_end;
+ {
+ let watch_list = &mut watchlists.watched_by_mut(lit);
+ watch_begin = watch_list.as_mut_ptr();
+ watch_end = watch_begin.add(watch_list.len());
+ }
+ let mut watch_ptr = watch_begin;
+
+ let false_lit = !lit;
+
+ let mut watch_write = watch_ptr;
+
+ let assignment_limit = ctx.part(AssignmentP).assignment().len();
+ let assignment_ptr = ctx.part(AssignmentP).assignment().as_ptr();
+
+ let is_true = |lit: Lit| {
+ assert!(lit.index() < assignment_limit);
+ fast_option_eq(*assignment_ptr.add(lit.index()), Some(lit.is_positive()))
+ };
+
+ let is_false = |lit: Lit| {
+ assert!(lit.index() < assignment_limit);
+ fast_option_eq(*assignment_ptr.add(lit.index()), Some(lit.is_negative()))
+ };
+
+ 'watchers: while watch_ptr != watch_end {
+ let watch = *watch_ptr;
+ watch_ptr = watch_ptr.add(1);
+
+ // If the blocking literal (which is part of the watched clause) is already true, the
+ // watched clause is satisfied and we don't even have to look at it.
+ if is_true(watch.blocking) {
+ *watch_write = watch;
+ watch_write = watch_write.add(1);
+ continue;
+ }
+
+ let cref = watch.cref;
+
+ // Make sure we can access at least 3 lits
+ alloc.check_bounds(cref, 3);
+
+ let clause_ptr = alloc.lits_ptr_mut_unchecked(cref);
+ let &mut header = alloc.header_unchecked_mut(cref);
+
+ // First we ensure that the literal we're currently propagating is at index 1. This
+ // prepares the literal order for further propagations, as the propagating literal has
+ // to be at index 0. Doing this here also avoids a similar check later should the clause
+ // be satisfied by a non-watched literal, as we can just move it to index 1.
+ let mut first = *clause_ptr.add(0);
+ if first == false_lit {
+ let c1 = *clause_ptr.add(1);
+ first = c1;
+ *clause_ptr.add(0) = c1;
+ *clause_ptr.add(1) = false_lit;
+ }
+
+ // We create a new watch with the other watched literal as blocking literal. This will
+ // either replace the currently processed watch or be added to another literals watch
+ // list.
+ let new_watch = Watch {
+ cref,
+ blocking: first,
+ };
+
+ // If the other watched literal (now the first) isn't the blocking literal, check
+ // whether that one is true. If so nothing else needs to be done.
+ if first != watch.blocking && is_true(first) {
+ *watch_write = new_watch;
+ watch_write = watch_write.add(1);
+ continue;
+ }
+
+ // At this point we try to find a non-false unwatched literal to replace our current
+ // literal as the watched literal.
+
+ let clause_len = header.len();
+ let mut lit_ptr = clause_ptr.add(2);
+ let lit_end = clause_ptr.add(clause_len);
+
+ // Make sure we can access all clause literals.
+ alloc.check_bounds(cref, clause_len);
+
+ while lit_ptr != lit_end {
+ let rest_lit = *lit_ptr;
+ if !is_false(rest_lit) {
+ // We found a non-false literal and make it a watched literal by reordering the
+ // literals and adding the watch to the corresponding watchlist.
+ *clause_ptr.offset(1) = rest_lit;
+ *lit_ptr = false_lit;
+
+ // We're currently using unsafe to modify the watchlist of lit, so make extra
+ // sure we're not aliasing.
+ assert_ne!(!rest_lit, lit);
+ watchlists.add_watch(!rest_lit, new_watch);
+ continue 'watchers;
+ }
+ lit_ptr = lit_ptr.add(1);
+ }
+
+ // We didn't find a non-false unwatched literal, so either we're propagating or we have
+ // a conflict.
+ *watch_write = new_watch;
+ watch_write = watch_write.add(1);
+
+ // If the other watched literal is false we have a conflict.
+ if is_false(first) {
+ // We move all unprocessed watches and resize the current watchlist.
+ while watch_ptr != watch_end {
+ *watch_write = *watch_ptr;
+ watch_write = watch_write.add(1);
+ watch_ptr = watch_ptr.add(1);
+ }
+ let out_size = ((watch_write as usize) - (watch_begin as usize))
+ / std::mem::size_of::<Watch>();
+
+ watchlists.watched_by_mut(lit).truncate(out_size as usize);
+
+ return Err(Conflict::Long(cref));
+ }
+
+ // Otherwise we enqueue a new propagation.
+ enqueue_assignment(ctx.borrow(), first, Reason::Long(cref));
+ }
+
+ let out_size =
+ ((watch_write as usize) - (watch_begin as usize)) / std::mem::size_of::<Watch>();
+ watchlists.watched_by_mut(lit).truncate(out_size as usize);
+ }
+ Ok(())
+}
diff --git a/vendor/varisat/src/prop/watch.rs b/vendor/varisat/src/prop/watch.rs
new file mode 100644
index 000000000..095ac86a9
--- /dev/null
+++ b/vendor/varisat/src/prop/watch.rs
@@ -0,0 +1,135 @@
+//! Watchlists to detect clauses that became unit.
+//!
+//! Each (long) clause has always two watches pointing to it. The watches are kept in the watchlists
+//! of two different literals of the clause. Whenever the watches are moved to different literals
+//! the literals of the clause are permuted so the watched literals are in position 0 and 1.
+//!
+//! When a clause is not unit under the current assignment, the watched literals point at two
+//! non-false literals. When a clause is unit and thus propagating, the true literal is watched and
+//! in position 0, the other watched literal is the one with the largest decision level and kept in
+//! position 1. When a clause becomes satisfied before becoming unit the watches can be kept as they
+//! were.
+//!
+//! When a literal is assigned false that invariant can be invalidated. This can be detected by
+//! scanning the watches of the assigned literal. When the assignment is processed the watches are
+//! moved to restore that invariant. Unless there is a conflict, i.e. a clause with no non-false
+//! literals, this can always be done. This also finds all clauses that became unit. The new unit
+//! clauses are exactly those clauses where no two non-false literals can be found.
+//!
+//! There is no need to update watchlists on backtracking, as unassigning variables cannot
+//! invalidate the invariant.
+//!
+//! See [Section 4.5.1 of the "Handbook of Satisfiability"][handbook-ch4] for more details and
+//! references.
+//!
+//! As a further optimization we use blocking literals. This means that each watch stores a literal
+//! of the clause that is different from the watched literal. It can be the other watched literal or
+//! any unwatched literal. When that literal is true, the clause is already satisfied, meaning that
+//! no watches need to be updated. This can be detected by just looking at the watch, avoiding
+//! access of the clause database. This variant was introduced by [Niklas Sörensson and Niklas Eén
+//! in "MINISAT 2.1 and MINISAT++1.0 — SAT Race 2008 Editions"][minisat-2.1].
+//!
+//! [handbook-ch4]: https://www.satassociation.org/articles/FAIA185-0131.pdf
+//! [minisat-2.1]: https://www.cril.univ-artois.fr/SAT09/solvers/booklet.pdf
+
+use partial_ref::{partial, PartialRef};
+
+use varisat_formula::Lit;
+
+use crate::{
+ clause::{db, ClauseRef},
+ context::{parts::*, Context},
+};
+
+/// A watch on a long clause.
+#[derive(Copy, Clone)]
+pub struct Watch {
+ /// Clause which has the referring lit in position 0 or 1.
+ pub cref: ClauseRef,
+ /// A lit of the clause, different from the referring lit.
+ pub blocking: Lit,
+}
+
+/// Watchlists to detect clauses that became unit.
+pub struct Watchlists {
+ /// One watchlist per literal, indexed by the literal's code.
+ watches: Vec<Vec<Watch>>,
+ /// Whether watchlists are present
+ enabled: bool,
+}
+
+impl Default for Watchlists {
+ fn default() -> Watchlists {
+ Watchlists {
+ watches: vec![],
+ enabled: true,
+ }
+ }
+}
+
+impl Watchlists {
+ /// Update structures for a new variable count.
+ pub fn set_var_count(&mut self, count: usize) {
+ self.watches.resize(count * 2, vec![]);
+ }
+
+ /// Start watching a clause.
+ ///
+ /// `lits` have to be the first two literals of the given clause.
+ pub fn watch_clause(&mut self, cref: ClauseRef, lits: [Lit; 2]) {
+ if !self.enabled {
+ return;
+ }
+
+ for i in 0..2 {
+ let watch = Watch {
+ cref,
+ blocking: lits[i ^ 1],
+ };
+ self.add_watch(!lits[i], watch);
+ }
+ }
+
+ /// Return watches for a given literal.
+ pub fn watched_by_mut(&mut self, lit: Lit) -> &mut Vec<Watch> {
+ &mut self.watches[lit.code()]
+ }
+
+ /// Make a literal watch a clause.
+ pub fn add_watch(&mut self, lit: Lit, watch: Watch) {
+ self.watches[lit.code()].push(watch)
+ }
+
+ /// Are watchlists enabled.
+ pub fn enabled(&self) -> bool {
+ self.enabled
+ }
+
+ /// Clear and disable watchlists.
+ ///
+ /// Actual clearing of the watchlists is done on re-enabling of the watchlists.
+ pub fn disable(&mut self) {
+ self.enabled = false;
+ }
+}
+
+/// Enable and rebuild watchlists.
+pub fn enable_watchlists(mut ctx: partial!(Context, mut WatchlistsP, ClauseAllocP, ClauseDbP)) {
+ let (watchlists, mut ctx) = ctx.split_part_mut(WatchlistsP);
+ if watchlists.enabled {
+ return;
+ }
+
+ for watchlist in watchlists.watches.iter_mut() {
+ watchlist.clear();
+ }
+
+ watchlists.enabled = true;
+
+ let (alloc, mut ctx) = ctx.split_part(ClauseAllocP);
+
+ for cref in db::clauses_iter(&ctx.borrow()) {
+ let lits = alloc.clause(cref).lits();
+ watchlists.watch_clause(cref, [lits[0], lits[1]]);
+ }
+}
diff --git a/vendor/varisat/src/schedule.rs b/vendor/varisat/src/schedule.rs
new file mode 100644
index 000000000..1e1b04e57
--- /dev/null
+++ b/vendor/varisat/src/schedule.rs
@@ -0,0 +1,101 @@
+//! Scheduling of processing and solving steps.
+//!
+//! The current implementation is temporary and will be replaced with something more flexible.
+use log::info;
+
+use partial_ref::{partial, PartialRef};
+
+use crate::{
+ cdcl::conflict_step,
+ clause::{
+ collect_garbage,
+ reduce::{reduce_locals, reduce_mids},
+ Tier,
+ },
+ context::{parts::*, Context},
+ prop::restart,
+ state::SatState,
+};
+
+mod luby;
+
+use luby::LubySequence;
+
+/// Scheduling of processing and solving steps.
+#[derive(Default)]
+pub struct Schedule {
+ conflicts: u64,
+ next_restart: u64,
+ restarts: u64,
+ luby: LubySequence,
+}
+
+/// Perform one step of the schedule.
+pub fn schedule_step<'a>(
+ mut ctx: partial!(
+ Context<'a>,
+ mut AnalyzeConflictP,
+ mut AssignmentP,
+ mut AssumptionsP,
+ mut BinaryClausesP,
+ mut ClauseActivityP,
+ mut ClauseAllocP,
+ mut ClauseDbP,
+ mut ImplGraphP,
+ mut ModelP,
+ mut ProofP<'a>,
+ mut ScheduleP,
+ mut SolverStateP,
+ mut TmpDataP,
+ mut TmpFlagsP,
+ mut TrailP,
+ mut VariablesP,
+ mut VsidsP,
+ mut WatchlistsP,
+ SolverConfigP,
+ ),
+) -> bool {
+ let (schedule, mut ctx) = ctx.split_part_mut(ScheduleP);
+ let (config, mut ctx) = ctx.split_part(SolverConfigP);
+
+ if ctx.part(SolverStateP).sat_state != SatState::Unknown
+ || ctx.part(SolverStateP).solver_error.is_some()
+ {
+ false
+ } else {
+ if schedule.conflicts > 0 && schedule.conflicts % 5000 == 0 {
+ let db = ctx.part(ClauseDbP);
+ let units = ctx.part(TrailP).top_level_assignment_count();
+ info!(
+ "confl: {}k rest: {} vars: {} bin: {} irred: {} core: {} mid: {} local: {}",
+ schedule.conflicts / 1000,
+ schedule.restarts,
+ ctx.part(AssignmentP).assignment().len() - units,
+ ctx.part(BinaryClausesP).count(),
+ db.count_by_tier(Tier::Irred),
+ db.count_by_tier(Tier::Core),
+ db.count_by_tier(Tier::Mid),
+ db.count_by_tier(Tier::Local)
+ );
+ }
+
+ if schedule.next_restart == schedule.conflicts {
+ restart(ctx.borrow());
+ schedule.restarts += 1;
+ schedule.next_restart += config.luby_restart_interval_scale * schedule.luby.advance();
+ }
+
+ if schedule.conflicts % config.reduce_locals_interval == 0 {
+ reduce_locals(ctx.borrow());
+ }
+ if schedule.conflicts % config.reduce_mids_interval == 0 {
+ reduce_mids(ctx.borrow());
+ }
+
+ collect_garbage(ctx.borrow());
+
+ conflict_step(ctx.borrow());
+ schedule.conflicts += 1;
+ true
+ }
+}
diff --git a/vendor/varisat/src/schedule/luby.rs b/vendor/varisat/src/schedule/luby.rs
new file mode 100644
index 000000000..9b44f7646
--- /dev/null
+++ b/vendor/varisat/src/schedule/luby.rs
@@ -0,0 +1,52 @@
+//! The reluctant doubling Luby sequence.
+//!
+//! This sequence is [A182105](https://oeis.org/A182105).
+
+/// Infinite iterator yielding the Luby sequence.
+pub struct LubySequence {
+ u: u64,
+ v: u64,
+}
+
+impl Default for LubySequence {
+ fn default() -> LubySequence {
+ LubySequence { u: 1, v: 1 }
+ }
+}
+
+impl LubySequence {
+ /// Yields the next number of the Luby sequence.
+ pub fn advance(&mut self) -> u64 {
+ let result = self.v;
+
+ // Method by Knuth 2012
+ if (self.u & self.u.wrapping_neg()) == self.v {
+ self.u += 1;
+ self.v = 1;
+ } else {
+ self.v <<= 1;
+ }
+
+ result
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn luby_sequence() {
+ let mut luby = LubySequence::default();
+ let initial_terms: Vec<_> = std::iter::repeat_with(|| luby.advance()).take(64).collect();
+
+ assert_eq!(
+ initial_terms,
+ vec![
+ 1, 1, 2, 1, 1, 2, 4, 1, 1, 2, 1, 1, 2, 4, 8, 1, 1, 2, 1, 1, 2, 4, 1, 1, 2, 1, 1, 2,
+ 4, 8, 16, 1, 1, 2, 1, 1, 2, 4, 1, 1, 2, 1, 1, 2, 4, 8, 1, 1, 2, 1, 1, 2, 4, 1, 1,
+ 2, 1, 1, 2, 4, 8, 16, 32, 1,
+ ]
+ )
+ }
+}
diff --git a/vendor/varisat/src/solver.rs b/vendor/varisat/src/solver.rs
new file mode 100644
index 000000000..d347fafd2
--- /dev/null
+++ b/vendor/varisat/src/solver.rs
@@ -0,0 +1,512 @@
+//! Boolean satisfiability solver.
+use std::io;
+
+use partial_ref::{IntoPartialRef, IntoPartialRefMut, PartialRef};
+
+use anyhow::Error;
+use thiserror::Error;
+
+use varisat_checker::ProofProcessor;
+use varisat_dimacs::DimacsParser;
+use varisat_formula::{CnfFormula, ExtendFormula, Lit, Var};
+
+use crate::{
+ assumptions::set_assumptions,
+ config::SolverConfigUpdate,
+ context::{config_changed, parts::*, Context},
+ load::load_clause,
+ proof,
+ schedule::schedule_step,
+ state::SatState,
+ variables,
+};
+
+pub use crate::proof::ProofFormat;
+
+/// Possible errors while solving a formula.
+#[derive(Debug, Error)]
+#[non_exhaustive]
+pub enum SolverError {
+ #[error("The solver was interrupted")]
+ Interrupted,
+ #[error("Error in proof processor: {}", cause)]
+ ProofProcessorError {
+ #[source]
+ cause: Error,
+ },
+ #[error("Error writing proof file: {}", cause)]
+ ProofIoError {
+ #[source]
+ cause: io::Error,
+ },
+}
+
+impl SolverError {
+ /// Whether a Solver instance can be used after producing such an error.
+ pub fn is_recoverable(&self) -> bool {
+ match self {
+ SolverError::Interrupted => true,
+ _ => false,
+ }
+ }
+}
+
+/// A boolean satisfiability solver.
+#[derive(Default)]
+pub struct Solver<'a> {
+ ctx: Box<Context<'a>>,
+}
+
+impl<'a> Solver<'a> {
+ /// Create a new solver.
+ pub fn new() -> Solver<'a> {
+ Solver::default()
+ }
+
+ /// Change the solver configuration.
+ pub fn config(&mut self, config_update: &SolverConfigUpdate) -> Result<(), Error> {
+ config_update.apply(&mut self.ctx.solver_config)?;
+ let mut ctx = self.ctx.into_partial_ref_mut();
+ config_changed(ctx.borrow(), config_update);
+ Ok(())
+ }
+
+ /// Add a formula to the solver.
+ pub fn add_formula(&mut self, formula: &CnfFormula) {
+ let mut ctx = self.ctx.into_partial_ref_mut();
+ for clause in formula.iter() {
+ load_clause(ctx.borrow(), clause);
+ }
+ }
+
+ /// Reads and adds a formula in DIMACS CNF format.
+ ///
+ /// Using this avoids creating a temporary [`CnfFormula`].
+ pub fn add_dimacs_cnf(&mut self, input: impl io::Read) -> Result<(), Error> {
+ let parser = DimacsParser::parse_incremental(input, |parser| {
+ self.add_formula(&parser.take_formula());
+ Ok(())
+ })?;
+
+ log::info!(
+ "Parsed formula with {} variables and {} clauses",
+ parser.var_count(),
+ parser.clause_count()
+ );
+
+ Ok(())
+ }
+
+ /// Sets the "witness" sampling mode for a variable.
+ pub fn witness_var(&mut self, var: Var) {
+ // TODO add link to sampling mode section of the manual when written
+ let mut ctx = self.ctx.into_partial_ref_mut();
+ let global = variables::global_from_user(ctx.borrow(), var, false);
+ variables::set_sampling_mode(ctx.borrow(), global, variables::data::SamplingMode::Witness);
+ }
+
+ /// Sets the "sample" sampling mode for a variable.
+ pub fn sample_var(&mut self, var: Var) {
+ // TODO add link to sampling mode section of the manual when written
+ // TODO add warning about constraining variables that previously were witness variables
+ let mut ctx = self.ctx.into_partial_ref_mut();
+ let global = variables::global_from_user(ctx.borrow(), var, false);
+ variables::set_sampling_mode(ctx.borrow(), global, variables::data::SamplingMode::Sample);
+ }
+
+ /// Hide a variable.
+ ///
+ /// Turns a free variable into an existentially quantified variable. If the passed `Var` is used
+ /// again after this call, it refers to a new variable not the previously hidden variable.
+ pub fn hide_var(&mut self, var: Var) {
+ // TODO add link to sampling mode section of the manual when written
+ let mut ctx = self.ctx.into_partial_ref_mut();
+ let global = variables::global_from_user(ctx.borrow(), var, false);
+ variables::set_sampling_mode(ctx.borrow(), global, variables::data::SamplingMode::Hide);
+ }
+
+ /// Observe solver internal variables.
+ ///
+ /// This turns solver internal variables into witness variables. There is no guarantee that the
+ /// newly visible variables correspond to previously hidden variables.
+ ///
+ /// Returns a list of newly visible variables.
+ pub fn observe_internal_vars(&mut self) -> Vec<Var> {
+ // TODO add link to sampling mode section of the manual when written
+ let mut ctx = self.ctx.into_partial_ref_mut();
+ variables::observe_internal_vars(ctx.borrow())
+ }
+
+ /// Check the satisfiability of the current formula.
+ pub fn solve(&mut self) -> Result<bool, SolverError> {
+ self.ctx.solver_state.solver_invoked = true;
+
+ let mut ctx = self.ctx.into_partial_ref_mut();
+ assert!(
+ !ctx.part_mut(SolverStateP).state_is_invalid,
+ "solve() called after encountering an unrecoverable error"
+ );
+
+ while schedule_step(ctx.borrow()) {}
+
+ proof::solve_finished(ctx.borrow());
+
+ self.check_for_solver_error()?;
+
+ match self.ctx.solver_state.sat_state {
+ SatState::Unknown => Err(SolverError::Interrupted),
+ SatState::Sat => Ok(true),
+ SatState::Unsat | SatState::UnsatUnderAssumptions => Ok(false),
+ }
+ }
+
+ /// Check for asynchronously generated errors.
+ ///
+ /// To avoid threading errors out of deep call stacks, we have a solver_error field in the
+ /// SolverState. This function takes and returns that error if present.
+ fn check_for_solver_error(&mut self) -> Result<(), SolverError> {
+ let mut ctx = self.ctx.into_partial_ref_mut();
+ let error = ctx.part_mut(SolverStateP).solver_error.take();
+
+ if let Some(error) = error {
+ if !error.is_recoverable() {
+ ctx.part_mut(SolverStateP).state_is_invalid = true;
+ }
+ Err(error)
+ } else {
+ Ok(())
+ }
+ }
+
+ /// Assume given literals for future calls to solve.
+ ///
+ /// This replaces the current set of assumed literals.
+ pub fn assume(&mut self, assumptions: &[Lit]) {
+ let mut ctx = self.ctx.into_partial_ref_mut();
+ set_assumptions(ctx.borrow(), assumptions);
+ }
+
+ /// Set of literals that satisfy the formula.
+ pub fn model(&self) -> Option<Vec<Lit>> {
+ let ctx = self.ctx.into_partial_ref();
+ if ctx.part(SolverStateP).sat_state == SatState::Sat {
+ Some(
+ ctx.part(VariablesP)
+ .user_var_iter()
+ .flat_map(|user_var| {
+ let global_var = ctx
+ .part(VariablesP)
+ .global_from_user()
+ .get(user_var)
+ .expect("no existing global var for user var");
+ ctx.part(ModelP).assignment()[global_var.index()]
+ .map(|value| user_var.lit(value))
+ })
+ .collect(),
+ )
+ } else {
+ None
+ }
+ }
+
+ /// Subset of the assumptions that made the formula unsatisfiable.
+ ///
+ /// This is not guaranteed to be minimal and may just return all assumptions every time.
+ pub fn failed_core(&self) -> Option<&[Lit]> {
+ match self.ctx.solver_state.sat_state {
+ SatState::UnsatUnderAssumptions => Some(self.ctx.assumptions.user_failed_core()),
+ SatState::Unsat => Some(&[]),
+ SatState::Unknown | SatState::Sat => None,
+ }
+ }
+
+ /// Generate a proof of unsatisfiability during solving.
+ ///
+ /// This needs to be called before any clauses are added.
+ pub fn write_proof(&mut self, target: impl io::Write + 'a, format: ProofFormat) {
+ assert!(
+ self.ctx.solver_state.formula_is_empty,
+ "called after clauses were added"
+ );
+ self.ctx.proof.write_proof(target, format);
+ }
+
+ /// Stop generating a proof of unsatisfiability.
+ ///
+ /// This also flushes internal buffers and closes the target file.
+ pub fn close_proof(&mut self) -> Result<(), SolverError> {
+ let mut ctx = self.ctx.into_partial_ref_mut();
+ proof::close_proof(ctx.borrow());
+ self.check_for_solver_error()
+ }
+
+ /// Generate and check a proof on the fly.
+ ///
+ /// This needs to be called before any clauses are added.
+ pub fn enable_self_checking(&mut self) {
+ assert!(
+ self.ctx.solver_state.formula_is_empty,
+ "called after clauses were added"
+ );
+ self.ctx.proof.begin_checking();
+ }
+
+ /// Generate a proof and process it using a [`ProofProcessor`].
+ ///
+ /// This implicitly enables self checking.
+ ///
+ /// This needs to be called before any clauses are added.
+ pub fn add_proof_processor(&mut self, processor: &'a mut dyn ProofProcessor) {
+ assert!(
+ self.ctx.solver_state.formula_is_empty,
+ "called after clauses were added"
+ );
+ self.ctx.proof.add_processor(processor);
+ }
+}
+
+impl<'a> Drop for Solver<'a> {
+ fn drop(&mut self) {
+ let _ = self.close_proof();
+ }
+}
+
+impl<'a> ExtendFormula for Solver<'a> {
+ /// Add a clause to the solver.
+ fn add_clause(&mut self, clause: &[Lit]) {
+ let mut ctx = self.ctx.into_partial_ref_mut();
+ load_clause(ctx.borrow(), clause);
+ }
+
+ /// Add a new variable to the solver.
+ fn new_var(&mut self) -> Var {
+ self.ctx.solver_state.formula_is_empty = false;
+ let mut ctx = self.ctx.into_partial_ref_mut();
+ variables::new_user_var(ctx.borrow())
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use proptest::prelude::*;
+
+ use varisat_checker::{CheckedProofStep, CheckerData};
+ use varisat_formula::{
+ cnf_formula, lits,
+ test::{sat_formula, sgen_unsat_formula},
+ };
+
+ use varisat_dimacs::write_dimacs;
+
+ fn enable_test_schedule(solver: &mut Solver) {
+ let mut config = SolverConfigUpdate::new();
+ config.reduce_locals_interval = Some(150);
+ config.reduce_mids_interval = Some(100);
+
+ solver.config(&config).unwrap();
+ }
+
+ #[test]
+ #[should_panic(expected = "solve() called after encountering an unrecoverable error")]
+ fn error_handling_proof_writing() {
+ let mut output_buffer = [0u8; 4];
+ let mut solver = Solver::new();
+ let proof_output = std::io::Cursor::new(&mut output_buffer[..]);
+
+ solver.write_proof(proof_output, ProofFormat::Varisat);
+
+ solver.add_formula(&cnf_formula![
+ -1, -2, -3; -1, -2, -4; -1, -2, -5; -1, -3, -4; -1, -3, -5; -1, -4, -5; -2, -3, -4;
+ -2, -3, -5; -2, -4, -5; -3, -4, -5; 1, 2, 5; 1, 2, 3; 1, 2, 4; 1, 5, 3; 1, 5, 4;
+ 1, 3, 4; 2, 5, 3; 2, 5, 4; 2, 3, 4; 5, 3, 4;
+ ]);
+
+ let result = solver.solve();
+
+ assert!(match result {
+ Err(SolverError::ProofIoError { .. }) => true,
+ _ => false,
+ });
+
+ let _ = solver.solve();
+ }
+
+ struct FailingProcessor;
+
+ impl ProofProcessor for FailingProcessor {
+ fn process_step(
+ &mut self,
+ _step: &CheckedProofStep,
+ _data: CheckerData,
+ ) -> Result<(), Error> {
+ anyhow::bail!("failing processor");
+ }
+ }
+ #[test]
+ #[should_panic(expected = "solve() called after encountering an unrecoverable error")]
+ fn error_handling_proof_processing() {
+ let mut processor = FailingProcessor;
+
+ let mut solver = Solver::new();
+
+ solver.add_proof_processor(&mut processor);
+
+ solver.add_formula(&cnf_formula![
+ -1, -2, -3; -1, -2, -4; -1, -2, -5; -1, -3, -4; -1, -3, -5; -1, -4, -5; -2, -3, -4;
+ -2, -3, -5; -2, -4, -5; -3, -4, -5; 1, 2, 5; 1, 2, 3; 1, 2, 4; 1, 5, 3; 1, 5, 4;
+ 1, 3, 4; 2, 5, 3; 2, 5, 4; 2, 3, 4; 5, 3, 4;
+ ]);
+
+ let result = solver.solve();
+
+ assert!(match result {
+ Err(SolverError::ProofProcessorError { cause }) => {
+ format!("{}", cause) == "failing processor"
+ }
+ _ => false,
+ });
+
+ let _ = solver.solve();
+ }
+
+ #[test]
+ #[should_panic(expected = "called after clauses were added")]
+ fn write_proof_too_late() {
+ let mut solver = Solver::new();
+ solver.add_clause(&lits![1, 2, 3]);
+ solver.write_proof(std::io::sink(), ProofFormat::Varisat);
+ }
+
+ #[test]
+ #[should_panic(expected = "called after clauses were added")]
+ fn add_proof_processor_too_late() {
+ let mut processor = FailingProcessor;
+
+ let mut solver = Solver::new();
+ solver.add_clause(&lits![1, 2, 3]);
+
+ solver.add_proof_processor(&mut processor);
+ }
+
+ #[test]
+ #[should_panic(expected = "called after clauses were added")]
+ fn enable_self_checking_too_late() {
+ let mut solver = Solver::new();
+ solver.add_clause(&lits![1, 2, 3]);
+
+ solver.enable_self_checking();
+ }
+
+ #[test]
+ fn self_check_duplicated_unit_clauses() {
+ let mut solver = Solver::new();
+
+ solver.enable_self_checking();
+
+ solver.add_formula(&cnf_formula![
+ 4;
+ 4;
+ ]);
+
+ assert_eq!(solver.solve().ok(), Some(true));
+ }
+
+ proptest! {
+ #[test]
+ fn sgen_unsat(
+ formula in sgen_unsat_formula(1..7usize),
+ test_schedule in proptest::bool::ANY,
+ ) {
+ let mut solver = Solver::new();
+
+ solver.add_formula(&formula);
+
+ if test_schedule {
+ enable_test_schedule(&mut solver);
+ }
+
+ prop_assert_eq!(solver.solve().ok(), Some(false));
+ }
+
+ #[test]
+ fn sgen_unsat_checked(
+ formula in sgen_unsat_formula(1..7usize),
+ test_schedule in proptest::bool::ANY,
+ ) {
+ let mut solver = Solver::new();
+
+ solver.enable_self_checking();
+
+ solver.add_formula(&formula);
+
+ if test_schedule {
+ enable_test_schedule(&mut solver);
+ }
+
+ prop_assert_eq!(solver.solve().ok(), Some(false));
+ }
+
+ #[test]
+ fn sat(
+ formula in sat_formula(4..20usize, 10..100usize, 0.05..0.2, 0.9..1.0),
+ test_schedule in proptest::bool::ANY,
+ ) {
+ let mut solver = Solver::new();
+
+ solver.add_formula(&formula);
+
+ if test_schedule {
+ enable_test_schedule(&mut solver);
+ }
+
+ prop_assert_eq!(solver.solve().ok(), Some(true));
+
+ let model = solver.model().unwrap();
+
+ for clause in formula.iter() {
+ prop_assert!(clause.iter().any(|lit| model.contains(lit)));
+ }
+ }
+
+ #[test]
+ fn sat_via_dimacs(formula in sat_formula(4..20usize, 10..100usize, 0.05..0.2, 0.9..1.0)) {
+ let mut solver = Solver::new();
+
+ let mut dimacs = vec![];
+
+ write_dimacs(&mut dimacs, &formula).unwrap();
+
+ solver.add_dimacs_cnf(&mut &dimacs[..]).unwrap();
+
+ prop_assert_eq!(solver.solve().ok(), Some(true));
+
+ let model = solver.model().unwrap();
+
+ for clause in formula.iter() {
+ prop_assert!(clause.iter().any(|lit| model.contains(lit)));
+ }
+ }
+
+ #[test]
+ fn sgen_unsat_incremental_clauses(formula in sgen_unsat_formula(1..7usize)) {
+ let mut solver = Solver::new();
+
+ let mut last_state = Some(true);
+
+ for clause in formula.iter() {
+ solver.add_clause(clause);
+
+ let state = solver.solve().ok();
+ if state != last_state {
+ prop_assert_eq!(state, Some(false));
+ prop_assert_eq!(last_state, Some(true));
+ last_state = state;
+ }
+ }
+
+ prop_assert_eq!(last_state, Some(false));
+ }
+ }
+}
diff --git a/vendor/varisat/src/state.rs b/vendor/varisat/src/state.rs
new file mode 100644
index 000000000..4fb636e66
--- /dev/null
+++ b/vendor/varisat/src/state.rs
@@ -0,0 +1,42 @@
+//! Miscellaneous solver state.
+use crate::solver::SolverError;
+
+/// Satisfiability state.
+#[derive(Copy, Clone, Eq, PartialEq, Debug)]
+pub enum SatState {
+ Unknown,
+ Sat,
+ Unsat,
+ UnsatUnderAssumptions,
+}
+
+impl Default for SatState {
+ fn default() -> SatState {
+ SatState::Unknown
+ }
+}
+
+/// Miscellaneous solver state.
+///
+/// Anything larger or any larger group of related state variables should be moved into a separate
+/// part of [`Context`](crate::context::Context).
+pub struct SolverState {
+ pub sat_state: SatState,
+ pub formula_is_empty: bool,
+ /// Whether solve was called at least once.
+ pub solver_invoked: bool,
+ pub state_is_invalid: bool,
+ pub solver_error: Option<SolverError>,
+}
+
+impl Default for SolverState {
+ fn default() -> SolverState {
+ SolverState {
+ sat_state: SatState::Unknown,
+ formula_is_empty: true,
+ solver_invoked: false,
+ state_is_invalid: false,
+ solver_error: None,
+ }
+ }
+}
diff --git a/vendor/varisat/src/tmp.rs b/vendor/varisat/src/tmp.rs
new file mode 100644
index 000000000..a822d7fe8
--- /dev/null
+++ b/vendor/varisat/src/tmp.rs
@@ -0,0 +1,35 @@
+//! Temporary data.
+use varisat_formula::Lit;
+
+/// Temporary data used by various parts of the solver.
+///
+/// Make sure to check any documented invariants when using this. Also make sure to check all
+/// existing users when adding invariants.
+#[derive(Default)]
+pub struct TmpData {
+ pub lits: Vec<Lit>,
+ pub lits_2: Vec<Lit>,
+}
+
+/// Temporary data that is automatically resized.
+///
+/// This contains buffers that are automatically resized when the variable count of the solver
+/// changes. They are also always kept in a clean state, so using them doesn't come with costs
+/// proportional to the number of variables.
+///
+/// Make sure to check any documented invariants when using this. Also make sure to check all
+/// existing users when adding invariants.
+#[derive(Default)]
+pub struct TmpFlags {
+ /// A boolean for each literal.
+ ///
+ /// Reset to all-false, keep size.
+ pub flags: Vec<bool>,
+}
+
+impl TmpFlags {
+ /// Update structures for a new variable count.
+ pub fn set_var_count(&mut self, count: usize) {
+ self.flags.resize(count * 2, false);
+ }
+}
diff --git a/vendor/varisat/src/unit_simplify.rs b/vendor/varisat/src/unit_simplify.rs
new file mode 100644
index 000000000..d70130b58
--- /dev/null
+++ b/vendor/varisat/src/unit_simplify.rs
@@ -0,0 +1,198 @@
+//! Simplification using unit clauses.
+
+use partial_ref::{partial, split_borrow, PartialRef};
+
+use varisat_formula::{Lit, Var};
+use varisat_internal_proof::{clause_hash, lit_hash, DeleteClauseProof, ProofStep};
+
+use crate::{
+ binary::simplify_binary,
+ clause::db::filter_clauses,
+ context::{parts::*, Context},
+ proof,
+ prop::{enqueue_assignment, Reason},
+ variables,
+};
+
+/// Remove satisfied clauses and false literals.
+pub fn prove_units<'a>(
+ mut ctx: partial!(
+ Context<'a>,
+ mut ImplGraphP,
+ mut ProofP<'a>,
+ mut SolverStateP,
+ mut TrailP,
+ AssignmentP,
+ ClauseAllocP,
+ VariablesP,
+ ),
+) -> bool {
+ let mut new_unit = false;
+
+ if ctx.part(TrailP).current_level() == 0 {
+ let (impl_graph, mut ctx) = ctx.split_part_mut(ImplGraphP);
+
+ let mut unit_proofs = vec![];
+
+ let (trail, mut ctx) = ctx.split_part_mut(TrailP);
+
+ for &lit in trail.trail() {
+ new_unit = true;
+ let ctx_lits = ctx.borrow();
+ let reason = impl_graph.reason(lit.var());
+ if !reason.is_unit() {
+ let lits = impl_graph.reason(lit.var()).lits(&ctx_lits);
+ let hash = clause_hash(lits) ^ lit_hash(lit);
+
+ unit_proofs.push((lit, hash));
+ }
+
+ impl_graph.update_removed_unit(lit.var());
+ }
+
+ trail.clear();
+
+ if !unit_proofs.is_empty() {
+ proof::add_step(
+ ctx.borrow(),
+ true,
+ &ProofStep::UnitClauses {
+ units: &unit_proofs,
+ },
+ );
+ }
+ }
+
+ new_unit
+}
+
+/// Put a removed unit back onto the trail.
+pub fn resurrect_unit<'a>(
+ mut ctx: partial!(Context<'a>, mut AssignmentP, mut ImplGraphP, mut TrailP),
+ lit: Lit,
+) {
+ if ctx.part(ImplGraphP).is_removed_unit(lit.var()) {
+ debug_assert!(ctx.part(AssignmentP).lit_is_true(lit));
+ ctx.part_mut(AssignmentP).unassign_var(lit.var());
+
+ // Because we always enqueue with Reason::Unit this will not cause a unit clause to be
+ // proven in `prove_units`.
+ enqueue_assignment(ctx.borrow(), lit, Reason::Unit);
+ }
+}
+
+/// Remove satisfied clauses and false literals.
+pub fn unit_simplify<'a>(
+ mut ctx: partial!(
+ Context<'a>,
+ mut AssignmentP,
+ mut BinaryClausesP,
+ mut ClauseAllocP,
+ mut ClauseDbP,
+ mut ProofP<'a>,
+ mut SolverStateP,
+ mut VariablesP,
+ mut WatchlistsP,
+ mut VsidsP,
+ AssumptionsP,
+ ),
+) {
+ simplify_binary(ctx.borrow());
+
+ let (assignment, mut ctx) = ctx.split_part(AssignmentP);
+
+ let mut new_lits = vec![];
+ {
+ split_borrow!(proof_ctx = &(mut ProofP, mut SolverStateP, VariablesP) ctx);
+ let (ctx_2, mut ctx) = ctx.split_borrow();
+
+ filter_clauses(ctx_2, |alloc, cref| {
+ let clause = alloc.clause_mut(cref);
+ let redundant = clause.header().redundant();
+ new_lits.clear();
+ for &lit in clause.lits() {
+ match assignment.lit_value(lit) {
+ None => new_lits.push(lit),
+ Some(true) => {
+ proof::add_step(
+ proof_ctx.borrow(),
+ true,
+ &ProofStep::DeleteClause {
+ clause: clause.lits(),
+ proof: if redundant {
+ DeleteClauseProof::Redundant
+ } else {
+ DeleteClauseProof::Satisfied
+ },
+ },
+ );
+ return false;
+ }
+ Some(false) => (),
+ }
+ }
+ if new_lits.len() < clause.lits().len() {
+ if proof_ctx.part(ProofP).is_active() {
+ let hash = [clause_hash(clause.lits())];
+ proof::add_step(
+ proof_ctx.borrow(),
+ true,
+ &ProofStep::AtClause {
+ redundant: redundant && new_lits.len() > 2,
+ clause: &new_lits,
+ propagation_hashes: &hash[..],
+ },
+ );
+ proof::add_step(
+ proof_ctx.borrow(),
+ true,
+ &ProofStep::DeleteClause {
+ clause: clause.lits(),
+ proof: if redundant {
+ DeleteClauseProof::Redundant
+ } else {
+ DeleteClauseProof::Simplified
+ },
+ },
+ );
+ }
+
+ match new_lits[..] {
+ // Cannot have empty or unit clauses after full propagation. An empty clause would
+ // have been a conflict and a unit clause must be satisfied and thus would have been
+ // dropped above.
+ [] | [_] => unreachable!(),
+ [lit_0, lit_1] => {
+ ctx.part_mut(BinaryClausesP)
+ .add_binary_clause([lit_0, lit_1]);
+ false
+ }
+ ref lits => {
+ clause.lits_mut()[..lits.len()].copy_from_slice(lits);
+ clause.header_mut().set_len(lits.len());
+ true
+ }
+ }
+ } else {
+ true
+ }
+ });
+ }
+
+ // TODO detect vars that became isolated without being having a unit clause
+
+ for (var_index, &value) in assignment.assignment().iter().enumerate() {
+ let var = Var::from_index(var_index);
+ if !ctx.part(VariablesP).solver_var_present(var) {
+ continue;
+ }
+ let var_data = ctx.part_mut(VariablesP).var_data_solver_mut(var);
+ if let Some(value) = value {
+ var_data.unit = Some(value);
+ var_data.isolated = true;
+ }
+ if var_data.isolated && !var_data.assumed {
+ variables::remove_solver_var(ctx.borrow(), var);
+ }
+ }
+}
diff --git a/vendor/varisat/src/variables.rs b/vendor/varisat/src/variables.rs
new file mode 100644
index 000000000..ec997430e
--- /dev/null
+++ b/vendor/varisat/src/variables.rs
@@ -0,0 +1,669 @@
+//! Variable mapping and metadata.
+
+use rustc_hash::FxHashSet as HashSet;
+
+use partial_ref::{partial, PartialRef};
+
+use varisat_formula::{Lit, Var};
+use varisat_internal_proof::ProofStep;
+
+use crate::{
+ context::{parts::*, set_var_count, Context},
+ decision, proof,
+};
+
+pub mod data;
+pub mod var_map;
+
+use data::{SamplingMode, VarData};
+use var_map::{VarBiMap, VarBiMapMut, VarMap};
+
+/// Variable mapping and metadata.
+pub struct Variables {
+ /// Bidirectional mapping from user variables to global variables.
+ ///
+ /// Initially this is the identity mapping. This ensures that in the non-assumptions setting the
+ /// map from used user variables to global variables is the identity. This is a requirement for
+ /// generating proofs in non-native formats. Those proofs are not aware of variable renaming,
+ /// but are restricted to the non-incremental setting, so this works out.
+ ///
+ /// This is also requried for native proofs, as they assume that the mapping during the initial
+ /// load is the identity.
+ global_from_user: VarBiMap,
+ /// Bidirectional mapping from global variables to user variables.
+ ///
+ /// This starts with the empty mapping, so only used variables are allocated.
+ solver_from_global: VarBiMap,
+ /// User variables that were explicitly hidden by the user.
+ user_freelist: HashSet<Var>,
+ /// Global variables that can be recycled without increasing the global_watermark.
+ global_freelist: HashSet<Var>,
+ /// Solver variables that are unused and can be recycled.
+ solver_freelist: HashSet<Var>,
+
+ /// Variable metadata.
+ ///
+ /// Indexed by global variable indices.
+ var_data: Vec<VarData>,
+}
+
+impl Default for Variables {
+ fn default() -> Variables {
+ Variables {
+ global_from_user: VarBiMap::default(),
+ solver_from_global: VarBiMap::default(),
+ user_freelist: Default::default(),
+ global_freelist: Default::default(),
+ solver_freelist: Default::default(),
+
+ var_data: vec![],
+ }
+ }
+}
+
+impl Variables {
+ /// Number of allocated solver variables.
+ pub fn solver_watermark(&self) -> usize {
+ self.global_from_solver().watermark()
+ }
+
+ /// Number of allocated global variables.
+ pub fn global_watermark(&self) -> usize {
+ self.var_data.len()
+ }
+
+ /// Number of allocated global variables.
+ pub fn user_watermark(&self) -> usize {
+ self.global_from_user().watermark()
+ }
+
+ /// Iterator over all user variables that are in use.
+ pub fn user_var_iter<'a>(&'a self) -> impl Iterator<Item = Var> + 'a {
+ let global_from_user = self.global_from_user.fwd();
+ (0..self.global_from_user().watermark())
+ .map(Var::from_index)
+ .filter(move |&user_var| global_from_user.get(user_var).is_some())
+ }
+
+ /// Iterator over all global variables that are in use.
+ pub fn global_var_iter<'a>(&'a self) -> impl Iterator<Item = Var> + 'a {
+ (0..self.global_watermark())
+ .map(Var::from_index)
+ .filter(move |&global_var| !self.var_data[global_var.index()].deleted)
+ }
+
+ /// The user to global mapping.
+ pub fn global_from_user(&self) -> &VarMap {
+ &self.global_from_user.fwd()
+ }
+
+ /// Mutable user to global mapping.
+ pub fn global_from_user_mut(&mut self) -> VarBiMapMut {
+ self.global_from_user.fwd_mut()
+ }
+
+ /// The global to solver mapping.
+ pub fn solver_from_global(&self) -> &VarMap {
+ &self.solver_from_global.fwd()
+ }
+
+ /// Mutable global to solver mapping.
+ pub fn solver_from_global_mut(&mut self) -> VarBiMapMut {
+ self.solver_from_global.fwd_mut()
+ }
+
+ /// The global to user mapping.
+ pub fn user_from_global(&self) -> &VarMap {
+ &self.global_from_user.bwd()
+ }
+
+ /// Mutable global to user mapping.
+ pub fn user_from_global_mut(&mut self) -> VarBiMapMut {
+ self.global_from_user.bwd_mut()
+ }
+
+ /// The solver to global mapping.
+ pub fn global_from_solver(&self) -> &VarMap {
+ &self.solver_from_global.bwd()
+ }
+
+ /// Mutable solver to global mapping.
+ pub fn global_from_solver_mut(&mut self) -> VarBiMapMut {
+ self.solver_from_global.bwd_mut()
+ }
+
+ /// Get an existing solver var for a user var.
+ pub fn existing_solver_from_user(&self, user: Var) -> Var {
+ let global = self
+ .global_from_user()
+ .get(user)
+ .expect("no existing global var for user var");
+ let solver = self
+ .solver_from_global()
+ .get(global)
+ .expect("no existing solver var for global var");
+ solver
+ }
+
+ /// Get an existing user var from a solver var.
+ pub fn existing_user_from_solver(&self, solver: Var) -> Var {
+ let global = self
+ .global_from_solver()
+ .get(solver)
+ .expect("no existing global var for solver var");
+ let user = self
+ .user_from_global()
+ .get(global)
+ .expect("no existing user var for global var");
+ user
+ }
+
+ /// Mutable reference to the var data for a global variable.
+ pub fn var_data_global_mut(&mut self, global: Var) -> &mut VarData {
+ if self.var_data.len() <= global.index() {
+ self.var_data.resize(global.index() + 1, VarData::default());
+ }
+ &mut self.var_data[global.index()]
+ }
+
+ /// Mutable reference to the var data for a solver variable.
+ pub fn var_data_solver_mut(&mut self, solver: Var) -> &mut VarData {
+ let global = self
+ .global_from_solver()
+ .get(solver)
+ .expect("no existing global var for solver var");
+ &mut self.var_data[global.index()]
+ }
+
+ /// Var data for a global variable.
+ pub fn var_data_global(&self, global: Var) -> &VarData {
+ &self.var_data[global.index()]
+ }
+
+ /// Check if a solver var is mapped to a global var
+ pub fn solver_var_present(&self, solver: Var) -> bool {
+ self.global_from_solver().get(solver).is_some()
+ }
+
+ /// Get an unmapped global variable.
+ pub fn next_unmapped_global(&self) -> Var {
+ self.global_freelist
+ .iter()
+ .next()
+ .cloned()
+ .unwrap_or_else(|| Var::from_index(self.global_watermark()))
+ }
+
+ /// Get an unmapped global variable.
+ pub fn next_unmapped_solver(&self) -> Var {
+ self.solver_freelist
+ .iter()
+ .next()
+ .cloned()
+ .unwrap_or_else(|| Var::from_index(self.solver_watermark()))
+ }
+
+ /// Get an unmapped user variable.
+ pub fn next_unmapped_user(&self) -> Var {
+ self.user_freelist
+ .iter()
+ .next()
+ .cloned()
+ .unwrap_or_else(|| Var::from_index(self.user_watermark()))
+ }
+}
+
+/// Maps a user variable into a global variable.
+///
+/// If no matching global variable exists a new global variable is allocated.
+pub fn global_from_user<'a>(
+ mut ctx: partial!(Context<'a>, mut ProofP<'a>, mut SolverStateP, mut VariablesP),
+ user: Var,
+ require_sampling: bool,
+) -> Var {
+ let variables = ctx.part_mut(VariablesP);
+
+ if user.index() > variables.user_watermark() {
+ // TODO use a batch proof step for this?
+ for index in variables.user_watermark()..user.index() {
+ global_from_user(ctx.borrow(), Var::from_index(index), false);
+ }
+ }
+
+ let variables = ctx.part_mut(VariablesP);
+
+ match variables.global_from_user().get(user) {
+ Some(global) => {
+ if require_sampling
+ && variables.var_data[global.index()].sampling_mode != SamplingMode::Sample
+ {
+ panic!("witness variables cannot be constrained");
+ }
+ global
+ }
+ None => {
+ // Can we add an identity mapping?
+ let global = match variables.var_data.get(user.index()) {
+ Some(var_data) if var_data.deleted => user,
+ None => user,
+ _ => variables.next_unmapped_global(),
+ };
+
+ *variables.var_data_global_mut(global) = VarData::user_default();
+
+ variables.global_from_user_mut().insert(global, user);
+ variables.global_freelist.remove(&global);
+ variables.user_freelist.remove(&user);
+
+ proof::add_step(
+ ctx.borrow(),
+ false,
+ &ProofStep::UserVarName {
+ global,
+ user: Some(user),
+ },
+ );
+
+ global
+ }
+ }
+}
+
+/// Maps an existing global variable to a solver variable.
+///
+/// If no matching solver variable exists a new one is allocated.
+pub fn solver_from_global<'a>(
+ mut ctx: partial!(
+ Context<'a>,
+ mut AnalyzeConflictP,
+ mut AssignmentP,
+ mut BinaryClausesP,
+ mut ImplGraphP,
+ mut ProofP<'a>,
+ mut SolverStateP,
+ mut TmpFlagsP,
+ mut VariablesP,
+ mut VsidsP,
+ mut WatchlistsP,
+ ),
+ global: Var,
+) -> Var {
+ let variables = ctx.part_mut(VariablesP);
+
+ debug_assert!(!variables.var_data[global.index()].deleted);
+
+ match variables.solver_from_global().get(global) {
+ Some(solver) => solver,
+ None => {
+ let solver = variables.next_unmapped_solver();
+
+ let old_watermark = variables.global_from_solver().watermark();
+
+ variables.solver_from_global_mut().insert(solver, global);
+ variables.solver_freelist.remove(&solver);
+
+ let new_watermark = variables.global_from_solver().watermark();
+
+ if new_watermark > old_watermark {
+ set_var_count(ctx.borrow(), new_watermark);
+ }
+
+ initialize_solver_var(ctx.borrow(), solver, global);
+
+ proof::add_step(
+ ctx.borrow(),
+ false,
+ &ProofStep::SolverVarName {
+ global,
+ solver: Some(solver),
+ },
+ );
+
+ solver
+ }
+ }
+}
+
+/// Maps a user variable to a solver variable.
+///
+/// Allocates global and solver variables as requried.
+pub fn solver_from_user<'a>(
+ mut ctx: partial!(
+ Context<'a>,
+ mut AnalyzeConflictP,
+ mut AssignmentP,
+ mut BinaryClausesP,
+ mut ImplGraphP,
+ mut ProofP<'a>,
+ mut SolverStateP,
+ mut TmpFlagsP,
+ mut VariablesP,
+ mut VsidsP,
+ mut WatchlistsP,
+ ),
+ user: Var,
+ require_sampling: bool,
+) -> Var {
+ let global = global_from_user(ctx.borrow(), user, require_sampling);
+ solver_from_global(ctx.borrow(), global)
+}
+
+/// Allocates a currently unused user variable.
+///
+/// This is either a user variable above any user variable used so far, or a user variable that was
+/// previously hidden by the user.
+pub fn new_user_var<'a>(
+ mut ctx: partial!(Context<'a>, mut ProofP<'a>, mut SolverStateP, mut VariablesP),
+) -> Var {
+ let variables = ctx.part_mut(VariablesP);
+ let user_var = variables.next_unmapped_user();
+ global_from_user(ctx.borrow(), user_var, false);
+ user_var
+}
+
+/// Maps a slice of user lits to solver lits using [`solver_from_user`].
+pub fn solver_from_user_lits<'a>(
+ mut ctx: partial!(
+ Context<'a>,
+ mut AnalyzeConflictP,
+ mut AssignmentP,
+ mut BinaryClausesP,
+ mut ImplGraphP,
+ mut ProofP<'a>,
+ mut SolverStateP,
+ mut TmpFlagsP,
+ mut VariablesP,
+ mut VsidsP,
+ mut WatchlistsP,
+ ),
+ solver_lits: &mut Vec<Lit>,
+ user_lits: &[Lit],
+ require_sampling: bool,
+) {
+ solver_lits.clear();
+ solver_lits.extend(user_lits.iter().map(|user_lit| {
+ user_lit.map_var(|user_var| solver_from_user(ctx.borrow(), user_var, require_sampling))
+ }))
+}
+
+/// Changes the sampling mode of a global variable.
+///
+/// If the mode is changed to hidden, an existing user mapping is automatically removed.
+///
+/// If the mode is changed from hidden, a new user mapping is allocated and the user variable is
+/// returned.
+pub fn set_sampling_mode<'a>(
+ mut ctx: partial!(Context<'a>, mut ProofP<'a>, mut SolverStateP, mut VariablesP),
+ global: Var,
+ mode: SamplingMode,
+) -> Option<Var> {
+ let variables = ctx.part_mut(VariablesP);
+
+ let var_data = &mut variables.var_data[global.index()];
+
+ assert!(!var_data.deleted);
+
+ if var_data.assumed {
+ panic!("cannot change sampling mode of assumption variable")
+ }
+
+ let previous_mode = var_data.sampling_mode;
+
+ if previous_mode == mode {
+ return None;
+ }
+
+ var_data.sampling_mode = mode;
+
+ let mut result = None;
+
+ if mode != SamplingMode::Hide {
+ proof::add_step(
+ ctx.borrow(),
+ false,
+ &ProofStep::ChangeSamplingMode {
+ var: global,
+ sample: mode == SamplingMode::Sample,
+ },
+ );
+ }
+ let variables = ctx.part_mut(VariablesP);
+
+ if previous_mode == SamplingMode::Hide {
+ let user = variables.next_unmapped_user();
+ variables.user_from_global_mut().insert(user, global);
+ variables.user_freelist.remove(&user);
+
+ proof::add_step(
+ ctx.borrow(),
+ false,
+ &ProofStep::UserVarName {
+ global,
+ user: Some(user),
+ },
+ );
+
+ result = Some(user);
+ } else if mode == SamplingMode::Hide {
+ if let Some(user) = variables.user_from_global_mut().remove(global) {
+ variables.user_freelist.insert(user);
+ }
+
+ proof::add_step(
+ ctx.borrow(),
+ false,
+ &ProofStep::UserVarName { global, user: None },
+ );
+
+ delete_global_if_unused(ctx.borrow(), global);
+ }
+
+ result
+}
+
+/// Turns all hidden vars into witness vars and returns them.
+pub fn observe_internal_vars<'a>(
+ mut ctx: partial!(Context<'a>, mut ProofP<'a>, mut SolverStateP, mut VariablesP),
+) -> Vec<Var> {
+ let mut result = vec![];
+ let mut variables = ctx.part_mut(VariablesP);
+ for global_index in 0..variables.global_watermark() {
+ let global = Var::from_index(global_index);
+ let var_data = &variables.var_data[global.index()];
+ if !var_data.deleted && var_data.sampling_mode == SamplingMode::Hide {
+ let user = set_sampling_mode(ctx.borrow(), global, SamplingMode::Witness).unwrap();
+ result.push(user);
+ variables = ctx.part_mut(VariablesP);
+ }
+ }
+ result
+}
+
+/// Initialize a newly allocated solver variable
+pub fn initialize_solver_var(
+ mut ctx: partial!(
+ Context,
+ mut AssignmentP,
+ mut ImplGraphP,
+ mut VsidsP,
+ VariablesP
+ ),
+ solver: Var,
+ global: Var,
+) {
+ let (variables, mut ctx) = ctx.split_part(VariablesP);
+ let data = &variables.var_data[global.index()];
+
+ // This recovers the state of a variable that has a known value and was already propagated. This
+ // is important so that when new clauses containing this variable are added, load_clause knows
+ // to reenqueue the assignment.
+ ctx.part_mut(AssignmentP).set_var(solver, data.unit);
+ if data.unit.is_some() {
+ ctx.part_mut(ImplGraphP).update_removed_unit(solver);
+ }
+ decision::initialize_var(ctx.borrow(), solver, data.unit.is_none());
+
+ // TODO unhiding beyond unit clauses
+}
+
+/// Remove a solver var.
+///
+/// If the variable is isolated and hidden, the global variable is also removed.
+pub fn remove_solver_var<'a>(
+ mut ctx: partial!(Context<'a>, mut ProofP<'a>, mut SolverStateP, mut VariablesP, mut VsidsP),
+ solver: Var,
+) {
+ decision::remove_var(ctx.borrow(), solver);
+
+ let variables = ctx.part_mut(VariablesP);
+
+ let global = variables
+ .global_from_solver_mut()
+ .remove(solver)
+ .expect("no existing global var for solver var");
+
+ variables.solver_freelist.insert(solver);
+
+ proof::add_step(
+ ctx.borrow(),
+ false,
+ &ProofStep::SolverVarName {
+ global,
+ solver: None,
+ },
+ );
+
+ delete_global_if_unused(ctx.borrow(), global);
+}
+
+/// Delete a global variable if it is unused
+fn delete_global_if_unused<'a>(
+ mut ctx: partial!(Context<'a>, mut ProofP<'a>, mut SolverStateP, mut VariablesP),
+ global: Var,
+) {
+ let variables = ctx.part_mut(VariablesP);
+
+ if variables.user_from_global().get(global).is_some() {
+ return;
+ }
+
+ if variables.solver_from_global().get(global).is_some() {
+ return;
+ }
+
+ let data = &mut variables.var_data[global.index()];
+
+ assert!(data.sampling_mode == SamplingMode::Hide);
+
+ if !data.isolated {
+ return;
+ }
+
+ data.deleted = true;
+
+ proof::add_step(ctx.borrow(), false, &ProofStep::DeleteVar { var: global });
+
+ // TODO deletion of unit clauses isn't supported by most DRAT checkers, needs an extra
+ // variable mapping instead.
+
+ ctx.part_mut(VariablesP).global_freelist.insert(global);
+}
+
+#[cfg(test)]
+mod tests {
+ use proptest::{collection, prelude::*};
+
+ use varisat_formula::{
+ test::{sat_formula, sgen_unsat_formula},
+ ExtendFormula, Var,
+ };
+
+ use crate::solver::Solver;
+
+ #[test]
+ #[should_panic(expected = "cannot change sampling mode of assumption variable")]
+ fn cannot_hide_assumed_vars() {
+ let mut solver = Solver::new();
+
+ let (x, y, z) = solver.new_lits();
+
+ solver.assume(&[x, y, z]);
+ solver.hide_var(x.var());
+ }
+
+ #[test]
+ #[should_panic(expected = "cannot change sampling mode of assumption variable")]
+ fn cannot_witness_assumed_vars() {
+ let mut solver = Solver::new();
+
+ let (x, y, z) = solver.new_lits();
+
+ solver.assume(&[x, y, z]);
+ solver.witness_var(x.var());
+ }
+
+ proptest! {
+ #[test]
+ fn sgen_unsat_hidden_with_sat(
+ unsat_formula in sgen_unsat_formula(1..7usize),
+ sat_formula in sat_formula(4..20usize, 10..100usize, 0.05..0.2, 0.9..1.0),
+ ) {
+ use std::cmp::max;
+
+ let mut solver = Solver::new();
+
+ solver.enable_self_checking();
+
+ let cond = Var::from_index(max(unsat_formula.var_count(), sat_formula.var_count()));
+
+ let mut tmp = vec![];
+
+ for clause in unsat_formula.iter() {
+ tmp.clear();
+ tmp.extend_from_slice(&clause);
+ tmp.push(cond.negative());
+ solver.add_clause(&tmp);
+ }
+
+ for i in 0..unsat_formula.var_count() {
+ solver.hide_var(Var::from_index(i));
+ }
+
+ solver.add_formula(&sat_formula);
+
+ prop_assert_eq!(solver.solve().ok(), Some(true));
+
+ solver.add_clause(&[cond.positive()]);
+
+ prop_assert_eq!(solver.solve().ok(), Some(false));
+ }
+
+ #[test]
+ fn sgen_sat_many_hidden_observe_internal(
+ sat_formulas in collection::vec(
+ sat_formula(4..20usize, 10..100usize, 0.05..0.2, 0.9..1.0),
+ 1..10,
+ )
+ ) {
+ let mut solver = Solver::new();
+
+ solver.enable_self_checking();
+
+ for formula in sat_formulas {
+ solver.add_formula(&formula);
+
+ let new_vars = solver.observe_internal_vars();
+
+ for i in 0..formula.var_count() {
+ solver.hide_var(Var::from_index(i));
+ }
+
+ for var in new_vars {
+ solver.hide_var(var);
+ }
+ }
+
+ prop_assert_eq!(solver.solve().ok(), Some(true));
+ }
+ }
+}
diff --git a/vendor/varisat/src/variables/data.rs b/vendor/varisat/src/variables/data.rs
new file mode 100644
index 000000000..ea29e6280
--- /dev/null
+++ b/vendor/varisat/src/variables/data.rs
@@ -0,0 +1,70 @@
+//! Data associated with variables.
+
+/// Variable sampling mode.
+///
+/// This partitions all variables into three sets. Using these partitions it is possible to specify
+/// equivalence vs. equisatisfiability per variable. Let V be the set of all variables, S, W and H
+/// the sets of Sampling, Witness and Hidden variables. Let F be the input formula and G be the
+/// current formula. The following invariants are upheld:
+///
+/// * The formulas are always equisatisfiable:
+/// (∃ V) G ⇔ (∃ V) F
+/// * Restricted to the sampling variables they are equivalent:
+/// (∀ S) ((∃ V∖S) G ⇔ (∃ V∖S) F)
+/// * Restricted to the non-hidden variables the input formula is implied:
+/// (∀ V∖H) ((∃ H) G ⇒ (∃ H) F)
+///
+/// This ensures that the solver will be able to find and extend each satisfiable assignment of the
+/// sampling variables to an assignment that covers the witness variables.
+
+#[derive(Copy, Clone, PartialEq, Eq, Debug)]
+pub enum SamplingMode {
+ Sample,
+ Witness,
+ Hide,
+}
+
+/// Data associated with variables.
+///
+/// This is available for each _global_ variable, even if eliminated within the solver.
+#[derive(Clone)]
+pub struct VarData {
+ /// The variable's sampling mode.
+ pub sampling_mode: SamplingMode,
+ /// Whether the variable is forced by a unit clause.
+ ///
+ /// This is used to remember unit clauses after they are removed from the solver.
+ pub unit: Option<bool>,
+ /// True if there are no clauses containing this variable and other variables.
+ ///
+ /// This is the case if there are no clauses containing this variable or just a unit clause with
+ /// this variable.
+ pub isolated: bool,
+ /// True if this variable is part of the current assumptions.
+ pub assumed: bool,
+ /// Whether the global variable was deleted.
+ pub deleted: bool,
+}
+
+impl Default for VarData {
+ fn default() -> VarData {
+ VarData {
+ sampling_mode: SamplingMode::Hide,
+ unit: None,
+ isolated: true,
+ assumed: false,
+ deleted: true,
+ }
+ }
+}
+
+impl VarData {
+ /// Default variable data for a new user variable.
+ pub fn user_default() -> VarData {
+ VarData {
+ sampling_mode: SamplingMode::Sample,
+ deleted: false,
+ ..VarData::default()
+ }
+ }
+}
diff --git a/vendor/varisat/src/variables/var_map.rs b/vendor/varisat/src/variables/var_map.rs
new file mode 100644
index 000000000..730a6bf52
--- /dev/null
+++ b/vendor/varisat/src/variables/var_map.rs
@@ -0,0 +1,124 @@
+//! Mappings between variable names
+
+use varisat_formula::{lit::LitIdx, Var};
+
+const NO_VAR_IDX: LitIdx = Var::max_count() as LitIdx;
+
+/// A mapping from variables to variables.
+#[derive(Default)]
+pub struct VarMap {
+ mapping: Vec<LitIdx>,
+}
+
+impl VarMap {
+ /// Look up a variable in the mapping
+ pub fn get(&self, from: Var) -> Option<Var> {
+ match self.mapping.get(from.index()).cloned() {
+ Some(index) if index == NO_VAR_IDX => None,
+ Some(index) => Some(Var::from_index(index as usize)),
+ None => None,
+ }
+ }
+
+ /// Insert a new mapping.
+ ///
+ /// Note that the parameters are reversed from the usual order, to match the naming convention
+ /// used for maps.
+ ///
+ /// This has the precondition that `from` is not mapped.
+ pub fn insert(&mut self, into: Var, from: Var) {
+ if self.mapping.len() <= from.index() {
+ self.mapping.resize(from.index() + 1, NO_VAR_IDX);
+ }
+ debug_assert_eq!(self.mapping[from.index()], NO_VAR_IDX);
+ self.mapping[from.index()] = into.index() as LitIdx;
+ }
+
+ /// Remove a mapping.
+ ///
+ /// Does nothing if `from` is not mapped.
+ pub fn remove(&mut self, from: Var) {
+ if self.mapping.len() > from.index() {
+ self.mapping[from.index()] = NO_VAR_IDX;
+ }
+ }
+
+ /// One above the largest index that is mapped.
+ pub fn watermark(&self) -> usize {
+ self.mapping.len()
+ }
+}
+
+/// A bidirectional mapping between variables.
+///
+/// This is initialized with the identity mapping over all variables. It is possible to remove
+/// variables from this mapping on both sides.
+#[derive(Default)]
+pub struct VarBiMap {
+ fwd: VarMap,
+ bwd: VarMap,
+}
+
+impl VarBiMap {
+ /// Access the forward mapping.
+ pub fn fwd(&self) -> &VarMap {
+ &self.fwd
+ }
+
+ /// Access the backward mapping.
+ pub fn bwd(&self) -> &VarMap {
+ &self.bwd
+ }
+
+ /// Mutate the mapping in forward direction.
+ pub fn fwd_mut(&mut self) -> VarBiMapMut {
+ VarBiMapMut {
+ fwd: &mut self.fwd,
+ bwd: &mut self.bwd,
+ }
+ }
+
+ /// Mutate the mapping in backward direction.
+ pub fn bwd_mut(&mut self) -> VarBiMapMut {
+ VarBiMapMut {
+ fwd: &mut self.bwd,
+ bwd: &mut self.fwd,
+ }
+ }
+}
+
+/// Mutable view of a [`VarBiMap`].
+///
+/// Helper so `VarBiMap` mutating routines can work in both directions.
+pub struct VarBiMapMut<'a> {
+ fwd: &'a mut VarMap,
+ bwd: &'a mut VarMap,
+}
+
+impl<'a> VarBiMapMut<'a> {
+ /// Insert a new mapping.
+ ///
+ /// Note that the parameters are reversed from the usual order, to match the naming convention
+ /// used for maps.
+ ///
+ /// This has the precondition that `into` and `from` are not mapped.
+ pub fn insert(&mut self, into: Var, from: Var) {
+ self.fwd.insert(into, from);
+ self.bwd.insert(from, into);
+ }
+
+ /// Remove a mapping.
+ ///
+ /// Does nothing if `from` is not mapped.
+ ///
+ /// Returns the existing mapping if it was present.
+ pub fn remove(&mut self, from: Var) -> Option<Var> {
+ if let Some(into) = self.fwd.get(from) {
+ self.fwd.remove(from);
+ self.bwd.remove(into);
+ Some(into)
+ } else {
+ None
+ }
+ }
+}
diff --git a/vendor/varisat/tests/checker.rs b/vendor/varisat/tests/checker.rs
new file mode 100644
index 000000000..1a7fd428f
--- /dev/null
+++ b/vendor/varisat/tests/checker.rs
@@ -0,0 +1,193 @@
+//! Checker tests, that require a Solver instance, so they cannot be unit tests of the
+//! varisat-checker crate.
+
+use anyhow::Error;
+
+use proptest::prelude::*;
+
+use varisat::{
+ checker::{Checker, ProofTranscriptProcessor, ProofTranscriptStep},
+ dimacs::write_dimacs,
+ CnfFormula, ExtendFormula, Lit, ProofFormat, Solver, Var,
+};
+use varisat_formula::test::{conditional_pigeon_hole, sgen_unsat_formula};
+
+proptest! {
+    #[test]
+    fn checked_unsat_via_dimacs(formula in sgen_unsat_formula(1..7usize)) {
+        // Serialize the formula to DIMACS and record a proof while solving it.
+        let mut dimacs = vec![];
+        let mut proof = vec![];
+
+        let mut solver = Solver::new();
+
+        write_dimacs(&mut dimacs, &formula).unwrap();
+
+        solver.write_proof(&mut proof, ProofFormat::Varisat);
+
+        solver.add_dimacs_cnf(&mut &dimacs[..]).unwrap();
+
+        prop_assert_eq!(solver.solve().ok(), Some(false));
+
+        solver.close_proof().map_err(|e| TestCaseError::fail(e.to_string()))?;
+
+        drop(solver);
+
+        // Replay the recorded proof against the same DIMACS input.
+        let mut checker = Checker::new();
+
+        checker.add_dimacs_cnf(&mut &dimacs[..]).unwrap();
+
+        checker.check_proof(&mut &proof[..]).unwrap();
+    }
+
+    #[test]
+    fn sgen_checked_unsat_incremental_clauses(formula in sgen_unsat_formula(1..7usize)) {
+        let mut proof = vec![];
+
+        let mut solver = Solver::new();
+        solver.write_proof(&mut proof, ProofFormat::Varisat);
+
+        // Count of solve calls that returned sat; each should emit one model step in the proof.
+        let mut expected_models = 0;
+
+        // Add all clauses incrementally so they are recorded in the proof
+        solver.solve().unwrap();
+        expected_models += 1;
+
+        let mut last_state = Some(true);
+
+        for clause in formula.iter() {
+            solver.add_clause(clause);
+
+            let state = solver.solve().ok();
+            // The result may flip from sat to unsat at most once as clauses are added.
+            if state != last_state {
+                prop_assert_eq!(state, Some(false));
+                prop_assert_eq!(last_state, Some(true));
+                last_state = state;
+            }
+            if state == Some(true) {
+                expected_models += 1;
+            }
+        }
+
+        prop_assert_eq!(last_state, Some(false));
+
+        drop(solver);
+
+        // Transcript processor that counts model steps and remembers whether an unsat
+        // step was seen.
+        #[derive(Default)]
+        struct FoundModels {
+            counter: usize,
+            unsat: bool,
+        }
+
+        impl ProofTranscriptProcessor for FoundModels {
+            fn process_step(
+                &mut self,
+                step: &ProofTranscriptStep,
+            ) -> Result<(), Error> {
+                if let ProofTranscriptStep::Model { .. } = step {
+                    self.counter += 1;
+                } else if let ProofTranscriptStep::Unsat = step {
+                    self.unsat = true;
+                }
+                Ok(())
+            }
+        }
+
+        let mut found_models = FoundModels::default();
+        let mut checker = Checker::new();
+        checker.add_transcript(&mut found_models);
+        checker.check_proof(&mut &proof[..]).unwrap();
+
+        // One model step per sat solve call, plus the final unsat step.
+        prop_assert_eq!(found_models.counter, expected_models);
+        prop_assert!(found_models.unsat);
+    }
+
+    #[test]
+    fn pigeon_hole_checked_unsat_assumption_core(
+        (enable_row, columns, formula) in conditional_pigeon_hole(1..5usize, 1..5usize),
+    ) {
+        let mut proof = vec![];
+
+        let mut solver = Solver::new();
+        solver.write_proof(&mut proof, ProofFormat::Varisat);
+
+        // Running tallies of sat/unsat solve results, compared against the proof
+        // transcript at the end.
+        let mut expected_sat = 0;
+        let mut expected_unsat = 0;
+
+        solver.solve().unwrap();
+        expected_sat += 1;
+        solver.add_formula(&formula);
+
+        prop_assert_eq!(solver.solve().ok(), Some(true));
+        expected_sat += 1;
+
+        let mut assumptions = enable_row;
+
+        // Also assume a literal over a variable not present in the formula.
+        assumptions.push(Lit::positive(Var::from_index(formula.var_count() + 10)));
+
+        solver.assume(&assumptions);
+
+        prop_assert_eq!(solver.solve().ok(), Some(false));
+        expected_unsat += 1;
+
+        // Shrink the failed assumption set to a core by repeatedly dropping the last
+        // candidate and re-solving.
+        let mut candidates = solver.failed_core().unwrap().to_owned();
+        let mut core: Vec<Lit> = vec![];
+
+        while !candidates.is_empty() {
+
+            solver.assume(&candidates[0..candidates.len() - 1]);
+
+            match solver.solve() {
+                Err(_) => unreachable!(),
+                Ok(true) => {
+                    expected_sat += 1;
+                    // The dropped assumption was necessary, so it belongs to the core;
+                    // fix it as a unit clause from now on.
+                    let skipped = *candidates.last().unwrap();
+                    core.push(skipped);
+
+                    let single_clause = CnfFormula::from(Some([skipped]));
+                    solver.add_formula(&single_clause);
+                },
+                Ok(false) => {
+                    expected_unsat += 1;
+                    candidates = solver.failed_core().unwrap().to_owned();
+                }
+            }
+        }
+
+        prop_assert_eq!(core.len(), columns + 1);
+
+        drop(solver);
+
+        // Transcript processor counting sat (model) and unsat/failed-assumption steps.
+        #[derive(Default)]
+        struct CountResults {
+            sat: usize,
+            unsat: usize,
+        }
+
+        impl ProofTranscriptProcessor for CountResults {
+            fn process_step(
+                &mut self,
+                step: &ProofTranscriptStep,
+            ) -> Result<(), Error> {
+                match step {
+                    ProofTranscriptStep::Model { .. } => {
+                        self.sat += 1;
+                    }
+                    ProofTranscriptStep::Unsat
+                    | ProofTranscriptStep::FailedAssumptions { .. } => {
+                        self.unsat += 1;
+                    }
+                    _ => (),
+                }
+                Ok(())
+            }
+        }
+
+        let mut count_results = CountResults::default();
+        let mut checker = Checker::new();
+        checker.add_transcript(&mut count_results);
+        checker.check_proof(&mut &proof[..]).unwrap();
+
+        prop_assert_eq!(count_results.sat, expected_sat);
+        prop_assert_eq!(count_results.unsat, expected_unsat);
+    }
+}
diff --git a/vendor/varisat/tests/cnfs.rs b/vendor/varisat/tests/cnfs.rs
new file mode 100644
index 000000000..8797c6596
--- /dev/null
+++ b/vendor/varisat/tests/cnfs.rs
@@ -0,0 +1,27 @@
+use std::collections::HashSet;
+
+use varisat::{solver::Solver, Lit};
+
+/// Generates a `#[test]` named `$name` that loads `tests/cnfs/<$name>.cnf`, solves it with
+/// self checking enabled and asserts that the result matches `$result`.
+///
+/// For satisfiable instances the reported model is additionally checked to satisfy every
+/// clause of the parsed formula.
+macro_rules! test_cnf {
+    ($name:ident, $result:expr) => {
+        #[test]
+        fn $name() {
+            // The CNF fixture is embedded into the test binary at compile time.
+            let cnf = include_bytes!(concat!("cnfs/", stringify!($name), ".cnf"));
+            let mut solver = Solver::new();
+            solver.enable_self_checking();
+            let formula = varisat_dimacs::DimacsParser::parse(&cnf[..]).expect("parsing failed");
+            solver.add_formula(&formula);
+            let result = $result;
+            assert_eq!(solver.solve().expect("solve failed"), result);
+            if result {
+                // A valid model assigns at least one literal true in every clause.
+                let model: HashSet<Lit> = solver.model().unwrap().into_iter().collect();
+                for clause in formula.iter() {
+                    assert!(clause.iter().any(|&lit| model.contains(&lit)));
+                }
+            }
+        }
+    };
+}
+
+test_cnf!(sgen1_unsat_57_0, false);
+test_cnf!(sgen1_sat_90_0, true);
diff --git a/vendor/varisat/tests/cnfs/sgen1_sat_90_0.cnf b/vendor/varisat/tests/cnfs/sgen1_sat_90_0.cnf
new file mode 100644
index 000000000..a465a7283
--- /dev/null
+++ b/vendor/varisat/tests/cnfs/sgen1_sat_90_0.cnf
@@ -0,0 +1,217 @@
+p cnf 90 216
+-1 -2 0
+-1 -3 0
+-1 -4 0
+-1 -5 0
+-2 -3 0
+-2 -4 0
+-2 -5 0
+-3 -4 0
+-3 -5 0
+-4 -5 0
+-6 -7 0
+-6 -8 0
+-6 -9 0
+-6 -10 0
+-7 -8 0
+-7 -9 0
+-7 -10 0
+-8 -9 0
+-8 -10 0
+-9 -10 0
+-11 -12 0
+-11 -13 0
+-11 -14 0
+-11 -15 0
+-12 -13 0
+-12 -14 0
+-12 -15 0
+-13 -14 0
+-13 -15 0
+-14 -15 0
+-16 -17 0
+-16 -18 0
+-16 -19 0
+-16 -20 0
+-17 -18 0
+-17 -19 0
+-17 -20 0
+-18 -19 0
+-18 -20 0
+-19 -20 0
+-21 -22 0
+-21 -23 0
+-21 -24 0
+-21 -25 0
+-22 -23 0
+-22 -24 0
+-22 -25 0
+-23 -24 0
+-23 -25 0
+-24 -25 0
+-26 -27 0
+-26 -28 0
+-26 -29 0
+-26 -30 0
+-27 -28 0
+-27 -29 0
+-27 -30 0
+-28 -29 0
+-28 -30 0
+-29 -30 0
+-31 -32 0
+-31 -33 0
+-31 -34 0
+-31 -35 0
+-32 -33 0
+-32 -34 0
+-32 -35 0
+-33 -34 0
+-33 -35 0
+-34 -35 0
+-36 -37 0
+-36 -38 0
+-36 -39 0
+-36 -40 0
+-37 -38 0
+-37 -39 0
+-37 -40 0
+-38 -39 0
+-38 -40 0
+-39 -40 0
+-41 -42 0
+-41 -43 0
+-41 -44 0
+-41 -45 0
+-42 -43 0
+-42 -44 0
+-42 -45 0
+-43 -44 0
+-43 -45 0
+-44 -45 0
+-46 -47 0
+-46 -48 0
+-46 -49 0
+-46 -50 0
+-47 -48 0
+-47 -49 0
+-47 -50 0
+-48 -49 0
+-48 -50 0
+-49 -50 0
+-51 -52 0
+-51 -53 0
+-51 -54 0
+-51 -55 0
+-52 -53 0
+-52 -54 0
+-52 -55 0
+-53 -54 0
+-53 -55 0
+-54 -55 0
+-56 -57 0
+-56 -58 0
+-56 -59 0
+-56 -60 0
+-57 -58 0
+-57 -59 0
+-57 -60 0
+-58 -59 0
+-58 -60 0
+-59 -60 0
+-61 -62 0
+-61 -63 0
+-61 -64 0
+-61 -65 0
+-62 -63 0
+-62 -64 0
+-62 -65 0
+-63 -64 0
+-63 -65 0
+-64 -65 0
+-66 -67 0
+-66 -68 0
+-66 -69 0
+-66 -70 0
+-67 -68 0
+-67 -69 0
+-67 -70 0
+-68 -69 0
+-68 -70 0
+-69 -70 0
+-71 -72 0
+-71 -73 0
+-71 -74 0
+-71 -75 0
+-72 -73 0
+-72 -74 0
+-72 -75 0
+-73 -74 0
+-73 -75 0
+-74 -75 0
+-76 -77 0
+-76 -78 0
+-76 -79 0
+-76 -80 0
+-77 -78 0
+-77 -79 0
+-77 -80 0
+-78 -79 0
+-78 -80 0
+-79 -80 0
+-81 -82 0
+-81 -83 0
+-81 -84 0
+-81 -85 0
+-82 -83 0
+-82 -84 0
+-82 -85 0
+-83 -84 0
+-83 -85 0
+-84 -85 0
+-86 -87 0
+-86 -88 0
+-86 -89 0
+-86 -90 0
+-87 -88 0
+-87 -89 0
+-87 -90 0
+-88 -89 0
+-88 -90 0
+-89 -90 0
+1 85 34 20 9 0
+6 46 59 82 30 0
+11 80 28 33 70 0
+47 17 36 41 35 0
+84 15 23 39 51 0
+21 42 54 29 77 0
+58 32 71 50 53 0
+7 37 55 78 88 0
+83 73 43 2 76 0
+89 25 48 18 67 0
+72 52 5 27 16 0
+56 8 44 63 69 0
+90 40 31 64 26 0
+24 3 68 38 57 0
+22 10 65 74 14 0
+62 49 4 79 13 0
+81 66 75 61 86 0
+60 87 19 45 12 0
+1 90 14 24 59 0
+6 78 53 57 72 0
+11 9 65 86 18 0
+82 17 13 35 38 0
+66 89 23 54 83 0
+69 50 15 29 71 0
+42 32 10 85 3 0
+2 37 70 8 63 0
+5 55 43 20 76 0
+36 88 48 45 75 0
+47 52 61 21 31 0
+56 67 46 19 39 0
+60 73 80 64 34 0
+77 25 68 12 41 0
+22 4 16 74 30 0
+27 84 49 79 7 0
+81 44 62 28 58 0
+26 87 51 40 33 0
diff --git a/vendor/varisat/tests/cnfs/sgen1_unsat_57_0.cnf b/vendor/varisat/tests/cnfs/sgen1_unsat_57_0.cnf
new file mode 100644
index 000000000..045a3c5da
--- /dev/null
+++ b/vendor/varisat/tests/cnfs/sgen1_unsat_57_0.cnf
@@ -0,0 +1,125 @@
+p cnf 57 124
+-2 -3 -4 0
+-1 -3 -4 0
+-1 -2 -4 0
+-1 -2 -3 0
+-6 -7 -8 0
+-5 -7 -8 0
+-5 -6 -8 0
+-5 -6 -7 0
+-10 -11 -12 0
+-9 -11 -12 0
+-9 -10 -12 0
+-9 -10 -11 0
+-14 -15 -16 0
+-13 -15 -16 0
+-13 -14 -16 0
+-13 -14 -15 0
+-18 -19 -20 0
+-17 -19 -20 0
+-17 -18 -20 0
+-17 -18 -19 0
+-22 -23 -24 0
+-21 -23 -24 0
+-21 -22 -24 0
+-21 -22 -23 0
+-26 -27 -28 0
+-25 -27 -28 0
+-25 -26 -28 0
+-25 -26 -27 0
+-30 -31 -32 0
+-29 -31 -32 0
+-29 -30 -32 0
+-29 -30 -31 0
+-34 -35 -36 0
+-33 -35 -36 0
+-33 -34 -36 0
+-33 -34 -35 0
+-38 -39 -40 0
+-37 -39 -40 0
+-37 -38 -40 0
+-37 -38 -39 0
+-42 -43 -44 0
+-41 -43 -44 0
+-41 -42 -44 0
+-41 -42 -43 0
+-46 -47 -48 0
+-45 -47 -48 0
+-45 -46 -48 0
+-45 -46 -47 0
+-50 -51 -52 0
+-49 -51 -52 0
+-49 -50 -52 0
+-49 -50 -51 0
+-53 -54 -55 0
+-53 -54 -56 0
+-53 -54 -57 0
+-53 -55 -56 0
+-53 -55 -57 0
+-53 -56 -57 0
+-54 -55 -56 0
+-54 -55 -57 0
+-54 -56 -57 0
+-55 -56 -57 0
+5 13 43 0
+32 13 43 0
+32 5 43 0
+32 5 13 0
+51 39 19 0
+10 39 19 0
+10 51 19 0
+10 51 39 0
+26 1 16 0
+11 1 16 0
+11 26 16 0
+11 26 1 0
+30 4 20 0
+34 4 20 0
+34 30 20 0
+34 30 4 0
+24 18 28 0
+8 18 28 0
+8 24 28 0
+8 24 18 0
+50 46 3 0
+41 46 3 0
+41 50 3 0
+41 50 46 0
+56 40 2 0
+6 40 2 0
+6 56 2 0
+6 56 40 0
+37 45 31 0
+27 45 31 0
+27 37 31 0
+27 37 45 0
+33 38 42 0
+22 38 42 0
+22 33 42 0
+22 33 38 0
+21 9 29 0
+55 9 29 0
+55 21 29 0
+55 21 9 0
+52 44 25 0
+54 44 25 0
+54 52 25 0
+54 52 44 0
+17 14 47 0
+57 14 47 0
+57 17 47 0
+57 17 14 0
+49 15 36 0
+23 15 36 0
+23 49 36 0
+23 49 15 0
+35 7 48 0
+35 7 53 0
+35 7 12 0
+35 48 53 0
+35 48 12 0
+35 53 12 0
+7 48 53 0
+7 48 12 0
+7 53 12 0
+48 53 12 0