author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-05-30 18:31:44 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-05-30 18:31:44 +0000
commit     c23a457e72abe608715ac76f076f47dc42af07a5 (patch)
tree       2772049aaf84b5c9d0ed12ec8d86812f7a7904b6  /vendor/gix-filter
parent     Releasing progress-linux version 1.73.0+dfsg1-1~progress7.99u1. (diff)
download   rustc-c23a457e72abe608715ac76f076f47dc42af07a5.tar.xz
           rustc-c23a457e72abe608715ac76f076f47dc42af07a5.zip
Merging upstream version 1.74.1+dfsg1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'vendor/gix-filter')
-rw-r--r--  vendor/gix-filter/.cargo-checksum.json  1
-rw-r--r--  vendor/gix-filter/CHANGELOG.md  286
-rw-r--r--  vendor/gix-filter/Cargo.lock  494
-rw-r--r--  vendor/gix-filter/Cargo.toml  65
-rw-r--r--  vendor/gix-filter/LICENSE-APACHE  191
-rw-r--r--  vendor/gix-filter/LICENSE-MIT  21
-rw-r--r--  vendor/gix-filter/examples/arrow.rs  207
-rw-r--r--  vendor/gix-filter/src/driver/apply.rs  242
-rw-r--r--  vendor/gix-filter/src/driver/delayed.rs  155
-rw-r--r--  vendor/gix-filter/src/driver/init.rs  106
-rw-r--r--  vendor/gix-filter/src/driver/mod.rs  99
-rw-r--r--  vendor/gix-filter/src/driver/process/client.rs  309
-rw-r--r--  vendor/gix-filter/src/driver/process/mod.rs  113
-rw-r--r--  vendor/gix-filter/src/driver/process/server.rs  280
-rw-r--r--  vendor/gix-filter/src/driver/shutdown.rs  35
-rw-r--r--  vendor/gix-filter/src/eol/convert_to_git.rs  162
-rw-r--r--  vendor/gix-filter/src/eol/convert_to_worktree.rs  43
-rw-r--r--  vendor/gix-filter/src/eol/mod.rs  95
-rw-r--r--  vendor/gix-filter/src/eol/utils.rs  124
-rw-r--r--  vendor/gix-filter/src/ident.rs  77
-rw-r--r--  vendor/gix-filter/src/lib.rs  80
-rw-r--r--  vendor/gix-filter/src/pipeline/convert.rs  353
-rw-r--r--  vendor/gix-filter/src/pipeline/mod.rs  115
-rw-r--r--  vendor/gix-filter/src/pipeline/tests.rs  39
-rw-r--r--  vendor/gix-filter/src/pipeline/util.rs  206
-rw-r--r--  vendor/gix-filter/src/worktree/encode_to_git.rs  81
-rw-r--r--  vendor/gix-filter/src/worktree/encode_to_worktree.rs  60
-rw-r--r--  vendor/gix-filter/src/worktree/encoding.rs  31
-rw-r--r--  vendor/gix-filter/src/worktree/mod.rs  16
-rw-r--r--  vendor/gix-filter/tests/driver/mod.rs  399
-rw-r--r--  vendor/gix-filter/tests/eol/convert_to_git.rs  175
-rw-r--r--  vendor/gix-filter/tests/eol/convert_to_worktree.rs  97
-rw-r--r--  vendor/gix-filter/tests/eol/mod.rs  25
-rw-r--r--  vendor/gix-filter/tests/filter.rs  7
-rwxr-xr-x  vendor/gix-filter/tests/fixtures/baseline.sh  41
-rw-r--r--  vendor/gix-filter/tests/fixtures/generated-archives/pipeline_repos.tar.xz  bin 0 -> 432 bytes
-rwxr-xr-x  vendor/gix-filter/tests/fixtures/pipeline_repos.sh  18
-rw-r--r--  vendor/gix-filter/tests/ident/mod.rs  111
-rw-r--r--  vendor/gix-filter/tests/pipeline/convert_to_git.rs  145
-rw-r--r--  vendor/gix-filter/tests/pipeline/convert_to_worktree.rs  106
-rw-r--r--  vendor/gix-filter/tests/pipeline/mod.rs  71
-rw-r--r--  vendor/gix-filter/tests/worktree/mod.rs  99
42 files changed, 5380 insertions, 0 deletions
diff --git a/vendor/gix-filter/.cargo-checksum.json b/vendor/gix-filter/.cargo-checksum.json
new file mode 100644
index 000000000..203b42b3a
--- /dev/null
+++ b/vendor/gix-filter/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"CHANGELOG.md":"840e33c40020b813371613365e13ef1dee679e9c644c812b5c983054c92ae061","Cargo.lock":"108da2aa8f8baac64c3686555565c795b39a2eb50a419bc734fdfe89dc026337","Cargo.toml":"03088c52929252a2faeb38667dad2b5f0106c973b3afc71ff672b1b23c3e62c0","LICENSE-APACHE":"cb4780590812826851ba250f90bed0ed19506ec98f6865a0e2e20bbf62391ff9","LICENSE-MIT":"49df47913ab2beafe8dc45607877ae64198bf0eee64aaad3e82ed9e4d27424e8","examples/arrow.rs":"9dad54b7a3b3032560788209357b2f37e487899e46e521a345d30ec48e151b8f","src/driver/apply.rs":"71237764befa14039bc8001e61b3277f79b4100fc0d987127306699210f517e9","src/driver/delayed.rs":"990f52963dae9aef2c32a1f63fecfe5721022b432296770a871a779b43932e1b","src/driver/init.rs":"18220c5d5da678508352bcc78dab6551c14df75c943614951e5c00aecd0c692d","src/driver/mod.rs":"b43227e3c329db27d8aa121f4adc34e7ba6921db63e76534cf08fb73c5bc3d89","src/driver/process/client.rs":"3501764e9fa3eca5e038d956c207b90400539416f7eda634830ff032fe61dcf2","src/driver/process/mod.rs":"9eefffe43dbd5315441cd08cdb813d047e3d624d0840030ee7bb8b450d9d3cfc","src/driver/process/server.rs":"964acb09eb447c8129f25ba2a488b697b041828b5e130425ab7ae0425022b013","src/driver/shutdown.rs":"27002849563468693d98eb6f2788cec272b5c8915bb42a0fc00c4a92d3ccba72","src/eol/convert_to_git.rs":"e9c1865c3e5a788b2751b7188eedf7ac788f6cce23447e68044d2e383309f5e4","src/eol/convert_to_worktree.rs":"9a9c06b129c9f61d2aed9b7fa9224f7d31996712be81a119fe7f7d4422060e73","src/eol/mod.rs":"9bed52dcbbb4455ec7b1b0ff16ef158afcd063d6bb740d7d3690a4912f4d9e54","src/eol/utils.rs":"7afb1702f595e54a53bce55b3ac42ddcc494b193fb2d5f8adf29c6463c0717fd","src/ident.rs":"574bba21fc10c2e2c1847cc9bf32e03a469e22dd6b9c360196ffa66cf3f11c40","src/lib.rs":"4c8e58f6b0758e9f6442fc19652d4ddf6c366890b91a9b53a1be85869b5bff66","src/pipeline/convert.rs":"54c39d1f5a56e84b22f49876c5106e8760fedd528685879d63a9684f39d4a23e","src/pipeline/mod.rs":"8d008f4d83846c145a4188776c7df635707486ae6bd6f12f3850e5a418eeab52","src/pipeline/tests.rs":"57081ae094bb4384f8cc0f651f1b9def696a7c5416c9a7737bc19c9513c409eb","src/pipeline/util.rs":"79c38e60cd23e96e976c76b6674db9678ddff307934fe31292ac91d81417eae3","src/worktree/encode_to_git.rs":"e7ee1b2ea8f3c8b5a67b816c03d30de554d28e0c8a2ef7fb4a08a21877c4d29a","src/worktree/encode_to_worktree.rs":"5c6c07b0510467c5bc23ec2d6b0b9588fbe83033270e6b8eecb95ddd2faf0da4","src/worktree/encoding.rs":"f6be0f6de6016ea7e8dd66695ad5b33d273d911056726aef5937b8b724b35544","src/worktree/mod.rs":"49b88cc411797a7492ea8ff168cbfc2ccbfc4c20a0d50ad0c382f4b3f731f034","tests/driver/mod.rs":"5f5971585c723543c6027d9c88a9a5518d09107c79af2eadfe3c5248c05e9aff","tests/eol/convert_to_git.rs":"eab4d779cd5994dad2d8fee7e1cd07715c852ea8d4919d2acc15df44c4c5171c","tests/eol/convert_to_worktree.rs":"f60a67ce7d1ae89ebb9d12792f36ff69454b207ab19318a01a5533ece2a8cb2f","tests/eol/mod.rs":"443cdf101fb949f5a1fee83e3c47279258c39c0e808210fdac29322fb8930fa4","tests/filter.rs":"a239e58925089504903eb2d52ff617ca58eca42d80173045a0fbfb3a340e366c","tests/fixtures/baseline.sh":"8e9e2982b2050fa41c851811ff92b1920c431f05daccd6e05ef3fae5788f3abc","tests/fixtures/generated-archives/pipeline_repos.tar.xz":"756c232a96c52fe02bd9002b6813edccfea7c1889075946e2f490da8b058bb80","tests/fixtures/pipeline_repos.sh":"2a168ce8d09fdd5d03ca82deaf105e8dcc51831d9f71a065bc56e35b2f5e5e68","tests/ident/mod.rs":"dc4d68ab493e63abed0c171cccb823eb90981a9b51e7ba824bf1f1377388eead","tests/pipeline/convert_to_git.rs":"0038002616a772af4c37427f52ff3138024fe1f962a9564a964fe90043f64876","tests/pipeline/convert_to_worktree.rs":"76fe13e9df083b2d5830170
d3aaf0feac7329cdddc9c7b4f10e855ad8cd84047","tests/pipeline/mod.rs":"4edc80271c76e59a53cf57d09ebba7be3ea2999a0128f39bcda2f972786bf204","tests/worktree/mod.rs":"0f1d7d4e9e8a8211a8107854903e4792b70bcad51427d308624f6d27a14a017c"},"package":"1be40d28cd41445bb6cd52c4d847d915900e5466f7433eaee6a9e0a3d1d88b08"} \ No newline at end of file
diff --git a/vendor/gix-filter/CHANGELOG.md b/vendor/gix-filter/CHANGELOG.md
new file mode 100644
index 000000000..946c14d95
--- /dev/null
+++ b/vendor/gix-filter/CHANGELOG.md
@@ -0,0 +1,286 @@
+# Changelog
+
+All notable changes to this project will be documented in this file.
+
+The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
+and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+
+## 0.5.0 (2023-09-24)
+
+A maintenance release without user-facing changes.
+
+### Commit Statistics
+
+<csr-read-only-do-not-edit/>
+
+ - 1 commit contributed to the release.
+ - 16 days passed between releases.
+ - 0 commits were understood as [conventional](https://www.conventionalcommits.org).
+ - 0 issues like '(#ID)' were seen in commit messages
+
+### Commit Details
+
+<csr-read-only-do-not-edit/>
+
+<details><summary>view details</summary>
+
+ * **Uncategorized**
+ - Prepare changelogs prior to release ([`8a60d5b`](https://github.com/Byron/gitoxide/commit/8a60d5b80877c213c3b646d3061e8a33e0e433ec))
+</details>
+
+## 0.4.0 (2023-09-08)
+
+### Bug Fixes
+
+ - <csr-id-96a07e08e6090222cf398b46aa8d46b56f81f14d/> Use `Vec::resize()` instead of set_len()
+ Otherwise it's possible for uninitialized memory to be used as if it was initialized,
+ which can lead to strange behaviour.
+
+ As the buffer is re-used, it's not actually zeroing that much memory either.
+
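+A minimal sketch of the difference this fix addresses (illustrative only, with made-up helper names, not the actual `gix-filter` code):
+
+```rust
+// Growing a reused Vec<u8> buffer to `new_len` bytes.
+fn grow_unsound(buf: &mut Vec<u8>, new_len: usize) {
+    buf.reserve(new_len.saturating_sub(buf.len()));
+    // UNSOUND to read afterwards: the bytes between the old and the new
+    // length are uninitialized, yet they now look like valid data.
+    unsafe { buf.set_len(new_len) };
+}
+
+fn grow_sound(buf: &mut Vec<u8>, new_len: usize) {
+    // Newly added bytes are initialized to zero; since the buffer is
+    // re-used, most of that range was written before and little memory
+    // actually needs zeroing.
+    buf.resize(new_len, 0);
+}
+```
+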
+### Bug Fixes (BREAKING)
+
+ - <csr-id-072ee32f693a31161cd6a843da6582d13efbb20b/> use `dyn` trait where possible.
+ This reduces compile time due to avoiding duplication.
+
+### Commit Statistics
+
+<csr-read-only-do-not-edit/>
+
+ - 9 commits contributed to the release over the course of 17 calendar days.
+ - 17 days passed between releases.
+ - 2 commits were understood as [conventional](https://www.conventionalcommits.org).
+ - 0 issues like '(#ID)' were seen in commit messages
+
+### Commit Details
+
+<csr-read-only-do-not-edit/>
+
+<details><summary>view details</summary>
+
+ * **Uncategorized**
+ - Release gix-date v0.8.0, gix-hash v0.13.0, gix-features v0.34.0, gix-actor v0.26.0, gix-object v0.36.0, gix-path v0.10.0, gix-glob v0.12.0, gix-attributes v0.18.0, gix-packetline-blocking v0.16.6, gix-filter v0.4.0, gix-fs v0.6.0, gix-commitgraph v0.20.0, gix-hashtable v0.4.0, gix-revwalk v0.7.0, gix-traverse v0.32.0, gix-worktree-stream v0.4.0, gix-archive v0.4.0, gix-config-value v0.14.0, gix-tempfile v9.0.0, gix-lock v9.0.0, gix-ref v0.36.0, gix-sec v0.10.0, gix-config v0.29.0, gix-prompt v0.7.0, gix-url v0.23.0, gix-credentials v0.19.0, gix-diff v0.35.0, gix-discover v0.24.0, gix-ignore v0.7.0, gix-index v0.24.0, gix-macros v0.1.0, gix-mailmap v0.18.0, gix-negotiate v0.7.0, gix-pack v0.42.0, gix-odb v0.52.0, gix-pathspec v0.2.0, gix-packetline v0.16.6, gix-transport v0.36.0, gix-protocol v0.39.0, gix-revision v0.21.0, gix-refspec v0.17.0, gix-submodule v0.3.0, gix-worktree v0.25.0, gix-worktree-state v0.2.0, gix v0.53.0, safety bump 39 crates ([`8bd0456`](https://github.com/Byron/gitoxide/commit/8bd045676bb2cdc02624ab93e73ff8518064ca38))
+ - Prepare changelogs for release ([`375db06`](https://github.com/Byron/gitoxide/commit/375db06a8442378c3f7a922fae38e2a6694d9d04))
+ - Merge branch 'optimizations' ([`6135a5e`](https://github.com/Byron/gitoxide/commit/6135a5ea8709646f01da62939a59dd3a9750e007))
+ - Adapt to changes in `gix-worktree` ([`d7fc182`](https://github.com/Byron/gitoxide/commit/d7fc182156847752ee872016b6de0c78f5fb190b))
+ - Merge branch `dyn`ification ([`f658fcc`](https://github.com/Byron/gitoxide/commit/f658fcc52dc2200ae34ca53dc10be97fb9012057))
+ - Use `dyn` trait where possible. ([`072ee32`](https://github.com/Byron/gitoxide/commit/072ee32f693a31161cd6a843da6582d13efbb20b))
+ - Merge branch 'perf-and-safety' ([`9ad9c5b`](https://github.com/Byron/gitoxide/commit/9ad9c5b1cfa3afff5273558b6ef98ca4714d4272))
+ - Use `Vec::resize()` instead of set_len() ([`96a07e0`](https://github.com/Byron/gitoxide/commit/96a07e08e6090222cf398b46aa8d46b56f81f14d))
+ - Merge branch 'gix-submodule' ([`363ee77`](https://github.com/Byron/gitoxide/commit/363ee77400805f473c9ad66eadad9214e7ab66f4))
+</details>
+
+## 0.3.0 (2023-08-22)
+
+<csr-id-229bd4899213f749a7cc124aa2b82a1368fba40f/>
+
+### Chore
+
+ - <csr-id-229bd4899213f749a7cc124aa2b82a1368fba40f/> don't call crate 'WIP' in manifest anymore.
+
+### Commit Statistics
+
+<csr-read-only-do-not-edit/>
+
+ - 8 commits contributed to the release over the course of 18 calendar days.
+ - 30 days passed between releases.
+ - 1 commit was understood as [conventional](https://www.conventionalcommits.org).
+ - 0 issues like '(#ID)' were seen in commit messages
+
+### Commit Details
+
+<csr-read-only-do-not-edit/>
+
+<details><summary>view details</summary>
+
+ * **Uncategorized**
+ - Release gix-date v0.7.3, gix-hash v0.12.0, gix-features v0.33.0, gix-actor v0.25.0, gix-object v0.35.0, gix-path v0.9.0, gix-glob v0.11.0, gix-quote v0.4.7, gix-attributes v0.17.0, gix-command v0.2.9, gix-packetline-blocking v0.16.5, gix-filter v0.3.0, gix-fs v0.5.0, gix-commitgraph v0.19.0, gix-hashtable v0.3.0, gix-revwalk v0.6.0, gix-traverse v0.31.0, gix-worktree-stream v0.3.0, gix-archive v0.3.0, gix-config-value v0.13.0, gix-tempfile v8.0.0, gix-lock v8.0.0, gix-ref v0.35.0, gix-sec v0.9.0, gix-config v0.28.0, gix-prompt v0.6.0, gix-url v0.22.0, gix-credentials v0.18.0, gix-diff v0.34.0, gix-discover v0.23.0, gix-ignore v0.6.0, gix-bitmap v0.2.7, gix-index v0.22.0, gix-mailmap v0.17.0, gix-negotiate v0.6.0, gix-pack v0.41.0, gix-odb v0.51.0, gix-pathspec v0.1.0, gix-packetline v0.16.5, gix-transport v0.35.0, gix-protocol v0.38.0, gix-revision v0.20.0, gix-refspec v0.16.0, gix-submodule v0.2.0, gix-worktree v0.24.0, gix-worktree-state v0.1.0, gix v0.52.0, gitoxide-core v0.31.0, gitoxide v0.29.0, safety bump 41 crates ([`30b2761`](https://github.com/Byron/gitoxide/commit/30b27615047692d3ced1b2d9c2ac15a80f79fbee))
+ - Update changelogs prior to release ([`f23ea88`](https://github.com/Byron/gitoxide/commit/f23ea8828f2d9ba7559973daca388c9591bcc5fc))
+ - Merge branch 'worktree-organization' ([`8d0d8e0`](https://github.com/Byron/gitoxide/commit/8d0d8e005d7f11924a6717954d892aae5cec45e7))
+ - Adapt to changes in `gix-worktree` ([`e5717e1`](https://github.com/Byron/gitoxide/commit/e5717e1d12c49285d31a90b03b7f8e9cbc6c1108))
+ - Don't call crate 'WIP' in manifest anymore. ([`229bd48`](https://github.com/Byron/gitoxide/commit/229bd4899213f749a7cc124aa2b82a1368fba40f))
+ - Release gix-glob v0.10.2, gix-date v0.7.2, gix-validate v0.8.0, gix-object v0.34.0, gix-ref v0.34.0, gix-config v0.27.0, gix-commitgraph v0.18.2, gix-revwalk v0.5.0, gix-revision v0.19.0, gix-refspec v0.15.0, gix-submodule v0.1.0, safety bump 18 crates ([`4604f83`](https://github.com/Byron/gitoxide/commit/4604f83ef238dc07c85aaeae097399b67f3cfd0c))
+ - Merge branch 'dev-on-linux' ([`6b4a303`](https://github.com/Byron/gitoxide/commit/6b4a30330fe49fc97daa73f55bf56580cc0597aa))
+ - Fix various tests to run properly on linux ([`ef8ccd9`](https://github.com/Byron/gitoxide/commit/ef8ccd9d16143d37155d063747c69cade80f162d))
+</details>
+
+## 0.2.0 (2023-07-22)
+
+A maintenance release without user-facing changes.
+
+### Commit Statistics
+
+<csr-read-only-do-not-edit/>
+
+ - 5 commits contributed to the release over the course of 1 calendar day.
+ - 3 days passed between releases.
+ - 0 commits were understood as [conventional](https://www.conventionalcommits.org).
+ - 0 issues like '(#ID)' were seen in commit messages
+
+### Commit Details
+
+<csr-read-only-do-not-edit/>
+
+<details><summary>view details</summary>
+
+ * **Uncategorized**
+ - Release gix-features v0.32.1, gix-actor v0.24.1, gix-validate v0.7.7, gix-object v0.33.1, gix-path v0.8.4, gix-glob v0.10.1, gix-quote v0.4.6, gix-attributes v0.16.0, gix-command v0.2.8, gix-packetline-blocking v0.16.4, gix-filter v0.2.0, gix-fs v0.4.1, gix-chunk v0.4.4, gix-commitgraph v0.18.1, gix-hashtable v0.2.4, gix-revwalk v0.4.1, gix-traverse v0.30.1, gix-worktree-stream v0.2.0, gix-archive v0.2.0, gix-config-value v0.12.5, gix-tempfile v7.0.1, gix-utils v0.1.5, gix-lock v7.0.2, gix-ref v0.33.1, gix-sec v0.8.4, gix-prompt v0.5.4, gix-url v0.21.1, gix-credentials v0.17.1, gix-diff v0.33.1, gix-discover v0.22.1, gix-ignore v0.5.1, gix-bitmap v0.2.6, gix-index v0.21.1, gix-mailmap v0.16.1, gix-negotiate v0.5.1, gix-pack v0.40.1, gix-odb v0.50.1, gix-packetline v0.16.4, gix-transport v0.34.1, gix-protocol v0.36.1, gix-revision v0.18.1, gix-refspec v0.14.1, gix-worktree v0.23.0, gix v0.50.0, safety bump 5 crates ([`16295b5`](https://github.com/Byron/gitoxide/commit/16295b58e2581d2e8b8b762816f52baabe871c75))
+ - Prepare more changelogs ([`c4cc5f2`](https://github.com/Byron/gitoxide/commit/c4cc5f261d29f712a101033a18293a97a9d4ae85))
+ - Release gix-date v0.7.1, gix-hash v0.11.4, gix-trace v0.1.3, gix-features v0.32.0, gix-actor v0.24.0, gix-validate v0.7.7, gix-object v0.33.0, gix-path v0.8.4, gix-glob v0.10.0, gix-quote v0.4.6, gix-attributes v0.15.0, gix-command v0.2.7, gix-packetline-blocking v0.16.3, gix-filter v0.1.0, gix-fs v0.4.0, gix-chunk v0.4.4, gix-commitgraph v0.18.0, gix-hashtable v0.2.4, gix-revwalk v0.4.0, gix-traverse v0.30.0, gix-worktree-stream v0.2.0, gix-archive v0.2.0, gix-config-value v0.12.4, gix-tempfile v7.0.1, gix-utils v0.1.5, gix-lock v7.0.2, gix-ref v0.33.0, gix-sec v0.8.4, gix-prompt v0.5.3, gix-url v0.21.0, gix-credentials v0.17.0, gix-diff v0.33.0, gix-discover v0.22.0, gix-ignore v0.5.0, gix-bitmap v0.2.6, gix-index v0.21.0, gix-mailmap v0.16.0, gix-negotiate v0.5.0, gix-pack v0.40.0, gix-odb v0.50.0, gix-packetline v0.16.4, gix-transport v0.34.0, gix-protocol v0.36.0, gix-revision v0.18.0, gix-refspec v0.14.0, gix-worktree v0.22.0, gix v0.49.1 ([`5cb3589`](https://github.com/Byron/gitoxide/commit/5cb3589b74fc5376e02cbfe151e71344e1c417fe))
+ - Update changelogs prior to release ([`2fc66b5`](https://github.com/Byron/gitoxide/commit/2fc66b55097ed494b72d1af939ba5561f71fde97))
+ - Update license field following SPDX 2.1 license expression standard ([`9064ea3`](https://github.com/Byron/gitoxide/commit/9064ea31fae4dc59a56bdd3a06c0ddc990ee689e))
+</details>
+
+## 0.1.0 (2023-07-19)
+
+### New Features (BREAKING)
+
+ - <csr-id-29a64c289946301d5e502ee956f3606280409faf/> make it possible to use filter pipeline with streaming for checkouts.
+ This is done by providing a `gix_filter::Pipeline` to `checkout::Options` to control
+ how filters are applied.
+ - <csr-id-94df6e0e56c0c94e15da89dd9716bcdea5cd4380/> a complete, streaming compatible, `git` filter pipeline.
+   It configures itself based on each input path and supports streaming.
+
+   Some name changes were also performed while at it.
+
+### Bug Fixes (BREAKING)
+
+ - <csr-id-35415db5d82b82d3908709b6db177bb8d7c3542c/> improve conversion API of `Pipeline::convert_to_git(index_object)` to be more consistent and practical.
+   `index_object` now receives the path to the entry as its first argument, notably in a data type that fits
+   the purpose.
+   Additionally, implement `Clone` for `Pipeline`.
+   We also make sure that the input stream is turned into a buffer in case there is no driver; previously, filtering
+   had nothing to work with if there was no driver.
+
+### Commit Statistics
+
+<csr-read-only-do-not-edit/>
+
+ - 10 commits contributed to the release over the course of 12 calendar days.
+ - 18 days passed between releases.
+ - 3 commits were understood as [conventional](https://www.conventionalcommits.org).
+ - 0 issues like '(#ID)' were seen in commit messages
+
+### Commit Details
+
+<csr-read-only-do-not-edit/>
+
+<details><summary>view details</summary>
+
+ * **Uncategorized**
+ - Release gix-filter v0.1.0, gix-ignore v0.5.0, gix-revwalk v0.4.0, gix-traverse v0.30.0, gix-index v0.21.0, gix-mailmap v0.16.0, gix-negotiate v0.5.0, gix-pack v0.40.0, gix-odb v0.50.0, gix-transport v0.34.0, gix-protocol v0.36.0, gix-revision v0.18.0, gix-refspec v0.14.0, gix-worktree v0.22.0, gix v0.49.0 ([`4aca8c2`](https://github.com/Byron/gitoxide/commit/4aca8c2ae2ec588fb65ec4faa0c07c19d219569f))
+ - Fix gix-filter dev-dependencies ([`a7f9441`](https://github.com/Byron/gitoxide/commit/a7f9441ed644cf1c2fd543bede179422d16dc1bf))
+ - Release gix-features v0.32.0, gix-actor v0.24.0, gix-glob v0.10.0, gix-attributes v0.15.0, gix-commitgraph v0.18.0, gix-config-value v0.12.4, gix-fs v0.4.0, gix-object v0.33.0, gix-ref v0.33.0, gix-config v0.26.0, gix-command v0.2.7, gix-url v0.21.0, gix-credentials v0.17.0, gix-diff v0.33.0, gix-discover v0.22.0, gix-filter v0.1.0, gix-ignore v0.5.0, gix-revwalk v0.4.0, gix-traverse v0.30.0, gix-index v0.21.0, gix-mailmap v0.16.0, gix-negotiate v0.5.0, gix-pack v0.40.0, gix-odb v0.50.0, gix-transport v0.34.0, gix-protocol v0.36.0, gix-revision v0.18.0, gix-refspec v0.14.0, gix-worktree v0.22.0, gix v0.49.0 ([`68ae3ff`](https://github.com/Byron/gitoxide/commit/68ae3ff9d642ec56f088a6a682a073dc16f4e8ca))
+ - Adjust package versions (by cargo-smart-release) ([`c70e54f`](https://github.com/Byron/gitoxide/commit/c70e54f163c312c87753a506eeaad462e8579bfb))
+ - Prepare changelogs prior to release ([`e4dded0`](https://github.com/Byron/gitoxide/commit/e4dded05138562f9737a7dcfb60570c55769486d))
+ - Just fmt ([`a063c62`](https://github.com/Byron/gitoxide/commit/a063c62e3a30006d837b267e2ce74e70e48b4fb6))
+ - Merge branch 'integrate-filtering' ([`b19a56d`](https://github.com/Byron/gitoxide/commit/b19a56dcfa9bea86332a84aa4e8fad445e7d1724))
+ - Make it possible to use filter pipeline with streaming for checkouts. ([`29a64c2`](https://github.com/Byron/gitoxide/commit/29a64c289946301d5e502ee956f3606280409faf))
+ - Improve conversion API of `Pipeline::convert_to_git(index_object)` to be more consistent and practical. ([`35415db`](https://github.com/Byron/gitoxide/commit/35415db5d82b82d3908709b6db177bb8d7c3542c))
+ - A complete, streaming compatible, `git` filter pipeline. ([`94df6e0`](https://github.com/Byron/gitoxide/commit/94df6e0e56c0c94e15da89dd9716bcdea5cd4380))
+</details>
+
+## 0.0.0 (2023-07-01)
+
+<csr-id-f7f136dbe4f86e7dee1d54835c420ec07c96cd78/>
+<csr-id-533e887e80c5f7ede8392884562e1c5ba56fb9a8/>
+
+An empty crate without any content to reserve the name for the gitoxide project.
+
+### New Features (BREAKING)
+
+ - <csr-id-3d8fa8fef9800b1576beab8a5bc39b821157a5ed/> upgrade edition to 2021 in most crates.
+   MSRV for this is 1.56, and we are now at 1.60, so it should be compatible.
+ This isn't more than a patch release as it should break nobody
+ who is adhering to the MSRV, but let's be careful and mark it
+ breaking.
+
+ Note that `git-features` and `git-pack` are still on edition 2018
+ as they make use of a workaround to support (safe) mutable access
+ to non-overlapping entries in a slice which doesn't work anymore
+ in edition 2021.
+
+### New Features
+
+ - <csr-id-198ce27e05fd07d4e511dff1ac50c3f4ed163cdf/> API support for receiving delayed entries
+ - <csr-id-5670bbba73c7d48ba6220c2e3773633c234fb21c/> Ability to steer long running filter processes.
+ - <csr-id-c538c6eba466f22a3000a76a0d37902ac5256e7a/> ability to define and run simple filters.
+   Simple filters run in real time: the filter program receives the file content on stdin
+   while we read the converted result from its stdout (see the sketch after this list).
+ - <csr-id-d1fed3e9907d0a9e3fe45dbfe2ff27bd10b3e1f4/> `worktree::encode_to_worktree()` to turn UTF-8 into the worktree encoding.
+ - <csr-id-1b8f2b7f51e7d17b9b0839f42b75781ae6f940ec/> `worktree::encode_to_git()` to turn worktree encoding to UTF-8.
+ - <csr-id-1517cbc42c43b253046b7359c79731771fd7b941/> add `eol::convert_to_worktree()`.
+ It's the inverse of `eol::convert_to_git()` to re-add CRLF where there were LF only.
+ - <csr-id-e45fec9663f87b7ba4162a9517677f6278c20a98/> Add `eol::convert_to_git()`.
+ This function supports all the logic that git executes to determine
+   if a conversion should actually be done.
+ - <csr-id-b79ffeb9ed584c47f2609eea261e1ada557a744c/> `eol::Stats::from_bytes()` to obtain stats about a buffer.
+ It can help to determine if it is binary and if line conversions should be performed at all.
+ - <csr-id-306c8eabcffe80da1d627283c4b188a1b979f692/> add `ident::apply()` to substitute `$Id$` with `$Id: <hex>$`
+ - <csr-id-496445ca97687a38ecb80e871a1cbdc7ecd6b313/> `ident::undo()` to replace `$Id: XXX$` with `$Id$`
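+
+A minimal sketch of the `$Id$` keyword substitution described by `ident::apply()`/`ident::undo()` above; `apply_ident`/`undo_ident` are hypothetical stand-ins for illustration, not the crate's API, and the blob hash is supplied by the caller:
+
+```rust
+/// Hypothetical: replace every `$Id$` placeholder with `$Id: <hex>$`.
+fn apply_ident(text: &str, hex_id: &str) -> String {
+    text.replace("$Id$", &format!("$Id: {}$", hex_id))
+}
+
+/// Hypothetical: collapse any `$Id: ...$` back to the bare `$Id$` placeholder.
+fn undo_ident(text: &str) -> String {
+    let mut out = String::with_capacity(text.len());
+    let mut rest = text;
+    while let Some(start) = rest.find("$Id: ") {
+        match rest[start + "$Id: ".len()..].find('$') {
+            Some(end) => {
+                out.push_str(&rest[..start]);
+                out.push_str("$Id$");
+                rest = &rest[start + "$Id: ".len() + end + 1..];
+            }
+            None => break,
+        }
+    }
+    out.push_str(rest);
+    out
+}
+```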
+
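+The "simple filters" entry above follows git's clean/smudge convention: the filter is an ordinary program that is given the file content on stdin, writes the converted content to stdout, and is wired up through the `filter.<driver>.clean`/`filter.<driver>.smudge` configuration keys plus a `filter=<driver>` gitattribute. A toy clean filter in that spirit (hypothetical, mirroring the prefix-stripping that the bundled `examples/arrow.rs` performs for the long-running process protocol) could look like this:
+
+```rust
+use std::io::{self, Read, Write};
+
+fn main() -> io::Result<()> {
+    // Read the whole blob from stdin, as git pipes it to a clean filter.
+    let mut input = Vec::new();
+    io::stdin().read_to_end(&mut input)?;
+
+    // Strip a leading "➡" from every line and emit the result on stdout.
+    let mut output = Vec::with_capacity(input.len());
+    for line in input.split_inclusive(|&b| b == b'\n') {
+        let line = line.strip_prefix("➡".as_bytes()).unwrap_or(line);
+        output.extend_from_slice(line);
+    }
+    io::stdout().write_all(&output)
+}
+```
+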
+### Chore
+
+ - <csr-id-f7f136dbe4f86e7dee1d54835c420ec07c96cd78/> uniformize deny attributes
+ - <csr-id-533e887e80c5f7ede8392884562e1c5ba56fb9a8/> remove default link to cargo doc everywhere
+
+### Commit Statistics
+
+<csr-read-only-do-not-edit/>
+
+ - 41 commits contributed to the release over the course of 438 calendar days.
+ - 13 commits were understood as [conventional](https://www.conventionalcommits.org).
+ - 2 unique issues were worked on: [#301](https://github.com/Byron/gitoxide/issues/301), [#691](https://github.com/Byron/gitoxide/issues/691)
+
+### Commit Details
+
+<csr-read-only-do-not-edit/>
+
+<details><summary>view details</summary>
+
+ * **[#301](https://github.com/Byron/gitoxide/issues/301)**
+ - Add git-filter crate for name generation ([`5a3c628`](https://github.com/Byron/gitoxide/commit/5a3c628c757a7eb4ecfc5fd7265fa36c8362797b))
+ * **[#691](https://github.com/Byron/gitoxide/issues/691)**
+ - Set `rust-version` to 1.64 ([`55066ce`](https://github.com/Byron/gitoxide/commit/55066ce5fd71209abb5d84da2998b903504584bb))
+ * **Uncategorized**
+ - Release gix-filter v0.0.0 ([`c3ee57b`](https://github.com/Byron/gitoxide/commit/c3ee57b9d71f650784dc0a5022dbf54fe71e5fe5))
+ - Use latest version of `gix-path` to allow `gix-filter` release. ([`bb9f308`](https://github.com/Byron/gitoxide/commit/bb9f3088aee60f140e960602429cc7eb46121b75))
+ - Release gix-packetline-blocking v0.16.3, gix-filter v0.0.0 ([`fb3ad29`](https://github.com/Byron/gitoxide/commit/fb3ad29967d08558e42cbe8e80de5dd0b38f12c5))
+ - Merge branch 'filter-programs' ([`97f8e96`](https://github.com/Byron/gitoxide/commit/97f8e960ed52538bb55b72f9dfc5f9d144d72885))
+ - API support for receiving delayed entries ([`198ce27`](https://github.com/Byron/gitoxide/commit/198ce27e05fd07d4e511dff1ac50c3f4ed163cdf))
+ - Handle process error codes similarly to how git does it ([`d00e6c5`](https://github.com/Byron/gitoxide/commit/d00e6c569a4320e334ef15a82867433497dc235e))
+ - Make it possible to not run a process at all and test graceful shutdowns ([`cb74130`](https://github.com/Byron/gitoxide/commit/cb7413062e3ef46c896f552c889bd7984a35bd03))
+ - Ability to steer long running filter processes. ([`5670bbb`](https://github.com/Byron/gitoxide/commit/5670bbba73c7d48ba6220c2e3773633c234fb21c))
+ - Ability to define and run simple filters. ([`c538c6e`](https://github.com/Byron/gitoxide/commit/c538c6eba466f22a3000a76a0d37902ac5256e7a))
+ - Release gix-date v0.7.0, gix-trace v0.1.2, gix-actor v0.23.0, gix-commitgraph v0.17.1, gix-utils v0.1.4, gix-object v0.32.0, gix-ref v0.32.0, gix-config v0.25.0, gix-diff v0.32.0, gix-discover v0.21.0, gix-hashtable v0.2.3, gix-revwalk v0.3.0, gix-traverse v0.29.0, gix-index v0.20.0, gix-mailmap v0.15.0, gix-negotiate v0.4.0, gix-pack v0.39.0, gix-odb v0.49.0, gix-protocol v0.35.0, gix-revision v0.17.0, gix-refspec v0.13.0, gix-worktree v0.21.0, gix v0.48.0, safety bump 20 crates ([`27e8c18`](https://github.com/Byron/gitoxide/commit/27e8c18db5a9a21843381c116a8ed6d9f681b3f8))
+ - Merge branch 'worktree-encoding' ([`5af2cf3`](https://github.com/Byron/gitoxide/commit/5af2cf368dcd05fe4dffbd675cffe6bafec127e7))
+ - `worktree::encode_to_worktree()` to turn UTF-8 into the worktree encoding. ([`d1fed3e`](https://github.com/Byron/gitoxide/commit/d1fed3e9907d0a9e3fe45dbfe2ff27bd10b3e1f4))
+ - `worktree::encode_to_git()` to turn worktree encoding to UTF-8. ([`1b8f2b7`](https://github.com/Byron/gitoxide/commit/1b8f2b7f51e7d17b9b0839f42b75781ae6f940ec))
+ - Refactor ([`7ae7ebd`](https://github.com/Byron/gitoxide/commit/7ae7ebd673b7062f7e4116e9ae4fc51a1451e34b))
+ - Merge branch 'basic-filtering' ([`3fd5e16`](https://github.com/Byron/gitoxide/commit/3fd5e16e205db18edc21341fb4c2a75d0726f5a5))
+ - Add `eol::convert_to_worktree()`. ([`1517cbc`](https://github.com/Byron/gitoxide/commit/1517cbc42c43b253046b7359c79731771fd7b941))
+ - Add `eol::convert_to_git()`. ([`e45fec9`](https://github.com/Byron/gitoxide/commit/e45fec9663f87b7ba4162a9517677f6278c20a98))
+ - `eol::Stats::from_bytes()` to obtain stats about a buffer. ([`b79ffeb`](https://github.com/Byron/gitoxide/commit/b79ffeb9ed584c47f2609eea261e1ada557a744c))
+ - Refactor ([`9bb9c48`](https://github.com/Byron/gitoxide/commit/9bb9c48e0c935179885b774cd685bcaf1008c043))
+ - Add `ident::apply()` to substitute `$Id$` with `$Id: <hex>$` ([`306c8ea`](https://github.com/Byron/gitoxide/commit/306c8eabcffe80da1d627283c4b188a1b979f692))
+ - `ident::undo()` to replace `$Id: XXX$` with `$Id$` ([`496445c`](https://github.com/Byron/gitoxide/commit/496445ca97687a38ecb80e871a1cbdc7ecd6b313))
+ - Merge branch 'corpus' ([`aa16c8c`](https://github.com/Byron/gitoxide/commit/aa16c8ce91452a3e3063cf1cf0240b6014c4743f))
+ - Change MSRV to 1.65 ([`4f635fc`](https://github.com/Byron/gitoxide/commit/4f635fc4429350bae2582d25de86429969d28f30))
+ - Merge branch 'main' into auto-clippy ([`3ef5c90`](https://github.com/Byron/gitoxide/commit/3ef5c90aebce23385815f1df674c1d28d58b4b0d))
+ - Merge branch 'blinxen/main' ([`9375cd7`](https://github.com/Byron/gitoxide/commit/9375cd75b01aa22a0e2eed6305fe45fabfd6c1ac))
+ - Include license files in all crates ([`facaaf6`](https://github.com/Byron/gitoxide/commit/facaaf633f01c857dcf2572c6dbe0a92b7105c1c))
+ - Merge branch 'rename-crates' into inform-about-gix-rename ([`c9275b9`](https://github.com/Byron/gitoxide/commit/c9275b99ea43949306d93775d9d78c98fb86cfb1))
+ - Rename `git-filter` to `gix-filter` ([`0875ae6`](https://github.com/Byron/gitoxide/commit/0875ae61d9e2e7553ea990ea1c391a26f0eb1a59))
+ - Rename `git-filter` to `gix-filter` ([`4aa193f`](https://github.com/Byron/gitoxide/commit/4aa193f359ba31fc6ca7c3c28654e08b12ace6b3))
+ - Merge branch 'main' into http-config ([`bcd9654`](https://github.com/Byron/gitoxide/commit/bcd9654e56169799eb706646da6ee1f4ef2021a9))
+ - Merge branch 'version2021' ([`0e4462d`](https://github.com/Byron/gitoxide/commit/0e4462df7a5166fe85c23a779462cdca8ee013e8))
+ - Upgrade edition to 2021 in most crates. ([`3d8fa8f`](https://github.com/Byron/gitoxide/commit/3d8fa8fef9800b1576beab8a5bc39b821157a5ed))
+ - Merge branch 'main' into index-from-tree ([`bc64b96`](https://github.com/Byron/gitoxide/commit/bc64b96a2ec781c72d1d4daad38aa7fb8b74f99b))
+ - Merge branch 'main' into remote-ls-refs ([`e2ee3de`](https://github.com/Byron/gitoxide/commit/e2ee3ded97e5c449933712883535b30d151c7c78))
+ - Merge branch 'docsrs-show-features' ([`31c2351`](https://github.com/Byron/gitoxide/commit/31c235140cad212d16a56195763fbddd971d87ce))
+ - Uniformize deny attributes ([`f7f136d`](https://github.com/Byron/gitoxide/commit/f7f136dbe4f86e7dee1d54835c420ec07c96cd78))
+ - Remove default link to cargo doc everywhere ([`533e887`](https://github.com/Byron/gitoxide/commit/533e887e80c5f7ede8392884562e1c5ba56fb9a8))
+ - Merge branch 'main' into msrv-for-windows ([`7cb1972`](https://github.com/Byron/gitoxide/commit/7cb19729133325bdfacedf44cdc0500cbcf36684))
+ - Release git-filter v0.0.0 ([`2465381`](https://github.com/Byron/gitoxide/commit/2465381626a1f6de58f45df2e68f36c6b585b68f))
+</details>
+
diff --git a/vendor/gix-filter/Cargo.lock b/vendor/gix-filter/Cargo.lock
new file mode 100644
index 000000000..9c70b109b
--- /dev/null
+++ b/vendor/gix-filter/Cargo.lock
@@ -0,0 +1,494 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "autocfg"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
+
+[[package]]
+name = "bitflags"
+version = "2.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "630be753d4e58660abd17930c71b647fe46c27ea6b63cc59e1e3851406972e42"
+
+[[package]]
+name = "bstr"
+version = "1.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6798148dccfbff0fae41c7574d2fa8f1ef3492fba0face179de5d8d447d67b05"
+dependencies = [
+ "memchr",
+ "regex-automata",
+ "serde",
+]
+
+[[package]]
+name = "btoi"
+version = "0.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9dd6407f73a9b8b6162d8a2ef999fe6afd7cc15902ebf42c5cd296addf17e0ad"
+dependencies = [
+ "num-traits",
+]
+
+[[package]]
+name = "byteyarn"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a7534301c0ea17abb4db06d75efc7b4b0fa360fce8e175a4330d721c71c942ff"
+
+[[package]]
+name = "cfg-if"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+
+[[package]]
+name = "encoding_rs"
+version = "0.8.32"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "071a31f4ee85403370b58aca746f01041ede6f0da2730960ad001edc2b71b394"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "faster-hex"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e9042d281a5eec0f2387f8c3ea6c4514e2cf2732c90a85aaf383b761ee3b290d"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "gix-actor"
+version = "0.27.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "08c60e982c5290897122d4e2622447f014a2dadd5a18cb73d50bb91b31645e27"
+dependencies = [
+ "bstr",
+ "btoi",
+ "gix-date",
+ "itoa",
+ "thiserror",
+ "winnow",
+]
+
+[[package]]
+name = "gix-attributes"
+version = "0.19.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2451665e70709ba4753b623ef97511ee98c4a73816b2c5b5df25678d607ed820"
+dependencies = [
+ "bstr",
+ "byteyarn",
+ "gix-glob",
+ "gix-path",
+ "gix-quote",
+ "gix-trace",
+ "smallvec",
+ "thiserror",
+ "unicode-bom",
+]
+
+[[package]]
+name = "gix-command"
+version = "0.2.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0f28f654184b5f725c5737c7e4f466cbd8f0102ac352d5257eeab19647ee4256"
+dependencies = [
+ "bstr",
+]
+
+[[package]]
+name = "gix-date"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fc7df669639582dc7c02737642f76890b03b5544e141caba68a7d6b4eb551e0d"
+dependencies = [
+ "bstr",
+ "itoa",
+ "thiserror",
+ "time",
+]
+
+[[package]]
+name = "gix-features"
+version = "0.35.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9b9ff423ae4983f762659040d13dd7a5defbd54b6a04ac3cc7347741cec828cd"
+dependencies = [
+ "gix-hash",
+ "gix-trace",
+ "libc",
+ "sha1_smol",
+]
+
+[[package]]
+name = "gix-filter"
+version = "0.5.0"
+dependencies = [
+ "bstr",
+ "encoding_rs",
+ "gix-attributes",
+ "gix-command",
+ "gix-hash",
+ "gix-object",
+ "gix-packetline-blocking",
+ "gix-path",
+ "gix-quote",
+ "gix-trace",
+ "once_cell",
+ "smallvec",
+ "thiserror",
+]
+
+[[package]]
+name = "gix-glob"
+version = "0.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a9d76e85f11251dcf751d2c5e918a14f562db5be6f727fd24775245653e9b19d"
+dependencies = [
+ "bitflags",
+ "bstr",
+ "gix-features",
+ "gix-path",
+]
+
+[[package]]
+name = "gix-hash"
+version = "0.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2ccf425543779cddaa4a7c62aba3fa9d90ea135b160be0a72dd93c063121ad4a"
+dependencies = [
+ "faster-hex",
+ "thiserror",
+]
+
+[[package]]
+name = "gix-object"
+version = "0.37.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e7e19616c67967374137bae83e950e9b518a9ea8a605069bd6716ada357fd6f"
+dependencies = [
+ "bstr",
+ "btoi",
+ "gix-actor",
+ "gix-date",
+ "gix-features",
+ "gix-hash",
+ "gix-validate",
+ "itoa",
+ "smallvec",
+ "thiserror",
+ "winnow",
+]
+
+[[package]]
+name = "gix-packetline-blocking"
+version = "0.16.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7d8395f7501c84d6a1fe902035fdfd8cd86d89e2dd6be0200ec1a72fd3c92d39"
+dependencies = [
+ "bstr",
+ "faster-hex",
+ "thiserror",
+]
+
+[[package]]
+name = "gix-path"
+version = "0.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6a1d370115171e3ae03c5c6d4f7d096f2981a40ddccb98dfd704c773530ba73b"
+dependencies = [
+ "bstr",
+ "gix-trace",
+ "home",
+ "once_cell",
+ "thiserror",
+]
+
+[[package]]
+name = "gix-quote"
+version = "0.4.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "475c86a97dd0127ba4465fbb239abac9ea10e68301470c9791a6dd5351cdc905"
+dependencies = [
+ "bstr",
+ "btoi",
+ "thiserror",
+]
+
+[[package]]
+name = "gix-trace"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "96b6d623a1152c3facb79067d6e2ecdae48130030cf27d6eb21109f13bd7b836"
+
+[[package]]
+name = "gix-validate"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e05cab2b03a45b866156e052aa38619f4ece4adcb2f79978bfc249bc3b21b8c5"
+dependencies = [
+ "bstr",
+ "thiserror",
+]
+
+[[package]]
+name = "home"
+version = "0.5.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5444c27eef6923071f7ebcc33e3444508466a76f7a2b93da00ed6e19f30c1ddb"
+dependencies = [
+ "windows-sys",
+]
+
+[[package]]
+name = "itoa"
+version = "1.0.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38"
+
+[[package]]
+name = "libc"
+version = "0.2.147"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3"
+
+[[package]]
+name = "memchr"
+version = "2.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "76fc44e2588d5b436dbc3c6cf62aef290f90dab6235744a93dfe1cc18f451e2c"
+
+[[package]]
+name = "num-traits"
+version = "0.2.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd"
+dependencies = [
+ "autocfg",
+]
+
+[[package]]
+name = "num_threads"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2819ce041d2ee131036f4fc9d6ae7ae125a3a40e97ba64d04fe799ad9dabbb44"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "once_cell"
+version = "1.18.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d"
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.66"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9"
+dependencies = [
+ "unicode-ident",
+]
+
+[[package]]
+name = "quote"
+version = "1.0.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5fe8a65d69dd0808184ebb5f836ab526bb259db23c657efa38711b1072ee47f0"
+dependencies = [
+ "proc-macro2",
+]
+
+[[package]]
+name = "regex-automata"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "39354c10dd07468c2e73926b23bb9c2caca74c5501e38a35da70406f1d923310"
+
+[[package]]
+name = "serde"
+version = "1.0.185"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "be9b6f69f1dfd54c3b568ffa45c310d6973a5e5148fd40cf515acaf38cf5bc31"
+dependencies = [
+ "serde_derive",
+]
+
+[[package]]
+name = "serde_derive"
+version = "1.0.185"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dc59dfdcbad1437773485e0367fea4b090a2e0a16d9ffc46af47764536a298ec"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "sha1_smol"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ae1a47186c03a32177042e55dbc5fd5aee900b8e0069a8d70fba96a9375cd012"
+
+[[package]]
+name = "smallvec"
+version = "1.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "62bb4feee49fdd9f707ef802e22365a35de4b7b299de4763d44bfea899442ff9"
+
+[[package]]
+name = "syn"
+version = "2.0.29"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c324c494eba9d92503e6f1ef2e6df781e78f6a7705a0202d9801b198807d518a"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-ident",
+]
+
+[[package]]
+name = "thiserror"
+version = "1.0.44"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "611040a08a0439f8248d1990b111c95baa9c704c805fa1f62104b39655fd7f90"
+dependencies = [
+ "thiserror-impl",
+]
+
+[[package]]
+name = "thiserror-impl"
+version = "1.0.44"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "090198534930841fab3a5d1bb637cde49e339654e606195f8d9c76eeb081dc96"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "time"
+version = "0.3.23"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "59e399c068f43a5d116fedaf73b203fa4f9c519f17e2b34f63221d3792f81446"
+dependencies = [
+ "itoa",
+ "libc",
+ "num_threads",
+ "serde",
+ "time-core",
+ "time-macros",
+]
+
+[[package]]
+name = "time-core"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7300fbefb4dadc1af235a9cef3737cea692a9d97e1b9cbcd4ebdae6f8868e6fb"
+
+[[package]]
+name = "time-macros"
+version = "0.2.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "96ba15a897f3c86766b757e5ac7221554c6750054d74d5b28844fce5fb36a6c4"
+dependencies = [
+ "time-core",
+]
+
+[[package]]
+name = "unicode-bom"
+version = "2.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "98e90c70c9f0d4d1ee6d0a7d04aa06cb9bbd53d8cfbdd62a0269a7c2eb640552"
+
+[[package]]
+name = "unicode-ident"
+version = "1.0.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "301abaae475aa91687eb82514b328ab47a211a533026cb25fc3e519b86adfc3c"
+
+[[package]]
+name = "windows-sys"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
+dependencies = [
+ "windows-targets",
+]
+
+[[package]]
+name = "windows-targets"
+version = "0.48.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "05d4b17490f70499f20b9e791dcf6a299785ce8af4d709018206dc5b4953e95f"
+dependencies = [
+ "windows_aarch64_gnullvm",
+ "windows_aarch64_msvc",
+ "windows_i686_gnu",
+ "windows_i686_msvc",
+ "windows_x86_64_gnu",
+ "windows_x86_64_gnullvm",
+ "windows_x86_64_msvc",
+]
+
+[[package]]
+name = "windows_aarch64_gnullvm"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc"
+
+[[package]]
+name = "windows_aarch64_msvc"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3"
+
+[[package]]
+name = "windows_i686_gnu"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241"
+
+[[package]]
+name = "windows_i686_msvc"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00"
+
+[[package]]
+name = "windows_x86_64_gnu"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1"
+
+[[package]]
+name = "windows_x86_64_gnullvm"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953"
+
+[[package]]
+name = "windows_x86_64_msvc"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a"
+
+[[package]]
+name = "winnow"
+version = "0.5.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d09770118a7eb1ccaf4a594a221334119a44a814fcb0d31c5b85e83e97227a97"
+dependencies = [
+ "memchr",
+]
diff --git a/vendor/gix-filter/Cargo.toml b/vendor/gix-filter/Cargo.toml
new file mode 100644
index 000000000..3d8edccbe
--- /dev/null
+++ b/vendor/gix-filter/Cargo.toml
@@ -0,0 +1,65 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies.
+#
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
+
+[package]
+edition = "2021"
+rust-version = "1.65"
+name = "gix-filter"
+version = "0.5.0"
+authors = ["Sebastian Thiel <sebastian.thiel@icloud.com>"]
+description = "A crate of the gitoxide project implementing git filters"
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/Byron/gitoxide"
+
+[lib]
+doctest = false
+
+[dependencies.bstr]
+version = "1.5.0"
+features = ["std"]
+default-features = false
+
+[dependencies.encoding_rs]
+version = "0.8.32"
+
+[dependencies.gix-attributes]
+version = "^0.19.0"
+
+[dependencies.gix-command]
+version = "^0.2.9"
+
+[dependencies.gix-hash]
+version = "^0.13.0"
+
+[dependencies.gix-object]
+version = "^0.37.0"
+
+[dependencies.gix-packetline]
+version = "^0.16.6"
+package = "gix-packetline-blocking"
+
+[dependencies.gix-path]
+version = "^0.10.0"
+
+[dependencies.gix-quote]
+version = "^0.4.7"
+
+[dependencies.gix-trace]
+version = "^0.1.3"
+
+[dependencies.smallvec]
+version = "1.10.0"
+
+[dependencies.thiserror]
+version = "1.0.38"
+
+[dev-dependencies.once_cell]
+version = "1.18.0"
diff --git a/vendor/gix-filter/LICENSE-APACHE b/vendor/gix-filter/LICENSE-APACHE
new file mode 100644
index 000000000..a51f59a06
--- /dev/null
+++ b/vendor/gix-filter/LICENSE-APACHE
@@ -0,0 +1,191 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ Copyright 2018-2021 Sebastian Thiel, and [contributors](https://github.com/byron/gitoxide/contributors)
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/vendor/gix-filter/LICENSE-MIT b/vendor/gix-filter/LICENSE-MIT
new file mode 100644
index 000000000..b58e818f1
--- /dev/null
+++ b/vendor/gix-filter/LICENSE-MIT
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2018-2021 Sebastian Thiel, and [contributors](https://github.com/byron/gitoxide/contributors).
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/gix-filter/examples/arrow.rs b/vendor/gix-filter/examples/arrow.rs
new file mode 100644
index 000000000..1a0c707f9
--- /dev/null
+++ b/vendor/gix-filter/examples/arrow.rs
@@ -0,0 +1,207 @@
+use std::{
+ io::{stdin, stdout, Read, Write},
+ time::Duration,
+};
+
+use bstr::{ByteSlice, ByteVec};
+use gix_filter::driver::process;
+
+static PREFIX: &str = "➡";
+
+fn main() -> Result<(), Box<dyn std::error::Error>> {
+ let mut args = std::env::args();
+ let sub_command = args.nth(1).ok_or("Need sub-command")?;
+ let next_arg = args.next(); // possibly %f
+ let needs_failure = next_arg.as_deref().map_or(false, |file| file.ends_with("fail"));
+ if needs_failure {
+ panic!("failure requested for {sub_command}");
+ }
+
+ match sub_command.as_str() {
+ "process" => {
+ let disallow_delay = next_arg.as_deref().map_or(false, |arg| arg == "disallow-delay");
+ let mut srv = gix_filter::driver::process::Server::handshake(
+ stdin(),
+ stdout(),
+ "git-filter",
+ &mut |versions| versions.contains(&2).then_some(2),
+ if disallow_delay {
+ &["clean", "smudge"]
+ } else {
+ &["clean", "smudge", "delay"]
+ },
+ )?;
+
+ let mut next_smudge_aborts = false;
+ let mut next_smudge_fails_permanently = false; // a test validates that we don't actually hang
+ let mut delayed = Vec::new();
+ while let Some(mut request) = srv.next_request()? {
+ let needs_failure = request
+ .meta
+ .iter()
+ .find_map(|(key, value)| (key == "pathname").then_some(value))
+ .map_or(false, |path| path.ends_with(b"fail"));
+ let pathname = request
+ .meta
+ .iter()
+ .find_map(|(key, value)| (key == "pathname").then(|| value.clone()));
+ if needs_failure {
+ panic!("process failure requested: {:?}", request.meta);
+ }
+ let can_delay = request
+ .meta
+ .iter()
+ .any(|(key, value)| key == "can-delay" && value == "1");
+ match request.command.as_str() {
+ "clean" => {
+ let mut buf = Vec::new();
+ request.as_read().read_to_end(&mut buf)?;
+ request.write_status(if can_delay {
+ process::Status::delayed()
+ } else {
+ process::Status::success()
+ })?;
+
+ let lines = if let Some(delayed_lines) = buf
+ .is_empty()
+ .then(|| {
+ delayed
+ .iter()
+ .position(|(cmd, path, _)| {
+ *cmd == request.command.as_str() && Some(path) == pathname.as_ref()
+ })
+ .map(|pos| delayed.remove(pos).2)
+ })
+ .flatten()
+ {
+ delayed_lines
+ } else {
+ let mut lines = Vec::new();
+ for mut line in buf.lines_with_terminator() {
+ if line.starts_with(PREFIX.as_bytes()) {
+ line = &line[PREFIX.len()..];
+ }
+ lines.push_str(line);
+ }
+ lines
+ };
+ if can_delay {
+ delayed.push(("clean", pathname.expect("needed for delayed operation"), lines));
+ } else {
+ request.as_write().write_all(&lines)?;
+ request.write_status(process::Status::Previous)?;
+ }
+ }
+ "smudge" => {
+ let mut buf = Vec::new();
+ request.as_read().read_to_end(&mut buf)?;
+ let status = if next_smudge_aborts {
+ next_smudge_aborts = false;
+ process::Status::abort()
+ } else if next_smudge_fails_permanently {
+ process::Status::exit()
+ } else if can_delay {
+ process::Status::delayed()
+ } else {
+ process::Status::success()
+ };
+ request.write_status(status)?;
+
+ let lines = if let Some(delayed_lines) = buf
+ .is_empty()
+ .then(|| {
+ delayed
+ .iter()
+ .position(|(cmd, path, _)| {
+ *cmd == request.command.as_str() && Some(path) == pathname.as_ref()
+ })
+ .map(|pos| delayed.remove(pos).2)
+ })
+ .flatten()
+ {
+ delayed_lines
+ } else {
+ let mut lines = Vec::new();
+ for line in buf.lines_with_terminator() {
+ if !line.starts_with(PREFIX.as_bytes()) {
+ lines.push_str(PREFIX.as_bytes());
+ }
+ lines.push_str(line);
+ }
+ lines
+ };
+
+ if can_delay {
+ delayed.push(("smudge", pathname.expect("needed for delayed operation"), lines));
+ } else {
+ request.as_write().write_all(&lines)?;
+ request.write_status(process::Status::Previous)?;
+ }
+ }
+ "list_available_blobs" => {
+ {
+ let mut out = request.as_write();
+ let mut last_cmd = None;
+ let mut buf = Vec::<u8>::new();
+ for (cmd, path, _) in &delayed {
+ if last_cmd.get_or_insert(*cmd) != cmd {
+ panic!("the API doesn't support mixing cmds as paths might not be unique anymore")
+ }
+ buf.clear();
+ buf.push_str("pathname=");
+ buf.extend_from_slice(path);
+ out.write_all(&buf)?
+ }
+ }
+ request.write_status(process::Status::success())?;
+ }
+ "wait-1-s" => {
+ std::io::copy(&mut request.as_read(), &mut std::io::sink())?;
+ request.write_status(process::Status::success())?;
+ std::thread::sleep(Duration::from_secs(1));
+ }
+ "next-smudge-aborts" => {
+ std::io::copy(&mut request.as_read(), &mut std::io::sink())?;
+ request.write_status(process::Status::success())?;
+ next_smudge_aborts = true;
+ }
+ "next-invocation-returns-strange-status-and-smudge-fails-permanently" => {
+ std::io::copy(&mut request.as_read(), &mut std::io::sink())?;
+ request.write_status(process::Status::success())?;
+ next_smudge_fails_permanently = true;
+ }
+ unknown => panic!("Unknown capability requested: {unknown}"),
+ }
+ }
+ }
+ // simple filters actually don't support streaming - they have to first read all input, then produce all output,
+ // and can't mix reading stdin and writing stdout, as `git` (or `gitoxide`) doesn't read the output while
+ // writing the input.
+ "clean" => {
+ let mut stdin = stdin().lock();
+ let mut stdout = stdout().lock();
+ let mut buf = Vec::new();
+ std::io::copy(&mut stdin, &mut buf)?;
+ for mut line in buf.lines_with_terminator() {
+ if line.starts_with(PREFIX.as_bytes()) {
+ line = &line[PREFIX.len()..];
+ }
+ stdout.write_all(line).map(|_| true)?;
+ }
+ }
+ "smudge" => {
+ let mut stdin = stdin().lock();
+ let mut stdout = stdout().lock();
+ let mut buf = Vec::new();
+ std::io::copy(&mut stdin, &mut buf)?;
+ for line in buf.lines_with_terminator() {
+ if !line.starts_with(PREFIX.as_bytes()) {
+ stdout.write_all(PREFIX.as_bytes())?;
+ }
+ stdout.write_all(line).map(|_| true)?;
+ }
+ }
+ unknown => panic!("Unknown sub-command: {unknown}"),
+ }
+ Ok(())
+}
diff --git a/vendor/gix-filter/src/driver/apply.rs b/vendor/gix-filter/src/driver/apply.rs
new file mode 100644
index 000000000..576369a07
--- /dev/null
+++ b/vendor/gix-filter/src/driver/apply.rs
@@ -0,0 +1,242 @@
+use std::collections::HashMap;
+
+use bstr::{BStr, BString};
+
+use crate::{
+ driver,
+ driver::{process, process::client::invoke, Operation, Process, State},
+ Driver,
+};
+
+/// What to do if delay is supported by a process filter.
+#[derive(Default, Debug, Copy, Clone)]
+pub enum Delay {
+ /// Use delayed processing for this entry.
+ ///
+ /// Note that it's up to the filter to determine whether or not the processing should be delayed.
+ Allow,
+ /// Do not delay the processing, and force it to happen immediately. In this case, no delayed processing will occur
+ /// even if the filter supports it.
+ ///
+ /// This is the default as it requires no special precautions to be taken by the caller as
+ /// outputs will be produced immediately.
+ #[default]
+ Forbid,
+}
+
+/// The error returned by [State::apply()][super::State::apply()].
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum Error {
+ #[error(transparent)]
+ Init(#[from] driver::init::Error),
+ #[error("Could not write entire object to driver")]
+ WriteSource(#[from] std::io::Error),
+ #[error("Filter process delayed an entry even though that was not requested")]
+ DelayNotAllowed,
+ #[error("Failed to invoke '{command}' command")]
+ ProcessInvoke {
+ source: process::client::invoke::Error,
+ command: String,
+ },
+ #[error("The invoked command '{command}' in process indicated an error: {status:?}")]
+ ProcessStatus {
+ status: driver::process::Status,
+ command: String,
+ },
+}
+
+/// Additional information for use in the [`State::apply()`] method.
+#[derive(Debug, Copy, Clone)]
+pub struct Context<'a, 'b> {
+ /// The repo-relative path, using slashes as separators, of the entry currently being processed.
+ pub rela_path: &'a BStr,
+ /// The name of the reference that `HEAD` is pointing to. It's passed to `process` filters if present.
+ pub ref_name: Option<&'b BStr>,
+ /// The root-level tree that contains the current entry directly or indirectly, or the commit owning the tree (if available).
+ ///
+ /// This is passed to `process` filters if present.
+ pub treeish: Option<gix_hash::ObjectId>,
+ /// The actual blob-hash of the data we are processing. It's passed to `process` filters if present.
+ ///
+ /// Note that this hash might be different from the `$Id$` of the respective `ident` filter, as the latter generates the hash itself.
+ pub blob: Option<gix_hash::ObjectId>,
+}
+
+impl State {
+ /// Apply `operation` of `driver` to the bytes read from `src` and return a reader to immediately consume the output
+ /// produced by the filter. `rela_path` is the repo-relative path of the entry to handle.
+ /// It's possible that the filter stays inactive, in which case the `src` isn't consumed and has to be used by the caller.
+ ///
+ /// Each call to this method will cause the corresponding filter to be invoked unless `driver` indicates a `process` filter,
+ /// which is only launched once and maintained using this state.
+ ///
+ /// Note that it's not an error if there is no filter process for `operation` or if a long-running process doesn't support
+ /// the desired capability.
+ ///
+ /// ### Deviation
+ ///
+ /// If a long running process returns the 'abort' status after receiving the data, it will be removed similar to how `git` does it.
+ /// However, if it delivers an unsuccessful error status later, it will not be removed; only the error is reported.
+ /// If any other non-'error' status is received, the process will be stopped. But that doesn't happen if such a status is received
+ /// after reading the filtered result.
+ pub fn apply<'a>(
+ &'a mut self,
+ driver: &Driver,
+ src: &mut impl std::io::Read,
+ operation: Operation,
+ ctx: Context<'_, '_>,
+ ) -> Result<Option<Box<dyn std::io::Read + 'a>>, Error> {
+ match self.apply_delayed(driver, src, operation, Delay::Forbid, ctx)? {
+ Some(MaybeDelayed::Delayed(_)) => {
+ unreachable!("we forbid delaying the entry")
+ }
+ Some(MaybeDelayed::Immediate(read)) => Ok(Some(read)),
+ None => Ok(None),
+ }
+ }
+
+ /// Like [`apply()`][Self::apply()], but use `delay` to determine if the filter result may be delayed or not.
+ ///
+ /// Poll [`list_delayed_paths()`][Self::list_delayed_paths()] until it is empty and query the available paths again.
+ /// Note that even though it's possible, the API assumes that commands aren't mixed when delays are allowed.
+ pub fn apply_delayed<'a>(
+ &'a mut self,
+ driver: &Driver,
+ src: &mut impl std::io::Read,
+ operation: Operation,
+ delay: Delay,
+ ctx: Context<'_, '_>,
+ ) -> Result<Option<MaybeDelayed<'a>>, Error> {
+ match self.maybe_launch_process(driver, operation, ctx.rela_path)? {
+ Some(Process::SingleFile { mut child, command }) => {
+ std::io::copy(src, &mut child.stdin.take().expect("configured"))?;
+ Ok(Some(MaybeDelayed::Immediate(Box::new(ReadFilterOutput {
+ inner: child.stdout.take(),
+ child: driver.required.then_some((child, command)),
+ }))))
+ }
+ Some(Process::MultiFile { client, key }) => {
+ let command = operation.as_str();
+ if !client.capabilities().contains(command) {
+ return Ok(None);
+ }
+
+ let invoke_result = client.invoke(
+ command,
+ &mut [
+ ("pathname", Some(ctx.rela_path.to_owned())),
+ ("ref", ctx.ref_name.map(ToOwned::to_owned)),
+ ("treeish", ctx.treeish.map(|id| id.to_hex().to_string().into())),
+ ("blob", ctx.blob.map(|id| id.to_hex().to_string().into())),
+ (
+ "can-delay",
+ match delay {
+ Delay::Allow if client.capabilities().contains("delay") => Some("1".into()),
+ Delay::Forbid | Delay::Allow => None,
+ },
+ ),
+ ]
+ .into_iter()
+ .filter_map(|(key, value)| value.map(|v| (key, v))),
+ src,
+ );
+ let status = match invoke_result {
+ Ok(status) => status,
+ Err(err) => {
+ let invoke::Error::Io(io_err) = &err;
+ handle_io_err(io_err, &mut self.running, key.0.as_ref());
+ return Err(Error::ProcessInvoke {
+ command: command.into(),
+ source: err,
+ });
+ }
+ };
+
+ if status.is_delayed() {
+ if matches!(delay, Delay::Forbid) {
+ return Err(Error::DelayNotAllowed);
+ }
+ Ok(Some(MaybeDelayed::Delayed(key)))
+ } else if status.is_success() {
+ // TODO: find a way to not have to do the 'borrow-dance'.
+ let client = self.running.remove(&key.0).expect("present for borrowcheck dance");
+ self.running.insert(key.0.clone(), client);
+ let client = self.running.get_mut(&key.0).expect("just inserted");
+
+ Ok(Some(MaybeDelayed::Immediate(Box::new(client.as_read()))))
+ } else {
+ let message = status.message().unwrap_or_default();
+ match message {
+ "abort" => {
+ client.capabilities_mut().remove(command);
+ }
+ "error" => {}
+ _strange => {
+ let client = self.running.remove(&key.0).expect("we definitely have it");
+ client.into_child().kill().ok();
+ }
+ }
+ Err(Error::ProcessStatus {
+ command: command.into(),
+ status,
+ })
+ }
+ }
+ None => Ok(None),
+ }
+ }
+}
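+
+// Editor's sketch (not part of the upstream sources): how `State::apply()` above is typically
+// driven, assuming `driver` is a `Driver` previously looked up via attributes/configuration and
+// `src` is any `std::io::Read` yielding the content to filter:
+//
+//     let mut state = State::default();
+//     let maybe_filtered = state.apply(
+//         &driver,
+//         &mut src,
+//         Operation::Clean,
+//         Context { rela_path: "a/file.txt".into(), ref_name: None, treeish: None, blob: None },
+//     )?;
+//     if let Some(mut read) = maybe_filtered {
+//         // consume `read` in full; otherwise `src` was left untouched and must be used directly.
+//         std::io::copy(&mut read, &mut std::io::sink())?;
+//     }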
+
+/// A type to represent delayed or immediate apply-filter results.
+pub enum MaybeDelayed<'a> {
+ /// Using the delayed protocol, this entry has been sent to a long-running process and needs to be
+ /// checked for again, later, using the [`driver::Key`] to refer to the filter who owes a response.
+ ///
+ /// Note that the path to the entry is also needed to obtain the filtered result later.
+ Delayed(driver::Key),
+ /// The filtered result can be read from the contained reader right away.
+ ///
+ /// Note that it must be consumed in full or till a read error occurs.
+ Immediate(Box<dyn std::io::Read + 'a>),
+}
+
+/// A utility type to facilitate streaming the output of a filter process.
+struct ReadFilterOutput {
+ inner: Option<std::process::ChildStdout>,
+ /// The child is present if we need its exit code to be positive.
+ child: Option<(std::process::Child, std::process::Command)>,
+}
+
+pub(crate) fn handle_io_err(err: &std::io::Error, running: &mut HashMap<BString, process::Client>, process: &BStr) {
+ if matches!(
+ err.kind(),
+ std::io::ErrorKind::BrokenPipe | std::io::ErrorKind::UnexpectedEof
+ ) {
+ running.remove(process).expect("present or we wouldn't be here");
+ }
+}
+
+impl std::io::Read for ReadFilterOutput {
+ fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
+ match self.inner.as_mut() {
+ Some(inner) => {
+ let num_read = inner.read(buf)?;
+ if num_read == 0 {
+ self.inner.take();
+ if let Some((mut child, cmd)) = self.child.take() {
+ let status = child.wait()?;
+ if !status.success() {
+ return Err(std::io::Error::new(
+ std::io::ErrorKind::Other,
+ format!("Driver process {cmd:?} failed"),
+ ));
+ }
+ }
+ }
+ Ok(num_read)
+ }
+ None => Ok(0),
+ }
+ }
+}
diff --git a/vendor/gix-filter/src/driver/delayed.rs b/vendor/gix-filter/src/driver/delayed.rs
new file mode 100644
index 000000000..bb525a23b
--- /dev/null
+++ b/vendor/gix-filter/src/driver/delayed.rs
@@ -0,0 +1,155 @@
+use bstr::{BStr, BString};
+
+use crate::{
+ driver,
+ driver::{apply::handle_io_err, Operation, State},
+};
+
+///
+pub mod list {
+ use crate::driver;
+
+ /// The error returned by [State::list_delayed_paths()][super::State::list_delayed_paths()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("Could not get process named '{}' which should be running and tracked", wanted.0)]
+ ProcessMissing { wanted: driver::Key },
+ #[error("Failed to run 'list_available_blobs' command")]
+ ProcessInvoke(#[from] driver::process::client::invoke::without_content::Error),
+ #[error("The invoked command 'list_available_blobs' in process indicated an error: {status:?}")]
+ ProcessStatus { status: driver::process::Status },
+ }
+}
+
+///
+pub mod fetch {
+ use crate::driver;
+
+ /// The error returned by [State::fetch_delayed()][super::State::fetch_delayed()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("Could not get process named '{}' which should be running and tracked", wanted.0)]
+ ProcessMissing { wanted: driver::Key },
+ #[error("Failed to run '{command}' command")]
+ ProcessInvoke {
+ command: String,
+ source: driver::process::client::invoke::Error,
+ },
+ #[error("The invoked command '{command}' in process indicated an error: {status:?}")]
+ ProcessStatus {
+ status: driver::process::Status,
+ command: String,
+ },
+ }
+}
+
+impl State {
+ /// Return a list of delayed paths for `process` that can then be obtained with [`fetch_delayed()`][Self::fetch_delayed()].
+ ///
+ /// A process abiding by the protocol will eventually list all previously delayed paths for any invoked command, or
+ /// signals that it is done with all delayed paths by returning an empty list.
+ /// It's up to the caller to validate these assumptions.
+ ///
+ /// ### Error Handling
+ ///
+ /// Usually if the process sends the "abort" status, we will not use a certain capability again. Here it's unclear what capability
+ /// that is and what to do, so we leave the process running and do nothing else (just like `git`).
+ pub fn list_delayed_paths(&mut self, process: &driver::Key) -> Result<Vec<BString>, list::Error> {
+ let client = self
+ .running
+ .get_mut(&process.0)
+ .ok_or_else(|| list::Error::ProcessMissing {
+ wanted: process.clone(),
+ })?;
+
+ let mut out = Vec::new();
+ let result = client.invoke_without_content("list_available_blobs", &mut None.into_iter(), &mut |line| {
+ if let Some(path) = line.strip_prefix(b"pathname=") {
+ out.push(path.into())
+ }
+ });
+ let status = match result {
+ Ok(res) => res,
+ Err(err) => {
+ if let driver::process::client::invoke::without_content::Error::Io(err) = &err {
+ handle_io_err(err, &mut self.running, process.0.as_ref());
+ }
+ return Err(err.into());
+ }
+ };
+
+ if status.is_success() {
+ Ok(out)
+ } else {
+ let message = status.message().unwrap_or_default();
+ match message {
+ "error" | "abort" => {}
+ _strange => {
+ let client = self.running.remove(&process.0).expect("we definitely have it");
+ client.into_child().kill().ok();
+ }
+ }
+ Err(list::Error::ProcessStatus { status })
+ }
+ }
+
+ /// Given a `process` and a `path` (as previously returned by [list_delayed_paths()][Self::list_delayed_paths()]), return
+ /// a reader to stream the filtered result. Note that `operation` must match the original operation that produced the delayed result
+ /// or the long-running process might not know the path, depending on its implementation.
+ pub fn fetch_delayed(
+ &mut self,
+ process: &driver::Key,
+ path: &BStr,
+ operation: Operation,
+ ) -> Result<impl std::io::Read + '_, fetch::Error> {
+ let client = self
+ .running
+ .get_mut(&process.0)
+ .ok_or_else(|| fetch::Error::ProcessMissing {
+ wanted: process.clone(),
+ })?;
+
+ let result = client.invoke(
+ operation.as_str(),
+ &mut [("pathname", path.to_owned())].into_iter(),
+ &mut &b""[..],
+ );
+ let status = match result {
+ Ok(status) => status,
+ Err(err) => {
+ let driver::process::client::invoke::Error::Io(io_err) = &err;
+ handle_io_err(io_err, &mut self.running, process.0.as_ref());
+ return Err(fetch::Error::ProcessInvoke {
+ command: operation.as_str().into(),
+ source: err,
+ });
+ }
+ };
+ if status.is_success() {
+ // TODO: find a way to not have to do the 'borrow-dance'.
+ let client = self.running.remove(&process.0).expect("present for borrowcheck dance");
+ self.running.insert(process.0.clone(), client);
+ let client = self.running.get_mut(&process.0).expect("just inserted");
+
+ Ok(client.as_read())
+ } else {
+ let message = status.message().unwrap_or_default();
+ match message {
+ "abort" => {
+ client.capabilities_mut().remove(operation.as_str());
+ }
+ "error" => {}
+ _strange => {
+ let client = self.running.remove(&process.0).expect("we definitely have it");
+ client.into_child().kill().ok();
+ }
+ }
+ Err(fetch::Error::ProcessStatus {
+ command: operation.as_str().into(),
+ status,
+ })
+ }
+ }
+}
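+
+// Editor's sketch (not part of the upstream sources) of the delayed-filtering workflow combining
+// `apply_delayed()`, `list_delayed_paths()` and `fetch_delayed()`; `key` is the `driver::Key`
+// obtained from a `MaybeDelayed::Delayed(key)` result:
+//
+//     // 1) submit all entries with `Delay::Allow`; some may come back as `MaybeDelayed::Delayed(key)`.
+//     // 2) repeatedly ask the process which delayed paths are ready, until the list is empty:
+//     let ready = state.list_delayed_paths(&key)?;
+//     // 3) fetch the filtered result for each ready path, using the original operation:
+//     for path in &ready {
+//         let mut read = state.fetch_delayed(&key, path.as_ref(), Operation::Smudge)?;
+//         std::io::copy(&mut read, &mut std::io::sink())?;
+//     }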
diff --git a/vendor/gix-filter/src/driver/init.rs b/vendor/gix-filter/src/driver/init.rs
new file mode 100644
index 000000000..c7fab986e
--- /dev/null
+++ b/vendor/gix-filter/src/driver/init.rs
@@ -0,0 +1,106 @@
+use std::process::Stdio;
+
+use bstr::{BStr, BString};
+
+use crate::{
+ driver,
+ driver::{process, substitute_f_parameter, Operation, Process, State},
+ Driver,
+};
+
+/// The error returned by [State::maybe_launch_process()][super::State::maybe_launch_process()].
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum Error {
+ #[error("Failed to spawn driver: {command:?}")]
+ SpawnCommand {
+ source: std::io::Error,
+ command: std::process::Command,
+ },
+ #[error("Process handshake with command {command:?} failed")]
+ ProcessHandshake {
+ source: process::client::handshake::Error,
+ command: std::process::Command,
+ },
+}
+
+impl State {
+ /// Obtain a process as defined in `driver` suitable for a given `operation`. `rela_path` may be used to substitute the current
+ /// file for use in the invoked `SingleFile` process.
+ ///
+ /// Note that if a long-running process is defined, the `operation` isn't relevant and capabilities are to be checked by the caller.
+ pub fn maybe_launch_process(
+ &mut self,
+ driver: &Driver,
+ operation: Operation,
+ rela_path: &BStr,
+ ) -> Result<Option<Process<'_>>, Error> {
+ match driver.process.as_ref() {
+ Some(process) => {
+ let client = match self.running.remove(process) {
+ Some(c) => c,
+ None => {
+ let (child, cmd) = spawn_driver(process.clone())?;
+ process::Client::handshake(child, "git-filter", &[2], &["clean", "smudge", "delay"]).map_err(
+ |err| Error::ProcessHandshake {
+ source: err,
+ command: cmd,
+ },
+ )?
+ }
+ };
+
+ // TODO: find a way to not have to do this 'borrow-dance'.
+ // this strangeness is to work around the borrow checker, which otherwise won't let us return a reader. Quite sad :/.
+ // One would want to `get_mut()` or insert essentially, but it won't work.
+ self.running.insert(process.clone(), client);
+ let client = self.running.get_mut(process).expect("just inserted");
+
+ Ok(Some(Process::MultiFile {
+ client,
+ key: driver::Key(process.to_owned()),
+ }))
+ }
+ None => {
+ let cmd = match operation {
+ Operation::Clean => driver
+ .clean
+ .as_ref()
+ .map(|cmd| substitute_f_parameter(cmd.as_ref(), rela_path)),
+
+ Operation::Smudge => driver
+ .smudge
+ .as_ref()
+ .map(|cmd| substitute_f_parameter(cmd.as_ref(), rela_path)),
+ };
+
+ let cmd = match cmd {
+ Some(cmd) => cmd,
+ None => return Ok(None),
+ };
+
+ let (child, command) = spawn_driver(cmd)?;
+ Ok(Some(Process::SingleFile { child, command }))
+ }
+ }
+ }
+}
+
+fn spawn_driver(cmd: BString) -> Result<(std::process::Child, std::process::Command), Error> {
+ let mut cmd: std::process::Command = gix_command::prepare(gix_path::from_bstr(cmd).into_owned())
+ .with_shell()
+ .stdin(Stdio::piped())
+ .stdout(Stdio::piped())
+ .stderr(Stdio::inherit())
+ .into();
+ let child = match cmd.spawn() {
+ Ok(child) => child,
+ Err(err) => {
+ return Err(Error::SpawnCommand {
+ source: err,
+ command: cmd,
+ })
+ }
+ };
+ Ok((child, cmd))
+}
diff --git a/vendor/gix-filter/src/driver/mod.rs b/vendor/gix-filter/src/driver/mod.rs
new file mode 100644
index 000000000..326adbd27
--- /dev/null
+++ b/vendor/gix-filter/src/driver/mod.rs
@@ -0,0 +1,99 @@
+use std::collections::HashMap;
+
+use bstr::{BStr, BString, ByteSlice, ByteVec};
+
+///
+pub mod init;
+
+///
+pub mod apply;
+
+///
+pub mod shutdown;
+
+///
+pub mod delayed;
+
+///
+pub mod process;
+
+/// A literal driver process.
+pub enum Process<'a> {
+ /// A spawned process to handle a single file.
+ SingleFile {
+ /// The child to use as handle for sending and receiving data.
+ child: std::process::Child,
+ /// The launched command that produced the `child` in the first place
+ command: std::process::Command,
+ },
+ /// A multi-file process which is launched once to handle one or more files by using a custom IO protocol.
+ MultiFile {
+ /// A handle to interact with the long-running process.
+ client: &'a mut process::Client,
+ /// A way to refer to the `client` later if needed.
+ key: Key,
+ },
+}
+
+/// The kind of operation to apply using a driver
+#[derive(Debug, Copy, Clone)]
+pub enum Operation {
+ /// Turn worktree content into content suitable for storage in `git`.
+ Clean,
+ /// Turn content stored in `git` to content suitable for the working tree.
+ Smudge,
+}
+
+impl Operation {
+ /// Return a string that identifies the operation. This happens to be the command name used in long-running processes as well.
+ pub fn as_str(&self) -> &'static str {
+ match self {
+ Operation::Clean => "clean",
+ Operation::Smudge => "smudge",
+ }
+ }
+}
+
+/// State required to handle `process` filters, which are running until all their work is done.
+///
+/// These can be significantly faster on some platforms as they are launched only once, while supporting asynchronous processing.
+///
+/// ### Lifecycle
+///
+/// Note that [`shutdown()`][State::shutdown()] must be called to finalize long-running processes.
+/// Failing to do so will naturally shut them down by terminating their pipes, but finishing explicitly
+/// allows waiting for the processes as well.
+#[derive(Default)]
+pub struct State {
+ /// The list of currently running processes. These are preferred over simple clean-and-smudge programs.
+ ///
+/// Note that these processes are expected to shut down once their stdin/stdout are dropped, so nothing else
+ /// needs to be done to clean them up after drop.
+ running: HashMap<BString, process::Client>,
+}
+
+impl Clone for State {
+ fn clone(&self) -> Self {
+ State {
+ running: Default::default(),
+ }
+ }
+}
+
+/// A way to reference a running multi-file filter process for later acquisition of delayed output.
+#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
+pub struct Key(BString);
+
+/// Substitute `path`, as a shell-safe version, into `cmd`, which could be something like `cmd something %f`.
+fn substitute_f_parameter(cmd: &BStr, path: &BStr) -> BString {
+ let mut buf: BString = Vec::with_capacity(cmd.len()).into();
+
+ let mut ofs = 0;
+ while let Some(pos) = cmd[ofs..].find(b"%f") {
+ buf.push_str(&cmd[..ofs + pos]);
+ buf.extend_from_slice(&gix_quote::single(path));
+ ofs += pos + 2;
+ }
+ buf.push_str(&cmd[ofs..]);
+ buf
+}
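+
+// Editor's sketch (not part of the upstream sources): what the `%f` substitution above produces,
+// assuming `gix_quote::single()` wraps the path in single quotes to make it shell-safe:
+//
+//     let cmd = substitute_f_parameter("undo-arrows %f".into(), "path with spaces.txt".into());
+//     // `cmd` is now roughly: undo-arrows 'path with spaces.txt'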
diff --git a/vendor/gix-filter/src/driver/process/client.rs b/vendor/gix-filter/src/driver/process/client.rs
new file mode 100644
index 000000000..25635ea0d
--- /dev/null
+++ b/vendor/gix-filter/src/driver/process/client.rs
@@ -0,0 +1,309 @@
+use std::{collections::HashSet, io::Write, str::FromStr};
+
+use bstr::{BStr, BString, ByteVec};
+
+use crate::driver::{
+ process,
+ process::{Capabilities, Client, PacketlineReader},
+};
+
+///
+pub mod handshake {
+ /// The error returned by [Client::handshake()][super::Client::handshake()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("Failed to read or write to the process")]
+ Io(#[from] std::io::Error),
+ #[error("{msg} '{actual}'")]
+ Protocol { msg: String, actual: String },
+ #[error("The server sent the '{name}' capability which isn't among the ones we desire can support")]
+ UnsupportedCapability { name: String },
+ }
+}
+
+///
+pub mod invoke {
+ /// The error returned by [Client::invoke()][super::Client::invoke()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("Failed to read or write to the process")]
+ Io(#[from] std::io::Error),
+ }
+
+ ///
+ pub mod without_content {
+ /// The error returned by [Client::invoke_without_content()][super::super::Client::invoke_without_content()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("Failed to read or write to the process")]
+ Io(#[from] std::io::Error),
+ #[error(transparent)]
+ PacketlineDecode(#[from] gix_packetline::decode::Error),
+ }
+
+ impl From<super::Error> for Error {
+ fn from(value: super::Error) -> Self {
+ match value {
+ super::Error::Io(err) => Error::Io(err),
+ }
+ }
+ }
+ }
+}
+
+/// Protocol implementation
+impl Client {
+ /// Given a spawned `process` as created from `cmd`, use the 'long-running-process' protocol to send `welcome-prefix` and supported
+ /// `versions`, along with the `desired_capabilities`, and perform the handshake to negotiate a version to use along with
+ /// obtaining supported capabilities, which may be a sub-set of the desired capabilities.
+ pub fn handshake(
+ mut process: std::process::Child,
+ welcome_prefix: &str,
+ versions: &[usize],
+ desired_capabilities: &[&str],
+ ) -> Result<Self, handshake::Error> {
+ let mut out = gix_packetline::Writer::new(process.stdin.take().expect("configured stdin when spawning"));
+ out.write_all(format!("{welcome_prefix}-client").as_bytes())?;
+ for version in versions {
+ out.write_all(format!("version={version}").as_bytes())?;
+ }
+ gix_packetline::encode::flush_to_write(out.inner_mut())?;
+ out.flush()?;
+
+ let mut input = gix_packetline::StreamingPeekableIter::new(
+ process.stdout.take().expect("configured stdout when spawning"),
+ &[gix_packetline::PacketLineRef::Flush],
+ );
+ let mut read = input.as_read();
+ let mut buf = String::new();
+ read.read_line_to_string(&mut buf)?;
+ if buf
+ .strip_prefix(welcome_prefix)
+ .map_or(true, |rest| rest.trim_end() != "-server")
+ {
+ return Err(handshake::Error::Protocol {
+ msg: format!("Wanted '{welcome_prefix}-server, got "),
+ actual: buf,
+ });
+ }
+
+ let chosen_version;
+ buf.clear();
+ read.read_line_to_string(&mut buf)?;
+ match buf
+ .strip_prefix("version=")
+ .and_then(|version| usize::from_str(version.trim_end()).ok())
+ {
+ Some(version) => {
+ chosen_version = version;
+ }
+ None => {
+ return Err(handshake::Error::Protocol {
+ msg: "Needed 'version=<integer>', got ".into(),
+ actual: buf,
+ })
+ }
+ }
+
+ if !versions.contains(&chosen_version) {
+ return Err(handshake::Error::Protocol {
+ msg: format!("Server offered {chosen_version}, we only support "),
+ actual: versions.iter().map(ToString::to_string).collect::<Vec<_>>().join(", "),
+ });
+ }
+
+ if read.read_line_to_string(&mut buf)? != 0 {
+ return Err(handshake::Error::Protocol {
+ msg: "expected flush packet, got".into(),
+ actual: buf,
+ });
+ }
+ for capability in desired_capabilities {
+ out.write_all(format!("capability={capability}").as_bytes())?;
+ }
+ gix_packetline::encode::flush_to_write(out.inner_mut())?;
+ out.flush()?;
+
+ read.reset_with(&[gix_packetline::PacketLineRef::Flush]);
+ let mut capabilities = HashSet::new();
+ loop {
+ buf.clear();
+ let num_read = read.read_line_to_string(&mut buf)?;
+ if num_read == 0 {
+ break;
+ }
+ match buf.strip_prefix("capability=") {
+ Some(cap) => {
+ let cap = cap.trim_end();
+ if !desired_capabilities.contains(&cap) {
+ return Err(handshake::Error::UnsupportedCapability { name: cap.into() });
+ }
+ capabilities.insert(cap.to_owned());
+ }
+ None => continue,
+ }
+ }
+
+ drop(read);
+ Ok(Client {
+ child: process,
+ out: input,
+ input: out,
+ capabilities,
+ version: chosen_version,
+ })
+ }
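+
+ // Editor's sketch (not part of the upstream sources) of the packetline exchange performed by
+ // `handshake()` above, for `welcome_prefix = "git-filter"` and `versions = &[2]`:
+ //
+ //     client -> server: "git-filter-client", "version=2", <flush>
+ //     server -> client: "git-filter-server", "version=2", <flush>
+ //     client -> server: "capability=clean", "capability=smudge", "capability=delay", <flush>
+ //     server -> client: the subset of capabilities it supports, <flush>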
+
+ /// Invoke `command` and send all `meta` data before sending all `content` in full.
+ pub fn invoke(
+ &mut self,
+ command: &str,
+ meta: &mut dyn Iterator<Item = (&str, BString)>,
+ content: &mut dyn std::io::Read,
+ ) -> Result<process::Status, invoke::Error> {
+ self.send_command_and_meta(command, meta)?;
+ std::io::copy(content, &mut self.input)?;
+ gix_packetline::encode::flush_to_write(self.input.inner_mut())?;
+ self.input.flush()?;
+ Ok(self.read_status()?)
+ }
+
+ /// Invoke `command` while passing `meta` data, but don't send any content, and return its status.
+ /// Call `inspect_line` for each line that we see as command response.
+ ///
+ /// This is for commands that don't expect a content stream.
+ pub fn invoke_without_content<'a>(
+ &mut self,
+ command: &str,
+ meta: &mut dyn Iterator<Item = (&'a str, BString)>,
+ inspect_line: &mut dyn FnMut(&BStr),
+ ) -> Result<process::Status, invoke::without_content::Error> {
+ self.send_command_and_meta(command, meta)?;
+ while let Some(data) = self.out.read_line() {
+ let line = data??;
+ if let Some(line) = line.as_bstr() {
+ inspect_line(line);
+ }
+ }
+ self.out.reset_with(&[gix_packetline::PacketLineRef::Flush]);
+ let status = self.read_status()?;
+ Ok(status)
+ }
+
+ /// Return a `Read` implementation that reads the server process output until the next flush packet, and validates
+ /// the status. If the status indicates failure, the last read will also fail.
+ pub fn as_read(&mut self) -> impl std::io::Read + '_ {
+ self.out.reset_with(&[gix_packetline::PacketLineRef::Flush]);
+ ReadProcessOutputAndStatus {
+ inner: self.out.as_read(),
+ }
+ }
+
+ /// Read `status=` lines from the process output until it is exhausted.
+ /// Note that the last status line sent wins, and the absence of any status line means that the `Previous` status still counts.
+ pub fn read_status(&mut self) -> std::io::Result<process::Status> {
+ read_status(&mut self.out.as_read())
+ }
+}
+
+impl Client {
+ fn send_command_and_meta(
+ &mut self,
+ command: &str,
+ meta: &mut dyn Iterator<Item = (&str, BString)>,
+ ) -> Result<(), invoke::Error> {
+ self.input.write_all(format!("command={command}").as_bytes())?;
+ let mut buf = BString::default();
+ for (key, value) in meta {
+ buf.clear();
+ buf.push_str(key);
+ buf.push(b'=');
+ buf.push_str(&value);
+ self.input.write_all(&buf)?;
+ }
+ gix_packetline::encode::flush_to_write(self.input.inner_mut())?;
+ Ok(())
+ }
+}
+
+fn read_status(read: &mut PacketlineReader<'_>) -> std::io::Result<process::Status> {
+ let mut status = process::Status::Previous;
+ let mut buf = String::new();
+ let mut count = 0;
+ loop {
+ buf.clear();
+ let num_read = read.read_line_to_string(&mut buf)?;
+ if num_read == 0 {
+ break;
+ }
+ if let Some(name) = buf.strip_prefix("status=") {
+ status = process::Status::Named(name.trim_end().into());
+ }
+ count += 1;
+ }
+ if count > 0 && matches!(status, process::Status::Previous) {
+ status = process::Status::Unset;
+ }
+ read.reset_with(&[gix_packetline::PacketLineRef::Flush]);
+ Ok(status)
+}
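+
+// Editor's sketch (not part of the upstream sources): how `read_status()` above maps the lines
+// seen before the next flush packet to a `process::Status`:
+//
+//     "status=success" <flush>                 -> Status::Named("success")
+//     "status=delayed" "status=error" <flush>  -> Status::Named("error")   (the last status wins)
+//     <flush> with no preceding line           -> Status::Previous
+//     "unrelated=1" <flush>                    -> Status::Unset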
+
+struct ReadProcessOutputAndStatus<'a> {
+ inner: PacketlineReader<'a>,
+}
+
+impl<'a> std::io::Read for ReadProcessOutputAndStatus<'a> {
+ fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
+ let num_read = self.inner.read(buf)?;
+ if num_read == 0 {
+ self.inner.reset_with(&[gix_packetline::PacketLineRef::Flush]);
+ let status = read_status(&mut self.inner)?;
+ if status.is_success() {
+ Ok(0)
+ } else {
+ Err(std::io::Error::new(
+ std::io::ErrorKind::Other,
+ format!(
+ "Process indicated error after reading: {}",
+ status.message().unwrap_or_default()
+ ),
+ ))
+ }
+ } else {
+ Ok(num_read)
+ }
+ }
+}
+
+/// Access
+impl Client {
+ /// Return the list of capabilities reported by the serving process.
+ pub fn capabilities(&self) -> &Capabilities {
+ &self.capabilities
+ }
+
+ /// Return the mutable list of capabilities reported by the serving process.
+ pub fn capabilities_mut(&mut self) -> &mut Capabilities {
+ &mut self.capabilities
+ }
+
+ /// Return the negotiated version of the protocol.
+ ///
+ /// Note that it is the highest one that both the client and the server support.
+ pub fn version(&self) -> usize {
+ self.version
+ }
+}
+
+/// Lifecycle
+impl Client {
+ /// Return the child handle of the running process.
+ ///
+ /// Note that this will naturally close input and output handles, which is a signal for the child process to shutdown.
+ pub fn into_child(self) -> std::process::Child {
+ self.child
+ }
+}
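+
+// Editor's sketch (not part of the upstream sources): invoking the `clean` command on an
+// already-handshaked `Client` and streaming back the filtered result (requires `std::io::Read`):
+//
+//     let status = client.invoke(
+//         "clean",
+//         &mut [("pathname", BString::from("a/file.txt"))].into_iter(),
+//         &mut &b"content to filter"[..],
+//     )?;
+//     if status.is_success() {
+//         let mut filtered = Vec::new();
+//         client.as_read().read_to_end(&mut filtered)?;
+//     }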
diff --git a/vendor/gix-filter/src/driver/process/mod.rs b/vendor/gix-filter/src/driver/process/mod.rs
new file mode 100644
index 000000000..c2f62ddd2
--- /dev/null
+++ b/vendor/gix-filter/src/driver/process/mod.rs
@@ -0,0 +1,113 @@
+use std::collections::HashSet;
+
+/// A set of capabilities that have been negotiated between client and server.
+pub type Capabilities = HashSet<String>;
+
+/// A handle to a client that allows communicating to a long-running process.
+pub struct Client {
+ /// The child process we are communicating with.
+ child: std::process::Child,
+ /// The names of the obtained capabilities after the handshake.
+ capabilities: Capabilities,
+ /// The negotiated version of the protocol.
+ version: usize,
+ /// A way to send packet-line encoded information to the process.
+ input: gix_packetline::Writer<std::process::ChildStdin>,
+ /// A way to read information sent to us by the process.
+ out: gix_packetline::StreamingPeekableIter<std::process::ChildStdout>,
+}
+
+/// A handle to facilitate typical server interactions that include the handshake and command-invocations.
+pub struct Server {
+ /// The names of the capabilities we can expect the client to use.
+ capabilities: Capabilities,
+ /// The negotiated version of the protocol, it's the highest supported one.
+ version: usize,
+ /// A way to receive information from the client.
+ input: gix_packetline::StreamingPeekableIter<std::io::StdinLock<'static>>,
+ /// A way to send information to the client.
+ out: gix_packetline::Writer<std::io::StdoutLock<'static>>,
+}
+
+/// The return status of an [invoked command][Client::invoke()].
+#[derive(Debug, Clone)]
+pub enum Status {
+ /// No new status was set, and nothing was sent, so instead we are to assume the previous status is still in effect.
+ Previous,
+ /// Something was sent, but we couldn't identify it as a status.
+ Unset,
+ /// Assume the given named status.
+ Named(String),
+}
+
+/// Initialization
+impl Status {
+ /// Create a new instance that represents a successful operation.
+ pub fn success() -> Self {
+ Status::Named("success".into())
+ }
+
+ /// Create a new instance that represents a delayed operation.
+ pub fn delayed() -> Self {
+ Status::Named("delayed".into())
+ }
+
+ /// Create a status that indicates to the client that the command that caused it will not be run anymore throughout the lifetime
+ /// of the process. However, other commands may still run.
+ pub fn abort() -> Self {
+ Status::Named("abort".into())
+ }
+
+ /// Create a status that makes the client send a kill signal.
+ pub fn exit() -> Self {
+ Status::Named("send-term-signal".into())
+ }
+
+ /// Create a new instance that represents an error with the given `message`.
+ pub fn error(message: impl Into<String>) -> Self {
+ Status::Named(message.into())
+ }
+}
+
+/// Access
+impl Status {
+ /// Note that this is assumed true even if no new status is set, hence we assume that upon error, the caller will not continue
+ /// interacting with the process.
+ pub fn is_success(&self) -> bool {
+ match self {
+ Status::Previous => true,
+ Status::Unset => false,
+ Status::Named(n) => n == "success",
+ }
+ }
+
+ /// Returns true if this is an `abort` status.
+ pub fn is_abort(&self) -> bool {
+ self.message().map_or(false, |m| m == "abort")
+ }
+
+ /// Return true if the status is explicitly set to indicate delayed output processing.
+ pub fn is_delayed(&self) -> bool {
+ match self {
+ Status::Previous | Status::Unset => false,
+ Status::Named(n) => n == "delayed",
+ }
+ }
+
+ /// Return the status message if present.
+ pub fn message(&self) -> Option<&str> {
+ match self {
+ Status::Previous | Status::Unset => None,
+ Status::Named(msg) => msg.as_str().into(),
+ }
+ }
+}
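+
+// Editor's sketch (not part of the upstream sources): a few assertions illustrating the `Status`
+// semantics defined above:
+//
+//     assert!(Status::success().is_success());
+//     assert!(Status::Previous.is_success()); // no new status keeps the previous (successful) one
+//     assert!(!Status::Unset.is_success());
+//     assert!(Status::delayed().is_delayed());
+//     assert_eq!(Status::abort().message(), Some("abort"));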
+
+///
+pub mod client;
+
+///
+pub mod server;
+
+type PacketlineReader<'a, T = std::process::ChildStdout> =
+ gix_packetline::read::WithSidebands<'a, T, fn(bool, &[u8]) -> gix_packetline::read::ProgressAction>;
diff --git a/vendor/gix-filter/src/driver/process/server.rs b/vendor/gix-filter/src/driver/process/server.rs
new file mode 100644
index 000000000..d337645fc
--- /dev/null
+++ b/vendor/gix-filter/src/driver/process/server.rs
@@ -0,0 +1,280 @@
+use std::{collections::HashSet, io::Write, str::FromStr};
+
+use bstr::{BString, ByteSlice};
+
+use crate::driver::process::Server;
+
+/// A request to be handled by the server, typically done in a loop.
+pub struct Request<'a> {
+ parent: &'a mut Server,
+ /// The command to execute with this request.
+ pub command: String,
+ /// A list of key-value pairs of meta-data related to `command`.
+ pub meta: Vec<(String, BString)>,
+}
+
+///
+pub mod next_request {
+ use bstr::BString;
+
+ /// The error returned by [Server::next_request()][super::Server::next_request()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("Failed to read from the client")]
+ Io(#[from] std::io::Error),
+ #[error("{msg} '{actual}'")]
+ Protocol { msg: String, actual: BString },
+ #[error(transparent)]
+ PacketlineDecode(#[from] gix_packetline::decode::Error),
+ }
+}
+
+///
+pub mod handshake {
+ /// The error returned by [Server::handshake()][super::Server::handshake()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("Failed to read or write to the client")]
+ Io(#[from] std::io::Error),
+ #[error("{msg} '{actual}'")]
+ Protocol { msg: String, actual: String },
+ #[error("Could not select supported version from the one sent by the client: {}", actual.iter().map(ToString::to_string).collect::<Vec<_>>().join(", "))]
+ VersionMismatch { actual: Vec<usize> },
+ }
+}
+
+impl Server {
+ /// Perform a handshake with the client sending information to our `stdin` and receiving information through our `stdout`
+ /// in packetline format.
+ /// `pick_version` is called with all versions supported by the client to pick one from, or `None` to indicate the handshake
+ /// should stop.
+ /// Use `available_capabilities` to match our capabilities with the ones from the client, so we communicate at most a subset of these.
+ ///
+ /// ### Note
+ ///
+ /// The server claims exclusive access over stdout and stdin, so all other kinds of output have to be steered towards stderr or there
+ /// will be a deadlock.
+ pub fn handshake(
+ stdin: std::io::Stdin,
+ stdout: std::io::Stdout,
+ welcome_prefix: &str,
+ pick_version: &mut dyn FnMut(&[usize]) -> Option<usize>,
+ available_capabilities: &[&str],
+ ) -> Result<Self, handshake::Error> {
+ let mut input =
+ gix_packetline::StreamingPeekableIter::new(stdin.lock(), &[gix_packetline::PacketLineRef::Flush]);
+ let mut read = input.as_read();
+ let mut buf = String::new();
+ read.read_line_to_string(&mut buf)?;
+ if buf
+ .strip_prefix(welcome_prefix)
+ .map_or(true, |rest| rest.trim_end() != "-client")
+ {
+ return Err(handshake::Error::Protocol {
+ msg: format!("Expected '{welcome_prefix}-client, got"),
+ actual: buf,
+ });
+ }
+
+ let mut versions = Vec::new();
+ loop {
+ buf.clear();
+ let num_read = read.read_line_to_string(&mut buf)?;
+ if num_read == 0 {
+ break;
+ }
+ versions.push(
+ match buf
+ .strip_prefix("version=")
+ .and_then(|version| usize::from_str(version.trim_end()).ok())
+ {
+ Some(version) => version,
+ None => {
+ return Err(handshake::Error::Protocol {
+ msg: "Expected 'version=<integer>', got".into(),
+ actual: buf,
+ })
+ }
+ },
+ );
+ }
+ let version = pick_version(&versions).ok_or(handshake::Error::VersionMismatch { actual: versions })?;
+ read.reset_with(&[gix_packetline::PacketLineRef::Flush]);
+ let mut out = gix_packetline::Writer::new(stdout.lock());
+ out.write_all(format!("{welcome_prefix}-server").as_bytes())?;
+ out.write_all(format!("version={version}").as_bytes())?;
+ gix_packetline::encode::flush_to_write(out.inner_mut())?;
+ out.flush()?;
+
+ let mut capabilities = HashSet::new();
+ loop {
+ buf.clear();
+ let num_read = read.read_line_to_string(&mut buf)?;
+ if num_read == 0 {
+ break;
+ }
+ match buf.strip_prefix("capability=") {
+ Some(cap) => {
+ let cap = cap.trim_end();
+ if available_capabilities.contains(&cap) {
+ capabilities.insert(cap.to_owned());
+ }
+ }
+ None => continue,
+ };
+ }
+
+ for cap in &capabilities {
+ out.write_all(format!("capability={cap}").as_bytes())?;
+ }
+ gix_packetline::encode::flush_to_write(out.inner_mut())?;
+ out.flush()?;
+
+ drop(read);
+ Ok(Server {
+ capabilities,
+ version,
+ out,
+ input,
+ })
+ }
+
+ /// Read the next request and return it, even if [`command`][Request::command] is *not* supported by us.
+ /// If `Ok(None)` is reported, the request loop should end and the process should be shutdown gracefully.
+ ///
+ /// The reason for allowing any command is that the caller would have to match on the command anyway, and would
+ /// have to handle invalid commands that way.
+ ///
+ /// ### Lifecycle
+ ///
+ /// Note that the process is supposed to shut down once there are no more requests, and `git` will wait
+ /// until it has finished.
+ pub fn next_request(&mut self) -> Result<Option<Request<'_>>, next_request::Error> {
+ let mut buf = String::new();
+ let mut read = self.input.as_read();
+
+ match read.read_line_to_string(&mut buf) {
+ Ok(_) => {}
+ Err(err) if err.kind() == std::io::ErrorKind::UnexpectedEof => return Ok(None),
+ Err(err) => return Err(err.into()),
+ }
+ let command = match buf.strip_prefix("command=").map(str::trim_end).map(ToOwned::to_owned) {
+ Some(cmd) => cmd,
+ None => {
+ return Err(next_request::Error::Protocol {
+ msg: "Wanted 'command=<name>', got ".into(),
+ actual: buf.into(),
+ })
+ }
+ };
+
+ let mut meta = Vec::with_capacity(1);
+ while let Some(res) = read.read_data_line() {
+ let line = res??;
+ let line = line
+ .as_bstr()
+ .ok_or_else(|| next_request::Error::Protocol {
+ msg: "expected data line, got ".into(),
+ actual: format!("{line:?}").into(),
+ })?
+ .trim();
+ let mut tokens = line.splitn(2, |b| *b == b'=');
+ let (key, value) = tokens
+ .next()
+ .zip(tokens.next())
+ .ok_or_else(|| next_request::Error::Protocol {
+ msg: "Expected 'key=value' metadata, got".into(),
+ actual: line.into(),
+ })?;
+ assert!(tokens.next().is_none(), "configured to yield at most two tokens");
+ meta.push((key.as_bstr().to_string(), value.into()))
+ }
+
+ drop(read);
+ self.input.reset_with(&[gix_packetline::PacketLineRef::Flush]);
+
+ Ok(Some(Request {
+ parent: self,
+ command,
+ meta,
+ }))
+ }
+}
+
+mod request {
+ use std::io::Write;
+
+ use crate::driver::{
+ process,
+ process::{server::Request, PacketlineReader},
+ };
+
+ impl Request<'_> {
+ /// Turn ourselves into a reader that can read until the next flush packet.
+ pub fn as_read(&mut self) -> PacketlineReader<'_, std::io::StdinLock<'static>> {
+ self.parent.input.as_read()
+ }
+
+ /// Provide the write-end of the underlying process.
+ pub fn as_write(&mut self) -> impl std::io::Write + '_ {
+ WriteAndFlushOnDrop {
+ inner: &mut self.parent.out,
+ }
+ }
+
+ /// Write the `status` message followed by a flush packet.
+ pub fn write_status(&mut self, status: process::Status) -> std::io::Result<()> {
+ let out = &mut self.parent.out;
+ if let Some(message) = status.message() {
+ out.write_all(format!("status={message}").as_bytes())?;
+ }
+ gix_packetline::encode::flush_to_write(out.inner_mut())?;
+ out.flush()
+ }
+ }
+
+ impl std::fmt::Debug for Request<'_> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ f.debug_struct("Request")
+ .field("command", &self.command)
+ .field("meta", &self.meta)
+ .finish()
+ }
+ }
+
+ struct WriteAndFlushOnDrop<'a> {
+ inner: &'a mut gix_packetline::Writer<std::io::StdoutLock<'static>>,
+ }
+
+ impl std::io::Write for WriteAndFlushOnDrop<'_> {
+ fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
+ self.inner.write(buf)
+ }
+
+ fn flush(&mut self) -> std::io::Result<()> {
+ self.inner.flush()
+ }
+ }
+
+ impl Drop for WriteAndFlushOnDrop<'_> {
+ fn drop(&mut self) {
+ gix_packetline::encode::flush_to_write(self.inner.inner_mut()).ok();
+ self.inner.flush().ok();
+ }
+ }
+}
+
+/// Access
+impl Server {
+ /// Return the list of capabilities we are allowed to use, as negotiated with the client.
+ pub fn capabilities(&self) -> &HashSet<String> {
+ &self.capabilities
+ }
+
+ /// Return the negotiated version of the protocol.
+ pub fn version(&self) -> usize {
+ self.version
+ }
+}
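+
+// Editor's sketch (not part of the upstream sources) of a minimal serving loop built on
+// `Server::handshake()` and `next_request()` above; see `examples/arrow.rs` in this crate for a
+// complete filter implementation:
+//
+//     let mut srv = Server::handshake(
+//         std::io::stdin(),
+//         std::io::stdout(),
+//         "git-filter",
+//         &mut |versions| versions.contains(&2).then_some(2),
+//         &["clean", "smudge"],
+//     )?;
+//     while let Some(mut request) = srv.next_request()? {
+//         let mut payload = Vec::new();
+//         request.as_read().read_to_end(&mut payload)?;          // drain the content
+//         request.write_status(process::Status::success())?;     // acknowledge the command
+//         request.as_write().write_all(&payload)?;               // echo the content unchanged
+//         request.write_status(process::Status::Previous)?;      // keep the 'success' status
+//     }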
diff --git a/vendor/gix-filter/src/driver/shutdown.rs b/vendor/gix-filter/src/driver/shutdown.rs
new file mode 100644
index 000000000..af75bf1e2
--- /dev/null
+++ b/vendor/gix-filter/src/driver/shutdown.rs
@@ -0,0 +1,35 @@
+use bstr::BString;
+
+use crate::driver::State;
+
+///
+#[derive(Debug, Copy, Clone)]
+pub enum Mode {
+ /// Wait for long-running processes after signaling them to shut down by closing their input and output.
+ WaitForProcesses,
+ /// Do not do anything with long-running processes, which typically allows them to keep running or shut down on their own time.
+ /// This is the fastest mode as no synchronization happens at all.
+ Ignore,
+}
+
+/// Lifecycle
+impl State {
+ /// Handle long-running processes according to `mode`. If an error occurs, all remaining processes will be ignored automatically.
+ /// Return a list of `(process, Option<status>)` tuples.
+ pub fn shutdown(self, mode: Mode) -> Result<Vec<(BString, Option<std::process::ExitStatus>)>, std::io::Error> {
+ let mut out = Vec::with_capacity(self.running.len());
+ for (cmd, client) in self.running {
+ match mode {
+ Mode::WaitForProcesses => {
+ let mut child = client.into_child();
+ let status = child.wait()?;
+ out.push((cmd, Some(status)));
+ }
+ Mode::Ignore => {
+ out.push((cmd, None));
+ }
+ }
+ }
+ Ok(out)
+ }
+}
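+
+// Editor's sketch (not part of the upstream sources): shutting down all long-running filter
+// processes tracked by a `State` and reporting their exit status:
+//
+//     for (cmd, status) in state.shutdown(Mode::WaitForProcesses)? {
+//         eprintln!("filter process '{cmd}' exited with {status:?}");
+//     }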
diff --git a/vendor/gix-filter/src/eol/convert_to_git.rs b/vendor/gix-filter/src/eol/convert_to_git.rs
new file mode 100644
index 000000000..1bd1e919f
--- /dev/null
+++ b/vendor/gix-filter/src/eol/convert_to_git.rs
@@ -0,0 +1,162 @@
+use std::path::{Path, PathBuf};
+
+/// Additional context for use with [`convert_to_git`][super::convert_to_git()].
+#[derive(Default, Copy, Clone)]
+pub struct Options<'a> {
+ /// How to perform round-trip checks.
+ pub round_trip_check: Option<RoundTripCheck<'a>>,
+ /// Configuration related to EOL.
+ pub config: crate::eol::Configuration,
+}
+
+/// The kind of round-trip check to perform when converting line endings to `git`, i.e. `CRLF` to `LF`.
+#[derive(Debug, Copy, Clone)]
+pub enum RoundTripCheck<'a> {
+ /// Fail with an error if conversion isn't round-trip safe.
+ Fail {
+ /// The repository-relative path of the file to check. Used in case of error.
+ rela_path: &'a Path,
+ },
+ /// Emit a warning using `gix_trace::warn!`, but don't fail.
+ ///
+ /// Note that the parent application has to set up tracing to make these events visible, along with a parent `span!`.
+ Warn {
+ /// The repository-relative path of the file to check. Used in case of error.
+ rela_path: &'a Path,
+ },
+}
+
+/// The error returned by [convert_to_git()][super::convert_to_git()].
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum Error {
+ #[error("{msg} in '{}'", path.display())]
+ RoundTrip { msg: &'static str, path: PathBuf },
+ #[error("Could not obtain index object to check line endings for")]
+ FetchObjectFromIndex(#[source] Box<dyn std::error::Error + Send + Sync + 'static>),
+}
+
+/// A function like `fn(&mut buf)` that writes into the buffer the bytes of the index object corresponding to the one being converted.
+pub type IndexObjectFn<'a> =
+ dyn FnMut(&mut Vec<u8>) -> Result<Option<()>, Box<dyn std::error::Error + Send + Sync>> + 'a;
+
+pub(crate) mod function {
+ use bstr::ByteSlice;
+
+ use crate::eol::convert_to_git::IndexObjectFn;
+ use crate::{
+ clear_and_set_capacity,
+ eol::{
+ convert_to_git::{Error, Options, RoundTripCheck},
+ AttributesDigest, Stats,
+ },
+ };
+
+ /// Given a `src` buffer, change it to `git` (`\n`) line endings and store the result in `buf`.
+ /// Return `true` if `buf` was written or `false` if nothing had to be done.
+ /// Depending on the state in `buf`, `index_object` is called to write the version of `src` as stored in the index
+ /// into the buffer if it is a blob, or to return `Ok(None)` if no such object exists.
+ /// If renormalization is desired, let it return `Ok(None)` at all times so it has no influence over the
+ /// outcome of this function.
+ /// If `round_trip_check` is not `None`, round-tripping will be validated and handled accordingly.
+ pub fn convert_to_git(
+ src: &[u8],
+ digest: AttributesDigest,
+ buf: &mut Vec<u8>,
+ index_object: &mut IndexObjectFn<'_>,
+ Options {
+ round_trip_check,
+ config,
+ }: Options<'_>,
+ ) -> Result<bool, Error> {
+ if digest == AttributesDigest::Binary || src.is_empty() {
+ return Ok(false);
+ }
+
+ let stats = Stats::from_bytes(src);
+ let mut convert_crlf_to_lf = stats.crlf > 0;
+ if digest.is_auto_text() {
+ // In this mode, we are supposed to figure out ourselves if we should convert or not.
+ if stats.is_binary() {
+ return Ok(false);
+ }
+
+ if let Some(()) = index_object(buf).map_err(|err| Error::FetchObjectFromIndex(err))? {
+ let has_crlf_in_index = buf
+ .find_byte(b'\r')
+ .map(|_| Stats::from_bytes(buf))
+ .filter(|s| !s.is_binary() && s.crlf > 0)
+ .is_some();
+ if has_crlf_in_index {
+ convert_crlf_to_lf = false;
+ }
+ }
+ }
+
+ if let Some(round_trip_check) = round_trip_check {
+ let mut new_stats = stats;
+ // simulate to-git conversion/git-add
+ if convert_crlf_to_lf {
+ new_stats.lone_lf += new_stats.crlf;
+ new_stats.crlf = 0;
+ }
+ // simulate worktree checkout
+ if new_stats.will_convert_lf_to_crlf(digest, config) {
+ new_stats.crlf += new_stats.lone_lf;
+ new_stats.lone_lf = 0;
+ }
+ if stats.crlf > 0 && new_stats.crlf == 0 {
+ // CRLF would not be restored by checkout
+ match round_trip_check {
+ RoundTripCheck::Fail { rela_path } => {
+ return Err(Error::RoundTrip {
+ msg: "CRLF would be replaced by LF",
+ path: rela_path.to_owned(),
+ })
+ }
+ #[allow(unused_variables)]
+ RoundTripCheck::Warn { rela_path } => {
+ gix_trace::warn!(
+ "in the working copy of '{}', CRLF will be replaced by LF next time git touches it",
+ rela_path.display()
+ )
+ }
+ }
+ } else if stats.lone_lf > 0 && new_stats.lone_lf == 0 {
+ // CRLF would be added by checkout
+ match round_trip_check {
+ RoundTripCheck::Fail { rela_path } => {
+ return Err(Error::RoundTrip {
+ msg: "LF would be replaced by CRLF",
+ path: rela_path.to_owned(),
+ })
+ }
+ #[allow(unused_variables)]
+ RoundTripCheck::Warn { rela_path } => {
+ gix_trace::warn!(
+ "in the working copy of '{}', LF will be replaced by CRLF next time git touches it",
+ rela_path.display()
+ )
+ }
+ }
+ }
+ }
+
+ if !convert_crlf_to_lf {
+ return Ok(false);
+ }
+
+ clear_and_set_capacity(buf, src.len() - stats.crlf);
+ if stats.lone_cr == 0 {
+ buf.extend(src.iter().filter(|b| **b != b'\r'));
+ } else {
+ let mut bytes = src.iter().peekable();
+ while let Some(b) = bytes.next() {
+ if !(*b == b'\r' && bytes.peek() == Some(&&b'\n')) {
+ buf.push(*b);
+ }
+ }
+ }
+ Ok(true)
+ }
+}
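+
+// Editor's sketch (not part of the upstream sources): using `convert_to_git()` above without
+// renormalization influence (the index callback always answers `Ok(None)`) and without a
+// round-trip check:
+//
+//     fn no_index_object(
+//         _buf: &mut Vec<u8>,
+//     ) -> Result<Option<()>, Box<dyn std::error::Error + Send + Sync>> {
+//         Ok(None)
+//     }
+//
+//     let mut buf = Vec::new();
+//     let changed = convert_to_git(
+//         b"one\r\ntwo\r\n",
+//         AttributesDigest::Text,
+//         &mut buf,
+//         &mut no_index_object,
+//         Options::default(),
+//     )?;
+//     assert!(changed);
+//     assert_eq!(buf, b"one\ntwo\n"); // CRLF was normalized to LF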
diff --git a/vendor/gix-filter/src/eol/convert_to_worktree.rs b/vendor/gix-filter/src/eol/convert_to_worktree.rs
new file mode 100644
index 000000000..5e2e2c183
--- /dev/null
+++ b/vendor/gix-filter/src/eol/convert_to_worktree.rs
@@ -0,0 +1,43 @@
+use bstr::{ByteSlice, ByteVec};
+
+use crate::{
+ clear_and_set_capacity,
+ eol::{AttributesDigest, Configuration, Mode, Stats},
+};
+
+/// Convert all `\n` in `src` to `crlf` if `digest` and `config` indicate it, returning `true` if `buf` holds the result, or `false`
+/// if no change was made after all.
+pub fn convert_to_worktree(src: &[u8], digest: AttributesDigest, buf: &mut Vec<u8>, config: Configuration) -> bool {
+ if src.is_empty() || digest.to_eol(config) != Some(Mode::CrLf) {
+ return false;
+ }
+ let stats = Stats::from_bytes(src);
+ if !stats.will_convert_lf_to_crlf(digest, config) {
+ return false;
+ }
+
+ clear_and_set_capacity(buf, src.len() + stats.lone_lf);
+
+ let mut ofs = 0;
+ while let Some(pos) = src[ofs..].find_byteset(b"\r\n") {
+ match src[ofs + pos] {
+ b'\r' => {
+ if src.get(ofs + pos + 1) == Some(&b'\n') {
+ buf.push_str(&src[ofs..][..pos + 2]);
+ ofs += pos + 2;
+ } else {
+ buf.push_str(&src[ofs..][..pos + 1]);
+ ofs += pos + 1;
+ }
+ }
+ b'\n' => {
+ buf.push_str(&src[ofs..][..pos]);
+ buf.push_str(b"\r\n");
+ ofs += pos + 1;
+ }
+ _ => unreachable!("would only find one of two possible values"),
+ }
+ }
+ buf.push_str(&src[ofs..]);
+ true
+}
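+
+// Editor's sketch (not part of the upstream sources): converting LF to CRLF for checkout with
+// `convert_to_worktree()` above, using an explicit `eol=crlf` digest:
+//
+//     let mut buf = Vec::new();
+//     let changed = convert_to_worktree(b"one\ntwo\n", AttributesDigest::TextCrlf, &mut buf, Configuration::default());
+//     assert!(changed);
+//     assert_eq!(buf, b"one\r\ntwo\r\n");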
diff --git a/vendor/gix-filter/src/eol/mod.rs b/vendor/gix-filter/src/eol/mod.rs
new file mode 100644
index 000000000..ad1553826
--- /dev/null
+++ b/vendor/gix-filter/src/eol/mod.rs
@@ -0,0 +1,95 @@
+///
+pub mod convert_to_git;
+pub use convert_to_git::function::convert_to_git;
+
+mod convert_to_worktree;
+pub use convert_to_worktree::convert_to_worktree;
+
+mod utils;
+
+/// The kind of end of lines to set.
+///
+/// The default is implemented to be the native line ending for the current platform.
+#[derive(Debug, Copy, Clone, Eq, PartialEq)]
+pub enum Mode {
+ /// Equivalent to `git` (`\n`) line-endings.
+ Lf,
+ /// Equivalent to `windows` (`\r\n`) line-endings.
+ CrLf,
+}
+
+/// Possible states for the `core.autocrlf`.
+#[derive(Default, Debug, Copy, Clone, Eq, PartialEq)]
+pub enum AutoCrlf {
+ /// The same as if the `text eol=lf` attribute is set.
+ Input,
+ /// The same as if the `text eol=crlf` attribute is set.
+ Enabled,
+ /// No conversion is performed.
+ #[default]
+ Disabled,
+}
+
+/// The combination of `crlf`, `text` and `eol` attributes into one neat package.
+#[derive(Debug, Copy, Clone, Eq, PartialEq)]
+pub enum AttributesDigest {
+ /// Equivalent to the `-text` attribute.
+ Binary,
+ /// Equivalent to the `text` attribute.
+ Text,
+ /// Equivalent to the `text eol=lf` attributes.
+ TextInput,
+ /// Equivalent to the `text eol=crlf` attributes.
+ TextCrlf,
+ /// Equivalent to the `text=auto` attributes.
+ TextAuto,
+ /// Equivalent to the `text=auto eol=crlf` attributes.
+ TextAutoCrlf,
+ /// Equivalent to the `text=auto eol=lf` attributes.
+ TextAutoInput,
+}
+
+impl From<Mode> for AttributesDigest {
+ fn from(value: Mode) -> Self {
+ match value {
+ Mode::Lf => AttributesDigest::TextInput,
+ Mode::CrLf => AttributesDigest::TextCrlf,
+ }
+ }
+}
+
+impl From<AutoCrlf> for AttributesDigest {
+ fn from(value: AutoCrlf) -> Self {
+ match value {
+ AutoCrlf::Input => AttributesDigest::TextAutoInput,
+ AutoCrlf::Enabled => AttributesDigest::TextAutoCrlf,
+ AutoCrlf::Disabled => AttributesDigest::Binary,
+ }
+ }
+}
+
+/// Git Configuration that affects how CRLF conversions are applied.
+#[derive(Default, Debug, Copy, Clone)]
+pub struct Configuration {
+ /// Corresponds to `core.autocrlf`.
+ pub auto_crlf: AutoCrlf,
+ /// Corresponds to `core.eol`, and is `None` if unset or set to `native`, or `Some(<mode>)` respectively.
+ pub eol: Option<Mode>,
+}
+
+/// Statistics about a buffer that help to safely perform EOL conversions.
+#[derive(Debug, Default, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
+pub struct Stats {
+ /// The number of null bytes.
+ pub null: usize,
+ /// The number of lone carriage returns (`\r`).
+ pub lone_cr: usize,
+ /// The number of lone line feeds (`\n`).
+ pub lone_lf: usize,
+ /// The number of carriage returns followed by line feeds (`\r\n`).
+ pub crlf: usize,
+ /// The estimate of printable characters.
+ pub printable: usize,
+ /// The estimate of characters that can't be printed.
+ pub non_printable: usize,
+}
diff --git a/vendor/gix-filter/src/eol/utils.rs b/vendor/gix-filter/src/eol/utils.rs
new file mode 100644
index 000000000..ec8f3c6e2
--- /dev/null
+++ b/vendor/gix-filter/src/eol/utils.rs
@@ -0,0 +1,124 @@
+use crate::eol::{AttributesDigest, AutoCrlf, Configuration, Mode, Stats};
+
+impl Default for Mode {
+ fn default() -> Self {
+ if cfg!(windows) {
+ Mode::CrLf
+ } else {
+ Mode::Lf
+ }
+ }
+}
+
+impl AttributesDigest {
+ /// Return the end-of-line mode this digest would require, or `None` if no conversion would be performed.
+ pub fn to_eol(&self, config: Configuration) -> Option<Mode> {
+ Some(match self {
+ AttributesDigest::Binary => return None,
+ AttributesDigest::TextInput | AttributesDigest::TextAutoInput => Mode::Lf,
+ AttributesDigest::TextCrlf | AttributesDigest::TextAutoCrlf => Mode::CrLf,
+ AttributesDigest::Text | AttributesDigest::TextAuto => config.to_eol(),
+ })
+ }
+
+ /// Return true if this digest allows for auto-determination of CRLF text conversion.
+ pub fn is_auto_text(&self) -> bool {
+ matches!(
+ self,
+ AttributesDigest::TextAuto | AttributesDigest::TextAutoCrlf | AttributesDigest::TextAutoInput
+ )
+ }
+}
+
+impl Configuration {
+ /// Return the line-ending mode that is configured here.
+ pub fn to_eol(&self) -> Mode {
+ match self.auto_crlf {
+ AutoCrlf::Enabled => Mode::CrLf,
+ AutoCrlf::Input => Mode::Lf,
+ AutoCrlf::Disabled => self.eol.unwrap_or_default(),
+ }
+ }
+}
+
+impl Stats {
+ /// Gather statistics from the given `bytes`.
+ ///
+ /// Note that the entire buffer will be scanned.
+ pub fn from_bytes(bytes: &[u8]) -> Self {
+ let mut bytes = bytes.iter().peekable();
+ let mut null = 0;
+ let mut lone_cr = 0;
+ let mut lone_lf = 0;
+ let mut crlf = 0;
+ let mut printable = 0;
+ let mut non_printable = 0;
+ while let Some(b) = bytes.next() {
+ if *b == b'\r' {
+ match bytes.peek() {
+ Some(n) if **n == b'\n' => {
+ bytes.next();
+ crlf += 1
+ }
+ _ => lone_cr += 1,
+ }
+ continue;
+ }
+ if *b == b'\n' {
+ lone_lf += 1;
+ continue;
+ }
+ if *b == 127 {
+ non_printable += 1;
+ } else if *b < 32 {
+ match *b {
+ 8 /* \b */ | b'\t' | 27 /* \033 */ | 12 /* \014 */ => printable += 1,
+ 0 => {
+ non_printable += 1;
+ null += 1;
+ },
+ _ => non_printable += 1,
+ }
+ } else {
+ printable += 1;
+ }
+ }
+
+ Self {
+ null,
+ lone_cr,
+ lone_lf,
+ crlf,
+ printable,
+ non_printable,
+ }
+ }
+
+ /// Returns `true` if these statistics are typical for a binary file.
+ pub fn is_binary(&self) -> bool {
+ self.lone_cr > 0 || self.null > 0 || (self.printable >> 7) < self.non_printable
+ }
+
+ /// Return `true` if the buffer from which these stats are derived would be converted, given `digest` and `config`.
+ pub fn will_convert_lf_to_crlf(&self, digest: AttributesDigest, config: Configuration) -> bool {
+ if digest.to_eol(config) != Some(Mode::CrLf) {
+ return false;
+ }
+
+ // nothing to do?
+ if self.lone_lf == 0 {
+ return false;
+ }
+
+ if digest.is_auto_text() {
+ if self.is_binary() {
+ return false;
+ }
+ // Lone `\r` or mixed LF and CRLF isn't safe as it won't round-trip, and in auto-mode we don't touch it.
+ if self.lone_cr > 0 || self.crlf > 0 {
+ return false;
+ }
+ }
+ true
+ }
+}
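
As an illustration of the heuristics above, a short sketch (not part of the vendored sources) of gathering `Stats` and using the binary check:

    use gix_filter::eol::Stats;

    fn classify() {
        let stats = Stats::from_bytes(b"one\r\ntwo\nthree\0");
        assert_eq!(stats.crlf, 1);
        assert_eq!(stats.lone_lf, 1);
        assert_eq!(stats.null, 1);
        assert!(stats.is_binary(), "a null byte marks the buffer as binary");
    }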
diff --git a/vendor/gix-filter/src/ident.rs b/vendor/gix-filter/src/ident.rs
new file mode 100644
index 000000000..1f1bbbd50
--- /dev/null
+++ b/vendor/gix-filter/src/ident.rs
@@ -0,0 +1,77 @@
+use std::ops::Range;
+
+use bstr::{ByteSlice, ByteVec};
+
+use crate::clear_and_set_capacity;
+
+/// Undo identifiers like `$Id:<hexsha>$`, replacing them with `$Id$`, in `src` and write the result to `buf`. Newlines between dollars are ignored.
+/// Return `true` if `buf` was written or `false` if `src` was left unaltered (as there was nothing to do).
+pub fn undo(src: &[u8], buf: &mut Vec<u8>) -> bool {
+ fn find_range(input: &[u8]) -> Option<Range<usize>> {
+ let mut ofs = 0;
+ loop {
+ let mut cursor = input.get(ofs..)?;
+ let start = cursor.find(b"$Id:")?;
+ cursor = cursor.get((start + 4)..)?;
+ let maybe_end = cursor.find_byteset(b"$\n")?;
+ if cursor[maybe_end] == b'\n' {
+ ofs += start + 4 + maybe_end + 1;
+ continue;
+ } else {
+ return Some((ofs + start)..(ofs + start + 4 + maybe_end + 1));
+ }
+ }
+ }
+
+ let mut ofs = 0;
+ let mut initialized = false;
+ while let Some(range) = find_range(&src[ofs..]) {
+ if !initialized {
+ clear_and_set_capacity(buf, src.len());
+ initialized = true;
+ }
+ buf.push_str(&src[ofs..][..range.start]);
+ buf.push_str(b"$Id$");
+ ofs += range.end;
+ }
+ if initialized {
+ buf.push_str(&src[ofs..]);
+ }
+ initialized
+}
+
+/// Substitute all occurrences of `$Id$` with `$Id: <hexsha-of-input>$` if present in `src` and write all changes to `buf`,
+/// with `object_hash` being used accordingly. Return `true` if `buf` was written to or `false` if no change was made
+/// (as there was nothing to do).
+///
+/// ### Deviation
+///
+/// `Git` also tries to clean up 'stray' substituted `$Id: <hex>$`, but we don't do that, sticking exactly to what ought to be done.
+/// The respective code is up to 16 years old and one might assume that `git` by now handles check-in and checkout filters correctly.
+pub fn apply(src: &[u8], object_hash: gix_hash::Kind, buf: &mut Vec<u8>) -> bool {
+ const HASH_LEN: usize = ": ".len() + gix_hash::Kind::longest().len_in_hex();
+ let mut id = None;
+ let mut ofs = 0;
+ while let Some(pos) = src[ofs..].find(b"$Id$") {
+ let id = match id {
+ None => {
+ let new_id = gix_object::compute_hash(object_hash, gix_object::Kind::Blob, src);
+ id = new_id.into();
+ clear_and_set_capacity(buf, src.len() + HASH_LEN); // pre-allocate for one ID
+ new_id
+ }
+ Some(id) => id.to_owned(),
+ };
+
+ buf.push_str(&src[ofs..][..pos + 3]);
+ buf.push_str(b": ");
+ id.write_hex_to(&mut *buf).expect("writes to memory always work");
+ buf.push(b'$');
+
+ ofs += pos + 4;
+ }
+ if id.is_some() {
+ buf.push_str(&src[ofs..]);
+ }
+ id.is_some()
+}
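
A brief round-trip sketch for the two ident functions above (not part of the vendored sources; `gix_hash::Kind::Sha1` is assumed to be the hash kind used for the expansion):

    use gix_filter::ident;

    fn ident_round_trip() {
        // Collapse an expanded identifier back to `$Id$`…
        let mut undone = Vec::new();
        assert!(ident::undo(b"$Id: ffffffffffffffffffffffffffffffffffffffff$\ncontent\n", &mut undone));
        assert_eq!(&undone[..], &b"$Id$\ncontent\n"[..]);

        // …and expand it again with the blob hash of the buffer.
        let mut applied = Vec::new();
        assert!(ident::apply(&undone, gix_hash::Kind::Sha1, &mut applied));
    }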
diff --git a/vendor/gix-filter/src/lib.rs b/vendor/gix-filter/src/lib.rs
new file mode 100644
index 000000000..5fba895fd
--- /dev/null
+++ b/vendor/gix-filter/src/lib.rs
@@ -0,0 +1,80 @@
+//! A library for implementing everything needed to deal with git filter pipelines.
+//!
+//! Generally, multiple filters are applied in a row forming a pipeline, with each filter being a stage in that pipeline.
+//! This pipeline is pre-determined with each stage being configurable.
+//!
+//! The transformation on an input buffer goes in two ways: either a filter is applied, or its effects are undone. Differentiating
+//! between these states is important to avoid comparing unfiltered buffers with filtered ones, for example.
+//!
+//! This crate implements the building blocks in terms of applying and undoing filters, along with logic to decide whether
+//! or not to apply such a filter.
+#![deny(rust_2018_idioms, missing_docs, unsafe_code)]
+
+use bstr::BString;
+/// A forwarding of the `encoding_rs` crate for its types and convenience.
+pub use encoding_rs as encoding;
+
+/// a filter to replace `$Id$` with a git-hash of the buffer.
+pub mod ident;
+
+/// convert line endings in buffers
+pub mod eol;
+
+/// change encodings based on the `working-tree-encoding` attribute.
+pub mod worktree;
+
+/// use filter programs to perform any kind of conversion.
+pub mod driver;
+
+///
+pub mod pipeline;
+
+/// The standard git filter pipeline comprised of multiple standard filters and support for external filters.
+///
+/// It configures itself for each provided path based on that path's attributes, implementing the complex logic that governs the conversion.
+#[derive(Clone)]
+pub struct Pipeline {
+ /// Various options that are all defaultable.
+ options: pipeline::Options,
+ /// Storage for the attributes of each item we should process, configured for use with all attributes that concern us.
+ attrs: gix_attributes::search::Outcome,
+ /// Additional context to pass to process filters.
+ context: pipeline::Context,
+ /// State needed to keep running filter processes.
+ processes: driver::State,
+ /// A utility to handle multiple buffers to keep results of various filters.
+ bufs: pipeline::util::Buffers,
+}
+
+/// A declaration of a driver program.
+///
+/// It consists of up to three program declarations.
+#[derive(Debug, Clone)]
+pub struct Driver {
+ /// The name of the driver as stored in the configuration.
+ pub name: BString,
+
+ /// The program invocation that cleans a worktree file for storage in `git`.
+ ///
+ /// Note that the command invocation may need its `%f` argument substituted with the name of the file to process. It will be quoted.
+ pub clean: Option<BString>,
+ /// The program invocation that readies a file stored in `git` for the worktree.
+ ///
+ /// Note that the command invocation may need its `%f` argument substituted with the name of the file to process. It will be quoted.
+ pub smudge: Option<BString>,
+ /// The long-running program that can typically handle both smudge and clean, and possibly delay processing as well.
+ pub process: Option<BString>,
+ /// If `true`, the `clean` or `smudge` programs need to succeed in order to make their content usable. Otherwise their
+ /// exit code is ignored.
+ /// Note that this is more of a suggestion, as we will always report errors as they happen; the driver API is streaming in nature,
+ /// which makes soft failures impossible unless the caller takes precautions.
+ pub required: bool,
+}
+
+fn clear_and_set_capacity(buf: &mut Vec<u8>, cap: usize) {
+ buf.clear();
+ if buf.capacity() < cap {
+ buf.reserve(cap);
+ assert!(buf.capacity() >= cap, "{} >= {}", buf.capacity(), cap);
+ }
+}
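
To show how the `Driver` declaration above is meant to be filled in, a sketch along the lines of the `arrow` example driver used by the tests in this patch (the command strings are placeholders, not part of the vendored sources):

    use gix_filter::Driver;

    fn example_driver() -> Driver {
        Driver {
            name: "arrow".into(),
            // `%f` is substituted with the path of the file being filtered.
            clean: Some("arrow clean %f".into()),
            smudge: Some("arrow smudge %f".into()),
            process: Some("arrow process".into()),
            required: true,
        }
    }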
diff --git a/vendor/gix-filter/src/pipeline/convert.rs b/vendor/gix-filter/src/pipeline/convert.rs
new file mode 100644
index 000000000..1294944cb
--- /dev/null
+++ b/vendor/gix-filter/src/pipeline/convert.rs
@@ -0,0 +1,353 @@
+use std::{io::Read, path::Path};
+
+use bstr::BStr;
+
+use crate::{driver, eol, ident, pipeline::util::Configuration, worktree, Pipeline};
+
+///
+pub mod configuration {
+ use bstr::BString;
+
+ /// Errors related to the configuration of filter attributes.
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("The encoding named '{name}' isn't available")]
+ UnknownEncoding { name: BString },
+ #[error("Encodings must be names, like UTF-16, and cannot be booleans.")]
+ InvalidEncoding,
+ }
+}
+
+///
+pub mod to_git {
+ use bstr::BStr;
+
+ /// A function that writes a buffer like `fn(rela_path, &mut buf)` with the bytes of the object in the index that is the one
+ /// that should be converted.
+ pub type IndexObjectFn<'a> =
+ dyn FnMut(&BStr, &mut Vec<u8>) -> Result<Option<()>, Box<dyn std::error::Error + Send + Sync>> + 'a;
+
+ /// The error returned by [Pipeline::convert_to_git()][super::Pipeline::convert_to_git()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ Eol(#[from] crate::eol::convert_to_git::Error),
+ #[error(transparent)]
+ Worktree(#[from] crate::worktree::encode_to_git::Error),
+ #[error(transparent)]
+ Driver(#[from] crate::driver::apply::Error),
+ #[error(transparent)]
+ Configuration(#[from] super::configuration::Error),
+ #[error("Copy of driver process output to memory failed")]
+ ReadProcessOutputToBuffer(#[from] std::io::Error),
+ }
+}
+
+///
+pub mod to_worktree {
+ /// The error returned by [Pipeline::convert_to_worktree()][super::Pipeline::convert_to_worktree()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ Worktree(#[from] crate::worktree::encode_to_worktree::Error),
+ #[error(transparent)]
+ Driver(#[from] crate::driver::apply::Error),
+ #[error(transparent)]
+ Configuration(#[from] super::configuration::Error),
+ }
+}
+
+/// Access
+impl Pipeline {
+ /// Convert a `src` stream (to be found at `rela_path`) to a representation suitable for storage in `git`
+ /// based on the `attributes` at `rela_path`, which is passed as the first argument.
+ /// When converting to `crlf`, and depending on the configuration, `index_object` might be called to obtain the index
+ /// version of `src` if available. It can return `Ok(None)` if this information isn't available.
+ pub fn convert_to_git<R>(
+ &mut self,
+ mut src: R,
+ rela_path: &Path,
+ attributes: &mut dyn FnMut(&BStr, &mut gix_attributes::search::Outcome),
+ index_object: &mut to_git::IndexObjectFn<'_>,
+ ) -> Result<ToGitOutcome<'_, R>, to_git::Error>
+ where
+ R: std::io::Read,
+ {
+ let bstr_path = gix_path::into_bstr(rela_path);
+ let Configuration {
+ driver,
+ digest,
+ _attr_digest: _,
+ encoding,
+ apply_ident_filter,
+ } = Configuration::at_path(
+ bstr_path.as_ref(),
+ &self.options.drivers,
+ &mut self.attrs,
+ attributes,
+ self.options.eol_config,
+ )?;
+
+ let mut changed = false;
+ // this is just an approximation, but it's as good as it gets without reading the actual input.
+ let would_convert_eol = eol::convert_to_git(
+ b"\r\n",
+ digest,
+ &mut self.bufs.dest,
+ &mut |_| Ok(None),
+ eol::convert_to_git::Options {
+ round_trip_check: None,
+ config: self.options.eol_config,
+ },
+ )?;
+
+ if let Some(driver) = driver {
+ if let Some(mut read) = self.processes.apply(
+ driver,
+ &mut src,
+ driver::Operation::Clean,
+ self.context.with_path(bstr_path.as_ref()),
+ )? {
+ if !apply_ident_filter && encoding.is_none() && !would_convert_eol {
+ // Note that this is not typically a benefit in terms of saving memory as most filters
+ // aren't expected to make the output file larger. It's more about who is waiting for the filter's
+ // output to arrive, which won't be us now. For `git-lfs` it definitely won't matter though.
+ return Ok(ToGitOutcome::Process(read));
+ }
+ self.bufs.clear();
+ read.read_to_end(&mut self.bufs.src)?;
+ changed = true;
+ }
+ }
+ if !changed && (apply_ident_filter || encoding.is_some() || would_convert_eol) {
+ self.bufs.clear();
+ src.read_to_end(&mut self.bufs.src)?;
+ }
+
+ if let Some(encoding) = encoding {
+ worktree::encode_to_git(
+ &self.bufs.src,
+ encoding,
+ &mut self.bufs.dest,
+ if self.options.encodings_with_roundtrip_check.contains(&encoding) {
+ worktree::encode_to_git::RoundTripCheck::Fail
+ } else {
+ worktree::encode_to_git::RoundTripCheck::Skip
+ },
+ )?;
+ self.bufs.swap();
+ changed = true;
+ }
+
+ if eol::convert_to_git(
+ &self.bufs.src,
+ digest,
+ &mut self.bufs.dest,
+ &mut |buf| index_object(bstr_path.as_ref(), buf),
+ eol::convert_to_git::Options {
+ round_trip_check: self.options.crlf_roundtrip_check.to_eol_roundtrip_check(rela_path),
+ config: self.options.eol_config,
+ },
+ )? {
+ self.bufs.swap();
+ changed = true;
+ }
+
+ if apply_ident_filter && ident::undo(&self.bufs.src, &mut self.bufs.dest) {
+ self.bufs.swap();
+ changed = true;
+ }
+ Ok(if changed {
+ ToGitOutcome::Buffer(&self.bufs.src)
+ } else {
+ ToGitOutcome::Unchanged(src)
+ })
+ }
+
+ /// Convert a `src` buffer located at `rela_path` (in the index) from what's in `git` to the worktree representation,
+ /// asking for `attributes` with `rela_path` as the first argument to configure the operation automatically.
+ /// `can_delay` defines if long-running processes can delay their response, and if they *choose* to, the caller has to
+ /// specifically deal with it by interacting with the [`driver_state`][Pipeline::driver_state_mut()] directly.
+ ///
+ /// The reason `src` is a buffer is to indicate that `git` generally doesn't stream data well, so it should be small enough
+ /// to be performant while being held in memory. This is typically the case, especially if `git-lfs` is used as intended.
+ pub fn convert_to_worktree<'input>(
+ &mut self,
+ src: &'input [u8],
+ rela_path: &BStr,
+ attributes: &mut dyn FnMut(&BStr, &mut gix_attributes::search::Outcome),
+ can_delay: driver::apply::Delay,
+ ) -> Result<ToWorktreeOutcome<'input, '_>, to_worktree::Error> {
+ let Configuration {
+ driver,
+ digest,
+ _attr_digest: _,
+ encoding,
+ apply_ident_filter,
+ } = Configuration::at_path(
+ rela_path,
+ &self.options.drivers,
+ &mut self.attrs,
+ attributes,
+ self.options.eol_config,
+ )?;
+
+ let mut bufs = self.bufs.with_src(src);
+ let (src, dest) = bufs.src_and_dest();
+ if apply_ident_filter && ident::apply(src, self.options.object_hash, dest) {
+ bufs.swap();
+ }
+
+ let (src, dest) = bufs.src_and_dest();
+ if eol::convert_to_worktree(src, digest, dest, self.options.eol_config) {
+ bufs.swap();
+ };
+
+ if let Some(encoding) = encoding {
+ let (src, dest) = bufs.src_and_dest();
+ worktree::encode_to_worktree(src, encoding, dest)?;
+ bufs.swap();
+ }
+
+ if let Some(driver) = driver {
+ let (mut src, _dest) = bufs.src_and_dest();
+ if let Some(maybe_delayed) = self.processes.apply_delayed(
+ driver,
+ &mut src,
+ driver::Operation::Smudge,
+ can_delay,
+ self.context.with_path(rela_path),
+ )? {
+ return Ok(ToWorktreeOutcome::Process(maybe_delayed));
+ }
+ }
+
+ Ok(match bufs.ro_src {
+ Some(src) => ToWorktreeOutcome::Unchanged(src),
+ None => ToWorktreeOutcome::Buffer(bufs.src),
+ })
+ }
+}
+
+/// The result of a conversion with zero or more filters to be stored in git.
+pub enum ToGitOutcome<'pipeline, R> {
+ /// The original input wasn't changed and the reader is still available for consumption.
+ Unchanged(R),
+ /// An external filter (and only that) was applied and its results *have to be consumed*.
+ Process(Box<dyn std::io::Read + 'pipeline>),
+ /// A reference to the result of one or more filters of which one didn't support streaming.
+ ///
+ /// This can happen if an `eol`, `working-tree-encoding` or `ident` filter is applied, possibly on top of an external filter.
+ Buffer(&'pipeline [u8]),
+}
+
+/// The result of a conversion with zero or more filters.
+///
+/// ### Panics
+///
+/// If `std::io::Read` is used on it and the output is delayed, a panic will occur. The caller is responsible for either disallowing delayed
+/// results or, if allowed, handling them. Use [`is_delayed()`][Self::is_delayed()].
+pub enum ToWorktreeOutcome<'input, 'pipeline> {
+ /// The original input wasn't changed and the original buffer is present.
+ Unchanged(&'input [u8]),
+ /// A reference to the result of one or more filters of which one didn't support streaming.
+ ///
+ /// This can happen if an `eol`, `working-tree-encoding` or `ident` filter is applied, possibly on top of an external filter.
+ Buffer(&'pipeline [u8]),
+ /// An external filter (and only that) was applied and its results *have to be consumed*. Note that the output might be delayed,
+ /// which requires special handling to eventually receive it.
+ Process(driver::apply::MaybeDelayed<'pipeline>),
+}
+
+impl<'input, 'pipeline> ToWorktreeOutcome<'input, 'pipeline> {
+ /// Return `true` if this outcome is delayed. In that case, using [`Read`][std::io::Read] would cause a panic.
+ pub fn is_delayed(&self) -> bool {
+ matches!(
+ self,
+ ToWorktreeOutcome::Process(driver::apply::MaybeDelayed::Delayed(_))
+ )
+ }
+
+ /// Returns `true` if the input buffer was actually changed, or `false` if it is returned directly.
+ pub fn is_changed(&self) -> bool {
+ !matches!(self, ToWorktreeOutcome::Unchanged(_))
+ }
+
+ /// Return a buffer if we contain one, or `None` otherwise.
+ ///
+ /// This method is useful only if it's clear that no driver is available, which may cause a stream to be returned and not a buffer.
+ pub fn as_bytes(&self) -> Option<&[u8]> {
+ match self {
+ ToWorktreeOutcome::Unchanged(b) | ToWorktreeOutcome::Buffer(b) => Some(b),
+ ToWorktreeOutcome::Process(_) => None,
+ }
+ }
+
+ /// Return a stream to read the driver's output from, if possible.
+ ///
+ /// Note that this is only the case if the driver process was applied last *and* didn't delay its output.
+ pub fn as_read(&mut self) -> Option<&mut (dyn std::io::Read + '_)> {
+ match self {
+ ToWorktreeOutcome::Process(driver::apply::MaybeDelayed::Delayed(_))
+ | ToWorktreeOutcome::Unchanged(_)
+ | ToWorktreeOutcome::Buffer(_) => None,
+ ToWorktreeOutcome::Process(driver::apply::MaybeDelayed::Immediate(read)) => Some(read),
+ }
+ }
+}
+
+impl<'input, 'pipeline> std::io::Read for ToWorktreeOutcome<'input, 'pipeline> {
+ fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
+ match self {
+ ToWorktreeOutcome::Unchanged(b) => b.read(buf),
+ ToWorktreeOutcome::Buffer(b) => b.read(buf),
+ ToWorktreeOutcome::Process(driver::apply::MaybeDelayed::Delayed(_)) => {
+ panic!("BUG: must not try to read delayed output")
+ }
+ ToWorktreeOutcome::Process(driver::apply::MaybeDelayed::Immediate(r)) => r.read(buf),
+ }
+ }
+}
+
+impl<'pipeline, R> std::io::Read for ToGitOutcome<'pipeline, R>
+where
+ R: std::io::Read,
+{
+ fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
+ match self {
+ ToGitOutcome::Unchanged(r) => r.read(buf),
+ ToGitOutcome::Process(r) => r.read(buf),
+ ToGitOutcome::Buffer(mut r) => r.read(buf),
+ }
+ }
+}
+
+impl<R> ToGitOutcome<'_, R>
+where
+ R: std::io::Read,
+{
+ /// If we contain a buffer, and not a stream, return it.
+ pub fn as_bytes(&self) -> Option<&[u8]> {
+ match self {
+ ToGitOutcome::Unchanged(_) | ToGitOutcome::Process(_) => None,
+ ToGitOutcome::Buffer(b) => Some(b),
+ }
+ }
+
+ /// Return a stream to read the driver's output from. This is only possible if there is only a driver, and no other filter.
+ pub fn as_read(&mut self) -> Option<&mut (dyn std::io::Read + '_)> {
+ match self {
+ ToGitOutcome::Process(read) => Some(read),
+ ToGitOutcome::Unchanged(read) => Some(read),
+ ToGitOutcome::Buffer(_) => None,
+ }
+ }
+
+ /// Returns `true` if the input buffer was actually changed, or `false` if it is returned directly.
+ pub fn is_changed(&self) -> bool {
+ !matches!(self, ToGitOutcome::Unchanged(_))
+ }
+}
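
For orientation, a sketch of calling `convert_to_worktree()` on a default pipeline (not part of the vendored sources; the path, closure and error handling are illustrative). With no attributes and no drivers configured, the input is handed back unchanged:

    use gix_filter::{driver::apply::Delay, Pipeline};

    fn smudge_in_memory() -> Result<(), Box<dyn std::error::Error>> {
        let mut pipeline = Pipeline::default();
        let outcome = pipeline.convert_to_worktree(
            b"$Id$\ncontent\n",
            "file.txt".into(),
            &mut |_path, _attrs| {}, // no attributes are set in this sketch
            Delay::Allow,
        )?;
        assert!(!outcome.is_changed());
        Ok(())
    }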
diff --git a/vendor/gix-filter/src/pipeline/mod.rs b/vendor/gix-filter/src/pipeline/mod.rs
new file mode 100644
index 000000000..0193a4520
--- /dev/null
+++ b/vendor/gix-filter/src/pipeline/mod.rs
@@ -0,0 +1,115 @@
+use bstr::BString;
+
+use crate::{driver, eol, Driver, Pipeline};
+
+/// Define how to perform CRLF round-trip checking when converting to git.
+#[derive(Default, Debug, Copy, Clone, Eq, PartialEq)]
+pub enum CrlfRoundTripCheck {
+ /// Fail with an error if CRLF conversion isn't round-trip safe.
+ Fail,
+ /// Emit a warning using `gix_trace::warn!`, but don't fail.
+ ///
+ /// Note that the parent application has to set up tracing to make these events visible, along with a parent `span!`.
+ #[default]
+ Warn,
+ /// Do nothing, do not perform round-trip check at all.
+ Skip,
+}
+
+/// Additional configuration for the filter pipeline.
+#[derive(Default, Clone)]
+pub struct Options {
+ /// Available (external) driver programs to invoke if attributes for path configure them.
+ pub drivers: Vec<Driver>,
+ /// Global options to configure end-of-line conversions, to worktree or to git.
+ pub eol_config: eol::Configuration,
+ /// How to perform round-trip checks during end-of-line conversions to git.
+ pub crlf_roundtrip_check: CrlfRoundTripCheck,
+ /// All worktree encodings for which round-trip checks should be performed.
+ pub encodings_with_roundtrip_check: Vec<&'static encoding_rs::Encoding>,
+ /// The object hash to use when applying the `ident` filter.
+ pub object_hash: gix_hash::Kind,
+}
+
+/// Context that typically doesn't change throughout the lifetime of a pipeline, for use with `process` filters.
+///
+/// Note that this is quite specific to third-party filters that actually make use of this additional context.
+#[derive(Default, Debug, Clone)]
+pub struct Context {
+ /// The name of the reference that `HEAD` is pointing to. It's passed to `process` filters if present.
+ pub ref_name: Option<BString>,
+ /// The root-level tree that contains the current entry directly or indirectly, or the commit owning the tree (if available).
+ ///
+ /// This is passed to `process` filters if present.
+ pub treeish: Option<gix_hash::ObjectId>,
+ /// The actual blob-hash of the data we are processing. It's passed to `process` filters if present.
+ ///
+ /// Note that this hash might be different from the `$Id$` of the respective `ident` filter, as the latter generates the hash itself.
+ pub blob: Option<gix_hash::ObjectId>,
+}
+
+const ATTRS: [&str; 6] = ["crlf", "ident", "filter", "eol", "text", "working-tree-encoding"];
+
+/// Lifecycle
+impl Pipeline {
+ /// Create a new pipeline with configured `drivers` (which should be considered safe to invoke) as well as a way to initialize
+ /// our attributes with `collection`.
+ /// `eol_config` serves as fallback to understand how to convert line endings if no line-ending attributes are present.
+ /// `crlf_roundtrip_check` corresponds to the git-configuration of `core.safecrlf`.
+ /// `object_hash` is relevant for the `ident` filter.
+ pub fn new(collection: &gix_attributes::search::MetadataCollection, options: Options) -> Self {
+ let mut attrs = gix_attributes::search::Outcome::default();
+ attrs.initialize_with_selection(collection, ATTRS);
+ Pipeline {
+ attrs,
+ context: Context::default(),
+ processes: driver::State::default(),
+ options,
+ bufs: Default::default(),
+ }
+ }
+
+ /// Turn ourselves into state managing possibly running driver processes.
+ ///
+ /// This can be used to control how these are terminated via [driver::State::shutdown()].
+ pub fn into_driver_state(self) -> driver::State {
+ self.processes
+ }
+}
+
+impl Default for Pipeline {
+ fn default() -> Self {
+ let collection = Default::default();
+ Pipeline::new(&collection, Default::default())
+ }
+}
+
+/// Access
+impl Pipeline {
+ /// Return a mutable reference to the state that handles long running processes.
+ /// Interacting with it directly allows handling delayed results.
+ pub fn driver_state_mut(&mut self) -> &mut driver::State {
+ &mut self.processes
+ }
+
+ /// Provide mutable context that is made available to the process filters.
+ ///
+ /// The context set here is relevant for the [`convert_to_git()`][Self::convert_to_git()] and
+ /// [`convert_to_worktree()`][Self::convert_to_worktree()] methods.
+ pub fn driver_context_mut(&mut self) -> &mut Context {
+ &mut self.context
+ }
+
+ /// Return a set of options for configuration after instantiation.
+ pub fn options_mut(&mut self) -> &mut Options {
+ &mut self.options
+ }
+}
+
+///
+pub mod convert;
+
+pub(crate) mod util;
+
+#[cfg(test)]
+mod tests;
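
A construction sketch for the `Pipeline` above (not part of the vendored sources); it assumes an empty `MetadataCollection` and overrides a single option while keeping the rest at their defaults:

    use gix_filter::{pipeline, Pipeline};

    fn strict_pipeline() -> Pipeline {
        let collection = gix_attributes::search::MetadataCollection::default();
        Pipeline::new(
            &collection,
            pipeline::Options {
                crlf_roundtrip_check: pipeline::CrlfRoundTripCheck::Fail,
                ..Default::default()
            },
        )
    }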
diff --git a/vendor/gix-filter/src/pipeline/tests.rs b/vendor/gix-filter/src/pipeline/tests.rs
new file mode 100644
index 000000000..e41a040c6
--- /dev/null
+++ b/vendor/gix-filter/src/pipeline/tests.rs
@@ -0,0 +1,39 @@
+mod buffers {
+ use bstr::ByteSlice;
+
+ use crate::pipeline::util::Buffers;
+
+ #[test]
+ fn usage() {
+ let mut backing = Buffers::default();
+ let mut bufs = backing.with_src(b"a");
+
+ {
+ let (src, dest) = bufs.src_and_dest();
+ assert_eq!(src.as_bstr(), "a");
+ assert!(dest.is_empty());
+ dest.push(b'b');
+ }
+ assert!(bufs.ro_src.is_some(), "read-only source remains until swap");
+ bufs.swap();
+ assert!(
+ bufs.ro_src.is_none(),
+ "after swap it's not used anymore as recent data is in owned buffers"
+ );
+
+ let (src, dest) = bufs.src_and_dest();
+ assert_eq!(src.as_bstr(), "b", "buffers were swapped");
+ assert_eq!(dest.as_bstr(), "", "destination is new and cleared");
+ dest.push(b'c');
+ bufs.swap();
+
+ let (src, dest) = bufs.src_and_dest();
+ assert_eq!(src.as_bstr(), "c");
+ assert_eq!(dest.as_bstr(), "", "destination is cleared");
+
+ let mut bufs = backing.with_src(b"z");
+ let (src, dest) = bufs.src_and_dest();
+ assert_eq!(src.as_bstr(), "z");
+ assert_eq!(dest.as_bstr(), "", "output buffer was cleared by `with_src()`")
+ }
+}
diff --git a/vendor/gix-filter/src/pipeline/util.rs b/vendor/gix-filter/src/pipeline/util.rs
new file mode 100644
index 000000000..a92ac25ef
--- /dev/null
+++ b/vendor/gix-filter/src/pipeline/util.rs
@@ -0,0 +1,206 @@
+use std::path::Path;
+
+use bstr::BStr;
+use gix_attributes::StateRef;
+use smallvec::SmallVec;
+
+use crate::{
+ driver, eol,
+ eol::AttributesDigest,
+ pipeline::{convert::configuration, Context, CrlfRoundTripCheck},
+ Driver,
+};
+
+/// A utility to do buffer-swapping with.
+#[derive(Default, Clone)]
+pub(crate) struct Buffers {
+ pub src: Vec<u8>,
+ pub dest: Vec<u8>,
+}
+
+/// A utility to do buffer-swapping with.
+pub(crate) struct BuffersWithSource<'src, 'bufs> {
+ pub ro_src: Option<&'src [u8]>,
+ pub src: &'bufs mut Vec<u8>,
+ pub dest: &'bufs mut Vec<u8>,
+}
+
+impl Buffers {
+ pub fn with_src<'a, 'src>(&'a mut self, src: &'src [u8]) -> BuffersWithSource<'src, 'a> {
+ self.clear();
+ BuffersWithSource {
+ ro_src: Some(src),
+ src: &mut self.src,
+ dest: &mut self.dest,
+ }
+ }
+ pub fn clear(&mut self) {
+ self.src.clear();
+ self.dest.clear();
+ }
+
+ pub fn swap(&mut self) {
+ std::mem::swap(&mut self.src, &mut self.dest);
+ }
+}
+
+impl BuffersWithSource<'_, '_> {
+ /// Must be called after every change (i.e. when it's known that `dest` was written to).
+ pub fn swap(&mut self) {
+ self.ro_src.take();
+ std::mem::swap(&mut self.src, &mut self.dest);
+ self.dest.clear();
+ }
+ pub fn src_and_dest(&mut self) -> (&[u8], &mut Vec<u8>) {
+ match self.ro_src {
+ Some(src) => (src, &mut self.dest),
+ None => (self.src, &mut self.dest),
+ }
+ }
+}
+
+pub(crate) struct Configuration<'a> {
+ pub(crate) driver: Option<&'a Driver>,
+ /// What attributes say about CRLF handling.
+ pub(crate) _attr_digest: Option<eol::AttributesDigest>,
+ /// The final digest that includes configuration values
+ pub(crate) digest: eol::AttributesDigest,
+ pub(crate) encoding: Option<&'static encoding_rs::Encoding>,
+ /// Whether or not to apply the `ident` filter
+ pub(crate) apply_ident_filter: bool,
+}
+
+impl<'driver> Configuration<'driver> {
+ pub(crate) fn at_path(
+ rela_path: &BStr,
+ drivers: &'driver [Driver],
+ attrs: &mut gix_attributes::search::Outcome,
+ attributes: &mut dyn FnMut(&BStr, &mut gix_attributes::search::Outcome),
+ config: eol::Configuration,
+ ) -> Result<Configuration<'driver>, configuration::Error> {
+ fn extract_driver<'a>(drivers: &'a [Driver], attr: &gix_attributes::search::Match<'_>) -> Option<&'a Driver> {
+ if let StateRef::Value(name) = attr.assignment.state {
+ drivers.iter().find(|d| d.name == name.as_bstr())
+ } else {
+ None
+ }
+ }
+
+ fn extract_encoding(
+ attr: &gix_attributes::search::Match<'_>,
+ ) -> Result<Option<&'static encoding_rs::Encoding>, configuration::Error> {
+ match attr.assignment.state {
+ StateRef::Set | StateRef::Unset => Err(configuration::Error::InvalidEncoding),
+ StateRef::Value(name) => encoding_rs::Encoding::for_label(name.as_bstr())
+ .ok_or(configuration::Error::UnknownEncoding {
+ name: name.as_bstr().to_owned(),
+ })
+ .map(|encoding| {
+ // The working-tree-encoding is the encoding we have to expect in the working tree.
+ // If the specified one is the default encoding, there is nothing to do.
+ if encoding == encoding_rs::UTF_8 {
+ None
+ } else {
+ Some(encoding)
+ }
+ }),
+ StateRef::Unspecified => Ok(None),
+ }
+ }
+
+ /// This is based on `git_path_check_crlf` in the git codebase.
+ fn extract_crlf(attr: &gix_attributes::search::Match<'_>) -> Option<eol::AttributesDigest> {
+ match attr.assignment.state {
+ StateRef::Unspecified => None,
+ StateRef::Set => Some(eol::AttributesDigest::Text),
+ StateRef::Unset => Some(eol::AttributesDigest::Binary),
+ StateRef::Value(v) => {
+ if v.as_bstr() == "input" {
+ Some(eol::AttributesDigest::TextInput)
+ } else if v.as_bstr() == "auto" {
+ Some(eol::AttributesDigest::TextAuto)
+ } else {
+ None
+ }
+ }
+ }
+ }
+
+ fn extract_eol(attr: &gix_attributes::search::Match<'_>) -> Option<eol::Mode> {
+ match attr.assignment.state {
+ StateRef::Unspecified | StateRef::Unset | StateRef::Set => None,
+ StateRef::Value(v) => {
+ if v.as_bstr() == "lf" {
+ Some(eol::Mode::Lf)
+ } else if v.as_bstr() == "crlf" {
+ Some(eol::Mode::CrLf)
+ } else {
+ None
+ }
+ }
+ }
+ }
+
+ attributes(rela_path, attrs);
+ let attrs: SmallVec<[_; crate::pipeline::ATTRS.len()]> = attrs.iter_selected().collect();
+ let apply_ident_filter = attrs[1].assignment.state.is_set();
+ let driver = extract_driver(drivers, &attrs[2]);
+ let encoding = extract_encoding(&attrs[5])?;
+
+ let mut digest = extract_crlf(&attrs[4]);
+ if digest.is_none() {
+ digest = extract_crlf(&attrs[0]);
+ }
+
+ if digest != Some(AttributesDigest::Binary) {
+ let eol = extract_eol(&attrs[3]);
+ digest = match digest {
+ Some(AttributesDigest::TextAuto) if eol == Some(eol::Mode::Lf) => Some(AttributesDigest::TextAutoInput),
+ Some(AttributesDigest::TextAuto) if eol == Some(eol::Mode::CrLf) => {
+ Some(AttributesDigest::TextAutoCrlf)
+ }
+ _ => match eol {
+ Some(eol::Mode::CrLf) => Some(AttributesDigest::TextCrlf),
+ Some(eol::Mode::Lf) => Some(AttributesDigest::TextInput),
+ _ => digest,
+ },
+ };
+ }
+
+ let attr_digest = digest;
+ digest = match digest {
+ None => Some(config.auto_crlf.into()),
+ Some(AttributesDigest::TextAuto) => Some(config.to_eol().into()),
+ _ => digest,
+ };
+
+ Ok(Configuration {
+ driver,
+ _attr_digest: attr_digest,
+ digest: digest.expect("always set by now"),
+ encoding,
+ apply_ident_filter,
+ })
+ }
+}
+
+impl Context {
+ pub(crate) fn with_path<'a>(&self, rela_path: &'a BStr) -> driver::apply::Context<'a, '_> {
+ driver::apply::Context {
+ rela_path,
+ ref_name: self.ref_name.as_ref().map(AsRef::as_ref),
+ treeish: self.treeish,
+ blob: self.blob,
+ }
+ }
+}
+
+impl CrlfRoundTripCheck {
+ pub(crate) fn to_eol_roundtrip_check(self, rela_path: &Path) -> Option<eol::convert_to_git::RoundTripCheck<'_>> {
+ match self {
+ CrlfRoundTripCheck::Fail => Some(eol::convert_to_git::RoundTripCheck::Fail { rela_path }),
+ CrlfRoundTripCheck::Warn => Some(eol::convert_to_git::RoundTripCheck::Warn { rela_path }),
+ CrlfRoundTripCheck::Skip => None,
+ }
+ }
+}
diff --git a/vendor/gix-filter/src/worktree/encode_to_git.rs b/vendor/gix-filter/src/worktree/encode_to_git.rs
new file mode 100644
index 000000000..e9512d4ea
--- /dev/null
+++ b/vendor/gix-filter/src/worktree/encode_to_git.rs
@@ -0,0 +1,81 @@
+/// Whether or not to perform round-trip checks.
+#[derive(Debug, Copy, Clone)]
+pub enum RoundTripCheck {
+ /// Assure that we can losslessly convert the UTF-8 result back to the original encoding or fail with an error.
+ Fail,
+ /// Do not check if the encoding is round-trippable.
+ Skip,
+}
+
+/// The error returned by [`encode_to_git()`][super::encode_to_git()].
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum Error {
+ #[error("Cannot convert input of {input_len} bytes to UTF-8 without overflowing")]
+ Overflow { input_len: usize },
+ #[error("The input was malformed and could not be decoded as '{encoding}'")]
+ Malformed { encoding: &'static str },
+ #[error("Encoding from '{src_encoding}' to '{dest_encoding}' and back is not the same")]
+ RoundTrip {
+ src_encoding: &'static str,
+ dest_encoding: &'static str,
+ },
+}
+
+pub(crate) mod function {
+ use encoding_rs::DecoderResult;
+
+ use super::{Error, RoundTripCheck};
+
+ /// Decode `src` according to `src_encoding` to `UTF-8` for storage in git and place it in `buf`.
+ /// Note that the encoding is always applied; there is no conditional even if `src_encoding` already is `UTF-8`.
+ pub fn encode_to_git(
+ src: &[u8],
+ src_encoding: &'static encoding_rs::Encoding,
+ buf: &mut Vec<u8>,
+ round_trip: RoundTripCheck,
+ ) -> Result<(), Error> {
+ let mut decoder = src_encoding.new_decoder_with_bom_removal();
+ let buf_len = decoder
+ .max_utf8_buffer_length_without_replacement(src.len())
+ .ok_or(Error::Overflow { input_len: src.len() })?;
+ buf.clear();
+ buf.resize(buf_len, 0);
+ let (res, read, written) = decoder.decode_to_utf8_without_replacement(src, buf, true);
+ match res {
+ DecoderResult::InputEmpty => {
+ assert!(
+ buf_len >= written,
+ "encoding_rs estimates the maximum amount of bytes written correctly"
+ );
+ assert_eq!(read, src.len(), "input buffer should be fully consumed");
+ buf.truncate(written);
+ }
+ DecoderResult::OutputFull => {
+ unreachable!("we assure that the output buffer is big enough as per the encoder's estimate")
+ }
+ DecoderResult::Malformed(_, _) => {
+ return Err(Error::Malformed {
+ encoding: src_encoding.name(),
+ })
+ }
+ }
+
+ match round_trip {
+ RoundTripCheck::Fail => {
+ // SAFETY: we trust `encoding_rs` to output valid UTF-8 only if we ask it to.
+ #[allow(unsafe_code)]
+ let str = unsafe { std::str::from_utf8_unchecked(buf) };
+ let (should_equal_src, _actual_encoding, _had_errors) = src_encoding.encode(str);
+ if should_equal_src != src {
+ return Err(Error::RoundTrip {
+ src_encoding: src_encoding.name(),
+ dest_encoding: "UTF-8",
+ });
+ }
+ }
+ RoundTripCheck::Skip => {}
+ }
+ Ok(())
+ }
+}
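
A decoding sketch for `encode_to_git` above (not part of the vendored sources), using the `encoding` re-export from the crate root; the round-trip check is skipped here since `encoding_rs` encoders do not output UTF-16:

    use gix_filter::{encoding, worktree};

    fn decode_utf16_for_storage() -> Result<(), worktree::encode_to_git::Error> {
        // "hi" as UTF-16LE bytes.
        let src = [0x68, 0x00, 0x69, 0x00];
        let mut buf = Vec::new();
        worktree::encode_to_git(&src, encoding::UTF_16LE, &mut buf, worktree::encode_to_git::RoundTripCheck::Skip)?;
        assert_eq!(&buf[..], &b"hi"[..]);
        Ok(())
    }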
diff --git a/vendor/gix-filter/src/worktree/encode_to_worktree.rs b/vendor/gix-filter/src/worktree/encode_to_worktree.rs
new file mode 100644
index 000000000..0fddeadc0
--- /dev/null
+++ b/vendor/gix-filter/src/worktree/encode_to_worktree.rs
@@ -0,0 +1,60 @@
+/// The error returned by [`encode_to_worktree()`][super::encode_to_worktree()].
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum Error {
+ #[error("Cannot convert input of {input_len} UTF-8 bytes to target encoding without overflowing")]
+ Overflow { input_len: usize },
+ #[error("Input was not UTF-8 encoded")]
+ InputAsUtf8(#[from] std::str::Utf8Error),
+ #[error("The character '{character}' could not be mapped to the {worktree_encoding}")]
+ Unmappable {
+ character: char,
+ worktree_encoding: &'static str,
+ },
+}
+
+pub(crate) mod function {
+ use encoding_rs::EncoderResult;
+
+ use super::Error;
+
+ /// Encode `src_utf8`, which is assumed to be UTF-8 encoded, according to `worktree_encoding` for placement in the working directory,
+ /// and write it to `buf`, possibly resizing it.
+ /// Note that the encoding is always applied; there is no conditional even if `worktree_encoding` and the `src` encoding are the same.
+ pub fn encode_to_worktree(
+ src_utf8: &[u8],
+ worktree_encoding: &'static encoding_rs::Encoding,
+ buf: &mut Vec<u8>,
+ ) -> Result<(), Error> {
+ let mut encoder = worktree_encoding.new_encoder();
+ let buf_len = encoder
+ .max_buffer_length_from_utf8_if_no_unmappables(src_utf8.len())
+ .ok_or(Error::Overflow {
+ input_len: src_utf8.len(),
+ })?;
+ buf.clear();
+ buf.resize(buf_len, 0);
+ let src = std::str::from_utf8(src_utf8)?;
+ let (res, read, written) = encoder.encode_from_utf8_without_replacement(src, buf, true);
+ match res {
+ EncoderResult::InputEmpty => {
+ assert!(
+ buf_len >= written,
+ "encoding_rs estimates the maximum amount of bytes written correctly"
+ );
+ assert_eq!(read, src_utf8.len(), "input buffer should be fully consumed");
+ buf.truncate(written);
+ }
+ EncoderResult::OutputFull => {
+ unreachable!("we assure that the output buffer is big enough as per the encoder's estimate")
+ }
+ EncoderResult::Unmappable(c) => {
+ return Err(Error::Unmappable {
+ worktree_encoding: worktree_encoding.name(),
+ character: c,
+ })
+ }
+ }
+ Ok(())
+ }
+}
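
And the reverse direction as a sketch (not part of the vendored sources), encoding UTF-8 input to a legacy single-byte encoding for checkout:

    use gix_filter::{encoding, worktree};

    fn encode_for_checkout() -> Result<(), worktree::encode_to_worktree::Error> {
        let mut buf = Vec::new();
        // 'é' maps to byte 0xE9 in windows-1252.
        worktree::encode_to_worktree("héllo".as_bytes(), encoding::WINDOWS_1252, &mut buf)?;
        assert_eq!(buf, [0x68, 0xE9, 0x6C, 0x6C, 0x6F]);
        Ok(())
    }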
diff --git a/vendor/gix-filter/src/worktree/encoding.rs b/vendor/gix-filter/src/worktree/encoding.rs
new file mode 100644
index 000000000..0b75adc96
--- /dev/null
+++ b/vendor/gix-filter/src/worktree/encoding.rs
@@ -0,0 +1,31 @@
+use bstr::BStr;
+use encoding_rs::Encoding;
+
+///
+pub mod for_label {
+ use bstr::BString;
+
+ /// The error returned by [for_label()][super::for_label()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("An encoding named '{name}' is not known")]
+ Unknown { name: BString },
+ }
+}
+
+/// Try to produce a new `Encoding` for `label` or report an error if it is not known.
+///
+/// ### Deviation
+///
+/// * There is no special handling of UTF-16LE/BE with checks whether the data contains a BOM or not, like `git` does, as we don't expect to have
+///   the data available here.
+/// * Special `-BOM` suffixed versions of `UTF-16` encodings are not supported.
+pub fn for_label<'a>(label: impl Into<&'a BStr>) -> Result<&'static Encoding, for_label::Error> {
+ let mut label = label.into();
+ if label == "latin-1" {
+ label = "ISO-8859-1".into();
+ }
+ let enc = Encoding::for_label(label.as_ref()).ok_or_else(|| for_label::Error::Unknown { name: label.into() })?;
+ Ok(enc)
+}
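
A lookup sketch for `for_label` above (not part of the vendored sources):

    use gix_filter::worktree::encoding;

    fn lookup_encoding() {
        let enc = encoding::for_label("UTF-16LE").expect("label is known");
        assert_eq!(enc.name(), "UTF-16LE");
        // The `latin-1` alias is translated to ISO-8859-1 before the lookup.
        assert!(encoding::for_label("latin-1").is_ok());
    }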
diff --git a/vendor/gix-filter/src/worktree/mod.rs b/vendor/gix-filter/src/worktree/mod.rs
new file mode 100644
index 000000000..3b13ea49e
--- /dev/null
+++ b/vendor/gix-filter/src/worktree/mod.rs
@@ -0,0 +1,16 @@
+//! Worktree encodings are powered by the `encoding_rs` crate, which has a narrower focus than the `iconv` library. Thus this implementation
+//! is inherently more limited but will handle the common cases.
+//!
+//! Note that for encoding to legacy formats, [additional normalization steps](https://docs.rs/encoding_rs/0.8.32/encoding_rs/#preparing-text-for-the-encoders)
+//! can be taken, which we do not yet take unless there are specific examples or problems to solve.
+
+///
+pub mod encoding;
+
+///
+pub mod encode_to_git;
+pub use encode_to_git::function::encode_to_git;
+
+///
+pub mod encode_to_worktree;
+pub use encode_to_worktree::function::encode_to_worktree;
diff --git a/vendor/gix-filter/tests/driver/mod.rs b/vendor/gix-filter/tests/driver/mod.rs
new file mode 100644
index 000000000..c5a38847e
--- /dev/null
+++ b/vendor/gix-filter/tests/driver/mod.rs
@@ -0,0 +1,399 @@
+use std::path::PathBuf;
+
+use once_cell::sync::Lazy;
+
+static DRIVER: Lazy<PathBuf> = Lazy::new(|| {
+ let mut cargo = std::process::Command::new(env!("CARGO"));
+ let res = cargo
+ .args(["build", "--example", "arrow"])
+ .status()
+ .expect("cargo should run fine");
+ assert!(res.success(), "cargo invocation should be successful");
+
+ let path = PathBuf::from(env!("CARGO_TARGET_TMPDIR"))
+ .ancestors()
+ .nth(1)
+ .expect("first parent in target dir")
+ .join("debug")
+ .join("examples")
+ .join(if cfg!(windows) { "arrow.exe" } else { "arrow" });
+ assert!(path.is_file(), "Expecting driver to be located at {path:?}");
+ path
+});
+
+mod baseline {
+ use crate::driver::DRIVER;
+
+ #[test]
+ fn our_implementation_used_by_git() -> crate::Result {
+ let mut exe = DRIVER.to_string_lossy().into_owned();
+ if cfg!(windows) {
+ exe = exe.replace('\\', "/");
+ }
+ gix_testtools::scripted_fixture_read_only_with_args("baseline.sh", [exe])?;
+ Ok(())
+ }
+}
+
+mod shutdown {
+ use std::time::Duration;
+
+ use gix_filter::driver::{shutdown::Mode, Operation, Process};
+
+ use crate::driver::apply::driver_with_process;
+
+ pub(crate) fn extract_client(
+ res: Option<gix_filter::driver::Process<'_>>,
+ ) -> &mut gix_filter::driver::process::Client {
+ match res {
+ Some(Process::SingleFile { .. }) | None => {
+ unreachable!("process is configured")
+ }
+ Some(Process::MultiFile { client, .. }) => client,
+ }
+ }
+
+ #[test]
+ fn ignore_when_waiting() -> crate::Result {
+ let mut state = gix_filter::driver::State::default();
+ let driver = driver_with_process();
+ let client = extract_client(state.maybe_launch_process(&driver, Operation::Clean, "does not matter".into())?);
+
+ assert!(
+ client
+ .invoke("wait-1-s", &mut None.into_iter(), &mut &b""[..])?
+ .is_success(),
+ "this lets the process wait for a second using our hidden command"
+ );
+
+ let start = std::time::Instant::now();
+ assert_eq!(state.shutdown(Mode::Ignore)?.len(), 1, "we only launch one process");
+ assert!(
+ start.elapsed() < Duration::from_secs(1),
+ "when ignoring processes, there should basically be no wait time"
+ );
+ Ok(())
+ }
+}
+
+pub(crate) mod apply {
+ use std::io::Read;
+
+ use bstr::ByteSlice;
+ use gix_filter::{
+ driver,
+ driver::{apply, apply::Delay, Operation},
+ Driver,
+ };
+
+ use crate::driver::{shutdown::extract_client, DRIVER};
+
+ fn driver_no_process() -> Driver {
+ let mut driver = driver_with_process();
+ driver.process = None;
+ driver
+ }
+
+ pub(crate) fn driver_with_process() -> Driver {
+ let mut exe = DRIVER.to_string_lossy().into_owned();
+ if cfg!(windows) {
+ exe = exe.replace('\\', "/");
+ }
+ Driver {
+ name: "arrow".into(),
+ clean: Some((exe.clone() + " clean %f").into()),
+ smudge: Some((exe.clone() + " smudge %f").into()),
+ process: Some((exe + " process").into()),
+ required: true,
+ }
+ }
+
+ #[test]
+ fn missing_driver_means_no_filter_is_applied() -> crate::Result {
+ let mut state = gix_filter::driver::State::default();
+ let mut driver = driver_no_process();
+ driver.smudge = None;
+ assert!(state
+ .apply(
+ &driver,
+ &mut std::io::empty(),
+ Operation::Smudge,
+ context_from_path("ignored")
+ )?
+ .is_none());
+
+ driver.clean = None;
+ assert!(state
+ .apply(
+ &driver,
+ &mut std::io::empty(),
+ Operation::Clean,
+ context_from_path("ignored")
+ )?
+ .is_none());
+ Ok(())
+ }
+
+ #[test]
+ fn a_crashing_process_can_restart_it() -> crate::Result {
+ let mut state = gix_filter::driver::State::default();
+ let driver = driver_with_process();
+ assert!(
+ matches!(
+ state.apply(
+ &driver,
+ &mut std::io::empty(),
+ Operation::Smudge,
+ context_from_path("fail")
+ ),
+ Err(gix_filter::driver::apply::Error::ProcessInvoke { .. })
+ ),
+ "cannot invoke if failure is requested"
+ );
+
+ let mut filtered = state
+ .apply(
+ &driver,
+ &mut std::io::empty(),
+ Operation::Smudge,
+ context_from_path("fine"),
+ )
+ .expect("process restarts fine")
+ .expect("filter applied");
+ let mut buf = Vec::new();
+ filtered.read_to_end(&mut buf)?;
+ assert_eq!(buf.len(), 0, "nothing was done if input is empty, but it was applied");
+ Ok(())
+ }
+
+ #[test]
+ fn process_status_abort_disables_capability() -> crate::Result {
+ let mut state = gix_filter::driver::State::default();
+ let driver = driver_with_process();
+ let client = extract_client(state.maybe_launch_process(&driver, Operation::Clean, "does not matter".into())?);
+
+ assert!(client
+ .invoke("next-smudge-aborts", &mut None.into_iter(), &mut &b""[..])?
+ .is_success());
+ assert!(
+ matches!(state.apply(&driver, &mut std::io::empty(), Operation::Smudge, context_from_path("any")), Err(driver::apply::Error::ProcessStatus {status: driver::process::Status::Named(name), ..}) if name == "abort")
+ );
+ assert!(
+ state
+ .apply(
+ &driver,
+ &mut std::io::empty(),
+ Operation::Smudge,
+ context_from_path("any")
+ )?
+ .is_none(),
+ "smudge is now disabled permanently"
+ );
+ Ok(())
+ }
+
+ #[test]
+ fn process_status_strange_shuts_down_process() -> crate::Result {
+ let mut state = gix_filter::driver::State::default();
+ let driver = driver_with_process();
+ let client = extract_client(state.maybe_launch_process(&driver, Operation::Clean, "does not matter".into())?);
+
+ assert!(client
+ .invoke(
+ "next-invocation-returns-strange-status-and-smudge-fails-permanently",
+ &mut None.into_iter(),
+ &mut &b""[..]
+ )?
+ .is_success());
+ assert!(
+ matches!(state.apply(&driver, &mut std::io::empty(), Operation::Smudge, context_from_path("any")), Err(driver::apply::Error::ProcessStatus {status: driver::process::Status::Named(name), ..}) if name == "send-term-signal")
+ );
+ let mut filtered = state
+ .apply(&driver, &mut &b"hi\n"[..], Operation::Smudge, context_from_path("any"))?
+ .expect("the process won't fail as it got restarted");
+ let mut buf = Vec::new();
+ filtered.read_to_end(&mut buf)?;
+ assert_eq!(buf.as_bstr(), "➡hi\n", "the process works again as expected");
+ Ok(())
+ }
+
+ #[test]
+ fn smudge_and_clean_failure_is_translated_to_observable_error_for_required_drivers() -> crate::Result {
+ let mut state = gix_filter::driver::State::default();
+ let driver = driver_no_process();
+ assert!(driver.required);
+
+ let mut filtered = state
+ .apply(
+ &driver,
+ &mut &b"hello\nthere\n"[..],
+ driver::Operation::Smudge,
+ context_from_path("do/fail"),
+ )?
+ .expect("filter present");
+ let mut buf = Vec::new();
+ let err = filtered.read_to_end(&mut buf).unwrap_err();
+ assert!(err.to_string().ends_with(" failed"));
+
+ Ok(())
+ }
+
+ #[test]
+ fn smudge_and_clean_failure_means_nothing_if_required_is_false() -> crate::Result {
+ let mut state = gix_filter::driver::State::default();
+ let mut driver = driver_no_process();
+ driver.required = false;
+
+ let mut filtered = state
+ .apply(
+ &driver,
+ &mut &b"hello\nthere\n"[..],
+ driver::Operation::Clean,
+ context_from_path("do/fail"),
+ )?
+ .expect("filter present");
+ let num_read = std::io::copy(&mut filtered, &mut std::io::sink())?;
+ assert_eq!(
+ num_read, 0,
+ "the example fails right away so no output is produced to stdout"
+ );
+
+ Ok(())
+ }
+
+ #[test]
+ fn smudge_and_clean_series() -> crate::Result {
+ let mut state = gix_filter::driver::State::default();
+ for mut driver in [driver_no_process(), driver_with_process()] {
+ assert!(
+ driver.required,
+ "we want errors to definitely show, and don't expect them"
+ );
+ if driver.process.is_none() {
+ // on CI on MacOS, the process seems to actually exit with non-zero status, let's see if this fixes it.
+ driver.required = false;
+ }
+
+ let input = "hello\nthere\n";
+ let mut filtered = state
+ .apply(
+ &driver,
+ &mut input.as_bytes(),
+ driver::Operation::Smudge,
+ context_from_path("some/path.txt"),
+ )?
+ .expect("filter present");
+ let mut buf = Vec::new();
+ filtered.read_to_end(&mut buf)?;
+ drop(filtered);
+ assert_eq!(
+ buf.as_bstr(),
+ "➡hello\n➡there\n",
+ "arrow applies indentation in smudge mode"
+ );
+
+ let smudge_result = buf.clone();
+ let mut filtered = state
+ .apply(
+ &driver,
+ &mut smudge_result.as_bytes(),
+ driver::Operation::Clean,
+ context_from_path("some/path.txt"),
+ )?
+ .expect("filter present");
+ buf.clear();
+ filtered.read_to_end(&mut buf)?;
+ assert_eq!(
+ buf.as_bstr(),
+ input,
+ "the clean filter reverses the smudge filter (and we call the right one)"
+ );
+ }
+ state.shutdown(gix_filter::driver::shutdown::Mode::WaitForProcesses)?;
+ Ok(())
+ }
+
+ #[test]
+ fn smudge_and_clean_delayed() -> crate::Result {
+ let mut state = gix_filter::driver::State::default();
+ let driver = driver_with_process();
+ let input = "hello\nthere\n";
+ let process_key = extract_delayed_key(state.apply_delayed(
+ &driver,
+ &mut input.as_bytes(),
+ driver::Operation::Smudge,
+ Delay::Allow,
+ context_from_path("sub/a.txt"),
+ )?);
+
+ let paths = state.list_delayed_paths(&process_key)?;
+ assert_eq!(
+ paths.len(),
+ 1,
+ "delayed paths have to be queried again and are available until that happens"
+ );
+ assert_eq!(paths[0], "sub/a.txt");
+
+ let mut filtered = state.fetch_delayed(&process_key, paths[0].as_ref(), driver::Operation::Smudge)?;
+ let mut buf = Vec::new();
+ filtered.read_to_end(&mut buf)?;
+ drop(filtered);
+ assert_eq!(
+ buf.as_bstr(),
+ "➡hello\n➡there\n",
+ "arrow applies indentation also in delayed mode"
+ );
+
+ let paths = state.list_delayed_paths(&process_key)?;
+ assert_eq!(paths.len(), 0, "delayed paths are consumed once fetched");
+
+ let process_key = extract_delayed_key(state.apply_delayed(
+ &driver,
+ &mut buf.as_bytes(),
+ driver::Operation::Clean,
+ Delay::Allow,
+ context_from_path("sub/b.txt"),
+ )?);
+
+ let paths = state.list_delayed_paths(&process_key)?;
+ assert_eq!(
+ paths.len(),
+ 1,
+ "we can do another round of commands with the same process (at least if the implementation supports it), it's probably not done in practice"
+ );
+ assert_eq!(paths[0], "sub/b.txt");
+
+ let mut filtered = state.fetch_delayed(&process_key, paths[0].as_ref(), driver::Operation::Clean)?;
+ let mut buf = Vec::new();
+ filtered.read_to_end(&mut buf)?;
+ drop(filtered);
+ assert_eq!(
+ buf.as_bstr(),
+ input,
+ "it's possible to apply clean in delayed mode as well"
+ );
+
+ let paths = state.list_delayed_paths(&process_key)?;
+ assert_eq!(paths.len(), 0, "delayed paths are consumed once fetched");
+
+ state.shutdown(gix_filter::driver::shutdown::Mode::WaitForProcesses)?;
+ Ok(())
+ }
+
+ pub(crate) fn extract_delayed_key(res: Option<apply::MaybeDelayed<'_>>) -> driver::Key {
+ match res {
+ Some(apply::MaybeDelayed::Immediate(_)) | None => {
+ unreachable!("must use process that supports delaying")
+ }
+ Some(apply::MaybeDelayed::Delayed(key)) => key,
+ }
+ }
+ fn context_from_path(path: &str) -> apply::Context<'_, '_> {
+ apply::Context {
+ rela_path: path.into(),
+ ref_name: None,
+ treeish: None,
+ blob: None,
+ }
+ }
+}
diff --git a/vendor/gix-filter/tests/eol/convert_to_git.rs b/vendor/gix-filter/tests/eol/convert_to_git.rs
new file mode 100644
index 000000000..0ab7d7e79
--- /dev/null
+++ b/vendor/gix-filter/tests/eol/convert_to_git.rs
@@ -0,0 +1,175 @@
+use std::path::Path;
+
+use bstr::{ByteSlice, ByteVec};
+use gix_filter::{eol, eol::AttributesDigest};
+
+#[test]
+fn with_binary_attribute_is_never_converted() {
+ let mut buf = Vec::new();
+ let changed = eol::convert_to_git(
+ b"hi\r\nho",
+ AttributesDigest::Binary,
+ &mut buf,
+ &mut no_call,
+ Default::default(),
+ )
+ .expect("no error");
+ assert!(!changed, "the user marked it as binary so it's never being touched");
+}
+
+#[test]
+fn no_crlf_means_no_work() -> crate::Result {
+ let mut buf = Vec::new();
+ let changed = eol::convert_to_git(
+ b"hi",
+ AttributesDigest::TextCrlf,
+ &mut buf,
+ &mut no_call,
+ Default::default(),
+ )
+ .expect("no error");
+ assert!(!changed);
+
+ let changed = eol::convert_to_git(
+ b"hi",
+ AttributesDigest::TextAutoCrlf,
+ &mut buf,
+ &mut no_object_in_index,
+ Default::default(),
+ )
+ .expect("no error");
+ assert!(!changed, "in auto-mode, the object is queried in the index as well.");
+ Ok(())
+}
+
+#[test]
+fn detected_as_binary() -> crate::Result {
+ let mut buf = Vec::new();
+ let changed = eol::convert_to_git(
+ b"hi\0zero makes it binary",
+ AttributesDigest::TextAuto,
+ &mut buf,
+ &mut no_call,
+ Default::default(),
+ )
+ .expect("no error");
+ assert!(
+ !changed,
+ "in auto-mode, we have a heuristic to see if the buffer is binary"
+ );
+ Ok(())
+}
+
+#[test]
+fn fast_conversion_by_stripping_cr() -> crate::Result {
+ let mut buf = Vec::new();
+ let changed = eol::convert_to_git(
+ b"a\r\nb\r\nc",
+ AttributesDigest::TextCrlf,
+ &mut buf,
+ &mut no_call,
+ Default::default(),
+ )
+ .expect("no error");
+ assert!(changed);
+ assert_eq!(buf.as_bstr(), "a\nb\nc", "here carriage returns can just be stripped");
+ Ok(())
+}
+
+#[test]
+fn slower_conversion_due_to_lone_cr() -> crate::Result {
+ let mut buf = Vec::new();
+ let changed = eol::convert_to_git(
+ b"\r\ra\r\nb\r\nc",
+ AttributesDigest::TextCrlf,
+ &mut buf,
+ &mut no_call,
+ Default::default(),
+ )
+ .expect("no error");
+ assert!(changed);
+ assert_eq!(
+ buf.as_bstr(),
+ "\r\ra\nb\nc",
+ "here carriage returns cannot be stripped but must be handled in pairs"
+ );
+ Ok(())
+}
+
+#[test]
+fn crlf_in_index_prevents_conversion_to_lf() -> crate::Result {
+ let mut buf = Vec::new();
+ let mut called = false;
+ let changed = eol::convert_to_git(
+ b"elligible\n",
+ AttributesDigest::TextAutoInput,
+ &mut buf,
+ &mut |buf| {
+ called = true;
+ buf.clear();
+ buf.push_str("with CRLF\r\n");
+ Ok(Some(()))
+ },
+ Default::default(),
+ )
+ .expect("no error");
+ assert!(called, "in auto mode, the index is queried as well");
+ assert!(
+ !changed,
+ "we saw the CRLF is present in the index, so it's unsafe to make changes"
+ );
+ Ok(())
+}
+
+#[test]
+fn round_trip_check() -> crate::Result {
+ let mut buf = Vec::new();
+ for (input, expected) in [
+ (&b"lone-nl\nhi\r\nho"[..], "LF would be replaced by CRLF in 'hello.txt'"),
+ // despite trying, I was unable to get into the other branch
+ (b"lone-cr\nhi\r\nho", "LF would be replaced by CRLF in 'hello.txt'"),
+ ] {
+ let err = eol::convert_to_git(
+ input,
+ AttributesDigest::TextCrlf,
+ &mut buf,
+ &mut no_call,
+ eol::convert_to_git::Options {
+ round_trip_check: Some(gix_filter::eol::convert_to_git::RoundTripCheck::Fail {
+ rela_path: Path::new("hello.txt"),
+ }),
+ config: Default::default(),
+ },
+ )
+ .unwrap_err();
+ assert_eq!(err.to_string(), expected);
+
+ let changed = eol::convert_to_git(
+ input,
+ AttributesDigest::TextCrlf,
+ &mut buf,
+ &mut no_call,
+ eol::convert_to_git::Options {
+ round_trip_check: Some(gix_filter::eol::convert_to_git::RoundTripCheck::Warn {
+ rela_path: Path::new("hello.txt"),
+ }),
+ config: Default::default(),
+ },
+ )?;
+ assert!(
+ changed,
+ "in warn mode, we will get a result even though it won't round-trip"
+ )
+ }
+ Ok(())
+}
+
+#[allow(clippy::ptr_arg)]
+fn no_call(_buf: &mut Vec<u8>) -> Result<Option<()>, Box<dyn std::error::Error + Send + Sync>> {
+ unreachable!("index function will not be called")
+}
+
+#[allow(clippy::ptr_arg)]
+fn no_object_in_index(_buf: &mut Vec<u8>) -> Result<Option<()>, Box<dyn std::error::Error + Send + Sync>> {
+ Ok(None)
+}
diff --git a/vendor/gix-filter/tests/eol/convert_to_worktree.rs b/vendor/gix-filter/tests/eol/convert_to_worktree.rs
new file mode 100644
index 000000000..e7d20ac7e
--- /dev/null
+++ b/vendor/gix-filter/tests/eol/convert_to_worktree.rs
@@ -0,0 +1,97 @@
+use bstr::ByteSlice;
+use gix_filter::{
+ eol,
+ eol::{AttributesDigest, Configuration, Mode},
+};
+
+#[test]
+fn no_conversion_if_attribute_digest_does_not_allow_it() {
+ let mut buf = Vec::new();
+ for digest in [
+ AttributesDigest::Binary,
+ AttributesDigest::TextInput,
+ AttributesDigest::TextAutoInput,
+ ] {
+ let changed = eol::convert_to_worktree(b"hi\nho", digest, &mut buf, Default::default());
+ assert!(!changed, "the digest doesn't allow for CRLF changes");
+ }
+}
+
+#[test]
+fn no_conversion_if_configuration_does_not_allow_it() {
+ let mut buf = Vec::new();
+ for digest in [AttributesDigest::Text, AttributesDigest::TextAuto] {
+ for config in [
+ Configuration {
+ auto_crlf: eol::AutoCrlf::Input,
+ eol: Some(Mode::CrLf),
+ },
+ Configuration {
+ auto_crlf: eol::AutoCrlf::Disabled,
+ eol: Some(Mode::Lf),
+ },
+ ] {
+ let changed = eol::convert_to_worktree(b"hi\nho", digest, &mut buf, config);
+ assert!(!changed, "the configuration doesn't allow for changes");
+ }
+ }
+}
+
+#[test]
+fn no_conversion_if_nothing_to_do() {
+ let mut buf = Vec::new();
+ for (input, digest, msg) in [
+ (
+ &b"hi\r\nho"[..],
+ AttributesDigest::TextCrlf,
+ "no lone line feed to handle",
+ ),
+ (
+ &b"binary\0linefeed\nho"[..],
+ AttributesDigest::TextAutoCrlf,
+ "binary in auto-mode is never handled",
+ ),
+ (
+ &b"binary\nlinefeed\r\nho"[..],
+ AttributesDigest::TextAutoCrlf,
+ "mixed crlf and lf is avoided",
+ ),
+ (
+ &b"elligible-but-disabled\nhere"[..],
+ AttributesDigest::Binary,
+ "designated binary is never handled",
+ ),
+ ] {
+ let changed = eol::convert_to_worktree(input, digest, &mut buf, Default::default());
+ assert!(!changed, "{msg}");
+ }
+}
+
+#[test]
+fn each_nl_is_replaced_with_crnl() {
+ let mut buf = Vec::new();
+ let changed = eol::convert_to_worktree(
+ b"hi\n\nho\nend",
+ AttributesDigest::TextCrlf,
+ &mut buf,
+ Default::default(),
+ );
+ assert!(
+ changed,
+ "the buffer has to be changed as it is explicitly demanded and has newlines to convert"
+ );
+ assert_eq!(buf.as_bstr(), "hi\r\n\r\nho\r\nend");
+}
+
+#[test]
+fn existing_crnl_are_not_replaced_for_safety_nor_are_lone_cr() {
+ let mut buf = Vec::new();
+ let changed = eol::convert_to_worktree(
+ b"hi\r\n\nho\r\nend\r",
+ AttributesDigest::TextCrlf,
+ &mut buf,
+ Default::default(),
+ );
+ assert!(changed);
+ assert_eq!(buf.as_bstr(), "hi\r\n\r\nho\r\nend\r");
+}
diff --git a/vendor/gix-filter/tests/eol/mod.rs b/vendor/gix-filter/tests/eol/mod.rs
new file mode 100644
index 000000000..4a9150aaf
--- /dev/null
+++ b/vendor/gix-filter/tests/eol/mod.rs
@@ -0,0 +1,25 @@
+mod stats {
+ mod from_bytes {
+ use gix_filter::eol;
+
+ #[test]
+ fn all() {
+ let stats = eol::Stats::from_bytes(b"\n\r\nhi\rho\0\tanother line\nother\r\nmixed");
+ assert_eq!(
+ stats,
+ eol::Stats {
+ null: 1,
+ lone_cr: 1,
+ lone_lf: 2,
+ crlf: 2,
+ printable: 27,
+ non_printable: 1,
+ }
+ );
+ assert!(stats.is_binary());
+ }
+ }
+}
+
+pub(crate) mod convert_to_git;
+mod convert_to_worktree;
diff --git a/vendor/gix-filter/tests/filter.rs b/vendor/gix-filter/tests/filter.rs
new file mode 100644
index 000000000..281dc2811
--- /dev/null
+++ b/vendor/gix-filter/tests/filter.rs
@@ -0,0 +1,7 @@
+pub(crate) mod driver;
+pub(crate) mod eol;
+mod ident;
+mod pipeline;
+mod worktree;
+
+pub use gix_testtools::Result;
diff --git a/vendor/gix-filter/tests/fixtures/baseline.sh b/vendor/gix-filter/tests/fixtures/baseline.sh
new file mode 100755
index 000000000..c02b3c22a
--- /dev/null
+++ b/vendor/gix-filter/tests/fixtures/baseline.sh
@@ -0,0 +1,41 @@
+#!/bin/bash
+set -eu -o pipefail
+
+driver=${1:?First argument is the driver program supporting both process mode and clean/smudge}
+
+function repo_assertions() {
+ echo '* filter=arrow' > .gitattributes
+ git add . && git commit -m c1
+ echo hi > file
+ git add file && git commit -m c2
+ rm file
+ git checkout file
+}
+
+(
+ git init no-process && cd no-process
+
+ git config filter.arrow.clean "$driver clean %f"
+ git config filter.arrow.smudge "$driver smudge %f"
+ git config filter.arrow.required true
+
+ repo_assertions
+)
+
+(
+ git init process && cd process
+
+ git config filter.arrow.process "$driver process"
+ git config filter.arrow.required true
+
+ repo_assertions
+)
+
+(
+ git init process-no-delay && cd process-no-delay
+
+ git config filter.arrow.process "$driver process disallow-delay"
+ git config filter.arrow.required true
+
+ repo_assertions
+)
diff --git a/vendor/gix-filter/tests/fixtures/generated-archives/pipeline_repos.tar.xz b/vendor/gix-filter/tests/fixtures/generated-archives/pipeline_repos.tar.xz
new file mode 100644
index 000000000..8271b4467
--- /dev/null
+++ b/vendor/gix-filter/tests/fixtures/generated-archives/pipeline_repos.tar.xz
Binary files differ
diff --git a/vendor/gix-filter/tests/fixtures/pipeline_repos.sh b/vendor/gix-filter/tests/fixtures/pipeline_repos.sh
new file mode 100755
index 000000000..e2f64f99e
--- /dev/null
+++ b/vendor/gix-filter/tests/fixtures/pipeline_repos.sh
@@ -0,0 +1,18 @@
+#!/bin/bash
+set -eu -o pipefail
+
+(mkdir all-filters && cd all-filters
+ cat <<EOF > .gitattributes
+* ident text=auto eol=crlf working-tree-encoding=ISO-8859-1 filter=arrow
+EOF
+)
+
+(mkdir no-filters && cd no-filters
+ touch .gitattributes
+)
+
+(mkdir driver-only && cd driver-only
+ cat <<EOF > .gitattributes
+* filter=arrow
+EOF
+)
diff --git a/vendor/gix-filter/tests/ident/mod.rs b/vendor/gix-filter/tests/ident/mod.rs
new file mode 100644
index 000000000..ad63cf70d
--- /dev/null
+++ b/vendor/gix-filter/tests/ident/mod.rs
@@ -0,0 +1,111 @@
+mod undo {
+ use bstr::{ByteSlice, B};
+
+ #[test]
+ fn no_id_changes_nothing() {
+ let mut buf = Vec::new();
+ let changed = gix_filter::ident::undo(B("hello"), &mut buf);
+ assert!(!changed, "the buffer is not touched");
+ assert_eq!(buf.len(), 0);
+ }
+
+ #[test]
+ fn empty() {
+ let mut buf = Vec::new();
+ assert!(
+ !gix_filter::ident::undo(B(""), &mut buf),
+ "nothing to be done in empty buffer"
+ );
+ }
+
+ #[test]
+ fn nothing_if_newline_between_dollars() {
+ let mut buf = Vec::new();
+ assert!(!gix_filter::ident::undo(B(" $Id: \n$"), &mut buf));
+ assert_eq!(buf.len(), 0);
+ }
+
+ #[test]
+ fn nothing_if_it_is_not_id() {
+ let mut buf = Vec::new();
+ assert!(
+ !gix_filter::ident::undo(B(" $id: something$"), &mut buf),
+ "it's matching case-sensitively"
+ );
+ assert_eq!(buf.len(), 0);
+ }
+
+ #[test]
+ fn anything_between_dollar_id_dollar() {
+ let mut buf = Vec::new();
+ assert!(gix_filter::ident::undo(B(" $Id: something$\nhello"), &mut buf));
+ assert_eq!(buf.as_bstr(), " $Id$\nhello");
+ }
+
+ #[test]
+ fn multiple() {
+ let mut buf = Vec::new();
+ assert!(gix_filter::ident::undo(
+ B("$Id: a\n$ $Id: something$\nhello$Id: hex$\nlast $Id:other$\n$Id: \n$"),
+ &mut buf
+ ));
+ assert_eq!(buf.as_bstr(), "$Id: a\n$ $Id$\nhello$Id$\nlast $Id$\n$Id: \n$");
+
+ assert!(gix_filter::ident::undo(
+ B("$Id: a\n$$Id:$$Id: hex$\n$Id:other$$Id: $end"),
+ &mut buf
+ ));
+ assert_eq!(buf.as_bstr(), "$Id: a\n$$Id$$Id$\n$Id$$Id$end");
+ }
+}
+
+mod apply {
+ use bstr::{ByteSlice, B};
+ use gix_filter::ident;
+
+ #[test]
+ fn no_change() {
+ let mut buf = Vec::new();
+ for input_no_match in [
+ "",
+ "nothing",
+ "$ID$ case sensitive matching",
+ "$Id: expanded is ignored$",
+ ] {
+ let changed = ident::apply(input_no_match.as_bytes(), gix_hash::Kind::Sha1, &mut buf);
+ assert!(!changed, "no substitution happens, nothing to do");
+ assert_eq!(buf.len(), 0);
+ }
+ }
+
+ #[test]
+ fn simple() {
+ let mut buf = Vec::new();
+ assert!(
+ ident::apply(B("$Id$"), gix_hash::Kind::Sha1, &mut buf),
+ "a change happens"
+ );
+ assert_eq!(buf.as_bstr(), "$Id: b3f5ebfb5843bc43ceecff6d4f26bb37c615beb1$");
+
+ assert!(ident::apply(B("$Id$ $Id$ foo"), gix_hash::Kind::Sha1, &mut buf));
+ assert_eq!(
+ buf.as_bstr(),
+ "$Id: e230cff7a9624f59eaa28bfb97602c3a03651a49$ $Id: e230cff7a9624f59eaa28bfb97602c3a03651a49$ foo"
+ );
+ }
+
+ #[test]
+ fn round_trips() {
+ let mut buf = Vec::new();
+ for input in [
+ "hi\n$Id$\nho\n\t$Id$$Id$$Id$",
+ "$Id$",
+ "$Id$ and one more $Id$ and done",
+ ] {
+ let changed = ident::apply(B(input), gix_hash::Kind::Sha1, &mut buf);
+ assert!(changed, "the input was rewritten");
+ assert!(ident::undo(&buf.clone(), &mut buf), "undo does something as well");
+ assert_eq!(buf.as_bstr(), input, "the filter can be undone perfectly");
+ }
+ }
+}
diff --git a/vendor/gix-filter/tests/pipeline/convert_to_git.rs b/vendor/gix-filter/tests/pipeline/convert_to_git.rs
new file mode 100644
index 000000000..3a627f28d
--- /dev/null
+++ b/vendor/gix-filter/tests/pipeline/convert_to_git.rs
@@ -0,0 +1,145 @@
+use std::{io::Read, path::Path};
+
+use bstr::{BStr, ByteSlice};
+use gix_filter::{eol, pipeline::CrlfRoundTripCheck};
+
+use crate::{driver::apply::driver_with_process, pipeline::pipeline};
+
+#[test]
+fn no_driver_but_filter_with_autocrlf() -> gix_testtools::Result {
+ let (_cache, mut pipe) = pipeline("no-filter", || {
+ (
+ vec![],
+ Vec::new(),
+ CrlfRoundTripCheck::Fail,
+ eol::Configuration {
+ auto_crlf: eol::AutoCrlf::Enabled,
+ eol: None,
+ },
+ )
+ })?;
+
+ let out = pipe.convert_to_git(
+ "hi\r\n".as_bytes(),
+ Path::new("any.txt"),
+ &mut |_path, _attrs| {},
+ &mut no_object_in_index,
+ )?;
+
+ assert_eq!(
+ out.as_bytes().expect("read converted to buffer").as_bstr(),
+ "hi\n",
+ "the read is read into memory if there is no driver"
+ );
+ Ok(())
+}
+
+#[test]
+fn all_stages_mean_streaming_is_impossible() -> gix_testtools::Result {
+ let (mut cache, mut pipe) = pipeline("all-filters", || {
+ (
+ vec![driver_with_process()],
+ Vec::new(),
+ CrlfRoundTripCheck::Fail,
+ Default::default(),
+ )
+ })?;
+
+ let mut out = pipe.convert_to_git(
+ "➡a\r\n➡b\r\n➡$Id: 2188d1cdee2b93a80084b61af431a49d21bc7cc0$".as_bytes(),
+ Path::new("any.txt"),
+ &mut |path, attrs| {
+ cache
+ .at_entry(path, Some(false), |_oid, _buf| -> Result<_, std::convert::Infallible> {
+ unreachable!("index access disabled")
+ })
+ .expect("cannot fail")
+ .matching_attributes(attrs);
+ },
+ &mut no_object_in_index,
+ )?;
+ assert!(out.is_changed(), "filters were applied");
+ assert!(out.as_read().is_none(), "non-driver filters operate in-memory");
+ let buf = out.as_bytes().expect("in-memory operation");
+ assert_eq!(buf.as_bstr(), "a\nb\n$Id$", "filters were successfully reversed");
+ Ok(())
+}
+
+#[test]
+fn only_driver_means_streaming_is_possible() -> gix_testtools::Result {
+ let (mut cache, mut pipe) = pipeline("driver-only", || {
+ (
+ vec![driver_with_process()],
+ Vec::new(),
+ CrlfRoundTripCheck::Skip,
+ Default::default(),
+ )
+ })?;
+
+ let mut out = pipe.convert_to_git(
+ "➡a\r\n➡b\r\n➡$Id: 2188d1cdee2b93a80084b61af431a49d21bc7cc0$".as_bytes(),
+ Path::new("subdir/doesnot/matter/any.txt"),
+ &mut |path, attrs| {
+ cache
+ .at_entry(path, Some(false), |_oid, _buf| -> Result<_, std::convert::Infallible> {
+ unreachable!("index access disabled")
+ })
+ .expect("cannot fail")
+ .matching_attributes(attrs);
+ },
+ &mut no_object_in_index,
+ )?;
+ assert!(out.is_changed(), "filters were applied");
+ assert!(out.as_read().is_some(), "filter-only can be streamed");
+ let mut buf = Vec::new();
+ out.read_to_end(&mut buf)?;
+ assert_eq!(
+ buf.as_bstr(),
+ "a\r\nb\r\n$Id: 2188d1cdee2b93a80084b61af431a49d21bc7cc0$",
+ "one filter was reversed"
+ );
+ Ok(())
+}
+
+#[test]
+fn no_filter_means_reader_is_returned_unchanged() -> gix_testtools::Result {
+ let (mut cache, mut pipe) = pipeline("no-filters", || {
+ (vec![], Vec::new(), CrlfRoundTripCheck::Fail, Default::default())
+ })?;
+
+ let input = "➡a\r\n➡b\r\n➡$Id: 2188d1cdee2b93a80084b61af431a49d21bc7cc0$";
+ let mut out = pipe.convert_to_git(
+ input.as_bytes(),
+ Path::new("other.txt"),
+ &mut |path, attrs| {
+ cache
+ .at_entry(path, Some(false), |_oid, _buf| -> Result<_, std::convert::Infallible> {
+ unreachable!("index access disabled")
+ })
+ .expect("cannot fail")
+ .matching_attributes(attrs);
+ },
+ &mut no_call,
+ )?;
+ assert!(!out.is_changed(), "no filter was applied");
+ let actual = out
+ .as_read()
+ .expect("input is unchanged, we get the original stream back");
+ let mut buf = Vec::new();
+ actual.read_to_end(&mut buf)?;
+ assert_eq!(buf.as_bstr(), input, "input is unchanged");
+ Ok(())
+}
+
+#[allow(clippy::ptr_arg)]
+fn no_call(_path: &BStr, _buf: &mut Vec<u8>) -> Result<Option<()>, Box<dyn std::error::Error + Send + Sync>> {
+ unreachable!("index function will not be called")
+}
+
+#[allow(clippy::ptr_arg)]
+fn no_object_in_index(
+ _path: &BStr,
+ _buf: &mut Vec<u8>,
+) -> Result<Option<()>, Box<dyn std::error::Error + Send + Sync>> {
+ Ok(None)
+}
diff --git a/vendor/gix-filter/tests/pipeline/convert_to_worktree.rs b/vendor/gix-filter/tests/pipeline/convert_to_worktree.rs
new file mode 100644
index 000000000..bfc15cd3e
--- /dev/null
+++ b/vendor/gix-filter/tests/pipeline/convert_to_worktree.rs
@@ -0,0 +1,106 @@
+use std::io::Read;
+
+use bstr::ByteSlice;
+use gix_filter::pipeline::CrlfRoundTripCheck;
+
+use crate::{driver::apply::driver_with_process, pipeline::pipeline};
+
+#[test]
+fn all_stages() -> gix_testtools::Result {
+ let (mut cache, mut pipe) = pipeline("all-filters", || {
+ (
+ vec![driver_with_process()],
+ Vec::new(),
+ CrlfRoundTripCheck::Skip,
+ Default::default(),
+ )
+ })?;
+
+ let mut out = pipe.convert_to_worktree(
+ b"a\nb\n$Id$",
+ "any.txt".into(),
+ &mut |path, attrs| {
+ cache
+ .at_entry(path, Some(false), |_oid, _buf| -> Result<_, std::convert::Infallible> {
+ unreachable!("index access disabled")
+ })
+ .expect("cannot fail")
+ .matching_attributes(attrs);
+ },
+ gix_filter::driver::apply::Delay::Forbid,
+ )?;
+ assert!(out.is_changed(), "filters were applied");
+ assert!(
+ out.as_bytes().is_none(),
+ "the last filter is a driver which is applied, yielding a stream"
+ );
+ assert!(out.as_read().is_some(), "process filter is last");
+ let mut buf = Vec::new();
+ out.read_to_end(&mut buf)?;
+ assert_eq!(
+ buf.as_bstr(),
+ "➡a\r\n➡b\r\n➡$Id: 2188d1cdee2b93a80084b61af431a49d21bc7cc0$",
+ "the buffer shows that a lot of transformations were applied"
+ );
+ Ok(())
+}
+
+#[test]
+fn all_stages_no_filter() -> gix_testtools::Result {
+ let (mut cache, mut pipe) = pipeline("all-filters", || {
+ (vec![], Vec::new(), CrlfRoundTripCheck::Skip, Default::default())
+ })?;
+
+ let mut out = pipe.convert_to_worktree(
+ b"$Id$a\nb\n",
+ "other.txt".into(),
+ &mut |path, attrs| {
+ cache
+ .at_entry(path, Some(false), |_oid, _buf| -> Result<_, std::convert::Infallible> {
+ unreachable!("index access disabled")
+ })
+ .expect("cannot fail")
+ .matching_attributes(attrs);
+ },
+ gix_filter::driver::apply::Delay::Forbid,
+ )?;
+ assert!(out.is_changed(), "filters were applied");
+ assert!(
+ out.as_read().is_none(),
+ "there is no filter process, so no chance for getting a stream"
+ );
+ let buf = out.as_bytes().expect("no filter process");
+ assert_eq!(
+ buf.as_bstr(),
+ "$Id: a77d7acbc809ac8df987a769221c83137ba1b9f9$a\r\nb\r\n",
+ "the buffer shows that a lot of transformations were applied"
+ );
+ Ok(())
+}
+
+#[test]
+fn no_filter() -> gix_testtools::Result {
+ let (mut cache, mut pipe) = pipeline("no-filters", || {
+ (vec![], Vec::new(), CrlfRoundTripCheck::Skip, Default::default())
+ })?;
+
+ let input = b"$Id$a\nb\n";
+ let out = pipe.convert_to_worktree(
+ input,
+ "other.txt".into(),
+ &mut |path, attrs| {
+ cache
+ .at_entry(path, Some(false), |_oid, _buf| -> Result<_, std::convert::Infallible> {
+ unreachable!("index access disabled")
+ })
+ .expect("cannot fail")
+ .matching_attributes(attrs);
+ },
+ gix_filter::driver::apply::Delay::Forbid,
+ )?;
+ assert!(!out.is_changed(), "no filter was applied");
+ let actual = out.as_bytes().expect("input is unchanged");
+ assert_eq!(actual, input, "so the input is unchanged…");
+ assert_eq!(actual.as_ptr(), input.as_ptr(), "…which means it's exactly the same");
+ Ok(())
+}
diff --git a/vendor/gix-filter/tests/pipeline/mod.rs b/vendor/gix-filter/tests/pipeline/mod.rs
new file mode 100644
index 000000000..1a10043da
--- /dev/null
+++ b/vendor/gix-filter/tests/pipeline/mod.rs
@@ -0,0 +1,71 @@
+use bstr::ByteSlice;
+use gix_attributes::glob::pattern::Case;
+use gix_filter::eol;
+
+mod convert_to_git;
+mod convert_to_worktree;
+
+#[test]
+fn default() -> crate::Result {
+ let mut filters = gix_filter::Pipeline::default();
+ let out = filters.convert_to_worktree(
+ b"hi",
+ "file".into(),
+ &mut |_, _| {},
+ gix_filter::driver::apply::Delay::Allow,
+ )?;
+ assert_eq!(
+ out.as_bytes().expect("unchanged").as_bstr(),
+ "hi",
+ "default-pipelines can be used like normal, they have not effect"
+ );
+ Ok(())
+}
+
+fn attribute_cache(name: &str) -> gix_testtools::Result<gix_worktree::Stack> {
+ let dir = gix_testtools::scripted_fixture_read_only("pipeline_repos.sh")?.join(name);
+ Ok(gix_worktree::Stack::new(
+ dir,
+ gix_worktree::stack::State::for_add(
+ gix_worktree::stack::state::Attributes::new(
+ Default::default(),
+ None,
+ gix_worktree::stack::state::attributes::Source::WorktreeThenIdMapping,
+ Default::default(),
+ ),
+ gix_worktree::stack::state::Ignore::new(
+ Default::default(),
+ Default::default(),
+ None,
+ gix_worktree::stack::state::ignore::Source::WorktreeThenIdMappingIfNotSkipped,
+ ),
+ ),
+ Case::Sensitive,
+ Vec::new(),
+ Default::default(),
+ ))
+}
+
+fn pipeline(
+ name: &str,
+ init: impl FnOnce() -> (
+ Vec<gix_filter::Driver>,
+ Vec<&'static encoding_rs::Encoding>,
+ gix_filter::pipeline::CrlfRoundTripCheck,
+ eol::Configuration,
+ ),
+) -> gix_testtools::Result<(gix_worktree::Stack, gix_filter::Pipeline)> {
+ let cache = attribute_cache(name)?;
+ let (drivers, encodings_with_roundtrip_check, crlf_roundtrip_check, eol_config) = init();
+ let pipe = gix_filter::Pipeline::new(
+ cache.attributes_collection(),
+ gix_filter::pipeline::Options {
+ drivers,
+ eol_config,
+ encodings_with_roundtrip_check,
+ crlf_roundtrip_check,
+ object_hash: gix_hash::Kind::Sha1,
+ },
+ );
+ Ok((cache, pipe))
+}
diff --git a/vendor/gix-filter/tests/worktree/mod.rs b/vendor/gix-filter/tests/worktree/mod.rs
new file mode 100644
index 000000000..364f9f8ce
--- /dev/null
+++ b/vendor/gix-filter/tests/worktree/mod.rs
@@ -0,0 +1,99 @@
+mod encoding {
+ mod for_label {
+ use gix_filter::worktree;
+
+ #[test]
+ fn unknown() {
+ assert_eq!(
+ worktree::encoding::for_label("FOO").unwrap_err().to_string(),
+ "An encoding named 'FOO' is not known"
+ );
+ }
+
+ #[test]
+ fn utf32_is_not_supported() {
+ for enc in ["UTF-32BE", "UTF-32LE", "UTF-32", "UTF-32LE-BOM", "UTF-32BE-BOM"] {
+ assert!(
+ matches!(
+ worktree::encoding::for_label(enc).unwrap_err(),
+ worktree::encoding::for_label::Error::Unknown { .. }
+ ),
+ "it's not needed for the web and this crate is meant for use in firefox"
+ );
+ }
+ }
+
+ #[test]
+ fn various_spellings_of_utf_8_are_supported() {
+ for enc in ["UTF8", "UTF-8", "utf-8", "utf8"] {
+ let enc = worktree::encoding::for_label(enc).unwrap();
+ assert_eq!(enc.name(), "UTF-8");
+ }
+ }
+
+ #[test]
+ fn various_utf_16_without_bom_suffix_are_supported() {
+ for label in ["UTF-16BE", "UTF-16LE"] {
+ let enc = worktree::encoding::for_label(label).unwrap();
+ assert_eq!(enc.name(), label);
+ }
+ }
+
+ #[test]
+ fn various_utf_16_with_bom_suffix_are_unsupported() {
+ for label in ["UTF-16BE-BOM", "UTF-16LE-BOM"] {
+ assert!(
+ matches!(
+ worktree::encoding::for_label(label).unwrap_err(),
+ worktree::encoding::for_label::Error::Unknown { .. }
+ ),
+ "git supports these and has special handling, but we have not for now. Git has no tests for that either."
+ );
+ }
+ }
+
+ #[test]
+ fn latin_1_is_supported_with_fallback() {
+ let enc = worktree::encoding::for_label("latin-1").unwrap();
+ assert_eq!(
+ enc.name(),
+ "windows-1252",
+ "the encoding crate has its own fallback for ISO-8859-1 which we try to use"
+ );
+ }
+ }
+}
+
+mod encode_to_git {
+ use bstr::ByteSlice;
+ use gix_filter::{worktree, worktree::encode_to_git::RoundTripCheck};
+
+ #[test]
+ fn simple() -> crate::Result {
+ let input = &b"hello"[..];
+ for round_trip in [RoundTripCheck::Skip, RoundTripCheck::Fail] {
+ let mut buf = Vec::new();
+ worktree::encode_to_git(input, encoding_rs::UTF_8, &mut buf, round_trip)?;
+ assert_eq!(buf.as_bstr(), input)
+ }
+ Ok(())
+ }
+}
+
+mod encode_to_worktree {
+ use bstr::ByteSlice;
+ use gix_filter::{worktree, worktree::encode_to_git::RoundTripCheck};
+
+ #[test]
+ fn shift_jis() -> crate::Result {
+ let input = "ハローワールド";
+ let mut buf = Vec::new();
+ worktree::encode_to_worktree(input.as_bytes(), encoding_rs::SHIFT_JIS, &mut buf)?;
+
+ let mut re_encoded = Vec::new();
+ worktree::encode_to_git(&buf, encoding_rs::SHIFT_JIS, &mut re_encoded, RoundTripCheck::Fail)?;
+
+ assert_eq!(re_encoded.as_bstr(), input, "this should be round-trippable too");
+ Ok(())
+ }
+}