author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-05-04 12:41:41 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-05-04 12:41:41 +0000
commit     10ee2acdd26a7f1298c6f6d6b7af9b469fe29b87 (patch)
tree       bdffd5d80c26cf4a7a518281a204be1ace85b4c1 /vendor/gix
parent     Releasing progress-linux version 1.70.0+dfsg1-9~progress7.99u1. (diff)
download   rustc-10ee2acdd26a7f1298c6f6d6b7af9b469fe29b87.tar.xz
           rustc-10ee2acdd26a7f1298c6f6d6b7af9b469fe29b87.zip

Merging upstream version 1.70.0+dfsg2.

Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'vendor/gix')
-rw-r--r-- vendor/gix/.cargo-checksum.json | 1
-rw-r--r-- vendor/gix/CHANGELOG.md | 1606
-rw-r--r-- vendor/gix/Cargo.lock | 3005
-rw-r--r-- vendor/gix/Cargo.toml | 288
-rw-r--r-- vendor/gix/src/assets/baseline-init/HEAD | 1
-rw-r--r-- vendor/gix/src/assets/baseline-init/description | 1
-rwxr-xr-x vendor/gix/src/assets/baseline-init/hooks/applypatch-msg.sample | 15
-rwxr-xr-x vendor/gix/src/assets/baseline-init/hooks/commit-msg.sample | 24
-rwxr-xr-x vendor/gix/src/assets/baseline-init/hooks/fsmonitor-watchman.sample | 173
-rwxr-xr-x vendor/gix/src/assets/baseline-init/hooks/post-update.sample | 8
-rwxr-xr-x vendor/gix/src/assets/baseline-init/hooks/pre-applypatch.sample | 14
-rwxr-xr-x vendor/gix/src/assets/baseline-init/hooks/pre-commit.sample | 49
-rwxr-xr-x vendor/gix/src/assets/baseline-init/hooks/pre-merge-commit.sample | 13
-rwxr-xr-x vendor/gix/src/assets/baseline-init/hooks/pre-push.sample | 53
-rwxr-xr-x vendor/gix/src/assets/baseline-init/hooks/pre-rebase.sample | 169
-rwxr-xr-x vendor/gix/src/assets/baseline-init/hooks/pre-receive.sample | 24
-rwxr-xr-x vendor/gix/src/assets/baseline-init/hooks/prepare-commit-msg.sample | 42
-rwxr-xr-x vendor/gix/src/assets/baseline-init/hooks/update.sample | 128
-rw-r--r-- vendor/gix/src/assets/baseline-init/info/exclude | 6
-rw-r--r-- vendor/gix/src/clone/checkout.rs | 161
-rw-r--r-- vendor/gix/src/clone/fetch/mod.rs | 212
-rw-r--r-- vendor/gix/src/clone/fetch/util.rs | 229
-rw-r--r-- vendor/gix/src/clone/mod.rs | 118
-rw-r--r-- vendor/gix/src/commit.rs | 238
-rw-r--r-- vendor/gix/src/config/cache/access.rs | 233
-rw-r--r-- vendor/gix/src/config/cache/incubate.rs | 111
-rw-r--r-- vendor/gix/src/config/cache/init.rs | 485
-rw-r--r-- vendor/gix/src/config/cache/mod.rs | 18
-rw-r--r-- vendor/gix/src/config/cache/util.rs | 143
-rw-r--r-- vendor/gix/src/config/mod.rs | 454
-rw-r--r-- vendor/gix/src/config/overrides.rs | 49
-rw-r--r-- vendor/gix/src/config/snapshot/_impls.rs | 76
-rw-r--r-- vendor/gix/src/config/snapshot/access.rs | 143
-rw-r--r-- vendor/gix/src/config/snapshot/credential_helpers.rs | 183
-rw-r--r-- vendor/gix/src/config/snapshot/mod.rs | 5
-rw-r--r-- vendor/gix/src/config/tree/keys.rs | 629
-rw-r--r-- vendor/gix/src/config/tree/mod.rs | 123
-rw-r--r-- vendor/gix/src/config/tree/sections/author.rs | 23
-rw-r--r-- vendor/gix/src/config/tree/sections/branch.rs | 65
-rw-r--r-- vendor/gix/src/config/tree/sections/checkout.rs | 58
-rw-r--r-- vendor/gix/src/config/tree/sections/clone.rs | 20
-rw-r--r-- vendor/gix/src/config/tree/sections/committer.rs | 23
-rw-r--r-- vendor/gix/src/config/tree/sections/core.rs | 302
-rw-r--r-- vendor/gix/src/config/tree/sections/credential.rs | 56
-rw-r--r-- vendor/gix/src/config/tree/sections/diff.rs | 133
-rw-r--r-- vendor/gix/src/config/tree/sections/extensions.rs | 59
-rw-r--r-- vendor/gix/src/config/tree/sections/gitoxide.rs | 363
-rw-r--r-- vendor/gix/src/config/tree/sections/http.rs | 317
-rw-r--r-- vendor/gix/src/config/tree/sections/init.rs | 20
-rw-r--r-- vendor/gix/src/config/tree/sections/mod.rs | 96
-rw-r--r-- vendor/gix/src/config/tree/sections/pack.rs | 64
-rw-r--r-- vendor/gix/src/config/tree/sections/protocol.rs | 85
-rw-r--r-- vendor/gix/src/config/tree/sections/remote.rs | 101
-rw-r--r-- vendor/gix/src/config/tree/sections/safe.rs | 27
-rw-r--r-- vendor/gix/src/config/tree/sections/ssh.rs | 65
-rw-r--r-- vendor/gix/src/config/tree/sections/url.rs | 25
-rw-r--r-- vendor/gix/src/config/tree/sections/user.rs | 22
-rw-r--r-- vendor/gix/src/config/tree/traits.rs | 199
-rw-r--r-- vendor/gix/src/create.rs | 251
-rw-r--r-- vendor/gix/src/discover.rs | 88
-rw-r--r-- vendor/gix/src/env.rs | 129
-rw-r--r-- vendor/gix/src/ext/mod.rs | 9
-rw-r--r-- vendor/gix/src/ext/object_id.rs | 34
-rw-r--r-- vendor/gix/src/ext/reference.rs | 15
-rw-r--r-- vendor/gix/src/ext/rev_spec.rs | 20
-rw-r--r-- vendor/gix/src/ext/tree.rs | 44
-rw-r--r-- vendor/gix/src/head/log.rs | 35
-rw-r--r-- vendor/gix/src/head/mod.rs | 122
-rw-r--r-- vendor/gix/src/head/peel.rs | 119
-rw-r--r-- vendor/gix/src/id.rs | 195
-rw-r--r-- vendor/gix/src/init.rs | 101
-rw-r--r-- vendor/gix/src/interrupt.rs | 223
-rw-r--r-- vendor/gix/src/kind.rs | 23
-rw-r--r-- vendor/gix/src/lib.rs | 314
-rw-r--r-- vendor/gix/src/mailmap.rs | 18
-rw-r--r-- vendor/gix/src/object/blob.rs | 148
-rw-r--r-- vendor/gix/src/object/commit.rs | 156
-rw-r--r-- vendor/gix/src/object/errors.rs | 34
-rw-r--r-- vendor/gix/src/object/impls.rs | 123
-rw-r--r-- vendor/gix/src/object/mod.rs | 221
-rw-r--r-- vendor/gix/src/object/peel.rs | 93
-rw-r--r-- vendor/gix/src/object/tag.rs | 15
-rw-r--r-- vendor/gix/src/object/tree/diff/change.rs | 111
-rw-r--r-- vendor/gix/src/object/tree/diff/for_each.rs | 235
-rw-r--r-- vendor/gix/src/object/tree/diff/mod.rs | 118
-rw-r--r-- vendor/gix/src/object/tree/diff/rewrites.rs | 108
-rw-r--r-- vendor/gix/src/object/tree/diff/tracked.rs | 491
-rw-r--r-- vendor/gix/src/object/tree/iter.rs | 53
-rw-r--r-- vendor/gix/src/object/tree/mod.rs | 158
-rw-r--r-- vendor/gix/src/object/tree/traverse.rs | 62
-rw-r--r-- vendor/gix/src/open/mod.rs | 67
-rw-r--r-- vendor/gix/src/open/options.rs | 180
-rw-r--r-- vendor/gix/src/open/repository.rs | 345
-rw-r--r-- vendor/gix/src/path.rs | 11
-rw-r--r-- vendor/gix/src/reference/edits.rs | 75
-rw-r--r-- vendor/gix/src/reference/errors.rs | 89
-rw-r--r-- vendor/gix/src/reference/iter.rs | 127
-rw-r--r-- vendor/gix/src/reference/log.rs | 36
-rw-r--r-- vendor/gix/src/reference/mod.rs | 87
-rw-r--r-- vendor/gix/src/reference/remote.rs | 49
-rw-r--r-- vendor/gix/src/remote/access.rs | 105
-rw-r--r-- vendor/gix/src/remote/build.rs | 84
-rw-r--r-- vendor/gix/src/remote/connect.rs | 166
-rw-r--r-- vendor/gix/src/remote/connection/access.rs | 67
-rw-r--r-- vendor/gix/src/remote/connection/fetch/config.rs | 26
-rw-r--r-- vendor/gix/src/remote/connection/fetch/error.rs | 41
-rw-r--r-- vendor/gix/src/remote/connection/fetch/mod.rs | 240
-rw-r--r-- vendor/gix/src/remote/connection/fetch/negotiate.rs | 78
-rw-r--r-- vendor/gix/src/remote/connection/fetch/receive_pack.rs | 238
-rw-r--r-- vendor/gix/src/remote/connection/fetch/update_refs/mod.rs | 274
-rw-r--r-- vendor/gix/src/remote/connection/fetch/update_refs/tests.rs | 607
-rw-r--r-- vendor/gix/src/remote/connection/fetch/update_refs/update.rs | 128
-rw-r--r-- vendor/gix/src/remote/connection/mod.rs | 29
-rw-r--r-- vendor/gix/src/remote/connection/ref_map.rs | 268
-rw-r--r-- vendor/gix/src/remote/errors.rs | 45
-rw-r--r-- vendor/gix/src/remote/fetch.rs | 166
-rw-r--r-- vendor/gix/src/remote/init.rs | 116
-rw-r--r-- vendor/gix/src/remote/mod.rs | 62
-rw-r--r-- vendor/gix/src/remote/name.rs | 84
-rw-r--r-- vendor/gix/src/remote/save.rs | 125
-rw-r--r-- vendor/gix/src/remote/url/mod.rs | 7
-rw-r--r-- vendor/gix/src/remote/url/rewrite.rs | 100
-rw-r--r-- vendor/gix/src/remote/url/scheme_permission.rs | 120
-rw-r--r-- vendor/gix/src/repository/cache.rs | 30
-rw-r--r-- vendor/gix/src/repository/config/mod.rs | 191
-rw-r--r-- vendor/gix/src/repository/config/transport.rs | 425
-rw-r--r-- vendor/gix/src/repository/identity.rs | 175
-rw-r--r-- vendor/gix/src/repository/impls.rs | 73
-rw-r--r-- vendor/gix/src/repository/init.rs | 55
-rw-r--r-- vendor/gix/src/repository/location.rs | 86
-rw-r--r-- vendor/gix/src/repository/mod.rs | 36
-rw-r--r-- vendor/gix/src/repository/object.rs | 214
-rw-r--r-- vendor/gix/src/repository/permissions.rs | 168
-rw-r--r-- vendor/gix/src/repository/reference.rs | 243
-rw-r--r-- vendor/gix/src/repository/remote.rs | 199
-rw-r--r-- vendor/gix/src/repository/revision.rs | 42
-rw-r--r-- vendor/gix/src/repository/snapshots.rs | 109
-rw-r--r-- vendor/gix/src/repository/state.rs | 44
-rw-r--r-- vendor/gix/src/repository/thread_safe.rs | 66
-rw-r--r-- vendor/gix/src/repository/worktree.rs | 119
-rw-r--r-- vendor/gix/src/revision/mod.rs | 27
-rw-r--r-- vendor/gix/src/revision/spec/mod.rs | 90
-rw-r--r-- vendor/gix/src/revision/spec/parse/delegate/mod.rs | 256
-rw-r--r-- vendor/gix/src/revision/spec/parse/delegate/navigate.rs | 340
-rw-r--r-- vendor/gix/src/revision/spec/parse/delegate/revision.rs | 225
-rw-r--r-- vendor/gix/src/revision/spec/parse/error.rs | 130
-rw-r--r-- vendor/gix/src/revision/spec/parse/mod.rs | 61
-rw-r--r-- vendor/gix/src/revision/spec/parse/types.rs | 182
-rw-r--r-- vendor/gix/src/revision/walk.rs | 127
-rw-r--r-- vendor/gix/src/tag.rs | 16
-rw-r--r-- vendor/gix/src/types.rs | 205
-rw-r--r-- vendor/gix/src/worktree/mod.rs | 160
-rw-r--r-- vendor/gix/src/worktree/proxy.rs | 101
153 files changed, 23656 insertions, 0 deletions
diff --git a/vendor/gix/.cargo-checksum.json b/vendor/gix/.cargo-checksum.json
new file mode 100644
index 000000000..7f9d61a2f
--- /dev/null
+++ b/vendor/gix/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"CHANGELOG.md":"541c0f705d88cc3b56736404db055f64d0ce96285fd2996038d27fc2a4abdbf1","Cargo.lock":"283cdda178b0535db7f6c7bdda874cef1a0cbb914e44dff4d93e109f73d93bfa","Cargo.toml":"0d8e4b58298d5b7b69ab1534d0cf9a114862217d8e681935780a8272b53f8972","src/assets/baseline-init/HEAD":"28d25bf82af4c0e2b72f50959b2beb859e3e60b9630a5e8c603dad4ddb2b6e80","src/assets/baseline-init/description":"85ab6c163d43a17ea9cf7788308bca1466f1b0a8d1cc92e26e9bf63da4062aee","src/assets/baseline-init/hooks/applypatch-msg.sample":"64b47216779d705b5270e94970aa0bc7b96fd256af0a624e936f6b1b85a014db","src/assets/baseline-init/hooks/commit-msg.sample":"1f74d5e9292979b573ebd59741d46cb93ff391acdd083d340b94370753d92437","src/assets/baseline-init/hooks/fsmonitor-watchman.sample":"f3c0228d8e827f1c5260ac59fdd92c3d425c46e54711ef713c5a54ae0a4db2b4","src/assets/baseline-init/hooks/post-update.sample":"81765af2daef323061dcbc5e61fc16481cb74b3bac9ad8a174b186523586f6c5","src/assets/baseline-init/hooks/pre-applypatch.sample":"e99aee3f6ff6a07f775cab64af1c6507d46d20afa4a5a87fd3402d22b1de2797","src/assets/baseline-init/hooks/pre-commit.sample":"f9af7d95eb1231ecf2eba9770fedfa8d4797a12b02d7240e98d568201251244a","src/assets/baseline-init/hooks/pre-merge-commit.sample":"d3825a70337940ebbd0a5c072984e13245920cdf8898bd225c8d27a6dfc9cb53","src/assets/baseline-init/hooks/pre-push.sample":"4b1119e1e13a212571976f4aee77847cdbd40978546d6273a557e238981a40d1","src/assets/baseline-init/hooks/pre-rebase.sample":"3ac3a30cffe859b71a27d77a7fc1635d35ff5d5aaecc896bba11bd01e1b307ce","src/assets/baseline-init/hooks/pre-receive.sample":"a4c3d2b9c7bb3fd8d1441c31bd4ee71a595d66b44fcf49ddb310252320169989","src/assets/baseline-init/hooks/prepare-commit-msg.sample":"e9ddcaa4189fddd25ed97fc8c789eca7b6ca16390b2392ae3276f0c8e1aa4619","src/assets/baseline-init/hooks/update.sample":"978235ae8b913ab4d0f906a8af621a0bfe7c314d84b715f4f64ee9b1aa3cce2d","src/assets/baseline-init/info/exclude":"6671fe83b7a07c8932ee89164d1f2793b2318058eb8b98dc5c06ee0a5a3b0ec1","src/clone/checkout.rs":"b0f4f30d4041653836e8896a00f0a1def2e5625b9f8fa9fe19484aa858231373","src/clone/fetch/mod.rs":"35f0384ec6933739641fbdbe41ceb75e58a8deec73cfe4a5f20260e9649be366","src/clone/fetch/util.rs":"fa8f750881e6a8b5838b7fa3899d91ad4d68c8027f14901003e76274190374ad","src/clone/mod.rs":"16ea330e68b9ac9c39a0702deb5b3d70708cb09347a11e25085efcd6ea894457","src/commit.rs":"2f6c5b5e93702e9b9b731548970a9fe56f309f554f73238e837d05300e4f5a9b","src/config/cache/access.rs":"79e99df8b1ee365936812cfb59b92fb6e95f3a900de9f0193edde7f97b91f584","src/config/cache/incubate.rs":"089fcd2bfe0813b0607e019969d65c8ac314695ecbce832a5b36479df7f37126","src/config/cache/init.rs":"c7fd6613b2ea30d4dd28cf29989263eb5953da37661f0f552d717914a5ece443","src/config/cache/mod.rs":"3cce2cf0fafbc4f828870b85fef411a826ad6d1fe3ac5b6cbd2f5e049331aebf","src/config/cache/util.rs":"b94814e0aab3b300595ae19786333f3103294609ec848f4e761fd4b840fb87dd","src/config/mod.rs":"604efc0aae29cbea6306d761ca46ef18b75246ba514f6d70097caa958776041d","src/config/overrides.rs":"f9bd9b673ad78b9db98b169a05cd04d3aa6735e6e4a5bd621a2d4669fe2e8c58","src/config/snapshot/_impls.rs":"edae9b731c69828effbdfe01d32b7caa8c536df81d33de1cb67b88ece01a7218","src/config/snapshot/access.rs":"fc9b5df53f74627cb0d4f0bbcdb933ae9086394678929623dccb23f957fcfe91","src/config/snapshot/credential_helpers.rs":"690640166406598b0df5cc0dfa8c6a6d0051a897911e9fd5af56820dee7949a5","src/config/snapshot/mod.rs":"5377723de24b00ccb58e15df119831d7c0d9f2695f4499621db37546a5fccaff","src/config/tree/keys.rs":"11990d4d9581a2d1
82b40cbf7ef2cecd3694f3c127e996578b724c7981fe45d9","src/config/tree/mod.rs":"68de461ed67f1e36c87d14834dbc4700fc94b39d548708bf61d9effb1a55b1f6","src/config/tree/sections/author.rs":"a27cf6ad5150aa5ccbd64d2786183983e1567c094a221b016875864c9e8f6980","src/config/tree/sections/branch.rs":"105211bb0eefbdc718923d5e770261889163a5d61dc8f0c22af33473cfb32100","src/config/tree/sections/checkout.rs":"329ebe10cca7c5414f9e6229d6dce3178cdb5821a1b7e72bbfac3a60b5b42dc2","src/config/tree/sections/clone.rs":"9e0ea11063a5fa4059035a04d1531b3dc62cde48f487cddc56cd428f37b38931","src/config/tree/sections/committer.rs":"2f99b8bc6e2c4ad9bdba3a85bc6b717acb5b09d1ada2723d3ba5954f804368eb","src/config/tree/sections/core.rs":"98e06197a143fd06a014887dcc64279592cca5a138e285ec8f6a08d16a3b4787","src/config/tree/sections/credential.rs":"e1ce72dc24b9d2f06827b9fc6e8c963f888844ab5a4b48f6d867ad8542909b94","src/config/tree/sections/diff.rs":"b2bdfba61b34bc08c81d84b6b9e7ba009f8e0277f71e79393020d26c4112cde6","src/config/tree/sections/extensions.rs":"f73c444f2e2c8fbc8d1b1991c0d63532c792362e42833f3ae06059c07387cf66","src/config/tree/sections/gitoxide.rs":"731e41e042654aa2bb7dc0763aee97f81485ea79b846ac82b3b2dcb71d433578","src/config/tree/sections/http.rs":"58c9a9b8abcee4066eb4e20f9679dbbf888c04c21f4dbfff0d2ad7f0a18d4fcd","src/config/tree/sections/init.rs":"e87e33ac5091797bde302f78c8fb7cefce0fb1752610409c1db53c3f7b4179af","src/config/tree/sections/mod.rs":"0a5298653789a9d00f33a53f60bd7c27857f49d420748dbad48bc7d6ecb71fb6","src/config/tree/sections/pack.rs":"0a6dcdc7603b0d7a84159a70789b63b6c26d539f98808c520a8e644fc9dbd2f3","src/config/tree/sections/protocol.rs":"835f757720767099c5b9993c8c3962dfe93338155d6d0f6b7db12960df5e25de","src/config/tree/sections/remote.rs":"eb06d89125dc72b5754b642cf561e6e430cad4fbbd57dcd1049d306291afc41f","src/config/tree/sections/safe.rs":"4e96f49ff56d6ebb8944406fe2ae3c4a1fa80c5383f08935015ddc6036b583c3","src/config/tree/sections/ssh.rs":"7db83236912bcb7aa1770f0eb1a388ee0fac94c1548bd9a19051b34c25622841","src/config/tree/sections/url.rs":"e342edcd20979feaf54663a919a0751aaf32b4e1f2e47cf5d028faec62e6b731","src/config/tree/sections/user.rs":"8f02172a5db830832b4edf28693fdef33cece6ba1990c4d7a338e4c9291375ab","src/config/tree/traits.rs":"524f1752e9131c27db3a9016ee6c3abc61175345c2167426ffd5a41602cb950d","src/create.rs":"a12348ab9a503a4921accee6a58db46c5650cbbdfa3692f7ea54dce48707eb17","src/discover.rs":"d1ff7f43fb4c2b5303fc6c045bb65f68bb394a5c20e2bf36d8378c7a053ce785","src/env.rs":"57fb7c22af1eb6c719bb4651272793abf87400d3a24ae8b0a212f9e044e74cee","src/ext/mod.rs":"3c96daa11ca8321dedc9b968176e31f6e3626e4cd60f308ad46e2ec7fe586bde","src/ext/object_id.rs":"a70afd79c84731df0023d738332b3688a2adbcbdda802f8ee70297fc8fe0ca38","src/ext/reference.rs":"a51fe4f63e5708afa4fbabd13638b88c34845a64263db77308cd1b974a0f325f","src/ext/rev_spec.rs":"c196f21a3981b7ad84e9385ba3924a554accf9c7a3b4f8e2bba13cb74f108ec8","src/ext/tree.rs":"c4446627523e22c032c0e322e5daa35b3105bf68f6eaa45da84c590edb76420e","src/head/log.rs":"0de9d5f834b633756001bdd246e98266084d521d7f775537e6e6ead5ed93d7e4","src/head/mod.rs":"443c8f5129d2e92d893f53d4d6babc1ae1a79c88c1474738d35f2cc8aac98465","src/head/peel.rs":"039473b0ba6b77414b330115d855452a03ee9bc1c8b447ff2e310cbb45636cb9","src/id.rs":"c39eadc8cd9f46e951d2d9616ef8062409aec494872ea5f716f0ad9363986d85","src/init.rs":"84d97d8ca3f61cb87dfc99be1ee56ee53c6bdac75a741bd5c7bf995f7d5cb137","src/interrupt.rs":"f6aaa74d277900528f5628e483bddc9c821199ea06db39630e3f55ef93dc31e9","src/kind.rs":"84485be9dd514b8b91e104ee3e72944e062eaabf39
3291a1ac7e6a090104dbe8","src/lib.rs":"2bc758adf5a99afa3ebfda048f7d402e2d0b046c58fed7b9b1ed54aa243d8b42","src/mailmap.rs":"415301bdcb3d6991675c7ea86a92eea152cf65df7b2d14da4b58b3b605039e4e","src/object/blob.rs":"ae504ea412a4b0df0bb1d4e61655ed1d9f5654e1e586eedcf0d72fa1d20f7866","src/object/commit.rs":"6daa18b669950312582f42790011bc5782a8219d20de8bf35b0a2f0ef81d6902","src/object/errors.rs":"f1b322fbff0e400dfb153e67e7f6de1839ec488b53e552ac968545131a79ac74","src/object/impls.rs":"6b9f76d332da496148adaa9b5b5de5dcf98c3fd92deba245dd2e6c2a3895eb77","src/object/mod.rs":"7290e4cc7e071a9fba662e9eeff3c9dfbb1fdba71ba01bf125314d0426083b04","src/object/peel.rs":"c23ac9baac1216d9320c71926f56548d18f7f3964bdbcccd8ca81f6b756f7ad9","src/object/tag.rs":"8915013ff2b2f593cb7cba5fd3f77beef9792ac17cd2afa60d216da104e7863e","src/object/tree/diff/change.rs":"95df7e1fb3b58fa2a0086f205c39940fb590ccdac92aee53a6cff5f29181f30c","src/object/tree/diff/for_each.rs":"419e83b0dfc6b2c9ee7e14489513f1979a3a6d46fa4e03e212f57d8aa90af60f","src/object/tree/diff/mod.rs":"2bdb1e9f10af7a163c776d26ebd9841e7b3d635dd0e8ea63815339926eaf0b7e","src/object/tree/diff/rewrites.rs":"582c5de23ce61dfe5b7bd76ac495d71a1f46b0831c5cffc1fe5c52be704217af","src/object/tree/diff/tracked.rs":"4dd20f5e2c3c577a1f51a0fd9cdcb4d40c8824f943439402bdeaef34fd0b00a6","src/object/tree/iter.rs":"4e54d72fce5a5bc9299ea36147dd2d95d3d25d34d97dd5d663e7ce0eb87b1f13","src/object/tree/mod.rs":"bf64586b62d74dfa2f128a06d61a3d22d230b60e21a44338e89fb8982d052322","src/object/tree/traverse.rs":"ae0342857d42b6afb82167423dfdac9611ce1b7281b6b9b542e230960c564c0f","src/open/mod.rs":"12b28d68fc9b85e40f61e3323d9cabcba0f691099586a553b8bf72e8c9ce1a39","src/open/options.rs":"955a2837697c4545f806dd640142f796ab5accaf8dcad36b806e324a5361ddb8","src/open/repository.rs":"9e99f89d4b34a348f565689b8ca41d99064330e80b6e7ca76272ded0abacc67c","src/path.rs":"3cd4c92a626384f8f4db89b2f7594923b922d7b841681c167748a01abc7dc3f7","src/reference/edits.rs":"df762921fa815470a23bd884e0b1677b5f4705d2b2aaef8f59c0ed759c1ce6cb","src/reference/errors.rs":"6c7e624031b8c09f2011ca30afa487c20487218e4ece6cf34892e8f693f22901","src/reference/iter.rs":"645ebf083877a73c05202e8b79489e19fad290e1e6e41b7d817e08307e2822c3","src/reference/log.rs":"95b3e85dde8ca06251c15756ad3b54eeb48804ac6718ec4634f367091bd42b80","src/reference/mod.rs":"e0cad649928296e68df1b89119a35814ac4c6887957f7cdf644113b59fd74645","src/reference/remote.rs":"a739c6e1c9b8fd61607fdcd3cb25a1d6c80bb81930e2258d1ba4f92d79bf3e66","src/remote/access.rs":"76e75baca0155710c2eea0e439df0774ecb0cd6c7e6b950876dd9e6e75be5e9f","src/remote/build.rs":"773a10e61826a4976b872ae2922f723c55174e0ed4c0b941ab8034bc33b314cc","src/remote/connect.rs":"181a7fc0510a4da3a4be4245f9a80e6d90115298aad23dc1bf8aa4e746ff6dcb","src/remote/connection/access.rs":"728d8c7e4ce2669c59b0737cfd2f8faf4983716ae105b3bf8d3d8b99660b105d","src/remote/connection/fetch/config.rs":"93b1392fdc409dba073e96272f13ad7ab996522d59df819edc800431ee36975b","src/remote/connection/fetch/error.rs":"19df1a6b367fcc8b77a8a05a4fb87a816303ae1260f7d98f03992920fb0de651","src/remote/connection/fetch/mod.rs":"78625af01154e8f2a8e7017ed7c8507276031b430efc1ee50b9b2ad07d3a6e41","src/remote/connection/fetch/negotiate.rs":"d8a6774c66eeee9380d2f3edbeacf927bf800303bd57dc85e1dc498e9e80f873","src/remote/connection/fetch/receive_pack.rs":"293e0feb816c64c136bffea7d5a77b3971df11aaf7b0a37dad0f9fd9d111474a","src/remote/connection/fetch/update_refs/mod.rs":"a1e93355795ee0b4ca28946fc464dcd29af460b3f37b4376b7e31760050805af","src/remote/connection/fetch/update_refs/
tests.rs":"f9c197bf7ba8528c888d486bdc9d43c42181e3087f15bb0416514a38a5ec8dbe","src/remote/connection/fetch/update_refs/update.rs":"069156a6fcebd0eab64d1f816eae6612bc5d0523f3dae05e69ece40a23c00a95","src/remote/connection/mod.rs":"ca0216eca9ad853c875d764d8e7cbad757f1047d70aaacd3994f78d35facf2e6","src/remote/connection/ref_map.rs":"e17ae194281232f51716dcc7f3dab208a1eb85856a902099bb0da364e85e5bd8","src/remote/errors.rs":"148318b54894fddf1eee58bbb622861122fcd5d6b6a9470083779945f75c3257","src/remote/fetch.rs":"49ede72db851d21144fa676508ea4f5456fcfb9ee00b9ea432462a69e325fca0","src/remote/init.rs":"519adc4da7ca2739697bd4e48d0100458a146c8cb71069b8a6a721198199d3d9","src/remote/mod.rs":"14a1532470cc76cf3b727657439923f3515dd781ebfc7030d9481056c6bbf162","src/remote/name.rs":"9fa7e81a157375ecb40ddfba5da850fef7513296a68014a50f5148d57f40b8b7","src/remote/save.rs":"7e96e890bcd4dc85a866d0db08e68d27ea183e0e6446a2aa46bed3379d2e4335","src/remote/url/mod.rs":"a3279f5330fc83193e583f2b839ab08006c293b0040db6cc74969d50813fbbaa","src/remote/url/rewrite.rs":"4d43167909020458abc5e3e7310e3efaddb9fc7c783581fbc9422fa8b057d95d","src/remote/url/scheme_permission.rs":"4f6e528260c67af54171477d97707050f048c0c35e77f088f8ddc4ae135e6386","src/repository/cache.rs":"72d41c8f90d2e49c10b884582613fa5941ea0e1d15cc7d0a4d0d7be0b0243d4d","src/repository/config/mod.rs":"02d1a3d68c6a21b0fb7e199f69d7e7641645afa5710e9843a922a040d9422d27","src/repository/config/transport.rs":"cb163133228cb4d75c140b747fdc132b4512bd5b86b10f70bd8cd8f9a432859f","src/repository/identity.rs":"81d368f2b7d65800bf93d623f130ddb44a8d9e965cdd9757455733d31f92d483","src/repository/impls.rs":"ad0296fa71d1253eaceb71cd5dbe73c6204d245d2f1c7a9cc01e7279b678d1fd","src/repository/init.rs":"738f53abc0fdb3189ebe7cf0092d18907d3cb8cd0304af0b68077395a337ebec","src/repository/location.rs":"a5d30567442837259cf718e9f8f26633a17c997476669e15e5d3cd410b3b9971","src/repository/mod.rs":"6fbe6f3e768402c728920118dfa1290f4800544de5cbbd0cb56de1008dcca186","src/repository/object.rs":"18e3218efd301c1c3198134bee57c4e4450a15a929352cfd058779a32c70e40e","src/repository/permissions.rs":"d2c8589667c746baef86533a70a8a6705fc93de3200bfd4874fcf5b0788fa66e","src/repository/reference.rs":"c37e556966dd4308bc8b5784484612d9d62a438a443d1d667374ed604b238f5e","src/repository/remote.rs":"936f6a5e4b7d7a7bfe974c287e1f8fff4a2935d4020c6156bc469b74e84bd861","src/repository/revision.rs":"66126940c3a94776d94b901c14078d5b749723c3c0ed8500639edfe37aaa97da","src/repository/snapshots.rs":"f99ca2e7d1bb3d1fff9d3df2d36b979c57197fcbbb633090edb0319d43873fa2","src/repository/state.rs":"040cca4a26463211aef85e4539d084969240f30d985c1bd3ae9bb3cd01009d1d","src/repository/thread_safe.rs":"ea2f5d07958d3fad08cba5028d18901a99088d3c1fc56934e3e01b0c4ea37c39","src/repository/worktree.rs":"d0a804df89684c61c3d1919a42e0855a4277decbd0f3babaaa9627b23aa436ad","src/revision/mod.rs":"a466315d4aea646ddba99c54f9ae118431b56dafbca6d0462ebabca3695d027d","src/revision/spec/mod.rs":"aa1516ed667a49bfa0db55fdea0bfbcc3227411be105496f0867bd7efb5d5dda","src/revision/spec/parse/delegate/mod.rs":"bfe787c5c881b5e4acf4bbcb79d966b8a92e56e8b2d20525bf0c0cd6ba0a0b8d","src/revision/spec/parse/delegate/navigate.rs":"b182bca82031b4ecfd4c36bcc48599a67a247b4e785591e85933eef50e428ff2","src/revision/spec/parse/delegate/revision.rs":"51ff36007b7376305cfcf8acbacc53ddb72796aceec8b761f4d1f1843554487c","src/revision/spec/parse/error.rs":"48c0c2e7dcffe80aac5563094787e0502c4275619fc35467a2687499bbad0fef","src/revision/spec/parse/mod.rs":"141e591acacfce8cc8d11c5b72d18c8d05baad1b21dead0d8e7242
f4963410b7","src/revision/spec/parse/types.rs":"c84d3af2c9f215a375c7b50a7251377c9aca5c09c16ab3afd0af1d25a22d7d89","src/revision/walk.rs":"dd9ceebcbd4560b8c3b6390af29f7d8029eefb6cd82b7b8f947257dea420af51","src/tag.rs":"f60043aaa7afb54652ab6cf64765ebd61607f19a83dd5960abf8077d2cb91d44","src/types.rs":"af0f1a55a39255dca57a87951aa7bab9d9e90d3adffcda7c956917dffce1901b","src/worktree/mod.rs":"2b430fb1f74d625dbf1b6e3e9ddcd50037fdac52dbeb7bd9988b5c7d32aa9a09","src/worktree/proxy.rs":"07ff3cfc6a8f326fb7d62f6762f682498e9e5f131ece935acfd26b5223a338f1"},"package":"dabfac58aecb4a38cdd2568de66eb1f0d968fd6726f5a80cb8bea7944ef10cc0"} \ No newline at end of file
diff --git a/vendor/gix/CHANGELOG.md b/vendor/gix/CHANGELOG.md
new file mode 100644
index 000000000..c98de68d0
--- /dev/null
+++ b/vendor/gix/CHANGELOG.md
@@ -0,0 +1,1606 @@
+# Changelog
+
+All notable changes to this project will be documented in this file.
+
+The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
+and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+
+## 0.39.0 (2023-03-04)
+
+A maintenance release without user-facing changes, primarily for getting the progress-bar updates into `cargo`.
+
+### Commit Statistics
+
+<csr-read-only-do-not-edit/>
+
+ - 2 commits contributed to the release.
+ - 3 days passed between releases.
+ - 0 commits were understood as [conventional](https://www.conventionalcommits.org).
+ - 0 issues like '(#ID)' were seen in commit messages
+
+### Commit Details
+
+<csr-read-only-do-not-edit/>
+
+<details><summary>view details</summary>
+
+ * **Uncategorized**
+ - Prepare changelogs prior to release ([`895e482`](https://github.com/Byron/gitoxide/commit/895e482badf01e953bb9144001eebd5e1b1c4d84))
+ - Release gix-features v0.28.0, gix-actor v0.19.0, gix-object v0.28.0, gix-diff v0.28.0, gix-traverse v0.24.0, gix-pack v0.32.0, safety bump 20 crates ([`0f411e9`](https://github.com/Byron/gitoxide/commit/0f411e93ec812592bb9d3a52b751399dd86f76f7))
+</details>
+
+## 0.38.0 (2023-03-01)
+
+### New Features
+
+ - <csr-id-256f7d46ed88067aa96f47be2a97a6f9f5b98075/> the `hp-tempfile-registry` feature toggle to control the `dashmap` dependency.
+ It may also provide better performance in certain cases.
+ - <csr-id-fd7eebcd922f98c1aed9e3177b9a48ff1415ffd8/> make `gix-pack` feature toggles related to pack caches available.
+ Previously they would have to be configured by pulling in `gix-pack`, which
+ isn't desirable as the only crate we want to expose like that is `gix-features`.
+ - <csr-id-5b0ebd272c3d98e26c9249ed27b4ea9a8ad80746/> Add `comfort` feature toggle (default enabled) to make better progress units available.
+ This could be a breaking change for those who turned default-features off, as they may now
+ have to re-add the `comfort` feature to get nicer progress messages.
+
+### Bug Fixes
+
+ - <csr-id-b2375e3dbe1f87ee3ac6e814fc8f4898143c438d/> `gix-tempfile` is now configured to not use the high-performance hashmap anymore.
+ It was hard to justify as tests actually seemed to be faster without it.
+
+### New Features (BREAKING)
+
+ - <csr-id-fea8c56089e5b354669396853c5bd0f31bdf0d33/> Put `progress::tree` behind the `progress-tree` feature toggle.
+ It's a convenience export that implies pulling in more dependencies, so it
+ should be gated.
+ - <csr-id-441f5ac4dd2f0636ec07065f8095e8bae5ce6985/> gate all signal handling behind the `signals` feature toggle.
+ This change also consolidates all signal handling into its own module called
+ `signal` to provide reusable handlers as well as signal initialization.
+
+ Note that the functions to clean up tempfiles don't interact with the signal registry,
+ hence they can still be called without the `signals` feature enabled.
+
+ Note that this change sneakily fixes a bug that could have caused a `write_all()`
+ on a tempfile that was removed by a signal to enter an infinite loop.
+
+### Commit Statistics
+
+<csr-read-only-do-not-edit/>
+
+ - 17 commits contributed to the release over the course of 4 calendar days.
+ - 4 days passed between releases.
+ - 6 commits were understood as [conventional](https://www.conventionalcommits.org).
+ - 1 unique issue was worked on: [#339](https://github.com/Byron/gitoxide/issues/339)
+
+### Commit Details
+
+<csr-read-only-do-not-edit/>
+
+<details><summary>view details</summary>
+
+ * **[#339](https://github.com/Byron/gitoxide/issues/339)**
+ - Gate all signal handling behind the `signals` feature toggle. ([`441f5ac`](https://github.com/Byron/gitoxide/commit/441f5ac4dd2f0636ec07065f8095e8bae5ce6985))
+ * **Uncategorized**
+ - Release gix-tempfile v4.1.0, gix-lock v4.0.0, gix-ref v0.25.0, gix-config v0.17.0, gix-url v0.14.0, gix-credentials v0.10.0, gix-diff v0.27.0, gix-discover v0.14.0, gix-hashtable v0.1.2, gix-bitmap v0.2.2, gix-traverse v0.23.0, gix-index v0.13.0, gix-mailmap v0.10.0, gix-pack v0.31.0, gix-odb v0.41.0, gix-transport v0.26.0, gix-protocol v0.27.0, gix-revision v0.11.0, gix-refspec v0.8.0, gix-worktree v0.13.0, gix v0.38.0, safety bump 6 crates ([`ea9fd1d`](https://github.com/Byron/gitoxide/commit/ea9fd1d9b60e1e9e17042e9e37c06525823c40a5))
+ - Release gix-features v0.27.0, gix-actor v0.18.0, gix-quote v0.4.3, gix-attributes v0.9.0, gix-object v0.27.0, gix-ref v0.25.0, gix-config v0.17.0, gix-url v0.14.0, gix-credentials v0.10.0, gix-diff v0.27.0, gix-discover v0.14.0, gix-hashtable v0.1.2, gix-bitmap v0.2.2, gix-traverse v0.23.0, gix-index v0.13.0, gix-mailmap v0.10.0, gix-pack v0.31.0, gix-odb v0.41.0, gix-transport v0.26.0, gix-protocol v0.27.0, gix-revision v0.11.0, gix-refspec v0.8.0, gix-worktree v0.13.0, gix v0.38.0 ([`e6cc618`](https://github.com/Byron/gitoxide/commit/e6cc6184a7a49dbc2503c1c1bdd3688ca5cec5fe))
+ - Adjust manifests prior to release ([`addd789`](https://github.com/Byron/gitoxide/commit/addd78958fdd1e54eb702854e96079539d01965a))
+ - Prepare changelogs prior to release ([`94c99c7`](https://github.com/Byron/gitoxide/commit/94c99c71520f33269cc8dbc26f82a74747cc7e16))
+ - Merge branch 'adjustments-for-cargo' ([`d686d94`](https://github.com/Byron/gitoxide/commit/d686d94e1030a8591ba074757d56927a346c8351))
+ - `gix-tempfile` is now configured to not use the high-performance hashmap anymore. ([`b2375e3`](https://github.com/Byron/gitoxide/commit/b2375e3dbe1f87ee3ac6e814fc8f4898143c438d))
+ - Depend on latest version of `prodash` for performance improvements. ([`5d00324`](https://github.com/Byron/gitoxide/commit/5d003242abe82b1604e2188d49dec9690ebb2a6a))
+ - The `hp-tempfile-registry` feature toggle to control the `dashmap` dependency. ([`256f7d4`](https://github.com/Byron/gitoxide/commit/256f7d46ed88067aa96f47be2a97a6f9f5b98075))
+ - Make `gix-pack` feature toggles related to pack caches available. ([`fd7eebc`](https://github.com/Byron/gitoxide/commit/fd7eebcd922f98c1aed9e3177b9a48ff1415ffd8))
+ - Put `progress::tree` behind the `progress-tree` feature toggle. ([`fea8c56`](https://github.com/Byron/gitoxide/commit/fea8c56089e5b354669396853c5bd0f31bdf0d33))
+ - Add `comfort` feature toggle (default enabled) to make better progress units available. ([`5b0ebd2`](https://github.com/Byron/gitoxide/commit/5b0ebd272c3d98e26c9249ed27b4ea9a8ad80746))
+ - Prepare for git-tempfile release ([`56c005b`](https://github.com/Byron/gitoxide/commit/56c005b13c44376f71e61781e73c0bf93416d0e4))
+ - Merge branch 'tempfile-upgrades' ([`3522cba`](https://github.com/Byron/gitoxide/commit/3522cbaac721c8079605be51b9053014bc5e863a))
+ - Adjust to changes in `gix-tempfile` ([`c6785fc`](https://github.com/Byron/gitoxide/commit/c6785fc7082b90c8a27cef6a0f5cc5acd8cb8951))
+ - Make fmt ([`8ef1cb2`](https://github.com/Byron/gitoxide/commit/8ef1cb293434c7b9e1fda4a6963368e0435920a9))
+ - Fix diff-tests on windows ([`441a64b`](https://github.com/Byron/gitoxide/commit/441a64b6b703f7f97cfcefe4d3db31bc7427b48c))
+</details>
+
+## 0.37.2 (2023-02-24)
+
+### Bug Fixes
+
+ - <csr-id-1d3d22d45e70222c12fcf5a82063ceb9321a0129/> reproduce a diff issue and fix it
+ Diffs could be quite wrong and this is a small repro along with the fix.
+
+### Commit Statistics
+
+<csr-read-only-do-not-edit/>
+
+ - 5 commits contributed to the release.
+ - 3 days passed between releases.
+ - 1 commit was understood as [conventional](https://www.conventionalcommits.org).
+ - 0 issues like '(#ID)' were seen in commit messages
+
+### Commit Details
+
+<csr-read-only-do-not-edit/>
+
+<details><summary>view details</summary>
+
+ * **Uncategorized**
+ - Release gix-object v0.26.4, gix-diff v0.26.3, gix v0.37.2, gix-commitgraph v0.13.1, gitoxide-core v0.25.0, gitoxide v0.23.0 ([`9982949`](https://github.com/Byron/gitoxide/commit/9982949cab401501d5ce3cba4e2ba900bc249c53))
+ - Fix new diff tests on windows ([`b1ec1b7`](https://github.com/Byron/gitoxide/commit/b1ec1b776696b4b1d73e3dd26cbaf33260367855))
+ - Prepare changelog for release ([`13a1ec1`](https://github.com/Byron/gitoxide/commit/13a1ec1803d677c2e94f3ea0461118c2426f8071))
+ - Merge branch 'rename-tracking' ([`550144a`](https://github.com/Byron/gitoxide/commit/550144a5fd37d501d86f4b1c4db2948d951d1c93))
+ - Reproduce a diff issue and fix it ([`1d3d22d`](https://github.com/Byron/gitoxide/commit/1d3d22d45e70222c12fcf5a82063ceb9321a0129))
+</details>
+
+## 0.37.1 (2023-02-21)
+
+A maintenance release to restore MSRV (1.64) support.
+
+### Commit Statistics
+
+<csr-read-only-do-not-edit/>
+
+ - 3 commits contributed to the release.
+ - 0 commits were understood as [conventional](https://www.conventionalcommits.org).
+ - 0 issues like '(#ID)' were seen in commit messages
+
+### Commit Details
+
+<csr-read-only-do-not-edit/>
+
+<details><summary>view details</summary>
+
+ * **Uncategorized**
+ - Release gix-config v0.16.3, gix v0.37.1 ([`a3c283f`](https://github.com/Byron/gitoxide/commit/a3c283ff0e3f21cedb3ba7cd464fdfa0f5133af0))
+ - Prepare changelogs prior to release ([`362d659`](https://github.com/Byron/gitoxide/commit/362d659f946ca1ff2cbf915766113a34a9df97b3))
+ - Restore msrv compatibility by removing sole `if let ... else` ([`9160659`](https://github.com/Byron/gitoxide/commit/91606597b714a6e9b3a2c071bdb08baeacd6056b))
+</details>
+
+## 0.37.0 (2023-02-20)
+
+### Bug Fixes
+
+ - <csr-id-d3b974000133caa0ea107cb4724b950eda91d69b/> `Repository::object_cache_size()` now unsets the cache if `Some(0)` is passed.
+ Previously it would fail.
+
+### New Features (BREAKING)
+
+ - <csr-id-ed87f4c7c2799625bc6c7109368687908f0fb6f0/> `object::tree::diff::Platform::track_rewrites(...)`
+ The invocation of `object::tree::diff::Platform::track_rewrites(Rewrites { percentage: None, ..Default::default() })`
+ now explicitly configures perfect rename tracking without a similarity percentage.
+
+ By setting `percentage = Some(<fraction>)` one can set how similar both files should be to be considered related.
+
+ The same can be configured for copy-tracking, which also includes something like `--find-copies-harder`.
+
+ Note that by default, renames are considered if a file looks 50% similar, and copy tracking
+ uses the same convention.
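
A minimal sketch of the two configurations described in this entry. The `Rewrites { percentage: None, ..Default::default() }` literal is taken verbatim from the text above; that `percentage` is an `Option` holding a fraction, and that the resulting value is handed to `object::tree::diff::Platform::track_rewrites(...)`, are assumptions based on the same wording.

```rust
use gix::object::tree::diff::Rewrites;

/// The two rename-tracking setups described above, as plain values.
fn rename_tracking_examples() -> (Rewrites, Rewrites) {
    // Perfect rename tracking only: no similarity fraction, exact content matches.
    let exact_only = Rewrites { percentage: None, ..Default::default() };
    // Consider files related once they are at least 50% similar (the stated default).
    let fuzzy = Rewrites { percentage: Some(0.5), ..Default::default() };
    (exact_only, fuzzy)
}
```

Either value would then be passed to `Platform::track_rewrites(...)` before driving the diff.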
+
+### Commit Statistics
+
+<csr-read-only-do-not-edit/>
+
+ - 4 commits contributed to the release.
+ - 2 commits were understood as [conventional](https://www.conventionalcommits.org).
+ - 0 issues like '(#ID)' were seen in commit messages
+
+### Commit Details
+
+<csr-read-only-do-not-edit/>
+
+<details><summary>view details</summary>
+
+ * **Uncategorized**
+ - Release gix-object v0.26.3, gix-diff v0.26.2, gix-traverse v0.22.2, gix v0.37.0, safety bump 3 crates ([`8b3e42f`](https://github.com/Byron/gitoxide/commit/8b3e42f69fe97fe5083eb845c323f10d7ac087b2))
+ - `Repository::object_cache_size()` now unsets the cache if `Some(0)` is passed. ([`d3b9740`](https://github.com/Byron/gitoxide/commit/d3b974000133caa0ea107cb4724b950eda91d69b))
+ - Merge branch 'rename-tracking' ([`35415c5`](https://github.com/Byron/gitoxide/commit/35415c5061bf5ea90a04db80d06ac3622d0b0f1a))
+ - `object::tree::diff::Platform::track_rewrites(...)` ([`ed87f4c`](https://github.com/Byron/gitoxide/commit/ed87f4c7c2799625bc6c7109368687908f0fb6f0))
+</details>
+
+## 0.36.1 (2023-02-20)
+
+### Bug Fixes
+
+ - <csr-id-135d317065aae87af302beb6c26bb6ca8e30b6aa/> compatibility with `bstr` v1.3, use `*.as_bytes()` instead of `.as_ref()`.
+ `as_ref()` relies on a known target type, which isn't always present. As long as there
+ is only one implementation that's no problem, but once another one is added, compilation
+ fails due to ambiguity.
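
To make the ambiguity concrete, here is a self-contained sketch around a hypothetical `Name` type (not part of `gix` or `bstr`): a bare `.as_ref()` call stops compiling once a second `AsRef` implementation appears, while an inherent `as_bytes()`-style accessor stays unambiguous.

```rust
struct Name(Vec<u8>);

impl AsRef<[u8]> for Name {
    fn as_ref(&self) -> &[u8] {
        &self.0
    }
}

// A second implementation is enough to make bare `.as_ref()` calls ambiguous.
impl AsRef<str> for Name {
    fn as_ref(&self) -> &str {
        std::str::from_utf8(&self.0).expect("valid UTF-8")
    }
}

impl Name {
    /// Unambiguous accessor in the spirit of `as_bytes()`.
    fn as_bytes(&self) -> &[u8] {
        &self.0
    }
}

fn main() {
    let name = Name(b"gitoxide".to_vec());
    // let bytes = name.as_ref();      // error[E0283]: type annotations needed
    let bytes: &[u8] = name.as_ref();  // compiles, but only with the annotation
    assert_eq!(bytes, name.as_bytes()); // the inherent method never needs one
}
```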
+
+### Commit Statistics
+
+<csr-read-only-do-not-edit/>
+
+ - 5 commits contributed to the release over the course of 2 calendar days.
+ - 3 days passed between releases.
+ - 1 commit was understood as [conventional](https://www.conventionalcommits.org).
+ - 0 issues like '(#ID)' were seen in commit messages
+
+### Commit Details
+
+<csr-read-only-do-not-edit/>
+
+<details><summary>view details</summary>
+
+ * **Uncategorized**
+ - Release gix v0.36.1 ([`fac6bce`](https://github.com/Byron/gitoxide/commit/fac6bce2f9942d7f333f66a92374d5400a00b0a5))
+ - Release gix-date v0.4.3, gix-hash v0.10.3, gix-features v0.26.5, gix-actor v0.17.2, gix-glob v0.5.5, gix-path v0.7.2, gix-quote v0.4.2, gix-attributes v0.8.3, gix-validate v0.7.3, gix-object v0.26.2, gix-ref v0.24.1, gix-config v0.16.2, gix-command v0.2.4, gix-url v0.13.3, gix-credentials v0.9.2, gix-discover v0.13.1, gix-index v0.12.4, gix-mailmap v0.9.3, gix-pack v0.30.3, gix-packetline v0.14.3, gix-transport v0.25.6, gix-protocol v0.26.4, gix-revision v0.10.4, gix-refspec v0.7.3, gix-worktree v0.12.3, gix v0.36.1 ([`9604783`](https://github.com/Byron/gitoxide/commit/96047839a20a657a559376b0b14c65aeab96acbd))
+ - Compatibility with `bstr` v1.3, use `*.as_bytes()` instead of `.as_ref()`. ([`135d317`](https://github.com/Byron/gitoxide/commit/135d317065aae87af302beb6c26bb6ca8e30b6aa))
+ - Release gix-glob v0.5.4 ([`c56d336`](https://github.com/Byron/gitoxide/commit/c56d3365fde21120cf6101cf34f8b5669804977c))
+ - Release gix-transport v0.25.5 ([`f872ba8`](https://github.com/Byron/gitoxide/commit/f872ba8271a5d632acc071e7a857ef19f7cf5610))
+</details>
+
+## 0.36.0 (2023-02-17)
+
+### New Features
+
+ - <csr-id-4f49992fae2bc60b22644e86808d61afe557f192/> cloning repositories doesn't require a committer anymore.
+ This is similar to what git does and probably a decent convenience to have.
+
+### Commit Statistics
+
+<csr-read-only-do-not-edit/>
+
+ - 45 commits contributed to the release over the course of 3 calendar days.
+ - 4 days passed between releases.
+ - 1 commit was understood as [conventional](https://www.conventionalcommits.org).
+ - 1 unique issue was worked on: [#737](https://github.com/Byron/gitoxide/issues/737)
+
+### Commit Details
+
+<csr-read-only-do-not-edit/>
+
+<details><summary>view details</summary>
+
+ * **[#737](https://github.com/Byron/gitoxide/issues/737)**
+ - Cloning repositories doesn't require a committer anymore. ([`4f49992`](https://github.com/Byron/gitoxide/commit/4f49992fae2bc60b22644e86808d61afe557f192))
+ * **Uncategorized**
+ - Release gix-transport v0.25.4, gix-protocol v0.26.3, gix-revision v0.10.3, gix-refspec v0.7.2, gix-worktree v0.12.2, gix v0.36.0 ([`7fc00f8`](https://github.com/Byron/gitoxide/commit/7fc00f87d74aedf631ce4032be1cdfe1804c7e7d))
+ - Release gix-pack v0.30.2, gix-odb v0.40.2, gix-packetline v0.14.2, gix-transport v0.25.4, gix-protocol v0.26.3, gix-revision v0.10.3, gix-refspec v0.7.2, gix-worktree v0.12.2, gix v0.36.0 ([`59e9fac`](https://github.com/Byron/gitoxide/commit/59e9fac67d1b353e124300435b55f6b5468d7deb))
+ - Release gix-index v0.12.3, gix-mailmap v0.9.2, gix-chunk v0.4.1, gix-pack v0.30.2, gix-odb v0.40.2, gix-packetline v0.14.2, gix-transport v0.25.4, gix-protocol v0.26.3, gix-revision v0.10.3, gix-refspec v0.7.2, gix-worktree v0.12.2, gix v0.36.0 ([`48f5bd2`](https://github.com/Byron/gitoxide/commit/48f5bd2014fa3dda6fbd60d091065c5537f69453))
+ - Release gix-credentials v0.9.1, gix-diff v0.26.1, gix-discover v0.13.0, gix-hashtable v0.1.1, gix-bitmap v0.2.1, gix-traverse v0.22.1, gix-index v0.12.3, gix-mailmap v0.9.2, gix-chunk v0.4.1, gix-pack v0.30.2, gix-odb v0.40.2, gix-packetline v0.14.2, gix-transport v0.25.4, gix-protocol v0.26.3, gix-revision v0.10.3, gix-refspec v0.7.2, gix-worktree v0.12.2, gix v0.36.0 ([`a5869e0`](https://github.com/Byron/gitoxide/commit/a5869e0b223406820bca836e3e3a7fae2bfd9b04))
+ - Release gix-config v0.16.1, gix-command v0.2.3, gix-prompt v0.3.2, gix-url v0.13.2, gix-credentials v0.9.1, gix-diff v0.26.1, gix-discover v0.13.0, gix-hashtable v0.1.1, gix-bitmap v0.2.1, gix-traverse v0.22.1, gix-index v0.12.3, gix-mailmap v0.9.2, gix-chunk v0.4.1, gix-pack v0.30.2, gix-odb v0.40.2, gix-packetline v0.14.2, gix-transport v0.25.4, gix-protocol v0.26.3, gix-revision v0.10.3, gix-refspec v0.7.2, gix-worktree v0.12.2, gix v0.36.0 ([`41d57b9`](https://github.com/Byron/gitoxide/commit/41d57b98964094fc1528adb09f69ca824229bf25))
+ - Release gix-attributes v0.8.2, gix-config-value v0.10.1, gix-tempfile v3.0.2, gix-lock v3.0.2, gix-validate v0.7.2, gix-object v0.26.1, gix-ref v0.24.0, gix-sec v0.6.2, gix-config v0.16.1, gix-command v0.2.3, gix-prompt v0.3.2, gix-url v0.13.2, gix-credentials v0.9.1, gix-diff v0.26.1, gix-discover v0.13.0, gix-hashtable v0.1.1, gix-bitmap v0.2.1, gix-traverse v0.22.1, gix-index v0.12.3, gix-mailmap v0.9.2, gix-chunk v0.4.1, gix-pack v0.30.2, gix-odb v0.40.2, gix-packetline v0.14.2, gix-transport v0.25.4, gix-protocol v0.26.3, gix-revision v0.10.3, gix-refspec v0.7.2, gix-worktree v0.12.2, gix v0.36.0 ([`e313112`](https://github.com/Byron/gitoxide/commit/e31311257bd138b52042dea5fc40c3abab7f269b))
+ - Release gix-features v0.26.4, gix-actor v0.17.1, gix-glob v0.5.3, gix-path v0.7.1, gix-quote v0.4.1, gix-attributes v0.8.2, gix-config-value v0.10.1, gix-tempfile v3.0.2, gix-lock v3.0.2, gix-validate v0.7.2, gix-object v0.26.1, gix-ref v0.24.0, gix-sec v0.6.2, gix-config v0.16.1, gix-command v0.2.3, gix-prompt v0.3.2, gix-url v0.13.2, gix-credentials v0.9.1, gix-diff v0.26.1, gix-discover v0.13.0, gix-hashtable v0.1.1, gix-bitmap v0.2.1, gix-traverse v0.22.1, gix-index v0.12.3, gix-mailmap v0.9.2, gix-chunk v0.4.1, gix-pack v0.30.2, gix-odb v0.40.2, gix-packetline v0.14.2, gix-transport v0.25.4, gix-protocol v0.26.3, gix-revision v0.10.3, gix-refspec v0.7.2, gix-worktree v0.12.2, gix v0.36.0 ([`6efd0d3`](https://github.com/Byron/gitoxide/commit/6efd0d31fbeca31ab7319aa2ac97bb31dc4ce055))
+ - Release gix-date v0.4.2, gix-hash v0.10.2, gix-features v0.26.4, gix-actor v0.17.1, gix-glob v0.5.3, gix-path v0.7.1, gix-quote v0.4.1, gix-attributes v0.8.2, gix-config-value v0.10.1, gix-tempfile v3.0.2, gix-lock v3.0.2, gix-validate v0.7.2, gix-object v0.26.1, gix-ref v0.24.0, gix-sec v0.6.2, gix-config v0.16.1, gix-command v0.2.3, gix-prompt v0.3.2, gix-url v0.13.2, gix-credentials v0.9.1, gix-diff v0.26.1, gix-discover v0.13.0, gix-hashtable v0.1.1, gix-bitmap v0.2.1, gix-traverse v0.22.1, gix-index v0.12.3, gix-mailmap v0.9.2, gix-chunk v0.4.1, gix-pack v0.30.2, gix-odb v0.40.2, gix-packetline v0.14.2, gix-transport v0.25.4, gix-protocol v0.26.3, gix-revision v0.10.3, gix-refspec v0.7.2, gix-worktree v0.12.2, gix v0.36.0 ([`6ccc88a`](https://github.com/Byron/gitoxide/commit/6ccc88a8e4a56973b1a358cf72dc012ee3c75d56))
+ - Merge branch 'rename-crates' into inform-about-gix-rename ([`c9275b9`](https://github.com/Byron/gitoxide/commit/c9275b99ea43949306d93775d9d78c98fb86cfb1))
+ - Release git-date v0.4.3, git-hash v0.10.3, git-features v0.26.5, git-actor v0.17.2, git-glob v0.5.4, git-path v0.7.2, git-quote v0.4.2, git-attributes v0.8.3, git-bitmap v0.2.2, git-chunk v0.4.2, git-command v0.2.4, git-commitgraph v0.13.1, git-config-value v0.10.2, git-tempfile v3.0.3, git-lock v3.0.3, git-validate v0.7.3, git-object v0.26.2, git-ref v0.24.1, git-sec v0.6.3, git-config v0.16.2, git-prompt v0.3.3, git-url v0.13.3, git-credentials v0.9.2, git-diff v0.26.2, git-discover v0.13.1, git-fetchhead v0.1.0, git-filter v0.1.0, git-hashtable v0.1.2, git-traverse v0.22.2, git-index v0.12.4, git-lfs v0.1.0, git-mailmap v0.9.3, git-note v0.1.0, git-pack v0.31.0, git-odb v0.41.0, git-packetline v0.14.3, git-pathspec v0.1.0, git-transport v0.25.5, git-protocol v0.26.4, git-rebase v0.1.0, git-revision v0.10.4, git-refspec v0.7.3, git-sequencer v0.1.0, git-submodule v0.1.0, git-tix v0.1.0, git-tui v0.1.0, git-worktree v0.12.3, safety bump 2 crates ([`90035a3`](https://github.com/Byron/gitoxide/commit/90035a332d0ba67584558db3605500fbcb424ddd))
+ - Rename `git-testtools` to `gix-testtools` ([`b65c33d`](https://github.com/Byron/gitoxide/commit/b65c33d256cfed65d11adeff41132e3e58754089))
+ - Adjust to renaming of `git-pack` to `gix-pack` ([`1ee81ad`](https://github.com/Byron/gitoxide/commit/1ee81ad310285ee4aa118118a2be3810dbace574))
+ - Adjust to renaming of `git-odb` to `gix-odb` ([`476e2ad`](https://github.com/Byron/gitoxide/commit/476e2ad1a64e9e3f0d7c8651d5bcbee36cd78241))
+ - Adjust to renaming of `git-index` to `gix-index` ([`86db5e0`](https://github.com/Byron/gitoxide/commit/86db5e09fc58ce66b252dc13b8d7e2c48e4d5062))
+ - Adjust to renaming of `git-diff` to `gix-diff` ([`49a163e`](https://github.com/Byron/gitoxide/commit/49a163ec8b18f0e5fcd05a315de16d5d8be7650e))
+ - Adjust to renaming of `git-mailmap` to `gix-mailmap` ([`2e28c56`](https://github.com/Byron/gitoxide/commit/2e28c56bb9f70de6f97439818118d3a25859698f))
+ - Adjust to renaming of `git-discover` to `gix-discover` ([`53adfe1`](https://github.com/Byron/gitoxide/commit/53adfe1c34e9ea3b27067a97b5e7ac80b351c441))
+ - Adjust to renaming for `git-protocol` to `gix-protocol` ([`823795a`](https://github.com/Byron/gitoxide/commit/823795addea3810243cab7936cd8ec0137cbc224))
+ - Adjust to renaming of `git-refspec` to `gix-refspec` ([`c958802`](https://github.com/Byron/gitoxide/commit/c9588020561577736faa065e7e5b5bb486ca8fe1))
+ - Adjust to renaming of `git-revision` to `gix-revision` ([`ee0ee84`](https://github.com/Byron/gitoxide/commit/ee0ee84607c2ffe11ee75f27a31903db68afed02))
+ - Adjust to renaming of `git-transport` to `gix-transport` ([`b2ccf71`](https://github.com/Byron/gitoxide/commit/b2ccf716dc4425bb96651d4d58806a3cc2da219e))
+ - Adjust to renaming of `git-credentials` to `gix-credentials` ([`6b18abc`](https://github.com/Byron/gitoxide/commit/6b18abcf2856f02ab938d535a65e51ac282bf94a))
+ - Adjust to renaming of `git-prompt` to `gix-prompt` ([`6a4654e`](https://github.com/Byron/gitoxide/commit/6a4654e0d10ab773dd219cb4b731c0fc1471c36d))
+ - Adjust to renaming of `git-worktree` to `gix-worktree` ([`73a1282`](https://github.com/Byron/gitoxide/commit/73a12821b3d9b66ec1714d07dd27eb7a73e3a544))
+ - Adjust to renamining of `git-hashtable` to `gix-hashtable` ([`26a0c98`](https://github.com/Byron/gitoxide/commit/26a0c98d0a389b03e3dc7bfc758b37155e285244))
+ - Adjust to renamining of `git-worktree` to `gix-worktree` ([`108bb1a`](https://github.com/Byron/gitoxide/commit/108bb1a634f4828853fb590e9fc125f79441dd38))
+ - Adjust to renaming of `git-url` to `gix-url` ([`b50817a`](https://github.com/Byron/gitoxide/commit/b50817aadb143e19f61f64e19b19ec1107d980c6))
+ - Adjust to renaming of `git-date` to `gix-date` ([`9a79ff2`](https://github.com/Byron/gitoxide/commit/9a79ff2d5cc74c1efad9f41e21095ae498cce00b))
+ - Adjust to renamining of `git-attributes` to `gix-attributes` ([`4a8b3b8`](https://github.com/Byron/gitoxide/commit/4a8b3b812ac26f2a2aee8ce8ca81591273383c84))
+ - Adjust to renaming of `git-config` to `gix-config` ([`3a861c8`](https://github.com/Byron/gitoxide/commit/3a861c8f049f6502d3bcbdac752659aa1aeda46a))
+ - Adjust to renaming of `git-ref` to `gix-ref` ([`1f5f695`](https://github.com/Byron/gitoxide/commit/1f5f695407b034377d94b172465ff573562b3fc3))
+ - Adjust to renaming of `git-lock` to `gix-lock` ([`2028e78`](https://github.com/Byron/gitoxide/commit/2028e7884ae1821edeec81612f501e88e4722b17))
+ - Adjust to renaming of `git-tempfile` to `gix-tempfile` ([`b6cc3eb`](https://github.com/Byron/gitoxide/commit/b6cc3ebb5137084a6327af16a7d9364d8f092cc9))
+ - Adjust to renaming of `git-object` to `gix-object` ([`fc86a1e`](https://github.com/Byron/gitoxide/commit/fc86a1e710ad7bf076c25cc6f028ddcf1a5a4311))
+ - Adjust to renaming of `git-actor` to `gix-actor` ([`4dc9b44`](https://github.com/Byron/gitoxide/commit/4dc9b44dc52f2486ffa2040585c6897c1bf55df4))
+ - Adjust to renaming of `git-validate` to `gix-validate` ([`5e40ad0`](https://github.com/Byron/gitoxide/commit/5e40ad078af3d08cbc2ca81ce755c0ed8a065b4f))
+ - Adjust to renaming of `git-hash` to `gix-hash` ([`4a9d025`](https://github.com/Byron/gitoxide/commit/4a9d0257110c3efa61d08c8457c4545b200226d1))
+ - Adjust to renaming of `git-features` to `gix-features` ([`e2dd68a`](https://github.com/Byron/gitoxide/commit/e2dd68a417aad229e194ff20dbbfd77668096ec6))
+ - Adjust to renaming of `git-glob` to `gix-glob` ([`35b2a3a`](https://github.com/Byron/gitoxide/commit/35b2a3acbc8f2a03f151bc0a3863163844e0ca86))
+ - Adjust to renaming of `git-sec` to `gix-sec` ([`eabbb92`](https://github.com/Byron/gitoxide/commit/eabbb923bd5a32fc80fa80f96cfdc2ab7bb2ed17))
+ - Adapt to renaming of `git-path` to `gix-path` ([`d3bbcfc`](https://github.com/Byron/gitoxide/commit/d3bbcfccad80fc44ea8e7bf819f23adaca06ba2d))
+ - Fix `gix` changelog (find-replace issue) to indicate renaming from `git-repository` ([`f86b780`](https://github.com/Byron/gitoxide/commit/f86b7803e85839450ed2eeef57fe738da1e3ec87))
+ - Release git-features v0.26.4 ([`109f434`](https://github.com/Byron/gitoxide/commit/109f434e66559a791d541f86876ded8df10766f1))
+ - Release git-features v0.26.3 ([`1ecfb7f`](https://github.com/Byron/gitoxide/commit/1ecfb7f8bfb24432690d8f31367488f2e59a642a))
+</details>
+
+## 0.35.0 (2023-02-13)
+
+This is the last release under this name and merely a notice to inform that `git-repository` from now on is `gix`.
+
+Furthermore, all `git-*` crates belonging to the `gitoxide` project will be renamed to `gix-*`.
+
+### Changed (BREAKING)
+
+ - <csr-id-1408482fd21be7487b46753bb54a018c7a164f34/> a note of the pending rename of `git-repository` to `gix`
+
+### New Features
+
+ - <csr-id-069eb6c3f0844b43873ae1bd536e2bca53ff5c8a/> tree diffs with simple rename and copy tracking in cases where there is no additional modification.
+ As the fastest form of rename tracking, we now support detecting renames and copies
+ where a file was renamed or copied without modification.
+ - <csr-id-f6ed34aa254d34e596ad027c33f78404a630ff76/> Add `diff.renames` and `diff.renameLimit` keys to config tree.
+ In preparation for the implementation.
+
+### Commit Statistics
+
+<csr-read-only-do-not-edit/>
+
+ - 5 commits contributed to the release over the course of 1 calendar day.
+ - 2 days passed between releases.
+ - 0 commits were understood as [conventional](https://www.conventionalcommits.org).
+ - 0 issues like '(#ID)' were seen in commit messages
+
+### Commit Details
+
+<csr-read-only-do-not-edit/>
+
+<details><summary>view details</summary>
+
+ * **Uncategorized**
+ - Release git-config v0.16.1, git-revision v0.10.3, gix v0.35.0 ([`74390ba`](https://github.com/Byron/gitoxide/commit/74390baf9d177a1abe3c7c35f1d9bc67faba1e97))
+ - Show more debugging information if unreachable code is reached. ([`66f5341`](https://github.com/Byron/gitoxide/commit/66f53414efef6cfd6d03f830520964c9bdd23634))
+ - Prepare changelogs prior to release ([`446f866`](https://github.com/Byron/gitoxide/commit/446f866d146e255ab8302b89f87bf28f2c5f3733))
+ - Merge branch 'rename-crates' ([`6461c3d`](https://github.com/Byron/gitoxide/commit/6461c3da4d6daee857606d94294c3f87fc36965a))
+ - Rename `git-repository` to `gix` ([`7bed2a9`](https://github.com/Byron/gitoxide/commit/7bed2a96604397fa990f427b1a970ddeb6f09f1c))
+</details>
+
+## 0.34.0 (2023-02-09)
+
+<csr-id-a01f5d72346c36fdcb77af095273da6f4ab86e21/>
+
+### Documentation
+
+ - <csr-id-39ed9eda62b7718d5109135e5ad406fb1fe2978c/> fix typos
+
+### New Features
+
+ - <csr-id-297d59e8396fbe2e5a2224a8524fa0778e786773/> add `env::collate::fetch::Error` - a combined error type with its own API.
+ This error API allows one to look at all the steps taken to perform an operation and
+ to gather insights from it that would otherwise require understanding a lot about the
+ semantics of the contained errors.
+ - <csr-id-d792ea543246632bf1ca8d0e1d239bbe7f07e219/> use enumerations to advertise progress ids publicly.
+ Previously these were an implementation detail which also means they
+ couldn't be relied upon.
+
+ Thanks to an intermediate enumeration, they become part of the public API
+ and their actual value is not exposed.
+ - <csr-id-5dc408f726d6f0f480438348eb5d713776329710/> read shared indices by dissolving them into the current one.
+ This allows the 'link' extension to be processed correctly, even though it
+ won't be maintained. When written back, the 'link' extension will be removed
+ automatically.
+
+### Bug Fixes
+
+ - <csr-id-5d3a3a245968d5ad8c29ea11a99b4896d1b41191/> don't panic, but error when parsing the rev-specs `^`, `..`, `...`.
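
A hedged sketch of the new behaviour: parsing such degenerate rev-specs now surfaces as an `Err` instead of a panic. `Repository::rev_parse()` is used as the entry point here; the concrete error type is left unspecified.

```rust
/// Degenerate rev-specs should produce errors rather than panics.
fn degenerate_specs_are_errors(repo: &gix::Repository) {
    for spec in ["^", "..", "..."] {
        // Previously parsing one of these could panic; now it returns an error.
        assert!(repo.rev_parse(spec).is_err());
    }
}
```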
+
+### Chore (BREAKING)
+
+ - <csr-id-a01f5d72346c36fdcb77af095273da6f4ab86e21/> adjust to changes in `gitoxide` for clap upgrade to 4.1
+
+### New Features (BREAKING)
+
+ - <csr-id-2faad43d11283ff06381c51d2466307cfb8736ff/> transfer knowledge about configuration and its usage into the type system.
+ That way it's possible to use configuration overrides, even though they are ultimately strings,
+ in a type-safe manner, and to leverage code completion while at it.
+
+ In that process, we also change `Repository::(committer|author)()` to return
+ `Option<Result<...>>` to be able to account for date parse errors.
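
A short sketch of consuming the new `Option<Result<...>>` shape of `Repository::committer()` mentioned above; the field access and error type are simplified assumptions, only the outer shape comes from this entry.

```rust
/// Distinguish "no committer configured" from "configured, but its date failed to parse".
fn committer_name(repo: &gix::Repository) -> Option<String> {
    match repo.committer() {
        Some(Ok(signature)) => Some(signature.name.to_string()),
        Some(Err(_date_parse_error)) => None, // configured, but the date could not be parsed
        None => None,                         // no committer information available at all
    }
}
```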
+
+## 0.33.0 (2023-01-10)
+
+<csr-id-dd7f3bf19cce0d214924fa86aeb4c5823f5bcc02/>
+
+### Chore (BREAKING)
+
+ - <csr-id-dd7f3bf19cce0d214924fa86aeb4c5823f5bcc02/> upgrade MSRV to v1.64 (possible due to `windows` upgrade)
+
+## 0.32.0 (2023-01-09)
+
+<csr-id-80dcb406c5f588122531da115398094de3c3af79/>
+
+### Bug Fixes
+
+ - <csr-id-a05b1c4d82bc6c7758989a3bbe326ea610903820/> default author and committer time
+ When needing to fall back to a default author or committer signature, the
+ time from GIT_AUTHOR_DATE should only be used for the author and
+ GIT_COMMITTER_DATE should only be used for the committer and not
+ intermixed. This change enforces that constraint.
+ - <csr-id-ec7bf71b60f8c1e7529d610557c0305d624c1253/> signature name and email resolution
+ The name and email for the author and/or committer may come from different
+ config files. For example, user.name may be set in the global config and
+ user.email may come from the repository local config.
+
+ This case was broken due to Personas.from_config_and_env() only looking in
+ the last config section containing, for example, a "user" section. Thus if
+ the user.name and user.email are split across multiple sections (i.e.
+ originating from separate config files), the fallback name and email
+ ("gitoxide" and "gitoxide@localhost") would be used.
+
+ The solution is to use gix_config::File::string() to lookup the name and
+ email separately. The string() method correctly resolves the value by
+ looking through all sections from all files in the correct order.
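
The fix described above amounts to resolving each value independently through the merged configuration. The sketch below illustrates that lookup strategy; the three-argument `string(section, subsection, key)` shape is an assumption about the `gix-config` API of that era, and only the use of `File::string()` itself comes from this entry.

```rust
/// Resolve name and email separately so that values originating from different
/// configuration files (e.g. global `user.name`, local `user.email`) combine correctly.
fn user_identity(config: &gix_config::File<'_>) -> Option<(String, String)> {
    let name = config.string("user", None, "name")?;
    let email = config.string("user", None, "email")?;
    Some((name.to_string(), email.to_string()))
}
```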
+
+### Other
+
+ - <csr-id-80dcb406c5f588122531da115398094de3c3af79/> name and email from different config sections
+ The user.name, user.email, author.name, author.email, committer.name, and
+ committer.email configuration may come from different sections from
+ different config files. This new test exercises a couple of scenarios that
+ are currently broken.
+
+### Reverted (BREAKING)
+
+ - <csr-id-87abb51596bd0a5a6b552a5de98a920d6c797e3c/> `committer_or_default()`, `author_or_default()` and `user_default()`.
+ This means that all methods that previously succeeded by adding a default
+ will now fail.
+
+ This is preferable over 'doing something' and also admits that git's
+ guesswork that tries to find user information by querying the system
+ is nothing we want to repeat.
+
+## 0.31.0 (2022-12-30)
+
+<csr-id-9fabfc50007603f9c1f7e70b5bb79a39726b12af/>
+<csr-id-91720798889ee7eb26da03a9e732caedda83b3e3/>
+
+### New Features
+
+ - <csr-id-d48b9a7ae2d51676c7549377bcb0b9d3baa83681/> fetching `ssh` urls can ask for a different username.
+ If authentication fails, the user will be queried for a different username
+ to try authentication via ssh mechanisms again.
+ - <csr-id-61d89f586a0ad913fc2f502520282520a5e1fd15/> collect ssh-specific options to control how the ssh program is invoked.
+ These are passed through when creating the ssh transport.
+
+### Other
+
+ - <csr-id-9fabfc50007603f9c1f7e70b5bb79a39726b12af/> explain how it's possible to deal with the first commit when comparing trees
+ The reason the other tree isn't an option is that it's a special case that can more easily be handled
+ with an `.unwrap_or_else(|| repo.empty_tree())` (or similar) for those who need it.
+
+ Making the empty tree explicit also helps to deal with diffs from the empty tree (which can't be `Option<Tree>`)
+ to the first tree of the first commit.
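
A sketch of the pattern recommended above: substitute the empty tree explicitly when a commit has no parent. `Repository::empty_tree()` is named in this entry; the parent and tree accessors as well as the error handling are assumptions about the surrounding API.

```rust
/// The tree to diff against: the first parent's tree, or the empty tree for an initial commit.
fn parent_tree_or_empty<'repo>(
    repo: &'repo gix::Repository,
    commit: &gix::Commit<'repo>,
) -> Result<gix::Tree<'repo>, Box<dyn std::error::Error>> {
    match commit.parent_ids().next() {
        Some(parent) => Ok(parent.object()?.into_commit().tree()?),
        None => Ok(repo.empty_tree()),
    }
}
```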
+
+### Chore (BREAKING)
+
+ - <csr-id-91720798889ee7eb26da03a9e732caedda83b3e3/> upgrade to prodash v23
+
+## 0.30.2 (2022-12-26)
+
+<csr-id-114f184855b6177aa1f0dbf6e6589f23deb5ffe6/>
+
+### New Features
+
+ - <csr-id-38ae61a805bd8cca5df8d1c1dcf3a8a0f9c85f5a/> make more HTTP options available
+ - `http.schannelCheckRevoke`
+
+### Other
+
+ - <csr-id-114f184855b6177aa1f0dbf6e6589f23deb5ffe6/> provide a repository clone example
+
+## 0.30.1 (2022-12-22)
+
+### New Features
+
+ - <csr-id-ca84c87734804cbfc65e311b89ff6ccfc236149c/> `open::Options::open_path_as_is()` allows avoiding 'smart opening' in favor of trying the path verbatim.
+   Paths to git repositories are well-known: they either end in `.git` or `.../.git`.
+   If this is not the case, by default we append `/.git` to the path.
+
+   With this new option enabled, no path transformation is applied and the given path is opened as is,
+   which is preferable if you know it's a non-standard git repository folder name.
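+
+A minimal sketch, assuming `open_path_as_is()` is a builder toggle taking a boolean;
+the path below is hypothetical.
+
+```rust
+fn open_verbatim() -> Result<gix::Repository, Box<dyn std::error::Error>> {
+    // Hypothetical repository directory that neither ends in `.git` nor contains a `.git` dir.
+    let path = "/srv/repos/unusual-layout";
+    let repo = gix::open_opts(path, gix::open::Options::isolated().open_path_as_is(true))?;
+    Ok(repo)
+}
+```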
+
+## 0.30.0 (2022-12-19)
+
+<csr-id-fceee748c114b2d0760074e911e533cd020f6996/>
+
+### Changed
+
+ - <csr-id-a4ac9cf3e667a3059e33aac8188150529578622d/> represent `GIT_(COMMITTER|AUTHOR)_(NAME|EMAIL|DATE)` with git configuration.
+ That way it becomes more obvious where values are coming from.
+
+### New Features
+
+ - <csr-id-1683a848459cae2b9182b365e3e22b0e8ba73534/> expose `gix-features` crate at root under `features`.
+ That way application developers can use more of the utilities
+ that power most of the `gitoxide` plumbing crates.
+ - <csr-id-90ef6fc36b440cc4baf3fde4a30060f1b4a0c8cf/> `Remote` knows about its `tagOpt` configuration.
+ That way it's clear if it should or shouldn't fetch included/reachable
+ tags automatically.
+
+ The default setting for this is to include tags, similar to `git`.
+
+   The `fetch_tags()` accessor allows querying this information, and the
+   `with_fetch_tags()` builder method allows setting the value comfortably
+   right after creating the `Remote` instance (see the sketch after this list).
+
+ The `tagOpt` key will also be written as part of the remote's git
+ configuration.
+
+ Clone operations can set the `Tags` setting when configuring the
+ remote in a callback.
+
+ This also comes with a fix to assure that ref-updates aren't skipped
+ just because there was no pack to receive. That way, locally missing
+ refs or tags will automatically be put back.
+ - <csr-id-28e48083052216ddf1fd1f187cc22d506d3d9f86/> network related Error type support `is_spurious()` method.
+ That way the caller can determine more easily if it makes sense
+ to try again.
+ - <csr-id-457c2e081b1aa5dfaab3f663b9aba66c16369939/> Make `prodash::tree` available as `progress::tree`.
+ - <csr-id-d1b7ec605f8016c52c088477b6b0c5adf7ea0ab2/> read worktree-specific configuration to override the one from the shared repository.
+   This is used intensively when sparse checkouts are created, along with
+   cone mode. Thus it's the basis for properly interpreting sparse checkout
+   options which are set on a per-worktree basis.
+ - <csr-id-fc64693d5af0fda402c560d10d15652c24d14219/> add `permissions::Environment::http_transport`.
+ That way it's possible to deny using environment variables that affect
+ the HTTP transport, like setting the proxy.
+ - <csr-id-0ce29a965cf16273cf74bd22e40f464e322e2f62/> `open::Options::modify()` as general pattern to allow builder methods usage in `&mut self`.
+ That way it's easier to configure both the `full` and the `partial` trust instances
+ of discovery options.
+ - <csr-id-8482f90d0a2b61259cd51ca4f40ce322e388cb34/> Add `Repository::commit_as(committer, author, …)` convenience method.
+   That way it becomes possible, well beyond mere convenience, to set the time
+   of a commit.
+
+ Many thanks to @epage for the suggestion.
+ - <csr-id-c8835c6edae784c9ffcb69a674c0a6545dbb2af3/> upgrade to `prodash 21.1` and add `Ids` to all progress instances.
+ That way callers can identify progress they are interested in, say, for
+ selective visualizations.
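+
+A minimal sketch of the `tagOpt` behaviour described above; the URL is hypothetical
+and the `Tags` variant name is an assumption based on this entry.
+
+```rust
+fn remote_without_tags(repo: &gix::Repository) -> Result<(), Box<dyn std::error::Error>> {
+    // Create an unnamed remote (see 0.21.0 below) and disable automatic tag fetching.
+    let remote = repo
+        .remote_at("https://example.com/repo.git")?
+        .with_fetch_tags(gix::remote::fetch::Tags::None);
+    assert!(matches!(remote.fetch_tags(), gix::remote::fetch::Tags::None));
+    Ok(())
+}
+```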
+
+### Bug Fixes
+
+ - <csr-id-d659bda2e1b0fcab529df7af6467f063ae5d0dd7/> provide a clearer error message when trying to open a git repository that isn't one.
+ - <csr-id-ff0332e815c228cc5cdfe58c3598ad261bb2879e/> http transports can now reuse a connection.
+ This makes connections more efficient generally and `cargo` relies
+ on that behaviour in their tests as well.
+ - <csr-id-9079b9d2e5f7cc133c6f2b2c2e64245b150c7d74/> allow opening a `Repository` even if the 'config' file is missing.
+ In this case, treat it similar to having an empty repository configuration
+ file and assume defaults everywhere.
+ - <csr-id-40f7379b7a89f7fe6f916801384e9e65e5b85c57/> improve error verbosity when fetching and cloning
+ - <csr-id-b77fc86ab580dd81b08022996f07cc7925104e77/> `tree::diff::Platform::for_each_to_obtain_tree()` now properly surfaces user provided errors.
+ Previously it would squelch them unintentionally.
+
+ First discovered via https://github.com/Byron/crates-index-diff-rs/issues/35.
+ - <csr-id-5386eed6a13a32a850c59706b15d8988c67733ce/> when fetching from file://, don't upset windows by trying `d:/foo`, use `d:\\foo` instead.
+ - <csr-id-363ac7a74ec841505b5fc7cc1b8fae11c0a63ea9/> `config::CommitAutoRollback` now implements `DerefMut`.
+
+### Changed (BREAKING)
+
+ - <csr-id-3c84cebc5997356ff5f531c6cc9567bdd9b83eb5/> default features are set to `max-performance-safe` to assure compatibility.
+ Previously the `max-performance` setting might have caused issues during compilation
+ or issues at runtime if libraries like `git2` are used in the same binary, and the
+ new default feature settings maximizes compatibility so this won't happen.
+
+ For best performance, however, one will have to activate the `max-performance`
+ feature on top of that.
+ - <csr-id-5fe6aa3f3f2f81d84f0d96e874e68a8bf4de1db1/> environment variable permissions are per topic.
+   Now `Permissions` for environment variables are grouped by topic
+   instead of by prefix, by default. That way
+   it's easier to allow or deny particular sets of related
+   environment variables.
+
+ The catch-all permissions by prefix are still present for all
+ other variables that aren't contained in one particular topic.
+ - <csr-id-49f39d6bb487c0254176a5082f2c7851b83952a1/> `open::ReplacementObjects` is removed in favor of two custom git-configuration flags.
+ Now it's possible to map the environment variables `GIT_REPLACE_REF_BASE` and `GIT_NO_REPLACE_OBJECTS`
+ to custom git configuration keys which can also be set, namely `gitoxide.odb.replaceObjectsRefBase`
+ and `gitoxide.odb.noReplaceObjects`.
+
+ Along with the possibility of disabling the usage of `GIT_` prefixed environment variables one
+ reaches the previous level of control without making object replacement a special case.
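+
+A minimal sketch of switching off object replacement through the new keys. It assumes
+`config_overrides()` (see 0.26.0 below) accepts `key=value` strings; the values shown
+are purely illustrative.
+
+```rust
+fn open_without_replacements() -> Result<gix::Repository, Box<dyn std::error::Error>> {
+    let options = gix::open::Options::isolated().config_overrides([
+        // Ignore `refs/replace/` entirely (assumed boolean value syntax).
+        "gitoxide.odb.noReplaceObjects=true",
+        // The default ref base, shown for completeness.
+        "gitoxide.odb.replaceObjectsRefBase=refs/replace/",
+    ]);
+    Ok(gix::open_opts(".", options)?)
+}
+```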
+
+### New Features (BREAKING)
+
+ - <csr-id-f8a2bfb93dadbc64393135e0a447f3d76628509c/> `interrupts::init_handler()` can now be undone.
+ This can be done by calling `deregister()` or `auto_deregister()` on the return value
+ of `interrupts::init_handler(…)`.
+
+ That way it's possible to temporarily do interrupt handling only while some methods
+ that require it are running.
+ - <csr-id-becbd8d896a1663f1607be4e86e632773e926f1f/> represent object cache configuration like `GITOXIDE_PACK_CACHE_MEMORY` in git-configuration.
+ That way there is a unified system for how to set values, which may be overridable by configuration
+ variables or not.
+
+ With this changes, the explicit application of environment variables for setting the cache
+ isn't required anymore as everything happens using git-configuration, and automatically,
+ while providing full control like before.
+ - <csr-id-f16e36168cc93768ba5d53c9848ff2e8432d06b1/> remove `SnapshotMut::apply_cli_overrides()` in favor of `open::Options::cli_overrides()`.
+ - <csr-id-84d594caf3f04f1ce337e455343278a6f4674957/> more type-safety for remote names by making clear they can be named after URLs.
+
+### Other (BREAKING)
+
+ - <csr-id-fceee748c114b2d0760074e911e533cd020f6996/> `Remote::with_refspec()` to `Remote::with_refspecs()` to allow adding more than one refspec as part of the builder.
+
+## 0.29.0 (2022-11-21)
+
+<csr-id-f302fc1bcd06fadccd126f4f5f9c0165afabedda/>
+
+### New Features
+
+<csr-id-ff9e1571b558475e727dc6ba11dab24ef15fb6f4/>
+
+ - <csr-id-3ddbd2de369b521fa3f21935f10fe9c248840893/> Make `reqwest` TLS backend configuration easy.
+   We provide the choice of `native-tls` or `rust-tls`. If neither is
+   provided, the user can configure it on their own, similar to how it's done
+   in `gix`.
+
+ Please note that a choice now has to be made or HTTPS will not be
+ available, so use one of…
+
+ * blocking-http-transport-reqwest-rust-tls
+ * blocking-http-transport-reqwest-native-tls
+
+### Bug Fixes
+
+ - <csr-id-c6a690219915b2b401d2d11f61db35b2931e5b3a/> `gix::Commit::describe()` chooses tag names (more) correctly.
+ Previously, if there were multiple choices for tags on the same commit,
+ `git describe` would disagree with `gitoxide` due to different
+ prioritization of names.
+
+ This has now been fixed.
+ - <csr-id-84ed89c3bf6692f18c4bb97173527de1bcba7ac6/> also sort entries lexicographically
+
+### Other
+
+ - <csr-id-f302fc1bcd06fadccd126f4f5f9c0165afabedda/> Set GIT_EDITOR in make_rebase_i_repo.sh
+ If the user has core.editor set in their global git config, then that value
+ takes precedence over the EDITOR environment variable. The GIT_EDITOR
+ environment variable, however, has higher precedence than core.editor. For
+ this test, using GIT_EDITOR ensures that the desired sed command line is
+ used.
+
+### New Features (BREAKING)
+
+ - <csr-id-bc2a399f2fbb69d23b0b05e8dfb95f3c64ff93b9/> rename `blocking-http-transport` feature to `blocking-http-transport-curl`; add `blocking-http-transport-reqwest`.
+ With the new and relatively immature second tier http backend we pave
+ the way to support builds without the use of open-ssl and probably many
+ other C libraries.
+
+   Note that it's early and no `http` configuration options are implemented
+   yet.
+ - <csr-id-3d8fa8fef9800b1576beab8a5bc39b821157a5ed/> upgrade edition to 2021 in most crates.
+   MSRV for this is 1.56, and we are now at 1.60, so it should be compatible.
+ This isn't more than a patch release as it should break nobody
+ who is adhering to the MSRV, but let's be careful and mark it
+ breaking.
+
+ Note that `gix-features` and `gix-pack` are still on edition 2018
+ as they make use of a workaround to support (safe) mutable access
+ to non-overlapping entries in a slice which doesn't work anymore
+ in edition 2021.
+
+## 0.28.0 (2022-11-17)
+
+<csr-id-6beb6f263fd40884b440092f39034dd43d3a95de/>
+
+### New Features
+
+ - <csr-id-58e14884b1d025651f874d899cb2d627c4a2afbf/> `Id` implements `std::fmt::Display`
+ - <csr-id-25f7aabe38267b6b6c0547806028b2becb806416/> `Remote::repo()` to obtain the underlying repository.
+ For convenience.
+ - <csr-id-709a73229b7cde56ddffa099158661632c606468/> Support for user-customizable user agent strings.
+ Doable by setting the `gitoxide.userAgent` variable.
+ - <csr-id-e60d07997989993216c2bd93efeb6f1b48da0a87/> add `env::agent()` for obtaining the default client agent string.
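+
+A minimal sketch tying the last two entries together; it assumes a configured
+`gitoxide.userAgent` value, if present, takes precedence over the built-in default
+returned by `env::agent()`, and that the snapshot lookup accepts dotted keys.
+
+```rust
+fn client_agent(repo: &gix::Repository) -> String {
+    repo.config_snapshot()
+        .string("gitoxide.userAgent")
+        .map(|value| value.to_string())
+        .unwrap_or_else(|| gix::env::agent().to_string())
+}
+```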
+
+### Other
+
+ - <csr-id-6beb6f263fd40884b440092f39034dd43d3a95de/> try to apply maybe-async in a place where it's probably not possible.
+ The goal is to re-use the existing tests, but right now they only
+   compile in async mode as the `maybe-async` crate needs
+   a feature to be set. Doing so is hard(er) if it's not already used
+   in the main crate, which we do not do and will do our best to avoid.
+
+### New Features (BREAKING)
+
+ - <csr-id-db9040f0bb3a16879c8da0252a77df80bd417387/> add `remote::Connection::with_transport_config()`, change the way `*::transport_mut()` is used.
+ Previously `transport_mut()` was supposed to be used for calling
+ `configure()`, but that doesn't work anymore as `configure()` can
+ only effectively be called once the initialization of the Connection
+ is complete, as it may depend on the Remote name AND the credential
+ provider for proxy auth credential acquisition.
+
+ Thus we allow callers to set the transport options they need in advance
+ for it to be used when needed.
+
+## 0.27.0 (2022-11-08)
+
+### Changed (BREAKING)
+
+ - <csr-id-c50868c7ed7309515b4f0a188213d332d57dd146/> Move `object::tree::diff::change::DiffPlatform` to `object::blob::diff::Platform`.
+ - <csr-id-4ee32713093c2e41a12d148c6030add1df6aa966/> new `DiffPlatform::counts()`, open `DiffPlatform` for use of `gix-diff::blob::*`.
+
+## 0.26.0 (2022-11-06)
+
+<csr-id-c6f92c15529ddff7539667b74bafa2348f3040e3/>
+
+### New Features
+
+ - <csr-id-b1edb9e3537df86669714f03666f4a88e0ac3709/> diff algorithm is controlled by git configuration `diff.algorithm`
+ - <csr-id-072f5bc9c91c4c09bd6a73f9d7ac672805cae533/> Query of `core.logAllRefUpdates` is now fallible.
+ This is the same behaviour as shown by `git`, which requires valid
+ values or aborts.
+ - <csr-id-2eaf69e5f8f8da10e5af85cb9f0c39577ad1707f/> automatically handle `.keep` files after writing a pack bundle to disk.
+ The logic implemented here tries to do the right thing, that is when
+ we have reason to believe that the objects in the pack are linked up
+ with a ref, we delete the keep file automatically.
+
+   However, if there was no local ref edit because the ref specs didn't contain
+   local destinations, or if the pack was empty, then we keep the `.keep` file
+   and leave it to the caller to handle.
+ - <csr-id-8b9fbd4e9ed7be37976c7203cd9a89c6116a6d3d/> Use `core.askpass` when building the credential helper.
+   Previously it would only consider the environment variable, which can
+   still override the git configuration value at `core.askpass`.
+ - <csr-id-a9d14492322785a14f4ecb5b0d3dbdc87e56f8c5/> `remote::fetch::Prepare::handshake_outcome()` to obtain server information right after listing refs.
+ - <csr-id-0b5c53ec43bdb58b2b7cf46e453ddf858770a95a/> `open::Options::config_overrides()` for early configuration; support for `init.defaultBranch`.
+
+### Bug Fixes
+
+ - <csr-id-f869b224170b0c49a0e4d89e88bfbf5caedaa725/> don't allow non-bare repositories to be initialized into non-empty directories.
+ - <csr-id-91baefad02a0d52c745106359da3693d06aace46/> `init_bare()` now creates the destination directory if it doesn't exist.
+ - <csr-id-5c11b84f4e74e3eefdd0f5804976ebfc505e0f2f/> build correct path for `$HOME/.config/…` files.
+ The special per-user `ignore` and `attributes` files can also be
+ defaulted if some environment variables are set and may be accessed.
+
+ Previously the default for `$HOME` was incorrect, as it was missing the
+ intermediate `.config/` directory. This is now present to build paths
+ exactly like git.
+ - <csr-id-275e80f3d602b63ef91efe31a92b4aafb2eeca44/> ref-map filtering now uses correct prefixes.
+ Previously specs could get filtered out server-side as a matching prefix
+ was entirely missing.
+
+### Changed (BREAKING)
+
+ - <csr-id-449ff066d2b5dd423c639618193dd9e54d03c1f8/> `Repository::branch_remote_name()` returns `reference::remote::Name`.
+   That way it's made clear the remote can also be a URL, while rejecting
+   ill-formed UTF-8. The latter isn't valid for remote names anyway as these
+   only support a very limited character set.
+
+   Note that this error is currently degenerate, making it appear as if the
+   remote name doesn't exist when ill-formed UTF-8 is found in what appears
+   to be a symbolic ref.
+ - <csr-id-71f15fc46fbaea455cf84a2b4cfe3e680047d790/> be specific about the kind of `diff::Error`, moving it to `diff::for_each::Error`.
+
+### New Features (BREAKING)
+
+ - <csr-id-7413a284eb7754e63ba45d0f526347b9f79b557d/> `Tree::lookup_entry*()` returns attached `Entry` type.
+ That way chaining gets even easier.
+
+### Bug Fixes (BREAKING)
+
+ - <csr-id-2bece79285e244a7029f9393dafc990e39420e2d/> `create::into(…)` takes `create::Kind` to determine if it's bare or not.
+ First of all, `bare` is not an option anymore, but a parameter as
+ it can't be defaulted.
+ Other public signatures change as well to accommodate for it.
+
+### Other (BREAKING)
+
+ - <csr-id-c6f92c15529ddff7539667b74bafa2348f3040e3/> `DiffPlatform::text()` to `*::lines()`.
+ This is more specific as one could also do character changes in a single
+ line, and it adapts the signature to the new `imra-diff` powered
+ mechanism, for a 2x speed boost.
+
+## 0.25.0 (2022-10-10)
+
+<csr-id-5bef0a00e8d01110c054a517f6d9696f981a7efc/>
+
+### New Features
+
+ - <csr-id-22d3b37ea6239170a478b859361a7d1d7ba01a9a/> `Url::try_from(path: &std::path::Path)` for more convenient instantiation.
+ - <csr-id-31a7089f2583832727e2175ada6fb5c30c3beebe/> make some private methods public to give callers more flexibility.
+   This allows implementing the fetch-negotiation part oneself and breaking
+   free from the constraints of the delegate.
+ - <csr-id-4367994a8a7476eb44e1309e833e345fdb78f246/> add `config::SnapshotMut::commit()` to make clear it's transactional.
+ - <csr-id-d2bea003230078ffb4e6cd80d1b01c3995435a34/> add `config::SnapshotMut::forget()` to forget all changes before applying them.
+   The documentation was updated to make clear when the changes are applied; a sketch follows after this list.
+ - <csr-id-4b1e3b3d91c51da3dbea9191e60f959a1266cf47/> add `Repository::find_default_remote()` which works on detached heads as well.
+ - <csr-id-25f06400c49ddd1688fd76f9285542b121b223b4/> `Remote::rem_map()` now specifies ref-prefixes to the remote.
+ This can greatly reduce the amount of refs sent.
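+
+A minimal sketch of the transactional flow mentioned in the `SnapshotMut` entries
+above. The `set_raw_value()` call stands in for any edit; its exact parameter shape
+differs between `gix-config` versions and is an assumption here.
+
+```rust
+fn set_user_name(repo: &mut gix::Repository) -> Result<(), Box<dyn std::error::Error>> {
+    let mut config = repo.config_snapshot_mut();
+    // Any number of edits can be staged here; `forget()` would drop them instead.
+    config.set_raw_value("user", None, "name", "gitoxide tester")?;
+    config.commit()?;
+    Ok(())
+}
+```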
+
+### Other
+
+ - <csr-id-5bef0a00e8d01110c054a517f6d9696f981a7efc/> try to make the transport configurable after being boxed, but…
+ …that would force it to be 'static, which is something we explicitly
+ cannot have. We need references to be contained within, if I remember
+ correctly.
+
+### Changed (BREAKING)
+
+ - <csr-id-e88de0f948325773db1925b07aa878e1dbb76bad/> All methods editing references don't take the author as parameter anymore.
+ Instead, these are taken from the git configuration and can be
+ configured on the fly with temporarily altered configuration.
+ - <csr-id-3a0fb1b45c757add49677450836c0aaf6179a2b5/> remove `lock_mode` from all methods dealing with reference edits.
+ It is now read from `core.filesRefLockTimeout` accordingly.
+
+### New Features (BREAKING)
+
+ - <csr-id-3b29fc18672c0176684c797a0f16f85d09369bf8/> make jwalk fully optional
+ - <csr-id-78ad3df64f2c016ba17b158bd9ab1d2341aab399/> add `fetch::Transport::configure` to generically configure any transport.
+
+## 0.24.0 (2022-09-20)
+
+<csr-id-f5959edc1477573278afcfe23e9e52ddaacb37db/>
+<csr-id-79c22557ce0aea1ee8f3a58192c2c76087ccd3d8/>
+
+### New Features
+
+ - <csr-id-0871a96b9cc84d7a496d39393e081999c0a3fe17/> `Object::peel_to_tree()` as convenience method.
+   It's very common to want to work with trees, so let's make that easier (see the sketch after this list).
+ - <csr-id-1027be960852618915014f9ba6e6632bd4999b0e/> `interrupt::Iter` now allows accessing the inner iterator without consumption.
+ This is useful if these provide additional out-of-band information.
+ - <csr-id-8c2e5c60f9f5f8d0859ecd84c17af20e88812512/> Once a change is obtained, it's easy to obtain changes line by line.
+ - <csr-id-7e96d1841989b37133cddf334724a2d6df557e69/> obtain a refmap after listing refs via `remote::Connection::list_refs_to_map()`.
+ With it it's possible to establish a relationship between what's about
+ to be fetched to local tracking branches as established by refspecs for
+ fetching.
+ - <csr-id-d51e7c901fe5ed60d5dd56006c5faedb71cad537/> Add `permissions::Config::git_binary` field
+   When true (the default is false), the git installation's configuration file
+   is injected, if present, at the cost of one `git config` invocation.
+
+ Note that we rely on the underlying `gix-config` crate to not load
+ duplicate files.
+
+   We also currently lie about the scope, which is actually unclear - we have
+   seen 'unknown' as well as normal scopes like `system`.
+ - <csr-id-1c13f1125664fbcc276a1ca440d168d07d0bf493/> add `prompt` to top-level forwarding (#450)
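+
+A minimal sketch for the `peel_to_tree()` entry above; `rev_parse_single()` appears
+elsewhere in this changelog, and error handling is simplified.
+
+```rust
+fn head_tree(repo: &gix::Repository) -> Result<gix::Tree<'_>, Box<dyn std::error::Error>> {
+    // Resolve `HEAD` to an object and peel it down to the tree it points to.
+    let object = repo.rev_parse_single("HEAD")?.object()?;
+    Ok(object.peel_to_tree()?)
+}
+```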
+
+### Bug Fixes
+
+ - <csr-id-ae3866065c9c3c6d01709f8dde1cea1ae1345779/> rev-spec parsing can now handle the empty tree as full hex hash.
+ Even though the empty-tree object can be found when searched via
+ `Repository::find_object()`, previously it was not locatable when
+ used during rev-spec parsing.
+ - <csr-id-74ede2031d1beedf11f1cdf006fff71e597a2cb5/> `Reference::remote()` can produce remotes for URLs
+
+### Refactor
+
+ - <csr-id-f5959edc1477573278afcfe23e9e52ddaacb37db/> use specific error type for `rev_parse_single()`
+
+### Changed (BREAKING)
+
+ - <csr-id-99905bacace8aed42b16d43f0f04cae996cb971c/> upgrade `bstr` to `1.0.1`
+
+### New Features (BREAKING)
+
+ - <csr-id-2992b1ba4e7bbeab26f41175cd31fd664abf2a11/> Add reference remote name type to make usage of `remote_name()` result clear
+
+### Other (BREAKING)
+
+ - <csr-id-79c22557ce0aea1ee8f3a58192c2c76087ccd3d8/> `Tree::lookup_path()` -> `Tree::lookup_entry()`.
+
+## 0.23.1 (2022-09-01)
+
+### Changed (BREAKING)
+
+ - <csr-id-36d8c57824a2b921012439105e49261fac66c955/> Remove 'unstable' feature.
+ It's not worth maintaining it especially since everything is in
+ pre-release mode right now.
+
+ It might be something to re-introduce after go-live.
+
+### Bug Fixes
+
+ - <csr-id-d18e76cfb512ef7fe5bfee6e87726372c6a4a8b6/> `max-performance-safe` mode does not include zlib-ng adjustments anymore.
+ git2 cannot handle this and fails to fetch packs after a couple of
+ seconds.
+
+   It's unclear what is causing this except that git2 doesn't like libz
+   with zlib-ng support enabled, which happens if git2 is used in the
+   same dependency tree as us.
+ - Transitively through a kindly contributed fix in the `gix-discover` crate, `Repository` can now be opened on `exFat` volumes.
+
+## 0.23.0 (2022-08-28)
+
+### New Features
+
+ - <csr-id-70aa850591de268488ae9bf2d3839a5c9c543c35/> The empty tree can always be returned by `Repository::(try_)find_object()`
+ This matches the behaviour of git and libgit2.
+ We consciously chose to only do this on the highest level, allowing lower
+ levels to determine if the object exists or not.
+ - <csr-id-8d0786646e17a82d20ca6b2799b54f6349d389f4/> Make `find::object::*::Error` publicly available.
+ - <csr-id-2d0b63997b276a53b3cf8f09fac51f8e3f044bcd/> Add `Reference::delete()` for simple reference deletion
+ - <csr-id-9170562059c3eaa529850025ef35ac5ffffc0fdf/> `Reference::set_target_id()` to easily set the target id of a reference
+ - <csr-id-950da602925e6376b08640ed3ebfdf407394db34/> `Reference::head_ref()` to quickly access the reference the head points to.
+
+### Bug Fixes
+
+ - <csr-id-2834311b4f262c57e76627addaa4932196fd26b3/> `Commit::tree_id()` now returns a connected id
+
+### New Features (BREAKING)
+
+ - <csr-id-e090f843f5cffc8e8e47a8cac2e6fb98e4c47771/> `gix-diff` is now included by default as part of core functionality
+
+## 0.22.1 (2022-08-24)
+
+A maintenance release without user facing changes.
+
+## 0.22.0 (2022-08-24)
+
+<csr-id-f7f136dbe4f86e7dee1d54835c420ec07c96cd78/>
+
+### Chore
+
+ - <csr-id-f7f136dbe4f86e7dee1d54835c420ec07c96cd78/> uniformize deny attributes
+
+### New Features
+
+ - <csr-id-c28bcec19b5526acf888f928e6ddc4671873368e/> support avoiding usage of `fast-sha1` in gix-features separately.
+ That way one has an angle on compile failures in client libraries,
+ see https://github.com/o2sh/onefetch/pull/752 for motivation.
+ - <csr-id-4f87a0672f7288486a9b6403c0bb07a6279d2cfe/> `Repository::write_blob[_stream]()` to more easily write blobs.
+   That way, one won't have to use the underlying `objects` database but
+   can remain in the land of `Repository`-enabled types for longer (see the sketch after this list).
+ - <csr-id-d35cd2a12c6db3d655ce10cad5c027bde99e19b4/> `SnapshotMut::apply_cli_overrides()` to make it easy to support things like `-c`
+ - <csr-id-2a839f3209f3bd35e0c0f7edff664cc953059f65/> `Repository::config_snapshot_mut()` to mutate configuration values in memory.
+   It's a first step towards writing changes back to disk, which can work
+   already but probably wouldn't do the right thing yet, as we currently don't
+   localize changes to only one section type, i.e. Api, but instead may change
+   values from other sections.
+ - <csr-id-b1c40b0364ef092cd52d03b34f491b254816b18d/> use docsrs feature in code to show what is feature-gated automatically on docs.rs
+ - <csr-id-517677147f1c17304c62cf97a1dd09f232ebf5db/> pass --cfg docsrs when compiling for https://docs.rs
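+
+A minimal sketch for the blob-writing entry above; it assumes `write_blob()` takes a
+byte slice and returns an object id, which prints thanks to `Display` on `Id` (see
+0.28.0 above).
+
+```rust
+fn store_blob(repo: &gix::Repository) -> Result<(), Box<dyn std::error::Error>> {
+    // Write the bytes as a blob into the object database and report its id.
+    let id = repo.write_blob(b"hello, gitoxide")?;
+    println!("wrote blob {id}");
+    Ok(())
+}
+```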
+
+### Bug Fixes
+
+ - <csr-id-ff71730b4e3635533d9969d9dd44c0f3c75c6648/> Don't fail worktree tests if the system's git version does not support the required features
+
+## 0.21.1 (2022-08-19)
+
+A maintenance release that speeds up `commit.describe()` performance if `max_candidates()` is 0.
+
+## 0.21.0 (2022-08-17)
+
+<csr-id-b38a212459e2646ab97ad7b5c24e54d962aae960/>
+
+### Changed
+
+ - <csr-id-0235111a4fcc40c7b57d973bfce27a66eddea901/> Invert behaviour to `open::Options::strict_config()`, with lenient being the default.
+ This means API users will get libgit2 behaviour but commands like `gix` can
+ change options to emulate `git` behaviour.
+
+### New Features
+
+ - <csr-id-a01525d159a33d6ad60a5324f2e9abbbe17fcfad/> `Kind` can now represent submodules.
+ This should complete the list of git repository types and flavors.
+ - <csr-id-5dac021bbbc5621167e7f49d62b11f68f76e42b6/> `open()` and `discover()` support opening submodules.
+ This includes submodule checkouts as well as their original module git
+ directories.
+ - <csr-id-067c3342f3564dd7f152a720e93e3aa590ae6524/> `open::Options::lenient_config(…)` to default otherwise invalid configuration values where possible
+ Originally required by https://github.com/starship/starship/issues/4266 .
+ - <csr-id-0bf8371706d319681c3f794af5cd13f2f50a27d0/> support core.worktree option
+ - <csr-id-b47bbb787ef2e31dd2612a56f9e7759ef8a188b8/> display for `object::tree::EntryRef`
+ - <csr-id-727768a49c41165b03ddcdbc71ca88b66c330f32/> `Head::prior_checked_out_branches()`
+ - <csr-id-ffe72918baf5c4c9f0f3709f75f068a663778588/> `Repository::index()` and `Worktree::index()`.
+ These methods provide a possibly updated shared index.
+ - <csr-id-47619f7c06a49dcf60a30e1a43a5352914183092/> add `Repository::object_cache_size_if_unset()`
+ - <csr-id-d2611cee61841bc7bd978bef5af7dc66154248a2/> `Commit::message_raw_sloppy()` to provide yet another way to obtain a commit message.
+ - <csr-id-906c95845fa4aa2d4390c522bb566a188b8c0e78/> add `rev_spec::parse::ObjectKindHint` to support `core.disambiguate`.
+ The latter is seemingly undocumented in the typical place, gix-config.
+ - <csr-id-ef187f0180d89544d9015cbc2bc03d8cb51f4615/> `Remote::with_refspec()` to add new unique refspecs
+ - <csr-id-d51ba42c643d8ee03a3c6b648f8524ff04827170/> `Remote::push_url()` to set it after the fact
+ - <csr-id-9b07b91ad065836e7473df6635025659af2865ee/> `Repository::remote_at(…)` to create an unnamed remote
+ - <csr-id-a67fc26b80e5d1183ddc5c6598396214f3e19945/> more conversions for `TryFrom`: `String` and `&str`
+ - <csr-id-7a512ecdf236afc0b3d562d60fa81ab62c00cd9d/> `Head::into_remote()` to try really hard to find the correct remote
+ - <csr-id-f392f26bec6069ac43ecd461b4f50e0def8b8972/> `Repository::remote_default_name()` to obtain the repo-wide remote for a direction.
+ - <csr-id-f47464f64f7c21500a24f563b25a8fc070c41778/> `Repository::branch_names()` to obtain branch names for which configuration exists.
+
+### Bug Fixes
+
+ - <csr-id-be6114e7c4ac48467db6acb2180b443dc9f59f32/> assure permissions per trust level are properly inherited into `open::Options`.
+ - <csr-id-270242c707bd086b7746ee45b55791587f1484b1/> provide additional explanation about when to use `open::Options::with()`
+
+### Refactor
+
+ - <csr-id-b38a212459e2646ab97ad7b5c24e54d962aae960/> embrace `revision` module and move `rev_walk` there.
+ Let's embrace the idea of structured modules and platforms in the right
+ spot in the module hierarchy instead of forcing known names on it that
+ over-simplify.
+
+### Changed (BREAKING)
+
+ - <csr-id-0deda0df55c11647f51374ed5a8bf11c932e2bae/> remove `permissions::Config::strict()` as they were unused internally.
+ Furthermore, they were allowing everything as before so better not to
+ have it.
+ - <csr-id-1c12d49eefa6d79ef50b2960f41b29de680ac8eb/> rename `Repository::load_mailmap*` to `Repository::open_mailmap*`.
+ For consistency with other similar methods.
+ - <csr-id-ea35183b53f2ff71bdf2270ac4f7470a85d7756f/> remove `Repository::load_index()` in favor of `repo.worktree().open_index()`.
+ - <csr-id-4fd096840ba27da6ce86678a85ede33e3be974ff/> `gix_revision` is now available in `revision::plumbing`.
+ That way it won't clash with the higher-level constructs on top of it
+ which use the same names.
+ - <csr-id-2424957cff75daacf6f6f14f74b9639f6875c4fb/> Turn `id::Ancestors` into general-purpose `RevWalk`.
+ - <csr-id-1df379ab0046887a330c0a670ad0414e79cfae7b/> remove `Permissions::git_dir` field entirely.
+ It was meant to help dealing with bailing out if the git dir isn't
+ fully trusted, but the way this was done was over-engineered especially
+ since the read-only permission level wasn't implemented at all.
+
+ That function is now performed by a new flag, the `bail_on_untrusted`
+ which is off by default.
+ - <csr-id-5ab81ece15ec802ad4328ce31304233bd25b2929/> rename `Repository::remote_ref()` to `::branch_remote_ref()`
+
+### New Features (BREAKING)
+
+ - <csr-id-e2aff28e818951785d933f4b55b2f1b882729cb6/> `Repository::rev_parse()` returns a `RevSpec`.
+ This lays the foundation for actually handling rev-specs faithfully.
+   Previous users should call `rev_parse().single()` to obtain a single
+   object id, which was the only supported use case before.
+
+### Bug Fixes (BREAKING)
+
+ - <csr-id-c68b125a46f666700cdbda6f8cd39a044f4feb1b/> Don't panic for `@{1}` in new repos; rename `Head::into_referent()` to `::try_into_referent()`
+ The signature change will prevent such issues in the future as one
+ cannot simply ignore new repositories.
+
+## 0.20.0 (2022-07-22)
+
+### New Features
+
+ - <csr-id-1b765ec6ae70d1f4cc5a885b3c68d6f3335ba827/> respect `safe.directory`.
+ In practice, this code will rarely be hit as it would require very
+ strict settings that forbid any operation within a non-owned git
+ directory.
+ - <csr-id-840d9a3018d11146bb8e80fc92693c65eb534d91/> permissions for configuration.
+ It provides fine-grained control over what sources to load.
+ - <csr-id-657080829867d9dcb0c9b9cb6c1c8126c4df3783/> `gix-config` is now accessible in `git-repository::config`.
+ - <csr-id-d99453ebeb970ed493be236def299d1e82b01f83/> `gix config` lists all entries of all configuration files git considers.
+   Filters allow narrowing down the output.
+ - <csr-id-ebedd03e119aa5d46da07e577bfccad621eaecb5/> repository now initializes global configuration files and resolves includes
+ - <csr-id-de8572ff2ced9422832e1ba433955c33f0994675/> resolve includes in local repository configuration
+ - <csr-id-d5a48b82230b047434610550aacd2dc741b4b5f0/> `config::Snapshot::trusted_path()` to obtain trustworthy paths.
+ We also apply trust-based config query during initialization to assure
+ we don't use paths which aren't owned by the current user.
+ - <csr-id-5f9bfa89ceb61f484be80575b0379bbf9d7a36b3/> `Repository::config_snapshot()` to access configuration values.
+ - <csr-id-7f67b23b9462b805591b1fe5a8406f8d7404f372/> Use `gix-config` to write config file on initialization, including `logallrefupdates` and `precomposeunicode`.
+ - <csr-id-e263e13d312e41aa1481d104fa79ede509fbe1c5/> respect `core.logallrefupdates` configuration setting.
+
+### Changed (BREAKING)
+
+ - <csr-id-68f4bc2570d455c762da7e3d675b9b507cec69bb/> Make `SignatureRef<'_>` mandatory for editing reference changelogs.
+ If defaults are desired, these can be set by the caller.
+ - <csr-id-f932cea68ece997f711add3368db53aeb8cdf064/> `Repository::committer()` now returns an `Option`, see `::committer_or_default()` for a method that doesn't.
+ - <csr-id-89a41bf2b37db29b9983b4e5492cfd67ed490b23/> remove local-time-support feature toggle.
+   We treat local time as a default feature without a lot of fuss, and
+   will eventually document that definitive support needs a compile-time
+   switch in the compiler (`--cfg unsound_local_offset` or something).
+
+ One day it will perish. Failure is possible anyway and we will write
+ code to deal with it while minimizing the amount of system time
+ fetches when asking for the current local time.
+ - <csr-id-6f4eea936d64fb9827277c160f989168e7b1dba2/> Associate `file::Metadata` with each `File`.
+ This is the first step towards knowing more about the source of each
+ value to filter them based on some properties.
+
+ This breaks various methods handling the instantiation of configuration
+ files as `file::Metadata` typically has to be provided by the caller
+ now or be associated with each path to read configuration from.
+
+### New Features (BREAKING)
+
+ - <csr-id-d003c0f139d61e3bd998a0283a9c7af25a60db02/> Support for `lossy` load mode.
+ There is a lot of breaking changes as `file::from_paths::Options` now
+ became `file::init::Options`, and the same goes for the error type.
+ - <csr-id-311d4b447daf8d4364670382a20901468748d34d/> change mostly internal uses of [u8] to BString/BStr
+
+## 0.19.0 (2022-06-13)
+
+### New Features (BREAKING)
+
+ - <csr-id-266d4379e9132fd7dd21e6c8fccb36e125069d6e/> Make `realpath()` easier to use by introducing `realpath_opt()`.
+ That way there is consistency about how many symlinks to follow.
+
+## 0.18.1 (2022-05-23)
+
+### New Features
+
+ - <csr-id-c78baecbb37fd92a0a86231810c9e35e9a4c21cd/> `Debug` for `Reference`.
+
+## 0.18.0 (2022-05-21)
+
+<csr-id-e63e722791a7795cd99048bed834459595c60abc/>
+
+### Other
+
+ - <csr-id-e63e722791a7795cd99048bed834459595c60abc/> add ceiling_dirs option to upwards discovery
+
+## 0.17.0 (2022-05-18)
+
+<csr-id-53c06c7e6a3003b34edaab10db1f158e2fb57403/>
+<csr-id-e4f4c4b2c75a63a40a174e3a006ea64ef8d78809/>
+<csr-id-da8059ce26343c8cd275f43c879d98c92f77fa51/>
+
+### New Features
+
+ - <csr-id-45920da7c8c5618c6e7258de08dbd633a637d017/> Add `Repository::head_name()`.
+ A convenient way to obtain the name of a head, if not detached.
+
+### Bug Fixes
+
+ - <csr-id-a1680b44ef568317465d2971da6e0930f9885530/> `Commit::describe()` now returns annotated tags before lightweight ones and prefers more recent ones as well
+ - <csr-id-99365f221065ebc315ac80940ad72cae253743bc/> Support for truncated history in git-describe
+ This allows `describe()` to work on shallow clones.
+
+### Other
+
+ - <csr-id-53c06c7e6a3003b34edaab10db1f158e2fb57403/> allow reading information about remote branch
+ - <csr-id-e4f4c4b2c75a63a40a174e3a006ea64ef8d78809/> `path::discover()` now returns the shortest path.
+ If and only if it canonicalized the source path. That way, users will
+ still get a familiar path. This is due to `parent()` not operating
+ in the file system, which otherwise would be equivalent to `..`,
+ but that's not how we work.
+
+   Maybe we should overhaul the way this works to use `../` instead
+   and just 'absolutize' the path later (`std::path::absolute()` is
+   on the way for that).
+ - <csr-id-da8059ce26343c8cd275f43c879d98c92f77fa51/> remove unused variant
+
+### Changed (BREAKING)
+
+ - <csr-id-80e8fd4a5944890f43f3d888b7a73bb26351b195/> integrate trust model into repository discovery
+ That way it's possible to ignore repositories which effectively
+ aren't owned by the current user, or to not ignore them (default)
+ but assign tighter permissions to the repository.
+ - <csr-id-2e39b0ede98826e6f85c56fef77ac65a5b7e7ac2/> `path::discover::existing()` -> `path::discover()`
+ - <csr-id-38dfdcf80f9b7368ccaa10f4b78b2129849848d0/> remove `values::*Error` in favor of `value::parse::Error`.
+   This makes it easier to work with errors in practice: we are either
+   interested in the value that failed to parse, in order to try something else,
+   or we want a nice user message.
+
+ Having one decode error type facilitates that.
+
+### New Features (BREAKING)
+
+ - <csr-id-32dc1829a5661f66396d109c8d0a8eaae6b1f532/> use `gix-credentials` in `gix-protocol`
+
+## 0.16.0 (2022-04-05)
+
+### New Features
+
+ - <csr-id-47556f6815148ed960a727fd122f7162345544c3/> auto-calculation of a good hex-len, like what git does
+ If the `core.abbrev` value isn't set or is set to `auto`.
+ - <csr-id-654f4afb794a370b7cd9d9502ff6d0c3378ec417/> `Commit::describe()`
+ A way to fluidly configure a `git describe` operation and run it.
+
+ Along that, a new `Tag` top-level object was added as well to provide
+ convenient access to otherwise lower-level objects. It's not strictly
+ required for our implementation here but it's needed for a symmetric
+ API.
+
+## 0.15.0 (2022-04-03)
+
+<csr-id-5f7595305efc85d6ca3c541e9f9adac3915cbd84/>
+<csr-id-c10f07c50f6dde4b39bf1e3ff26c239c5f202912/>
+<csr-id-bbc6efeceb26050973e1425e68a52e51b9df4572/>
+
+### New Features
+
+ - <csr-id-1322dbf6827ea5cc1d71175afb669e01fb1242ef/> support for object replacement
+ The Repository now respects replacement refs created by `git replace`
+ and picks up environment variables for its configuration as well.
+
+ Which environment variables are used is fully configurable.
+ - <csr-id-a39bf71531ee0a6c8db082758d3212c805ce2bf0/> support for trimming of whitespace around name and email
+ It's separated from parsing to assure we can round-trip, but it's
+ made easy to obtain trimmed results using new methods.
+
+ This high-level git-repository will also trim by default now.
+ - <csr-id-00578040a699e1939b3d3813616d3cc4e1d8669e/> `Repository::head_commit()`
+   A shortcut to get to the commit much faster (see the sketch after this list).
+ - <csr-id-def80df2e165b74f4b053e4030f563902b7d34a4/> `ein tool estimate-hours` now supports mailmaps
+ - <csr-id-f0d8a49587c08713350252e1701a45bb308b6f9d/> `Repository::head_id()`
+ A long-needed shortcut.
+ - <csr-id-d2388d8d80f379eccc9ee84ebe07acd67d154630/> `gix repository mailmap entries`
+ - <csr-id-e3bc1b410409a9e27894a5cac48b06d8c3295e36/> unstable mailmap module
+ - <csr-id-1be00cf9e00ce9428ffddb2c79b2373926069b13/> `Commit::short_id()`
+ - <csr-id-c7dff9e8b695d298a3fb21f19f51752a885a5ce3/> in-manifest and in-lib documentation of feature toggles
+ - <csr-id-9f5663ed83d83c7335b346313837d4cada9cd846/> `easy::Commit::time()` to access the committers time conveniently.
+ - <csr-id-7c88b62e439af7a60ddb68fb6737cb3b1cebf00d/> easy::Head::name() to learn about the name of the HEAD ref
+   It's mainly for completeness to provide people with a `FullNameRef`
+   of HEAD.
+ - <csr-id-3b0913a2e6695e4e9e94341ef48d2ba3b4a518e6/> `easy::Head::peel_to_commit_in_place()`
+   It allows quickly getting a commit from the head, something most people
+   want when getting started with any kind of tool.
+ - <csr-id-1c22d76c26464db4a185e19bb6c1f9a17fa19bc9/> `Repository::load_index()`
+ This method makes the index of the default workspace available.
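+
+A minimal sketch combining a few of the conveniences above (`head_commit()`,
+`Commit::short_id()`, `Commit::time()`); the return types are assumed to be
+`Display`/`Debug`-printable.
+
+```rust
+fn summarize_head(repo: &gix::Repository) -> Result<String, Box<dyn std::error::Error>> {
+    let commit = repo.head_commit()?;
+    // `short_id()` shortens the hash, `time()` yields the commit time.
+    Ok(format!("{} committed at {:?}", commit.short_id()?, commit.time()?))
+}
+```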
+
+### Bug Fixes
+
+ - <csr-id-c329dd75420f82d506fd415cd377f7df6c6ccbad/> Properly classify worktrees as non-bare, helps with `ein t find`
+ They use git-files which point to the actual repository data.
+
+### Changed (BREAKING)
+
+ - <csr-id-a8b6589a7c645f323f95da6cb94321fc967e9b06/> Easier access to local and remote branches
+
+### New Features (BREAKING)
+
+ - <csr-id-8945d95f7fa88562d37ff67ac6e38bead73dd2df/> `interrupt::Iter`, rename `interrupt::Iter` -> `interrupt::IterWithError`
+ - <csr-id-813a3bea88cdbe1fd9b0a8070efeee2a44f7823e/> Let 'easy::Object::try_into_…()` return `try_into::Error`.
+ That way, the typical usage of `try_into_commit()?` will not result
+ in a strange error about `Object` not being convertible into some
+ error. We think having a real error there is the least surprising.
+
+### Bug Fixes (BREAKING)
+
+ - <csr-id-c863ea5b34fa9ee3dac21c1f85587da16045f8d8/> do not install signal handlers by default
+ The previous behaviour is meant to be convenient for the casual
+ user even though it
+ ends up being surprising when used in applications that install
+ their own signal handlers and need more control over how the program
+ shuts down.
+
+ This is now fixed by **requiring an explicit `setup()`** call before
+ the first tempfile is created, which makes it a breaking change.
+
+### Other (BREAKING)
+
+ - <csr-id-5f7595305efc85d6ca3c541e9f9adac3915cbd84/> `Id::prefix` -> `Id::shorten()`
+ It's definitely more intuitive that way.
+
+### Refactor (BREAKING)
+
+ - <csr-id-c10f07c50f6dde4b39bf1e3ff26c239c5f202912/> dissolve 'easy' module by moving everything one level up
+ - <csr-id-bbc6efeceb26050973e1425e68a52e51b9df4572/> clarify different repository types much better
+
+## 0.14.0 (2022-01-23)
+
+<csr-id-7a91212631219e94b9454d2874b53f3ecc1db77e/>
+<csr-id-b2cc0c63570d45de032d63e62d94c3344783440e/>
+<csr-id-ebc7f47708a63c3df4415ba0e702660d976dfb3e/>
+<csr-id-2290d006705ff47ad780b009fe58ee422b3285af/>
+
+### New Features
+
+ - <csr-id-667485e133ca29fcc6914a7142cf953564b5fce3/> Add `easy::Tree::traverse()` platform
+ - <csr-id-8f650c089c88698483f778aa5c0070f606b94f09/> Add `easy::Commit` object
+   It allows accessing commit information more conveniently.
+ - <csr-id-0ae2a8da010d848d98bef47ac923ae1d770091ff/> `easy::Oid::ancestors()` now supports `sorting()` and iteration by first commit only
+ Especially the sorting is useful to avoid having to sort commits by
+ hand after collecting them.
+ - <csr-id-bc77534f9c385046f6c9adb994b1443307afda46/> Use GITOXIDE_OBJECT_CACHE_MEMORY to control how much object cache is used
+ Note that this is mostly for debugging or quickly seeing if object
+ caches help with certain operations.
+
+   Ideally implementations know this themselves and set up caches
+   accordingly, probably after experimenting with these environment variables.
+
+### Changed (BREAKING)
+
+ - <csr-id-6e3a745dfada66a2fcac256dae0ac63959e74d08/> rename `easy::Object` methods returning `Ref` objects to have `ref` in their name
+ That way, it's more clear that the `Ref` versions are low-level ones
+ whereas the `into_` ones are higher-level ones that are part of the
+ `easy` suite.
+ - <csr-id-b6730979808ce28b98c65888a349f1e3d0ea1b9a/> Rename `OwnedObject` to `DetachedObject`
+ The latter more clearly indicates what the difference is to
+ `Object` (which is attached and carries a lifetime)
+ - <csr-id-c4184f3c31ffc4597bd089e8140653906a6594d8/> Remove easy::borrow::Error entirely; support for multiple objects per handle
+   This massive simplification finally allows any number of objects to be
+ created while adding support for reusing their data buffers thanks
+ to a simple free-list stored with the handle.
+ - <csr-id-880b56426859306aa30038ff35e2ad14607e9e90/> rename `easy::Object` to `OwnedObject`; remove `Ref` suffix from `ObjectRef` and `TreeRef`
+ - <csr-id-f9c0493460ab7c664aaa231ffcf7dfd56076c920/> use `gix_odb::Find*` traits in prelude, instead of `gix_pack::Find*`
+ These are higher-level and generally more desirable.
+ The Find traits in `gix-pack` are more useful internally when packs
+ have to be handled directly, for example when generating packs.
+ - <csr-id-83d7b31e7dd6d09eea79fc3c68620d099459132f/> rename easy::State to easy::Handle
+ As the first step to remove the 'Easy' abstraction.
+ - <csr-id-5e7aa1689f5d7ea5b510611a3ca0868828226291/> fully rely on OdbHandle in repository State
+ - <csr-id-57de915886b76f80b3641def0ccf4fd79e334fc8/> Rename `Repository::odb` to` Repository::objects`
+   This way it's more in line with `Repository::refs`.
+ - <csr-id-93db4a5e70456d2c33ea010e3c86e5f26eb1bcc0/> remove Repository::refresh_object_database()
+ With the linked DB this is simply not possible anymore and we expect
+ these updates to happen automatically in future for greater convenience.
+
+ For now, in order to refresh a repository, one has to reopen it.
+ - <csr-id-580e96c1b2d9782a2e8cf9d1123f6d53a5376a3d/> Rename `Handle` to `Cache`
+ Because this is exactly what it is effectively.
+ Also add some basic instantiation for the new object store.
+ - remove borrowing Repo as possible failure cause
+ The `easy::Handle` is now a full (but shared) clone of the original
+ Repository with additional thread-local state, hence there is no more
+ need for a way to access the original repository.
+ - remove Easy… abstraction in favor of Handle
+ This great reduction of complexity allows for being multi-threading
+ capable by default with the option to turn that off at compile time.
+
+ All `to|into_easy…()` methods are removed in favor of `to_easy()`
+   along with the removal of all `Easy` types in favor of the single `Handle`.
+ - remove pack-cache from `Find::try_find(…)`
+ With the new architecture this can be an implementation detail without
+ forcing it to be Sync.
+ - move gix_pack::data::Object to gix_object::Data, massively alter gix_odb::Find trait
+ This will break a lot, but has to happen to prepare these traits for the
+ next generation of object databases.
+
+## 0.13.0 (2021-11-29)
+
+<csr-id-951c050ecbb70c9de216603e55c7cfbc89a067e3/>
+<csr-id-0e1875363fea09452789d7a90fc6860a7996d6d3/>
+
+With changes to `gix-ref`, what follows is all the adjustments made to simplify the `gix` implementation.
+
+### Changed (BREAKING)
+
+ - <csr-id-5d498a33236391d8e456f267b1bf6af24de66f11/> file::Store::iter() is now a platform, with `.all()` and `.prefixed(…)` respectively
+ This way, it's possible to keep shared ownership of the packed buffer
+ while allowing the exact same iterator machinery to work as before.
+ - <csr-id-15d429bb50602363292453606902bdce5042d9a5/> file::Store::(try_)find(…, packed) was removed
+ The packed buffer is now handled internally while loading it on demand.
+ When compiled with `gix-features/parallel` the `file::Store` remains
+ send and sync.
+
+ The packed refs buffer is shared across clones and it's recommended
+ to clone one `file::Store` instance per thread, each of which can
+ use its own namespace.
+ - <csr-id-95247322a8191edfa7fac9c5aa72b40239f3aa88/> move `gix_ref::file::WriteRefLog` to `gix_ref::store::WriteRefLog`
+
+### Bug Fixes (BREAKING)
+
+ - <csr-id-fc8e85cd71d4f16bc8daad0b790d875045faefff/> ref namespaces are now thread-local
+ Previously these were shared in the shared Repo instance, which makes
+ threaded applications impossible to remain deterministic across multiple
+ connections.
+
+ Now they are local to the thread, which allowed some methods to remove
+ their Result<> as they cannot fail anymore, the reason for this being
+ a breaking change.
+
+### Other (BREAKING)
+
+ - <csr-id-951c050ecbb70c9de216603e55c7cfbc89a067e3/> Reference::logs() -> Reference::log_iter()
+ The latter now returns a standard Platform to iterate over all
+ reflog entries from oldest to newest or vice versa.
+
+### Refactor (BREAKING)
+
+ - <csr-id-0e1875363fea09452789d7a90fc6860a7996d6d3/> `file::Store::base` is now `file::Store::base()` and read-only
+   That way, file databases can't be repositioned anymore; it's recommended
+   to recreate the store if that's desired.
+
+## 0.12.0 (2021-11-16)
+
+### New Features
+
+ - <csr-id-b7aab9efd42975e8f2dcb5c97e51495996175702/> Allow `PartialNameRef` to be created from owned items
+
+### Changed (BREAKING)
+
+ - <csr-id-e8b091943f0c9a26317da0003f7fcdf5a56ef21a/> Rename gix->ein and gixp->gix
+
+## v0.11.0 (2021-10-19)
+
+A maintenance release to properly deal with previous breaking changes in `gix-hash`.
+
+## v0.10.0 (2021-10-15)
+
+<csr-id-1cb41f81cffe19c75aadf49a5cc7ec390ec6cae7/>
+<csr-id-2f2d856efe733d3cf81110c0e0607d2e7c40d968/>
+<csr-id-a19567eceab0dd7f5478b83c2ff9ce79754db308/>
+<csr-id-61793ff42f5c2f9ddf302901adea2dac6149eac8/>
+<csr-id-0cd585e20a5abd323a34ec32d92fbd48531b3b18/>
+<csr-id-89f15051763a03627f332c46beedfc53b8b9b15b/>
+<csr-id-f644d0ede7a2e8d344a81c7003c3877eed64a6b0/>
+<csr-id-ac3b9efb7b90958274ce55800959d930f8641115/>
+<csr-id-03fe8a7ebd34608d725d4585da5c1630123762ec/>
+<csr-id-8fe461281842b58aa11437445637c6e587bedd63/>
+<csr-id-b209da29f361512ba757febf56bc1aca039f2a41/>
+<csr-id-741558dd8194590c5cc8566aa22f96e73df38edf/>
+<csr-id-e16603b15b5488b81563c583cd8f5292ab9d24a2/>
+<csr-id-54a64a588ff72515451a3d0343306ac4abe1cb35/>
+<csr-id-1f4e45a26a3d2727f00c3f248452dd41fc8a95be/>
+<csr-id-1958e8aa65eb97f9755f065d713f0a48c5e41b1b/>
+<csr-id-066f59b23a125b1ce9a015437a3f4468e5791da0/>
+<csr-id-329d183ad4e256a4f9cdeb34589b5f3432495f79/>
+<csr-id-1a1959f487d69ffdd5394775b707139c44dbd11d/>
+<csr-id-5e091fb2b4fd33879c176e6dadd3c9805d99af50/>
+<csr-id-e3760679547e0dc1bf31761acdb6e63b04a50919/>
+<csr-id-de004b318fdc6923711dd001bff5f4bcbba4270e/>
+<csr-id-41afad3386461b658ee859225785b6de86d13cfb/>
+<csr-id-f582439a3efe5c234f54c488792395e9de09a032/>
+<csr-id-42080aefe3b286afb58235c1c22491579ab73919/>
+<csr-id-d422b9a31a37a03551bec4382039aaf3a7e49902/>
+<csr-id-e7c061b10c263001eb4abf03098d6694b770f828/>
+<csr-id-66292fd1076c2c9db4694c5ded09799a0be11a03/>
+<csr-id-5aadf75a0d93d1a990ad0305c38366c5c22bdcb2/>
+<csr-id-d79a1b75304e397c16b5af7055906591a187ddfd/>
+<csr-id-7d2b6b66e09ff39727fccd68d190679b52d90126/>
+<csr-id-06996e032b1e451a674395ebaca94434fac46f05/>
+<csr-id-daec7167df524b329daad7dabb1b9920b6ef8936/>
+<csr-id-4fe4786797d240a59d29dbf2c6310490a381c8b6/>
+<csr-id-debe0094826f83839f907523715def929133fd58/>
+<csr-id-56e39fac54bfa3871c42bbf76a9f7c49486b85be/>
+<csr-id-293bfc0278c5983c0beaec93253fb51f00d81156/>
+<csr-id-650241251a420602f74037babfc24c9f64df78d8/>
+<csr-id-2b4a61589a7cba3f7600710e21304e731ae3b36a/>
+<csr-id-8b82f7d44c7eb63b7922ddc31ada9cefdce776b0/>
+
+### New Features
+
+ - <csr-id-11b64fce4630371633b6415f227eecdc6b42b20b/> Make `gix_url::Url` available under `gix::Url`
+ - <csr-id-80b8331092f4856f52afa1d85fa375ae688bdd28/> add easy::ext::ObjectAccessExt::tag(…) to create tag objects
+ It's a quick sketch on how tag object creation could work.
+
+   Note the duplication of the method name using traits, which seems like a good solution
+ to the problem of differentiating tag objects and tag references while
+ keeping the method name short.
+
+ Most will only ever need one, right?
+
+ Even in my example that's not the case, so maybe we have to rename it.
+ - <csr-id-0ebfeb614264ca06ab763189e55e6c016c9997af/> Make `gix_url::Url` available under `gix::Url`
+
+### BREAKING Changes
+
+ - Use 'to_*' when converting `easy::Object` to specific object kind
+ This also makes the API more consistent while being more idiomatic.
+ - Avoid duplicate module paths in 'tree' and 'commit'
+ - rename ObjectIdExt::ancestors_iter() to *::ancestors()
+ - rename `easy::Object::to_(commit|tag)_iter()`…
+ …to `easy::Object::try_to_(commit|tag)_iter()` for consistency.
+ - rename `*::State` into `*::Platform`
+ - various small API changes
+ - move easy::head::peel::Error -> easy::head::peel::to_id::Error
+ - rename path::is_git to path::is
+ - rename easy::reference::log::State to easy::reference::Logs
+
+### Other
+
+ - <csr-id-293bfc0278c5983c0beaec93253fb51f00d81156/> loose reference iteration with non-dir prefixes…
+ Previously it was expected for the prefix `Path` to always exist for
+ the prefix to be valid. This, however, is not similar to packed
+ prefixes, which allow non-dir prefixes as well.
+
+ Now we will check if the prefix is actually a directory, and if not
+ split it into its parent directory and the filename portion. The latter
+ is then used for prefix matching file names within that directory.
+ - <csr-id-650241251a420602f74037babfc24c9f64df78d8/> Add 'references().all().peeled().'…
+ …to not only make typical usage of iterated references more convenient
+ but also work around a double-borrow error one would see otherwise.
+ - <csr-id-2b4a61589a7cba3f7600710e21304e731ae3b36a/> filter refs correctly, but…
+ …it needs a way to peel references right away without trying
+ to double-borrow. This means the Iterator needs to implement this.
+ - <csr-id-8b82f7d44c7eb63b7922ddc31ada9cefdce776b0/> improved changelog…
+ …akin to 'Keep a changelog'.
+
+### Refactor
+
+ - <csr-id-8fe461281842b58aa11437445637c6e587bedd63/> split data::output::count::objects into files
+ - <csr-id-b209da29f361512ba757febf56bc1aca039f2a41/> use new gix_pack::cache::Object trait
+ - <csr-id-741558dd8194590c5cc8566aa22f96e73df38edf/> remove object cache impl which now lives in gix-pack
+
+### Other
+
+ - <csr-id-e16603b15b5488b81563c583cd8f5292ab9d24a2/> `remote_url()` is now optional
+ Otherwise it wouldn't work on repos that don't have a remote set yet.
+ Instead of failing, we don't create links.
+ - <csr-id-54a64a588ff72515451a3d0343306ac4abe1cb35/> try to create persistent Easy iterator, but can't make it Send…
+ …which is fair as it contains borrowed RefCells, which really would have
+ to be owned to work for this, which would in turn require the Ancestor's
+ struct to be kind of self-referential
+ - <csr-id-1f4e45a26a3d2727f00c3f248452dd41fc8a95be/> path::is
+ - <csr-id-1958e8aa65eb97f9755f065d713f0a48c5e41b1b/> path::discover
+ - <csr-id-066f59b23a125b1ce9a015437a3f4468e5791da0/> top-level of 'path' module
+ - <csr-id-329d183ad4e256a4f9cdeb34589b5f3432495f79/> object_id
+ - <csr-id-1a1959f487d69ffdd5394775b707139c44dbd11d/> repository
+ - <csr-id-5e091fb2b4fd33879c176e6dadd3c9805d99af50/> ext::tree
+ - <csr-id-e3760679547e0dc1bf31761acdb6e63b04a50919/> easy::object::peel
+ - <csr-id-de004b318fdc6923711dd001bff5f4bcbba4270e/> easy::object::errors
+ - <csr-id-41afad3386461b658ee859225785b6de86d13cfb/> a seemingly slow version of path lookup, but…
+ …in debug mode it's faster than the fast path, despite doing more
+ and being the same when it comes to searching path components.
+ - <csr-id-f582439a3efe5c234f54c488792395e9de09a032/> easy::object, sans a few child-modules
+ - <csr-id-42080aefe3b286afb58235c1c22491579ab73919/> update 'platform' information to reflect the current usage
+ - <csr-id-d422b9a31a37a03551bec4382039aaf3a7e49902/> configure caches with env vars using `apply_environment()`
+ - <csr-id-e7c061b10c263001eb4abf03098d6694b770f828/> refactor
+ - <csr-id-66292fd1076c2c9db4694c5ded09799a0be11a03/> set package cache via RepositoryAccessExt
+ - <csr-id-5aadf75a0d93d1a990ad0305c38366c5c22bdcb2/> Add GITOXIDE_PACK_CACHE_MEMORY_IN_BYTES=536870912 to control pack-cache size…
+   …which can mean another considerable speed-up for many workloads, but
+   it usually needs some knowledge about the application and its repos, and
+   should thus be left to the user.
+ - <csr-id-d79a1b75304e397c16b5af7055906591a187ddfd/> allow disabling the pack cache with GITOXIDE_DISABLE_PACK_CACHE
+ - <csr-id-7d2b6b66e09ff39727fccd68d190679b52d90126/> prepare for configurable pack cache
+ - <csr-id-06996e032b1e451a674395ebaca94434fac46f05/> object-cache to allow for a speed boost…
+ …by avoiding duplicate accesses to hit the object database.
+   However, the cost of the cache is relatively high and involves some
+   memory copying, so a hit rate of about 50% is certainly needed
+   to get any speed boost at all.
+ - <csr-id-daec7167df524b329daad7dabb1b9920b6ef8936/> build commit history for later use in changelog generation
+ - <csr-id-4fe4786797d240a59d29dbf2c6310490a381c8b6/> Allow object access during commit ancestor traversal…
+ …by getting only a temporary handle to the pack-cache. The cost of this
+ should be negligible compared to the cost of object decoding.
+ - <csr-id-debe0094826f83839f907523715def929133fd58/> sketch history acquisition
+ - <csr-id-56e39fac54bfa3871c42bbf76a9f7c49486b85be/> add 'Head::peeled()' method
+
+### Changed (BREAKING)
+
+ - <csr-id-c3385cd144298eb9f06d7751d180e26da7b4d338/> `easy::Object::try_to_commit()` now returns `Result<CommitRef>`…
+ …without the nested `Option`, folding the type mismatch into a specific
+ `conversion::Error` instead.
+ - <csr-id-e59f901f47fb0180211494a1591aed62b856406a/> rename `ObjectAccessExt::tag(…)` to `*::tag_reference(…)`, add `easy::Object::try_to_tag()`
+ This one also contains the first and probably only test for tag object
+ creation.
+
+## v0.9.1 (2021-09-10)
+
+<csr-id-293bfc0278c5983c0beaec93253fb51f00d81156/>
+<csr-id-650241251a420602f74037babfc24c9f64df78d8/>
+<csr-id-2b4a61589a7cba3f7600710e21304e731ae3b36a/>
+<csr-id-8b82f7d44c7eb63b7922ddc31ada9cefdce776b0/>
+
+- Remove `max-performance` feature from default set until the `msvc` build issue is fixed. Otherwise it will surprisingly break windows builds.
+
+### Other
+
+ - <csr-id-293bfc0278c5983c0beaec93253fb51f00d81156/> loose reference iteration with non-dir prefixes…
+ Previously it was expected for the prefix `Path` to always exist for
+ the prefix to be valid. This, however, is not similar to packed
+ prefixes, which allow non-dir prefixes as well.
+
+ Now we will check if the prefix is actually a directory, and if not
+ split it into its parent directory and the filename portion. The latter
+ is then used for prefix matching file names within that directory.
+ - <csr-id-650241251a420602f74037babfc24c9f64df78d8/> Add 'references().all().peeled().'…
+ …to not only make typical usage of iterated references more convenient
+ but also work around a double-borrow error one would see otherwise.
+ - <csr-id-2b4a61589a7cba3f7600710e21304e731ae3b36a/> filter refs correctly, but…
+ …it needs a way to peel references right away without trying
+ to double-borrow. This means the Iterator needs to implement this.
+ - <csr-id-8b82f7d44c7eb63b7922ddc31ada9cefdce776b0/> improved changelog…
+ …akin to 'Keep a changelog'.
+
+## v0.9.0 (2021-09-08)
+
+- rename `prelude::ConfigAccessExt` to `prelude::RepositoryAccessExt`
+- `prelude::ObjectAccessExt::commit()` signature change
+- cargo features changed in incompatible ways. `network` was replaced by more fine-grained options for _blocking_ and _async_ networking, as well as an optional http transport
+
+### New
+
+- `init()`
+- `init_bare()`
+- `Repository::init(Kind)`
+- `open()`
+- `Repository::open()`
+- `easy::Head`
+- `easy::ext::ReferenceAccessExt::head()`
+- `ext::ReferenceExt` trait
+
+### Breaking
+- **renames / moves / Signature Changes**
+ - `path::Path` to `Path`
+ - `init::repository(dir)` -> `path::create::into(dir, **Kind**)`
+
+## v0.8.2 (2021-09-07)
+
+## v0.8.1 (2021-08-28)
+
+- Introduce `EasyArcExclusive` type, now available thanks to `parking_lot` 0.11.2
+
+## v0.8.0 (2021-08-27)
+
+- Rename `object` to `objs` to be equivalent to `refs` and make space for the new `object` module
+- various minor version updates of pre-release dependencies
+
+## v0.7.2 (2021-08-17)
+
+## v0.7.1 (2021-08-13)
+
+## v0.7.0 (2021-08-10)
+
+## v0.6.0 (2021-05-28)
+
+## v0.5.0 (2021-04-08)
+
+## v0.4.0 (2020-09-12)
+
+## v0.3.0 (2020-08-12)
+
+## v0.1.0 (2020-07-12)
+
+## 0.0.0 (2023-02-10)
+
+### Commit Statistics
+
+<csr-read-only-do-not-edit/>
+
+ - 2 commits contributed to the release.
+ - 0 commits were understood as [conventional](https://www.conventionalcommits.org).
+ - 0 issues like '(#ID)' were seen in commit messages
+
+### Commit Details
+
+<csr-read-only-do-not-edit/>
+
+<details><summary>view details</summary>
+
+ * **Uncategorized**
+ - Release gix v0.0.0 ([`8bce6d5`](https://github.com/Byron/gitoxide/commit/8bce6d5cba12630bf4d12ed92f572a379d945329))
+ - Add `gix` crate to reserve name ([`5104a78`](https://github.com/Byron/gitoxide/commit/5104a787127bf0b1f9b65f371b7c5b79f491e396))
+</details>
+
diff --git a/vendor/gix/Cargo.lock b/vendor/gix/Cargo.lock
new file mode 100644
index 000000000..10cda1ac2
--- /dev/null
+++ b/vendor/gix/Cargo.lock
@@ -0,0 +1,3005 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "adler"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
+
+[[package]]
+name = "ahash"
+version = "0.8.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f"
+dependencies = [
+ "cfg-if",
+ "getrandom",
+ "once_cell",
+ "version_check",
+]
+
+[[package]]
+name = "anyhow"
+version = "1.0.69"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "224afbd727c3d6e4b90103ece64b8d1b67fbb1973b1046c2281eed3f3803f800"
+
+[[package]]
+name = "arc-swap"
+version = "1.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bddcadddf5e9015d310179a59bb28c4d4b9920ad0f11e8e14dbadf654890c9a6"
+
+[[package]]
+name = "arrayvec"
+version = "0.7.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6"
+
+[[package]]
+name = "async-attributes"
+version = "1.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a3203e79f4dd9bdda415ed03cf14dae5a2bf775c683a00f94e9cd1faf0f596e5"
+dependencies = [
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "async-channel"
+version = "1.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cf46fee83e5ccffc220104713af3292ff9bc7c64c7de289f66dae8e38d826833"
+dependencies = [
+ "concurrent-queue",
+ "event-listener",
+ "futures-core",
+]
+
+[[package]]
+name = "async-executor"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "17adb73da160dfb475c183343c8cccd80721ea5a605d3eb57125f0a7b7a92d0b"
+dependencies = [
+ "async-lock",
+ "async-task",
+ "concurrent-queue",
+ "fastrand",
+ "futures-lite",
+ "slab",
+]
+
+[[package]]
+name = "async-global-executor"
+version = "2.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f1b6f5d7df27bd294849f8eec66ecfc63d11814df7a4f5d74168a2394467b776"
+dependencies = [
+ "async-channel",
+ "async-executor",
+ "async-io",
+ "async-lock",
+ "blocking",
+ "futures-lite",
+ "once_cell",
+]
+
+[[package]]
+name = "async-io"
+version = "1.12.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8c374dda1ed3e7d8f0d9ba58715f924862c63eae6849c92d3a18e7fbde9e2794"
+dependencies = [
+ "async-lock",
+ "autocfg",
+ "concurrent-queue",
+ "futures-lite",
+ "libc",
+ "log",
+ "parking",
+ "polling",
+ "slab",
+ "socket2",
+ "waker-fn",
+ "windows-sys 0.42.0",
+]
+
+[[package]]
+name = "async-lock"
+version = "2.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fa24f727524730b077666307f2734b4a1a1c57acb79193127dcc8914d5242dd7"
+dependencies = [
+ "event-listener",
+]
+
+[[package]]
+name = "async-std"
+version = "1.12.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "62565bb4402e926b29953c785397c6dc0391b7b446e45008b0049eb43cec6f5d"
+dependencies = [
+ "async-attributes",
+ "async-channel",
+ "async-global-executor",
+ "async-io",
+ "async-lock",
+ "crossbeam-utils",
+ "futures-channel",
+ "futures-core",
+ "futures-io",
+ "futures-lite",
+ "gloo-timers",
+ "kv-log-macro",
+ "log",
+ "memchr",
+ "once_cell",
+ "pin-project-lite",
+ "pin-utils",
+ "slab",
+ "wasm-bindgen-futures",
+]
+
+[[package]]
+name = "async-task"
+version = "4.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7a40729d2133846d9ed0ea60a8b9541bccddab49cd30f0715a1da672fe9a2524"
+
+[[package]]
+name = "async-trait"
+version = "0.1.65"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "095183a3539c7c7649b2beb87c2d3f0591f3a7fed07761cc546d244e27e0238c"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "atomic-waker"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "debc29dde2e69f9e47506b525f639ed42300fc014a3e007832592448fa8e4599"
+
+[[package]]
+name = "autocfg"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
+
+[[package]]
+name = "base64"
+version = "0.21.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a4a4ddaa51a5bc52a6948f74c06d20aaaddb71924eab79b8c97a8c556e942d6a"
+
+[[package]]
+name = "bitflags"
+version = "1.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
+
+[[package]]
+name = "block-buffer"
+version = "0.10.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "69cce20737498f97b993470a6e536b8523f0af7892a4f928cceb1ac5e52ebe7e"
+dependencies = [
+ "generic-array",
+]
+
+[[package]]
+name = "blocking"
+version = "1.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3c67b173a56acffd6d2326fb7ab938ba0b00a71480e14902b2591c87bc5741e8"
+dependencies = [
+ "async-channel",
+ "async-lock",
+ "async-task",
+ "atomic-waker",
+ "fastrand",
+ "futures-lite",
+]
+
+[[package]]
+name = "bstr"
+version = "1.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5ffdb39cb703212f3c11973452c2861b972f757b021158f3516ba10f2fa8b2c1"
+dependencies = [
+ "memchr",
+ "once_cell",
+ "regex-automata",
+ "serde",
+]
+
+[[package]]
+name = "btoi"
+version = "0.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9dd6407f73a9b8b6162d8a2ef999fe6afd7cc15902ebf42c5cd296addf17e0ad"
+dependencies = [
+ "num-traits",
+]
+
+[[package]]
+name = "bumpalo"
+version = "3.12.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0d261e256854913907f67ed06efbc3338dfe6179796deefc1ff763fc1aee5535"
+
+[[package]]
+name = "bytes"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be"
+
+[[package]]
+name = "bytesize"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "38fcc2979eff34a4b84e1cf9a1e3da42a7d44b3b690a40cdcb23e3d556cfb2e5"
+
+[[package]]
+name = "cc"
+version = "1.0.79"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f"
+
+[[package]]
+name = "cfg-if"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+
+[[package]]
+name = "clru"
+version = "0.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b8191fa7302e03607ff0e237d4246cc043ff5b3cb9409d995172ba3bea16b807"
+
+[[package]]
+name = "cmake"
+version = "0.1.49"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "db34956e100b30725f2eb215f90d4871051239535632f84fea3bc92722c66b7c"
+dependencies = [
+ "cc",
+]
+
+[[package]]
+name = "concurrent-queue"
+version = "2.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c278839b831783b70278b14df4d45e1beb1aad306c07bb796637de9a0e323e8e"
+dependencies = [
+ "crossbeam-utils",
+]
+
+[[package]]
+name = "core-foundation"
+version = "0.9.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146"
+dependencies = [
+ "core-foundation-sys",
+ "libc",
+]
+
+[[package]]
+name = "core-foundation-sys"
+version = "0.8.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc"
+
+[[package]]
+name = "cpufeatures"
+version = "0.2.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "28d997bd5e24a5928dd43e46dc529867e207907fe0b239c3477d924f7f2ca320"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "crc32fast"
+version = "1.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "crossbeam"
+version = "0.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2801af0d36612ae591caa9568261fddce32ce6e08a7275ea334a06a4ad021a2c"
+dependencies = [
+ "cfg-if",
+ "crossbeam-channel",
+ "crossbeam-deque",
+ "crossbeam-epoch",
+ "crossbeam-queue",
+ "crossbeam-utils",
+]
+
+[[package]]
+name = "crossbeam-channel"
+version = "0.5.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cf2b3e8478797446514c91ef04bafcb59faba183e621ad488df88983cc14128c"
+dependencies = [
+ "cfg-if",
+ "crossbeam-utils",
+]
+
+[[package]]
+name = "crossbeam-deque"
+version = "0.8.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef"
+dependencies = [
+ "cfg-if",
+ "crossbeam-epoch",
+ "crossbeam-utils",
+]
+
+[[package]]
+name = "crossbeam-epoch"
+version = "0.9.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "46bd5f3f85273295a9d14aedfb86f6aadbff6d8f5295c4a9edb08e819dcf5695"
+dependencies = [
+ "autocfg",
+ "cfg-if",
+ "crossbeam-utils",
+ "memoffset",
+ "scopeguard",
+]
+
+[[package]]
+name = "crossbeam-queue"
+version = "0.3.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d1cfb3ea8a53f37c40dea2c7bedcbd88bdfae54f5e2175d6ecaff1c988353add"
+dependencies = [
+ "cfg-if",
+ "crossbeam-utils",
+]
+
+[[package]]
+name = "crossbeam-utils"
+version = "0.8.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3c063cd8cc95f5c377ed0d4b49a4b21f632396ff690e8470c29b3359b346984b"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "crypto-common"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
+dependencies = [
+ "generic-array",
+ "typenum",
+]
+
+[[package]]
+name = "ctor"
+version = "0.1.26"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6d2301688392eb071b0bf1a37be05c469d3cc4dbbd95df672fe28ab021e6a096"
+dependencies = [
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "curl"
+version = "0.4.44"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "509bd11746c7ac09ebd19f0b17782eae80aadee26237658a6b4808afb5c11a22"
+dependencies = [
+ "curl-sys",
+ "libc",
+ "openssl-probe",
+ "openssl-sys",
+ "schannel",
+ "socket2",
+ "winapi",
+]
+
+[[package]]
+name = "curl-sys"
+version = "0.4.60+curl-7.88.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "717abe2cb465a5da6ce06617388a3980c9a2844196734bec8ccb8e575250f13f"
+dependencies = [
+ "cc",
+ "libc",
+ "libz-sys",
+ "openssl-sys",
+ "pkg-config",
+ "vcpkg",
+ "winapi",
+]
+
+[[package]]
+name = "dashmap"
+version = "5.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "907076dfda823b0b36d2a1bb5f90c96660a5bbcd7729e10727f07858f22c4edc"
+dependencies = [
+ "cfg-if",
+ "hashbrown 0.12.3",
+ "lock_api",
+ "once_cell",
+ "parking_lot_core",
+]
+
+[[package]]
+name = "data-encoding"
+version = "2.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "23d8666cb01533c39dde32bcbab8e227b4ed6679b2c925eba05feabea39508fb"
+
+[[package]]
+name = "digest"
+version = "0.10.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8168378f4e5023e7218c89c891c0fd8ecdb5e5e4f18cb78f38cf245dd021e76f"
+dependencies = [
+ "block-buffer",
+ "crypto-common",
+]
+
+[[package]]
+name = "dirs"
+version = "4.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ca3aa72a6f96ea37bbc5aa912f6788242832f75369bdfdadcb0e38423f100059"
+dependencies = [
+ "dirs-sys",
+]
+
+[[package]]
+name = "dirs-sys"
+version = "0.3.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1b1d1d91c932ef41c0f2663aa8b0ca0342d444d842c06914aa0a7e352d0bada6"
+dependencies = [
+ "libc",
+ "redox_users",
+ "winapi",
+]
+
+[[package]]
+name = "document-features"
+version = "0.2.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e493c573fce17f00dcab13b6ac057994f3ce17d1af4dc39bfd482b83c6eb6157"
+dependencies = [
+ "litrs",
+]
+
+[[package]]
+name = "dunce"
+version = "1.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0bd4b30a6560bbd9b4620f4de34c3f14f60848e58a9b7216801afcb4c7b31c3c"
+
+[[package]]
+name = "either"
+version = "1.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91"
+
+[[package]]
+name = "encoding_rs"
+version = "0.8.32"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "071a31f4ee85403370b58aca746f01041ede6f0da2730960ad001edc2b71b394"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "enum-as-inner"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c9720bba047d567ffc8a3cba48bf19126600e249ab7f128e9233e6376976a116"
+dependencies = [
+ "heck",
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "errno"
+version = "0.2.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f639046355ee4f37944e44f60642c6f3a7efa3cf6b78c78a0d989a8ce6c396a1"
+dependencies = [
+ "errno-dragonfly",
+ "libc",
+ "winapi",
+]
+
+[[package]]
+name = "errno-dragonfly"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf"
+dependencies = [
+ "cc",
+ "libc",
+]
+
+[[package]]
+name = "event-listener"
+version = "2.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0"
+
+[[package]]
+name = "fastrand"
+version = "1.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e51093e27b0797c359783294ca4f0a911c270184cb10f85783b118614a1501be"
+dependencies = [
+ "instant",
+]
+
+[[package]]
+name = "filetime"
+version = "0.2.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8a3de6e8d11b22ff9edc6d916f890800597d60f8b2da1caf2955c274638d6412"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "redox_syscall",
+ "windows-sys 0.45.0",
+]
+
+[[package]]
+name = "flate2"
+version = "1.0.25"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a8a2db397cb1c8772f31494cb8917e48cd1e64f0fa7efac59fbd741a0a8ce841"
+dependencies = [
+ "crc32fast",
+ "libz-sys",
+ "miniz_oxide",
+]
+
+[[package]]
+name = "fnv"
+version = "1.0.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
+
+[[package]]
+name = "foreign-types"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1"
+dependencies = [
+ "foreign-types-shared",
+]
+
+[[package]]
+name = "foreign-types-shared"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b"
+
+[[package]]
+name = "form_urlencoded"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a9c384f161156f5260c24a097c56119f9be8c798586aecc13afbcbe7b7e26bf8"
+dependencies = [
+ "percent-encoding",
+]
+
+[[package]]
+name = "futures"
+version = "0.3.26"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "13e2792b0ff0340399d58445b88fd9770e3489eff258a4cbc1523418f12abf84"
+dependencies = [
+ "futures-channel",
+ "futures-core",
+ "futures-executor",
+ "futures-io",
+ "futures-sink",
+ "futures-task",
+ "futures-util",
+]
+
+[[package]]
+name = "futures-channel"
+version = "0.3.26"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2e5317663a9089767a1ec00a487df42e0ca174b61b4483213ac24448e4664df5"
+dependencies = [
+ "futures-core",
+ "futures-sink",
+]
+
+[[package]]
+name = "futures-core"
+version = "0.3.26"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ec90ff4d0fe1f57d600049061dc6bb68ed03c7d2fbd697274c41805dcb3f8608"
+
+[[package]]
+name = "futures-executor"
+version = "0.3.26"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e8de0a35a6ab97ec8869e32a2473f4b1324459e14c29275d14b10cb1fd19b50e"
+dependencies = [
+ "futures-core",
+ "futures-task",
+ "futures-util",
+]
+
+[[package]]
+name = "futures-io"
+version = "0.3.26"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bfb8371b6fb2aeb2d280374607aeabfc99d95c72edfe51692e42d3d7f0d08531"
+
+[[package]]
+name = "futures-lite"
+version = "1.12.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7694489acd39452c77daa48516b894c153f192c3578d5a839b62c58099fcbf48"
+dependencies = [
+ "fastrand",
+ "futures-core",
+ "futures-io",
+ "memchr",
+ "parking",
+ "pin-project-lite",
+ "waker-fn",
+]
+
+[[package]]
+name = "futures-sink"
+version = "0.3.26"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f310820bb3e8cfd46c80db4d7fb8353e15dfff853a127158425f31e0be6c8364"
+
+[[package]]
+name = "futures-task"
+version = "0.3.26"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dcf79a1bf610b10f42aea489289c5a2c478a786509693b80cd39c44ccd936366"
+
+[[package]]
+name = "futures-util"
+version = "0.3.26"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9c1d6de3acfef38d2be4b1f543f553131788603495be83da675e180c8d6b7bd1"
+dependencies = [
+ "futures-channel",
+ "futures-core",
+ "futures-io",
+ "futures-sink",
+ "futures-task",
+ "memchr",
+ "pin-project-lite",
+ "pin-utils",
+ "slab",
+]
+
+[[package]]
+name = "generic-array"
+version = "0.14.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bff49e947297f3312447abdca79f45f4738097cc82b06e72054d2223f601f1b9"
+dependencies = [
+ "typenum",
+ "version_check",
+]
+
+[[package]]
+name = "getrandom"
+version = "0.2.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c05aeb6a22b8f62540c194aac980f2115af067bfe15a0734d7277a768d396b31"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "wasi",
+]
+
+[[package]]
+name = "gix"
+version = "0.39.0"
+dependencies = [
+ "anyhow",
+ "async-std",
+ "document-features",
+ "gix-actor",
+ "gix-attributes",
+ "gix-config",
+ "gix-credentials",
+ "gix-date",
+ "gix-diff",
+ "gix-discover",
+ "gix-features",
+ "gix-glob",
+ "gix-hash",
+ "gix-hashtable",
+ "gix-index",
+ "gix-lock",
+ "gix-mailmap",
+ "gix-object",
+ "gix-odb",
+ "gix-pack",
+ "gix-path",
+ "gix-prompt",
+ "gix-protocol",
+ "gix-ref",
+ "gix-refspec",
+ "gix-revision",
+ "gix-sec",
+ "gix-tempfile",
+ "gix-transport",
+ "gix-traverse",
+ "gix-url",
+ "gix-validate",
+ "gix-worktree",
+ "is_ci",
+ "log",
+ "once_cell",
+ "prodash",
+ "regex",
+ "reqwest",
+ "serde",
+ "serial_test",
+ "signal-hook",
+ "smallvec",
+ "thiserror",
+ "unicode-normalization",
+ "walkdir",
+]
+
+[[package]]
+name = "gix-actor"
+version = "0.19.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dc22b0cdc52237667c301dd7cdc6ead8f8f73c9f824e9942c8ebd6b764f6c0bf"
+dependencies = [
+ "bstr",
+ "btoi",
+ "gix-date",
+ "itoa",
+ "nom",
+ "serde",
+ "thiserror",
+]
+
+[[package]]
+name = "gix-attributes"
+version = "0.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2231a25934a240d0a4b6f4478401c73ee81d8be52de0293eedbc172334abf3e1"
+dependencies = [
+ "bstr",
+ "gix-features",
+ "gix-glob",
+ "gix-path",
+ "gix-quote",
+ "serde",
+ "thiserror",
+ "unicode-bom",
+]
+
+[[package]]
+name = "gix-bitmap"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "024bca0c7187517bda5ea24ab148c9ca8208dd0c3e2bea88cdb2008f91791a6d"
+dependencies = [
+ "thiserror",
+]
+
+[[package]]
+name = "gix-chunk"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b0d39583cab06464b8bf73b3f1707458270f0e7383cb24c3c9c1a16e6f792978"
+dependencies = [
+ "thiserror",
+]
+
+[[package]]
+name = "gix-command"
+version = "0.2.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b2c6f75c1e0f924de39e750880a6e21307194bb1ab773efe3c7d2d787277f8ab"
+dependencies = [
+ "bstr",
+]
+
+[[package]]
+name = "gix-config"
+version = "0.18.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "52c62e26ce11f607712e4f49a0a192ed87675d30187fd61be070abbd607d12f1"
+dependencies = [
+ "bstr",
+ "gix-config-value",
+ "gix-features",
+ "gix-glob",
+ "gix-path",
+ "gix-ref",
+ "gix-sec",
+ "memchr",
+ "nom",
+ "once_cell",
+ "smallvec",
+ "thiserror",
+ "unicode-bom",
+]
+
+[[package]]
+name = "gix-config-value"
+version = "0.10.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "693d4a4ba0531e46fe558459557a5b29fb86c3e4b2666c1c0861d93c7c678331"
+dependencies = [
+ "bitflags",
+ "bstr",
+ "gix-path",
+ "libc",
+ "thiserror",
+]
+
+[[package]]
+name = "gix-credentials"
+version = "0.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5be32b5fe339a31b8e53fa854081dc914c45020dcb64637f3c21baf69c96fc1b"
+dependencies = [
+ "bstr",
+ "gix-command",
+ "gix-config-value",
+ "gix-path",
+ "gix-prompt",
+ "gix-sec",
+ "gix-url",
+ "serde",
+ "thiserror",
+]
+
+[[package]]
+name = "gix-date"
+version = "0.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b96271912ce39822501616f177dea7218784e6c63be90d5f36322ff3a722aae2"
+dependencies = [
+ "bstr",
+ "itoa",
+ "serde",
+ "thiserror",
+ "time",
+]
+
+[[package]]
+name = "gix-diff"
+version = "0.28.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "585b0834d4b6791a848637c4e109545fda9b0f29b591ba55edb33ceda6e7856b"
+dependencies = [
+ "gix-hash",
+ "gix-object",
+ "imara-diff",
+ "thiserror",
+]
+
+[[package]]
+name = "gix-discover"
+version = "0.15.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "91c204adba5ebd211c74735cbb65817d277e154486bac0dffa3701f163b80350"
+dependencies = [
+ "bstr",
+ "dunce",
+ "gix-hash",
+ "gix-path",
+ "gix-ref",
+ "gix-sec",
+ "thiserror",
+]
+
+[[package]]
+name = "gix-features"
+version = "0.28.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5e6a9dfa7b3c1a99315203e8b97f8f99f3bd95731590607abeaa5ca31bc41fe3"
+dependencies = [
+ "bytes",
+ "bytesize",
+ "crc32fast",
+ "crossbeam-channel",
+ "flate2",
+ "gix-hash",
+ "jwalk",
+ "libc",
+ "once_cell",
+ "parking_lot",
+ "prodash",
+ "sha1",
+ "sha1_smol",
+ "thiserror",
+ "walkdir",
+]
+
+[[package]]
+name = "gix-glob"
+version = "0.5.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "93e43efd776bc543f46f0fd0ca3d920c37af71a764a16f2aebd89765e9ff2993"
+dependencies = [
+ "bitflags",
+ "bstr",
+ "serde",
+]
+
+[[package]]
+name = "gix-hash"
+version = "0.10.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0c0c5a9f4d621d4f4ea046bb331df5c746ca735b8cae5b234cc2be70ee4dbef0"
+dependencies = [
+ "hex",
+ "serde",
+ "thiserror",
+]
+
+[[package]]
+name = "gix-hashtable"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9609c1b8f36f12968e6a6098f7cdb52004f7d42d570f47a2d6d7c16612f19acb"
+dependencies = [
+ "gix-hash",
+ "hashbrown 0.13.2",
+ "parking_lot",
+]
+
+[[package]]
+name = "gix-index"
+version = "0.14.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c12caf7886c7ba06f2b28835cdc2be1dca86bd047d00299d2d49e707ce1c2616"
+dependencies = [
+ "bitflags",
+ "bstr",
+ "btoi",
+ "filetime",
+ "gix-bitmap",
+ "gix-features",
+ "gix-hash",
+ "gix-lock",
+ "gix-object",
+ "gix-traverse",
+ "itoa",
+ "memmap2",
+ "serde",
+ "smallvec",
+ "thiserror",
+]
+
+[[package]]
+name = "gix-lock"
+version = "4.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "66119ff8a4a395d0ea033fef718bc85f8b4f0855874f4ce1e005fc16cfe1f66e"
+dependencies = [
+ "fastrand",
+ "gix-tempfile",
+ "thiserror",
+]
+
+[[package]]
+name = "gix-mailmap"
+version = "0.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2b66aea5e52875cd4915f4957a6f4b75831a36981e2ec3f5fad9e370e444fe1a"
+dependencies = [
+ "bstr",
+ "gix-actor",
+ "serde",
+ "thiserror",
+]
+
+[[package]]
+name = "gix-object"
+version = "0.28.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8df068db9180ee935fbb70504848369e270bdcb576b05c0faa8b9fd3b86fc017"
+dependencies = [
+ "bstr",
+ "btoi",
+ "gix-actor",
+ "gix-features",
+ "gix-hash",
+ "gix-validate",
+ "hex",
+ "itoa",
+ "nom",
+ "serde",
+ "smallvec",
+ "thiserror",
+]
+
+[[package]]
+name = "gix-odb"
+version = "0.42.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e9a5f9e1afbd509761977a2ea02869cedaaba500b4e783deb2e4de5179a55a80"
+dependencies = [
+ "arc-swap",
+ "gix-features",
+ "gix-hash",
+ "gix-object",
+ "gix-pack",
+ "gix-path",
+ "gix-quote",
+ "parking_lot",
+ "serde",
+ "tempfile",
+ "thiserror",
+]
+
+[[package]]
+name = "gix-pack"
+version = "0.32.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e51db84e1459a8022e518d40a8778028d793dbb28e4d35c9a5eaf92658fb0775"
+dependencies = [
+ "clru",
+ "gix-chunk",
+ "gix-diff",
+ "gix-features",
+ "gix-hash",
+ "gix-hashtable",
+ "gix-object",
+ "gix-path",
+ "gix-tempfile",
+ "gix-traverse",
+ "memmap2",
+ "parking_lot",
+ "serde",
+ "smallvec",
+ "thiserror",
+ "uluru",
+]
+
+[[package]]
+name = "gix-packetline"
+version = "0.14.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d63e5e5a9a92d4fc6b63ff9d94954d25c779ce25c98d5bbe2e4399aa42f7073c"
+dependencies = [
+ "bstr",
+ "futures-io",
+ "futures-lite",
+ "hex",
+ "pin-project-lite",
+ "thiserror",
+]
+
+[[package]]
+name = "gix-path"
+version = "0.7.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f6c104a66dec149cb8f7aaafc6ab797654cf82d67f050fd0cb7e7294e328354b"
+dependencies = [
+ "bstr",
+ "thiserror",
+]
+
+[[package]]
+name = "gix-prompt"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a20cebf73229debaa82574c4fd20dcaf00fa8d4bfce823a862c4e990d7a0b5b4"
+dependencies = [
+ "gix-command",
+ "gix-config-value",
+ "nix",
+ "parking_lot",
+ "thiserror",
+]
+
+[[package]]
+name = "gix-protocol"
+version = "0.28.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6d372ab11d5d28ac21800e3f1a6603a67c1ead57f6f5fab07e1e73e960f331c1"
+dependencies = [
+ "async-trait",
+ "bstr",
+ "btoi",
+ "futures-io",
+ "futures-lite",
+ "gix-credentials",
+ "gix-features",
+ "gix-hash",
+ "gix-transport",
+ "maybe-async",
+ "nom",
+ "serde",
+ "thiserror",
+]
+
+[[package]]
+name = "gix-quote"
+version = "0.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a282f5a8d9ee0b09ec47390ac727350c48f2f5c76d803cd8da6b3e7ad56e0bcb"
+dependencies = [
+ "bstr",
+ "btoi",
+ "thiserror",
+]
+
+[[package]]
+name = "gix-ref"
+version = "0.26.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "90a0ed29e581f04b904ecd0c32b11f33b8209b5a0af9c43f415249a4f2fba632"
+dependencies = [
+ "gix-actor",
+ "gix-features",
+ "gix-hash",
+ "gix-lock",
+ "gix-object",
+ "gix-path",
+ "gix-tempfile",
+ "gix-validate",
+ "memmap2",
+ "nom",
+ "serde",
+ "thiserror",
+]
+
+[[package]]
+name = "gix-refspec"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "aba332462bda2e8efeae4302b39a6ed01ad56ef772fd5b7ef197cf2798294d65"
+dependencies = [
+ "bstr",
+ "gix-hash",
+ "gix-revision",
+ "gix-validate",
+ "smallvec",
+ "thiserror",
+]
+
+[[package]]
+name = "gix-revision"
+version = "0.12.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ed98e4a0254953c64bc913bd23146a1de662067d5cf974cbdde396958b39e5b0"
+dependencies = [
+ "bstr",
+ "gix-date",
+ "gix-hash",
+ "gix-hashtable",
+ "gix-object",
+ "serde",
+ "thiserror",
+]
+
+[[package]]
+name = "gix-sec"
+version = "0.6.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e8ffa5bf0772f9b01de501c035b6b084cf9b8bb07dec41e3afc6a17336a65f47"
+dependencies = [
+ "bitflags",
+ "dirs",
+ "gix-path",
+ "libc",
+ "serde",
+ "windows",
+]
+
+[[package]]
+name = "gix-tempfile"
+version = "4.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a8e0227bd284cd16105e8479602bb8af6bddcb800427e881c1feee4806310a31"
+dependencies = [
+ "dashmap",
+ "libc",
+ "once_cell",
+ "parking_lot",
+ "signal-hook",
+ "signal-hook-registry",
+ "tempfile",
+]
+
+[[package]]
+name = "gix-transport"
+version = "0.27.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d633947b36a2fbbc089195bdc71621158f1660c2ff2a6b12b0279c16e2f764bc"
+dependencies = [
+ "async-std",
+ "async-trait",
+ "base64",
+ "bstr",
+ "curl",
+ "futures-io",
+ "futures-lite",
+ "gix-command",
+ "gix-credentials",
+ "gix-features",
+ "gix-packetline",
+ "gix-quote",
+ "gix-sec",
+ "gix-url",
+ "pin-project-lite",
+ "reqwest",
+ "serde",
+ "thiserror",
+]
+
+[[package]]
+name = "gix-traverse"
+version = "0.24.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dd9a4a07bb22168dc79c60e1a6a41919d198187ca83d8a5940ad8d7122a45df3"
+dependencies = [
+ "gix-hash",
+ "gix-hashtable",
+ "gix-object",
+ "thiserror",
+]
+
+[[package]]
+name = "gix-url"
+version = "0.15.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "044072b7ce8601b62dcec841b92129f5cc677072823324121b395d766ac5f528"
+dependencies = [
+ "bstr",
+ "gix-features",
+ "gix-path",
+ "home",
+ "serde",
+ "thiserror",
+ "url",
+]
+
+[[package]]
+name = "gix-validate"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b69ddb780ea1465255e66818d75b7098371c58dbc9560da4488a44b9f5c7e443"
+dependencies = [
+ "bstr",
+ "thiserror",
+]
+
+[[package]]
+name = "gix-worktree"
+version = "0.14.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b7cb9af6e56152953d8fe113c4f9d7cf60cf7a982362711e9200a255579b49cb"
+dependencies = [
+ "bstr",
+ "gix-attributes",
+ "gix-features",
+ "gix-glob",
+ "gix-hash",
+ "gix-index",
+ "gix-object",
+ "gix-path",
+ "io-close",
+ "thiserror",
+]
+
+[[package]]
+name = "gloo-timers"
+version = "0.2.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9b995a66bb87bebce9a0f4a95aed01daca4872c050bfcb21653361c03bc35e5c"
+dependencies = [
+ "futures-channel",
+ "futures-core",
+ "js-sys",
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "h2"
+version = "0.3.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5be7b54589b581f624f566bf5d8eb2bab1db736c51528720b6bd36b96b55924d"
+dependencies = [
+ "bytes",
+ "fnv",
+ "futures-core",
+ "futures-sink",
+ "futures-util",
+ "http",
+ "indexmap",
+ "slab",
+ "tokio",
+ "tokio-util",
+ "tracing",
+]
+
+[[package]]
+name = "hashbrown"
+version = "0.12.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
+
+[[package]]
+name = "hashbrown"
+version = "0.13.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e"
+
+[[package]]
+name = "heck"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8"
+
+[[package]]
+name = "hermit-abi"
+version = "0.2.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ee512640fe35acbfb4bb779db6f0d80704c2cacfa2e39b601ef3e3f47d1ae4c7"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "hex"
+version = "0.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
+
+[[package]]
+name = "home"
+version = "0.5.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "747309b4b440c06d57b0b25f2aee03ee9b5e5397d288c60e21fc709bb98a7408"
+dependencies = [
+ "winapi",
+]
+
+[[package]]
+name = "hostname"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3c731c3e10504cc8ed35cfe2f1db4c9274c3d35fa486e3b31df46f068ef3e867"
+dependencies = [
+ "libc",
+ "match_cfg",
+ "winapi",
+]
+
+[[package]]
+name = "http"
+version = "0.2.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bd6effc99afb63425aff9b05836f029929e345a6148a14b7ecd5ab67af944482"
+dependencies = [
+ "bytes",
+ "fnv",
+ "itoa",
+]
+
+[[package]]
+name = "http-body"
+version = "0.4.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1"
+dependencies = [
+ "bytes",
+ "http",
+ "pin-project-lite",
+]
+
+[[package]]
+name = "httparse"
+version = "1.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904"
+
+[[package]]
+name = "httpdate"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421"
+
+[[package]]
+name = "human_format"
+version = "1.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "86cce260d758a9aa3d7c4b99d55c815a540f8a37514ba6046ab6be402a157cb0"
+
+[[package]]
+name = "hyper"
+version = "0.14.24"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5e011372fa0b68db8350aa7a248930ecc7839bf46d8485577d69f117a75f164c"
+dependencies = [
+ "bytes",
+ "futures-channel",
+ "futures-core",
+ "futures-util",
+ "h2",
+ "http",
+ "http-body",
+ "httparse",
+ "httpdate",
+ "itoa",
+ "pin-project-lite",
+ "socket2",
+ "tokio",
+ "tower-service",
+ "tracing",
+ "want",
+]
+
+[[package]]
+name = "hyper-rustls"
+version = "0.23.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1788965e61b367cd03a62950836d5cd41560c3577d90e40e0819373194d1661c"
+dependencies = [
+ "http",
+ "hyper",
+ "rustls",
+ "tokio",
+ "tokio-rustls",
+]
+
+[[package]]
+name = "hyper-tls"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905"
+dependencies = [
+ "bytes",
+ "hyper",
+ "native-tls",
+ "tokio",
+ "tokio-native-tls",
+]
+
+[[package]]
+name = "idna"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "418a0a6fab821475f634efe3ccc45c013f742efe03d853e8d3355d5cb850ecf8"
+dependencies = [
+ "matches",
+ "unicode-bidi",
+ "unicode-normalization",
+]
+
+[[package]]
+name = "idna"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e14ddfc70884202db2244c223200c204c2bda1bc6e0998d11b5e024d657209e6"
+dependencies = [
+ "unicode-bidi",
+ "unicode-normalization",
+]
+
+[[package]]
+name = "imara-diff"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e98c1d0ad70fc91b8b9654b1f33db55e59579d3b3de2bffdced0fdb810570cb8"
+dependencies = [
+ "ahash",
+ "hashbrown 0.12.3",
+]
+
+[[package]]
+name = "indexmap"
+version = "1.9.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1885e79c1fc4b10f0e172c475f458b7f7b93061064d98c3293e98c5ba0c8b399"
+dependencies = [
+ "autocfg",
+ "hashbrown 0.12.3",
+]
+
+[[package]]
+name = "instant"
+version = "0.1.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "io-close"
+version = "0.3.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9cadcf447f06744f8ce713d2d6239bb5bde2c357a452397a9ed90c625da390bc"
+dependencies = [
+ "libc",
+ "winapi",
+]
+
+[[package]]
+name = "io-lifetimes"
+version = "1.0.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1abeb7a0dd0f8181267ff8adc397075586500b81b28a73e8a0208b00fc170fb3"
+dependencies = [
+ "libc",
+ "windows-sys 0.45.0",
+]
+
+[[package]]
+name = "ipconfig"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bd302af1b90f2463a98fa5ad469fc212c8e3175a41c3068601bfa2727591c5be"
+dependencies = [
+ "socket2",
+ "widestring",
+ "winapi",
+ "winreg",
+]
+
+[[package]]
+name = "ipnet"
+version = "2.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "30e22bd8629359895450b59ea7a776c850561b96a3b1d31321c1949d9e6c9146"
+
+[[package]]
+name = "is_ci"
+version = "1.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "616cde7c720bb2bb5824a224687d8f77bfd38922027f01d825cd7453be5099fb"
+
+[[package]]
+name = "itoa"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "453ad9f582a441959e5f0d088b02ce04cfe8d51a8eaf077f12ac6d3e94164ca6"
+
+[[package]]
+name = "js-sys"
+version = "0.3.61"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "445dde2150c55e483f3d8416706b97ec8e8237c307e5b7b4b8dd15e6af2a0730"
+dependencies = [
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "jwalk"
+version = "0.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2735847566356cd2179a2a38264839308f7079fa96e6bd5a42d740460e003c56"
+dependencies = [
+ "crossbeam",
+ "rayon",
+]
+
+[[package]]
+name = "kv-log-macro"
+version = "1.0.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0de8b303297635ad57c9f5059fd9cee7a47f8e8daa09df0fcd07dd39fb22977f"
+dependencies = [
+ "log",
+]
+
+[[package]]
+name = "lazy_static"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
+
+[[package]]
+name = "libc"
+version = "0.2.139"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "201de327520df007757c1f0adce6e827fe8562fbc28bfd9c15571c66ca1f5f79"
+
+[[package]]
+name = "libz-sys"
+version = "1.1.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9702761c3935f8cc2f101793272e202c72b99da8f4224a19ddcf1279a6450bbf"
+dependencies = [
+ "cc",
+ "cmake",
+ "libc",
+ "pkg-config",
+ "vcpkg",
+]
+
+[[package]]
+name = "linked-hash-map"
+version = "0.5.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f"
+
+[[package]]
+name = "linux-raw-sys"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f051f77a7c8e6957c0696eac88f26b0117e54f52d3fc682ab19397a8812846a4"
+
+[[package]]
+name = "litrs"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f9275e0933cf8bb20f008924c0cb07a0692fe54d8064996520bf998de9eb79aa"
+
+[[package]]
+name = "lock_api"
+version = "0.4.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df"
+dependencies = [
+ "autocfg",
+ "scopeguard",
+]
+
+[[package]]
+name = "log"
+version = "0.4.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e"
+dependencies = [
+ "cfg-if",
+ "value-bag",
+]
+
+[[package]]
+name = "lru-cache"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "31e24f1ad8321ca0e8a1e0ac13f23cb668e6f5466c2c57319f6a5cf1cc8e3b1c"
+dependencies = [
+ "linked-hash-map",
+]
+
+[[package]]
+name = "match_cfg"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ffbee8634e0d45d258acb448e7eaab3fce7a0a467395d4d9f228e3c1f01fb2e4"
+
+[[package]]
+name = "matches"
+version = "0.1.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5"
+
+[[package]]
+name = "maybe-async"
+version = "0.2.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0f1b8c13cb1f814b634a96b2c725449fe7ed464a7b8781de8688be5ffbd3f305"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "memchr"
+version = "2.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
+
+[[package]]
+name = "memmap2"
+version = "0.5.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "83faa42c0a078c393f6b29d5db232d8be22776a891f8f56e5284faee4a20b327"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "memoffset"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d61c719bcfbcf5d62b3a09efa6088de8c54bc0bfcd3ea7ae39fcc186108b8de1"
+dependencies = [
+ "autocfg",
+]
+
+[[package]]
+name = "mime"
+version = "0.3.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2a60c7ce501c71e03a9c9c0d35b861413ae925bd979cc7a4e30d060069aaac8d"
+
+[[package]]
+name = "minimal-lexical"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
+
+[[package]]
+name = "miniz_oxide"
+version = "0.6.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b275950c28b37e794e8c55d88aeb5e139d0ce23fdbbeda68f8d7174abdf9e8fa"
+dependencies = [
+ "adler",
+]
+
+[[package]]
+name = "mio"
+version = "0.8.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5b9d9a46eff5b4ff64b45a9e316a6d1e0bc719ef429cbec4dc630684212bfdf9"
+dependencies = [
+ "libc",
+ "log",
+ "wasi",
+ "windows-sys 0.45.0",
+]
+
+[[package]]
+name = "native-tls"
+version = "0.2.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "07226173c32f2926027b63cce4bcd8076c3552846cbe7925f3aaffeac0a3b92e"
+dependencies = [
+ "lazy_static",
+ "libc",
+ "log",
+ "openssl",
+ "openssl-probe",
+ "openssl-sys",
+ "schannel",
+ "security-framework",
+ "security-framework-sys",
+ "tempfile",
+]
+
+[[package]]
+name = "nix"
+version = "0.26.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bfdda3d196821d6af13126e40375cdf7da646a96114af134d5f417a9a1dc8e1a"
+dependencies = [
+ "bitflags",
+ "cfg-if",
+ "libc",
+ "static_assertions",
+]
+
+[[package]]
+name = "nom"
+version = "7.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a"
+dependencies = [
+ "memchr",
+ "minimal-lexical",
+]
+
+[[package]]
+name = "num-traits"
+version = "0.2.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd"
+dependencies = [
+ "autocfg",
+]
+
+[[package]]
+name = "num_cpus"
+version = "1.15.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b"
+dependencies = [
+ "hermit-abi",
+ "libc",
+]
+
+[[package]]
+name = "num_threads"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2819ce041d2ee131036f4fc9d6ae7ae125a3a40e97ba64d04fe799ad9dabbb44"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "once_cell"
+version = "1.17.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3"
+
+[[package]]
+name = "openssl"
+version = "0.10.45"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b102428fd03bc5edf97f62620f7298614c45cedf287c271e7ed450bbaf83f2e1"
+dependencies = [
+ "bitflags",
+ "cfg-if",
+ "foreign-types",
+ "libc",
+ "once_cell",
+ "openssl-macros",
+ "openssl-sys",
+]
+
+[[package]]
+name = "openssl-macros"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b501e44f11665960c7e7fcf062c7d96a14ade4aa98116c004b2e37b5be7d736c"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "openssl-probe"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf"
+
+[[package]]
+name = "openssl-sys"
+version = "0.9.80"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "23bbbf7854cd45b83958ebe919f0e8e516793727652e27fda10a8384cfc790b7"
+dependencies = [
+ "autocfg",
+ "cc",
+ "libc",
+ "pkg-config",
+ "vcpkg",
+]
+
+[[package]]
+name = "parking"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "427c3892f9e783d91cc128285287e70a59e206ca452770ece88a76f7a3eddd72"
+
+[[package]]
+name = "parking_lot"
+version = "0.12.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f"
+dependencies = [
+ "lock_api",
+ "parking_lot_core",
+]
+
+[[package]]
+name = "parking_lot_core"
+version = "0.9.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9069cbb9f99e3a5083476ccb29ceb1de18b9118cafa53e90c9551235de2b9521"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "redox_syscall",
+ "smallvec",
+ "windows-sys 0.45.0",
+]
+
+[[package]]
+name = "percent-encoding"
+version = "2.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "478c572c3d73181ff3c2539045f6eb99e5491218eae919370993b890cdbdd98e"
+
+[[package]]
+name = "pin-project-lite"
+version = "0.2.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116"
+
+[[package]]
+name = "pin-utils"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
+
+[[package]]
+name = "pkg-config"
+version = "0.3.26"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6ac9a59f73473f1b8d852421e59e64809f025994837ef743615c6d0c5b305160"
+
+[[package]]
+name = "polling"
+version = "2.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "22122d5ec4f9fe1b3916419b76be1e80bcb93f618d071d2edf841b137b2a2bd6"
+dependencies = [
+ "autocfg",
+ "cfg-if",
+ "libc",
+ "log",
+ "wepoll-ffi",
+ "windows-sys 0.42.0",
+]
+
+[[package]]
+name = "ppv-lite86"
+version = "0.2.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.51"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5d727cae5b39d21da60fa540906919ad737832fe0b1c165da3a34d6548c849d6"
+dependencies = [
+ "unicode-ident",
+]
+
+[[package]]
+name = "prodash"
+version = "23.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d73c6b64cb5b99eb63ca97d378685712617ec0172ff5c04cd47a489d3e2c51f8"
+dependencies = [
+ "bytesize",
+ "human_format",
+ "parking_lot",
+]
+
+[[package]]
+name = "quick-error"
+version = "1.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0"
+
+[[package]]
+name = "quote"
+version = "1.0.23"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8856d8364d252a14d474036ea1358d63c9e6965c8e5c1885c18f73d70bff9c7b"
+dependencies = [
+ "proc-macro2",
+]
+
+[[package]]
+name = "rand"
+version = "0.8.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
+dependencies = [
+ "libc",
+ "rand_chacha",
+ "rand_core",
+]
+
+[[package]]
+name = "rand_chacha"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
+dependencies = [
+ "ppv-lite86",
+ "rand_core",
+]
+
+[[package]]
+name = "rand_core"
+version = "0.6.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
+dependencies = [
+ "getrandom",
+]
+
+[[package]]
+name = "rayon"
+version = "1.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1d2df5196e37bcc87abebc0053e20787d73847bb33134a69841207dd0a47f03b"
+dependencies = [
+ "either",
+ "rayon-core",
+]
+
+[[package]]
+name = "rayon-core"
+version = "1.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4b8f95bd6966f5c87776639160a66bd8ab9895d9d4ab01ddba9fc60661aebe8d"
+dependencies = [
+ "crossbeam-channel",
+ "crossbeam-deque",
+ "crossbeam-utils",
+ "num_cpus",
+]
+
+[[package]]
+name = "redox_syscall"
+version = "0.2.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a"
+dependencies = [
+ "bitflags",
+]
+
+[[package]]
+name = "redox_users"
+version = "0.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b"
+dependencies = [
+ "getrandom",
+ "redox_syscall",
+ "thiserror",
+]
+
+[[package]]
+name = "regex"
+version = "1.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "48aaa5748ba571fb95cd2c85c09f629215d3a6ece942baa100950af03a34f733"
+dependencies = [
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-automata"
+version = "0.1.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
+
+[[package]]
+name = "regex-syntax"
+version = "0.6.28"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "456c603be3e8d448b072f410900c09faf164fbce2d480456f50eea6e25f9c848"
+
+[[package]]
+name = "reqwest"
+version = "0.11.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "21eed90ec8570952d53b772ecf8f206aa1ec9a3d76b2521c56c42973f2d91ee9"
+dependencies = [
+ "base64",
+ "bytes",
+ "encoding_rs",
+ "futures-core",
+ "futures-util",
+ "h2",
+ "http",
+ "http-body",
+ "hyper",
+ "hyper-rustls",
+ "hyper-tls",
+ "ipnet",
+ "js-sys",
+ "log",
+ "mime",
+ "native-tls",
+ "once_cell",
+ "percent-encoding",
+ "pin-project-lite",
+ "rustls",
+ "rustls-pemfile",
+ "serde",
+ "serde_json",
+ "serde_urlencoded",
+ "tokio",
+ "tokio-native-tls",
+ "tokio-rustls",
+ "tower-service",
+ "trust-dns-resolver",
+ "url",
+ "wasm-bindgen",
+ "wasm-bindgen-futures",
+ "web-sys",
+ "webpki-roots",
+ "winreg",
+]
+
+[[package]]
+name = "resolv-conf"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "52e44394d2086d010551b14b53b1f24e31647570cd1deb0379e2c21b329aba00"
+dependencies = [
+ "hostname",
+ "quick-error",
+]
+
+[[package]]
+name = "ring"
+version = "0.16.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc"
+dependencies = [
+ "cc",
+ "libc",
+ "once_cell",
+ "spin",
+ "untrusted",
+ "web-sys",
+ "winapi",
+]
+
+[[package]]
+name = "rustix"
+version = "0.36.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fd5c6ff11fecd55b40746d1995a02f2eb375bf8c00d192d521ee09f42bef37bc"
+dependencies = [
+ "bitflags",
+ "errno",
+ "io-lifetimes",
+ "libc",
+ "linux-raw-sys",
+ "windows-sys 0.45.0",
+]
+
+[[package]]
+name = "rustls"
+version = "0.20.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fff78fc74d175294f4e83b28343315ffcfb114b156f0185e9741cb5570f50e2f"
+dependencies = [
+ "log",
+ "ring",
+ "sct",
+ "webpki",
+]
+
+[[package]]
+name = "rustls-pemfile"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d194b56d58803a43635bdc398cd17e383d6f71f9182b9a192c127ca42494a59b"
+dependencies = [
+ "base64",
+]
+
+[[package]]
+name = "ryu"
+version = "1.0.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f91339c0467de62360649f8d3e185ca8de4224ff281f66000de5eb2a77a79041"
+
+[[package]]
+name = "same-file"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
+dependencies = [
+ "winapi-util",
+]
+
+[[package]]
+name = "schannel"
+version = "0.1.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "713cfb06c7059f3588fb8044c0fad1d09e3c01d225e25b9220dbfdcf16dbb1b3"
+dependencies = [
+ "windows-sys 0.42.0",
+]
+
+[[package]]
+name = "scopeguard"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
+
+[[package]]
+name = "sct"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4"
+dependencies = [
+ "ring",
+ "untrusted",
+]
+
+[[package]]
+name = "security-framework"
+version = "2.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a332be01508d814fed64bf28f798a146d73792121129962fdf335bb3c49a4254"
+dependencies = [
+ "bitflags",
+ "core-foundation",
+ "core-foundation-sys",
+ "libc",
+ "security-framework-sys",
+]
+
+[[package]]
+name = "security-framework-sys"
+version = "2.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "31c9bb296072e961fcbd8853511dd39c2d8be2deb1e17c6860b1d30732b323b4"
+dependencies = [
+ "core-foundation-sys",
+ "libc",
+]
+
+[[package]]
+name = "serde"
+version = "1.0.152"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb"
+dependencies = [
+ "serde_derive",
+]
+
+[[package]]
+name = "serde_derive"
+version = "1.0.152"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "serde_json"
+version = "1.0.93"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cad406b69c91885b5107daf2c29572f6c8cdb3c66826821e286c533490c0bc76"
+dependencies = [
+ "itoa",
+ "ryu",
+ "serde",
+]
+
+[[package]]
+name = "serde_urlencoded"
+version = "0.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd"
+dependencies = [
+ "form_urlencoded",
+ "itoa",
+ "ryu",
+ "serde",
+]
+
+[[package]]
+name = "serial_test"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "538c30747ae860d6fb88330addbbd3e0ddbe46d662d032855596d8a8ca260611"
+dependencies = [
+ "dashmap",
+ "futures",
+ "lazy_static",
+ "log",
+ "parking_lot",
+ "serial_test_derive",
+]
+
+[[package]]
+name = "serial_test_derive"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "079a83df15f85d89a68d64ae1238f142f172b1fa915d0d76b26a7cba1b659a69"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "sha1"
+version = "0.10.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3"
+dependencies = [
+ "cfg-if",
+ "cpufeatures",
+ "digest",
+ "sha1-asm",
+]
+
+[[package]]
+name = "sha1-asm"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "563d4f7100bc3fce234e5f37bbf63dc2752558964505ba6ac3f7204bdc59eaac"
+dependencies = [
+ "cc",
+]
+
+[[package]]
+name = "sha1_smol"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ae1a47186c03a32177042e55dbc5fd5aee900b8e0069a8d70fba96a9375cd012"
+
+[[package]]
+name = "signal-hook"
+version = "0.3.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "732768f1176d21d09e076c23a93123d40bba92d50c4058da34d45c8de8e682b9"
+dependencies = [
+ "libc",
+ "signal-hook-registry",
+]
+
+[[package]]
+name = "signal-hook-registry"
+version = "1.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d8229b473baa5980ac72ef434c4415e70c4b5e71b423043adb4ba059f89c99a1"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "slab"
+version = "0.4.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6528351c9bc8ab22353f9d776db39a20288e8d6c37ef8cfe3317cf875eecfc2d"
+dependencies = [
+ "autocfg",
+]
+
+[[package]]
+name = "smallvec"
+version = "1.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "socket2"
+version = "0.4.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "64a4a911eed85daf18834cfaa86a79b7d266ff93ff5ba14005426219480ed662"
+dependencies = [
+ "libc",
+ "winapi",
+]
+
+[[package]]
+name = "spin"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d"
+
+[[package]]
+name = "static_assertions"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
+
+[[package]]
+name = "syn"
+version = "1.0.109"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-ident",
+]
+
+[[package]]
+name = "tempfile"
+version = "3.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "af18f7ae1acd354b992402e9ec5864359d693cd8a79dcbef59f76891701c1e95"
+dependencies = [
+ "cfg-if",
+ "fastrand",
+ "redox_syscall",
+ "rustix",
+ "windows-sys 0.42.0",
+]
+
+[[package]]
+name = "thiserror"
+version = "1.0.38"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6a9cd18aa97d5c45c6603caea1da6628790b37f7a34b6ca89522331c5180fed0"
+dependencies = [
+ "thiserror-impl",
+]
+
+[[package]]
+name = "thiserror-impl"
+version = "1.0.38"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1fb327af4685e4d03fa8cbcf1716380da910eeb2bb8be417e7f9fd3fb164f36f"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "time"
+version = "0.3.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cd0cbfecb4d19b5ea75bb31ad904eb5b9fa13f21079c3b92017ebdf4999a5890"
+dependencies = [
+ "itoa",
+ "libc",
+ "num_threads",
+ "serde",
+ "time-core",
+ "time-macros",
+]
+
+[[package]]
+name = "time-core"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2e153e1f1acaef8acc537e68b44906d2db6436e2b35ac2c6b42640fff91f00fd"
+
+[[package]]
+name = "time-macros"
+version = "0.2.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fd80a657e71da814b8e5d60d3374fc6d35045062245d80224748ae522dd76f36"
+dependencies = [
+ "time-core",
+]
+
+[[package]]
+name = "tinyvec"
+version = "1.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50"
+dependencies = [
+ "tinyvec_macros",
+]
+
+[[package]]
+name = "tinyvec_macros"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
+
+[[package]]
+name = "tokio"
+version = "1.26.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "03201d01c3c27a29c8a5cee5b55a93ddae1ccf6f08f65365c2c918f8c1b76f64"
+dependencies = [
+ "autocfg",
+ "bytes",
+ "libc",
+ "memchr",
+ "mio",
+ "num_cpus",
+ "pin-project-lite",
+ "socket2",
+ "windows-sys 0.45.0",
+]
+
+[[package]]
+name = "tokio-native-tls"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2"
+dependencies = [
+ "native-tls",
+ "tokio",
+]
+
+[[package]]
+name = "tokio-rustls"
+version = "0.23.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59"
+dependencies = [
+ "rustls",
+ "tokio",
+ "webpki",
+]
+
+[[package]]
+name = "tokio-util"
+version = "0.7.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5427d89453009325de0d8f342c9490009f76e999cb7672d77e46267448f7e6b2"
+dependencies = [
+ "bytes",
+ "futures-core",
+ "futures-sink",
+ "pin-project-lite",
+ "tokio",
+ "tracing",
+]
+
+[[package]]
+name = "tower-service"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52"
+
+[[package]]
+name = "tracing"
+version = "0.1.37"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8"
+dependencies = [
+ "cfg-if",
+ "pin-project-lite",
+ "tracing-attributes",
+ "tracing-core",
+]
+
+[[package]]
+name = "tracing-attributes"
+version = "0.1.23"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4017f8f45139870ca7e672686113917c71c7a6e02d4924eda67186083c03081a"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "tracing-core"
+version = "0.1.30"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "24eb03ba0eab1fd845050058ce5e616558e8f8d8fca633e6b163fe25c797213a"
+dependencies = [
+ "once_cell",
+]
+
+[[package]]
+name = "trust-dns-proto"
+version = "0.22.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4f7f83d1e4a0e4358ac54c5c3681e5d7da5efc5a7a632c90bb6d6669ddd9bc26"
+dependencies = [
+ "async-trait",
+ "cfg-if",
+ "data-encoding",
+ "enum-as-inner",
+ "futures-channel",
+ "futures-io",
+ "futures-util",
+ "idna 0.2.3",
+ "ipnet",
+ "lazy_static",
+ "rand",
+ "smallvec",
+ "thiserror",
+ "tinyvec",
+ "tokio",
+ "tracing",
+ "url",
+]
+
+[[package]]
+name = "trust-dns-resolver"
+version = "0.22.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "aff21aa4dcefb0a1afbfac26deb0adc93888c7d295fb63ab273ef276ba2b7cfe"
+dependencies = [
+ "cfg-if",
+ "futures-util",
+ "ipconfig",
+ "lazy_static",
+ "lru-cache",
+ "parking_lot",
+ "resolv-conf",
+ "smallvec",
+ "thiserror",
+ "tokio",
+ "tracing",
+ "trust-dns-proto",
+]
+
+[[package]]
+name = "try-lock"
+version = "0.2.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed"
+
+[[package]]
+name = "typenum"
+version = "1.16.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba"
+
+[[package]]
+name = "uluru"
+version = "3.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "794a32261a1f5eb6a4462c81b59cec87b5c27d5deea7dd1ac8fc781c41d226db"
+dependencies = [
+ "arrayvec",
+]
+
+[[package]]
+name = "unicode-bidi"
+version = "0.3.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d54675592c1dbefd78cbd98db9bacd89886e1ca50692a0692baefffdeb92dd58"
+
+[[package]]
+name = "unicode-bom"
+version = "1.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "63ec69f541d875b783ca40184d655f2927c95f0bffd486faa83cd3ac3529ec32"
+
+[[package]]
+name = "unicode-ident"
+version = "1.0.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "775c11906edafc97bc378816b94585fbd9a054eabaf86fdd0ced94af449efab7"
+
+[[package]]
+name = "unicode-normalization"
+version = "0.1.22"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921"
+dependencies = [
+ "tinyvec",
+]
+
+[[package]]
+name = "untrusted"
+version = "0.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a"
+
+[[package]]
+name = "url"
+version = "2.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0d68c799ae75762b8c3fe375feb6600ef5602c883c5d21eb51c09f22b83c4643"
+dependencies = [
+ "form_urlencoded",
+ "idna 0.3.0",
+ "percent-encoding",
+]
+
+[[package]]
+name = "value-bag"
+version = "1.0.0-alpha.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2209b78d1249f7e6f3293657c9779fe31ced465df091bbd433a1cf88e916ec55"
+dependencies = [
+ "ctor",
+ "version_check",
+]
+
+[[package]]
+name = "vcpkg"
+version = "0.2.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
+
+[[package]]
+name = "version_check"
+version = "0.9.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
+
+[[package]]
+name = "waker-fn"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9d5b2c62b4012a3e1eca5a7e077d13b3bf498c4073e33ccd58626607748ceeca"
+
+[[package]]
+name = "walkdir"
+version = "2.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "808cf2735cd4b6866113f648b791c6adc5714537bc222d9347bb203386ffda56"
+dependencies = [
+ "same-file",
+ "winapi",
+ "winapi-util",
+]
+
+[[package]]
+name = "want"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1ce8a968cb1cd110d136ff8b819a556d6fb6d919363c61534f6860c7eb172ba0"
+dependencies = [
+ "log",
+ "try-lock",
+]
+
+[[package]]
+name = "wasi"
+version = "0.11.0+wasi-snapshot-preview1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
+
+[[package]]
+name = "wasm-bindgen"
+version = "0.2.84"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "31f8dcbc21f30d9b8f2ea926ecb58f6b91192c17e9d33594b3df58b2007ca53b"
+dependencies = [
+ "cfg-if",
+ "wasm-bindgen-macro",
+]
+
+[[package]]
+name = "wasm-bindgen-backend"
+version = "0.2.84"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "95ce90fd5bcc06af55a641a86428ee4229e44e07033963a2290a8e241607ccb9"
+dependencies = [
+ "bumpalo",
+ "log",
+ "once_cell",
+ "proc-macro2",
+ "quote",
+ "syn",
+ "wasm-bindgen-shared",
+]
+
+[[package]]
+name = "wasm-bindgen-futures"
+version = "0.4.34"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f219e0d211ba40266969f6dbdd90636da12f75bee4fc9d6c23d1260dadb51454"
+dependencies = [
+ "cfg-if",
+ "js-sys",
+ "wasm-bindgen",
+ "web-sys",
+]
+
+[[package]]
+name = "wasm-bindgen-macro"
+version = "0.2.84"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4c21f77c0bedc37fd5dc21f897894a5ca01e7bb159884559461862ae90c0b4c5"
+dependencies = [
+ "quote",
+ "wasm-bindgen-macro-support",
+]
+
+[[package]]
+name = "wasm-bindgen-macro-support"
+version = "0.2.84"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2aff81306fcac3c7515ad4e177f521b5c9a15f2b08f4e32d823066102f35a5f6"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+ "wasm-bindgen-backend",
+ "wasm-bindgen-shared",
+]
+
+[[package]]
+name = "wasm-bindgen-shared"
+version = "0.2.84"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0046fef7e28c3804e5e38bfa31ea2a0f73905319b677e57ebe37e49358989b5d"
+
+[[package]]
+name = "web-sys"
+version = "0.3.61"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e33b99f4b23ba3eec1a53ac264e35a755f00e966e0065077d6027c0f575b0b97"
+dependencies = [
+ "js-sys",
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "webpki"
+version = "0.22.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f095d78192e208183081cc07bc5515ef55216397af48b873e5edcd72637fa1bd"
+dependencies = [
+ "ring",
+ "untrusted",
+]
+
+[[package]]
+name = "webpki-roots"
+version = "0.22.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b6c71e40d7d2c34a5106301fb632274ca37242cd0c9d3e64dbece371a40a2d87"
+dependencies = [
+ "webpki",
+]
+
+[[package]]
+name = "wepoll-ffi"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d743fdedc5c64377b5fc2bc036b01c7fd642205a0d96356034ae3404d49eb7fb"
+dependencies = [
+ "cc",
+]
+
+[[package]]
+name = "widestring"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "17882f045410753661207383517a6f62ec3dbeb6a4ed2acce01f0728238d1983"
+
+[[package]]
+name = "winapi"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
+dependencies = [
+ "winapi-i686-pc-windows-gnu",
+ "winapi-x86_64-pc-windows-gnu",
+]
+
+[[package]]
+name = "winapi-i686-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
+
+[[package]]
+name = "winapi-util"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
+dependencies = [
+ "winapi",
+]
+
+[[package]]
+name = "winapi-x86_64-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
+
+[[package]]
+name = "windows"
+version = "0.43.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "04662ed0e3e5630dfa9b26e4cb823b817f1a9addda855d973a9458c236556244"
+dependencies = [
+ "windows_aarch64_gnullvm",
+ "windows_aarch64_msvc",
+ "windows_i686_gnu",
+ "windows_i686_msvc",
+ "windows_x86_64_gnu",
+ "windows_x86_64_gnullvm",
+ "windows_x86_64_msvc",
+]
+
+[[package]]
+name = "windows-sys"
+version = "0.42.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7"
+dependencies = [
+ "windows_aarch64_gnullvm",
+ "windows_aarch64_msvc",
+ "windows_i686_gnu",
+ "windows_i686_msvc",
+ "windows_x86_64_gnu",
+ "windows_x86_64_gnullvm",
+ "windows_x86_64_msvc",
+]
+
+[[package]]
+name = "windows-sys"
+version = "0.45.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0"
+dependencies = [
+ "windows-targets",
+]
+
+[[package]]
+name = "windows-targets"
+version = "0.42.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8e2522491fbfcd58cc84d47aeb2958948c4b8982e9a2d8a2a35bbaed431390e7"
+dependencies = [
+ "windows_aarch64_gnullvm",
+ "windows_aarch64_msvc",
+ "windows_i686_gnu",
+ "windows_i686_msvc",
+ "windows_x86_64_gnu",
+ "windows_x86_64_gnullvm",
+ "windows_x86_64_msvc",
+]
+
+[[package]]
+name = "windows_aarch64_gnullvm"
+version = "0.42.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8c9864e83243fdec7fc9c5444389dcbbfd258f745e7853198f365e3c4968a608"
+
+[[package]]
+name = "windows_aarch64_msvc"
+version = "0.42.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4c8b1b673ffc16c47a9ff48570a9d85e25d265735c503681332589af6253c6c7"
+
+[[package]]
+name = "windows_i686_gnu"
+version = "0.42.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "de3887528ad530ba7bdbb1faa8275ec7a1155a45ffa57c37993960277145d640"
+
+[[package]]
+name = "windows_i686_msvc"
+version = "0.42.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bf4d1122317eddd6ff351aa852118a2418ad4214e6613a50e0191f7004372605"
+
+[[package]]
+name = "windows_x86_64_gnu"
+version = "0.42.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c1040f221285e17ebccbc2591ffdc2d44ee1f9186324dd3e84e99ac68d699c45"
+
+[[package]]
+name = "windows_x86_64_gnullvm"
+version = "0.42.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "628bfdf232daa22b0d64fdb62b09fcc36bb01f05a3939e20ab73aaf9470d0463"
+
+[[package]]
+name = "windows_x86_64_msvc"
+version = "0.42.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "447660ad36a13288b1db4d4248e857b510e8c3a225c822ba4fb748c0aafecffd"
+
+[[package]]
+name = "winreg"
+version = "0.10.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "80d0f4e272c85def139476380b12f9ac60926689dd2e01d4923222f40580869d"
+dependencies = [
+ "winapi",
+]
diff --git a/vendor/gix/Cargo.toml b/vendor/gix/Cargo.toml
new file mode 100644
index 000000000..6341d11a7
--- /dev/null
+++ b/vendor/gix/Cargo.toml
@@ -0,0 +1,288 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies.
+#
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
+
+[package]
+edition = "2021"
+rust-version = "1.64"
+name = "gix"
+version = "0.39.0"
+authors = ["Sebastian Thiel <sebastian.thiel@icloud.com>"]
+include = [
+ "src/**/*",
+ "CHANGELOG.md",
+]
+description = "Interact with git repositories just like git would"
+license = "MIT/Apache-2.0"
+repository = "https://github.com/Byron/gitoxide"
+
+[package.metadata.docs.rs]
+features = [
+ "document-features",
+ "max-performance",
+ "blocking-network-client",
+ "serde1",
+]
+rustdoc-args = [
+ "--cfg",
+ "docsrs",
+]
+
+[lib]
+test = true
+doctest = false
+
+[[example]]
+name = "clone"
+path = "examples/clone.rs"
+required-features = ["blocking-network-client"]
+
+[[test]]
+name = "git"
+path = "tests/git.rs"
+required-features = []
+
+[[test]]
+name = "git-with-regex"
+path = "tests/git-with-regex.rs"
+required-features = ["regex"]
+
+[dependencies.async-std]
+version = "1.12.0"
+optional = true
+
+[dependencies.document-features]
+version = "0.2.0"
+optional = true
+
+[dependencies.gix-actor]
+version = "^0.19.0"
+
+[dependencies.gix-attributes]
+version = "^0.10.0"
+
+[dependencies.gix-config]
+version = "^0.18.0"
+
+[dependencies.gix-credentials]
+version = "^0.11.0"
+
+[dependencies.gix-date]
+version = "^0.4.3"
+
+[dependencies.gix-diff]
+version = "^0.28.0"
+
+[dependencies.gix-discover]
+version = "^0.15.0"
+
+[dependencies.gix-features]
+version = "^0.28.0"
+features = [
+ "progress",
+ "once_cell",
+]
+
+[dependencies.gix-glob]
+version = "^0.5.5"
+
+[dependencies.gix-hash]
+version = "^0.10.3"
+
+[dependencies.gix-hashtable]
+version = "^0.1.2"
+
+[dependencies.gix-index]
+version = "^0.14.0"
+
+[dependencies.gix-lock]
+version = "^4.0.0"
+
+[dependencies.gix-mailmap]
+version = "^0.11.0"
+
+[dependencies.gix-object]
+version = "^0.28.0"
+
+[dependencies.gix-odb]
+version = "^0.42.0"
+
+[dependencies.gix-pack]
+version = "^0.32.0"
+features = ["object-cache-dynamic"]
+
+[dependencies.gix-path]
+version = "^0.7.2"
+
+[dependencies.gix-prompt]
+version = "^0.3.2"
+
+[dependencies.gix-protocol]
+version = "^0.28.0"
+optional = true
+
+[dependencies.gix-ref]
+version = "^0.26.0"
+
+[dependencies.gix-refspec]
+version = "^0.9.0"
+
+[dependencies.gix-revision]
+version = "^0.12.0"
+
+[dependencies.gix-sec]
+version = "^0.6.2"
+
+[dependencies.gix-tempfile]
+version = "^4.0.0"
+features = ["signals"]
+default-features = false
+
+[dependencies.gix-transport]
+version = "^0.27.0"
+optional = true
+
+[dependencies.gix-traverse]
+version = "^0.24.0"
+
+[dependencies.gix-url]
+version = "^0.15.0"
+
+[dependencies.gix-validate]
+version = "^0.7.3"
+
+[dependencies.gix-worktree]
+version = "^0.14.0"
+
+[dependencies.log]
+version = "0.4.14"
+
+[dependencies.once_cell]
+version = "1.14.0"
+
+[dependencies.prodash]
+version = "23.1"
+features = ["progress-tree"]
+optional = true
+default-features = false
+
+[dependencies.regex]
+version = "1.6.0"
+features = ["std"]
+optional = true
+default-features = false
+
+[dependencies.reqwest-for-configuration-only]
+version = "0.11.13"
+optional = true
+default-features = false
+package = "reqwest"
+
+[dependencies.serde]
+version = "1.0.114"
+features = ["derive"]
+optional = true
+default-features = false
+
+[dependencies.signal-hook]
+version = "0.3.9"
+default-features = false
+
+[dependencies.smallvec]
+version = "1.9.0"
+
+[dependencies.thiserror]
+version = "1.0.26"
+
+[dev-dependencies.anyhow]
+version = "1"
+
+[dev-dependencies.async-std]
+version = "1.12.0"
+features = ["attributes"]
+
+[dev-dependencies.is_ci]
+version = "1.1.1"
+
+[dev-dependencies.serial_test]
+version = "1.0.0"
+
+[dev-dependencies.walkdir]
+version = "2.3.2"
+
+[features]
+async-network-client = ["gix-protocol/async-client"]
+async-network-client-async-std = [
+ "async-std",
+ "async-network-client",
+ "gix-transport/async-std",
+]
+blocking-http-transport-curl = [
+ "blocking-network-client",
+ "gix-transport/http-client-curl",
+]
+blocking-http-transport-reqwest = [
+ "blocking-network-client",
+ "gix-transport/http-client-reqwest",
+]
+blocking-http-transport-reqwest-native-tls = [
+ "blocking-http-transport-reqwest",
+ "reqwest-for-configuration-only/default-tls",
+]
+blocking-http-transport-reqwest-rust-tls = [
+ "blocking-http-transport-reqwest",
+ "reqwest-for-configuration-only/rustls-tls",
+ "reqwest-for-configuration-only/trust-dns",
+]
+blocking-network-client = ["gix-protocol/blocking-client"]
+cache-efficiency-debug = ["gix-features/cache-efficiency-debug"]
+comfort = [
+ "gix-features/progress-unit-bytes",
+ "gix-features/progress-unit-human-numbers",
+]
+default = [
+ "max-performance-safe",
+ "comfort",
+]
+fast-sha1 = ["gix-features/fast-sha1"]
+hp-tempfile-registry = ["gix-tempfile/hp-hashmap"]
+max-performance = [
+ "max-performance-safe",
+ "gix-features/zlib-ng-compat",
+ "fast-sha1",
+]
+max-performance-safe = [
+ "gix-features/parallel",
+ "pack-cache-lru-static",
+ "pack-cache-lru-dynamic",
+ "gix-features/fs-walkdir-parallel",
+]
+pack-cache-lru-dynamic = ["gix-pack/pack-cache-lru-dynamic"]
+pack-cache-lru-static = ["gix-pack/pack-cache-lru-static"]
+progress-tree = ["prodash/progress-tree"]
+serde1 = [
+ "serde",
+ "gix-pack/serde1",
+ "gix-object/serde1",
+ "gix-protocol?/serde1",
+ "gix-transport?/serde1",
+ "gix-ref/serde1",
+ "gix-odb/serde1",
+ "gix-index/serde1",
+ "gix-mailmap/serde1",
+ "gix-url/serde1",
+ "gix-attributes/serde1",
+ "gix-revision/serde1",
+ "gix-credentials/serde1",
+]
+
+[target."cfg(target_vendor = \"apple\")".dependencies.unicode-normalization]
+version = "0.1.19"
+default-features = false
diff --git a/vendor/gix/src/assets/baseline-init/HEAD b/vendor/gix/src/assets/baseline-init/HEAD
new file mode 100644
index 000000000..b870d8262
--- /dev/null
+++ b/vendor/gix/src/assets/baseline-init/HEAD
@@ -0,0 +1 @@
+ref: refs/heads/main
diff --git a/vendor/gix/src/assets/baseline-init/description b/vendor/gix/src/assets/baseline-init/description
new file mode 100644
index 000000000..498b267a8
--- /dev/null
+++ b/vendor/gix/src/assets/baseline-init/description
@@ -0,0 +1 @@
+Unnamed repository; edit this file 'description' to name the repository.
diff --git a/vendor/gix/src/assets/baseline-init/hooks/applypatch-msg.sample b/vendor/gix/src/assets/baseline-init/hooks/applypatch-msg.sample
new file mode 100755
index 000000000..20fbb51a2
--- /dev/null
+++ b/vendor/gix/src/assets/baseline-init/hooks/applypatch-msg.sample
@@ -0,0 +1,15 @@
+#!/bin/sh
+#
+# An example hook script to check the commit log message taken by
+# applypatch from an e-mail message.
+#
+# The hook should exit with non-zero status after issuing an
+# appropriate message if it wants to stop the commit. The hook is
+# allowed to edit the commit message file.
+#
+# To enable this hook, rename this file to "applypatch-msg".
+
+. git-sh-setup
+commitmsg="$(git rev-parse --git-path hooks/commit-msg)"
+test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"}
+:
diff --git a/vendor/gix/src/assets/baseline-init/hooks/commit-msg.sample b/vendor/gix/src/assets/baseline-init/hooks/commit-msg.sample
new file mode 100755
index 000000000..b58d1184a
--- /dev/null
+++ b/vendor/gix/src/assets/baseline-init/hooks/commit-msg.sample
@@ -0,0 +1,24 @@
+#!/bin/sh
+#
+# An example hook script to check the commit log message.
+# Called by "git commit" with one argument, the name of the file
+# that has the commit message. The hook should exit with non-zero
+# status after issuing an appropriate message if it wants to stop the
+# commit. The hook is allowed to edit the commit message file.
+#
+# To enable this hook, rename this file to "commit-msg".
+
+# Uncomment the below to add a Signed-off-by line to the message.
+# Doing this in a hook is a bad idea in general, but the prepare-commit-msg
+# hook is more suited to it.
+#
+# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p')
+# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1"
+
+# This example catches duplicate Signed-off-by lines.
+
+test "" = "$(grep '^Signed-off-by: ' "$1" |
+ sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || {
+ echo >&2 Duplicate Signed-off-by lines.
+ exit 1
+}
diff --git a/vendor/gix/src/assets/baseline-init/hooks/fsmonitor-watchman.sample b/vendor/gix/src/assets/baseline-init/hooks/fsmonitor-watchman.sample
new file mode 100755
index 000000000..14ed0aa42
--- /dev/null
+++ b/vendor/gix/src/assets/baseline-init/hooks/fsmonitor-watchman.sample
@@ -0,0 +1,173 @@
+#!/usr/bin/perl
+
+use strict;
+use warnings;
+use IPC::Open2;
+
+# An example hook script to integrate Watchman
+# (https://facebook.github.io/watchman/) with git to speed up detecting
+# new and modified files.
+#
+# The hook is passed a version (currently 2) and last update token
+# formatted as a string and outputs to stdout a new update token and
+# all files that have been modified since the update token. Paths must
+# be relative to the root of the working tree and separated by a single NUL.
+#
+# To enable this hook, rename this file to "query-watchman" and set
+# 'git config core.fsmonitor .git/hooks/query-watchman'
+#
+my ($version, $last_update_token) = @ARGV;
+
+# Uncomment for debugging
+# print STDERR "$0 $version $last_update_token\n";
+
+# Check the hook interface version
+if ($version ne 2) {
+ die "Unsupported query-fsmonitor hook version '$version'.\n" .
+ "Falling back to scanning...\n";
+}
+
+my $git_work_tree = get_working_dir();
+
+my $retry = 1;
+
+my $json_pkg;
+eval {
+ require JSON::XS;
+ $json_pkg = "JSON::XS";
+ 1;
+} or do {
+ require JSON::PP;
+ $json_pkg = "JSON::PP";
+};
+
+launch_watchman();
+
+sub launch_watchman {
+ my $o = watchman_query();
+ if (is_work_tree_watched($o)) {
+ output_result($o->{clock}, @{$o->{files}});
+ }
+}
+
+sub output_result {
+ my ($clockid, @files) = @_;
+
+ # Uncomment for debugging watchman output
+ # open (my $fh, ">", ".git/watchman-output.out");
+ # binmode $fh, ":utf8";
+ # print $fh "$clockid\n@files\n";
+ # close $fh;
+
+ binmode STDOUT, ":utf8";
+ print $clockid;
+ print "\0";
+ local $, = "\0";
+ print @files;
+}
+
+sub watchman_clock {
+ my $response = qx/watchman clock "$git_work_tree"/;
+ die "Failed to get clock id on '$git_work_tree'.\n" .
+ "Falling back to scanning...\n" if $? != 0;
+
+ return $json_pkg->new->utf8->decode($response);
+}
+
+sub watchman_query {
+ my $pid = open2(\*CHLD_OUT, \*CHLD_IN, 'watchman -j --no-pretty')
+ or die "open2() failed: $!\n" .
+ "Falling back to scanning...\n";
+
+ # In the query expression below we're asking for names of files that
+ # changed since $last_update_token but not from the .git folder.
+ #
+ # To accomplish this, we're using the "since" generator to use the
+ # recency index to select candidate nodes and "fields" to limit the
+ # output to file names only. Then we're using the "expression" term to
+ # further constrain the results.
+ if (substr($last_update_token, 0, 1) eq "c") {
+ $last_update_token = "\"$last_update_token\"";
+ }
+ my $query = <<" END";
+ ["query", "$git_work_tree", {
+ "since": $last_update_token,
+ "fields": ["name"],
+ "expression": ["not", ["dirname", ".git"]]
+ }]
+ END
+
+ # Uncomment for debugging the watchman query
+ # open (my $fh, ">", ".git/watchman-query.json");
+ # print $fh $query;
+ # close $fh;
+
+ print CHLD_IN $query;
+ close CHLD_IN;
+ my $response = do {local $/; <CHLD_OUT>};
+
+ # Uncomment for debugging the watch response
+ # open ($fh, ">", ".git/watchman-response.json");
+ # print $fh $response;
+ # close $fh;
+
+ die "Watchman: command returned no output.\n" .
+ "Falling back to scanning...\n" if $response eq "";
+ die "Watchman: command returned invalid output: $response\n" .
+ "Falling back to scanning...\n" unless $response =~ /^\{/;
+
+ return $json_pkg->new->utf8->decode($response);
+}
+
+sub is_work_tree_watched {
+ my ($output) = @_;
+ my $error = $output->{error};
+ if ($retry > 0 and $error and $error =~ m/unable to resolve root .* directory (.*) is not watched/) {
+ $retry--;
+ my $response = qx/watchman watch "$git_work_tree"/;
+ die "Failed to make watchman watch '$git_work_tree'.\n" .
+ "Falling back to scanning...\n" if $? != 0;
+ $output = $json_pkg->new->utf8->decode($response);
+ $error = $output->{error};
+ die "Watchman: $error.\n" .
+ "Falling back to scanning...\n" if $error;
+
+ # Uncomment for debugging watchman output
+ # open (my $fh, ">", ".git/watchman-output.out");
+ # close $fh;
+
+ # Watchman will always return all files on the first query so
+ # return the fast "everything is dirty" flag to git and do the
+ # Watchman query just to get it over with now so we won't pay
+ # the cost in git to look up each individual file.
+ my $o = watchman_clock();
+ $error = $output->{error};
+
+ die "Watchman: $error.\n" .
+ "Falling back to scanning...\n" if $error;
+
+ output_result($o->{clock}, ("/"));
+ $last_update_token = $o->{clock};
+
+ eval { launch_watchman() };
+ return 0;
+ }
+
+ die "Watchman: $error.\n" .
+ "Falling back to scanning...\n" if $error;
+
+ return 1;
+}
+
+sub get_working_dir {
+ my $working_dir;
+ if ($^O =~ 'msys' || $^O =~ 'cygwin') {
+ $working_dir = Win32::GetCwd();
+ $working_dir =~ tr/\\/\//;
+ } else {
+ require Cwd;
+ $working_dir = Cwd::cwd();
+ }
+
+ return $working_dir;
+}
diff --git a/vendor/gix/src/assets/baseline-init/hooks/post-update.sample b/vendor/gix/src/assets/baseline-init/hooks/post-update.sample
new file mode 100755
index 000000000..ec17ec193
--- /dev/null
+++ b/vendor/gix/src/assets/baseline-init/hooks/post-update.sample
@@ -0,0 +1,8 @@
+#!/bin/sh
+#
+# An example hook script to prepare a packed repository for use over
+# dumb transports.
+#
+# To enable this hook, rename this file to "post-update".
+
+exec git update-server-info
diff --git a/vendor/gix/src/assets/baseline-init/hooks/pre-applypatch.sample b/vendor/gix/src/assets/baseline-init/hooks/pre-applypatch.sample
new file mode 100755
index 000000000..d61828510
--- /dev/null
+++ b/vendor/gix/src/assets/baseline-init/hooks/pre-applypatch.sample
@@ -0,0 +1,14 @@
+#!/bin/sh
+#
+# An example hook script to verify what is about to be committed
+# by applypatch from an e-mail message.
+#
+# The hook should exit with non-zero status after issuing an
+# appropriate message if it wants to stop the commit.
+#
+# To enable this hook, rename this file to "pre-applypatch".
+
+. git-sh-setup
+precommit="$(git rev-parse --git-path hooks/pre-commit)"
+test -x "$precommit" && exec "$precommit" ${1+"$@"}
+:
diff --git a/vendor/gix/src/assets/baseline-init/hooks/pre-commit.sample b/vendor/gix/src/assets/baseline-init/hooks/pre-commit.sample
new file mode 100755
index 000000000..e144712c8
--- /dev/null
+++ b/vendor/gix/src/assets/baseline-init/hooks/pre-commit.sample
@@ -0,0 +1,49 @@
+#!/bin/sh
+#
+# An example hook script to verify what is about to be committed.
+# Called by "git commit" with no arguments. The hook should
+# exit with non-zero status after issuing an appropriate message if
+# it wants to stop the commit.
+#
+# To enable this hook, rename this file to "pre-commit".
+
+if git rev-parse --verify HEAD >/dev/null 2>&1
+then
+ against=HEAD
+else
+ # Initial commit: diff against an empty tree object
+ against=$(git hash-object -t tree /dev/null)
+fi
+
+# If you want to allow non-ASCII filenames set this variable to true.
+allownonascii=$(git config --type=bool hooks.allownonascii)
+
+# Redirect output to stderr.
+exec 1>&2
+
+# Cross platform projects tend to avoid non-ASCII filenames; prevent
+# them from being added to the repository. We exploit the fact that the
+# printable range starts at the space character and ends with tilde.
+if [ "$allownonascii" != "true" ] &&
+ # Note that the use of brackets around a tr range is ok here, (it's
+ # even required, for portability to Solaris 10's /usr/bin/tr), since
+ # the square bracket bytes happen to fall in the designated range.
+ test $(git diff --cached --name-only --diff-filter=A -z $against |
+ LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0
+then
+ cat <<\EOF
+Error: Attempt to add a non-ASCII file name.
+
+This can cause problems if you want to work with people on other platforms.
+
+To be portable it is advisable to rename the file.
+
+If you know what you are doing you can disable this check using:
+
+ git config hooks.allownonascii true
+EOF
+ exit 1
+fi
+
+# If there are whitespace errors, print the offending file names and fail.
+exec git diff-index --check --cached $against --
diff --git a/vendor/gix/src/assets/baseline-init/hooks/pre-merge-commit.sample b/vendor/gix/src/assets/baseline-init/hooks/pre-merge-commit.sample
new file mode 100755
index 000000000..399eab192
--- /dev/null
+++ b/vendor/gix/src/assets/baseline-init/hooks/pre-merge-commit.sample
@@ -0,0 +1,13 @@
+#!/bin/sh
+#
+# An example hook script to verify what is about to be committed.
+# Called by "git merge" with no arguments. The hook should
+# exit with non-zero status after issuing an appropriate message to
+# stderr if it wants to stop the merge commit.
+#
+# To enable this hook, rename this file to "pre-merge-commit".
+
+. git-sh-setup
+test -x "$GIT_DIR/hooks/pre-commit" &&
+ exec "$GIT_DIR/hooks/pre-commit"
+:
diff --git a/vendor/gix/src/assets/baseline-init/hooks/pre-push.sample b/vendor/gix/src/assets/baseline-init/hooks/pre-push.sample
new file mode 100755
index 000000000..6187dbf43
--- /dev/null
+++ b/vendor/gix/src/assets/baseline-init/hooks/pre-push.sample
@@ -0,0 +1,53 @@
+#!/bin/sh
+
+# An example hook script to verify what is about to be pushed. Called by "git
+# push" after it has checked the remote status, but before anything has been
+# pushed. If this script exits with a non-zero status nothing will be pushed.
+#
+# This hook is called with the following parameters:
+#
+# $1 -- Name of the remote to which the push is being done
+# $2 -- URL to which the push is being done
+#
+# If pushing without using a named remote those arguments will be equal.
+#
+# Information about the commits which are being pushed is supplied as lines to
+# the standard input in the form:
+#
+# <local ref> <local sha1> <remote ref> <remote sha1>
+#
+# This sample shows how to prevent push of commits where the log message starts
+# with "WIP" (work in progress).
+
+remote="$1"
+url="$2"
+
+z40=0000000000000000000000000000000000000000
+
+while read local_ref local_sha remote_ref remote_sha
+do
+ if [ "$local_sha" = $z40 ]
+ then
+ # Handle delete
+ :
+ else
+ if [ "$remote_sha" = $z40 ]
+ then
+ # New branch, examine all commits
+ range="$local_sha"
+ else
+ # Update to existing branch, examine new commits
+ range="$remote_sha..$local_sha"
+ fi
+
+ # Check for WIP commit
+ commit=`git rev-list -n 1 --grep '^WIP' "$range"`
+ if [ -n "$commit" ]
+ then
+ echo >&2 "Found WIP commit in $local_ref, not pushing"
+ exit 1
+ fi
+ fi
+done
+
+exit 0
diff --git a/vendor/gix/src/assets/baseline-init/hooks/pre-rebase.sample b/vendor/gix/src/assets/baseline-init/hooks/pre-rebase.sample
new file mode 100755
index 000000000..d6ac43f64
--- /dev/null
+++ b/vendor/gix/src/assets/baseline-init/hooks/pre-rebase.sample
@@ -0,0 +1,169 @@
+#!/bin/sh
+#
+# Copyright (c) 2006, 2008 Junio C Hamano
+#
+# The "pre-rebase" hook is run just before "git rebase" starts doing
+# its job, and can prevent the command from running by exiting with
+# non-zero status.
+#
+# The hook is called with the following parameters:
+#
+# $1 -- the upstream the series was forked from.
+# $2 -- the branch being rebased (or empty when rebasing the current branch).
+#
+# This sample shows how to prevent topic branches that are already
+# merged to 'next' branch from getting rebased, because allowing it
+# would result in rebasing already published history.
+
+publish=next
+basebranch="$1"
+if test "$#" = 2
+then
+ topic="refs/heads/$2"
+else
+ topic=`git symbolic-ref HEAD` ||
+ exit 0 ;# we do not interrupt rebasing detached HEAD
+fi
+
+case "$topic" in
+refs/heads/??/*)
+ ;;
+*)
+ exit 0 ;# we do not interrupt others.
+ ;;
+esac
+
+# Now we are dealing with a topic branch being rebased
+# on top of main. Is it OK to rebase it?
+
+# Does the topic really exist?
+git show-ref -q "$topic" || {
+ echo >&2 "No such branch $topic"
+ exit 1
+}
+
+# Is topic fully merged to main?
+not_in_main=`git rev-list --pretty=oneline ^main "$topic"`
+if test -z "$not_in_main"
+then
+ echo >&2 "$topic is fully merged to main; better remove it."
+ exit 1 ;# we could allow it, but there is no point.
+fi
+
+# Is topic ever merged to next? If so you should not be rebasing it.
+only_next_1=`git rev-list ^main "^$topic" ${publish} | sort`
+only_next_2=`git rev-list ^main ${publish} | sort`
+if test "$only_next_1" = "$only_next_2"
+then
+ not_in_topic=`git rev-list "^$topic" main`
+ if test -z "$not_in_topic"
+ then
+ echo >&2 "$topic is already up to date with main"
+ exit 1 ;# we could allow it, but there is no point.
+ else
+ exit 0
+ fi
+else
+ not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"`
+ /usr/bin/perl -e '
+ my $topic = $ARGV[0];
+ my $msg = "* $topic has commits already merged to public branch:\n";
+ my (%not_in_next) = map {
+ /^([0-9a-f]+) /;
+ ($1 => 1);
+ } split(/\n/, $ARGV[1]);
+ for my $elem (map {
+ /^([0-9a-f]+) (.*)$/;
+ [$1 => $2];
+ } split(/\n/, $ARGV[2])) {
+ if (!exists $not_in_next{$elem->[0]}) {
+ if ($msg) {
+ print STDERR $msg;
+ undef $msg;
+ }
+ print STDERR " $elem->[1]\n";
+ }
+ }
+ ' "$topic" "$not_in_next" "$not_in_main"
+ exit 1
+fi
+
+<<\DOC_END
+
+This sample hook safeguards topic branches that have been
+published from being rewound.
+
+The workflow assumed here is:
+
+ * Once a topic branch forks from "main", "main" is never
+ merged into it again (either directly or indirectly).
+
+ * Once a topic branch is fully cooked and merged into "main",
+ it is deleted. If you need to build on top of it to correct
+ earlier mistakes, a new topic branch is created by forking at
+ the tip of the "main". This is not strictly necessary, but
+ it makes it easier to keep your history simple.
+
+ * Whenever you need to test or publish your changes to topic
+ branches, merge them into "next" branch.
+
+The script, being an example, hardcodes the publish branch name
+to be "next", but it is trivial to make it configurable via
+$GIT_DIR/config mechanism.
+
+With this workflow, you would want to know:
+
+(1) ... if a topic branch has ever been merged to "next". Young
+ topic branches can have stupid mistakes you would rather
+ clean up before publishing, and things that have not been
+ merged into other branches can be easily rebased without
+ affecting other people. But once it is published, you would
+ not want to rewind it.
+
+(2) ... if a topic branch has been fully merged to "main".
+ Then you can delete it. More importantly, you should not
+ build on top of it -- other people may already want to
+ change things related to the topic as patches against your
+ "main", so if you need further changes, it is better to
+ fork the topic (perhaps with the same name) afresh from the
+ tip of "main".
+
+Let's look at this example:
+
+ o---o---o---o---o---o---o---o---o---o "next"
+ / / / /
+ / a---a---b A / /
+ / / / /
+ / / c---c---c---c B /
+ / / / \ /
+ / / / b---b C \ /
+ / / / / \ /
+ ---o---o---o---o---o---o---o---o---o---o---o "main"
+
+
+A, B and C are topic branches.
+
+ * A has one fix since it was merged up to "next".
+
+ * B has finished. It has been fully merged up to "main" and "next",
+ and is ready to be deleted.
+
+ * C has not merged to "next" at all.
+
+We would want to allow C to be rebased, refuse A, and encourage
+B to be deleted.
+
+To compute (1):
+
+ git rev-list ^main ^topic next
+ git rev-list ^main next
+
+ if these match, topic has not merged in next at all.
+
+To compute (2):
+
+ git rev-list main..topic
+
+ if this is empty, it is fully merged to "main".
+
+DOC_END
diff --git a/vendor/gix/src/assets/baseline-init/hooks/pre-receive.sample b/vendor/gix/src/assets/baseline-init/hooks/pre-receive.sample
new file mode 100755
index 000000000..a1fd29ec1
--- /dev/null
+++ b/vendor/gix/src/assets/baseline-init/hooks/pre-receive.sample
@@ -0,0 +1,24 @@
+#!/bin/sh
+#
+# An example hook script to make use of push options.
+# The example simply echoes all push options that start with 'echoback='
+# and rejects all pushes when the "reject" push option is used.
+#
+# To enable this hook, rename this file to "pre-receive".
+
+if test -n "$GIT_PUSH_OPTION_COUNT"
+then
+ i=0
+ while test "$i" -lt "$GIT_PUSH_OPTION_COUNT"
+ do
+ eval "value=\$GIT_PUSH_OPTION_$i"
+ case "$value" in
+ echoback=*)
+ echo "echo from the pre-receive-hook: ${value#*=}" >&2
+ ;;
+ reject)
+ exit 1
+ esac
+ i=$((i + 1))
+ done
+fi
diff --git a/vendor/gix/src/assets/baseline-init/hooks/prepare-commit-msg.sample b/vendor/gix/src/assets/baseline-init/hooks/prepare-commit-msg.sample
new file mode 100755
index 000000000..10fa14c5a
--- /dev/null
+++ b/vendor/gix/src/assets/baseline-init/hooks/prepare-commit-msg.sample
@@ -0,0 +1,42 @@
+#!/bin/sh
+#
+# An example hook script to prepare the commit log message.
+# Called by "git commit" with the name of the file that has the
+# commit message, followed by the description of the commit
+# message's source. The hook's purpose is to edit the commit
+# message file. If the hook fails with a non-zero status,
+# the commit is aborted.
+#
+# To enable this hook, rename this file to "prepare-commit-msg".
+
+# This hook includes three examples. The first one removes the
+# "# Please enter the commit message..." help message.
+#
+# The second includes the output of "git diff --name-status -r"
+# into the message, just before the "git status" output. It is
+# commented because it doesn't cope with --amend or with squashed
+# commits.
+#
+# The third example adds a Signed-off-by line to the message, that can
+# still be edited. This is rarely a good idea.
+
+COMMIT_MSG_FILE=$1
+COMMIT_SOURCE=$2
+SHA1=$3
+
+/usr/bin/perl -i.bak -ne 'print unless(m/^. Please enter the commit message/..m/^#$/)' "$COMMIT_MSG_FILE"
+
+# case "$COMMIT_SOURCE,$SHA1" in
+# ,|template,)
+# /usr/bin/perl -i.bak -pe '
+# print "\n" . `git diff --cached --name-status -r`
+# if /^#/ && $first++ == 0' "$COMMIT_MSG_FILE" ;;
+# *) ;;
+# esac
+
+# SOB=$(git var GIT_COMMITTER_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p')
+# git interpret-trailers --in-place --trailer "$SOB" "$COMMIT_MSG_FILE"
+# if test -z "$COMMIT_SOURCE"
+# then
+# /usr/bin/perl -i.bak -pe 'print "\n" if !$first_line++' "$COMMIT_MSG_FILE"
+# fi
diff --git a/vendor/gix/src/assets/baseline-init/hooks/update.sample b/vendor/gix/src/assets/baseline-init/hooks/update.sample
new file mode 100755
index 000000000..5014c4b31
--- /dev/null
+++ b/vendor/gix/src/assets/baseline-init/hooks/update.sample
@@ -0,0 +1,128 @@
+#!/bin/sh
+#
+# An example hook script to block unannotated tags from entering.
+# Called by "git receive-pack" with arguments: refname sha1-old sha1-new
+#
+# To enable this hook, rename this file to "update".
+#
+# Config
+# ------
+# hooks.allowunannotated
+# This boolean sets whether unannotated tags will be allowed into the
+# repository. By default they won't be.
+# hooks.allowdeletetag
+# This boolean sets whether deleting tags will be allowed in the
+# repository. By default they won't be.
+# hooks.allowmodifytag
+# This boolean sets whether a tag may be modified after creation. By default
+# it won't be.
+# hooks.allowdeletebranch
+# This boolean sets whether deleting branches will be allowed in the
+# repository. By default they won't be.
+# hooks.denycreatebranch
+# This boolean sets whether remotely creating branches will be denied
+# in the repository. By default this is allowed.
+#
+
+# --- Command line
+refname="$1"
+oldrev="$2"
+newrev="$3"
+
+# --- Safety check
+if [ -z "$GIT_DIR" ]; then
+ echo "Don't run this script from the command line." >&2
+ echo " (if you want, you could supply GIT_DIR then run" >&2
+ echo " $0 <ref> <oldrev> <newrev>)" >&2
+ exit 1
+fi
+
+if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then
+ echo "usage: $0 <ref> <oldrev> <newrev>" >&2
+ exit 1
+fi
+
+# --- Config
+allowunannotated=$(git config --type=bool hooks.allowunannotated)
+allowdeletebranch=$(git config --type=bool hooks.allowdeletebranch)
+denycreatebranch=$(git config --type=bool hooks.denycreatebranch)
+allowdeletetag=$(git config --type=bool hooks.allowdeletetag)
+allowmodifytag=$(git config --type=bool hooks.allowmodifytag)
+
+# check for no description
+projectdesc=$(sed -e '1q' "$GIT_DIR/description")
+case "$projectdesc" in
+"Unnamed repository"* | "")
+ echo "*** Project description file hasn't been set" >&2
+ exit 1
+ ;;
+esac
+
+# --- Check types
+# if $newrev is 0000...0000, it's a commit to delete a ref.
+zero="0000000000000000000000000000000000000000"
+if [ "$newrev" = "$zero" ]; then
+ newrev_type=delete
+else
+ newrev_type=$(git cat-file -t $newrev)
+fi
+
+case "$refname","$newrev_type" in
+ refs/tags/*,commit)
+ # un-annotated tag
+ short_refname=${refname##refs/tags/}
+ if [ "$allowunannotated" != "true" ]; then
+ echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2
+ echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2
+ exit 1
+ fi
+ ;;
+ refs/tags/*,delete)
+ # delete tag
+ if [ "$allowdeletetag" != "true" ]; then
+ echo "*** Deleting a tag is not allowed in this repository" >&2
+ exit 1
+ fi
+ ;;
+ refs/tags/*,tag)
+ # annotated tag
+ if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1
+ then
+ echo "*** Tag '$refname' already exists." >&2
+ echo "*** Modifying a tag is not allowed in this repository." >&2
+ exit 1
+ fi
+ ;;
+ refs/heads/*,commit)
+ # branch
+ if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then
+ echo "*** Creating a branch is not allowed in this repository" >&2
+ exit 1
+ fi
+ ;;
+ refs/heads/*,delete)
+ # delete branch
+ if [ "$allowdeletebranch" != "true" ]; then
+ echo "*** Deleting a branch is not allowed in this repository" >&2
+ exit 1
+ fi
+ ;;
+ refs/remotes/*,commit)
+ # tracking branch
+ ;;
+ refs/remotes/*,delete)
+ # delete tracking branch
+ if [ "$allowdeletebranch" != "true" ]; then
+ echo "*** Deleting a tracking branch is not allowed in this repository" >&2
+ exit 1
+ fi
+ ;;
+ *)
+ # Anything else (is there anything else?)
+ echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2
+ exit 1
+ ;;
+esac
+
+# --- Finished
+exit 0
diff --git a/vendor/gix/src/assets/baseline-init/info/exclude b/vendor/gix/src/assets/baseline-init/info/exclude
new file mode 100644
index 000000000..a5196d1be
--- /dev/null
+++ b/vendor/gix/src/assets/baseline-init/info/exclude
@@ -0,0 +1,6 @@
+# git ls-files --others --exclude-from=.git/info/exclude
+# Lines that start with '#' are comments.
+# For a project mostly in C, the following would be a good set of
+# exclude patterns (uncomment them if you want to use them):
+# *.[oa]
+# *~
diff --git a/vendor/gix/src/clone/checkout.rs b/vendor/gix/src/clone/checkout.rs
new file mode 100644
index 000000000..50d235f13
--- /dev/null
+++ b/vendor/gix/src/clone/checkout.rs
@@ -0,0 +1,161 @@
+use crate::{clone::PrepareCheckout, Repository};
+
+///
+pub mod main_worktree {
+ use std::{path::PathBuf, sync::atomic::AtomicBool};
+
+ use gix_odb::FindExt;
+
+ use crate::{clone::PrepareCheckout, Progress, Repository};
+
+ /// The error returned by [`PrepareCheckout::main_worktree()`].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("Repository at \"{}\" is a bare repository and cannot have a main worktree checkout", git_dir.display())]
+ BareRepository { git_dir: PathBuf },
+ #[error("The object pointed to by HEAD is not a treeish")]
+ NoHeadTree(#[from] crate::object::peel::to_kind::Error),
+ #[error("Could not create index from tree at {id}")]
+ IndexFromTree {
+ id: gix_hash::ObjectId,
+ source: gix_traverse::tree::breadthfirst::Error,
+ },
+ #[error(transparent)]
+ WriteIndex(#[from] gix_index::file::write::Error),
+ #[error(transparent)]
+ CheckoutOptions(#[from] crate::config::checkout_options::Error),
+ #[error(transparent)]
+ IndexCheckout(
+ #[from]
+ gix_worktree::index::checkout::Error<gix_odb::find::existing_object::Error<gix_odb::store::find::Error>>,
+ ),
+ #[error("Failed to reopen object database as Arc (only if thread-safety wasn't compiled in)")]
+ OpenArcOdb(#[from] std::io::Error),
+ #[error("The HEAD reference could not be located")]
+ FindHead(#[from] crate::reference::find::existing::Error),
+ #[error("The HEAD reference could not be located")]
+ PeelHeadToId(#[from] crate::head::peel::Error),
+ }
+
+ /// The progress ids used in [`PrepareCheckout::main_worktree()`].
+ ///
+ /// Use this information to selectively extract the progress of interest in case the parent application has custom visualization.
+ #[derive(Debug, Copy, Clone)]
+ pub enum ProgressId {
+        /// The number of files checked out thus far.
+        CheckoutFiles,
+        /// The number of bytes written in total, the aggregate of the size of the content of all files thus far.
+ BytesWritten,
+ }
+
+ impl From<ProgressId> for gix_features::progress::Id {
+ fn from(v: ProgressId) -> Self {
+ match v {
+ ProgressId::CheckoutFiles => *b"CLCF",
+ ProgressId::BytesWritten => *b"CLCB",
+ }
+ }
+ }
+
+ /// Modification
+ impl PrepareCheckout {
+        /// Check out the main worktree, determining how many threads to use by looking at `checkout.workers`, defaulting to using
+        /// one thread per logical core.
+ ///
+ /// Note that this is a no-op if the remote was empty, leaving this repository empty as well. This can be validated by checking
+ /// if the `head()` of the returned repository is not unborn.
+ pub fn main_worktree(
+ &mut self,
+ mut progress: impl crate::Progress,
+ should_interrupt: &AtomicBool,
+ ) -> Result<(Repository, gix_worktree::index::checkout::Outcome), Error> {
+ let repo = self
+ .repo
+ .as_ref()
+ .expect("still present as we never succeeded the worktree checkout yet");
+ let workdir = repo.work_dir().ok_or_else(|| Error::BareRepository {
+ git_dir: repo.git_dir().to_owned(),
+ })?;
+ let root_tree = match repo.head()?.peel_to_id_in_place().transpose()? {
+ Some(id) => id.object().expect("downloaded from remote").peel_to_tree()?.id,
+ None => {
+ return Ok((
+ self.repo.take().expect("still present"),
+ gix_worktree::index::checkout::Outcome::default(),
+ ))
+ }
+ };
+ let index = gix_index::State::from_tree(&root_tree, |oid, buf| repo.objects.find_tree_iter(oid, buf).ok())
+ .map_err(|err| Error::IndexFromTree {
+ id: root_tree,
+ source: err,
+ })?;
+ let mut index = gix_index::File::from_state(index, repo.index_path());
+
+ let mut opts = repo.config.checkout_options(repo.git_dir())?;
+ opts.destination_is_initially_empty = true;
+
+ let mut files = progress.add_child_with_id("checkout", ProgressId::CheckoutFiles.into());
+ let mut bytes = progress.add_child_with_id("writing", ProgressId::BytesWritten.into());
+
+ files.init(Some(index.entries().len()), crate::progress::count("files"));
+ bytes.init(None, crate::progress::bytes());
+
+ let start = std::time::Instant::now();
+ let outcome = gix_worktree::index::checkout(
+ &mut index,
+ workdir,
+ {
+ let objects = repo.objects.clone().into_arc()?;
+ move |oid, buf| objects.find_blob(oid, buf)
+ },
+ &mut files,
+ &mut bytes,
+ should_interrupt,
+ opts,
+ )?;
+ files.show_throughput(start);
+ bytes.show_throughput(start);
+
+ index.write(Default::default())?;
+ Ok((self.repo.take().expect("still present"), outcome))
+ }
+ }
+}
+
+/// Access
+impl PrepareCheckout {
+ /// Get access to the repository while the checkout isn't yet completed.
+ ///
+ /// # Panics
+ ///
+ /// If the checkout is completed and the [`Repository`] was already passed on to the caller.
+ pub fn repo(&self) -> &Repository {
+ self.repo
+ .as_ref()
+ .expect("present as checkout operation isn't complete")
+ }
+}
+
+/// Consumption
+impl PrepareCheckout {
+ /// Persist the contained repository as is even if an error may have occurred when checking out the main working tree.
+ pub fn persist(mut self) -> Repository {
+ self.repo.take().expect("present and consumed once")
+ }
+}
+
+impl Drop for PrepareCheckout {
+ fn drop(&mut self) {
+ if let Some(repo) = self.repo.take() {
+ std::fs::remove_dir_all(repo.work_dir().unwrap_or_else(|| repo.path())).ok();
+ }
+ }
+}
+
+impl From<PrepareCheckout> for Repository {
+ fn from(prep: PrepareCheckout) -> Self {
+ prep.persist()
+ }
+}
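
The two `impl PrepareCheckout` blocks above are the checkout half of gitoxide's two-step clone; the fetch half follows in `clone/fetch/mod.rs`. The sketch below shows roughly how a downstream consumer could drive the whole flow. It assumes the `blocking-network-client` feature from the manifest above, the `gix::prepare_clone()` entry point, and a `fetch_then_checkout()` helper on `PrepareFetch` that chains fetching with checkout preparation; none of these appear verbatim in this excerpt, and the URL, destination path, and progress/interrupt handles are placeholders rather than part of the diff.

    // Hypothetical downstream usage of the clone flow; not part of this vendored diff.
    fn clone_sketch() -> Result<(), Box<dyn std::error::Error>> {
        let url = "https://github.com/Byron/gitoxide";
        let dst = std::path::PathBuf::from("/tmp/gitoxide-clone");

        // Resolve the URL and prepare the fetch half (a `PrepareFetch`).
        let mut prepare_fetch = gix::prepare_clone(url, &dst)?;

        // Fetch the pack and set up refs, handing over to `PrepareCheckout`.
        let (mut prepare_checkout, _fetch_outcome) = prepare_fetch
            .fetch_then_checkout(gix::progress::Discard, &gix::interrupt::IS_INTERRUPTED)?;

        // Materialize the main worktree as implemented in `checkout.rs` above.
        let (repo, _checkout_outcome) = prepare_checkout
            .main_worktree(gix::progress::Discard, &gix::interrupt::IS_INTERRUPTED)?;

        println!("cloned into {:?}", repo.work_dir());
        Ok(())
    }

Note the design choice visible in the `Drop` implementation above: `PrepareCheckout` holds the `Repository` in an `Option` so that dropping an unfinished checkout removes the partially written directory, while `persist()` or a successful `main_worktree()` takes the repository out and leaves it on disk.
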
diff --git a/vendor/gix/src/clone/fetch/mod.rs b/vendor/gix/src/clone/fetch/mod.rs
new file mode 100644
index 000000000..d663b47ea
--- /dev/null
+++ b/vendor/gix/src/clone/fetch/mod.rs
@@ -0,0 +1,212 @@
+use crate::{bstr::BString, clone::PrepareFetch, Repository};
+
+/// The error returned by [`PrepareFetch::fetch_only()`].
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+#[cfg(feature = "blocking-network-client")]
+pub enum Error {
+ #[error(transparent)]
+ Connect(#[from] crate::remote::connect::Error),
+ #[error(transparent)]
+ PrepareFetch(#[from] crate::remote::fetch::prepare::Error),
+ #[error(transparent)]
+ Fetch(#[from] crate::remote::fetch::Error),
+ #[error(transparent)]
+ RemoteInit(#[from] crate::remote::init::Error),
+ #[error("Custom configuration of remote to clone from failed")]
+ RemoteConfiguration(#[source] Box<dyn std::error::Error + Send + Sync>),
+ #[error(transparent)]
+ RemoteName(#[from] crate::config::remote::symbolic_name::Error),
+ #[error("Failed to load repo-local git configuration before writing")]
+ LoadConfig(#[from] gix_config::file::init::from_paths::Error),
+ #[error("Failed to store configured remote in memory")]
+ SaveConfig(#[from] crate::remote::save::AsError),
+ #[error("Failed to write repository configuration to disk")]
+ SaveConfigIo(#[from] std::io::Error),
+ #[error("The remote HEAD points to a reference named {head_ref_name:?} which is invalid.")]
+ InvalidHeadRef {
+ source: gix_validate::refname::Error,
+ head_ref_name: BString,
+ },
+ #[error("Failed to update HEAD with values from remote")]
+ HeadUpdate(#[from] crate::reference::edit::Error),
+}
+
+/// Modification
+impl PrepareFetch {
+ /// Fetch a pack and update local branches according to refspecs, providing `progress` and checking `should_interrupt` to stop
+ /// the operation.
+ /// On success, the persisted repository is returned, and this method must not be called again to avoid a **panic**.
+ /// On error, the method may be called again to retry as often as needed.
+ ///
+ /// If the remote repository was empty, that is, newly initialized, the returned repository will also be empty, as if
+ /// it was newly initialized.
+ ///
+ /// Note that all data we created will be removed once this instance drops if the operation wasn't successful.
+ #[cfg(feature = "blocking-network-client")]
+ pub fn fetch_only<P>(
+ &mut self,
+ progress: P,
+ should_interrupt: &std::sync::atomic::AtomicBool,
+ ) -> Result<(Repository, crate::remote::fetch::Outcome), Error>
+ where
+ P: crate::Progress,
+ P::SubProgress: 'static,
+ {
+ use crate::{bstr::ByteVec, remote, remote::fetch::RefLogMessage};
+
+ let repo = self
+ .repo
+ .as_mut()
+ .expect("user error: multiple calls are allowed only until it succeeds");
+
+ let remote_name = match self.remote_name.as_ref() {
+ Some(name) => name.to_owned(),
+ None => repo
+ .config
+ .resolved
+ .string("clone", None, crate::config::tree::Clone::DEFAULT_REMOTE_NAME.name)
+ .map(|n| crate::config::tree::Clone::DEFAULT_REMOTE_NAME.try_into_symbolic_name(n))
+ .transpose()?
+ .unwrap_or_else(|| "origin".into()),
+ };
+
+ let mut remote = repo
+ .remote_at(self.url.clone())?
+ .with_refspecs(
+ Some(format!("+refs/heads/*:refs/remotes/{remote_name}/*").as_str()),
+ remote::Direction::Fetch,
+ )
+ .expect("valid static spec");
+ let mut clone_fetch_tags = None;
+ if let Some(f) = self.configure_remote.as_mut() {
+ remote = f(remote).map_err(|err| Error::RemoteConfiguration(err))?;
+ } else {
+ clone_fetch_tags = remote::fetch::Tags::All.into();
+ }
+
+ let config = util::write_remote_to_local_config_file(&mut remote, remote_name.clone())?;
+
+ // Now we are free to apply remote configuration we don't want to be written to disk.
+ if let Some(fetch_tags) = clone_fetch_tags {
+ remote = remote.with_fetch_tags(fetch_tags);
+ }
+
+ // Add HEAD after the remote was written to the configuration; we need it to know what to check out later, and to assure
+ // the ref that HEAD points to is present no matter what.
+ let head_refspec = gix_refspec::parse(
+ format!("HEAD:refs/remotes/{remote_name}/HEAD").as_str().into(),
+ gix_refspec::parse::Operation::Fetch,
+ )
+ .expect("valid")
+ .to_owned();
+ let pending_pack: remote::fetch::Prepare<'_, '_, _, _> =
+ remote.connect(remote::Direction::Fetch, progress)?.prepare_fetch({
+ let mut opts = self.fetch_options.clone();
+ if !opts.extra_refspecs.contains(&head_refspec) {
+ opts.extra_refspecs.push(head_refspec)
+ }
+ opts
+ })?;
+ if pending_pack.ref_map().object_hash != repo.object_hash() {
+ unimplemented!("configure repository to expect a different object hash as advertised by the server")
+ }
+ let reflog_message = {
+ let mut b = self.url.to_bstring();
+ b.insert_str(0, "clone: from ");
+ b
+ };
+ let outcome = pending_pack
+ .with_write_packed_refs_only(true)
+ .with_reflog_message(RefLogMessage::Override {
+ message: reflog_message.clone(),
+ })
+ .receive(should_interrupt)?;
+
+ util::append_config_to_repo_config(repo, config);
+ util::update_head(
+ repo,
+ &outcome.ref_map.remote_refs,
+ reflog_message.as_ref(),
+ remote_name.as_ref(),
+ )?;
+
+ Ok((self.repo.take().expect("still present"), outcome))
+ }
+
+ /// Similar to [`fetch_only()`][Self::fetch_only()], but passes ownership to a utility type to configure a checkout operation.
+ #[cfg(feature = "blocking-network-client")]
+ pub fn fetch_then_checkout<P>(
+ &mut self,
+ progress: P,
+ should_interrupt: &std::sync::atomic::AtomicBool,
+ ) -> Result<(crate::clone::PrepareCheckout, crate::remote::fetch::Outcome), Error>
+ where
+ P: crate::Progress,
+ P::SubProgress: 'static,
+ {
+ let (repo, fetch_outcome) = self.fetch_only(progress, should_interrupt)?;
+ Ok((crate::clone::PrepareCheckout { repo: repo.into() }, fetch_outcome))
+ }
+}
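As an illustration of `fetch_only()` (not part of the patch), the sketch below assumes a `PrepareFetch` that was created for a bare destination, for instance a mirror, where no worktree checkout is wanted. `gix::progress::Discard` and the interrupt flag are stand-ins, and the `blocking-network-client` feature is assumed to be enabled.

fn mirror_fetch(mut prepare: gix::clone::PrepareFetch) -> Result<gix::Repository, Box<dyn std::error::Error>> {
    let should_interrupt = std::sync::atomic::AtomicBool::new(false);
    // On success the repository is persisted, so dropping `prepare` afterwards won't delete it.
    let (repo, outcome) = prepare.fetch_only(gix::progress::Discard, &should_interrupt)?;
    println!("the server advertised {} refs", outcome.ref_map.remote_refs.len());
    Ok(repo)
}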
+
+/// Builder
+impl PrepareFetch {
+ /// Set additional options to adjust parts of the fetch operation that are not affected by the git configuration.
+ #[cfg(any(feature = "async-network-client", feature = "blocking-network-client"))]
+ pub fn with_fetch_options(mut self, opts: crate::remote::ref_map::Options) -> Self {
+ self.fetch_options = opts;
+ self
+ }
+ /// Use `f` to apply arbitrary changes to the remote that is about to be used to fetch a pack.
+ ///
+ /// The passed-in `remote` will be unnamed and pre-configured to be a default remote as we know it from git-clone.
+ /// It is not yet present in the configuration of the repository,
+ /// but it will eventually be written to the configuration prior to performing the fetch operation, so
+ /// _all changes done in `f()` will be persisted_.
+ ///
+ /// It can also be used to configure additional options, like those for fetching tags. Note that
+ /// [with_fetch_tags()][crate::Remote::with_fetch_tags()] should be called here to configure the clone as desired.
+ /// Otherwise, the clone is configured to be complete and fetches all tags, not only those reachable from all branches.
+ pub fn configure_remote(
+ mut self,
+ f: impl FnMut(crate::Remote<'_>) -> Result<crate::Remote<'_>, Box<dyn std::error::Error + Send + Sync>> + 'static,
+ ) -> Self {
+ self.configure_remote = Some(Box::new(f));
+ self
+ }
+
+ /// Set the remote's name to the given value after it was configured using the function provided via
+ /// [`configure_remote()`][Self::configure_remote()].
+ ///
+ /// If not set here, it defaults to `origin` or the value of `clone.defaultRemoteName`.
+ pub fn with_remote_name(mut self, name: impl Into<BString>) -> Result<Self, crate::remote::name::Error> {
+ self.remote_name = Some(crate::remote::name::validated(name)?);
+ Ok(self)
+ }
+}
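For instance, renaming the remote and restricting tag fetching, which is what the note on `with_fetch_tags()` above refers to, could look like this sketch; the name `upstream`, the helper function, and the availability of a network client feature are assumptions.

fn customize(prepare: gix::clone::PrepareFetch) -> Result<gix::clone::PrepareFetch, gix::remote::name::Error> {
    Ok(prepare
        .with_remote_name("upstream")?
        .configure_remote(|remote| Ok(remote.with_fetch_tags(gix::remote::fetch::Tags::None))))
}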
+
+/// Consumption
+impl PrepareFetch {
+ /// Persist the contained repository as is even if an error may have occurred when fetching from the remote.
+ pub fn persist(mut self) -> Repository {
+ self.repo.take().expect("present and consumed once")
+ }
+}
+
+impl Drop for PrepareFetch {
+ fn drop(&mut self) {
+ if let Some(repo) = self.repo.take() {
+ std::fs::remove_dir_all(repo.work_dir().unwrap_or_else(|| repo.path())).ok();
+ }
+ }
+}
+
+impl From<PrepareFetch> for Repository {
+ fn from(prep: PrepareFetch) -> Self {
+ prep.persist()
+ }
+}
+
+#[cfg(feature = "blocking-network-client")]
+mod util;
diff --git a/vendor/gix/src/clone/fetch/util.rs b/vendor/gix/src/clone/fetch/util.rs
new file mode 100644
index 000000000..ac8943f6e
--- /dev/null
+++ b/vendor/gix/src/clone/fetch/util.rs
@@ -0,0 +1,229 @@
+use std::{borrow::Cow, convert::TryInto, io::Write};
+
+use gix_odb::Find;
+use gix_ref::{
+ transaction::{LogChange, RefLog},
+ FullNameRef,
+};
+
+use super::Error;
+use crate::{
+ bstr::{BStr, BString, ByteSlice},
+ Repository,
+};
+
+enum WriteMode {
+ Overwrite,
+ Append,
+}
+
+#[allow(clippy::result_large_err)]
+pub fn write_remote_to_local_config_file(
+ remote: &mut crate::Remote<'_>,
+ remote_name: BString,
+) -> Result<gix_config::File<'static>, Error> {
+ let mut config = gix_config::File::new(local_config_meta(remote.repo));
+ remote.save_as_to(remote_name, &mut config)?;
+
+ write_to_local_config(&config, WriteMode::Append)?;
+ Ok(config)
+}
+
+fn local_config_meta(repo: &Repository) -> gix_config::file::Metadata {
+ let meta = repo.config.resolved.meta().clone();
+ assert_eq!(
+ meta.source,
+ gix_config::Source::Local,
+ "local path is the default for new sections"
+ );
+ meta
+}
+
+fn write_to_local_config(config: &gix_config::File<'static>, mode: WriteMode) -> std::io::Result<()> {
+ assert_eq!(
+ config.meta().source,
+ gix_config::Source::Local,
+ "made for appending to local configuration file"
+ );
+ let mut local_config = std::fs::OpenOptions::new()
+ .create(false)
+ .write(matches!(mode, WriteMode::Overwrite))
+ .append(matches!(mode, WriteMode::Append))
+ .open(config.meta().path.as_deref().expect("local config with path set"))?;
+ local_config.write_all(config.detect_newline_style())?;
+ config.write_to_filter(&mut local_config, |s| s.meta().source == gix_config::Source::Local)
+}
+
+pub fn append_config_to_repo_config(repo: &mut Repository, config: gix_config::File<'static>) {
+ let repo_config = gix_features::threading::OwnShared::make_mut(&mut repo.config.resolved);
+ repo_config.append(config);
+}
+
+/// HEAD cannot be written by means of a refspec by design, so we have to do it manually here. We also create the pointed-to ref
+/// if we have to, as it might not have been naturally included in the ref-specs.
+pub fn update_head(
+ repo: &mut Repository,
+ remote_refs: &[gix_protocol::handshake::Ref],
+ reflog_message: &BStr,
+ remote_name: &BStr,
+) -> Result<(), Error> {
+ use gix_ref::{
+ transaction::{PreviousValue, RefEdit},
+ Target,
+ };
+ let (head_peeled_id, head_ref) = match remote_refs.iter().find_map(|r| {
+ Some(match r {
+ gix_protocol::handshake::Ref::Symbolic {
+ full_ref_name,
+ target,
+ object,
+ } if full_ref_name == "HEAD" => (Some(object.as_ref()), Some(target)),
+ gix_protocol::handshake::Ref::Direct { full_ref_name, object } if full_ref_name == "HEAD" => {
+ (Some(object.as_ref()), None)
+ }
+ gix_protocol::handshake::Ref::Unborn { full_ref_name, target } if full_ref_name == "HEAD" => {
+ (None, Some(target))
+ }
+ _ => return None,
+ })
+ }) {
+ Some(t) => t,
+ None => return Ok(()),
+ };
+
+ let head: gix_ref::FullName = "HEAD".try_into().expect("valid");
+ let reflog_message = || LogChange {
+ mode: RefLog::AndReference,
+ force_create_reflog: false,
+ message: reflog_message.to_owned(),
+ };
+ match head_ref {
+ Some(referent) => {
+ let referent: gix_ref::FullName = referent.try_into().map_err(|err| Error::InvalidHeadRef {
+ head_ref_name: referent.to_owned(),
+ source: err,
+ })?;
+ repo.refs
+ .transaction()
+ .packed_refs(gix_ref::file::transaction::PackedRefs::DeletionsAndNonSymbolicUpdates(
+ Box::new(|oid, buf| {
+ repo.objects
+ .try_find(oid, buf)
+ .map(|obj| obj.map(|obj| obj.kind))
+ .map_err(|err| Box::new(err) as Box<dyn std::error::Error + Send + Sync + 'static>)
+ }),
+ ))
+ .prepare(
+ {
+ let mut edits = vec![RefEdit {
+ change: gix_ref::transaction::Change::Update {
+ log: reflog_message(),
+ expected: PreviousValue::Any,
+ new: Target::Symbolic(referent.clone()),
+ },
+ name: head.clone(),
+ deref: false,
+ }];
+ if let Some(head_peeled_id) = head_peeled_id {
+ edits.push(RefEdit {
+ change: gix_ref::transaction::Change::Update {
+ log: reflog_message(),
+ expected: PreviousValue::Any,
+ new: Target::Peeled(head_peeled_id.to_owned()),
+ },
+ name: referent.clone(),
+ deref: false,
+ });
+ };
+ edits
+ },
+ gix_lock::acquire::Fail::Immediately,
+ gix_lock::acquire::Fail::Immediately,
+ )
+ .map_err(crate::reference::edit::Error::from)?
+ .commit(
+ repo.committer()
+ .transpose()
+ .map_err(|err| Error::HeadUpdate(crate::reference::edit::Error::ParseCommitterTime(err)))?,
+ )
+ .map_err(crate::reference::edit::Error::from)?;
+
+ if let Some(head_peeled_id) = head_peeled_id {
+ let mut log = reflog_message();
+ log.mode = RefLog::Only;
+ repo.edit_reference(RefEdit {
+ change: gix_ref::transaction::Change::Update {
+ log,
+ expected: PreviousValue::Any,
+ new: Target::Peeled(head_peeled_id.to_owned()),
+ },
+ name: head,
+ deref: false,
+ })?;
+ }
+
+ setup_branch_config(repo, referent.as_ref(), head_peeled_id, remote_name)?;
+ }
+ None => {
+ repo.edit_reference(RefEdit {
+ change: gix_ref::transaction::Change::Update {
+ log: reflog_message(),
+ expected: PreviousValue::Any,
+ new: Target::Peeled(
+ head_peeled_id
+ .expect("detached heads always point to something")
+ .to_owned(),
+ ),
+ },
+ name: head,
+ deref: false,
+ })?;
+ }
+ };
+ Ok(())
+}
+
+/// Set up the remote configuration for `branch` so that it points to itself, but on the remote, if and only if the currently
+/// saved refspecs are able to match it.
+/// For that we reload the remote named `remote_name` and use its ref_specs for matching.
+fn setup_branch_config(
+ repo: &mut Repository,
+ branch: &FullNameRef,
+ branch_id: Option<&gix_hash::oid>,
+ remote_name: &BStr,
+) -> Result<(), Error> {
+ let short_name = match branch.category_and_short_name() {
+ Some((cat, shortened)) if cat == gix_ref::Category::LocalBranch => match shortened.to_str() {
+ Ok(s) => s,
+ Err(_) => return Ok(()),
+ },
+ _ => return Ok(()),
+ };
+ let remote = repo
+ .find_remote(remote_name)
+ .expect("remote was just created and must be visible in config");
+ let group = gix_refspec::MatchGroup::from_fetch_specs(remote.fetch_specs.iter().map(|s| s.to_ref()));
+ let null = gix_hash::ObjectId::null(repo.object_hash());
+ let res = group.match_remotes(
+ Some(gix_refspec::match_group::Item {
+ full_ref_name: branch.as_bstr(),
+ target: branch_id.unwrap_or(&null),
+ object: None,
+ })
+ .into_iter(),
+ );
+ if !res.mappings.is_empty() {
+ let mut config = repo.config_snapshot_mut();
+ let mut section = config
+ .new_section("branch", Some(Cow::Owned(short_name.into())))
+ .expect("section header name is always valid per naming rules, our input branch name is valid");
+ section.push("remote".try_into().expect("valid at compile time"), Some(remote_name));
+ section.push(
+ "merge".try_into().expect("valid at compile time"),
+ Some(branch.as_bstr()),
+ );
+ write_to_local_config(&config, WriteMode::Overwrite)?;
+ config.commit().expect("configuration we set is valid");
+ }
+ Ok(())
+}
diff --git a/vendor/gix/src/clone/mod.rs b/vendor/gix/src/clone/mod.rs
new file mode 100644
index 000000000..249a66a42
--- /dev/null
+++ b/vendor/gix/src/clone/mod.rs
@@ -0,0 +1,118 @@
+#![allow(clippy::result_large_err)]
+use std::convert::TryInto;
+
+use crate::{bstr::BString, config::tree::gitoxide};
+
+type ConfigureRemoteFn =
+ Box<dyn FnMut(crate::Remote<'_>) -> Result<crate::Remote<'_>, Box<dyn std::error::Error + Send + Sync>>>;
+
+/// A utility to collect configuration on how to fetch from a remote and initiate a fetch operation. It will delete the newly
+/// created repository when dropped without having successfully finished a fetch.
+#[must_use]
+pub struct PrepareFetch {
+ /// A freshly initialized repository which is owned by us, or `None` if it was handed to the user
+ repo: Option<crate::Repository>,
+ /// The name of the remote, which defaults to `origin` if not overridden.
+ remote_name: Option<BString>,
+ /// A function to configure a remote prior to fetching a pack.
+ configure_remote: Option<ConfigureRemoteFn>,
+ /// Options for preparing a fetch operation.
+ #[cfg(any(feature = "async-network-client", feature = "blocking-network-client"))]
+ fetch_options: crate::remote::ref_map::Options,
+ /// The url to clone from
+ #[cfg_attr(not(feature = "blocking-network-client"), allow(dead_code))]
+ url: gix_url::Url,
+}
+
+/// The error returned by [`PrepareFetch::new()`].
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum Error {
+ #[error(transparent)]
+ Init(#[from] crate::init::Error),
+ #[error(transparent)]
+ UrlParse(#[from] gix_url::parse::Error),
+ #[error("Failed to turn the relative file url \"{}\" into an absolute one", url.to_bstring())]
+ CanonicalizeUrl {
+ url: gix_url::Url,
+ source: gix_path::realpath::Error,
+ },
+}
+
+/// Instantiation
+impl PrepareFetch {
+ /// Create a new repository at `path` with `create_opts` which is ready to clone from `url`, possibly after making additional adjustments to
+ /// configuration and settings.
+ ///
+ /// Note that this is merely a handle to perform the actual connection to the remote, and if any of it fails the freshly initialized repository
+ /// will be removed automatically as soon as this instance drops.
+ ///
+ /// # Deviation
+ ///
+ /// Similar to `git`, a missing user name and email configuration is not terminal and we will fill it in with dummy values. However,
+ /// instead of deriving values from the system, ours are hardcoded to indicate what happened.
+ #[allow(clippy::result_large_err)]
+ pub fn new<Url, E>(
+ url: Url,
+ path: impl AsRef<std::path::Path>,
+ kind: crate::create::Kind,
+ mut create_opts: crate::create::Options,
+ open_opts: crate::open::Options,
+ ) -> Result<Self, Error>
+ where
+ Url: TryInto<gix_url::Url, Error = E>,
+ gix_url::parse::Error: From<E>,
+ {
+ let mut url = url.try_into().map_err(gix_url::parse::Error::from)?;
+ url.canonicalize().map_err(|err| Error::CanonicalizeUrl {
+ url: url.clone(),
+ source: err,
+ })?;
+ create_opts.destination_must_be_empty = true;
+ let mut repo = crate::ThreadSafeRepository::init_opts(path, kind, create_opts, open_opts)?.to_thread_local();
+ if repo.committer().is_none() {
+ let mut config = gix_config::File::new(gix_config::file::Metadata::api());
+ config
+ .set_raw_value(
+ "gitoxide",
+ Some("committer".into()),
+ gitoxide::Committer::NAME_FALLBACK.name,
+ "no name configured during clone",
+ )
+ .expect("works - statically known");
+ config
+ .set_raw_value(
+ "gitoxide",
+ Some("committer".into()),
+ gitoxide::Committer::EMAIL_FALLBACK.name,
+ "noEmailAvailable@example.com",
+ )
+ .expect("works - statically known");
+ let mut repo_config = repo.config_snapshot_mut();
+ repo_config.append(config);
+ repo_config.commit().expect("configuration is still valid");
+ }
+ Ok(PrepareFetch {
+ url,
+ #[cfg(any(feature = "async-network-client", feature = "blocking-network-client"))]
+ fetch_options: Default::default(),
+ repo: Some(repo),
+ remote_name: None,
+ configure_remote: None,
+ })
+ }
+}
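Putting the pieces together, a sketch of the typical entry point follows. It mirrors the parameters of `new()` above, assumes the `blocking-network-client` feature and the `&str`-to-URL conversion provided by `gix-url`, and hands the returned `PrepareCheckout` on to `main_worktree()` as sketched near `clone/checkout.rs` earlier in this patch.

fn start_clone(url: &str, dest: &std::path::Path) -> Result<gix::clone::PrepareCheckout, Box<dyn std::error::Error>> {
    let mut prepare = gix::clone::PrepareFetch::new(
        url,
        dest,
        gix::create::Kind::WithWorktree,
        gix::create::Options::default(),
        gix::open::Options::isolated(), // avoid reading user/system configuration in this sketch
    )?;
    let should_interrupt = std::sync::atomic::AtomicBool::new(false);
    let (checkout, _fetch_outcome) = prepare.fetch_then_checkout(gix::progress::Discard, &should_interrupt)?;
    Ok(checkout)
}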
+
+/// A utility to collect configuration on how to perform a checkout into a working tree. When dropped without having
+/// checked out successfully, the fetched repository will be removed.
+#[must_use]
+pub struct PrepareCheckout {
+ /// A freshly initialized repository which is owned by us, or `None` if it was handed to the user
+ pub(self) repo: Option<crate::Repository>,
+}
+
+///
+pub mod fetch;
+
+///
+pub mod checkout;
diff --git a/vendor/gix/src/commit.rs b/vendor/gix/src/commit.rs
new file mode 100644
index 000000000..10fa6f675
--- /dev/null
+++ b/vendor/gix/src/commit.rs
@@ -0,0 +1,238 @@
+//!
+
+/// An empty array of a type usable with the `gix::easy` API to help declare that no parents should be used
+pub const NO_PARENT_IDS: [gix_hash::ObjectId; 0] = [];
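As a sketch of how this constant is meant to be used with `Repository::commit()` (whose error type follows below), an initial commit without parents could target the well-known empty tree; the function name and message are illustrative.

fn initial_commit(repo: &gix::Repository) -> Result<gix::ObjectId, Box<dyn std::error::Error>> {
    // The empty tree is always available, so nothing has to be written for this sketch.
    let empty_tree = gix::ObjectId::empty_tree(repo.object_hash());
    let commit_id = repo.commit("HEAD", "initial empty commit", empty_tree, gix::commit::NO_PARENT_IDS)?;
    Ok(commit_id.detach())
}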
+
+/// The error returned by [`commit(…)`][crate::Repository::commit()].
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum Error {
+ #[error(transparent)]
+ ParseTime(#[from] crate::config::time::Error),
+ #[error("Committer identity is not configured")]
+ CommitterMissing,
+ #[error("Author identity is not configured")]
+ AuthorMissing,
+ #[error(transparent)]
+ ReferenceNameValidation(#[from] gix_ref::name::Error),
+ #[error(transparent)]
+ WriteObject(#[from] crate::object::write::Error),
+ #[error(transparent)]
+ ReferenceEdit(#[from] crate::reference::edit::Error),
+}
+
+///
+pub mod describe {
+ use std::borrow::Cow;
+
+ use gix_hash::ObjectId;
+ use gix_hashtable::HashMap;
+ use gix_odb::Find;
+
+ use crate::{bstr::BStr, ext::ObjectIdExt, Repository};
+
+ /// The result of [try_resolve()][Platform::try_resolve()].
+ pub struct Resolution<'repo> {
+ /// The outcome of the describe operation.
+ pub outcome: gix_revision::describe::Outcome<'static>,
+ /// The id to describe.
+ pub id: crate::Id<'repo>,
+ }
+
+ impl<'repo> Resolution<'repo> {
+ /// Turn this instance into something displayable
+ pub fn format(self) -> Result<gix_revision::describe::Format<'static>, Error> {
+ let prefix = self.id.shorten()?;
+ Ok(self.outcome.into_format(prefix.hex_len()))
+ }
+ }
+
+ /// The error returned by [try_format()][Platform::try_format()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ Describe(#[from] gix_revision::describe::Error<gix_odb::store::find::Error>),
+ #[error("Could not produce an unambiguous shortened id for formatting.")]
+ ShortId(#[from] crate::id::shorten::Error),
+ #[error(transparent)]
+ RefIter(#[from] crate::reference::iter::Error),
+ #[error(transparent)]
+ RefIterInit(#[from] crate::reference::iter::init::Error),
+ }
+
+ /// A selector to choose what kind of references should contribute to names.
+ #[derive(Debug, Clone, Copy, PartialOrd, PartialEq, Ord, Eq, Hash)]
+ pub enum SelectRef {
+ /// Only use annotated tags for names.
+ AnnotatedTags,
+ /// Use all tags for names, whether annotated or plain references.
+ AllTags,
+ /// Use all references, including local branch names.
+ AllRefs,
+ }
+
+ impl SelectRef {
+ fn names(&self, repo: &Repository) -> Result<HashMap<ObjectId, Cow<'static, BStr>>, Error> {
+ let platform = repo.references()?;
+
+ Ok(match self {
+ SelectRef::AllTags | SelectRef::AllRefs => {
+ let mut refs: Vec<_> = match self {
+ SelectRef::AllRefs => platform.all()?,
+ SelectRef::AllTags => platform.tags()?,
+ _ => unreachable!(),
+ }
+ .filter_map(Result::ok)
+ .filter_map(|mut r: crate::Reference<'_>| {
+ let target_id = r.target().try_id().map(ToOwned::to_owned);
+ let peeled_id = r.peel_to_id_in_place().ok()?;
+ let (prio, tag_time) = match target_id {
+ Some(target_id) if peeled_id != *target_id => {
+ let tag = repo.find_object(target_id).ok()?.try_into_tag().ok()?;
+ (1, tag.tagger().ok()??.time.seconds_since_unix_epoch)
+ }
+ _ => (0, 0),
+ };
+ (
+ peeled_id.inner,
+ prio,
+ tag_time,
+ Cow::from(r.inner.name.shorten().to_owned()),
+ )
+ .into()
+ })
+ .collect();
+ // By priority, then by time ascending, then lexicographically.
+ // More recent entries overwrite older ones due to collection into hashmap.
+ refs.sort_by(
+ |(_a_peeled_id, a_prio, a_time, a_name), (_b_peeled_id, b_prio, b_time, b_name)| {
+ a_prio
+ .cmp(b_prio)
+ .then_with(|| a_time.cmp(b_time))
+ .then_with(|| b_name.cmp(a_name))
+ },
+ );
+ refs.into_iter().map(|(a, _, _, b)| (a, b)).collect()
+ }
+ SelectRef::AnnotatedTags => {
+ let mut peeled_commits_and_tag_date: Vec<_> = platform
+ .tags()?
+ .filter_map(Result::ok)
+ .filter_map(|r: crate::Reference<'_>| {
+ // TODO: we assume direct refs for tags, which is the common case, but it doesn't have to be
+ // so rather follow symrefs till the first object and then peel tags after the first object was found.
+ let tag = r.try_id()?.object().ok()?.try_into_tag().ok()?;
+ let tag_time = tag
+ .tagger()
+ .ok()
+ .and_then(|s| s.map(|s| s.time.seconds_since_unix_epoch))
+ .unwrap_or(0);
+ let commit_id = tag.target_id().ok()?.object().ok()?.try_into_commit().ok()?.id;
+ Some((commit_id, tag_time, Cow::<BStr>::from(r.name().shorten().to_owned())))
+ })
+ .collect();
+ // Sort by time ascending, then lexicographically.
+ // More recent entries overwrite older ones due to collection into hashmap.
+ peeled_commits_and_tag_date.sort_by(|(_a_id, a_time, a_name), (_b_id, b_time, b_name)| {
+ a_time.cmp(b_time).then_with(|| b_name.cmp(a_name))
+ });
+ peeled_commits_and_tag_date
+ .into_iter()
+ .map(|(a, _, c)| (a, c))
+ .collect()
+ }
+ })
+ }
+ }
+
+ impl Default for SelectRef {
+ fn default() -> Self {
+ SelectRef::AnnotatedTags
+ }
+ }
+
+ /// A support type to allow configuring a `git describe` operation
+ pub struct Platform<'repo> {
+ pub(crate) id: gix_hash::ObjectId,
+ pub(crate) repo: &'repo crate::Repository,
+ pub(crate) select: SelectRef,
+ pub(crate) first_parent: bool,
+ pub(crate) id_as_fallback: bool,
+ pub(crate) max_candidates: usize,
+ }
+
+ impl<'repo> Platform<'repo> {
+ /// Configure which names to `select`, from which describe can choose.
+ pub fn names(mut self, select: SelectRef) -> Self {
+ self.select = select;
+ self
+ }
+
+ /// If true, shorten the graph traversal time by just traversing the first parent of merge commits.
+ pub fn traverse_first_parent(mut self, first_parent: bool) -> Self {
+ self.first_parent = first_parent;
+ self
+ }
+
+ /// Only consider the given number of candidates, instead of the default of 10.
+ pub fn max_candidates(mut self, candidates: usize) -> Self {
+ self.max_candidates = candidates;
+ self
+ }
+
+ /// If true, a format will always be produced, even if no candidate is available.
+ pub fn id_as_fallback(mut self, use_fallback: bool) -> Self {
+ self.id_as_fallback = use_fallback;
+ self
+ }
+
+ /// Try to find a name for the configured commit id using all prior configuration, returning `Some(describe::Format)`
+ /// if one was found.
+ ///
+ /// Note that there will always be `Some(format)` if `id_as_fallback()` is set to true.
+ pub fn try_format(&self) -> Result<Option<gix_revision::describe::Format<'static>>, Error> {
+ self.try_resolve()?.map(|r| r.format()).transpose()
+ }
+
+ /// Try to find a name for the configured commit id using all prior configuration, returning `Some(Resolution)`
+ /// if one was found.
+ ///
+ /// The outcome provides additional information, but leaves the caller with the burden of turning it into a displayable format.
+ ///
+ /// # Performance
+ ///
+ /// It is strongly recommended to [ensure an object cache is set][crate::Repository::object_cache_size_if_unset()]
+ /// to save ~40% of time.
+ pub fn try_resolve(&self) -> Result<Option<Resolution<'repo>>, Error> {
+ // TODO: dirty suffix with respective dirty-detection
+ let outcome = gix_revision::describe(
+ &self.id,
+ |id, buf| {
+ Ok(self
+ .repo
+ .objects
+ .try_find(id, buf)?
+ .and_then(|d| d.try_into_commit_iter()))
+ },
+ gix_revision::describe::Options {
+ name_by_oid: self.select.names(self.repo)?,
+ fallback_to_oid: self.id_as_fallback,
+ first_parent: self.first_parent,
+ max_candidates: self.max_candidates,
+ },
+ )?;
+
+ Ok(outcome.map(|outcome| crate::commit::describe::Resolution {
+ outcome,
+ id: self.id.attach(self.repo),
+ }))
+ }
+
+ /// Like [`try_format()`][Platform::try_format()], but turns `id_as_fallback()` on to always produce a format.
+ pub fn format(&mut self) -> Result<gix_revision::describe::Format<'static>, Error> {
+ self.id_as_fallback = true;
+ Ok(self.try_format()?.expect("BUG: fallback must always produce a format"))
+ }
+ }
+}
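A sketch of the describe API from the caller's perspective, assuming `Commit::describe()` is the usual way to obtain the `Platform` above; `head_commit()` and the chosen options are illustrative.

fn describe_head(repo: &gix::Repository) -> Result<String, Box<dyn std::error::Error>> {
    let format = repo
        .head_commit()?
        .describe()
        .names(gix::commit::describe::SelectRef::AllTags)
        .id_as_fallback(true)
        .try_format()?
        .expect("a format is always produced when id_as_fallback is enabled");
    Ok(format.to_string())
}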
diff --git a/vendor/gix/src/config/cache/access.rs b/vendor/gix/src/config/cache/access.rs
new file mode 100644
index 000000000..8244eaf27
--- /dev/null
+++ b/vendor/gix/src/config/cache/access.rs
@@ -0,0 +1,233 @@
+#![allow(clippy::result_large_err)]
+use std::{borrow::Cow, path::PathBuf, time::Duration};
+
+use gix_lock::acquire::Fail;
+
+use crate::{
+ bstr::BStr,
+ config,
+ config::{
+ cache::util::{ApplyLeniency, ApplyLeniencyDefault},
+ checkout_options,
+ tree::{Checkout, Core, Key},
+ Cache,
+ },
+ remote,
+ repository::identity,
+};
+
+/// Access
+impl Cache {
+ pub(crate) fn diff_algorithm(&self) -> Result<gix_diff::blob::Algorithm, config::diff::algorithm::Error> {
+ use crate::config::diff::algorithm::Error;
+ self.diff_algorithm
+ .get_or_try_init(|| {
+ let name = self
+ .resolved
+ .string("diff", None, "algorithm")
+ .unwrap_or_else(|| Cow::Borrowed("myers".into()));
+ config::tree::Diff::ALGORITHM
+ .try_into_algorithm(name)
+ .or_else(|err| match err {
+ Error::Unimplemented { .. } if self.lenient_config => Ok(gix_diff::blob::Algorithm::Histogram),
+ err => Err(err),
+ })
+ .with_lenient_default(self.lenient_config)
+ })
+ .copied()
+ }
+
+ /// Returns a user agent for use with servers.
+ #[cfg(any(feature = "async-network-client", feature = "blocking-network-client"))]
+ pub(crate) fn user_agent_tuple(&self) -> (&'static str, Option<Cow<'static, str>>) {
+ use config::tree::Gitoxide;
+ let agent = self
+ .user_agent
+ .get_or_init(|| {
+ self.resolved
+ .string_by_key(Gitoxide::USER_AGENT.logical_name().as_str())
+ .map(|s| s.to_string())
+ .unwrap_or_else(|| crate::env::agent().into())
+ })
+ .to_owned();
+ ("agent", Some(gix_protocol::agent(agent).into()))
+ }
+
+ pub(crate) fn personas(&self) -> &identity::Personas {
+ self.personas
+ .get_or_init(|| identity::Personas::from_config_and_env(&self.resolved))
+ }
+
+ pub(crate) fn url_rewrite(&self) -> &remote::url::Rewrite {
+ self.url_rewrite
+ .get_or_init(|| remote::url::Rewrite::from_config(&self.resolved, self.filter_config_section))
+ }
+
+ #[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+ pub(crate) fn url_scheme(&self) -> Result<&remote::url::SchemePermission, config::protocol::allow::Error> {
+ self.url_scheme
+ .get_or_try_init(|| remote::url::SchemePermission::from_config(&self.resolved, self.filter_config_section))
+ }
+
+ pub(crate) fn diff_renames(
+ &self,
+ ) -> Result<Option<crate::object::tree::diff::Rewrites>, crate::object::tree::diff::rewrites::Error> {
+ self.diff_renames
+ .get_or_try_init(|| {
+ crate::object::tree::diff::Rewrites::try_from_config(&self.resolved, self.lenient_config)
+ })
+ .copied()
+ }
+
+ /// Returns (file-timeout, pack-refs timeout)
+ pub(crate) fn lock_timeout(
+ &self,
+ ) -> Result<(gix_lock::acquire::Fail, gix_lock::acquire::Fail), config::lock_timeout::Error> {
+ let mut out: [gix_lock::acquire::Fail; 2] = Default::default();
+ for (idx, (key, default_ms)) in [(&Core::FILES_REF_LOCK_TIMEOUT, 100), (&Core::PACKED_REFS_TIMEOUT, 1000)]
+ .into_iter()
+ .enumerate()
+ {
+ out[idx] = self
+ .resolved
+ .integer_filter("core", None, key.name, &mut self.filter_config_section.clone())
+ .map(|res| key.try_into_lock_timeout(res))
+ .transpose()
+ .with_leniency(self.lenient_config)?
+ .unwrap_or_else(|| Fail::AfterDurationWithBackoff(Duration::from_millis(default_ms)));
+ }
+ Ok((out[0], out[1]))
+ }
+
+ /// The path to the user-level excludes file to ignore certain files in the worktree.
+ pub(crate) fn excludes_file(&self) -> Option<Result<PathBuf, gix_config::path::interpolate::Error>> {
+ self.trusted_file_path("core", None, Core::EXCLUDES_FILE.name)?
+ .map(|p| p.into_owned())
+ .into()
+ }
+
+ /// A helper to obtain a file from trusted configuration at `section_name`, `subsection_name`, and `key`, which is interpolated
+ /// if present.
+ pub(crate) fn trusted_file_path(
+ &self,
+ section_name: impl AsRef<str>,
+ subsection_name: Option<&BStr>,
+ key: impl AsRef<str>,
+ ) -> Option<Result<Cow<'_, std::path::Path>, gix_config::path::interpolate::Error>> {
+ let path = self.resolved.path_filter(
+ section_name,
+ subsection_name,
+ key,
+ &mut self.filter_config_section.clone(),
+ )?;
+
+ let install_dir = crate::path::install_dir().ok();
+ let home = self.home_dir();
+ let ctx = crate::config::cache::interpolate_context(install_dir.as_deref(), home.as_deref());
+ Some(path.interpolate(ctx))
+ }
+
+ pub(crate) fn apply_leniency<T, E>(&self, res: Option<Result<T, E>>) -> Result<Option<T>, E> {
+ res.transpose().with_leniency(self.lenient_config)
+ }
+
+ /// Collect everything needed to checkout files into a worktree.
+ /// Note that some of the options being returned will be defaulted to safe settings; the caller might have to override them
+ /// depending on the use-case.
+ pub(crate) fn checkout_options(
+ &self,
+ git_dir: &std::path::Path,
+ ) -> Result<gix_worktree::index::checkout::Options, checkout_options::Error> {
+ fn boolean(
+ me: &Cache,
+ full_key: &str,
+ key: &'static config::tree::keys::Boolean,
+ default: bool,
+ ) -> Result<bool, checkout_options::Error> {
+ debug_assert_eq!(
+ full_key,
+ key.logical_name(),
+ "BUG: key name and hardcoded name must match"
+ );
+ Ok(me
+ .apply_leniency(me.resolved.boolean_by_key(full_key).map(|v| key.enrich_error(v)))?
+ .unwrap_or(default))
+ }
+
+ fn assemble_attribute_globals(
+ me: &Cache,
+ _git_dir: &std::path::Path,
+ ) -> Result<gix_attributes::MatchGroup, checkout_options::Error> {
+ let _attributes_file = match me
+ .trusted_file_path("core", None, Core::ATTRIBUTES_FILE.name)
+ .transpose()?
+ {
+ Some(attributes) => Some(attributes.into_owned()),
+ None => me.xdg_config_path("attributes").ok().flatten(),
+ };
+ // TODO: implement gix_attributes::MatchGroup::<gix_attributes::Attributes>::from_git_dir(), similar to what's done for `Ignore`.
+ Ok(Default::default())
+ }
+
+ let thread_limit = self.apply_leniency(
+ self.resolved
+ .integer_filter_by_key("checkout.workers", &mut self.filter_config_section.clone())
+ .map(|value| Checkout::WORKERS.try_from_workers(value)),
+ )?;
+ Ok(gix_worktree::index::checkout::Options {
+ fs: gix_worktree::fs::Capabilities {
+ precompose_unicode: boolean(self, "core.precomposeUnicode", &Core::PRECOMPOSE_UNICODE, false)?,
+ ignore_case: boolean(self, "core.ignoreCase", &Core::IGNORE_CASE, false)?,
+ executable_bit: boolean(self, "core.fileMode", &Core::FILE_MODE, true)?,
+ symlink: boolean(self, "core.symlinks", &Core::SYMLINKS, true)?,
+ },
+ thread_limit,
+ destination_is_initially_empty: false,
+ overwrite_existing: false,
+ keep_going: false,
+ trust_ctime: boolean(self, "core.trustCTime", &Core::TRUST_C_TIME, true)?,
+ check_stat: self
+ .apply_leniency(
+ self.resolved
+ .string("core", None, "checkStat")
+ .map(|v| Core::CHECK_STAT.try_into_checkstat(v)),
+ )?
+ .unwrap_or(true),
+ attribute_globals: assemble_attribute_globals(self, git_dir)?,
+ })
+ }
+ pub(crate) fn xdg_config_path(
+ &self,
+ resource_file_name: &str,
+ ) -> Result<Option<PathBuf>, gix_sec::permission::Error<PathBuf>> {
+ std::env::var_os("XDG_CONFIG_HOME")
+ .map(|path| (PathBuf::from(path), &self.xdg_config_home_env))
+ .or_else(|| {
+ std::env::var_os("HOME").map(|path| {
+ (
+ {
+ let mut p = PathBuf::from(path);
+ p.push(".config");
+ p
+ },
+ &self.home_env,
+ )
+ })
+ })
+ .and_then(|(base, permission)| {
+ let resource = base.join("git").join(resource_file_name);
+ permission.check(resource).transpose()
+ })
+ .transpose()
+ }
+
+ /// Return the home directory if we are allowed to read it and if it is set in the environment.
+ ///
+ /// We never fail here even if the permission is set to deny, as `gix-config` will fail later
+ /// if it actually wants to use the home directory - we don't want to fail prematurely.
+ pub(crate) fn home_dir(&self) -> Option<PathBuf> {
+ std::env::var_os("HOME")
+ .map(PathBuf::from)
+ .and_then(|path| self.home_env.check_opt(path))
+ }
+}
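All values gathered by `checkout_options()` come from regular git configuration, such as `checkout.workers`, `core.fileMode`, `core.symlinks` and `core.checkStat`. As an illustration only, a caller could pin the worker count before checking out by writing to the mutable configuration snapshot, mirroring the `set_raw_value()` and `commit()` usage seen elsewhere in this patch.

fn single_threaded_checkout_config(repo: &mut gix::Repository) -> Result<(), Box<dyn std::error::Error>> {
    let mut config = repo.config_snapshot_mut();
    // `checkout.workers = 1` disables parallel checkout; the change is applied to this instance only.
    config.set_raw_value("checkout", None, "workers", "1")?;
    config.commit()?;
    Ok(())
}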
diff --git a/vendor/gix/src/config/cache/incubate.rs b/vendor/gix/src/config/cache/incubate.rs
new file mode 100644
index 000000000..047f2132b
--- /dev/null
+++ b/vendor/gix/src/config/cache/incubate.rs
@@ -0,0 +1,111 @@
+#![allow(clippy::result_large_err)]
+use super::{util, Error};
+use crate::config::tree::{Core, Extensions};
+
+/// A utility to deal with the cyclic dependency between the ref store and the configuration. The ref-store needs the
+/// object hash kind, and the configuration needs the current branch name to resolve conditional includes with `onbranch`.
+pub(crate) struct StageOne {
+ pub git_dir_config: gix_config::File<'static>,
+ pub buf: Vec<u8>,
+
+ pub is_bare: bool,
+ pub lossy: Option<bool>,
+ pub object_hash: gix_hash::Kind,
+ pub reflog: Option<gix_ref::store::WriteReflog>,
+}
+
+/// Initialization
+impl StageOne {
+ pub fn new(
+ common_dir: &std::path::Path,
+ git_dir: &std::path::Path,
+ git_dir_trust: gix_sec::Trust,
+ lossy: Option<bool>,
+ lenient: bool,
+ ) -> Result<Self, Error> {
+ let mut buf = Vec::with_capacity(512);
+ let mut config = load_config(
+ common_dir.join("config"),
+ &mut buf,
+ gix_config::Source::Local,
+ git_dir_trust,
+ lossy,
+ )?;
+
+ // Note that we assume the repo is bare by default unless we are told otherwise. This is relevant if
+ // the repo doesn't have a configuration file.
+ let is_bare = util::config_bool(&config, &Core::BARE, "core.bare", true, lenient)?;
+ let repo_format_version = config
+ .integer_by_key("core.repositoryFormatVersion")
+ .map(|version| Core::REPOSITORY_FORMAT_VERSION.try_into_usize(version))
+ .transpose()?
+ .unwrap_or_default();
+ let object_hash = (repo_format_version != 1)
+ .then_some(Ok(gix_hash::Kind::Sha1))
+ .or_else(|| {
+ config
+ .string("extensions", None, "objectFormat")
+ .map(|format| Extensions::OBJECT_FORMAT.try_into_object_format(format))
+ })
+ .transpose()?
+ .unwrap_or(gix_hash::Kind::Sha1);
+
+ let extension_worktree = util::config_bool(
+ &config,
+ &Extensions::WORKTREE_CONFIG,
+ "extensions.worktreeConfig",
+ false,
+ lenient,
+ )?;
+ if extension_worktree {
+ let worktree_config = load_config(
+ git_dir.join("config.worktree"),
+ &mut buf,
+ gix_config::Source::Worktree,
+ git_dir_trust,
+ lossy,
+ )?;
+ config.append(worktree_config);
+ };
+
+ let reflog = util::query_refupdates(&config, lenient)?;
+ Ok(StageOne {
+ git_dir_config: config,
+ buf,
+ is_bare,
+ lossy,
+ object_hash,
+ reflog,
+ })
+ }
+}
+
+fn load_config(
+ config_path: std::path::PathBuf,
+ buf: &mut Vec<u8>,
+ source: gix_config::Source,
+ git_dir_trust: gix_sec::Trust,
+ lossy: Option<bool>,
+) -> Result<gix_config::File<'static>, Error> {
+ buf.clear();
+ let metadata = gix_config::file::Metadata::from(source)
+ .at(&config_path)
+ .with(git_dir_trust);
+ let mut file = match std::fs::File::open(&config_path) {
+ Ok(f) => f,
+ Err(err) if err.kind() == std::io::ErrorKind::NotFound => return Ok(gix_config::File::new(metadata)),
+ Err(err) => return Err(err.into()),
+ };
+ std::io::copy(&mut file, buf)?;
+
+ let config = gix_config::File::from_bytes_owned(
+ buf,
+ metadata,
+ gix_config::file::init::Options {
+ includes: gix_config::file::includes::Options::no_follow(),
+ ..util::base_options(lossy)
+ },
+ )?;
+
+ Ok(config)
+}
diff --git a/vendor/gix/src/config/cache/init.rs b/vendor/gix/src/config/cache/init.rs
new file mode 100644
index 000000000..dc76f78bb
--- /dev/null
+++ b/vendor/gix/src/config/cache/init.rs
@@ -0,0 +1,485 @@
+#![allow(clippy::result_large_err)]
+use std::borrow::Cow;
+
+use gix_sec::Permission;
+
+use super::{interpolate_context, util, Error, StageOne};
+use crate::{
+ bstr::BString,
+ config,
+ config::{
+ cache::util::ApplyLeniency,
+ tree::{gitoxide, Core, Http},
+ Cache,
+ },
+ repository,
+};
+
+/// Initialization
+impl Cache {
+ #[allow(clippy::too_many_arguments)]
+ pub fn from_stage_one(
+ StageOne {
+ git_dir_config,
+ mut buf,
+ lossy,
+ is_bare,
+ object_hash,
+ reflog: _,
+ }: StageOne,
+ git_dir: &std::path::Path,
+ branch_name: Option<&gix_ref::FullNameRef>,
+ filter_config_section: fn(&gix_config::file::Metadata) -> bool,
+ git_install_dir: Option<&std::path::Path>,
+ home: Option<&std::path::Path>,
+ repository::permissions::Environment {
+ git_prefix,
+ home: home_env,
+ xdg_config_home: xdg_config_home_env,
+ ssh_prefix: _,
+ http_transport,
+ identity,
+ objects,
+ }: repository::permissions::Environment,
+ repository::permissions::Config {
+ git_binary: use_installation,
+ system: use_system,
+ git: use_git,
+ user: use_user,
+ env: use_env,
+ includes: use_includes,
+ }: repository::permissions::Config,
+ lenient_config: bool,
+ api_config_overrides: &[BString],
+ cli_config_overrides: &[BString],
+ ) -> Result<Self, Error> {
+ let options = gix_config::file::init::Options {
+ includes: if use_includes {
+ gix_config::file::includes::Options::follow(
+ interpolate_context(git_install_dir, home),
+ gix_config::file::includes::conditional::Context {
+ git_dir: git_dir.into(),
+ branch_name,
+ },
+ )
+ } else {
+ gix_config::file::includes::Options::no_follow()
+ },
+ ..util::base_options(lossy)
+ };
+
+ let config = {
+ let home_env = &home_env;
+ let xdg_config_home_env = &xdg_config_home_env;
+ let git_prefix = &git_prefix;
+ let metas = [
+ gix_config::source::Kind::GitInstallation,
+ gix_config::source::Kind::System,
+ gix_config::source::Kind::Global,
+ ]
+ .iter()
+ .flat_map(|kind| kind.sources())
+ .filter_map(|source| {
+ match source {
+ gix_config::Source::GitInstallation if !use_installation => return None,
+ gix_config::Source::System if !use_system => return None,
+ gix_config::Source::Git if !use_git => return None,
+ gix_config::Source::User if !use_user => return None,
+ _ => {}
+ }
+ source
+ .storage_location(&mut |name| {
+ match name {
+ git_ if git_.starts_with("GIT_") => Some(git_prefix),
+ "XDG_CONFIG_HOME" => Some(xdg_config_home_env),
+ "HOME" => Some(home_env),
+ _ => None,
+ }
+ .and_then(|perm| perm.check_opt(name).and_then(std::env::var_os))
+ })
+ .map(|p| (source, p.into_owned()))
+ })
+ .map(|(source, path)| gix_config::file::Metadata {
+ path: Some(path),
+ source: *source,
+ level: 0,
+ trust: gix_sec::Trust::Full,
+ });
+
+ let err_on_nonexisting_paths = false;
+ let mut globals = gix_config::File::from_paths_metadata_buf(
+ metas,
+ &mut buf,
+ err_on_nonexisting_paths,
+ gix_config::file::init::Options {
+ includes: gix_config::file::includes::Options::no_follow(),
+ ..options
+ },
+ )
+ .map_err(|err| match err {
+ gix_config::file::init::from_paths::Error::Init(err) => Error::from(err),
+ gix_config::file::init::from_paths::Error::Io(err) => err.into(),
+ })?
+ .unwrap_or_default();
+
+ let local_meta = git_dir_config.meta_owned();
+ globals.append(git_dir_config);
+ globals.resolve_includes(options)?;
+ if use_env {
+ globals.append(gix_config::File::from_env(options)?.unwrap_or_default());
+ }
+ if !cli_config_overrides.is_empty() {
+ config::overrides::append(&mut globals, cli_config_overrides, gix_config::Source::Cli, |_| None)
+ .map_err(|err| Error::ConfigOverrides {
+ err,
+ source: gix_config::Source::Cli,
+ })?;
+ }
+ if !api_config_overrides.is_empty() {
+ config::overrides::append(&mut globals, api_config_overrides, gix_config::Source::Api, |_| None)
+ .map_err(|err| Error::ConfigOverrides {
+ err,
+ source: gix_config::Source::Api,
+ })?;
+ }
+ apply_environment_overrides(&mut globals, *git_prefix, http_transport, identity, objects)?;
+ globals.set_meta(local_meta);
+ globals
+ };
+
+ let hex_len = util::parse_core_abbrev(&config, object_hash).with_leniency(lenient_config)?;
+
+ use util::config_bool;
+ let reflog = util::query_refupdates(&config, lenient_config)?;
+ let ignore_case = config_bool(&config, &Core::IGNORE_CASE, "core.ignoreCase", false, lenient_config)?;
+ let use_multi_pack_index = config_bool(
+ &config,
+ &Core::MULTIPACK_INDEX,
+ "core.multiPackIndex",
+ true,
+ lenient_config,
+ )?;
+ let object_kind_hint = util::disambiguate_hint(&config, lenient_config)?;
+ let (pack_cache_bytes, object_cache_bytes) =
+ util::parse_object_caches(&config, lenient_config, filter_config_section)?;
+ // NOTE: When adding a new initial cache, consider adjusting `reread_values_and_clear_caches()` as well.
+ Ok(Cache {
+ resolved: config.into(),
+ use_multi_pack_index,
+ object_hash,
+ object_kind_hint,
+ pack_cache_bytes,
+ object_cache_bytes,
+ reflog,
+ is_bare,
+ ignore_case,
+ hex_len,
+ filter_config_section,
+ xdg_config_home_env,
+ home_env,
+ lenient_config,
+ user_agent: Default::default(),
+ personas: Default::default(),
+ url_rewrite: Default::default(),
+ diff_renames: Default::default(),
+ #[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+ url_scheme: Default::default(),
+ diff_algorithm: Default::default(),
+ })
+ }
+
+ /// Call this with new `config` to update values and clear caches. Note that none of the values will be applied if a single
+ /// one is invalid.
+ /// However, those that are lazily read won't be re-evaluated right away and might thus pass now but fail later.
+ ///
+ /// Note that we unconditionally re-read all values.
+ pub fn reread_values_and_clear_caches_replacing_config(&mut self, config: crate::Config) -> Result<(), Error> {
+ let prev = std::mem::replace(&mut self.resolved, config);
+ match self.reread_values_and_clear_caches() {
+ Err(err) => {
+ drop(std::mem::replace(&mut self.resolved, prev));
+ Err(err)
+ }
+ Ok(()) => Ok(()),
+ }
+ }
+
+ /// Similar to `reread_values_and_clear_caches_replacing_config()`, but works on the existing configuration instead of a passed-in
+ /// one that it then makes the default.
+ pub fn reread_values_and_clear_caches(&mut self) -> Result<(), Error> {
+ let config = &self.resolved;
+ let hex_len = util::parse_core_abbrev(config, self.object_hash).with_leniency(self.lenient_config)?;
+
+ use util::config_bool;
+ let ignore_case = config_bool(
+ config,
+ &Core::IGNORE_CASE,
+ "core.ignoreCase",
+ false,
+ self.lenient_config,
+ )?;
+ let object_kind_hint = util::disambiguate_hint(config, self.lenient_config)?;
+ let reflog = util::query_refupdates(config, self.lenient_config)?;
+
+ self.hex_len = hex_len;
+ self.ignore_case = ignore_case;
+ self.object_kind_hint = object_kind_hint;
+ self.reflog = reflog;
+
+ self.user_agent = Default::default();
+ self.personas = Default::default();
+ self.url_rewrite = Default::default();
+ self.diff_renames = Default::default();
+ self.diff_algorithm = Default::default();
+ (self.pack_cache_bytes, self.object_cache_bytes) =
+ util::parse_object_caches(config, self.lenient_config, self.filter_config_section)?;
+ #[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+ {
+ self.url_scheme = Default::default();
+ }
+
+ Ok(())
+ }
+}
+
+impl crate::Repository {
+ /// Replace our own configuration with `config` and re-read all cached values, and apply them to select in-memory instances.
+ pub(crate) fn reread_values_and_clear_caches_replacing_config(
+ &mut self,
+ config: crate::Config,
+ ) -> Result<(), Error> {
+ self.config.reread_values_and_clear_caches_replacing_config(config)?;
+ self.apply_changed_values();
+ Ok(())
+ }
+
+ fn apply_changed_values(&mut self) {
+ self.refs.write_reflog = util::reflog_or_default(self.config.reflog, self.work_dir().is_some());
+ }
+}
+
+fn apply_environment_overrides(
+ config: &mut gix_config::File<'static>,
+ git_prefix: Permission,
+ http_transport: Permission,
+ identity: Permission,
+ objects: Permission,
+) -> Result<(), Error> {
+ fn env(key: &'static dyn config::tree::Key) -> &'static str {
+ key.the_environment_override()
+ }
+ fn var_as_bstring(var: &str, perm: Permission) -> Option<BString> {
+ perm.check_opt(var)
+ .and_then(std::env::var_os)
+ .and_then(|val| gix_path::os_string_into_bstring(val).ok())
+ }
+
+ let mut env_override = gix_config::File::new(gix_config::file::Metadata::from(gix_config::Source::EnvOverride));
+ for (section_name, subsection_name, permission, data) in [
+ (
+ "http",
+ None,
+ http_transport,
+ &[
+ ("GIT_HTTP_LOW_SPEED_LIMIT", "lowSpeedLimit"),
+ ("GIT_HTTP_LOW_SPEED_TIME", "lowSpeedTime"),
+ ("GIT_HTTP_USER_AGENT", "userAgent"),
+ {
+ let key = &Http::SSL_CA_INFO;
+ (env(key), key.name)
+ },
+ {
+ let key = &Http::SSL_VERSION;
+ (env(key), key.name)
+ },
+ ][..],
+ ),
+ (
+ "gitoxide",
+ Some(Cow::Borrowed("https".into())),
+ http_transport,
+ &[
+ ("HTTPS_PROXY", gitoxide::Https::PROXY.name),
+ ("https_proxy", gitoxide::Https::PROXY.name),
+ ],
+ ),
+ (
+ "gitoxide",
+ Some(Cow::Borrowed("http".into())),
+ http_transport,
+ &[
+ ("ALL_PROXY", "allProxy"),
+ {
+ let key = &gitoxide::Http::ALL_PROXY;
+ (env(key), key.name)
+ },
+ ("NO_PROXY", "noProxy"),
+ {
+ let key = &gitoxide::Http::NO_PROXY;
+ (env(key), key.name)
+ },
+ {
+ let key = &gitoxide::Http::PROXY;
+ (env(key), key.name)
+ },
+ {
+ let key = &gitoxide::Http::VERBOSE;
+ (env(key), key.name)
+ },
+ {
+ let key = &gitoxide::Http::PROXY_AUTH_METHOD;
+ (env(key), key.name)
+ },
+ ],
+ ),
+ (
+ "gitoxide",
+ Some(Cow::Borrowed("committer".into())),
+ identity,
+ &[
+ {
+ let key = &gitoxide::Committer::NAME_FALLBACK;
+ (env(key), key.name)
+ },
+ {
+ let key = &gitoxide::Committer::EMAIL_FALLBACK;
+ (env(key), key.name)
+ },
+ ],
+ ),
+ (
+ "gitoxide",
+ Some(Cow::Borrowed("author".into())),
+ identity,
+ &[
+ {
+ let key = &gitoxide::Author::NAME_FALLBACK;
+ (env(key), key.name)
+ },
+ {
+ let key = &gitoxide::Author::EMAIL_FALLBACK;
+ (env(key), key.name)
+ },
+ ],
+ ),
+ (
+ "gitoxide",
+ Some(Cow::Borrowed("commit".into())),
+ git_prefix,
+ &[
+ {
+ let key = &gitoxide::Commit::COMMITTER_DATE;
+ (env(key), key.name)
+ },
+ {
+ let key = &gitoxide::Commit::AUTHOR_DATE;
+ (env(key), key.name)
+ },
+ ],
+ ),
+ (
+ "gitoxide",
+ Some(Cow::Borrowed("allow".into())),
+ http_transport,
+ &[("GIT_PROTOCOL_FROM_USER", "protocolFromUser")],
+ ),
+ (
+ "gitoxide",
+ Some(Cow::Borrowed("user".into())),
+ identity,
+ &[{
+ let key = &gitoxide::User::EMAIL_FALLBACK;
+ (env(key), key.name)
+ }],
+ ),
+ (
+ "gitoxide",
+ Some(Cow::Borrowed("objects".into())),
+ objects,
+ &[
+ {
+ let key = &gitoxide::Objects::NO_REPLACE;
+ (env(key), key.name)
+ },
+ {
+ let key = &gitoxide::Objects::REPLACE_REF_BASE;
+ (env(key), key.name)
+ },
+ {
+ let key = &gitoxide::Objects::CACHE_LIMIT;
+ (env(key), key.name)
+ },
+ ],
+ ),
+ (
+ "gitoxide",
+ Some(Cow::Borrowed("ssh".into())),
+ git_prefix,
+ &[{
+ let key = &gitoxide::Ssh::COMMAND_WITHOUT_SHELL_FALLBACK;
+ (env(key), key.name)
+ }],
+ ),
+ (
+ "ssh",
+ None,
+ git_prefix,
+ &[{
+ let key = &config::tree::Ssh::VARIANT;
+ (env(key), key.name)
+ }],
+ ),
+ ] {
+ let mut section = env_override
+ .new_section(section_name, subsection_name)
+ .expect("statically known valid section name");
+ for (var, key) in data {
+ if let Some(value) = var_as_bstring(var, permission) {
+ section.push_with_comment(
+ (*key).try_into().expect("statically known to be valid"),
+ Some(value.as_ref()),
+ format!("from {var}").as_str(),
+ );
+ }
+ }
+ if section.num_values() == 0 {
+ let id = section.id();
+ env_override.remove_section_by_id(id);
+ }
+ }
+
+ {
+ let mut section = env_override
+ .new_section("core", None)
+ .expect("statically known valid section name");
+
+ for (var, key, permission) in [
+ {
+ let key = &Core::DELTA_BASE_CACHE_LIMIT;
+ (env(key), key.name, objects)
+ },
+ {
+ let key = &Core::SSH_COMMAND;
+ (env(key), key.name, git_prefix)
+ },
+ ] {
+ if let Some(value) = var_as_bstring(var, permission) {
+ section.push_with_comment(
+ key.try_into().expect("statically known to be valid"),
+ Some(value.as_ref()),
+ format!("from {var}").as_str(),
+ );
+ }
+ }
+
+ if section.num_values() == 0 {
+ let id = section.id();
+ env_override.remove_section_by_id(id);
+ }
+ }
+
+ if !env_override.is_void() {
+ config.append(env_override);
+ }
+ Ok(())
+}
diff --git a/vendor/gix/src/config/cache/mod.rs b/vendor/gix/src/config/cache/mod.rs
new file mode 100644
index 000000000..1904c5ea9
--- /dev/null
+++ b/vendor/gix/src/config/cache/mod.rs
@@ -0,0 +1,18 @@
+use super::{Cache, Error};
+
+mod incubate;
+pub(crate) use incubate::StageOne;
+
+mod init;
+
+impl std::fmt::Debug for Cache {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ f.debug_struct("Cache").finish_non_exhaustive()
+ }
+}
+
+mod access;
+
+pub(crate) mod util;
+
+pub(crate) use util::interpolate_context;
diff --git a/vendor/gix/src/config/cache/util.rs b/vendor/gix/src/config/cache/util.rs
new file mode 100644
index 000000000..c12f850e6
--- /dev/null
+++ b/vendor/gix/src/config/cache/util.rs
@@ -0,0 +1,143 @@
+#![allow(clippy::result_large_err)]
+use super::Error;
+use crate::{
+ config,
+ config::tree::{gitoxide, Core},
+ revision::spec::parse::ObjectKindHint,
+};
+
+pub(crate) fn interpolate_context<'a>(
+ git_install_dir: Option<&'a std::path::Path>,
+ home_dir: Option<&'a std::path::Path>,
+) -> gix_config::path::interpolate::Context<'a> {
+ gix_config::path::interpolate::Context {
+ git_install_dir,
+ home_dir,
+ home_for_user: Some(gix_config::path::interpolate::home_for_user), // TODO: figure out how to configure this
+ }
+}
+
+pub(crate) fn base_options(lossy: Option<bool>) -> gix_config::file::init::Options<'static> {
+ gix_config::file::init::Options {
+ lossy: lossy.unwrap_or(!cfg!(debug_assertions)),
+ ..Default::default()
+ }
+}
+
+pub(crate) fn config_bool(
+ config: &gix_config::File<'_>,
+ key: &'static config::tree::keys::Boolean,
+ key_str: &str,
+ default: bool,
+ lenient: bool,
+) -> Result<bool, Error> {
+ use config::tree::Key;
+ debug_assert_eq!(
+ key_str,
+ key.logical_name(),
+ "BUG: key name and hardcoded name must match"
+ );
+ config
+ .boolean_by_key(key_str)
+ .map(|res| key.enrich_error(res))
+ .unwrap_or(Ok(default))
+ .map_err(Error::from)
+ .with_lenient_default(lenient)
+}
+
+pub(crate) fn query_refupdates(
+ config: &gix_config::File<'static>,
+ lenient_config: bool,
+) -> Result<Option<gix_ref::store::WriteReflog>, Error> {
+ let key = "core.logAllRefUpdates";
+ Core::LOG_ALL_REF_UPDATES
+ .try_into_ref_updates(config.boolean_by_key(key), || config.string_by_key(key))
+ .with_leniency(lenient_config)
+ .map_err(Into::into)
+}
+
+pub(crate) fn reflog_or_default(
+ config_reflog: Option<gix_ref::store::WriteReflog>,
+ has_worktree: bool,
+) -> gix_ref::store::WriteReflog {
+ config_reflog.unwrap_or(if has_worktree {
+ gix_ref::store::WriteReflog::Normal
+ } else {
+ gix_ref::store::WriteReflog::Disable
+ })
+}
+
+/// Return `(pack_cache_bytes, object_cache_bytes)` as parsed from gix-config
+pub(crate) fn parse_object_caches(
+ config: &gix_config::File<'static>,
+ lenient: bool,
+ mut filter_config_section: fn(&gix_config::file::Metadata) -> bool,
+) -> Result<(Option<usize>, usize), Error> {
+ let pack_cache_bytes = config
+ .integer_filter_by_key("core.deltaBaseCacheLimit", &mut filter_config_section)
+ .map(|res| Core::DELTA_BASE_CACHE_LIMIT.try_into_usize(res))
+ .transpose()
+ .with_leniency(lenient)?;
+ let object_cache_bytes = config
+ .integer_filter_by_key("gitoxide.objects.cacheLimit", &mut filter_config_section)
+ .map(|res| gitoxide::Objects::CACHE_LIMIT.try_into_usize(res))
+ .transpose()
+ .with_leniency(lenient)?
+ .unwrap_or_default();
+ Ok((pack_cache_bytes, object_cache_bytes))
+}
+
+pub(crate) fn parse_core_abbrev(
+ config: &gix_config::File<'static>,
+ object_hash: gix_hash::Kind,
+) -> Result<Option<usize>, Error> {
+ Ok(config
+ .string_by_key("core.abbrev")
+ .map(|abbrev| Core::ABBREV.try_into_abbreviation(abbrev, object_hash))
+ .transpose()?
+ .flatten())
+}
+
+pub(crate) fn disambiguate_hint(
+ config: &gix_config::File<'static>,
+ lenient_config: bool,
+) -> Result<Option<ObjectKindHint>, config::key::GenericErrorWithValue> {
+ match config.string_by_key("core.disambiguate") {
+ None => Ok(None),
+ Some(value) => Core::DISAMBIGUATE
+ .try_into_object_kind_hint(value)
+ .with_leniency(lenient_config),
+ }
+}
+
+// TODO: Use a specialization here once trait specialization is stabilized. Would be perfect here for `T: Default`.
+pub trait ApplyLeniency {
+ fn with_leniency(self, is_lenient: bool) -> Self;
+}
+
+pub trait ApplyLeniencyDefault {
+ fn with_lenient_default(self, is_lenient: bool) -> Self;
+}
+
+impl<T, E> ApplyLeniency for Result<Option<T>, E> {
+ fn with_leniency(self, is_lenient: bool) -> Self {
+ match self {
+ Ok(v) => Ok(v),
+ Err(_) if is_lenient => Ok(None),
+ Err(err) => Err(err),
+ }
+ }
+}
+
+impl<T, E> ApplyLeniencyDefault for Result<T, E>
+where
+ T: Default,
+{
+ fn with_lenient_default(self, is_lenient: bool) -> Self {
+ match self {
+ Ok(v) => Ok(v),
+ Err(_) if is_lenient => Ok(T::default()),
+ Err(err) => Err(err),
+ }
+ }
+}
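To make the leniency rules concrete, a tiny illustrative helper follows; the traits are crate-internal, so this is a sketch of in-crate usage rather than public API. With leniency enabled, an `Err` from parsing degrades to `Ok(None)` instead of failing the whole operation.

use crate::config::cache::util::ApplyLeniency;

fn lenient_usize(
    raw: Option<Result<usize, std::num::ParseIntError>>,
    lenient: bool,
) -> Result<Option<usize>, std::num::ParseIntError> {
    // With `lenient == true`, an `Err` becomes `Ok(None)`; otherwise it is passed through unchanged.
    raw.transpose().with_leniency(lenient)
}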
diff --git a/vendor/gix/src/config/mod.rs b/vendor/gix/src/config/mod.rs
new file mode 100644
index 000000000..1e2566777
--- /dev/null
+++ b/vendor/gix/src/config/mod.rs
@@ -0,0 +1,454 @@
+pub use gix_config::*;
+use gix_features::threading::OnceCell;
+
+use crate::{bstr::BString, repository::identity, revision::spec, Repository};
+
+pub(crate) mod cache;
+mod snapshot;
+pub use snapshot::credential_helpers;
+
+///
+pub mod overrides;
+
+pub mod tree;
+pub use tree::root::Tree;
+
+/// A platform to access configuration values as read from disk.
+///
+/// Note that these values won't update even if the underlying file(s) change.
+pub struct Snapshot<'repo> {
+ pub(crate) repo: &'repo Repository,
+}
+
+/// A platform to access configuration values and modify them in memory, while making them available when this platform is dropped
+/// as a form of auto-commit.
+/// Note that the values will only affect this instance of the parent repository, and not other clones that may exist.
+///
+/// Note that these values won't update even if the underlying file(s) change.
+///
+/// Use [`forget()`][Self::forget()] to not apply any of the changes.
+// TODO: make it possible to load snapshots with reloading via .config() and write mutated snapshots back to disk which should be the way
+// to affect all instances of a repo, probably via `config_mut()` and `config_mut_at()`.
+pub struct SnapshotMut<'repo> {
+ pub(crate) repo: Option<&'repo mut Repository>,
+ pub(crate) config: gix_config::File<'static>,
+}
+
+/// A utility structure created by [`SnapshotMut::commit_auto_rollback()`] that restores the previous configuration on drop.
+pub struct CommitAutoRollback<'repo> {
+ pub(crate) repo: Option<&'repo mut Repository>,
+ pub(crate) prev_config: crate::Config,
+}
+
+pub(crate) mod section {
+ pub fn is_trusted(meta: &gix_config::file::Metadata) -> bool {
+ meta.trust == gix_sec::Trust::Full || meta.source.kind() != gix_config::source::Kind::Repository
+ }
+}
+
+/// The error returned when failing to initialize the repository configuration.
+///
+/// This configuration is on the critical path when opening a repository.
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum Error {
+ #[error(transparent)]
+ ConfigBoolean(#[from] boolean::Error),
+ #[error(transparent)]
+ ConfigUnsigned(#[from] unsigned_integer::Error),
+ #[error(transparent)]
+ ConfigTypedString(#[from] key::GenericErrorWithValue),
+ #[error("Cannot handle objects formatted as {:?}", .name)]
+ UnsupportedObjectFormat { name: BString },
+ #[error(transparent)]
+ CoreAbbrev(#[from] abbrev::Error),
+ #[error("Could not read configuration file")]
+ Io(#[from] std::io::Error),
+ #[error(transparent)]
+ Init(#[from] gix_config::file::init::Error),
+ #[error(transparent)]
+ ResolveIncludes(#[from] gix_config::file::includes::Error),
+ #[error(transparent)]
+ FromEnv(#[from] gix_config::file::init::from_env::Error),
+ #[error(transparent)]
+ PathInterpolation(#[from] gix_config::path::interpolate::Error),
+ #[error("{source:?} configuration overrides at open or init time could not be applied.")]
+ ConfigOverrides {
+ #[source]
+ err: overrides::Error,
+ source: gix_config::Source,
+ },
+}
+
+///
+pub mod diff {
+ ///
+ pub mod algorithm {
+ use crate::bstr::BString;
+
+ /// The error produced when obtaining `diff.algorithm`.
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("Unknown diff algorithm named '{name}'")]
+ Unknown { name: BString },
+ #[error("The '{name}' algorithm is not yet implemented")]
+ Unimplemented { name: BString },
+ }
+ }
+}
+
+///
+pub mod checkout_options {
+ /// The error produced when collecting all information needed for checking out files into a worktree.
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ ConfigCheckStat(#[from] super::key::GenericErrorWithValue),
+ #[error(transparent)]
+ ConfigBoolean(#[from] super::boolean::Error),
+ #[error(transparent)]
+ CheckoutWorkers(#[from] super::checkout::workers::Error),
+ #[error("Failed to interpolate the attribute file configured at `core.attributesFile`")]
+ AttributesFileInterpolation(#[from] gix_config::path::interpolate::Error),
+ }
+}
+
+///
+pub mod protocol {
+ ///
+ pub mod allow {
+ use crate::bstr::BString;
+
+ /// The error returned when obtaining the permission for a particular scheme.
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ #[error("The value {value:?} must be allow|deny|user in configuration key protocol{0}.allow", scheme.as_ref().map(|s| format!(".{s}")).unwrap_or_default())]
+ pub struct Error {
+ pub scheme: Option<String>,
+ pub value: BString,
+ }
+ }
+}
+
+///
+pub mod ssh_connect_options {
+ /// The error produced when obtaining ssh connection configuration.
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ #[error(transparent)]
+ pub struct Error(#[from] super::key::GenericErrorWithValue);
+}
+
+///
+pub mod key {
+ use crate::bstr::BString;
+
+ const fn prefix(kind: char) -> &'static str {
+ match kind {
+ 'n' => "", // nothing
+ 'k' => "The value of key", // generic key
+ 't' => "The date format at key", // time
+ 'i' => "The timeout at key", // timeout
+ 'd' => "The duration [ms] at key", // duration
+ 'b' => "The boolean at key", // boolean
+ 'v' => "The key", // generic key with value
+ 'r' => "The refspec at", // refspec
+ 's' => "The ssl version at", // ssl-version
+ 'u' => "The url at", // url
+ 'w' => "The utf-8 string at", // string
+ _ => panic!("BUG: invalid prefix kind - add a case for it here"),
+ }
+ }
+ const fn suffix(kind: char) -> &'static str {
+ match kind {
+ 'd' => "could not be decoded", // decoding
+ 'i' => "was invalid", // invalid
+ 'u' => "could not be parsed as unsigned integer", // unsigned integer
+ 'p' => "could not be parsed", // parsing
+ _ => panic!("BUG: invalid suffix kind - add a case for it here"),
+ }
+ }
+ /// A generic error suitable to produce decent messages for all kinds of configuration errors with config-key granularity.
+ ///
+ /// This error is meant to be reusable and help produce uniform error messages related to parsing any configuration key.
+ #[derive(Debug, thiserror::Error)]
+ #[error("{} \"{key}{}\"{} {}", prefix(PREFIX), value.as_ref().map(|v| format!("={v}")).unwrap_or_default(), environment_override.as_deref().map(|var| format!(" (possibly from {var})")).unwrap_or_default(), suffix(SUFFIX))]
+ pub struct Error<E: std::error::Error + Send + Sync + 'static, const PREFIX: char, const SUFFIX: char> {
+ /// The configuration key that contained the value.
+ pub key: BString,
+ /// The value that was assigned to `key`.
+ pub value: Option<BString>,
+ /// The associated environment variable that would override this value.
+ pub environment_override: Option<&'static str>,
+ /// The source of the error if there was one.
+ pub source: Option<E>,
+ }
+
+ /// Initialization
+ /// Instantiate a new error from the given `key`.
+ ///
+    /// Note that the specifics of the error message are defined by the `PREFIX` and `SUFFIX`, which are usually defined by a typedef.
+ impl<T, E, const PREFIX: char, const SUFFIX: char> From<&'static T> for Error<E, PREFIX, SUFFIX>
+ where
+ E: std::error::Error + Send + Sync + 'static,
+ T: super::tree::Key,
+ {
+ fn from(key: &'static T) -> Self {
+ Error {
+ key: key.logical_name().into(),
+ value: None,
+ environment_override: key.environment_override(),
+ source: None,
+ }
+ }
+ }
+
+ /// Initialization
+ impl<E, const PREFIX: char, const SUFFIX: char> Error<E, PREFIX, SUFFIX>
+ where
+ E: std::error::Error + Send + Sync + 'static,
+ {
+ /// Instantiate an error with all data from `key` along with the `value` of the key.
+ pub fn from_value(key: &'static impl super::tree::Key, value: BString) -> Self {
+ Error::from(key).with_value(value)
+ }
+ }
+
+ /// Builder
+ impl<E, const PREFIX: char, const SUFFIX: char> Error<E, PREFIX, SUFFIX>
+ where
+ E: std::error::Error + Send + Sync + 'static,
+ {
+ /// Attach the given `err` as source.
+ pub fn with_source(mut self, err: E) -> Self {
+ self.source = Some(err);
+ self
+ }
+
+    /// Attach the given `value` as the value we observed when the error was produced.
+ pub fn with_value(mut self, value: BString) -> Self {
+ self.value = Some(value);
+ self
+ }
+ }
+
+    /// A generic key error for use when it doesn't seem worth it to say more than 'key is invalid', along with meta-data.
+ pub type GenericError<E = gix_config::value::Error> = Error<E, 'k', 'i'>;
+
+ /// A generic key error which will also contain a value.
+ pub type GenericErrorWithValue<E = gix_config::value::Error> = Error<E, 'v', 'i'>;
+}
+
+///
+pub mod checkout {
+ ///
+ pub mod workers {
+ use crate::config;
+
+        /// The error produced when failing to parse the `checkout.workers` key.
+ pub type Error = config::key::Error<gix_config::value::Error, 'n', 'd'>;
+ }
+}
+
+///
+pub mod abbrev {
+ use crate::bstr::BString;
+
+ /// The error describing an incorrect `core.abbrev` value.
+ #[derive(Debug, thiserror::Error)]
+ #[error("Invalid value for 'core.abbrev' = '{}'. It must be between 4 and {}", .value, .max)]
+ pub struct Error {
+ /// The value found in the git configuration
+ pub value: BString,
+ /// The maximum abbreviation length, the length of an object hash.
+ pub max: u8,
+ }
+}
+
+///
+pub mod remote {
+ ///
+ pub mod symbolic_name {
+ /// The error produced when failing to produce a symbolic remote name from configuration.
+ pub type Error = super::super::key::Error<crate::remote::name::Error, 'v', 'i'>;
+ }
+}
+
+///
+pub mod time {
+ /// The error produced when failing to parse time from configuration.
+ pub type Error = super::key::Error<gix_date::parse::Error, 't', 'i'>;
+}
+
+///
+pub mod lock_timeout {
+ /// The error produced when failing to parse timeout for locks.
+ pub type Error = super::key::Error<gix_config::value::Error, 'i', 'i'>;
+}
+
+///
+pub mod duration {
+ /// The error produced when failing to parse durations (in milliseconds).
+ pub type Error = super::key::Error<gix_config::value::Error, 'd', 'i'>;
+}
+
+///
+pub mod boolean {
+    /// The error produced when failing to parse a boolean from configuration.
+ pub type Error = super::key::Error<gix_config::value::Error, 'b', 'i'>;
+}
+
+///
+pub mod unsigned_integer {
+    /// The error produced when failing to parse an unsigned integer from configuration.
+ pub type Error = super::key::Error<gix_config::value::Error, 'k', 'u'>;
+}
+
+///
+pub mod url {
+ /// The error produced when failing to parse a url from the configuration.
+ pub type Error = super::key::Error<gix_url::parse::Error, 'u', 'p'>;
+}
+
+///
+pub mod string {
+ /// The error produced when failing to interpret configuration as UTF-8 encoded string.
+ pub type Error = super::key::Error<crate::bstr::Utf8Error, 'w', 'd'>;
+}
+
+///
+pub mod refspec {
+ /// The error produced when failing to parse a refspec from the configuration.
+ pub type Error = super::key::Error<gix_refspec::parse::Error, 'r', 'p'>;
+}
+
+///
+pub mod ssl_version {
+    /// The error produced when failing to parse an ssl version from the configuration.
+ pub type Error = super::key::Error<std::convert::Infallible, 's', 'i'>;
+}
+
+///
+pub mod transport {
+ use std::borrow::Cow;
+
+ use crate::bstr::BStr;
+
+ /// The error produced when configuring a transport for a particular protocol.
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(
+ "Could not interpret configuration key {key:?} as {kind} integer of desired range with value: {actual}"
+ )]
+ InvalidInteger {
+ key: &'static str,
+ kind: &'static str,
+ actual: i64,
+ },
+ #[error("Could not interpret configuration key {key:?}")]
+ ConfigValue {
+ source: gix_config::value::Error,
+ key: &'static str,
+ },
+ #[error("Could not interpolate path at key {key:?}")]
+ InterpolatePath {
+ source: gix_config::path::interpolate::Error,
+ key: &'static str,
+ },
+ #[error("Could not decode value at key {key:?} as UTF-8 string")]
+ IllformedUtf8 {
+ key: Cow<'static, BStr>,
+ source: crate::config::string::Error,
+ },
+ #[error("Invalid URL passed for configuration")]
+ ParseUrl(#[from] gix_url::parse::Error),
+ #[error("Could obtain configuration for an HTTP url")]
+ Http(#[from] http::Error),
+ }
+
+ ///
+ pub mod http {
+ use std::borrow::Cow;
+
+ use crate::bstr::BStr;
+
+        /// The error produced when configuring an HTTP transport.
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ Boolean(#[from] crate::config::boolean::Error),
+ #[error(transparent)]
+ UnsignedInteger(#[from] crate::config::unsigned_integer::Error),
+ #[error(transparent)]
+ ConnectTimeout(#[from] crate::config::duration::Error),
+ #[error("The proxy authentication at key `{key}` is invalid")]
+ InvalidProxyAuthMethod {
+ source: crate::config::key::GenericErrorWithValue,
+ key: Cow<'static, BStr>,
+ },
+ #[error("Could not configure the credential helpers for the authenticated proxy url")]
+ ConfigureProxyAuthenticate(#[from] crate::config::snapshot::credential_helpers::Error),
+ #[error(transparent)]
+ InvalidSslVersion(#[from] crate::config::ssl_version::Error),
+ #[error("The HTTP version must be 'HTTP/2' or 'HTTP/1.1'")]
+ InvalidHttpVersion(#[from] crate::config::key::GenericErrorWithValue),
+ #[error("The follow redirects value 'initial', or boolean true or false")]
+ InvalidFollowRedirects(#[source] crate::config::key::GenericErrorWithValue),
+ }
+ }
+}
+
+/// Utility type to keep pre-obtained configuration values, only for those required during initial setup
+/// and other basic operations that are common enough to warrant a permanent cache.
+///
+/// All other values are obtained lazily using OnceCell.
+#[derive(Clone)]
+pub(crate) struct Cache {
+ pub resolved: crate::Config,
+ /// The hex-length to assume when shortening object ids. If `None`, it should be computed based on the approximate object count.
+ pub hex_len: Option<usize>,
+ /// true if the repository is designated as 'bare', without work tree.
+ pub is_bare: bool,
+ /// The type of hash to use.
+ pub object_hash: gix_hash::Kind,
+ /// If true, multi-pack indices, whether present or not, may be used by the object database.
+ pub use_multi_pack_index: bool,
+ /// The representation of `core.logallrefupdates`, or `None` if the variable wasn't set.
+ pub reflog: Option<gix_ref::store::WriteReflog>,
+ /// The configured user agent for presentation to servers.
+ pub(crate) user_agent: OnceCell<String>,
+ /// identities for later use, lazy initialization.
+ pub(crate) personas: OnceCell<identity::Personas>,
+ /// A lazily loaded rewrite list for remote urls
+ pub(crate) url_rewrite: OnceCell<crate::remote::url::Rewrite>,
+ /// The lazy-loaded rename information for diffs.
+ pub(crate) diff_renames: OnceCell<Option<crate::object::tree::diff::Rewrites>>,
+ /// A lazily loaded mapping to know which url schemes to allow
+ #[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+ pub(crate) url_scheme: OnceCell<crate::remote::url::SchemePermission>,
+ /// The algorithm to use when diffing blobs
+ pub(crate) diff_algorithm: OnceCell<gix_diff::blob::Algorithm>,
+ /// The amount of bytes to use for a memory backed delta pack cache. If `Some(0)`, no cache is used, if `None`
+ /// a standard cache is used which costs near to nothing and always pays for itself.
+ pub(crate) pack_cache_bytes: Option<usize>,
+ /// The amount of bytes to use for caching whole objects, or 0 to turn it off entirely.
+ pub(crate) object_cache_bytes: usize,
+ /// The config section filter from the options used to initialize this instance. Keep these in sync!
+ filter_config_section: fn(&gix_config::file::Metadata) -> bool,
+ /// The object kind to pick if a prefix is ambiguous.
+ pub object_kind_hint: Option<spec::parse::ObjectKindHint>,
+ /// If true, we are on a case-insensitive file system.
+ pub ignore_case: bool,
+ /// If true, we should default what's possible if something is misconfigured, on case by case basis, to be more resilient.
+ /// Also available in options! Keep in sync!
+ pub lenient_config: bool,
+ /// Define how we can use values obtained with `xdg_config(…)` and its `XDG_CONFIG_HOME` variable.
+ xdg_config_home_env: gix_sec::Permission,
+    /// Define how we can use values obtained with `xdg_config(…)` and its `HOME` variable.
+ home_env: gix_sec::Permission,
+ // TODO: make core.precomposeUnicode available as well.
+}
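
As a rough illustration of how the const-generic `key::Error` above assembles its messages, this standalone sketch (not part of the vendored file) reproduces the format used by `GenericErrorWithValue`, whose `PREFIX`/`SUFFIX` characters are `'v'` and `'i'`; the lookup tables are copied from the `key` module, the rest is illustrative only.

fn prefix(kind: char) -> &'static str {
    match kind {
        'v' => "The key",          // generic key with value
        'k' => "The value of key", // generic key
        _ => "",
    }
}

fn suffix(kind: char) -> &'static str {
    match kind {
        'i' => "was invalid",                             // invalid
        'u' => "could not be parsed as unsigned integer", // unsigned integer
        _ => "",
    }
}

fn main() {
    // GenericErrorWithValue = Error<E, 'v', 'i'> renders as: <prefix> "<key>=<value>" <suffix>
    let (key, value) = ("core.disambiguate", "treeish");
    let message = format!("{} \"{key}={value}\" {}", prefix('v'), suffix('i'));
    assert_eq!(message, "The key \"core.disambiguate=treeish\" was invalid");
}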
diff --git a/vendor/gix/src/config/overrides.rs b/vendor/gix/src/config/overrides.rs
new file mode 100644
index 000000000..f43e8471b
--- /dev/null
+++ b/vendor/gix/src/config/overrides.rs
@@ -0,0 +1,49 @@
+use std::convert::TryFrom;
+
+use crate::bstr::{BStr, BString, ByteSlice};
+
+/// The error returned by [SnapshotMut::append_config()][crate::config::SnapshotMut::append_config()].
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum Error {
+ #[error("{input:?} is not a valid configuration key. Examples are 'core.abbrev' or 'remote.origin.url'")]
+ InvalidKey { input: BString },
+ #[error("Key {key:?} could not be parsed")]
+ SectionKey {
+ key: BString,
+ source: gix_config::parse::section::key::Error,
+ },
+ #[error(transparent)]
+ SectionHeader(#[from] gix_config::parse::section::header::Error),
+}
+
+pub(crate) fn append(
+ config: &mut gix_config::File<'static>,
+ values: impl IntoIterator<Item = impl AsRef<BStr>>,
+ source: gix_config::Source,
+ mut make_comment: impl FnMut(&BStr) -> Option<BString>,
+) -> Result<(), Error> {
+ let mut file = gix_config::File::new(gix_config::file::Metadata::from(source));
+ for key_value in values {
+ let key_value = key_value.as_ref();
+ let mut tokens = key_value.splitn(2, |b| *b == b'=').map(|v| v.trim());
+ let key = tokens.next().expect("always one value").as_bstr();
+ let value = tokens.next();
+ let key = gix_config::parse::key(key.to_str().map_err(|_| Error::InvalidKey { input: key.into() })?)
+ .ok_or_else(|| Error::InvalidKey { input: key.into() })?;
+ let mut section = file.section_mut_or_create_new(key.section_name, key.subsection_name)?;
+ let key =
+ gix_config::parse::section::Key::try_from(key.value_name.to_owned()).map_err(|err| Error::SectionKey {
+ source: err,
+ key: key.value_name.into(),
+ })?;
+ let comment = make_comment(key_value);
+ let value = value.map(|v| v.as_bstr());
+ match comment {
+ Some(comment) => section.push_with_comment(key, value, &**comment),
+ None => section.push(key, value),
+ };
+ }
+ config.append(file);
+ Ok(())
+}
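
A hedged sketch (independent of the gix types, not part of the vendored file) of the key/value splitting performed by `append()` above: each override is split at the first `=`, both sides are trimmed, and a value-less key is pushed without a value, which git-config treats as an implicit boolean `true`.

fn split_override(input: &str) -> (&str, Option<&str>) {
    // Mirrors `key_value.splitn(2, |b| *b == b'=').map(|v| v.trim())` in `append()`.
    let mut tokens = input.splitn(2, '=').map(str::trim);
    let key = tokens.next().expect("always one token");
    (key, tokens.next())
}

fn main() {
    assert_eq!(split_override("core.abbrev=5"), ("core.abbrev", Some("5")));
    assert_eq!(split_override("remote.origin.url = foo"), ("remote.origin.url", Some("foo")));
    assert_eq!(split_override("core.bare"), ("core.bare", None)); // implicit `true`
}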
diff --git a/vendor/gix/src/config/snapshot/_impls.rs b/vendor/gix/src/config/snapshot/_impls.rs
new file mode 100644
index 000000000..ec22cb640
--- /dev/null
+++ b/vendor/gix/src/config/snapshot/_impls.rs
@@ -0,0 +1,76 @@
+use std::{
+ fmt::{Debug, Formatter},
+ ops::{Deref, DerefMut},
+};
+
+use crate::config::{CommitAutoRollback, Snapshot, SnapshotMut};
+
+impl Debug for Snapshot<'_> {
+ fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+ f.write_str(&self.repo.config.resolved.to_string())
+ }
+}
+
+impl Debug for CommitAutoRollback<'_> {
+ fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+ f.write_str(&self.repo.as_ref().expect("still present").config.resolved.to_string())
+ }
+}
+
+impl Debug for SnapshotMut<'_> {
+ fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+ f.write_str(&self.config.to_string())
+ }
+}
+
+impl Drop for SnapshotMut<'_> {
+ fn drop(&mut self) {
+ if let Some(repo) = self.repo.take() {
+ self.commit_inner(repo).ok();
+ };
+ }
+}
+
+impl Drop for CommitAutoRollback<'_> {
+ fn drop(&mut self) {
+ if let Some(repo) = self.repo.take() {
+ self.rollback_inner(repo).ok();
+ }
+ }
+}
+
+impl Deref for SnapshotMut<'_> {
+ type Target = gix_config::File<'static>;
+
+ fn deref(&self) -> &Self::Target {
+ &self.config
+ }
+}
+
+impl Deref for Snapshot<'_> {
+ type Target = gix_config::File<'static>;
+
+ fn deref(&self) -> &Self::Target {
+ self.plumbing()
+ }
+}
+
+impl Deref for CommitAutoRollback<'_> {
+ type Target = crate::Repository;
+
+ fn deref(&self) -> &Self::Target {
+ self.repo.as_ref().expect("always present")
+ }
+}
+
+impl DerefMut for CommitAutoRollback<'_> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ self.repo.as_mut().expect("always present")
+ }
+}
+
+impl DerefMut for SnapshotMut<'_> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.config
+ }
+}
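
The `Drop` implementations above are what give `SnapshotMut` its auto-commit and `CommitAutoRollback` its auto-restore behavior: an `Option<&mut Repository>` is taken exactly once, either by an explicit `commit()`/`rollback()`/`forget()` or by `drop()`. A minimal sketch of that guard pattern with plain types (no gix involved):

struct AutoCommit<'a> {
    repo: Option<&'a mut Vec<String>>, // stands in for `&'repo mut Repository`
    pending: Vec<String>,              // stands in for the edited `gix_config::File`
}

impl Drop for AutoCommit<'_> {
    fn drop(&mut self) {
        // Only commit if nothing consumed the mutable reference earlier.
        if let Some(repo) = self.repo.take() {
            repo.extend(std::mem::take(&mut self.pending));
        }
    }
}

fn main() {
    let mut repo = vec!["core.bare=false".to_string()];
    {
        let mut changes = AutoCommit { repo: Some(&mut repo), pending: Vec::new() };
        changes.pending.push("user.name=me".into());
    } // dropped here: the pending change is applied
    assert_eq!(repo.len(), 2);
}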
diff --git a/vendor/gix/src/config/snapshot/access.rs b/vendor/gix/src/config/snapshot/access.rs
new file mode 100644
index 000000000..1710348a9
--- /dev/null
+++ b/vendor/gix/src/config/snapshot/access.rs
@@ -0,0 +1,143 @@
+#![allow(clippy::result_large_err)]
+use std::borrow::Cow;
+
+use gix_features::threading::OwnShared;
+
+use crate::{
+ bstr::BStr,
+ config::{CommitAutoRollback, Snapshot, SnapshotMut},
+};
+
+/// Access configuration values, frozen in time, using a `key` which is a `.` separated string of up to
+/// three tokens, namely `section_name.[subsection_name.]value_name`, like `core.bare` or `remote.origin.url`.
+///
+/// Note that single-value methods always return the last value found, which is the one set most recently in the
+/// hierarchy of configuration files, aka 'last one wins'.
+impl<'repo> Snapshot<'repo> {
+ /// Return the boolean at `key`, or `None` if there is no such value or if the value can't be interpreted as
+ /// boolean.
+ ///
+ /// For a non-degenerating version, use [`try_boolean(…)`][Self::try_boolean()].
+ ///
+ /// Note that this method takes the most recent value at `key` even if it is from a file with reduced trust.
+ pub fn boolean<'a>(&self, key: impl Into<&'a BStr>) -> Option<bool> {
+ self.try_boolean(key).and_then(Result::ok)
+ }
+
+ /// Like [`boolean()`][Self::boolean()], but it will report an error if the value couldn't be interpreted as boolean.
+ pub fn try_boolean<'a>(&self, key: impl Into<&'a BStr>) -> Option<Result<bool, gix_config::value::Error>> {
+ self.repo.config.resolved.boolean_by_key(key)
+ }
+
+ /// Return the resolved integer at `key`, or `None` if there is no such value or if the value can't be interpreted as
+ /// integer or exceeded the value range.
+ ///
+ /// For a non-degenerating version, use [`try_integer(…)`][Self::try_integer()].
+ ///
+ /// Note that this method takes the most recent value at `key` even if it is from a file with reduced trust.
+ pub fn integer<'a>(&self, key: impl Into<&'a BStr>) -> Option<i64> {
+ self.try_integer(key).and_then(Result::ok)
+ }
+
+    /// Like [`integer()`][Self::integer()], but it will report an error if the value couldn't be interpreted as an integer.
+ pub fn try_integer<'a>(&self, key: impl Into<&'a BStr>) -> Option<Result<i64, gix_config::value::Error>> {
+ self.repo.config.resolved.integer_by_key(key)
+ }
+
+ /// Return the string at `key`, or `None` if there is no such value.
+ ///
+ /// Note that this method takes the most recent value at `key` even if it is from a file with reduced trust.
+ pub fn string<'a>(&self, key: impl Into<&'a BStr>) -> Option<Cow<'_, BStr>> {
+ self.repo.config.resolved.string_by_key(key)
+ }
+
+ /// Return the trusted and fully interpolated path at `key`, or `None` if there is no such value
+ /// or if no value was found in a trusted file.
+ /// An error occurs if the path could not be interpolated to its final value.
+ pub fn trusted_path<'a>(
+ &self,
+ key: impl Into<&'a BStr>,
+ ) -> Option<Result<Cow<'_, std::path::Path>, gix_config::path::interpolate::Error>> {
+ let key = gix_config::parse::key(key)?;
+ self.repo
+ .config
+ .trusted_file_path(key.section_name, key.subsection_name, key.value_name)
+ }
+}
+
+/// Utilities and additional access
+impl<'repo> Snapshot<'repo> {
+ /// Returns the underlying configuration implementation for a complete API, despite being a little less convenient.
+ ///
+ /// It's expected that more functionality will move up depending on demand.
+ pub fn plumbing(&self) -> &gix_config::File<'static> {
+ &self.repo.config.resolved
+ }
+}
+
+/// Utilities
+impl<'repo> SnapshotMut<'repo> {
+ /// Append configuration values of the form `core.abbrev=5` or `remote.origin.url = foo` or `core.bool-implicit-true`
+ /// to the end of the repository configuration, with each section marked with the given `source`.
+ ///
+ /// Note that doing so applies the configuration at the very end, so it will always override what came before it
+    /// even if the `source` is of lower priority than what is already there.
+ pub fn append_config(
+ &mut self,
+ values: impl IntoIterator<Item = impl AsRef<BStr>>,
+ source: gix_config::Source,
+ ) -> Result<&mut Self, crate::config::overrides::Error> {
+ crate::config::overrides::append(&mut self.config, values, source, |v| Some(format!("-c {v}").into()))?;
+ Ok(self)
+ }
+ /// Apply all changes made to this instance.
+ ///
+ /// Note that this would also happen once this instance is dropped, but using this method may be more intuitive and won't squelch errors
+ /// in case the new configuration is partially invalid.
+ pub fn commit(mut self) -> Result<&'repo mut crate::Repository, crate::config::Error> {
+ let repo = self.repo.take().expect("always present here");
+ self.commit_inner(repo)
+ }
+
+ pub(crate) fn commit_inner(
+ &mut self,
+ repo: &'repo mut crate::Repository,
+ ) -> Result<&'repo mut crate::Repository, crate::config::Error> {
+ repo.reread_values_and_clear_caches_replacing_config(std::mem::take(&mut self.config).into())?;
+ Ok(repo)
+ }
+
+    /// Create a structure that temporarily commits the changes, but rolls them back when dropped.
+ pub fn commit_auto_rollback(mut self) -> Result<CommitAutoRollback<'repo>, crate::config::Error> {
+ let repo = self.repo.take().expect("this only runs once on consumption");
+ let prev_config = OwnShared::clone(&repo.config.resolved);
+
+ Ok(CommitAutoRollback {
+ repo: self.commit_inner(repo)?.into(),
+ prev_config,
+ })
+ }
+
+ /// Don't apply any of the changes after consuming this instance, effectively forgetting them, returning the changed configuration.
+ pub fn forget(mut self) -> gix_config::File<'static> {
+ self.repo.take();
+ std::mem::take(&mut self.config)
+ }
+}
+
+/// Utilities
+impl<'repo> CommitAutoRollback<'repo> {
+    /// Roll back the changes previously applied, restoring the values as they were before the change.
+ pub fn rollback(mut self) -> Result<&'repo mut crate::Repository, crate::config::Error> {
+ let repo = self.repo.take().expect("still present, consumed only once");
+ self.rollback_inner(repo)
+ }
+
+ pub(crate) fn rollback_inner(
+ &mut self,
+ repo: &'repo mut crate::Repository,
+ ) -> Result<&'repo mut crate::Repository, crate::config::Error> {
+ repo.reread_values_and_clear_caches_replacing_config(OwnShared::clone(&self.prev_config))?;
+ Ok(repo)
+ }
+}
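
A hedged usage sketch of the two platforms above, assuming the `gix` crate as vendored here and that `Repository::config_snapshot()`/`config_snapshot_mut()` are the accessors returning `Snapshot`/`SnapshotMut` (as in current gix); only methods defined in this file are exercised.

fn read_values(repo: &gix::Repository) {
    let config = repo.config_snapshot();
    // 'last one wins': the most recently set value in the configuration hierarchy is returned.
    let bare = config.boolean("core.bare").unwrap_or(false);
    let abbrev = config.integer("core.abbrev");
    println!("bare={bare}, abbrev={abbrev:?}");
}

fn set_in_memory(repo: &mut gix::Repository) -> Result<(), Box<dyn std::error::Error>> {
    let mut config = repo.config_snapshot_mut();
    config.append_config(["user.name=Jane Doe"], gix::config::Source::Api)?;
    config.commit()?; // committing would also happen implicitly on drop
    Ok(())
}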
diff --git a/vendor/gix/src/config/snapshot/credential_helpers.rs b/vendor/gix/src/config/snapshot/credential_helpers.rs
new file mode 100644
index 000000000..5a07e9fe2
--- /dev/null
+++ b/vendor/gix/src/config/snapshot/credential_helpers.rs
@@ -0,0 +1,183 @@
+use std::{borrow::Cow, convert::TryFrom};
+
+pub use error::Error;
+
+use crate::{
+ bstr::{ByteSlice, ByteVec},
+ config::{
+ tree::{credential, Core, Credential, Key},
+ Snapshot,
+ },
+};
+
+mod error {
+ use crate::bstr::BString;
+
+ /// The error returned by [Snapshot::credential_helpers()][super::Snapshot::credential_helpers()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("Could not parse 'useHttpPath' key in section {section}")]
+ InvalidUseHttpPath {
+ section: BString,
+ source: gix_config::value::Error,
+ },
+ #[error("core.askpass could not be read")]
+ CoreAskpass(#[from] gix_config::path::interpolate::Error),
+ }
+}
+
+impl Snapshot<'_> {
+ /// Returns the configuration for all git-credential helpers from trusted configuration that apply
+ /// to the given `url` along with an action preconfigured to invoke the cascade with.
+    /// This includes the `url` itself, which may be altered to contain a user-name as configured.
+ ///
+ /// These can be invoked to obtain credentials. Note that the `url` is expected to be the one used
+ /// to connect to a remote, and thus should already have passed the url-rewrite engine.
+ ///
+ /// # Deviation
+ ///
+ /// - Invalid urls can't be used to obtain credential helpers as they are rejected early when creating a valid `url` here.
+ /// - Parsed urls will automatically drop the port if it's the default, i.e. `http://host:80` becomes `http://host` when parsed.
+ /// This affects the prompt provided to the user, so that git will use the verbatim url, whereas we use `http://host`.
+ /// - Upper-case scheme and host will be lower-cased automatically when parsing into a url, so prompts differ compared to git.
+    /// - A **difference in the prompt might affect the matching of existing stored credentials**, and it's an open question
+    ///   whether this is a feature or a bug.
+ // TODO: when dealing with `http.*.*` configuration, generalize this algorithm as needed and support precedence.
+ pub fn credential_helpers(
+ &self,
+ mut url: gix_url::Url,
+ ) -> Result<
+ (
+ gix_credentials::helper::Cascade,
+ gix_credentials::helper::Action,
+ gix_prompt::Options<'static>,
+ ),
+ Error,
+ > {
+ let mut programs = Vec::new();
+ let mut use_http_path = false;
+ let url_had_user_initially = url.user().is_some();
+ normalize(&mut url);
+
+ if let Some(credential_sections) = self
+ .repo
+ .config
+ .resolved
+ .sections_by_name_and_filter("credential", &mut self.repo.filter_config_section())
+ {
+ for section in credential_sections {
+ let section = match section.header().subsection_name() {
+ Some(pattern) => gix_url::parse(pattern).ok().and_then(|mut pattern| {
+ normalize(&mut pattern);
+ let is_http = matches!(pattern.scheme, gix_url::Scheme::Https | gix_url::Scheme::Http);
+ let scheme = &pattern.scheme;
+ let host = pattern.host();
+ let ports = is_http
+ .then(|| (pattern.port_or_default(), url.port_or_default()))
+ .unwrap_or((pattern.port, url.port));
+ let path = (!(is_http && pattern.path_is_root())).then_some(&pattern.path);
+
+ if !path.map_or(true, |path| path == &url.path) {
+ return None;
+ }
+ if pattern.user().is_some() && pattern.user() != url.user() {
+ return None;
+ }
+ (scheme == &url.scheme && host_matches(host, url.host()) && ports.0 == ports.1).then_some((
+ section,
+ &credential::UrlParameter::HELPER,
+ &credential::UrlParameter::USERNAME,
+ &credential::UrlParameter::USE_HTTP_PATH,
+ ))
+ }),
+ None => Some((
+ section,
+ &Credential::HELPER,
+ &Credential::USERNAME,
+ &Credential::USE_HTTP_PATH,
+ )),
+ };
+ if let Some((section, helper_key, username_key, use_http_path_key)) = section {
+ for value in section.values(helper_key.name) {
+ if value.trim().is_empty() {
+ programs.clear();
+ } else {
+ programs.push(gix_credentials::Program::from_custom_definition(value.into_owned()));
+ }
+ }
+ if let Some(Some(user)) = (!url_had_user_initially).then(|| {
+ section
+ .value(username_key.name)
+ .filter(|n| !n.trim().is_empty())
+ .and_then(|n| {
+ let n: Vec<_> = Cow::into_owned(n).into();
+ n.into_string().ok()
+ })
+ }) {
+ url.set_user(Some(user));
+ }
+ if let Some(toggle) = section
+ .value(use_http_path_key.name)
+ .map(|val| {
+ gix_config::Boolean::try_from(val)
+ .map_err(|err| Error::InvalidUseHttpPath {
+ source: err,
+ section: section.header().to_bstring(),
+ })
+ .map(|b| b.0)
+ })
+ .transpose()?
+ {
+ use_http_path = toggle;
+ }
+ }
+ }
+ }
+
+ let allow_git_env = self.repo.options.permissions.env.git_prefix.is_allowed();
+ let allow_ssh_env = self.repo.options.permissions.env.ssh_prefix.is_allowed();
+ let prompt_options = gix_prompt::Options {
+ askpass: self
+ .trusted_path(Core::ASKPASS.logical_name().as_str())
+ .transpose()?
+ .map(|c| Cow::Owned(c.into_owned())),
+ ..Default::default()
+ }
+ .apply_environment(allow_git_env, allow_ssh_env, allow_git_env);
+ Ok((
+ gix_credentials::helper::Cascade {
+ programs,
+ use_http_path,
+ // The default ssh implementation uses binaries that do their own auth, so our passwords aren't used.
+ query_user_only: url.scheme == gix_url::Scheme::Ssh,
+ ..Default::default()
+ },
+ gix_credentials::helper::Action::get_for_url(url.to_bstring()),
+ prompt_options,
+ ))
+ }
+}
+
+fn host_matches(pattern: Option<&str>, host: Option<&str>) -> bool {
+ match (pattern, host) {
+ (Some(pattern), Some(host)) => {
+ let lfields = pattern.split('.');
+ let rfields = host.split('.');
+ if lfields.clone().count() != rfields.clone().count() {
+ return false;
+ }
+ lfields
+ .zip(rfields)
+ .all(|(pat, value)| gix_glob::wildmatch(pat.into(), value.into(), gix_glob::wildmatch::Mode::empty()))
+ }
+ (None, None) => true,
+ (Some(_), None) | (None, Some(_)) => false,
+ }
+}
+
+fn normalize(url: &mut gix_url::Url) {
+ if !url.path_is_root() && url.path.ends_with(b"/") {
+ url.path.pop();
+ }
+}
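
To make the host matching above concrete: both host names are split into dot-separated labels, the label counts must agree, and each pattern label is glob-matched against the corresponding url label. A self-contained approximation (not part of the vendored file) that substitutes a trivial whole-label `*` wildcard for `gix_glob::wildmatch`:

fn label_matches(pattern: &str, value: &str) -> bool {
    // Stand-in for gix_glob::wildmatch - only the whole-label '*' case is handled here.
    pattern == "*" || pattern == value
}

fn host_matches(pattern: Option<&str>, host: Option<&str>) -> bool {
    match (pattern, host) {
        (Some(pattern), Some(host)) => {
            let lfields = pattern.split('.');
            let rfields = host.split('.');
            if lfields.clone().count() != rfields.clone().count() {
                return false;
            }
            lfields.zip(rfields).all(|(pat, value)| label_matches(pat, value))
        }
        (None, None) => true,
        _ => false,
    }
}

fn main() {
    assert!(host_matches(Some("*.example.com"), Some("git.example.com")));
    assert!(!host_matches(Some("*.example.com"), Some("example.com"))); // label counts differ
    assert!(host_matches(None, None));
}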
diff --git a/vendor/gix/src/config/snapshot/mod.rs b/vendor/gix/src/config/snapshot/mod.rs
new file mode 100644
index 000000000..80ec6f948
--- /dev/null
+++ b/vendor/gix/src/config/snapshot/mod.rs
@@ -0,0 +1,5 @@
+mod _impls;
+mod access;
+
+///
+pub mod credential_helpers;
diff --git a/vendor/gix/src/config/tree/keys.rs b/vendor/gix/src/config/tree/keys.rs
new file mode 100644
index 000000000..1cdd187d0
--- /dev/null
+++ b/vendor/gix/src/config/tree/keys.rs
@@ -0,0 +1,629 @@
+#![allow(clippy::result_large_err)]
+use std::{
+ borrow::Cow,
+ error::Error,
+ fmt::{Debug, Formatter},
+};
+
+use crate::{
+ bstr::BStr,
+ config,
+ config::tree::{Key, Link, Note, Section, SubSectionRequirement},
+};
+
+/// Implements a value without any constraints, i.e. any value.
+pub struct Any<T: Validate = validate::All> {
+ /// The key of the value in the git configuration.
+ pub name: &'static str,
+ /// The parent section of the key.
+ pub section: &'static dyn Section,
+ /// The subsection requirement to use.
+ pub subsection_requirement: Option<SubSectionRequirement>,
+ /// A link to other resources that might be eligible as value.
+ pub link: Option<Link>,
+ /// A note about this key.
+ pub note: Option<Note>,
+ /// The way validation and transformation should happen.
+ validate: T,
+}
+
+/// Init
+impl Any<validate::All> {
+ /// Create a new instance from `name` and `section`
+ pub const fn new(name: &'static str, section: &'static dyn Section) -> Self {
+ Any::new_with_validate(name, section, validate::All)
+ }
+}
+
+/// Init other validate implementations
+impl<T: Validate> Any<T> {
+ /// Create a new instance from `name` and `section`
+ pub const fn new_with_validate(name: &'static str, section: &'static dyn Section, validate: T) -> Self {
+ Any {
+ name,
+ section,
+ subsection_requirement: Some(SubSectionRequirement::Never),
+ link: None,
+ note: None,
+ validate,
+ }
+ }
+}
+
+/// Builder
+impl<T: Validate> Any<T> {
+ /// Set the subsection requirement to non-default values.
+ pub const fn with_subsection_requirement(mut self, requirement: Option<SubSectionRequirement>) -> Self {
+ self.subsection_requirement = requirement;
+ self
+ }
+
+ /// Associate an environment variable with this key.
+ ///
+ /// This is mainly useful for enriching error messages.
+ pub const fn with_environment_override(mut self, var: &'static str) -> Self {
+ self.link = Some(Link::EnvironmentOverride(var));
+ self
+ }
+
+ /// Set a link to another key which serves as fallback to provide a value if this key is not set.
+ pub const fn with_fallback(mut self, key: &'static dyn Key) -> Self {
+ self.link = Some(Link::FallbackKey(key));
+ self
+ }
+
+ /// Attach an informative message to this key.
+ pub const fn with_note(mut self, message: &'static str) -> Self {
+ self.note = Some(Note::Informative(message));
+ self
+ }
+
+ /// Inform about a deviation in how this key is interpreted.
+ pub const fn with_deviation(mut self, message: &'static str) -> Self {
+ self.note = Some(Note::Deviation(message));
+ self
+ }
+}
+
+/// Conversion
+impl<T: Validate> Any<T> {
+ /// Try to convert `value` into a refspec suitable for the `op` operation.
+ pub fn try_into_refspec(
+ &'static self,
+ value: std::borrow::Cow<'_, BStr>,
+ op: gix_refspec::parse::Operation,
+ ) -> Result<gix_refspec::RefSpec, config::refspec::Error> {
+ gix_refspec::parse(value.as_ref(), op)
+ .map(|spec| spec.to_owned())
+ .map_err(|err| config::refspec::Error::from_value(self, value.into_owned()).with_source(err))
+ }
+
+ /// Try to interpret `value` as UTF-8 encoded string.
+ pub fn try_into_string(&'static self, value: Cow<'_, BStr>) -> Result<std::string::String, config::string::Error> {
+ use crate::bstr::ByteVec;
+ Vec::from(value.into_owned()).into_string().map_err(|err| {
+ let utf8_err = err.utf8_error().clone();
+ config::string::Error::from_value(self, err.into_vec().into()).with_source(utf8_err)
+ })
+ }
+}
+
+impl<T: Validate> Debug for Any<T> {
+ fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+ self.logical_name().fmt(f)
+ }
+}
+
+impl<T: Validate> std::fmt::Display for Any<T> {
+ fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+ f.write_str(&self.logical_name())
+ }
+}
+
+impl<T: Validate> Key for Any<T> {
+ fn name(&self) -> &str {
+ self.name
+ }
+
+ fn validate(&self, value: &BStr) -> Result<(), config::tree::key::validate::Error> {
+ Ok(self.validate.validate(value)?)
+ }
+
+ fn section(&self) -> &dyn Section {
+ self.section
+ }
+
+ fn subsection_requirement(&self) -> Option<&SubSectionRequirement> {
+ self.subsection_requirement.as_ref()
+ }
+
+ fn link(&self) -> Option<&Link> {
+ self.link.as_ref()
+ }
+
+ fn note(&self) -> Option<&Note> {
+ self.note.as_ref()
+ }
+}
+
+/// A key which represents a date.
+pub type Time = Any<validate::Time>;
+
+/// The `core.(filesRefLockTimeout|packedRefsTimeout)` keys, or any other lock timeout for that matter.
+pub type LockTimeout = Any<validate::LockTimeout>;
+
+/// Keys specifying durations in milliseconds.
+pub type DurationInMilliseconds = Any<validate::DurationInMilliseconds>;
+
+/// A key which represents any unsigned integer.
+pub type UnsignedInteger = Any<validate::UnsignedInteger>;
+
+/// A key that represents a remote name, either as url or symbolic name.
+pub type RemoteName = Any<validate::RemoteName>;
+
+/// A key that represents a boolean value.
+pub type Boolean = Any<validate::Boolean>;
+
+/// A key that represents an executable program, shell script or shell commands.
+pub type Program = Any<validate::Program>;
+
+/// A key that represents an executable program as identified by name or path.
+pub type Executable = Any<validate::Executable>;
+
+/// A key that represents a path (to a resource).
+pub type Path = Any<validate::Path>;
+
+/// A key that represents a URL.
+pub type Url = Any<validate::Url>;
+
+/// A key that represents a UTF-8 string.
+pub type String = Any<validate::String>;
+
+/// A key that represents a RefSpec for pushing.
+pub type PushRefSpec = Any<validate::PushRefSpec>;
+
+/// A key that represents a RefSpec for fetching.
+pub type FetchRefSpec = Any<validate::FetchRefSpec>;
+
+mod duration {
+ use std::time::Duration;
+
+ use crate::{
+ config,
+ config::tree::{keys::DurationInMilliseconds, Section},
+ };
+
+ impl DurationInMilliseconds {
+ /// Create a new instance.
+ pub const fn new_duration(name: &'static str, section: &'static dyn Section) -> Self {
+ Self::new_with_validate(name, section, super::validate::DurationInMilliseconds)
+ }
+
+ /// Return a valid duration as parsed from an integer that is interpreted as milliseconds.
+ pub fn try_into_duration(
+ &'static self,
+ value: Result<i64, gix_config::value::Error>,
+ ) -> Result<std::time::Duration, config::duration::Error> {
+ let value = value.map_err(|err| config::duration::Error::from(self).with_source(err))?;
+ Ok(match value {
+ val if val < 0 => Duration::from_secs(u64::MAX),
+ val => Duration::from_millis(val.try_into().expect("i64 to u64 always works if positive")),
+ })
+ }
+ }
+}
+
+mod lock_timeout {
+ use std::time::Duration;
+
+ use gix_lock::acquire::Fail;
+
+ use crate::{
+ config,
+ config::tree::{keys::LockTimeout, Section},
+ };
+
+ impl LockTimeout {
+ /// Create a new instance.
+ pub const fn new_lock_timeout(name: &'static str, section: &'static dyn Section) -> Self {
+ Self::new_with_validate(name, section, super::validate::LockTimeout)
+ }
+
+ /// Return information on how long to wait for locked files.
+ pub fn try_into_lock_timeout(
+ &'static self,
+ value: Result<i64, gix_config::value::Error>,
+ ) -> Result<gix_lock::acquire::Fail, config::lock_timeout::Error> {
+ let value = value.map_err(|err| config::lock_timeout::Error::from(self).with_source(err))?;
+ Ok(match value {
+ val if val < 0 => Fail::AfterDurationWithBackoff(Duration::from_secs(u64::MAX)),
+ val if val == 0 => Fail::Immediately,
+ val => Fail::AfterDurationWithBackoff(Duration::from_millis(
+ val.try_into().expect("i64 to u64 always works if positive"),
+ )),
+ })
+ }
+ }
+}
+
+mod refspecs {
+ use crate::config::tree::{
+ keys::{validate, FetchRefSpec, PushRefSpec},
+ Section,
+ };
+
+ impl PushRefSpec {
+ /// Create a new instance.
+ pub const fn new_push_refspec(name: &'static str, section: &'static dyn Section) -> Self {
+ Self::new_with_validate(name, section, validate::PushRefSpec)
+ }
+ }
+
+ impl FetchRefSpec {
+ /// Create a new instance.
+ pub const fn new_fetch_refspec(name: &'static str, section: &'static dyn Section) -> Self {
+ Self::new_with_validate(name, section, validate::FetchRefSpec)
+ }
+ }
+}
+
+mod url {
+ use std::borrow::Cow;
+
+ use crate::{
+ bstr::BStr,
+ config,
+ config::tree::{
+ keys::{validate, Url},
+ Section,
+ },
+ };
+
+ impl Url {
+ /// Create a new instance.
+ pub const fn new_url(name: &'static str, section: &'static dyn Section) -> Self {
+ Self::new_with_validate(name, section, validate::Url)
+ }
+
+ /// Try to parse `value` as URL.
+ pub fn try_into_url(&'static self, value: Cow<'_, BStr>) -> Result<gix_url::Url, config::url::Error> {
+ gix_url::parse(value.as_ref())
+ .map_err(|err| config::url::Error::from_value(self, value.into_owned()).with_source(err))
+ }
+ }
+}
+
+impl String {
+ /// Create a new instance.
+ pub const fn new_string(name: &'static str, section: &'static dyn Section) -> Self {
+ Self::new_with_validate(name, section, validate::String)
+ }
+}
+
+impl Program {
+ /// Create a new instance.
+ pub const fn new_program(name: &'static str, section: &'static dyn Section) -> Self {
+ Self::new_with_validate(name, section, validate::Program)
+ }
+}
+
+impl Executable {
+ /// Create a new instance.
+ pub const fn new_executable(name: &'static str, section: &'static dyn Section) -> Self {
+ Self::new_with_validate(name, section, validate::Executable)
+ }
+}
+
+impl Path {
+ /// Create a new instance.
+ pub const fn new_path(name: &'static str, section: &'static dyn Section) -> Self {
+ Self::new_with_validate(name, section, validate::Path)
+ }
+}
+
+mod workers {
+ use crate::config::tree::{keys::UnsignedInteger, Section};
+
+ impl UnsignedInteger {
+ /// Create a new instance.
+ pub const fn new_unsigned_integer(name: &'static str, section: &'static dyn Section) -> Self {
+ Self::new_with_validate(name, section, super::validate::UnsignedInteger)
+ }
+
+ /// Convert `value` into a `usize` or wrap it into a specialized error.
+ pub fn try_into_usize(
+ &'static self,
+ value: Result<i64, gix_config::value::Error>,
+ ) -> Result<usize, crate::config::unsigned_integer::Error> {
+ value
+ .map_err(|err| crate::config::unsigned_integer::Error::from(self).with_source(err))
+ .and_then(|value| {
+ value
+ .try_into()
+ .map_err(|_| crate::config::unsigned_integer::Error::from(self))
+ })
+ }
+
+ /// Convert `value` into a `u64` or wrap it into a specialized error.
+ pub fn try_into_u64(
+ &'static self,
+ value: Result<i64, gix_config::value::Error>,
+ ) -> Result<u64, crate::config::unsigned_integer::Error> {
+ value
+ .map_err(|err| crate::config::unsigned_integer::Error::from(self).with_source(err))
+ .and_then(|value| {
+ value
+ .try_into()
+ .map_err(|_| crate::config::unsigned_integer::Error::from(self))
+ })
+ }
+
+ /// Convert `value` into a `u32` or wrap it into a specialized error.
+ pub fn try_into_u32(
+ &'static self,
+ value: Result<i64, gix_config::value::Error>,
+ ) -> Result<u32, crate::config::unsigned_integer::Error> {
+ value
+ .map_err(|err| crate::config::unsigned_integer::Error::from(self).with_source(err))
+ .and_then(|value| {
+ value
+ .try_into()
+ .map_err(|_| crate::config::unsigned_integer::Error::from(self))
+ })
+ }
+ }
+}
+
+mod time {
+ use std::borrow::Cow;
+
+ use crate::{
+ bstr::{BStr, ByteSlice},
+ config::tree::{
+ keys::{validate, Time},
+ Section,
+ },
+ };
+
+ impl Time {
+ /// Create a new instance.
+ pub const fn new_time(name: &'static str, section: &'static dyn Section) -> Self {
+ Self::new_with_validate(name, section, validate::Time)
+ }
+
+ /// Convert the `value` into a date if possible, with `now` as reference time for relative dates.
+ pub fn try_into_time(
+ &self,
+ value: Cow<'_, BStr>,
+ now: Option<std::time::SystemTime>,
+ ) -> Result<gix_date::Time, gix_date::parse::Error> {
+ gix_date::parse(
+ value
+ .as_ref()
+ .to_str()
+ .map_err(|_| gix_date::parse::Error::InvalidDateString {
+ input: value.to_string(),
+ })?,
+ now,
+ )
+ }
+ }
+}
+
+mod boolean {
+ use crate::{
+ config,
+ config::tree::{
+ keys::{validate, Boolean},
+ Section,
+ },
+ };
+
+ impl Boolean {
+ /// Create a new instance.
+ pub const fn new_boolean(name: &'static str, section: &'static dyn Section) -> Self {
+ Self::new_with_validate(name, section, validate::Boolean)
+ }
+
+ /// Process the `value` into a result with an improved error message.
+ ///
+ /// `value` is expected to be provided by [`gix_config::File::boolean()`].
+ pub fn enrich_error(
+ &'static self,
+ value: Result<bool, gix_config::value::Error>,
+ ) -> Result<bool, config::boolean::Error> {
+ value.map_err(|err| config::boolean::Error::from(self).with_source(err))
+ }
+ }
+}
+
+mod remote_name {
+ use std::borrow::Cow;
+
+ use crate::{
+ bstr::{BStr, BString},
+ config,
+ config::tree::{keys::RemoteName, Section},
+ };
+
+ impl RemoteName {
+ /// Create a new instance.
+ pub const fn new_remote_name(name: &'static str, section: &'static dyn Section) -> Self {
+ Self::new_with_validate(name, section, super::validate::RemoteName)
+ }
+
+ /// Try to validate `name` as symbolic remote name and return it.
+ #[allow(clippy::result_large_err)]
+ pub fn try_into_symbolic_name(
+ &'static self,
+ name: Cow<'_, BStr>,
+ ) -> Result<BString, config::remote::symbolic_name::Error> {
+ crate::remote::name::validated(name.into_owned())
+ .map_err(|err| config::remote::symbolic_name::Error::from(self).with_source(err))
+ }
+ }
+}
+
+/// Provide a way to validate a value, or decode a value from `gix-config`.
+pub trait Validate {
+ /// Validate `value` or return an error.
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn Error + Send + Sync + 'static>>;
+}
+
+/// Various implementations of the `Validate` trait.
+pub mod validate {
+ use std::{borrow::Cow, error::Error};
+
+ use crate::{
+ bstr::{BStr, ByteSlice},
+ config::tree::keys::Validate,
+ remote,
+ };
+
+ /// Everything is valid.
+ #[derive(Default)]
+ pub struct All;
+
+ impl Validate for All {
+ fn validate(&self, _value: &BStr) -> Result<(), Box<dyn Error + Send + Sync + 'static>> {
+ Ok(())
+ }
+ }
+
+ /// Assure that values that parse as git dates are valid.
+ #[derive(Default)]
+ pub struct Time;
+
+ impl Validate for Time {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn Error + Send + Sync + 'static>> {
+ gix_date::parse(value.to_str()?, std::time::SystemTime::now().into())?;
+ Ok(())
+ }
+ }
+
+ /// Assure that values that parse as unsigned integers are valid.
+ #[derive(Default)]
+ pub struct UnsignedInteger;
+
+ impl Validate for UnsignedInteger {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn Error + Send + Sync + 'static>> {
+ usize::try_from(
+ gix_config::Integer::try_from(value)?
+ .to_decimal()
+ .ok_or_else(|| format!("integer {value} cannot be represented as `usize`"))?,
+ )?;
+ Ok(())
+ }
+ }
+
+ /// Assure that values that parse as git booleans are valid.
+ #[derive(Default)]
+ pub struct Boolean;
+
+ impl Validate for Boolean {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn Error + Send + Sync + 'static>> {
+ gix_config::Boolean::try_from(value)?;
+ Ok(())
+ }
+ }
+
+    /// Values that are git remote names, either symbolic names or urls.
+ #[derive(Default)]
+ pub struct RemoteName;
+ impl Validate for RemoteName {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ remote::Name::try_from(Cow::Borrowed(value))
+ .map_err(|_| format!("Illformed UTF-8 in remote name: \"{}\"", value.to_str_lossy()))?;
+ Ok(())
+ }
+ }
+
+ /// Values that are programs - everything is allowed.
+ #[derive(Default)]
+ pub struct Program;
+ impl Validate for Program {
+ fn validate(&self, _value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ Ok(())
+ }
+ }
+
+    /// Values that are program executables - everything is allowed.
+ #[derive(Default)]
+ pub struct Executable;
+ impl Validate for Executable {
+ fn validate(&self, _value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ Ok(())
+ }
+ }
+
+ /// Values that parse as URLs.
+ #[derive(Default)]
+ pub struct Url;
+ impl Validate for Url {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ gix_url::parse(value)?;
+ Ok(())
+ }
+ }
+
+ /// Values that parse as ref-specs for pushing.
+ #[derive(Default)]
+ pub struct PushRefSpec;
+ impl Validate for PushRefSpec {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ gix_refspec::parse(value, gix_refspec::parse::Operation::Push)?;
+ Ok(())
+ }
+ }
+
+    /// Values that parse as ref-specs for fetching.
+ #[derive(Default)]
+ pub struct FetchRefSpec;
+ impl Validate for FetchRefSpec {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ gix_refspec::parse(value, gix_refspec::parse::Operation::Fetch)?;
+ Ok(())
+ }
+ }
+
+ /// Timeouts used for file locks.
+ pub struct LockTimeout;
+ impl Validate for LockTimeout {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ let value = gix_config::Integer::try_from(value)?
+ .to_decimal()
+ .ok_or_else(|| format!("integer {value} cannot be represented as integer"));
+ super::super::Core::FILES_REF_LOCK_TIMEOUT.try_into_lock_timeout(Ok(value?))?;
+ Ok(())
+ }
+ }
+
+ /// Durations in milliseconds.
+ pub struct DurationInMilliseconds;
+ impl Validate for DurationInMilliseconds {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ let value = gix_config::Integer::try_from(value)?
+ .to_decimal()
+ .ok_or_else(|| format!("integer {value} cannot be represented as integer"));
+ super::super::gitoxide::Http::CONNECT_TIMEOUT.try_into_duration(Ok(value?))?;
+ Ok(())
+ }
+ }
+
+ /// A UTF-8 string.
+ pub struct String;
+ impl Validate for String {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ value.to_str()?;
+ Ok(())
+ }
+ }
+
+ /// Any path - everything is allowed.
+ pub struct Path;
+ impl Validate for Path {
+ fn validate(&self, _value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ Ok(())
+ }
+ }
+}
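
A short sketch of the validators above used in isolation, assuming the `gix` crate as vendored here (the `keys::validate` module and the `Validate` trait are public, as declared above); it is illustrative only.

use gix::bstr::BStr;
use gix::config::tree::keys::{validate, Validate};

fn main() {
    // Booleans accept anything `gix_config::Boolean` can parse, e.g. yes/no/true/false/on/off.
    assert!(validate::Boolean.validate(BStr::new("yes")).is_ok());
    assert!(validate::Boolean.validate(BStr::new("maybe")).is_err());

    // Unsigned integers understand git-style suffixes but reject negative values.
    assert!(validate::UnsignedInteger.validate(BStr::new("1k")).is_ok());
    assert!(validate::UnsignedInteger.validate(BStr::new("-1")).is_err());
}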
diff --git a/vendor/gix/src/config/tree/mod.rs b/vendor/gix/src/config/tree/mod.rs
new file mode 100644
index 000000000..fd769f3ed
--- /dev/null
+++ b/vendor/gix/src/config/tree/mod.rs
@@ -0,0 +1,123 @@
+//! The tree of supported configuration values for use in [`config_overrides`][crate::open::Options::config_overrides()]
+//! or for validating and transforming well-known configuration values.
+//!
+//! It can also be used to traverse all implemented keys and to validate values before usage as configuration overrides.
+//!
+//! ### Leniency
+//!
+//! When validating values, we don't apply leniency here; that is left to the caller. Leniency is an application-defined setting
+//! to ignore errors on non-security related values, which might make applications more resilient towards misconfiguration.
+pub(crate) mod root {
+ use super::sections;
+ use crate::config::tree::Section;
+
+ /// The root of the configuration tree, suitable to discover all sub-sections at runtime or compile time.
+ #[derive(Copy, Clone, Default)]
+ pub struct Tree;
+
+ impl Tree {
+ /// The `author` section.
+ pub const AUTHOR: sections::Author = sections::Author;
+ /// The `branch` section.
+ pub const BRANCH: sections::Branch = sections::Branch;
+ /// The `checkout` section.
+ pub const CHECKOUT: sections::Checkout = sections::Checkout;
+ /// The `clone` section.
+ pub const CLONE: sections::Clone = sections::Clone;
+ /// The `committer` section.
+ pub const COMMITTER: sections::Committer = sections::Committer;
+ /// The `core` section.
+ pub const CORE: sections::Core = sections::Core;
+ /// The `credential` section.
+ pub const CREDENTIAL: sections::Credential = sections::Credential;
+ /// The `diff` section.
+ pub const DIFF: sections::Diff = sections::Diff;
+ /// The `extensions` section.
+ pub const EXTENSIONS: sections::Extensions = sections::Extensions;
+ /// The `gitoxide` section.
+ pub const GITOXIDE: sections::Gitoxide = sections::Gitoxide;
+ /// The `http` section.
+ pub const HTTP: sections::Http = sections::Http;
+ /// The `init` section.
+ pub const INIT: sections::Init = sections::Init;
+ /// The `pack` section.
+ pub const PACK: sections::Pack = sections::Pack;
+ /// The `protocol` section.
+ pub const PROTOCOL: sections::Protocol = sections::Protocol;
+ /// The `remote` section.
+ pub const REMOTE: sections::Remote = sections::Remote;
+ /// The `safe` section.
+ pub const SAFE: sections::Safe = sections::Safe;
+ /// The `ssh` section.
+ pub const SSH: sections::Ssh = sections::Ssh;
+ /// The `user` section.
+ pub const USER: sections::User = sections::User;
+ /// The `url` section.
+ pub const URL: sections::Url = sections::Url;
+
+ /// List all available sections.
+ pub fn sections(&self) -> &[&dyn Section] {
+ &[
+ &Self::AUTHOR,
+ &Self::BRANCH,
+ &Self::CHECKOUT,
+ &Self::CLONE,
+ &Self::COMMITTER,
+ &Self::CORE,
+ &Self::CREDENTIAL,
+ &Self::DIFF,
+ &Self::EXTENSIONS,
+ &Self::GITOXIDE,
+ &Self::HTTP,
+ &Self::INIT,
+ &Self::PACK,
+ &Self::PROTOCOL,
+ &Self::REMOTE,
+ &Self::SAFE,
+ &Self::SSH,
+ &Self::USER,
+ &Self::URL,
+ ]
+ }
+ }
+}
+
+mod sections;
+pub use sections::{
+ branch, checkout, core, credential, diff, extensions, gitoxide, http, protocol, remote, ssh, Author, Branch,
+ Checkout, Clone, Committer, Core, Credential, Diff, Extensions, Gitoxide, Http, Init, Pack, Protocol, Remote, Safe,
+ Ssh, Url, User,
+};
+
+/// Generic value implementations for static instantiation.
+pub mod keys;
+
+///
+pub mod key {
+ ///
+ pub mod validate {
+ /// The error returned by [Key::validate()][crate::config::tree::Key::validate()].
+ #[derive(Debug, thiserror::Error)]
+ #[error(transparent)]
+ #[allow(missing_docs)]
+ pub struct Error {
+ #[from]
+ source: Box<dyn std::error::Error + Send + Sync + 'static>,
+ }
+ }
+ ///
+ pub mod validate_assignment {
+ /// The error returned by [Key::validated_assignment*()][crate::config::tree::Key::validated_assignment_fmt()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("Failed to validate the value to be assigned to this key")]
+ Validate(#[from] super::validate::Error),
+ #[error("{message}")]
+ Name { message: String },
+ }
+ }
+}
+
+mod traits;
+pub use traits::{Key, Link, Note, Section, SubSectionRequirement};
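
Since the `Tree` above is a collection of statically known sections, it can be traversed at runtime, e.g. to list every supported key. A hedged sketch assuming the `gix` crate as vendored here, using only the `Section::name()`, `Section::keys()` and `Key::name()` methods referenced above:

use gix::config::tree::{Key, Section};
use gix::config::Tree;

fn main() {
    // Prints fully qualified key names such as "checkout.workers" or "clone.defaultRemoteName".
    for section in Tree.sections() {
        for key in section.keys() {
            println!("{}.{}", section.name(), key.name());
        }
    }
}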
diff --git a/vendor/gix/src/config/tree/sections/author.rs b/vendor/gix/src/config/tree/sections/author.rs
new file mode 100644
index 000000000..4101e3817
--- /dev/null
+++ b/vendor/gix/src/config/tree/sections/author.rs
@@ -0,0 +1,23 @@
+use crate::{
+ config,
+ config::tree::{gitoxide, keys, Author, Key, Section},
+};
+
+impl Author {
+ /// The `author.name` key.
+ pub const NAME: keys::Any =
+ keys::Any::new("name", &config::Tree::AUTHOR).with_fallback(&gitoxide::Author::NAME_FALLBACK);
+ /// The `author.email` key.
+ pub const EMAIL: keys::Any =
+ keys::Any::new("email", &config::Tree::AUTHOR).with_fallback(&gitoxide::Author::EMAIL_FALLBACK);
+}
+
+impl Section for Author {
+ fn name(&self) -> &str {
+ "author"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::NAME, &Self::EMAIL]
+ }
+}
diff --git a/vendor/gix/src/config/tree/sections/branch.rs b/vendor/gix/src/config/tree/sections/branch.rs
new file mode 100644
index 000000000..8e1e0a4b8
--- /dev/null
+++ b/vendor/gix/src/config/tree/sections/branch.rs
@@ -0,0 +1,65 @@
+use crate::config::tree::{keys, traits::SubSectionRequirement, Branch, Key, Section};
+
+const NAME_PARAMETER: Option<SubSectionRequirement> = Some(SubSectionRequirement::Parameter("name"));
+
+impl Branch {
+ /// The `branch.<name>.merge` key.
+ pub const MERGE: Merge = Merge::new_with_validate("merge", &crate::config::Tree::BRANCH, validate::FullNameRef)
+ .with_subsection_requirement(NAME_PARAMETER);
+ /// The `branch.<name>.pushRemote` key.
+ pub const PUSH_REMOTE: keys::RemoteName =
+ keys::RemoteName::new_remote_name("pushRemote", &crate::config::Tree::BRANCH)
+ .with_subsection_requirement(NAME_PARAMETER);
+ /// The `branch.<name>.remote` key.
+ pub const REMOTE: keys::RemoteName = keys::RemoteName::new_remote_name("remote", &crate::config::Tree::BRANCH)
+ .with_subsection_requirement(NAME_PARAMETER);
+}
+
+impl Section for Branch {
+ fn name(&self) -> &str {
+ "branch"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::MERGE, &Self::PUSH_REMOTE, &Self::REMOTE]
+ }
+}
+
+/// The `branch.<name>.merge` key.
+pub type Merge = keys::Any<validate::FullNameRef>;
+
+mod merge {
+ use std::borrow::Cow;
+
+ use gix_ref::FullNameRef;
+
+ use crate::{bstr::BStr, config::tree::branch::Merge};
+
+ impl Merge {
+ /// Return the validated full ref name from `value` if it is valid.
+ pub fn try_into_fullrefname(
+ value: Cow<'_, BStr>,
+ ) -> Result<Cow<'_, FullNameRef>, gix_validate::reference::name::Error> {
+ match value {
+ Cow::Borrowed(v) => v.try_into().map(Cow::Borrowed),
+ Cow::Owned(v) => v.try_into().map(Cow::Owned),
+ }
+ }
+ }
+}
+
+///
+pub mod validate {
+ use crate::{
+ bstr::BStr,
+ config::tree::{branch::Merge, keys},
+ };
+
+ pub struct FullNameRef;
+ impl keys::Validate for FullNameRef {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ Merge::try_into_fullrefname(value.into())?;
+ Ok(())
+ }
+ }
+}
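
For reference, a minimal sketch of how the `Merge` helper above can be exercised from consumer code, assuming the vendored crate is used as `gix` and exposes this module as `gix::config::tree::branch` (as the doc links suggest):

// Sketch only: `gix::config::tree::branch` is the assumed public path of the module above.
use std::borrow::Cow;

use gix::bstr::ByteSlice;
use gix::config::tree::branch::Merge;

fn main() {
    // A full ref name passes validation and is handed back unchanged.
    let full = Merge::try_into_fullrefname(Cow::Borrowed(b"refs/heads/main".as_bstr()))
        .expect("a full ref name is valid");
    assert!(matches!(full, Cow::Borrowed(_)));
}
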
diff --git a/vendor/gix/src/config/tree/sections/checkout.rs b/vendor/gix/src/config/tree/sections/checkout.rs
new file mode 100644
index 000000000..27f31ee84
--- /dev/null
+++ b/vendor/gix/src/config/tree/sections/checkout.rs
@@ -0,0 +1,58 @@
+use crate::{
+ config,
+ config::tree::{keys, Checkout, Key, Section},
+};
+
+impl Checkout {
+ /// The `checkout.workers` key.
+ pub const WORKERS: Workers = Workers::new_with_validate("workers", &config::Tree::CHECKOUT, validate::Workers)
+ .with_deviation("if unset, uses all cores instead of just one");
+}
+
+/// The `checkout.workers` key.
+pub type Workers = keys::Any<validate::Workers>;
+
+impl Section for Checkout {
+ fn name(&self) -> &str {
+ "checkout"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::WORKERS]
+ }
+}
+
+mod workers {
+ use crate::config::tree::checkout::Workers;
+
+ impl Workers {
+ /// Return the number of threads to use for checkout after decoding the integer `value` obtained from `config`,
+ /// with `0` meaning "use all available cores". Negative values are clamped to `0` as well.
+ pub fn try_from_workers(
+ &'static self,
+ value: Result<i64, gix_config::value::Error>,
+ ) -> Result<usize, crate::config::checkout::workers::Error> {
+ match value {
+ Ok(v) if v < 0 => Ok(0),
+ Ok(v) => Ok(v.try_into().expect("positive i64 can always be usize on 64 bit")),
+ Err(err) => Err(crate::config::key::Error::from(&super::Checkout::WORKERS).with_source(err)),
+ }
+ }
+ }
+}
+
+///
+pub mod validate {
+ use crate::{bstr::BStr, config::tree::keys};
+
+ pub struct Workers;
+ impl keys::Validate for Workers {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ super::Checkout::WORKERS.try_from_workers(gix_config::Integer::try_from(value).and_then(|i| {
+ i.to_decimal()
+ .ok_or_else(|| gix_config::value::Error::new("Integer overflow", value.to_owned()))
+ }))?;
+ Ok(())
+ }
+ }
+}
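
A small sketch of the `checkout.workers` decoding above, under the assumption that the key is reachable as `gix::config::tree::Checkout`:

// Sketch only: `gix::config::tree::Checkout` is the assumed public path of the key above.
use gix::config::tree::Checkout;

fn main() {
    // Positive values are taken verbatim as the number of checkout threads...
    assert_eq!(Checkout::WORKERS.try_from_workers(Ok(4)).ok(), Some(4));
    // ...while negative values collapse to 0, i.e. "use all available cores".
    assert_eq!(Checkout::WORKERS.try_from_workers(Ok(-1)).ok(), Some(0));
}
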
diff --git a/vendor/gix/src/config/tree/sections/clone.rs b/vendor/gix/src/config/tree/sections/clone.rs
new file mode 100644
index 000000000..616185a0b
--- /dev/null
+++ b/vendor/gix/src/config/tree/sections/clone.rs
@@ -0,0 +1,20 @@
+use crate::{
+ config,
+ config::tree::{keys, Clone, Key, Section},
+};
+
+impl Clone {
+ /// The `clone.defaultRemoteName` key.
+ pub const DEFAULT_REMOTE_NAME: keys::RemoteName =
+ keys::RemoteName::new_remote_name("defaultRemoteName", &config::Tree::CLONE);
+}
+
+impl Section for Clone {
+ fn name(&self) -> &str {
+ "clone"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::DEFAULT_REMOTE_NAME]
+ }
+}
diff --git a/vendor/gix/src/config/tree/sections/committer.rs b/vendor/gix/src/config/tree/sections/committer.rs
new file mode 100644
index 000000000..acc25c930
--- /dev/null
+++ b/vendor/gix/src/config/tree/sections/committer.rs
@@ -0,0 +1,23 @@
+use crate::{
+ config,
+ config::tree::{gitoxide, keys, Committer, Key, Section},
+};
+
+impl Committer {
+ /// The `committer.name` key.
+ pub const NAME: keys::Any =
+ keys::Any::new("name", &config::Tree::COMMITTER).with_fallback(&gitoxide::Committer::NAME_FALLBACK);
+ /// The `committer.email` key.
+ pub const EMAIL: keys::Any =
+ keys::Any::new("email", &config::Tree::COMMITTER).with_fallback(&gitoxide::Committer::EMAIL_FALLBACK);
+}
+
+impl Section for Committer {
+ fn name(&self) -> &str {
+ "committer"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::NAME, &Self::EMAIL]
+ }
+}
diff --git a/vendor/gix/src/config/tree/sections/core.rs b/vendor/gix/src/config/tree/sections/core.rs
new file mode 100644
index 000000000..6ea0580e1
--- /dev/null
+++ b/vendor/gix/src/config/tree/sections/core.rs
@@ -0,0 +1,302 @@
+use crate::{
+ config,
+ config::tree::{keys, Core, Key, Section},
+};
+
+impl Core {
+ /// The `core.abbrev` key.
+ pub const ABBREV: Abbrev = Abbrev::new_with_validate("abbrev", &config::Tree::CORE, validate::Abbrev);
+ /// The `core.bare` key.
+ pub const BARE: keys::Boolean = keys::Boolean::new_boolean("bare", &config::Tree::CORE);
+ /// The `core.checkStat` key.
+ pub const CHECK_STAT: CheckStat =
+ CheckStat::new_with_validate("checkStat", &config::Tree::CORE, validate::CheckStat);
+ /// The `core.deltaBaseCacheLimit` key.
+ pub const DELTA_BASE_CACHE_LIMIT: keys::UnsignedInteger =
+ keys::UnsignedInteger::new_unsigned_integer("deltaBaseCacheLimit", &config::Tree::CORE)
+ .with_environment_override("GITOXIDE_PACK_CACHE_MEMORY")
+ .with_note("if unset, we default to a small 64 slot fixed-size cache that holds at most 64 full delta base objects of any size. Set to 0 to deactivate it entirely");
+ /// The `core.disambiguate` key.
+ pub const DISAMBIGUATE: Disambiguate =
+ Disambiguate::new_with_validate("disambiguate", &config::Tree::CORE, validate::Disambiguate);
+ /// The `core.fileMode` key.
+ pub const FILE_MODE: keys::Boolean = keys::Boolean::new_boolean("fileMode", &config::Tree::CORE);
+ /// The `core.ignoreCase` key.
+ pub const IGNORE_CASE: keys::Boolean = keys::Boolean::new_boolean("ignoreCase", &config::Tree::CORE);
+ /// The `core.filesRefLockTimeout` key.
+ pub const FILES_REF_LOCK_TIMEOUT: keys::LockTimeout =
+ keys::LockTimeout::new_lock_timeout("filesRefLockTimeout", &config::Tree::CORE);
+ /// The `core.packedRefsTimeout` key.
+ pub const PACKED_REFS_TIMEOUT: keys::LockTimeout =
+ keys::LockTimeout::new_lock_timeout("packedRefsTimeout", &config::Tree::CORE);
+ /// The `core.multiPackIndex` key.
+ pub const MULTIPACK_INDEX: keys::Boolean = keys::Boolean::new_boolean("multiPackIndex", &config::Tree::CORE);
+ /// The `core.logAllRefUpdates` key.
+ pub const LOG_ALL_REF_UPDATES: LogAllRefUpdates =
+ LogAllRefUpdates::new_with_validate("logAllRefUpdates", &config::Tree::CORE, validate::LogAllRefUpdates);
+ /// The `core.precomposeUnicode` key.
+ ///
+ /// Needs application to use [env::args_os][crate::env::args_os()] to conform all input paths before they are used.
+ pub const PRECOMPOSE_UNICODE: keys::Boolean = keys::Boolean::new_boolean("precomposeUnicode", &config::Tree::CORE)
+ .with_note("application needs to conform all program input by using gix::env::args_os()");
+ /// The `core.repositoryFormatVersion` key.
+ pub const REPOSITORY_FORMAT_VERSION: keys::UnsignedInteger =
+ keys::UnsignedInteger::new_unsigned_integer("repositoryFormatVersion", &config::Tree::CORE);
+ /// The `core.symlinks` key.
+ pub const SYMLINKS: keys::Boolean = keys::Boolean::new_boolean("symlinks", &config::Tree::CORE);
+ /// The `core.trustCTime` key.
+ pub const TRUST_C_TIME: keys::Boolean = keys::Boolean::new_boolean("trustCTime", &config::Tree::CORE);
+ /// The `core.worktree` key.
+ pub const WORKTREE: keys::Any = keys::Any::new("worktree", &config::Tree::CORE)
+ .with_environment_override("GIT_WORK_TREE")
+ .with_deviation("Overriding the worktree with environment variables is supported using `ThreadSafeRepository::open_with_environment_overrides()");
+ /// The `core.askPass` key.
+ pub const ASKPASS: keys::Executable = keys::Executable::new_executable("askPass", &config::Tree::CORE)
+ .with_environment_override("GIT_ASKPASS")
+ .with_note("fallback is 'SSH_ASKPASS'");
+ /// The `core.excludesFile` key.
+ pub const EXCLUDES_FILE: keys::Executable = keys::Executable::new_executable("excludesFile", &config::Tree::CORE);
+ /// The `core.attributesFile` key.
+ pub const ATTRIBUTES_FILE: keys::Executable =
+ keys::Executable::new_executable("attributesFile", &config::Tree::CORE)
+ .with_deviation("for checkout - it's already queried but needs building of attributes group, and of course support during checkout");
+ /// The `core.sshCommand` key.
+ pub const SSH_COMMAND: keys::Executable = keys::Executable::new_executable("sshCommand", &config::Tree::CORE)
+ .with_environment_override("GIT_SSH_COMMAND");
+}
+
+impl Section for Core {
+ fn name(&self) -> &str {
+ "core"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[
+ &Self::ABBREV,
+ &Self::BARE,
+ &Self::CHECK_STAT,
+ &Self::DELTA_BASE_CACHE_LIMIT,
+ &Self::DISAMBIGUATE,
+ &Self::FILE_MODE,
+ &Self::IGNORE_CASE,
+ &Self::FILES_REF_LOCK_TIMEOUT,
+ &Self::PACKED_REFS_TIMEOUT,
+ &Self::MULTIPACK_INDEX,
+ &Self::LOG_ALL_REF_UPDATES,
+ &Self::PRECOMPOSE_UNICODE,
+ &Self::REPOSITORY_FORMAT_VERSION,
+ &Self::SYMLINKS,
+ &Self::TRUST_C_TIME,
+ &Self::WORKTREE,
+ &Self::ASKPASS,
+ &Self::EXCLUDES_FILE,
+ &Self::ATTRIBUTES_FILE,
+ &Self::SSH_COMMAND,
+ ]
+ }
+}
+
+/// The `core.checkStat` key.
+pub type CheckStat = keys::Any<validate::CheckStat>;
+
+/// The `core.abbrev` key.
+pub type Abbrev = keys::Any<validate::Abbrev>;
+
+/// The `core.logAllRefUpdates` key.
+pub type LogAllRefUpdates = keys::Any<validate::LogAllRefUpdates>;
+
+/// The `core.disambiguate` key.
+pub type Disambiguate = keys::Any<validate::Disambiguate>;
+
+mod disambiguate {
+ use std::borrow::Cow;
+
+ use crate::{
+ bstr::{BStr, ByteSlice},
+ config,
+ config::tree::core::Disambiguate,
+ revision::spec::parse::ObjectKindHint,
+ };
+
+ impl Disambiguate {
+ /// Convert a disambiguation marker into the respective enum.
+ pub fn try_into_object_kind_hint(
+ &'static self,
+ value: Cow<'_, BStr>,
+ ) -> Result<Option<ObjectKindHint>, config::key::GenericErrorWithValue> {
+ let hint = match value.as_ref().as_bytes() {
+ b"none" => return Ok(None),
+ b"commit" => ObjectKindHint::Commit,
+ b"committish" => ObjectKindHint::Committish,
+ b"tree" => ObjectKindHint::Tree,
+ b"treeish" => ObjectKindHint::Treeish,
+ b"blob" => ObjectKindHint::Blob,
+ _ => return Err(config::key::GenericErrorWithValue::from_value(self, value.into_owned())),
+ };
+ Ok(Some(hint))
+ }
+ }
+}
+
+mod log_all_ref_updates {
+ use std::borrow::Cow;
+
+ use crate::{bstr::BStr, config, config::tree::core::LogAllRefUpdates};
+
+ impl LogAllRefUpdates {
+ /// Returns the mode for ref-updates as parsed from `value`. If `value` is not a boolean, `string_on_failure` will be called
+ /// to obtain the key `core.logAllRefUpdates` as a string instead. For correctness, this two-step process is necessary as
+ /// the interpretation of booleans is special in `gix-config`, i.e. we can't just treat it as a string.
+ pub fn try_into_ref_updates<'a>(
+ &'static self,
+ value: Option<Result<bool, gix_config::value::Error>>,
+ string_on_failure: impl FnOnce() -> Option<Cow<'a, BStr>>,
+ ) -> Result<Option<gix_ref::store::WriteReflog>, config::key::GenericErrorWithValue> {
+ match value.transpose().ok().flatten() {
+ Some(bool) => Ok(Some(if bool {
+ gix_ref::store::WriteReflog::Normal
+ } else {
+ gix_ref::store::WriteReflog::Disable
+ })),
+ None => match string_on_failure() {
+ Some(val) if val.eq_ignore_ascii_case(b"always") => Ok(Some(gix_ref::store::WriteReflog::Always)),
+ Some(val) => Err(config::key::GenericErrorWithValue::from_value(self, val.into_owned())),
+ None => Ok(None),
+ },
+ }
+ }
+ }
+}
+
+mod check_stat {
+ use std::borrow::Cow;
+
+ use crate::{
+ bstr::{BStr, ByteSlice},
+ config,
+ config::tree::core::CheckStat,
+ };
+
+ impl CheckStat {
+ /// Returns `true` if the full set of stat entries should be checked; the parsing is just as lenient as git's.
+ pub fn try_into_checkstat(
+ &'static self,
+ value: Cow<'_, BStr>,
+ ) -> Result<bool, config::key::GenericErrorWithValue> {
+ Ok(match value.as_ref().as_bytes() {
+ b"minimal" => false,
+ b"default" => true,
+ _ => {
+ return Err(config::key::GenericErrorWithValue::from_value(self, value.into_owned()));
+ }
+ })
+ }
+ }
+}
+
+mod abbrev {
+ use std::borrow::Cow;
+
+ use config::abbrev::Error;
+
+ use crate::{
+ bstr::{BStr, ByteSlice},
+ config,
+ config::tree::core::Abbrev,
+ };
+
+ impl Abbrev {
+ /// Convert the given `hex_len_str` into the amount of characters that a short hash should have.
+ /// If `None` is returned, the correct value can be determined based on the amount of objects in the repo.
+ pub fn try_into_abbreviation(
+ &'static self,
+ hex_len_str: Cow<'_, BStr>,
+ object_hash: gix_hash::Kind,
+ ) -> Result<Option<usize>, Error> {
+ let max = object_hash.len_in_hex() as u8;
+ if hex_len_str.trim().is_empty() {
+ return Err(Error {
+ value: hex_len_str.into_owned(),
+ max,
+ });
+ }
+ if hex_len_str.trim().eq_ignore_ascii_case(b"auto") {
+ Ok(None)
+ } else {
+ let value_bytes = hex_len_str.as_ref();
+ if let Ok(false) = gix_config::Boolean::try_from(value_bytes).map(Into::into) {
+ Ok(object_hash.len_in_hex().into())
+ } else {
+ let value = gix_config::Integer::try_from(value_bytes)
+ .map_err(|_| Error {
+ value: hex_len_str.clone().into_owned(),
+ max,
+ })?
+ .to_decimal()
+ .ok_or_else(|| Error {
+ value: hex_len_str.clone().into_owned(),
+ max,
+ })?;
+ if value < 4 || value as usize > object_hash.len_in_hex() {
+ return Err(Error {
+ value: hex_len_str.clone().into_owned(),
+ max,
+ });
+ }
+ Ok(Some(value as usize))
+ }
+ }
+ }
+ }
+}
+
+mod validate {
+ use crate::{bstr::BStr, config::tree::keys};
+
+ pub struct LockTimeout;
+ impl keys::Validate for LockTimeout {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ let value = gix_config::Integer::try_from(value)?
+ .to_decimal()
+ .ok_or_else(|| format!("integer {value} cannot be represented as a decimal integer"));
+ super::Core::FILES_REF_LOCK_TIMEOUT.try_into_lock_timeout(Ok(value?))?;
+ Ok(())
+ }
+ }
+
+ pub struct Disambiguate;
+ impl keys::Validate for Disambiguate {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ super::Core::DISAMBIGUATE.try_into_object_kind_hint(value.into())?;
+ Ok(())
+ }
+ }
+
+ pub struct LogAllRefUpdates;
+ impl keys::Validate for LogAllRefUpdates {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ super::Core::LOG_ALL_REF_UPDATES
+ .try_into_ref_updates(Some(gix_config::Boolean::try_from(value).map(|b| b.0)), || {
+ Some(value.into())
+ })?;
+ Ok(())
+ }
+ }
+
+ pub struct CheckStat;
+ impl keys::Validate for CheckStat {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ super::Core::CHECK_STAT.try_into_checkstat(value.into())?;
+ Ok(())
+ }
+ }
+
+ pub struct Abbrev;
+ impl keys::Validate for Abbrev {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ // TODO: when there is options, validate against all hashes and assure all fail to trigger a validation failure.
+ super::Core::ABBREV.try_into_abbreviation(value.into(), gix_hash::Kind::Sha1)?;
+ Ok(())
+ }
+ }
+}
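
A brief sketch of two of the conversions above; the paths `gix::config::tree::Core`, `gix::bstr` and the `gix::hash` re-export are assumptions based on the doc links in this file:

// Sketch only: `gix::config::tree::Core` and the `gix::hash` re-export are assumed paths.
use gix::bstr::ByteSlice;
use gix::config::tree::Core;

fn main() {
    // "auto" leaves the abbreviation length up to the repository, expressed as `None`...
    let auto = Core::ABBREV
        .try_into_abbreviation(b"auto".as_bstr().into(), gix::hash::Kind::Sha1)
        .expect("'auto' is always valid");
    assert_eq!(auto, None);

    // ...while a fixed length within `4..=40` (for SHA-1) is returned as-is.
    let fixed = Core::ABBREV
        .try_into_abbreviation(b"12".as_bstr().into(), gix::hash::Kind::Sha1)
        .expect("12 is within range for SHA-1");
    assert_eq!(fixed, Some(12));
}
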
diff --git a/vendor/gix/src/config/tree/sections/credential.rs b/vendor/gix/src/config/tree/sections/credential.rs
new file mode 100644
index 000000000..d370db0c5
--- /dev/null
+++ b/vendor/gix/src/config/tree/sections/credential.rs
@@ -0,0 +1,56 @@
+use crate::{
+ config,
+ config::tree::{keys, Credential, Key, Section},
+};
+
+impl Credential {
+ /// The `credential.helper` key.
+ pub const HELPER: keys::Program = keys::Program::new_program("helper", &config::Tree::CREDENTIAL);
+ /// The `credential.username` key.
+ pub const USERNAME: keys::Any = keys::Any::new("username", &config::Tree::CREDENTIAL);
+ /// The `credential.useHttpPath` key.
+ pub const USE_HTTP_PATH: keys::Boolean = keys::Boolean::new_boolean("useHttpPath", &config::Tree::CREDENTIAL);
+
+ /// The `credential.<url>` subsection
+ pub const URL_PARAMETER: UrlParameter = UrlParameter;
+}
+
+/// The `credential.<url>` parameter section.
+pub struct UrlParameter;
+
+impl UrlParameter {
+ /// The `credential.<url>.helper` key.
+ pub const HELPER: keys::Program = keys::Program::new_program("helper", &Credential::URL_PARAMETER);
+ /// The `credential.<url>.username` key.
+ pub const USERNAME: keys::Any = keys::Any::new("username", &Credential::URL_PARAMETER);
+ /// The `credential.<url>.useHttpPath` key.
+ pub const USE_HTTP_PATH: keys::Boolean = keys::Boolean::new_boolean("useHttpPath", &Credential::URL_PARAMETER);
+}
+
+impl Section for UrlParameter {
+ fn name(&self) -> &str {
+ "<url>"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::HELPER, &Self::USERNAME, &Self::USE_HTTP_PATH]
+ }
+
+ fn parent(&self) -> Option<&dyn Section> {
+ Some(&config::Tree::CREDENTIAL)
+ }
+}
+
+impl Section for Credential {
+ fn name(&self) -> &str {
+ "credential"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::HELPER, &Self::USERNAME, &Self::USE_HTTP_PATH]
+ }
+
+ fn sub_sections(&self) -> &[&dyn Section] {
+ &[&Self::URL_PARAMETER]
+ }
+}
diff --git a/vendor/gix/src/config/tree/sections/diff.rs b/vendor/gix/src/config/tree/sections/diff.rs
new file mode 100644
index 000000000..103bb7001
--- /dev/null
+++ b/vendor/gix/src/config/tree/sections/diff.rs
@@ -0,0 +1,133 @@
+use crate::{
+ config,
+ config::tree::{keys, Diff, Key, Section},
+};
+
+impl Diff {
+ /// The `diff.algorithm` key.
+ pub const ALGORITHM: Algorithm = Algorithm::new_with_validate("algorithm", &config::Tree::DIFF, validate::Algorithm)
+ .with_deviation("'patience' diff is not implemented and can default to 'histogram' if lenient config is used, and defaults to histogram if unset for fastest and best results");
+ /// The `diff.renameLimit` key.
+ pub const RENAME_LIMIT: keys::UnsignedInteger = keys::UnsignedInteger::new_unsigned_integer(
+ "renameLimit",
+ &config::Tree::DIFF,
+ )
+ .with_note(
+ "The limit is actually squared, so 1000 stands for up to 1 million diffs if fuzzy rename tracking is enabled",
+ );
+ /// The `diff.renames` key.
+ pub const RENAMES: Renames = Renames::new_renames("renames", &config::Tree::DIFF);
+}
+
+impl Section for Diff {
+ fn name(&self) -> &str {
+ "diff"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::ALGORITHM, &Self::RENAME_LIMIT, &Self::RENAMES]
+ }
+}
+
+/// The `diff.algorithm` key.
+pub type Algorithm = keys::Any<validate::Algorithm>;
+
+/// The `diff.renames` key.
+pub type Renames = keys::Any<validate::Renames>;
+
+mod algorithm {
+ use std::borrow::Cow;
+
+ use crate::{
+ bstr::BStr,
+ config,
+ config::{diff::algorithm::Error, tree::sections::diff::Algorithm},
+ };
+
+ impl Algorithm {
+ /// Derive the diff algorithm identified by `name`, case-insensitively.
+ pub fn try_into_algorithm(&self, name: Cow<'_, BStr>) -> Result<gix_diff::blob::Algorithm, Error> {
+ let algo = if name.eq_ignore_ascii_case(b"myers") || name.eq_ignore_ascii_case(b"default") {
+ gix_diff::blob::Algorithm::Myers
+ } else if name.eq_ignore_ascii_case(b"minimal") {
+ gix_diff::blob::Algorithm::MyersMinimal
+ } else if name.eq_ignore_ascii_case(b"histogram") {
+ gix_diff::blob::Algorithm::Histogram
+ } else if name.eq_ignore_ascii_case(b"patience") {
+ return Err(config::diff::algorithm::Error::Unimplemented {
+ name: name.into_owned(),
+ });
+ } else {
+ return Err(Error::Unknown {
+ name: name.into_owned(),
+ });
+ };
+ Ok(algo)
+ }
+ }
+}
+
+mod renames {
+ use std::borrow::Cow;
+
+ use crate::{
+ bstr::{BStr, ByteSlice},
+ config::{
+ key::GenericError,
+ tree::{keys, sections::diff::Renames, Section},
+ },
+ diff::rename::Tracking,
+ };
+
+ impl Renames {
+ /// Create a new instance.
+ pub const fn new_renames(name: &'static str, section: &'static dyn Section) -> Self {
+ keys::Any::new_with_validate(name, section, super::validate::Renames)
+ }
+ /// Try to convert the configuration into a valid rename tracking variant. Use `value`, and if it's an error, call `value_string`
+ /// to try to interpret the key as a string.
+ pub fn try_into_renames<'a>(
+ &'static self,
+ value: Result<bool, gix_config::value::Error>,
+ value_string: impl FnOnce() -> Option<Cow<'a, BStr>>,
+ ) -> Result<Tracking, GenericError> {
+ Ok(match value {
+ Ok(true) => Tracking::Renames,
+ Ok(false) => Tracking::Disabled,
+ Err(err) => {
+ let value = value_string().ok_or_else(|| GenericError::from(self))?;
+ match value.as_ref().as_bytes() {
+ b"copy" | b"copies" => Tracking::RenamesAndCopies,
+ _ => return Err(GenericError::from_value(self, value.into_owned()).with_source(err)),
+ }
+ }
+ })
+ }
+ }
+}
+
+mod validate {
+ use std::borrow::Cow;
+
+ use crate::{
+ bstr::BStr,
+ config::tree::{keys, Diff},
+ };
+
+ pub struct Algorithm;
+ impl keys::Validate for Algorithm {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ Diff::ALGORITHM.try_into_algorithm(value.into())?;
+ Ok(())
+ }
+ }
+
+ pub struct Renames;
+ impl keys::Validate for Renames {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ let boolean = gix_config::Boolean::try_from(value).map(|b| b.0);
+ Diff::RENAMES.try_into_renames(boolean, || Some(Cow::Borrowed(value)))?;
+ Ok(())
+ }
+ }
+}
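
A short sketch of the case-insensitive `diff.algorithm` parsing above, assuming the `gix::config::tree::Diff` path:

// Sketch only: `gix::config::tree::Diff` is the assumed public path of the key above.
use gix::bstr::ByteSlice;
use gix::config::tree::Diff;

fn main() {
    // Algorithm names are matched case-insensitively...
    assert!(Diff::ALGORITHM.try_into_algorithm(b"Histogram".as_bstr().into()).is_ok());
    // ...and `patience` is recognized but reported as unimplemented.
    assert!(Diff::ALGORITHM.try_into_algorithm(b"patience".as_bstr().into()).is_err());
}
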
diff --git a/vendor/gix/src/config/tree/sections/extensions.rs b/vendor/gix/src/config/tree/sections/extensions.rs
new file mode 100644
index 000000000..77130f804
--- /dev/null
+++ b/vendor/gix/src/config/tree/sections/extensions.rs
@@ -0,0 +1,59 @@
+use crate::{
+ config,
+ config::tree::{keys, Extensions, Key, Section},
+};
+
+impl Extensions {
+ /// The `extensions.worktreeConfig` key.
+ pub const WORKTREE_CONFIG: keys::Boolean = keys::Boolean::new_boolean("worktreeConfig", &config::Tree::EXTENSIONS);
+ /// The `extensions.objectFormat` key.
+ pub const OBJECT_FORMAT: ObjectFormat =
+ ObjectFormat::new_with_validate("objectFormat", &config::Tree::EXTENSIONS, validate::ObjectFormat).with_note(
+ "Support for SHA256 is prepared but not fully implemented yet. For now we abort when encountered",
+ );
+}
+
+ /// The `extensions.objectFormat` key.
+pub type ObjectFormat = keys::Any<validate::ObjectFormat>;
+
+mod object_format {
+ use std::borrow::Cow;
+
+ use crate::{bstr::BStr, config, config::tree::sections::extensions::ObjectFormat};
+
+ impl ObjectFormat {
+ pub fn try_into_object_format(
+ &'static self,
+ value: Cow<'_, BStr>,
+ ) -> Result<gix_hash::Kind, config::key::GenericErrorWithValue> {
+ if value.as_ref().eq_ignore_ascii_case(b"sha1") {
+ Ok(gix_hash::Kind::Sha1)
+ } else {
+ Err(config::key::GenericErrorWithValue::from_value(self, value.into_owned()))
+ }
+ }
+ }
+}
+
+impl Section for Extensions {
+ fn name(&self) -> &str {
+ "extensions"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::OBJECT_FORMAT, &Self::WORKTREE_CONFIG]
+ }
+}
+
+mod validate {
+ use crate::{bstr::BStr, config::tree::keys};
+
+ pub struct ObjectFormat;
+
+ impl keys::Validate for ObjectFormat {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ super::Extensions::OBJECT_FORMAT.try_into_object_format(value.into())?;
+ Ok(())
+ }
+ }
+}
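
A sketch of `extensions.objectFormat` parsing, which currently only accepts `sha1` as noted above; paths are assumed as in the earlier sketches:

// Sketch only: `gix::config::tree::Extensions` is the assumed public path of the key above.
use gix::bstr::ByteSlice;
use gix::config::tree::Extensions;

fn main() {
    // Only SHA-1 is accepted, case-insensitively...
    assert!(Extensions::OBJECT_FORMAT.try_into_object_format(b"SHA1".as_bstr().into()).is_ok());
    // ...anything else, including sha256, is rejected for now.
    assert!(Extensions::OBJECT_FORMAT.try_into_object_format(b"sha256".as_bstr().into()).is_err());
}
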
diff --git a/vendor/gix/src/config/tree/sections/gitoxide.rs b/vendor/gix/src/config/tree/sections/gitoxide.rs
new file mode 100644
index 000000000..8c3defd0b
--- /dev/null
+++ b/vendor/gix/src/config/tree/sections/gitoxide.rs
@@ -0,0 +1,363 @@
+use crate::config::tree::{keys, Gitoxide, Key, Section};
+
+impl Gitoxide {
+ /// The `gitoxide.allow` section.
+ pub const ALLOW: Allow = Allow;
+ /// The `gitoxide.author` section.
+ pub const AUTHOR: Author = Author;
+ /// The `gitoxide.commit` section.
+ pub const COMMIT: Commit = Commit;
+ /// The `gitoxide.committer` section.
+ pub const COMMITTER: Committer = Committer;
+ /// The `gitoxide.http` section.
+ pub const HTTP: Http = Http;
+ /// The `gitoxide.https` section.
+ pub const HTTPS: Https = Https;
+ /// The `gitoxide.objects` section.
+ pub const OBJECTS: Objects = Objects;
+ /// The `gitoxide.ssh` section.
+ pub const SSH: Ssh = Ssh;
+ /// The `gitoxide.user` section.
+ pub const USER: User = User;
+
+ /// The `gitoxide.userAgent` Key.
+ pub const USER_AGENT: keys::Any = keys::Any::new("userAgent", &config::Tree::GITOXIDE).with_note(
+ "The user agent presented on the git protocol layer, serving as fallback for when no `http.userAgent` is set",
+ );
+}
+
+impl Section for Gitoxide {
+ fn name(&self) -> &str {
+ "gitoxide"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::USER_AGENT]
+ }
+
+ fn sub_sections(&self) -> &[&dyn Section] {
+ &[
+ &Self::ALLOW,
+ &Self::AUTHOR,
+ &Self::COMMIT,
+ &Self::COMMITTER,
+ &Self::HTTP,
+ &Self::HTTPS,
+ &Self::OBJECTS,
+ &Self::SSH,
+ &Self::USER,
+ ]
+ }
+}
+
+mod subsections {
+ use crate::config::{
+ tree::{http, keys, Gitoxide, Key, Section},
+ Tree,
+ };
+
+ /// The `Http` sub-section.
+ #[derive(Copy, Clone, Default)]
+ pub struct Http;
+
+ impl Http {
+ /// The `gitoxide.http.proxy` key.
+ pub const PROXY: keys::String =
+ keys::String::new_string("proxy", &Gitoxide::HTTP).with_environment_override("http_proxy");
+ /// The `gitoxide.http.allProxy` key.
+ pub const ALL_PROXY: keys::String = keys::String::new_string("allProxy", &Gitoxide::HTTP)
+ .with_environment_override("all_proxy")
+ .with_note("fallback environment is `ALL_PROXY`");
+ /// The `gitoxide.http.verbose` key.
+ ///
+ /// If set, curl will be configured to log verbosely.
+ pub const VERBOSE: keys::Boolean = keys::Boolean::new_boolean("verbose", &Gitoxide::HTTP)
+ .with_environment_override("GIT_CURL_VERBOSE")
+ .with_deviation("we parse it as boolean for convenience (infallible) but git only checks the presence");
+ /// The `gitoxide.http.noProxy` key.
+ pub const NO_PROXY: keys::String = keys::String::new_string("noProxy", &Gitoxide::HTTP)
+ .with_environment_override("no_proxy")
+ .with_note("fallback environment is `NO_PROXY`");
+ /// The `gitoxide.http.connectTimeout` key.
+ pub const CONNECT_TIMEOUT: keys::DurationInMilliseconds =
+ keys::DurationInMilliseconds::new_duration("connectTimeout", &Gitoxide::HTTP).with_note(
+ "entirely new, and in milliseconds, to describe how long to wait until a connection attempt is aborted",
+ );
+ /// The `gitoxide.http.sslVersionMin` key.
+ pub const SSL_VERSION_MIN: http::SslVersion =
+ http::SslVersion::new_ssl_version("sslVersionMin", &Gitoxide::HTTP).with_note(
+ "entirely new to set the lower bound for the allowed ssl version range. Overwrites the min bound of `http.sslVersion` if set. Min and Max must be set to become effective.",
+ );
+ /// The `gitoxide.http.sslVersionMax` key.
+ pub const SSL_VERSION_MAX: http::SslVersion =
+ http::SslVersion::new_ssl_version("sslVersionMax", &Gitoxide::HTTP).with_note(
+ "entirely new to set the upper bound for the allowed ssl version range. Overwrites the max bound of `http.sslVersion` if set. Min and Max must be set to become effective.",
+ );
+ /// The `gitoxide.http.proxyAuthMethod` key.
+ pub const PROXY_AUTH_METHOD: http::ProxyAuthMethod =
+ http::ProxyAuthMethod::new_proxy_auth_method("proxyAuthMethod", &Gitoxide::HTTP)
+ .with_environment_override("GIT_HTTP_PROXY_AUTHMETHOD");
+ }
+
+ impl Section for Http {
+ fn name(&self) -> &str {
+ "http"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[
+ &Self::PROXY,
+ &Self::ALL_PROXY,
+ &Self::VERBOSE,
+ &Self::NO_PROXY,
+ &Self::CONNECT_TIMEOUT,
+ &Self::SSL_VERSION_MIN,
+ &Self::SSL_VERSION_MAX,
+ &Self::PROXY_AUTH_METHOD,
+ ]
+ }
+
+ fn parent(&self) -> Option<&dyn Section> {
+ Some(&Tree::GITOXIDE)
+ }
+ }
+
+ /// The `Https` sub-section.
+ #[derive(Copy, Clone, Default)]
+ pub struct Https;
+
+ impl Https {
+ /// The `gitoxide.https.proxy` key.
+ pub const PROXY: keys::String = keys::String::new_string("proxy", &Gitoxide::HTTPS)
+ .with_environment_override("HTTPS_PROXY")
+ .with_note("fallback environment variable is `https_proxy`");
+ }
+
+ impl Section for Https {
+ fn name(&self) -> &str {
+ "https"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::PROXY]
+ }
+
+ fn parent(&self) -> Option<&dyn Section> {
+ Some(&Tree::GITOXIDE)
+ }
+ }
+
+ /// The `allow` sub-section.
+ #[derive(Copy, Clone, Default)]
+ pub struct Allow;
+
+ /// The `gitoxide.allow.protocolFromUser` key.
+ pub type ProtocolFromUser = keys::Any<super::validate::ProtocolFromUser>;
+
+ impl Allow {
+ /// The `gitoxide.allow.protocolFromUser` key.
+ pub const PROTOCOL_FROM_USER: ProtocolFromUser = ProtocolFromUser::new_with_validate(
+ "protocolFromUser",
+ &Gitoxide::ALLOW,
+ super::validate::ProtocolFromUser,
+ )
+ .with_environment_override("GIT_PROTOCOL_FROM_USER");
+ }
+
+ impl Section for Allow {
+ fn name(&self) -> &str {
+ "allow"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::PROTOCOL_FROM_USER]
+ }
+
+ fn parent(&self) -> Option<&dyn Section> {
+ Some(&Tree::GITOXIDE)
+ }
+ }
+
+ /// The `author` sub-section.
+ #[derive(Copy, Clone, Default)]
+ pub struct Author;
+
+ impl Author {
+ /// The `gitoxide.author.nameFallback` key.
+ pub const NAME_FALLBACK: keys::Any =
+ keys::Any::new("nameFallback", &Gitoxide::AUTHOR).with_environment_override("GIT_AUTHOR_NAME");
+ /// The `gitoxide.author.emailFallback` key.
+ pub const EMAIL_FALLBACK: keys::Any =
+ keys::Any::new("emailFallback", &Gitoxide::AUTHOR).with_environment_override("GIT_AUTHOR_EMAIL");
+ }
+
+ impl Section for Author {
+ fn name(&self) -> &str {
+ "author"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::NAME_FALLBACK, &Self::EMAIL_FALLBACK]
+ }
+
+ fn parent(&self) -> Option<&dyn Section> {
+ Some(&Tree::GITOXIDE)
+ }
+ }
+
+ /// The `user` sub-section.
+ #[derive(Copy, Clone, Default)]
+ pub struct User;
+
+ impl User {
+ /// The `gitoxide.user.emailFallback` key.
+ pub const EMAIL_FALLBACK: keys::Any =
+ keys::Any::new("emailFallback", &Gitoxide::USER).with_environment_override("EMAIL");
+ }
+
+ impl Section for User {
+ fn name(&self) -> &str {
+ "user"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::EMAIL_FALLBACK]
+ }
+
+ fn parent(&self) -> Option<&dyn Section> {
+ Some(&Tree::GITOXIDE)
+ }
+ }
+
+ /// The `ssh` sub-section.
+ #[derive(Copy, Clone, Default)]
+ pub struct Ssh;
+
+ impl Ssh {
+ /// The `gitoxide.ssh.commandWithoutShellFallback` key.
+ pub const COMMAND_WITHOUT_SHELL_FALLBACK: keys::Executable =
+ keys::Executable::new_executable("commandWithoutShellFallback", &Gitoxide::SSH)
+ .with_environment_override("GIT_SSH")
+ .with_note("is always executed without shell and treated as fallback");
+ }
+
+ impl Section for Ssh {
+ fn name(&self) -> &str {
+ "ssh"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::COMMAND_WITHOUT_SHELL_FALLBACK]
+ }
+
+ fn parent(&self) -> Option<&dyn Section> {
+ Some(&Tree::GITOXIDE)
+ }
+ }
+
+ /// The `objects` sub-section.
+ #[derive(Copy, Clone, Default)]
+ pub struct Objects;
+
+ impl Objects {
+ /// The `gitoxide.objects.cacheLimit` key.
+ pub const CACHE_LIMIT: keys::UnsignedInteger =
+ keys::UnsignedInteger::new_unsigned_integer("cacheLimit", &Gitoxide::OBJECTS)
+ .with_note("If unset or 0, there is no object cache")
+ .with_environment_override("GITOXIDE_OBJECT_CACHE_MEMORY");
+ /// The `gitoxide.objects.noReplace` key.
+ pub const NO_REPLACE: keys::Boolean = keys::Boolean::new_boolean("noReplace", &Gitoxide::OBJECTS)
+ .with_environment_override("GIT_NO_REPLACE_OBJECTS");
+ /// The `gitoxide.objects.replaceRefBase` key.
+ pub const REPLACE_REF_BASE: keys::Any =
+ keys::Any::new("replaceRefBase", &Gitoxide::OBJECTS).with_environment_override("GIT_REPLACE_REF_BASE");
+ }
+
+ impl Section for Objects {
+ fn name(&self) -> &str {
+ "objects"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::CACHE_LIMIT, &Self::NO_REPLACE, &Self::REPLACE_REF_BASE]
+ }
+
+ fn parent(&self) -> Option<&dyn Section> {
+ Some(&Tree::GITOXIDE)
+ }
+ }
+
+ /// The `committer` sub-section.
+ #[derive(Copy, Clone, Default)]
+ pub struct Committer;
+
+ impl Committer {
+ /// The `gitoxide.committer.nameFallback` key.
+ pub const NAME_FALLBACK: keys::Any =
+ keys::Any::new("nameFallback", &Gitoxide::COMMITTER).with_environment_override("GIT_COMMITTER_NAME");
+ /// The `gitoxide.committer.emailFallback` key.
+ pub const EMAIL_FALLBACK: keys::Any =
+ keys::Any::new("emailFallback", &Gitoxide::COMMITTER).with_environment_override("GIT_COMMITTER_EMAIL");
+ }
+
+ impl Section for Committer {
+ fn name(&self) -> &str {
+ "committer"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::NAME_FALLBACK, &Self::EMAIL_FALLBACK]
+ }
+
+ fn parent(&self) -> Option<&dyn Section> {
+ Some(&Tree::GITOXIDE)
+ }
+ }
+
+ /// The `commit` sub-section.
+ #[derive(Copy, Clone, Default)]
+ pub struct Commit;
+
+ impl Commit {
+ /// The `gitoxide.commit.authorDate` key.
+ pub const AUTHOR_DATE: keys::Time =
+ keys::Time::new_time("authorDate", &Gitoxide::COMMIT).with_environment_override("GIT_AUTHOR_DATE");
+ /// The `gitoxide.commit.committerDate` key.
+ pub const COMMITTER_DATE: keys::Time =
+ keys::Time::new_time("committerDate", &Gitoxide::COMMIT).with_environment_override("GIT_COMMITTER_DATE");
+ }
+
+ impl Section for Commit {
+ fn name(&self) -> &str {
+ "commit"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[]
+ }
+
+ fn parent(&self) -> Option<&dyn Section> {
+ Some(&Tree::GITOXIDE)
+ }
+ }
+}
+
+pub mod validate {
+ use std::error::Error;
+
+ use crate::{bstr::BStr, config::tree::keys::Validate};
+
+ pub struct ProtocolFromUser;
+ impl Validate for ProtocolFromUser {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn Error + Send + Sync + 'static>> {
+ if value != "1" {
+ return Err("GIT_PROTOCOL_FROM_USER is either unset or as the value '1'".into());
+ }
+ Ok(())
+ }
+ }
+}
+
+pub use subsections::{Allow, Author, Commit, Committer, Http, Https, Objects, Ssh, User};
+
+use crate::config;
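
A sketch of walking the statically known `gitoxide.*` sub-sections through the `Section` trait declared in this tree, assuming the same `gix::config::tree` path:

// Sketch only: `gix::config::tree::{Gitoxide, Section}` are the assumed public paths.
use gix::config::tree::{Gitoxide, Section};

fn main() {
    // Walk the statically known sub-sections, printing e.g. `gitoxide.allow`, `gitoxide.author`, ...
    for sub in Gitoxide.sub_sections() {
        println!("gitoxide.{}", sub.name());
    }
}
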
diff --git a/vendor/gix/src/config/tree/sections/http.rs b/vendor/gix/src/config/tree/sections/http.rs
new file mode 100644
index 000000000..f45c37076
--- /dev/null
+++ b/vendor/gix/src/config/tree/sections/http.rs
@@ -0,0 +1,317 @@
+use crate::{
+ config,
+ config::tree::{keys, Http, Key, Section},
+};
+
+impl Http {
+ /// The `http.sslVersion` key.
+ pub const SSL_VERSION: SslVersion = SslVersion::new_ssl_version("sslVersion", &config::Tree::HTTP)
+ .with_environment_override("GIT_SSL_VERSION")
+ .with_deviation(
+ "accepts the new 'default' value which means to use the curl default just like the empty string does",
+ );
+ /// The `http.proxy` key.
+ pub const PROXY: keys::String =
+ keys::String::new_string("proxy", &config::Tree::HTTP).with_deviation("fails on strings with illformed UTF-8");
+ /// The `http.proxyAuthMethod` key.
+ pub const PROXY_AUTH_METHOD: ProxyAuthMethod =
+ ProxyAuthMethod::new_proxy_auth_method("proxyAuthMethod", &config::Tree::HTTP)
+ .with_deviation("implemented like git, but never actually tried");
+ /// The `http.version` key.
+ pub const VERSION: Version = Version::new_with_validate("version", &config::Tree::HTTP, validate::Version)
+ .with_deviation("fails on illformed UTF-8");
+ /// The `http.userAgent` key.
+ pub const USER_AGENT: keys::String =
+ keys::String::new_string("userAgent", &config::Tree::HTTP).with_deviation("fails on illformed UTF-8");
+ /// The `http.extraHeader` key.
+ pub const EXTRA_HEADER: ExtraHeader =
+ ExtraHeader::new_with_validate("extraHeader", &config::Tree::HTTP, validate::ExtraHeader)
+ .with_deviation("fails on illformed UTF-8, without leniency");
+ /// The `http.followRedirects` key.
+ pub const FOLLOW_REDIRECTS: FollowRedirects =
+ FollowRedirects::new_with_validate("followRedirects", &config::Tree::HTTP, validate::FollowRedirects);
+ /// The `http.lowSpeedTime` key.
+ pub const LOW_SPEED_TIME: keys::UnsignedInteger =
+ keys::UnsignedInteger::new_unsigned_integer("lowSpeedTime", &config::Tree::HTTP)
+ .with_deviation("fails on negative values");
+ /// The `http.lowSpeedLimit` key.
+ pub const LOW_SPEED_LIMIT: keys::UnsignedInteger =
+ keys::UnsignedInteger::new_unsigned_integer("lowSpeedLimit", &config::Tree::HTTP)
+ .with_deviation("fails on negative values");
+ /// The `http.schannelUseSSLCAInfo` key.
+ pub const SCHANNEL_USE_SSL_CA_INFO: keys::Boolean =
+ keys::Boolean::new_boolean("schannelUseSSLCAInfo", &config::Tree::HTTP)
+ .with_deviation("only used as switch internally to turn off using the sslCAInfo, unconditionally. If unset, it has no effect, whereas in `git` it defaults to false.");
+ /// The `http.sslCAInfo` key.
+ pub const SSL_CA_INFO: keys::Path =
+ keys::Path::new_path("sslCAInfo", &config::Tree::HTTP).with_environment_override("GIT_SSL_CAINFO");
+ /// The `http.schannelCheckRevoke` key.
+ pub const SCHANNEL_CHECK_REVOKE: keys::Boolean =
+ keys::Boolean::new_boolean("schannelCheckRevoke", &config::Tree::HTTP);
+}
+
+impl Section for Http {
+ fn name(&self) -> &str {
+ "http"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[
+ &Self::SSL_VERSION,
+ &Self::PROXY,
+ &Self::PROXY_AUTH_METHOD,
+ &Self::VERSION,
+ &Self::USER_AGENT,
+ &Self::EXTRA_HEADER,
+ &Self::FOLLOW_REDIRECTS,
+ &Self::LOW_SPEED_TIME,
+ &Self::LOW_SPEED_LIMIT,
+ &Self::SCHANNEL_USE_SSL_CA_INFO,
+ &Self::SSL_CA_INFO,
+ &Self::SCHANNEL_CHECK_REVOKE,
+ ]
+ }
+}
+
+/// The `http.followRedirects` key.
+pub type FollowRedirects = keys::Any<validate::FollowRedirects>;
+
+/// The `http.extraHeader` key.
+pub type ExtraHeader = keys::Any<validate::ExtraHeader>;
+
+/// The `http.sslVersion` key, as well as others of the same type.
+pub type SslVersion = keys::Any<validate::SslVersion>;
+
+/// The `http.proxyAuthMethod` key, as well as others of the same type.
+pub type ProxyAuthMethod = keys::Any<validate::ProxyAuthMethod>;
+
+/// The `http.version` key.
+pub type Version = keys::Any<validate::Version>;
+
+mod key_impls {
+ use crate::config::tree::{
+ http::{ProxyAuthMethod, SslVersion},
+ keys, Section,
+ };
+
+ impl SslVersion {
+ pub const fn new_ssl_version(name: &'static str, section: &'static dyn Section) -> Self {
+ keys::Any::new_with_validate(name, section, super::validate::SslVersion)
+ }
+ }
+
+ impl ProxyAuthMethod {
+ pub const fn new_proxy_auth_method(name: &'static str, section: &'static dyn Section) -> Self {
+ keys::Any::new_with_validate(name, section, super::validate::ProxyAuthMethod)
+ }
+ }
+
+ #[cfg(any(
+ feature = "blocking-http-transport-reqwest",
+ feature = "blocking-http-transport-curl"
+ ))]
+ impl crate::config::tree::http::FollowRedirects {
+ /// Convert `value` into the redirect specification, or query the same value as `boolean`
+ /// for additional possible input values.
+ ///
+ /// Note that `boolean` only queries the underlying key as boolean, which is a necessity to handle
+ /// empty booleans correctly, that is those without a value separator.
+ pub fn try_into_follow_redirects(
+ &'static self,
+ value: std::borrow::Cow<'_, crate::bstr::BStr>,
+ boolean: impl FnOnce() -> Result<Option<bool>, gix_config::value::Error>,
+ ) -> Result<
+ crate::protocol::transport::client::http::options::FollowRedirects,
+ crate::config::key::GenericErrorWithValue,
+ > {
+ use crate::{bstr::ByteSlice, protocol::transport::client::http::options::FollowRedirects};
+ Ok(if value.as_ref().as_bytes() == b"initial" {
+ FollowRedirects::Initial
+ } else if let Some(value) = boolean().map_err(|err| {
+ crate::config::key::GenericErrorWithValue::from_value(self, value.into_owned()).with_source(err)
+ })? {
+ if value {
+ FollowRedirects::All
+ } else {
+ FollowRedirects::None
+ }
+ } else {
+ FollowRedirects::Initial
+ })
+ }
+ }
+
+ impl super::ExtraHeader {
+ /// Convert a list of values into extra-headers, while failing entirely on illformed UTF-8.
+ pub fn try_into_extra_header(
+ &'static self,
+ values: Vec<std::borrow::Cow<'_, crate::bstr::BStr>>,
+ ) -> Result<Vec<String>, crate::config::string::Error> {
+ let mut out = Vec::with_capacity(values.len());
+ for value in values {
+ if value.is_empty() {
+ out.clear();
+ } else {
+ out.push(self.try_into_string(value)?);
+ }
+ }
+ Ok(out)
+ }
+ }
+
+ #[cfg(any(
+ feature = "blocking-http-transport-reqwest",
+ feature = "blocking-http-transport-curl"
+ ))]
+ impl super::Version {
+ pub fn try_into_http_version(
+ &'static self,
+ value: std::borrow::Cow<'_, crate::bstr::BStr>,
+ ) -> Result<
+ gix_protocol::transport::client::http::options::HttpVersion,
+ crate::config::key::GenericErrorWithValue,
+ > {
+ use gix_protocol::transport::client::http::options::HttpVersion;
+
+ use crate::bstr::ByteSlice;
+ Ok(match value.as_ref().as_bytes() {
+ b"HTTP/1.1" => HttpVersion::V1_1,
+ b"HTTP/2" => HttpVersion::V2,
+ _ => {
+ return Err(crate::config::key::GenericErrorWithValue::from_value(
+ self,
+ value.into_owned(),
+ ))
+ }
+ })
+ }
+ }
+
+ #[cfg(any(
+ feature = "blocking-http-transport-reqwest",
+ feature = "blocking-http-transport-curl"
+ ))]
+ impl ProxyAuthMethod {
+ pub fn try_into_proxy_auth_method(
+ &'static self,
+ value: std::borrow::Cow<'_, crate::bstr::BStr>,
+ ) -> Result<
+ gix_protocol::transport::client::http::options::ProxyAuthMethod,
+ crate::config::key::GenericErrorWithValue,
+ > {
+ use gix_protocol::transport::client::http::options::ProxyAuthMethod;
+
+ use crate::bstr::ByteSlice;
+ Ok(match value.as_ref().as_bytes() {
+ b"anyauth" => ProxyAuthMethod::AnyAuth,
+ b"basic" => ProxyAuthMethod::Basic,
+ b"digest" => ProxyAuthMethod::Digest,
+ b"negotiate" => ProxyAuthMethod::Negotiate,
+ b"ntlm" => ProxyAuthMethod::Ntlm,
+ _ => {
+ return Err(crate::config::key::GenericErrorWithValue::from_value(
+ self,
+ value.into_owned(),
+ ))
+ }
+ })
+ }
+ }
+
+ #[cfg(any(
+ feature = "blocking-http-transport-reqwest",
+ feature = "blocking-http-transport-curl"
+ ))]
+ impl SslVersion {
+ pub fn try_into_ssl_version(
+ &'static self,
+ value: std::borrow::Cow<'_, crate::bstr::BStr>,
+ ) -> Result<gix_protocol::transport::client::http::options::SslVersion, crate::config::ssl_version::Error>
+ {
+ use gix_protocol::transport::client::http::options::SslVersion::*;
+
+ use crate::bstr::ByteSlice;
+ Ok(match value.as_ref().as_bytes() {
+ b"default" | b"" => Default,
+ b"tlsv1" => TlsV1,
+ b"sslv2" => SslV2,
+ b"sslv3" => SslV3,
+ b"tlsv1.0" => TlsV1_0,
+ b"tlsv1.1" => TlsV1_1,
+ b"tlsv1.2" => TlsV1_2,
+ b"tlsv1.3" => TlsV1_3,
+ _ => return Err(crate::config::ssl_version::Error::from_value(self, value.into_owned())),
+ })
+ }
+ }
+}
+
+pub mod validate {
+ use std::error::Error;
+
+ use crate::{
+ bstr::{BStr, ByteSlice},
+ config::tree::keys::Validate,
+ };
+
+ pub struct SslVersion;
+ impl Validate for SslVersion {
+ fn validate(&self, _value: &BStr) -> Result<(), Box<dyn Error + Send + Sync + 'static>> {
+ #[cfg(any(
+ feature = "blocking-http-transport-reqwest",
+ feature = "blocking-http-transport-curl"
+ ))]
+ super::Http::SSL_VERSION.try_into_ssl_version(std::borrow::Cow::Borrowed(_value))?;
+
+ Ok(())
+ }
+ }
+
+ pub struct ProxyAuthMethod;
+ impl Validate for ProxyAuthMethod {
+ fn validate(&self, _value: &BStr) -> Result<(), Box<dyn Error + Send + Sync + 'static>> {
+ #[cfg(any(
+ feature = "blocking-http-transport-reqwest",
+ feature = "blocking-http-transport-curl"
+ ))]
+ super::Http::PROXY_AUTH_METHOD.try_into_proxy_auth_method(std::borrow::Cow::Borrowed(_value))?;
+
+ Ok(())
+ }
+ }
+
+ pub struct Version;
+ impl Validate for Version {
+ fn validate(&self, _value: &BStr) -> Result<(), Box<dyn Error + Send + Sync + 'static>> {
+ #[cfg(any(
+ feature = "blocking-http-transport-reqwest",
+ feature = "blocking-http-transport-curl"
+ ))]
+ super::Http::VERSION.try_into_http_version(std::borrow::Cow::Borrowed(_value))?;
+
+ Ok(())
+ }
+ }
+
+ pub struct ExtraHeader;
+ impl Validate for ExtraHeader {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn Error + Send + Sync + 'static>> {
+ value.to_str()?;
+ Ok(())
+ }
+ }
+
+ pub struct FollowRedirects;
+ impl Validate for FollowRedirects {
+ fn validate(&self, _value: &BStr) -> Result<(), Box<dyn Error + Send + Sync + 'static>> {
+ #[cfg(any(
+ feature = "blocking-http-transport-reqwest",
+ feature = "blocking-http-transport-curl"
+ ))]
+ super::Http::FOLLOW_REDIRECTS.try_into_follow_redirects(std::borrow::Cow::Borrowed(_value), || {
+ gix_config::Boolean::try_from(_value).map(|b| Some(b.0))
+ })?;
+ Ok(())
+ }
+ }
+}
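
A sketch of the `http.extraHeader` accumulation above, where an empty value clears everything collected so far; paths are assumed as before:

// Sketch only: `gix::config::tree::Http` is the assumed public path of the key above.
use gix::bstr::ByteSlice;
use gix::config::tree::Http;

fn main() {
    let headers = Http::EXTRA_HEADER
        .try_into_extra_header(vec![
            b"X-One: 1".as_bstr().into(),
            b"".as_bstr().into(), // an empty value clears all previously collected headers
            b"X-Two: 2".as_bstr().into(),
        ])
        .expect("all values are well-formed UTF-8");
    assert_eq!(headers, vec!["X-Two: 2".to_string()]);
}
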
diff --git a/vendor/gix/src/config/tree/sections/init.rs b/vendor/gix/src/config/tree/sections/init.rs
new file mode 100644
index 000000000..de42d3b62
--- /dev/null
+++ b/vendor/gix/src/config/tree/sections/init.rs
@@ -0,0 +1,20 @@
+use crate::{
+ config,
+ config::tree::{keys, Init, Key, Section},
+};
+
+impl Init {
+ /// The `init.defaultBranch` key.
+ pub const DEFAULT_BRANCH: keys::Any = keys::Any::new("defaultBranch", &config::Tree::INIT)
+ .with_deviation("If not set, we use `main` instead of `master`");
+}
+
+impl Section for Init {
+ fn name(&self) -> &str {
+ "init"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::DEFAULT_BRANCH]
+ }
+}
diff --git a/vendor/gix/src/config/tree/sections/mod.rs b/vendor/gix/src/config/tree/sections/mod.rs
new file mode 100644
index 000000000..fb9b50786
--- /dev/null
+++ b/vendor/gix/src/config/tree/sections/mod.rs
@@ -0,0 +1,96 @@
+#![allow(missing_docs)]
+
+/// The `author` top-level section.
+#[derive(Copy, Clone, Default)]
+pub struct Author;
+mod author;
+
+/// The `branch` top-level section.
+#[derive(Copy, Clone, Default)]
+pub struct Branch;
+pub mod branch;
+
+/// The `checkout` top-level section.
+#[derive(Copy, Clone, Default)]
+pub struct Checkout;
+pub mod checkout;
+
+/// The `clone` top-level section.
+#[derive(Copy, Clone, Default)]
+pub struct Clone;
+mod clone;
+
+/// The `committer` top-level section.
+#[derive(Copy, Clone, Default)]
+pub struct Committer;
+mod committer;
+
+/// The `core` top-level section.
+#[derive(Copy, Clone, Default)]
+pub struct Core;
+pub mod core;
+
+/// The `credential` top-level section.
+#[derive(Copy, Clone, Default)]
+pub struct Credential;
+pub mod credential;
+
+/// The `diff` top-level section.
+#[derive(Copy, Clone, Default)]
+pub struct Diff;
+pub mod diff;
+
+/// The `extension` top-level section.
+#[derive(Copy, Clone, Default)]
+pub struct Extensions;
+pub mod extensions;
+
+/// The `gitoxide` top-level section.
+#[derive(Copy, Clone, Default)]
+pub struct Gitoxide;
+pub mod gitoxide;
+
+/// The `http` top-level section.
+#[derive(Copy, Clone, Default)]
+pub struct Http;
+pub mod http;
+
+/// The `init` top-level section.
+#[derive(Copy, Clone, Default)]
+pub struct Init;
+mod init;
+
+/// The `pack` top-level section.
+#[derive(Copy, Clone, Default)]
+pub struct Pack;
+pub mod pack;
+
+/// The `protocol` top-level section.
+#[derive(Copy, Clone, Default)]
+pub struct Protocol;
+pub mod protocol;
+
+/// The `remote` top-level section.
+#[derive(Copy, Clone, Default)]
+pub struct Remote;
+pub mod remote;
+
+/// The `safe` top-level section.
+#[derive(Copy, Clone, Default)]
+pub struct Safe;
+mod safe;
+
+/// The `ssh` top-level section.
+#[derive(Copy, Clone, Default)]
+pub struct Ssh;
+pub mod ssh;
+
+/// The `user` top-level section.
+#[derive(Copy, Clone, Default)]
+pub struct User;
+mod user;
+
+/// The `url` top-level section.
+#[derive(Copy, Clone, Default)]
+pub struct Url;
+mod url;
diff --git a/vendor/gix/src/config/tree/sections/pack.rs b/vendor/gix/src/config/tree/sections/pack.rs
new file mode 100644
index 000000000..941817e5b
--- /dev/null
+++ b/vendor/gix/src/config/tree/sections/pack.rs
@@ -0,0 +1,64 @@
+use crate::{
+ config,
+ config::tree::{keys, Key, Pack, Section},
+};
+
+impl Pack {
+ /// The `pack.threads` key.
+ pub const THREADS: keys::UnsignedInteger =
+ keys::UnsignedInteger::new_unsigned_integer("threads", &config::Tree::PACK)
+ .with_deviation("Leaving this key unspecified uses all available cores, instead of 1");
+
+ /// The `pack.indexVersion` key.
+ pub const INDEX_VERSION: IndexVersion =
+ IndexVersion::new_with_validate("indexVersion", &config::Tree::PACK, validate::IndexVersion);
+}
+
+/// The `pack.indexVersion` key.
+pub type IndexVersion = keys::Any<validate::IndexVersion>;
+
+mod index_version {
+ use crate::{config, config::tree::sections::pack::IndexVersion};
+
+ impl IndexVersion {
+ /// Try to interpret an integer value as index version.
+ pub fn try_into_index_version(
+ &'static self,
+ value: Result<i64, gix_config::value::Error>,
+ ) -> Result<gix_pack::index::Version, config::key::GenericError> {
+ let value = value.map_err(|err| config::key::GenericError::from(self).with_source(err))?;
+ Ok(match value {
+ 1 => gix_pack::index::Version::V1,
+ 2 => gix_pack::index::Version::V2,
+ _ => return Err(config::key::GenericError::from(self)),
+ })
+ }
+ }
+}
+
+impl Section for Pack {
+ fn name(&self) -> &str {
+ "pack"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::THREADS, &Self::INDEX_VERSION]
+ }
+}
+
+mod validate {
+ use crate::{bstr::BStr, config::tree::keys};
+
+ pub struct IndexVersion;
+ impl keys::Validate for IndexVersion {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ super::Pack::INDEX_VERSION.try_into_index_version(gix_config::Integer::try_from(value).and_then(
+ |int| {
+ int.to_decimal()
+ .ok_or_else(|| gix_config::value::Error::new("integer out of range", value))
+ },
+ ))?;
+ Ok(())
+ }
+ }
+}
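
A sketch of `pack.indexVersion` parsing, which only accepts versions 1 and 2; the `gix::config::tree::Pack` path is an assumption:

// Sketch only: `gix::config::tree::Pack` is the assumed public path of the key above.
use gix::config::tree::Pack;

fn main() {
    // Only index versions 1 and 2 exist...
    assert!(Pack::INDEX_VERSION.try_into_index_version(Ok(2)).is_ok());
    // ...so anything else is rejected.
    assert!(Pack::INDEX_VERSION.try_into_index_version(Ok(3)).is_err());
}
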
diff --git a/vendor/gix/src/config/tree/sections/protocol.rs b/vendor/gix/src/config/tree/sections/protocol.rs
new file mode 100644
index 000000000..58e907b0f
--- /dev/null
+++ b/vendor/gix/src/config/tree/sections/protocol.rs
@@ -0,0 +1,85 @@
+use crate::{
+ config,
+ config::tree::{keys, Key, Protocol, Section},
+};
+
+impl Protocol {
+ /// The `protocol.allow` key.
+ pub const ALLOW: Allow = Allow::new_with_validate("allow", &config::Tree::PROTOCOL, validate::Allow);
+
+ /// The `protocol.<name>` subsection
+ pub const NAME_PARAMETER: NameParameter = NameParameter;
+}
+
+/// The `protocol.allow` key type.
+pub type Allow = keys::Any<validate::Allow>;
+
+#[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+mod allow {
+ use std::borrow::Cow;
+
+ use crate::{bstr::BStr, config, config::tree::protocol::Allow, remote::url::scheme_permission};
+
+ impl Allow {
+ /// Convert `value` into its respective `Allow` variant, possibly informing about the `scheme` we are looking at in the error.
+ pub fn try_into_allow(
+ &'static self,
+ value: Cow<'_, BStr>,
+ scheme: Option<&str>,
+ ) -> Result<scheme_permission::Allow, config::protocol::allow::Error> {
+ scheme_permission::Allow::try_from(value).map_err(|value| config::protocol::allow::Error {
+ value,
+ scheme: scheme.map(ToOwned::to_owned),
+ })
+ }
+ }
+}
+
+/// The `protocol.<name>` parameter section.
+pub struct NameParameter;
+
+impl NameParameter {
+ /// The `protocol.<name>.allow` key.
+ pub const ALLOW: Allow = Allow::new_with_validate("allow", &Protocol::NAME_PARAMETER, validate::Allow);
+}
+
+impl Section for NameParameter {
+ fn name(&self) -> &str {
+ "<name>"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::ALLOW]
+ }
+
+ fn parent(&self) -> Option<&dyn Section> {
+ Some(&config::Tree::PROTOCOL)
+ }
+}
+
+impl Section for Protocol {
+ fn name(&self) -> &str {
+ "protocol"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::ALLOW]
+ }
+
+ fn sub_sections(&self) -> &[&dyn Section] {
+ &[&Self::NAME_PARAMETER]
+ }
+}
+
+mod validate {
+ use crate::{bstr::BStr, config::tree::keys};
+
+ pub struct Allow;
+ impl keys::Validate for Allow {
+ fn validate(&self, _value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ #[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+ super::Protocol::ALLOW.try_into_allow(std::borrow::Cow::Borrowed(_value), None)?;
+ Ok(())
+ }
+ }
+}
diff --git a/vendor/gix/src/config/tree/sections/remote.rs b/vendor/gix/src/config/tree/sections/remote.rs
new file mode 100644
index 000000000..b242c9c14
--- /dev/null
+++ b/vendor/gix/src/config/tree/sections/remote.rs
@@ -0,0 +1,101 @@
+use crate::{
+ config,
+ config::tree::{http, keys, Key, Remote, Section, SubSectionRequirement},
+};
+
+const NAME_PARAMETER: Option<SubSectionRequirement> = Some(SubSectionRequirement::Parameter("name"));
+
+impl Remote {
+ /// The `remote.pushDefault` key
+ pub const PUSH_DEFAULT: keys::RemoteName = keys::RemoteName::new_remote_name("pushDefault", &config::Tree::REMOTE);
+ /// The `remote.<name>.tagOpt` key
+ pub const TAG_OPT: TagOpt = TagOpt::new_with_validate("tagOpt", &config::Tree::REMOTE, validate::TagOpt)
+ .with_subsection_requirement(NAME_PARAMETER);
+ /// The `remote.<name>.url` key
+ pub const URL: keys::Url =
+ keys::Url::new_url("url", &config::Tree::REMOTE).with_subsection_requirement(NAME_PARAMETER);
+ /// The `remote.<name>.pushUrl` key
+ pub const PUSH_URL: keys::Url =
+ keys::Url::new_url("pushUrl", &config::Tree::REMOTE).with_subsection_requirement(NAME_PARAMETER);
+ /// The `remote.<name>.fetch` key
+ pub const FETCH: keys::FetchRefSpec = keys::FetchRefSpec::new_fetch_refspec("fetch", &config::Tree::REMOTE)
+ .with_subsection_requirement(NAME_PARAMETER);
+ /// The `remote.<name>.push` key
+ pub const PUSH: keys::PushRefSpec =
+ keys::PushRefSpec::new_push_refspec("push", &config::Tree::REMOTE).with_subsection_requirement(NAME_PARAMETER);
+ /// The `remote.<name>.proxy` key
+ pub const PROXY: keys::String =
+ keys::String::new_string("proxy", &config::Tree::REMOTE).with_subsection_requirement(NAME_PARAMETER);
+ /// The `remote.<name>.proxyAuthMethod` key.
+ pub const PROXY_AUTH_METHOD: http::ProxyAuthMethod =
+ http::ProxyAuthMethod::new_proxy_auth_method("proxyAuthMethod", &config::Tree::REMOTE)
+ .with_subsection_requirement(NAME_PARAMETER)
+ .with_deviation("implemented like git, but never actually tried");
+}
+
+impl Section for Remote {
+ fn name(&self) -> &str {
+ "remote"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[
+ &Self::PUSH_DEFAULT,
+ &Self::TAG_OPT,
+ &Self::URL,
+ &Self::PUSH_URL,
+ &Self::FETCH,
+ &Self::PUSH,
+ &Self::PROXY,
+ &Self::PROXY_AUTH_METHOD,
+ ]
+ }
+}
+
+/// The `remote.<name>.tagOpt` key type.
+pub type TagOpt = keys::Any<validate::TagOpt>;
+
+mod tag_opts {
+ use std::borrow::Cow;
+
+ use crate::{
+ bstr::{BStr, ByteSlice},
+ config,
+ config::tree::remote::TagOpt,
+ remote,
+ };
+
+ impl TagOpt {
+ /// Try to interpret `value` as tag option.
+ ///
+ /// # Note
+ ///
+ /// The accepted values are unfortunately heavily biased towards the git command-line,
+ /// as they are passed on to a sub-process verbatim. In future, more values may be supported
+ /// that are less about passing them to a sub-process.
+ pub fn try_into_tag_opt(
+ &'static self,
+ value: Cow<'_, BStr>,
+ ) -> Result<remote::fetch::Tags, config::key::GenericErrorWithValue> {
+ Ok(match value.as_ref().as_bytes() {
+ b"--tags" => remote::fetch::Tags::All,
+ b"--no-tags" => remote::fetch::Tags::None,
+ _ => return Err(config::key::GenericErrorWithValue::from_value(self, value.into_owned())),
+ })
+ }
+ }
+}
+
+pub mod validate {
+ use std::{borrow::Cow, error::Error};
+
+ use crate::{bstr::BStr, config::tree::keys::Validate};
+
+ pub struct TagOpt;
+ impl Validate for TagOpt {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn Error + Send + Sync + 'static>> {
+ super::Remote::TAG_OPT.try_into_tag_opt(Cow::Borrowed(value))?;
+ Ok(())
+ }
+ }
+}
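
A sketch of `remote.<name>.tagOpt` parsing, which understands only the git-style command-line values; paths are assumed as in the other sketches:

// Sketch only: `gix::config::tree::Remote` is the assumed public path of the key above.
use gix::bstr::ByteSlice;
use gix::config::tree::Remote;

fn main() {
    // Only the git-style command-line values are understood...
    assert!(Remote::TAG_OPT.try_into_tag_opt(b"--no-tags".as_bstr().into()).is_ok());
    // ...everything else is reported back with the offending value.
    assert!(Remote::TAG_OPT.try_into_tag_opt(b"--follow".as_bstr().into()).is_err());
}
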
diff --git a/vendor/gix/src/config/tree/sections/safe.rs b/vendor/gix/src/config/tree/sections/safe.rs
new file mode 100644
index 000000000..e76d28888
--- /dev/null
+++ b/vendor/gix/src/config/tree/sections/safe.rs
@@ -0,0 +1,27 @@
+use crate::{
+ config,
+ config::tree::{keys, Key, Safe, Section},
+};
+
+impl Safe {
+ /// The `safe.directory` key
+ pub const DIRECTORY: keys::Any = keys::Any::new("directory", &config::Tree::SAFE);
+}
+
+impl Safe {
+ /// Implements the directory filter to trust only global and system files, for use with `safe.directory`.
+ pub fn directory_filter(meta: &gix_config::file::Metadata) -> bool {
+ let kind = meta.source.kind();
+ kind == gix_config::source::Kind::System || kind == gix_config::source::Kind::Global
+ }
+}
+
+impl Section for Safe {
+ fn name(&self) -> &str {
+ "safe"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::DIRECTORY]
+ }
+}
diff --git a/vendor/gix/src/config/tree/sections/ssh.rs b/vendor/gix/src/config/tree/sections/ssh.rs
new file mode 100644
index 000000000..600ee663b
--- /dev/null
+++ b/vendor/gix/src/config/tree/sections/ssh.rs
@@ -0,0 +1,65 @@
+use crate::{
+ config,
+ config::tree::{keys, Key, Section, Ssh},
+};
+
+impl Ssh {
+ /// The `ssh.variant` key
+ pub const VARIANT: Variant = Variant::new_with_validate("variant", &config::Tree::SSH, validate::Variant)
+ .with_environment_override("GIT_SSH_VARIANT")
+ .with_deviation("We error if a variant is chosen that we don't know, as opposed to defaulting to 'ssh'");
+}
+
+/// The `ssh.variant` key.
+pub type Variant = keys::Any<validate::Variant>;
+
+#[cfg(feature = "blocking-network-client")]
+mod variant {
+ use std::borrow::Cow;
+
+ use crate::{bstr::BStr, config, config::tree::ssh::Variant};
+
+ impl Variant {
+ pub fn try_into_variant(
+ &'static self,
+ value: Cow<'_, BStr>,
+ ) -> Result<Option<gix_protocol::transport::client::ssh::ProgramKind>, config::key::GenericErrorWithValue>
+ {
+ use gix_protocol::transport::client::ssh::ProgramKind;
+
+ use crate::bstr::ByteSlice;
+ Ok(Some(match value.as_ref().as_bytes() {
+ b"auto" => return Ok(None),
+ b"ssh" => ProgramKind::Ssh,
+ b"plink" => ProgramKind::Plink,
+ b"putty" => ProgramKind::Putty,
+ b"tortoiseplink" => ProgramKind::TortoisePlink,
+ b"simple" => ProgramKind::Simple,
+ _ => return Err(config::key::GenericErrorWithValue::from_value(self, value.into_owned())),
+ }))
+ }
+ }
+}
+
+impl Section for Ssh {
+ fn name(&self) -> &str {
+ "ssh"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::VARIANT]
+ }
+}
+
+mod validate {
+ use crate::{bstr::BStr, config::tree::keys};
+
+ pub struct Variant;
+ impl keys::Validate for Variant {
+ fn validate(&self, _value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ #[cfg(feature = "blocking-network-client")]
+ super::Ssh::VARIANT.try_into_variant(_value.into())?;
+ Ok(())
+ }
+ }
+}
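A sketch of interpreting `ssh.variant` through this key, assuming the `blocking-network-client` feature is enabled (the `try_into_variant()` method is only compiled then):

```rust
use std::borrow::Cow;

use gix::bstr::BStr;
use gix::config::tree::Ssh;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // A concrete variant selects an ssh program kind directly...
    let plink = Ssh::VARIANT.try_into_variant(Cow::Borrowed(BStr::new("plink")))?;
    assert!(plink.is_some());
    // ...while `auto` yields `None`, leaving the choice to runtime detection.
    let auto = Ssh::VARIANT.try_into_variant(Cow::Borrowed(BStr::new("auto")))?;
    assert!(auto.is_none());
    Ok(())
}
```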
diff --git a/vendor/gix/src/config/tree/sections/url.rs b/vendor/gix/src/config/tree/sections/url.rs
new file mode 100644
index 000000000..6a9c0bfdb
--- /dev/null
+++ b/vendor/gix/src/config/tree/sections/url.rs
@@ -0,0 +1,25 @@
+use crate::{
+ config,
+ config::tree::{keys, Key, Section, SubSectionRequirement, Url},
+};
+
+const BASE_PARAMETER: Option<SubSectionRequirement> = Some(SubSectionRequirement::Parameter("base"));
+
+impl Url {
+ /// The `url.<base>.insteadOf` key
+ pub const INSTEAD_OF: keys::Any =
+ keys::Any::new("insteadOf", &config::Tree::URL).with_subsection_requirement(BASE_PARAMETER);
+ /// The `url.<base>.pushInsteadOf` key
+ pub const PUSH_INSTEAD_OF: keys::Any =
+ keys::Any::new("pushInsteadOf", &config::Tree::URL).with_subsection_requirement(BASE_PARAMETER);
+}
+
+impl Section for Url {
+ fn name(&self) -> &str {
+ "url"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::INSTEAD_OF, &Self::PUSH_INSTEAD_OF]
+ }
+}
diff --git a/vendor/gix/src/config/tree/sections/user.rs b/vendor/gix/src/config/tree/sections/user.rs
new file mode 100644
index 000000000..d1f4f7102
--- /dev/null
+++ b/vendor/gix/src/config/tree/sections/user.rs
@@ -0,0 +1,22 @@
+use crate::{
+ config,
+ config::tree::{gitoxide, keys, Key, Section, User},
+};
+
+impl User {
+ /// The `user.name` key
+ pub const NAME: keys::Any = keys::Any::new("name", &config::Tree::USER);
+ /// The `user.email` key
+ pub const EMAIL: keys::Any =
+ keys::Any::new("email", &config::Tree::USER).with_fallback(&gitoxide::User::EMAIL_FALLBACK);
+}
+
+impl Section for User {
+ fn name(&self) -> &str {
+ "user"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::NAME, &Self::EMAIL]
+ }
+}
diff --git a/vendor/gix/src/config/tree/traits.rs b/vendor/gix/src/config/tree/traits.rs
new file mode 100644
index 000000000..7cfd7aac4
--- /dev/null
+++ b/vendor/gix/src/config/tree/traits.rs
@@ -0,0 +1,199 @@
+use crate::{
+ bstr::{BStr, BString, ByteVec},
+ config::tree::key::validate_assignment,
+};
+
+/// Provide information about a configuration section.
+pub trait Section {
+ /// The section name, like `remote` in `remote.origin.url`.
+ fn name(&self) -> &str;
+ /// The keys directly underneath it for carrying configuration values.
+ fn keys(&self) -> &[&dyn Key];
+ /// The list of sub-section names, which may be empty if there are no statically known sub-sections.
+ fn sub_sections(&self) -> &[&dyn Section] {
+ &[]
+ }
+ /// The parent section if this is a statically known sub-section.
+ fn parent(&self) -> Option<&dyn Section> {
+ None
+ }
+}
+
+/// Determine how subsections may be used with a given key, suitable for obtaining the full name for use in assignments.
+#[derive(Debug, Copy, Clone)]
+pub enum SubSectionRequirement {
+ /// Subsections must not be used, this key can only be below a section.
+ Never,
+ /// The sub-section is used as parameter with the given name.
+ Parameter(&'static str),
+}
+
+/// A way to link a key with other resources.
+#[derive(Debug, Copy, Clone)]
+pub enum Link {
+ /// The environment variable of the given name will override the value of this key.
+ EnvironmentOverride(&'static str),
+ /// This config key is used as fallback if this key isn't set.
+ FallbackKey(&'static dyn Key),
+}
+
+/// A note attached to a key.
+#[derive(Debug, Copy, Clone)]
+pub enum Note {
+ /// A piece of information related to a key to help the user.
+ Informative(&'static str),
+ /// This key works differently than is described by git, explaining the deviation further.
+ Deviation(&'static str),
+}
+
+/// A leaf-level entry in the git configuration, like `url` in `remote.origin.url`.
+pub trait Key: std::fmt::Debug {
+ /// The key's name, like `url` in `remote.origin.url`.
+ fn name(&self) -> &str;
+ /// See if `value` is allowed as value of this key, or return a descriptive error if it is not.
+ fn validate(&self, value: &BStr) -> Result<(), crate::config::tree::key::validate::Error>;
+ /// The section containing this key. Git configuration has no free-standing keys, they are always underneath a section.
+ fn section(&self) -> &dyn Section;
+ /// The return value encodes three possible states to indicate subsection requirements:
+ /// * `None` = subsections may or may not be used, the most flexible setting.
+ /// * `Some([Requirement][SubSectionRequirement])` = subsections must or must not be used, depending on the value.
+ fn subsection_requirement(&self) -> Option<&SubSectionRequirement> {
+ Some(&SubSectionRequirement::Never)
+ }
+ /// Return the link to other resources, if available.
+ fn link(&self) -> Option<&Link> {
+ None
+ }
+ /// Return a note about this key, if available.
+ fn note(&self) -> Option<&Note> {
+ None
+ }
+
+ /// Return the name of an environment variable that would override this value (after following links until one is found).
+ fn environment_override(&self) -> Option<&str> {
+ let mut cursor = self.link()?;
+ loop {
+ match cursor {
+ Link::EnvironmentOverride(name) => return Some(name),
+ Link::FallbackKey(next) => {
+ cursor = next.link()?;
+ }
+ }
+ }
+ }
+
+ /// Return the environment override that must be set on this key.
+ /// # Panics
+ /// If no environment variable is set
+ fn the_environment_override(&self) -> &str {
+ self.environment_override()
+ .expect("BUG: environment override must be set")
+ }
+ /// Produce a name that describes how the name is composed. This is `core.bare` for statically known keys, or `branch.<name>.key`
+ /// for complex ones.
+ fn logical_name(&self) -> String {
+ let section = self.section();
+ let mut buf = String::new();
+ let parameter = if let Some(parent) = section.parent() {
+ buf.push_str(parent.name());
+ buf.push('.');
+ None
+ } else {
+ self.subsection_requirement().and_then(|requirement| match requirement {
+ SubSectionRequirement::Parameter(name) => Some(name),
+ SubSectionRequirement::Never => None,
+ })
+ };
+ buf.push_str(section.name());
+ buf.push('.');
+ if let Some(parameter) = parameter {
+ buf.push('<');
+ buf.push_str(parameter);
+ buf.push('>');
+ buf.push('.');
+ }
+ buf.push_str(self.name());
+ buf
+ }
+
+ /// The full name of the key for use in configuration overrides, like `core.bare`, or `remote.<subsection>.url` if `subsection` is
+ /// not `None`.
+ /// May fail if this key requires a subsection but none is provided, or if a subsection is provided but not allowed.
+ fn full_name(&self, subsection: Option<&BStr>) -> Result<BString, String> {
+ let section = self.section();
+ let mut buf = BString::default();
+ let subsection = match self.subsection_requirement() {
+ None => subsection,
+ Some(requirement) => match (requirement, subsection) {
+ (SubSectionRequirement::Never, Some(_)) => {
+ return Err(format!(
+ "The key named '{}' cannot be used with non-static subsections.",
+ self.logical_name()
+ ));
+ }
+ (SubSectionRequirement::Parameter(_), None) => {
+ return Err(format!(
+ "The key named '{}' cannot be used without subsections.",
+ self.logical_name()
+ ))
+ }
+ _ => subsection,
+ },
+ };
+
+ if let Some(parent) = section.parent() {
+ buf.push_str(parent.name());
+ buf.push(b'.');
+ }
+ buf.push_str(section.name());
+ buf.push(b'.');
+ if let Some(subsection) = subsection {
+ debug_assert!(
+ section.parent().is_none(),
+ "BUG: sections with parameterized sub-sections must be top-level sections"
+ );
+ buf.push_str(subsection);
+ buf.push(b'.');
+ }
+ buf.push_str(self.name());
+ Ok(buf)
+ }
+
+ /// Return an assignment of the key's full name to `value`, suitable for [configuration overrides][crate::open::Options::config_overrides()].
+ /// Note that this will fail if the key requires a subsection name.
+ fn validated_assignment(&self, value: &BStr) -> Result<BString, validate_assignment::Error> {
+ self.validate(value)?;
+ let mut key = self
+ .full_name(None)
+ .map_err(|message| validate_assignment::Error::Name { message })?;
+ key.push(b'=');
+ key.push_str(value);
+ Ok(key)
+ }
+
+ /// Return an assignment of the key's full name to `value`, suitable for [configuration overrides][crate::open::Options::config_overrides()].
+ /// Note that this will fail if the key requires a subsection name.
+ fn validated_assignment_fmt(
+ &self,
+ value: &dyn std::fmt::Display,
+ ) -> Result<BString, crate::config::tree::key::validate_assignment::Error> {
+ let value = value.to_string();
+ self.validated_assignment(value.as_str().into())
+ }
+
+ /// Return an assignment to `value` with the key's full name within `subsection`, suitable for [configuration overrides][crate::open::Options::config_overrides()].
+ /// Note that this is only valid if this key supports parameterized sub-sections, or else an error is returned.
+ fn validated_assignment_with_subsection(
+ &self,
+ value: &BStr,
+ subsection: &BStr,
+ ) -> Result<BString, crate::config::tree::key::validate_assignment::Error> {
+ self.validate(value)?;
+ let mut key = self
+ .full_name(Some(subsection))
+ .map_err(|message| validate_assignment::Error::Name { message })?;
+ key.push(b'=');
+ key.push_str(value);
+ Ok(key)
+ }
+}
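To illustrate how `logical_name()`, `full_name()` and the assignment helpers compose key names, here is a hedged sketch using the `Remote` section from above; the `origin` subsection and the URL value are made up for the example:

```rust
use gix::config::tree::{Key, Remote};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // A parameterized key describes itself with a placeholder...
    println!("{}", Remote::URL.logical_name()); // typically "remote.<name>.url"

    // ...and needs a concrete subsection to form a usable name.
    let name = Remote::URL.full_name(Some("origin".into()))?;
    assert_eq!(name, "remote.origin.url");

    // Assignments like these can be fed into configuration overrides.
    let assignment = Remote::URL
        .validated_assignment_with_subsection("https://example.com/repo.git".into(), "origin".into())?;
    assert_eq!(assignment, "remote.origin.url=https://example.com/repo.git");
    Ok(())
}
```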
diff --git a/vendor/gix/src/create.rs b/vendor/gix/src/create.rs
new file mode 100644
index 000000000..96d047e3b
--- /dev/null
+++ b/vendor/gix/src/create.rs
@@ -0,0 +1,251 @@
+use std::{
+ convert::TryFrom,
+ fs::{self, OpenOptions},
+ io::Write,
+ path::{Path, PathBuf},
+};
+
+use gix_config::parse::section;
+use gix_discover::DOT_GIT_DIR;
+
+/// The error used in [`into()`].
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum Error {
+ #[error("Could not obtain the current directory")]
+ CurrentDir(#[from] std::io::Error),
+ #[error("Could not open data at '{}'", .path.display())]
+ IoOpen { source: std::io::Error, path: PathBuf },
+ #[error("Could not write data at '{}'", .path.display())]
+ IoWrite { source: std::io::Error, path: PathBuf },
+ #[error("Refusing to initialize the existing '{}' directory", .path.display())]
+ DirectoryExists { path: PathBuf },
+ #[error("Refusing to initialize the non-empty directory as '{}'", .path.display())]
+ DirectoryNotEmpty { path: PathBuf },
+ #[error("Could not create directory at '{}'", .path.display())]
+ CreateDirectory { source: std::io::Error, path: PathBuf },
+}
+
+/// The kind of repository to create.
+#[derive(Debug, Copy, Clone)]
+pub enum Kind {
+ /// An empty repository with a `.git` folder, setup to contain files in its worktree.
+ WithWorktree,
+ /// A bare repository without a worktree.
+ Bare,
+}
+
+const TPL_INFO_EXCLUDE: &[u8] = include_bytes!("assets/baseline-init/info/exclude");
+const TPL_HOOKS_APPLYPATCH_MSG: &[u8] = include_bytes!("assets/baseline-init/hooks/applypatch-msg.sample");
+const TPL_HOOKS_COMMIT_MSG: &[u8] = include_bytes!("assets/baseline-init/hooks/commit-msg.sample");
+const TPL_HOOKS_FSMONITOR_WATCHMAN: &[u8] = include_bytes!("assets/baseline-init/hooks/fsmonitor-watchman.sample");
+const TPL_HOOKS_POST_UPDATE: &[u8] = include_bytes!("assets/baseline-init/hooks/post-update.sample");
+const TPL_HOOKS_PRE_APPLYPATCH: &[u8] = include_bytes!("assets/baseline-init/hooks/pre-applypatch.sample");
+const TPL_HOOKS_PRE_COMMIT: &[u8] = include_bytes!("assets/baseline-init/hooks/pre-commit.sample");
+const TPL_HOOKS_PRE_MERGE_COMMIT: &[u8] = include_bytes!("assets/baseline-init/hooks/pre-merge-commit.sample");
+const TPL_HOOKS_PRE_PUSH: &[u8] = include_bytes!("assets/baseline-init/hooks/pre-push.sample");
+const TPL_HOOKS_PRE_REBASE: &[u8] = include_bytes!("assets/baseline-init/hooks/pre-rebase.sample");
+const TPL_HOOKS_PRE_RECEIVE: &[u8] = include_bytes!("assets/baseline-init/hooks/pre-receive.sample");
+const TPL_HOOKS_PREPARE_COMMIT_MSG: &[u8] = include_bytes!("assets/baseline-init/hooks/prepare-commit-msg.sample");
+const TPL_HOOKS_UPDATE: &[u8] = include_bytes!("assets/baseline-init/hooks/update.sample");
+const TPL_DESCRIPTION: &[u8] = include_bytes!("assets/baseline-init/description");
+const TPL_HEAD: &[u8] = include_bytes!("assets/baseline-init/HEAD");
+
+struct PathCursor<'a>(&'a mut PathBuf);
+
+struct NewDir<'a>(&'a mut PathBuf);
+
+impl<'a> PathCursor<'a> {
+ fn at(&mut self, component: &str) -> &Path {
+ self.0.push(component);
+ self.0.as_path()
+ }
+}
+
+impl<'a> NewDir<'a> {
+ fn at(self, component: &str) -> Result<Self, Error> {
+ self.0.push(component);
+ create_dir(self.0)?;
+ Ok(self)
+ }
+ fn as_mut(&mut self) -> &mut PathBuf {
+ self.0
+ }
+}
+
+impl<'a> Drop for NewDir<'a> {
+ fn drop(&mut self) {
+ self.0.pop();
+ }
+}
+
+impl<'a> Drop for PathCursor<'a> {
+ fn drop(&mut self) {
+ self.0.pop();
+ }
+}
+
+fn write_file(data: &[u8], path: &Path) -> Result<(), Error> {
+ let mut file = OpenOptions::new()
+ .write(true)
+ .create(true)
+ .append(false)
+ .open(path)
+ .map_err(|e| Error::IoOpen {
+ source: e,
+ path: path.to_owned(),
+ })?;
+ file.write_all(data).map_err(|e| Error::IoWrite {
+ source: e,
+ path: path.to_owned(),
+ })
+}
+
+fn create_dir(p: &Path) -> Result<(), Error> {
+ fs::create_dir_all(p).map_err(|e| Error::CreateDirectory {
+ source: e,
+ path: p.to_owned(),
+ })
+}
+
+/// Options for use in [`into()`].
+#[derive(Copy, Clone, Default)]
+pub struct Options {
+ /// If true, and the kind of repository to create has a worktree, then the destination directory must be empty.
+ ///
+ /// By default, repositories with a worktree can be initialized into a non-empty directory as long as there is no `.git` directory.
+ pub destination_must_be_empty: bool,
+ /// If set, use these filesystem capabilities to populate the respective gix-config fields.
+ /// If `None`, the directory will be probed.
+ pub fs_capabilities: Option<gix_worktree::fs::Capabilities>,
+}
+
+/// Create a new `.git` repository of `kind` within the possibly non-existing `directory`
+/// and return its path.
+/// Note that this is a simple template-based initialization routine which should be accompanied by additional corrections
+/// to respect git configuration, which is accomplished by [its callers][crate::ThreadSafeRepository::init_opts()]
+/// that return a [Repository][crate::Repository].
+pub fn into(
+ directory: impl Into<PathBuf>,
+ kind: Kind,
+ Options {
+ fs_capabilities,
+ destination_must_be_empty,
+ }: Options,
+) -> Result<gix_discover::repository::Path, Error> {
+ let mut dot_git = directory.into();
+ let bare = matches!(kind, Kind::Bare);
+
+ if bare || destination_must_be_empty {
+ let num_entries_in_dot_git = fs::read_dir(&dot_git)
+ .or_else(|err| {
+ if err.kind() == std::io::ErrorKind::NotFound {
+ fs::create_dir(&dot_git).and_then(|_| fs::read_dir(&dot_git))
+ } else {
+ Err(err)
+ }
+ })
+ .map_err(|err| Error::IoOpen {
+ source: err,
+ path: dot_git.clone(),
+ })?
+ .count();
+ if num_entries_in_dot_git != 0 {
+ return Err(Error::DirectoryNotEmpty { path: dot_git });
+ }
+ }
+
+ if !bare {
+ dot_git.push(DOT_GIT_DIR);
+
+ if dot_git.is_dir() {
+ return Err(Error::DirectoryExists { path: dot_git });
+ }
+ };
+ create_dir(&dot_git)?;
+
+ {
+ let mut cursor = NewDir(&mut dot_git).at("info")?;
+ write_file(TPL_INFO_EXCLUDE, PathCursor(cursor.as_mut()).at("exclude"))?;
+ }
+
+ {
+ let mut cursor = NewDir(&mut dot_git).at("hooks")?;
+ for (tpl, filename) in &[
+ (TPL_HOOKS_UPDATE, "update.sample"),
+ (TPL_HOOKS_PREPARE_COMMIT_MSG, "prepare-commit-msg.sample"),
+ (TPL_HOOKS_PRE_RECEIVE, "pre-receive.sample"),
+ (TPL_HOOKS_PRE_REBASE, "pre-rebase.sample"),
+ (TPL_HOOKS_PRE_PUSH, "pre-push.sample"),
+ (TPL_HOOKS_PRE_COMMIT, "pre-commit.sample"),
+ (TPL_HOOKS_PRE_MERGE_COMMIT, "pre-merge-commit.sample"),
+ (TPL_HOOKS_PRE_APPLYPATCH, "pre-applypatch.sample"),
+ (TPL_HOOKS_POST_UPDATE, "post-update.sample"),
+ (TPL_HOOKS_FSMONITOR_WATCHMAN, "fsmonitor-watchman.sample"),
+ (TPL_HOOKS_COMMIT_MSG, "commit-msg.sample"),
+ (TPL_HOOKS_APPLYPATCH_MSG, "applypatch-msg.sample"),
+ ] {
+ write_file(tpl, PathCursor(cursor.as_mut()).at(filename))?;
+ }
+ }
+
+ {
+ let mut cursor = NewDir(&mut dot_git).at("objects")?;
+ create_dir(PathCursor(cursor.as_mut()).at("info"))?;
+ create_dir(PathCursor(cursor.as_mut()).at("pack"))?;
+ }
+
+ {
+ let mut cursor = NewDir(&mut dot_git).at("refs")?;
+ create_dir(PathCursor(cursor.as_mut()).at("heads"))?;
+ create_dir(PathCursor(cursor.as_mut()).at("tags"))?;
+ }
+
+ for (tpl, filename) in &[(TPL_HEAD, "HEAD"), (TPL_DESCRIPTION, "description")] {
+ write_file(tpl, PathCursor(&mut dot_git).at(filename))?;
+ }
+
+ {
+ let mut config = gix_config::File::default();
+ {
+ let caps = fs_capabilities.unwrap_or_else(|| gix_worktree::fs::Capabilities::probe(&dot_git));
+ let mut core = config.new_section("core", None).expect("valid section name");
+
+ core.push(key("repositoryformatversion"), Some("0".into()));
+ core.push(key("filemode"), Some(bool(caps.executable_bit).into()));
+ core.push(key("bare"), Some(bool(bare).into()));
+ core.push(key("logallrefupdates"), Some(bool(!bare).into()));
+ core.push(key("symlinks"), Some(bool(caps.symlink).into()));
+ core.push(key("ignorecase"), Some(bool(caps.ignore_case).into()));
+ core.push(key("precomposeunicode"), Some(bool(caps.precompose_unicode).into()));
+ }
+ let mut cursor = PathCursor(&mut dot_git);
+ let config_path = cursor.at("config");
+ std::fs::write(config_path, config.to_bstring()).map_err(|err| Error::IoWrite {
+ source: err,
+ path: config_path.to_owned(),
+ })?;
+ }
+
+ Ok(gix_discover::repository::Path::from_dot_git_dir(
+ dot_git,
+ if bare {
+ gix_discover::repository::Kind::Bare
+ } else {
+ gix_discover::repository::Kind::WorkTree { linked_git_dir: None }
+ },
+ std::env::current_dir()?,
+ )
+ .expect("by now the `dot_git` dir is valid as we have accessed it"))
+}
+
+fn key(name: &'static str) -> section::Key<'static> {
+ section::Key::try_from(name).expect("valid key name")
+}
+
+fn bool(v: bool) -> &'static str {
+ match v {
+ true => "true",
+ false => "false",
+ }
+}
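A minimal sketch of calling `into()` directly, assuming the module is exposed as `gix::create` (as its use from the init machinery suggests) and using a made-up target path; most callers will prefer the higher-level init APIs which also apply configuration:

```rust
use gix::create;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Hypothetical destination; for `Kind::Bare` it must be empty or not exist yet.
    let path = create::into(
        "/tmp/example-bare.git",
        create::Kind::Bare,
        create::Options::default(),
    )?;
    let (git_dir, worktree) = path.into_repository_and_work_tree_directories();
    println!("git dir at {:?}, worktree: {:?}", git_dir, worktree);
    Ok(())
}
```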
diff --git a/vendor/gix/src/discover.rs b/vendor/gix/src/discover.rs
new file mode 100644
index 000000000..fa0edfd5f
--- /dev/null
+++ b/vendor/gix/src/discover.rs
@@ -0,0 +1,88 @@
+#![allow(clippy::result_large_err)]
+use std::path::Path;
+
+pub use gix_discover::*;
+
+use crate::{bstr::BString, ThreadSafeRepository};
+
+/// The error returned by [`crate::discover()`].
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum Error {
+ #[error(transparent)]
+ Discover(#[from] upwards::Error),
+ #[error(transparent)]
+ Open(#[from] crate::open::Error),
+}
+
+impl ThreadSafeRepository {
+ /// Try to open a git repository in `directory` and search upwards through its parents until one is found,
+ /// using default trust options, which matter in case the found repository isn't owned by the current user.
+ pub fn discover(directory: impl AsRef<Path>) -> Result<Self, Error> {
+ Self::discover_opts(directory, Default::default(), Default::default())
+ }
+
+ /// Try to open a git repository in `directory` and search upwards through its parents until one is found,
+ /// while applying `options`. Then use the `trust_map` to determine which of our own repository options to use
+ /// for instantiations.
+ ///
+ /// Note that [trust overrides](crate::open::Options::with()) in the `trust_map` are not effective here and we will
+ /// always override it with the determined trust value. This is a precaution as the API user is unable to actually know
+ /// if the directory that is discovered can indeed be trusted (or else they'd have to implement the discovery themselves
+ /// and be sure that no attacker ever gets access to the directory structure). The cost of this is a permission check, which
+ /// seems acceptable.
+ pub fn discover_opts(
+ directory: impl AsRef<Path>,
+ options: upwards::Options<'_>,
+ trust_map: gix_sec::trust::Mapping<crate::open::Options>,
+ ) -> Result<Self, Error> {
+ let (path, trust) = upwards_opts(directory, options)?;
+ let (git_dir, worktree_dir) = path.into_repository_and_work_tree_directories();
+ let mut options = trust_map.into_value_by_level(trust);
+ options.git_dir_trust = trust.into();
+ options.current_dir = Some(std::env::current_dir().map_err(upwards::Error::CurrentDir)?);
+ Self::open_from_paths(git_dir, worktree_dir, options).map_err(Into::into)
+ }
+
+ /// Try to open a git repository directly from the environment.
+ /// If that fails, discover upwards from `directory` until one is found,
+ /// while applying discovery options from the environment.
+ pub fn discover_with_environment_overrides(directory: impl AsRef<Path>) -> Result<Self, Error> {
+ Self::discover_with_environment_overrides_opts(directory, Default::default(), Default::default())
+ }
+
+ /// Try to open a git repository directly from the environment, which reads `GIT_DIR`
+ /// if it is set. If unset, discover upwards from `directory` until one is found,
+ /// while applying `options` with overrides from the environment which includes:
+ ///
+ /// - `GIT_DISCOVERY_ACROSS_FILESYSTEM`
+ /// - `GIT_CEILING_DIRECTORIES`
+ ///
+ /// Finally, use the `trust_map` to determine which of our own repository options to use
+ /// based on the trust level of the effective repository directory.
+ pub fn discover_with_environment_overrides_opts(
+ directory: impl AsRef<Path>,
+ mut options: upwards::Options<'_>,
+ trust_map: gix_sec::trust::Mapping<crate::open::Options>,
+ ) -> Result<Self, Error> {
+ fn apply_additional_environment(mut opts: upwards::Options<'_>) -> upwards::Options<'_> {
+ use crate::bstr::ByteVec;
+
+ if let Some(cross_fs) = std::env::var_os("GIT_DISCOVERY_ACROSS_FILESYSTEM")
+ .and_then(|v| Vec::from_os_string(v).ok().map(BString::from))
+ {
+ if let Ok(b) = gix_config::Boolean::try_from(cross_fs.as_ref()) {
+ opts.cross_fs = b.into();
+ }
+ }
+ opts
+ }
+
+ if std::env::var_os("GIT_DIR").is_some() {
+ return Self::open_with_environment_overrides(directory.as_ref(), trust_map).map_err(Error::Open);
+ }
+
+ options = apply_additional_environment(options.apply_environment());
+ Self::discover_opts(directory, options, trust_map)
+ }
+}
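For the common case, the plumbing above is reachable through the crate-level `discover()` function; a sketch, assuming the process runs somewhere inside a repository:

```rust
fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Walk upwards from the current directory until a repository is found,
    // applying trust settings based on its ownership.
    let repo = gix::discover(".")?;
    println!("using git dir at {:?}", repo.git_dir());
    Ok(())
}
```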
diff --git a/vendor/gix/src/env.rs b/vendor/gix/src/env.rs
new file mode 100644
index 000000000..4c61ceb4e
--- /dev/null
+++ b/vendor/gix/src/env.rs
@@ -0,0 +1,129 @@
+//! Utilities to handle program arguments and other values of interest.
+use std::ffi::{OsStr, OsString};
+
+use crate::bstr::{BString, ByteVec};
+
+/// Returns the name of the agent for identification towards a remote server as statically known when compiling the crate.
+/// Suitable for both `git` servers and HTTP servers, and used unless configured otherwise.
+///
+/// Note that it's meant to be used in conjunction with [`protocol::agent()`][crate::protocol::agent()] which
+/// prepends `git/`.
+pub fn agent() -> &'static str {
+ concat!("oxide-", env!("CARGO_PKG_VERSION"))
+}
+
+/// Equivalent to `std::env::args_os()`, but with precomposed unicode on macOS and other Apple platforms.
+#[cfg(not(target_vendor = "apple"))]
+pub fn args_os() -> impl Iterator<Item = OsString> {
+ std::env::args_os()
+}
+
+/// Equivalent to `std::env::args_os()`, but with precomposed unicode on macOS and other Apple platforms.
+///
+/// Note that this ignores `core.precomposeUnicode` as gix-config isn't available yet. It's enabled by default in modern git, though.
+#[cfg(target_vendor = "apple")]
+pub fn args_os() -> impl Iterator<Item = OsString> {
+ use unicode_normalization::UnicodeNormalization;
+ std::env::args_os().map(|arg| match arg.to_str() {
+ Some(arg) => arg.nfc().collect::<String>().into(),
+ None => arg,
+ })
+}
+
+/// Convert the given `input` into a `BString`, useful for usage in `clap`.
+pub fn os_str_to_bstring(input: &OsStr) -> Option<BString> {
+ Vec::from_os_string(input.into()).map(Into::into).ok()
+}
+
+/// Utilities to collate errors of common operations into one error type.
+///
+/// This is useful as this type can present an API to answer common questions, like whether a network request seems to have failed
+/// spuriously or if the underlying repository seems to be corrupted.
+/// Error collation supports all operations, including opening the repository.
+///
+/// ### Usage
+///
+/// The caller may define a function that specifies the result type as `Result<T, gix::env::collate::{operation}::Error>` to collect
+/// errors into a well-known error type which provides an API for simple queries.
+pub mod collate {
+
+ ///
+ pub mod fetch {
+ /// An error which combines all possible errors when opening a repository, finding remotes and using them to fetch.
+ ///
+ /// It can be used to detect if the repository is likely to be corrupted in some way, or if the fetch failed spuriously
+ /// and thus can be retried.
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error<E: std::error::Error + Send + Sync + 'static = std::convert::Infallible> {
+ #[error(transparent)]
+ Open(#[from] crate::open::Error),
+ #[error(transparent)]
+ FindExistingReference(#[from] crate::reference::find::existing::Error),
+ #[error(transparent)]
+ RemoteInit(#[from] crate::remote::init::Error),
+ #[error(transparent)]
+ FindExistingRemote(#[from] crate::remote::find::existing::Error),
+ #[error(transparent)]
+ CredentialHelperConfig(#[from] crate::config::credential_helpers::Error),
+ #[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+ #[error(transparent)]
+ Connect(#[from] crate::remote::connect::Error),
+ #[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+ #[error(transparent)]
+ PrepareFetch(#[from] crate::remote::fetch::prepare::Error),
+ #[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+ #[error(transparent)]
+ Fetch(#[from] crate::remote::fetch::Error),
+ #[error(transparent)]
+ Other(E),
+ }
+
+ #[cfg(any(feature = "async-network-client", feature = "blocking-network-client"))]
+ impl<E> crate::protocol::transport::IsSpuriousError for Error<E>
+ where
+ E: std::error::Error + Send + Sync + 'static,
+ {
+ fn is_spurious(&self) -> bool {
+ match self {
+ Error::Open(_)
+ | Error::CredentialHelperConfig(_)
+ | Error::RemoteInit(_)
+ | Error::FindExistingReference(_)
+ | Error::FindExistingRemote(_)
+ | Error::Other(_) => false,
+ Error::Connect(err) => err.is_spurious(),
+ Error::PrepareFetch(err) => err.is_spurious(),
+ Error::Fetch(err) => err.is_spurious(),
+ }
+ }
+ }
+
+ /// Queries
+ impl<E> Error<E>
+ where
+ E: std::error::Error + Send + Sync + 'static,
+ {
+ /// Return true if repository corruption caused the failure.
+ pub fn is_corrupted(&self) -> bool {
+ match self {
+ Error::Open(crate::open::Error::NotARepository { .. } | crate::open::Error::Config(_)) => true,
+ #[cfg(any(feature = "async-network-client", feature = "blocking-network-client"))]
+ Error::PrepareFetch(crate::remote::fetch::prepare::Error::RefMap(
+ // Configuration couldn't be accessed or was incomplete.
+ crate::remote::ref_map::Error::GatherTransportConfig { .. }
+ | crate::remote::ref_map::Error::ConfigureCredentials(_),
+ )) => true,
+ // Maybe the value of the configuration was corrupted, or a file couldn't be removed.
+ #[cfg(any(feature = "async-network-client", feature = "blocking-network-client"))]
+ Error::Fetch(
+ crate::remote::fetch::Error::PackThreads(_)
+ | crate::remote::fetch::Error::PackIndexVersion(_)
+ | crate::remote::fetch::Error::RemovePackKeepFile { .. },
+ ) => true,
+ _ => false,
+ }
+ }
+ }
+ }
+}
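A sketch of the collation pattern described above: one function returns the collated error type so that opening the repository and finding the remote share a single error. The repository path and the `origin` remote name are assumptions for the example:

```rust
use std::path::Path;

fn fetch_url(path: &Path) -> Result<Option<String>, gix::env::collate::fetch::Error> {
    let repo = gix::open(path)?; // `?` converts via the `Open` variant...
    let remote = repo.find_remote("origin")?; // ...and via `FindExistingRemote` here.
    Ok(remote
        .url(gix::remote::Direction::Fetch)
        .map(|url| url.to_bstring().to_string()))
}
```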
diff --git a/vendor/gix/src/ext/mod.rs b/vendor/gix/src/ext/mod.rs
new file mode 100644
index 000000000..beb9007fa
--- /dev/null
+++ b/vendor/gix/src/ext/mod.rs
@@ -0,0 +1,9 @@
+pub use object_id::ObjectIdExt;
+pub use reference::ReferenceExt;
+pub use rev_spec::RevSpecExt;
+pub use tree::TreeIterExt;
+
+mod object_id;
+mod reference;
+mod rev_spec;
+mod tree;
diff --git a/vendor/gix/src/ext/object_id.rs b/vendor/gix/src/ext/object_id.rs
new file mode 100644
index 000000000..a4515022b
--- /dev/null
+++ b/vendor/gix/src/ext/object_id.rs
@@ -0,0 +1,34 @@
+use gix_hash::ObjectId;
+use gix_traverse::commit::{ancestors, Ancestors};
+
+pub trait Sealed {}
+
+pub type AncestorsIter<Find> = Ancestors<Find, fn(&gix_hash::oid) -> bool, ancestors::State>;
+
+/// An extension trait to add functionality to [`ObjectId`]s.
+pub trait ObjectIdExt: Sealed {
+ /// Create an iterator over the ancestry of the commits reachable from this id, which must be a commit.
+ fn ancestors<Find, E>(self, find: Find) -> AncestorsIter<Find>
+ where
+ Find: for<'a> FnMut(&gix_hash::oid, &'a mut Vec<u8>) -> Result<gix_object::CommitRefIter<'a>, E>,
+ E: std::error::Error + Send + Sync + 'static;
+
+ /// Infuse this object id with `repo` access.
+ fn attach(self, repo: &crate::Repository) -> crate::Id<'_>;
+}
+
+impl Sealed for ObjectId {}
+
+impl ObjectIdExt for ObjectId {
+ fn ancestors<Find, E>(self, find: Find) -> AncestorsIter<Find>
+ where
+ Find: for<'a> FnMut(&gix_hash::oid, &'a mut Vec<u8>) -> Result<gix_object::CommitRefIter<'a>, E>,
+ E: std::error::Error + Send + Sync + 'static,
+ {
+ Ancestors::new(Some(self), ancestors::State::default(), find)
+ }
+
+ fn attach(self, repo: &crate::Repository) -> crate::Id<'_> {
+ crate::Id::from_id(self, repo)
+ }
+}
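A sketch of the attach-pattern this extension trait provides, assuming the current directory is inside a repository; the detached id is obtained from `HEAD` purely for illustration:

```rust
use gix::prelude::ObjectIdExt;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let repo = gix::discover(".")?;
    // A plain `ObjectId` without any repository access...
    let id = repo.head_id()?.detach();
    // ...regains it by being attached, which makes object lookups possible.
    let object = id.attach(&repo).object()?;
    println!("{} is a {:?}", id, object.kind);
    Ok(())
}
```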
diff --git a/vendor/gix/src/ext/reference.rs b/vendor/gix/src/ext/reference.rs
new file mode 100644
index 000000000..57e4e4fe7
--- /dev/null
+++ b/vendor/gix/src/ext/reference.rs
@@ -0,0 +1,15 @@
+pub trait Sealed {}
+
+impl Sealed for gix_ref::Reference {}
+
+/// Extensions for [references][gix_ref::Reference].
+pub trait ReferenceExt {
+ /// Attach [`Repository`][crate::Repository] to the given reference. It can be detached later with [`detach()`].
+ fn attach(self, repo: &crate::Repository) -> crate::Reference<'_>;
+}
+
+impl ReferenceExt for gix_ref::Reference {
+ fn attach(self, repo: &crate::Repository) -> crate::Reference<'_> {
+ crate::Reference::from_ref(self, repo)
+ }
+}
diff --git a/vendor/gix/src/ext/rev_spec.rs b/vendor/gix/src/ext/rev_spec.rs
new file mode 100644
index 000000000..ed7dc0460
--- /dev/null
+++ b/vendor/gix/src/ext/rev_spec.rs
@@ -0,0 +1,20 @@
+pub trait Sealed {}
+
+impl Sealed for gix_ref::Reference {}
+
+/// Extensions for [revision specifications][gix_revision::Spec].
+pub trait RevSpecExt {
+ /// Attach [`Repository`][crate::Repository] to the given rev-spec.
+ fn attach(self, repo: &crate::Repository) -> crate::revision::Spec<'_>;
+}
+
+impl RevSpecExt for gix_revision::Spec {
+ fn attach(self, repo: &crate::Repository) -> crate::revision::Spec<'_> {
+ crate::revision::Spec {
+ inner: self,
+ first_ref: None,
+ second_ref: None,
+ repo,
+ }
+ }
+}
diff --git a/vendor/gix/src/ext/tree.rs b/vendor/gix/src/ext/tree.rs
new file mode 100644
index 000000000..09220fc40
--- /dev/null
+++ b/vendor/gix/src/ext/tree.rs
@@ -0,0 +1,44 @@
+use std::borrow::BorrowMut;
+
+use gix_hash::oid;
+use gix_object::TreeRefIter;
+use gix_traverse::tree::breadthfirst;
+
+pub trait Sealed {}
+
+/// An extension trait for tree iterators
+pub trait TreeIterExt: Sealed {
+ /// Traverse this tree with `state` being provided to potentially reuse allocations, and `find` being a function to look up trees
+ /// and turn them into iterators.
+ ///
+ /// The `delegate` implements a way to store details about the traversal to allow paying only for what's actually used.
+ /// Since it is expected to store the operation result, _unit_ is returned.
+ fn traverse<StateMut, Find, V>(
+ &self,
+ state: StateMut,
+ find: Find,
+ delegate: &mut V,
+ ) -> Result<(), breadthfirst::Error>
+ where
+ Find: for<'a> FnMut(&oid, &'a mut Vec<u8>) -> Option<TreeRefIter<'a>>,
+ StateMut: BorrowMut<breadthfirst::State>,
+ V: gix_traverse::tree::Visit;
+}
+
+impl<'d> Sealed for TreeRefIter<'d> {}
+
+impl<'d> TreeIterExt for TreeRefIter<'d> {
+ fn traverse<StateMut, Find, V>(
+ &self,
+ state: StateMut,
+ find: Find,
+ delegate: &mut V,
+ ) -> Result<(), breadthfirst::Error>
+ where
+ Find: for<'a> FnMut(&oid, &'a mut Vec<u8>) -> Option<TreeRefIter<'a>>,
+ StateMut: BorrowMut<breadthfirst::State>,
+ V: gix_traverse::tree::Visit,
+ {
+ breadthfirst(self.clone(), state, find, delegate)
+ }
+}
diff --git a/vendor/gix/src/head/log.rs b/vendor/gix/src/head/log.rs
new file mode 100644
index 000000000..6aa7ed1d3
--- /dev/null
+++ b/vendor/gix/src/head/log.rs
@@ -0,0 +1,35 @@
+use std::convert::TryInto;
+
+use gix_hash::ObjectId;
+
+use crate::{
+ bstr::{BString, ByteSlice},
+ Head,
+};
+
+impl<'repo> Head<'repo> {
+ /// Return a platform for obtaining iterators on the reference log associated with the `HEAD` reference.
+ pub fn log_iter(&self) -> gix_ref::file::log::iter::Platform<'static, 'repo> {
+ gix_ref::file::log::iter::Platform {
+ store: &self.repo.refs,
+ name: "HEAD".try_into().expect("HEAD is always valid"),
+ buf: Vec::new(),
+ }
+ }
+
+ /// Return a list of all branch names that were previously checked out, with the first-ever checked out branch
+ /// being the first entry of the list and the most recent one being the last, along with the commit each was pointing to
+ /// at the time.
+ pub fn prior_checked_out_branches(&self) -> std::io::Result<Option<Vec<(BString, ObjectId)>>> {
+ Ok(self.log_iter().all()?.map(|log| {
+ log.filter_map(Result::ok)
+ .filter_map(|line| {
+ line.message
+ .strip_prefix(b"checkout: moving from ")
+ .and_then(|from_to| from_to.find(" to ").map(|pos| &from_to[..pos]))
+ .map(|from_branch| (from_branch.as_bstr().to_owned(), line.previous_oid()))
+ })
+ .collect()
+ }))
+ }
+}
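A sketch of reading the checkout history from the `HEAD` reflog as described above, assuming a repository with an existing reflog:

```rust
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let repo = gix::discover(".")?;
    if let Some(branches) = repo.head()?.prior_checked_out_branches()? {
        for (name, id) in branches {
            // Oldest checkout first, most recent last.
            println!("{name} was checked out at {id}");
        }
    }
    Ok(())
}
```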
diff --git a/vendor/gix/src/head/mod.rs b/vendor/gix/src/head/mod.rs
new file mode 100644
index 000000000..094e78a86
--- /dev/null
+++ b/vendor/gix/src/head/mod.rs
@@ -0,0 +1,122 @@
+//!
+use std::convert::TryInto;
+
+use gix_hash::ObjectId;
+use gix_ref::FullNameRef;
+
+use crate::{
+ ext::{ObjectIdExt, ReferenceExt},
+ Head,
+};
+
+/// Represents the kind of `HEAD` reference.
+#[derive(Clone)]
+pub enum Kind {
+ /// The existing reference the symbolic HEAD points to.
+ ///
+ /// This is the common case.
+ Symbolic(gix_ref::Reference),
+ /// The yet-to-be-created reference the symbolic HEAD refers to.
+ ///
+ /// This is the case in a newly initialized repository.
+ Unborn(gix_ref::FullName),
+ /// The head points to an object directly, not to a symbolic reference.
+ ///
+ /// This state is less common and can occur when checking out commits directly.
+ Detached {
+ /// The object to which the head points to
+ target: ObjectId,
+ /// Possibly the final destination of `target` after following the object chain from tag objects to commits.
+ peeled: Option<ObjectId>,
+ },
+}
+
+impl Kind {
+ /// Attach this instance to a `repo` to produce a [`Head`].
+ pub fn attach(self, repo: &crate::Repository) -> Head<'_> {
+ Head { kind: self, repo }
+ }
+}
+
+/// Access
+impl<'repo> Head<'repo> {
+ /// Returns the name of this reference, always `HEAD`.
+ pub fn name(&self) -> &'static FullNameRef {
+ // TODO: use a statically checked version of this when available.
+ "HEAD".try_into().expect("HEAD is valid")
+ }
+
+ /// Returns the full reference name of this head if it is not detached, or `None` otherwise.
+ pub fn referent_name(&self) -> Option<&FullNameRef> {
+ Some(match &self.kind {
+ Kind::Symbolic(r) => r.name.as_ref(),
+ Kind::Unborn(name) => name.as_ref(),
+ Kind::Detached { .. } => return None,
+ })
+ }
+
+ /// Returns true if this instance is detached, and points to an object directly.
+ pub fn is_detached(&self) -> bool {
+ matches!(self.kind, Kind::Detached { .. })
+ }
+
+ /// Returns true if this instance is not yet born, hence it points to a ref that doesn't exist yet.
+ ///
+ /// This is the case in a newly initialized repository.
+ pub fn is_unborn(&self) -> bool {
+ matches!(self.kind, Kind::Unborn(_))
+ }
+
+ // TODO: tests
+ /// Returns the id the head points to, which isn't possible on unborn heads.
+ pub fn id(&self) -> Option<crate::Id<'repo>> {
+ match &self.kind {
+ Kind::Symbolic(r) => r.target.try_id().map(|oid| oid.to_owned().attach(self.repo)),
+ Kind::Detached { peeled, target } => {
+ (*peeled).unwrap_or_else(|| target.to_owned()).attach(self.repo).into()
+ }
+ Kind::Unborn(_) => None,
+ }
+ }
+
+ /// Try to transform this instance into the symbolic reference that it points to, or return `None` if head is detached or unborn.
+ pub fn try_into_referent(self) -> Option<crate::Reference<'repo>> {
+ match self.kind {
+ Kind::Symbolic(r) => r.attach(self.repo).into(),
+ _ => None,
+ }
+ }
+}
+
+mod remote {
+ use super::Head;
+ use crate::{remote, Remote};
+
+ /// Remote
+ impl<'repo> Head<'repo> {
+ /// Return the remote with which the currently checked out reference can be handled, as configured by `branch.<name>.remote|pushRemote`,
+ /// or fall back to the non-branch-specific remote configuration. `None` is returned if the head is detached or unborn, as there is
+ /// no branch-specific remote then.
+ ///
+ /// This is equivalent to calling [`Reference::remote(…)`][crate::Reference::remote()] and
+ /// [`Repository::remote_default_name()`][crate::Repository::remote_default_name()] in order.
+ ///
+ /// Combine it with [`find_default_remote()`][crate::Repository::find_default_remote()] as a fallback to obtain a remote
+ /// even in case of detached heads.
+ pub fn into_remote(
+ self,
+ direction: remote::Direction,
+ ) -> Option<Result<Remote<'repo>, remote::find::existing::Error>> {
+ let repo = self.repo;
+ self.try_into_referent()?
+ .remote(direction)
+ .or_else(|| repo.find_default_remote(direction))
+ }
+ }
+}
+
+///
+pub mod log;
+
+///
+pub mod peel;
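A sketch of inspecting the `HEAD` kind through this API, assuming the current directory is inside a repository:

```rust
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let repo = gix::discover(".")?;
    let head = repo.head()?;
    match head.referent_name() {
        // `referent_name()` is `Some` for born and unborn branches alike.
        Some(name) if head.is_unborn() => println!("unborn branch {}", name.shorten()),
        Some(name) => println!("on branch {}", name.shorten()),
        None => println!("detached HEAD"),
    }
    Ok(())
}
```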
diff --git a/vendor/gix/src/head/peel.rs b/vendor/gix/src/head/peel.rs
new file mode 100644
index 000000000..65a876bc4
--- /dev/null
+++ b/vendor/gix/src/head/peel.rs
@@ -0,0 +1,119 @@
+use crate::{
+ ext::{ObjectIdExt, ReferenceExt},
+ Head,
+};
+
+mod error {
+ use crate::{object, reference};
+
+ /// The error returned by [Head::peel_to_id_in_place()][super::Head::peel_to_id_in_place()] and [Head::into_fully_peeled_id()][super::Head::into_fully_peeled_id()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ FindExistingObject(#[from] object::find::existing::Error),
+ #[error(transparent)]
+ PeelReference(#[from] reference::peel::Error),
+ }
+}
+
+pub use error::Error;
+
+use crate::head::Kind;
+
+///
+pub mod to_commit {
+ use crate::object;
+
+ /// The error returned by [Head::peel_to_commit_in_place()][super::Head::peel_to_commit_in_place()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ Peel(#[from] super::Error),
+ #[error("Branch '{name}' does not have any commits")]
+ Unborn { name: gix_ref::FullName },
+ #[error(transparent)]
+ ObjectKind(#[from] object::try_into::Error),
+ }
+}
+
+impl<'repo> Head<'repo> {
+ // TODO: tests
+ /// Peel this instance to make obtaining its final target id possible, while returning an error on unborn heads.
+ pub fn peeled(mut self) -> Result<Self, Error> {
+ self.peel_to_id_in_place().transpose()?;
+ Ok(self)
+ }
+
+ // TODO: tests
+ /// Follow the symbolic reference of this head until its target object and peel it by following tag objects until there is no
+ /// more object to follow, and return that object id.
+ ///
+ /// Returns `None` if the head is unborn.
+ pub fn peel_to_id_in_place(&mut self) -> Option<Result<crate::Id<'repo>, Error>> {
+ Some(match &mut self.kind {
+ Kind::Unborn(_name) => return None,
+ Kind::Detached {
+ peeled: Some(peeled), ..
+ } => Ok((*peeled).attach(self.repo)),
+ Kind::Detached { peeled: None, target } => {
+ match target
+ .attach(self.repo)
+ .object()
+ .map_err(Into::into)
+ .and_then(|obj| obj.peel_tags_to_end().map_err(Into::into))
+ .map(|peeled| peeled.id)
+ {
+ Ok(peeled) => {
+ self.kind = Kind::Detached {
+ peeled: Some(peeled),
+ target: *target,
+ };
+ Ok(peeled.attach(self.repo))
+ }
+ Err(err) => Err(err),
+ }
+ }
+ Kind::Symbolic(r) => {
+ let mut nr = r.clone().attach(self.repo);
+ let peeled = nr.peel_to_id_in_place().map_err(Into::into);
+ *r = nr.detach();
+ peeled
+ }
+ })
+ }
+
+ // TODO: tests
+ // TODO: something similar in `crate::Reference`
+ /// Follow the symbolic reference of this head until its target object and peel it by following tag objects until there is no
+ /// more object to follow, transform the id into a commit if possible and return that.
+ ///
+ /// Returns an error if the head is unborn or if it doesn't point to a commit.
+ pub fn peel_to_commit_in_place(&mut self) -> Result<crate::Commit<'repo>, to_commit::Error> {
+ let id = self.peel_to_id_in_place().ok_or_else(|| to_commit::Error::Unborn {
+ name: self.referent_name().expect("unborn").to_owned(),
+ })??;
+ id.object()
+ .map_err(|err| to_commit::Error::Peel(Error::FindExistingObject(err)))
+ .and_then(|object| object.try_into_commit().map_err(Into::into))
+ }
+
+ /// Consume this instance and transform it into the final object that it points to, or `None` if the `HEAD`
+ /// reference is yet to be born.
+ pub fn into_fully_peeled_id(self) -> Option<Result<crate::Id<'repo>, Error>> {
+ Some(match self.kind {
+ Kind::Unborn(_name) => return None,
+ Kind::Detached {
+ peeled: Some(peeled), ..
+ } => Ok(peeled.attach(self.repo)),
+ Kind::Detached { peeled: None, target } => target
+ .attach(self.repo)
+ .object()
+ .map_err(Into::into)
+ .and_then(|obj| obj.peel_tags_to_end().map_err(Into::into))
+ .map(|obj| obj.id.attach(self.repo)),
+ Kind::Symbolic(r) => r.attach(self.repo).peel_to_id_in_place().map_err(Into::into),
+ })
+ }
+}
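A sketch of the most common peeling operation, turning `HEAD` into the commit it ultimately points to; it assumes a repository with at least one commit:

```rust
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let repo = gix::discover(".")?;
    // Follows the symbolic ref and peels tag objects until a commit is reached.
    let commit = repo.head()?.peel_to_commit_in_place()?;
    println!("HEAD is at {}", commit.id());
    Ok(())
}
```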
diff --git a/vendor/gix/src/id.rs b/vendor/gix/src/id.rs
new file mode 100644
index 000000000..c57565fb5
--- /dev/null
+++ b/vendor/gix/src/id.rs
@@ -0,0 +1,195 @@
+//!
+use std::ops::Deref;
+
+use gix_hash::{oid, ObjectId};
+
+use crate::{object::find, revision, Id, Object};
+
+/// An [object id][ObjectId] infused with `Easy`.
+impl<'repo> Id<'repo> {
+ /// Find the [`Object`] associated with this object id, and consider it an error if it doesn't exist.
+ ///
+ /// # Note
+ ///
+ /// There can only be one `ObjectRef` per `Easy`. To increase that limit, clone the `Easy`.
+ pub fn object(&self) -> Result<Object<'repo>, find::existing::Error> {
+ self.repo.find_object(self.inner)
+ }
+
+ /// Try to find the [`Object`] associated with this object id, and return `None` if it's not available locally.
+ ///
+ /// # Note
+ ///
+ /// There can only be one `ObjectRef` per `Easy`. To increase that limit, clone the `Easy`.
+ pub fn try_object(&self) -> Result<Option<Object<'repo>>, find::Error> {
+ self.repo.try_find_object(self.inner)
+ }
+
+ /// Turn this object id into a shortened id with a length in hex as configured by `core.abbrev`.
+ pub fn shorten(&self) -> Result<gix_hash::Prefix, shorten::Error> {
+ let hex_len = self.repo.config.hex_len.map_or_else(
+ || self.repo.objects.packed_object_count().map(calculate_auto_hex_len),
+ Ok,
+ )?;
+
+ let prefix = gix_odb::store::prefix::disambiguate::Candidate::new(self.inner, hex_len)
+ .expect("BUG: internal hex-len must always be valid");
+ self.repo
+ .objects
+ .disambiguate_prefix(prefix)?
+ .ok_or(shorten::Error::NotFound { oid: self.inner })
+ }
+
+ /// Turn this object id into a shortened id with a length in hex as configured by `core.abbrev`, or default
+ /// to a prefix which equals our id in the unlikely error case.
+ pub fn shorten_or_id(&self) -> gix_hash::Prefix {
+ self.shorten().unwrap_or_else(|_| self.inner.into())
+ }
+}
+
+fn calculate_auto_hex_len(num_packed_objects: u64) -> usize {
+ let mut len = 64 - num_packed_objects.leading_zeros();
+ len = (len + 1) / 2;
+ len.max(7) as usize
+}
+
+///
+pub mod shorten {
+ /// Returned by [`Id::shorten()`][super::Id::shorten()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ PackedObjectsCount(#[from] gix_odb::store::load_index::Error),
+ #[error(transparent)]
+ DisambiguatePrefix(#[from] gix_odb::store::prefix::disambiguate::Error),
+ #[error("Id could not be shortened as the object with id {} could not be found", .oid)]
+ NotFound { oid: gix_hash::ObjectId },
+ }
+}
+
+impl<'repo> Deref for Id<'repo> {
+ type Target = oid;
+
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+
+impl<'repo> Id<'repo> {
+ pub(crate) fn from_id(id: impl Into<ObjectId>, repo: &'repo crate::Repository) -> Self {
+ Id { inner: id.into(), repo }
+ }
+
+ /// Turn this instance into its bare [ObjectId].
+ pub fn detach(self) -> ObjectId {
+ self.inner
+ }
+}
+
+impl<'repo> Id<'repo> {
+ /// Obtain a platform for traversing ancestors of this commit.
+ ///
+ /// Note that unless [`error_on_missing_commit()`][revision::Walk::error_on_missing_commit()] is enabled, which by default it is not,
+ /// one will always see an empty iteration even if this id is not a commit, instead of an error.
+ /// If this is undesirable, it's best to check for the correct object type before creating an iterator.
+ pub fn ancestors(&self) -> revision::walk::Platform<'repo> {
+ revision::walk::Platform::new(Some(self.inner), self.repo)
+ }
+}
+
+mod impls {
+ use std::{cmp::Ordering, hash::Hasher};
+
+ use gix_hash::{oid, ObjectId};
+
+ use crate::{Id, Object, ObjectDetached};
+
+ // Eq, Hash, Ord, PartialOrd,
+
+ impl<'a> std::hash::Hash for Id<'a> {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.inner.hash(state)
+ }
+ }
+
+ impl<'a> PartialOrd<Id<'a>> for Id<'a> {
+ fn partial_cmp(&self, other: &Id<'a>) -> Option<Ordering> {
+ self.inner.partial_cmp(&other.inner)
+ }
+ }
+
+ impl<'repo> PartialEq<Id<'repo>> for Id<'repo> {
+ fn eq(&self, other: &Id<'repo>) -> bool {
+ self.inner == other.inner
+ }
+ }
+
+ impl<'repo> PartialEq<ObjectId> for Id<'repo> {
+ fn eq(&self, other: &ObjectId) -> bool {
+ &self.inner == other
+ }
+ }
+
+ impl<'repo> PartialEq<Id<'repo>> for ObjectId {
+ fn eq(&self, other: &Id<'repo>) -> bool {
+ self == &other.inner
+ }
+ }
+
+ impl<'repo> PartialEq<oid> for Id<'repo> {
+ fn eq(&self, other: &oid) -> bool {
+ self.inner == other
+ }
+ }
+
+ impl<'repo> PartialEq<Object<'repo>> for Id<'repo> {
+ fn eq(&self, other: &Object<'repo>) -> bool {
+ self.inner == other.id
+ }
+ }
+
+ impl<'repo> PartialEq<ObjectDetached> for Id<'repo> {
+ fn eq(&self, other: &ObjectDetached) -> bool {
+ self.inner == other.id
+ }
+ }
+
+ impl<'repo> std::fmt::Debug for Id<'repo> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ self.inner.fmt(f)
+ }
+ }
+
+ impl<'repo> std::fmt::Display for Id<'repo> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ self.inner.fmt(f)
+ }
+ }
+
+ impl<'repo> AsRef<oid> for Id<'repo> {
+ fn as_ref(&self) -> &oid {
+ &self.inner
+ }
+ }
+
+ impl<'repo> From<Id<'repo>> for ObjectId {
+ fn from(v: Id<'repo>) -> Self {
+ v.inner
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn size_of_oid() {
+ assert_eq!(
+ std::mem::size_of::<Id<'_>>(),
+ 32,
+ "size of oid shouldn't change without notice"
+ )
+ }
+}
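A sketch of shortening an id as configured by `core.abbrev`, falling back to the object-count heuristic in `calculate_auto_hex_len()` (never fewer than 7 hex characters); it assumes the current directory is inside a repository:

```rust
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let repo = gix::discover(".")?;
    let id = repo.head_id()?;
    // `shorten_or_id()` degrades gracefully to the full id if disambiguation fails.
    println!("{} (full: {})", id.shorten_or_id(), id);
    Ok(())
}
```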
diff --git a/vendor/gix/src/init.rs b/vendor/gix/src/init.rs
new file mode 100644
index 000000000..d04de0806
--- /dev/null
+++ b/vendor/gix/src/init.rs
@@ -0,0 +1,101 @@
+#![allow(clippy::result_large_err)]
+use std::{borrow::Cow, convert::TryInto, path::Path};
+
+use gix_ref::{
+ store::WriteReflog,
+ transaction::{PreviousValue, RefEdit},
+ FullName, Target,
+};
+
+use crate::{bstr::BString, config::tree::Init, ThreadSafeRepository};
+
+/// The name of the branch to use if none is configured via git configuration.
+///
+/// # Deviation
+///
+/// We use `main` instead of `master`.
+pub const DEFAULT_BRANCH_NAME: &str = "main";
+
+/// The error returned by [`crate::init()`].
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum Error {
+ #[error("Could not obtain the current directory")]
+ CurrentDir(#[from] std::io::Error),
+ #[error(transparent)]
+ Init(#[from] crate::create::Error),
+ #[error(transparent)]
+ Open(#[from] crate::open::Error),
+ #[error("Invalid default branch name: {name:?}")]
+ InvalidBranchName {
+ name: BString,
+ source: gix_validate::refname::Error,
+ },
+ #[error("Could not edit HEAD reference with new default name")]
+ EditHeadForDefaultBranch(#[from] crate::reference::edit::Error),
+}
+
+impl ThreadSafeRepository {
+ /// Create a repository with work-tree within `directory`, creating intermediate directories as needed.
+ ///
+ /// Fails without action if there is already a `.git` repository inside of `directory`, but
+ /// won't mind if `directory` is otherwise non-empty.
+ pub fn init(
+ directory: impl AsRef<Path>,
+ kind: crate::create::Kind,
+ options: crate::create::Options,
+ ) -> Result<Self, Error> {
+ use gix_sec::trust::DefaultForLevel;
+ let open_options = crate::open::Options::default_for_level(gix_sec::Trust::Full);
+ Self::init_opts(directory, kind, options, open_options)
+ }
+
+ /// Similar to [`init`][Self::init()], but allows determining how exactly to open the newly created repository.
+ ///
+ /// # Deviation
+ ///
+ /// Instead of naming the default branch `master`, we name it `main` unless configured explicitly using the `init.defaultBranch`
+ /// configuration key.
+ pub fn init_opts(
+ directory: impl AsRef<Path>,
+ kind: crate::create::Kind,
+ create_options: crate::create::Options,
+ mut open_options: crate::open::Options,
+ ) -> Result<Self, Error> {
+ let path = crate::create::into(directory.as_ref(), kind, create_options)?;
+ let (git_dir, worktree_dir) = path.into_repository_and_work_tree_directories();
+ open_options.git_dir_trust = Some(gix_sec::Trust::Full);
+ open_options.current_dir = std::env::current_dir()?.into();
+ let repo = ThreadSafeRepository::open_from_paths(git_dir, worktree_dir, open_options)?;
+
+ let branch_name = repo
+ .config
+ .resolved
+ .string("init", None, Init::DEFAULT_BRANCH.name)
+ .unwrap_or_else(|| Cow::Borrowed(DEFAULT_BRANCH_NAME.into()));
+ if branch_name.as_ref() != DEFAULT_BRANCH_NAME {
+ let sym_ref: FullName =
+ format!("refs/heads/{branch_name}")
+ .try_into()
+ .map_err(|err| Error::InvalidBranchName {
+ name: branch_name.into_owned(),
+ source: err,
+ })?;
+ let mut repo = repo.to_thread_local();
+ let prev_write_reflog = repo.refs.write_reflog;
+ repo.refs.write_reflog = WriteReflog::Disable;
+ repo.edit_reference(RefEdit {
+ change: gix_ref::transaction::Change::Update {
+ log: Default::default(),
+ expected: PreviousValue::Any,
+ new: Target::Symbolic(sym_ref),
+ },
+ name: "HEAD".try_into().expect("valid"),
+ deref: false,
+ })?;
+ repo.refs.write_reflog = prev_write_reflog;
+ }
+
+ Ok(repo)
+ }
+}
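A sketch of initialization through the crate-level convenience function, which uses the defaults shown above (including the `main` default branch unless `init.defaultBranch` says otherwise); the target path is made up:

```rust
fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Creates intermediate directories as needed and refuses existing `.git` directories.
    let repo = gix::init("/tmp/example-worktree")?;
    println!("initialized repository with git dir at {:?}", repo.git_dir());
    Ok(())
}
```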
diff --git a/vendor/gix/src/interrupt.rs b/vendor/gix/src/interrupt.rs
new file mode 100644
index 000000000..c94cbdbfa
--- /dev/null
+++ b/vendor/gix/src/interrupt.rs
@@ -0,0 +1,223 @@
+//! Process-global interrupt handling
+//!
+//! This module contains facilities to globally request an interrupt, which will cause supporting computations to
+//! abort once it is observed.
+//! Such checks for interrupts are provided in custom implementations of various traits to transparently add interrupt
+//! support to methods that wouldn't otherwise have it, by injecting it. See [`Read`].
+
+mod init {
+ use std::{
+ io,
+ sync::atomic::{AtomicBool, AtomicUsize, Ordering},
+ };
+
+ static IS_INITIALIZED: AtomicBool = AtomicBool::new(false);
+
+ #[derive(Default)]
+ pub struct Deregister(Vec<(i32, signal_hook::SigId)>);
+ pub struct AutoDeregister(Deregister);
+
+ impl Deregister {
+ /// Remove all previously registered handlers, and ensure the default behaviour is reinstated.
+ ///
+ /// Note that only the instantiation of the default behaviour can fail.
+ pub fn deregister(self) -> std::io::Result<()> {
+ if self.0.is_empty() {
+ return Ok(());
+ }
+ static REINSTATE_DEFAULT_BEHAVIOUR: AtomicBool = AtomicBool::new(true);
+ for (_, hook_id) in &self.0 {
+ signal_hook::low_level::unregister(*hook_id);
+ }
+ IS_INITIALIZED.store(false, Ordering::SeqCst);
+ if REINSTATE_DEFAULT_BEHAVIOUR
+ .fetch_update(Ordering::SeqCst, Ordering::SeqCst, |_| Some(false))
+ .expect("always returns value")
+ {
+ for (sig, _) in self.0 {
+ // # SAFETY
+ // * we only call a handler that is specifically designed to run in this environment.
+ #[allow(unsafe_code)]
+ unsafe {
+ signal_hook::low_level::register(sig, move || {
+ signal_hook::low_level::emulate_default_handler(sig).ok();
+ })?;
+ }
+ }
+ }
+ Ok(())
+ }
+
+ /// Return a type that deregisters all installed signal handlers on drop.
+ pub fn auto_deregister(self) -> AutoDeregister {
+ AutoDeregister(self)
+ }
+ }
+
+ impl Drop for AutoDeregister {
+ fn drop(&mut self) {
+ std::mem::take(&mut self.0).deregister().ok();
+ }
+ }
+
+ /// Initialize a signal handler to listen to SIGINT and SIGTERM and trigger our [`trigger()`][super::trigger()] that way.
+ /// It also calls the provided `interrupt()` closure, which promises to never use a mutex, allocate or deallocate.
+ ///
+ /// # Note
+ ///
+ /// It will abort the process on the second press and won't inform the user about this behaviour either, as we are unable to do so without
+ /// deadlocking even when trying to write to stderr directly.
+ pub fn init_handler(interrupt: impl Fn() + Send + Sync + Clone + 'static) -> io::Result<Deregister> {
+ if IS_INITIALIZED
+ .fetch_update(Ordering::SeqCst, Ordering::SeqCst, |_| Some(true))
+ .expect("always returns value")
+ {
+ return Err(io::Error::new(io::ErrorKind::Other, "Already initialized"));
+ }
+ let mut hooks = Vec::with_capacity(signal_hook::consts::TERM_SIGNALS.len());
+ for sig in signal_hook::consts::TERM_SIGNALS {
+ // # SAFETY
+ // * we only set atomics or call functions that do
+ // * there is no use of the heap
+ let interrupt = interrupt.clone();
+ #[allow(unsafe_code)]
+ unsafe {
+ let hook_id = signal_hook::low_level::register(*sig, move || {
+ static INTERRUPT_COUNT: AtomicUsize = AtomicUsize::new(0);
+ if !super::is_triggered() {
+ INTERRUPT_COUNT.store(0, Ordering::SeqCst);
+ }
+ let msg_idx = INTERRUPT_COUNT.fetch_add(1, Ordering::SeqCst);
+ if msg_idx == 1 {
+ gix_tempfile::registry::cleanup_tempfiles_signal_safe();
+ signal_hook::low_level::emulate_default_handler(*sig).ok();
+ }
+ interrupt();
+ super::trigger();
+ })?;
+ hooks.push((*sig, hook_id));
+ }
+ }
+
+ // This means gix_tempfile won't set up its own handler, allowing us to call its cleanup right before we actually abort.
+ gix_tempfile::signal::setup(gix_tempfile::signal::handler::Mode::None);
+
+ Ok(Deregister(hooks))
+ }
+}
+use std::{
+ io,
+ sync::atomic::{AtomicBool, Ordering},
+};
+
+pub use init::init_handler;
+
+/// A wrapper for an inner iterator which will check for interruptions on each iteration.
+pub struct Iter<I, EFN> {
+ /// The actual iterator to yield elements from.
+ inner: gix_features::interrupt::IterWithErr<'static, I, EFN>,
+}
+
+impl<I, EFN, E> Iter<I, EFN>
+where
+ I: Iterator,
+ EFN: FnOnce() -> E,
+{
+ /// Create a new iterator over `inner` which checks for interruptions on each iteration and calls `make_err()` to
+ /// signal an interruption happened, causing no further items to be iterated from that point on.
+ pub fn new(inner: I, make_err: EFN) -> Self {
+ Iter {
+ inner: gix_features::interrupt::IterWithErr::new(inner, make_err, &IS_INTERRUPTED),
+ }
+ }
+
+ /// Return the inner iterator
+ pub fn into_inner(self) -> I {
+ self.inner.inner
+ }
+
+ /// Return the inner iterator as reference
+ pub fn inner(&self) -> &I {
+ &self.inner.inner
+ }
+}
+
+impl<I, EFN, E> Iterator for Iter<I, EFN>
+where
+ I: Iterator,
+ EFN: FnOnce() -> E,
+{
+ type Item = Result<I::Item, E>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.inner.next()
+ }
+}
+
+/// A wrapper for implementors of [`std::io::Read`] or [`std::io::BufRead`] with interrupt support.
+///
+/// It fails a [read][`std::io::Read::read`] while an interrupt was requested.
+pub struct Read<R> {
+ /// The actual implementor of [`std::io::Read`] to which interrupt support will be added.
+ inner: gix_features::interrupt::Read<'static, R>,
+}
+
+impl<R> Read<R>
+where
+ R: io::Read,
+{
+ /// Create a new interruptible reader from `read`.
+ pub fn new(read: R) -> Self {
+ Read {
+ inner: gix_features::interrupt::Read {
+ inner: read,
+ should_interrupt: &IS_INTERRUPTED,
+ },
+ }
+ }
+
+ /// Return the inner reader
+ pub fn into_inner(self) -> R {
+ self.inner.inner
+ }
+}
+
+impl<R> io::Read for Read<R>
+where
+ R: io::Read,
+{
+ fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
+ self.inner.read(buf)
+ }
+}
+
+impl<R> io::BufRead for Read<R>
+where
+ R: io::BufRead,
+{
+ fn fill_buf(&mut self) -> io::Result<&[u8]> {
+ self.inner.fill_buf()
+ }
+
+ fn consume(&mut self, amt: usize) {
+ self.inner.consume(amt)
+ }
+}
+
+/// The flag behind all utility functions in this module.
+pub static IS_INTERRUPTED: AtomicBool = AtomicBool::new(false);
+
+/// Returns true if an interrupt is requested.
+pub fn is_triggered() -> bool {
+ IS_INTERRUPTED.load(Ordering::Relaxed)
+}
+
+/// Trigger an interrupt, signalling to those checking for [`is_triggered()`] to stop what they are doing.
+pub fn trigger() {
+ IS_INTERRUPTED.store(true, Ordering::SeqCst);
+}
+
+/// Sets the interrupt request to false, thus allowing those checking for [`is_triggered()`] to proceed.
+pub fn reset() {
+ IS_INTERRUPTED.store(false, Ordering::SeqCst);
+}
diff --git a/vendor/gix/src/kind.rs b/vendor/gix/src/kind.rs
new file mode 100644
index 000000000..a8213475f
--- /dev/null
+++ b/vendor/gix/src/kind.rs
@@ -0,0 +1,23 @@
+use crate::Kind;
+
+impl Kind {
+ /// Returns true if this is a bare repository, one without a work tree.
+ pub fn is_bare(&self) -> bool {
+ matches!(self, Kind::Bare)
+ }
+}
+
+impl From<gix_discover::repository::Kind> for Kind {
+ fn from(v: gix_discover::repository::Kind) -> Self {
+ match v {
+ gix_discover::repository::Kind::Submodule { .. } | gix_discover::repository::Kind::SubmoduleGitDir => {
+ Kind::WorkTree { is_linked: false }
+ }
+ gix_discover::repository::Kind::Bare => Kind::Bare,
+ gix_discover::repository::Kind::WorkTreeGitDir { .. } => Kind::WorkTree { is_linked: true },
+ gix_discover::repository::Kind::WorkTree { linked_git_dir } => Kind::WorkTree {
+ is_linked: linked_git_dir.is_some(),
+ },
+ }
+ }
+}
diff --git a/vendor/gix/src/lib.rs b/vendor/gix/src/lib.rs
new file mode 100644
index 000000000..257a613d7
--- /dev/null
+++ b/vendor/gix/src/lib.rs
@@ -0,0 +1,314 @@
+//! This crate provides the [`Repository`] abstraction which serves as a hub into all the functionality of git.
+//!
+//! It's powerful and won't sacrifice performance while still increasing convenience compared to using the sub-crates
+//! individually. Sometimes it may hide complexity under the assumption that the performance difference doesn't matter
+//! for all but the fewest tools out there, which could use the underlying crates directly or file an issue.
+//!
+//! # The prelude and extensions
+//!
+//! With `use git_repository::prelude::*` you should be ready to go as it pulls in various extension traits to make functionality
+//! available on objects that may use it.
+//!
+//! The method signatures are still complex and may require various arguments for configuration and cache control.
+//!
+//! Most extensions to existing objects provide an `obj_with_extension.attach(&repo).an_easier_version_of_a_method()` for simpler
+//! call signatures.
+//!
+//! ## ThreadSafe Mode
+//!
+//! By default, the [`Repository`] isn't `Sync` and thus can't be used in certain contexts which require the `Sync` trait.
+//!
+//! To help with this, convert it with [`.into_sync()`][Repository::into_sync()] into a [`ThreadSafeRepository`].
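+//!
+//! A quick sketch, assuming the current directory is inside a repository:
+//!
+//! ```no_run
+//! let repo = gix::discover(".").expect("a repository can be discovered");
+//! let shared: gix::ThreadSafeRepository = repo.into_sync();
+//! // `shared` is `Sync` and can be moved across threads, each of which can obtain its own `Repository` from it.
+//! ```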
+//!
+//! ## Object-Access Performance
+//!
+//! Accessing objects quickly is the bread-and-butter of working with git, right after accessing references. Hence it's vital
+//! to understand which cache levels exist and how to leverage them.
+//!
+//! When accessing an object, the first cache that's queried is a memory-capped LRU object cache, mapping their id to data and kind.
+//! It has to be specifically enabled per [`Repository`].
+//! On miss, the object is looked up and if a pack is hit, there is a small fixed-size cache for delta-base objects.
+//!
+//! In scenarios where the same objects are accessed multiple times, the object cache can be useful and is to be configured specifically
+//! using the [`object_cache_size(…)`][crate::Repository::object_cache_size()] method.
+//!
+//! Use the `cache-efficiency-debug` cargo feature to learn how efficient the cache actually is - it's easy to end up with lowered
+//! performance if the cache is hit less than 50% of the time.
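+//!
+//! A sketch of enabling the object cache, assuming a budget of 16 MB is acceptable:
+//!
+//! ```no_run
+//! let mut repo = gix::discover(".").expect("a repository can be discovered");
+//! repo.object_cache_size(Some(16 * 1024 * 1024));
+//! ```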
+//!
+//! ### Terminology
+//!
+//! #### WorkingTree and WorkTree
+//!
+//! When reading the documentation of the canonical gix-worktree program one gets the impression work tree and working tree are used
+//! interchangeably. We use the term _work tree_ only and try to do so consistently, as it's shorter and assumed to mean the same thing.
+//!
+//! # Cargo-features
+//!
+//! To make using _sub-crates_ easier these are re-exported into the root of this crate. Here we list how to access nested plumbing
+//! crates which are otherwise harder to discover:
+//!
+//! **`git_repository::`**
+//! * [`odb`]
+//! * [`pack`][odb::pack]
+//! * [`protocol`]
+//! * [`transport`][protocol::transport]
+//! * [`packetline`][protocol::transport::packetline]
+//!
+//!
+//! ## Feature Flags
+#![cfg_attr(
+ feature = "document-features",
+ cfg_attr(doc, doc = ::document_features::document_features!())
+)]
+#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))]
+#![deny(missing_docs, rust_2018_idioms, unsafe_code)]
+
+// Re-exports to make this a potential one-stop-shop crate, sparing people from having to reference various crates themselves.
+// This also means that their major version changes affect our major version, but that's alright as we directly expose their
+// APIs/instances anyway.
+pub use gix_actor as actor;
+pub use gix_attributes as attrs;
+pub use gix_credentials as credentials;
+pub use gix_date as date;
+pub use gix_features as features;
+use gix_features::threading::OwnShared;
+pub use gix_features::{parallel, progress::Progress, threading};
+pub use gix_glob as glob;
+pub use gix_hash as hash;
+#[doc(inline)]
+pub use gix_index as index;
+pub use gix_lock as lock;
+pub use gix_object as objs;
+pub use gix_object::bstr;
+pub use gix_odb as odb;
+pub use gix_prompt as prompt;
+#[cfg(all(feature = "gix-protocol"))]
+pub use gix_protocol as protocol;
+pub use gix_ref as refs;
+pub use gix_refspec as refspec;
+pub use gix_sec as sec;
+pub use gix_tempfile as tempfile;
+pub use gix_traverse as traverse;
+pub use gix_url as url;
+#[doc(inline)]
+pub use gix_url::Url;
+pub use hash::{oid, ObjectId};
+
+pub mod interrupt;
+
+mod ext;
+///
+pub mod prelude {
+ pub use gix_features::parallel::reduce::Finalize;
+ pub use gix_odb::{Find, FindExt, Header, HeaderExt, Write};
+
+ pub use crate::ext::*;
+}
+
+///
+pub mod path;
+
+/// The standard type for a store to handle git references.
+pub type RefStore = gix_ref::file::Store;
+/// A handle for finding objects in an object database, abstracting away caches for thread-local use.
+pub type OdbHandle = gix_odb::Handle;
+/// A way to access git configuration
+pub(crate) type Config = OwnShared<gix_config::File<'static>>;
+
+///
+mod types;
+pub use types::{
+ Commit, Head, Id, Kind, Object, ObjectDetached, Reference, Remote, Repository, Tag, ThreadSafeRepository, Tree,
+ Worktree,
+};
+
+///
+pub mod clone;
+pub mod commit;
+pub mod head;
+pub mod id;
+pub mod object;
+pub mod reference;
+mod repository;
+pub mod tag;
+
+///
+pub mod progress {
+ #[cfg(feature = "progress-tree")]
+ pub use gix_features::progress::prodash::tree;
+ pub use gix_features::progress::*;
+}
+
+///
+pub mod diff {
+ pub use gix_diff::*;
+ ///
+ pub mod rename {
+ /// Determine how to do rename tracking.
+ #[derive(Debug, Copy, Clone, Eq, PartialEq)]
+ pub enum Tracking {
+ /// Do not track renames at all, the fastest option.
+ Disabled,
+ /// Track renames.
+ Renames,
+ /// Track renames and copies.
+ ///
+ /// This is the most expensive option.
+ RenamesAndCopies,
+ }
+ }
+}
+
+/// See [ThreadSafeRepository::discover()], but returns a [`Repository`] instead.
+#[allow(clippy::result_large_err)]
+pub fn discover(directory: impl AsRef<std::path::Path>) -> Result<Repository, discover::Error> {
+ ThreadSafeRepository::discover(directory).map(Into::into)
+}
+
+/// See [ThreadSafeRepository::init()], but returns a [`Repository`] instead.
+#[allow(clippy::result_large_err)]
+pub fn init(directory: impl AsRef<std::path::Path>) -> Result<Repository, init::Error> {
+ ThreadSafeRepository::init(directory, create::Kind::WithWorktree, create::Options::default()).map(Into::into)
+}
+
+/// See [ThreadSafeRepository::init()], but initializes a bare repository and returns a [`Repository`] instead.
+#[allow(clippy::result_large_err)]
+pub fn init_bare(directory: impl AsRef<std::path::Path>) -> Result<Repository, init::Error> {
+ ThreadSafeRepository::init(directory, create::Kind::Bare, create::Options::default()).map(Into::into)
+}
+
+/// Create a platform for configuring a bare clone from `url` to the local `path`, using default options for opening it (but
+/// amended to use configuration from the git installation so that all authentication options are honored).
+///
+/// See [`clone::PrepareFetch::new()`] for a function to take full control over all options.
+#[allow(clippy::result_large_err)]
+pub fn prepare_clone_bare<Url, E>(
+ url: Url,
+ path: impl AsRef<std::path::Path>,
+) -> Result<clone::PrepareFetch, clone::Error>
+where
+ Url: std::convert::TryInto<gix_url::Url, Error = E>,
+ gix_url::parse::Error: From<E>,
+{
+ clone::PrepareFetch::new(
+ url,
+ path,
+ create::Kind::Bare,
+ create::Options::default(),
+ open_opts_with_git_binary_config(),
+ )
+}
+
+/// Create a platform for configuring a clone with main working tree from `url` to the local `path`, using default options for opening it
+/// (but amended to use configuration from the git installation so that all authentication options are honored).
+///
+/// See [`clone::PrepareFetch::new()`] for a function to take full control over all options.
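+///
+/// # Example
+///
+/// A minimal sketch; the URL and destination directory are placeholders:
+///
+/// ```no_run
+/// let prepare = gix::prepare_clone("https://github.com/Byron/gitoxide.git", "clone-target")
+///     .expect("a valid URL and a usable destination directory");
+/// // `prepare` can now be used to fetch the remote and check out the main worktree.
+/// ```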
+#[allow(clippy::result_large_err)]
+pub fn prepare_clone<Url, E>(url: Url, path: impl AsRef<std::path::Path>) -> Result<clone::PrepareFetch, clone::Error>
+where
+ Url: std::convert::TryInto<gix_url::Url, Error = E>,
+ gix_url::parse::Error: From<E>,
+{
+ clone::PrepareFetch::new(
+ url,
+ path,
+ create::Kind::WithWorktree,
+ create::Options::default(),
+ open_opts_with_git_binary_config(),
+ )
+}
+
+fn open_opts_with_git_binary_config() -> open::Options {
+ use gix_sec::trust::DefaultForLevel;
+ let mut opts = open::Options::default_for_level(gix_sec::Trust::Full);
+ opts.permissions.config.git_binary = true;
+ opts
+}
+
+/// See [ThreadSafeRepository::open()], but returns a [`Repository`] instead.
+#[allow(clippy::result_large_err)]
+pub fn open(directory: impl Into<std::path::PathBuf>) -> Result<Repository, open::Error> {
+ ThreadSafeRepository::open(directory).map(Into::into)
+}
+
+/// See [ThreadSafeRepository::open_opts()], but returns a [`Repository`] instead.
+#[allow(clippy::result_large_err)]
+pub fn open_opts(directory: impl Into<std::path::PathBuf>, options: open::Options) -> Result<Repository, open::Error> {
+ ThreadSafeRepository::open_opts(directory, options).map(Into::into)
+}
+
+///
+pub mod permission {
+ ///
+ pub mod env_var {
+ ///
+ pub mod resource {
+ ///
+ pub type Error = gix_sec::permission::Error<std::path::PathBuf>;
+ }
+ }
+}
+///
+pub mod permissions {
+ pub use crate::repository::permissions::{Config, Environment};
+}
+pub use repository::permissions::Permissions;
+
+///
+pub mod create;
+
+///
+pub mod open;
+
+///
+pub mod config;
+
+///
+pub mod mailmap;
+
+///
+pub mod worktree;
+
+pub mod revision;
+
+///
+pub mod remote;
+
+///
+pub mod init;
+
+/// Not to be confused with 'status'.
+pub mod state {
+ /// Tell what operation is currently in progress.
+ #[derive(Debug, PartialEq, Eq)]
+ pub enum InProgress {
+ /// A mailbox is being applied.
+ ApplyMailbox,
+ /// A rebase is happening while a mailbox is being applied.
+ // TODO: test
+ ApplyMailboxRebase,
+ /// A git bisect operation has not yet been concluded.
+ Bisect,
+ /// A cherry pick operation.
+ CherryPick,
+ /// A cherry pick with multiple commits pending.
+ CherryPickSequence,
+ /// A merge operation.
+ Merge,
+ /// A rebase operation.
+ Rebase,
+ /// An interactive rebase operation.
+ RebaseInteractive,
+ /// A revert operation.
+ Revert,
+ /// A revert operation with multiple commits pending.
+ RevertSequence,
+ }
+}
+
+///
+pub mod discover;
+
+pub mod env;
+
+mod kind;
diff --git a/vendor/gix/src/mailmap.rs b/vendor/gix/src/mailmap.rs
new file mode 100644
index 000000000..6ea6bcc2d
--- /dev/null
+++ b/vendor/gix/src/mailmap.rs
@@ -0,0 +1,18 @@
+pub use gix_mailmap::*;
+
+///
+pub mod load {
+ /// The error returned by [`crate::Repository::open_mailmap_into()`].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("The mailmap file declared in `mailmap.file` could not be read")]
+ Io(#[from] std::io::Error),
+ #[error("The configured mailmap.blob could not be parsed")]
+ BlobSpec(#[from] gix_hash::decode::Error),
+ #[error(transparent)]
+ PathInterpolate(#[from] gix_config::path::interpolate::Error),
+ #[error("Could not find object configured in `mailmap.blob`")]
+ FindExisting(#[from] crate::object::find::existing::Error),
+ }
+}
diff --git a/vendor/gix/src/object/blob.rs b/vendor/gix/src/object/blob.rs
new file mode 100644
index 000000000..f35605422
--- /dev/null
+++ b/vendor/gix/src/object/blob.rs
@@ -0,0 +1,148 @@
+///
+pub mod diff {
+ use std::ops::Range;
+
+ use crate::{bstr::ByteSlice, object::blob::diff::line::Change};
+
+ /// A platform to keep temporary information to perform line diffs on modified blobs.
+ ///
+ pub struct Platform<'old, 'new> {
+ /// The previous version of the blob.
+ pub old: crate::Object<'old>,
+ /// The new version of the blob.
+ pub new: crate::Object<'new>,
+ /// The algorithm to use when calling [imara_diff::diff()][gix_diff::blob::diff()].
+ /// This value is determined by the `diff.algorithm` configuration.
+ pub algo: gix_diff::blob::Algorithm,
+ }
+
+ ///
+ pub mod init {
+ /// The error returned by [`Platform::from_ids()`][super::Platform::from_ids()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("Could not find the previous blob or the new blob to diff against")]
+ FindExisting(#[from] crate::object::find::existing::Error),
+ #[error("Could not obtain diff algorithm from configuration")]
+ DiffAlgorithm(#[from] crate::config::diff::algorithm::Error),
+ }
+ }
+
+ impl<'old, 'new> Platform<'old, 'new> {
+ /// Produce a platform for performing various diffs after obtaining the object data of `previous_id` and `new_id`.
+ ///
+ /// Note that these objects are treated as raw data and are assumed to be blobs.
+ pub fn from_ids(
+ previous_id: &crate::Id<'old>,
+ new_id: &crate::Id<'new>,
+ ) -> Result<Platform<'old, 'new>, init::Error> {
+ match previous_id
+ .object()
+ .and_then(|old| new_id.object().map(|new| (old, new)))
+ {
+ Ok((old, new)) => {
+ let algo = match new_id.repo.config.diff_algorithm() {
+ Ok(algo) => algo,
+ Err(err) => return Err(err.into()),
+ };
+ Ok(Platform { old, new, algo })
+ }
+ Err(err) => Err(err.into()),
+ }
+ }
+ }
+
+ ///
+ pub mod line {
+ use crate::bstr::BStr;
+
+ /// A change to a hunk of lines.
+ pub enum Change<'a, 'data> {
+ /// Lines were added.
+ Addition {
+ /// The lines themselves without terminator.
+ lines: &'a [&'data BStr],
+ },
+ /// Lines were removed.
+ Deletion {
+ /// The lines themselves without terminator.
+ lines: &'a [&'data BStr],
+ },
+ /// Lines have been replaced.
+ Modification {
+ /// The replaced lines without terminator.
+ lines_before: &'a [&'data BStr],
+ /// The new lines without terminator.
+ lines_after: &'a [&'data BStr],
+ },
+ }
+ }
+
+ impl<'old, 'new> Platform<'old, 'new> {
+ /// Perform a diff on lines between the old and the new version of a blob, passing each hunk of lines to `process_hunk`.
+ /// The diffing algorithm is determined by the `diff.algorithm` configuration.
+ ///
+ /// Note that you can invoke the diff more flexibly as well.
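+        ///
+        /// # Example
+        ///
+        /// A sketch that prints hunk sizes, assuming a `platform` was created via [`Platform::from_ids()`]:
+        ///
+        /// ```no_run
+        /// # fn example(platform: gix::object::blob::diff::Platform<'_, '_>) {
+        /// use gix::object::blob::diff::line::Change;
+        ///
+        /// platform
+        ///     .lines(|change| -> Result<(), std::io::Error> {
+        ///         match change {
+        ///             Change::Addition { lines } => println!("added {} lines", lines.len()),
+        ///             Change::Deletion { lines } => println!("removed {} lines", lines.len()),
+        ///             Change::Modification { lines_before, lines_after } => {
+        ///                 println!("replaced {} lines with {} lines", lines_before.len(), lines_after.len())
+        ///             }
+        ///         }
+        ///         Ok(())
+        ///     })
+        ///     .expect("printing to stdout does not fail");
+        /// # }
+        /// ```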
+ // TODO: more tests (only tested insertion right now)
+ pub fn lines<FnH, E>(&self, mut process_hunk: FnH) -> Result<(), E>
+ where
+ FnH: FnMut(line::Change<'_, '_>) -> Result<(), E>,
+ E: std::error::Error,
+ {
+ let input = self.line_tokens();
+ let mut err = None;
+ let mut lines = Vec::new();
+ gix_diff::blob::diff(self.algo, &input, |before: Range<u32>, after: Range<u32>| {
+ if err.is_some() {
+ return;
+ }
+ lines.clear();
+ lines.extend(
+ input.before[before.start as usize..before.end as usize]
+ .iter()
+ .map(|&line| input.interner[line].as_bstr()),
+ );
+ let end_of_before = lines.len();
+ lines.extend(
+ input.after[after.start as usize..after.end as usize]
+ .iter()
+ .map(|&line| input.interner[line].as_bstr()),
+ );
+ let hunk_before = &lines[..end_of_before];
+ let hunk_after = &lines[end_of_before..];
+ if hunk_after.is_empty() {
+ err = process_hunk(Change::Deletion { lines: hunk_before }).err();
+ } else if hunk_before.is_empty() {
+ err = process_hunk(Change::Addition { lines: hunk_after }).err();
+ } else {
+ err = process_hunk(Change::Modification {
+ lines_before: hunk_before,
+ lines_after: hunk_after,
+ })
+ .err();
+ }
+ });
+
+ match err {
+ Some(err) => Err(err),
+ None => Ok(()),
+ }
+ }
+
+ /// Count the amount of removed and inserted lines efficiently.
+ pub fn line_counts(&self) -> gix_diff::blob::sink::Counter<()> {
+ let tokens = self.line_tokens();
+ gix_diff::blob::diff(self.algo, &tokens, gix_diff::blob::sink::Counter::default())
+ }
+
+ /// Return a tokenizer which treats lines as smallest unit for use in a [diff operation][gix_diff::blob::diff()].
+ ///
+ /// The line separator is determined according to normal git rules and filters.
+ pub fn line_tokens(&self) -> gix_diff::blob::intern::InternedInput<&[u8]> {
+ // TODO: make use of `core.eol` and/or filters to do line-counting correctly. It's probably
+ // OK to just know how these objects are saved to know what constitutes a line.
+ gix_diff::blob::intern::InternedInput::new(self.old.data.as_bytes(), self.new.data.as_bytes())
+ }
+ }
+}
diff --git a/vendor/gix/src/object/commit.rs b/vendor/gix/src/object/commit.rs
new file mode 100644
index 000000000..e28a12955
--- /dev/null
+++ b/vendor/gix/src/object/commit.rs
@@ -0,0 +1,156 @@
+use crate::{bstr, bstr::BStr, revision, Commit, ObjectDetached, Tree};
+
+mod error {
+ use crate::object;
+
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ FindExistingObject(#[from] object::find::existing::Error),
+ #[error("The commit could not be decoded fully or partially")]
+ Decode(#[from] gix_object::decode::Error),
+ #[error("Expected object of type {}, but got {}", .expected, .actual)]
+ ObjectKind {
+ expected: gix_object::Kind,
+ actual: gix_object::Kind,
+ },
+ }
+}
+
+pub use error::Error;
+
+impl<'repo> Commit<'repo> {
+ /// Create an owned instance of this object, copying our data in the process.
+ pub fn detached(&self) -> ObjectDetached {
+ ObjectDetached {
+ id: self.id,
+ kind: gix_object::Kind::Commit,
+ data: self.data.clone(),
+ }
+ }
+
+ /// Sever the connection to the `Repository` and turn this instance into a standalone object.
+ pub fn detach(self) -> ObjectDetached {
+ self.into()
+ }
+}
+
+impl<'repo> Commit<'repo> {
+    /// Turn this object's id into a shortened id with a length in hex as configured by `core.abbrev`.
+ pub fn short_id(&self) -> Result<gix_hash::Prefix, crate::id::shorten::Error> {
+ use crate::ext::ObjectIdExt;
+ self.id.attach(self.repo).shorten()
+ }
+
+    /// Parse the commit's message into a [`MessageRef`][gix_object::commit::MessageRef].
+ pub fn message(&self) -> Result<gix_object::commit::MessageRef<'_>, gix_object::decode::Error> {
+ Ok(gix_object::commit::MessageRef::from_bytes(self.message_raw()?))
+ }
+ /// Decode the commit object until the message and return it.
+ pub fn message_raw(&self) -> Result<&'_ BStr, gix_object::decode::Error> {
+ gix_object::CommitRefIter::from_bytes(&self.data).message()
+ }
+    /// Obtain the message by using intricate knowledge about the encoding; this is the fastest option and cannot fail,
+    /// at the expense of error handling.
+ pub fn message_raw_sloppy(&self) -> &BStr {
+ use bstr::ByteSlice;
+ self.data
+ .find(b"\n\n")
+ .map(|pos| &self.data[pos + 2..])
+ .unwrap_or_default()
+ .as_bstr()
+ }
+
+ /// Decode the commit and obtain the time at which the commit was created.
+ ///
+ /// For the time at which it was authored, refer to `.decode()?.author.time`.
+ pub fn time(&self) -> Result<gix_actor::Time, Error> {
+ Ok(self.committer()?.time)
+ }
+
+ /// Decode the entire commit object and return it for accessing all commit information.
+ ///
+ /// It will allocate only if there are more than 2 parents.
+ ///
+    /// Note that the returned commit object makes lookups easy and should be reused
+    /// for successive accesses to string-ish information to avoid decoding the object
+    /// more than once.
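+    ///
+    /// # Example
+    ///
+    /// A sketch, assuming a `commit` was already obtained from a repository:
+    ///
+    /// ```no_run
+    /// # fn example(commit: &gix::Commit<'_>) {
+    /// let commit_ref = commit.decode().expect("the commit is valid");
+    /// println!("author: {}", commit_ref.author.name);
+    /// println!("parents: {}", commit_ref.parents.len());
+    /// # }
+    /// ```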
+ pub fn decode(&self) -> Result<gix_object::CommitRef<'_>, gix_object::decode::Error> {
+ gix_object::CommitRef::from_bytes(&self.data)
+ }
+
+ /// Return an iterator over tokens, representing this commit piece by piece.
+ pub fn iter(&self) -> gix_object::CommitRefIter<'_> {
+ gix_object::CommitRefIter::from_bytes(&self.data)
+ }
+
+    /// Return the commit's author, with surrounding whitespace trimmed.
+ pub fn author(&self) -> Result<gix_actor::SignatureRef<'_>, gix_object::decode::Error> {
+ gix_object::CommitRefIter::from_bytes(&self.data)
+ .author()
+ .map(|s| s.trim())
+ }
+
+    /// Return the commit's committer, with surrounding whitespace trimmed.
+ pub fn committer(&self) -> Result<gix_actor::SignatureRef<'_>, gix_object::decode::Error> {
+ gix_object::CommitRefIter::from_bytes(&self.data)
+ .committer()
+ .map(|s| s.trim())
+ }
+
+    /// Decode this commit's parent ids on the fly without allocating.
+ // TODO: tests
+ pub fn parent_ids(&self) -> impl Iterator<Item = crate::Id<'repo>> + '_ {
+ use crate::ext::ObjectIdExt;
+ let repo = self.repo;
+ gix_object::CommitRefIter::from_bytes(&self.data)
+ .parent_ids()
+ .map(move |id| id.attach(repo))
+ }
+
+    /// Parse the commit and return the tree object it points to.
+ pub fn tree(&self) -> Result<Tree<'repo>, Error> {
+ match self.tree_id()?.object()?.try_into_tree() {
+ Ok(tree) => Ok(tree),
+ Err(crate::object::try_into::Error { actual, expected, .. }) => Err(Error::ObjectKind { actual, expected }),
+ }
+ }
+
+    /// Parse the commit and return the tree id it points to.
+ pub fn tree_id(&self) -> Result<crate::Id<'repo>, gix_object::decode::Error> {
+ gix_object::CommitRefIter::from_bytes(&self.data)
+ .tree_id()
+ .map(|id| crate::Id::from_id(id, self.repo))
+ }
+
+    /// Return our own id, connected to this repository.
+ pub fn id(&self) -> crate::Id<'repo> {
+ use crate::ext::ObjectIdExt;
+ self.id.attach(self.repo)
+ }
+
+ /// Obtain a platform for traversing ancestors of this commit.
+ pub fn ancestors(&self) -> revision::walk::Platform<'repo> {
+ self.id().ancestors()
+ }
+
+ /// Create a platform to further configure a `git describe` operation to find a name for this commit by looking
+ /// at the closest annotated tags (by default) in its past.
+ pub fn describe(&self) -> crate::commit::describe::Platform<'repo> {
+ crate::commit::describe::Platform {
+ id: self.id,
+ repo: self.repo,
+ select: Default::default(),
+ first_parent: false,
+ id_as_fallback: false,
+ max_candidates: 10,
+ }
+ }
+}
+
+impl<'r> std::fmt::Debug for Commit<'r> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ write!(f, "Commit({})", self.id)
+ }
+}
diff --git a/vendor/gix/src/object/errors.rs b/vendor/gix/src/object/errors.rs
new file mode 100644
index 000000000..eb7733473
--- /dev/null
+++ b/vendor/gix/src/object/errors.rs
@@ -0,0 +1,34 @@
+///
+pub mod conversion {
+
+    /// The error returned by object conversion methods like [`try_to_commit_ref()`][crate::Object::try_to_commit_ref()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ Decode(#[from] gix_object::decode::Error),
+ #[error("Expected object type {}, but got {}", .expected, .actual)]
+ UnexpectedType {
+ expected: gix_object::Kind,
+ actual: gix_object::Kind,
+ },
+ }
+}
+
+///
+pub mod find {
+ /// Indicate that an error occurred when trying to find an object.
+ pub type Error = gix_odb::store::find::Error;
+
+ ///
+ pub mod existing {
+ /// An object could not be found in the database, or an error occurred when trying to obtain it.
+ pub type Error = gix_odb::find::existing::Error<gix_odb::store::find::Error>;
+ }
+}
+
+///
+pub mod write {
+ /// An error to indicate writing to the loose object store failed.
+ pub type Error = gix_odb::store::write::Error;
+}
diff --git a/vendor/gix/src/object/impls.rs b/vendor/gix/src/object/impls.rs
new file mode 100644
index 000000000..3453b1b3c
--- /dev/null
+++ b/vendor/gix/src/object/impls.rs
@@ -0,0 +1,123 @@
+use std::convert::TryFrom;
+
+use crate::{object, Commit, Object, ObjectDetached, Tag, Tree};
+
+impl<'repo> From<Object<'repo>> for ObjectDetached {
+ fn from(mut v: Object<'repo>) -> Self {
+ ObjectDetached {
+ id: v.id,
+ kind: v.kind,
+ data: std::mem::take(&mut v.data),
+ }
+ }
+}
+
+impl<'repo> From<Commit<'repo>> for ObjectDetached {
+ fn from(mut v: Commit<'repo>) -> Self {
+ ObjectDetached {
+ id: v.id,
+ kind: gix_object::Kind::Commit,
+ data: std::mem::take(&mut v.data),
+ }
+ }
+}
+
+impl<'repo> From<Tag<'repo>> for ObjectDetached {
+ fn from(mut v: Tag<'repo>) -> Self {
+ ObjectDetached {
+ id: v.id,
+ kind: gix_object::Kind::Tag,
+ data: std::mem::take(&mut v.data),
+ }
+ }
+}
+
+impl<'repo> From<Commit<'repo>> for Object<'repo> {
+ fn from(mut v: Commit<'repo>) -> Self {
+ Object {
+ id: v.id,
+ kind: gix_object::Kind::Commit,
+ data: steal_from_freelist(&mut v.data),
+ repo: v.repo,
+ }
+ }
+}
+
+impl<'repo> AsRef<[u8]> for Object<'repo> {
+ fn as_ref(&self) -> &[u8] {
+ &self.data
+ }
+}
+
+impl AsRef<[u8]> for ObjectDetached {
+ fn as_ref(&self) -> &[u8] {
+ &self.data
+ }
+}
+
+impl<'repo> TryFrom<Object<'repo>> for Commit<'repo> {
+ type Error = Object<'repo>;
+
+ fn try_from(mut value: Object<'repo>) -> Result<Self, Self::Error> {
+ let handle = value.repo;
+ match value.kind {
+ object::Kind::Commit => Ok(Commit {
+ id: value.id,
+ repo: handle,
+ data: steal_from_freelist(&mut value.data),
+ }),
+ _ => Err(value),
+ }
+ }
+}
+
+impl<'repo> TryFrom<Object<'repo>> for Tag<'repo> {
+ type Error = Object<'repo>;
+
+ fn try_from(mut value: Object<'repo>) -> Result<Self, Self::Error> {
+ let handle = value.repo;
+ match value.kind {
+ object::Kind::Tag => Ok(Tag {
+ id: value.id,
+ repo: handle,
+ data: steal_from_freelist(&mut value.data),
+ }),
+ _ => Err(value),
+ }
+ }
+}
+
+impl<'repo> TryFrom<Object<'repo>> for Tree<'repo> {
+ type Error = Object<'repo>;
+
+ fn try_from(mut value: Object<'repo>) -> Result<Self, Self::Error> {
+ let handle = value.repo;
+ match value.kind {
+ object::Kind::Tree => Ok(Tree {
+ id: value.id,
+ repo: handle,
+ data: steal_from_freelist(&mut value.data),
+ }),
+ _ => Err(value),
+ }
+ }
+}
+
+impl<'r> std::fmt::Debug for Object<'r> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ use gix_object::Kind::*;
+ let type_name = match self.kind {
+ Blob => "Blob",
+ Commit => "Commit",
+ Tree => "Tree",
+ Tag => "Tag",
+ };
+ write!(f, "{}({})", type_name, self.id)
+ }
+}
+
+/// In conjunction with the handles free list, leaving an empty Vec in place of the original causes it to not be
+/// returned to the free list.
+fn steal_from_freelist(data: &mut Vec<u8>) -> Vec<u8> {
+ std::mem::take(data)
+}
diff --git a/vendor/gix/src/object/mod.rs b/vendor/gix/src/object/mod.rs
new file mode 100644
index 000000000..75d77d138
--- /dev/null
+++ b/vendor/gix/src/object/mod.rs
@@ -0,0 +1,221 @@
+//!
+use std::convert::TryInto;
+
+use gix_hash::ObjectId;
+pub use gix_object::Kind;
+
+use crate::{Commit, Id, Object, ObjectDetached, Tag, Tree};
+
+mod errors;
+pub(crate) mod cache {
+ pub use gix_pack::cache::object::MemoryCappedHashmap;
+}
+pub use errors::{conversion, find, write};
+///
+pub mod blob;
+///
+pub mod commit;
+mod impls;
+pub mod peel;
+mod tag;
+///
+pub mod tree;
+
+///
+pub mod try_into {
+ #[derive(thiserror::Error, Debug)]
+ #[allow(missing_docs)]
+ #[error("Object named {id} was supposed to be of kind {expected}, but was kind {actual}.")]
+ pub struct Error {
+ pub actual: gix_object::Kind,
+ pub expected: gix_object::Kind,
+ pub id: gix_hash::ObjectId,
+ }
+}
+
+impl ObjectDetached {
+ /// Infuse this owned object with `repo` access.
+ pub fn attach(self, repo: &crate::Repository) -> Object<'_> {
+ Object {
+ id: self.id,
+ kind: self.kind,
+ data: self.data,
+ repo,
+ }
+ }
+}
+
+impl std::fmt::Debug for ObjectDetached {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ use gix_object::Kind::*;
+ let type_name = match self.kind {
+ Blob => "Blob",
+ Commit => "Commit",
+ Tree => "Tree",
+ Tag => "Tag",
+ };
+ write!(f, "{}({})", type_name, self.id)
+ }
+}
+
+/// Consuming conversions to attached object kinds.
+impl<'repo> Object<'repo> {
+ pub(crate) fn from_data(
+ id: impl Into<ObjectId>,
+ kind: Kind,
+ data: Vec<u8>,
+ repo: &'repo crate::Repository,
+ ) -> Self {
+ Object {
+ id: id.into(),
+ kind,
+ data,
+ repo,
+ }
+ }
+
+    /// Transform this object into a tree, or panic if it is not a tree.
+ pub fn into_tree(self) -> Tree<'repo> {
+ match self.try_into() {
+ Ok(tree) => tree,
+ Err(this) => panic!("Tried to use {} as tree, but was {}", this.id, this.kind),
+ }
+ }
+
+    /// Transform this object into a commit, or panic if it is not a commit.
+ pub fn into_commit(self) -> Commit<'repo> {
+ match self.try_into() {
+ Ok(commit) => commit,
+ Err(this) => panic!("Tried to use {} as commit, but was {}", this.id, this.kind),
+ }
+ }
+
+    /// Transform this object into a commit, or return it as part of the `Err` if it is not a commit.
+ pub fn try_into_commit(self) -> Result<Commit<'repo>, try_into::Error> {
+ self.try_into().map_err(|this: Self| try_into::Error {
+ id: this.id,
+ actual: this.kind,
+ expected: gix_object::Kind::Commit,
+ })
+ }
+
+    /// Transform this object into a tag, or return it as part of the `Err` if it is not a tag.
+    pub fn try_into_tag(self) -> Result<Tag<'repo>, try_into::Error> {
+        self.try_into().map_err(|this: Self| try_into::Error {
+            id: this.id,
+            actual: this.kind,
+            expected: gix_object::Kind::Tag,
+        })
+ }
+
+    /// Transform this object into a tree, or return it as part of the `Err` if it is not a tree.
+ pub fn try_into_tree(self) -> Result<Tree<'repo>, try_into::Error> {
+ self.try_into().map_err(|this: Self| try_into::Error {
+ id: this.id,
+ actual: this.kind,
+ expected: gix_object::Kind::Tree,
+ })
+ }
+}
+
+impl<'repo> Object<'repo> {
+ /// Create an owned instance of this object, copying our data in the process.
+ pub fn detached(&self) -> ObjectDetached {
+ ObjectDetached {
+ id: self.id,
+ kind: self.kind,
+ data: self.data.clone(),
+ }
+ }
+
+ /// Sever the connection to the `Repository` and turn this instance into a standalone object.
+ pub fn detach(self) -> ObjectDetached {
+ self.into()
+ }
+}
+
+/// Conversions to detached, lower-level object types.
+impl<'repo> Object<'repo> {
+    /// Obtain a fully parsed commit whose fields reference our data buffer.
+ ///
+ /// # Panic
+ ///
+ /// - this object is not a commit
+ /// - the commit could not be decoded
+ pub fn to_commit_ref(&self) -> gix_object::CommitRef<'_> {
+ self.try_to_commit_ref().expect("BUG: need a commit")
+ }
+
+ /// Obtain a fully parsed commit whose fields reference our data buffer.
+ pub fn try_to_commit_ref(&self) -> Result<gix_object::CommitRef<'_>, conversion::Error> {
+ gix_object::Data::new(self.kind, &self.data)
+ .decode()?
+ .into_commit()
+ .ok_or(conversion::Error::UnexpectedType {
+ expected: gix_object::Kind::Commit,
+ actual: self.kind,
+ })
+ }
+
+    /// Obtain an iterator over commit tokens, like in [`try_to_commit_ref_iter()`][Object::try_to_commit_ref_iter()].
+ ///
+ /// # Panic
+ ///
+ /// - this object is not a commit
+ pub fn to_commit_ref_iter(&self) -> gix_object::CommitRefIter<'_> {
+ gix_object::Data::new(self.kind, &self.data)
+ .try_into_commit_iter()
+ .expect("BUG: This object must be a commit")
+ }
+
+ /// Obtain a commit token iterator from the data in this instance, if it is a commit.
+ pub fn try_to_commit_ref_iter(&self) -> Option<gix_object::CommitRefIter<'_>> {
+ gix_object::Data::new(self.kind, &self.data).try_into_commit_iter()
+ }
+
+ /// Obtain a tag token iterator from the data in this instance.
+ ///
+ /// # Panic
+ ///
+ /// - this object is not a tag
+ pub fn to_tag_ref_iter(&self) -> gix_object::TagRefIter<'_> {
+ gix_object::Data::new(self.kind, &self.data)
+ .try_into_tag_iter()
+ .expect("BUG: this object must be a tag")
+ }
+
+    /// Obtain a tag token iterator from the data in this instance, or `None` if this is not a tag.
+ pub fn try_to_tag_ref_iter(&self) -> Option<gix_object::TagRefIter<'_>> {
+ gix_object::Data::new(self.kind, &self.data).try_into_tag_iter()
+ }
+
+ /// Obtain a tag object from the data in this instance.
+ ///
+ /// # Panic
+ ///
+ /// - this object is not a tag
+ /// - the tag could not be decoded
+ pub fn to_tag_ref(&self) -> gix_object::TagRef<'_> {
+ self.try_to_tag_ref().expect("BUG: need tag")
+ }
+
+ /// Obtain a fully parsed tag object whose fields reference our data buffer.
+ pub fn try_to_tag_ref(&self) -> Result<gix_object::TagRef<'_>, conversion::Error> {
+ gix_object::Data::new(self.kind, &self.data)
+ .decode()?
+ .into_tag()
+ .ok_or(conversion::Error::UnexpectedType {
+ expected: gix_object::Kind::Tag,
+ actual: self.kind,
+ })
+ }
+
+ /// Return the attached id of this object.
+ pub fn id(&self) -> Id<'repo> {
+ Id::from_id(self.id, self.repo)
+ }
+}
diff --git a/vendor/gix/src/object/peel.rs b/vendor/gix/src/object/peel.rs
new file mode 100644
index 000000000..c906c0c75
--- /dev/null
+++ b/vendor/gix/src/object/peel.rs
@@ -0,0 +1,93 @@
+//!
+use crate::{
+ object,
+ object::{peel, Kind},
+ Object, Tree,
+};
+
+///
+pub mod to_kind {
+ mod error {
+
+ use crate::object;
+
+ /// The error returned by [`Object::peel_to_kind()`][crate::Object::peel_to_kind()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ FindExistingObject(#[from] object::find::existing::Error),
+ #[error("Last encountered object {oid} was {actual} while trying to peel to {expected}")]
+ NotFound {
+ oid: gix_hash::Prefix,
+ actual: object::Kind,
+ expected: object::Kind,
+ },
+ }
+ }
+ pub use error::Error;
+}
+
+impl<'repo> Object<'repo> {
+ // TODO: tests
+ /// Follow tags to their target and commits to trees until the given `kind` of object is encountered.
+ ///
+ /// Note that this object doesn't necessarily have to be the end of the chain.
+ /// Typical values are [`Kind::Commit`] or [`Kind::Tree`].
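+    ///
+    /// # Example
+    ///
+    /// A sketch of peeling an object down to a tree, assuming `obj` was obtained via [`find_object()`][crate::Repository::find_object()]:
+    ///
+    /// ```no_run
+    /// # fn example(obj: gix::Object<'_>) {
+    /// let _tree = obj
+    ///     .peel_to_kind(gix::object::Kind::Tree)
+    ///     .expect("the object peels down to a tree")
+    ///     .into_tree();
+    /// # }
+    /// ```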
+ pub fn peel_to_kind(mut self, kind: Kind) -> Result<Self, peel::to_kind::Error> {
+ loop {
+ match self.kind {
+ our_kind if kind == our_kind => {
+ return Ok(self);
+ }
+ Kind::Commit => {
+ let tree_id = self
+ .try_to_commit_ref_iter()
+ .expect("commit")
+ .tree_id()
+ .expect("valid commit");
+ let repo = self.repo;
+ drop(self);
+ self = repo.find_object(tree_id)?;
+ }
+ Kind::Tag => {
+ let target_id = self.to_tag_ref_iter().target_id().expect("valid tag");
+ let repo = self.repo;
+ drop(self);
+ self = repo.find_object(target_id)?;
+ }
+ Kind::Tree | Kind::Blob => {
+ return Err(peel::to_kind::Error::NotFound {
+ oid: self.id().shorten().unwrap_or_else(|_| self.id.into()),
+ actual: self.kind,
+ expected: kind,
+ })
+ }
+ }
+ }
+ }
+
+ /// Peel this object into a tree and return it, if this is possible.
+ pub fn peel_to_tree(self) -> Result<Tree<'repo>, peel::to_kind::Error> {
+ Ok(self.peel_to_kind(gix_object::Kind::Tree)?.into_tree())
+ }
+
+ // TODO: tests
+ /// Follow all tag object targets until a commit, tree or blob is reached.
+ ///
+ /// Note that this method is different from [`peel_to_kind(…)`][Object::peel_to_kind()] as it won't
+ /// peel commits to their tree, but handles tags only.
+ pub fn peel_tags_to_end(mut self) -> Result<Self, object::find::existing::Error> {
+ loop {
+ match self.kind {
+ Kind::Commit | Kind::Tree | Kind::Blob => break Ok(self),
+ Kind::Tag => {
+ let target_id = self.to_tag_ref_iter().target_id().expect("valid tag");
+ let repo = self.repo;
+ drop(self);
+ self = repo.find_object(target_id)?;
+ }
+ }
+ }
+ }
+}
diff --git a/vendor/gix/src/object/tag.rs b/vendor/gix/src/object/tag.rs
new file mode 100644
index 000000000..ce9d7360a
--- /dev/null
+++ b/vendor/gix/src/object/tag.rs
@@ -0,0 +1,15 @@
+use crate::{ext::ObjectIdExt, Tag};
+
+impl<'repo> Tag<'repo> {
+ /// Decode this tag partially and return the id of its target.
+ pub fn target_id(&self) -> Result<crate::Id<'repo>, gix_object::decode::Error> {
+ gix_object::TagRefIter::from_bytes(&self.data)
+ .target_id()
+ .map(|id| id.attach(self.repo))
+ }
+
+ /// Decode this tag partially and return the tagger, if the field exists.
+ pub fn tagger(&self) -> Result<Option<gix_actor::SignatureRef<'_>>, gix_object::decode::Error> {
+ gix_object::TagRefIter::from_bytes(&self.data).tagger()
+ }
+}
diff --git a/vendor/gix/src/object/tree/diff/change.rs b/vendor/gix/src/object/tree/diff/change.rs
new file mode 100644
index 000000000..e6826d6ed
--- /dev/null
+++ b/vendor/gix/src/object/tree/diff/change.rs
@@ -0,0 +1,111 @@
+use crate::{bstr::BStr, Id};
+
+/// Information about the diff performed to detect similarity of a [Rewrite][Event::Rewrite].
+#[derive(Debug, Default, Clone, Copy, Eq, PartialEq)]
+pub struct DiffLineStats {
+ /// The amount of lines to remove from the source to get to the destination.
+ pub removals: u32,
+ /// The amount of lines to add to the source to get to the destination.
+ pub insertions: u32,
+ /// The amount of lines of the previous state, in the source.
+ pub before: u32,
+ /// The amount of lines of the new state, in the destination.
+ pub after: u32,
+}
+
+/// An event emitted when finding differences between two trees.
+#[derive(Debug, Clone, Copy)]
+pub enum Event<'a, 'old, 'new> {
+ /// An entry was added, like the addition of a file or directory.
+ Addition {
+ /// The mode of the added entry.
+ entry_mode: gix_object::tree::EntryMode,
+ /// The object id of the added entry.
+ id: Id<'new>,
+ },
+ /// An entry was deleted, like the deletion of a file or directory.
+ Deletion {
+ /// The mode of the deleted entry.
+ entry_mode: gix_object::tree::EntryMode,
+ /// The object id of the deleted entry.
+ id: Id<'old>,
+ },
+ /// An entry was modified, e.g. changing the contents of a file adjusts its object id and turning
+ /// a file into a symbolic link adjusts its mode.
+ Modification {
+ /// The mode of the entry before the modification.
+ previous_entry_mode: gix_object::tree::EntryMode,
+ /// The object id of the entry before the modification.
+ previous_id: Id<'old>,
+
+ /// The mode of the entry after the modification.
+ entry_mode: gix_object::tree::EntryMode,
+ /// The object id after the modification.
+ id: Id<'new>,
+ },
+ /// Entries are considered rewritten if they are not trees and they, according to some understanding of identity, were renamed
+ /// or copied.
+ /// In case of renames, this means they originally appeared as [`Deletion`][Event::Deletion] signalling their source as well as an
+ /// [`Addition`][Event::Addition] acting as destination.
+ ///
+    /// In case of copies, the `copy` flag is true and typically indicates that a perfect copy of a source was made.
+ ///
+ /// This variant can only be encountered if [rewrite tracking][super::Platform::track_rewrites()] is enabled.
+ ///
+ /// Note that mode changes may have occurred as well, i.e. changes from executable to non-executable or vice-versa.
+ Rewrite {
+ /// The location of the source of the rename operation.
+ ///
+ /// It may be empty if neither [file names][super::Platform::track_filename()] nor [file paths][super::Platform::track_path()]
+ /// are tracked.
+ source_location: &'a BStr,
+ /// The mode of the entry before the rename.
+ source_entry_mode: gix_object::tree::EntryMode,
+ /// The object id of the entry before the rename.
+ ///
+ /// Note that this is the same as `id` if we require the [similarity to be 100%][super::Rewrites::percentage], but may
+ /// be different otherwise.
+ source_id: Id<'old>,
+ /// Information about the diff we performed to detect similarity and match the `source_id` with the current state at `id`.
+ /// It's `None` if `source_id` is equal to `id`, as identity made an actual diff computation unnecessary.
+ diff: Option<DiffLineStats>,
+ /// The mode of the entry after the rename.
+ /// It could differ but still be considered a rename as we are concerned only about content.
+ entry_mode: gix_object::tree::EntryMode,
+ /// The object id after the rename.
+ id: Id<'new>,
+ /// If true, this rewrite is created by copy, and `source_id` is pointing to its source. Otherwise it's a rename, and `source_id`
+ /// points to a deleted object, as renames are tracked as deletions and additions of the same or similar content.
+ copy: bool,
+ },
+}
+
+impl<'a, 'old, 'new> Event<'a, 'old, 'new> {
+ /// Produce a platform for performing a line-diff, or `None` if this is not a [`Modification`][Event::Modification]
+ /// or one of the entries to compare is not a blob.
+ pub fn diff(
+ &self,
+ ) -> Option<Result<crate::object::blob::diff::Platform<'old, 'new>, crate::object::blob::diff::init::Error>> {
+ match self {
+ Event::Modification {
+ previous_entry_mode,
+ previous_id,
+ entry_mode,
+ id,
+ } if entry_mode.is_blob() && previous_entry_mode.is_blob() => {
+ Some(crate::object::blob::diff::Platform::from_ids(previous_id, id))
+ }
+ _ => None,
+ }
+ }
+
+ /// Return the current mode of this instance.
+ pub fn entry_mode(&self) -> gix_object::tree::EntryMode {
+ match self {
+ Event::Addition { entry_mode, .. }
+ | Event::Deletion { entry_mode, .. }
+ | Event::Modification { entry_mode, .. }
+ | Event::Rewrite { entry_mode, .. } => *entry_mode,
+ }
+ }
+}
diff --git a/vendor/gix/src/object/tree/diff/for_each.rs b/vendor/gix/src/object/tree/diff/for_each.rs
new file mode 100644
index 000000000..5cae4cf2f
--- /dev/null
+++ b/vendor/gix/src/object/tree/diff/for_each.rs
@@ -0,0 +1,235 @@
+use gix_object::TreeRefIter;
+use gix_odb::FindExt;
+
+use super::{change, Action, Change, Platform};
+use crate::{
+ bstr::BStr,
+ ext::ObjectIdExt,
+ object::tree::{
+ diff,
+ diff::{rewrites, tracked},
+ },
+ Repository, Tree,
+};
+
+/// The error returned by methods on the [diff platform][Platform].
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum Error {
+ #[error(transparent)]
+ Diff(#[from] gix_diff::tree::changes::Error),
+ #[error("The user-provided callback failed")]
+ ForEach(#[source] Box<dyn std::error::Error + Send + Sync + 'static>),
+ #[error("Could not find blob for similarity checking")]
+ FindExistingBlob(#[from] crate::object::find::existing::Error),
+ #[error("Could not configure diff algorithm prior to checking similarity")]
+ ConfigureDiffAlgorithm(#[from] crate::config::diff::algorithm::Error),
+ #[error("Could not traverse tree to obtain possible sources for copies")]
+ TraverseTreeForExhaustiveCopyDetection(#[from] gix_traverse::tree::breadthfirst::Error),
+}
+
+///
+#[derive(Clone, Debug, Copy, PartialEq)]
+pub struct Outcome {
+ /// Available only if [rewrite-tracking was enabled][Platform::track_rewrites()].
+ pub rewrites: Option<rewrites::Outcome>,
+}
+
+/// Compare this tree against another.
+impl<'a, 'old> Platform<'a, 'old> {
+ /// Call `for_each` repeatedly with all changes that are needed to convert the source of the diff to the tree to `other`.
+ ///
+    /// `other` could also be created with the [`empty_tree()`][crate::Repository::empty_tree()] method to handle the first commit
+    /// in a repository - it doesn't have a parent, which is equivalent to comparing 'nothing' to something.
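+    ///
+    /// # Example
+    ///
+    /// A sketch that prints every change needed to turn `old` into `new`, assuming both trees were obtained from open repositories:
+    ///
+    /// ```no_run
+    /// # fn example(old: &gix::Tree<'_>, new: &gix::Tree<'_>) {
+    /// use gix::object::tree::diff::Action;
+    ///
+    /// old.changes()
+    ///     .expect("the diff configuration is valid")
+    ///     .track_path()
+    ///     .for_each_to_obtain_tree(new, |change| -> Result<Action, std::io::Error> {
+    ///         println!("{}: {:?}", change.location, change.event.entry_mode());
+    ///         Ok(Action::Continue)
+    ///     })
+    ///     .expect("the diff traversal completes");
+    /// # }
+    /// ```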
+ pub fn for_each_to_obtain_tree<'new, E>(
+ &mut self,
+ other: &Tree<'new>,
+ for_each: impl FnMut(Change<'_, 'old, 'new>) -> Result<Action, E>,
+ ) -> Result<Outcome, Error>
+ where
+ E: std::error::Error + Sync + Send + 'static,
+ {
+ let repo = self.lhs.repo;
+ let mut delegate = Delegate {
+ src_tree: self.lhs,
+ other_repo: other.repo,
+ recorder: gix_diff::tree::Recorder::default().track_location(self.tracking),
+ visit: for_each,
+ tracked: self.rewrites.map(|r| tracked::State::new(r, self.tracking)),
+ err: None,
+ };
+ match gix_diff::tree::Changes::from(TreeRefIter::from_bytes(&self.lhs.data)).needed_to_obtain(
+ TreeRefIter::from_bytes(&other.data),
+ &mut self.state,
+ |oid, buf| repo.objects.find_tree_iter(oid, buf),
+ &mut delegate,
+ ) {
+ Ok(()) => {
+ let outcome = Outcome {
+ rewrites: delegate.process_tracked_changes()?,
+ };
+ match delegate.err {
+ Some(err) => Err(Error::ForEach(Box::new(err))),
+ None => Ok(outcome),
+ }
+ }
+ Err(gix_diff::tree::changes::Error::Cancelled) => delegate
+ .err
+ .map(|err| Err(Error::ForEach(Box::new(err))))
+ .unwrap_or(Err(Error::Diff(gix_diff::tree::changes::Error::Cancelled))),
+ Err(err) => Err(err.into()),
+ }
+ }
+}
+
+struct Delegate<'a, 'old, 'new, VisitFn, E> {
+ src_tree: &'a Tree<'old>,
+ other_repo: &'new Repository,
+ recorder: gix_diff::tree::Recorder,
+ visit: VisitFn,
+ tracked: Option<tracked::State>,
+ err: Option<E>,
+}
+
+impl<'a, 'old, 'new, VisitFn, E> Delegate<'a, 'old, 'new, VisitFn, E>
+where
+ VisitFn: for<'delegate> FnMut(Change<'delegate, 'old, 'new>) -> Result<Action, E>,
+ E: std::error::Error + Sync + Send + 'static,
+{
+ /// Call `visit` on an attached version of `change`.
+ fn emit_change(
+ change: gix_diff::tree::visit::Change,
+ location: &BStr,
+ visit: &mut VisitFn,
+ repo: &'old Repository,
+ other_repo: &'new Repository,
+ stored_err: &mut Option<E>,
+ ) -> gix_diff::tree::visit::Action {
+ use gix_diff::tree::visit::Change::*;
+ let event = match change {
+ Addition { entry_mode, oid } => change::Event::Addition {
+ entry_mode,
+ id: oid.attach(other_repo),
+ },
+ Deletion { entry_mode, oid } => change::Event::Deletion {
+ entry_mode,
+ id: oid.attach(repo),
+ },
+ Modification {
+ previous_entry_mode,
+ previous_oid,
+ entry_mode,
+ oid,
+ } => change::Event::Modification {
+ previous_entry_mode,
+ entry_mode,
+ previous_id: previous_oid.attach(repo),
+ id: oid.attach(other_repo),
+ },
+ };
+ match visit(Change { event, location }) {
+ Ok(Action::Cancel) => gix_diff::tree::visit::Action::Cancel,
+ Ok(Action::Continue) => gix_diff::tree::visit::Action::Continue,
+ Err(err) => {
+ *stored_err = Some(err);
+ gix_diff::tree::visit::Action::Cancel
+ }
+ }
+ }
+
+ fn process_tracked_changes(&mut self) -> Result<Option<rewrites::Outcome>, Error> {
+ let tracked = match self.tracked.as_mut() {
+ Some(t) => t,
+ None => return Ok(None),
+ };
+
+ let outcome = tracked.emit(
+ |dest, source| match source {
+ Some(source) => {
+ let (oid, mode) = dest.change.oid_and_entry_mode();
+ let change = diff::Change {
+ location: dest.location,
+ event: diff::change::Event::Rewrite {
+ source_location: source.location,
+ source_entry_mode: source.mode,
+ source_id: source.id.attach(self.src_tree.repo),
+ entry_mode: mode,
+ id: oid.to_owned().attach(self.other_repo),
+ diff: source.diff,
+ copy: match source.kind {
+ tracked::visit::Kind::RenameTarget => false,
+ tracked::visit::Kind::CopyDestination => true,
+ },
+ },
+ };
+ match (self.visit)(change) {
+ Ok(Action::Cancel) => gix_diff::tree::visit::Action::Cancel,
+ Ok(Action::Continue) => gix_diff::tree::visit::Action::Continue,
+ Err(err) => {
+ self.err = Some(err);
+ gix_diff::tree::visit::Action::Cancel
+ }
+ }
+ }
+ None => Self::emit_change(
+ dest.change,
+ dest.location,
+ &mut self.visit,
+ self.src_tree.repo,
+ self.other_repo,
+ &mut self.err,
+ ),
+ },
+ self.src_tree,
+ )?;
+ Ok(Some(outcome))
+ }
+}
+
+impl<'a, 'old, 'new, VisitFn, E> gix_diff::tree::Visit for Delegate<'a, 'old, 'new, VisitFn, E>
+where
+ VisitFn: for<'delegate> FnMut(Change<'delegate, 'old, 'new>) -> Result<Action, E>,
+ E: std::error::Error + Sync + Send + 'static,
+{
+ fn pop_front_tracked_path_and_set_current(&mut self) {
+ self.recorder.pop_front_tracked_path_and_set_current()
+ }
+
+ fn push_back_tracked_path_component(&mut self, component: &BStr) {
+ self.recorder.push_back_tracked_path_component(component)
+ }
+
+ fn push_path_component(&mut self, component: &BStr) {
+ self.recorder.push_path_component(component)
+ }
+
+ fn pop_path_component(&mut self) {
+ self.recorder.pop_path_component()
+ }
+
+ fn visit(&mut self, change: gix_diff::tree::visit::Change) -> gix_diff::tree::visit::Action {
+ match self.tracked.as_mut() {
+ Some(tracked) => tracked
+ .try_push_change(change, self.recorder.path())
+ .map(|change| {
+ Self::emit_change(
+ change,
+ self.recorder.path(),
+ &mut self.visit,
+ self.src_tree.repo,
+ self.other_repo,
+ &mut self.err,
+ )
+ })
+ .unwrap_or(gix_diff::tree::visit::Action::Continue),
+ None => Self::emit_change(
+ change,
+ self.recorder.path(),
+ &mut self.visit,
+ self.src_tree.repo,
+ self.other_repo,
+ &mut self.err,
+ ),
+ }
+ }
+}
diff --git a/vendor/gix/src/object/tree/diff/mod.rs b/vendor/gix/src/object/tree/diff/mod.rs
new file mode 100644
index 000000000..5a3bf6ddf
--- /dev/null
+++ b/vendor/gix/src/object/tree/diff/mod.rs
@@ -0,0 +1,118 @@
+use gix_diff::tree::recorder::Location;
+
+use crate::{bstr::BStr, Tree};
+
+/// Returned by the `for_each` function to control flow.
+#[derive(Clone, Copy, PartialOrd, PartialEq, Ord, Eq, Hash)]
+pub enum Action {
+ /// Continue the traversal of changes.
+ Continue,
+ /// Stop the traversal of changes and stop calling this function.
+ Cancel,
+}
+
+impl Default for Action {
+ fn default() -> Self {
+ Action::Continue
+ }
+}
+
+/// Represents any possible change in order to turn one tree into another.
+#[derive(Debug, Clone, Copy)]
+pub struct Change<'a, 'old, 'new> {
+ /// The location of the file or directory described by `event`, if tracking was enabled.
+ ///
+ /// Otherwise this value is always an empty path.
+ pub location: &'a BStr,
+ /// The diff event itself to provide information about what would need to change.
+ pub event: change::Event<'a, 'old, 'new>,
+}
+
+///
+pub mod change;
+
+/// Diffing
+impl<'repo> Tree<'repo> {
+ /// Return a platform to see the changes needed to create other trees, for instance.
+ ///
+ /// # Performance
+ ///
+ /// It's highly recommended to set an object cache to avoid extracting the same object multiple times.
+ /// By default, similar to `git diff`, rename tracking will be enabled if it is not configured.
+ #[allow(clippy::result_large_err)]
+ pub fn changes<'a>(&'a self) -> Result<Platform<'a, 'repo>, rewrites::Error> {
+ Ok(Platform {
+ state: Default::default(),
+ lhs: self,
+ tracking: None,
+ rewrites: self.repo.config.diff_renames()?.unwrap_or_default().into(),
+ })
+ }
+}
+
+/// The diffing platform returned by [`Tree::changes()`].
+#[derive(Clone)]
+pub struct Platform<'a, 'repo> {
+ state: gix_diff::tree::State,
+ lhs: &'a Tree<'repo>,
+ tracking: Option<Location>,
+ rewrites: Option<Rewrites>,
+}
+
+/// A structure to capture how to perform rename and copy tracking
+#[derive(Debug, Copy, Clone, PartialEq)]
+pub struct Rewrites {
+ /// If `Some(…)`, do also find copies. `None` is the default which does not try to detect copies at all.
+ ///
+    /// Note that this is an even more expensive operation than detecting renames.
+ pub copies: Option<rewrites::Copies>,
+ /// The percentage of similarity needed for files to be considered renamed, defaulting to `Some(0.5)`.
+ /// This field is similar to `git diff -M50%`.
+ ///
+ /// If `None`, files are only considered equal if their content matches 100%.
+ /// Note that values greater than 1.0 have no different effect than 1.0.
+ pub percentage: Option<f32>,
+ /// The amount of files to consider for fuzzy rename or copy tracking. Defaults to 1000, meaning that only 1000*1000
+ /// combinations can be tested for fuzzy matches, i.e. the ones that try to find matches by comparing similarity.
+ /// If 0, there is no limit.
+ ///
+ /// If the limit would not be enough to test the entire set of combinations, the algorithm will trade in precision and not
+ /// run the fuzzy version of identity tests at all. That way results are never partial.
+ pub limit: usize,
+}
+
+///
+pub mod rewrites;
+
+/// Types to actually perform rename tracking.
+pub(crate) mod tracked;
+
+/// Configuration
+impl<'a, 'repo> Platform<'a, 'repo> {
+ /// Keep track of file-names, which makes the [`location`][Change::location] field usable with the filename of the changed item.
+ pub fn track_filename(&mut self) -> &mut Self {
+ self.tracking = Some(Location::FileName);
+ self
+ }
+
+ /// Keep track of the entire path of a change, relative to the repository.
+ ///
+ /// This makes the [`location`][Change::location] field usable.
+ pub fn track_path(&mut self) -> &mut Self {
+ self.tracking = Some(Location::Path);
+ self
+ }
+
+ /// Provide `None` to disable rewrite tracking entirely, or pass `Some(<configuration>)` to control to
+    /// what extent rename and copy tracking is performed.
+ ///
+ /// Note that by default, the git configuration determines rewrite tracking and git defaults are used
+ /// if nothing is configured, which turns rename tracking with 50% similarity on, while not tracking copies at all.
+ pub fn track_rewrites(&mut self, renames: Option<Rewrites>) -> &mut Self {
+ self.rewrites = renames;
+ self
+ }
+}
+
+///
+pub mod for_each;
diff --git a/vendor/gix/src/object/tree/diff/rewrites.rs b/vendor/gix/src/object/tree/diff/rewrites.rs
new file mode 100644
index 000000000..304894d15
--- /dev/null
+++ b/vendor/gix/src/object/tree/diff/rewrites.rs
@@ -0,0 +1,108 @@
+use crate::{
+ config::{cache::util::ApplyLeniency, tree::Diff},
+ diff::rename::Tracking,
+ object::tree::diff::Rewrites,
+};
+
+/// From where to source copies
+#[derive(Debug, Copy, Clone, Eq, PartialEq)]
+pub enum CopySource {
+ /// Find copies from the set of modified files only.
+ FromSetOfModifiedFiles,
+ /// Find copies from the set of changed files, as well as all files known to the source (i.e. previous state) of the tree.
+ ///
+ /// This can be an expensive operation as it scales exponentially with the total amount of files in the tree.
+ FromSetOfModifiedFilesAndSourceTree,
+}
+
+/// How to determine copied files.
+#[derive(Debug, Copy, Clone, PartialEq)]
+pub struct Copies {
+ /// The set of files to search when finding the source of copies.
+ pub source: CopySource,
+ /// Equivalent to [`Rewrites::percentage`], but used for copy tracking.
+ ///
+    /// Useful to have similarity-based rename tracking and cheaper copy tracking, which is also the default
+    /// as only identity plays a role.
+ pub percentage: Option<f32>,
+}
+
+impl Default for Copies {
+ fn default() -> Self {
+ Copies {
+ source: CopySource::FromSetOfModifiedFiles,
+ percentage: Some(0.5),
+ }
+ }
+}
+
+/// Information collected while handling rewrites of files which may be tracked.
+#[derive(Default, Clone, Copy, Debug, PartialEq)]
+pub struct Outcome {
+ /// The options used to guide the rewrite tracking. Either fully provided by the caller or retrieved from git configuration.
+ pub options: Rewrites,
+ /// The amount of similarity checks that have been conducted to find renamed files and potentially copies.
+ pub num_similarity_checks: usize,
+ /// Set to the amount of worst-case rename permutations we didn't search as our limit didn't allow it.
+ pub num_similarity_checks_skipped_for_rename_tracking_due_to_limit: usize,
+ /// Set to the amount of worst-case copy permutations we didn't search as our limit didn't allow it.
+ pub num_similarity_checks_skipped_for_copy_tracking_due_to_limit: usize,
+}
+
+/// The error returned by [`Rewrites::try_from_config()`].
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum Error {
+ #[error(transparent)]
+ ConfigDiffRenames(#[from] crate::config::key::GenericError),
+ #[error(transparent)]
+ ConfigDiffRenameLimit(#[from] crate::config::unsigned_integer::Error),
+}
+
+/// The default settings for rewrites according to the git configuration defaults.
+impl Default for Rewrites {
+ fn default() -> Self {
+ Rewrites {
+ copies: None,
+ percentage: Some(0.5),
+ limit: 1000,
+ }
+ }
+}
+
+impl Rewrites {
+ /// Create an instance by reading all relevant information from the `config`uration, while being `lenient` or not.
+ /// Returns `Ok(None)` if nothing is configured.
+ ///
+ /// Note that missing values will be defaulted similar to what git does.
+ #[allow(clippy::result_large_err)]
+ pub fn try_from_config(config: &gix_config::File<'static>, lenient: bool) -> Result<Option<Self>, Error> {
+ let key = "diff.renames";
+ let copies = match config
+ .boolean_by_key(key)
+ .map(|value| Diff::RENAMES.try_into_renames(value, || config.string_by_key(key)))
+ .transpose()
+ .with_leniency(lenient)?
+ {
+ Some(renames) => match renames {
+ Tracking::Disabled => return Ok(None),
+ Tracking::Renames => None,
+ Tracking::RenamesAndCopies => Some(Copies::default()),
+ },
+ None => return Ok(None),
+ };
+
+ let default = Self::default();
+ Ok(Rewrites {
+ copies,
+ limit: config
+ .integer_by_key("diff.renameLimit")
+ .map(|value| Diff::RENAME_LIMIT.try_into_usize(value))
+ .transpose()
+ .with_leniency(lenient)?
+ .unwrap_or(default.limit),
+ ..default
+ }
+ .into())
+ }
+}
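+
+// Editor's note: an illustrative sketch, not part of the upstream file, showing how the types
+// above compose; the 0.75 copy-similarity threshold is an arbitrary example value.
+//
+//     let rewrites = Rewrites {
+//         copies: Some(Copies {
+//             source: CopySource::FromSetOfModifiedFilesAndSourceTree,
+//             percentage: Some(0.75),
+//         }),
+//         ..Rewrites::default()
+//     };
+//
+// This keeps rename tracking at the default 50% similarity threshold and additionally searches
+// the entire source tree for copies that are at least 75% similar.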
diff --git a/vendor/gix/src/object/tree/diff/tracked.rs b/vendor/gix/src/object/tree/diff/tracked.rs
new file mode 100644
index 000000000..3bbe01624
--- /dev/null
+++ b/vendor/gix/src/object/tree/diff/tracked.rs
@@ -0,0 +1,491 @@
+use std::ops::Range;
+
+use gix_diff::tree::visit::Change;
+use gix_object::tree::EntryMode;
+
+use crate::{
+ bstr::BStr,
+ ext::ObjectIdExt,
+ object::tree::diff::{
+ change::DiffLineStats,
+ rewrites::{CopySource, Outcome},
+ Rewrites,
+ },
+ Repository, Tree,
+};
+
+/// A set of tracked items allows figuring out their relations by determining their similarity.
+pub struct Item {
+ /// The underlying raw change
+ change: Change,
+ /// The slice into the backing buffer for paths.
+ location: Range<usize>,
+ /// If true, this item was already emitted, i.e. seen by the caller.
+ emitted: bool,
+}
+
+impl Item {
+ fn location<'a>(&self, backing: &'a [u8]) -> &'a BStr {
+ backing[self.location.clone()].as_ref()
+ }
+ fn entry_mode_compatible(&self, mode: EntryMode) -> bool {
+ use EntryMode::*;
+ matches!(
+ (mode, self.change.entry_mode()),
+ (Blob | BlobExecutable, Blob | BlobExecutable) | (Link, Link)
+ )
+ }
+
+ fn is_source_for_destination_of(&self, kind: visit::Kind, dest_item_mode: EntryMode) -> bool {
+ self.entry_mode_compatible(dest_item_mode)
+ && match kind {
+ visit::Kind::RenameTarget => !self.emitted && matches!(self.change, Change::Deletion { .. }),
+ visit::Kind::CopyDestination => {
+ matches!(self.change, Change::Modification { .. })
+ }
+ }
+ }
+}
+
+pub struct State {
+ items: Vec<Item>,
+ path_backing: Vec<u8>,
+ rewrites: Rewrites,
+ tracking: Option<gix_diff::tree::recorder::Location>,
+}
+
+pub mod visit {
+ use crate::{bstr::BStr, object::tree::diff::change::DiffLineStats};
+
+ pub struct Source<'a> {
+ pub mode: gix_object::tree::EntryMode,
+ pub id: gix_hash::ObjectId,
+ pub kind: Kind,
+ pub location: &'a BStr,
+ pub diff: Option<DiffLineStats>,
+ }
+
+ #[derive(Debug, Copy, Clone, Eq, PartialEq)]
+ pub enum Kind {
+ RenameTarget,
+ CopyDestination,
+ }
+
+ pub struct Destination<'a> {
+ pub change: gix_diff::tree::visit::Change,
+ pub location: &'a BStr,
+ }
+}
+
+impl State {
+ pub(crate) fn new(renames: Rewrites, tracking: Option<gix_diff::tree::recorder::Location>) -> Self {
+ State {
+ items: vec![],
+ path_backing: vec![],
+ rewrites: renames,
+ tracking,
+ }
+ }
+}
+
+/// Build state and find matches.
+impl State {
+ /// We may refuse the push if that information isn't needed for what we have to track.
+ pub fn try_push_change(&mut self, change: Change, location: &BStr) -> Option<Change> {
+ if !change.entry_mode().is_blob_or_symlink() {
+ return Some(change);
+ }
+ let keep = match (self.rewrites.copies, &change) {
+ (Some(_find_copies), _) => true,
+ (None, Change::Modification { .. }) => false,
+ (None, _) => true,
+ };
+
+ if !keep {
+ return Some(change);
+ }
+
+ let start = self.path_backing.len();
+ self.path_backing.extend_from_slice(location);
+ self.items.push(Item {
+ location: start..self.path_backing.len(),
+ change,
+ emitted: false,
+ });
+ None
+ }
+
+ /// Can only be called once effectively as it alters its own state.
+ ///
+ /// `cb(destination, source)` is called for each item, either with `Some(source)` if it's
+ /// the destination of a copy or rename, or with `None` for source if no relation to other
+ /// items in the tracked set exists.
+ pub fn emit(
+ &mut self,
+ mut cb: impl FnMut(visit::Destination<'_>, Option<visit::Source<'_>>) -> gix_diff::tree::visit::Action,
+ src_tree: &Tree<'_>,
+ ) -> Result<Outcome, crate::object::tree::diff::for_each::Error> {
+ fn by_id_and_location(a: &Item, b: &Item) -> std::cmp::Ordering {
+ a.change.oid().cmp(b.change.oid()).then_with(|| {
+ a.location
+ .start
+ .cmp(&b.location.start)
+ .then(a.location.end.cmp(&b.location.end))
+ })
+ }
+ self.items.sort_by(by_id_and_location);
+
+ let mut out = Outcome {
+ options: self.rewrites,
+ ..Default::default()
+ };
+ out = self.match_pairs_of_kind(
+ visit::Kind::RenameTarget,
+ &mut cb,
+ self.rewrites.percentage,
+ out,
+ src_tree.repo,
+ )?;
+
+ if let Some(copies) = self.rewrites.copies {
+ out = self.match_pairs_of_kind(
+ visit::Kind::CopyDestination,
+ &mut cb,
+ copies.percentage,
+ out,
+ src_tree.repo,
+ )?;
+
+ match copies.source {
+ CopySource::FromSetOfModifiedFiles => {}
+ CopySource::FromSetOfModifiedFilesAndSourceTree => {
+ src_tree
+ .traverse()
+ .breadthfirst(&mut tree_to_events::Delegate::new(self))?;
+ self.items.sort_by(by_id_and_location);
+
+ out = self.match_pairs_of_kind(
+ visit::Kind::CopyDestination,
+ &mut cb,
+ copies.percentage,
+ out,
+ src_tree.repo,
+ )?;
+ }
+ }
+ }
+
+ self.items
+ .sort_by(|a, b| a.location(&self.path_backing).cmp(b.location(&self.path_backing)));
+ for item in self.items.drain(..).filter(|item| !item.emitted) {
+ if cb(
+ visit::Destination {
+ location: item.location(&self.path_backing),
+ change: item.change,
+ },
+ None,
+ ) == gix_diff::tree::visit::Action::Cancel
+ {
+ break;
+ }
+ }
+ Ok(out)
+ }
+
+ fn match_pairs_of_kind(
+ &mut self,
+ kind: visit::Kind,
+ cb: &mut impl FnMut(visit::Destination<'_>, Option<visit::Source<'_>>) -> gix_diff::tree::visit::Action,
+ percentage: Option<f32>,
+ mut out: Outcome,
+ repo: &Repository,
+ ) -> Result<Outcome, crate::object::tree::diff::for_each::Error> {
+ // we try to cheaply reduce the set of possibilities first, before possibly looking more exhaustively.
+ let needs_second_pass = !needs_exact_match(percentage);
+ if self.match_pairs(cb, None /* by identity */, kind, repo, &mut out)? == gix_diff::tree::visit::Action::Cancel
+ {
+ return Ok(out);
+ }
+ if needs_second_pass {
+ let is_limited = if self.rewrites.limit == 0 {
+ false
+ } else if let Some(permutations) = permutations_over_limit(&self.items, self.rewrites.limit, kind) {
+ match kind {
+ visit::Kind::RenameTarget => {
+ out.num_similarity_checks_skipped_for_rename_tracking_due_to_limit = permutations;
+ }
+ visit::Kind::CopyDestination => {
+ out.num_similarity_checks_skipped_for_copy_tracking_due_to_limit = permutations;
+ }
+ }
+ true
+ } else {
+ false
+ };
+ if !is_limited {
+ self.match_pairs(cb, self.rewrites.percentage, kind, repo, &mut out)?;
+ }
+ }
+ Ok(out)
+ }
+
+ fn match_pairs(
+ &mut self,
+ cb: &mut impl FnMut(visit::Destination<'_>, Option<visit::Source<'_>>) -> gix_diff::tree::visit::Action,
+ percentage: Option<f32>,
+ kind: visit::Kind,
+ repo: &Repository,
+ stats: &mut Outcome,
+ ) -> Result<gix_diff::tree::visit::Action, crate::object::tree::diff::for_each::Error> {
+ // TODO(perf): reuse object data and interner state and interned tokens, make these available to `find_match()`
+ let mut dest_ofs = 0;
+ while let Some((mut dest_idx, dest)) = self.items[dest_ofs..].iter().enumerate().find_map(|(idx, item)| {
+ (!item.emitted && matches!(item.change, Change::Addition { .. })).then_some((idx, item))
+ }) {
+ dest_idx += dest_ofs;
+ dest_ofs = dest_idx + 1;
+ let src =
+ find_match(&self.items, dest, dest_idx, percentage, kind, repo, stats)?.map(|(src_idx, src, diff)| {
+ let (id, mode) = src.change.oid_and_entry_mode();
+ let id = id.to_owned();
+ let location = src.location(&self.path_backing);
+ (
+ visit::Source {
+ mode,
+ id,
+ kind,
+ location,
+ diff,
+ },
+ src_idx,
+ )
+ });
+ if src.is_none() {
+ continue;
+ }
+ let location = dest.location(&self.path_backing);
+ let change = dest.change.clone();
+ let dest = visit::Destination { change, location };
+ self.items[dest_idx].emitted = true;
+ if let Some(src_idx) = src.as_ref().map(|t| t.1) {
+ self.items[src_idx].emitted = true;
+ }
+ if cb(dest, src.map(|t| t.0)) == gix_diff::tree::visit::Action::Cancel {
+ return Ok(gix_diff::tree::visit::Action::Cancel);
+ }
+ }
+ Ok(gix_diff::tree::visit::Action::Continue)
+ }
+}
+
+fn permutations_over_limit(items: &[Item], limit: usize, kind: visit::Kind) -> Option<usize> {
+ let (sources, destinations) = items
+ .iter()
+ .filter(|item| match kind {
+ visit::Kind::RenameTarget => !item.emitted,
+ visit::Kind::CopyDestination => true,
+ })
+ .fold((0, 0), |(mut src, mut dest), item| {
+ match item.change {
+ Change::Addition { .. } => {
+ dest += 1;
+ }
+ Change::Deletion { .. } => {
+ if kind == visit::Kind::RenameTarget {
+ src += 1
+ }
+ }
+ Change::Modification { .. } => {
+ if kind == visit::Kind::CopyDestination {
+ src += 1
+ }
+ }
+ }
+ (src, dest)
+ });
+ let permutations = sources * destinations;
+ (permutations > limit * limit).then_some(permutations)
+}
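+
+// Editor's note (illustrative arithmetic): with the default `limit` of 1000, up to
+// 1000 * 1000 = 1_000_000 source/destination pairings are searched. For instance, 800 deletion
+// candidates against 1200 additions yield 960_000 permutations and are still searched, while
+// 1100 against 1000 exceed the threshold and only the skipped count is recorded instead.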
+
+fn needs_exact_match(percentage: Option<f32>) -> bool {
+ percentage.map_or(true, |p| p >= 1.0)
+}
+
+/// <src_idx, src, possibly diff stat>
+type SourceTuple<'a> = (usize, &'a Item, Option<DiffLineStats>);
+
+/// Find `item` in our set of items ignoring `item_idx` to avoid finding ourselves, by similarity indicated by `percentage`.
+/// The latter can be `None` or `Some(x)` where `x>=1` for identity, and anything else for similarity.
+/// We also ignore emitted items entirely.
+/// Use `kind` to indicate what kind of match we are looking for, which might be deletions matching an `item` addition, or
+/// any non-deletion otherwise.
+/// Note that we always try to find by identity first even if a percentage is given as it's much faster and may reduce the set
+/// of items to be searched.
+fn find_match<'a>(
+ items: &'a [Item],
+ item: &Item,
+ item_idx: usize,
+ percentage: Option<f32>,
+ kind: visit::Kind,
+ repo: &Repository,
+ stats: &mut Outcome,
+) -> Result<Option<SourceTuple<'a>>, crate::object::tree::diff::for_each::Error> {
+ let (item_id, item_mode) = item.change.oid_and_entry_mode();
+ if needs_exact_match(percentage) || item_mode == gix_object::tree::EntryMode::Link {
+ let first_idx = items.partition_point(|a| a.change.oid() < item_id);
+ let range = match items.get(first_idx..).map(|items| {
+ let end = items
+ .iter()
+ .position(|a| a.change.oid() != item_id)
+ .map(|idx| first_idx + idx)
+ .unwrap_or(items.len());
+ first_idx..end
+ }) {
+ Some(range) => range,
+ None => return Ok(None),
+ };
+ if range.is_empty() {
+ return Ok(None);
+ }
+ let res = items[range.clone()].iter().enumerate().find_map(|(mut src_idx, src)| {
+ src_idx += range.start;
+ (src_idx != item_idx && src.is_source_for_destination_of(kind, item_mode)).then_some((src_idx, src, None))
+ });
+ if let Some(src) = res {
+ return Ok(Some(src));
+ }
+ } else {
+ let new = item_id.to_owned().attach(repo).object()?;
+ let percentage = percentage.expect("it's set to something below 1.0 and we assured this");
+ debug_assert!(
+ item.change.entry_mode().is_blob(),
+ "symlinks are matched exactly, and trees aren't used here"
+ );
+ let algo = repo.config.diff_algorithm()?;
+ for (can_idx, src) in items
+ .iter()
+ .enumerate()
+ .filter(|(src_idx, src)| *src_idx != item_idx && src.is_source_for_destination_of(kind, item_mode))
+ {
+ let old = src.change.oid().to_owned().attach(repo).object()?;
+ // TODO: make sure we get attribute handling and binary skips and filters right here. There is crate::object::blob::diff::Platform
+ // which should have facilities for that one day, but we don't use it because we need newlines in our tokens.
+ let tokens = gix_diff::blob::intern::InternedInput::new(
+ gix_diff::blob::sources::byte_lines_with_terminator(&old.data),
+ gix_diff::blob::sources::byte_lines_with_terminator(&new.data),
+ );
+ let counts = gix_diff::blob::diff(
+ algo,
+ &tokens,
+ gix_diff::blob::sink::Counter::new(diff::Statistics {
+ removed_bytes: 0,
+ input: &tokens,
+ }),
+ );
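+ // Editor's note (illustrative): `counts.wrapped` holds the summed byte length of removed
+ // `before` tokens, so e.g. an old blob of 100 bytes, a new blob of 120 bytes and 30 removed
+ // bytes give a similarity of (100 - 30) / max(100, 120) = 70 / 120 ≈ 0.58.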
+ let similarity = (old.data.len() - counts.wrapped) as f32 / old.data.len().max(new.data.len()) as f32;
+ stats.num_similarity_checks += 1;
+ if similarity >= percentage {
+ return Ok(Some((
+ can_idx,
+ src,
+ DiffLineStats {
+ removals: counts.removals,
+ insertions: counts.insertions,
+ before: tokens.before.len().try_into().expect("interner handles only u32"),
+ after: tokens.after.len().try_into().expect("interner handles only u32"),
+ }
+ .into(),
+ )));
+ }
+ }
+ }
+ Ok(None)
+}
+
+mod diff {
+ use std::ops::Range;
+
+ pub struct Statistics<'a, 'data> {
+ pub removed_bytes: usize,
+ pub input: &'a gix_diff::blob::intern::InternedInput<&'data [u8]>,
+ }
+
+ impl<'a, 'data> gix_diff::blob::Sink for Statistics<'a, 'data> {
+ type Out = usize;
+
+ fn process_change(&mut self, before: Range<u32>, _after: Range<u32>) {
+ self.removed_bytes = self.input.before[before.start as usize..before.end as usize]
+ .iter()
+ .map(|token| self.input.interner[*token].len())
+ .sum();
+ }
+
+ fn finish(self) -> Self::Out {
+ self.removed_bytes
+ }
+ }
+}
+
+mod tree_to_events {
+ use gix_diff::tree::visit::Change;
+ use gix_object::tree::EntryRef;
+
+ use crate::bstr::BStr;
+
+ pub struct Delegate<'a> {
+ parent: &'a mut super::State,
+ recorder: gix_traverse::tree::Recorder,
+ }
+
+ impl<'a> Delegate<'a> {
+ pub fn new(parent: &'a mut super::State) -> Self {
+ let tracking = parent.tracking.map(|t| match t {
+ gix_diff::tree::recorder::Location::FileName => gix_traverse::tree::recorder::Location::FileName,
+ gix_diff::tree::recorder::Location::Path => gix_traverse::tree::recorder::Location::Path,
+ });
+ Self {
+ parent,
+ recorder: gix_traverse::tree::Recorder::default().track_location(tracking),
+ }
+ }
+ }
+
+ impl gix_traverse::tree::Visit for Delegate<'_> {
+ fn pop_front_tracked_path_and_set_current(&mut self) {
+ self.recorder.pop_front_tracked_path_and_set_current()
+ }
+
+ fn push_back_tracked_path_component(&mut self, component: &BStr) {
+ self.recorder.push_back_tracked_path_component(component)
+ }
+
+ fn push_path_component(&mut self, component: &BStr) {
+ self.recorder.push_path_component(component)
+ }
+
+ fn pop_path_component(&mut self) {
+ self.recorder.pop_path_component();
+ }
+
+ fn visit_tree(&mut self, _entry: &EntryRef<'_>) -> gix_traverse::tree::visit::Action {
+ gix_traverse::tree::visit::Action::Continue
+ }
+
+ fn visit_nontree(&mut self, entry: &EntryRef<'_>) -> gix_traverse::tree::visit::Action {
+ if entry.mode.is_blob() {
+ self.parent.try_push_change(
+ Change::Modification {
+ previous_entry_mode: entry.mode,
+ previous_oid: gix_hash::ObjectId::null(entry.oid.kind()),
+ entry_mode: entry.mode,
+ oid: entry.oid.to_owned(),
+ },
+ self.recorder.path(),
+ );
+ // make sure these aren't viable to be emitted anymore.
+ self.parent.items.last_mut().expect("just pushed").emitted = true;
+ }
+ gix_traverse::tree::visit::Action::Continue
+ }
+ }
+}
diff --git a/vendor/gix/src/object/tree/iter.rs b/vendor/gix/src/object/tree/iter.rs
new file mode 100644
index 000000000..c841e2574
--- /dev/null
+++ b/vendor/gix/src/object/tree/iter.rs
@@ -0,0 +1,53 @@
+use super::Tree;
+use crate::Repository;
+
+/// An entry within a tree
+pub struct EntryRef<'repo, 'a> {
+ /// The actual entry ref we are wrapping.
+ pub inner: gix_object::tree::EntryRef<'a>,
+
+ pub(crate) repo: &'repo Repository,
+}
+
+impl<'repo, 'a> EntryRef<'repo, 'a> {
+ /// The kind of object to which [`id()`][Self::id()] is pointing.
+ pub fn mode(&self) -> gix_object::tree::EntryMode {
+ self.inner.mode
+ }
+
+ /// The name of the file in the parent tree.
+ pub fn filename(&self) -> &gix_object::bstr::BStr {
+ self.inner.filename
+ }
+
+ /// Return the entry's id, connected to the underlying repository.
+ pub fn id(&self) -> crate::Id<'repo> {
+ crate::Id::from_id(self.inner.oid, self.repo)
+ }
+
+ /// Return the entry's id, without repository connection.
+ pub fn oid(&self) -> gix_hash::ObjectId {
+ self.inner.oid.to_owned()
+ }
+}
+
+impl<'repo, 'a> std::fmt::Display for EntryRef<'repo, 'a> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ write!(
+ f,
+ "{:06o} {:>6} {}\t{}",
+ self.mode() as u32,
+ self.mode().as_str(),
+ self.id().shorten_or_id(),
+ self.filename()
+ )
+ }
+}
+
+impl<'repo> Tree<'repo> {
+ /// Return an iterator over tree entries to obtain information about files and directories this tree contains.
+ pub fn iter(&self) -> impl Iterator<Item = Result<EntryRef<'repo, '_>, gix_object::decode::Error>> {
+ let repo = self.repo;
+ gix_object::TreeRefIter::from_bytes(&self.data).map(move |e| e.map(|entry| EntryRef { inner: entry, repo }))
+ }
+}
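+
+// Editor's note: an illustrative usage sketch, not part of the upstream file. It assumes an
+// already opened `repo: gix::Repository` whose `HEAD` points to a commit.
+//
+//     let tree = repo.head_commit()?.tree()?;
+//     for entry in tree.iter() {
+//         let entry = entry?;
+//         println!("{entry}");
+//     }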
diff --git a/vendor/gix/src/object/tree/mod.rs b/vendor/gix/src/object/tree/mod.rs
new file mode 100644
index 000000000..db094bcb9
--- /dev/null
+++ b/vendor/gix/src/object/tree/mod.rs
@@ -0,0 +1,158 @@
+use gix_hash::ObjectId;
+use gix_object::{bstr::BStr, TreeRefIter};
+
+use crate::{object::find, Id, Tree};
+
+/// Initialization
+impl<'repo> Tree<'repo> {
+ /// Obtain a tree instance by handing in all components that it is made up of.
+ pub fn from_data(id: impl Into<ObjectId>, data: Vec<u8>, repo: &'repo crate::Repository) -> Self {
+ Tree {
+ id: id.into(),
+ data,
+ repo,
+ }
+ }
+}
+
+/// Access
+impl<'repo> Tree<'repo> {
+ /// Return this tree's identifier.
+ pub fn id(&self) -> Id<'repo> {
+ Id::from_id(self.id, self.repo)
+ }
+
+ // TODO: tests.
+ /// Follow a sequence of `path` components starting from this instance, and look them up one by one until the last component
+ /// is looked up and its tree entry is returned.
+ ///
+ /// # Performance Notes
+ ///
+ /// Searching tree entries is currently done in sequence, which allows the search to be allocation-free. It would be possible
+ /// to re-use a vector and use a binary search instead, which might improve performance overall.
+ /// However, a benchmark should be created first to have some data and see which trade-off to choose here.
+ ///
+ /// # Why is this consuming?
+ ///
+ /// The borrow checker shows pathological behaviour in loops that mutate a buffer, but also want to return from it.
+ /// Workarounds include keeping an index and doing a separate access to the memory, which seems hard to do here without
+ /// re-parsing the entries.
+ pub fn lookup_entry<I, P>(mut self, path: I) -> Result<Option<Entry<'repo>>, find::existing::Error>
+ where
+ I: IntoIterator<Item = P>,
+ P: PartialEq<BStr>,
+ {
+ let mut path = path.into_iter().peekable();
+ while let Some(component) = path.next() {
+ match TreeRefIter::from_bytes(&self.data)
+ .filter_map(Result::ok)
+ .find(|entry| component.eq(entry.filename))
+ {
+ Some(entry) => {
+ if path.peek().is_none() {
+ return Ok(Some(Entry {
+ inner: entry.into(),
+ repo: self.repo,
+ }));
+ } else {
+ let next_id = entry.oid.to_owned();
+ let repo = self.repo;
+ drop(self);
+ self = match repo.find_object(next_id)?.try_into_tree() {
+ Ok(tree) => tree,
+ Err(_) => return Ok(None),
+ };
+ }
+ }
+ None => return Ok(None),
+ }
+ }
+ Ok(None)
+ }
+
+ /// Like [`lookup_entry()`][Self::lookup_entry()], but takes a `Path` directly via `relative_path`, a path relative to this tree.
+ ///
+ /// # Note
+ ///
+ /// If any path component contains ill-formed UTF-8 and thus can't be converted to bytes on platforms which can't do so natively,
+ /// the returned component will be empty, which makes the lookup fail.
+ pub fn lookup_entry_by_path(
+ self,
+ relative_path: impl AsRef<std::path::Path>,
+ ) -> Result<Option<Entry<'repo>>, find::existing::Error> {
+ use crate::bstr::ByteSlice;
+ self.lookup_entry(relative_path.as_ref().components().map(|c: std::path::Component<'_>| {
+ gix_path::os_str_into_bstr(c.as_os_str())
+ .unwrap_or_else(|_| "".into())
+ .as_bytes()
+ }))
+ }
+}
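+
+// Editor's note: an illustrative usage sketch, not part of the upstream file; the path
+// `src/lib.rs` is a hypothetical example and `tree` is a `gix::Tree` obtained elsewhere.
+//
+//     if let Some(entry) = tree.lookup_entry_by_path("src/lib.rs")? {
+//         println!("{} {}", entry.mode().as_str(), entry.id());
+//     }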
+
+///
+pub mod diff;
+
+///
+pub mod traverse;
+
+///
+mod iter;
+pub use iter::EntryRef;
+
+impl<'r> std::fmt::Debug for Tree<'r> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ write!(f, "Tree({})", self.id)
+ }
+}
+
+/// An entry in a [`Tree`], similar to an entry in a directory.
+#[derive(PartialEq, Debug, Clone)]
+pub struct Entry<'repo> {
+ inner: gix_object::tree::Entry,
+ repo: &'repo crate::Repository,
+}
+
+mod entry {
+ use crate::{bstr::BStr, ext::ObjectIdExt, object::tree::Entry};
+
+ /// Access
+ impl<'repo> Entry<'repo> {
+ /// The kind of object to which `oid` is pointing.
+ pub fn mode(&self) -> gix_object::tree::EntryMode {
+ self.inner.mode
+ }
+
+ /// The name of the file in the parent tree.
+ pub fn filename(&self) -> &BStr {
+ self.inner.filename.as_ref()
+ }
+
+ /// Return the object id of the entry.
+ pub fn id(&self) -> crate::Id<'repo> {
+ self.inner.oid.attach(self.repo)
+ }
+
+ /// Return the object this entry points to.
+ pub fn object(&self) -> Result<crate::Object<'repo>, crate::object::find::existing::Error> {
+ self.id().object()
+ }
+
+ /// Return the plain object id of this entry, without access to the repository.
+ pub fn oid(&self) -> &gix_hash::oid {
+ &self.inner.oid
+ }
+
+ /// Return the plain object id of this entry, without access to the repository.
+ pub fn object_id(&self) -> gix_hash::ObjectId {
+ self.inner.oid
+ }
+ }
+
+ /// Consuming
+ impl Entry<'_> {
+ /// Return the contained object.
+ pub fn detach(self) -> gix_object::tree::Entry {
+ self.inner
+ }
+ }
+}
diff --git a/vendor/gix/src/object/tree/traverse.rs b/vendor/gix/src/object/tree/traverse.rs
new file mode 100644
index 000000000..974df6b0d
--- /dev/null
+++ b/vendor/gix/src/object/tree/traverse.rs
@@ -0,0 +1,62 @@
+use gix_odb::FindExt;
+
+use crate::Tree;
+
+/// Traversal
+impl<'repo> Tree<'repo> {
+ /// Obtain a platform for initiating a variety of traversals.
+ pub fn traverse(&self) -> Platform<'_, 'repo> {
+ Platform {
+ root: self,
+ breadthfirst: BreadthFirstPresets { root: self },
+ }
+ }
+}
+
+/// An intermediate object to start traversing the parent tree from.
+pub struct Platform<'a, 'repo> {
+ root: &'a Tree<'repo>,
+ /// Provides easy access to presets for common breadth-first traversal.
+ pub breadthfirst: BreadthFirstPresets<'a, 'repo>,
+}
+
+/// Presets for common choices in breadth-first traversal.
+#[derive(Copy, Clone)]
+pub struct BreadthFirstPresets<'a, 'repo> {
+ root: &'a Tree<'repo>,
+}
+
+impl<'a, 'repo> BreadthFirstPresets<'a, 'repo> {
+ /// Returns all entries and their file paths, recursively, as reachable from this tree.
+ pub fn files(&self) -> Result<Vec<gix_traverse::tree::recorder::Entry>, gix_traverse::tree::breadthfirst::Error> {
+ let mut recorder = gix_traverse::tree::Recorder::default();
+ Platform {
+ root: self.root,
+ breadthfirst: *self,
+ }
+ .breadthfirst(&mut recorder)?;
+ Ok(recorder.records)
+ }
+}
+
+impl<'a, 'repo> Platform<'a, 'repo> {
+ /// Start a breadth-first, recursive traversal using `delegate`, for which a [`Recorder`][gix_traverse::tree::Recorder] can be used to get started.
+ ///
+ /// # Note
+ ///
+ /// - Results are returned in sort order according to tree-entry sorting rules, one level at a time.
+ /// - For obtaining the direct children of the tree, use [.iter()][crate::Tree::iter()] instead.
+ pub fn breadthfirst<V>(&self, delegate: &mut V) -> Result<(), gix_traverse::tree::breadthfirst::Error>
+ where
+ V: gix_traverse::tree::Visit,
+ {
+ let root = gix_object::TreeRefIter::from_bytes(&self.root.data);
+ let state = gix_traverse::tree::breadthfirst::State::default();
+ gix_traverse::tree::breadthfirst(
+ root,
+ state,
+ |oid, buf| self.root.repo.objects.find_tree_iter(oid, buf).ok(),
+ delegate,
+ )
+ }
+}
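+
+// Editor's note: an illustrative usage sketch, not part of the upstream file, listing all files
+// reachable from a `gix::Tree`; the `oid`/`filepath` field names come from
+// `gix_traverse::tree::recorder::Entry`.
+//
+//     for record in tree.traverse().breadthfirst.files()? {
+//         println!("{} {}", record.oid, record.filepath);
+//     }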
diff --git a/vendor/gix/src/open/mod.rs b/vendor/gix/src/open/mod.rs
new file mode 100644
index 000000000..77018f5a2
--- /dev/null
+++ b/vendor/gix/src/open/mod.rs
@@ -0,0 +1,67 @@
+use std::path::PathBuf;
+
+use crate::{bstr::BString, config, permission, Permissions};
+
+/// The options used in [`ThreadSafeRepository::open_opts()`][crate::ThreadSafeRepository::open_opts()].
+///
+/// ### Replacement Objects for the object database
+///
+/// The environment variables `GIT_REPLACE_REF_BASE` and `GIT_NO_REPLACE_OBJECTS` are mapped to `gitoxide.objects.replaceRefBase`
+/// and `gitoxide.objects.noReplace` respectively and then interpreted exactly as their environment variable counterparts.
+///
+/// Use [Permissions] to control which environment variables can be read, and config-overrides to control these values programmatically.
+#[derive(Clone)]
+pub struct Options {
+ pub(crate) object_store_slots: gix_odb::store::init::Slots,
+ /// Define what is allowed while opening a repository.
+ pub permissions: Permissions,
+ pub(crate) git_dir_trust: Option<gix_sec::Trust>,
+ /// Warning: this one is copied to config::Cache - don't change it after the repo is open, or keep it in sync.
+ pub(crate) filter_config_section: Option<fn(&gix_config::file::Metadata) -> bool>,
+ pub(crate) lossy_config: Option<bool>,
+ pub(crate) lenient_config: bool,
+ pub(crate) bail_if_untrusted: bool,
+ pub(crate) api_config_overrides: Vec<BString>,
+ pub(crate) cli_config_overrides: Vec<BString>,
+ pub(crate) open_path_as_is: bool,
+ /// Internal to pass an already obtained CWD on to where it may also be used. This avoids the CWD being queried more than once per repo.
+ pub(crate) current_dir: Option<PathBuf>,
+}
+
+/// The error returned by [`crate::open()`].
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum Error {
+ #[error("Failed to load the git configuration")]
+ Config(#[from] config::Error),
+ #[error("\"{path}\" does not appear to be a git repository")]
+ NotARepository {
+ source: gix_discover::is_git::Error,
+ path: PathBuf,
+ },
+ #[error(transparent)]
+ Io(#[from] std::io::Error),
+ #[error("The git directory at '{}' is considered unsafe as it's not owned by the current user.", .path.display())]
+ UnsafeGitDir { path: PathBuf },
+ #[error(transparent)]
+ EnvironmentAccessDenied(#[from] permission::env_var::resource::Error),
+}
+
+mod options;
+
+mod repository;
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn size_of_options() {
+ let actual = std::mem::size_of::<Options>();
+ let limit = 160;
+ assert!(
+ actual <= limit,
+ "{actual} <= {limit}: size shouldn't change without us knowing (on windows, it's bigger)"
+ );
+ }
+}
diff --git a/vendor/gix/src/open/options.rs b/vendor/gix/src/open/options.rs
new file mode 100644
index 000000000..fb648e3c2
--- /dev/null
+++ b/vendor/gix/src/open/options.rs
@@ -0,0 +1,180 @@
+use std::path::PathBuf;
+
+use super::{Error, Options};
+use crate::{bstr::BString, config, Permissions, ThreadSafeRepository};
+
+impl Default for Options {
+ fn default() -> Self {
+ Options {
+ object_store_slots: Default::default(),
+ permissions: Default::default(),
+ git_dir_trust: None,
+ filter_config_section: None,
+ lossy_config: None,
+ lenient_config: true,
+ bail_if_untrusted: false,
+ open_path_as_is: false,
+ api_config_overrides: Vec::new(),
+ cli_config_overrides: Vec::new(),
+ current_dir: None,
+ }
+ }
+}
+
+/// Instantiation
+impl Options {
+ /// Options configured to prevent accessing anything other than the repository configuration file, prohibiting
+ /// access to the environment or spreading beyond the git repository location.
+ pub fn isolated() -> Self {
+ Options::default().permissions(Permissions::isolated())
+ }
+}
+
+/// Generic modification
+impl Options {
+ /// An adapter to allow calling any builder method on this instance despite only having a mutable reference.
+ pub fn modify(&mut self, f: impl FnOnce(Self) -> Self) {
+ *self = f(std::mem::take(self));
+ }
+}
+
+/// Builder methods
+impl Options {
+ /// Apply the given configuration `values` like `init.defaultBranch=special` or `core.bool-implicit-true` in memory as early
+ /// as the configuration is initialized, to allow affecting the repository instantiation phase, both on disk and when opening.
+ /// The configuration is marked with [source API][gix_config::Source::Api].
+ pub fn config_overrides(mut self, values: impl IntoIterator<Item = impl Into<BString>>) -> Self {
+ self.api_config_overrides = values.into_iter().map(Into::into).collect();
+ self
+ }
+
+ /// Set configuration values of the form `core.abbrev=5` or `remote.origin.url = foo` or `core.bool-implicit-true` for application
+ /// as CLI overrides to the repository configuration, marked with [source CLI][gix_config::Source::Cli].
+ /// These are equivalent to CLI overrides passed with `-c` in `git`, for example.
+ pub fn cli_overrides(mut self, values: impl IntoIterator<Item = impl Into<BString>>) -> Self {
+ self.cli_config_overrides = values.into_iter().map(Into::into).collect();
+ self
+ }
+
+ /// Set the number of slots to use for the object database. It's a value that typically doesn't need changes on the client,
+ /// but should be controlled on the server.
+ pub fn object_store_slots(mut self, slots: gix_odb::store::init::Slots) -> Self {
+ self.object_store_slots = slots;
+ self
+ }
+
+ // TODO: tests
+ /// Set the given permissions, which are typically derived by a `Trust` level.
+ pub fn permissions(mut self, permissions: Permissions) -> Self {
+ self.permissions = permissions;
+ self
+ }
+
+ /// If `true`, default `false`, we will not modify the incoming `path` when opening to assure it is a `.git` directory.
+ ///
+ /// Instead we will try to open the input directory as is, even though it doesn't appear to be a `git` repository
+ /// due to the lack of a `.git` suffix or because its basename is not `.git` as in `worktree/.git`.
+ pub fn open_path_as_is(mut self, enable: bool) -> Self {
+ self.open_path_as_is = enable;
+ self
+ }
+
+ /// Set the trust level of the `.git` directory we are about to open.
+ ///
+ /// This can be set manually to force trust even though otherwise it might
+ /// not be fully trusted, leading to limitations in how configuration files
+ /// are interpreted.
+ ///
+ /// If not called explicitly, it will be determined by looking at its
+ /// ownership via [`gix_sec::Trust::from_path_ownership()`].
+ ///
+ /// # Security Warning
+ ///
+ /// Use with extreme care and only if it's absolutely known that the repository
+ /// is always controlled by the desired user. Using this capability _only_ saves
+ /// a permission check and only so if the [`open()`][Self::open()] method is used,
+ /// as opposed to discovery.
+ pub fn with(mut self, trust: gix_sec::Trust) -> Self {
+ self.git_dir_trust = trust.into();
+ self
+ }
+
+ /// If true, default false, and if the repository's trust level is not `Full`
+ /// (see [`with()`][Self::with()] for more), then the open operation will fail.
+ ///
+ /// Use this to mimic `git`'s way of handling untrusted repositories. Note that `gitoxide` solves
+ /// this by not using configuration from untrusted sources and by generally being secured against
+ /// doctored input files which, at the time of writing, could at worst cause out-of-memory.
+ pub fn bail_if_untrusted(mut self, toggle: bool) -> Self {
+ self.bail_if_untrusted = toggle;
+ self
+ }
+
+ /// Set the filter which determines if a configuration section can be used to read values from;
+ /// it should return true if the section is eligible.
+ ///
+ /// The default filter selects sections whose trust level is [`full`][gix_sec::Trust::Full] or
+ /// whose source is not [`repository-local`][gix_config::source::Kind::Repository].
+ pub fn filter_config_section(mut self, filter: fn(&gix_config::file::Metadata) -> bool) -> Self {
+ self.filter_config_section = Some(filter);
+ self
+ }
+
+ /// By default, in release mode configuration will be read without retaining non-essential information like
+ /// comments or whitespace to optimize lookup performance.
+ ///
+ /// Some applications might want to toggle this to false if they want to display or edit configuration losslessly
+ /// with all whitespace and comments included.
+ pub fn lossy_config(mut self, toggle: bool) -> Self {
+ self.lossy_config = toggle.into();
+ self
+ }
+
+ /// If set (the default is false), invalid configuration values will cause an error even if they could safely be defaulted.
+ ///
+ /// This is recommended for all applications that prefer correctness over usability.
+ /// `git` itself defaults to strict configuration mode, flagging incorrect configuration immediately.
+ pub fn strict_config(mut self, toggle: bool) -> Self {
+ self.lenient_config = !toggle;
+ self
+ }
+
+ /// Open a repository at `path` with the options set so far.
+ #[allow(clippy::result_large_err)]
+ pub fn open(self, path: impl Into<PathBuf>) -> Result<ThreadSafeRepository, Error> {
+ ThreadSafeRepository::open_opts(path, self)
+ }
+}
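+
+// Editor's note: an illustrative usage sketch, not part of the upstream file; the repository
+// path is hypothetical.
+//
+//     let repo = Options::isolated()
+//         .lossy_config(false)
+//         .open("/path/to/repo.git")?
+//         .to_thread_local();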
+
+impl gix_sec::trust::DefaultForLevel for Options {
+ fn default_for_level(level: gix_sec::Trust) -> Self {
+ match level {
+ gix_sec::Trust::Full => Options {
+ object_store_slots: Default::default(),
+ permissions: Permissions::default_for_level(level),
+ git_dir_trust: gix_sec::Trust::Full.into(),
+ filter_config_section: Some(config::section::is_trusted),
+ lossy_config: None,
+ bail_if_untrusted: false,
+ lenient_config: true,
+ open_path_as_is: false,
+ api_config_overrides: Vec::new(),
+ cli_config_overrides: Vec::new(),
+ current_dir: None,
+ },
+ gix_sec::Trust::Reduced => Options {
+ object_store_slots: gix_odb::store::init::Slots::Given(32), // limit resource usage
+ permissions: Permissions::default_for_level(level),
+ git_dir_trust: gix_sec::Trust::Reduced.into(),
+ filter_config_section: Some(config::section::is_trusted),
+ bail_if_untrusted: false,
+ lenient_config: true,
+ open_path_as_is: false,
+ lossy_config: None,
+ api_config_overrides: Vec::new(),
+ cli_config_overrides: Vec::new(),
+ current_dir: None,
+ },
+ }
+ }
+}
diff --git a/vendor/gix/src/open/repository.rs b/vendor/gix/src/open/repository.rs
new file mode 100644
index 000000000..85dd91da7
--- /dev/null
+++ b/vendor/gix/src/open/repository.rs
@@ -0,0 +1,345 @@
+#![allow(clippy::result_large_err)]
+use std::{borrow::Cow, path::PathBuf};
+
+use gix_features::threading::OwnShared;
+
+use super::{Error, Options};
+use crate::{
+ config,
+ config::{
+ cache::{interpolate_context, util::ApplyLeniency},
+ tree::{gitoxide, Core, Key, Safe},
+ },
+ permission, Permissions, ThreadSafeRepository,
+};
+
+#[derive(Default, Clone)]
+pub(crate) struct EnvironmentOverrides {
+ /// An override of the worktree, typically from the environment, which takes precedence even over worktree dirs set as a parameter.
+ ///
+ /// This emulates the way git handles this override.
+ worktree_dir: Option<PathBuf>,
+ /// An override for the .git directory, typically from the environment.
+ ///
+ /// If set, the passed in `git_dir` parameter will be ignored in favor of this one.
+ git_dir: Option<PathBuf>,
+}
+
+impl EnvironmentOverrides {
+ fn from_env() -> Result<Self, permission::env_var::resource::Error> {
+ let mut worktree_dir = None;
+ if let Some(path) = std::env::var_os(Core::WORKTREE.the_environment_override()) {
+ worktree_dir = PathBuf::from(path).into();
+ }
+ let mut git_dir = None;
+ if let Some(path) = std::env::var_os("GIT_DIR") {
+ git_dir = PathBuf::from(path).into();
+ }
+ Ok(EnvironmentOverrides { worktree_dir, git_dir })
+ }
+}
+
+impl ThreadSafeRepository {
+ /// Open a git repository at the given `path`, possibly expanding it to `path/.git` if `path` is a work tree dir.
+ pub fn open(path: impl Into<PathBuf>) -> Result<Self, Error> {
+ Self::open_opts(path, Options::default())
+ }
+
+ /// Open a git repository at the given `path`, possibly expanding it to `path/.git` if `path` is a work tree dir, and use
+ /// `options` for fine-grained control.
+ ///
+ /// Note that you should use [`crate::discover()`] if security should be adjusted by ownership.
+ pub fn open_opts(path: impl Into<PathBuf>, mut options: Options) -> Result<Self, Error> {
+ let (path, kind) = {
+ let path = path.into();
+ let looks_like_git_dir =
+ path.ends_with(gix_discover::DOT_GIT_DIR) || path.extension() == Some(std::ffi::OsStr::new("git"));
+ let candidate = if !options.open_path_as_is && !looks_like_git_dir {
+ Cow::Owned(path.join(gix_discover::DOT_GIT_DIR))
+ } else {
+ Cow::Borrowed(&path)
+ };
+ match gix_discover::is_git(candidate.as_ref()) {
+ Ok(kind) => (candidate.into_owned(), kind),
+ Err(err) => {
+ if options.open_path_as_is || matches!(candidate, Cow::Borrowed(_)) {
+ return Err(Error::NotARepository {
+ source: err,
+ path: candidate.into_owned(),
+ });
+ }
+ match gix_discover::is_git(&path) {
+ Ok(kind) => (path, kind),
+ Err(err) => return Err(Error::NotARepository { source: err, path }),
+ }
+ }
+ }
+ };
+ let cwd = std::env::current_dir()?;
+ let (git_dir, worktree_dir) = gix_discover::repository::Path::from_dot_git_dir(path, kind, &cwd)
+ .expect("we have sanitized path with is_git()")
+ .into_repository_and_work_tree_directories();
+ if options.git_dir_trust.is_none() {
+ options.git_dir_trust = gix_sec::Trust::from_path_ownership(&git_dir)?.into();
+ }
+ options.current_dir = Some(cwd);
+ ThreadSafeRepository::open_from_paths(git_dir, worktree_dir, options)
+ }
+
+ /// Try to open a git repository in `fallback_directory` (can be worktree or `.git` directory) only if there is no override
+ /// of the `gitdir` using git environment variables.
+ ///
+ /// Use the `trust_map` to apply options depending on the trust level for `directory` or the directory it's overridden with.
+ /// The `.git` directory, whether given or computed, is used for trust checks.
+ ///
+ /// Note that this will read various `GIT_*` environment variables to check for overrides, and is probably most useful when implementing
+ /// custom hooks.
+ // TODO: tests, with hooks, GIT_QUARANTINE for ref-log and transaction control (needs gix-sec support to remove write access in gix-ref)
+ // TODO: The following vars should end up as overrides of the respective configuration values (see gix-config).
+ // GIT_PROXY_SSL_CERT, GIT_PROXY_SSL_KEY, GIT_PROXY_SSL_CERT_PASSWORD_PROTECTED.
+ // GIT_PROXY_SSL_CAINFO, GIT_SSL_CIPHER_LIST, GIT_HTTP_MAX_REQUESTS, GIT_CURL_FTP_NO_EPSV,
+ pub fn open_with_environment_overrides(
+ fallback_directory: impl Into<PathBuf>,
+ trust_map: gix_sec::trust::Mapping<Options>,
+ ) -> Result<Self, Error> {
+ let overrides = EnvironmentOverrides::from_env()?;
+ let (path, path_kind): (PathBuf, _) = match overrides.git_dir {
+ Some(git_dir) => gix_discover::is_git(&git_dir)
+ .map_err(|err| Error::NotARepository {
+ source: err,
+ path: git_dir.clone(),
+ })
+ .map(|kind| (git_dir, kind))?,
+ None => {
+ let fallback_directory = fallback_directory.into();
+ gix_discover::is_git(&fallback_directory)
+ .map_err(|err| Error::NotARepository {
+ source: err,
+ path: fallback_directory.clone(),
+ })
+ .map(|kind| (fallback_directory, kind))?
+ }
+ };
+
+ let cwd = std::env::current_dir()?;
+ let (git_dir, worktree_dir) = gix_discover::repository::Path::from_dot_git_dir(path, path_kind, &cwd)
+ .expect("we have sanitized path with is_git()")
+ .into_repository_and_work_tree_directories();
+ let worktree_dir = worktree_dir.or(overrides.worktree_dir);
+
+ let git_dir_trust = gix_sec::Trust::from_path_ownership(&git_dir)?;
+ let mut options = trust_map.into_value_by_level(git_dir_trust);
+ options.current_dir = Some(cwd);
+ ThreadSafeRepository::open_from_paths(git_dir, worktree_dir, options)
+ }
+
+ pub(crate) fn open_from_paths(
+ git_dir: PathBuf,
+ mut worktree_dir: Option<PathBuf>,
+ options: Options,
+ ) -> Result<Self, Error> {
+ let Options {
+ git_dir_trust,
+ object_store_slots,
+ filter_config_section,
+ lossy_config,
+ lenient_config,
+ bail_if_untrusted,
+ open_path_as_is: _,
+ permissions: Permissions { ref env, config },
+ ref api_config_overrides,
+ ref cli_config_overrides,
+ ref current_dir,
+ } = options;
+ let current_dir = current_dir.as_deref().expect("BUG: current_dir must be set by caller");
+ let git_dir_trust = git_dir_trust.expect("trust must have been determined by now");
+
+ // TODO: assure we handle the worktree-dir properly as we can have config per worktree with an extension.
+ // This would be something read in later as we have to first check for extensions. Also this means
+ // that each worktree, even if accessible through this instance, has to come in its own Repository instance
+ // as it may have its own configuration. That's fine actually.
+ let common_dir = gix_discover::path::from_plain_file(git_dir.join("commondir"))
+ .transpose()?
+ .map(|cd| git_dir.join(cd));
+ let common_dir_ref = common_dir.as_deref().unwrap_or(&git_dir);
+
+ let repo_config = config::cache::StageOne::new(
+ common_dir_ref,
+ git_dir.as_ref(),
+ git_dir_trust,
+ lossy_config,
+ lenient_config,
+ )?;
+ let mut refs = {
+ let reflog = repo_config.reflog.unwrap_or(gix_ref::store::WriteReflog::Disable);
+ let object_hash = repo_config.object_hash;
+ match &common_dir {
+ Some(common_dir) => crate::RefStore::for_linked_worktree(&git_dir, common_dir, reflog, object_hash),
+ None => crate::RefStore::at(&git_dir, reflog, object_hash),
+ }
+ };
+ let head = refs.find("HEAD").ok();
+ let git_install_dir = crate::path::install_dir().ok();
+ let home = std::env::var_os("HOME")
+ .map(PathBuf::from)
+ .and_then(|home| env.home.check_opt(home));
+
+ let mut filter_config_section = filter_config_section.unwrap_or(config::section::is_trusted);
+ let config = config::Cache::from_stage_one(
+ repo_config,
+ common_dir_ref,
+ head.as_ref().and_then(|head| head.target.try_name()),
+ filter_config_section,
+ git_install_dir.as_deref(),
+ home.as_deref(),
+ env.clone(),
+ config,
+ lenient_config,
+ api_config_overrides,
+ cli_config_overrides,
+ )?;
+
+ if bail_if_untrusted && git_dir_trust != gix_sec::Trust::Full {
+ check_safe_directories(&git_dir, git_install_dir.as_deref(), home.as_deref(), &config)?;
+ }
+
+ // core.worktree might be used to overwrite the worktree directory
+ if !config.is_bare {
+ if let Some(wt) = config
+ .resolved
+ .path_filter("core", None, Core::WORKTREE.name, &mut filter_config_section)
+ {
+ let wt_path = wt
+ .interpolate(interpolate_context(git_install_dir.as_deref(), home.as_deref()))
+ .map_err(config::Error::PathInterpolation)?;
+ worktree_dir = {
+ gix_path::normalize(git_dir.join(wt_path), current_dir)
+ .and_then(|wt| wt.as_ref().is_dir().then(|| wt.into_owned()))
+ }
+ }
+ }
+
+ match worktree_dir {
+ None if !config.is_bare => {
+ worktree_dir = Some(git_dir.parent().expect("parent is always available").to_owned());
+ }
+ Some(_) => {
+ // note that we might be bare even with a worktree directory - work trees don't have to be
+ // the parent of a non-bare repository.
+ }
+ None => {}
+ }
+
+ refs.write_reflog = config::cache::util::reflog_or_default(config.reflog, worktree_dir.is_some());
+ let replacements = replacement_objects_refs_prefix(&config.resolved, lenient_config, filter_config_section)?
+ .and_then(|prefix| {
+ let platform = refs.iter().ok()?;
+ let iter = platform.prefixed(&prefix).ok()?;
+ let prefix = prefix.to_str()?;
+ let replacements = iter
+ .filter_map(Result::ok)
+ .filter_map(|r: gix_ref::Reference| {
+ let target = r.target.try_id()?.to_owned();
+ let source =
+ gix_hash::ObjectId::from_hex(r.name.as_bstr().strip_prefix(prefix.as_bytes())?).ok()?;
+ Some((source, target))
+ })
+ .collect::<Vec<_>>();
+ Some(replacements)
+ })
+ .unwrap_or_default();
+
+ Ok(ThreadSafeRepository {
+ objects: OwnShared::new(gix_odb::Store::at_opts(
+ common_dir_ref.join("objects"),
+ replacements,
+ gix_odb::store::init::Options {
+ slots: object_store_slots,
+ object_hash: config.object_hash,
+ use_multi_pack_index: config.use_multi_pack_index,
+ current_dir: current_dir.to_owned().into(),
+ },
+ )?),
+ common_dir,
+ refs,
+ work_tree: worktree_dir,
+ config,
+ // used when spawning new repositories off this one when following worktrees
+ linked_worktree_options: options,
+ index: gix_features::fs::MutableSnapshot::new().into(),
+ })
+ }
+}
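+
+// Editor's note: an illustrative sketch, not part of the upstream file, of opening with
+// environment overrides; the `full`/`reduced` field names are assumed from
+// `gix_sec::trust::Mapping`.
+//
+//     let repo = ThreadSafeRepository::open_with_environment_overrides(
+//         ".",
+//         gix_sec::trust::Mapping {
+//             full: Options::default(),
+//             reduced: Options::isolated(),
+//         },
+//     )?;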
+
+// TODO: tests
+fn replacement_objects_refs_prefix(
+ config: &gix_config::File<'static>,
+ lenient: bool,
+ mut filter_config_section: fn(&gix_config::file::Metadata) -> bool,
+) -> Result<Option<PathBuf>, Error> {
+ let is_disabled = config
+ .boolean_filter_by_key("gitoxide.objects.noReplace", &mut filter_config_section)
+ .map(|b| gitoxide::Objects::NO_REPLACE.enrich_error(b))
+ .transpose()
+ .with_leniency(lenient)
+ .map_err(config::Error::ConfigBoolean)?
+ .unwrap_or_default();
+
+ if is_disabled {
+ return Ok(None);
+ }
+
+ let ref_base = gix_path::from_bstr({
+ let key = "gitoxide.objects.replaceRefBase";
+ debug_assert_eq!(gitoxide::Objects::REPLACE_REF_BASE.logical_name(), key);
+ config
+ .string_filter_by_key(key, &mut filter_config_section)
+ .unwrap_or_else(|| Cow::Borrowed("refs/replace/".into()))
+ })
+ .into_owned();
+ Ok(ref_base.into())
+}
+
+fn check_safe_directories(
+ git_dir: &std::path::Path,
+ git_install_dir: Option<&std::path::Path>,
+ home: Option<&std::path::Path>,
+ config: &config::Cache,
+) -> Result<(), Error> {
+ let mut is_safe = false;
+ let git_dir = match gix_path::realpath(git_dir) {
+ Ok(p) => p,
+ Err(_) => git_dir.to_owned(),
+ };
+ for safe_dir in config
+ .resolved
+ .strings_filter("safe", None, Safe::DIRECTORY.name, &mut Safe::directory_filter)
+ .unwrap_or_default()
+ {
+ if safe_dir.as_ref() == "*" {
+ is_safe = true;
+ continue;
+ }
+ if safe_dir.is_empty() {
+ is_safe = false;
+ continue;
+ }
+ if !is_safe {
+ let safe_dir = match gix_config::Path::from(std::borrow::Cow::Borrowed(safe_dir.as_ref()))
+ .interpolate(interpolate_context(git_install_dir, home))
+ {
+ Ok(path) => path,
+ Err(_) => gix_path::from_bstr(safe_dir),
+ };
+ if safe_dir == git_dir {
+ is_safe = true;
+ continue;
+ }
+ }
+ }
+ if is_safe {
+ Ok(())
+ } else {
+ Err(Error::UnsafeGitDir { path: git_dir })
+ }
+}
diff --git a/vendor/gix/src/path.rs b/vendor/gix/src/path.rs
new file mode 100644
index 000000000..9fd6d4b01
--- /dev/null
+++ b/vendor/gix/src/path.rs
@@ -0,0 +1,11 @@
+use std::path::PathBuf;
+
+pub use gix_path::*;
+
+pub(crate) fn install_dir() -> std::io::Result<PathBuf> {
+ std::env::current_exe().and_then(|exe| {
+ exe.parent()
+ .map(ToOwned::to_owned)
+ .ok_or_else(|| std::io::Error::new(std::io::ErrorKind::Other, "no parent for current executable"))
+ })
+}
diff --git a/vendor/gix/src/reference/edits.rs b/vendor/gix/src/reference/edits.rs
new file mode 100644
index 000000000..aadd087ba
--- /dev/null
+++ b/vendor/gix/src/reference/edits.rs
@@ -0,0 +1,75 @@
+///
+pub mod set_target_id {
+ use gix_ref::{transaction::PreviousValue, Target};
+
+ use crate::{bstr::BString, Reference};
+
+ mod error {
+ use gix_ref::FullName;
+
+ /// The error returned by [`Reference::set_target_id()`][super::Reference::set_target_id()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("Cannot change symbolic reference {name:?} into a direct one by setting it to an id")]
+ SymbolicReference { name: FullName },
+ #[error(transparent)]
+ ReferenceEdit(#[from] crate::reference::edit::Error),
+ }
+ }
+ pub use error::Error;
+
+ impl<'repo> Reference<'repo> {
+ /// Set the id of this direct reference to `id` and use `reflog_message` for the reflog (if enabled in the repository).
+ ///
+ /// Note that the operation will fail on symbolic references (to change their type, use the lower-level reference database),
+ /// or if the reference was deleted or changed in the meantime.
+ /// Furthermore, refrain from using this method for more than a one-off change as it creates a transaction for each invocation.
+ /// If multiple references should be changed, use [Repository::edit_references()][crate::Repository::edit_references()]
+ /// or the lower level reference database instead.
+ #[allow(clippy::result_large_err)]
+ pub fn set_target_id(
+ &mut self,
+ id: impl Into<gix_hash::ObjectId>,
+ reflog_message: impl Into<BString>,
+ ) -> Result<(), Error> {
+ match &self.inner.target {
+ Target::Symbolic(name) => return Err(Error::SymbolicReference { name: name.clone() }),
+ Target::Peeled(current_id) => {
+ let changed = self.repo.reference(
+ self.name(),
+ id,
+ PreviousValue::MustExistAndMatch(Target::Peeled(current_id.to_owned())),
+ reflog_message,
+ )?;
+ *self = changed;
+ }
+ }
+ Ok(())
+ }
+ }
+}
+
+///
+pub mod delete {
+ use gix_ref::transaction::{Change, PreviousValue, RefEdit, RefLog};
+
+ use crate::Reference;
+
+ impl<'repo> Reference<'repo> {
+ /// Delete this reference or fail if it was changed since last observed.
+ /// Note that this instance remains available in memory but probably shouldn't be used anymore.
+ pub fn delete(&self) -> Result<(), crate::reference::edit::Error> {
+ self.repo
+ .edit_reference(RefEdit {
+ change: Change::Delete {
+ expected: PreviousValue::MustExistAndMatch(self.inner.target.clone()),
+ log: RefLog::AndReference,
+ },
+ name: self.inner.name.clone(),
+ deref: false,
+ })
+ .map(|_| ())
+ }
+ }
+}
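+
+// Editor's note: an illustrative usage sketch, not part of the upstream file; `new_commit_id`
+// is a hypothetical `gix_hash::ObjectId` and the branch name is an example.
+//
+//     let mut reference = repo.find_reference("refs/heads/main")?;
+//     reference.set_target_id(new_commit_id, "reset: move main to a known-good commit")?;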
diff --git a/vendor/gix/src/reference/errors.rs b/vendor/gix/src/reference/errors.rs
new file mode 100644
index 000000000..364456fd1
--- /dev/null
+++ b/vendor/gix/src/reference/errors.rs
@@ -0,0 +1,89 @@
+///
+pub mod edit {
+ use crate::config;
+
+ /// The error returned by [edit_references(…)][crate::Repository::edit_references()], and others
+ /// which ultimately create a reference.
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ FileTransactionPrepare(#[from] gix_ref::file::transaction::prepare::Error),
+ #[error(transparent)]
+ FileTransactionCommit(#[from] gix_ref::file::transaction::commit::Error),
+ #[error(transparent)]
+ NameValidation(#[from] gix_validate::reference::name::Error),
+ #[error("Could not interpret core.filesRefLockTimeout or core.packedRefsTimeout, it must be the number in milliseconds to wait for locks or negative to wait forever")]
+ LockTimeoutConfiguration(#[from] config::lock_timeout::Error),
+ #[error(transparent)]
+ ParseCommitterTime(#[from] crate::config::time::Error),
+ }
+}
+
+///
+pub mod peel {
+ /// The error returned by [Reference::peel_to_id_in_place(…)][crate::Reference::peel_to_id_in_place()] and
+ /// [Reference::into_fully_peeled_id(…)][crate::Reference::into_fully_peeled_id()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ ToId(#[from] gix_ref::peel::to_id::Error),
+ #[error(transparent)]
+ PackedRefsOpen(#[from] gix_ref::packed::buffer::open::Error),
+ }
+}
+
+///
+pub mod head_id {
+ /// The error returned by [Repository::head_id(…)][crate::Repository::head_id()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ Head(#[from] crate::reference::find::existing::Error),
+ #[error(transparent)]
+ PeelToId(#[from] crate::head::peel::Error),
+ #[error("Branch '{name}' does not have any commits")]
+ Unborn { name: gix_ref::FullName },
+ }
+}
+
+///
+pub mod head_commit {
+ /// The error returned by [Repository::head_commit(…)][crate::Repository::head_commit()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ Head(#[from] crate::reference::find::existing::Error),
+ #[error(transparent)]
+ PeelToCommit(#[from] crate::head::peel::to_commit::Error),
+ }
+}
+
+///
+pub mod find {
+ ///
+ pub mod existing {
+ /// The error returned by [find_reference(…)][crate::Repository::find_reference()], and others.
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ Find(#[from] crate::reference::find::Error),
+ #[error("The reference did not exist")]
+ NotFound,
+ }
+ }
+
+ /// The error returned by [try_find_reference(…)][crate::Repository::try_find_reference()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ Find(#[from] gix_ref::file::find::Error),
+ #[error(transparent)]
+ PackedRefsOpen(#[from] gix_ref::packed::buffer::open::Error),
+ }
+}
diff --git a/vendor/gix/src/reference/iter.rs b/vendor/gix/src/reference/iter.rs
new file mode 100644
index 000000000..a2b022f64
--- /dev/null
+++ b/vendor/gix/src/reference/iter.rs
@@ -0,0 +1,127 @@
+//!
+use std::path::Path;
+
+use gix_odb::pack::Find;
+use gix_ref::file::ReferenceExt;
+
+/// A platform to create iterators over references.
+#[must_use = "Iterators should be obtained from this iterator platform"]
+pub struct Platform<'r> {
+ pub(crate) platform: gix_ref::file::iter::Platform<'r>,
+ pub(crate) repo: &'r crate::Repository,
+}
+
+/// An iterator over references, with or without filter.
+pub struct Iter<'r> {
+ inner: gix_ref::file::iter::LooseThenPacked<'r, 'r>,
+ peel: bool,
+ repo: &'r crate::Repository,
+}
+
+impl<'r> Iter<'r> {
+ fn new(repo: &'r crate::Repository, platform: gix_ref::file::iter::LooseThenPacked<'r, 'r>) -> Self {
+ Iter {
+ inner: platform,
+ peel: false,
+ repo,
+ }
+ }
+}
+
+impl<'r> Platform<'r> {
+ /// Return an iterator over all references in the repository.
+ ///
+ /// Even broken or otherwise unparsable or inaccessible references are returned and have to be handled by the caller on a
+ /// case-by-case basis.
+ pub fn all(&self) -> Result<Iter<'_>, init::Error> {
+ Ok(Iter::new(self.repo, self.platform.all()?))
+ }
+
+ /// Return an iterator over all references that match the given `prefix`.
+ ///
+ /// These are of the form `refs/heads` or `refs/remotes/origin`, and must not contain relative path components like `.` or `..`.
+ // TODO: Create a custom `Path` type that enforces the requirements of git naturally; this type is possibly surprising on Windows
+ // and when not using a trailing '/' to signal directories.
+ pub fn prefixed(&self, prefix: impl AsRef<Path>) -> Result<Iter<'_>, init::Error> {
+ Ok(Iter::new(self.repo, self.platform.prefixed(prefix)?))
+ }
+
+ // TODO: tests
+ /// Return an iterator over all references that are tags.
+ ///
+ /// They are all prefixed with `refs/tags`.
+ pub fn tags(&self) -> Result<Iter<'_>, init::Error> {
+ Ok(Iter::new(self.repo, self.platform.prefixed("refs/tags/")?))
+ }
+
+ // TODO: tests
+ /// Return an iterator over all local branches.
+ ///
+ /// They are all prefixed with `refs/heads`.
+ pub fn local_branches(&self) -> Result<Iter<'_>, init::Error> {
+ Ok(Iter::new(self.repo, self.platform.prefixed("refs/heads/")?))
+ }
+
+ // TODO: tests
+ /// Return an iterator over all remote branches.
+ ///
+ /// They are all prefixed with `refs/remotes`.
+ pub fn remote_branches(&self) -> Result<Iter<'_>, init::Error> {
+ Ok(Iter::new(self.repo, self.platform.prefixed("refs/remotes/")?))
+ }
+}
+
+impl<'r> Iter<'r> {
+ /// Automatically peel references before yielding them during iteration.
+ ///
+ /// This has the same effect as using `iter.map(|r| {r.peel_to_id_in_place(); r})`.
+ ///
+ /// # Note
+ ///
+ /// Doing this is necessary as the packed-refs buffer is already held by the iterator, preventing the consumer of the iterator
+ /// from peeling the returned references themselves.
+ pub fn peeled(mut self) -> Self {
+ self.peel = true;
+ self
+ }
+}
+
+impl<'r> Iterator for Iter<'r> {
+ type Item = Result<crate::Reference<'r>, Box<dyn std::error::Error + Send + Sync + 'static>>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.inner.next().map(|res| {
+ res.map_err(|err| Box::new(err) as Box<dyn std::error::Error + Send + Sync + 'static>)
+ .and_then(|mut r| {
+ if self.peel {
+ let handle = &self.repo;
+ r.peel_to_id_in_place(&handle.refs, |oid, buf| {
+ handle
+ .objects
+ .try_find(oid, buf)
+ .map(|po| po.map(|(o, _l)| (o.kind, o.data)))
+ })
+ .map_err(|err| Box::new(err) as Box<dyn std::error::Error + Send + Sync + 'static>)
+ .map(|_| r)
+ } else {
+ Ok(r)
+ }
+ })
+ .map(|r| crate::Reference::from_ref(r, self.repo))
+ })
+ }
+}
+
+///
+pub mod init {
+ /// The error returned by [`Platform::all()`][super::Platform::all()] or [`Platform::prefixed()`][super::Platform::prefixed()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ Io(#[from] std::io::Error),
+ }
+}
+
+/// The error returned by [references()][crate::Repository::references()].
+pub type Error = gix_ref::packed::buffer::open::Error;
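+
+// Editor's note: an illustrative usage sketch, not part of the upstream file, assuming an
+// already opened `repo: gix::Repository`.
+//
+//     for reference in repo.references()?.local_branches()?.peeled() {
+//         let reference = reference?;
+//         println!("{}", reference.name().as_bstr());
+//     }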
diff --git a/vendor/gix/src/reference/log.rs b/vendor/gix/src/reference/log.rs
new file mode 100644
index 000000000..b516e6499
--- /dev/null
+++ b/vendor/gix/src/reference/log.rs
@@ -0,0 +1,36 @@
+//!
+use gix_object::commit::MessageRef;
+use gix_ref::file::ReferenceExt;
+
+use crate::{
+ bstr::{BStr, BString, ByteVec},
+ Reference,
+};
+
+impl<'repo> Reference<'repo> {
+ /// Return a platform for obtaining iterators over reference logs.
+ pub fn log_iter(&self) -> gix_ref::file::log::iter::Platform<'_, '_> {
+ self.inner.log_iter(&self.repo.refs)
+ }
+}
+
+/// Generate a message typical for git commit logs based on the given `operation`, commit `message` and `num_parents` of the commit.
+pub fn message(operation: &str, message: &BStr, num_parents: usize) -> BString {
+ let mut out = BString::from(operation);
+ if let Some(commit_type) = commit_type_by_parents(num_parents) {
+ out.push_str(b" (");
+ out.extend_from_slice(commit_type.as_bytes());
+ out.push_byte(b')');
+ }
+ out.push_str(b": ");
+ out.extend_from_slice(&MessageRef::from_bytes(message).summary());
+ out
+}
+
+pub(crate) fn commit_type_by_parents(count: usize) -> Option<&'static str> {
+ Some(match count {
+ 0 => "initial",
+ 1 => return None,
+ _two_or_more => "merge",
+ })
+}
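+
+// Editor's note: an illustrative sketch, not part of the upstream patch. It demonstrates the reflog
+// messages produced by `message()` above; the expected strings assume that `summary()` yields the
+// first line of the commit message.
+#[cfg(test)]
+mod message_examples {
+ use super::message;
+ use crate::bstr::BStr;
+
+ #[test]
+ fn operation_and_parent_count_shape_the_message() {
+ assert_eq!(message("commit", BStr::new("fix: typo\n\ndetails"), 1), "commit: fix: typo");
+ assert_eq!(message("commit", BStr::new("first commit"), 0), "commit (initial): first commit");
+ assert_eq!(message("commit", BStr::new("Merge branch 'dev'"), 2), "commit (merge): Merge branch 'dev'");
+ }
+}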
diff --git a/vendor/gix/src/reference/mod.rs b/vendor/gix/src/reference/mod.rs
new file mode 100644
index 000000000..e2ee0d3b2
--- /dev/null
+++ b/vendor/gix/src/reference/mod.rs
@@ -0,0 +1,87 @@
+//!
+
+use gix_odb::pack::Find;
+use gix_ref::file::ReferenceExt;
+
+use crate::{Id, Reference};
+
+pub mod iter;
+///
+pub mod remote;
+
+mod errors;
+pub use errors::{edit, find, head_commit, head_id, peel};
+
+use crate::ext::ObjectIdExt;
+
+pub mod log;
+
+pub use gix_ref::{Category, Kind};
+
+/// Access
+impl<'repo> Reference<'repo> {
+ /// Returns the attached id we point to, or `None` if this is a symbolic ref.
+ pub fn try_id(&self) -> Option<Id<'repo>> {
+ match self.inner.target {
+ gix_ref::Target::Symbolic(_) => None,
+ gix_ref::Target::Peeled(oid) => oid.to_owned().attach(self.repo).into(),
+ }
+ }
+
+ /// Returns the attached id we point to, or panics if this is a symbolic ref.
+ pub fn id(&self) -> Id<'repo> {
+ self.try_id()
+ .expect("BUG: tries to obtain object id from symbolic target")
+ }
+
+ /// Return the target this reference points to.
+ pub fn target(&self) -> gix_ref::TargetRef<'_> {
+ self.inner.target.to_ref()
+ }
+
+ /// Return the reference's full name.
+ pub fn name(&self) -> &gix_ref::FullNameRef {
+ self.inner.name.as_ref()
+ }
+
+ /// Turn this instance into a stand-alone reference.
+ pub fn detach(self) -> gix_ref::Reference {
+ self.inner
+ }
+}
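+
+// Editor's note: an illustrative sketch, not part of the upstream patch. It exercises the accessors
+// above on a reference found by name; "refs/heads/main" is only an example name.
+#[allow(dead_code)]
+fn describe_reference(repo: &crate::Repository) -> Result<(), Box<dyn std::error::Error>> {
+ let reference = repo.find_reference("refs/heads/main")?;
+ println!("name: {}", reference.name().as_bstr());
+ match reference.try_id() {
+ Some(id) => println!("points directly at {}", id.detach()),
+ None => println!("symbolic, target: {:?}", reference.target()),
+ }
+ Ok(())
+}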
+
+impl<'repo> std::fmt::Debug for Reference<'repo> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Debug::fmt(&self.inner, f)
+ }
+}
+
+impl<'repo> Reference<'repo> {
+ pub(crate) fn from_ref(reference: gix_ref::Reference, repo: &'repo crate::Repository) -> Self {
+ Reference { inner: reference, repo }
+ }
+}
+
+impl<'repo> Reference<'repo> {
+ /// Follow all symbolic targets this reference might point to and peel the underlying object
+ /// to the end of the chain, and return it.
+ ///
+ /// This is useful to learn where this reference is ultimately pointing to.
+ pub fn peel_to_id_in_place(&mut self) -> Result<Id<'repo>, peel::Error> {
+ let repo = &self.repo;
+ let oid = self.inner.peel_to_id_in_place(&repo.refs, |oid, buf| {
+ repo.objects
+ .try_find(oid, buf)
+ .map(|po| po.map(|(o, _l)| (o.kind, o.data)))
+ })?;
+ Ok(Id::from_id(oid, repo))
+ }
+
+ /// Similar to [`peel_to_id_in_place()`][Reference::peel_to_id_in_place()], but consumes this instance.
+ pub fn into_fully_peeled_id(mut self) -> Result<Id<'repo>, peel::Error> {
+ self.peel_to_id_in_place()
+ }
+}
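+
+// Editor's note: an illustrative sketch, not part of the upstream patch, showing how a possibly
+// symbolic reference like HEAD is followed to the object it ultimately points to.
+#[allow(dead_code)]
+fn resolve_head(repo: &crate::Repository) -> Result<gix_hash::ObjectId, Box<dyn std::error::Error>> {
+ let head = repo.find_reference("HEAD")?;
+ Ok(head.into_fully_peeled_id()?.detach())
+}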
+
+mod edits;
+pub use edits::{delete, set_target_id};
diff --git a/vendor/gix/src/reference/remote.rs b/vendor/gix/src/reference/remote.rs
new file mode 100644
index 000000000..dd96892e2
--- /dev/null
+++ b/vendor/gix/src/reference/remote.rs
@@ -0,0 +1,49 @@
+use crate::{config, config::tree::Branch, remote, Reference};
+
+/// Remotes
+impl<'repo> Reference<'repo> {
+ /// Find the unvalidated name of our remote for `direction`, as configured in `branch.<name>.remote` or `branch.<name>.pushRemote` respectively.
+ /// If `Some(<name>)` it can be used in [`Repository::find_remote(…)`][crate::Repository::find_remote()], or if `None` then
+ /// [Repository::remote_default_name()][crate::Repository::remote_default_name()] could be used in its place.
+ ///
+ /// Return `None` if no remote is configured.
+ ///
+ /// # Note
+ ///
+ /// - it's recommended to use the [`remote(…)`][Self::remote()] method as it will configure the remote with additional
+ /// information.
+ /// - `branch.<name>.pushRemote` falls back to `branch.<name>.remote`.
+ pub fn remote_name(&self, direction: remote::Direction) -> Option<remote::Name<'repo>> {
+ let name = self.name().shorten();
+ let config = &self.repo.config.resolved;
+ (direction == remote::Direction::Push)
+ .then(|| {
+ config
+ .string("branch", Some(name), Branch::PUSH_REMOTE.name)
+ .or_else(|| config.string("remote", None, config::tree::Remote::PUSH_DEFAULT.name))
+ })
+ .flatten()
+ .or_else(|| config.string("branch", Some(name), Branch::REMOTE.name))
+ .and_then(|name| name.try_into().ok())
+ }
+
+ /// Like [`remote_name(…)`][Self::remote_name()], but configures the returned `Remote` with additional information like
+ ///
+ /// - `branch.<name>.merge` to know which branch on the remote side corresponds to this one for merging when pulling.
+ ///
+ /// It also handles the case where the remote is a configured URL, which has no name.
+ pub fn remote(
+ &self,
+ direction: remote::Direction,
+ ) -> Option<Result<crate::Remote<'repo>, remote::find::existing::Error>> {
+ // TODO: use `branch.<name>.merge`
+ self.remote_name(direction).map(|name| match name {
+ remote::Name::Symbol(name) => self.repo.find_remote(name.as_ref()).map_err(Into::into),
+ remote::Name::Url(url) => gix_url::parse(url.as_ref()).map_err(Into::into).and_then(|url| {
+ self.repo
+ .remote_at(url)
+ .map_err(|err| remote::find::existing::Error::Find(remote::find::Error::Init(err)))
+ }),
+ })
+ }
+}
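+
+// Editor's note: an illustrative sketch, not part of the upstream patch. It finds the remote name
+// configured for fetching the currently checked-out branch; a `None` result means that
+// `Repository::remote_default_name()` could be consulted instead, as mentioned above.
+#[allow(dead_code)]
+fn fetch_remote_of_head(repo: &crate::Repository) -> Option<remote::Name<'_>> {
+ let head = repo.head_ref().ok()??;
+ head.remote_name(remote::Direction::Fetch)
+}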
diff --git a/vendor/gix/src/remote/access.rs b/vendor/gix/src/remote/access.rs
new file mode 100644
index 000000000..1a1cee5de
--- /dev/null
+++ b/vendor/gix/src/remote/access.rs
@@ -0,0 +1,105 @@
+use gix_refspec::RefSpec;
+
+use crate::{bstr::BStr, remote, Remote};
+
+/// Access
+impl<'repo> Remote<'repo> {
+ /// Return the name of this remote or `None` if it wasn't persisted to disk yet.
+ pub fn name(&self) -> Option<&remote::Name<'static>> {
+ self.name.as_ref()
+ }
+
+ /// Return our repository reference.
+ pub fn repo(&self) -> &'repo crate::Repository {
+ self.repo
+ }
+
+ /// Return the set of ref-specs used for `direction`, which may be empty, in order of occurrence in the configuration.
+ pub fn refspecs(&self, direction: remote::Direction) -> &[RefSpec] {
+ match direction {
+ remote::Direction::Fetch => &self.fetch_specs,
+ remote::Direction::Push => &self.push_specs,
+ }
+ }
+
+ /// Return how we handle tags when fetching the remote.
+ pub fn fetch_tags(&self) -> remote::fetch::Tags {
+ self.fetch_tags
+ }
+
+ /// Return the url used for the given `direction` with rewrites from `url.<base>.insteadOf|pushInsteadOf`, unless the instance
+ /// was created with one of the `_without_url_rewrite()` methods.
+ /// For pushing, this is the `remote.<name>.pushUrl` or the `remote.<name>.url` used for fetching, and for fetching it's
+ /// the `remote.<name>.url`.
+ /// Note that it's possible to only have the push url set, in which case there will be no way to fetch from the remote as
+ /// the push-url isn't used for that.
+ pub fn url(&self, direction: remote::Direction) -> Option<&gix_url::Url> {
+ match direction {
+ remote::Direction::Fetch => self.url_alias.as_ref().or(self.url.as_ref()),
+ remote::Direction::Push => self
+ .push_url_alias
+ .as_ref()
+ .or(self.push_url.as_ref())
+ .or_else(|| self.url(remote::Direction::Fetch)),
+ }
+ }
+}
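+
+// Editor's note: an illustrative sketch, not part of the upstream patch. It prints a few of the
+// attributes exposed above for a configured remote; the name "origin" is only an example.
+#[allow(dead_code)]
+fn show_origin(repo: &crate::Repository) -> Result<(), Box<dyn std::error::Error>> {
+ let origin = repo.find_remote("origin")?;
+ if let Some(url) = origin.url(remote::Direction::Fetch) {
+ println!("fetch url: {}", url.to_bstring());
+ }
+ println!("{} fetch refspec(s)", origin.refspecs(remote::Direction::Fetch).len());
+ Ok(())
+}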
+
+/// Modification
+impl Remote<'_> {
+ /// Read `url.<base>.insteadOf|pushInsteadOf` configuration variables and apply them to our urls, changing them in place.
+ ///
+ /// This happens only once, and one of them may be changed even when an error is reported.
+ /// If both urls fail, only the first error (for fetch urls) is reported.
+ pub fn rewrite_urls(&mut self) -> Result<&mut Self, remote::init::Error> {
+ let url_err = match remote::init::rewrite_url(&self.repo.config, self.url.as_ref(), remote::Direction::Fetch) {
+ Ok(url) => {
+ self.url_alias = url;
+ None
+ }
+ Err(err) => err.into(),
+ };
+ let push_url_err =
+ match remote::init::rewrite_url(&self.repo.config, self.push_url.as_ref(), remote::Direction::Push) {
+ Ok(url) => {
+ self.push_url_alias = url;
+ None
+ }
+ Err(err) => err.into(),
+ };
+ url_err.or(push_url_err).map(Err::<&mut Self, _>).transpose()?;
+ Ok(self)
+ }
+
+ /// Replace all currently set refspecs, typically loaded from configuration, with the given `specs` for `direction`,
+ /// failing if any of the input specs could not be parsed.
+ pub fn replace_refspecs<Spec>(
+ &mut self,
+ specs: impl IntoIterator<Item = Spec>,
+ direction: remote::Direction,
+ ) -> Result<(), gix_refspec::parse::Error>
+ where
+ Spec: AsRef<BStr>,
+ {
+ use remote::Direction::*;
+ let specs: Vec<_> = specs
+ .into_iter()
+ .map(|spec| {
+ gix_refspec::parse(
+ spec.as_ref(),
+ match direction {
+ Push => gix_refspec::parse::Operation::Push,
+ Fetch => gix_refspec::parse::Operation::Fetch,
+ },
+ )
+ .map(|url| url.to_owned())
+ })
+ .collect::<Result<_, _>>()?;
+ let dst = match direction {
+ Push => &mut self.push_specs,
+ Fetch => &mut self.fetch_specs,
+ };
+ *dst = specs;
+ Ok(())
+ }
+}
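+
+// Editor's note: an illustrative sketch, not part of the upstream patch. It narrows a remote to
+// fetch a single branch by replacing whatever refspecs were configured; the spec is an example.
+#[allow(dead_code)]
+fn fetch_only_main(origin: &mut Remote<'_>) -> Result<(), gix_refspec::parse::Error> {
+ origin.replace_refspecs(["+refs/heads/main:refs/remotes/origin/main"], remote::Direction::Fetch)
+}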
diff --git a/vendor/gix/src/remote/build.rs b/vendor/gix/src/remote/build.rs
new file mode 100644
index 000000000..10c216537
--- /dev/null
+++ b/vendor/gix/src/remote/build.rs
@@ -0,0 +1,84 @@
+use std::convert::TryInto;
+
+use crate::{bstr::BStr, remote, Remote};
+
+/// Builder methods
+impl Remote<'_> {
+ /// Set the `url` to be used when pushing data to a remote.
+ pub fn push_url<Url, E>(self, url: Url) -> Result<Self, remote::init::Error>
+ where
+ Url: TryInto<gix_url::Url, Error = E>,
+ gix_url::parse::Error: From<E>,
+ {
+ self.push_url_inner(url, true)
+ }
+
+ /// Set the `url` to be used when pushing data to a remote, without applying rewrite rules in case these could be faulty,
+ /// eliminating one failure mode.
+ pub fn push_url_without_url_rewrite<Url, E>(self, url: Url) -> Result<Self, remote::init::Error>
+ where
+ Url: TryInto<gix_url::Url, Error = E>,
+ gix_url::parse::Error: From<E>,
+ {
+ self.push_url_inner(url, false)
+ }
+
+ /// Configure how tags should be handled when fetching from the remote.
+ pub fn with_fetch_tags(mut self, tags: remote::fetch::Tags) -> Self {
+ self.fetch_tags = tags;
+ self
+ }
+
+ fn push_url_inner<Url, E>(mut self, push_url: Url, should_rewrite_urls: bool) -> Result<Self, remote::init::Error>
+ where
+ Url: TryInto<gix_url::Url, Error = E>,
+ gix_url::parse::Error: From<E>,
+ {
+ let push_url = push_url
+ .try_into()
+ .map_err(|err| remote::init::Error::Url(err.into()))?;
+ self.push_url = push_url.into();
+
+ let (_, push_url_alias) = should_rewrite_urls
+ .then(|| remote::init::rewrite_urls(&self.repo.config, None, self.push_url.as_ref()))
+ .unwrap_or(Ok((None, None)))?;
+ self.push_url_alias = push_url_alias;
+
+ Ok(self)
+ }
+
+ /// Add `specs` as refspecs for `direction` to our list if they are unique, or ignore them otherwise.
+ pub fn with_refspecs<Spec>(
+ mut self,
+ specs: impl IntoIterator<Item = Spec>,
+ direction: remote::Direction,
+ ) -> Result<Self, gix_refspec::parse::Error>
+ where
+ Spec: AsRef<BStr>,
+ {
+ use remote::Direction::*;
+ let new_specs = specs
+ .into_iter()
+ .map(|spec| {
+ gix_refspec::parse(
+ spec.as_ref(),
+ match direction {
+ Push => gix_refspec::parse::Operation::Push,
+ Fetch => gix_refspec::parse::Operation::Fetch,
+ },
+ )
+ .map(|s| s.to_owned())
+ })
+ .collect::<Result<Vec<_>, _>>()?;
+ let specs = match direction {
+ Push => &mut self.push_specs,
+ Fetch => &mut self.fetch_specs,
+ };
+ for spec in new_specs {
+ if !specs.contains(&spec) {
+ specs.push(spec);
+ }
+ }
+ Ok(self)
+ }
+}
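+
+// Editor's note: an illustrative sketch, not part of the upstream patch. It combines the builder
+// methods above to configure an in-memory remote; the URL and refspec are made-up examples.
+#[allow(dead_code)]
+fn remote_for_mirroring(repo: &crate::Repository) -> Result<Remote<'_>, Box<dyn std::error::Error>> {
+ let url = gix_url::parse("https://example.com/repo.git".into())?;
+ let remote = repo
+ .remote_at(url)?
+ .with_fetch_tags(remote::fetch::Tags::None)
+ .with_refspecs(["+refs/heads/*:refs/remotes/origin/*"], remote::Direction::Fetch)?;
+ Ok(remote)
+}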
diff --git a/vendor/gix/src/remote/connect.rs b/vendor/gix/src/remote/connect.rs
new file mode 100644
index 000000000..8e656975e
--- /dev/null
+++ b/vendor/gix/src/remote/connect.rs
@@ -0,0 +1,166 @@
+#![allow(clippy::result_large_err)]
+use gix_protocol::transport::client::Transport;
+
+use crate::{remote::Connection, Progress, Remote};
+
+mod error {
+ use crate::{bstr::BString, config, remote};
+
+ /// The error returned by [connect()][crate::Remote::connect()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("Could not obtain options for connecting via ssh")]
+ SshOptions(#[from] config::ssh_connect_options::Error),
+ #[error("Could not obtain the current directory")]
+ CurrentDir(#[from] std::io::Error),
+ #[error("Could not access remote repository at \"{}\"", directory.display())]
+ InvalidRemoteRepositoryPath { directory: std::path::PathBuf },
+ #[error(transparent)]
+ SchemePermission(#[from] config::protocol::allow::Error),
+ #[error("Protocol {scheme:?} of url {url:?} is denied per configuration")]
+ ProtocolDenied { url: BString, scheme: gix_url::Scheme },
+ #[error(transparent)]
+ Connect(#[from] gix_protocol::transport::client::connect::Error),
+ #[error("The {} url was missing - don't know where to establish a connection to", direction.as_str())]
+ MissingUrl { direction: remote::Direction },
+ #[error("Protocol named {given:?} is not a valid protocol. Choose between 1 and 2")]
+ UnknownProtocol { given: BString },
+ #[error("Could not verify that \"{}\" url is a valid git directory before attempting to use it", url.to_bstring())]
+ FileUrl {
+ source: Box<gix_discover::is_git::Error>,
+ url: gix_url::Url,
+ },
+ }
+
+ impl gix_protocol::transport::IsSpuriousError for Error {
+ /// Return `true` if retrying might result in a different outcome due to IO working out differently.
+ fn is_spurious(&self) -> bool {
+ match self {
+ Error::Connect(err) => err.is_spurious(),
+ _ => false,
+ }
+ }
+ }
+}
+pub use error::Error;
+
+/// Establishing connections to remote hosts (without performing a git-handshake).
+impl<'repo> Remote<'repo> {
+ /// Create a new connection using `transport` to communicate, with `progress` to indicate changes.
+ ///
+ /// Note that this method expects the `transport` to be created by the user, which would involve the [`url()`][Self::url()].
+ /// It's meant to be used when async operation is needed with runtimes of the user's choice.
+ pub fn to_connection_with_transport<T, P>(&self, transport: T, progress: P) -> Connection<'_, 'repo, T, P>
+ where
+ T: Transport,
+ P: Progress,
+ {
+ Connection {
+ remote: self,
+ authenticate: None,
+ transport_options: None,
+ transport,
+ progress,
+ }
+ }
+
+ /// Connect to the url suitable for `direction` and return a handle through which operations can be performed.
+ ///
+ /// Note that the `protocol.version` configuration key affects the transport protocol used to connect,
+ /// with `2` being the default.
+ ///
+ /// The transport used for the connection can be configured via `transport_mut().configure()`, assuming the transport
+ /// actually used is well-known. If that's not the case, the transport can be created by hand and passed to
+ /// [to_connection_with_transport()][Self::to_connection_with_transport()].
+ #[cfg(any(feature = "blocking-network-client", feature = "async-network-client-async-std"))]
+ #[gix_protocol::maybe_async::maybe_async]
+ pub async fn connect<P>(
+ &self,
+ direction: crate::remote::Direction,
+ progress: P,
+ ) -> Result<Connection<'_, 'repo, Box<dyn Transport + Send>, P>, Error>
+ where
+ P: Progress,
+ {
+ let (url, version) = self.sanitized_url_and_version(direction)?;
+ #[cfg(feature = "blocking-network-client")]
+ let scheme_is_ssh = url.scheme == gix_url::Scheme::Ssh;
+ let transport = gix_protocol::transport::connect(
+ url,
+ gix_protocol::transport::client::connect::Options {
+ version,
+ #[cfg(feature = "blocking-network-client")]
+ ssh: scheme_is_ssh
+ .then(|| self.repo.ssh_connect_options())
+ .transpose()?
+ .unwrap_or_default(),
+ },
+ )
+ .await?;
+ Ok(self.to_connection_with_transport(transport, progress))
+ }
+
+ /// Produce the sanitized URL and protocol version to use as obtained by querying the repository configuration.
+ ///
+ /// This can be useful when using custom transports to allow additional configuration.
+ pub fn sanitized_url_and_version(
+ &self,
+ direction: crate::remote::Direction,
+ ) -> Result<(gix_url::Url, gix_protocol::transport::Protocol), Error> {
+ fn sanitize(mut url: gix_url::Url) -> Result<gix_url::Url, Error> {
+ if url.scheme == gix_url::Scheme::File {
+ let mut dir = gix_path::to_native_path_on_windows(url.path.as_ref());
+ let kind = gix_discover::is_git(dir.as_ref())
+ .or_else(|_| {
+ dir.to_mut().push(gix_discover::DOT_GIT_DIR);
+ gix_discover::is_git(dir.as_ref())
+ })
+ .map_err(|err| Error::FileUrl {
+ source: err.into(),
+ url: url.clone(),
+ })?;
+ let (git_dir, _work_dir) = gix_discover::repository::Path::from_dot_git_dir(
+ dir.clone().into_owned(),
+ kind,
+ std::env::current_dir()?,
+ )
+ .ok_or_else(|| Error::InvalidRemoteRepositoryPath {
+ directory: dir.into_owned(),
+ })?
+ .into_repository_and_work_tree_directories();
+ url.path = gix_path::into_bstr(git_dir).into_owned();
+ }
+ Ok(url)
+ }
+
+ use gix_protocol::transport::Protocol;
+ let version = self
+ .repo
+ .config
+ .resolved
+ .integer("protocol", None, "version")
+ .unwrap_or(Ok(2))
+ .map_err(|err| Error::UnknownProtocol { given: err.input })
+ .and_then(|num| {
+ Ok(match num {
+ 1 => Protocol::V1,
+ 2 => Protocol::V2,
+ num => {
+ return Err(Error::UnknownProtocol {
+ given: num.to_string().into(),
+ })
+ }
+ })
+ })?;
+
+ let url = self.url(direction).ok_or(Error::MissingUrl { direction })?.to_owned();
+ if !self.repo.config.url_scheme()?.allow(&url.scheme) {
+ return Err(Error::ProtocolDenied {
+ url: url.to_bstring(),
+ scheme: url.scheme,
+ });
+ }
+ Ok((sanitize(url)?, version))
+ }
+}
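+
+// Editor's note: an illustrative sketch, not part of the upstream patch. It shows how a custom
+// transport could be fed with the same URL and protocol version that `connect()` would use.
+#[allow(dead_code)]
+fn connection_parameters(
+ remote: &Remote<'_>,
+) -> Result<(gix_url::Url, gix_protocol::transport::Protocol), Error> {
+ remote.sanitized_url_and_version(crate::remote::Direction::Fetch)
+}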
diff --git a/vendor/gix/src/remote/connection/access.rs b/vendor/gix/src/remote/connection/access.rs
new file mode 100644
index 000000000..e4c31c3f5
--- /dev/null
+++ b/vendor/gix/src/remote/connection/access.rs
@@ -0,0 +1,67 @@
+use crate::{
+ remote::{connection::AuthenticateFn, Connection},
+ Remote,
+};
+
+/// Builder
+impl<'a, 'repo, T, P> Connection<'a, 'repo, T, P> {
+ /// Set a custom credentials callback to provide credentials if the remotes require authentication.
+ ///
+ /// Otherwise we will use the git configuration to perform the same task as the `git credential` helper program,
+ /// which calls other helper programs in succession, resorting to a prompt to obtain credentials from the
+ /// user.
+ ///
+ /// A custom function may also be used to prevent accessing resources with authentication.
+ ///
+ /// Use the [configured_credentials()][Connection::configured_credentials()] method to obtain the implementation
+ /// that would otherwise be used, which can be useful to proxy the default configuration and obtain information about the
+ /// URLs to authenticate with.
+ pub fn with_credentials(
+ mut self,
+ helper: impl FnMut(gix_credentials::helper::Action) -> gix_credentials::protocol::Result + 'a,
+ ) -> Self {
+ self.authenticate = Some(Box::new(helper));
+ self
+ }
+
+ /// Provide configuration to be used before the first handshake is conducted.
+ /// It's typically created by initializing it with [`Repository::transport_options()`][crate::Repository::transport_options()], which
+ /// is also the default if this isn't set explicitly. Note that the default configuration is created from `git`
+ /// configuration, which can also be manipulated through overrides.
+ ///
+ /// Use this method to provide transport configuration with custom backend configuration that is not configurable by other means and
+ /// custom to the application at hand.
+ pub fn with_transport_options(mut self, config: Box<dyn std::any::Any>) -> Self {
+ self.transport_options = Some(config);
+ self
+ }
+}
+
+/// Access
+impl<'a, 'repo, T, P> Connection<'a, 'repo, T, P> {
+ /// A utility to return a function that will use this repository's configuration to obtain credentials, similar to
+ /// what `git credential` is doing.
+ ///
+ /// It's meant to be used by users of the [`with_credentials()`][Self::with_credentials()] builder to gain access to the
+ /// default way of handling credentials, which they can call as fallback.
+ pub fn configured_credentials(
+ &self,
+ url: gix_url::Url,
+ ) -> Result<AuthenticateFn<'static>, crate::config::credential_helpers::Error> {
+ let (mut cascade, _action_with_normalized_url, prompt_opts) =
+ self.remote.repo.config_snapshot().credential_helpers(url)?;
+ Ok(Box::new(move |action| cascade.invoke(action, prompt_opts.clone())) as AuthenticateFn<'_>)
+ }
+ /// Return the underlying remote that instantiated this connection.
+ pub fn remote(&self) -> &Remote<'repo> {
+ self.remote
+ }
+
+ /// Provide a mutable transport to allow interacting with it according to its actual type.
+ /// Note that the caller _should not_ call [`configure()`][gix_protocol::transport::client::TransportWithoutIO::configure()]
+ /// as we will call it automatically before performing the handshake. Instead, to bring in custom configuration,
+ /// call [`with_transport_options()`][Connection::with_transport_options()].
+ pub fn transport_mut(&mut self) -> &mut T {
+ &mut self.transport
+ }
+}
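+
+// Editor's note: an illustrative sketch, not part of the upstream patch. It wraps the default
+// credential machinery obtained from `configured_credentials()` so that every authentication
+// attempt is logged before being delegated; `url` is whatever the caller intends to connect to.
+#[allow(dead_code)]
+fn with_logged_credentials<'a, 'repo, T, P>(
+ con: Connection<'a, 'repo, T, P>,
+ url: gix_url::Url,
+) -> Result<Connection<'a, 'repo, T, P>, crate::config::credential_helpers::Error> {
+ let mut default = con.configured_credentials(url)?;
+ Ok(con.with_credentials(move |action| {
+ eprintln!("credentials requested for remote interaction");
+ default(action)
+ }))
+}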
diff --git a/vendor/gix/src/remote/connection/fetch/config.rs b/vendor/gix/src/remote/connection/fetch/config.rs
new file mode 100644
index 000000000..4782991bc
--- /dev/null
+++ b/vendor/gix/src/remote/connection/fetch/config.rs
@@ -0,0 +1,26 @@
+use super::Error;
+use crate::{
+ config::{cache::util::ApplyLeniency, tree::Pack},
+ Repository,
+};
+
+pub fn index_threads(repo: &Repository) -> Result<Option<usize>, Error> {
+ Ok(repo
+ .config
+ .resolved
+ .integer_filter("pack", None, Pack::THREADS.name, &mut repo.filter_config_section())
+ .map(|threads| Pack::THREADS.try_into_usize(threads))
+ .transpose()
+ .with_leniency(repo.options.lenient_config)?)
+}
+
+pub fn pack_index_version(repo: &Repository) -> Result<gix_pack::index::Version, Error> {
+ Ok(repo
+ .config
+ .resolved
+ .integer("pack", None, Pack::INDEX_VERSION.name)
+ .map(|value| Pack::INDEX_VERSION.try_into_index_version(value))
+ .transpose()
+ .with_leniency(repo.options.lenient_config)?
+ .unwrap_or(gix_pack::index::Version::V2))
+}
diff --git a/vendor/gix/src/remote/connection/fetch/error.rs b/vendor/gix/src/remote/connection/fetch/error.rs
new file mode 100644
index 000000000..0e6a4b840
--- /dev/null
+++ b/vendor/gix/src/remote/connection/fetch/error.rs
@@ -0,0 +1,41 @@
+use crate::config;
+
+/// The error returned by [`receive()`](super::Prepare::receive()).
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum Error {
+ #[error("The value to configure pack threads should be 0 to auto-configure or the amount of threads to use")]
+ PackThreads(#[from] config::unsigned_integer::Error),
+ #[error("The value to configure the pack index version should be 1 or 2")]
+ PackIndexVersion(#[from] config::key::GenericError),
+ #[error("Could not decode server reply")]
+ FetchResponse(#[from] gix_protocol::fetch::response::Error),
+ #[error("Cannot fetch from a remote that uses {remote} while local repository uses {local} for object hashes")]
+ IncompatibleObjectHash {
+ local: gix_hash::Kind,
+ remote: gix_hash::Kind,
+ },
+ #[error(transparent)]
+ Negotiate(#[from] super::negotiate::Error),
+ #[error(transparent)]
+ Client(#[from] gix_protocol::transport::client::Error),
+ #[error(transparent)]
+ WritePack(#[from] gix_pack::bundle::write::Error),
+ #[error(transparent)]
+ UpdateRefs(#[from] super::refs::update::Error),
+ #[error("Failed to remove .keep file at \"{}\"", path.display())]
+ RemovePackKeepFile {
+ path: std::path::PathBuf,
+ source: std::io::Error,
+ },
+}
+
+impl gix_protocol::transport::IsSpuriousError for Error {
+ fn is_spurious(&self) -> bool {
+ match self {
+ Error::FetchResponse(err) => err.is_spurious(),
+ Error::Client(err) => err.is_spurious(),
+ _ => false,
+ }
+ }
+}
diff --git a/vendor/gix/src/remote/connection/fetch/mod.rs b/vendor/gix/src/remote/connection/fetch/mod.rs
new file mode 100644
index 000000000..4ce631b1e
--- /dev/null
+++ b/vendor/gix/src/remote/connection/fetch/mod.rs
@@ -0,0 +1,240 @@
+use gix_protocol::transport::client::Transport;
+
+use crate::{
+ bstr::BString,
+ remote,
+ remote::{
+ fetch::{DryRun, RefMap},
+ ref_map, Connection,
+ },
+ Progress,
+};
+
+mod error;
+pub use error::Error;
+
+use crate::remote::fetch::WritePackedRefs;
+
+/// The way reflog messages should be composed whenever a ref is written with recent objects from a remote.
+pub enum RefLogMessage {
+ /// Prefix the log with `action` and generate the typical suffix as `git` would.
+ Prefixed {
+ /// The action to use, like `fetch` or `pull`.
+ action: String,
+ },
+ /// Control the entire message, using `message` verbatim.
+ Override {
+ /// The complete reflog message.
+ message: BString,
+ },
+}
+
+impl RefLogMessage {
+ pub(crate) fn compose(&self, context: &str) -> BString {
+ match self {
+ RefLogMessage::Prefixed { action } => format!("{action}: {context}").into(),
+ RefLogMessage::Override { message } => message.to_owned(),
+ }
+ }
+}
+
+/// The status of the repository after the fetch operation
+#[derive(Debug, Clone)]
+pub enum Status {
+ /// Nothing changed as the remote didn't have anything new compared to our tracking branches, thus no pack was received
+ /// and no new object was added.
+ NoPackReceived {
+ /// However, depending on the refspecs, references might have been updated nonetheless to point to objects as
+ /// reported by the remote.
+ update_refs: refs::update::Outcome,
+ },
+ /// There was at least one tip with a new object which we received.
+ Change {
+ /// Information collected while writing the pack and its index.
+ write_pack_bundle: gix_pack::bundle::write::Outcome,
+ /// Information collected while updating references.
+ update_refs: refs::update::Outcome,
+ },
+ /// A dry run was performed which leaves the local repository without any change
+ /// nor will a pack have been received.
+ DryRun {
+ /// Information about what updates to refs would have been done.
+ update_refs: refs::update::Outcome,
+ },
+}
+
+/// The outcome of receiving a pack via [`Prepare::receive()`].
+#[derive(Debug, Clone)]
+pub struct Outcome {
+ /// The result of the initial mapping of references, the prerequisite for any fetch.
+ pub ref_map: RefMap,
+ /// The status of the operation to indicate what happened.
+ pub status: Status,
+}
+
+/// The progress ids used during various steps of the fetch operation.
+///
+/// Note that tagged progress isn't very widely available yet, but support can be improved as needed.
+///
+/// Use this information to selectively extract the progress of interest in case the parent application has custom visualization.
+#[derive(Debug, Copy, Clone)]
+pub enum ProgressId {
+ /// The progress name is defined by the remote and the progress messages it sets, along with their progress values and limits.
+ RemoteProgress,
+}
+
+impl From<ProgressId> for gix_features::progress::Id {
+ fn from(v: ProgressId) -> Self {
+ match v {
+ ProgressId::RemoteProgress => *b"FERP",
+ }
+ }
+}
+
+///
+pub mod negotiate;
+
+///
+pub mod prepare {
+ /// The error returned by [`prepare_fetch()`][super::Connection::prepare_fetch()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("Cannot perform a meaningful fetch operation without any configured ref-specs")]
+ MissingRefSpecs,
+ #[error(transparent)]
+ RefMap(#[from] crate::remote::ref_map::Error),
+ }
+
+ impl gix_protocol::transport::IsSpuriousError for Error {
+ fn is_spurious(&self) -> bool {
+ match self {
+ Error::RefMap(err) => err.is_spurious(),
+ _ => false,
+ }
+ }
+ }
+}
+
+impl<'remote, 'repo, T, P> Connection<'remote, 'repo, T, P>
+where
+ T: Transport,
+ P: Progress,
+{
+ /// Perform a handshake with the remote and obtain a ref-map with `options`, as the basis for the upcoming fetch.
+ /// Note that at this point, the `transport` should already be configured using the [`transport_mut()`][Self::transport_mut()]
+ /// method, as it will be consumed here.
+ ///
+ /// From there additional properties of the fetch can be adjusted to override the defaults that are configured via gix-config.
+ ///
+ /// # Async Experimental
+ ///
+ /// Note that this implementation is currently correct only in blocking mode, as it relies on Drop semantics to close the connection
+ /// should the fetch not be performed. Furthermore, the code doing the fetch is inherently blocking and not offloaded to a thread,
+ /// making this call block the executor.
+ /// It's best to unblock it by placing it into its own thread, or to offload it should usage in an async context truly be required.
+ #[allow(clippy::result_large_err)]
+ #[gix_protocol::maybe_async::maybe_async]
+ pub async fn prepare_fetch(
+ mut self,
+ options: ref_map::Options,
+ ) -> Result<Prepare<'remote, 'repo, T, P>, prepare::Error> {
+ if self.remote.refspecs(remote::Direction::Fetch).is_empty() {
+ return Err(prepare::Error::MissingRefSpecs);
+ }
+ let ref_map = self.ref_map_inner(options).await?;
+ Ok(Prepare {
+ con: Some(self),
+ ref_map,
+ dry_run: DryRun::No,
+ reflog_message: None,
+ write_packed_refs: WritePackedRefs::Never,
+ })
+ }
+}
+
+impl<'remote, 'repo, T, P> Prepare<'remote, 'repo, T, P>
+where
+ T: Transport,
+{
+ /// Return the ref_map (that includes the server handshake) which was part of listing refs prior to fetching a pack.
+ pub fn ref_map(&self) -> &RefMap {
+ &self.ref_map
+ }
+}
+
+mod config;
+mod receive_pack;
+///
+#[path = "update_refs/mod.rs"]
+pub mod refs;
+
+/// A structure to hold the result of the handshake with the remote and configure the upcoming fetch operation.
+pub struct Prepare<'remote, 'repo, T, P>
+where
+ T: Transport,
+{
+ con: Option<Connection<'remote, 'repo, T, P>>,
+ ref_map: RefMap,
+ dry_run: DryRun,
+ reflog_message: Option<RefLogMessage>,
+ write_packed_refs: WritePackedRefs,
+}
+
+/// Builder
+impl<'remote, 'repo, T, P> Prepare<'remote, 'repo, T, P>
+where
+ T: Transport,
+{
+ /// If dry run is enabled, no change to the repository will be made.
+ ///
+ /// This works by not actually fetching the pack after negotiating it, nor will refs be updated.
+ pub fn with_dry_run(mut self, enabled: bool) -> Self {
+ self.dry_run = if enabled { DryRun::Yes } else { DryRun::No };
+ self
+ }
+
+ /// If enabled, don't write ref updates to loose refs, but put them exclusively into packed-refs.
+ ///
+ /// This improves performance and allows case-insensitive filesystems to deal with ref names that would otherwise
+ /// collide.
+ pub fn with_write_packed_refs_only(mut self, enabled: bool) -> Self {
+ self.write_packed_refs = if enabled {
+ WritePackedRefs::Only
+ } else {
+ WritePackedRefs::Never
+ };
+ self
+ }
+
+ /// Set the reflog message to use when updating refs after fetching a pack.
+ pub fn with_reflog_message(mut self, reflog_message: RefLogMessage) -> Self {
+ self.reflog_message = reflog_message.into();
+ self
+ }
+}
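+
+// Editor's note: an illustrative sketch, not part of the upstream patch. It shows a typical builder
+// chain on `Prepare` before `receive()` is called: a dry-run that writes only to packed-refs and
+// uses a custom reflog prefix.
+#[allow(dead_code)]
+fn configure_fetch<'remote, 'repo, T, P>(prepare: Prepare<'remote, 'repo, T, P>) -> Prepare<'remote, 'repo, T, P>
+where
+ T: Transport,
+{
+ prepare
+ .with_dry_run(true)
+ .with_write_packed_refs_only(true)
+ .with_reflog_message(RefLogMessage::Prefixed { action: "fetch".into() })
+}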
+
+impl<'remote, 'repo, T, P> Drop for Prepare<'remote, 'repo, T, P>
+where
+ T: Transport,
+{
+ fn drop(&mut self) {
+ if let Some(mut con) = self.con.take() {
+ #[cfg(feature = "async-network-client")]
+ {
+ // TODO: this should be an async drop once the feature is available.
+ // Right now we block the executor by forcing this communication, but that only
+ // happens if the user didn't actually try to receive a pack, which consumes the
+ // connection in an async context.
+ gix_protocol::futures_lite::future::block_on(gix_protocol::indicate_end_of_interaction(
+ &mut con.transport,
+ ))
+ .ok();
+ }
+ #[cfg(not(feature = "async-network-client"))]
+ {
+ gix_protocol::indicate_end_of_interaction(&mut con.transport).ok();
+ }
+ }
+ }
+}
diff --git a/vendor/gix/src/remote/connection/fetch/negotiate.rs b/vendor/gix/src/remote/connection/fetch/negotiate.rs
new file mode 100644
index 000000000..f5051ec72
--- /dev/null
+++ b/vendor/gix/src/remote/connection/fetch/negotiate.rs
@@ -0,0 +1,78 @@
+/// The way the negotiation is performed
+#[derive(Copy, Clone)]
+pub(crate) enum Algorithm {
+ /// Our very own implementation that probably should be replaced by one of the known algorithms soon.
+ Naive,
+}
+
+/// The error returned during negotiation.
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum Error {
+ #[error("We were unable to figure out what objects the server should send after {rounds} round(s)")]
+ NegotiationFailed { rounds: usize },
+}
+
+/// Negotiate one round with `algo` by looking at `ref_map` and adjusting `arguments` to contain the haves and wants.
+/// If this is not the first round, the `previous_response` is set with the last recorded server response.
+/// Returns `true` if the negotiation is done from our side so the server won't keep asking.
+pub(crate) fn one_round(
+ algo: Algorithm,
+ round: usize,
+ repo: &crate::Repository,
+ ref_map: &crate::remote::fetch::RefMap,
+ fetch_tags: crate::remote::fetch::Tags,
+ arguments: &mut gix_protocol::fetch::Arguments,
+ _previous_response: Option<&gix_protocol::fetch::Response>,
+) -> Result<bool, Error> {
+ let tag_refspec_to_ignore = fetch_tags
+ .to_refspec()
+ .filter(|_| matches!(fetch_tags, crate::remote::fetch::Tags::Included));
+ match algo {
+ Algorithm::Naive => {
+ assert_eq!(round, 1, "Naive always finishes after the first round, and claims to be done.");
+ let mut has_missing_tracking_branch = false;
+ for mapping in &ref_map.mappings {
+ if tag_refspec_to_ignore.map_or(false, |tag_spec| {
+ mapping
+ .spec_index
+ .implicit_index()
+ .and_then(|idx| ref_map.extra_refspecs.get(idx))
+ .map_or(false, |spec| spec.to_ref() == tag_spec)
+ }) {
+ continue;
+ }
+ let have_id = mapping.local.as_ref().and_then(|name| {
+ repo.find_reference(name)
+ .ok()
+ .and_then(|r| r.target().try_id().map(ToOwned::to_owned))
+ });
+ match have_id {
+ Some(have_id) => {
+ if let Some(want_id) = mapping.remote.as_id() {
+ if want_id != have_id {
+ arguments.want(want_id);
+ arguments.have(have_id);
+ }
+ }
+ }
+ None => {
+ if let Some(want_id) = mapping.remote.as_id() {
+ arguments.want(want_id);
+ has_missing_tracking_branch = true;
+ }
+ }
+ }
+ }
+
+ if has_missing_tracking_branch {
+ if let Ok(Some(r)) = repo.head_ref() {
+ if let Some(id) = r.target().try_id() {
+ arguments.have(id);
+ }
+ }
+ }
+ Ok(true)
+ }
+ }
+}
diff --git a/vendor/gix/src/remote/connection/fetch/receive_pack.rs b/vendor/gix/src/remote/connection/fetch/receive_pack.rs
new file mode 100644
index 000000000..686de5999
--- /dev/null
+++ b/vendor/gix/src/remote/connection/fetch/receive_pack.rs
@@ -0,0 +1,238 @@
+use std::sync::atomic::AtomicBool;
+
+use gix_odb::FindExt;
+use gix_protocol::transport::client::Transport;
+
+use crate::{
+ remote,
+ remote::{
+ connection::fetch::config,
+ fetch,
+ fetch::{negotiate, refs, Error, Outcome, Prepare, ProgressId, RefLogMessage, Status},
+ },
+ Progress,
+};
+
+impl<'remote, 'repo, T, P> Prepare<'remote, 'repo, T, P>
+where
+ T: Transport,
+ P: Progress,
+ P::SubProgress: 'static,
+{
+ /// Receive the pack and perform the operation as configured by git via `gix-config` or overridden by various builder methods.
+ /// The returned [`Outcome`] describes what happened: its `status` is [`Status::NoPackReceived`] if all remote refs were already
+ /// at the same state as they are locally, while [`Status::Change`] or [`Status::DryRun`] inform about the changes that were (or would have been) made.
+ ///
+ /// ### Negotiation
+ ///
+ /// "fetch.negotiationAlgorithm" describes algorithms `git` uses currently, with the default being `consecutive` and `skipping` being
+ /// experimented with. We currently implement something we could call 'naive' which works for now.
+ ///
+ /// ### Pack `.keep` files
+ ///
+ /// Packs that are freshly written to the object database are vulnerable to garbage collection for the brief time it takes between
+ /// them being placed and the respective references being written to disk, which binds their objects to the commit graph and makes them reachable.
+ ///
+ /// To circumvent this issue, a `.keep` file is created before any pack-related file (i.e. `.pack` or `.idx`) is written, which tells
+ /// garbage collectors (like `git maintenance` or `git gc`) to leave the corresponding pack file alone.
+ ///
+ /// If there were any ref updates or the received pack was empty, the `.keep` file will be deleted automatically, leaving
+ /// `write_pack_bundle.keep_path` set to `None`.
+ /// However, if no ref-update happened, the path will still be present in `write_pack_bundle.keep_path` and is expected to be handled by the caller.
+ /// A known application of this behaviour is in `remote-helper` implementations, which should send this path via `lock <path>` to stdout
+ /// to inform git about the file it will remove once it has updated the refs accordingly.
+ ///
+ /// ### Deviation
+ ///
+ /// When **updating refs**, the `git-fetch` docs state the following:
+ ///
+ /// > Unlike when pushing with git-push, any updates outside of refs/{tags,heads}/* will be accepted without + in the refspec (or --force), whether that’s swapping e.g. a tree object for a blob, or a commit for another commit that’s doesn’t have the previous commit as an ancestor etc.
+ ///
+ /// We explicitly don't special-case those refs and expect the user to take control. Note that by its nature,
+ /// force only applies to refs pointing to commits; if they don't point to commits, they will be updated either way in our
+ /// implementation as well.
+ ///
+ /// ### Async Mode Shortcoming
+ ///
+ /// Currently the entire process of resolving a pack is blocking the executor. This can be fixed using the `blocking` crate, but it
+ /// didn't seem worth the tradeoff of having more complex code.
+ ///
+ /// ### Configuration
+ ///
+ /// - `gitoxide.userAgent` is read to obtain the application user agent for git servers and for HTTP servers as well.
+ ///
+ #[gix_protocol::maybe_async::maybe_async]
+ pub async fn receive(mut self, should_interrupt: &AtomicBool) -> Result<Outcome, Error> {
+ let mut con = self.con.take().expect("receive() can only be called once");
+
+ let handshake = &self.ref_map.handshake;
+ let protocol_version = handshake.server_protocol_version;
+
+ let fetch = gix_protocol::Command::Fetch;
+ let progress = &mut con.progress;
+ let repo = con.remote.repo;
+ let fetch_features = {
+ let mut f = fetch.default_features(protocol_version, &handshake.capabilities);
+ f.push(repo.config.user_agent_tuple());
+ f
+ };
+
+ gix_protocol::fetch::Response::check_required_features(protocol_version, &fetch_features)?;
+ let sideband_all = fetch_features.iter().any(|(n, _)| *n == "sideband-all");
+ let mut arguments = gix_protocol::fetch::Arguments::new(protocol_version, fetch_features);
+ if matches!(con.remote.fetch_tags, crate::remote::fetch::Tags::Included) {
+ if !arguments.can_use_include_tag() {
+ unimplemented!("we expect servers to support 'include-tag', otherwise we have to implement another pass to fetch attached tags separately");
+ }
+ arguments.use_include_tag();
+ }
+ let mut previous_response = None::<gix_protocol::fetch::Response>;
+ let mut round = 1;
+
+ if self.ref_map.object_hash != repo.object_hash() {
+ return Err(Error::IncompatibleObjectHash {
+ local: repo.object_hash(),
+ remote: self.ref_map.object_hash,
+ });
+ }
+
+ let reader = 'negotiation: loop {
+ progress.step();
+ progress.set_name(format!("negotiate (round {round})"));
+
+ let is_done = match negotiate::one_round(
+ negotiate::Algorithm::Naive,
+ round,
+ repo,
+ &self.ref_map,
+ con.remote.fetch_tags,
+ &mut arguments,
+ previous_response.as_ref(),
+ ) {
+ Ok(_) if arguments.is_empty() => {
+ gix_protocol::indicate_end_of_interaction(&mut con.transport).await.ok();
+ let update_refs = refs::update(
+ repo,
+ self.reflog_message
+ .take()
+ .unwrap_or_else(|| RefLogMessage::Prefixed { action: "fetch".into() }),
+ &self.ref_map.mappings,
+ con.remote.refspecs(remote::Direction::Fetch),
+ &self.ref_map.extra_refspecs,
+ con.remote.fetch_tags,
+ self.dry_run,
+ self.write_packed_refs,
+ )?;
+ return Ok(Outcome {
+ ref_map: std::mem::take(&mut self.ref_map),
+ status: Status::NoPackReceived { update_refs },
+ });
+ }
+ Ok(is_done) => is_done,
+ Err(err) => {
+ gix_protocol::indicate_end_of_interaction(&mut con.transport).await.ok();
+ return Err(err.into());
+ }
+ };
+ round += 1;
+ let mut reader = arguments.send(&mut con.transport, is_done).await?;
+ if sideband_all {
+ setup_remote_progress(progress, &mut reader);
+ }
+ let response = gix_protocol::fetch::Response::from_line_reader(protocol_version, &mut reader).await?;
+ if response.has_pack() {
+ progress.step();
+ progress.set_name("receiving pack");
+ if !sideband_all {
+ setup_remote_progress(progress, &mut reader);
+ }
+ break 'negotiation reader;
+ } else {
+ previous_response = Some(response);
+ }
+ };
+
+ let options = gix_pack::bundle::write::Options {
+ thread_limit: config::index_threads(repo)?,
+ index_version: config::pack_index_version(repo)?,
+ iteration_mode: gix_pack::data::input::Mode::Verify,
+ object_hash: con.remote.repo.object_hash(),
+ };
+
+ let mut write_pack_bundle = if matches!(self.dry_run, fetch::DryRun::No) {
+ Some(gix_pack::Bundle::write_to_directory(
+ #[cfg(feature = "async-network-client")]
+ {
+ gix_protocol::futures_lite::io::BlockOn::new(reader)
+ },
+ #[cfg(not(feature = "async-network-client"))]
+ {
+ reader
+ },
+ Some(repo.objects.store_ref().path().join("pack")),
+ con.progress,
+ should_interrupt,
+ Some(Box::new({
+ let repo = repo.clone();
+ move |oid, buf| repo.objects.find(oid, buf).ok()
+ })),
+ options,
+ )?)
+ } else {
+ drop(reader);
+ None
+ };
+
+ if matches!(protocol_version, gix_protocol::transport::Protocol::V2) {
+ gix_protocol::indicate_end_of_interaction(&mut con.transport).await.ok();
+ }
+
+ let update_refs = refs::update(
+ repo,
+ self.reflog_message
+ .take()
+ .unwrap_or_else(|| RefLogMessage::Prefixed { action: "fetch".into() }),
+ &self.ref_map.mappings,
+ con.remote.refspecs(remote::Direction::Fetch),
+ &self.ref_map.extra_refspecs,
+ con.remote.fetch_tags,
+ self.dry_run,
+ self.write_packed_refs,
+ )?;
+
+ if let Some(bundle) = write_pack_bundle.as_mut() {
+ if !update_refs.edits.is_empty() || bundle.index.num_objects == 0 {
+ if let Some(path) = bundle.keep_path.take() {
+ std::fs::remove_file(&path).map_err(|err| Error::RemovePackKeepFile { path, source: err })?;
+ }
+ }
+ }
+
+ Ok(Outcome {
+ ref_map: std::mem::take(&mut self.ref_map),
+ status: match write_pack_bundle {
+ Some(write_pack_bundle) => Status::Change {
+ write_pack_bundle,
+ update_refs,
+ },
+ None => Status::DryRun { update_refs },
+ },
+ })
+ }
+}
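+
+// Editor's note: an illustrative end-to-end sketch, not part of the upstream patch. It strings
+// together `find_remote()`, `connect()`, `prepare_fetch()` and `receive()` with the blocking
+// client; `Discard` progress and a never-set interrupt flag keep it minimal, and default
+// ref-map options are assumed to be available and sufficient. "origin" is only an example name.
+#[cfg(feature = "blocking-network-client")]
+#[allow(dead_code)]
+fn fetch_from_origin(repo: &crate::Repository) -> Result<Outcome, Box<dyn std::error::Error>> {
+ let interrupt = AtomicBool::new(false);
+ let origin = repo.find_remote("origin")?;
+ let connection = origin.connect(remote::Direction::Fetch, gix_features::progress::Discard)?;
+ let prepared = connection.prepare_fetch(Default::default())?;
+ Ok(prepared.receive(&interrupt)?)
+}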
+
+fn setup_remote_progress<P>(
+ progress: &mut P,
+ reader: &mut Box<dyn gix_protocol::transport::client::ExtendedBufRead + Unpin + '_>,
+) where
+ P: Progress,
+ P::SubProgress: 'static,
+{
+ use gix_protocol::transport::client::ExtendedBufRead;
+ reader.set_progress_handler(Some(Box::new({
+ let mut remote_progress = progress.add_child_with_id("remote", ProgressId::RemoteProgress.into());
+ move |is_err: bool, data: &[u8]| {
+ gix_protocol::RemoteProgress::translate_to_progress(is_err, data, &mut remote_progress)
+ }
+ }) as gix_protocol::transport::client::HandleProgress));
+}
diff --git a/vendor/gix/src/remote/connection/fetch/update_refs/mod.rs b/vendor/gix/src/remote/connection/fetch/update_refs/mod.rs
new file mode 100644
index 000000000..953490672
--- /dev/null
+++ b/vendor/gix/src/remote/connection/fetch/update_refs/mod.rs
@@ -0,0 +1,274 @@
+#![allow(clippy::result_large_err)]
+use std::{collections::BTreeMap, convert::TryInto, path::PathBuf};
+
+use gix_odb::{Find, FindExt};
+use gix_ref::{
+ transaction::{Change, LogChange, PreviousValue, RefEdit, RefLog},
+ Target, TargetRef,
+};
+
+use crate::{
+ ext::ObjectIdExt,
+ remote::{
+ fetch,
+ fetch::{refs::update::Mode, RefLogMessage, Source},
+ },
+ Repository,
+};
+
+///
+pub mod update;
+
+/// Information about the update of a single reference, corresponding to the respective entry in [`RefMap::mappings`][crate::remote::fetch::RefMap::mappings].
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct Update {
+ /// The way the update was performed.
+ pub mode: update::Mode,
+ /// The index to the edit that was created from the corresponding mapping, or `None` if there was no local ref.
+ pub edit_index: Option<usize>,
+}
+
+impl From<update::Mode> for Update {
+ fn from(mode: Mode) -> Self {
+ Update { mode, edit_index: None }
+ }
+}
+
+/// Update all refs as derived from `refmap.mappings` and produce an `Outcome` informing about all applied changes in detail, with each
+/// [`update`][Update] corresponding to the [`fetch::Mapping`] at the same index.
+/// If `dry_run` is true, ref transactions won't actually be applied but are assumed to work without error, so the underlying
+/// `repo` is not actually changed. Also, no 'object exists' check is performed, as the objects are likely to be missing since the pack
+/// wasn't fetched either.
+/// `action` is the prefix used for reflog entries, and is typically "fetch".
+///
+/// It can be used to produce the kind of information one is used to seeing from `git fetch`.
+#[allow(clippy::too_many_arguments)]
+pub(crate) fn update(
+ repo: &Repository,
+ message: RefLogMessage,
+ mappings: &[fetch::Mapping],
+ refspecs: &[gix_refspec::RefSpec],
+ extra_refspecs: &[gix_refspec::RefSpec],
+ fetch_tags: fetch::Tags,
+ dry_run: fetch::DryRun,
+ write_packed_refs: fetch::WritePackedRefs,
+) -> Result<update::Outcome, update::Error> {
+ let mut edits = Vec::new();
+ let mut updates = Vec::new();
+
+ let implicit_tag_refspec = fetch_tags
+ .to_refspec()
+ .filter(|_| matches!(fetch_tags, crate::remote::fetch::Tags::Included));
+ for (remote, local, spec, is_implicit_tag) in mappings.iter().filter_map(
+ |fetch::Mapping {
+ remote,
+ local,
+ spec_index,
+ }| {
+ spec_index.get(refspecs, extra_refspecs).map(|spec| {
+ (
+ remote,
+ local,
+ spec,
+ implicit_tag_refspec.map_or(false, |tag_spec| spec.to_ref() == tag_spec),
+ )
+ })
+ },
+ ) {
+ let remote_id = match remote.as_id() {
+ Some(id) => id,
+ None => continue,
+ };
+ if dry_run == fetch::DryRun::No && !repo.objects.contains(remote_id) {
+ let update = if is_implicit_tag {
+ update::Mode::ImplicitTagNotSentByRemote.into()
+ } else {
+ update::Mode::RejectedSourceObjectNotFound { id: remote_id.into() }.into()
+ };
+ updates.push(update);
+ continue;
+ }
+ let checked_out_branches = worktree_branches(repo)?;
+ let (mode, edit_index) = match local {
+ Some(name) => {
+ let (mode, reflog_message, name, previous_value) = match repo.try_find_reference(name)? {
+ Some(existing) => {
+ if let Some(wt_dir) = checked_out_branches.get(existing.name()) {
+ let mode = update::Mode::RejectedCurrentlyCheckedOut {
+ worktree_dir: wt_dir.to_owned(),
+ };
+ updates.push(mode.into());
+ continue;
+ }
+ match existing.target() {
+ TargetRef::Symbolic(_) => {
+ updates.push(update::Mode::RejectedSymbolic.into());
+ continue;
+ }
+ TargetRef::Peeled(local_id) => {
+ let previous_value =
+ PreviousValue::MustExistAndMatch(Target::Peeled(local_id.to_owned()));
+ let (mode, reflog_message) = if local_id == remote_id {
+ (update::Mode::NoChangeNeeded, "no update will be performed")
+ } else if let Some(gix_ref::Category::Tag) = existing.name().category() {
+ if spec.allow_non_fast_forward() {
+ (update::Mode::Forced, "updating tag")
+ } else {
+ updates.push(update::Mode::RejectedTagUpdate.into());
+ continue;
+ }
+ } else {
+ let mut force = spec.allow_non_fast_forward();
+ let is_fast_forward = match dry_run {
+ fetch::DryRun::No => {
+ let ancestors = repo
+ .find_object(local_id)?
+ .try_into_commit()
+ .map_err(|_| ())
+ .and_then(|c| {
+ c.committer().map(|a| a.time.seconds_since_unix_epoch).map_err(|_| ())
+ }).and_then(|local_commit_time|
+ remote_id
+ .to_owned()
+ .ancestors(|id, buf| repo.objects.find_commit_iter(id, buf))
+ .sorting(
+ gix_traverse::commit::Sorting::ByCommitTimeNewestFirstCutoffOlderThan {
+ time_in_seconds_since_epoch: local_commit_time
+ },
+ )
+ .map_err(|_| ())
+ );
+ match ancestors {
+ Ok(mut ancestors) => {
+ ancestors.any(|cid| cid.map_or(false, |cid| cid == local_id))
+ }
+ Err(_) => {
+ force = true;
+ false
+ }
+ }
+ }
+ fetch::DryRun::Yes => true,
+ };
+ if is_fast_forward {
+ (
+ update::Mode::FastForward,
+ matches!(dry_run, fetch::DryRun::Yes)
+ .then(|| "fast-forward (guessed in dry-run)")
+ .unwrap_or("fast-forward"),
+ )
+ } else if force {
+ (update::Mode::Forced, "forced-update")
+ } else {
+ updates.push(update::Mode::RejectedNonFastForward.into());
+ continue;
+ }
+ };
+ (mode, reflog_message, existing.name().to_owned(), previous_value)
+ }
+ }
+ }
+ None => {
+ let name: gix_ref::FullName = name.try_into()?;
+ let reflog_msg = match name.category() {
+ Some(gix_ref::Category::Tag) => "storing tag",
+ Some(gix_ref::Category::LocalBranch) => "storing head",
+ _ => "storing ref",
+ };
+ (
+ update::Mode::New,
+ reflog_msg,
+ name,
+ PreviousValue::ExistingMustMatch(Target::Peeled(remote_id.to_owned())),
+ )
+ }
+ };
+ let edit = RefEdit {
+ change: Change::Update {
+ log: LogChange {
+ mode: RefLog::AndReference,
+ force_create_reflog: false,
+ message: message.compose(reflog_message),
+ },
+ expected: previous_value,
+ new: if let Source::Ref(gix_protocol::handshake::Ref::Symbolic { target, .. }) = &remote {
+ match mappings.iter().find_map(|m| {
+ m.remote.as_name().and_then(|name| {
+ (name == target)
+ .then(|| m.local.as_ref().and_then(|local| local.try_into().ok()))
+ .flatten()
+ })
+ }) {
+ Some(local_branch) => {
+ // This is always safe because…
+ // - the reference may exist already
+ // - if it doesn't exist it will be created - we are here because it's in the list of mappings after all
+ // - if it exists and is updated, and the update is rejected due to non-fastforward for instance, the
+ // target reference still exists and we can point to it.
+ Target::Symbolic(local_branch)
+ }
+ None => Target::Peeled(remote_id.into()),
+ }
+ } else {
+ Target::Peeled(remote_id.into())
+ },
+ },
+ name,
+ deref: false,
+ };
+ let edit_index = edits.len();
+ edits.push(edit);
+ (mode, Some(edit_index))
+ }
+ None => (update::Mode::NoChangeNeeded, None),
+ };
+ updates.push(Update { mode, edit_index })
+ }
+
+ let edits = match dry_run {
+ fetch::DryRun::No => {
+ let (file_lock_fail, packed_refs_lock_fail) = repo
+ .config
+ .lock_timeout()
+ .map_err(crate::reference::edit::Error::from)?;
+ repo.refs
+ .transaction()
+ .packed_refs(
+ match write_packed_refs {
+ fetch::WritePackedRefs::Only => {
+ gix_ref::file::transaction::PackedRefs::DeletionsAndNonSymbolicUpdatesRemoveLooseSourceReference(Box::new(|oid, buf| {
+ repo.objects
+ .try_find(oid, buf)
+ .map(|obj| obj.map(|obj| obj.kind))
+ .map_err(|err| Box::new(err) as Box<dyn std::error::Error + Send + Sync + 'static>)
+ }))},
+ fetch::WritePackedRefs::Never => gix_ref::file::transaction::PackedRefs::DeletionsOnly
+ }
+ )
+ .prepare(edits, file_lock_fail, packed_refs_lock_fail)
+ .map_err(crate::reference::edit::Error::from)?
+ .commit(repo.committer().transpose().map_err(|err| update::Error::EditReferences(crate::reference::edit::Error::ParseCommitterTime(err)))?)
+ .map_err(crate::reference::edit::Error::from)?
+ }
+ fetch::DryRun::Yes => edits,
+ };
+
+ Ok(update::Outcome { edits, updates })
+}
+
+fn worktree_branches(repo: &Repository) -> Result<BTreeMap<gix_ref::FullName, PathBuf>, update::Error> {
+ let mut map = BTreeMap::new();
+ if let Some((wt_dir, head_ref)) = repo.work_dir().zip(repo.head_ref().ok().flatten()) {
+ map.insert(head_ref.inner.name, wt_dir.to_owned());
+ }
+ for proxy in repo.worktrees()? {
+ let repo = proxy.into_repo_with_possibly_inaccessible_worktree()?;
+ if let Some((wt_dir, head_ref)) = repo.work_dir().zip(repo.head_ref().ok().flatten()) {
+ map.insert(head_ref.inner.name, wt_dir.to_owned());
+ }
+ }
+ Ok(map)
+}
+
+#[cfg(test)]
+mod tests;
diff --git a/vendor/gix/src/remote/connection/fetch/update_refs/tests.rs b/vendor/gix/src/remote/connection/fetch/update_refs/tests.rs
new file mode 100644
index 000000000..145990ac8
--- /dev/null
+++ b/vendor/gix/src/remote/connection/fetch/update_refs/tests.rs
@@ -0,0 +1,607 @@
+pub fn restricted() -> crate::open::Options {
+ crate::open::Options::isolated().config_overrides(["user.name=gitoxide", "user.email=gitoxide@localhost"])
+}
+
+/// Convert a hexadecimal hash into its corresponding `ObjectId` or _panic_.
+fn hex_to_id(hex: &str) -> gix_hash::ObjectId {
+ gix_hash::ObjectId::from_hex(hex.as_bytes()).expect("40 bytes hex")
+}
+
+mod update {
+ use std::convert::TryInto;
+
+ use gix_testtools::Result;
+
+ use super::hex_to_id;
+ use crate as gix;
+
+ fn base_repo_path() -> String {
+ gix::path::realpath(
+ gix_testtools::scripted_fixture_read_only("make_remote_repos.sh")
+ .unwrap()
+ .join("base"),
+ )
+ .unwrap()
+ .to_string_lossy()
+ .into_owned()
+ }
+
+ fn repo(name: &str) -> gix::Repository {
+ let dir =
+ gix_testtools::scripted_fixture_read_only_with_args("make_fetch_repos.sh", [base_repo_path()]).unwrap();
+ gix::open_opts(dir.join(name), restricted()).unwrap()
+ }
+ fn repo_rw(name: &str) -> (gix::Repository, gix_testtools::tempfile::TempDir) {
+ let dir = gix_testtools::scripted_fixture_writable_with_args(
+ "make_fetch_repos.sh",
+ [base_repo_path()],
+ gix_testtools::Creation::ExecuteScript,
+ )
+ .unwrap();
+ let repo = gix::open_opts(dir.path().join(name), restricted()).unwrap();
+ (repo, dir)
+ }
+ use gix_ref::{transaction::Change, TargetRef};
+
+ use crate::{
+ bstr::BString,
+ remote::{
+ fetch,
+ fetch::{refs::tests::restricted, Mapping, RefLogMessage, Source, SpecIndex},
+ },
+ };
+
+ #[test]
+ fn various_valid_updates() {
+ let repo = repo("two-origins");
+ for (spec, expected_mode, reflog_message, detail) in [
+ (
+ "refs/heads/main:refs/remotes/origin/main",
+ fetch::refs::update::Mode::NoChangeNeeded,
+ Some("no update will be performed"),
+ "these refs are en-par since the initial clone",
+ ),
+ (
+ "refs/heads/main",
+ fetch::refs::update::Mode::NoChangeNeeded,
+ None,
+ "without local destination ref there is nothing to do for us, ever (except for FETCH_HEADs) later",
+ ),
+ (
+ "refs/heads/main:refs/remotes/origin/new-main",
+ fetch::refs::update::Mode::New,
+ Some("storing ref"),
+ "the destination branch doesn't exist and needs to be created",
+ ),
+ (
+ "refs/heads/main:refs/heads/feature",
+ fetch::refs::update::Mode::New,
+ Some("storing head"),
+ "reflog messages are specific to the type of branch stored, to some limited extend",
+ ),
+ (
+ "refs/heads/main:refs/tags/new-tag",
+ fetch::refs::update::Mode::New,
+ Some("storing tag"),
+ "reflog messages are specific to the type of branch stored, to some limited extend",
+ ),
+ (
+ "+refs/heads/main:refs/remotes/origin/new-main",
+ fetch::refs::update::Mode::New,
+ Some("storing ref"),
+ "just to validate that we really are in dry-run mode, or else this ref would be present now",
+ ),
+ (
+ "+refs/heads/main:refs/remotes/origin/g",
+ fetch::refs::update::Mode::FastForward,
+ Some("fast-forward (guessed in dry-run)"),
+ "a forced non-fastforward (main goes backwards), but dry-run calls it fast-forward",
+ ),
+ (
+ "+refs/heads/main:refs/tags/b-tag",
+ fetch::refs::update::Mode::Forced,
+ Some("updating tag"),
+ "tags can only be forced",
+ ),
+ (
+ "refs/heads/main:refs/tags/b-tag",
+ fetch::refs::update::Mode::RejectedTagUpdate,
+ None,
+ "otherwise a tag is always refusing itself to be overwritten (no-clobber)",
+ ),
+ (
+ "+refs/remotes/origin/g:refs/heads/main",
+ fetch::refs::update::Mode::RejectedCurrentlyCheckedOut {
+ worktree_dir: repo.work_dir().expect("present").to_owned(),
+ },
+ None,
+ "checked out branches cannot be written, as it requires a merge of sorts which isn't done here",
+ ),
+ (
+ "ffffffffffffffffffffffffffffffffffffffff:refs/heads/invalid-source-object",
+ fetch::refs::update::Mode::RejectedSourceObjectNotFound {
+ id: hex_to_id("ffffffffffffffffffffffffffffffffffffffff"),
+ },
+ None,
+ "checked out branches cannot be written, as it requires a merge of sorts which isn't done here",
+ ),
+ (
+ "refs/remotes/origin/g:refs/heads/not-currently-checked-out",
+ fetch::refs::update::Mode::FastForward,
+ Some("fast-forward (guessed in dry-run)"),
+ "a fast-forward only fast-forward situation, all good",
+ ),
+ ] {
+ let (mapping, specs) = mapping_from_spec(spec, &repo);
+ let out = fetch::refs::update(
+ &repo,
+ prefixed("action"),
+ &mapping,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ reflog_message.map(|_| fetch::DryRun::Yes).unwrap_or(fetch::DryRun::No),
+ fetch::WritePackedRefs::Never,
+ )
+ .unwrap();
+
+ assert_eq!(
+ out.updates,
+ vec![fetch::refs::Update {
+ mode: expected_mode.clone(),
+ edit_index: reflog_message.map(|_| 0),
+ }],
+ "{spec:?}: {detail}"
+ );
+ assert_eq!(out.edits.len(), reflog_message.map(|_| 1).unwrap_or(0));
+ if let Some(reflog_message) = reflog_message {
+ let edit = &out.edits[0];
+ match &edit.change {
+ Change::Update { log, new, .. } => {
+ assert_eq!(
+ log.message,
+ format!("action: {reflog_message}"),
+ "{spec}: reflog messages are specific and we emulate git word for word"
+ );
+ let remote_ref = repo
+ .find_reference(specs[0].to_ref().source().expect("always present"))
+ .unwrap();
+ assert_eq!(
+ new.id(),
+ remote_ref.target().id(),
+ "remote ref provides the id to set in the local reference"
+ )
+ }
+ _ => unreachable!("only updates"),
+ }
+ }
+ }
+ }
+
+ #[test]
+ fn checked_out_branches_in_worktrees_are_rejected_with_additional_information() -> Result {
+ let root = gix_path::realpath(gix_testtools::scripted_fixture_read_only_with_args(
+ "make_fetch_repos.sh",
+ [base_repo_path()],
+ )?)?;
+ let repo = root.join("worktree-root");
+ let repo = gix::open_opts(repo, restricted())?;
+ for (branch, path_from_root) in [
+ ("main", "worktree-root"),
+ ("wt-a-nested", "prev/wt-a-nested"),
+ ("wt-a", "wt-a"),
+ ("nested-wt-b", "wt-a/nested-wt-b"),
+ ("wt-c-locked", "wt-c-locked"),
+ ("wt-deleted", "wt-deleted"),
+ ] {
+ let spec = format!("refs/heads/main:refs/heads/{branch}");
+ let (mappings, specs) = mapping_from_spec(&spec, &repo);
+ let out = fetch::refs::update(
+ &repo,
+ prefixed("action"),
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::Yes,
+ fetch::WritePackedRefs::Never,
+ )?;
+
+ assert_eq!(
+ out.updates,
+ vec![fetch::refs::Update {
+ mode: fetch::refs::update::Mode::RejectedCurrentlyCheckedOut {
+ worktree_dir: root.join(path_from_root),
+ },
+ edit_index: None,
+ }],
+ "{spec}: checked-out checks are done before checking if a change would actually be required (here it isn't)"
+ );
+ assert_eq!(out.edits.len(), 0);
+ }
+ Ok(())
+ }
+
+ #[test]
+ fn local_symbolic_refs_are_never_written() {
+ let repo = repo("two-origins");
+ for source in ["refs/heads/main", "refs/heads/symbolic", "HEAD"] {
+ let (mappings, specs) = mapping_from_spec(&format!("{source}:refs/heads/symbolic"), &repo);
+ let out = fetch::refs::update(
+ &repo,
+ prefixed("action"),
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::Yes,
+ fetch::WritePackedRefs::Never,
+ )
+ .unwrap();
+
+ assert_eq!(out.edits.len(), 0);
+ assert_eq!(
+ out.updates,
+ vec![fetch::refs::Update {
+ mode: fetch::refs::update::Mode::RejectedSymbolic,
+ edit_index: None
+ }],
+ "we don't overwrite these as the checked-out check needs to consider much more than it currently does, we are playing it safe"
+ );
+ }
+ }
+
+ #[test]
+ fn remote_symbolic_refs_can_always_be_set_as_there_is_no_scenario_where_it_could_be_nonexisting_and_rejected() {
+ let repo = repo("two-origins");
+ let (mut mappings, specs) = mapping_from_spec("refs/heads/symbolic:refs/remotes/origin/new", &repo);
+ mappings.push(Mapping {
+ remote: Source::Ref(gix_protocol::handshake::Ref::Direct {
+ full_ref_name: "refs/heads/main".try_into().unwrap(),
+ object: hex_to_id("f99771fe6a1b535783af3163eba95a927aae21d5"),
+ }),
+ local: Some("refs/heads/symbolic".into()),
+ spec_index: SpecIndex::ExplicitInRemote(0),
+ });
+ let out = fetch::refs::update(
+ &repo,
+ prefixed("action"),
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::Yes,
+ fetch::WritePackedRefs::Never,
+ )
+ .unwrap();
+
+ assert_eq!(out.edits.len(), 1);
+ assert_eq!(
+ out.updates,
+ vec![
+ fetch::refs::Update {
+ mode: fetch::refs::update::Mode::New,
+ edit_index: Some(0)
+ },
+ fetch::refs::Update {
+ mode: fetch::refs::update::Mode::RejectedSymbolic,
+ edit_index: None
+ }
+ ],
+ );
+ let edit = &out.edits[0];
+ match &edit.change {
+ Change::Update { log, new, .. } => {
+ assert_eq!(log.message, "action: storing ref");
+ assert!(
+ new.try_name().is_some(),
+ "remote falls back to peeled id as it's the only thing we seem to have locally, it won't refer to a non-existing local ref"
+ );
+ }
+ _ => unreachable!("only updates"),
+ }
+ }
+
+ #[test]
+ fn local_direct_refs_are_never_written_with_symbolic_ones_but_see_only_the_destination() {
+ let repo = repo("two-origins");
+ let (mappings, specs) = mapping_from_spec("refs/heads/symbolic:refs/heads/not-currently-checked-out", &repo);
+ let out = fetch::refs::update(
+ &repo,
+ prefixed("action"),
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::Yes,
+ fetch::WritePackedRefs::Never,
+ )
+ .unwrap();
+
+ assert_eq!(out.edits.len(), 1);
+ assert_eq!(
+ out.updates,
+ vec![fetch::refs::Update {
+ mode: fetch::refs::update::Mode::NoChangeNeeded,
+ edit_index: Some(0)
+ }],
+ );
+ }
+
+ #[test]
+ fn remote_refs_cannot_map_to_local_head() {
+ let repo = repo("two-origins");
+ let (mappings, specs) = mapping_from_spec("refs/heads/main:HEAD", &repo);
+ let out = fetch::refs::update(
+ &repo,
+ prefixed("action"),
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::Yes,
+ fetch::WritePackedRefs::Never,
+ )
+ .unwrap();
+
+ assert_eq!(out.edits.len(), 1);
+ assert_eq!(
+ out.updates,
+ vec![fetch::refs::Update {
+ mode: fetch::refs::update::Mode::New,
+ edit_index: Some(0),
+ }],
+ );
+ let edit = &out.edits[0];
+ match &edit.change {
+ Change::Update { log, new, .. } => {
+ assert_eq!(log.message, "action: storing head");
+ assert!(
+ new.try_id().is_some(),
+ "remote is peeled, so local will be peeled as well"
+ );
+ }
+ _ => unreachable!("only updates"),
+ }
+ assert_eq!(
+ edit.name.as_bstr(),
+ "refs/heads/HEAD",
+ "it's not possible to refer to the local HEAD with refspecs"
+ );
+ }
+
+ #[test]
+ fn remote_symbolic_refs_can_be_written_locally_and_point_to_tracking_branch() {
+ let repo = repo("two-origins");
+ let (mut mappings, specs) = mapping_from_spec("HEAD:refs/remotes/origin/new-HEAD", &repo);
+ mappings.push(Mapping {
+ remote: Source::Ref(gix_protocol::handshake::Ref::Direct {
+ full_ref_name: "refs/heads/main".try_into().unwrap(),
+ object: hex_to_id("f99771fe6a1b535783af3163eba95a927aae21d5"),
+ }),
+ local: Some("refs/remotes/origin/main".into()),
+ spec_index: SpecIndex::ExplicitInRemote(0),
+ });
+ let out = fetch::refs::update(
+ &repo,
+ prefixed("action"),
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::Yes,
+ fetch::WritePackedRefs::Never,
+ )
+ .unwrap();
+
+ assert_eq!(
+ out.updates,
+ vec![
+ fetch::refs::Update {
+ mode: fetch::refs::update::Mode::New,
+ edit_index: Some(0),
+ },
+ fetch::refs::Update {
+ mode: fetch::refs::update::Mode::NoChangeNeeded,
+ edit_index: Some(1),
+ }
+ ],
+ );
+ assert_eq!(out.edits.len(), 2);
+ let edit = &out.edits[0];
+ match &edit.change {
+ Change::Update { log, new, .. } => {
+ assert_eq!(log.message, "action: storing ref");
+ assert_eq!(
+ new.try_name().expect("symbolic ref").as_bstr(),
+ "refs/remotes/origin/main",
+ "remote is symbolic, so local will be symbolic as well, but is rewritten to tracking branch"
+ );
+ }
+ _ => unreachable!("only updates"),
+ }
+ assert_eq!(edit.name.as_bstr(), "refs/remotes/origin/new-HEAD",);
+ }
+
+ #[test]
+ fn non_fast_forward_is_rejected_but_appears_to_be_fast_forward_in_dryrun_mode() {
+ let repo = repo("two-origins");
+ let (mappings, specs) = mapping_from_spec("refs/heads/main:refs/remotes/origin/g", &repo);
+ let reflog_message: BString = "very special".into();
+ let out = fetch::refs::update(
+ &repo,
+ RefLogMessage::Override {
+ message: reflog_message.clone(),
+ },
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::Yes,
+ fetch::WritePackedRefs::Never,
+ )
+ .unwrap();
+
+ assert_eq!(
+ out.updates,
+ vec![fetch::refs::Update {
+ mode: fetch::refs::update::Mode::FastForward,
+ edit_index: Some(0),
+ }],
+ "The caller has to be aware and note that dry-runs can't know about fast-forwards as they don't have remote objects"
+ );
+ assert_eq!(out.edits.len(), 1);
+ let edit = &out.edits[0];
+ match &edit.change {
+ Change::Update { log, .. } => {
+ assert_eq!(log.message, reflog_message);
+ }
+ _ => unreachable!("only updates"),
+ }
+ }
+
+ #[test]
+ fn non_fast_forward_is_rejected_if_dry_run_is_disabled() {
+ let (repo, _tmp) = repo_rw("two-origins");
+ let (mappings, specs) = mapping_from_spec("refs/remotes/origin/g:refs/heads/not-currently-checked-out", &repo);
+ let out = fetch::refs::update(
+ &repo,
+ prefixed("action"),
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::No,
+ fetch::WritePackedRefs::Never,
+ )
+ .unwrap();
+
+ assert_eq!(
+ out.updates,
+ vec![fetch::refs::Update {
+ mode: fetch::refs::update::Mode::RejectedNonFastForward,
+ edit_index: None,
+ }]
+ );
+ assert_eq!(out.edits.len(), 0);
+
+ let (mappings, specs) = mapping_from_spec("refs/heads/main:refs/remotes/origin/g", &repo);
+ let out = fetch::refs::update(
+ &repo,
+ prefixed("prefix"),
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::No,
+ fetch::WritePackedRefs::Never,
+ )
+ .unwrap();
+
+ assert_eq!(
+ out.updates,
+ vec![fetch::refs::Update {
+ mode: fetch::refs::update::Mode::FastForward,
+ edit_index: Some(0),
+ }]
+ );
+ assert_eq!(out.edits.len(), 1);
+ let edit = &out.edits[0];
+ match &edit.change {
+ Change::Update { log, .. } => {
+ assert_eq!(log.message, format!("prefix: {}", "fast-forward"));
+ }
+ _ => unreachable!("only updates"),
+ }
+ }
+
+ #[test]
+ fn fast_forwards_are_called_out_even_if_force_is_given() {
+ let (repo, _tmp) = repo_rw("two-origins");
+ let (mappings, specs) = mapping_from_spec("+refs/heads/main:refs/remotes/origin/g", &repo);
+ let out = fetch::refs::update(
+ &repo,
+ prefixed("prefix"),
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::No,
+ fetch::WritePackedRefs::Never,
+ )
+ .unwrap();
+
+ assert_eq!(
+ out.updates,
+ vec![fetch::refs::Update {
+ mode: fetch::refs::update::Mode::FastForward,
+ edit_index: Some(0),
+ }]
+ );
+ assert_eq!(out.edits.len(), 1);
+ let edit = &out.edits[0];
+ match &edit.change {
+ Change::Update { log, .. } => {
+ assert_eq!(log.message, format!("prefix: {}", "fast-forward"));
+ }
+ _ => unreachable!("only updates"),
+ }
+ }
+
+ fn mapping_from_spec(spec: &str, repo: &gix::Repository) -> (Vec<fetch::Mapping>, Vec<gix::refspec::RefSpec>) {
+ let spec = gix_refspec::parse(spec.into(), gix_refspec::parse::Operation::Fetch).unwrap();
+ let group = gix_refspec::MatchGroup::from_fetch_specs(Some(spec));
+ let references = repo.references().unwrap();
+ let mut references: Vec<_> = references.all().unwrap().map(|r| into_remote_ref(r.unwrap())).collect();
+ references.push(into_remote_ref(repo.find_reference("HEAD").unwrap()));
+ let mappings = group
+ .match_remotes(references.iter().map(remote_ref_to_item))
+ .mappings
+ .into_iter()
+ .map(|m| fetch::Mapping {
+ remote: m
+ .item_index
+ .map(|idx| fetch::Source::Ref(references[idx].clone()))
+ .unwrap_or_else(|| match m.lhs {
+ gix_refspec::match_group::SourceRef::ObjectId(id) => fetch::Source::ObjectId(id),
+ _ => unreachable!("not a ref, must be id: {:?}", m),
+ }),
+ local: m.rhs.map(|r| r.into_owned()),
+ spec_index: SpecIndex::ExplicitInRemote(m.spec_index),
+ })
+ .collect();
+ (mappings, vec![spec.to_owned()])
+ }
+
+ fn into_remote_ref(mut r: gix::Reference<'_>) -> gix_protocol::handshake::Ref {
+ let full_ref_name = r.name().as_bstr().into();
+ match r.target() {
+ TargetRef::Peeled(id) => gix_protocol::handshake::Ref::Direct {
+ full_ref_name,
+ object: id.into(),
+ },
+ TargetRef::Symbolic(name) => {
+ let target = name.as_bstr().into();
+ let id = r.peel_to_id_in_place().unwrap();
+ gix_protocol::handshake::Ref::Symbolic {
+ full_ref_name,
+ target,
+ object: id.detach(),
+ }
+ }
+ }
+ }
+
+ fn remote_ref_to_item(r: &gix_protocol::handshake::Ref) -> gix_refspec::match_group::Item<'_> {
+ let (full_ref_name, target, object) = r.unpack();
+ gix_refspec::match_group::Item {
+ full_ref_name,
+ target: target.expect("no unborn HEAD"),
+ object,
+ }
+ }
+
+ fn prefixed(action: &str) -> RefLogMessage {
+ RefLogMessage::Prefixed { action: action.into() }
+ }
+}
diff --git a/vendor/gix/src/remote/connection/fetch/update_refs/update.rs b/vendor/gix/src/remote/connection/fetch/update_refs/update.rs
new file mode 100644
index 000000000..6eda1ffc0
--- /dev/null
+++ b/vendor/gix/src/remote/connection/fetch/update_refs/update.rs
@@ -0,0 +1,128 @@
+use std::path::PathBuf;
+
+use crate::remote::fetch;
+
+mod error {
+ /// The error returned when updating references.
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ FindReference(#[from] crate::reference::find::Error),
+ #[error("A remote reference had a name that wasn't considered valid. Corrupt remote repo or insufficient checks on remote?")]
+ InvalidRefName(#[from] gix_validate::refname::Error),
+ #[error("Failed to update references to their new position to match their remote locations")]
+ EditReferences(#[from] crate::reference::edit::Error),
+ #[error("Failed to read or iterate worktree dir")]
+ WorktreeListing(#[from] std::io::Error),
+ #[error("Could not open worktree repository")]
+ OpenWorktreeRepo(#[from] crate::open::Error),
+ #[error("Could not find local commit for fast-forward ancestor check")]
+ FindCommit(#[from] crate::object::find::existing::Error),
+ }
+}
+
+pub use error::Error;
+
+/// The outcome of the refs-update operation at the end of a fetch.
+#[derive(Debug, Clone)]
+pub struct Outcome {
+ /// All edits that were performed to update local refs.
+ pub edits: Vec<gix_ref::transaction::RefEdit>,
+ /// Each update provides more information about what happened to the corresponding mapping.
+ /// Use [`iter_mapping_updates()`][Self::iter_mapping_updates()] to recombine the update information with ref-edits and their
+ /// mapping.
+ pub updates: Vec<super::Update>,
+}
+
+/// Describe the way a ref was updated
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum Mode {
+ /// No change was attempted as the remote ref didn't change compared to the current ref, or because no remote ref was specified
+ /// in the ref-spec.
+ NoChangeNeeded,
+ /// The old ref's commit was an ancestor of the new one, allowing for a fast-forward without a merge.
+ FastForward,
+ /// The ref was set to point to the new commit from the remote without taking into consideration its ancestry.
+ Forced,
+ /// A new ref has been created as there was none before.
+ New,
+ /// The reference belongs to a tag that was listed by the server but whose target didn't get sent as it doesn't point
+ /// to the commit-graph we were fetching explicitly.
+ ///
+ /// This kind of update only happens if `remote.<name>.tagOpt` is not set explicitly to either `--tags` or `--no-tags`.
+ ImplicitTagNotSentByRemote,
+ /// The object id to set the target reference to could not be found.
+ RejectedSourceObjectNotFound {
+ /// The id of the object that didn't exist in the object database, even though it should have been part of the received pack.
+ id: gix_hash::ObjectId,
+ },
+ /// Tags can never be overwritten (whether the new object would be a fast-forward or not, or unchanged), unless the refspec
+ /// specifies force.
+ RejectedTagUpdate,
+ /// The reference update would not have been a fast-forward, and force is not specified in the ref-spec.
+ RejectedNonFastForward,
+ /// The update of a local symbolic reference was rejected.
+ RejectedSymbolic,
+ /// The update was rejected because the branch is checked out in the given worktree_dir.
+ ///
+ /// Note that the check applies to any known worktree, whether it's present on disk or not.
+ RejectedCurrentlyCheckedOut {
+ /// The path to the worktree directory where the branch is checked out.
+ worktree_dir: PathBuf,
+ },
+}
+
+impl std::fmt::Display for Mode {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match self {
+ Mode::NoChangeNeeded => "up-to-date",
+ Mode::FastForward => "fast-forward",
+ Mode::Forced => "forced-update",
+ Mode::New => "new",
+ Mode::ImplicitTagNotSentByRemote => "unrelated tag on remote",
+ Mode::RejectedSourceObjectNotFound { id } => return write!(f, "rejected ({id} not found)"),
+ Mode::RejectedTagUpdate => "rejected (would overwrite existing tag)",
+ Mode::RejectedNonFastForward => "rejected (non-fast-forward)",
+ Mode::RejectedSymbolic => "rejected (refusing to write symbolic refs)",
+ Mode::RejectedCurrentlyCheckedOut { worktree_dir } => {
+ return write!(
+ f,
+ "rejected (cannot write into checked-out branch at \"{}\")",
+ worktree_dir.display()
+ )
+ }
+ }
+ .fmt(f)
+ }
+}
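+
+// For illustration, a minimal sketch of how the modes above render; the strings simply
+// mirror the `Display` implementation and are not additional API guarantees:
+//
+//     assert_eq!(Mode::FastForward.to_string(), "fast-forward");
+//     assert_eq!(Mode::RejectedTagUpdate.to_string(), "rejected (would overwrite existing tag)");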
+
+impl Outcome {
+ /// Produce an iterator over all information used to produce this outcome, ref-update by ref-update, using the `mappings`
+ /// used when producing the ref update.
+ ///
+ /// Note that for mappings which don't have a corresponding entry in `refspecs`, the returned refspec will be `None` even though that should never be the case.
+ /// This can happen if the `refspecs` passed in aren't the refspecs used to create the `mapping`, and it's up to the caller to sort it out.
+ pub fn iter_mapping_updates<'a, 'b>(
+ &self,
+ mappings: &'a [fetch::Mapping],
+ refspecs: &'b [gix_refspec::RefSpec],
+ extra_refspecs: &'b [gix_refspec::RefSpec],
+ ) -> impl Iterator<
+ Item = (
+ &super::Update,
+ &'a fetch::Mapping,
+ Option<&'b gix_refspec::RefSpec>,
+ Option<&gix_ref::transaction::RefEdit>,
+ ),
+ > {
+ self.updates.iter().zip(mappings.iter()).map(move |(update, mapping)| {
+ (
+ update,
+ mapping,
+ mapping.spec_index.get(refspecs, extra_refspecs),
+ update.edit_index.and_then(|idx| self.edits.get(idx)),
+ )
+ })
+ }
+}
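+
+// A usage sketch, assuming `outcome: Outcome` along with the `mappings`, `refspecs` and
+// `extra_refspecs` that were used for the fetch it belongs to:
+//
+//     for (update, mapping, _spec, _edit) in
+//         outcome.iter_mapping_updates(&mappings, &refspecs, &extra_refspecs)
+//     {
+//         println!("{:?} -> {:?}: {}", mapping.remote, mapping.local, update.mode);
+//     }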
diff --git a/vendor/gix/src/remote/connection/mod.rs b/vendor/gix/src/remote/connection/mod.rs
new file mode 100644
index 000000000..09943ecc4
--- /dev/null
+++ b/vendor/gix/src/remote/connection/mod.rs
@@ -0,0 +1,29 @@
+use crate::Remote;
+
+pub(crate) struct HandshakeWithRefs {
+ outcome: gix_protocol::handshake::Outcome,
+ refs: Vec<gix_protocol::handshake::Ref>,
+}
+
+/// A function that performs a given credential action, trying to obtain credentials for an operation that needs it.
+pub type AuthenticateFn<'a> = Box<dyn FnMut(gix_credentials::helper::Action) -> gix_credentials::protocol::Result + 'a>;
+
+/// A type to represent an ongoing connection to a remote host, typically with the connection already established.
+///
+/// It can be used to perform a variety of operations with the remote without worrying about protocol details,
+/// much like a remote procedure call.
+pub struct Connection<'a, 'repo, T, P> {
+ pub(crate) remote: &'a Remote<'repo>,
+ pub(crate) authenticate: Option<AuthenticateFn<'a>>,
+ pub(crate) transport_options: Option<Box<dyn std::any::Any>>,
+ pub(crate) transport: T,
+ pub(crate) progress: P,
+}
+
+mod access;
+
+///
+pub mod ref_map;
+
+///
+pub mod fetch;
diff --git a/vendor/gix/src/remote/connection/ref_map.rs b/vendor/gix/src/remote/connection/ref_map.rs
new file mode 100644
index 000000000..0206e9002
--- /dev/null
+++ b/vendor/gix/src/remote/connection/ref_map.rs
@@ -0,0 +1,268 @@
+use std::collections::HashSet;
+
+use gix_features::progress::Progress;
+use gix_protocol::transport::client::Transport;
+
+use crate::{
+ bstr,
+ bstr::{BString, ByteVec},
+ remote::{connection::HandshakeWithRefs, fetch, fetch::SpecIndex, Connection, Direction},
+};
+
+/// The error returned by [`Connection::ref_map()`].
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum Error {
+ #[error("Failed to configure the transport before connecting to {url:?}")]
+ GatherTransportConfig {
+ url: BString,
+ source: crate::config::transport::Error,
+ },
+ #[error("Failed to configure the transport layer")]
+ ConfigureTransport(#[from] Box<dyn std::error::Error + Send + Sync + 'static>),
+ #[error(transparent)]
+ Handshake(#[from] gix_protocol::handshake::Error),
+ #[error("The object format {format:?} as used by the remote is unsupported")]
+ UnknownObjectFormat { format: BString },
+ #[error(transparent)]
+ ListRefs(#[from] gix_protocol::ls_refs::Error),
+ #[error(transparent)]
+ Transport(#[from] gix_protocol::transport::client::Error),
+ #[error(transparent)]
+ ConfigureCredentials(#[from] crate::config::credential_helpers::Error),
+ #[error(transparent)]
+ MappingValidation(#[from] gix_refspec::match_group::validate::Error),
+}
+
+impl gix_protocol::transport::IsSpuriousError for Error {
+ fn is_spurious(&self) -> bool {
+ match self {
+ Error::Transport(err) => err.is_spurious(),
+ Error::ListRefs(err) => err.is_spurious(),
+ Error::Handshake(err) => err.is_spurious(),
+ _ => false,
+ }
+ }
+}
+
+/// For use in [`Connection::ref_map()`].
+#[derive(Debug, Clone)]
+pub struct Options {
+ /// Use a two-component prefix derived from the ref-spec's source, like `refs/heads/`, to let the server pre-filter refs,
+ /// with great potential for savings in traffic and local CPU time. Defaults to `true`.
+ pub prefix_from_spec_as_filter_on_remote: bool,
+ /// Parameters in the form of `(name, optional value)` to add to the handshake.
+ ///
+ /// This is useful in case of custom servers.
+ pub handshake_parameters: Vec<(String, Option<String>)>,
+ /// A list of refspecs to use as implicit refspecs which won't be saved or otherwise be part of the remote in question.
+ ///
+ /// This is useful for handling `remote.<name>.tagOpt` for example.
+ pub extra_refspecs: Vec<gix_refspec::RefSpec>,
+}
+
+impl Default for Options {
+ fn default() -> Self {
+ Options {
+ prefix_from_spec_as_filter_on_remote: true,
+ handshake_parameters: Vec::new(),
+ extra_refspecs: Vec::new(),
+ }
+ }
+}
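+
+// A sketch of adjusting the defaults, for instance to disable server-side prefix filtering:
+//
+//     let opts = Options {
+//         prefix_from_spec_as_filter_on_remote: false,
+//         ..Default::default()
+//     };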
+
+impl<'remote, 'repo, T, P> Connection<'remote, 'repo, T, P>
+where
+ T: Transport,
+ P: Progress,
+{
+ /// List all references on the remote that have been filtered through our remote's [`refspecs`][crate::Remote::refspecs()]
+ /// for _fetching_.
+ ///
+ /// This comes in the form of all matching tips on the remote and the objects they point to, along with
+ /// the local tracking branches of these tips (if available).
+ ///
+ /// Note that this doesn't fetch the objects mentioned in the tips, nor does it make any change to the underlying repository.
+ ///
+ /// # Consumption
+ ///
+ /// Due to management of the transport, it's cleanest to only use it for a single interaction. Thus it's consumed along with
+ /// the connection.
+ ///
+ /// ### Configuration
+ ///
+ /// - `gitoxide.userAgent` is read to obtain the application user agent for git servers and for HTTP servers as well.
+ #[allow(clippy::result_large_err)]
+ #[gix_protocol::maybe_async::maybe_async]
+ pub async fn ref_map(mut self, options: Options) -> Result<fetch::RefMap, Error> {
+ let res = self.ref_map_inner(options).await;
+ gix_protocol::indicate_end_of_interaction(&mut self.transport)
+ .await
+ .ok();
+ res
+ }
+
+ #[allow(clippy::result_large_err)]
+ #[gix_protocol::maybe_async::maybe_async]
+ pub(crate) async fn ref_map_inner(
+ &mut self,
+ Options {
+ prefix_from_spec_as_filter_on_remote,
+ handshake_parameters,
+ mut extra_refspecs,
+ }: Options,
+ ) -> Result<fetch::RefMap, Error> {
+ let null = gix_hash::ObjectId::null(gix_hash::Kind::Sha1); // OK to hardcode Sha1, it's not supposed to match, ever.
+
+ if let Some(tag_spec) = self.remote.fetch_tags.to_refspec().map(|spec| spec.to_owned()) {
+ if !extra_refspecs.contains(&tag_spec) {
+ extra_refspecs.push(tag_spec);
+ }
+ };
+ let specs = {
+ let mut s = self.remote.fetch_specs.clone();
+ s.extend(extra_refspecs.clone());
+ s
+ };
+ let remote = self
+ .fetch_refs(prefix_from_spec_as_filter_on_remote, handshake_parameters, &specs)
+ .await?;
+ let num_explicit_specs = self.remote.fetch_specs.len();
+ let group = gix_refspec::MatchGroup::from_fetch_specs(specs.iter().map(|s| s.to_ref()));
+ let (res, fixes) = group
+ .match_remotes(remote.refs.iter().map(|r| {
+ let (full_ref_name, target, object) = r.unpack();
+ gix_refspec::match_group::Item {
+ full_ref_name,
+ target: target.unwrap_or(&null),
+ object,
+ }
+ }))
+ .validated()?;
+ let mappings = res.mappings;
+ let mappings = mappings
+ .into_iter()
+ .map(|m| fetch::Mapping {
+ remote: m
+ .item_index
+ .map(|idx| fetch::Source::Ref(remote.refs[idx].clone()))
+ .unwrap_or_else(|| {
+ fetch::Source::ObjectId(match m.lhs {
+ gix_refspec::match_group::SourceRef::ObjectId(id) => id,
+ _ => unreachable!("no item index implies having an object id"),
+ })
+ }),
+ local: m.rhs.map(|c| c.into_owned()),
+ spec_index: if m.spec_index < num_explicit_specs {
+ SpecIndex::ExplicitInRemote(m.spec_index)
+ } else {
+ SpecIndex::Implicit(m.spec_index - num_explicit_specs)
+ },
+ })
+ .collect();
+
+ let object_hash = extract_object_format(self.remote.repo, &remote.outcome)?;
+ Ok(fetch::RefMap {
+ mappings,
+ extra_refspecs,
+ fixes,
+ remote_refs: remote.refs,
+ handshake: remote.outcome,
+ object_hash,
+ })
+ }
+
+ #[allow(clippy::result_large_err)]
+ #[gix_protocol::maybe_async::maybe_async]
+ async fn fetch_refs(
+ &mut self,
+ filter_by_prefix: bool,
+ extra_parameters: Vec<(String, Option<String>)>,
+ refspecs: &[gix_refspec::RefSpec],
+ ) -> Result<HandshakeWithRefs, Error> {
+ let mut credentials_storage;
+ let url = self.transport.to_url();
+ let authenticate = match self.authenticate.as_mut() {
+ Some(f) => f,
+ None => {
+ let url = self
+ .remote
+ .url(Direction::Fetch)
+ .map(ToOwned::to_owned)
+ .unwrap_or_else(|| gix_url::parse(url.as_ref()).expect("valid URL to be provided by transport"));
+ credentials_storage = self.configured_credentials(url)?;
+ &mut credentials_storage
+ }
+ };
+
+ if self.transport_options.is_none() {
+ self.transport_options = self
+ .remote
+ .repo
+ .transport_options(url.as_ref(), self.remote.name().map(|n| n.as_bstr()))
+ .map_err(|err| Error::GatherTransportConfig {
+ source: err,
+ url: url.into_owned(),
+ })?;
+ }
+ if let Some(config) = self.transport_options.as_ref() {
+ self.transport.configure(&**config)?;
+ }
+ let mut outcome =
+ gix_protocol::fetch::handshake(&mut self.transport, authenticate, extra_parameters, &mut self.progress)
+ .await?;
+ let refs = match outcome.refs.take() {
+ Some(refs) => refs,
+ None => {
+ let agent_feature = self.remote.repo.config.user_agent_tuple();
+ gix_protocol::ls_refs(
+ &mut self.transport,
+ &outcome.capabilities,
+ move |_capabilities, arguments, features| {
+ features.push(agent_feature);
+ if filter_by_prefix {
+ let mut seen = HashSet::new();
+ for spec in refspecs {
+ let spec = spec.to_ref();
+ if seen.insert(spec.instruction()) {
+ let mut prefixes = Vec::with_capacity(1);
+ spec.expand_prefixes(&mut prefixes);
+ for mut prefix in prefixes {
+ prefix.insert_str(0, "ref-prefix ");
+ arguments.push(prefix);
+ }
+ }
+ }
+ }
+ Ok(gix_protocol::ls_refs::Action::Continue)
+ },
+ &mut self.progress,
+ )
+ .await?
+ }
+ };
+ Ok(HandshakeWithRefs { outcome, refs })
+ }
+}
+
+/// Assume sha1 if the server says nothing, and reject any object format beyond sha1 as it isn't supported yet.
+#[allow(clippy::result_large_err)]
+fn extract_object_format(
+ _repo: &crate::Repository,
+ outcome: &gix_protocol::handshake::Outcome,
+) -> Result<gix_hash::Kind, Error> {
+ use bstr::ByteSlice;
+ let object_hash =
+ if let Some(object_format) = outcome.capabilities.capability("object-format").and_then(|c| c.value()) {
+ let object_format = object_format.to_str().map_err(|_| Error::UnknownObjectFormat {
+ format: object_format.into(),
+ })?;
+ match object_format {
+ "sha1" => gix_hash::Kind::Sha1,
+ unknown => return Err(Error::UnknownObjectFormat { format: unknown.into() }),
+ }
+ } else {
+ gix_hash::Kind::Sha1
+ };
+ Ok(object_hash)
+}
diff --git a/vendor/gix/src/remote/errors.rs b/vendor/gix/src/remote/errors.rs
new file mode 100644
index 000000000..20060cedf
--- /dev/null
+++ b/vendor/gix/src/remote/errors.rs
@@ -0,0 +1,45 @@
+///
+pub mod find {
+ use crate::{bstr::BString, config, remote};
+
+ /// The error returned by [`Repository::find_remote(…)`][crate::Repository::find_remote()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("The value for 'remote.<name>.tagOpt` is invalid and must either be '--tags' or '--no-tags'")]
+ TagOpt(#[from] config::key::GenericErrorWithValue),
+ #[error("{kind} ref-spec under `remote.{remote_name}` was invalid")]
+ RefSpec {
+ kind: &'static str,
+ remote_name: BString,
+ source: config::refspec::Error,
+ },
+ #[error("Neither 'url` nor 'pushUrl' fields were set in the remote's configuration.")]
+ UrlMissing,
+ #[error("The {kind} url under `remote.{remote_name}` was invalid")]
+ Url {
+ kind: &'static str,
+ remote_name: BString,
+ source: config::url::Error,
+ },
+ #[error(transparent)]
+ Init(#[from] remote::init::Error),
+ }
+
+ ///
+ pub mod existing {
+ use crate::bstr::BString;
+
+ /// The error returned by [`Repository::find_remote(…)`][crate::Repository::find_remote()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ Find(#[from] super::Error),
+ #[error("remote name could not be parsed as URL")]
+ UrlParse(#[from] gix_url::parse::Error),
+ #[error("The remote named {name:?} did not exist")]
+ NotFound { name: BString },
+ }
+ }
+}
diff --git a/vendor/gix/src/remote/fetch.rs b/vendor/gix/src/remote/fetch.rs
new file mode 100644
index 000000000..4add96a65
--- /dev/null
+++ b/vendor/gix/src/remote/fetch.rs
@@ -0,0 +1,166 @@
+/// If `Yes`, don't really make changes but do as much as possible to get an idea of what would be done.
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+#[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+pub(crate) enum DryRun {
+ /// Enable dry-run mode and don't actually change the underlying repository in any way.
+ Yes,
+ /// Run the operation like normal, making changes to the underlying repository.
+ No,
+}
+
+/// How to deal with refs when cloning or fetching.
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+#[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+pub(crate) enum WritePackedRefs {
+ /// Normal operation, i.e. don't use packed-refs at all for writing.
+ Never,
+ /// Put ref updates straight into the `packed-refs` file, without creating loose refs first or dealing with them in any way.
+ Only,
+}
+
+/// Describe how to handle tags when fetching
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+pub enum Tags {
+ /// Fetch all tags from the remote, even if these are not reachable from objects referred to by our refspecs.
+ All,
+ /// Fetch only the tags that point to the objects being sent.
+ /// That way, annotated tags that point to an object we receive are automatically transmitted and their refs are created.
+ /// The same goes for lightweight tags.
+ Included,
+ /// Do not fetch any tags.
+ None,
+}
+
+impl Default for Tags {
+ fn default() -> Self {
+ Tags::Included
+ }
+}
+
+impl Tags {
+ /// Obtain a refspec that determines whether or not to fetch all tags, depending on this variant.
+ ///
+ /// The returned refspec is the default refspec for tags, but it will never overwrite local tags.
+ pub fn to_refspec(&self) -> Option<gix_refspec::RefSpecRef<'static>> {
+ match self {
+ Tags::All | Tags::Included => Some(
+ gix_refspec::parse("refs/tags/*:refs/tags/*".into(), gix_refspec::parse::Operation::Fetch)
+ .expect("valid"),
+ ),
+ Tags::None => None,
+ }
+ }
+}
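+
+// A small sketch of the effect: `Tags::All` and `Tags::Included` share the same refspec,
+// while `Tags::None` yields none at all:
+//
+//     assert!(Tags::None.to_refspec().is_none());
+//     assert!(Tags::All.to_refspec().is_some());
+//     assert_eq!(Tags::default(), Tags::Included);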
+
+/// Information about the relationship between our refspecs, and remote references with their local counterparts.
+#[derive(Default, Debug, Clone)]
+#[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+pub struct RefMap {
+ /// A mapping between a remote reference and a local tracking branch.
+ pub mappings: Vec<Mapping>,
+ /// Refspecs which have been added implicitly due to settings of the `remote`, possibly pre-initialized from
+ /// [`extra_refspecs` in RefMap options][crate::remote::ref_map::Options::extra_refspecs].
+ ///
+ /// They are never persisted nor are they typically presented to the user.
+ pub extra_refspecs: Vec<gix_refspec::RefSpec>,
+ /// Information about the fixes applied to the `mappings` due to validation and sanitization.
+ pub fixes: Vec<gix_refspec::match_group::validate::Fix>,
+ /// All refs advertised by the remote.
+ pub remote_refs: Vec<gix_protocol::handshake::Ref>,
+ /// Additional information provided by the server as part of the handshake.
+ ///
+ /// Note that the `refs` field is always `None` as the refs are placed in `remote_refs`.
+ pub handshake: gix_protocol::handshake::Outcome,
+ /// The kind of hash used for all data sent by the server, if understood by this client implementation.
+ ///
+ /// It was extracted from the `handshake` as advertised by the server.
+ pub object_hash: gix_hash::Kind,
+}
+
+/// Either an object id that the remote has or the matched remote ref itself.
+#[derive(Debug, Clone)]
+#[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+pub enum Source {
+ /// An object id, as the matched ref-spec was an object id itself.
+ ObjectId(gix_hash::ObjectId),
+ /// The remote reference that matched the ref-spec's name.
+ Ref(gix_protocol::handshake::Ref),
+}
+
+#[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+impl Source {
+ /// Return either the direct object id we refer to or the direct target that a reference refers to.
+ /// The latter may be a direct or a symbolic reference, and we degenerate this to the peeled object id.
+ /// If unborn, `None` is returned.
+ pub fn as_id(&self) -> Option<&gix_hash::oid> {
+ match self {
+ Source::ObjectId(id) => Some(id),
+ Source::Ref(r) => r.unpack().1,
+ }
+ }
+
+ /// Return ourselves as the full name of the reference we represent, or `None` if this source isn't a reference but an object.
+ pub fn as_name(&self) -> Option<&crate::bstr::BStr> {
+ match self {
+ Source::ObjectId(_) => None,
+ Source::Ref(r) => match r {
+ gix_protocol::handshake::Ref::Unborn { full_ref_name, .. }
+ | gix_protocol::handshake::Ref::Symbolic { full_ref_name, .. }
+ | gix_protocol::handshake::Ref::Direct { full_ref_name, .. }
+ | gix_protocol::handshake::Ref::Peeled { full_ref_name, .. } => Some(full_ref_name.as_ref()),
+ },
+ }
+ }
+}
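+
+// A sketch of the accessors above, using the null id merely as a stand-in value:
+//
+//     let source = Source::ObjectId(gix_hash::ObjectId::null(gix_hash::Kind::Sha1));
+//     assert!(source.as_id().is_some());
+//     assert!(source.as_name().is_none(), "an object id has no ref name");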
+
+/// An index into various lists of refspecs that have been used in a [Mapping] of remote references to local ones.
+#[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, Ord, PartialOrd)]
+pub enum SpecIndex {
+ /// An index into the _refspecs of the remote_ that triggered a fetch operation.
+ /// These refspecs are explicit and visible to the user.
+ ExplicitInRemote(usize),
+ /// An index into the list of [extra refspecs][crate::remote::fetch::RefMap::extra_refspecs] that are implicit
+ /// to a particular fetch operation.
+ Implicit(usize),
+}
+
+#[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+impl SpecIndex {
+ /// Depending on our index variant, get the index either from `refspecs` or from `extra_refspecs` for `Implicit` variants.
+ pub fn get<'a>(
+ self,
+ refspecs: &'a [gix_refspec::RefSpec],
+ extra_refspecs: &'a [gix_refspec::RefSpec],
+ ) -> Option<&'a gix_refspec::RefSpec> {
+ match self {
+ SpecIndex::ExplicitInRemote(idx) => refspecs.get(idx),
+ SpecIndex::Implicit(idx) => extra_refspecs.get(idx),
+ }
+ }
+
+ /// If this is an `Implicit` variant, return its index.
+ pub fn implicit_index(self) -> Option<usize> {
+ match self {
+ SpecIndex::Implicit(idx) => Some(idx),
+ SpecIndex::ExplicitInRemote(_) => None,
+ }
+ }
+}
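+
+// A sketch of how an index resolves against the two refspec lists; the specs used here
+// are arbitrary examples:
+//
+//     let explicit = vec![gix_refspec::parse("refs/heads/*:refs/remotes/origin/*".into(),
+//                                            gix_refspec::parse::Operation::Fetch).expect("valid").to_owned()];
+//     let implicit = vec![Tags::All.to_refspec().expect("some").to_owned()];
+//     assert!(SpecIndex::ExplicitInRemote(0).get(&explicit, &implicit).is_some());
+//     assert!(SpecIndex::Implicit(1).get(&explicit, &implicit).is_none(), "out of bounds");
+//     assert_eq!(SpecIndex::Implicit(0).implicit_index(), Some(0));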
+
+/// A mapping from a single remote reference and its advertised objects to a local destination which may or may not exist.
+#[derive(Debug, Clone)]
+#[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+pub struct Mapping {
+ /// The reference on the remote side, along with information about the objects they point to as advertised by the server.
+ pub remote: Source,
+ /// The local tracking reference to update after fetching the object visible via `remote`.
+ pub local: Option<crate::bstr::BString>,
+ /// The index into the fetch ref-specs used to produce the mapping, allowing it to be recovered.
+ pub spec_index: SpecIndex,
+}
+
+#[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+pub use super::connection::fetch::{
+ negotiate, prepare, refs, Error, Outcome, Prepare, ProgressId, RefLogMessage, Status,
+};
diff --git a/vendor/gix/src/remote/init.rs b/vendor/gix/src/remote/init.rs
new file mode 100644
index 000000000..bba116946
--- /dev/null
+++ b/vendor/gix/src/remote/init.rs
@@ -0,0 +1,116 @@
+use std::convert::TryInto;
+
+use gix_refspec::RefSpec;
+
+use crate::{config, remote, Remote, Repository};
+
+mod error {
+ use crate::bstr::BString;
+
+ /// The error returned by [`Repository::remote_at(…)`][crate::Repository::remote_at()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ Url(#[from] gix_url::parse::Error),
+ #[error("The rewritten {kind} url {rewritten_url:?} failed to parse")]
+ RewrittenUrlInvalid {
+ kind: &'static str,
+ rewritten_url: BString,
+ source: gix_url::parse::Error,
+ },
+ }
+}
+pub use error::Error;
+
+use crate::bstr::BString;
+
+/// Initialization
+impl<'repo> Remote<'repo> {
+ #[allow(clippy::too_many_arguments)]
+ pub(crate) fn from_preparsed_config(
+ name_or_url: Option<BString>,
+ url: Option<gix_url::Url>,
+ push_url: Option<gix_url::Url>,
+ fetch_specs: Vec<RefSpec>,
+ push_specs: Vec<RefSpec>,
+ should_rewrite_urls: bool,
+ fetch_tags: remote::fetch::Tags,
+ repo: &'repo Repository,
+ ) -> Result<Self, Error> {
+ debug_assert!(
+ url.is_some() || push_url.is_some(),
+ "BUG: fetch or push url must be set at least"
+ );
+ let (url_alias, push_url_alias) = should_rewrite_urls
+ .then(|| rewrite_urls(&repo.config, url.as_ref(), push_url.as_ref()))
+ .unwrap_or(Ok((None, None)))?;
+ Ok(Remote {
+ name: name_or_url.map(Into::into),
+ url,
+ url_alias,
+ push_url,
+ push_url_alias,
+ fetch_specs,
+ push_specs,
+ fetch_tags,
+ repo,
+ })
+ }
+
+ pub(crate) fn from_fetch_url<Url, E>(
+ url: Url,
+ should_rewrite_urls: bool,
+ repo: &'repo Repository,
+ ) -> Result<Self, Error>
+ where
+ Url: TryInto<gix_url::Url, Error = E>,
+ gix_url::parse::Error: From<E>,
+ {
+ let url = url.try_into().map_err(|err| Error::Url(err.into()))?;
+ let (url_alias, _) = should_rewrite_urls
+ .then(|| rewrite_urls(&repo.config, Some(&url), None))
+ .unwrap_or(Ok((None, None)))?;
+ Ok(Remote {
+ name: None,
+ url: Some(url),
+ url_alias,
+ push_url: None,
+ push_url_alias: None,
+ fetch_specs: Vec::new(),
+ push_specs: Vec::new(),
+ fetch_tags: Default::default(),
+ repo,
+ })
+ }
+}
+
+pub(crate) fn rewrite_url(
+ config: &config::Cache,
+ url: Option<&gix_url::Url>,
+ direction: remote::Direction,
+) -> Result<Option<gix_url::Url>, Error> {
+ url.and_then(|url| config.url_rewrite().longest(url, direction))
+ .map(|url| {
+ gix_url::parse(url.as_ref()).map_err(|err| Error::RewrittenUrlInvalid {
+ kind: match direction {
+ remote::Direction::Fetch => "fetch",
+ remote::Direction::Push => "push",
+ },
+ source: err,
+ rewritten_url: url,
+ })
+ })
+ .transpose()
+}
+
+pub(crate) fn rewrite_urls(
+ config: &config::Cache,
+ url: Option<&gix_url::Url>,
+ push_url: Option<&gix_url::Url>,
+) -> Result<(Option<gix_url::Url>, Option<gix_url::Url>), Error> {
+ let url_alias = rewrite_url(config, url, remote::Direction::Fetch)?;
+ let push_url_alias = rewrite_url(config, push_url, remote::Direction::Push)?;
+
+ Ok((url_alias, push_url_alias))
+}
diff --git a/vendor/gix/src/remote/mod.rs b/vendor/gix/src/remote/mod.rs
new file mode 100644
index 000000000..f016575c7
--- /dev/null
+++ b/vendor/gix/src/remote/mod.rs
@@ -0,0 +1,62 @@
+use std::borrow::Cow;
+
+use crate::bstr::BStr;
+
+/// The direction of an operation carried out (or to be carried out) through a remote.
+#[derive(Debug, Eq, PartialEq, Copy, Clone, Hash)]
+pub enum Direction {
+ /// Push local changes to the remote.
+ Push,
+ /// Fetch changes from the remote to the local repository.
+ Fetch,
+}
+
+impl Direction {
+ /// Return ourselves as a string suitable for use as a verb in an English sentence.
+ pub fn as_str(&self) -> &'static str {
+ match self {
+ Direction::Push => "push",
+ Direction::Fetch => "fetch",
+ }
+ }
+}
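+
+// For illustration, the verb forms used in messages:
+//
+//     assert_eq!(Direction::Fetch.as_str(), "fetch");
+//     assert_eq!(Direction::Push.as_str(), "push");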
+
+/// The name of a remote, interpreted either as a symbol like `origin` or as a url, as returned by [`Remote::name()`][crate::Remote::name()].
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum Name<'repo> {
+ /// A symbolic name, like `origin`.
+ /// Note that it has not necessarily been validated yet.
+ Symbol(Cow<'repo, str>),
+ /// A url pointing to the remote host directly.
+ Url(Cow<'repo, BStr>),
+}
+
+///
+pub mod name;
+
+mod build;
+
+mod errors;
+pub use errors::find;
+
+///
+pub mod init;
+
+///
+pub mod fetch;
+
+///
+#[cfg(any(feature = "async-network-client", feature = "blocking-network-client"))]
+pub mod connect;
+
+#[cfg(any(feature = "async-network-client", feature = "blocking-network-client"))]
+mod connection;
+#[cfg(any(feature = "async-network-client", feature = "blocking-network-client"))]
+pub use connection::{ref_map, AuthenticateFn, Connection};
+
+///
+pub mod save;
+
+mod access;
+///
+pub mod url;
diff --git a/vendor/gix/src/remote/name.rs b/vendor/gix/src/remote/name.rs
new file mode 100644
index 000000000..6c6afe745
--- /dev/null
+++ b/vendor/gix/src/remote/name.rs
@@ -0,0 +1,84 @@
+use std::{borrow::Cow, convert::TryFrom};
+
+use super::Name;
+use crate::bstr::{BStr, BString, ByteSlice, ByteVec};
+
+/// The error returned by [validated()].
+#[derive(Debug, thiserror::Error)]
+#[error("remote names must be valid within refspecs for fetching: {name:?}")]
+#[allow(missing_docs)]
+pub struct Error {
+ pub source: gix_refspec::parse::Error,
+ pub name: BString,
+}
+
+/// Return `name` if it is valid as a symbolic remote name.
+///
+/// This means it has to be valid within the ref path of a tracking branch.
+pub fn validated(name: impl Into<BString>) -> Result<BString, Error> {
+ let name = name.into();
+ match gix_refspec::parse(
+ format!("refs/heads/test:refs/remotes/{name}/test").as_str().into(),
+ gix_refspec::parse::Operation::Fetch,
+ ) {
+ Ok(_) => Ok(name),
+ Err(err) => Err(Error { source: err, name }),
+ }
+}
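+
+// A sketch of the check above: `origin` forms a legal tracking-branch path, while a name
+// containing `..` should be rejected by refspec parsing:
+//
+//     assert!(validated("origin").is_ok());
+//     assert!(validated("bad..name").is_err());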
+
+impl Name<'_> {
+ /// Obtain the name as string representation.
+ pub fn as_bstr(&self) -> &BStr {
+ match self {
+ Name::Symbol(v) => v.as_ref().into(),
+ Name::Url(v) => v.as_ref(),
+ }
+ }
+
+ /// Return this instance as a symbolic name, if it is one.
+ pub fn as_symbol(&self) -> Option<&str> {
+ match self {
+ Name::Symbol(n) => n.as_ref().into(),
+ Name::Url(_) => None,
+ }
+ }
+
+ /// Return this instance as url, if it is one.
+ pub fn as_url(&self) -> Option<&BStr> {
+ match self {
+ Name::Url(n) => n.as_ref().into(),
+ Name::Symbol(_) => None,
+ }
+ }
+}
+
+impl<'a> TryFrom<Cow<'a, BStr>> for Name<'a> {
+ type Error = Cow<'a, BStr>;
+
+ fn try_from(name: Cow<'a, BStr>) -> Result<Self, Self::Error> {
+ if name.contains(&b'/') || name.as_ref() == "." {
+ Ok(Name::Url(name))
+ } else {
+ match name {
+ Cow::Borrowed(n) => n.to_str().ok().map(Cow::Borrowed).ok_or(name),
+ Cow::Owned(n) => Vec::from(n)
+ .into_string()
+ .map_err(|err| Cow::Owned(err.into_vec().into()))
+ .map(Cow::Owned),
+ }
+ .map(Name::Symbol)
+ }
+ }
+}
+
+impl From<BString> for Name<'static> {
+ fn from(name: BString) -> Self {
+ Self::try_from(Cow::Owned(name)).expect("String is never illformed")
+ }
+}
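+
+// A conversion sketch: anything containing a slash (or being just ".") is treated as a url,
+// everything else as a symbolic name:
+//
+//     assert_eq!(Name::from(BString::from("origin")).as_symbol(), Some("origin"));
+//     assert!(Name::from(BString::from("https://example.com/repo.git")).as_url().is_some());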
+
+impl<'a> AsRef<BStr> for Name<'a> {
+ fn as_ref(&self) -> &BStr {
+ self.as_bstr()
+ }
+}
diff --git a/vendor/gix/src/remote/save.rs b/vendor/gix/src/remote/save.rs
new file mode 100644
index 000000000..0e347551e
--- /dev/null
+++ b/vendor/gix/src/remote/save.rs
@@ -0,0 +1,125 @@
+use std::convert::TryInto;
+
+use crate::{
+ bstr::{BStr, BString},
+ config, remote, Remote,
+};
+
+/// The error returned by [`Remote::save_to()`].
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum Error {
+ #[error("The remote pointing to {} is anonymous and can't be saved.", url.to_bstring())]
+ NameMissing { url: gix_url::Url },
+}
+
+/// The error returned by [`Remote::save_as_to()`].
+///
+/// Note that this type should rather live in a module named `as`, but it cannot as `as` is a Rust keyword.
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum AsError {
+ #[error(transparent)]
+ Save(#[from] Error),
+ #[error(transparent)]
+ Name(#[from] crate::remote::name::Error),
+}
+
+/// Serialize into gix-config.
+impl Remote<'_> {
+ /// Save ourselves to the given `config` if we are a named remote or fail otherwise.
+ ///
+ /// Note that all sections named `remote "<name>"` will be cleared of all values we are about to write,
+ /// and the last `remote "<name>"` section will contain all relevant values so that reloading the remote
+ /// from `config` would yield the same in-memory state.
+ pub fn save_to(&self, config: &mut gix_config::File<'static>) -> Result<(), Error> {
+ fn as_key(name: &str) -> gix_config::parse::section::Key<'_> {
+ name.try_into().expect("valid")
+ }
+ let name = self.name().ok_or_else(|| Error::NameMissing {
+ url: self
+ .url
+ .as_ref()
+ .or(self.push_url.as_ref())
+ .expect("one url is always set")
+ .to_owned(),
+ })?;
+ if let Some(section_ids) = config.sections_and_ids_by_name("remote").map(|it| {
+ it.filter_map(|(s, id)| (s.header().subsection_name() == Some(name.as_bstr())).then_some(id))
+ .collect::<Vec<_>>()
+ }) {
+ let mut sections_to_remove = Vec::new();
+ const KEYS_TO_REMOVE: &[&str] = &[
+ config::tree::Remote::URL.name,
+ config::tree::Remote::PUSH_URL.name,
+ config::tree::Remote::FETCH.name,
+ config::tree::Remote::PUSH.name,
+ config::tree::Remote::TAG_OPT.name,
+ ];
+ for id in section_ids {
+ let mut section = config.section_mut_by_id(id).expect("just queried");
+ let was_empty = section.num_values() == 0;
+
+ for key in KEYS_TO_REMOVE {
+ while section.remove(key).is_some() {}
+ }
+
+ let is_empty_after_deletions_of_values_to_be_written = section.num_values() == 0;
+ if !was_empty && is_empty_after_deletions_of_values_to_be_written {
+ sections_to_remove.push(id);
+ }
+ }
+ for id in sections_to_remove {
+ config.remove_section_by_id(id);
+ }
+ }
+ let mut section = config
+ .section_mut_or_create_new("remote", Some(name.as_ref()))
+ .expect("section name is validated and 'remote' is acceptable");
+ if let Some(url) = self.url.as_ref() {
+ section.push(as_key("url"), Some(url.to_bstring().as_ref()));
+ }
+ if let Some(url) = self.push_url.as_ref() {
+ section.push(as_key("pushurl"), Some(url.to_bstring().as_ref()));
+ }
+ if self.fetch_tags != Default::default() {
+ section.push(
+ as_key(config::tree::Remote::TAG_OPT.name),
+ BStr::new(match self.fetch_tags {
+ remote::fetch::Tags::All => "--tags",
+ remote::fetch::Tags::None => "--no-tags",
+ remote::fetch::Tags::Included => unreachable!("BUG: the default shouldn't be written and we try"),
+ })
+ .into(),
+ );
+ }
+ for (key, spec) in self
+ .fetch_specs
+ .iter()
+ .map(|spec| ("fetch", spec))
+ .chain(self.push_specs.iter().map(|spec| ("push", spec)))
+ {
+ section.push(as_key(key), Some(spec.to_ref().to_bstring().as_ref()));
+ }
+ Ok(())
+ }
+
+ /// Forcefully set our name to `name` and write our state to `config` similar to [`save_to()`][Self::save_to()].
+ ///
+ /// Note that this sets a name for anonymous remotes, but overwrites the name of those that were named before.
+ /// If this name is different from the current one, the git configuration will still contain the previous name,
+ /// and the caller should account for that.
+ pub fn save_as_to(
+ &mut self,
+ name: impl Into<BString>,
+ config: &mut gix_config::File<'static>,
+ ) -> Result<(), AsError> {
+ let name = crate::remote::name::validated(name)?;
+ let prev_name = self.name.take();
+ self.name = Some(name.into());
+ self.save_to(config).map_err(|err| {
+ self.name = prev_name;
+ err.into()
+ })
+ }
+}
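+
+// A usage sketch, assuming `remote` is a mutable `Remote` and `config` is the
+// `gix_config::File<'static>` it should be persisted to:
+//
+//     remote.save_as_to("origin", &mut config)?;
+//     // `config` now contains a `[remote "origin"]` section describing this remote.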
diff --git a/vendor/gix/src/remote/url/mod.rs b/vendor/gix/src/remote/url/mod.rs
new file mode 100644
index 000000000..7b8815812
--- /dev/null
+++ b/vendor/gix/src/remote/url/mod.rs
@@ -0,0 +1,7 @@
+mod rewrite;
+///
+#[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+pub mod scheme_permission;
+pub(crate) use rewrite::Rewrite;
+#[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+pub(crate) use scheme_permission::SchemePermission;
diff --git a/vendor/gix/src/remote/url/rewrite.rs b/vendor/gix/src/remote/url/rewrite.rs
new file mode 100644
index 000000000..ae0eee426
--- /dev/null
+++ b/vendor/gix/src/remote/url/rewrite.rs
@@ -0,0 +1,100 @@
+use gix_features::threading::OwnShared;
+
+use crate::{
+ bstr::{BStr, BString, ByteVec},
+ config,
+ remote::Direction,
+};
+
+#[derive(Debug, Clone)]
+struct Replace {
+ find: BString,
+ with: OwnShared<BString>,
+}
+
+#[derive(Default, Debug, Clone)]
+pub(crate) struct Rewrite {
+ url_rewrite: Vec<Replace>,
+ push_url_rewrite: Vec<Replace>,
+}
+
+/// Init
+impl Rewrite {
+ pub fn from_config(
+ config: &gix_config::File<'static>,
+ mut filter: fn(&gix_config::file::Metadata) -> bool,
+ ) -> Rewrite {
+ config
+ .sections_by_name_and_filter("url", &mut filter)
+ .map(|sections| {
+ let mut url_rewrite = Vec::new();
+ let mut push_url_rewrite = Vec::new();
+ for section in sections {
+ let replace = match section.header().subsection_name() {
+ Some(base) => OwnShared::new(base.to_owned()),
+ None => continue,
+ };
+
+ for instead_of in section.values(config::tree::Url::INSTEAD_OF.name) {
+ url_rewrite.push(Replace {
+ with: OwnShared::clone(&replace),
+ find: instead_of.into_owned(),
+ });
+ }
+ for instead_of in section.values(config::tree::Url::PUSH_INSTEAD_OF.name) {
+ push_url_rewrite.push(Replace {
+ with: OwnShared::clone(&replace),
+ find: instead_of.into_owned(),
+ });
+ }
+ }
+ Rewrite {
+ url_rewrite,
+ push_url_rewrite,
+ }
+ })
+ .unwrap_or_default()
+ }
+}
+
+/// Access
+impl Rewrite {
+ fn replacements_for(&self, direction: Direction) -> &[Replace] {
+ match direction {
+ Direction::Fetch => &self.url_rewrite,
+ Direction::Push => &self.push_url_rewrite,
+ }
+ }
+
+ pub fn longest(&self, url: &gix_url::Url, direction: Direction) -> Option<BString> {
+ if self.replacements_for(direction).is_empty() {
+ None
+ } else {
+ let mut url = url.to_bstring();
+ self.rewrite_url_in_place(&mut url, direction).then_some(url)
+ }
+ }
+
+ /// Rewrite the given `url` of `direction` and return `true` if a replacement happened.
+ ///
+ /// Note that the result must still be checked for validity, as it might not be a valid URL: we do a syntax-unaware replacement.
+ pub fn rewrite_url_in_place(&self, url: &mut BString, direction: Direction) -> bool {
+ self.replacements_for(direction)
+ .iter()
+ .fold(None::<(usize, &BStr)>, |mut acc, replace| {
+ if url.starts_with(replace.find.as_ref()) {
+ let (bytes_matched, prev_rewrite_with) =
+ acc.get_or_insert((replace.find.len(), replace.with.as_slice().into()));
+ if *bytes_matched < replace.find.len() {
+ *bytes_matched = replace.find.len();
+ *prev_rewrite_with = replace.with.as_slice().into();
+ }
+ };
+ acc
+ })
+ .map(|(bytes_matched, replace_with)| {
+ url.replace_range(..bytes_matched, replace_with);
+ })
+ .is_some()
+ }
+}
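+
+// An illustration of the semantics, assuming configuration along these lines:
+//
+//     [url "ssh://git@example.com/"]
+//         insteadOf = https://example.com/
+//
+// For fetching, `longest()` would then rewrite "https://example.com/org/repo" into
+// "ssh://git@example.com/org/repo"; if several `insteadOf` values match, the longest
+// matching prefix wins, mirroring the fold above.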
diff --git a/vendor/gix/src/remote/url/scheme_permission.rs b/vendor/gix/src/remote/url/scheme_permission.rs
new file mode 100644
index 000000000..ddb87e111
--- /dev/null
+++ b/vendor/gix/src/remote/url/scheme_permission.rs
@@ -0,0 +1,120 @@
+use std::{borrow::Cow, collections::BTreeMap, convert::TryFrom};
+
+use crate::{
+ bstr::{BStr, BString, ByteSlice},
+ config,
+ config::tree::{gitoxide, Key, Protocol},
+};
+
+/// All allowed values of the `protocol.allow` key.
+#[derive(Debug, Clone, Copy, Eq, PartialEq)]
+pub enum Allow {
+ /// Allow use this protocol.
+ Always,
+ /// Forbid using this protocol
+ Never,
+ /// Only allowed if the `GIT_PROTOCOL_FROM_USER` environment variable is unset or set to `1`.
+ User,
+}
+
+impl Allow {
+ /// Return true if we represent something like 'allow == true'.
+ pub fn to_bool(self, user_allowed: Option<bool>) -> bool {
+ match self {
+ Allow::Always => true,
+ Allow::Never => false,
+ Allow::User => user_allowed.unwrap_or(true),
+ }
+ }
+}
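+
+// A truth-table sketch for `to_bool`, where the argument reflects `GIT_PROTOCOL_FROM_USER`:
+//
+//     assert!(Allow::Always.to_bool(None));
+//     assert!(!Allow::Never.to_bool(Some(true)));
+//     assert!(Allow::User.to_bool(None), "unset counts as allowed");
+//     assert!(!Allow::User.to_bool(Some(false)));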
+
+impl<'a> TryFrom<Cow<'a, BStr>> for Allow {
+ type Error = BString;
+
+ fn try_from(v: Cow<'a, BStr>) -> Result<Self, Self::Error> {
+ Ok(match v.as_ref().as_bytes() {
+ b"never" => Allow::Never,
+ b"always" => Allow::Always,
+ b"user" => Allow::User,
+ unknown => return Err(unknown.into()),
+ })
+ }
+}
+
+#[derive(Debug, Clone)]
+pub(crate) struct SchemePermission {
+ /// `None` if the env-var is unset or wasn't queried, otherwise `true` if `GIT_PROTOCOL_FROM_USER` is `1`.
+ user_allowed: Option<bool>,
+ /// The general allow value from `protocol.allow`.
+ allow: Option<Allow>,
+ /// Per-scheme allow information.
+ allow_per_scheme: BTreeMap<gix_url::Scheme, Allow>,
+}
+
+/// Init
+impl SchemePermission {
+ /// NOTE: _intentionally without leniency_
+ pub fn from_config(
+ config: &gix_config::File<'static>,
+ mut filter: fn(&gix_config::file::Metadata) -> bool,
+ ) -> Result<Self, config::protocol::allow::Error> {
+ let allow: Option<Allow> = config
+ .string_filter_by_key("protocol.allow", &mut filter)
+ .map(|value| Protocol::ALLOW.try_into_allow(value, None))
+ .transpose()?;
+
+ let mut saw_user = allow.map_or(false, |allow| allow == Allow::User);
+ let allow_per_scheme = match config.sections_by_name_and_filter("protocol", &mut filter) {
+ Some(it) => {
+ let mut map = BTreeMap::default();
+ for (section, scheme) in it.filter_map(|section| {
+ section.header().subsection_name().and_then(|scheme| {
+ scheme
+ .to_str()
+ .ok()
+ .and_then(|scheme| gix_url::Scheme::try_from(scheme).ok().map(|scheme| (section, scheme)))
+ })
+ }) {
+ if let Some(value) = section
+ .value("allow")
+ .map(|value| Protocol::ALLOW.try_into_allow(value, Some(scheme.as_str())))
+ .transpose()?
+ {
+ saw_user |= value == Allow::User;
+ map.insert(scheme, value);
+ }
+ }
+ map
+ }
+ None => Default::default(),
+ };
+
+ let user_allowed = saw_user.then(|| {
+ config
+ .string_filter_by_key(gitoxide::Allow::PROTOCOL_FROM_USER.logical_name().as_str(), &mut filter)
+ .map_or(true, |val| val.as_ref() == "1")
+ });
+ Ok(SchemePermission {
+ allow,
+ allow_per_scheme,
+ user_allowed,
+ })
+ }
+}
+
+/// Access
+impl SchemePermission {
+ pub fn allow(&self, scheme: &gix_url::Scheme) -> bool {
+ self.allow_per_scheme.get(scheme).or(self.allow.as_ref()).map_or_else(
+ || {
+ use gix_url::Scheme::*;
+ match scheme {
+ File | Git | Ssh | Http | Https => true,
+ Ext(_) => false,
+ // TODO: figure out what 'ext' really entails, and what 'other' protocols are which aren't representable for us yet
+ }
+ },
+ |allow| allow.to_bool(self.user_allowed),
+ )
+ }
+}
diff --git a/vendor/gix/src/repository/cache.rs b/vendor/gix/src/repository/cache.rs
new file mode 100644
index 000000000..7dcd844e6
--- /dev/null
+++ b/vendor/gix/src/repository/cache.rs
@@ -0,0 +1,30 @@
+/// Configure how caches are used to speed up various git repository operations
+impl crate::Repository {
+ /// Sets the amount of space used at most for caching most recently accessed fully decoded objects, to `Some(bytes)`,
+ /// or `None` to deactivate it entirely.
+ ///
+ /// Note that it is unset by default but can be enabled once there is time for performance optimization.
+ /// Well-chosen cache sizes can improve performance particularly if objects are accessed multiple times in a row.
+ /// The cache is configured to grow gradually.
+ ///
+ /// Note that a cache on the application level should be considered as well, as the best object access is the one that doesn't happen at all.
+ pub fn object_cache_size(&mut self, bytes: impl Into<Option<usize>>) {
+ let bytes = bytes.into();
+ match bytes {
+ Some(bytes) if bytes == 0 => self.objects.unset_object_cache(),
+ Some(bytes) => self
+ .objects
+ .set_object_cache(move || Box::new(crate::object::cache::MemoryCappedHashmap::new(bytes))),
+ None => self.objects.unset_object_cache(),
+ }
+ }
+
+ /// Set an object cache of size `bytes` if none is set.
+ ///
+ /// Use this method to avoid overwriting any existing value while assuring better performance in case no value is set.
+ pub fn object_cache_size_if_unset(&mut self, bytes: usize) {
+ if !self.objects.has_object_cache() {
+ self.object_cache_size(bytes)
+ }
+ }
+}
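+
+// A usage sketch, assuming `repo` is a mutable `Repository`; the 4 MiB figure is arbitrary
+// and only for illustration:
+//
+//     repo.object_cache_size_if_unset(4 * 1024 * 1024);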
diff --git a/vendor/gix/src/repository/config/mod.rs b/vendor/gix/src/repository/config/mod.rs
new file mode 100644
index 000000000..92b2618cc
--- /dev/null
+++ b/vendor/gix/src/repository/config/mod.rs
@@ -0,0 +1,191 @@
+use std::collections::BTreeSet;
+
+use crate::{bstr::ByteSlice, config};
+
+/// General Configuration
+impl crate::Repository {
+ /// Return a snapshot of the configuration as seen upon opening the repository.
+ pub fn config_snapshot(&self) -> config::Snapshot<'_> {
+ config::Snapshot { repo: self }
+ }
+
+ /// Return a mutable snapshot of the configuration as seen upon opening the repository, starting a transaction.
+ /// When the returned instance is dropped, it is applied in full, even if the reason for the drop is an error.
+ ///
+ /// Note that changes to the configuration are in-memory only and are observed only by this instance
+ /// of the [`Repository`][crate::Repository].
+ pub fn config_snapshot_mut(&mut self) -> config::SnapshotMut<'_> {
+ let config = self.config.resolved.as_ref().clone();
+ config::SnapshotMut {
+ repo: Some(self),
+ config,
+ }
+ }
+
+ /// The options used to open the repository.
+ pub fn open_options(&self) -> &crate::open::Options {
+ &self.options
+ }
+
+ /// Obtain options for use when connecting via `ssh`.
+ #[cfg(feature = "blocking-network-client")]
+ pub fn ssh_connect_options(
+ &self,
+ ) -> Result<gix_protocol::transport::client::ssh::connect::Options, config::ssh_connect_options::Error> {
+ use crate::config::{
+ cache::util::ApplyLeniency,
+ tree::{gitoxide, Core, Ssh},
+ };
+
+ let config = &self.config.resolved;
+ let mut trusted = self.filter_config_section();
+ let mut fallback_active = false;
+ let ssh_command = config
+ .string_filter("core", None, Core::SSH_COMMAND.name, &mut trusted)
+ .or_else(|| {
+ fallback_active = true;
+ config.string_filter(
+ "gitoxide",
+ Some("ssh".into()),
+ gitoxide::Ssh::COMMAND_WITHOUT_SHELL_FALLBACK.name,
+ &mut trusted,
+ )
+ })
+ .map(|cmd| gix_path::from_bstr(cmd).into_owned().into());
+ let opts = gix_protocol::transport::client::ssh::connect::Options {
+ disallow_shell: fallback_active,
+ command: ssh_command,
+ kind: config
+ .string_filter_by_key("ssh.variant", &mut trusted)
+ .and_then(|variant| Ssh::VARIANT.try_into_variant(variant).transpose())
+ .transpose()
+ .with_leniency(self.options.lenient_config)?,
+ };
+ Ok(opts)
+ }
+
+ /// The kind of object hash the repository is configured to use.
+ pub fn object_hash(&self) -> gix_hash::Kind {
+ self.config.object_hash
+ }
+}
+
+#[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+mod transport;
+
+mod remote {
+ use std::{borrow::Cow, collections::BTreeSet};
+
+ use crate::{bstr::ByteSlice, remote};
+
+ impl crate::Repository {
+        /// Returns a sorted list of unique symbolic names of remotes that
+ /// we deem [trustworthy][crate::open::Options::filter_config_section()].
+ // TODO: Use `remote::Name` here
+ pub fn remote_names(&self) -> BTreeSet<&str> {
+ self.subsection_names_of("remote")
+ }
+
+ /// Obtain the branch-independent name for a remote for use in the given `direction`, or `None` if it could not be determined.
+ ///
+ /// For _fetching_, use the only configured remote, or default to `origin` if it exists.
+ /// For _pushing_, use the `remote.pushDefault` trusted configuration key, or fall back to the rules for _fetching_.
+ ///
+ /// # Notes
+ ///
+ /// It's up to the caller to determine what to do if the current `head` is unborn or detached.
+ // TODO: use remote::Name here
+ pub fn remote_default_name(&self, direction: remote::Direction) -> Option<Cow<'_, str>> {
+ let name = (direction == remote::Direction::Push)
+ .then(|| {
+ self.config
+ .resolved
+ .string_filter("remote", None, "pushDefault", &mut self.filter_config_section())
+ .and_then(|s| match s {
+ Cow::Borrowed(s) => s.to_str().ok().map(Cow::Borrowed),
+ Cow::Owned(s) => s.to_str().ok().map(|s| Cow::Owned(s.into())),
+ })
+ })
+ .flatten();
+ name.or_else(|| {
+ let names = self.remote_names();
+ match names.len() {
+ 0 => None,
+ 1 => names.iter().next().copied().map(Cow::Borrowed),
+ _more_than_one => names.get("origin").copied().map(Cow::Borrowed),
+ }
+ })
+ }
+ }
+}
+
+mod branch {
+ use std::{borrow::Cow, collections::BTreeSet, convert::TryInto};
+
+ use gix_ref::FullNameRef;
+ use gix_validate::reference::name::Error as ValidateNameError;
+
+ use crate::bstr::BStr;
+
+ impl crate::Repository {
+ /// Return a set of unique short branch names for which custom configuration exists in the configuration,
+ /// if we deem them [trustworthy][crate::open::Options::filter_config_section()].
+ pub fn branch_names(&self) -> BTreeSet<&str> {
+ self.subsection_names_of("branch")
+ }
+
+ /// Returns the validated reference on the remote associated with the given `short_branch_name`,
+        /// typically `main` instead of `refs/heads/main`.
+ ///
+ /// The returned reference is the one we track on the remote side for merging and pushing.
+ /// Returns `None` if the remote reference was not found.
+ /// May return an error if the reference is invalid.
+ pub fn branch_remote_ref<'a>(
+ &self,
+ short_branch_name: impl Into<&'a BStr>,
+ ) -> Option<Result<Cow<'_, FullNameRef>, ValidateNameError>> {
+ self.config
+ .resolved
+ .string("branch", Some(short_branch_name.into()), "merge")
+ .map(crate::config::tree::branch::Merge::try_into_fullrefname)
+ }
+
+ /// Returns the unvalidated name of the remote associated with the given `short_branch_name`,
+ /// typically `main` instead of `refs/heads/main`.
+        /// In some cases, the returned name will be a URL.
+        /// Returns `None` if the remote was not found or if the name contained ill-formed UTF-8.
+ ///
+ /// See also [Reference::remote_name()][crate::Reference::remote_name()] for a more typesafe version
+ /// to be used when a `Reference` is available.
+ pub fn branch_remote_name<'a>(
+ &self,
+ short_branch_name: impl Into<&'a BStr>,
+ ) -> Option<crate::remote::Name<'_>> {
+ self.config
+ .resolved
+ .string("branch", Some(short_branch_name.into()), "remote")
+ .and_then(|name| name.try_into().ok())
+ }
+ }
+}
+
+impl crate::Repository {
+ pub(crate) fn filter_config_section(&self) -> fn(&gix_config::file::Metadata) -> bool {
+ self.options
+ .filter_config_section
+ .unwrap_or(config::section::is_trusted)
+ }
+
+ fn subsection_names_of<'a>(&'a self, header_name: &'a str) -> BTreeSet<&'a str> {
+ self.config
+ .resolved
+ .sections_by_name(header_name)
+ .map(|it| {
+ let filter = self.filter_config_section();
+ it.filter(move |s| filter(s.meta()))
+ .filter_map(|section| section.header().subsection_name().and_then(|b| b.to_str().ok()))
+ .collect()
+ })
+ .unwrap_or_default()
+ }
+}
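
A sketch of the remote- and branch-related accessors defined above; `gix::discover()`, the current directory and the branch name `main` are assumptions for illustration.

use gix::remote::Direction;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let repo = gix::discover(".")?;
    // Remote names from trusted configuration sections only.
    for name in repo.remote_names() {
        println!("remote: {name}");
    }
    // The remote used for fetching, if it can be determined unambiguously.
    if let Some(name) = repo.remote_default_name(Direction::Fetch) {
        println!("default fetch remote: {name}");
    }
    // The remote a local branch tracks, if configured.
    if repo.branch_remote_name("main").is_some() {
        println!("branch 'main' has a tracking remote configured");
    }
    Ok(())
}
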
diff --git a/vendor/gix/src/repository/config/transport.rs b/vendor/gix/src/repository/config/transport.rs
new file mode 100644
index 000000000..dcfbc0bf6
--- /dev/null
+++ b/vendor/gix/src/repository/config/transport.rs
@@ -0,0 +1,425 @@
+#![allow(clippy::result_large_err)]
+use std::any::Any;
+
+use crate::bstr::BStr;
+
+impl crate::Repository {
+ /// Produce configuration suitable for `url`, as differentiated by its protocol/scheme, to be passed to a transport instance via
+ /// [configure()][gix_transport::client::TransportWithoutIO::configure()] (via `&**config` to pass the contained `Any` and not the `Box`).
+ /// `None` is returned if there is no known configuration. If `remote_name` is not `None`, the remote's name may contribute to
+ /// configuration overrides, typically for the HTTP transport.
+ ///
+ /// Note that the caller may cast the instance themselves to modify it before passing it on.
+ ///
+ /// For transports that support proxy authentication, the
+ /// [default authentication method](crate::config::Snapshot::credential_helpers()) will be used with the url of the proxy
+ /// if it contains a user name.
+ #[cfg_attr(
+ not(any(
+ feature = "blocking-http-transport-reqwest",
+ feature = "blocking-http-transport-curl"
+ )),
+ allow(unused_variables)
+ )]
+ pub fn transport_options<'a>(
+ &self,
+ url: impl Into<&'a BStr>,
+ remote_name: Option<&BStr>,
+ ) -> Result<Option<Box<dyn Any>>, crate::config::transport::Error> {
+ let url = gix_url::parse(url.into())?;
+ use gix_url::Scheme::*;
+
+ match &url.scheme {
+ Http | Https => {
+ #[cfg(not(any(
+ feature = "blocking-http-transport-reqwest",
+ feature = "blocking-http-transport-curl"
+ )))]
+ {
+ Ok(None)
+ }
+ #[cfg(any(
+ feature = "blocking-http-transport-reqwest",
+ feature = "blocking-http-transport-curl"
+ ))]
+ {
+ use std::{
+ borrow::Cow,
+ sync::{Arc, Mutex},
+ };
+
+ use gix_transport::client::{
+ http,
+ http::options::{ProxyAuthMethod, SslVersion, SslVersionRangeInclusive},
+ };
+
+ use crate::{
+ config,
+ config::{
+ cache::util::ApplyLeniency,
+ tree::{gitoxide, Key, Remote},
+ },
+ };
+ fn try_cow_to_string(
+ v: Cow<'_, BStr>,
+ lenient: bool,
+ key_str: impl Into<Cow<'static, BStr>>,
+ key: &'static config::tree::keys::String,
+ ) -> Result<Option<String>, config::transport::Error> {
+ key.try_into_string(v)
+ .map_err(|err| config::transport::Error::IllformedUtf8 {
+ source: err,
+ key: key_str.into(),
+ })
+ .map(Some)
+ .with_leniency(lenient)
+ }
+
+ fn cow_bstr(v: &str) -> Cow<'_, BStr> {
+ Cow::Borrowed(v.into())
+ }
+
+ fn proxy_auth_method(
+ value_and_key: Option<(
+ Cow<'_, BStr>,
+ Cow<'static, BStr>,
+ &'static config::tree::http::ProxyAuthMethod,
+ )>,
+ ) -> Result<ProxyAuthMethod, config::transport::Error> {
+ let value = value_and_key
+ .map(|(method, key, key_type)| {
+ key_type.try_into_proxy_auth_method(method).map_err(|err| {
+ config::transport::http::Error::InvalidProxyAuthMethod { source: err, key }
+ })
+ })
+ .transpose()?
+ .unwrap_or_default();
+ Ok(value)
+ }
+
+ fn ssl_version(
+ config: &gix_config::File<'static>,
+ key_str: &'static str,
+ key: &'static config::tree::http::SslVersion,
+ mut filter: fn(&gix_config::file::Metadata) -> bool,
+ lenient: bool,
+ ) -> Result<Option<SslVersion>, config::transport::Error> {
+ debug_assert_eq!(
+ key_str,
+ key.logical_name(),
+ "BUG: hardcoded and generated key names must match"
+ );
+ config
+ .string_filter_by_key(key_str, &mut filter)
+ .filter(|v| !v.is_empty())
+ .map(|v| {
+ key.try_into_ssl_version(v)
+ .map_err(crate::config::transport::http::Error::from)
+ })
+ .transpose()
+ .with_leniency(lenient)
+ .map_err(Into::into)
+ }
+
+ fn proxy(
+ value: Option<(Cow<'_, BStr>, Cow<'static, BStr>, &'static config::tree::keys::String)>,
+ lenient: bool,
+ ) -> Result<Option<String>, config::transport::Error> {
+ Ok(value
+ .and_then(|(v, k, key)| try_cow_to_string(v, lenient, k.clone(), key).transpose())
+ .transpose()?
+ .map(|mut proxy| {
+ if !proxy.trim().is_empty() && !proxy.contains("://") {
+ proxy.insert_str(0, "http://");
+ proxy
+ } else {
+ proxy
+ }
+ }))
+ }
+
+ let mut opts = http::Options::default();
+ let config = &self.config.resolved;
+ let mut trusted_only = self.filter_config_section();
+ let lenient = self.config.lenient_config;
+ opts.extra_headers = {
+ let key = "http.extraHeader";
+ debug_assert_eq!(key, &config::tree::Http::EXTRA_HEADER.logical_name());
+ config
+ .strings_filter_by_key(key, &mut trusted_only)
+ .map(|values| config::tree::Http::EXTRA_HEADER.try_into_extra_header(values))
+ .transpose()
+ .map_err(|err| config::transport::Error::IllformedUtf8 {
+ source: err,
+ key: Cow::Borrowed(key.into()),
+ })?
+ .unwrap_or_default()
+ };
+
+ opts.follow_redirects = {
+ let key = "http.followRedirects";
+
+ config::tree::Http::FOLLOW_REDIRECTS
+ .try_into_follow_redirects(
+ config.string_filter_by_key(key, &mut trusted_only).unwrap_or_default(),
+ || {
+ config
+ .boolean_filter_by_key(key, &mut trusted_only)
+ .transpose()
+ .with_leniency(lenient)
+ },
+ )
+ .map_err(config::transport::http::Error::InvalidFollowRedirects)?
+ };
+
+ opts.low_speed_time_seconds = config
+ .integer_filter_by_key("http.lowSpeedTime", &mut trusted_only)
+ .map(|value| config::tree::Http::LOW_SPEED_TIME.try_into_u64(value))
+ .transpose()
+ .with_leniency(lenient)
+ .map_err(config::transport::http::Error::from)?
+ .unwrap_or_default();
+ opts.low_speed_limit_bytes_per_second = config
+ .integer_filter_by_key("http.lowSpeedLimit", &mut trusted_only)
+ .map(|value| config::tree::Http::LOW_SPEED_LIMIT.try_into_u32(value))
+ .transpose()
+ .with_leniency(lenient)
+ .map_err(config::transport::http::Error::from)?
+ .unwrap_or_default();
+ opts.proxy = proxy(
+ remote_name
+ .and_then(|name| {
+ config
+ .string_filter("remote", Some(name), Remote::PROXY.name, &mut trusted_only)
+ .map(|v| (v, Cow::Owned(format!("remote.{name}.proxy").into()), &Remote::PROXY))
+ })
+ .or_else(|| {
+ let key = "http.proxy";
+ debug_assert_eq!(key, config::tree::Http::PROXY.logical_name());
+ let http_proxy = config
+ .string_filter_by_key(key, &mut trusted_only)
+ .map(|v| (v, cow_bstr(key), &config::tree::Http::PROXY))
+ .or_else(|| {
+ let key = "gitoxide.http.proxy";
+ debug_assert_eq!(key, gitoxide::Http::PROXY.logical_name());
+ config
+ .string_filter_by_key(key, &mut trusted_only)
+ .map(|v| (v, cow_bstr(key), &gitoxide::Http::PROXY))
+ });
+ if url.scheme == Https {
+ http_proxy.or_else(|| {
+ let key = "gitoxide.https.proxy";
+ debug_assert_eq!(key, gitoxide::Https::PROXY.logical_name());
+ config
+ .string_filter_by_key(key, &mut trusted_only)
+ .map(|v| (v, cow_bstr(key), &gitoxide::Https::PROXY))
+ })
+ } else {
+ http_proxy
+ }
+ })
+ .or_else(|| {
+ let key = "gitoxide.http.allProxy";
+ debug_assert_eq!(key, gitoxide::Http::ALL_PROXY.logical_name());
+ config
+ .string_filter_by_key(key, &mut trusted_only)
+ .map(|v| (v, cow_bstr(key), &gitoxide::Http::ALL_PROXY))
+ }),
+ lenient,
+ )?;
+ {
+ let key = "gitoxide.http.noProxy";
+ debug_assert_eq!(key, gitoxide::Http::NO_PROXY.logical_name());
+ opts.no_proxy = config
+ .string_filter_by_key(key, &mut trusted_only)
+ .and_then(|v| {
+ try_cow_to_string(v, lenient, Cow::Borrowed(key.into()), &gitoxide::Http::NO_PROXY)
+ .transpose()
+ })
+ .transpose()?;
+ }
+ opts.proxy_auth_method = proxy_auth_method({
+ let key = "gitoxide.http.proxyAuthMethod";
+ debug_assert_eq!(key, gitoxide::Http::PROXY_AUTH_METHOD.logical_name());
+ config
+ .string_filter_by_key(key, &mut trusted_only)
+ .map(|v| (v, Cow::Borrowed(key.into()), &gitoxide::Http::PROXY_AUTH_METHOD))
+ .or_else(|| {
+ remote_name
+ .and_then(|name| {
+ config
+ .string_filter("remote", Some(name), "proxyAuthMethod", &mut trusted_only)
+ .map(|v| {
+ (
+ v,
+ Cow::Owned(format!("remote.{name}.proxyAuthMethod").into()),
+ &Remote::PROXY_AUTH_METHOD,
+ )
+ })
+ })
+ .or_else(|| {
+ let key = "http.proxyAuthMethod";
+ debug_assert_eq!(key, config::tree::Http::PROXY_AUTH_METHOD.logical_name());
+ config.string_filter_by_key(key, &mut trusted_only).map(|v| {
+ (v, Cow::Borrowed(key.into()), &config::tree::Http::PROXY_AUTH_METHOD)
+ })
+ })
+ })
+ })?;
+ opts.proxy_authenticate = opts
+ .proxy
+ .as_deref()
+ .filter(|url| !url.is_empty())
+ .map(|url| gix_url::parse(url.into()))
+ .transpose()?
+ .filter(|url| url.user().is_some())
+ .map(|url| -> Result<_, config::transport::http::Error> {
+ let (mut cascade, action_with_normalized_url, prompt_opts) =
+ self.config_snapshot().credential_helpers(url)?;
+ Ok((
+ action_with_normalized_url,
+ Arc::new(Mutex::new(move |action| cascade.invoke(action, prompt_opts.clone())))
+ as Arc<Mutex<http::options::AuthenticateFn>>,
+ ))
+ })
+ .transpose()?;
+ opts.connect_timeout = {
+ let key = "gitoxide.http.connectTimeout";
+ config
+ .integer_filter_by_key(key, &mut trusted_only)
+ .map(|v| {
+ debug_assert_eq!(key, gitoxide::Http::CONNECT_TIMEOUT.logical_name());
+ gitoxide::Http::CONNECT_TIMEOUT
+ .try_into_duration(v)
+ .map_err(crate::config::transport::http::Error::from)
+ })
+ .transpose()
+ .with_leniency(lenient)?
+ };
+ {
+ let key = "http.userAgent";
+ opts.user_agent = config
+ .string_filter_by_key(key, &mut trusted_only)
+ .and_then(|v| {
+ try_cow_to_string(
+ v,
+ lenient,
+ Cow::Borrowed(key.into()),
+ &config::tree::Http::USER_AGENT,
+ )
+ .transpose()
+ })
+ .transpose()?
+ .or_else(|| Some(crate::env::agent().into()));
+ }
+
+ {
+ let key = "http.version";
+ opts.http_version = config
+ .string_filter_by_key(key, &mut trusted_only)
+ .map(|v| {
+ config::tree::Http::VERSION
+ .try_into_http_version(v)
+ .map_err(config::transport::http::Error::InvalidHttpVersion)
+ })
+ .transpose()?;
+ }
+
+ {
+ opts.verbose = config
+ .boolean_filter(
+ "gitoxide",
+ Some("http".into()),
+ gitoxide::Http::VERBOSE.name,
+ &mut trusted_only,
+ )
+ .and_then(Result::ok)
+ .unwrap_or_default();
+ }
+
+ let may_use_cainfo = {
+ let key = "http.schannelUseSSLCAInfo";
+ config
+ .boolean_filter_by_key(key, &mut trusted_only)
+ .map(|value| config::tree::Http::SCHANNEL_USE_SSL_CA_INFO.enrich_error(value))
+ .transpose()
+ .with_leniency(lenient)
+ .map_err(config::transport::http::Error::from)?
+ .unwrap_or(true)
+ };
+
+ if may_use_cainfo {
+ let key = "http.sslCAInfo";
+ debug_assert_eq!(key, config::tree::Http::SSL_CA_INFO.logical_name());
+ opts.ssl_ca_info = config
+ .path_filter_by_key(key, &mut trusted_only)
+ .map(|p| {
+ use crate::config::cache::interpolate_context;
+ p.interpolate(interpolate_context(
+ self.install_dir().ok().as_deref(),
+ self.config.home_dir().as_deref(),
+ ))
+ .map(|cow| cow.into_owned())
+ })
+ .transpose()
+ .with_leniency(lenient)
+ .map_err(|err| config::transport::Error::InterpolatePath { source: err, key })?;
+ }
+
+ {
+ opts.ssl_version = ssl_version(
+ config,
+ "http.sslVersion",
+ &config::tree::Http::SSL_VERSION,
+ trusted_only,
+ lenient,
+ )?
+ .map(|v| SslVersionRangeInclusive { min: v, max: v });
+ let min_max = ssl_version(
+ config,
+ "gitoxide.http.sslVersionMin",
+ &gitoxide::Http::SSL_VERSION_MIN,
+ trusted_only,
+ lenient,
+ )
+ .and_then(|min| {
+ ssl_version(
+ config,
+ "gitoxide.http.sslVersionMax",
+ &gitoxide::Http::SSL_VERSION_MAX,
+ trusted_only,
+ lenient,
+ )
+ .map(|max| min.and_then(|min| max.map(|max| (min, max))))
+ })?;
+ if let Some((min, max)) = min_max {
+ let v = opts.ssl_version.get_or_insert(SslVersionRangeInclusive {
+ min: SslVersion::TlsV1_3,
+ max: SslVersion::TlsV1_3,
+ });
+ v.min = min;
+ v.max = max;
+ }
+ }
+
+ #[cfg(feature = "blocking-http-transport-curl")]
+ {
+ let key = "http.schannelCheckRevoke";
+ let schannel_check_revoke = config
+ .boolean_filter_by_key(key, &mut trusted_only)
+ .map(|value| config::tree::Http::SCHANNEL_CHECK_REVOKE.enrich_error(value))
+ .transpose()
+ .with_leniency(lenient)
+ .map_err(config::transport::http::Error::from)?;
+ let backend = gix_protocol::transport::client::http::curl::Options { schannel_check_revoke };
+ opts.backend =
+ Some(Arc::new(Mutex::new(backend)) as Arc<Mutex<dyn Any + Send + Sync + 'static>>);
+ }
+
+ Ok(Some(Box::new(opts)))
+ }
+ }
+ File | Git | Ssh | Ext(_) => Ok(None),
+ }
+ }
+}
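
A sketch of how a caller might probe the transport configuration assembled above; the URL is a placeholder and `gix::discover()` is an assumption.

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let repo = gix::discover(".")?;
    // For http(s) URLs this is `Some(Box<dyn Any>)` holding HTTP options when an HTTP transport
    // feature is enabled; for file, git, ssh and ext URLs it is `None`.
    let opts = repo.transport_options("https://example.com/repo.git", None)?;
    println!("scheme-specific transport configuration present: {}", opts.is_some());
    Ok(())
}
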
diff --git a/vendor/gix/src/repository/identity.rs b/vendor/gix/src/repository/identity.rs
new file mode 100644
index 000000000..61a4b4a98
--- /dev/null
+++ b/vendor/gix/src/repository/identity.rs
@@ -0,0 +1,175 @@
+use std::time::SystemTime;
+
+use crate::{
+ bstr::BString,
+ config,
+ config::tree::{gitoxide, keys, Author, Committer, Key, User},
+};
+
+/// Identity handling.
+///
+/// # Deviation
+///
+/// There is no notion of a default user like in git, and instead failing to provide a user
+/// is fatal. That way, we enforce correctness and force application developers to take care
+/// of this issue which can be done in various ways, for instance by setting
+/// `gitoxide.committer.nameFallback` and similar.
+impl crate::Repository {
+ /// Return the committer as configured by this repository, which is determined by…
+ ///
+ /// * …the git configuration `committer.name|email`…
+ /// * …the `GIT_COMMITTER_(NAME|EMAIL|DATE)` environment variables…
+ /// * …the configuration for `user.name|email` as fallback…
+ ///
+ /// …and in that order, or `None` if no committer name or email was configured, or `Some(Err(…))`
+ /// if the committer date could not be parsed.
+ ///
+ /// # Note
+ ///
+ /// The values are cached when the repository is instantiated.
+ pub fn committer(&self) -> Option<Result<gix_actor::SignatureRef<'_>, config::time::Error>> {
+ let p = self.config.personas();
+
+ Ok(gix_actor::SignatureRef {
+ name: p.committer.name.as_ref().or(p.user.name.as_ref()).map(|v| v.as_ref())?,
+ email: p
+ .committer
+ .email
+ .as_ref()
+ .or(p.user.email.as_ref())
+ .map(|v| v.as_ref())?,
+ time: match extract_time_or_default(p.committer.time.as_ref(), &gitoxide::Commit::COMMITTER_DATE) {
+ Ok(t) => t,
+ Err(err) => return Some(Err(err)),
+ },
+ })
+ .into()
+ }
+
+ /// Return the author as configured by this repository, which is determined by…
+ ///
+ /// * …the git configuration `author.name|email`…
+ /// * …the `GIT_AUTHOR_(NAME|EMAIL|DATE)` environment variables…
+ /// * …the configuration for `user.name|email` as fallback…
+ ///
+ /// …and in that order, or `None` if there was nothing configured.
+ ///
+ /// # Note
+ ///
+ /// The values are cached when the repository is instantiated.
+ pub fn author(&self) -> Option<Result<gix_actor::SignatureRef<'_>, config::time::Error>> {
+ let p = self.config.personas();
+
+ Ok(gix_actor::SignatureRef {
+ name: p.author.name.as_ref().or(p.user.name.as_ref()).map(|v| v.as_ref())?,
+ email: p.author.email.as_ref().or(p.user.email.as_ref()).map(|v| v.as_ref())?,
+ time: match extract_time_or_default(p.author.time.as_ref(), &gitoxide::Commit::AUTHOR_DATE) {
+ Ok(t) => t,
+ Err(err) => return Some(Err(err)),
+ },
+ })
+ .into()
+ }
+}
+
+fn extract_time_or_default(
+ time: Option<&Result<gix_actor::Time, gix_date::parse::Error>>,
+ config_key: &'static keys::Time,
+) -> Result<gix_actor::Time, config::time::Error> {
+ match time {
+ Some(Ok(t)) => Ok(*t),
+ None => Ok(gix_date::Time::now_local_or_utc()),
+ Some(Err(err)) => Err(config::time::Error::from(config_key).with_source(err.clone())),
+ }
+}
+
+#[derive(Debug, Clone)]
+pub(crate) struct Entity {
+ pub name: Option<BString>,
+ pub email: Option<BString>,
+ /// A time parsed from an environment variable, handling potential errors is delayed.
+ pub time: Option<Result<gix_actor::Time, gix_date::parse::Error>>,
+}
+
+#[derive(Debug, Clone)]
+pub(crate) struct Personas {
+ user: Entity,
+ committer: Entity,
+ author: Entity,
+}
+
+impl Personas {
+ pub fn from_config_and_env(config: &gix_config::File<'_>) -> Self {
+ fn entity_in_section(
+ config: &gix_config::File<'_>,
+ name_key: &keys::Any,
+ email_key: &keys::Any,
+ fallback: Option<(&keys::Any, &keys::Any)>,
+ ) -> (Option<BString>, Option<BString>) {
+ let fallback = fallback.and_then(|(name_key, email_key)| {
+ debug_assert_eq!(name_key.section.name(), email_key.section.name());
+ config
+ .section("gitoxide", Some(name_key.section.name().into()))
+ .ok()
+ .map(|section| (section, name_key, email_key))
+ });
+ (
+ config
+ .string(name_key.section.name(), None, name_key.name)
+ .or_else(|| fallback.as_ref().and_then(|(s, name_key, _)| s.value(name_key.name)))
+ .map(|v| v.into_owned()),
+ config
+ .string(email_key.section.name(), None, email_key.name)
+ .or_else(|| fallback.as_ref().and_then(|(s, _, email_key)| s.value(email_key.name)))
+ .map(|v| v.into_owned()),
+ )
+ }
+ let now = SystemTime::now();
+ let parse_date = |key: &str, date: &keys::Time| -> Option<Result<gix_date::Time, gix_date::parse::Error>> {
+ debug_assert_eq!(
+ key,
+ date.logical_name(),
+ "BUG: drift of expected name and actual name of the key (we hardcode it to save an allocation)"
+ );
+ config
+ .string_by_key(key)
+ .map(|time| date.try_into_time(time, now.into()))
+ };
+
+ let fallback = (
+ &gitoxide::Committer::NAME_FALLBACK,
+ &gitoxide::Committer::EMAIL_FALLBACK,
+ );
+ let (committer_name, committer_email) =
+ entity_in_section(config, &Committer::NAME, &Committer::EMAIL, Some(fallback));
+ let fallback = (&gitoxide::Author::NAME_FALLBACK, &gitoxide::Author::EMAIL_FALLBACK);
+ let (author_name, author_email) = entity_in_section(config, &Author::NAME, &Author::EMAIL, Some(fallback));
+ let (user_name, mut user_email) = entity_in_section(config, &User::NAME, &User::EMAIL, None);
+
+ let committer_date = parse_date("gitoxide.commit.committerDate", &gitoxide::Commit::COMMITTER_DATE);
+ let author_date = parse_date("gitoxide.commit.authorDate", &gitoxide::Commit::AUTHOR_DATE);
+
+ user_email = user_email.or_else(|| {
+ config
+ .string_by_key(gitoxide::User::EMAIL_FALLBACK.logical_name().as_str())
+ .map(|v| v.into_owned())
+ });
+ Personas {
+ user: Entity {
+ name: user_name,
+ email: user_email,
+ time: None,
+ },
+ committer: Entity {
+ name: committer_name,
+ email: committer_email,
+ time: committer_date,
+ },
+ author: Entity {
+ name: author_name,
+ email: author_email,
+ time: author_date,
+ },
+ }
+ }
+}
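
A sketch of querying the identities computed above; `gix::discover()` and the current directory are assumptions.

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let repo = gix::discover(".")?;
    match repo.committer() {
        Some(Ok(sig)) => println!("committing as {} <{}>", sig.name, sig.email),
        Some(Err(err)) => eprintln!("committer date could not be parsed: {err}"),
        // Neither `committer.*`, `user.*`, the environment nor the fallback keys provided an identity.
        None => eprintln!("no committer configured"),
    }
    Ok(())
}
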
diff --git a/vendor/gix/src/repository/impls.rs b/vendor/gix/src/repository/impls.rs
new file mode 100644
index 000000000..6cf2b2e9b
--- /dev/null
+++ b/vendor/gix/src/repository/impls.rs
@@ -0,0 +1,73 @@
+impl Clone for crate::Repository {
+ fn clone(&self) -> Self {
+ crate::Repository::from_refs_and_objects(
+ self.refs.clone(),
+ self.objects.clone(),
+ self.work_tree.clone(),
+ self.common_dir.clone(),
+ self.config.clone(),
+ self.options.clone(),
+ self.index.clone(),
+ )
+ }
+}
+
+impl std::fmt::Debug for crate::Repository {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ f.debug_struct("Repository")
+ .field("kind", &self.kind())
+ .field("git_dir", &self.git_dir())
+ .field("work_dir", &self.work_dir())
+ .finish()
+ }
+}
+
+impl PartialEq<crate::Repository> for crate::Repository {
+ fn eq(&self, other: &crate::Repository) -> bool {
+ self.git_dir().canonicalize().ok() == other.git_dir().canonicalize().ok()
+ && self.work_tree.as_deref().and_then(|wt| wt.canonicalize().ok())
+ == other.work_tree.as_deref().and_then(|wt| wt.canonicalize().ok())
+ }
+}
+
+impl From<&crate::ThreadSafeRepository> for crate::Repository {
+ fn from(repo: &crate::ThreadSafeRepository) -> Self {
+ crate::Repository::from_refs_and_objects(
+ repo.refs.clone(),
+ repo.objects.to_handle().into(),
+ repo.work_tree.clone(),
+ repo.common_dir.clone(),
+ repo.config.clone(),
+ repo.linked_worktree_options.clone(),
+ repo.index.clone(),
+ )
+ }
+}
+
+impl From<crate::ThreadSafeRepository> for crate::Repository {
+ fn from(repo: crate::ThreadSafeRepository) -> Self {
+ crate::Repository::from_refs_and_objects(
+ repo.refs,
+ repo.objects.to_handle().into(),
+ repo.work_tree,
+ repo.common_dir,
+ repo.config,
+ repo.linked_worktree_options,
+ repo.index,
+ )
+ }
+}
+
+impl From<crate::Repository> for crate::ThreadSafeRepository {
+ fn from(r: crate::Repository) -> Self {
+ crate::ThreadSafeRepository {
+ refs: r.refs,
+ objects: r.objects.into_inner().store(),
+ work_tree: r.work_tree,
+ common_dir: r.common_dir,
+ config: r.config,
+ linked_worktree_options: r.options,
+ index: r.index,
+ }
+ }
+}
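
A sketch of the conversions above, round-tripping between the thread-local and the thread-safe repository types; `gix::discover()` is an assumption.

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let repo = gix::discover(".")?;
    // Drop thread-local state so the repository can be shared across threads…
    let shared: gix::ThreadSafeRepository = repo.into_sync();
    // …and derive cheap per-thread handles from it again via `From<&ThreadSafeRepository>`.
    let handle: gix::Repository = (&shared).into();
    println!("{handle:?}");
    Ok(())
}
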
diff --git a/vendor/gix/src/repository/init.rs b/vendor/gix/src/repository/init.rs
new file mode 100644
index 000000000..ae6a42c3b
--- /dev/null
+++ b/vendor/gix/src/repository/init.rs
@@ -0,0 +1,55 @@
+use std::cell::RefCell;
+
+impl crate::Repository {
+ pub(crate) fn from_refs_and_objects(
+ refs: crate::RefStore,
+ objects: crate::OdbHandle,
+ work_tree: Option<std::path::PathBuf>,
+ common_dir: Option<std::path::PathBuf>,
+ config: crate::config::Cache,
+ linked_worktree_options: crate::open::Options,
+ index: crate::worktree::IndexStorage,
+ ) -> Self {
+ let objects = setup_objects(objects, &config);
+ crate::Repository {
+ bufs: RefCell::new(Vec::with_capacity(4)),
+ work_tree,
+ common_dir,
+ objects,
+ refs,
+ config,
+ options: linked_worktree_options,
+ index,
+ }
+ }
+
+ /// Convert this instance into a [`ThreadSafeRepository`][crate::ThreadSafeRepository] by dropping all thread-local data.
+ pub fn into_sync(self) -> crate::ThreadSafeRepository {
+ self.into()
+ }
+}
+
+#[cfg_attr(not(feature = "max-performance-safe"), allow(unused_variables, unused_mut))]
+fn setup_objects(mut objects: crate::OdbHandle, config: &crate::config::Cache) -> crate::OdbHandle {
+ #[cfg(feature = "max-performance-safe")]
+ {
+ match config.pack_cache_bytes {
+ None => objects.set_pack_cache(|| Box::<gix_pack::cache::lru::StaticLinkedList<64>>::default()),
+ Some(0) => objects.unset_pack_cache(),
+ Some(bytes) => objects.set_pack_cache(move || -> Box<gix_odb::cache::PackCache> {
+ Box::new(gix_pack::cache::lru::MemoryCappedHashmap::new(bytes))
+ }),
+ };
+ if config.object_cache_bytes == 0 {
+ objects.unset_object_cache();
+ } else {
+ let bytes = config.object_cache_bytes;
+ objects.set_object_cache(move || Box::new(gix_pack::cache::object::MemoryCappedHashmap::new(bytes)));
+ }
+ objects
+ }
+ #[cfg(not(feature = "max-performance-safe"))]
+ {
+ objects
+ }
+}
diff --git a/vendor/gix/src/repository/location.rs b/vendor/gix/src/repository/location.rs
new file mode 100644
index 000000000..0bb8ea253
--- /dev/null
+++ b/vendor/gix/src/repository/location.rs
@@ -0,0 +1,86 @@
+use std::path::PathBuf;
+
+use gix_path::realpath::MAX_SYMLINKS;
+
+impl crate::Repository {
+ /// Return the path to the repository itself, containing objects, references, configuration, and more.
+ ///
+ /// Synonymous to [`path()`][crate::Repository::path()].
+ pub fn git_dir(&self) -> &std::path::Path {
+ self.refs.git_dir()
+ }
+
+ /// The trust we place in the git-dir, with lower amounts of trust causing access to configuration to be limited.
+ pub fn git_dir_trust(&self) -> gix_sec::Trust {
+ self.options.git_dir_trust.expect("definitely set by now")
+ }
+
+    /// Returns the directory of the main repository if this is a repository within a linked work-tree, or the `git_dir` itself.
+ pub fn common_dir(&self) -> &std::path::Path {
+ self.common_dir.as_deref().unwrap_or_else(|| self.git_dir())
+ }
+
+ /// Return the path to the worktree index file, which may or may not exist.
+ pub fn index_path(&self) -> PathBuf {
+ self.git_dir().join("index")
+ }
+
+ /// The path to the `.git` directory itself, or equivalent if this is a bare repository.
+ pub fn path(&self) -> &std::path::Path {
+ self.git_dir()
+ }
+
+ /// Return the work tree containing all checked out files, if there is one.
+ pub fn work_dir(&self) -> Option<&std::path::Path> {
+ self.work_tree.as_deref()
+ }
+
+ // TODO: tests, respect precomposeUnicode
+ /// The directory of the binary path of the current process.
+ pub fn install_dir(&self) -> std::io::Result<PathBuf> {
+ crate::path::install_dir()
+ }
+
+    /// Returns the relative path made up of the components between the working tree and the current working directory (CWD).
+    /// Note that the result may be `None` if there is no work tree, and that the `PathBuf` will be empty
+ /// if the CWD is at the root of the work tree.
+ // TODO: tests, details - there is a lot about environment variables to change things around.
+ pub fn prefix(&self) -> Option<std::io::Result<PathBuf>> {
+ self.work_tree.as_ref().map(|root| {
+ std::env::current_dir().and_then(|cwd| {
+ gix_path::realpath_opts(root, &cwd, MAX_SYMLINKS)
+ .map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))
+ .and_then(|root| {
+ cwd.strip_prefix(&root)
+ .map_err(|_| {
+ std::io::Error::new(
+ std::io::ErrorKind::Other,
+ format!(
+ "CWD '{}' isn't within the work tree '{}'",
+ cwd.display(),
+ root.display()
+ ),
+ )
+ })
+ .map(ToOwned::to_owned)
+ })
+ })
+ })
+ }
+
+ /// Return the kind of repository, either bare or one with a work tree.
+ pub fn kind(&self) -> crate::Kind {
+ match self.worktree() {
+ Some(wt) => {
+ if gix_discover::is_submodule_git_dir(self.git_dir()) {
+ crate::Kind::Submodule
+ } else {
+ crate::Kind::WorkTree {
+ is_linked: !wt.is_main(),
+ }
+ }
+ }
+ None => crate::Kind::Bare,
+ }
+ }
+}
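
A sketch of the location accessors above; `gix::discover()` and the current directory are assumptions.

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let repo = gix::discover(".")?;
    println!("git dir: {}", repo.git_dir().display());
    match repo.work_dir() {
        Some(worktree) => println!("worktree: {}", worktree.display()),
        None => println!("no work tree (bare repository)"),
    }
    // One of `Bare`, `WorkTree { is_linked }` or `Submodule`.
    println!("kind: {:?}", repo.kind());
    Ok(())
}
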
diff --git a/vendor/gix/src/repository/mod.rs b/vendor/gix/src/repository/mod.rs
new file mode 100644
index 000000000..31199e22d
--- /dev/null
+++ b/vendor/gix/src/repository/mod.rs
@@ -0,0 +1,36 @@
+//!
+
+/// Internal
+impl crate::Repository {
+ #[inline]
+ pub(crate) fn free_buf(&self) -> Vec<u8> {
+ self.bufs.borrow_mut().pop().unwrap_or_default()
+ }
+
+ /// This method is commonly called from the destructor of objects that previously claimed an entry
+ /// in the free-list with `free_buf()`.
+ /// They are welcome to take out the data themselves, for instance when the object is detached, to avoid
+    /// it being reclaimed.
+ #[inline]
+ pub(crate) fn reuse_buffer(&self, data: &mut Vec<u8>) {
+ if data.capacity() > 0 {
+ self.bufs.borrow_mut().push(std::mem::take(data));
+ }
+ }
+}
+
+mod cache;
+mod config;
+pub(crate) mod identity;
+mod impls;
+mod init;
+mod location;
+mod object;
+pub(crate) mod permissions;
+mod reference;
+mod remote;
+mod revision;
+mod snapshots;
+mod state;
+mod thread_safe;
+mod worktree;
diff --git a/vendor/gix/src/repository/object.rs b/vendor/gix/src/repository/object.rs
new file mode 100644
index 000000000..bda1a54c3
--- /dev/null
+++ b/vendor/gix/src/repository/object.rs
@@ -0,0 +1,214 @@
+#![allow(clippy::result_large_err)]
+use std::convert::TryInto;
+
+use gix_hash::ObjectId;
+use gix_odb::{Find, FindExt, Write};
+use gix_ref::{
+ transaction::{LogChange, PreviousValue, RefLog},
+ FullName,
+};
+
+use crate::{commit, ext::ObjectIdExt, object, tag, Id, Object, Reference, Tree};
+
+/// Methods related to object creation.
+impl crate::Repository {
+ /// Find the object with `id` in the object database or return an error if it could not be found.
+ ///
+ /// There are various legitimate reasons for an object to not be present, which is why
+ /// [`try_find_object(…)`][crate::Repository::try_find_object()] might be preferable instead.
+ ///
+ /// # Performance Note
+ ///
+    /// In order to get the kind of the object, it must be fully decoded from storage if it is packed with deltas.
+    /// Loose objects could be partially decoded, even though that's not implemented.
+ pub fn find_object(&self, id: impl Into<ObjectId>) -> Result<Object<'_>, object::find::existing::Error> {
+ let id = id.into();
+ if id == gix_hash::ObjectId::empty_tree(self.object_hash()) {
+ return Ok(Object {
+ id,
+ kind: gix_object::Kind::Tree,
+ data: Vec::new(),
+ repo: self,
+ });
+ }
+ let mut buf = self.free_buf();
+ let kind = self.objects.find(id, &mut buf)?.kind;
+ Ok(Object::from_data(id, kind, buf, self))
+ }
+
+    /// Try to find the object with `id` or return `None` if it wasn't found.
+ pub fn try_find_object(&self, id: impl Into<ObjectId>) -> Result<Option<Object<'_>>, object::find::Error> {
+ let id = id.into();
+ if id == gix_hash::ObjectId::empty_tree(self.object_hash()) {
+ return Ok(Some(Object {
+ id,
+ kind: gix_object::Kind::Tree,
+ data: Vec::new(),
+ repo: self,
+ }));
+ }
+
+ let mut buf = self.free_buf();
+ match self.objects.try_find(id, &mut buf)? {
+ Some(obj) => {
+ let kind = obj.kind;
+ Ok(Some(Object::from_data(id, kind, buf, self)))
+ }
+ None => Ok(None),
+ }
+ }
+
+ /// Write the given object into the object database and return its object id.
+ pub fn write_object(&self, object: impl gix_object::WriteTo) -> Result<Id<'_>, object::write::Error> {
+ self.objects
+ .write(object)
+ .map(|oid| oid.attach(self))
+ .map_err(Into::into)
+ }
+
+ /// Write a blob from the given `bytes`.
+ pub fn write_blob(&self, bytes: impl AsRef<[u8]>) -> Result<Id<'_>, object::write::Error> {
+ self.objects
+ .write_buf(gix_object::Kind::Blob, bytes.as_ref())
+ .map(|oid| oid.attach(self))
+ }
+
+ /// Write a blob from the given `Read` implementation.
+ pub fn write_blob_stream(
+ &self,
+ mut bytes: impl std::io::Read + std::io::Seek,
+ ) -> Result<Id<'_>, object::write::Error> {
+ let current = bytes.stream_position()?;
+ let len = bytes.seek(std::io::SeekFrom::End(0))? - current;
+ bytes.seek(std::io::SeekFrom::Start(current))?;
+
+ self.objects
+ .write_stream(gix_object::Kind::Blob, len, bytes)
+ .map(|oid| oid.attach(self))
+ }
+
+ /// Create a tag reference named `name` (without `refs/tags/` prefix) pointing to a newly created tag object
+ /// which in turn points to `target` and return the newly created reference.
+ ///
+ /// It will be created with `constraint` which is most commonly to [only create it][PreviousValue::MustNotExist]
+ /// or to [force overwriting a possibly existing tag](PreviousValue::Any).
+ pub fn tag(
+ &self,
+ name: impl AsRef<str>,
+ target: impl AsRef<gix_hash::oid>,
+ target_kind: gix_object::Kind,
+ tagger: Option<gix_actor::SignatureRef<'_>>,
+ message: impl AsRef<str>,
+ constraint: PreviousValue,
+ ) -> Result<Reference<'_>, tag::Error> {
+ let tag = gix_object::Tag {
+ target: target.as_ref().into(),
+ target_kind,
+ name: name.as_ref().into(),
+ tagger: tagger.map(|t| t.to_owned()),
+ message: message.as_ref().into(),
+ pgp_signature: None,
+ };
+ let tag_id = self.write_object(&tag)?;
+ self.tag_reference(name, tag_id, constraint).map_err(Into::into)
+ }
+
+ /// Similar to [`commit(…)`][crate::Repository::commit()], but allows to create the commit with `committer` and `author` specified.
+ ///
+ /// This forces setting the commit time and author time by hand. Note that typically, committer and author are the same.
+ pub fn commit_as<'a, 'c, Name, E>(
+ &self,
+ committer: impl Into<gix_actor::SignatureRef<'c>>,
+ author: impl Into<gix_actor::SignatureRef<'a>>,
+ reference: Name,
+ message: impl AsRef<str>,
+ tree: impl Into<ObjectId>,
+ parents: impl IntoIterator<Item = impl Into<ObjectId>>,
+ ) -> Result<Id<'_>, commit::Error>
+ where
+ Name: TryInto<FullName, Error = E>,
+ commit::Error: From<E>,
+ {
+ use gix_ref::{
+ transaction::{Change, RefEdit},
+ Target,
+ };
+
+        // TODO: possibly use CommitRef to save a few allocations (but will have to allocate for object ids anyway).
+        // This can be made vastly more efficient though if we wanted to, so we lie in the API.
+ let reference = reference.try_into()?;
+ let commit = gix_object::Commit {
+ message: message.as_ref().into(),
+ tree: tree.into(),
+ author: author.into().to_owned(),
+ committer: committer.into().to_owned(),
+ encoding: None,
+ parents: parents.into_iter().map(|id| id.into()).collect(),
+ extra_headers: Default::default(),
+ };
+
+ let commit_id = self.write_object(&commit)?;
+ self.edit_reference(RefEdit {
+ change: Change::Update {
+ log: LogChange {
+ mode: RefLog::AndReference,
+ force_create_reflog: false,
+ message: crate::reference::log::message("commit", commit.message.as_ref(), commit.parents.len()),
+ },
+ expected: match commit.parents.first().map(|p| Target::Peeled(*p)) {
+ Some(previous) => {
+ if reference.as_bstr() == "HEAD" {
+ PreviousValue::MustExistAndMatch(previous)
+ } else {
+ PreviousValue::ExistingMustMatch(previous)
+ }
+ }
+ None => PreviousValue::MustNotExist,
+ },
+ new: Target::Peeled(commit_id.inner),
+ },
+ name: reference,
+ deref: true,
+ })?;
+ Ok(commit_id)
+ }
+
+ /// Create a new commit object with `message` referring to `tree` with `parents`, and point `reference`
+ /// to it. The commit is written without message encoding field, which can be assumed to be UTF-8.
+ /// `author` and `committer` fields are pre-set from the configuration, which can be altered
+ /// [temporarily][crate::Repository::config_snapshot_mut()] before the call if required.
+ ///
+ /// `reference` will be created if it doesn't exist, and can be `"HEAD"` to automatically write-through to the symbolic reference
+ /// that `HEAD` points to if it is not detached. For this reason, detached head states cannot be created unless the `HEAD` is detached
+ /// already. The reflog will be written as canonical git would do, like `<operation> (<detail>): <summary>`.
+ ///
+ /// The first parent id in `parents` is expected to be the current target of `reference` and the operation will fail if it is not.
+ /// If there is no parent, the `reference` is expected to not exist yet.
+ ///
+ /// The method fails immediately if a `reference` lock can't be acquired.
+ pub fn commit<Name, E>(
+ &self,
+ reference: Name,
+ message: impl AsRef<str>,
+ tree: impl Into<ObjectId>,
+ parents: impl IntoIterator<Item = impl Into<ObjectId>>,
+ ) -> Result<Id<'_>, commit::Error>
+ where
+ Name: TryInto<FullName, Error = E>,
+ commit::Error: From<E>,
+ {
+ let author = self.author().ok_or(commit::Error::AuthorMissing)??;
+ let committer = self.committer().ok_or(commit::Error::CommitterMissing)??;
+ self.commit_as(committer, author, reference, message, tree, parents)
+ }
+
+ /// Return an empty tree object, suitable for [getting changes](crate::Tree::changes()).
+ ///
+ /// Note that it is special and doesn't physically exist in the object database even though it can be returned.
+ /// This means that this object can be used in an uninitialized, empty repository which would report to have no objects at all.
+ pub fn empty_tree(&self) -> Tree<'_> {
+ self.find_object(gix_hash::ObjectId::empty_tree(self.object_hash()))
+ .expect("always present")
+ .into_tree()
+ }
+}
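
A sketch of object creation with the methods above, assuming a freshly initialized repository at a hypothetical path with `user.name`/`user.email` configured; `gix::open()` and the path are assumptions.

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let repo = gix::open("/tmp/new-repo")?; // hypothetical path
    // Write a blob into the object database and get its id back.
    let blob_id = repo.write_blob(b"hello gitoxide")?;
    println!("wrote blob {blob_id}");
    // Create an initial commit on the unborn HEAD branch, using the always-available empty tree.
    let tree_id = gix::ObjectId::empty_tree(repo.object_hash());
    let commit_id = repo.commit("HEAD", "initial commit (sketch)", tree_id, Vec::<gix::ObjectId>::new())?;
    println!("created commit {commit_id}");
    Ok(())
}
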
diff --git a/vendor/gix/src/repository/permissions.rs b/vendor/gix/src/repository/permissions.rs
new file mode 100644
index 000000000..88b61b739
--- /dev/null
+++ b/vendor/gix/src/repository/permissions.rs
@@ -0,0 +1,168 @@
+use gix_sec::Trust;
+
+/// Permissions associated with various resources of a git repository
+#[derive(Debug, Clone)]
+pub struct Permissions {
+ /// Permissions related to the environment
+ pub env: Environment,
+ /// Permissions related to the handling of git configuration.
+ pub config: Config,
+}
+
+/// Configure from which sources git configuration may be loaded.
+///
+/// Note that configuration from inside of the repository is always loaded as it's definitely required for correctness.
+#[derive(Copy, Clone, Ord, PartialOrd, PartialEq, Eq, Debug, Hash)]
+pub struct Config {
+    /// The git binary may come with configuration as part of its installation, and if this is true (default false)
+ /// we will load the configuration of the git binary, if present and not a duplicate of the ones below.
+ ///
+    /// It's disabled by default as it involves executing the git binary once per execution of the application.
+ pub git_binary: bool,
+ /// Whether to use the system configuration.
+ /// This is defined as `$(prefix)/etc/gitconfig` on unix.
+ pub system: bool,
+ /// Whether to use the git application configuration.
+ ///
+    /// A platform-defined location where a user's git application configuration should be located.
+ /// If `$XDG_CONFIG_HOME` is not set or empty, `$HOME/.config/git/config` will be used
+ /// on unix.
+ pub git: bool,
+ /// Whether to use the user configuration.
+ /// This is usually `~/.gitconfig` on unix.
+ pub user: bool,
+ /// Whether to use the configuration from environment variables.
+ pub env: bool,
+    /// Whether to follow include files when they are encountered in loaded configuration,
+ /// via `include` and `includeIf` sections.
+ pub includes: bool,
+}
+
+impl Config {
+ /// Allow everything which usually relates to a fully trusted environment
+ pub fn all() -> Self {
+ Config {
+ git_binary: false,
+ system: true,
+ git: true,
+ user: true,
+ env: true,
+ includes: true,
+ }
+ }
+}
+
+impl Default for Config {
+ fn default() -> Self {
+ Self::all()
+ }
+}
+
+/// Permissions related to the usage of environment variables
+#[derive(Debug, Clone)]
+pub struct Environment {
+ /// Control whether resources pointed to by `XDG_CONFIG_HOME` can be used when looking up common configuration values.
+ ///
+ /// Note that [`gix_sec::Permission::Forbid`] will cause the operation to abort if a resource is set via the XDG config environment.
+ pub xdg_config_home: gix_sec::Permission,
+ /// Control the way resources pointed to by the home directory (similar to `xdg_config_home`) may be used.
+ pub home: gix_sec::Permission,
+ /// Control if environment variables to configure the HTTP transport, like `http_proxy` may be used.
+ ///
+ /// Note that http-transport related environment variables prefixed with `GIT_` may also be included here
+ /// if they match this category like `GIT_HTTP_USER_AGENT`.
+ pub http_transport: gix_sec::Permission,
+ /// Control if the `EMAIL` environment variables may be read.
+ ///
+ /// Note that identity related environment variables prefixed with `GIT_` may also be included here
+ /// if they match this category.
+ pub identity: gix_sec::Permission,
+ /// Control if environment variables related to the object database are handled. This includes features and performance
+ /// options alike.
+ pub objects: gix_sec::Permission,
+ /// Control if resources pointed to by `GIT_*` prefixed environment variables can be used, **but only** if they
+ /// are not contained in any other category. This is a catch-all section.
+ pub git_prefix: gix_sec::Permission,
+ /// Control if resources pointed to by `SSH_*` prefixed environment variables can be used (like `SSH_ASKPASS`)
+ pub ssh_prefix: gix_sec::Permission,
+}
+
+impl Environment {
+ /// Allow access to the entire environment.
+ pub fn all() -> Self {
+ let allow = gix_sec::Permission::Allow;
+ Environment {
+ xdg_config_home: allow,
+ home: allow,
+ git_prefix: allow,
+ ssh_prefix: allow,
+ http_transport: allow,
+ identity: allow,
+ objects: allow,
+ }
+ }
+}
+
+impl Permissions {
+ /// Return permissions that will not include configuration files not owned by the current user,
+ /// but trust system and global configuration files along with those which are owned by the current user.
+ ///
+    /// This allows reading and writing repositories even if they aren't owned by the current user, while avoiding the use of
+ /// anything else that could cause us to write into unknown locations or use programs beyond our `PATH`.
+ pub fn secure() -> Self {
+ Permissions {
+ env: Environment::all(),
+ config: Config::all(),
+ }
+ }
+
+ /// Everything is allowed with this set of permissions, thus we read all configuration and do what git typically
+ /// does with owned repositories.
+ pub fn all() -> Self {
+ Permissions {
+ env: Environment::all(),
+ config: Config::all(),
+ }
+ }
+
+ /// Don't read any but the local git configuration and deny reading any environment variables.
+ pub fn isolated() -> Self {
+ Permissions {
+ config: Config {
+ git_binary: false,
+ system: false,
+ git: false,
+ user: false,
+ env: false,
+ includes: false,
+ },
+ env: {
+ let deny = gix_sec::Permission::Deny;
+ Environment {
+ xdg_config_home: deny,
+ home: deny,
+ ssh_prefix: deny,
+ git_prefix: deny,
+ http_transport: deny,
+ identity: deny,
+ objects: deny,
+ }
+ },
+ }
+ }
+}
+
+impl gix_sec::trust::DefaultForLevel for Permissions {
+ fn default_for_level(level: Trust) -> Self {
+ match level {
+ Trust::Full => Permissions::all(),
+ Trust::Reduced => Permissions::secure(),
+ }
+ }
+}
+
+impl Default for Permissions {
+ fn default() -> Self {
+ Permissions::secure()
+ }
+}
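
A sketch of opening a repository with the most restrictive permission set above; `gix::open_opts()` and `gix::open::Options::isolated()` are assumptions about the surrounding crate API, not part of this patch.

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // `isolated()` corresponds to `Permissions::isolated()`: no system, user or environment
    // configuration is read, which is useful in tests and security-sensitive contexts.
    let repo = gix::open_opts(".", gix::open::Options::isolated())?;
    println!("opened {:?} in isolation", repo.git_dir());
    Ok(())
}
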
diff --git a/vendor/gix/src/repository/reference.rs b/vendor/gix/src/repository/reference.rs
new file mode 100644
index 000000000..e5a8aadcb
--- /dev/null
+++ b/vendor/gix/src/repository/reference.rs
@@ -0,0 +1,243 @@
+use std::convert::TryInto;
+
+use gix_hash::ObjectId;
+use gix_ref::{
+ transaction::{Change, LogChange, PreviousValue, RefEdit, RefLog},
+ FullName, PartialNameRef, Target,
+};
+
+use crate::{bstr::BString, ext::ReferenceExt, reference, Reference};
+
+/// Obtain and alter references comfortably
+impl crate::Repository {
+ /// Create a lightweight tag with given `name` (and without `refs/tags/` prefix) pointing to the given `target`, and return it as reference.
+ ///
+ /// It will be created with `constraint` which is most commonly to [only create it][PreviousValue::MustNotExist]
+ /// or to [force overwriting a possibly existing tag](PreviousValue::Any).
+ pub fn tag_reference(
+ &self,
+ name: impl AsRef<str>,
+ target: impl Into<ObjectId>,
+ constraint: PreviousValue,
+ ) -> Result<Reference<'_>, reference::edit::Error> {
+ let id = target.into();
+ let mut edits = self.edit_reference(RefEdit {
+ change: Change::Update {
+ log: Default::default(),
+ expected: constraint,
+ new: Target::Peeled(id),
+ },
+ name: format!("refs/tags/{}", name.as_ref()).try_into()?,
+ deref: false,
+ })?;
+        assert_eq!(edits.len(), 1, "reference splits should never happen");
+ let edit = edits.pop().expect("exactly one item");
+ Ok(Reference {
+ inner: gix_ref::Reference {
+ name: edit.name,
+ target: id.into(),
+ peeled: None,
+ },
+ repo: self,
+ })
+ }
+
+ /// Returns the currently set namespace for references, or `None` if it is not set.
+ ///
+    /// Namespaces allow partitioning of references, and are configured per `Easy`.
+ pub fn namespace(&self) -> Option<&gix_ref::Namespace> {
+ self.refs.namespace.as_ref()
+ }
+
+ /// Remove the currently set reference namespace and return it, affecting only this `Easy`.
+ pub fn clear_namespace(&mut self) -> Option<gix_ref::Namespace> {
+ self.refs.namespace.take()
+ }
+
+ /// Set the reference namespace to the given value, like `"foo"` or `"foo/bar"`.
+ ///
+ /// Note that this value is shared across all `Easy…` instances as the value is stored in the shared `Repository`.
+ pub fn set_namespace<'a, Name, E>(
+ &mut self,
+ namespace: Name,
+ ) -> Result<Option<gix_ref::Namespace>, gix_validate::refname::Error>
+ where
+ Name: TryInto<&'a PartialNameRef, Error = E>,
+ gix_validate::refname::Error: From<E>,
+ {
+ let namespace = gix_ref::namespace::expand(namespace)?;
+ Ok(self.refs.namespace.replace(namespace))
+ }
+
+ // TODO: more tests or usage
+ /// Create a new reference with `name`, like `refs/heads/branch`, pointing to `target`, adhering to `constraint`
+ /// during creation and writing `log_message` into the reflog. Note that a ref-log will be written even if `log_message` is empty.
+ ///
+ /// The newly created Reference is returned.
+ pub fn reference<Name, E>(
+ &self,
+ name: Name,
+ target: impl Into<ObjectId>,
+ constraint: PreviousValue,
+ log_message: impl Into<BString>,
+ ) -> Result<Reference<'_>, reference::edit::Error>
+ where
+ Name: TryInto<FullName, Error = E>,
+ gix_validate::reference::name::Error: From<E>,
+ {
+ let name = name.try_into().map_err(gix_validate::reference::name::Error::from)?;
+ let id = target.into();
+ let mut edits = self.edit_reference(RefEdit {
+ change: Change::Update {
+ log: LogChange {
+ mode: RefLog::AndReference,
+ force_create_reflog: false,
+ message: log_message.into(),
+ },
+ expected: constraint,
+ new: Target::Peeled(id),
+ },
+ name,
+ deref: false,
+ })?;
+ assert_eq!(
+ edits.len(),
+ 1,
+ "only one reference can be created, splits aren't possible"
+ );
+
+ Ok(gix_ref::Reference {
+ name: edits.pop().expect("exactly one edit").name,
+ target: Target::Peeled(id),
+ peeled: None,
+ }
+ .attach(self))
+ }
+
+ /// Edit a single reference as described in `edit`, and write reference logs as `log_committer`.
+ ///
+ /// One or more `RefEdit`s are returned - symbolic reference splits can cause more edits to be performed. All edits have the previous
+ /// reference values set to the ones encountered at rest after acquiring the respective reference's lock.
+ pub fn edit_reference(&self, edit: RefEdit) -> Result<Vec<RefEdit>, reference::edit::Error> {
+ self.edit_references(Some(edit))
+ }
+
+ /// Edit one or more references as described by their `edits`.
+ /// Note that one can set the committer name for use in the ref-log by temporarily
+ /// [overriding the gix-config][crate::Repository::config_snapshot_mut()].
+ ///
+    /// Returns all reference edits, which might be more than were provided due to the splitting of symbolic references, and
+    /// whose previous (_old_) values are the ones seen in storage after the reference was locked.
+ pub fn edit_references(
+ &self,
+ edits: impl IntoIterator<Item = RefEdit>,
+ ) -> Result<Vec<RefEdit>, reference::edit::Error> {
+ let (file_lock_fail, packed_refs_lock_fail) = self.config.lock_timeout()?;
+ self.refs
+ .transaction()
+ .prepare(edits, file_lock_fail, packed_refs_lock_fail)?
+ .commit(self.committer().transpose()?)
+ .map_err(Into::into)
+ }
+
+ /// Return the repository head, an abstraction to help dealing with the `HEAD` reference.
+ ///
+    /// The `HEAD` reference can be in various states; for more information, see the documentation of [`Head`][crate::Head].
+ pub fn head(&self) -> Result<crate::Head<'_>, reference::find::existing::Error> {
+ let head = self.find_reference("HEAD")?;
+ Ok(match head.inner.target {
+ Target::Symbolic(branch) => match self.find_reference(&branch) {
+ Ok(r) => crate::head::Kind::Symbolic(r.detach()),
+ Err(reference::find::existing::Error::NotFound) => crate::head::Kind::Unborn(branch),
+ Err(err) => return Err(err),
+ },
+ Target::Peeled(target) => crate::head::Kind::Detached {
+ target,
+ peeled: head.inner.peeled,
+ },
+ }
+ .attach(self))
+ }
+
+ /// Resolve the `HEAD` reference, follow and peel its target and obtain its object id.
+ ///
+ /// Note that this may fail for various reasons, most notably because the repository
+ /// is freshly initialized and doesn't have any commits yet.
+ ///
+ /// Also note that the returned id is likely to point to a commit, but could also
+ /// point to a tree or blob. It won't, however, point to a tag as these are always peeled.
+ pub fn head_id(&self) -> Result<crate::Id<'_>, reference::head_id::Error> {
+ let mut head = self.head()?;
+ head.peel_to_id_in_place()
+ .ok_or_else(|| reference::head_id::Error::Unborn {
+ name: head.referent_name().expect("unborn").to_owned(),
+ })?
+ .map_err(Into::into)
+ }
+
+ /// Return the name to the symbolic reference `HEAD` points to, or `None` if the head is detached.
+ ///
+ /// The difference to [`head_ref()`][Self::head_ref()] is that the latter requires the reference to exist,
+    /// whereas here we merely return the name of the possibly unborn reference.
+ pub fn head_name(&self) -> Result<Option<FullName>, reference::find::existing::Error> {
+ Ok(self.head()?.referent_name().map(|n| n.to_owned()))
+ }
+
+ /// Return the reference that `HEAD` points to, or `None` if the head is detached or unborn.
+ pub fn head_ref(&self) -> Result<Option<Reference<'_>>, reference::find::existing::Error> {
+ Ok(self.head()?.try_into_referent())
+ }
+
+ /// Return the commit object the `HEAD` reference currently points to after peeling it fully.
+ ///
+ /// Note that this may fail for various reasons, most notably because the repository
+ /// is freshly initialized and doesn't have any commits yet. It could also fail if the
+ /// head does not point to a commit.
+ pub fn head_commit(&self) -> Result<crate::Commit<'_>, reference::head_commit::Error> {
+ Ok(self.head()?.peel_to_commit_in_place()?)
+ }
+
+ /// Find the reference with the given partial or full `name`, like `main`, `HEAD`, `heads/branch` or `origin/other`,
+ /// or return an error if it wasn't found.
+ ///
+ /// Consider [`try_find_reference(…)`][crate::Repository::try_find_reference()] if the reference might not exist
+ /// without that being considered an error.
+ pub fn find_reference<'a, Name, E>(&self, name: Name) -> Result<Reference<'_>, reference::find::existing::Error>
+ where
+ Name: TryInto<&'a PartialNameRef, Error = E>,
+ gix_ref::file::find::Error: From<E>,
+ {
+ self.try_find_reference(name)?
+ .ok_or(reference::find::existing::Error::NotFound)
+ }
+
+ /// Return a platform for iterating references.
+ ///
+ /// Common kinds of iteration are [all][crate::reference::iter::Platform::all()] or [prefixed][crate::reference::iter::Platform::prefixed()]
+ /// references.
+ pub fn references(&self) -> Result<reference::iter::Platform<'_>, reference::iter::Error> {
+ Ok(reference::iter::Platform {
+ platform: self.refs.iter()?,
+ repo: self,
+ })
+ }
+
+ /// Try to find the reference named `name`, like `main`, `heads/branch`, `HEAD` or `origin/other`, and return it.
+ ///
+ /// Otherwise return `None` if the reference wasn't found.
+ /// If the reference is expected to exist, use [`find_reference()`][crate::Repository::find_reference()].
+ pub fn try_find_reference<'a, Name, E>(&self, name: Name) -> Result<Option<Reference<'_>>, reference::find::Error>
+ where
+ Name: TryInto<&'a PartialNameRef, Error = E>,
+ gix_ref::file::find::Error: From<E>,
+ {
+ let state = self;
+ match state.refs.try_find(name) {
+ Ok(r) => match r {
+ Some(r) => Ok(Some(Reference::from_ref(r, self))),
+ None => Ok(None),
+ },
+ Err(err) => Err(err.into()),
+ }
+ }
+}
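
A sketch of the read-only reference APIs above; `gix::discover()`, the current directory and the branch name `main` are assumptions.

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let repo = gix::discover(".")?;
    // The id `HEAD` peels to, or an error, e.g. for a freshly initialized repository.
    match repo.head_id() {
        Ok(id) => println!("HEAD peels to {id}"),
        Err(err) => println!("HEAD has no commit yet: {err}"),
    }
    // Partial names resolve like in git, so `main` finds `refs/heads/main` if present.
    if let Some(branch) = repo.try_find_reference("main")? {
        println!("found {}", branch.name().as_bstr());
    }
    Ok(())
}
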
diff --git a/vendor/gix/src/repository/remote.rs b/vendor/gix/src/repository/remote.rs
new file mode 100644
index 000000000..e3f210899
--- /dev/null
+++ b/vendor/gix/src/repository/remote.rs
@@ -0,0 +1,199 @@
+#![allow(clippy::result_large_err)]
+use std::convert::TryInto;
+
+use crate::{bstr::BStr, config, remote, remote::find, Remote};
+
+impl crate::Repository {
+ /// Create a new remote available at the given `url`.
+ ///
+ /// It's configured to fetch included tags by default, similar to git.
+ /// See [`with_fetch_tags(…)`][Remote::with_fetch_tags()] for a way to change it.
+ pub fn remote_at<Url, E>(&self, url: Url) -> Result<Remote<'_>, remote::init::Error>
+ where
+ Url: TryInto<gix_url::Url, Error = E>,
+ gix_url::parse::Error: From<E>,
+ {
+ Remote::from_fetch_url(url, true, self)
+ }
+
+ /// Create a new remote available at the given `url` similarly to [`remote_at()`][crate::Repository::remote_at()],
+ /// but don't rewrite the url according to rewrite rules.
+ /// This eliminates a failure mode in case the rewritten URL is faulty, allowing to selectively [apply rewrite
+ /// rules][Remote::rewrite_urls()] later and do so non-destructively.
+ pub fn remote_at_without_url_rewrite<Url, E>(&self, url: Url) -> Result<Remote<'_>, remote::init::Error>
+ where
+ Url: TryInto<gix_url::Url, Error = E>,
+ gix_url::parse::Error: From<E>,
+ {
+ Remote::from_fetch_url(url, false, self)
+ }
+
+ /// Find the remote with the given `name_or_url` or report an error, similar to [`try_find_remote(…)`][Self::try_find_remote()].
+ ///
+ /// Note that we will obtain remotes only if we deem them [trustworthy][crate::open::Options::filter_config_section()].
+ pub fn find_remote<'a>(&self, name_or_url: impl Into<&'a BStr>) -> Result<Remote<'_>, find::existing::Error> {
+ let name_or_url = name_or_url.into();
+ Ok(self
+ .try_find_remote(name_or_url)
+ .ok_or_else(|| find::existing::Error::NotFound {
+ name: name_or_url.into(),
+ })??)
+ }
+
+ /// Find the default remote as configured, or `None` if no such configuration could be found.
+ ///
+ /// See [remote_default_name()][Self::remote_default_name()] for more information on the `direction` parameter.
+ pub fn find_default_remote(
+ &self,
+ direction: remote::Direction,
+ ) -> Option<Result<Remote<'_>, find::existing::Error>> {
+ self.remote_default_name(direction)
+ .map(|name| self.find_remote(name.as_ref()))
+ }
+
+ /// Find the remote with the given `name_or_url` or return `None` if it doesn't exist, for the purpose of fetching or pushing
+ /// data to a remote.
+ ///
+ /// There are various error kinds related to partial information or incorrectly formatted URLs or ref-specs.
+ /// Also note that the created `Remote` may have neither fetch nor push ref-specs set at all.
+ ///
+ /// Note that ref-specs are de-duplicated right away which may change their order. This doesn't affect matching in any way
+ /// as negations/excludes are applied after includes.
+ ///
+ /// We will only include information if we deem it [trustworthy][crate::open::Options::filter_config_section()].
+ pub fn try_find_remote<'a>(&self, name_or_url: impl Into<&'a BStr>) -> Option<Result<Remote<'_>, find::Error>> {
+ self.try_find_remote_inner(name_or_url, true)
+ }
+
+    /// Similar to [try_find_remote()][Self::try_find_remote()], but skips rewriting URLs, which removes a failure mode in case
+    /// rewritten URLs turn out to be invalid.
+    /// Use this in conjunction with [`Remote::rewrite_urls()`] to non-destructively apply the rules while keeping failed URLs unchanged.
+ pub fn try_find_remote_without_url_rewrite<'a>(
+ &self,
+ name_or_url: impl Into<&'a BStr>,
+ ) -> Option<Result<Remote<'_>, find::Error>> {
+ self.try_find_remote_inner(name_or_url, false)
+ }
+
+ fn try_find_remote_inner<'a>(
+ &self,
+ name_or_url: impl Into<&'a BStr>,
+ rewrite_urls: bool,
+ ) -> Option<Result<Remote<'_>, find::Error>> {
+ fn config_spec<T: config::tree::keys::Validate>(
+ specs: Vec<std::borrow::Cow<'_, BStr>>,
+ name_or_url: &BStr,
+ key: &'static config::tree::keys::Any<T>,
+ op: gix_refspec::parse::Operation,
+ ) -> Result<Vec<gix_refspec::RefSpec>, find::Error> {
+ let kind = key.name;
+ specs
+ .into_iter()
+ .map(|spec| {
+ key.try_into_refspec(spec, op).map_err(|err| find::Error::RefSpec {
+ remote_name: name_or_url.into(),
+ kind,
+ source: err,
+ })
+ })
+ .collect::<Result<Vec<_>, _>>()
+ .map(|mut specs| {
+ specs.sort();
+ specs.dedup();
+ specs
+ })
+ }
+
+ let mut filter = self.filter_config_section();
+ let name_or_url = name_or_url.into();
+ let mut config_url = |key: &'static config::tree::keys::Url, kind: &'static str| {
+ self.config
+ .resolved
+ .string_filter("remote", Some(name_or_url), key.name, &mut filter)
+ .map(|url| {
+ key.try_into_url(url).map_err(|err| find::Error::Url {
+ kind,
+ remote_name: name_or_url.into(),
+ source: err,
+ })
+ })
+ };
+ let url = config_url(&config::tree::Remote::URL, "fetch");
+ let push_url = config_url(&config::tree::Remote::PUSH_URL, "push");
+ let config = &self.config.resolved;
+
+ let fetch_specs = config
+ .strings_filter("remote", Some(name_or_url), "fetch", &mut filter)
+ .map(|specs| {
+ config_spec(
+ specs,
+ name_or_url,
+ &config::tree::Remote::FETCH,
+ gix_refspec::parse::Operation::Fetch,
+ )
+ });
+ let push_specs = config
+ .strings_filter("remote", Some(name_or_url), "push", &mut filter)
+ .map(|specs| {
+ config_spec(
+ specs,
+ name_or_url,
+ &config::tree::Remote::PUSH,
+ gix_refspec::parse::Operation::Push,
+ )
+ });
+ let fetch_tags = config
+ .string_filter("remote", Some(name_or_url), "tagOpt", &mut filter)
+ .map(|value| {
+ config::tree::Remote::TAG_OPT
+ .try_into_tag_opt(value)
+ .map_err(Into::into)
+ });
+ let fetch_tags = match fetch_tags {
+ Some(Ok(v)) => v,
+ Some(Err(err)) => return Some(Err(err)),
+ None => Default::default(),
+ };
+
+ match (url, fetch_specs, push_url, push_specs) {
+ (None, None, None, None) => None,
+ (None, _, None, _) => Some(Err(find::Error::UrlMissing)),
+ (url, fetch_specs, push_url, push_specs) => {
+ let url = match url {
+ Some(Ok(v)) => Some(v),
+ Some(Err(err)) => return Some(Err(err)),
+ None => None,
+ };
+ let push_url = match push_url {
+ Some(Ok(v)) => Some(v),
+ Some(Err(err)) => return Some(Err(err)),
+ None => None,
+ };
+ let fetch_specs = match fetch_specs {
+ Some(Ok(v)) => v,
+ Some(Err(err)) => return Some(Err(err)),
+ None => Vec::new(),
+ };
+ let push_specs = match push_specs {
+ Some(Ok(v)) => v,
+ Some(Err(err)) => return Some(Err(err)),
+ None => Vec::new(),
+ };
+
+ Some(
+ Remote::from_preparsed_config(
+ Some(name_or_url.to_owned()),
+ url,
+ push_url,
+ fetch_specs,
+ push_specs,
+ rewrite_urls,
+ fetch_tags,
+ self,
+ )
+ .map_err(Into::into),
+ )
+ }
+ }
+ }
+}
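To illustrate how the remote lookup methods above fit together, here is a hedged sketch. The `url()` and `name()` accessors on `Remote` and the `gix::open()` entry point are assumed and not shown in this diff; `origin` is a hypothetical remote name.

```rust
fn show_remotes(repo: &gix::Repository) -> Result<(), Box<dyn std::error::Error>> {
    // `find_remote()` errors if the remote is unknown, `try_find_remote()` yields `None` instead.
    let origin = repo.find_remote("origin")?;
    println!("fetch url: {:?}", origin.url(gix::remote::Direction::Fetch));

    // The remote git would use for fetching, if one is configured.
    if let Some(remote) = repo.find_default_remote(gix::remote::Direction::Fetch) {
        println!("default fetch remote: {:?}", remote?.name());
    }

    // An ad-hoc remote that exists only in memory, useful for one-off fetches.
    let _adhoc = repo.remote_at("https://example.com/repo.git")?;
    Ok(())
}
```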
diff --git a/vendor/gix/src/repository/revision.rs b/vendor/gix/src/repository/revision.rs
new file mode 100644
index 000000000..3018c2be8
--- /dev/null
+++ b/vendor/gix/src/repository/revision.rs
@@ -0,0 +1,42 @@
+use crate::{bstr::BStr, revision, Id};
+
+/// Methods for resolving revisions by spec or working with the commit graph.
+impl crate::Repository {
+ /// Parse a revision specification and turn it into the object(s) it describes, similar to `git rev-parse`.
+ ///
+ /// # Deviation
+ ///
+ /// - `@` actually stands for `HEAD`, whereas `git` resolves it to the object pointed to by `HEAD` without making the
+ /// `HEAD` ref available for lookups.
+ pub fn rev_parse<'a>(&self, spec: impl Into<&'a BStr>) -> Result<revision::Spec<'_>, revision::spec::parse::Error> {
+ revision::Spec::from_bstr(
+ spec,
+ self,
+ revision::spec::parse::Options {
+ object_kind_hint: self.config.object_kind_hint,
+ ..Default::default()
+ },
+ )
+ }
+
+    /// Parse a revision specification and return the single object id it resolves to.
+ pub fn rev_parse_single<'repo, 'a>(
+ &'repo self,
+ spec: impl Into<&'a BStr>,
+ ) -> Result<Id<'repo>, revision::spec::parse::single::Error> {
+ let spec = spec.into();
+ self.rev_parse(spec)?
+ .single()
+ .ok_or(revision::spec::parse::single::Error::RangedRev { spec: spec.into() })
+ }
+
+ /// Create the baseline for a revision walk by initializing it with the `tips` to start iterating on.
+ ///
+ /// It can be configured further before starting the actual walk.
+ pub fn rev_walk(
+ &self,
+ tips: impl IntoIterator<Item = impl Into<gix_hash::ObjectId>>,
+ ) -> revision::walk::Platform<'_> {
+ revision::walk::Platform::new(tips, self)
+ }
+}
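A short usage sketch for the rev-spec resolution methods above, assuming a `repo` with at least two commits; error handling is boxed for brevity.

```rust
fn resolve_specs(repo: &gix::Repository) -> Result<(), Box<dyn std::error::Error>> {
    // Ranges and other complex specs are supported, similar to `git rev-parse`.
    let spec = repo.rev_parse("HEAD~2..HEAD")?;
    println!("parsed: {:?}", spec.detach());

    // When exactly one object is expected, `rev_parse_single()` avoids dealing with ranges.
    let id = repo.rev_parse_single("@")?; // `@` resolves to `HEAD`, see the deviation note above
    println!("HEAD is at {}", id.detach());
    Ok(())
}
```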
diff --git a/vendor/gix/src/repository/snapshots.rs b/vendor/gix/src/repository/snapshots.rs
new file mode 100644
index 000000000..6933dc9c6
--- /dev/null
+++ b/vendor/gix/src/repository/snapshots.rs
@@ -0,0 +1,109 @@
+impl crate::Repository {
+ // TODO: tests
+    /// Similar to [`open_mailmap_into()`][crate::Repository::open_mailmap_into()], but ignores all errors and returns at worst
+    /// an empty mailmap, e.g. if there is no mailmap or if there were errors loading it.
+    ///
+    /// This mirrors typical usage within git, which works with whatever is there and treats the mailmap as optional,
+    /// never considering an unpopulated mailmap a reason to abort an operation.
+ pub fn open_mailmap(&self) -> gix_mailmap::Snapshot {
+ let mut out = gix_mailmap::Snapshot::default();
+ self.open_mailmap_into(&mut out).ok();
+ out
+ }
+
+ // TODO: tests
+ /// Try to merge mailmaps from the following locations into `target`:
+ ///
+ /// - read the `.mailmap` file without following symlinks from the working tree, if present
+ /// - OR read `HEAD:.mailmap` if this repository is bare (i.e. has no working tree), if the `mailmap.blob` is not set.
+ /// - read the mailmap as configured in `mailmap.blob`, if set.
+ /// - read the file as configured by `mailmap.file`, following symlinks, if set.
+ ///
+ /// Only the first error will be reported, and as many source mailmaps will be merged into `target` as possible.
+ /// Parsing errors will be ignored.
+ pub fn open_mailmap_into(&self, target: &mut gix_mailmap::Snapshot) -> Result<(), crate::mailmap::load::Error> {
+ let mut err = None::<crate::mailmap::load::Error>;
+ let mut buf = Vec::new();
+ let mut blob_id = self
+ .config
+ .resolved
+ .raw_value("mailmap", None, "blob")
+ .ok()
+ .and_then(|spec| {
+ // TODO: actually resolve this as spec (once we can do that)
+ gix_hash::ObjectId::from_hex(spec.as_ref())
+ .map_err(|e| err.get_or_insert(e.into()))
+ .ok()
+ });
+ match self.work_dir() {
+ None => {
+ // TODO: replace with ref-spec `HEAD:.mailmap` for less verbose way of getting the blob id
+ blob_id = blob_id.or_else(|| {
+ self.head().ok().and_then(|mut head| {
+ let commit = head.peel_to_commit_in_place().ok()?;
+ let tree = commit.tree().ok()?;
+ tree.lookup_entry(Some(".mailmap")).ok()?.map(|e| e.object_id())
+ })
+ });
+ }
+ Some(root) => {
+ if let Ok(mut file) = gix_features::fs::open_options_no_follow()
+ .read(true)
+ .open(root.join(".mailmap"))
+ .map_err(|e| {
+ if e.kind() != std::io::ErrorKind::NotFound {
+ err.get_or_insert(e.into());
+ }
+ })
+ {
+ buf.clear();
+ std::io::copy(&mut file, &mut buf)
+ .map_err(|e| err.get_or_insert(e.into()))
+ .ok();
+ target.merge(gix_mailmap::parse_ignore_errors(&buf));
+ }
+ }
+ }
+
+ if let Some(blob) = blob_id.and_then(|id| self.find_object(id).map_err(|e| err.get_or_insert(e.into())).ok()) {
+ target.merge(gix_mailmap::parse_ignore_errors(&blob.data));
+ }
+
+ let configured_path = self
+ .config
+ .resolved
+ .value::<gix_config::Path<'_>>("mailmap", None, "file")
+ .ok()
+ .and_then(|path| {
+ let install_dir = self.install_dir().ok()?;
+ let home = self.config.home_dir();
+ match path.interpolate(gix_config::path::interpolate::Context {
+ git_install_dir: Some(install_dir.as_path()),
+ home_dir: home.as_deref(),
+ home_for_user: if self.options.git_dir_trust.expect("trust is set") == gix_sec::Trust::Full {
+ Some(gix_config::path::interpolate::home_for_user)
+ } else {
+ None
+ },
+ }) {
+ Ok(path) => Some(path),
+ Err(e) => {
+ err.get_or_insert(e.into());
+ None
+ }
+ }
+ });
+
+ if let Some(mut file) =
+ configured_path.and_then(|path| std::fs::File::open(path).map_err(|e| err.get_or_insert(e.into())).ok())
+ {
+ buf.clear();
+ std::io::copy(&mut file, &mut buf)
+ .map_err(|e| err.get_or_insert(e.into()))
+ .ok();
+ target.merge(gix_mailmap::parse_ignore_errors(&buf));
+ }
+
+ err.map(Err).unwrap_or(Ok(()))
+ }
+}
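The two mailmap entry points differ only in error handling, as the following sketch shows; it uses `gix_mailmap::Snapshot` directly, just like the code above.

```rust
fn load_mailmap(repo: &gix::Repository) {
    // Infallible variant: errors are swallowed and at worst an empty snapshot is returned.
    let snapshot = repo.open_mailmap();

    // Fallible variant merging into an existing snapshot; only the first error is reported.
    let mut target = gix_mailmap::Snapshot::default();
    if let Err(err) = repo.open_mailmap_into(&mut target) {
        eprintln!("mailmap could not be fully loaded: {err}");
    }
    drop(snapshot);
}
```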
diff --git a/vendor/gix/src/repository/state.rs b/vendor/gix/src/repository/state.rs
new file mode 100644
index 000000000..4034fe349
--- /dev/null
+++ b/vendor/gix/src/repository/state.rs
@@ -0,0 +1,44 @@
+use crate::state;
+
+impl crate::Repository {
+    /// Returns the status of an in-progress operation on a repository, or [`None`]
+    /// if no operation is currently in progress.
+    ///
+    /// Not to be confused with the repository's 'status'.
+ pub fn state(&self) -> Option<state::InProgress> {
+ let git_dir = self.path();
+
+ // This is modeled on the logic from wt_status_get_state in git's wt-status.c and
+ // ps1 from gix-prompt.sh.
+
+ if git_dir.join("rebase-apply/applying").is_file() {
+ Some(state::InProgress::ApplyMailbox)
+ } else if git_dir.join("rebase-apply/rebasing").is_file() {
+ Some(state::InProgress::Rebase)
+ } else if git_dir.join("rebase-apply").is_dir() {
+ Some(state::InProgress::ApplyMailboxRebase)
+ } else if git_dir.join("rebase-merge/interactive").is_file() {
+ Some(state::InProgress::RebaseInteractive)
+ } else if git_dir.join("rebase-merge").is_dir() {
+ Some(state::InProgress::Rebase)
+ } else if git_dir.join("CHERRY_PICK_HEAD").is_file() {
+ if git_dir.join("sequencer/todo").is_file() {
+ Some(state::InProgress::CherryPickSequence)
+ } else {
+ Some(state::InProgress::CherryPick)
+ }
+ } else if git_dir.join("MERGE_HEAD").is_file() {
+ Some(state::InProgress::Merge)
+ } else if git_dir.join("BISECT_LOG").is_file() {
+ Some(state::InProgress::Bisect)
+ } else if git_dir.join("REVERT_HEAD").is_file() {
+ if git_dir.join("sequencer/todo").is_file() {
+ Some(state::InProgress::RevertSequence)
+ } else {
+ Some(state::InProgress::Revert)
+ }
+ } else {
+ None
+ }
+ }
+}
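A sketch of how a caller might surface the state detected above, e.g. for a prompt. It assumes `state::InProgress` is reachable as `gix::state::InProgress` and implements no more than what is shown above; the wildcard arm keeps the match robust against variants not handled here.

```rust
use gix::state::InProgress;

fn describe_state(repo: &gix::Repository) -> &'static str {
    match repo.state() {
        None => "no operation in progress",
        Some(InProgress::Merge) => "merge in progress",
        Some(InProgress::Rebase | InProgress::RebaseInteractive) => "rebase in progress",
        Some(InProgress::Bisect) => "bisect in progress",
        Some(_) => "another operation (cherry-pick, revert, am, ...) in progress",
    }
}
```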
diff --git a/vendor/gix/src/repository/thread_safe.rs b/vendor/gix/src/repository/thread_safe.rs
new file mode 100644
index 000000000..7c89aee60
--- /dev/null
+++ b/vendor/gix/src/repository/thread_safe.rs
@@ -0,0 +1,66 @@
+mod access {
+ use crate::Kind;
+
+ impl crate::ThreadSafeRepository {
+ /// Return the kind of repository, either bare or one with a work tree.
+ pub fn kind(&self) -> Kind {
+ match self.work_tree {
+ Some(_) => Kind::WorkTree {
+ is_linked: crate::worktree::id(self.git_dir(), self.common_dir.is_some()).is_some(),
+ },
+ None => Kind::Bare,
+ }
+ }
+
+        /// Add thread-local state to obtain an easy-to-use thread-local repository with the most convenient API.
+ pub fn to_thread_local(&self) -> crate::Repository {
+ self.into()
+ }
+ }
+}
+
+mod location {
+
+ impl crate::ThreadSafeRepository {
+ /// The path to the `.git` directory itself, or equivalent if this is a bare repository.
+ pub fn path(&self) -> &std::path::Path {
+ self.git_dir()
+ }
+
+ /// Return the path to the repository itself, containing objects, references, configuration, and more.
+ ///
+        /// Synonymous with [`path()`][crate::ThreadSafeRepository::path()].
+ pub fn git_dir(&self) -> &std::path::Path {
+ self.refs.git_dir()
+ }
+
+ /// Return the path to the working directory if this is not a bare repository.
+ pub fn work_dir(&self) -> Option<&std::path::Path> {
+ self.work_tree.as_deref()
+ }
+
+ /// Return the path to the directory containing all objects.
+ pub fn objects_dir(&self) -> &std::path::Path {
+ self.objects.path()
+ }
+ }
+}
+
+mod impls {
+ impl std::fmt::Debug for crate::ThreadSafeRepository {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ write!(
+ f,
+ "Repository(git = '{}', working_tree: {:?}",
+ self.git_dir().display(),
+ self.work_tree
+ )
+ }
+ }
+
+ impl PartialEq<crate::ThreadSafeRepository> for crate::ThreadSafeRepository {
+ fn eq(&self, other: &crate::ThreadSafeRepository) -> bool {
+ self.git_dir() == other.git_dir() && self.work_tree == other.work_tree
+ }
+ }
+}
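The `to_thread_local()` conversion above is the intended way to use one repository from many threads, roughly as sketched here. `Arc` is only used to share the `ThreadSafeRepository` value itself, and `Kind` is assumed to implement `Debug`.

```rust
use std::{sync::Arc, thread};

fn parallel_access(shared: gix::ThreadSafeRepository) {
    println!("repository kind: {:?}", shared.kind());
    let shared = Arc::new(shared);
    let handles: Vec<_> = (0..4)
        .map(|_| {
            let shared = Arc::clone(&shared);
            thread::spawn(move || {
                // Each thread derives its own cheap `Repository` with thread-local caches.
                let repo = shared.to_thread_local();
                println!("git dir: {}", repo.path().display());
            })
        })
        .collect();
    for handle in handles {
        handle.join().expect("worker thread does not panic");
    }
}
```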
diff --git a/vendor/gix/src/repository/worktree.rs b/vendor/gix/src/repository/worktree.rs
new file mode 100644
index 000000000..2de31bc86
--- /dev/null
+++ b/vendor/gix/src/repository/worktree.rs
@@ -0,0 +1,119 @@
+use crate::{worktree, Worktree};
+
+/// Worktree iteration
+impl crate::Repository {
+    /// Return a list of all _linked_ worktrees, sorted by their private git dir path, as lightweight proxies.
+    ///
+    /// Note that these need additional processing to become usable, but they provide a first glimpse at typical worktree information.
+ pub fn worktrees(&self) -> std::io::Result<Vec<worktree::Proxy<'_>>> {
+ let mut res = Vec::new();
+ let iter = match std::fs::read_dir(self.common_dir().join("worktrees")) {
+ Ok(iter) => iter,
+ Err(err) if err.kind() == std::io::ErrorKind::NotFound => return Ok(res),
+ Err(err) => return Err(err),
+ };
+ for entry in iter {
+ let entry = entry?;
+ let worktree_git_dir = entry.path();
+ if worktree_git_dir.join("gitdir").is_file() {
+ res.push(worktree::Proxy {
+ parent: self,
+ git_dir: worktree_git_dir,
+ })
+ }
+ }
+ res.sort_by(|a, b| a.git_dir.cmp(&b.git_dir));
+ Ok(res)
+ }
+}
+
+/// Interact with individual worktrees and their information.
+impl crate::Repository {
+ /// Return the repository owning the main worktree, typically from a linked worktree.
+ ///
+ /// Note that it might be the one that is currently open if this repository doesn't point to a linked worktree.
+ /// Also note that the main repo might be bare.
+ #[allow(clippy::result_large_err)]
+ pub fn main_repo(&self) -> Result<crate::Repository, crate::open::Error> {
+ crate::ThreadSafeRepository::open_opts(self.common_dir(), self.options.clone()).map(Into::into)
+ }
+
+    /// Return the currently set worktree if there is one, acting as a platform that provides a validated worktree base path.
+    ///
+    /// Note that this is `None` if the repository is `bare` and the parent [`Repository`][crate::Repository] was instantiated without
+    /// a registered worktree in the current working dir.
+ pub fn worktree(&self) -> Option<Worktree<'_>> {
+ self.work_dir().map(|path| Worktree { parent: self, path })
+ }
+
+ /// Return true if this repository is bare, and has no main work tree.
+ ///
+    /// This is not to be confused with the [`worktree()`][crate::Repository::worktree()] worktree, which may exist if this instance
+    /// was opened in a worktree that was created separately.
+ pub fn is_bare(&self) -> bool {
+ self.config.is_bare && self.work_dir().is_none()
+ }
+
+ /// Open a new copy of the index file and decode it entirely.
+ ///
+ /// It will use the `index.threads` configuration key to learn how many threads to use.
+ /// Note that it may fail if there is no index.
+ // TODO: test
+ pub fn open_index(&self) -> Result<gix_index::File, worktree::open_index::Error> {
+ let thread_limit = self
+ .config
+ .resolved
+ .boolean("index", None, "threads")
+ .map(|res| {
+ res.map(|value| usize::from(!value)).or_else(|err| {
+ gix_config::Integer::try_from(err.input.as_ref())
+ .map_err(|err| worktree::open_index::Error::ConfigIndexThreads {
+ value: err.input.clone(),
+ err,
+ })
+ .map(|value| value.to_decimal().and_then(|v| v.try_into().ok()).unwrap_or(1))
+ })
+ })
+ .transpose()?;
+ gix_index::File::at(
+ self.index_path(),
+ self.object_hash(),
+ gix_index::decode::Options {
+ thread_limit,
+ min_extension_block_in_bytes_for_threading: 0,
+ expected_checksum: None,
+ },
+ )
+ .map_err(Into::into)
+ }
+
+ /// Return a shared worktree index which is updated automatically if the in-memory snapshot has become stale as the underlying file
+ /// on disk has changed.
+ ///
+ /// The index file is shared across all clones of this repository.
+ pub fn index(&self) -> Result<worktree::Index, worktree::open_index::Error> {
+ self.index
+ .recent_snapshot(
+ || self.index_path().metadata().and_then(|m| m.modified()).ok(),
+ || {
+ self.open_index().map(Some).or_else(|err| match err {
+ worktree::open_index::Error::IndexFile(gix_index::file::init::Error::Io(err))
+ if err.kind() == std::io::ErrorKind::NotFound =>
+ {
+ Ok(None)
+ }
+ err => Err(err),
+ })
+ },
+ )
+ .and_then(|opt| match opt {
+ Some(index) => Ok(index),
+ None => Err(worktree::open_index::Error::IndexFile(
+ gix_index::file::init::Error::Io(std::io::Error::new(
+ std::io::ErrorKind::NotFound,
+ format!("Could not find index file at {:?} for opening.", self.index_path()),
+ )),
+ )),
+ })
+ }
+}
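A sketch tying the worktree and index accessors above together. The `git_dir()` accessor on `worktree::Proxy` is assumed to exist and is not shown in this diff.

```rust
fn inspect_worktrees(repo: &gix::Repository) -> Result<(), Box<dyn std::error::Error>> {
    // Linked worktrees only; an empty list means just the main worktree (or none at all) exists.
    for proxy in repo.worktrees()? {
        println!("linked worktree git dir: {}", proxy.git_dir().display());
    }

    println!("bare repository: {}", repo.is_bare());

    // Decode a fresh copy of the index; `index.threads` controls the thread count.
    let index = repo.open_index()?;
    println!("index entries: {}", index.entries().len());
    Ok(())
}
```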
diff --git a/vendor/gix/src/revision/mod.rs b/vendor/gix/src/revision/mod.rs
new file mode 100644
index 000000000..4b11a8766
--- /dev/null
+++ b/vendor/gix/src/revision/mod.rs
@@ -0,0 +1,27 @@
+//! A revision is the generalized notion of a commit.
+//!
+//! This module provides utilities to walk graphs of revisions and specify revisions and ranges of revisions.
+
+pub use gix_revision as plumbing;
+
+///
+pub mod walk;
+pub use walk::iter::Walk;
+
+///
+pub mod spec;
+
+/// The specification of a revision as parsed from a revision specification like `HEAD@{1}` or `v1.2.3...main`.
+/// It's typically created by [`repo.rev_parse()`][crate::Repository::rev_parse()].
+///
+/// See the [official git documentation](https://git-scm.com/docs/git-rev-parse#_specifying_revisions) for reference on how
+/// to specify revisions and revision ranges.
+#[derive(Clone, Debug)]
+pub struct Spec<'repo> {
+ pub(crate) inner: gix_revision::Spec,
+ /// The first name of a reference as seen while parsing a `RevSpec`, for completeness.
+ pub(crate) first_ref: Option<gix_ref::Reference>,
+ /// The second name of a reference as seen while parsing a `RevSpec`, for completeness.
+ pub(crate) second_ref: Option<gix_ref::Reference>,
+ pub(crate) repo: &'repo crate::Repository,
+}
diff --git a/vendor/gix/src/revision/spec/mod.rs b/vendor/gix/src/revision/spec/mod.rs
new file mode 100644
index 000000000..a6a6eb739
--- /dev/null
+++ b/vendor/gix/src/revision/spec/mod.rs
@@ -0,0 +1,90 @@
+use crate::{ext::ReferenceExt, revision::Spec, Id, Reference};
+
+///
+pub mod parse;
+
+mod impls {
+ use std::ops::{Deref, DerefMut};
+
+ use crate::revision::Spec;
+
+ impl<'repo> Deref for Spec<'repo> {
+ type Target = gix_revision::Spec;
+
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+ }
+
+ impl<'repo> DerefMut for Spec<'repo> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+ }
+
+ impl<'repo> PartialEq for Spec<'repo> {
+ fn eq(&self, other: &Self) -> bool {
+ self.inner == other.inner
+ }
+ }
+
+ impl<'repo> Eq for Spec<'repo> {}
+}
+
+/// Initialization
+impl<'repo> Spec<'repo> {
+ /// Create a single specification which points to `id`.
+ pub fn from_id(id: Id<'repo>) -> Self {
+ Spec {
+ inner: gix_revision::Spec::Include(id.inner),
+ repo: id.repo,
+ first_ref: None,
+ second_ref: None,
+ }
+ }
+}
+
+/// Access
+impl<'repo> Spec<'repo> {
+ /// Detach the `Repository` from this instance, leaving only plain data that can be moved freely and serialized.
+ pub fn detach(self) -> gix_revision::Spec {
+ self.inner
+ }
+
+ /// Some revision specifications leave information about references which are returned as `(from-ref, to-ref)` here, e.g.
+ /// `HEAD@{-1}..main` might be `(Some(refs/heads/previous-branch), Some(refs/heads/main))`,
+ /// or `@` returns `(Some(refs/heads/main), None)`.
+ pub fn into_references(self) -> (Option<Reference<'repo>>, Option<Reference<'repo>>) {
+ let repo = self.repo;
+ (
+ self.first_ref.map(|r| r.attach(repo)),
+ self.second_ref.map(|r| r.attach(repo)),
+ )
+ }
+
+    /// Return the first reference we encountered while resolving the rev-spec, or `None` if a short hash
+    /// was used. For example, `@` might yield `Some(HEAD)`, but `abcd` yields `None`.
+ pub fn first_reference(&self) -> Option<&gix_ref::Reference> {
+ self.first_ref.as_ref()
+ }
+
+    /// Return the second reference we encountered while resolving the rev-spec, or `None` if a short hash
+    /// was used or there was no second reference. For example, `..@` might yield `Some(HEAD)`, but `..abcd` or `@`
+    /// yields `None`.
+ pub fn second_reference(&self) -> Option<&gix_ref::Reference> {
+ self.second_ref.as_ref()
+ }
+
+ /// Return the single included object represented by this instance, or `None` if it is a range of any kind.
+ pub fn single(&self) -> Option<Id<'repo>> {
+ match self.inner {
+ gix_revision::Spec::Include(id) | gix_revision::Spec::ExcludeParents(id) => {
+ Id::from_id(id, self.repo).into()
+ }
+ gix_revision::Spec::Exclude(_)
+ | gix_revision::Spec::Range { .. }
+ | gix_revision::Spec::Merge { .. }
+ | gix_revision::Spec::IncludeOnlyParents { .. } => None,
+ }
+ }
+}
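The accessors above are easiest to see on a parsed range; this sketch assumes a repository with a `main` branch and at least one commit on `HEAD`.

```rust
fn inspect_spec(repo: &gix::Repository) -> Result<(), Box<dyn std::error::Error>> {
    let spec = repo.rev_parse("main..HEAD")?;

    // A range does not denote a single object, so `single()` yields `None` here.
    assert!(spec.single().is_none());

    // References seen while parsing come back attached to `repo`, ready for further use.
    let (from_ref, to_ref) = spec.into_references();
    if let Some(r) = from_ref {
        println!("range starts at {}", r.name().as_bstr());
    }
    if let Some(r) = to_ref {
        println!("range ends at {}", r.name().as_bstr());
    }
    Ok(())
}
```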
diff --git a/vendor/gix/src/revision/spec/parse/delegate/mod.rs b/vendor/gix/src/revision/spec/parse/delegate/mod.rs
new file mode 100644
index 000000000..78e4ab9ee
--- /dev/null
+++ b/vendor/gix/src/revision/spec/parse/delegate/mod.rs
@@ -0,0 +1,256 @@
+use std::collections::HashSet;
+
+use gix_hash::ObjectId;
+use gix_revision::spec::{
+ parse,
+ parse::delegate::{self},
+};
+use smallvec::SmallVec;
+
+use super::{Delegate, Error, ObjectKindHint};
+use crate::{
+ ext::{ObjectIdExt, ReferenceExt},
+ Repository,
+};
+
+type Replacements = SmallVec<[(ObjectId, ObjectId); 1]>;
+
+impl<'repo> Delegate<'repo> {
+ pub fn new(repo: &'repo Repository, opts: crate::revision::spec::parse::Options) -> Self {
+ Delegate {
+ refs: Default::default(),
+ objs: Default::default(),
+ ambiguous_objects: Default::default(),
+ idx: 0,
+ kind: None,
+ err: Vec::new(),
+ prefix: Default::default(),
+ last_call_was_disambiguate_prefix: Default::default(),
+ opts,
+ repo,
+ }
+ }
+
+ pub fn into_err(mut self) -> Error {
+ let repo = self.repo;
+ for err in self
+ .ambiguous_objects
+ .iter_mut()
+ .zip(self.prefix)
+ .filter_map(|(a, b)| a.take().filter(|candidates| candidates.len() > 1).zip(b))
+ .map(|(candidates, prefix)| Error::ambiguous(candidates, prefix, repo))
+ .rev()
+ {
+ self.err.insert(0, err);
+ }
+ Error::from_errors(self.err)
+ }
+
+ pub fn into_rev_spec(mut self) -> Result<crate::revision::Spec<'repo>, Error> {
+ fn zero_or_one_objects_or_ambiguity_err(
+ mut candidates: [Option<HashSet<ObjectId>>; 2],
+ prefix: [Option<gix_hash::Prefix>; 2],
+ mut errors: Vec<Error>,
+ repo: &Repository,
+ ) -> Result<[Option<ObjectId>; 2], Error> {
+ let mut out = [None, None];
+ for ((candidates, prefix), out) in candidates.iter_mut().zip(prefix).zip(out.iter_mut()) {
+ let candidates = candidates.take();
+ match candidates {
+ None => *out = None,
+ Some(candidates) => {
+ match candidates.len() {
+ 0 => unreachable!(
+ "BUG: let's avoid still being around if no candidate matched the requirements"
+ ),
+ 1 => {
+ *out = candidates.into_iter().next();
+ }
+ _ => {
+ errors.insert(
+ 0,
+ Error::ambiguous(candidates, prefix.expect("set when obtaining candidates"), repo),
+ );
+ return Err(Error::from_errors(errors));
+ }
+ };
+ }
+ };
+ }
+ Ok(out)
+ }
+
+ fn kind_to_spec(
+ kind: Option<gix_revision::spec::Kind>,
+ [first, second]: [Option<ObjectId>; 2],
+ ) -> Result<gix_revision::Spec, Error> {
+ use gix_revision::spec::Kind::*;
+ Ok(match kind.unwrap_or_default() {
+ IncludeReachable => gix_revision::Spec::Include(first.ok_or(Error::Malformed)?),
+ ExcludeReachable => gix_revision::Spec::Exclude(first.ok_or(Error::Malformed)?),
+ RangeBetween => gix_revision::Spec::Range {
+ from: first.ok_or(Error::Malformed)?,
+ to: second.ok_or(Error::Malformed)?,
+ },
+ ReachableToMergeBase => gix_revision::Spec::Merge {
+ theirs: first.ok_or(Error::Malformed)?,
+ ours: second.ok_or(Error::Malformed)?,
+ },
+ IncludeReachableFromParents => gix_revision::Spec::IncludeOnlyParents(first.ok_or(Error::Malformed)?),
+ ExcludeReachableFromParents => gix_revision::Spec::ExcludeParents(first.ok_or(Error::Malformed)?),
+ })
+ }
+
+ let range = zero_or_one_objects_or_ambiguity_err(self.objs, self.prefix, self.err, self.repo)?;
+ Ok(crate::revision::Spec {
+ first_ref: self.refs[0].take(),
+ second_ref: self.refs[1].take(),
+ inner: kind_to_spec(self.kind, range)?,
+ repo: self.repo,
+ })
+ }
+}
+
+impl<'repo> parse::Delegate for Delegate<'repo> {
+ fn done(&mut self) {
+ self.follow_refs_to_objects_if_needed();
+ self.disambiguate_objects_by_fallback_hint(
+ self.kind_implies_committish()
+ .then_some(ObjectKindHint::Committish)
+ .or(self.opts.object_kind_hint),
+ );
+ }
+}
+
+impl<'repo> delegate::Kind for Delegate<'repo> {
+ fn kind(&mut self, kind: gix_revision::spec::Kind) -> Option<()> {
+ use gix_revision::spec::Kind::*;
+ self.kind = Some(kind);
+
+ if self.kind_implies_committish() {
+ self.disambiguate_objects_by_fallback_hint(ObjectKindHint::Committish.into());
+ }
+ if matches!(kind, RangeBetween | ReachableToMergeBase) {
+ self.idx += 1;
+ }
+
+ Some(())
+ }
+}
+
+impl<'repo> Delegate<'repo> {
+ fn kind_implies_committish(&self) -> bool {
+ self.kind.unwrap_or(gix_revision::spec::Kind::IncludeReachable) != gix_revision::spec::Kind::IncludeReachable
+ }
+ fn disambiguate_objects_by_fallback_hint(&mut self, hint: Option<ObjectKindHint>) {
+ fn require_object_kind(repo: &Repository, obj: &gix_hash::oid, kind: gix_object::Kind) -> Result<(), Error> {
+ let obj = repo.find_object(obj)?;
+ if obj.kind == kind {
+ Ok(())
+ } else {
+ Err(Error::ObjectKind {
+ actual: obj.kind,
+ expected: kind,
+ oid: obj.id.attach(repo).shorten_or_id(),
+ })
+ }
+ }
+
+ if self.last_call_was_disambiguate_prefix[self.idx] {
+ self.unset_disambiguate_call();
+
+ if let Some(objs) = self.objs[self.idx].as_mut() {
+ let repo = self.repo;
+ let errors: Vec<_> = match hint {
+ Some(kind_hint) => match kind_hint {
+ ObjectKindHint::Treeish | ObjectKindHint::Committish => {
+ let kind = match kind_hint {
+ ObjectKindHint::Treeish => gix_object::Kind::Tree,
+ ObjectKindHint::Committish => gix_object::Kind::Commit,
+ _ => unreachable!("BUG: we narrow possibilities above"),
+ };
+ objs.iter()
+ .filter_map(|obj| peel(repo, obj, kind).err().map(|err| (*obj, err)))
+ .collect()
+ }
+ ObjectKindHint::Tree | ObjectKindHint::Commit | ObjectKindHint::Blob => {
+ let kind = match kind_hint {
+ ObjectKindHint::Tree => gix_object::Kind::Tree,
+ ObjectKindHint::Commit => gix_object::Kind::Commit,
+ ObjectKindHint::Blob => gix_object::Kind::Blob,
+ _ => unreachable!("BUG: we narrow possibilities above"),
+ };
+ objs.iter()
+ .filter_map(|obj| require_object_kind(repo, obj, kind).err().map(|err| (*obj, err)))
+ .collect()
+ }
+ },
+ None => return,
+ };
+
+ if errors.len() == objs.len() {
+ self.err.extend(errors.into_iter().map(|(_, err)| err));
+ } else {
+ for (obj, err) in errors {
+ objs.remove(&obj);
+ self.err.push(err);
+ }
+ }
+ }
+ }
+ }
+ fn follow_refs_to_objects_if_needed(&mut self) -> Option<()> {
+ assert_eq!(self.refs.len(), self.objs.len());
+ let repo = self.repo;
+ for (r, obj) in self.refs.iter().zip(self.objs.iter_mut()) {
+ if let (_ref_opt @ Some(ref_), obj_opt @ None) = (r, obj) {
+ if let Some(id) = ref_.target.try_id().map(ToOwned::to_owned).or_else(|| {
+ ref_.clone()
+ .attach(repo)
+ .peel_to_id_in_place()
+ .ok()
+ .map(|id| id.detach())
+ }) {
+ obj_opt.get_or_insert_with(HashSet::default).insert(id);
+ };
+ };
+ }
+ Some(())
+ }
+
+ fn unset_disambiguate_call(&mut self) {
+ self.last_call_was_disambiguate_prefix[self.idx] = false;
+ }
+}
+
+fn peel(repo: &Repository, obj: &gix_hash::oid, kind: gix_object::Kind) -> Result<ObjectId, Error> {
+ let mut obj = repo.find_object(obj)?;
+ obj = obj.peel_to_kind(kind)?;
+ debug_assert_eq!(obj.kind, kind, "bug in Object::peel_to_kind() which didn't deliver");
+ Ok(obj.id)
+}
+
+fn handle_errors_and_replacements(
+ destination: &mut Vec<Error>,
+ objs: &mut HashSet<ObjectId>,
+ errors: Vec<(ObjectId, Error)>,
+ replacements: &mut Replacements,
+) -> Option<()> {
+ if errors.len() == objs.len() {
+ destination.extend(errors.into_iter().map(|(_, err)| err));
+ None
+ } else {
+ for (obj, err) in errors {
+ objs.remove(&obj);
+ destination.push(err);
+ }
+ for (find, replace) in replacements {
+ objs.remove(find);
+ objs.insert(*replace);
+ }
+ Some(())
+ }
+}
+
+mod navigate;
+mod revision;
diff --git a/vendor/gix/src/revision/spec/parse/delegate/navigate.rs b/vendor/gix/src/revision/spec/parse/delegate/navigate.rs
new file mode 100644
index 000000000..882c2835c
--- /dev/null
+++ b/vendor/gix/src/revision/spec/parse/delegate/navigate.rs
@@ -0,0 +1,340 @@
+use std::collections::HashSet;
+
+use gix_hash::ObjectId;
+use gix_revision::spec::parse::{
+ delegate,
+ delegate::{PeelTo, Traversal},
+};
+use gix_traverse::commit::Sorting;
+
+use crate::{
+ bstr::{BStr, ByteSlice},
+ ext::ObjectIdExt,
+ object,
+ revision::spec::parse::{
+ delegate::{handle_errors_and_replacements, peel, Replacements},
+ Delegate, Error,
+ },
+};
+
+impl<'repo> delegate::Navigate for Delegate<'repo> {
+ fn traverse(&mut self, kind: Traversal) -> Option<()> {
+ self.unset_disambiguate_call();
+ self.follow_refs_to_objects_if_needed()?;
+
+ let mut replacements = Replacements::default();
+ let mut errors = Vec::new();
+ let objs = self.objs[self.idx].as_mut()?;
+ let repo = self.repo;
+
+ for obj in objs.iter() {
+ match kind {
+ Traversal::NthParent(num) => {
+ match self.repo.find_object(*obj).map_err(Error::from).and_then(|obj| {
+ obj.try_into_commit().map_err(|err| {
+ let object::try_into::Error { actual, expected, id } = err;
+ Error::ObjectKind {
+ oid: id.attach(repo).shorten_or_id(),
+ actual,
+ expected,
+ }
+ })
+ }) {
+ Ok(commit) => match commit.parent_ids().nth(num.saturating_sub(1)) {
+ Some(id) => replacements.push((commit.id, id.detach())),
+ None => errors.push((
+ commit.id,
+ Error::ParentOutOfRange {
+ oid: commit.id().shorten_or_id(),
+ desired: num,
+ available: commit.parent_ids().count(),
+ },
+ )),
+ },
+ Err(err) => errors.push((*obj, err)),
+ }
+ }
+ Traversal::NthAncestor(num) => {
+ let id = obj.attach(repo);
+ match id
+ .ancestors()
+ .first_parent_only()
+ .all()
+ .expect("cannot fail without sorting")
+ .skip(num)
+ .filter_map(Result::ok)
+ .next()
+ {
+ Some(id) => replacements.push((*obj, id.detach())),
+ None => errors.push((
+ *obj,
+ Error::AncestorOutOfRange {
+ oid: id.shorten_or_id(),
+ desired: num,
+ available: id
+ .ancestors()
+ .first_parent_only()
+ .all()
+ .expect("cannot fail without sorting")
+ .skip(1)
+ .count(),
+ },
+ )),
+ }
+ }
+ }
+ }
+
+ handle_errors_and_replacements(&mut self.err, objs, errors, &mut replacements)
+ }
+
+ fn peel_until(&mut self, kind: PeelTo<'_>) -> Option<()> {
+ self.unset_disambiguate_call();
+ self.follow_refs_to_objects_if_needed()?;
+
+ let mut replacements = Replacements::default();
+ let mut errors = Vec::new();
+ let objs = self.objs[self.idx].as_mut()?;
+ let repo = self.repo;
+
+ match kind {
+ PeelTo::ValidObject => {
+ for obj in objs.iter() {
+ match repo.find_object(*obj) {
+ Ok(_) => {}
+ Err(err) => {
+ errors.push((*obj, err.into()));
+ }
+ };
+ }
+ }
+ PeelTo::ObjectKind(kind) => {
+ let peel = |obj| peel(repo, obj, kind);
+ for obj in objs.iter() {
+ match peel(obj) {
+ Ok(replace) => replacements.push((*obj, replace)),
+ Err(err) => errors.push((*obj, err)),
+ }
+ }
+ }
+ PeelTo::Path(path) => {
+ let lookup_path = |obj: &ObjectId| {
+ let tree_id = peel(repo, obj, gix_object::Kind::Tree)?;
+ if path.is_empty() {
+ return Ok(tree_id);
+ }
+ let tree = repo.find_object(tree_id)?.into_tree();
+ let entry =
+ tree.lookup_entry_by_path(gix_path::from_bstr(path))?
+ .ok_or_else(|| Error::PathNotFound {
+ path: path.into(),
+ object: obj.attach(repo).shorten_or_id(),
+ tree: tree_id.attach(repo).shorten_or_id(),
+ })?;
+ Ok(entry.object_id())
+ };
+ for obj in objs.iter() {
+ match lookup_path(obj) {
+ Ok(replace) => replacements.push((*obj, replace)),
+ Err(err) => errors.push((*obj, err)),
+ }
+ }
+ }
+ PeelTo::RecursiveTagObject => {
+ for oid in objs.iter() {
+ match oid.attach(repo).object().and_then(|obj| obj.peel_tags_to_end()) {
+ Ok(obj) => replacements.push((*oid, obj.id)),
+ Err(err) => errors.push((*oid, err.into())),
+ }
+ }
+ }
+ }
+
+ handle_errors_and_replacements(&mut self.err, objs, errors, &mut replacements)
+ }
+
+ fn find(&mut self, regex: &BStr, negated: bool) -> Option<()> {
+ self.unset_disambiguate_call();
+ self.follow_refs_to_objects_if_needed()?;
+
+ #[cfg(not(feature = "regex"))]
+ let matches = |message: &BStr| -> bool { message.contains_str(regex) ^ negated };
+ #[cfg(feature = "regex")]
+ let matches = match regex::bytes::Regex::new(regex.to_str_lossy().as_ref()) {
+ Ok(compiled) => {
+ let needs_regex = regex::escape(compiled.as_str()) != regex;
+ move |message: &BStr| -> bool {
+ if needs_regex {
+ compiled.is_match(message) ^ negated
+ } else {
+ message.contains_str(regex) ^ negated
+ }
+ }
+ }
+ Err(err) => {
+ self.err.push(err.into());
+ return None;
+ }
+ };
+
+ match self.objs[self.idx].as_mut() {
+ Some(objs) => {
+ let repo = self.repo;
+ let mut errors = Vec::new();
+ let mut replacements = Replacements::default();
+ for oid in objs.iter() {
+ match oid
+ .attach(repo)
+ .ancestors()
+ .sorting(Sorting::ByCommitTimeNewestFirst)
+ .all()
+ {
+ Ok(iter) => {
+ let mut matched = false;
+ let mut count = 0;
+ let commits = iter.map(|res| {
+ res.map_err(Error::from).and_then(|commit_id| {
+ commit_id.object().map_err(Error::from).map(|obj| obj.into_commit())
+ })
+ });
+ for commit in commits {
+ count += 1;
+ match commit {
+ Ok(commit) => {
+ if matches(commit.message_raw_sloppy()) {
+ replacements.push((*oid, commit.id));
+ matched = true;
+ break;
+ }
+ }
+ Err(err) => errors.push((*oid, err)),
+ }
+ }
+ if !matched {
+ errors.push((
+ *oid,
+ Error::NoRegexMatch {
+ regex: regex.into(),
+ commits_searched: count,
+ oid: oid.attach(repo).shorten_or_id(),
+ },
+ ))
+ }
+ }
+ Err(err) => errors.push((*oid, err.into())),
+ }
+ }
+ handle_errors_and_replacements(&mut self.err, objs, errors, &mut replacements)
+ }
+ None => match self.repo.references() {
+ Ok(references) => match references.all() {
+ Ok(references) => {
+ match self
+ .repo
+ .rev_walk(
+ references
+ .peeled()
+ .filter_map(Result::ok)
+ .filter(|r| {
+ r.id()
+ .object()
+ .ok()
+ .map(|obj| obj.kind == gix_object::Kind::Commit)
+ .unwrap_or(false)
+ })
+ .filter_map(|r| r.detach().peeled),
+ )
+ .sorting(Sorting::ByCommitTimeNewestFirst)
+ .all()
+ {
+ Ok(iter) => {
+ let mut matched = false;
+ let mut count = 0;
+ let commits = iter.map(|res| {
+ res.map_err(Error::from).and_then(|commit_id| {
+ commit_id.object().map_err(Error::from).map(|obj| obj.into_commit())
+ })
+ });
+ for commit in commits {
+ count += 1;
+ match commit {
+ Ok(commit) => {
+ if matches(commit.message_raw_sloppy()) {
+ self.objs[self.idx]
+ .get_or_insert_with(HashSet::default)
+ .insert(commit.id);
+ matched = true;
+ break;
+ }
+ }
+ Err(err) => self.err.push(err),
+ }
+ }
+ if matched {
+ Some(())
+ } else {
+ self.err.push(Error::NoRegexMatchAllRefs {
+ regex: regex.into(),
+ commits_searched: count,
+ });
+ None
+ }
+ }
+ Err(err) => {
+ self.err.push(err.into());
+ None
+ }
+ }
+ }
+ Err(err) => {
+ self.err.push(err.into());
+ None
+ }
+ },
+ Err(err) => {
+ self.err.push(err.into());
+ None
+ }
+ },
+ }
+ }
+
+ fn index_lookup(&mut self, path: &BStr, stage: u8) -> Option<()> {
+ self.unset_disambiguate_call();
+ match self.repo.index() {
+ Ok(index) => match index.entry_by_path_and_stage(path, stage.into()) {
+ Some(entry) => {
+ self.objs[self.idx]
+ .get_or_insert_with(HashSet::default)
+ .insert(entry.id);
+ Some(())
+ }
+ None => {
+ let stage_hint = [0, 1, 2]
+ .iter()
+ .filter(|our_stage| **our_stage != stage)
+ .find_map(|stage| {
+ index
+ .entry_index_by_path_and_stage(path, (*stage).into())
+ .map(|_| (*stage).into())
+ });
+ let exists = self
+ .repo
+ .work_dir()
+ .map_or(false, |root| root.join(gix_path::from_bstr(path)).exists());
+ self.err.push(Error::IndexLookup {
+ desired_path: path.into(),
+ desired_stage: stage.into(),
+ exists,
+ stage_hint,
+ });
+ None
+ }
+ },
+ Err(err) => {
+ self.err.push(err.into());
+ None
+ }
+ }
+ }
+}
diff --git a/vendor/gix/src/revision/spec/parse/delegate/revision.rs b/vendor/gix/src/revision/spec/parse/delegate/revision.rs
new file mode 100644
index 000000000..7ea691a28
--- /dev/null
+++ b/vendor/gix/src/revision/spec/parse/delegate/revision.rs
@@ -0,0 +1,225 @@
+use std::collections::HashSet;
+
+use gix_hash::ObjectId;
+use gix_revision::spec::parse::{
+ delegate,
+ delegate::{ReflogLookup, SiblingBranch},
+};
+
+use crate::{
+ bstr::{BStr, BString, ByteSlice},
+ ext::ReferenceExt,
+ revision::spec::parse::{Delegate, Error, RefsHint},
+};
+
+impl<'repo> delegate::Revision for Delegate<'repo> {
+ fn find_ref(&mut self, name: &BStr) -> Option<()> {
+ self.unset_disambiguate_call();
+ if !self.err.is_empty() && self.refs[self.idx].is_some() {
+ return None;
+ }
+ match self.repo.refs.find(name) {
+ Ok(r) => {
+ assert!(self.refs[self.idx].is_none(), "BUG: cannot set the same ref twice");
+ self.refs[self.idx] = Some(r);
+ Some(())
+ }
+ Err(err) => {
+ self.err.push(err.into());
+ None
+ }
+ }
+ }
+
+ fn disambiguate_prefix(
+ &mut self,
+ prefix: gix_hash::Prefix,
+ _must_be_commit: Option<delegate::PrefixHint<'_>>,
+ ) -> Option<()> {
+ self.last_call_was_disambiguate_prefix[self.idx] = true;
+ let mut candidates = Some(HashSet::default());
+ self.prefix[self.idx] = Some(prefix);
+
+ let empty_tree_id = gix_hash::ObjectId::empty_tree(prefix.as_oid().kind());
+ let res = if prefix.as_oid() == empty_tree_id {
+ candidates.as_mut().expect("set").insert(empty_tree_id);
+ Ok(Some(Err(())))
+ } else {
+ self.repo.objects.lookup_prefix(prefix, candidates.as_mut())
+ };
+
+ match res {
+ Err(err) => {
+ self.err.push(err.into());
+ None
+ }
+ Ok(None) => {
+ self.err.push(Error::PrefixNotFound { prefix });
+ None
+ }
+ Ok(Some(Ok(_) | Err(()))) => {
+ assert!(self.objs[self.idx].is_none(), "BUG: cannot set the same prefix twice");
+ let candidates = candidates.expect("set above");
+ match self.opts.refs_hint {
+ RefsHint::PreferObjectOnFullLengthHexShaUseRefOtherwise
+ if prefix.hex_len() == candidates.iter().next().expect("at least one").kind().len_in_hex() =>
+ {
+ self.ambiguous_objects[self.idx] = Some(candidates.clone());
+ self.objs[self.idx] = Some(candidates);
+ Some(())
+ }
+ RefsHint::PreferObject => {
+ self.ambiguous_objects[self.idx] = Some(candidates.clone());
+ self.objs[self.idx] = Some(candidates);
+ Some(())
+ }
+ RefsHint::PreferRef | RefsHint::PreferObjectOnFullLengthHexShaUseRefOtherwise | RefsHint::Fail => {
+ match self.repo.refs.find(&prefix.to_string()) {
+ Ok(ref_) => {
+ assert!(self.refs[self.idx].is_none(), "BUG: cannot set the same ref twice");
+ if self.opts.refs_hint == RefsHint::Fail {
+ self.refs[self.idx] = Some(ref_.clone());
+ self.err.push(Error::AmbiguousRefAndObject {
+ prefix,
+ reference: ref_,
+ });
+ self.err.push(Error::ambiguous(candidates, prefix, self.repo));
+ None
+ } else {
+ self.refs[self.idx] = Some(ref_);
+ Some(())
+ }
+ }
+ Err(_) => {
+ self.ambiguous_objects[self.idx] = Some(candidates.clone());
+ self.objs[self.idx] = Some(candidates);
+ Some(())
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ fn reflog(&mut self, query: ReflogLookup) -> Option<()> {
+ self.unset_disambiguate_call();
+ match query {
+ ReflogLookup::Date(_date) => {
+ self.err.push(Error::Planned {
+ dependency: "remote handling and ref-specs are fleshed out more",
+ });
+ None
+ }
+ ReflogLookup::Entry(no) => {
+ let r = match &mut self.refs[self.idx] {
+ Some(r) => r.clone().attach(self.repo),
+ val @ None => match self.repo.head().map(|head| head.try_into_referent()) {
+ Ok(Some(r)) => {
+ *val = Some(r.clone().detach());
+ r
+ }
+ Ok(None) => {
+ self.err.push(Error::UnbornHeadsHaveNoRefLog);
+ return None;
+ }
+ Err(err) => {
+ self.err.push(err.into());
+ return None;
+ }
+ },
+ };
+ let mut platform = r.log_iter();
+ match platform.rev().ok().flatten() {
+ Some(mut it) => match it.nth(no).and_then(Result::ok) {
+ Some(line) => {
+ self.objs[self.idx]
+ .get_or_insert_with(HashSet::default)
+ .insert(line.new_oid);
+ Some(())
+ }
+ None => {
+ let available = platform.rev().ok().flatten().map_or(0, |it| it.count());
+ self.err.push(Error::RefLogEntryOutOfRange {
+ reference: r.detach(),
+ desired: no,
+ available,
+ });
+ None
+ }
+ },
+ None => {
+ self.err.push(Error::MissingRefLog {
+ reference: r.name().as_bstr().into(),
+ action: "lookup entry",
+ });
+ None
+ }
+ }
+ }
+ }
+ }
+
+ fn nth_checked_out_branch(&mut self, branch_no: usize) -> Option<()> {
+ self.unset_disambiguate_call();
+ fn prior_checkouts_iter<'a>(
+ platform: &'a mut gix_ref::file::log::iter::Platform<'static, '_>,
+ ) -> Result<impl Iterator<Item = (BString, ObjectId)> + 'a, Error> {
+ match platform.rev().ok().flatten() {
+ Some(log) => Ok(log.filter_map(Result::ok).filter_map(|line| {
+ line.message
+ .strip_prefix(b"checkout: moving from ")
+ .and_then(|from_to| from_to.find(" to ").map(|pos| &from_to[..pos]))
+ .map(|from_branch| (from_branch.into(), line.previous_oid))
+ })),
+ None => Err(Error::MissingRefLog {
+ reference: "HEAD".into(),
+ action: "search prior checked out branch",
+ }),
+ }
+ }
+
+ let head = match self.repo.head() {
+ Ok(head) => head,
+ Err(err) => {
+ self.err.push(err.into());
+ return None;
+ }
+ };
+ match prior_checkouts_iter(&mut head.log_iter()).map(|mut it| it.nth(branch_no.saturating_sub(1))) {
+ Ok(Some((ref_name, id))) => {
+ let id = match self.repo.find_reference(ref_name.as_bstr()) {
+ Ok(mut r) => {
+ let id = r.peel_to_id_in_place().map(|id| id.detach()).unwrap_or(id);
+ self.refs[self.idx] = Some(r.detach());
+ id
+ }
+ Err(_) => id,
+ };
+ self.objs[self.idx].get_or_insert_with(HashSet::default).insert(id);
+ Some(())
+ }
+ Ok(None) => {
+ self.err.push(Error::PriorCheckoutOutOfRange {
+ desired: branch_no,
+ available: prior_checkouts_iter(&mut head.log_iter())
+ .map(|it| it.count())
+ .unwrap_or(0),
+ });
+ None
+ }
+ Err(err) => {
+ self.err.push(err);
+ None
+ }
+ }
+ }
+
+ fn sibling_branch(&mut self, _kind: SiblingBranch) -> Option<()> {
+ self.unset_disambiguate_call();
+ self.err.push(Error::Planned {
+ dependency: "remote handling and ref-specs are fleshed out more",
+ });
+ None
+ }
+}
diff --git a/vendor/gix/src/revision/spec/parse/error.rs b/vendor/gix/src/revision/spec/parse/error.rs
new file mode 100644
index 000000000..3af4697b0
--- /dev/null
+++ b/vendor/gix/src/revision/spec/parse/error.rs
@@ -0,0 +1,130 @@
+use std::collections::HashSet;
+
+use gix_hash::ObjectId;
+
+use super::Error;
+use crate::{bstr, bstr::BString, ext::ObjectIdExt, Repository};
+
+/// Additional information about candidates that caused ambiguity.
+#[derive(Debug)]
+pub enum CandidateInfo {
+ /// An error occurred when looking up the object in the database.
+ FindError {
+ /// The reported error.
+ source: crate::object::find::existing::Error,
+ },
+ /// The candidate is an object of the given `kind`.
+ Object {
+ /// The kind of the object.
+ kind: gix_object::Kind,
+ },
+ /// The candidate is a tag.
+ Tag {
+ /// The name of the tag.
+ name: BString,
+ },
+ /// The candidate is a commit.
+ Commit {
+ /// The date of the commit.
+ date: gix_date::Time,
+ /// The subject line.
+ title: BString,
+ },
+}
+
+impl std::fmt::Display for CandidateInfo {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match self {
+ CandidateInfo::FindError { source } => write!(f, "lookup error: {source}"),
+ CandidateInfo::Tag { name } => write!(f, "tag {name:?}"),
+ CandidateInfo::Object { kind } => std::fmt::Display::fmt(kind, f),
+ CandidateInfo::Commit { date, title } => {
+ write!(f, "commit {} {title:?}", date.format(gix_date::time::format::SHORT))
+ }
+ }
+ }
+}
+
+impl Error {
+ pub(crate) fn ambiguous(candidates: HashSet<ObjectId>, prefix: gix_hash::Prefix, repo: &Repository) -> Self {
+ #[derive(PartialOrd, Ord, Eq, PartialEq, Copy, Clone)]
+ enum Order {
+ Tag,
+ Commit,
+ Tree,
+ Blob,
+ Invalid,
+ }
+ let candidates = {
+ let mut c: Vec<_> = candidates
+ .into_iter()
+ .map(|oid| {
+ let obj = repo.find_object(oid);
+ let order = match &obj {
+ Err(_) => Order::Invalid,
+ Ok(obj) => match obj.kind {
+ gix_object::Kind::Tag => Order::Tag,
+ gix_object::Kind::Commit => Order::Commit,
+ gix_object::Kind::Tree => Order::Tree,
+ gix_object::Kind::Blob => Order::Blob,
+ },
+ };
+ (oid, obj, order)
+ })
+ .collect();
+ c.sort_by(|lhs, rhs| lhs.2.cmp(&rhs.2).then_with(|| lhs.0.cmp(&rhs.0)));
+ c
+ };
+ Error::AmbiguousPrefix {
+ prefix,
+ info: candidates
+ .into_iter()
+ .map(|(oid, find_result, _)| {
+ let info = match find_result {
+ Ok(obj) => match obj.kind {
+ gix_object::Kind::Tree | gix_object::Kind::Blob => CandidateInfo::Object { kind: obj.kind },
+ gix_object::Kind::Tag => {
+ let tag = obj.to_tag_ref();
+ CandidateInfo::Tag { name: tag.name.into() }
+ }
+ gix_object::Kind::Commit => {
+ use bstr::ByteSlice;
+ let commit = obj.to_commit_ref();
+ CandidateInfo::Commit {
+ date: commit.committer().time,
+ title: commit.message().title.trim().into(),
+ }
+ }
+ },
+ Err(err) => CandidateInfo::FindError { source: err },
+ };
+ (oid.attach(repo).shorten().unwrap_or_else(|_| oid.into()), info)
+ })
+ .collect(),
+ }
+ }
+
+ pub(crate) fn from_errors(errors: Vec<Self>) -> Self {
+ assert!(!errors.is_empty());
+ match errors.len() {
+ 0 => unreachable!(
+ "BUG: cannot create something from nothing, must have recorded some errors to call from_errors()"
+ ),
+ 1 => errors.into_iter().next().expect("one"),
+ _ => {
+ let mut it = errors.into_iter().rev();
+ let mut recent = Error::Multi {
+ current: Box::new(it.next().expect("at least one error")),
+ next: None,
+ };
+ for err in it {
+ recent = Error::Multi {
+ current: Box::new(err),
+ next: Some(Box::new(recent)),
+ }
+ }
+ recent
+ }
+ }
+ }
+}
diff --git a/vendor/gix/src/revision/spec/parse/mod.rs b/vendor/gix/src/revision/spec/parse/mod.rs
new file mode 100644
index 000000000..f69ecc4af
--- /dev/null
+++ b/vendor/gix/src/revision/spec/parse/mod.rs
@@ -0,0 +1,61 @@
+use std::collections::HashSet;
+
+use gix_hash::ObjectId;
+use gix_revision::spec::parse;
+
+use crate::{bstr::BStr, revision::Spec, Repository};
+
+mod types;
+pub use types::{Error, ObjectKindHint, Options, RefsHint};
+
+///
+pub mod single {
+ use crate::bstr::BString;
+
+ /// The error returned by [`crate::Repository::rev_parse_single()`].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ Parse(#[from] super::Error),
+ #[error("revspec {spec:?} did not resolve to a single object")]
+ RangedRev { spec: BString },
+ }
+}
+
+///
+pub mod error;
+
+impl<'repo> Spec<'repo> {
+ /// Parse `spec` and use information from `repo` to resolve it, using `opts` to learn how to deal with ambiguity.
+ ///
+    /// Note that it's easier to use [`repo.rev_parse()`][Repository::rev_parse()] instead.
+ pub fn from_bstr<'a>(spec: impl Into<&'a BStr>, repo: &'repo Repository, opts: Options) -> Result<Self, Error> {
+ let mut delegate = Delegate::new(repo, opts);
+ match gix_revision::spec::parse(spec.into(), &mut delegate) {
+ Err(parse::Error::Delegate) => Err(delegate.into_err()),
+ Err(err) => Err(err.into()),
+ Ok(()) => delegate.into_rev_spec(),
+ }
+ }
+}
+
+struct Delegate<'repo> {
+ refs: [Option<gix_ref::Reference>; 2],
+ objs: [Option<HashSet<ObjectId>>; 2],
+ /// The originally encountered ambiguous objects for potential later use in errors.
+ ambiguous_objects: [Option<HashSet<ObjectId>>; 2],
+ idx: usize,
+ kind: Option<gix_revision::spec::Kind>,
+
+ opts: Options,
+ err: Vec<Error>,
+ /// The ambiguous prefix obtained during a call to `disambiguate_prefix()`.
+ prefix: [Option<gix_hash::Prefix>; 2],
+ /// If true, we didn't try to do any other transformation which might have helped with disambiguation.
+ last_call_was_disambiguate_prefix: [bool; 2],
+
+ repo: &'repo Repository,
+}
+
+mod delegate;
diff --git a/vendor/gix/src/revision/spec/parse/types.rs b/vendor/gix/src/revision/spec/parse/types.rs
new file mode 100644
index 000000000..4e523ab14
--- /dev/null
+++ b/vendor/gix/src/revision/spec/parse/types.rs
@@ -0,0 +1,182 @@
+use crate::{bstr::BString, object, reference};
+
+/// A hint to know what to do if refs and object names are equal.
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+pub enum RefsHint {
+    /// This is the default: if a spec is a full-length hex sha that identifies an existing object, use that object
+    /// instead of a similarly named reference. The latter is not typical but can absolutely happen by accident.
+    /// If the object prefix is shorter than the maximum hash length of the repository, use the reference instead, which is
+    /// preferred as there are many valid object names like `beef` and `cafe` that are short and both valid and typical prefixes
+    /// for objects.
+    /// Git chooses this as its default as well, even though it means that every object prefix is also looked up as a ref.
+ PreferObjectOnFullLengthHexShaUseRefOtherwise,
+ /// No matter what, if it looks like an object prefix and has an object, use it.
+ /// Note that no ref-lookup is made here which is the fastest option.
+ PreferObject,
+ /// When an object is found for a given prefix, also check if a reference exists with that name and if it does,
+ /// use that moving forward.
+ PreferRef,
+ /// If there is an ambiguous situation, instead of silently choosing one over the other, fail instead.
+ Fail,
+}
+
+/// A hint to know which object kind to prefer if multiple objects match a prefix.
+///
+/// This disambiguation mechanism is applied only if there are no disambiguation hints in the spec itself.
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+pub enum ObjectKindHint {
+ /// Pick objects that are commits themselves.
+ Commit,
+ /// Pick objects that can be peeled into a commit, i.e. commits themselves or tags which are peeled until a commit is found.
+ Committish,
+ /// Pick objects that are trees themselves.
+ Tree,
+ /// Pick objects that can be peeled into a tree, i.e. trees themselves or tags which are peeled until a tree is found or commits
+ /// whose tree is chosen.
+ Treeish,
+ /// Pick objects that are blobs.
+ Blob,
+}
+
+impl Default for RefsHint {
+ fn default() -> Self {
+ RefsHint::PreferObjectOnFullLengthHexShaUseRefOtherwise
+ }
+}
+
+/// Options for use in [`revision::Spec::from_bstr()`][crate::revision::Spec::from_bstr()].
+#[derive(Debug, Default, Copy, Clone)]
+pub struct Options {
+ /// What to do if both refs and object names match the same input.
+ pub refs_hint: RefsHint,
+    /// The hint to use when encountering multiple objects matching a prefix.
+ ///
+ /// If `None`, the rev-spec itself must disambiguate the object by drilling down to desired kinds or applying
+ /// other disambiguating transformations.
+ pub object_kind_hint: Option<ObjectKindHint>,
+}
+
+/// The error returned by [`crate::Repository::rev_parse()`].
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum Error {
+ #[error("The rev-spec is malformed and misses a ref name")]
+ Malformed,
+ #[error("Unborn heads do not have a reflog yet")]
+ UnbornHeadsHaveNoRefLog,
+ #[error("This feature will be implemented once {dependency}")]
+ Planned { dependency: &'static str },
+ #[error("Reference {reference:?} does not have a reference log, cannot {action}")]
+ MissingRefLog { reference: BString, action: &'static str },
+ #[error("HEAD has {available} prior checkouts and checkout number {desired} is out of range")]
+ PriorCheckoutOutOfRange { desired: usize, available: usize },
+ #[error("Reference {:?} has {available} ref-log entries and entry number {desired} is out of range", reference.name.as_bstr())]
+ RefLogEntryOutOfRange {
+ reference: gix_ref::Reference,
+ desired: usize,
+ available: usize,
+ },
+ #[error(
+ "Commit {oid} has {available} ancestors along the first parent and ancestor number {desired} is out of range"
+ )]
+ AncestorOutOfRange {
+ oid: gix_hash::Prefix,
+ desired: usize,
+ available: usize,
+ },
+ #[error("Commit {oid} has {available} parents and parent number {desired} is out of range")]
+ ParentOutOfRange {
+ oid: gix_hash::Prefix,
+ desired: usize,
+ available: usize,
+ },
+ #[error("Path {desired_path:?} did not exist in index at stage {desired_stage}{}{}", stage_hint.map(|actual|format!(". It does exist at stage {actual}")).unwrap_or_default(), exists.then(|| ". It exists on disk").unwrap_or(". It does not exist on disk"))]
+ IndexLookup {
+ desired_path: BString,
+ desired_stage: gix_index::entry::Stage,
+ stage_hint: Option<gix_index::entry::Stage>,
+ exists: bool,
+ },
+ #[error(transparent)]
+ FindHead(#[from] reference::find::existing::Error),
+ #[error(transparent)]
+ Index(#[from] crate::worktree::open_index::Error),
+ #[error(transparent)]
+ RevWalkIterInit(#[from] crate::reference::iter::init::Error),
+ #[error(transparent)]
+ RevWalkAllReferences(#[from] gix_ref::packed::buffer::open::Error),
+ #[cfg(feature = "regex")]
+ #[error(transparent)]
+ InvalidRegex(#[from] regex::Error),
+ #[cfg_attr(
+ feature = "regex",
+ error("None of {commits_searched} commits from {oid} matched regex {regex:?}")
+ )]
+ #[cfg_attr(
+ not(feature = "regex"),
+ error("None of {commits_searched} commits from {oid} matched text {regex:?}")
+ )]
+ NoRegexMatch {
+ regex: BString,
+ oid: gix_hash::Prefix,
+ commits_searched: usize,
+ },
+ #[cfg_attr(
+ feature = "regex",
+ error("None of {commits_searched} commits reached from all references matched regex {regex:?}")
+ )]
+ #[cfg_attr(
+ not(feature = "regex"),
+ error("None of {commits_searched} commits reached from all references matched text {regex:?}")
+ )]
+ NoRegexMatchAllRefs { regex: BString, commits_searched: usize },
+ #[error(
+ "The short hash {prefix} matched both the reference {} and at least one object", reference.name)]
+ AmbiguousRefAndObject {
+ /// The prefix to look for.
+ prefix: gix_hash::Prefix,
+ /// The reference matching the prefix.
+ reference: gix_ref::Reference,
+ },
+ #[error(transparent)]
+ IdFromHex(#[from] gix_hash::decode::Error),
+ #[error(transparent)]
+ FindReference(#[from] gix_ref::file::find::existing::Error),
+ #[error(transparent)]
+ FindObject(#[from] object::find::existing::Error),
+ #[error(transparent)]
+ LookupPrefix(#[from] gix_odb::store::prefix::lookup::Error),
+ #[error(transparent)]
+ PeelToKind(#[from] object::peel::to_kind::Error),
+ #[error("Object {oid} was a {actual}, but needed it to be a {expected}")]
+ ObjectKind {
+ oid: gix_hash::Prefix,
+ actual: gix_object::Kind,
+ expected: gix_object::Kind,
+ },
+ #[error(transparent)]
+ Parse(#[from] gix_revision::spec::parse::Error),
+ #[error("An object prefixed {prefix} could not be found")]
+ PrefixNotFound { prefix: gix_hash::Prefix },
+ #[error("Short id {prefix} is ambiguous. Candidates are:\n{}", info.iter().map(|(oid, info)| format!("\t{oid} {info}")).collect::<Vec<_>>().join("\n"))]
+ AmbiguousPrefix {
+ prefix: gix_hash::Prefix,
+ info: Vec<(gix_hash::Prefix, super::error::CandidateInfo)>,
+ },
+ #[error("Could not find path {path:?} in tree {tree} of parent object {object}")]
+ PathNotFound {
+ object: gix_hash::Prefix,
+ tree: gix_hash::Prefix,
+ path: BString,
+ },
+ #[error("{current}")]
+ Multi {
+ current: Box<dyn std::error::Error + Send + Sync + 'static>,
+ #[source]
+ next: Option<Box<dyn std::error::Error + Send + Sync + 'static>>,
+ },
+ #[error(transparent)]
+ Traverse(#[from] gix_traverse::commit::ancestors::Error),
+ #[error("Spec does not contain a single object id")]
+ SingleNotFound,
+}
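
For orientation, a minimal sketch of how a caller might branch on a few of the variants above. It assumes this enum is the one returned by `Repository::rev_parse()` under the path `gix::revision::spec::parse::Error`, and that `Spec::single()` yields the resolved id; both are assumptions, while the variant names and fields come from the code above.

    // Sketch only: resolve a rev-spec and report two of the error cases defined above.
    fn resolve(repo: &gix::Repository, spec: &str) -> Option<gix::ObjectId> {
        use gix::revision::spec::parse::Error; // assumed public path of the enum above
        match repo.rev_parse(spec) {
            Ok(resolved) => resolved.single().map(|id| id.detach()),
            Err(Error::AmbiguousPrefix { prefix, info }) => {
                eprintln!("'{prefix}' is ambiguous among {} candidates", info.len());
                None
            }
            Err(Error::PriorCheckoutOutOfRange { desired, available }) => {
                eprintln!("checkout @{{-{desired}}} is out of range, HEAD has {available} prior checkouts");
                None
            }
            Err(other) => {
                eprintln!("failed to resolve '{spec}': {other}");
                None
            }
        }
    }
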
diff --git a/vendor/gix/src/revision/walk.rs b/vendor/gix/src/revision/walk.rs
new file mode 100644
index 000000000..5b04b43a7
--- /dev/null
+++ b/vendor/gix/src/revision/walk.rs
@@ -0,0 +1,127 @@
+use gix_hash::ObjectId;
+use gix_odb::FindExt;
+
+use crate::{revision, Repository};
+
+/// A platform to traverse the revision graph by adding starting points as well as points which shouldn't be crossed,
+/// returned by [`Repository::rev_walk()`].
+pub struct Platform<'repo> {
+ pub(crate) repo: &'repo Repository,
+ pub(crate) tips: Vec<ObjectId>,
+ pub(crate) sorting: gix_traverse::commit::Sorting,
+ pub(crate) parents: gix_traverse::commit::Parents,
+}
+
+impl<'repo> Platform<'repo> {
+ pub(crate) fn new(tips: impl IntoIterator<Item = impl Into<ObjectId>>, repo: &'repo Repository) -> Self {
+ revision::walk::Platform {
+ repo,
+ tips: tips.into_iter().map(Into::into).collect(),
+ sorting: Default::default(),
+ parents: Default::default(),
+ }
+ }
+}
+
+/// Create-time builder methods
+impl<'repo> Platform<'repo> {
+ /// Set the sort mode for commits to the given value. The default is to order by topology.
+ pub fn sorting(mut self, sorting: gix_traverse::commit::Sorting) -> Self {
+ self.sorting = sorting;
+ self
+ }
+
+ /// Only traverse the first parent of the commit graph.
+ pub fn first_parent_only(mut self) -> Self {
+ self.parents = gix_traverse::commit::Parents::First;
+ self
+ }
+}
+
+/// Produce the iterator
+impl<'repo> Platform<'repo> {
+ /// Return an iterator to traverse all commits reachable as configured by the [Platform].
+ ///
+ /// # Performance
+ ///
+ /// It's highly recommended to set an [`object cache`][Repository::object_cache_size()] on the parent repo
+ /// to greatly speed up performance if the returned id is supposed to be looked up right after.
+ pub fn all(self) -> Result<revision::Walk<'repo>, gix_traverse::commit::ancestors::Error> {
+ let Platform {
+ repo,
+ tips,
+ sorting,
+ parents,
+ } = self;
+ Ok(revision::Walk {
+ repo,
+ inner: Box::new(
+ gix_traverse::commit::Ancestors::new(
+ tips,
+ gix_traverse::commit::ancestors::State::default(),
+ move |oid, buf| repo.objects.find_commit_iter(oid, buf),
+ )
+ .sorting(sorting)?
+ .parents(parents),
+ ),
+ is_shallow: None,
+ error_on_missing_commit: false,
+ })
+ }
+}
+
+pub(crate) mod iter {
+ use crate::{ext::ObjectIdExt, Id};
+
+ /// The iterator returned by [`crate::revision::walk::Platform::all()`].
+ pub struct Walk<'repo> {
+ pub(crate) repo: &'repo crate::Repository,
+ pub(crate) inner:
+ Box<dyn Iterator<Item = Result<gix_hash::ObjectId, gix_traverse::commit::ancestors::Error>> + 'repo>,
+ pub(crate) error_on_missing_commit: bool,
+ // TODO: tests
+        /// After iteration this flag is `Some(true)` if the iteration was stopped prematurely due to missing parent commits.
+ /// Note that this flag won't be `Some` if any iteration error occurs, which is the case if
+ /// [`error_on_missing_commit()`][Walk::error_on_missing_commit()] was called.
+ ///
+ /// This happens if a repository is a shallow clone.
+ /// Note that this value is `None` as long as the iteration isn't complete.
+ pub is_shallow: Option<bool>,
+ }
+
+ impl<'repo> Walk<'repo> {
+ // TODO: tests
+ /// Once invoked, the iteration will return an error if a commit cannot be found in the object database. This typically happens
+ /// when operating on a shallow clone and thus is non-critical by default.
+ ///
+ /// Check the [`is_shallow`][Walk::is_shallow] field once the iteration ended otherwise to learn if a shallow commit graph
+ /// was encountered.
+ pub fn error_on_missing_commit(mut self) -> Self {
+ self.error_on_missing_commit = true;
+ self
+ }
+ }
+
+ impl<'repo> Iterator for Walk<'repo> {
+ type Item = Result<Id<'repo>, gix_traverse::commit::ancestors::Error>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ match self.inner.next() {
+ None => {
+ self.is_shallow = Some(false);
+ None
+ }
+ Some(Ok(oid)) => Some(Ok(oid.attach(self.repo))),
+ Some(Err(err @ gix_traverse::commit::ancestors::Error::FindExisting { .. })) => {
+ if self.error_on_missing_commit {
+ Some(Err(err))
+ } else {
+ self.is_shallow = Some(true);
+ None
+ }
+ }
+ Some(Err(err)) => Some(Err(err)),
+ }
+ }
+ }
+}
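
As a usage sketch for the `Platform` and `Walk` types above: obtain the platform from `Repository::rev_walk()` (referenced in the doc comment), restrict it to the first parent, and iterate. `Repository::head_id()` is assumed here as one way to obtain a starting tip.

    // Sketch: walk the first-parent chain starting at HEAD.
    fn walk_first_parent(repo: &gix::Repository) -> Result<(), Box<dyn std::error::Error>> {
        let head = repo.head_id()?; // assumed helper yielding the id HEAD points to
        for commit in repo.rev_walk(Some(head)).first_parent_only().all()? {
            let id = commit?; // a `gix::Id` attached to `repo`, see `Walk::next()` above
            println!("{}", id.detach());
        }
        Ok(())
    }
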
diff --git a/vendor/gix/src/tag.rs b/vendor/gix/src/tag.rs
new file mode 100644
index 000000000..84af3b43a
--- /dev/null
+++ b/vendor/gix/src/tag.rs
@@ -0,0 +1,16 @@
+//!
+mod error {
+
+ /// The error returned by [`tag(…)`][crate::Repository::tag()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ ReferenceNameValidation(#[from] gix_ref::name::Error),
+ #[error(transparent)]
+ WriteObject(#[from] crate::object::write::Error),
+ #[error(transparent)]
+ ReferenceEdit(#[from] crate::reference::edit::Error),
+ }
+}
+pub use error::Error;
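
The three variants mirror the stages of creating a tag: validating the reference name, writing the tag object, and creating the reference. A hedged sketch of distinguishing them at a call site, assuming the caller already holds a `Result<(), gix::tag::Error>`:

    // Sketch: map each stage of `gix::tag::Error` to a user-facing message.
    fn report(result: Result<(), gix::tag::Error>) {
        use gix::tag::Error;
        match result {
            Ok(()) => println!("tag created"),
            Err(Error::ReferenceNameValidation(err)) => eprintln!("invalid tag name: {err}"),
            Err(Error::WriteObject(err)) => eprintln!("could not write tag object: {err}"),
            Err(Error::ReferenceEdit(err)) => eprintln!("could not create tag reference: {err}"),
        }
    }
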
diff --git a/vendor/gix/src/types.rs b/vendor/gix/src/types.rs
new file mode 100644
index 000000000..34ffdc8bf
--- /dev/null
+++ b/vendor/gix/src/types.rs
@@ -0,0 +1,205 @@
+use std::{cell::RefCell, path::PathBuf};
+
+use gix_hash::ObjectId;
+
+use crate::{head, remote};
+
+/// The kind of repository.
+#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
+pub enum Kind {
+ /// A submodule worktree, whose `git` repository lives in `.git/modules/**/<name>` of the parent repository.
+ Submodule,
+ /// A bare repository does not have a work tree, that is files on disk beyond the `git` repository itself.
+ Bare,
+    /// A `git` repository along with checked-out files in a work tree.
+ WorkTree {
+        /// If true, this is the git dir associated with a _linked_ worktree; otherwise it is a repository with a _main_ worktree.
+ is_linked: bool,
+ },
+}
+
+/// A worktree checkout containing the files of the repository in consumable form.
+pub struct Worktree<'repo> {
+ pub(crate) parent: &'repo Repository,
+ /// The root path of the checkout.
+ pub(crate) path: &'repo std::path::Path,
+}
+
+/// The head reference, as created from looking at `.git/HEAD`, able to represent all of its possible states.
+///
+/// Note that like [`Reference`], this type's data is a snapshot of the persisted state on disk.
+#[derive(Clone)]
+pub struct Head<'repo> {
+ /// One of various possible states for the HEAD reference
+ pub kind: head::Kind,
+ pub(crate) repo: &'repo Repository,
+}
+
+/// An [ObjectId] with access to a repository.
+#[derive(Clone, Copy)]
+pub struct Id<'r> {
+ /// The actual object id
+ pub(crate) inner: ObjectId,
+ pub(crate) repo: &'r Repository,
+}
+
+/// A decoded object with a reference to its owning repository.
+pub struct Object<'repo> {
+ /// The id of the object
+ pub id: ObjectId,
+ /// The kind of the object
+ pub kind: gix_object::Kind,
+ /// The fully decoded object data
+ pub data: Vec<u8>,
+ pub(crate) repo: &'repo Repository,
+}
+
+impl<'a> Drop for Object<'a> {
+ fn drop(&mut self) {
+ self.repo.reuse_buffer(&mut self.data);
+ }
+}
+
+/// A decoded tree object with access to its owning repository.
+pub struct Tree<'repo> {
+ /// The id of the tree
+ pub id: ObjectId,
+ /// The fully decoded tree data
+ pub data: Vec<u8>,
+ pub(crate) repo: &'repo Repository,
+}
+
+impl<'a> Drop for Tree<'a> {
+ fn drop(&mut self) {
+ self.repo.reuse_buffer(&mut self.data);
+ }
+}
+
+/// A decoded tag object with access to its owning repository.
+pub struct Tag<'repo> {
+    /// The id of the tag
+ pub id: ObjectId,
+ /// The fully decoded tag data
+ pub data: Vec<u8>,
+ pub(crate) repo: &'repo Repository,
+}
+
+impl<'a> Drop for Tag<'a> {
+ fn drop(&mut self) {
+ self.repo.reuse_buffer(&mut self.data);
+ }
+}
+
+/// A decoded commit object with access to its owning repository.
+pub struct Commit<'repo> {
+ /// The id of the commit
+ pub id: ObjectId,
+ /// The fully decoded commit data
+ pub data: Vec<u8>,
+ pub(crate) repo: &'repo Repository,
+}
+
+impl<'a> Drop for Commit<'a> {
+ fn drop(&mut self) {
+ self.repo.reuse_buffer(&mut self.data);
+ }
+}
+
+/// A detached, self-contained object, without access to its source repository.
+///
+/// Use it if an [`Object`] should be sent over thread boundaries or stored in collections.
+#[derive(Clone)]
+pub struct ObjectDetached {
+ /// The id of the object
+ pub id: ObjectId,
+ /// The kind of the object
+ pub kind: gix_object::Kind,
+ /// The fully decoded object data
+ pub data: Vec<u8>,
+}
+
+/// A reference that points to an object or reference, with access to its source repository.
+///
+/// Note that these are snapshots and won't recognize if they are stale.
+#[derive(Clone)]
+pub struct Reference<'r> {
+ /// The actual reference data
+ pub inner: gix_ref::Reference,
+ pub(crate) repo: &'r Repository,
+}
+
+/// A thread-local handle to interact with a repository from a single thread.
+///
+/// It is `Send` but **not** `Sync`; for the latter you can convert it with `to_sync()`.
+/// Note that it clones itself so that it is empty, requiring the user to configure each clone separately, specifically
+/// and explicitly. This is to have the fastest-possible default configuration available by default, but allow
+/// those who experiment with workloads to get speed boosts of 2x or more.
+pub struct Repository {
+ /// A ref store with shared ownership (or the equivalent of it).
+ pub refs: crate::RefStore,
+ /// A way to access objects.
+ pub objects: crate::OdbHandle,
+
+ pub(crate) work_tree: Option<PathBuf>,
+ /// The path to the resolved common directory if this is a linked worktree repository or it is otherwise set.
+ pub(crate) common_dir: Option<PathBuf>,
+ /// A free-list of re-usable object backing buffers
+ pub(crate) bufs: RefCell<Vec<Vec<u8>>>,
+ /// A pre-assembled selection of often-accessed configuration values for quick access.
+ pub(crate) config: crate::config::Cache,
+ /// the options obtained when instantiating this repository.
+ ///
+ /// Particularly useful when following linked worktrees and instantiating new equally configured worktree repositories.
+ pub(crate) options: crate::open::Options,
+ pub(crate) index: crate::worktree::IndexStorage,
+}
+
+/// An instance with access to everything a git repository entails, best imagined as a container implementing `Sync + Send` for _most_
+/// of the system resources required to interact with a `git` repository, which are loaded once the instance is created.
+///
+/// Use this type to reference it in a threaded context for the creation of thread-local [`Repositories`][Repository].
+///
+/// Note that this type purposefully isn't very useful until it is converted into a thread-local repository with `to_thread_local()`,
+/// it's merely meant to be able to exist in a `Sync` context.
+pub struct ThreadSafeRepository {
+ /// A store for references to point at objects
+ pub refs: crate::RefStore,
+ /// A store for objects that contain data
+ pub objects: gix_features::threading::OwnShared<gix_odb::Store>,
+ /// The path to the worktree at which to find checked out files
+ pub work_tree: Option<PathBuf>,
+ /// The path to the common directory if this is a linked worktree repository or it is otherwise set.
+ pub common_dir: Option<PathBuf>,
+ pub(crate) config: crate::config::Cache,
+ /// options obtained when instantiating this repository for use when following linked worktrees.
+ pub(crate) linked_worktree_options: crate::open::Options,
+    /// The index of this instance's worktree.
+ pub(crate) index: crate::worktree::IndexStorage,
+}
+
+/// A remote which represents a way to interact with hosts for remote clones of the parent repository.
+#[derive(Debug, Clone, PartialEq)]
+pub struct Remote<'repo> {
+    /// The remote's symbolic name, only present if persisted in git configuration files.
+ pub(crate) name: Option<remote::Name<'static>>,
+    /// The url of the host to talk to, after application of replacements. If it is unset, the `push_url` must be set
+    /// and fetches aren't possible.
+ pub(crate) url: Option<gix_url::Url>,
+ /// The rewritten `url`, if it was rewritten.
+ pub(crate) url_alias: Option<gix_url::Url>,
+ /// The url to use for pushing specifically.
+ pub(crate) push_url: Option<gix_url::Url>,
+ /// The rewritten `push_url`, if it was rewritten.
+ pub(crate) push_url_alias: Option<gix_url::Url>,
+ /// Refspecs for use when fetching.
+ pub(crate) fetch_specs: Vec<gix_refspec::RefSpec>,
+ /// Refspecs for use when pushing.
+ pub(crate) push_specs: Vec<gix_refspec::RefSpec>,
+ /// Tell us what to do with tags when fetched.
+ pub(crate) fetch_tags: remote::fetch::Tags,
+ // /// Delete local tracking branches that don't exist on the remote anymore.
+ // pub(crate) prune: bool,
+ // /// Delete tags that don't exist on the remote anymore, equivalent to pruning the refspec `refs/tags/*:refs/tags/*`.
+ // pub(crate) prune_tags: bool,
+ pub(crate) repo: &'repo Repository,
+}
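
To illustrate the intended split between `ThreadSafeRepository` and the thread-local `Repository`, a small sketch that shares one instance across scoped threads via the `to_thread_local()` conversion mentioned above. `ThreadSafeRepository::discover()` and the exact `object_cache_size()` signature are assumptions.

    // Sketch: one Sync + Send instance, one cheaply derived handle per thread.
    fn parallel_handles(path: &std::path::Path) -> Result<(), Box<dyn std::error::Error>> {
        let shared = gix::ThreadSafeRepository::discover(path)?; // assumed constructor
        std::thread::scope(|scope| {
            for _ in 0..4 {
                let shared = &shared;
                scope.spawn(move || {
                    // Each thread gets its own `Repository` and configures its own caches.
                    let mut repo = shared.to_thread_local();
                    repo.object_cache_size(Some(4 * 1024 * 1024));
                    // ... use `repo` for lookups, traversal, etc.
                });
            }
        });
        Ok(())
    }
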
diff --git a/vendor/gix/src/worktree/mod.rs b/vendor/gix/src/worktree/mod.rs
new file mode 100644
index 000000000..19a44a900
--- /dev/null
+++ b/vendor/gix/src/worktree/mod.rs
@@ -0,0 +1,160 @@
+use std::path::PathBuf;
+
+pub use gix_worktree::*;
+
+use crate::{
+ bstr::{BStr, BString},
+ Repository,
+};
+
+pub(crate) type IndexStorage = gix_features::threading::OwnShared<gix_features::fs::MutableSnapshot<gix_index::File>>;
+/// A lazily loaded and auto-updated worktree index.
+pub type Index = gix_features::fs::SharedSnapshot<gix_index::File>;
+
+/// A stand-in to a worktree as result of a worktree iteration.
+///
+/// It provides access to typical worktree state, but may not actually point to a valid checkout as the latter has been moved or
+/// deleted.
+#[derive(Debug, Clone)]
+pub struct Proxy<'repo> {
+ pub(crate) parent: &'repo Repository,
+ pub(crate) git_dir: PathBuf,
+}
+
+/// Access
+impl<'repo> crate::Worktree<'repo> {
+ /// Read the location of the checkout, the base of the work tree
+ pub fn base(&self) -> &'repo std::path::Path {
+ self.path
+ }
+
+ /// Return true if this worktree is the main worktree associated with a non-bare git repository.
+ ///
+ /// It cannot be removed.
+ pub fn is_main(&self) -> bool {
+ self.id().is_none()
+ }
+
+ /// Return true if this worktree cannot be pruned, moved or deleted, which is useful if it is located on an external storage device.
+ ///
+ /// Always false for the main worktree.
+ pub fn is_locked(&self) -> bool {
+ Proxy::new(self.parent, self.parent.git_dir()).is_locked()
+ }
+
+ /// Provide a reason for the locking of this worktree, if it is locked at all.
+ ///
+    /// Note that errors are squelched if the lock file cannot be read, in which case
+    /// no reason is returned.
+ pub fn lock_reason(&self) -> Option<BString> {
+ Proxy::new(self.parent, self.parent.git_dir()).lock_reason()
+ }
+
+    /// Return the ID of the repository worktree, if it is a linked worktree, or `None` if it is the main worktree.
+ pub fn id(&self) -> Option<&BStr> {
+ id(self.parent.git_dir(), self.parent.common_dir.is_some())
+ }
+}
+
+pub(crate) fn id(git_dir: &std::path::Path, has_common_dir: bool) -> Option<&BStr> {
+ if !has_common_dir {
+ return None;
+ }
+ let candidate = gix_path::os_str_into_bstr(git_dir.file_name().expect("at least one directory level"))
+ .expect("no illformed UTF-8");
+ let maybe_worktrees = git_dir.parent()?;
+ (maybe_worktrees.file_name()?.to_str()? == "worktrees").then_some(candidate)
+}
+
+///
+pub mod proxy;
+
+///
+pub mod open_index {
+ use crate::bstr::BString;
+
+ /// The error returned by [`Worktree::open_index()`][crate::Worktree::open_index()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("Could not interpret value '{}' as 'index.threads'", .value)]
+ ConfigIndexThreads {
+ value: BString,
+ #[source]
+ err: gix_config::value::Error,
+ },
+ #[error(transparent)]
+ IndexFile(#[from] gix_index::file::init::Error),
+ }
+
+ impl<'repo> crate::Worktree<'repo> {
+ /// A shortcut to [`crate::Repository::open_index()`].
+ pub fn open_index(&self) -> Result<gix_index::File, Error> {
+ self.parent.open_index()
+ }
+
+ /// A shortcut to [`crate::Repository::index()`].
+ pub fn index(&self) -> Result<crate::worktree::Index, Error> {
+ self.parent.index()
+ }
+ }
+}
+
+///
+pub mod excludes {
+ use std::path::PathBuf;
+
+ /// The error returned by [`Worktree::excludes()`][crate::Worktree::excludes()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("Could not read repository exclude.")]
+ Io(#[from] std::io::Error),
+ #[error(transparent)]
+ EnvironmentPermission(#[from] gix_sec::permission::Error<PathBuf>),
+ #[error("The value for `core.excludesFile` could not be read from configuration")]
+ ExcludesFilePathInterpolation(#[from] gix_config::path::interpolate::Error),
+ }
+
+ impl<'repo> crate::Worktree<'repo> {
+ /// Configure a file-system cache checking if files below the repository are excluded.
+ ///
+ /// This takes into consideration all the usual repository configuration.
+ // TODO: test, provide higher-level interface that is much easier to use and doesn't panic.
+ pub fn excludes(
+ &self,
+ index: &gix_index::State,
+ overrides: Option<gix_attributes::MatchGroup<gix_attributes::Ignore>>,
+ ) -> Result<gix_worktree::fs::Cache, Error> {
+ let repo = self.parent;
+ let case = repo
+ .config
+ .ignore_case
+ .then_some(gix_glob::pattern::Case::Fold)
+ .unwrap_or_default();
+ let mut buf = Vec::with_capacity(512);
+ let excludes_file = match repo.config.excludes_file().transpose()? {
+ Some(user_path) => Some(user_path),
+ None => repo.config.xdg_config_path("ignore")?,
+ };
+ let state = gix_worktree::fs::cache::State::IgnoreStack(gix_worktree::fs::cache::state::Ignore::new(
+ overrides.unwrap_or_default(),
+ gix_attributes::MatchGroup::<gix_attributes::Ignore>::from_git_dir(
+ repo.git_dir(),
+ excludes_file,
+ &mut buf,
+ )?,
+ None,
+ case,
+ ));
+ let attribute_list = state.build_attribute_list(index, index.path_backing(), case);
+ Ok(gix_worktree::fs::Cache::new(
+ self.path,
+ state,
+ case,
+ buf,
+ attribute_list,
+ ))
+ }
+ }
+}
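
A short usage sketch for the accessors above, assuming `Repository::worktree()` returns `Option<Worktree<'_>>` for repositories that have a checkout (that accessor is an assumption; the remaining calls are the methods defined in this file):

    // Sketch: inspect the repository's worktree, if it has one.
    fn describe_worktree(repo: &gix::Repository) -> Result<(), Box<dyn std::error::Error>> {
        let Some(worktree) = repo.worktree() else {
            println!("bare repository, no worktree");
            return Ok(());
        };
        println!("checkout at {}", worktree.base().display());
        println!("is main worktree: {}", worktree.is_main());
        if let Some(id) = worktree.id() {
            println!("linked worktree id: {id}");
        }
        if worktree.is_locked() {
            println!("locked, reason: {:?}", worktree.lock_reason());
        }
        let index = worktree.index()?; // lazily loaded, auto-updated snapshot
        println!("index entries: {}", index.entries().len());
        Ok(())
    }
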
diff --git a/vendor/gix/src/worktree/proxy.rs b/vendor/gix/src/worktree/proxy.rs
new file mode 100644
index 000000000..8a77db815
--- /dev/null
+++ b/vendor/gix/src/worktree/proxy.rs
@@ -0,0 +1,101 @@
+#![allow(clippy::result_large_err)]
+use std::path::{Path, PathBuf};
+
+use crate::{
+ bstr::{BStr, BString, ByteSlice},
+ worktree::Proxy,
+ Repository, ThreadSafeRepository,
+};
+
+#[allow(missing_docs)]
+pub mod into_repo {
+ use std::path::PathBuf;
+
+ /// The error returned by [`Proxy::into_repo()`][super::Proxy::into_repo()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ Open(#[from] crate::open::Error),
+ #[error("Worktree at '{}' is inaccessible", .base.display())]
+ MissingWorktree { base: PathBuf },
+ #[error(transparent)]
+ MissingGitDirFile(#[from] std::io::Error),
+ }
+}
+
+impl<'repo> Proxy<'repo> {
+ pub(crate) fn new(parent: &'repo Repository, git_dir: impl Into<PathBuf>) -> Self {
+ Proxy {
+ parent,
+ git_dir: git_dir.into(),
+ }
+ }
+}
+
+impl<'repo> Proxy<'repo> {
+ /// Read the location of the checkout, the base of the work tree.
+ /// Note that the location might not exist.
+ pub fn base(&self) -> std::io::Result<PathBuf> {
+ let git_dir = self.git_dir.join("gitdir");
+ let base_dot_git = gix_discover::path::from_plain_file(&git_dir).ok_or_else(|| {
+ std::io::Error::new(
+ std::io::ErrorKind::NotFound,
+ format!("Required file '{}' does not exist", git_dir.display()),
+ )
+ })??;
+
+ Ok(gix_discover::path::without_dot_git_dir(base_dot_git))
+ }
+
+ /// The git directory for the work tree, typically contained within the parent git dir.
+ pub fn git_dir(&self) -> &Path {
+ &self.git_dir
+ }
+
+ /// The name of the worktree, which is derived from its folder within the `worktrees` directory within the parent `.git` folder.
+ pub fn id(&self) -> &BStr {
+ gix_path::os_str_into_bstr(self.git_dir.file_name().expect("worktrees/ parent dir"))
+ .expect("no illformed UTF-8")
+ }
+
+ /// Return true if the worktree cannot be pruned, moved or deleted, which is useful if it is located on an external storage device.
+ pub fn is_locked(&self) -> bool {
+ self.git_dir.join("locked").is_file()
+ }
+
+ /// Provide a reason for the locking of this worktree, if it is locked at all.
+ ///
+    /// Note that errors are squelched if the lock file cannot be read, in which case
+    /// no reason is returned.
+ pub fn lock_reason(&self) -> Option<BString> {
+ std::fs::read(self.git_dir.join("locked"))
+ .ok()
+ .map(|contents| contents.trim().into())
+ }
+
+ /// Transform this proxy into a [`Repository`] while ignoring issues reading `base()` and ignoring that it might not exist.
+ ///
+ /// Most importantly, the `Repository` might be initialized with a non-existing work tree directory as the checkout
+ /// was removed or moved in the mean time or is unavailable for other reasons.
+ /// The caller will encounter io errors if it's used like the work tree is guaranteed to be present, but can still access
+ /// a lot of information if work tree access is avoided.
+ pub fn into_repo_with_possibly_inaccessible_worktree(self) -> Result<Repository, crate::open::Error> {
+ let base = self.base().ok();
+ let repo = ThreadSafeRepository::open_from_paths(self.git_dir, base, self.parent.options.clone())?;
+ Ok(repo.into())
+ }
+
+    /// Like `into_repo_with_possibly_inaccessible_worktree()`, but fails if `base()` cannot be read
+    /// or if the worktree directory doesn't exist.
+ pub fn into_repo(self) -> Result<Repository, into_repo::Error> {
+ let base = self.base()?;
+ if !base.is_dir() {
+ return Err(into_repo::Error::MissingWorktree { base });
+ }
+ let repo = ThreadSafeRepository::open_from_paths(self.git_dir, base.into(), self.parent.options.clone())?;
+ Ok(repo.into())
+ }
+}
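
Finally, a hedged sketch that ties the `Proxy` methods together: enumerate linked worktrees and open each unlocked one as a full `Repository`. The `Repository::worktrees()` accessor yielding these proxies is an assumption; `id()`, `base()`, `is_locked()`, `lock_reason()` and `into_repo()` are the methods defined above.

    // Sketch: open every unlocked linked worktree as its own repository.
    fn open_linked_worktrees(repo: &gix::Repository) -> Result<(), Box<dyn std::error::Error>> {
        for proxy in repo.worktrees()? {
            // assumed accessor returning the linked-worktree proxies
            println!("worktree '{}' at {:?}", proxy.id(), proxy.base()?);
            if proxy.is_locked() {
                println!("  skipped, locked: {:?}", proxy.lock_reason());
                continue;
            }
            let linked = proxy.into_repo()?;
            println!("  git dir: {}", linked.git_dir().display());
        }
        Ok(())
    }
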