summaryrefslogtreecommitdiffstats
path: root/vendor/gix/src
diff options
context:
space:
mode:
Diffstat (limited to 'vendor/gix/src')
-rw-r--r--vendor/gix/src/assets/baseline-init/HEAD1
-rw-r--r--vendor/gix/src/assets/baseline-init/description1
-rwxr-xr-xvendor/gix/src/assets/baseline-init/hooks/applypatch-msg.sample15
-rwxr-xr-xvendor/gix/src/assets/baseline-init/hooks/commit-msg.sample24
-rwxr-xr-xvendor/gix/src/assets/baseline-init/hooks/fsmonitor-watchman.sample173
-rwxr-xr-xvendor/gix/src/assets/baseline-init/hooks/post-update.sample8
-rwxr-xr-xvendor/gix/src/assets/baseline-init/hooks/pre-applypatch.sample14
-rwxr-xr-xvendor/gix/src/assets/baseline-init/hooks/pre-commit.sample49
-rwxr-xr-xvendor/gix/src/assets/baseline-init/hooks/pre-merge-commit.sample13
-rwxr-xr-xvendor/gix/src/assets/baseline-init/hooks/pre-push.sample53
-rwxr-xr-xvendor/gix/src/assets/baseline-init/hooks/pre-rebase.sample169
-rwxr-xr-xvendor/gix/src/assets/baseline-init/hooks/pre-receive.sample24
-rwxr-xr-xvendor/gix/src/assets/baseline-init/hooks/prepare-commit-msg.sample42
-rwxr-xr-xvendor/gix/src/assets/baseline-init/hooks/update.sample128
-rw-r--r--vendor/gix/src/assets/baseline-init/info/exclude6
-rw-r--r--vendor/gix/src/clone/checkout.rs161
-rw-r--r--vendor/gix/src/clone/fetch/mod.rs212
-rw-r--r--vendor/gix/src/clone/fetch/util.rs229
-rw-r--r--vendor/gix/src/clone/mod.rs118
-rw-r--r--vendor/gix/src/commit.rs238
-rw-r--r--vendor/gix/src/config/cache/access.rs233
-rw-r--r--vendor/gix/src/config/cache/incubate.rs111
-rw-r--r--vendor/gix/src/config/cache/init.rs485
-rw-r--r--vendor/gix/src/config/cache/mod.rs18
-rw-r--r--vendor/gix/src/config/cache/util.rs143
-rw-r--r--vendor/gix/src/config/mod.rs454
-rw-r--r--vendor/gix/src/config/overrides.rs49
-rw-r--r--vendor/gix/src/config/snapshot/_impls.rs76
-rw-r--r--vendor/gix/src/config/snapshot/access.rs143
-rw-r--r--vendor/gix/src/config/snapshot/credential_helpers.rs183
-rw-r--r--vendor/gix/src/config/snapshot/mod.rs5
-rw-r--r--vendor/gix/src/config/tree/keys.rs629
-rw-r--r--vendor/gix/src/config/tree/mod.rs123
-rw-r--r--vendor/gix/src/config/tree/sections/author.rs23
-rw-r--r--vendor/gix/src/config/tree/sections/branch.rs65
-rw-r--r--vendor/gix/src/config/tree/sections/checkout.rs58
-rw-r--r--vendor/gix/src/config/tree/sections/clone.rs20
-rw-r--r--vendor/gix/src/config/tree/sections/committer.rs23
-rw-r--r--vendor/gix/src/config/tree/sections/core.rs302
-rw-r--r--vendor/gix/src/config/tree/sections/credential.rs56
-rw-r--r--vendor/gix/src/config/tree/sections/diff.rs133
-rw-r--r--vendor/gix/src/config/tree/sections/extensions.rs59
-rw-r--r--vendor/gix/src/config/tree/sections/gitoxide.rs363
-rw-r--r--vendor/gix/src/config/tree/sections/http.rs317
-rw-r--r--vendor/gix/src/config/tree/sections/init.rs20
-rw-r--r--vendor/gix/src/config/tree/sections/mod.rs96
-rw-r--r--vendor/gix/src/config/tree/sections/pack.rs64
-rw-r--r--vendor/gix/src/config/tree/sections/protocol.rs85
-rw-r--r--vendor/gix/src/config/tree/sections/remote.rs101
-rw-r--r--vendor/gix/src/config/tree/sections/safe.rs27
-rw-r--r--vendor/gix/src/config/tree/sections/ssh.rs65
-rw-r--r--vendor/gix/src/config/tree/sections/url.rs25
-rw-r--r--vendor/gix/src/config/tree/sections/user.rs22
-rw-r--r--vendor/gix/src/config/tree/traits.rs199
-rw-r--r--vendor/gix/src/create.rs251
-rw-r--r--vendor/gix/src/discover.rs88
-rw-r--r--vendor/gix/src/env.rs129
-rw-r--r--vendor/gix/src/ext/mod.rs9
-rw-r--r--vendor/gix/src/ext/object_id.rs34
-rw-r--r--vendor/gix/src/ext/reference.rs15
-rw-r--r--vendor/gix/src/ext/rev_spec.rs20
-rw-r--r--vendor/gix/src/ext/tree.rs44
-rw-r--r--vendor/gix/src/head/log.rs35
-rw-r--r--vendor/gix/src/head/mod.rs122
-rw-r--r--vendor/gix/src/head/peel.rs119
-rw-r--r--vendor/gix/src/id.rs195
-rw-r--r--vendor/gix/src/init.rs101
-rw-r--r--vendor/gix/src/interrupt.rs223
-rw-r--r--vendor/gix/src/kind.rs23
-rw-r--r--vendor/gix/src/lib.rs314
-rw-r--r--vendor/gix/src/mailmap.rs18
-rw-r--r--vendor/gix/src/object/blob.rs148
-rw-r--r--vendor/gix/src/object/commit.rs156
-rw-r--r--vendor/gix/src/object/errors.rs34
-rw-r--r--vendor/gix/src/object/impls.rs123
-rw-r--r--vendor/gix/src/object/mod.rs221
-rw-r--r--vendor/gix/src/object/peel.rs93
-rw-r--r--vendor/gix/src/object/tag.rs15
-rw-r--r--vendor/gix/src/object/tree/diff/change.rs111
-rw-r--r--vendor/gix/src/object/tree/diff/for_each.rs235
-rw-r--r--vendor/gix/src/object/tree/diff/mod.rs118
-rw-r--r--vendor/gix/src/object/tree/diff/rewrites.rs108
-rw-r--r--vendor/gix/src/object/tree/diff/tracked.rs491
-rw-r--r--vendor/gix/src/object/tree/iter.rs53
-rw-r--r--vendor/gix/src/object/tree/mod.rs158
-rw-r--r--vendor/gix/src/object/tree/traverse.rs62
-rw-r--r--vendor/gix/src/open/mod.rs67
-rw-r--r--vendor/gix/src/open/options.rs180
-rw-r--r--vendor/gix/src/open/repository.rs345
-rw-r--r--vendor/gix/src/path.rs11
-rw-r--r--vendor/gix/src/reference/edits.rs75
-rw-r--r--vendor/gix/src/reference/errors.rs89
-rw-r--r--vendor/gix/src/reference/iter.rs127
-rw-r--r--vendor/gix/src/reference/log.rs36
-rw-r--r--vendor/gix/src/reference/mod.rs87
-rw-r--r--vendor/gix/src/reference/remote.rs49
-rw-r--r--vendor/gix/src/remote/access.rs105
-rw-r--r--vendor/gix/src/remote/build.rs84
-rw-r--r--vendor/gix/src/remote/connect.rs166
-rw-r--r--vendor/gix/src/remote/connection/access.rs67
-rw-r--r--vendor/gix/src/remote/connection/fetch/config.rs26
-rw-r--r--vendor/gix/src/remote/connection/fetch/error.rs41
-rw-r--r--vendor/gix/src/remote/connection/fetch/mod.rs240
-rw-r--r--vendor/gix/src/remote/connection/fetch/negotiate.rs78
-rw-r--r--vendor/gix/src/remote/connection/fetch/receive_pack.rs238
-rw-r--r--vendor/gix/src/remote/connection/fetch/update_refs/mod.rs274
-rw-r--r--vendor/gix/src/remote/connection/fetch/update_refs/tests.rs607
-rw-r--r--vendor/gix/src/remote/connection/fetch/update_refs/update.rs128
-rw-r--r--vendor/gix/src/remote/connection/mod.rs29
-rw-r--r--vendor/gix/src/remote/connection/ref_map.rs268
-rw-r--r--vendor/gix/src/remote/errors.rs45
-rw-r--r--vendor/gix/src/remote/fetch.rs166
-rw-r--r--vendor/gix/src/remote/init.rs116
-rw-r--r--vendor/gix/src/remote/mod.rs62
-rw-r--r--vendor/gix/src/remote/name.rs84
-rw-r--r--vendor/gix/src/remote/save.rs125
-rw-r--r--vendor/gix/src/remote/url/mod.rs7
-rw-r--r--vendor/gix/src/remote/url/rewrite.rs100
-rw-r--r--vendor/gix/src/remote/url/scheme_permission.rs120
-rw-r--r--vendor/gix/src/repository/cache.rs30
-rw-r--r--vendor/gix/src/repository/config/mod.rs191
-rw-r--r--vendor/gix/src/repository/config/transport.rs425
-rw-r--r--vendor/gix/src/repository/identity.rs175
-rw-r--r--vendor/gix/src/repository/impls.rs73
-rw-r--r--vendor/gix/src/repository/init.rs55
-rw-r--r--vendor/gix/src/repository/location.rs86
-rw-r--r--vendor/gix/src/repository/mod.rs36
-rw-r--r--vendor/gix/src/repository/object.rs214
-rw-r--r--vendor/gix/src/repository/permissions.rs168
-rw-r--r--vendor/gix/src/repository/reference.rs243
-rw-r--r--vendor/gix/src/repository/remote.rs199
-rw-r--r--vendor/gix/src/repository/revision.rs42
-rw-r--r--vendor/gix/src/repository/snapshots.rs109
-rw-r--r--vendor/gix/src/repository/state.rs44
-rw-r--r--vendor/gix/src/repository/thread_safe.rs66
-rw-r--r--vendor/gix/src/repository/worktree.rs119
-rw-r--r--vendor/gix/src/revision/mod.rs27
-rw-r--r--vendor/gix/src/revision/spec/mod.rs90
-rw-r--r--vendor/gix/src/revision/spec/parse/delegate/mod.rs256
-rw-r--r--vendor/gix/src/revision/spec/parse/delegate/navigate.rs340
-rw-r--r--vendor/gix/src/revision/spec/parse/delegate/revision.rs225
-rw-r--r--vendor/gix/src/revision/spec/parse/error.rs130
-rw-r--r--vendor/gix/src/revision/spec/parse/mod.rs61
-rw-r--r--vendor/gix/src/revision/spec/parse/types.rs182
-rw-r--r--vendor/gix/src/revision/walk.rs127
-rw-r--r--vendor/gix/src/tag.rs16
-rw-r--r--vendor/gix/src/types.rs205
-rw-r--r--vendor/gix/src/worktree/mod.rs160
-rw-r--r--vendor/gix/src/worktree/proxy.rs101
149 files changed, 18756 insertions, 0 deletions
diff --git a/vendor/gix/src/assets/baseline-init/HEAD b/vendor/gix/src/assets/baseline-init/HEAD
new file mode 100644
index 000000000..b870d8262
--- /dev/null
+++ b/vendor/gix/src/assets/baseline-init/HEAD
@@ -0,0 +1 @@
+ref: refs/heads/main
diff --git a/vendor/gix/src/assets/baseline-init/description b/vendor/gix/src/assets/baseline-init/description
new file mode 100644
index 000000000..498b267a8
--- /dev/null
+++ b/vendor/gix/src/assets/baseline-init/description
@@ -0,0 +1 @@
+Unnamed repository; edit this file 'description' to name the repository.
diff --git a/vendor/gix/src/assets/baseline-init/hooks/applypatch-msg.sample b/vendor/gix/src/assets/baseline-init/hooks/applypatch-msg.sample
new file mode 100755
index 000000000..20fbb51a2
--- /dev/null
+++ b/vendor/gix/src/assets/baseline-init/hooks/applypatch-msg.sample
@@ -0,0 +1,15 @@
+#!/bin/sh
+#
+# An example hook script to check the commit log message taken by
+# applypatch from an e-mail message.
+#
+# The hook should exit with non-zero status after issuing an
+# appropriate message if it wants to stop the commit. The hook is
+# allowed to edit the commit message file.
+#
+# To enable this hook, rename this file to "applypatch-msg".
+
+. git-sh-setup
+commitmsg="$(git rev-parse --gix-path hooks/commit-msg)"
+test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"}
+:
diff --git a/vendor/gix/src/assets/baseline-init/hooks/commit-msg.sample b/vendor/gix/src/assets/baseline-init/hooks/commit-msg.sample
new file mode 100755
index 000000000..b58d1184a
--- /dev/null
+++ b/vendor/gix/src/assets/baseline-init/hooks/commit-msg.sample
@@ -0,0 +1,24 @@
+#!/bin/sh
+#
+# An example hook script to check the commit log message.
+# Called by "git commit" with one argument, the name of the file
+# that has the commit message. The hook should exit with non-zero
+# status after issuing an appropriate message if it wants to stop the
+# commit. The hook is allowed to edit the commit message file.
+#
+# To enable this hook, rename this file to "commit-msg".
+
+# Uncomment the below to add a Signed-off-by line to the message.
+# Doing this in a hook is a bad idea in general, but the prepare-commit-msg
+# hook is more suited to it.
+#
+# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p')
+# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1"
+
+# This example catches duplicate Signed-off-by lines.
+
+test "" = "$(grep '^Signed-off-by: ' "$1" |
+ sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || {
+ echo >&2 Duplicate Signed-off-by lines.
+ exit 1
+}
diff --git a/vendor/gix/src/assets/baseline-init/hooks/fsmonitor-watchman.sample b/vendor/gix/src/assets/baseline-init/hooks/fsmonitor-watchman.sample
new file mode 100755
index 000000000..14ed0aa42
--- /dev/null
+++ b/vendor/gix/src/assets/baseline-init/hooks/fsmonitor-watchman.sample
@@ -0,0 +1,173 @@
+#!/usr/bin/perl
+
+use strict;
+use warnings;
+use IPC::Open2;
+
+# An example hook script to integrate Watchman
+# (https://facebook.github.io/watchman/) with git to speed up detecting
+# new and modified files.
+#
+# The hook is passed a version (currently 2) and last update token
+# formatted as a string and outputs to stdout a new update token and
+# all files that have been modified since the update token. Paths must
+# be relative to the root of the working tree and separated by a single NUL.
+#
+# To enable this hook, rename this file to "query-watchman" and set
+# 'git config core.fsmonitor .git/hooks/query-watchman'
+#
+my ($version, $last_update_token) = @ARGV;
+
+# Uncomment for debugging
+# print STDERR "$0 $version $last_update_token\n";
+
+# Check the hook interface version
+if ($version ne 2) {
+ die "Unsupported query-fsmonitor hook version '$version'.\n" .
+ "Falling back to scanning...\n";
+}
+
+my $git_work_tree = get_working_dir();
+
+my $retry = 1;
+
+my $json_pkg;
+eval {
+ require JSON::XS;
+ $json_pkg = "JSON::XS";
+ 1;
+} or do {
+ require JSON::PP;
+ $json_pkg = "JSON::PP";
+};
+
+launch_watchman();
+
+sub launch_watchman {
+ my $o = watchman_query();
+ if (is_work_tree_watched($o)) {
+ output_result($o->{clock}, @{$o->{files}});
+ }
+}
+
+sub output_result {
+ my ($clockid, @files) = @_;
+
+ # Uncomment for debugging watchman output
+ # open (my $fh, ">", ".git/watchman-output.out");
+ # binmode $fh, ":utf8";
+ # print $fh "$clockid\n@files\n";
+ # close $fh;
+
+ binmode STDOUT, ":utf8";
+ print $clockid;
+ print "\0";
+ local $, = "\0";
+ print @files;
+}
+
+sub watchman_clock {
+ my $response = qx/watchman clock "$git_work_tree"/;
+ die "Failed to get clock id on '$git_work_tree'.\n" .
+ "Falling back to scanning...\n" if $? != 0;
+
+ return $json_pkg->new->utf8->decode($response);
+}
+
+sub watchman_query {
+ my $pid = open2(\*CHLD_OUT, \*CHLD_IN, 'watchman -j --no-pretty')
+ or die "open2() failed: $!\n" .
+ "Falling back to scanning...\n";
+
+ # In the query expression below we're asking for names of files that
+ # changed since $last_update_token but not from the .git folder.
+ #
+ # To accomplish this, we're using the "since" generator to use the
+ # recency index to select candidate nodes and "fields" to limit the
+ # output to file names only. Then we're using the "expression" term to
+ # further constrain the results.
+ if (substr($last_update_token, 0, 1) eq "c") {
+ $last_update_token = "\"$last_update_token\"";
+ }
+ my $query = <<" END";
+ ["query", "$git_work_tree", {
+ "since": $last_update_token,
+ "fields": ["name"],
+ "expression": ["not", ["dirname", ".git"]]
+ }]
+ END
+
+ # Uncomment for debugging the watchman query
+ # open (my $fh, ">", ".git/watchman-query.json");
+ # print $fh $query;
+ # close $fh;
+
+ print CHLD_IN $query;
+ close CHLD_IN;
+ my $response = do {local $/; <CHLD_OUT>};
+
+ # Uncomment for debugging the watch response
+ # open ($fh, ">", ".git/watchman-response.json");
+ # print $fh $response;
+ # close $fh;
+
+ die "Watchman: command returned no output.\n" .
+ "Falling back to scanning...\n" if $response eq "";
+ die "Watchman: command returned invalid output: $response\n" .
+ "Falling back to scanning...\n" unless $response =~ /^\{/;
+
+ return $json_pkg->new->utf8->decode($response);
+}
+
+sub is_work_tree_watched {
+ my ($output) = @_;
+ my $error = $output->{error};
+ if ($retry > 0 and $error and $error =~ m/unable to resolve root .* directory (.*) is not watched/) {
+ $retry--;
+ my $response = qx/watchman watch "$git_work_tree"/;
+ die "Failed to make watchman watch '$git_work_tree'.\n" .
+ "Falling back to scanning...\n" if $? != 0;
+ $output = $json_pkg->new->utf8->decode($response);
+ $error = $output->{error};
+ die "Watchman: $error.\n" .
+ "Falling back to scanning...\n" if $error;
+
+ # Uncomment for debugging watchman output
+ # open (my $fh, ">", ".git/watchman-output.out");
+ # close $fh;
+
+ # Watchman will always return all files on the first query so
+ # return the fast "everything is dirty" flag to git and do the
+ # Watchman query just to get it over with now so we won't pay
+ # the cost in git to look up each individual file.
+ my $o = watchman_clock();
+ $error = $output->{error};
+
+ die "Watchman: $error.\n" .
+ "Falling back to scanning...\n" if $error;
+
+ output_result($o->{clock}, ("/"));
+ $last_update_token = $o->{clock};
+
+ eval { launch_watchman() };
+ return 0;
+ }
+
+ die "Watchman: $error.\n" .
+ "Falling back to scanning...\n" if $error;
+
+ return 1;
+}
+
+sub get_working_dir {
+ my $working_dir;
+ if ($^O =~ 'msys' || $^O =~ 'cygwin') {
+ $working_dir = Win32::GetCwd();
+ $working_dir =~ tr/\\/\//;
+ } else {
+ require Cwd;
+ $working_dir = Cwd::cwd();
+ }
+
+ return $working_dir;
+}
diff --git a/vendor/gix/src/assets/baseline-init/hooks/post-update.sample b/vendor/gix/src/assets/baseline-init/hooks/post-update.sample
new file mode 100755
index 000000000..ec17ec193
--- /dev/null
+++ b/vendor/gix/src/assets/baseline-init/hooks/post-update.sample
@@ -0,0 +1,8 @@
+#!/bin/sh
+#
+# An example hook script to prepare a packed repository for use over
+# dumb transports.
+#
+# To enable this hook, rename this file to "post-update".
+
+exec git update-server-info
diff --git a/vendor/gix/src/assets/baseline-init/hooks/pre-applypatch.sample b/vendor/gix/src/assets/baseline-init/hooks/pre-applypatch.sample
new file mode 100755
index 000000000..d61828510
--- /dev/null
+++ b/vendor/gix/src/assets/baseline-init/hooks/pre-applypatch.sample
@@ -0,0 +1,14 @@
+#!/bin/sh
+#
+# An example hook script to verify what is about to be committed
+# by applypatch from an e-mail message.
+#
+# The hook should exit with non-zero status after issuing an
+# appropriate message if it wants to stop the commit.
+#
+# To enable this hook, rename this file to "pre-applypatch".
+
+. git-sh-setup
+precommit="$(git rev-parse --gix-path hooks/pre-commit)"
+test -x "$precommit" && exec "$precommit" ${1+"$@"}
+:
diff --git a/vendor/gix/src/assets/baseline-init/hooks/pre-commit.sample b/vendor/gix/src/assets/baseline-init/hooks/pre-commit.sample
new file mode 100755
index 000000000..e144712c8
--- /dev/null
+++ b/vendor/gix/src/assets/baseline-init/hooks/pre-commit.sample
@@ -0,0 +1,49 @@
+#!/bin/sh
+#
+# An example hook script to verify what is about to be committed.
+# Called by "git commit" with no arguments. The hook should
+# exit with non-zero status after issuing an appropriate message if
+# it wants to stop the commit.
+#
+# To enable this hook, rename this file to "pre-commit".
+
+if git rev-parse --verify HEAD >/dev/null 2>&1
+then
+ against=HEAD
+else
+ # Initial commit: diff against an empty tree object
+ against=$(git hash-object -t tree /dev/null)
+fi
+
+# If you want to allow non-ASCII filenames set this variable to true.
+allownonascii=$(git config --type=bool hooks.allownonascii)
+
+# Redirect output to stderr.
+exec 1>&2
+
+# Cross platform projects tend to avoid non-ASCII filenames; prevent
+# them from being added to the repository. We exploit the fact that the
+# printable range starts at the space character and ends with tilde.
+if [ "$allownonascii" != "true" ] &&
+ # Note that the use of brackets around a tr range is ok here, (it's
+ # even required, for portability to Solaris 10's /usr/bin/tr), since
+ # the square bracket bytes happen to fall in the designated range.
+ test $(git diff --cached --name-only --diff-filter=A -z $against |
+ LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0
+then
+ cat <<\EOF
+Error: Attempt to add a non-ASCII file name.
+
+This can cause problems if you want to work with people on other platforms.
+
+To be portable it is advisable to rename the file.
+
+If you know what you are doing you can disable this check using:
+
+ git config hooks.allownonascii true
+EOF
+ exit 1
+fi
+
+# If there are whitespace errors, print the offending file names and fail.
+exec git diff-index --check --cached $against --
diff --git a/vendor/gix/src/assets/baseline-init/hooks/pre-merge-commit.sample b/vendor/gix/src/assets/baseline-init/hooks/pre-merge-commit.sample
new file mode 100755
index 000000000..399eab192
--- /dev/null
+++ b/vendor/gix/src/assets/baseline-init/hooks/pre-merge-commit.sample
@@ -0,0 +1,13 @@
+#!/bin/sh
+#
+# An example hook script to verify what is about to be committed.
+# Called by "git merge" with no arguments. The hook should
+# exit with non-zero status after issuing an appropriate message to
+# stderr if it wants to stop the merge commit.
+#
+# To enable this hook, rename this file to "pre-merge-commit".
+
+. git-sh-setup
+test -x "$GIT_DIR/hooks/pre-commit" &&
+ exec "$GIT_DIR/hooks/pre-commit"
+:
diff --git a/vendor/gix/src/assets/baseline-init/hooks/pre-push.sample b/vendor/gix/src/assets/baseline-init/hooks/pre-push.sample
new file mode 100755
index 000000000..6187dbf43
--- /dev/null
+++ b/vendor/gix/src/assets/baseline-init/hooks/pre-push.sample
@@ -0,0 +1,53 @@
+#!/bin/sh
+
+# An example hook script to verify what is about to be pushed. Called by "git
+# push" after it has checked the remote status, but before anything has been
+# pushed. If this script exits with a non-zero status nothing will be pushed.
+#
+# This hook is called with the following parameters:
+#
+# $1 -- Name of the remote to which the push is being done
+# $2 -- URL to which the push is being done
+#
+# If pushing without using a named remote those arguments will be equal.
+#
+# Information about the commits which are being pushed is supplied as lines to
+# the standard input in the form:
+#
+# <local ref> <local sha1> <remote ref> <remote sha1>
+#
+# This sample shows how to prevent push of commits where the log message starts
+# with "WIP" (work in progress).
+
+remote="$1"
+url="$2"
+
+z40=0000000000000000000000000000000000000000
+
+while read local_ref local_sha remote_ref remote_sha
+do
+ if [ "$local_sha" = $z40 ]
+ then
+ # Handle delete
+ :
+ else
+ if [ "$remote_sha" = $z40 ]
+ then
+ # New branch, examine all commits
+ range="$local_sha"
+ else
+ # Update to existing branch, examine new commits
+ range="$remote_sha..$local_sha"
+ fi
+
+ # Check for WIP commit
+ commit=`git rev-list -n 1 --grep '^WIP' "$range"`
+ if [ -n "$commit" ]
+ then
+ echo >&2 "Found WIP commit in $local_ref, not pushing"
+ exit 1
+ fi
+ fi
+done
+
+exit 0
diff --git a/vendor/gix/src/assets/baseline-init/hooks/pre-rebase.sample b/vendor/gix/src/assets/baseline-init/hooks/pre-rebase.sample
new file mode 100755
index 000000000..d6ac43f64
--- /dev/null
+++ b/vendor/gix/src/assets/baseline-init/hooks/pre-rebase.sample
@@ -0,0 +1,169 @@
+#!/bin/sh
+#
+# Copyright (c) 2006, 2008 Junio C Hamano
+#
+# The "pre-rebase" hook is run just before "git rebase" starts doing
+# its job, and can prevent the command from running by exiting with
+# non-zero status.
+#
+# The hook is called with the following parameters:
+#
+# $1 -- the upstream the series was forked from.
+# $2 -- the branch being rebased (or empty when rebasing the current branch).
+#
+# This sample shows how to prevent topic branches that are already
+# merged to 'next' branch from getting rebased, because allowing it
+# would result in rebasing already published history.
+
+publish=next
+basebranch="$1"
+if test "$#" = 2
+then
+ topic="refs/heads/$2"
+else
+ topic=`git symbolic-ref HEAD` ||
+ exit 0 ;# we do not interrupt rebasing detached HEAD
+fi
+
+case "$topic" in
+refs/heads/??/*)
+ ;;
+*)
+ exit 0 ;# we do not interrupt others.
+ ;;
+esac
+
+# Now we are dealing with a topic branch being rebased
+# on top of main. Is it OK to rebase it?
+
+# Does the topic really exist?
+git show-ref -q "$topic" || {
+ echo >&2 "No such branch $topic"
+ exit 1
+}
+
+# Is topic fully merged to main?
+not_in_main=`git rev-list --pretty=oneline ^main "$topic"`
+if test -z "$not_in_main"
+then
+ echo >&2 "$topic is fully merged to main; better remove it."
+ exit 1 ;# we could allow it, but there is no point.
+fi
+
+# Is topic ever merged to next? If so you should not be rebasing it.
+only_next_1=`git rev-list ^main "^$topic" ${publish} | sort`
+only_next_2=`git rev-list ^main ${publish} | sort`
+if test "$only_next_1" = "$only_next_2"
+then
+ not_in_topic=`git rev-list "^$topic" main`
+ if test -z "$not_in_topic"
+ then
+ echo >&2 "$topic is already up to date with main"
+ exit 1 ;# we could allow it, but there is no point.
+ else
+ exit 0
+ fi
+else
+ not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"`
+ /usr/bin/perl -e '
+ my $topic = $ARGV[0];
+ my $msg = "* $topic has commits already merged to public branch:\n";
+ my (%not_in_next) = map {
+ /^([0-9a-f]+) /;
+ ($1 => 1);
+ } split(/\n/, $ARGV[1]);
+ for my $elem (map {
+ /^([0-9a-f]+) (.*)$/;
+ [$1 => $2];
+ } split(/\n/, $ARGV[2])) {
+ if (!exists $not_in_next{$elem->[0]}) {
+ if ($msg) {
+ print STDERR $msg;
+ undef $msg;
+ }
+ print STDERR " $elem->[1]\n";
+ }
+ }
+ ' "$topic" "$not_in_next" "$not_in_main"
+ exit 1
+fi
+
+<<\DOC_END
+
+This sample hook safeguards topic branches that have been
+published from being rewound.
+
+The workflow assumed here is:
+
+ * Once a topic branch forks from "main", "main" is never
+ merged into it again (either directly or indirectly).
+
+ * Once a topic branch is fully cooked and merged into "main",
+ it is deleted. If you need to build on top of it to correct
+ earlier mistakes, a new topic branch is created by forking at
+ the tip of the "main". This is not strictly necessary, but
+ it makes it easier to keep your history simple.
+
+ * Whenever you need to test or publish your changes to topic
+ branches, merge them into "next" branch.
+
+The script, being an example, hardcodes the publish branch name
+to be "next", but it is trivial to make it configurable via
+$GIT_DIR/config mechanism.
+
+With this workflow, you would want to know:
+
+(1) ... if a topic branch has ever been merged to "next". Young
+ topic branches can have stupid mistakes you would rather
+ clean up before publishing, and things that have not been
+ merged into other branches can be easily rebased without
+ affecting other people. But once it is published, you would
+ not want to rewind it.
+
+(2) ... if a topic branch has been fully merged to "main".
+ Then you can delete it. More importantly, you should not
+ build on top of it -- other people may already want to
+ change things related to the topic as patches against your
+ "main", so if you need further changes, it is better to
+ fork the topic (perhaps with the same name) afresh from the
+ tip of "main".
+
+Let's look at this example:
+
+ o---o---o---o---o---o---o---o---o---o "next"
+ / / / /
+ / a---a---b A / /
+ / / / /
+ / / c---c---c---c B /
+ / / / \ /
+ / / / b---b C \ /
+ / / / / \ /
+ ---o---o---o---o---o---o---o---o---o---o---o "main"
+
+
+A, B and C are topic branches.
+
+ * A has one fix since it was merged up to "next".
+
+ * B has finished. It has been fully merged up to "main" and "next",
+ and is ready to be deleted.
+
+ * C has not merged to "next" at all.
+
+We would want to allow C to be rebased, refuse A, and encourage
+B to be deleted.
+
+To compute (1):
+
+ git rev-list ^main ^topic next
+ git rev-list ^main next
+
+ if these match, topic has not merged in next at all.
+
+To compute (2):
+
+ git rev-list main..topic
+
+ if this is empty, it is fully merged to "main".
+
+DOC_END
diff --git a/vendor/gix/src/assets/baseline-init/hooks/pre-receive.sample b/vendor/gix/src/assets/baseline-init/hooks/pre-receive.sample
new file mode 100755
index 000000000..a1fd29ec1
--- /dev/null
+++ b/vendor/gix/src/assets/baseline-init/hooks/pre-receive.sample
@@ -0,0 +1,24 @@
+#!/bin/sh
+#
+# An example hook script to make use of push options.
+# The example simply echoes all push options that start with 'echoback='
+# and rejects all pushes when the "reject" push option is used.
+#
+# To enable this hook, rename this file to "pre-receive".
+
+if test -n "$GIT_PUSH_OPTION_COUNT"
+then
+ i=0
+ while test "$i" -lt "$GIT_PUSH_OPTION_COUNT"
+ do
+ eval "value=\$GIT_PUSH_OPTION_$i"
+ case "$value" in
+ echoback=*)
+ echo "echo from the pre-receive-hook: ${value#*=}" >&2
+ ;;
+ reject)
+ exit 1
+ esac
+ i=$((i + 1))
+ done
+fi
diff --git a/vendor/gix/src/assets/baseline-init/hooks/prepare-commit-msg.sample b/vendor/gix/src/assets/baseline-init/hooks/prepare-commit-msg.sample
new file mode 100755
index 000000000..10fa14c5a
--- /dev/null
+++ b/vendor/gix/src/assets/baseline-init/hooks/prepare-commit-msg.sample
@@ -0,0 +1,42 @@
+#!/bin/sh
+#
+# An example hook script to prepare the commit log message.
+# Called by "git commit" with the name of the file that has the
+# commit message, followed by the description of the commit
+# message's source. The hook's purpose is to edit the commit
+# message file. If the hook fails with a non-zero status,
+# the commit is aborted.
+#
+# To enable this hook, rename this file to "prepare-commit-msg".
+
+# This hook includes three examples. The first one removes the
+# "# Please enter the commit message..." help message.
+#
+# The second includes the output of "git diff --name-status -r"
+# into the message, just before the "git status" output. It is
+# commented because it doesn't cope with --amend or with squashed
+# commits.
+#
+# The third example adds a Signed-off-by line to the message, that can
+# still be edited. This is rarely a good idea.
+
+COMMIT_MSG_FILE=$1
+COMMIT_SOURCE=$2
+SHA1=$3
+
+/usr/bin/perl -i.bak -ne 'print unless(m/^. Please enter the commit message/..m/^#$/)' "$COMMIT_MSG_FILE"
+
+# case "$COMMIT_SOURCE,$SHA1" in
+# ,|template,)
+# /usr/bin/perl -i.bak -pe '
+# print "\n" . `git diff --cached --name-status -r`
+# if /^#/ && $first++ == 0' "$COMMIT_MSG_FILE" ;;
+# *) ;;
+# esac
+
+# SOB=$(git var GIT_COMMITTER_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p')
+# git interpret-trailers --in-place --trailer "$SOB" "$COMMIT_MSG_FILE"
+# if test -z "$COMMIT_SOURCE"
+# then
+# /usr/bin/perl -i.bak -pe 'print "\n" if !$first_line++' "$COMMIT_MSG_FILE"
+# fi
diff --git a/vendor/gix/src/assets/baseline-init/hooks/update.sample b/vendor/gix/src/assets/baseline-init/hooks/update.sample
new file mode 100755
index 000000000..5014c4b31
--- /dev/null
+++ b/vendor/gix/src/assets/baseline-init/hooks/update.sample
@@ -0,0 +1,128 @@
+#!/bin/sh
+#
+# An example hook script to block unannotated tags from entering.
+# Called by "git receive-pack" with arguments: refname sha1-old sha1-new
+#
+# To enable this hook, rename this file to "update".
+#
+# Config
+# ------
+# hooks.allowunannotated
+# This boolean sets whether unannotated tags will be allowed into the
+# repository. By default they won't be.
+# hooks.allowdeletetag
+# This boolean sets whether deleting tags will be allowed in the
+# repository. By default they won't be.
+# hooks.allowmodifytag
+# This boolean sets whether a tag may be modified after creation. By default
+# it won't be.
+# hooks.allowdeletebranch
+# This boolean sets whether deleting branches will be allowed in the
+# repository. By default they won't be.
+# hooks.denycreatebranch
+# This boolean sets whether remotely creating branches will be denied
+# in the repository. By default this is allowed.
+#
+
+# --- Command line
+refname="$1"
+oldrev="$2"
+newrev="$3"
+
+# --- Safety check
+if [ -z "$GIT_DIR" ]; then
+ echo "Don't run this script from the command line." >&2
+ echo " (if you want, you could supply GIT_DIR then run" >&2
+ echo " $0 <ref> <oldrev> <newrev>)" >&2
+ exit 1
+fi
+
+if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then
+ echo "usage: $0 <ref> <oldrev> <newrev>" >&2
+ exit 1
+fi
+
+# --- Config
+allowunannotated=$(git config --type=bool hooks.allowunannotated)
+allowdeletebranch=$(git config --type=bool hooks.allowdeletebranch)
+denycreatebranch=$(git config --type=bool hooks.denycreatebranch)
+allowdeletetag=$(git config --type=bool hooks.allowdeletetag)
+allowmodifytag=$(git config --type=bool hooks.allowmodifytag)
+
+# check for no description
+projectdesc=$(sed -e '1q' "$GIT_DIR/description")
+case "$projectdesc" in
+"Unnamed repository"* | "")
+ echo "*** Project description file hasn't been set" >&2
+ exit 1
+ ;;
+esac
+
+# --- Check types
+# if $newrev is 0000...0000, it's a commit to delete a ref.
+zero="0000000000000000000000000000000000000000"
+if [ "$newrev" = "$zero" ]; then
+ newrev_type=delete
+else
+ newrev_type=$(git cat-file -t $newrev)
+fi
+
+case "$refname","$newrev_type" in
+ refs/tags/*,commit)
+ # un-annotated tag
+ short_refname=${refname##refs/tags/}
+ if [ "$allowunannotated" != "true" ]; then
+ echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2
+ echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2
+ exit 1
+ fi
+ ;;
+ refs/tags/*,delete)
+ # delete tag
+ if [ "$allowdeletetag" != "true" ]; then
+ echo "*** Deleting a tag is not allowed in this repository" >&2
+ exit 1
+ fi
+ ;;
+ refs/tags/*,tag)
+ # annotated tag
+ if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1
+ then
+ echo "*** Tag '$refname' already exists." >&2
+ echo "*** Modifying a tag is not allowed in this repository." >&2
+ exit 1
+ fi
+ ;;
+ refs/heads/*,commit)
+ # branch
+ if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then
+ echo "*** Creating a branch is not allowed in this repository" >&2
+ exit 1
+ fi
+ ;;
+ refs/heads/*,delete)
+ # delete branch
+ if [ "$allowdeletebranch" != "true" ]; then
+ echo "*** Deleting a branch is not allowed in this repository" >&2
+ exit 1
+ fi
+ ;;
+ refs/remotes/*,commit)
+ # tracking branch
+ ;;
+ refs/remotes/*,delete)
+ # delete tracking branch
+ if [ "$allowdeletebranch" != "true" ]; then
+ echo "*** Deleting a tracking branch is not allowed in this repository" >&2
+ exit 1
+ fi
+ ;;
+ *)
+ # Anything else (is there anything else?)
+ echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2
+ exit 1
+ ;;
+esac
+
+# --- Finished
+exit 0
diff --git a/vendor/gix/src/assets/baseline-init/info/exclude b/vendor/gix/src/assets/baseline-init/info/exclude
new file mode 100644
index 000000000..a5196d1be
--- /dev/null
+++ b/vendor/gix/src/assets/baseline-init/info/exclude
@@ -0,0 +1,6 @@
+# git ls-files --others --exclude-from=.git/info/exclude
+# Lines that start with '#' are comments.
+# For a project mostly in C, the following would be a good set of
+# exclude patterns (uncomment them if you want to use them):
+# *.[oa]
+# *~
diff --git a/vendor/gix/src/clone/checkout.rs b/vendor/gix/src/clone/checkout.rs
new file mode 100644
index 000000000..50d235f13
--- /dev/null
+++ b/vendor/gix/src/clone/checkout.rs
@@ -0,0 +1,161 @@
+use crate::{clone::PrepareCheckout, Repository};
+
+///
+pub mod main_worktree {
+ use std::{path::PathBuf, sync::atomic::AtomicBool};
+
+ use gix_odb::FindExt;
+
+ use crate::{clone::PrepareCheckout, Progress, Repository};
+
+ /// The error returned by [`PrepareCheckout::main_worktree()`].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("Repository at \"{}\" is a bare repository and cannot have a main worktree checkout", git_dir.display())]
+ BareRepository { git_dir: PathBuf },
+ #[error("The object pointed to by HEAD is not a treeish")]
+ NoHeadTree(#[from] crate::object::peel::to_kind::Error),
+ #[error("Could not create index from tree at {id}")]
+ IndexFromTree {
+ id: gix_hash::ObjectId,
+ source: gix_traverse::tree::breadthfirst::Error,
+ },
+ #[error(transparent)]
+ WriteIndex(#[from] gix_index::file::write::Error),
+ #[error(transparent)]
+ CheckoutOptions(#[from] crate::config::checkout_options::Error),
+ #[error(transparent)]
+ IndexCheckout(
+ #[from]
+ gix_worktree::index::checkout::Error<gix_odb::find::existing_object::Error<gix_odb::store::find::Error>>,
+ ),
+ #[error("Failed to reopen object database as Arc (only if thread-safety wasn't compiled in)")]
+ OpenArcOdb(#[from] std::io::Error),
+ #[error("The HEAD reference could not be located")]
+ FindHead(#[from] crate::reference::find::existing::Error),
+ #[error("The HEAD reference could not be located")]
+ PeelHeadToId(#[from] crate::head::peel::Error),
+ }
+
+ /// The progress ids used in [`PrepareCheckout::main_worktree()`].
+ ///
+ /// Use this information to selectively extract the progress of interest in case the parent application has custom visualization.
+ #[derive(Debug, Copy, Clone)]
+ pub enum ProgressId {
+ /// The amount of files checked out thus far.
+ CheckoutFiles,
+ /// The amount of bytes written in total, the aggregate of the size of the content of all files thus far.
+ BytesWritten,
+ }
+
+ impl From<ProgressId> for gix_features::progress::Id {
+ fn from(v: ProgressId) -> Self {
+ match v {
+ ProgressId::CheckoutFiles => *b"CLCF",
+ ProgressId::BytesWritten => *b"CLCB",
+ }
+ }
+ }
+
+ /// Modification
+ impl PrepareCheckout {
+ /// Checkout the main worktree, determining how many threads to use by looking at `checkout.workers`, defaulting to using
+ /// one thread per logical core.
+ ///
+ /// Note that this is a no-op if the remote was empty, leaving this repository empty as well. This can be validated by checking
+ /// if the `head()` of the returned repository is not unborn.
+ pub fn main_worktree(
+ &mut self,
+ mut progress: impl crate::Progress,
+ should_interrupt: &AtomicBool,
+ ) -> Result<(Repository, gix_worktree::index::checkout::Outcome), Error> {
+ let repo = self
+ .repo
+ .as_ref()
+ .expect("still present as we never succeeded the worktree checkout yet");
+ let workdir = repo.work_dir().ok_or_else(|| Error::BareRepository {
+ git_dir: repo.git_dir().to_owned(),
+ })?;
+ let root_tree = match repo.head()?.peel_to_id_in_place().transpose()? {
+ Some(id) => id.object().expect("downloaded from remote").peel_to_tree()?.id,
+ None => {
+ return Ok((
+ self.repo.take().expect("still present"),
+ gix_worktree::index::checkout::Outcome::default(),
+ ))
+ }
+ };
+ let index = gix_index::State::from_tree(&root_tree, |oid, buf| repo.objects.find_tree_iter(oid, buf).ok())
+ .map_err(|err| Error::IndexFromTree {
+ id: root_tree,
+ source: err,
+ })?;
+ let mut index = gix_index::File::from_state(index, repo.index_path());
+
+ let mut opts = repo.config.checkout_options(repo.git_dir())?;
+ opts.destination_is_initially_empty = true;
+
+ let mut files = progress.add_child_with_id("checkout", ProgressId::CheckoutFiles.into());
+ let mut bytes = progress.add_child_with_id("writing", ProgressId::BytesWritten.into());
+
+ files.init(Some(index.entries().len()), crate::progress::count("files"));
+ bytes.init(None, crate::progress::bytes());
+
+ let start = std::time::Instant::now();
+ let outcome = gix_worktree::index::checkout(
+ &mut index,
+ workdir,
+ {
+ let objects = repo.objects.clone().into_arc()?;
+ move |oid, buf| objects.find_blob(oid, buf)
+ },
+ &mut files,
+ &mut bytes,
+ should_interrupt,
+ opts,
+ )?;
+ files.show_throughput(start);
+ bytes.show_throughput(start);
+
+ index.write(Default::default())?;
+ Ok((self.repo.take().expect("still present"), outcome))
+ }
+ }
+}
+
+/// Access
+impl PrepareCheckout {
+ /// Get access to the repository while the checkout isn't yet completed.
+ ///
+ /// # Panics
+ ///
+ /// If the checkout is completed and the [`Repository`] was already passed on to the caller.
+ pub fn repo(&self) -> &Repository {
+ self.repo
+ .as_ref()
+ .expect("present as checkout operation isn't complete")
+ }
+}
+
+/// Consumption
+impl PrepareCheckout {
+ /// Persist the contained repository as is even if an error may have occurred when checking out the main working tree.
+ pub fn persist(mut self) -> Repository {
+ self.repo.take().expect("present and consumed once")
+ }
+}
+
+impl Drop for PrepareCheckout {
+ fn drop(&mut self) {
+ if let Some(repo) = self.repo.take() {
+ std::fs::remove_dir_all(repo.work_dir().unwrap_or_else(|| repo.path())).ok();
+ }
+ }
+}
+
+impl From<PrepareCheckout> for Repository {
+ fn from(prep: PrepareCheckout) -> Self {
+ prep.persist()
+ }
+}
diff --git a/vendor/gix/src/clone/fetch/mod.rs b/vendor/gix/src/clone/fetch/mod.rs
new file mode 100644
index 000000000..d663b47ea
--- /dev/null
+++ b/vendor/gix/src/clone/fetch/mod.rs
@@ -0,0 +1,212 @@
+use crate::{bstr::BString, clone::PrepareFetch, Repository};
+
+/// The error returned by [`PrepareFetch::fetch_only()`].
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+#[cfg(feature = "blocking-network-client")]
+pub enum Error {
+ #[error(transparent)]
+ Connect(#[from] crate::remote::connect::Error),
+ #[error(transparent)]
+ PrepareFetch(#[from] crate::remote::fetch::prepare::Error),
+ #[error(transparent)]
+ Fetch(#[from] crate::remote::fetch::Error),
+ #[error(transparent)]
+ RemoteInit(#[from] crate::remote::init::Error),
+ #[error("Custom configuration of remote to clone from failed")]
+ RemoteConfiguration(#[source] Box<dyn std::error::Error + Send + Sync>),
+ #[error(transparent)]
+ RemoteName(#[from] crate::config::remote::symbolic_name::Error),
+ #[error("Failed to load repo-local git configuration before writing")]
+ LoadConfig(#[from] gix_config::file::init::from_paths::Error),
+ #[error("Failed to store configured remote in memory")]
+ SaveConfig(#[from] crate::remote::save::AsError),
+ #[error("Failed to write repository configuration to disk")]
+ SaveConfigIo(#[from] std::io::Error),
+ #[error("The remote HEAD points to a reference named {head_ref_name:?} which is invalid.")]
+ InvalidHeadRef {
+ source: gix_validate::refname::Error,
+ head_ref_name: BString,
+ },
+ #[error("Failed to update HEAD with values from remote")]
+ HeadUpdate(#[from] crate::reference::edit::Error),
+}
+
+/// Modification
+impl PrepareFetch {
+ /// Fetch a pack and update local branches according to refspecs, providing `progress` and checking `should_interrupt` to stop
+ /// the operation.
+ /// On success, the persisted repository is returned, and this method must not be called again to avoid a **panic**.
+ /// On error, the method may be called again to retry as often as needed.
+ ///
+ /// If the remote repository was empty, that is newly initialized, the returned repository will also be empty and like
+ /// it was newly initialized.
+ ///
+ /// Note that all data we created will be removed once this instance drops if the operation wasn't successful.
+ #[cfg(feature = "blocking-network-client")]
+ pub fn fetch_only<P>(
+ &mut self,
+ progress: P,
+ should_interrupt: &std::sync::atomic::AtomicBool,
+ ) -> Result<(Repository, crate::remote::fetch::Outcome), Error>
+ where
+ P: crate::Progress,
+ P::SubProgress: 'static,
+ {
+ use crate::{bstr::ByteVec, remote, remote::fetch::RefLogMessage};
+
+ let repo = self
+ .repo
+ .as_mut()
+ .expect("user error: multiple calls are allowed only until it succeeds");
+
+ let remote_name = match self.remote_name.as_ref() {
+ Some(name) => name.to_owned(),
+ None => repo
+ .config
+ .resolved
+ .string("clone", None, crate::config::tree::Clone::DEFAULT_REMOTE_NAME.name)
+ .map(|n| crate::config::tree::Clone::DEFAULT_REMOTE_NAME.try_into_symbolic_name(n))
+ .transpose()?
+ .unwrap_or_else(|| "origin".into()),
+ };
+
+ let mut remote = repo
+ .remote_at(self.url.clone())?
+ .with_refspecs(
+ Some(format!("+refs/heads/*:refs/remotes/{remote_name}/*").as_str()),
+ remote::Direction::Fetch,
+ )
+ .expect("valid static spec");
+ let mut clone_fetch_tags = None;
+ if let Some(f) = self.configure_remote.as_mut() {
+ remote = f(remote).map_err(|err| Error::RemoteConfiguration(err))?;
+ } else {
+ clone_fetch_tags = remote::fetch::Tags::All.into();
+ }
+
+ let config = util::write_remote_to_local_config_file(&mut remote, remote_name.clone())?;
+
+ // Now we are free to apply remote configuration we don't want to be written to disk.
+ if let Some(fetch_tags) = clone_fetch_tags {
+ remote = remote.with_fetch_tags(fetch_tags);
+ }
+
+ // Add HEAD after the remote was written to config, we need it to know what to checkout later, and assure
+ // the ref that HEAD points to is present no matter what.
+ let head_refspec = gix_refspec::parse(
+ format!("HEAD:refs/remotes/{remote_name}/HEAD").as_str().into(),
+ gix_refspec::parse::Operation::Fetch,
+ )
+ .expect("valid")
+ .to_owned();
+ let pending_pack: remote::fetch::Prepare<'_, '_, _, _> =
+ remote.connect(remote::Direction::Fetch, progress)?.prepare_fetch({
+ let mut opts = self.fetch_options.clone();
+ if !opts.extra_refspecs.contains(&head_refspec) {
+ opts.extra_refspecs.push(head_refspec)
+ }
+ opts
+ })?;
+ if pending_pack.ref_map().object_hash != repo.object_hash() {
+ unimplemented!("configure repository to expect a different object hash as advertised by the server")
+ }
+ let reflog_message = {
+ let mut b = self.url.to_bstring();
+ b.insert_str(0, "clone: from ");
+ b
+ };
+ let outcome = pending_pack
+ .with_write_packed_refs_only(true)
+ .with_reflog_message(RefLogMessage::Override {
+ message: reflog_message.clone(),
+ })
+ .receive(should_interrupt)?;
+
+ util::append_config_to_repo_config(repo, config);
+ util::update_head(
+ repo,
+ &outcome.ref_map.remote_refs,
+ reflog_message.as_ref(),
+ remote_name.as_ref(),
+ )?;
+
+ Ok((self.repo.take().expect("still present"), outcome))
+ }
+
+ /// Similar to [`fetch_only()`][Self::fetch_only()], but passes ownership to a utility type to configure a checkout operation.
+ #[cfg(feature = "blocking-network-client")]
+ pub fn fetch_then_checkout<P>(
+ &mut self,
+ progress: P,
+ should_interrupt: &std::sync::atomic::AtomicBool,
+ ) -> Result<(crate::clone::PrepareCheckout, crate::remote::fetch::Outcome), Error>
+ where
+ P: crate::Progress,
+ P::SubProgress: 'static,
+ {
+ let (repo, fetch_outcome) = self.fetch_only(progress, should_interrupt)?;
+ Ok((crate::clone::PrepareCheckout { repo: repo.into() }, fetch_outcome))
+ }
+}
+
+/// Builder
+impl PrepareFetch {
+ /// Set additional options to adjust parts of the fetch operation that are not affected by the git configuration.
+ #[cfg(any(feature = "async-network-client", feature = "blocking-network-client"))]
+ pub fn with_fetch_options(mut self, opts: crate::remote::ref_map::Options) -> Self {
+ self.fetch_options = opts;
+ self
+ }
+ /// Use `f` to apply arbitrary changes to the remote that is about to be used to fetch a pack.
+ ///
+ /// The passed in `remote` will be un-named and pre-configured to be a default remote as we know it from git-clone.
+ /// It is not yet present in the configuration of the repository,
+ /// but it will eventually be written to the configuration prior to performing the fetch operation;
+ /// _all changes done in `f()` will be persisted_.
+ ///
+ /// It can also be used to configure additional options, like those for fetching tags. Note that
+ /// [with_fetch_tags()][crate::Remote::with_fetch_tags()] should be called here to configure the clone as desired.
+ /// Otherwise a clone is configured to be complete and fetches all tags, not only those reachable from all branches.
+ pub fn configure_remote(
+ mut self,
+ f: impl FnMut(crate::Remote<'_>) -> Result<crate::Remote<'_>, Box<dyn std::error::Error + Send + Sync>> + 'static,
+ ) -> Self {
+ self.configure_remote = Some(Box::new(f));
+ self
+ }
+
+ /// Set the remote's name to the given value after it was configured using the function provided via
+ /// [`configure_remote()`][Self::configure_remote()].
+ ///
+ /// If not set here, it defaults to `origin` or the value of `clone.defaultRemoteName`.
+ pub fn with_remote_name(mut self, name: impl Into<BString>) -> Result<Self, crate::remote::name::Error> {
+ self.remote_name = Some(crate::remote::name::validated(name)?);
+ Ok(self)
+ }
+}
+
+/// Consumption
+impl PrepareFetch {
+ /// Persist the contained repository as is even if an error may have occurred when fetching from the remote.
+ pub fn persist(mut self) -> Repository {
+ self.repo.take().expect("present and consumed once")
+ }
+}
+
+impl Drop for PrepareFetch {
+ fn drop(&mut self) {
+ if let Some(repo) = self.repo.take() {
+ std::fs::remove_dir_all(repo.work_dir().unwrap_or_else(|| repo.path())).ok();
+ }
+ }
+}
+
+impl From<PrepareFetch> for Repository {
+ fn from(prep: PrepareFetch) -> Self {
+ prep.persist()
+ }
+}
+
+#[cfg(feature = "blocking-network-client")]
+mod util;
diff --git a/vendor/gix/src/clone/fetch/util.rs b/vendor/gix/src/clone/fetch/util.rs
new file mode 100644
index 000000000..ac8943f6e
--- /dev/null
+++ b/vendor/gix/src/clone/fetch/util.rs
@@ -0,0 +1,229 @@
+use std::{borrow::Cow, convert::TryInto, io::Write};
+
+use gix_odb::Find;
+use gix_ref::{
+ transaction::{LogChange, RefLog},
+ FullNameRef,
+};
+
+use super::Error;
+use crate::{
+ bstr::{BStr, BString, ByteSlice},
+ Repository,
+};
+
+enum WriteMode {
+ Overwrite,
+ Append,
+}
+
+#[allow(clippy::result_large_err)]
+pub fn write_remote_to_local_config_file(
+ remote: &mut crate::Remote<'_>,
+ remote_name: BString,
+) -> Result<gix_config::File<'static>, Error> {
+ let mut config = gix_config::File::new(local_config_meta(remote.repo));
+ remote.save_as_to(remote_name, &mut config)?;
+
+ write_to_local_config(&config, WriteMode::Append)?;
+ Ok(config)
+}
+
+fn local_config_meta(repo: &Repository) -> gix_config::file::Metadata {
+ let meta = repo.config.resolved.meta().clone();
+ assert_eq!(
+ meta.source,
+ gix_config::Source::Local,
+ "local path is the default for new sections"
+ );
+ meta
+}
+
+fn write_to_local_config(config: &gix_config::File<'static>, mode: WriteMode) -> std::io::Result<()> {
+ assert_eq!(
+ config.meta().source,
+ gix_config::Source::Local,
+ "made for appending to local configuration file"
+ );
+ let mut local_config = std::fs::OpenOptions::new()
+ .create(false)
+ .write(matches!(mode, WriteMode::Overwrite))
+ .append(matches!(mode, WriteMode::Append))
+ .open(config.meta().path.as_deref().expect("local config with path set"))?;
+ local_config.write_all(config.detect_newline_style())?;
+ config.write_to_filter(&mut local_config, |s| s.meta().source == gix_config::Source::Local)
+}
+
+pub fn append_config_to_repo_config(repo: &mut Repository, config: gix_config::File<'static>) {
+ let repo_config = gix_features::threading::OwnShared::make_mut(&mut repo.config.resolved);
+ repo_config.append(config);
+}
+
+/// HEAD cannot be written by means of refspec by design, so we have to do it manually here. Also create the pointed-to ref
+/// if we have to, as it might not have been naturally included in the ref-specs.
+pub fn update_head(
+ repo: &mut Repository,
+ remote_refs: &[gix_protocol::handshake::Ref],
+ reflog_message: &BStr,
+ remote_name: &BStr,
+) -> Result<(), Error> {
+ use gix_ref::{
+ transaction::{PreviousValue, RefEdit},
+ Target,
+ };
+ let (head_peeled_id, head_ref) = match remote_refs.iter().find_map(|r| {
+ Some(match r {
+ gix_protocol::handshake::Ref::Symbolic {
+ full_ref_name,
+ target,
+ object,
+ } if full_ref_name == "HEAD" => (Some(object.as_ref()), Some(target)),
+ gix_protocol::handshake::Ref::Direct { full_ref_name, object } if full_ref_name == "HEAD" => {
+ (Some(object.as_ref()), None)
+ }
+ gix_protocol::handshake::Ref::Unborn { full_ref_name, target } if full_ref_name == "HEAD" => {
+ (None, Some(target))
+ }
+ _ => return None,
+ })
+ }) {
+ Some(t) => t,
+ None => return Ok(()),
+ };
+
+ let head: gix_ref::FullName = "HEAD".try_into().expect("valid");
+ let reflog_message = || LogChange {
+ mode: RefLog::AndReference,
+ force_create_reflog: false,
+ message: reflog_message.to_owned(),
+ };
+ match head_ref {
+ Some(referent) => {
+ let referent: gix_ref::FullName = referent.try_into().map_err(|err| Error::InvalidHeadRef {
+ head_ref_name: referent.to_owned(),
+ source: err,
+ })?;
+ repo.refs
+ .transaction()
+ .packed_refs(gix_ref::file::transaction::PackedRefs::DeletionsAndNonSymbolicUpdates(
+ Box::new(|oid, buf| {
+ repo.objects
+ .try_find(oid, buf)
+ .map(|obj| obj.map(|obj| obj.kind))
+ .map_err(|err| Box::new(err) as Box<dyn std::error::Error + Send + Sync + 'static>)
+ }),
+ ))
+ .prepare(
+ {
+ let mut edits = vec![RefEdit {
+ change: gix_ref::transaction::Change::Update {
+ log: reflog_message(),
+ expected: PreviousValue::Any,
+ new: Target::Symbolic(referent.clone()),
+ },
+ name: head.clone(),
+ deref: false,
+ }];
+ if let Some(head_peeled_id) = head_peeled_id {
+ edits.push(RefEdit {
+ change: gix_ref::transaction::Change::Update {
+ log: reflog_message(),
+ expected: PreviousValue::Any,
+ new: Target::Peeled(head_peeled_id.to_owned()),
+ },
+ name: referent.clone(),
+ deref: false,
+ });
+ };
+ edits
+ },
+ gix_lock::acquire::Fail::Immediately,
+ gix_lock::acquire::Fail::Immediately,
+ )
+ .map_err(crate::reference::edit::Error::from)?
+ .commit(
+ repo.committer()
+ .transpose()
+ .map_err(|err| Error::HeadUpdate(crate::reference::edit::Error::ParseCommitterTime(err)))?,
+ )
+ .map_err(crate::reference::edit::Error::from)?;
+
+ if let Some(head_peeled_id) = head_peeled_id {
+ let mut log = reflog_message();
+ log.mode = RefLog::Only;
+ repo.edit_reference(RefEdit {
+ change: gix_ref::transaction::Change::Update {
+ log,
+ expected: PreviousValue::Any,
+ new: Target::Peeled(head_peeled_id.to_owned()),
+ },
+ name: head,
+ deref: false,
+ })?;
+ }
+
+ setup_branch_config(repo, referent.as_ref(), head_peeled_id, remote_name)?;
+ }
+ None => {
+ repo.edit_reference(RefEdit {
+ change: gix_ref::transaction::Change::Update {
+ log: reflog_message(),
+ expected: PreviousValue::Any,
+ new: Target::Peeled(
+ head_peeled_id
+ .expect("detached heads always point to something")
+ .to_owned(),
+ ),
+ },
+ name: head,
+ deref: false,
+ })?;
+ }
+ };
+ Ok(())
+}
+
+/// Setup the remote configuration for `branch` so that it points to itself, but on the remote, if and only if currently
+/// saved refspecs are able to match it.
+/// For that we reload the remote of `remote_name` and use its ref_specs for matching.
+fn setup_branch_config(
+ repo: &mut Repository,
+ branch: &FullNameRef,
+ branch_id: Option<&gix_hash::oid>,
+ remote_name: &BStr,
+) -> Result<(), Error> {
+ let short_name = match branch.category_and_short_name() {
+ Some((cat, shortened)) if cat == gix_ref::Category::LocalBranch => match shortened.to_str() {
+ Ok(s) => s,
+ Err(_) => return Ok(()),
+ },
+ _ => return Ok(()),
+ };
+ let remote = repo
+ .find_remote(remote_name)
+ .expect("remote was just created and must be visible in config");
+ let group = gix_refspec::MatchGroup::from_fetch_specs(remote.fetch_specs.iter().map(|s| s.to_ref()));
+ let null = gix_hash::ObjectId::null(repo.object_hash());
+ let res = group.match_remotes(
+ Some(gix_refspec::match_group::Item {
+ full_ref_name: branch.as_bstr(),
+ target: branch_id.unwrap_or(&null),
+ object: None,
+ })
+ .into_iter(),
+ );
+ if !res.mappings.is_empty() {
+ let mut config = repo.config_snapshot_mut();
+ let mut section = config
+ .new_section("branch", Some(Cow::Owned(short_name.into())))
+ .expect("section header name is always valid per naming rules, our input branch name is valid");
+ section.push("remote".try_into().expect("valid at compile time"), Some(remote_name));
+ section.push(
+ "merge".try_into().expect("valid at compile time"),
+ Some(branch.as_bstr()),
+ );
+ write_to_local_config(&config, WriteMode::Overwrite)?;
+ config.commit().expect("configuration we set is valid");
+ }
+ Ok(())
+}
diff --git a/vendor/gix/src/clone/mod.rs b/vendor/gix/src/clone/mod.rs
new file mode 100644
index 000000000..249a66a42
--- /dev/null
+++ b/vendor/gix/src/clone/mod.rs
@@ -0,0 +1,118 @@
+#![allow(clippy::result_large_err)]
+use std::convert::TryInto;
+
+use crate::{bstr::BString, config::tree::gitoxide};
+
+type ConfigureRemoteFn =
+ Box<dyn FnMut(crate::Remote<'_>) -> Result<crate::Remote<'_>, Box<dyn std::error::Error + Send + Sync>>>;
+
+/// A utility to collect configuration on how to fetch from a remote and initiate a fetch operation. It will delete the newly
+/// created repository when dropped without successfully finishing a fetch.
+#[must_use]
+pub struct PrepareFetch {
+ /// A freshly initialized repository which is owned by us, or `None` if it was handed to the user
+ repo: Option<crate::Repository>,
+ /// The name of the remote, which defaults to `origin` if not overridden.
+ remote_name: Option<BString>,
+ /// A function to configure a remote prior to fetching a pack.
+ configure_remote: Option<ConfigureRemoteFn>,
+ /// Options for preparing a fetch operation.
+ #[cfg(any(feature = "async-network-client", feature = "blocking-network-client"))]
+ fetch_options: crate::remote::ref_map::Options,
+ /// The url to clone from
+ #[cfg_attr(not(feature = "blocking-network-client"), allow(dead_code))]
+ url: gix_url::Url,
+}
+
+/// The error returned by [`PrepareFetch::new()`].
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum Error {
+ #[error(transparent)]
+ Init(#[from] crate::init::Error),
+ #[error(transparent)]
+ UrlParse(#[from] gix_url::parse::Error),
+ #[error("Failed to turn a the relative file url \"{}\" into an absolute one", url.to_bstring())]
+ CanonicalizeUrl {
+ url: gix_url::Url,
+ source: gix_path::realpath::Error,
+ },
+}
+
+/// Instantiation
+impl PrepareFetch {
+ /// Create a new repository at `path` with `create_opts` which is ready to clone from `url`, possibly after making additional adjustments to
+ /// configuration and settings.
+ ///
+ /// Note that this is merely a handle to perform the actual connection to the remote, and if any of it fails the freshly initialized repository
+ /// will be removed automatically as soon as this instance drops.
+ ///
+ /// # Deviation
+ ///
+ /// Similar to `git`, a missing user name and email configuration is not terminal and we will fill it in with dummy values. However,
+ /// instead of deriving values from the system, ours are hardcoded to indicate what happened.
+ #[allow(clippy::result_large_err)]
+ pub fn new<Url, E>(
+ url: Url,
+ path: impl AsRef<std::path::Path>,
+ kind: crate::create::Kind,
+ mut create_opts: crate::create::Options,
+ open_opts: crate::open::Options,
+ ) -> Result<Self, Error>
+ where
+ Url: TryInto<gix_url::Url, Error = E>,
+ gix_url::parse::Error: From<E>,
+ {
+ let mut url = url.try_into().map_err(gix_url::parse::Error::from)?;
+ url.canonicalize().map_err(|err| Error::CanonicalizeUrl {
+ url: url.clone(),
+ source: err,
+ })?;
+ create_opts.destination_must_be_empty = true;
+ let mut repo = crate::ThreadSafeRepository::init_opts(path, kind, create_opts, open_opts)?.to_thread_local();
+ if repo.committer().is_none() {
+ let mut config = gix_config::File::new(gix_config::file::Metadata::api());
+ config
+ .set_raw_value(
+ "gitoxide",
+ Some("committer".into()),
+ gitoxide::Committer::NAME_FALLBACK.name,
+ "no name configured during clone",
+ )
+ .expect("works - statically known");
+ config
+ .set_raw_value(
+ "gitoxide",
+ Some("committer".into()),
+ gitoxide::Committer::EMAIL_FALLBACK.name,
+ "noEmailAvailable@example.com",
+ )
+ .expect("works - statically known");
+ let mut repo_config = repo.config_snapshot_mut();
+ repo_config.append(config);
+ repo_config.commit().expect("configuration is still valid");
+ }
+ Ok(PrepareFetch {
+ url,
+ #[cfg(any(feature = "async-network-client", feature = "blocking-network-client"))]
+ fetch_options: Default::default(),
+ repo: Some(repo),
+ remote_name: None,
+ configure_remote: None,
+ })
+ }
+}
+
+/// A utility to collect configuration on how to perform a checkout into a working tree, and when dropped without checking out successfully
+/// the fetched repository will be dropped.
+#[must_use]
+pub struct PrepareCheckout {
+ /// A freshly initialized repository which is owned by us, or `None` if it was handed to the user
+ pub(self) repo: Option<crate::Repository>,
+}
+
+///
+pub mod fetch;
+
+///
+pub mod checkout;
diff --git a/vendor/gix/src/commit.rs b/vendor/gix/src/commit.rs
new file mode 100644
index 000000000..10fa6f675
--- /dev/null
+++ b/vendor/gix/src/commit.rs
@@ -0,0 +1,238 @@
+//!
+
+/// An empty array of a type usable with the `gix::easy` API to help declaring no parents should be used
+pub const NO_PARENT_IDS: [gix_hash::ObjectId; 0] = [];
+
+/// The error returned by [`commit(…)`][crate::Repository::commit()].
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum Error {
+ #[error(transparent)]
+ ParseTime(#[from] crate::config::time::Error),
+ #[error("Committer identity is not configured")]
+ CommitterMissing,
+ #[error("Author identity is not configured")]
+ AuthorMissing,
+ #[error(transparent)]
+ ReferenceNameValidation(#[from] gix_ref::name::Error),
+ #[error(transparent)]
+ WriteObject(#[from] crate::object::write::Error),
+ #[error(transparent)]
+ ReferenceEdit(#[from] crate::reference::edit::Error),
+}
+
+///
+pub mod describe {
+ use std::borrow::Cow;
+
+ use gix_hash::ObjectId;
+ use gix_hashtable::HashMap;
+ use gix_odb::Find;
+
+ use crate::{bstr::BStr, ext::ObjectIdExt, Repository};
+
+ /// The result of [try_resolve()][Platform::try_resolve()].
+ pub struct Resolution<'repo> {
+ /// The outcome of the describe operation.
+ pub outcome: gix_revision::describe::Outcome<'static>,
+ /// The id to describe.
+ pub id: crate::Id<'repo>,
+ }
+
+ impl<'repo> Resolution<'repo> {
+ /// Turn this instance into something displayable
+ pub fn format(self) -> Result<gix_revision::describe::Format<'static>, Error> {
+ let prefix = self.id.shorten()?;
+ Ok(self.outcome.into_format(prefix.hex_len()))
+ }
+ }
+
+ /// The error returned by [try_format()][Platform::try_format()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ Describe(#[from] gix_revision::describe::Error<gix_odb::store::find::Error>),
+ #[error("Could not produce an unambiguous shortened id for formatting.")]
+ ShortId(#[from] crate::id::shorten::Error),
+ #[error(transparent)]
+ RefIter(#[from] crate::reference::iter::Error),
+ #[error(transparent)]
+ RefIterInit(#[from] crate::reference::iter::init::Error),
+ }
+
+ /// A selector to choose what kind of references should contribute to names.
+ #[derive(Debug, Clone, Copy, PartialOrd, PartialEq, Ord, Eq, Hash)]
+ pub enum SelectRef {
+ /// Only use annotated tags for names.
+ AnnotatedTags,
+ /// Use all tags for names, annotated or plain reference.
+ AllTags,
+ /// Use all references, including local branch names.
+ AllRefs,
+ }
+
+ impl SelectRef {
+ fn names(&self, repo: &Repository) -> Result<HashMap<ObjectId, Cow<'static, BStr>>, Error> {
+ let platform = repo.references()?;
+
+ Ok(match self {
+ SelectRef::AllTags | SelectRef::AllRefs => {
+ let mut refs: Vec<_> = match self {
+ SelectRef::AllRefs => platform.all()?,
+ SelectRef::AllTags => platform.tags()?,
+ _ => unreachable!(),
+ }
+ .filter_map(Result::ok)
+ .filter_map(|mut r: crate::Reference<'_>| {
+ let target_id = r.target().try_id().map(ToOwned::to_owned);
+ let peeled_id = r.peel_to_id_in_place().ok()?;
+ let (prio, tag_time) = match target_id {
+ Some(target_id) if peeled_id != *target_id => {
+ let tag = repo.find_object(target_id).ok()?.try_into_tag().ok()?;
+ (1, tag.tagger().ok()??.time.seconds_since_unix_epoch)
+ }
+ _ => (0, 0),
+ };
+ (
+ peeled_id.inner,
+ prio,
+ tag_time,
+ Cow::from(r.inner.name.shorten().to_owned()),
+ )
+ .into()
+ })
+ .collect();
+ // By priority, then by time ascending, then lexicographically.
+ // More recent entries overwrite older ones due to collection into hashmap.
+ refs.sort_by(
+ |(_a_peeled_id, a_prio, a_time, a_name), (_b_peeled_id, b_prio, b_time, b_name)| {
+ a_prio
+ .cmp(b_prio)
+ .then_with(|| a_time.cmp(b_time))
+ .then_with(|| b_name.cmp(a_name))
+ },
+ );
+ refs.into_iter().map(|(a, _, _, b)| (a, b)).collect()
+ }
+ SelectRef::AnnotatedTags => {
+ let mut peeled_commits_and_tag_date: Vec<_> = platform
+ .tags()?
+ .filter_map(Result::ok)
+ .filter_map(|r: crate::Reference<'_>| {
+ // TODO: we assume direct refs for tags, which is the common case, but it doesn't have to be
+ // so rather follow symrefs till the first object and then peel tags after the first object was found.
+ let tag = r.try_id()?.object().ok()?.try_into_tag().ok()?;
+ let tag_time = tag
+ .tagger()
+ .ok()
+ .and_then(|s| s.map(|s| s.time.seconds_since_unix_epoch))
+ .unwrap_or(0);
+ let commit_id = tag.target_id().ok()?.object().ok()?.try_into_commit().ok()?.id;
+ Some((commit_id, tag_time, Cow::<BStr>::from(r.name().shorten().to_owned())))
+ })
+ .collect();
+ // Sort by time ascending, then lexicographically.
+ // More recent entries overwrite older ones due to collection into hashmap.
+ peeled_commits_and_tag_date.sort_by(|(_a_id, a_time, a_name), (_b_id, b_time, b_name)| {
+ a_time.cmp(b_time).then_with(|| b_name.cmp(a_name))
+ });
+ peeled_commits_and_tag_date
+ .into_iter()
+ .map(|(a, _, c)| (a, c))
+ .collect()
+ }
+ })
+ }
+ }
+
+ impl Default for SelectRef {
+ fn default() -> Self {
+ SelectRef::AnnotatedTags
+ }
+ }
+
+ /// A support type to allow configuring a `git describe` operation
+ pub struct Platform<'repo> {
+ pub(crate) id: gix_hash::ObjectId,
+ pub(crate) repo: &'repo crate::Repository,
+ pub(crate) select: SelectRef,
+ pub(crate) first_parent: bool,
+ pub(crate) id_as_fallback: bool,
+ pub(crate) max_candidates: usize,
+ }
+
+ impl<'repo> Platform<'repo> {
+ /// Configure which names to `select` from which describe can choose.
+ pub fn names(mut self, select: SelectRef) -> Self {
+ self.select = select;
+ self
+ }
+
+ /// If true, shorten the graph traversal time by just traversing the first parent of merge commits.
+ pub fn traverse_first_parent(mut self, first_parent: bool) -> Self {
+ self.first_parent = first_parent;
+ self
+ }
+
+ /// Only consider the given amount of candidates, instead of the default of 10.
+ pub fn max_candidates(mut self, candidates: usize) -> Self {
+ self.max_candidates = candidates;
+ self
+ }
+
+ /// If true, even if no candidate is available a format will always be produced.
+ pub fn id_as_fallback(mut self, use_fallback: bool) -> Self {
+ self.id_as_fallback = use_fallback;
+ self
+ }
+
+ /// Try to find a name for the configured commit id using all prior configuration, returning `Some(describe::Format)`
+ /// if one was found.
+ ///
+ /// Note that there will always be `Some(format)` if [`id_as_fallback()`][Platform::id_as_fallback()] was enabled.
+ pub fn try_format(&self) -> Result<Option<gix_revision::describe::Format<'static>>, Error> {
+ self.try_resolve()?.map(|r| r.format()).transpose()
+ }
+
+ /// Try to find a name for the configured commit id using all prior configuration, returning `Some(Outcome)`
+ /// if one was found.
+ ///
+ /// The outcome provides additional information, but leaves the caller with the burden of turning it into a displayable format.
+ ///
+ /// # Performance
+ ///
+ /// It is greatly recommended to [assure an object cache is set][crate::Repository::object_cache_size_if_unset()]
+ /// to save ~40% of time.
+ pub fn try_resolve(&self) -> Result<Option<Resolution<'repo>>, Error> {
+ // TODO: dirty suffix with respective dirty-detection
+ let outcome = gix_revision::describe(
+ &self.id,
+ |id, buf| {
+ Ok(self
+ .repo
+ .objects
+ .try_find(id, buf)?
+ .and_then(|d| d.try_into_commit_iter()))
+ },
+ gix_revision::describe::Options {
+ name_by_oid: self.select.names(self.repo)?,
+ fallback_to_oid: self.id_as_fallback,
+ first_parent: self.first_parent,
+ max_candidates: self.max_candidates,
+ },
+ )?;
+
+ Ok(outcome.map(|outcome| crate::commit::describe::Resolution {
+ outcome,
+ id: self.id.attach(self.repo),
+ }))
+ }
+
+ /// Like [`try_format()`][Platform::try_format()], but turns `id_as_fallback()` on to always produce a format.
+ pub fn format(&mut self) -> Result<gix_revision::describe::Format<'static>, Error> {
+ self.id_as_fallback = true;
+ Ok(self.try_format()?.expect("BUG: fallback must always produce a format"))
+ }
+ }
+}
diff --git a/vendor/gix/src/config/cache/access.rs b/vendor/gix/src/config/cache/access.rs
new file mode 100644
index 000000000..8244eaf27
--- /dev/null
+++ b/vendor/gix/src/config/cache/access.rs
@@ -0,0 +1,233 @@
+#![allow(clippy::result_large_err)]
+use std::{borrow::Cow, path::PathBuf, time::Duration};
+
+use gix_lock::acquire::Fail;
+
+use crate::{
+ bstr::BStr,
+ config,
+ config::{
+ cache::util::{ApplyLeniency, ApplyLeniencyDefault},
+ checkout_options,
+ tree::{Checkout, Core, Key},
+ Cache,
+ },
+ remote,
+ repository::identity,
+};
+
+/// Access
+impl Cache {
+ /// Return the diff algorithm configured at `diff.algorithm`, defaulting to `myers` if unset.
+ /// In lenient mode an unimplemented algorithm falls back to `Histogram`; the value is computed once and cached.
+ pub(crate) fn diff_algorithm(&self) -> Result<gix_diff::blob::Algorithm, config::diff::algorithm::Error> {
+ use crate::config::diff::algorithm::Error;
+ self.diff_algorithm
+ .get_or_try_init(|| {
+ let name = self
+ .resolved
+ .string("diff", None, "algorithm")
+ .unwrap_or_else(|| Cow::Borrowed("myers".into()));
+ config::tree::Diff::ALGORITHM
+ .try_into_algorithm(name)
+ .or_else(|err| match err {
+ Error::Unimplemented { .. } if self.lenient_config => Ok(gix_diff::blob::Algorithm::Histogram),
+ err => Err(err),
+ })
+ .with_lenient_default(self.lenient_config)
+ })
+ .copied()
+ }
+
+ /// Returns a user agent for use with servers.
+ #[cfg(any(feature = "async-network-client", feature = "blocking-network-client"))]
+ pub(crate) fn user_agent_tuple(&self) -> (&'static str, Option<Cow<'static, str>>) {
+ use config::tree::Gitoxide;
+ // Lazily initialized from the `gitoxide` user-agent key, falling back to our own agent string.
+ let agent = self
+ .user_agent
+ .get_or_init(|| {
+ self.resolved
+ .string_by_key(Gitoxide::USER_AGENT.logical_name().as_str())
+ .map(|s| s.to_string())
+ .unwrap_or_else(|| crate::env::agent().into())
+ })
+ .to_owned();
+ ("agent", Some(gix_protocol::agent(agent).into()))
+ }
+
+ /// Author and committer identities, lazily obtained from configuration and environment.
+ pub(crate) fn personas(&self) -> &identity::Personas {
+ self.personas
+ .get_or_init(|| identity::Personas::from_config_and_env(&self.resolved))
+ }
+
+ /// Rules for rewriting remote URLs, lazily read from configuration sections passing `filter_config_section`.
+ pub(crate) fn url_rewrite(&self) -> &remote::url::Rewrite {
+ self.url_rewrite
+ .get_or_init(|| remote::url::Rewrite::from_config(&self.resolved, self.filter_config_section))
+ }
+
+ /// Permissions for URL schemes, lazily read from configuration sections passing `filter_config_section`.
+ #[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+ pub(crate) fn url_scheme(&self) -> Result<&remote::url::SchemePermission, config::protocol::allow::Error> {
+ self.url_scheme
+ .get_or_try_init(|| remote::url::SchemePermission::from_config(&self.resolved, self.filter_config_section))
+ }
+
+ /// Rename-tracking settings for tree diffs as parsed from configuration, cached after the first read.
+ pub(crate) fn diff_renames(
+ &self,
+ ) -> Result<Option<crate::object::tree::diff::Rewrites>, crate::object::tree::diff::rewrites::Error> {
+ self.diff_renames
+ .get_or_try_init(|| {
+ crate::object::tree::diff::Rewrites::try_from_config(&self.resolved, self.lenient_config)
+ })
+ .copied()
+ }
+
+ /// Returns (file-timeout, pack-refs timeout)
+ ///
+ /// Defaults are 100ms for ref-file locks and 1000ms for packed-refs when the respective keys are unset.
+ pub(crate) fn lock_timeout(
+ &self,
+ ) -> Result<(gix_lock::acquire::Fail, gix_lock::acquire::Fail), config::lock_timeout::Error> {
+ let mut out: [gix_lock::acquire::Fail; 2] = Default::default();
+ for (idx, (key, default_ms)) in [(&Core::FILES_REF_LOCK_TIMEOUT, 100), (&Core::PACKED_REFS_TIMEOUT, 1000)]
+ .into_iter()
+ .enumerate()
+ {
+ out[idx] = self
+ .resolved
+ .integer_filter("core", None, key.name, &mut self.filter_config_section.clone())
+ .map(|res| key.try_into_lock_timeout(res))
+ .transpose()
+ .with_leniency(self.lenient_config)?
+ .unwrap_or_else(|| Fail::AfterDurationWithBackoff(Duration::from_millis(default_ms)));
+ }
+ Ok((out[0], out[1]))
+ }
+
+ /// The path to the user-level excludes file to ignore certain files in the worktree.
+ pub(crate) fn excludes_file(&self) -> Option<Result<PathBuf, gix_config::path::interpolate::Error>> {
+ self.trusted_file_path("core", None, Core::EXCLUDES_FILE.name)?
+ .map(|p| p.into_owned())
+ .into()
+ }
+
+ /// A helper to obtain a file from trusted configuration at `section_name`, `subsection_name`, and `key`, which is interpolated
+ /// if present.
+ ///
+ /// Returns `None` if the key is unset in all sections passing the configured section filter.
+ pub(crate) fn trusted_file_path(
+ &self,
+ section_name: impl AsRef<str>,
+ subsection_name: Option<&BStr>,
+ key: impl AsRef<str>,
+ ) -> Option<Result<Cow<'_, std::path::Path>, gix_config::path::interpolate::Error>> {
+ let path = self.resolved.path_filter(
+ section_name,
+ subsection_name,
+ key,
+ &mut self.filter_config_section.clone(),
+ )?;
+
+ // Interpolate `~`, `~user` and `%(prefix)` style placeholders relative to our environment.
+ let install_dir = crate::path::install_dir().ok();
+ let home = self.home_dir();
+ let ctx = crate::config::cache::interpolate_context(install_dir.as_deref(), home.as_deref());
+ Some(path.interpolate(ctx))
+ }
+
+ /// Transpose `res`, discarding errors (as `Ok(None)`) when lenient configuration is enabled.
+ pub(crate) fn apply_leniency<T, E>(&self, res: Option<Result<T, E>>) -> Result<Option<T>, E> {
+ res.transpose().with_leniency(self.lenient_config)
+ }
+
+ /// Collect everything needed to checkout files into a worktree.
+ /// Note that some of the options being returned will be defaulted to safe settings, the caller might have to override them
+ /// depending on the use-case.
+ pub(crate) fn checkout_options(
+ &self,
+ git_dir: &std::path::Path,
+ ) -> Result<gix_worktree::index::checkout::Options, checkout_options::Error> {
+ // Read the boolean at `full_key`, using `default` if unset, with leniency applied.
+ fn boolean(
+ me: &Cache,
+ full_key: &str,
+ key: &'static config::tree::keys::Boolean,
+ default: bool,
+ ) -> Result<bool, checkout_options::Error> {
+ debug_assert_eq!(
+ full_key,
+ key.logical_name(),
+ "BUG: key name and hardcoded name must match"
+ );
+ Ok(me
+ .apply_leniency(me.resolved.boolean_by_key(full_key).map(|v| key.enrich_error(v)))?
+ .unwrap_or(default))
+ }
+
+ // Currently only validates the attributes-file path and returns an empty match-group.
+ fn assemble_attribute_globals(
+ me: &Cache,
+ _git_dir: &std::path::Path,
+ ) -> Result<gix_attributes::MatchGroup, checkout_options::Error> {
+ let _attributes_file = match me
+ .trusted_file_path("core", None, Core::ATTRIBUTES_FILE.name)
+ .transpose()?
+ {
+ Some(attributes) => Some(attributes.into_owned()),
+ None => me.xdg_config_path("attributes").ok().flatten(),
+ };
+ // TODO: implement gix_attributes::MatchGroup::<gix_attributes::Attributes>::from_git_dir(), similar to what's done for `Ignore`.
+ Ok(Default::default())
+ }
+
+ let thread_limit = self.apply_leniency(
+ self.resolved
+ .integer_filter_by_key("checkout.workers", &mut self.filter_config_section.clone())
+ .map(|value| Checkout::WORKERS.try_from_workers(value)),
+ )?;
+ Ok(gix_worktree::index::checkout::Options {
+ fs: gix_worktree::fs::Capabilities {
+ precompose_unicode: boolean(self, "core.precomposeUnicode", &Core::PRECOMPOSE_UNICODE, false)?,
+ ignore_case: boolean(self, "core.ignoreCase", &Core::IGNORE_CASE, false)?,
+ executable_bit: boolean(self, "core.fileMode", &Core::FILE_MODE, true)?,
+ symlink: boolean(self, "core.symlinks", &Core::SYMLINKS, true)?,
+ },
+ thread_limit,
+ destination_is_initially_empty: false,
+ overwrite_existing: false,
+ keep_going: false,
+ trust_ctime: boolean(self, "core.trustCTime", &Core::TRUST_C_TIME, true)?,
+ check_stat: self
+ .apply_leniency(
+ self.resolved
+ .string("core", None, "checkStat")
+ .map(|v| Core::CHECK_STAT.try_into_checkstat(v)),
+ )?
+ .unwrap_or(true),
+ attribute_globals: assemble_attribute_globals(self, git_dir)?,
+ })
+ }
+ /// Return `$XDG_CONFIG_HOME/git/<resource_file_name>`, falling back to `$HOME/.config/git/<resource_file_name>`,
+ /// if the governing environment permission allows reading the respective variable.
+ pub(crate) fn xdg_config_path(
+ &self,
+ resource_file_name: &str,
+ ) -> Result<Option<PathBuf>, gix_sec::permission::Error<PathBuf>> {
+ std::env::var_os("XDG_CONFIG_HOME")
+ .map(|path| (PathBuf::from(path), &self.xdg_config_home_env))
+ .or_else(|| {
+ std::env::var_os("HOME").map(|path| {
+ (
+ {
+ let mut p = PathBuf::from(path);
+ p.push(".config");
+ p
+ },
+ &self.home_env,
+ )
+ })
+ })
+ .and_then(|(base, permission)| {
+ let resource = base.join("git").join(resource_file_name);
+ permission.check(resource).transpose()
+ })
+ .transpose()
+ }
+
+ /// Return the home directory if we are allowed to read it and if it is set in the environment.
+ ///
+ /// We never fail here even if the permission is set to deny, as `gix-config` will fail later
+ /// if it actually wants to use the home directory - we don't want to fail prematurely.
+ pub(crate) fn home_dir(&self) -> Option<PathBuf> {
+ std::env::var_os("HOME")
+ .map(PathBuf::from)
+ .and_then(|path| self.home_env.check_opt(path))
+ }
+}
diff --git a/vendor/gix/src/config/cache/incubate.rs b/vendor/gix/src/config/cache/incubate.rs
new file mode 100644
index 000000000..047f2132b
--- /dev/null
+++ b/vendor/gix/src/config/cache/incubate.rs
@@ -0,0 +1,111 @@
+#![allow(clippy::result_large_err)]
+use super::{util, Error};
+use crate::config::tree::{Core, Extensions};
+
+/// A utility to deal with the cyclic dependency between the ref store and the configuration. The ref-store needs the
+/// object hash kind, and the configuration needs the current branch name to resolve conditional includes with `onbranch`.
+pub(crate) struct StageOne {
+ // The repository-local configuration, possibly with `config.worktree` appended.
+ pub git_dir_config: gix_config::File<'static>,
+ // The buffer the configuration file(s) were read into, for reuse by later stages.
+ pub buf: Vec<u8>,
+
+ // Value of `core.bare`, defaulting to `true` if unset.
+ pub is_bare: bool,
+ // Whether to parse configuration lossily; `None` lets `util::base_options()` pick a default.
+ pub lossy: Option<bool>,
+ // The object hash kind, derived from `extensions.objectFormat` (SHA-1 unless overridden).
+ pub object_hash: gix_hash::Kind,
+ // Reflog write mode as configured via `core.logAllRefUpdates`, if set at all.
+ pub reflog: Option<gix_ref::store::WriteReflog>,
+}
+
+/// Initialization
+impl StageOne {
+ /// Load the shared configuration from `common_dir`, plus `config.worktree` from `git_dir` when the
+ /// `extensions.worktreeConfig` extension is enabled, and extract the values later stages depend on.
+ pub fn new(
+ common_dir: &std::path::Path,
+ git_dir: &std::path::Path,
+ git_dir_trust: gix_sec::Trust,
+ lossy: Option<bool>,
+ lenient: bool,
+ ) -> Result<Self, Error> {
+ let mut buf = Vec::with_capacity(512);
+ let mut config = load_config(
+ common_dir.join("config"),
+ &mut buf,
+ gix_config::Source::Local,
+ git_dir_trust,
+ lossy,
+ )?;
+
+ // Note that we assume the repo is bare by default unless we are told otherwise. This is relevant if
+ // the repo doesn't have a configuration file.
+ let is_bare = util::config_bool(&config, &Core::BARE, "core.bare", true, lenient)?;
+ // `extensions.objectFormat` is only consulted when `core.repositoryFormatVersion` is 1;
+ // any other version unconditionally means SHA-1.
+ let repo_format_version = config
+ .integer_by_key("core.repositoryFormatVersion")
+ .map(|version| Core::REPOSITORY_FORMAT_VERSION.try_into_usize(version))
+ .transpose()?
+ .unwrap_or_default();
+ let object_hash = (repo_format_version != 1)
+ .then_some(Ok(gix_hash::Kind::Sha1))
+ .or_else(|| {
+ config
+ .string("extensions", None, "objectFormat")
+ .map(|format| Extensions::OBJECT_FORMAT.try_into_object_format(format))
+ })
+ .transpose()?
+ .unwrap_or(gix_hash::Kind::Sha1);
+
+ let extension_worktree = util::config_bool(
+ &config,
+ &Extensions::WORKTREE_CONFIG,
+ "extensions.worktreeConfig",
+ false,
+ lenient,
+ )?;
+ if extension_worktree {
+ // Append `config.worktree` so its values take effect as well.
+ let worktree_config = load_config(
+ git_dir.join("config.worktree"),
+ &mut buf,
+ gix_config::Source::Worktree,
+ git_dir_trust,
+ lossy,
+ )?;
+ config.append(worktree_config);
+ };
+
+ let reflog = util::query_refupdates(&config, lenient)?;
+ Ok(StageOne {
+ git_dir_config: config,
+ buf,
+ is_bare,
+ lossy,
+ object_hash,
+ reflog,
+ })
+ }
+}
+
+/// Read the configuration file at `config_path` into `buf` and parse it with the given `source`
+/// and `git_dir_trust` metadata. A missing file yields an empty configuration; includes are
+/// deliberately not followed at this early stage.
+fn load_config(
+ config_path: std::path::PathBuf,
+ buf: &mut Vec<u8>,
+ source: gix_config::Source,
+ git_dir_trust: gix_sec::Trust,
+ lossy: Option<bool>,
+) -> Result<gix_config::File<'static>, Error> {
+ buf.clear();
+ let metadata = gix_config::file::Metadata::from(source)
+ .at(&config_path)
+ .with(git_dir_trust);
+ let mut file = match std::fs::File::open(&config_path) {
+ Ok(f) => f,
+ // A missing file is not an error - it simply yields an empty configuration.
+ Err(err) if err.kind() == std::io::ErrorKind::NotFound => return Ok(gix_config::File::new(metadata)),
+ Err(err) => return Err(err.into()),
+ };
+ std::io::copy(&mut file, buf)?;
+
+ let config = gix_config::File::from_bytes_owned(
+ buf,
+ metadata,
+ gix_config::file::init::Options {
+ includes: gix_config::file::includes::Options::no_follow(),
+ ..util::base_options(lossy)
+ },
+ )?;
+
+ Ok(config)
+}
diff --git a/vendor/gix/src/config/cache/init.rs b/vendor/gix/src/config/cache/init.rs
new file mode 100644
index 000000000..dc76f78bb
--- /dev/null
+++ b/vendor/gix/src/config/cache/init.rs
@@ -0,0 +1,485 @@
+#![allow(clippy::result_large_err)]
+use std::borrow::Cow;
+
+use gix_sec::Permission;
+
+use super::{interpolate_context, util, Error, StageOne};
+use crate::{
+ bstr::BString,
+ config,
+ config::{
+ cache::util::ApplyLeniency,
+ tree::{gitoxide, Core, Http},
+ Cache,
+ },
+ repository,
+};
+
+/// Initialization
+impl Cache {
+ /// Create a `Cache` from `StageOne` data, assembling the effective configuration by appending,
+ /// in this order: installation/system/global files (as permitted), the repository's own
+ /// configuration, environment-provided configuration, CLI overrides, API overrides, and finally
+ /// environment-variable overrides.
+ #[allow(clippy::too_many_arguments)]
+ pub fn from_stage_one(
+ StageOne {
+ git_dir_config,
+ mut buf,
+ lossy,
+ is_bare,
+ object_hash,
+ reflog: _,
+ }: StageOne,
+ git_dir: &std::path::Path,
+ branch_name: Option<&gix_ref::FullNameRef>,
+ filter_config_section: fn(&gix_config::file::Metadata) -> bool,
+ git_install_dir: Option<&std::path::Path>,
+ home: Option<&std::path::Path>,
+ repository::permissions::Environment {
+ git_prefix,
+ home: home_env,
+ xdg_config_home: xdg_config_home_env,
+ ssh_prefix: _,
+ http_transport,
+ identity,
+ objects,
+ }: repository::permissions::Environment,
+ repository::permissions::Config {
+ git_binary: use_installation,
+ system: use_system,
+ git: use_git,
+ user: use_user,
+ env: use_env,
+ includes: use_includes,
+ }: repository::permissions::Config,
+ lenient_config: bool,
+ api_config_overrides: &[BString],
+ cli_config_overrides: &[BString],
+ ) -> Result<Self, Error> {
+ let options = gix_config::file::init::Options {
+ includes: if use_includes {
+ gix_config::file::includes::Options::follow(
+ interpolate_context(git_install_dir, home),
+ gix_config::file::includes::conditional::Context {
+ git_dir: git_dir.into(),
+ branch_name,
+ },
+ )
+ } else {
+ gix_config::file::includes::Options::no_follow()
+ },
+ ..util::base_options(lossy)
+ };
+
+ let config = {
+ let home_env = &home_env;
+ let xdg_config_home_env = &xdg_config_home_env;
+ let git_prefix = &git_prefix;
+ // Candidate global configuration files, filtered by the respective usage permissions
+ // and resolved to on-disk locations via permitted environment variables.
+ let metas = [
+ gix_config::source::Kind::GitInstallation,
+ gix_config::source::Kind::System,
+ gix_config::source::Kind::Global,
+ ]
+ .iter()
+ .flat_map(|kind| kind.sources())
+ .filter_map(|source| {
+ match source {
+ gix_config::Source::GitInstallation if !use_installation => return None,
+ gix_config::Source::System if !use_system => return None,
+ gix_config::Source::Git if !use_git => return None,
+ gix_config::Source::User if !use_user => return None,
+ _ => {}
+ }
+ source
+ .storage_location(&mut |name| {
+ match name {
+ git_ if git_.starts_with("GIT_") => Some(git_prefix),
+ "XDG_CONFIG_HOME" => Some(xdg_config_home_env),
+ "HOME" => Some(home_env),
+ _ => None,
+ }
+ .and_then(|perm| perm.check_opt(name).and_then(std::env::var_os))
+ })
+ .map(|p| (source, p.into_owned()))
+ })
+ .map(|(source, path)| gix_config::file::Metadata {
+ path: Some(path),
+ source: *source,
+ level: 0,
+ trust: gix_sec::Trust::Full,
+ });
+
+ // Missing files are skipped rather than treated as errors.
+ let err_on_nonexisting_paths = false;
+ let mut globals = gix_config::File::from_paths_metadata_buf(
+ metas,
+ &mut buf,
+ err_on_nonexisting_paths,
+ gix_config::file::init::Options {
+ includes: gix_config::file::includes::Options::no_follow(),
+ ..options
+ },
+ )
+ .map_err(|err| match err {
+ gix_config::file::init::from_paths::Error::Init(err) => Error::from(err),
+ gix_config::file::init::from_paths::Error::Io(err) => err.into(),
+ })?
+ .unwrap_or_default();
+
+ // Remember the repo-local metadata so the assembled file can present it as its own.
+ let local_meta = git_dir_config.meta_owned();
+ globals.append(git_dir_config);
+ globals.resolve_includes(options)?;
+ if use_env {
+ globals.append(gix_config::File::from_env(options)?.unwrap_or_default());
+ }
+ if !cli_config_overrides.is_empty() {
+ config::overrides::append(&mut globals, cli_config_overrides, gix_config::Source::Cli, |_| None)
+ .map_err(|err| Error::ConfigOverrides {
+ err,
+ source: gix_config::Source::Cli,
+ })?;
+ }
+ if !api_config_overrides.is_empty() {
+ config::overrides::append(&mut globals, api_config_overrides, gix_config::Source::Api, |_| None)
+ .map_err(|err| Error::ConfigOverrides {
+ err,
+ source: gix_config::Source::Api,
+ })?;
+ }
+ apply_environment_overrides(&mut globals, *git_prefix, http_transport, identity, objects)?;
+ globals.set_meta(local_meta);
+ globals
+ };
+
+ let hex_len = util::parse_core_abbrev(&config, object_hash).with_leniency(lenient_config)?;
+
+ use util::config_bool;
+ let reflog = util::query_refupdates(&config, lenient_config)?;
+ let ignore_case = config_bool(&config, &Core::IGNORE_CASE, "core.ignoreCase", false, lenient_config)?;
+ let use_multi_pack_index = config_bool(
+ &config,
+ &Core::MULTIPACK_INDEX,
+ "core.multiPackIndex",
+ true,
+ lenient_config,
+ )?;
+ let object_kind_hint = util::disambiguate_hint(&config, lenient_config)?;
+ let (pack_cache_bytes, object_cache_bytes) =
+ util::parse_object_caches(&config, lenient_config, filter_config_section)?;
+ // NOTE: When adding a new initial cache, consider adjusting `reread_values_and_clear_caches()` as well.
+ Ok(Cache {
+ resolved: config.into(),
+ use_multi_pack_index,
+ object_hash,
+ object_kind_hint,
+ pack_cache_bytes,
+ object_cache_bytes,
+ reflog,
+ is_bare,
+ ignore_case,
+ hex_len,
+ filter_config_section,
+ xdg_config_home_env,
+ home_env,
+ lenient_config,
+ user_agent: Default::default(),
+ personas: Default::default(),
+ url_rewrite: Default::default(),
+ diff_renames: Default::default(),
+ #[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+ url_scheme: Default::default(),
+ diff_algorithm: Default::default(),
+ })
+ }
+
+ /// Call this with new `config` to update values and clear caches. Note that none of the values will be applied if a single
+ /// one is invalid.
+ /// However, those that are lazily read won't be re-evaluated right away and might thus pass now but fail later.
+ ///
+ /// Note that we unconditionally re-read all values.
+ /// On error, the previous configuration is restored.
+ pub fn reread_values_and_clear_caches_replacing_config(&mut self, config: crate::Config) -> Result<(), Error> {
+ let prev = std::mem::replace(&mut self.resolved, config);
+ match self.reread_values_and_clear_caches() {
+ Err(err) => {
+ // Roll back to the configuration we had before the failed update.
+ drop(std::mem::replace(&mut self.resolved, prev));
+ Err(err)
+ }
+ Ok(()) => Ok(()),
+ }
+ }
+
+ /// Similar to `reread_values_and_clear_caches_replacing_config()`, but works on the existing configuration instead of a passed
+ /// in one that it then makes the default.
+ pub fn reread_values_and_clear_caches(&mut self) -> Result<(), Error> {
+ let config = &self.resolved;
+ let hex_len = util::parse_core_abbrev(config, self.object_hash).with_leniency(self.lenient_config)?;
+
+ use util::config_bool;
+ let ignore_case = config_bool(
+ config,
+ &Core::IGNORE_CASE,
+ "core.ignoreCase",
+ false,
+ self.lenient_config,
+ )?;
+ let object_kind_hint = util::disambiguate_hint(config, self.lenient_config)?;
+ let reflog = util::query_refupdates(config, self.lenient_config)?;
+
+ // Only assign after all reads succeeded, so a failure leaves the previous values intact.
+ self.hex_len = hex_len;
+ self.ignore_case = ignore_case;
+ self.object_kind_hint = object_kind_hint;
+ self.reflog = reflog;
+
+ // Reset all lazily-computed caches so they are re-derived from the new configuration on demand.
+ self.user_agent = Default::default();
+ self.personas = Default::default();
+ self.url_rewrite = Default::default();
+ self.diff_renames = Default::default();
+ self.diff_algorithm = Default::default();
+ (self.pack_cache_bytes, self.object_cache_bytes) =
+ util::parse_object_caches(config, self.lenient_config, self.filter_config_section)?;
+ #[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+ {
+ self.url_scheme = Default::default();
+ }
+
+ Ok(())
+ }
+}
+
+impl crate::Repository {
+ /// Replace our own configuration with `config` and re-read all cached values, and apply them to select in-memory instances.
+ pub(crate) fn reread_values_and_clear_caches_replacing_config(
+ &mut self,
+ config: crate::Config,
+ ) -> Result<(), Error> {
+ self.config.reread_values_and_clear_caches_replacing_config(config)?;
+ self.apply_changed_values();
+ Ok(())
+ }
+
+ // Propagate the possibly-changed reflog setting to the ref store.
+ fn apply_changed_values(&mut self) {
+ self.refs.write_reflog = util::reflog_or_default(self.config.reflog, self.work_dir().is_some());
+ }
+}
+
+/// Append an `EnvOverride` section to `config` for each supported environment variable that is
+/// both set and readable under the respective `Permission`. Sections that end up without any
+/// values are removed again, and nothing is appended if no variable contributed a value.
+fn apply_environment_overrides(
+ config: &mut gix_config::File<'static>,
+ git_prefix: Permission,
+ http_transport: Permission,
+ identity: Permission,
+ objects: Permission,
+) -> Result<(), Error> {
+ // The environment variable name that overrides `key`.
+ fn env(key: &'static dyn config::tree::Key) -> &'static str {
+ key.the_environment_override()
+ }
+ // Read `var` if `perm` allows it, converting the value into a `BString` if possible.
+ fn var_as_bstring(var: &str, perm: Permission) -> Option<BString> {
+ perm.check_opt(var)
+ .and_then(std::env::var_os)
+ .and_then(|val| gix_path::os_string_into_bstring(val).ok())
+ }
+
+ // (section, subsection, permission, [(environment variable, key name)]) tuples to translate
+ // into configuration values.
+ let mut env_override = gix_config::File::new(gix_config::file::Metadata::from(gix_config::Source::EnvOverride));
+ for (section_name, subsection_name, permission, data) in [
+ (
+ "http",
+ None,
+ http_transport,
+ &[
+ ("GIT_HTTP_LOW_SPEED_LIMIT", "lowSpeedLimit"),
+ ("GIT_HTTP_LOW_SPEED_TIME", "lowSpeedTime"),
+ ("GIT_HTTP_USER_AGENT", "userAgent"),
+ {
+ let key = &Http::SSL_CA_INFO;
+ (env(key), key.name)
+ },
+ {
+ let key = &Http::SSL_VERSION;
+ (env(key), key.name)
+ },
+ ][..],
+ ),
+ (
+ "gitoxide",
+ Some(Cow::Borrowed("https".into())),
+ http_transport,
+ &[
+ ("HTTPS_PROXY", gitoxide::Https::PROXY.name),
+ ("https_proxy", gitoxide::Https::PROXY.name),
+ ],
+ ),
+ (
+ "gitoxide",
+ Some(Cow::Borrowed("http".into())),
+ http_transport,
+ &[
+ ("ALL_PROXY", "allProxy"),
+ {
+ let key = &gitoxide::Http::ALL_PROXY;
+ (env(key), key.name)
+ },
+ ("NO_PROXY", "noProxy"),
+ {
+ let key = &gitoxide::Http::NO_PROXY;
+ (env(key), key.name)
+ },
+ {
+ let key = &gitoxide::Http::PROXY;
+ (env(key), key.name)
+ },
+ {
+ let key = &gitoxide::Http::VERBOSE;
+ (env(key), key.name)
+ },
+ {
+ let key = &gitoxide::Http::PROXY_AUTH_METHOD;
+ (env(key), key.name)
+ },
+ ],
+ ),
+ (
+ "gitoxide",
+ Some(Cow::Borrowed("committer".into())),
+ identity,
+ &[
+ {
+ let key = &gitoxide::Committer::NAME_FALLBACK;
+ (env(key), key.name)
+ },
+ {
+ let key = &gitoxide::Committer::EMAIL_FALLBACK;
+ (env(key), key.name)
+ },
+ ],
+ ),
+ (
+ "gitoxide",
+ Some(Cow::Borrowed("author".into())),
+ identity,
+ &[
+ {
+ let key = &gitoxide::Author::NAME_FALLBACK;
+ (env(key), key.name)
+ },
+ {
+ let key = &gitoxide::Author::EMAIL_FALLBACK;
+ (env(key), key.name)
+ },
+ ],
+ ),
+ (
+ "gitoxide",
+ Some(Cow::Borrowed("commit".into())),
+ git_prefix,
+ &[
+ {
+ let key = &gitoxide::Commit::COMMITTER_DATE;
+ (env(key), key.name)
+ },
+ {
+ let key = &gitoxide::Commit::AUTHOR_DATE;
+ (env(key), key.name)
+ },
+ ],
+ ),
+ (
+ "gitoxide",
+ Some(Cow::Borrowed("allow".into())),
+ http_transport,
+ &[("GIT_PROTOCOL_FROM_USER", "protocolFromUser")],
+ ),
+ (
+ "gitoxide",
+ Some(Cow::Borrowed("user".into())),
+ identity,
+ &[{
+ let key = &gitoxide::User::EMAIL_FALLBACK;
+ (env(key), key.name)
+ }],
+ ),
+ (
+ "gitoxide",
+ Some(Cow::Borrowed("objects".into())),
+ objects,
+ &[
+ {
+ let key = &gitoxide::Objects::NO_REPLACE;
+ (env(key), key.name)
+ },
+ {
+ let key = &gitoxide::Objects::REPLACE_REF_BASE;
+ (env(key), key.name)
+ },
+ {
+ let key = &gitoxide::Objects::CACHE_LIMIT;
+ (env(key), key.name)
+ },
+ ],
+ ),
+ (
+ "gitoxide",
+ Some(Cow::Borrowed("ssh".into())),
+ git_prefix,
+ &[{
+ let key = &gitoxide::Ssh::COMMAND_WITHOUT_SHELL_FALLBACK;
+ (env(key), key.name)
+ }],
+ ),
+ (
+ "ssh",
+ None,
+ git_prefix,
+ &[{
+ let key = &config::tree::Ssh::VARIANT;
+ (env(key), key.name)
+ }],
+ ),
+ ] {
+ let mut section = env_override
+ .new_section(section_name, subsection_name)
+ .expect("statically known valid section name");
+ for (var, key) in data {
+ if let Some(value) = var_as_bstring(var, permission) {
+ section.push_with_comment(
+ (*key).try_into().expect("statically known to be valid"),
+ Some(value.as_ref()),
+ format!("from {var}").as_str(),
+ );
+ }
+ }
+ // Avoid keeping sections that received no values at all.
+ if section.num_values() == 0 {
+ let id = section.id();
+ env_override.remove_section_by_id(id);
+ }
+ }
+
+ // `core` values come with per-entry permissions, hence they are handled separately.
+ {
+ let mut section = env_override
+ .new_section("core", None)
+ .expect("statically known valid section name");
+
+ for (var, key, permission) in [
+ {
+ let key = &Core::DELTA_BASE_CACHE_LIMIT;
+ (env(key), key.name, objects)
+ },
+ {
+ let key = &Core::SSH_COMMAND;
+ (env(key), key.name, git_prefix)
+ },
+ ] {
+ if let Some(value) = var_as_bstring(var, permission) {
+ section.push_with_comment(
+ key.try_into().expect("statically known to be valid"),
+ Some(value.as_ref()),
+ format!("from {var}").as_str(),
+ );
+ }
+ }
+
+ if section.num_values() == 0 {
+ let id = section.id();
+ env_override.remove_section_by_id(id);
+ }
+ }
+
+ if !env_override.is_void() {
+ config.append(env_override);
+ }
+ Ok(())
+}
diff --git a/vendor/gix/src/config/cache/mod.rs b/vendor/gix/src/config/cache/mod.rs
new file mode 100644
index 000000000..1904c5ea9
--- /dev/null
+++ b/vendor/gix/src/config/cache/mod.rs
@@ -0,0 +1,18 @@
+use super::{Cache, Error};
+
+mod incubate;
+pub(crate) use incubate::StageOne;
+
+mod init;
+
+impl std::fmt::Debug for Cache {
+ // A deliberately shallow, non-exhaustive representation - no fields are printed.
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ f.debug_struct("Cache").finish_non_exhaustive()
+ }
+}
+
+mod access;
+
+pub(crate) mod util;
+
+pub(crate) use util::interpolate_context;
diff --git a/vendor/gix/src/config/cache/util.rs b/vendor/gix/src/config/cache/util.rs
new file mode 100644
index 000000000..c12f850e6
--- /dev/null
+++ b/vendor/gix/src/config/cache/util.rs
@@ -0,0 +1,143 @@
+#![allow(clippy::result_large_err)]
+use super::Error;
+use crate::{
+ config,
+ config::tree::{gitoxide, Core},
+ revision::spec::parse::ObjectKindHint,
+};
+
+/// Build a path-interpolation context from the optional git installation directory and home directory.
+pub(crate) fn interpolate_context<'a>(
+ git_install_dir: Option<&'a std::path::Path>,
+ home_dir: Option<&'a std::path::Path>,
+) -> gix_config::path::interpolate::Context<'a> {
+ gix_config::path::interpolate::Context {
+ git_install_dir,
+ home_dir,
+ home_for_user: Some(gix_config::path::interpolate::home_for_user), // TODO: figure out how to configure this
+ }
+}
+
+/// Base options for configuration parsing. If `lossy` is `None`, parsing is lossy in release
+/// builds and lossless when debug-assertions are enabled.
+pub(crate) fn base_options(lossy: Option<bool>) -> gix_config::file::init::Options<'static> {
+ gix_config::file::init::Options {
+ lossy: lossy.unwrap_or(!cfg!(debug_assertions)),
+ ..Default::default()
+ }
+}
+
+/// Read the boolean at `key_str` from `config`, using `default` if the key is unset.
+/// In `lenient` mode a parse failure also yields `default` instead of an error.
+pub(crate) fn config_bool(
+ config: &gix_config::File<'_>,
+ key: &'static config::tree::keys::Boolean,
+ key_str: &str,
+ default: bool,
+ lenient: bool,
+) -> Result<bool, Error> {
+ use config::tree::Key;
+ debug_assert_eq!(
+ key_str,
+ key.logical_name(),
+ "BUG: key name and hardcoded name must match"
+ );
+ config
+ .boolean_by_key(key_str)
+ .map(|res| key.enrich_error(res))
+ .unwrap_or(Ok(default))
+ .map_err(Error::from)
+ .with_lenient_default(lenient)
+}
+
+/// Parse `core.logAllRefUpdates` into a reflog write mode, or `None` if unset, applying leniency if requested.
+pub(crate) fn query_refupdates(
+ config: &gix_config::File<'static>,
+ lenient_config: bool,
+) -> Result<Option<gix_ref::store::WriteReflog>, Error> {
+ let key = "core.logAllRefUpdates";
+ Core::LOG_ALL_REF_UPDATES
+ .try_into_ref_updates(config.boolean_by_key(key), || config.string_by_key(key))
+ .with_leniency(lenient_config)
+ .map_err(Into::into)
+}
+
+/// Use the configured reflog mode, or derive a default from the repository shape:
+/// `Normal` when a worktree is present, `Disable` otherwise.
+pub(crate) fn reflog_or_default(
+ config_reflog: Option<gix_ref::store::WriteReflog>,
+ has_worktree: bool,
+) -> gix_ref::store::WriteReflog {
+ config_reflog.unwrap_or(if has_worktree {
+ gix_ref::store::WriteReflog::Normal
+ } else {
+ gix_ref::store::WriteReflog::Disable
+ })
+}
+
+/// Return `(pack_cache_bytes, object_cache_bytes)` as parsed from gix-config
+/// (`core.deltaBaseCacheLimit` and `gitoxide.objects.cacheLimit` respectively);
+/// the object cache size defaults to 0 when unset.
+pub(crate) fn parse_object_caches(
+ config: &gix_config::File<'static>,
+ lenient: bool,
+ mut filter_config_section: fn(&gix_config::file::Metadata) -> bool,
+) -> Result<(Option<usize>, usize), Error> {
+ let pack_cache_bytes = config
+ .integer_filter_by_key("core.deltaBaseCacheLimit", &mut filter_config_section)
+ .map(|res| Core::DELTA_BASE_CACHE_LIMIT.try_into_usize(res))
+ .transpose()
+ .with_leniency(lenient)?;
+ let object_cache_bytes = config
+ .integer_filter_by_key("gitoxide.objects.cacheLimit", &mut filter_config_section)
+ .map(|res| gitoxide::Objects::CACHE_LIMIT.try_into_usize(res))
+ .transpose()
+ .with_leniency(lenient)?
+ .unwrap_or_default();
+ Ok((pack_cache_bytes, object_cache_bytes))
+}
+
+/// Parse `core.abbrev` into an abbreviation length suitable for `object_hash`, or `None` if unset.
+pub(crate) fn parse_core_abbrev(
+ config: &gix_config::File<'static>,
+ object_hash: gix_hash::Kind,
+) -> Result<Option<usize>, Error> {
+ Ok(config
+ .string_by_key("core.abbrev")
+ .map(|abbrev| Core::ABBREV.try_into_abbreviation(abbrev, object_hash))
+ .transpose()?
+ .flatten())
+}
+
+/// Parse `core.disambiguate` into an object-kind hint for revision parsing, applying leniency if requested.
+pub(crate) fn disambiguate_hint(
+ config: &gix_config::File<'static>,
+ lenient_config: bool,
+) -> Result<Option<ObjectKindHint>, config::key::GenericErrorWithValue> {
+ match config.string_by_key("core.disambiguate") {
+ None => Ok(None),
+ Some(value) => Core::DISAMBIGUATE
+ .try_into_object_kind_hint(value)
+ .with_leniency(lenient_config),
+ }
+}
+
+// TODO: Use a specialization here once trait specialization is stabilized. Would be perfect here for `T: Default`.
+/// Treat errors as "value absent" when leniency is enabled.
+pub trait ApplyLeniency {
+ fn with_leniency(self, is_lenient: bool) -> Self;
+}
+
+/// Replace errors with the value type's default when leniency is enabled.
+pub trait ApplyLeniencyDefault {
+ fn with_lenient_default(self, is_lenient: bool) -> Self;
+}
+
+impl<T, E> ApplyLeniency for Result<Option<T>, E> {
+ fn with_leniency(self, is_lenient: bool) -> Self {
+ match self {
+ Ok(v) => Ok(v),
+ // In lenient mode, a failed value is treated as if it wasn't set at all.
+ Err(_) if is_lenient => Ok(None),
+ Err(err) => Err(err),
+ }
+ }
+}
+
+impl<T, E> ApplyLeniencyDefault for Result<T, E>
+where
+ T: Default,
+{
+ fn with_lenient_default(self, is_lenient: bool) -> Self {
+ match self {
+ Ok(v) => Ok(v),
+ // In lenient mode, a failed value is replaced with its type's default.
+ Err(_) if is_lenient => Ok(T::default()),
+ Err(err) => Err(err),
+ }
+ }
+}
diff --git a/vendor/gix/src/config/mod.rs b/vendor/gix/src/config/mod.rs
new file mode 100644
index 000000000..1e2566777
--- /dev/null
+++ b/vendor/gix/src/config/mod.rs
@@ -0,0 +1,454 @@
+pub use gix_config::*;
+use gix_features::threading::OnceCell;
+
+use crate::{bstr::BString, repository::identity, revision::spec, Repository};
+
+pub(crate) mod cache;
+mod snapshot;
+pub use snapshot::credential_helpers;
+
+///
+pub mod overrides;
+
+pub mod tree;
+pub use tree::root::Tree;
+
+/// A platform to access configuration values as read from disk.
+///
+/// Note that these values won't update even if the underlying file(s) change.
+pub struct Snapshot<'repo> {
+ pub(crate) repo: &'repo Repository,
+}
+
+/// A platform to access configuration values and modify them in memory, while making them available when this platform is dropped
+/// as form of auto-commit.
+/// Note that the values will only affect this instance of the parent repository, and not other clones that may exist.
+///
+/// Note that these values won't update even if the underlying file(s) change.
+///
+/// Use [`forget()`][Self::forget()] to not apply any of the changes.
+// TODO: make it possible to load snapshots with reloading via .config() and write mutated snapshots back to disk which should be the way
+// to affect all instances of a repo, probably via `config_mut()` and `config_mut_at()`.
+pub struct SnapshotMut<'repo> {
+ pub(crate) repo: Option<&'repo mut Repository>,
+ pub(crate) config: gix_config::File<'static>,
+}
+
+/// A utility structure created by [`SnapshotMut::commit_auto_rollback()`] that restores the previous configuration on drop.
+pub struct CommitAutoRollback<'repo> {
+ pub(crate) repo: Option<&'repo mut Repository>,
+ pub(crate) prev_config: crate::Config,
+}
+
+pub(crate) mod section {
+ pub fn is_trusted(meta: &gix_config::file::Metadata) -> bool {
+ meta.trust == gix_sec::Trust::Full || meta.source.kind() != gix_config::source::Kind::Repository
+ }
+}
+
+/// The error returned when failing to initialize the repository configuration.
+///
+/// This configuration is on the critical path when opening a repository.
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum Error {
+ #[error(transparent)]
+ ConfigBoolean(#[from] boolean::Error),
+ #[error(transparent)]
+ ConfigUnsigned(#[from] unsigned_integer::Error),
+ #[error(transparent)]
+ ConfigTypedString(#[from] key::GenericErrorWithValue),
+ #[error("Cannot handle objects formatted as {:?}", .name)]
+ UnsupportedObjectFormat { name: BString },
+ #[error(transparent)]
+ CoreAbbrev(#[from] abbrev::Error),
+ #[error("Could not read configuration file")]
+ Io(#[from] std::io::Error),
+ #[error(transparent)]
+ Init(#[from] gix_config::file::init::Error),
+ #[error(transparent)]
+ ResolveIncludes(#[from] gix_config::file::includes::Error),
+ #[error(transparent)]
+ FromEnv(#[from] gix_config::file::init::from_env::Error),
+ #[error(transparent)]
+ PathInterpolation(#[from] gix_config::path::interpolate::Error),
+ #[error("{source:?} configuration overrides at open or init time could not be applied.")]
+ ConfigOverrides {
+ #[source]
+ err: overrides::Error,
+ source: gix_config::Source,
+ },
+}
+
+///
+pub mod diff {
+ ///
+ pub mod algorithm {
+ use crate::bstr::BString;
+
+ /// The error produced when obtaining `diff.algorithm`.
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("Unknown diff algorithm named '{name}'")]
+ Unknown { name: BString },
+ #[error("The '{name}' algorithm is not yet implemented")]
+ Unimplemented { name: BString },
+ }
+ }
+}
+
+///
+pub mod checkout_options {
+ /// The error produced when collecting all information needed for checking out files into a worktree.
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ ConfigCheckStat(#[from] super::key::GenericErrorWithValue),
+ #[error(transparent)]
+ ConfigBoolean(#[from] super::boolean::Error),
+ #[error(transparent)]
+ CheckoutWorkers(#[from] super::checkout::workers::Error),
+ #[error("Failed to interpolate the attribute file configured at `core.attributesFile`")]
+ AttributesFileInterpolation(#[from] gix_config::path::interpolate::Error),
+ }
+}
+
+///
+pub mod protocol {
+ ///
+ pub mod allow {
+ use crate::bstr::BString;
+
+ /// The error returned when obtaining the permission for a particular scheme.
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ #[error("The value {value:?} must be allow|deny|user in configuration key protocol{0}.allow", scheme.as_ref().map(|s| format!(".{s}")).unwrap_or_default())]
+ pub struct Error {
+ pub scheme: Option<String>,
+ pub value: BString,
+ }
+ }
+}
+
+///
+pub mod ssh_connect_options {
+ /// The error produced when obtaining ssh connection configuration.
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ #[error(transparent)]
+ pub struct Error(#[from] super::key::GenericErrorWithValue);
+}
+
+///
+pub mod key {
+ use crate::bstr::BString;
+
+ const fn prefix(kind: char) -> &'static str {
+ match kind {
+ 'n' => "", // nothing
+ 'k' => "The value of key", // generic key
+ 't' => "The date format at key", // time
+ 'i' => "The timeout at key", // timeout
+ 'd' => "The duration [ms] at key", // duration
+ 'b' => "The boolean at key", // boolean
+ 'v' => "The key", // generic key with value
+ 'r' => "The refspec at", // refspec
+ 's' => "The ssl version at", // ssl-version
+ 'u' => "The url at", // url
+ 'w' => "The utf-8 string at", // string
+ _ => panic!("BUG: invalid prefix kind - add a case for it here"),
+ }
+ }
+ const fn suffix(kind: char) -> &'static str {
+ match kind {
+ 'd' => "could not be decoded", // decoding
+ 'i' => "was invalid", // invalid
+ 'u' => "could not be parsed as unsigned integer", // unsigned integer
+ 'p' => "could not be parsed", // parsing
+ _ => panic!("BUG: invalid suffix kind - add a case for it here"),
+ }
+ }
+ /// A generic error suitable to produce decent messages for all kinds of configuration errors with config-key granularity.
+ ///
+ /// This error is meant to be reusable and help produce uniform error messages related to parsing any configuration key.
+ #[derive(Debug, thiserror::Error)]
+ #[error("{} \"{key}{}\"{} {}", prefix(PREFIX), value.as_ref().map(|v| format!("={v}")).unwrap_or_default(), environment_override.as_deref().map(|var| format!(" (possibly from {var})")).unwrap_or_default(), suffix(SUFFIX))]
+ pub struct Error<E: std::error::Error + Send + Sync + 'static, const PREFIX: char, const SUFFIX: char> {
+ /// The configuration key that contained the value.
+ pub key: BString,
+ /// The value that was assigned to `key`.
+ pub value: Option<BString>,
+ /// The associated environment variable that would override this value.
+ pub environment_override: Option<&'static str>,
+ /// The source of the error if there was one.
+ pub source: Option<E>,
+ }
+
+ /// Initialization
+ /// Instantiate a new error from the given `key`.
+ ///
+ /// Note that specifics of the error message are defined by the `PREFIX` and `SUFFIX` which is usually defined by a typedef.
+ impl<T, E, const PREFIX: char, const SUFFIX: char> From<&'static T> for Error<E, PREFIX, SUFFIX>
+ where
+ E: std::error::Error + Send + Sync + 'static,
+ T: super::tree::Key,
+ {
+ fn from(key: &'static T) -> Self {
+ Error {
+ key: key.logical_name().into(),
+ value: None,
+ environment_override: key.environment_override(),
+ source: None,
+ }
+ }
+ }
+
+ /// Initialization
+ impl<E, const PREFIX: char, const SUFFIX: char> Error<E, PREFIX, SUFFIX>
+ where
+ E: std::error::Error + Send + Sync + 'static,
+ {
+ /// Instantiate an error with all data from `key` along with the `value` of the key.
+ pub fn from_value(key: &'static impl super::tree::Key, value: BString) -> Self {
+ Error::from(key).with_value(value)
+ }
+ }
+
+ /// Builder
+ impl<E, const PREFIX: char, const SUFFIX: char> Error<E, PREFIX, SUFFIX>
+ where
+ E: std::error::Error + Send + Sync + 'static,
+ {
+ /// Attach the given `err` as source.
+ pub fn with_source(mut self, err: E) -> Self {
+ self.source = Some(err);
+ self
+ }
+
+ /// Attach the given `value` as value we observed when the error was produced.
+ pub fn with_value(mut self, value: BString) -> Self {
+ self.value = Some(value);
+ self
+ }
+ }
+
+ /// A generic key error for use when it doesn't seem worth it say more than 'key is invalid' along with meta-data.
+ pub type GenericError<E = gix_config::value::Error> = Error<E, 'k', 'i'>;
+
+ /// A generic key error which will also contain a value.
+ pub type GenericErrorWithValue<E = gix_config::value::Error> = Error<E, 'v', 'i'>;
+}
+
+///
+pub mod checkout {
+ ///
+ pub mod workers {
+ use crate::config;
+
+ /// The error produced when failing to parse the `checkout.workers` key.
+ pub type Error = config::key::Error<gix_config::value::Error, 'n', 'd'>;
+ }
+}
+
+///
+pub mod abbrev {
+ use crate::bstr::BString;
+
+ /// The error describing an incorrect `core.abbrev` value.
+ #[derive(Debug, thiserror::Error)]
+ #[error("Invalid value for 'core.abbrev' = '{}'. It must be between 4 and {}", .value, .max)]
+ pub struct Error {
+ /// The value found in the git configuration
+ pub value: BString,
+ /// The maximum abbreviation length, the length of an object hash.
+ pub max: u8,
+ }
+}
+
+///
+pub mod remote {
+ ///
+ pub mod symbolic_name {
+ /// The error produced when failing to produce a symbolic remote name from configuration.
+ pub type Error = super::super::key::Error<crate::remote::name::Error, 'v', 'i'>;
+ }
+}
+
+///
+pub mod time {
+ /// The error produced when failing to parse time from configuration.
+ pub type Error = super::key::Error<gix_date::parse::Error, 't', 'i'>;
+}
+
+///
+pub mod lock_timeout {
+ /// The error produced when failing to parse timeout for locks.
+ pub type Error = super::key::Error<gix_config::value::Error, 'i', 'i'>;
+}
+
+///
+pub mod duration {
+ /// The error produced when failing to parse durations (in milliseconds).
+ pub type Error = super::key::Error<gix_config::value::Error, 'd', 'i'>;
+}
+
+///
+pub mod boolean {
+ /// The error produced when failing to parse a boolean from configuration.
+ pub type Error = super::key::Error<gix_config::value::Error, 'b', 'i'>;
+}
+
+///
+pub mod unsigned_integer {
+ /// The error produced when failing to parse an unsigned integer from configuration.
+ pub type Error = super::key::Error<gix_config::value::Error, 'k', 'u'>;
+}
+
+///
+pub mod url {
+ /// The error produced when failing to parse a url from the configuration.
+ pub type Error = super::key::Error<gix_url::parse::Error, 'u', 'p'>;
+}
+
+///
+pub mod string {
+ /// The error produced when failing to interpret configuration as UTF-8 encoded string.
+ pub type Error = super::key::Error<crate::bstr::Utf8Error, 'w', 'd'>;
+}
+
+///
+pub mod refspec {
+ /// The error produced when failing to parse a refspec from the configuration.
+ pub type Error = super::key::Error<gix_refspec::parse::Error, 'r', 'p'>;
+}
+
+///
+pub mod ssl_version {
+ /// The error produced when failing to parse an ssl version from the configuration.
+ pub type Error = super::key::Error<std::convert::Infallible, 's', 'i'>;
+}
+
+///
+pub mod transport {
+ use std::borrow::Cow;
+
+ use crate::bstr::BStr;
+
+ /// The error produced when configuring a transport for a particular protocol.
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(
+ "Could not interpret configuration key {key:?} as {kind} integer of desired range with value: {actual}"
+ )]
+ InvalidInteger {
+ key: &'static str,
+ kind: &'static str,
+ actual: i64,
+ },
+ #[error("Could not interpret configuration key {key:?}")]
+ ConfigValue {
+ source: gix_config::value::Error,
+ key: &'static str,
+ },
+ #[error("Could not interpolate path at key {key:?}")]
+ InterpolatePath {
+ source: gix_config::path::interpolate::Error,
+ key: &'static str,
+ },
+ #[error("Could not decode value at key {key:?} as UTF-8 string")]
+ IllformedUtf8 {
+ key: Cow<'static, BStr>,
+ source: crate::config::string::Error,
+ },
+ #[error("Invalid URL passed for configuration")]
+ ParseUrl(#[from] gix_url::parse::Error),
+ #[error("Could obtain configuration for an HTTP url")]
+ Http(#[from] http::Error),
+ }
+
+ ///
+ pub mod http {
+ use std::borrow::Cow;
+
+ use crate::bstr::BStr;
+
+ /// The error produced when configuring a HTTP transport.
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ Boolean(#[from] crate::config::boolean::Error),
+ #[error(transparent)]
+ UnsignedInteger(#[from] crate::config::unsigned_integer::Error),
+ #[error(transparent)]
+ ConnectTimeout(#[from] crate::config::duration::Error),
+ #[error("The proxy authentication at key `{key}` is invalid")]
+ InvalidProxyAuthMethod {
+ source: crate::config::key::GenericErrorWithValue,
+ key: Cow<'static, BStr>,
+ },
+ #[error("Could not configure the credential helpers for the authenticated proxy url")]
+ ConfigureProxyAuthenticate(#[from] crate::config::snapshot::credential_helpers::Error),
+ #[error(transparent)]
+ InvalidSslVersion(#[from] crate::config::ssl_version::Error),
+ #[error("The HTTP version must be 'HTTP/2' or 'HTTP/1.1'")]
+ InvalidHttpVersion(#[from] crate::config::key::GenericErrorWithValue),
+ #[error("The follow redirects value 'initial', or boolean true or false")]
+ InvalidFollowRedirects(#[source] crate::config::key::GenericErrorWithValue),
+ }
+ }
+}
+
+/// Utility type to keep pre-obtained configuration values, only for those required during initial setup
+/// and other basic operations that are common enough to warrant a permanent cache.
+///
+/// All other values are obtained lazily using OnceCell.
+#[derive(Clone)]
+pub(crate) struct Cache {
+ pub resolved: crate::Config,
+ /// The hex-length to assume when shortening object ids. If `None`, it should be computed based on the approximate object count.
+ pub hex_len: Option<usize>,
+ /// true if the repository is designated as 'bare', without work tree.
+ pub is_bare: bool,
+ /// The type of hash to use.
+ pub object_hash: gix_hash::Kind,
+ /// If true, multi-pack indices, whether present or not, may be used by the object database.
+ pub use_multi_pack_index: bool,
+ /// The representation of `core.logallrefupdates`, or `None` if the variable wasn't set.
+ pub reflog: Option<gix_ref::store::WriteReflog>,
+ /// The configured user agent for presentation to servers.
+ pub(crate) user_agent: OnceCell<String>,
+ /// identities for later use, lazy initialization.
+ pub(crate) personas: OnceCell<identity::Personas>,
+ /// A lazily loaded rewrite list for remote urls
+ pub(crate) url_rewrite: OnceCell<crate::remote::url::Rewrite>,
+ /// The lazy-loaded rename information for diffs.
+ pub(crate) diff_renames: OnceCell<Option<crate::object::tree::diff::Rewrites>>,
+ /// A lazily loaded mapping to know which url schemes to allow
+ #[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+ pub(crate) url_scheme: OnceCell<crate::remote::url::SchemePermission>,
+ /// The algorithm to use when diffing blobs
+ pub(crate) diff_algorithm: OnceCell<gix_diff::blob::Algorithm>,
+ /// The amount of bytes to use for a memory backed delta pack cache. If `Some(0)`, no cache is used, if `None`
+ /// a standard cache is used which costs near to nothing and always pays for itself.
+ pub(crate) pack_cache_bytes: Option<usize>,
+ /// The amount of bytes to use for caching whole objects, or 0 to turn it off entirely.
+ pub(crate) object_cache_bytes: usize,
+ /// The config section filter from the options used to initialize this instance. Keep these in sync!
+ filter_config_section: fn(&gix_config::file::Metadata) -> bool,
+ /// The object kind to pick if a prefix is ambiguous.
+ pub object_kind_hint: Option<spec::parse::ObjectKindHint>,
+ /// If true, we are on a case-insensitive file system.
+ pub ignore_case: bool,
+ /// If true, we should default what's possible if something is misconfigured, on case by case basis, to be more resilient.
+ /// Also available in options! Keep in sync!
+ pub lenient_config: bool,
+ /// Define how we can use values obtained with `xdg_config(…)` and its `XDG_CONFIG_HOME` variable.
+ xdg_config_home_env: gix_sec::Permission,
+ /// Define how we can use values obtained with `xdg_config(…)` and its `HOME` variable.
+ home_env: gix_sec::Permission,
+ // TODO: make core.precomposeUnicode available as well.
+}
diff --git a/vendor/gix/src/config/overrides.rs b/vendor/gix/src/config/overrides.rs
new file mode 100644
index 000000000..f43e8471b
--- /dev/null
+++ b/vendor/gix/src/config/overrides.rs
@@ -0,0 +1,49 @@
+use std::convert::TryFrom;
+
+use crate::bstr::{BStr, BString, ByteSlice};
+
+/// The error returned by [SnapshotMut::append_config()][crate::config::SnapshotMut::append_config()].
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum Error {
+ #[error("{input:?} is not a valid configuration key. Examples are 'core.abbrev' or 'remote.origin.url'")]
+ InvalidKey { input: BString },
+ #[error("Key {key:?} could not be parsed")]
+ SectionKey {
+ key: BString,
+ source: gix_config::parse::section::key::Error,
+ },
+ #[error(transparent)]
+ SectionHeader(#[from] gix_config::parse::section::header::Error),
+}
+
+pub(crate) fn append(
+ config: &mut gix_config::File<'static>,
+ values: impl IntoIterator<Item = impl AsRef<BStr>>,
+ source: gix_config::Source,
+ mut make_comment: impl FnMut(&BStr) -> Option<BString>,
+) -> Result<(), Error> {
+ let mut file = gix_config::File::new(gix_config::file::Metadata::from(source));
+ for key_value in values {
+ let key_value = key_value.as_ref();
+ let mut tokens = key_value.splitn(2, |b| *b == b'=').map(|v| v.trim());
+ let key = tokens.next().expect("always one value").as_bstr();
+ let value = tokens.next();
+ let key = gix_config::parse::key(key.to_str().map_err(|_| Error::InvalidKey { input: key.into() })?)
+ .ok_or_else(|| Error::InvalidKey { input: key.into() })?;
+ let mut section = file.section_mut_or_create_new(key.section_name, key.subsection_name)?;
+ let key =
+ gix_config::parse::section::Key::try_from(key.value_name.to_owned()).map_err(|err| Error::SectionKey {
+ source: err,
+ key: key.value_name.into(),
+ })?;
+ let comment = make_comment(key_value);
+ let value = value.map(|v| v.as_bstr());
+ match comment {
+ Some(comment) => section.push_with_comment(key, value, &**comment),
+ None => section.push(key, value),
+ };
+ }
+ config.append(file);
+ Ok(())
+}
diff --git a/vendor/gix/src/config/snapshot/_impls.rs b/vendor/gix/src/config/snapshot/_impls.rs
new file mode 100644
index 000000000..ec22cb640
--- /dev/null
+++ b/vendor/gix/src/config/snapshot/_impls.rs
@@ -0,0 +1,76 @@
+use std::{
+ fmt::{Debug, Formatter},
+ ops::{Deref, DerefMut},
+};
+
+use crate::config::{CommitAutoRollback, Snapshot, SnapshotMut};
+
+impl Debug for Snapshot<'_> {
+ fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+ f.write_str(&self.repo.config.resolved.to_string())
+ }
+}
+
+impl Debug for CommitAutoRollback<'_> {
+ fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+ f.write_str(&self.repo.as_ref().expect("still present").config.resolved.to_string())
+ }
+}
+
+impl Debug for SnapshotMut<'_> {
+ fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+ f.write_str(&self.config.to_string())
+ }
+}
+
+impl Drop for SnapshotMut<'_> {
+ fn drop(&mut self) {
+ if let Some(repo) = self.repo.take() {
+ self.commit_inner(repo).ok();
+ };
+ }
+}
+
+impl Drop for CommitAutoRollback<'_> {
+ fn drop(&mut self) {
+ if let Some(repo) = self.repo.take() {
+ self.rollback_inner(repo).ok();
+ }
+ }
+}
+
+impl Deref for SnapshotMut<'_> {
+ type Target = gix_config::File<'static>;
+
+ fn deref(&self) -> &Self::Target {
+ &self.config
+ }
+}
+
+impl Deref for Snapshot<'_> {
+ type Target = gix_config::File<'static>;
+
+ fn deref(&self) -> &Self::Target {
+ self.plumbing()
+ }
+}
+
+impl Deref for CommitAutoRollback<'_> {
+ type Target = crate::Repository;
+
+ fn deref(&self) -> &Self::Target {
+ self.repo.as_ref().expect("always present")
+ }
+}
+
+impl DerefMut for CommitAutoRollback<'_> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ self.repo.as_mut().expect("always present")
+ }
+}
+
+impl DerefMut for SnapshotMut<'_> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.config
+ }
+}
diff --git a/vendor/gix/src/config/snapshot/access.rs b/vendor/gix/src/config/snapshot/access.rs
new file mode 100644
index 000000000..1710348a9
--- /dev/null
+++ b/vendor/gix/src/config/snapshot/access.rs
@@ -0,0 +1,143 @@
+#![allow(clippy::result_large_err)]
+use std::borrow::Cow;
+
+use gix_features::threading::OwnShared;
+
+use crate::{
+ bstr::BStr,
+ config::{CommitAutoRollback, Snapshot, SnapshotMut},
+};
+
+/// Access configuration values, frozen in time, using a `key` which is a `.` separated string of up to
+/// three tokens, namely `section_name.[subsection_name.]value_name`, like `core.bare` or `remote.origin.url`.
+///
+/// Note that single-value methods always return the last value found, which is the one set most recently in the
+/// hierarchy of configuration files, aka 'last one wins'.
+impl<'repo> Snapshot<'repo> {
+ /// Return the boolean at `key`, or `None` if there is no such value or if the value can't be interpreted as
+ /// boolean.
+ ///
+ /// For a non-degenerating version, use [`try_boolean(…)`][Self::try_boolean()].
+ ///
+ /// Note that this method takes the most recent value at `key` even if it is from a file with reduced trust.
+ pub fn boolean<'a>(&self, key: impl Into<&'a BStr>) -> Option<bool> {
+ self.try_boolean(key).and_then(Result::ok)
+ }
+
+ /// Like [`boolean()`][Self::boolean()], but it will report an error if the value couldn't be interpreted as boolean.
+ pub fn try_boolean<'a>(&self, key: impl Into<&'a BStr>) -> Option<Result<bool, gix_config::value::Error>> {
+ self.repo.config.resolved.boolean_by_key(key)
+ }
+
+ /// Return the resolved integer at `key`, or `None` if there is no such value or if the value can't be interpreted as
+ /// integer or exceeded the value range.
+ ///
+ /// For a non-degenerating version, use [`try_integer(…)`][Self::try_integer()].
+ ///
+ /// Note that this method takes the most recent value at `key` even if it is from a file with reduced trust.
+ pub fn integer<'a>(&self, key: impl Into<&'a BStr>) -> Option<i64> {
+ self.try_integer(key).and_then(Result::ok)
+ }
+
+ /// Like [`integer()`][Self::integer()], but it will report an error if the value couldn't be interpreted as an integer.
+ pub fn try_integer<'a>(&self, key: impl Into<&'a BStr>) -> Option<Result<i64, gix_config::value::Error>> {
+ self.repo.config.resolved.integer_by_key(key)
+ }
+
+ /// Return the string at `key`, or `None` if there is no such value.
+ ///
+ /// Note that this method takes the most recent value at `key` even if it is from a file with reduced trust.
+ pub fn string<'a>(&self, key: impl Into<&'a BStr>) -> Option<Cow<'_, BStr>> {
+ self.repo.config.resolved.string_by_key(key)
+ }
+
+ /// Return the trusted and fully interpolated path at `key`, or `None` if there is no such value
+ /// or if no value was found in a trusted file.
+ /// An error occurs if the path could not be interpolated to its final value.
+ pub fn trusted_path<'a>(
+ &self,
+ key: impl Into<&'a BStr>,
+ ) -> Option<Result<Cow<'_, std::path::Path>, gix_config::path::interpolate::Error>> {
+ let key = gix_config::parse::key(key)?;
+ self.repo
+ .config
+ .trusted_file_path(key.section_name, key.subsection_name, key.value_name)
+ }
+}
+
+/// Utilities and additional access
+impl<'repo> Snapshot<'repo> {
+ /// Returns the underlying configuration implementation for a complete API, despite being a little less convenient.
+ ///
+ /// It's expected that more functionality will move up depending on demand.
+ pub fn plumbing(&self) -> &gix_config::File<'static> {
+ &self.repo.config.resolved
+ }
+}
+
+/// Utilities
+impl<'repo> SnapshotMut<'repo> {
+ /// Append configuration values of the form `core.abbrev=5` or `remote.origin.url = foo` or `core.bool-implicit-true`
+ /// to the end of the repository configuration, with each section marked with the given `source`.
+ ///
+ /// Note that doing so applies the configuration at the very end, so it will always override what came before it
+ /// even though the `source` is of lower priority as what's there.
+ pub fn append_config(
+ &mut self,
+ values: impl IntoIterator<Item = impl AsRef<BStr>>,
+ source: gix_config::Source,
+ ) -> Result<&mut Self, crate::config::overrides::Error> {
+ crate::config::overrides::append(&mut self.config, values, source, |v| Some(format!("-c {v}").into()))?;
+ Ok(self)
+ }
+ /// Apply all changes made to this instance.
+ ///
+ /// Note that this would also happen once this instance is dropped, but using this method may be more intuitive and won't squelch errors
+ /// in case the new configuration is partially invalid.
+ pub fn commit(mut self) -> Result<&'repo mut crate::Repository, crate::config::Error> {
+ let repo = self.repo.take().expect("always present here");
+ self.commit_inner(repo)
+ }
+
+ pub(crate) fn commit_inner(
+ &mut self,
+ repo: &'repo mut crate::Repository,
+ ) -> Result<&'repo mut crate::Repository, crate::config::Error> {
+ repo.reread_values_and_clear_caches_replacing_config(std::mem::take(&mut self.config).into())?;
+ Ok(repo)
+ }
+
+ /// Create a structure that temporarily commits the changes, but rolls them back when dropped.
+ pub fn commit_auto_rollback(mut self) -> Result<CommitAutoRollback<'repo>, crate::config::Error> {
+ let repo = self.repo.take().expect("this only runs once on consumption");
+ let prev_config = OwnShared::clone(&repo.config.resolved);
+
+ Ok(CommitAutoRollback {
+ repo: self.commit_inner(repo)?.into(),
+ prev_config,
+ })
+ }
+
+ /// Don't apply any of the changes after consuming this instance, effectively forgetting them, returning the changed configuration.
+ pub fn forget(mut self) -> gix_config::File<'static> {
+ self.repo.take();
+ std::mem::take(&mut self.config)
+ }
+}
+
+/// Utilities
+impl<'repo> CommitAutoRollback<'repo> {
+ /// Roll back the changes previously applied, restoring all values to what they were before the change.
+ pub fn rollback(mut self) -> Result<&'repo mut crate::Repository, crate::config::Error> {
+ let repo = self.repo.take().expect("still present, consumed only once");
+ self.rollback_inner(repo)
+ }
+
+ pub(crate) fn rollback_inner(
+ &mut self,
+ repo: &'repo mut crate::Repository,
+ ) -> Result<&'repo mut crate::Repository, crate::config::Error> {
+ repo.reread_values_and_clear_caches_replacing_config(OwnShared::clone(&self.prev_config))?;
+ Ok(repo)
+ }
+}
diff --git a/vendor/gix/src/config/snapshot/credential_helpers.rs b/vendor/gix/src/config/snapshot/credential_helpers.rs
new file mode 100644
index 000000000..5a07e9fe2
--- /dev/null
+++ b/vendor/gix/src/config/snapshot/credential_helpers.rs
@@ -0,0 +1,183 @@
+use std::{borrow::Cow, convert::TryFrom};
+
+pub use error::Error;
+
+use crate::{
+ bstr::{ByteSlice, ByteVec},
+ config::{
+ tree::{credential, Core, Credential, Key},
+ Snapshot,
+ },
+};
+
+mod error {
+ use crate::bstr::BString;
+
+ /// The error returned by [Snapshot::credential_helpers()][super::Snapshot::credential_helpers()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("Could not parse 'useHttpPath' key in section {section}")]
+ InvalidUseHttpPath {
+ section: BString,
+ source: gix_config::value::Error,
+ },
+ #[error("core.askpass could not be read")]
+ CoreAskpass(#[from] gix_config::path::interpolate::Error),
+ }
+}
+
+impl Snapshot<'_> {
+ /// Returns the configuration for all git-credential helpers from trusted configuration that apply
+ /// to the given `url` along with an action preconfigured to invoke the cascade with.
+ /// This includes `url` which may be altered to contain a user-name as configured.
+ ///
+ /// These can be invoked to obtain credentials. Note that the `url` is expected to be the one used
+ /// to connect to a remote, and thus should already have passed the url-rewrite engine.
+ ///
+ /// # Deviation
+ ///
+ /// - Invalid urls can't be used to obtain credential helpers as they are rejected early when creating a valid `url` here.
+ /// - Parsed urls will automatically drop the port if it's the default, i.e. `http://host:80` becomes `http://host` when parsed.
+ /// This affects the prompt provided to the user, so that git will use the verbatim url, whereas we use `http://host`.
+ /// - Upper-case scheme and host will be lower-cased automatically when parsing into a url, so prompts differ compared to git.
+ /// - A **difference in prompt might affect the matching of getting existing stored credentials**, and it's a question of this being
+ /// a feature or a bug.
+ // TODO: when dealing with `http.*.*` configuration, generalize this algorithm as needed and support precedence.
+ pub fn credential_helpers(
+ &self,
+ mut url: gix_url::Url,
+ ) -> Result<
+ (
+ gix_credentials::helper::Cascade,
+ gix_credentials::helper::Action,
+ gix_prompt::Options<'static>,
+ ),
+ Error,
+ > {
+ let mut programs = Vec::new();
+ let mut use_http_path = false;
+ let url_had_user_initially = url.user().is_some();
+ normalize(&mut url);
+
+ if let Some(credential_sections) = self
+ .repo
+ .config
+ .resolved
+ .sections_by_name_and_filter("credential", &mut self.repo.filter_config_section())
+ {
+ for section in credential_sections {
+ let section = match section.header().subsection_name() {
+ Some(pattern) => gix_url::parse(pattern).ok().and_then(|mut pattern| {
+ normalize(&mut pattern);
+ let is_http = matches!(pattern.scheme, gix_url::Scheme::Https | gix_url::Scheme::Http);
+ let scheme = &pattern.scheme;
+ let host = pattern.host();
+ let ports = is_http
+ .then(|| (pattern.port_or_default(), url.port_or_default()))
+ .unwrap_or((pattern.port, url.port));
+ let path = (!(is_http && pattern.path_is_root())).then_some(&pattern.path);
+
+ if !path.map_or(true, |path| path == &url.path) {
+ return None;
+ }
+ if pattern.user().is_some() && pattern.user() != url.user() {
+ return None;
+ }
+ (scheme == &url.scheme && host_matches(host, url.host()) && ports.0 == ports.1).then_some((
+ section,
+ &credential::UrlParameter::HELPER,
+ &credential::UrlParameter::USERNAME,
+ &credential::UrlParameter::USE_HTTP_PATH,
+ ))
+ }),
+ None => Some((
+ section,
+ &Credential::HELPER,
+ &Credential::USERNAME,
+ &Credential::USE_HTTP_PATH,
+ )),
+ };
+ if let Some((section, helper_key, username_key, use_http_path_key)) = section {
+ for value in section.values(helper_key.name) {
+ if value.trim().is_empty() {
+ programs.clear();
+ } else {
+ programs.push(gix_credentials::Program::from_custom_definition(value.into_owned()));
+ }
+ }
+ if let Some(Some(user)) = (!url_had_user_initially).then(|| {
+ section
+ .value(username_key.name)
+ .filter(|n| !n.trim().is_empty())
+ .and_then(|n| {
+ let n: Vec<_> = Cow::into_owned(n).into();
+ n.into_string().ok()
+ })
+ }) {
+ url.set_user(Some(user));
+ }
+ if let Some(toggle) = section
+ .value(use_http_path_key.name)
+ .map(|val| {
+ gix_config::Boolean::try_from(val)
+ .map_err(|err| Error::InvalidUseHttpPath {
+ source: err,
+ section: section.header().to_bstring(),
+ })
+ .map(|b| b.0)
+ })
+ .transpose()?
+ {
+ use_http_path = toggle;
+ }
+ }
+ }
+ }
+
+ let allow_git_env = self.repo.options.permissions.env.git_prefix.is_allowed();
+ let allow_ssh_env = self.repo.options.permissions.env.ssh_prefix.is_allowed();
+ let prompt_options = gix_prompt::Options {
+ askpass: self
+ .trusted_path(Core::ASKPASS.logical_name().as_str())
+ .transpose()?
+ .map(|c| Cow::Owned(c.into_owned())),
+ ..Default::default()
+ }
+ .apply_environment(allow_git_env, allow_ssh_env, allow_git_env);
+ Ok((
+ gix_credentials::helper::Cascade {
+ programs,
+ use_http_path,
+ // The default ssh implementation uses binaries that do their own auth, so our passwords aren't used.
+ query_user_only: url.scheme == gix_url::Scheme::Ssh,
+ ..Default::default()
+ },
+ gix_credentials::helper::Action::get_for_url(url.to_bstring()),
+ prompt_options,
+ ))
+ }
+}
+
+fn host_matches(pattern: Option<&str>, host: Option<&str>) -> bool {
+ match (pattern, host) {
+ (Some(pattern), Some(host)) => {
+ let lfields = pattern.split('.');
+ let rfields = host.split('.');
+ if lfields.clone().count() != rfields.clone().count() {
+ return false;
+ }
+ lfields
+ .zip(rfields)
+ .all(|(pat, value)| gix_glob::wildmatch(pat.into(), value.into(), gix_glob::wildmatch::Mode::empty()))
+ }
+ (None, None) => true,
+ (Some(_), None) | (None, Some(_)) => false,
+ }
+}
+
+fn normalize(url: &mut gix_url::Url) {
+ if !url.path_is_root() && url.path.ends_with(b"/") {
+ url.path.pop();
+ }
+}
diff --git a/vendor/gix/src/config/snapshot/mod.rs b/vendor/gix/src/config/snapshot/mod.rs
new file mode 100644
index 000000000..80ec6f948
--- /dev/null
+++ b/vendor/gix/src/config/snapshot/mod.rs
@@ -0,0 +1,5 @@
+mod _impls;
+mod access;
+
+///
+pub mod credential_helpers;
diff --git a/vendor/gix/src/config/tree/keys.rs b/vendor/gix/src/config/tree/keys.rs
new file mode 100644
index 000000000..1cdd187d0
--- /dev/null
+++ b/vendor/gix/src/config/tree/keys.rs
@@ -0,0 +1,629 @@
+#![allow(clippy::result_large_err)]
+use std::{
+ borrow::Cow,
+ error::Error,
+ fmt::{Debug, Formatter},
+};
+
+use crate::{
+ bstr::BStr,
+ config,
+ config::tree::{Key, Link, Note, Section, SubSectionRequirement},
+};
+
+/// Implements a value without any constraints, i.e. any value.
+pub struct Any<T: Validate = validate::All> {
+ /// The key of the value in the git configuration.
+ pub name: &'static str,
+ /// The parent section of the key.
+ pub section: &'static dyn Section,
+ /// The subsection requirement to use.
+ pub subsection_requirement: Option<SubSectionRequirement>,
+ /// A link to other resources that might be eligible as value.
+ pub link: Option<Link>,
+ /// A note about this key.
+ pub note: Option<Note>,
+ /// The way validation and transformation should happen.
+ validate: T,
+}
+
+/// Init
+impl Any<validate::All> {
+ /// Create a new instance from `name` and `section`
+ pub const fn new(name: &'static str, section: &'static dyn Section) -> Self {
+ Any::new_with_validate(name, section, validate::All)
+ }
+}
+
+/// Init other validate implementations
+impl<T: Validate> Any<T> {
+ /// Create a new instance from `name` and `section`
+ pub const fn new_with_validate(name: &'static str, section: &'static dyn Section, validate: T) -> Self {
+ Any {
+ name,
+ section,
+ subsection_requirement: Some(SubSectionRequirement::Never),
+ link: None,
+ note: None,
+ validate,
+ }
+ }
+}
+
+/// Builder
+impl<T: Validate> Any<T> {
+ /// Set the subsection requirement to non-default values.
+ pub const fn with_subsection_requirement(mut self, requirement: Option<SubSectionRequirement>) -> Self {
+ self.subsection_requirement = requirement;
+ self
+ }
+
+ /// Associate an environment variable with this key.
+ ///
+ /// This is mainly useful for enriching error messages.
+ pub const fn with_environment_override(mut self, var: &'static str) -> Self {
+ self.link = Some(Link::EnvironmentOverride(var));
+ self
+ }
+
+ /// Set a link to another key which serves as fallback to provide a value if this key is not set.
+ pub const fn with_fallback(mut self, key: &'static dyn Key) -> Self {
+ self.link = Some(Link::FallbackKey(key));
+ self
+ }
+
+ /// Attach an informative message to this key.
+ pub const fn with_note(mut self, message: &'static str) -> Self {
+ self.note = Some(Note::Informative(message));
+ self
+ }
+
+ /// Inform about a deviation in how this key is interpreted.
+ pub const fn with_deviation(mut self, message: &'static str) -> Self {
+ self.note = Some(Note::Deviation(message));
+ self
+ }
+}
+
+/// Conversion
+impl<T: Validate> Any<T> {
+ /// Try to convert `value` into a refspec suitable for the `op` operation.
+ pub fn try_into_refspec(
+ &'static self,
+ value: std::borrow::Cow<'_, BStr>,
+ op: gix_refspec::parse::Operation,
+ ) -> Result<gix_refspec::RefSpec, config::refspec::Error> {
+ gix_refspec::parse(value.as_ref(), op)
+ .map(|spec| spec.to_owned())
+ .map_err(|err| config::refspec::Error::from_value(self, value.into_owned()).with_source(err))
+ }
+
+ /// Try to interpret `value` as UTF-8 encoded string.
+ pub fn try_into_string(&'static self, value: Cow<'_, BStr>) -> Result<std::string::String, config::string::Error> {
+ use crate::bstr::ByteVec;
+ Vec::from(value.into_owned()).into_string().map_err(|err| {
+ let utf8_err = err.utf8_error().clone();
+ config::string::Error::from_value(self, err.into_vec().into()).with_source(utf8_err)
+ })
+ }
+}
+
+impl<T: Validate> Debug for Any<T> {
+ fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+ self.logical_name().fmt(f)
+ }
+}
+
+impl<T: Validate> std::fmt::Display for Any<T> {
+ fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+ f.write_str(&self.logical_name())
+ }
+}
+
+impl<T: Validate> Key for Any<T> {
+ fn name(&self) -> &str {
+ self.name
+ }
+
+ fn validate(&self, value: &BStr) -> Result<(), config::tree::key::validate::Error> {
+ Ok(self.validate.validate(value)?)
+ }
+
+ fn section(&self) -> &dyn Section {
+ self.section
+ }
+
+ fn subsection_requirement(&self) -> Option<&SubSectionRequirement> {
+ self.subsection_requirement.as_ref()
+ }
+
+ fn link(&self) -> Option<&Link> {
+ self.link.as_ref()
+ }
+
+ fn note(&self) -> Option<&Note> {
+ self.note.as_ref()
+ }
+}
+
+/// A key which represents a date.
+pub type Time = Any<validate::Time>;
+
+/// The `core.(filesRefLockTimeout|packedRefsTimeout)` keys, or any other lock timeout for that matter.
+pub type LockTimeout = Any<validate::LockTimeout>;
+
+/// Keys specifying durations in milliseconds.
+pub type DurationInMilliseconds = Any<validate::DurationInMilliseconds>;
+
+/// A key which represents any unsigned integer.
+pub type UnsignedInteger = Any<validate::UnsignedInteger>;
+
+/// A key that represents a remote name, either as url or symbolic name.
+pub type RemoteName = Any<validate::RemoteName>;
+
+/// A key that represents a boolean value.
+pub type Boolean = Any<validate::Boolean>;
+
+/// A key that represents an executable program, shell script or shell commands.
+pub type Program = Any<validate::Program>;
+
+/// A key that represents an executable program as identified by name or path.
+pub type Executable = Any<validate::Executable>;
+
+/// A key that represents a path (to a resource).
+pub type Path = Any<validate::Path>;
+
+/// A key that represents a URL.
+pub type Url = Any<validate::Url>;
+
+/// A key that represents a UTF-8 string.
+pub type String = Any<validate::String>;
+
+/// A key that represents a RefSpec for pushing.
+pub type PushRefSpec = Any<validate::PushRefSpec>;
+
+/// A key that represents a RefSpec for fetching.
+pub type FetchRefSpec = Any<validate::FetchRefSpec>;
+
+mod duration {
+ use std::time::Duration;
+
+ use crate::{
+ config,
+ config::tree::{keys::DurationInMilliseconds, Section},
+ };
+
+ impl DurationInMilliseconds {
+ /// Create a new instance.
+ pub const fn new_duration(name: &'static str, section: &'static dyn Section) -> Self {
+ Self::new_with_validate(name, section, super::validate::DurationInMilliseconds)
+ }
+
+ /// Return a valid duration as parsed from an integer that is interpreted as milliseconds.
+ pub fn try_into_duration(
+ &'static self,
+ value: Result<i64, gix_config::value::Error>,
+ ) -> Result<std::time::Duration, config::duration::Error> {
+ let value = value.map_err(|err| config::duration::Error::from(self).with_source(err))?;
+ Ok(match value {
+ val if val < 0 => Duration::from_secs(u64::MAX),
+ val => Duration::from_millis(val.try_into().expect("i64 to u64 always works if positive")),
+ })
+ }
+ }
+}
+
+mod lock_timeout {
+ use std::time::Duration;
+
+ use gix_lock::acquire::Fail;
+
+ use crate::{
+ config,
+ config::tree::{keys::LockTimeout, Section},
+ };
+
+ impl LockTimeout {
+ /// Create a new instance.
+ pub const fn new_lock_timeout(name: &'static str, section: &'static dyn Section) -> Self {
+ Self::new_with_validate(name, section, super::validate::LockTimeout)
+ }
+
+ /// Return information on how long to wait for locked files.
+ pub fn try_into_lock_timeout(
+ &'static self,
+ value: Result<i64, gix_config::value::Error>,
+ ) -> Result<gix_lock::acquire::Fail, config::lock_timeout::Error> {
+ let value = value.map_err(|err| config::lock_timeout::Error::from(self).with_source(err))?;
+ Ok(match value {
+ val if val < 0 => Fail::AfterDurationWithBackoff(Duration::from_secs(u64::MAX)),
+ val if val == 0 => Fail::Immediately,
+ val => Fail::AfterDurationWithBackoff(Duration::from_millis(
+ val.try_into().expect("i64 to u64 always works if positive"),
+ )),
+ })
+ }
+ }
+}
+
+mod refspecs {
+ use crate::config::tree::{
+ keys::{validate, FetchRefSpec, PushRefSpec},
+ Section,
+ };
+
+ impl PushRefSpec {
+ /// Create a new instance.
+ pub const fn new_push_refspec(name: &'static str, section: &'static dyn Section) -> Self {
+ Self::new_with_validate(name, section, validate::PushRefSpec)
+ }
+ }
+
+ impl FetchRefSpec {
+ /// Create a new instance.
+ pub const fn new_fetch_refspec(name: &'static str, section: &'static dyn Section) -> Self {
+ Self::new_with_validate(name, section, validate::FetchRefSpec)
+ }
+ }
+}
+
+mod url {
+ use std::borrow::Cow;
+
+ use crate::{
+ bstr::BStr,
+ config,
+ config::tree::{
+ keys::{validate, Url},
+ Section,
+ },
+ };
+
+ impl Url {
+ /// Create a new instance.
+ pub const fn new_url(name: &'static str, section: &'static dyn Section) -> Self {
+ Self::new_with_validate(name, section, validate::Url)
+ }
+
+ /// Try to parse `value` as URL.
+ pub fn try_into_url(&'static self, value: Cow<'_, BStr>) -> Result<gix_url::Url, config::url::Error> {
+ gix_url::parse(value.as_ref())
+ .map_err(|err| config::url::Error::from_value(self, value.into_owned()).with_source(err))
+ }
+ }
+}
+
+impl String {
+ /// Create a new instance.
+ pub const fn new_string(name: &'static str, section: &'static dyn Section) -> Self {
+ Self::new_with_validate(name, section, validate::String)
+ }
+}
+
+impl Program {
+ /// Create a new instance.
+ pub const fn new_program(name: &'static str, section: &'static dyn Section) -> Self {
+ Self::new_with_validate(name, section, validate::Program)
+ }
+}
+
+impl Executable {
+ /// Create a new instance.
+ pub const fn new_executable(name: &'static str, section: &'static dyn Section) -> Self {
+ Self::new_with_validate(name, section, validate::Executable)
+ }
+}
+
+impl Path {
+ /// Create a new instance.
+ pub const fn new_path(name: &'static str, section: &'static dyn Section) -> Self {
+ Self::new_with_validate(name, section, validate::Path)
+ }
+}
+
+mod workers {
+ use crate::config::tree::{keys::UnsignedInteger, Section};
+
+ impl UnsignedInteger {
+ /// Create a new instance.
+ pub const fn new_unsigned_integer(name: &'static str, section: &'static dyn Section) -> Self {
+ Self::new_with_validate(name, section, super::validate::UnsignedInteger)
+ }
+
+ /// Convert `value` into a `usize` or wrap it into a specialized error.
+ pub fn try_into_usize(
+ &'static self,
+ value: Result<i64, gix_config::value::Error>,
+ ) -> Result<usize, crate::config::unsigned_integer::Error> {
+ value
+ .map_err(|err| crate::config::unsigned_integer::Error::from(self).with_source(err))
+ .and_then(|value| {
+ value
+ .try_into()
+ .map_err(|_| crate::config::unsigned_integer::Error::from(self))
+ })
+ }
+
+ /// Convert `value` into a `u64` or wrap it into a specialized error.
+ pub fn try_into_u64(
+ &'static self,
+ value: Result<i64, gix_config::value::Error>,
+ ) -> Result<u64, crate::config::unsigned_integer::Error> {
+ value
+ .map_err(|err| crate::config::unsigned_integer::Error::from(self).with_source(err))
+ .and_then(|value| {
+ value
+ .try_into()
+ .map_err(|_| crate::config::unsigned_integer::Error::from(self))
+ })
+ }
+
+ /// Convert `value` into a `u32` or wrap it into a specialized error.
+ pub fn try_into_u32(
+ &'static self,
+ value: Result<i64, gix_config::value::Error>,
+ ) -> Result<u32, crate::config::unsigned_integer::Error> {
+ value
+ .map_err(|err| crate::config::unsigned_integer::Error::from(self).with_source(err))
+ .and_then(|value| {
+ value
+ .try_into()
+ .map_err(|_| crate::config::unsigned_integer::Error::from(self))
+ })
+ }
+ }
+}
+
+mod time {
+ use std::borrow::Cow;
+
+ use crate::{
+ bstr::{BStr, ByteSlice},
+ config::tree::{
+ keys::{validate, Time},
+ Section,
+ },
+ };
+
+ impl Time {
+ /// Create a new instance.
+ pub const fn new_time(name: &'static str, section: &'static dyn Section) -> Self {
+ Self::new_with_validate(name, section, validate::Time)
+ }
+
+ /// Convert the `value` into a date if possible, with `now` as reference time for relative dates.
+ pub fn try_into_time(
+ &self,
+ value: Cow<'_, BStr>,
+ now: Option<std::time::SystemTime>,
+ ) -> Result<gix_date::Time, gix_date::parse::Error> {
+ gix_date::parse(
+ value
+ .as_ref()
+ .to_str()
+ .map_err(|_| gix_date::parse::Error::InvalidDateString {
+ input: value.to_string(),
+ })?,
+ now,
+ )
+ }
+ }
+}
+
+mod boolean {
+ use crate::{
+ config,
+ config::tree::{
+ keys::{validate, Boolean},
+ Section,
+ },
+ };
+
+ impl Boolean {
+ /// Create a new instance.
+ pub const fn new_boolean(name: &'static str, section: &'static dyn Section) -> Self {
+ Self::new_with_validate(name, section, validate::Boolean)
+ }
+
+ /// Process the `value` into a result with an improved error message.
+ ///
+ /// `value` is expected to be provided by [`gix_config::File::boolean()`].
+ pub fn enrich_error(
+ &'static self,
+ value: Result<bool, gix_config::value::Error>,
+ ) -> Result<bool, config::boolean::Error> {
+ value.map_err(|err| config::boolean::Error::from(self).with_source(err))
+ }
+ }
+}
+
+mod remote_name {
+ use std::borrow::Cow;
+
+ use crate::{
+ bstr::{BStr, BString},
+ config,
+ config::tree::{keys::RemoteName, Section},
+ };
+
+ impl RemoteName {
+ /// Create a new instance.
+ pub const fn new_remote_name(name: &'static str, section: &'static dyn Section) -> Self {
+ Self::new_with_validate(name, section, super::validate::RemoteName)
+ }
+
+ /// Try to validate `name` as symbolic remote name and return it.
+ #[allow(clippy::result_large_err)]
+ pub fn try_into_symbolic_name(
+ &'static self,
+ name: Cow<'_, BStr>,
+ ) -> Result<BString, config::remote::symbolic_name::Error> {
+ crate::remote::name::validated(name.into_owned())
+ .map_err(|err| config::remote::symbolic_name::Error::from(self).with_source(err))
+ }
+ }
+}
+
+/// Provide a way to validate a value, or decode a value from `gix-config`.
+pub trait Validate {
+ /// Validate `value` or return an error.
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn Error + Send + Sync + 'static>>;
+}
+
+/// Various implementations of the `Validate` trait.
+pub mod validate {
+ use std::{borrow::Cow, error::Error};
+
+ use crate::{
+ bstr::{BStr, ByteSlice},
+ config::tree::keys::Validate,
+ remote,
+ };
+
+ /// Everything is valid.
+ #[derive(Default)]
+ pub struct All;
+
+ impl Validate for All {
+ fn validate(&self, _value: &BStr) -> Result<(), Box<dyn Error + Send + Sync + 'static>> {
+ Ok(())
+ }
+ }
+
+ /// Assure that values that parse as git dates are valid.
+ #[derive(Default)]
+ pub struct Time;
+
+ impl Validate for Time {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn Error + Send + Sync + 'static>> {
+ gix_date::parse(value.to_str()?, std::time::SystemTime::now().into())?;
+ Ok(())
+ }
+ }
+
+ /// Assure that values that parse as unsigned integers are valid.
+ #[derive(Default)]
+ pub struct UnsignedInteger;
+
+ impl Validate for UnsignedInteger {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn Error + Send + Sync + 'static>> {
+ usize::try_from(
+ gix_config::Integer::try_from(value)?
+ .to_decimal()
+ .ok_or_else(|| format!("integer {value} cannot be represented as `usize`"))?,
+ )?;
+ Ok(())
+ }
+ }
+
+ /// Assure that values that parse as git booleans are valid.
+ #[derive(Default)]
+ pub struct Boolean;
+
+ impl Validate for Boolean {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn Error + Send + Sync + 'static>> {
+ gix_config::Boolean::try_from(value)?;
+ Ok(())
+ }
+ }
+
+ /// Values that are git remotes, either symbolic names or URLs.
+ #[derive(Default)]
+ pub struct RemoteName;
+ impl Validate for RemoteName {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ remote::Name::try_from(Cow::Borrowed(value))
+ .map_err(|_| format!("Illformed UTF-8 in remote name: \"{}\"", value.to_str_lossy()))?;
+ Ok(())
+ }
+ }
+
+ /// Values that are programs - everything is allowed.
+ #[derive(Default)]
+ pub struct Program;
+ impl Validate for Program {
+ fn validate(&self, _value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ Ok(())
+ }
+ }
+
+ /// Values that are program executables; everything is allowed.
+ #[derive(Default)]
+ pub struct Executable;
+ impl Validate for Executable {
+ fn validate(&self, _value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ Ok(())
+ }
+ }
+
+ /// Values that parse as URLs.
+ #[derive(Default)]
+ pub struct Url;
+ impl Validate for Url {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ gix_url::parse(value)?;
+ Ok(())
+ }
+ }
+
+ /// Values that parse as ref-specs for pushing.
+ #[derive(Default)]
+ pub struct PushRefSpec;
+ impl Validate for PushRefSpec {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ gix_refspec::parse(value, gix_refspec::parse::Operation::Push)?;
+ Ok(())
+ }
+ }
+
+ /// Values that parse as ref-specs for fetching.
+ #[derive(Default)]
+ pub struct FetchRefSpec;
+ impl Validate for FetchRefSpec {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ gix_refspec::parse(value, gix_refspec::parse::Operation::Fetch)?;
+ Ok(())
+ }
+ }
+
+ /// Timeouts used for file locks.
+ pub struct LockTimeout;
+ impl Validate for LockTimeout {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ let value = gix_config::Integer::try_from(value)?
+ .to_decimal()
+ .ok_or_else(|| format!("integer {value} cannot be represented as integer"));
+ super::super::Core::FILES_REF_LOCK_TIMEOUT.try_into_lock_timeout(Ok(value?))?;
+ Ok(())
+ }
+ }
+
+ /// Durations in milliseconds.
+ pub struct DurationInMilliseconds;
+ impl Validate for DurationInMilliseconds {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ let value = gix_config::Integer::try_from(value)?
+ .to_decimal()
+ .ok_or_else(|| format!("integer {value} cannot be represented as integer"));
+ super::super::gitoxide::Http::CONNECT_TIMEOUT.try_into_duration(Ok(value?))?;
+ Ok(())
+ }
+ }
+
+ /// A UTF-8 string.
+ pub struct String;
+ impl Validate for String {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ value.to_str()?;
+ Ok(())
+ }
+ }
+
+ /// Any path - everything is allowed.
+ pub struct Path;
+ impl Validate for Path {
+ fn validate(&self, _value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ Ok(())
+ }
+ }
+}
diff --git a/vendor/gix/src/config/tree/mod.rs b/vendor/gix/src/config/tree/mod.rs
new file mode 100644
index 000000000..fd769f3ed
--- /dev/null
+++ b/vendor/gix/src/config/tree/mod.rs
@@ -0,0 +1,123 @@
+//! The tree of supported configuration values for use in [`config_overrides`][crate::open::Options::config_overrides()]
+//! or for validating and transforming well-known configuration values.
+//!
+//! It can also be used to traverse all implemented keys and to validate values before usage as configuration overrides.
+//!
+//! ### Leniency
+//!
+//! When validating values, we don't apply leniency here which is left to the caller. Leniency is an application defined configuration
+//! to ignore errors on non-security related values, which might make applications more resilient towards misconfiguration.
+pub(crate) mod root {
+ use super::sections;
+ use crate::config::tree::Section;
+
+ /// The root of the configuration tree, suitable to discover all sub-sections at runtime or compile time.
+ #[derive(Copy, Clone, Default)]
+ pub struct Tree;
+
+ impl Tree {
+ /// The `author` section.
+ pub const AUTHOR: sections::Author = sections::Author;
+ /// The `branch` section.
+ pub const BRANCH: sections::Branch = sections::Branch;
+ /// The `checkout` section.
+ pub const CHECKOUT: sections::Checkout = sections::Checkout;
+ /// The `clone` section.
+ pub const CLONE: sections::Clone = sections::Clone;
+ /// The `committer` section.
+ pub const COMMITTER: sections::Committer = sections::Committer;
+ /// The `core` section.
+ pub const CORE: sections::Core = sections::Core;
+ /// The `credential` section.
+ pub const CREDENTIAL: sections::Credential = sections::Credential;
+ /// The `diff` section.
+ pub const DIFF: sections::Diff = sections::Diff;
+ /// The `extensions` section.
+ pub const EXTENSIONS: sections::Extensions = sections::Extensions;
+ /// The `gitoxide` section.
+ pub const GITOXIDE: sections::Gitoxide = sections::Gitoxide;
+ /// The `http` section.
+ pub const HTTP: sections::Http = sections::Http;
+ /// The `init` section.
+ pub const INIT: sections::Init = sections::Init;
+ /// The `pack` section.
+ pub const PACK: sections::Pack = sections::Pack;
+ /// The `protocol` section.
+ pub const PROTOCOL: sections::Protocol = sections::Protocol;
+ /// The `remote` section.
+ pub const REMOTE: sections::Remote = sections::Remote;
+ /// The `safe` section.
+ pub const SAFE: sections::Safe = sections::Safe;
+ /// The `ssh` section.
+ pub const SSH: sections::Ssh = sections::Ssh;
+ /// The `user` section.
+ pub const USER: sections::User = sections::User;
+ /// The `url` section.
+ pub const URL: sections::Url = sections::Url;
+
+ /// List all available sections.
+ pub fn sections(&self) -> &[&dyn Section] {
+ &[
+ &Self::AUTHOR,
+ &Self::BRANCH,
+ &Self::CHECKOUT,
+ &Self::CLONE,
+ &Self::COMMITTER,
+ &Self::CORE,
+ &Self::CREDENTIAL,
+ &Self::DIFF,
+ &Self::EXTENSIONS,
+ &Self::GITOXIDE,
+ &Self::HTTP,
+ &Self::INIT,
+ &Self::PACK,
+ &Self::PROTOCOL,
+ &Self::REMOTE,
+ &Self::SAFE,
+ &Self::SSH,
+ &Self::USER,
+ &Self::URL,
+ ]
+ }
+ }
+}
+
+mod sections;
+pub use sections::{
+ branch, checkout, core, credential, diff, extensions, gitoxide, http, protocol, remote, ssh, Author, Branch,
+ Checkout, Clone, Committer, Core, Credential, Diff, Extensions, Gitoxide, Http, Init, Pack, Protocol, Remote, Safe,
+ Ssh, Url, User,
+};
+
+/// Generic value implementations for static instantiation.
+pub mod keys;
+
+///
+pub mod key {
+ ///
+ pub mod validate {
+ /// The error returned by [Key::validate()][crate::config::tree::Key::validate()].
+ #[derive(Debug, thiserror::Error)]
+ #[error(transparent)]
+ #[allow(missing_docs)]
+ pub struct Error {
+ #[from]
+ source: Box<dyn std::error::Error + Send + Sync + 'static>,
+ }
+ }
+ ///
+ pub mod validate_assignment {
+ /// The error returned by [Key::validated_assignment*()][crate::config::tree::Key::validated_assignment_fmt()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("Failed to validate the value to be assigned to this key")]
+ Validate(#[from] super::validate::Error),
+ #[error("{message}")]
+ Name { message: String },
+ }
+ }
+}
+
+mod traits;
+pub use traits::{Key, Link, Note, Section, SubSectionRequirement};
diff --git a/vendor/gix/src/config/tree/sections/author.rs b/vendor/gix/src/config/tree/sections/author.rs
new file mode 100644
index 000000000..4101e3817
--- /dev/null
+++ b/vendor/gix/src/config/tree/sections/author.rs
@@ -0,0 +1,23 @@
+use crate::{
+ config,
+ config::tree::{gitoxide, keys, Author, Key, Section},
+};
+
+impl Author {
+ /// The `author.name` key.
+ pub const NAME: keys::Any =
+ keys::Any::new("name", &config::Tree::AUTHOR).with_fallback(&gitoxide::Author::NAME_FALLBACK);
+ /// The `author.email` key.
+ pub const EMAIL: keys::Any =
+ keys::Any::new("email", &config::Tree::AUTHOR).with_fallback(&gitoxide::Author::EMAIL_FALLBACK);
+}
+
+impl Section for Author {
+ fn name(&self) -> &str {
+ "author"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::NAME, &Self::EMAIL]
+ }
+}
diff --git a/vendor/gix/src/config/tree/sections/branch.rs b/vendor/gix/src/config/tree/sections/branch.rs
new file mode 100644
index 000000000..8e1e0a4b8
--- /dev/null
+++ b/vendor/gix/src/config/tree/sections/branch.rs
@@ -0,0 +1,65 @@
+use crate::config::tree::{keys, traits::SubSectionRequirement, Branch, Key, Section};
+
+const NAME_PARAMETER: Option<SubSectionRequirement> = Some(SubSectionRequirement::Parameter("name"));
+
+impl Branch {
+ /// The `branch.<name>.merge` key.
+ pub const MERGE: Merge = Merge::new_with_validate("merge", &crate::config::Tree::BRANCH, validate::FullNameRef)
+ .with_subsection_requirement(NAME_PARAMETER);
+ /// The `branch.<name>.pushRemote` key.
+ pub const PUSH_REMOTE: keys::RemoteName =
+ keys::RemoteName::new_remote_name("pushRemote", &crate::config::Tree::BRANCH)
+ .with_subsection_requirement(NAME_PARAMETER);
+ /// The `branch.<name>.remote` key.
+ pub const REMOTE: keys::RemoteName = keys::RemoteName::new_remote_name("remote", &crate::config::Tree::BRANCH)
+ .with_subsection_requirement(NAME_PARAMETER);
+}
+
+impl Section for Branch {
+ fn name(&self) -> &str {
+ "branch"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::MERGE, &Self::PUSH_REMOTE, &Self::REMOTE]
+ }
+}
+
+/// The `branch.<name>.merge` key.
+pub type Merge = keys::Any<validate::FullNameRef>;
+
+mod merge {
+ use std::borrow::Cow;
+
+ use gix_ref::FullNameRef;
+
+ use crate::{bstr::BStr, config::tree::branch::Merge};
+
+ impl Merge {
+ /// Return the validated full ref name from `value` if it is valid.
+ pub fn try_into_fullrefname(
+ value: Cow<'_, BStr>,
+ ) -> Result<Cow<'_, FullNameRef>, gix_validate::reference::name::Error> {
+ match value {
+ Cow::Borrowed(v) => v.try_into().map(Cow::Borrowed),
+ Cow::Owned(v) => v.try_into().map(Cow::Owned),
+ }
+ }
+ }
+}
+
+///
+pub mod validate {
+ use crate::{
+ bstr::BStr,
+ config::tree::{branch::Merge, keys},
+ };
+
+ pub struct FullNameRef;
+ impl keys::Validate for FullNameRef {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ Merge::try_into_fullrefname(value.into())?;
+ Ok(())
+ }
+ }
+}
diff --git a/vendor/gix/src/config/tree/sections/checkout.rs b/vendor/gix/src/config/tree/sections/checkout.rs
new file mode 100644
index 000000000..27f31ee84
--- /dev/null
+++ b/vendor/gix/src/config/tree/sections/checkout.rs
@@ -0,0 +1,58 @@
+use crate::{
+ config,
+ config::tree::{keys, Checkout, Key, Section},
+};
+
+impl Checkout {
+ /// The `checkout.workers` key.
+ pub const WORKERS: Workers = Workers::new_with_validate("workers", &config::Tree::CHECKOUT, validate::Workers)
+ .with_deviation("if unset, uses all cores instead of just one");
+}
+
+/// The `checkout.workers` key.
+pub type Workers = keys::Any<validate::Workers>;
+
+impl Section for Checkout {
+ fn name(&self) -> &str {
+ "checkout"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::WORKERS]
+ }
+}
+
+mod workers {
+ use crate::config::tree::checkout::Workers;
+
+ impl Workers {
+ /// Return the amount of threads to use for checkout, with `0` meaning all available ones, after decoding our integer value from `config`,
+ /// or `None` if the value isn't set which is typically interpreted as "as many threads as available"
+ pub fn try_from_workers(
+ &'static self,
+ value: Result<i64, gix_config::value::Error>,
+ ) -> Result<usize, crate::config::checkout::workers::Error> {
+ match value {
+ Ok(v) if v < 0 => Ok(0),
+ Ok(v) => Ok(v.try_into().expect("positive i64 can always be usize on 64 bit")),
+ Err(err) => Err(crate::config::key::Error::from(&super::Checkout::WORKERS).with_source(err)),
+ }
+ }
+ }
+}
+
+///
+pub mod validate {
+ use crate::{bstr::BStr, config::tree::keys};
+
+ pub struct Workers;
+ impl keys::Validate for Workers {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ super::Checkout::WORKERS.try_from_workers(gix_config::Integer::try_from(value).and_then(|i| {
+ i.to_decimal()
+ .ok_or_else(|| gix_config::value::Error::new("Integer overflow", value.to_owned()))
+ }))?;
+ Ok(())
+ }
+ }
+}
diff --git a/vendor/gix/src/config/tree/sections/clone.rs b/vendor/gix/src/config/tree/sections/clone.rs
new file mode 100644
index 000000000..616185a0b
--- /dev/null
+++ b/vendor/gix/src/config/tree/sections/clone.rs
@@ -0,0 +1,20 @@
+use crate::{
+ config,
+ config::tree::{keys, Clone, Key, Section},
+};
+
+impl Clone {
+ /// The `clone.defaultRemoteName` key.
+ pub const DEFAULT_REMOTE_NAME: keys::RemoteName =
+ keys::RemoteName::new_remote_name("defaultRemoteName", &config::Tree::CLONE);
+}
+
+impl Section for Clone {
+ fn name(&self) -> &str {
+ "clone"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::DEFAULT_REMOTE_NAME]
+ }
+}
diff --git a/vendor/gix/src/config/tree/sections/committer.rs b/vendor/gix/src/config/tree/sections/committer.rs
new file mode 100644
index 000000000..acc25c930
--- /dev/null
+++ b/vendor/gix/src/config/tree/sections/committer.rs
@@ -0,0 +1,23 @@
+use crate::{
+ config,
+ config::tree::{gitoxide, keys, Committer, Key, Section},
+};
+
+impl Committer {
+ /// The `committer.name` key.
+ pub const NAME: keys::Any =
+ keys::Any::new("name", &config::Tree::COMMITTER).with_fallback(&gitoxide::Committer::NAME_FALLBACK);
+ /// The `committer.email` key.
+ pub const EMAIL: keys::Any =
+ keys::Any::new("email", &config::Tree::COMMITTER).with_fallback(&gitoxide::Committer::EMAIL_FALLBACK);
+}
+
+impl Section for Committer {
+ fn name(&self) -> &str {
+ "committer"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::NAME, &Self::EMAIL]
+ }
+}
diff --git a/vendor/gix/src/config/tree/sections/core.rs b/vendor/gix/src/config/tree/sections/core.rs
new file mode 100644
index 000000000..6ea0580e1
--- /dev/null
+++ b/vendor/gix/src/config/tree/sections/core.rs
@@ -0,0 +1,302 @@
+use crate::{
+ config,
+ config::tree::{keys, Core, Key, Section},
+};
+
+impl Core {
+ /// The `core.abbrev` key.
+ pub const ABBREV: Abbrev = Abbrev::new_with_validate("abbrev", &config::Tree::CORE, validate::Abbrev);
+ /// The `core.bare` key.
+ pub const BARE: keys::Boolean = keys::Boolean::new_boolean("bare", &config::Tree::CORE);
+ /// The `core.checkStat` key.
+ pub const CHECK_STAT: CheckStat =
+ CheckStat::new_with_validate("checkStat", &config::Tree::CORE, validate::CheckStat);
+ /// The `core.deltaBaseCacheLimit` key.
+ pub const DELTA_BASE_CACHE_LIMIT: keys::UnsignedInteger =
+ keys::UnsignedInteger::new_unsigned_integer("deltaBaseCacheLimit", &config::Tree::CORE)
+ .with_environment_override("GITOXIDE_PACK_CACHE_MEMORY")
+ .with_note("if unset, we default to a small 64 slot fixed-size cache that holds at most 64 full delta base objects of any size. Set to 0 to deactivate it entirely");
+ /// The `core.disambiguate` key.
+ pub const DISAMBIGUATE: Disambiguate =
+ Disambiguate::new_with_validate("disambiguate", &config::Tree::CORE, validate::Disambiguate);
+ /// The `core.fileMode` key.
+ pub const FILE_MODE: keys::Boolean = keys::Boolean::new_boolean("fileMode", &config::Tree::CORE);
+ /// The `core.ignoreCase` key.
+ pub const IGNORE_CASE: keys::Boolean = keys::Boolean::new_boolean("ignoreCase", &config::Tree::CORE);
+ /// The `core.filesRefLockTimeout` key.
+ pub const FILES_REF_LOCK_TIMEOUT: keys::LockTimeout =
+ keys::LockTimeout::new_lock_timeout("filesRefLockTimeout", &config::Tree::CORE);
+ /// The `core.packedRefsTimeout` key.
+ pub const PACKED_REFS_TIMEOUT: keys::LockTimeout =
+ keys::LockTimeout::new_lock_timeout("packedRefsTimeout", &config::Tree::CORE);
+ /// The `core.multiPackIndex` key.
+ pub const MULTIPACK_INDEX: keys::Boolean = keys::Boolean::new_boolean("multiPackIndex", &config::Tree::CORE);
+ /// The `core.logAllRefUpdates` key.
+ pub const LOG_ALL_REF_UPDATES: LogAllRefUpdates =
+ LogAllRefUpdates::new_with_validate("logAllRefUpdates", &config::Tree::CORE, validate::LogAllRefUpdates);
+ /// The `core.precomposeUnicode` key.
+ ///
+ /// Needs application to use [env::args_os][crate::env::args_os()] to conform all input paths before they are used.
+ pub const PRECOMPOSE_UNICODE: keys::Boolean = keys::Boolean::new_boolean("precomposeUnicode", &config::Tree::CORE)
+ .with_note("application needs to conform all program input by using gix::env::args_os()");
+ /// The `core.repositoryFormatVersion` key.
+ pub const REPOSITORY_FORMAT_VERSION: keys::UnsignedInteger =
+ keys::UnsignedInteger::new_unsigned_integer("repositoryFormatVersion", &config::Tree::CORE);
+ /// The `core.symlinks` key.
+ pub const SYMLINKS: keys::Boolean = keys::Boolean::new_boolean("symlinks", &config::Tree::CORE);
+ /// The `core.trustCTime` key.
+ pub const TRUST_C_TIME: keys::Boolean = keys::Boolean::new_boolean("trustCTime", &config::Tree::CORE);
+ /// The `core.worktree` key.
+ pub const WORKTREE: keys::Any = keys::Any::new("worktree", &config::Tree::CORE)
+ .with_environment_override("GIT_WORK_TREE")
+ .with_deviation("Overriding the worktree with environment variables is supported using `ThreadSafeRepository::open_with_environment_overrides()");
+ /// The `core.askPass` key.
+ pub const ASKPASS: keys::Executable = keys::Executable::new_executable("askPass", &config::Tree::CORE)
+ .with_environment_override("GIT_ASKPASS")
+ .with_note("fallback is 'SSH_ASKPASS'");
+ /// The `core.excludesFile` key.
+ pub const EXCLUDES_FILE: keys::Executable = keys::Executable::new_executable("excludesFile", &config::Tree::CORE);
+ /// The `core.attributesFile` key.
+ pub const ATTRIBUTES_FILE: keys::Executable =
+ keys::Executable::new_executable("attributesFile", &config::Tree::CORE)
+ .with_deviation("for checkout - it's already queried but needs building of attributes group, and of course support during checkout");
+ /// The `core.sshCommand` key.
+ pub const SSH_COMMAND: keys::Executable = keys::Executable::new_executable("sshCommand", &config::Tree::CORE)
+ .with_environment_override("GIT_SSH_COMMAND");
+}
+
+impl Section for Core {
+ fn name(&self) -> &str {
+ "core"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[
+ &Self::ABBREV,
+ &Self::BARE,
+ &Self::CHECK_STAT,
+ &Self::DELTA_BASE_CACHE_LIMIT,
+ &Self::DISAMBIGUATE,
+ &Self::FILE_MODE,
+ &Self::IGNORE_CASE,
+ &Self::FILES_REF_LOCK_TIMEOUT,
+ &Self::PACKED_REFS_TIMEOUT,
+ &Self::MULTIPACK_INDEX,
+ &Self::LOG_ALL_REF_UPDATES,
+ &Self::PRECOMPOSE_UNICODE,
+ &Self::REPOSITORY_FORMAT_VERSION,
+ &Self::SYMLINKS,
+ &Self::TRUST_C_TIME,
+ &Self::WORKTREE,
+ &Self::ASKPASS,
+ &Self::EXCLUDES_FILE,
+ &Self::ATTRIBUTES_FILE,
+ &Self::SSH_COMMAND,
+ ]
+ }
+}
+
+/// The `core.checkStat` key.
+pub type CheckStat = keys::Any<validate::CheckStat>;
+
+/// The `core.abbrev` key.
+pub type Abbrev = keys::Any<validate::Abbrev>;
+
+/// The `core.logAllRefUpdates` key.
+pub type LogAllRefUpdates = keys::Any<validate::LogAllRefUpdates>;
+
+/// The `core.disambiguate` key.
+pub type Disambiguate = keys::Any<validate::Disambiguate>;
+
+mod disambiguate {
+ use std::borrow::Cow;
+
+ use crate::{
+ bstr::{BStr, ByteSlice},
+ config,
+ config::tree::core::Disambiguate,
+ revision::spec::parse::ObjectKindHint,
+ };
+
+ impl Disambiguate {
+ /// Convert a disambiguation marker into the respective enum.
+ pub fn try_into_object_kind_hint(
+ &'static self,
+ value: Cow<'_, BStr>,
+ ) -> Result<Option<ObjectKindHint>, config::key::GenericErrorWithValue> {
+ let hint = match value.as_ref().as_bytes() {
+ b"none" => return Ok(None),
+ b"commit" => ObjectKindHint::Commit,
+ b"committish" => ObjectKindHint::Committish,
+ b"tree" => ObjectKindHint::Tree,
+ b"treeish" => ObjectKindHint::Treeish,
+ b"blob" => ObjectKindHint::Blob,
+ _ => return Err(config::key::GenericErrorWithValue::from_value(self, value.into_owned())),
+ };
+ Ok(Some(hint))
+ }
+ }
+}
+
+mod log_all_ref_updates {
+ use std::borrow::Cow;
+
+ use crate::{bstr::BStr, config, config::tree::core::LogAllRefUpdates};
+
+ impl LogAllRefUpdates {
+ /// Returns the mode for ref-updates as parsed from `value`. If `value` is not a boolean, `string_on_failure` will be called
+ /// to obtain the key `core.logAllRefUpdates` as string instead. For correctness, this two step process is necessary as
+ /// the interpretation of booleans in special in `gix-config`, i.e. we can't just treat it as string.
+ pub fn try_into_ref_updates<'a>(
+ &'static self,
+ value: Option<Result<bool, gix_config::value::Error>>,
+ string_on_failure: impl FnOnce() -> Option<Cow<'a, BStr>>,
+ ) -> Result<Option<gix_ref::store::WriteReflog>, config::key::GenericErrorWithValue> {
+ match value.transpose().ok().flatten() {
+ Some(bool) => Ok(Some(if bool {
+ gix_ref::store::WriteReflog::Normal
+ } else {
+ gix_ref::store::WriteReflog::Disable
+ })),
+ None => match string_on_failure() {
+ Some(val) if val.eq_ignore_ascii_case(b"always") => Ok(Some(gix_ref::store::WriteReflog::Always)),
+ Some(val) => Err(config::key::GenericErrorWithValue::from_value(self, val.into_owned())),
+ None => Ok(None),
+ },
+ }
+ }
+ }
+}
+
+mod check_stat {
+ use std::borrow::Cow;
+
+ use crate::{
+ bstr::{BStr, ByteSlice},
+ config,
+ config::tree::core::CheckStat,
+ };
+
+ impl CheckStat {
+ /// Returns true if the full set of stat entries should be checked, and it's just as lenient as git.
+ pub fn try_into_checkstat(
+ &'static self,
+ value: Cow<'_, BStr>,
+ ) -> Result<bool, config::key::GenericErrorWithValue> {
+ Ok(match value.as_ref().as_bytes() {
+ b"minimal" => false,
+ b"default" => true,
+ _ => {
+ return Err(config::key::GenericErrorWithValue::from_value(self, value.into_owned()));
+ }
+ })
+ }
+ }
+}
+
+mod abbrev {
+ use std::borrow::Cow;
+
+ use config::abbrev::Error;
+
+ use crate::{
+ bstr::{BStr, ByteSlice},
+ config,
+ config::tree::core::Abbrev,
+ };
+
+ impl Abbrev {
+ /// Convert the given `hex_len_str` into the amount of characters that a short hash should have.
+ /// If `None` is returned, the correct value can be determined based on the amount of objects in the repo.
+ pub fn try_into_abbreviation(
+ &'static self,
+ hex_len_str: Cow<'_, BStr>,
+ object_hash: gix_hash::Kind,
+ ) -> Result<Option<usize>, Error> {
+ let max = object_hash.len_in_hex() as u8;
+ if hex_len_str.trim().is_empty() {
+ return Err(Error {
+ value: hex_len_str.into_owned(),
+ max,
+ });
+ }
+ if hex_len_str.trim().eq_ignore_ascii_case(b"auto") {
+ Ok(None)
+ } else {
+ let value_bytes = hex_len_str.as_ref();
+ if let Ok(false) = gix_config::Boolean::try_from(value_bytes).map(Into::into) {
+ Ok(object_hash.len_in_hex().into())
+ } else {
+ let value = gix_config::Integer::try_from(value_bytes)
+ .map_err(|_| Error {
+ value: hex_len_str.clone().into_owned(),
+ max,
+ })?
+ .to_decimal()
+ .ok_or_else(|| Error {
+ value: hex_len_str.clone().into_owned(),
+ max,
+ })?;
+ if value < 4 || value as usize > object_hash.len_in_hex() {
+ return Err(Error {
+ value: hex_len_str.clone().into_owned(),
+ max,
+ });
+ }
+ Ok(Some(value as usize))
+ }
+ }
+ }
+ }
+}
+
+mod validate {
+ use crate::{bstr::BStr, config::tree::keys};
+
+ pub struct LockTimeout;
+ impl keys::Validate for LockTimeout {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ let value = gix_config::Integer::try_from(value)?
+ .to_decimal()
+ .ok_or_else(|| format!("integer {value} cannot be represented as integer"));
+ super::Core::FILES_REF_LOCK_TIMEOUT.try_into_lock_timeout(Ok(value?))?;
+ Ok(())
+ }
+ }
+
+ pub struct Disambiguate;
+ impl keys::Validate for Disambiguate {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ super::Core::DISAMBIGUATE.try_into_object_kind_hint(value.into())?;
+ Ok(())
+ }
+ }
+
+ pub struct LogAllRefUpdates;
+ impl keys::Validate for LogAllRefUpdates {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ super::Core::LOG_ALL_REF_UPDATES
+ .try_into_ref_updates(Some(gix_config::Boolean::try_from(value).map(|b| b.0)), || {
+ Some(value.into())
+ })?;
+ Ok(())
+ }
+ }
+
+ pub struct CheckStat;
+ impl keys::Validate for CheckStat {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ super::Core::CHECK_STAT.try_into_checkstat(value.into())?;
+ Ok(())
+ }
+ }
+
+ pub struct Abbrev;
+ impl keys::Validate for Abbrev {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ // TODO: when there is options, validate against all hashes and assure all fail to trigger a validation failure.
+ super::Core::ABBREV.try_into_abbreviation(value.into(), gix_hash::Kind::Sha1)?;
+ Ok(())
+ }
+ }
+}
diff --git a/vendor/gix/src/config/tree/sections/credential.rs b/vendor/gix/src/config/tree/sections/credential.rs
new file mode 100644
index 000000000..d370db0c5
--- /dev/null
+++ b/vendor/gix/src/config/tree/sections/credential.rs
@@ -0,0 +1,56 @@
+use crate::{
+ config,
+ config::tree::{keys, Credential, Key, Section},
+};
+
+impl Credential {
+ /// The `credential.helper` key.
+ pub const HELPER: keys::Program = keys::Program::new_program("helper", &config::Tree::CREDENTIAL);
+ /// The `credential.username` key.
+ pub const USERNAME: keys::Any = keys::Any::new("username", &config::Tree::CREDENTIAL);
+ /// The `credential.useHttpPath` key.
+ pub const USE_HTTP_PATH: keys::Boolean = keys::Boolean::new_boolean("useHttpPath", &config::Tree::CREDENTIAL);
+
+ /// The `credential.<url>` subsection
+ pub const URL_PARAMETER: UrlParameter = UrlParameter;
+}
+
+/// The `credential.<url>` parameter section.
+pub struct UrlParameter;
+
+impl UrlParameter {
+ /// The `credential.<url>.helper` key.
+ pub const HELPER: keys::Program = keys::Program::new_program("helper", &Credential::URL_PARAMETER);
+ /// The `credential.<url>.username` key.
+ pub const USERNAME: keys::Any = keys::Any::new("username", &Credential::URL_PARAMETER);
+ /// The `credential.<url>.useHttpPath` key.
+ pub const USE_HTTP_PATH: keys::Boolean = keys::Boolean::new_boolean("useHttpPath", &Credential::URL_PARAMETER);
+}
+
+impl Section for UrlParameter {
+ fn name(&self) -> &str {
+ "<url>"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::HELPER, &Self::USERNAME, &Self::USE_HTTP_PATH]
+ }
+
+ fn parent(&self) -> Option<&dyn Section> {
+ Some(&config::Tree::CREDENTIAL)
+ }
+}
+
+impl Section for Credential {
+ fn name(&self) -> &str {
+ "credential"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::HELPER, &Self::USERNAME, &Self::USE_HTTP_PATH]
+ }
+
+ fn sub_sections(&self) -> &[&dyn Section] {
+ &[&Self::URL_PARAMETER]
+ }
+}
diff --git a/vendor/gix/src/config/tree/sections/diff.rs b/vendor/gix/src/config/tree/sections/diff.rs
new file mode 100644
index 000000000..103bb7001
--- /dev/null
+++ b/vendor/gix/src/config/tree/sections/diff.rs
@@ -0,0 +1,133 @@
+use crate::{
+ config,
+ config::tree::{keys, Diff, Key, Section},
+};
+
+impl Diff {
+ /// The `diff.algorithm` key.
+ pub const ALGORITHM: Algorithm = Algorithm::new_with_validate("algorithm", &config::Tree::DIFF, validate::Algorithm)
+ .with_deviation("'patience' diff is not implemented and can default to 'histogram' if lenient config is used, and defaults to histogram if unset for fastest and best results");
+ /// The `diff.renameLimit` key.
+ pub const RENAME_LIMIT: keys::UnsignedInteger = keys::UnsignedInteger::new_unsigned_integer(
+ "renameLimit",
+ &config::Tree::DIFF,
+ )
+ .with_note(
+ "The limit is actually squared, so 1000 stands for up to 1 million diffs if fuzzy rename tracking is enabled",
+ );
+ /// The `diff.renames` key.
+ pub const RENAMES: Renames = Renames::new_renames("renames", &config::Tree::DIFF);
+}
+
+impl Section for Diff {
+ fn name(&self) -> &str {
+ "diff"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::ALGORITHM, &Self::RENAME_LIMIT, &Self::RENAMES]
+ }
+}
+
+/// The `diff.algorithm` key.
+pub type Algorithm = keys::Any<validate::Algorithm>;
+
+/// The `diff.renames` key.
+pub type Renames = keys::Any<validate::Renames>;
+
+mod algorithm {
+ use std::borrow::Cow;
+
+ use crate::{
+ bstr::BStr,
+ config,
+ config::{diff::algorithm::Error, tree::sections::diff::Algorithm},
+ };
+
+ impl Algorithm {
+ /// Derive the diff algorithm identified by `name`, case-insensitively.
+ pub fn try_into_algorithm(&self, name: Cow<'_, BStr>) -> Result<gix_diff::blob::Algorithm, Error> {
+ let algo = if name.eq_ignore_ascii_case(b"myers") || name.eq_ignore_ascii_case(b"default") {
+ gix_diff::blob::Algorithm::Myers
+ } else if name.eq_ignore_ascii_case(b"minimal") {
+ gix_diff::blob::Algorithm::MyersMinimal
+ } else if name.eq_ignore_ascii_case(b"histogram") {
+ gix_diff::blob::Algorithm::Histogram
+ } else if name.eq_ignore_ascii_case(b"patience") {
+ return Err(config::diff::algorithm::Error::Unimplemented {
+ name: name.into_owned(),
+ });
+ } else {
+ return Err(Error::Unknown {
+ name: name.into_owned(),
+ });
+ };
+ Ok(algo)
+ }
+ }
+}
+
+mod renames {
+ use std::borrow::Cow;
+
+ use crate::{
+ bstr::{BStr, ByteSlice},
+ config::{
+ key::GenericError,
+ tree::{keys, sections::diff::Renames, Section},
+ },
+ diff::rename::Tracking,
+ };
+
+ impl Renames {
+ /// Create a new instance.
+ pub const fn new_renames(name: &'static str, section: &'static dyn Section) -> Self {
+ keys::Any::new_with_validate(name, section, super::validate::Renames)
+ }
+ /// Try to convert the configuration into a valid rename tracking variant. Use `value` and if it's an error, call `value_string`
+ /// to try and interpret the key as string.
+ pub fn try_into_renames<'a>(
+ &'static self,
+ value: Result<bool, gix_config::value::Error>,
+ value_string: impl FnOnce() -> Option<Cow<'a, BStr>>,
+ ) -> Result<Tracking, GenericError> {
+ Ok(match value {
+ Ok(true) => Tracking::Renames,
+ Ok(false) => Tracking::Disabled,
+ Err(err) => {
+ let value = value_string().ok_or_else(|| GenericError::from(self))?;
+ match value.as_ref().as_bytes() {
+ b"copy" | b"copies" => Tracking::RenamesAndCopies,
+ _ => return Err(GenericError::from_value(self, value.into_owned()).with_source(err)),
+ }
+ }
+ })
+ }
+ }
+}
+
+mod validate {
+ use std::borrow::Cow;
+
+ use crate::{
+ bstr::BStr,
+ config::tree::{keys, Diff},
+ };
+
+ pub struct Algorithm;
+ impl keys::Validate for Algorithm {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ Diff::ALGORITHM.try_into_algorithm(value.into())?;
+ Ok(())
+ }
+ }
+
+ pub struct Renames;
+ impl keys::Validate for Renames {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ let boolean = gix_config::Boolean::try_from(value).map(|b| b.0);
+ Diff::RENAMES.try_into_renames(boolean, || Some(Cow::Borrowed(value)))?;
+ Ok(())
+ }
+ }
+}
diff --git a/vendor/gix/src/config/tree/sections/extensions.rs b/vendor/gix/src/config/tree/sections/extensions.rs
new file mode 100644
index 000000000..77130f804
--- /dev/null
+++ b/vendor/gix/src/config/tree/sections/extensions.rs
@@ -0,0 +1,59 @@
+use crate::{
+ config,
+ config::tree::{keys, Extensions, Key, Section},
+};
+
+impl Extensions {
+ /// The `extensions.worktreeConfig` key.
+ pub const WORKTREE_CONFIG: keys::Boolean = keys::Boolean::new_boolean("worktreeConfig", &config::Tree::EXTENSIONS);
+ /// The `extensions.objectFormat` key.
+ pub const OBJECT_FORMAT: ObjectFormat =
+ ObjectFormat::new_with_validate("objectFormat", &config::Tree::EXTENSIONS, validate::ObjectFormat).with_note(
+ "Support for SHA256 is prepared but not fully implemented yet. For now we abort when encountered",
+ );
+}
+
+/// The `core.checkStat` key.
+pub type ObjectFormat = keys::Any<validate::ObjectFormat>;
+
+mod object_format {
+ use std::borrow::Cow;
+
+ use crate::{bstr::BStr, config, config::tree::sections::extensions::ObjectFormat};
+
+ impl ObjectFormat {
+ pub fn try_into_object_format(
+ &'static self,
+ value: Cow<'_, BStr>,
+ ) -> Result<gix_hash::Kind, config::key::GenericErrorWithValue> {
+ if value.as_ref().eq_ignore_ascii_case(b"sha1") {
+ Ok(gix_hash::Kind::Sha1)
+ } else {
+ Err(config::key::GenericErrorWithValue::from_value(self, value.into_owned()))
+ }
+ }
+ }
+}
+
+impl Section for Extensions {
+ fn name(&self) -> &str {
+ "extensions"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::OBJECT_FORMAT, &Self::WORKTREE_CONFIG]
+ }
+}
+
+mod validate {
+ use crate::{bstr::BStr, config::tree::keys};
+
+ pub struct ObjectFormat;
+
+ impl keys::Validate for ObjectFormat {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ super::Extensions::OBJECT_FORMAT.try_into_object_format(value.into())?;
+ Ok(())
+ }
+ }
+}
diff --git a/vendor/gix/src/config/tree/sections/gitoxide.rs b/vendor/gix/src/config/tree/sections/gitoxide.rs
new file mode 100644
index 000000000..8c3defd0b
--- /dev/null
+++ b/vendor/gix/src/config/tree/sections/gitoxide.rs
@@ -0,0 +1,363 @@
+use crate::config::tree::{keys, Gitoxide, Key, Section};
+
+impl Gitoxide {
+ /// The `gitoxide.allow` section.
+ pub const ALLOW: Allow = Allow;
+ /// The `gitoxide.author` section.
+ pub const AUTHOR: Author = Author;
+ /// The `gitoxide.commit` section.
+ pub const COMMIT: Commit = Commit;
+ /// The `gitoxide.committer` section.
+ pub const COMMITTER: Committer = Committer;
+ /// The `gitoxide.http` section.
+ pub const HTTP: Http = Http;
+ /// The `gitoxide.https` section.
+ pub const HTTPS: Https = Https;
+ /// The `gitoxide.objects` section.
+ pub const OBJECTS: Objects = Objects;
+ /// The `gitoxide.ssh` section.
+ pub const SSH: Ssh = Ssh;
+ /// The `gitoxide.user` section.
+ pub const USER: User = User;
+
+ /// The `gitoxide.userAgent` Key.
+ pub const USER_AGENT: keys::Any = keys::Any::new("userAgent", &config::Tree::GITOXIDE).with_note(
+ "The user agent presented on the git protocol layer, serving as fallback for when no `http.userAgent` is set",
+ );
+}
+
+impl Section for Gitoxide {
+ fn name(&self) -> &str {
+ "gitoxide"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::USER_AGENT]
+ }
+
+ fn sub_sections(&self) -> &[&dyn Section] {
+ &[
+ &Self::ALLOW,
+ &Self::AUTHOR,
+ &Self::COMMIT,
+ &Self::COMMITTER,
+ &Self::HTTP,
+ &Self::HTTPS,
+ &Self::OBJECTS,
+ &Self::SSH,
+ &Self::USER,
+ ]
+ }
+}
+
+mod subsections {
+ use crate::config::{
+ tree::{http, keys, Gitoxide, Key, Section},
+ Tree,
+ };
+
+ /// The `Http` sub-section.
+ #[derive(Copy, Clone, Default)]
+ pub struct Http;
+
+ impl Http {
+ /// The `gitoxide.http.proxy` key.
+ pub const PROXY: keys::String =
+ keys::String::new_string("proxy", &Gitoxide::HTTP).with_environment_override("http_proxy");
+ /// The `gitoxide.http.allProxy` key.
+ pub const ALL_PROXY: keys::String = keys::String::new_string("allProxy", &Gitoxide::HTTP)
+ .with_environment_override("all_proxy")
+ .with_note("fallback environment is `ALL_PROXY`");
+ /// The `gitoxide.http.verbose` key.
+ ///
+ /// If set, curl will be configured to log verbosely.
+ pub const VERBOSE: keys::Boolean = keys::Boolean::new_boolean("verbose", &Gitoxide::HTTP)
+ .with_environment_override("GIT_CURL_VERBOSE")
+ .with_deviation("we parse it as boolean for convenience (infallible) but git only checks the presence");
+ /// The `gitoxide.http.noProxy` key.
+ pub const NO_PROXY: keys::String = keys::String::new_string("noProxy", &Gitoxide::HTTP)
+ .with_environment_override("no_proxy")
+ .with_note("fallback environment is `NO_PROXY`");
+ /// The `gitoxide.http.connectTimeout` key.
+ pub const CONNECT_TIMEOUT: keys::DurationInMilliseconds =
+ keys::DurationInMilliseconds::new_duration("connectTimeout", &Gitoxide::HTTP).with_note(
+ "entirely new, and in milliseconds, to describe how long to wait until a connection attempt is aborted",
+ );
+ /// The `gitoxide.http.sslVersionMin` key.
+ pub const SSL_VERSION_MIN: http::SslVersion =
+ http::SslVersion::new_ssl_version("sslVersionMin", &Gitoxide::HTTP).with_note(
+ "entirely new to set the lower bound for the allowed ssl version range. Overwrites the min bound of `http.sslVersion` if set. Min and Max must be set to become effective.",
+ );
+ /// The `gitoxide.http.sslVersionMax` key.
+ pub const SSL_VERSION_MAX: http::SslVersion =
+ http::SslVersion::new_ssl_version("sslVersionMax", &Gitoxide::HTTP).with_note(
+ "entirely new to set the upper bound for the allowed ssl version range. Overwrites the max bound of `http.sslVersion` if set. Min and Max must be set to become effective.",
+ );
+ /// The `gitoxide.http.proxyAuthMethod` key.
+ pub const PROXY_AUTH_METHOD: http::ProxyAuthMethod =
+ http::ProxyAuthMethod::new_proxy_auth_method("proxyAuthMethod", &Gitoxide::HTTP)
+ .with_environment_override("GIT_HTTP_PROXY_AUTHMETHOD");
+ }
+
+ impl Section for Http {
+ fn name(&self) -> &str {
+ "http"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[
+ &Self::PROXY,
+ &Self::ALL_PROXY,
+ &Self::VERBOSE,
+ &Self::NO_PROXY,
+ &Self::CONNECT_TIMEOUT,
+ &Self::SSL_VERSION_MIN,
+ &Self::SSL_VERSION_MAX,
+ &Self::PROXY_AUTH_METHOD,
+ ]
+ }
+
+ fn parent(&self) -> Option<&dyn Section> {
+ Some(&Tree::GITOXIDE)
+ }
+ }
+
+ /// The `Https` sub-section.
+ #[derive(Copy, Clone, Default)]
+ pub struct Https;
+
+ impl Https {
+ /// The `gitoxide.https.proxy` key.
+ pub const PROXY: keys::String = keys::String::new_string("proxy", &Gitoxide::HTTPS)
+ .with_environment_override("HTTPS_PROXY")
+ .with_note("fallback environment variable is `https_proxy`");
+ }
+
+ impl Section for Https {
+ fn name(&self) -> &str {
+ "https"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::PROXY]
+ }
+
+ fn parent(&self) -> Option<&dyn Section> {
+ Some(&Tree::GITOXIDE)
+ }
+ }
+
+ /// The `allow` sub-section.
+ #[derive(Copy, Clone, Default)]
+ pub struct Allow;
+
+ /// The `gitoxide.allow.protocolFromUser` key.
+ pub type ProtocolFromUser = keys::Any<super::validate::ProtocolFromUser>;
+
+ impl Allow {
+ /// The `gitoxide.allow.protocolFromUser` key.
+ pub const PROTOCOL_FROM_USER: ProtocolFromUser = ProtocolFromUser::new_with_validate(
+ "protocolFromUser",
+ &Gitoxide::ALLOW,
+ super::validate::ProtocolFromUser,
+ )
+ .with_environment_override("GIT_PROTOCOL_FROM_USER");
+ }
+
+ impl Section for Allow {
+ fn name(&self) -> &str {
+ "allow"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::PROTOCOL_FROM_USER]
+ }
+
+ fn parent(&self) -> Option<&dyn Section> {
+ Some(&Tree::GITOXIDE)
+ }
+ }
+
+ /// The `author` sub-section.
+ #[derive(Copy, Clone, Default)]
+ pub struct Author;
+
+ impl Author {
+ /// The `gitoxide.author.nameFallback` key.
+ pub const NAME_FALLBACK: keys::Any =
+ keys::Any::new("nameFallback", &Gitoxide::AUTHOR).with_environment_override("GIT_AUTHOR_NAME");
+ /// The `gitoxide.author.emailFallback` key.
+ pub const EMAIL_FALLBACK: keys::Any =
+ keys::Any::new("emailFallback", &Gitoxide::AUTHOR).with_environment_override("GIT_AUTHOR_EMAIL");
+ }
+
+ impl Section for Author {
+ fn name(&self) -> &str {
+ "author"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::NAME_FALLBACK, &Self::EMAIL_FALLBACK]
+ }
+
+ fn parent(&self) -> Option<&dyn Section> {
+ Some(&Tree::GITOXIDE)
+ }
+ }
+
+ /// The `user` sub-section.
+ #[derive(Copy, Clone, Default)]
+ pub struct User;
+
+ impl User {
+ /// The `gitoxide.user.emailFallback` key.
+ pub const EMAIL_FALLBACK: keys::Any =
+ keys::Any::new("emailFallback", &Gitoxide::USER).with_environment_override("EMAIL");
+ }
+
+ impl Section for User {
+ fn name(&self) -> &str {
+ "user"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::EMAIL_FALLBACK]
+ }
+
+ fn parent(&self) -> Option<&dyn Section> {
+ Some(&Tree::GITOXIDE)
+ }
+ }
+
+ /// The `ssh` sub-section.
+ #[derive(Copy, Clone, Default)]
+ pub struct Ssh;
+
+ impl Ssh {
+ /// The `gitoxide.ssh.commandWithoutShellFallback` key.
+ pub const COMMAND_WITHOUT_SHELL_FALLBACK: keys::Executable =
+ keys::Executable::new_executable("commandWithoutShellFallback", &Gitoxide::SSH)
+ .with_environment_override("GIT_SSH")
+ .with_note("is always executed without shell and treated as fallback");
+ }
+
+ impl Section for Ssh {
+ fn name(&self) -> &str {
+ "ssh"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::COMMAND_WITHOUT_SHELL_FALLBACK]
+ }
+
+ fn parent(&self) -> Option<&dyn Section> {
+ Some(&Tree::GITOXIDE)
+ }
+ }
+
+ /// The `objects` sub-section.
+ #[derive(Copy, Clone, Default)]
+ pub struct Objects;
+
+ impl Objects {
+ /// The `gitoxide.objects.cacheLimit` key.
+ pub const CACHE_LIMIT: keys::UnsignedInteger =
+ keys::UnsignedInteger::new_unsigned_integer("cacheLimit", &Gitoxide::OBJECTS)
+ .with_note("If unset or 0, there is no object cache")
+ .with_environment_override("GITOXIDE_OBJECT_CACHE_MEMORY");
+ /// The `gitoxide.objects.noReplace` key.
+ pub const NO_REPLACE: keys::Boolean = keys::Boolean::new_boolean("noReplace", &Gitoxide::OBJECTS)
+ .with_environment_override("GIT_NO_REPLACE_OBJECTS");
+ /// The `gitoxide.objects.replaceRefBase` key.
+ pub const REPLACE_REF_BASE: keys::Any =
+ keys::Any::new("replaceRefBase", &Gitoxide::OBJECTS).with_environment_override("GIT_REPLACE_REF_BASE");
+ }
+
+ impl Section for Objects {
+ fn name(&self) -> &str {
+ "objects"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::CACHE_LIMIT, &Self::NO_REPLACE, &Self::REPLACE_REF_BASE]
+ }
+
+ fn parent(&self) -> Option<&dyn Section> {
+ Some(&Tree::GITOXIDE)
+ }
+ }
+
+ /// The `committer` sub-section.
+ #[derive(Copy, Clone, Default)]
+ pub struct Committer;
+
+ impl Committer {
+ /// The `gitoxide.committer.nameFallback` key.
+ pub const NAME_FALLBACK: keys::Any =
+ keys::Any::new("nameFallback", &Gitoxide::COMMITTER).with_environment_override("GIT_COMMITTER_NAME");
+ /// The `gitoxide.committer.emailFallback` key.
+ pub const EMAIL_FALLBACK: keys::Any =
+ keys::Any::new("emailFallback", &Gitoxide::COMMITTER).with_environment_override("GIT_COMMITTER_EMAIL");
+ }
+
+ impl Section for Committer {
+ fn name(&self) -> &str {
+ "committer"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::NAME_FALLBACK, &Self::EMAIL_FALLBACK]
+ }
+
+ fn parent(&self) -> Option<&dyn Section> {
+ Some(&Tree::GITOXIDE)
+ }
+ }
+
+ /// The `commit` sub-section.
+ #[derive(Copy, Clone, Default)]
+ pub struct Commit;
+
+ impl Commit {
+ /// The `gitoxide.commit.authorDate` key.
+ pub const AUTHOR_DATE: keys::Time =
+ keys::Time::new_time("authorDate", &Gitoxide::COMMIT).with_environment_override("GIT_AUTHOR_DATE");
+ /// The `gitoxide.commit.committerDate` key.
+ pub const COMMITTER_DATE: keys::Time =
+ keys::Time::new_time("committerDate", &Gitoxide::COMMIT).with_environment_override("GIT_COMMITTER_DATE");
+ }
+
+ impl Section for Commit {
+ fn name(&self) -> &str {
+ "commit"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[]
+ }
+
+ fn parent(&self) -> Option<&dyn Section> {
+ Some(&Tree::GITOXIDE)
+ }
+ }
+}
+
+pub mod validate {
+ use std::error::Error;
+
+ use crate::{bstr::BStr, config::tree::keys::Validate};
+
+ pub struct ProtocolFromUser;
+ impl Validate for ProtocolFromUser {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn Error + Send + Sync + 'static>> {
+ if value != "1" {
+ return Err("GIT_PROTOCOL_FROM_USER is either unset or as the value '1'".into());
+ }
+ Ok(())
+ }
+ }
+}
+
+pub use subsections::{Allow, Author, Commit, Committer, Http, Https, Objects, Ssh, User};
+
+use crate::config;
diff --git a/vendor/gix/src/config/tree/sections/http.rs b/vendor/gix/src/config/tree/sections/http.rs
new file mode 100644
index 000000000..f45c37076
--- /dev/null
+++ b/vendor/gix/src/config/tree/sections/http.rs
@@ -0,0 +1,317 @@
+use crate::{
+ config,
+ config::tree::{keys, Http, Key, Section},
+};
+
+impl Http {
+ /// The `http.sslVersion` key.
+ pub const SSL_VERSION: SslVersion = SslVersion::new_ssl_version("sslVersion", &config::Tree::HTTP)
+ .with_environment_override("GIT_SSL_VERSION")
+ .with_deviation(
+ "accepts the new 'default' value which means to use the curl default just like the empty string does",
+ );
+ /// The `http.proxy` key.
+ pub const PROXY: keys::String =
+ keys::String::new_string("proxy", &config::Tree::HTTP).with_deviation("fails on strings with illformed UTF-8");
+ /// The `http.proxyAuthMethod` key.
+ pub const PROXY_AUTH_METHOD: ProxyAuthMethod =
+ ProxyAuthMethod::new_proxy_auth_method("proxyAuthMethod", &config::Tree::HTTP)
+ .with_deviation("implemented like git, but never actually tried");
+ /// The `http.version` key.
+ pub const VERSION: Version = Version::new_with_validate("version", &config::Tree::HTTP, validate::Version)
+ .with_deviation("fails on illformed UTF-8");
+ /// The `http.userAgent` key.
+ pub const USER_AGENT: keys::String =
+ keys::String::new_string("userAgent", &config::Tree::HTTP).with_deviation("fails on illformed UTF-8");
+ /// The `http.extraHeader` key.
+ pub const EXTRA_HEADER: ExtraHeader =
+ ExtraHeader::new_with_validate("extraHeader", &config::Tree::HTTP, validate::ExtraHeader)
+ .with_deviation("fails on illformed UTF-8, without leniency");
+ /// The `http.followRedirects` key.
+ pub const FOLLOW_REDIRECTS: FollowRedirects =
+ FollowRedirects::new_with_validate("followRedirects", &config::Tree::HTTP, validate::FollowRedirects);
+ /// The `http.lowSpeedTime` key.
+ pub const LOW_SPEED_TIME: keys::UnsignedInteger =
+ keys::UnsignedInteger::new_unsigned_integer("lowSpeedTime", &config::Tree::HTTP)
+ .with_deviation("fails on negative values");
+ /// The `http.lowSpeedLimit` key.
+ pub const LOW_SPEED_LIMIT: keys::UnsignedInteger =
+ keys::UnsignedInteger::new_unsigned_integer("lowSpeedLimit", &config::Tree::HTTP)
+ .with_deviation("fails on negative values");
+ /// The `http.schannelUseSSLCAInfo` key.
+ pub const SCHANNEL_USE_SSL_CA_INFO: keys::Boolean =
+ keys::Boolean::new_boolean("schannelUseSSLCAInfo", &config::Tree::HTTP)
+ .with_deviation("only used as switch internally to turn off using the sslCAInfo, unconditionally. If unset, it has no effect, whereas in `git` it defaults to false.");
+ /// The `http.sslCAInfo` key.
+ pub const SSL_CA_INFO: keys::Path =
+ keys::Path::new_path("sslCAInfo", &config::Tree::HTTP).with_environment_override("GIT_SSL_CAINFO");
+ /// The `http.schannelCheckRevoke` key.
+ pub const SCHANNEL_CHECK_REVOKE: keys::Boolean =
+ keys::Boolean::new_boolean("schannelCheckRevoke", &config::Tree::HTTP);
+}
+
+impl Section for Http {
+ fn name(&self) -> &str {
+ "http"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[
+ &Self::SSL_VERSION,
+ &Self::PROXY,
+ &Self::PROXY_AUTH_METHOD,
+ &Self::VERSION,
+ &Self::USER_AGENT,
+ &Self::EXTRA_HEADER,
+ &Self::FOLLOW_REDIRECTS,
+ &Self::LOW_SPEED_TIME,
+ &Self::LOW_SPEED_LIMIT,
+ &Self::SCHANNEL_USE_SSL_CA_INFO,
+ &Self::SSL_CA_INFO,
+ &Self::SCHANNEL_CHECK_REVOKE,
+ ]
+ }
+}
+
+/// The `http.followRedirects` key.
+pub type FollowRedirects = keys::Any<validate::FollowRedirects>;
+
+/// The `http.extraHeader` key.
+pub type ExtraHeader = keys::Any<validate::ExtraHeader>;
+
+/// The `http.sslVersion` key, as well as others of the same type.
+pub type SslVersion = keys::Any<validate::SslVersion>;
+
+/// The `http.proxyAuthMethod` key, as well as others of the same type.
+pub type ProxyAuthMethod = keys::Any<validate::ProxyAuthMethod>;
+
+/// The `http.version` key.
+pub type Version = keys::Any<validate::Version>;
+
+mod key_impls {
+ use crate::config::tree::{
+ http::{ProxyAuthMethod, SslVersion},
+ keys, Section,
+ };
+
+ impl SslVersion {
+ pub const fn new_ssl_version(name: &'static str, section: &'static dyn Section) -> Self {
+ keys::Any::new_with_validate(name, section, super::validate::SslVersion)
+ }
+ }
+
+ impl ProxyAuthMethod {
+ pub const fn new_proxy_auth_method(name: &'static str, section: &'static dyn Section) -> Self {
+ keys::Any::new_with_validate(name, section, super::validate::ProxyAuthMethod)
+ }
+ }
+
+ #[cfg(any(
+ feature = "blocking-http-transport-reqwest",
+ feature = "blocking-http-transport-curl"
+ ))]
+ impl crate::config::tree::http::FollowRedirects {
+ /// Convert `value` into the redirect specification, or query the same value as `boolean`
+ /// for additional possible input values.
+ ///
+ /// Note that `boolean` only queries the underlying key as boolean, which is a necessity to handle
+ /// empty booleans correctly, that is those without a value separator.
+ pub fn try_into_follow_redirects(
+ &'static self,
+ value: std::borrow::Cow<'_, crate::bstr::BStr>,
+ boolean: impl FnOnce() -> Result<Option<bool>, gix_config::value::Error>,
+ ) -> Result<
+ crate::protocol::transport::client::http::options::FollowRedirects,
+ crate::config::key::GenericErrorWithValue,
+ > {
+ use crate::{bstr::ByteSlice, protocol::transport::client::http::options::FollowRedirects};
+ Ok(if value.as_ref().as_bytes() == b"initial" {
+ FollowRedirects::Initial
+ } else if let Some(value) = boolean().map_err(|err| {
+ crate::config::key::GenericErrorWithValue::from_value(self, value.into_owned()).with_source(err)
+ })? {
+ if value {
+ FollowRedirects::All
+ } else {
+ FollowRedirects::None
+ }
+ } else {
+ FollowRedirects::Initial
+ })
+ }
+ }
+
+ impl super::ExtraHeader {
+ /// Convert a list of values into extra-headers, while failing entirely on illformed UTF-8.
+ pub fn try_into_extra_header(
+ &'static self,
+ values: Vec<std::borrow::Cow<'_, crate::bstr::BStr>>,
+ ) -> Result<Vec<String>, crate::config::string::Error> {
+ let mut out = Vec::with_capacity(values.len());
+ for value in values {
+ if value.is_empty() {
+ out.clear();
+ } else {
+ out.push(self.try_into_string(value)?);
+ }
+ }
+ Ok(out)
+ }
+ }
+
+ #[cfg(any(
+ feature = "blocking-http-transport-reqwest",
+ feature = "blocking-http-transport-curl"
+ ))]
+ impl super::Version {
+ pub fn try_into_http_version(
+ &'static self,
+ value: std::borrow::Cow<'_, crate::bstr::BStr>,
+ ) -> Result<
+ gix_protocol::transport::client::http::options::HttpVersion,
+ crate::config::key::GenericErrorWithValue,
+ > {
+ use gix_protocol::transport::client::http::options::HttpVersion;
+
+ use crate::bstr::ByteSlice;
+ Ok(match value.as_ref().as_bytes() {
+ b"HTTP/1.1" => HttpVersion::V1_1,
+ b"HTTP/2" => HttpVersion::V2,
+ _ => {
+ return Err(crate::config::key::GenericErrorWithValue::from_value(
+ self,
+ value.into_owned(),
+ ))
+ }
+ })
+ }
+ }
+
+ #[cfg(any(
+ feature = "blocking-http-transport-reqwest",
+ feature = "blocking-http-transport-curl"
+ ))]
+ impl ProxyAuthMethod {
+ pub fn try_into_proxy_auth_method(
+ &'static self,
+ value: std::borrow::Cow<'_, crate::bstr::BStr>,
+ ) -> Result<
+ gix_protocol::transport::client::http::options::ProxyAuthMethod,
+ crate::config::key::GenericErrorWithValue,
+ > {
+ use gix_protocol::transport::client::http::options::ProxyAuthMethod;
+
+ use crate::bstr::ByteSlice;
+ Ok(match value.as_ref().as_bytes() {
+ b"anyauth" => ProxyAuthMethod::AnyAuth,
+ b"basic" => ProxyAuthMethod::Basic,
+ b"digest" => ProxyAuthMethod::Digest,
+ b"negotiate" => ProxyAuthMethod::Negotiate,
+ b"ntlm" => ProxyAuthMethod::Ntlm,
+ _ => {
+ return Err(crate::config::key::GenericErrorWithValue::from_value(
+ self,
+ value.into_owned(),
+ ))
+ }
+ })
+ }
+ }
+
+ #[cfg(any(
+ feature = "blocking-http-transport-reqwest",
+ feature = "blocking-http-transport-curl"
+ ))]
+ impl SslVersion {
+ pub fn try_into_ssl_version(
+ &'static self,
+ value: std::borrow::Cow<'_, crate::bstr::BStr>,
+ ) -> Result<gix_protocol::transport::client::http::options::SslVersion, crate::config::ssl_version::Error>
+ {
+ use gix_protocol::transport::client::http::options::SslVersion::*;
+
+ use crate::bstr::ByteSlice;
+ Ok(match value.as_ref().as_bytes() {
+ b"default" | b"" => Default,
+ b"tlsv1" => TlsV1,
+ b"sslv2" => SslV2,
+ b"sslv3" => SslV3,
+ b"tlsv1.0" => TlsV1_0,
+ b"tlsv1.1" => TlsV1_1,
+ b"tlsv1.2" => TlsV1_2,
+ b"tlsv1.3" => TlsV1_3,
+ _ => return Err(crate::config::ssl_version::Error::from_value(self, value.into_owned())),
+ })
+ }
+ }
+}
+
+pub mod validate {
+ use std::error::Error;
+
+ use crate::{
+ bstr::{BStr, ByteSlice},
+ config::tree::keys::Validate,
+ };
+
+ pub struct SslVersion;
+ impl Validate for SslVersion {
+ fn validate(&self, _value: &BStr) -> Result<(), Box<dyn Error + Send + Sync + 'static>> {
+ #[cfg(any(
+ feature = "blocking-http-transport-reqwest",
+ feature = "blocking-http-transport-curl"
+ ))]
+ super::Http::SSL_VERSION.try_into_ssl_version(std::borrow::Cow::Borrowed(_value))?;
+
+ Ok(())
+ }
+ }
+
+ pub struct ProxyAuthMethod;
+ impl Validate for ProxyAuthMethod {
+ fn validate(&self, _value: &BStr) -> Result<(), Box<dyn Error + Send + Sync + 'static>> {
+ #[cfg(any(
+ feature = "blocking-http-transport-reqwest",
+ feature = "blocking-http-transport-curl"
+ ))]
+ super::Http::PROXY_AUTH_METHOD.try_into_proxy_auth_method(std::borrow::Cow::Borrowed(_value))?;
+
+ Ok(())
+ }
+ }
+
+ pub struct Version;
+ impl Validate for Version {
+ fn validate(&self, _value: &BStr) -> Result<(), Box<dyn Error + Send + Sync + 'static>> {
+ #[cfg(any(
+ feature = "blocking-http-transport-reqwest",
+ feature = "blocking-http-transport-curl"
+ ))]
+ super::Http::VERSION.try_into_http_version(std::borrow::Cow::Borrowed(_value))?;
+
+ Ok(())
+ }
+ }
+
+ pub struct ExtraHeader;
+ impl Validate for ExtraHeader {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn Error + Send + Sync + 'static>> {
+ value.to_str()?;
+ Ok(())
+ }
+ }
+
+ pub struct FollowRedirects;
+ impl Validate for FollowRedirects {
+ fn validate(&self, _value: &BStr) -> Result<(), Box<dyn Error + Send + Sync + 'static>> {
+ #[cfg(any(
+ feature = "blocking-http-transport-reqwest",
+ feature = "blocking-http-transport-curl"
+ ))]
+ super::Http::FOLLOW_REDIRECTS.try_into_follow_redirects(std::borrow::Cow::Borrowed(_value), || {
+ gix_config::Boolean::try_from(_value).map(|b| Some(b.0))
+ })?;
+ Ok(())
+ }
+ }
+}
diff --git a/vendor/gix/src/config/tree/sections/init.rs b/vendor/gix/src/config/tree/sections/init.rs
new file mode 100644
index 000000000..de42d3b62
--- /dev/null
+++ b/vendor/gix/src/config/tree/sections/init.rs
@@ -0,0 +1,20 @@
+use crate::{
+ config,
+ config::tree::{keys, Init, Key, Section},
+};
+
+impl Init {
+ /// The `init.defaultBranch` key.
+ pub const DEFAULT_BRANCH: keys::Any = keys::Any::new("defaultBranch", &config::Tree::INIT)
+ .with_deviation("If not set, we use `main` instead of `master`");
+}
+
+impl Section for Init {
+ fn name(&self) -> &str {
+ "init"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::DEFAULT_BRANCH]
+ }
+}
diff --git a/vendor/gix/src/config/tree/sections/mod.rs b/vendor/gix/src/config/tree/sections/mod.rs
new file mode 100644
index 000000000..fb9b50786
--- /dev/null
+++ b/vendor/gix/src/config/tree/sections/mod.rs
@@ -0,0 +1,96 @@
+#![allow(missing_docs)]
+
+/// The `author` top-level section.
+#[derive(Copy, Clone, Default)]
+pub struct Author;
+mod author;
+
+/// The `branch` top-level section.
+#[derive(Copy, Clone, Default)]
+pub struct Branch;
+pub mod branch;
+
+/// The `checkout` top-level section.
+#[derive(Copy, Clone, Default)]
+pub struct Checkout;
+pub mod checkout;
+
+/// The `clone` top-level section.
+#[derive(Copy, Clone, Default)]
+pub struct Clone;
+mod clone;
+
+/// The `committer` top-level section.
+#[derive(Copy, Clone, Default)]
+pub struct Committer;
+mod committer;
+
+/// The `core` top-level section.
+#[derive(Copy, Clone, Default)]
+pub struct Core;
+pub mod core;
+
+/// The `credential` top-level section.
+#[derive(Copy, Clone, Default)]
+pub struct Credential;
+pub mod credential;
+
+/// The `diff` top-level section.
+#[derive(Copy, Clone, Default)]
+pub struct Diff;
+pub mod diff;
+
+/// The `extension` top-level section.
+#[derive(Copy, Clone, Default)]
+pub struct Extensions;
+pub mod extensions;
+
+/// The `gitoxide` top-level section.
+#[derive(Copy, Clone, Default)]
+pub struct Gitoxide;
+pub mod gitoxide;
+
+/// The `http` top-level section.
+#[derive(Copy, Clone, Default)]
+pub struct Http;
+pub mod http;
+
+/// The `init` top-level section.
+#[derive(Copy, Clone, Default)]
+pub struct Init;
+mod init;
+
+/// The `pack` top-level section.
+#[derive(Copy, Clone, Default)]
+pub struct Pack;
+pub mod pack;
+
+/// The `protocol` top-level section.
+#[derive(Copy, Clone, Default)]
+pub struct Protocol;
+pub mod protocol;
+
+/// The `remote` top-level section.
+#[derive(Copy, Clone, Default)]
+pub struct Remote;
+pub mod remote;
+
+/// The `safe` top-level section.
+#[derive(Copy, Clone, Default)]
+pub struct Safe;
+mod safe;
+
+/// The `ssh` top-level section.
+#[derive(Copy, Clone, Default)]
+pub struct Ssh;
+pub mod ssh;
+
+/// The `user` top-level section.
+#[derive(Copy, Clone, Default)]
+pub struct User;
+mod user;
+
+/// The `url` top-level section.
+#[derive(Copy, Clone, Default)]
+pub struct Url;
+mod url;
diff --git a/vendor/gix/src/config/tree/sections/pack.rs b/vendor/gix/src/config/tree/sections/pack.rs
new file mode 100644
index 000000000..941817e5b
--- /dev/null
+++ b/vendor/gix/src/config/tree/sections/pack.rs
@@ -0,0 +1,64 @@
+use crate::{
+ config,
+ config::tree::{keys, Key, Pack, Section},
+};
+
+impl Pack {
+ /// The `pack.threads` key.
+ pub const THREADS: keys::UnsignedInteger =
+ keys::UnsignedInteger::new_unsigned_integer("threads", &config::Tree::PACK)
+ .with_deviation("Leaving this key unspecified uses all available cores, instead of 1");
+
+ /// The `pack.indexVersion` key.
+ pub const INDEX_VERSION: IndexVersion =
+ IndexVersion::new_with_validate("indexVersion", &config::Tree::PACK, validate::IndexVersion);
+}
+
+/// The `pack.indexVersion` key.
+pub type IndexVersion = keys::Any<validate::IndexVersion>;
+
+mod index_version {
+ use crate::{config, config::tree::sections::pack::IndexVersion};
+
+ impl IndexVersion {
+ /// Try to interpret an integer value as index version.
+ pub fn try_into_index_version(
+ &'static self,
+ value: Result<i64, gix_config::value::Error>,
+ ) -> Result<gix_pack::index::Version, config::key::GenericError> {
+ let value = value.map_err(|err| config::key::GenericError::from(self).with_source(err))?;
+ Ok(match value {
+ 1 => gix_pack::index::Version::V1,
+ 2 => gix_pack::index::Version::V2,
+ _ => return Err(config::key::GenericError::from(self)),
+ })
+ }
+ }
+}
+
+impl Section for Pack {
+ fn name(&self) -> &str {
+ "pack"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::THREADS, &Self::INDEX_VERSION]
+ }
+}
+
+mod validate {
+ use crate::{bstr::BStr, config::tree::keys};
+
+ pub struct IndexVersion;
+ impl keys::Validate for IndexVersion {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ super::Pack::INDEX_VERSION.try_into_index_version(gix_config::Integer::try_from(value).and_then(
+ |int| {
+ int.to_decimal()
+ .ok_or_else(|| gix_config::value::Error::new("integer out of range", value))
+ },
+ ))?;
+ Ok(())
+ }
+ }
+}
diff --git a/vendor/gix/src/config/tree/sections/protocol.rs b/vendor/gix/src/config/tree/sections/protocol.rs
new file mode 100644
index 000000000..58e907b0f
--- /dev/null
+++ b/vendor/gix/src/config/tree/sections/protocol.rs
@@ -0,0 +1,85 @@
+use crate::{
+ config,
+ config::tree::{keys, Key, Protocol, Section},
+};
+
+impl Protocol {
+ /// The `protocol.allow` key.
+ pub const ALLOW: Allow = Allow::new_with_validate("allow", &config::Tree::PROTOCOL, validate::Allow);
+
+ /// The `protocol.<name>` subsection.
+ pub const NAME_PARAMETER: NameParameter = NameParameter;
+}
+
+/// The `protocol.allow` key type.
+pub type Allow = keys::Any<validate::Allow>;
+
+#[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+mod allow {
+ use std::borrow::Cow;
+
+ use crate::{bstr::BStr, config, config::tree::protocol::Allow, remote::url::scheme_permission};
+
+ impl Allow {
+ /// Convert `value` into its respective `Allow` variant, possibly informing about the `scheme` we are looking at in the error.
+ pub fn try_into_allow(
+ &'static self,
+ value: Cow<'_, BStr>,
+ scheme: Option<&str>,
+ ) -> Result<scheme_permission::Allow, config::protocol::allow::Error> {
+ scheme_permission::Allow::try_from(value).map_err(|value| config::protocol::allow::Error {
+ value,
+ scheme: scheme.map(ToOwned::to_owned),
+ })
+ }
+ }
+}
+
+/// The `protocol.<name>` parameter section.
+pub struct NameParameter;
+
+impl NameParameter {
+ /// The `protocol.<name>.allow` key.
+ pub const ALLOW: Allow = Allow::new_with_validate("allow", &Protocol::NAME_PARAMETER, validate::Allow);
+}
+
+impl Section for NameParameter {
+ fn name(&self) -> &str {
+ "<name>"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::ALLOW]
+ }
+
+ fn parent(&self) -> Option<&dyn Section> {
+ Some(&config::Tree::PROTOCOL)
+ }
+}
+
+impl Section for Protocol {
+ fn name(&self) -> &str {
+ "protocol"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::ALLOW]
+ }
+
+ fn sub_sections(&self) -> &[&dyn Section] {
+ &[&Self::NAME_PARAMETER]
+ }
+}
+
+mod validate {
+ use crate::{bstr::BStr, config::tree::keys};
+
+ pub struct Allow;
+ impl keys::Validate for Allow {
+ fn validate(&self, _value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ #[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+ super::Protocol::ALLOW.try_into_allow(std::borrow::Cow::Borrowed(_value), None)?;
+ Ok(())
+ }
+ }
+}
diff --git a/vendor/gix/src/config/tree/sections/remote.rs b/vendor/gix/src/config/tree/sections/remote.rs
new file mode 100644
index 000000000..b242c9c14
--- /dev/null
+++ b/vendor/gix/src/config/tree/sections/remote.rs
@@ -0,0 +1,101 @@
+use crate::{
+ config,
+ config::tree::{http, keys, Key, Remote, Section, SubSectionRequirement},
+};
+
+const NAME_PARAMETER: Option<SubSectionRequirement> = Some(SubSectionRequirement::Parameter("name"));
+
+impl Remote {
+ /// The `remote.pushDefault` key
+ pub const PUSH_DEFAULT: keys::RemoteName = keys::RemoteName::new_remote_name("pushDefault", &config::Tree::REMOTE);
+ /// The `remote.<name>.tagOpt` key
+ pub const TAG_OPT: TagOpt = TagOpt::new_with_validate("tagOpt", &config::Tree::REMOTE, validate::TagOpt)
+ .with_subsection_requirement(Some(SubSectionRequirement::Parameter("name")));
+ /// The `remote.<name>.url` key
+ pub const URL: keys::Url =
+ keys::Url::new_url("url", &config::Tree::REMOTE).with_subsection_requirement(NAME_PARAMETER);
+ /// The `remote.<name>.pushUrl` key
+ pub const PUSH_URL: keys::Url =
+ keys::Url::new_url("pushUrl", &config::Tree::REMOTE).with_subsection_requirement(NAME_PARAMETER);
+ /// The `remote.<name>.fetch` key
+ pub const FETCH: keys::FetchRefSpec = keys::FetchRefSpec::new_fetch_refspec("fetch", &config::Tree::REMOTE)
+ .with_subsection_requirement(NAME_PARAMETER);
+ /// The `remote.<name>.push` key
+ pub const PUSH: keys::PushRefSpec =
+ keys::PushRefSpec::new_push_refspec("push", &config::Tree::REMOTE).with_subsection_requirement(NAME_PARAMETER);
+ /// The `remote.<name>.proxy` key
+ pub const PROXY: keys::String =
+ keys::String::new_string("proxy", &config::Tree::REMOTE).with_subsection_requirement(NAME_PARAMETER);
+ /// The `remote.<name>.proxyAuthMethod` key.
+ pub const PROXY_AUTH_METHOD: http::ProxyAuthMethod =
+ http::ProxyAuthMethod::new_proxy_auth_method("proxyAuthMethod", &config::Tree::REMOTE)
+ .with_subsection_requirement(NAME_PARAMETER)
+ .with_deviation("implemented like git, but never actually tried");
+}
+
+impl Section for Remote {
+ fn name(&self) -> &str {
+ "remote"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[
+ &Self::PUSH_DEFAULT,
+ &Self::TAG_OPT,
+ &Self::URL,
+ &Self::PUSH_URL,
+ &Self::FETCH,
+ &Self::PUSH,
+ &Self::PROXY,
+ &Self::PROXY_AUTH_METHOD,
+ ]
+ }
+}
+
+/// The `remote.<name>.tagOpt` key type.
+pub type TagOpt = keys::Any<validate::TagOpt>;
+
+mod tag_opts {
+ use std::borrow::Cow;
+
+ use crate::{
+ bstr::{BStr, ByteSlice},
+ config,
+ config::tree::remote::TagOpt,
+ remote,
+ };
+
+ impl TagOpt {
+ /// Try to interpret `value` as tag option.
+ ///
+ /// # Note
+ ///
+ /// It's heavily biased towards the git command-line unfortunately, and the only
+ /// value of its kind. Maybe in future more values will be supported which are less
+ /// about passing them to a sub-process.
+ pub fn try_into_tag_opt(
+ &'static self,
+ value: Cow<'_, BStr>,
+ ) -> Result<remote::fetch::Tags, config::key::GenericErrorWithValue> {
+ Ok(match value.as_ref().as_bytes() {
+ b"--tags" => remote::fetch::Tags::All,
+ b"--no-tags" => remote::fetch::Tags::None,
+ _ => return Err(config::key::GenericErrorWithValue::from_value(self, value.into_owned())),
+ })
+ }
+ }
+}
+
+pub mod validate {
+ use std::{borrow::Cow, error::Error};
+
+ use crate::{bstr::BStr, config::tree::keys::Validate};
+
+ pub struct TagOpt;
+ impl Validate for TagOpt {
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn Error + Send + Sync + 'static>> {
+ super::Remote::TAG_OPT.try_into_tag_opt(Cow::Borrowed(value))?;
+ Ok(())
+ }
+ }
+}
diff --git a/vendor/gix/src/config/tree/sections/safe.rs b/vendor/gix/src/config/tree/sections/safe.rs
new file mode 100644
index 000000000..e76d28888
--- /dev/null
+++ b/vendor/gix/src/config/tree/sections/safe.rs
@@ -0,0 +1,27 @@
+use crate::{
+ config,
+ config::tree::{keys, Key, Safe, Section},
+};
+
+impl Safe {
+ /// The `safe.directory` key
+ pub const DIRECTORY: keys::Any = keys::Any::new("directory", &config::Tree::SAFE);
+}
+
+impl Safe {
+ /// Implements the directory filter to trust only global and system files, for use with `safe.directory`.
+ pub fn directory_filter(meta: &gix_config::file::Metadata) -> bool {
+ let kind = meta.source.kind();
+ kind == gix_config::source::Kind::System || kind == gix_config::source::Kind::Global
+ }
+}
+
+impl Section for Safe {
+ fn name(&self) -> &str {
+ "safe"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::DIRECTORY]
+ }
+}
diff --git a/vendor/gix/src/config/tree/sections/ssh.rs b/vendor/gix/src/config/tree/sections/ssh.rs
new file mode 100644
index 000000000..600ee663b
--- /dev/null
+++ b/vendor/gix/src/config/tree/sections/ssh.rs
@@ -0,0 +1,65 @@
+use crate::{
+ config,
+ config::tree::{keys, Key, Section, Ssh},
+};
+
+impl Ssh {
+ /// The `ssh.variant` key
+ pub const VARIANT: Variant = Variant::new_with_validate("variant", &config::Tree::SSH, validate::Variant)
+ .with_environment_override("GIT_SSH_VARIANT")
+ .with_deviation("We error if a variant is chosen that we don't know, as opposed to defaulting to 'ssh'");
+}
+
+/// The `ssh.variant` key.
+pub type Variant = keys::Any<validate::Variant>;
+
+#[cfg(feature = "blocking-network-client")]
+mod variant {
+ use std::borrow::Cow;
+
+ use crate::{bstr::BStr, config, config::tree::ssh::Variant};
+
+ impl Variant {
+ pub fn try_into_variant(
+ &'static self,
+ value: Cow<'_, BStr>,
+ ) -> Result<Option<gix_protocol::transport::client::ssh::ProgramKind>, config::key::GenericErrorWithValue>
+ {
+ use gix_protocol::transport::client::ssh::ProgramKind;
+
+ use crate::bstr::ByteSlice;
+ Ok(Some(match value.as_ref().as_bytes() {
+ b"auto" => return Ok(None),
+ b"ssh" => ProgramKind::Ssh,
+ b"plink" => ProgramKind::Plink,
+ b"putty" => ProgramKind::Putty,
+ b"tortoiseplink" => ProgramKind::TortoisePlink,
+ b"simple" => ProgramKind::Simple,
+ _ => return Err(config::key::GenericErrorWithValue::from_value(self, value.into_owned())),
+ }))
+ }
+ }
+}
+
+impl Section for Ssh {
+ fn name(&self) -> &str {
+ "ssh"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::VARIANT]
+ }
+}
+
+mod validate {
+ use crate::{bstr::BStr, config::tree::keys};
+
+ pub struct Variant;
+ impl keys::Validate for Variant {
+ fn validate(&self, _value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ #[cfg(feature = "blocking-network-client")]
+ super::Ssh::VARIANT.try_into_variant(_value.into())?;
+ Ok(())
+ }
+ }
+}
diff --git a/vendor/gix/src/config/tree/sections/url.rs b/vendor/gix/src/config/tree/sections/url.rs
new file mode 100644
index 000000000..6a9c0bfdb
--- /dev/null
+++ b/vendor/gix/src/config/tree/sections/url.rs
@@ -0,0 +1,25 @@
+use crate::{
+ config,
+ config::tree::{keys, Key, Section, SubSectionRequirement, Url},
+};
+
+const BASE_PARAMETER: Option<SubSectionRequirement> = Some(SubSectionRequirement::Parameter("base"));
+
+impl Url {
+ /// The `url.<base>.insteadOf` key
+ pub const INSTEAD_OF: keys::Any =
+ keys::Any::new("insteadOf", &config::Tree::URL).with_subsection_requirement(BASE_PARAMETER);
+ /// The `url.<base>.pushInsteadOf` key
+ pub const PUSH_INSTEAD_OF: keys::Any =
+ keys::Any::new("pushInsteadOf", &config::Tree::URL).with_subsection_requirement(BASE_PARAMETER);
+}
+
+impl Section for Url {
+ fn name(&self) -> &str {
+ "url"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::INSTEAD_OF, &Self::PUSH_INSTEAD_OF]
+ }
+}
diff --git a/vendor/gix/src/config/tree/sections/user.rs b/vendor/gix/src/config/tree/sections/user.rs
new file mode 100644
index 000000000..d1f4f7102
--- /dev/null
+++ b/vendor/gix/src/config/tree/sections/user.rs
@@ -0,0 +1,22 @@
+use crate::{
+ config,
+ config::tree::{gitoxide, keys, Key, Section, User},
+};
+
+impl User {
+ /// The `user.name` key
+ pub const NAME: keys::Any = keys::Any::new("name", &config::Tree::USER);
+ /// The `user.email` key
+ pub const EMAIL: keys::Any =
+ keys::Any::new("email", &config::Tree::USER).with_fallback(&gitoxide::User::EMAIL_FALLBACK);
+}
+
+impl Section for User {
+ fn name(&self) -> &str {
+ "user"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[&Self::NAME, &Self::EMAIL]
+ }
+}
diff --git a/vendor/gix/src/config/tree/traits.rs b/vendor/gix/src/config/tree/traits.rs
new file mode 100644
index 000000000..7cfd7aac4
--- /dev/null
+++ b/vendor/gix/src/config/tree/traits.rs
@@ -0,0 +1,199 @@
+use crate::{
+ bstr::{BStr, BString, ByteVec},
+ config::tree::key::validate_assignment,
+};
+
+/// Provide information about a configuration section.
+pub trait Section {
+ /// The section name, like `remote` in `remote.origin.url`.
+ fn name(&self) -> &str;
+ /// The keys directly underneath it for carrying configuration values.
+ fn keys(&self) -> &[&dyn Key];
+ /// The list of sub-section names, which may be empty if there are no statically known sub-sections.
+ fn sub_sections(&self) -> &[&dyn Section] {
+ &[]
+ }
+ /// The parent section if this is a statically known sub-section.
+ fn parent(&self) -> Option<&dyn Section> {
+ None
+ }
+}
+
+/// Determine how subsections may be used with a given key, suitable for obtaining the full name for use in assignments.
+#[derive(Debug, Copy, Clone)]
+pub enum SubSectionRequirement {
+ /// Subsections must not be used, this key can only be below a section.
+ Never,
+ /// The sub-section is used as parameter with the given name.
+ Parameter(&'static str),
+}
+
+/// A way to link a key with other resources.
+#[derive(Debug, Copy, Clone)]
+pub enum Link {
+ /// The environment variable of the given name will override the value of this key.
+ EnvironmentOverride(&'static str),
+ /// This config key is used as fallback if this key isn't set.
+ FallbackKey(&'static dyn Key),
+}
+
+/// A note attached to a key.
+#[derive(Debug, Copy, Clone)]
+pub enum Note {
+ /// A piece of information related to a key to help the user.
+ Informative(&'static str),
+ /// This key works differently than is described by git, explaining the deviation further.
+ Deviation(&'static str),
+}
+
+/// A leaf-level entry in the git configuration, like `url` in `remote.origin.url`.
+pub trait Key: std::fmt::Debug {
+ /// The key's name, like `url` in `remote.origin.url`.
+ fn name(&self) -> &str;
+ /// See if `value` is allowed as value of this key, or return a descriptive error if it is not.
+ fn validate(&self, value: &BStr) -> Result<(), crate::config::tree::key::validate::Error>;
+ /// The section containing this key. Git configuration has no free-standing keys, they are always underneath a section.
+ fn section(&self) -> &dyn Section;
+ /// The return value encodes three possible states to indicate subsection requirements
+ /// * `None` = subsections may or may not be used, the most flexible setting.
+ /// * `Some([Requirement][SubSectionRequirement])` = subsections must or must not be used, depending on the value
+ fn subsection_requirement(&self) -> Option<&SubSectionRequirement> {
+ Some(&SubSectionRequirement::Never)
+ }
+ /// Return the link to other resources, if available.
+ fn link(&self) -> Option<&Link> {
+ None
+ }
+ /// Return a note about this key, if available.
+ fn note(&self) -> Option<&Note> {
+ None
+ }
+
+ /// Return the name of an environment variable that would override this value (after following links until one is found).
+ fn environment_override(&self) -> Option<&str> {
+ let mut cursor = self.link()?;
+ loop {
+ match cursor {
+ Link::EnvironmentOverride(name) => return Some(name),
+ Link::FallbackKey(next) => {
+ cursor = next.link()?;
+ }
+ }
+ }
+ }
+
+ /// Return the environment override that must be set on this key.
+ /// # Panics
+ /// If no environment variable is set
+ fn the_environment_override(&self) -> &str {
+ self.environment_override()
+ .expect("BUG: environment override must be set")
+ }
+ /// Produce a name that describes how the name is composed. This is `core.bare` for statically known keys, or `branch.<name>.key`
+ /// for complex ones.
+ fn logical_name(&self) -> String {
+ let section = self.section();
+ let mut buf = String::new();
+ let parameter = if let Some(parent) = section.parent() {
+ buf.push_str(parent.name());
+ buf.push('.');
+ None
+ } else {
+ self.subsection_requirement().and_then(|requirement| match requirement {
+ SubSectionRequirement::Parameter(name) => Some(name),
+ SubSectionRequirement::Never => None,
+ })
+ };
+ buf.push_str(section.name());
+ buf.push('.');
+ if let Some(parameter) = parameter {
+ buf.push('<');
+ buf.push_str(parameter);
+ buf.push('>');
+ buf.push('.');
+ }
+ buf.push_str(self.name());
+ buf
+ }
+
+ /// The full name of the key for use in configuration overrides, like `core.bare`, or `remote.<subsection>.url` if `subsection` is
+ /// not `None`.
+ /// May fail if this key needs a subsection, or may not have a subsection.
+ fn full_name(&self, subsection: Option<&BStr>) -> Result<BString, String> {
+ let section = self.section();
+ let mut buf = BString::default();
+ let subsection = match self.subsection_requirement() {
+ None => subsection,
+ Some(requirement) => match (requirement, subsection) {
+ (SubSectionRequirement::Never, Some(_)) => {
+ return Err(format!(
+ "The key named '{}' cannot be used with non-static subsections.",
+ self.logical_name()
+ ));
+ }
+ (SubSectionRequirement::Parameter(_), None) => {
+ return Err(format!(
+ "The key named '{}' cannot be used without subsections.",
+ self.logical_name()
+ ))
+ }
+ _ => subsection,
+ },
+ };
+
+ if let Some(parent) = section.parent() {
+ buf.push_str(parent.name());
+ buf.push(b'.');
+ }
+ buf.push_str(section.name());
+ buf.push(b'.');
+ if let Some(subsection) = subsection {
+ debug_assert!(
+ section.parent().is_none(),
+ "BUG: sections with parameterized sub-sections must be top-level sections"
+ );
+ buf.push_str(subsection);
+ buf.push(b'.');
+ }
+ buf.push_str(self.name());
+ Ok(buf)
+ }
+
+ /// Return an assignment with the keys full name to `value`, suitable for [configuration overrides][crate::open::Options::config_overrides()].
+ /// Note that this will fail if the key requires a subsection name.
+ fn validated_assignment(&self, value: &BStr) -> Result<BString, validate_assignment::Error> {
+ self.validate(value)?;
+ let mut key = self
+ .full_name(None)
+ .map_err(|message| validate_assignment::Error::Name { message })?;
+ key.push(b'=');
+ key.push_str(value);
+ Ok(key)
+ }
+
+ /// Return an assignment with the keys full name to `value`, suitable for [configuration overrides][crate::open::Options::config_overrides()].
+ /// Note that this will fail if the key requires a subsection name.
+ fn validated_assignment_fmt(
+ &self,
+ value: &dyn std::fmt::Display,
+ ) -> Result<BString, crate::config::tree::key::validate_assignment::Error> {
+ let value = value.to_string();
+ self.validated_assignment(value.as_str().into())
+ }
+
+ /// Return an assignment to `value` with the keys full name within `subsection`, suitable for [configuration overrides][crate::open::Options::config_overrides()].
+ /// Note that this is only valid if this key supports parameterized sub-sections, or else an error is returned.
+ fn validated_assignment_with_subsection(
+ &self,
+ value: &BStr,
+ subsection: &BStr,
+ ) -> Result<BString, crate::config::tree::key::validate_assignment::Error> {
+ self.validate(value)?;
+ let mut key = self
+ .full_name(Some(subsection))
+ .map_err(|message| validate_assignment::Error::Name { message })?;
+ key.push(b'=');
+ key.push_str(value);
+ Ok(key)
+ }
+}
diff --git a/vendor/gix/src/create.rs b/vendor/gix/src/create.rs
new file mode 100644
index 000000000..96d047e3b
--- /dev/null
+++ b/vendor/gix/src/create.rs
@@ -0,0 +1,251 @@
+use std::{
+ convert::TryFrom,
+ fs::{self, OpenOptions},
+ io::Write,
+ path::{Path, PathBuf},
+};
+
+use gix_config::parse::section;
+use gix_discover::DOT_GIT_DIR;
+
/// The error used in [`into()`].
#[derive(Debug, thiserror::Error)]
#[allow(missing_docs)]
pub enum Error {
    /// Obtaining the current working directory failed.
    #[error("Could not obtain the current directory")]
    CurrentDir(#[from] std::io::Error),
    /// A file or directory could not be opened.
    #[error("Could not open data at '{}'", .path.display())]
    IoOpen { source: std::io::Error, path: PathBuf },
    /// A file could not be written.
    #[error("Could not write data at '{}'", .path.display())]
    IoWrite { source: std::io::Error, path: PathBuf },
    /// The target `.git` directory already exists.
    #[error("Refusing to initialize the existing '{}' directory", .path.display())]
    DirectoryExists { path: PathBuf },
    /// The destination directory had to be empty but was not.
    #[error("Refusing to initialize the non-empty directory as '{}'", .path.display())]
    DirectoryNotEmpty { path: PathBuf },
    /// A directory could not be created.
    #[error("Could not create directory at '{}'", .path.display())]
    CreateDirectory { source: std::io::Error, path: PathBuf },
}
+
/// The kind of repository to create, determining the on-disk layout produced by [`into()`].
#[derive(Debug, Copy, Clone)]
pub enum Kind {
    /// An empty repository with a `.git` folder, setup to contain files in its worktree.
    WithWorktree,
    /// A bare repository without a worktree.
    Bare,
}
+
// Templates for the baseline repository layout, embedded at compile time from
// `assets/baseline-init/`. These mirror the sample files a stock `git init` produces.
const TPL_INFO_EXCLUDE: &[u8] = include_bytes!("assets/baseline-init/info/exclude");
const TPL_HOOKS_APPLYPATCH_MSG: &[u8] = include_bytes!("assets/baseline-init/hooks/applypatch-msg.sample");
const TPL_HOOKS_COMMIT_MSG: &[u8] = include_bytes!("assets/baseline-init/hooks/commit-msg.sample");
const TPL_HOOKS_FSMONITOR_WATCHMAN: &[u8] = include_bytes!("assets/baseline-init/hooks/fsmonitor-watchman.sample");
const TPL_HOOKS_POST_UPDATE: &[u8] = include_bytes!("assets/baseline-init/hooks/post-update.sample");
const TPL_HOOKS_PRE_APPLYPATCH: &[u8] = include_bytes!("assets/baseline-init/hooks/pre-applypatch.sample");
const TPL_HOOKS_PRE_COMMIT: &[u8] = include_bytes!("assets/baseline-init/hooks/pre-commit.sample");
const TPL_HOOKS_PRE_MERGE_COMMIT: &[u8] = include_bytes!("assets/baseline-init/hooks/pre-merge-commit.sample");
const TPL_HOOKS_PRE_PUSH: &[u8] = include_bytes!("assets/baseline-init/hooks/pre-push.sample");
const TPL_HOOKS_PRE_REBASE: &[u8] = include_bytes!("assets/baseline-init/hooks/pre-rebase.sample");
const TPL_HOOKS_PRE_RECEIVE: &[u8] = include_bytes!("assets/baseline-init/hooks/pre-receive.sample");
const TPL_HOOKS_PREPARE_COMMIT_MSG: &[u8] = include_bytes!("assets/baseline-init/hooks/prepare-commit-msg.sample");
const TPL_HOOKS_UPDATE: &[u8] = include_bytes!("assets/baseline-init/hooks/update.sample");
const TPL_DESCRIPTION: &[u8] = include_bytes!("assets/baseline-init/description");
const TPL_HEAD: &[u8] = include_bytes!("assets/baseline-init/HEAD");
+
/// A cursor that appends one path component to the borrowed `PathBuf` and pops it
/// again on drop, so the shared buffer can be reused for sibling paths.
struct PathCursor<'a>(&'a mut PathBuf);

/// Like `PathCursor`, but `at()` also creates the pushed directory on disk.
/// The pushed component is popped again on drop.
struct NewDir<'a>(&'a mut PathBuf);

impl<'a> PathCursor<'a> {
    /// Append `component` to the underlying buffer and return the resulting path.
    fn at(&mut self, component: &str) -> &Path {
        self.0.push(component);
        self.0.as_path()
    }
}

impl<'a> NewDir<'a> {
    /// Append `component`, create that directory (and missing parents), and return `self`
    /// so calls can be chained to descend further.
    fn at(self, component: &str) -> Result<Self, Error> {
        self.0.push(component);
        create_dir(self.0)?;
        Ok(self)
    }
    // Expose the underlying buffer so `PathCursor` can be layered on top of it.
    fn as_mut(&mut self) -> &mut PathBuf {
        self.0
    }
}

impl<'a> Drop for NewDir<'a> {
    fn drop(&mut self) {
        // Undo the push so the shared buffer points at the parent directory again.
        self.0.pop();
    }
}

impl<'a> Drop for PathCursor<'a> {
    fn drop(&mut self) {
        // Undo the push performed by `at()`.
        self.0.pop();
    }
}
+
+fn write_file(data: &[u8], path: &Path) -> Result<(), Error> {
+ let mut file = OpenOptions::new()
+ .write(true)
+ .create(true)
+ .append(false)
+ .open(path)
+ .map_err(|e| Error::IoOpen {
+ source: e,
+ path: path.to_owned(),
+ })?;
+ file.write_all(data).map_err(|e| Error::IoWrite {
+ source: e,
+ path: path.to_owned(),
+ })
+}
+
+fn create_dir(p: &Path) -> Result<(), Error> {
+ fs::create_dir_all(p).map_err(|e| Error::CreateDirectory {
+ source: e,
+ path: p.to_owned(),
+ })
+}
+
/// Options for use in [`into()`].
#[derive(Copy, Clone, Default)]
pub struct Options {
    /// If true, and the kind of repository to create has a worktree, then the destination directory must be empty.
    ///
    /// By default repos with a worktree can be initialized into a non-empty directory as long as there is no `.git` directory.
    pub destination_must_be_empty: bool,
    /// If set, use these filesystem capabilities to populate the respective gix-config fields.
    /// If `None`, the directory will be probed.
    pub fs_capabilities: Option<gix_worktree::fs::Capabilities>,
}
+
/// Create a new `.git` repository of `kind` within the possibly non-existing `directory`
/// and return its path.
/// Note that this is a simple template-based initialization routine which should be accompanied with additional corrections
/// to respect git configuration, which is accomplished by [its callers][crate::ThreadSafeRepository::init_opts()]
/// that return a [Repository][crate::Repository].
pub fn into(
    directory: impl Into<PathBuf>,
    kind: Kind,
    Options {
        fs_capabilities,
        destination_must_be_empty,
    }: Options,
) -> Result<gix_discover::repository::Path, Error> {
    let mut dot_git = directory.into();
    let bare = matches!(kind, Kind::Bare);

    if bare || destination_must_be_empty {
        // Ensure the destination exists (creating it if missing) and is empty.
        let num_entries_in_dot_git = fs::read_dir(&dot_git)
            .or_else(|err| {
                if err.kind() == std::io::ErrorKind::NotFound {
                    // Create the directory on demand and re-read it to obtain an (empty) listing.
                    fs::create_dir(&dot_git).and_then(|_| fs::read_dir(&dot_git))
                } else {
                    Err(err)
                }
            })
            .map_err(|err| Error::IoOpen {
                source: err,
                path: dot_git.clone(),
            })?
            .count();
        if num_entries_in_dot_git != 0 {
            return Err(Error::DirectoryNotEmpty { path: dot_git });
        }
    }

    if !bare {
        // Non-bare repositories keep their metadata in a `.git` subdirectory, which must not pre-exist.
        dot_git.push(DOT_GIT_DIR);

        if dot_git.is_dir() {
            return Err(Error::DirectoryExists { path: dot_git });
        }
    };
    create_dir(&dot_git)?;

    // From here on, `NewDir`/`PathCursor` push a component and pop it again on drop,
    // so `dot_git` always points at the repository root between the scopes below.
    {
        let mut cursor = NewDir(&mut dot_git).at("info")?;
        write_file(TPL_INFO_EXCLUDE, PathCursor(cursor.as_mut()).at("exclude"))?;
    }

    {
        // Populate the sample hooks, mirroring `git init`.
        let mut cursor = NewDir(&mut dot_git).at("hooks")?;
        for (tpl, filename) in &[
            (TPL_HOOKS_UPDATE, "update.sample"),
            (TPL_HOOKS_PREPARE_COMMIT_MSG, "prepare-commit-msg.sample"),
            (TPL_HOOKS_PRE_RECEIVE, "pre-receive.sample"),
            (TPL_HOOKS_PRE_REBASE, "pre-rebase.sample"),
            (TPL_HOOKS_PRE_PUSH, "pre-push.sample"),
            (TPL_HOOKS_PRE_COMMIT, "pre-commit.sample"),
            (TPL_HOOKS_PRE_MERGE_COMMIT, "pre-merge-commit.sample"),
            (TPL_HOOKS_PRE_APPLYPATCH, "pre-applypatch.sample"),
            (TPL_HOOKS_POST_UPDATE, "post-update.sample"),
            (TPL_HOOKS_FSMONITOR_WATCHMAN, "fsmonitor-watchman.sample"),
            (TPL_HOOKS_COMMIT_MSG, "commit-msg.sample"),
            (TPL_HOOKS_APPLYPATCH_MSG, "applypatch-msg.sample"),
        ] {
            write_file(tpl, PathCursor(cursor.as_mut()).at(filename))?;
        }
    }

    {
        let mut cursor = NewDir(&mut dot_git).at("objects")?;
        create_dir(PathCursor(cursor.as_mut()).at("info"))?;
        create_dir(PathCursor(cursor.as_mut()).at("pack"))?;
    }

    {
        let mut cursor = NewDir(&mut dot_git).at("refs")?;
        create_dir(PathCursor(cursor.as_mut()).at("heads"))?;
        create_dir(PathCursor(cursor.as_mut()).at("tags"))?;
    }

    for (tpl, filename) in &[(TPL_HEAD, "HEAD"), (TPL_DESCRIPTION, "description")] {
        write_file(tpl, PathCursor(&mut dot_git).at(filename))?;
    }

    {
        // Write an initial `config` file whose `core` values reflect the given or probed filesystem capabilities.
        let mut config = gix_config::File::default();
        {
            let caps = fs_capabilities.unwrap_or_else(|| gix_worktree::fs::Capabilities::probe(&dot_git));
            let mut core = config.new_section("core", None).expect("valid section name");

            core.push(key("repositoryformatversion"), Some("0".into()));
            core.push(key("filemode"), Some(bool(caps.executable_bit).into()));
            core.push(key("bare"), Some(bool(bare).into()));
            core.push(key("logallrefupdates"), Some(bool(!bare).into()));
            core.push(key("symlinks"), Some(bool(caps.symlink).into()));
            core.push(key("ignorecase"), Some(bool(caps.ignore_case).into()));
            core.push(key("precomposeunicode"), Some(bool(caps.precompose_unicode).into()));
        }
        let mut cursor = PathCursor(&mut dot_git);
        let config_path = cursor.at("config");
        std::fs::write(config_path, config.to_bstring()).map_err(|err| Error::IoWrite {
            source: err,
            path: config_path.to_owned(),
        })?;
    }

    Ok(gix_discover::repository::Path::from_dot_git_dir(
        dot_git,
        if bare {
            gix_discover::repository::Kind::Bare
        } else {
            gix_discover::repository::Kind::WorkTree { linked_git_dir: None }
        },
        std::env::current_dir()?,
    )
    .expect("by now the `dot_git` dir is valid as we have accessed it"))
}
+
+fn key(name: &'static str) -> section::Key<'static> {
+ section::Key::try_from(name).expect("valid key name")
+}
+
/// Render a boolean as the git-config literal `"true"` or `"false"`.
fn bool(v: bool) -> &'static str {
    if v {
        "true"
    } else {
        "false"
    }
}
diff --git a/vendor/gix/src/discover.rs b/vendor/gix/src/discover.rs
new file mode 100644
index 000000000..fa0edfd5f
--- /dev/null
+++ b/vendor/gix/src/discover.rs
@@ -0,0 +1,88 @@
+#![allow(clippy::result_large_err)]
+use std::path::Path;
+
+pub use gix_discover::*;
+
+use crate::{bstr::BString, ThreadSafeRepository};
+
/// The error returned by [`crate::discover()`].
#[derive(Debug, thiserror::Error)]
#[allow(missing_docs)]
pub enum Error {
    /// The upward search for a repository failed.
    #[error(transparent)]
    Discover(#[from] upwards::Error),
    /// A repository was found but could not be opened.
    #[error(transparent)]
    Open(#[from] crate::open::Error),
}
+
impl ThreadSafeRepository {
    /// Try to open a git repository in `directory` and search upwards through its parents until one is found,
    /// using default trust options which matters in case the found repository isn't owned by the current user.
    pub fn discover(directory: impl AsRef<Path>) -> Result<Self, Error> {
        Self::discover_opts(directory, Default::default(), Default::default())
    }

    /// Try to open a git repository in `directory` and search upwards through its parents until one is found,
    /// while applying `options`. Then use the `trust_map` to determine which of our own repository options to use
    /// for instantiations.
    ///
    /// Note that [trust overrides](crate::open::Options::with()) in the `trust_map` are not effective here and we will
    /// always override it with the determined trust value. This is a precaution as the API user is unable to actually know
    /// if the directory that is discovered can indeed be trusted (or else they'd have to implement the discovery themselves
    /// and be sure that no attacker ever gets access to a directory structure. The cost of this is a permission check, which
    /// seems acceptable).
    pub fn discover_opts(
        directory: impl AsRef<Path>,
        options: upwards::Options<'_>,
        trust_map: gix_sec::trust::Mapping<crate::open::Options>,
    ) -> Result<Self, Error> {
        let (path, trust) = upwards_opts(directory, options)?;
        let (git_dir, worktree_dir) = path.into_repository_and_work_tree_directories();
        let mut options = trust_map.into_value_by_level(trust);
        // Enforce the trust level determined during discovery, ignoring any override in the map (see note above).
        options.git_dir_trust = trust.into();
        options.current_dir = Some(std::env::current_dir().map_err(upwards::Error::CurrentDir)?);
        Self::open_from_paths(git_dir, worktree_dir, options).map_err(Into::into)
    }

    /// Try to open a git repository directly from the environment.
    /// If that fails, discover upwards from `directory` until one is found,
    /// while applying discovery options from the environment.
    pub fn discover_with_environment_overrides(directory: impl AsRef<Path>) -> Result<Self, Error> {
        Self::discover_with_environment_overrides_opts(directory, Default::default(), Default::default())
    }

    /// Try to open a git repository directly from the environment, which reads `GIT_DIR`
    /// if it is set. If unset, discover upwards from `directory` until one is found,
    /// while applying `options` with overrides from the environment which includes:
    ///
    /// - `GIT_DISCOVERY_ACROSS_FILESYSTEM`
    /// - `GIT_CEILING_DIRECTORIES`
    ///
    /// Finally, use the `trust_map` to determine which of our own repository options to use
    /// based on the trust level of the effective repository directory.
    pub fn discover_with_environment_overrides_opts(
        directory: impl AsRef<Path>,
        mut options: upwards::Options<'_>,
        trust_map: gix_sec::trust::Mapping<crate::open::Options>,
    ) -> Result<Self, Error> {
        // Overlay discovery options that are only available as environment variables.
        fn apply_additional_environment(mut opts: upwards::Options<'_>) -> upwards::Options<'_> {
            use crate::bstr::ByteVec;

            if let Some(cross_fs) = std::env::var_os("GIT_DISCOVERY_ACROSS_FILESYSTEM")
                .and_then(|v| Vec::from_os_string(v).ok().map(BString::from))
            {
                // Interpret the variable like a git boolean; silently ignore unparseable values.
                if let Ok(b) = gix_config::Boolean::try_from(cross_fs.as_ref()) {
                    opts.cross_fs = b.into();
                }
            }
            opts
        }

        // A set `GIT_DIR` short-circuits discovery entirely and opens that location directly.
        if std::env::var_os("GIT_DIR").is_some() {
            return Self::open_with_environment_overrides(directory.as_ref(), trust_map).map_err(Error::Open);
        }

        options = apply_additional_environment(options.apply_environment());
        Self::discover_opts(directory, options, trust_map)
    }
}
diff --git a/vendor/gix/src/env.rs b/vendor/gix/src/env.rs
new file mode 100644
index 000000000..4c61ceb4e
--- /dev/null
+++ b/vendor/gix/src/env.rs
@@ -0,0 +1,129 @@
+//! Utilities to handle program arguments and other values of interest.
+use std::ffi::{OsStr, OsString};
+
+use crate::bstr::{BString, ByteVec};
+
/// Returns the name of the agent for identification towards a remote server as statically known when compiling the crate.
/// Suitable for both `git` servers and HTTP servers, and used unless configured otherwise.
///
/// Note that it's meant to be used in conjunction with [`protocol::agent()`][crate::protocol::agent()] which
/// prepends `git/`.
pub fn agent() -> &'static str {
    // The crate version is baked in at compile time from the Cargo manifest.
    concat!("oxide-", env!("CARGO_PKG_VERSION"))
}
+
/// Equivalent to `std::env::args_os()`, but with precomposed unicode on MacOS and other apple platforms.
#[cfg(not(target_vendor = "apple"))]
pub fn args_os() -> impl Iterator<Item = OsString> {
    // On non-apple platforms, arguments are passed through unchanged.
    std::env::args_os()
}
+
/// Equivalent to `std::env::args_os()`, but with precomposed unicode on MacOS and other apple platforms.
///
/// Note that this ignores `core.precomposeUnicode` as gix-config isn't available yet. It's default enabled in modern git though.
#[cfg(target_vendor = "apple")]
pub fn args_os() -> impl Iterator<Item = OsString> {
    use unicode_normalization::UnicodeNormalization;
    std::env::args_os().map(|arg| match arg.to_str() {
        // Valid UTF-8 arguments are normalized to NFC (precomposed) form.
        Some(arg) => arg.nfc().collect::<String>().into(),
        // Non-UTF-8 arguments are passed through unchanged.
        None => arg,
    })
}
+
+/// Convert the given `input` into a `BString`, useful for usage in `clap`.
+pub fn os_str_to_bstring(input: &OsStr) -> Option<BString> {
+ Vec::from_os_string(input.into()).map(Into::into).ok()
+}
+
/// Utilities to collate errors of common operations into one error type.
///
/// This is useful as this type can present an API to answer common questions, like whether a network request seems to have failed
/// spuriously or if the underlying repository seems to be corrupted.
/// Error collation supports all operations, including opening the repository.
///
/// ### Usage
///
/// The caller may define a function that specifies the result type as `Result<T, gix::env::collate::{operation}::Error>` to collect
/// errors into a well-known error type which provides an API for simple queries.
pub mod collate {

    ///
    pub mod fetch {
        /// An error which combines all possible errors when opening a repository, finding remotes and using them to fetch.
        ///
        /// It can be used to detect if the repository is likely be corrupted in some way, or if the fetch failed spuriously
        /// and thus can be retried.
        #[derive(Debug, thiserror::Error)]
        #[allow(missing_docs)]
        pub enum Error<E: std::error::Error + Send + Sync + 'static = std::convert::Infallible> {
            #[error(transparent)]
            Open(#[from] crate::open::Error),
            #[error(transparent)]
            FindExistingReference(#[from] crate::reference::find::existing::Error),
            #[error(transparent)]
            RemoteInit(#[from] crate::remote::init::Error),
            #[error(transparent)]
            FindExistingRemote(#[from] crate::remote::find::existing::Error),
            #[error(transparent)]
            CredentialHelperConfig(#[from] crate::config::credential_helpers::Error),
            #[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
            #[error(transparent)]
            Connect(#[from] crate::remote::connect::Error),
            #[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
            #[error(transparent)]
            PrepareFetch(#[from] crate::remote::fetch::prepare::Error),
            #[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
            #[error(transparent)]
            Fetch(#[from] crate::remote::fetch::Error),
            /// A caller-provided error, defaulting to `Infallible` when unused.
            #[error(transparent)]
            Other(E),
        }

        #[cfg(any(feature = "async-network-client", feature = "blocking-network-client"))]
        impl<E> crate::protocol::transport::IsSpuriousError for Error<E>
        where
            E: std::error::Error + Send + Sync + 'static,
        {
            fn is_spurious(&self) -> bool {
                // Only the network-facing stages can fail spuriously; defer to their own classification.
                match self {
                    Error::Open(_)
                    | Error::CredentialHelperConfig(_)
                    | Error::RemoteInit(_)
                    | Error::FindExistingReference(_)
                    | Error::FindExistingRemote(_)
                    | Error::Other(_) => false,
                    Error::Connect(err) => err.is_spurious(),
                    Error::PrepareFetch(err) => err.is_spurious(),
                    Error::Fetch(err) => err.is_spurious(),
                }
            }
        }

        /// Queries
        impl<E> Error<E>
        where
            E: std::error::Error + Send + Sync + 'static,
        {
            /// Return true if repository corruption caused the failure.
            pub fn is_corrupted(&self) -> bool {
                match self {
                    Error::Open(crate::open::Error::NotARepository { .. } | crate::open::Error::Config(_)) => true,
                    #[cfg(any(feature = "async-network-client", feature = "blocking-network-client"))]
                    Error::PrepareFetch(crate::remote::fetch::prepare::Error::RefMap(
                        // Configuration couldn't be accessed or was incomplete.
                        crate::remote::ref_map::Error::GatherTransportConfig { .. }
                        | crate::remote::ref_map::Error::ConfigureCredentials(_),
                    )) => true,
                    // Maybe the value of the configuration was corrupted, or a file couldn't be removed.
                    #[cfg(any(feature = "async-network-client", feature = "blocking-network-client"))]
                    Error::Fetch(
                        crate::remote::fetch::Error::PackThreads(_)
                        | crate::remote::fetch::Error::PackIndexVersion(_)
                        | crate::remote::fetch::Error::RemovePackKeepFile { .. },
                    ) => true,
                    _ => false,
                }
            }
        }
    }
}
diff --git a/vendor/gix/src/ext/mod.rs b/vendor/gix/src/ext/mod.rs
new file mode 100644
index 000000000..beb9007fa
--- /dev/null
+++ b/vendor/gix/src/ext/mod.rs
@@ -0,0 +1,9 @@
+pub use object_id::ObjectIdExt;
+pub use reference::ReferenceExt;
+pub use rev_spec::RevSpecExt;
+pub use tree::TreeIterExt;
+
+mod object_id;
+mod reference;
+mod rev_spec;
+mod tree;
diff --git a/vendor/gix/src/ext/object_id.rs b/vendor/gix/src/ext/object_id.rs
new file mode 100644
index 000000000..a4515022b
--- /dev/null
+++ b/vendor/gix/src/ext/object_id.rs
@@ -0,0 +1,34 @@
+use gix_hash::ObjectId;
+use gix_traverse::commit::{ancestors, Ancestors};
+
/// Prevents external implementations of [`ObjectIdExt`].
pub trait Sealed {}

/// The iterator type returned by [`ObjectIdExt::ancestors()`].
pub type AncestorsIter<Find> = Ancestors<Find, fn(&gix_hash::oid) -> bool, ancestors::State>;

/// An extension trait to add functionality to [`ObjectId`]s.
pub trait ObjectIdExt: Sealed {
    /// Create an iterator over the ancestry of the commits reachable from this id, which must be a commit.
    fn ancestors<Find, E>(self, find: Find) -> AncestorsIter<Find>
    where
        Find: for<'a> FnMut(&gix_hash::oid, &'a mut Vec<u8>) -> Result<gix_object::CommitRefIter<'a>, E>,
        E: std::error::Error + Send + Sync + 'static;

    /// Infuse this object id with `repo` access.
    fn attach(self, repo: &crate::Repository) -> crate::Id<'_>;
}

// Only `ObjectId` may implement the extension trait, by virtue of the `Sealed` supertrait.
impl Sealed for ObjectId {}

impl ObjectIdExt for ObjectId {
    fn ancestors<Find, E>(self, find: Find) -> AncestorsIter<Find>
    where
        Find: for<'a> FnMut(&gix_hash::oid, &'a mut Vec<u8>) -> Result<gix_object::CommitRefIter<'a>, E>,
        E: std::error::Error + Send + Sync + 'static,
    {
        // Start the traversal from this single id with a fresh traversal state.
        Ancestors::new(Some(self), ancestors::State::default(), find)
    }

    fn attach(self, repo: &crate::Repository) -> crate::Id<'_> {
        crate::Id::from_id(self, repo)
    }
}
diff --git a/vendor/gix/src/ext/reference.rs b/vendor/gix/src/ext/reference.rs
new file mode 100644
index 000000000..57e4e4fe7
--- /dev/null
+++ b/vendor/gix/src/ext/reference.rs
@@ -0,0 +1,15 @@
/// Marker trait intended to restrict implementations of [`ReferenceExt`].
// NOTE(review): `ReferenceExt` below has no `Sealed` supertrait, so this does not actually
// seal the trait — confirm whether the bound was meant to be added.
pub trait Sealed {}

impl Sealed for gix_ref::Reference {}

/// Extensions for [references][gix_ref::Reference].
pub trait ReferenceExt {
    /// Attach [`Repository`][crate::Repository] to the given reference. It can be detached later with [`detach()`].
    fn attach(self, repo: &crate::Repository) -> crate::Reference<'_>;
}

impl ReferenceExt for gix_ref::Reference {
    fn attach(self, repo: &crate::Repository) -> crate::Reference<'_> {
        crate::Reference::from_ref(self, repo)
    }
}
diff --git a/vendor/gix/src/ext/rev_spec.rs b/vendor/gix/src/ext/rev_spec.rs
new file mode 100644
index 000000000..ed7dc0460
--- /dev/null
+++ b/vendor/gix/src/ext/rev_spec.rs
@@ -0,0 +1,20 @@
/// Marker trait intended to restrict implementations of [`RevSpecExt`].
pub trait Sealed {}

// NOTE(review): `Sealed` is implemented for `gix_ref::Reference` (looks copy-pasted from
// `ext/reference.rs`) while the trait below is implemented for `gix_revision::Spec`, and
// `RevSpecExt` has no `Sealed` supertrait — so the trait is not actually sealed. Confirm intent.
impl Sealed for gix_ref::Reference {}

/// Extensions for [revision specifications][gix_revision::Spec].
pub trait RevSpecExt {
    /// Attach [`Repository`][crate::Repository] to the given rev-spec.
    fn attach(self, repo: &crate::Repository) -> crate::revision::Spec<'_>;
}

impl RevSpecExt for gix_revision::Spec {
    fn attach(self, repo: &crate::Repository) -> crate::revision::Spec<'_> {
        crate::revision::Spec {
            inner: self,
            first_ref: None,
            second_ref: None,
            repo,
        }
    }
}
diff --git a/vendor/gix/src/ext/tree.rs b/vendor/gix/src/ext/tree.rs
new file mode 100644
index 000000000..09220fc40
--- /dev/null
+++ b/vendor/gix/src/ext/tree.rs
@@ -0,0 +1,44 @@
+use std::borrow::BorrowMut;
+
+use gix_hash::oid;
+use gix_object::TreeRefIter;
+use gix_traverse::tree::breadthfirst;
+
/// Prevents external implementations of [`TreeIterExt`].
pub trait Sealed {}

/// An extension trait for tree iterators
pub trait TreeIterExt: Sealed {
    /// Traverse this tree with `state` being provided to potentially reuse allocations, and `find` being a function to lookup trees
    /// and turn them into iterators.
    ///
    /// The `delegate` implements a way to store details about the traversal to allow paying only for what's actually used.
    /// Since it is expected to store the operation result, _unit_ is returned.
    fn traverse<StateMut, Find, V>(
        &self,
        state: StateMut,
        find: Find,
        delegate: &mut V,
    ) -> Result<(), breadthfirst::Error>
    where
        Find: for<'a> FnMut(&oid, &'a mut Vec<u8>) -> Option<TreeRefIter<'a>>,
        StateMut: BorrowMut<breadthfirst::State>,
        V: gix_traverse::tree::Visit;
}

impl<'d> Sealed for TreeRefIter<'d> {}

impl<'d> TreeIterExt for TreeRefIter<'d> {
    fn traverse<StateMut, Find, V>(
        &self,
        state: StateMut,
        find: Find,
        delegate: &mut V,
    ) -> Result<(), breadthfirst::Error>
    where
        Find: for<'a> FnMut(&oid, &'a mut Vec<u8>) -> Option<TreeRefIter<'a>>,
        StateMut: BorrowMut<breadthfirst::State>,
        V: gix_traverse::tree::Visit,
    {
        // Delegate to the breadth-first traversal; the iterator is cloned as `breadthfirst` consumes it.
        breadthfirst(self.clone(), state, find, delegate)
    }
}
diff --git a/vendor/gix/src/head/log.rs b/vendor/gix/src/head/log.rs
new file mode 100644
index 000000000..6aa7ed1d3
--- /dev/null
+++ b/vendor/gix/src/head/log.rs
@@ -0,0 +1,35 @@
+use std::convert::TryInto;
+
+use gix_hash::ObjectId;
+
+use crate::{
+ bstr::{BString, ByteSlice},
+ Head,
+};
+
impl<'repo> Head<'repo> {
    /// Return a platform for obtaining iterators on the reference log associated with the `HEAD` reference.
    pub fn log_iter(&self) -> gix_ref::file::log::iter::Platform<'static, 'repo> {
        gix_ref::file::log::iter::Platform {
            store: &self.repo.refs,
            name: "HEAD".try_into().expect("HEAD is always valid"),
            buf: Vec::new(),
        }
    }

    /// Return a list of all branch names that were previously checked out with the first-ever checked out branch
    /// being the first entry of the list, and the most recent is the last, along with the commit they were pointing to
    /// at the time.
    ///
    /// Returns `Ok(None)` if no reflog is available, as signalled by `log_iter().all()`.
    pub fn prior_checked_out_branches(&self) -> std::io::Result<Option<Vec<(BString, ObjectId)>>> {
        Ok(self.log_iter().all()?.map(|log| {
            log.filter_map(Result::ok)
                .filter_map(|line| {
                    // Checkout entries read "checkout: moving from <from> to <to>" — extract the
                    // `<from>` branch name and pair it with the commit it pointed to at that time.
                    line.message
                        .strip_prefix(b"checkout: moving from ")
                        .and_then(|from_to| from_to.find(" to ").map(|pos| &from_to[..pos]))
                        .map(|from_branch| (from_branch.as_bstr().to_owned(), line.previous_oid()))
                })
                .collect()
        }))
    }
}
diff --git a/vendor/gix/src/head/mod.rs b/vendor/gix/src/head/mod.rs
new file mode 100644
index 000000000..094e78a86
--- /dev/null
+++ b/vendor/gix/src/head/mod.rs
@@ -0,0 +1,122 @@
+//!
+use std::convert::TryInto;
+
+use gix_hash::ObjectId;
+use gix_ref::FullNameRef;
+
+use crate::{
+ ext::{ObjectIdExt, ReferenceExt},
+ Head,
+};
+
/// Represents the kind of `HEAD` reference, as stored in a [`Head`].
#[derive(Clone)]
pub enum Kind {
    /// The existing reference the symbolic HEAD points to.
    ///
    /// This is the common case.
    Symbolic(gix_ref::Reference),
    /// The yet-to-be-created reference the symbolic HEAD refers to.
    ///
    /// This is the case in a newly initialized repository.
    Unborn(gix_ref::FullName),
    /// The head points to an object directly, not to a symbolic reference.
    ///
    /// This state is less common and can occur when checking out commits directly.
    Detached {
        /// The object to which the head points to
        target: ObjectId,
        /// Possibly the final destination of `target` after following the object chain from tag objects to commits.
        peeled: Option<ObjectId>,
    },
}

impl Kind {
    /// Attach this instance to a `repo` to produce a [`Head`].
    pub fn attach(self, repo: &crate::Repository) -> Head<'_> {
        Head { kind: self, repo }
    }
}
+
/// Access
impl<'repo> Head<'repo> {
    /// Returns the name of this reference, always `HEAD`.
    pub fn name(&self) -> &'static FullNameRef {
        // TODO: use a statically checked version of this when available.
        "HEAD".try_into().expect("HEAD is valid")
    }

    /// Returns the full reference name of this head if it is not detached, or `None` otherwise.
    pub fn referent_name(&self) -> Option<&FullNameRef> {
        Some(match &self.kind {
            Kind::Symbolic(r) => r.name.as_ref(),
            Kind::Unborn(name) => name.as_ref(),
            Kind::Detached { .. } => return None,
        })
    }

    /// Returns true if this instance is detached, and points to an object directly.
    pub fn is_detached(&self) -> bool {
        matches!(self.kind, Kind::Detached { .. })
    }

    /// Returns true if this instance is not yet born, hence it points to a ref that doesn't exist yet.
    ///
    /// This is the case in a newly initialized repository.
    pub fn is_unborn(&self) -> bool {
        matches!(self.kind, Kind::Unborn(_))
    }

    // TODO: tests
    /// Returns the id the head points to, which isn't possible on unborn heads.
    pub fn id(&self) -> Option<crate::Id<'repo>> {
        match &self.kind {
            Kind::Symbolic(r) => r.target.try_id().map(|oid| oid.to_owned().attach(self.repo)),
            // Prefer the previously peeled id if available, otherwise the direct target.
            Kind::Detached { peeled, target } => {
                (*peeled).unwrap_or_else(|| target.to_owned()).attach(self.repo).into()
            }
            Kind::Unborn(_) => None,
        }
    }

    /// Try to transform this instance into the symbolic reference that it points to, or return `None` if head is detached or unborn.
    pub fn try_into_referent(self) -> Option<crate::Reference<'repo>> {
        match self.kind {
            Kind::Symbolic(r) => r.attach(self.repo).into(),
            _ => None,
        }
    }
}
+
mod remote {
    use super::Head;
    use crate::{remote, Remote};

    /// Remote
    impl<'repo> Head<'repo> {
        /// Return the remote with which the currently checked out reference can be handled as configured by `branch.<name>.remote|pushRemote`
        /// or fall back to the non-branch specific remote configuration. `None` is returned if the head is detached or unborn, so there is
        /// no branch specific remote.
        ///
        /// This is equivalent to calling [`Reference::remote(…)`][crate::Reference::remote()] and
        /// [`Repository::remote_default_name()`][crate::Repository::remote_default_name()] in order.
        ///
        /// Combine it with [`find_default_remote()`][crate::Repository::find_default_remote()] as fallback to handle detached heads,
        /// i.e. obtain a remote even in case of detached heads.
        pub fn into_remote(
            self,
            direction: remote::Direction,
        ) -> Option<Result<Remote<'repo>, remote::find::existing::Error>> {
            let repo = self.repo;
            // First try the branch-specific remote, then the repository-wide default.
            self.try_into_referent()?
                .remote(direction)
                .or_else(|| repo.find_default_remote(direction))
        }
    }
}
+
+///
+pub mod log;
+
+///
+pub mod peel;
diff --git a/vendor/gix/src/head/peel.rs b/vendor/gix/src/head/peel.rs
new file mode 100644
index 000000000..65a876bc4
--- /dev/null
+++ b/vendor/gix/src/head/peel.rs
@@ -0,0 +1,119 @@
+use crate::{
+ ext::{ObjectIdExt, ReferenceExt},
+ Head,
+};
+
mod error {
    use crate::{object, reference};

    /// The error returned by [Head::peel_to_id_in_place()][super::Head::peel_to_id_in_place()] and [Head::into_fully_peeled_id()][super::Head::into_fully_peeled_id()].
    #[derive(Debug, thiserror::Error)]
    #[allow(missing_docs)]
    pub enum Error {
        /// An object that was expected to exist could not be found.
        #[error(transparent)]
        FindExistingObject(#[from] object::find::existing::Error),
        /// Peeling the symbolic reference failed.
        #[error(transparent)]
        PeelReference(#[from] reference::peel::Error),
    }
}
+
+pub use error::Error;
+
+use crate::head::Kind;
+
+///
pub mod to_commit {
    use crate::object;

    /// The error returned by [Head::peel_to_commit_in_place()][super::Head::peel_to_commit_in_place()].
    #[derive(Debug, thiserror::Error)]
    #[allow(missing_docs)]
    pub enum Error {
        /// Peeling the head to its target id failed.
        #[error(transparent)]
        Peel(#[from] super::Error),
        /// The head points to a branch that has no commits yet.
        #[error("Branch '{name}' does not have any commits")]
        Unborn { name: gix_ref::FullName },
        /// The peeled object was not a commit.
        #[error(transparent)]
        ObjectKind(#[from] object::try_into::Error),
    }
}
+
impl<'repo> Head<'repo> {
    // TODO: tests
    /// Peel this instance to make obtaining its final target id possible, while returning an error on unborn heads.
    pub fn peeled(mut self) -> Result<Self, Error> {
        // `transpose()` turns `Option<Result<_, _>>` into `Result<Option<_>, _>` so only
        // actual errors abort — an unborn head (`None`) is passed through unchanged.
        self.peel_to_id_in_place().transpose()?;
        Ok(self)
    }

    // TODO: tests
    /// Follow the symbolic reference of this head until its target object and peel it by following tag objects until there is no
    /// more object to follow, and return that object id.
    ///
    /// Returns `None` if the head is unborn.
    pub fn peel_to_id_in_place(&mut self) -> Option<Result<crate::Id<'repo>, Error>> {
        Some(match &mut self.kind {
            // Unborn heads have no object to peel to.
            Kind::Unborn(_name) => return None,
            // A previously peeled id is cached — reuse it.
            Kind::Detached {
                peeled: Some(peeled), ..
            } => Ok((*peeled).attach(self.repo)),
            Kind::Detached { peeled: None, target } => {
                match target
                    .attach(self.repo)
                    .object()
                    .map_err(Into::into)
                    .and_then(|obj| obj.peel_tags_to_end().map_err(Into::into))
                    .map(|peeled| peeled.id)
                {
                    Ok(peeled) => {
                        // Cache the peeled id for subsequent calls.
                        self.kind = Kind::Detached {
                            peeled: Some(peeled),
                            target: *target,
                        };
                        Ok(peeled.attach(self.repo))
                    }
                    Err(err) => Err(err),
                }
            }
            Kind::Symbolic(r) => {
                // Temporarily attach a clone of the reference to peel it in place,
                // then store the updated (peeled) state back into this head.
                let mut nr = r.clone().attach(self.repo);
                let peeled = nr.peel_to_id_in_place().map_err(Into::into);
                *r = nr.detach();
                peeled
            }
        })
    }

    // TODO: tests
    // TODO: something similar in `crate::Reference`
    /// Follow the symbolic reference of this head until its target object and peel it by following tag objects until there is no
    /// more object to follow, transform the id into a commit if possible and return that.
    ///
    /// Returns an error if the head is unborn or if it doesn't point to a commit.
    pub fn peel_to_commit_in_place(&mut self) -> Result<crate::Commit<'repo>, to_commit::Error> {
        let id = self.peel_to_id_in_place().ok_or_else(|| to_commit::Error::Unborn {
            name: self.referent_name().expect("unborn").to_owned(),
        })??;
        id.object()
            .map_err(|err| to_commit::Error::Peel(Error::FindExistingObject(err)))
            .and_then(|object| object.try_into_commit().map_err(Into::into))
    }

    /// Consume this instance and transform it into the final object that it points to, or `None` if the `HEAD`
    /// reference is yet to be born.
    pub fn into_fully_peeled_id(self) -> Option<Result<crate::Id<'repo>, Error>> {
        Some(match self.kind {
            Kind::Unborn(_name) => return None,
            Kind::Detached {
                peeled: Some(peeled), ..
            } => Ok(peeled.attach(self.repo)),
            Kind::Detached { peeled: None, target } => target
                .attach(self.repo)
                .object()
                .map_err(Into::into)
                .and_then(|obj| obj.peel_tags_to_end().map_err(Into::into))
                .map(|obj| obj.id.attach(self.repo)),
            Kind::Symbolic(r) => r.attach(self.repo).peel_to_id_in_place().map_err(Into::into),
        })
    }
}
diff --git a/vendor/gix/src/id.rs b/vendor/gix/src/id.rs
new file mode 100644
index 000000000..c57565fb5
--- /dev/null
+++ b/vendor/gix/src/id.rs
@@ -0,0 +1,195 @@
+//!
+use std::ops::Deref;
+
+use gix_hash::{oid, ObjectId};
+
+use crate::{object::find, revision, Id, Object};
+
+/// An [object id][ObjectId] infused with `Easy`.
+impl<'repo> Id<'repo> {
+ /// Find the [`Object`] associated with this object id, and consider it an error if it doesn't exist.
+ ///
+ /// # Note
+ ///
+ /// There can only be one `ObjectRef` per `Easy`. To increase that limit, clone the `Easy`.
+ pub fn object(&self) -> Result<Object<'repo>, find::existing::Error> {
+ self.repo.find_object(self.inner)
+ }
+
+ /// Try to find the [`Object`] associated with this object id, and return `None` if it's not available locally.
+ ///
+ /// # Note
+ ///
+ /// There can only be one `ObjectRef` per `Easy`. To increase that limit, clone the `Easy`.
+ pub fn try_object(&self) -> Result<Option<Object<'repo>>, find::Error> {
+ self.repo.try_find_object(self.inner)
+ }
+
+ /// Turn this object id into a shortened id with a length in hex as configured by `core.abbrev`.
+ pub fn shorten(&self) -> Result<gix_hash::Prefix, shorten::Error> {
+ let hex_len = self.repo.config.hex_len.map_or_else(
+ || self.repo.objects.packed_object_count().map(calculate_auto_hex_len),
+ Ok,
+ )?;
+
+ let prefix = gix_odb::store::prefix::disambiguate::Candidate::new(self.inner, hex_len)
+ .expect("BUG: internal hex-len must always be valid");
+ self.repo
+ .objects
+ .disambiguate_prefix(prefix)?
+ .ok_or(shorten::Error::NotFound { oid: self.inner })
+ }
+
+ /// Turn this object id into a shortened id with a length in hex as configured by `core.abbrev`, or default
+ /// to a prefix which equals our id in the unlikely error case.
+ pub fn shorten_or_id(&self) -> gix_hash::Prefix {
+ self.shorten().unwrap_or_else(|_| self.inner.into())
+ }
+}
+
+fn calculate_auto_hex_len(num_packed_objects: u64) -> usize {
+ let mut len = 64 - num_packed_objects.leading_zeros();
+ len = (len + 1) / 2;
+ len.max(7) as usize
+}
+
+///
+pub mod shorten {
+ /// Returned by [`Id::prefix()`][super::Id::shorten()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ PackedObjectsCount(#[from] gix_odb::store::load_index::Error),
+ #[error(transparent)]
+ DisambiguatePrefix(#[from] gix_odb::store::prefix::disambiguate::Error),
+ #[error("Id could not be shortened as the object with id {} could not be found", .oid)]
+ NotFound { oid: gix_hash::ObjectId },
+ }
+}
+
+impl<'repo> Deref for Id<'repo> {
+ type Target = oid;
+
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+
+impl<'repo> Id<'repo> {
+ pub(crate) fn from_id(id: impl Into<ObjectId>, repo: &'repo crate::Repository) -> Self {
+ Id { inner: id.into(), repo }
+ }
+
+ /// Turn this instance into its bare [ObjectId].
+ pub fn detach(self) -> ObjectId {
+ self.inner
+ }
+}
+
+impl<'repo> Id<'repo> {
+ /// Obtain a platform for traversing ancestors of this commit.
+ ///
+ /// Note that unless [`error_on_missing_commit()`][revision::Walk::error_on_missing_commit()] is enabled, which by default it is not,
+ /// one will always see an empty iteration even if this id is not a commit, instead of an error.
+ /// If this is undesirable, it's best to check for the correct object type before creating an iterator.
+ pub fn ancestors(&self) -> revision::walk::Platform<'repo> {
+ revision::walk::Platform::new(Some(self.inner), self.repo)
+ }
+}
+
+mod impls {
+ use std::{cmp::Ordering, hash::Hasher};
+
+ use gix_hash::{oid, ObjectId};
+
+ use crate::{Id, Object, ObjectDetached};
+
+ // Eq, Hash, Ord, PartialOrd,
+
+ impl<'a> std::hash::Hash for Id<'a> {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.inner.hash(state)
+ }
+ }
+
+ impl<'a> PartialOrd<Id<'a>> for Id<'a> {
+ fn partial_cmp(&self, other: &Id<'a>) -> Option<Ordering> {
+ self.inner.partial_cmp(&other.inner)
+ }
+ }
+
+ impl<'repo> PartialEq<Id<'repo>> for Id<'repo> {
+ fn eq(&self, other: &Id<'repo>) -> bool {
+ self.inner == other.inner
+ }
+ }
+
+ impl<'repo> PartialEq<ObjectId> for Id<'repo> {
+ fn eq(&self, other: &ObjectId) -> bool {
+ &self.inner == other
+ }
+ }
+
+ impl<'repo> PartialEq<Id<'repo>> for ObjectId {
+ fn eq(&self, other: &Id<'repo>) -> bool {
+ self == &other.inner
+ }
+ }
+
+ impl<'repo> PartialEq<oid> for Id<'repo> {
+ fn eq(&self, other: &oid) -> bool {
+ self.inner == other
+ }
+ }
+
+ impl<'repo> PartialEq<Object<'repo>> for Id<'repo> {
+ fn eq(&self, other: &Object<'repo>) -> bool {
+ self.inner == other.id
+ }
+ }
+
+ impl<'repo> PartialEq<ObjectDetached> for Id<'repo> {
+ fn eq(&self, other: &ObjectDetached) -> bool {
+ self.inner == other.id
+ }
+ }
+
+ impl<'repo> std::fmt::Debug for Id<'repo> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ self.inner.fmt(f)
+ }
+ }
+
+ impl<'repo> std::fmt::Display for Id<'repo> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ self.inner.fmt(f)
+ }
+ }
+
+ impl<'repo> AsRef<oid> for Id<'repo> {
+ fn as_ref(&self) -> &oid {
+ &self.inner
+ }
+ }
+
+ impl<'repo> From<Id<'repo>> for ObjectId {
+ fn from(v: Id<'repo>) -> Self {
+ v.inner
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn size_of_oid() {
+ assert_eq!(
+ std::mem::size_of::<Id<'_>>(),
+ 32,
+ "size of oid shouldn't change without notice"
+ )
+ }
+}
diff --git a/vendor/gix/src/init.rs b/vendor/gix/src/init.rs
new file mode 100644
index 000000000..d04de0806
--- /dev/null
+++ b/vendor/gix/src/init.rs
@@ -0,0 +1,101 @@
+#![allow(clippy::result_large_err)]
+use std::{borrow::Cow, convert::TryInto, path::Path};
+
+use gix_ref::{
+ store::WriteReflog,
+ transaction::{PreviousValue, RefEdit},
+ FullName, Target,
+};
+
+use crate::{bstr::BString, config::tree::Init, ThreadSafeRepository};
+
+/// The name of the branch to use if none is configured via git configuration.
+///
+/// # Deviation
+///
+/// We use `main` instead of `master`.
+pub const DEFAULT_BRANCH_NAME: &str = "main";
+
+/// The error returned by [`crate::init()`].
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum Error {
+ #[error("Could not obtain the current directory")]
+ CurrentDir(#[from] std::io::Error),
+ #[error(transparent)]
+ Init(#[from] crate::create::Error),
+ #[error(transparent)]
+ Open(#[from] crate::open::Error),
+ #[error("Invalid default branch name: {name:?}")]
+ InvalidBranchName {
+ name: BString,
+ source: gix_validate::refname::Error,
+ },
+ #[error("Could not edit HEAD reference with new default name")]
+ EditHeadForDefaultBranch(#[from] crate::reference::edit::Error),
+}
+
+impl ThreadSafeRepository {
+ /// Create a repository with work-tree within `directory`, creating intermediate directories as needed.
+ ///
+ /// Fails without action if there is already a `.git` repository inside of `directory`, but
+ /// won't mind if the `directory` otherwise is non-empty.
+ pub fn init(
+ directory: impl AsRef<Path>,
+ kind: crate::create::Kind,
+ options: crate::create::Options,
+ ) -> Result<Self, Error> {
+ use gix_sec::trust::DefaultForLevel;
+ let open_options = crate::open::Options::default_for_level(gix_sec::Trust::Full);
+ Self::init_opts(directory, kind, options, open_options)
+ }
+
+ /// Similar to [`init`][Self::init()], but allows to determine how exactly to open the newly created repository.
+ ///
+ /// # Deviation
+ ///
+ /// Instead of naming the default branch `master`, we name it `main` unless configured explicitly using the `init.defaultBranch`
+ /// configuration key.
+ pub fn init_opts(
+ directory: impl AsRef<Path>,
+ kind: crate::create::Kind,
+ create_options: crate::create::Options,
+ mut open_options: crate::open::Options,
+ ) -> Result<Self, Error> {
+ let path = crate::create::into(directory.as_ref(), kind, create_options)?;
+ let (git_dir, worktree_dir) = path.into_repository_and_work_tree_directories();
+ open_options.git_dir_trust = Some(gix_sec::Trust::Full);
+ open_options.current_dir = std::env::current_dir()?.into();
+ let repo = ThreadSafeRepository::open_from_paths(git_dir, worktree_dir, open_options)?;
+
+ let branch_name = repo
+ .config
+ .resolved
+ .string("init", None, Init::DEFAULT_BRANCH.name)
+ .unwrap_or_else(|| Cow::Borrowed(DEFAULT_BRANCH_NAME.into()));
+ if branch_name.as_ref() != DEFAULT_BRANCH_NAME {
+ let sym_ref: FullName =
+ format!("refs/heads/{branch_name}")
+ .try_into()
+ .map_err(|err| Error::InvalidBranchName {
+ name: branch_name.into_owned(),
+ source: err,
+ })?;
+ let mut repo = repo.to_thread_local();
+ let prev_write_reflog = repo.refs.write_reflog;
+ repo.refs.write_reflog = WriteReflog::Disable;
+ repo.edit_reference(RefEdit {
+ change: gix_ref::transaction::Change::Update {
+ log: Default::default(),
+ expected: PreviousValue::Any,
+ new: Target::Symbolic(sym_ref),
+ },
+ name: "HEAD".try_into().expect("valid"),
+ deref: false,
+ })?;
+ repo.refs.write_reflog = prev_write_reflog;
+ }
+
+ Ok(repo)
+ }
+}
diff --git a/vendor/gix/src/interrupt.rs b/vendor/gix/src/interrupt.rs
new file mode 100644
index 000000000..c94cbdbfa
--- /dev/null
+++ b/vendor/gix/src/interrupt.rs
@@ -0,0 +1,223 @@
+//! Process-global interrupt handling
+//!
+//! This module contains facilities to globally request an interrupt, which will cause supporting computations to
+//! abort once it is observed.
+//! Such checks for interrupts are provided in custom implementations of various traits to transparently add interrupt
+//! support to methods that wouldn't otherwise have it, by injecting it. See [`Read`].
+
+mod init {
+ use std::{
+ io,
+ sync::atomic::{AtomicBool, AtomicUsize, Ordering},
+ };
+
+ static IS_INITIALIZED: AtomicBool = AtomicBool::new(false);
+
+ #[derive(Default)]
+ pub struct Deregister(Vec<(i32, signal_hook::SigId)>);
+ pub struct AutoDeregister(Deregister);
+
+ impl Deregister {
+ /// Remove all previously registered handlers, and assure the default behaviour is reinstated.
+ ///
+ /// Note that only the instantiation of the default behaviour can fail.
+ pub fn deregister(self) -> std::io::Result<()> {
+ if self.0.is_empty() {
+ return Ok(());
+ }
+ static REINSTATE_DEFAULT_BEHAVIOUR: AtomicBool = AtomicBool::new(true);
+ for (_, hook_id) in &self.0 {
+ signal_hook::low_level::unregister(*hook_id);
+ }
+ IS_INITIALIZED.store(false, Ordering::SeqCst);
+ if REINSTATE_DEFAULT_BEHAVIOUR
+ .fetch_update(Ordering::SeqCst, Ordering::SeqCst, |_| Some(false))
+ .expect("always returns value")
+ {
+ for (sig, _) in self.0 {
+ // # SAFETY
+ // * we only call a handler that is specifically designed to run in this environment.
+ #[allow(unsafe_code)]
+ unsafe {
+ signal_hook::low_level::register(sig, move || {
+ signal_hook::low_level::emulate_default_handler(sig).ok();
+ })?;
+ }
+ }
+ }
+ Ok(())
+ }
+
+ /// Return a type that deregisters all installed signal handlers on drop.
+ pub fn auto_deregister(self) -> AutoDeregister {
+ AutoDeregister(self)
+ }
+ }
+
+ impl Drop for AutoDeregister {
+ fn drop(&mut self) {
+ std::mem::take(&mut self.0).deregister().ok();
+ }
+ }
+
+ /// Initialize a signal handler to listen to SIGINT and SIGTERM and trigger our [`trigger()`][super::trigger()] that way.
+ /// Also trigger `interrupt()` which promises to never use a Mutex, allocate or deallocate.
+ ///
+ /// # Note
+ ///
+ /// It will abort the process on second press and won't inform the user about this behaviour either as we are unable to do so without
+ /// deadlocking even when trying to write to stderr directly.
+ pub fn init_handler(interrupt: impl Fn() + Send + Sync + Clone + 'static) -> io::Result<Deregister> {
+ if IS_INITIALIZED
+ .fetch_update(Ordering::SeqCst, Ordering::SeqCst, |_| Some(true))
+ .expect("always returns value")
+ {
+ return Err(io::Error::new(io::ErrorKind::Other, "Already initialized"));
+ }
+ let mut hooks = Vec::with_capacity(signal_hook::consts::TERM_SIGNALS.len());
+ for sig in signal_hook::consts::TERM_SIGNALS {
+ // # SAFETY
+ // * we only set atomics or call functions that do
+ // * there is no use of the heap
+ let interrupt = interrupt.clone();
+ #[allow(unsafe_code)]
+ unsafe {
+ let hook_id = signal_hook::low_level::register(*sig, move || {
+ static INTERRUPT_COUNT: AtomicUsize = AtomicUsize::new(0);
+ if !super::is_triggered() {
+ INTERRUPT_COUNT.store(0, Ordering::SeqCst);
+ }
+ let msg_idx = INTERRUPT_COUNT.fetch_add(1, Ordering::SeqCst);
+ if msg_idx == 1 {
+ gix_tempfile::registry::cleanup_tempfiles_signal_safe();
+ signal_hook::low_level::emulate_default_handler(*sig).ok();
+ }
+ interrupt();
+ super::trigger();
+ })?;
+ hooks.push((*sig, hook_id));
+ }
+ }
+
+ // This means that they won't set up a handler, allowing us to call them right before we actually abort.
+ gix_tempfile::signal::setup(gix_tempfile::signal::handler::Mode::None);
+
+ Ok(Deregister(hooks))
+ }
+}
+use std::{
+ io,
+ sync::atomic::{AtomicBool, Ordering},
+};
+
+pub use init::init_handler;
+
+/// A wrapper for an inner iterator which will check for interruptions on each iteration.
+pub struct Iter<I, EFN> {
+ /// The actual iterator to yield elements from.
+ inner: gix_features::interrupt::IterWithErr<'static, I, EFN>,
+}
+
+impl<I, EFN, E> Iter<I, EFN>
+where
+ I: Iterator,
+ EFN: FnOnce() -> E,
+{
+ /// Create a new iterator over `inner` which checks for interruptions on each iteration and calls `make_err()` to
+ /// signal an interruption happened, causing no further items to be iterated from that point on.
+ pub fn new(inner: I, make_err: EFN) -> Self {
+ Iter {
+ inner: gix_features::interrupt::IterWithErr::new(inner, make_err, &IS_INTERRUPTED),
+ }
+ }
+
+ /// Return the inner iterator
+ pub fn into_inner(self) -> I {
+ self.inner.inner
+ }
+
+ /// Return the inner iterator as reference
+ pub fn inner(&self) -> &I {
+ &self.inner.inner
+ }
+}
+
+impl<I, EFN, E> Iterator for Iter<I, EFN>
+where
+ I: Iterator,
+ EFN: FnOnce() -> E,
+{
+ type Item = Result<I::Item, E>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.inner.next()
+ }
+}
+
+/// A wrapper for implementors of [`std::io::Read`] or [`std::io::BufRead`] with interrupt support.
+///
+/// It fails a [read][`std::io::Read::read`] while an interrupt was requested.
+pub struct Read<R> {
+ /// The actual implementor of [`std::io::Read`] to which interrupt support will be added.
+ inner: gix_features::interrupt::Read<'static, R>,
+}
+
+impl<R> Read<R>
+where
+ R: io::Read,
+{
+ /// Create a new interruptible reader from `read`.
+ pub fn new(read: R) -> Self {
+ Read {
+ inner: gix_features::interrupt::Read {
+ inner: read,
+ should_interrupt: &IS_INTERRUPTED,
+ },
+ }
+ }
+
+ /// Return the inner reader
+ pub fn into_inner(self) -> R {
+ self.inner.inner
+ }
+}
+
+impl<R> io::Read for Read<R>
+where
+ R: io::Read,
+{
+ fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
+ self.inner.read(buf)
+ }
+}
+
+impl<R> io::BufRead for Read<R>
+where
+ R: io::BufRead,
+{
+ fn fill_buf(&mut self) -> io::Result<&[u8]> {
+ self.inner.fill_buf()
+ }
+
+ fn consume(&mut self, amt: usize) {
+ self.inner.consume(amt)
+ }
+}
+
+/// The flag behind all utility functions in this module.
+pub static IS_INTERRUPTED: AtomicBool = AtomicBool::new(false);
+
+/// Returns true if an interrupt is requested.
+pub fn is_triggered() -> bool {
+ IS_INTERRUPTED.load(Ordering::Relaxed)
+}
+
+/// Trigger an interrupt, signalling to those checking for [`is_triggered()`] to stop what they are doing.
+pub fn trigger() {
+ IS_INTERRUPTED.store(true, Ordering::SeqCst);
+}
+
+/// Sets the interrupt request to false, thus allowing those checking for [`is_triggered()`] to proceed.
+pub fn reset() {
+ IS_INTERRUPTED.store(false, Ordering::SeqCst);
+}
diff --git a/vendor/gix/src/kind.rs b/vendor/gix/src/kind.rs
new file mode 100644
index 000000000..a8213475f
--- /dev/null
+++ b/vendor/gix/src/kind.rs
@@ -0,0 +1,23 @@
+use crate::Kind;
+
+impl Kind {
+ /// Returns true if this is a bare repository, one without a work tree.
+ pub fn is_bare(&self) -> bool {
+ matches!(self, Kind::Bare)
+ }
+}
+
+impl From<gix_discover::repository::Kind> for Kind {
+ fn from(v: gix_discover::repository::Kind) -> Self {
+ match v {
+ gix_discover::repository::Kind::Submodule { .. } | gix_discover::repository::Kind::SubmoduleGitDir => {
+ Kind::WorkTree { is_linked: false }
+ }
+ gix_discover::repository::Kind::Bare => Kind::Bare,
+ gix_discover::repository::Kind::WorkTreeGitDir { .. } => Kind::WorkTree { is_linked: true },
+ gix_discover::repository::Kind::WorkTree { linked_git_dir } => Kind::WorkTree {
+ is_linked: linked_git_dir.is_some(),
+ },
+ }
+ }
+}
diff --git a/vendor/gix/src/lib.rs b/vendor/gix/src/lib.rs
new file mode 100644
index 000000000..257a613d7
--- /dev/null
+++ b/vendor/gix/src/lib.rs
@@ -0,0 +1,314 @@
+//! This crate provides the [`Repository`] abstraction which serves as a hub into all the functionality of git.
+//!
+//! It's powerful and won't sacrifice performance while still increasing convenience compared to using the sub-crates
+//! individually. Sometimes it may hide complexity under the assumption that the performance difference doesn't matter
+//! for all but the fewest tools out there, which would be using the underlying crates directly or file an issue.
+//!
+//! # The prelude and extensions
+//!
+//! With `use git_repository::prelude::*` you should be ready to go as it pulls in various extension traits to make functionality
+//! available on objects that may use it.
+//!
+//! The method signatures are still complex and may require various arguments for configuration and cache control.
+//!
+//! Most extensions to existing objects provide an `obj_with_extension.attach(&repo).an_easier_version_of_a_method()` for simpler
+//! call signatures.
+//!
+//! ## ThreadSafe Mode
+//!
+//! By default, the [`Repository`] isn't `Sync` and thus can't be used in certain contexts which require the `Sync` trait.
+//!
+//! To help with this, convert it with [`.into_sync()`][Repository::into_sync()] into a [`ThreadSafeRepository`].
+//!
+//! ## Object-Access Performance
+//!
+//! Accessing objects quickly is the bread-and-butter of working with git, right after accessing references. Hence it's vital
+//! to understand which cache levels exist and how to leverage them.
+//!
+//! When accessing an object, the first cache that's queried is a memory-capped LRU object cache, mapping their id to data and kind.
+//! It has to be specifically enabled on a [`Repository`].
+//! On miss, the object is looked up and if a pack is hit, there is a small fixed-size cache for delta-base objects.
+//!
+//! In scenarios where the same objects are accessed multiple times, the object cache can be useful and is to be configured specifically
+//! using the [`object_cache_size(…)`][crate::Repository::object_cache_size()] method.
+//!
+//! Use the `cache-efficiency-debug` cargo feature to learn how efficient the cache actually is - it's easy to end up with lowered
+//! performance if the cache is not hit in 50% of the time.
+//!
+//! ### Terminology
+//!
+//! #### WorkingTree and WorkTree
+//!
+//! When reading the documentation of the canonical gix-worktree program one gets the impression work tree and working tree are used
+//! interchangeably. We use the term _work tree_ only and try to do so consistently as it's shorter and assumed to be the same.
+//!
+//! # Cargo-features
+//!
+//! To make using _sub-crates_ easier these are re-exported into the root of this crate. Here we list how to access nested plumbing
+//! crates which are otherwise harder to discover:
+//!
+//! **`git_repository::`**
+//! * [`odb`]
+//! * [`pack`][odb::pack]
+//! * [`protocol`]
+//! * [`transport`][protocol::transport]
+//! * [`packetline`][protocol::transport::packetline]
+//!
+//!
+//! ## Feature Flags
+#![cfg_attr(
+ feature = "document-features",
+ cfg_attr(doc, doc = ::document_features::document_features!())
+)]
+#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))]
+#![deny(missing_docs, rust_2018_idioms, unsafe_code)]
+
+// Re-exports to make this a potential one-stop shop crate avoiding people from having to reference various crates themselves.
+// This also means that their major version changes affect our major version, but that's alright as we directly expose their
+// APIs/instances anyway.
+pub use gix_actor as actor;
+pub use gix_attributes as attrs;
+pub use gix_credentials as credentials;
+pub use gix_date as date;
+pub use gix_features as features;
+use gix_features::threading::OwnShared;
+pub use gix_features::{parallel, progress::Progress, threading};
+pub use gix_glob as glob;
+pub use gix_hash as hash;
+#[doc(inline)]
+pub use gix_index as index;
+pub use gix_lock as lock;
+pub use gix_object as objs;
+pub use gix_object::bstr;
+pub use gix_odb as odb;
+pub use gix_prompt as prompt;
+#[cfg(all(feature = "gix-protocol"))]
+pub use gix_protocol as protocol;
+pub use gix_ref as refs;
+pub use gix_refspec as refspec;
+pub use gix_sec as sec;
+pub use gix_tempfile as tempfile;
+pub use gix_traverse as traverse;
+pub use gix_url as url;
+#[doc(inline)]
+pub use gix_url::Url;
+pub use hash::{oid, ObjectId};
+
+pub mod interrupt;
+
+mod ext;
+///
+pub mod prelude {
+ pub use gix_features::parallel::reduce::Finalize;
+ pub use gix_odb::{Find, FindExt, Header, HeaderExt, Write};
+
+ pub use crate::ext::*;
+}
+
+///
+pub mod path;
+
+/// The standard type for a store to handle git references.
+pub type RefStore = gix_ref::file::Store;
+/// A handle for finding objects in an object database, abstracting away caches for thread-local use.
+pub type OdbHandle = gix_odb::Handle;
+/// A way to access git configuration
+pub(crate) type Config = OwnShared<gix_config::File<'static>>;
+
+///
+mod types;
+pub use types::{
+ Commit, Head, Id, Kind, Object, ObjectDetached, Reference, Remote, Repository, Tag, ThreadSafeRepository, Tree,
+ Worktree,
+};
+
+///
+pub mod clone;
+pub mod commit;
+pub mod head;
+pub mod id;
+pub mod object;
+pub mod reference;
+mod repository;
+pub mod tag;
+
+///
+pub mod progress {
+ #[cfg(feature = "progress-tree")]
+ pub use gix_features::progress::prodash::tree;
+ pub use gix_features::progress::*;
+}
+
+///
+pub mod diff {
+ pub use gix_diff::*;
+ ///
+ pub mod rename {
+ /// Determine how to do rename tracking.
+ #[derive(Debug, Copy, Clone, Eq, PartialEq)]
+ pub enum Tracking {
+ /// Do not track renames at all, the fastest option.
+ Disabled,
+ /// Track renames.
+ Renames,
+ /// Track renames and copies.
+ ///
+ /// This is the most expensive option.
+ RenamesAndCopies,
+ }
+ }
+}
+
+/// See [ThreadSafeRepository::discover()], but returns a [`Repository`] instead.
+#[allow(clippy::result_large_err)]
+pub fn discover(directory: impl AsRef<std::path::Path>) -> Result<Repository, discover::Error> {
+ ThreadSafeRepository::discover(directory).map(Into::into)
+}
+
+/// See [ThreadSafeRepository::init()], but returns a [`Repository`] instead.
+#[allow(clippy::result_large_err)]
+pub fn init(directory: impl AsRef<std::path::Path>) -> Result<Repository, init::Error> {
+ ThreadSafeRepository::init(directory, create::Kind::WithWorktree, create::Options::default()).map(Into::into)
+}
+
+/// See [ThreadSafeRepository::init()], but returns a [`Repository`] instead.
+#[allow(clippy::result_large_err)]
+pub fn init_bare(directory: impl AsRef<std::path::Path>) -> Result<Repository, init::Error> {
+ ThreadSafeRepository::init(directory, create::Kind::Bare, create::Options::default()).map(Into::into)
+}
+
+/// Create a platform for configuring a bare clone from `url` to the local `path`, using default options for opening it (but
+/// amended with using configuration from the git installation to ensure all authentication options are honored).
+///
+/// See [`clone::PrepareFetch::new()`] for a function to take full control over all options.
+#[allow(clippy::result_large_err)]
+pub fn prepare_clone_bare<Url, E>(
+ url: Url,
+ path: impl AsRef<std::path::Path>,
+) -> Result<clone::PrepareFetch, clone::Error>
+where
+ Url: std::convert::TryInto<gix_url::Url, Error = E>,
+ gix_url::parse::Error: From<E>,
+{
+ clone::PrepareFetch::new(
+ url,
+ path,
+ create::Kind::Bare,
+ create::Options::default(),
+ open_opts_with_git_binary_config(),
+ )
+}
+
+/// Create a platform for configuring a clone with main working tree from `url` to the local `path`, using default options for opening it
+/// (but amended with using configuration from the git installation to ensure all authentication options are honored).
+///
+/// See [`clone::PrepareFetch::new()`] for a function to take full control over all options.
+#[allow(clippy::result_large_err)]
+pub fn prepare_clone<Url, E>(url: Url, path: impl AsRef<std::path::Path>) -> Result<clone::PrepareFetch, clone::Error>
+where
+ Url: std::convert::TryInto<gix_url::Url, Error = E>,
+ gix_url::parse::Error: From<E>,
+{
+ clone::PrepareFetch::new(
+ url,
+ path,
+ create::Kind::WithWorktree,
+ create::Options::default(),
+ open_opts_with_git_binary_config(),
+ )
+}
+
+fn open_opts_with_git_binary_config() -> open::Options {
+ use gix_sec::trust::DefaultForLevel;
+ let mut opts = open::Options::default_for_level(gix_sec::Trust::Full);
+ opts.permissions.config.git_binary = true;
+ opts
+}
+
+/// See [ThreadSafeRepository::open()], but returns a [`Repository`] instead.
+#[allow(clippy::result_large_err)]
+pub fn open(directory: impl Into<std::path::PathBuf>) -> Result<Repository, open::Error> {
+ ThreadSafeRepository::open(directory).map(Into::into)
+}
+
+/// See [ThreadSafeRepository::open_opts()], but returns a [`Repository`] instead.
+#[allow(clippy::result_large_err)]
+pub fn open_opts(directory: impl Into<std::path::PathBuf>, options: open::Options) -> Result<Repository, open::Error> {
+ ThreadSafeRepository::open_opts(directory, options).map(Into::into)
+}
+
+///
+pub mod permission {
+ ///
+ pub mod env_var {
+ ///
+ pub mod resource {
+ ///
+ pub type Error = gix_sec::permission::Error<std::path::PathBuf>;
+ }
+ }
+}
+///
+pub mod permissions {
+ pub use crate::repository::permissions::{Config, Environment};
+}
+pub use repository::permissions::Permissions;
+
+///
+pub mod create;
+
+///
+pub mod open;
+
+///
+pub mod config;
+
+///
+pub mod mailmap;
+
+///
+pub mod worktree;
+
+pub mod revision;
+
+///
+pub mod remote;
+
+///
+pub mod init;
+
+/// Not to be confused with 'status'.
+pub mod state {
+ /// Tell what operation is currently in progress.
+ #[derive(Debug, PartialEq, Eq)]
+ pub enum InProgress {
+ /// A mailbox is being applied.
+ ApplyMailbox,
+ /// A rebase is happening while a mailbox is being applied.
+ // TODO: test
+ ApplyMailboxRebase,
+ /// A git bisect operation has not yet been concluded.
+ Bisect,
+ /// A cherry pick operation.
+ CherryPick,
+ /// A cherry pick with multiple commits pending.
+ CherryPickSequence,
+ /// A merge operation.
+ Merge,
+ /// A rebase operation.
+ Rebase,
+ /// An interactive rebase operation.
+ RebaseInteractive,
+ /// A revert operation.
+ Revert,
+ /// A revert operation with multiple commits pending.
+ RevertSequence,
+ }
+}
+
+///
+pub mod discover;
+
+pub mod env;
+
+mod kind;
diff --git a/vendor/gix/src/mailmap.rs b/vendor/gix/src/mailmap.rs
new file mode 100644
index 000000000..6ea6bcc2d
--- /dev/null
+++ b/vendor/gix/src/mailmap.rs
@@ -0,0 +1,18 @@
+pub use gix_mailmap::*;
+
+///
+pub mod load {
+ /// The error returned by [`crate::Repository::open_mailmap_into()`].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("The mailmap file declared in `mailmap.file` could not be read")]
+ Io(#[from] std::io::Error),
+ #[error("The configured mailmap.blob could not be parsed")]
+ BlobSpec(#[from] gix_hash::decode::Error),
+ #[error(transparent)]
+ PathInterpolate(#[from] gix_config::path::interpolate::Error),
+ #[error("Could not find object configured in `mailmap.blob`")]
+ FindExisting(#[from] crate::object::find::existing::Error),
+ }
+}
diff --git a/vendor/gix/src/object/blob.rs b/vendor/gix/src/object/blob.rs
new file mode 100644
index 000000000..f35605422
--- /dev/null
+++ b/vendor/gix/src/object/blob.rs
@@ -0,0 +1,148 @@
+///
+pub mod diff {
+ use std::ops::Range;
+
+ use crate::{bstr::ByteSlice, object::blob::diff::line::Change};
+
+ /// A platform to keep temporary information to perform line diffs on modified blobs.
+ ///
+ pub struct Platform<'old, 'new> {
+ /// The previous version of the blob.
+ pub old: crate::Object<'old>,
+ /// The new version of the blob.
+ pub new: crate::Object<'new>,
+ /// The algorithm to use when calling [imara_diff::diff()][gix_diff::blob::diff()].
+ /// This value is determined by the `diff.algorithm` configuration.
+ pub algo: gix_diff::blob::Algorithm,
+ }
+
+ ///
+ pub mod init {
+ /// The error returned by [`Platform::from_ids()`][super::Platform::from_ids()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("Could not find the previous blob or the new blob to diff against")]
+ FindExisting(#[from] crate::object::find::existing::Error),
+ #[error("Could not obtain diff algorithm from configuration")]
+ DiffAlgorithm(#[from] crate::config::diff::algorithm::Error),
+ }
+ }
+
+ impl<'old, 'new> Platform<'old, 'new> {
+ /// Produce a platform for performing various diffs after obtaining the object data of `previous_id` and `new_id`.
+ ///
+ /// Note that these objects are treated as raw data and are assumed to be blobs.
+ pub fn from_ids(
+ previous_id: &crate::Id<'old>,
+ new_id: &crate::Id<'new>,
+ ) -> Result<Platform<'old, 'new>, init::Error> {
+ match previous_id
+ .object()
+ .and_then(|old| new_id.object().map(|new| (old, new)))
+ {
+ Ok((old, new)) => {
+ let algo = match new_id.repo.config.diff_algorithm() {
+ Ok(algo) => algo,
+ Err(err) => return Err(err.into()),
+ };
+ Ok(Platform { old, new, algo })
+ }
+ Err(err) => Err(err.into()),
+ }
+ }
+ }
+
+ ///
+ pub mod line {
+ use crate::bstr::BStr;
+
+ /// A change to a hunk of lines.
+ pub enum Change<'a, 'data> {
+ /// Lines were added.
+ Addition {
+ /// The lines themselves without terminator.
+ lines: &'a [&'data BStr],
+ },
+ /// Lines were removed.
+ Deletion {
+ /// The lines themselves without terminator.
+ lines: &'a [&'data BStr],
+ },
+ /// Lines have been replaced.
+ Modification {
+ /// The replaced lines without terminator.
+ lines_before: &'a [&'data BStr],
+ /// The new lines without terminator.
+ lines_after: &'a [&'data BStr],
+ },
+ }
+ }
+
+ impl<'old, 'new> Platform<'old, 'new> {
+ /// Perform a diff on lines between the old and the new version of a blob, passing each hunk of lines to `process_hunk`.
+ /// The diffing algorithm is determined by the `diff.algorithm` configuration.
+ ///
+ /// Note that you can invoke the diff more flexibly as well.
+ // TODO: more tests (only tested insertion right now)
+ pub fn lines<FnH, E>(&self, mut process_hunk: FnH) -> Result<(), E>
+ where
+ FnH: FnMut(line::Change<'_, '_>) -> Result<(), E>,
+ E: std::error::Error,
+ {
+ let input = self.line_tokens();
+ let mut err = None;
+ let mut lines = Vec::new();
+ gix_diff::blob::diff(self.algo, &input, |before: Range<u32>, after: Range<u32>| {
+ if err.is_some() {
+ return;
+ }
+ lines.clear();
+ lines.extend(
+ input.before[before.start as usize..before.end as usize]
+ .iter()
+ .map(|&line| input.interner[line].as_bstr()),
+ );
+ let end_of_before = lines.len();
+ lines.extend(
+ input.after[after.start as usize..after.end as usize]
+ .iter()
+ .map(|&line| input.interner[line].as_bstr()),
+ );
+ let hunk_before = &lines[..end_of_before];
+ let hunk_after = &lines[end_of_before..];
+ if hunk_after.is_empty() {
+ err = process_hunk(Change::Deletion { lines: hunk_before }).err();
+ } else if hunk_before.is_empty() {
+ err = process_hunk(Change::Addition { lines: hunk_after }).err();
+ } else {
+ err = process_hunk(Change::Modification {
+ lines_before: hunk_before,
+ lines_after: hunk_after,
+ })
+ .err();
+ }
+ });
+
+ match err {
+ Some(err) => Err(err),
+ None => Ok(()),
+ }
+ }
+
+ /// Count the amount of removed and inserted lines efficiently.
+ pub fn line_counts(&self) -> gix_diff::blob::sink::Counter<()> {
+ let tokens = self.line_tokens();
+ gix_diff::blob::diff(self.algo, &tokens, gix_diff::blob::sink::Counter::default())
+ }
+
+ /// Return a tokenizer which treats lines as smallest unit for use in a [diff operation][gix_diff::blob::diff()].
+ ///
+ /// The line separator is determined according to normal git rules and filters.
+ pub fn line_tokens(&self) -> gix_diff::blob::intern::InternedInput<&[u8]> {
+ // TODO: make use of `core.eol` and/or filters to do line-counting correctly. It's probably
+ // OK to just know how these objects are saved to know what constitutes a line.
+ gix_diff::blob::intern::InternedInput::new(self.old.data.as_bytes(), self.new.data.as_bytes())
+ }
+ }
+}
diff --git a/vendor/gix/src/object/commit.rs b/vendor/gix/src/object/commit.rs
new file mode 100644
index 000000000..e28a12955
--- /dev/null
+++ b/vendor/gix/src/object/commit.rs
@@ -0,0 +1,156 @@
+use crate::{bstr, bstr::BStr, revision, Commit, ObjectDetached, Tree};
+
mod error {
    use crate::object;

    /// The error returned by `Commit` accessors that decode the commit or look up the objects it references.
    #[derive(Debug, thiserror::Error)]
    #[allow(missing_docs)]
    pub enum Error {
        #[error(transparent)]
        FindExistingObject(#[from] object::find::existing::Error),
        #[error("The commit could not be decoded fully or partially")]
        Decode(#[from] gix_object::decode::Error),
        #[error("Expected object of type {}, but got {}", .expected, .actual)]
        ObjectKind {
            expected: gix_object::Kind,
            actual: gix_object::Kind,
        },
    }
}
+
+pub use error::Error;
+
+impl<'repo> Commit<'repo> {
+ /// Create an owned instance of this object, copying our data in the process.
+ pub fn detached(&self) -> ObjectDetached {
+ ObjectDetached {
+ id: self.id,
+ kind: gix_object::Kind::Commit,
+ data: self.data.clone(),
+ }
+ }
+
+ /// Sever the connection to the `Repository` and turn this instance into a standalone object.
+ pub fn detach(self) -> ObjectDetached {
+ self.into()
+ }
+}
+
impl<'repo> Commit<'repo> {
    /// Turn this object's id into a shortened id with a length in hex as configured by `core.abbrev`.
    pub fn short_id(&self) -> Result<gix_hash::Prefix, crate::id::shorten::Error> {
        use crate::ext::ObjectIdExt;
        self.id.attach(self.repo).shorten()
    }

    /// Parse the commit's message into a [`MessageRef`][gix_object::commit::MessageRef].
    pub fn message(&self) -> Result<gix_object::commit::MessageRef<'_>, gix_object::decode::Error> {
        Ok(gix_object::commit::MessageRef::from_bytes(self.message_raw()?))
    }
    /// Decode the commit object until the message and return it.
    pub fn message_raw(&self) -> Result<&'_ BStr, gix_object::decode::Error> {
        gix_object::CommitRefIter::from_bytes(&self.data).message()
    }
    /// Obtain the message by using intricate knowledge about the encoding, which is fastest and
    /// can't fail at the expense of error handling.
    pub fn message_raw_sloppy(&self) -> &BStr {
        use bstr::ByteSlice;
        // The message is everything past the first blank line; without one, yield an empty message.
        self.data
            .find(b"\n\n")
            .map(|pos| &self.data[pos + 2..])
            .unwrap_or_default()
            .as_bstr()
    }

    /// Decode the commit and obtain the time at which the commit was created.
    ///
    /// For the time at which it was authored, refer to `.decode()?.author.time`.
    pub fn time(&self) -> Result<gix_actor::Time, Error> {
        Ok(self.committer()?.time)
    }

    /// Decode the entire commit object and return it for accessing all commit information.
    ///
    /// It will allocate only if there are more than 2 parents.
    ///
    /// Note that the returned commit object makes lookups easy and should be
    /// used for successive calls to string-ish information to avoid decoding the object
    /// more than once.
    pub fn decode(&self) -> Result<gix_object::CommitRef<'_>, gix_object::decode::Error> {
        gix_object::CommitRef::from_bytes(&self.data)
    }

    /// Return an iterator over tokens, representing this commit piece by piece.
    pub fn iter(&self) -> gix_object::CommitRefIter<'_> {
        gix_object::CommitRefIter::from_bytes(&self.data)
    }

    /// Return the commit's author, with surrounding whitespace trimmed.
    pub fn author(&self) -> Result<gix_actor::SignatureRef<'_>, gix_object::decode::Error> {
        gix_object::CommitRefIter::from_bytes(&self.data)
            .author()
            .map(|s| s.trim())
    }

    /// Return the commit's committer, with surrounding whitespace trimmed.
    pub fn committer(&self) -> Result<gix_actor::SignatureRef<'_>, gix_object::decode::Error> {
        gix_object::CommitRefIter::from_bytes(&self.data)
            .committer()
            .map(|s| s.trim())
    }

    /// Decode this commit's parent ids on the fly without allocating.
    // TODO: tests
    pub fn parent_ids(&self) -> impl Iterator<Item = crate::Id<'repo>> + '_ {
        use crate::ext::ObjectIdExt;
        // Capture the repo by value so the iterator doesn't borrow `self`'s repo field.
        let repo = self.repo;
        gix_object::CommitRefIter::from_bytes(&self.data)
            .parent_ids()
            .map(move |id| id.attach(repo))
    }

    /// Parse the commit and return the tree object it points to.
    pub fn tree(&self) -> Result<Tree<'repo>, Error> {
        match self.tree_id()?.object()?.try_into_tree() {
            Ok(tree) => Ok(tree),
            Err(crate::object::try_into::Error { actual, expected, .. }) => Err(Error::ObjectKind { actual, expected }),
        }
    }

    /// Parse the commit and return the tree id it points to.
    pub fn tree_id(&self) -> Result<crate::Id<'repo>, gix_object::decode::Error> {
        gix_object::CommitRefIter::from_bytes(&self.data)
            .tree_id()
            .map(|id| crate::Id::from_id(id, self.repo))
    }

    /// Return our own id with connection to this repository.
    pub fn id(&self) -> crate::Id<'repo> {
        use crate::ext::ObjectIdExt;
        self.id.attach(self.repo)
    }

    /// Obtain a platform for traversing ancestors of this commit.
    pub fn ancestors(&self) -> revision::walk::Platform<'repo> {
        self.id().ancestors()
    }

    /// Create a platform to further configure a `git describe` operation to find a name for this commit by looking
    /// at the closest annotated tags (by default) in its past.
    pub fn describe(&self) -> crate::commit::describe::Platform<'repo> {
        crate::commit::describe::Platform {
            id: self.id,
            repo: self.repo,
            select: Default::default(),
            first_parent: false,
            id_as_fallback: false,
            max_candidates: 10,
        }
    }
}
+
+impl<'r> std::fmt::Debug for Commit<'r> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ write!(f, "Commit({})", self.id)
+ }
+}
diff --git a/vendor/gix/src/object/errors.rs b/vendor/gix/src/object/errors.rs
new file mode 100644
index 000000000..eb7733473
--- /dev/null
+++ b/vendor/gix/src/object/errors.rs
@@ -0,0 +1,34 @@
/// Errors that can occur when converting an object into a specific, parsed representation.
pub mod conversion {

    /// The error returned by [`try_to_commit_ref()`][crate::Object::try_to_commit_ref()] and similar conversions.
    #[derive(Debug, thiserror::Error)]
    #[allow(missing_docs)]
    pub enum Error {
        #[error(transparent)]
        Decode(#[from] gix_object::decode::Error),
        #[error("Expected object type {}, but got {}", .expected, .actual)]
        UnexpectedType {
            expected: gix_object::Kind,
            actual: gix_object::Kind,
        },
    }
}
+
/// Errors that can occur when looking up objects in the object database.
pub mod find {
    /// Indicate that an error occurred when trying to find an object.
    pub type Error = gix_odb::store::find::Error;

    /// Errors for finds that require the object to exist.
    pub mod existing {
        /// An object could not be found in the database, or an error occurred when trying to obtain it.
        pub type Error = gix_odb::find::existing::Error<gix_odb::store::find::Error>;
    }
}
+
/// Errors that can occur when writing objects to the object database.
pub mod write {
    /// An error to indicate writing to the loose object store failed.
    pub type Error = gix_odb::store::write::Error;
}
diff --git a/vendor/gix/src/object/impls.rs b/vendor/gix/src/object/impls.rs
new file mode 100644
index 000000000..3453b1b3c
--- /dev/null
+++ b/vendor/gix/src/object/impls.rs
@@ -0,0 +1,123 @@
+use std::convert::TryFrom;
+
+use crate::{object, Commit, Object, ObjectDetached, Tag, Tree};
+
+impl<'repo> From<Object<'repo>> for ObjectDetached {
+ fn from(mut v: Object<'repo>) -> Self {
+ ObjectDetached {
+ id: v.id,
+ kind: v.kind,
+ data: std::mem::take(&mut v.data),
+ }
+ }
+}
+
+impl<'repo> From<Commit<'repo>> for ObjectDetached {
+ fn from(mut v: Commit<'repo>) -> Self {
+ ObjectDetached {
+ id: v.id,
+ kind: gix_object::Kind::Commit,
+ data: std::mem::take(&mut v.data),
+ }
+ }
+}
+
+impl<'repo> From<Tag<'repo>> for ObjectDetached {
+ fn from(mut v: Tag<'repo>) -> Self {
+ ObjectDetached {
+ id: v.id,
+ kind: gix_object::Kind::Tag,
+ data: std::mem::take(&mut v.data),
+ }
+ }
+}
+
+impl<'repo> From<Commit<'repo>> for Object<'repo> {
+ fn from(mut v: Commit<'repo>) -> Self {
+ Object {
+ id: v.id,
+ kind: gix_object::Kind::Commit,
+ data: steal_from_freelist(&mut v.data),
+ repo: v.repo,
+ }
+ }
+}
+
+impl<'repo> AsRef<[u8]> for Object<'repo> {
+ fn as_ref(&self) -> &[u8] {
+ &self.data
+ }
+}
+
+impl AsRef<[u8]> for ObjectDetached {
+ fn as_ref(&self) -> &[u8] {
+ &self.data
+ }
+}
+
+impl<'repo> TryFrom<Object<'repo>> for Commit<'repo> {
+ type Error = Object<'repo>;
+
+ fn try_from(mut value: Object<'repo>) -> Result<Self, Self::Error> {
+ let handle = value.repo;
+ match value.kind {
+ object::Kind::Commit => Ok(Commit {
+ id: value.id,
+ repo: handle,
+ data: steal_from_freelist(&mut value.data),
+ }),
+ _ => Err(value),
+ }
+ }
+}
+
+impl<'repo> TryFrom<Object<'repo>> for Tag<'repo> {
+ type Error = Object<'repo>;
+
+ fn try_from(mut value: Object<'repo>) -> Result<Self, Self::Error> {
+ let handle = value.repo;
+ match value.kind {
+ object::Kind::Tag => Ok(Tag {
+ id: value.id,
+ repo: handle,
+ data: steal_from_freelist(&mut value.data),
+ }),
+ _ => Err(value),
+ }
+ }
+}
+
+impl<'repo> TryFrom<Object<'repo>> for Tree<'repo> {
+ type Error = Object<'repo>;
+
+ fn try_from(mut value: Object<'repo>) -> Result<Self, Self::Error> {
+ let handle = value.repo;
+ match value.kind {
+ object::Kind::Tree => Ok(Tree {
+ id: value.id,
+ repo: handle,
+ data: steal_from_freelist(&mut value.data),
+ }),
+ _ => Err(value),
+ }
+ }
+}
+
+impl<'r> std::fmt::Debug for Object<'r> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ use gix_object::Kind::*;
+ let type_name = match self.kind {
+ Blob => "Blob",
+ Commit => "Commit",
+ Tree => "Tree",
+ Tag => "Tag",
+ };
+ write!(f, "{}({})", type_name, self.id)
+ }
+}
+
/// In conjunction with the handles free list, leaving an empty Vec in place of the original causes it to not be
/// returned to the free list.
fn steal_from_freelist(data: &mut Vec<u8>) -> Vec<u8> {
    let mut stolen = Vec::new();
    std::mem::swap(&mut stolen, data);
    stolen
}
diff --git a/vendor/gix/src/object/mod.rs b/vendor/gix/src/object/mod.rs
new file mode 100644
index 000000000..75d77d138
--- /dev/null
+++ b/vendor/gix/src/object/mod.rs
@@ -0,0 +1,221 @@
+//!
+use std::convert::TryInto;
+
+use gix_hash::ObjectId;
+pub use gix_object::Kind;
+
+use crate::{Commit, Id, Object, ObjectDetached, Tag, Tree};
+
+mod errors;
pub(crate) mod cache {
    // Re-export so the rest of the crate can name the memory-capped object cache type directly.
    pub use gix_pack::cache::object::MemoryCappedHashmap;
}
+pub use errors::{conversion, find, write};
+///
+pub mod blob;
+///
+pub mod commit;
+mod impls;
+pub mod peel;
+mod tag;
+///
+pub mod tree;
+
/// The error type used by the fallible `try_into_*` object conversions.
pub mod try_into {
    /// The object could not be converted to the desired kind; it carries the object's id and both kinds involved.
    #[derive(thiserror::Error, Debug)]
    #[allow(missing_docs)]
    #[error("Object named {id} was supposed to be of kind {expected}, but was kind {actual}.")]
    pub struct Error {
        pub actual: gix_object::Kind,
        pub expected: gix_object::Kind,
        pub id: gix_hash::ObjectId,
    }
}
+
+impl ObjectDetached {
+ /// Infuse this owned object with `repo` access.
+ pub fn attach(self, repo: &crate::Repository) -> Object<'_> {
+ Object {
+ id: self.id,
+ kind: self.kind,
+ data: self.data,
+ repo,
+ }
+ }
+}
+
+impl std::fmt::Debug for ObjectDetached {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ use gix_object::Kind::*;
+ let type_name = match self.kind {
+ Blob => "Blob",
+ Commit => "Commit",
+ Tree => "Tree",
+ Tag => "Tag",
+ };
+ write!(f, "{}({})", type_name, self.id)
+ }
+}
+
+/// Consuming conversions to attached object kinds.
+impl<'repo> Object<'repo> {
+ pub(crate) fn from_data(
+ id: impl Into<ObjectId>,
+ kind: Kind,
+ data: Vec<u8>,
+ repo: &'repo crate::Repository,
+ ) -> Self {
+ Object {
+ id: id.into(),
+ kind,
+ data,
+ repo,
+ }
+ }
+
+ /// Transform this object into a tree, or panic if it is none.
+ pub fn into_tree(self) -> Tree<'repo> {
+ match self.try_into() {
+ Ok(tree) => tree,
+ Err(this) => panic!("Tried to use {} as tree, but was {}", this.id, this.kind),
+ }
+ }
+
+ /// Transform this object into a commit, or panic if it is none.
+ pub fn into_commit(self) -> Commit<'repo> {
+ match self.try_into() {
+ Ok(commit) => commit,
+ Err(this) => panic!("Tried to use {} as commit, but was {}", this.id, this.kind),
+ }
+ }
+
+ /// Transform this object into a commit, or return it as part of the `Err` if it is no commit.
+ pub fn try_into_commit(self) -> Result<Commit<'repo>, try_into::Error> {
+ self.try_into().map_err(|this: Self| try_into::Error {
+ id: this.id,
+ actual: this.kind,
+ expected: gix_object::Kind::Commit,
+ })
+ }
+
+ /// Transform this object into a tag, or return it as part of the `Err` if it is no commit.
+ pub fn try_into_tag(self) -> Result<Tag<'repo>, try_into::Error> {
+ self.try_into().map_err(|this: Self| try_into::Error {
+ id: this.id,
+ actual: this.kind,
+ expected: gix_object::Kind::Commit,
+ })
+ }
+
+ /// Transform this object into a tree, or return it as part of the `Err` if it is no tree.
+ pub fn try_into_tree(self) -> Result<Tree<'repo>, try_into::Error> {
+ self.try_into().map_err(|this: Self| try_into::Error {
+ id: this.id,
+ actual: this.kind,
+ expected: gix_object::Kind::Tree,
+ })
+ }
+}
+
+impl<'repo> Object<'repo> {
+ /// Create an owned instance of this object, copying our data in the process.
+ pub fn detached(&self) -> ObjectDetached {
+ ObjectDetached {
+ id: self.id,
+ kind: self.kind,
+ data: self.data.clone(),
+ }
+ }
+
+ /// Sever the connection to the `Repository` and turn this instance into a standalone object.
+ pub fn detach(self) -> ObjectDetached {
+ self.into()
+ }
+}
+
/// Conversions to detached, lower-level object types.
impl<'repo> Object<'repo> {
    /// Obtain a fully parsed commit whose fields reference our data buffer.
    ///
    /// # Panic
    ///
    /// - this object is not a commit
    /// - the commit could not be decoded
    pub fn to_commit_ref(&self) -> gix_object::CommitRef<'_> {
        self.try_to_commit_ref().expect("BUG: need a commit")
    }

    /// Obtain a fully parsed commit whose fields reference our data buffer.
    pub fn try_to_commit_ref(&self) -> Result<gix_object::CommitRef<'_>, conversion::Error> {
        gix_object::Data::new(self.kind, &self.data)
            .decode()?
            .into_commit()
            .ok_or(conversion::Error::UnexpectedType {
                expected: gix_object::Kind::Commit,
                actual: self.kind,
            })
    }

    /// Obtain an iterator over commit tokens like in [`try_to_commit_ref_iter()`][Object::try_to_commit_ref_iter()].
    ///
    /// # Panic
    ///
    /// - this object is not a commit
    pub fn to_commit_ref_iter(&self) -> gix_object::CommitRefIter<'_> {
        gix_object::Data::new(self.kind, &self.data)
            .try_into_commit_iter()
            .expect("BUG: This object must be a commit")
    }

    /// Obtain a commit token iterator from the data in this instance, if it is a commit.
    pub fn try_to_commit_ref_iter(&self) -> Option<gix_object::CommitRefIter<'_>> {
        gix_object::Data::new(self.kind, &self.data).try_into_commit_iter()
    }

    /// Obtain a tag token iterator from the data in this instance.
    ///
    /// # Panic
    ///
    /// - this object is not a tag
    pub fn to_tag_ref_iter(&self) -> gix_object::TagRefIter<'_> {
        gix_object::Data::new(self.kind, &self.data)
            .try_into_tag_iter()
            .expect("BUG: this object must be a tag")
    }

    /// Obtain a tag token iterator from the data in this instance, or `None` if this is not a tag.
    ///
    /// In contrast to [`to_tag_ref_iter()`][Object::to_tag_ref_iter()], this method does not panic.
    pub fn try_to_tag_ref_iter(&self) -> Option<gix_object::TagRefIter<'_>> {
        gix_object::Data::new(self.kind, &self.data).try_into_tag_iter()
    }

    /// Obtain a tag object from the data in this instance.
    ///
    /// # Panic
    ///
    /// - this object is not a tag
    /// - the tag could not be decoded
    pub fn to_tag_ref(&self) -> gix_object::TagRef<'_> {
        self.try_to_tag_ref().expect("BUG: need tag")
    }

    /// Obtain a fully parsed tag object whose fields reference our data buffer.
    pub fn try_to_tag_ref(&self) -> Result<gix_object::TagRef<'_>, conversion::Error> {
        gix_object::Data::new(self.kind, &self.data)
            .decode()?
            .into_tag()
            .ok_or(conversion::Error::UnexpectedType {
                expected: gix_object::Kind::Tag,
                actual: self.kind,
            })
    }

    /// Return the attached id of this object.
    pub fn id(&self) -> Id<'repo> {
        Id::from_id(self.id, self.repo)
    }
}
diff --git a/vendor/gix/src/object/peel.rs b/vendor/gix/src/object/peel.rs
new file mode 100644
index 000000000..c906c0c75
--- /dev/null
+++ b/vendor/gix/src/object/peel.rs
@@ -0,0 +1,93 @@
+//!
+use crate::{
+ object,
+ object::{peel, Kind},
+ Object, Tree,
+};
+
/// The error returned by [`Object::peel_to_kind()`][crate::Object::peel_to_kind()].
pub mod to_kind {
    mod error {

        use crate::object;

        /// The error returned by [`Object::peel_to_kind()`][crate::Object::peel_to_kind()].
        #[derive(Debug, thiserror::Error)]
        #[allow(missing_docs)]
        pub enum Error {
            #[error(transparent)]
            FindExistingObject(#[from] object::find::existing::Error),
            #[error("Last encountered object {oid} was {actual} while trying to peel to {expected}")]
            NotFound {
                oid: gix_hash::Prefix,
                actual: object::Kind,
                expected: object::Kind,
            },
        }
    }
    // Only the error type itself is public API; the module around it stays private.
    pub use error::Error;
}
+
impl<'repo> Object<'repo> {
    // TODO: tests
    /// Follow tags to their target and commits to trees until the given `kind` of object is encountered.
    ///
    /// Note that this object doesn't necessarily have to be the end of the chain.
    /// Typical values are [`Kind::Commit`] or [`Kind::Tree`].
    pub fn peel_to_kind(mut self, kind: Kind) -> Result<Self, peel::to_kind::Error> {
        loop {
            match self.kind {
                // Already at the desired kind - done.
                our_kind if kind == our_kind => {
                    return Ok(self);
                }
                // A commit peels to the tree it points to.
                Kind::Commit => {
                    let tree_id = self
                        .try_to_commit_ref_iter()
                        .expect("commit")
                        .tree_id()
                        .expect("valid commit");
                    let repo = self.repo;
                    // NOTE(review): dropping before the next lookup presumably returns this
                    // object's buffer to the repository's free-list for reuse - confirm.
                    drop(self);
                    self = repo.find_object(tree_id)?;
                }
                // A tag peels to whatever object it targets.
                Kind::Tag => {
                    let target_id = self.to_tag_ref_iter().target_id().expect("valid tag");
                    let repo = self.repo;
                    drop(self);
                    self = repo.find_object(target_id)?;
                }
                // Trees and blobs don't point at further objects, so `kind` is unreachable.
                Kind::Tree | Kind::Blob => {
                    return Err(peel::to_kind::Error::NotFound {
                        oid: self.id().shorten().unwrap_or_else(|_| self.id.into()),
                        actual: self.kind,
                        expected: kind,
                    })
                }
            }
        }
    }

    /// Peel this object into a tree and return it, if this is possible.
    pub fn peel_to_tree(self) -> Result<Tree<'repo>, peel::to_kind::Error> {
        Ok(self.peel_to_kind(gix_object::Kind::Tree)?.into_tree())
    }

    // TODO: tests
    /// Follow all tag object targets until a commit, tree or blob is reached.
    ///
    /// Note that this method is different from [`peel_to_kind(…)`][Object::peel_to_kind()] as it won't
    /// peel commits to their tree, but handles tags only.
    pub fn peel_tags_to_end(mut self) -> Result<Self, object::find::existing::Error> {
        loop {
            match self.kind {
                // Anything that isn't a tag ends the chain.
                Kind::Commit | Kind::Tree | Kind::Blob => break Ok(self),
                Kind::Tag => {
                    let target_id = self.to_tag_ref_iter().target_id().expect("valid tag");
                    let repo = self.repo;
                    drop(self);
                    self = repo.find_object(target_id)?;
                }
            }
        }
    }
}
diff --git a/vendor/gix/src/object/tag.rs b/vendor/gix/src/object/tag.rs
new file mode 100644
index 000000000..ce9d7360a
--- /dev/null
+++ b/vendor/gix/src/object/tag.rs
@@ -0,0 +1,15 @@
+use crate::{ext::ObjectIdExt, Tag};
+
+impl<'repo> Tag<'repo> {
+ /// Decode this tag partially and return the id of its target.
+ pub fn target_id(&self) -> Result<crate::Id<'repo>, gix_object::decode::Error> {
+ gix_object::TagRefIter::from_bytes(&self.data)
+ .target_id()
+ .map(|id| id.attach(self.repo))
+ }
+
+ /// Decode this tag partially and return the tagger, if the field exists.
+ pub fn tagger(&self) -> Result<Option<gix_actor::SignatureRef<'_>>, gix_object::decode::Error> {
+ gix_object::TagRefIter::from_bytes(&self.data).tagger()
+ }
+}
diff --git a/vendor/gix/src/object/tree/diff/change.rs b/vendor/gix/src/object/tree/diff/change.rs
new file mode 100644
index 000000000..e6826d6ed
--- /dev/null
+++ b/vendor/gix/src/object/tree/diff/change.rs
@@ -0,0 +1,111 @@
+use crate::{bstr::BStr, Id};
+
/// Information about the diff performed to detect similarity of a [Rewrite][Event::Rewrite].
#[derive(Debug, Default, Clone, Copy, Eq, PartialEq)]
pub struct DiffLineStats {
    /// The amount of lines to remove from the source to get to the destination.
    pub removals: u32,
    /// The amount of lines to add to the source to get to the destination.
    pub insertions: u32,
    /// The total amount of lines of the previous state, in the source.
    pub before: u32,
    /// The total amount of lines of the new state, in the destination.
    pub after: u32,
}
+
/// An event emitted when finding differences between two trees.
#[derive(Debug, Clone, Copy)]
pub enum Event<'a, 'old, 'new> {
    /// An entry was added, like the addition of a file or directory.
    Addition {
        /// The mode of the added entry.
        entry_mode: gix_object::tree::EntryMode,
        /// The object id of the added entry.
        id: Id<'new>,
    },
    /// An entry was deleted, like the deletion of a file or directory.
    Deletion {
        /// The mode of the deleted entry.
        entry_mode: gix_object::tree::EntryMode,
        /// The object id of the deleted entry.
        id: Id<'old>,
    },
    /// An entry was modified, e.g. changing the contents of a file adjusts its object id and turning
    /// a file into a symbolic link adjusts its mode.
    Modification {
        /// The mode of the entry before the modification.
        previous_entry_mode: gix_object::tree::EntryMode,
        /// The object id of the entry before the modification.
        previous_id: Id<'old>,

        /// The mode of the entry after the modification.
        entry_mode: gix_object::tree::EntryMode,
        /// The object id after the modification.
        id: Id<'new>,
    },
    /// Entries are considered rewritten if they are not trees and if, according to some understanding of identity, they were renamed
    /// or copied.
    /// In case of renames, this means they originally appeared as [`Deletion`][Event::Deletion] signalling their source as well as an
    /// [`Addition`][Event::Addition] acting as destination.
    ///
    /// In case of copies, the `copy` flag is true, and the destination typically is a perfect copy of its source.
    ///
    /// This variant can only be encountered if [rewrite tracking][super::Platform::track_rewrites()] is enabled.
    ///
    /// Note that mode changes may have occurred as well, i.e. changes from executable to non-executable or vice-versa.
    Rewrite {
        /// The location of the source of the rename operation.
        ///
        /// It may be empty if neither [file names][super::Platform::track_filename()] nor [file paths][super::Platform::track_path()]
        /// are tracked.
        source_location: &'a BStr,
        /// The mode of the entry before the rename.
        source_entry_mode: gix_object::tree::EntryMode,
        /// The object id of the entry before the rename.
        ///
        /// Note that this is the same as `id` if we require the [similarity to be 100%][super::Rewrites::percentage], but may
        /// be different otherwise.
        source_id: Id<'old>,
        /// Information about the diff we performed to detect similarity and match the `source_id` with the current state at `id`.
        /// It's `None` if `source_id` is equal to `id`, as identity made an actual diff computation unnecessary.
        diff: Option<DiffLineStats>,
        /// The mode of the entry after the rename.
        /// It could differ but still be considered a rename as we are concerned only about content.
        entry_mode: gix_object::tree::EntryMode,
        /// The object id after the rename.
        id: Id<'new>,
        /// If true, this rewrite is created by copy, and `source_id` is pointing to its source. Otherwise it's a rename, and `source_id`
        /// points to a deleted object, as renames are tracked as deletions and additions of the same or similar content.
        copy: bool,
    },
}
+
+impl<'a, 'old, 'new> Event<'a, 'old, 'new> {
+ /// Produce a platform for performing a line-diff, or `None` if this is not a [`Modification`][Event::Modification]
+ /// or one of the entries to compare is not a blob.
+ pub fn diff(
+ &self,
+ ) -> Option<Result<crate::object::blob::diff::Platform<'old, 'new>, crate::object::blob::diff::init::Error>> {
+ match self {
+ Event::Modification {
+ previous_entry_mode,
+ previous_id,
+ entry_mode,
+ id,
+ } if entry_mode.is_blob() && previous_entry_mode.is_blob() => {
+ Some(crate::object::blob::diff::Platform::from_ids(previous_id, id))
+ }
+ _ => None,
+ }
+ }
+
+ /// Return the current mode of this instance.
+ pub fn entry_mode(&self) -> gix_object::tree::EntryMode {
+ match self {
+ Event::Addition { entry_mode, .. }
+ | Event::Deletion { entry_mode, .. }
+ | Event::Modification { entry_mode, .. }
+ | Event::Rewrite { entry_mode, .. } => *entry_mode,
+ }
+ }
+}
diff --git a/vendor/gix/src/object/tree/diff/for_each.rs b/vendor/gix/src/object/tree/diff/for_each.rs
new file mode 100644
index 000000000..5cae4cf2f
--- /dev/null
+++ b/vendor/gix/src/object/tree/diff/for_each.rs
@@ -0,0 +1,235 @@
+use gix_object::TreeRefIter;
+use gix_odb::FindExt;
+
+use super::{change, Action, Change, Platform};
+use crate::{
+ bstr::BStr,
+ ext::ObjectIdExt,
+ object::tree::{
+ diff,
+ diff::{rewrites, tracked},
+ },
+ Repository, Tree,
+};
+
/// The error returned by methods on the [diff platform][Platform].
#[derive(Debug, thiserror::Error)]
#[allow(missing_docs)]
pub enum Error {
    #[error(transparent)]
    Diff(#[from] gix_diff::tree::changes::Error),
    #[error("The user-provided callback failed")]
    ForEach(#[source] Box<dyn std::error::Error + Send + Sync + 'static>),
    #[error("Could not find blob for similarity checking")]
    FindExistingBlob(#[from] crate::object::find::existing::Error),
    #[error("Could not configure diff algorithm prior to checking similarity")]
    ConfigureDiffAlgorithm(#[from] crate::config::diff::algorithm::Error),
    #[error("Could not traverse tree to obtain possible sources for copies")]
    TraverseTreeForExhaustiveCopyDetection(#[from] gix_traverse::tree::breadthfirst::Error),
}
+
/// Information returned by [`for_each_to_obtain_tree()`][Platform::for_each_to_obtain_tree()] once the diff ran to completion.
#[derive(Clone, Debug, Copy, PartialEq)]
pub struct Outcome {
    /// Available only if [rewrite-tracking was enabled][Platform::track_rewrites()].
    pub rewrites: Option<rewrites::Outcome>,
}
+
/// Compare this tree (the source, or 'lhs') to another tree and emit the changes.
impl<'a, 'old> Platform<'a, 'old> {
    /// Call `for_each` repeatedly with all changes that are needed to convert the source of the diff to the tree to `other`.
    ///
    /// `other` could also be created with the [`empty_tree()`][crate::Repository::empty_tree()] method to handle the first commit
    /// in a repository - it doesn't have a parent, equivalent to compare 'nothing' to something.
    pub fn for_each_to_obtain_tree<'new, E>(
        &mut self,
        other: &Tree<'new>,
        for_each: impl FnMut(Change<'_, 'old, 'new>) -> Result<Action, E>,
    ) -> Result<Outcome, Error>
    where
        E: std::error::Error + Sync + Send + 'static,
    {
        let repo = self.lhs.repo;
        // The delegate forwards low-level changes to the user callback and optionally tracks rewrites.
        let mut delegate = Delegate {
            src_tree: self.lhs,
            other_repo: other.repo,
            recorder: gix_diff::tree::Recorder::default().track_location(self.tracking),
            visit: for_each,
            tracked: self.rewrites.map(|r| tracked::State::new(r, self.tracking)),
            err: None,
        };
        match gix_diff::tree::Changes::from(TreeRefIter::from_bytes(&self.lhs.data)).needed_to_obtain(
            TreeRefIter::from_bytes(&other.data),
            &mut self.state,
            |oid, buf| repo.objects.find_tree_iter(oid, buf),
            &mut delegate,
        ) {
            Ok(()) => {
                // Rewrite tracking withholds some changes; emit them now and collect statistics.
                let outcome = Outcome {
                    rewrites: delegate.process_tracked_changes()?,
                };
                match delegate.err {
                    Some(err) => Err(Error::ForEach(Box::new(err))),
                    None => Ok(outcome),
                }
            }
            // A cancellation may have been caused by the user callback failing - prefer surfacing that error.
            Err(gix_diff::tree::changes::Error::Cancelled) => delegate
                .err
                .map(|err| Err(Error::ForEach(Box::new(err))))
                .unwrap_or(Err(Error::Diff(gix_diff::tree::changes::Error::Cancelled))),
            Err(err) => Err(err.into()),
        }
    }
}
+
/// Adapter connecting the tree-diff machinery to the user callback and optional rewrite tracking.
struct Delegate<'a, 'old, 'new, VisitFn, E> {
    /// The tree the diff originates from (the 'old' side).
    src_tree: &'a Tree<'old>,
    /// The repository of the tree we compare to; 'new'-side ids attach to it.
    other_repo: &'new Repository,
    /// Records path components so emitted changes carry their location.
    recorder: gix_diff::tree::Recorder,
    /// The user-provided callback.
    visit: VisitFn,
    /// Rename/copy tracking state, if enabled.
    tracked: Option<tracked::State>,
    /// The first error returned by `visit`, if any.
    err: Option<E>,
}
+
impl<'a, 'old, 'new, VisitFn, E> Delegate<'a, 'old, 'new, VisitFn, E>
where
    VisitFn: for<'delegate> FnMut(Change<'delegate, 'old, 'new>) -> Result<Action, E>,
    E: std::error::Error + Sync + Send + 'static,
{
    /// Call `visit` on an attached version of `change`.
    ///
    /// An error from `visit` is stored in `stored_err` and translated into a cancellation.
    fn emit_change(
        change: gix_diff::tree::visit::Change,
        location: &BStr,
        visit: &mut VisitFn,
        repo: &'old Repository,
        other_repo: &'new Repository,
        stored_err: &mut Option<E>,
    ) -> gix_diff::tree::visit::Action {
        use gix_diff::tree::visit::Change::*;
        // Translate the low-level change into an event whose ids are attached to the matching repository:
        // 'old'-side ids to `repo`, 'new'-side ids to `other_repo`.
        let event = match change {
            Addition { entry_mode, oid } => change::Event::Addition {
                entry_mode,
                id: oid.attach(other_repo),
            },
            Deletion { entry_mode, oid } => change::Event::Deletion {
                entry_mode,
                id: oid.attach(repo),
            },
            Modification {
                previous_entry_mode,
                previous_oid,
                entry_mode,
                oid,
            } => change::Event::Modification {
                previous_entry_mode,
                entry_mode,
                previous_id: previous_oid.attach(repo),
                id: oid.attach(other_repo),
            },
        };
        match visit(Change { event, location }) {
            Ok(Action::Cancel) => gix_diff::tree::visit::Action::Cancel,
            Ok(Action::Continue) => gix_diff::tree::visit::Action::Continue,
            Err(err) => {
                // Remember the callback error; the caller surfaces it as `Error::ForEach`.
                *stored_err = Some(err);
                gix_diff::tree::visit::Action::Cancel
            }
        }
    }

    /// Emit all changes withheld by rename/copy tracking, turning matched pairs into `Rewrite` events.
    ///
    /// Returns tracking statistics, or `None` if tracking is disabled.
    fn process_tracked_changes(&mut self) -> Result<Option<rewrites::Outcome>, Error> {
        let tracked = match self.tracked.as_mut() {
            Some(t) => t,
            None => return Ok(None),
        };

        let outcome = tracked.emit(
            |dest, source| match source {
                // A matched source makes `dest` the target of a rename or copy.
                Some(source) => {
                    let (oid, mode) = dest.change.oid_and_entry_mode();
                    let change = diff::Change {
                        location: dest.location,
                        event: diff::change::Event::Rewrite {
                            source_location: source.location,
                            source_entry_mode: source.mode,
                            source_id: source.id.attach(self.src_tree.repo),
                            entry_mode: mode,
                            id: oid.to_owned().attach(self.other_repo),
                            diff: source.diff,
                            copy: match source.kind {
                                tracked::visit::Kind::RenameTarget => false,
                                tracked::visit::Kind::CopyDestination => true,
                            },
                        },
                    };
                    match (self.visit)(change) {
                        Ok(Action::Cancel) => gix_diff::tree::visit::Action::Cancel,
                        Ok(Action::Continue) => gix_diff::tree::visit::Action::Continue,
                        Err(err) => {
                            self.err = Some(err);
                            gix_diff::tree::visit::Action::Cancel
                        }
                    }
                }
                // No source was found - emit the withheld change as-is.
                None => Self::emit_change(
                    dest.change,
                    dest.location,
                    &mut self.visit,
                    self.src_tree.repo,
                    self.other_repo,
                    &mut self.err,
                ),
            },
            self.src_tree,
        )?;
        Ok(Some(outcome))
    }
}
+
impl<'a, 'old, 'new, VisitFn, E> gix_diff::tree::Visit for Delegate<'a, 'old, 'new, VisitFn, E>
where
    VisitFn: for<'delegate> FnMut(Change<'delegate, 'old, 'new>) -> Result<Action, E>,
    E: std::error::Error + Sync + Send + 'static,
{
    // Path bookkeeping is forwarded verbatim to the recorder.
    fn pop_front_tracked_path_and_set_current(&mut self) {
        self.recorder.pop_front_tracked_path_and_set_current()
    }

    fn push_back_tracked_path_component(&mut self, component: &BStr) {
        self.recorder.push_back_tracked_path_component(component)
    }

    fn push_path_component(&mut self, component: &BStr) {
        self.recorder.push_path_component(component)
    }

    fn pop_path_component(&mut self) {
        self.recorder.pop_path_component()
    }

    fn visit(&mut self, change: gix_diff::tree::visit::Change) -> gix_diff::tree::visit::Action {
        match self.tracked.as_mut() {
            // With rewrite tracking, a change may be withheld for later matching;
            // only changes handed back by `try_push_change` are emitted immediately.
            Some(tracked) => tracked
                .try_push_change(change, self.recorder.path())
                .map(|change| {
                    Self::emit_change(
                        change,
                        self.recorder.path(),
                        &mut self.visit,
                        self.src_tree.repo,
                        self.other_repo,
                        &mut self.err,
                    )
                })
                .unwrap_or(gix_diff::tree::visit::Action::Continue),
            None => Self::emit_change(
                change,
                self.recorder.path(),
                &mut self.visit,
                self.src_tree.repo,
                self.other_repo,
                &mut self.err,
            ),
        }
    }
}
diff --git a/vendor/gix/src/object/tree/diff/mod.rs b/vendor/gix/src/object/tree/diff/mod.rs
new file mode 100644
index 000000000..5a3bf6ddf
--- /dev/null
+++ b/vendor/gix/src/object/tree/diff/mod.rs
@@ -0,0 +1,118 @@
+use gix_diff::tree::recorder::Location;
+
+use crate::{bstr::BStr, Tree};
+
+/// Returned by the `for_each` function to control flow.
+#[derive(Clone, Copy, PartialOrd, PartialEq, Ord, Eq, Hash)]
+pub enum Action {
+ /// Continue the traversal of changes.
+ Continue,
+ /// Stop the traversal of changes and stop calling this function.
+ Cancel,
+}
+
+impl Default for Action {
+ fn default() -> Self {
+ Action::Continue
+ }
+}
+
+/// Represents any possible change in order to turn one tree into another.
+#[derive(Debug, Clone, Copy)]
+pub struct Change<'a, 'old, 'new> {
+ /// The location of the file or directory described by `event`, if tracking was enabled.
+ ///
+ /// Otherwise this value is always an empty path.
+ pub location: &'a BStr,
+ /// The diff event itself to provide information about what would need to change.
+ pub event: change::Event<'a, 'old, 'new>,
+}
+
+///
+pub mod change;
+
+/// Diffing
+impl<'repo> Tree<'repo> {
+ /// Return a platform to see the changes needed to create other trees, for instance.
+ ///
+ /// # Performance
+ ///
+ /// It's highly recommended to set an object cache to avoid extracting the same object multiple times.
+ /// By default, similar to `git diff`, rename tracking will be enabled if it is not configured.
+ #[allow(clippy::result_large_err)]
+ pub fn changes<'a>(&'a self) -> Result<Platform<'a, 'repo>, rewrites::Error> {
+ Ok(Platform {
+ state: Default::default(),
+ lhs: self,
+ tracking: None,
+ rewrites: self.repo.config.diff_renames()?.unwrap_or_default().into(),
+ })
+ }
+}
+
+/// The diffing platform returned by [`Tree::changes()`].
+#[derive(Clone)]
+pub struct Platform<'a, 'repo> {
+ state: gix_diff::tree::State,
+ lhs: &'a Tree<'repo>,
+ tracking: Option<Location>,
+ rewrites: Option<Rewrites>,
+}
+
+/// A structure to capture how to perform rename and copy tracking
+#[derive(Debug, Copy, Clone, PartialEq)]
+pub struct Rewrites {
+ /// If `Some(…)`, do also find copies. `None` is the default which does not try to detect copies at all.
+ ///
+ /// Note that this is an even more expensive operation than detecting renames as files.
+ pub copies: Option<rewrites::Copies>,
+ /// The percentage of similarity needed for files to be considered renamed, defaulting to `Some(0.5)`.
+ /// This field is similar to `git diff -M50%`.
+ ///
+ /// If `None`, files are only considered equal if their content matches 100%.
+ /// Note that values greater than 1.0 have no different effect than 1.0.
+ pub percentage: Option<f32>,
+ /// The amount of files to consider for fuzzy rename or copy tracking. Defaults to 1000, meaning that only 1000*1000
+ /// combinations can be tested for fuzzy matches, i.e. the ones that try to find matches by comparing similarity.
+ /// If 0, there is no limit.
+ ///
+ /// If the limit would not be enough to test the entire set of combinations, the algorithm will trade in precision and not
+ /// run the fuzzy version of identity tests at all. That way results are never partial.
+ pub limit: usize,
+}
+
+///
+pub mod rewrites;
+
+/// Types to actually perform rename tracking.
+pub(crate) mod tracked;
+
+/// Configuration
+impl<'a, 'repo> Platform<'a, 'repo> {
+ /// Keep track of file-names, which makes the [`location`][Change::location] field usable with the filename of the changed item.
+ pub fn track_filename(&mut self) -> &mut Self {
+ self.tracking = Some(Location::FileName);
+ self
+ }
+
+ /// Keep track of the entire path of a change, relative to the repository.
+ ///
+ /// This makes the [`location`][Change::location] field usable.
+ pub fn track_path(&mut self) -> &mut Self {
+ self.tracking = Some(Location::Path);
+ self
+ }
+
+ /// Provide `None` to disable rewrite tracking entirely, or pass `Some(<configuration>)` to control to
+ /// what extent rename and copy tracking is performed.
+ ///
+ /// Note that by default, the git configuration determines rewrite tracking and git defaults are used
+ /// if nothing is configured, which turns rename tracking with 50% similarity on, while not tracking copies at all.
+ pub fn track_rewrites(&mut self, renames: Option<Rewrites>) -> &mut Self {
+ self.rewrites = renames;
+ self
+ }
+}
+
+///
+pub mod for_each;
diff --git a/vendor/gix/src/object/tree/diff/rewrites.rs b/vendor/gix/src/object/tree/diff/rewrites.rs
new file mode 100644
index 000000000..304894d15
--- /dev/null
+++ b/vendor/gix/src/object/tree/diff/rewrites.rs
@@ -0,0 +1,108 @@
+use crate::{
+ config::{cache::util::ApplyLeniency, tree::Diff},
+ diff::rename::Tracking,
+ object::tree::diff::Rewrites,
+};
+
+/// From where to source copies
+#[derive(Debug, Copy, Clone, Eq, PartialEq)]
+pub enum CopySource {
+ /// Find copies from the set of modified files only.
+ FromSetOfModifiedFiles,
+ /// Find copies from the set of changed files, as well as all files known to the source (i.e. previous state) of the tree.
+ ///
+ /// This can be an expensive operation as it scales quadratically with the total number of files in the tree.
+ FromSetOfModifiedFilesAndSourceTree,
+}
+
+/// How to determine copied files.
+#[derive(Debug, Copy, Clone, PartialEq)]
+pub struct Copies {
+ /// The set of files to search when finding the source of copies.
+ pub source: CopySource,
+ /// Equivalent to [`Rewrites::percentage`], but used for copy tracking.
+ ///
+ /// Useful to have similarity-based rename tracking and cheaper copy tracking, which also is the default
+ /// as only identity plays a role.
+ pub percentage: Option<f32>,
+}
+
+impl Default for Copies {
+ fn default() -> Self {
+ Copies {
+ source: CopySource::FromSetOfModifiedFiles,
+ percentage: Some(0.5),
+ }
+ }
+}
+
+/// Information collected while handling rewrites of files which may be tracked.
+#[derive(Default, Clone, Copy, Debug, PartialEq)]
+pub struct Outcome {
+ /// The options used to guide the rewrite tracking. Either fully provided by the caller or retrieved from git configuration.
+ pub options: Rewrites,
+ /// The amount of similarity checks that have been conducted to find renamed files and potentially copies.
+ pub num_similarity_checks: usize,
+ /// Set to the amount of worst-case rename permutations we didn't search as our limit didn't allow it.
+ pub num_similarity_checks_skipped_for_rename_tracking_due_to_limit: usize,
+ /// Set to the amount of worst-case copy permutations we didn't search as our limit didn't allow it.
+ pub num_similarity_checks_skipped_for_copy_tracking_due_to_limit: usize,
+}
+
+/// The error returned by [`Rewrites::try_from_config()`].
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum Error {
+ #[error(transparent)]
+ ConfigDiffRenames(#[from] crate::config::key::GenericError),
+ #[error(transparent)]
+ ConfigDiffRenameLimit(#[from] crate::config::unsigned_integer::Error),
+}
+
+/// The default settings for rewrites according to the git configuration defaults.
+impl Default for Rewrites {
+ fn default() -> Self {
+ Rewrites {
+ copies: None,
+ percentage: Some(0.5),
+ limit: 1000,
+ }
+ }
+}
+
+impl Rewrites {
+ /// Create an instance by reading all relevant information from the `config`uration, while being `lenient` or not.
+ /// Returns `Ok(None)` if nothing is configured.
+ ///
+ /// Note that missing values will be defaulted similar to what git does.
+ #[allow(clippy::result_large_err)]
+ pub fn try_from_config(config: &gix_config::File<'static>, lenient: bool) -> Result<Option<Self>, Error> {
+ let key = "diff.renames";
+ let copies = match config
+ .boolean_by_key(key)
+ .map(|value| Diff::RENAMES.try_into_renames(value, || config.string_by_key(key)))
+ .transpose()
+ .with_leniency(lenient)?
+ {
+ Some(renames) => match renames {
+ Tracking::Disabled => return Ok(None),
+ Tracking::Renames => None,
+ Tracking::RenamesAndCopies => Some(Copies::default()),
+ },
+ None => return Ok(None),
+ };
+
+ let default = Self::default();
+ Ok(Rewrites {
+ copies,
+ limit: config
+ .integer_by_key("diff.renameLimit")
+ .map(|value| Diff::RENAME_LIMIT.try_into_usize(value))
+ .transpose()
+ .with_leniency(lenient)?
+ .unwrap_or(default.limit),
+ ..default
+ }
+ .into())
+ }
+}
diff --git a/vendor/gix/src/object/tree/diff/tracked.rs b/vendor/gix/src/object/tree/diff/tracked.rs
new file mode 100644
index 000000000..3bbe01624
--- /dev/null
+++ b/vendor/gix/src/object/tree/diff/tracked.rs
@@ -0,0 +1,491 @@
+use std::ops::Range;
+
+use gix_diff::tree::visit::Change;
+use gix_object::tree::EntryMode;
+
+use crate::{
+ bstr::BStr,
+ ext::ObjectIdExt,
+ object::tree::diff::{
+ change::DiffLineStats,
+ rewrites::{CopySource, Outcome},
+ Rewrites,
+ },
+ Repository, Tree,
+};
+
+/// A set of tracked items allows one to figure out their relations by computing their similarity.
+pub struct Item {
+ /// The underlying raw change
+ change: Change,
+ /// The slice into the backing for paths.
+ location: Range<usize>,
+ /// If true, this item was already emitted, i.e. seen by the caller.
+ emitted: bool,
+}
+
+impl Item {
+ fn location<'a>(&self, backing: &'a [u8]) -> &'a BStr {
+ backing[self.location.clone()].as_ref()
+ }
+ fn entry_mode_compatible(&self, mode: EntryMode) -> bool {
+ use EntryMode::*;
+ matches!(
+ (mode, self.change.entry_mode()),
+ (Blob | BlobExecutable, Blob | BlobExecutable) | (Link, Link)
+ )
+ }
+
+ fn is_source_for_destination_of(&self, kind: visit::Kind, dest_item_mode: EntryMode) -> bool {
+ self.entry_mode_compatible(dest_item_mode)
+ && match kind {
+ visit::Kind::RenameTarget => !self.emitted && matches!(self.change, Change::Deletion { .. }),
+ visit::Kind::CopyDestination => {
+ matches!(self.change, Change::Modification { .. })
+ }
+ }
+ }
+}
+
+pub struct State {
+ items: Vec<Item>,
+ path_backing: Vec<u8>,
+ rewrites: Rewrites,
+ tracking: Option<gix_diff::tree::recorder::Location>,
+}
+
+pub mod visit {
+ use crate::{bstr::BStr, object::tree::diff::change::DiffLineStats};
+
+ pub struct Source<'a> {
+ pub mode: gix_object::tree::EntryMode,
+ pub id: gix_hash::ObjectId,
+ pub kind: Kind,
+ pub location: &'a BStr,
+ pub diff: Option<DiffLineStats>,
+ }
+
+ #[derive(Debug, Copy, Clone, Eq, PartialEq)]
+ pub enum Kind {
+ RenameTarget,
+ CopyDestination,
+ }
+
+ pub struct Destination<'a> {
+ pub change: gix_diff::tree::visit::Change,
+ pub location: &'a BStr,
+ }
+}
+
+impl State {
+ pub(crate) fn new(renames: Rewrites, tracking: Option<gix_diff::tree::recorder::Location>) -> Self {
+ State {
+ items: vec![],
+ path_backing: vec![],
+ rewrites: renames,
+ tracking,
+ }
+ }
+}
+
+/// build state and find matches.
+impl State {
+ /// We may refuse the push if that information isn't needed for what we have to track.
+ pub fn try_push_change(&mut self, change: Change, location: &BStr) -> Option<Change> {
+ if !change.entry_mode().is_blob_or_symlink() {
+ return Some(change);
+ }
+ let keep = match (self.rewrites.copies, &change) {
+ (Some(_find_copies), _) => true,
+ (None, Change::Modification { .. }) => false,
+ (None, _) => true,
+ };
+
+ if !keep {
+ return Some(change);
+ }
+
+ let start = self.path_backing.len();
+ self.path_backing.extend_from_slice(location);
+ self.items.push(Item {
+ location: start..self.path_backing.len(),
+ change,
+ emitted: false,
+ });
+ None
+ }
+
+ /// Can only be called once effectively as it alters its own state.
+ ///
+ /// `cb(destination, source)` is called for each item, either with `Some(source)` if it's
+ /// the destination of a copy or rename, or with `None` for source if no relation to other
+ /// items in the tracked set exists.
+ pub fn emit(
+ &mut self,
+ mut cb: impl FnMut(visit::Destination<'_>, Option<visit::Source<'_>>) -> gix_diff::tree::visit::Action,
+ src_tree: &Tree<'_>,
+ ) -> Result<Outcome, crate::object::tree::diff::for_each::Error> {
+ fn by_id_and_location(a: &Item, b: &Item) -> std::cmp::Ordering {
+ a.change.oid().cmp(b.change.oid()).then_with(|| {
+ a.location
+ .start
+ .cmp(&b.location.start)
+ .then(a.location.end.cmp(&b.location.end))
+ })
+ }
+ self.items.sort_by(by_id_and_location);
+
+ let mut out = Outcome {
+ options: self.rewrites,
+ ..Default::default()
+ };
+ out = self.match_pairs_of_kind(
+ visit::Kind::RenameTarget,
+ &mut cb,
+ self.rewrites.percentage,
+ out,
+ src_tree.repo,
+ )?;
+
+ if let Some(copies) = self.rewrites.copies {
+ out = self.match_pairs_of_kind(
+ visit::Kind::CopyDestination,
+ &mut cb,
+ copies.percentage,
+ out,
+ src_tree.repo,
+ )?;
+
+ match copies.source {
+ CopySource::FromSetOfModifiedFiles => {}
+ CopySource::FromSetOfModifiedFilesAndSourceTree => {
+ src_tree
+ .traverse()
+ .breadthfirst(&mut tree_to_events::Delegate::new(self))?;
+ self.items.sort_by(by_id_and_location);
+
+ out = self.match_pairs_of_kind(
+ visit::Kind::CopyDestination,
+ &mut cb,
+ copies.percentage,
+ out,
+ src_tree.repo,
+ )?;
+ }
+ }
+ }
+
+ self.items
+ .sort_by(|a, b| a.location(&self.path_backing).cmp(b.location(&self.path_backing)));
+ for item in self.items.drain(..).filter(|item| !item.emitted) {
+ if cb(
+ visit::Destination {
+ location: item.location(&self.path_backing),
+ change: item.change,
+ },
+ None,
+ ) == gix_diff::tree::visit::Action::Cancel
+ {
+ break;
+ }
+ }
+ Ok(out)
+ }
+
+ fn match_pairs_of_kind(
+ &mut self,
+ kind: visit::Kind,
+ cb: &mut impl FnMut(visit::Destination<'_>, Option<visit::Source<'_>>) -> gix_diff::tree::visit::Action,
+ percentage: Option<f32>,
+ mut out: Outcome,
+ repo: &Repository,
+ ) -> Result<Outcome, crate::object::tree::diff::for_each::Error> {
+ // we try to cheaply reduce the set of possibilities first, before possibly looking more exhaustively.
+ let needs_second_pass = !needs_exact_match(percentage);
+ if self.match_pairs(cb, None /* by identity */, kind, repo, &mut out)? == gix_diff::tree::visit::Action::Cancel
+ {
+ return Ok(out);
+ }
+ if needs_second_pass {
+ let is_limited = if self.rewrites.limit == 0 {
+ false
+ } else if let Some(permutations) = permutations_over_limit(&self.items, self.rewrites.limit, kind) {
+ match kind {
+ visit::Kind::RenameTarget => {
+ out.num_similarity_checks_skipped_for_rename_tracking_due_to_limit = permutations;
+ }
+ visit::Kind::CopyDestination => {
+ out.num_similarity_checks_skipped_for_copy_tracking_due_to_limit = permutations;
+ }
+ }
+ true
+ } else {
+ false
+ };
+ if !is_limited {
+ self.match_pairs(cb, self.rewrites.percentage, kind, repo, &mut out)?;
+ }
+ }
+ Ok(out)
+ }
+
+ fn match_pairs(
+ &mut self,
+ cb: &mut impl FnMut(visit::Destination<'_>, Option<visit::Source<'_>>) -> gix_diff::tree::visit::Action,
+ percentage: Option<f32>,
+ kind: visit::Kind,
+ repo: &Repository,
+ stats: &mut Outcome,
+ ) -> Result<gix_diff::tree::visit::Action, crate::object::tree::diff::for_each::Error> {
+ // TODO(perf): reuse object data and interner state and interned tokens, make these available to `find_match()`
+ let mut dest_ofs = 0;
+ while let Some((mut dest_idx, dest)) = self.items[dest_ofs..].iter().enumerate().find_map(|(idx, item)| {
+ (!item.emitted && matches!(item.change, Change::Addition { .. })).then_some((idx, item))
+ }) {
+ dest_idx += dest_ofs;
+ dest_ofs = dest_idx + 1;
+ let src =
+ find_match(&self.items, dest, dest_idx, percentage, kind, repo, stats)?.map(|(src_idx, src, diff)| {
+ let (id, mode) = src.change.oid_and_entry_mode();
+ let id = id.to_owned();
+ let location = src.location(&self.path_backing);
+ (
+ visit::Source {
+ mode,
+ id,
+ kind,
+ location,
+ diff,
+ },
+ src_idx,
+ )
+ });
+ if src.is_none() {
+ continue;
+ }
+ let location = dest.location(&self.path_backing);
+ let change = dest.change.clone();
+ let dest = visit::Destination { change, location };
+ self.items[dest_idx].emitted = true;
+ if let Some(src_idx) = src.as_ref().map(|t| t.1) {
+ self.items[src_idx].emitted = true;
+ }
+ if cb(dest, src.map(|t| t.0)) == gix_diff::tree::visit::Action::Cancel {
+ return Ok(gix_diff::tree::visit::Action::Cancel);
+ }
+ }
+ Ok(gix_diff::tree::visit::Action::Continue)
+ }
+}
+
+fn permutations_over_limit(items: &[Item], limit: usize, kind: visit::Kind) -> Option<usize> {
+ let (sources, destinations) = items
+ .iter()
+ .filter(|item| match kind {
+ visit::Kind::RenameTarget => !item.emitted,
+ visit::Kind::CopyDestination => true,
+ })
+ .fold((0, 0), |(mut src, mut dest), item| {
+ match item.change {
+ Change::Addition { .. } => {
+ dest += 1;
+ }
+ Change::Deletion { .. } => {
+ if kind == visit::Kind::RenameTarget {
+ src += 1
+ }
+ }
+ Change::Modification { .. } => {
+ if kind == visit::Kind::CopyDestination {
+ src += 1
+ }
+ }
+ }
+ (src, dest)
+ });
+ let permutations = sources * destinations;
+ (permutations > limit * limit).then_some(permutations)
+}
+
+fn needs_exact_match(percentage: Option<f32>) -> bool {
+ percentage.map_or(true, |p| p >= 1.0)
+}
+
+/// <src_idx, src, possibly diff stat>
+type SourceTuple<'a> = (usize, &'a Item, Option<DiffLineStats>);
+
+/// Find `item` in our set of items ignoring `item_idx` to avoid finding ourselves, by similarity indicated by `percentage`.
+/// The latter can be `None` or `Some(x)` where `x>=1` for identity, and anything else for similarity.
+/// We also ignore emitted items entirely.
+/// Use `kind` to indicate what kind of match we are looking for, which might be deletions matching an `item` addition, or
+/// any non-deletion otherwise.
+/// Note that we always try to find by identity first even if a percentage is given as it's much faster and may reduce the set
+/// of items to be searched.
+fn find_match<'a>(
+ items: &'a [Item],
+ item: &Item,
+ item_idx: usize,
+ percentage: Option<f32>,
+ kind: visit::Kind,
+ repo: &Repository,
+ stats: &mut Outcome,
+) -> Result<Option<SourceTuple<'a>>, crate::object::tree::diff::for_each::Error> {
+ let (item_id, item_mode) = item.change.oid_and_entry_mode();
+ if needs_exact_match(percentage) || item_mode == gix_object::tree::EntryMode::Link {
+ let first_idx = items.partition_point(|a| a.change.oid() < item_id);
+ let range = match items.get(first_idx..).map(|items| {
+ let end = items
+ .iter()
+ .position(|a| a.change.oid() != item_id)
+ .map(|idx| first_idx + idx)
+ .unwrap_or(items.len());
+ first_idx..end
+ }) {
+ Some(range) => range,
+ None => return Ok(None),
+ };
+ if range.is_empty() {
+ return Ok(None);
+ }
+ let res = items[range.clone()].iter().enumerate().find_map(|(mut src_idx, src)| {
+ src_idx += range.start;
+ (src_idx != item_idx && src.is_source_for_destination_of(kind, item_mode)).then_some((src_idx, src, None))
+ });
+ if let Some(src) = res {
+ return Ok(Some(src));
+ }
+ } else {
+ let new = item_id.to_owned().attach(repo).object()?;
+ let percentage = percentage.expect("it's set to something below 1.0 and we assured this");
+ debug_assert!(
+ item.change.entry_mode().is_blob(),
+ "symlinks are matched exactly, and trees aren't used here"
+ );
+ let algo = repo.config.diff_algorithm()?;
+ for (can_idx, src) in items
+ .iter()
+ .enumerate()
+ .filter(|(src_idx, src)| *src_idx != item_idx && src.is_source_for_destination_of(kind, item_mode))
+ {
+ let old = src.change.oid().to_owned().attach(repo).object()?;
+ // TODO: make sure we get attribute handling and binary skips and filters right here. There is crate::object::blob::diff::Platform
+ // which should have facilities for that one day, but we don't use it because we need newlines in our tokens.
+ let tokens = gix_diff::blob::intern::InternedInput::new(
+ gix_diff::blob::sources::byte_lines_with_terminator(&old.data),
+ gix_diff::blob::sources::byte_lines_with_terminator(&new.data),
+ );
+ let counts = gix_diff::blob::diff(
+ algo,
+ &tokens,
+ gix_diff::blob::sink::Counter::new(diff::Statistics {
+ removed_bytes: 0,
+ input: &tokens,
+ }),
+ );
+ let similarity = (old.data.len() - counts.wrapped) as f32 / old.data.len().max(new.data.len()) as f32;
+ stats.num_similarity_checks += 1;
+ if similarity >= percentage {
+ return Ok(Some((
+ can_idx,
+ src,
+ DiffLineStats {
+ removals: counts.removals,
+ insertions: counts.insertions,
+ before: tokens.before.len().try_into().expect("interner handles only u32"),
+ after: tokens.after.len().try_into().expect("interner handles only u32"),
+ }
+ .into(),
+ )));
+ }
+ }
+ }
+ Ok(None)
+}
+
+mod diff {
+ use std::ops::Range;
+
+ pub struct Statistics<'a, 'data> {
+ pub removed_bytes: usize,
+ pub input: &'a gix_diff::blob::intern::InternedInput<&'data [u8]>,
+ }
+
+ impl<'a, 'data> gix_diff::blob::Sink for Statistics<'a, 'data> {
+ type Out = usize;
+
+ fn process_change(&mut self, before: Range<u32>, _after: Range<u32>) {
+ self.removed_bytes = self.input.before[before.start as usize..before.end as usize]
+ .iter()
+ .map(|token| self.input.interner[*token].len())
+ .sum();
+ }
+
+ fn finish(self) -> Self::Out {
+ self.removed_bytes
+ }
+ }
+}
+
+mod tree_to_events {
+ use gix_diff::tree::visit::Change;
+ use gix_object::tree::EntryRef;
+
+ use crate::bstr::BStr;
+
+ pub struct Delegate<'a> {
+ parent: &'a mut super::State,
+ recorder: gix_traverse::tree::Recorder,
+ }
+
+ impl<'a> Delegate<'a> {
+ pub fn new(parent: &'a mut super::State) -> Self {
+ let tracking = parent.tracking.map(|t| match t {
+ gix_diff::tree::recorder::Location::FileName => gix_traverse::tree::recorder::Location::FileName,
+ gix_diff::tree::recorder::Location::Path => gix_traverse::tree::recorder::Location::Path,
+ });
+ Self {
+ parent,
+ recorder: gix_traverse::tree::Recorder::default().track_location(tracking),
+ }
+ }
+ }
+
+ impl gix_traverse::tree::Visit for Delegate<'_> {
+ fn pop_front_tracked_path_and_set_current(&mut self) {
+ self.recorder.pop_front_tracked_path_and_set_current()
+ }
+
+ fn push_back_tracked_path_component(&mut self, component: &BStr) {
+ self.recorder.push_back_tracked_path_component(component)
+ }
+
+ fn push_path_component(&mut self, component: &BStr) {
+ self.recorder.push_path_component(component)
+ }
+
+ fn pop_path_component(&mut self) {
+ self.recorder.pop_path_component();
+ }
+
+ fn visit_tree(&mut self, _entry: &EntryRef<'_>) -> gix_traverse::tree::visit::Action {
+ gix_traverse::tree::visit::Action::Continue
+ }
+
+ fn visit_nontree(&mut self, entry: &EntryRef<'_>) -> gix_traverse::tree::visit::Action {
+ if entry.mode.is_blob() {
+ self.parent.try_push_change(
+ Change::Modification {
+ previous_entry_mode: entry.mode,
+ previous_oid: gix_hash::ObjectId::null(entry.oid.kind()),
+ entry_mode: entry.mode,
+ oid: entry.oid.to_owned(),
+ },
+ self.recorder.path(),
+ );
+ // make sure these aren't viable to be emitted anymore.
+ self.parent.items.last_mut().expect("just pushed").emitted = true;
+ }
+ gix_traverse::tree::visit::Action::Continue
+ }
+ }
+}
diff --git a/vendor/gix/src/object/tree/iter.rs b/vendor/gix/src/object/tree/iter.rs
new file mode 100644
index 000000000..c841e2574
--- /dev/null
+++ b/vendor/gix/src/object/tree/iter.rs
@@ -0,0 +1,53 @@
+use super::Tree;
+use crate::Repository;
+
+/// An entry within a tree
+pub struct EntryRef<'repo, 'a> {
+ /// The actual entry ref we are wrapping.
+ pub inner: gix_object::tree::EntryRef<'a>,
+
+ pub(crate) repo: &'repo Repository,
+}
+
+impl<'repo, 'a> EntryRef<'repo, 'a> {
+ /// The kind of object to which [`id()`][Self::id()] is pointing.
+ pub fn mode(&self) -> gix_object::tree::EntryMode {
+ self.inner.mode
+ }
+
+ /// The name of the file in the parent tree.
+ pub fn filename(&self) -> &gix_object::bstr::BStr {
+ self.inner.filename
+ }
+
+ /// Return the entries id, connected to the underlying repository.
+ pub fn id(&self) -> crate::Id<'repo> {
+ crate::Id::from_id(self.inner.oid, self.repo)
+ }
+
+ /// Return the entries id, without repository connection.
+ pub fn oid(&self) -> gix_hash::ObjectId {
+ self.inner.oid.to_owned()
+ }
+}
+
+impl<'repo, 'a> std::fmt::Display for EntryRef<'repo, 'a> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ write!(
+ f,
+ "{:06o} {:>6} {}\t{}",
+ self.mode() as u32,
+ self.mode().as_str(),
+ self.id().shorten_or_id(),
+ self.filename()
+ )
+ }
+}
+
+impl<'repo> Tree<'repo> {
+ /// Return an iterator over tree entries to obtain information about files and directories this tree contains.
+ pub fn iter(&self) -> impl Iterator<Item = Result<EntryRef<'repo, '_>, gix_object::decode::Error>> {
+ let repo = self.repo;
+ gix_object::TreeRefIter::from_bytes(&self.data).map(move |e| e.map(|entry| EntryRef { inner: entry, repo }))
+ }
+}
diff --git a/vendor/gix/src/object/tree/mod.rs b/vendor/gix/src/object/tree/mod.rs
new file mode 100644
index 000000000..db094bcb9
--- /dev/null
+++ b/vendor/gix/src/object/tree/mod.rs
@@ -0,0 +1,158 @@
+use gix_hash::ObjectId;
+use gix_object::{bstr::BStr, TreeRefIter};
+
+use crate::{object::find, Id, Tree};
+
+/// Initialization
+impl<'repo> Tree<'repo> {
+ /// Obtain a tree instance by handing in all components that it is made up of.
+ pub fn from_data(id: impl Into<ObjectId>, data: Vec<u8>, repo: &'repo crate::Repository) -> Self {
+ Tree {
+ id: id.into(),
+ data,
+ repo,
+ }
+ }
+}
+
+/// Access
+impl<'repo> Tree<'repo> {
+ /// Return this tree's identifier.
+ pub fn id(&self) -> Id<'repo> {
+ Id::from_id(self.id, self.repo)
+ }
+
+ // TODO: tests.
+ /// Follow a sequence of `path` components starting from this instance, and look them up one by one until the last component
+ /// is looked up and its tree entry is returned.
+ ///
+ /// # Performance Notes
+ ///
+ /// Searching tree entries is currently done in sequence, which allows the search to be allocation-free. It would be possible
+ /// to re-use a vector and use a binary search instead, which might be able to improve performance overall.
+ /// However, a benchmark should be created first to have some data and see which trade-off to choose here.
+ ///
+ /// # Why is this consuming?
+ ///
+ /// The borrow checker shows pathological behaviour in loops that mutate a buffer, but also want to return from it.
+ /// Workarounds include keeping an index and doing a separate access to the memory, which seems hard to do here without
+ /// re-parsing the entries.
+ pub fn lookup_entry<I, P>(mut self, path: I) -> Result<Option<Entry<'repo>>, find::existing::Error>
+ where
+ I: IntoIterator<Item = P>,
+ P: PartialEq<BStr>,
+ {
+ let mut path = path.into_iter().peekable();
+ while let Some(component) = path.next() {
+ match TreeRefIter::from_bytes(&self.data)
+ .filter_map(Result::ok)
+ .find(|entry| component.eq(entry.filename))
+ {
+ Some(entry) => {
+ if path.peek().is_none() {
+ return Ok(Some(Entry {
+ inner: entry.into(),
+ repo: self.repo,
+ }));
+ } else {
+ let next_id = entry.oid.to_owned();
+ let repo = self.repo;
+ drop(self);
+ self = match repo.find_object(next_id)?.try_into_tree() {
+ Ok(tree) => tree,
+ Err(_) => return Ok(None),
+ };
+ }
+ }
+ None => return Ok(None),
+ }
+ }
+ Ok(None)
+ }
+
+ /// Like [`lookup_entry()`][Self::lookup_entry()], but takes a `Path` directly via `relative_path`, a path relative to this tree.
+ ///
+ /// # Note
+ ///
+ /// If any path component contains illformed UTF-8 and thus can't be converted to bytes on platforms which can't do so natively,
+ /// the returned component will be empty which makes the lookup fail.
+ pub fn lookup_entry_by_path(
+ self,
+ relative_path: impl AsRef<std::path::Path>,
+ ) -> Result<Option<Entry<'repo>>, find::existing::Error> {
+ use crate::bstr::ByteSlice;
+ self.lookup_entry(relative_path.as_ref().components().map(|c: std::path::Component<'_>| {
+ gix_path::os_str_into_bstr(c.as_os_str())
+ .unwrap_or_else(|_| "".into())
+ .as_bytes()
+ }))
+ }
+}
+
+///
+pub mod diff;
+
+///
+pub mod traverse;
+
+///
+mod iter;
+pub use iter::EntryRef;
+
+impl<'r> std::fmt::Debug for Tree<'r> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ write!(f, "Tree({})", self.id)
+ }
+}
+
+/// An entry in a [`Tree`], similar to an entry in a directory.
+#[derive(PartialEq, Debug, Clone)]
+pub struct Entry<'repo> {
+ inner: gix_object::tree::Entry,
+ repo: &'repo crate::Repository,
+}
+
+mod entry {
+ use crate::{bstr::BStr, ext::ObjectIdExt, object::tree::Entry};
+
+ /// Access
+ impl<'repo> Entry<'repo> {
+ /// The kind of object to which `oid` is pointing.
+ pub fn mode(&self) -> gix_object::tree::EntryMode {
+ self.inner.mode
+ }
+
+ /// The name of the file in the parent tree.
+ pub fn filename(&self) -> &BStr {
+ self.inner.filename.as_ref()
+ }
+
+ /// Return the object id of the entry.
+ pub fn id(&self) -> crate::Id<'repo> {
+ self.inner.oid.attach(self.repo)
+ }
+
+ /// Return the object this entry points to.
+ pub fn object(&self) -> Result<crate::Object<'repo>, crate::object::find::existing::Error> {
+ self.id().object()
+ }
+
+ /// Return the plain object id of this entry, without access to the repository.
+ pub fn oid(&self) -> &gix_hash::oid {
+ &self.inner.oid
+ }
+
+ /// Return the plain object id of this entry, without access to the repository.
+ pub fn object_id(&self) -> gix_hash::ObjectId {
+ self.inner.oid
+ }
+ }
+
+ /// Consuming
+ impl Entry<'_> {
+ /// Return the contained object.
+ pub fn detach(self) -> gix_object::tree::Entry {
+ self.inner
+ }
+ }
+}
diff --git a/vendor/gix/src/object/tree/traverse.rs b/vendor/gix/src/object/tree/traverse.rs
new file mode 100644
index 000000000..974df6b0d
--- /dev/null
+++ b/vendor/gix/src/object/tree/traverse.rs
@@ -0,0 +1,62 @@
+use gix_odb::FindExt;
+
+use crate::Tree;
+
+/// Traversal
+impl<'repo> Tree<'repo> {
+ /// Obtain a platform for initiating a variety of traversals.
+ pub fn traverse(&self) -> Platform<'_, 'repo> {
+ Platform {
+ root: self,
+ breadthfirst: BreadthFirstPresets { root: self },
+ }
+ }
+}
+
+/// An intermediate object to start traversing the parent tree from.
+pub struct Platform<'a, 'repo> {
+ root: &'a Tree<'repo>,
+ /// Provides easy access to presets for common breadth-first traversal.
+ pub breadthfirst: BreadthFirstPresets<'a, 'repo>,
+}
+
+/// Presets for common choices in breadth-first traversal.
+#[derive(Copy, Clone)]
+pub struct BreadthFirstPresets<'a, 'repo> {
+ root: &'a Tree<'repo>,
+}
+
+impl<'a, 'repo> BreadthFirstPresets<'a, 'repo> {
+ /// Returns all entries and their file paths, recursively, as reachable from this tree.
+ pub fn files(&self) -> Result<Vec<gix_traverse::tree::recorder::Entry>, gix_traverse::tree::breadthfirst::Error> {
+ let mut recorder = gix_traverse::tree::Recorder::default();
+ Platform {
+ root: self.root,
+ breadthfirst: *self,
+ }
+ .breadthfirst(&mut recorder)?;
+ Ok(recorder.records)
+ }
+}
+
+impl<'a, 'repo> Platform<'a, 'repo> {
+ /// Start a breadth-first, recursive traversal using `delegate`, for which a [`Recorder`][gix_traverse::tree::Recorder] can be used to get started.
+ ///
+ /// # Note
+ ///
+ /// - Results are returned in sort order according to tree-entry sorting rules, one level at a time.
+ /// - for obtaining the direct children of the tree, use [.iter()][crate::Tree::iter()] instead.
+ pub fn breadthfirst<V>(&self, delegate: &mut V) -> Result<(), gix_traverse::tree::breadthfirst::Error>
+ where
+ V: gix_traverse::tree::Visit,
+ {
+ let root = gix_object::TreeRefIter::from_bytes(&self.root.data);
+ let state = gix_traverse::tree::breadthfirst::State::default();
+ gix_traverse::tree::breadthfirst(
+ root,
+ state,
+ |oid, buf| self.root.repo.objects.find_tree_iter(oid, buf).ok(),
+ delegate,
+ )
+ }
+}
diff --git a/vendor/gix/src/open/mod.rs b/vendor/gix/src/open/mod.rs
new file mode 100644
index 000000000..77018f5a2
--- /dev/null
+++ b/vendor/gix/src/open/mod.rs
@@ -0,0 +1,67 @@
+use std::path::PathBuf;
+
+use crate::{bstr::BString, config, permission, Permissions};
+
+/// The options used in [`ThreadSafeRepository::open_opts()`][crate::ThreadSafeRepository::open_opts()].
+///
+/// ### Replacement Objects for the object database
+///
+/// The environment variables `GIT_REPLACE_REF_BASE` and `GIT_NO_REPLACE_OBJECTS` are mapped to `gitoxide.objects.replaceRefBase`
+/// and `gitoxide.objects.noReplace` respectively and then interpreted exactly as their environment variable counterparts.
+///
+/// Use [Permissions] to control which environment variables can be read, and config-overrides to control these values programmatically.
+#[derive(Clone)]
+pub struct Options {
+ pub(crate) object_store_slots: gix_odb::store::init::Slots,
+ /// Define what is allowed while opening a repository.
+ pub permissions: Permissions,
+ pub(crate) git_dir_trust: Option<gix_sec::Trust>,
+ /// Warning: this one is copied to config::Cache - don't change it after repo open or keep in sync.
+ pub(crate) filter_config_section: Option<fn(&gix_config::file::Metadata) -> bool>,
+ pub(crate) lossy_config: Option<bool>,
+ pub(crate) lenient_config: bool,
+ pub(crate) bail_if_untrusted: bool,
+ pub(crate) api_config_overrides: Vec<BString>,
+ pub(crate) cli_config_overrides: Vec<BString>,
+ pub(crate) open_path_as_is: bool,
+ /// Internal to pass an already obtained CWD on to where it may also be used. This avoids the CWD being queried more than once per repo.
+ pub(crate) current_dir: Option<PathBuf>,
+}
+
+/// The error returned by [`crate::open()`].
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum Error {
+ #[error("Failed to load the git configuration")]
+ Config(#[from] config::Error),
+ #[error("\"{path}\" does not appear to be a git repository")]
+ NotARepository {
+ source: gix_discover::is_git::Error,
+ path: PathBuf,
+ },
+ #[error(transparent)]
+ Io(#[from] std::io::Error),
+ #[error("The git directory at '{}' is considered unsafe as it's not owned by the current user.", .path.display())]
+ UnsafeGitDir { path: PathBuf },
+ #[error(transparent)]
+ EnvironmentAccessDenied(#[from] permission::env_var::resource::Error),
+}
+
+mod options;
+
+mod repository;
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn size_of_options() {
+ let actual = std::mem::size_of::<Options>();
+ let limit = 160;
+ assert!(
+ actual <= limit,
+ "{actual} <= {limit}: size shouldn't change without us knowing (on windows, it's bigger)"
+ );
+ }
+}
diff --git a/vendor/gix/src/open/options.rs b/vendor/gix/src/open/options.rs
new file mode 100644
index 000000000..fb648e3c2
--- /dev/null
+++ b/vendor/gix/src/open/options.rs
@@ -0,0 +1,180 @@
+use std::path::PathBuf;
+
+use super::{Error, Options};
+use crate::{bstr::BString, config, Permissions, ThreadSafeRepository};
+
+impl Default for Options {
+ fn default() -> Self {
+ Options {
+ object_store_slots: Default::default(),
+ permissions: Default::default(),
+ git_dir_trust: None,
+ filter_config_section: None,
+ lossy_config: None,
+ lenient_config: true,
+ bail_if_untrusted: false,
+ open_path_as_is: false,
+ api_config_overrides: Vec::new(),
+ cli_config_overrides: Vec::new(),
+ current_dir: None,
+ }
+ }
+}
+
+/// Instantiation
+impl Options {
+ /// Options configured to prevent accessing anything else than the repository configuration file, prohibiting
+ /// accessing the environment or spreading beyond the git repository location.
+ pub fn isolated() -> Self {
+ Options::default().permissions(Permissions::isolated())
+ }
+}
+
+/// Generic modification
+impl Options {
+ /// An adapter to allow calling any builder method on this instance despite only having a mutable reference.
+ pub fn modify(&mut self, f: impl FnOnce(Self) -> Self) {
+ *self = f(std::mem::take(self));
+ }
+}
+
+/// Builder methods
+impl Options {
+ /// Apply the given configuration `values` like `init.defaultBranch=special` or `core.bool-implicit-true` in memory as early
+ /// as the configuration is initialized to allow affecting the repository instantiation phase, both on disk or when opening.
+ /// The configuration is marked with [source API][gix_config::Source::Api].
+ pub fn config_overrides(mut self, values: impl IntoIterator<Item = impl Into<BString>>) -> Self {
+ self.api_config_overrides = values.into_iter().map(Into::into).collect();
+ self
+ }
+
+ /// Set configuration values of the form `core.abbrev=5` or `remote.origin.url = foo` or `core.bool-implicit-true` for application
+ /// as CLI overrides to the repository configuration, marked with [source CLI][gix_config::Source::Cli].
+ /// These are equivalent to CLI overrides passed with `-c` in `git`, for example.
+ pub fn cli_overrides(mut self, values: impl IntoIterator<Item = impl Into<BString>>) -> Self {
+ self.cli_config_overrides = values.into_iter().map(Into::into).collect();
+ self
+ }
+
+ /// Set the number of slots to use for the object database. It's a value that doesn't need changes on the client, typically,
+ /// but should be controlled on the server.
+ pub fn object_store_slots(mut self, slots: gix_odb::store::init::Slots) -> Self {
+ self.object_store_slots = slots;
+ self
+ }
+
+ // TODO: tests
+ /// Set the given permissions, which are typically derived by a `Trust` level.
+ pub fn permissions(mut self, permissions: Permissions) -> Self {
+ self.permissions = permissions;
+ self
+ }
+
+ /// If `true`, default `false`, we will not modify the incoming path to open to assure it is a `.git` directory.
+ ///
+ /// If `true`, we will try to open the input directory as is, even though it doesn't appear to be a `git` repository
+ /// due to the lack of `.git` suffix or because its basename is not `.git` as in `worktree/.git`.
+ pub fn open_path_as_is(mut self, enable: bool) -> Self {
+ self.open_path_as_is = enable;
+ self
+ }
+
+ /// Set the trust level of the `.git` directory we are about to open.
+ ///
+ /// This can be set manually to force trust even though otherwise it might
+ /// not be fully trusted, leading to limitations in how configuration files
+ /// are interpreted.
+ ///
+ /// If not called explicitly, it will be determined by looking at its
+ /// ownership via [`gix_sec::Trust::from_path_ownership()`].
+ ///
+ /// # Security Warning
+ ///
+ /// Use with extreme care and only if it's absolutely known that the repository
+ /// is always controlled by the desired user. Using this capability _only_ saves
+ /// a permission check and only so if the [`open()`][Self::open()] method is used,
+ /// as opposed to discovery.
+ pub fn with(mut self, trust: gix_sec::Trust) -> Self {
+ self.git_dir_trust = trust.into();
+ self
+ }
+
+ /// If true, default false, and if the repository's trust level is not `Full`
+ /// (see [`with()`][Self::with()] for more), then the open operation will fail.
+ ///
+ /// Use this to mimic `git`'s way of handling untrusted repositories. Note that `gitoxide` solves
+ /// this by not using configuration from untrusted sources and by generally being secured against
+ /// doctored input files which at worst could cause out-of-memory at the time of writing.
+ pub fn bail_if_untrusted(mut self, toggle: bool) -> Self {
+ self.bail_if_untrusted = toggle;
+ self
+ }
+
+ /// Set the filter which determines if a configuration section can be used to read values from,
+ /// hence it returns true if it is eligible.
+ ///
+ /// The default filter selects sections whose trust level is [`full`][gix_sec::Trust::Full] or
+ /// whose source is not [`repository-local`][gix_config::source::Kind::Repository].
+ pub fn filter_config_section(mut self, filter: fn(&gix_config::file::Metadata) -> bool) -> Self {
+ self.filter_config_section = Some(filter);
+ self
+ }
+
+ /// By default, in release mode configuration will be read without retaining non-essential information like
+ /// comments or whitespace to optimize lookup performance.
+ ///
+ /// Some applications might want to toggle this to false if they want to display or edit configuration losslessly
+ /// with all whitespace and comments included.
+ pub fn lossy_config(mut self, toggle: bool) -> Self {
+ self.lossy_config = toggle.into();
+ self
+ }
+
+ /// If set, default is false, invalid configuration values will cause an error even if these can safely be defaulted.
+ ///
+ /// This is recommended for all applications that prefer correctness over usability.
+ /// `git` itself defaults to strict configuration mode, flagging incorrect configuration immediately.
+ pub fn strict_config(mut self, toggle: bool) -> Self {
+ self.lenient_config = !toggle;
+ self
+ }
+
+ /// Open a repository at `path` with the options set so far.
+ #[allow(clippy::result_large_err)]
+ pub fn open(self, path: impl Into<PathBuf>) -> Result<ThreadSafeRepository, Error> {
+ ThreadSafeRepository::open_opts(path, self)
+ }
+}
+
+impl gix_sec::trust::DefaultForLevel for Options {
+ fn default_for_level(level: gix_sec::Trust) -> Self {
+ match level {
+ gix_sec::Trust::Full => Options {
+ object_store_slots: Default::default(),
+ permissions: Permissions::default_for_level(level),
+ git_dir_trust: gix_sec::Trust::Full.into(),
+ filter_config_section: Some(config::section::is_trusted),
+ lossy_config: None,
+ bail_if_untrusted: false,
+ lenient_config: true,
+ open_path_as_is: false,
+ api_config_overrides: Vec::new(),
+ cli_config_overrides: Vec::new(),
+ current_dir: None,
+ },
+ gix_sec::Trust::Reduced => Options {
+ object_store_slots: gix_odb::store::init::Slots::Given(32), // limit resource usage
+ permissions: Permissions::default_for_level(level),
+ git_dir_trust: gix_sec::Trust::Reduced.into(),
+ filter_config_section: Some(config::section::is_trusted),
+ bail_if_untrusted: false,
+ lenient_config: true,
+ open_path_as_is: false,
+ lossy_config: None,
+ api_config_overrides: Vec::new(),
+ cli_config_overrides: Vec::new(),
+ current_dir: None,
+ },
+ }
+ }
+}
diff --git a/vendor/gix/src/open/repository.rs b/vendor/gix/src/open/repository.rs
new file mode 100644
index 000000000..85dd91da7
--- /dev/null
+++ b/vendor/gix/src/open/repository.rs
@@ -0,0 +1,345 @@
+#![allow(clippy::result_large_err)]
+use std::{borrow::Cow, path::PathBuf};
+
+use gix_features::threading::OwnShared;
+
+use super::{Error, Options};
+use crate::{
+ config,
+ config::{
+ cache::{interpolate_context, util::ApplyLeniency},
+ tree::{gitoxide, Core, Key, Safe},
+ },
+ permission, Permissions, ThreadSafeRepository,
+};
+
+#[derive(Default, Clone)]
+pub(crate) struct EnvironmentOverrides {
+ /// An override of the worktree typically from the environment, and overrides even worktree dirs set as parameter.
+ ///
+ /// This emulates the way git handles this override.
+ worktree_dir: Option<PathBuf>,
+ /// An override for the .git directory, typically from the environment.
+ ///
+ /// If set, the passed in `git_dir` parameter will be ignored in favor of this one.
+ git_dir: Option<PathBuf>,
+}
+
+impl EnvironmentOverrides {
+ fn from_env() -> Result<Self, permission::env_var::resource::Error> {
+ let mut worktree_dir = None;
+ if let Some(path) = std::env::var_os(Core::WORKTREE.the_environment_override()) {
+ worktree_dir = PathBuf::from(path).into();
+ }
+ let mut git_dir = None;
+ if let Some(path) = std::env::var_os("GIT_DIR") {
+ git_dir = PathBuf::from(path).into();
+ }
+ Ok(EnvironmentOverrides { worktree_dir, git_dir })
+ }
+}
+
+impl ThreadSafeRepository {
+ /// Open a git repository at the given `path`, possibly expanding it to `path/.git` if `path` is a work tree dir.
+ pub fn open(path: impl Into<PathBuf>) -> Result<Self, Error> {
+ Self::open_opts(path, Options::default())
+ }
+
+ /// Open a git repository at the given `path`, possibly expanding it to `path/.git` if `path` is a work tree dir, and use
+ /// `options` for fine-grained control.
+ ///
+ /// Note that you should use [`crate::discover()`] if security should be adjusted by ownership.
+ pub fn open_opts(path: impl Into<PathBuf>, mut options: Options) -> Result<Self, Error> {
+ let (path, kind) = {
+ let path = path.into();
+ let looks_like_git_dir =
+ path.ends_with(gix_discover::DOT_GIT_DIR) || path.extension() == Some(std::ffi::OsStr::new("git"));
+ let candidate = if !options.open_path_as_is && !looks_like_git_dir {
+ Cow::Owned(path.join(gix_discover::DOT_GIT_DIR))
+ } else {
+ Cow::Borrowed(&path)
+ };
+ match gix_discover::is_git(candidate.as_ref()) {
+ Ok(kind) => (candidate.into_owned(), kind),
+ Err(err) => {
+ if options.open_path_as_is || matches!(candidate, Cow::Borrowed(_)) {
+ return Err(Error::NotARepository {
+ source: err,
+ path: candidate.into_owned(),
+ });
+ }
+ match gix_discover::is_git(&path) {
+ Ok(kind) => (path, kind),
+ Err(err) => return Err(Error::NotARepository { source: err, path }),
+ }
+ }
+ }
+ };
+ let cwd = std::env::current_dir()?;
+ let (git_dir, worktree_dir) = gix_discover::repository::Path::from_dot_git_dir(path, kind, &cwd)
+ .expect("we have sanitized path with is_git()")
+ .into_repository_and_work_tree_directories();
+ if options.git_dir_trust.is_none() {
+ options.git_dir_trust = gix_sec::Trust::from_path_ownership(&git_dir)?.into();
+ }
+ options.current_dir = Some(cwd);
+ ThreadSafeRepository::open_from_paths(git_dir, worktree_dir, options)
+ }
+
+ /// Try to open a git repository in `fallback_directory` (can be worktree or `.git` directory) only if there is no override
+ /// of the `gitdir` using git environment variables.
+ ///
+ /// Use the `trust_map` to apply options depending in the trust level for `directory` or the directory it's overridden with.
+ /// The `.git` directory whether given or computed is used for trust checks.
+ ///
+ /// Note that this will read various `GIT_*` environment variables to check for overrides, and is probably most useful when implementing
+ /// custom hooks.
+ // TODO: tests, with hooks, GIT_QUARANTINE for ref-log and transaction control (needs gix-sec support to remove write access in gix-ref)
+ // TODO: The following vars should end up as overrides of the respective configuration values (see gix-config).
+ // GIT_PROXY_SSL_CERT, GIT_PROXY_SSL_KEY, GIT_PROXY_SSL_CERT_PASSWORD_PROTECTED.
+ // GIT_PROXY_SSL_CAINFO, GIT_SSL_CIPHER_LIST, GIT_HTTP_MAX_REQUESTS, GIT_CURL_FTP_NO_EPSV,
+ pub fn open_with_environment_overrides(
+ fallback_directory: impl Into<PathBuf>,
+ trust_map: gix_sec::trust::Mapping<Options>,
+ ) -> Result<Self, Error> {
+ let overrides = EnvironmentOverrides::from_env()?;
+ let (path, path_kind): (PathBuf, _) = match overrides.git_dir {
+ Some(git_dir) => gix_discover::is_git(&git_dir)
+ .map_err(|err| Error::NotARepository {
+ source: err,
+ path: git_dir.clone(),
+ })
+ .map(|kind| (git_dir, kind))?,
+ None => {
+ let fallback_directory = fallback_directory.into();
+ gix_discover::is_git(&fallback_directory)
+ .map_err(|err| Error::NotARepository {
+ source: err,
+ path: fallback_directory.clone(),
+ })
+ .map(|kind| (fallback_directory, kind))?
+ }
+ };
+
+ let cwd = std::env::current_dir()?;
+ let (git_dir, worktree_dir) = gix_discover::repository::Path::from_dot_git_dir(path, path_kind, &cwd)
+ .expect("we have sanitized path with is_git()")
+ .into_repository_and_work_tree_directories();
+ let worktree_dir = worktree_dir.or(overrides.worktree_dir);
+
+ let git_dir_trust = gix_sec::Trust::from_path_ownership(&git_dir)?;
+ let mut options = trust_map.into_value_by_level(git_dir_trust);
+ options.current_dir = Some(cwd);
+ ThreadSafeRepository::open_from_paths(git_dir, worktree_dir, options)
+ }
+
+ pub(crate) fn open_from_paths(
+ git_dir: PathBuf,
+ mut worktree_dir: Option<PathBuf>,
+ options: Options,
+ ) -> Result<Self, Error> {
+ let Options {
+ git_dir_trust,
+ object_store_slots,
+ filter_config_section,
+ lossy_config,
+ lenient_config,
+ bail_if_untrusted,
+ open_path_as_is: _,
+ permissions: Permissions { ref env, config },
+ ref api_config_overrides,
+ ref cli_config_overrides,
+ ref current_dir,
+ } = options;
+ let current_dir = current_dir.as_deref().expect("BUG: current_dir must be set by caller");
+ let git_dir_trust = git_dir_trust.expect("trust must be been determined by now");
+
+ // TODO: assure we handle the worktree-dir properly as we can have config per worktree with an extension.
+ // This would be something read in later as have to first check for extensions. Also this means
+ // that each worktree, even if accessible through this instance, has to come in its own Repository instance
+ // as it may have its own configuration. That's fine actually.
+ let common_dir = gix_discover::path::from_plain_file(git_dir.join("commondir"))
+ .transpose()?
+ .map(|cd| git_dir.join(cd));
+ let common_dir_ref = common_dir.as_deref().unwrap_or(&git_dir);
+
+ let repo_config = config::cache::StageOne::new(
+ common_dir_ref,
+ git_dir.as_ref(),
+ git_dir_trust,
+ lossy_config,
+ lenient_config,
+ )?;
+ let mut refs = {
+ let reflog = repo_config.reflog.unwrap_or(gix_ref::store::WriteReflog::Disable);
+ let object_hash = repo_config.object_hash;
+ match &common_dir {
+ Some(common_dir) => crate::RefStore::for_linked_worktree(&git_dir, common_dir, reflog, object_hash),
+ None => crate::RefStore::at(&git_dir, reflog, object_hash),
+ }
+ };
+ let head = refs.find("HEAD").ok();
+ let git_install_dir = crate::path::install_dir().ok();
+ let home = std::env::var_os("HOME")
+ .map(PathBuf::from)
+ .and_then(|home| env.home.check_opt(home));
+
+ let mut filter_config_section = filter_config_section.unwrap_or(config::section::is_trusted);
+ let config = config::Cache::from_stage_one(
+ repo_config,
+ common_dir_ref,
+ head.as_ref().and_then(|head| head.target.try_name()),
+ filter_config_section,
+ git_install_dir.as_deref(),
+ home.as_deref(),
+ env.clone(),
+ config,
+ lenient_config,
+ api_config_overrides,
+ cli_config_overrides,
+ )?;
+
+ if bail_if_untrusted && git_dir_trust != gix_sec::Trust::Full {
+ check_safe_directories(&git_dir, git_install_dir.as_deref(), home.as_deref(), &config)?;
+ }
+
+ // core.worktree might be used to overwrite the worktree directory
+ if !config.is_bare {
+ if let Some(wt) = config
+ .resolved
+ .path_filter("core", None, Core::WORKTREE.name, &mut filter_config_section)
+ {
+ let wt_path = wt
+ .interpolate(interpolate_context(git_install_dir.as_deref(), home.as_deref()))
+ .map_err(config::Error::PathInterpolation)?;
+ worktree_dir = {
+ gix_path::normalize(git_dir.join(wt_path), current_dir)
+ .and_then(|wt| wt.as_ref().is_dir().then(|| wt.into_owned()))
+ }
+ }
+ }
+
+ match worktree_dir {
+ None if !config.is_bare => {
+ worktree_dir = Some(git_dir.parent().expect("parent is always available").to_owned());
+ }
+ Some(_) => {
+ // note that we might be bare even with a worktree directory - work trees don't have to be
+ // the parent of a non-bare repository.
+ }
+ None => {}
+ }
+
+ refs.write_reflog = config::cache::util::reflog_or_default(config.reflog, worktree_dir.is_some());
+ let replacements = replacement_objects_refs_prefix(&config.resolved, lenient_config, filter_config_section)?
+ .and_then(|prefix| {
+ let platform = refs.iter().ok()?;
+ let iter = platform.prefixed(&prefix).ok()?;
+ let prefix = prefix.to_str()?;
+ let replacements = iter
+ .filter_map(Result::ok)
+ .filter_map(|r: gix_ref::Reference| {
+ let target = r.target.try_id()?.to_owned();
+ let source =
+ gix_hash::ObjectId::from_hex(r.name.as_bstr().strip_prefix(prefix.as_bytes())?).ok()?;
+ Some((source, target))
+ })
+ .collect::<Vec<_>>();
+ Some(replacements)
+ })
+ .unwrap_or_default();
+
+ Ok(ThreadSafeRepository {
+ objects: OwnShared::new(gix_odb::Store::at_opts(
+ common_dir_ref.join("objects"),
+ replacements,
+ gix_odb::store::init::Options {
+ slots: object_store_slots,
+ object_hash: config.object_hash,
+ use_multi_pack_index: config.use_multi_pack_index,
+ current_dir: current_dir.to_owned().into(),
+ },
+ )?),
+ common_dir,
+ refs,
+ work_tree: worktree_dir,
+ config,
+ // used when spawning new repositories off this one when following worktrees
+ linked_worktree_options: options,
+ index: gix_features::fs::MutableSnapshot::new().into(),
+ })
+ }
+}
+
+// TODO: tests
+fn replacement_objects_refs_prefix(
+ config: &gix_config::File<'static>,
+ lenient: bool,
+ mut filter_config_section: fn(&gix_config::file::Metadata) -> bool,
+) -> Result<Option<PathBuf>, Error> {
+ let is_disabled = config
+ .boolean_filter_by_key("gitoxide.objects.noReplace", &mut filter_config_section)
+ .map(|b| gitoxide::Objects::NO_REPLACE.enrich_error(b))
+ .transpose()
+ .with_leniency(lenient)
+ .map_err(config::Error::ConfigBoolean)?
+ .unwrap_or_default();
+
+ if is_disabled {
+ return Ok(None);
+ }
+
+ let ref_base = gix_path::from_bstr({
+ let key = "gitoxide.objects.replaceRefBase";
+ debug_assert_eq!(gitoxide::Objects::REPLACE_REF_BASE.logical_name(), key);
+ config
+ .string_filter_by_key(key, &mut filter_config_section)
+ .unwrap_or_else(|| Cow::Borrowed("refs/replace/".into()))
+ })
+ .into_owned();
+ Ok(ref_base.into())
+}
+
+fn check_safe_directories(
+ git_dir: &std::path::Path,
+ git_install_dir: Option<&std::path::Path>,
+ home: Option<&std::path::Path>,
+ config: &config::Cache,
+) -> Result<(), Error> {
+ let mut is_safe = false;
+ let git_dir = match gix_path::realpath(git_dir) {
+ Ok(p) => p,
+ Err(_) => git_dir.to_owned(),
+ };
+ for safe_dir in config
+ .resolved
+ .strings_filter("safe", None, Safe::DIRECTORY.name, &mut Safe::directory_filter)
+ .unwrap_or_default()
+ {
+ if safe_dir.as_ref() == "*" {
+ is_safe = true;
+ continue;
+ }
+ if safe_dir.is_empty() {
+ is_safe = false;
+ continue;
+ }
+ if !is_safe {
+ let safe_dir = match gix_config::Path::from(std::borrow::Cow::Borrowed(safe_dir.as_ref()))
+ .interpolate(interpolate_context(git_install_dir, home))
+ {
+ Ok(path) => path,
+ Err(_) => gix_path::from_bstr(safe_dir),
+ };
+ if safe_dir == git_dir {
+ is_safe = true;
+ continue;
+ }
+ }
+ }
+ if is_safe {
+ Ok(())
+ } else {
+ Err(Error::UnsafeGitDir { path: git_dir })
+ }
+}
diff --git a/vendor/gix/src/path.rs b/vendor/gix/src/path.rs
new file mode 100644
index 000000000..9fd6d4b01
--- /dev/null
+++ b/vendor/gix/src/path.rs
@@ -0,0 +1,11 @@
+use std::path::PathBuf;
+
+pub use gix_path::*;
+
+pub(crate) fn install_dir() -> std::io::Result<PathBuf> {
+ std::env::current_exe().and_then(|exe| {
+ exe.parent()
+ .map(ToOwned::to_owned)
+ .ok_or_else(|| std::io::Error::new(std::io::ErrorKind::Other, "no parent for current executable"))
+ })
+}
diff --git a/vendor/gix/src/reference/edits.rs b/vendor/gix/src/reference/edits.rs
new file mode 100644
index 000000000..aadd087ba
--- /dev/null
+++ b/vendor/gix/src/reference/edits.rs
@@ -0,0 +1,75 @@
+///
+pub mod set_target_id {
+ use gix_ref::{transaction::PreviousValue, Target};
+
+ use crate::{bstr::BString, Reference};
+
+ mod error {
+ use gix_ref::FullName;
+
+ /// The error returned by [`Reference::set_target_id()`][super::Reference::set_target_id()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("Cannot change symbolic reference {name:?} into a direct one by setting it to an id")]
+ SymbolicReference { name: FullName },
+ #[error(transparent)]
+ ReferenceEdit(#[from] crate::reference::edit::Error),
+ }
+ }
+ pub use error::Error;
+
+ impl<'repo> Reference<'repo> {
+ /// Set the id of this direct reference to `id` and use `reflog_message` for the reflog (if enabled in the repository).
+ ///
+ /// Note that the operation will fail on symbolic references, to change their type use the lower level reference database,
+ /// or if the reference was deleted or changed in the mean time.
+ /// Furthermore, refrain from using this method for more than a one-off change as it creates a transaction for each invocation.
+ /// If multiple references should be changed, use [Repository::edit_references()][crate::Repository::edit_references()]
+ /// or the lower level reference database instead.
+ #[allow(clippy::result_large_err)]
+ pub fn set_target_id(
+ &mut self,
+ id: impl Into<gix_hash::ObjectId>,
+ reflog_message: impl Into<BString>,
+ ) -> Result<(), Error> {
+ match &self.inner.target {
+ Target::Symbolic(name) => return Err(Error::SymbolicReference { name: name.clone() }),
+ Target::Peeled(current_id) => {
+ let changed = self.repo.reference(
+ self.name(),
+ id,
+ PreviousValue::MustExistAndMatch(Target::Peeled(current_id.to_owned())),
+ reflog_message,
+ )?;
+ *self = changed;
+ }
+ }
+ Ok(())
+ }
+ }
+}
+
+///
+pub mod delete {
+ use gix_ref::transaction::{Change, PreviousValue, RefEdit, RefLog};
+
+ use crate::Reference;
+
+ impl<'repo> Reference<'repo> {
+ /// Delete this reference or fail if it was changed since last observed.
+ /// Note that this instance remains available in memory but probably shouldn't be used anymore.
+ pub fn delete(&self) -> Result<(), crate::reference::edit::Error> {
+ self.repo
+ .edit_reference(RefEdit {
+ change: Change::Delete {
+ expected: PreviousValue::MustExistAndMatch(self.inner.target.clone()),
+ log: RefLog::AndReference,
+ },
+ name: self.inner.name.clone(),
+ deref: false,
+ })
+ .map(|_| ())
+ }
+ }
+}
diff --git a/vendor/gix/src/reference/errors.rs b/vendor/gix/src/reference/errors.rs
new file mode 100644
index 000000000..364456fd1
--- /dev/null
+++ b/vendor/gix/src/reference/errors.rs
@@ -0,0 +1,89 @@
+///
+pub mod edit {
+ use crate::config;
+
+ /// The error returned by [edit_references(…)][crate::Repository::edit_references()], and others
+ /// which ultimately create a reference.
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ FileTransactionPrepare(#[from] gix_ref::file::transaction::prepare::Error),
+ #[error(transparent)]
+ FileTransactionCommit(#[from] gix_ref::file::transaction::commit::Error),
+ #[error(transparent)]
+ NameValidation(#[from] gix_validate::reference::name::Error),
+ #[error("Could not interpret core.filesRefLockTimeout or core.packedRefsTimeout, it must be the number in milliseconds to wait for locks or negative to wait forever")]
+ LockTimeoutConfiguration(#[from] config::lock_timeout::Error),
+ #[error(transparent)]
+ ParseCommitterTime(#[from] crate::config::time::Error),
+ }
+}
+
+///
+pub mod peel {
+ /// The error returned by [Reference::peel_to_id_in_place(…)][crate::Reference::peel_to_id_in_place()] and
+ /// [Reference::into_fully_peeled_id(…)][crate::Reference::into_fully_peeled_id()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ ToId(#[from] gix_ref::peel::to_id::Error),
+ #[error(transparent)]
+ PackedRefsOpen(#[from] gix_ref::packed::buffer::open::Error),
+ }
+}
+
+///
+pub mod head_id {
+ /// The error returned by [Repository::head_id(…)][crate::Repository::head_id()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ Head(#[from] crate::reference::find::existing::Error),
+ #[error(transparent)]
+ PeelToId(#[from] crate::head::peel::Error),
+ #[error("Branch '{name}' does not have any commits")]
+ Unborn { name: gix_ref::FullName },
+ }
+}
+
+///
+pub mod head_commit {
+ /// The error returned by [Repository::head_commit(…)][crate::Repository::head_commit()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ Head(#[from] crate::reference::find::existing::Error),
+ #[error(transparent)]
+ PeelToCommit(#[from] crate::head::peel::to_commit::Error),
+ }
+}
+
+///
+pub mod find {
+ ///
+ pub mod existing {
+ /// The error returned by [find_reference(…)][crate::Repository::find_reference()], and others.
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ Find(#[from] crate::reference::find::Error),
+ #[error("The reference did not exist")]
+ NotFound,
+ }
+ }
+
+ /// The error returned by [try_find_reference(…)][crate::Repository::try_find_reference()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ Find(#[from] gix_ref::file::find::Error),
+ #[error(transparent)]
+ PackedRefsOpen(#[from] gix_ref::packed::buffer::open::Error),
+ }
+}
diff --git a/vendor/gix/src/reference/iter.rs b/vendor/gix/src/reference/iter.rs
new file mode 100644
index 000000000..a2b022f64
--- /dev/null
+++ b/vendor/gix/src/reference/iter.rs
@@ -0,0 +1,127 @@
+//!
+use std::path::Path;
+
+use gix_odb::pack::Find;
+use gix_ref::file::ReferenceExt;
+
+/// A platform to create iterators over references.
+#[must_use = "Iterators should be obtained from this iterator platform"]
+pub struct Platform<'r> {
+ pub(crate) platform: gix_ref::file::iter::Platform<'r>,
+ pub(crate) repo: &'r crate::Repository,
+}
+
+/// An iterator over references, with or without filter.
+pub struct Iter<'r> {
+ inner: gix_ref::file::iter::LooseThenPacked<'r, 'r>,
+ peel: bool,
+ repo: &'r crate::Repository,
+}
+
+impl<'r> Iter<'r> {
+ fn new(repo: &'r crate::Repository, platform: gix_ref::file::iter::LooseThenPacked<'r, 'r>) -> Self {
+ Iter {
+ inner: platform,
+ peel: false,
+ repo,
+ }
+ }
+}
+
+impl<'r> Platform<'r> {
+ /// Return an iterator over all references in the repository.
+ ///
+ /// Even broken or otherwise unparsable or inaccessible references are returned and have to be handled by the caller on a
+ /// case by case basis.
+ pub fn all(&self) -> Result<Iter<'_>, init::Error> {
+ Ok(Iter::new(self.repo, self.platform.all()?))
+ }
+
+ /// Return an iterator over all references that match the given `prefix`.
+ ///
+ /// These are of the form `refs/heads` or `refs/remotes/origin`, and must not contain relative path components like `.` or `..`.
+ // TODO: Create a custom `Path` type that enforces the requirements of git naturally, this type is possibly surprising on Windows
+ // and when not using a trailing '/' to signal directories.
+ pub fn prefixed(&self, prefix: impl AsRef<Path>) -> Result<Iter<'_>, init::Error> {
+ Ok(Iter::new(self.repo, self.platform.prefixed(prefix)?))
+ }
+
+ // TODO: tests
+ /// Return an iterator over all references that are tags.
+ ///
+ /// They are all prefixed with `refs/tags`.
+ pub fn tags(&self) -> Result<Iter<'_>, init::Error> {
+ Ok(Iter::new(self.repo, self.platform.prefixed("refs/tags/")?))
+ }
+
+ // TODO: tests
+ /// Return an iterator over all local branches.
+ ///
+ /// They are all prefixed with `refs/heads`.
+ pub fn local_branches(&self) -> Result<Iter<'_>, init::Error> {
+ Ok(Iter::new(self.repo, self.platform.prefixed("refs/heads/")?))
+ }
+
+ // TODO: tests
+ /// Return an iterator over all remote branches.
+ ///
+ /// They are all prefixed with `refs/remotes`.
+ pub fn remote_branches(&self) -> Result<Iter<'_>, init::Error> {
+ Ok(Iter::new(self.repo, self.platform.prefixed("refs/remotes/")?))
+ }
+}
+
+impl<'r> Iter<'r> {
+ /// Automatically peel references before yielding them during iteration.
+ ///
+ /// This has the same effect as using `iter.map(|r| {r.peel_to_id_in_place(); r})`.
+ ///
+ /// # Note
+ ///
+ /// Doing this is necessary as the packed-refs buffer is already held by the iterator, preventing the consumer of the iterator
+ /// from peeling the returned references themselves.
+ pub fn peeled(mut self) -> Self {
+ self.peel = true;
+ self
+ }
+}
+
+impl<'r> Iterator for Iter<'r> {
+ type Item = Result<crate::Reference<'r>, Box<dyn std::error::Error + Send + Sync + 'static>>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.inner.next().map(|res| {
+ res.map_err(|err| Box::new(err) as Box<dyn std::error::Error + Send + Sync + 'static>)
+ .and_then(|mut r| {
+ if self.peel {
+ let handle = &self.repo;
+ r.peel_to_id_in_place(&handle.refs, |oid, buf| {
+ handle
+ .objects
+ .try_find(oid, buf)
+ .map(|po| po.map(|(o, _l)| (o.kind, o.data)))
+ })
+ .map_err(|err| Box::new(err) as Box<dyn std::error::Error + Send + Sync + 'static>)
+ .map(|_| r)
+ } else {
+ Ok(r)
+ }
+ })
+ .map(|r| crate::Reference::from_ref(r, self.repo))
+ })
+ }
+}
+
+///
+pub mod init {
+ /// The error returned by [`Platform::all()`][super::Platform::all()] or [`Platform::prefixed()`][super::Platform::prefixed()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ Io(#[from] std::io::Error),
+ }
+}
+
+/// The error returned by [references()][crate::Repository::references()].
+pub type Error = gix_ref::packed::buffer::open::Error;
diff --git a/vendor/gix/src/reference/log.rs b/vendor/gix/src/reference/log.rs
new file mode 100644
index 000000000..b516e6499
--- /dev/null
+++ b/vendor/gix/src/reference/log.rs
@@ -0,0 +1,36 @@
+//!
+use gix_object::commit::MessageRef;
+use gix_ref::file::ReferenceExt;
+
+use crate::{
+ bstr::{BStr, BString, ByteVec},
+ Reference,
+};
+
+impl<'repo> Reference<'repo> {
+ /// Return a platform for obtaining iterators over reference logs.
+ pub fn log_iter(&self) -> gix_ref::file::log::iter::Platform<'_, '_> {
+ self.inner.log_iter(&self.repo.refs)
+ }
+}
+
+/// Generate a message typical for git commit logs based on the given `operation`, commit `message` and `num_parents` of the commit.
+pub fn message(operation: &str, message: &BStr, num_parents: usize) -> BString {
+ let mut out = BString::from(operation);
+ if let Some(commit_type) = commit_type_by_parents(num_parents) {
+ out.push_str(b" (");
+ out.extend_from_slice(commit_type.as_bytes());
+ out.push_byte(b')');
+ }
+ out.push_str(b": ");
+ out.extend_from_slice(&MessageRef::from_bytes(message).summary());
+ out
+}
+
+pub(crate) fn commit_type_by_parents(count: usize) -> Option<&'static str> {
+ Some(match count {
+ 0 => "initial",
+ 1 => return None,
+ _two_or_more => "merge",
+ })
+}
diff --git a/vendor/gix/src/reference/mod.rs b/vendor/gix/src/reference/mod.rs
new file mode 100644
index 000000000..e2ee0d3b2
--- /dev/null
+++ b/vendor/gix/src/reference/mod.rs
@@ -0,0 +1,87 @@
+//!
+
+use gix_odb::pack::Find;
+use gix_ref::file::ReferenceExt;
+
+use crate::{Id, Reference};
+
+pub mod iter;
+///
+pub mod remote;
+
+mod errors;
+pub use errors::{edit, find, head_commit, head_id, peel};
+
+use crate::ext::ObjectIdExt;
+
+pub mod log;
+
+pub use gix_ref::{Category, Kind};
+
+/// Access
+impl<'repo> Reference<'repo> {
+ /// Returns the attached id we point to, or `None` if this is a symbolic ref.
+ pub fn try_id(&self) -> Option<Id<'repo>> {
+ match self.inner.target {
+ gix_ref::Target::Symbolic(_) => None,
+ gix_ref::Target::Peeled(oid) => oid.to_owned().attach(self.repo).into(),
+ }
+ }
+
+ /// Returns the attached id we point to, or panic if this is a symbolic ref.
+ pub fn id(&self) -> Id<'repo> {
+ self.try_id()
+ .expect("BUG: tries to obtain object id from symbolic target")
+ }
+
+ /// Return the target to which this reference points to.
+ pub fn target(&self) -> gix_ref::TargetRef<'_> {
+ self.inner.target.to_ref()
+ }
+
+ /// Return the reference's full name.
+ pub fn name(&self) -> &gix_ref::FullNameRef {
+ self.inner.name.as_ref()
+ }
+
+ /// Turn this instances into a stand-alone reference.
+ pub fn detach(self) -> gix_ref::Reference {
+ self.inner
+ }
+}
+
+impl<'repo> std::fmt::Debug for Reference<'repo> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Debug::fmt(&self.inner, f)
+ }
+}
+
+impl<'repo> Reference<'repo> {
+ pub(crate) fn from_ref(reference: gix_ref::Reference, repo: &'repo crate::Repository) -> Self {
+ Reference { inner: reference, repo }
+ }
+}
+
+impl<'repo> Reference<'repo> {
+ /// Follow all symbolic targets this reference might point to and peel the underlying object
+ /// to the end of the chain, and return it.
+ ///
+ /// This is useful to learn where this reference is ultimately pointing to.
+ pub fn peel_to_id_in_place(&mut self) -> Result<Id<'repo>, peel::Error> {
+ let repo = &self.repo;
+ let oid = self.inner.peel_to_id_in_place(&repo.refs, |oid, buf| {
+ repo.objects
+ .try_find(oid, buf)
+ .map(|po| po.map(|(o, _l)| (o.kind, o.data)))
+ })?;
+ Ok(Id::from_id(oid, repo))
+ }
+
+ /// Similar to [`peel_to_id_in_place()`][Reference::peel_to_id_in_place()], but consumes this instance.
+ pub fn into_fully_peeled_id(mut self) -> Result<Id<'repo>, peel::Error> {
+ self.peel_to_id_in_place()
+ }
+}
+
+mod edits;
+pub use edits::{delete, set_target_id};
diff --git a/vendor/gix/src/reference/remote.rs b/vendor/gix/src/reference/remote.rs
new file mode 100644
index 000000000..dd96892e2
--- /dev/null
+++ b/vendor/gix/src/reference/remote.rs
@@ -0,0 +1,49 @@
+use crate::{config, config::tree::Branch, remote, Reference};
+
+/// Remotes
+impl<'repo> Reference<'repo> {
+ /// Find the unvalidated name of our remote for `direction` as configured in `branch.<name>.remote|pushRemote` respectively.
+ /// If `Some(<name>)` it can be used in [`Repository::find_remote(…)`][crate::Repository::find_remote()], or if `None` then
+ /// [Repository::remote_default_name()][crate::Repository::remote_default_name()] could be used in its place.
+ ///
+ /// Return `None` if no remote is configured.
+ ///
+ /// # Note
+ ///
+ /// - it's recommended to use the [`remote(…)`][Self::remote()] method as it will configure the remote with additional
+ /// information.
+ /// - `branch.<name>.pushRemote` falls back to `branch.<name>.remote`.
+ pub fn remote_name(&self, direction: remote::Direction) -> Option<remote::Name<'repo>> {
+ let name = self.name().shorten();
+ let config = &self.repo.config.resolved;
+ (direction == remote::Direction::Push)
+ .then(|| {
+ config
+ .string("branch", Some(name), Branch::PUSH_REMOTE.name)
+ .or_else(|| config.string("remote", None, config::tree::Remote::PUSH_DEFAULT.name))
+ })
+ .flatten()
+ .or_else(|| config.string("branch", Some(name), Branch::REMOTE.name))
+ .and_then(|name| name.try_into().ok())
+ }
+
+ /// Like [`remote_name(…)`][Self::remote_name()], but configures the returned `Remote` with additional information like
+ ///
+ /// - `branch.<name>.merge` to know which branch on the remote side corresponds to this one for merging when pulling.
+ ///
+ /// It also handles if the remote is a configured URL, which has no name.
+ pub fn remote(
+ &self,
+ direction: remote::Direction,
+ ) -> Option<Result<crate::Remote<'repo>, remote::find::existing::Error>> {
+ // TODO: use `branch.<name>.merge`
+ self.remote_name(direction).map(|name| match name {
+ remote::Name::Symbol(name) => self.repo.find_remote(name.as_ref()).map_err(Into::into),
+ remote::Name::Url(url) => gix_url::parse(url.as_ref()).map_err(Into::into).and_then(|url| {
+ self.repo
+ .remote_at(url)
+ .map_err(|err| remote::find::existing::Error::Find(remote::find::Error::Init(err)))
+ }),
+ })
+ }
+}
diff --git a/vendor/gix/src/remote/access.rs b/vendor/gix/src/remote/access.rs
new file mode 100644
index 000000000..1a1cee5de
--- /dev/null
+++ b/vendor/gix/src/remote/access.rs
@@ -0,0 +1,105 @@
+use gix_refspec::RefSpec;
+
+use crate::{bstr::BStr, remote, Remote};
+
+/// Access
+impl<'repo> Remote<'repo> {
+ /// Return the name of this remote or `None` if it wasn't persisted to disk yet.
+ pub fn name(&self) -> Option<&remote::Name<'static>> {
+ self.name.as_ref()
+ }
+
+ /// Return our repository reference.
+ pub fn repo(&self) -> &'repo crate::Repository {
+ self.repo
+ }
+
+ /// Return the set of ref-specs used for `direction`, which may be empty, in order of occurrence in the configuration.
+ pub fn refspecs(&self, direction: remote::Direction) -> &[RefSpec] {
+ match direction {
+ remote::Direction::Fetch => &self.fetch_specs,
+ remote::Direction::Push => &self.push_specs,
+ }
+ }
+
+ /// Return how we handle tags when fetching the remote.
+ pub fn fetch_tags(&self) -> remote::fetch::Tags {
+ self.fetch_tags
+ }
+
+ /// Return the url used for the given `direction` with rewrites from `url.<base>.insteadOf|pushInsteadOf`, unless the instance
+ /// was created with one of the `_without_url_rewrite()` methods.
+ /// For pushing, this is the `remote.<name>.pushUrl` or the `remote.<name>.url` used for fetching, and for fetching it's
+ /// the `remote.<name>.url`.
+ /// Note that it's possible to only have the push url set, in which case there will be no way to fetch from the remote as
+ /// the push-url isn't used for that.
+ pub fn url(&self, direction: remote::Direction) -> Option<&gix_url::Url> {
+ match direction {
+ remote::Direction::Fetch => self.url_alias.as_ref().or(self.url.as_ref()),
+ remote::Direction::Push => self
+ .push_url_alias
+ .as_ref()
+ .or(self.push_url.as_ref())
+ .or_else(|| self.url(remote::Direction::Fetch)),
+ }
+ }
+}
+
+/// Modification
+impl Remote<'_> {
+ /// Read `url.<base>.insteadOf|pushInsteadOf` configuration variables and apply them to our urls, changing them in place.
+ ///
+ /// This happens only once, and one if them may be changed even when reporting an error.
+ /// If both urls fail, only the first error (for fetch urls) is reported.
+ pub fn rewrite_urls(&mut self) -> Result<&mut Self, remote::init::Error> {
+ let url_err = match remote::init::rewrite_url(&self.repo.config, self.url.as_ref(), remote::Direction::Fetch) {
+ Ok(url) => {
+ self.url_alias = url;
+ None
+ }
+ Err(err) => err.into(),
+ };
+ let push_url_err =
+ match remote::init::rewrite_url(&self.repo.config, self.push_url.as_ref(), remote::Direction::Push) {
+ Ok(url) => {
+ self.push_url_alias = url;
+ None
+ }
+ Err(err) => err.into(),
+ };
+ url_err.or(push_url_err).map(Err::<&mut Self, _>).transpose()?;
+ Ok(self)
+ }
+
+ /// Replace all currently set refspecs, typically from configuration, with the given `specs` for `direction`,
+ /// or `None` if one of the input specs could not be parsed.
+ pub fn replace_refspecs<Spec>(
+ &mut self,
+ specs: impl IntoIterator<Item = Spec>,
+ direction: remote::Direction,
+ ) -> Result<(), gix_refspec::parse::Error>
+ where
+ Spec: AsRef<BStr>,
+ {
+ use remote::Direction::*;
+ let specs: Vec<_> = specs
+ .into_iter()
+ .map(|spec| {
+ gix_refspec::parse(
+ spec.as_ref(),
+ match direction {
+ Push => gix_refspec::parse::Operation::Push,
+ Fetch => gix_refspec::parse::Operation::Fetch,
+ },
+ )
+ .map(|url| url.to_owned())
+ })
+ .collect::<Result<_, _>>()?;
+ let dst = match direction {
+ Push => &mut self.push_specs,
+ Fetch => &mut self.fetch_specs,
+ };
+ *dst = specs;
+ Ok(())
+ }
+}
diff --git a/vendor/gix/src/remote/build.rs b/vendor/gix/src/remote/build.rs
new file mode 100644
index 000000000..10c216537
--- /dev/null
+++ b/vendor/gix/src/remote/build.rs
@@ -0,0 +1,84 @@
+use std::convert::TryInto;
+
+use crate::{bstr::BStr, remote, Remote};
+
+/// Builder methods
+impl Remote<'_> {
+ /// Set the `url` to be used when pushing data to a remote.
+ pub fn push_url<Url, E>(self, url: Url) -> Result<Self, remote::init::Error>
+ where
+ Url: TryInto<gix_url::Url, Error = E>,
+ gix_url::parse::Error: From<E>,
+ {
+ self.push_url_inner(url, true)
+ }
+
+ /// Set the `url` to be used when pushing data to a remote, without applying rewrite rules in case these could be faulty,
+ /// eliminating one failure mode.
+ pub fn push_url_without_url_rewrite<Url, E>(self, url: Url) -> Result<Self, remote::init::Error>
+ where
+ Url: TryInto<gix_url::Url, Error = E>,
+ gix_url::parse::Error: From<E>,
+ {
+ self.push_url_inner(url, false)
+ }
+
+ /// Configure how tags should be handled when fetching from the remote.
+ pub fn with_fetch_tags(mut self, tags: remote::fetch::Tags) -> Self {
+ self.fetch_tags = tags;
+ self
+ }
+
+ fn push_url_inner<Url, E>(mut self, push_url: Url, should_rewrite_urls: bool) -> Result<Self, remote::init::Error>
+ where
+ Url: TryInto<gix_url::Url, Error = E>,
+ gix_url::parse::Error: From<E>,
+ {
+ let push_url = push_url
+ .try_into()
+ .map_err(|err| remote::init::Error::Url(err.into()))?;
+ self.push_url = push_url.into();
+
+ let (_, push_url_alias) = should_rewrite_urls
+ .then(|| remote::init::rewrite_urls(&self.repo.config, None, self.push_url.as_ref()))
+ .unwrap_or(Ok((None, None)))?;
+ self.push_url_alias = push_url_alias;
+
+ Ok(self)
+ }
+
+ /// Add `specs` as refspecs for `direction` to our list if they are unique, or ignore them otherwise.
+ pub fn with_refspecs<Spec>(
+ mut self,
+ specs: impl IntoIterator<Item = Spec>,
+ direction: remote::Direction,
+ ) -> Result<Self, gix_refspec::parse::Error>
+ where
+ Spec: AsRef<BStr>,
+ {
+ use remote::Direction::*;
+ let new_specs = specs
+ .into_iter()
+ .map(|spec| {
+ gix_refspec::parse(
+ spec.as_ref(),
+ match direction {
+ Push => gix_refspec::parse::Operation::Push,
+ Fetch => gix_refspec::parse::Operation::Fetch,
+ },
+ )
+ .map(|s| s.to_owned())
+ })
+ .collect::<Result<Vec<_>, _>>()?;
+ let specs = match direction {
+ Push => &mut self.push_specs,
+ Fetch => &mut self.fetch_specs,
+ };
+ for spec in new_specs {
+ if !specs.contains(&spec) {
+ specs.push(spec);
+ }
+ }
+ Ok(self)
+ }
+}
diff --git a/vendor/gix/src/remote/connect.rs b/vendor/gix/src/remote/connect.rs
new file mode 100644
index 000000000..8e656975e
--- /dev/null
+++ b/vendor/gix/src/remote/connect.rs
@@ -0,0 +1,166 @@
+#![allow(clippy::result_large_err)]
+use gix_protocol::transport::client::Transport;
+
+use crate::{remote::Connection, Progress, Remote};
+
+mod error {
+ use crate::{bstr::BString, config, remote};
+
+ /// The error returned by [connect()][crate::Remote::connect()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("Could not obtain options for connecting via ssh")]
+ SshOptions(#[from] config::ssh_connect_options::Error),
+ #[error("Could not obtain the current directory")]
+ CurrentDir(#[from] std::io::Error),
+ #[error("Could not access remote repository at \"{}\"", directory.display())]
+ InvalidRemoteRepositoryPath { directory: std::path::PathBuf },
+ #[error(transparent)]
+ SchemePermission(#[from] config::protocol::allow::Error),
+ #[error("Protocol {scheme:?} of url {url:?} is denied per configuration")]
+ ProtocolDenied { url: BString, scheme: gix_url::Scheme },
+ #[error(transparent)]
+ Connect(#[from] gix_protocol::transport::client::connect::Error),
+ #[error("The {} url was missing - don't know where to establish a connection to", direction.as_str())]
+ MissingUrl { direction: remote::Direction },
+ #[error("Protocol named {given:?} is not a valid protocol. Choose between 1 and 2")]
+ UnknownProtocol { given: BString },
+ #[error("Could not verify that \"{}\" url is a valid git directory before attempting to use it", url.to_bstring())]
+ FileUrl {
+ source: Box<gix_discover::is_git::Error>,
+ url: gix_url::Url,
+ },
+ }
+
+ impl gix_protocol::transport::IsSpuriousError for Error {
+ /// Return `true` if retrying might result in a different outcome due to IO working out differently.
+ fn is_spurious(&self) -> bool {
+ match self {
+ Error::Connect(err) => err.is_spurious(),
+ _ => false,
+ }
+ }
+ }
+}
+pub use error::Error;
+
+/// Establishing connections to remote hosts (without performing a git-handshake).
+impl<'repo> Remote<'repo> {
+ /// Create a new connection using `transport` to communicate, with `progress` to indicate changes.
+ ///
+ /// Note that this method expects the `transport` to be created by the user, which would involve the [`url()`][Self::url()].
+ /// It's meant to be used when async operation is needed with runtimes of the user's choice.
+ pub fn to_connection_with_transport<T, P>(&self, transport: T, progress: P) -> Connection<'_, 'repo, T, P>
+ where
+ T: Transport,
+ P: Progress,
+ {
+ Connection {
+ remote: self,
+ authenticate: None,
+ transport_options: None,
+ transport,
+ progress,
+ }
+ }
+
+ /// Connect to the url suitable for `direction` and return a handle through which operations can be performed.
+ ///
+ /// Note that the `protocol.version` configuration key affects the transport protocol used to connect,
+ /// with `2` being the default.
+ ///
+ /// The transport used for connection can be configured via `transport_mut().configure()` assuming the actually
+ /// used transport is well known. If that's not the case, the transport can be created by hand and passed to
+ /// [to_connection_with_transport()][Self::to_connection_with_transport()].
+ #[cfg(any(feature = "blocking-network-client", feature = "async-network-client-async-std"))]
+ #[gix_protocol::maybe_async::maybe_async]
+ pub async fn connect<P>(
+ &self,
+ direction: crate::remote::Direction,
+ progress: P,
+ ) -> Result<Connection<'_, 'repo, Box<dyn Transport + Send>, P>, Error>
+ where
+ P: Progress,
+ {
+ let (url, version) = self.sanitized_url_and_version(direction)?;
+ #[cfg(feature = "blocking-network-client")]
+ let scheme_is_ssh = url.scheme == gix_url::Scheme::Ssh;
+ let transport = gix_protocol::transport::connect(
+ url,
+ gix_protocol::transport::client::connect::Options {
+ version,
+ #[cfg(feature = "blocking-network-client")]
+ ssh: scheme_is_ssh
+ .then(|| self.repo.ssh_connect_options())
+ .transpose()?
+ .unwrap_or_default(),
+ },
+ )
+ .await?;
+ Ok(self.to_connection_with_transport(transport, progress))
+ }
+
+ /// Produce the sanitized URL and protocol version to use as obtained by querying the repository configuration.
+ ///
+ /// This can be useful when using custom transports to allow additional configuration.
+ pub fn sanitized_url_and_version(
+ &self,
+ direction: crate::remote::Direction,
+ ) -> Result<(gix_url::Url, gix_protocol::transport::Protocol), Error> {
+ fn sanitize(mut url: gix_url::Url) -> Result<gix_url::Url, Error> {
+ if url.scheme == gix_url::Scheme::File {
+ let mut dir = gix_path::to_native_path_on_windows(url.path.as_ref());
+ let kind = gix_discover::is_git(dir.as_ref())
+ .or_else(|_| {
+ dir.to_mut().push(gix_discover::DOT_GIT_DIR);
+ gix_discover::is_git(dir.as_ref())
+ })
+ .map_err(|err| Error::FileUrl {
+ source: err.into(),
+ url: url.clone(),
+ })?;
+ let (git_dir, _work_dir) = gix_discover::repository::Path::from_dot_git_dir(
+ dir.clone().into_owned(),
+ kind,
+ std::env::current_dir()?,
+ )
+ .ok_or_else(|| Error::InvalidRemoteRepositoryPath {
+ directory: dir.into_owned(),
+ })?
+ .into_repository_and_work_tree_directories();
+ url.path = gix_path::into_bstr(git_dir).into_owned();
+ }
+ Ok(url)
+ }
+
+ use gix_protocol::transport::Protocol;
+ let version = self
+ .repo
+ .config
+ .resolved
+ .integer("protocol", None, "version")
+ .unwrap_or(Ok(2))
+ .map_err(|err| Error::UnknownProtocol { given: err.input })
+ .and_then(|num| {
+ Ok(match num {
+ 1 => Protocol::V1,
+ 2 => Protocol::V2,
+ num => {
+ return Err(Error::UnknownProtocol {
+ given: num.to_string().into(),
+ })
+ }
+ })
+ })?;
+
+ let url = self.url(direction).ok_or(Error::MissingUrl { direction })?.to_owned();
+ if !self.repo.config.url_scheme()?.allow(&url.scheme) {
+ return Err(Error::ProtocolDenied {
+ url: url.to_bstring(),
+ scheme: url.scheme,
+ });
+ }
+ Ok((sanitize(url)?, version))
+ }
+}
diff --git a/vendor/gix/src/remote/connection/access.rs b/vendor/gix/src/remote/connection/access.rs
new file mode 100644
index 000000000..e4c31c3f5
--- /dev/null
+++ b/vendor/gix/src/remote/connection/access.rs
@@ -0,0 +1,67 @@
+use crate::{
+ remote::{connection::AuthenticateFn, Connection},
+ Remote,
+};
+
+/// Builder
+impl<'a, 'repo, T, P> Connection<'a, 'repo, T, P> {
+ /// Set a custom credentials callback to provide credentials if the remotes require authentication.
+ ///
+ /// Otherwise we will use the git configuration to perform the same task as the `git credential` helper program,
+ /// which is calling other helper programs in succession while resorting to a prompt to obtain credentials from the
+ /// user.
+ ///
+ /// A custom function may also be used to prevent accessing resources with authentication.
+ ///
+ /// Use the [configured_credentials()][Connection::configured_credentials()] method to obtain the implementation
+ /// that would otherwise be used, which can be useful to proxy the default configuration and obtain information about the
+ /// URLs to authenticate with.
+ pub fn with_credentials(
+ mut self,
+ helper: impl FnMut(gix_credentials::helper::Action) -> gix_credentials::protocol::Result + 'a,
+ ) -> Self {
+ self.authenticate = Some(Box::new(helper));
+ self
+ }
+
+ /// Provide configuration to be used before the first handshake is conducted.
+ /// It's typically created by initializing it with [`Repository::transport_options()`][crate::Repository::transport_options()], which
+ /// is also the default if this isn't set explicitly. Note that all of the default configuration is created from `git`
+ /// configuration, which can also be manipulated through overrides to affect the default configuration.
+ ///
+ /// Use this method to provide transport configuration with custom backend configuration that is not configurable by other means and
+ /// custom to the application at hand.
+ pub fn with_transport_options(mut self, config: Box<dyn std::any::Any>) -> Self {
+ self.transport_options = Some(config);
+ self
+ }
+}
+
+/// Access
+impl<'a, 'repo, T, P> Connection<'a, 'repo, T, P> {
+ /// A utility to return a function that will use this repository's configuration to obtain credentials, similar to
+ /// what `git credential` is doing.
+ ///
+ /// It's meant to be used by users of the [`with_credentials()`][Self::with_credentials()] builder to gain access to the
+ /// default way of handling credentials, which they can call as fallback.
+ pub fn configured_credentials(
+ &self,
+ url: gix_url::Url,
+ ) -> Result<AuthenticateFn<'static>, crate::config::credential_helpers::Error> {
+ let (mut cascade, _action_with_normalized_url, prompt_opts) =
+ self.remote.repo.config_snapshot().credential_helpers(url)?;
+ Ok(Box::new(move |action| cascade.invoke(action, prompt_opts.clone())) as AuthenticateFn<'_>)
+ }
+ /// Return the underlying remote that instantiate this connection.
+ pub fn remote(&self) -> &Remote<'repo> {
+ self.remote
+ }
+
+ /// Provide a mutable transport to allow interacting with it according to its actual type.
+ /// Note that the caller _should not_ call [`configure()`][gix_protocol::transport::client::TransportWithoutIO::configure()]
+ /// as we will call it automatically before performing the handshake. Instead, to bring in custom configuration,
+ /// call [`with_transport_options()`][Connection::with_transport_options()].
+ pub fn transport_mut(&mut self) -> &mut T {
+ &mut self.transport
+ }
+}
diff --git a/vendor/gix/src/remote/connection/fetch/config.rs b/vendor/gix/src/remote/connection/fetch/config.rs
new file mode 100644
index 000000000..4782991bc
--- /dev/null
+++ b/vendor/gix/src/remote/connection/fetch/config.rs
@@ -0,0 +1,26 @@
+use super::Error;
+use crate::{
+ config::{cache::util::ApplyLeniency, tree::Pack},
+ Repository,
+};
+
+pub fn index_threads(repo: &Repository) -> Result<Option<usize>, Error> {
+ Ok(repo
+ .config
+ .resolved
+ .integer_filter("pack", None, Pack::THREADS.name, &mut repo.filter_config_section())
+ .map(|threads| Pack::THREADS.try_into_usize(threads))
+ .transpose()
+ .with_leniency(repo.options.lenient_config)?)
+}
+
+pub fn pack_index_version(repo: &Repository) -> Result<gix_pack::index::Version, Error> {
+ Ok(repo
+ .config
+ .resolved
+ .integer("pack", None, Pack::INDEX_VERSION.name)
+ .map(|value| Pack::INDEX_VERSION.try_into_index_version(value))
+ .transpose()
+ .with_leniency(repo.options.lenient_config)?
+ .unwrap_or(gix_pack::index::Version::V2))
+}
diff --git a/vendor/gix/src/remote/connection/fetch/error.rs b/vendor/gix/src/remote/connection/fetch/error.rs
new file mode 100644
index 000000000..0e6a4b840
--- /dev/null
+++ b/vendor/gix/src/remote/connection/fetch/error.rs
@@ -0,0 +1,41 @@
+use crate::config;
+
+/// The error returned by [`receive()`](super::Prepare::receive()).
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum Error {
+ #[error("The value to configure pack threads should be 0 to auto-configure or the amount of threads to use")]
+ PackThreads(#[from] config::unsigned_integer::Error),
+ #[error("The value to configure the pack index version should be 1 or 2")]
+ PackIndexVersion(#[from] config::key::GenericError),
+ #[error("Could not decode server reply")]
+ FetchResponse(#[from] gix_protocol::fetch::response::Error),
+ #[error("Cannot fetch from a remote that uses {remote} while local repository uses {local} for object hashes")]
+ IncompatibleObjectHash {
+ local: gix_hash::Kind,
+ remote: gix_hash::Kind,
+ },
+ #[error(transparent)]
+ Negotiate(#[from] super::negotiate::Error),
+ #[error(transparent)]
+ Client(#[from] gix_protocol::transport::client::Error),
+ #[error(transparent)]
+ WritePack(#[from] gix_pack::bundle::write::Error),
+ #[error(transparent)]
+ UpdateRefs(#[from] super::refs::update::Error),
+ #[error("Failed to remove .keep file at \"{}\"", path.display())]
+ RemovePackKeepFile {
+ path: std::path::PathBuf,
+ source: std::io::Error,
+ },
+}
+
+impl gix_protocol::transport::IsSpuriousError for Error {
+ fn is_spurious(&self) -> bool {
+ match self {
+ Error::FetchResponse(err) => err.is_spurious(),
+ Error::Client(err) => err.is_spurious(),
+ _ => false,
+ }
+ }
+}
diff --git a/vendor/gix/src/remote/connection/fetch/mod.rs b/vendor/gix/src/remote/connection/fetch/mod.rs
new file mode 100644
index 000000000..4ce631b1e
--- /dev/null
+++ b/vendor/gix/src/remote/connection/fetch/mod.rs
@@ -0,0 +1,240 @@
+use gix_protocol::transport::client::Transport;
+
+use crate::{
+ bstr::BString,
+ remote,
+ remote::{
+ fetch::{DryRun, RefMap},
+ ref_map, Connection,
+ },
+ Progress,
+};
+
+mod error;
+pub use error::Error;
+
+use crate::remote::fetch::WritePackedRefs;
+
+/// The way reflog messages should be composed whenever a ref is written with recent objects from a remote.
+pub enum RefLogMessage {
+ /// Prefix the log with `action` and generate the typical suffix as `git` would.
+ Prefixed {
+ /// The action to use, like `fetch` or `pull`.
+ action: String,
+ },
+ /// Control the entire message, using `message` verbatim.
+ Override {
+ /// The complete reflog message.
+ message: BString,
+ },
+}
+
+impl RefLogMessage {
+ pub(crate) fn compose(&self, context: &str) -> BString {
+ match self {
+ RefLogMessage::Prefixed { action } => format!("{action}: {context}").into(),
+ RefLogMessage::Override { message } => message.to_owned(),
+ }
+ }
+}
+
+/// The status of the repository after the fetch operation
+#[derive(Debug, Clone)]
+pub enum Status {
+ /// Nothing changed as the remote didn't have anything new compared to our tracking branches, thus no pack was received
+ /// and no new object was added.
+ NoPackReceived {
+ /// However, depending on the refspecs, references might have been updated nonetheless to point to objects as
+ /// reported by the remote.
+ update_refs: refs::update::Outcome,
+ },
+ /// There was at least one tip with a new object which we received.
+ Change {
+ /// Information collected while writing the pack and its index.
+ write_pack_bundle: gix_pack::bundle::write::Outcome,
+ /// Information collected while updating references.
+ update_refs: refs::update::Outcome,
+ },
+ /// A dry run was performed which leaves the local repository without any change
+ /// nor will a pack have been received.
+ DryRun {
+ /// Information about what updates to refs would have been done.
+ update_refs: refs::update::Outcome,
+ },
+}
+
+/// The outcome of receiving a pack via [`Prepare::receive()`].
+#[derive(Debug, Clone)]
+pub struct Outcome {
+ /// The result of the initial mapping of references, the prerequisite for any fetch.
+ pub ref_map: RefMap,
+ /// The status of the operation to indicate what happened.
+ pub status: Status,
+}
+
+/// The progress ids used in during various steps of the fetch operation.
+///
+/// Note that tagged progress isn't very widely available yet, but support can be improved as needed.
+///
+/// Use this information to selectively extract the progress of interest in case the parent application has custom visualization.
+#[derive(Debug, Copy, Clone)]
+pub enum ProgressId {
+ /// The progress name is defined by the remote and the progress messages it sets, along with their progress values and limits.
+ RemoteProgress,
+}
+
+impl From<ProgressId> for gix_features::progress::Id {
+ fn from(v: ProgressId) -> Self {
+ match v {
+ ProgressId::RemoteProgress => *b"FERP",
+ }
+ }
+}
+
+///
+pub mod negotiate;
+
+///
+pub mod prepare {
+ /// The error returned by [`prepare_fetch()`][super::Connection::prepare_fetch()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("Cannot perform a meaningful fetch operation without any configured ref-specs")]
+ MissingRefSpecs,
+ #[error(transparent)]
+ RefMap(#[from] crate::remote::ref_map::Error),
+ }
+
+ impl gix_protocol::transport::IsSpuriousError for Error {
+ fn is_spurious(&self) -> bool {
+ match self {
+ Error::RefMap(err) => err.is_spurious(),
+ _ => false,
+ }
+ }
+ }
+}
+
+impl<'remote, 'repo, T, P> Connection<'remote, 'repo, T, P>
+where
+ T: Transport,
+ P: Progress,
+{
+ /// Perform a handshake with the remote and obtain a ref-map with `options`, and from there one
+ /// Note that at this point, the `transport` should already be configured using the [`transport_mut()`][Self::transport_mut()]
+ /// method, as it will be consumed here.
+ ///
+ /// From there additional properties of the fetch can be adjusted to override the defaults that are configured via gix-config.
+ ///
+ /// # Async Experimental
+ ///
+ /// Note that this implementation is currently limited correctly in blocking mode only as it relies on Drop semantics to close the connection
+ /// should the fetch not be performed. Furthermore, there the code doing the fetch is inherently blocking and it's not offloaded to a thread,
+ /// making this call block the executor.
+ /// It's best to unblock it by placing it into its own thread or offload it should usage in an async context be truly required.
+ #[allow(clippy::result_large_err)]
+ #[gix_protocol::maybe_async::maybe_async]
+ pub async fn prepare_fetch(
+ mut self,
+ options: ref_map::Options,
+ ) -> Result<Prepare<'remote, 'repo, T, P>, prepare::Error> {
+ if self.remote.refspecs(remote::Direction::Fetch).is_empty() {
+ return Err(prepare::Error::MissingRefSpecs);
+ }
+ let ref_map = self.ref_map_inner(options).await?;
+ Ok(Prepare {
+ con: Some(self),
+ ref_map,
+ dry_run: DryRun::No,
+ reflog_message: None,
+ write_packed_refs: WritePackedRefs::Never,
+ })
+ }
+}
+
+impl<'remote, 'repo, T, P> Prepare<'remote, 'repo, T, P>
+where
+ T: Transport,
+{
+ /// Return the ref_map (that includes the server handshake) which was part of listing refs prior to fetching a pack.
+ pub fn ref_map(&self) -> &RefMap {
+ &self.ref_map
+ }
+}
+
+mod config;
+mod receive_pack;
+///
+#[path = "update_refs/mod.rs"]
+pub mod refs;
+
+/// A structure to hold the result of the handshake with the remote and configure the upcoming fetch operation.
+pub struct Prepare<'remote, 'repo, T, P>
+where
+ T: Transport,
+{
+ con: Option<Connection<'remote, 'repo, T, P>>,
+ ref_map: RefMap,
+ dry_run: DryRun,
+ reflog_message: Option<RefLogMessage>,
+ write_packed_refs: WritePackedRefs,
+}
+
+/// Builder
+impl<'remote, 'repo, T, P> Prepare<'remote, 'repo, T, P>
+where
+ T: Transport,
+{
+ /// If dry run is enabled, no change to the repository will be made.
+ ///
+ /// This works by not actually fetching the pack after negotiating it, nor will refs be updated.
+ pub fn with_dry_run(mut self, enabled: bool) -> Self {
+ self.dry_run = if enabled { DryRun::Yes } else { DryRun::No };
+ self
+ }
+
+ /// If enabled, don't write ref updates to loose refs, but put them exclusively to packed-refs.
+ ///
+ /// This improves performance and allows case-sensitive filesystems to deal with ref names that would otherwise
+ /// collide.
+ pub fn with_write_packed_refs_only(mut self, enabled: bool) -> Self {
+ self.write_packed_refs = if enabled {
+ WritePackedRefs::Only
+ } else {
+ WritePackedRefs::Never
+ };
+ self
+ }
+
+ /// Set the reflog message to use when updating refs after fetching a pack.
+ pub fn with_reflog_message(mut self, reflog_message: RefLogMessage) -> Self {
+ self.reflog_message = reflog_message.into();
+ self
+ }
+}
+
+impl<'remote, 'repo, T, P> Drop for Prepare<'remote, 'repo, T, P>
+where
+ T: Transport,
+{
+ fn drop(&mut self) {
+ if let Some(mut con) = self.con.take() {
+ #[cfg(feature = "async-network-client")]
+ {
+ // TODO: this should be an async drop once the feature is available.
+ // Right now we block the executor by forcing this communication, but that only
+ // happens if the user didn't actually try to receive a pack, which consumes the
+ // connection in an async context.
+ gix_protocol::futures_lite::future::block_on(gix_protocol::indicate_end_of_interaction(
+ &mut con.transport,
+ ))
+ .ok();
+ }
+ #[cfg(not(feature = "async-network-client"))]
+ {
+ gix_protocol::indicate_end_of_interaction(&mut con.transport).ok();
+ }
+ }
+ }
+}
diff --git a/vendor/gix/src/remote/connection/fetch/negotiate.rs b/vendor/gix/src/remote/connection/fetch/negotiate.rs
new file mode 100644
index 000000000..f5051ec72
--- /dev/null
+++ b/vendor/gix/src/remote/connection/fetch/negotiate.rs
@@ -0,0 +1,78 @@
+/// The way the negotiation is performed
+#[derive(Copy, Clone)]
+pub(crate) enum Algorithm {
+ /// Our very own implementation that probably should be replaced by one of the known algorithms soon.
+ Naive,
+}
+
+/// The error returned during negotiation.
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum Error {
+ #[error("We were unable to figure out what objects the server should send after {rounds} round(s)")]
+ NegotiationFailed { rounds: usize },
+}
+
+/// Negotiate one round with `algo` by looking at `ref_map` and adjust `arguments` to contain the haves and wants.
+/// If this is not the first round, the `previous_response` is set with the last recorded server response.
+/// Returns `true` if the negotiation is done from our side so the server won't keep asking.
+pub(crate) fn one_round(
+ algo: Algorithm,
+ round: usize,
+ repo: &crate::Repository,
+ ref_map: &crate::remote::fetch::RefMap,
+ fetch_tags: crate::remote::fetch::Tags,
+ arguments: &mut gix_protocol::fetch::Arguments,
+ _previous_response: Option<&gix_protocol::fetch::Response>,
+) -> Result<bool, Error> {
+ let tag_refspec_to_ignore = fetch_tags
+ .to_refspec()
+ .filter(|_| matches!(fetch_tags, crate::remote::fetch::Tags::Included));
+ match algo {
+ Algorithm::Naive => {
+ assert_eq!(round, 1, "Naive always finishes after the first round, and claims.");
+ let mut has_missing_tracking_branch = false;
+ for mapping in &ref_map.mappings {
+ if tag_refspec_to_ignore.map_or(false, |tag_spec| {
+ mapping
+ .spec_index
+ .implicit_index()
+ .and_then(|idx| ref_map.extra_refspecs.get(idx))
+ .map_or(false, |spec| spec.to_ref() == tag_spec)
+ }) {
+ continue;
+ }
+ let have_id = mapping.local.as_ref().and_then(|name| {
+ repo.find_reference(name)
+ .ok()
+ .and_then(|r| r.target().try_id().map(ToOwned::to_owned))
+ });
+ match have_id {
+ Some(have_id) => {
+ if let Some(want_id) = mapping.remote.as_id() {
+ if want_id != have_id {
+ arguments.want(want_id);
+ arguments.have(have_id);
+ }
+ }
+ }
+ None => {
+ if let Some(want_id) = mapping.remote.as_id() {
+ arguments.want(want_id);
+ has_missing_tracking_branch = true;
+ }
+ }
+ }
+ }
+
+ if has_missing_tracking_branch {
+ if let Ok(Some(r)) = repo.head_ref() {
+ if let Some(id) = r.target().try_id() {
+ arguments.have(id);
+ }
+ }
+ }
+ Ok(true)
+ }
+ }
+}
diff --git a/vendor/gix/src/remote/connection/fetch/receive_pack.rs b/vendor/gix/src/remote/connection/fetch/receive_pack.rs
new file mode 100644
index 000000000..686de5999
--- /dev/null
+++ b/vendor/gix/src/remote/connection/fetch/receive_pack.rs
@@ -0,0 +1,238 @@
+use std::sync::atomic::AtomicBool;
+
+use gix_odb::FindExt;
+use gix_protocol::transport::client::Transport;
+
+use crate::{
+ remote,
+ remote::{
+ connection::fetch::config,
+ fetch,
+ fetch::{negotiate, refs, Error, Outcome, Prepare, ProgressId, RefLogMessage, Status},
+ },
+ Progress,
+};
+
+impl<'remote, 'repo, T, P> Prepare<'remote, 'repo, T, P>
+where
+ T: Transport,
+ P: Progress,
+ P::SubProgress: 'static,
+{
+ /// Receive the pack and perform the operation as configured by git via `gix-config` or overridden by various builder methods.
+ /// Return `Ok(None)` if there was nothing to do because all remote refs are at the same state as they are locally, or `Ok(Some(outcome))`
+ /// to inform about all the changes that were made.
+ ///
+ /// ### Negotiation
+ ///
+ /// "fetch.negotiationAlgorithm" describes algorithms `git` uses currently, with the default being `consecutive` and `skipping` being
+ /// experimented with. We currently implement something we could call 'naive' which works for now.
+ ///
+ /// ### Pack `.keep` files
+ ///
+    /// Packs that are freshly written to the object database are vulnerable to garbage collection for the brief time that it takes between
+ /// them being placed and the respective references to be written to disk which binds their objects to the commit graph, making them reachable.
+ ///
+    /// To circumvent this issue, a `.keep` file is created before any pack related file (i.e. `.pack` or `.idx`) is written, which signals to the
+    /// garbage collector (like `git maintenance`, `git gc`) that it should leave the corresponding pack file alone.
+ ///
+ /// If there were any ref updates or the received pack was empty, the `.keep` file will be deleted automatically leaving in its place at
+ /// `write_pack_bundle.keep_path` a `None`.
+ /// However, if no ref-update happened the path will still be present in `write_pack_bundle.keep_path` and is expected to be handled by the caller.
+ /// A known application for this behaviour is in `remote-helper` implementations which should send this path via `lock <path>` to stdout
+ /// to inform git about the file that it will remove once it updated the refs accordingly.
+ ///
+ /// ### Deviation
+ ///
+ /// When **updating refs**, the `git-fetch` docs state that the following:
+ ///
+    /// > Unlike when pushing with git-push, any updates outside of refs/{tags,heads}/* will be accepted without + in the refspec (or --force), whether that’s swapping e.g. a tree object for a blob, or a commit for another commit that doesn’t have the previous commit as an ancestor etc.
+ ///
+ /// We explicitly don't special case those refs and expect the user to take control. Note that by its nature,
+ /// force only applies to refs pointing to commits and if they don't, they will be updated either way in our
+ /// implementation as well.
+ ///
+ /// ### Async Mode Shortcoming
+ ///
+ /// Currently the entire process of resolving a pack is blocking the executor. This can be fixed using the `blocking` crate, but it
+ /// didn't seem worth the tradeoff of having more complex code.
+ ///
+ /// ### Configuration
+ ///
+ /// - `gitoxide.userAgent` is read to obtain the application user agent for git servers and for HTTP servers as well.
+ ///
+ #[gix_protocol::maybe_async::maybe_async]
+ pub async fn receive(mut self, should_interrupt: &AtomicBool) -> Result<Outcome, Error> {
+ let mut con = self.con.take().expect("receive() can only be called once");
+
+ let handshake = &self.ref_map.handshake;
+ let protocol_version = handshake.server_protocol_version;
+
+ let fetch = gix_protocol::Command::Fetch;
+ let progress = &mut con.progress;
+ let repo = con.remote.repo;
+ let fetch_features = {
+ let mut f = fetch.default_features(protocol_version, &handshake.capabilities);
+ f.push(repo.config.user_agent_tuple());
+ f
+ };
+
+ gix_protocol::fetch::Response::check_required_features(protocol_version, &fetch_features)?;
+ let sideband_all = fetch_features.iter().any(|(n, _)| *n == "sideband-all");
+ let mut arguments = gix_protocol::fetch::Arguments::new(protocol_version, fetch_features);
+ if matches!(con.remote.fetch_tags, crate::remote::fetch::Tags::Included) {
+ if !arguments.can_use_include_tag() {
+ unimplemented!("we expect servers to support 'include-tag', otherwise we have to implement another pass to fetch attached tags separately");
+ }
+ arguments.use_include_tag();
+ }
+ let mut previous_response = None::<gix_protocol::fetch::Response>;
+ let mut round = 1;
+
+ if self.ref_map.object_hash != repo.object_hash() {
+ return Err(Error::IncompatibleObjectHash {
+ local: repo.object_hash(),
+ remote: self.ref_map.object_hash,
+ });
+ }
+
+ let reader = 'negotiation: loop {
+ progress.step();
+ progress.set_name(format!("negotiate (round {round})"));
+
+ let is_done = match negotiate::one_round(
+ negotiate::Algorithm::Naive,
+ round,
+ repo,
+ &self.ref_map,
+ con.remote.fetch_tags,
+ &mut arguments,
+ previous_response.as_ref(),
+ ) {
+ Ok(_) if arguments.is_empty() => {
+ gix_protocol::indicate_end_of_interaction(&mut con.transport).await.ok();
+ let update_refs = refs::update(
+ repo,
+ self.reflog_message
+ .take()
+ .unwrap_or_else(|| RefLogMessage::Prefixed { action: "fetch".into() }),
+ &self.ref_map.mappings,
+ con.remote.refspecs(remote::Direction::Fetch),
+ &self.ref_map.extra_refspecs,
+ con.remote.fetch_tags,
+ self.dry_run,
+ self.write_packed_refs,
+ )?;
+ return Ok(Outcome {
+ ref_map: std::mem::take(&mut self.ref_map),
+ status: Status::NoPackReceived { update_refs },
+ });
+ }
+ Ok(is_done) => is_done,
+ Err(err) => {
+ gix_protocol::indicate_end_of_interaction(&mut con.transport).await.ok();
+ return Err(err.into());
+ }
+ };
+ round += 1;
+ let mut reader = arguments.send(&mut con.transport, is_done).await?;
+ if sideband_all {
+ setup_remote_progress(progress, &mut reader);
+ }
+ let response = gix_protocol::fetch::Response::from_line_reader(protocol_version, &mut reader).await?;
+ if response.has_pack() {
+ progress.step();
+ progress.set_name("receiving pack");
+ if !sideband_all {
+ setup_remote_progress(progress, &mut reader);
+ }
+ break 'negotiation reader;
+ } else {
+ previous_response = Some(response);
+ }
+ };
+
+ let options = gix_pack::bundle::write::Options {
+ thread_limit: config::index_threads(repo)?,
+ index_version: config::pack_index_version(repo)?,
+ iteration_mode: gix_pack::data::input::Mode::Verify,
+ object_hash: con.remote.repo.object_hash(),
+ };
+
+ let mut write_pack_bundle = if matches!(self.dry_run, fetch::DryRun::No) {
+ Some(gix_pack::Bundle::write_to_directory(
+ #[cfg(feature = "async-network-client")]
+ {
+ gix_protocol::futures_lite::io::BlockOn::new(reader)
+ },
+ #[cfg(not(feature = "async-network-client"))]
+ {
+ reader
+ },
+ Some(repo.objects.store_ref().path().join("pack")),
+ con.progress,
+ should_interrupt,
+ Some(Box::new({
+ let repo = repo.clone();
+ move |oid, buf| repo.objects.find(oid, buf).ok()
+ })),
+ options,
+ )?)
+ } else {
+ drop(reader);
+ None
+ };
+
+ if matches!(protocol_version, gix_protocol::transport::Protocol::V2) {
+ gix_protocol::indicate_end_of_interaction(&mut con.transport).await.ok();
+ }
+
+ let update_refs = refs::update(
+ repo,
+ self.reflog_message
+ .take()
+ .unwrap_or_else(|| RefLogMessage::Prefixed { action: "fetch".into() }),
+ &self.ref_map.mappings,
+ con.remote.refspecs(remote::Direction::Fetch),
+ &self.ref_map.extra_refspecs,
+ con.remote.fetch_tags,
+ self.dry_run,
+ self.write_packed_refs,
+ )?;
+
+ if let Some(bundle) = write_pack_bundle.as_mut() {
+ if !update_refs.edits.is_empty() || bundle.index.num_objects == 0 {
+ if let Some(path) = bundle.keep_path.take() {
+ std::fs::remove_file(&path).map_err(|err| Error::RemovePackKeepFile { path, source: err })?;
+ }
+ }
+ }
+
+ Ok(Outcome {
+ ref_map: std::mem::take(&mut self.ref_map),
+ status: match write_pack_bundle {
+ Some(write_pack_bundle) => Status::Change {
+ write_pack_bundle,
+ update_refs,
+ },
+ None => Status::DryRun { update_refs },
+ },
+ })
+ }
+}
+
+fn setup_remote_progress<P>(
+ progress: &mut P,
+ reader: &mut Box<dyn gix_protocol::transport::client::ExtendedBufRead + Unpin + '_>,
+) where
+ P: Progress,
+ P::SubProgress: 'static,
+{
+ use gix_protocol::transport::client::ExtendedBufRead;
+ reader.set_progress_handler(Some(Box::new({
+ let mut remote_progress = progress.add_child_with_id("remote", ProgressId::RemoteProgress.into());
+ move |is_err: bool, data: &[u8]| {
+ gix_protocol::RemoteProgress::translate_to_progress(is_err, data, &mut remote_progress)
+ }
+ }) as gix_protocol::transport::client::HandleProgress));
+}
diff --git a/vendor/gix/src/remote/connection/fetch/update_refs/mod.rs b/vendor/gix/src/remote/connection/fetch/update_refs/mod.rs
new file mode 100644
index 000000000..953490672
--- /dev/null
+++ b/vendor/gix/src/remote/connection/fetch/update_refs/mod.rs
@@ -0,0 +1,274 @@
+#![allow(clippy::result_large_err)]
+use std::{collections::BTreeMap, convert::TryInto, path::PathBuf};
+
+use gix_odb::{Find, FindExt};
+use gix_ref::{
+ transaction::{Change, LogChange, PreviousValue, RefEdit, RefLog},
+ Target, TargetRef,
+};
+
+use crate::{
+ ext::ObjectIdExt,
+ remote::{
+ fetch,
+ fetch::{refs::update::Mode, RefLogMessage, Source},
+ },
+ Repository,
+};
+
+///
+pub mod update;
+
+/// Information about the update of a single reference, corresponding to the respective entry in [`RefMap::mappings`][crate::remote::fetch::RefMap::mappings].
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct Update {
+ /// The way the update was performed.
+ pub mode: update::Mode,
+ /// The index to the edit that was created from the corresponding mapping, or `None` if there was no local ref.
+ pub edit_index: Option<usize>,
+}
+
+impl From<update::Mode> for Update {
+ fn from(mode: Mode) -> Self {
+ Update { mode, edit_index: None }
+ }
+}
+
+/// Update all refs as derived from `refmap.mappings` and produce an `Outcome` informing about all applied changes in detail, with each
+/// [`update`][Update] corresponding to the [`fetch::Mapping`] at the same index.
+/// If `dry_run` is true, ref transactions won't actually be applied, but are assumed to work without error so the underlying
+/// `repo` is not actually changed. Also it won't perform an 'object exists' check as these are likely not to exist as the pack
+/// wasn't fetched either.
+/// `action` is the prefix used for reflog entries, and is typically "fetch".
+///
+/// It can be used to produce typical information that one is used to from `git fetch`.
+#[allow(clippy::too_many_arguments)]
+pub(crate) fn update(
+ repo: &Repository,
+ message: RefLogMessage,
+ mappings: &[fetch::Mapping],
+ refspecs: &[gix_refspec::RefSpec],
+ extra_refspecs: &[gix_refspec::RefSpec],
+ fetch_tags: fetch::Tags,
+ dry_run: fetch::DryRun,
+ write_packed_refs: fetch::WritePackedRefs,
+) -> Result<update::Outcome, update::Error> {
+ let mut edits = Vec::new();
+ let mut updates = Vec::new();
+
+ let implicit_tag_refspec = fetch_tags
+ .to_refspec()
+ .filter(|_| matches!(fetch_tags, crate::remote::fetch::Tags::Included));
+ for (remote, local, spec, is_implicit_tag) in mappings.iter().filter_map(
+ |fetch::Mapping {
+ remote,
+ local,
+ spec_index,
+ }| {
+ spec_index.get(refspecs, extra_refspecs).map(|spec| {
+ (
+ remote,
+ local,
+ spec,
+ implicit_tag_refspec.map_or(false, |tag_spec| spec.to_ref() == tag_spec),
+ )
+ })
+ },
+ ) {
+ let remote_id = match remote.as_id() {
+ Some(id) => id,
+ None => continue,
+ };
+ if dry_run == fetch::DryRun::No && !repo.objects.contains(remote_id) {
+ let update = if is_implicit_tag {
+ update::Mode::ImplicitTagNotSentByRemote.into()
+ } else {
+ update::Mode::RejectedSourceObjectNotFound { id: remote_id.into() }.into()
+ };
+ updates.push(update);
+ continue;
+ }
+ let checked_out_branches = worktree_branches(repo)?;
+ let (mode, edit_index) = match local {
+ Some(name) => {
+ let (mode, reflog_message, name, previous_value) = match repo.try_find_reference(name)? {
+ Some(existing) => {
+ if let Some(wt_dir) = checked_out_branches.get(existing.name()) {
+ let mode = update::Mode::RejectedCurrentlyCheckedOut {
+ worktree_dir: wt_dir.to_owned(),
+ };
+ updates.push(mode.into());
+ continue;
+ }
+ match existing.target() {
+ TargetRef::Symbolic(_) => {
+ updates.push(update::Mode::RejectedSymbolic.into());
+ continue;
+ }
+ TargetRef::Peeled(local_id) => {
+ let previous_value =
+ PreviousValue::MustExistAndMatch(Target::Peeled(local_id.to_owned()));
+ let (mode, reflog_message) = if local_id == remote_id {
+ (update::Mode::NoChangeNeeded, "no update will be performed")
+ } else if let Some(gix_ref::Category::Tag) = existing.name().category() {
+ if spec.allow_non_fast_forward() {
+ (update::Mode::Forced, "updating tag")
+ } else {
+ updates.push(update::Mode::RejectedTagUpdate.into());
+ continue;
+ }
+ } else {
+ let mut force = spec.allow_non_fast_forward();
+ let is_fast_forward = match dry_run {
+ fetch::DryRun::No => {
+ let ancestors = repo
+ .find_object(local_id)?
+ .try_into_commit()
+ .map_err(|_| ())
+ .and_then(|c| {
+ c.committer().map(|a| a.time.seconds_since_unix_epoch).map_err(|_| ())
+ }).and_then(|local_commit_time|
+ remote_id
+ .to_owned()
+ .ancestors(|id, buf| repo.objects.find_commit_iter(id, buf))
+ .sorting(
+ gix_traverse::commit::Sorting::ByCommitTimeNewestFirstCutoffOlderThan {
+ time_in_seconds_since_epoch: local_commit_time
+ },
+ )
+ .map_err(|_| ())
+ );
+ match ancestors {
+ Ok(mut ancestors) => {
+ ancestors.any(|cid| cid.map_or(false, |cid| cid == local_id))
+ }
+ Err(_) => {
+ force = true;
+ false
+ }
+ }
+ }
+ fetch::DryRun::Yes => true,
+ };
+ if is_fast_forward {
+ (
+ update::Mode::FastForward,
+ matches!(dry_run, fetch::DryRun::Yes)
+ .then(|| "fast-forward (guessed in dry-run)")
+ .unwrap_or("fast-forward"),
+ )
+ } else if force {
+ (update::Mode::Forced, "forced-update")
+ } else {
+ updates.push(update::Mode::RejectedNonFastForward.into());
+ continue;
+ }
+ };
+ (mode, reflog_message, existing.name().to_owned(), previous_value)
+ }
+ }
+ }
+ None => {
+ let name: gix_ref::FullName = name.try_into()?;
+ let reflog_msg = match name.category() {
+ Some(gix_ref::Category::Tag) => "storing tag",
+ Some(gix_ref::Category::LocalBranch) => "storing head",
+ _ => "storing ref",
+ };
+ (
+ update::Mode::New,
+ reflog_msg,
+ name,
+ PreviousValue::ExistingMustMatch(Target::Peeled(remote_id.to_owned())),
+ )
+ }
+ };
+ let edit = RefEdit {
+ change: Change::Update {
+ log: LogChange {
+ mode: RefLog::AndReference,
+ force_create_reflog: false,
+ message: message.compose(reflog_message),
+ },
+ expected: previous_value,
+ new: if let Source::Ref(gix_protocol::handshake::Ref::Symbolic { target, .. }) = &remote {
+ match mappings.iter().find_map(|m| {
+ m.remote.as_name().and_then(|name| {
+ (name == target)
+ .then(|| m.local.as_ref().and_then(|local| local.try_into().ok()))
+ .flatten()
+ })
+ }) {
+ Some(local_branch) => {
+ // This is always safe because…
+ // - the reference may exist already
+ // - if it doesn't exist it will be created - we are here because it's in the list of mappings after all
+ // - if it exists and is updated, and the update is rejected due to non-fastforward for instance, the
+ // target reference still exists and we can point to it.
+ Target::Symbolic(local_branch)
+ }
+ None => Target::Peeled(remote_id.into()),
+ }
+ } else {
+ Target::Peeled(remote_id.into())
+ },
+ },
+ name,
+ deref: false,
+ };
+ let edit_index = edits.len();
+ edits.push(edit);
+ (mode, Some(edit_index))
+ }
+ None => (update::Mode::NoChangeNeeded, None),
+ };
+ updates.push(Update { mode, edit_index })
+ }
+
+ let edits = match dry_run {
+ fetch::DryRun::No => {
+ let (file_lock_fail, packed_refs_lock_fail) = repo
+ .config
+ .lock_timeout()
+ .map_err(crate::reference::edit::Error::from)?;
+ repo.refs
+ .transaction()
+ .packed_refs(
+ match write_packed_refs {
+ fetch::WritePackedRefs::Only => {
+ gix_ref::file::transaction::PackedRefs::DeletionsAndNonSymbolicUpdatesRemoveLooseSourceReference(Box::new(|oid, buf| {
+ repo.objects
+ .try_find(oid, buf)
+ .map(|obj| obj.map(|obj| obj.kind))
+ .map_err(|err| Box::new(err) as Box<dyn std::error::Error + Send + Sync + 'static>)
+ }))},
+ fetch::WritePackedRefs::Never => gix_ref::file::transaction::PackedRefs::DeletionsOnly
+ }
+ )
+ .prepare(edits, file_lock_fail, packed_refs_lock_fail)
+ .map_err(crate::reference::edit::Error::from)?
+ .commit(repo.committer().transpose().map_err(|err| update::Error::EditReferences(crate::reference::edit::Error::ParseCommitterTime(err)))?)
+ .map_err(crate::reference::edit::Error::from)?
+ }
+ fetch::DryRun::Yes => edits,
+ };
+
+ Ok(update::Outcome { edits, updates })
+}
+
+fn worktree_branches(repo: &Repository) -> Result<BTreeMap<gix_ref::FullName, PathBuf>, update::Error> {
+ let mut map = BTreeMap::new();
+ if let Some((wt_dir, head_ref)) = repo.work_dir().zip(repo.head_ref().ok().flatten()) {
+ map.insert(head_ref.inner.name, wt_dir.to_owned());
+ }
+ for proxy in repo.worktrees()? {
+ let repo = proxy.into_repo_with_possibly_inaccessible_worktree()?;
+ if let Some((wt_dir, head_ref)) = repo.work_dir().zip(repo.head_ref().ok().flatten()) {
+ map.insert(head_ref.inner.name, wt_dir.to_owned());
+ }
+ }
+ Ok(map)
+}
+
+#[cfg(test)]
+mod tests;
diff --git a/vendor/gix/src/remote/connection/fetch/update_refs/tests.rs b/vendor/gix/src/remote/connection/fetch/update_refs/tests.rs
new file mode 100644
index 000000000..145990ac8
--- /dev/null
+++ b/vendor/gix/src/remote/connection/fetch/update_refs/tests.rs
@@ -0,0 +1,607 @@
+pub fn restricted() -> crate::open::Options {
+ crate::open::Options::isolated().config_overrides(["user.name=gitoxide", "user.email=gitoxide@localhost"])
+}
+
+/// Convert a hexadecimal hash into its corresponding `ObjectId` or _panic_.
+fn hex_to_id(hex: &str) -> gix_hash::ObjectId {
+ gix_hash::ObjectId::from_hex(hex.as_bytes()).expect("40 bytes hex")
+}
+
+mod update {
+ use std::convert::TryInto;
+
+ use gix_testtools::Result;
+
+ use super::hex_to_id;
+ use crate as gix;
+
+ fn base_repo_path() -> String {
+ gix::path::realpath(
+ gix_testtools::scripted_fixture_read_only("make_remote_repos.sh")
+ .unwrap()
+ .join("base"),
+ )
+ .unwrap()
+ .to_string_lossy()
+ .into_owned()
+ }
+
+ fn repo(name: &str) -> gix::Repository {
+ let dir =
+ gix_testtools::scripted_fixture_read_only_with_args("make_fetch_repos.sh", [base_repo_path()]).unwrap();
+ gix::open_opts(dir.join(name), restricted()).unwrap()
+ }
+ fn repo_rw(name: &str) -> (gix::Repository, gix_testtools::tempfile::TempDir) {
+ let dir = gix_testtools::scripted_fixture_writable_with_args(
+ "make_fetch_repos.sh",
+ [base_repo_path()],
+ gix_testtools::Creation::ExecuteScript,
+ )
+ .unwrap();
+ let repo = gix::open_opts(dir.path().join(name), restricted()).unwrap();
+ (repo, dir)
+ }
+ use gix_ref::{transaction::Change, TargetRef};
+
+ use crate::{
+ bstr::BString,
+ remote::{
+ fetch,
+ fetch::{refs::tests::restricted, Mapping, RefLogMessage, Source, SpecIndex},
+ },
+ };
+
+ #[test]
+ fn various_valid_updates() {
+ let repo = repo("two-origins");
+ for (spec, expected_mode, reflog_message, detail) in [
+ (
+ "refs/heads/main:refs/remotes/origin/main",
+ fetch::refs::update::Mode::NoChangeNeeded,
+ Some("no update will be performed"),
+ "these refs are en-par since the initial clone",
+ ),
+ (
+ "refs/heads/main",
+ fetch::refs::update::Mode::NoChangeNeeded,
+ None,
+ "without local destination ref there is nothing to do for us, ever (except for FETCH_HEADs) later",
+ ),
+ (
+ "refs/heads/main:refs/remotes/origin/new-main",
+ fetch::refs::update::Mode::New,
+ Some("storing ref"),
+ "the destination branch doesn't exist and needs to be created",
+ ),
+ (
+ "refs/heads/main:refs/heads/feature",
+ fetch::refs::update::Mode::New,
+ Some("storing head"),
+ "reflog messages are specific to the type of branch stored, to some limited extend",
+ ),
+ (
+ "refs/heads/main:refs/tags/new-tag",
+ fetch::refs::update::Mode::New,
+ Some("storing tag"),
+ "reflog messages are specific to the type of branch stored, to some limited extend",
+ ),
+ (
+ "+refs/heads/main:refs/remotes/origin/new-main",
+ fetch::refs::update::Mode::New,
+ Some("storing ref"),
+ "just to validate that we really are in dry-run mode, or else this ref would be present now",
+ ),
+ (
+ "+refs/heads/main:refs/remotes/origin/g",
+ fetch::refs::update::Mode::FastForward,
+ Some("fast-forward (guessed in dry-run)"),
+ "a forced non-fastforward (main goes backwards), but dry-run calls it fast-forward",
+ ),
+ (
+ "+refs/heads/main:refs/tags/b-tag",
+ fetch::refs::update::Mode::Forced,
+ Some("updating tag"),
+ "tags can only be forced",
+ ),
+ (
+ "refs/heads/main:refs/tags/b-tag",
+ fetch::refs::update::Mode::RejectedTagUpdate,
+ None,
+ "otherwise a tag is always refusing itself to be overwritten (no-clobber)",
+ ),
+ (
+ "+refs/remotes/origin/g:refs/heads/main",
+ fetch::refs::update::Mode::RejectedCurrentlyCheckedOut {
+ worktree_dir: repo.work_dir().expect("present").to_owned(),
+ },
+ None,
+ "checked out branches cannot be written, as it requires a merge of sorts which isn't done here",
+ ),
+ (
+ "ffffffffffffffffffffffffffffffffffffffff:refs/heads/invalid-source-object",
+ fetch::refs::update::Mode::RejectedSourceObjectNotFound {
+ id: hex_to_id("ffffffffffffffffffffffffffffffffffffffff"),
+ },
+ None,
+ "checked out branches cannot be written, as it requires a merge of sorts which isn't done here",
+ ),
+ (
+ "refs/remotes/origin/g:refs/heads/not-currently-checked-out",
+ fetch::refs::update::Mode::FastForward,
+ Some("fast-forward (guessed in dry-run)"),
+ "a fast-forward only fast-forward situation, all good",
+ ),
+ ] {
+ let (mapping, specs) = mapping_from_spec(spec, &repo);
+ let out = fetch::refs::update(
+ &repo,
+ prefixed("action"),
+ &mapping,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ reflog_message.map(|_| fetch::DryRun::Yes).unwrap_or(fetch::DryRun::No),
+ fetch::WritePackedRefs::Never,
+ )
+ .unwrap();
+
+ assert_eq!(
+ out.updates,
+ vec![fetch::refs::Update {
+ mode: expected_mode.clone(),
+ edit_index: reflog_message.map(|_| 0),
+ }],
+ "{spec:?}: {detail}"
+ );
+ assert_eq!(out.edits.len(), reflog_message.map(|_| 1).unwrap_or(0));
+ if let Some(reflog_message) = reflog_message {
+ let edit = &out.edits[0];
+ match &edit.change {
+ Change::Update { log, new, .. } => {
+ assert_eq!(
+ log.message,
+ format!("action: {reflog_message}"),
+ "{spec}: reflog messages are specific and we emulate git word for word"
+ );
+ let remote_ref = repo
+ .find_reference(specs[0].to_ref().source().expect("always present"))
+ .unwrap();
+ assert_eq!(
+ new.id(),
+ remote_ref.target().id(),
+ "remote ref provides the id to set in the local reference"
+ )
+ }
+ _ => unreachable!("only updates"),
+ }
+ }
+ }
+ }
+
+ #[test]
+ fn checked_out_branches_in_worktrees_are_rejected_with_additional_information() -> Result {
+ let root = gix_path::realpath(gix_testtools::scripted_fixture_read_only_with_args(
+ "make_fetch_repos.sh",
+ [base_repo_path()],
+ )?)?;
+ let repo = root.join("worktree-root");
+ let repo = gix::open_opts(repo, restricted())?;
+ for (branch, path_from_root) in [
+ ("main", "worktree-root"),
+ ("wt-a-nested", "prev/wt-a-nested"),
+ ("wt-a", "wt-a"),
+ ("nested-wt-b", "wt-a/nested-wt-b"),
+ ("wt-c-locked", "wt-c-locked"),
+ ("wt-deleted", "wt-deleted"),
+ ] {
+ let spec = format!("refs/heads/main:refs/heads/{branch}");
+ let (mappings, specs) = mapping_from_spec(&spec, &repo);
+ let out = fetch::refs::update(
+ &repo,
+ prefixed("action"),
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::Yes,
+ fetch::WritePackedRefs::Never,
+ )?;
+
+ assert_eq!(
+ out.updates,
+ vec![fetch::refs::Update {
+ mode: fetch::refs::update::Mode::RejectedCurrentlyCheckedOut {
+ worktree_dir: root.join(path_from_root),
+ },
+ edit_index: None,
+ }],
+ "{spec}: checked-out checks are done before checking if a change would actually be required (here it isn't)"
+ );
+ assert_eq!(out.edits.len(), 0);
+ }
+ Ok(())
+ }
+
+ #[test]
+ fn local_symbolic_refs_are_never_written() {
+ let repo = repo("two-origins");
+ for source in ["refs/heads/main", "refs/heads/symbolic", "HEAD"] {
+ let (mappings, specs) = mapping_from_spec(&format!("{source}:refs/heads/symbolic"), &repo);
+ let out = fetch::refs::update(
+ &repo,
+ prefixed("action"),
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::Yes,
+ fetch::WritePackedRefs::Never,
+ )
+ .unwrap();
+
+ assert_eq!(out.edits.len(), 0);
+ assert_eq!(
+ out.updates,
+ vec![fetch::refs::Update {
+ mode: fetch::refs::update::Mode::RejectedSymbolic,
+ edit_index: None
+ }],
+ "we don't overwrite these as the checked-out check needs to consider much more than it currently does, we are playing it safe"
+ );
+ }
+ }
+
+ #[test]
+ fn remote_symbolic_refs_can_always_be_set_as_there_is_no_scenario_where_it_could_be_nonexisting_and_rejected() {
+ let repo = repo("two-origins");
+ let (mut mappings, specs) = mapping_from_spec("refs/heads/symbolic:refs/remotes/origin/new", &repo);
+ mappings.push(Mapping {
+ remote: Source::Ref(gix_protocol::handshake::Ref::Direct {
+ full_ref_name: "refs/heads/main".try_into().unwrap(),
+ object: hex_to_id("f99771fe6a1b535783af3163eba95a927aae21d5"),
+ }),
+ local: Some("refs/heads/symbolic".into()),
+ spec_index: SpecIndex::ExplicitInRemote(0),
+ });
+ let out = fetch::refs::update(
+ &repo,
+ prefixed("action"),
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::Yes,
+ fetch::WritePackedRefs::Never,
+ )
+ .unwrap();
+
+ assert_eq!(out.edits.len(), 1);
+ assert_eq!(
+ out.updates,
+ vec![
+ fetch::refs::Update {
+ mode: fetch::refs::update::Mode::New,
+ edit_index: Some(0)
+ },
+ fetch::refs::Update {
+ mode: fetch::refs::update::Mode::RejectedSymbolic,
+ edit_index: None
+ }
+ ],
+ );
+ let edit = &out.edits[0];
+ match &edit.change {
+ Change::Update { log, new, .. } => {
+ assert_eq!(log.message, "action: storing ref");
+ assert!(
+ new.try_name().is_some(),
+ "remote falls back to peeled id as it's the only thing we seem to have locally, it won't refer to a non-existing local ref"
+ );
+ }
+ _ => unreachable!("only updates"),
+ }
+ }
+
+ #[test]
+ fn local_direct_refs_are_never_written_with_symbolic_ones_but_see_only_the_destination() {
+ let repo = repo("two-origins");
+ let (mappings, specs) = mapping_from_spec("refs/heads/symbolic:refs/heads/not-currently-checked-out", &repo);
+ let out = fetch::refs::update(
+ &repo,
+ prefixed("action"),
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::Yes,
+ fetch::WritePackedRefs::Never,
+ )
+ .unwrap();
+
+ assert_eq!(out.edits.len(), 1);
+ assert_eq!(
+ out.updates,
+ vec![fetch::refs::Update {
+ mode: fetch::refs::update::Mode::NoChangeNeeded,
+ edit_index: Some(0)
+ }],
+ );
+ }
+
+ #[test]
+ fn remote_refs_cannot_map_to_local_head() {
+ let repo = repo("two-origins");
+ let (mappings, specs) = mapping_from_spec("refs/heads/main:HEAD", &repo);
+ let out = fetch::refs::update(
+ &repo,
+ prefixed("action"),
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::Yes,
+ fetch::WritePackedRefs::Never,
+ )
+ .unwrap();
+
+ assert_eq!(out.edits.len(), 1);
+ assert_eq!(
+ out.updates,
+ vec![fetch::refs::Update {
+ mode: fetch::refs::update::Mode::New,
+ edit_index: Some(0),
+ }],
+ );
+ let edit = &out.edits[0];
+ match &edit.change {
+ Change::Update { log, new, .. } => {
+ assert_eq!(log.message, "action: storing head");
+ assert!(
+ new.try_id().is_some(),
+ "remote is peeled, so local will be peeled as well"
+ );
+ }
+ _ => unreachable!("only updates"),
+ }
+ assert_eq!(
+ edit.name.as_bstr(),
+ "refs/heads/HEAD",
+ "it's not possible to refer to the local HEAD with refspecs"
+ );
+ }
+
+ #[test]
+ fn remote_symbolic_refs_can_be_written_locally_and_point_to_tracking_branch() {
+ let repo = repo("two-origins");
+ let (mut mappings, specs) = mapping_from_spec("HEAD:refs/remotes/origin/new-HEAD", &repo);
+ mappings.push(Mapping {
+ remote: Source::Ref(gix_protocol::handshake::Ref::Direct {
+ full_ref_name: "refs/heads/main".try_into().unwrap(),
+ object: hex_to_id("f99771fe6a1b535783af3163eba95a927aae21d5"),
+ }),
+ local: Some("refs/remotes/origin/main".into()),
+ spec_index: SpecIndex::ExplicitInRemote(0),
+ });
+ let out = fetch::refs::update(
+ &repo,
+ prefixed("action"),
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::Yes,
+ fetch::WritePackedRefs::Never,
+ )
+ .unwrap();
+
+ assert_eq!(
+ out.updates,
+ vec![
+ fetch::refs::Update {
+ mode: fetch::refs::update::Mode::New,
+ edit_index: Some(0),
+ },
+ fetch::refs::Update {
+ mode: fetch::refs::update::Mode::NoChangeNeeded,
+ edit_index: Some(1),
+ }
+ ],
+ );
+ assert_eq!(out.edits.len(), 2);
+ let edit = &out.edits[0];
+ match &edit.change {
+ Change::Update { log, new, .. } => {
+ assert_eq!(log.message, "action: storing ref");
+ assert_eq!(
+ new.try_name().expect("symbolic ref").as_bstr(),
+ "refs/remotes/origin/main",
+ "remote is symbolic, so local will be symbolic as well, but is rewritten to tracking branch"
+ );
+ }
+ _ => unreachable!("only updates"),
+ }
+ assert_eq!(edit.name.as_bstr(), "refs/remotes/origin/new-HEAD",);
+ }
+
+ #[test]
+ fn non_fast_forward_is_rejected_but_appears_to_be_fast_forward_in_dryrun_mode() {
+ let repo = repo("two-origins");
+ let (mappings, specs) = mapping_from_spec("refs/heads/main:refs/remotes/origin/g", &repo);
+ let reflog_message: BString = "very special".into();
+ let out = fetch::refs::update(
+ &repo,
+ RefLogMessage::Override {
+ message: reflog_message.clone(),
+ },
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::Yes,
+ fetch::WritePackedRefs::Never,
+ )
+ .unwrap();
+
+ assert_eq!(
+ out.updates,
+ vec![fetch::refs::Update {
+ mode: fetch::refs::update::Mode::FastForward,
+ edit_index: Some(0),
+ }],
+ "The caller has to be aware and note that dry-runs can't know about fast-forwards as they don't have remote objects"
+ );
+ assert_eq!(out.edits.len(), 1);
+ let edit = &out.edits[0];
+ match &edit.change {
+ Change::Update { log, .. } => {
+ assert_eq!(log.message, reflog_message);
+ }
+ _ => unreachable!("only updates"),
+ }
+ }
+
+ #[test]
+ fn non_fast_forward_is_rejected_if_dry_run_is_disabled() {
+ let (repo, _tmp) = repo_rw("two-origins");
+ let (mappings, specs) = mapping_from_spec("refs/remotes/origin/g:refs/heads/not-currently-checked-out", &repo);
+ let out = fetch::refs::update(
+ &repo,
+ prefixed("action"),
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::No,
+ fetch::WritePackedRefs::Never,
+ )
+ .unwrap();
+
+ assert_eq!(
+ out.updates,
+ vec![fetch::refs::Update {
+ mode: fetch::refs::update::Mode::RejectedNonFastForward,
+ edit_index: None,
+ }]
+ );
+ assert_eq!(out.edits.len(), 0);
+
+ let (mappings, specs) = mapping_from_spec("refs/heads/main:refs/remotes/origin/g", &repo);
+ let out = fetch::refs::update(
+ &repo,
+ prefixed("prefix"),
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::No,
+ fetch::WritePackedRefs::Never,
+ )
+ .unwrap();
+
+ assert_eq!(
+ out.updates,
+ vec![fetch::refs::Update {
+ mode: fetch::refs::update::Mode::FastForward,
+ edit_index: Some(0),
+ }]
+ );
+ assert_eq!(out.edits.len(), 1);
+ let edit = &out.edits[0];
+ match &edit.change {
+ Change::Update { log, .. } => {
+ assert_eq!(log.message, format!("prefix: {}", "fast-forward"));
+ }
+ _ => unreachable!("only updates"),
+ }
+ }
+
+ #[test]
+ fn fast_forwards_are_called_out_even_if_force_is_given() {
+ let (repo, _tmp) = repo_rw("two-origins");
+ let (mappings, specs) = mapping_from_spec("+refs/heads/main:refs/remotes/origin/g", &repo);
+ let out = fetch::refs::update(
+ &repo,
+ prefixed("prefix"),
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::No,
+ fetch::WritePackedRefs::Never,
+ )
+ .unwrap();
+
+ assert_eq!(
+ out.updates,
+ vec![fetch::refs::Update {
+ mode: fetch::refs::update::Mode::FastForward,
+ edit_index: Some(0),
+ }]
+ );
+ assert_eq!(out.edits.len(), 1);
+ let edit = &out.edits[0];
+ match &edit.change {
+ Change::Update { log, .. } => {
+ assert_eq!(log.message, format!("prefix: {}", "fast-forward"));
+ }
+ _ => unreachable!("only updates"),
+ }
+ }
+
+ fn mapping_from_spec(spec: &str, repo: &gix::Repository) -> (Vec<fetch::Mapping>, Vec<gix::refspec::RefSpec>) {
+ let spec = gix_refspec::parse(spec.into(), gix_refspec::parse::Operation::Fetch).unwrap();
+ let group = gix_refspec::MatchGroup::from_fetch_specs(Some(spec));
+ let references = repo.references().unwrap();
+ let mut references: Vec<_> = references.all().unwrap().map(|r| into_remote_ref(r.unwrap())).collect();
+ references.push(into_remote_ref(repo.find_reference("HEAD").unwrap()));
+ let mappings = group
+ .match_remotes(references.iter().map(remote_ref_to_item))
+ .mappings
+ .into_iter()
+ .map(|m| fetch::Mapping {
+ remote: m
+ .item_index
+ .map(|idx| fetch::Source::Ref(references[idx].clone()))
+ .unwrap_or_else(|| match m.lhs {
+ gix_refspec::match_group::SourceRef::ObjectId(id) => fetch::Source::ObjectId(id),
+ _ => unreachable!("not a ref, must be id: {:?}", m),
+ }),
+ local: m.rhs.map(|r| r.into_owned()),
+ spec_index: SpecIndex::ExplicitInRemote(m.spec_index),
+ })
+ .collect();
+ (mappings, vec![spec.to_owned()])
+ }
+
+ fn into_remote_ref(mut r: gix::Reference<'_>) -> gix_protocol::handshake::Ref {
+ let full_ref_name = r.name().as_bstr().into();
+ match r.target() {
+ TargetRef::Peeled(id) => gix_protocol::handshake::Ref::Direct {
+ full_ref_name,
+ object: id.into(),
+ },
+ TargetRef::Symbolic(name) => {
+ let target = name.as_bstr().into();
+ let id = r.peel_to_id_in_place().unwrap();
+ gix_protocol::handshake::Ref::Symbolic {
+ full_ref_name,
+ target,
+ object: id.detach(),
+ }
+ }
+ }
+ }
+
+ fn remote_ref_to_item(r: &gix_protocol::handshake::Ref) -> gix_refspec::match_group::Item<'_> {
+ let (full_ref_name, target, object) = r.unpack();
+ gix_refspec::match_group::Item {
+ full_ref_name,
+ target: target.expect("no unborn HEAD"),
+ object,
+ }
+ }
+
+ fn prefixed(action: &str) -> RefLogMessage {
+ RefLogMessage::Prefixed { action: action.into() }
+ }
+}
diff --git a/vendor/gix/src/remote/connection/fetch/update_refs/update.rs b/vendor/gix/src/remote/connection/fetch/update_refs/update.rs
new file mode 100644
index 000000000..6eda1ffc0
--- /dev/null
+++ b/vendor/gix/src/remote/connection/fetch/update_refs/update.rs
@@ -0,0 +1,128 @@
+use std::path::PathBuf;
+
+use crate::remote::fetch;
+
+mod error {
+ /// The error returned when updating references.
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ FindReference(#[from] crate::reference::find::Error),
+ #[error("A remote reference had a name that wasn't considered valid. Corrupt remote repo or insufficient checks on remote?")]
+ InvalidRefName(#[from] gix_validate::refname::Error),
+ #[error("Failed to update references to their new position to match their remote locations")]
+ EditReferences(#[from] crate::reference::edit::Error),
+ #[error("Failed to read or iterate worktree dir")]
+ WorktreeListing(#[from] std::io::Error),
+ #[error("Could not open worktree repository")]
+ OpenWorktreeRepo(#[from] crate::open::Error),
+ #[error("Could not find local commit for fast-forward ancestor check")]
+ FindCommit(#[from] crate::object::find::existing::Error),
+ }
+}
+
+pub use error::Error;
+
+/// The outcome of the refs-update operation at the end of a fetch.
+#[derive(Debug, Clone)]
+pub struct Outcome {
+ /// All edits that were performed to update local refs.
+ pub edits: Vec<gix_ref::transaction::RefEdit>,
+ /// Each update provides more information about what happened to the corresponding mapping.
+ /// Use [`iter_mapping_updates()`][Self::iter_mapping_updates()] to recombine the update information with ref-edits and their
+ /// mapping.
+ pub updates: Vec<super::Update>,
+}
+
+/// Describe the way a ref was updated
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum Mode {
+ /// No change was attempted as the remote ref didn't change compared to the current ref, or because no remote ref was specified
+ /// in the ref-spec.
+ NoChangeNeeded,
+ /// The old ref's commit was an ancestor of the new one, allowing for a fast-forward without a merge.
+ FastForward,
+ /// The ref was set to point to the new commit from the remote without taking into consideration its ancestry.
+ Forced,
+ /// A new ref has been created as there was none before.
+ New,
+ /// The reference belongs to a tag that was listed by the server but whose target didn't get sent as it doesn't point
+ /// to the commit-graph we were fetching explicitly.
+ ///
+    /// This kind of update only happens if `remote.<name>.tagOpt` is not set explicitly to either `--tags` or `--no-tags`.
+ ImplicitTagNotSentByRemote,
+ /// The object id to set the target reference to could not be found.
+ RejectedSourceObjectNotFound {
+ /// The id of the object that didn't exist in the object database, even though it should since it should be part of the pack.
+ id: gix_hash::ObjectId,
+ },
+ /// Tags can never be overwritten (whether the new object would be a fast-forward or not, or unchanged), unless the refspec
+ /// specifies force.
+ RejectedTagUpdate,
+ /// The reference update would not have been a fast-forward, and force is not specified in the ref-spec.
+ RejectedNonFastForward,
+ /// The update of a local symbolic reference was rejected.
+ RejectedSymbolic,
+ /// The update was rejected because the branch is checked out in the given worktree_dir.
+ ///
+ /// Note that the check applies to any known worktree, whether it's present on disk or not.
+ RejectedCurrentlyCheckedOut {
+ /// The path to the worktree directory where the branch is checked out.
+ worktree_dir: PathBuf,
+ },
+}
+
+impl std::fmt::Display for Mode {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match self {
+ Mode::NoChangeNeeded => "up-to-date",
+ Mode::FastForward => "fast-forward",
+ Mode::Forced => "forced-update",
+ Mode::New => "new",
+ Mode::ImplicitTagNotSentByRemote => "unrelated tag on remote",
+ Mode::RejectedSourceObjectNotFound { id } => return write!(f, "rejected ({id} not found)"),
+ Mode::RejectedTagUpdate => "rejected (would overwrite existing tag)",
+ Mode::RejectedNonFastForward => "rejected (non-fast-forward)",
+ Mode::RejectedSymbolic => "rejected (refusing to write symbolic refs)",
+ Mode::RejectedCurrentlyCheckedOut { worktree_dir } => {
+ return write!(
+ f,
+ "rejected (cannot write into checked-out branch at \"{}\")",
+ worktree_dir.display()
+ )
+ }
+ }
+ .fmt(f)
+ }
+}
+
+impl Outcome {
+    /// Produce an iterator over all information used to produce this outcome, ref-update by ref-update, using the `mappings`
+ /// used when producing the ref update.
+ ///
+    /// Note that for mappings that don't have a corresponding entry in `refspecs`, the returned refspec will be `None` even though that should never be the case.
+    /// This can happen if the `refspecs` passed in aren't the refspecs used to create the `mapping`, and it's up to the caller to sort it out.
+ pub fn iter_mapping_updates<'a, 'b>(
+ &self,
+ mappings: &'a [fetch::Mapping],
+ refspecs: &'b [gix_refspec::RefSpec],
+ extra_refspecs: &'b [gix_refspec::RefSpec],
+ ) -> impl Iterator<
+ Item = (
+ &super::Update,
+ &'a fetch::Mapping,
+ Option<&'b gix_refspec::RefSpec>,
+ Option<&gix_ref::transaction::RefEdit>,
+ ),
+ > {
+ self.updates.iter().zip(mappings.iter()).map(move |(update, mapping)| {
+ (
+ update,
+ mapping,
+ mapping.spec_index.get(refspecs, extra_refspecs),
+ update.edit_index.and_then(|idx| self.edits.get(idx)),
+ )
+ })
+ }
+}
diff --git a/vendor/gix/src/remote/connection/mod.rs b/vendor/gix/src/remote/connection/mod.rs
new file mode 100644
index 000000000..09943ecc4
--- /dev/null
+++ b/vendor/gix/src/remote/connection/mod.rs
@@ -0,0 +1,29 @@
+use crate::Remote;
+
+pub(crate) struct HandshakeWithRefs {
+ outcome: gix_protocol::handshake::Outcome,
+ refs: Vec<gix_protocol::handshake::Ref>,
+}
+
+/// A function that performs a given credential action, trying to obtain credentials for an operation that needs it.
+pub type AuthenticateFn<'a> = Box<dyn FnMut(gix_credentials::helper::Action) -> gix_credentials::protocol::Result + 'a>;
+
+/// A type to represent an ongoing connection to a remote host, typically with the connection already established.
+///
+/// It can be used to perform a variety of operations with the remote without worrying about protocol details,
+/// much like a remote procedure call.
+pub struct Connection<'a, 'repo, T, P> {
+ pub(crate) remote: &'a Remote<'repo>,
+ pub(crate) authenticate: Option<AuthenticateFn<'a>>,
+ pub(crate) transport_options: Option<Box<dyn std::any::Any>>,
+ pub(crate) transport: T,
+ pub(crate) progress: P,
+}
+
+mod access;
+
+///
+pub mod ref_map;
+
+///
+pub mod fetch;
diff --git a/vendor/gix/src/remote/connection/ref_map.rs b/vendor/gix/src/remote/connection/ref_map.rs
new file mode 100644
index 000000000..0206e9002
--- /dev/null
+++ b/vendor/gix/src/remote/connection/ref_map.rs
@@ -0,0 +1,268 @@
+use std::collections::HashSet;
+
+use gix_features::progress::Progress;
+use gix_protocol::transport::client::Transport;
+
+use crate::{
+ bstr,
+ bstr::{BString, ByteVec},
+ remote::{connection::HandshakeWithRefs, fetch, fetch::SpecIndex, Connection, Direction},
+};
+
+/// The error returned by [`Connection::ref_map()`].
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum Error {
+ #[error("Failed to configure the transport before connecting to {url:?}")]
+ GatherTransportConfig {
+ url: BString,
+ source: crate::config::transport::Error,
+ },
+ #[error("Failed to configure the transport layer")]
+ ConfigureTransport(#[from] Box<dyn std::error::Error + Send + Sync + 'static>),
+ #[error(transparent)]
+ Handshake(#[from] gix_protocol::handshake::Error),
+ #[error("The object format {format:?} as used by the remote is unsupported")]
+ UnknownObjectFormat { format: BString },
+ #[error(transparent)]
+ ListRefs(#[from] gix_protocol::ls_refs::Error),
+ #[error(transparent)]
+ Transport(#[from] gix_protocol::transport::client::Error),
+ #[error(transparent)]
+ ConfigureCredentials(#[from] crate::config::credential_helpers::Error),
+ #[error(transparent)]
+ MappingValidation(#[from] gix_refspec::match_group::validate::Error),
+}
+
+impl gix_protocol::transport::IsSpuriousError for Error {
+ fn is_spurious(&self) -> bool {
+ match self {
+ Error::Transport(err) => err.is_spurious(),
+ Error::ListRefs(err) => err.is_spurious(),
+ Error::Handshake(err) => err.is_spurious(),
+ _ => false,
+ }
+ }
+}
+
+/// For use in [`Connection::ref_map()`].
+#[derive(Debug, Clone)]
+pub struct Options {
+ /// Use a two-component prefix derived from the ref-spec's source, like `refs/heads/` to let the server pre-filter refs
+ /// with great potential for savings in traffic and local CPU time. Defaults to `true`.
+ pub prefix_from_spec_as_filter_on_remote: bool,
+ /// Parameters in the form of `(name, optional value)` to add to the handshake.
+ ///
+ /// This is useful in case of custom servers.
+ pub handshake_parameters: Vec<(String, Option<String>)>,
+ /// A list of refspecs to use as implicit refspecs which won't be saved or otherwise be part of the remote in question.
+ ///
+ /// This is useful for handling `remote.<name>.tagOpt` for example.
+ pub extra_refspecs: Vec<gix_refspec::RefSpec>,
+}
+
+impl Default for Options {
+ fn default() -> Self {
+ Options {
+ prefix_from_spec_as_filter_on_remote: true,
+ handshake_parameters: Vec::new(),
+ extra_refspecs: Vec::new(),
+ }
+ }
+}
+
+impl<'remote, 'repo, T, P> Connection<'remote, 'repo, T, P>
+where
+ T: Transport,
+ P: Progress,
+{
+ /// List all references on the remote that have been filtered through our remote's [`refspecs`][crate::Remote::refspecs()]
+ /// for _fetching_.
+ ///
+ /// This comes in the form of all matching tips on the remote and the object they point to, along with
+    /// the local tracking branch of these tips (if available).
+ ///
+    /// Note that this doesn't fetch the objects mentioned in the tips nor does it make any change to the underlying repository.
+ ///
+ /// # Consumption
+ ///
+ /// Due to management of the transport, it's cleanest to only use it for a single interaction. Thus it's consumed along with
+ /// the connection.
+ ///
+ /// ### Configuration
+ ///
+ /// - `gitoxide.userAgent` is read to obtain the application user agent for git servers and for HTTP servers as well.
+ #[allow(clippy::result_large_err)]
+ #[gix_protocol::maybe_async::maybe_async]
+ pub async fn ref_map(mut self, options: Options) -> Result<fetch::RefMap, Error> {
+ let res = self.ref_map_inner(options).await;
+ gix_protocol::indicate_end_of_interaction(&mut self.transport)
+ .await
+ .ok();
+ res
+ }
+
+ #[allow(clippy::result_large_err)]
+ #[gix_protocol::maybe_async::maybe_async]
+ pub(crate) async fn ref_map_inner(
+ &mut self,
+ Options {
+ prefix_from_spec_as_filter_on_remote,
+ handshake_parameters,
+ mut extra_refspecs,
+ }: Options,
+ ) -> Result<fetch::RefMap, Error> {
+ let null = gix_hash::ObjectId::null(gix_hash::Kind::Sha1); // OK to hardcode Sha1, it's not supposed to match, ever.
+
+ if let Some(tag_spec) = self.remote.fetch_tags.to_refspec().map(|spec| spec.to_owned()) {
+ if !extra_refspecs.contains(&tag_spec) {
+ extra_refspecs.push(tag_spec);
+ }
+ };
+ let specs = {
+ let mut s = self.remote.fetch_specs.clone();
+ s.extend(extra_refspecs.clone());
+ s
+ };
+ let remote = self
+ .fetch_refs(prefix_from_spec_as_filter_on_remote, handshake_parameters, &specs)
+ .await?;
+ let num_explicit_specs = self.remote.fetch_specs.len();
+ let group = gix_refspec::MatchGroup::from_fetch_specs(specs.iter().map(|s| s.to_ref()));
+ let (res, fixes) = group
+ .match_remotes(remote.refs.iter().map(|r| {
+ let (full_ref_name, target, object) = r.unpack();
+ gix_refspec::match_group::Item {
+ full_ref_name,
+ target: target.unwrap_or(&null),
+ object,
+ }
+ }))
+ .validated()?;
+ let mappings = res.mappings;
+ let mappings = mappings
+ .into_iter()
+ .map(|m| fetch::Mapping {
+ remote: m
+ .item_index
+ .map(|idx| fetch::Source::Ref(remote.refs[idx].clone()))
+ .unwrap_or_else(|| {
+ fetch::Source::ObjectId(match m.lhs {
+ gix_refspec::match_group::SourceRef::ObjectId(id) => id,
+ _ => unreachable!("no item index implies having an object id"),
+ })
+ }),
+ local: m.rhs.map(|c| c.into_owned()),
+ spec_index: if m.spec_index < num_explicit_specs {
+ SpecIndex::ExplicitInRemote(m.spec_index)
+ } else {
+ SpecIndex::Implicit(m.spec_index - num_explicit_specs)
+ },
+ })
+ .collect();
+
+ let object_hash = extract_object_format(self.remote.repo, &remote.outcome)?;
+ Ok(fetch::RefMap {
+ mappings,
+ extra_refspecs,
+ fixes,
+ remote_refs: remote.refs,
+ handshake: remote.outcome,
+ object_hash,
+ })
+ }
+
+ #[allow(clippy::result_large_err)]
+ #[gix_protocol::maybe_async::maybe_async]
+ async fn fetch_refs(
+ &mut self,
+ filter_by_prefix: bool,
+ extra_parameters: Vec<(String, Option<String>)>,
+ refspecs: &[gix_refspec::RefSpec],
+ ) -> Result<HandshakeWithRefs, Error> {
+ let mut credentials_storage;
+ let url = self.transport.to_url();
+ let authenticate = match self.authenticate.as_mut() {
+ Some(f) => f,
+ None => {
+ let url = self
+ .remote
+ .url(Direction::Fetch)
+ .map(ToOwned::to_owned)
+ .unwrap_or_else(|| gix_url::parse(url.as_ref()).expect("valid URL to be provided by transport"));
+ credentials_storage = self.configured_credentials(url)?;
+ &mut credentials_storage
+ }
+ };
+
+ if self.transport_options.is_none() {
+ self.transport_options = self
+ .remote
+ .repo
+ .transport_options(url.as_ref(), self.remote.name().map(|n| n.as_bstr()))
+ .map_err(|err| Error::GatherTransportConfig {
+ source: err,
+ url: url.into_owned(),
+ })?;
+ }
+ if let Some(config) = self.transport_options.as_ref() {
+ self.transport.configure(&**config)?;
+ }
+ let mut outcome =
+ gix_protocol::fetch::handshake(&mut self.transport, authenticate, extra_parameters, &mut self.progress)
+ .await?;
+ let refs = match outcome.refs.take() {
+ Some(refs) => refs,
+ None => {
+ let agent_feature = self.remote.repo.config.user_agent_tuple();
+ gix_protocol::ls_refs(
+ &mut self.transport,
+ &outcome.capabilities,
+ move |_capabilities, arguments, features| {
+ features.push(agent_feature);
+ if filter_by_prefix {
+ let mut seen = HashSet::new();
+ for spec in refspecs {
+ let spec = spec.to_ref();
+ if seen.insert(spec.instruction()) {
+ let mut prefixes = Vec::with_capacity(1);
+ spec.expand_prefixes(&mut prefixes);
+ for mut prefix in prefixes {
+ prefix.insert_str(0, "ref-prefix ");
+ arguments.push(prefix);
+ }
+ }
+ }
+ }
+ Ok(gix_protocol::ls_refs::Action::Continue)
+ },
+ &mut self.progress,
+ )
+ .await?
+ }
+ };
+ Ok(HandshakeWithRefs { outcome, refs })
+ }
+}
+
+/// Assume sha1 if server says nothing, otherwise configure anything beyond sha1 in the local repo configuration
+#[allow(clippy::result_large_err)]
+fn extract_object_format(
+ _repo: &crate::Repository,
+ outcome: &gix_protocol::handshake::Outcome,
+) -> Result<gix_hash::Kind, Error> {
+ use bstr::ByteSlice;
+ let object_hash =
+ if let Some(object_format) = outcome.capabilities.capability("object-format").and_then(|c| c.value()) {
+ let object_format = object_format.to_str().map_err(|_| Error::UnknownObjectFormat {
+ format: object_format.into(),
+ })?;
+ match object_format {
+ "sha1" => gix_hash::Kind::Sha1,
+ unknown => return Err(Error::UnknownObjectFormat { format: unknown.into() }),
+ }
+ } else {
+ gix_hash::Kind::Sha1
+ };
+ Ok(object_hash)
+}
diff --git a/vendor/gix/src/remote/errors.rs b/vendor/gix/src/remote/errors.rs
new file mode 100644
index 000000000..20060cedf
--- /dev/null
+++ b/vendor/gix/src/remote/errors.rs
@@ -0,0 +1,45 @@
+///
+pub mod find {
+ use crate::{bstr::BString, config, remote};
+
+ /// The error returned by [`Repository::find_remote(…)`][crate::Repository::find_remote()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("The value for 'remote.<name>.tagOpt` is invalid and must either be '--tags' or '--no-tags'")]
+ TagOpt(#[from] config::key::GenericErrorWithValue),
+ #[error("{kind} ref-spec under `remote.{remote_name}` was invalid")]
+ RefSpec {
+ kind: &'static str,
+ remote_name: BString,
+ source: config::refspec::Error,
+ },
+ #[error("Neither 'url` nor 'pushUrl' fields were set in the remote's configuration.")]
+ UrlMissing,
+ #[error("The {kind} url under `remote.{remote_name}` was invalid")]
+ Url {
+ kind: &'static str,
+ remote_name: BString,
+ source: config::url::Error,
+ },
+ #[error(transparent)]
+ Init(#[from] remote::init::Error),
+ }
+
+ ///
+ pub mod existing {
+ use crate::bstr::BString;
+
+ /// The error returned by [`Repository::find_remote(…)`][crate::Repository::find_remote()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ Find(#[from] super::Error),
+ #[error("remote name could not be parsed as URL")]
+ UrlParse(#[from] gix_url::parse::Error),
+ #[error("The remote named {name:?} did not exist")]
+ NotFound { name: BString },
+ }
+ }
+}
diff --git a/vendor/gix/src/remote/fetch.rs b/vendor/gix/src/remote/fetch.rs
new file mode 100644
index 000000000..4add96a65
--- /dev/null
+++ b/vendor/gix/src/remote/fetch.rs
@@ -0,0 +1,166 @@
+/// If `Yes`, don't really make changes but do as much as possible to get an idea of what would be done.
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+#[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+pub(crate) enum DryRun {
+ /// Enable dry-run mode and don't actually change the underlying repository in any way.
+ Yes,
+ /// Run the operation like normal, making changes to the underlying repository.
+ No,
+}
+
+/// How to deal with refs when cloning or fetching.
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+#[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+pub(crate) enum WritePackedRefs {
+ /// Normal operation, i.e. don't use packed-refs at all for writing.
+ Never,
+ /// Put ref updates straight into the `packed-refs` file, without creating loose refs first or dealing with them in any way.
+ Only,
+}
+
+/// Describe how to handle tags when fetching
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+pub enum Tags {
+ /// Fetch all tags from the remote, even if these are not reachable from objects referred to by our refspecs.
+ All,
+ /// Fetch only the tags that point to the objects being sent.
+ /// That way, annotated tags that point to an object we receive are automatically transmitted and their refs are created.
+ /// The same goes for lightweight tags.
+ Included,
+ /// Do not fetch any tags.
+ None,
+}
+
+impl Default for Tags {
+ fn default() -> Self {
+ Tags::Included
+ }
+}
+
+impl Tags {
+ /// Obtain a refspec that determines whether or not to fetch all tags, depending on this variant.
+ ///
+ /// The returned refspec is the default refspec for tags, but won't overwrite local tags ever.
+ pub fn to_refspec(&self) -> Option<gix_refspec::RefSpecRef<'static>> {
+ match self {
+ Tags::All | Tags::Included => Some(
+ gix_refspec::parse("refs/tags/*:refs/tags/*".into(), gix_refspec::parse::Operation::Fetch)
+ .expect("valid"),
+ ),
+ Tags::None => None,
+ }
+ }
+}
+
+/// Information about the relationship between our refspecs, and remote references with their local counterparts.
+#[derive(Default, Debug, Clone)]
+#[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+pub struct RefMap {
+ /// A mapping between a remote reference and a local tracking branch.
+ pub mappings: Vec<Mapping>,
+ /// Refspecs which have been added implicitly due to settings of the `remote`, possibly pre-initialized from
+ /// [`extra_refspecs` in RefMap options][crate::remote::ref_map::Options::extra_refspecs].
+ ///
+ /// They are never persisted nor are they typically presented to the user.
+ pub extra_refspecs: Vec<gix_refspec::RefSpec>,
+    /// Information about the fixes applied to the `mappings` due to validation and sanitization.
+ pub fixes: Vec<gix_refspec::match_group::validate::Fix>,
+ /// All refs advertised by the remote.
+ pub remote_refs: Vec<gix_protocol::handshake::Ref>,
+ /// Additional information provided by the server as part of the handshake.
+ ///
+ /// Note that the `refs` field is always `None` as the refs are placed in `remote_refs`.
+ pub handshake: gix_protocol::handshake::Outcome,
+ /// The kind of hash used for all data sent by the server, if understood by this client implementation.
+ ///
+ /// It was extracted from the `handshake` as advertised by the server.
+ pub object_hash: gix_hash::Kind,
+}
+
+/// Either an object id that the remote has or the matched remote ref itself.
+#[derive(Debug, Clone)]
+#[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+pub enum Source {
+ /// An object id, as the matched ref-spec was an object id itself.
+ ObjectId(gix_hash::ObjectId),
+ /// The remote reference that matched the ref-specs name.
+ Ref(gix_protocol::handshake::Ref),
+}
+
+#[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+impl Source {
+ /// Return either the direct object id we refer to or the direct target that a reference refers to.
+ /// The latter may be a direct or a symbolic reference, and we degenerate this to the peeled object id.
+ /// If unborn, `None` is returned.
+ pub fn as_id(&self) -> Option<&gix_hash::oid> {
+ match self {
+ Source::ObjectId(id) => Some(id),
+ Source::Ref(r) => r.unpack().1,
+ }
+ }
+
+ /// Return ourselves as the full name of the reference we represent, or `None` if this source isn't a reference but an object.
+ pub fn as_name(&self) -> Option<&crate::bstr::BStr> {
+ match self {
+ Source::ObjectId(_) => None,
+ Source::Ref(r) => match r {
+ gix_protocol::handshake::Ref::Unborn { full_ref_name, .. }
+ | gix_protocol::handshake::Ref::Symbolic { full_ref_name, .. }
+ | gix_protocol::handshake::Ref::Direct { full_ref_name, .. }
+ | gix_protocol::handshake::Ref::Peeled { full_ref_name, .. } => Some(full_ref_name.as_ref()),
+ },
+ }
+ }
+}
+
+/// An index into various lists of refspecs that have been used in a [Mapping] of remote references to local ones.
+#[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, Ord, PartialOrd)]
+pub enum SpecIndex {
+ /// An index into the _refspecs of the remote_ that triggered a fetch operation.
+ /// These refspecs are explicit and visible to the user.
+ ExplicitInRemote(usize),
+ /// An index into the list of [extra refspecs][crate::remote::fetch::RefMap::extra_refspecs] that are implicit
+ /// to a particular fetch operation.
+ Implicit(usize),
+}
+
+#[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+impl SpecIndex {
+ /// Depending on our index variant, get the index either from `refspecs` or from `extra_refspecs` for `Implicit` variants.
+ pub fn get<'a>(
+ self,
+ refspecs: &'a [gix_refspec::RefSpec],
+ extra_refspecs: &'a [gix_refspec::RefSpec],
+ ) -> Option<&'a gix_refspec::RefSpec> {
+ match self {
+ SpecIndex::ExplicitInRemote(idx) => refspecs.get(idx),
+ SpecIndex::Implicit(idx) => extra_refspecs.get(idx),
+ }
+ }
+
+ /// If this is an `Implicit` variant, return its index.
+ pub fn implicit_index(self) -> Option<usize> {
+ match self {
+ SpecIndex::Implicit(idx) => Some(idx),
+ SpecIndex::ExplicitInRemote(_) => None,
+ }
+ }
+}
+
+/// A mapping between a single remote reference and its advertised objects to a local destination which may or may not exist.
+#[derive(Debug, Clone)]
+#[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+pub struct Mapping {
+ /// The reference on the remote side, along with information about the objects they point to as advertised by the server.
+ pub remote: Source,
+ /// The local tracking reference to update after fetching the object visible via `remote`.
+ pub local: Option<crate::bstr::BString>,
+ /// The index into the fetch ref-specs used to produce the mapping, allowing it to be recovered.
+ pub spec_index: SpecIndex,
+}
+
+#[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+pub use super::connection::fetch::{
+ negotiate, prepare, refs, Error, Outcome, Prepare, ProgressId, RefLogMessage, Status,
+};
diff --git a/vendor/gix/src/remote/init.rs b/vendor/gix/src/remote/init.rs
new file mode 100644
index 000000000..bba116946
--- /dev/null
+++ b/vendor/gix/src/remote/init.rs
@@ -0,0 +1,116 @@
+use std::convert::TryInto;
+
+use gix_refspec::RefSpec;
+
+use crate::{config, remote, Remote, Repository};
+
+mod error {
+ use crate::bstr::BString;
+
+ /// The error returned by [`Repository::remote_at(…)`][crate::Repository::remote_at()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ Url(#[from] gix_url::parse::Error),
+ #[error("The rewritten {kind} url {rewritten_url:?} failed to parse")]
+ RewrittenUrlInvalid {
+ kind: &'static str,
+ rewritten_url: BString,
+ source: gix_url::parse::Error,
+ },
+ }
+}
+pub use error::Error;
+
+use crate::bstr::BString;
+
+/// Initialization
+impl<'repo> Remote<'repo> {
+ #[allow(clippy::too_many_arguments)]
+ pub(crate) fn from_preparsed_config(
+ name_or_url: Option<BString>,
+ url: Option<gix_url::Url>,
+ push_url: Option<gix_url::Url>,
+ fetch_specs: Vec<RefSpec>,
+ push_specs: Vec<RefSpec>,
+ should_rewrite_urls: bool,
+ fetch_tags: remote::fetch::Tags,
+ repo: &'repo Repository,
+ ) -> Result<Self, Error> {
+ debug_assert!(
+ url.is_some() || push_url.is_some(),
+ "BUG: fetch or push url must be set at least"
+ );
+ let (url_alias, push_url_alias) = should_rewrite_urls
+ .then(|| rewrite_urls(&repo.config, url.as_ref(), push_url.as_ref()))
+ .unwrap_or(Ok((None, None)))?;
+ Ok(Remote {
+ name: name_or_url.map(Into::into),
+ url,
+ url_alias,
+ push_url,
+ push_url_alias,
+ fetch_specs,
+ push_specs,
+ fetch_tags,
+ repo,
+ })
+ }
+
+ pub(crate) fn from_fetch_url<Url, E>(
+ url: Url,
+ should_rewrite_urls: bool,
+ repo: &'repo Repository,
+ ) -> Result<Self, Error>
+ where
+ Url: TryInto<gix_url::Url, Error = E>,
+ gix_url::parse::Error: From<E>,
+ {
+ let url = url.try_into().map_err(|err| Error::Url(err.into()))?;
+ let (url_alias, _) = should_rewrite_urls
+ .then(|| rewrite_urls(&repo.config, Some(&url), None))
+ .unwrap_or(Ok((None, None)))?;
+ Ok(Remote {
+ name: None,
+ url: Some(url),
+ url_alias,
+ push_url: None,
+ push_url_alias: None,
+ fetch_specs: Vec::new(),
+ push_specs: Vec::new(),
+ fetch_tags: Default::default(),
+ repo,
+ })
+ }
+}
+
+pub(crate) fn rewrite_url(
+ config: &config::Cache,
+ url: Option<&gix_url::Url>,
+ direction: remote::Direction,
+) -> Result<Option<gix_url::Url>, Error> {
+ url.and_then(|url| config.url_rewrite().longest(url, direction))
+ .map(|url| {
+ gix_url::parse(url.as_ref()).map_err(|err| Error::RewrittenUrlInvalid {
+ kind: match direction {
+ remote::Direction::Fetch => "fetch",
+ remote::Direction::Push => "push",
+ },
+ source: err,
+ rewritten_url: url,
+ })
+ })
+ .transpose()
+}
+
+pub(crate) fn rewrite_urls(
+ config: &config::Cache,
+ url: Option<&gix_url::Url>,
+ push_url: Option<&gix_url::Url>,
+) -> Result<(Option<gix_url::Url>, Option<gix_url::Url>), Error> {
+ let url_alias = rewrite_url(config, url, remote::Direction::Fetch)?;
+ let push_url_alias = rewrite_url(config, push_url, remote::Direction::Push)?;
+
+ Ok((url_alias, push_url_alias))
+}
diff --git a/vendor/gix/src/remote/mod.rs b/vendor/gix/src/remote/mod.rs
new file mode 100644
index 000000000..f016575c7
--- /dev/null
+++ b/vendor/gix/src/remote/mod.rs
@@ -0,0 +1,62 @@
+use std::borrow::Cow;
+
+use crate::bstr::BStr;
+
+/// The direction of an operation carried out (or to be carried out) through a remote.
+#[derive(Debug, Eq, PartialEq, Copy, Clone, Hash)]
+pub enum Direction {
+ /// Push local changes to the remote.
+ Push,
+ /// Fetch changes from the remote to the local repository.
+ Fetch,
+}
+
+impl Direction {
+ /// Return ourselves as string suitable for use as verb in an english sentence.
+ pub fn as_str(&self) -> &'static str {
+ match self {
+ Direction::Push => "push",
+ Direction::Fetch => "fetch",
+ }
+ }
+}
+
+/// The name of a remote, either interpreted as symbol like `origin` or as url as returned by [`Remote::name()`][crate::Remote::name()].
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum Name<'repo> {
+ /// A symbolic name, like `origin`.
+ /// Note that it has not necessarily been validated yet.
+ Symbol(Cow<'repo, str>),
+ /// A url pointing to the remote host directly.
+ Url(Cow<'repo, BStr>),
+}
+
+///
+pub mod name;
+
+mod build;
+
+mod errors;
+pub use errors::find;
+
+///
+pub mod init;
+
+///
+pub mod fetch;
+
+///
+#[cfg(any(feature = "async-network-client", feature = "blocking-network-client"))]
+pub mod connect;
+
+#[cfg(any(feature = "async-network-client", feature = "blocking-network-client"))]
+mod connection;
+#[cfg(any(feature = "async-network-client", feature = "blocking-network-client"))]
+pub use connection::{ref_map, AuthenticateFn, Connection};
+
+///
+pub mod save;
+
+mod access;
+///
+pub mod url;
diff --git a/vendor/gix/src/remote/name.rs b/vendor/gix/src/remote/name.rs
new file mode 100644
index 000000000..6c6afe745
--- /dev/null
+++ b/vendor/gix/src/remote/name.rs
@@ -0,0 +1,84 @@
+use std::{borrow::Cow, convert::TryFrom};
+
+use super::Name;
+use crate::bstr::{BStr, BString, ByteSlice, ByteVec};
+
+/// The error returned by [validated()].
+#[derive(Debug, thiserror::Error)]
+#[error("remote names must be valid within refspecs for fetching: {name:?}")]
+#[allow(missing_docs)]
+pub struct Error {
+ pub source: gix_refspec::parse::Error,
+ pub name: BString,
+}
+
+/// Return `name` if it is valid as symbolic remote name.
+///
+/// This means it has to be valid within a the ref path of a tracking branch.
+pub fn validated(name: impl Into<BString>) -> Result<BString, Error> {
+ let name = name.into();
+ match gix_refspec::parse(
+ format!("refs/heads/test:refs/remotes/{name}/test").as_str().into(),
+ gix_refspec::parse::Operation::Fetch,
+ ) {
+ Ok(_) => Ok(name),
+ Err(err) => Err(Error { source: err, name }),
+ }
+}
+
+impl Name<'_> {
+ /// Obtain the name as string representation.
+ pub fn as_bstr(&self) -> &BStr {
+ match self {
+ Name::Symbol(v) => v.as_ref().into(),
+ Name::Url(v) => v.as_ref(),
+ }
+ }
+
+ /// Return this instance as a symbolic name, if it is one.
+ pub fn as_symbol(&self) -> Option<&str> {
+ match self {
+ Name::Symbol(n) => n.as_ref().into(),
+ Name::Url(_) => None,
+ }
+ }
+
+ /// Return this instance as url, if it is one.
+ pub fn as_url(&self) -> Option<&BStr> {
+ match self {
+ Name::Url(n) => n.as_ref().into(),
+ Name::Symbol(_) => None,
+ }
+ }
+}
+
+impl<'a> TryFrom<Cow<'a, BStr>> for Name<'a> {
+ type Error = Cow<'a, BStr>;
+
+ fn try_from(name: Cow<'a, BStr>) -> Result<Self, Self::Error> {
+ if name.contains(&b'/') || name.as_ref() == "." {
+ Ok(Name::Url(name))
+ } else {
+ match name {
+ Cow::Borrowed(n) => n.to_str().ok().map(Cow::Borrowed).ok_or(name),
+ Cow::Owned(n) => Vec::from(n)
+ .into_string()
+ .map_err(|err| Cow::Owned(err.into_vec().into()))
+ .map(Cow::Owned),
+ }
+ .map(Name::Symbol)
+ }
+ }
+}
+
+impl From<BString> for Name<'static> {
+ fn from(name: BString) -> Self {
+ Self::try_from(Cow::Owned(name)).expect("String is never illformed")
+ }
+}
+
+impl<'a> AsRef<BStr> for Name<'a> {
+ fn as_ref(&self) -> &BStr {
+ self.as_bstr()
+ }
+}
diff --git a/vendor/gix/src/remote/save.rs b/vendor/gix/src/remote/save.rs
new file mode 100644
index 000000000..0e347551e
--- /dev/null
+++ b/vendor/gix/src/remote/save.rs
@@ -0,0 +1,125 @@
+use std::convert::TryInto;
+
+use crate::{
+ bstr::{BStr, BString},
+ config, remote, Remote,
+};
+
+/// The error returned by [`Remote::save_to()`].
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum Error {
+ #[error("The remote pointing to {} is anonymous and can't be saved.", url.to_bstring())]
+ NameMissing { url: gix_url::Url },
+}
+
+/// The error returned by [`Remote::save_as_to()`].
+///
+/// Note that this type should rather be in the `as` module, but cannot be as it's part of the Rust syntax.
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum AsError {
+ #[error(transparent)]
+ Save(#[from] Error),
+ #[error(transparent)]
+ Name(#[from] crate::remote::name::Error),
+}
+
+/// Serialize into gix-config.
+impl Remote<'_> {
+ /// Save ourselves to the given `config` if we are a named remote or fail otherwise.
+ ///
+ /// Note that all sections named `remote "<name>"` will be cleared of all values we are about to write,
+ /// and the last `remote "<name>"` section will be containing all relevant values so that reloading the remote
+ /// from `config` would yield the same in-memory state.
+ pub fn save_to(&self, config: &mut gix_config::File<'static>) -> Result<(), Error> {
+ fn as_key(name: &str) -> gix_config::parse::section::Key<'_> {
+ name.try_into().expect("valid")
+ }
+ let name = self.name().ok_or_else(|| Error::NameMissing {
+ url: self
+ .url
+ .as_ref()
+ .or(self.push_url.as_ref())
+ .expect("one url is always set")
+ .to_owned(),
+ })?;
+ if let Some(section_ids) = config.sections_and_ids_by_name("remote").map(|it| {
+ it.filter_map(|(s, id)| (s.header().subsection_name() == Some(name.as_bstr())).then_some(id))
+ .collect::<Vec<_>>()
+ }) {
+ let mut sections_to_remove = Vec::new();
+ const KEYS_TO_REMOVE: &[&str] = &[
+ config::tree::Remote::URL.name,
+ config::tree::Remote::PUSH_URL.name,
+ config::tree::Remote::FETCH.name,
+ config::tree::Remote::PUSH.name,
+ config::tree::Remote::TAG_OPT.name,
+ ];
+ for id in section_ids {
+ let mut section = config.section_mut_by_id(id).expect("just queried");
+ let was_empty = section.num_values() == 0;
+
+ for key in KEYS_TO_REMOVE {
+ while section.remove(key).is_some() {}
+ }
+
+ let is_empty_after_deletions_of_values_to_be_written = section.num_values() == 0;
+ if !was_empty && is_empty_after_deletions_of_values_to_be_written {
+ sections_to_remove.push(id);
+ }
+ }
+ for id in sections_to_remove {
+ config.remove_section_by_id(id);
+ }
+ }
+ let mut section = config
+ .section_mut_or_create_new("remote", Some(name.as_ref()))
+ .expect("section name is validated and 'remote' is acceptable");
+ if let Some(url) = self.url.as_ref() {
+ section.push(as_key("url"), Some(url.to_bstring().as_ref()));
+ }
+ if let Some(url) = self.push_url.as_ref() {
+ section.push(as_key("pushurl"), Some(url.to_bstring().as_ref()));
+ }
+ if self.fetch_tags != Default::default() {
+ section.push(
+ as_key(config::tree::Remote::TAG_OPT.name),
+ BStr::new(match self.fetch_tags {
+ remote::fetch::Tags::All => "--tags",
+ remote::fetch::Tags::None => "--no-tags",
+ remote::fetch::Tags::Included => unreachable!("BUG: the default shouldn't be written and we try"),
+ })
+ .into(),
+ );
+ }
+ for (key, spec) in self
+ .fetch_specs
+ .iter()
+ .map(|spec| ("fetch", spec))
+ .chain(self.push_specs.iter().map(|spec| ("push", spec)))
+ {
+ section.push(as_key(key), Some(spec.to_ref().to_bstring().as_ref()));
+ }
+ Ok(())
+ }
+
+ /// Forcefully set our name to `name` and write our state to `config` similar to [`save_to()`][Self::save_to()].
+ ///
+ /// Note that this sets a name for anonymous remotes, but overwrites the name for those who were named before.
+ /// If this name is different from the current one, the git configuration will still contain the previous name,
+ /// and the caller should account for that.
+ pub fn save_as_to(
+ &mut self,
+ name: impl Into<BString>,
+ config: &mut gix_config::File<'static>,
+ ) -> Result<(), AsError> {
+ let name = crate::remote::name::validated(name)?;
+ let prev_name = self.name.take();
+ self.name = Some(name.into());
+ self.save_to(config).map_err(|err| {
+ self.name = prev_name;
+ err.into()
+ })
+ }
+}
diff --git a/vendor/gix/src/remote/url/mod.rs b/vendor/gix/src/remote/url/mod.rs
new file mode 100644
index 000000000..7b8815812
--- /dev/null
+++ b/vendor/gix/src/remote/url/mod.rs
@@ -0,0 +1,7 @@
+mod rewrite;
+///
+#[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+pub mod scheme_permission;
+pub(crate) use rewrite::Rewrite;
+#[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+pub(crate) use scheme_permission::SchemePermission;
diff --git a/vendor/gix/src/remote/url/rewrite.rs b/vendor/gix/src/remote/url/rewrite.rs
new file mode 100644
index 000000000..ae0eee426
--- /dev/null
+++ b/vendor/gix/src/remote/url/rewrite.rs
@@ -0,0 +1,100 @@
+use gix_features::threading::OwnShared;
+
+use crate::{
+ bstr::{BStr, BString, ByteVec},
+ config,
+ remote::Direction,
+};
+
+#[derive(Debug, Clone)]
+struct Replace {
+ find: BString,
+ with: OwnShared<BString>,
+}
+
+#[derive(Default, Debug, Clone)]
+pub(crate) struct Rewrite {
+ url_rewrite: Vec<Replace>,
+ push_url_rewrite: Vec<Replace>,
+}
+
+/// Init
+impl Rewrite {
+ pub fn from_config(
+ config: &gix_config::File<'static>,
+ mut filter: fn(&gix_config::file::Metadata) -> bool,
+ ) -> Rewrite {
+ config
+ .sections_by_name_and_filter("url", &mut filter)
+ .map(|sections| {
+ let mut url_rewrite = Vec::new();
+ let mut push_url_rewrite = Vec::new();
+ for section in sections {
+ let replace = match section.header().subsection_name() {
+ Some(base) => OwnShared::new(base.to_owned()),
+ None => continue,
+ };
+
+ for instead_of in section.values(config::tree::Url::INSTEAD_OF.name) {
+ url_rewrite.push(Replace {
+ with: OwnShared::clone(&replace),
+ find: instead_of.into_owned(),
+ });
+ }
+ for instead_of in section.values(config::tree::Url::PUSH_INSTEAD_OF.name) {
+ push_url_rewrite.push(Replace {
+ with: OwnShared::clone(&replace),
+ find: instead_of.into_owned(),
+ });
+ }
+ }
+ Rewrite {
+ url_rewrite,
+ push_url_rewrite,
+ }
+ })
+ .unwrap_or_default()
+ }
+}
+
+/// Access
+impl Rewrite {
+ fn replacements_for(&self, direction: Direction) -> &[Replace] {
+ match direction {
+ Direction::Fetch => &self.url_rewrite,
+ Direction::Push => &self.push_url_rewrite,
+ }
+ }
+
+ pub fn longest(&self, url: &gix_url::Url, direction: Direction) -> Option<BString> {
+ if self.replacements_for(direction).is_empty() {
+ None
+ } else {
+ let mut url = url.to_bstring();
+ self.rewrite_url_in_place(&mut url, direction).then_some(url)
+ }
+ }
+
+ /// Rewrite the given `url` of `direction` and return `true` if a replacement happened.
+ ///
+ /// Note that the result must still be checked for validity, it might not be a valid URL as we do a syntax-unaware replacement.
+ pub fn rewrite_url_in_place(&self, url: &mut BString, direction: Direction) -> bool {
+ self.replacements_for(direction)
+ .iter()
+ .fold(None::<(usize, &BStr)>, |mut acc, replace| {
+ if url.starts_with(replace.find.as_ref()) {
+ let (bytes_matched, prev_rewrite_with) =
+ acc.get_or_insert((replace.find.len(), replace.with.as_slice().into()));
+ if *bytes_matched < replace.find.len() {
+ *bytes_matched = replace.find.len();
+ *prev_rewrite_with = replace.with.as_slice().into();
+ }
+ };
+ acc
+ })
+ .map(|(bytes_matched, replace_with)| {
+ url.replace_range(..bytes_matched, replace_with);
+ })
+ .is_some()
+ }
+}
diff --git a/vendor/gix/src/remote/url/scheme_permission.rs b/vendor/gix/src/remote/url/scheme_permission.rs
new file mode 100644
index 000000000..ddb87e111
--- /dev/null
+++ b/vendor/gix/src/remote/url/scheme_permission.rs
@@ -0,0 +1,120 @@
+use std::{borrow::Cow, collections::BTreeMap, convert::TryFrom};
+
+use crate::{
+ bstr::{BStr, BString, ByteSlice},
+ config,
+ config::tree::{gitoxide, Key, Protocol},
+};
+
+/// All allowed values of the `protocol.allow` key.
+#[derive(Debug, Clone, Copy, Eq, PartialEq)]
+pub enum Allow {
+ /// Allow use this protocol.
+ Always,
+ /// Forbid using this protocol
+ Never,
+ /// Only supported if the `GIT_PROTOCOL_FROM_USER` is unset or is set to `1`.
+ User,
+}
+
+impl Allow {
+ /// Return true if we represent something like 'allow == true'.
+ pub fn to_bool(self, user_allowed: Option<bool>) -> bool {
+ match self {
+ Allow::Always => true,
+ Allow::Never => false,
+ Allow::User => user_allowed.unwrap_or(true),
+ }
+ }
+}
+
+impl<'a> TryFrom<Cow<'a, BStr>> for Allow {
+ type Error = BString;
+
+ fn try_from(v: Cow<'a, BStr>) -> Result<Self, Self::Error> {
+ Ok(match v.as_ref().as_bytes() {
+ b"never" => Allow::Never,
+ b"always" => Allow::Always,
+ b"user" => Allow::User,
+ unknown => return Err(unknown.into()),
+ })
+ }
+}
+
+#[derive(Debug, Clone)]
+pub(crate) struct SchemePermission {
+ /// `None`, env-var is unset or wasn't queried, otherwise true if `GIT_PROTOCOL_FROM_USER` is `1`.
+ user_allowed: Option<bool>,
+ /// The general allow value from `protocol.allow`.
+ allow: Option<Allow>,
+ /// Per scheme allow information
+ allow_per_scheme: BTreeMap<gix_url::Scheme, Allow>,
+}
+
+/// Init
+impl SchemePermission {
+ /// NOTE: _intentionally without leniency_
+ pub fn from_config(
+ config: &gix_config::File<'static>,
+ mut filter: fn(&gix_config::file::Metadata) -> bool,
+ ) -> Result<Self, config::protocol::allow::Error> {
+ let allow: Option<Allow> = config
+ .string_filter_by_key("protocol.allow", &mut filter)
+ .map(|value| Protocol::ALLOW.try_into_allow(value, None))
+ .transpose()?;
+
+ let mut saw_user = allow.map_or(false, |allow| allow == Allow::User);
+ let allow_per_scheme = match config.sections_by_name_and_filter("protocol", &mut filter) {
+ Some(it) => {
+ let mut map = BTreeMap::default();
+ for (section, scheme) in it.filter_map(|section| {
+ section.header().subsection_name().and_then(|scheme| {
+ scheme
+ .to_str()
+ .ok()
+ .and_then(|scheme| gix_url::Scheme::try_from(scheme).ok().map(|scheme| (section, scheme)))
+ })
+ }) {
+ if let Some(value) = section
+ .value("allow")
+ .map(|value| Protocol::ALLOW.try_into_allow(value, Some(scheme.as_str())))
+ .transpose()?
+ {
+ saw_user |= value == Allow::User;
+ map.insert(scheme, value);
+ }
+ }
+ map
+ }
+ None => Default::default(),
+ };
+
+ let user_allowed = saw_user.then(|| {
+ config
+ .string_filter_by_key(gitoxide::Allow::PROTOCOL_FROM_USER.logical_name().as_str(), &mut filter)
+ .map_or(true, |val| val.as_ref() == "1")
+ });
+ Ok(SchemePermission {
+ allow,
+ allow_per_scheme,
+ user_allowed,
+ })
+ }
+}
+
+/// Access
+impl SchemePermission {
+ pub fn allow(&self, scheme: &gix_url::Scheme) -> bool {
+ self.allow_per_scheme.get(scheme).or(self.allow.as_ref()).map_or_else(
+ || {
+ use gix_url::Scheme::*;
+ match scheme {
+ File | Git | Ssh | Http | Https => true,
+ Ext(_) => false,
+ // TODO: figure out what 'ext' really entails, and what 'other' protocols are which aren't representable for us yet
+ }
+ },
+ |allow| allow.to_bool(self.user_allowed),
+ )
+ }
+}
diff --git a/vendor/gix/src/repository/cache.rs b/vendor/gix/src/repository/cache.rs
new file mode 100644
index 000000000..7dcd844e6
--- /dev/null
+++ b/vendor/gix/src/repository/cache.rs
@@ -0,0 +1,30 @@
+/// Configure how caches are used to speed up various git repository operations
+impl crate::Repository {
+ /// Sets the amount of space used at most for caching most recently accessed fully decoded objects, to `Some(bytes)`,
+ /// or `None` to deactivate it entirely.
+ ///
+ /// Note that it is unset by default but can be enabled once there is time for performance optimization.
+ /// Well-chosen cache sizes can improve performance particularly if objects are accessed multiple times in a row.
+ /// The cache is configured to grow gradually.
+ ///
+ /// Note that a cache on application level should be considered as well as the best object access is not doing one.
+ pub fn object_cache_size(&mut self, bytes: impl Into<Option<usize>>) {
+ let bytes = bytes.into();
+ match bytes {
+ Some(bytes) if bytes == 0 => self.objects.unset_object_cache(),
+ Some(bytes) => self
+ .objects
+ .set_object_cache(move || Box::new(crate::object::cache::MemoryCappedHashmap::new(bytes))),
+ None => self.objects.unset_object_cache(),
+ }
+ }
+
+ /// Set an object cache of size `bytes` if none is set.
+ ///
+ /// Use this method to avoid overwriting any existing value while assuring better performance in case no value is set.
+ pub fn object_cache_size_if_unset(&mut self, bytes: usize) {
+ if !self.objects.has_object_cache() {
+ self.object_cache_size(bytes)
+ }
+ }
+}
diff --git a/vendor/gix/src/repository/config/mod.rs b/vendor/gix/src/repository/config/mod.rs
new file mode 100644
index 000000000..92b2618cc
--- /dev/null
+++ b/vendor/gix/src/repository/config/mod.rs
@@ -0,0 +1,191 @@
+use std::collections::BTreeSet;
+
+use crate::{bstr::ByteSlice, config};
+
+/// General Configuration
+impl crate::Repository {
+ /// Return a snapshot of the configuration as seen upon opening the repository.
+ pub fn config_snapshot(&self) -> config::Snapshot<'_> {
+ config::Snapshot { repo: self }
+ }
+
+ /// Return a mutable snapshot of the configuration as seen upon opening the repository, starting a transaction.
+ /// When the returned instance is dropped, it is applied in full, even if the reason for the drop is an error.
+ ///
+ /// Note that changes to the configuration are in-memory only and are observed only the this instance
+ /// of the [`Repository`][crate::Repository].
+ pub fn config_snapshot_mut(&mut self) -> config::SnapshotMut<'_> {
+ let config = self.config.resolved.as_ref().clone();
+ config::SnapshotMut {
+ repo: Some(self),
+ config,
+ }
+ }
+
+ /// The options used to open the repository.
+ pub fn open_options(&self) -> &crate::open::Options {
+ &self.options
+ }
+
+ /// Obtain options for use when connecting via `ssh`.
+ #[cfg(feature = "blocking-network-client")]
+ pub fn ssh_connect_options(
+ &self,
+ ) -> Result<gix_protocol::transport::client::ssh::connect::Options, config::ssh_connect_options::Error> {
+ use crate::config::{
+ cache::util::ApplyLeniency,
+ tree::{gitoxide, Core, Ssh},
+ };
+
+ let config = &self.config.resolved;
+ let mut trusted = self.filter_config_section();
+ let mut fallback_active = false;
+ let ssh_command = config
+ .string_filter("core", None, Core::SSH_COMMAND.name, &mut trusted)
+ .or_else(|| {
+ fallback_active = true;
+ config.string_filter(
+ "gitoxide",
+ Some("ssh".into()),
+ gitoxide::Ssh::COMMAND_WITHOUT_SHELL_FALLBACK.name,
+ &mut trusted,
+ )
+ })
+ .map(|cmd| gix_path::from_bstr(cmd).into_owned().into());
+ let opts = gix_protocol::transport::client::ssh::connect::Options {
+ disallow_shell: fallback_active,
+ command: ssh_command,
+ kind: config
+ .string_filter_by_key("ssh.variant", &mut trusted)
+ .and_then(|variant| Ssh::VARIANT.try_into_variant(variant).transpose())
+ .transpose()
+ .with_leniency(self.options.lenient_config)?,
+ };
+ Ok(opts)
+ }
+
+ /// The kind of object hash the repository is configured to use.
+ pub fn object_hash(&self) -> gix_hash::Kind {
+ self.config.object_hash
+ }
+}
+
+#[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+mod transport;
+
+mod remote {
+ use std::{borrow::Cow, collections::BTreeSet};
+
+ use crate::{bstr::ByteSlice, remote};
+
+ impl crate::Repository {
+ /// Returns a sorted list unique of symbolic names of remotes that
+ /// we deem [trustworthy][crate::open::Options::filter_config_section()].
+ // TODO: Use `remote::Name` here
+ pub fn remote_names(&self) -> BTreeSet<&str> {
+ self.subsection_names_of("remote")
+ }
+
+ /// Obtain the branch-independent name for a remote for use in the given `direction`, or `None` if it could not be determined.
+ ///
+ /// For _fetching_, use the only configured remote, or default to `origin` if it exists.
+ /// For _pushing_, use the `remote.pushDefault` trusted configuration key, or fall back to the rules for _fetching_.
+ ///
+ /// # Notes
+ ///
+ /// It's up to the caller to determine what to do if the current `head` is unborn or detached.
+ // TODO: use remote::Name here
+ pub fn remote_default_name(&self, direction: remote::Direction) -> Option<Cow<'_, str>> {
+ let name = (direction == remote::Direction::Push)
+ .then(|| {
+ self.config
+ .resolved
+ .string_filter("remote", None, "pushDefault", &mut self.filter_config_section())
+ .and_then(|s| match s {
+ Cow::Borrowed(s) => s.to_str().ok().map(Cow::Borrowed),
+ Cow::Owned(s) => s.to_str().ok().map(|s| Cow::Owned(s.into())),
+ })
+ })
+ .flatten();
+ name.or_else(|| {
+ let names = self.remote_names();
+ match names.len() {
+ 0 => None,
+ 1 => names.iter().next().copied().map(Cow::Borrowed),
+ _more_than_one => names.get("origin").copied().map(Cow::Borrowed),
+ }
+ })
+ }
+ }
+}
+
+mod branch {
+ use std::{borrow::Cow, collections::BTreeSet, convert::TryInto};
+
+ use gix_ref::FullNameRef;
+ use gix_validate::reference::name::Error as ValidateNameError;
+
+ use crate::bstr::BStr;
+
+ impl crate::Repository {
+ /// Return a set of unique short branch names for which custom configuration exists in the configuration,
+ /// if we deem them [trustworthy][crate::open::Options::filter_config_section()].
+ pub fn branch_names(&self) -> BTreeSet<&str> {
+ self.subsection_names_of("branch")
+ }
+
+ /// Returns the validated reference on the remote associated with the given `short_branch_name`,
+ /// always `main` instead of `refs/heads/main`.
+ ///
+ /// The returned reference is the one we track on the remote side for merging and pushing.
+ /// Returns `None` if the remote reference was not found.
+ /// May return an error if the reference is invalid.
+ pub fn branch_remote_ref<'a>(
+ &self,
+ short_branch_name: impl Into<&'a BStr>,
+ ) -> Option<Result<Cow<'_, FullNameRef>, ValidateNameError>> {
+ self.config
+ .resolved
+ .string("branch", Some(short_branch_name.into()), "merge")
+ .map(crate::config::tree::branch::Merge::try_into_fullrefname)
+ }
+
+ /// Returns the unvalidated name of the remote associated with the given `short_branch_name`,
+ /// typically `main` instead of `refs/heads/main`.
+ /// In some cases, the returned name will be an URL.
+ /// Returns `None` if the remote was not found or if the name contained illformed UTF-8.
+ ///
+ /// See also [Reference::remote_name()][crate::Reference::remote_name()] for a more typesafe version
+ /// to be used when a `Reference` is available.
+ pub fn branch_remote_name<'a>(
+ &self,
+ short_branch_name: impl Into<&'a BStr>,
+ ) -> Option<crate::remote::Name<'_>> {
+ self.config
+ .resolved
+ .string("branch", Some(short_branch_name.into()), "remote")
+ .and_then(|name| name.try_into().ok())
+ }
+ }
+}
+
+impl crate::Repository {
+ pub(crate) fn filter_config_section(&self) -> fn(&gix_config::file::Metadata) -> bool {
+ self.options
+ .filter_config_section
+ .unwrap_or(config::section::is_trusted)
+ }
+
+ fn subsection_names_of<'a>(&'a self, header_name: &'a str) -> BTreeSet<&'a str> {
+ self.config
+ .resolved
+ .sections_by_name(header_name)
+ .map(|it| {
+ let filter = self.filter_config_section();
+ it.filter(move |s| filter(s.meta()))
+ .filter_map(|section| section.header().subsection_name().and_then(|b| b.to_str().ok()))
+ .collect()
+ })
+ .unwrap_or_default()
+ }
+}
diff --git a/vendor/gix/src/repository/config/transport.rs b/vendor/gix/src/repository/config/transport.rs
new file mode 100644
index 000000000..dcfbc0bf6
--- /dev/null
+++ b/vendor/gix/src/repository/config/transport.rs
@@ -0,0 +1,425 @@
+#![allow(clippy::result_large_err)]
+use std::any::Any;
+
+use crate::bstr::BStr;
+
+impl crate::Repository {
+ /// Produce configuration suitable for `url`, as differentiated by its protocol/scheme, to be passed to a transport instance via
+ /// [configure()][gix_transport::client::TransportWithoutIO::configure()] (via `&**config` to pass the contained `Any` and not the `Box`).
+ /// `None` is returned if there is no known configuration. If `remote_name` is not `None`, the remote's name may contribute to
+ /// configuration overrides, typically for the HTTP transport.
+ ///
+ /// Note that the caller may cast the instance themselves to modify it before passing it on.
+ ///
+ /// For transports that support proxy authentication, the
+ /// [default authentication method](crate::config::Snapshot::credential_helpers()) will be used with the url of the proxy
+ /// if it contains a user name.
+ #[cfg_attr(
+ not(any(
+ feature = "blocking-http-transport-reqwest",
+ feature = "blocking-http-transport-curl"
+ )),
+ allow(unused_variables)
+ )]
+ pub fn transport_options<'a>(
+ &self,
+ url: impl Into<&'a BStr>,
+ remote_name: Option<&BStr>,
+ ) -> Result<Option<Box<dyn Any>>, crate::config::transport::Error> {
+ let url = gix_url::parse(url.into())?;
+ use gix_url::Scheme::*;
+
+ match &url.scheme {
+ Http | Https => {
+ #[cfg(not(any(
+ feature = "blocking-http-transport-reqwest",
+ feature = "blocking-http-transport-curl"
+ )))]
+ {
+ Ok(None)
+ }
+ #[cfg(any(
+ feature = "blocking-http-transport-reqwest",
+ feature = "blocking-http-transport-curl"
+ ))]
+ {
+ use std::{
+ borrow::Cow,
+ sync::{Arc, Mutex},
+ };
+
+ use gix_transport::client::{
+ http,
+ http::options::{ProxyAuthMethod, SslVersion, SslVersionRangeInclusive},
+ };
+
+ use crate::{
+ config,
+ config::{
+ cache::util::ApplyLeniency,
+ tree::{gitoxide, Key, Remote},
+ },
+ };
+ fn try_cow_to_string(
+ v: Cow<'_, BStr>,
+ lenient: bool,
+ key_str: impl Into<Cow<'static, BStr>>,
+ key: &'static config::tree::keys::String,
+ ) -> Result<Option<String>, config::transport::Error> {
+ key.try_into_string(v)
+ .map_err(|err| config::transport::Error::IllformedUtf8 {
+ source: err,
+ key: key_str.into(),
+ })
+ .map(Some)
+ .with_leniency(lenient)
+ }
+
+ fn cow_bstr(v: &str) -> Cow<'_, BStr> {
+ Cow::Borrowed(v.into())
+ }
+
+ fn proxy_auth_method(
+ value_and_key: Option<(
+ Cow<'_, BStr>,
+ Cow<'static, BStr>,
+ &'static config::tree::http::ProxyAuthMethod,
+ )>,
+ ) -> Result<ProxyAuthMethod, config::transport::Error> {
+ let value = value_and_key
+ .map(|(method, key, key_type)| {
+ key_type.try_into_proxy_auth_method(method).map_err(|err| {
+ config::transport::http::Error::InvalidProxyAuthMethod { source: err, key }
+ })
+ })
+ .transpose()?
+ .unwrap_or_default();
+ Ok(value)
+ }
+
+ fn ssl_version(
+ config: &gix_config::File<'static>,
+ key_str: &'static str,
+ key: &'static config::tree::http::SslVersion,
+ mut filter: fn(&gix_config::file::Metadata) -> bool,
+ lenient: bool,
+ ) -> Result<Option<SslVersion>, config::transport::Error> {
+ debug_assert_eq!(
+ key_str,
+ key.logical_name(),
+ "BUG: hardcoded and generated key names must match"
+ );
+ config
+ .string_filter_by_key(key_str, &mut filter)
+ .filter(|v| !v.is_empty())
+ .map(|v| {
+ key.try_into_ssl_version(v)
+ .map_err(crate::config::transport::http::Error::from)
+ })
+ .transpose()
+ .with_leniency(lenient)
+ .map_err(Into::into)
+ }
+
+ fn proxy(
+ value: Option<(Cow<'_, BStr>, Cow<'static, BStr>, &'static config::tree::keys::String)>,
+ lenient: bool,
+ ) -> Result<Option<String>, config::transport::Error> {
+ Ok(value
+ .and_then(|(v, k, key)| try_cow_to_string(v, lenient, k.clone(), key).transpose())
+ .transpose()?
+ .map(|mut proxy| {
+ if !proxy.trim().is_empty() && !proxy.contains("://") {
+ proxy.insert_str(0, "http://");
+ proxy
+ } else {
+ proxy
+ }
+ }))
+ }
+
+ let mut opts = http::Options::default();
+ let config = &self.config.resolved;
+ let mut trusted_only = self.filter_config_section();
+ let lenient = self.config.lenient_config;
+ opts.extra_headers = {
+ let key = "http.extraHeader";
+ debug_assert_eq!(key, &config::tree::Http::EXTRA_HEADER.logical_name());
+ config
+ .strings_filter_by_key(key, &mut trusted_only)
+ .map(|values| config::tree::Http::EXTRA_HEADER.try_into_extra_header(values))
+ .transpose()
+ .map_err(|err| config::transport::Error::IllformedUtf8 {
+ source: err,
+ key: Cow::Borrowed(key.into()),
+ })?
+ .unwrap_or_default()
+ };
+
+ opts.follow_redirects = {
+ let key = "http.followRedirects";
+
+ config::tree::Http::FOLLOW_REDIRECTS
+ .try_into_follow_redirects(
+ config.string_filter_by_key(key, &mut trusted_only).unwrap_or_default(),
+ || {
+ config
+ .boolean_filter_by_key(key, &mut trusted_only)
+ .transpose()
+ .with_leniency(lenient)
+ },
+ )
+ .map_err(config::transport::http::Error::InvalidFollowRedirects)?
+ };
+
+ opts.low_speed_time_seconds = config
+ .integer_filter_by_key("http.lowSpeedTime", &mut trusted_only)
+ .map(|value| config::tree::Http::LOW_SPEED_TIME.try_into_u64(value))
+ .transpose()
+ .with_leniency(lenient)
+ .map_err(config::transport::http::Error::from)?
+ .unwrap_or_default();
+ opts.low_speed_limit_bytes_per_second = config
+ .integer_filter_by_key("http.lowSpeedLimit", &mut trusted_only)
+ .map(|value| config::tree::Http::LOW_SPEED_LIMIT.try_into_u32(value))
+ .transpose()
+ .with_leniency(lenient)
+ .map_err(config::transport::http::Error::from)?
+ .unwrap_or_default();
+ opts.proxy = proxy(
+ remote_name
+ .and_then(|name| {
+ config
+ .string_filter("remote", Some(name), Remote::PROXY.name, &mut trusted_only)
+ .map(|v| (v, Cow::Owned(format!("remote.{name}.proxy").into()), &Remote::PROXY))
+ })
+ .or_else(|| {
+ let key = "http.proxy";
+ debug_assert_eq!(key, config::tree::Http::PROXY.logical_name());
+ let http_proxy = config
+ .string_filter_by_key(key, &mut trusted_only)
+ .map(|v| (v, cow_bstr(key), &config::tree::Http::PROXY))
+ .or_else(|| {
+ let key = "gitoxide.http.proxy";
+ debug_assert_eq!(key, gitoxide::Http::PROXY.logical_name());
+ config
+ .string_filter_by_key(key, &mut trusted_only)
+ .map(|v| (v, cow_bstr(key), &gitoxide::Http::PROXY))
+ });
+ if url.scheme == Https {
+ http_proxy.or_else(|| {
+ let key = "gitoxide.https.proxy";
+ debug_assert_eq!(key, gitoxide::Https::PROXY.logical_name());
+ config
+ .string_filter_by_key(key, &mut trusted_only)
+ .map(|v| (v, cow_bstr(key), &gitoxide::Https::PROXY))
+ })
+ } else {
+ http_proxy
+ }
+ })
+ .or_else(|| {
+ let key = "gitoxide.http.allProxy";
+ debug_assert_eq!(key, gitoxide::Http::ALL_PROXY.logical_name());
+ config
+ .string_filter_by_key(key, &mut trusted_only)
+ .map(|v| (v, cow_bstr(key), &gitoxide::Http::ALL_PROXY))
+ }),
+ lenient,
+ )?;
+ {
+ let key = "gitoxide.http.noProxy";
+ debug_assert_eq!(key, gitoxide::Http::NO_PROXY.logical_name());
+ opts.no_proxy = config
+ .string_filter_by_key(key, &mut trusted_only)
+ .and_then(|v| {
+ try_cow_to_string(v, lenient, Cow::Borrowed(key.into()), &gitoxide::Http::NO_PROXY)
+ .transpose()
+ })
+ .transpose()?;
+ }
+ opts.proxy_auth_method = proxy_auth_method({
+ let key = "gitoxide.http.proxyAuthMethod";
+ debug_assert_eq!(key, gitoxide::Http::PROXY_AUTH_METHOD.logical_name());
+ config
+ .string_filter_by_key(key, &mut trusted_only)
+ .map(|v| (v, Cow::Borrowed(key.into()), &gitoxide::Http::PROXY_AUTH_METHOD))
+ .or_else(|| {
+ remote_name
+ .and_then(|name| {
+ config
+ .string_filter("remote", Some(name), "proxyAuthMethod", &mut trusted_only)
+ .map(|v| {
+ (
+ v,
+ Cow::Owned(format!("remote.{name}.proxyAuthMethod").into()),
+ &Remote::PROXY_AUTH_METHOD,
+ )
+ })
+ })
+ .or_else(|| {
+ let key = "http.proxyAuthMethod";
+ debug_assert_eq!(key, config::tree::Http::PROXY_AUTH_METHOD.logical_name());
+ config.string_filter_by_key(key, &mut trusted_only).map(|v| {
+ (v, Cow::Borrowed(key.into()), &config::tree::Http::PROXY_AUTH_METHOD)
+ })
+ })
+ })
+ })?;
+ opts.proxy_authenticate = opts
+ .proxy
+ .as_deref()
+ .filter(|url| !url.is_empty())
+ .map(|url| gix_url::parse(url.into()))
+ .transpose()?
+ .filter(|url| url.user().is_some())
+ .map(|url| -> Result<_, config::transport::http::Error> {
+ let (mut cascade, action_with_normalized_url, prompt_opts) =
+ self.config_snapshot().credential_helpers(url)?;
+ Ok((
+ action_with_normalized_url,
+ Arc::new(Mutex::new(move |action| cascade.invoke(action, prompt_opts.clone())))
+ as Arc<Mutex<http::options::AuthenticateFn>>,
+ ))
+ })
+ .transpose()?;
+ opts.connect_timeout = {
+ let key = "gitoxide.http.connectTimeout";
+ config
+ .integer_filter_by_key(key, &mut trusted_only)
+ .map(|v| {
+ debug_assert_eq!(key, gitoxide::Http::CONNECT_TIMEOUT.logical_name());
+ gitoxide::Http::CONNECT_TIMEOUT
+ .try_into_duration(v)
+ .map_err(crate::config::transport::http::Error::from)
+ })
+ .transpose()
+ .with_leniency(lenient)?
+ };
+ {
+ let key = "http.userAgent";
+ opts.user_agent = config
+ .string_filter_by_key(key, &mut trusted_only)
+ .and_then(|v| {
+ try_cow_to_string(
+ v,
+ lenient,
+ Cow::Borrowed(key.into()),
+ &config::tree::Http::USER_AGENT,
+ )
+ .transpose()
+ })
+ .transpose()?
+ .or_else(|| Some(crate::env::agent().into()));
+ }
+
+ {
+ let key = "http.version";
+ opts.http_version = config
+ .string_filter_by_key(key, &mut trusted_only)
+ .map(|v| {
+ config::tree::Http::VERSION
+ .try_into_http_version(v)
+ .map_err(config::transport::http::Error::InvalidHttpVersion)
+ })
+ .transpose()?;
+ }
+
+ {
+ opts.verbose = config
+ .boolean_filter(
+ "gitoxide",
+ Some("http".into()),
+ gitoxide::Http::VERBOSE.name,
+ &mut trusted_only,
+ )
+ .and_then(Result::ok)
+ .unwrap_or_default();
+ }
+
+ let may_use_cainfo = {
+ let key = "http.schannelUseSSLCAInfo";
+ config
+ .boolean_filter_by_key(key, &mut trusted_only)
+ .map(|value| config::tree::Http::SCHANNEL_USE_SSL_CA_INFO.enrich_error(value))
+ .transpose()
+ .with_leniency(lenient)
+ .map_err(config::transport::http::Error::from)?
+ .unwrap_or(true)
+ };
+
+ if may_use_cainfo {
+ let key = "http.sslCAInfo";
+ debug_assert_eq!(key, config::tree::Http::SSL_CA_INFO.logical_name());
+ opts.ssl_ca_info = config
+ .path_filter_by_key(key, &mut trusted_only)
+ .map(|p| {
+ use crate::config::cache::interpolate_context;
+ p.interpolate(interpolate_context(
+ self.install_dir().ok().as_deref(),
+ self.config.home_dir().as_deref(),
+ ))
+ .map(|cow| cow.into_owned())
+ })
+ .transpose()
+ .with_leniency(lenient)
+ .map_err(|err| config::transport::Error::InterpolatePath { source: err, key })?;
+ }
+
+ {
+ opts.ssl_version = ssl_version(
+ config,
+ "http.sslVersion",
+ &config::tree::Http::SSL_VERSION,
+ trusted_only,
+ lenient,
+ )?
+ .map(|v| SslVersionRangeInclusive { min: v, max: v });
+ let min_max = ssl_version(
+ config,
+ "gitoxide.http.sslVersionMin",
+ &gitoxide::Http::SSL_VERSION_MIN,
+ trusted_only,
+ lenient,
+ )
+ .and_then(|min| {
+ ssl_version(
+ config,
+ "gitoxide.http.sslVersionMax",
+ &gitoxide::Http::SSL_VERSION_MAX,
+ trusted_only,
+ lenient,
+ )
+ .map(|max| min.and_then(|min| max.map(|max| (min, max))))
+ })?;
+ if let Some((min, max)) = min_max {
+ let v = opts.ssl_version.get_or_insert(SslVersionRangeInclusive {
+ min: SslVersion::TlsV1_3,
+ max: SslVersion::TlsV1_3,
+ });
+ v.min = min;
+ v.max = max;
+ }
+ }
+
+ #[cfg(feature = "blocking-http-transport-curl")]
+ {
+ let key = "http.schannelCheckRevoke";
+ let schannel_check_revoke = config
+ .boolean_filter_by_key(key, &mut trusted_only)
+ .map(|value| config::tree::Http::SCHANNEL_CHECK_REVOKE.enrich_error(value))
+ .transpose()
+ .with_leniency(lenient)
+ .map_err(config::transport::http::Error::from)?;
+ let backend = gix_protocol::transport::client::http::curl::Options { schannel_check_revoke };
+ opts.backend =
+ Some(Arc::new(Mutex::new(backend)) as Arc<Mutex<dyn Any + Send + Sync + 'static>>);
+ }
+
+ Ok(Some(Box::new(opts)))
+ }
+ }
+ File | Git | Ssh | Ext(_) => Ok(None),
+ }
+ }
+}
diff --git a/vendor/gix/src/repository/identity.rs b/vendor/gix/src/repository/identity.rs
new file mode 100644
index 000000000..61a4b4a98
--- /dev/null
+++ b/vendor/gix/src/repository/identity.rs
@@ -0,0 +1,175 @@
+use std::time::SystemTime;
+
+use crate::{
+ bstr::BString,
+ config,
+ config::tree::{gitoxide, keys, Author, Committer, Key, User},
+};
+
+/// Identity handling.
+///
+/// # Deviation
+///
+/// There is no notion of a default user like in git, and instead failing to provide a user
+/// is fatal. That way, we enforce correctness and force application developers to take care
+/// of this issue which can be done in various ways, for instance by setting
+/// `gitoxide.committer.nameFallback` and similar.
+impl crate::Repository {
+ /// Return the committer as configured by this repository, which is determined by…
+ ///
+ /// * …the git configuration `committer.name|email`…
+ /// * …the `GIT_COMMITTER_(NAME|EMAIL|DATE)` environment variables…
+ /// * …the configuration for `user.name|email` as fallback…
+ ///
+ /// …and in that order, or `None` if no committer name or email was configured, or `Some(Err(…))`
+ /// if the committer date could not be parsed.
+ ///
+ /// # Note
+ ///
+ /// The values are cached when the repository is instantiated.
+ pub fn committer(&self) -> Option<Result<gix_actor::SignatureRef<'_>, config::time::Error>> {
+ let p = self.config.personas();
+
+ Ok(gix_actor::SignatureRef {
+ name: p.committer.name.as_ref().or(p.user.name.as_ref()).map(|v| v.as_ref())?,
+ email: p
+ .committer
+ .email
+ .as_ref()
+ .or(p.user.email.as_ref())
+ .map(|v| v.as_ref())?,
+ time: match extract_time_or_default(p.committer.time.as_ref(), &gitoxide::Commit::COMMITTER_DATE) {
+ Ok(t) => t,
+ Err(err) => return Some(Err(err)),
+ },
+ })
+ .into()
+ }
+
+ /// Return the author as configured by this repository, which is determined by…
+ ///
+ /// * …the git configuration `author.name|email`…
+ /// * …the `GIT_AUTHOR_(NAME|EMAIL|DATE)` environment variables…
+ /// * …the configuration for `user.name|email` as fallback…
+ ///
+ /// …and in that order, or `None` if there was nothing configured.
+ ///
+ /// # Note
+ ///
+ /// The values are cached when the repository is instantiated.
+ pub fn author(&self) -> Option<Result<gix_actor::SignatureRef<'_>, config::time::Error>> {
+ let p = self.config.personas();
+
+ Ok(gix_actor::SignatureRef {
+ name: p.author.name.as_ref().or(p.user.name.as_ref()).map(|v| v.as_ref())?,
+ email: p.author.email.as_ref().or(p.user.email.as_ref()).map(|v| v.as_ref())?,
+ time: match extract_time_or_default(p.author.time.as_ref(), &gitoxide::Commit::AUTHOR_DATE) {
+ Ok(t) => t,
+ Err(err) => return Some(Err(err)),
+ },
+ })
+ .into()
+ }
+}
+
+fn extract_time_or_default(
+ time: Option<&Result<gix_actor::Time, gix_date::parse::Error>>,
+ config_key: &'static keys::Time,
+) -> Result<gix_actor::Time, config::time::Error> {
+ match time {
+ Some(Ok(t)) => Ok(*t),
+ None => Ok(gix_date::Time::now_local_or_utc()),
+ Some(Err(err)) => Err(config::time::Error::from(config_key).with_source(err.clone())),
+ }
+}
+
+#[derive(Debug, Clone)]
+pub(crate) struct Entity {
+ pub name: Option<BString>,
+ pub email: Option<BString>,
+ /// A time parsed from an environment variable, handling potential errors is delayed.
+ pub time: Option<Result<gix_actor::Time, gix_date::parse::Error>>,
+}
+
+#[derive(Debug, Clone)]
+pub(crate) struct Personas {
+ user: Entity,
+ committer: Entity,
+ author: Entity,
+}
+
+impl Personas {
+ pub fn from_config_and_env(config: &gix_config::File<'_>) -> Self {
+ fn entity_in_section(
+ config: &gix_config::File<'_>,
+ name_key: &keys::Any,
+ email_key: &keys::Any,
+ fallback: Option<(&keys::Any, &keys::Any)>,
+ ) -> (Option<BString>, Option<BString>) {
+ let fallback = fallback.and_then(|(name_key, email_key)| {
+ debug_assert_eq!(name_key.section.name(), email_key.section.name());
+ config
+ .section("gitoxide", Some(name_key.section.name().into()))
+ .ok()
+ .map(|section| (section, name_key, email_key))
+ });
+ (
+ config
+ .string(name_key.section.name(), None, name_key.name)
+ .or_else(|| fallback.as_ref().and_then(|(s, name_key, _)| s.value(name_key.name)))
+ .map(|v| v.into_owned()),
+ config
+ .string(email_key.section.name(), None, email_key.name)
+ .or_else(|| fallback.as_ref().and_then(|(s, _, email_key)| s.value(email_key.name)))
+ .map(|v| v.into_owned()),
+ )
+ }
+ let now = SystemTime::now();
+ let parse_date = |key: &str, date: &keys::Time| -> Option<Result<gix_date::Time, gix_date::parse::Error>> {
+ debug_assert_eq!(
+ key,
+ date.logical_name(),
+ "BUG: drift of expected name and actual name of the key (we hardcode it to save an allocation)"
+ );
+ config
+ .string_by_key(key)
+ .map(|time| date.try_into_time(time, now.into()))
+ };
+
+ let fallback = (
+ &gitoxide::Committer::NAME_FALLBACK,
+ &gitoxide::Committer::EMAIL_FALLBACK,
+ );
+ let (committer_name, committer_email) =
+ entity_in_section(config, &Committer::NAME, &Committer::EMAIL, Some(fallback));
+ let fallback = (&gitoxide::Author::NAME_FALLBACK, &gitoxide::Author::EMAIL_FALLBACK);
+ let (author_name, author_email) = entity_in_section(config, &Author::NAME, &Author::EMAIL, Some(fallback));
+ let (user_name, mut user_email) = entity_in_section(config, &User::NAME, &User::EMAIL, None);
+
+ let committer_date = parse_date("gitoxide.commit.committerDate", &gitoxide::Commit::COMMITTER_DATE);
+ let author_date = parse_date("gitoxide.commit.authorDate", &gitoxide::Commit::AUTHOR_DATE);
+
+ user_email = user_email.or_else(|| {
+ config
+ .string_by_key(gitoxide::User::EMAIL_FALLBACK.logical_name().as_str())
+ .map(|v| v.into_owned())
+ });
+ Personas {
+ user: Entity {
+ name: user_name,
+ email: user_email,
+ time: None,
+ },
+ committer: Entity {
+ name: committer_name,
+ email: committer_email,
+ time: committer_date,
+ },
+ author: Entity {
+ name: author_name,
+ email: author_email,
+ time: author_date,
+ },
+ }
+ }
+}
diff --git a/vendor/gix/src/repository/impls.rs b/vendor/gix/src/repository/impls.rs
new file mode 100644
index 000000000..6cf2b2e9b
--- /dev/null
+++ b/vendor/gix/src/repository/impls.rs
@@ -0,0 +1,73 @@
+impl Clone for crate::Repository {
+ fn clone(&self) -> Self {
+ crate::Repository::from_refs_and_objects(
+ self.refs.clone(),
+ self.objects.clone(),
+ self.work_tree.clone(),
+ self.common_dir.clone(),
+ self.config.clone(),
+ self.options.clone(),
+ self.index.clone(),
+ )
+ }
+}
+
+impl std::fmt::Debug for crate::Repository {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ f.debug_struct("Repository")
+ .field("kind", &self.kind())
+ .field("git_dir", &self.git_dir())
+ .field("work_dir", &self.work_dir())
+ .finish()
+ }
+}
+
+impl PartialEq<crate::Repository> for crate::Repository {
+ fn eq(&self, other: &crate::Repository) -> bool {
+ self.git_dir().canonicalize().ok() == other.git_dir().canonicalize().ok()
+ && self.work_tree.as_deref().and_then(|wt| wt.canonicalize().ok())
+ == other.work_tree.as_deref().and_then(|wt| wt.canonicalize().ok())
+ }
+}
+
+impl From<&crate::ThreadSafeRepository> for crate::Repository {
+ fn from(repo: &crate::ThreadSafeRepository) -> Self {
+ crate::Repository::from_refs_and_objects(
+ repo.refs.clone(),
+ repo.objects.to_handle().into(),
+ repo.work_tree.clone(),
+ repo.common_dir.clone(),
+ repo.config.clone(),
+ repo.linked_worktree_options.clone(),
+ repo.index.clone(),
+ )
+ }
+}
+
+impl From<crate::ThreadSafeRepository> for crate::Repository {
+ fn from(repo: crate::ThreadSafeRepository) -> Self {
+ crate::Repository::from_refs_and_objects(
+ repo.refs,
+ repo.objects.to_handle().into(),
+ repo.work_tree,
+ repo.common_dir,
+ repo.config,
+ repo.linked_worktree_options,
+ repo.index,
+ )
+ }
+}
+
+impl From<crate::Repository> for crate::ThreadSafeRepository {
+ fn from(r: crate::Repository) -> Self {
+ crate::ThreadSafeRepository {
+ refs: r.refs,
+ objects: r.objects.into_inner().store(),
+ work_tree: r.work_tree,
+ common_dir: r.common_dir,
+ config: r.config,
+ linked_worktree_options: r.options,
+ index: r.index,
+ }
+ }
+}
diff --git a/vendor/gix/src/repository/init.rs b/vendor/gix/src/repository/init.rs
new file mode 100644
index 000000000..ae6a42c3b
--- /dev/null
+++ b/vendor/gix/src/repository/init.rs
@@ -0,0 +1,55 @@
+use std::cell::RefCell;
+
+impl crate::Repository {
+ pub(crate) fn from_refs_and_objects(
+ refs: crate::RefStore,
+ objects: crate::OdbHandle,
+ work_tree: Option<std::path::PathBuf>,
+ common_dir: Option<std::path::PathBuf>,
+ config: crate::config::Cache,
+ linked_worktree_options: crate::open::Options,
+ index: crate::worktree::IndexStorage,
+ ) -> Self {
+ let objects = setup_objects(objects, &config);
+ crate::Repository {
+ bufs: RefCell::new(Vec::with_capacity(4)),
+ work_tree,
+ common_dir,
+ objects,
+ refs,
+ config,
+ options: linked_worktree_options,
+ index,
+ }
+ }
+
+ /// Convert this instance into a [`ThreadSafeRepository`][crate::ThreadSafeRepository] by dropping all thread-local data.
+ pub fn into_sync(self) -> crate::ThreadSafeRepository {
+ self.into()
+ }
+}
+
+#[cfg_attr(not(feature = "max-performance-safe"), allow(unused_variables, unused_mut))]
+fn setup_objects(mut objects: crate::OdbHandle, config: &crate::config::Cache) -> crate::OdbHandle {
+ #[cfg(feature = "max-performance-safe")]
+ {
+ match config.pack_cache_bytes {
+ None => objects.set_pack_cache(|| Box::<gix_pack::cache::lru::StaticLinkedList<64>>::default()),
+ Some(0) => objects.unset_pack_cache(),
+ Some(bytes) => objects.set_pack_cache(move || -> Box<gix_odb::cache::PackCache> {
+ Box::new(gix_pack::cache::lru::MemoryCappedHashmap::new(bytes))
+ }),
+ };
+ if config.object_cache_bytes == 0 {
+ objects.unset_object_cache();
+ } else {
+ let bytes = config.object_cache_bytes;
+ objects.set_object_cache(move || Box::new(gix_pack::cache::object::MemoryCappedHashmap::new(bytes)));
+ }
+ objects
+ }
+ #[cfg(not(feature = "max-performance-safe"))]
+ {
+ objects
+ }
+}
diff --git a/vendor/gix/src/repository/location.rs b/vendor/gix/src/repository/location.rs
new file mode 100644
index 000000000..0bb8ea253
--- /dev/null
+++ b/vendor/gix/src/repository/location.rs
@@ -0,0 +1,86 @@
+use std::path::PathBuf;
+
+use gix_path::realpath::MAX_SYMLINKS;
+
+impl crate::Repository {
+ /// Return the path to the repository itself, containing objects, references, configuration, and more.
+ ///
+ /// Synonymous to [`path()`][crate::Repository::path()].
+ pub fn git_dir(&self) -> &std::path::Path {
+ self.refs.git_dir()
+ }
+
+ /// The trust we place in the git-dir, with lower amounts of trust causing access to configuration to be limited.
+ pub fn git_dir_trust(&self) -> gix_sec::Trust {
+ self.options.git_dir_trust.expect("definitely set by now")
+ }
+
+ /// Returns the main git repository if this is a repository on a linked work-tree, or the `git_dir` itself.
+ pub fn common_dir(&self) -> &std::path::Path {
+ self.common_dir.as_deref().unwrap_or_else(|| self.git_dir())
+ }
+
+ /// Return the path to the worktree index file, which may or may not exist.
+ pub fn index_path(&self) -> PathBuf {
+ self.git_dir().join("index")
+ }
+
+ /// The path to the `.git` directory itself, or equivalent if this is a bare repository.
+ pub fn path(&self) -> &std::path::Path {
+ self.git_dir()
+ }
+
+ /// Return the work tree containing all checked out files, if there is one.
+ pub fn work_dir(&self) -> Option<&std::path::Path> {
+ self.work_tree.as_deref()
+ }
+
+ // TODO: tests, respect precomposeUnicode
+ /// The directory of the binary path of the current process.
+ pub fn install_dir(&self) -> std::io::Result<PathBuf> {
+ crate::path::install_dir()
+ }
+
+ /// Returns the relative path which is the components between the working tree and the current working dir (CWD).
+ /// Note that there may be `None` if there is no work tree, even though the `PathBuf` will be empty
+ /// if the CWD is at the root of the work tree.
+ // TODO: tests, details - there is a lot about environment variables to change things around.
+ pub fn prefix(&self) -> Option<std::io::Result<PathBuf>> {
+ self.work_tree.as_ref().map(|root| {
+ std::env::current_dir().and_then(|cwd| {
+ gix_path::realpath_opts(root, &cwd, MAX_SYMLINKS)
+ .map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))
+ .and_then(|root| {
+ cwd.strip_prefix(&root)
+ .map_err(|_| {
+ std::io::Error::new(
+ std::io::ErrorKind::Other,
+ format!(
+ "CWD '{}' isn't within the work tree '{}'",
+ cwd.display(),
+ root.display()
+ ),
+ )
+ })
+ .map(ToOwned::to_owned)
+ })
+ })
+ })
+ }
+
+ /// Return the kind of repository, either bare or one with a work tree.
+ pub fn kind(&self) -> crate::Kind {
+ match self.worktree() {
+ Some(wt) => {
+ if gix_discover::is_submodule_git_dir(self.git_dir()) {
+ crate::Kind::Submodule
+ } else {
+ crate::Kind::WorkTree {
+ is_linked: !wt.is_main(),
+ }
+ }
+ }
+ None => crate::Kind::Bare,
+ }
+ }
+}
diff --git a/vendor/gix/src/repository/mod.rs b/vendor/gix/src/repository/mod.rs
new file mode 100644
index 000000000..31199e22d
--- /dev/null
+++ b/vendor/gix/src/repository/mod.rs
@@ -0,0 +1,36 @@
+//!
+
+/// Internal
+impl crate::Repository {
+ #[inline]
+ pub(crate) fn free_buf(&self) -> Vec<u8> {
+ self.bufs.borrow_mut().pop().unwrap_or_default()
+ }
+
+ /// This method is commonly called from the destructor of objects that previously claimed an entry
+ /// in the free-list with `free_buf()`.
+ /// They are welcome to take out the data themselves, for instance when the object is detached, to avoid
+ /// it being reclaimed.
+ #[inline]
+ pub(crate) fn reuse_buffer(&self, data: &mut Vec<u8>) {
+ if data.capacity() > 0 {
+ self.bufs.borrow_mut().push(std::mem::take(data));
+ }
+ }
+}
+
+mod cache;
+mod config;
+pub(crate) mod identity;
+mod impls;
+mod init;
+mod location;
+mod object;
+pub(crate) mod permissions;
+mod reference;
+mod remote;
+mod revision;
+mod snapshots;
+mod state;
+mod thread_safe;
+mod worktree;
diff --git a/vendor/gix/src/repository/object.rs b/vendor/gix/src/repository/object.rs
new file mode 100644
index 000000000..bda1a54c3
--- /dev/null
+++ b/vendor/gix/src/repository/object.rs
@@ -0,0 +1,214 @@
+#![allow(clippy::result_large_err)]
+use std::convert::TryInto;
+
+use gix_hash::ObjectId;
+use gix_odb::{Find, FindExt, Write};
+use gix_ref::{
+ transaction::{LogChange, PreviousValue, RefLog},
+ FullName,
+};
+
+use crate::{commit, ext::ObjectIdExt, object, tag, Id, Object, Reference, Tree};
+
+/// Methods related to object creation.
+impl crate::Repository {
+ /// Find the object with `id` in the object database or return an error if it could not be found.
+ ///
+ /// There are various legitimate reasons for an object to not be present, which is why
+ /// [`try_find_object(…)`][crate::Repository::try_find_object()] might be preferable instead.
+ ///
+ /// # Performance Note
+ ///
+ /// In order to get the kind of the object, it must be fully decoded from storage if it is packed with deltas.
+ /// Loose objects could be partially decoded, even though that's not implemented.
+ pub fn find_object(&self, id: impl Into<ObjectId>) -> Result<Object<'_>, object::find::existing::Error> {
+ let id = id.into();
+ if id == gix_hash::ObjectId::empty_tree(self.object_hash()) {
+ return Ok(Object {
+ id,
+ kind: gix_object::Kind::Tree,
+ data: Vec::new(),
+ repo: self,
+ });
+ }
+ let mut buf = self.free_buf();
+ let kind = self.objects.find(id, &mut buf)?.kind;
+ Ok(Object::from_data(id, kind, buf, self))
+ }
+
+ /// Try to find the object with `id` or return `None` if it wasn't found.
+ pub fn try_find_object(&self, id: impl Into<ObjectId>) -> Result<Option<Object<'_>>, object::find::Error> {
+ let id = id.into();
+ if id == gix_hash::ObjectId::empty_tree(self.object_hash()) {
+ return Ok(Some(Object {
+ id,
+ kind: gix_object::Kind::Tree,
+ data: Vec::new(),
+ repo: self,
+ }));
+ }
+
+ let mut buf = self.free_buf();
+ match self.objects.try_find(id, &mut buf)? {
+ Some(obj) => {
+ let kind = obj.kind;
+ Ok(Some(Object::from_data(id, kind, buf, self)))
+ }
+ None => Ok(None),
+ }
+ }
+
+ /// Write the given object into the object database and return its object id.
+ pub fn write_object(&self, object: impl gix_object::WriteTo) -> Result<Id<'_>, object::write::Error> {
+ self.objects
+ .write(object)
+ .map(|oid| oid.attach(self))
+ .map_err(Into::into)
+ }
+
+ /// Write a blob from the given `bytes`.
+ pub fn write_blob(&self, bytes: impl AsRef<[u8]>) -> Result<Id<'_>, object::write::Error> {
+ self.objects
+ .write_buf(gix_object::Kind::Blob, bytes.as_ref())
+ .map(|oid| oid.attach(self))
+ }
+
+ /// Write a blob from the given `Read` implementation.
+ pub fn write_blob_stream(
+ &self,
+ mut bytes: impl std::io::Read + std::io::Seek,
+ ) -> Result<Id<'_>, object::write::Error> {
+ let current = bytes.stream_position()?;
+ let len = bytes.seek(std::io::SeekFrom::End(0))? - current;
+ bytes.seek(std::io::SeekFrom::Start(current))?;
+
+ self.objects
+ .write_stream(gix_object::Kind::Blob, len, bytes)
+ .map(|oid| oid.attach(self))
+ }
+
+ /// Create a tag reference named `name` (without `refs/tags/` prefix) pointing to a newly created tag object
+ /// which in turn points to `target` and return the newly created reference.
+ ///
+ /// It will be created with `constraint` which is most commonly to [only create it][PreviousValue::MustNotExist]
+ /// or to [force overwriting a possibly existing tag](PreviousValue::Any).
+ pub fn tag(
+ &self,
+ name: impl AsRef<str>,
+ target: impl AsRef<gix_hash::oid>,
+ target_kind: gix_object::Kind,
+ tagger: Option<gix_actor::SignatureRef<'_>>,
+ message: impl AsRef<str>,
+ constraint: PreviousValue,
+ ) -> Result<Reference<'_>, tag::Error> {
+ let tag = gix_object::Tag {
+ target: target.as_ref().into(),
+ target_kind,
+ name: name.as_ref().into(),
+ tagger: tagger.map(|t| t.to_owned()),
+ message: message.as_ref().into(),
+ pgp_signature: None,
+ };
+ let tag_id = self.write_object(&tag)?;
+ self.tag_reference(name, tag_id, constraint).map_err(Into::into)
+ }
+
+ /// Similar to [`commit(…)`][crate::Repository::commit()], but allows to create the commit with `committer` and `author` specified.
+ ///
+ /// This forces setting the commit time and author time by hand. Note that typically, committer and author are the same.
+ pub fn commit_as<'a, 'c, Name, E>(
+ &self,
+ committer: impl Into<gix_actor::SignatureRef<'c>>,
+ author: impl Into<gix_actor::SignatureRef<'a>>,
+ reference: Name,
+ message: impl AsRef<str>,
+ tree: impl Into<ObjectId>,
+ parents: impl IntoIterator<Item = impl Into<ObjectId>>,
+ ) -> Result<Id<'_>, commit::Error>
+ where
+ Name: TryInto<FullName, Error = E>,
+ commit::Error: From<E>,
+ {
+ use gix_ref::{
+ transaction::{Change, RefEdit},
+ Target,
+ };
+
+ // TODO: possibly use CommitRef to save a few allocations (but will have to allocate for object ids anyway).
+ // This can be made vastly more efficient though if we wanted to, so we lie in the API
+ let reference = reference.try_into()?;
+ let commit = gix_object::Commit {
+ message: message.as_ref().into(),
+ tree: tree.into(),
+ author: author.into().to_owned(),
+ committer: committer.into().to_owned(),
+ encoding: None,
+ parents: parents.into_iter().map(|id| id.into()).collect(),
+ extra_headers: Default::default(),
+ };
+
+ let commit_id = self.write_object(&commit)?;
+ self.edit_reference(RefEdit {
+ change: Change::Update {
+ log: LogChange {
+ mode: RefLog::AndReference,
+ force_create_reflog: false,
+ message: crate::reference::log::message("commit", commit.message.as_ref(), commit.parents.len()),
+ },
+ expected: match commit.parents.first().map(|p| Target::Peeled(*p)) {
+ Some(previous) => {
+ if reference.as_bstr() == "HEAD" {
+ PreviousValue::MustExistAndMatch(previous)
+ } else {
+ PreviousValue::ExistingMustMatch(previous)
+ }
+ }
+ None => PreviousValue::MustNotExist,
+ },
+ new: Target::Peeled(commit_id.inner),
+ },
+ name: reference,
+ deref: true,
+ })?;
+ Ok(commit_id)
+ }
+
+ /// Create a new commit object with `message` referring to `tree` with `parents`, and point `reference`
+ /// to it. The commit is written without message encoding field, which can be assumed to be UTF-8.
+ /// `author` and `committer` fields are pre-set from the configuration, which can be altered
+ /// [temporarily][crate::Repository::config_snapshot_mut()] before the call if required.
+ ///
+ /// `reference` will be created if it doesn't exist, and can be `"HEAD"` to automatically write-through to the symbolic reference
+ /// that `HEAD` points to if it is not detached. For this reason, detached head states cannot be created unless the `HEAD` is detached
+ /// already. The reflog will be written as canonical git would do, like `<operation> (<detail>): <summary>`.
+ ///
+ /// The first parent id in `parents` is expected to be the current target of `reference` and the operation will fail if it is not.
+ /// If there is no parent, the `reference` is expected to not exist yet.
+ ///
+ /// The method fails immediately if a `reference` lock can't be acquired.
+ pub fn commit<Name, E>(
+ &self,
+ reference: Name,
+ message: impl AsRef<str>,
+ tree: impl Into<ObjectId>,
+ parents: impl IntoIterator<Item = impl Into<ObjectId>>,
+ ) -> Result<Id<'_>, commit::Error>
+ where
+ Name: TryInto<FullName, Error = E>,
+ commit::Error: From<E>,
+ {
+ let author = self.author().ok_or(commit::Error::AuthorMissing)??;
+ let committer = self.committer().ok_or(commit::Error::CommitterMissing)??;
+ self.commit_as(committer, author, reference, message, tree, parents)
+ }
+
+ /// Return an empty tree object, suitable for [getting changes](crate::Tree::changes()).
+ ///
+ /// Note that it is special and doesn't physically exist in the object database even though it can be returned.
+ /// This means that this object can be used in an uninitialized, empty repository which would report to have no objects at all.
+ pub fn empty_tree(&self) -> Tree<'_> {
+ self.find_object(gix_hash::ObjectId::empty_tree(self.object_hash()))
+ .expect("always present")
+ .into_tree()
+ }
+}
diff --git a/vendor/gix/src/repository/permissions.rs b/vendor/gix/src/repository/permissions.rs
new file mode 100644
index 000000000..88b61b739
--- /dev/null
+++ b/vendor/gix/src/repository/permissions.rs
@@ -0,0 +1,168 @@
+use gix_sec::Trust;
+
+/// Permissions associated with various resources of a git repository
+#[derive(Debug, Clone)]
+pub struct Permissions {
+ /// Permissions related to the environment
+ pub env: Environment,
+ /// Permissions related to the handling of git configuration.
+ pub config: Config,
+}
+
+/// Configure from which sources git configuration may be loaded.
+///
+/// Note that configuration from inside of the repository is always loaded as it's definitely required for correctness.
+#[derive(Copy, Clone, Ord, PartialOrd, PartialEq, Eq, Debug, Hash)]
+pub struct Config {
+ /// The git binary may come with configuration as part of its configuration, and if this is true (default false)
+ /// we will load the configuration of the git binary, if present and not a duplicate of the ones below.
+ ///
+ /// It's disabled by default as it involves executing the git binary once per execution of the application.
+ pub git_binary: bool,
+ /// Whether to use the system configuration.
+ /// This is defined as `$(prefix)/etc/gitconfig` on unix.
+ pub system: bool,
+ /// Whether to use the git application configuration.
+ ///
+ /// A platform defined location for where a user's git application configuration should be located.
+ /// If `$XDG_CONFIG_HOME` is not set or empty, `$HOME/.config/git/config` will be used
+ /// on unix.
+ pub git: bool,
+ /// Whether to use the user configuration.
+ /// This is usually `~/.gitconfig` on unix.
+ pub user: bool,
+ /// Whether to use the configuration from environment variables.
+ pub env: bool,
+ /// Whether to follow include files when they are encountered in loaded configuration,
+ /// via `include` and `includeIf` sections.
+ pub includes: bool,
+}
+
+impl Config {
+ /// Allow everything which usually relates to a fully trusted environment
+ pub fn all() -> Self {
+ Config {
+ git_binary: false,
+ system: true,
+ git: true,
+ user: true,
+ env: true,
+ includes: true,
+ }
+ }
+}
+
+impl Default for Config {
+ fn default() -> Self {
+ Self::all()
+ }
+}
+
+/// Permissions related to the usage of environment variables
+#[derive(Debug, Clone)]
+pub struct Environment {
+ /// Control whether resources pointed to by `XDG_CONFIG_HOME` can be used when looking up common configuration values.
+ ///
+ /// Note that [`gix_sec::Permission::Forbid`] will cause the operation to abort if a resource is set via the XDG config environment.
+ pub xdg_config_home: gix_sec::Permission,
+ /// Control the way resources pointed to by the home directory (similar to `xdg_config_home`) may be used.
+ pub home: gix_sec::Permission,
+ /// Control if environment variables to configure the HTTP transport, like `http_proxy` may be used.
+ ///
+ /// Note that http-transport related environment variables prefixed with `GIT_` may also be included here
+ /// if they match this category like `GIT_HTTP_USER_AGENT`.
+ pub http_transport: gix_sec::Permission,
+ /// Control if the `EMAIL` environment variables may be read.
+ ///
+ /// Note that identity related environment variables prefixed with `GIT_` may also be included here
+ /// if they match this category.
+ pub identity: gix_sec::Permission,
+ /// Control if environment variables related to the object database are handled. This includes features and performance
+ /// options alike.
+ pub objects: gix_sec::Permission,
+ /// Control if resources pointed to by `GIT_*` prefixed environment variables can be used, **but only** if they
+ /// are not contained in any other category. This is a catch-all section.
+ pub git_prefix: gix_sec::Permission,
+ /// Control if resources pointed to by `SSH_*` prefixed environment variables can be used (like `SSH_ASKPASS`)
+ pub ssh_prefix: gix_sec::Permission,
+}
+
+impl Environment {
+ /// Allow access to the entire environment.
+ pub fn all() -> Self {
+ let allow = gix_sec::Permission::Allow;
+ Environment {
+ xdg_config_home: allow,
+ home: allow,
+ git_prefix: allow,
+ ssh_prefix: allow,
+ http_transport: allow,
+ identity: allow,
+ objects: allow,
+ }
+ }
+}
+
+impl Permissions {
+ /// Return permissions that will not include configuration files not owned by the current user,
+ /// but trust system and global configuration files along with those which are owned by the current user.
+ ///
+ /// This allows reading and writing repositories even if they aren't owned by the current user, but avoids using
+ /// anything else that could cause us to write into unknown locations or use programs beyond our `PATH`.
+ pub fn secure() -> Self {
+ Permissions {
+ env: Environment::all(),
+ config: Config::all(),
+ }
+ }
+
+ /// Everything is allowed with this set of permissions, thus we read all configuration and do what git typically
+ /// does with owned repositories.
+ pub fn all() -> Self {
+ Permissions {
+ env: Environment::all(),
+ config: Config::all(),
+ }
+ }
+
+ /// Don't read any but the local git configuration and deny reading any environment variables.
+ pub fn isolated() -> Self {
+ Permissions {
+ config: Config {
+ git_binary: false,
+ system: false,
+ git: false,
+ user: false,
+ env: false,
+ includes: false,
+ },
+ env: {
+ let deny = gix_sec::Permission::Deny;
+ Environment {
+ xdg_config_home: deny,
+ home: deny,
+ ssh_prefix: deny,
+ git_prefix: deny,
+ http_transport: deny,
+ identity: deny,
+ objects: deny,
+ }
+ },
+ }
+ }
+}
+
+impl gix_sec::trust::DefaultForLevel for Permissions {
+ fn default_for_level(level: Trust) -> Self {
+ match level {
+ Trust::Full => Permissions::all(),
+ Trust::Reduced => Permissions::secure(),
+ }
+ }
+}
+
+impl Default for Permissions {
+ fn default() -> Self {
+ Permissions::secure()
+ }
+}
diff --git a/vendor/gix/src/repository/reference.rs b/vendor/gix/src/repository/reference.rs
new file mode 100644
index 000000000..e5a8aadcb
--- /dev/null
+++ b/vendor/gix/src/repository/reference.rs
@@ -0,0 +1,243 @@
+use std::convert::TryInto;
+
+use gix_hash::ObjectId;
+use gix_ref::{
+ transaction::{Change, LogChange, PreviousValue, RefEdit, RefLog},
+ FullName, PartialNameRef, Target,
+};
+
+use crate::{bstr::BString, ext::ReferenceExt, reference, Reference};
+
+/// Obtain and alter references comfortably
+impl crate::Repository {
+    /// Create a lightweight tag with given `name` (and without `refs/tags/` prefix) pointing to the given `target`, and return it as reference.
+    ///
+    /// It will be created with `constraint` which is most commonly to [only create it][PreviousValue::MustNotExist]
+    /// or to [force overwriting a possibly existing tag](PreviousValue::Any).
+    pub fn tag_reference(
+        &self,
+        name: impl AsRef<str>,
+        target: impl Into<ObjectId>,
+        constraint: PreviousValue,
+    ) -> Result<Reference<'_>, reference::edit::Error> {
+        let id = target.into();
+        let mut edits = self.edit_reference(RefEdit {
+            change: Change::Update {
+                log: Default::default(),
+                expected: constraint,
+                new: Target::Peeled(id),
+            },
+            // The `?` surfaces invalid tag names via the `From<gix_validate::…>` conversion on the error type.
+            name: format!("refs/tags/{}", name.as_ref()).try_into()?,
+            deref: false,
+        })?;
+        // NOTE(review): the assertion message reads "should ever happen" — presumably
+        // "should never happen" was meant; confirm upstream before changing vendored text.
+        assert_eq!(edits.len(), 1, "reference splits should ever happen");
+        let edit = edits.pop().expect("exactly one item");
+        Ok(Reference {
+            inner: gix_ref::Reference {
+                name: edit.name,
+                target: id.into(),
+                peeled: None,
+            },
+            repo: self,
+        })
+    }
+
+    /// Returns the currently set namespace for references, or `None` if it is not set.
+    ///
+    /// Namespaces allow to partition references, and are configured per `Easy`.
+    pub fn namespace(&self) -> Option<&gix_ref::Namespace> {
+        self.refs.namespace.as_ref()
+    }
+
+    /// Remove the currently set reference namespace and return it, affecting only this `Easy`.
+    pub fn clear_namespace(&mut self) -> Option<gix_ref::Namespace> {
+        self.refs.namespace.take()
+    }
+
+    /// Set the reference namespace to the given value, like `"foo"` or `"foo/bar"`.
+    ///
+    /// Note that this value is shared across all `Easy…` instances as the value is stored in the shared `Repository`.
+    pub fn set_namespace<'a, Name, E>(
+        &mut self,
+        namespace: Name,
+    ) -> Result<Option<gix_ref::Namespace>, gix_validate::refname::Error>
+    where
+        Name: TryInto<&'a PartialNameRef, Error = E>,
+        gix_validate::refname::Error: From<E>,
+    {
+        let namespace = gix_ref::namespace::expand(namespace)?;
+        // `replace` returns the previously set namespace, if any.
+        Ok(self.refs.namespace.replace(namespace))
+    }
+
+    // TODO: more tests or usage
+    /// Create a new reference with `name`, like `refs/heads/branch`, pointing to `target`, adhering to `constraint`
+    /// during creation and writing `log_message` into the reflog. Note that a ref-log will be written even if `log_message` is empty.
+    ///
+    /// The newly created Reference is returned.
+    pub fn reference<Name, E>(
+        &self,
+        name: Name,
+        target: impl Into<ObjectId>,
+        constraint: PreviousValue,
+        log_message: impl Into<BString>,
+    ) -> Result<Reference<'_>, reference::edit::Error>
+    where
+        Name: TryInto<FullName, Error = E>,
+        gix_validate::reference::name::Error: From<E>,
+    {
+        let name = name.try_into().map_err(gix_validate::reference::name::Error::from)?;
+        let id = target.into();
+        let mut edits = self.edit_reference(RefEdit {
+            change: Change::Update {
+                log: LogChange {
+                    // Write to the reflog as well as updating the reference itself.
+                    mode: RefLog::AndReference,
+                    force_create_reflog: false,
+                    message: log_message.into(),
+                },
+                expected: constraint,
+                new: Target::Peeled(id),
+            },
+            name,
+            deref: false,
+        })?;
+        assert_eq!(
+            edits.len(),
+            1,
+            "only one reference can be created, splits aren't possible"
+        );
+
+        Ok(gix_ref::Reference {
+            name: edits.pop().expect("exactly one edit").name,
+            target: Target::Peeled(id),
+            peeled: None,
+        }
+        .attach(self))
+    }
+
+    /// Edit a single reference as described in `edit`, and write reference logs as `log_committer`.
+    ///
+    /// One or more `RefEdit`s are returned - symbolic reference splits can cause more edits to be performed. All edits have the previous
+    /// reference values set to the ones encountered at rest after acquiring the respective reference's lock.
+    pub fn edit_reference(&self, edit: RefEdit) -> Result<Vec<RefEdit>, reference::edit::Error> {
+        self.edit_references(Some(edit))
+    }
+
+    /// Edit one or more references as described by their `edits`.
+    /// Note that one can set the committer name for use in the ref-log by temporarily
+    /// [overriding the gix-config][crate::Repository::config_snapshot_mut()].
+    ///
+    /// Returns all reference edits, which might be more than were provided due to the splitting of symbolic references, and
+    /// whose previous (_old_) values are the ones seen on in storage after the reference was locked.
+    pub fn edit_references(
+        &self,
+        edits: impl IntoIterator<Item = RefEdit>,
+    ) -> Result<Vec<RefEdit>, reference::edit::Error> {
+        // File and packed-refs lock timeouts come from the configuration cache.
+        let (file_lock_fail, packed_refs_lock_fail) = self.config.lock_timeout()?;
+        self.refs
+            .transaction()
+            .prepare(edits, file_lock_fail, packed_refs_lock_fail)?
+            .commit(self.committer().transpose()?)
+            .map_err(Into::into)
+    }
+
+    /// Return the repository head, an abstraction to help dealing with the `HEAD` reference.
+    ///
+    /// The `HEAD` reference can be in various states, for more information, see the documentation of [`Head`][crate::Head].
+    pub fn head(&self) -> Result<crate::Head<'_>, reference::find::existing::Error> {
+        let head = self.find_reference("HEAD")?;
+        Ok(match head.inner.target {
+            // A symbolic HEAD pointing to a reference that doesn't exist yet means an unborn branch.
+            Target::Symbolic(branch) => match self.find_reference(&branch) {
+                Ok(r) => crate::head::Kind::Symbolic(r.detach()),
+                Err(reference::find::existing::Error::NotFound) => crate::head::Kind::Unborn(branch),
+                Err(err) => return Err(err),
+            },
+            Target::Peeled(target) => crate::head::Kind::Detached {
+                target,
+                peeled: head.inner.peeled,
+            },
+        }
+        .attach(self))
+    }
+
+    /// Resolve the `HEAD` reference, follow and peel its target and obtain its object id.
+    ///
+    /// Note that this may fail for various reasons, most notably because the repository
+    /// is freshly initialized and doesn't have any commits yet.
+    ///
+    /// Also note that the returned id is likely to point to a commit, but could also
+    /// point to a tree or blob. It won't, however, point to a tag as these are always peeled.
+    pub fn head_id(&self) -> Result<crate::Id<'_>, reference::head_id::Error> {
+        let mut head = self.head()?;
+        // `peel_to_id_in_place()` yields `None` for unborn heads, which is reported as a dedicated error.
+        head.peel_to_id_in_place()
+            .ok_or_else(|| reference::head_id::Error::Unborn {
+                name: head.referent_name().expect("unborn").to_owned(),
+            })?
+            .map_err(Into::into)
+    }
+
+    /// Return the name to the symbolic reference `HEAD` points to, or `None` if the head is detached.
+    ///
+    /// The difference to [`head_ref()`][Self::head_ref()] is that the latter requires the reference to exist,
+    /// whereas here we merely return the name of the possibly unborn reference.
+    pub fn head_name(&self) -> Result<Option<FullName>, reference::find::existing::Error> {
+        Ok(self.head()?.referent_name().map(|n| n.to_owned()))
+    }
+
+    /// Return the reference that `HEAD` points to, or `None` if the head is detached or unborn.
+    pub fn head_ref(&self) -> Result<Option<Reference<'_>>, reference::find::existing::Error> {
+        Ok(self.head()?.try_into_referent())
+    }
+
+    /// Return the commit object the `HEAD` reference currently points to after peeling it fully.
+    ///
+    /// Note that this may fail for various reasons, most notably because the repository
+    /// is freshly initialized and doesn't have any commits yet. It could also fail if the
+    /// head does not point to a commit.
+    pub fn head_commit(&self) -> Result<crate::Commit<'_>, reference::head_commit::Error> {
+        Ok(self.head()?.peel_to_commit_in_place()?)
+    }
+
+    /// Find the reference with the given partial or full `name`, like `main`, `HEAD`, `heads/branch` or `origin/other`,
+    /// or return an error if it wasn't found.
+    ///
+    /// Consider [`try_find_reference(…)`][crate::Repository::try_find_reference()] if the reference might not exist
+    /// without that being considered an error.
+    pub fn find_reference<'a, Name, E>(&self, name: Name) -> Result<Reference<'_>, reference::find::existing::Error>
+    where
+        Name: TryInto<&'a PartialNameRef, Error = E>,
+        gix_ref::file::find::Error: From<E>,
+    {
+        self.try_find_reference(name)?
+            .ok_or(reference::find::existing::Error::NotFound)
+    }
+
+    /// Return a platform for iterating references.
+    ///
+    /// Common kinds of iteration are [all][crate::reference::iter::Platform::all()] or [prefixed][crate::reference::iter::Platform::prefixed()]
+    /// references.
+    pub fn references(&self) -> Result<reference::iter::Platform<'_>, reference::iter::Error> {
+        Ok(reference::iter::Platform {
+            platform: self.refs.iter()?,
+            repo: self,
+        })
+    }
+
+    /// Try to find the reference named `name`, like `main`, `heads/branch`, `HEAD` or `origin/other`, and return it.
+    ///
+    /// Otherwise return `None` if the reference wasn't found.
+    /// If the reference is expected to exist, use [`find_reference()`][crate::Repository::find_reference()].
+    pub fn try_find_reference<'a, Name, E>(&self, name: Name) -> Result<Option<Reference<'_>>, reference::find::Error>
+    where
+        Name: TryInto<&'a PartialNameRef, Error = E>,
+        gix_ref::file::find::Error: From<E>,
+    {
+        let state = self;
+        match state.refs.try_find(name) {
+            Ok(r) => match r {
+                // Attach the repository handle so the returned reference can resolve objects.
+                Some(r) => Ok(Some(Reference::from_ref(r, self))),
+                None => Ok(None),
+            },
+            Err(err) => Err(err.into()),
+        }
+    }
+}
diff --git a/vendor/gix/src/repository/remote.rs b/vendor/gix/src/repository/remote.rs
new file mode 100644
index 000000000..e3f210899
--- /dev/null
+++ b/vendor/gix/src/repository/remote.rs
@@ -0,0 +1,199 @@
+#![allow(clippy::result_large_err)]
+use std::convert::TryInto;
+
+use crate::{bstr::BStr, config, remote, remote::find, Remote};
+
+impl crate::Repository {
+    /// Create a new remote available at the given `url`.
+    ///
+    /// It's configured to fetch included tags by default, similar to git.
+    /// See [`with_fetch_tags(…)`][Remote::with_fetch_tags()] for a way to change it.
+    pub fn remote_at<Url, E>(&self, url: Url) -> Result<Remote<'_>, remote::init::Error>
+    where
+        Url: TryInto<gix_url::Url, Error = E>,
+        gix_url::parse::Error: From<E>,
+    {
+        Remote::from_fetch_url(url, true, self)
+    }
+
+    /// Create a new remote available at the given `url` similarly to [`remote_at()`][crate::Repository::remote_at()],
+    /// but don't rewrite the url according to rewrite rules.
+    /// This eliminates a failure mode in case the rewritten URL is faulty, allowing to selectively [apply rewrite
+    /// rules][Remote::rewrite_urls()] later and do so non-destructively.
+    pub fn remote_at_without_url_rewrite<Url, E>(&self, url: Url) -> Result<Remote<'_>, remote::init::Error>
+    where
+        Url: TryInto<gix_url::Url, Error = E>,
+        gix_url::parse::Error: From<E>,
+    {
+        Remote::from_fetch_url(url, false, self)
+    }
+
+    /// Find the remote with the given `name_or_url` or report an error, similar to [`try_find_remote(…)`][Self::try_find_remote()].
+    ///
+    /// Note that we will obtain remotes only if we deem them [trustworthy][crate::open::Options::filter_config_section()].
+    pub fn find_remote<'a>(&self, name_or_url: impl Into<&'a BStr>) -> Result<Remote<'_>, find::existing::Error> {
+        let name_or_url = name_or_url.into();
+        // The first `?` surfaces a missing remote as `NotFound`, the second converts
+        // configuration errors via `From<find::Error>`.
+        Ok(self
+            .try_find_remote(name_or_url)
+            .ok_or_else(|| find::existing::Error::NotFound {
+                name: name_or_url.into(),
+            })??)
+    }
+
+    /// Find the default remote as configured, or `None` if no such configuration could be found.
+    ///
+    /// See [remote_default_name()][Self::remote_default_name()] for more information on the `direction` parameter.
+    pub fn find_default_remote(
+        &self,
+        direction: remote::Direction,
+    ) -> Option<Result<Remote<'_>, find::existing::Error>> {
+        self.remote_default_name(direction)
+            .map(|name| self.find_remote(name.as_ref()))
+    }
+
+    /// Find the remote with the given `name_or_url` or return `None` if it doesn't exist, for the purpose of fetching or pushing
+    /// data to a remote.
+    ///
+    /// There are various error kinds related to partial information or incorrectly formatted URLs or ref-specs.
+    /// Also note that the created `Remote` may have neither fetch nor push ref-specs set at all.
+    ///
+    /// Note that ref-specs are de-duplicated right away which may change their order. This doesn't affect matching in any way
+    /// as negations/excludes are applied after includes.
+    ///
+    /// We will only include information if we deem it [trustworthy][crate::open::Options::filter_config_section()].
+    pub fn try_find_remote<'a>(&self, name_or_url: impl Into<&'a BStr>) -> Option<Result<Remote<'_>, find::Error>> {
+        self.try_find_remote_inner(name_or_url, true)
+    }
+
+    /// Similar to [try_find_remote()][Self::try_find_remote()], but removes a failure mode if rewritten URLs turn out to be invalid
+    /// as it skips rewriting them.
+    /// Use this in conjunction with [`Remote::rewrite_urls()`] to non-destructively apply the rules and keep the failed urls unchanged.
+    pub fn try_find_remote_without_url_rewrite<'a>(
+        &self,
+        name_or_url: impl Into<&'a BStr>,
+    ) -> Option<Result<Remote<'_>, find::Error>> {
+        self.try_find_remote_inner(name_or_url, false)
+    }
+
+    // Shared implementation: assemble a `Remote` from the resolved configuration,
+    // returning `None` when no `[remote "<name>"]` keys exist at all.
+    fn try_find_remote_inner<'a>(
+        &self,
+        name_or_url: impl Into<&'a BStr>,
+        rewrite_urls: bool,
+    ) -> Option<Result<Remote<'_>, find::Error>> {
+        // Parse all `specs` as ref-specs for operation `op`, then sort and de-duplicate them.
+        fn config_spec<T: config::tree::keys::Validate>(
+            specs: Vec<std::borrow::Cow<'_, BStr>>,
+            name_or_url: &BStr,
+            key: &'static config::tree::keys::Any<T>,
+            op: gix_refspec::parse::Operation,
+        ) -> Result<Vec<gix_refspec::RefSpec>, find::Error> {
+            let kind = key.name;
+            specs
+                .into_iter()
+                .map(|spec| {
+                    key.try_into_refspec(spec, op).map_err(|err| find::Error::RefSpec {
+                        remote_name: name_or_url.into(),
+                        kind,
+                        source: err,
+                    })
+                })
+                .collect::<Result<Vec<_>, _>>()
+                .map(|mut specs| {
+                    specs.sort();
+                    specs.dedup();
+                    specs
+                })
+        }
+
+        // Only consider configuration sections we trust.
+        let mut filter = self.filter_config_section();
+        let name_or_url = name_or_url.into();
+        // Read a URL-valued key from the `[remote "<name>"]` section, if present.
+        let mut config_url = |key: &'static config::tree::keys::Url, kind: &'static str| {
+            self.config
+                .resolved
+                .string_filter("remote", Some(name_or_url), key.name, &mut filter)
+                .map(|url| {
+                    key.try_into_url(url).map_err(|err| find::Error::Url {
+                        kind,
+                        remote_name: name_or_url.into(),
+                        source: err,
+                    })
+                })
+        };
+        let url = config_url(&config::tree::Remote::URL, "fetch");
+        let push_url = config_url(&config::tree::Remote::PUSH_URL, "push");
+        let config = &self.config.resolved;
+
+        let fetch_specs = config
+            .strings_filter("remote", Some(name_or_url), "fetch", &mut filter)
+            .map(|specs| {
+                config_spec(
+                    specs,
+                    name_or_url,
+                    &config::tree::Remote::FETCH,
+                    gix_refspec::parse::Operation::Fetch,
+                )
+            });
+        let push_specs = config
+            .strings_filter("remote", Some(name_or_url), "push", &mut filter)
+            .map(|specs| {
+                config_spec(
+                    specs,
+                    name_or_url,
+                    &config::tree::Remote::PUSH,
+                    gix_refspec::parse::Operation::Push,
+                )
+            });
+        // `remote.<name>.tagOpt` controls the implicit tag-fetch behavior.
+        let fetch_tags = config
+            .string_filter("remote", Some(name_or_url), "tagOpt", &mut filter)
+            .map(|value| {
+                config::tree::Remote::TAG_OPT
+                    .try_into_tag_opt(value)
+                    .map_err(Into::into)
+            });
+        let fetch_tags = match fetch_tags {
+            Some(Ok(v)) => v,
+            Some(Err(err)) => return Some(Err(err)),
+            None => Default::default(),
+        };
+
+        match (url, fetch_specs, push_url, push_specs) {
+            // No configuration at all: the remote does not exist.
+            (None, None, None, None) => None,
+            // Ref-specs without any URL are a configuration error.
+            (None, _, None, _) => Some(Err(find::Error::UrlMissing)),
+            (url, fetch_specs, push_url, push_specs) => {
+                let url = match url {
+                    Some(Ok(v)) => Some(v),
+                    Some(Err(err)) => return Some(Err(err)),
+                    None => None,
+                };
+                let push_url = match push_url {
+                    Some(Ok(v)) => Some(v),
+                    Some(Err(err)) => return Some(Err(err)),
+                    None => None,
+                };
+                let fetch_specs = match fetch_specs {
+                    Some(Ok(v)) => v,
+                    Some(Err(err)) => return Some(Err(err)),
+                    None => Vec::new(),
+                };
+                let push_specs = match push_specs {
+                    Some(Ok(v)) => v,
+                    Some(Err(err)) => return Some(Err(err)),
+                    None => Vec::new(),
+                };
+
+                Some(
+                    Remote::from_preparsed_config(
+                        Some(name_or_url.to_owned()),
+                        url,
+                        push_url,
+                        fetch_specs,
+                        push_specs,
+                        rewrite_urls,
+                        fetch_tags,
+                        self,
+                    )
+                    .map_err(Into::into),
+                )
+            }
+        }
+    }
+}
diff --git a/vendor/gix/src/repository/revision.rs b/vendor/gix/src/repository/revision.rs
new file mode 100644
index 000000000..3018c2be8
--- /dev/null
+++ b/vendor/gix/src/repository/revision.rs
@@ -0,0 +1,42 @@
+use crate::{bstr::BStr, revision, Id};
+
+/// Methods for resolving revisions by spec or working with the commit graph.
+impl crate::Repository {
+    /// Parse a revision specification and turn it into the object(s) it describes, similar to `git rev-parse`.
+    ///
+    /// # Deviation
+    ///
+    /// - `@` actually stands for `HEAD`, whereas `git` resolves it to the object pointed to by `HEAD` without making the
+    ///   `HEAD` ref available for lookups.
+    pub fn rev_parse<'a>(&self, spec: impl Into<&'a BStr>) -> Result<revision::Spec<'_>, revision::spec::parse::Error> {
+        revision::Spec::from_bstr(
+            spec,
+            self,
+            revision::spec::parse::Options {
+                // Pass the kind hint from the configuration cache — presumably used to
+                // disambiguate short hashes; confirm in `revision::spec::parse::Options`.
+                object_kind_hint: self.config.object_kind_hint,
+                ..Default::default()
+            },
+        )
+    }
+
+    /// Parse a revision specification and return single object id as represented by this instance.
+    pub fn rev_parse_single<'repo, 'a>(
+        &'repo self,
+        spec: impl Into<&'a BStr>,
+    ) -> Result<Id<'repo>, revision::spec::parse::single::Error> {
+        let spec = spec.into();
+        // Range specs like `a..b` describe more than one object and are rejected here.
+        self.rev_parse(spec)?
+            .single()
+            .ok_or(revision::spec::parse::single::Error::RangedRev { spec: spec.into() })
+    }
+
+    /// Create the baseline for a revision walk by initializing it with the `tips` to start iterating on.
+    ///
+    /// It can be configured further before starting the actual walk.
+    pub fn rev_walk(
+        &self,
+        tips: impl IntoIterator<Item = impl Into<gix_hash::ObjectId>>,
+    ) -> revision::walk::Platform<'_> {
+        revision::walk::Platform::new(tips, self)
+    }
+}
diff --git a/vendor/gix/src/repository/snapshots.rs b/vendor/gix/src/repository/snapshots.rs
new file mode 100644
index 000000000..6933dc9c6
--- /dev/null
+++ b/vendor/gix/src/repository/snapshots.rs
@@ -0,0 +1,109 @@
+impl crate::Repository {
+    // TODO: tests
+    /// Similar to [`open_mailmap_into()`][crate::Repository::open_mailmap_into()], but ignores all errors and returns at worst
+    /// an empty mailmap, e.g. if there is no mailmap or if there were errors loading them.
+    ///
+    /// This represents typical usage within git, which also works with what's there without considering a populated mailmap
+    /// a reason to abort an operation, considering it optional.
+    pub fn open_mailmap(&self) -> gix_mailmap::Snapshot {
+        let mut out = gix_mailmap::Snapshot::default();
+        self.open_mailmap_into(&mut out).ok();
+        out
+    }
+
+    // TODO: tests
+    /// Try to merge mailmaps from the following locations into `target`:
+    ///
+    /// - read the `.mailmap` file without following symlinks from the working tree, if present
+    /// - OR read `HEAD:.mailmap` if this repository is bare (i.e. has no working tree), if the `mailmap.blob` is not set.
+    /// - read the mailmap as configured in `mailmap.blob`, if set.
+    /// - read the file as configured by `mailmap.file`, following symlinks, if set.
+    ///
+    /// Only the first error will be reported, and as many source mailmaps will be merged into `target` as possible.
+    /// Parsing errors will be ignored.
+    pub fn open_mailmap_into(&self, target: &mut gix_mailmap::Snapshot) -> Result<(), crate::mailmap::load::Error> {
+        // Only the first error encountered is remembered and returned at the end;
+        // merging continues on a best-effort basis regardless.
+        let mut err = None::<crate::mailmap::load::Error>;
+        let mut buf = Vec::new();
+        let mut blob_id = self
+            .config
+            .resolved
+            .raw_value("mailmap", None, "blob")
+            .ok()
+            .and_then(|spec| {
+                // TODO: actually resolve this as spec (once we can do that)
+                gix_hash::ObjectId::from_hex(spec.as_ref())
+                    .map_err(|e| err.get_or_insert(e.into()))
+                    .ok()
+            });
+        match self.work_dir() {
+            None => {
+                // Bare repository: fall back to `HEAD:.mailmap` unless `mailmap.blob` already provided an id.
+                // TODO: replace with ref-spec `HEAD:.mailmap` for less verbose way of getting the blob id
+                blob_id = blob_id.or_else(|| {
+                    self.head().ok().and_then(|mut head| {
+                        let commit = head.peel_to_commit_in_place().ok()?;
+                        let tree = commit.tree().ok()?;
+                        tree.lookup_entry(Some(".mailmap")).ok()?.map(|e| e.object_id())
+                    })
+                });
+            }
+            Some(root) => {
+                // A missing `.mailmap` file is not an error; any other I/O error is remembered.
+                if let Ok(mut file) = gix_features::fs::open_options_no_follow()
+                    .read(true)
+                    .open(root.join(".mailmap"))
+                    .map_err(|e| {
+                        if e.kind() != std::io::ErrorKind::NotFound {
+                            err.get_or_insert(e.into());
+                        }
+                    })
+                {
+                    buf.clear();
+                    std::io::copy(&mut file, &mut buf)
+                        .map_err(|e| err.get_or_insert(e.into()))
+                        .ok();
+                    target.merge(gix_mailmap::parse_ignore_errors(&buf));
+                }
+            }
+        }
+
+        if let Some(blob) = blob_id.and_then(|id| self.find_object(id).map_err(|e| err.get_or_insert(e.into())).ok()) {
+            target.merge(gix_mailmap::parse_ignore_errors(&blob.data));
+        }
+
+        let configured_path = self
+            .config
+            .resolved
+            .value::<gix_config::Path<'_>>("mailmap", None, "file")
+            .ok()
+            .and_then(|path| {
+                let install_dir = self.install_dir().ok()?;
+                let home = self.config.home_dir();
+                // `~user/` expansion is only permitted for fully trusted repositories.
+                match path.interpolate(gix_config::path::interpolate::Context {
+                    git_install_dir: Some(install_dir.as_path()),
+                    home_dir: home.as_deref(),
+                    home_for_user: if self.options.git_dir_trust.expect("trust is set") == gix_sec::Trust::Full {
+                        Some(gix_config::path::interpolate::home_for_user)
+                    } else {
+                        None
+                    },
+                }) {
+                    Ok(path) => Some(path),
+                    Err(e) => {
+                        err.get_or_insert(e.into());
+                        None
+                    }
+                }
+            });
+
+        if let Some(mut file) =
+            configured_path.and_then(|path| std::fs::File::open(path).map_err(|e| err.get_or_insert(e.into())).ok())
+        {
+            buf.clear();
+            std::io::copy(&mut file, &mut buf)
+                .map_err(|e| err.get_or_insert(e.into()))
+                .ok();
+            target.merge(gix_mailmap::parse_ignore_errors(&buf));
+        }
+
+        // Report the first error seen, if any, after all merge attempts were made.
+        err.map(Err).unwrap_or(Ok(()))
+    }
+}
diff --git a/vendor/gix/src/repository/state.rs b/vendor/gix/src/repository/state.rs
new file mode 100644
index 000000000..4034fe349
--- /dev/null
+++ b/vendor/gix/src/repository/state.rs
@@ -0,0 +1,44 @@
+use crate::state;
+
+impl crate::Repository {
+    /// Returns the status of an in progress operation on a repository or [`None`]
+    /// if no operation is currently in progress.
+    ///
+    /// Not to be confused with the repositories 'status'.
+    pub fn state(&self) -> Option<state::InProgress> {
+        let git_dir = self.path();
+
+        // This is modeled on the logic from wt_status_get_state in git's wt-status.c and
+        // ps1 from gix-prompt.sh.
+        // The order of checks matters: specific marker files are tested before the
+        // directories that contain them.
+
+        if git_dir.join("rebase-apply/applying").is_file() {
+            Some(state::InProgress::ApplyMailbox)
+        } else if git_dir.join("rebase-apply/rebasing").is_file() {
+            Some(state::InProgress::Rebase)
+        } else if git_dir.join("rebase-apply").is_dir() {
+            Some(state::InProgress::ApplyMailboxRebase)
+        } else if git_dir.join("rebase-merge/interactive").is_file() {
+            Some(state::InProgress::RebaseInteractive)
+        } else if git_dir.join("rebase-merge").is_dir() {
+            Some(state::InProgress::Rebase)
+        } else if git_dir.join("CHERRY_PICK_HEAD").is_file() {
+            // A sequencer todo file distinguishes a multi-commit cherry-pick from a single one.
+            if git_dir.join("sequencer/todo").is_file() {
+                Some(state::InProgress::CherryPickSequence)
+            } else {
+                Some(state::InProgress::CherryPick)
+            }
+        } else if git_dir.join("MERGE_HEAD").is_file() {
+            Some(state::InProgress::Merge)
+        } else if git_dir.join("BISECT_LOG").is_file() {
+            Some(state::InProgress::Bisect)
+        } else if git_dir.join("REVERT_HEAD").is_file() {
+            // Same distinction as for cherry-picks, but for reverts.
+            if git_dir.join("sequencer/todo").is_file() {
+                Some(state::InProgress::RevertSequence)
+            } else {
+                Some(state::InProgress::Revert)
+            }
+        } else {
+            None
+        }
+    }
+}
diff --git a/vendor/gix/src/repository/thread_safe.rs b/vendor/gix/src/repository/thread_safe.rs
new file mode 100644
index 000000000..7c89aee60
--- /dev/null
+++ b/vendor/gix/src/repository/thread_safe.rs
@@ -0,0 +1,66 @@
+mod access {
+    use crate::Kind;
+
+    impl crate::ThreadSafeRepository {
+        /// Return the kind of repository, either bare or one with a work tree.
+        pub fn kind(&self) -> Kind {
+            match self.work_tree {
+                Some(_) => Kind::WorkTree {
+                    // A linked worktree carries its own private git dir; `worktree::id` detects it.
+                    is_linked: crate::worktree::id(self.git_dir(), self.common_dir.is_some()).is_some(),
+                },
+                None => Kind::Bare,
+            }
+        }
+
+        /// Add thread-local state to an easy-to-use thread-local repository for the most convenient API.
+        pub fn to_thread_local(&self) -> crate::Repository {
+            self.into()
+        }
+    }
+}
+
+mod location {
+    // Path accessors shared by all clones of this repository.
+
+    impl crate::ThreadSafeRepository {
+        /// The path to the `.git` directory itself, or equivalent if this is a bare repository.
+        pub fn path(&self) -> &std::path::Path {
+            self.git_dir()
+        }
+
+        /// Return the path to the repository itself, containing objects, references, configuration, and more.
+        ///
+        /// Synonymous to [`path()`][crate::ThreadSafeRepository::path()].
+        pub fn git_dir(&self) -> &std::path::Path {
+            self.refs.git_dir()
+        }
+
+        /// Return the path to the working directory if this is not a bare repository.
+        pub fn work_dir(&self) -> Option<&std::path::Path> {
+            self.work_tree.as_deref()
+        }
+
+        /// Return the path to the directory containing all objects.
+        pub fn objects_dir(&self) -> &std::path::Path {
+            self.objects.path()
+        }
+    }
+}
+
+mod impls {
+    impl std::fmt::Debug for crate::ThreadSafeRepository {
+        /// Print a compact single-line representation showing the git dir and optional work tree.
+        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+            // Fixed: the format string previously lacked the closing ')', producing
+            // unbalanced output like `Repository(git = '…', working_tree: None`.
+            write!(
+                f,
+                "Repository(git = '{}', working_tree: {:?})",
+                self.git_dir().display(),
+                self.work_tree
+            )
+        }
+    }
+
+    impl PartialEq<crate::ThreadSafeRepository> for crate::ThreadSafeRepository {
+        /// Two repositories are equal if they point at the same git dir and work tree.
+        fn eq(&self, other: &crate::ThreadSafeRepository) -> bool {
+            self.git_dir() == other.git_dir() && self.work_tree == other.work_tree
+        }
+    }
+}
diff --git a/vendor/gix/src/repository/worktree.rs b/vendor/gix/src/repository/worktree.rs
new file mode 100644
index 000000000..2de31bc86
--- /dev/null
+++ b/vendor/gix/src/repository/worktree.rs
@@ -0,0 +1,119 @@
+use crate::{worktree, Worktree};
+
+/// Worktree iteration
+impl crate::Repository {
+    /// Return a list of all _linked_ worktrees sorted by private git dir path as a lightweight proxy.
+    ///
+    /// Note that these need additional processing to become usable, but provide a first glimpse at typical worktree information.
+    pub fn worktrees(&self) -> std::io::Result<Vec<worktree::Proxy<'_>>> {
+        let mut res = Vec::new();
+        // A missing `worktrees` directory simply means there are no linked worktrees.
+        let iter = match std::fs::read_dir(self.common_dir().join("worktrees")) {
+            Ok(iter) => iter,
+            Err(err) if err.kind() == std::io::ErrorKind::NotFound => return Ok(res),
+            Err(err) => return Err(err),
+        };
+        for entry in iter {
+            let entry = entry?;
+            let worktree_git_dir = entry.path();
+            // Only directories containing a `gitdir` file qualify as worktree private dirs.
+            if worktree_git_dir.join("gitdir").is_file() {
+                res.push(worktree::Proxy {
+                    parent: self,
+                    git_dir: worktree_git_dir,
+                })
+            }
+        }
+        // Sort for deterministic output independent of directory iteration order.
+        res.sort_by(|a, b| a.git_dir.cmp(&b.git_dir));
+        Ok(res)
+    }
+}
+
+/// Interact with individual worktrees and their information.
+impl crate::Repository {
+    /// Return the repository owning the main worktree, typically from a linked worktree.
+    ///
+    /// Note that it might be the one that is currently open if this repository doesn't point to a linked worktree.
+    /// Also note that the main repo might be bare.
+    #[allow(clippy::result_large_err)]
+    pub fn main_repo(&self) -> Result<crate::Repository, crate::open::Error> {
+        crate::ThreadSafeRepository::open_opts(self.common_dir(), self.options.clone()).map(Into::into)
+    }
+
+    /// Return the currently set worktree if there is one, acting as platform providing a validated worktree base path.
+    ///
+    /// Note that there would be `None` if this repository is `bare` and the parent [`Repository`][crate::Repository] was instantiated without
+    /// registered worktree in the current working dir.
+    pub fn worktree(&self) -> Option<Worktree<'_>> {
+        self.work_dir().map(|path| Worktree { parent: self, path })
+    }
+
+    /// Return true if this repository is bare, and has no main work tree.
+    ///
+    /// This is not to be confused with the [`worktree()`][crate::Repository::worktree()] worktree, which may exist if this instance
+    /// was opened in a worktree that was created separately.
+    pub fn is_bare(&self) -> bool {
+        self.config.is_bare && self.work_dir().is_none()
+    }
+
+    /// Open a new copy of the index file and decode it entirely.
+    ///
+    /// It will use the `index.threads` configuration key to learn how many threads to use.
+    /// Note that it may fail if there is no index.
+    // TODO: test
+    pub fn open_index(&self) -> Result<gix_index::File, worktree::open_index::Error> {
+        let thread_limit = self
+            .config
+            .resolved
+            .boolean("index", None, "threads")
+            .map(|res| {
+                // Boolean `true` maps to limit 0, `false` to 1 — presumably 0 means
+                // "no limit"; confirm against `gix_index::decode::Options::thread_limit`.
+                res.map(|value| usize::from(!value)).or_else(|err| {
+                    // Not a boolean: interpret the raw value as an integer thread count instead.
+                    gix_config::Integer::try_from(err.input.as_ref())
+                        .map_err(|err| worktree::open_index::Error::ConfigIndexThreads {
+                            value: err.input.clone(),
+                            err,
+                        })
+                        .map(|value| value.to_decimal().and_then(|v| v.try_into().ok()).unwrap_or(1))
+                })
+            })
+            .transpose()?;
+        gix_index::File::at(
+            self.index_path(),
+            self.object_hash(),
+            gix_index::decode::Options {
+                thread_limit,
+                min_extension_block_in_bytes_for_threading: 0,
+                expected_checksum: None,
+            },
+        )
+        .map_err(Into::into)
+    }
+
+    /// Return a shared worktree index which is updated automatically if the in-memory snapshot has become stale as the underlying file
+    /// on disk has changed.
+    ///
+    /// The index file is shared across all clones of this repository.
+    pub fn index(&self) -> Result<worktree::Index, worktree::open_index::Error> {
+        self.index
+            .recent_snapshot(
+                // Use the file's modification time to detect staleness of the snapshot.
+                || self.index_path().metadata().and_then(|m| m.modified()).ok(),
+                || {
+                    // A missing index file is represented as `None` rather than an error here…
+                    self.open_index().map(Some).or_else(|err| match err {
+                        worktree::open_index::Error::IndexFile(gix_index::file::init::Error::Io(err))
+                            if err.kind() == std::io::ErrorKind::NotFound =>
+                        {
+                            Ok(None)
+                        }
+                        err => Err(err),
+                    })
+                },
+            )
+            // …and turned back into a NotFound error for the caller of this method.
+            .and_then(|opt| match opt {
+                Some(index) => Ok(index),
+                None => Err(worktree::open_index::Error::IndexFile(
+                    gix_index::file::init::Error::Io(std::io::Error::new(
+                        std::io::ErrorKind::NotFound,
+                        format!("Could not find index file at {:?} for opening.", self.index_path()),
+                    )),
+                )),
+            })
+    }
+}
diff --git a/vendor/gix/src/revision/mod.rs b/vendor/gix/src/revision/mod.rs
new file mode 100644
index 000000000..4b11a8766
--- /dev/null
+++ b/vendor/gix/src/revision/mod.rs
@@ -0,0 +1,27 @@
+//! Revisions is the generalized notion of a commit.
+//!
+//! This module provides utilities to walk graphs of revisions and specify revisions and ranges of revisions.
+
+pub use gix_revision as plumbing;
+
+///
+pub mod walk;
+pub use walk::iter::Walk;
+
+///
+pub mod spec;
+
+/// The specification of a revision as parsed from a revision specification like `HEAD@{1}` or `v1.2.3...main`.
+/// It's typically created by [`repo.rev_parse()`][crate::Repository::rev_parse()].
+///
+/// See the [official git documentation](https://git-scm.com/docs/git-rev-parse#_specifying_revisions) for reference on how
+/// to specify revisions and revision ranges.
+#[derive(Clone, Debug)]
+pub struct Spec<'repo> {
+    // The plumbing representation of the parsed spec.
+    pub(crate) inner: gix_revision::Spec,
+    /// The first name of a reference as seen while parsing a `RevSpec`, for completeness.
+    pub(crate) first_ref: Option<gix_ref::Reference>,
+    /// The second name of a reference as seen while parsing a `RevSpec`, for completeness.
+    pub(crate) second_ref: Option<gix_ref::Reference>,
+    // The repository this spec was parsed in, for attaching returned references and ids.
+    pub(crate) repo: &'repo crate::Repository,
+}
diff --git a/vendor/gix/src/revision/spec/mod.rs b/vendor/gix/src/revision/spec/mod.rs
new file mode 100644
index 000000000..a6a6eb739
--- /dev/null
+++ b/vendor/gix/src/revision/spec/mod.rs
@@ -0,0 +1,90 @@
+use crate::{ext::ReferenceExt, revision::Spec, Id, Reference};
+
+///
+pub mod parse;
+
+mod impls {
+ use std::ops::{Deref, DerefMut};
+
+ use crate::revision::Spec;
+
+ impl<'repo> Deref for Spec<'repo> {
+ type Target = gix_revision::Spec;
+
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+ }
+
+ impl<'repo> DerefMut for Spec<'repo> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+ }
+
+ impl<'repo> PartialEq for Spec<'repo> {
+ fn eq(&self, other: &Self) -> bool {
+ self.inner == other.inner
+ }
+ }
+
+ impl<'repo> Eq for Spec<'repo> {}
+}
+
+/// Initialization
+impl<'repo> Spec<'repo> {
+ /// Create a single specification which points to `id`.
+ pub fn from_id(id: Id<'repo>) -> Self {
+ Spec {
+ inner: gix_revision::Spec::Include(id.inner),
+ repo: id.repo,
+ first_ref: None,
+ second_ref: None,
+ }
+ }
+}
+
+/// Access
+impl<'repo> Spec<'repo> {
+ /// Detach the `Repository` from this instance, leaving only plain data that can be moved freely and serialized.
+ pub fn detach(self) -> gix_revision::Spec {
+ self.inner
+ }
+
+ /// Some revision specifications leave information about references which are returned as `(from-ref, to-ref)` here, e.g.
+ /// `HEAD@{-1}..main` might be `(Some(refs/heads/previous-branch), Some(refs/heads/main))`,
+ /// or `@` returns `(Some(refs/heads/main), None)`.
+ pub fn into_references(self) -> (Option<Reference<'repo>>, Option<Reference<'repo>>) {
+ let repo = self.repo;
+ (
+ self.first_ref.map(|r| r.attach(repo)),
+ self.second_ref.map(|r| r.attach(repo)),
+ )
+ }
+
+ /// Return the name of the first reference we encountered while resolving the rev-spec, or `None` if a short hash
+ /// was used. For example, `@` might yield `Some(HEAD)`, but `abcd` yields `None`.
+ pub fn first_reference(&self) -> Option<&gix_ref::Reference> {
+ self.first_ref.as_ref()
+ }
+
+ /// Return the name of the second reference we encountered while resolving the rev-spec, or `None` if a short hash
+ /// was used or there was no second reference. For example, `..@` might yield `Some(HEAD)`, but `..abcd` or `@`
+ /// yields `None`.
+ pub fn second_reference(&self) -> Option<&gix_ref::Reference> {
+ self.second_ref.as_ref()
+ }
+
+ /// Return the single included object represented by this instance, or `None` if it is a range of any kind.
+ pub fn single(&self) -> Option<Id<'repo>> {
+ match self.inner {
+ gix_revision::Spec::Include(id) | gix_revision::Spec::ExcludeParents(id) => {
+ Id::from_id(id, self.repo).into()
+ }
+ gix_revision::Spec::Exclude(_)
+ | gix_revision::Spec::Range { .. }
+ | gix_revision::Spec::Merge { .. }
+ | gix_revision::Spec::IncludeOnlyParents { .. } => None,
+ }
+ }
+}
diff --git a/vendor/gix/src/revision/spec/parse/delegate/mod.rs b/vendor/gix/src/revision/spec/parse/delegate/mod.rs
new file mode 100644
index 000000000..78e4ab9ee
--- /dev/null
+++ b/vendor/gix/src/revision/spec/parse/delegate/mod.rs
@@ -0,0 +1,256 @@
+use std::collections::HashSet;
+
+use gix_hash::ObjectId;
+use gix_revision::spec::{
+ parse,
+ parse::delegate::{self},
+};
+use smallvec::SmallVec;
+
+use super::{Delegate, Error, ObjectKindHint};
+use crate::{
+ ext::{ObjectIdExt, ReferenceExt},
+ Repository,
+};
+
+type Replacements = SmallVec<[(ObjectId, ObjectId); 1]>;
+
+impl<'repo> Delegate<'repo> {
+ pub fn new(repo: &'repo Repository, opts: crate::revision::spec::parse::Options) -> Self {
+ Delegate {
+ refs: Default::default(),
+ objs: Default::default(),
+ ambiguous_objects: Default::default(),
+ idx: 0,
+ kind: None,
+ err: Vec::new(),
+ prefix: Default::default(),
+ last_call_was_disambiguate_prefix: Default::default(),
+ opts,
+ repo,
+ }
+ }
+
+ pub fn into_err(mut self) -> Error {
+ let repo = self.repo;
+ for err in self
+ .ambiguous_objects
+ .iter_mut()
+ .zip(self.prefix)
+ .filter_map(|(a, b)| a.take().filter(|candidates| candidates.len() > 1).zip(b))
+ .map(|(candidates, prefix)| Error::ambiguous(candidates, prefix, repo))
+ .rev()
+ {
+ self.err.insert(0, err);
+ }
+ Error::from_errors(self.err)
+ }
+
+ pub fn into_rev_spec(mut self) -> Result<crate::revision::Spec<'repo>, Error> {
+ fn zero_or_one_objects_or_ambiguity_err(
+ mut candidates: [Option<HashSet<ObjectId>>; 2],
+ prefix: [Option<gix_hash::Prefix>; 2],
+ mut errors: Vec<Error>,
+ repo: &Repository,
+ ) -> Result<[Option<ObjectId>; 2], Error> {
+ let mut out = [None, None];
+ for ((candidates, prefix), out) in candidates.iter_mut().zip(prefix).zip(out.iter_mut()) {
+ let candidates = candidates.take();
+ match candidates {
+ None => *out = None,
+ Some(candidates) => {
+ match candidates.len() {
+ 0 => unreachable!(
+ "BUG: let's avoid still being around if no candidate matched the requirements"
+ ),
+ 1 => {
+ *out = candidates.into_iter().next();
+ }
+ _ => {
+ errors.insert(
+ 0,
+ Error::ambiguous(candidates, prefix.expect("set when obtaining candidates"), repo),
+ );
+ return Err(Error::from_errors(errors));
+ }
+ };
+ }
+ };
+ }
+ Ok(out)
+ }
+
+ fn kind_to_spec(
+ kind: Option<gix_revision::spec::Kind>,
+ [first, second]: [Option<ObjectId>; 2],
+ ) -> Result<gix_revision::Spec, Error> {
+ use gix_revision::spec::Kind::*;
+ Ok(match kind.unwrap_or_default() {
+ IncludeReachable => gix_revision::Spec::Include(first.ok_or(Error::Malformed)?),
+ ExcludeReachable => gix_revision::Spec::Exclude(first.ok_or(Error::Malformed)?),
+ RangeBetween => gix_revision::Spec::Range {
+ from: first.ok_or(Error::Malformed)?,
+ to: second.ok_or(Error::Malformed)?,
+ },
+ ReachableToMergeBase => gix_revision::Spec::Merge {
+ theirs: first.ok_or(Error::Malformed)?,
+ ours: second.ok_or(Error::Malformed)?,
+ },
+ IncludeReachableFromParents => gix_revision::Spec::IncludeOnlyParents(first.ok_or(Error::Malformed)?),
+ ExcludeReachableFromParents => gix_revision::Spec::ExcludeParents(first.ok_or(Error::Malformed)?),
+ })
+ }
+
+ let range = zero_or_one_objects_or_ambiguity_err(self.objs, self.prefix, self.err, self.repo)?;
+ Ok(crate::revision::Spec {
+ first_ref: self.refs[0].take(),
+ second_ref: self.refs[1].take(),
+ inner: kind_to_spec(self.kind, range)?,
+ repo: self.repo,
+ })
+ }
+}
+
+impl<'repo> parse::Delegate for Delegate<'repo> {
+ fn done(&mut self) {
+ self.follow_refs_to_objects_if_needed();
+ self.disambiguate_objects_by_fallback_hint(
+ self.kind_implies_committish()
+ .then_some(ObjectKindHint::Committish)
+ .or(self.opts.object_kind_hint),
+ );
+ }
+}
+
+impl<'repo> delegate::Kind for Delegate<'repo> {
+ fn kind(&mut self, kind: gix_revision::spec::Kind) -> Option<()> {
+ use gix_revision::spec::Kind::*;
+ self.kind = Some(kind);
+
+ if self.kind_implies_committish() {
+ self.disambiguate_objects_by_fallback_hint(ObjectKindHint::Committish.into());
+ }
+ if matches!(kind, RangeBetween | ReachableToMergeBase) {
+ self.idx += 1;
+ }
+
+ Some(())
+ }
+}
+
+impl<'repo> Delegate<'repo> {
+ fn kind_implies_committish(&self) -> bool {
+ self.kind.unwrap_or(gix_revision::spec::Kind::IncludeReachable) != gix_revision::spec::Kind::IncludeReachable
+ }
+ fn disambiguate_objects_by_fallback_hint(&mut self, hint: Option<ObjectKindHint>) {
+ fn require_object_kind(repo: &Repository, obj: &gix_hash::oid, kind: gix_object::Kind) -> Result<(), Error> {
+ let obj = repo.find_object(obj)?;
+ if obj.kind == kind {
+ Ok(())
+ } else {
+ Err(Error::ObjectKind {
+ actual: obj.kind,
+ expected: kind,
+ oid: obj.id.attach(repo).shorten_or_id(),
+ })
+ }
+ }
+
+ if self.last_call_was_disambiguate_prefix[self.idx] {
+ self.unset_disambiguate_call();
+
+ if let Some(objs) = self.objs[self.idx].as_mut() {
+ let repo = self.repo;
+ let errors: Vec<_> = match hint {
+ Some(kind_hint) => match kind_hint {
+ ObjectKindHint::Treeish | ObjectKindHint::Committish => {
+ let kind = match kind_hint {
+ ObjectKindHint::Treeish => gix_object::Kind::Tree,
+ ObjectKindHint::Committish => gix_object::Kind::Commit,
+ _ => unreachable!("BUG: we narrow possibilities above"),
+ };
+ objs.iter()
+ .filter_map(|obj| peel(repo, obj, kind).err().map(|err| (*obj, err)))
+ .collect()
+ }
+ ObjectKindHint::Tree | ObjectKindHint::Commit | ObjectKindHint::Blob => {
+ let kind = match kind_hint {
+ ObjectKindHint::Tree => gix_object::Kind::Tree,
+ ObjectKindHint::Commit => gix_object::Kind::Commit,
+ ObjectKindHint::Blob => gix_object::Kind::Blob,
+ _ => unreachable!("BUG: we narrow possibilities above"),
+ };
+ objs.iter()
+ .filter_map(|obj| require_object_kind(repo, obj, kind).err().map(|err| (*obj, err)))
+ .collect()
+ }
+ },
+ None => return,
+ };
+
+ if errors.len() == objs.len() {
+ self.err.extend(errors.into_iter().map(|(_, err)| err));
+ } else {
+ for (obj, err) in errors {
+ objs.remove(&obj);
+ self.err.push(err);
+ }
+ }
+ }
+ }
+ }
+ fn follow_refs_to_objects_if_needed(&mut self) -> Option<()> {
+ assert_eq!(self.refs.len(), self.objs.len());
+ let repo = self.repo;
+ for (r, obj) in self.refs.iter().zip(self.objs.iter_mut()) {
+ if let (_ref_opt @ Some(ref_), obj_opt @ None) = (r, obj) {
+ if let Some(id) = ref_.target.try_id().map(ToOwned::to_owned).or_else(|| {
+ ref_.clone()
+ .attach(repo)
+ .peel_to_id_in_place()
+ .ok()
+ .map(|id| id.detach())
+ }) {
+ obj_opt.get_or_insert_with(HashSet::default).insert(id);
+ };
+ };
+ }
+ Some(())
+ }
+
+ fn unset_disambiguate_call(&mut self) {
+ self.last_call_was_disambiguate_prefix[self.idx] = false;
+ }
+}
+
+fn peel(repo: &Repository, obj: &gix_hash::oid, kind: gix_object::Kind) -> Result<ObjectId, Error> {
+ let mut obj = repo.find_object(obj)?;
+ obj = obj.peel_to_kind(kind)?;
+ debug_assert_eq!(obj.kind, kind, "bug in Object::peel_to_kind() which didn't deliver");
+ Ok(obj.id)
+}
+
+fn handle_errors_and_replacements(
+ destination: &mut Vec<Error>,
+ objs: &mut HashSet<ObjectId>,
+ errors: Vec<(ObjectId, Error)>,
+ replacements: &mut Replacements,
+) -> Option<()> {
+ if errors.len() == objs.len() {
+ destination.extend(errors.into_iter().map(|(_, err)| err));
+ None
+ } else {
+ for (obj, err) in errors {
+ objs.remove(&obj);
+ destination.push(err);
+ }
+ for (find, replace) in replacements {
+ objs.remove(find);
+ objs.insert(*replace);
+ }
+ Some(())
+ }
+}
+
+mod navigate;
+mod revision;
diff --git a/vendor/gix/src/revision/spec/parse/delegate/navigate.rs b/vendor/gix/src/revision/spec/parse/delegate/navigate.rs
new file mode 100644
index 000000000..882c2835c
--- /dev/null
+++ b/vendor/gix/src/revision/spec/parse/delegate/navigate.rs
@@ -0,0 +1,340 @@
+use std::collections::HashSet;
+
+use gix_hash::ObjectId;
+use gix_revision::spec::parse::{
+ delegate,
+ delegate::{PeelTo, Traversal},
+};
+use gix_traverse::commit::Sorting;
+
+use crate::{
+ bstr::{BStr, ByteSlice},
+ ext::ObjectIdExt,
+ object,
+ revision::spec::parse::{
+ delegate::{handle_errors_and_replacements, peel, Replacements},
+ Delegate, Error,
+ },
+};
+
+impl<'repo> delegate::Navigate for Delegate<'repo> {
+ fn traverse(&mut self, kind: Traversal) -> Option<()> {
+ self.unset_disambiguate_call();
+ self.follow_refs_to_objects_if_needed()?;
+
+ let mut replacements = Replacements::default();
+ let mut errors = Vec::new();
+ let objs = self.objs[self.idx].as_mut()?;
+ let repo = self.repo;
+
+ for obj in objs.iter() {
+ match kind {
+ Traversal::NthParent(num) => {
+ match self.repo.find_object(*obj).map_err(Error::from).and_then(|obj| {
+ obj.try_into_commit().map_err(|err| {
+ let object::try_into::Error { actual, expected, id } = err;
+ Error::ObjectKind {
+ oid: id.attach(repo).shorten_or_id(),
+ actual,
+ expected,
+ }
+ })
+ }) {
+ Ok(commit) => match commit.parent_ids().nth(num.saturating_sub(1)) {
+ Some(id) => replacements.push((commit.id, id.detach())),
+ None => errors.push((
+ commit.id,
+ Error::ParentOutOfRange {
+ oid: commit.id().shorten_or_id(),
+ desired: num,
+ available: commit.parent_ids().count(),
+ },
+ )),
+ },
+ Err(err) => errors.push((*obj, err)),
+ }
+ }
+ Traversal::NthAncestor(num) => {
+ let id = obj.attach(repo);
+ match id
+ .ancestors()
+ .first_parent_only()
+ .all()
+ .expect("cannot fail without sorting")
+ .skip(num)
+ .filter_map(Result::ok)
+ .next()
+ {
+ Some(id) => replacements.push((*obj, id.detach())),
+ None => errors.push((
+ *obj,
+ Error::AncestorOutOfRange {
+ oid: id.shorten_or_id(),
+ desired: num,
+ available: id
+ .ancestors()
+ .first_parent_only()
+ .all()
+ .expect("cannot fail without sorting")
+ .skip(1)
+ .count(),
+ },
+ )),
+ }
+ }
+ }
+ }
+
+ handle_errors_and_replacements(&mut self.err, objs, errors, &mut replacements)
+ }
+
+ fn peel_until(&mut self, kind: PeelTo<'_>) -> Option<()> {
+ self.unset_disambiguate_call();
+ self.follow_refs_to_objects_if_needed()?;
+
+ let mut replacements = Replacements::default();
+ let mut errors = Vec::new();
+ let objs = self.objs[self.idx].as_mut()?;
+ let repo = self.repo;
+
+ match kind {
+ PeelTo::ValidObject => {
+ for obj in objs.iter() {
+ match repo.find_object(*obj) {
+ Ok(_) => {}
+ Err(err) => {
+ errors.push((*obj, err.into()));
+ }
+ };
+ }
+ }
+ PeelTo::ObjectKind(kind) => {
+ let peel = |obj| peel(repo, obj, kind);
+ for obj in objs.iter() {
+ match peel(obj) {
+ Ok(replace) => replacements.push((*obj, replace)),
+ Err(err) => errors.push((*obj, err)),
+ }
+ }
+ }
+ PeelTo::Path(path) => {
+ let lookup_path = |obj: &ObjectId| {
+ let tree_id = peel(repo, obj, gix_object::Kind::Tree)?;
+ if path.is_empty() {
+ return Ok(tree_id);
+ }
+ let tree = repo.find_object(tree_id)?.into_tree();
+ let entry =
+ tree.lookup_entry_by_path(gix_path::from_bstr(path))?
+ .ok_or_else(|| Error::PathNotFound {
+ path: path.into(),
+ object: obj.attach(repo).shorten_or_id(),
+ tree: tree_id.attach(repo).shorten_or_id(),
+ })?;
+ Ok(entry.object_id())
+ };
+ for obj in objs.iter() {
+ match lookup_path(obj) {
+ Ok(replace) => replacements.push((*obj, replace)),
+ Err(err) => errors.push((*obj, err)),
+ }
+ }
+ }
+ PeelTo::RecursiveTagObject => {
+ for oid in objs.iter() {
+ match oid.attach(repo).object().and_then(|obj| obj.peel_tags_to_end()) {
+ Ok(obj) => replacements.push((*oid, obj.id)),
+ Err(err) => errors.push((*oid, err.into())),
+ }
+ }
+ }
+ }
+
+ handle_errors_and_replacements(&mut self.err, objs, errors, &mut replacements)
+ }
+
+ fn find(&mut self, regex: &BStr, negated: bool) -> Option<()> {
+ self.unset_disambiguate_call();
+ self.follow_refs_to_objects_if_needed()?;
+
+ #[cfg(not(feature = "regex"))]
+ let matches = |message: &BStr| -> bool { message.contains_str(regex) ^ negated };
+ #[cfg(feature = "regex")]
+ let matches = match regex::bytes::Regex::new(regex.to_str_lossy().as_ref()) {
+ Ok(compiled) => {
+ let needs_regex = regex::escape(compiled.as_str()) != regex;
+ move |message: &BStr| -> bool {
+ if needs_regex {
+ compiled.is_match(message) ^ negated
+ } else {
+ message.contains_str(regex) ^ negated
+ }
+ }
+ }
+ Err(err) => {
+ self.err.push(err.into());
+ return None;
+ }
+ };
+
+ match self.objs[self.idx].as_mut() {
+ Some(objs) => {
+ let repo = self.repo;
+ let mut errors = Vec::new();
+ let mut replacements = Replacements::default();
+ for oid in objs.iter() {
+ match oid
+ .attach(repo)
+ .ancestors()
+ .sorting(Sorting::ByCommitTimeNewestFirst)
+ .all()
+ {
+ Ok(iter) => {
+ let mut matched = false;
+ let mut count = 0;
+ let commits = iter.map(|res| {
+ res.map_err(Error::from).and_then(|commit_id| {
+ commit_id.object().map_err(Error::from).map(|obj| obj.into_commit())
+ })
+ });
+ for commit in commits {
+ count += 1;
+ match commit {
+ Ok(commit) => {
+ if matches(commit.message_raw_sloppy()) {
+ replacements.push((*oid, commit.id));
+ matched = true;
+ break;
+ }
+ }
+ Err(err) => errors.push((*oid, err)),
+ }
+ }
+ if !matched {
+ errors.push((
+ *oid,
+ Error::NoRegexMatch {
+ regex: regex.into(),
+ commits_searched: count,
+ oid: oid.attach(repo).shorten_or_id(),
+ },
+ ))
+ }
+ }
+ Err(err) => errors.push((*oid, err.into())),
+ }
+ }
+ handle_errors_and_replacements(&mut self.err, objs, errors, &mut replacements)
+ }
+ None => match self.repo.references() {
+ Ok(references) => match references.all() {
+ Ok(references) => {
+ match self
+ .repo
+ .rev_walk(
+ references
+ .peeled()
+ .filter_map(Result::ok)
+ .filter(|r| {
+ r.id()
+ .object()
+ .ok()
+ .map(|obj| obj.kind == gix_object::Kind::Commit)
+ .unwrap_or(false)
+ })
+ .filter_map(|r| r.detach().peeled),
+ )
+ .sorting(Sorting::ByCommitTimeNewestFirst)
+ .all()
+ {
+ Ok(iter) => {
+ let mut matched = false;
+ let mut count = 0;
+ let commits = iter.map(|res| {
+ res.map_err(Error::from).and_then(|commit_id| {
+ commit_id.object().map_err(Error::from).map(|obj| obj.into_commit())
+ })
+ });
+ for commit in commits {
+ count += 1;
+ match commit {
+ Ok(commit) => {
+ if matches(commit.message_raw_sloppy()) {
+ self.objs[self.idx]
+ .get_or_insert_with(HashSet::default)
+ .insert(commit.id);
+ matched = true;
+ break;
+ }
+ }
+ Err(err) => self.err.push(err),
+ }
+ }
+ if matched {
+ Some(())
+ } else {
+ self.err.push(Error::NoRegexMatchAllRefs {
+ regex: regex.into(),
+ commits_searched: count,
+ });
+ None
+ }
+ }
+ Err(err) => {
+ self.err.push(err.into());
+ None
+ }
+ }
+ }
+ Err(err) => {
+ self.err.push(err.into());
+ None
+ }
+ },
+ Err(err) => {
+ self.err.push(err.into());
+ None
+ }
+ },
+ }
+ }
+
+ fn index_lookup(&mut self, path: &BStr, stage: u8) -> Option<()> {
+ self.unset_disambiguate_call();
+ match self.repo.index() {
+ Ok(index) => match index.entry_by_path_and_stage(path, stage.into()) {
+ Some(entry) => {
+ self.objs[self.idx]
+ .get_or_insert_with(HashSet::default)
+ .insert(entry.id);
+ Some(())
+ }
+ None => {
+ let stage_hint = [0, 1, 2]
+ .iter()
+ .filter(|our_stage| **our_stage != stage)
+ .find_map(|stage| {
+ index
+ .entry_index_by_path_and_stage(path, (*stage).into())
+ .map(|_| (*stage).into())
+ });
+ let exists = self
+ .repo
+ .work_dir()
+ .map_or(false, |root| root.join(gix_path::from_bstr(path)).exists());
+ self.err.push(Error::IndexLookup {
+ desired_path: path.into(),
+ desired_stage: stage.into(),
+ exists,
+ stage_hint,
+ });
+ None
+ }
+ },
+ Err(err) => {
+ self.err.push(err.into());
+ None
+ }
+ }
+ }
+}
diff --git a/vendor/gix/src/revision/spec/parse/delegate/revision.rs b/vendor/gix/src/revision/spec/parse/delegate/revision.rs
new file mode 100644
index 000000000..7ea691a28
--- /dev/null
+++ b/vendor/gix/src/revision/spec/parse/delegate/revision.rs
@@ -0,0 +1,225 @@
+use std::collections::HashSet;
+
+use gix_hash::ObjectId;
+use gix_revision::spec::parse::{
+ delegate,
+ delegate::{ReflogLookup, SiblingBranch},
+};
+
+use crate::{
+ bstr::{BStr, BString, ByteSlice},
+ ext::ReferenceExt,
+ revision::spec::parse::{Delegate, Error, RefsHint},
+};
+
+impl<'repo> delegate::Revision for Delegate<'repo> {
+ fn find_ref(&mut self, name: &BStr) -> Option<()> {
+ self.unset_disambiguate_call();
+ if !self.err.is_empty() && self.refs[self.idx].is_some() {
+ return None;
+ }
+ match self.repo.refs.find(name) {
+ Ok(r) => {
+ assert!(self.refs[self.idx].is_none(), "BUG: cannot set the same ref twice");
+ self.refs[self.idx] = Some(r);
+ Some(())
+ }
+ Err(err) => {
+ self.err.push(err.into());
+ None
+ }
+ }
+ }
+
+ fn disambiguate_prefix(
+ &mut self,
+ prefix: gix_hash::Prefix,
+ _must_be_commit: Option<delegate::PrefixHint<'_>>,
+ ) -> Option<()> {
+ self.last_call_was_disambiguate_prefix[self.idx] = true;
+ let mut candidates = Some(HashSet::default());
+ self.prefix[self.idx] = Some(prefix);
+
+ let empty_tree_id = gix_hash::ObjectId::empty_tree(prefix.as_oid().kind());
+ let res = if prefix.as_oid() == empty_tree_id {
+ candidates.as_mut().expect("set").insert(empty_tree_id);
+ Ok(Some(Err(())))
+ } else {
+ self.repo.objects.lookup_prefix(prefix, candidates.as_mut())
+ };
+
+ match res {
+ Err(err) => {
+ self.err.push(err.into());
+ None
+ }
+ Ok(None) => {
+ self.err.push(Error::PrefixNotFound { prefix });
+ None
+ }
+ Ok(Some(Ok(_) | Err(()))) => {
+ assert!(self.objs[self.idx].is_none(), "BUG: cannot set the same prefix twice");
+ let candidates = candidates.expect("set above");
+ match self.opts.refs_hint {
+ RefsHint::PreferObjectOnFullLengthHexShaUseRefOtherwise
+ if prefix.hex_len() == candidates.iter().next().expect("at least one").kind().len_in_hex() =>
+ {
+ self.ambiguous_objects[self.idx] = Some(candidates.clone());
+ self.objs[self.idx] = Some(candidates);
+ Some(())
+ }
+ RefsHint::PreferObject => {
+ self.ambiguous_objects[self.idx] = Some(candidates.clone());
+ self.objs[self.idx] = Some(candidates);
+ Some(())
+ }
+ RefsHint::PreferRef | RefsHint::PreferObjectOnFullLengthHexShaUseRefOtherwise | RefsHint::Fail => {
+ match self.repo.refs.find(&prefix.to_string()) {
+ Ok(ref_) => {
+ assert!(self.refs[self.idx].is_none(), "BUG: cannot set the same ref twice");
+ if self.opts.refs_hint == RefsHint::Fail {
+ self.refs[self.idx] = Some(ref_.clone());
+ self.err.push(Error::AmbiguousRefAndObject {
+ prefix,
+ reference: ref_,
+ });
+ self.err.push(Error::ambiguous(candidates, prefix, self.repo));
+ None
+ } else {
+ self.refs[self.idx] = Some(ref_);
+ Some(())
+ }
+ }
+ Err(_) => {
+ self.ambiguous_objects[self.idx] = Some(candidates.clone());
+ self.objs[self.idx] = Some(candidates);
+ Some(())
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ fn reflog(&mut self, query: ReflogLookup) -> Option<()> {
+ self.unset_disambiguate_call();
+ match query {
+ ReflogLookup::Date(_date) => {
+ self.err.push(Error::Planned {
+ dependency: "remote handling and ref-specs are fleshed out more",
+ });
+ None
+ }
+ ReflogLookup::Entry(no) => {
+ let r = match &mut self.refs[self.idx] {
+ Some(r) => r.clone().attach(self.repo),
+ val @ None => match self.repo.head().map(|head| head.try_into_referent()) {
+ Ok(Some(r)) => {
+ *val = Some(r.clone().detach());
+ r
+ }
+ Ok(None) => {
+ self.err.push(Error::UnbornHeadsHaveNoRefLog);
+ return None;
+ }
+ Err(err) => {
+ self.err.push(err.into());
+ return None;
+ }
+ },
+ };
+ let mut platform = r.log_iter();
+ match platform.rev().ok().flatten() {
+ Some(mut it) => match it.nth(no).and_then(Result::ok) {
+ Some(line) => {
+ self.objs[self.idx]
+ .get_or_insert_with(HashSet::default)
+ .insert(line.new_oid);
+ Some(())
+ }
+ None => {
+ let available = platform.rev().ok().flatten().map_or(0, |it| it.count());
+ self.err.push(Error::RefLogEntryOutOfRange {
+ reference: r.detach(),
+ desired: no,
+ available,
+ });
+ None
+ }
+ },
+ None => {
+ self.err.push(Error::MissingRefLog {
+ reference: r.name().as_bstr().into(),
+ action: "lookup entry",
+ });
+ None
+ }
+ }
+ }
+ }
+ }
+
+ fn nth_checked_out_branch(&mut self, branch_no: usize) -> Option<()> {
+ self.unset_disambiguate_call();
+ fn prior_checkouts_iter<'a>(
+ platform: &'a mut gix_ref::file::log::iter::Platform<'static, '_>,
+ ) -> Result<impl Iterator<Item = (BString, ObjectId)> + 'a, Error> {
+ match platform.rev().ok().flatten() {
+ Some(log) => Ok(log.filter_map(Result::ok).filter_map(|line| {
+ line.message
+ .strip_prefix(b"checkout: moving from ")
+ .and_then(|from_to| from_to.find(" to ").map(|pos| &from_to[..pos]))
+ .map(|from_branch| (from_branch.into(), line.previous_oid))
+ })),
+ None => Err(Error::MissingRefLog {
+ reference: "HEAD".into(),
+ action: "search prior checked out branch",
+ }),
+ }
+ }
+
+ let head = match self.repo.head() {
+ Ok(head) => head,
+ Err(err) => {
+ self.err.push(err.into());
+ return None;
+ }
+ };
+ match prior_checkouts_iter(&mut head.log_iter()).map(|mut it| it.nth(branch_no.saturating_sub(1))) {
+ Ok(Some((ref_name, id))) => {
+ let id = match self.repo.find_reference(ref_name.as_bstr()) {
+ Ok(mut r) => {
+ let id = r.peel_to_id_in_place().map(|id| id.detach()).unwrap_or(id);
+ self.refs[self.idx] = Some(r.detach());
+ id
+ }
+ Err(_) => id,
+ };
+ self.objs[self.idx].get_or_insert_with(HashSet::default).insert(id);
+ Some(())
+ }
+ Ok(None) => {
+ self.err.push(Error::PriorCheckoutOutOfRange {
+ desired: branch_no,
+ available: prior_checkouts_iter(&mut head.log_iter())
+ .map(|it| it.count())
+ .unwrap_or(0),
+ });
+ None
+ }
+ Err(err) => {
+ self.err.push(err);
+ None
+ }
+ }
+ }
+
+ fn sibling_branch(&mut self, _kind: SiblingBranch) -> Option<()> {
+ self.unset_disambiguate_call();
+ self.err.push(Error::Planned {
+ dependency: "remote handling and ref-specs are fleshed out more",
+ });
+ None
+ }
+}
diff --git a/vendor/gix/src/revision/spec/parse/error.rs b/vendor/gix/src/revision/spec/parse/error.rs
new file mode 100644
index 000000000..3af4697b0
--- /dev/null
+++ b/vendor/gix/src/revision/spec/parse/error.rs
@@ -0,0 +1,130 @@
+use std::collections::HashSet;
+
+use gix_hash::ObjectId;
+
+use super::Error;
+use crate::{bstr, bstr::BString, ext::ObjectIdExt, Repository};
+
+/// Additional information about candidates that caused ambiguity.
+#[derive(Debug)]
+pub enum CandidateInfo {
+ /// An error occurred when looking up the object in the database.
+ FindError {
+ /// The reported error.
+ source: crate::object::find::existing::Error,
+ },
+ /// The candidate is an object of the given `kind`.
+ Object {
+ /// The kind of the object.
+ kind: gix_object::Kind,
+ },
+ /// The candidate is a tag.
+ Tag {
+ /// The name of the tag.
+ name: BString,
+ },
+ /// The candidate is a commit.
+ Commit {
+ /// The date of the commit.
+ date: gix_date::Time,
+ /// The subject line.
+ title: BString,
+ },
+}
+
+impl std::fmt::Display for CandidateInfo {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match self {
+ CandidateInfo::FindError { source } => write!(f, "lookup error: {source}"),
+ CandidateInfo::Tag { name } => write!(f, "tag {name:?}"),
+ CandidateInfo::Object { kind } => std::fmt::Display::fmt(kind, f),
+ CandidateInfo::Commit { date, title } => {
+ write!(f, "commit {} {title:?}", date.format(gix_date::time::format::SHORT))
+ }
+ }
+ }
+}
+
+impl Error {
+ pub(crate) fn ambiguous(candidates: HashSet<ObjectId>, prefix: gix_hash::Prefix, repo: &Repository) -> Self {
+ #[derive(PartialOrd, Ord, Eq, PartialEq, Copy, Clone)]
+ enum Order {
+ Tag,
+ Commit,
+ Tree,
+ Blob,
+ Invalid,
+ }
+ let candidates = {
+ let mut c: Vec<_> = candidates
+ .into_iter()
+ .map(|oid| {
+ let obj = repo.find_object(oid);
+ let order = match &obj {
+ Err(_) => Order::Invalid,
+ Ok(obj) => match obj.kind {
+ gix_object::Kind::Tag => Order::Tag,
+ gix_object::Kind::Commit => Order::Commit,
+ gix_object::Kind::Tree => Order::Tree,
+ gix_object::Kind::Blob => Order::Blob,
+ },
+ };
+ (oid, obj, order)
+ })
+ .collect();
+ c.sort_by(|lhs, rhs| lhs.2.cmp(&rhs.2).then_with(|| lhs.0.cmp(&rhs.0)));
+ c
+ };
+ Error::AmbiguousPrefix {
+ prefix,
+ info: candidates
+ .into_iter()
+ .map(|(oid, find_result, _)| {
+ let info = match find_result {
+ Ok(obj) => match obj.kind {
+ gix_object::Kind::Tree | gix_object::Kind::Blob => CandidateInfo::Object { kind: obj.kind },
+ gix_object::Kind::Tag => {
+ let tag = obj.to_tag_ref();
+ CandidateInfo::Tag { name: tag.name.into() }
+ }
+ gix_object::Kind::Commit => {
+ use bstr::ByteSlice;
+ let commit = obj.to_commit_ref();
+ CandidateInfo::Commit {
+ date: commit.committer().time,
+ title: commit.message().title.trim().into(),
+ }
+ }
+ },
+ Err(err) => CandidateInfo::FindError { source: err },
+ };
+ (oid.attach(repo).shorten().unwrap_or_else(|_| oid.into()), info)
+ })
+ .collect(),
+ }
+ }
+
+ pub(crate) fn from_errors(errors: Vec<Self>) -> Self {
+ assert!(!errors.is_empty());
+ match errors.len() {
+ 0 => unreachable!(
+ "BUG: cannot create something from nothing, must have recorded some errors to call from_errors()"
+ ),
+ 1 => errors.into_iter().next().expect("one"),
+ _ => {
+ let mut it = errors.into_iter().rev();
+ let mut recent = Error::Multi {
+ current: Box::new(it.next().expect("at least one error")),
+ next: None,
+ };
+ for err in it {
+ recent = Error::Multi {
+ current: Box::new(err),
+ next: Some(Box::new(recent)),
+ }
+ }
+ recent
+ }
+ }
+ }
+}
diff --git a/vendor/gix/src/revision/spec/parse/mod.rs b/vendor/gix/src/revision/spec/parse/mod.rs
new file mode 100644
index 000000000..f69ecc4af
--- /dev/null
+++ b/vendor/gix/src/revision/spec/parse/mod.rs
@@ -0,0 +1,61 @@
+use std::collections::HashSet;
+
+use gix_hash::ObjectId;
+use gix_revision::spec::parse;
+
+use crate::{bstr::BStr, revision::Spec, Repository};
+
+mod types;
+pub use types::{Error, ObjectKindHint, Options, RefsHint};
+
+///
+pub mod single {
+ use crate::bstr::BString;
+
+ /// The error returned by [`crate::Repository::rev_parse_single()`].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ Parse(#[from] super::Error),
+ #[error("revspec {spec:?} did not resolve to a single object")]
+ RangedRev { spec: BString },
+ }
+}
+
+///
+pub mod error;
+
+impl<'repo> Spec<'repo> {
+ /// Parse `spec` and use information from `repo` to resolve it, using `opts` to learn how to deal with ambiguity.
+ ///
+ /// Note that it's easier and to use [`repo.rev_parse()`][Repository::rev_parse()] instead.
+ pub fn from_bstr<'a>(spec: impl Into<&'a BStr>, repo: &'repo Repository, opts: Options) -> Result<Self, Error> {
+ let mut delegate = Delegate::new(repo, opts);
+ match gix_revision::spec::parse(spec.into(), &mut delegate) {
+ Err(parse::Error::Delegate) => Err(delegate.into_err()),
+ Err(err) => Err(err.into()),
+ Ok(()) => delegate.into_rev_spec(),
+ }
+ }
+}
+
+struct Delegate<'repo> {
+ refs: [Option<gix_ref::Reference>; 2],
+ objs: [Option<HashSet<ObjectId>>; 2],
+ /// The originally encountered ambiguous objects for potential later use in errors.
+ ambiguous_objects: [Option<HashSet<ObjectId>>; 2],
+ idx: usize,
+ kind: Option<gix_revision::spec::Kind>,
+
+ opts: Options,
+ err: Vec<Error>,
+ /// The ambiguous prefix obtained during a call to `disambiguate_prefix()`.
+ prefix: [Option<gix_hash::Prefix>; 2],
+ /// If true, we didn't try to do any other transformation which might have helped with disambiguation.
+ last_call_was_disambiguate_prefix: [bool; 2],
+
+ repo: &'repo Repository,
+}
+
+mod delegate;
diff --git a/vendor/gix/src/revision/spec/parse/types.rs b/vendor/gix/src/revision/spec/parse/types.rs
new file mode 100644
index 000000000..4e523ab14
--- /dev/null
+++ b/vendor/gix/src/revision/spec/parse/types.rs
@@ -0,0 +1,182 @@
+use crate::{bstr::BString, object, reference};
+
+/// A hint to know what to do if refs and object names are equal.
+///
+/// The default is [`RefsHint::PreferObjectOnFullLengthHexShaUseRefOtherwise`].
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+pub enum RefsHint {
+ /// This is the default, and leads to specs that look like objects identified by full hex sha and are objects to be used
+ /// instead of similarly named references. The latter is not typical but can absolutely happen by accident.
+ /// If the object prefix is shorter than the maximum hash length of the repository, use the reference instead, which is
+ /// preferred as there are many valid object names like `beef` and `cafe` that are short and both valid and typical prefixes
+ /// for objects.
+ /// Git chooses this as default as well, even though it means that every object prefix is also looked up as ref.
+ PreferObjectOnFullLengthHexShaUseRefOtherwise,
+ /// No matter what, if it looks like an object prefix and has an object, use it.
+ /// Note that no ref-lookup is made here which is the fastest option.
+ PreferObject,
+ /// When an object is found for a given prefix, also check if a reference exists with that name and if it does,
+ /// use that moving forward.
+ PreferRef,
+ /// If there is an ambiguous situation, instead of silently choosing one over the other, fail instead.
+ Fail,
+}
+
+/// A hint to know which object kind to prefer if multiple objects match a prefix.
+///
+/// This disambiguation mechanism is applied only if there are no disambiguation hints in the spec itself.
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+pub enum ObjectKindHint {
+ /// Pick objects that are commits themselves.
+ Commit,
+ /// Pick objects that can be peeled into a commit, i.e. commits themselves or tags which are peeled until a commit is found.
+ Committish,
+ /// Pick objects that are trees themselves.
+ Tree,
+ /// Pick objects that can be peeled into a tree, i.e. trees themselves or tags which are peeled until a tree is found or commits
+ /// whose tree is chosen.
+ Treeish,
+ /// Pick objects that are blobs.
+ Blob,
+}
+
+impl Default for RefsHint {
+ fn default() -> Self {
+ // Match git's own default behavior, as documented on the variant itself.
+ RefsHint::PreferObjectOnFullLengthHexShaUseRefOtherwise
+ }
+}
+
+/// Options for use in [`revision::Spec::from_bstr()`][crate::revision::Spec::from_bstr()].
+#[derive(Debug, Default, Copy, Clone)]
+pub struct Options {
+ /// What to do if both refs and object names match the same input.
+ pub refs_hint: RefsHint,
+ /// The hint to use when encountering multiple objects matching a prefix.
+ ///
+ /// If `None`, the rev-spec itself must disambiguate the object by drilling down to desired kinds or applying
+ /// other disambiguating transformations.
+ pub object_kind_hint: Option<ObjectKindHint>,
+}
+
+/// The error returned by [`crate::Repository::rev_parse()`].
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum Error {
+ #[error("The rev-spec is malformed and misses a ref name")]
+ Malformed,
+ #[error("Unborn heads do not have a reflog yet")]
+ UnbornHeadsHaveNoRefLog,
+ #[error("This feature will be implemented once {dependency}")]
+ Planned { dependency: &'static str },
+ #[error("Reference {reference:?} does not have a reference log, cannot {action}")]
+ MissingRefLog { reference: BString, action: &'static str },
+ #[error("HEAD has {available} prior checkouts and checkout number {desired} is out of range")]
+ PriorCheckoutOutOfRange { desired: usize, available: usize },
+ #[error("Reference {:?} has {available} ref-log entries and entry number {desired} is out of range", reference.name.as_bstr())]
+ RefLogEntryOutOfRange {
+ reference: gix_ref::Reference,
+ desired: usize,
+ available: usize,
+ },
+ #[error(
+ "Commit {oid} has {available} ancestors along the first parent and ancestor number {desired} is out of range"
+ )]
+ AncestorOutOfRange {
+ oid: gix_hash::Prefix,
+ desired: usize,
+ available: usize,
+ },
+ #[error("Commit {oid} has {available} parents and parent number {desired} is out of range")]
+ ParentOutOfRange {
+ oid: gix_hash::Prefix,
+ desired: usize,
+ available: usize,
+ },
+ #[error("Path {desired_path:?} did not exist in index at stage {desired_stage}{}{}", stage_hint.map(|actual|format!(". It does exist at stage {actual}")).unwrap_or_default(), exists.then(|| ". It exists on disk").unwrap_or(". It does not exist on disk"))]
+ IndexLookup {
+ desired_path: BString,
+ desired_stage: gix_index::entry::Stage,
+ stage_hint: Option<gix_index::entry::Stage>,
+ exists: bool,
+ },
+ #[error(transparent)]
+ FindHead(#[from] reference::find::existing::Error),
+ #[error(transparent)]
+ Index(#[from] crate::worktree::open_index::Error),
+ #[error(transparent)]
+ RevWalkIterInit(#[from] crate::reference::iter::init::Error),
+ #[error(transparent)]
+ RevWalkAllReferences(#[from] gix_ref::packed::buffer::open::Error),
+ #[cfg(feature = "regex")]
+ #[error(transparent)]
+ InvalidRegex(#[from] regex::Error),
+ // The same variant serves the regex-enabled and the plain-text search; the message is chosen at compile time.
+ #[cfg_attr(
+ feature = "regex",
+ error("None of {commits_searched} commits from {oid} matched regex {regex:?}")
+ )]
+ #[cfg_attr(
+ not(feature = "regex"),
+ error("None of {commits_searched} commits from {oid} matched text {regex:?}")
+ )]
+ NoRegexMatch {
+ regex: BString,
+ oid: gix_hash::Prefix,
+ commits_searched: usize,
+ },
+ #[cfg_attr(
+ feature = "regex",
+ error("None of {commits_searched} commits reached from all references matched regex {regex:?}")
+ )]
+ #[cfg_attr(
+ not(feature = "regex"),
+ error("None of {commits_searched} commits reached from all references matched text {regex:?}")
+ )]
+ NoRegexMatchAllRefs { regex: BString, commits_searched: usize },
+ #[error(
+ "The short hash {prefix} matched both the reference {} and at least one object", reference.name)]
+ AmbiguousRefAndObject {
+ /// The prefix to look for.
+ prefix: gix_hash::Prefix,
+ /// The reference matching the prefix.
+ reference: gix_ref::Reference,
+ },
+ #[error(transparent)]
+ IdFromHex(#[from] gix_hash::decode::Error),
+ #[error(transparent)]
+ FindReference(#[from] gix_ref::file::find::existing::Error),
+ #[error(transparent)]
+ FindObject(#[from] object::find::existing::Error),
+ #[error(transparent)]
+ LookupPrefix(#[from] gix_odb::store::prefix::lookup::Error),
+ #[error(transparent)]
+ PeelToKind(#[from] object::peel::to_kind::Error),
+ #[error("Object {oid} was a {actual}, but needed it to be a {expected}")]
+ ObjectKind {
+ oid: gix_hash::Prefix,
+ actual: gix_object::Kind,
+ expected: gix_object::Kind,
+ },
+ #[error(transparent)]
+ Parse(#[from] gix_revision::spec::parse::Error),
+ #[error("An object prefixed {prefix} could not be found")]
+ PrefixNotFound { prefix: gix_hash::Prefix },
+ #[error("Short id {prefix} is ambiguous. Candidates are:\n{}", info.iter().map(|(oid, info)| format!("\t{oid} {info}")).collect::<Vec<_>>().join("\n"))]
+ AmbiguousPrefix {
+ prefix: gix_hash::Prefix,
+ info: Vec<(gix_hash::Prefix, super::error::CandidateInfo)>,
+ },
+ #[error("Could not find path {path:?} in tree {tree} of parent object {object}")]
+ PathNotFound {
+ object: gix_hash::Prefix,
+ tree: gix_hash::Prefix,
+ path: BString,
+ },
+ // A chain of errors: `current` is displayed, while `next` is exposed as its `source()`.
+ #[error("{current}")]
+ Multi {
+ current: Box<dyn std::error::Error + Send + Sync + 'static>,
+ #[source]
+ next: Option<Box<dyn std::error::Error + Send + Sync + 'static>>,
+ },
+ #[error(transparent)]
+ Traverse(#[from] gix_traverse::commit::ancestors::Error),
+ #[error("Spec does not contain a single object id")]
+ SingleNotFound,
+}
diff --git a/vendor/gix/src/revision/walk.rs b/vendor/gix/src/revision/walk.rs
new file mode 100644
index 000000000..5b04b43a7
--- /dev/null
+++ b/vendor/gix/src/revision/walk.rs
@@ -0,0 +1,127 @@
+use gix_hash::ObjectId;
+use gix_odb::FindExt;
+
+use crate::{revision, Repository};
+
+/// A platform to traverse the revision graph by adding starting points as well as points which shouldn't be crossed,
+/// returned by [`Repository::rev_walk()`].
+pub struct Platform<'repo> {
+ /// The owning repository.
+ pub(crate) repo: &'repo Repository,
+ /// The commits from which to start the traversal.
+ pub(crate) tips: Vec<ObjectId>,
+ /// The order in which to yield commits, defaulting to topological order.
+ pub(crate) sorting: gix_traverse::commit::Sorting,
+ /// Whether to follow all parents or only the first one.
+ pub(crate) parents: gix_traverse::commit::Parents,
+}
+
+impl<'repo> Platform<'repo> {
+ /// Create a new platform for `repo` whose traversal starts at the given `tips`, using default sorting and all parents.
+ pub(crate) fn new(tips: impl IntoIterator<Item = impl Into<ObjectId>>, repo: &'repo Repository) -> Self {
+ revision::walk::Platform {
+ repo,
+ tips: tips.into_iter().map(Into::into).collect(),
+ sorting: Default::default(),
+ parents: Default::default(),
+ }
+ }
+}
+
+/// Create-time builder methods
+impl<'repo> Platform<'repo> {
+ /// Set the sort mode for commits to the given value. The default is to order by topology.
+ ///
+ /// See [`gix_traverse::commit::Sorting`] for the available modes.
+ pub fn sorting(mut self, sorting: gix_traverse::commit::Sorting) -> Self {
+ self.sorting = sorting;
+ self
+ }
+
+ /// Only traverse the first parent of the commit graph.
+ pub fn first_parent_only(mut self) -> Self {
+ self.parents = gix_traverse::commit::Parents::First;
+ self
+ }
+}
+
+/// Produce the iterator
+impl<'repo> Platform<'repo> {
+ /// Return an iterator to traverse all commits reachable as configured by the [Platform].
+ ///
+ /// # Performance
+ ///
+ /// It's highly recommended to set an [`object cache`][Repository::object_cache_size()] on the parent repo
+ /// to greatly speed up performance if the returned id is supposed to be looked up right after.
+ pub fn all(self) -> Result<revision::Walk<'repo>, gix_traverse::commit::ancestors::Error> {
+ let Platform {
+ repo,
+ tips,
+ sorting,
+ parents,
+ } = self;
+ Ok(revision::Walk {
+ repo,
+ inner: Box::new(
+ gix_traverse::commit::Ancestors::new(
+ tips,
+ gix_traverse::commit::ancestors::State::default(),
+ move |oid, buf| repo.objects.find_commit_iter(oid, buf),
+ )
+ // Configuring the sort mode is fallible, hence the `?`; setting `parents` is not.
+ .sorting(sorting)?
+ .parents(parents),
+ ),
+ is_shallow: None,
+ error_on_missing_commit: false,
+ })
+ }
+}
+
+pub(crate) mod iter {
+ use crate::{ext::ObjectIdExt, Id};
+
+ /// The iterator returned by [`crate::revision::walk::Platform::all()`].
+ pub struct Walk<'repo> {
+ pub(crate) repo: &'repo crate::Repository,
+ /// The underlying commit traversal, boxed to erase its concrete type.
+ pub(crate) inner:
+ Box<dyn Iterator<Item = Result<gix_hash::ObjectId, gix_traverse::commit::ancestors::Error>> + 'repo>,
+ /// If true, missing commits abort the iteration with an error instead of being treated as a shallow boundary.
+ pub(crate) error_on_missing_commit: bool,
+ // TODO: tests
+ /// After iteration this flag is true if the iteration was stopped prematurely due to missing parent commits.
+ /// Note that this flag won't be `Some` if any iteration error occurs, which is the case if
+ /// [`error_on_missing_commit()`][Walk::error_on_missing_commit()] was called.
+ ///
+ /// This happens if a repository is a shallow clone.
+ /// Note that this value is `None` as long as the iteration isn't complete.
+ pub is_shallow: Option<bool>,
+ }
+
+ impl<'repo> Walk<'repo> {
+ // TODO: tests
+ /// Once invoked, the iteration will return an error if a commit cannot be found in the object database. This typically happens
+ /// when operating on a shallow clone and thus is non-critical by default.
+ ///
+ /// Check the [`is_shallow`][Walk::is_shallow] field once the iteration ended otherwise to learn if a shallow commit graph
+ /// was encountered.
+ pub fn error_on_missing_commit(mut self) -> Self {
+ self.error_on_missing_commit = true;
+ self
+ }
+ }
+
+ impl<'repo> Iterator for Walk<'repo> {
+ type Item = Result<Id<'repo>, gix_traverse::commit::ancestors::Error>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ match self.inner.next() {
+ None => {
+ // The traversal finished normally, so no shallow boundary was hit.
+ self.is_shallow = Some(false);
+ None
+ }
+ Some(Ok(oid)) => Some(Ok(oid.attach(self.repo))),
+ Some(Err(err @ gix_traverse::commit::ancestors::Error::FindExisting { .. })) => {
+ // A commit could not be found: either fail hard if the user opted in,
+ // or end the iteration and record that a shallow boundary was encountered.
+ if self.error_on_missing_commit {
+ Some(Err(err))
+ } else {
+ self.is_shallow = Some(true);
+ None
+ }
+ }
+ Some(Err(err)) => Some(Err(err)),
+ }
+ }
+ }
+}
diff --git a/vendor/gix/src/tag.rs b/vendor/gix/src/tag.rs
new file mode 100644
index 000000000..84af3b43a
--- /dev/null
+++ b/vendor/gix/src/tag.rs
@@ -0,0 +1,16 @@
+//! Types related to tag creation — see [`Repository::tag()`](crate::Repository::tag()).
+mod error {
+
+ /// The error returned by [`tag(…)`][crate::Repository::tag()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ ReferenceNameValidation(#[from] gix_ref::name::Error),
+ #[error(transparent)]
+ WriteObject(#[from] crate::object::write::Error),
+ #[error(transparent)]
+ ReferenceEdit(#[from] crate::reference::edit::Error),
+ }
+}
+pub use error::Error;
diff --git a/vendor/gix/src/types.rs b/vendor/gix/src/types.rs
new file mode 100644
index 000000000..34ffdc8bf
--- /dev/null
+++ b/vendor/gix/src/types.rs
@@ -0,0 +1,205 @@
+use std::{cell::RefCell, path::PathBuf};
+
+use gix_hash::ObjectId;
+
+use crate::{head, remote};
+
+/// The kind of repository.
+#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
+pub enum Kind {
+ /// A submodule worktree, whose `git` repository lives in `.git/modules/**/<name>` of the parent repository.
+ Submodule,
+ /// A bare repository does not have a work tree, that is files on disk beyond the `git` repository itself.
+ Bare,
+ /// A `git` repository along with checked out files in a work tree.
+ WorkTree {
+ /// If true, this is the git dir associated with this _linked_ worktree, otherwise it is a repository with a _main_ worktree.
+ is_linked: bool,
+ },
+}
+
+/// A worktree checkout containing the files of the repository in consumable form.
+pub struct Worktree<'repo> {
+ /// The owning repository.
+ pub(crate) parent: &'repo Repository,
+ /// The root path of the checkout.
+ pub(crate) path: &'repo std::path::Path,
+}
+
+/// The head reference, as created from looking at `.git/HEAD`, able to represent all of its possible states.
+///
+/// Note that like [`Reference`], this type's data is a snapshot of persisted state on disk.
+#[derive(Clone)]
+pub struct Head<'repo> {
+ /// One of various possible states for the HEAD reference
+ pub kind: head::Kind,
+ /// The owning repository.
+ pub(crate) repo: &'repo Repository,
+}
+
+/// An [ObjectId] with access to a repository.
+#[derive(Clone, Copy)]
+pub struct Id<'r> {
+ /// The actual object id
+ pub(crate) inner: ObjectId,
+ /// The owning repository.
+ pub(crate) repo: &'r Repository,
+}
+
+/// A decoded object with a reference to its owning repository.
+pub struct Object<'repo> {
+ /// The id of the object
+ pub id: ObjectId,
+ /// The kind of the object
+ pub kind: gix_object::Kind,
+ /// The fully decoded object data
+ pub data: Vec<u8>,
+ /// The owning repository.
+ pub(crate) repo: &'repo Repository,
+}
+
+impl<'a> Drop for Object<'a> {
+ fn drop(&mut self) {
+ // Return the data buffer to the repository's free-list so it can be reused for another object.
+ self.repo.reuse_buffer(&mut self.data);
+ }
+}
+
+/// A decoded tree object with access to its owning repository.
+pub struct Tree<'repo> {
+ /// The id of the tree
+ pub id: ObjectId,
+ /// The fully decoded tree data
+ pub data: Vec<u8>,
+ /// The owning repository.
+ pub(crate) repo: &'repo Repository,
+}
+
+impl<'a> Drop for Tree<'a> {
+ fn drop(&mut self) {
+ // Return the data buffer to the repository's free-list so it can be reused for another object.
+ self.repo.reuse_buffer(&mut self.data);
+ }
+}
+
+/// A decoded tag object with access to its owning repository.
+pub struct Tag<'repo> {
+ /// The id of the tag
+ pub id: ObjectId,
+ /// The fully decoded tag data
+ pub data: Vec<u8>,
+ /// The owning repository.
+ pub(crate) repo: &'repo Repository,
+}
+
+impl<'a> Drop for Tag<'a> {
+ fn drop(&mut self) {
+ // Return the data buffer to the repository's free-list so it can be reused for another object.
+ self.repo.reuse_buffer(&mut self.data);
+ }
+}
+
+/// A decoded commit object with access to its owning repository.
+pub struct Commit<'repo> {
+ /// The id of the commit
+ pub id: ObjectId,
+ /// The fully decoded commit data
+ pub data: Vec<u8>,
+ /// The owning repository.
+ pub(crate) repo: &'repo Repository,
+}
+
+impl<'a> Drop for Commit<'a> {
+ fn drop(&mut self) {
+ // Return the data buffer to the repository's free-list so it can be reused for another object.
+ self.repo.reuse_buffer(&mut self.data);
+ }
+}
+
+// NOTE(review): unlike the repository-attached object types above, this type has no `Drop` impl,
+// so its `data` buffer is owned outright and not returned to any free-list.
+/// A detached, self-contained object, without access to its source repository.
+///
+/// Use it if an `ObjectRef` should be sent over thread boundaries or stored in collections.
+#[derive(Clone)]
+pub struct ObjectDetached {
+ /// The id of the object
+ pub id: ObjectId,
+ /// The kind of the object
+ pub kind: gix_object::Kind,
+ /// The fully decoded object data
+ pub data: Vec<u8>,
+}
+
+/// A reference that points to an object or reference, with access to its source repository.
+///
+/// Note that these are snapshots and won't recognize if they are stale.
+#[derive(Clone)]
+pub struct Reference<'r> {
+ /// The actual reference data
+ pub inner: gix_ref::Reference,
+ /// The owning repository.
+ pub(crate) repo: &'r Repository,
+}
+
+/// A thread-local handle to interact with a repository from a single thread.
+///
+/// It is `Send` but **not** `Sync` - for the latter you can convert it `to_sync()`.
+/// Note that it clones itself so that it is empty, requiring the user to configure each clone separately, specifically
+/// and explicitly. This is to have the fastest-possible default configuration available by default, but allow
+/// those who experiment with workloads to get speed boosts of 2x or more.
+pub struct Repository {
+ /// A ref store with shared ownership (or the equivalent of it).
+ pub refs: crate::RefStore,
+ /// A way to access objects.
+ pub objects: crate::OdbHandle,
+
+ /// The path to the worktree at which to find checked out files, or `None` if there is no work tree.
+ pub(crate) work_tree: Option<PathBuf>,
+ /// The path to the resolved common directory if this is a linked worktree repository or it is otherwise set.
+ pub(crate) common_dir: Option<PathBuf>,
+ /// A free-list of re-usable object backing buffers
+ pub(crate) bufs: RefCell<Vec<Vec<u8>>>,
+ /// A pre-assembled selection of often-accessed configuration values for quick access.
+ pub(crate) config: crate::config::Cache,
+ /// the options obtained when instantiating this repository.
+ ///
+ /// Particularly useful when following linked worktrees and instantiating new equally configured worktree repositories.
+ pub(crate) options: crate::open::Options,
+ /// The index of this instance's worktree.
+ pub(crate) index: crate::worktree::IndexStorage,
+}
+
+/// An instance with access to everything a git repository entails, best imagined as a container implementing `Sync + Send` for _most_
+/// of the system resources required to interact with a `git` repository, which are loaded in once the instance is created.
+///
+/// Use this type to reference it in a threaded context for the creation of thread-local [`Repositories`][Repository].
+///
+/// Note that this type purposefully isn't very useful until it is converted into a thread-local repository with `to_thread_local()`,
+/// it's merely meant to be able to exist in a `Sync` context.
+pub struct ThreadSafeRepository {
+ /// A store for references to point at objects
+ pub refs: crate::RefStore,
+ /// A store for objects that contain data
+ pub objects: gix_features::threading::OwnShared<gix_odb::Store>,
+ /// The path to the worktree at which to find checked out files
+ pub work_tree: Option<PathBuf>,
+ /// The path to the common directory if this is a linked worktree repository or it is otherwise set.
+ pub common_dir: Option<PathBuf>,
+ /// A pre-assembled selection of often-accessed configuration values for quick access.
+ pub(crate) config: crate::config::Cache,
+ /// options obtained when instantiating this repository for use when following linked worktrees.
+ pub(crate) linked_worktree_options: crate::open::Options,
+ /// The index of this instance's worktree.
+ pub(crate) index: crate::worktree::IndexStorage,
+}
+
+/// A remote which represents a way to interact with hosts for remote clones of the parent repository.
+#[derive(Debug, Clone, PartialEq)]
+pub struct Remote<'repo> {
+ /// The remotes symbolic name, only present if persisted in git configuration files.
+ pub(crate) name: Option<remote::Name<'static>>,
+ /// The url of the host to talk to, after application of replacements. If it is unset, the `push_url` must be set,
+ /// and fetches aren't possible.
+ pub(crate) url: Option<gix_url::Url>,
+ /// The rewritten `url`, if it was rewritten.
+ pub(crate) url_alias: Option<gix_url::Url>,
+ /// The url to use for pushing specifically.
+ pub(crate) push_url: Option<gix_url::Url>,
+ /// The rewritten `push_url`, if it was rewritten.
+ pub(crate) push_url_alias: Option<gix_url::Url>,
+ /// Refspecs for use when fetching.
+ pub(crate) fetch_specs: Vec<gix_refspec::RefSpec>,
+ /// Refspecs for use when pushing.
+ pub(crate) push_specs: Vec<gix_refspec::RefSpec>,
+ /// Tell us what to do with tags when fetched.
+ pub(crate) fetch_tags: remote::fetch::Tags,
+ // /// Delete local tracking branches that don't exist on the remote anymore.
+ // pub(crate) prune: bool,
+ // /// Delete tags that don't exist on the remote anymore, equivalent to pruning the refspec `refs/tags/*:refs/tags/*`.
+ // pub(crate) prune_tags: bool,
+ /// The owning repository.
+ pub(crate) repo: &'repo Repository,
+}
diff --git a/vendor/gix/src/worktree/mod.rs b/vendor/gix/src/worktree/mod.rs
new file mode 100644
index 000000000..19a44a900
--- /dev/null
+++ b/vendor/gix/src/worktree/mod.rs
@@ -0,0 +1,160 @@
+use std::path::PathBuf;
+
+pub use gix_worktree::*;
+
+use crate::{
+ bstr::{BStr, BString},
+ Repository,
+};
+
+/// The shared, mutable snapshot storage backing [`Index`].
+pub(crate) type IndexStorage = gix_features::threading::OwnShared<gix_features::fs::MutableSnapshot<gix_index::File>>;
+/// A lazily loaded and auto-updated worktree index.
+pub type Index = gix_features::fs::SharedSnapshot<gix_index::File>;
+
+/// A stand-in to a worktree as result of a worktree iteration.
+///
+/// It provides access to typical worktree state, but may not actually point to a valid checkout as the latter has been moved or
+/// deleted.
+#[derive(Debug, Clone)]
+pub struct Proxy<'repo> {
+ /// The owning repository.
+ pub(crate) parent: &'repo Repository,
+ /// The git directory of the worktree this proxy stands in for.
+ pub(crate) git_dir: PathBuf,
+}
+
+/// Access
+impl<'repo> crate::Worktree<'repo> {
+ /// Read the location of the checkout, the base of the work tree
+ pub fn base(&self) -> &'repo std::path::Path {
+ self.path
+ }
+
+ /// Return true if this worktree is the main worktree associated with a non-bare git repository.
+ ///
+ /// It cannot be removed.
+ pub fn is_main(&self) -> bool {
+ // Only linked worktrees have an id, so its absence marks the main worktree.
+ self.id().is_none()
+ }
+
+ /// Return true if this worktree cannot be pruned, moved or deleted, which is useful if it is located on an external storage device.
+ ///
+ /// Always false for the main worktree.
+ pub fn is_locked(&self) -> bool {
+ Proxy::new(self.parent, self.parent.git_dir()).is_locked()
+ }
+
+ /// Provide a reason for the locking of this worktree, if it is locked at all.
+ ///
+ /// Note that we squelch errors in case the file cannot be read in which case the
+ /// reason is an empty string.
+ pub fn lock_reason(&self) -> Option<BString> {
+ Proxy::new(self.parent, self.parent.git_dir()).lock_reason()
+ }
+
+ /// Return the ID of the repository worktree, if it is a linked worktree, or `None` if it is the main worktree.
+ pub fn id(&self) -> Option<&BStr> {
+ id(self.parent.git_dir(), self.parent.common_dir.is_some())
+ }
+}
+
+/// Return the worktree id derived from the last component of `git_dir`, or `None` if this is the main worktree
+/// (no common dir) or `git_dir` doesn't live inside a `worktrees` directory.
+pub(crate) fn id(git_dir: &std::path::Path, has_common_dir: bool) -> Option<&BStr> {
+ if !has_common_dir {
+ return None;
+ }
+ let candidate = gix_path::os_str_into_bstr(git_dir.file_name().expect("at least one directory level"))
+ .expect("no illformed UTF-8");
+ let maybe_worktrees = git_dir.parent()?;
+ // Only linked worktrees keep their git dir in `<common-dir>/worktrees/<id>`.
+ (maybe_worktrees.file_name()?.to_str()? == "worktrees").then_some(candidate)
+}
+
+///
+pub mod proxy;
+
+/// Types related to opening the worktree index.
+pub mod open_index {
+ use crate::bstr::BString;
+
+ /// The error returned by [`Worktree::open_index()`][crate::Worktree::open_index()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("Could not interpret value '{}' as 'index.threads'", .value)]
+ ConfigIndexThreads {
+ /// The raw configuration value that couldn't be interpreted.
+ value: BString,
+ #[source]
+ err: gix_config::value::Error,
+ },
+ #[error(transparent)]
+ IndexFile(#[from] gix_index::file::init::Error),
+ }
+
+ impl<'repo> crate::Worktree<'repo> {
+ /// A shortcut to [`crate::Repository::open_index()`].
+ pub fn open_index(&self) -> Result<gix_index::File, Error> {
+ self.parent.open_index()
+ }
+
+ /// A shortcut to [`crate::Repository::index()`].
+ pub fn index(&self) -> Result<crate::worktree::Index, Error> {
+ self.parent.index()
+ }
+ }
+}
+
+/// Types related to checking exclude patterns (ignore rules).
+pub mod excludes {
+ use std::path::PathBuf;
+
+ /// The error returned by [`Worktree::excludes()`][crate::Worktree::excludes()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("Could not read repository exclude.")]
+ Io(#[from] std::io::Error),
+ #[error(transparent)]
+ EnvironmentPermission(#[from] gix_sec::permission::Error<PathBuf>),
+ #[error("The value for `core.excludesFile` could not be read from configuration")]
+ ExcludesFilePathInterpolation(#[from] gix_config::path::interpolate::Error),
+ }
+
+ impl<'repo> crate::Worktree<'repo> {
+ /// Configure a file-system cache checking if files below the repository are excluded.
+ ///
+ /// This takes into consideration all the usual repository configuration.
+ // TODO: test, provide higher-level interface that is much easier to use and doesn't panic.
+ pub fn excludes(
+ &self,
+ index: &gix_index::State,
+ overrides: Option<gix_attributes::MatchGroup<gix_attributes::Ignore>>,
+ ) -> Result<gix_worktree::fs::Cache, Error> {
+ let repo = self.parent;
+ // Fold case during pattern matching if the repository is configured to ignore case.
+ let case = repo
+ .config
+ .ignore_case
+ .then_some(gix_glob::pattern::Case::Fold)
+ .unwrap_or_default();
+ let mut buf = Vec::with_capacity(512);
+ // Prefer an explicitly configured `core.excludesFile`, falling back to the XDG `ignore` file.
+ let excludes_file = match repo.config.excludes_file().transpose()? {
+ Some(user_path) => Some(user_path),
+ None => repo.config.xdg_config_path("ignore")?,
+ };
+ let state = gix_worktree::fs::cache::State::IgnoreStack(gix_worktree::fs::cache::state::Ignore::new(
+ overrides.unwrap_or_default(),
+ gix_attributes::MatchGroup::<gix_attributes::Ignore>::from_git_dir(
+ repo.git_dir(),
+ excludes_file,
+ &mut buf,
+ )?,
+ None,
+ case,
+ ));
+ let attribute_list = state.build_attribute_list(index, index.path_backing(), case);
+ Ok(gix_worktree::fs::Cache::new(
+ self.path,
+ state,
+ case,
+ buf,
+ attribute_list,
+ ))
+ }
+ }
+}
diff --git a/vendor/gix/src/worktree/proxy.rs b/vendor/gix/src/worktree/proxy.rs
new file mode 100644
index 000000000..8a77db815
--- /dev/null
+++ b/vendor/gix/src/worktree/proxy.rs
@@ -0,0 +1,101 @@
+#![allow(clippy::result_large_err)]
+use std::path::{Path, PathBuf};
+
+use crate::{
+ bstr::{BStr, BString, ByteSlice},
+ worktree::Proxy,
+ Repository, ThreadSafeRepository,
+};
+
+#[allow(missing_docs)]
+/// The error produced when a [`Proxy`] cannot be turned into a repository.
+pub mod into_repo {
+ use std::path::PathBuf;
+
+ /// The error returned by [`Proxy::into_repo()`][super::Proxy::into_repo()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ Open(#[from] crate::open::Error),
+ #[error("Worktree at '{}' is inaccessible", .base.display())]
+ MissingWorktree { base: PathBuf },
+ #[error(transparent)]
+ MissingGitDirFile(#[from] std::io::Error),
+ }
+}
+
+impl<'repo> Proxy<'repo> {
+ /// Create a proxy for the worktree whose git directory is at `git_dir`, typically within `parent`'s git dir.
+ pub(crate) fn new(parent: &'repo Repository, git_dir: impl Into<PathBuf>) -> Self {
+ Proxy {
+ parent,
+ git_dir: git_dir.into(),
+ }
+ }
+}
+
+impl<'repo> Proxy<'repo> {
+ /// Read the location of the checkout, the base of the work tree.
+ /// Note that the location might not exist.
+ pub fn base(&self) -> std::io::Result<PathBuf> {
+ let git_dir = self.git_dir.join("gitdir");
+ // A missing `gitdir` file is mapped to a not-found error; the inner `?` propagates read failures.
+ let base_dot_git = gix_discover::path::from_plain_file(&git_dir).ok_or_else(|| {
+ std::io::Error::new(
+ std::io::ErrorKind::NotFound,
+ format!("Required file '{}' does not exist", git_dir.display()),
+ )
+ })??;
+
+ Ok(gix_discover::path::without_dot_git_dir(base_dot_git))
+ }
+
+ /// The git directory for the work tree, typically contained within the parent git dir.
+ pub fn git_dir(&self) -> &Path {
+ &self.git_dir
+ }
+
+ /// The name of the worktree, which is derived from its folder within the `worktrees` directory within the parent `.git` folder.
+ pub fn id(&self) -> &BStr {
+ gix_path::os_str_into_bstr(self.git_dir.file_name().expect("worktrees/ parent dir"))
+ .expect("no illformed UTF-8")
+ }
+
+ /// Return true if the worktree cannot be pruned, moved or deleted, which is useful if it is located on an external storage device.
+ pub fn is_locked(&self) -> bool {
+ self.git_dir.join("locked").is_file()
+ }
+
+ /// Provide a reason for the locking of this worktree, if it is locked at all.
+ ///
+ /// Note that we squelch errors in case the file cannot be read in which case the
+ /// reason is an empty string.
+ pub fn lock_reason(&self) -> Option<BString> {
+ std::fs::read(self.git_dir.join("locked"))
+ .ok()
+ .map(|contents| contents.trim().into())
+ }
+
+ /// Transform this proxy into a [`Repository`] while ignoring issues reading `base()` and ignoring that it might not exist.
+ ///
+ /// Most importantly, the `Repository` might be initialized with a non-existing work tree directory as the checkout
+ /// was removed or moved in the mean time or is unavailable for other reasons.
+ /// The caller will encounter io errors if it's used like the work tree is guaranteed to be present, but can still access
+ /// a lot of information if work tree access is avoided.
+ pub fn into_repo_with_possibly_inaccessible_worktree(self) -> Result<Repository, crate::open::Error> {
+ let base = self.base().ok();
+ let repo = ThreadSafeRepository::open_from_paths(self.git_dir, base, self.parent.options.clone())?;
+ Ok(repo.into())
+ }
+
+ /// Like `into_repo_with_possibly_inaccessible_worktree()` but will fail if the `base()` cannot be read or
+ /// if the worktree doesn't exist.
+ ///
+ /// Use `into_repo_with_possibly_inaccessible_worktree()` instead if a missing worktree directory is acceptable.
+ pub fn into_repo(self) -> Result<Repository, into_repo::Error> {
+ let base = self.base()?;
+ if !base.is_dir() {
+ return Err(into_repo::Error::MissingWorktree { base });
+ }
+ let repo = ThreadSafeRepository::open_from_paths(self.git_dir, base.into(), self.parent.options.clone())?;
+ Ok(repo.into())
+ }
+}