summaryrefslogtreecommitdiffstats
path: root/vendor/gix/src
diff options
context:
space:
mode:
Diffstat (limited to 'vendor/gix/src')
-rw-r--r--vendor/gix/src/assets/baseline-init/description1
-rwxr-xr-xvendor/gix/src/assets/baseline-init/hooks/applypatch-msg.sample15
-rwxr-xr-xvendor/gix/src/assets/baseline-init/hooks/commit-msg.sample24
-rwxr-xr-xvendor/gix/src/assets/baseline-init/hooks/fsmonitor-watchman.sample173
-rwxr-xr-xvendor/gix/src/assets/baseline-init/hooks/post-update.sample8
-rwxr-xr-xvendor/gix/src/assets/baseline-init/hooks/pre-applypatch.sample14
-rwxr-xr-xvendor/gix/src/assets/baseline-init/hooks/pre-commit.sample49
-rwxr-xr-xvendor/gix/src/assets/baseline-init/hooks/pre-merge-commit.sample13
-rwxr-xr-xvendor/gix/src/assets/baseline-init/hooks/pre-push.sample53
-rwxr-xr-xvendor/gix/src/assets/baseline-init/hooks/pre-rebase.sample169
-rwxr-xr-xvendor/gix/src/assets/baseline-init/hooks/pre-receive.sample24
-rwxr-xr-xvendor/gix/src/assets/baseline-init/hooks/prepare-commit-msg.sample42
-rwxr-xr-xvendor/gix/src/assets/baseline-init/hooks/update.sample128
-rw-r--r--vendor/gix/src/assets/baseline-init/info/exclude6
-rw-r--r--vendor/gix/src/assets/init/HEAD (renamed from vendor/gix/src/assets/baseline-init/HEAD)0
-rw-r--r--vendor/gix/src/assets/init/description1
-rwxr-xr-xvendor/gix/src/assets/init/hooks/applypatch-msg.sample25
-rwxr-xr-xvendor/gix/src/assets/init/hooks/commit-msg.sample25
-rw-r--r--vendor/gix/src/assets/init/hooks/docs.url1
-rwxr-xr-xvendor/gix/src/assets/init/hooks/fsmonitor-watchman.sample16
-rwxr-xr-xvendor/gix/src/assets/init/hooks/post-update.sample12
-rwxr-xr-xvendor/gix/src/assets/init/hooks/pre-applypatch.sample27
-rwxr-xr-xvendor/gix/src/assets/init/hooks/pre-commit.sample19
-rwxr-xr-xvendor/gix/src/assets/init/hooks/pre-merge-commit.sample16
-rwxr-xr-xvendor/gix/src/assets/init/hooks/pre-push.sample46
-rwxr-xr-xvendor/gix/src/assets/init/hooks/pre-rebase.sample40
-rwxr-xr-xvendor/gix/src/assets/init/hooks/prepare-commit-msg.sample54
-rw-r--r--vendor/gix/src/assets/init/info/exclude5
-rw-r--r--vendor/gix/src/attribute_stack.rs69
-rw-r--r--vendor/gix/src/attributes.rs9
-rw-r--r--vendor/gix/src/clone/checkout.rs39
-rw-r--r--vendor/gix/src/clone/fetch/mod.rs21
-rw-r--r--vendor/gix/src/clone/fetch/util.rs12
-rw-r--r--vendor/gix/src/clone/mod.rs31
-rw-r--r--vendor/gix/src/commit.rs17
-rw-r--r--vendor/gix/src/config/cache/access.rs200
-rw-r--r--vendor/gix/src/config/cache/incubate.rs4
-rw-r--r--vendor/gix/src/config/cache/init.rs58
-rw-r--r--vendor/gix/src/config/cache/util.rs36
-rw-r--r--vendor/gix/src/config/mod.rs60
-rw-r--r--vendor/gix/src/config/overrides.rs12
-rw-r--r--vendor/gix/src/config/snapshot/access.rs54
-rw-r--r--vendor/gix/src/config/snapshot/credential_helpers.rs4
-rw-r--r--vendor/gix/src/config/snapshot/mod.rs1
-rw-r--r--vendor/gix/src/config/tree/keys.rs2
-rw-r--r--vendor/gix/src/config/tree/mod.rs8
-rw-r--r--vendor/gix/src/config/tree/sections/core.rs249
-rw-r--r--vendor/gix/src/config/tree/sections/diff.rs21
-rw-r--r--vendor/gix/src/config/tree/sections/fetch.rs81
-rw-r--r--vendor/gix/src/config/tree/sections/gitoxide.rs62
-rw-r--r--vendor/gix/src/config/tree/sections/index.rs5
-rw-r--r--vendor/gix/src/config/tree/sections/mod.rs2
-rw-r--r--vendor/gix/src/config/tree/sections/protocol.rs2
-rw-r--r--vendor/gix/src/create.rs38
-rw-r--r--vendor/gix/src/discover.rs6
-rw-r--r--vendor/gix/src/env.rs3
-rw-r--r--vendor/gix/src/ext/mod.rs4
-rw-r--r--vendor/gix/src/ext/tree.rs24
-rw-r--r--vendor/gix/src/filter.rs229
-rw-r--r--vendor/gix/src/head/mod.rs6
-rw-r--r--vendor/gix/src/id.rs23
-rw-r--r--vendor/gix/src/init.rs5
-rw-r--r--vendor/gix/src/interrupt.rs99
-rw-r--r--vendor/gix/src/lib.rs73
-rw-r--r--vendor/gix/src/object/blob.rs1
-rw-r--r--vendor/gix/src/object/commit.rs16
-rw-r--r--vendor/gix/src/object/errors.rs10
-rw-r--r--vendor/gix/src/object/tree/diff/for_each.rs5
-rw-r--r--vendor/gix/src/object/tree/diff/mod.rs4
-rw-r--r--vendor/gix/src/object/tree/diff/rewrites.rs2
-rw-r--r--vendor/gix/src/object/tree/iter.rs19
-rw-r--r--vendor/gix/src/object/tree/mod.rs111
-rw-r--r--vendor/gix/src/open/options.rs6
-rw-r--r--vendor/gix/src/open/repository.rs44
-rw-r--r--vendor/gix/src/pathspec.rs207
-rw-r--r--vendor/gix/src/reference/edits.rs5
-rw-r--r--vendor/gix/src/reference/iter.rs14
-rw-r--r--vendor/gix/src/reference/log.rs5
-rw-r--r--vendor/gix/src/reference/mod.rs17
-rw-r--r--vendor/gix/src/remote/build.rs23
-rw-r--r--vendor/gix/src/remote/connect.rs6
-rw-r--r--vendor/gix/src/remote/connection/fetch/mod.rs60
-rw-r--r--vendor/gix/src/remote/connection/fetch/negotiate.rs141
-rw-r--r--vendor/gix/src/remote/connection/fetch/receive_pack.rs129
-rw-r--r--vendor/gix/src/remote/connection/fetch/update_refs/mod.rs368
-rw-r--r--vendor/gix/src/remote/connection/fetch/update_refs/tests.rs427
-rw-r--r--vendor/gix/src/remote/connection/fetch/update_refs/update.rs45
-rw-r--r--vendor/gix/src/remote/connection/ref_map.rs6
-rw-r--r--vendor/gix/src/remote/errors.rs23
-rw-r--r--vendor/gix/src/remote/fetch.rs19
-rw-r--r--vendor/gix/src/remote/init.rs13
-rw-r--r--vendor/gix/src/remote/save.rs5
-rw-r--r--vendor/gix/src/repository/attributes.rs119
-rw-r--r--vendor/gix/src/repository/config/mod.rs15
-rw-r--r--vendor/gix/src/repository/config/transport.rs5
-rw-r--r--vendor/gix/src/repository/excludes.rs45
-rw-r--r--vendor/gix/src/repository/filter.rs64
-rw-r--r--vendor/gix/src/repository/graph.rs26
-rw-r--r--vendor/gix/src/repository/identity.rs20
-rw-r--r--vendor/gix/src/repository/impls.rs12
-rw-r--r--vendor/gix/src/repository/index.rs133
-rw-r--r--vendor/gix/src/repository/init.rs17
-rw-r--r--vendor/gix/src/repository/location.rs42
-rw-r--r--vendor/gix/src/repository/mailmap.rs (renamed from vendor/gix/src/repository/snapshots.rs)3
-rw-r--r--vendor/gix/src/repository/mod.rs94
-rw-r--r--vendor/gix/src/repository/object.rs105
-rw-r--r--vendor/gix/src/repository/pathspec.rs55
-rw-r--r--vendor/gix/src/repository/reference.rs29
-rw-r--r--vendor/gix/src/repository/remote.rs42
-rw-r--r--vendor/gix/src/repository/revision.rs5
-rw-r--r--vendor/gix/src/repository/submodule.rs96
-rw-r--r--vendor/gix/src/repository/worktree.rs137
-rw-r--r--vendor/gix/src/revision/mod.rs3
-rw-r--r--vendor/gix/src/revision/spec/parse/delegate/mod.rs7
-rw-r--r--vendor/gix/src/revision/spec/parse/delegate/navigate.rs24
-rw-r--r--vendor/gix/src/revision/spec/parse/delegate/revision.rs8
-rw-r--r--vendor/gix/src/revision/spec/parse/mod.rs2
-rw-r--r--vendor/gix/src/revision/spec/parse/types.rs10
-rw-r--r--vendor/gix/src/revision/walk.rs128
-rw-r--r--vendor/gix/src/submodule/errors.rs106
-rw-r--r--vendor/gix/src/submodule/mod.rs287
-rw-r--r--vendor/gix/src/types.rs53
-rw-r--r--vendor/gix/src/worktree/mod.rs111
123 files changed, 4574 insertions, 1535 deletions
diff --git a/vendor/gix/src/assets/baseline-init/description b/vendor/gix/src/assets/baseline-init/description
deleted file mode 100644
index 498b267a8..000000000
--- a/vendor/gix/src/assets/baseline-init/description
+++ /dev/null
@@ -1 +0,0 @@
-Unnamed repository; edit this file 'description' to name the repository.
diff --git a/vendor/gix/src/assets/baseline-init/hooks/applypatch-msg.sample b/vendor/gix/src/assets/baseline-init/hooks/applypatch-msg.sample
deleted file mode 100755
index 20fbb51a2..000000000
--- a/vendor/gix/src/assets/baseline-init/hooks/applypatch-msg.sample
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/sh
-#
-# An example hook script to check the commit log message taken by
-# applypatch from an e-mail message.
-#
-# The hook should exit with non-zero status after issuing an
-# appropriate message if it wants to stop the commit. The hook is
-# allowed to edit the commit message file.
-#
-# To enable this hook, rename this file to "applypatch-msg".
-
-. git-sh-setup
-commitmsg="$(git rev-parse --gix-path hooks/commit-msg)"
-test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"}
-:
diff --git a/vendor/gix/src/assets/baseline-init/hooks/commit-msg.sample b/vendor/gix/src/assets/baseline-init/hooks/commit-msg.sample
deleted file mode 100755
index b58d1184a..000000000
--- a/vendor/gix/src/assets/baseline-init/hooks/commit-msg.sample
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/bin/sh
-#
-# An example hook script to check the commit log message.
-# Called by "git commit" with one argument, the name of the file
-# that has the commit message. The hook should exit with non-zero
-# status after issuing an appropriate message if it wants to stop the
-# commit. The hook is allowed to edit the commit message file.
-#
-# To enable this hook, rename this file to "commit-msg".
-
-# Uncomment the below to add a Signed-off-by line to the message.
-# Doing this in a hook is a bad idea in general, but the prepare-commit-msg
-# hook is more suited to it.
-#
-# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p')
-# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1"
-
-# This example catches duplicate Signed-off-by lines.
-
-test "" = "$(grep '^Signed-off-by: ' "$1" |
- sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || {
- echo >&2 Duplicate Signed-off-by lines.
- exit 1
-}
diff --git a/vendor/gix/src/assets/baseline-init/hooks/fsmonitor-watchman.sample b/vendor/gix/src/assets/baseline-init/hooks/fsmonitor-watchman.sample
deleted file mode 100755
index 14ed0aa42..000000000
--- a/vendor/gix/src/assets/baseline-init/hooks/fsmonitor-watchman.sample
+++ /dev/null
@@ -1,173 +0,0 @@
-#!/usr/bin/perl
-
-use strict;
-use warnings;
-use IPC::Open2;
-
-# An example hook script to integrate Watchman
-# (https://facebook.github.io/watchman/) with git to speed up detecting
-# new and modified files.
-#
-# The hook is passed a version (currently 2) and last update token
-# formatted as a string and outputs to stdout a new update token and
-# all files that have been modified since the update token. Paths must
-# be relative to the root of the working tree and separated by a single NUL.
-#
-# To enable this hook, rename this file to "query-watchman" and set
-# 'git config core.fsmonitor .git/hooks/query-watchman'
-#
-my ($version, $last_update_token) = @ARGV;
-
-# Uncomment for debugging
-# print STDERR "$0 $version $last_update_token\n";
-
-# Check the hook interface version
-if ($version ne 2) {
- die "Unsupported query-fsmonitor hook version '$version'.\n" .
- "Falling back to scanning...\n";
-}
-
-my $git_work_tree = get_working_dir();
-
-my $retry = 1;
-
-my $json_pkg;
-eval {
- require JSON::XS;
- $json_pkg = "JSON::XS";
- 1;
-} or do {
- require JSON::PP;
- $json_pkg = "JSON::PP";
-};
-
-launch_watchman();
-
-sub launch_watchman {
- my $o = watchman_query();
- if (is_work_tree_watched($o)) {
- output_result($o->{clock}, @{$o->{files}});
- }
-}
-
-sub output_result {
- my ($clockid, @files) = @_;
-
- # Uncomment for debugging watchman output
- # open (my $fh, ">", ".git/watchman-output.out");
- # binmode $fh, ":utf8";
- # print $fh "$clockid\n@files\n";
- # close $fh;
-
- binmode STDOUT, ":utf8";
- print $clockid;
- print "\0";
- local $, = "\0";
- print @files;
-}
-
-sub watchman_clock {
- my $response = qx/watchman clock "$git_work_tree"/;
- die "Failed to get clock id on '$git_work_tree'.\n" .
- "Falling back to scanning...\n" if $? != 0;
-
- return $json_pkg->new->utf8->decode($response);
-}
-
-sub watchman_query {
- my $pid = open2(\*CHLD_OUT, \*CHLD_IN, 'watchman -j --no-pretty')
- or die "open2() failed: $!\n" .
- "Falling back to scanning...\n";
-
- # In the query expression below we're asking for names of files that
- # changed since $last_update_token but not from the .git folder.
- #
- # To accomplish this, we're using the "since" generator to use the
- # recency index to select candidate nodes and "fields" to limit the
- # output to file names only. Then we're using the "expression" term to
- # further constrain the results.
- if (substr($last_update_token, 0, 1) eq "c") {
- $last_update_token = "\"$last_update_token\"";
- }
- my $query = <<" END";
- ["query", "$git_work_tree", {
- "since": $last_update_token,
- "fields": ["name"],
- "expression": ["not", ["dirname", ".git"]]
- }]
- END
-
- # Uncomment for debugging the watchman query
- # open (my $fh, ">", ".git/watchman-query.json");
- # print $fh $query;
- # close $fh;
-
- print CHLD_IN $query;
- close CHLD_IN;
- my $response = do {local $/; <CHLD_OUT>};
-
- # Uncomment for debugging the watch response
- # open ($fh, ">", ".git/watchman-response.json");
- # print $fh $response;
- # close $fh;
-
- die "Watchman: command returned no output.\n" .
- "Falling back to scanning...\n" if $response eq "";
- die "Watchman: command returned invalid output: $response\n" .
- "Falling back to scanning...\n" unless $response =~ /^\{/;
-
- return $json_pkg->new->utf8->decode($response);
-}
-
-sub is_work_tree_watched {
- my ($output) = @_;
- my $error = $output->{error};
- if ($retry > 0 and $error and $error =~ m/unable to resolve root .* directory (.*) is not watched/) {
- $retry--;
- my $response = qx/watchman watch "$git_work_tree"/;
- die "Failed to make watchman watch '$git_work_tree'.\n" .
- "Falling back to scanning...\n" if $? != 0;
- $output = $json_pkg->new->utf8->decode($response);
- $error = $output->{error};
- die "Watchman: $error.\n" .
- "Falling back to scanning...\n" if $error;
-
- # Uncomment for debugging watchman output
- # open (my $fh, ">", ".git/watchman-output.out");
- # close $fh;
-
- # Watchman will always return all files on the first query so
- # return the fast "everything is dirty" flag to git and do the
- # Watchman query just to get it over with now so we won't pay
- # the cost in git to look up each individual file.
- my $o = watchman_clock();
- $error = $output->{error};
-
- die "Watchman: $error.\n" .
- "Falling back to scanning...\n" if $error;
-
- output_result($o->{clock}, ("/"));
- $last_update_token = $o->{clock};
-
- eval { launch_watchman() };
- return 0;
- }
-
- die "Watchman: $error.\n" .
- "Falling back to scanning...\n" if $error;
-
- return 1;
-}
-
-sub get_working_dir {
- my $working_dir;
- if ($^O =~ 'msys' || $^O =~ 'cygwin') {
- $working_dir = Win32::GetCwd();
- $working_dir =~ tr/\\/\//;
- } else {
- require Cwd;
- $working_dir = Cwd::cwd();
- }
-
- return $working_dir;
-}
diff --git a/vendor/gix/src/assets/baseline-init/hooks/post-update.sample b/vendor/gix/src/assets/baseline-init/hooks/post-update.sample
deleted file mode 100755
index ec17ec193..000000000
--- a/vendor/gix/src/assets/baseline-init/hooks/post-update.sample
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/bin/sh
-#
-# An example hook script to prepare a packed repository for use over
-# dumb transports.
-#
-# To enable this hook, rename this file to "post-update".
-
-exec git update-server-info
diff --git a/vendor/gix/src/assets/baseline-init/hooks/pre-applypatch.sample b/vendor/gix/src/assets/baseline-init/hooks/pre-applypatch.sample
deleted file mode 100755
index d61828510..000000000
--- a/vendor/gix/src/assets/baseline-init/hooks/pre-applypatch.sample
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/sh
-#
-# An example hook script to verify what is about to be committed
-# by applypatch from an e-mail message.
-#
-# The hook should exit with non-zero status after issuing an
-# appropriate message if it wants to stop the commit.
-#
-# To enable this hook, rename this file to "pre-applypatch".
-
-. git-sh-setup
-precommit="$(git rev-parse --gix-path hooks/pre-commit)"
-test -x "$precommit" && exec "$precommit" ${1+"$@"}
-:
diff --git a/vendor/gix/src/assets/baseline-init/hooks/pre-commit.sample b/vendor/gix/src/assets/baseline-init/hooks/pre-commit.sample
deleted file mode 100755
index e144712c8..000000000
--- a/vendor/gix/src/assets/baseline-init/hooks/pre-commit.sample
+++ /dev/null
@@ -1,49 +0,0 @@
-#!/bin/sh
-#
-# An example hook script to verify what is about to be committed.
-# Called by "git commit" with no arguments. The hook should
-# exit with non-zero status after issuing an appropriate message if
-# it wants to stop the commit.
-#
-# To enable this hook, rename this file to "pre-commit".
-
-if git rev-parse --verify HEAD >/dev/null 2>&1
-then
- against=HEAD
-else
- # Initial commit: diff against an empty tree object
- against=$(git hash-object -t tree /dev/null)
-fi
-
-# If you want to allow non-ASCII filenames set this variable to true.
-allownonascii=$(git config --type=bool hooks.allownonascii)
-
-# Redirect output to stderr.
-exec 1>&2
-
-# Cross platform projects tend to avoid non-ASCII filenames; prevent
-# them from being added to the repository. We exploit the fact that the
-# printable range starts at the space character and ends with tilde.
-if [ "$allownonascii" != "true" ] &&
- # Note that the use of brackets around a tr range is ok here, (it's
- # even required, for portability to Solaris 10's /usr/bin/tr), since
- # the square bracket bytes happen to fall in the designated range.
- test $(git diff --cached --name-only --diff-filter=A -z $against |
- LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0
-then
- cat <<\EOF
-Error: Attempt to add a non-ASCII file name.
-
-This can cause problems if you want to work with people on other platforms.
-
-To be portable it is advisable to rename the file.
-
-If you know what you are doing you can disable this check using:
-
- git config hooks.allownonascii true
-EOF
- exit 1
-fi
-
-# If there are whitespace errors, print the offending file names and fail.
-exec git diff-index --check --cached $against --
diff --git a/vendor/gix/src/assets/baseline-init/hooks/pre-merge-commit.sample b/vendor/gix/src/assets/baseline-init/hooks/pre-merge-commit.sample
deleted file mode 100755
index 399eab192..000000000
--- a/vendor/gix/src/assets/baseline-init/hooks/pre-merge-commit.sample
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/bin/sh
-#
-# An example hook script to verify what is about to be committed.
-# Called by "git merge" with no arguments. The hook should
-# exit with non-zero status after issuing an appropriate message to
-# stderr if it wants to stop the merge commit.
-#
-# To enable this hook, rename this file to "pre-merge-commit".
-
-. git-sh-setup
-test -x "$GIT_DIR/hooks/pre-commit" &&
- exec "$GIT_DIR/hooks/pre-commit"
-:
diff --git a/vendor/gix/src/assets/baseline-init/hooks/pre-push.sample b/vendor/gix/src/assets/baseline-init/hooks/pre-push.sample
deleted file mode 100755
index 6187dbf43..000000000
--- a/vendor/gix/src/assets/baseline-init/hooks/pre-push.sample
+++ /dev/null
@@ -1,53 +0,0 @@
-#!/bin/sh
-
-# An example hook script to verify what is about to be pushed. Called by "git
-# push" after it has checked the remote status, but before anything has been
-# pushed. If this script exits with a non-zero status nothing will be pushed.
-#
-# This hook is called with the following parameters:
-#
-# $1 -- Name of the remote to which the push is being done
-# $2 -- URL to which the push is being done
-#
-# If pushing without using a named remote those arguments will be equal.
-#
-# Information about the commits which are being pushed is supplied as lines to
-# the standard input in the form:
-#
-# <local ref> <local sha1> <remote ref> <remote sha1>
-#
-# This sample shows how to prevent push of commits where the log message starts
-# with "WIP" (work in progress).
-
-remote="$1"
-url="$2"
-
-z40=0000000000000000000000000000000000000000
-
-while read local_ref local_sha remote_ref remote_sha
-do
- if [ "$local_sha" = $z40 ]
- then
- # Handle delete
- :
- else
- if [ "$remote_sha" = $z40 ]
- then
- # New branch, examine all commits
- range="$local_sha"
- else
- # Update to existing branch, examine new commits
- range="$remote_sha..$local_sha"
- fi
-
- # Check for WIP commit
- commit=`git rev-list -n 1 --grep '^WIP' "$range"`
- if [ -n "$commit" ]
- then
- echo >&2 "Found WIP commit in $local_ref, not pushing"
- exit 1
- fi
- fi
-done
-
-exit 0
diff --git a/vendor/gix/src/assets/baseline-init/hooks/pre-rebase.sample b/vendor/gix/src/assets/baseline-init/hooks/pre-rebase.sample
deleted file mode 100755
index d6ac43f64..000000000
--- a/vendor/gix/src/assets/baseline-init/hooks/pre-rebase.sample
+++ /dev/null
@@ -1,169 +0,0 @@
-#!/bin/sh
-#
-# Copyright (c) 2006, 2008 Junio C Hamano
-#
-# The "pre-rebase" hook is run just before "git rebase" starts doing
-# its job, and can prevent the command from running by exiting with
-# non-zero status.
-#
-# The hook is called with the following parameters:
-#
-# $1 -- the upstream the series was forked from.
-# $2 -- the branch being rebased (or empty when rebasing the current branch).
-#
-# This sample shows how to prevent topic branches that are already
-# merged to 'next' branch from getting rebased, because allowing it
-# would result in rebasing already published history.
-
-publish=next
-basebranch="$1"
-if test "$#" = 2
-then
- topic="refs/heads/$2"
-else
- topic=`git symbolic-ref HEAD` ||
- exit 0 ;# we do not interrupt rebasing detached HEAD
-fi
-
-case "$topic" in
-refs/heads/??/*)
- ;;
-*)
- exit 0 ;# we do not interrupt others.
- ;;
-esac
-
-# Now we are dealing with a topic branch being rebased
-# on top of main. Is it OK to rebase it?
-
-# Does the topic really exist?
-git show-ref -q "$topic" || {
- echo >&2 "No such branch $topic"
- exit 1
-}
-
-# Is topic fully merged to main?
-not_in_main=`git rev-list --pretty=oneline ^main "$topic"`
-if test -z "$not_in_main"
-then
- echo >&2 "$topic is fully merged to main; better remove it."
- exit 1 ;# we could allow it, but there is no point.
-fi
-
-# Is topic ever merged to next? If so you should not be rebasing it.
-only_next_1=`git rev-list ^main "^$topic" ${publish} | sort`
-only_next_2=`git rev-list ^main ${publish} | sort`
-if test "$only_next_1" = "$only_next_2"
-then
- not_in_topic=`git rev-list "^$topic" main`
- if test -z "$not_in_topic"
- then
- echo >&2 "$topic is already up to date with main"
- exit 1 ;# we could allow it, but there is no point.
- else
- exit 0
- fi
-else
- not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"`
- /usr/bin/perl -e '
- my $topic = $ARGV[0];
- my $msg = "* $topic has commits already merged to public branch:\n";
- my (%not_in_next) = map {
- /^([0-9a-f]+) /;
- ($1 => 1);
- } split(/\n/, $ARGV[1]);
- for my $elem (map {
- /^([0-9a-f]+) (.*)$/;
- [$1 => $2];
- } split(/\n/, $ARGV[2])) {
- if (!exists $not_in_next{$elem->[0]}) {
- if ($msg) {
- print STDERR $msg;
- undef $msg;
- }
- print STDERR " $elem->[1]\n";
- }
- }
- ' "$topic" "$not_in_next" "$not_in_main"
- exit 1
-fi
-
-<<\DOC_END
-
-This sample hook safeguards topic branches that have been
-published from being rewound.
-
-The workflow assumed here is:
-
- * Once a topic branch forks from "main", "main" is never
- merged into it again (either directly or indirectly).
-
- * Once a topic branch is fully cooked and merged into "main",
- it is deleted. If you need to build on top of it to correct
- earlier mistakes, a new topic branch is created by forking at
- the tip of the "main". This is not strictly necessary, but
- it makes it easier to keep your history simple.
-
- * Whenever you need to test or publish your changes to topic
- branches, merge them into "next" branch.
-
-The script, being an example, hardcodes the publish branch name
-to be "next", but it is trivial to make it configurable via
-$GIT_DIR/config mechanism.
-
-With this workflow, you would want to know:
-
-(1) ... if a topic branch has ever been merged to "next". Young
- topic branches can have stupid mistakes you would rather
- clean up before publishing, and things that have not been
- merged into other branches can be easily rebased without
- affecting other people. But once it is published, you would
- not want to rewind it.
-
-(2) ... if a topic branch has been fully merged to "main".
- Then you can delete it. More importantly, you should not
- build on top of it -- other people may already want to
- change things related to the topic as patches against your
- "main", so if you need further changes, it is better to
- fork the topic (perhaps with the same name) afresh from the
- tip of "main".
-
-Let's look at this example:
-
- o---o---o---o---o---o---o---o---o---o "next"
- / / / /
- / a---a---b A / /
- / / / /
- / / c---c---c---c B /
- / / / \ /
- / / / b---b C \ /
- / / / / \ /
- ---o---o---o---o---o---o---o---o---o---o---o "main"
-
-
-A, B and C are topic branches.
-
- * A has one fix since it was merged up to "next".
-
- * B has finished. It has been fully merged up to "main" and "next",
- and is ready to be deleted.
-
- * C has not merged to "next" at all.
-
-We would want to allow C to be rebased, refuse A, and encourage
-B to be deleted.
-
-To compute (1):
-
- git rev-list ^main ^topic next
- git rev-list ^main next
-
- if these match, topic has not merged in next at all.
-
-To compute (2):
-
- git rev-list main..topic
-
- if this is empty, it is fully merged to "main".
-
-DOC_END
diff --git a/vendor/gix/src/assets/baseline-init/hooks/pre-receive.sample b/vendor/gix/src/assets/baseline-init/hooks/pre-receive.sample
deleted file mode 100755
index a1fd29ec1..000000000
--- a/vendor/gix/src/assets/baseline-init/hooks/pre-receive.sample
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/bin/sh
-#
-# An example hook script to make use of push options.
-# The example simply echoes all push options that start with 'echoback='
-# and rejects all pushes when the "reject" push option is used.
-#
-# To enable this hook, rename this file to "pre-receive".
-
-if test -n "$GIT_PUSH_OPTION_COUNT"
-then
- i=0
- while test "$i" -lt "$GIT_PUSH_OPTION_COUNT"
- do
- eval "value=\$GIT_PUSH_OPTION_$i"
- case "$value" in
- echoback=*)
- echo "echo from the pre-receive-hook: ${value#*=}" >&2
- ;;
- reject)
- exit 1
- esac
- i=$((i + 1))
- done
-fi
diff --git a/vendor/gix/src/assets/baseline-init/hooks/prepare-commit-msg.sample b/vendor/gix/src/assets/baseline-init/hooks/prepare-commit-msg.sample
deleted file mode 100755
index 10fa14c5a..000000000
--- a/vendor/gix/src/assets/baseline-init/hooks/prepare-commit-msg.sample
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/bin/sh
-#
-# An example hook script to prepare the commit log message.
-# Called by "git commit" with the name of the file that has the
-# commit message, followed by the description of the commit
-# message's source. The hook's purpose is to edit the commit
-# message file. If the hook fails with a non-zero status,
-# the commit is aborted.
-#
-# To enable this hook, rename this file to "prepare-commit-msg".
-
-# This hook includes three examples. The first one removes the
-# "# Please enter the commit message..." help message.
-#
-# The second includes the output of "git diff --name-status -r"
-# into the message, just before the "git status" output. It is
-# commented because it doesn't cope with --amend or with squashed
-# commits.
-#
-# The third example adds a Signed-off-by line to the message, that can
-# still be edited. This is rarely a good idea.
-
-COMMIT_MSG_FILE=$1
-COMMIT_SOURCE=$2
-SHA1=$3
-
-/usr/bin/perl -i.bak -ne 'print unless(m/^. Please enter the commit message/..m/^#$/)' "$COMMIT_MSG_FILE"
-
-# case "$COMMIT_SOURCE,$SHA1" in
-# ,|template,)
-# /usr/bin/perl -i.bak -pe '
-# print "\n" . `git diff --cached --name-status -r`
-# if /^#/ && $first++ == 0' "$COMMIT_MSG_FILE" ;;
-# *) ;;
-# esac
-
-# SOB=$(git var GIT_COMMITTER_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p')
-# git interpret-trailers --in-place --trailer "$SOB" "$COMMIT_MSG_FILE"
-# if test -z "$COMMIT_SOURCE"
-# then
-# /usr/bin/perl -i.bak -pe 'print "\n" if !$first_line++' "$COMMIT_MSG_FILE"
-# fi
diff --git a/vendor/gix/src/assets/baseline-init/hooks/update.sample b/vendor/gix/src/assets/baseline-init/hooks/update.sample
deleted file mode 100755
index 5014c4b31..000000000
--- a/vendor/gix/src/assets/baseline-init/hooks/update.sample
+++ /dev/null
@@ -1,128 +0,0 @@
-#!/bin/sh
-#
-# An example hook script to block unannotated tags from entering.
-# Called by "git receive-pack" with arguments: refname sha1-old sha1-new
-#
-# To enable this hook, rename this file to "update".
-#
-# Config
-# ------
-# hooks.allowunannotated
-# This boolean sets whether unannotated tags will be allowed into the
-# repository. By default they won't be.
-# hooks.allowdeletetag
-# This boolean sets whether deleting tags will be allowed in the
-# repository. By default they won't be.
-# hooks.allowmodifytag
-# This boolean sets whether a tag may be modified after creation. By default
-# it won't be.
-# hooks.allowdeletebranch
-# This boolean sets whether deleting branches will be allowed in the
-# repository. By default they won't be.
-# hooks.denycreatebranch
-# This boolean sets whether remotely creating branches will be denied
-# in the repository. By default this is allowed.
-#
-
-# --- Command line
-refname="$1"
-oldrev="$2"
-newrev="$3"
-
-# --- Safety check
-if [ -z "$GIT_DIR" ]; then
- echo "Don't run this script from the command line." >&2
- echo " (if you want, you could supply GIT_DIR then run" >&2
- echo " $0 <ref> <oldrev> <newrev>)" >&2
- exit 1
-fi
-
-if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then
- echo "usage: $0 <ref> <oldrev> <newrev>" >&2
- exit 1
-fi
-
-# --- Config
-allowunannotated=$(git config --type=bool hooks.allowunannotated)
-allowdeletebranch=$(git config --type=bool hooks.allowdeletebranch)
-denycreatebranch=$(git config --type=bool hooks.denycreatebranch)
-allowdeletetag=$(git config --type=bool hooks.allowdeletetag)
-allowmodifytag=$(git config --type=bool hooks.allowmodifytag)
-
-# check for no description
-projectdesc=$(sed -e '1q' "$GIT_DIR/description")
-case "$projectdesc" in
-"Unnamed repository"* | "")
- echo "*** Project description file hasn't been set" >&2
- exit 1
- ;;
-esac
-
-# --- Check types
-# if $newrev is 0000...0000, it's a commit to delete a ref.
-zero="0000000000000000000000000000000000000000"
-if [ "$newrev" = "$zero" ]; then
- newrev_type=delete
-else
- newrev_type=$(git cat-file -t $newrev)
-fi
-
-case "$refname","$newrev_type" in
- refs/tags/*,commit)
- # un-annotated tag
- short_refname=${refname##refs/tags/}
- if [ "$allowunannotated" != "true" ]; then
- echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2
- echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2
- exit 1
- fi
- ;;
- refs/tags/*,delete)
- # delete tag
- if [ "$allowdeletetag" != "true" ]; then
- echo "*** Deleting a tag is not allowed in this repository" >&2
- exit 1
- fi
- ;;
- refs/tags/*,tag)
- # annotated tag
- if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1
- then
- echo "*** Tag '$refname' already exists." >&2
- echo "*** Modifying a tag is not allowed in this repository." >&2
- exit 1
- fi
- ;;
- refs/heads/*,commit)
- # branch
- if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then
- echo "*** Creating a branch is not allowed in this repository" >&2
- exit 1
- fi
- ;;
- refs/heads/*,delete)
- # delete branch
- if [ "$allowdeletebranch" != "true" ]; then
- echo "*** Deleting a branch is not allowed in this repository" >&2
- exit 1
- fi
- ;;
- refs/remotes/*,commit)
- # tracking branch
- ;;
- refs/remotes/*,delete)
- # delete tracking branch
- if [ "$allowdeletebranch" != "true" ]; then
- echo "*** Deleting a tracking branch is not allowed in this repository" >&2
- exit 1
- fi
- ;;
- *)
- # Anything else (is there anything else?)
- echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2
- exit 1
- ;;
-esac
-
-# --- Finished
-exit 0
diff --git a/vendor/gix/src/assets/baseline-init/info/exclude b/vendor/gix/src/assets/baseline-init/info/exclude
deleted file mode 100644
index a5196d1be..000000000
--- a/vendor/gix/src/assets/baseline-init/info/exclude
+++ /dev/null
@@ -1,6 +0,0 @@
-# git ls-files --others --exclude-from=.git/info/exclude
-# Lines that start with '#' are comments.
-# For a project mostly in C, the following would be a good set of
-# exclude patterns (uncomment them if you want to use them):
-# *.[oa]
-# *~
diff --git a/vendor/gix/src/assets/baseline-init/HEAD b/vendor/gix/src/assets/init/HEAD
index b870d8262..b870d8262 100644
--- a/vendor/gix/src/assets/baseline-init/HEAD
+++ b/vendor/gix/src/assets/init/HEAD
diff --git a/vendor/gix/src/assets/init/description b/vendor/gix/src/assets/init/description
new file mode 100644
index 000000000..7ffa6830d
--- /dev/null
+++ b/vendor/gix/src/assets/init/description
@@ -0,0 +1 @@
+Unnamed repository; everything before the `;` is the name of the repository.
diff --git a/vendor/gix/src/assets/init/hooks/applypatch-msg.sample b/vendor/gix/src/assets/init/hooks/applypatch-msg.sample
new file mode 100755
index 000000000..945f2f6b3
--- /dev/null
+++ b/vendor/gix/src/assets/init/hooks/applypatch-msg.sample
@@ -0,0 +1,25 @@
+#!/bin/sh
+# A sample hook to check commit messages created by `git am`
+###########################################################
+#
+# When you receive a patch via email, the `git am` command is commonly used to apply
+# that patch. During the `git am` process, the `applypatch-msg` hook is executed before
+# creating the commit. Its purpose is to validate and modify the commit log message
+# before the patch is applied as a commit in your Git repository.
+#
+# This script serves as an example to validate that the commit message introduced by
+# the patch from an email would pass the `commit-msg` hook, which would be executed
+# if you had created the commit yourself.
+#
+# This hook is the first and followed up by `pre-applypatch` and `post-applypatch`.
+#
+# To enable this hook remove the `.sample` suffix from this file entirely.
+
+# Retrieve the path of the commit-msg hook script.
+commitmsg="$(git rev-parse --git-path hooks/commit-msg)"
+
+# If the commit-msg hook script is executable, execute it and pass any command-line arguments to it.
+test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"}
+
+# Be sure to exit without error if `exec` isn't called.
+:
diff --git a/vendor/gix/src/assets/init/hooks/commit-msg.sample b/vendor/gix/src/assets/init/hooks/commit-msg.sample
new file mode 100755
index 000000000..a7f612f6b
--- /dev/null
+++ b/vendor/gix/src/assets/init/hooks/commit-msg.sample
@@ -0,0 +1,25 @@
+#!/bin/sh
+# A sample hook to check commit messages created by `git commit`
+################################################################
+#
+# This example script checks commit messages for duplicate `Signed-off-by`
+# lines and rejects the commit if these are present.
+#
+# It is called by "git commit" with a single argument: the name of the file
+# that contains the final commit message, which would be used in the commit.
+# A non-zero exit status after issuing an appropriate message stops the operation.
+# The hook is allowed to edit the commit message file by rewriting the file
+# containing it.
+#
+# To enable this hook remove the `.sample` suffix from this file entirely.
+
+# Check for duplicate Signed-off-by lines in the commit message.
+# The following command uses grep to find lines starting with "Signed-off-by: "
+# in the commit message file specified by the first argument `$1`.
+# It then sorts the lines, counts the number of occurrences of each line,
+# and removes any lines that occur only once.
+# If there are any remaining lines, it means there are duplicate Signed-off-by lines.
+test "$(grep '^Signed-off-by: ' "$1" | sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" = "" || {
+ echo "Remove duplicate Signed-off-by lines and repeat the commit." 1>&2
+ exit 1
+}
diff --git a/vendor/gix/src/assets/init/hooks/docs.url b/vendor/gix/src/assets/init/hooks/docs.url
new file mode 100644
index 000000000..bbec3978f
--- /dev/null
+++ b/vendor/gix/src/assets/init/hooks/docs.url
@@ -0,0 +1 @@
+https://git-scm.com/docs/githooks
diff --git a/vendor/gix/src/assets/init/hooks/fsmonitor-watchman.sample b/vendor/gix/src/assets/init/hooks/fsmonitor-watchman.sample
new file mode 100755
index 000000000..cd8985bc8
--- /dev/null
+++ b/vendor/gix/src/assets/init/hooks/fsmonitor-watchman.sample
@@ -0,0 +1,16 @@
+#!/usr/bin/sh
+# How to use hook-based fs-monitor integrations
+###############################################
+
+# This script is meant as a placeholder for integrating filesystem monitors with git
+# using hooks in order to speed up commands like `git-status`.
+#
+# To setup the fs-monitor for use with watchman, run
+# `git config core.fsmonitor .git/hooks/fsmonitor-watchman` and paste the content of
+# the example script over at https://github.com/git/git/blob/aa9166bcc0ba654fc21f198a30647ec087f733ed/templates/hooks--fsmonitor-watchman.sample
+# into `.git/hooks/fsmonitor-watchman`.
+#
+# Note that as of this writing, on macOS and Windows, and starting with git 2.35.1,
+# one can use the built-in fs-monitor implementation using `git config core.fsmonitor true`
+
+exit 42
diff --git a/vendor/gix/src/assets/init/hooks/post-update.sample b/vendor/gix/src/assets/init/hooks/post-update.sample
new file mode 100755
index 000000000..506a06511
--- /dev/null
+++ b/vendor/gix/src/assets/init/hooks/post-update.sample
@@ -0,0 +1,12 @@
+#!/bin/sh
+# A sample hook that runs after receiving a pack on a remote
+############################################################
+# This hook is called after a pack was received on the remote, i.e. after a successful `git push` operation.
+# It's useful on the server side only.
+#
+# There are many more receive hooks which are documented in the official documentation: https://git-scm.com/docs/githooks.
+#
+# To enable this hook remove the `.sample` suffix from this file entirely.
+
+# Update static files to support the 'dumb' git HTTP protocol.
+exec git update-server-info
diff --git a/vendor/gix/src/assets/init/hooks/pre-applypatch.sample b/vendor/gix/src/assets/init/hooks/pre-applypatch.sample
new file mode 100755
index 000000000..de06c7f7f
--- /dev/null
+++ b/vendor/gix/src/assets/init/hooks/pre-applypatch.sample
@@ -0,0 +1,27 @@
+#!/bin/sh
+# A sample hook to check commit messages created by `git am`
+###########################################################
+
+# This hook script is triggered by `git am` without any context just before creating a commit,
+# which is useful to inspect the current tree or run scripts for further verification.
+#
+# If it exits with a non-zero exit code, the commit will not be created. Everything printed
+# to the output or error channels will be visible to the user.
+#
+# Note that there is a sibling hook called `post-applypatch` (also without further context)
+# which is run after the commit was created. It is useful to use the commit hash for further
+# processing, like sending information to the involved parties.
+# Finally, the `applypatch-msg` hook is called at the very beginning of the `git am` operation
+# to provide access to the commit-message.
+#
+# To enable this hook remove the `.sample` suffix from this file entirely.
+
+# Retrieve the path to the pre-commit hook script using the "git rev-parse" command.
+precommit="$(git rev-parse --git-path hooks/pre-commit)"
+
+# Check if the pre-commit hook script exists and is executable.
+# If it does, execute it passing the arguments from this script (if any) using the "exec" command.
+test -x "$precommit" && exec "$precommit" ${1+"$@"}
+
+# Be sure to exit without error if `exec` isn't called.
+:
diff --git a/vendor/gix/src/assets/init/hooks/pre-commit.sample b/vendor/gix/src/assets/init/hooks/pre-commit.sample
new file mode 100755
index 000000000..9d256d4c6
--- /dev/null
+++ b/vendor/gix/src/assets/init/hooks/pre-commit.sample
@@ -0,0 +1,19 @@
+#!/bin/sh
+# A sample hook to prevent commits with merge-markers
+#####################################################
+# This example hook rejects changes that are about to be committed with merge markers,
+# as that would be a clear indication of a failed merge. It is triggered by `git commit`
+# and returning with non-zero exit status prevents the commit from being created.
+#
+# To enable this hook remove the `.sample` suffix from this file entirely.
+
+# Check for merge markers in modified files
+for file in $(git diff --cached --name-only); do
+ if grep -q -E '^(<<<<<<<|=======|>>>>>>>|\|\|\|\|\|\|\|)$' "$file"; then
+ echo "Error: File '$file' contains merge markers. Please remove them before committing."
+ exit 1
+ fi
+done
+
+# Exit with success if there are no errors
+exit 0
diff --git a/vendor/gix/src/assets/init/hooks/pre-merge-commit.sample b/vendor/gix/src/assets/init/hooks/pre-merge-commit.sample
new file mode 100755
index 000000000..0896f5b6f
--- /dev/null
+++ b/vendor/gix/src/assets/init/hooks/pre-merge-commit.sample
@@ -0,0 +1,16 @@
+#!/bin/sh
+# A sample hook to check commits created by `git merge`
+#######################################################
+#
+# This hook is invoked by `git merge` without further context right before creating a commit.
+# It should be used to validate the current state that is supposed to be committed, or exit
+# with a non-zero status to prevent the commit.
+# All output will be visible to the user.
+#
+# To enable this hook remove the `.sample` suffix from this file entirely.
+
+# Check if the pre-commit hook exists and is executable. If it is, it executes the pre-commit hook script.
+test -x "$GIT_DIR/hooks/pre-commit" && exec "$GIT_DIR/hooks/pre-commit"
+
+# Be sure to exit without error if `exec` isn't called.
+:
diff --git a/vendor/gix/src/assets/init/hooks/pre-push.sample b/vendor/gix/src/assets/init/hooks/pre-push.sample
new file mode 100755
index 000000000..8846fe8a1
--- /dev/null
+++ b/vendor/gix/src/assets/init/hooks/pre-push.sample
@@ -0,0 +1,46 @@
+#!/bin/sh
+# Check for "DELME" in commit messages of about-to-be-pushed commits
+####################################################################
+# This hook script is triggered by `git push` right after a connection to the remote
+# was established and its initial response was received, and right before generating
+# and pushing a pack-file.
+# The operation will be aborted when exiting with a non-zero status.
+#
+# The following arguments are provided:
+#
+# $1 - The symbolic name of the remote to push to, like "origin" or the URL like "https://github.com/Byron/gitoxide" if there is no such name.
+# $2 - The URL of the remote to push to, like "https://github.com/Byron/gitoxide".
+#
+# The hook should then read from standard input in a line-by-line fashion and split the following space-separated fields:
+#
+# * local ref - the left side of a ref-spec, i.e. "local" of the "local:refs/heads/remote" ref-spec
+# * local hash - the hash of the commit pointed to by `local ref`
+# * remote ref - the right side of a ref-spec, i.e. "refs/heads/remote" of the "local:refs/heads/remote" ref-spec
+# * remote hash - the hash of the commit pointed to by `remote ref`
+#
+# In this example, we abort the push if any of the about-to-be-pushed commits have "DELME" in their commit message.
+#
+# To enable this hook remove the `.sample` suffix from this file entirely.
+
+remote="$1"
+url="$2"
+
+# Check each commit being pushed
+while read _local_ref local_hash _remote_ref _remote_hash; do
+ # Skip if the local hash is all zeroes (deletion)
+ zero_sha=$(printf "%0${#local_hash}d" 0)
+ if [ "$local_hash" = "$zero_sha" ]; then
+ continue
+ fi
+ # Get the commit message
+ commit_msg=$(git log --format=%s -n 1 "$local_hash")
+
+ # Check if the commit message contains "DELME"
+ if echo "$commit_msg" | grep -iq "DELME"; then
+ echo "Error: Found commit with 'DELME' in message. Push to $remote ($url) aborted." 1>&2
+ exit 1
+ fi
+done
+
+# If no commit with "DELME" found, allow the push
+exit 0
diff --git a/vendor/gix/src/assets/init/hooks/pre-rebase.sample b/vendor/gix/src/assets/init/hooks/pre-rebase.sample
new file mode 100755
index 000000000..485012068
--- /dev/null
+++ b/vendor/gix/src/assets/init/hooks/pre-rebase.sample
@@ -0,0 +1,40 @@
+#!/bin/sh
+# A sample hook to validate the branches involved in a rebase operation
+#######################################################################
+#
+# This hook is invoked right before `git rebase` starts its work and
+# prevents anything else to happen by returning a non-zero exit code.
+#
+# The following arguments are provided:
+#
+# $1 - the branch that contains the commit from which $2 was forked.
+# $2 - the branch being rebased or no second argument at all if the rebase applies to `HEAD`.
+#
+# This example hook aborts the rebase operation if the branch being rebased is not up to date
+# with the latest changes from the upstream branch, or if there are any uncommitted changes.
+#
+# To enable this hook remove the `.sample` suffix from this file entirely.
+
+upstream_branch=$1
+if [ "$#" -eq 2 ]; then
+ branch_being_rebased=$2
+else
+ branch_being_rebased=$(git symbolic-ref --quiet --short HEAD) || exit 0 # ignore rebases on detached heads
+fi
+
+# Check if the branch being rebased is behind the upstream branch
+if git log --oneline ${upstream_branch}..${branch_being_rebased} > /dev/null; then
+ echo "Warning: The branch being rebased (${branch_being_rebased}) is behind the upstream branch (${upstream_branch})." 1>&2
+ echo "Please update your branch before rebasing." 1>&2
+ exit 1
+fi
+
+# Check if there are any uncommitted changes
+if ! git diff-index --quiet HEAD --; then
+ echo "Warning: There are uncommitted changes in your branch ${branch_being_rebased}." 1>&2
+ echo "Please commit or stash your changes before rebasing." 1>&2
+ exit 2
+fi
+
+# All good, let the rebase proceed.
+exit 0
diff --git a/vendor/gix/src/assets/init/hooks/prepare-commit-msg.sample b/vendor/gix/src/assets/init/hooks/prepare-commit-msg.sample
new file mode 100755
index 000000000..a38ff5a59
--- /dev/null
+++ b/vendor/gix/src/assets/init/hooks/prepare-commit-msg.sample
@@ -0,0 +1,54 @@
+#!/bin/sh
+# A hook called by `git commit` to adjust the commit message right before the user sees it
+##########################################################################################
+#
+# This script is called by `git commit` after commit message was initialized and right before
+# an editor is launched.
+#
+# It receives one to three arguments:
+#
+# $1 - the path to the file containing the commit message. It can be edited to change the message.
+# $2 - the kind of source of the message contained in $1. Possible values are
+# "message" - a message was provided via `-m` or `-F`
+# "commit" - `-c`, `-C` or `--amend` was given
+# "squash" - the `.git/SQUASH_MSG` file exists
+# "merge" - this is a merge or the `.git/MERGE` file exists
+# "template" - `-t` was provided or `commit.template` was set
+# $3 - If $2 is "commit" then this is the hash of the commit.
+# It can also take other values, best understood by studying the source code at
+# https://github.com/git/git/blob/aa9166bcc0ba654fc21f198a30647ec087f733ed/builtin/commit.c#L745
+#
+# The following example pre-fills the commit message with a prefix derived from the branch name.
+#
+# To enable this hook remove the `.sample` suffix from this file entirely.
+
+COMMIT_MSG_FILE=$1
+
+# Check if the commit message file is empty or already contains a message
+if [ -s "$COMMIT_MSG_FILE" ]; then
+ # If the commit message is already provided, exit without making any changes.
+ # This can happen if the user provided a message via `-m` or a template.
+ exit 0
+fi
+
+# Retrieve the branch name from the current HEAD commit
+BRANCH_NAME=$(git symbolic-ref --short HEAD)
+
+# Generate a default commit message based on the branch name
+DEFAULT_MSG=""
+
+case "$BRANCH_NAME" in
+ "feature/*")
+ DEFAULT_MSG="feat: "
+ ;;
+ "bugfix/*")
+ DEFAULT_MSG="fix: "
+ ;;
+ *)
+ DEFAULT_MSG="chore: "
+ ;;
+esac
+
+# Set the commit message that will be presented to the user.
+echo "$DEFAULT_MSG" > "$COMMIT_MSG_FILE"
+
diff --git a/vendor/gix/src/assets/init/info/exclude b/vendor/gix/src/assets/init/info/exclude
new file mode 100644
index 000000000..1beb19eee
--- /dev/null
+++ b/vendor/gix/src/assets/init/info/exclude
@@ -0,0 +1,5 @@
+# This file contains repository-wide exclude patterns that git will ignore.
+# They are local and will not be shared when pushing or pulling.
+# When using Rust the following would be typical exclude patterns.
+# Remove the '# ' prefix to let them take effect.
+# /target/
diff --git a/vendor/gix/src/attribute_stack.rs b/vendor/gix/src/attribute_stack.rs
new file mode 100644
index 000000000..1aaca0f2b
--- /dev/null
+++ b/vendor/gix/src/attribute_stack.rs
@@ -0,0 +1,69 @@
+use crate::bstr::BStr;
+use crate::types::AttributeStack;
+use crate::Repository;
+use gix_odb::FindExt;
+use std::ops::{Deref, DerefMut};
+
+/// Lifecycle
+impl<'repo> AttributeStack<'repo> {
+ /// Create a new instance from a `repo` and the underlying pre-configured `stack`.
+ ///
+ /// Note that this type is typically created by [`Repository::attributes()`] or [`Repository::attributes_only()`].
+ pub fn new(stack: gix_worktree::Stack, repo: &'repo Repository) -> Self {
+ AttributeStack { repo, inner: stack }
+ }
+
+ /// Detach the repository and return the underlying plumbing datatype.
+ pub fn detach(self) -> gix_worktree::Stack {
+ self.inner
+ }
+}
+
+impl Deref for AttributeStack<'_> {
+ type Target = gix_worktree::Stack;
+
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+
+impl DerefMut for AttributeStack<'_> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+
+/// Platform retrieval
+impl<'repo> AttributeStack<'repo> {
+ /// Append the `relative` path to the root directory of the cache and efficiently create leading directories, while assuring that no
+ /// symlinks are in that path.
+ /// Unless `is_dir` is known with `Some(…)`, then `relative` points to a directory itself in which case the entire resulting
+ /// path is created as directory. If it's not known it is assumed to be a file.
+ ///
+ /// Provide access to cached information for that `relative` path via the returned platform.
+ pub fn at_path(
+ &mut self,
+ relative: impl AsRef<std::path::Path>,
+ is_dir: Option<bool>,
+ ) -> std::io::Result<gix_worktree::stack::Platform<'_>> {
+ self.inner
+ .at_path(relative, is_dir, |id, buf| self.repo.objects.find_blob(id, buf))
+ }
+
+ /// Obtain a platform for lookups from a repo-`relative` path, typically obtained from an index entry. `is_dir` should reflect
+ /// whether it's a directory or not, or left at `None` if unknown.
+ ///
+ /// If `relative` ends with `/` and `is_dir` is `None`, it is automatically assumed to be a directory.
+ ///
+ /// ### Panics
+ ///
+ /// - on illformed UTF8 in `relative`
+ pub fn at_entry<'r>(
+ &mut self,
+ relative: impl Into<&'r BStr>,
+ is_dir: Option<bool>,
+ ) -> std::io::Result<gix_worktree::stack::Platform<'_>> {
+ self.inner
+ .at_entry(relative, is_dir, |id, buf| self.repo.objects.find_blob(id, buf))
+ }
+}
diff --git a/vendor/gix/src/attributes.rs b/vendor/gix/src/attributes.rs
deleted file mode 100644
index bb8636460..000000000
--- a/vendor/gix/src/attributes.rs
+++ /dev/null
@@ -1,9 +0,0 @@
-/// The error returned by [`Repository::attributes()`][crate::Repository::attributes()].
-#[derive(Debug, thiserror::Error)]
-#[allow(missing_docs)]
-pub enum Error {
- #[error(transparent)]
- ConfigureAttributes(#[from] crate::config::attribute_stack::Error),
- #[error(transparent)]
- ConfigureExcludes(#[from] crate::config::exclude_stack::Error),
-}
diff --git a/vendor/gix/src/clone/checkout.rs b/vendor/gix/src/clone/checkout.rs
index 823005551..ece480a56 100644
--- a/vendor/gix/src/clone/checkout.rs
+++ b/vendor/gix/src/clone/checkout.rs
@@ -26,9 +26,7 @@ pub mod main_worktree {
#[error(transparent)]
CheckoutOptions(#[from] crate::config::checkout_options::Error),
#[error(transparent)]
- IndexCheckout(
- #[from] gix_worktree::checkout::Error<gix_odb::find::existing_object::Error<gix_odb::store::find::Error>>,
- ),
+ IndexCheckout(#[from] gix_worktree_state::checkout::Error<gix_odb::find::existing_object::Error>),
#[error("Failed to reopen object database as Arc (only if thread-safety wasn't compiled in)")]
OpenArcOdb(#[from] std::io::Error),
#[error("The HEAD reference could not be located")]
@@ -64,11 +62,24 @@ pub mod main_worktree {
///
/// Note that this is a no-op if the remote was empty, leaving this repository empty as well. This can be validated by checking
/// if the `head()` of the returned repository is not unborn.
- pub fn main_worktree(
+ pub fn main_worktree<P>(
&mut self,
- mut progress: impl crate::Progress,
+ mut progress: P,
should_interrupt: &AtomicBool,
- ) -> Result<(Repository, gix_worktree::checkout::Outcome), Error> {
+ ) -> Result<(Repository, gix_worktree_state::checkout::Outcome), Error>
+ where
+ P: gix_features::progress::NestedProgress,
+ P::SubProgress: gix_features::progress::NestedProgress + 'static,
+ {
+ self.main_worktree_inner(&mut progress, should_interrupt)
+ }
+
+ fn main_worktree_inner(
+ &mut self,
+ progress: &mut dyn gix_features::progress::DynNestedProgress,
+ should_interrupt: &AtomicBool,
+ ) -> Result<(Repository, gix_worktree_state::checkout::Outcome), Error> {
+ let _span = gix_trace::coarse!("gix::clone::PrepareCheckout::main_worktree()");
let repo = self
.repo
.as_ref()
@@ -81,7 +92,7 @@ pub mod main_worktree {
None => {
return Ok((
self.repo.take().expect("still present"),
- gix_worktree::checkout::Outcome::default(),
+ gix_worktree_state::checkout::Outcome::default(),
))
}
};
@@ -92,25 +103,27 @@ pub mod main_worktree {
})?;
let mut index = gix_index::File::from_state(index, repo.index_path());
- let mut opts = repo.config.checkout_options(repo.git_dir())?;
+ let mut opts = repo
+ .config
+ .checkout_options(repo, gix_worktree::stack::state::attributes::Source::IdMapping)?;
opts.destination_is_initially_empty = true;
- let mut files = progress.add_child_with_id("checkout", ProgressId::CheckoutFiles.into());
- let mut bytes = progress.add_child_with_id("writing", ProgressId::BytesWritten.into());
+ let mut files = progress.add_child_with_id("checkout".to_string(), ProgressId::CheckoutFiles.into());
+ let mut bytes = progress.add_child_with_id("writing".to_string(), ProgressId::BytesWritten.into());
files.init(Some(index.entries().len()), crate::progress::count("files"));
bytes.init(None, crate::progress::bytes());
let start = std::time::Instant::now();
- let outcome = gix_worktree::checkout(
+ let outcome = gix_worktree_state::checkout(
&mut index,
workdir,
{
let objects = repo.objects.clone().into_arc()?;
move |oid, buf| objects.find_blob(oid, buf)
},
- &mut files,
- &mut bytes,
+ &files,
+ &bytes,
should_interrupt,
opts,
)?;
diff --git a/vendor/gix/src/clone/fetch/mod.rs b/vendor/gix/src/clone/fetch/mod.rs
index e20cc96cb..c03b8f839 100644
--- a/vendor/gix/src/clone/fetch/mod.rs
+++ b/vendor/gix/src/clone/fetch/mod.rs
@@ -26,7 +26,7 @@ pub enum Error {
SaveConfigIo(#[from] std::io::Error),
#[error("The remote HEAD points to a reference named {head_ref_name:?} which is invalid.")]
InvalidHeadRef {
- source: gix_validate::refname::Error,
+ source: gix_validate::reference::name::Error,
head_ref_name: crate::bstr::BString,
},
#[error("Failed to update HEAD with values from remote")]
@@ -55,9 +55,18 @@ impl PrepareFetch {
should_interrupt: &std::sync::atomic::AtomicBool,
) -> Result<(crate::Repository, crate::remote::fetch::Outcome), Error>
where
- P: crate::Progress,
+ P: crate::NestedProgress,
P::SubProgress: 'static,
{
+ self.fetch_only_inner(&mut progress, should_interrupt).await
+ }
+
+ #[gix_protocol::maybe_async::maybe_async]
+ async fn fetch_only_inner(
+ &mut self,
+ progress: &mut dyn crate::DynNestedProgress,
+ should_interrupt: &std::sync::atomic::AtomicBool,
+ ) -> Result<(crate::Repository, crate::remote::fetch::Outcome), Error> {
use crate::{bstr::ByteVec, remote, remote::fetch::RefLogMessage};
let repo = self
@@ -111,7 +120,7 @@ impl PrepareFetch {
f(&mut connection).map_err(|err| Error::RemoteConnection(err))?;
}
connection
- .prepare_fetch(&mut progress, {
+ .prepare_fetch(&mut *progress, {
let mut opts = self.fetch_options.clone();
if !opts.extra_refspecs.contains(&head_refspec) {
opts.extra_refspecs.push(head_refspec)
@@ -134,7 +143,7 @@ impl PrepareFetch {
message: reflog_message.clone(),
})
.with_shallow(self.shallow.clone())
- .receive(progress, should_interrupt)
+ .receive_inner(progress, should_interrupt)
.await?;
util::append_config_to_repo_config(repo, config);
@@ -149,14 +158,14 @@ impl PrepareFetch {
}
/// Similar to [`fetch_only()`][Self::fetch_only()`], but passes ownership to a utility type to configure a checkout operation.
- #[cfg(feature = "blocking-network-client")]
+ #[cfg(all(feature = "worktree-mutation", feature = "blocking-network-client"))]
pub fn fetch_then_checkout<P>(
&mut self,
progress: P,
should_interrupt: &std::sync::atomic::AtomicBool,
) -> Result<(crate::clone::PrepareCheckout, crate::remote::fetch::Outcome), Error>
where
- P: crate::Progress,
+ P: crate::NestedProgress,
P::SubProgress: 'static,
{
let (repo, fetch_outcome) = self.fetch_only(progress, should_interrupt)?;
diff --git a/vendor/gix/src/clone/fetch/util.rs b/vendor/gix/src/clone/fetch/util.rs
index cb79669ac..ab90435d0 100644
--- a/vendor/gix/src/clone/fetch/util.rs
+++ b/vendor/gix/src/clone/fetch/util.rs
@@ -51,7 +51,7 @@ fn write_to_local_config(config: &gix_config::File<'static>, mode: WriteMode) ->
.append(matches!(mode, WriteMode::Append))
.open(config.meta().path.as_deref().expect("local config with path set"))?;
local_config.write_all(config.detect_newline_style())?;
- config.write_to_filter(&mut local_config, |s| s.meta().source == gix_config::Source::Local)
+ config.write_to_filter(&mut local_config, &mut |s| s.meta().source == gix_config::Source::Local)
}
pub fn append_config_to_repo_config(repo: &mut Repository, config: gix_config::File<'static>) {
@@ -76,6 +76,7 @@ pub fn update_head(
gix_protocol::handshake::Ref::Symbolic {
full_ref_name,
target,
+ tag: _,
object,
} if full_ref_name == "HEAD" => (Some(object.as_ref()), Some(target)),
gix_protocol::handshake::Ref::Direct { full_ref_name, object } if full_ref_name == "HEAD" => {
@@ -106,12 +107,7 @@ pub fn update_head(
repo.refs
.transaction()
.packed_refs(gix_ref::file::transaction::PackedRefs::DeletionsAndNonSymbolicUpdates(
- Box::new(|oid, buf| {
- repo.objects
- .try_find(oid, buf)
- .map(|obj| obj.map(|obj| obj.kind))
- .map_err(|err| Box::new(err) as Box<dyn std::error::Error + Send + Sync + 'static>)
- }),
+ Box::new(|oid, buf| repo.objects.try_find(&oid, buf).map(|obj| obj.map(|obj| obj.kind))),
))
.prepare(
{
@@ -202,7 +198,7 @@ fn setup_branch_config(
let remote = repo
.find_remote(remote_name)
.expect("remote was just created and must be visible in config");
- let group = gix_refspec::MatchGroup::from_fetch_specs(remote.fetch_specs.iter().map(|s| s.to_ref()));
+ let group = gix_refspec::MatchGroup::from_fetch_specs(remote.fetch_specs.iter().map(gix_refspec::RefSpec::to_ref));
let null = gix_hash::ObjectId::null(repo.object_hash());
let res = group.match_remotes(
Some(gix_refspec::match_group::Item {
diff --git a/vendor/gix/src/clone/mod.rs b/vendor/gix/src/clone/mod.rs
index 9ec226135..8afc3b99b 100644
--- a/vendor/gix/src/clone/mod.rs
+++ b/vendor/gix/src/clone/mod.rs
@@ -68,20 +68,37 @@ impl PrepareFetch {
url: Url,
path: impl AsRef<std::path::Path>,
kind: crate::create::Kind,
- mut create_opts: crate::create::Options,
+ create_opts: crate::create::Options,
open_opts: crate::open::Options,
) -> Result<Self, Error>
where
Url: TryInto<gix_url::Url, Error = E>,
gix_url::parse::Error: From<E>,
{
- let mut url = url.try_into().map_err(gix_url::parse::Error::from)?;
- url.canonicalize().map_err(|err| Error::CanonicalizeUrl {
- url: url.clone(),
- source: err,
- })?;
+ Self::new_inner(
+ url.try_into().map_err(gix_url::parse::Error::from)?,
+ path.as_ref(),
+ kind,
+ create_opts,
+ open_opts,
+ )
+ }
+
+ #[allow(clippy::result_large_err)]
+ fn new_inner(
+ mut url: gix_url::Url,
+ path: &std::path::Path,
+ kind: crate::create::Kind,
+ mut create_opts: crate::create::Options,
+ open_opts: crate::open::Options,
+ ) -> Result<Self, Error> {
create_opts.destination_must_be_empty = true;
let mut repo = crate::ThreadSafeRepository::init_opts(path, kind, create_opts, open_opts)?.to_thread_local();
+ url.canonicalize(repo.options.current_dir_or_empty())
+ .map_err(|err| Error::CanonicalizeUrl {
+ url: url.clone(),
+ source: err,
+ })?;
if repo.committer().is_none() {
let mut config = gix_config::File::new(gix_config::file::Metadata::api());
config
@@ -121,6 +138,7 @@ impl PrepareFetch {
/// A utility to collect configuration on how to perform a checkout into a working tree, and when dropped without checking out successfully
/// the fetched repository will be dropped.
#[must_use]
+#[cfg(feature = "worktree-mutation")]
pub struct PrepareCheckout {
/// A freshly initialized repository which is owned by us, or `None` if it was handed to the user
pub(self) repo: Option<crate::Repository>,
@@ -164,4 +182,5 @@ mod access_feat {
pub mod fetch;
///
+#[cfg(feature = "worktree-mutation")]
pub mod checkout;
diff --git a/vendor/gix/src/commit.rs b/vendor/gix/src/commit.rs
index 68e1eeba7..2cc8226f5 100644
--- a/vendor/gix/src/commit.rs
+++ b/vendor/gix/src/commit.rs
@@ -22,6 +22,7 @@ pub enum Error {
}
///
+#[cfg(feature = "revision")]
pub mod describe {
use std::borrow::Cow;
@@ -91,7 +92,7 @@ pub mod describe {
let (prio, tag_time) = match target_id {
Some(target_id) if peeled_id != *target_id => {
let tag = repo.find_object(target_id).ok()?.try_into_tag().ok()?;
- (1, tag.tagger().ok()??.time.seconds_since_unix_epoch)
+ (1, tag.tagger().ok()??.time.seconds)
}
_ => (0, 0),
};
@@ -124,11 +125,7 @@ pub mod describe {
// TODO: we assume direct refs for tags, which is the common case, but it doesn't have to be
// so rather follow symrefs till the first object and then peel tags after the first object was found.
let tag = r.try_id()?.object().ok()?.try_into_tag().ok()?;
- let tag_time = tag
- .tagger()
- .ok()
- .and_then(|s| s.map(|s| s.time.seconds_since_unix_epoch))
- .unwrap_or(0);
+ let tag_time = tag.tagger().ok().and_then(|s| s.map(|s| s.time.seconds)).unwrap_or(0);
let commit_id = tag.target_id().ok()?.object().ok()?.try_into_commit().ok()?.id;
Some((commit_id, tag_time, Cow::<BStr>::from(r.name().shorten().to_owned())))
})
@@ -187,7 +184,7 @@ pub mod describe {
///
/// Note that there will always be `Some(format)`
pub fn try_format(&self) -> Result<Option<gix_revision::describe::Format<'static>>, Error> {
- self.try_resolve()?.map(|r| r.format()).transpose()
+ self.try_resolve()?.map(Resolution::format).transpose()
}
/// Try to find a name for the configured commit id using all prior configuration, returning `Some(Outcome)`
@@ -201,14 +198,14 @@ pub mod describe {
/// to save ~40% of time.
pub fn try_resolve(&self) -> Result<Option<Resolution<'repo>>, Error> {
// TODO: dirty suffix with respective dirty-detection
- let mut graph = gix_revision::Graph::new(
+ let mut graph = gix_revwalk::Graph::new(
|id, buf| {
self.repo
.objects
.try_find(id, buf)
- .map(|r| r.and_then(|d| d.try_into_commit_iter()))
+ .map(|r| r.and_then(gix_object::Data::try_into_commit_iter))
},
- gix_commitgraph::Graph::from_info_dir(self.repo.objects.store_ref().path().join("info")).ok(),
+ gix_commitgraph::Graph::from_info_dir(self.repo.objects.store_ref().path().join("info").as_ref()).ok(),
);
let outcome = gix_revision::describe(
&self.id,
diff --git a/vendor/gix/src/config/cache/access.rs b/vendor/gix/src/config/cache/access.rs
index cea56f973..352bc9712 100644
--- a/vendor/gix/src/config/cache/access.rs
+++ b/vendor/gix/src/config/cache/access.rs
@@ -1,16 +1,15 @@
#![allow(clippy::result_large_err)]
use std::{borrow::Cow, path::PathBuf, time::Duration};
-use gix_attributes::Source;
use gix_lock::acquire::Fail;
use crate::{
bstr::BStr,
config,
config::{
- cache::util::{ApplyLeniency, ApplyLeniencyDefault},
- checkout_options,
- tree::{gitoxide, Checkout, Core, Key},
+ boolean,
+ cache::util::{ApplyLeniency, ApplyLeniencyDefaultValue},
+ tree::{Core, Key},
Cache,
},
remote,
@@ -19,7 +18,9 @@ use crate::{
/// Access
impl Cache {
+ #[cfg(feature = "blob-diff")]
pub(crate) fn diff_algorithm(&self) -> Result<gix_diff::blob::Algorithm, config::diff::algorithm::Error> {
+ use crate::config::cache::util::ApplyLeniencyDefault;
use crate::config::diff::algorithm::Error;
self.diff_algorithm
.get_or_try_init(|| {
@@ -69,6 +70,18 @@ impl Cache {
.get_or_try_init(|| remote::url::SchemePermission::from_config(&self.resolved, self.filter_config_section))
}
+ pub(crate) fn may_use_commit_graph(&self) -> Result<bool, config::boolean::Error> {
+ const DEFAULT: bool = true;
+ self.resolved
+ .boolean_by_key("core.commitGraph")
+ .map_or(Ok(DEFAULT), |res| {
+ Core::COMMIT_GRAPH
+ .enrich_error(res)
+ .with_lenient_default_value(self.lenient_config, DEFAULT)
+ })
+ }
+
+ #[cfg(feature = "blob-diff")]
pub(crate) fn diff_renames(
&self,
) -> Result<Option<crate::object::tree::diff::Rewrites>, crate::object::tree::diff::rewrites::Error> {
@@ -100,9 +113,10 @@ impl Cache {
}
/// The path to the user-level excludes file to ignore certain files in the worktree.
+ #[cfg(feature = "excludes")]
pub(crate) fn excludes_file(&self) -> Option<Result<PathBuf, gix_config::path::interpolate::Error>> {
self.trusted_file_path("core", None, Core::EXCLUDES_FILE.name)?
- .map(|p| p.into_owned())
+ .map(std::borrow::Cow::into_owned)
.into()
}
@@ -123,7 +137,7 @@ impl Cache {
let install_dir = crate::path::install_dir().ok();
let home = self.home_dir();
- let ctx = crate::config::cache::interpolate_context(install_dir.as_deref(), home.as_deref());
+ let ctx = config::cache::interpolate_context(install_dir.as_deref(), home.as_deref());
Some(path.interpolate(ctx))
}
@@ -131,80 +145,108 @@ impl Cache {
res.transpose().with_leniency(self.lenient_config)
}
+ pub(crate) fn fs_capabilities(&self) -> Result<gix_fs::Capabilities, boolean::Error> {
+ Ok(gix_fs::Capabilities {
+ precompose_unicode: boolean(self, "core.precomposeUnicode", &Core::PRECOMPOSE_UNICODE, false)?,
+ ignore_case: boolean(self, "core.ignoreCase", &Core::IGNORE_CASE, false)?,
+ executable_bit: boolean(self, "core.fileMode", &Core::FILE_MODE, true)?,
+ symlink: boolean(self, "core.symlinks", &Core::SYMLINKS, true)?,
+ })
+ }
+
+ #[cfg(feature = "index")]
+ pub(crate) fn stat_options(&self) -> Result<gix_index::entry::stat::Options, config::stat_options::Error> {
+ use crate::config::tree::gitoxide;
+ Ok(gix_index::entry::stat::Options {
+ trust_ctime: boolean(
+ self,
+ "core.trustCTime",
+ &Core::TRUST_C_TIME,
+ // For now, on MacOS it's known to not be trust-worthy at least with the Rust STDlib, being 2s off
+ !cfg!(target_os = "macos"),
+ )?,
+ use_nsec: boolean(self, "gitoxide.core.useNsec", &gitoxide::Core::USE_NSEC, false)?,
+ use_stdev: boolean(self, "gitoxide.core.useStdev", &gitoxide::Core::USE_STDEV, false)?,
+ check_stat: self
+ .apply_leniency(
+ self.resolved
+ .string("core", None, "checkStat")
+ .map(|v| Core::CHECK_STAT.try_into_checkstat(v)),
+ )?
+ .unwrap_or(true),
+ })
+ }
+
/// Collect everything needed to checkout files into a worktree.
/// Note that some of the options being returned will be defaulted so safe settings, the caller might have to override them
/// depending on the use-case.
+ #[cfg(feature = "worktree-mutation")]
pub(crate) fn checkout_options(
&self,
- git_dir: &std::path::Path,
- ) -> Result<gix_worktree::checkout::Options, checkout_options::Error> {
- fn boolean(
- me: &Cache,
- full_key: &str,
- key: &'static config::tree::keys::Boolean,
- default: bool,
- ) -> Result<bool, checkout_options::Error> {
- debug_assert_eq!(
- full_key,
- key.logical_name(),
- "BUG: key name and hardcoded name must match"
- );
- Ok(me
- .apply_leniency(me.resolved.boolean_by_key(full_key).map(|v| key.enrich_error(v)))?
- .unwrap_or(default))
- }
-
+ repo: &crate::Repository,
+ attributes_source: gix_worktree::stack::state::attributes::Source,
+ ) -> Result<gix_worktree_state::checkout::Options, config::checkout_options::Error> {
+ use crate::config::tree::gitoxide;
+ let git_dir = repo.git_dir();
let thread_limit = self.apply_leniency(
self.resolved
.integer_filter_by_key("checkout.workers", &mut self.filter_config_section.clone())
- .map(|value| Checkout::WORKERS.try_from_workers(value)),
+ .map(|value| crate::config::tree::Checkout::WORKERS.try_from_workers(value)),
)?;
- let capabilities = gix_fs::Capabilities {
- precompose_unicode: boolean(self, "core.precomposeUnicode", &Core::PRECOMPOSE_UNICODE, false)?,
- ignore_case: boolean(self, "core.ignoreCase", &Core::IGNORE_CASE, false)?,
- executable_bit: boolean(self, "core.fileMode", &Core::FILE_MODE, true)?,
- symlink: boolean(self, "core.symlinks", &Core::SYMLINKS, true)?,
+ let capabilities = self.fs_capabilities()?;
+ let filters = {
+ let collection = Default::default();
+ let mut filters = gix_filter::Pipeline::new(&collection, crate::filter::Pipeline::options(repo)?);
+ if let Ok(mut head) = repo.head() {
+ let ctx = filters.driver_context_mut();
+ ctx.ref_name = head.referent_name().map(|name| name.as_bstr().to_owned());
+ ctx.treeish = head.peel_to_commit_in_place().ok().map(|commit| commit.id);
+ }
+ filters
+ };
+ let filter_process_delay = if boolean(
+ self,
+ "gitoxide.core.filterProcessDelay",
+ &gitoxide::Core::FILTER_PROCESS_DELAY,
+ true,
+ )? {
+ gix_filter::driver::apply::Delay::Allow
+ } else {
+ gix_filter::driver::apply::Delay::Forbid
};
- Ok(gix_worktree::checkout::Options {
+ Ok(gix_worktree_state::checkout::Options {
+ filter_process_delay,
+ filters,
attributes: self
- .assemble_attribute_globals(
- git_dir,
- gix_worktree::cache::state::attributes::Source::IdMappingThenWorktree,
- self.attributes,
- )?
+ .assemble_attribute_globals(git_dir, attributes_source, self.attributes)?
.0,
fs: capabilities,
thread_limit,
destination_is_initially_empty: false,
overwrite_existing: false,
keep_going: false,
- stat_options: gix_index::entry::stat::Options {
- trust_ctime: boolean(self, "core.trustCTime", &Core::TRUST_C_TIME, true)?,
- use_nsec: boolean(self, "gitoxide.core.useNsec", &gitoxide::Core::USE_NSEC, false)?,
- use_stdev: boolean(self, "gitoxide.core.useStdev", &gitoxide::Core::USE_STDEV, false)?,
- check_stat: self
- .apply_leniency(
- self.resolved
- .string("core", None, "checkStat")
- .map(|v| Core::CHECK_STAT.try_into_checkstat(v)),
- )?
- .unwrap_or(true),
- },
+ stat_options: self.stat_options().map_err(|err| match err {
+ config::stat_options::Error::ConfigCheckStat(err) => {
+ config::checkout_options::Error::ConfigCheckStat(err)
+ }
+ config::stat_options::Error::ConfigBoolean(err) => config::checkout_options::Error::ConfigBoolean(err),
+ })?,
})
}
+ #[cfg(feature = "excludes")]
pub(crate) fn assemble_exclude_globals(
&self,
git_dir: &std::path::Path,
overrides: Option<gix_ignore::Search>,
- source: gix_worktree::cache::state::ignore::Source,
+ source: gix_worktree::stack::state::ignore::Source,
buf: &mut Vec<u8>,
- ) -> Result<gix_worktree::cache::state::Ignore, config::exclude_stack::Error> {
+ ) -> Result<gix_worktree::stack::state::Ignore, config::exclude_stack::Error> {
let excludes_file = match self.excludes_file().transpose()? {
Some(user_path) => Some(user_path),
None => self.xdg_config_path("ignore")?,
};
- Ok(gix_worktree::cache::state::Ignore::new(
+ Ok(gix_worktree::stack::state::Ignore::new(
overrides.unwrap_or_default(),
gix_ignore::Search::from_git_dir(git_dir, excludes_file, buf)?,
None,
@@ -212,12 +254,14 @@ impl Cache {
))
}
// TODO: at least one test, maybe related to core.attributesFile configuration.
+ #[cfg(feature = "attributes")]
pub(crate) fn assemble_attribute_globals(
&self,
git_dir: &std::path::Path,
- source: gix_worktree::cache::state::attributes::Source,
+ source: gix_worktree::stack::state::attributes::Source,
attributes: crate::open::permissions::Attributes,
- ) -> Result<(gix_worktree::cache::state::Attributes, Vec<u8>), config::attribute_stack::Error> {
+ ) -> Result<(gix_worktree::stack::state::Attributes, Vec<u8>), config::attribute_stack::Error> {
+ use gix_attributes::Source;
let configured_or_user_attributes = match self
.trusted_file_path("core", None, Core::ATTRIBUTES_FILE.name)
.transpose()?
@@ -243,15 +287,45 @@ impl Cache {
let info_attributes_path = git_dir.join("info").join("attributes");
let mut buf = Vec::new();
let mut collection = gix_attributes::search::MetadataCollection::default();
- let res = gix_worktree::cache::state::Attributes::new(
+ let state = gix_worktree::stack::state::Attributes::new(
gix_attributes::Search::new_globals(attribute_files, &mut buf, &mut collection)?,
Some(info_attributes_path),
source,
collection,
);
- Ok((res, buf))
+ Ok((state, buf))
}
+ #[cfg(feature = "attributes")]
+ pub(crate) fn pathspec_defaults(
+ &self,
+ ) -> Result<gix_pathspec::Defaults, gix_pathspec::defaults::from_environment::Error> {
+ use crate::config::tree::gitoxide;
+ let res = gix_pathspec::Defaults::from_environment(&mut |name| {
+ let key = [
+ &gitoxide::Pathspec::ICASE,
+ &gitoxide::Pathspec::GLOB,
+ &gitoxide::Pathspec::NOGLOB,
+ &gitoxide::Pathspec::LITERAL,
+ ]
+ .iter()
+ .find_map(|key| (key.environment_override().expect("set") == name).then_some(key))
+ .expect("we must know all possible input variable names");
+
+ let val = self
+ .resolved
+ .string("gitoxide", Some("pathspec".into()), key.name())
+ .map(gix_path::from_bstr)?;
+ Some(val.into_owned().into())
+ });
+ if res.is_err() && self.lenient_config {
+ Ok(gix_pathspec::Defaults::default())
+ } else {
+ res
+ }
+ }
+
+ #[cfg(any(feature = "attributes", feature = "excludes"))]
pub(crate) fn xdg_config_path(
&self,
resource_file_name: &str,
@@ -284,3 +358,19 @@ impl Cache {
gix_path::env::home_dir().and_then(|path| self.environment.home.check_opt(path))
}
}
+
+fn boolean(
+ me: &Cache,
+ full_key: &str,
+ key: &'static config::tree::keys::Boolean,
+ default: bool,
+) -> Result<bool, boolean::Error> {
+ debug_assert_eq!(
+ full_key,
+ key.logical_name(),
+ "BUG: key name and hardcoded name must match"
+ );
+ Ok(me
+ .apply_leniency(me.resolved.boolean_by_key(full_key).map(|v| key.enrich_error(v)))?
+ .unwrap_or(default))
+}
diff --git a/vendor/gix/src/config/cache/incubate.rs b/vendor/gix/src/config/cache/incubate.rs
index 44c537b50..cf7c5dcdf 100644
--- a/vendor/gix/src/config/cache/incubate.rs
+++ b/vendor/gix/src/config/cache/incubate.rs
@@ -102,7 +102,7 @@ fn load_config(
path: config_path,
};
if lenient {
- log::warn!("ignoring: {err:#?}");
+ gix_trace::warn!("ignoring: {err:#?}");
return Ok(gix_config::File::new(metadata));
} else {
return Err(err);
@@ -117,7 +117,7 @@ fn load_config(
path: config_path,
};
if lenient {
- log::warn!("ignoring: {err:#?}");
+ gix_trace::warn!("ignoring: {err:#?}");
buf.clear();
} else {
return Err(err);
diff --git a/vendor/gix/src/config/cache/init.rs b/vendor/gix/src/config/cache/init.rs
index 6fcbcc4ec..3c482b154 100644
--- a/vendor/gix/src/config/cache/init.rs
+++ b/vendor/gix/src/config/cache/init.rs
@@ -13,6 +13,7 @@ use crate::{
Cache,
},
open,
+ repository::init::setup_objects,
};
/// Initialization
@@ -71,7 +72,7 @@ impl Cache {
let config = {
let git_prefix = &git_prefix;
- let metas = [
+ let mut metas = [
gix_config::source::Kind::GitInstallation,
gix_config::source::Kind::System,
gix_config::source::Kind::Global,
@@ -99,7 +100,7 @@ impl Cache {
let err_on_nonexisting_paths = false;
let mut globals = gix_config::File::from_paths_metadata_buf(
- metas,
+ &mut metas,
&mut buf,
err_on_nonexisting_paths,
gix_config::file::init::Options {
@@ -150,6 +151,7 @@ impl Cache {
true,
lenient_config,
)?;
+ #[cfg(feature = "revision")]
let object_kind_hint = util::disambiguate_hint(&config, lenient_config)?;
let (static_pack_cache_limit_bytes, pack_cache_bytes, object_cache_bytes) =
util::parse_object_caches(&config, lenient_config, filter_config_section)?;
@@ -158,6 +160,7 @@ impl Cache {
resolved: config.into(),
use_multi_pack_index,
object_hash,
+ #[cfg(feature = "revision")]
object_kind_hint,
static_pack_cache_limit_bytes,
pack_cache_bytes,
@@ -173,9 +176,11 @@ impl Cache {
user_agent: Default::default(),
personas: Default::default(),
url_rewrite: Default::default(),
+ #[cfg(feature = "blob-diff")]
diff_renames: Default::default(),
#[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
url_scheme: Default::default(),
+ #[cfg(feature = "blob-diff")]
diff_algorithm: Default::default(),
})
}
@@ -210,19 +215,26 @@ impl Cache {
false,
self.lenient_config,
)?;
- let object_kind_hint = util::disambiguate_hint(config, self.lenient_config)?;
+
+ #[cfg(feature = "revision")]
+ {
+ let object_kind_hint = util::disambiguate_hint(config, self.lenient_config)?;
+ self.object_kind_hint = object_kind_hint;
+ }
let reflog = util::query_refupdates(config, self.lenient_config)?;
self.hex_len = hex_len;
self.ignore_case = ignore_case;
- self.object_kind_hint = object_kind_hint;
self.reflog = reflog;
self.user_agent = Default::default();
self.personas = Default::default();
self.url_rewrite = Default::default();
- self.diff_renames = Default::default();
- self.diff_algorithm = Default::default();
+ #[cfg(feature = "blob-diff")]
+ {
+ self.diff_renames = Default::default();
+ self.diff_algorithm = Default::default();
+ }
(
self.static_pack_cache_limit_bytes,
self.pack_cache_bytes,
@@ -268,8 +280,19 @@ impl crate::Repository {
&mut self,
config: crate::Config,
) -> Result<(), Error> {
+ let (a, b, c) = (
+ self.config.static_pack_cache_limit_bytes,
+ self.config.pack_cache_bytes,
+ self.config.object_cache_bytes,
+ );
self.config.reread_values_and_clear_caches_replacing_config(config)?;
self.apply_changed_values();
+ if a != self.config.static_pack_cache_limit_bytes
+ || b != self.config.pack_cache_bytes
+ || c != self.config.object_cache_bytes
+ {
+ setup_objects(&mut self.objects, &self.config);
+ }
Ok(())
}
@@ -446,6 +469,29 @@ fn apply_environment_overrides(
}],
),
(
+ "gitoxide",
+ Some(Cow::Borrowed("pathspec".into())),
+ git_prefix,
+ &[
+ {
+ let key = &gitoxide::Pathspec::LITERAL;
+ (env(key), key.name)
+ },
+ {
+ let key = &gitoxide::Pathspec::GLOB;
+ (env(key), key.name)
+ },
+ {
+ let key = &gitoxide::Pathspec::NOGLOB;
+ (env(key), key.name)
+ },
+ {
+ let key = &gitoxide::Pathspec::ICASE;
+ (env(key), key.name)
+ },
+ ],
+ ),
+ (
"ssh",
None,
git_prefix,
diff --git a/vendor/gix/src/config/cache/util.rs b/vendor/gix/src/config/cache/util.rs
index 7c478fcf9..4032b2cb1 100644
--- a/vendor/gix/src/config/cache/util.rs
+++ b/vendor/gix/src/config/cache/util.rs
@@ -3,7 +3,6 @@ use super::Error;
use crate::{
config,
config::tree::{gitoxide, Core},
- revision::spec::parse::ObjectKindHint,
};
pub(crate) fn interpolate_context<'a>(
@@ -51,7 +50,7 @@ pub(crate) fn query_refupdates(
) -> Result<Option<gix_ref::store::WriteReflog>, Error> {
let key = "core.logAllRefUpdates";
Core::LOG_ALL_REF_UPDATES
- .try_into_ref_updates(config.boolean_by_key(key), || config.string_by_key(key))
+ .try_into_ref_updates(config.boolean_by_key(key))
.with_leniency(lenient_config)
.map_err(Into::into)
}
@@ -74,7 +73,7 @@ pub(crate) fn parse_object_caches(
mut filter_config_section: fn(&gix_config::file::Metadata) -> bool,
) -> Result<(Option<usize>, Option<usize>, usize), Error> {
let static_pack_cache_limit = config
- .integer_filter_by_key("core.deltaBaseCacheLimit", &mut filter_config_section)
+ .integer_filter_by_key("gitoxide.core.deltaBaseCacheLimit", &mut filter_config_section)
.map(|res| gitoxide::Core::DEFAULT_PACK_CACHE_MEMORY_LIMIT.try_into_usize(res))
.transpose()
.with_leniency(lenient)?;
@@ -103,10 +102,11 @@ pub(crate) fn parse_core_abbrev(
.flatten())
}
+#[cfg(feature = "revision")]
pub(crate) fn disambiguate_hint(
config: &gix_config::File<'static>,
lenient_config: bool,
-) -> Result<Option<ObjectKindHint>, config::key::GenericErrorWithValue> {
+) -> Result<Option<crate::revision::spec::parse::ObjectKindHint>, config::key::GenericErrorWithValue> {
match config.string_by_key("core.disambiguate") {
None => Ok(None),
Some(value) => Core::DISAMBIGUATE
@@ -120,10 +120,18 @@ pub trait ApplyLeniency {
fn with_leniency(self, is_lenient: bool) -> Self;
}
+pub trait IgnoreEmptyPath {
+ fn ignore_empty(self) -> Self;
+}
+
pub trait ApplyLeniencyDefault {
fn with_lenient_default(self, is_lenient: bool) -> Self;
}
+pub trait ApplyLeniencyDefaultValue<T> {
+ fn with_lenient_default_value(self, is_lenient: bool, default: T) -> Self;
+}
+
impl<T, E> ApplyLeniency for Result<Option<T>, E> {
fn with_leniency(self, is_lenient: bool) -> Self {
match self {
@@ -134,6 +142,16 @@ impl<T, E> ApplyLeniency for Result<Option<T>, E> {
}
}
+impl IgnoreEmptyPath for Result<Option<std::borrow::Cow<'_, std::path::Path>>, gix_config::path::interpolate::Error> {
+ fn ignore_empty(self) -> Self {
+ match self {
+ Ok(maybe_path) => Ok(maybe_path),
+ Err(gix_config::path::interpolate::Error::Missing { .. }) => Ok(None),
+ Err(err) => Err(err),
+ }
+ }
+}
+
impl<T, E> ApplyLeniencyDefault for Result<T, E>
where
T: Default,
@@ -146,3 +164,13 @@ where
}
}
}
+
+impl<T, E> ApplyLeniencyDefaultValue<T> for Result<T, E> {
+ fn with_lenient_default_value(self, is_lenient: bool, default: T) -> Self {
+ match self {
+ Ok(v) => Ok(v),
+ Err(_) if is_lenient => Ok(default),
+ Err(err) => Err(err),
+ }
+ }
+}
diff --git a/vendor/gix/src/config/mod.rs b/vendor/gix/src/config/mod.rs
index 3353806f8..102c7a482 100644
--- a/vendor/gix/src/config/mod.rs
+++ b/vendor/gix/src/config/mod.rs
@@ -1,10 +1,11 @@
pub use gix_config::*;
use gix_features::threading::OnceCell;
-use crate::{bstr::BString, repository::identity, revision::spec, Repository};
+use crate::{bstr::BString, repository::identity, Repository};
pub(crate) mod cache;
mod snapshot;
+#[cfg(feature = "credentials")]
pub use snapshot::credential_helpers;
///
@@ -46,6 +47,23 @@ pub(crate) mod section {
}
}
+///
+pub mod set_value {
+ /// The error produced when calling [`SnapshotMut::set(_subsection)?_value()`][crate::config::SnapshotMut::set_value()]
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ SetRaw(#[from] gix_config::file::set_raw_value::Error),
+ #[error(transparent)]
+ Validate(#[from] crate::config::tree::key::validate::Error),
+ #[error("The key needs a subsection parameter to be valid.")]
+ SubSectionRequired,
+ #[error("The key must not be used with a subsection")]
+ SubSectionForbidden,
+ }
+}
+
/// The error returned when failing to initialize the repository configuration.
///
/// This configuration is on the critical path when opening a repository.
@@ -102,6 +120,20 @@ pub mod diff {
}
///
+pub mod stat_options {
+ /// The error produced when collecting stat information, and returned by [Repository::stat_options()](crate::Repository::stat_options()).
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ ConfigCheckStat(#[from] super::key::GenericErrorWithValue),
+ #[error(transparent)]
+ ConfigBoolean(#[from] super::boolean::Error),
+ }
+}
+
+///
+#[cfg(feature = "attributes")]
pub mod checkout_options {
/// The error produced when collecting all information needed for checking out files into a worktree.
#[derive(Debug, thiserror::Error)]
@@ -115,6 +147,8 @@ pub mod checkout_options {
CheckoutWorkers(#[from] super::checkout::workers::Error),
#[error(transparent)]
Attributes(#[from] super::attribute_stack::Error),
+ #[error(transparent)]
+ FilterPipelineOptions(#[from] crate::filter::pipeline::options::Error),
}
}
@@ -275,6 +309,23 @@ pub mod key {
}
///
+pub mod encoding {
+ use crate::bstr::BString;
+
+ /// The error produced when failing to parse the `core.checkRoundTripEncoding` key.
+ #[derive(Debug, thiserror::Error)]
+ #[error("The encoding named '{encoding}' seen in key '{key}={value}' is unsupported")]
+ pub struct Error {
+ /// The configuration key that contained the value.
+ pub key: BString,
+ /// The value that was assigned to `key`.
+ pub value: BString,
+ /// The encoding that failed.
+ pub encoding: BString,
+ }
+}
+
+///
pub mod checkout {
///
pub mod workers {
@@ -424,6 +475,7 @@ pub mod transport {
key: Cow<'static, BStr>,
},
#[error("Could not configure the credential helpers for the authenticated proxy url")]
+ #[cfg(feature = "credentials")]
ConfigureProxyAuthenticate(#[from] crate::config::snapshot::credential_helpers::Error),
#[error(transparent)]
InvalidSslVersion(#[from] crate::config::ssl_version::Error),
@@ -459,11 +511,13 @@ pub(crate) struct Cache {
/// A lazily loaded rewrite list for remote urls
pub(crate) url_rewrite: OnceCell<crate::remote::url::Rewrite>,
/// The lazy-loaded rename information for diffs.
+ #[cfg(feature = "blob-diff")]
pub(crate) diff_renames: OnceCell<Option<crate::object::tree::diff::Rewrites>>,
/// A lazily loaded mapping to know which url schemes to allow
#[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
pub(crate) url_scheme: OnceCell<crate::remote::url::SchemePermission>,
/// The algorithm to use when diffing blobs
+ #[cfg(feature = "blob-diff")]
pub(crate) diff_algorithm: OnceCell<gix_diff::blob::Algorithm>,
/// The amount of bytes to use for a memory backed delta pack cache. If `Some(0)`, no cache is used, if `None`
/// a standard cache is used which costs near to nothing and always pays for itself.
@@ -475,12 +529,14 @@ pub(crate) struct Cache {
/// The config section filter from the options used to initialize this instance. Keep these in sync!
filter_config_section: fn(&gix_config::file::Metadata) -> bool,
/// The object kind to pick if a prefix is ambiguous.
- pub object_kind_hint: Option<spec::parse::ObjectKindHint>,
+ #[cfg(feature = "revision")]
+ pub object_kind_hint: Option<crate::revision::spec::parse::ObjectKindHint>,
/// If true, we are on a case-insensitive file system.
pub ignore_case: bool,
/// If true, we should default what's possible if something is misconfigured, on case by case basis, to be more resilient.
/// Also available in options! Keep in sync!
pub lenient_config: bool,
+ #[cfg_attr(not(feature = "worktree-mutation"), allow(dead_code))]
attributes: crate::open::permissions::Attributes,
environment: crate::open::permissions::Environment,
// TODO: make core.precomposeUnicode available as well.
diff --git a/vendor/gix/src/config/overrides.rs b/vendor/gix/src/config/overrides.rs
index 4bdf4a13f..6b3fc728c 100644
--- a/vendor/gix/src/config/overrides.rs
+++ b/vendor/gix/src/config/overrides.rs
@@ -26,11 +26,15 @@ pub(crate) fn append(
let mut file = gix_config::File::new(gix_config::file::Metadata::from(source));
for key_value in values {
let key_value = key_value.as_ref();
- let mut tokens = key_value.splitn(2, |b| *b == b'=').map(|v| v.trim());
+ let mut tokens = key_value.splitn(2, |b| *b == b'=').map(ByteSlice::trim);
let key = tokens.next().expect("always one value").as_bstr();
let value = tokens.next();
- let key = gix_config::parse::key(key.to_str().map_err(|_| Error::InvalidKey { input: key.into() })?)
- .ok_or_else(|| Error::InvalidKey { input: key.into() })?;
+ let key = gix_config::parse::key(
+ key.to_str()
+ .map_err(|_| Error::InvalidKey { input: key.into() })?
+ .into(),
+ )
+ .ok_or_else(|| Error::InvalidKey { input: key.into() })?;
let mut section = file.section_mut_or_create_new(key.section_name, key.subsection_name)?;
let key =
gix_config::parse::section::Key::try_from(key.value_name.to_owned()).map_err(|err| Error::SectionKey {
@@ -38,7 +42,7 @@ pub(crate) fn append(
key: key.value_name.into(),
})?;
let comment = make_comment(key_value);
- let value = value.map(|v| v.as_bstr());
+ let value = value.map(ByteSlice::as_bstr);
match comment {
Some(comment) => section.push_with_comment(key, value, &**comment),
None => section.push(key, value),
diff --git a/vendor/gix/src/config/snapshot/access.rs b/vendor/gix/src/config/snapshot/access.rs
index 1710348a9..7dc593880 100644
--- a/vendor/gix/src/config/snapshot/access.rs
+++ b/vendor/gix/src/config/snapshot/access.rs
@@ -2,9 +2,11 @@
use std::borrow::Cow;
use gix_features::threading::OwnShared;
+use gix_macros::momo;
+use crate::bstr::ByteSlice;
use crate::{
- bstr::BStr,
+ bstr::{BStr, BString},
config::{CommitAutoRollback, Snapshot, SnapshotMut},
};
@@ -25,6 +27,7 @@ impl<'repo> Snapshot<'repo> {
}
/// Like [`boolean()`][Self::boolean()], but it will report an error if the value couldn't be interpreted as boolean.
+ #[momo]
pub fn try_boolean<'a>(&self, key: impl Into<&'a BStr>) -> Option<Result<bool, gix_config::value::Error>> {
self.repo.config.resolved.boolean_by_key(key)
}
@@ -40,6 +43,7 @@ impl<'repo> Snapshot<'repo> {
}
/// Like [`integer()`][Self::integer()], but it will report an error if the value couldn't be interpreted as boolean.
+ #[momo]
pub fn try_integer<'a>(&self, key: impl Into<&'a BStr>) -> Option<Result<i64, gix_config::value::Error>> {
self.repo.config.resolved.integer_by_key(key)
}
@@ -47,6 +51,7 @@ impl<'repo> Snapshot<'repo> {
/// Return the string at `key`, or `None` if there is no such value.
///
/// Note that this method takes the most recent value at `key` even if it is from a file with reduced trust.
+ #[momo]
pub fn string<'a>(&self, key: impl Into<&'a BStr>) -> Option<Cow<'_, BStr>> {
self.repo.config.resolved.string_by_key(key)
}
@@ -54,11 +59,12 @@ impl<'repo> Snapshot<'repo> {
/// Return the trusted and fully interpolated path at `key`, or `None` if there is no such value
/// or if no value was found in a trusted file.
/// An error occurs if the path could not be interpolated to its final value.
+ #[momo]
pub fn trusted_path<'a>(
&self,
key: impl Into<&'a BStr>,
) -> Option<Result<Cow<'_, std::path::Path>, gix_config::path::interpolate::Error>> {
- let key = gix_config::parse::key(key)?;
+ let key = gix_config::parse::key(key.into())?;
self.repo
.config
.trusted_file_path(key.section_name, key.subsection_name, key.value_name)
@@ -99,6 +105,50 @@ impl<'repo> SnapshotMut<'repo> {
self.commit_inner(repo)
}
+ /// Set the value at `key` to `new_value`, possibly creating the section if it doesn't exist yet, or overriding the most recent existing
+ /// value, which will be returned.
+ #[momo]
+ pub fn set_value<'b>(
+ &mut self,
+ key: &'static dyn crate::config::tree::Key,
+ new_value: impl Into<&'b BStr>,
+ ) -> Result<Option<BString>, crate::config::set_value::Error> {
+ if let Some(crate::config::tree::SubSectionRequirement::Parameter(_)) = key.subsection_requirement() {
+ return Err(crate::config::set_value::Error::SubSectionRequired);
+ }
+ let value = new_value.into();
+ key.validate(value)?;
+ let current = self
+ .config
+ .set_raw_value(key.section().name(), None, key.name(), value)?;
+ Ok(current.map(std::borrow::Cow::into_owned))
+ }
+
+ /// Set the value at `key` to `new_value` in the given `subsection`, possibly creating the section and sub-section if it doesn't exist yet,
+ /// or overriding the most recent existing value, which will be returned.
+ #[momo]
+ pub fn set_subsection_value<'a, 'b>(
+ &mut self,
+ key: &'static dyn crate::config::tree::Key,
+ subsection: impl Into<&'a BStr>,
+ new_value: impl Into<&'b BStr>,
+ ) -> Result<Option<BString>, crate::config::set_value::Error> {
+ if let Some(crate::config::tree::SubSectionRequirement::Never) = key.subsection_requirement() {
+ return Err(crate::config::set_value::Error::SubSectionForbidden);
+ }
+ let value = new_value.into();
+ key.validate(value)?;
+
+ let name = key
+ .full_name(Some(subsection.into()))
+ .expect("we know it needs a subsection");
+ let key = gix_config::parse::key((**name).as_bstr()).expect("statically known keys can always be parsed");
+ let current =
+ self.config
+ .set_raw_value(key.section_name, key.subsection_name, key.value_name.to_owned(), value)?;
+ Ok(current.map(std::borrow::Cow::into_owned))
+ }
+
pub(crate) fn commit_inner(
&mut self,
repo: &'repo mut crate::Repository,
diff --git a/vendor/gix/src/config/snapshot/credential_helpers.rs b/vendor/gix/src/config/snapshot/credential_helpers.rs
index c4eef35d6..189e74471 100644
--- a/vendor/gix/src/config/snapshot/credential_helpers.rs
+++ b/vendor/gix/src/config/snapshot/credential_helpers.rs
@@ -2,6 +2,7 @@ use std::{borrow::Cow, convert::TryFrom};
pub use error::Error;
+use crate::config::cache::util::IgnoreEmptyPath;
use crate::{
bstr::{ByteSlice, ByteVec},
config::{
@@ -140,7 +141,8 @@ impl Snapshot<'_> {
let prompt_options = gix_prompt::Options {
askpass: self
.trusted_path(Core::ASKPASS.logical_name().as_str())
- .transpose()?
+ .transpose()
+ .ignore_empty()?
.map(|c| Cow::Owned(c.into_owned())),
..Default::default()
}
diff --git a/vendor/gix/src/config/snapshot/mod.rs b/vendor/gix/src/config/snapshot/mod.rs
index 80ec6f948..de143ea1f 100644
--- a/vendor/gix/src/config/snapshot/mod.rs
+++ b/vendor/gix/src/config/snapshot/mod.rs
@@ -2,4 +2,5 @@ mod _impls;
mod access;
///
+#[cfg(feature = "credentials")]
pub mod credential_helpers;
diff --git a/vendor/gix/src/config/tree/keys.rs b/vendor/gix/src/config/tree/keys.rs
index b03fa49c6..5a5257af5 100644
--- a/vendor/gix/src/config/tree/keys.rs
+++ b/vendor/gix/src/config/tree/keys.rs
@@ -464,7 +464,7 @@ mod remote_name {
}
}
-/// Provide a way to validate a value, or decode a value from `gix-config`.
+/// Provide a way to validate a value, or decode a value from `git-config`.
pub trait Validate {
/// Validate `value` or return an error.
fn validate(&self, value: &BStr) -> Result<(), Box<dyn Error + Send + Sync + 'static>>;
diff --git a/vendor/gix/src/config/tree/mod.rs b/vendor/gix/src/config/tree/mod.rs
index 3f69ccb97..d8415154f 100644
--- a/vendor/gix/src/config/tree/mod.rs
+++ b/vendor/gix/src/config/tree/mod.rs
@@ -31,6 +31,7 @@ pub(crate) mod root {
/// The `credential` section.
pub const CREDENTIAL: sections::Credential = sections::Credential;
/// The `diff` section.
+ #[cfg(feature = "blob-diff")]
pub const DIFF: sections::Diff = sections::Diff;
/// The `extensions` section.
pub const EXTENSIONS: sections::Extensions = sections::Extensions;
@@ -69,6 +70,7 @@ pub(crate) mod root {
&Self::COMMITTER,
&Self::CORE,
&Self::CREDENTIAL,
+ #[cfg(feature = "blob-diff")]
&Self::DIFF,
&Self::EXTENSIONS,
&Self::FETCH,
@@ -90,10 +92,12 @@ pub(crate) mod root {
mod sections;
pub use sections::{
- branch, checkout, core, credential, diff, extensions, fetch, gitoxide, http, index, protocol, remote, ssh, Author,
- Branch, Checkout, Clone, Committer, Core, Credential, Diff, Extensions, Fetch, Gitoxide, Http, Index, Init, Pack,
+ branch, checkout, core, credential, extensions, fetch, gitoxide, http, index, protocol, remote, ssh, Author,
+ Branch, Checkout, Clone, Committer, Core, Credential, Extensions, Fetch, Gitoxide, Http, Index, Init, Pack,
Protocol, Remote, Safe, Ssh, Url, User,
};
+#[cfg(feature = "blob-diff")]
+pub use sections::{diff, Diff};
/// Generic value implementations for static instantiation.
pub mod keys;
diff --git a/vendor/gix/src/config/tree/sections/core.rs b/vendor/gix/src/config/tree/sections/core.rs
index 93d2fcd01..ab3e2bab9 100644
--- a/vendor/gix/src/config/tree/sections/core.rs
+++ b/vendor/gix/src/config/tree/sections/core.rs
@@ -45,7 +45,8 @@ impl Core {
/// The `core.symlinks` key.
pub const SYMLINKS: keys::Boolean = keys::Boolean::new_boolean("symlinks", &config::Tree::CORE);
/// The `core.trustCTime` key.
- pub const TRUST_C_TIME: keys::Boolean = keys::Boolean::new_boolean("trustCTime", &config::Tree::CORE);
+ pub const TRUST_C_TIME: keys::Boolean = keys::Boolean::new_boolean("trustCTime", &config::Tree::CORE)
+ .with_deviation("Currently the default is false, instead of true, as it seems to be 2s off in tests");
/// The `core.worktree` key.
pub const WORKTREE: keys::Any = keys::Any::new("worktree", &config::Tree::CORE)
.with_environment_override("GIT_WORK_TREE")
@@ -66,6 +67,24 @@ impl Core {
/// The `core.useReplaceRefs` key.
pub const USE_REPLACE_REFS: keys::Boolean = keys::Boolean::new_boolean("useReplaceRefs", &config::Tree::CORE)
.with_environment_override("GIT_NO_REPLACE_OBJECTS");
+ /// The `core.commitGraph` key.
+ pub const COMMIT_GRAPH: keys::Boolean = keys::Boolean::new_boolean("commitGraph", &config::Tree::CORE);
+ /// The `core.safecrlf` key.
+ #[cfg(feature = "attributes")]
+ pub const SAFE_CRLF: SafeCrlf = SafeCrlf::new_with_validate("safecrlf", &config::Tree::CORE, validate::SafeCrlf);
+ /// The `core.autocrlf` key.
+ #[cfg(feature = "attributes")]
+ pub const AUTO_CRLF: AutoCrlf = AutoCrlf::new_with_validate("autocrlf", &config::Tree::CORE, validate::AutoCrlf);
+ /// The `core.eol` key.
+ #[cfg(feature = "attributes")]
+ pub const EOL: Eol = Eol::new_with_validate("eol", &config::Tree::CORE, validate::Eol);
+ /// The `core.checkRoundTripEncoding` key.
+ #[cfg(feature = "attributes")]
+ pub const CHECK_ROUND_TRIP_ENCODING: CheckRoundTripEncoding = CheckRoundTripEncoding::new_with_validate(
+ "checkRoundTripEncoding",
+ &config::Tree::CORE,
+ validate::CheckRoundTripEncoding,
+ );
}
impl Section for Core {
@@ -96,6 +115,15 @@ impl Section for Core {
&Self::ATTRIBUTES_FILE,
&Self::SSH_COMMAND,
&Self::USE_REPLACE_REFS,
+ &Self::COMMIT_GRAPH,
+ #[cfg(feature = "attributes")]
+ &Self::SAFE_CRLF,
+ #[cfg(feature = "attributes")]
+ &Self::AUTO_CRLF,
+ #[cfg(feature = "attributes")]
+ &Self::EOL,
+ #[cfg(feature = "attributes")]
+ &Self::CHECK_ROUND_TRIP_ENCODING,
]
}
}
@@ -112,6 +140,154 @@ pub type LogAllRefUpdates = keys::Any<validate::LogAllRefUpdates>;
/// The `core.disambiguate` key.
pub type Disambiguate = keys::Any<validate::Disambiguate>;
+#[cfg(feature = "attributes")]
+mod filter {
+ use super::validate;
+ use crate::config::tree::keys;
+
+ /// The `core.safecrlf` key.
+ pub type SafeCrlf = keys::Any<validate::SafeCrlf>;
+
+ /// The `core.autocrlf` key.
+ pub type AutoCrlf = keys::Any<validate::AutoCrlf>;
+
+ /// The `core.eol` key.
+ pub type Eol = keys::Any<validate::Eol>;
+
+ /// The `core.checkRoundTripEncoding` key.
+ pub type CheckRoundTripEncoding = keys::Any<validate::CheckRoundTripEncoding>;
+
+ mod check_round_trip_encoding {
+ use std::borrow::Cow;
+
+ use crate::{
+ bstr::{BStr, ByteSlice},
+ config,
+ config::tree::{core::CheckRoundTripEncoding, Key},
+ };
+
+ impl CheckRoundTripEncoding {
+            /// Convert `value` into a list of encodings, which are either space or comma separated. Fail if an encoding is unknown.
+ /// If `None`, the default is returned.
+ pub fn try_into_encodings(
+ &'static self,
+ value: Option<Cow<'_, BStr>>,
+ ) -> Result<Vec<&'static gix_filter::encoding::Encoding>, config::encoding::Error> {
+ Ok(match value {
+ None => vec![gix_filter::encoding::SHIFT_JIS],
+ Some(value) => {
+ let mut out = Vec::new();
+ for encoding in value
+ .as_ref()
+ .split(|b| *b == b',' || *b == b' ')
+ .filter(|e| !e.trim().is_empty())
+ {
+ out.push(
+ gix_filter::encoding::Encoding::for_label(encoding.trim()).ok_or_else(|| {
+ config::encoding::Error {
+ key: self.logical_name().into(),
+ value: value.as_ref().to_owned(),
+ encoding: encoding.into(),
+ }
+ })?,
+ );
+ }
+ out
+ }
+ })
+ }
+ }
+ }
+
+ mod eol {
+ use std::borrow::Cow;
+
+ use crate::{
+ bstr::{BStr, ByteSlice},
+ config,
+ config::tree::core::Eol,
+ };
+
+ impl Eol {
+ /// Convert `value` into the default end-of-line mode.
+ ///
+ /// ### Deviation
+ ///
+ /// git will allow any value and silently leaves it unset, we will fail if the value is not known.
+ pub fn try_into_eol(
+ &'static self,
+ value: Cow<'_, BStr>,
+ ) -> Result<gix_filter::eol::Mode, config::key::GenericErrorWithValue> {
+ Ok(match value.to_str_lossy().as_ref() {
+ "lf" => gix_filter::eol::Mode::Lf,
+ "crlf" => gix_filter::eol::Mode::CrLf,
+ "native" => gix_filter::eol::Mode::default(),
+ _ => return Err(config::key::GenericErrorWithValue::from_value(self, value.into_owned())),
+ })
+ }
+ }
+ }
+
+ mod safecrlf {
+ use std::borrow::Cow;
+
+ use gix_filter::pipeline::CrlfRoundTripCheck;
+
+ use crate::{bstr::BStr, config, config::tree::core::SafeCrlf};
+
+ impl SafeCrlf {
+ /// Convert `value` into the safe-crlf enumeration, if possible.
+ pub fn try_into_safecrlf(
+ &'static self,
+ value: Cow<'_, BStr>,
+ ) -> Result<CrlfRoundTripCheck, config::key::GenericErrorWithValue> {
+ if value.as_ref() == "warn" {
+ return Ok(CrlfRoundTripCheck::Warn);
+ }
+ let value = gix_config::Boolean::try_from(value.as_ref()).map_err(|err| {
+ config::key::GenericErrorWithValue::from_value(self, value.into_owned()).with_source(err)
+ })?;
+ Ok(if value.into() {
+ CrlfRoundTripCheck::Fail
+ } else {
+ CrlfRoundTripCheck::Skip
+ })
+ }
+ }
+ }
+
+ mod autocrlf {
+ use std::borrow::Cow;
+
+ use gix_filter::eol;
+
+ use crate::{bstr::BStr, config, config::tree::core::AutoCrlf};
+
+ impl AutoCrlf {
+            /// Convert `value` into the auto-crlf enumeration, if possible.
+ pub fn try_into_autocrlf(
+ &'static self,
+ value: Cow<'_, BStr>,
+ ) -> Result<eol::AutoCrlf, config::key::GenericErrorWithValue> {
+ if value.as_ref() == "input" {
+ return Ok(eol::AutoCrlf::Input);
+ }
+ let value = gix_config::Boolean::try_from(value.as_ref()).map_err(|err| {
+ config::key::GenericErrorWithValue::from_value(self, value.into_owned()).with_source(err)
+ })?;
+ Ok(if value.into() {
+ eol::AutoCrlf::Enabled
+ } else {
+ eol::AutoCrlf::Disabled
+ })
+ }
+ }
+ }
+}
+#[cfg(feature = "attributes")]
+pub use filter::*;
+
+#[cfg(feature = "revision")]
mod disambiguate {
use std::borrow::Cow;
@@ -143,30 +319,27 @@ mod disambiguate {
}
mod log_all_ref_updates {
- use std::borrow::Cow;
-
- use crate::{bstr::BStr, config, config::tree::core::LogAllRefUpdates};
+ use crate::{config, config::tree::core::LogAllRefUpdates};
impl LogAllRefUpdates {
- /// Returns the mode for ref-updates as parsed from `value`. If `value` is not a boolean, `string_on_failure` will be called
- /// to obtain the key `core.logAllRefUpdates` as string instead. For correctness, this two step process is necessary as
- /// the interpretation of booleans in special in `gix-config`, i.e. we can't just treat it as string.
- pub fn try_into_ref_updates<'a>(
+ /// Returns the mode for ref-updates as parsed from `value`. If `value` is not a boolean, we try
+ /// to interpret the string value instead. For correctness, this two step process is necessary as
+        /// the interpretation of booleans is special in `git-config`, i.e. we can't just treat it as string.
+ pub fn try_into_ref_updates(
&'static self,
value: Option<Result<bool, gix_config::value::Error>>,
- string_on_failure: impl FnOnce() -> Option<Cow<'a, BStr>>,
) -> Result<Option<gix_ref::store::WriteReflog>, config::key::GenericErrorWithValue> {
- match value.transpose().ok().flatten() {
- Some(bool) => Ok(Some(if bool {
+ match value {
+ Some(Ok(bool)) => Ok(Some(if bool {
gix_ref::store::WriteReflog::Normal
} else {
gix_ref::store::WriteReflog::Disable
})),
- None => match string_on_failure() {
- Some(val) if val.eq_ignore_ascii_case(b"always") => Ok(Some(gix_ref::store::WriteReflog::Always)),
- Some(val) => Err(config::key::GenericErrorWithValue::from_value(self, val.into_owned())),
- None => Ok(None),
+ Some(Err(err)) => match err.input {
+ val if val.eq_ignore_ascii_case(b"always") => Ok(Some(gix_ref::store::WriteReflog::Always)),
+ val => Err(config::key::GenericErrorWithValue::from_value(self, val)),
},
+ None => Ok(None),
}
}
}
@@ -270,7 +443,9 @@ mod validate {
pub struct Disambiguate;
impl keys::Validate for Disambiguate {
+ #[cfg_attr(not(feature = "revision"), allow(unused_variables))]
fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ #[cfg(feature = "revision")]
super::Core::DISAMBIGUATE.try_into_object_kind_hint(value.into())?;
Ok(())
}
@@ -280,9 +455,7 @@ mod validate {
impl keys::Validate for LogAllRefUpdates {
fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
super::Core::LOG_ALL_REF_UPDATES
- .try_into_ref_updates(Some(gix_config::Boolean::try_from(value).map(|b| b.0)), || {
- Some(value.into())
- })?;
+ .try_into_ref_updates(Some(gix_config::Boolean::try_from(value).map(|b| b.0)))?;
Ok(())
}
}
@@ -303,4 +476,44 @@ mod validate {
Ok(())
}
}
+
+ pub struct SafeCrlf;
+ impl keys::Validate for SafeCrlf {
+ #[cfg_attr(not(feature = "attributes"), allow(unused_variables))]
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ #[cfg(feature = "attributes")]
+ super::Core::SAFE_CRLF.try_into_safecrlf(value.into())?;
+ Ok(())
+ }
+ }
+
+ pub struct AutoCrlf;
+ impl keys::Validate for AutoCrlf {
+ #[cfg_attr(not(feature = "attributes"), allow(unused_variables))]
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ #[cfg(feature = "attributes")]
+ super::Core::AUTO_CRLF.try_into_autocrlf(value.into())?;
+ Ok(())
+ }
+ }
+
+ pub struct Eol;
+ impl keys::Validate for Eol {
+ #[cfg_attr(not(feature = "attributes"), allow(unused_variables))]
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ #[cfg(feature = "attributes")]
+ super::Core::EOL.try_into_eol(value.into())?;
+ Ok(())
+ }
+ }
+
+ pub struct CheckRoundTripEncoding;
+ impl keys::Validate for CheckRoundTripEncoding {
+ #[cfg_attr(not(feature = "attributes"), allow(unused_variables))]
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ #[cfg(feature = "attributes")]
+ super::Core::CHECK_ROUND_TRIP_ENCODING.try_into_encodings(Some(value.into()))?;
+ Ok(())
+ }
+ }
}
diff --git a/vendor/gix/src/config/tree/sections/diff.rs b/vendor/gix/src/config/tree/sections/diff.rs
index 103bb7001..7c467b8f5 100644
--- a/vendor/gix/src/config/tree/sections/diff.rs
+++ b/vendor/gix/src/config/tree/sections/diff.rs
@@ -68,10 +68,8 @@ mod algorithm {
}
mod renames {
- use std::borrow::Cow;
-
use crate::{
- bstr::{BStr, ByteSlice},
+ bstr::ByteSlice,
config::{
key::GenericError,
tree::{keys, sections::diff::Renames, Section},
@@ -84,21 +82,20 @@ mod renames {
pub const fn new_renames(name: &'static str, section: &'static dyn Section) -> Self {
keys::Any::new_with_validate(name, section, super::validate::Renames)
}
- /// Try to convert the configuration into a valid rename tracking variant. Use `value` and if it's an error, call `value_string`
- /// to try and interpret the key as string.
- pub fn try_into_renames<'a>(
+ /// Try to convert the configuration into a valid rename tracking variant. Use `value` and if it's an error, interpret
+        /// the boolean as a string.
+ pub fn try_into_renames(
&'static self,
value: Result<bool, gix_config::value::Error>,
- value_string: impl FnOnce() -> Option<Cow<'a, BStr>>,
) -> Result<Tracking, GenericError> {
Ok(match value {
Ok(true) => Tracking::Renames,
Ok(false) => Tracking::Disabled,
Err(err) => {
- let value = value_string().ok_or_else(|| GenericError::from(self))?;
- match value.as_ref().as_bytes() {
+ let value = &err.input;
+ match value.as_bytes() {
b"copy" | b"copies" => Tracking::RenamesAndCopies,
- _ => return Err(GenericError::from_value(self, value.into_owned()).with_source(err)),
+ _ => return Err(GenericError::from_value(self, value.clone()).with_source(err)),
}
}
})
@@ -107,8 +104,6 @@ mod renames {
}
mod validate {
- use std::borrow::Cow;
-
use crate::{
bstr::BStr,
config::tree::{keys, Diff},
@@ -126,7 +121,7 @@ mod validate {
impl keys::Validate for Renames {
fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
let boolean = gix_config::Boolean::try_from(value).map(|b| b.0);
- Diff::RENAMES.try_into_renames(boolean, || Some(Cow::Borrowed(value)))?;
+ Diff::RENAMES.try_into_renames(boolean)?;
Ok(())
}
}
diff --git a/vendor/gix/src/config/tree/sections/fetch.rs b/vendor/gix/src/config/tree/sections/fetch.rs
index 117cabfd2..32db7be5f 100644
--- a/vendor/gix/src/config/tree/sections/fetch.rs
+++ b/vendor/gix/src/config/tree/sections/fetch.rs
@@ -10,6 +10,10 @@ impl Fetch {
&config::Tree::FETCH,
validate::NegotiationAlgorithm,
);
+ /// The `fetch.recurseSubmodules` key.
+ #[cfg(feature = "attributes")]
+ pub const RECURSE_SUBMODULES: RecurseSubmodules =
+ RecurseSubmodules::new_with_validate("recurseSubmodules", &config::Tree::FETCH, validate::RecurseSubmodules);
}
impl Section for Fetch {
@@ -18,50 +22,81 @@ impl Section for Fetch {
}
fn keys(&self) -> &[&dyn Key] {
- &[&Self::NEGOTIATION_ALGORITHM]
+ &[
+ &Self::NEGOTIATION_ALGORITHM,
+ #[cfg(feature = "attributes")]
+ &Self::RECURSE_SUBMODULES,
+ ]
}
}
/// The `fetch.negotiationAlgorithm` key.
pub type NegotiationAlgorithm = keys::Any<validate::NegotiationAlgorithm>;
-mod algorithm {
- use std::borrow::Cow;
-
- use gix_object::bstr::ByteSlice;
+/// The `fetch.recurseSubmodules` key.
+#[cfg(feature = "attributes")]
+pub type RecurseSubmodules = keys::Any<validate::RecurseSubmodules>;
- use crate::{
- bstr::BStr,
- config::{key::GenericErrorWithValue, tree::sections::fetch::NegotiationAlgorithm},
- remote::fetch::negotiate,
- };
-
- impl NegotiationAlgorithm {
+mod algorithm {
+ #[cfg(feature = "credentials")]
+ impl crate::config::tree::sections::fetch::NegotiationAlgorithm {
/// Derive the negotiation algorithm identified by `name`, case-sensitively.
pub fn try_into_negotiation_algorithm(
&'static self,
- name: Cow<'_, BStr>,
- ) -> Result<negotiate::Algorithm, GenericErrorWithValue> {
+ name: std::borrow::Cow<'_, crate::bstr::BStr>,
+ ) -> Result<crate::remote::fetch::negotiate::Algorithm, crate::config::key::GenericErrorWithValue> {
+ use crate::bstr::ByteSlice;
+ use crate::remote::fetch::negotiate::Algorithm;
+
Ok(match name.as_ref().as_bytes() {
- b"noop" => negotiate::Algorithm::Noop,
- b"consecutive" | b"default" => negotiate::Algorithm::Consecutive,
- b"skipping" => negotiate::Algorithm::Skipping,
- _ => return Err(GenericErrorWithValue::from_value(self, name.into_owned())),
+ b"noop" => Algorithm::Noop,
+ b"consecutive" | b"default" => Algorithm::Consecutive,
+ b"skipping" => Algorithm::Skipping,
+ _ => {
+ return Err(crate::config::key::GenericErrorWithValue::from_value(
+ self,
+ name.into_owned(),
+ ))
+ }
})
}
}
+
+ #[cfg(feature = "attributes")]
+ impl crate::config::tree::sections::fetch::RecurseSubmodules {
+ /// Obtain the way submodules should be updated.
+ pub fn try_into_recurse_submodules(
+ &'static self,
+ value: Result<bool, gix_config::value::Error>,
+ ) -> Result<gix_submodule::config::FetchRecurse, crate::config::key::GenericErrorWithValue> {
+ gix_submodule::config::FetchRecurse::new(value)
+ .map_err(|err| crate::config::key::GenericErrorWithValue::from_value(self, err))
+ }
+ }
}
mod validate {
- use crate::{
- bstr::BStr,
- config::tree::{keys, Fetch},
- };
+ use crate::{bstr::BStr, config::tree::keys};
pub struct NegotiationAlgorithm;
impl keys::Validate for NegotiationAlgorithm {
+ #[cfg_attr(not(feature = "credentials"), allow(unused_variables))]
+ fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ #[cfg(feature = "credentials")]
+ crate::config::tree::Fetch::NEGOTIATION_ALGORITHM.try_into_negotiation_algorithm(value.into())?;
+ Ok(())
+ }
+ }
+
+ pub struct RecurseSubmodules;
+ impl keys::Validate for RecurseSubmodules {
+ #[cfg_attr(not(feature = "attributes"), allow(unused_variables))]
fn validate(&self, value: &BStr) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
- Fetch::NEGOTIATION_ALGORITHM.try_into_negotiation_algorithm(value.into())?;
+ #[cfg(feature = "attributes")]
+ {
+ let boolean = gix_config::Boolean::try_from(value).map(|b| b.0);
+ crate::config::tree::Fetch::RECURSE_SUBMODULES.try_into_recurse_submodules(boolean)?;
+ }
Ok(())
}
}
diff --git a/vendor/gix/src/config/tree/sections/gitoxide.rs b/vendor/gix/src/config/tree/sections/gitoxide.rs
index f07d494fb..966d5af7c 100644
--- a/vendor/gix/src/config/tree/sections/gitoxide.rs
+++ b/vendor/gix/src/config/tree/sections/gitoxide.rs
@@ -24,6 +24,8 @@ impl Gitoxide {
pub const SSH: Ssh = Ssh;
/// The `gitoxide.user` section.
pub const USER: User = User;
+ /// The `gitoxide.pathspec` section.
+ pub const PATHSPEC: Pathspec = Pathspec;
/// The `gitoxide.userAgent` Key.
pub const USER_AGENT: keys::Any = keys::Any::new("userAgent", &config::Tree::GITOXIDE).with_note(
@@ -52,6 +54,7 @@ impl Section for Gitoxide {
&Self::OBJECTS,
&Self::SSH,
&Self::USER,
+ &Self::PATHSPEC,
]
}
}
@@ -86,6 +89,12 @@ mod subsections {
.with_deviation(
"relative file paths will always be made relative to the git-common-dir, whereas `git` keeps them as is.",
);
+
+ /// The `gitoxide.core.filterProcessDelay` key (default `true`).
+ ///
+ /// It controls whether or not long running filter driver processes can use the 'delay' capability.
+ pub const FILTER_PROCESS_DELAY: keys::Boolean =
+ keys::Boolean::new_boolean("filterProcessDelay", &Gitoxide::CORE);
}
impl Section for Core {
@@ -99,6 +108,7 @@ mod subsections {
&Self::USE_NSEC,
&Self::USE_STDEV,
&Self::SHALLOW_FILE,
+ &Self::FILTER_PROCESS_DELAY,
]
}
@@ -306,6 +316,56 @@ mod subsections {
}
}
+ /// The `pathspec` sub-section.
+ #[derive(Copy, Clone, Default)]
+ pub struct Pathspec;
+
+ impl Pathspec {
+ /// The `gitoxide.pathspec.glob` key.
+ pub const GLOB: keys::Boolean = keys::Boolean::new_boolean("glob", &Gitoxide::PATHSPEC)
+ .with_environment_override("GIT_GLOB_PATHSPECS")
+ .with_note("pathspec wildcards don't match the slash character, then needing '**' to get past them");
+ /// The `gitoxide.pathspec.noglob` key.
+ pub const NOGLOB: keys::Boolean = keys::Boolean::new_boolean("noglob", &Gitoxide::PATHSPEC)
+ .with_environment_override("GIT_NOGLOB_PATHSPECS")
+ .with_note("Enable literal matching for glob patterns, effectively disabling globbing");
+ /// The `gitoxide.pathspec.literal` key.
+ pub const LITERAL: keys::Boolean = keys::Boolean::new_boolean("literal", &Gitoxide::PATHSPEC)
+ .with_environment_override("GIT_LITERAL_PATHSPECS")
+ .with_note("Make the entire spec used verbatim, the only way to get ':()name' verbatim for instance");
+ /// The `gitoxide.pathspec.icase` key.
+ pub const ICASE: keys::Boolean = keys::Boolean::new_boolean("icase", &Gitoxide::PATHSPEC)
+ .with_environment_override("GIT_ICASE_PATHSPECS")
+ .with_note("Compare string in a case-insensitive manner");
+ /// The `gitoxide.pathspec.inheritIgnoreCase` key, defaulting to `true` if unspecified.
+ /// If set, pathspecs will automatically be match case-insensitively if the underlying filesystem is configured that way.
+ pub const INHERIT_IGNORE_CASE: keys::Boolean =
+ keys::Boolean::new_boolean("inheritIgnoreCase", &Gitoxide::PATHSPEC)
+ .with_note("Inherit `core.ignoreCase` for defaults in pathspecs");
+ /// The default value for `gitoxide.pathspec.inheritIgnoreCase`.
+ pub const INHERIT_IGNORE_CASE_DEFAULT: bool = true;
+ }
+
+ impl Section for Pathspec {
+ fn name(&self) -> &str {
+ "pathspec"
+ }
+
+ fn keys(&self) -> &[&dyn Key] {
+ &[
+ &Self::GLOB,
+ &Self::NOGLOB,
+ &Self::LITERAL,
+ &Self::ICASE,
+ &Self::INHERIT_IGNORE_CASE,
+ ]
+ }
+
+ fn parent(&self) -> Option<&dyn Section> {
+ Some(&Tree::GITOXIDE)
+ }
+ }
+
/// The `objects` sub-section.
#[derive(Copy, Clone, Default)]
pub struct Objects;
@@ -391,7 +451,7 @@ mod subsections {
}
}
}
-pub use subsections::{Allow, Author, Commit, Committer, Core, Http, Https, Objects, Ssh, User};
+pub use subsections::{Allow, Author, Commit, Committer, Core, Http, Https, Objects, Pathspec, Ssh, User};
pub mod validate {
use std::error::Error;
diff --git a/vendor/gix/src/config/tree/sections/index.rs b/vendor/gix/src/config/tree/sections/index.rs
index 08f7ec1bd..026f35b6d 100644
--- a/vendor/gix/src/config/tree/sections/index.rs
+++ b/vendor/gix/src/config/tree/sections/index.rs
@@ -7,6 +7,9 @@ impl Index {
/// The `index.threads` key.
pub const THREADS: IndexThreads =
IndexThreads::new_with_validate("threads", &config::Tree::INDEX, validate::IndexThreads);
+ /// The `index.skipHash` key.
+ pub const SKIP_HASH: keys::Boolean = keys::Boolean::new_boolean("skipHash", &config::Tree::INDEX)
+ .with_deviation("also used to skip the hash when reading, even if a hash exists in the index file");
}
/// The `index.threads` key.
@@ -47,7 +50,7 @@ impl Section for Index {
}
fn keys(&self) -> &[&dyn Key] {
- &[&Self::THREADS]
+ &[&Self::THREADS, &Self::SKIP_HASH]
}
}
diff --git a/vendor/gix/src/config/tree/sections/mod.rs b/vendor/gix/src/config/tree/sections/mod.rs
index ebf24a8b7..34929c5d1 100644
--- a/vendor/gix/src/config/tree/sections/mod.rs
+++ b/vendor/gix/src/config/tree/sections/mod.rs
@@ -37,7 +37,9 @@ pub mod credential;
/// The `diff` top-level section.
#[derive(Copy, Clone, Default)]
+#[cfg(feature = "blob-diff")]
pub struct Diff;
+#[cfg(feature = "blob-diff")]
pub mod diff;
/// The `extension` top-level section.
diff --git a/vendor/gix/src/config/tree/sections/protocol.rs b/vendor/gix/src/config/tree/sections/protocol.rs
index a0510f2b8..7ef2cc8cb 100644
--- a/vendor/gix/src/config/tree/sections/protocol.rs
+++ b/vendor/gix/src/config/tree/sections/protocol.rs
@@ -127,7 +127,7 @@ mod validate {
.to_decimal()
.ok_or_else(|| format!("integer {value} cannot be represented as integer"))?;
match value {
- 0 | 1 | 2 => Ok(()),
+ 0..=2 => Ok(()),
_ => Err(format!("protocol version {value} is unknown").into()),
}
}
diff --git a/vendor/gix/src/create.rs b/vendor/gix/src/create.rs
index 878ec1164..1ef63b9aa 100644
--- a/vendor/gix/src/create.rs
+++ b/vendor/gix/src/create.rs
@@ -7,6 +7,7 @@ use std::{
use gix_config::parse::section;
use gix_discover::DOT_GIT_DIR;
+use gix_macros::momo;
/// The error used in [`into()`].
#[derive(Debug, thiserror::Error)]
@@ -35,21 +36,20 @@ pub enum Kind {
Bare,
}
-const TPL_INFO_EXCLUDE: &[u8] = include_bytes!("assets/baseline-init/info/exclude");
-const TPL_HOOKS_APPLYPATCH_MSG: &[u8] = include_bytes!("assets/baseline-init/hooks/applypatch-msg.sample");
-const TPL_HOOKS_COMMIT_MSG: &[u8] = include_bytes!("assets/baseline-init/hooks/commit-msg.sample");
-const TPL_HOOKS_FSMONITOR_WATCHMAN: &[u8] = include_bytes!("assets/baseline-init/hooks/fsmonitor-watchman.sample");
-const TPL_HOOKS_POST_UPDATE: &[u8] = include_bytes!("assets/baseline-init/hooks/post-update.sample");
-const TPL_HOOKS_PRE_APPLYPATCH: &[u8] = include_bytes!("assets/baseline-init/hooks/pre-applypatch.sample");
-const TPL_HOOKS_PRE_COMMIT: &[u8] = include_bytes!("assets/baseline-init/hooks/pre-commit.sample");
-const TPL_HOOKS_PRE_MERGE_COMMIT: &[u8] = include_bytes!("assets/baseline-init/hooks/pre-merge-commit.sample");
-const TPL_HOOKS_PRE_PUSH: &[u8] = include_bytes!("assets/baseline-init/hooks/pre-push.sample");
-const TPL_HOOKS_PRE_REBASE: &[u8] = include_bytes!("assets/baseline-init/hooks/pre-rebase.sample");
-const TPL_HOOKS_PRE_RECEIVE: &[u8] = include_bytes!("assets/baseline-init/hooks/pre-receive.sample");
-const TPL_HOOKS_PREPARE_COMMIT_MSG: &[u8] = include_bytes!("assets/baseline-init/hooks/prepare-commit-msg.sample");
-const TPL_HOOKS_UPDATE: &[u8] = include_bytes!("assets/baseline-init/hooks/update.sample");
-const TPL_DESCRIPTION: &[u8] = include_bytes!("assets/baseline-init/description");
-const TPL_HEAD: &[u8] = include_bytes!("assets/baseline-init/HEAD");
+const TPL_INFO_EXCLUDE: &[u8] = include_bytes!("assets/init/info/exclude");
+const TPL_HOOKS_APPLYPATCH_MSG: &[u8] = include_bytes!("assets/init/hooks/applypatch-msg.sample");
+const TPL_HOOKS_COMMIT_MSG: &[u8] = include_bytes!("assets/init/hooks/commit-msg.sample");
+const TPL_HOOKS_FSMONITOR_WATCHMAN: &[u8] = include_bytes!("assets/init/hooks/fsmonitor-watchman.sample");
+const TPL_HOOKS_POST_UPDATE: &[u8] = include_bytes!("assets/init/hooks/post-update.sample");
+const TPL_HOOKS_PRE_APPLYPATCH: &[u8] = include_bytes!("assets/init/hooks/pre-applypatch.sample");
+const TPL_HOOKS_PRE_COMMIT: &[u8] = include_bytes!("assets/init/hooks/pre-commit.sample");
+const TPL_HOOKS_PRE_MERGE_COMMIT: &[u8] = include_bytes!("assets/init/hooks/pre-merge-commit.sample");
+const TPL_HOOKS_PRE_PUSH: &[u8] = include_bytes!("assets/init/hooks/pre-push.sample");
+const TPL_HOOKS_PRE_REBASE: &[u8] = include_bytes!("assets/init/hooks/pre-rebase.sample");
+const TPL_HOOKS_PREPARE_COMMIT_MSG: &[u8] = include_bytes!("assets/init/hooks/prepare-commit-msg.sample");
+const TPL_HOOKS_DOCS_URL: &[u8] = include_bytes!("assets/init/hooks/docs.url");
+const TPL_DESCRIPTION: &[u8] = include_bytes!("assets/init/description");
+const TPL_HEAD: &[u8] = include_bytes!("assets/init/HEAD");
struct PathCursor<'a>(&'a mut PathBuf);
@@ -115,7 +115,7 @@ pub struct Options {
///
/// By default repos with worktree can be initialized into a non-empty repository as long as there is no `.git` directory.
pub destination_must_be_empty: bool,
- /// If set, use these filesystem capabilities to populate the respective gix-config fields.
+ /// If set, use these filesystem capabilities to populate the respective git-config fields.
/// If `None`, the directory will be probed.
pub fs_capabilities: Option<gix_fs::Capabilities>,
}
@@ -125,6 +125,7 @@ pub struct Options {
/// Note that this is a simple template-based initialization routine which should be accompanied with additional corrections
/// to respect git configuration, which is accomplished by [its callers][crate::ThreadSafeRepository::init_opts()]
/// that return a [Repository][crate::Repository].
+#[momo]
pub fn into(
directory: impl Into<PathBuf>,
kind: Kind,
@@ -172,9 +173,8 @@ pub fn into(
{
let mut cursor = NewDir(&mut dot_git).at("hooks")?;
for (tpl, filename) in &[
- (TPL_HOOKS_UPDATE, "update.sample"),
+ (TPL_HOOKS_DOCS_URL, "docs.url"),
(TPL_HOOKS_PREPARE_COMMIT_MSG, "prepare-commit-msg.sample"),
- (TPL_HOOKS_PRE_RECEIVE, "pre-receive.sample"),
(TPL_HOOKS_PRE_REBASE, "pre-rebase.sample"),
(TPL_HOOKS_PRE_PUSH, "pre-push.sample"),
(TPL_HOOKS_PRE_COMMIT, "pre-commit.sample"),
@@ -234,7 +234,7 @@ pub fn into(
} else {
gix_discover::repository::Kind::WorkTree { linked_git_dir: None }
},
- std::env::current_dir()?,
+ &std::env::current_dir()?,
)
.expect("by now the `dot_git` dir is valid as we have accessed it"))
}
diff --git a/vendor/gix/src/discover.rs b/vendor/gix/src/discover.rs
index fa0edfd5f..964108810 100644
--- a/vendor/gix/src/discover.rs
+++ b/vendor/gix/src/discover.rs
@@ -2,6 +2,7 @@
use std::path::Path;
pub use gix_discover::*;
+use gix_macros::momo;
use crate::{bstr::BString, ThreadSafeRepository};
@@ -31,12 +32,14 @@ impl ThreadSafeRepository {
/// if the directory that is discovered can indeed be trusted (or else they'd have to implement the discovery themselves
/// and be sure that no attacker ever gets access to a directory structure. The cost of this is a permission check, which
/// seems acceptable).
+ #[momo]
pub fn discover_opts(
directory: impl AsRef<Path>,
options: upwards::Options<'_>,
trust_map: gix_sec::trust::Mapping<crate::open::Options>,
) -> Result<Self, Error> {
- let (path, trust) = upwards_opts(directory, options)?;
+ let _span = gix_trace::coarse!("ThreadSafeRepository::discover()");
+ let (path, trust) = upwards_opts(directory.as_ref(), options)?;
let (git_dir, worktree_dir) = path.into_repository_and_work_tree_directories();
let mut options = trust_map.into_value_by_level(trust);
options.git_dir_trust = trust.into();
@@ -60,6 +63,7 @@ impl ThreadSafeRepository {
///
/// Finally, use the `trust_map` to determine which of our own repository options to use
/// based on the trust level of the effective repository directory.
+ #[momo]
pub fn discover_with_environment_overrides_opts(
directory: impl AsRef<Path>,
mut options: upwards::Options<'_>,
diff --git a/vendor/gix/src/env.rs b/vendor/gix/src/env.rs
index ce5461bcc..b4973b8d5 100644
--- a/vendor/gix/src/env.rs
+++ b/vendor/gix/src/env.rs
@@ -20,7 +20,7 @@ pub fn args_os() -> impl Iterator<Item = OsString> {
/// Equivalent to `std::env::args_os()`, but with precomposed unicode on MacOS and other apple platforms.
///
-/// Note that this ignores `core.precomposeUnicode` as gix-config isn't available yet. It's default enabled in modern git though.
+/// Note that this ignores `core.precomposeUnicode` as git-config isn't available yet. It's default enabled in modern git though.
#[cfg(target_vendor = "apple")]
pub fn args_os() -> impl Iterator<Item = OsString> {
use unicode_normalization::UnicodeNormalization;
@@ -65,6 +65,7 @@ pub mod collate {
#[error(transparent)]
FindExistingRemote(#[from] crate::remote::find::existing::Error),
#[error(transparent)]
+ #[cfg(feature = "credentials")]
CredentialHelperConfig(#[from] crate::config::credential_helpers::Error),
#[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
#[error(transparent)]
diff --git a/vendor/gix/src/ext/mod.rs b/vendor/gix/src/ext/mod.rs
index beb9007fa..ad69fec07 100644
--- a/vendor/gix/src/ext/mod.rs
+++ b/vendor/gix/src/ext/mod.rs
@@ -1,9 +1,11 @@
pub use object_id::ObjectIdExt;
pub use reference::ReferenceExt;
+#[cfg(feature = "revision")]
pub use rev_spec::RevSpecExt;
-pub use tree::TreeIterExt;
+pub use tree::{TreeEntryExt, TreeEntryRefExt, TreeIterExt};
mod object_id;
mod reference;
+#[cfg(feature = "revision")]
mod rev_spec;
mod tree;
diff --git a/vendor/gix/src/ext/tree.rs b/vendor/gix/src/ext/tree.rs
index 09220fc40..56b832b84 100644
--- a/vendor/gix/src/ext/tree.rs
+++ b/vendor/gix/src/ext/tree.rs
@@ -42,3 +42,27 @@ impl<'d> TreeIterExt for TreeRefIter<'d> {
breadthfirst(self.clone(), state, find, delegate)
}
}
+
+/// Extensions for [EntryRef][gix_object::tree::EntryRef].
+pub trait TreeEntryRefExt<'a>: 'a {
+ /// Attach [`Repository`][crate::Repository] to the given tree entry. It can be detached later with `detach()`.
+ fn attach<'repo>(self, repo: &'repo crate::Repository) -> crate::object::tree::EntryRef<'repo, 'a>;
+}
+
+impl<'a> TreeEntryRefExt<'a> for gix_object::tree::EntryRef<'a> {
+ fn attach<'repo>(self, repo: &'repo crate::Repository) -> crate::object::tree::EntryRef<'repo, 'a> {
+ crate::object::tree::EntryRef { inner: self, repo }
+ }
+}
+
+/// Extensions for [Entry][gix_object::tree::Entry].
+pub trait TreeEntryExt {
+ /// Attach [`Repository`][crate::Repository] to the given tree entry. It can be detached later with `detach()`.
+ fn attach(self, repo: &crate::Repository) -> crate::object::tree::Entry<'_>;
+}
+
+impl TreeEntryExt for gix_object::tree::Entry {
+ fn attach(self, repo: &crate::Repository) -> crate::object::tree::Entry<'_> {
+ crate::object::tree::Entry { inner: self, repo }
+ }
+}
diff --git a/vendor/gix/src/filter.rs b/vendor/gix/src/filter.rs
new file mode 100644
index 000000000..935c91108
--- /dev/null
+++ b/vendor/gix/src/filter.rs
@@ -0,0 +1,229 @@
+//! lower-level access to filters which are applied to create working tree checkouts or to 'clean' working tree contents for storage in git.
+use std::borrow::Cow;
+
+pub use gix_filter as plumbing;
+use gix_odb::{Find, FindExt};
+
+use crate::{
+ bstr::BStr,
+ config::{
+ cache::util::{ApplyLeniency, ApplyLeniencyDefaultValue},
+ tree::Core,
+ },
+ Repository,
+};
+
+///
+pub mod pipeline {
+ ///
+ pub mod options {
+ use crate::{bstr::BString, config};
+
+ /// The error returned by [Pipeline::options()][crate::filter::Pipeline::options()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ CheckRoundTripEncodings(#[from] config::encoding::Error),
+ #[error(transparent)]
+ SafeCrlf(#[from] config::key::GenericErrorWithValue),
+ #[error("Could not interpret 'filter.{name}.required' configuration")]
+ Driver {
+ name: BString,
+ source: gix_config::value::Error,
+ },
+ }
+ }
+
+ ///
+ pub mod convert_to_git {
+ /// The error returned by [Pipeline::convert_to_git()][crate::filter::Pipeline::convert_to_git()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("Failed to prime attributes to the path at which the data resides")]
+ WorktreeCacheAtPath(#[from] std::io::Error),
+ #[error(transparent)]
+ Convert(#[from] gix_filter::pipeline::convert::to_git::Error),
+ }
+ }
+
+ ///
+ pub mod convert_to_worktree {
+ /// The error returned by [Pipeline::convert_to_worktree()][crate::filter::Pipeline::convert_to_worktree()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("Failed to prime attributes to the path at which the data resides")]
+ WorktreeCacheAtPath(#[from] std::io::Error),
+ #[error(transparent)]
+ Convert(#[from] gix_filter::pipeline::convert::to_worktree::Error),
+ }
+ }
+}
+
+/// A git pipeline for transforming data *to-git* and *to-worktree*, based
+/// [on git configuration and attributes](https://git-scm.com/docs/gitattributes).
+#[derive(Clone)]
+pub struct Pipeline<'repo> {
+ inner: gix_filter::Pipeline,
+ cache: gix_worktree::Stack,
+ repo: &'repo Repository,
+}
+
+/// Lifecycle
+impl<'repo> Pipeline<'repo> {
+ /// Extract options from `repo` that are needed to properly drive a standard git filter pipeline.
+ pub fn options(repo: &'repo Repository) -> Result<gix_filter::pipeline::Options, pipeline::options::Error> {
+ let config = &repo.config.resolved;
+ let encodings =
+ Core::CHECK_ROUND_TRIP_ENCODING.try_into_encodings(config.string_by_key("core.checkRoundtripEncoding"))?;
+ let safe_crlf = config
+ .string_by_key("core.safecrlf")
+ .map(|value| Core::SAFE_CRLF.try_into_safecrlf(value))
+ .transpose()
+ .map(Option::unwrap_or_default)
+ .with_lenient_default_value(
+ repo.config.lenient_config,
+ // in lenient mode, we prefer the safe option, instead of just (trying) to output warnings.
+ gix_filter::pipeline::CrlfRoundTripCheck::Fail,
+ )?;
+ let auto_crlf = config
+ .string_by_key("core.autocrlf")
+ .map(|value| Core::AUTO_CRLF.try_into_autocrlf(value))
+ .transpose()
+ .with_leniency(repo.config.lenient_config)?
+ .unwrap_or_default();
+ let eol = config
+ .string_by_key("core.eol")
+ .map(|value| Core::EOL.try_into_eol(value))
+ .transpose()?;
+ let drivers = extract_drivers(repo)?;
+ Ok(gix_filter::pipeline::Options {
+ drivers,
+ eol_config: gix_filter::eol::Configuration { auto_crlf, eol },
+ encodings_with_roundtrip_check: encodings,
+ crlf_roundtrip_check: safe_crlf,
+ object_hash: repo.object_hash(),
+ })
+ }
+
+ /// Create a new instance by extracting all necessary information and configuration from a `repo` along with `cache` for accessing
+ /// attributes. The `index` is used for some filters which may access it under very specific circumstances.
+ pub fn new(repo: &'repo Repository, cache: gix_worktree::Stack) -> Result<Self, pipeline::options::Error> {
+ let pipeline = gix_filter::Pipeline::new(cache.attributes_collection(), Self::options(repo)?);
+ Ok(Pipeline {
+ inner: pipeline,
+ cache,
+ repo,
+ })
+ }
+
+ /// Detach the repository and obtain the individual functional parts.
+ pub fn into_parts(self) -> (gix_filter::Pipeline, gix_worktree::Stack) {
+ (self.inner, self.cache)
+ }
+}
+
+/// Conversions
+impl<'repo> Pipeline<'repo> {
+ /// Convert a `src` stream (to be found at `rela_path`, a repo-relative path) to a representation suitable for storage in `git`
+ /// by using all attributes at `rela_path` and configuration of the repository to know exactly which filters apply.
+ /// `index` is used in particularly rare cases where the CRLF filter in auto-mode tries to determine whether or not to apply itself,
+ /// and it should match the state used when [instantiating this instance][Self::new()].
+ /// Note that the return-type implements [`std::io::Read`].
+ pub fn convert_to_git<R>(
+ &mut self,
+ src: R,
+ rela_path: &std::path::Path,
+ index: &gix_index::State,
+ ) -> Result<gix_filter::pipeline::convert::ToGitOutcome<'_, R>, pipeline::convert_to_git::Error>
+ where
+ R: std::io::Read,
+ {
+ let entry = self
+ .cache
+ .at_path(rela_path, Some(false), |id, buf| self.repo.objects.find_blob(id, buf))?;
+ Ok(self.inner.convert_to_git(
+ src,
+ rela_path,
+ &mut |_, attrs| {
+ entry.matching_attributes(attrs);
+ },
+ &mut |rela_path, buf| -> Result<_, gix_odb::find::Error> {
+ let entry = match index.entry_by_path(rela_path) {
+ None => return Ok(None),
+ Some(entry) => entry,
+ };
+ let obj = self.repo.objects.try_find(&entry.id, buf)?;
+ Ok(obj.filter(|obj| obj.kind == gix_object::Kind::Blob).map(|_| ()))
+ },
+ )?)
+ }
+
+ /// Convert a `src` buffer located at `rela_path` (in the index) from what's in `git` to the worktree representation.
+ /// This method will obtain all attributes and configuration necessary to know exactly which filters to apply.
+ /// Note that the return-type implements [`std::io::Read`].
+ ///
+ /// Use `can_delay` to tell driver processes that they may delay the return of data. Doing this will require the caller to specifically
+ /// handle delayed files by keeping state and using [`Self::into_parts()`] to get access to the driver state to follow the delayed-files
+ /// protocol. For simplicity, most will want to disallow delayed processing.
+ pub fn convert_to_worktree<'input>(
+ &mut self,
+ src: &'input [u8],
+ rela_path: &BStr,
+ can_delay: gix_filter::driver::apply::Delay,
+ ) -> Result<gix_filter::pipeline::convert::ToWorktreeOutcome<'input, '_>, pipeline::convert_to_worktree::Error>
+ {
+ let entry = self
+ .cache
+ .at_entry(rela_path, Some(false), |id, buf| self.repo.objects.find_blob(id, buf))?;
+ Ok(self.inner.convert_to_worktree(
+ src,
+ rela_path,
+ &mut |_, attrs| {
+ entry.matching_attributes(attrs);
+ },
+ can_delay,
+ )?)
+ }
+
+ /// Retrieve the static context that is made available to the process filters.
+ ///
+ /// The context set here is relevant for the [`convert_to_git()`][Self::convert_to_git()] and
+ /// [`convert_to_worktree()`][Self::convert_to_worktree()] methods.
+ pub fn driver_context_mut(&mut self) -> &mut gix_filter::pipeline::Context {
+ self.inner.driver_context_mut()
+ }
+}
+
+/// Obtain a list of all configured drivers, but ignore those in sections that we don't trust enough.
+fn extract_drivers(repo: &Repository) -> Result<Vec<gix_filter::Driver>, pipeline::options::Error> {
+ repo.config
+ .resolved
+ .sections_by_name("filter")
+ .into_iter()
+ .flatten()
+ .filter(|s| repo.filter_config_section()(s.meta()))
+ .filter_map(|s| {
+ s.header().subsection_name().map(|name| {
+ Ok(gix_filter::Driver {
+ name: name.to_owned(),
+ clean: s.value("clean").map(Cow::into_owned),
+ smudge: s.value("smudge").map(Cow::into_owned),
+ process: s.value("process").map(Cow::into_owned),
+ required: s
+ .value("required")
+ .map(|value| gix_config::Boolean::try_from(value.as_ref()))
+ .transpose()
+ .map_err(|err| pipeline::options::Error::Driver {
+ name: name.to_owned(),
+ source: err,
+ })?
+ .unwrap_or_default()
+ .into(),
+ })
+ })
+ })
+ .collect::<Result<Vec<_>, pipeline::options::Error>>()
+}
diff --git a/vendor/gix/src/head/mod.rs b/vendor/gix/src/head/mod.rs
index 094e78a86..399b872ba 100644
--- a/vendor/gix/src/head/mod.rs
+++ b/vendor/gix/src/head/mod.rs
@@ -101,8 +101,10 @@ mod remote {
/// This is equivalent to calling [`Reference::remote(…)`][crate::Reference::remote()] and
/// [`Repository::remote_default_name()`][crate::Repository::remote_default_name()] in order.
///
- /// Combine it with [`find_default_remote()`][crate::Repository::find_default_remote()] as fallback to handle detached heads,
- /// i.e. obtain a remote even in case of detached heads.
+ /// Combine it with [`Repository::find_default_remote()`][crate::Repository::find_default_remote()] as fallback to
+ /// handle detached heads, i.e. obtain a remote even in case of detached heads,
+ /// or call [`Repository::find_fetch_remote(…)`](crate::Repository::find_fetch_remote()) for the highest-level way of finding
+ /// the right remote, just like `git fetch` does.
pub fn into_remote(
self,
direction: remote::Direction,
diff --git a/vendor/gix/src/id.rs b/vendor/gix/src/id.rs
index 0d5c86752..7214ec320 100644
--- a/vendor/gix/src/id.rs
+++ b/vendor/gix/src/id.rs
@@ -3,9 +3,9 @@ use std::ops::Deref;
use gix_hash::{oid, ObjectId};
-use crate::{object::find, revision, Id, Object};
+use crate::{object::find, Id, Object};
-/// An [object id][ObjectId] infused with `Easy`.
+/// An [object id][ObjectId] infused with a [`Repository`][crate::Repository].
impl<'repo> Id<'repo> {
/// Find the [`Object`] associated with this object id, and consider it an error if it doesn't exist.
///
@@ -16,6 +16,13 @@ impl<'repo> Id<'repo> {
self.repo.find_object(self.inner)
}
+ /// Find the [`header`][gix_odb::find::Header] associated with this object id, or an error if it doesn't exist.
+ ///
+ /// Use this method if there is no interest in the contents of the object, which generally is much faster to obtain.
+ pub fn header(&self) -> Result<gix_odb::find::Header, find::existing::Error> {
+ self.repo.find_header(self.inner)
+ }
+
/// Try to find the [`Object`] associated with this object id, and return `None` if it's not available locally.
///
/// # Note
@@ -25,6 +32,13 @@ impl<'repo> Id<'repo> {
self.repo.try_find_object(self.inner)
}
+ /// Find the [`header`][gix_odb::find::Header] associated with this object id, or return `None` if it doesn't exist.
+ ///
+ /// Use this method if there is no interest in the contents of the object, which generally is much faster to obtain.
+ pub fn try_header(&self) -> Result<Option<gix_odb::find::Header>, find::Error> {
+ self.repo.try_find_header(self.inner)
+ }
+
/// Turn this object id into a shortened id with a length in hex as configured by `core.abbrev`.
pub fn shorten(&self) -> Result<gix_hash::Prefix, shorten::Error> {
let hex_len = self.repo.config.hex_len.map_or_else(
@@ -89,9 +103,8 @@ impl<'repo> Id<'repo> {
impl<'repo> Id<'repo> {
/// Obtain a platform for traversing ancestors of this commit.
- ///
- pub fn ancestors(&self) -> revision::walk::Platform<'repo> {
- revision::walk::Platform::new(Some(self.inner), self.repo)
+ pub fn ancestors(&self) -> crate::revision::walk::Platform<'repo> {
+ crate::revision::walk::Platform::new(Some(self.inner), self.repo)
}
}
diff --git a/vendor/gix/src/init.rs b/vendor/gix/src/init.rs
index d04de0806..21c2debd8 100644
--- a/vendor/gix/src/init.rs
+++ b/vendor/gix/src/init.rs
@@ -1,6 +1,7 @@
#![allow(clippy::result_large_err)]
use std::{borrow::Cow, convert::TryInto, path::Path};
+use gix_macros::momo;
use gix_ref::{
store::WriteReflog,
transaction::{PreviousValue, RefEdit},
@@ -29,7 +30,7 @@ pub enum Error {
#[error("Invalid default branch name: {name:?}")]
InvalidBranchName {
name: BString,
- source: gix_validate::refname::Error,
+ source: gix_validate::reference::name::Error,
},
#[error("Could not edit HEAD reference with new default name")]
EditHeadForDefaultBranch(#[from] crate::reference::edit::Error),
@@ -40,6 +41,7 @@ impl ThreadSafeRepository {
///
/// Fails without action if there is already a `.git` repository inside of `directory`, but
/// won't mind if the `directory` otherwise is non-empty.
+ #[momo]
pub fn init(
directory: impl AsRef<Path>,
kind: crate::create::Kind,
@@ -56,6 +58,7 @@ impl ThreadSafeRepository {
///
/// Instead of naming the default branch `master`, we name it `main` unless configured explicitly using the `init.defaultBranch`
/// configuration key.
+ #[momo]
pub fn init_opts(
directory: impl AsRef<Path>,
kind: crate::create::Kind,
diff --git a/vendor/gix/src/interrupt.rs b/vendor/gix/src/interrupt.rs
index c94cbdbfa..b3244a7c0 100644
--- a/vendor/gix/src/interrupt.rs
+++ b/vendor/gix/src/interrupt.rs
@@ -5,49 +5,66 @@
//! Such checks for interrupts are provided in custom implementations of various traits to transparently add interrupt
//! support to methods who wouldn't otherwise by injecting it. see [`Read`].
+#[cfg(feature = "interrupt")]
mod init {
use std::{
io,
- sync::atomic::{AtomicBool, AtomicUsize, Ordering},
+ sync::atomic::{AtomicUsize, Ordering},
};
- static IS_INITIALIZED: AtomicBool = AtomicBool::new(false);
+ static DEREGISTER_COUNT: AtomicUsize = AtomicUsize::new(0);
+ static REGISTERED_HOOKS: once_cell::sync::Lazy<parking_lot::Mutex<Vec<(i32, signal_hook::SigId)>>> =
+ once_cell::sync::Lazy::new(Default::default);
+ static DEFAULT_BEHAVIOUR_HOOKS: once_cell::sync::Lazy<parking_lot::Mutex<Vec<signal_hook::SigId>>> =
+ once_cell::sync::Lazy::new(Default::default);
+ /// A type to help deregistering hooks registered with [`init_handler`](super::init_handler());
#[derive(Default)]
- pub struct Deregister(Vec<(i32, signal_hook::SigId)>);
+ pub struct Deregister {
+ do_reset: bool,
+ }
pub struct AutoDeregister(Deregister);
impl Deregister {
- /// Remove all previously registered handlers, and assure the default behaviour is reinstated.
+ /// Remove all previously registered handlers, and assure the default behaviour is reinstated, if this is the last available instance.
///
/// Note that only the instantiation of the default behaviour can fail.
pub fn deregister(self) -> std::io::Result<()> {
- if self.0.is_empty() {
+ let mut hooks = REGISTERED_HOOKS.lock();
+ let count = DEREGISTER_COUNT.fetch_sub(1, Ordering::SeqCst);
+ if count > 1 || hooks.is_empty() {
return Ok(());
}
- static REINSTATE_DEFAULT_BEHAVIOUR: AtomicBool = AtomicBool::new(true);
- for (_, hook_id) in &self.0 {
+ if self.do_reset {
+ super::reset();
+ }
+ for (_, hook_id) in hooks.iter() {
signal_hook::low_level::unregister(*hook_id);
}
- IS_INITIALIZED.store(false, Ordering::SeqCst);
- if REINSTATE_DEFAULT_BEHAVIOUR
- .fetch_update(Ordering::SeqCst, Ordering::SeqCst, |_| Some(false))
- .expect("always returns value")
- {
- for (sig, _) in self.0 {
- // # SAFETY
- // * we only call a handler that is specifically designed to run in this environment.
- #[allow(unsafe_code)]
- unsafe {
- signal_hook::low_level::register(sig, move || {
- signal_hook::low_level::emulate_default_handler(sig).ok();
- })?;
- }
+
+ let hooks = hooks.drain(..);
+ let mut default_hooks = DEFAULT_BEHAVIOUR_HOOKS.lock();
+ // Even if dropped, `drain(..)` clears the vec which is a must.
+ for (sig, _) in hooks {
+ // # SAFETY
+ // * we only register a handler that is specifically designed to run in this environment.
+ #[allow(unsafe_code)]
+ unsafe {
+ default_hooks.push(signal_hook::low_level::register(sig, move || {
+ signal_hook::low_level::emulate_default_handler(sig).ok();
+ })?);
}
}
Ok(())
}
+ /// If called with `toggle` being `true`, when actually deregistering, we will also reset the trigger by
+ /// calling [`reset()`](super::reset()).
+ pub fn with_reset(mut self, toggle: bool) -> Self {
+ self.do_reset = toggle;
+ self
+ }
+
/// Return a type that deregisters all installed signal handlers on drop.
pub fn auto_deregister(self) -> AutoDeregister {
AutoDeregister(self)
@@ -60,20 +77,33 @@ mod init {
}
}
- /// Initialize a signal handler to listen to SIGINT and SIGTERM and trigger our [`trigger()`][super::trigger()] that way.
- /// Also trigger `interrupt()` which promises to never use a Mutex, allocate or deallocate.
+ /// Initialize a signal handler to listen to SIGINT and SIGTERM and trigger our [`trigger()`](super::trigger()) that way.
+ /// Also trigger `interrupt()` which promises to never use a Mutex, allocate or deallocate, or do anything else that's blocking.
+ /// Use `grace_count` to determine how often the termination signal can be received before it's terminal, e.g. 1 would only terminate
+ /// the application the second time the signal is received.
+ /// Note that only the `grace_count` and `interrupt` of the first call are effective, all others will be ignored.
+ ///
+ /// Use the returned `Deregister` type to explicitly deregister hooks, or to do so automatically.
///
/// # Note
///
/// It will abort the process on second press and won't inform the user about this behaviour either as we are unable to do so without
/// deadlocking even when trying to write to stderr directly.
- pub fn init_handler(interrupt: impl Fn() + Send + Sync + Clone + 'static) -> io::Result<Deregister> {
- if IS_INITIALIZED
- .fetch_update(Ordering::SeqCst, Ordering::SeqCst, |_| Some(true))
- .expect("always returns value")
- {
- return Err(io::Error::new(io::ErrorKind::Other, "Already initialized"));
+ pub fn init_handler(
+ grace_count: usize,
+ interrupt: impl Fn() + Send + Sync + Clone + 'static,
+ ) -> io::Result<Deregister> {
+ let prev_count = DEREGISTER_COUNT.fetch_add(1, Ordering::SeqCst);
+ if prev_count != 0 {
+ // Try to obtain the lock before we return just to wait for the signals to actually be registered.
+ let _guard = REGISTERED_HOOKS.lock();
+ return Ok(Deregister::default());
+ }
+ let mut guard = REGISTERED_HOOKS.lock();
+ if !guard.is_empty() {
+ return Ok(Deregister::default());
}
+
let mut hooks = Vec::with_capacity(signal_hook::consts::TERM_SIGNALS.len());
for sig in signal_hook::consts::TERM_SIGNALS {
// # SAFETY
@@ -88,7 +118,7 @@ mod init {
INTERRUPT_COUNT.store(0, Ordering::SeqCst);
}
let msg_idx = INTERRUPT_COUNT.fetch_add(1, Ordering::SeqCst);
- if msg_idx == 1 {
+ if msg_idx == grace_count {
gix_tempfile::registry::cleanup_tempfiles_signal_safe();
signal_hook::low_level::emulate_default_handler(*sig).ok();
}
@@ -98,11 +128,15 @@ mod init {
hooks.push((*sig, hook_id));
}
}
+ for hook_id in DEFAULT_BEHAVIOUR_HOOKS.lock().drain(..) {
+ signal_hook::low_level::unregister(hook_id);
+ }
// This means that they won't setup a handler allowing us to call them right before we actually abort.
gix_tempfile::signal::setup(gix_tempfile::signal::handler::Mode::None);
- Ok(Deregister(hooks))
+ *guard = hooks;
+ Ok(Deregister::default())
}
}
use std::{
@@ -110,7 +144,8 @@ use std::{
sync::atomic::{AtomicBool, Ordering},
};
-pub use init::init_handler;
+#[cfg(feature = "interrupt")]
+pub use init::{init_handler, Deregister};
/// A wrapper for an inner iterator which will check for interruptions on each iteration.
pub struct Iter<I, EFN> {
diff --git a/vendor/gix/src/lib.rs b/vendor/gix/src/lib.rs
index 5de702dbf..672d5c91c 100644
--- a/vendor/gix/src/lib.rs
+++ b/vendor/gix/src/lib.rs
@@ -4,7 +4,18 @@
//! individually. Sometimes it may hide complexity under the assumption that the performance difference doesn't matter
//! for all but the fewest tools out there, which would be using the underlying crates directly or file an issue.
//!
-//! # The prelude and extensions
+//! ### The Trust Model
+//!
+//! It is very simple - based on the ownership of the repository compared to the user of the current process [Trust](sec::Trust)
+//! is assigned. This can be [overridden](open::Options::with()) as well. Further, git configuration files track their trust level
+//! per section based on and sensitive values like paths to executables or certain values will be skipped if they are from a source
+//! that isn't [fully](sec::Trust::Full) trusted.
+//!
+//! That way, data can safely be obtained without risking to execute untrusted executables.
+//!
+//! Note that it's possible to let `gix` act like `git` or `git2` by setting the [open::Options::bail_if_untrusted()] option.
+//!
+//! ### The prelude and extensions
//!
//! With `use git_repository::prelude::*` you should be ready to go as it pulls in various extension traits to make functionality
//! available on objects that may use it.
@@ -14,13 +25,13 @@
//! Most extensions to existing objects provide an `obj_with_extension.attach(&repo).an_easier_version_of_a_method()` for simpler
//! call signatures.
//!
-//! ## `ThreadSafe` Mode
+//! ### `ThreadSafe` Mode
//!
//! By default, the [`Repository`] isn't `Sync` and thus can't be used in certain contexts which require the `Sync` trait.
//!
//! To help with this, convert it with [`.into_sync()`][Repository::into_sync()] into a [`ThreadSafeRepository`].
//!
-//! ## Object-Access Performance
+//! ### Object-Access Performance
//!
//! Accessing objects quickly is the bread-and-butter of working with git, right after accessing references. Hence it's vital
//! to understand which cache levels exist and how to leverage them.
@@ -42,9 +53,9 @@
//! When reading the documentation of the canonical gix-worktree program one gets the impression work tree and working tree are used
//! interchangeably. We use the term _work tree_ only and try to do so consistently as its shorter and assumed to be the same.
//!
-//! # Cargo-features
+//! ### Plumbing Crates
//!
-//! To make using _sub-crates_ easier these are re-exported into the root of this crate. Here we list how to access nested plumbing
+//! To make using _sub-crates_ and their types easier, these are re-exported into the root of this crate. Here we list how to access nested plumbing
//! crates which are otherwise harder to discover:
//!
//! **`git_repository::`**
@@ -54,44 +65,69 @@
//! * [`transport`][protocol::transport]
//! * [`packetline`][protocol::transport::packetline]
//!
+//! ### `libgit2` API to `gix`
+//!
+//! These doc-aliases are used to help find methods under a possibly changed name. Just search in the docs.
+//! Entering `git2` into the search field will also surface all methods with such annotations.
+//!
+//! What follows is a list of methods you might be missing, along with workarounds if available.
+//! * [`git2::Repository::open_bare()`](https://docs.rs/git2/*/git2/struct.Repository.html#method.open_bare) ➡ ❌ - use [`open()`] and discard if it is not bare.
+//! * [`git2::build::CheckoutBuilder::disable_filters()`](https://docs.rs/git2/*/git2/build/struct.CheckoutBuilder.html#method.disable_filters) ➡ ❌ *(filters are always applied during checkouts)*
+//! * [`git2::Repository::submodule_status()`](https://docs.rs/git2/*/git2/struct.Repository.html#method.submodule_status) ➡ [`Submodule::state()`] - status provides more information and conveniences though, and an actual worktree status isn't performed.
//!
-//! ## Feature Flags
+//! ### Feature Flags
#![cfg_attr(
feature = "document-features",
cfg_attr(doc, doc = ::document_features::document_features!())
)]
#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))]
#![deny(missing_docs, rust_2018_idioms, unsafe_code)]
+#![allow(clippy::result_large_err)]
// Re-exports to make this a potential one-stop shop crate avoiding people from having to reference various crates themselves.
// This also means that their major version changes affect our major version, but that's alright as we directly expose their
// APIs/instances anyway.
pub use gix_actor as actor;
+#[cfg(feature = "attributes")]
pub use gix_attributes as attrs;
pub use gix_commitgraph as commitgraph;
+#[cfg(feature = "credentials")]
pub use gix_credentials as credentials;
pub use gix_date as date;
pub use gix_features as features;
use gix_features::threading::OwnShared;
-pub use gix_features::{parallel, progress::Progress, threading};
+pub use gix_features::{
+ parallel,
+ progress::{Count, DynNestedProgress, NestedProgress, Progress},
+ threading,
+};
pub use gix_fs as fs;
pub use gix_glob as glob;
pub use gix_hash as hash;
+pub use gix_hashtable as hashtable;
+#[cfg(feature = "excludes")]
pub use gix_ignore as ignore;
#[doc(inline)]
+#[cfg(feature = "index")]
pub use gix_index as index;
pub use gix_lock as lock;
+#[cfg(feature = "credentials")]
pub use gix_negotiate as negotiate;
pub use gix_object as objs;
pub use gix_object::bstr;
pub use gix_odb as odb;
+#[cfg(feature = "credentials")]
pub use gix_prompt as prompt;
-#[cfg(all(feature = "gix-protocol"))]
+#[cfg(feature = "gix-protocol")]
pub use gix_protocol as protocol;
pub use gix_ref as refs;
pub use gix_refspec as refspec;
+pub use gix_revwalk as revwalk;
pub use gix_sec as sec;
+#[cfg(feature = "status")]
+pub use gix_status as status;
pub use gix_tempfile as tempfile;
+pub use gix_trace as trace;
pub use gix_traverse as traverse;
pub use gix_url as url;
#[doc(inline)]
@@ -101,13 +137,13 @@ pub use hash::{oid, ObjectId};
pub mod interrupt;
-///
-pub mod attributes;
-
mod ext;
///
pub mod prelude;
+#[cfg(feature = "excludes")]
+mod attribute_stack;
+
///
pub mod path;
@@ -118,11 +154,14 @@ pub type OdbHandle = gix_odb::Handle;
/// A way to access git configuration
pub(crate) type Config = OwnShared<gix_config::File<'static>>;
-///
mod types;
+#[cfg(any(feature = "excludes", feature = "attributes"))]
+pub use types::AttributeStack;
pub use types::{
Commit, Head, Id, Object, ObjectDetached, Reference, Remote, Repository, Tag, ThreadSafeRepository, Tree, Worktree,
};
+#[cfg(feature = "attributes")]
+pub use types::{Pathspec, PathspecDetached, Submodule};
///
pub mod clone;
@@ -130,8 +169,12 @@ pub mod commit;
pub mod head;
pub mod id;
pub mod object;
+#[cfg(feature = "attributes")]
+pub mod pathspec;
pub mod reference;
pub mod repository;
+#[cfg(feature = "attributes")]
+pub mod submodule;
pub mod tag;
///
@@ -217,12 +260,14 @@ fn open_opts_with_git_binary_config() -> open::Options {
/// See [`ThreadSafeRepository::open()`], but returns a [`Repository`] instead.
#[allow(clippy::result_large_err)]
+#[doc(alias = "git2")]
pub fn open(directory: impl Into<std::path::PathBuf>) -> Result<Repository, open::Error> {
ThreadSafeRepository::open(directory).map(Into::into)
}
/// See [`ThreadSafeRepository::open_opts()`], but returns a [`Repository`] instead.
#[allow(clippy::result_large_err)]
+#[doc(alias = "open_ext", alias = "git2")]
pub fn open_opts(directory: impl Into<std::path::PathBuf>, options: open::Options) -> Result<Repository, open::Error> {
ThreadSafeRepository::open_opts(directory, options).map(Into::into)
}
@@ -237,6 +282,7 @@ pub mod open;
pub mod config;
///
+#[cfg(feature = "mailmap")]
pub mod mailmap;
///
@@ -244,6 +290,9 @@ pub mod worktree;
pub mod revision;
+#[cfg(feature = "attributes")]
+pub mod filter;
+
///
pub mod remote;
diff --git a/vendor/gix/src/object/blob.rs b/vendor/gix/src/object/blob.rs
index f35605422..00b3519ed 100644
--- a/vendor/gix/src/object/blob.rs
+++ b/vendor/gix/src/object/blob.rs
@@ -1,4 +1,5 @@
///
+#[cfg(feature = "blob-diff")]
pub mod diff {
use std::ops::Range;
diff --git a/vendor/gix/src/object/commit.rs b/vendor/gix/src/object/commit.rs
index 5a9dfd4f3..1fb9eff67 100644
--- a/vendor/gix/src/object/commit.rs
+++ b/vendor/gix/src/object/commit.rs
@@ -1,4 +1,4 @@
-use crate::{bstr, bstr::BStr, revision, Commit, ObjectDetached, Tree};
+use crate::{bstr, bstr::BStr, Commit, ObjectDetached, Tree};
mod error {
use crate::object;
@@ -65,7 +65,7 @@ impl<'repo> Commit<'repo> {
/// Decode the commit and obtain the time at which the commit was created.
///
/// For the time at which it was authored, refer to `.decode()?.author.time`.
- pub fn time(&self) -> Result<gix_actor::Time, Error> {
+ pub fn time(&self) -> Result<gix_date::Time, Error> {
Ok(self.committer()?.time)
}
@@ -131,12 +131,13 @@ impl<'repo> Commit<'repo> {
}
/// Obtain a platform for traversing ancestors of this commit.
- pub fn ancestors(&self) -> revision::walk::Platform<'repo> {
+ pub fn ancestors(&self) -> crate::revision::walk::Platform<'repo> {
self.id().ancestors()
}
/// Create a platform to further configure a `git describe` operation to find a name for this commit by looking
/// at the closest annotated tags (by default) in its past.
+ #[cfg(feature = "revision")]
pub fn describe(&self) -> crate::commit::describe::Platform<'repo> {
crate::commit::describe::Platform {
id: self.id,
@@ -147,6 +148,15 @@ impl<'repo> Commit<'repo> {
max_candidates: 10,
}
}
+
+ /// Extracts the PGP signature and the data that was used to create the signature, or `None` if it wasn't signed.
+ // TODO: make it possible to verify the signature, probably by wrapping `SignedData`. It's quite some work to do it properly.
+ pub fn signature(
+ &self,
+ ) -> Result<Option<(std::borrow::Cow<'_, BStr>, gix_object::commit::SignedData<'_>)>, gix_object::decode::Error>
+ {
+ gix_object::CommitRefIter::signature(&self.data)
+ }
}
impl<'r> std::fmt::Debug for Commit<'r> {
diff --git a/vendor/gix/src/object/errors.rs b/vendor/gix/src/object/errors.rs
index eb7733473..92789b6cb 100644
--- a/vendor/gix/src/object/errors.rs
+++ b/vendor/gix/src/object/errors.rs
@@ -18,17 +18,21 @@ pub mod conversion {
///
pub mod find {
/// Indicate that an error occurred when trying to find an object.
- pub type Error = gix_odb::store::find::Error;
+ #[derive(Debug, thiserror::Error)]
+ #[error(transparent)]
+ pub struct Error(#[from] pub gix_odb::find::Error);
///
pub mod existing {
/// An object could not be found in the database, or an error occurred when trying to obtain it.
- pub type Error = gix_odb::find::existing::Error<gix_odb::store::find::Error>;
+ pub type Error = gix_odb::find::existing::Error;
}
}
///
pub mod write {
/// An error to indicate writing to the loose object store failed.
- pub type Error = gix_odb::store::write::Error;
+ #[derive(Debug, thiserror::Error)]
+ #[error(transparent)]
+ pub struct Error(#[from] pub gix_odb::find::Error);
}
diff --git a/vendor/gix/src/object/tree/diff/for_each.rs b/vendor/gix/src/object/tree/diff/for_each.rs
index a72033182..3932f9027 100644
--- a/vendor/gix/src/object/tree/diff/for_each.rs
+++ b/vendor/gix/src/object/tree/diff/for_each.rs
@@ -75,8 +75,9 @@ impl<'a, 'old> Platform<'a, 'old> {
}
Err(gix_diff::tree::changes::Error::Cancelled) => delegate
.err
- .map(|err| Err(Error::ForEach(Box::new(err))))
- .unwrap_or(Err(Error::Diff(gix_diff::tree::changes::Error::Cancelled))),
+ .map_or(Err(Error::Diff(gix_diff::tree::changes::Error::Cancelled)), |err| {
+ Err(Error::ForEach(Box::new(err)))
+ }),
Err(err) => Err(err.into()),
}
}
diff --git a/vendor/gix/src/object/tree/diff/mod.rs b/vendor/gix/src/object/tree/diff/mod.rs
index 447eeaa84..5f7a041e4 100644
--- a/vendor/gix/src/object/tree/diff/mod.rs
+++ b/vendor/gix/src/object/tree/diff/mod.rs
@@ -34,6 +34,10 @@ impl<'repo> Tree<'repo> {
///
/// It's highly recommended to set an object cache to avoid extracting the same object multiple times.
/// By default, similar to `git diff`, rename tracking will be enabled if it is not configured.
+ ///
+ /// Note that if a clone with `--filter=blob=none` was created, rename tracking may fail as it might
+ /// try to access blobs to compute a similarity metric. Thus, it's more compatible to turn rewrite tracking off
+ /// using [`Platform::track_rewrites()`].
#[allow(clippy::result_large_err)]
pub fn changes<'a>(&'a self) -> Result<Platform<'a, 'repo>, rewrites::Error> {
Ok(Platform {
diff --git a/vendor/gix/src/object/tree/diff/rewrites.rs b/vendor/gix/src/object/tree/diff/rewrites.rs
index 1502048ec..e434726d9 100644
--- a/vendor/gix/src/object/tree/diff/rewrites.rs
+++ b/vendor/gix/src/object/tree/diff/rewrites.rs
@@ -80,7 +80,7 @@ impl Rewrites {
let key = "diff.renames";
let copies = match config
.boolean_by_key(key)
- .map(|value| Diff::RENAMES.try_into_renames(value, || config.string_by_key(key)))
+ .map(|value| Diff::RENAMES.try_into_renames(value))
.transpose()
.with_leniency(lenient)?
{
diff --git a/vendor/gix/src/object/tree/iter.rs b/vendor/gix/src/object/tree/iter.rs
index c841e2574..848d9eeab 100644
--- a/vendor/gix/src/object/tree/iter.rs
+++ b/vendor/gix/src/object/tree/iter.rs
@@ -25,10 +25,25 @@ impl<'repo, 'a> EntryRef<'repo, 'a> {
crate::Id::from_id(self.inner.oid, self.repo)
}
- /// Return the entries id, without repository connection.
- pub fn oid(&self) -> gix_hash::ObjectId {
+ /// Return the plain object id of this entry, without access to the repository.
+ pub fn oid(&self) -> &gix_hash::oid {
+ self.inner.oid
+ }
+
+ /// Return the object this entry points to.
+ pub fn object(&self) -> Result<crate::Object<'repo>, crate::object::find::existing::Error> {
+ self.id().object()
+ }
+
+ /// Return an owned copy of this entry's object id, without access to the repository.
+ pub fn object_id(&self) -> gix_hash::ObjectId {
self.inner.oid.to_owned()
}
+
+ /// Detach the repository from this instance.
+ pub fn detach(&self) -> gix_object::tree::EntryRef<'a> {
+ self.inner
+ }
}
impl<'repo, 'a> std::fmt::Display for EntryRef<'repo, 'a> {
diff --git a/vendor/gix/src/object/tree/mod.rs b/vendor/gix/src/object/tree/mod.rs
index bbd392289..5bf59a25c 100644
--- a/vendor/gix/src/object/tree/mod.rs
+++ b/vendor/gix/src/object/tree/mod.rs
@@ -1,5 +1,8 @@
use gix_hash::ObjectId;
+use gix_macros::momo;
+pub use gix_object::tree::EntryMode;
use gix_object::{bstr::BStr, TreeRefIter};
+use gix_odb::FindExt;
use crate::{object::find, Id, Tree};
@@ -27,9 +30,20 @@ impl<'repo> Tree<'repo> {
gix_object::TreeRef::from_bytes(&self.data)
}
- // TODO: tests.
+ /// Find the entry named `name` by iteration, or return `None` if it wasn't found.
+ pub fn find_entry(&self, name: impl PartialEq<BStr>) -> Option<EntryRef<'repo, '_>> {
+ TreeRefIter::from_bytes(&self.data)
+ .filter_map(Result::ok)
+ .find(|entry| name.eq(entry.filename))
+ .map(|entry| EntryRef {
+ inner: entry,
+ repo: self.repo,
+ })
+ }
+
/// Follow a sequence of `path` components starting from this instance, and look them up one by one until the last component
/// is looked up and its tree entry is returned.
+ /// Use `buf` as temporary location for sub-trees to avoid allocating a temporary buffer for each lookup.
///
/// # Performance Notes
///
@@ -37,12 +51,51 @@ impl<'repo> Tree<'repo> {
/// to re-use a vector and use a binary search instead, which might be able to improve performance over all.
/// However, a benchmark should be created first to have some data and see which trade-off to choose here.
///
- /// # Why is this consuming?
+ pub fn lookup_entry<I, P>(&self, path: I, buf: &mut Vec<u8>) -> Result<Option<Entry<'repo>>, find::existing::Error>
+ where
+ I: IntoIterator<Item = P>,
+ P: PartialEq<BStr>,
+ {
+ let mut path = path.into_iter().peekable();
+ buf.clear();
+ buf.extend_from_slice(&self.data);
+ while let Some(component) = path.next() {
+ match TreeRefIter::from_bytes(buf)
+ .filter_map(Result::ok)
+ .find(|entry| component.eq(entry.filename))
+ {
+ Some(entry) => {
+ if path.peek().is_none() {
+ return Ok(Some(Entry {
+ inner: entry.into(),
+ repo: self.repo,
+ }));
+ } else {
+ let next_id = entry.oid.to_owned();
+ let obj = self.repo.objects.find(&next_id, buf)?;
+ if !obj.kind.is_tree() {
+ return Ok(None);
+ }
+ }
+ }
+ None => return Ok(None),
+ }
+ }
+ Ok(None)
+ }
+
+ /// Follow a sequence of `path` components starting from this instance, and look them up one by one until the last component
+ /// is looked up and its tree entry is returned, while changing this instance to point to the last seen tree.
+ /// Note that if the lookup fails, it may be impossible to continue making lookups through this tree.
+ /// It's useful to have this function to be able to reuse the internal buffer of the tree.
+ ///
+ /// # Performance Notes
+ ///
+ /// Searching tree entries is currently done in sequence, which allows the search to be allocation free. It would be possible
+ /// to re-use a vector and use a binary search instead, which might be able to improve performance over all.
+ /// However, a benchmark should be created first to have some data and see which trade-off to choose here.
///
- /// The borrow checker shows pathological behaviour in loops that mutate a buffer, but also want to return from it.
- /// Workarounds include keeping an index and doing a separate access to the memory, which seems hard to do here without
- /// re-parsing the entries.
- pub fn lookup_entry<I, P>(mut self, path: I) -> Result<Option<Entry<'repo>>, find::existing::Error>
+ pub fn peel_to_entry<I, P>(&mut self, path: I) -> Result<Option<Entry<'repo>>, find::existing::Error>
where
I: IntoIterator<Item = P>,
P: PartialEq<BStr>,
@@ -61,12 +114,11 @@ impl<'repo> Tree<'repo> {
}));
} else {
let next_id = entry.oid.to_owned();
- let repo = self.repo;
- drop(self);
- self = match repo.find_object(next_id)?.try_into_tree() {
- Ok(tree) => tree,
- Err(_) => return Ok(None),
- };
+ let obj = self.repo.objects.find(&next_id, &mut self.data)?;
+ self.id = next_id;
+ if !obj.kind.is_tree() {
+ return Ok(None);
+ }
}
}
None => return Ok(None),
@@ -75,18 +127,42 @@ impl<'repo> Tree<'repo> {
Ok(None)
}
- /// Like [`lookup_entry()`][Self::lookup_entry()], but takes a `Path` directly via `relative_path`, a path relative to this tree.
+ /// Like [`Self::lookup_entry()`], but takes a `Path` directly via `relative_path`, a path relative to this tree.
///
/// # Note
///
/// If any path component contains illformed UTF-8 and thus can't be converted to bytes on platforms which can't do so natively,
/// the returned component will be empty which makes the lookup fail.
+ #[momo]
pub fn lookup_entry_by_path(
- self,
+ &self,
+ relative_path: impl AsRef<std::path::Path>,
+ buf: &mut Vec<u8>,
+ ) -> Result<Option<Entry<'repo>>, find::existing::Error> {
+ use crate::bstr::ByteSlice;
+ self.lookup_entry(
+ relative_path.as_ref().components().map(|c: std::path::Component<'_>| {
+ gix_path::os_str_into_bstr(c.as_os_str())
+ .unwrap_or_else(|_| "".into())
+ .as_bytes()
+ }),
+ buf,
+ )
+ }
+
+ /// Like [`Self::peel_to_entry()`], but takes a `Path` directly via `relative_path`, a path relative to this tree.
+ ///
+ /// # Note
+ ///
+ /// If any path component contains illformed UTF-8 and thus can't be converted to bytes on platforms which can't do so natively,
+ /// the returned component will be empty which makes the lookup fail.
+ #[momo]
+ pub fn peel_to_entry_by_path(
+ &mut self,
relative_path: impl AsRef<std::path::Path>,
) -> Result<Option<Entry<'repo>>, find::existing::Error> {
use crate::bstr::ByteSlice;
- self.lookup_entry(relative_path.as_ref().components().map(|c: std::path::Component<'_>| {
+ self.peel_to_entry(relative_path.as_ref().components().map(|c: std::path::Component<'_>| {
gix_path::os_str_into_bstr(c.as_os_str())
.unwrap_or_else(|_| "".into())
.as_bytes()
@@ -95,6 +171,7 @@ impl<'repo> Tree<'repo> {
}
///
+#[cfg(feature = "blob-diff")]
pub mod diff;
///
@@ -113,8 +190,8 @@ impl<'r> std::fmt::Debug for Tree<'r> {
/// An entry in a [`Tree`], similar to an entry in a directory.
#[derive(PartialEq, Debug, Clone)]
pub struct Entry<'repo> {
- inner: gix_object::tree::Entry,
- repo: &'repo crate::Repository,
+ pub(crate) inner: gix_object::tree::Entry,
+ pub(crate) repo: &'repo crate::Repository,
}
mod entry {
diff --git a/vendor/gix/src/open/options.rs b/vendor/gix/src/open/options.rs
index b098d55c1..930fb414c 100644
--- a/vendor/gix/src/open/options.rs
+++ b/vendor/gix/src/open/options.rs
@@ -149,6 +149,12 @@ impl Options {
}
}
+impl Options {
+ pub(crate) fn current_dir_or_empty(&self) -> &std::path::Path {
+ self.current_dir.as_deref().unwrap_or(std::path::Path::new(""))
+ }
+}
+
impl gix_sec::trust::DefaultForLevel for Options {
fn default_for_level(level: gix_sec::Trust) -> Self {
match level {
diff --git a/vendor/gix/src/open/repository.rs b/vendor/gix/src/open/repository.rs
index e89fdc430..fde647a4e 100644
--- a/vendor/gix/src/open/repository.rs
+++ b/vendor/gix/src/open/repository.rs
@@ -2,6 +2,7 @@
use std::{borrow::Cow, path::PathBuf};
use gix_features::threading::OwnShared;
+use gix_macros::momo;
use super::{Error, Options};
use crate::{
@@ -50,7 +51,17 @@ impl ThreadSafeRepository {
/// `options` for fine-grained control.
///
/// Note that you should use [`crate::discover()`] if security should be adjusted by ownership.
+ ///
+ /// ### Differences to `git2::Repository::open_ext()`
+ ///
+ /// Whereas `open_ext()` is the jack-of-all-trades that can do anything depending on its options, `gix` will always differentiate
+ /// between discovering git repositories by searching, and opening a well-known repository by work tree or `.git` repository.
+ ///
+ /// Note that opening a repository for implementing custom hooks is also handled specifically in
+ /// [`open_with_environment_overrides()`][Self::open_with_environment_overrides()].
+ #[momo]
pub fn open_opts(path: impl Into<PathBuf>, mut options: Options) -> Result<Self, Error> {
+ let _span = gix_trace::coarse!("ThreadSafeRepository::open()");
let (path, kind) = {
let path = path.into();
let looks_like_git_dir =
@@ -96,13 +107,16 @@ impl ThreadSafeRepository {
/// Note that this will read various `GIT_*` environment variables to check for overrides, and is probably most useful when implementing
/// custom hooks.
// TODO: tests, with hooks, GIT_QUARANTINE for ref-log and transaction control (needs gix-sec support to remove write access in gix-ref)
- // TODO: The following vars should end up as overrides of the respective configuration values (see gix-config).
+ // TODO: The following vars should end up as overrides of the respective configuration values (see git-config).
// GIT_PROXY_SSL_CERT, GIT_PROXY_SSL_KEY, GIT_PROXY_SSL_CERT_PASSWORD_PROTECTED.
// GIT_PROXY_SSL_CAINFO, GIT_SSL_CIPHER_LIST, GIT_HTTP_MAX_REQUESTS, GIT_CURL_FTP_NO_EPSV,
+ #[doc(alias = "open_from_env", alias = "git2")]
+ #[momo]
pub fn open_with_environment_overrides(
fallback_directory: impl Into<PathBuf>,
trust_map: gix_sec::trust::Mapping<Options>,
) -> Result<Self, Error> {
+ let _span = gix_trace::coarse!("ThreadSafeRepository::open_with_environment_overrides()");
let overrides = EnvironmentOverrides::from_env()?;
let (path, path_kind): (PathBuf, _) = match overrides.git_dir {
Some(git_dir) => gix_discover::is_git(&git_dir)
@@ -139,6 +153,7 @@ impl ThreadSafeRepository {
mut worktree_dir: Option<PathBuf>,
options: Options,
) -> Result<Self, Error> {
+ let _span = gix_trace::detail!("open_from_paths()");
let Options {
git_dir_trust,
object_store_slots,
@@ -164,7 +179,7 @@ impl ThreadSafeRepository {
// This would be something read in later as have to first check for extensions. Also this means
// that each worktree, even if accessible through this instance, has to come in its own Repository instance
// as it may have its own configuration. That's fine actually.
- let common_dir = gix_discover::path::from_plain_file(git_dir.join("commondir"))
+ let common_dir = gix_discover::path::from_plain_file(git_dir.join("commondir").as_ref())
.transpose()?
.map(|cd| git_dir.join(cd));
let common_dir_ref = common_dir.as_deref().unwrap_or(&git_dir);
@@ -180,8 +195,10 @@ impl ThreadSafeRepository {
let reflog = repo_config.reflog.unwrap_or(gix_ref::store::WriteReflog::Disable);
let object_hash = repo_config.object_hash;
match &common_dir {
- Some(common_dir) => crate::RefStore::for_linked_worktree(&git_dir, common_dir, reflog, object_hash),
- None => crate::RefStore::at(&git_dir, reflog, object_hash),
+ Some(common_dir) => {
+ crate::RefStore::for_linked_worktree(git_dir.to_owned(), common_dir.into(), reflog, object_hash)
+ }
+ None => crate::RefStore::at(git_dir.to_owned(), reflog, object_hash),
}
};
let head = refs.find("HEAD").ok();
@@ -205,7 +222,13 @@ impl ThreadSafeRepository {
)?;
if bail_if_untrusted && git_dir_trust != gix_sec::Trust::Full {
- check_safe_directories(&git_dir, git_install_dir.as_deref(), home.as_deref(), &config)?;
+ check_safe_directories(
+ &git_dir,
+ git_install_dir.as_deref(),
+ current_dir,
+ home.as_deref(),
+ &config,
+ )?;
}
// core.worktree might be used to overwrite the worktree directory
@@ -218,7 +241,7 @@ impl ThreadSafeRepository {
.interpolate(interpolate_context(git_install_dir.as_deref(), home.as_deref()))
.map_err(config::Error::PathInterpolation)?;
worktree_dir = {
- gix_path::normalize(git_dir.join(wt_path), current_dir)
+ gix_path::normalize(git_dir.join(wt_path).into(), current_dir)
.and_then(|wt| wt.as_ref().is_dir().then(|| wt.into_owned()))
}
}
@@ -238,6 +261,7 @@ impl ThreadSafeRepository {
refs.write_reflog = config::cache::util::reflog_or_default(config.reflog, worktree_dir.is_some());
let replacements = replacement_objects_refs_prefix(&config.resolved, lenient_config, filter_config_section)?
.and_then(|prefix| {
+ let _span = gix_trace::detail!("find replacement objects");
let platform = refs.iter().ok()?;
let iter = platform.prefixed(&prefix).ok()?;
let prefix = prefix.to_str()?;
@@ -257,7 +281,7 @@ impl ThreadSafeRepository {
Ok(ThreadSafeRepository {
objects: OwnShared::new(gix_odb::Store::at_opts(
common_dir_ref.join("objects"),
- replacements,
+ &mut replacements.into_iter(),
gix_odb::store::init::Options {
slots: object_store_slots,
object_hash: config.object_hash,
@@ -271,8 +295,11 @@ impl ThreadSafeRepository {
config,
// used when spawning new repositories off this one when following worktrees
linked_worktree_options: options,
+ #[cfg(feature = "index")]
index: gix_fs::SharedFileSnapshotMut::new().into(),
shallow_commits: gix_fs::SharedFileSnapshotMut::new().into(),
+ #[cfg(feature = "attributes")]
+ modules: gix_fs::SharedFileSnapshotMut::new().into(),
})
}
}
@@ -309,11 +336,12 @@ fn replacement_objects_refs_prefix(
fn check_safe_directories(
git_dir: &std::path::Path,
git_install_dir: Option<&std::path::Path>,
+ current_dir: &std::path::Path,
home: Option<&std::path::Path>,
config: &config::Cache,
) -> Result<(), Error> {
let mut is_safe = false;
- let git_dir = match gix_path::realpath(git_dir) {
+ let git_dir = match gix_path::realpath_opts(git_dir, current_dir, gix_path::realpath::MAX_SYMLINKS) {
Ok(p) => p,
Err(_) => git_dir.to_owned(),
};
diff --git a/vendor/gix/src/pathspec.rs b/vendor/gix/src/pathspec.rs
new file mode 100644
index 000000000..235a91d76
--- /dev/null
+++ b/vendor/gix/src/pathspec.rs
@@ -0,0 +1,207 @@
+//! Pathspec plumbing and abstractions
+use gix_macros::momo;
+use gix_odb::FindExt;
+pub use gix_pathspec::*;
+
+use crate::{bstr::BStr, AttributeStack, Pathspec, PathspecDetached, Repository};
+
+///
+pub mod init {
+ /// The error returned by [`Pathspec::new()`](super::Pathspec::new()).
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ MakeAttributes(#[from] Box<dyn std::error::Error + Send + Sync + 'static>),
+ #[error(transparent)]
+ Defaults(#[from] crate::repository::pathspec_defaults_ignore_case::Error),
+ #[error(transparent)]
+ ParseSpec(#[from] gix_pathspec::parse::Error),
+ #[error(
+ "Could not obtain the repository prefix as the relative path of the CWD as seen from the working tree"
+ )]
+ NormalizeSpec(#[from] gix_pathspec::normalize::Error),
+ #[error(transparent)]
+ RepoPrefix(#[from] gix_path::realpath::Error),
+ }
+}
+
+/// Lifecycle
+impl<'repo> Pathspec<'repo> {
+ /// Create a new instance by parsing `patterns` into [`Pathspecs`](Pattern) to make them usable for searches.
+ /// `make_attribute` may be called if one of the patterns has a `(attr:a)` element which requires attribute matching. It should
+ /// be used to control where attributes are coming from.
+ /// If `inherit_ignore_case` is `true`, the pathspecs may have their ignore-case default overridden to be case-insensitive by default.
+ /// This only works towards turning ignore-case for pathspecs on, but won't ever turn that setting off.
+ ///
+ /// ### Deviation
+ ///
+ /// Pathspecs can declare to be case-insensitive as part of their elements, which is a setting that is now respected for attribute
+ /// queries as well.
+ pub fn new(
+ repo: &'repo Repository,
+ patterns: impl IntoIterator<Item = impl AsRef<BStr>>,
+ inherit_ignore_case: bool,
+ make_attributes: impl FnOnce() -> Result<gix_worktree::Stack, Box<dyn std::error::Error + Send + Sync + 'static>>,
+ ) -> Result<Self, init::Error> {
+ let defaults = repo.pathspec_defaults_inherit_ignore_case(inherit_ignore_case)?;
+ let patterns = patterns
+ .into_iter()
+ .map(move |p| parse(p.as_ref(), defaults))
+ .collect::<Result<Vec<_>, _>>()?;
+ let needs_cache = patterns.iter().any(|p| !p.attributes.is_empty());
+ let search = Search::from_specs(
+ patterns,
+ repo.prefix()?,
+ &gix_path::realpath_opts(
+ repo.work_dir().unwrap_or_else(|| repo.git_dir()),
+ repo.options.current_dir_or_empty(),
+ gix_path::realpath::MAX_SYMLINKS,
+ )?,
+ )?;
+ let cache = needs_cache.then(make_attributes).transpose()?;
+ Ok(Self {
+ repo,
+ search,
+ stack: cache,
+ })
+ }
+ /// Turn ourselves into the functional parts for direct usage.
+ /// Note that the [`cache`](AttributeStack) is only set if one of the [`search` patterns](Search)
+ /// is specifying attributes to match for.
+ pub fn into_parts(self) -> (Search, Option<AttributeStack<'repo>>) {
+ (
+ self.search,
+ self.stack.map(|stack| AttributeStack::new(stack, self.repo)),
+ )
+ }
+
+ /// Turn ourselves into an implementation that works without a repository instance and that is rather minimal.
+ pub fn detach(self) -> std::io::Result<PathspecDetached> {
+ Ok(PathspecDetached {
+ search: self.search,
+ stack: self.stack,
+ odb: self.repo.objects.clone().into_arc()?,
+ })
+ }
+}
+
+/// Access
+impl<'repo> Pathspec<'repo> {
+ /// Return the attributes cache which is used when matching attributes in pathspecs, or `None` if none of the pathspecs require that.
+ pub fn attributes(&self) -> Option<&gix_worktree::Stack> {
+ self.stack.as_ref()
+ }
+
+ /// Return the search itself which can be used for matching paths or accessing the actual patterns that will be used.
+ pub fn search(&self) -> &gix_pathspec::Search {
+ &self.search
+ }
+
+ /// Return the first [`Match`](search::Match) of `relative_path`, or `None`.
+ /// Note that the match might [be excluded](search::Match::is_excluded()).
+ /// `is_dir` is true if `relative_path` is a directory.
+ #[doc(
+ alias = "match_diff",
+ alias = "match_tree",
+ alias = "match_index",
+ alias = "match_workdir",
+ alias = "matches_path",
+ alias = "git2"
+ )]
+ #[momo]
+ pub fn pattern_matching_relative_path<'a>(
+ &mut self,
+ relative_path: impl Into<&'a BStr>,
+ is_dir: Option<bool>,
+ ) -> Option<gix_pathspec::search::Match<'_>> {
+ self.search.pattern_matching_relative_path(
+ relative_path.into(),
+ is_dir,
+ &mut |relative_path, case, is_dir, out| {
+ let stack = self.stack.as_mut().expect("initialized in advance");
+ stack
+ .set_case(case)
+ .at_entry(relative_path, Some(is_dir), |id, buf| {
+ self.repo.objects.find_blob(id, buf)
+ })
+ .map_or(false, |platform| platform.matching_attributes(out))
+ },
+ )
+ }
+
+ /// The simplified version of [`pattern_matching_relative_path()`](Self::pattern_matching_relative_path()) which returns
+ /// `true` if `relative_path` is included in the set of positive pathspecs, while not being excluded.
+ #[momo]
+ pub fn is_included<'a>(&mut self, relative_path: impl Into<&'a BStr>, is_dir: Option<bool>) -> bool {
+ self.pattern_matching_relative_path(relative_path, is_dir)
+ .map_or(false, |m| !m.is_excluded())
+ }
+
+ /// Return an iterator over all entries along with their path if the path matches the pathspec, or `None` if the pathspec is
+ /// known to match no entry.
+ // TODO: tests
+ pub fn index_entries_with_paths<'s: 'repo, 'a: 'repo>(
+ &'s mut self,
+ index: &'a gix_index::State,
+ ) -> Option<impl Iterator<Item = (&'a BStr, &'a gix_index::Entry)> + 'repo + 's> {
+ index.prefixed_entries(self.search.common_prefix()).map(|entries| {
+ entries.iter().filter_map(move |entry| {
+ let path = entry.path(index);
+ self.is_included(path, Some(false)).then_some((path, entry))
+ })
+ })
+ }
+}
+
+/// Access
+impl PathspecDetached {
+ /// Return the first [`Match`](search::Match) of `relative_path`, or `None`.
+ /// Note that the match might [be excluded](search::Match::is_excluded()).
+ /// `is_dir` is true if `relative_path` is a directory.
+ #[doc(
+ alias = "match_diff",
+ alias = "match_tree",
+ alias = "match_index",
+ alias = "match_workdir",
+ alias = "matches_path",
+ alias = "git2"
+ )]
+ #[momo]
+ pub fn pattern_matching_relative_path<'a>(
+ &mut self,
+ relative_path: impl Into<&'a BStr>,
+ is_dir: Option<bool>,
+ ) -> Option<gix_pathspec::search::Match<'_>> {
+ self.search.pattern_matching_relative_path(
+ relative_path.into(),
+ is_dir,
+ &mut |relative_path, case, is_dir, out| {
+ let stack = self.stack.as_mut().expect("initialized in advance");
+ stack
+ .set_case(case)
+ .at_entry(relative_path, Some(is_dir), |id, buf| self.odb.find_blob(id, buf))
+ .map_or(false, |platform| platform.matching_attributes(out))
+ },
+ )
+ }
+
+ /// The simplified version of [`pattern_matching_relative_path()`](Self::pattern_matching_relative_path()) which returns
+ /// `true` if `relative_path` is included in the set of positive pathspecs, while not being excluded.
+ #[momo]
+ pub fn is_included<'a>(&mut self, relative_path: impl Into<&'a BStr>, is_dir: Option<bool>) -> bool {
+ self.pattern_matching_relative_path(relative_path, is_dir)
+ .map_or(false, |m| !m.is_excluded())
+ }
+}
+
+#[cfg(feature = "status")]
+impl gix_status::Pathspec for PathspecDetached {
+ fn common_prefix(&self) -> &BStr {
+ self.search.common_prefix()
+ }
+
+ fn is_included(&mut self, relative_path: &BStr, is_dir: Option<bool>) -> bool {
+ self.is_included(relative_path, is_dir)
+ }
+}
diff --git a/vendor/gix/src/reference/edits.rs b/vendor/gix/src/reference/edits.rs
index c6510c2e0..208340770 100644
--- a/vendor/gix/src/reference/edits.rs
+++ b/vendor/gix/src/reference/edits.rs
@@ -1,8 +1,8 @@
///
pub mod set_target_id {
- use gix_ref::{transaction::PreviousValue, Target};
-
use crate::{bstr::BString, Reference};
+ use gix_macros::momo;
+ use gix_ref::{transaction::PreviousValue, Target};
mod error {
use gix_ref::FullName;
@@ -28,6 +28,7 @@ pub mod set_target_id {
/// If multiple reference should be changed, use [`Repository::edit_references()`][crate::Repository::edit_references()]
/// or the lower level reference database instead.
#[allow(clippy::result_large_err)]
+ #[momo]
pub fn set_target_id(
&mut self,
id: impl Into<gix_hash::ObjectId>,
diff --git a/vendor/gix/src/reference/iter.rs b/vendor/gix/src/reference/iter.rs
index a2b022f64..a79a74743 100644
--- a/vendor/gix/src/reference/iter.rs
+++ b/vendor/gix/src/reference/iter.rs
@@ -1,6 +1,7 @@
//!
use std::path::Path;
+use gix_macros::momo;
use gix_odb::pack::Find;
use gix_ref::file::ReferenceExt;
@@ -42,8 +43,9 @@ impl<'r> Platform<'r> {
/// These are of the form `refs/heads` or `refs/remotes/origin`, and must not contain relative paths components like `.` or `..`.
// TODO: Create a custom `Path` type that enforces the requirements of git naturally, this type is surprising possibly on windows
// and when not using a trailing '/' to signal directories.
+ #[momo]
pub fn prefixed(&self, prefix: impl AsRef<Path>) -> Result<Iter<'_>, init::Error> {
- Ok(Iter::new(self.repo, self.platform.prefixed(prefix)?))
+ Ok(Iter::new(self.repo, self.platform.prefixed(prefix.as_ref())?))
}
// TODO: tests
@@ -51,7 +53,7 @@ impl<'r> Platform<'r> {
///
/// They are all prefixed with `refs/tags`.
pub fn tags(&self) -> Result<Iter<'_>, init::Error> {
- Ok(Iter::new(self.repo, self.platform.prefixed("refs/tags/")?))
+ Ok(Iter::new(self.repo, self.platform.prefixed("refs/tags/".as_ref())?))
}
// TODO: tests
@@ -59,7 +61,7 @@ impl<'r> Platform<'r> {
///
/// They are all prefixed with `refs/heads`.
pub fn local_branches(&self) -> Result<Iter<'_>, init::Error> {
- Ok(Iter::new(self.repo, self.platform.prefixed("refs/heads/")?))
+ Ok(Iter::new(self.repo, self.platform.prefixed("refs/heads/".as_ref())?))
}
// TODO: tests
@@ -67,7 +69,7 @@ impl<'r> Platform<'r> {
///
/// They are all prefixed with `refs/remotes`.
pub fn remote_branches(&self) -> Result<Iter<'_>, init::Error> {
- Ok(Iter::new(self.repo, self.platform.prefixed("refs/remotes/")?))
+ Ok(Iter::new(self.repo, self.platform.prefixed("refs/remotes/".as_ref())?))
}
}
@@ -95,10 +97,10 @@ impl<'r> Iterator for Iter<'r> {
.and_then(|mut r| {
if self.peel {
let handle = &self.repo;
- r.peel_to_id_in_place(&handle.refs, |oid, buf| {
+ r.peel_to_id_in_place(&handle.refs, &mut |oid, buf| {
handle
.objects
- .try_find(oid, buf)
+ .try_find(oid.as_ref(), buf)
.map(|po| po.map(|(o, _l)| (o.kind, o.data)))
})
.map_err(|err| Box::new(err) as Box<dyn std::error::Error + Send + Sync + 'static>)
diff --git a/vendor/gix/src/reference/log.rs b/vendor/gix/src/reference/log.rs
index b516e6499..2fea1782c 100644
--- a/vendor/gix/src/reference/log.rs
+++ b/vendor/gix/src/reference/log.rs
@@ -12,6 +12,11 @@ impl<'repo> Reference<'repo> {
pub fn log_iter(&self) -> gix_ref::file::log::iter::Platform<'_, '_> {
self.inner.log_iter(&self.repo.refs)
}
+
+ /// Return true if a reflog is present for this reference.
+ pub fn log_exists(&self) -> bool {
+ self.inner.log_exists(&self.repo.refs)
+ }
}
/// Generate a message typical for git commit logs based on the given `operation`, commit `message` and `num_parents` of the commit.
diff --git a/vendor/gix/src/reference/mod.rs b/vendor/gix/src/reference/mod.rs
index e2ee0d3b2..e80057fb4 100644
--- a/vendor/gix/src/reference/mod.rs
+++ b/vendor/gix/src/reference/mod.rs
@@ -62,6 +62,7 @@ impl<'repo> Reference<'repo> {
}
}
+/// Peeling
impl<'repo> Reference<'repo> {
/// Follow all symbolic targets this reference might point to and peel the underlying object
/// to the end of the chain, and return it.
@@ -69,9 +70,9 @@ impl<'repo> Reference<'repo> {
/// This is useful to learn where this reference is ultimately pointing to.
pub fn peel_to_id_in_place(&mut self) -> Result<Id<'repo>, peel::Error> {
let repo = &self.repo;
- let oid = self.inner.peel_to_id_in_place(&repo.refs, |oid, buf| {
+ let oid = self.inner.peel_to_id_in_place(&repo.refs, &mut |oid, buf| {
repo.objects
- .try_find(oid, buf)
+ .try_find(&oid, buf)
.map(|po| po.map(|(o, _l)| (o.kind, o.data)))
})?;
Ok(Id::from_id(oid, repo))
@@ -81,6 +82,18 @@ impl<'repo> Reference<'repo> {
pub fn into_fully_peeled_id(mut self) -> Result<Id<'repo>, peel::Error> {
self.peel_to_id_in_place()
}
+
+ /// Follow this symbolic reference one level and return the ref it refers to.
+ ///
+ /// Returns `None` if this is not a symbolic reference, hence the leaf of the chain.
+ pub fn follow(&self) -> Option<Result<Reference<'repo>, gix_ref::file::find::existing::Error>> {
+ self.inner.follow(&self.repo.refs).map(|res| {
+ res.map(|r| Reference {
+ inner: r,
+ repo: self.repo,
+ })
+ })
+ }
}
mod edits;
diff --git a/vendor/gix/src/remote/build.rs b/vendor/gix/src/remote/build.rs
index 10c216537..452da66a0 100644
--- a/vendor/gix/src/remote/build.rs
+++ b/vendor/gix/src/remote/build.rs
@@ -10,7 +10,10 @@ impl Remote<'_> {
Url: TryInto<gix_url::Url, Error = E>,
gix_url::parse::Error: From<E>,
{
- self.push_url_inner(url, true)
+ self.push_url_inner(
+ url.try_into().map_err(|err| remote::init::Error::Url(err.into()))?,
+ true,
+ )
}
/// Set the `url` to be used when pushing data to a remote, without applying rewrite rules in case these could be faulty,
@@ -20,7 +23,10 @@ impl Remote<'_> {
Url: TryInto<gix_url::Url, Error = E>,
gix_url::parse::Error: From<E>,
{
- self.push_url_inner(url, false)
+ self.push_url_inner(
+ url.try_into().map_err(|err| remote::init::Error::Url(err.into()))?,
+ false,
+ )
}
/// Configure how tags should be handled when fetching from the remote.
@@ -29,14 +35,11 @@ impl Remote<'_> {
self
}
- fn push_url_inner<Url, E>(mut self, push_url: Url, should_rewrite_urls: bool) -> Result<Self, remote::init::Error>
- where
- Url: TryInto<gix_url::Url, Error = E>,
- gix_url::parse::Error: From<E>,
- {
- let push_url = push_url
- .try_into()
- .map_err(|err| remote::init::Error::Url(err.into()))?;
+ fn push_url_inner(
+ mut self,
+ push_url: gix_url::Url,
+ should_rewrite_urls: bool,
+ ) -> Result<Self, remote::init::Error> {
self.push_url = push_url.into();
let (_, push_url_alias) = should_rewrite_urls
diff --git a/vendor/gix/src/remote/connect.rs b/vendor/gix/src/remote/connect.rs
index 63475b7c5..6acc9f67f 100644
--- a/vendor/gix/src/remote/connect.rs
+++ b/vendor/gix/src/remote/connect.rs
@@ -1,5 +1,7 @@
#![allow(clippy::result_large_err)]
+
use gix_protocol::transport::client::Transport;
+use std::borrow::Cow;
use crate::{remote::Connection, Remote};
@@ -104,7 +106,7 @@ impl<'repo> Remote<'repo> {
) -> Result<(gix_url::Url, gix_protocol::transport::Protocol), Error> {
fn sanitize(mut url: gix_url::Url) -> Result<gix_url::Url, Error> {
if url.scheme == gix_url::Scheme::File {
- let mut dir = gix_path::to_native_path_on_windows(url.path.as_ref());
+ let mut dir = gix_path::to_native_path_on_windows(Cow::Borrowed(url.path.as_ref()));
let kind = gix_discover::is_git(dir.as_ref())
.or_else(|_| {
dir.to_mut().push(gix_discover::DOT_GIT_DIR);
@@ -117,7 +119,7 @@ impl<'repo> Remote<'repo> {
let (git_dir, _work_dir) = gix_discover::repository::Path::from_dot_git_dir(
dir.clone().into_owned(),
kind,
- std::env::current_dir()?,
+ &std::env::current_dir()?,
)
.ok_or_else(|| Error::InvalidRemoteRepositoryPath {
directory: dir.into_owned(),
diff --git a/vendor/gix/src/remote/connection/fetch/mod.rs b/vendor/gix/src/remote/connection/fetch/mod.rs
index b4fe00935..8327d5abc 100644
--- a/vendor/gix/src/remote/connection/fetch/mod.rs
+++ b/vendor/gix/src/remote/connection/fetch/mod.rs
@@ -46,27 +46,25 @@ pub enum Status {
///
/// As we could determine that nothing changed without remote interaction, there was no negotiation at all.
NoPackReceived {
+ /// If `true`, we didn't receive a pack due to dry-run mode being enabled.
+ dry_run: bool,
+ /// Information about the pack negotiation phase if negotiation happened at all.
+ ///
+ /// It's possible that negotiation didn't have to happen as no reference of interest changed on the server.
+ negotiate: Option<outcome::Negotiate>,
/// However, depending on the refspecs, references might have been updated nonetheless to point to objects as
/// reported by the remote.
update_refs: refs::update::Outcome,
},
/// There was at least one tip with a new object which we received.
Change {
- /// The number of rounds it took to minimize the pack to contain only the objects we don't have.
- negotiation_rounds: usize,
+ /// Information about the pack negotiation phase.
+ negotiate: outcome::Negotiate,
/// Information collected while writing the pack and its index.
write_pack_bundle: gix_pack::bundle::write::Outcome,
/// Information collected while updating references.
update_refs: refs::update::Outcome,
},
- /// A dry run was performed which leaves the local repository without any change
- /// nor will a pack have been received.
- DryRun {
- /// The number of rounds it took to minimize the *would-be-sent*-pack to contain only the objects we don't have.
- negotiation_rounds: usize,
- /// Information about what updates to refs would have been done.
- update_refs: refs::update::Outcome,
- },
}
/// The outcome of receiving a pack via [`Prepare::receive()`].
@@ -78,6 +76,46 @@ pub struct Outcome {
pub status: Status,
}
+/// Additional types related to the outcome of a fetch operation.
+pub mod outcome {
+ /// Information about the negotiation phase of a fetch.
+ ///
+ /// Note that negotiation can happen even if no pack is ultimately produced.
+ #[derive(Default, Debug, Clone)]
+ pub struct Negotiate {
+ /// The negotiation graph indicating what kind of information 'the algorithm' collected in the end.
+ pub graph: gix_negotiate::IdMap,
+ /// Additional information for each round of negotiation.
+ pub rounds: Vec<negotiate::Round>,
+ }
+
+ ///
+ pub mod negotiate {
+ /// Key information about each round in the pack-negotiation.
+ #[derive(Debug, Clone)]
+ pub struct Round {
+ /// The amount of `HAVE` lines sent this round.
+ ///
+ /// Each `HAVE` is an object that we tell the server about which would acknowledge each one it has as well.
+ pub haves_sent: usize,
+ /// A total counter, over all previous rounds, indicating how many `HAVE`s we sent without seeing a single acknowledgement,
+ /// i.e. the indication of a common object.
+ ///
+ /// This number may be zero or be lower compared to the previous round if we have received at least one acknowledgement.
+ pub in_vain: usize,
+ /// The amount of haves we should send in this round.
+ ///
+ /// If the value is lower than `haves_sent` (the `HAVE` lines actually sent), the negotiation algorithm has run out of options
+ /// which typically indicates the end of the negotiation phase.
+ pub haves_to_send: usize,
+ /// If `true`, the server reported, as response to our previous `HAVE`s, that at least one of them is in common by acknowledging it.
+ ///
+ /// This may also lead to the server responding with a pack.
+ pub previous_response_had_at_least_one_in_common: bool,
+ }
+ }
+}
+
/// The progress ids used in during various steps of the fetch operation.
///
/// Note that tagged progress isn't very widely available yet, but support can be improved as needed.
@@ -129,7 +167,7 @@ where
/// Note that at this point, the `transport` should already be configured using the [`transport_mut()`][Self::transport_mut()]
/// method, as it will be consumed here.
///
- /// From there additional properties of the fetch can be adjusted to override the defaults that are configured via gix-config.
+ /// From there additional properties of the fetch can be adjusted to override the defaults that are configured via git-config.
///
/// # Async Experimental
///
diff --git a/vendor/gix/src/remote/connection/fetch/negotiate.rs b/vendor/gix/src/remote/connection/fetch/negotiate.rs
index e94461bab..92a141f6f 100644
--- a/vendor/gix/src/remote/connection/fetch/negotiate.rs
+++ b/vendor/gix/src/remote/connection/fetch/negotiate.rs
@@ -1,12 +1,13 @@
use std::borrow::Cow;
+use gix_date::SecondsSinceUnixEpoch;
use gix_negotiate::Flags;
use gix_odb::HeaderExt;
use gix_pack::Find;
use crate::remote::{fetch, fetch::Shallow};
-type Queue = gix_revision::PriorityQueue<gix_revision::graph::CommitterTimestamp, gix_hash::ObjectId>;
+type Queue = gix_revwalk::PriorityQueue<SecondsSinceUnixEpoch, gix_hash::ObjectId>;
/// The error returned during negotiation.
#[derive(Debug, thiserror::Error)]
@@ -15,7 +16,7 @@ pub enum Error {
#[error("We were unable to figure out what objects the server should send after {rounds} round(s)")]
NegotiationFailed { rounds: usize },
#[error(transparent)]
- LookupCommitInGraph(#[from] gix_revision::graph::lookup::commit::Error),
+ LookupCommitInGraph(#[from] gix_revwalk::graph::lookup::commit::Error),
#[error(transparent)]
InitRefsIterator(#[from] crate::reference::iter::init::Error),
#[error(transparent)]
@@ -67,7 +68,9 @@ pub(crate) fn mark_complete_and_common_ref(
graph: &mut gix_negotiate::Graph<'_>,
ref_map: &fetch::RefMap,
shallow: &fetch::Shallow,
+ mapping_is_ignored: impl Fn(&fetch::Mapping) -> bool,
) -> Result<Action, Error> {
+ let _span = gix_trace::detail!("mark_complete_and_common_ref", mappings = ref_map.mappings.len());
if let fetch::Shallow::Deepen(0) = shallow {
// Avoid deepening (relative) with zero as it seems to upset the server. Git also doesn't actually
// perform the negotiation for some reason (couldn't find it in code).
@@ -85,9 +88,10 @@ pub(crate) fn mark_complete_and_common_ref(
// Compute the cut-off date by checking which of the refs advertised (and matched in refspecs) by the remote we have,
// and keep the oldest one.
- let mut cutoff_date = None::<gix_revision::graph::CommitterTimestamp>;
+ let mut cutoff_date = None::<SecondsSinceUnixEpoch>;
let mut num_mappings_with_change = 0;
let mut remote_ref_target_known: Vec<bool> = std::iter::repeat(false).take(ref_map.mappings.len()).collect();
+ let mut remote_ref_included: Vec<bool> = std::iter::repeat(false).take(ref_map.mappings.len()).collect();
for (mapping_idx, mapping) in ref_map.mappings.iter().enumerate() {
let want_id = mapping.remote.as_id();
@@ -97,9 +101,13 @@ pub(crate) fn mark_complete_and_common_ref(
r.target().try_id().map(ToOwned::to_owned)
});
- // Like git, we don't let known unchanged mappings participate in the tree traversal
- if want_id.zip(have_id).map_or(true, |(want, have)| want != have) {
- num_mappings_with_change += 1;
+        // Even for ignored mappings we want to know if the `want` is already present locally, so don't skip anything else here.
+ if !mapping_is_ignored(mapping) {
+ remote_ref_included[mapping_idx] = true;
+ // Like git, we don't let known unchanged mappings participate in the tree traversal
+ if want_id.zip(have_id).map_or(true, |(want, have)| want != have) {
+ num_mappings_with_change += 1;
+ }
}
if let Some(commit) = want_id
@@ -113,11 +121,15 @@ pub(crate) fn mark_complete_and_common_ref(
}
}
- // If any kind of shallowing operation is desired, the server may still create a pack for us.
if matches!(shallow, Shallow::NoChange) {
if num_mappings_with_change == 0 {
return Ok(Action::NoChange);
- } else if remote_ref_target_known.iter().all(|known| *known) {
+ } else if remote_ref_target_known
+ .iter()
+ .zip(remote_ref_included)
+ .filter_map(|(known, included)| included.then_some(known))
+ .all(|known| *known)
+ {
return Ok(Action::SkipToRefUpdate);
}
}
@@ -137,51 +149,75 @@ pub(crate) fn mark_complete_and_common_ref(
Cow::Borrowed(&queue)
};
- // mark all complete advertised refs as common refs.
- for mapping in ref_map
- .mappings
- .iter()
- .zip(remote_ref_target_known.iter().copied())
- // We need this filter as the graph wouldn't contain annotated tags.
- .filter_map(|(mapping, known)| (!known).then_some(mapping))
- {
- let want_id = mapping.remote.as_id();
- if let Some(common_id) = want_id
- .and_then(|id| graph.get(id).map(|c| (c, id)))
- .filter(|(c, _)| c.data.flags.contains(Flags::COMPLETE))
- .map(|(_, id)| id)
+ gix_trace::detail!("mark known_common").into_scope(|| -> Result<_, Error> {
+ // mark all complete advertised refs as common refs.
+ for mapping in ref_map
+ .mappings
+ .iter()
+ .zip(remote_ref_target_known.iter().copied())
+ // We need this filter as the graph wouldn't contain annotated tags.
+ .filter_map(|(mapping, known)| (!known).then_some(mapping))
{
- negotiator.known_common(common_id.into(), graph)?;
+ let want_id = mapping.remote.as_id();
+ if let Some(common_id) = want_id
+ .and_then(|id| graph.get(id).map(|c| (c, id)))
+ .filter(|(c, _)| c.data.flags.contains(Flags::COMPLETE))
+ .map(|(_, id)| id)
+ {
+ negotiator.known_common(common_id.into(), graph)?;
+ }
}
- }
+ Ok(())
+ })?;
// As negotiators currently may rely on getting `known_common` calls first and tips after, we adhere to that which is the only
// reason we cached the set of tips.
- for tip in tips.iter_unordered() {
- negotiator.add_tip(*tip, graph)?;
- }
+ gix_trace::detail!("mark tips", num_tips = tips.len()).into_scope(|| -> Result<_, Error> {
+ for tip in tips.iter_unordered() {
+ negotiator.add_tip(*tip, graph)?;
+ }
+ Ok(())
+ })?;
Ok(Action::MustNegotiate {
remote_ref_target_known,
})
}
-/// Add all `wants` to `arguments`, which is the unpeeled direct target that the advertised remote ref points to.
-pub(crate) fn add_wants(
- repo: &crate::Repository,
- arguments: &mut gix_protocol::fetch::Arguments,
- ref_map: &fetch::RefMap,
- mapping_known: &[bool],
- shallow: &fetch::Shallow,
+/// Create a predicate that checks if a refspec mapping should be ignored.
+///
+/// We want to ignore mappings during negotiation if they would be handled implicitly by the server, which is the case
+/// when tags would be sent implicitly due to `Tags::Included`.
+pub(crate) fn make_refmapping_ignore_predicate(
fetch_tags: fetch::Tags,
-) {
+ ref_map: &fetch::RefMap,
+) -> impl Fn(&fetch::Mapping) -> bool + '_ {
// With included tags, we have to keep mappings of tags to handle them later when updating refs, but we don't want to
// explicitly `want` them as the server will determine by itself which tags are pointing to a commit it wants to send.
// If we would not exclude implicit tag mappings like this, we would get too much of the graph.
let tag_refspec_to_ignore = matches!(fetch_tags, crate::remote::fetch::Tags::Included)
.then(|| fetch_tags.to_refspec())
.flatten();
+ move |mapping| {
+ tag_refspec_to_ignore.map_or(false, |tag_spec| {
+ mapping
+ .spec_index
+ .implicit_index()
+ .and_then(|idx| ref_map.extra_refspecs.get(idx))
+ .map_or(false, |spec| spec.to_ref() == tag_spec)
+ })
+ }
+}
+/// Add all `wants` to `arguments`, which is the unpeeled direct target that the advertised remote ref points to.
+pub(crate) fn add_wants(
+ repo: &crate::Repository,
+ arguments: &mut gix_protocol::fetch::Arguments,
+ ref_map: &fetch::RefMap,
+ mapping_known: &[bool],
+ shallow: &fetch::Shallow,
+ mapping_is_ignored: impl Fn(&fetch::Mapping) -> bool,
+) {
// When using shallow, we can't exclude `wants` as the remote won't send anything then. Thus we have to resend everything
// we have as want instead to get exactly the same graph, but possibly deepened.
let is_shallow = !matches!(shallow, fetch::Shallow::NoChange);
@@ -189,17 +225,9 @@ pub(crate) fn add_wants(
.mappings
.iter()
.zip(mapping_known)
- .filter_map(|(m, known)| (is_shallow || !*known).then_some(m));
+ .filter_map(|(m, known)| (is_shallow || !*known).then_some(m))
+ .filter(|m| !mapping_is_ignored(m));
for want in wants {
- // Here we ignore implicit tag mappings if needed.
- if tag_refspec_to_ignore.map_or(false, |tag_spec| {
- want.spec_index
- .implicit_index()
- .and_then(|idx| ref_map.extra_refspecs.get(idx))
- .map_or(false, |spec| spec.to_ref() == tag_spec)
- }) {
- continue;
- }
let id_on_remote = want.remote.as_id();
if !arguments.can_use_ref_in_want() || matches!(want.remote, fetch::Source::ObjectId(_)) {
if let Some(id) = id_on_remote {
@@ -228,13 +256,14 @@ pub(crate) fn add_wants(
fn mark_recent_complete_commits(
queue: &mut Queue,
graph: &mut gix_negotiate::Graph<'_>,
- cutoff: gix_revision::graph::CommitterTimestamp,
+ cutoff: SecondsSinceUnixEpoch,
) -> Result<(), Error> {
+ let _span = gix_trace::detail!("mark_recent_complete", queue_len = queue.len());
while let Some(id) = queue
.peek()
.and_then(|(commit_time, id)| (commit_time >= &cutoff).then_some(*id))
{
- queue.pop();
+ queue.pop_value();
let commit = graph.get(&id).expect("definitely set when adding tips or parents");
for parent_id in commit.parents.clone() {
let mut was_complete = false;
@@ -258,6 +287,7 @@ fn mark_all_refs_in_repo(
queue: &mut Queue,
mark: Flags,
) -> Result<(), Error> {
+ let _span = gix_trace::detail!("mark_all_refs");
for local_ref in repo.references()?.all()?.peeled() {
let local_ref = local_ref?;
let id = local_ref.id().detach();
@@ -280,17 +310,14 @@ fn mark_alternate_complete(
graph: &mut gix_negotiate::Graph<'_>,
queue: &mut Queue,
) -> Result<(), Error> {
- for alternate_repo in repo
- .objects
- .store_ref()
- .alternate_db_paths()?
- .into_iter()
- .filter_map(|path| {
- path.ancestors()
- .nth(1)
- .and_then(|git_dir| crate::open_opts(git_dir, repo.options.clone()).ok())
- })
- {
+ let alternates = repo.objects.store_ref().alternate_db_paths()?;
+ let _span = gix_trace::detail!("mark_alternate_refs", num_odb = alternates.len());
+
+ for alternate_repo in alternates.into_iter().filter_map(|path| {
+ path.ancestors()
+ .nth(1)
+ .and_then(|git_dir| crate::open_opts(git_dir, repo.options.clone()).ok())
+ }) {
mark_all_refs_in_repo(&alternate_repo, graph, queue, Flags::ALTERNATE | Flags::COMPLETE)?;
}
Ok(())
diff --git a/vendor/gix/src/remote/connection/fetch/receive_pack.rs b/vendor/gix/src/remote/connection/fetch/receive_pack.rs
index 7837a9d3a..18e5ac159 100644
--- a/vendor/gix/src/remote/connection/fetch/receive_pack.rs
+++ b/vendor/gix/src/remote/connection/fetch/receive_pack.rs
@@ -19,17 +19,18 @@ use crate::{
connection::fetch::config,
fetch,
fetch::{
- negotiate, negotiate::Algorithm, refs, Error, Outcome, Prepare, ProgressId, RefLogMessage, Shallow, Status,
+ negotiate, negotiate::Algorithm, outcome, refs, Error, Outcome, Prepare, ProgressId, RefLogMessage,
+ Shallow, Status,
},
},
- Progress, Repository,
+ Repository,
};
impl<'remote, 'repo, T> Prepare<'remote, 'repo, T>
where
T: Transport,
{
- /// Receive the pack and perform the operation as configured by git via `gix-config` or overridden by various builder methods.
+ /// Receive the pack and perform the operation as configured by git via `git-config` or overridden by various builder methods.
/// Return `Ok(None)` if there was nothing to do because all remote refs are at the same state as they are locally, or `Ok(Some(outcome))`
/// to inform about all the changes that were made.
///
@@ -72,18 +73,28 @@ where
/// - `gitoxide.userAgent` is read to obtain the application user agent for git servers and for HTTP servers as well.
///
#[gix_protocol::maybe_async::maybe_async]
- pub async fn receive<P>(mut self, mut progress: P, should_interrupt: &AtomicBool) -> Result<Outcome, Error>
+ pub async fn receive<P>(self, mut progress: P, should_interrupt: &AtomicBool) -> Result<Outcome, Error>
where
- P: Progress,
+ P: gix_features::progress::NestedProgress,
P::SubProgress: 'static,
{
+ self.receive_inner(&mut progress, should_interrupt).await
+ }
+
+ #[gix_protocol::maybe_async::maybe_async]
+ #[allow(clippy::drop_non_drop)]
+ pub(crate) async fn receive_inner(
+ mut self,
+ progress: &mut dyn crate::DynNestedProgress,
+ should_interrupt: &AtomicBool,
+ ) -> Result<Outcome, Error> {
+ let _span = gix_trace::coarse!("fetch::Prepare::receive()");
let mut con = self.con.take().expect("receive() can only be called once");
let handshake = &self.ref_map.handshake;
let protocol_version = handshake.server_protocol_version;
let fetch = gix_protocol::Command::Fetch;
- let progress = &mut progress;
let repo = con.remote.repo;
let fetch_features = {
let mut f = fetch.default_features(protocol_version, &handshake.capabilities);
@@ -114,6 +125,7 @@ where
});
}
+ let negotiate_span = gix_trace::detail!("negotiate");
let mut negotiator = repo
.config
.resolved
@@ -131,20 +143,20 @@ where
r.objects.unset_object_cache();
r
};
- let mut graph = graph_repo.commit_graph();
+ let mut graph = graph_repo.revision_graph();
let action = negotiate::mark_complete_and_common_ref(
&graph_repo,
negotiator.deref_mut(),
&mut graph,
&self.ref_map,
&self.shallow,
+ negotiate::make_refmapping_ignore_predicate(con.remote.fetch_tags, &self.ref_map),
)?;
let mut previous_response = None::<gix_protocol::fetch::Response>;
- let mut round = 1;
- let mut write_pack_bundle = match &action {
+ let (mut write_pack_bundle, negotiate) = match &action {
negotiate::Action::NoChange | negotiate::Action::SkipToRefUpdate => {
gix_protocol::indicate_end_of_interaction(&mut con.transport).await.ok();
- None
+ (None, None)
}
negotiate::Action::MustNegotiate {
remote_ref_target_known,
@@ -155,17 +167,19 @@ where
&self.ref_map,
remote_ref_target_known,
&self.shallow,
- con.remote.fetch_tags,
+ negotiate::make_refmapping_ignore_predicate(con.remote.fetch_tags, &self.ref_map),
);
+ let mut rounds = Vec::new();
let is_stateless =
arguments.is_stateless(!con.transport.connection_persists_across_multiple_requests());
let mut haves_to_send = gix_negotiate::window_size(is_stateless, None);
let mut seen_ack = false;
let mut in_vain = 0;
let mut common = is_stateless.then(Vec::new);
- let reader = 'negotiation: loop {
+ let mut reader = 'negotiation: loop {
+ let _round = gix_trace::detail!("negotiate round", round = rounds.len() + 1);
progress.step();
- progress.set_name(format!("negotiate (round {round})"));
+ progress.set_name(format!("negotiate (round {})", rounds.len() + 1));
let is_done = match negotiate::one_round(
negotiator.deref_mut(),
@@ -181,8 +195,14 @@ where
}
seen_ack |= ack_seen;
in_vain += haves_sent;
+ rounds.push(outcome::negotiate::Round {
+ haves_sent,
+ in_vain,
+ haves_to_send,
+ previous_response_had_at_least_one_in_common: ack_seen,
+ });
let is_done = haves_sent != haves_to_send || (seen_ack && in_vain >= 256);
- haves_to_send = gix_negotiate::window_size(is_stateless, haves_to_send);
+ haves_to_send = gix_negotiate::window_size(is_stateless, Some(haves_to_send));
is_done
}
Err(err) => {
@@ -200,17 +220,17 @@ where
previous_response = Some(response);
if has_pack {
progress.step();
- progress.set_name("receiving pack");
+ progress.set_name("receiving pack".into());
if !sideband_all {
setup_remote_progress(progress, &mut reader, should_interrupt);
}
break 'negotiation reader;
- } else {
- round += 1;
}
};
- drop(graph);
+ let graph = graph.detach();
drop(graph_repo);
+ drop(negotiate_span);
+
let previous_response = previous_response.expect("knowledge of a pack means a response was received");
if !previous_response.shallow_updates().is_empty() && shallow_lock.is_none() {
let reject_shallow_remote = repo
@@ -234,28 +254,34 @@ where
};
let write_pack_bundle = if matches!(self.dry_run, fetch::DryRun::No) {
- Some(gix_pack::Bundle::write_to_directory(
- #[cfg(feature = "async-network-client")]
- {
- gix_protocol::futures_lite::io::BlockOn::new(reader)
- },
- #[cfg(not(feature = "async-network-client"))]
- {
- reader
- },
- Some(repo.objects.store_ref().path().join("pack")),
+ #[cfg(not(feature = "async-network-client"))]
+ let mut rd = reader;
+ #[cfg(feature = "async-network-client")]
+ let mut rd = gix_protocol::futures_lite::io::BlockOn::new(reader);
+ let res = gix_pack::Bundle::write_to_directory(
+ &mut rd,
+ Some(&repo.objects.store_ref().path().join("pack")),
progress,
should_interrupt,
Some(Box::new({
let repo = repo.clone();
- move |oid, buf| repo.objects.find(oid, buf).ok()
+ move |oid, buf| repo.objects.find(&oid, buf).ok()
})),
options,
- )?)
+ )?;
+ #[cfg(feature = "async-network-client")]
+ {
+ reader = rd.into_inner();
+ }
+ #[cfg(not(feature = "async-network-client"))]
+ {
+ reader = rd;
+ }
+ Some(res)
} else {
- drop(reader);
None
};
+ drop(reader);
if matches!(protocol_version, gix_protocol::transport::Protocol::V2) {
gix_protocol::indicate_end_of_interaction(&mut con.transport).await.ok();
@@ -266,7 +292,7 @@ where
crate::shallow::write(shallow_lock, shallow_commits, previous_response.shallow_updates())?;
}
}
- write_pack_bundle
+ (write_pack_bundle, Some(outcome::Negotiate { graph, rounds }))
}
};
@@ -293,21 +319,17 @@ where
let out = Outcome {
ref_map: std::mem::take(&mut self.ref_map),
- status: if matches!(self.dry_run, fetch::DryRun::Yes) {
- assert!(write_pack_bundle.is_none(), "in dry run we never read a bundle");
- Status::DryRun {
+ status: match write_pack_bundle {
+ Some(write_pack_bundle) => Status::Change {
+ write_pack_bundle,
update_refs,
- negotiation_rounds: round,
- }
- } else {
- match write_pack_bundle {
- Some(write_pack_bundle) => Status::Change {
- write_pack_bundle,
- update_refs,
- negotiation_rounds: round,
- },
- None => Status::NoPackReceived { update_refs },
- }
+ negotiate: negotiate.expect("if we have a pack, we always negotiated it"),
+ },
+ None => Status::NoPackReceived {
+ dry_run: matches!(self.dry_run, fetch::DryRun::Yes),
+ negotiate,
+ update_refs,
+ },
},
};
Ok(out)
@@ -348,14 +370,14 @@ fn add_shallow_args(
args.deepen_relative();
}
Shallow::Since { cutoff } => {
- args.deepen_since(cutoff.seconds_since_unix_epoch as usize);
+ args.deepen_since(cutoff.seconds);
}
Shallow::Exclude {
remote_refs,
since_cutoff,
} => {
if let Some(cutoff) = since_cutoff {
- args.deepen_since(cutoff.seconds_since_unix_epoch as usize);
+ args.deepen_since(cutoff.seconds);
}
for ref_ in remote_refs {
args.deepen_not(ref_.as_ref().as_bstr());
@@ -365,17 +387,14 @@ fn add_shallow_args(
Ok((shallow_commits, shallow_lock))
}
-fn setup_remote_progress<P>(
- progress: &mut P,
+fn setup_remote_progress(
+ progress: &mut dyn crate::DynNestedProgress,
reader: &mut Box<dyn gix_protocol::transport::client::ExtendedBufRead + Unpin + '_>,
should_interrupt: &AtomicBool,
-) where
- P: Progress,
- P::SubProgress: 'static,
-{
+) {
use gix_protocol::transport::client::ExtendedBufRead;
reader.set_progress_handler(Some(Box::new({
- let mut remote_progress = progress.add_child_with_id("remote", ProgressId::RemoteProgress.into());
+ let mut remote_progress = progress.add_child_with_id("remote".to_string(), ProgressId::RemoteProgress.into());
// SAFETY: Ugh, so, with current Rust I can't declare lifetimes in the involved traits the way they need to
// be and I also can't use scoped threads to pump from local scopes to an Arc version that could be
// used here due to the this being called from sync AND async code (and the async version doesn't work
diff --git a/vendor/gix/src/remote/connection/fetch/update_refs/mod.rs b/vendor/gix/src/remote/connection/fetch/update_refs/mod.rs
index 953490672..3d6fb18bd 100644
--- a/vendor/gix/src/remote/connection/fetch/update_refs/mod.rs
+++ b/vendor/gix/src/remote/connection/fetch/update_refs/mod.rs
@@ -11,7 +11,10 @@ use crate::{
ext::ObjectIdExt,
remote::{
fetch,
- fetch::{refs::update::Mode, RefLogMessage, Source},
+ fetch::{
+ refs::update::{Mode, TypeChange},
+ RefLogMessage, Source,
+ },
},
Repository,
};
@@ -23,14 +26,20 @@ pub mod update;
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Update {
/// The way the update was performed.
- pub mode: update::Mode,
+ pub mode: Mode,
+ /// If not `None`, the update also affects the type of the reference. This also implies that `edit_index` is not None.
+ pub type_change: Option<TypeChange>,
/// The index to the edit that was created from the corresponding mapping, or `None` if there was no local ref.
pub edit_index: Option<usize>,
}
-impl From<update::Mode> for Update {
+impl From<Mode> for Update {
fn from(mode: Mode) -> Self {
- Update { mode, edit_index: None }
+ Update {
+ mode,
+ type_change: None,
+ edit_index: None,
+ }
}
}
@@ -42,6 +51,14 @@ impl From<update::Mode> for Update {
/// `action` is the prefix used for reflog entries, and is typically "fetch".
///
/// It can be used to produce typical information that one is used to from `git fetch`.
+///
+/// We will reject updates only if…
+///
+/// * …fast-forward rules are violated
+/// * …the local ref is currently checked out
+/// * …existing refs would not become 'unborn', i.e. point to a reference that doesn't exist and won't be created due to ref-specs
+///
+/// With these safeguards in place, one can handle each naturally and implement mirrors or bare repos easily.
#[allow(clippy::too_many_arguments)]
pub(crate) fn update(
repo: &Repository,
@@ -53,8 +70,10 @@ pub(crate) fn update(
dry_run: fetch::DryRun,
write_packed_refs: fetch::WritePackedRefs,
) -> Result<update::Outcome, update::Error> {
+ let _span = gix_trace::detail!("update_refs()", mappings = mappings.len());
let mut edits = Vec::new();
let mut updates = Vec::new();
+ let mut edit_indices_to_validate = Vec::new();
let implicit_tag_refspec = fetch_tags
.to_refspec()
@@ -75,46 +94,56 @@ pub(crate) fn update(
})
},
) {
- let remote_id = match remote.as_id() {
- Some(id) => id,
- None => continue,
- };
- if dry_run == fetch::DryRun::No && !repo.objects.contains(remote_id) {
- let update = if is_implicit_tag {
- update::Mode::ImplicitTagNotSentByRemote.into()
- } else {
- update::Mode::RejectedSourceObjectNotFound { id: remote_id.into() }.into()
- };
- updates.push(update);
- continue;
+ // `None` only if unborn.
+ let remote_id = remote.as_id();
+ if matches!(dry_run, fetch::DryRun::No) && !remote_id.map_or(true, |id| repo.objects.contains(id)) {
+ if let Some(remote_id) = remote_id.filter(|id| !repo.objects.contains(id)) {
+ let update = if is_implicit_tag {
+ Mode::ImplicitTagNotSentByRemote.into()
+ } else {
+ Mode::RejectedSourceObjectNotFound { id: remote_id.into() }.into()
+ };
+ updates.push(update);
+ continue;
+ }
}
- let checked_out_branches = worktree_branches(repo)?;
- let (mode, edit_index) = match local {
+ let mut checked_out_branches = worktree_branches(repo)?;
+ let (mode, edit_index, type_change) = match local {
Some(name) => {
let (mode, reflog_message, name, previous_value) = match repo.try_find_reference(name)? {
Some(existing) => {
- if let Some(wt_dir) = checked_out_branches.get(existing.name()) {
- let mode = update::Mode::RejectedCurrentlyCheckedOut {
- worktree_dir: wt_dir.to_owned(),
+ if let Some(wt_dirs) = checked_out_branches.get_mut(existing.name()) {
+ wt_dirs.sort();
+ wt_dirs.dedup();
+ let mode = Mode::RejectedCurrentlyCheckedOut {
+ worktree_dirs: wt_dirs.to_owned(),
};
updates.push(mode.into());
continue;
}
- match existing.target() {
- TargetRef::Symbolic(_) => {
- updates.push(update::Mode::RejectedSymbolic.into());
- continue;
- }
- TargetRef::Peeled(local_id) => {
- let previous_value =
- PreviousValue::MustExistAndMatch(Target::Peeled(local_id.to_owned()));
+
+ match existing
+ .try_id()
+ .map_or_else(|| existing.clone().peel_to_id_in_place(), Ok)
+ .map(crate::Id::detach)
+ {
+ Ok(local_id) => {
+ let remote_id = match remote_id {
+ Some(id) => id,
+ None => {
+                                    // we don't allow going back to the unborn state if a local reference is already present.
+ // Note that we will be changing it to a symbolic reference just fine.
+ updates.push(Mode::RejectedToReplaceWithUnborn.into());
+ continue;
+ }
+ };
let (mode, reflog_message) = if local_id == remote_id {
- (update::Mode::NoChangeNeeded, "no update will be performed")
+ (Mode::NoChangeNeeded, "no update will be performed")
} else if let Some(gix_ref::Category::Tag) = existing.name().category() {
if spec.allow_non_fast_forward() {
- (update::Mode::Forced, "updating tag")
+ (Mode::Forced, "updating tag")
} else {
- updates.push(update::Mode::RejectedTagUpdate.into());
+ updates.push(Mode::RejectedTagUpdate.into());
continue;
}
} else {
@@ -126,21 +155,21 @@ pub(crate) fn update(
.try_into_commit()
.map_err(|_| ())
.and_then(|c| {
- c.committer().map(|a| a.time.seconds_since_unix_epoch).map_err(|_| ())
+ c.committer().map(|a| a.time.seconds).map_err(|_| ())
}).and_then(|local_commit_time|
- remote_id
- .to_owned()
- .ancestors(|id, buf| repo.objects.find_commit_iter(id, buf))
- .sorting(
- gix_traverse::commit::Sorting::ByCommitTimeNewestFirstCutoffOlderThan {
- time_in_seconds_since_epoch: local_commit_time
- },
- )
- .map_err(|_| ())
- );
+ remote_id
+ .to_owned()
+ .ancestors(|id, buf| repo.objects.find_commit_iter(id, buf))
+ .sorting(
+ gix_traverse::commit::Sorting::ByCommitTimeNewestFirstCutoffOlderThan {
+ seconds: local_commit_time
+ },
+ )
+ .map_err(|_| ())
+ );
match ancestors {
Ok(mut ancestors) => {
- ancestors.any(|cid| cid.map_or(false, |cid| cid == local_id))
+ ancestors.any(|cid| cid.map_or(false, |c| c.id == local_id))
}
Err(_) => {
force = true;
@@ -152,20 +181,41 @@ pub(crate) fn update(
};
if is_fast_forward {
(
- update::Mode::FastForward,
+ Mode::FastForward,
matches!(dry_run, fetch::DryRun::Yes)
.then(|| "fast-forward (guessed in dry-run)")
.unwrap_or("fast-forward"),
)
} else if force {
- (update::Mode::Forced, "forced-update")
+ (Mode::Forced, "forced-update")
} else {
- updates.push(update::Mode::RejectedNonFastForward.into());
+ updates.push(Mode::RejectedNonFastForward.into());
continue;
}
};
- (mode, reflog_message, existing.name().to_owned(), previous_value)
+ (
+ mode,
+ reflog_message,
+ existing.name().to_owned(),
+ PreviousValue::MustExistAndMatch(existing.target().into_owned()),
+ )
}
+ Err(crate::reference::peel::Error::ToId(gix_ref::peel::to_id::Error::Follow(_))) => {
+ // An unborn reference, always allow it to be changed to whatever the remote wants.
+ (
+ if existing.target().try_name().map(gix_ref::FullNameRef::as_bstr)
+ == remote.as_target()
+ {
+ Mode::NoChangeNeeded
+ } else {
+ Mode::Forced
+ },
+ "change unborn ref",
+ existing.name().to_owned(),
+ PreviousValue::MustExistAndMatch(existing.target().into_owned()),
+ )
+ }
+ Err(err) => return Err(err.into()),
}
}
None => {
@@ -176,13 +226,37 @@ pub(crate) fn update(
_ => "storing ref",
};
(
- update::Mode::New,
+ Mode::New,
reflog_msg,
name,
- PreviousValue::ExistingMustMatch(Target::Peeled(remote_id.to_owned())),
+ PreviousValue::ExistingMustMatch(new_value_by_remote(repo, remote, mappings)?),
)
}
};
+
+ let new = new_value_by_remote(repo, remote, mappings)?;
+ let type_change = match (&previous_value, &new) {
+ (
+ PreviousValue::ExistingMustMatch(Target::Peeled(_))
+ | PreviousValue::MustExistAndMatch(Target::Peeled(_)),
+ Target::Symbolic(_),
+ ) => Some(TypeChange::DirectToSymbolic),
+ (
+ PreviousValue::ExistingMustMatch(Target::Symbolic(_))
+ | PreviousValue::MustExistAndMatch(Target::Symbolic(_)),
+ Target::Peeled(_),
+ ) => Some(TypeChange::SymbolicToDirect),
+ _ => None,
+ };
+ // We are here because this edit should work and fast-forward rules are respected.
+ // But for setting a symref-target, we have to be sure that the target already exists
+    // or will exist. To be sure all rules are respected, we delay the check to when the
+ // edit-list has been built.
+ let edit_index = edits.len();
+ if matches!(new, Target::Symbolic(_)) {
+ let anticipated_update_index = updates.len();
+ edit_indices_to_validate.push((anticipated_update_index, edit_index));
+ }
let edit = RefEdit {
change: Change::Update {
log: LogChange {
@@ -191,42 +265,57 @@ pub(crate) fn update(
message: message.compose(reflog_message),
},
expected: previous_value,
- new: if let Source::Ref(gix_protocol::handshake::Ref::Symbolic { target, .. }) = &remote {
- match mappings.iter().find_map(|m| {
- m.remote.as_name().and_then(|name| {
- (name == target)
- .then(|| m.local.as_ref().and_then(|local| local.try_into().ok()))
- .flatten()
- })
- }) {
- Some(local_branch) => {
- // This is always safe because…
- // - the reference may exist already
- // - if it doesn't exist it will be created - we are here because it's in the list of mappings after all
- // - if it exists and is updated, and the update is rejected due to non-fastforward for instance, the
- // target reference still exists and we can point to it.
- Target::Symbolic(local_branch)
- }
- None => Target::Peeled(remote_id.into()),
- }
- } else {
- Target::Peeled(remote_id.into())
- },
+ new,
},
name,
+ // We must not deref symrefs or we will overwrite their destination, which might be checked out
+ // and we don't check for that case.
deref: false,
};
- let edit_index = edits.len();
edits.push(edit);
- (mode, Some(edit_index))
+ (mode, Some(edit_index), type_change)
}
- None => (update::Mode::NoChangeNeeded, None),
+ None => (Mode::NoChangeNeeded, None, None),
};
- updates.push(Update { mode, edit_index })
+ updates.push(Update {
+ mode,
+ type_change,
+ edit_index,
+ })
+ }
+
+ for (update_index, edit_index) in edit_indices_to_validate {
+ let edit = &edits[edit_index];
+ if update_needs_adjustment_as_edits_symbolic_target_is_missing(edit, repo, &edits) {
+ let edit = &mut edits[edit_index];
+ let update = &mut updates[update_index];
+
+ update.mode = Mode::RejectedToReplaceWithUnborn;
+ update.type_change = None;
+
+ match edit.change {
+ Change::Update {
+ ref expected,
+ ref mut new,
+ ref mut log,
+ ..
+ } => match expected {
+ PreviousValue::MustExistAndMatch(existing) => {
+ *new = existing.clone();
+ log.message = "no-op".into();
+ }
+ _ => unreachable!("at this point it can only be one variant"),
+ },
+ Change::Delete { .. } => {
+ unreachable!("we don't do that here")
+ }
+ };
+ }
}
let edits = match dry_run {
fetch::DryRun::No => {
+ let _span = gix_trace::detail!("apply", edits = edits.len());
let (file_lock_fail, packed_refs_lock_fail) = repo
.config
.lock_timeout()
@@ -238,9 +327,8 @@ pub(crate) fn update(
fetch::WritePackedRefs::Only => {
gix_ref::file::transaction::PackedRefs::DeletionsAndNonSymbolicUpdatesRemoveLooseSourceReference(Box::new(|oid, buf| {
repo.objects
- .try_find(oid, buf)
+ .try_find(&oid, buf)
.map(|obj| obj.map(|obj| obj.kind))
- .map_err(|err| Box::new(err) as Box<dyn std::error::Error + Send + Sync + 'static>)
}))},
fetch::WritePackedRefs::Never => gix_ref::file::transaction::PackedRefs::DeletionsOnly
}
@@ -256,16 +344,128 @@ pub(crate) fn update(
Ok(update::Outcome { edits, updates })
}
-fn worktree_branches(repo: &Repository) -> Result<BTreeMap<gix_ref::FullName, PathBuf>, update::Error> {
- let mut map = BTreeMap::new();
- if let Some((wt_dir, head_ref)) = repo.work_dir().zip(repo.head_ref().ok().flatten()) {
- map.insert(head_ref.inner.name, wt_dir.to_owned());
+/// Figure out if target of `edit` points to a reference that doesn't exist in `repo` and won't exist as it's not in any of `edits`.
+/// If so, return true.
+fn update_needs_adjustment_as_edits_symbolic_target_is_missing(
+ edit: &RefEdit,
+ repo: &Repository,
+ edits: &[RefEdit],
+) -> bool {
+ match edit.change.new_value().expect("here we need a symlink") {
+ TargetRef::Peeled(_) => unreachable!("BUG: we already know it's symbolic"),
+ TargetRef::Symbolic(new_target_ref) => {
+ match &edit.change {
+ Change::Update { expected, .. } => match expected {
+ PreviousValue::MustExistAndMatch(current_target) => {
+ if let Target::Symbolic(current_target_name) = current_target {
+ if current_target_name.as_ref() == new_target_ref {
+ return false; // no-op are always fine
+ }
+ let current_is_unborn = repo.refs.try_find(current_target_name).ok().flatten().is_none();
+ if current_is_unborn {
+ return false;
+ }
+ }
+ }
+ PreviousValue::ExistingMustMatch(_) => return false, // this means the ref doesn't exist locally, so we can create unborn refs anyway
+ _ => {
+ unreachable!("BUG: we don't do that here")
+ }
+ },
+ Change::Delete { .. } => {
+ unreachable!("we don't ever delete here")
+ }
+ };
+ let target_ref_exists_locally = repo.refs.try_find(new_target_ref).ok().flatten().is_some();
+ if target_ref_exists_locally {
+ return false;
+ }
+
+ let target_ref_will_be_created = edits.iter().any(|edit| edit.name.as_ref() == new_target_ref);
+ !target_ref_will_be_created
+ }
}
+}
+
+fn new_value_by_remote(
+ repo: &Repository,
+ remote: &Source,
+ mappings: &[fetch::Mapping],
+) -> Result<Target, update::Error> {
+ let remote_id = remote.as_id();
+ Ok(
+ if let Source::Ref(
+ gix_protocol::handshake::Ref::Symbolic { target, .. } | gix_protocol::handshake::Ref::Unborn { target, .. },
+ ) = &remote
+ {
+ match mappings.iter().find_map(|m| {
+ m.remote.as_name().and_then(|name| {
+ (name == target)
+ .then(|| m.local.as_ref().and_then(|local| local.try_into().ok()))
+ .flatten()
+ })
+ }) {
+ // Map the target on the remote to the local branch name, which should be covered by refspecs.
+ Some(local_branch) => {
+ // This is always safe because…
+ // - the reference may exist already
+ // - if it doesn't exist it will be created - we are here because it's in the list of mappings after all
+ // - if it exists and is updated, and the update is rejected due to non-fastforward for instance, the
+ // target reference still exists and we can point to it.
+ Target::Symbolic(local_branch)
+ }
+ None => {
+ // If we can't map it, it's usually a an unborn branch causing this, or a the target isn't covered
+ // by any refspec so we don't officially pull it in.
+ match remote_id {
+ Some(desired_id) => {
+ if repo.try_find_reference(target)?.is_some() {
+ // We are allowed to change a direct reference to a symbolic one, which may point to other objects
+ // than the remote. The idea is that we are fine as long as the resulting refs are valid.
+ Target::Symbolic(target.try_into()?)
+ } else {
+ // born branches that we don't have in our refspecs we create peeled. That way they can be used.
+ Target::Peeled(desired_id.to_owned())
+ }
+ }
+ // Unborn branches we create as such, with the location they point to on the remote which helps mirroring.
+ None => Target::Symbolic(target.try_into()?),
+ }
+ }
+ }
+ } else {
+ Target::Peeled(remote_id.expect("unborn case handled earlier").to_owned())
+ },
+ )
+}
+
+fn insert_head(
+ head: Option<crate::Head<'_>>,
+ out: &mut BTreeMap<gix_ref::FullName, Vec<PathBuf>>,
+) -> Result<(), update::Error> {
+ if let Some((head, wd)) = head.and_then(|head| head.repo.work_dir().map(|wd| (head, wd))) {
+ out.entry("HEAD".try_into().expect("valid"))
+ .or_default()
+ .push(wd.to_owned());
+ let mut ref_chain = Vec::new();
+ let mut cursor = head.try_into_referent();
+ while let Some(ref_) = cursor {
+ ref_chain.push(ref_.name().to_owned());
+ cursor = ref_.follow().transpose()?;
+ }
+ for name in ref_chain {
+ out.entry(name).or_default().push(wd.to_owned());
+ }
+ }
+ Ok(())
+}
+
+fn worktree_branches(repo: &Repository) -> Result<BTreeMap<gix_ref::FullName, Vec<PathBuf>>, update::Error> {
+ let mut map = BTreeMap::new();
+ insert_head(repo.head().ok(), &mut map)?;
for proxy in repo.worktrees()? {
let repo = proxy.into_repo_with_possibly_inaccessible_worktree()?;
- if let Some((wt_dir, head_ref)) = repo.work_dir().zip(repo.head_ref().ok().flatten()) {
- map.insert(head_ref.inner.name, wt_dir.to_owned());
- }
+ insert_head(repo.head().ok(), &mut map)?;
}
Ok(map)
}
diff --git a/vendor/gix/src/remote/connection/fetch/update_refs/tests.rs b/vendor/gix/src/remote/connection/fetch/update_refs/tests.rs
index 47ab5d1a5..0b29f14f4 100644
--- a/vendor/gix/src/remote/connection/fetch/update_refs/tests.rs
+++ b/vendor/gix/src/remote/connection/fetch/update_refs/tests.rs
@@ -31,6 +31,10 @@ mod update {
gix_testtools::scripted_fixture_read_only_with_args("make_fetch_repos.sh", [base_repo_path()]).unwrap();
gix::open_opts(dir.join(name), restricted()).unwrap()
}
+ fn named_repo(name: &str) -> gix::Repository {
+ let dir = gix_testtools::scripted_fixture_read_only("make_remote_repos.sh").unwrap();
+ gix::open_opts(dir.join(name), restricted()).unwrap()
+ }
fn repo_rw(name: &str) -> (gix::Repository, gix_testtools::tempfile::TempDir) {
let dir = gix_testtools::scripted_fixture_writable_with_args(
"make_fetch_repos.sh",
@@ -41,13 +45,19 @@ mod update {
let repo = gix::open_opts(dir.path().join(name), restricted()).unwrap();
(repo, dir)
}
- use gix_ref::{transaction::Change, TargetRef};
+ use gix_ref::{
+ transaction::{Change, LogChange, PreviousValue, RefEdit, RefLog},
+ Target, TargetRef,
+ };
use crate::{
bstr::BString,
remote::{
fetch,
- fetch::{refs::tests::restricted, Mapping, RefLogMessage, Source, SpecIndex},
+ fetch::{
+ refs::{tests::restricted, update::TypeChange},
+ Mapping, RefLogMessage, Source, SpecIndex,
+ },
},
};
@@ -112,7 +122,7 @@ mod update {
(
"+refs/remotes/origin/g:refs/heads/main",
fetch::refs::update::Mode::RejectedCurrentlyCheckedOut {
- worktree_dir: repo.work_dir().expect("present").to_owned(),
+ worktree_dirs: vec![repo.work_dir().expect("present").to_owned()],
},
None,
"checked out branches cannot be written, as it requires a merge of sorts which isn't done here",
@@ -148,6 +158,7 @@ mod update {
assert_eq!(
out.updates,
vec![fetch::refs::Update {
+ type_change: None,
mode: expected_mode.clone(),
edit_index: reflog_message.map(|_| 0),
}],
@@ -180,7 +191,7 @@ mod update {
#[test]
fn checked_out_branches_in_worktrees_are_rejected_with_additional_information() -> Result {
- let root = gix_path::realpath(gix_testtools::scripted_fixture_read_only_with_args(
+ let root = gix_path::realpath(&gix_testtools::scripted_fixture_read_only_with_args(
"make_fetch_repos.sh",
[base_repo_path()],
)?)?;
@@ -211,8 +222,9 @@ mod update {
out.updates,
vec![fetch::refs::Update {
mode: fetch::refs::update::Mode::RejectedCurrentlyCheckedOut {
- worktree_dir: root.join(path_from_root),
+ worktree_dirs: vec![root.join(path_from_root)],
},
+ type_change: None,
edit_index: None,
}],
"{spec}: checked-out checks are done before checking if a change would actually be required (here it isn't)"
@@ -223,10 +235,350 @@ mod update {
}
#[test]
- fn local_symbolic_refs_are_never_written() {
+ fn unborn_remote_branches_can_be_created_locally_if_they_are_new() -> Result {
+ let repo = named_repo("unborn");
+ let (mappings, specs) = mapping_from_spec("HEAD:refs/remotes/origin/HEAD", &repo);
+ assert_eq!(mappings.len(), 1);
+ let out = fetch::refs::update(
+ &repo,
+ prefixed("action"),
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::Yes,
+ fetch::WritePackedRefs::Never,
+ )?;
+ assert_eq!(
+ out.updates,
+ vec![fetch::refs::Update {
+ mode: fetch::refs::update::Mode::New,
+ type_change: None,
+ edit_index: Some(0)
+ }]
+ );
+ assert_eq!(out.edits.len(), 1, "we are OK with creating unborn refs");
+ Ok(())
+ }
+
+ #[test]
+ fn unborn_remote_branches_can_update_local_unborn_branches() -> Result {
+ let repo = named_repo("unborn");
+ let (mappings, specs) = mapping_from_spec("HEAD:refs/heads/existing-unborn-symbolic", &repo);
+ assert_eq!(mappings.len(), 1);
+ let out = fetch::refs::update(
+ &repo,
+ prefixed("action"),
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::Yes,
+ fetch::WritePackedRefs::Never,
+ )?;
+ assert_eq!(
+ out.updates,
+ vec![fetch::refs::Update {
+ mode: fetch::refs::update::Mode::NoChangeNeeded,
+ type_change: None,
+ edit_index: Some(0)
+ }]
+ );
+ assert_eq!(out.edits.len(), 1, "we are OK with updating unborn refs");
+ assert_eq!(
+ out.edits[0],
+ RefEdit {
+ change: Change::Update {
+ log: LogChange {
+ mode: RefLog::AndReference,
+ force_create_reflog: false,
+ message: "action: change unborn ref".into(),
+ },
+ expected: PreviousValue::MustExistAndMatch(Target::Symbolic(
+ "refs/heads/main".try_into().expect("valid"),
+ )),
+ new: Target::Symbolic("refs/heads/main".try_into().expect("valid")),
+ },
+ name: "refs/heads/existing-unborn-symbolic".try_into().expect("valid"),
+ deref: false,
+ }
+ );
+
+ let (mappings, specs) = mapping_from_spec("HEAD:refs/heads/existing-unborn-symbolic-other", &repo);
+ assert_eq!(mappings.len(), 1);
+ let out = fetch::refs::update(
+ &repo,
+ prefixed("action"),
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::Yes,
+ fetch::WritePackedRefs::Never,
+ )?;
+ assert_eq!(
+ out.updates,
+ vec![fetch::refs::Update {
+ mode: fetch::refs::update::Mode::Forced,
+ type_change: None,
+ edit_index: Some(0)
+ }]
+ );
+ assert_eq!(
+ out.edits.len(),
+ 1,
+ "we are OK with creating unborn refs even without actually forcing it"
+ );
+ assert_eq!(
+ out.edits[0],
+ RefEdit {
+ change: Change::Update {
+ log: LogChange {
+ mode: RefLog::AndReference,
+ force_create_reflog: false,
+ message: "action: change unborn ref".into(),
+ },
+ expected: PreviousValue::MustExistAndMatch(Target::Symbolic(
+ "refs/heads/other".try_into().expect("valid"),
+ )),
+ new: Target::Symbolic("refs/heads/main".try_into().expect("valid")),
+ },
+ name: "refs/heads/existing-unborn-symbolic-other".try_into().expect("valid"),
+ deref: false,
+ }
+ );
+ Ok(())
+ }
+
+ #[test]
+ fn remote_symbolic_refs_with_locally_unavailable_target_result_in_valid_peeled_branches() -> Result {
+ let remote_repo = named_repo("one-commit-with-symref");
+ let local_repo = named_repo("unborn");
+ let (mappings, specs) = mapping_from_spec("refs/heads/symbolic:refs/heads/new", &remote_repo);
+ assert_eq!(mappings.len(), 1);
+
+ let out = fetch::refs::update(
+ &local_repo,
+ prefixed("action"),
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::Yes,
+ fetch::WritePackedRefs::Never,
+ )?;
+ assert_eq!(
+ out.updates,
+ vec![fetch::refs::Update {
+ mode: fetch::refs::update::Mode::New,
+ type_change: None,
+ edit_index: Some(0)
+ }]
+ );
+ assert_eq!(out.edits.len(), 1);
+ let target = Target::Peeled(hex_to_id("66f16e4e8baf5c77bb6d0484495bebea80e916ce"));
+ assert_eq!(
+ out.edits[0],
+ RefEdit {
+ change: Change::Update {
+ log: LogChange {
+ mode: RefLog::AndReference,
+ force_create_reflog: false,
+ message: "action: storing head".into(),
+ },
+ expected: PreviousValue::ExistingMustMatch(target.clone()),
+ new: target,
+ },
+ name: "refs/heads/new".try_into().expect("valid"),
+ deref: false,
+ },
+ "we create local-refs whose targets aren't present yet, even though the remote knows them.\
+ This leaves the caller with assuring all refs are mentioned in mappings."
+ );
+ Ok(())
+ }
+
+ #[test]
+ fn remote_symbolic_refs_with_locally_unavailable_target_dont_overwrite_valid_local_branches() -> Result {
+ let remote_repo = named_repo("one-commit-with-symref");
+ let local_repo = named_repo("one-commit-with-symref-missing-branch");
+ let (mappings, specs) = mapping_from_spec("refs/heads/unborn:refs/heads/valid-locally", &remote_repo);
+ assert_eq!(mappings.len(), 1);
+
+ let out = fetch::refs::update(
+ &local_repo,
+ prefixed("action"),
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::Yes,
+ fetch::WritePackedRefs::Never,
+ )?;
+ assert_eq!(
+ out.updates,
+ vec![fetch::refs::Update {
+ mode: fetch::refs::update::Mode::RejectedToReplaceWithUnborn,
+ type_change: None,
+ edit_index: None
+ }]
+ );
+ assert_eq!(out.edits.len(), 0);
+ Ok(())
+ }
+
+ #[test]
+ fn unborn_remote_refs_dont_overwrite_valid_local_refs() -> Result {
+ let remote_repo = named_repo("unborn");
+ let local_repo = named_repo("one-commit-with-symref");
+ let (mappings, specs) =
+ mapping_from_spec("refs/heads/existing-unborn-symbolic:refs/heads/branch", &remote_repo);
+ assert_eq!(mappings.len(), 1);
+
+ let out = fetch::refs::update(
+ &local_repo,
+ prefixed("action"),
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::Yes,
+ fetch::WritePackedRefs::Never,
+ )?;
+ assert_eq!(
+ out.updates,
+ vec![fetch::refs::Update {
+ mode: fetch::refs::update::Mode::RejectedToReplaceWithUnborn,
+ type_change: None,
+ edit_index: None
+ }],
+ "we don't overwrite locally present refs with unborn ones for safety"
+ );
+ assert_eq!(out.edits.len(), 0);
+ Ok(())
+ }
+
+ #[test]
+ fn local_symbolic_refs_can_be_overwritten() {
let repo = repo("two-origins");
- for source in ["refs/heads/main", "refs/heads/symbolic", "HEAD"] {
- let (mappings, specs) = mapping_from_spec(&format!("{source}:refs/heads/symbolic"), &repo);
+ for (source, destination, expected_update, expected_edit) in [
+ (
+ // attempt to overwrite HEAD isn't possible as the matching engine will normalize the path. That way, `HEAD`
+ // can never be set. This is by design (of git) and we follow it.
+ "refs/heads/symbolic",
+ "HEAD",
+ fetch::refs::Update {
+ mode: fetch::refs::update::Mode::New,
+ type_change: None,
+ edit_index: Some(0),
+ },
+ Some(RefEdit {
+ change: Change::Update {
+ log: LogChange {
+ mode: RefLog::AndReference,
+ force_create_reflog: false,
+ message: "action: storing head".into(),
+ },
+ expected: PreviousValue::ExistingMustMatch(Target::Symbolic(
+ "refs/heads/main".try_into().expect("valid"),
+ )),
+ new: Target::Symbolic("refs/heads/main".try_into().expect("valid")),
+ },
+ name: "refs/heads/HEAD".try_into().expect("valid"),
+ deref: false,
+ }),
+ ),
+ (
+ // attempt to overwrite checked out branch fails
+ "refs/remotes/origin/b", // strange, but the remote-refs are simulated and based on local refs
+ "refs/heads/main",
+ fetch::refs::Update {
+ mode: fetch::refs::update::Mode::RejectedCurrentlyCheckedOut {
+ worktree_dirs: vec![repo.work_dir().expect("present").to_owned()],
+ },
+ type_change: None,
+ edit_index: None,
+ },
+ None,
+ ),
+ (
+ // symbolic becomes direct
+ "refs/heads/main",
+ "refs/heads/symbolic",
+ fetch::refs::Update {
+ mode: fetch::refs::update::Mode::NoChangeNeeded,
+ type_change: Some(TypeChange::SymbolicToDirect),
+ edit_index: Some(0),
+ },
+ Some(RefEdit {
+ change: Change::Update {
+ log: LogChange {
+ mode: RefLog::AndReference,
+ force_create_reflog: false,
+ message: "action: no update will be performed".into(),
+ },
+ expected: PreviousValue::MustExistAndMatch(Target::Symbolic(
+ "refs/heads/main".try_into().expect("valid"),
+ )),
+ new: Target::Peeled(hex_to_id("f99771fe6a1b535783af3163eba95a927aae21d5")),
+ },
+ name: "refs/heads/symbolic".try_into().expect("valid"),
+ deref: false,
+ }),
+ ),
+ (
+ // direct becomes symbolic
+ "refs/heads/symbolic",
+ "refs/remotes/origin/a",
+ fetch::refs::Update {
+ mode: fetch::refs::update::Mode::NoChangeNeeded,
+ type_change: Some(TypeChange::DirectToSymbolic),
+ edit_index: Some(0),
+ },
+ Some(RefEdit {
+ change: Change::Update {
+ log: LogChange {
+ mode: RefLog::AndReference,
+ force_create_reflog: false,
+ message: "action: no update will be performed".into(),
+ },
+ expected: PreviousValue::MustExistAndMatch(Target::Peeled(hex_to_id(
+ "f99771fe6a1b535783af3163eba95a927aae21d5",
+ ))),
+ new: Target::Symbolic("refs/heads/main".try_into().expect("valid")),
+ },
+ name: "refs/remotes/origin/a".try_into().expect("valid"),
+ deref: false,
+ }),
+ ),
+ (
+ // symbolic to symbolic (same)
+ "refs/heads/symbolic",
+ "refs/heads/symbolic",
+ fetch::refs::Update {
+ mode: fetch::refs::update::Mode::NoChangeNeeded,
+ type_change: None,
+ edit_index: Some(0),
+ },
+ Some(RefEdit {
+ change: Change::Update {
+ log: LogChange {
+ mode: RefLog::AndReference,
+ force_create_reflog: false,
+ message: "action: no update will be performed".into(),
+ },
+ expected: PreviousValue::MustExistAndMatch(Target::Symbolic(
+ "refs/heads/main".try_into().expect("valid"),
+ )),
+ new: Target::Symbolic("refs/heads/main".try_into().expect("valid")),
+ },
+ name: "refs/heads/symbolic".try_into().expect("valid"),
+ deref: false,
+ }),
+ ),
+ ] {
+ let (mappings, specs) = mapping_from_spec(&format!("{source}:{destination}"), &repo);
+ assert_eq!(mappings.len(), 1);
let out = fetch::refs::update(
&repo,
prefixed("action"),
@@ -239,15 +591,11 @@ mod update {
)
.unwrap();
- assert_eq!(out.edits.len(), 0);
- assert_eq!(
- out.updates,
- vec![fetch::refs::Update {
- mode: fetch::refs::update::Mode::RejectedSymbolic,
- edit_index: None
- }],
- "we don't overwrite these as the checked-out check needs to consider much more than it currently does, we are playing it safe"
- );
+ assert_eq!(out.edits.len(), usize::from(expected_edit.is_some()));
+ assert_eq!(out.updates, vec![expected_update]);
+ if let Some(expected) = expected_edit {
+ assert_eq!(out.edits, vec![expected]);
+ }
}
}
@@ -275,17 +623,19 @@ mod update {
)
.unwrap();
- assert_eq!(out.edits.len(), 1);
+ assert_eq!(out.edits.len(), 2, "symbolic refs are handled just like any other ref");
assert_eq!(
out.updates,
vec![
fetch::refs::Update {
mode: fetch::refs::update::Mode::New,
+ type_change: None,
edit_index: Some(0)
},
fetch::refs::Update {
- mode: fetch::refs::update::Mode::RejectedSymbolic,
- edit_index: None
+ mode: fetch::refs::update::Mode::NoChangeNeeded,
+ type_change: Some(TypeChange::SymbolicToDirect),
+ edit_index: Some(1)
}
],
);
@@ -303,7 +653,7 @@ mod update {
}
#[test]
- fn local_direct_refs_are_never_written_with_symbolic_ones_but_see_only_the_destination() {
+ fn local_direct_refs_are_written_with_symbolic_ones() {
let repo = repo("two-origins");
let (mappings, specs) = mapping_from_spec("refs/heads/symbolic:refs/heads/not-currently-checked-out", &repo);
let out = fetch::refs::update(
@@ -323,6 +673,7 @@ mod update {
out.updates,
vec![fetch::refs::Update {
mode: fetch::refs::update::Mode::NoChangeNeeded,
+ type_change: Some(fetch::refs::update::TypeChange::DirectToSymbolic),
edit_index: Some(0)
}],
);
@@ -349,6 +700,7 @@ mod update {
out.updates,
vec![fetch::refs::Update {
mode: fetch::refs::update::Mode::New,
+ type_change: None,
edit_index: Some(0),
}],
);
@@ -399,10 +751,12 @@ mod update {
vec![
fetch::refs::Update {
mode: fetch::refs::update::Mode::New,
+ type_change: None,
edit_index: Some(0),
},
fetch::refs::Update {
mode: fetch::refs::update::Mode::NoChangeNeeded,
+ type_change: None,
edit_index: Some(1),
}
],
@@ -446,6 +800,7 @@ mod update {
out.updates,
vec![fetch::refs::Update {
mode: fetch::refs::update::Mode::FastForward,
+ type_change: None,
edit_index: Some(0),
}],
"The caller has to be aware and note that dry-runs can't know about fast-forwards as they don't have remote objects"
@@ -480,6 +835,7 @@ mod update {
out.updates,
vec![fetch::refs::Update {
mode: fetch::refs::update::Mode::RejectedNonFastForward,
+ type_change: None,
edit_index: None,
}]
);
@@ -502,6 +858,7 @@ mod update {
out.updates,
vec![fetch::refs::Update {
mode: fetch::refs::update::Mode::FastForward,
+ type_change: None,
edit_index: Some(0),
}]
);
@@ -535,6 +892,7 @@ mod update {
out.updates,
vec![fetch::refs::Update {
mode: fetch::refs::update::Mode::FastForward,
+ type_change: None,
edit_index: Some(0),
}]
);
@@ -548,12 +906,15 @@ mod update {
}
}
- fn mapping_from_spec(spec: &str, repo: &gix::Repository) -> (Vec<fetch::Mapping>, Vec<gix::refspec::RefSpec>) {
+ fn mapping_from_spec(
+ spec: &str,
+ remote_repo: &gix::Repository,
+ ) -> (Vec<fetch::Mapping>, Vec<gix::refspec::RefSpec>) {
let spec = gix_refspec::parse(spec.into(), gix_refspec::parse::Operation::Fetch).unwrap();
let group = gix_refspec::MatchGroup::from_fetch_specs(Some(spec));
- let references = repo.references().unwrap();
+ let references = remote_repo.references().unwrap();
let mut references: Vec<_> = references.all().unwrap().map(|r| into_remote_ref(r.unwrap())).collect();
- references.push(into_remote_ref(repo.find_reference("HEAD").unwrap()));
+ references.push(into_remote_ref(remote_repo.find_reference("HEAD").unwrap()));
let mappings = group
.match_remotes(references.iter().map(remote_ref_to_item))
.mappings
@@ -566,7 +927,7 @@ mod update {
},
|idx| fetch::Source::Ref(references[idx].clone()),
),
- local: m.rhs.map(|r| r.into_owned()),
+ local: m.rhs.map(std::borrow::Cow::into_owned),
spec_index: SpecIndex::ExplicitInRemote(m.spec_index),
})
.collect();
@@ -582,11 +943,14 @@ mod update {
},
TargetRef::Symbolic(name) => {
let target = name.as_bstr().into();
- let id = r.peel_to_id_in_place().unwrap();
- gix_protocol::handshake::Ref::Symbolic {
- full_ref_name,
- target,
- object: id.detach(),
+ match r.peel_to_id_in_place() {
+ Ok(id) => gix_protocol::handshake::Ref::Symbolic {
+ full_ref_name,
+ target,
+ tag: None,
+ object: id.detach(),
+ },
+ Err(_) => gix_protocol::handshake::Ref::Unborn { full_ref_name, target },
}
}
}
@@ -594,9 +958,10 @@ mod update {
fn remote_ref_to_item(r: &gix_protocol::handshake::Ref) -> gix_refspec::match_group::Item<'_> {
let (full_ref_name, target, object) = r.unpack();
+ static NULL: gix_hash::ObjectId = gix_hash::Kind::Sha1.null();
gix_refspec::match_group::Item {
full_ref_name,
- target: target.expect("no unborn HEAD"),
+ target: target.unwrap_or(NULL.as_ref()),
object,
}
}
diff --git a/vendor/gix/src/remote/connection/fetch/update_refs/update.rs b/vendor/gix/src/remote/connection/fetch/update_refs/update.rs
index 6eda1ffc0..41ed3753d 100644
--- a/vendor/gix/src/remote/connection/fetch/update_refs/update.rs
+++ b/vendor/gix/src/remote/connection/fetch/update_refs/update.rs
@@ -10,7 +10,7 @@ mod error {
#[error(transparent)]
FindReference(#[from] crate::reference::find::Error),
#[error("A remote reference had a name that wasn't considered valid. Corrupt remote repo or insufficient checks on remote?")]
- InvalidRefName(#[from] gix_validate::refname::Error),
+ InvalidRefName(#[from] gix_validate::reference::name::Error),
#[error("Failed to update references to their new position to match their remote locations")]
EditReferences(#[from] crate::reference::edit::Error),
#[error("Failed to read or iterate worktree dir")]
@@ -19,6 +19,10 @@ mod error {
OpenWorktreeRepo(#[from] crate::open::Error),
#[error("Could not find local commit for fast-forward ancestor check")]
FindCommit(#[from] crate::object::find::existing::Error),
+ #[error("Could not peel symbolic local reference to its ID")]
+ PeelToId(#[from] crate::reference::peel::Error),
+ #[error("Failed to follow a symbolic reference to assure worktree isn't affected")]
+ FollowSymref(#[from] gix_ref::file::find::existing::Error),
}
}
@@ -35,11 +39,14 @@ pub struct Outcome {
pub updates: Vec<super::Update>,
}
-/// Describe the way a ref was updated
+/// Describe the way a ref was updated, with particular focus on how the (peeled) target commit was affected.
+///
+/// Note that for all the variants that signal a change or `NoChangeNeeded` it's additionally possible to change the target type
+/// from symbolic to direct, or the other way around.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Mode {
/// No change was attempted as the remote ref didn't change compared to the current ref, or because no remote ref was specified
- /// in the ref-spec.
+ /// in the ref-spec. Note that the expected value is still asserted to uncover potential race conditions with other processes.
NoChangeNeeded,
/// The old ref's commit was an ancestor of the new one, allowing for a fast-forward without a merge.
FastForward,
@@ -62,14 +69,19 @@ pub enum Mode {
RejectedTagUpdate,
/// The reference update would not have been a fast-forward, and force is not specified in the ref-spec.
RejectedNonFastForward,
- /// The update of a local symbolic reference was rejected.
- RejectedSymbolic,
+ /// The remote has an unborn symbolic reference where we have one that is set. This means the remote
+ /// has reset itself to a newly initialized state or a state that is highly unusual.
+ /// It may also mean that the remote knows the target name, but it's not available locally and not included in the ref-mappings
+ /// to be created, so we would effectively change a valid local ref into one that seems unborn, which is rejected.
+ /// Note that this mode may have an associated ref-edit that is a no-op, or current-state assertion, for logistical reasons only
+ /// and having no edit would be preferred.
+ RejectedToReplaceWithUnborn,
/// The update was rejected because the branch is checked out in the given worktree_dir.
///
/// Note that the check applies to any known worktree, whether it's present on disk or not.
RejectedCurrentlyCheckedOut {
- /// The path to the worktree directory where the branch is checked out.
- worktree_dir: PathBuf,
+ /// The path(s) to the worktree directory where the branch is checked out.
+ worktree_dirs: Vec<PathBuf>,
},
}
@@ -84,12 +96,16 @@ impl std::fmt::Display for Mode {
Mode::RejectedSourceObjectNotFound { id } => return write!(f, "rejected ({id} not found)"),
Mode::RejectedTagUpdate => "rejected (would overwrite existing tag)",
Mode::RejectedNonFastForward => "rejected (non-fast-forward)",
- Mode::RejectedSymbolic => "rejected (refusing to write symbolic refs)",
- Mode::RejectedCurrentlyCheckedOut { worktree_dir } => {
+ Mode::RejectedToReplaceWithUnborn => "rejected (refusing to overwrite existing with unborn ref)",
+ Mode::RejectedCurrentlyCheckedOut { worktree_dirs } => {
return write!(
f,
"rejected (cannot write into checked-out branch at \"{}\")",
- worktree_dir.display()
+ worktree_dirs
+ .iter()
+ .filter_map(|d| d.to_str())
+ .collect::<Vec<_>>()
+ .join(", ")
)
}
}
@@ -97,6 +113,15 @@ impl std::fmt::Display for Mode {
}
}
+/// Indicates that a ref changes its type.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Hash)]
+pub enum TypeChange {
+ /// A local direct reference is changed into a symbolic one.
+ DirectToSymbolic,
+ /// A local symbolic reference is changed into a direct one.
+ SymbolicToDirect,
+}
+
impl Outcome {
/// Produce an iterator over all information used to produce the this outcome, ref-update by ref-update, using the `mappings`
/// used when producing the ref update.
diff --git a/vendor/gix/src/remote/connection/ref_map.rs b/vendor/gix/src/remote/connection/ref_map.rs
index 95ddb6214..f1b40d56e 100644
--- a/vendor/gix/src/remote/connection/ref_map.rs
+++ b/vendor/gix/src/remote/connection/ref_map.rs
@@ -133,7 +133,7 @@ where
)
.await?;
let num_explicit_specs = self.remote.fetch_specs.len();
- let group = gix_refspec::MatchGroup::from_fetch_specs(specs.iter().map(|s| s.to_ref()));
+ let group = gix_refspec::MatchGroup::from_fetch_specs(specs.iter().map(gix_refspec::RefSpec::to_ref));
let (res, fixes) = group
.match_remotes(remote.refs.iter().map(|r| {
let (full_ref_name, target, object) = r.unpack();
@@ -157,7 +157,7 @@ where
},
|idx| fetch::Source::Ref(remote.refs[idx].clone()),
),
- local: m.rhs.map(|c| c.into_owned()),
+ local: m.rhs.map(std::borrow::Cow::into_owned),
spec_index: if m.spec_index < num_explicit_specs {
SpecIndex::ExplicitInRemote(m.spec_index)
} else {
@@ -204,7 +204,7 @@ where
self.transport_options = self
.remote
.repo
- .transport_options(url.as_ref(), self.remote.name().map(|n| n.as_bstr()))
+ .transport_options(url.as_ref(), self.remote.name().map(crate::remote::Name::as_bstr))
.map_err(|err| Error::GatherTransportConfig {
source: err,
url: url.into_owned(),
diff --git a/vendor/gix/src/remote/errors.rs b/vendor/gix/src/remote/errors.rs
index 20060cedf..34ed8246b 100644
--- a/vendor/gix/src/remote/errors.rs
+++ b/vendor/gix/src/remote/errors.rs
@@ -2,7 +2,7 @@
pub mod find {
use crate::{bstr::BString, config, remote};
- /// The error returned by [`Repository::find_remote(…)`][crate::Repository::find_remote()].
+ /// The error returned by [`Repository::find_remote(…)`](crate::Repository::find_remote()).
#[derive(Debug, thiserror::Error)]
#[allow(missing_docs)]
pub enum Error {
@@ -30,7 +30,7 @@ pub mod find {
pub mod existing {
use crate::bstr::BString;
- /// The error returned by [`Repository::find_remote(…)`][crate::Repository::find_remote()].
+ /// The error returned by [`Repository::find_remote(…)`](crate::Repository::find_remote()).
#[derive(Debug, thiserror::Error)]
#[allow(missing_docs)]
pub enum Error {
@@ -42,4 +42,23 @@ pub mod find {
NotFound { name: BString },
}
}
+
+ ///
+ pub mod for_fetch {
+ /// The error returned by [`Repository::find_fetch_remote(…)`](crate::Repository::find_fetch_remote()).
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ FindExisting(#[from] super::existing::Error),
+ #[error(transparent)]
+ FindExistingReferences(#[from] crate::reference::find::existing::Error),
+ #[error("Could not initialize a URL remote")]
+ Init(#[from] crate::remote::init::Error),
+ #[error("remote name could not be parsed as URL")]
+ UrlParse(#[from] gix_url::parse::Error),
+ #[error("No configured remote could be found, or too many were available")]
+ ExactlyOneRemoteNotAvailable,
+ }
+ }
}
diff --git a/vendor/gix/src/remote/fetch.rs b/vendor/gix/src/remote/fetch.rs
index 0947ace3f..4700201de 100644
--- a/vendor/gix/src/remote/fetch.rs
+++ b/vendor/gix/src/remote/fetch.rs
@@ -1,17 +1,20 @@
///
pub mod negotiate {
+ #[cfg(feature = "credentials")]
pub use gix_negotiate::Algorithm;
#[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
pub use super::super::connection::fetch::negotiate::Error;
#[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
pub(crate) use super::super::connection::fetch::negotiate::{
- add_wants, mark_complete_and_common_ref, one_round, Action,
+ add_wants, make_refmapping_ignore_predicate, mark_complete_and_common_ref, one_round, Action,
};
}
#[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
-pub use super::connection::fetch::{prepare, refs, Error, Outcome, Prepare, ProgressId, RefLogMessage, Status};
+pub use super::connection::fetch::{
+ outcome, prepare, refs, Error, Outcome, Prepare, ProgressId, RefLogMessage, Status,
+};
/// If `Yes`, don't really make changes but do as much as possible to get an idea of what would be done.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
@@ -152,6 +155,18 @@ impl Source {
}
}
+ /// Return the target that this symbolic ref is pointing to, or `None` if it is no symbolic ref.
+ pub fn as_target(&self) -> Option<&crate::bstr::BStr> {
+ match self {
+ Source::ObjectId(_) => None,
+ Source::Ref(r) => match r {
+ gix_protocol::handshake::Ref::Peeled { .. } | gix_protocol::handshake::Ref::Direct { .. } => None,
+ gix_protocol::handshake::Ref::Symbolic { target, .. }
+ | gix_protocol::handshake::Ref::Unborn { target, .. } => Some(target.as_ref()),
+ },
+ }
+ }
+
/// Returns the peeled id of this instance, that is the object that can't be de-referenced anymore.
pub fn peeled_id(&self) -> Option<&gix_hash::oid> {
match self {
diff --git a/vendor/gix/src/remote/init.rs b/vendor/gix/src/remote/init.rs
index bba116946..13b747eda 100644
--- a/vendor/gix/src/remote/init.rs
+++ b/vendor/gix/src/remote/init.rs
@@ -67,7 +67,18 @@ impl<'repo> Remote<'repo> {
Url: TryInto<gix_url::Url, Error = E>,
gix_url::parse::Error: From<E>,
{
- let url = url.try_into().map_err(|err| Error::Url(err.into()))?;
+ Self::from_fetch_url_inner(
+ url.try_into().map_err(|err| Error::Url(err.into()))?,
+ should_rewrite_urls,
+ repo,
+ )
+ }
+
+ fn from_fetch_url_inner(
+ url: gix_url::Url,
+ should_rewrite_urls: bool,
+ repo: &'repo Repository,
+ ) -> Result<Self, Error> {
let (url_alias, _) = should_rewrite_urls
.then(|| rewrite_urls(&repo.config, Some(&url), None))
.unwrap_or(Ok((None, None)))?;
diff --git a/vendor/gix/src/remote/save.rs b/vendor/gix/src/remote/save.rs
index ad6a75b14..2a91dfa9c 100644
--- a/vendor/gix/src/remote/save.rs
+++ b/vendor/gix/src/remote/save.rs
@@ -1,5 +1,7 @@
use std::convert::TryInto;
+use gix_macros::momo;
+
use crate::{
bstr::{BStr, BString},
config, remote, Remote,
@@ -25,7 +27,7 @@ pub enum AsError {
Name(#[from] crate::remote::name::Error),
}
-/// Serialize into gix-config.
+/// Serialize into git-config.
impl Remote<'_> {
/// Save ourselves to the given `config` if we are a named remote or fail otherwise.
///
@@ -111,6 +113,7 @@ impl Remote<'_> {
/// If this name is different from the current one, the git configuration will still contain the previous name,
/// and the caller should account for that.
#[allow(clippy::result_large_err)]
+ #[momo]
pub fn save_as_to(
&mut self,
name: impl Into<BString>,
diff --git a/vendor/gix/src/repository/attributes.rs b/vendor/gix/src/repository/attributes.rs
index 252529761..7f747f7fd 100644
--- a/vendor/gix/src/repository/attributes.rs
+++ b/vendor/gix/src/repository/attributes.rs
@@ -1,5 +1,15 @@
//! exclude information
-use crate::Repository;
+use crate::{config, AttributeStack, Repository};
+
+/// The error returned by [`Repository::attributes()`].
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum Error {
+ #[error(transparent)]
+ ConfigureAttributes(#[from] config::attribute_stack::Error),
+ #[error(transparent)]
+ ConfigureExcludes(#[from] config::exclude_stack::Error),
+}
impl Repository {
/// Configure a file-system cache for accessing git attributes *and* excludes on a per-path basis.
@@ -14,15 +24,14 @@ impl Repository {
///
/// * `$XDG_CONFIG_HOME/…/ignore|attributes` if `core.excludesFile|attributesFile` is *not* set, otherwise use the configured file.
/// * `$GIT_DIR/info/exclude|attributes` if present.
- // TODO: test, provide higher-level custom Cache wrapper that is much easier to use and doesn't panic when accessing entries
- // by non-relative path.
+ #[cfg(feature = "attributes")]
pub fn attributes(
&self,
index: &gix_index::State,
- attributes_source: gix_worktree::cache::state::attributes::Source,
- ignore_source: gix_worktree::cache::state::ignore::Source,
+ attributes_source: gix_worktree::stack::state::attributes::Source,
+ ignore_source: gix_worktree::stack::state::ignore::Source,
exclude_overrides: Option<gix_ignore::Search>,
- ) -> Result<gix_worktree::Cache, crate::attributes::Error> {
+ ) -> Result<AttributeStack<'_>, Error> {
let case = if self.config.ignore_case {
gix_glob::pattern::Case::Fold
} else {
@@ -36,15 +45,95 @@ impl Repository {
let ignore =
self.config
.assemble_exclude_globals(self.git_dir(), exclude_overrides, ignore_source, &mut buf)?;
- let state = gix_worktree::cache::State::AttributesAndIgnoreStack { attributes, ignore };
- let attribute_list = state.id_mappings_from_index(index, index.path_backing(), ignore_source, case);
- Ok(gix_worktree::Cache::new(
- // this is alright as we don't cause mutation of that directory, it's virtual.
- self.work_dir().unwrap_or(self.git_dir()),
- state,
- case,
- buf,
- attribute_list,
+ let state = gix_worktree::stack::State::AttributesAndIgnoreStack { attributes, ignore };
+ let attribute_list = state.id_mappings_from_index(index, index.path_backing(), case);
+ Ok(AttributeStack::new(
+ gix_worktree::Stack::new(
+ // this is alright as we don't cause mutation of that directory, it's virtual.
+ self.work_dir().unwrap_or(self.git_dir()),
+ state,
+ case,
+ buf,
+ attribute_list,
+ ),
+ self,
+ ))
+ }
+
+ /// Like [attributes()][Self::attributes()], but without access to exclude/ignore information.
+ #[cfg(feature = "attributes")]
+ pub fn attributes_only(
+ &self,
+ index: &gix_index::State,
+ attributes_source: gix_worktree::stack::state::attributes::Source,
+ ) -> Result<AttributeStack<'_>, config::attribute_stack::Error> {
+ let case = if self.config.ignore_case {
+ gix_glob::pattern::Case::Fold
+ } else {
+ gix_glob::pattern::Case::Sensitive
+ };
+ let (attributes, buf) = self.config.assemble_attribute_globals(
+ self.git_dir(),
+ attributes_source,
+ self.options.permissions.attributes,
+ )?;
+ let state = gix_worktree::stack::State::AttributesStack(attributes);
+ let attribute_list = state.id_mappings_from_index(index, index.path_backing(), case);
+ Ok(AttributeStack::new(
+ gix_worktree::Stack::new(
+ // this is alright as we don't cause mutation of that directory, it's virtual.
+ self.work_dir().unwrap_or(self.git_dir()),
+ state,
+ case,
+ buf,
+ attribute_list,
+ ),
+ self,
+ ))
+ }
+
+ /// Configure a file-system cache checking if files below the repository are excluded, reading `.gitignore` files from
+ /// the specified `source`.
+ ///
+ /// Note that no worktree is required for this to work, even though access to in-tree `.gitignore` files would require
+ /// a non-empty `index` that represents a tree with `.gitignore` files.
+ ///
+ /// This takes into consideration all the usual repository configuration, namely:
+ ///
+ /// * `$XDG_CONFIG_HOME/…/ignore` if `core.excludesFile` is *not* set, otherwise use the configured file.
+ /// * `$GIT_DIR/info/exclude` if present.
+ ///
+ /// When only excludes are desired, this is the most efficient way to obtain them. Otherwise use
+ /// [`Repository::attributes()`] for accessing both attributes and excludes.
+ // TODO: test
+ #[cfg(feature = "excludes")]
+ pub fn excludes(
+ &self,
+ index: &gix_index::State,
+ overrides: Option<gix_ignore::Search>,
+ source: gix_worktree::stack::state::ignore::Source,
+ ) -> Result<AttributeStack<'_>, config::exclude_stack::Error> {
+ let case = if self.config.ignore_case {
+ gix_glob::pattern::Case::Fold
+ } else {
+ gix_glob::pattern::Case::Sensitive
+ };
+ let mut buf = Vec::with_capacity(512);
+ let ignore = self
+ .config
+ .assemble_exclude_globals(self.git_dir(), overrides, source, &mut buf)?;
+ let state = gix_worktree::stack::State::IgnoreStack(ignore);
+ let attribute_list = state.id_mappings_from_index(index, index.path_backing(), case);
+ Ok(AttributeStack::new(
+ gix_worktree::Stack::new(
+ // this is alright as we don't cause mutation of that directory, it's virtual.
+ self.work_dir().unwrap_or(self.git_dir()),
+ state,
+ case,
+ buf,
+ attribute_list,
+ ),
+ self,
))
}
}
diff --git a/vendor/gix/src/repository/config/mod.rs b/vendor/gix/src/repository/config/mod.rs
index e5c8b64f3..618ccf0f6 100644
--- a/vendor/gix/src/repository/config/mod.rs
+++ b/vendor/gix/src/repository/config/mod.rs
@@ -22,6 +22,21 @@ impl crate::Repository {
}
}
+ /// Return filesystem options as retrieved from the repository configuration.
+ ///
+ /// Note that these values have not been [probed](gix_fs::Capabilities::probe()).
+ pub fn filesystem_options(&self) -> Result<gix_fs::Capabilities, config::boolean::Error> {
+ self.config.fs_capabilities()
+ }
+
+ /// Return filesystem options on how to perform stat-checks, typically in relation to the index.
+ ///
+ /// Note that these values have not been [probed](gix_fs::Capabilities::probe()).
+ #[cfg(feature = "index")]
+ pub fn stat_options(&self) -> Result<gix_index::entry::stat::Options, config::stat_options::Error> {
+ self.config.stat_options()
+ }
+
/// The options used to open the repository.
pub fn open_options(&self) -> &crate::open::Options {
&self.options
diff --git a/vendor/gix/src/repository/config/transport.rs b/vendor/gix/src/repository/config/transport.rs
index dcfbc0bf6..99b5a7f47 100644
--- a/vendor/gix/src/repository/config/transport.rs
+++ b/vendor/gix/src/repository/config/transport.rs
@@ -1,6 +1,8 @@
#![allow(clippy::result_large_err)]
use std::any::Any;
+use gix_macros::momo;
+
use crate::bstr::BStr;
impl crate::Repository {
@@ -21,6 +23,7 @@ impl crate::Repository {
)),
allow(unused_variables)
)]
+ #[momo]
pub fn transport_options<'a>(
&self,
url: impl Into<&'a BStr>,
@@ -359,7 +362,7 @@ impl crate::Repository {
self.install_dir().ok().as_deref(),
self.config.home_dir().as_deref(),
))
- .map(|cow| cow.into_owned())
+ .map(std::borrow::Cow::into_owned)
})
.transpose()
.with_leniency(lenient)
diff --git a/vendor/gix/src/repository/excludes.rs b/vendor/gix/src/repository/excludes.rs
deleted file mode 100644
index 6281549e0..000000000
--- a/vendor/gix/src/repository/excludes.rs
+++ /dev/null
@@ -1,45 +0,0 @@
-//! exclude information
-use crate::{config, Repository};
-impl Repository {
- /// Configure a file-system cache checking if files below the repository are excluded, reading `.gitignore` files from
- /// the specified `source`.
- ///
- /// Note that no worktree is required for this to work, even though access to in-tree `.gitignore` files would require
- /// a non-empty `index` that represents a tree with `.gitignore` files.
- ///
- /// This takes into consideration all the usual repository configuration, namely:
- ///
- /// * `$XDG_CONFIG_HOME/…/ignore` if `core.excludesFile` is *not* set, otherwise use the configured file.
- /// * `$GIT_DIR/info/exclude` if present.
- ///
- /// When only excludes are desired, this is the most efficient way to obtain them. Otherwise use
- /// [`Repository::attributes()`] for accessing both attributes and excludes.
- // TODO: test, provide higher-level custom Cache wrapper that is much easier to use and doesn't panic when accessing entries
- // by non-relative path.
- pub fn excludes(
- &self,
- index: &gix_index::State,
- overrides: Option<gix_ignore::Search>,
- source: gix_worktree::cache::state::ignore::Source,
- ) -> Result<gix_worktree::Cache, config::exclude_stack::Error> {
- let case = if self.config.ignore_case {
- gix_glob::pattern::Case::Fold
- } else {
- gix_glob::pattern::Case::Sensitive
- };
- let mut buf = Vec::with_capacity(512);
- let ignore = self
- .config
- .assemble_exclude_globals(self.git_dir(), overrides, source, &mut buf)?;
- let state = gix_worktree::cache::State::IgnoreStack(ignore);
- let attribute_list = state.id_mappings_from_index(index, index.path_backing(), source, case);
- Ok(gix_worktree::Cache::new(
- // this is alright as we don't cause mutation of that directory, it's virtual.
- self.work_dir().unwrap_or(self.git_dir()),
- state,
- case,
- buf,
- attribute_list,
- ))
- }
-}
diff --git a/vendor/gix/src/repository/filter.rs b/vendor/gix/src/repository/filter.rs
new file mode 100644
index 000000000..3aacb1a3d
--- /dev/null
+++ b/vendor/gix/src/repository/filter.rs
@@ -0,0 +1,64 @@
+use crate::{filter, repository::IndexPersistedOrInMemory, Id, Repository};
+
+///
+pub mod pipeline {
+ /// The error returned by [Repository::filter_pipeline()][super::Repository::filter_pipeline()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("Could not obtain head commit of bare repository")]
+ HeadCommit(#[from] crate::reference::head_commit::Error),
+ #[error(transparent)]
+ DecodeCommit(#[from] gix_object::decode::Error),
+ #[error("Could not create index from tree at HEAD^{{tree}}")]
+ TreeTraverse(#[from] gix_traverse::tree::breadthfirst::Error),
+ #[error(transparent)]
+ BareAttributes(#[from] crate::config::attribute_stack::Error),
+ #[error(transparent)]
+ WorktreeIndex(#[from] crate::worktree::open_index::Error),
+ #[error(transparent)]
+ Init(#[from] crate::filter::pipeline::options::Error),
+ }
+}
+
+impl Repository {
+ /// Configure a pipeline for converting byte buffers to the worktree representation, and byte streams to the git-internal
+ /// representation. Also return the index that was used when initializing the pipeline as it may be useful when calling
+ /// [convert_to_git()][filter::Pipeline::convert_to_git()].
+ /// Bare repositories will either use `HEAD^{tree}` for accessing all relevant worktree files or the given `tree_if_bare`.
+ ///
+ /// Note that this is considered a primitive as it operates on data directly and will not have permanent effects.
+ /// We also return the index that was used to configure the attributes cache (for accessing `.gitattributes`), which can be reused
+ /// after it was possibly created from a tree, an expensive operation.
+ ///
+ /// ### Performance
+ ///
+ /// Note that when in a repository with worktree, files in the worktree will be read with priority, which causes at least a stat
+ /// each time the directory is changed. This can be expensive if access isn't in sorted order, which would cause more then necessary
+ /// stats: one per directory.
+ pub fn filter_pipeline(
+ &self,
+ tree_if_bare: Option<gix_hash::ObjectId>,
+ ) -> Result<(filter::Pipeline<'_>, IndexPersistedOrInMemory), pipeline::Error> {
+ let (cache, index) = if self.is_bare() {
+ let index = self.index_from_tree(&tree_if_bare.map_or_else(
+ || {
+ self.head_commit()
+ .map_err(pipeline::Error::from)
+ .and_then(|c| c.tree_id().map(Id::detach).map_err(Into::into))
+ },
+ Ok,
+ )?)?;
+ let cache = self.attributes_only(&index, gix_worktree::stack::state::attributes::Source::IdMapping)?;
+ (cache, IndexPersistedOrInMemory::InMemory(index))
+ } else {
+ let index = self.index()?;
+ let cache = self.attributes_only(
+ &index,
+ gix_worktree::stack::state::attributes::Source::WorktreeThenIdMapping,
+ )?;
+ (cache, IndexPersistedOrInMemory::Persisted(index))
+ };
+ Ok((filter::Pipeline::new(self, cache.detach())?, index))
+ }
+}
diff --git a/vendor/gix/src/repository/graph.rs b/vendor/gix/src/repository/graph.rs
index a1f6c7f89..f4f2b18cc 100644
--- a/vendor/gix/src/repository/graph.rs
+++ b/vendor/gix/src/repository/graph.rs
@@ -7,18 +7,34 @@ impl crate::Repository {
/// Note that the commitgraph will be used if it is present and readable, but it won't be an error if it is corrupted. In that case,
/// it will just not be used.
///
+ /// Note that a commitgraph is only allowed to be used if `core.commitGraph` is true (the default), and that configuration errors are
+ /// ignored as well.
+ ///
/// ### Performance
///
- /// Note that the [Graph][gix_revision::Graph] can be sensitive to various object database settings that may affect the performance
+ /// Note that the [Graph][gix_revwalk::Graph] can be sensitive to various object database settings that may affect the performance
/// of the commit walk.
- pub fn commit_graph<T>(&self) -> gix_revision::Graph<'_, T> {
- gix_revision::Graph::new(
+ pub fn revision_graph<T>(&self) -> gix_revwalk::Graph<'_, T> {
+ gix_revwalk::Graph::new(
|id, buf| {
self.objects
.try_find(id, buf)
- .map(|r| r.and_then(|d| d.try_into_commit_iter()))
+ .map(|r| r.and_then(gix_object::Data::try_into_commit_iter))
},
- gix_commitgraph::at(self.objects.store_ref().path().join("info")).ok(),
+ self.config
+ .may_use_commit_graph()
+ .unwrap_or(true)
+ .then(|| gix_commitgraph::at(self.objects.store_ref().path().join("info")).ok())
+ .flatten(),
)
}
+
+ /// Return a cache for commits and their graph structure, as managed by `git commit-graph`, for accelerating commit walks on
+ /// a low level.
+ ///
+ /// Note that [`revision_graph()`][crate::Repository::revision_graph()] should be preferred for general purpose walks that don't
+ /// rely on the actual commit cache to be present, while leveraging it if possible.
+ pub fn commit_graph(&self) -> Result<gix_commitgraph::Graph, gix_commitgraph::init::Error> {
+ gix_commitgraph::at(self.objects.store_ref().path().join("info"))
+ }
}
diff --git a/vendor/gix/src/repository/identity.rs b/vendor/gix/src/repository/identity.rs
index 61a4b4a98..a4e39089e 100644
--- a/vendor/gix/src/repository/identity.rs
+++ b/vendor/gix/src/repository/identity.rs
@@ -31,13 +31,13 @@ impl crate::Repository {
let p = self.config.personas();
Ok(gix_actor::SignatureRef {
- name: p.committer.name.as_ref().or(p.user.name.as_ref()).map(|v| v.as_ref())?,
+ name: p.committer.name.as_ref().or(p.user.name.as_ref()).map(AsRef::as_ref)?,
email: p
.committer
.email
.as_ref()
.or(p.user.email.as_ref())
- .map(|v| v.as_ref())?,
+ .map(AsRef::as_ref)?,
time: match extract_time_or_default(p.committer.time.as_ref(), &gitoxide::Commit::COMMITTER_DATE) {
Ok(t) => t,
Err(err) => return Some(Err(err)),
@@ -61,8 +61,8 @@ impl crate::Repository {
let p = self.config.personas();
Ok(gix_actor::SignatureRef {
- name: p.author.name.as_ref().or(p.user.name.as_ref()).map(|v| v.as_ref())?,
- email: p.author.email.as_ref().or(p.user.email.as_ref()).map(|v| v.as_ref())?,
+ name: p.author.name.as_ref().or(p.user.name.as_ref()).map(AsRef::as_ref)?,
+ email: p.author.email.as_ref().or(p.user.email.as_ref()).map(AsRef::as_ref)?,
time: match extract_time_or_default(p.author.time.as_ref(), &gitoxide::Commit::AUTHOR_DATE) {
Ok(t) => t,
Err(err) => return Some(Err(err)),
@@ -73,9 +73,9 @@ impl crate::Repository {
}
fn extract_time_or_default(
- time: Option<&Result<gix_actor::Time, gix_date::parse::Error>>,
+ time: Option<&Result<gix_date::Time, gix_date::parse::Error>>,
config_key: &'static keys::Time,
-) -> Result<gix_actor::Time, config::time::Error> {
+) -> Result<gix_date::Time, config::time::Error> {
match time {
Some(Ok(t)) => Ok(*t),
None => Ok(gix_date::Time::now_local_or_utc()),
@@ -88,7 +88,7 @@ pub(crate) struct Entity {
pub name: Option<BString>,
pub email: Option<BString>,
/// A time parsed from an environment variable, handling potential errors is delayed.
- pub time: Option<Result<gix_actor::Time, gix_date::parse::Error>>,
+ pub time: Option<Result<gix_date::Time, gix_date::parse::Error>>,
}
#[derive(Debug, Clone)]
@@ -117,11 +117,11 @@ impl Personas {
config
.string(name_key.section.name(), None, name_key.name)
.or_else(|| fallback.as_ref().and_then(|(s, name_key, _)| s.value(name_key.name)))
- .map(|v| v.into_owned()),
+ .map(std::borrow::Cow::into_owned),
config
.string(email_key.section.name(), None, email_key.name)
.or_else(|| fallback.as_ref().and_then(|(s, _, email_key)| s.value(email_key.name)))
- .map(|v| v.into_owned()),
+ .map(std::borrow::Cow::into_owned),
)
}
let now = SystemTime::now();
@@ -152,7 +152,7 @@ impl Personas {
user_email = user_email.or_else(|| {
config
.string_by_key(gitoxide::User::EMAIL_FALLBACK.logical_name().as_str())
- .map(|v| v.into_owned())
+ .map(std::borrow::Cow::into_owned)
});
Personas {
user: Entity {
diff --git a/vendor/gix/src/repository/impls.rs b/vendor/gix/src/repository/impls.rs
index 5da55290c..36fd788dc 100644
--- a/vendor/gix/src/repository/impls.rs
+++ b/vendor/gix/src/repository/impls.rs
@@ -7,8 +7,11 @@ impl Clone for crate::Repository {
self.common_dir.clone(),
self.config.clone(),
self.options.clone(),
+ #[cfg(feature = "index")]
self.index.clone(),
self.shallow_commits.clone(),
+ #[cfg(feature = "attributes")]
+ self.modules.clone(),
)
}
}
@@ -40,8 +43,11 @@ impl From<&crate::ThreadSafeRepository> for crate::Repository {
repo.common_dir.clone(),
repo.config.clone(),
repo.linked_worktree_options.clone(),
+ #[cfg(feature = "index")]
repo.index.clone(),
repo.shallow_commits.clone(),
+ #[cfg(feature = "attributes")]
+ repo.modules.clone(),
)
}
}
@@ -55,8 +61,11 @@ impl From<crate::ThreadSafeRepository> for crate::Repository {
repo.common_dir,
repo.config,
repo.linked_worktree_options,
+ #[cfg(feature = "index")]
repo.index,
repo.shallow_commits,
+ #[cfg(feature = "attributes")]
+ repo.modules.clone(),
)
}
}
@@ -70,7 +79,10 @@ impl From<crate::Repository> for crate::ThreadSafeRepository {
common_dir: r.common_dir,
config: r.config,
linked_worktree_options: r.options,
+ #[cfg(feature = "index")]
index: r.index,
+ #[cfg(feature = "attributes")]
+ modules: r.modules,
shallow_commits: r.shallow_commits,
}
}
diff --git a/vendor/gix/src/repository/index.rs b/vendor/gix/src/repository/index.rs
new file mode 100644
index 000000000..a21b138a5
--- /dev/null
+++ b/vendor/gix/src/repository/index.rs
@@ -0,0 +1,133 @@
+use gix_odb::FindExt;
+
+use crate::{config::cache::util::ApplyLeniencyDefault, repository::IndexPersistedOrInMemory, worktree};
+
+/// Index access
+impl crate::Repository {
+ /// Open a new copy of the index file and decode it entirely.
+ ///
+ /// It will use the `index.threads` configuration key to learn how many threads to use.
+ /// Note that it may fail if there is no index.
+ pub fn open_index(&self) -> Result<gix_index::File, worktree::open_index::Error> {
+ let thread_limit = self
+ .config
+ .resolved
+ .string("index", None, "threads")
+ .map(|value| crate::config::tree::Index::THREADS.try_into_index_threads(value))
+ .transpose()
+ .with_lenient_default(self.config.lenient_config)?;
+ let skip_hash = self
+ .config
+ .resolved
+ .boolean("index", None, "skipHash")
+ .map(|res| crate::config::tree::Index::SKIP_HASH.enrich_error(res))
+ .transpose()
+ .with_lenient_default(self.config.lenient_config)?
+ .unwrap_or_default();
+
+ let index = gix_index::File::at(
+ self.index_path(),
+ self.object_hash(),
+ skip_hash,
+ gix_index::decode::Options {
+ thread_limit,
+ min_extension_block_in_bytes_for_threading: 0,
+ expected_checksum: None,
+ },
+ )?;
+
+ Ok(index)
+ }
+
+ /// Return a shared worktree index which is updated automatically if the in-memory snapshot has become stale as the underlying file
+ /// on disk has changed.
+ ///
+ /// The index file is shared across all clones of this repository.
+ pub fn index(&self) -> Result<worktree::Index, worktree::open_index::Error> {
+ self.try_index().and_then(|opt| match opt {
+ Some(index) => Ok(index),
+ None => Err(worktree::open_index::Error::IndexFile(
+ gix_index::file::init::Error::Io(std::io::Error::new(
+ std::io::ErrorKind::NotFound,
+ format!("Could not find index file at {:?} for opening.", self.index_path()),
+ )),
+ )),
+ })
+ }
+
+ /// Return a shared worktree index which is updated automatically if the in-memory snapshot has become stale as the underlying file
+ /// on disk has changed, or `None` if no such file exists.
+ ///
+ /// The index file is shared across all clones of this repository.
+ pub fn try_index(&self) -> Result<Option<worktree::Index>, worktree::open_index::Error> {
+ self.index.recent_snapshot(
+ || self.index_path().metadata().and_then(|m| m.modified()).ok(),
+ || {
+ self.open_index().map(Some).or_else(|err| match err {
+ worktree::open_index::Error::IndexFile(gix_index::file::init::Error::Io(err))
+ if err.kind() == std::io::ErrorKind::NotFound =>
+ {
+ Ok(None)
+ }
+ err => Err(err),
+ })
+ },
+ )
+ }
+
+ /// Open the persisted worktree index or generate it from the current `HEAD^{tree}` to live in-memory only.
+ ///
+ /// Use this method to get an index in any repository, even bare ones that don't have one naturally.
+ ///
+ /// ### Note
+ ///
+ /// The locally stored index is not guaranteed to represent `HEAD^{tree}` if this repository is bare - bare repos
+ /// don't naturally have an index and if an index is present it must have been generated by hand.
+ pub fn index_or_load_from_head(
+ &self,
+ ) -> Result<IndexPersistedOrInMemory, crate::repository::index_or_load_from_head::Error> {
+ Ok(match self.try_index()? {
+ Some(index) => IndexPersistedOrInMemory::Persisted(index),
+ None => {
+ let tree = self.head_commit()?.tree_id()?;
+ IndexPersistedOrInMemory::InMemory(self.index_from_tree(&tree)?)
+ }
+ })
+ }
+
+ /// Create new index-file, which would live at the correct location, in memory from the given `tree`.
+ ///
+ /// Note that this is an expensive operation as it requires recursively traversing the entire tree to unpack it into the index.
+ pub fn index_from_tree(
+ &self,
+ tree: &gix_hash::oid,
+ ) -> Result<gix_index::File, gix_traverse::tree::breadthfirst::Error> {
+ Ok(gix_index::File::from_state(
+ gix_index::State::from_tree(tree, |oid, buf| self.objects.find_tree_iter(oid, buf).ok())?,
+ self.git_dir().join("index"),
+ ))
+ }
+}
+
+impl std::ops::Deref for IndexPersistedOrInMemory {
+ type Target = gix_index::File;
+
+ fn deref(&self) -> &Self::Target {
+ match self {
+ IndexPersistedOrInMemory::Persisted(i) => i,
+ IndexPersistedOrInMemory::InMemory(i) => i,
+ }
+ }
+}
+
+impl IndexPersistedOrInMemory {
+ /// Consume this instance and turn it into an owned index file.
+ ///
+ /// Note that this will cause the persisted index to be cloned, which would happen whenever the repository has a worktree.
+ pub fn into_owned(self) -> gix_index::File {
+ match self {
+ IndexPersistedOrInMemory::Persisted(i) => gix_index::File::clone(&i),
+ IndexPersistedOrInMemory::InMemory(i) => i,
+ }
+ }
+}
diff --git a/vendor/gix/src/repository/init.rs b/vendor/gix/src/repository/init.rs
index 255ff90d6..65b3d59ab 100644
--- a/vendor/gix/src/repository/init.rs
+++ b/vendor/gix/src/repository/init.rs
@@ -4,15 +4,16 @@ impl crate::Repository {
#[allow(clippy::too_many_arguments)]
pub(crate) fn from_refs_and_objects(
refs: crate::RefStore,
- objects: crate::OdbHandle,
+ mut objects: crate::OdbHandle,
work_tree: Option<std::path::PathBuf>,
common_dir: Option<std::path::PathBuf>,
config: crate::config::Cache,
linked_worktree_options: crate::open::Options,
- index: crate::worktree::IndexStorage,
+ #[cfg(feature = "index")] index: crate::worktree::IndexStorage,
shallow_commits: crate::shallow::CommitsStorage,
+ #[cfg(feature = "attributes")] modules: crate::submodule::ModulesFileStorage,
) -> Self {
- let objects = setup_objects(objects, &config);
+ setup_objects(&mut objects, &config);
crate::Repository {
bufs: RefCell::new(Vec::with_capacity(4)),
work_tree,
@@ -21,8 +22,11 @@ impl crate::Repository {
refs,
config,
options: linked_worktree_options,
+ #[cfg(feature = "index")]
index,
shallow_commits,
+ #[cfg(feature = "attributes")]
+ modules,
}
}
@@ -33,7 +37,7 @@ impl crate::Repository {
}
#[cfg_attr(not(feature = "max-performance-safe"), allow(unused_variables, unused_mut))]
-fn setup_objects(mut objects: crate::OdbHandle, config: &crate::config::Cache) -> crate::OdbHandle {
+pub(crate) fn setup_objects(objects: &mut crate::OdbHandle, config: &crate::config::Cache) {
#[cfg(feature = "max-performance-safe")]
{
match config.pack_cache_bytes {
@@ -54,10 +58,5 @@ fn setup_objects(mut objects: crate::OdbHandle, config: &crate::config::Cache) -
let bytes = config.object_cache_bytes;
objects.set_object_cache(move || Box::new(gix_pack::cache::object::MemoryCappedHashmap::new(bytes)));
}
- objects
- }
- #[cfg(not(feature = "max-performance-safe"))]
- {
- objects
}
}
diff --git a/vendor/gix/src/repository/location.rs b/vendor/gix/src/repository/location.rs
index 3e2ff907c..5811e7bf9 100644
--- a/vendor/gix/src/repository/location.rs
+++ b/vendor/gix/src/repository/location.rs
@@ -1,4 +1,4 @@
-use std::path::PathBuf;
+use std::path::{Path, PathBuf};
use gix_path::realpath::MAX_SYMLINKS;
@@ -25,6 +25,12 @@ impl crate::Repository {
self.git_dir().join("index")
}
+ /// The path to the `.gitmodules` file in the worktree, if a worktree is available.
+ #[cfg(feature = "attributes")]
+ pub fn modules_path(&self) -> Option<PathBuf> {
+ self.work_dir().map(|wtd| wtd.join(crate::submodule::MODULES_FILE))
+ }
+
/// The path to the `.git` directory itself, or equivalent if this is a bare repository.
pub fn path(&self) -> &std::path::Path {
self.git_dir()
@@ -42,30 +48,18 @@ impl crate::Repository {
}
/// Returns the relative path which is the components between the working tree and the current working dir (CWD).
- /// Note that there may be `None` if there is no work tree, even though the `PathBuf` will be empty
- /// if the CWD is at the root of the work tree.
+ /// Note that it may be `None` if there is no work tree, or if CWD isn't inside of the working tree directory.
+ ///
+ /// Note that the CWD is obtained once upon instantiation of the repository.
// TODO: tests, details - there is a lot about environment variables to change things around.
- pub fn prefix(&self) -> Option<std::io::Result<PathBuf>> {
- self.work_tree.as_ref().map(|root| {
- std::env::current_dir().and_then(|cwd| {
- gix_path::realpath_opts(root, &cwd, MAX_SYMLINKS)
- .map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))
- .and_then(|root| {
- cwd.strip_prefix(&root)
- .map_err(|_| {
- std::io::Error::new(
- std::io::ErrorKind::Other,
- format!(
- "CWD '{}' isn't within the work tree '{}'",
- cwd.display(),
- root.display()
- ),
- )
- })
- .map(ToOwned::to_owned)
- })
- })
- })
+ pub fn prefix(&self) -> Result<Option<&Path>, gix_path::realpath::Error> {
+ let (root, current_dir) = match self.work_dir().zip(self.options.current_dir.as_deref()) {
+ Some((work_dir, cwd)) => (work_dir, cwd),
+ None => return Ok(None),
+ };
+
+ let root = gix_path::realpath_opts(root, current_dir, MAX_SYMLINKS)?;
+ Ok(current_dir.strip_prefix(&root).ok())
}
/// Return the kind of repository, either bare or one with a work tree.
diff --git a/vendor/gix/src/repository/snapshots.rs b/vendor/gix/src/repository/mailmap.rs
index 96de5080d..b4a2f4a0e 100644
--- a/vendor/gix/src/repository/snapshots.rs
+++ b/vendor/gix/src/repository/mailmap.rs
@@ -37,12 +37,11 @@ impl crate::Repository {
});
match self.work_dir() {
None => {
- // TODO: replace with ref-spec `HEAD:.mailmap` for less verbose way of getting the blob id
blob_id = blob_id.or_else(|| {
self.head().ok().and_then(|mut head| {
let commit = head.peel_to_commit_in_place().ok()?;
let tree = commit.tree().ok()?;
- tree.lookup_entry(Some(".mailmap")).ok()?.map(|e| e.object_id())
+ tree.find_entry(".mailmap").map(|e| e.object_id())
})
});
}
diff --git a/vendor/gix/src/repository/mod.rs b/vendor/gix/src/repository/mod.rs
index f8a51e8d0..e3742894b 100644
--- a/vendor/gix/src/repository/mod.rs
+++ b/vendor/gix/src/repository/mod.rs
@@ -4,6 +4,8 @@
#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub enum Kind {
/// A submodule worktree, whose `git` repository lives in `.git/modules/**/<name>` of the parent repository.
+ ///
+ /// Note that 'old-form' submodule will register as `Worktree {is_linked: false}`.
Submodule,
/// A bare repository does not have a work tree, that is files on disk beyond the `git` repository itself.
Bare,
@@ -33,22 +35,106 @@ impl crate::Repository {
}
}
-mod attributes;
+#[cfg(any(feature = "attributes", feature = "excludes"))]
+pub mod attributes;
mod cache;
mod config;
-mod excludes;
+///
+#[cfg(feature = "attributes")]
+pub mod filter;
mod graph;
pub(crate) mod identity;
mod impls;
-mod init;
+#[cfg(feature = "index")]
+mod index;
+pub(crate) mod init;
mod kind;
mod location;
+#[cfg(feature = "mailmap")]
+mod mailmap;
mod object;
+#[cfg(feature = "attributes")]
+mod pathspec;
mod reference;
mod remote;
+#[cfg(feature = "revision")]
mod revision;
mod shallow;
-mod snapshots;
mod state;
+#[cfg(feature = "attributes")]
+mod submodule;
mod thread_safe;
mod worktree;
+
+/// A type to represent an index which either was loaded from disk as it was persisted there, or created on the fly in memory.
+#[cfg(feature = "index")]
+pub enum IndexPersistedOrInMemory {
+ /// The index as loaded from disk, and shared across clones of the owning `Repository`.
+ Persisted(crate::worktree::Index),
+ /// A temporary index as created from the `HEAD^{tree}`, with the file path set to the place where it would be stored naturally.
+ ///
+ /// Note that unless saved explicitly, it will not persist.
+ InMemory(gix_index::File),
+}
+
+///
+#[cfg(feature = "attributes")]
+pub mod pathspec_defaults_ignore_case {
+ /// The error returned by [Repository::pathspec_defaults_ignore_case()](crate::Repository::pathspec_defaults_inherit_ignore_case()).
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("Filesystem configuration could not be obtained to learn about case sensitivity")]
+ FilesystemConfig(#[from] crate::config::boolean::Error),
+ #[error(transparent)]
+ Defaults(#[from] gix_pathspec::defaults::from_environment::Error),
+ }
+}
+
+///
+#[cfg(feature = "index")]
+pub mod index_or_load_from_head {
+ /// The error returned by [`Repository::index_or_load_from_head()`][crate::Repository::index_or_load_from_head()].
+ #[derive(thiserror::Error, Debug)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ HeadCommit(#[from] crate::reference::head_commit::Error),
+ #[error(transparent)]
+ TreeId(#[from] gix_object::decode::Error),
+ #[error(transparent)]
+ TraverseTree(#[from] gix_traverse::tree::breadthfirst::Error),
+ #[error(transparent)]
+ OpenIndex(#[from] crate::worktree::open_index::Error),
+ }
+}
+
+///
+#[cfg(feature = "worktree-stream")]
+pub mod worktree_stream {
+ /// The error returned by [`Repository::worktree_stream()`][crate::Repository::worktree_stream()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ FindTree(#[from] crate::object::find::existing::Error),
+ #[error(transparent)]
+ OpenTree(#[from] gix_traverse::tree::breadthfirst::Error),
+ #[error(transparent)]
+ AttributesCache(#[from] crate::config::attribute_stack::Error),
+ #[error(transparent)]
+ FilterPipeline(#[from] crate::filter::pipeline::options::Error),
+ #[error("Needed {id} to be a tree to turn into a workspace stream, got {actual}")]
+ NotATree {
+ id: gix_hash::ObjectId,
+ actual: gix_object::Kind,
+ },
+ }
+}
+
+///
+#[cfg(feature = "worktree-archive")]
+pub mod worktree_archive {
+ /// The error returned by [`Repository::worktree_archive()`][crate::Repository::worktree_archive()].
+ pub type Error = gix_archive::Error;
+}
diff --git a/vendor/gix/src/repository/object.rs b/vendor/gix/src/repository/object.rs
index 787dcda4e..c156971d0 100644
--- a/vendor/gix/src/repository/object.rs
+++ b/vendor/gix/src/repository/object.rs
@@ -2,11 +2,13 @@
use std::{convert::TryInto, ops::DerefMut};
use gix_hash::ObjectId;
-use gix_odb::{Find, FindExt, Write};
+use gix_macros::momo;
+use gix_odb::{Find, FindExt, Header, HeaderExt, Write};
use gix_ref::{
transaction::{LogChange, PreviousValue, RefLog},
FullName,
};
+use smallvec::SmallVec;
use crate::{commit, ext::ObjectIdExt, object, tag, Id, Object, Reference, Tree};
@@ -21,6 +23,7 @@ impl crate::Repository {
///
/// In order to get the kind of the object, is must be fully decoded from storage if it is packed with deltas.
/// Loose object could be partially decoded, even though that's not implemented.
+ #[momo]
pub fn find_object(&self, id: impl Into<ObjectId>) -> Result<Object<'_>, object::find::existing::Error> {
let id = id.into();
if id == gix_hash::ObjectId::empty_tree(self.object_hash()) {
@@ -32,11 +35,46 @@ impl crate::Repository {
});
}
let mut buf = self.free_buf();
- let kind = self.objects.find(id, &mut buf)?.kind;
+ let kind = self.objects.find(&id, &mut buf)?.kind;
Ok(Object::from_data(id, kind, buf, self))
}
+ /// Obtain information about an object without fully decoding it, or fail if the object doesn't exist.
+ ///
+ /// Note that despite being cheaper than [`Self::find_object()`], there is still some effort traversing delta-chains.
+ #[doc(alias = "read_header", alias = "git2")]
+ #[momo]
+ pub fn find_header(&self, id: impl Into<ObjectId>) -> Result<gix_odb::find::Header, object::find::existing::Error> {
+ let id = id.into();
+ if id == gix_hash::ObjectId::empty_tree(self.object_hash()) {
+ return Ok(gix_odb::find::Header::Loose {
+ kind: gix_object::Kind::Tree,
+ size: 0,
+ });
+ }
+ self.objects.header(id)
+ }
+
+ /// Obtain information about an object without fully decoding it, or `None` if the object doesn't exist.
+ ///
+ /// Note that despite being cheaper than [`Self::try_find_object()`], there is still some effort traversing delta-chains.
+ #[momo]
+ pub fn try_find_header(
+ &self,
+ id: impl Into<ObjectId>,
+ ) -> Result<Option<gix_odb::find::Header>, object::find::Error> {
+ let id = id.into();
+ if id == gix_hash::ObjectId::empty_tree(self.object_hash()) {
+ return Ok(Some(gix_odb::find::Header::Loose {
+ kind: gix_object::Kind::Tree,
+ size: 0,
+ }));
+ }
+ self.objects.try_header(&id).map_err(Into::into)
+ }
+
/// Try to find the object with `id` or return `None` if it wasn't found.
+ #[momo]
pub fn try_find_object(&self, id: impl Into<ObjectId>) -> Result<Option<Object<'_>>, object::find::Error> {
let id = id.into();
if id == gix_hash::ObjectId::empty_tree(self.object_hash()) {
@@ -49,7 +87,7 @@ impl crate::Repository {
}
let mut buf = self.free_buf();
- match self.objects.try_find(id, &mut buf)? {
+ match self.objects.try_find(&id, &mut buf)? {
Some(obj) => {
let kind = obj.kind;
Ok(Some(Object::from_data(id, kind, buf, self)))
@@ -76,15 +114,19 @@ impl crate::Repository {
/// we avoid writing duplicate objects using slow disks that will eventually have to be garbage collected.
pub fn write_object(&self, object: impl gix_object::WriteTo) -> Result<Id<'_>, object::write::Error> {
let mut buf = self.shared_empty_buf();
- object.write_to(buf.deref_mut())?;
+ object.write_to(buf.deref_mut()).expect("write to memory works");
- let oid = gix_object::compute_hash(self.object_hash(), object.kind(), &buf);
- if self.objects.contains(oid) {
+ self.write_object_inner(&buf, object.kind())
+ }
+
+ fn write_object_inner(&self, buf: &[u8], kind: gix_object::Kind) -> Result<Id<'_>, object::write::Error> {
+ let oid = gix_object::compute_hash(self.object_hash(), kind, buf);
+ if self.objects.contains(&oid) {
return Ok(oid.attach(self));
}
self.objects
- .write_buf(object.kind(), &buf)
+ .write_buf(kind, buf)
.map(|oid| oid.attach(self))
.map_err(Into::into)
}
@@ -93,14 +135,16 @@ impl crate::Repository {
///
/// We avoid writing duplicate objects to slow disks that will eventually have to be garbage collected by
/// pre-hashing the data, and checking if the object is already present.
+ #[momo]
pub fn write_blob(&self, bytes: impl AsRef<[u8]>) -> Result<Id<'_>, object::write::Error> {
let bytes = bytes.as_ref();
let oid = gix_object::compute_hash(self.object_hash(), gix_object::Kind::Blob, bytes);
- if self.objects.contains(oid) {
+ if self.objects.contains(&oid) {
return Ok(oid.attach(self));
}
self.objects
.write_buf(gix_object::Kind::Blob, bytes)
+ .map_err(Into::into)
.map(|oid| oid.attach(self))
}
@@ -115,14 +159,20 @@ impl crate::Repository {
mut bytes: impl std::io::Read + std::io::Seek,
) -> Result<Id<'_>, object::write::Error> {
let mut buf = self.shared_empty_buf();
- std::io::copy(&mut bytes, buf.deref_mut())?;
- let oid = gix_object::compute_hash(self.object_hash(), gix_object::Kind::Blob, &buf);
- if self.objects.contains(oid) {
+ std::io::copy(&mut bytes, buf.deref_mut()).expect("write to memory works");
+
+ self.write_blob_stream_inner(&buf)
+ }
+
+ fn write_blob_stream_inner(&self, buf: &[u8]) -> Result<Id<'_>, object::write::Error> {
+ let oid = gix_object::compute_hash(self.object_hash(), gix_object::Kind::Blob, buf);
+ if self.objects.contains(&oid) {
return Ok(oid.attach(self));
}
self.objects
- .write_buf(gix_object::Kind::Blob, &buf)
+ .write_buf(gix_object::Kind::Blob, buf)
+ .map_err(Into::into)
.map(|oid| oid.attach(self))
}
@@ -131,6 +181,7 @@ impl crate::Repository {
///
/// It will be created with `constraint` which is most commonly to [only create it][PreviousValue::MustNotExist]
/// or to [force overwriting a possibly existing tag](PreviousValue::Any).
+ #[momo]
pub fn tag(
&self,
name: impl AsRef<str>,
@@ -168,6 +219,25 @@ impl crate::Repository {
Name: TryInto<FullName, Error = E>,
commit::Error: From<E>,
{
+ self.commit_as_inner(
+ committer.into(),
+ author.into(),
+ reference.try_into()?,
+ message.as_ref(),
+ tree.into(),
+ parents.into_iter().map(Into::into).collect(),
+ )
+ }
+
+ fn commit_as_inner(
+ &self,
+ committer: gix_actor::SignatureRef<'_>,
+ author: gix_actor::SignatureRef<'_>,
+ reference: FullName,
+ message: &str,
+ tree: ObjectId,
+ parents: SmallVec<[gix_hash::ObjectId; 1]>,
+ ) -> Result<Id<'_>, commit::Error> {
use gix_ref::{
transaction::{Change, RefEdit},
Target,
@@ -175,14 +245,13 @@ impl crate::Repository {
// TODO: possibly use CommitRef to save a few allocations (but will have to allocate for object ids anyway.
// This can be made vastly more efficient though if we wanted to, so we lie in the API
- let reference = reference.try_into()?;
let commit = gix_object::Commit {
- message: message.as_ref().into(),
- tree: tree.into(),
- author: author.into().to_owned(),
- committer: committer.into().to_owned(),
+ message: message.into(),
+ tree,
+ author: author.into(),
+ committer: committer.into(),
encoding: None,
- parents: parents.into_iter().map(|id| id.into()).collect(),
+ parents,
extra_headers: Default::default(),
};
diff --git a/vendor/gix/src/repository/pathspec.rs b/vendor/gix/src/repository/pathspec.rs
new file mode 100644
index 000000000..8e7e9bbe9
--- /dev/null
+++ b/vendor/gix/src/repository/pathspec.rs
@@ -0,0 +1,55 @@
+use gix_pathspec::MagicSignature;
+
+use crate::{bstr::BStr, config::cache::util::ApplyLeniencyDefault, AttributeStack, Pathspec, Repository};
+
+impl Repository {
+ /// Create a new pathspec abstraction that allows to conduct searches using `patterns`.
+ /// `inherit_ignore_case` should be `true` if `patterns` will match against files on disk, or `false` otherwise, for more natural matching
+ /// (but also note that `git` does not do that).
+ /// `index` may be needed to load attributes which is required only if `patterns` refer to attributes via `:(attr:…)` syntax.
+ /// In the same vein, `attributes_source` affects where `.gitattributes` files are read from if pathspecs need to match against attributes.
+ ///
+ /// It will be initialized exactly how it would, and attribute matching will be conducted by reading the worktree first if available.
+ /// If that is not desirable, consider calling [`Pathspec::new()`] directly.
+ #[doc(alias = "Pathspec", alias = "git2")]
+ pub fn pathspec(
+ &self,
+ patterns: impl IntoIterator<Item = impl AsRef<BStr>>,
+ inherit_ignore_case: bool,
+ index: &gix_index::State,
+ attributes_source: gix_worktree::stack::state::attributes::Source,
+ ) -> Result<Pathspec<'_>, crate::pathspec::init::Error> {
+ Pathspec::new(self, patterns, inherit_ignore_case, || {
+ self.attributes_only(index, attributes_source)
+ .map(AttributeStack::detach)
+ .map_err(Into::into)
+ })
+ }
+
+ /// Return default settings that are required when [parsing pathspecs](gix_pathspec::parse()) by hand.
+ ///
+ /// These are stemming from environment variables which have been converted to [config settings](crate::config::tree::gitoxide::Pathspec),
+ /// which now serve as authority for configuration.
+ pub fn pathspec_defaults(&self) -> Result<gix_pathspec::Defaults, gix_pathspec::defaults::from_environment::Error> {
+ self.config.pathspec_defaults()
+ }
+
+ /// Similar to [Self::pathspec_defaults()], but will automatically configure the returned defaults to match case-insensitively if the underlying
+ /// filesystem is also configured to be case-insensitive according to `core.ignoreCase`, and `inherit_ignore_case` is `true`.
+ pub fn pathspec_defaults_inherit_ignore_case(
+ &self,
+ inherit_ignore_case: bool,
+ ) -> Result<gix_pathspec::Defaults, crate::repository::pathspec_defaults_ignore_case::Error> {
+ let mut defaults = self.config.pathspec_defaults()?;
+ if inherit_ignore_case
+ && self
+ .config
+ .fs_capabilities()
+ .with_lenient_default(self.config.lenient_config)?
+ .ignore_case
+ {
+ defaults.signature |= MagicSignature::ICASE;
+ }
+ Ok(defaults)
+ }
+}
diff --git a/vendor/gix/src/repository/reference.rs b/vendor/gix/src/repository/reference.rs
index e5a8aadcb..5a14c60b5 100644
--- a/vendor/gix/src/repository/reference.rs
+++ b/vendor/gix/src/repository/reference.rs
@@ -1,6 +1,7 @@
use std::convert::TryInto;
use gix_hash::ObjectId;
+use gix_macros::momo;
use gix_ref::{
transaction::{Change, LogChange, PreviousValue, RefEdit, RefLog},
FullName, PartialNameRef, Target,
@@ -14,6 +15,7 @@ impl crate::Repository {
///
/// It will be created with `constraint` which is most commonly to [only create it][PreviousValue::MustNotExist]
/// or to [force overwriting a possibly existing tag](PreviousValue::Any).
+ #[momo]
pub fn tag_reference(
&self,
name: impl AsRef<str>,
@@ -60,10 +62,10 @@ impl crate::Repository {
pub fn set_namespace<'a, Name, E>(
&mut self,
namespace: Name,
- ) -> Result<Option<gix_ref::Namespace>, gix_validate::refname::Error>
+ ) -> Result<Option<gix_ref::Namespace>, gix_validate::reference::name::Error>
where
Name: TryInto<&'a PartialNameRef, Error = E>,
- gix_validate::refname::Error: From<E>,
+ gix_validate::reference::name::Error: From<E>,
{
let namespace = gix_ref::namespace::expand(namespace)?;
Ok(self.refs.namespace.replace(namespace))
@@ -85,14 +87,27 @@ impl crate::Repository {
Name: TryInto<FullName, Error = E>,
gix_validate::reference::name::Error: From<E>,
{
- let name = name.try_into().map_err(gix_validate::reference::name::Error::from)?;
- let id = target.into();
+ self.reference_inner(
+ name.try_into().map_err(gix_validate::reference::name::Error::from)?,
+ target.into(),
+ constraint,
+ log_message.into(),
+ )
+ }
+
+ fn reference_inner(
+ &self,
+ name: FullName,
+ id: ObjectId,
+ constraint: PreviousValue,
+ log_message: BString,
+ ) -> Result<Reference<'_>, reference::edit::Error> {
let mut edits = self.edit_reference(RefEdit {
change: Change::Update {
log: LogChange {
mode: RefLog::AndReference,
force_create_reflog: false,
- message: log_message.into(),
+ message: log_message,
},
expected: constraint,
new: Target::Peeled(id),
@@ -124,7 +139,7 @@ impl crate::Repository {
/// Edit one or more references as described by their `edits`.
/// Note that one can set the committer name for use in the ref-log by temporarily
- /// [overriding the gix-config][crate::Repository::config_snapshot_mut()].
+ /// [overriding the git-config][crate::Repository::config_snapshot_mut()].
///
/// Returns all reference edits, which might be more than where provided due the splitting of symbolic references, and
/// whose previous (_old_) values are the ones seen on in storage after the reference was locked.
@@ -180,7 +195,7 @@ impl crate::Repository {
/// The difference to [`head_ref()`][Self::head_ref()] is that the latter requires the reference to exist,
/// whereas here we merely return a the name of the possibly unborn reference.
pub fn head_name(&self) -> Result<Option<FullName>, reference::find::existing::Error> {
- Ok(self.head()?.referent_name().map(|n| n.to_owned()))
+ Ok(self.head()?.referent_name().map(std::borrow::ToOwned::to_owned))
}
/// Return the reference that `HEAD` points to, or `None` if the head is detached or unborn.
diff --git a/vendor/gix/src/repository/remote.rs b/vendor/gix/src/repository/remote.rs
index 74ebbaea0..be0845178 100644
--- a/vendor/gix/src/repository/remote.rs
+++ b/vendor/gix/src/repository/remote.rs
@@ -28,7 +28,8 @@ impl crate::Repository {
Remote::from_fetch_url(url, false, self)
}
- /// Find the remote with the given `name_or_url` or report an error, similar to [`try_find_remote(…)`][Self::try_find_remote()].
+ /// Find the configured remote with the given `name_or_url` or report an error,
+ /// similar to [`try_find_remote(…)`][Self::try_find_remote()].
///
/// Note that we will obtain remotes only if we deem them [trustworthy][crate::open::Options::filter_config_section()].
pub fn find_remote<'a>(&self, name_or_url: impl Into<&'a BStr>) -> Result<Remote<'_>, find::existing::Error> {
@@ -42,7 +43,7 @@ impl crate::Repository {
/// Find the default remote as configured, or `None` if no such configuration could be found.
///
- /// See [`remote_default_name()`][Self::remote_default_name()] for more information on the `direction` parameter.
+ /// See [`remote_default_name()`](Self::remote_default_name()) for more information on the `direction` parameter.
pub fn find_default_remote(
&self,
direction: remote::Direction,
@@ -51,8 +52,8 @@ impl crate::Repository {
.map(|name| self.find_remote(name.as_ref()))
}
- /// Find the remote with the given `name_or_url` or return `None` if it doesn't exist, for the purpose of fetching or pushing
- /// data to a remote.
+ /// Find the configured remote with the given `name_or_url` or return `None` if it doesn't exist,
+ /// for the purpose of fetching or pushing data.
///
/// There are various error kinds related to partial information or incorrectly formatted URLs or ref-specs.
/// Also note that the created `Remote` may have neither fetch nor push ref-specs set at all.
@@ -62,7 +63,36 @@ impl crate::Repository {
///
/// We will only include information if we deem it [trustworthy][crate::open::Options::filter_config_section()].
pub fn try_find_remote<'a>(&self, name_or_url: impl Into<&'a BStr>) -> Option<Result<Remote<'_>, find::Error>> {
- self.try_find_remote_inner(name_or_url, true)
+ self.try_find_remote_inner(name_or_url.into(), true)
+ }
+
+ /// This method emulates what `git fetch <remote>` does in order to obtain a remote to fetch from.
+ ///
+ /// As such, with `name_or_url` being `Some`, it will:
+ ///
+ /// * use `name_or_url` verbatim if it is a URL, creating a new remote in memory as needed.
+ /// * find the named remote if `name_or_url` is a remote name
+ ///
+ /// If `name_or_url` is `None`:
+ ///
+ /// * use the current `HEAD` branch to find a configured remote
+ /// * fall back to either a generally configured remote or the only configured remote.
+ ///
+ /// Fail if no remote could be found despite all of the above.
+ pub fn find_fetch_remote(&self, name_or_url: Option<&BStr>) -> Result<Remote<'_>, find::for_fetch::Error> {
+ Ok(match name_or_url {
+ Some(name) => match self.try_find_remote(name).and_then(Result::ok) {
+ Some(remote) => remote,
+ None => self.remote_at(gix_url::parse(name)?)?,
+ },
+ None => self
+ .head()?
+ .into_remote(remote::Direction::Fetch)
+ .transpose()?
+ .map(Ok)
+ .or_else(|| self.find_default_remote(remote::Direction::Fetch))
+ .ok_or_else(|| find::for_fetch::Error::ExactlyOneRemoteNotAvailable)??,
+ })
}
/// Similar to [`try_find_remote()`][Self::try_find_remote()], but removes a failure mode if rewritten URLs turn out to be invalid
@@ -72,7 +102,7 @@ impl crate::Repository {
&self,
name_or_url: impl Into<&'a BStr>,
) -> Option<Result<Remote<'_>, find::Error>> {
- self.try_find_remote_inner(name_or_url, false)
+ self.try_find_remote_inner(name_or_url.into(), false)
}
fn try_find_remote_inner<'a>(
diff --git a/vendor/gix/src/repository/revision.rs b/vendor/gix/src/repository/revision.rs
index 3018c2be8..bb9b56d57 100644
--- a/vendor/gix/src/repository/revision.rs
+++ b/vendor/gix/src/repository/revision.rs
@@ -1,4 +1,5 @@
use crate::{bstr::BStr, revision, Id};
+use gix_macros::momo;
/// Methods for resolving revisions by spec or working with the commit graph.
impl crate::Repository {
@@ -8,6 +9,8 @@ impl crate::Repository {
///
/// - `@` actually stands for `HEAD`, whereas `git` resolves it to the object pointed to by `HEAD` without making the
/// `HEAD` ref available for lookups.
+ #[doc(alias = "revparse", alias = "git2")]
+ #[momo]
pub fn rev_parse<'a>(&self, spec: impl Into<&'a BStr>) -> Result<revision::Spec<'_>, revision::spec::parse::Error> {
revision::Spec::from_bstr(
spec,
@@ -20,6 +23,7 @@ impl crate::Repository {
}
/// Parse a revision specification and return single object id as represented by this instance.
+ #[doc(alias = "revparse_single", alias = "git2")]
pub fn rev_parse_single<'repo, 'a>(
&'repo self,
spec: impl Into<&'a BStr>,
@@ -33,6 +37,7 @@ impl crate::Repository {
/// Create the baseline for a revision walk by initializing it with the `tips` to start iterating on.
///
/// It can be configured further before starting the actual walk.
+ #[doc(alias = "revwalk", alias = "git2")]
pub fn rev_walk(
&self,
tips: impl IntoIterator<Item = impl Into<gix_hash::ObjectId>>,
diff --git a/vendor/gix/src/repository/submodule.rs b/vendor/gix/src/repository/submodule.rs
new file mode 100644
index 000000000..a605bfbd3
--- /dev/null
+++ b/vendor/gix/src/repository/submodule.rs
@@ -0,0 +1,96 @@
+use std::rc::Rc;
+
+use crate::{submodule, Repository};
+
+impl Repository {
+ /// Open the `.gitmodules` file as present in the worktree, or return `None` if no such file is available.
+ /// Note that git configuration is also contributing to the result based on the current snapshot.
+ ///
+ /// Note that this method will not look in other places, like the index or the `HEAD` tree.
+ // TODO(submodule): make it use an updated snapshot instead once we have `config()`.
+ pub fn open_modules_file(&self) -> Result<Option<gix_submodule::File>, submodule::open_modules_file::Error> {
+ let path = match self.modules_path() {
+ Some(path) => path,
+ None => return Ok(None),
+ };
+ let buf = match std::fs::read(&path) {
+ Ok(buf) => buf,
+ Err(err) if err.kind() == std::io::ErrorKind::NotFound => return Ok(None),
+ Err(err) => return Err(err.into()),
+ };
+
+ Ok(Some(gix_submodule::File::from_bytes(
+ &buf,
+ path,
+ &self.config.resolved,
+ )?))
+ }
+
+ /// Return a shared [`.gitmodules` file](crate::submodule::File) which is updated automatically if the in-memory snapshot
+ /// has become stale as the underlying file on disk has changed. The snapshot based on the file on disk is shared across all
+ /// clones of this repository.
+ ///
+ /// If a file on disk isn't present, we will try to load it from the index, and finally from the current tree.
+ /// In the latter two cases, the result will not be cached in this repository instance as we can't detect freshness anymore,
+ /// so each time this method is called a new [modules file](submodule::ModulesSnapshot) will be created.
+ ///
+ /// Note that git configuration is also contributing to the result based on the current snapshot.
+ ///
+ // TODO(submodule): make it use an updated snapshot instead once we have `config()`.
+ pub fn modules(&self) -> Result<Option<submodule::ModulesSnapshot>, submodule::modules::Error> {
+ match self.modules.recent_snapshot(
+ || {
+ self.modules_path()
+ .and_then(|path| path.metadata().and_then(|m| m.modified()).ok())
+ },
+ || self.open_modules_file(),
+ )? {
+ Some(m) => Ok(Some(m)),
+ None => {
+ let id = match self.try_index()?.and_then(|index| {
+ index
+ .entry_by_path(submodule::MODULES_FILE.into())
+ .map(|entry| entry.id)
+ }) {
+ Some(id) => id,
+ None => match self
+ .head_commit()?
+ .tree()?
+ .find_entry(submodule::MODULES_FILE)
+ .map(|entry| entry.inner.oid)
+ {
+ Some(id) => id.to_owned(),
+ None => return Ok(None),
+ },
+ };
+ Ok(Some(gix_features::threading::OwnShared::new(
+ gix_submodule::File::from_bytes(&self.find_object(id)?.data, None, &self.config.resolved)
+ .map_err(submodule::open_modules_file::Error::from)?
+ .into(),
+ )))
+ }
+ }
+ }
+
+ /// Return the list of available submodules, or `None` if there is no submodule configuration.
+ #[doc(alias = "git2")]
+ pub fn submodules(&self) -> Result<Option<impl Iterator<Item = crate::Submodule<'_>>>, submodule::modules::Error> {
+ let modules = match self.modules()? {
+ None => return Ok(None),
+ Some(m) => m,
+ };
+ let shared_state = Rc::new(submodule::SharedState::new(self, modules));
+ Ok(Some(
+ shared_state
+ .modules
+ .names()
+ .map(ToOwned::to_owned)
+ .collect::<Vec<_>>()
+ .into_iter()
+ .map(move |name| crate::Submodule {
+ state: shared_state.clone(),
+ name,
+ }),
+ ))
+ }
+}
diff --git a/vendor/gix/src/repository/worktree.rs b/vendor/gix/src/repository/worktree.rs
index f522a3f18..cc6f0bf73 100644
--- a/vendor/gix/src/repository/worktree.rs
+++ b/vendor/gix/src/repository/worktree.rs
@@ -1,4 +1,4 @@
-use crate::{config::cache::util::ApplyLeniencyDefault, worktree, Worktree};
+use crate::{worktree, Worktree};
/// Interact with individual worktrees and their information.
impl crate::Repository {
@@ -37,7 +37,8 @@ impl crate::Repository {
/// Return the currently set worktree if there is one, acting as platform providing a validated worktree base path.
///
/// Note that there would be `None` if this repository is `bare` and the parent [`Repository`][crate::Repository] was instantiated without
- /// registered worktree in the current working dir.
+ /// registered worktree in the current working dir, even if no `.git` file or directory exists.
+ /// It's merely based on configuration, see [Worktree::dot_git_exists()] for a way to perform more validation.
pub fn worktree(&self) -> Option<Worktree<'_>> {
self.work_dir().map(|path| Worktree { parent: self, path })
}
@@ -50,57 +51,95 @@ impl crate::Repository {
self.config.is_bare && self.work_dir().is_none()
}
- /// Open a new copy of the index file and decode it entirely.
+ /// If `id` points to a tree, produce a stream that yields one worktree entry after the other. The index of the tree at `id`
+ /// is returned as well as it is an intermediate byproduct that might be useful to callers.
///
- /// It will use the `index.threads` configuration key to learn how many threads to use.
- /// Note that it may fail if there is no index.
- pub fn open_index(&self) -> Result<gix_index::File, worktree::open_index::Error> {
- let thread_limit = self
- .config
- .resolved
- .string("index", None, "threads")
- .map(|value| crate::config::tree::Index::THREADS.try_into_index_threads(value))
- .transpose()
- .with_lenient_default(self.config.lenient_config)?;
- gix_index::File::at(
- self.index_path(),
- self.object_hash(),
- gix_index::decode::Options {
- thread_limit,
- min_extension_block_in_bytes_for_threading: 0,
- expected_checksum: None,
+ /// The entries will look exactly like they would if one were to check them out, with filters applied.
+ /// The `export-ignore` attribute is used to skip blobs or directories to which it applies.
+ #[cfg(feature = "worktree-stream")]
+ #[gix_macros::momo]
+ pub fn worktree_stream(
+ &self,
+ id: impl Into<gix_hash::ObjectId>,
+ ) -> Result<(gix_worktree_stream::Stream, gix_index::File), crate::repository::worktree_stream::Error> {
+ use gix_odb::{FindExt, HeaderExt};
+ let id = id.into();
+ let header = self.objects.header(id)?;
+ if !header.kind().is_tree() {
+ return Err(crate::repository::worktree_stream::Error::NotATree {
+ id,
+ actual: header.kind(),
+ });
+ }
+
+ // TODO(perf): potential performance improvements could be to use the index at `HEAD` if possible (`index_from_head_tree…()`)
+ // TODO(perf): when loading a non-HEAD tree, we effectively traverse the tree twice. This is usually fast though, and sharing
+ // an object cache between the copies of the ODB handles isn't trivial and needs a lock.
+ let index = self.index_from_tree(&id)?;
+ let mut cache = self
+ .attributes_only(&index, gix_worktree::stack::state::attributes::Source::IdMapping)?
+ .detach();
+ let pipeline =
+ gix_filter::Pipeline::new(cache.attributes_collection(), crate::filter::Pipeline::options(self)?);
+ let objects = self.objects.clone().into_arc().expect("TBD error handling");
+ let stream = gix_worktree_stream::from_tree(
+ id,
+ {
+ let objects = objects.clone();
+ move |id, buf| objects.find(id, buf)
+ },
+ pipeline,
+ move |path, mode, attrs| -> std::io::Result<()> {
+ let entry = cache.at_entry(path, Some(mode.is_tree()), |id, buf| objects.find_blob(id, buf))?;
+ entry.matching_attributes(attrs);
+ Ok(())
},
- )
- .map_err(Into::into)
+ );
+ Ok((stream, index))
}
- /// Return a shared worktree index which is updated automatically if the in-memory snapshot has become stale as the underlying file
- /// on disk has changed.
+ /// Produce an archive from the `stream` and write it to `out` according to `options`.
+ /// Use `blobs` to provide progress for each entry written to `out`, and note that it should already be initialized to the amount
+ /// of expected entries, with `should_interrupt` being queried between each entry to abort if needed, and on each write to `out`.
+ ///
+ /// ### Performance
+ ///
+ /// Be sure that `out` is able to handle a lot of write calls. Otherwise wrap it in a [`BufWriter`][std::io::BufWriter].
///
- /// The index file is shared across all clones of this repository.
- pub fn index(&self) -> Result<worktree::Index, worktree::open_index::Error> {
- self.index
- .recent_snapshot(
- || self.index_path().metadata().and_then(|m| m.modified()).ok(),
- || {
- self.open_index().map(Some).or_else(|err| match err {
- worktree::open_index::Error::IndexFile(gix_index::file::init::Error::Io(err))
- if err.kind() == std::io::ErrorKind::NotFound =>
- {
- Ok(None)
- }
- err => Err(err),
- })
- },
- )
- .and_then(|opt| match opt {
- Some(index) => Ok(index),
- None => Err(worktree::open_index::Error::IndexFile(
- gix_index::file::init::Error::Io(std::io::Error::new(
- std::io::ErrorKind::NotFound,
- format!("Could not find index file at {:?} for opening.", self.index_path()),
- )),
- )),
- })
+ /// ### Additional progress and fine-grained interrupt handling
+ ///
+ /// For additional progress reporting, wrap `out` into a writer that counts throughput on each write.
+ /// This can also be used to react to interrupts on each write, instead of only for each entry.
+ #[cfg(feature = "worktree-archive")]
+ pub fn worktree_archive(
+ &self,
+ mut stream: gix_worktree_stream::Stream,
+ out: impl std::io::Write + std::io::Seek,
+ blobs: impl gix_features::progress::Count,
+ should_interrupt: &std::sync::atomic::AtomicBool,
+ options: gix_archive::Options,
+ ) -> Result<(), crate::repository::worktree_archive::Error> {
+ let mut out = gix_features::interrupt::Write {
+ inner: out,
+ should_interrupt,
+ };
+ if options.format == gix_archive::Format::InternalTransientNonPersistable {
+ std::io::copy(&mut stream.into_read(), &mut out)?;
+ return Ok(());
+ }
+ gix_archive::write_stream_seek(
+ &mut stream,
+ |stream| {
+ if should_interrupt.load(std::sync::atomic::Ordering::Relaxed) {
+ return Err(std::io::Error::new(std::io::ErrorKind::Other, "Cancelled by user").into());
+ }
+ let res = stream.next_entry();
+ blobs.inc();
+ res
+ },
+ out,
+ options,
+ )?;
+ Ok(())
}
}
diff --git a/vendor/gix/src/revision/mod.rs b/vendor/gix/src/revision/mod.rs
index 4b11a8766..3de528ecd 100644
--- a/vendor/gix/src/revision/mod.rs
+++ b/vendor/gix/src/revision/mod.rs
@@ -2,6 +2,7 @@
//!
//! This module provides utilities to walk graphs of revisions and specify revisions and ranges of revisions.
+#[cfg(feature = "revision")]
pub use gix_revision as plumbing;
///
@@ -9,6 +10,7 @@ pub mod walk;
pub use walk::iter::Walk;
///
+#[cfg(feature = "revision")]
pub mod spec;
/// The specification of a revision as parsed from a revision specification like `HEAD@{1}` or `v1.2.3...main`.
@@ -17,6 +19,7 @@ pub mod spec;
/// See the [official git documentation](https://git-scm.com/docs/git-rev-parse#_specifying_revisions) for reference on how
/// to specify revisions and revision ranges.
#[derive(Clone, Debug)]
+#[cfg(feature = "revision")]
pub struct Spec<'repo> {
pub(crate) inner: gix_revision::Spec,
/// The first name of a reference as seen while parsing a `RevSpec`, for completeness.
diff --git a/vendor/gix/src/revision/spec/parse/delegate/mod.rs b/vendor/gix/src/revision/spec/parse/delegate/mod.rs
index 78e4ab9ee..eaf7f5fd6 100644
--- a/vendor/gix/src/revision/spec/parse/delegate/mod.rs
+++ b/vendor/gix/src/revision/spec/parse/delegate/mod.rs
@@ -1,10 +1,7 @@
use std::collections::HashSet;
use gix_hash::ObjectId;
-use gix_revision::spec::{
- parse,
- parse::delegate::{self},
-};
+use gix_revision::spec::{parse, parse::delegate};
use smallvec::SmallVec;
use super::{Delegate, Error, ObjectKindHint};
@@ -209,7 +206,7 @@ impl<'repo> Delegate<'repo> {
.attach(repo)
.peel_to_id_in_place()
.ok()
- .map(|id| id.detach())
+ .map(crate::Id::detach)
}) {
obj_opt.get_or_insert_with(HashSet::default).insert(id);
};
diff --git a/vendor/gix/src/revision/spec/parse/delegate/navigate.rs b/vendor/gix/src/revision/spec/parse/delegate/navigate.rs
index f6e085368..51feb1d76 100644
--- a/vendor/gix/src/revision/spec/parse/delegate/navigate.rs
+++ b/vendor/gix/src/revision/spec/parse/delegate/navigate.rs
@@ -15,6 +15,7 @@ use crate::{
delegate::{handle_errors_and_replacements, peel, Replacements},
Delegate, Error,
},
+ Object,
};
impl<'repo> delegate::Navigate for Delegate<'repo> {
@@ -62,10 +63,9 @@ impl<'repo> delegate::Navigate for Delegate<'repo> {
.all()
.expect("cannot fail without sorting")
.skip(num)
- .filter_map(Result::ok)
- .next()
+ .find_map(Result::ok)
{
- Some(id) => replacements.push((*obj, id.detach())),
+ Some(commit) => replacements.push((*obj, commit.id)),
None => errors.push((
*obj,
Error::AncestorOutOfRange {
@@ -123,9 +123,9 @@ impl<'repo> delegate::Navigate for Delegate<'repo> {
if path.is_empty() {
return Ok(tree_id);
}
- let tree = repo.find_object(tree_id)?.into_tree();
+ let mut tree = repo.find_object(tree_id)?.into_tree();
let entry =
- tree.lookup_entry_by_path(gix_path::from_bstr(path))?
+ tree.peel_to_entry_by_path(gix_path::from_bstr(path))?
.ok_or_else(|| Error::PathNotFound {
path: path.into(),
object: obj.attach(repo).shorten_or_id(),
@@ -142,7 +142,7 @@ impl<'repo> delegate::Navigate for Delegate<'repo> {
}
PeelTo::RecursiveTagObject => {
for oid in objs.iter() {
- match oid.attach(repo).object().and_then(|obj| obj.peel_tags_to_end()) {
+ match oid.attach(repo).object().and_then(Object::peel_tags_to_end) {
Ok(obj) => replacements.push((*oid, obj.id)),
Err(err) => errors.push((*oid, err.into())),
}
@@ -157,9 +157,9 @@ impl<'repo> delegate::Navigate for Delegate<'repo> {
self.unset_disambiguate_call();
self.follow_refs_to_objects_if_needed()?;
- #[cfg(not(feature = "regex"))]
+ #[cfg(not(feature = "revparse-regex"))]
let matches = |message: &BStr| -> bool { message.contains_str(regex) ^ negated };
- #[cfg(feature = "regex")]
+ #[cfg(feature = "revparse-regex")]
let matches = match regex::bytes::Regex::new(regex.to_str_lossy().as_ref()) {
Ok(compiled) => {
let needs_regex = regex::escape(compiled.as_str()) != regex;
@@ -193,8 +193,8 @@ impl<'repo> delegate::Navigate for Delegate<'repo> {
let mut matched = false;
let mut count = 0;
let commits = iter.map(|res| {
- res.map_err(Error::from).and_then(|commit_id| {
- commit_id.object().map_err(Error::from).map(|obj| obj.into_commit())
+ res.map_err(Error::from).and_then(|commit| {
+ commit.id().object().map_err(Error::from).map(Object::into_commit)
})
});
for commit in commits {
@@ -250,8 +250,8 @@ impl<'repo> delegate::Navigate for Delegate<'repo> {
let mut matched = false;
let mut count = 0;
let commits = iter.map(|res| {
- res.map_err(Error::from).and_then(|commit_id| {
- commit_id.object().map_err(Error::from).map(|obj| obj.into_commit())
+ res.map_err(Error::from).and_then(|commit| {
+ commit.id().object().map_err(Error::from).map(Object::into_commit)
})
});
for commit in commits {
diff --git a/vendor/gix/src/revision/spec/parse/delegate/revision.rs b/vendor/gix/src/revision/spec/parse/delegate/revision.rs
index 7ea691a28..2b84dda23 100644
--- a/vendor/gix/src/revision/spec/parse/delegate/revision.rs
+++ b/vendor/gix/src/revision/spec/parse/delegate/revision.rs
@@ -114,7 +114,7 @@ impl<'repo> delegate::Revision for Delegate<'repo> {
ReflogLookup::Entry(no) => {
let r = match &mut self.refs[self.idx] {
Some(r) => r.clone().attach(self.repo),
- val @ None => match self.repo.head().map(|head| head.try_into_referent()) {
+ val @ None => match self.repo.head().map(crate::Head::try_into_referent) {
Ok(Some(r)) => {
*val = Some(r.clone().detach());
r
@@ -139,7 +139,7 @@ impl<'repo> delegate::Revision for Delegate<'repo> {
Some(())
}
None => {
- let available = platform.rev().ok().flatten().map_or(0, |it| it.count());
+ let available = platform.rev().ok().flatten().map_or(0, Iterator::count);
self.err.push(Error::RefLogEntryOutOfRange {
reference: r.detach(),
desired: no,
@@ -190,7 +190,7 @@ impl<'repo> delegate::Revision for Delegate<'repo> {
Ok(Some((ref_name, id))) => {
let id = match self.repo.find_reference(ref_name.as_bstr()) {
Ok(mut r) => {
- let id = r.peel_to_id_in_place().map(|id| id.detach()).unwrap_or(id);
+ let id = r.peel_to_id_in_place().map(crate::Id::detach).unwrap_or(id);
self.refs[self.idx] = Some(r.detach());
id
}
@@ -203,7 +203,7 @@ impl<'repo> delegate::Revision for Delegate<'repo> {
self.err.push(Error::PriorCheckoutOutOfRange {
desired: branch_no,
available: prior_checkouts_iter(&mut head.log_iter())
- .map(|it| it.count())
+ .map(Iterator::count)
.unwrap_or(0),
});
None
diff --git a/vendor/gix/src/revision/spec/parse/mod.rs b/vendor/gix/src/revision/spec/parse/mod.rs
index f69ecc4af..950dfa004 100644
--- a/vendor/gix/src/revision/spec/parse/mod.rs
+++ b/vendor/gix/src/revision/spec/parse/mod.rs
@@ -1,6 +1,7 @@
use std::collections::HashSet;
use gix_hash::ObjectId;
+use gix_macros::momo;
use gix_revision::spec::parse;
use crate::{bstr::BStr, revision::Spec, Repository};
@@ -30,6 +31,7 @@ impl<'repo> Spec<'repo> {
/// Parse `spec` and use information from `repo` to resolve it, using `opts` to learn how to deal with ambiguity.
///
/// Note that it's easier and to use [`repo.rev_parse()`][Repository::rev_parse()] instead.
+ #[momo]
pub fn from_bstr<'a>(spec: impl Into<&'a BStr>, repo: &'repo Repository, opts: Options) -> Result<Self, Error> {
let mut delegate = Delegate::new(repo, opts);
match gix_revision::spec::parse(spec.into(), &mut delegate) {
diff --git a/vendor/gix/src/revision/spec/parse/types.rs b/vendor/gix/src/revision/spec/parse/types.rs
index d852c297e..5dc626eee 100644
--- a/vendor/gix/src/revision/spec/parse/types.rs
+++ b/vendor/gix/src/revision/spec/parse/types.rs
@@ -100,15 +100,15 @@ pub enum Error {
RevWalkIterInit(#[from] crate::reference::iter::init::Error),
#[error(transparent)]
RevWalkAllReferences(#[from] gix_ref::packed::buffer::open::Error),
- #[cfg(feature = "regex")]
+ #[cfg(feature = "revparse-regex")]
#[error(transparent)]
InvalidRegex(#[from] regex::Error),
#[cfg_attr(
- feature = "regex",
+ feature = "revparse-regex",
error("None of {commits_searched} commits from {oid} matched regex {regex:?}")
)]
#[cfg_attr(
- not(feature = "regex"),
+ not(feature = "revparse-regex"),
error("None of {commits_searched} commits from {oid} matched text {regex:?}")
)]
NoRegexMatch {
@@ -117,11 +117,11 @@ pub enum Error {
commits_searched: usize,
},
#[cfg_attr(
- feature = "regex",
+ feature = "revparse-regex",
error("None of {commits_searched} commits reached from all references matched regex {regex:?}")
)]
#[cfg_attr(
- not(feature = "regex"),
+ not(feature = "revparse-regex"),
error("None of {commits_searched} commits reached from all references matched text {regex:?}")
)]
NoRegexMatchAllRefs { regex: BString, commits_searched: usize },
diff --git a/vendor/gix/src/revision/walk.rs b/vendor/gix/src/revision/walk.rs
index 9c545d0d4..5e76ad898 100644
--- a/vendor/gix/src/revision/walk.rs
+++ b/vendor/gix/src/revision/walk.rs
@@ -1,7 +1,7 @@
use gix_hash::ObjectId;
use gix_odb::FindExt;
-use crate::{revision, Repository};
+use crate::{ext::ObjectIdExt, revision, Repository};
/// The error returned by [`Platform::all()`].
#[derive(Debug, thiserror::Error)]
@@ -11,15 +11,90 @@ pub enum Error {
AncestorIter(#[from] gix_traverse::commit::ancestors::Error),
#[error(transparent)]
ShallowCommits(#[from] crate::shallow::open::Error),
+ #[error(transparent)]
+ ConfigBoolean(#[from] crate::config::boolean::Error),
+}
+
+/// Information about a commit that we obtained naturally as part of the iteration.
+#[derive(Debug, Clone)]
+pub struct Info<'repo> {
+ /// The detached id of the commit.
+ pub id: gix_hash::ObjectId,
+ /// All parent ids we have encountered. Note that these will be at most one if [`Parents::First`][gix_traverse::commit::Parents::First] is enabled.
+ pub parent_ids: gix_traverse::commit::ParentIds,
+ /// The time at which the commit was created. It's only `Some(_)` if sorting is not [`Sorting::BreadthFirst`][gix_traverse::commit::Sorting::BreadthFirst],
+ /// as the walk needs to require the commit-date.
+ pub commit_time: Option<gix_date::SecondsSinceUnixEpoch>,
+
+ repo: &'repo Repository,
+}
+
+/// Access
+impl<'repo> Info<'repo> {
+ /// Provide an attached version of our [`id`][Info::id] field.
+ pub fn id(&self) -> crate::Id<'repo> {
+ self.id.attach(self.repo)
+ }
+
+ /// Read the whole object from the object database.
+ ///
+ /// Note that this is an expensive operation which shouldn't be performed unless one needs more than parent ids
+ /// and commit time.
+ pub fn object(&self) -> Result<crate::Commit<'repo>, crate::object::find::existing::Error> {
+ Ok(self.id().object()?.into_commit())
+ }
+
+ /// Provide an iterator yielding attached versions of our [`parent_ids`][Info::parent_ids] field.
+ pub fn parent_ids(&self) -> impl Iterator<Item = crate::Id<'repo>> + '_ {
+ self.parent_ids.iter().map(|id| id.attach(self.repo))
+ }
+
+ /// Returns the commit-time of this commit.
+ ///
+ /// ### Panics
+ ///
+ /// If the iteration wasn't ordered by date.
+ pub fn commit_time(&self) -> gix_date::SecondsSinceUnixEpoch {
+ self.commit_time.expect("traversal involving date caused it to be set")
+ }
+}
+
+/// Initialization and detachment
+impl<'repo> Info<'repo> {
+ /// Create a new instance that represents `info`, but is attached to `repo` as well.
+ pub fn new(info: gix_traverse::commit::Info, repo: &'repo Repository) -> Self {
+ Info {
+ id: info.id,
+ parent_ids: info.parent_ids,
+ commit_time: info.commit_time,
+ repo,
+ }
+ }
+ /// Consume this instance and remove the reference to the underlying repository.
+ ///
+ /// This is useful for sending instances across threads, for example.
+ pub fn detach(self) -> gix_traverse::commit::Info {
+ gix_traverse::commit::Info {
+ id: self.id,
+ parent_ids: self.parent_ids,
+ commit_time: self.commit_time,
+ }
+ }
}
/// A platform to traverse the revision graph by adding starting points as well as points which shouldn't be crossed,
/// returned by [`Repository::rev_walk()`].
+///
+/// **Note that we automatically leverage the commitgraph data structure**, but if you know that additional information like
+/// author or commit messages will be required of *all* commits traversed here, it should be better to avoid trying to load it
+/// by [turning commit-graph support off][Platform::use_commit_graph()]. This certainly is a micro-optimization though.
pub struct Platform<'repo> {
pub(crate) repo: &'repo Repository,
pub(crate) tips: Vec<ObjectId>,
pub(crate) sorting: gix_traverse::commit::Sorting,
pub(crate) parents: gix_traverse::commit::Parents,
+ pub(crate) use_commit_graph: Option<bool>,
+ pub(crate) commit_graph: Option<gix_commitgraph::Graph>,
}
impl<'repo> Platform<'repo> {
@@ -29,13 +104,15 @@ impl<'repo> Platform<'repo> {
tips: tips.into_iter().map(Into::into).collect(),
sorting: Default::default(),
parents: Default::default(),
+ use_commit_graph: None,
+ commit_graph: None,
}
}
}
/// Create-time builder methods
impl<'repo> Platform<'repo> {
- /// Set the sort mode for commits to the given value. The default is to order by topology.
+ /// Set the sort mode for commits to the given value. The default is to order topologically breadth-first.
pub fn sorting(mut self, sorting: gix_traverse::commit::Sorting) -> Self {
self.sorting = sorting;
self
@@ -46,6 +123,26 @@ impl<'repo> Platform<'repo> {
self.parents = gix_traverse::commit::Parents::First;
self
}
+
+ /// Allow using the commitgraph, if present, if `toggle` is `true`, or disallow it with `false`. Set it to `None` to leave
+ /// control over this to the configuration of `core.commitGraph` (the default).
+ ///
+ /// Errors when loading the graph lead to falling back to the object database, it's treated as optional cache.
+ pub fn use_commit_graph(mut self, toggle: impl Into<Option<bool>>) -> Self {
+ self.use_commit_graph = toggle.into();
+ self
+ }
+
+ /// Set or unset the commit-graph to use for the iteration. This is useful if the caller wants to check if a commit-graph exists
+ /// and refer different implementations depending on the outcome.
+ ///
+ /// It interacts with [`use_commit_graph`][Platform::use_commit_graph()] as one would expect, but it's worth noting that if `None`,
+ /// with [`use_commit_graph`][Platform::use_commit_graph()] being `true`, a graph will still be used for iteration.
+ /// To turn the commit-graph off, call [`use_commit_graph(false)`][Platform::use_commit_graph()] instead.
+ pub fn with_commit_graph(mut self, graph: Option<gix_commitgraph::Graph>) -> Self {
+ self.commit_graph = graph;
+ self
+ }
}
/// Produce the iterator
@@ -63,6 +160,8 @@ impl<'repo> Platform<'repo> {
tips,
sorting,
parents,
+ use_commit_graph,
+ commit_graph,
} = self;
Ok(revision::Walk {
repo,
@@ -72,6 +171,8 @@ impl<'repo> Platform<'repo> {
gix_traverse::commit::ancestors::State::default(),
move |oid, buf| repo.objects.find_commit_iter(oid, buf),
{
+ // Note that specific shallow handling for commit-graphs isn't needed as these contain
+ // all information there is, and exclude shallow parents to be structurally consistent.
let shallow_commits = repo.shallow_commits()?;
let mut grafted_parents_to_skip = Vec::new();
let mut buf = Vec::new();
@@ -87,7 +188,7 @@ impl<'repo> Platform<'repo> {
return false;
};
if commits.binary_search(&id).is_ok() {
- if let Ok(commit) = repo.objects.find_commit_iter(id, &mut buf) {
+ if let Ok(commit) = repo.objects.find_commit_iter(&id, &mut buf) {
grafted_parents_to_skip.extend(commit.parent_ids());
grafted_parents_to_skip.sort();
}
@@ -100,7 +201,13 @@ impl<'repo> Platform<'repo> {
},
)
.sorting(sorting)?
- .parents(parents),
+ .parents(parents)
+ .commit_graph(
+ commit_graph.or(use_commit_graph
+ .map_or_else(|| self.repo.config.may_use_commit_graph(), Ok)?
+ .then(|| self.repo.commit_graph().ok())
+ .flatten()),
+ ),
),
})
}
@@ -116,20 +223,21 @@ impl<'repo> Platform<'repo> {
}
pub(crate) mod iter {
- use crate::{ext::ObjectIdExt, Id};
-
/// The iterator returned by [`crate::revision::walk::Platform::all()`].
pub struct Walk<'repo> {
pub(crate) repo: &'repo crate::Repository,
- pub(crate) inner:
- Box<dyn Iterator<Item = Result<gix_hash::ObjectId, gix_traverse::commit::ancestors::Error>> + 'repo>,
+ pub(crate) inner: Box<
+ dyn Iterator<Item = Result<gix_traverse::commit::Info, gix_traverse::commit::ancestors::Error>> + 'repo,
+ >,
}
impl<'repo> Iterator for Walk<'repo> {
- type Item = Result<Id<'repo>, gix_traverse::commit::ancestors::Error>;
+ type Item = Result<super::Info<'repo>, gix_traverse::commit::ancestors::Error>;
fn next(&mut self) -> Option<Self::Item> {
- self.inner.next().map(|res| res.map(|id| id.attach(self.repo)))
+ self.inner
+ .next()
+ .map(|res| res.map(|info| super::Info::new(info, self.repo)))
}
}
}
diff --git a/vendor/gix/src/submodule/errors.rs b/vendor/gix/src/submodule/errors.rs
new file mode 100644
index 000000000..4e41337de
--- /dev/null
+++ b/vendor/gix/src/submodule/errors.rs
@@ -0,0 +1,106 @@
+///
+pub mod open_modules_file {
+ /// The error returned by [Repository::open_modules_file()](crate::Repository::open_modules_file()).
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ Configuration(#[from] gix_config::parse::Error),
+ #[error("Could not read '.gitmodules' file")]
+ Io(#[from] std::io::Error),
+ }
+}
+
+///
+pub mod modules {
+ /// The error returned by [Repository::modules()](crate::Repository::modules()).
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ OpenModulesFile(#[from] crate::submodule::open_modules_file::Error),
+ #[error(transparent)]
+ OpenIndex(#[from] crate::worktree::open_index::Error),
+ #[error("Could not find the .gitmodules file by id in the object database")]
+ FindExistingBlob(#[from] crate::object::find::existing::Error),
+ #[error("Did not find commit in current HEAD to access its tree")]
+ FindHeadCommit(#[from] crate::reference::head_commit::Error),
+ #[error(transparent)]
+ TreeFromCommit(#[from] crate::object::commit::Error),
+ }
+}
+
+///
+pub mod is_active {
+ /// The error returned by [Submodule::is_active()](crate::Submodule::is_active()).
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ InitIsActivePlatform(#[from] gix_submodule::is_active_platform::Error),
+ #[error(transparent)]
+ QueryIsActive(#[from] gix_config::value::Error),
+ #[error(transparent)]
+ InitAttributes(#[from] crate::config::attribute_stack::Error),
+ #[error(transparent)]
+ InitPathspecDefaults(#[from] gix_pathspec::defaults::from_environment::Error),
+ #[error(transparent)]
+ ObtainIndex(#[from] crate::repository::index_or_load_from_head::Error),
+ }
+}
+
+///
+pub mod fetch_recurse {
+ /// The error returned by [Submodule::fetch_recurse()](crate::Submodule::fetch_recurse()).
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ ModuleBoolean(#[from] gix_submodule::config::Error),
+ #[error(transparent)]
+ ConfigurationFallback(#[from] crate::config::key::GenericErrorWithValue),
+ }
+}
+
+///
+pub mod open {
+ /// The error returned by [Submodule::open()](crate::Submodule::open()).
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ OpenRepository(#[from] crate::open::Error),
+ #[error(transparent)]
+ PathConfiguration(#[from] gix_submodule::config::path::Error),
+ }
+}
+
+///
+pub mod index_id {
+ /// The error returned by [Submodule::index_id()](crate::Submodule::index_id()).
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ PathConfiguration(#[from] gix_submodule::config::path::Error),
+ #[error(transparent)]
+ Index(#[from] crate::repository::index_or_load_from_head::Error),
+ }
+}
+
+///
+pub mod head_id {
+ /// The error returned by [Submodule::head_id()](crate::Submodule::head_id()).
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ HeadCommit(#[from] crate::reference::head_commit::Error),
+ #[error("Could not get tree of head commit")]
+ CommitTree(#[from] crate::object::commit::Error),
+ #[error("Could not peel tree to submodule path")]
+ PeelTree(#[from] crate::object::find::existing::Error),
+ #[error(transparent)]
+ PathConfiguration(#[from] gix_submodule::config::path::Error),
+ }
+}
diff --git a/vendor/gix/src/submodule/mod.rs b/vendor/gix/src/submodule/mod.rs
new file mode 100644
index 000000000..52c5938fc
--- /dev/null
+++ b/vendor/gix/src/submodule/mod.rs
@@ -0,0 +1,287 @@
+#![allow(clippy::result_large_err)]
+//! Submodule plumbing and abstractions
+//!
+use std::{
+ borrow::Cow,
+ cell::{Ref, RefCell, RefMut},
+ path::PathBuf,
+};
+
+use gix_odb::FindExt;
+pub use gix_submodule::*;
+
+use crate::{bstr::BStr, repository::IndexPersistedOrInMemory, Repository, Submodule};
+
+pub(crate) type ModulesFileStorage = gix_features::threading::OwnShared<gix_fs::SharedFileSnapshotMut<File>>;
+/// A lazily loaded and auto-updated worktree index.
+pub type ModulesSnapshot = gix_fs::SharedFileSnapshot<File>;
+
+/// The name of the file containing (sub) module information.
+pub(crate) const MODULES_FILE: &str = ".gitmodules";
+
+mod errors;
+pub use errors::*;
+
+/// A platform maintaining state needed to interact with submodules, created by [`Repository::submodules()].
+pub(crate) struct SharedState<'repo> {
+ pub(crate) repo: &'repo Repository,
+ pub(crate) modules: ModulesSnapshot,
+ is_active: RefCell<Option<IsActiveState>>,
+ index: RefCell<Option<IndexPersistedOrInMemory>>,
+}
+
+impl<'repo> SharedState<'repo> {
+ pub(crate) fn new(repo: &'repo Repository, modules: ModulesSnapshot) -> Self {
+ SharedState {
+ repo,
+ modules,
+ is_active: RefCell::new(None),
+ index: RefCell::new(None),
+ }
+ }
+
+ fn index(&self) -> Result<Ref<'_, IndexPersistedOrInMemory>, crate::repository::index_or_load_from_head::Error> {
+ {
+ let mut state = self.index.borrow_mut();
+ if state.is_none() {
+ *state = self.repo.index_or_load_from_head()?.into();
+ }
+ }
+ Ok(Ref::map(self.index.borrow(), |opt| {
+ opt.as_ref().expect("just initialized")
+ }))
+ }
+
+ fn active_state_mut(
+ &self,
+ ) -> Result<(RefMut<'_, IsActivePlatform>, RefMut<'_, gix_worktree::Stack>), is_active::Error> {
+ let mut state = self.is_active.borrow_mut();
+ if state.is_none() {
+ let platform = self
+ .modules
+ .is_active_platform(&self.repo.config.resolved, self.repo.config.pathspec_defaults()?)?;
+ let index = self.index()?;
+ let attributes = self
+ .repo
+ .attributes_only(
+ &index,
+ gix_worktree::stack::state::attributes::Source::WorktreeThenIdMapping
+ .adjust_for_bare(self.repo.is_bare()),
+ )?
+ .detach();
+ *state = Some(IsActiveState { platform, attributes });
+ }
+ Ok(RefMut::map_split(state, |opt| {
+ let state = opt.as_mut().expect("populated above");
+ (&mut state.platform, &mut state.attributes)
+ }))
+ }
+}
+
+struct IsActiveState {
+ platform: IsActivePlatform,
+ attributes: gix_worktree::Stack,
+}
+
+///Access
+impl<'repo> Submodule<'repo> {
+ /// Return the submodule's name.
+ pub fn name(&self) -> &BStr {
+ self.name.as_ref()
+ }
+ /// Return the path at which the submodule can be found, relative to the repository.
+ ///
+ /// For details, see [gix_submodule::File::path()].
+ pub fn path(&self) -> Result<Cow<'_, BStr>, config::path::Error> {
+ self.state.modules.path(self.name())
+ }
+
+ /// Return the url from which to clone or update the submodule.
+ pub fn url(&self) -> Result<gix_url::Url, config::url::Error> {
+ self.state.modules.url(self.name())
+ }
+
+ /// Return the `update` field from this submodule's configuration, if present, or `None`.
+ pub fn update(&self) -> Result<Option<config::Update>, config::update::Error> {
+ self.state.modules.update(self.name())
+ }
+
+ /// Return the `branch` field from this submodule's configuration, if present, or `None`.
+ pub fn branch(&self) -> Result<Option<config::Branch>, config::branch::Error> {
+ self.state.modules.branch(self.name())
+ }
+
+ /// Return the `fetchRecurseSubmodules` field from this submodule's configuration, or retrieve the value from `fetch.recurseSubmodules` if unset.
+ pub fn fetch_recurse(&self) -> Result<Option<config::FetchRecurse>, fetch_recurse::Error> {
+ Ok(match self.state.modules.fetch_recurse(self.name())? {
+ Some(val) => Some(val),
+ None => self
+ .state
+ .repo
+ .config
+ .resolved
+ .boolean_by_key("fetch.recurseSubmodules")
+ .map(|res| crate::config::tree::Fetch::RECURSE_SUBMODULES.try_into_recurse_submodules(res))
+ .transpose()?,
+ })
+ }
+
+ /// Return the `ignore` field from this submodule's configuration, if present, or `None`.
+ pub fn ignore(&self) -> Result<Option<config::Ignore>, config::Error> {
+ self.state.modules.ignore(self.name())
+ }
+
+ /// Return the `shallow` field from this submodule's configuration, if present, or `None`.
+ ///
+ /// If `true`, the submodule will be checked out with `depth = 1`. If unset, `false` is assumed.
+ pub fn shallow(&self) -> Result<Option<bool>, gix_config::value::Error> {
+ self.state.modules.shallow(self.name())
+ }
+
+ /// Returns true if this submodule is considered active and can thus participate in an operation.
+ ///
+ /// Please see the [plumbing crate documentation](gix_submodule::IsActivePlatform::is_active()) for details.
+ pub fn is_active(&self) -> Result<bool, is_active::Error> {
+ let (mut platform, mut attributes) = self.state.active_state_mut()?;
+ let is_active = platform.is_active(&self.state.repo.config.resolved, self.name.as_ref(), {
+ &mut |relative_path, case, is_dir, out| {
+ attributes
+ .set_case(case)
+ .at_entry(relative_path, Some(is_dir), |id, buf| {
+ self.state.repo.objects.find_blob(id, buf)
+ })
+ .map_or(false, |platform| platform.matching_attributes(out))
+ }
+ })?;
+ Ok(is_active)
+ }
+
+ /// Return the object id of the submodule as stored in the index of the superproject,
+ /// or `None` if it was deleted from the index.
+ ///
+ /// If `None`, but `Some()` when calling [`Self::head_id()`], then the submodule was just deleted but the change
+ /// wasn't yet committed. Note that `None` is also returned if the entry at the submodule path isn't a submodule.
+ /// If `Some()`, but `None` when calling [`Self::head_id()`], then the submodule was just added without having committed the change.
+ pub fn index_id(&self) -> Result<Option<gix_hash::ObjectId>, index_id::Error> {
+ let path = self.path()?;
+ Ok(self
+ .state
+ .index()?
+ .entry_by_path(&path)
+ .and_then(|entry| (entry.mode == gix_index::entry::Mode::COMMIT).then_some(entry.id)))
+ }
+
+ /// Return the object id of the submodule as stored in `HEAD^{tree}` of the superproject, or `None` if it wasn't yet committed.
+ ///
+ /// If `Some()`, but `None` when calling [`Self::index_id()`], then the submodule was just deleted but the change
+ /// wasn't yet committed. Note that `None` is also returned if the entry at the submodule path isn't a submodule.
+ /// If `None`, but `Some()` when calling [`Self::index_id()`], then the submodule was just added without having committed the change.
+ pub fn head_id(&self) -> Result<Option<gix_hash::ObjectId>, head_id::Error> {
+ let path = self.path()?;
+ Ok(self
+ .state
+ .repo
+ .head_commit()?
+ .tree()?
+ .peel_to_entry_by_path(gix_path::from_bstr(path.as_ref()))?
+ .and_then(|entry| (entry.mode() == gix_object::tree::EntryMode::Commit).then_some(entry.inner.oid)))
+ }
+
+ /// Return the path at which the repository of the submodule should be located.
+ ///
+ /// The directory might not exist yet.
+ pub fn git_dir(&self) -> PathBuf {
+ self.state
+ .repo
+ .common_dir()
+ .join("modules")
+ .join(gix_path::from_bstr(self.name()))
+ }
+
+ /// Return the path to the location at which the workdir would be checked out.
+ ///
+ /// Note that it may be a path relative to the repository if, for some reason, the parent directory
+ /// doesn't have a working dir set.
+ pub fn work_dir(&self) -> Result<PathBuf, config::path::Error> {
+ let worktree_git = gix_path::from_bstr(self.path()?);
+ Ok(match self.state.repo.work_dir() {
+ None => worktree_git.into_owned(),
+ Some(prefix) => prefix.join(worktree_git),
+ })
+ }
+
+ /// Return the path at which the repository of the submodule should be located, or the path inside of
+ /// the superproject's worktree where it actually *is* located if the submodule in the 'old-form', thus is a directory
+ /// inside of the superproject's work-tree.
+ ///
+ /// Note that 'old-form' paths returned aren't verified, i.e. the `.git` repository might be corrupt or otherwise
+ /// invalid - it's left to the caller to try to open it.
+ ///
+ /// Also note that the returned path may not actually exist.
+ pub fn git_dir_try_old_form(&self) -> Result<PathBuf, config::path::Error> {
+ let worktree_git = self.work_dir()?.join(gix_discover::DOT_GIT_DIR);
+ Ok(if worktree_git.is_dir() {
+ worktree_git
+ } else {
+ self.git_dir()
+ })
+ }
+
+ /// Query various parts of the submodule and assemble it into state information.
+ #[doc(alias = "status", alias = "git2")]
+ pub fn state(&self) -> Result<State, config::path::Error> {
+ let maybe_old_path = self.git_dir_try_old_form()?;
+ let git_dir = self.git_dir();
+ let worktree_git = self.work_dir()?.join(gix_discover::DOT_GIT_DIR);
+ let superproject_configuration = self
+ .state
+ .repo
+ .config
+ .resolved
+ .sections_by_name("submodule")
+ .into_iter()
+ .flatten()
+ .any(|section| section.header().subsection_name() == Some(self.name.as_ref()));
+ Ok(State {
+ repository_exists: maybe_old_path.is_dir(),
+ is_old_form: maybe_old_path != git_dir,
+ worktree_checkout: worktree_git.exists(),
+ superproject_configuration,
+ })
+ }
+
+ /// Open the submodule as repository, or `None` if the submodule wasn't initialized yet.
+ ///
+ /// More states can be derived here:
+ ///
+ /// * *initialized* - a repository exists, i.e. `Some(repo)` and the working tree is present.
+ /// * *uninitialized* - a repository does not exist, i.e. `None`
+ /// * *deinitialized* - a repository does exist, i.e. `Some(repo)`, but its working tree is empty.
+ ///
+ /// Also see the [state()](Self::state()) method for learning about the submodule.
+ /// The repository can also be used to learn about the submodule `HEAD`, i.e. where its working tree is at,
+ /// which may differ compared to the superproject's index or `HEAD` commit.
+ pub fn open(&self) -> Result<Option<Repository>, open::Error> {
+ match crate::open_opts(self.git_dir_try_old_form()?, self.state.repo.options.clone()) {
+ Ok(repo) => Ok(Some(repo)),
+ Err(crate::open::Error::NotARepository { .. }) => Ok(None),
+ Err(err) => Err(err.into()),
+ }
+ }
+}
+
+/// A summary of the state of all parts forming a submodule, which allows to answer various questions about it.
+///
+/// Note that expensive questions about its presence in the `HEAD` or the `index` are left to the caller.
+#[derive(Default, Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)]
+pub struct State {
+ /// if the submodule repository has been cloned.
+ pub repository_exists: bool,
+ /// if the submodule repository is located directly in the worktree of the superproject.
+ pub is_old_form: bool,
+ /// if the worktree is checked out.
+ pub worktree_checkout: bool,
+ /// If submodule configuration was found in the superproject's `.git/config` file.
+ /// Note that the presence of a single section is enough, independently of the actual values.
+ pub superproject_configuration: bool,
+}
diff --git a/vendor/gix/src/types.rs b/vendor/gix/src/types.rs
index cdb5f731d..0739cdd25 100644
--- a/vendor/gix/src/types.rs
+++ b/vendor/gix/src/types.rs
@@ -5,6 +5,7 @@ use gix_hash::ObjectId;
use crate::{head, remote};
/// A worktree checkout containing the files of the repository in consumable form.
+#[derive(Debug, Clone)]
pub struct Worktree<'repo> {
pub(crate) parent: &'repo Repository,
/// The root path of the checkout.
@@ -137,7 +138,10 @@ pub struct Repository {
///
/// Particularly useful when following linked worktrees and instantiating new equally configured worktree repositories.
pub(crate) options: crate::open::Options,
+ #[cfg(feature = "index")]
pub(crate) index: crate::worktree::IndexStorage,
+ #[cfg(feature = "attributes")]
+ pub(crate) modules: crate::submodule::ModulesFileStorage,
pub(crate) shallow_commits: crate::shallow::CommitsStorage,
}
@@ -148,6 +152,9 @@ pub struct Repository {
///
/// Note that this type purposefully isn't very useful until it is converted into a thread-local repository with `to_thread_local()`,
/// it's merely meant to be able to exist in a `Sync` context.
+///
+/// Note that it can also cheaply be cloned, and it will retain references to all contained resources.
+#[derive(Clone)]
pub struct ThreadSafeRepository {
/// A store for references to point at objects
pub refs: crate::RefStore,
@@ -161,7 +168,10 @@ pub struct ThreadSafeRepository {
/// options obtained when instantiating this repository for use when following linked worktrees.
pub(crate) linked_worktree_options: crate::open::Options,
/// The index of this instances worktree.
+ #[cfg(feature = "index")]
pub(crate) index: crate::worktree::IndexStorage,
+ #[cfg(feature = "attributes")]
+ pub(crate) modules: crate::submodule::ModulesFileStorage,
pub(crate) shallow_commits: crate::shallow::CommitsStorage,
}
@@ -191,3 +201,46 @@ pub struct Remote<'repo> {
// pub(crate) prune_tags: bool,
pub(crate) repo: &'repo Repository,
}
+
+/// A utility to make matching against pathspecs simple.
+///
+/// Note that to perform pathspec matching, attribute access might need to be provided. For that, we use our own
+/// and argue that the implementation is only going to incur costs for it when a pathspec matches *and* has attributes.
+/// Should this potential duplication of effort to maintain attribute state be unacceptable, the user may fall back
+/// to the underlying plumbing.
+#[derive(Clone)]
+#[cfg(feature = "attributes")]
+pub struct Pathspec<'repo> {
+ pub(crate) repo: &'repo Repository,
+ /// The cache to power attribute access. It's only initialized if we have a pattern with attributes.
+ pub(crate) stack: Option<gix_worktree::Stack>,
+ /// The prepared search to use for checking matches.
+ pub(crate) search: gix_pathspec::Search,
+}
+
+/// Like [`Pathspec`], but without a Repository reference and with minimal API.
+#[derive(Clone)]
+#[cfg(feature = "attributes")]
+pub struct PathspecDetached {
+ /// The cache to power attribute access. It's only initialized if we have a pattern with attributes.
+ pub stack: Option<gix_worktree::Stack>,
+ /// The prepared search to use for checking matches.
+ pub search: gix_pathspec::Search,
+ /// A thread-safe version of an ODB.
+ pub odb: gix_odb::HandleArc,
+}
+
+/// A stand-in for the submodule of a particular name.
+#[derive(Clone)]
+#[cfg(feature = "attributes")]
+pub struct Submodule<'repo> {
+ pub(crate) state: std::rc::Rc<crate::submodule::SharedState<'repo>>,
+ pub(crate) name: crate::bstr::BString,
+}
+
+/// A utility to access `.gitattributes` and `.gitignore` information efficiently.
+#[cfg(any(feature = "attributes", feature = "excludes"))]
+pub struct AttributeStack<'repo> {
+ pub(crate) repo: &'repo Repository,
+ pub(crate) inner: gix_worktree::Stack,
+}
diff --git a/vendor/gix/src/worktree/mod.rs b/vendor/gix/src/worktree/mod.rs
index 8db123554..b0a1cc6f4 100644
--- a/vendor/gix/src/worktree/mod.rs
+++ b/vendor/gix/src/worktree/mod.rs
@@ -1,14 +1,23 @@
use std::path::PathBuf;
+#[cfg(feature = "worktree-archive")]
+pub use gix_archive as archive;
+#[cfg(feature = "excludes")]
pub use gix_worktree::*;
+#[cfg(feature = "worktree-mutation")]
+pub use gix_worktree_state as state;
+#[cfg(feature = "worktree-stream")]
+pub use gix_worktree_stream as stream;
use crate::{
bstr::{BStr, BString},
Repository,
};
+#[cfg(feature = "index")]
pub(crate) type IndexStorage = gix_features::threading::OwnShared<gix_fs::SharedFileSnapshotMut<gix_index::File>>;
/// A lazily loaded and auto-updated worktree index.
+#[cfg(feature = "index")]
pub type Index = gix_fs::SharedFileSnapshot<gix_index::File>;
/// A stand-in to a worktree as result of a worktree iteration.
@@ -54,6 +63,13 @@ impl<'repo> crate::Worktree<'repo> {
pub fn id(&self) -> Option<&BStr> {
id(self.parent.git_dir(), self.parent.common_dir.is_some())
}
+
+ /// Returns true if the `.git` file or directory exists within the worktree.
+ ///
+ /// This is an indicator for the worktree to be checked out particularly if the parent repository is a submodule.
+ pub fn dot_git_exists(&self) -> bool {
+ self.path.join(gix_discover::DOT_GIT_DIR).exists()
+ }
}
pub(crate) fn id(git_dir: &std::path::Path, has_common_dir: bool) -> Option<&BStr> {
@@ -70,6 +86,7 @@ pub(crate) fn id(git_dir: &std::path::Path, has_common_dir: bool) -> Option<&BSt
pub mod proxy;
///
+#[cfg(feature = "index")]
pub mod open_index {
/// The error returned by [`Worktree::open_index()`][crate::Worktree::open_index()].
#[derive(Debug, thiserror::Error)]
@@ -78,7 +95,11 @@ pub mod open_index {
#[error(transparent)]
ConfigIndexThreads(#[from] crate::config::key::GenericErrorWithValue),
#[error(transparent)]
+ ConfigSkipHash(#[from] crate::config::boolean::Error),
+ #[error(transparent)]
IndexFile(#[from] gix_index::file::init::Error),
+ #[error(transparent)]
+ IndexCorrupt(#[from] gix_index::file::verify::Error),
}
impl<'repo> crate::Worktree<'repo> {
@@ -95,7 +116,10 @@ pub mod open_index {
}
///
+#[cfg(feature = "excludes")]
pub mod excludes {
+ use crate::AttributeStack;
+
/// The error returned by [`Worktree::excludes()`][crate::Worktree::excludes()].
#[derive(Debug, thiserror::Error)]
#[allow(missing_docs)]
@@ -116,44 +140,115 @@ pub mod excludes {
///
/// When only excludes are desired, this is the most efficient way to obtain them. Otherwise use
/// [`Worktree::attributes()`][crate::Worktree::attributes()] for accessing both attributes and excludes.
- pub fn excludes(&self, overrides: Option<gix_ignore::Search>) -> Result<gix_worktree::Cache, Error> {
+ pub fn excludes(&self, overrides: Option<gix_ignore::Search>) -> Result<AttributeStack<'_>, Error> {
let index = self.index()?;
Ok(self.parent.excludes(
&index,
overrides,
- gix_worktree::cache::state::ignore::Source::WorktreeThenIdMappingIfNotSkipped,
+ gix_worktree::stack::state::ignore::Source::WorktreeThenIdMappingIfNotSkipped,
)?)
}
}
}
///
+#[cfg(feature = "attributes")]
pub mod attributes {
- /// The error returned by [`Worktree::attributes()`][crate::Worktree::attributes()].
+ use crate::{AttributeStack, Worktree};
+
+ /// The error returned by [`Worktree::attributes()`].
#[derive(Debug, thiserror::Error)]
#[allow(missing_docs)]
pub enum Error {
#[error(transparent)]
OpenIndex(#[from] crate::worktree::open_index::Error),
#[error(transparent)]
- CreateCache(#[from] crate::attributes::Error),
+ CreateCache(#[from] crate::repository::attributes::Error),
}
- impl<'repo> crate::Worktree<'repo> {
+ impl<'repo> Worktree<'repo> {
/// Configure a file-system cache checking if files below the repository are excluded or for querying their attributes.
///
/// This takes into consideration all the usual repository configuration, namely:
///
/// * `$XDG_CONFIG_HOME/…/ignore|attributes` if `core.excludesFile|attributesFile` is *not* set, otherwise use the configured file.
/// * `$GIT_DIR/info/exclude|attributes` if present.
- pub fn attributes(&self, overrides: Option<gix_ignore::Search>) -> Result<gix_worktree::Cache, Error> {
+ pub fn attributes(&self, overrides: Option<gix_ignore::Search>) -> Result<AttributeStack<'repo>, Error> {
let index = self.index()?;
Ok(self.parent.attributes(
&index,
- gix_worktree::cache::state::attributes::Source::WorktreeThenIdMapping,
- gix_worktree::cache::state::ignore::Source::WorktreeThenIdMappingIfNotSkipped,
+ gix_worktree::stack::state::attributes::Source::WorktreeThenIdMapping,
+ gix_worktree::stack::state::ignore::Source::WorktreeThenIdMappingIfNotSkipped,
overrides,
)?)
}
+
+ /// Like [attributes()][Self::attributes()], but without access to exclude/ignore information.
+ pub fn attributes_only(&self) -> Result<AttributeStack<'repo>, Error> {
+ let index = self.index()?;
+ self.parent
+ .attributes_only(
+ &index,
+ gix_worktree::stack::state::attributes::Source::WorktreeThenIdMapping,
+ )
+ .map_err(|err| Error::CreateCache(err.into()))
+ }
+ }
+}
+
+///
+#[cfg(feature = "attributes")]
+pub mod pathspec {
+ use crate::{
+ bstr::BStr,
+ config::{cache::util::ApplyLeniencyDefaultValue, tree::gitoxide},
+ Worktree,
+ };
+
+ /// The error returned by [`Worktree::pathspec()`].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ Init(#[from] crate::pathspec::init::Error),
+ #[error(transparent)]
+ OpenIndex(#[from] crate::worktree::open_index::Error),
+ }
+
+ impl<'repo> Worktree<'repo> {
+ /// Configure pathspecs `patterns` to be matched against, with pathspec attributes read from the worktree and then from the index
+ /// if needed.
+ ///
+ /// ### Deviation
+ ///
+ /// Pathspec attributes match case-insensitively by default if the underlying filesystem is configured that way.
+ pub fn pathspec(
+ &self,
+ patterns: impl IntoIterator<Item = impl AsRef<BStr>>,
+ ) -> Result<crate::Pathspec<'repo>, Error> {
+ let index = self.index()?;
+ let inherit_ignore_case = self
+ .parent
+ .config
+ .resolved
+ .boolean_by_key("gitoxide.pathspec.inheritIgnoreCase")
+ .map(|res| {
+ gitoxide::Pathspec::INHERIT_IGNORE_CASE
+ .enrich_error(res)
+ .with_lenient_default_value(
+ self.parent.config.lenient_config,
+ gitoxide::Pathspec::INHERIT_IGNORE_CASE_DEFAULT,
+ )
+ })
+ .transpose()
+ .map_err(|err| Error::Init(crate::pathspec::init::Error::Defaults(err.into())))?
+ .unwrap_or(gitoxide::Pathspec::INHERIT_IGNORE_CASE_DEFAULT);
+ Ok(self.parent.pathspec(
+ patterns,
+ inherit_ignore_case,
+ &index,
+ gix_worktree::stack::state::attributes::Source::WorktreeThenIdMapping,
+ )?)
+ }
}
}