Diffstat (limited to 'vendor/gix/src/remote')
-rw-r--r--  vendor/gix/src/remote/access.rs                                105
-rw-r--r--  vendor/gix/src/remote/build.rs                                  84
-rw-r--r--  vendor/gix/src/remote/connect.rs                               166
-rw-r--r--  vendor/gix/src/remote/connection/access.rs                      67
-rw-r--r--  vendor/gix/src/remote/connection/fetch/config.rs                26
-rw-r--r--  vendor/gix/src/remote/connection/fetch/error.rs                 41
-rw-r--r--  vendor/gix/src/remote/connection/fetch/mod.rs                  240
-rw-r--r--  vendor/gix/src/remote/connection/fetch/negotiate.rs             78
-rw-r--r--  vendor/gix/src/remote/connection/fetch/receive_pack.rs         238
-rw-r--r--  vendor/gix/src/remote/connection/fetch/update_refs/mod.rs      274
-rw-r--r--  vendor/gix/src/remote/connection/fetch/update_refs/tests.rs    607
-rw-r--r--  vendor/gix/src/remote/connection/fetch/update_refs/update.rs   128
-rw-r--r--  vendor/gix/src/remote/connection/mod.rs                         29
-rw-r--r--  vendor/gix/src/remote/connection/ref_map.rs                    268
-rw-r--r--  vendor/gix/src/remote/errors.rs                                 45
-rw-r--r--  vendor/gix/src/remote/fetch.rs                                 166
-rw-r--r--  vendor/gix/src/remote/init.rs                                  116
-rw-r--r--  vendor/gix/src/remote/mod.rs                                    62
-rw-r--r--  vendor/gix/src/remote/name.rs                                   84
-rw-r--r--  vendor/gix/src/remote/save.rs                                  125
-rw-r--r--  vendor/gix/src/remote/url/mod.rs                                 7
-rw-r--r--  vendor/gix/src/remote/url/rewrite.rs                           100
-rw-r--r--  vendor/gix/src/remote/url/scheme_permission.rs                 120
23 files changed, 3176 insertions, 0 deletions
diff --git a/vendor/gix/src/remote/access.rs b/vendor/gix/src/remote/access.rs
new file mode 100644
index 000000000..1a1cee5de
--- /dev/null
+++ b/vendor/gix/src/remote/access.rs
@@ -0,0 +1,105 @@
+use gix_refspec::RefSpec;
+
+use crate::{bstr::BStr, remote, Remote};
+
+/// Access
+impl<'repo> Remote<'repo> {
+ /// Return the name of this remote or `None` if it wasn't persisted to disk yet.
+ pub fn name(&self) -> Option<&remote::Name<'static>> {
+ self.name.as_ref()
+ }
+
+ /// Return our repository reference.
+ pub fn repo(&self) -> &'repo crate::Repository {
+ self.repo
+ }
+
+ /// Return the set of ref-specs used for `direction`, which may be empty, in order of occurrence in the configuration.
+ pub fn refspecs(&self, direction: remote::Direction) -> &[RefSpec] {
+ match direction {
+ remote::Direction::Fetch => &self.fetch_specs,
+ remote::Direction::Push => &self.push_specs,
+ }
+ }
+
+ /// Return how we handle tags when fetching the remote.
+ pub fn fetch_tags(&self) -> remote::fetch::Tags {
+ self.fetch_tags
+ }
+
+ /// Return the url used for the given `direction` with rewrites from `url.<base>.insteadOf|pushInsteadOf`, unless the instance
+ /// was created with one of the `_without_url_rewrite()` methods.
+ /// For pushing, this is the `remote.<name>.pushUrl` or the `remote.<name>.url` used for fetching, and for fetching it's
+ /// the `remote.<name>.url`.
+ /// Note that it's possible to only have the push url set, in which case there will be no way to fetch from the remote as
+ /// the push-url isn't used for that.
+ pub fn url(&self, direction: remote::Direction) -> Option<&gix_url::Url> {
+ match direction {
+ remote::Direction::Fetch => self.url_alias.as_ref().or(self.url.as_ref()),
+ remote::Direction::Push => self
+ .push_url_alias
+ .as_ref()
+ .or(self.push_url.as_ref())
+ .or_else(|| self.url(remote::Direction::Fetch)),
+ }
+ }
+}
+
+/// Modification
+impl Remote<'_> {
+ /// Read `url.<base>.insteadOf|pushInsteadOf` configuration variables and apply them to our urls, changing them in place.
+ ///
+ /// This happens only once, and one of the urls may be changed even when reporting an error.
+ /// If both urls fail, only the first error (for fetch urls) is reported.
+ pub fn rewrite_urls(&mut self) -> Result<&mut Self, remote::init::Error> {
+ let url_err = match remote::init::rewrite_url(&self.repo.config, self.url.as_ref(), remote::Direction::Fetch) {
+ Ok(url) => {
+ self.url_alias = url;
+ None
+ }
+ Err(err) => err.into(),
+ };
+ let push_url_err =
+ match remote::init::rewrite_url(&self.repo.config, self.push_url.as_ref(), remote::Direction::Push) {
+ Ok(url) => {
+ self.push_url_alias = url;
+ None
+ }
+ Err(err) => err.into(),
+ };
+ url_err.or(push_url_err).map(Err::<&mut Self, _>).transpose()?;
+ Ok(self)
+ }
+
+ /// Replace all currently set refspecs, typically from configuration, with the given `specs` for `direction`,
+ /// failing if one of the input specs could not be parsed.
+ pub fn replace_refspecs<Spec>(
+ &mut self,
+ specs: impl IntoIterator<Item = Spec>,
+ direction: remote::Direction,
+ ) -> Result<(), gix_refspec::parse::Error>
+ where
+ Spec: AsRef<BStr>,
+ {
+ use remote::Direction::*;
+ let specs: Vec<_> = specs
+ .into_iter()
+ .map(|spec| {
+ gix_refspec::parse(
+ spec.as_ref(),
+ match direction {
+ Push => gix_refspec::parse::Operation::Push,
+ Fetch => gix_refspec::parse::Operation::Fetch,
+ },
+ )
+ .map(|url| url.to_owned())
+ })
+ .collect::<Result<_, _>>()?;
+ let dst = match direction {
+ Push => &mut self.push_specs,
+ Fetch => &mut self.fetch_specs,
+ };
+ *dst = specs;
+ Ok(())
+ }
+}
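
For orientation, a minimal sketch of how this access and modification API might be used from application code. It assumes the `Remote` was looked up elsewhere (for example via `Repository::find_remote("origin")`, which is not part of this file), and the refspec string is only illustrative.

    use gix::remote::Direction;

    // Sketch only: inspect a remote and replace its fetch refspecs in place.
    fn inspect_and_adjust(remote: &mut gix::Remote<'_>) -> Result<(), Box<dyn std::error::Error>> {
        // `name()` is `None` for remotes that were never persisted to disk.
        let persisted = remote.name().is_some();

        // The push url falls back to the fetch url if only the latter is configured.
        let fetch_url = remote.url(Direction::Fetch).map(|url| url.to_bstring());
        let push_url = remote.url(Direction::Push).map(|url| url.to_bstring());

        // Refspecs are returned in configuration order and may be empty.
        let configured_fetch_specs = remote.refspecs(Direction::Fetch).len();

        // Replace all fetch specs at once; a parse failure of any spec is an error.
        remote.replace_refspecs(["+refs/heads/*:refs/remotes/origin/*"], Direction::Fetch)?;

        let _ = (persisted, fetch_url, push_url, configured_fetch_specs);
        Ok(())
    }
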
diff --git a/vendor/gix/src/remote/build.rs b/vendor/gix/src/remote/build.rs
new file mode 100644
index 000000000..10c216537
--- /dev/null
+++ b/vendor/gix/src/remote/build.rs
@@ -0,0 +1,84 @@
+use std::convert::TryInto;
+
+use crate::{bstr::BStr, remote, Remote};
+
+/// Builder methods
+impl Remote<'_> {
+ /// Set the `url` to be used when pushing data to a remote.
+ pub fn push_url<Url, E>(self, url: Url) -> Result<Self, remote::init::Error>
+ where
+ Url: TryInto<gix_url::Url, Error = E>,
+ gix_url::parse::Error: From<E>,
+ {
+ self.push_url_inner(url, true)
+ }
+
+ /// Set the `url` to be used when pushing data to a remote, without applying rewrite rules in case these could be faulty,
+ /// eliminating one failure mode.
+ pub fn push_url_without_url_rewrite<Url, E>(self, url: Url) -> Result<Self, remote::init::Error>
+ where
+ Url: TryInto<gix_url::Url, Error = E>,
+ gix_url::parse::Error: From<E>,
+ {
+ self.push_url_inner(url, false)
+ }
+
+ /// Configure how tags should be handled when fetching from the remote.
+ pub fn with_fetch_tags(mut self, tags: remote::fetch::Tags) -> Self {
+ self.fetch_tags = tags;
+ self
+ }
+
+ fn push_url_inner<Url, E>(mut self, push_url: Url, should_rewrite_urls: bool) -> Result<Self, remote::init::Error>
+ where
+ Url: TryInto<gix_url::Url, Error = E>,
+ gix_url::parse::Error: From<E>,
+ {
+ let push_url = push_url
+ .try_into()
+ .map_err(|err| remote::init::Error::Url(err.into()))?;
+ self.push_url = push_url.into();
+
+ let (_, push_url_alias) = should_rewrite_urls
+ .then(|| remote::init::rewrite_urls(&self.repo.config, None, self.push_url.as_ref()))
+ .unwrap_or(Ok((None, None)))?;
+ self.push_url_alias = push_url_alias;
+
+ Ok(self)
+ }
+
+ /// Add `specs` as refspecs for `direction` to our list if they are unique, or ignore them otherwise.
+ pub fn with_refspecs<Spec>(
+ mut self,
+ specs: impl IntoIterator<Item = Spec>,
+ direction: remote::Direction,
+ ) -> Result<Self, gix_refspec::parse::Error>
+ where
+ Spec: AsRef<BStr>,
+ {
+ use remote::Direction::*;
+ let new_specs = specs
+ .into_iter()
+ .map(|spec| {
+ gix_refspec::parse(
+ spec.as_ref(),
+ match direction {
+ Push => gix_refspec::parse::Operation::Push,
+ Fetch => gix_refspec::parse::Operation::Fetch,
+ },
+ )
+ .map(|s| s.to_owned())
+ })
+ .collect::<Result<Vec<_>, _>>()?;
+ let specs = match direction {
+ Push => &mut self.push_specs,
+ Fetch => &mut self.fetch_specs,
+ };
+ for spec in new_specs {
+ if !specs.contains(&spec) {
+ specs.push(spec);
+ }
+ }
+ Ok(self)
+ }
+}
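
A hedged sketch of chaining the builder methods above after a remote has been created or found elsewhere. The url and refspec values are placeholders, and whether a plain `&str` converts here depends on the `TryInto<gix_url::Url>` implementations available.

    use gix::remote::{fetch::Tags, Direction};

    // Sketch only: configure tag handling, extra fetch specs and a push url.
    fn configure(remote: gix::Remote<'_>) -> Result<gix::Remote<'_>, Box<dyn std::error::Error>> {
        let remote = remote
            // Also fetch tags that point into the fetched history (the `include-tag` behaviour).
            .with_fetch_tags(Tags::Included)
            // Duplicate specs are ignored, so calling this repeatedly is harmless.
            .with_refspecs(["+refs/heads/*:refs/remotes/origin/*"], Direction::Fetch)?
            // The push url gets the same `insteadOf`/`pushInsteadOf` rewriting as other urls,
            // unless `push_url_without_url_rewrite()` is used instead.
            .push_url("ssh://git@example.com/user/repo.git")?;
        Ok(remote)
    }
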
diff --git a/vendor/gix/src/remote/connect.rs b/vendor/gix/src/remote/connect.rs
new file mode 100644
index 000000000..8e656975e
--- /dev/null
+++ b/vendor/gix/src/remote/connect.rs
@@ -0,0 +1,166 @@
+#![allow(clippy::result_large_err)]
+use gix_protocol::transport::client::Transport;
+
+use crate::{remote::Connection, Progress, Remote};
+
+mod error {
+ use crate::{bstr::BString, config, remote};
+
+ /// The error returned by [connect()][crate::Remote::connect()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("Could not obtain options for connecting via ssh")]
+ SshOptions(#[from] config::ssh_connect_options::Error),
+ #[error("Could not obtain the current directory")]
+ CurrentDir(#[from] std::io::Error),
+ #[error("Could not access remote repository at \"{}\"", directory.display())]
+ InvalidRemoteRepositoryPath { directory: std::path::PathBuf },
+ #[error(transparent)]
+ SchemePermission(#[from] config::protocol::allow::Error),
+ #[error("Protocol {scheme:?} of url {url:?} is denied per configuration")]
+ ProtocolDenied { url: BString, scheme: gix_url::Scheme },
+ #[error(transparent)]
+ Connect(#[from] gix_protocol::transport::client::connect::Error),
+ #[error("The {} url was missing - don't know where to establish a connection to", direction.as_str())]
+ MissingUrl { direction: remote::Direction },
+ #[error("Protocol named {given:?} is not a valid protocol. Choose between 1 and 2")]
+ UnknownProtocol { given: BString },
+ #[error("Could not verify that \"{}\" url is a valid git directory before attempting to use it", url.to_bstring())]
+ FileUrl {
+ source: Box<gix_discover::is_git::Error>,
+ url: gix_url::Url,
+ },
+ }
+
+ impl gix_protocol::transport::IsSpuriousError for Error {
+ /// Return `true` if retrying might result in a different outcome due to IO working out differently.
+ fn is_spurious(&self) -> bool {
+ match self {
+ Error::Connect(err) => err.is_spurious(),
+ _ => false,
+ }
+ }
+ }
+}
+pub use error::Error;
+
+/// Establishing connections to remote hosts (without performing a git-handshake).
+impl<'repo> Remote<'repo> {
+ /// Create a new connection using `transport` to communicate, with `progress` to indicate changes.
+ ///
+ /// Note that this method expects the `transport` to be created by the user, which would involve the [`url()`][Self::url()].
+ /// It's meant to be used when async operation is needed with runtimes of the user's choice.
+ pub fn to_connection_with_transport<T, P>(&self, transport: T, progress: P) -> Connection<'_, 'repo, T, P>
+ where
+ T: Transport,
+ P: Progress,
+ {
+ Connection {
+ remote: self,
+ authenticate: None,
+ transport_options: None,
+ transport,
+ progress,
+ }
+ }
+
+ /// Connect to the url suitable for `direction` and return a handle through which operations can be performed.
+ ///
+ /// Note that the `protocol.version` configuration key affects the transport protocol used to connect,
+ /// with `2` being the default.
+ ///
+ /// The transport used for the connection can be configured via `transport_mut().configure()`, assuming the transport that is
+ /// actually used is well known. If that's not the case, the transport can be created by hand and passed to
+ /// [to_connection_with_transport()][Self::to_connection_with_transport()].
+ #[cfg(any(feature = "blocking-network-client", feature = "async-network-client-async-std"))]
+ #[gix_protocol::maybe_async::maybe_async]
+ pub async fn connect<P>(
+ &self,
+ direction: crate::remote::Direction,
+ progress: P,
+ ) -> Result<Connection<'_, 'repo, Box<dyn Transport + Send>, P>, Error>
+ where
+ P: Progress,
+ {
+ let (url, version) = self.sanitized_url_and_version(direction)?;
+ #[cfg(feature = "blocking-network-client")]
+ let scheme_is_ssh = url.scheme == gix_url::Scheme::Ssh;
+ let transport = gix_protocol::transport::connect(
+ url,
+ gix_protocol::transport::client::connect::Options {
+ version,
+ #[cfg(feature = "blocking-network-client")]
+ ssh: scheme_is_ssh
+ .then(|| self.repo.ssh_connect_options())
+ .transpose()?
+ .unwrap_or_default(),
+ },
+ )
+ .await?;
+ Ok(self.to_connection_with_transport(transport, progress))
+ }
+
+ /// Produce the sanitized URL and protocol version to use as obtained by querying the repository configuration.
+ ///
+ /// This can be useful when using custom transports to allow additional configuration.
+ pub fn sanitized_url_and_version(
+ &self,
+ direction: crate::remote::Direction,
+ ) -> Result<(gix_url::Url, gix_protocol::transport::Protocol), Error> {
+ fn sanitize(mut url: gix_url::Url) -> Result<gix_url::Url, Error> {
+ if url.scheme == gix_url::Scheme::File {
+ let mut dir = gix_path::to_native_path_on_windows(url.path.as_ref());
+ let kind = gix_discover::is_git(dir.as_ref())
+ .or_else(|_| {
+ dir.to_mut().push(gix_discover::DOT_GIT_DIR);
+ gix_discover::is_git(dir.as_ref())
+ })
+ .map_err(|err| Error::FileUrl {
+ source: err.into(),
+ url: url.clone(),
+ })?;
+ let (git_dir, _work_dir) = gix_discover::repository::Path::from_dot_git_dir(
+ dir.clone().into_owned(),
+ kind,
+ std::env::current_dir()?,
+ )
+ .ok_or_else(|| Error::InvalidRemoteRepositoryPath {
+ directory: dir.into_owned(),
+ })?
+ .into_repository_and_work_tree_directories();
+ url.path = gix_path::into_bstr(git_dir).into_owned();
+ }
+ Ok(url)
+ }
+
+ use gix_protocol::transport::Protocol;
+ let version = self
+ .repo
+ .config
+ .resolved
+ .integer("protocol", None, "version")
+ .unwrap_or(Ok(2))
+ .map_err(|err| Error::UnknownProtocol { given: err.input })
+ .and_then(|num| {
+ Ok(match num {
+ 1 => Protocol::V1,
+ 2 => Protocol::V2,
+ num => {
+ return Err(Error::UnknownProtocol {
+ given: num.to_string().into(),
+ })
+ }
+ })
+ })?;
+
+ let url = self.url(direction).ok_or(Error::MissingUrl { direction })?.to_owned();
+ if !self.repo.config.url_scheme()?.allow(&url.scheme) {
+ return Err(Error::ProtocolDenied {
+ url: url.to_bstring(),
+ scheme: url.scheme,
+ });
+ }
+ Ok((sanitize(url)?, version))
+ }
+}
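
Roughly how a connection might be established with the blocking transport. This is a sketch that assumes `gix` was built with the `blocking-network-client` feature and uses `gix::progress::Discard` purely as a stand-in for a real progress implementation.

    use gix::remote::Direction;

    // Sketch only: honour `protocol.version`/`protocol.allow` and connect for fetching.
    fn connect_for_fetch(remote: &gix::Remote<'_>) -> Result<(), Box<dyn std::error::Error>> {
        // A denied scheme, a missing url or an unknown protocol version surfaces as `connect::Error`.
        let _connection = remote.connect(Direction::Fetch, gix::progress::Discard)?;

        // For hand-made transports, query the sanitized url and protocol version instead
        // and pass the transport to `to_connection_with_transport()`.
        let (_url, _version) = remote.sanitized_url_and_version(Direction::Fetch)?;
        Ok(())
    }
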
diff --git a/vendor/gix/src/remote/connection/access.rs b/vendor/gix/src/remote/connection/access.rs
new file mode 100644
index 000000000..e4c31c3f5
--- /dev/null
+++ b/vendor/gix/src/remote/connection/access.rs
@@ -0,0 +1,67 @@
+use crate::{
+ remote::{connection::AuthenticateFn, Connection},
+ Remote,
+};
+
+/// Builder
+impl<'a, 'repo, T, P> Connection<'a, 'repo, T, P> {
+ /// Set a custom credentials callback to provide credentials if the remotes require authentication.
+ ///
+ /// Otherwise we will use the git configuration to perform the same task as the `git credential` helper program,
+ /// which is calling other helper programs in succession while resorting to a prompt to obtain credentials from the
+ /// user.
+ ///
+ /// A custom function may also be used to prevent accessing resources with authentication.
+ ///
+ /// Use the [configured_credentials()][Connection::configured_credentials()] method to obtain the implementation
+ /// that would otherwise be used, which can be useful to proxy the default configuration and obtain information about the
+ /// URLs to authenticate with.
+ pub fn with_credentials(
+ mut self,
+ helper: impl FnMut(gix_credentials::helper::Action) -> gix_credentials::protocol::Result + 'a,
+ ) -> Self {
+ self.authenticate = Some(Box::new(helper));
+ self
+ }
+
+ /// Provide configuration to be used before the first handshake is conducted.
+ /// It's typically created by initializing it with [`Repository::transport_options()`][crate::Repository::transport_options()], which
+ /// is also the default if this isn't set explicitly. Note that all of the default configuration is created from `git`
+ /// configuration, which can also be manipulated through overrides to affect the default configuration.
+ ///
+ /// Use this method to provide transport configuration with custom backend configuration that is not configurable by other means and
+ /// custom to the application at hand.
+ pub fn with_transport_options(mut self, config: Box<dyn std::any::Any>) -> Self {
+ self.transport_options = Some(config);
+ self
+ }
+}
+
+/// Access
+impl<'a, 'repo, T, P> Connection<'a, 'repo, T, P> {
+ /// A utility to return a function that will use this repository's configuration to obtain credentials, similar to
+ /// what `git credential` is doing.
+ ///
+ /// It's meant to be used by users of the [`with_credentials()`][Self::with_credentials()] builder to gain access to the
+ /// default way of handling credentials, which they can call as fallback.
+ pub fn configured_credentials(
+ &self,
+ url: gix_url::Url,
+ ) -> Result<AuthenticateFn<'static>, crate::config::credential_helpers::Error> {
+ let (mut cascade, _action_with_normalized_url, prompt_opts) =
+ self.remote.repo.config_snapshot().credential_helpers(url)?;
+ Ok(Box::new(move |action| cascade.invoke(action, prompt_opts.clone())) as AuthenticateFn<'_>)
+ }
+ /// Return the underlying remote that was used to instantiate this connection.
+ pub fn remote(&self) -> &Remote<'repo> {
+ self.remote
+ }
+
+ /// Provide a mutable transport to allow interacting with it according to its actual type.
+ /// Note that the caller _should not_ call [`configure()`][gix_protocol::transport::client::TransportWithoutIO::configure()]
+ /// as we will call it automatically before performing the handshake. Instead, to bring in custom configuration,
+ /// call [`with_transport_options()`][Connection::with_transport_options()].
+ pub fn transport_mut(&mut self) -> &mut T {
+ &mut self.transport
+ }
+}
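
A small sketch of combining a custom callback with the configuration-derived helpers, as the documentation above suggests. The inspection step is just a placeholder, and the lifetimes are written out explicitly only for clarity.

    // Sketch only: veto or log credential requests, then defer to the configured helpers.
    fn with_logging_auth<'a, 'repo, T, P>(
        con: gix::remote::Connection<'a, 'repo, T, P>,
        url: gix::Url,
    ) -> Result<gix::remote::Connection<'a, 'repo, T, P>, gix::config::credential_helpers::Error> {
        let mut fallback = con.configured_credentials(url)?;
        Ok(con.with_credentials(move |action| {
            // Place custom checks or logging here before delegating to the default helpers.
            fallback(action)
        }))
    }
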
diff --git a/vendor/gix/src/remote/connection/fetch/config.rs b/vendor/gix/src/remote/connection/fetch/config.rs
new file mode 100644
index 000000000..4782991bc
--- /dev/null
+++ b/vendor/gix/src/remote/connection/fetch/config.rs
@@ -0,0 +1,26 @@
+use super::Error;
+use crate::{
+ config::{cache::util::ApplyLeniency, tree::Pack},
+ Repository,
+};
+
+pub fn index_threads(repo: &Repository) -> Result<Option<usize>, Error> {
+ Ok(repo
+ .config
+ .resolved
+ .integer_filter("pack", None, Pack::THREADS.name, &mut repo.filter_config_section())
+ .map(|threads| Pack::THREADS.try_into_usize(threads))
+ .transpose()
+ .with_leniency(repo.options.lenient_config)?)
+}
+
+pub fn pack_index_version(repo: &Repository) -> Result<gix_pack::index::Version, Error> {
+ Ok(repo
+ .config
+ .resolved
+ .integer("pack", None, Pack::INDEX_VERSION.name)
+ .map(|value| Pack::INDEX_VERSION.try_into_index_version(value))
+ .transpose()
+ .with_leniency(repo.options.lenient_config)?
+ .unwrap_or(gix_pack::index::Version::V2))
+}
diff --git a/vendor/gix/src/remote/connection/fetch/error.rs b/vendor/gix/src/remote/connection/fetch/error.rs
new file mode 100644
index 000000000..0e6a4b840
--- /dev/null
+++ b/vendor/gix/src/remote/connection/fetch/error.rs
@@ -0,0 +1,41 @@
+use crate::config;
+
+/// The error returned by [`receive()`](super::Prepare::receive()).
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum Error {
+ #[error("The value to configure pack threads should be 0 to auto-configure or the amount of threads to use")]
+ PackThreads(#[from] config::unsigned_integer::Error),
+ #[error("The value to configure the pack index version should be 1 or 2")]
+ PackIndexVersion(#[from] config::key::GenericError),
+ #[error("Could not decode server reply")]
+ FetchResponse(#[from] gix_protocol::fetch::response::Error),
+ #[error("Cannot fetch from a remote that uses {remote} while local repository uses {local} for object hashes")]
+ IncompatibleObjectHash {
+ local: gix_hash::Kind,
+ remote: gix_hash::Kind,
+ },
+ #[error(transparent)]
+ Negotiate(#[from] super::negotiate::Error),
+ #[error(transparent)]
+ Client(#[from] gix_protocol::transport::client::Error),
+ #[error(transparent)]
+ WritePack(#[from] gix_pack::bundle::write::Error),
+ #[error(transparent)]
+ UpdateRefs(#[from] super::refs::update::Error),
+ #[error("Failed to remove .keep file at \"{}\"", path.display())]
+ RemovePackKeepFile {
+ path: std::path::PathBuf,
+ source: std::io::Error,
+ },
+}
+
+impl gix_protocol::transport::IsSpuriousError for Error {
+ fn is_spurious(&self) -> bool {
+ match self {
+ Error::FetchResponse(err) => err.is_spurious(),
+ Error::Client(err) => err.is_spurious(),
+ _ => false,
+ }
+ }
+}
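
The `IsSpuriousError` implementation above is what enables simple retry policies. A hedged sketch, assuming a network feature of `gix` is enabled so that `gix::protocol` and `gix::remote::fetch::Error` are available; the closure stands in for whatever performs the actual fetch.

    use gix::protocol::transport::IsSpuriousError;

    // Sketch only: retry a fetch operation once if the failure looks spurious (IO-flavoured).
    fn retry_once<T>(
        mut fetch_op: impl FnMut() -> Result<T, gix::remote::fetch::Error>,
    ) -> Result<T, gix::remote::fetch::Error> {
        match fetch_op() {
            Err(err) if err.is_spurious() => fetch_op(),
            other => other,
        }
    }
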
diff --git a/vendor/gix/src/remote/connection/fetch/mod.rs b/vendor/gix/src/remote/connection/fetch/mod.rs
new file mode 100644
index 000000000..4ce631b1e
--- /dev/null
+++ b/vendor/gix/src/remote/connection/fetch/mod.rs
@@ -0,0 +1,240 @@
+use gix_protocol::transport::client::Transport;
+
+use crate::{
+ bstr::BString,
+ remote,
+ remote::{
+ fetch::{DryRun, RefMap},
+ ref_map, Connection,
+ },
+ Progress,
+};
+
+mod error;
+pub use error::Error;
+
+use crate::remote::fetch::WritePackedRefs;
+
+/// The way reflog messages should be composed whenever a ref is written with recent objects from a remote.
+pub enum RefLogMessage {
+ /// Prefix the log with `action` and generate the typical suffix as `git` would.
+ Prefixed {
+ /// The action to use, like `fetch` or `pull`.
+ action: String,
+ },
+ /// Control the entire message, using `message` verbatim.
+ Override {
+ /// The complete reflog message.
+ message: BString,
+ },
+}
+
+impl RefLogMessage {
+ pub(crate) fn compose(&self, context: &str) -> BString {
+ match self {
+ RefLogMessage::Prefixed { action } => format!("{action}: {context}").into(),
+ RefLogMessage::Override { message } => message.to_owned(),
+ }
+ }
+}
+
+/// The status of the repository after the fetch operation
+#[derive(Debug, Clone)]
+pub enum Status {
+ /// Nothing changed as the remote didn't have anything new compared to our tracking branches, thus no pack was received
+ /// and no new object was added.
+ NoPackReceived {
+ /// However, depending on the refspecs, references might have been updated nonetheless to point to objects as
+ /// reported by the remote.
+ update_refs: refs::update::Outcome,
+ },
+ /// There was at least one tip with a new object which we received.
+ Change {
+ /// Information collected while writing the pack and its index.
+ write_pack_bundle: gix_pack::bundle::write::Outcome,
+ /// Information collected while updating references.
+ update_refs: refs::update::Outcome,
+ },
+ /// A dry run was performed which leaves the local repository without any change
+ /// nor will a pack have been received.
+ DryRun {
+ /// Information about what updates to refs would have been done.
+ update_refs: refs::update::Outcome,
+ },
+}
+
+/// The outcome of receiving a pack via [`Prepare::receive()`].
+#[derive(Debug, Clone)]
+pub struct Outcome {
+ /// The result of the initial mapping of references, the prerequisite for any fetch.
+ pub ref_map: RefMap,
+ /// The status of the operation to indicate what happened.
+ pub status: Status,
+}
+
+/// The progress ids used during various steps of the fetch operation.
+///
+/// Note that tagged progress isn't very widely available yet, but support can be improved as needed.
+///
+/// Use this information to selectively extract the progress of interest in case the parent application has custom visualization.
+#[derive(Debug, Copy, Clone)]
+pub enum ProgressId {
+ /// The progress name is defined by the remote and the progress messages it sets, along with their progress values and limits.
+ RemoteProgress,
+}
+
+impl From<ProgressId> for gix_features::progress::Id {
+ fn from(v: ProgressId) -> Self {
+ match v {
+ ProgressId::RemoteProgress => *b"FERP",
+ }
+ }
+}
+
+///
+pub mod negotiate;
+
+///
+pub mod prepare {
+ /// The error returned by [`prepare_fetch()`][super::Connection::prepare_fetch()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("Cannot perform a meaningful fetch operation without any configured ref-specs")]
+ MissingRefSpecs,
+ #[error(transparent)]
+ RefMap(#[from] crate::remote::ref_map::Error),
+ }
+
+ impl gix_protocol::transport::IsSpuriousError for Error {
+ fn is_spurious(&self) -> bool {
+ match self {
+ Error::RefMap(err) => err.is_spurious(),
+ _ => false,
+ }
+ }
+ }
+}
+
+impl<'remote, 'repo, T, P> Connection<'remote, 'repo, T, P>
+where
+ T: Transport,
+ P: Progress,
+{
+ /// Perform a handshake with the remote and obtain a ref-map with `options`, the basis for the subsequent fetch.
+ /// Note that at this point, the `transport` should already be configured using the [`transport_mut()`][Self::transport_mut()]
+ /// method, as it will be consumed here.
+ ///
+ /// From there additional properties of the fetch can be adjusted to override the defaults that are configured via gix-config.
+ ///
+ /// # Async Experimental
+ ///
+ /// Note that this implementation currently works correctly only in blocking mode, as it relies on Drop semantics to close the connection
+ /// should the fetch not be performed. Furthermore, the code doing the fetch is inherently blocking and not offloaded to a thread,
+ /// making this call block the executor.
+ /// It's best to unblock it by placing it into its own thread, or to offload it should usage in an async context be truly required.
+ #[allow(clippy::result_large_err)]
+ #[gix_protocol::maybe_async::maybe_async]
+ pub async fn prepare_fetch(
+ mut self,
+ options: ref_map::Options,
+ ) -> Result<Prepare<'remote, 'repo, T, P>, prepare::Error> {
+ if self.remote.refspecs(remote::Direction::Fetch).is_empty() {
+ return Err(prepare::Error::MissingRefSpecs);
+ }
+ let ref_map = self.ref_map_inner(options).await?;
+ Ok(Prepare {
+ con: Some(self),
+ ref_map,
+ dry_run: DryRun::No,
+ reflog_message: None,
+ write_packed_refs: WritePackedRefs::Never,
+ })
+ }
+}
+
+impl<'remote, 'repo, T, P> Prepare<'remote, 'repo, T, P>
+where
+ T: Transport,
+{
+ /// Return the ref_map (that includes the server handshake) which was part of listing refs prior to fetching a pack.
+ pub fn ref_map(&self) -> &RefMap {
+ &self.ref_map
+ }
+}
+
+mod config;
+mod receive_pack;
+///
+#[path = "update_refs/mod.rs"]
+pub mod refs;
+
+/// A structure to hold the result of the handshake with the remote and configure the upcoming fetch operation.
+pub struct Prepare<'remote, 'repo, T, P>
+where
+ T: Transport,
+{
+ con: Option<Connection<'remote, 'repo, T, P>>,
+ ref_map: RefMap,
+ dry_run: DryRun,
+ reflog_message: Option<RefLogMessage>,
+ write_packed_refs: WritePackedRefs,
+}
+
+/// Builder
+impl<'remote, 'repo, T, P> Prepare<'remote, 'repo, T, P>
+where
+ T: Transport,
+{
+ /// If dry run is enabled, no change to the repository will be made.
+ ///
+ /// This works by not actually fetching the pack after negotiating it, nor will refs be updated.
+ pub fn with_dry_run(mut self, enabled: bool) -> Self {
+ self.dry_run = if enabled { DryRun::Yes } else { DryRun::No };
+ self
+ }
+
+ /// If enabled, don't write ref updates to loose refs, but put them exclusively to packed-refs.
+ ///
+ /// This improves performance and allows case-sensitive filesystems to deal with ref names that would otherwise
+ /// collide.
+ pub fn with_write_packed_refs_only(mut self, enabled: bool) -> Self {
+ self.write_packed_refs = if enabled {
+ WritePackedRefs::Only
+ } else {
+ WritePackedRefs::Never
+ };
+ self
+ }
+
+ /// Set the reflog message to use when updating refs after fetching a pack.
+ pub fn with_reflog_message(mut self, reflog_message: RefLogMessage) -> Self {
+ self.reflog_message = reflog_message.into();
+ self
+ }
+}
+
+impl<'remote, 'repo, T, P> Drop for Prepare<'remote, 'repo, T, P>
+where
+ T: Transport,
+{
+ fn drop(&mut self) {
+ if let Some(mut con) = self.con.take() {
+ #[cfg(feature = "async-network-client")]
+ {
+ // TODO: this should be an async drop once the feature is available.
+ // Right now we block the executor by forcing this communication, but that only
+ // happens if the user didn't actually try to receive a pack, which consumes the
+ // connection in an async context.
+ gix_protocol::futures_lite::future::block_on(gix_protocol::indicate_end_of_interaction(
+ &mut con.transport,
+ ))
+ .ok();
+ }
+ #[cfg(not(feature = "async-network-client"))]
+ {
+ gix_protocol::indicate_end_of_interaction(&mut con.transport).ok();
+ }
+ }
+ }
+}
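
For context, a rough sketch of the full dry-run fetch flow built from the pieces above, in blocking mode. It assumes the `blocking-network-client` feature, that `Repository::find_remote("origin")` succeeds, and that `ref_map::Options` provides a usable `Default`; none of these assumptions are established by this file.

    use std::sync::atomic::AtomicBool;
    use gix::remote::{fetch::RefLogMessage, ref_map, Direction};

    // Sketch only: handshake, prepare, configure and receive without changing the repository.
    fn dry_run_fetch(repo: &gix::Repository) -> Result<(), Box<dyn std::error::Error>> {
        let remote = repo.find_remote("origin")?;
        let outcome = remote
            .connect(Direction::Fetch, gix::progress::Discard)?
            .prepare_fetch(ref_map::Options::default())?
            .with_dry_run(true)
            .with_reflog_message(RefLogMessage::Prefixed { action: "fetch".into() })
            .receive(&AtomicBool::new(false))?;

        // `outcome.ref_map` carries the handshake and ref mappings, while `outcome.status`
        // tells whether a pack would have been received.
        let _ = (outcome.ref_map, outcome.status);
        Ok(())
    }
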
diff --git a/vendor/gix/src/remote/connection/fetch/negotiate.rs b/vendor/gix/src/remote/connection/fetch/negotiate.rs
new file mode 100644
index 000000000..f5051ec72
--- /dev/null
+++ b/vendor/gix/src/remote/connection/fetch/negotiate.rs
@@ -0,0 +1,78 @@
+/// The way the negotiation is performed
+#[derive(Copy, Clone)]
+pub(crate) enum Algorithm {
+ /// Our very own implementation that probably should be replaced by one of the known algorithms soon.
+ Naive,
+}
+
+/// The error returned during negotiation.
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum Error {
+ #[error("We were unable to figure out what objects the server should send after {rounds} round(s)")]
+ NegotiationFailed { rounds: usize },
+}
+
+/// Negotiate one round with `algo` by looking at `ref_map` and adjusting `arguments` to contain the haves and wants.
+/// If this is not the first round, the `previous_response` is set with the last recorded server response.
+/// Returns `true` if the negotiation is done from our side so the server won't keep asking.
+pub(crate) fn one_round(
+ algo: Algorithm,
+ round: usize,
+ repo: &crate::Repository,
+ ref_map: &crate::remote::fetch::RefMap,
+ fetch_tags: crate::remote::fetch::Tags,
+ arguments: &mut gix_protocol::fetch::Arguments,
+ _previous_response: Option<&gix_protocol::fetch::Response>,
+) -> Result<bool, Error> {
+ let tag_refspec_to_ignore = fetch_tags
+ .to_refspec()
+ .filter(|_| matches!(fetch_tags, crate::remote::fetch::Tags::Included));
+ match algo {
+ Algorithm::Naive => {
+ assert_eq!(round, 1, "Naive always finishes after the first round, and claims it is done.");
+ let mut has_missing_tracking_branch = false;
+ for mapping in &ref_map.mappings {
+ if tag_refspec_to_ignore.map_or(false, |tag_spec| {
+ mapping
+ .spec_index
+ .implicit_index()
+ .and_then(|idx| ref_map.extra_refspecs.get(idx))
+ .map_or(false, |spec| spec.to_ref() == tag_spec)
+ }) {
+ continue;
+ }
+ let have_id = mapping.local.as_ref().and_then(|name| {
+ repo.find_reference(name)
+ .ok()
+ .and_then(|r| r.target().try_id().map(ToOwned::to_owned))
+ });
+ match have_id {
+ Some(have_id) => {
+ if let Some(want_id) = mapping.remote.as_id() {
+ if want_id != have_id {
+ arguments.want(want_id);
+ arguments.have(have_id);
+ }
+ }
+ }
+ None => {
+ if let Some(want_id) = mapping.remote.as_id() {
+ arguments.want(want_id);
+ has_missing_tracking_branch = true;
+ }
+ }
+ }
+ }
+
+ if has_missing_tracking_branch {
+ if let Ok(Some(r)) = repo.head_ref() {
+ if let Some(id) = r.target().try_id() {
+ arguments.have(id);
+ }
+ }
+ }
+ Ok(true)
+ }
+ }
+}
diff --git a/vendor/gix/src/remote/connection/fetch/receive_pack.rs b/vendor/gix/src/remote/connection/fetch/receive_pack.rs
new file mode 100644
index 000000000..686de5999
--- /dev/null
+++ b/vendor/gix/src/remote/connection/fetch/receive_pack.rs
@@ -0,0 +1,238 @@
+use std::sync::atomic::AtomicBool;
+
+use gix_odb::FindExt;
+use gix_protocol::transport::client::Transport;
+
+use crate::{
+ remote,
+ remote::{
+ connection::fetch::config,
+ fetch,
+ fetch::{negotiate, refs, Error, Outcome, Prepare, ProgressId, RefLogMessage, Status},
+ },
+ Progress,
+};
+
+impl<'remote, 'repo, T, P> Prepare<'remote, 'repo, T, P>
+where
+ T: Transport,
+ P: Progress,
+ P::SubProgress: 'static,
+{
+ /// Receive the pack and perform the operation as configured by git via `gix-config` or overridden by various builder methods.
+ /// Returns an `Outcome` describing everything that happened, with `Status::NoPackReceived` indicating that there was nothing
+ /// to do because all remote refs were already at the same state as they are locally.
+ ///
+ /// ### Negotiation
+ ///
+ /// "fetch.negotiationAlgorithm" describes algorithms `git` uses currently, with the default being `consecutive` and `skipping` being
+ /// experimented with. We currently implement something we could call 'naive' which works for now.
+ ///
+ /// ### Pack `.keep` files
+ ///
+ /// Packs that are freshly written to the object database are vulnerable to garbage collection for the brief period between
+ /// their placement and the writing of the references that bind their objects to the commit graph and thus make them reachable.
+ ///
+ /// To circumvent this issue, a `.keep` file is created before any pack-related file (i.e. `.pack` or `.idx`) is written, which indicates to
+ /// garbage collectors (like `git maintenance`, `git gc`) that the corresponding pack file must be left alone.
+ ///
+ /// If there were any ref updates or the received pack was empty, the `.keep` file will be deleted automatically, leaving `None` in
+ /// `write_pack_bundle.keep_path`.
+ /// However, if no ref-update happened the path will still be present in `write_pack_bundle.keep_path` and is expected to be handled by the caller.
+ /// A known application of this behaviour is in `remote-helper` implementations, which should send this path via `lock <path>` to stdout
+ /// to inform git about the file, which it will remove once it has updated the refs accordingly.
+ ///
+ /// ### Deviation
+ ///
+ /// When **updating refs**, the `git-fetch` docs state the following:
+ ///
+ /// > Unlike when pushing with git-push, any updates outside of refs/{tags,heads}/* will be accepted without + in the refspec (or --force), whether that’s swapping e.g. a tree object for a blob, or a commit for another commit that doesn’t have the previous commit as an ancestor etc.
+ ///
+ /// We explicitly don't special case those refs and expect the user to take control. Note that by its nature,
+ /// force only applies to refs pointing to commits and if they don't, they will be updated either way in our
+ /// implementation as well.
+ ///
+ /// ### Async Mode Shortcoming
+ ///
+ /// Currently the entire process of resolving a pack is blocking the executor. This can be fixed using the `blocking` crate, but it
+ /// didn't seem worth the tradeoff of having more complex code.
+ ///
+ /// ### Configuration
+ ///
+ /// - `gitoxide.userAgent` is read to obtain the application user agent for git servers and for HTTP servers as well.
+ ///
+ #[gix_protocol::maybe_async::maybe_async]
+ pub async fn receive(mut self, should_interrupt: &AtomicBool) -> Result<Outcome, Error> {
+ let mut con = self.con.take().expect("receive() can only be called once");
+
+ let handshake = &self.ref_map.handshake;
+ let protocol_version = handshake.server_protocol_version;
+
+ let fetch = gix_protocol::Command::Fetch;
+ let progress = &mut con.progress;
+ let repo = con.remote.repo;
+ let fetch_features = {
+ let mut f = fetch.default_features(protocol_version, &handshake.capabilities);
+ f.push(repo.config.user_agent_tuple());
+ f
+ };
+
+ gix_protocol::fetch::Response::check_required_features(protocol_version, &fetch_features)?;
+ let sideband_all = fetch_features.iter().any(|(n, _)| *n == "sideband-all");
+ let mut arguments = gix_protocol::fetch::Arguments::new(protocol_version, fetch_features);
+ if matches!(con.remote.fetch_tags, crate::remote::fetch::Tags::Included) {
+ if !arguments.can_use_include_tag() {
+ unimplemented!("we expect servers to support 'include-tag', otherwise we have to implement another pass to fetch attached tags separately");
+ }
+ arguments.use_include_tag();
+ }
+ let mut previous_response = None::<gix_protocol::fetch::Response>;
+ let mut round = 1;
+
+ if self.ref_map.object_hash != repo.object_hash() {
+ return Err(Error::IncompatibleObjectHash {
+ local: repo.object_hash(),
+ remote: self.ref_map.object_hash,
+ });
+ }
+
+ let reader = 'negotiation: loop {
+ progress.step();
+ progress.set_name(format!("negotiate (round {round})"));
+
+ let is_done = match negotiate::one_round(
+ negotiate::Algorithm::Naive,
+ round,
+ repo,
+ &self.ref_map,
+ con.remote.fetch_tags,
+ &mut arguments,
+ previous_response.as_ref(),
+ ) {
+ Ok(_) if arguments.is_empty() => {
+ gix_protocol::indicate_end_of_interaction(&mut con.transport).await.ok();
+ let update_refs = refs::update(
+ repo,
+ self.reflog_message
+ .take()
+ .unwrap_or_else(|| RefLogMessage::Prefixed { action: "fetch".into() }),
+ &self.ref_map.mappings,
+ con.remote.refspecs(remote::Direction::Fetch),
+ &self.ref_map.extra_refspecs,
+ con.remote.fetch_tags,
+ self.dry_run,
+ self.write_packed_refs,
+ )?;
+ return Ok(Outcome {
+ ref_map: std::mem::take(&mut self.ref_map),
+ status: Status::NoPackReceived { update_refs },
+ });
+ }
+ Ok(is_done) => is_done,
+ Err(err) => {
+ gix_protocol::indicate_end_of_interaction(&mut con.transport).await.ok();
+ return Err(err.into());
+ }
+ };
+ round += 1;
+ let mut reader = arguments.send(&mut con.transport, is_done).await?;
+ if sideband_all {
+ setup_remote_progress(progress, &mut reader);
+ }
+ let response = gix_protocol::fetch::Response::from_line_reader(protocol_version, &mut reader).await?;
+ if response.has_pack() {
+ progress.step();
+ progress.set_name("receiving pack");
+ if !sideband_all {
+ setup_remote_progress(progress, &mut reader);
+ }
+ break 'negotiation reader;
+ } else {
+ previous_response = Some(response);
+ }
+ };
+
+ let options = gix_pack::bundle::write::Options {
+ thread_limit: config::index_threads(repo)?,
+ index_version: config::pack_index_version(repo)?,
+ iteration_mode: gix_pack::data::input::Mode::Verify,
+ object_hash: con.remote.repo.object_hash(),
+ };
+
+ let mut write_pack_bundle = if matches!(self.dry_run, fetch::DryRun::No) {
+ Some(gix_pack::Bundle::write_to_directory(
+ #[cfg(feature = "async-network-client")]
+ {
+ gix_protocol::futures_lite::io::BlockOn::new(reader)
+ },
+ #[cfg(not(feature = "async-network-client"))]
+ {
+ reader
+ },
+ Some(repo.objects.store_ref().path().join("pack")),
+ con.progress,
+ should_interrupt,
+ Some(Box::new({
+ let repo = repo.clone();
+ move |oid, buf| repo.objects.find(oid, buf).ok()
+ })),
+ options,
+ )?)
+ } else {
+ drop(reader);
+ None
+ };
+
+ if matches!(protocol_version, gix_protocol::transport::Protocol::V2) {
+ gix_protocol::indicate_end_of_interaction(&mut con.transport).await.ok();
+ }
+
+ let update_refs = refs::update(
+ repo,
+ self.reflog_message
+ .take()
+ .unwrap_or_else(|| RefLogMessage::Prefixed { action: "fetch".into() }),
+ &self.ref_map.mappings,
+ con.remote.refspecs(remote::Direction::Fetch),
+ &self.ref_map.extra_refspecs,
+ con.remote.fetch_tags,
+ self.dry_run,
+ self.write_packed_refs,
+ )?;
+
+ if let Some(bundle) = write_pack_bundle.as_mut() {
+ if !update_refs.edits.is_empty() || bundle.index.num_objects == 0 {
+ if let Some(path) = bundle.keep_path.take() {
+ std::fs::remove_file(&path).map_err(|err| Error::RemovePackKeepFile { path, source: err })?;
+ }
+ }
+ }
+
+ Ok(Outcome {
+ ref_map: std::mem::take(&mut self.ref_map),
+ status: match write_pack_bundle {
+ Some(write_pack_bundle) => Status::Change {
+ write_pack_bundle,
+ update_refs,
+ },
+ None => Status::DryRun { update_refs },
+ },
+ })
+ }
+}
+
+fn setup_remote_progress<P>(
+ progress: &mut P,
+ reader: &mut Box<dyn gix_protocol::transport::client::ExtendedBufRead + Unpin + '_>,
+) where
+ P: Progress,
+ P::SubProgress: 'static,
+{
+ use gix_protocol::transport::client::ExtendedBufRead;
+ reader.set_progress_handler(Some(Box::new({
+ let mut remote_progress = progress.add_child_with_id("remote", ProgressId::RemoteProgress.into());
+ move |is_err: bool, data: &[u8]| {
+ gix_protocol::RemoteProgress::translate_to_progress(is_err, data, &mut remote_progress)
+ }
+ }) as gix_protocol::transport::client::HandleProgress));
+}
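
To complement the documentation of `receive()`, a minimal sketch of consuming its outcome. It relies only on the fields visible in this diff (`updates`, `edits`, `keep_path`) and leaves the handling of a leftover `.keep` file to the caller, as described above.

    use gix::remote::fetch::{Outcome, Status};

    // Sketch only: summarize what `receive()` did, including a possibly leftover `.keep` file.
    fn summarize(outcome: &Outcome) {
        match &outcome.status {
            Status::NoPackReceived { update_refs } => {
                println!("no pack received, {} ref update(s) evaluated", update_refs.updates.len());
            }
            Status::Change { write_pack_bundle, update_refs } => {
                println!("{} ref edit(s) applied", update_refs.edits.len());
                if let Some(keep) = write_pack_bundle.keep_path.as_ref() {
                    // No ref edit removed it, so the caller is responsible for this file,
                    // e.g. a remote-helper would print `lock <path>` to stdout here.
                    println!("keep file left behind: {}", keep.display());
                }
            }
            Status::DryRun { update_refs } => {
                println!("dry-run only, {} ref update(s) computed", update_refs.updates.len());
            }
        }
    }
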
diff --git a/vendor/gix/src/remote/connection/fetch/update_refs/mod.rs b/vendor/gix/src/remote/connection/fetch/update_refs/mod.rs
new file mode 100644
index 000000000..953490672
--- /dev/null
+++ b/vendor/gix/src/remote/connection/fetch/update_refs/mod.rs
@@ -0,0 +1,274 @@
+#![allow(clippy::result_large_err)]
+use std::{collections::BTreeMap, convert::TryInto, path::PathBuf};
+
+use gix_odb::{Find, FindExt};
+use gix_ref::{
+ transaction::{Change, LogChange, PreviousValue, RefEdit, RefLog},
+ Target, TargetRef,
+};
+
+use crate::{
+ ext::ObjectIdExt,
+ remote::{
+ fetch,
+ fetch::{refs::update::Mode, RefLogMessage, Source},
+ },
+ Repository,
+};
+
+///
+pub mod update;
+
+/// Information about the update of a single reference, corresponding to the respective entry in [`RefMap::mappings`][crate::remote::fetch::RefMap::mappings].
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct Update {
+ /// The way the update was performed.
+ pub mode: update::Mode,
+ /// The index to the edit that was created from the corresponding mapping, or `None` if there was no local ref.
+ pub edit_index: Option<usize>,
+}
+
+impl From<update::Mode> for Update {
+ fn from(mode: Mode) -> Self {
+ Update { mode, edit_index: None }
+ }
+}
+
+/// Update all refs as derived from `refmap.mappings` and produce an `Outcome` informing about all applied changes in detail, with each
+/// [`update`][Update] corresponding to the [`fetch::Mapping`] at the same index.
+/// If `dry_run` is true, ref transactions won't actually be applied but are assumed to work without error, so the underlying
+/// `repo` is not actually changed. No 'object exists' check is performed either, as the objects are unlikely to exist given that the pack
+/// wasn't fetched.
+/// `action` is the prefix used for reflog entries, and is typically "fetch".
+///
+/// It can be used to produce typical information that one is used to from `git fetch`.
+#[allow(clippy::too_many_arguments)]
+pub(crate) fn update(
+ repo: &Repository,
+ message: RefLogMessage,
+ mappings: &[fetch::Mapping],
+ refspecs: &[gix_refspec::RefSpec],
+ extra_refspecs: &[gix_refspec::RefSpec],
+ fetch_tags: fetch::Tags,
+ dry_run: fetch::DryRun,
+ write_packed_refs: fetch::WritePackedRefs,
+) -> Result<update::Outcome, update::Error> {
+ let mut edits = Vec::new();
+ let mut updates = Vec::new();
+
+ let implicit_tag_refspec = fetch_tags
+ .to_refspec()
+ .filter(|_| matches!(fetch_tags, crate::remote::fetch::Tags::Included));
+ for (remote, local, spec, is_implicit_tag) in mappings.iter().filter_map(
+ |fetch::Mapping {
+ remote,
+ local,
+ spec_index,
+ }| {
+ spec_index.get(refspecs, extra_refspecs).map(|spec| {
+ (
+ remote,
+ local,
+ spec,
+ implicit_tag_refspec.map_or(false, |tag_spec| spec.to_ref() == tag_spec),
+ )
+ })
+ },
+ ) {
+ let remote_id = match remote.as_id() {
+ Some(id) => id,
+ None => continue,
+ };
+ if dry_run == fetch::DryRun::No && !repo.objects.contains(remote_id) {
+ let update = if is_implicit_tag {
+ update::Mode::ImplicitTagNotSentByRemote.into()
+ } else {
+ update::Mode::RejectedSourceObjectNotFound { id: remote_id.into() }.into()
+ };
+ updates.push(update);
+ continue;
+ }
+ let checked_out_branches = worktree_branches(repo)?;
+ let (mode, edit_index) = match local {
+ Some(name) => {
+ let (mode, reflog_message, name, previous_value) = match repo.try_find_reference(name)? {
+ Some(existing) => {
+ if let Some(wt_dir) = checked_out_branches.get(existing.name()) {
+ let mode = update::Mode::RejectedCurrentlyCheckedOut {
+ worktree_dir: wt_dir.to_owned(),
+ };
+ updates.push(mode.into());
+ continue;
+ }
+ match existing.target() {
+ TargetRef::Symbolic(_) => {
+ updates.push(update::Mode::RejectedSymbolic.into());
+ continue;
+ }
+ TargetRef::Peeled(local_id) => {
+ let previous_value =
+ PreviousValue::MustExistAndMatch(Target::Peeled(local_id.to_owned()));
+ let (mode, reflog_message) = if local_id == remote_id {
+ (update::Mode::NoChangeNeeded, "no update will be performed")
+ } else if let Some(gix_ref::Category::Tag) = existing.name().category() {
+ if spec.allow_non_fast_forward() {
+ (update::Mode::Forced, "updating tag")
+ } else {
+ updates.push(update::Mode::RejectedTagUpdate.into());
+ continue;
+ }
+ } else {
+ let mut force = spec.allow_non_fast_forward();
+ let is_fast_forward = match dry_run {
+ fetch::DryRun::No => {
+ let ancestors = repo
+ .find_object(local_id)?
+ .try_into_commit()
+ .map_err(|_| ())
+ .and_then(|c| {
+ c.committer().map(|a| a.time.seconds_since_unix_epoch).map_err(|_| ())
+ }).and_then(|local_commit_time|
+ remote_id
+ .to_owned()
+ .ancestors(|id, buf| repo.objects.find_commit_iter(id, buf))
+ .sorting(
+ gix_traverse::commit::Sorting::ByCommitTimeNewestFirstCutoffOlderThan {
+ time_in_seconds_since_epoch: local_commit_time
+ },
+ )
+ .map_err(|_| ())
+ );
+ match ancestors {
+ Ok(mut ancestors) => {
+ ancestors.any(|cid| cid.map_or(false, |cid| cid == local_id))
+ }
+ Err(_) => {
+ force = true;
+ false
+ }
+ }
+ }
+ fetch::DryRun::Yes => true,
+ };
+ if is_fast_forward {
+ (
+ update::Mode::FastForward,
+ matches!(dry_run, fetch::DryRun::Yes)
+ .then(|| "fast-forward (guessed in dry-run)")
+ .unwrap_or("fast-forward"),
+ )
+ } else if force {
+ (update::Mode::Forced, "forced-update")
+ } else {
+ updates.push(update::Mode::RejectedNonFastForward.into());
+ continue;
+ }
+ };
+ (mode, reflog_message, existing.name().to_owned(), previous_value)
+ }
+ }
+ }
+ None => {
+ let name: gix_ref::FullName = name.try_into()?;
+ let reflog_msg = match name.category() {
+ Some(gix_ref::Category::Tag) => "storing tag",
+ Some(gix_ref::Category::LocalBranch) => "storing head",
+ _ => "storing ref",
+ };
+ (
+ update::Mode::New,
+ reflog_msg,
+ name,
+ PreviousValue::ExistingMustMatch(Target::Peeled(remote_id.to_owned())),
+ )
+ }
+ };
+ let edit = RefEdit {
+ change: Change::Update {
+ log: LogChange {
+ mode: RefLog::AndReference,
+ force_create_reflog: false,
+ message: message.compose(reflog_message),
+ },
+ expected: previous_value,
+ new: if let Source::Ref(gix_protocol::handshake::Ref::Symbolic { target, .. }) = &remote {
+ match mappings.iter().find_map(|m| {
+ m.remote.as_name().and_then(|name| {
+ (name == target)
+ .then(|| m.local.as_ref().and_then(|local| local.try_into().ok()))
+ .flatten()
+ })
+ }) {
+ Some(local_branch) => {
+ // This is always safe because…
+ // - the reference may exist already
+ // - if it doesn't exist it will be created - we are here because it's in the list of mappings after all
+ // - if it exists and is updated, and the update is rejected due to non-fastforward for instance, the
+ // target reference still exists and we can point to it.
+ Target::Symbolic(local_branch)
+ }
+ None => Target::Peeled(remote_id.into()),
+ }
+ } else {
+ Target::Peeled(remote_id.into())
+ },
+ },
+ name,
+ deref: false,
+ };
+ let edit_index = edits.len();
+ edits.push(edit);
+ (mode, Some(edit_index))
+ }
+ None => (update::Mode::NoChangeNeeded, None),
+ };
+ updates.push(Update { mode, edit_index })
+ }
+
+ let edits = match dry_run {
+ fetch::DryRun::No => {
+ let (file_lock_fail, packed_refs_lock_fail) = repo
+ .config
+ .lock_timeout()
+ .map_err(crate::reference::edit::Error::from)?;
+ repo.refs
+ .transaction()
+ .packed_refs(
+ match write_packed_refs {
+ fetch::WritePackedRefs::Only => {
+ gix_ref::file::transaction::PackedRefs::DeletionsAndNonSymbolicUpdatesRemoveLooseSourceReference(Box::new(|oid, buf| {
+ repo.objects
+ .try_find(oid, buf)
+ .map(|obj| obj.map(|obj| obj.kind))
+ .map_err(|err| Box::new(err) as Box<dyn std::error::Error + Send + Sync + 'static>)
+ }))},
+ fetch::WritePackedRefs::Never => gix_ref::file::transaction::PackedRefs::DeletionsOnly
+ }
+ )
+ .prepare(edits, file_lock_fail, packed_refs_lock_fail)
+ .map_err(crate::reference::edit::Error::from)?
+ .commit(repo.committer().transpose().map_err(|err| update::Error::EditReferences(crate::reference::edit::Error::ParseCommitterTime(err)))?)
+ .map_err(crate::reference::edit::Error::from)?
+ }
+ fetch::DryRun::Yes => edits,
+ };
+
+ Ok(update::Outcome { edits, updates })
+}
+
+fn worktree_branches(repo: &Repository) -> Result<BTreeMap<gix_ref::FullName, PathBuf>, update::Error> {
+ let mut map = BTreeMap::new();
+ if let Some((wt_dir, head_ref)) = repo.work_dir().zip(repo.head_ref().ok().flatten()) {
+ map.insert(head_ref.inner.name, wt_dir.to_owned());
+ }
+ for proxy in repo.worktrees()? {
+ let repo = proxy.into_repo_with_possibly_inaccessible_worktree()?;
+ if let Some((wt_dir, head_ref)) = repo.work_dir().zip(repo.head_ref().ok().flatten()) {
+ map.insert(head_ref.inner.name, wt_dir.to_owned());
+ }
+ }
+ Ok(map)
+}
+
+#[cfg(test)]
+mod tests;
diff --git a/vendor/gix/src/remote/connection/fetch/update_refs/tests.rs b/vendor/gix/src/remote/connection/fetch/update_refs/tests.rs
new file mode 100644
index 000000000..145990ac8
--- /dev/null
+++ b/vendor/gix/src/remote/connection/fetch/update_refs/tests.rs
@@ -0,0 +1,607 @@
+pub fn restricted() -> crate::open::Options {
+ crate::open::Options::isolated().config_overrides(["user.name=gitoxide", "user.email=gitoxide@localhost"])
+}
+
+/// Convert a hexadecimal hash into its corresponding `ObjectId` or _panic_.
+fn hex_to_id(hex: &str) -> gix_hash::ObjectId {
+ gix_hash::ObjectId::from_hex(hex.as_bytes()).expect("40 bytes hex")
+}
+
+mod update {
+ use std::convert::TryInto;
+
+ use gix_testtools::Result;
+
+ use super::hex_to_id;
+ use crate as gix;
+
+ fn base_repo_path() -> String {
+ gix::path::realpath(
+ gix_testtools::scripted_fixture_read_only("make_remote_repos.sh")
+ .unwrap()
+ .join("base"),
+ )
+ .unwrap()
+ .to_string_lossy()
+ .into_owned()
+ }
+
+ fn repo(name: &str) -> gix::Repository {
+ let dir =
+ gix_testtools::scripted_fixture_read_only_with_args("make_fetch_repos.sh", [base_repo_path()]).unwrap();
+ gix::open_opts(dir.join(name), restricted()).unwrap()
+ }
+ fn repo_rw(name: &str) -> (gix::Repository, gix_testtools::tempfile::TempDir) {
+ let dir = gix_testtools::scripted_fixture_writable_with_args(
+ "make_fetch_repos.sh",
+ [base_repo_path()],
+ gix_testtools::Creation::ExecuteScript,
+ )
+ .unwrap();
+ let repo = gix::open_opts(dir.path().join(name), restricted()).unwrap();
+ (repo, dir)
+ }
+ use gix_ref::{transaction::Change, TargetRef};
+
+ use crate::{
+ bstr::BString,
+ remote::{
+ fetch,
+ fetch::{refs::tests::restricted, Mapping, RefLogMessage, Source, SpecIndex},
+ },
+ };
+
+ #[test]
+ fn various_valid_updates() {
+ let repo = repo("two-origins");
+ for (spec, expected_mode, reflog_message, detail) in [
+ (
+ "refs/heads/main:refs/remotes/origin/main",
+ fetch::refs::update::Mode::NoChangeNeeded,
+ Some("no update will be performed"),
+ "these refs are en-par since the initial clone",
+ ),
+ (
+ "refs/heads/main",
+ fetch::refs::update::Mode::NoChangeNeeded,
+ None,
+ "without local destination ref there is nothing to do for us, ever (except for FETCH_HEADs) later",
+ ),
+ (
+ "refs/heads/main:refs/remotes/origin/new-main",
+ fetch::refs::update::Mode::New,
+ Some("storing ref"),
+ "the destination branch doesn't exist and needs to be created",
+ ),
+ (
+ "refs/heads/main:refs/heads/feature",
+ fetch::refs::update::Mode::New,
+ Some("storing head"),
+ "reflog messages are specific to the type of branch stored, to some limited extend",
+ ),
+ (
+ "refs/heads/main:refs/tags/new-tag",
+ fetch::refs::update::Mode::New,
+ Some("storing tag"),
+ "reflog messages are specific to the type of branch stored, to some limited extend",
+ ),
+ (
+ "+refs/heads/main:refs/remotes/origin/new-main",
+ fetch::refs::update::Mode::New,
+ Some("storing ref"),
+ "just to validate that we really are in dry-run mode, or else this ref would be present now",
+ ),
+ (
+ "+refs/heads/main:refs/remotes/origin/g",
+ fetch::refs::update::Mode::FastForward,
+ Some("fast-forward (guessed in dry-run)"),
+ "a forced non-fastforward (main goes backwards), but dry-run calls it fast-forward",
+ ),
+ (
+ "+refs/heads/main:refs/tags/b-tag",
+ fetch::refs::update::Mode::Forced,
+ Some("updating tag"),
+ "tags can only be forced",
+ ),
+ (
+ "refs/heads/main:refs/tags/b-tag",
+ fetch::refs::update::Mode::RejectedTagUpdate,
+ None,
+ "otherwise a tag is always refusing itself to be overwritten (no-clobber)",
+ ),
+ (
+ "+refs/remotes/origin/g:refs/heads/main",
+ fetch::refs::update::Mode::RejectedCurrentlyCheckedOut {
+ worktree_dir: repo.work_dir().expect("present").to_owned(),
+ },
+ None,
+ "checked out branches cannot be written, as it requires a merge of sorts which isn't done here",
+ ),
+ (
+ "ffffffffffffffffffffffffffffffffffffffff:refs/heads/invalid-source-object",
+ fetch::refs::update::Mode::RejectedSourceObjectNotFound {
+ id: hex_to_id("ffffffffffffffffffffffffffffffffffffffff"),
+ },
+ None,
+ "checked out branches cannot be written, as it requires a merge of sorts which isn't done here",
+ ),
+ (
+ "refs/remotes/origin/g:refs/heads/not-currently-checked-out",
+ fetch::refs::update::Mode::FastForward,
+ Some("fast-forward (guessed in dry-run)"),
+ "a fast-forward only fast-forward situation, all good",
+ ),
+ ] {
+ let (mapping, specs) = mapping_from_spec(spec, &repo);
+ let out = fetch::refs::update(
+ &repo,
+ prefixed("action"),
+ &mapping,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ reflog_message.map(|_| fetch::DryRun::Yes).unwrap_or(fetch::DryRun::No),
+ fetch::WritePackedRefs::Never,
+ )
+ .unwrap();
+
+ assert_eq!(
+ out.updates,
+ vec![fetch::refs::Update {
+ mode: expected_mode.clone(),
+ edit_index: reflog_message.map(|_| 0),
+ }],
+ "{spec:?}: {detail}"
+ );
+ assert_eq!(out.edits.len(), reflog_message.map(|_| 1).unwrap_or(0));
+ if let Some(reflog_message) = reflog_message {
+ let edit = &out.edits[0];
+ match &edit.change {
+ Change::Update { log, new, .. } => {
+ assert_eq!(
+ log.message,
+ format!("action: {reflog_message}"),
+ "{spec}: reflog messages are specific and we emulate git word for word"
+ );
+ let remote_ref = repo
+ .find_reference(specs[0].to_ref().source().expect("always present"))
+ .unwrap();
+ assert_eq!(
+ new.id(),
+ remote_ref.target().id(),
+ "remote ref provides the id to set in the local reference"
+ )
+ }
+ _ => unreachable!("only updates"),
+ }
+ }
+ }
+ }
+
+ #[test]
+ fn checked_out_branches_in_worktrees_are_rejected_with_additional_information() -> Result {
+ let root = gix_path::realpath(gix_testtools::scripted_fixture_read_only_with_args(
+ "make_fetch_repos.sh",
+ [base_repo_path()],
+ )?)?;
+ let repo = root.join("worktree-root");
+ let repo = gix::open_opts(repo, restricted())?;
+ for (branch, path_from_root) in [
+ ("main", "worktree-root"),
+ ("wt-a-nested", "prev/wt-a-nested"),
+ ("wt-a", "wt-a"),
+ ("nested-wt-b", "wt-a/nested-wt-b"),
+ ("wt-c-locked", "wt-c-locked"),
+ ("wt-deleted", "wt-deleted"),
+ ] {
+ let spec = format!("refs/heads/main:refs/heads/{branch}");
+ let (mappings, specs) = mapping_from_spec(&spec, &repo);
+ let out = fetch::refs::update(
+ &repo,
+ prefixed("action"),
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::Yes,
+ fetch::WritePackedRefs::Never,
+ )?;
+
+ assert_eq!(
+ out.updates,
+ vec![fetch::refs::Update {
+ mode: fetch::refs::update::Mode::RejectedCurrentlyCheckedOut {
+ worktree_dir: root.join(path_from_root),
+ },
+ edit_index: None,
+ }],
+ "{spec}: checked-out checks are done before checking if a change would actually be required (here it isn't)"
+ );
+ assert_eq!(out.edits.len(), 0);
+ }
+ Ok(())
+ }
+
+ #[test]
+ fn local_symbolic_refs_are_never_written() {
+ let repo = repo("two-origins");
+ for source in ["refs/heads/main", "refs/heads/symbolic", "HEAD"] {
+ let (mappings, specs) = mapping_from_spec(&format!("{source}:refs/heads/symbolic"), &repo);
+ let out = fetch::refs::update(
+ &repo,
+ prefixed("action"),
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::Yes,
+ fetch::WritePackedRefs::Never,
+ )
+ .unwrap();
+
+ assert_eq!(out.edits.len(), 0);
+ assert_eq!(
+ out.updates,
+ vec![fetch::refs::Update {
+ mode: fetch::refs::update::Mode::RejectedSymbolic,
+ edit_index: None
+ }],
+ "we don't overwrite these as the checked-out check needs to consider much more than it currently does, we are playing it safe"
+ );
+ }
+ }
+
+ #[test]
+ fn remote_symbolic_refs_can_always_be_set_as_there_is_no_scenario_where_it_could_be_nonexisting_and_rejected() {
+ let repo = repo("two-origins");
+ let (mut mappings, specs) = mapping_from_spec("refs/heads/symbolic:refs/remotes/origin/new", &repo);
+ mappings.push(Mapping {
+ remote: Source::Ref(gix_protocol::handshake::Ref::Direct {
+ full_ref_name: "refs/heads/main".try_into().unwrap(),
+ object: hex_to_id("f99771fe6a1b535783af3163eba95a927aae21d5"),
+ }),
+ local: Some("refs/heads/symbolic".into()),
+ spec_index: SpecIndex::ExplicitInRemote(0),
+ });
+ let out = fetch::refs::update(
+ &repo,
+ prefixed("action"),
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::Yes,
+ fetch::WritePackedRefs::Never,
+ )
+ .unwrap();
+
+ assert_eq!(out.edits.len(), 1);
+ assert_eq!(
+ out.updates,
+ vec![
+ fetch::refs::Update {
+ mode: fetch::refs::update::Mode::New,
+ edit_index: Some(0)
+ },
+ fetch::refs::Update {
+ mode: fetch::refs::update::Mode::RejectedSymbolic,
+ edit_index: None
+ }
+ ],
+ );
+ let edit = &out.edits[0];
+ match &edit.change {
+ Change::Update { log, new, .. } => {
+ assert_eq!(log.message, "action: storing ref");
+ assert!(
+ new.try_name().is_some(),
+ "remote falls back to peeled id as it's the only thing we seem to have locally, it won't refer to a non-existing local ref"
+ );
+ }
+ _ => unreachable!("only updates"),
+ }
+ }
+
+ #[test]
+ fn local_direct_refs_are_never_written_with_symbolic_ones_but_see_only_the_destination() {
+ let repo = repo("two-origins");
+ let (mappings, specs) = mapping_from_spec("refs/heads/symbolic:refs/heads/not-currently-checked-out", &repo);
+ let out = fetch::refs::update(
+ &repo,
+ prefixed("action"),
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::Yes,
+ fetch::WritePackedRefs::Never,
+ )
+ .unwrap();
+
+ assert_eq!(out.edits.len(), 1);
+ assert_eq!(
+ out.updates,
+ vec![fetch::refs::Update {
+ mode: fetch::refs::update::Mode::NoChangeNeeded,
+ edit_index: Some(0)
+ }],
+ );
+ }
+
+ #[test]
+ fn remote_refs_cannot_map_to_local_head() {
+ let repo = repo("two-origins");
+ let (mappings, specs) = mapping_from_spec("refs/heads/main:HEAD", &repo);
+ let out = fetch::refs::update(
+ &repo,
+ prefixed("action"),
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::Yes,
+ fetch::WritePackedRefs::Never,
+ )
+ .unwrap();
+
+ assert_eq!(out.edits.len(), 1);
+ assert_eq!(
+ out.updates,
+ vec![fetch::refs::Update {
+ mode: fetch::refs::update::Mode::New,
+ edit_index: Some(0),
+ }],
+ );
+ let edit = &out.edits[0];
+ match &edit.change {
+ Change::Update { log, new, .. } => {
+ assert_eq!(log.message, "action: storing head");
+ assert!(
+ new.try_id().is_some(),
+ "remote is peeled, so local will be peeled as well"
+ );
+ }
+ _ => unreachable!("only updates"),
+ }
+ assert_eq!(
+ edit.name.as_bstr(),
+ "refs/heads/HEAD",
+ "it's not possible to refer to the local HEAD with refspecs"
+ );
+ }
+
+ #[test]
+ fn remote_symbolic_refs_can_be_written_locally_and_point_to_tracking_branch() {
+ let repo = repo("two-origins");
+ let (mut mappings, specs) = mapping_from_spec("HEAD:refs/remotes/origin/new-HEAD", &repo);
+ mappings.push(Mapping {
+ remote: Source::Ref(gix_protocol::handshake::Ref::Direct {
+ full_ref_name: "refs/heads/main".try_into().unwrap(),
+ object: hex_to_id("f99771fe6a1b535783af3163eba95a927aae21d5"),
+ }),
+ local: Some("refs/remotes/origin/main".into()),
+ spec_index: SpecIndex::ExplicitInRemote(0),
+ });
+ let out = fetch::refs::update(
+ &repo,
+ prefixed("action"),
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::Yes,
+ fetch::WritePackedRefs::Never,
+ )
+ .unwrap();
+
+ assert_eq!(
+ out.updates,
+ vec![
+ fetch::refs::Update {
+ mode: fetch::refs::update::Mode::New,
+ edit_index: Some(0),
+ },
+ fetch::refs::Update {
+ mode: fetch::refs::update::Mode::NoChangeNeeded,
+ edit_index: Some(1),
+ }
+ ],
+ );
+ assert_eq!(out.edits.len(), 2);
+ let edit = &out.edits[0];
+ match &edit.change {
+ Change::Update { log, new, .. } => {
+ assert_eq!(log.message, "action: storing ref");
+ assert_eq!(
+ new.try_name().expect("symbolic ref").as_bstr(),
+ "refs/remotes/origin/main",
+ "remote is symbolic, so local will be symbolic as well, but is rewritten to tracking branch"
+ );
+ }
+ _ => unreachable!("only updates"),
+ }
+ assert_eq!(edit.name.as_bstr(), "refs/remotes/origin/new-HEAD",);
+ }
+
+ #[test]
+ fn non_fast_forward_is_rejected_but_appears_to_be_fast_forward_in_dryrun_mode() {
+ let repo = repo("two-origins");
+ let (mappings, specs) = mapping_from_spec("refs/heads/main:refs/remotes/origin/g", &repo);
+ let reflog_message: BString = "very special".into();
+ let out = fetch::refs::update(
+ &repo,
+ RefLogMessage::Override {
+ message: reflog_message.clone(),
+ },
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::Yes,
+ fetch::WritePackedRefs::Never,
+ )
+ .unwrap();
+
+ assert_eq!(
+ out.updates,
+ vec![fetch::refs::Update {
+ mode: fetch::refs::update::Mode::FastForward,
+ edit_index: Some(0),
+ }],
+ "The caller has to be aware and note that dry-runs can't know about fast-forwards as they don't have remote objects"
+ );
+ assert_eq!(out.edits.len(), 1);
+ let edit = &out.edits[0];
+ match &edit.change {
+ Change::Update { log, .. } => {
+ assert_eq!(log.message, reflog_message);
+ }
+ _ => unreachable!("only updates"),
+ }
+ }
+
+ #[test]
+ fn non_fast_forward_is_rejected_if_dry_run_is_disabled() {
+ let (repo, _tmp) = repo_rw("two-origins");
+ let (mappings, specs) = mapping_from_spec("refs/remotes/origin/g:refs/heads/not-currently-checked-out", &repo);
+ let out = fetch::refs::update(
+ &repo,
+ prefixed("action"),
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::No,
+ fetch::WritePackedRefs::Never,
+ )
+ .unwrap();
+
+ assert_eq!(
+ out.updates,
+ vec![fetch::refs::Update {
+ mode: fetch::refs::update::Mode::RejectedNonFastForward,
+ edit_index: None,
+ }]
+ );
+ assert_eq!(out.edits.len(), 0);
+
+ let (mappings, specs) = mapping_from_spec("refs/heads/main:refs/remotes/origin/g", &repo);
+ let out = fetch::refs::update(
+ &repo,
+ prefixed("prefix"),
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::No,
+ fetch::WritePackedRefs::Never,
+ )
+ .unwrap();
+
+ assert_eq!(
+ out.updates,
+ vec![fetch::refs::Update {
+ mode: fetch::refs::update::Mode::FastForward,
+ edit_index: Some(0),
+ }]
+ );
+ assert_eq!(out.edits.len(), 1);
+ let edit = &out.edits[0];
+ match &edit.change {
+ Change::Update { log, .. } => {
+ assert_eq!(log.message, format!("prefix: {}", "fast-forward"));
+ }
+ _ => unreachable!("only updates"),
+ }
+ }
+
+ #[test]
+ fn fast_forwards_are_called_out_even_if_force_is_given() {
+ let (repo, _tmp) = repo_rw("two-origins");
+ let (mappings, specs) = mapping_from_spec("+refs/heads/main:refs/remotes/origin/g", &repo);
+ let out = fetch::refs::update(
+ &repo,
+ prefixed("prefix"),
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::No,
+ fetch::WritePackedRefs::Never,
+ )
+ .unwrap();
+
+ assert_eq!(
+ out.updates,
+ vec![fetch::refs::Update {
+ mode: fetch::refs::update::Mode::FastForward,
+ edit_index: Some(0),
+ }]
+ );
+ assert_eq!(out.edits.len(), 1);
+ let edit = &out.edits[0];
+ match &edit.change {
+ Change::Update { log, .. } => {
+ assert_eq!(log.message, format!("prefix: {}", "fast-forward"));
+ }
+ _ => unreachable!("only updates"),
+ }
+ }
+
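+    // Build the fetch mappings and the owned refspec for a single fetch `spec`, matching it against all local
+    // references (plus `HEAD`) which stand in for refs a remote would advertise.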
+ fn mapping_from_spec(spec: &str, repo: &gix::Repository) -> (Vec<fetch::Mapping>, Vec<gix::refspec::RefSpec>) {
+ let spec = gix_refspec::parse(spec.into(), gix_refspec::parse::Operation::Fetch).unwrap();
+ let group = gix_refspec::MatchGroup::from_fetch_specs(Some(spec));
+ let references = repo.references().unwrap();
+ let mut references: Vec<_> = references.all().unwrap().map(|r| into_remote_ref(r.unwrap())).collect();
+ references.push(into_remote_ref(repo.find_reference("HEAD").unwrap()));
+ let mappings = group
+ .match_remotes(references.iter().map(remote_ref_to_item))
+ .mappings
+ .into_iter()
+ .map(|m| fetch::Mapping {
+ remote: m
+ .item_index
+ .map(|idx| fetch::Source::Ref(references[idx].clone()))
+ .unwrap_or_else(|| match m.lhs {
+ gix_refspec::match_group::SourceRef::ObjectId(id) => fetch::Source::ObjectId(id),
+ _ => unreachable!("not a ref, must be id: {:?}", m),
+ }),
+ local: m.rhs.map(|r| r.into_owned()),
+ spec_index: SpecIndex::ExplicitInRemote(m.spec_index),
+ })
+ .collect();
+ (mappings, vec![spec.to_owned()])
+ }
+
+ fn into_remote_ref(mut r: gix::Reference<'_>) -> gix_protocol::handshake::Ref {
+ let full_ref_name = r.name().as_bstr().into();
+ match r.target() {
+ TargetRef::Peeled(id) => gix_protocol::handshake::Ref::Direct {
+ full_ref_name,
+ object: id.into(),
+ },
+ TargetRef::Symbolic(name) => {
+ let target = name.as_bstr().into();
+ let id = r.peel_to_id_in_place().unwrap();
+ gix_protocol::handshake::Ref::Symbolic {
+ full_ref_name,
+ target,
+ object: id.detach(),
+ }
+ }
+ }
+ }
+
+ fn remote_ref_to_item(r: &gix_protocol::handshake::Ref) -> gix_refspec::match_group::Item<'_> {
+ let (full_ref_name, target, object) = r.unpack();
+ gix_refspec::match_group::Item {
+ full_ref_name,
+ target: target.expect("no unborn HEAD"),
+ object,
+ }
+ }
+
+ fn prefixed(action: &str) -> RefLogMessage {
+ RefLogMessage::Prefixed { action: action.into() }
+ }
+}
diff --git a/vendor/gix/src/remote/connection/fetch/update_refs/update.rs b/vendor/gix/src/remote/connection/fetch/update_refs/update.rs
new file mode 100644
index 000000000..6eda1ffc0
--- /dev/null
+++ b/vendor/gix/src/remote/connection/fetch/update_refs/update.rs
@@ -0,0 +1,128 @@
+use std::path::PathBuf;
+
+use crate::remote::fetch;
+
+mod error {
+ /// The error returned when updating references.
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ FindReference(#[from] crate::reference::find::Error),
+ #[error("A remote reference had a name that wasn't considered valid. Corrupt remote repo or insufficient checks on remote?")]
+ InvalidRefName(#[from] gix_validate::refname::Error),
+ #[error("Failed to update references to their new position to match their remote locations")]
+ EditReferences(#[from] crate::reference::edit::Error),
+ #[error("Failed to read or iterate worktree dir")]
+ WorktreeListing(#[from] std::io::Error),
+ #[error("Could not open worktree repository")]
+ OpenWorktreeRepo(#[from] crate::open::Error),
+ #[error("Could not find local commit for fast-forward ancestor check")]
+ FindCommit(#[from] crate::object::find::existing::Error),
+ }
+}
+
+pub use error::Error;
+
+/// The outcome of the refs-update operation at the end of a fetch.
+#[derive(Debug, Clone)]
+pub struct Outcome {
+ /// All edits that were performed to update local refs.
+ pub edits: Vec<gix_ref::transaction::RefEdit>,
+ /// Each update provides more information about what happened to the corresponding mapping.
+ /// Use [`iter_mapping_updates()`][Self::iter_mapping_updates()] to recombine the update information with ref-edits and their
+ /// mapping.
+ pub updates: Vec<super::Update>,
+}
+
+/// Describe the way a ref was updated.
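+///
+/// Each variant also has a compact human-readable rendering via its `Display` implementation, similar in spirit to
+/// the wording of `git fetch` output. A small sketch (assuming one of the network-client features is enabled so that
+/// this type is reachable as `gix::remote::fetch::refs::update::Mode`):
+///
+/// ```
+/// use gix::remote::fetch::refs::update::Mode;
+///
+/// assert_eq!(Mode::FastForward.to_string(), "fast-forward");
+/// assert_eq!(Mode::New.to_string(), "new");
+/// ```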
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum Mode {
+    /// No change was attempted because the remote ref didn't change compared to the current ref, or because no remote ref was specified
+    /// in the ref-spec.
+ NoChangeNeeded,
+ /// The old ref's commit was an ancestor of the new one, allowing for a fast-forward without a merge.
+ FastForward,
+ /// The ref was set to point to the new commit from the remote without taking into consideration its ancestry.
+ Forced,
+ /// A new ref has been created as there was none before.
+ New,
+ /// The reference belongs to a tag that was listed by the server but whose target didn't get sent as it doesn't point
+ /// to the commit-graph we were fetching explicitly.
+ ///
+    /// This kind of update only happens if `remote.<name>.tagOpt` is not explicitly set to either `--tags` or `--no-tags`.
+ ImplicitTagNotSentByRemote,
+ /// The object id to set the target reference to could not be found.
+ RejectedSourceObjectNotFound {
+        /// The id of the object that didn't exist in the object database, even though it should have been sent as part of the pack.
+ id: gix_hash::ObjectId,
+ },
+ /// Tags can never be overwritten (whether the new object would be a fast-forward or not, or unchanged), unless the refspec
+ /// specifies force.
+ RejectedTagUpdate,
+ /// The reference update would not have been a fast-forward, and force is not specified in the ref-spec.
+ RejectedNonFastForward,
+ /// The update of a local symbolic reference was rejected.
+ RejectedSymbolic,
+    /// The update was rejected because the branch is checked out in the given `worktree_dir`.
+ ///
+ /// Note that the check applies to any known worktree, whether it's present on disk or not.
+ RejectedCurrentlyCheckedOut {
+ /// The path to the worktree directory where the branch is checked out.
+ worktree_dir: PathBuf,
+ },
+}
+
+impl std::fmt::Display for Mode {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match self {
+ Mode::NoChangeNeeded => "up-to-date",
+ Mode::FastForward => "fast-forward",
+ Mode::Forced => "forced-update",
+ Mode::New => "new",
+ Mode::ImplicitTagNotSentByRemote => "unrelated tag on remote",
+ Mode::RejectedSourceObjectNotFound { id } => return write!(f, "rejected ({id} not found)"),
+ Mode::RejectedTagUpdate => "rejected (would overwrite existing tag)",
+ Mode::RejectedNonFastForward => "rejected (non-fast-forward)",
+ Mode::RejectedSymbolic => "rejected (refusing to write symbolic refs)",
+ Mode::RejectedCurrentlyCheckedOut { worktree_dir } => {
+ return write!(
+ f,
+ "rejected (cannot write into checked-out branch at \"{}\")",
+ worktree_dir.display()
+ )
+ }
+ }
+ .fmt(f)
+ }
+}
+
+impl Outcome {
+    /// Produce an iterator over all information used to produce this outcome, ref-update by ref-update, using the `mappings`
+    /// that were used when producing the ref update.
+    ///
+    /// Note that mappings without a corresponding entry in `refspecs` will yield `None` for their refspec, even though that should never be the case.
+    /// This can only happen if the `refspecs` passed in aren't the refspecs used to create the `mapping`, and it's up to the caller to sort it out.
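+    ///
+    /// A sketch of how the returned tuples may be consumed; it is marked `ignore` as it assumes pre-existing fetch results,
+    /// and all variable names are illustrative:
+    ///
+    /// ```ignore
+    /// for (update, mapping, spec, edit) in outcome.iter_mapping_updates(&mappings, &refspecs, &extra_refspecs) {
+    ///     // `edit` is `None` for updates that didn't produce a ref-edit, e.g. rejected ones.
+    ///     println!("{:?} -> {:?}: {} (edit: {})", mapping.remote.as_name(), mapping.local, update.mode, edit.is_some());
+    /// }
+    /// ```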
+ pub fn iter_mapping_updates<'a, 'b>(
+ &self,
+ mappings: &'a [fetch::Mapping],
+ refspecs: &'b [gix_refspec::RefSpec],
+ extra_refspecs: &'b [gix_refspec::RefSpec],
+ ) -> impl Iterator<
+ Item = (
+ &super::Update,
+ &'a fetch::Mapping,
+ Option<&'b gix_refspec::RefSpec>,
+ Option<&gix_ref::transaction::RefEdit>,
+ ),
+ > {
+ self.updates.iter().zip(mappings.iter()).map(move |(update, mapping)| {
+ (
+ update,
+ mapping,
+ mapping.spec_index.get(refspecs, extra_refspecs),
+ update.edit_index.and_then(|idx| self.edits.get(idx)),
+ )
+ })
+ }
+}
diff --git a/vendor/gix/src/remote/connection/mod.rs b/vendor/gix/src/remote/connection/mod.rs
new file mode 100644
index 000000000..09943ecc4
--- /dev/null
+++ b/vendor/gix/src/remote/connection/mod.rs
@@ -0,0 +1,29 @@
+use crate::Remote;
+
+pub(crate) struct HandshakeWithRefs {
+ outcome: gix_protocol::handshake::Outcome,
+ refs: Vec<gix_protocol::handshake::Ref>,
+}
+
+/// A function that performs a given credential action, trying to obtain credentials for an operation that needs it.
+pub type AuthenticateFn<'a> = Box<dyn FnMut(gix_credentials::helper::Action) -> gix_credentials::protocol::Result + 'a>;
+
+/// A type to represent an ongoing connection to a remote host, typically with the connection already established.
+///
+/// It can be used to perform a variety of operations with the remote without worrying about protocol details,
+/// much like a remote procedure call.
+pub struct Connection<'a, 'repo, T, P> {
+ pub(crate) remote: &'a Remote<'repo>,
+ pub(crate) authenticate: Option<AuthenticateFn<'a>>,
+ pub(crate) transport_options: Option<Box<dyn std::any::Any>>,
+ pub(crate) transport: T,
+ pub(crate) progress: P,
+}
+
+mod access;
+
+///
+pub mod ref_map;
+
+///
+pub mod fetch;
diff --git a/vendor/gix/src/remote/connection/ref_map.rs b/vendor/gix/src/remote/connection/ref_map.rs
new file mode 100644
index 000000000..0206e9002
--- /dev/null
+++ b/vendor/gix/src/remote/connection/ref_map.rs
@@ -0,0 +1,268 @@
+use std::collections::HashSet;
+
+use gix_features::progress::Progress;
+use gix_protocol::transport::client::Transport;
+
+use crate::{
+ bstr,
+ bstr::{BString, ByteVec},
+ remote::{connection::HandshakeWithRefs, fetch, fetch::SpecIndex, Connection, Direction},
+};
+
+/// The error returned by [`Connection::ref_map()`].
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum Error {
+ #[error("Failed to configure the transport before connecting to {url:?}")]
+ GatherTransportConfig {
+ url: BString,
+ source: crate::config::transport::Error,
+ },
+ #[error("Failed to configure the transport layer")]
+ ConfigureTransport(#[from] Box<dyn std::error::Error + Send + Sync + 'static>),
+ #[error(transparent)]
+ Handshake(#[from] gix_protocol::handshake::Error),
+ #[error("The object format {format:?} as used by the remote is unsupported")]
+ UnknownObjectFormat { format: BString },
+ #[error(transparent)]
+ ListRefs(#[from] gix_protocol::ls_refs::Error),
+ #[error(transparent)]
+ Transport(#[from] gix_protocol::transport::client::Error),
+ #[error(transparent)]
+ ConfigureCredentials(#[from] crate::config::credential_helpers::Error),
+ #[error(transparent)]
+ MappingValidation(#[from] gix_refspec::match_group::validate::Error),
+}
+
+impl gix_protocol::transport::IsSpuriousError for Error {
+ fn is_spurious(&self) -> bool {
+ match self {
+ Error::Transport(err) => err.is_spurious(),
+ Error::ListRefs(err) => err.is_spurious(),
+ Error::Handshake(err) => err.is_spurious(),
+ _ => false,
+ }
+ }
+}
+
+/// For use in [`Connection::ref_map()`].
+#[derive(Debug, Clone)]
+pub struct Options {
+    /// Use a two-component prefix derived from the ref-spec's source, like `refs/heads/`, to let the server pre-filter refs,
+    /// with great potential for savings in traffic and local CPU time. Defaults to `true`.
+ pub prefix_from_spec_as_filter_on_remote: bool,
+ /// Parameters in the form of `(name, optional value)` to add to the handshake.
+ ///
+ /// This is useful in case of custom servers.
+ pub handshake_parameters: Vec<(String, Option<String>)>,
+ /// A list of refspecs to use as implicit refspecs which won't be saved or otherwise be part of the remote in question.
+ ///
+ /// This is useful for handling `remote.<name>.tagOpt` for example.
+ pub extra_refspecs: Vec<gix_refspec::RefSpec>,
+}
+
+impl Default for Options {
+ fn default() -> Self {
+ Options {
+ prefix_from_spec_as_filter_on_remote: true,
+ handshake_parameters: Vec::new(),
+ extra_refspecs: Vec::new(),
+ }
+ }
+}
+
+impl<'remote, 'repo, T, P> Connection<'remote, 'repo, T, P>
+where
+ T: Transport,
+ P: Progress,
+{
+ /// List all references on the remote that have been filtered through our remote's [`refspecs`][crate::Remote::refspecs()]
+ /// for _fetching_.
+ ///
+ /// This comes in the form of all matching tips on the remote and the object they point to, along with
+    /// the local tracking branch of these tips (if available).
+ ///
+    /// Note that this doesn't fetch the objects mentioned in the tips nor does it make any change to the underlying repository.
+ ///
+ /// # Consumption
+ ///
+ /// Due to management of the transport, it's cleanest to only use it for a single interaction. Thus it's consumed along with
+ /// the connection.
+ ///
+ /// ### Configuration
+ ///
+ /// - `gitoxide.userAgent` is read to obtain the application user agent for git servers and for HTTP servers as well.
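+    ///
+    /// A minimal sketch of consuming the resulting [`RefMap`][fetch::RefMap], assuming a `connection` was already
+    /// established; it is marked `ignore` as it needs a reachable remote and, in async mode, an `.await`:
+    ///
+    /// ```ignore
+    /// let ref_map = connection.ref_map(Default::default())?;
+    /// for mapping in &ref_map.mappings {
+    ///     println!("{:?} -> {:?}", mapping.remote.as_name(), mapping.local);
+    /// }
+    /// ```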
+ #[allow(clippy::result_large_err)]
+ #[gix_protocol::maybe_async::maybe_async]
+ pub async fn ref_map(mut self, options: Options) -> Result<fetch::RefMap, Error> {
+ let res = self.ref_map_inner(options).await;
+ gix_protocol::indicate_end_of_interaction(&mut self.transport)
+ .await
+ .ok();
+ res
+ }
+
+ #[allow(clippy::result_large_err)]
+ #[gix_protocol::maybe_async::maybe_async]
+ pub(crate) async fn ref_map_inner(
+ &mut self,
+ Options {
+ prefix_from_spec_as_filter_on_remote,
+ handshake_parameters,
+ mut extra_refspecs,
+ }: Options,
+ ) -> Result<fetch::RefMap, Error> {
+ let null = gix_hash::ObjectId::null(gix_hash::Kind::Sha1); // OK to hardcode Sha1, it's not supposed to match, ever.
+
+ if let Some(tag_spec) = self.remote.fetch_tags.to_refspec().map(|spec| spec.to_owned()) {
+ if !extra_refspecs.contains(&tag_spec) {
+ extra_refspecs.push(tag_spec);
+ }
+ };
+ let specs = {
+ let mut s = self.remote.fetch_specs.clone();
+ s.extend(extra_refspecs.clone());
+ s
+ };
+ let remote = self
+ .fetch_refs(prefix_from_spec_as_filter_on_remote, handshake_parameters, &specs)
+ .await?;
+ let num_explicit_specs = self.remote.fetch_specs.len();
+ let group = gix_refspec::MatchGroup::from_fetch_specs(specs.iter().map(|s| s.to_ref()));
+ let (res, fixes) = group
+ .match_remotes(remote.refs.iter().map(|r| {
+ let (full_ref_name, target, object) = r.unpack();
+ gix_refspec::match_group::Item {
+ full_ref_name,
+ target: target.unwrap_or(&null),
+ object,
+ }
+ }))
+ .validated()?;
+ let mappings = res.mappings;
+ let mappings = mappings
+ .into_iter()
+ .map(|m| fetch::Mapping {
+ remote: m
+ .item_index
+ .map(|idx| fetch::Source::Ref(remote.refs[idx].clone()))
+ .unwrap_or_else(|| {
+ fetch::Source::ObjectId(match m.lhs {
+ gix_refspec::match_group::SourceRef::ObjectId(id) => id,
+ _ => unreachable!("no item index implies having an object id"),
+ })
+ }),
+ local: m.rhs.map(|c| c.into_owned()),
+ spec_index: if m.spec_index < num_explicit_specs {
+ SpecIndex::ExplicitInRemote(m.spec_index)
+ } else {
+ SpecIndex::Implicit(m.spec_index - num_explicit_specs)
+ },
+ })
+ .collect();
+
+ let object_hash = extract_object_format(self.remote.repo, &remote.outcome)?;
+ Ok(fetch::RefMap {
+ mappings,
+ extra_refspecs,
+ fixes,
+ remote_refs: remote.refs,
+ handshake: remote.outcome,
+ object_hash,
+ })
+ }
+
+ #[allow(clippy::result_large_err)]
+ #[gix_protocol::maybe_async::maybe_async]
+ async fn fetch_refs(
+ &mut self,
+ filter_by_prefix: bool,
+ extra_parameters: Vec<(String, Option<String>)>,
+ refspecs: &[gix_refspec::RefSpec],
+ ) -> Result<HandshakeWithRefs, Error> {
+ let mut credentials_storage;
+ let url = self.transport.to_url();
+ let authenticate = match self.authenticate.as_mut() {
+ Some(f) => f,
+ None => {
+ let url = self
+ .remote
+ .url(Direction::Fetch)
+ .map(ToOwned::to_owned)
+ .unwrap_or_else(|| gix_url::parse(url.as_ref()).expect("valid URL to be provided by transport"));
+ credentials_storage = self.configured_credentials(url)?;
+ &mut credentials_storage
+ }
+ };
+
+ if self.transport_options.is_none() {
+ self.transport_options = self
+ .remote
+ .repo
+ .transport_options(url.as_ref(), self.remote.name().map(|n| n.as_bstr()))
+ .map_err(|err| Error::GatherTransportConfig {
+ source: err,
+ url: url.into_owned(),
+ })?;
+ }
+ if let Some(config) = self.transport_options.as_ref() {
+ self.transport.configure(&**config)?;
+ }
+ let mut outcome =
+ gix_protocol::fetch::handshake(&mut self.transport, authenticate, extra_parameters, &mut self.progress)
+ .await?;
+ let refs = match outcome.refs.take() {
+ Some(refs) => refs,
+ None => {
+ let agent_feature = self.remote.repo.config.user_agent_tuple();
+ gix_protocol::ls_refs(
+ &mut self.transport,
+ &outcome.capabilities,
+ move |_capabilities, arguments, features| {
+ features.push(agent_feature);
+ if filter_by_prefix {
+ let mut seen = HashSet::new();
+ for spec in refspecs {
+ let spec = spec.to_ref();
+ if seen.insert(spec.instruction()) {
+ let mut prefixes = Vec::with_capacity(1);
+ spec.expand_prefixes(&mut prefixes);
+ for mut prefix in prefixes {
+ prefix.insert_str(0, "ref-prefix ");
+ arguments.push(prefix);
+ }
+ }
+ }
+ }
+ Ok(gix_protocol::ls_refs::Action::Continue)
+ },
+ &mut self.progress,
+ )
+ .await?
+ }
+ };
+ Ok(HandshakeWithRefs { outcome, refs })
+ }
+}
+
+/// Assume sha1 if the server says nothing, and error out on any other object format as only sha1 is currently supported.
+#[allow(clippy::result_large_err)]
+fn extract_object_format(
+ _repo: &crate::Repository,
+ outcome: &gix_protocol::handshake::Outcome,
+) -> Result<gix_hash::Kind, Error> {
+ use bstr::ByteSlice;
+ let object_hash =
+ if let Some(object_format) = outcome.capabilities.capability("object-format").and_then(|c| c.value()) {
+ let object_format = object_format.to_str().map_err(|_| Error::UnknownObjectFormat {
+ format: object_format.into(),
+ })?;
+ match object_format {
+ "sha1" => gix_hash::Kind::Sha1,
+ unknown => return Err(Error::UnknownObjectFormat { format: unknown.into() }),
+ }
+ } else {
+ gix_hash::Kind::Sha1
+ };
+ Ok(object_hash)
+}
diff --git a/vendor/gix/src/remote/errors.rs b/vendor/gix/src/remote/errors.rs
new file mode 100644
index 000000000..20060cedf
--- /dev/null
+++ b/vendor/gix/src/remote/errors.rs
@@ -0,0 +1,45 @@
+///
+pub mod find {
+ use crate::{bstr::BString, config, remote};
+
+ /// The error returned by [`Repository::find_remote(…)`][crate::Repository::find_remote()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("The value for 'remote.<name>.tagOpt` is invalid and must either be '--tags' or '--no-tags'")]
+ TagOpt(#[from] config::key::GenericErrorWithValue),
+ #[error("{kind} ref-spec under `remote.{remote_name}` was invalid")]
+ RefSpec {
+ kind: &'static str,
+ remote_name: BString,
+ source: config::refspec::Error,
+ },
+ #[error("Neither 'url` nor 'pushUrl' fields were set in the remote's configuration.")]
+ UrlMissing,
+ #[error("The {kind} url under `remote.{remote_name}` was invalid")]
+ Url {
+ kind: &'static str,
+ remote_name: BString,
+ source: config::url::Error,
+ },
+ #[error(transparent)]
+ Init(#[from] remote::init::Error),
+ }
+
+ ///
+ pub mod existing {
+ use crate::bstr::BString;
+
+ /// The error returned by [`Repository::find_remote(…)`][crate::Repository::find_remote()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ Find(#[from] super::Error),
+ #[error("remote name could not be parsed as URL")]
+ UrlParse(#[from] gix_url::parse::Error),
+ #[error("The remote named {name:?} did not exist")]
+ NotFound { name: BString },
+ }
+ }
+}
diff --git a/vendor/gix/src/remote/fetch.rs b/vendor/gix/src/remote/fetch.rs
new file mode 100644
index 000000000..4add96a65
--- /dev/null
+++ b/vendor/gix/src/remote/fetch.rs
@@ -0,0 +1,166 @@
+/// If `Yes`, don't really make changes but do as much as possible to get an idea of what would be done.
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+#[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+pub(crate) enum DryRun {
+ /// Enable dry-run mode and don't actually change the underlying repository in any way.
+ Yes,
+ /// Run the operation like normal, making changes to the underlying repository.
+ No,
+}
+
+/// How to deal with refs when cloning or fetching.
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+#[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+pub(crate) enum WritePackedRefs {
+ /// Normal operation, i.e. don't use packed-refs at all for writing.
+ Never,
+ /// Put ref updates straight into the `packed-refs` file, without creating loose refs first or dealing with them in any way.
+ Only,
+}
+
+/// Describe how to handle tags when fetching.
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+pub enum Tags {
+ /// Fetch all tags from the remote, even if these are not reachable from objects referred to by our refspecs.
+ All,
+ /// Fetch only the tags that point to the objects being sent.
+ /// That way, annotated tags that point to an object we receive are automatically transmitted and their refs are created.
+ /// The same goes for lightweight tags.
+ Included,
+ /// Do not fetch any tags.
+ None,
+}
+
+impl Default for Tags {
+ fn default() -> Self {
+ Tags::Included
+ }
+}
+
+impl Tags {
+ /// Obtain a refspec that determines whether or not to fetch all tags, depending on this variant.
+ ///
+    /// The returned refspec is the default refspec for tags, but it will never overwrite local tags.
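+    ///
+    /// A small sketch of the effect per variant:
+    ///
+    /// ```
+    /// use gix::remote::fetch::Tags;
+    ///
+    /// // `All` and `Included` share the standard tag refspec, `None` yields no refspec at all.
+    /// assert!(Tags::All.to_refspec().is_some());
+    /// assert!(Tags::Included.to_refspec().is_some());
+    /// assert!(Tags::None.to_refspec().is_none());
+    /// ```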
+ pub fn to_refspec(&self) -> Option<gix_refspec::RefSpecRef<'static>> {
+ match self {
+ Tags::All | Tags::Included => Some(
+ gix_refspec::parse("refs/tags/*:refs/tags/*".into(), gix_refspec::parse::Operation::Fetch)
+ .expect("valid"),
+ ),
+ Tags::None => None,
+ }
+ }
+}
+
+/// Information about the relationship between our refspecs, and remote references with their local counterparts.
+#[derive(Default, Debug, Clone)]
+#[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+pub struct RefMap {
+ /// A mapping between a remote reference and a local tracking branch.
+ pub mappings: Vec<Mapping>,
+ /// Refspecs which have been added implicitly due to settings of the `remote`, possibly pre-initialized from
+ /// [`extra_refspecs` in RefMap options][crate::remote::ref_map::Options::extra_refspecs].
+ ///
+ /// They are never persisted nor are they typically presented to the user.
+ pub extra_refspecs: Vec<gix_refspec::RefSpec>,
+    /// Information about the fixes applied to the `mappings` due to validation and sanitization.
+ pub fixes: Vec<gix_refspec::match_group::validate::Fix>,
+ /// All refs advertised by the remote.
+ pub remote_refs: Vec<gix_protocol::handshake::Ref>,
+ /// Additional information provided by the server as part of the handshake.
+ ///
+ /// Note that the `refs` field is always `None` as the refs are placed in `remote_refs`.
+ pub handshake: gix_protocol::handshake::Outcome,
+ /// The kind of hash used for all data sent by the server, if understood by this client implementation.
+ ///
+ /// It was extracted from the `handshake` as advertised by the server.
+ pub object_hash: gix_hash::Kind,
+}
+
+/// Either an object id that the remote has or the matched remote ref itself.
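+///
+/// A sketch of telling the two cases apart (assuming a network-client feature is enabled so this type is compiled in):
+///
+/// ```
+/// use gix::remote::fetch::Source;
+///
+/// fn describe(source: &Source) -> String {
+///     match source.as_name() {
+///         Some(name) => format!("ref {name}"),
+///         None => format!("object {:?}", source.as_id()),
+///     }
+/// }
+/// ```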
+#[derive(Debug, Clone)]
+#[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+pub enum Source {
+ /// An object id, as the matched ref-spec was an object id itself.
+ ObjectId(gix_hash::ObjectId),
+ /// The remote reference that matched the ref-specs name.
+ Ref(gix_protocol::handshake::Ref),
+}
+
+#[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+impl Source {
+ /// Return either the direct object id we refer to or the direct target that a reference refers to.
+ /// The latter may be a direct or a symbolic reference, and we degenerate this to the peeled object id.
+ /// If unborn, `None` is returned.
+ pub fn as_id(&self) -> Option<&gix_hash::oid> {
+ match self {
+ Source::ObjectId(id) => Some(id),
+ Source::Ref(r) => r.unpack().1,
+ }
+ }
+
+ /// Return ourselves as the full name of the reference we represent, or `None` if this source isn't a reference but an object.
+ pub fn as_name(&self) -> Option<&crate::bstr::BStr> {
+ match self {
+ Source::ObjectId(_) => None,
+ Source::Ref(r) => match r {
+ gix_protocol::handshake::Ref::Unborn { full_ref_name, .. }
+ | gix_protocol::handshake::Ref::Symbolic { full_ref_name, .. }
+ | gix_protocol::handshake::Ref::Direct { full_ref_name, .. }
+ | gix_protocol::handshake::Ref::Peeled { full_ref_name, .. } => Some(full_ref_name.as_ref()),
+ },
+ }
+ }
+}
+
+/// An index into various lists of refspecs that have been used in a [Mapping] of remote references to local ones.
+#[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, Ord, PartialOrd)]
+pub enum SpecIndex {
+ /// An index into the _refspecs of the remote_ that triggered a fetch operation.
+ /// These refspecs are explicit and visible to the user.
+ ExplicitInRemote(usize),
+ /// An index into the list of [extra refspecs][crate::remote::fetch::RefMap::extra_refspecs] that are implicit
+ /// to a particular fetch operation.
+ Implicit(usize),
+}
+
+#[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+impl SpecIndex {
+    /// Depending on the variant, get the refspec at our index either from `refspecs` (for `ExplicitInRemote`) or from `extra_refspecs` (for `Implicit`).
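+    ///
+    /// A small sketch (assuming a network-client feature is enabled so this type is compiled in):
+    ///
+    /// ```
+    /// use gix::remote::fetch::SpecIndex;
+    ///
+    /// let explicit: Vec<gix::refspec::RefSpec> = Vec::new(); // refspecs configured on the remote
+    /// let extra: Vec<gix::refspec::RefSpec> = Vec::new(); // implicit refspecs added just for one fetch
+    /// // Each variant indexes into its own list, so nothing can be found in these empty lists.
+    /// assert!(SpecIndex::ExplicitInRemote(0).get(&explicit, &extra).is_none());
+    /// assert!(SpecIndex::Implicit(0).get(&explicit, &extra).is_none());
+    /// assert_eq!(SpecIndex::Implicit(3).implicit_index(), Some(3));
+    /// ```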
+ pub fn get<'a>(
+ self,
+ refspecs: &'a [gix_refspec::RefSpec],
+ extra_refspecs: &'a [gix_refspec::RefSpec],
+ ) -> Option<&'a gix_refspec::RefSpec> {
+ match self {
+ SpecIndex::ExplicitInRemote(idx) => refspecs.get(idx),
+ SpecIndex::Implicit(idx) => extra_refspecs.get(idx),
+ }
+ }
+
+ /// If this is an `Implicit` variant, return its index.
+ pub fn implicit_index(self) -> Option<usize> {
+ match self {
+ SpecIndex::Implicit(idx) => Some(idx),
+ SpecIndex::ExplicitInRemote(_) => None,
+ }
+ }
+}
+
+/// A mapping between a single remote reference, along with the objects it advertises, and a local destination which may or may not exist.
+#[derive(Debug, Clone)]
+#[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+pub struct Mapping {
+    /// The reference on the remote side, along with information about the objects it points to as advertised by the server.
+ pub remote: Source,
+ /// The local tracking reference to update after fetching the object visible via `remote`.
+ pub local: Option<crate::bstr::BString>,
+ /// The index into the fetch ref-specs used to produce the mapping, allowing it to be recovered.
+ pub spec_index: SpecIndex,
+}
+
+#[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+pub use super::connection::fetch::{
+ negotiate, prepare, refs, Error, Outcome, Prepare, ProgressId, RefLogMessage, Status,
+};
diff --git a/vendor/gix/src/remote/init.rs b/vendor/gix/src/remote/init.rs
new file mode 100644
index 000000000..bba116946
--- /dev/null
+++ b/vendor/gix/src/remote/init.rs
@@ -0,0 +1,116 @@
+use std::convert::TryInto;
+
+use gix_refspec::RefSpec;
+
+use crate::{config, remote, Remote, Repository};
+
+mod error {
+ use crate::bstr::BString;
+
+ /// The error returned by [`Repository::remote_at(…)`][crate::Repository::remote_at()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ Url(#[from] gix_url::parse::Error),
+ #[error("The rewritten {kind} url {rewritten_url:?} failed to parse")]
+ RewrittenUrlInvalid {
+ kind: &'static str,
+ rewritten_url: BString,
+ source: gix_url::parse::Error,
+ },
+ }
+}
+pub use error::Error;
+
+use crate::bstr::BString;
+
+/// Initialization
+impl<'repo> Remote<'repo> {
+ #[allow(clippy::too_many_arguments)]
+ pub(crate) fn from_preparsed_config(
+ name_or_url: Option<BString>,
+ url: Option<gix_url::Url>,
+ push_url: Option<gix_url::Url>,
+ fetch_specs: Vec<RefSpec>,
+ push_specs: Vec<RefSpec>,
+ should_rewrite_urls: bool,
+ fetch_tags: remote::fetch::Tags,
+ repo: &'repo Repository,
+ ) -> Result<Self, Error> {
+ debug_assert!(
+ url.is_some() || push_url.is_some(),
+ "BUG: fetch or push url must be set at least"
+ );
+ let (url_alias, push_url_alias) = should_rewrite_urls
+ .then(|| rewrite_urls(&repo.config, url.as_ref(), push_url.as_ref()))
+ .unwrap_or(Ok((None, None)))?;
+ Ok(Remote {
+ name: name_or_url.map(Into::into),
+ url,
+ url_alias,
+ push_url,
+ push_url_alias,
+ fetch_specs,
+ push_specs,
+ fetch_tags,
+ repo,
+ })
+ }
+
+ pub(crate) fn from_fetch_url<Url, E>(
+ url: Url,
+ should_rewrite_urls: bool,
+ repo: &'repo Repository,
+ ) -> Result<Self, Error>
+ where
+ Url: TryInto<gix_url::Url, Error = E>,
+ gix_url::parse::Error: From<E>,
+ {
+ let url = url.try_into().map_err(|err| Error::Url(err.into()))?;
+ let (url_alias, _) = should_rewrite_urls
+ .then(|| rewrite_urls(&repo.config, Some(&url), None))
+ .unwrap_or(Ok((None, None)))?;
+ Ok(Remote {
+ name: None,
+ url: Some(url),
+ url_alias,
+ push_url: None,
+ push_url_alias: None,
+ fetch_specs: Vec::new(),
+ push_specs: Vec::new(),
+ fetch_tags: Default::default(),
+ repo,
+ })
+ }
+}
+
+pub(crate) fn rewrite_url(
+ config: &config::Cache,
+ url: Option<&gix_url::Url>,
+ direction: remote::Direction,
+) -> Result<Option<gix_url::Url>, Error> {
+ url.and_then(|url| config.url_rewrite().longest(url, direction))
+ .map(|url| {
+ gix_url::parse(url.as_ref()).map_err(|err| Error::RewrittenUrlInvalid {
+ kind: match direction {
+ remote::Direction::Fetch => "fetch",
+ remote::Direction::Push => "push",
+ },
+ source: err,
+ rewritten_url: url,
+ })
+ })
+ .transpose()
+}
+
+pub(crate) fn rewrite_urls(
+ config: &config::Cache,
+ url: Option<&gix_url::Url>,
+ push_url: Option<&gix_url::Url>,
+) -> Result<(Option<gix_url::Url>, Option<gix_url::Url>), Error> {
+ let url_alias = rewrite_url(config, url, remote::Direction::Fetch)?;
+ let push_url_alias = rewrite_url(config, push_url, remote::Direction::Push)?;
+
+ Ok((url_alias, push_url_alias))
+}
diff --git a/vendor/gix/src/remote/mod.rs b/vendor/gix/src/remote/mod.rs
new file mode 100644
index 000000000..f016575c7
--- /dev/null
+++ b/vendor/gix/src/remote/mod.rs
@@ -0,0 +1,62 @@
+use std::borrow::Cow;
+
+use crate::bstr::BStr;
+
+/// The direction of an operation carried out (or to be carried out) through a remote.
+#[derive(Debug, Eq, PartialEq, Copy, Clone, Hash)]
+pub enum Direction {
+ /// Push local changes to the remote.
+ Push,
+ /// Fetch changes from the remote to the local repository.
+ Fetch,
+}
+
+impl Direction {
+    /// Return ourselves as a string suitable for use as a verb in an English sentence.
+ pub fn as_str(&self) -> &'static str {
+ match self {
+ Direction::Push => "push",
+ Direction::Fetch => "fetch",
+ }
+ }
+}
+
+/// The name of a remote, either interpreted as a symbol like `origin` or as a url, as returned by [`Remote::name()`][crate::Remote::name()].
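+///
+/// A small sketch of how a user-provided string is classified, using the `TryFrom<Cow<BStr>>` implementation
+/// from the `name` module:
+///
+/// ```
+/// use std::borrow::Cow;
+/// use std::convert::TryFrom;
+/// use gix::{bstr::BStr, remote::Name};
+///
+/// // Plain words become symbolic names, anything containing a slash (or just `.`) is treated as a URL or path.
+/// let symbol = Name::try_from(Cow::Borrowed(BStr::new("origin"))).expect("valid UTF-8 symbol");
+/// assert_eq!(symbol.as_symbol(), Some("origin"));
+///
+/// let url = Name::try_from(Cow::Borrowed(BStr::new("https://example.com/repo.git"))).expect("urls are passed through");
+/// assert!(url.as_url().is_some());
+/// ```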
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum Name<'repo> {
+ /// A symbolic name, like `origin`.
+ /// Note that it has not necessarily been validated yet.
+ Symbol(Cow<'repo, str>),
+ /// A url pointing to the remote host directly.
+ Url(Cow<'repo, BStr>),
+}
+
+///
+pub mod name;
+
+mod build;
+
+mod errors;
+pub use errors::find;
+
+///
+pub mod init;
+
+///
+pub mod fetch;
+
+///
+#[cfg(any(feature = "async-network-client", feature = "blocking-network-client"))]
+pub mod connect;
+
+#[cfg(any(feature = "async-network-client", feature = "blocking-network-client"))]
+mod connection;
+#[cfg(any(feature = "async-network-client", feature = "blocking-network-client"))]
+pub use connection::{ref_map, AuthenticateFn, Connection};
+
+///
+pub mod save;
+
+mod access;
+///
+pub mod url;
diff --git a/vendor/gix/src/remote/name.rs b/vendor/gix/src/remote/name.rs
new file mode 100644
index 000000000..6c6afe745
--- /dev/null
+++ b/vendor/gix/src/remote/name.rs
@@ -0,0 +1,84 @@
+use std::{borrow::Cow, convert::TryFrom};
+
+use super::Name;
+use crate::bstr::{BStr, BString, ByteSlice, ByteVec};
+
+/// The error returned by [validated()].
+#[derive(Debug, thiserror::Error)]
+#[error("remote names must be valid within refspecs for fetching: {name:?}")]
+#[allow(missing_docs)]
+pub struct Error {
+ pub source: gix_refspec::parse::Error,
+ pub name: BString,
+}
+
+/// Return `name` if it is valid as a symbolic remote name.
+///
+/// This means it has to be valid within the ref path of a tracking branch.
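+///
+/// A small sketch; the invalid example relies on whitespace being rejected within ref names:
+///
+/// ```
+/// assert!(gix::remote::name::validated("origin").is_ok());
+/// assert!(gix::remote::name::validated("invalid name").is_err());
+/// ```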
+pub fn validated(name: impl Into<BString>) -> Result<BString, Error> {
+ let name = name.into();
+ match gix_refspec::parse(
+ format!("refs/heads/test:refs/remotes/{name}/test").as_str().into(),
+ gix_refspec::parse::Operation::Fetch,
+ ) {
+ Ok(_) => Ok(name),
+ Err(err) => Err(Error { source: err, name }),
+ }
+}
+
+impl Name<'_> {
+    /// Obtain the name as a string representation.
+ pub fn as_bstr(&self) -> &BStr {
+ match self {
+ Name::Symbol(v) => v.as_ref().into(),
+ Name::Url(v) => v.as_ref(),
+ }
+ }
+
+ /// Return this instance as a symbolic name, if it is one.
+ pub fn as_symbol(&self) -> Option<&str> {
+ match self {
+ Name::Symbol(n) => n.as_ref().into(),
+ Name::Url(_) => None,
+ }
+ }
+
+ /// Return this instance as url, if it is one.
+ pub fn as_url(&self) -> Option<&BStr> {
+ match self {
+ Name::Url(n) => n.as_ref().into(),
+ Name::Symbol(_) => None,
+ }
+ }
+}
+
+impl<'a> TryFrom<Cow<'a, BStr>> for Name<'a> {
+ type Error = Cow<'a, BStr>;
+
+ fn try_from(name: Cow<'a, BStr>) -> Result<Self, Self::Error> {
+ if name.contains(&b'/') || name.as_ref() == "." {
+ Ok(Name::Url(name))
+ } else {
+ match name {
+ Cow::Borrowed(n) => n.to_str().ok().map(Cow::Borrowed).ok_or(name),
+ Cow::Owned(n) => Vec::from(n)
+ .into_string()
+ .map_err(|err| Cow::Owned(err.into_vec().into()))
+ .map(Cow::Owned),
+ }
+ .map(Name::Symbol)
+ }
+ }
+}
+
+impl From<BString> for Name<'static> {
+ fn from(name: BString) -> Self {
+ Self::try_from(Cow::Owned(name)).expect("String is never illformed")
+ }
+}
+
+impl<'a> AsRef<BStr> for Name<'a> {
+ fn as_ref(&self) -> &BStr {
+ self.as_bstr()
+ }
+}
diff --git a/vendor/gix/src/remote/save.rs b/vendor/gix/src/remote/save.rs
new file mode 100644
index 000000000..0e347551e
--- /dev/null
+++ b/vendor/gix/src/remote/save.rs
@@ -0,0 +1,125 @@
+use std::convert::TryInto;
+
+use crate::{
+ bstr::{BStr, BString},
+ config, remote, Remote,
+};
+
+/// The error returned by [`Remote::save_to()`].
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum Error {
+ #[error("The remote pointing to {} is anonymous and can't be saved.", url.to_bstring())]
+ NameMissing { url: gix_url::Url },
+}
+
+/// The error returned by [`Remote::save_as_to()`].
+///
+/// Note that this type should rather live in an `as` module, but it cannot, as `as` is a Rust keyword.
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum AsError {
+ #[error(transparent)]
+ Save(#[from] Error),
+ #[error(transparent)]
+ Name(#[from] crate::remote::name::Error),
+}
+
+/// Serialize into gix-config.
+impl Remote<'_> {
+ /// Save ourselves to the given `config` if we are a named remote or fail otherwise.
+ ///
+ /// Note that all sections named `remote "<name>"` will be cleared of all values we are about to write,
+    /// and the last `remote "<name>"` section will contain all relevant values so that reloading the remote
+ /// from `config` would yield the same in-memory state.
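+    ///
+    /// A sketch of persisting a named remote; the `gix_config::File` to write into is assumed to be obtained elsewhere,
+    /// for instance from the repository's configuration:
+    ///
+    /// ```no_run
+    /// # fn doc(repo: &gix::Repository, config: &mut gix_config::File<'static>) -> Result<(), Box<dyn std::error::Error>> {
+    /// let remote = repo.find_remote("origin")?;
+    /// // Writes `url`, `pushurl`, `fetch`, `push` and `tagOpt` keys into a `remote "origin"` section.
+    /// remote.save_to(config)?;
+    /// # Ok(()) }
+    /// ```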
+ pub fn save_to(&self, config: &mut gix_config::File<'static>) -> Result<(), Error> {
+ fn as_key(name: &str) -> gix_config::parse::section::Key<'_> {
+ name.try_into().expect("valid")
+ }
+ let name = self.name().ok_or_else(|| Error::NameMissing {
+ url: self
+ .url
+ .as_ref()
+ .or(self.push_url.as_ref())
+ .expect("one url is always set")
+ .to_owned(),
+ })?;
+ if let Some(section_ids) = config.sections_and_ids_by_name("remote").map(|it| {
+ it.filter_map(|(s, id)| (s.header().subsection_name() == Some(name.as_bstr())).then_some(id))
+ .collect::<Vec<_>>()
+ }) {
+ let mut sections_to_remove = Vec::new();
+ const KEYS_TO_REMOVE: &[&str] = &[
+ config::tree::Remote::URL.name,
+ config::tree::Remote::PUSH_URL.name,
+ config::tree::Remote::FETCH.name,
+ config::tree::Remote::PUSH.name,
+ config::tree::Remote::TAG_OPT.name,
+ ];
+ for id in section_ids {
+ let mut section = config.section_mut_by_id(id).expect("just queried");
+ let was_empty = section.num_values() == 0;
+
+ for key in KEYS_TO_REMOVE {
+ while section.remove(key).is_some() {}
+ }
+
+ let is_empty_after_deletions_of_values_to_be_written = section.num_values() == 0;
+ if !was_empty && is_empty_after_deletions_of_values_to_be_written {
+ sections_to_remove.push(id);
+ }
+ }
+ for id in sections_to_remove {
+ config.remove_section_by_id(id);
+ }
+ }
+ let mut section = config
+ .section_mut_or_create_new("remote", Some(name.as_ref()))
+ .expect("section name is validated and 'remote' is acceptable");
+ if let Some(url) = self.url.as_ref() {
+ section.push(as_key("url"), Some(url.to_bstring().as_ref()));
+ }
+ if let Some(url) = self.push_url.as_ref() {
+ section.push(as_key("pushurl"), Some(url.to_bstring().as_ref()));
+ }
+ if self.fetch_tags != Default::default() {
+ section.push(
+ as_key(config::tree::Remote::TAG_OPT.name),
+ BStr::new(match self.fetch_tags {
+ remote::fetch::Tags::All => "--tags",
+ remote::fetch::Tags::None => "--no-tags",
+ remote::fetch::Tags::Included => unreachable!("BUG: the default shouldn't be written and we try"),
+ })
+ .into(),
+ );
+ }
+ for (key, spec) in self
+ .fetch_specs
+ .iter()
+ .map(|spec| ("fetch", spec))
+ .chain(self.push_specs.iter().map(|spec| ("push", spec)))
+ {
+ section.push(as_key(key), Some(spec.to_ref().to_bstring().as_ref()));
+ }
+ Ok(())
+ }
+
+ /// Forcefully set our name to `name` and write our state to `config` similar to [`save_to()`][Self::save_to()].
+ ///
+    /// Note that this sets a name for anonymous remotes, but overwrites the name for those that were named before.
+ /// If this name is different from the current one, the git configuration will still contain the previous name,
+ /// and the caller should account for that.
+ pub fn save_as_to(
+ &mut self,
+ name: impl Into<BString>,
+ config: &mut gix_config::File<'static>,
+ ) -> Result<(), AsError> {
+ let name = crate::remote::name::validated(name)?;
+ let prev_name = self.name.take();
+ self.name = Some(name.into());
+ self.save_to(config).map_err(|err| {
+ self.name = prev_name;
+ err.into()
+ })
+ }
+}
diff --git a/vendor/gix/src/remote/url/mod.rs b/vendor/gix/src/remote/url/mod.rs
new file mode 100644
index 000000000..7b8815812
--- /dev/null
+++ b/vendor/gix/src/remote/url/mod.rs
@@ -0,0 +1,7 @@
+mod rewrite;
+///
+#[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+pub mod scheme_permission;
+pub(crate) use rewrite::Rewrite;
+#[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))]
+pub(crate) use scheme_permission::SchemePermission;
diff --git a/vendor/gix/src/remote/url/rewrite.rs b/vendor/gix/src/remote/url/rewrite.rs
new file mode 100644
index 000000000..ae0eee426
--- /dev/null
+++ b/vendor/gix/src/remote/url/rewrite.rs
@@ -0,0 +1,100 @@
+use gix_features::threading::OwnShared;
+
+use crate::{
+ bstr::{BStr, BString, ByteVec},
+ config,
+ remote::Direction,
+};
+
+#[derive(Debug, Clone)]
+struct Replace {
+ find: BString,
+ with: OwnShared<BString>,
+}
+
+#[derive(Default, Debug, Clone)]
+pub(crate) struct Rewrite {
+ url_rewrite: Vec<Replace>,
+ push_url_rewrite: Vec<Replace>,
+}
+
+/// Init
+impl Rewrite {
+ pub fn from_config(
+ config: &gix_config::File<'static>,
+ mut filter: fn(&gix_config::file::Metadata) -> bool,
+ ) -> Rewrite {
+ config
+ .sections_by_name_and_filter("url", &mut filter)
+ .map(|sections| {
+ let mut url_rewrite = Vec::new();
+ let mut push_url_rewrite = Vec::new();
+ for section in sections {
+ let replace = match section.header().subsection_name() {
+ Some(base) => OwnShared::new(base.to_owned()),
+ None => continue,
+ };
+
+ for instead_of in section.values(config::tree::Url::INSTEAD_OF.name) {
+ url_rewrite.push(Replace {
+ with: OwnShared::clone(&replace),
+ find: instead_of.into_owned(),
+ });
+ }
+ for instead_of in section.values(config::tree::Url::PUSH_INSTEAD_OF.name) {
+ push_url_rewrite.push(Replace {
+ with: OwnShared::clone(&replace),
+ find: instead_of.into_owned(),
+ });
+ }
+ }
+ Rewrite {
+ url_rewrite,
+ push_url_rewrite,
+ }
+ })
+ .unwrap_or_default()
+ }
+}
+
+/// Access
+impl Rewrite {
+ fn replacements_for(&self, direction: Direction) -> &[Replace] {
+ match direction {
+ Direction::Fetch => &self.url_rewrite,
+ Direction::Push => &self.push_url_rewrite,
+ }
+ }
+
+ pub fn longest(&self, url: &gix_url::Url, direction: Direction) -> Option<BString> {
+ if self.replacements_for(direction).is_empty() {
+ None
+ } else {
+ let mut url = url.to_bstring();
+ self.rewrite_url_in_place(&mut url, direction).then_some(url)
+ }
+ }
+
+    /// Rewrite the given `url`, used for `direction`, and return `true` if a replacement happened.
+ ///
+ /// Note that the result must still be checked for validity, it might not be a valid URL as we do a syntax-unaware replacement.
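+    ///
+    /// For example, with `url."https://github.com/".insteadOf = gh:` configured, a fetch url of `gh:user/repo`
+    /// is rewritten to `https://github.com/user/repo`, and the longest matching `insteadOf` value wins.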
+ pub fn rewrite_url_in_place(&self, url: &mut BString, direction: Direction) -> bool {
+ self.replacements_for(direction)
+ .iter()
+ .fold(None::<(usize, &BStr)>, |mut acc, replace| {
+ if url.starts_with(replace.find.as_ref()) {
+ let (bytes_matched, prev_rewrite_with) =
+ acc.get_or_insert((replace.find.len(), replace.with.as_slice().into()));
+ if *bytes_matched < replace.find.len() {
+ *bytes_matched = replace.find.len();
+ *prev_rewrite_with = replace.with.as_slice().into();
+ }
+ };
+ acc
+ })
+ .map(|(bytes_matched, replace_with)| {
+ url.replace_range(..bytes_matched, replace_with);
+ })
+ .is_some()
+ }
+}
diff --git a/vendor/gix/src/remote/url/scheme_permission.rs b/vendor/gix/src/remote/url/scheme_permission.rs
new file mode 100644
index 000000000..ddb87e111
--- /dev/null
+++ b/vendor/gix/src/remote/url/scheme_permission.rs
@@ -0,0 +1,120 @@
+use std::{borrow::Cow, collections::BTreeMap, convert::TryFrom};
+
+use crate::{
+ bstr::{BStr, BString, ByteSlice},
+ config,
+ config::tree::{gitoxide, Key, Protocol},
+};
+
+/// All allowed values of the `protocol.allow` key.
+#[derive(Debug, Clone, Copy, Eq, PartialEq)]
+pub enum Allow {
+    /// Allow using this protocol.
+    Always,
+    /// Forbid using this protocol.
+    Never,
+    /// Only allow this protocol if the `GIT_PROTOCOL_FROM_USER` environment variable is unset or set to `1`.
+ User,
+}
+
+impl Allow {
+ /// Return true if we represent something like 'allow == true'.
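+    ///
+    /// A small sketch (assuming a network-client feature is enabled so this type is compiled in):
+    ///
+    /// ```
+    /// use gix::remote::url::scheme_permission::Allow;
+    ///
+    /// assert!(Allow::Always.to_bool(None));
+    /// assert!(!Allow::Never.to_bool(Some(true)));
+    /// // `User` defers to the `GIT_PROTOCOL_FROM_USER` environment variable and defaults to allowed.
+    /// assert!(Allow::User.to_bool(None));
+    /// assert!(!Allow::User.to_bool(Some(false)));
+    /// ```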
+ pub fn to_bool(self, user_allowed: Option<bool>) -> bool {
+ match self {
+ Allow::Always => true,
+ Allow::Never => false,
+ Allow::User => user_allowed.unwrap_or(true),
+ }
+ }
+}
+
+impl<'a> TryFrom<Cow<'a, BStr>> for Allow {
+ type Error = BString;
+
+ fn try_from(v: Cow<'a, BStr>) -> Result<Self, Self::Error> {
+ Ok(match v.as_ref().as_bytes() {
+ b"never" => Allow::Never,
+ b"always" => Allow::Always,
+ b"user" => Allow::User,
+ unknown => return Err(unknown.into()),
+ })
+ }
+}
+
+#[derive(Debug, Clone)]
+pub(crate) struct SchemePermission {
+    /// `None` if the env-var is unset or wasn't queried, otherwise `true` if `GIT_PROTOCOL_FROM_USER` is set to `1`.
+ user_allowed: Option<bool>,
+ /// The general allow value from `protocol.allow`.
+ allow: Option<Allow>,
+ /// Per scheme allow information
+ allow_per_scheme: BTreeMap<gix_url::Scheme, Allow>,
+}
+
+/// Init
+impl SchemePermission {
+ /// NOTE: _intentionally without leniency_
+ pub fn from_config(
+ config: &gix_config::File<'static>,
+ mut filter: fn(&gix_config::file::Metadata) -> bool,
+ ) -> Result<Self, config::protocol::allow::Error> {
+ let allow: Option<Allow> = config
+ .string_filter_by_key("protocol.allow", &mut filter)
+ .map(|value| Protocol::ALLOW.try_into_allow(value, None))
+ .transpose()?;
+
+ let mut saw_user = allow.map_or(false, |allow| allow == Allow::User);
+ let allow_per_scheme = match config.sections_by_name_and_filter("protocol", &mut filter) {
+ Some(it) => {
+ let mut map = BTreeMap::default();
+ for (section, scheme) in it.filter_map(|section| {
+ section.header().subsection_name().and_then(|scheme| {
+ scheme
+ .to_str()
+ .ok()
+ .and_then(|scheme| gix_url::Scheme::try_from(scheme).ok().map(|scheme| (section, scheme)))
+ })
+ }) {
+ if let Some(value) = section
+ .value("allow")
+ .map(|value| Protocol::ALLOW.try_into_allow(value, Some(scheme.as_str())))
+ .transpose()?
+ {
+ saw_user |= value == Allow::User;
+ map.insert(scheme, value);
+ }
+ }
+ map
+ }
+ None => Default::default(),
+ };
+
+ let user_allowed = saw_user.then(|| {
+ config
+ .string_filter_by_key(gitoxide::Allow::PROTOCOL_FROM_USER.logical_name().as_str(), &mut filter)
+ .map_or(true, |val| val.as_ref() == "1")
+ });
+ Ok(SchemePermission {
+ allow,
+ allow_per_scheme,
+ user_allowed,
+ })
+ }
+}
+
+/// Access
+impl SchemePermission {
+ pub fn allow(&self, scheme: &gix_url::Scheme) -> bool {
+ self.allow_per_scheme.get(scheme).or(self.allow.as_ref()).map_or_else(
+ || {
+ use gix_url::Scheme::*;
+ match scheme {
+ File | Git | Ssh | Http | Https => true,
+ Ext(_) => false,
+ // TODO: figure out what 'ext' really entails, and what 'other' protocols are which aren't representable for us yet
+ }
+ },
+ |allow| allow.to_bool(self.user_allowed),
+ )
+ }
+}