path: root/vendor/gix/src/remote/connection
Diffstat (limited to 'vendor/gix/src/remote/connection')
-rw-r--r--  vendor/gix/src/remote/connection/access.rs                     67
-rw-r--r--  vendor/gix/src/remote/connection/fetch/config.rs               26
-rw-r--r--  vendor/gix/src/remote/connection/fetch/error.rs                41
-rw-r--r--  vendor/gix/src/remote/connection/fetch/mod.rs                 240
-rw-r--r--  vendor/gix/src/remote/connection/fetch/negotiate.rs            78
-rw-r--r--  vendor/gix/src/remote/connection/fetch/receive_pack.rs        238
-rw-r--r--  vendor/gix/src/remote/connection/fetch/update_refs/mod.rs     274
-rw-r--r--  vendor/gix/src/remote/connection/fetch/update_refs/tests.rs   607
-rw-r--r--  vendor/gix/src/remote/connection/fetch/update_refs/update.rs  128
-rw-r--r--  vendor/gix/src/remote/connection/mod.rs                        29
-rw-r--r--  vendor/gix/src/remote/connection/ref_map.rs                   268
11 files changed, 1996 insertions, 0 deletions
diff --git a/vendor/gix/src/remote/connection/access.rs b/vendor/gix/src/remote/connection/access.rs
new file mode 100644
index 000000000..e4c31c3f5
--- /dev/null
+++ b/vendor/gix/src/remote/connection/access.rs
@@ -0,0 +1,67 @@
+use crate::{
+ remote::{connection::AuthenticateFn, Connection},
+ Remote,
+};
+
+/// Builder
+impl<'a, 'repo, T, P> Connection<'a, 'repo, T, P> {
+ /// Set a custom credentials callback to provide credentials if the remote requires authentication.
+ ///
+ /// Otherwise we will use the git configuration to perform the same task as the `git credential` helper program,
+ /// which calls other helper programs in succession and resorts to a prompt to obtain credentials from the
+ /// user.
+ ///
+ /// A custom function may also be used to prevent accessing resources with authentication.
+ ///
+ /// Use the [configured_credentials()][Connection::configured_credentials()] method to obtain the implementation
+ /// that would otherwise be used, which can be useful to proxy the default configuration and obtain information about the
+ /// URLs to authenticate with.
+ pub fn with_credentials(
+ mut self,
+ helper: impl FnMut(gix_credentials::helper::Action) -> gix_credentials::protocol::Result + 'a,
+ ) -> Self {
+ self.authenticate = Some(Box::new(helper));
+ self
+ }
+
+ /// Provide configuration to be used before the first handshake is conducted.
+ /// It's typically created by initializing it with [`Repository::transport_options()`][crate::Repository::transport_options()], which
+ /// is also the default if this isn't set explicitly. Note that all of the default configuration is created from `git`
+ /// configuration, which can also be manipulated through overrides to affect the default configuration.
+ ///
+ /// Use this method to provide transport configuration with custom backend configuration that is not configurable by other means and
+ /// custom to the application at hand.
+ pub fn with_transport_options(mut self, config: Box<dyn std::any::Any>) -> Self {
+ self.transport_options = Some(config);
+ self
+ }
+}
+
+/// Access
+impl<'a, 'repo, T, P> Connection<'a, 'repo, T, P> {
+ /// A utility to return a function that will use this repository's configuration to obtain credentials, similar to
+ /// what `git credential` is doing.
+ ///
+ /// It's meant to be used by users of the [`with_credentials()`][Self::with_credentials()] builder to gain access to the
+ /// default way of handling credentials, which they can call as fallback.
+ pub fn configured_credentials(
+ &self,
+ url: gix_url::Url,
+ ) -> Result<AuthenticateFn<'static>, crate::config::credential_helpers::Error> {
+ let (mut cascade, _action_with_normalized_url, prompt_opts) =
+ self.remote.repo.config_snapshot().credential_helpers(url)?;
+ Ok(Box::new(move |action| cascade.invoke(action, prompt_opts.clone())) as AuthenticateFn<'_>)
+ }
+ /// Return the underlying remote that instantiated this connection.
+ pub fn remote(&self) -> &Remote<'repo> {
+ self.remote
+ }
+
+ /// Provide a mutable transport to allow interacting with it according to its actual type.
+ /// Note that the caller _should not_ call [`configure()`][gix_protocol::transport::client::TransportWithoutIO::configure()]
+ /// as we will call it automatically before performing the handshake. Instead, to bring in custom configuration,
+ /// call [`with_transport_options()`][Connection::with_transport_options()].
+ pub fn transport_mut(&mut self) -> &mut T {
+ &mut self.transport
+ }
+}
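The builder and access methods above compose as in the following sketch, which wraps the configured credential helpers so an application can observe when authentication is attempted before delegating to the default behaviour. The helper function, and the assumption that `connection` and `url` are supplied by the surrounding application, are illustrative only and not part of this diff.

// A minimal sketch, not part of this diff: wrap the configured credential helpers
// so the application can observe when authentication is attempted, then fall back
// to the default behaviour. `connection` and `url` are assumed to be provided by
// the surrounding application.
fn log_credential_use<'a, 'repo, T, P>(
    connection: gix::remote::Connection<'a, 'repo, T, P>,
    url: gix_url::Url,
) -> Result<gix::remote::Connection<'a, 'repo, T, P>, gix::config::credential_helpers::Error> {
    // Obtain the default implementation first, as `with_credentials()` consumes `self`.
    let mut default_helper = connection.configured_credentials(url)?;
    Ok(connection.with_credentials(move |action| {
        eprintln!("credential helper invoked");
        default_helper(action)
    }))
}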
diff --git a/vendor/gix/src/remote/connection/fetch/config.rs b/vendor/gix/src/remote/connection/fetch/config.rs
new file mode 100644
index 000000000..4782991bc
--- /dev/null
+++ b/vendor/gix/src/remote/connection/fetch/config.rs
@@ -0,0 +1,26 @@
+use super::Error;
+use crate::{
+ config::{cache::util::ApplyLeniency, tree::Pack},
+ Repository,
+};
+
+pub fn index_threads(repo: &Repository) -> Result<Option<usize>, Error> {
+ Ok(repo
+ .config
+ .resolved
+ .integer_filter("pack", None, Pack::THREADS.name, &mut repo.filter_config_section())
+ .map(|threads| Pack::THREADS.try_into_usize(threads))
+ .transpose()
+ .with_leniency(repo.options.lenient_config)?)
+}
+
+pub fn pack_index_version(repo: &Repository) -> Result<gix_pack::index::Version, Error> {
+ Ok(repo
+ .config
+ .resolved
+ .integer("pack", None, Pack::INDEX_VERSION.name)
+ .map(|value| Pack::INDEX_VERSION.try_into_index_version(value))
+ .transpose()
+ .with_leniency(repo.options.lenient_config)?
+ .unwrap_or(gix_pack::index::Version::V2))
+}
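Both functions read their values from git configuration, which can be steered from the application side by opening the repository with configuration overrides, reusing the `gix::open_opts` API that also appears in the tests further below. The function name and the chosen values in this sketch are illustrative assumptions.

// A minimal sketch, assuming a repository exists at `path`: open it with overrides so
// that `pack.threads` and `pack.indexVersion` resolve to explicit values instead of
// the defaults used above (auto-configured threads, index version 2).
fn open_with_pack_overrides(path: &std::path::Path) -> Result<gix::Repository, gix::open::Error> {
    gix::open_opts(
        path,
        gix::open::Options::isolated().config_overrides(["pack.threads=2", "pack.indexVersion=2"]),
    )
}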
diff --git a/vendor/gix/src/remote/connection/fetch/error.rs b/vendor/gix/src/remote/connection/fetch/error.rs
new file mode 100644
index 000000000..0e6a4b840
--- /dev/null
+++ b/vendor/gix/src/remote/connection/fetch/error.rs
@@ -0,0 +1,41 @@
+use crate::config;
+
+/// The error returned by [`receive()`](super::Prepare::receive()).
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum Error {
+ #[error("The value to configure pack threads should be 0 to auto-configure or the amount of threads to use")]
+ PackThreads(#[from] config::unsigned_integer::Error),
+ #[error("The value to configure the pack index version should be 1 or 2")]
+ PackIndexVersion(#[from] config::key::GenericError),
+ #[error("Could not decode server reply")]
+ FetchResponse(#[from] gix_protocol::fetch::response::Error),
+ #[error("Cannot fetch from a remote that uses {remote} while local repository uses {local} for object hashes")]
+ IncompatibleObjectHash {
+ local: gix_hash::Kind,
+ remote: gix_hash::Kind,
+ },
+ #[error(transparent)]
+ Negotiate(#[from] super::negotiate::Error),
+ #[error(transparent)]
+ Client(#[from] gix_protocol::transport::client::Error),
+ #[error(transparent)]
+ WritePack(#[from] gix_pack::bundle::write::Error),
+ #[error(transparent)]
+ UpdateRefs(#[from] super::refs::update::Error),
+ #[error("Failed to remove .keep file at \"{}\"", path.display())]
+ RemovePackKeepFile {
+ path: std::path::PathBuf,
+ source: std::io::Error,
+ },
+}
+
+impl gix_protocol::transport::IsSpuriousError for Error {
+ fn is_spurious(&self) -> bool {
+ match self {
+ Error::FetchResponse(err) => err.is_spurious(),
+ Error::Client(err) => err.is_spurious(),
+ _ => false,
+ }
+ }
+}
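Because the error forwards `is_spurious()` from its transport and response variants, callers can use it to decide whether a retry is worthwhile. The sketch below is generic over any error implementing the trait; `run_fetch` stands in for an application-provided fetch attempt.

// A minimal sketch, assuming `run_fetch` is an application-provided closure that drives
// one fetch attempt and returns an error implementing `IsSpuriousError`, like the one above.
use gix_protocol::transport::IsSpuriousError;

fn fetch_with_one_retry<E: IsSpuriousError>(mut run_fetch: impl FnMut() -> Result<(), E>) -> Result<(), E> {
    match run_fetch() {
        // A spurious error hints at a transient network problem, so try exactly once more.
        Err(err) if err.is_spurious() => run_fetch(),
        other => other,
    }
}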
diff --git a/vendor/gix/src/remote/connection/fetch/mod.rs b/vendor/gix/src/remote/connection/fetch/mod.rs
new file mode 100644
index 000000000..4ce631b1e
--- /dev/null
+++ b/vendor/gix/src/remote/connection/fetch/mod.rs
@@ -0,0 +1,240 @@
+use gix_protocol::transport::client::Transport;
+
+use crate::{
+ bstr::BString,
+ remote,
+ remote::{
+ fetch::{DryRun, RefMap},
+ ref_map, Connection,
+ },
+ Progress,
+};
+
+mod error;
+pub use error::Error;
+
+use crate::remote::fetch::WritePackedRefs;
+
+/// The way reflog messages should be composed whenever a ref is written with recent objects from a remote.
+pub enum RefLogMessage {
+ /// Prefix the log with `action` and generate the typical suffix as `git` would.
+ Prefixed {
+ /// The action to use, like `fetch` or `pull`.
+ action: String,
+ },
+ /// Control the entire message, using `message` verbatim.
+ Override {
+ /// The complete reflog message.
+ message: BString,
+ },
+}
+
+impl RefLogMessage {
+ pub(crate) fn compose(&self, context: &str) -> BString {
+ match self {
+ RefLogMessage::Prefixed { action } => format!("{action}: {context}").into(),
+ RefLogMessage::Override { message } => message.to_owned(),
+ }
+ }
+}
+
+/// The status of the repository after the fetch operation
+#[derive(Debug, Clone)]
+pub enum Status {
+ /// Nothing changed as the remote didn't have anything new compared to our tracking branches, thus no pack was received
+ /// and no new object was added.
+ NoPackReceived {
+ /// However, depending on the refspecs, references might have been updated nonetheless to point to objects as
+ /// reported by the remote.
+ update_refs: refs::update::Outcome,
+ },
+ /// There was at least one tip with a new object which we received.
+ Change {
+ /// Information collected while writing the pack and its index.
+ write_pack_bundle: gix_pack::bundle::write::Outcome,
+ /// Information collected while updating references.
+ update_refs: refs::update::Outcome,
+ },
+ /// A dry run was performed which leaves the local repository without any change
+ /// nor will a pack have been received.
+ DryRun {
+ /// Information about what updates to refs would have been done.
+ update_refs: refs::update::Outcome,
+ },
+}
+
+/// The outcome of receiving a pack via [`Prepare::receive()`].
+#[derive(Debug, Clone)]
+pub struct Outcome {
+ /// The result of the initial mapping of references, the prerequisite for any fetch.
+ pub ref_map: RefMap,
+ /// The status of the operation to indicate what happened.
+ pub status: Status,
+}
+
+/// The progress ids used during various steps of the fetch operation.
+///
+/// Note that tagged progress isn't very widely available yet, but support can be improved as needed.
+///
+/// Use this information to selectively extract the progress of interest in case the parent application has custom visualization.
+#[derive(Debug, Copy, Clone)]
+pub enum ProgressId {
+ /// The progress name is defined by the remote and the progress messages it sets, along with their progress values and limits.
+ RemoteProgress,
+}
+
+impl From<ProgressId> for gix_features::progress::Id {
+ fn from(v: ProgressId) -> Self {
+ match v {
+ ProgressId::RemoteProgress => *b"FERP",
+ }
+ }
+}
+
+///
+pub mod negotiate;
+
+///
+pub mod prepare {
+ /// The error returned by [`prepare_fetch()`][super::Connection::prepare_fetch()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("Cannot perform a meaningful fetch operation without any configured ref-specs")]
+ MissingRefSpecs,
+ #[error(transparent)]
+ RefMap(#[from] crate::remote::ref_map::Error),
+ }
+
+ impl gix_protocol::transport::IsSpuriousError for Error {
+ fn is_spurious(&self) -> bool {
+ match self {
+ Error::RefMap(err) => err.is_spurious(),
+ _ => false,
+ }
+ }
+ }
+}
+
+impl<'remote, 'repo, T, P> Connection<'remote, 'repo, T, P>
+where
+ T: Transport,
+ P: Progress,
+{
+ /// Perform a handshake with the remote and obtain a ref-map with `options`, which forms the basis for the upcoming fetch.
+ /// Note that at this point, the `transport` should already be configured using the [`transport_mut()`][Self::transport_mut()]
+ /// method, as it will be consumed here.
+ ///
+ /// From there additional properties of the fetch can be adjusted to override the defaults that are configured via gix-config.
+ ///
+ /// # Async Experimental
+ ///
+ /// Note that this implementation currently works correctly in blocking mode only, as it relies on Drop semantics to close the connection
+ /// should the fetch not be performed. Furthermore, the code doing the fetch is inherently blocking and not offloaded to a thread,
+ /// making this call block the executor.
+ /// It's best to place it into its own thread, or to offload it, should usage in an async context be truly required.
+ #[allow(clippy::result_large_err)]
+ #[gix_protocol::maybe_async::maybe_async]
+ pub async fn prepare_fetch(
+ mut self,
+ options: ref_map::Options,
+ ) -> Result<Prepare<'remote, 'repo, T, P>, prepare::Error> {
+ if self.remote.refspecs(remote::Direction::Fetch).is_empty() {
+ return Err(prepare::Error::MissingRefSpecs);
+ }
+ let ref_map = self.ref_map_inner(options).await?;
+ Ok(Prepare {
+ con: Some(self),
+ ref_map,
+ dry_run: DryRun::No,
+ reflog_message: None,
+ write_packed_refs: WritePackedRefs::Never,
+ })
+ }
+}
+
+impl<'remote, 'repo, T, P> Prepare<'remote, 'repo, T, P>
+where
+ T: Transport,
+{
+ /// Return the ref_map (that includes the server handshake) which was part of listing refs prior to fetching a pack.
+ pub fn ref_map(&self) -> &RefMap {
+ &self.ref_map
+ }
+}
+
+mod config;
+mod receive_pack;
+///
+#[path = "update_refs/mod.rs"]
+pub mod refs;
+
+/// A structure to hold the result of the handshake with the remote and configure the upcoming fetch operation.
+pub struct Prepare<'remote, 'repo, T, P>
+where
+ T: Transport,
+{
+ con: Option<Connection<'remote, 'repo, T, P>>,
+ ref_map: RefMap,
+ dry_run: DryRun,
+ reflog_message: Option<RefLogMessage>,
+ write_packed_refs: WritePackedRefs,
+}
+
+/// Builder
+impl<'remote, 'repo, T, P> Prepare<'remote, 'repo, T, P>
+where
+ T: Transport,
+{
+ /// If dry run is enabled, no change to the repository will be made.
+ ///
+ /// This works by not actually fetching the pack after negotiating it, nor will refs be updated.
+ pub fn with_dry_run(mut self, enabled: bool) -> Self {
+ self.dry_run = if enabled { DryRun::Yes } else { DryRun::No };
+ self
+ }
+
+ /// If enabled, don't write ref updates to loose refs, but put them exclusively to packed-refs.
+ ///
+ /// This improves performance and allows case-sensitive filesystems to deal with ref names that would otherwise
+ /// collide.
+ pub fn with_write_packed_refs_only(mut self, enabled: bool) -> Self {
+ self.write_packed_refs = if enabled {
+ WritePackedRefs::Only
+ } else {
+ WritePackedRefs::Never
+ };
+ self
+ }
+
+ /// Set the reflog message to use when updating refs after fetching a pack.
+ pub fn with_reflog_message(mut self, reflog_message: RefLogMessage) -> Self {
+ self.reflog_message = reflog_message.into();
+ self
+ }
+}
+
+impl<'remote, 'repo, T, P> Drop for Prepare<'remote, 'repo, T, P>
+where
+ T: Transport,
+{
+ fn drop(&mut self) {
+ if let Some(mut con) = self.con.take() {
+ #[cfg(feature = "async-network-client")]
+ {
+ // TODO: this should be an async drop once the feature is available.
+ // Right now we block the executor by forcing this communication, but that only
+ // happens if the user didn't actually try to receive a pack, which consumes the
+ // connection in an async context.
+ gix_protocol::futures_lite::future::block_on(gix_protocol::indicate_end_of_interaction(
+ &mut con.transport,
+ ))
+ .ok();
+ }
+ #[cfg(not(feature = "async-network-client"))]
+ {
+ gix_protocol::indicate_end_of_interaction(&mut con.transport).ok();
+ }
+ }
+ }
+}
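Putting the pieces above together, a dry-run fetch could be driven as in the following sketch. It assumes the blocking network client is compiled in (so the `maybe_async` functions are synchronous) and that `ref_map::Options` provides a `Default` implementation; `connection` is expected to come from the surrounding application.

// A minimal sketch of the prepare/receive flow defined above, under the assumptions
// named in the paragraph before it.
use std::sync::atomic::AtomicBool;

fn dry_run_fetch<T, P>(
    connection: gix::remote::Connection<'_, '_, T, P>,
) -> Result<(), Box<dyn std::error::Error>>
where
    T: gix_protocol::transport::client::Transport,
    P: gix::Progress,
    P::SubProgress: 'static,
{
    let should_interrupt = AtomicBool::new(false);
    let outcome = connection
        .prepare_fetch(gix::remote::ref_map::Options::default())?
        .with_dry_run(true)
        .with_reflog_message(gix::remote::fetch::RefLogMessage::Prefixed {
            action: "fetch".into(),
        })
        .receive(&should_interrupt)?;
    // In dry-run mode no pack is written, so only planned ref updates are reported.
    println!("{} ref mappings evaluated", outcome.ref_map.mappings.len());
    Ok(())
}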
diff --git a/vendor/gix/src/remote/connection/fetch/negotiate.rs b/vendor/gix/src/remote/connection/fetch/negotiate.rs
new file mode 100644
index 000000000..f5051ec72
--- /dev/null
+++ b/vendor/gix/src/remote/connection/fetch/negotiate.rs
@@ -0,0 +1,78 @@
+/// The way the negotiation is performed
+#[derive(Copy, Clone)]
+pub(crate) enum Algorithm {
+ /// Our very own implementation that probably should be replaced by one of the known algorithms soon.
+ Naive,
+}
+
+/// The error returned during negotiation.
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum Error {
+ #[error("We were unable to figure out what objects the server should send after {rounds} round(s)")]
+ NegotiationFailed { rounds: usize },
+}
+
+/// Negotiate one round with `algo` by looking at `ref_map` and adjust `arguments` to contain the haves and wants.
+/// If this is not the first round, the `previous_response` is set with the last recorded server response.
+/// Returns `true` if the negotiation is done from our side so the server won't keep asking.
+pub(crate) fn one_round(
+ algo: Algorithm,
+ round: usize,
+ repo: &crate::Repository,
+ ref_map: &crate::remote::fetch::RefMap,
+ fetch_tags: crate::remote::fetch::Tags,
+ arguments: &mut gix_protocol::fetch::Arguments,
+ _previous_response: Option<&gix_protocol::fetch::Response>,
+) -> Result<bool, Error> {
+ let tag_refspec_to_ignore = fetch_tags
+ .to_refspec()
+ .filter(|_| matches!(fetch_tags, crate::remote::fetch::Tags::Included));
+ match algo {
+ Algorithm::Naive => {
+ assert_eq!(round, 1, "Naive always finishes after the first round, and claims to be done.");
+ let mut has_missing_tracking_branch = false;
+ for mapping in &ref_map.mappings {
+ if tag_refspec_to_ignore.map_or(false, |tag_spec| {
+ mapping
+ .spec_index
+ .implicit_index()
+ .and_then(|idx| ref_map.extra_refspecs.get(idx))
+ .map_or(false, |spec| spec.to_ref() == tag_spec)
+ }) {
+ continue;
+ }
+ let have_id = mapping.local.as_ref().and_then(|name| {
+ repo.find_reference(name)
+ .ok()
+ .and_then(|r| r.target().try_id().map(ToOwned::to_owned))
+ });
+ match have_id {
+ Some(have_id) => {
+ if let Some(want_id) = mapping.remote.as_id() {
+ if want_id != have_id {
+ arguments.want(want_id);
+ arguments.have(have_id);
+ }
+ }
+ }
+ None => {
+ if let Some(want_id) = mapping.remote.as_id() {
+ arguments.want(want_id);
+ has_missing_tracking_branch = true;
+ }
+ }
+ }
+ }
+
+ if has_missing_tracking_branch {
+ if let Ok(Some(r)) = repo.head_ref() {
+ if let Some(id) = r.target().try_id() {
+ arguments.have(id);
+ }
+ }
+ }
+ Ok(true)
+ }
+ }
+}
diff --git a/vendor/gix/src/remote/connection/fetch/receive_pack.rs b/vendor/gix/src/remote/connection/fetch/receive_pack.rs
new file mode 100644
index 000000000..686de5999
--- /dev/null
+++ b/vendor/gix/src/remote/connection/fetch/receive_pack.rs
@@ -0,0 +1,238 @@
+use std::sync::atomic::AtomicBool;
+
+use gix_odb::FindExt;
+use gix_protocol::transport::client::Transport;
+
+use crate::{
+ remote,
+ remote::{
+ connection::fetch::config,
+ fetch,
+ fetch::{negotiate, refs, Error, Outcome, Prepare, ProgressId, RefLogMessage, Status},
+ },
+ Progress,
+};
+
+impl<'remote, 'repo, T, P> Prepare<'remote, 'repo, T, P>
+where
+ T: Transport,
+ P: Progress,
+ P::SubProgress: 'static,
+{
+ /// Receive the pack and perform the operation as configured by git via `gix-config` or overridden by various builder methods.
+ /// Return `Ok(None)` if there was nothing to do because all remote refs are at the same state as they are locally, or `Ok(Some(outcome))`
+ /// to inform about all the changes that were made.
+ ///
+ /// ### Negotiation
+ ///
+ /// "fetch.negotiationAlgorithm" describes algorithms `git` uses currently, with the default being `consecutive` and `skipping` being
+ /// experimented with. We currently implement something we could call 'naive' which works for now.
+ ///
+ /// ### Pack `.keep` files
+ ///
+ /// Packs that are freshly written to the object database are vulnerable to garbage collection for the brief time it takes between
+ /// the pack being placed and the respective references being written to disk, which binds their objects to the commit graph and makes them reachable.
+ ///
+ /// To circumvent this issue, a `.keep` file is created before any pack-related file (i.e. `.pack` or `.idx`) is written, which tells the
+ /// garbage collector (like `git maintenance` or `git gc`) to leave the corresponding pack file alone.
+ ///
+ /// If there were any ref updates or the received pack was empty, the `.keep` file will be deleted automatically and
+ /// `write_pack_bundle.keep_path` will be `None`.
+ /// However, if no ref update happened, the path will still be present in `write_pack_bundle.keep_path` and is expected to be handled by the caller.
+ /// A known application for this behaviour is in `remote-helper` implementations, which should send this path via `lock <path>` to stdout
+ /// to inform git about the file, which it will remove once it has updated the refs accordingly.
+ ///
+ /// ### Deviation
+ ///
+ /// When **updating refs**, the `git-fetch` docs state the following:
+ ///
+ /// > Unlike when pushing with git-push, any updates outside of refs/{tags,heads}/* will be accepted without + in the refspec (or --force), whether that's swapping e.g. a tree object for a blob, or a commit for another commit that doesn't have the previous commit as an ancestor etc.
+ ///
+ /// We explicitly don't special case those refs and expect the user to take control. Note that by its nature,
+ /// force only applies to refs pointing to commits and if they don't, they will be updated either way in our
+ /// implementation as well.
+ ///
+ /// ### Async Mode Shortcoming
+ ///
+ /// Currently the entire process of resolving a pack is blocking the executor. This can be fixed using the `blocking` crate, but it
+ /// didn't seem worth the tradeoff of having more complex code.
+ ///
+ /// ### Configuration
+ ///
+ /// - `gitoxide.userAgent` is read to obtain the application user agent for git servers and for HTTP servers as well.
+ ///
+ #[gix_protocol::maybe_async::maybe_async]
+ pub async fn receive(mut self, should_interrupt: &AtomicBool) -> Result<Outcome, Error> {
+ let mut con = self.con.take().expect("receive() can only be called once");
+
+ let handshake = &self.ref_map.handshake;
+ let protocol_version = handshake.server_protocol_version;
+
+ let fetch = gix_protocol::Command::Fetch;
+ let progress = &mut con.progress;
+ let repo = con.remote.repo;
+ let fetch_features = {
+ let mut f = fetch.default_features(protocol_version, &handshake.capabilities);
+ f.push(repo.config.user_agent_tuple());
+ f
+ };
+
+ gix_protocol::fetch::Response::check_required_features(protocol_version, &fetch_features)?;
+ let sideband_all = fetch_features.iter().any(|(n, _)| *n == "sideband-all");
+ let mut arguments = gix_protocol::fetch::Arguments::new(protocol_version, fetch_features);
+ if matches!(con.remote.fetch_tags, crate::remote::fetch::Tags::Included) {
+ if !arguments.can_use_include_tag() {
+ unimplemented!("we expect servers to support 'include-tag', otherwise we have to implement another pass to fetch attached tags separately");
+ }
+ arguments.use_include_tag();
+ }
+ let mut previous_response = None::<gix_protocol::fetch::Response>;
+ let mut round = 1;
+
+ if self.ref_map.object_hash != repo.object_hash() {
+ return Err(Error::IncompatibleObjectHash {
+ local: repo.object_hash(),
+ remote: self.ref_map.object_hash,
+ });
+ }
+
+ let reader = 'negotiation: loop {
+ progress.step();
+ progress.set_name(format!("negotiate (round {round})"));
+
+ let is_done = match negotiate::one_round(
+ negotiate::Algorithm::Naive,
+ round,
+ repo,
+ &self.ref_map,
+ con.remote.fetch_tags,
+ &mut arguments,
+ previous_response.as_ref(),
+ ) {
+ Ok(_) if arguments.is_empty() => {
+ gix_protocol::indicate_end_of_interaction(&mut con.transport).await.ok();
+ let update_refs = refs::update(
+ repo,
+ self.reflog_message
+ .take()
+ .unwrap_or_else(|| RefLogMessage::Prefixed { action: "fetch".into() }),
+ &self.ref_map.mappings,
+ con.remote.refspecs(remote::Direction::Fetch),
+ &self.ref_map.extra_refspecs,
+ con.remote.fetch_tags,
+ self.dry_run,
+ self.write_packed_refs,
+ )?;
+ return Ok(Outcome {
+ ref_map: std::mem::take(&mut self.ref_map),
+ status: Status::NoPackReceived { update_refs },
+ });
+ }
+ Ok(is_done) => is_done,
+ Err(err) => {
+ gix_protocol::indicate_end_of_interaction(&mut con.transport).await.ok();
+ return Err(err.into());
+ }
+ };
+ round += 1;
+ let mut reader = arguments.send(&mut con.transport, is_done).await?;
+ if sideband_all {
+ setup_remote_progress(progress, &mut reader);
+ }
+ let response = gix_protocol::fetch::Response::from_line_reader(protocol_version, &mut reader).await?;
+ if response.has_pack() {
+ progress.step();
+ progress.set_name("receiving pack");
+ if !sideband_all {
+ setup_remote_progress(progress, &mut reader);
+ }
+ break 'negotiation reader;
+ } else {
+ previous_response = Some(response);
+ }
+ };
+
+ let options = gix_pack::bundle::write::Options {
+ thread_limit: config::index_threads(repo)?,
+ index_version: config::pack_index_version(repo)?,
+ iteration_mode: gix_pack::data::input::Mode::Verify,
+ object_hash: con.remote.repo.object_hash(),
+ };
+
+ let mut write_pack_bundle = if matches!(self.dry_run, fetch::DryRun::No) {
+ Some(gix_pack::Bundle::write_to_directory(
+ #[cfg(feature = "async-network-client")]
+ {
+ gix_protocol::futures_lite::io::BlockOn::new(reader)
+ },
+ #[cfg(not(feature = "async-network-client"))]
+ {
+ reader
+ },
+ Some(repo.objects.store_ref().path().join("pack")),
+ con.progress,
+ should_interrupt,
+ Some(Box::new({
+ let repo = repo.clone();
+ move |oid, buf| repo.objects.find(oid, buf).ok()
+ })),
+ options,
+ )?)
+ } else {
+ drop(reader);
+ None
+ };
+
+ if matches!(protocol_version, gix_protocol::transport::Protocol::V2) {
+ gix_protocol::indicate_end_of_interaction(&mut con.transport).await.ok();
+ }
+
+ let update_refs = refs::update(
+ repo,
+ self.reflog_message
+ .take()
+ .unwrap_or_else(|| RefLogMessage::Prefixed { action: "fetch".into() }),
+ &self.ref_map.mappings,
+ con.remote.refspecs(remote::Direction::Fetch),
+ &self.ref_map.extra_refspecs,
+ con.remote.fetch_tags,
+ self.dry_run,
+ self.write_packed_refs,
+ )?;
+
+ if let Some(bundle) = write_pack_bundle.as_mut() {
+ if !update_refs.edits.is_empty() || bundle.index.num_objects == 0 {
+ if let Some(path) = bundle.keep_path.take() {
+ std::fs::remove_file(&path).map_err(|err| Error::RemovePackKeepFile { path, source: err })?;
+ }
+ }
+ }
+
+ Ok(Outcome {
+ ref_map: std::mem::take(&mut self.ref_map),
+ status: match write_pack_bundle {
+ Some(write_pack_bundle) => Status::Change {
+ write_pack_bundle,
+ update_refs,
+ },
+ None => Status::DryRun { update_refs },
+ },
+ })
+ }
+}
+
+fn setup_remote_progress<P>(
+ progress: &mut P,
+ reader: &mut Box<dyn gix_protocol::transport::client::ExtendedBufRead + Unpin + '_>,
+) where
+ P: Progress,
+ P::SubProgress: 'static,
+{
+ use gix_protocol::transport::client::ExtendedBufRead;
+ reader.set_progress_handler(Some(Box::new({
+ let mut remote_progress = progress.add_child_with_id("remote", ProgressId::RemoteProgress.into());
+ move |is_err: bool, data: &[u8]| {
+ gix_protocol::RemoteProgress::translate_to_progress(is_err, data, &mut remote_progress)
+ }
+ }) as gix_protocol::transport::client::HandleProgress));
+}
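The `Outcome` returned by `receive()` can be inspected as in the sketch below, which also honours the `.keep` file contract described above. The paths through `gix::remote::fetch` are assumed to be the crate's public re-exports of the types defined in this module.

// A minimal sketch of inspecting a fetch outcome; `outcome` is assumed to come from a
// call to `receive()` like the one defined above.
use gix::remote::fetch::Status;

fn summarize(outcome: &gix::remote::fetch::Outcome) {
    match &outcome.status {
        Status::NoPackReceived { update_refs } => {
            println!("up to date, {} ref edits", update_refs.edits.len());
        }
        Status::Change { write_pack_bundle, update_refs } => {
            println!(
                "received {} objects, {} ref edits",
                write_pack_bundle.index.num_objects,
                update_refs.edits.len()
            );
            // If a `.keep` file is still present, the caller is responsible for removing it.
            if let Some(keep) = &write_pack_bundle.keep_path {
                println!("keep file left behind at {}", keep.display());
            }
        }
        Status::DryRun { update_refs } => {
            println!("dry-run only, {} ref edits planned", update_refs.edits.len());
        }
    }
}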
diff --git a/vendor/gix/src/remote/connection/fetch/update_refs/mod.rs b/vendor/gix/src/remote/connection/fetch/update_refs/mod.rs
new file mode 100644
index 000000000..953490672
--- /dev/null
+++ b/vendor/gix/src/remote/connection/fetch/update_refs/mod.rs
@@ -0,0 +1,274 @@
+#![allow(clippy::result_large_err)]
+use std::{collections::BTreeMap, convert::TryInto, path::PathBuf};
+
+use gix_odb::{Find, FindExt};
+use gix_ref::{
+ transaction::{Change, LogChange, PreviousValue, RefEdit, RefLog},
+ Target, TargetRef,
+};
+
+use crate::{
+ ext::ObjectIdExt,
+ remote::{
+ fetch,
+ fetch::{refs::update::Mode, RefLogMessage, Source},
+ },
+ Repository,
+};
+
+///
+pub mod update;
+
+/// Information about the update of a single reference, corresponding to the respective entry in [`RefMap::mappings`][crate::remote::fetch::RefMap::mappings].
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct Update {
+ /// The way the update was performed.
+ pub mode: update::Mode,
+ /// The index to the edit that was created from the corresponding mapping, or `None` if there was no local ref.
+ pub edit_index: Option<usize>,
+}
+
+impl From<update::Mode> for Update {
+ fn from(mode: Mode) -> Self {
+ Update { mode, edit_index: None }
+ }
+}
+
+/// Update all refs as derived from `refmap.mappings` and produce an `Outcome` informing about all applied changes in detail, with each
+/// [`update`][Update] corresponding to the [`fetch::Mapping`] at the same index.
+/// If `dry_run` is true, ref transactions won't actually be applied, but are assumed to work without error so the underlying
+/// `repo` is not actually changed. Also no 'object exists' check is performed, as the objects are likely not to exist since the pack
+/// wasn't fetched either.
+/// `action` is the prefix used for reflog entries, and is typically "fetch".
+///
+/// It can be used to produce typical information that one is used to from `git fetch`.
+#[allow(clippy::too_many_arguments)]
+pub(crate) fn update(
+ repo: &Repository,
+ message: RefLogMessage,
+ mappings: &[fetch::Mapping],
+ refspecs: &[gix_refspec::RefSpec],
+ extra_refspecs: &[gix_refspec::RefSpec],
+ fetch_tags: fetch::Tags,
+ dry_run: fetch::DryRun,
+ write_packed_refs: fetch::WritePackedRefs,
+) -> Result<update::Outcome, update::Error> {
+ let mut edits = Vec::new();
+ let mut updates = Vec::new();
+
+ let implicit_tag_refspec = fetch_tags
+ .to_refspec()
+ .filter(|_| matches!(fetch_tags, crate::remote::fetch::Tags::Included));
+ for (remote, local, spec, is_implicit_tag) in mappings.iter().filter_map(
+ |fetch::Mapping {
+ remote,
+ local,
+ spec_index,
+ }| {
+ spec_index.get(refspecs, extra_refspecs).map(|spec| {
+ (
+ remote,
+ local,
+ spec,
+ implicit_tag_refspec.map_or(false, |tag_spec| spec.to_ref() == tag_spec),
+ )
+ })
+ },
+ ) {
+ let remote_id = match remote.as_id() {
+ Some(id) => id,
+ None => continue,
+ };
+ if dry_run == fetch::DryRun::No && !repo.objects.contains(remote_id) {
+ let update = if is_implicit_tag {
+ update::Mode::ImplicitTagNotSentByRemote.into()
+ } else {
+ update::Mode::RejectedSourceObjectNotFound { id: remote_id.into() }.into()
+ };
+ updates.push(update);
+ continue;
+ }
+ let checked_out_branches = worktree_branches(repo)?;
+ let (mode, edit_index) = match local {
+ Some(name) => {
+ let (mode, reflog_message, name, previous_value) = match repo.try_find_reference(name)? {
+ Some(existing) => {
+ if let Some(wt_dir) = checked_out_branches.get(existing.name()) {
+ let mode = update::Mode::RejectedCurrentlyCheckedOut {
+ worktree_dir: wt_dir.to_owned(),
+ };
+ updates.push(mode.into());
+ continue;
+ }
+ match existing.target() {
+ TargetRef::Symbolic(_) => {
+ updates.push(update::Mode::RejectedSymbolic.into());
+ continue;
+ }
+ TargetRef::Peeled(local_id) => {
+ let previous_value =
+ PreviousValue::MustExistAndMatch(Target::Peeled(local_id.to_owned()));
+ let (mode, reflog_message) = if local_id == remote_id {
+ (update::Mode::NoChangeNeeded, "no update will be performed")
+ } else if let Some(gix_ref::Category::Tag) = existing.name().category() {
+ if spec.allow_non_fast_forward() {
+ (update::Mode::Forced, "updating tag")
+ } else {
+ updates.push(update::Mode::RejectedTagUpdate.into());
+ continue;
+ }
+ } else {
+ let mut force = spec.allow_non_fast_forward();
+ let is_fast_forward = match dry_run {
+ fetch::DryRun::No => {
+ let ancestors = repo
+ .find_object(local_id)?
+ .try_into_commit()
+ .map_err(|_| ())
+ .and_then(|c| {
+ c.committer().map(|a| a.time.seconds_since_unix_epoch).map_err(|_| ())
+ }).and_then(|local_commit_time|
+ remote_id
+ .to_owned()
+ .ancestors(|id, buf| repo.objects.find_commit_iter(id, buf))
+ .sorting(
+ gix_traverse::commit::Sorting::ByCommitTimeNewestFirstCutoffOlderThan {
+ time_in_seconds_since_epoch: local_commit_time
+ },
+ )
+ .map_err(|_| ())
+ );
+ match ancestors {
+ Ok(mut ancestors) => {
+ ancestors.any(|cid| cid.map_or(false, |cid| cid == local_id))
+ }
+ Err(_) => {
+ force = true;
+ false
+ }
+ }
+ }
+ fetch::DryRun::Yes => true,
+ };
+ if is_fast_forward {
+ (
+ update::Mode::FastForward,
+ matches!(dry_run, fetch::DryRun::Yes)
+ .then(|| "fast-forward (guessed in dry-run)")
+ .unwrap_or("fast-forward"),
+ )
+ } else if force {
+ (update::Mode::Forced, "forced-update")
+ } else {
+ updates.push(update::Mode::RejectedNonFastForward.into());
+ continue;
+ }
+ };
+ (mode, reflog_message, existing.name().to_owned(), previous_value)
+ }
+ }
+ }
+ None => {
+ let name: gix_ref::FullName = name.try_into()?;
+ let reflog_msg = match name.category() {
+ Some(gix_ref::Category::Tag) => "storing tag",
+ Some(gix_ref::Category::LocalBranch) => "storing head",
+ _ => "storing ref",
+ };
+ (
+ update::Mode::New,
+ reflog_msg,
+ name,
+ PreviousValue::ExistingMustMatch(Target::Peeled(remote_id.to_owned())),
+ )
+ }
+ };
+ let edit = RefEdit {
+ change: Change::Update {
+ log: LogChange {
+ mode: RefLog::AndReference,
+ force_create_reflog: false,
+ message: message.compose(reflog_message),
+ },
+ expected: previous_value,
+ new: if let Source::Ref(gix_protocol::handshake::Ref::Symbolic { target, .. }) = &remote {
+ match mappings.iter().find_map(|m| {
+ m.remote.as_name().and_then(|name| {
+ (name == target)
+ .then(|| m.local.as_ref().and_then(|local| local.try_into().ok()))
+ .flatten()
+ })
+ }) {
+ Some(local_branch) => {
+ // This is always safe because…
+ // - the reference may exist already
+ // - if it doesn't exist it will be created - we are here because it's in the list of mappings after all
+ // - if it exists and is updated, and the update is rejected due to non-fastforward for instance, the
+ // target reference still exists and we can point to it.
+ Target::Symbolic(local_branch)
+ }
+ None => Target::Peeled(remote_id.into()),
+ }
+ } else {
+ Target::Peeled(remote_id.into())
+ },
+ },
+ name,
+ deref: false,
+ };
+ let edit_index = edits.len();
+ edits.push(edit);
+ (mode, Some(edit_index))
+ }
+ None => (update::Mode::NoChangeNeeded, None),
+ };
+ updates.push(Update { mode, edit_index })
+ }
+
+ let edits = match dry_run {
+ fetch::DryRun::No => {
+ let (file_lock_fail, packed_refs_lock_fail) = repo
+ .config
+ .lock_timeout()
+ .map_err(crate::reference::edit::Error::from)?;
+ repo.refs
+ .transaction()
+ .packed_refs(
+ match write_packed_refs {
+ fetch::WritePackedRefs::Only => {
+ gix_ref::file::transaction::PackedRefs::DeletionsAndNonSymbolicUpdatesRemoveLooseSourceReference(Box::new(|oid, buf| {
+ repo.objects
+ .try_find(oid, buf)
+ .map(|obj| obj.map(|obj| obj.kind))
+ .map_err(|err| Box::new(err) as Box<dyn std::error::Error + Send + Sync + 'static>)
+ }))},
+ fetch::WritePackedRefs::Never => gix_ref::file::transaction::PackedRefs::DeletionsOnly
+ }
+ )
+ .prepare(edits, file_lock_fail, packed_refs_lock_fail)
+ .map_err(crate::reference::edit::Error::from)?
+ .commit(repo.committer().transpose().map_err(|err| update::Error::EditReferences(crate::reference::edit::Error::ParseCommitterTime(err)))?)
+ .map_err(crate::reference::edit::Error::from)?
+ }
+ fetch::DryRun::Yes => edits,
+ };
+
+ Ok(update::Outcome { edits, updates })
+}
+
+fn worktree_branches(repo: &Repository) -> Result<BTreeMap<gix_ref::FullName, PathBuf>, update::Error> {
+ let mut map = BTreeMap::new();
+ if let Some((wt_dir, head_ref)) = repo.work_dir().zip(repo.head_ref().ok().flatten()) {
+ map.insert(head_ref.inner.name, wt_dir.to_owned());
+ }
+ for proxy in repo.worktrees()? {
+ let repo = proxy.into_repo_with_possibly_inaccessible_worktree()?;
+ if let Some((wt_dir, head_ref)) = repo.work_dir().zip(repo.head_ref().ok().flatten()) {
+ map.insert(head_ref.inner.name, wt_dir.to_owned());
+ }
+ }
+ Ok(map)
+}
+
+#[cfg(test)]
+mod tests;
diff --git a/vendor/gix/src/remote/connection/fetch/update_refs/tests.rs b/vendor/gix/src/remote/connection/fetch/update_refs/tests.rs
new file mode 100644
index 000000000..145990ac8
--- /dev/null
+++ b/vendor/gix/src/remote/connection/fetch/update_refs/tests.rs
@@ -0,0 +1,607 @@
+pub fn restricted() -> crate::open::Options {
+ crate::open::Options::isolated().config_overrides(["user.name=gitoxide", "user.email=gitoxide@localhost"])
+}
+
+/// Convert a hexadecimal hash into its corresponding `ObjectId` or _panic_.
+fn hex_to_id(hex: &str) -> gix_hash::ObjectId {
+ gix_hash::ObjectId::from_hex(hex.as_bytes()).expect("40 bytes hex")
+}
+
+mod update {
+ use std::convert::TryInto;
+
+ use gix_testtools::Result;
+
+ use super::hex_to_id;
+ use crate as gix;
+
+ fn base_repo_path() -> String {
+ gix::path::realpath(
+ gix_testtools::scripted_fixture_read_only("make_remote_repos.sh")
+ .unwrap()
+ .join("base"),
+ )
+ .unwrap()
+ .to_string_lossy()
+ .into_owned()
+ }
+
+ fn repo(name: &str) -> gix::Repository {
+ let dir =
+ gix_testtools::scripted_fixture_read_only_with_args("make_fetch_repos.sh", [base_repo_path()]).unwrap();
+ gix::open_opts(dir.join(name), restricted()).unwrap()
+ }
+ fn repo_rw(name: &str) -> (gix::Repository, gix_testtools::tempfile::TempDir) {
+ let dir = gix_testtools::scripted_fixture_writable_with_args(
+ "make_fetch_repos.sh",
+ [base_repo_path()],
+ gix_testtools::Creation::ExecuteScript,
+ )
+ .unwrap();
+ let repo = gix::open_opts(dir.path().join(name), restricted()).unwrap();
+ (repo, dir)
+ }
+ use gix_ref::{transaction::Change, TargetRef};
+
+ use crate::{
+ bstr::BString,
+ remote::{
+ fetch,
+ fetch::{refs::tests::restricted, Mapping, RefLogMessage, Source, SpecIndex},
+ },
+ };
+
+ #[test]
+ fn various_valid_updates() {
+ let repo = repo("two-origins");
+ for (spec, expected_mode, reflog_message, detail) in [
+ (
+ "refs/heads/main:refs/remotes/origin/main",
+ fetch::refs::update::Mode::NoChangeNeeded,
+ Some("no update will be performed"),
+ "these refs are en-par since the initial clone",
+ ),
+ (
+ "refs/heads/main",
+ fetch::refs::update::Mode::NoChangeNeeded,
+ None,
+ "without local destination ref there is nothing to do for us, ever (except for FETCH_HEADs) later",
+ ),
+ (
+ "refs/heads/main:refs/remotes/origin/new-main",
+ fetch::refs::update::Mode::New,
+ Some("storing ref"),
+ "the destination branch doesn't exist and needs to be created",
+ ),
+ (
+ "refs/heads/main:refs/heads/feature",
+ fetch::refs::update::Mode::New,
+ Some("storing head"),
+ "reflog messages are specific to the type of branch stored, to some limited extend",
+ ),
+ (
+ "refs/heads/main:refs/tags/new-tag",
+ fetch::refs::update::Mode::New,
+ Some("storing tag"),
+ "reflog messages are specific to the type of branch stored, to some limited extend",
+ ),
+ (
+ "+refs/heads/main:refs/remotes/origin/new-main",
+ fetch::refs::update::Mode::New,
+ Some("storing ref"),
+ "just to validate that we really are in dry-run mode, or else this ref would be present now",
+ ),
+ (
+ "+refs/heads/main:refs/remotes/origin/g",
+ fetch::refs::update::Mode::FastForward,
+ Some("fast-forward (guessed in dry-run)"),
+ "a forced non-fastforward (main goes backwards), but dry-run calls it fast-forward",
+ ),
+ (
+ "+refs/heads/main:refs/tags/b-tag",
+ fetch::refs::update::Mode::Forced,
+ Some("updating tag"),
+ "tags can only be forced",
+ ),
+ (
+ "refs/heads/main:refs/tags/b-tag",
+ fetch::refs::update::Mode::RejectedTagUpdate,
+ None,
+ "otherwise a tag is always refusing itself to be overwritten (no-clobber)",
+ ),
+ (
+ "+refs/remotes/origin/g:refs/heads/main",
+ fetch::refs::update::Mode::RejectedCurrentlyCheckedOut {
+ worktree_dir: repo.work_dir().expect("present").to_owned(),
+ },
+ None,
+ "checked out branches cannot be written, as it requires a merge of sorts which isn't done here",
+ ),
+ (
+ "ffffffffffffffffffffffffffffffffffffffff:refs/heads/invalid-source-object",
+ fetch::refs::update::Mode::RejectedSourceObjectNotFound {
+ id: hex_to_id("ffffffffffffffffffffffffffffffffffffffff"),
+ },
+ None,
+ "checked out branches cannot be written, as it requires a merge of sorts which isn't done here",
+ ),
+ (
+ "refs/remotes/origin/g:refs/heads/not-currently-checked-out",
+ fetch::refs::update::Mode::FastForward,
+ Some("fast-forward (guessed in dry-run)"),
+ "a fast-forward only fast-forward situation, all good",
+ ),
+ ] {
+ let (mapping, specs) = mapping_from_spec(spec, &repo);
+ let out = fetch::refs::update(
+ &repo,
+ prefixed("action"),
+ &mapping,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ reflog_message.map(|_| fetch::DryRun::Yes).unwrap_or(fetch::DryRun::No),
+ fetch::WritePackedRefs::Never,
+ )
+ .unwrap();
+
+ assert_eq!(
+ out.updates,
+ vec![fetch::refs::Update {
+ mode: expected_mode.clone(),
+ edit_index: reflog_message.map(|_| 0),
+ }],
+ "{spec:?}: {detail}"
+ );
+ assert_eq!(out.edits.len(), reflog_message.map(|_| 1).unwrap_or(0));
+ if let Some(reflog_message) = reflog_message {
+ let edit = &out.edits[0];
+ match &edit.change {
+ Change::Update { log, new, .. } => {
+ assert_eq!(
+ log.message,
+ format!("action: {reflog_message}"),
+ "{spec}: reflog messages are specific and we emulate git word for word"
+ );
+ let remote_ref = repo
+ .find_reference(specs[0].to_ref().source().expect("always present"))
+ .unwrap();
+ assert_eq!(
+ new.id(),
+ remote_ref.target().id(),
+ "remote ref provides the id to set in the local reference"
+ )
+ }
+ _ => unreachable!("only updates"),
+ }
+ }
+ }
+ }
+
+ #[test]
+ fn checked_out_branches_in_worktrees_are_rejected_with_additional_information() -> Result {
+ let root = gix_path::realpath(gix_testtools::scripted_fixture_read_only_with_args(
+ "make_fetch_repos.sh",
+ [base_repo_path()],
+ )?)?;
+ let repo = root.join("worktree-root");
+ let repo = gix::open_opts(repo, restricted())?;
+ for (branch, path_from_root) in [
+ ("main", "worktree-root"),
+ ("wt-a-nested", "prev/wt-a-nested"),
+ ("wt-a", "wt-a"),
+ ("nested-wt-b", "wt-a/nested-wt-b"),
+ ("wt-c-locked", "wt-c-locked"),
+ ("wt-deleted", "wt-deleted"),
+ ] {
+ let spec = format!("refs/heads/main:refs/heads/{branch}");
+ let (mappings, specs) = mapping_from_spec(&spec, &repo);
+ let out = fetch::refs::update(
+ &repo,
+ prefixed("action"),
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::Yes,
+ fetch::WritePackedRefs::Never,
+ )?;
+
+ assert_eq!(
+ out.updates,
+ vec![fetch::refs::Update {
+ mode: fetch::refs::update::Mode::RejectedCurrentlyCheckedOut {
+ worktree_dir: root.join(path_from_root),
+ },
+ edit_index: None,
+ }],
+ "{spec}: checked-out checks are done before checking if a change would actually be required (here it isn't)"
+ );
+ assert_eq!(out.edits.len(), 0);
+ }
+ Ok(())
+ }
+
+ #[test]
+ fn local_symbolic_refs_are_never_written() {
+ let repo = repo("two-origins");
+ for source in ["refs/heads/main", "refs/heads/symbolic", "HEAD"] {
+ let (mappings, specs) = mapping_from_spec(&format!("{source}:refs/heads/symbolic"), &repo);
+ let out = fetch::refs::update(
+ &repo,
+ prefixed("action"),
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::Yes,
+ fetch::WritePackedRefs::Never,
+ )
+ .unwrap();
+
+ assert_eq!(out.edits.len(), 0);
+ assert_eq!(
+ out.updates,
+ vec![fetch::refs::Update {
+ mode: fetch::refs::update::Mode::RejectedSymbolic,
+ edit_index: None
+ }],
+ "we don't overwrite these as the checked-out check needs to consider much more than it currently does, we are playing it safe"
+ );
+ }
+ }
+
+ #[test]
+ fn remote_symbolic_refs_can_always_be_set_as_there_is_no_scenario_where_it_could_be_nonexisting_and_rejected() {
+ let repo = repo("two-origins");
+ let (mut mappings, specs) = mapping_from_spec("refs/heads/symbolic:refs/remotes/origin/new", &repo);
+ mappings.push(Mapping {
+ remote: Source::Ref(gix_protocol::handshake::Ref::Direct {
+ full_ref_name: "refs/heads/main".try_into().unwrap(),
+ object: hex_to_id("f99771fe6a1b535783af3163eba95a927aae21d5"),
+ }),
+ local: Some("refs/heads/symbolic".into()),
+ spec_index: SpecIndex::ExplicitInRemote(0),
+ });
+ let out = fetch::refs::update(
+ &repo,
+ prefixed("action"),
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::Yes,
+ fetch::WritePackedRefs::Never,
+ )
+ .unwrap();
+
+ assert_eq!(out.edits.len(), 1);
+ assert_eq!(
+ out.updates,
+ vec![
+ fetch::refs::Update {
+ mode: fetch::refs::update::Mode::New,
+ edit_index: Some(0)
+ },
+ fetch::refs::Update {
+ mode: fetch::refs::update::Mode::RejectedSymbolic,
+ edit_index: None
+ }
+ ],
+ );
+ let edit = &out.edits[0];
+ match &edit.change {
+ Change::Update { log, new, .. } => {
+ assert_eq!(log.message, "action: storing ref");
+ assert!(
+ new.try_name().is_some(),
+ "remote falls back to peeled id as it's the only thing we seem to have locally, it won't refer to a non-existing local ref"
+ );
+ }
+ _ => unreachable!("only updates"),
+ }
+ }
+
+ #[test]
+ fn local_direct_refs_are_never_written_with_symbolic_ones_but_see_only_the_destination() {
+ let repo = repo("two-origins");
+ let (mappings, specs) = mapping_from_spec("refs/heads/symbolic:refs/heads/not-currently-checked-out", &repo);
+ let out = fetch::refs::update(
+ &repo,
+ prefixed("action"),
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::Yes,
+ fetch::WritePackedRefs::Never,
+ )
+ .unwrap();
+
+ assert_eq!(out.edits.len(), 1);
+ assert_eq!(
+ out.updates,
+ vec![fetch::refs::Update {
+ mode: fetch::refs::update::Mode::NoChangeNeeded,
+ edit_index: Some(0)
+ }],
+ );
+ }
+
+ #[test]
+ fn remote_refs_cannot_map_to_local_head() {
+ let repo = repo("two-origins");
+ let (mappings, specs) = mapping_from_spec("refs/heads/main:HEAD", &repo);
+ let out = fetch::refs::update(
+ &repo,
+ prefixed("action"),
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::Yes,
+ fetch::WritePackedRefs::Never,
+ )
+ .unwrap();
+
+ assert_eq!(out.edits.len(), 1);
+ assert_eq!(
+ out.updates,
+ vec![fetch::refs::Update {
+ mode: fetch::refs::update::Mode::New,
+ edit_index: Some(0),
+ }],
+ );
+ let edit = &out.edits[0];
+ match &edit.change {
+ Change::Update { log, new, .. } => {
+ assert_eq!(log.message, "action: storing head");
+ assert!(
+ new.try_id().is_some(),
+ "remote is peeled, so local will be peeled as well"
+ );
+ }
+ _ => unreachable!("only updates"),
+ }
+ assert_eq!(
+ edit.name.as_bstr(),
+ "refs/heads/HEAD",
+ "it's not possible to refer to the local HEAD with refspecs"
+ );
+ }
+
+ #[test]
+ fn remote_symbolic_refs_can_be_written_locally_and_point_to_tracking_branch() {
+ let repo = repo("two-origins");
+ let (mut mappings, specs) = mapping_from_spec("HEAD:refs/remotes/origin/new-HEAD", &repo);
+ mappings.push(Mapping {
+ remote: Source::Ref(gix_protocol::handshake::Ref::Direct {
+ full_ref_name: "refs/heads/main".try_into().unwrap(),
+ object: hex_to_id("f99771fe6a1b535783af3163eba95a927aae21d5"),
+ }),
+ local: Some("refs/remotes/origin/main".into()),
+ spec_index: SpecIndex::ExplicitInRemote(0),
+ });
+ let out = fetch::refs::update(
+ &repo,
+ prefixed("action"),
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::Yes,
+ fetch::WritePackedRefs::Never,
+ )
+ .unwrap();
+
+ assert_eq!(
+ out.updates,
+ vec![
+ fetch::refs::Update {
+ mode: fetch::refs::update::Mode::New,
+ edit_index: Some(0),
+ },
+ fetch::refs::Update {
+ mode: fetch::refs::update::Mode::NoChangeNeeded,
+ edit_index: Some(1),
+ }
+ ],
+ );
+ assert_eq!(out.edits.len(), 2);
+ let edit = &out.edits[0];
+ match &edit.change {
+ Change::Update { log, new, .. } => {
+ assert_eq!(log.message, "action: storing ref");
+ assert_eq!(
+ new.try_name().expect("symbolic ref").as_bstr(),
+ "refs/remotes/origin/main",
+ "remote is symbolic, so local will be symbolic as well, but is rewritten to tracking branch"
+ );
+ }
+ _ => unreachable!("only updates"),
+ }
+ assert_eq!(edit.name.as_bstr(), "refs/remotes/origin/new-HEAD",);
+ }
+
+ #[test]
+ fn non_fast_forward_is_rejected_but_appears_to_be_fast_forward_in_dryrun_mode() {
+ let repo = repo("two-origins");
+ let (mappings, specs) = mapping_from_spec("refs/heads/main:refs/remotes/origin/g", &repo);
+ let reflog_message: BString = "very special".into();
+ let out = fetch::refs::update(
+ &repo,
+ RefLogMessage::Override {
+ message: reflog_message.clone(),
+ },
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::Yes,
+ fetch::WritePackedRefs::Never,
+ )
+ .unwrap();
+
+ assert_eq!(
+ out.updates,
+ vec![fetch::refs::Update {
+ mode: fetch::refs::update::Mode::FastForward,
+ edit_index: Some(0),
+ }],
+ "The caller has to be aware and note that dry-runs can't know about fast-forwards as they don't have remote objects"
+ );
+ assert_eq!(out.edits.len(), 1);
+ let edit = &out.edits[0];
+ match &edit.change {
+ Change::Update { log, .. } => {
+ assert_eq!(log.message, reflog_message);
+ }
+ _ => unreachable!("only updates"),
+ }
+ }
+
+ #[test]
+ fn non_fast_forward_is_rejected_if_dry_run_is_disabled() {
+ let (repo, _tmp) = repo_rw("two-origins");
+ let (mappings, specs) = mapping_from_spec("refs/remotes/origin/g:refs/heads/not-currently-checked-out", &repo);
+ let out = fetch::refs::update(
+ &repo,
+ prefixed("action"),
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::No,
+ fetch::WritePackedRefs::Never,
+ )
+ .unwrap();
+
+ assert_eq!(
+ out.updates,
+ vec![fetch::refs::Update {
+ mode: fetch::refs::update::Mode::RejectedNonFastForward,
+ edit_index: None,
+ }]
+ );
+ assert_eq!(out.edits.len(), 0);
+
+ let (mappings, specs) = mapping_from_spec("refs/heads/main:refs/remotes/origin/g", &repo);
+ let out = fetch::refs::update(
+ &repo,
+ prefixed("prefix"),
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::No,
+ fetch::WritePackedRefs::Never,
+ )
+ .unwrap();
+
+ assert_eq!(
+ out.updates,
+ vec![fetch::refs::Update {
+ mode: fetch::refs::update::Mode::FastForward,
+ edit_index: Some(0),
+ }]
+ );
+ assert_eq!(out.edits.len(), 1);
+ let edit = &out.edits[0];
+ match &edit.change {
+ Change::Update { log, .. } => {
+ assert_eq!(log.message, format!("prefix: {}", "fast-forward"));
+ }
+ _ => unreachable!("only updates"),
+ }
+ }
+
+ #[test]
+ fn fast_forwards_are_called_out_even_if_force_is_given() {
+ let (repo, _tmp) = repo_rw("two-origins");
+ let (mappings, specs) = mapping_from_spec("+refs/heads/main:refs/remotes/origin/g", &repo);
+ let out = fetch::refs::update(
+ &repo,
+ prefixed("prefix"),
+ &mappings,
+ &specs,
+ &[],
+ fetch::Tags::None,
+ fetch::DryRun::No,
+ fetch::WritePackedRefs::Never,
+ )
+ .unwrap();
+
+ assert_eq!(
+ out.updates,
+ vec![fetch::refs::Update {
+ mode: fetch::refs::update::Mode::FastForward,
+ edit_index: Some(0),
+ }]
+ );
+ assert_eq!(out.edits.len(), 1);
+ let edit = &out.edits[0];
+ match &edit.change {
+ Change::Update { log, .. } => {
+ assert_eq!(log.message, format!("prefix: {}", "fast-forward"));
+ }
+ _ => unreachable!("only updates"),
+ }
+ }
+
+ fn mapping_from_spec(spec: &str, repo: &gix::Repository) -> (Vec<fetch::Mapping>, Vec<gix::refspec::RefSpec>) {
+ let spec = gix_refspec::parse(spec.into(), gix_refspec::parse::Operation::Fetch).unwrap();
+ let group = gix_refspec::MatchGroup::from_fetch_specs(Some(spec));
+ let references = repo.references().unwrap();
+ let mut references: Vec<_> = references.all().unwrap().map(|r| into_remote_ref(r.unwrap())).collect();
+ references.push(into_remote_ref(repo.find_reference("HEAD").unwrap()));
+ let mappings = group
+ .match_remotes(references.iter().map(remote_ref_to_item))
+ .mappings
+ .into_iter()
+ .map(|m| fetch::Mapping {
+ remote: m
+ .item_index
+ .map(|idx| fetch::Source::Ref(references[idx].clone()))
+ .unwrap_or_else(|| match m.lhs {
+ gix_refspec::match_group::SourceRef::ObjectId(id) => fetch::Source::ObjectId(id),
+ _ => unreachable!("not a ref, must be id: {:?}", m),
+ }),
+ local: m.rhs.map(|r| r.into_owned()),
+ spec_index: SpecIndex::ExplicitInRemote(m.spec_index),
+ })
+ .collect();
+ (mappings, vec![spec.to_owned()])
+ }
+
+ fn into_remote_ref(mut r: gix::Reference<'_>) -> gix_protocol::handshake::Ref {
+ let full_ref_name = r.name().as_bstr().into();
+ match r.target() {
+ TargetRef::Peeled(id) => gix_protocol::handshake::Ref::Direct {
+ full_ref_name,
+ object: id.into(),
+ },
+ TargetRef::Symbolic(name) => {
+ let target = name.as_bstr().into();
+ let id = r.peel_to_id_in_place().unwrap();
+ gix_protocol::handshake::Ref::Symbolic {
+ full_ref_name,
+ target,
+ object: id.detach(),
+ }
+ }
+ }
+ }
+
+ fn remote_ref_to_item(r: &gix_protocol::handshake::Ref) -> gix_refspec::match_group::Item<'_> {
+ let (full_ref_name, target, object) = r.unpack();
+ gix_refspec::match_group::Item {
+ full_ref_name,
+ target: target.expect("no unborn HEAD"),
+ object,
+ }
+ }
+
+ fn prefixed(action: &str) -> RefLogMessage {
+ RefLogMessage::Prefixed { action: action.into() }
+ }
+}
diff --git a/vendor/gix/src/remote/connection/fetch/update_refs/update.rs b/vendor/gix/src/remote/connection/fetch/update_refs/update.rs
new file mode 100644
index 000000000..6eda1ffc0
--- /dev/null
+++ b/vendor/gix/src/remote/connection/fetch/update_refs/update.rs
@@ -0,0 +1,128 @@
+use std::path::PathBuf;
+
+use crate::remote::fetch;
+
+mod error {
+ /// The error returned when updating references.
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error(transparent)]
+ FindReference(#[from] crate::reference::find::Error),
+ #[error("A remote reference had a name that wasn't considered valid. Corrupt remote repo or insufficient checks on remote?")]
+ InvalidRefName(#[from] gix_validate::refname::Error),
+ #[error("Failed to update references to their new position to match their remote locations")]
+ EditReferences(#[from] crate::reference::edit::Error),
+ #[error("Failed to read or iterate worktree dir")]
+ WorktreeListing(#[from] std::io::Error),
+ #[error("Could not open worktree repository")]
+ OpenWorktreeRepo(#[from] crate::open::Error),
+ #[error("Could not find local commit for fast-forward ancestor check")]
+ FindCommit(#[from] crate::object::find::existing::Error),
+ }
+}
+
+pub use error::Error;
+
+/// The outcome of the refs-update operation at the end of a fetch.
+#[derive(Debug, Clone)]
+pub struct Outcome {
+ /// All edits that were performed to update local refs.
+ pub edits: Vec<gix_ref::transaction::RefEdit>,
+ /// Each update provides more information about what happened to the corresponding mapping.
+ /// Use [`iter_mapping_updates()`][Self::iter_mapping_updates()] to recombine the update information with ref-edits and their
+ /// mapping.
+ pub updates: Vec<super::Update>,
+}
+
+/// Describe the way a ref was updated
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum Mode {
+ /// No change was attempted as the remote ref didn't change compared to the current ref, or because no remote ref was specified
+ /// in the ref-spec.
+ NoChangeNeeded,
+ /// The old ref's commit was an ancestor of the new one, allowing for a fast-forward without a merge.
+ FastForward,
+ /// The ref was set to point to the new commit from the remote without taking into consideration its ancestry.
+ Forced,
+ /// A new ref has been created as there was none before.
+ New,
+ /// The reference belongs to a tag that was listed by the server but whose target didn't get sent as it doesn't point
+ /// to the commit-graph we were fetching explicitly.
+ ///
+ /// This kind of update only happens if `remote.<name>.tagOpt` is not set explicitly to either `--tags` or `--no-tags`.
+ ImplicitTagNotSentByRemote,
+ /// The object id to set the target reference to could not be found.
+ RejectedSourceObjectNotFound {
+ /// The id of the object that didn't exist in the object database, even though it should since it should be part of the pack.
+ id: gix_hash::ObjectId,
+ },
+ /// Tags can never be overwritten (whether the new object would be a fast-forward or not, or unchanged), unless the refspec
+ /// specifies force.
+ RejectedTagUpdate,
+ /// The reference update would not have been a fast-forward, and force is not specified in the ref-spec.
+ RejectedNonFastForward,
+ /// The update of a local symbolic reference was rejected.
+ RejectedSymbolic,
+ /// The update was rejected because the branch is checked out in the given worktree_dir.
+ ///
+ /// Note that the check applies to any known worktree, whether it's present on disk or not.
+ RejectedCurrentlyCheckedOut {
+ /// The path to the worktree directory where the branch is checked out.
+ worktree_dir: PathBuf,
+ },
+}
+
+impl std::fmt::Display for Mode {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match self {
+ Mode::NoChangeNeeded => "up-to-date",
+ Mode::FastForward => "fast-forward",
+ Mode::Forced => "forced-update",
+ Mode::New => "new",
+ Mode::ImplicitTagNotSentByRemote => "unrelated tag on remote",
+ Mode::RejectedSourceObjectNotFound { id } => return write!(f, "rejected ({id} not found)"),
+ Mode::RejectedTagUpdate => "rejected (would overwrite existing tag)",
+ Mode::RejectedNonFastForward => "rejected (non-fast-forward)",
+ Mode::RejectedSymbolic => "rejected (refusing to write symbolic refs)",
+ Mode::RejectedCurrentlyCheckedOut { worktree_dir } => {
+ return write!(
+ f,
+ "rejected (cannot write into checked-out branch at \"{}\")",
+ worktree_dir.display()
+ )
+ }
+ }
+ .fmt(f)
+ }
+}
+
+impl Outcome {
+ /// Produce an iterator over all information used to produce this outcome, ref-update by ref-update, using the `mappings`
+ /// that were used when producing the ref updates.
+ ///
+ /// Note that for mappings that don't have a corresponding entry in `refspecs`, the refspec will be `None`, even though that should never be the case.
+ /// This can happen if the `refspecs` passed in aren't the refspecs used to create the `mapping`, and it's up to the caller to sort it out.
+ pub fn iter_mapping_updates<'a, 'b>(
+ &self,
+ mappings: &'a [fetch::Mapping],
+ refspecs: &'b [gix_refspec::RefSpec],
+ extra_refspecs: &'b [gix_refspec::RefSpec],
+ ) -> impl Iterator<
+ Item = (
+ &super::Update,
+ &'a fetch::Mapping,
+ Option<&'b gix_refspec::RefSpec>,
+ Option<&gix_ref::transaction::RefEdit>,
+ ),
+ > {
+ self.updates.iter().zip(mappings.iter()).map(move |(update, mapping)| {
+ (
+ update,
+ mapping,
+ mapping.spec_index.get(refspecs, extra_refspecs),
+ update.edit_index.and_then(|idx| self.edits.get(idx)),
+ )
+ })
+ }
+}
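A hedged sketch of how the pieces above recombine after a fetch: the bindings `outcome`, `ref_map`, and `fetch_specs` are assumed to come from an earlier fetch, and `Update` is assumed to expose a `mode` field alongside the `edit_index` used above.

for (update, mapping, spec, edit) in
    outcome.iter_mapping_updates(&ref_map.mappings, fetch_specs, &ref_map.extra_refspecs)
{
    // `update.mode` renders as git-like status text thanks to the `Display` impl above.
    println!(
        "{}: {:?} -> {:?} (spec: {:?}, edit applied: {})",
        update.mode,
        mapping.remote,
        mapping.local,
        spec,
        edit.is_some()
    );
}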
diff --git a/vendor/gix/src/remote/connection/mod.rs b/vendor/gix/src/remote/connection/mod.rs
new file mode 100644
index 000000000..09943ecc4
--- /dev/null
+++ b/vendor/gix/src/remote/connection/mod.rs
@@ -0,0 +1,29 @@
+use crate::Remote;
+
+pub(crate) struct HandshakeWithRefs {
+ outcome: gix_protocol::handshake::Outcome,
+ refs: Vec<gix_protocol::handshake::Ref>,
+}
+
+/// A function that performs a given credential action, trying to obtain credentials for an operation that needs them.
+pub type AuthenticateFn<'a> = Box<dyn FnMut(gix_credentials::helper::Action) -> gix_credentials::protocol::Result + 'a>;
+
+/// A type to represent an ongoing connection to a remote host, typically with the connection already established.
+///
+/// It can be used to perform a variety of operations with the remote without worrying about protocol details,
+/// much like a remote procedure call.
+pub struct Connection<'a, 'repo, T, P> {
+ pub(crate) remote: &'a Remote<'repo>,
+ pub(crate) authenticate: Option<AuthenticateFn<'a>>,
+ pub(crate) transport_options: Option<Box<dyn std::any::Any>>,
+ pub(crate) transport: T,
+ pub(crate) progress: P,
+}
+
+mod access;
+
+///
+pub mod ref_map;
+
+///
+pub mod fetch;
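A hedged sketch of what a value of the `AuthenticateFn` alias above can look like. The `use` path is assumed, and returning `Ok(None)` is assumed to be the way to signal that no credentials could be provided.

use gix::remote::connection::AuthenticateFn; // path assumed for this sketch

// A callback that declines to provide credentials, e.g. to force anonymous access.
let decline: AuthenticateFn<'static> =
    Box::new(|_action: gix_credentials::helper::Action| -> gix_credentials::protocol::Result {
        Ok(None)
    });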
diff --git a/vendor/gix/src/remote/connection/ref_map.rs b/vendor/gix/src/remote/connection/ref_map.rs
new file mode 100644
index 000000000..0206e9002
--- /dev/null
+++ b/vendor/gix/src/remote/connection/ref_map.rs
@@ -0,0 +1,268 @@
+use std::collections::HashSet;
+
+use gix_features::progress::Progress;
+use gix_protocol::transport::client::Transport;
+
+use crate::{
+ bstr,
+ bstr::{BString, ByteVec},
+ remote::{connection::HandshakeWithRefs, fetch, fetch::SpecIndex, Connection, Direction},
+};
+
+/// The error returned by [`Connection::ref_map()`].
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum Error {
+ #[error("Failed to configure the transport before connecting to {url:?}")]
+ GatherTransportConfig {
+ url: BString,
+ source: crate::config::transport::Error,
+ },
+ #[error("Failed to configure the transport layer")]
+ ConfigureTransport(#[from] Box<dyn std::error::Error + Send + Sync + 'static>),
+ #[error(transparent)]
+ Handshake(#[from] gix_protocol::handshake::Error),
+ #[error("The object format {format:?} as used by the remote is unsupported")]
+ UnknownObjectFormat { format: BString },
+ #[error(transparent)]
+ ListRefs(#[from] gix_protocol::ls_refs::Error),
+ #[error(transparent)]
+ Transport(#[from] gix_protocol::transport::client::Error),
+ #[error(transparent)]
+ ConfigureCredentials(#[from] crate::config::credential_helpers::Error),
+ #[error(transparent)]
+ MappingValidation(#[from] gix_refspec::match_group::validate::Error),
+}
+
+impl gix_protocol::transport::IsSpuriousError for Error {
+ fn is_spurious(&self) -> bool {
+ match self {
+ Error::Transport(err) => err.is_spurious(),
+ Error::ListRefs(err) => err.is_spurious(),
+ Error::Handshake(err) => err.is_spurious(),
+ _ => false,
+ }
+ }
+}
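A hedged sketch of why this impl exists: callers can treat spurious failures, such as dropped connections, as retryable. The error path below is assumed and the helper is hypothetical.

use gix_protocol::transport::IsSpuriousError;

/// Hypothetical helper: `true` means the ref-map attempt failed for a likely-transient
/// reason and may be retried; `false` means retrying is pointless.
fn is_retryable(err: &gix::remote::ref_map::Error) -> bool {
    err.is_spurious()
}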
+
+/// For use in [`Connection::ref_map()`].
+#[derive(Debug, Clone)]
+pub struct Options {
+ /// Use a two-component prefix derived from the ref-spec's source, like `refs/heads/`, to let the server pre-filter refs,
+ /// with great potential for savings in traffic and local CPU time. Defaults to `true`.
+ pub prefix_from_spec_as_filter_on_remote: bool,
+ /// Parameters in the form of `(name, optional value)` to add to the handshake.
+ ///
+ /// This is useful in case of custom servers.
+ pub handshake_parameters: Vec<(String, Option<String>)>,
+ /// A list of refspecs to use as implicit refspecs which won't be saved or otherwise become part of the remote in question.
+ ///
+ /// This is useful for handling `remote.<name>.tagOpt` for example.
+ pub extra_refspecs: Vec<gix_refspec::RefSpec>,
+}
+
+impl Default for Options {
+ fn default() -> Self {
+ Options {
+ prefix_from_spec_as_filter_on_remote: true,
+ handshake_parameters: Vec::new(),
+ extra_refspecs: Vec::new(),
+ }
+ }
+}
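A hedged sketch of customizing these options. The path to `Options` is assumed, the handshake parameter name is purely hypothetical, and the refspec is parsed via the standard `gix_refspec::parse` entry point.

use gix::remote::ref_map::Options; // path assumed for this sketch

let options = Options {
    prefix_from_spec_as_filter_on_remote: true,
    handshake_parameters: vec![("my-custom-parameter".into(), None)],
    extra_refspecs: vec![gix_refspec::parse(
        "refs/tags/*:refs/tags/*".into(),
        gix_refspec::parse::Operation::Fetch,
    )
    .expect("valid refspec")
    .to_owned()],
};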
+
+impl<'remote, 'repo, T, P> Connection<'remote, 'repo, T, P>
+where
+ T: Transport,
+ P: Progress,
+{
+ /// List all references on the remote that have been filtered through our remote's [`refspecs`][crate::Remote::refspecs()]
+ /// for _fetching_.
+ ///
+ /// This comes in the form of all matching tips on the remote and the objects they point to, along with
+ /// the local tracking branch of these tips (if available).
+ ///
+ /// Note that this doesn't fetch the objects mentioned in the tips nor does it make any change to the underlying repository.
+ ///
+ /// # Consumption
+ ///
+ /// Due to the way the transport is managed, it's cleanest to use it for only a single interaction. Thus it's consumed along with
+ /// the connection.
+ ///
+ /// # Configuration
+ ///
+ /// - `gitoxide.userAgent` is read to obtain the application user agent for both git and HTTP servers.
+ #[allow(clippy::result_large_err)]
+ #[gix_protocol::maybe_async::maybe_async]
+ pub async fn ref_map(mut self, options: Options) -> Result<fetch::RefMap, Error> {
+ let res = self.ref_map_inner(options).await;
+ gix_protocol::indicate_end_of_interaction(&mut self.transport)
+ .await
+ .ok();
+ res
+ }
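A hedged fragment, assuming the blocking API, showing the intended call pattern; `connection` is assumed to have been established elsewhere for fetching.

let ref_map = connection
    .ref_map(Default::default())
    .expect("failed to obtain the remote ref mapping");
for mapping in &ref_map.mappings {
    // Each mapping pairs a remote source with the local tracking ref it would update, if any.
    println!("{:?} -> {:?}", mapping.remote, mapping.local);
}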
+
+ #[allow(clippy::result_large_err)]
+ #[gix_protocol::maybe_async::maybe_async]
+ pub(crate) async fn ref_map_inner(
+ &mut self,
+ Options {
+ prefix_from_spec_as_filter_on_remote,
+ handshake_parameters,
+ mut extra_refspecs,
+ }: Options,
+ ) -> Result<fetch::RefMap, Error> {
+ let null = gix_hash::ObjectId::null(gix_hash::Kind::Sha1); // OK to hardcode Sha1, it's not supposed to match, ever.
+
+ if let Some(tag_spec) = self.remote.fetch_tags.to_refspec().map(|spec| spec.to_owned()) {
+ if !extra_refspecs.contains(&tag_spec) {
+ extra_refspecs.push(tag_spec);
+ }
+ };
+ let specs = {
+ let mut s = self.remote.fetch_specs.clone();
+ s.extend(extra_refspecs.clone());
+ s
+ };
+ let remote = self
+ .fetch_refs(prefix_from_spec_as_filter_on_remote, handshake_parameters, &specs)
+ .await?;
+ let num_explicit_specs = self.remote.fetch_specs.len();
+ let group = gix_refspec::MatchGroup::from_fetch_specs(specs.iter().map(|s| s.to_ref()));
+ let (res, fixes) = group
+ .match_remotes(remote.refs.iter().map(|r| {
+ let (full_ref_name, target, object) = r.unpack();
+ gix_refspec::match_group::Item {
+ full_ref_name,
+ target: target.unwrap_or(&null),
+ object,
+ }
+ }))
+ .validated()?;
+ let mappings = res.mappings;
+ let mappings = mappings
+ .into_iter()
+ .map(|m| fetch::Mapping {
+ remote: m
+ .item_index
+ .map(|idx| fetch::Source::Ref(remote.refs[idx].clone()))
+ .unwrap_or_else(|| {
+ fetch::Source::ObjectId(match m.lhs {
+ gix_refspec::match_group::SourceRef::ObjectId(id) => id,
+ _ => unreachable!("no item index implies having an object id"),
+ })
+ }),
+ local: m.rhs.map(|c| c.into_owned()),
+ spec_index: if m.spec_index < num_explicit_specs {
+ SpecIndex::ExplicitInRemote(m.spec_index)
+ } else {
+ SpecIndex::Implicit(m.spec_index - num_explicit_specs)
+ },
+ })
+ .collect();
+
+ let object_hash = extract_object_format(self.remote.repo, &remote.outcome)?;
+ Ok(fetch::RefMap {
+ mappings,
+ extra_refspecs,
+ fixes,
+ remote_refs: remote.refs,
+ handshake: remote.outcome,
+ object_hash,
+ })
+ }
+
+ #[allow(clippy::result_large_err)]
+ #[gix_protocol::maybe_async::maybe_async]
+ async fn fetch_refs(
+ &mut self,
+ filter_by_prefix: bool,
+ extra_parameters: Vec<(String, Option<String>)>,
+ refspecs: &[gix_refspec::RefSpec],
+ ) -> Result<HandshakeWithRefs, Error> {
+ let mut credentials_storage;
+ let url = self.transport.to_url();
+ let authenticate = match self.authenticate.as_mut() {
+ Some(f) => f,
+ None => {
+ let url = self
+ .remote
+ .url(Direction::Fetch)
+ .map(ToOwned::to_owned)
+ .unwrap_or_else(|| gix_url::parse(url.as_ref()).expect("valid URL to be provided by transport"));
+ credentials_storage = self.configured_credentials(url)?;
+ &mut credentials_storage
+ }
+ };
+
+ if self.transport_options.is_none() {
+ self.transport_options = self
+ .remote
+ .repo
+ .transport_options(url.as_ref(), self.remote.name().map(|n| n.as_bstr()))
+ .map_err(|err| Error::GatherTransportConfig {
+ source: err,
+ url: url.into_owned(),
+ })?;
+ }
+ if let Some(config) = self.transport_options.as_ref() {
+ self.transport.configure(&**config)?;
+ }
+ let mut outcome =
+ gix_protocol::fetch::handshake(&mut self.transport, authenticate, extra_parameters, &mut self.progress)
+ .await?;
+ let refs = match outcome.refs.take() {
+ Some(refs) => refs,
+ None => {
+ let agent_feature = self.remote.repo.config.user_agent_tuple();
+ gix_protocol::ls_refs(
+ &mut self.transport,
+ &outcome.capabilities,
+ move |_capabilities, arguments, features| {
+ features.push(agent_feature);
+ if filter_by_prefix {
+ let mut seen = HashSet::new();
+ for spec in refspecs {
+ let spec = spec.to_ref();
+ if seen.insert(spec.instruction()) {
+ let mut prefixes = Vec::with_capacity(1);
+ spec.expand_prefixes(&mut prefixes);
+ for mut prefix in prefixes {
+ prefix.insert_str(0, "ref-prefix ");
+ arguments.push(prefix);
+ }
+ }
+ }
+ }
+ Ok(gix_protocol::ls_refs::Action::Continue)
+ },
+ &mut self.progress,
+ )
+ .await?
+ }
+ };
+ Ok(HandshakeWithRefs { outcome, refs })
+ }
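A hedged sketch isolating the prefix expansion used above: a glob fetch spec contributes its two-component prefix, which is then sent as a `ref-prefix` argument so the server can pre-filter its ref advertisement. The refspec literal is only an example.

let spec = gix_refspec::parse(
    "+refs/heads/*:refs/remotes/origin/*".into(),
    gix_refspec::parse::Operation::Fetch,
)
.expect("valid refspec");

let mut prefixes = Vec::new();
spec.expand_prefixes(&mut prefixes);
// `prefixes` should now hold `refs/heads/`, which the loop above turns into the
// `ref-prefix refs/heads/` argument for ls-refs.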
+}
+
+/// Assume sha1 if the server says nothing; anything beyond sha1 is currently rejected as unsupported rather than configured in the local repository.
+#[allow(clippy::result_large_err)]
+fn extract_object_format(
+ _repo: &crate::Repository,
+ outcome: &gix_protocol::handshake::Outcome,
+) -> Result<gix_hash::Kind, Error> {
+ use bstr::ByteSlice;
+ let object_hash =
+ if let Some(object_format) = outcome.capabilities.capability("object-format").and_then(|c| c.value()) {
+ let object_format = object_format.to_str().map_err(|_| Error::UnknownObjectFormat {
+ format: object_format.into(),
+ })?;
+ match object_format {
+ "sha1" => gix_hash::Kind::Sha1,
+ unknown => return Err(Error::UnknownObjectFormat { format: unknown.into() }),
+ }
+ } else {
+ gix_hash::Kind::Sha1
+ };
+ Ok(object_hash)
+}