author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-05-04 12:41:41 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-05-04 12:41:41 +0000
commit     10ee2acdd26a7f1298c6f6d6b7af9b469fe29b87 (patch)
tree       bdffd5d80c26cf4a7a518281a204be1ace85b4c1 /vendor/gix-ref/src/store/file
parent     Releasing progress-linux version 1.70.0+dfsg1-9~progress7.99u1. (diff)
Merging upstream version 1.70.0+dfsg2.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'vendor/gix-ref/src/store/file')
-rw-r--r--  vendor/gix-ref/src/store/file/find.rs  353
-rw-r--r--  vendor/gix-ref/src/store/file/log/iter.rs  245
-rw-r--r--  vendor/gix-ref/src/store/file/log/line.rs  285
-rw-r--r--  vendor/gix-ref/src/store/file/log/mod.rs  23
-rw-r--r--  vendor/gix-ref/src/store/file/loose/iter.rs  95
-rw-r--r--  vendor/gix-ref/src/store/file/loose/mod.rs  65
-rw-r--r--  vendor/gix-ref/src/store/file/loose/reference/decode.rs  83
-rw-r--r--  vendor/gix-ref/src/store/file/loose/reference/logiter.rs  47
-rw-r--r--  vendor/gix-ref/src/store/file/loose/reference/mod.rs  4
-rw-r--r--  vendor/gix-ref/src/store/file/loose/reflog.rs  244
-rw-r--r--  vendor/gix-ref/src/store/file/loose/reflog/create_or_update/tests.rs  155
-rw-r--r--  vendor/gix-ref/src/store/file/mod.rs  104
-rw-r--r--  vendor/gix-ref/src/store/file/overlay_iter.rs  432
-rw-r--r--  vendor/gix-ref/src/store/file/packed.rs  97
-rw-r--r--  vendor/gix-ref/src/store/file/raw_ext.rs  174
-rw-r--r--  vendor/gix-ref/src/store/file/transaction/commit.rs  201
-rw-r--r--  vendor/gix-ref/src/store/file/transaction/mod.rs  108
-rw-r--r--  vendor/gix-ref/src/store/file/transaction/prepare.rs  478
18 files changed, 3193 insertions, 0 deletions
diff --git a/vendor/gix-ref/src/store/file/find.rs b/vendor/gix-ref/src/store/file/find.rs
new file mode 100644
index 000000000..0c6d04b6c
--- /dev/null
+++ b/vendor/gix-ref/src/store/file/find.rs
@@ -0,0 +1,353 @@
+use std::{
+ borrow::Cow,
+ convert::TryInto,
+ io::{self, Read},
+ path::{Path, PathBuf},
+};
+
+pub use error::Error;
+
+use crate::{
+ file,
+ store_impl::{file::loose, packed},
+ BStr, BString, FullNameRef, PartialNameRef, Reference,
+};
+
+enum Transform {
+ EnforceRefsPrefix,
+ None,
+}
+
+impl file::Store {
+ /// Find a single reference by the given `partial` name, which is required to be a valid reference name.
+ ///
+ /// Returns `Ok(None)` if no such ref exists.
+ ///
+ /// ### Note
+ ///
+ /// * The lookup algorithm follows the one in [the git documentation][git-lookup-docs].
+ /// * The packed buffer is checked for modifications each time the method is called. See [`file::Store::try_find_packed()`]
+ /// for a version with more control.
+ ///
+ /// [git-lookup-docs]: https://github.com/git/git/blob/5d5b1473453400224ebb126bf3947e0a3276bdf5/Documentation/revisions.txt#L34-L46
+ pub fn try_find<'a, Name, E>(&self, partial: Name) -> Result<Option<Reference>, Error>
+ where
+ Name: TryInto<&'a PartialNameRef, Error = E>,
+ Error: From<E>,
+ {
+ let packed = self.assure_packed_refs_uptodate()?;
+ self.find_one_with_verified_input(partial.try_into()?, packed.as_ref().map(|b| &***b))
+ }
+
+ /// Similar to [`file::Store::try_find()`], but limited to loose references; a non-existing ref yields `Ok(None)`.
+ ///
+ /// Find only loose references, that is references that aren't in the packed-refs buffer.
+ /// All symbolic references are loose references.
+ /// `HEAD` is always a loose reference.
+ pub fn try_find_loose<'a, Name, E>(&self, partial: Name) -> Result<Option<loose::Reference>, Error>
+ where
+ Name: TryInto<&'a PartialNameRef, Error = E>,
+ Error: From<E>,
+ {
+ self.find_one_with_verified_input(partial.try_into()?, None)
+ .map(|r| r.map(|r| r.try_into().expect("only loose refs are found without pack")))
+ }
+
+ /// Similar to [`file::Store::try_find()`], but allows passing a snapshotted packed buffer instead.
+ pub fn try_find_packed<'a, Name, E>(
+ &self,
+ partial: Name,
+ packed: Option<&packed::Buffer>,
+ ) -> Result<Option<Reference>, Error>
+ where
+ Name: TryInto<&'a PartialNameRef, Error = E>,
+ Error: From<E>,
+ {
+ self.find_one_with_verified_input(partial.try_into()?, packed)
+ }
+
+ pub(crate) fn find_one_with_verified_input(
+ &self,
+ partial_name: &PartialNameRef,
+ packed: Option<&packed::Buffer>,
+ ) -> Result<Option<Reference>, Error> {
+ let mut buf = BString::default();
+ if partial_name.looks_like_full_name() {
+ if let Some(r) = self.find_inner("", partial_name, None, Transform::None, &mut buf)? {
+ return Ok(Some(r));
+ }
+ }
+
+ for inbetween in &["", "tags", "heads", "remotes"] {
+ match self.find_inner(inbetween, partial_name, packed, Transform::EnforceRefsPrefix, &mut buf) {
+ Ok(Some(r)) => return Ok(Some(r)),
+ Ok(None) => {
+ continue;
+ }
+ Err(err) => return Err(err),
+ }
+ }
+ self.find_inner(
+ "remotes",
+ partial_name
+ .to_owned()
+ .join("HEAD")
+ .expect("HEAD is valid name")
+ .as_ref(),
+ None,
+ Transform::EnforceRefsPrefix,
+ &mut buf,
+ )
+ }
+
+ fn find_inner(
+ &self,
+ inbetween: &str,
+ partial_name: &PartialNameRef,
+ packed: Option<&packed::Buffer>,
+ transform: Transform,
+ path_buf: &mut BString,
+ ) -> Result<Option<Reference>, Error> {
+ let add_refs_prefix = matches!(transform, Transform::EnforceRefsPrefix);
+ let full_name = partial_name.construct_full_name_ref(add_refs_prefix, inbetween, path_buf);
+ let content_buf = self.ref_contents(full_name).map_err(|err| Error::ReadFileContents {
+ source: err,
+ path: self.reference_path(full_name),
+ })?;
+
+ match content_buf {
+ None => {
+ if let Some(packed) = packed {
+ if let Some(full_name) = packed::find::transform_full_name_for_lookup(full_name) {
+ let full_name_backing;
+ let full_name = match &self.namespace {
+ Some(namespace) => {
+ full_name_backing = namespace.to_owned().into_namespaced_name(full_name);
+ full_name_backing.as_ref()
+ }
+ None => full_name,
+ };
+ if let Some(packed_ref) = packed.try_find_full_name(full_name)? {
+ let mut res: Reference = packed_ref.into();
+ if let Some(namespace) = &self.namespace {
+ res.strip_namespace(namespace);
+ }
+ return Ok(Some(res));
+ };
+ }
+ }
+ Ok(None)
+ }
+ Some(content) => Ok(Some(
+ loose::Reference::try_from_path(full_name.to_owned(), &content)
+ .map(Into::into)
+ .map(|mut r: Reference| {
+ if let Some(namespace) = &self.namespace {
+ r.strip_namespace(namespace);
+ }
+ r
+ })
+ .map_err(|err| Error::ReferenceCreation {
+ source: err,
+ relative_path: full_name.to_path().to_owned(),
+ })?,
+ )),
+ }
+ }
+}
+
+impl file::Store {
+ pub(crate) fn to_base_dir_and_relative_name<'a>(
+ &self,
+ name: &'a FullNameRef,
+ is_reflog: bool,
+ ) -> (Cow<'_, Path>, &'a FullNameRef) {
+ let commondir = self.common_dir_resolved();
+ let linked_git_dir =
+ |worktree_name: &BStr| commondir.join("worktrees").join(gix_path::from_bstr(worktree_name));
+ name.category_and_short_name()
+ .and_then(|(c, sn)| {
+ use crate::Category::*;
+ let sn = FullNameRef::new_unchecked(sn);
+ Some(match c {
+ LinkedPseudoRef { name: worktree_name } => is_reflog
+ .then(|| (linked_git_dir(worktree_name).into(), sn))
+ .unwrap_or((commondir.into(), name)),
+ Tag | LocalBranch | RemoteBranch | Note => (commondir.into(), name),
+ MainRef | MainPseudoRef => (commondir.into(), sn),
+ LinkedRef { name: worktree_name } => sn
+ .category()
+ .map_or(false, |cat| cat.is_worktree_private())
+ .then(|| {
+ if is_reflog {
+ (linked_git_dir(worktree_name).into(), sn)
+ } else {
+ (commondir.into(), name)
+ }
+ })
+ .unwrap_or((commondir.into(), sn)),
+ PseudoRef | Bisect | Rewritten | WorktreePrivate => return None,
+ })
+ })
+ .unwrap_or((self.git_dir.as_path().into(), name))
+ }
+
+ /// Implements the logic required to transform a fully qualified refname into a filesystem path
+ pub(crate) fn reference_path_with_base<'b>(&self, name: &'b FullNameRef) -> (Cow<'_, Path>, Cow<'b, Path>) {
+ let (base, name) = self.to_base_dir_and_relative_name(name, false);
+ (
+ base,
+ match &self.namespace {
+ None => gix_path::to_native_path_on_windows(name.as_bstr()),
+ Some(namespace) => {
+ gix_path::to_native_path_on_windows(namespace.to_owned().into_namespaced_name(name).into_inner())
+ }
+ },
+ )
+ }
+
+ /// Implements the logic required to transform a fully qualified refname into a filesystem path
+ pub(crate) fn reference_path(&self, name: &FullNameRef) -> PathBuf {
+ let (base, relative_path) = self.reference_path_with_base(name);
+ base.join(relative_path)
+ }
+
+ /// Read the contents of the ref file for the verified full reference `name`, or return `None` if it doesn't exist.
+ pub(crate) fn ref_contents(&self, name: &FullNameRef) -> io::Result<Option<Vec<u8>>> {
+ let ref_path = self.reference_path(name);
+
+ match std::fs::File::open(&ref_path) {
+ Ok(mut file) => {
+ let mut buf = Vec::with_capacity(128);
+ if let Err(err) = file.read_to_end(&mut buf) {
+ return if ref_path.is_dir() { Ok(None) } else { Err(err) };
+ }
+ Ok(buf.into())
+ }
+ Err(err) if err.kind() == io::ErrorKind::NotFound => Ok(None),
+ #[cfg(windows)]
+ Err(err) if err.kind() == std::io::ErrorKind::PermissionDenied => Ok(None),
+ Err(err) => Err(err),
+ }
+ }
+}
+
+///
+pub mod existing {
+ use std::convert::TryInto;
+
+ pub use error::Error;
+
+ use crate::{
+ file::{self},
+ store_impl::{
+ file::{find, loose},
+ packed,
+ },
+ PartialNameRef, Reference,
+ };
+
+ impl file::Store {
+ /// Similar to [`file::Store::try_find()`] but a non-existing ref is treated as error.
+ pub fn find<'a, Name, E>(&self, partial: Name) -> Result<Reference, Error>
+ where
+ Name: TryInto<&'a PartialNameRef, Error = E>,
+ crate::name::Error: From<E>,
+ {
+ let packed = self.assure_packed_refs_uptodate().map_err(find::Error::PackedOpen)?;
+ self.find_existing_inner(partial, packed.as_ref().map(|b| &***b))
+ }
+
+ /// Similar to [`file::Store::find()`], but supports a stable packed buffer.
+ pub fn find_packed<'a, Name, E>(
+ &self,
+ partial: Name,
+ packed: Option<&packed::Buffer>,
+ ) -> Result<Reference, Error>
+ where
+ Name: TryInto<&'a PartialNameRef, Error = E>,
+ crate::name::Error: From<E>,
+ {
+ self.find_existing_inner(partial, packed)
+ }
+
+ /// Similar to [`file::Store::find()`], but won't handle packed-refs.
+ pub fn find_loose<'a, Name, E>(&self, partial: Name) -> Result<loose::Reference, Error>
+ where
+ Name: TryInto<&'a PartialNameRef, Error = E>,
+ crate::name::Error: From<E>,
+ {
+ self.find_existing_inner(partial, None)
+ .map(|r| r.try_into().expect("always loose without packed"))
+ }
+
+ /// Similar to [`file::Store::find()`] but a non-existing ref is treated as error.
+ pub(crate) fn find_existing_inner<'a, Name, E>(
+ &self,
+ partial: Name,
+ packed: Option<&packed::Buffer>,
+ ) -> Result<Reference, Error>
+ where
+ Name: TryInto<&'a PartialNameRef, Error = E>,
+ crate::name::Error: From<E>,
+ {
+ let path = partial
+ .try_into()
+ .map_err(|err| Error::Find(find::Error::RefnameValidation(err.into())))?;
+ match self.find_one_with_verified_input(path, packed) {
+ Ok(Some(r)) => Ok(r),
+ Ok(None) => Err(Error::NotFound {
+ name: path.to_partial_path().to_owned(),
+ }),
+ Err(err) => Err(err.into()),
+ }
+ }
+ }
+
+ mod error {
+ use std::path::PathBuf;
+
+ use crate::store_impl::file::find;
+
+ /// The error returned by [`file::Store::find()`][crate::file::Store::find()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("An error occurred while trying to find a reference")]
+ Find(#[from] find::Error),
+ #[error("The ref partially named {name:?} could not be found")]
+ NotFound { name: PathBuf },
+ }
+ }
+}
+
+mod error {
+ use std::{convert::Infallible, io, path::PathBuf};
+
+ use crate::{file, store_impl::packed};
+
+ /// The error returned by [file::Store::find()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("The ref name or path is not a valid ref name")]
+ RefnameValidation(#[from] crate::name::Error),
+ #[error("The ref file {path:?} could not be read in full")]
+ ReadFileContents { source: io::Error, path: PathBuf },
+ #[error("The reference at \"{relative_path}\" could not be instantiated")]
+ ReferenceCreation {
+ source: file::loose::reference::decode::Error,
+ relative_path: PathBuf,
+ },
+ #[error("A packed ref lookup failed")]
+ PackedRef(#[from] packed::find::Error),
+ #[error("Could not open the packed refs buffer when trying to find references.")]
+ PackedOpen(#[from] packed::buffer::open::Error),
+ }
+
+ impl From<Infallible> for Error {
+ fn from(_: Infallible) -> Self {
+ unreachable!("this impl is needed to allow passing a known valid partial path as parameter")
+ }
+ }
+}
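
The lookup loop above mirrors git's documented precedence: a name that already looks like a full ref is tried verbatim, then `refs/<name>`, `refs/tags/<name>`, `refs/heads/<name>`, `refs/remotes/<name>`, and finally `refs/remotes/<name>/HEAD`. A minimal sketch of driving this from user code, assuming `gix-ref` and `gix-hash` as dependencies and a hypothetical repository path:

    use gix_ref::{file, store::WriteReflog};

    fn main() -> Result<(), Box<dyn std::error::Error>> {
        // Hypothetical repository location.
        let store = file::Store::at("/tmp/repo/.git", WriteReflog::Normal, gix_hash::Kind::Sha1);

        // Resolve a partial name using git's lookup precedence; `Ok(None)` means no match.
        match store.try_find("main")? {
            Some(reference) => println!("{:?} -> {:?}", reference.name, reference.target),
            None => println!("no ref matched 'main'"),
        }
        Ok(())
    }
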
diff --git a/vendor/gix-ref/src/store/file/log/iter.rs b/vendor/gix-ref/src/store/file/log/iter.rs
new file mode 100644
index 000000000..d62df6800
--- /dev/null
+++ b/vendor/gix-ref/src/store/file/log/iter.rs
@@ -0,0 +1,245 @@
+use gix_object::bstr::ByteSlice;
+
+use crate::{
+ file,
+ file::loose::reference::logiter::must_be_io_err,
+ store_impl::file::{log, log::iter::decode::LineNumber},
+ FullNameRef,
+};
+
+///
+pub mod decode {
+ use crate::store_impl::file::log;
+
+ /// The error returned by items in the [forward][super::forward()] and [reverse][super::reverse()] iterators
+ #[derive(Debug)]
+ pub struct Error {
+ inner: log::line::decode::Error,
+ line: LineNumber,
+ }
+
+ impl std::fmt::Display for Error {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ write!(f, "In line {}: {}", self.line, self.inner)
+ }
+ }
+
+ impl std::error::Error for Error {}
+
+ impl Error {
+ pub(crate) fn new(err: log::line::decode::Error, line: LineNumber) -> Self {
+ Error { line, inner: err }
+ }
+ }
+
+ #[derive(Debug)]
+ pub(crate) enum LineNumber {
+ FromStart(usize),
+ FromEnd(usize),
+ }
+
+ impl std::fmt::Display for LineNumber {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ let (line, suffix) = match self {
+ LineNumber::FromStart(line) => (line, ""),
+ LineNumber::FromEnd(line) => (line, " from the end"),
+ };
+ write!(f, "{}{}", line + 1, suffix)
+ }
+ }
+}
+
+/// Returns a forward iterator over the given `lines`, starting from the first line in the file and ending at the last.
+///
+/// Note that `lines` is the content of an entire reflog file.
+///
+/// This iterator is useful when the ref log file is going to be rewritten, which forces processing of the entire file.
+/// It will continue parsing even if individual log entries fail to parse, leaving it to the driver to decide whether to
+/// abort or continue.
+pub fn forward(lines: &[u8]) -> Forward<'_> {
+ Forward {
+ inner: lines.as_bstr().lines().enumerate(),
+ }
+}
+
+/// An iterator yielding parsed lines in a file from start to end, oldest to newest.
+pub struct Forward<'a> {
+ inner: std::iter::Enumerate<gix_object::bstr::Lines<'a>>,
+}
+
+impl<'a> Iterator for Forward<'a> {
+ type Item = Result<log::LineRef<'a>, decode::Error>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.inner.next().map(|(ln, line)| {
+ log::LineRef::from_bytes(line).map_err(|err| decode::Error::new(err, decode::LineNumber::FromStart(ln)))
+ })
+ }
+}
+
+/// A platform holding a reusable buffer for iterating ref log lines.
+#[must_use = "Iterators should be obtained from this platform"]
+pub struct Platform<'a, 's> {
+ /// The store containing the reflogs
+ pub store: &'s file::Store,
+ /// The full name of the reference whose reflog to retrieve.
+ pub name: &'a FullNameRef,
+ /// A reusable buffer for storing log lines read from disk.
+ pub buf: Vec<u8>,
+}
+
+impl<'a, 's> Platform<'a, 's> {
+ /// Return a reverse iterator over all log-lines, most recent to oldest.
+ pub fn rev(&mut self) -> std::io::Result<Option<log::iter::Reverse<'_, std::fs::File>>> {
+ self.buf.clear();
+ self.buf.resize(512, 0);
+ self.store
+ .reflog_iter_rev(self.name, &mut self.buf)
+ .map_err(must_be_io_err)
+ }
+
+ /// Return a forward iterator over all log-lines, oldest to most recent.
+ pub fn all(&mut self) -> std::io::Result<Option<log::iter::Forward<'_>>> {
+ self.buf.clear();
+ self.store.reflog_iter(self.name, &mut self.buf).map_err(must_be_io_err)
+ }
+}
+
+/// An iterator yielding parsed lines in a file in reverse, most recent to oldest.
+pub struct Reverse<'a, F> {
+ buf: &'a mut [u8],
+ count: usize,
+ read_and_pos: Option<(F, u64)>,
+ last_nl_pos: Option<usize>,
+}
+
+/// Return an iterator over entries of the `log` file in reverse, using `buf` as a sliding window.
+///
+/// Note that `buf` must be big enough to hold a typical line, or else partial lines will be parsed and probably fail
+/// in the process.
+///
+/// This iterator is very expensive in terms of I/O operations and shouldn't be used to read more than the last few entries of the log.
+/// Use a forward iterator instead for those cases.
+///
+/// It will continue parsing even if individual log entries fail to parse, leaving it to the driver to decide whether to
+/// abort or continue.
+pub fn reverse<F>(mut log: F, buf: &mut [u8]) -> std::io::Result<Reverse<'_, F>>
+where
+ F: std::io::Read + std::io::Seek,
+{
+ let pos = log.seek(std::io::SeekFrom::End(0))?;
+ if buf.is_empty() {
+ return Err(std::io::Error::new(
+ std::io::ErrorKind::Other,
+ "Zero sized buffers are not allowed, use 256 bytes or more for typical logs",
+ ));
+ }
+ Ok(Reverse {
+ buf,
+ count: 0,
+ read_and_pos: Some((log, pos)),
+ last_nl_pos: None,
+ })
+}
+
+///
+pub mod reverse {
+
+ use super::decode;
+
+ /// The error returned by the [`Reverse`][super::Reverse] iterator
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("The buffer could not be filled to make more lines available")]
+ Io(#[from] std::io::Error),
+ #[error("Could not decode log line")]
+ Decode(#[from] decode::Error),
+ }
+}
+
+impl<'a, F> Iterator for Reverse<'a, F>
+where
+ F: std::io::Read + std::io::Seek,
+{
+ type Item = Result<crate::log::Line, reverse::Error>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ match (self.last_nl_pos.take(), self.read_and_pos.take()) {
+ // Initial state - load first data block
+ (None, Some((mut read, pos))) => {
+ let npos = pos.saturating_sub(self.buf.len() as u64);
+ if let Err(err) = read.seek(std::io::SeekFrom::Start(npos)) {
+ return Some(Err(err.into()));
+ }
+
+ let n = (pos - npos) as usize;
+ if n == 0 {
+ return None;
+ }
+ let buf = &mut self.buf[..n];
+ if let Err(err) = read.read_exact(buf) {
+ return Some(Err(err.into()));
+ };
+
+ let last_byte = *buf.last().expect("we have read non-zero bytes before");
+ self.last_nl_pos = Some(if last_byte != b'\n' { buf.len() } else { buf.len() - 1 });
+ self.read_and_pos = Some((read, npos));
+ self.next()
+ }
+ // Has data block and can extract lines from it, load new blocks as needed
+ (Some(end), Some(read_and_pos)) => match self.buf[..end].rfind_byte(b'\n') {
+ Some(start) => {
+ self.read_and_pos = Some(read_and_pos);
+ self.last_nl_pos = Some(start);
+ let buf = &self.buf[start + 1..end];
+ let res = Some(
+ log::LineRef::from_bytes(buf)
+ .map_err(|err| {
+ reverse::Error::Decode(decode::Error::new(err, LineNumber::FromEnd(self.count)))
+ })
+ .map(Into::into),
+ );
+ self.count += 1;
+ res
+ }
+ None => {
+ let (mut read, last_read_pos) = read_and_pos;
+ if last_read_pos == 0 {
+ let buf = &self.buf[..end];
+ Some(
+ log::LineRef::from_bytes(buf)
+ .map_err(|err| {
+ reverse::Error::Decode(decode::Error::new(err, LineNumber::FromEnd(self.count)))
+ })
+ .map(Into::into),
+ )
+ } else {
+ let npos = last_read_pos.saturating_sub((self.buf.len() - end) as u64);
+ if npos == last_read_pos {
+ return Some(Err(std::io::Error::new(
+ std::io::ErrorKind::Other,
+ "buffer too small for line size",
+ )
+ .into()));
+ }
+ let n = (last_read_pos - npos) as usize;
+ self.buf.copy_within(0..end, n);
+ if let Err(err) = read.seek(std::io::SeekFrom::Start(npos)) {
+ return Some(Err(err.into()));
+ }
+ if let Err(err) = read.read_exact(&mut self.buf[..n]) {
+ return Some(Err(err.into()));
+ }
+ self.read_and_pos = Some((read, npos));
+ self.last_nl_pos = Some(n + end);
+ self.next()
+ }
+ }
+ },
+ // depleted
+ (None, None) => None,
+ (Some(_), None) => unreachable!("BUG: Invalid state: we never discard only our file, always both."),
+ }
+ }
+}
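
Because `reverse()` reads the file back-to-front through a fixed window, a caller only pays I/O for the entries it actually consumes. A sketch, assuming a reflog exists at the hypothetical path below:

    use std::fs::File;

    fn main() -> Result<(), Box<dyn std::error::Error>> {
        let file = File::open("/tmp/repo/.git/logs/HEAD")?; // hypothetical reflog path
        let mut buf = [0u8; 1024]; // sliding window; must fit at least one full line
        for line in gix_ref::file::log::iter::reverse(file, &mut buf)? {
            let line = line?; // each entry may fail to decode individually
            println!("{} -> {}: {}", line.previous_oid, line.new_oid, line.message);
        }
        Ok(())
    }
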
diff --git a/vendor/gix-ref/src/store/file/log/line.rs b/vendor/gix-ref/src/store/file/log/line.rs
new file mode 100644
index 000000000..1ac45c75c
--- /dev/null
+++ b/vendor/gix-ref/src/store/file/log/line.rs
@@ -0,0 +1,285 @@
+use gix_hash::ObjectId;
+
+use crate::{log::Line, store_impl::file::log::LineRef};
+
+impl<'a> LineRef<'a> {
+ /// Convert this instance into its mutable counterpart
+ pub fn to_owned(&self) -> Line {
+ self.clone().into()
+ }
+}
+
+mod write {
+ use std::io;
+
+ use gix_object::bstr::{BStr, ByteSlice};
+
+ use crate::log::Line;
+
+ /// The Error produced by [`Line::write_to()`] (but wrapped in an io error).
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ enum Error {
+ #[error("Messages must not contain newlines\\n")]
+ IllegalCharacter,
+ }
+
+ impl From<Error> for io::Error {
+ fn from(err: Error) -> Self {
+ io::Error::new(io::ErrorKind::Other, err)
+ }
+ }
+
+ /// Output
+ impl Line {
+ /// Serialize this instance to `out` in the git serialization format for ref log lines.
+ pub fn write_to(&self, mut out: impl io::Write) -> io::Result<()> {
+ write!(out, "{} {} ", self.previous_oid, self.new_oid)?;
+ self.signature.write_to(&mut out)?;
+ writeln!(out, "\t{}", check_newlines(self.message.as_ref())?)
+ }
+ }
+
+ fn check_newlines(input: &BStr) -> Result<&BStr, Error> {
+ if input.find_byte(b'\n').is_some() {
+ return Err(Error::IllegalCharacter);
+ }
+ Ok(input)
+ }
+}
+
+impl<'a> LineRef<'a> {
+ /// The previous object id of the ref. It will be a null hash if there was no previous id,
+ /// as is the case when this ref is being created.
+ pub fn previous_oid(&self) -> ObjectId {
+ ObjectId::from_hex(self.previous_oid).expect("parse validation")
+ }
+ /// The new object id of the ref, or a null hash if it is removed.
+ pub fn new_oid(&self) -> ObjectId {
+ ObjectId::from_hex(self.new_oid).expect("parse validation")
+ }
+}
+
+impl<'a> From<LineRef<'a>> for Line {
+ fn from(v: LineRef<'a>) -> Self {
+ Line {
+ previous_oid: v.previous_oid(),
+ new_oid: v.new_oid(),
+ signature: v.signature.into(),
+ message: v.message.into(),
+ }
+ }
+}
+
+///
+pub mod decode {
+ use gix_object::bstr::{BStr, ByteSlice};
+ use nom::{
+ bytes::complete::{tag, take_while},
+ combinator::opt,
+ error::{context, ContextError, ParseError},
+ sequence::{terminated, tuple},
+ IResult,
+ };
+
+ use crate::{file::log::LineRef, parse::hex_hash};
+
+ ///
+ mod error {
+ use gix_object::bstr::{BString, ByteSlice};
+
+ /// The error returned by [from_bytes(…)][super::LineRef::from_bytes()]
+ #[derive(Debug)]
+ pub struct Error {
+ pub input: BString,
+ }
+
+ impl std::fmt::Display for Error {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ write!(
+ f,
+ "{:?} did not match '<old-hexsha> <new-hexsha> <name> <<email>> <timestamp> <tz>\\t<message>'",
+ self.input
+ )
+ }
+ }
+
+ impl std::error::Error for Error {}
+
+ impl Error {
+ pub(crate) fn new(input: &[u8]) -> Self {
+ Error {
+ input: input.as_bstr().to_owned(),
+ }
+ }
+ }
+ }
+ pub use error::Error;
+
+ impl<'a> LineRef<'a> {
+ /// Decode a line from the given bytes which are expected to start at a hex sha.
+ pub fn from_bytes(input: &'a [u8]) -> Result<LineRef<'a>, Error> {
+ one::<()>(input).map(|(_, l)| l).map_err(|_| Error::new(input))
+ }
+ }
+
+ fn message<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], &'a BStr, E> {
+ if i.is_empty() {
+ Ok((&[], i.as_bstr()))
+ } else {
+ terminated(take_while(|c| c != b'\n'), opt(tag(b"\n")))(i).map(|(i, o)| (i, o.as_bstr()))
+ }
+ }
+
+ fn one<'a, E: ParseError<&'a [u8]> + ContextError<&'a [u8]>>(bytes: &'a [u8]) -> IResult<&[u8], LineRef<'a>, E> {
+ let (i, (old, new, signature, message_sep, message)) = context(
+ "<old-hexsha> <new-hexsha> <name> <<email>> <timestamp> <tz>\\t<message>",
+ tuple((
+ context("<old-hexsha>", terminated(hex_hash, tag(b" "))),
+ context("<new-hexsha>", terminated(hex_hash, tag(b" "))),
+ context("<name> <<email>> <timestamp>", gix_actor::signature::decode),
+ opt(tag(b"\t")),
+ context("<optional message>", message),
+ )),
+ )(bytes)?;
+
+ if message_sep.is_none() {
+ if let Some(first) = message.first() {
+ if !first.is_ascii_whitespace() {
+ return Err(nom::Err::Error(E::add_context(
+ i,
+ "log message must be separated from signature with whitespace",
+ E::from_error_kind(i, nom::error::ErrorKind::MapRes),
+ )));
+ }
+ }
+ }
+
+ Ok((
+ i,
+ LineRef {
+ previous_oid: old,
+ new_oid: new,
+ signature,
+ message,
+ },
+ ))
+ }
+
+ #[cfg(test)]
+ mod test {
+ use gix_actor::{Sign, Time};
+ use gix_object::bstr::ByteSlice;
+
+ use super::*;
+
+ /// Convert a hexadecimal hash into its corresponding `ObjectId` or _panic_.
+ fn hex_to_oid(hex: &str) -> gix_hash::ObjectId {
+ gix_hash::ObjectId::from_hex(hex.as_bytes()).expect("40 bytes hex")
+ }
+
+ fn with_newline(mut v: Vec<u8>) -> Vec<u8> {
+ v.push(b'\n');
+ v
+ }
+
+ mod invalid {
+ use gix_testtools::to_bstr_err;
+ use nom::error::VerboseError;
+
+ use super::one;
+
+ #[test]
+ fn completely_bogus_shows_error_with_context() {
+ let err = one::<VerboseError<&[u8]>>(b"definitely not a log entry")
+ .map_err(to_bstr_err)
+ .expect_err("this should fail");
+ assert!(err.to_string().contains("<old-hexsha> <new-hexsha>"));
+ }
+
+ #[test]
+ fn missing_whitespace_between_signature_and_message() {
+ let line = "0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 one <foo@example.com> 1234567890 -0000message";
+ let err = one::<VerboseError<&[u8]>>(line.as_bytes())
+ .map_err(to_bstr_err)
+ .expect_err("this should fail");
+ assert!(err
+ .to_string()
+ .contains("log message must be separated from signature with whitespace"));
+ }
+ }
+
+ const NULL_SHA1: &[u8] = b"0000000000000000000000000000000000000000";
+
+ #[test]
+ fn entry_with_empty_message() {
+ let line_without_nl: Vec<_> = b"0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 name <foo@example.com> 1234567890 -0000".to_vec();
+ let line_with_nl = with_newline(line_without_nl.clone());
+ for input in &[line_without_nl, line_with_nl] {
+ assert_eq!(
+ one::<nom::error::Error<_>>(input).expect("successful parsing").1,
+ LineRef {
+ previous_oid: NULL_SHA1.as_bstr(),
+ new_oid: NULL_SHA1.as_bstr(),
+ signature: gix_actor::SignatureRef {
+ name: b"name".as_bstr(),
+ email: b"foo@example.com".as_bstr(),
+ time: Time {
+ seconds_since_unix_epoch: 1234567890,
+ offset_in_seconds: 0,
+ sign: Sign::Minus
+ }
+ },
+ message: b"".as_bstr(),
+ }
+ );
+ }
+ }
+
+ #[test]
+ fn entry_with_message_without_newline_and_with_newline() {
+ let line_without_nl: Vec<_> = b"a5828ae6b52137b913b978e16cd2334482eb4c1f 89b43f80a514aee58b662ad606e6352e03eaeee4 Sebastian Thiel <foo@example.com> 1618030561 +0800\tpull --ff-only: Fast-forward".to_vec();
+ let line_with_nl = with_newline(line_without_nl.clone());
+
+ for input in &[line_without_nl, line_with_nl] {
+ let (remaining, res) = one::<nom::error::Error<_>>(input).expect("successful parsing");
+ assert!(remaining.is_empty(), "all consuming even without trailing newline");
+ let actual = LineRef {
+ previous_oid: b"a5828ae6b52137b913b978e16cd2334482eb4c1f".as_bstr(),
+ new_oid: b"89b43f80a514aee58b662ad606e6352e03eaeee4".as_bstr(),
+ signature: gix_actor::SignatureRef {
+ name: b"Sebastian Thiel".as_bstr(),
+ email: b"foo@example.com".as_bstr(),
+ time: Time {
+ seconds_since_unix_epoch: 1618030561,
+ offset_in_seconds: 28800,
+ sign: Sign::Plus,
+ },
+ },
+ message: b"pull --ff-only: Fast-forward".as_bstr(),
+ };
+ assert_eq!(res, actual);
+ assert_eq!(
+ actual.previous_oid(),
+ hex_to_oid("a5828ae6b52137b913b978e16cd2334482eb4c1f")
+ );
+ assert_eq!(actual.new_oid(), hex_to_oid("89b43f80a514aee58b662ad606e6352e03eaeee4"));
+ }
+ }
+
+ #[test]
+ fn two_lines_in_a_row_with_and_without_newline() {
+ let lines = b"0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 one <foo@example.com> 1234567890 -0000\t\n0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 two <foo@example.com> 1234567890 -0000\thello";
+ let (remainder, parsed) = one::<nom::error::Error<_>>(lines).expect("parse single line");
+ assert_eq!(parsed.message, b"".as_bstr(), "first message is empty");
+
+ let (remainder, parsed) = one::<nom::error::Error<_>>(remainder).expect("parse single line");
+ assert_eq!(
+ parsed.message,
+ b"hello".as_bstr(),
+ "second message is not and contains no newline"
+ );
+ assert!(remainder.is_empty());
+ }
+ }
+}
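
Outside of tests, the same parser is reached through `LineRef::from_bytes()`; a sketch reusing the sample line from the test above:

    use gix_object::bstr::ByteSlice;
    use gix_ref::file::log::LineRef;

    fn main() {
        let raw = b"a5828ae6b52137b913b978e16cd2334482eb4c1f 89b43f80a514aee58b662ad606e6352e03eaeee4 Sebastian Thiel <foo@example.com> 1618030561 +0800\tpull --ff-only: Fast-forward";
        let line = LineRef::from_bytes(raw).expect("well-formed reflog line");
        assert_eq!(line.message, b"pull --ff-only: Fast-forward".as_bstr());
        // Hex fields were validated during parsing; convert them on demand.
        let _new = line.new_oid();
        let _old = line.previous_oid();
    }
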
diff --git a/vendor/gix-ref/src/store/file/log/mod.rs b/vendor/gix-ref/src/store/file/log/mod.rs
new file mode 100644
index 000000000..5791358e4
--- /dev/null
+++ b/vendor/gix-ref/src/store/file/log/mod.rs
@@ -0,0 +1,23 @@
+use gix_object::bstr::BStr;
+
+pub use super::loose::reflog::{create_or_update, Error};
+
+///
+pub mod iter;
+mod line;
+
+/// A parsed ref log line.
+#[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd, Clone)]
+#[cfg_attr(feature = "serde1", derive(serde::Serialize, serde::Deserialize))]
+#[non_exhaustive]
+pub struct LineRef<'a> {
+ /// The previous object id in hexadecimal. Use [`LineRef::previous_oid()`] to get a more usable form.
+ pub previous_oid: &'a BStr,
+ /// The new object id in hexadecimal. Use [`LineRef::new_oid()`] to get a more usable form.
+ pub new_oid: &'a BStr,
+ /// The signature of the currently configured committer.
+ #[cfg_attr(feature = "serde1", serde(borrow))]
+ pub signature: gix_actor::SignatureRef<'a>,
+ /// The message providing details about the operation performed in this log line.
+ pub message: &'a BStr,
+}
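
A borrowed `LineRef` can be upgraded to the owned `Line` via `to_owned()` and serialized back in git's reflog format. A round-trip sketch, assuming the caller provides a valid line:

    fn roundtrip(raw: &[u8]) -> std::io::Result<Vec<u8>> {
        let line = gix_ref::file::log::LineRef::from_bytes(raw)
            .expect("caller passes a valid reflog line");
        let mut out = Vec::new();
        line.to_owned().write_to(&mut out)?; // rejects messages containing newlines
        Ok(out)
    }
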
diff --git a/vendor/gix-ref/src/store/file/loose/iter.rs b/vendor/gix-ref/src/store/file/loose/iter.rs
new file mode 100644
index 000000000..b4b46ccc4
--- /dev/null
+++ b/vendor/gix-ref/src/store/file/loose/iter.rs
@@ -0,0 +1,95 @@
+use std::path::{Path, PathBuf};
+
+use gix_features::fs::walkdir::DirEntryIter;
+use gix_object::bstr::ByteSlice;
+
+use crate::{file::iter::LooseThenPacked, store_impl::file, BString, FullName};
+
+/// An iterator over all valid loose reference paths as seen from a particular base directory.
+pub(in crate::store_impl::file) struct SortedLoosePaths {
+ pub(crate) base: PathBuf,
+ filename_prefix: Option<BString>,
+ file_walk: Option<DirEntryIter>,
+}
+
+impl SortedLoosePaths {
+ pub fn at(path: impl AsRef<Path>, base: impl Into<PathBuf>, filename_prefix: Option<BString>) -> Self {
+ let path = path.as_ref();
+ SortedLoosePaths {
+ base: base.into(),
+ filename_prefix,
+ file_walk: path.is_dir().then(|| {
+ // serial iteration as we expect most refs in packed-refs anyway.
+ gix_features::fs::walkdir_sorted_new(path, gix_features::fs::walkdir::Parallelism::Serial).into_iter()
+ }),
+ }
+ }
+}
+
+impl Iterator for SortedLoosePaths {
+ type Item = std::io::Result<(PathBuf, FullName)>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ for entry in self.file_walk.as_mut()?.by_ref() {
+ match entry {
+ Ok(entry) => {
+ if !entry.file_type().is_file() {
+ continue;
+ }
+ let full_path = entry.path().to_owned();
+ if let Some((prefix, name)) = self
+ .filename_prefix
+ .as_deref()
+ .and_then(|prefix| full_path.file_name().map(|name| (prefix, name)))
+ {
+ match gix_path::os_str_into_bstr(name) {
+ Ok(name) => {
+ if !name.starts_with(prefix) {
+ continue;
+ }
+ }
+ Err(_) => continue, // TODO: silently skipping ill-formed UTF-8 on windows - maybe this can be better?
+ }
+ }
+ let full_name = full_path
+ .strip_prefix(&self.base)
+ .expect("prefix-stripping cannot fail as prefix is our root");
+ let full_name = match gix_path::try_into_bstr(full_name) {
+ Ok(name) => {
+ let name = gix_path::to_unix_separators_on_windows(name);
+ name.into_owned()
+ }
+ Err(_) => continue, // TODO: silently skipping ill-formed UTF-8 on windows here, maybe there are better ways?
+ };
+
+ if gix_validate::reference::name_partial(full_name.as_bstr()).is_ok() {
+ let name = FullName(full_name);
+ return Some(Ok((full_path, name)));
+ } else {
+ continue;
+ }
+ }
+ Err(err) => return Some(Err(err.into_io_error().expect("no symlink related errors"))),
+ }
+ }
+ None
+ }
+}
+
+impl file::Store {
+ /// Return an iterator over all loose references, notably not including any packed ones, in lexical order.
+ /// Each of the references may fail to parse and the iterator will not stop if parsing fails, allowing the caller
+ /// to see all files that look like references whether valid or not.
+ ///
+ /// Reference files that do not constitute valid names will be silently ignored.
+ pub fn loose_iter(&self) -> std::io::Result<LooseThenPacked<'_, '_>> {
+ self.iter_packed(None)
+ }
+
+ /// Return an iterator over all loose references that start with the given `prefix`.
+ ///
+ /// Otherwise it's similar to [`loose_iter()`][file::Store::loose_iter()].
+ pub fn loose_iter_prefixed(&self, prefix: impl AsRef<Path>) -> std::io::Result<LooseThenPacked<'_, '_>> {
+ self.iter_prefixed_packed(prefix, None)
+ }
+}
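
In user code the prefixed variant is the typical entry point for listing branches. A sketch, assuming `store` was opened elsewhere:

    fn print_branches(store: &gix_ref::file::Store) -> std::io::Result<()> {
        for reference in store.loose_iter_prefixed("refs/heads/")? {
            match reference {
                Ok(r) => println!("{}", r.name.as_bstr()),
                Err(err) => eprintln!("skipping unparseable ref: {err}"),
            }
        }
        Ok(())
    }
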
diff --git a/vendor/gix-ref/src/store/file/loose/mod.rs b/vendor/gix-ref/src/store/file/loose/mod.rs
new file mode 100644
index 000000000..230641509
--- /dev/null
+++ b/vendor/gix-ref/src/store/file/loose/mod.rs
@@ -0,0 +1,65 @@
+use crate::{FullName, Kind, Target};
+
+/// A git _ref_ which is stored in a file.
+#[derive(Debug, PartialOrd, PartialEq, Ord, Eq, Hash, Clone)]
+pub struct Reference {
+ /// The path to uniquely identify this ref within its store.
+ pub name: FullName,
+ /// The target of the reference, either a symbolic reference by full name or an object by its id.
+ pub target: Target,
+}
+
+impl Reference {
+ /// Return the kind of ref.
+ pub fn kind(&self) -> Kind {
+ self.target.kind()
+ }
+}
+
+///
+pub(crate) mod reflog;
+
+///
+pub(crate) mod iter;
+///
+pub mod reference;
+
+mod init {
+ use std::path::PathBuf;
+
+ use crate::store_impl::file;
+
+ impl file::Store {
+ /// Create a new instance at the given `git_dir`, which commonly is a standard git repository with a
+ /// `refs/` subdirectory.
+ /// The `object_hash` defines which kind of hash we should recognize.
+ pub fn at(git_dir: impl Into<PathBuf>, write_reflog: file::WriteReflog, object_hash: gix_hash::Kind) -> Self {
+ file::Store {
+ git_dir: git_dir.into(),
+ common_dir: None,
+ write_reflog,
+ namespace: None,
+ packed: gix_features::fs::MutableSnapshot::new().into(),
+ object_hash,
+ }
+ }
+
+ /// Like [`at()`][file::Store::at()], but for _linked_ work-trees which use `git_dir` as private ref store and `common_dir` for
+ /// shared references.
+ pub fn for_linked_worktree(
+ git_dir: impl Into<PathBuf>,
+ common_dir: impl Into<PathBuf>,
+ write_reflog: file::WriteReflog,
+ object_hash: gix_hash::Kind,
+ ) -> Self {
+ file::Store {
+ git_dir: git_dir.into(),
+ common_dir: Some(common_dir.into()),
+ write_reflog,
+ namespace: None,
+ packed: gix_features::fs::MutableSnapshot::new().into(),
+ object_hash,
+ }
+ }
+ }
+}
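
The two constructors map directly onto git's on-disk layout. A sketch with hypothetical paths for a repository and one linked work tree:

    use gix_ref::{file, store::WriteReflog};

    fn open_stores() -> (file::Store, file::Store) {
        let main = file::Store::at("/repo/.git", WriteReflog::Normal, gix_hash::Kind::Sha1);
        let linked = file::Store::for_linked_worktree(
            "/repo/.git/worktrees/feature", // work-tree private refs
            "/repo/.git",                   // shared (common) refs
            WriteReflog::Normal,
            gix_hash::Kind::Sha1,
        );
        (main, linked)
    }
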
diff --git a/vendor/gix-ref/src/store/file/loose/reference/decode.rs b/vendor/gix-ref/src/store/file/loose/reference/decode.rs
new file mode 100644
index 000000000..9bf2f7c29
--- /dev/null
+++ b/vendor/gix-ref/src/store/file/loose/reference/decode.rs
@@ -0,0 +1,83 @@
+use std::convert::{TryFrom, TryInto};
+
+use gix_hash::ObjectId;
+use gix_object::bstr::BString;
+use nom::{
+ bytes::complete::{tag, take_while},
+ combinator::{map, opt},
+ sequence::terminated,
+ IResult,
+};
+
+use crate::{
+ parse::{hex_hash, newline},
+ store_impl::file::loose::Reference,
+ FullName, Target,
+};
+
+enum MaybeUnsafeState {
+ Id(ObjectId),
+ UnvalidatedPath(BString),
+}
+
+/// The error returned by [`Reference::try_from_path()`].
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum Error {
+ #[error("{content:?} could not be parsed")]
+ Parse { content: BString },
+ #[error("The path {path:?} to a symbolic reference within a ref file is invalid")]
+ RefnameValidation {
+ source: gix_validate::reference::name::Error,
+ path: BString,
+ },
+}
+
+impl TryFrom<MaybeUnsafeState> for Target {
+ type Error = Error;
+
+ fn try_from(v: MaybeUnsafeState) -> Result<Self, Self::Error> {
+ Ok(match v {
+ MaybeUnsafeState::Id(id) => Target::Peeled(id),
+ MaybeUnsafeState::UnvalidatedPath(name) => Target::Symbolic(match gix_validate::refname(name.as_ref()) {
+ Ok(_) => FullName(name),
+ Err(err) => {
+ return Err(Error::RefnameValidation {
+ source: err,
+ path: name,
+ })
+ }
+ }),
+ })
+ }
+}
+
+impl Reference {
+ /// Create a new reference with `name` as its unique identifier, parsing `path_contents`
+ /// (the bytes read from the corresponding ref file) to obtain the ref's value.
+ pub fn try_from_path(name: FullName, path_contents: &[u8]) -> Result<Self, Error> {
+ Ok(Reference {
+ name,
+ target: parse(path_contents)
+ .map_err(|_| Error::Parse {
+ content: path_contents.into(),
+ })?
+ .1
+ .try_into()?,
+ })
+ }
+}
+
+fn parse(bytes: &[u8]) -> IResult<&[u8], MaybeUnsafeState> {
+ let is_space = |b: u8| b == b' ';
+ if let (path, Some(_ref_prefix)) = opt(terminated(tag("ref: "), take_while(is_space)))(bytes)? {
+ map(
+ terminated(take_while(|b| b != b'\r' && b != b'\n'), opt(newline)),
+ |path| MaybeUnsafeState::UnvalidatedPath(path.into()),
+ )(path)
+ } else {
+ map(terminated(hex_hash, opt(newline)), |hex| {
+ MaybeUnsafeState::Id(ObjectId::from_hex(hex).expect("prior validation"))
+ })(bytes)
+ }
+}
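
The two accepted file shapes are `ref: <target>` for symbolic refs and a bare hex hash for direct ones. A decoding sketch, assuming the usual `TryFrom<&str>` conversion for `FullName`:

    use gix_ref::file::loose::Reference;

    fn main() -> Result<(), Box<dyn std::error::Error>> {
        let name: gix_ref::FullName = "refs/heads/main".try_into()?;
        let symbolic = Reference::try_from_path(name.clone(), b"ref: refs/heads/feature\n")?;
        let direct = Reference::try_from_path(name, b"28ce6a8b26aa170e1de65536fe8abe1832bd3242\n")?;
        println!("{:?} / {:?}", symbolic.target, direct.target);
        Ok(())
    }
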
diff --git a/vendor/gix-ref/src/store/file/loose/reference/logiter.rs b/vendor/gix-ref/src/store/file/loose/reference/logiter.rs
new file mode 100644
index 000000000..0bc81f22d
--- /dev/null
+++ b/vendor/gix-ref/src/store/file/loose/reference/logiter.rs
@@ -0,0 +1,47 @@
+use crate::store_impl::{
+ file,
+ file::{log, loose, loose::Reference},
+};
+
+pub(crate) fn must_be_io_err(err: loose::reflog::Error) -> std::io::Error {
+ match err {
+ loose::reflog::Error::Io(err) => err,
+ loose::reflog::Error::RefnameValidation(_) => unreachable!("we are called from a valid ref"),
+ }
+}
+
+impl Reference {
+ /// Returns true if a reflog exists in the given `store`.
+ ///
+ /// Please note that this method shouldn't be used to check if a log exists before trying to read it, but instead
+ /// is meant to be the fastest possible way to determine if a log exists or not.
+ /// If the caller needs to know if it's readable, try to read the log instead with a reverse or forward iterator.
+ pub fn log_exists(&self, store: &file::Store) -> bool {
+ store
+ .reflog_exists(self.name.as_ref())
+ .expect("name conversion infallible")
+ }
+ /// Return a reflog reverse iterator for this ref, reading chunks from the back into the fixed buffer `buf`, in the given `store`.
+ ///
+ /// The iterator will traverse log entries from most recent to oldest, reading the underlying file in chunks from the back.
+ /// Return `Ok(None)` if no reflog exists.
+ pub fn log_iter_rev<'b>(
+ &self,
+ store: &file::Store,
+ buf: &'b mut [u8],
+ ) -> std::io::Result<Option<log::iter::Reverse<'b, std::fs::File>>> {
+ store.reflog_iter_rev(self.name.as_ref(), buf).map_err(must_be_io_err)
+ }
+
+ /// Return a reflog forward iterator for this ref and write its file contents into `buf`, in the given `store`.
+ ///
+ /// The iterator will traverse log entries from oldest to newest.
+ /// Return `Ok(None)` if no reflog exists.
+ pub fn log_iter<'a, 'b: 'a>(
+ &'a self,
+ store: &file::Store,
+ buf: &'b mut Vec<u8>,
+ ) -> std::io::Result<Option<impl Iterator<Item = Result<log::LineRef<'b>, log::iter::decode::Error>> + 'a>> {
+ store.reflog_iter(self.name.as_ref(), buf).map_err(must_be_io_err)
+ }
+}
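
Reading just the most recent entry of a ref's log through these helpers could look like this sketch, where `store` and `reference` are assumed to exist:

    fn print_last_entry(reference: &gix_ref::file::loose::Reference, store: &gix_ref::file::Store) {
        let mut buf = [0u8; 512]; // fixed window, read from the end of the log
        if let Ok(Some(mut entries)) = reference.log_iter_rev(store, &mut buf) {
            if let Some(Ok(line)) = entries.next() {
                println!("most recent: {}", line.message);
            }
        }
    }
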
diff --git a/vendor/gix-ref/src/store/file/loose/reference/mod.rs b/vendor/gix-ref/src/store/file/loose/reference/mod.rs
new file mode 100644
index 000000000..3e5ce0683
--- /dev/null
+++ b/vendor/gix-ref/src/store/file/loose/reference/mod.rs
@@ -0,0 +1,4 @@
+pub(crate) mod logiter;
+
+///
+pub mod decode;
diff --git a/vendor/gix-ref/src/store/file/loose/reflog.rs b/vendor/gix-ref/src/store/file/loose/reflog.rs
new file mode 100644
index 000000000..a43d773fe
--- /dev/null
+++ b/vendor/gix-ref/src/store/file/loose/reflog.rs
@@ -0,0 +1,244 @@
+use std::{convert::TryInto, io::Read, path::PathBuf};
+
+use crate::{
+ store_impl::{file, file::log},
+ FullNameRef,
+};
+
+impl file::Store {
+ /// Returns true if a reflog exists for the given reference `name`.
+ ///
+ /// Please note that this method shouldn't be used to check if a log exists before trying to read it, but instead
+ /// is meant to be the fastest possible way to determine if a log exists or not.
+ /// If the caller needs to know if it's readable, try to read the log instead with a reverse or forward iterator.
+ pub fn reflog_exists<'a, Name, E>(&self, name: Name) -> Result<bool, E>
+ where
+ Name: TryInto<&'a FullNameRef, Error = E>,
+ crate::name::Error: From<E>,
+ {
+ Ok(self.reflog_path(name.try_into()?).is_file())
+ }
+
+ /// Return a reflog reverse iterator for the given fully qualified `name`, reading chunks from the back into the fixed buffer `buf`.
+ ///
+ /// The iterator will traverse log entries from most recent to oldest, reading the underlying file in chunks from the back.
+ /// Return `Ok(None)` if no reflog exists.
+ pub fn reflog_iter_rev<'a, 'b, Name, E>(
+ &self,
+ name: Name,
+ buf: &'b mut [u8],
+ ) -> Result<Option<log::iter::Reverse<'b, std::fs::File>>, Error>
+ where
+ Name: TryInto<&'a FullNameRef, Error = E>,
+ crate::name::Error: From<E>,
+ {
+ let name: &FullNameRef = name.try_into().map_err(|err| Error::RefnameValidation(err.into()))?;
+ let path = self.reflog_path(name);
+ if path.is_dir() {
+ return Ok(None);
+ }
+ match std::fs::File::open(&path) {
+ Ok(file) => Ok(Some(log::iter::reverse(file, buf)?)),
+ Err(err) if err.kind() == std::io::ErrorKind::NotFound => Ok(None),
+ Err(err) => Err(err.into()),
+ }
+ }
+
+ /// Return a reflog forward iterator for the given fully qualified `name` and write its file contents into `buf`.
+ ///
+ /// The iterator will traverse log entries from oldest to newest.
+ /// Return `Ok(None)` if no reflog exists.
+ pub fn reflog_iter<'a, 'b, Name, E>(
+ &self,
+ name: Name,
+ buf: &'b mut Vec<u8>,
+ ) -> Result<Option<log::iter::Forward<'b>>, Error>
+ where
+ Name: TryInto<&'a FullNameRef, Error = E>,
+ crate::name::Error: From<E>,
+ {
+ let name: &FullNameRef = name.try_into().map_err(|err| Error::RefnameValidation(err.into()))?;
+ let path = self.reflog_path(name);
+ match std::fs::File::open(&path) {
+ Ok(mut file) => {
+ buf.clear();
+ if let Err(err) = file.read_to_end(buf) {
+ return if path.is_dir() { Ok(None) } else { Err(err.into()) };
+ }
+ Ok(Some(log::iter::forward(buf)))
+ }
+ Err(err) if err.kind() == std::io::ErrorKind::NotFound => Ok(None),
+ #[cfg(windows)]
+ Err(err) if err.kind() == std::io::ErrorKind::PermissionDenied => Ok(None),
+ Err(err) => Err(err.into()),
+ }
+ }
+}
+
+impl file::Store {
+ /// Implements the logic required to transform a fully qualified refname into its log name
+ pub(crate) fn reflog_path(&self, name: &FullNameRef) -> PathBuf {
+ let (base, rela_path) = self.reflog_base_and_relative_path(name);
+ base.join(rela_path)
+ }
+}
+
+///
+pub mod create_or_update {
+ use std::{
+ borrow::Cow,
+ io::Write,
+ path::{Path, PathBuf},
+ };
+
+ use gix_hash::{oid, ObjectId};
+ use gix_object::bstr::BStr;
+
+ use crate::store_impl::{file, file::WriteReflog};
+
+ impl file::Store {
+ #[allow(clippy::too_many_arguments)]
+ pub(crate) fn reflog_create_or_append(
+ &self,
+ name: &FullNameRef,
+ previous_oid: Option<ObjectId>,
+ new: &oid,
+ committer: Option<gix_actor::SignatureRef<'_>>,
+ message: &BStr,
+ mut force_create_reflog: bool,
+ ) -> Result<(), Error> {
+ let (reflog_base, full_name) = self.reflog_base_and_relative_path(name);
+ match self.write_reflog {
+ WriteReflog::Normal | WriteReflog::Always => {
+ if self.write_reflog == WriteReflog::Always {
+ force_create_reflog = true;
+ }
+ let mut options = std::fs::OpenOptions::new();
+ options.append(true).read(false);
+ let log_path = reflog_base.join(&full_name);
+
+ if force_create_reflog || self.should_autocreate_reflog(&full_name) {
+ let parent_dir = log_path.parent().expect("always with parent directory");
+ gix_tempfile::create_dir::all(parent_dir, Default::default()).map_err(|err| {
+ Error::CreateLeadingDirectories {
+ source: err,
+ reflog_directory: parent_dir.to_owned(),
+ }
+ })?;
+ options.create(true);
+ };
+
+ let file_for_appending = match options.open(&log_path) {
+ Ok(f) => Some(f),
+ Err(err) if err.kind() == std::io::ErrorKind::NotFound => None,
+ Err(err) => {
+ // TODO: when Kind::IsADirectory becomes stable, use that.
+ if log_path.is_dir() {
+ gix_tempfile::remove_dir::empty_depth_first(&log_path)
+ .and_then(|_| options.open(&log_path))
+ .map(Some)
+ .map_err(|_| Error::Append {
+ source: err,
+ reflog_path: self.reflog_path(name),
+ })?
+ } else {
+ return Err(Error::Append {
+ source: err,
+ reflog_path: log_path,
+ });
+ }
+ }
+ };
+
+ if let Some(mut file) = file_for_appending {
+ let committer = committer.ok_or(Error::MissingCommitter)?;
+ write!(file, "{} {} ", previous_oid.unwrap_or_else(|| new.kind().null()), new)
+ .and_then(|_| committer.write_to(&mut file))
+ .and_then(|_| {
+ if !message.is_empty() {
+ writeln!(file, "\t{message}")
+ } else {
+ writeln!(file)
+ }
+ })
+ .map_err(|err| Error::Append {
+ source: err,
+ reflog_path: self.reflog_path(name),
+ })?;
+ }
+ Ok(())
+ }
+ WriteReflog::Disable => Ok(()),
+ }
+ }
+
+ fn should_autocreate_reflog(&self, full_name: &Path) -> bool {
+ full_name.starts_with("refs/heads/")
+ || full_name.starts_with("refs/remotes/")
+ || full_name.starts_with("refs/notes/")
+ || full_name.starts_with("refs/worktree/") // NOTE: git does not write reflogs for worktree private refs
+ || full_name == Path::new("HEAD")
+ }
+
+ /// Returns the base path under which all reflogs live, along with the relative path of the reflog for `name`.
+ pub(in crate::store_impl::file) fn reflog_base_and_relative_path<'a>(
+ &self,
+ name: &'a FullNameRef,
+ ) -> (PathBuf, Cow<'a, Path>) {
+ let is_reflog = true;
+ let (base, name) = self.to_base_dir_and_relative_name(name, is_reflog);
+ (
+ base.join("logs"),
+ match &self.namespace {
+ None => gix_path::to_native_path_on_windows(name.as_bstr()),
+ Some(namespace) => gix_path::to_native_path_on_windows(
+ namespace.to_owned().into_namespaced_name(name).into_inner(),
+ ),
+ },
+ )
+ }
+ }
+
+ #[cfg(test)]
+ mod tests;
+
+ mod error {
+ use std::path::PathBuf;
+
+ /// The error returned when creating or appending to a reflog
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("Could create one or more directories in {reflog_directory:?} to contain reflog file")]
+ CreateLeadingDirectories {
+ source: std::io::Error,
+ reflog_directory: PathBuf,
+ },
+ #[error("Could not open reflog file at {reflog_path:?} for appending")]
+ Append {
+ source: std::io::Error,
+ reflog_path: PathBuf,
+ },
+ #[error("reflog message must not contain newlines")]
+ MessageWithNewlines,
+ #[error("reflog messages need a committer which isn't set")]
+ MissingCommitter,
+ }
+ }
+ pub use error::Error;
+
+ use crate::FullNameRef;
+}
+
+mod error {
+ /// The error returned by [crate::file::Store::reflog_iter()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("The reflog name or path is not a valid ref name")]
+ RefnameValidation(#[from] crate::name::Error),
+ #[error("The reflog file could not read")]
+ Io(#[from] std::io::Error),
+ }
+}
+pub use error::Error;
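
Driving the forward iterator from the store directly, oldest entry first (a sketch assuming an existing `store`):

    fn print_head_log(store: &gix_ref::file::Store) -> Result<(), Box<dyn std::error::Error>> {
        let mut buf = Vec::new();
        if let Some(lines) = store.reflog_iter("HEAD", &mut buf)? {
            for line in lines {
                let line = line?;
                println!("{} {}", line.signature.time.seconds_since_unix_epoch, line.message);
            }
        }
        Ok(())
    }
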
diff --git a/vendor/gix-ref/src/store/file/loose/reflog/create_or_update/tests.rs b/vendor/gix-ref/src/store/file/loose/reflog/create_or_update/tests.rs
new file mode 100644
index 000000000..16b9b1492
--- /dev/null
+++ b/vendor/gix-ref/src/store/file/loose/reflog/create_or_update/tests.rs
@@ -0,0 +1,155 @@
+use std::{convert::TryInto, path::Path};
+
+use gix_actor::{Sign, Signature, Time};
+use gix_object::bstr::ByteSlice;
+use tempfile::TempDir;
+
+use super::*;
+use crate::{file::WriteReflog, FullNameRef};
+
+type Result<T = ()> = std::result::Result<T, Box<dyn std::error::Error>>;
+
+/// Convert a hexadecimal hash into its corresponding `ObjectId` or _panic_.
+fn hex_to_id(hex: &str) -> gix_hash::ObjectId {
+ gix_hash::ObjectId::from_hex(hex.as_bytes()).expect("40 bytes hex")
+}
+
+fn empty_store(writemode: WriteReflog) -> Result<(TempDir, file::Store)> {
+ let dir = TempDir::new()?;
+ let store = file::Store::at(dir.path(), writemode, gix_hash::Kind::Sha1);
+ Ok((dir, store))
+}
+
+fn reflog_lines(store: &file::Store, name: &str, buf: &mut Vec<u8>) -> Result<Vec<crate::log::Line>> {
+ store
+ .reflog_iter(name, buf)?
+ .expect("existing reflog")
+ .map(|l| l.map(crate::log::Line::from))
+ .collect::<std::result::Result<Vec<_>, _>>()
+ .map_err(Into::into)
+}
+
+const WRITE_MODES: &[WriteReflog] = &[WriteReflog::Normal, WriteReflog::Disable, WriteReflog::Always];
+
+#[test]
+fn should_autocreate_is_unaffected_by_writemode() -> Result {
+ let (_keep, store) = empty_store(WriteReflog::Disable)?;
+ for should_create_name in &["HEAD", "refs/heads/main", "refs/remotes/any", "refs/notes/any"] {
+ assert!(store.should_autocreate_reflog(Path::new(should_create_name)));
+ }
+ for should_not_create_name in &["FETCH_HEAD", "SOMETHING", "refs/special/this", "refs/tags/0.1.0"] {
+ assert!(!store.should_autocreate_reflog(Path::new(should_not_create_name)));
+ }
+ Ok(())
+}
+
+#[test]
+fn missing_reflog_creates_it_even_if_similarly_named_empty_dir_exists_and_append_log_lines() -> Result {
+ for mode in WRITE_MODES {
+ let (_keep, store) = empty_store(*mode)?;
+ let full_name_str = "refs/heads/main";
+ let full_name: &FullNameRef = full_name_str.try_into()?;
+ let new = hex_to_id("28ce6a8b26aa170e1de65536fe8abe1832bd3242");
+ let committer = Signature {
+ name: "committer".into(),
+ email: "committer@example.com".into(),
+ time: Time {
+ seconds_since_unix_epoch: 1234,
+ offset_in_seconds: 1800,
+ sign: Sign::Plus,
+ },
+ };
+ store.reflog_create_or_append(
+ full_name,
+ None,
+ &new,
+ committer.to_ref().into(),
+ b"the message".as_bstr(),
+ false,
+ )?;
+
+ let mut buf = Vec::new();
+ match mode {
+ WriteReflog::Normal | WriteReflog::Always => {
+ assert_eq!(
+ reflog_lines(&store, full_name_str, &mut buf)?,
+ vec![crate::log::Line {
+ previous_oid: gix_hash::Kind::Sha1.null(),
+ new_oid: new,
+ signature: committer.clone(),
+ message: "the message".into()
+ }]
+ );
+ let previous = hex_to_id("0000000000000000000000111111111111111111");
+ store.reflog_create_or_append(
+ full_name,
+ Some(previous),
+ &new,
+ committer.to_ref().into(),
+ b"next message".as_bstr(),
+ false,
+ )?;
+
+ let lines = reflog_lines(&store, full_name_str, &mut buf)?;
+ assert_eq!(lines.len(), 2, "now there is another line");
+ assert_eq!(
+ lines.last().expect("non-empty"),
+ &crate::log::Line {
+ previous_oid: previous,
+ new_oid: new,
+ signature: committer.clone(),
+ message: "next message".into()
+ }
+ );
+ }
+ WriteReflog::Disable => {
+ assert!(
+ store.reflog_iter(full_name, &mut buf)?.is_none(),
+ "there is no logs in disabled mode"
+ );
+ }
+ };
+
+ // create onto existing directory
+ let full_name_str = "refs/heads/other";
+ let full_name: &FullNameRef = full_name_str.try_into()?;
+ let reflog_path = store.reflog_path(full_name_str.try_into().expect("valid"));
+ let directory_in_place_of_reflog = reflog_path.join("empty-a").join("empty-b");
+ std::fs::create_dir_all(directory_in_place_of_reflog)?;
+
+ store.reflog_create_or_append(
+ full_name,
+ None,
+ &new,
+ committer.to_ref().into(),
+ b"more complicated reflog creation".as_bstr(),
+ false,
+ )?;
+
+ match mode {
+ WriteReflog::Normal | WriteReflog::Always => {
+ assert_eq!(
+ reflog_lines(&store, full_name_str, &mut buf)?.len(),
+ 1,
+ "reflog was written despite directory"
+ );
+ assert!(
+ reflog_path.is_file(),
+ "the empty directory was replaced with the reflog file"
+ );
+ }
+ WriteReflog::Disable => {
+ assert!(
+ store.reflog_iter(full_name_str, &mut buf)?.is_none(),
+ "reflog still doesn't exist"
+ );
+ assert!(
+ store.reflog_iter_rev(full_name_str, &mut buf)?.is_none(),
+ "reflog still doesn't exist"
+ );
+ assert!(reflog_path.is_dir(), "reflog directory wasn't touched");
+ }
+ }
+ }
+ Ok(())
+}
diff --git a/vendor/gix-ref/src/store/file/mod.rs b/vendor/gix-ref/src/store/file/mod.rs
new file mode 100644
index 000000000..cadc1d3b7
--- /dev/null
+++ b/vendor/gix-ref/src/store/file/mod.rs
@@ -0,0 +1,104 @@
+use std::{
+ borrow::Cow,
+ path::{Path, PathBuf},
+};
+
+use crate::{bstr::BStr, store::WriteReflog, Namespace};
+
+/// A store for references which uses plain files.
+///
+/// Each ref is represented as a single file on disk in a folder structure that follows the relative path
+/// used to identify [references][crate::Reference].
+#[derive(Debug, Clone)]
+pub struct Store {
+ /// The location at which loose references can be found as per conventions of a typical git repository.
+ ///
+ /// Typical base paths are `.git` repository folders.
+ git_dir: PathBuf,
+ /// Possibly the common directory at which to find shared references. Only set if this `Store` is for a work tree.
+ common_dir: Option<PathBuf>,
+ /// The kind of hash to assume in a couple of situations. Note that currently we are able to read any valid hash from files,
+ /// though this might change one day.
+ object_hash: gix_hash::Kind,
+
+ /// The way to handle reflog edits
+ pub write_reflog: WriteReflog,
+ /// The namespace to use for edits and reads
+ pub namespace: Option<Namespace>,
+ /// A packed buffer which can be mapped in one version and shared as such.
+ /// It's updated only in one spot, which is prior to reading it based on file stamps.
+ /// Doing it like this has the benefit of being able to hand snapshots out to people without blocking others from updating it.
+ packed: packed::modifiable::MutableSharedBuffer,
+}
+
+mod access {
+ use std::path::Path;
+
+ use crate::file;
+
+ impl file::Store {
+ /// Return the `.git` directory at which all references are loaded.
+ ///
+ /// For worktrees, this is the linked work-tree's private ref location,
+ /// while [`common_dir()`][file::Store::common_dir()] is `Some(parent_git_dir)`.
+ pub fn git_dir(&self) -> &Path {
+ &self.git_dir
+ }
+
+ /// If this is a linked work tree, there will be `Some(common_dir)` pointing to the parent repository's git directory,
+ /// while [`git_dir()`][file::Store::git_dir()] points to the location holding linked work-tree private references.
+ pub fn common_dir(&self) -> Option<&Path> {
+ self.common_dir.as_deref()
+ }
+
+ /// Similar to [`common_dir()`][file::Store::common_dir()], but it will produce either the common-dir, or the git-dir if the former
+ /// isn't present.
+ ///
+ /// This is also the directory in which the packed references file would be placed.
+ pub fn common_dir_resolved(&self) -> &Path {
+ self.common_dir.as_deref().unwrap_or(&self.git_dir)
+ }
+ }
+}
+
+/// A transaction on a file store
+pub struct Transaction<'s, 'p> {
+ store: &'s Store,
+ packed_transaction: Option<crate::store_impl::packed::Transaction>,
+ updates: Option<Vec<transaction::Edit>>,
+ packed_refs: transaction::PackedRefs<'p>,
+}
+
+pub(in crate::store_impl::file) fn path_to_name<'a>(path: impl Into<Cow<'a, Path>>) -> Cow<'a, BStr> {
+ let path = gix_path::into_bstr(path.into());
+ gix_path::to_unix_separators_on_windows(path)
+}
+
+///
+pub mod loose;
+mod overlay_iter;
+
+///
+pub mod iter {
+ pub use super::overlay_iter::{LooseThenPacked, Platform};
+
+ ///
+ pub mod loose_then_packed {
+ pub use super::super::overlay_iter::Error;
+ }
+}
+
+///
+pub mod log;
+
+///
+pub mod find;
+
+///
+pub mod transaction;
+
+///
+pub mod packed;
+
+mod raw_ext;
+pub use raw_ext::ReferenceExt;
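
One practical consequence of the resolution rules above: the packed-refs file always lives next to the shared references. A tiny sketch:

    fn packed_refs_path(store: &gix_ref::file::Store) -> std::path::PathBuf {
        // The common dir wins over the git dir when this store belongs to a linked work tree.
        store.common_dir_resolved().join("packed-refs")
    }
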
diff --git a/vendor/gix-ref/src/store/file/overlay_iter.rs b/vendor/gix-ref/src/store/file/overlay_iter.rs
new file mode 100644
index 000000000..51f290c7b
--- /dev/null
+++ b/vendor/gix-ref/src/store/file/overlay_iter.rs
@@ -0,0 +1,432 @@
+use std::{
+ borrow::Cow,
+ cmp::Ordering,
+ io::Read,
+ iter::Peekable,
+ path::{Path, PathBuf},
+};
+
+use crate::{
+ file::{loose, loose::iter::SortedLoosePaths, path_to_name},
+ store_impl::{file, packed},
+ BString, FullName, Namespace, Reference,
+};
+
+/// An iterator stepping through sorted input of loose references and packed references, preferring loose refs over otherwise
+/// equivalent packed references.
+///
+/// All errors will be returned verbatim, while packed errors are depleted first if loose refs also error.
+pub struct LooseThenPacked<'p, 's> {
+ git_dir: &'s Path,
+ common_dir: Option<&'s Path>,
+ namespace: Option<&'s Namespace>,
+ iter_packed: Option<Peekable<packed::Iter<'p>>>,
+ iter_git_dir: Peekable<SortedLoosePaths>,
+ #[allow(dead_code)]
+ iter_common_dir: Option<Peekable<SortedLoosePaths>>,
+ buf: Vec<u8>,
+}
+
+enum IterKind {
+ Git,
+ GitAndConsumeCommon,
+ Common,
+}
+
+/// An intermediate structure to hold shared state alive long enough for iteration to happen.
+#[must_use = "Iterators should be obtained from this platform"]
+pub struct Platform<'s> {
+ store: &'s file::Store,
+ packed: Option<file::packed::SharedBufferSnapshot>,
+}
+
+impl<'p, 's> LooseThenPacked<'p, 's> {
+ fn strip_namespace(&self, mut r: Reference) -> Reference {
+ if let Some(namespace) = &self.namespace {
+ r.strip_namespace(namespace);
+ }
+ r
+ }
+
+ fn loose_iter(&mut self, kind: IterKind) -> &mut Peekable<SortedLoosePaths> {
+ match kind {
+ IterKind::GitAndConsumeCommon => {
+ drop(self.iter_common_dir.as_mut().map(|iter| iter.next()));
+ &mut self.iter_git_dir
+ }
+ IterKind::Git => &mut self.iter_git_dir,
+ IterKind::Common => self
+ .iter_common_dir
+ .as_mut()
+ .expect("caller knows there is a common iter"),
+ }
+ }
+
+ fn convert_packed(
+ &mut self,
+ packed: Result<packed::Reference<'p>, packed::iter::Error>,
+ ) -> Result<Reference, Error> {
+ packed
+ .map(Into::into)
+ .map(|r| self.strip_namespace(r))
+ .map_err(|err| match err {
+ packed::iter::Error::Reference {
+ invalid_line,
+ line_number,
+ } => Error::PackedReference {
+ invalid_line,
+ line_number,
+ },
+ packed::iter::Error::Header { .. } => unreachable!("this one only happens on iteration creation"),
+ })
+ }
+
+ fn convert_loose(&mut self, res: std::io::Result<(PathBuf, FullName)>) -> Result<Reference, Error> {
+ let (refpath, name) = res.map_err(Error::Traversal)?;
+ std::fs::File::open(&refpath)
+ .and_then(|mut f| {
+ self.buf.clear();
+ f.read_to_end(&mut self.buf)
+ })
+ .map_err(|err| Error::ReadFileContents {
+ source: err,
+ path: refpath.to_owned(),
+ })?;
+ loose::Reference::try_from_path(name, &self.buf)
+ .map_err(|err| {
+ let relative_path = refpath
+ .strip_prefix(self.git_dir)
+ .ok()
+ .or_else(|| {
+ self.common_dir
+ .and_then(|common_dir| refpath.strip_prefix(common_dir).ok())
+ })
+ .expect("one of our bases contains the path");
+ Error::ReferenceCreation {
+ source: err,
+ relative_path: relative_path.into(),
+ }
+ })
+ .map(Into::into)
+ .map(|r| self.strip_namespace(r))
+ }
+}
+
+impl<'p, 's> Iterator for LooseThenPacked<'p, 's> {
+ type Item = Result<Reference, Error>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ fn advance_to_non_private(iter: &mut Peekable<SortedLoosePaths>) {
+ while let Some(Ok((_path, name))) = iter.peek() {
+ if name.category().map_or(true, |cat| cat.is_worktree_private()) {
+ iter.next();
+ } else {
+ break;
+ }
+ }
+ }
+
+ fn peek_loose<'a>(
+ git_dir: &'a mut Peekable<SortedLoosePaths>,
+ common_dir: Option<&'a mut Peekable<SortedLoosePaths>>,
+ ) -> Option<(&'a std::io::Result<(PathBuf, FullName)>, IterKind)> {
+ match common_dir {
+ Some(common_dir) => match (git_dir.peek(), {
+ advance_to_non_private(common_dir);
+ common_dir.peek()
+ }) {
+ (None, None) => None,
+ (None, Some(res)) | (Some(_), Some(res @ Err(_))) => Some((res, IterKind::Common)),
+ (Some(res), None) | (Some(res @ Err(_)), Some(_)) => Some((res, IterKind::Git)),
+ (Some(r_gitdir @ Ok((_, git_dir_name))), Some(r_cd @ Ok((_, common_dir_name)))) => {
+ match git_dir_name.cmp(common_dir_name) {
+ Ordering::Less => Some((r_gitdir, IterKind::Git)),
+ Ordering::Equal => Some((r_gitdir, IterKind::GitAndConsumeCommon)),
+ Ordering::Greater => Some((r_cd, IterKind::Common)),
+ }
+ }
+ },
+ None => git_dir.peek().map(|r| (r, IterKind::Git)),
+ }
+ }
+ match self.iter_packed.as_mut() {
+ Some(packed_iter) => match (
+ peek_loose(&mut self.iter_git_dir, self.iter_common_dir.as_mut()),
+ packed_iter.peek(),
+ ) {
+ (None, None) => None,
+ (None, Some(_)) | (Some(_), Some(Err(_))) => {
+ let res = packed_iter.next().expect("peeked value exists");
+ Some(self.convert_packed(res))
+ }
+ (Some((_, kind)), None) | (Some((Err(_), kind)), Some(_)) => {
+ let res = self.loose_iter(kind).next().expect("prior peek");
+ Some(self.convert_loose(res))
+ }
+ (Some((Ok((_, loose_name)), kind)), Some(Ok(packed))) => match loose_name.as_ref().cmp(packed.name) {
+ Ordering::Less => {
+ let res = self.loose_iter(kind).next().expect("prior peek");
+ Some(self.convert_loose(res))
+ }
+ Ordering::Equal => {
+ drop(packed_iter.next());
+ let res = self.loose_iter(kind).next().expect("prior peek");
+ Some(self.convert_loose(res))
+ }
+ Ordering::Greater => {
+ let res = packed_iter.next().expect("name retrieval configured");
+ Some(self.convert_packed(res))
+ }
+ },
+ },
+ None => match peek_loose(&mut self.iter_git_dir, self.iter_common_dir.as_mut()) {
+ None => None,
+ Some((_, kind)) => self.loose_iter(kind).next().map(|res| self.convert_loose(res)),
+ },
+ }
+ }
+}
+
+impl<'s> Platform<'s> {
+ /// Return an iterator over all references, loose or `packed`, sorted by their name.
+ ///
+    /// Errors are returned similarly to what would happen when loose and packed refs were iterated by themselves.
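+    ///
+    /// A minimal sketch, not part of the original docs, assuming a `store` of type
+    /// `file::Store` was obtained elsewhere:
+    ///
+    /// ```ignore
+    /// for reference in store.iter()?.all()? {
+    ///     let reference = reference?;
+    ///     println!("{:?}", reference.name);
+    /// }
+    /// ```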
+ pub fn all(&self) -> std::io::Result<LooseThenPacked<'_, '_>> {
+ self.store.iter_packed(self.packed.as_ref().map(|b| &***b))
+ }
+
+    /// As [`iter(…)`][file::Store::iter()], but filters by `prefix`, e.g. `"refs/heads"`.
+    ///
+    /// Please note that `"refs/heads"` or `"refs\\heads"` is equivalent to `"refs/heads/"`.
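+    ///
+    /// An illustrative sketch, assuming a `store` obtained elsewhere:
+    ///
+    /// ```ignore
+    /// // List only local branches; `"refs/heads"` and `"refs/heads/"` are equivalent here.
+    /// for branch in store.iter()?.prefixed("refs/heads")? {
+    ///     println!("{:?}", branch?.name);
+    /// }
+    /// ```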
+ pub fn prefixed(&self, prefix: impl AsRef<Path>) -> std::io::Result<LooseThenPacked<'_, '_>> {
+ self.store
+ .iter_prefixed_packed(prefix, self.packed.as_ref().map(|b| &***b))
+ }
+}
+
+impl file::Store {
+ /// Return a platform to obtain iterator over all references, or prefixed ones, loose or packed, sorted by their name.
+ ///
+    /// Errors are returned similarly to what would happen when loose and packed refs were iterated by themselves.
+ pub fn iter(&self) -> Result<Platform<'_>, packed::buffer::open::Error> {
+ Ok(Platform {
+ store: self,
+ packed: self.assure_packed_refs_uptodate()?,
+ })
+ }
+}
+
+#[derive(Debug)]
+pub(crate) enum IterInfo<'a> {
+ Base {
+ base: &'a Path,
+ },
+ BaseAndIterRoot {
+ base: &'a Path,
+ iter_root: PathBuf,
+ prefix: Cow<'a, Path>,
+ },
+ PrefixAndBase {
+ base: &'a Path,
+ prefix: &'a Path,
+ },
+ ComputedIterationRoot {
+ /// The root to iterate over
+ iter_root: PathBuf,
+        /// The top-level directory as the boundary of all references, used to create their short names after iteration
+ base: &'a Path,
+ /// The original prefix
+ prefix: Cow<'a, Path>,
+ /// The remainder of the prefix that wasn't a valid path
+ remainder: Option<BString>,
+ },
+}
+
+impl<'a> IterInfo<'a> {
+ fn prefix(&self) -> Option<&Path> {
+ match self {
+ IterInfo::Base { .. } => None,
+ IterInfo::PrefixAndBase { prefix, .. } => Some(*prefix),
+ IterInfo::ComputedIterationRoot { prefix, .. } | IterInfo::BaseAndIterRoot { prefix, .. } => {
+ prefix.as_ref().into()
+ }
+ }
+ }
+
+ fn into_iter(self) -> Peekable<SortedLoosePaths> {
+ match self {
+ IterInfo::Base { base } => SortedLoosePaths::at(base.join("refs"), base, None),
+ IterInfo::BaseAndIterRoot {
+ base,
+ iter_root,
+ prefix: _,
+ } => SortedLoosePaths::at(iter_root, base, None),
+ IterInfo::PrefixAndBase { base, prefix } => SortedLoosePaths::at(base.join(prefix), base, None),
+ IterInfo::ComputedIterationRoot {
+ iter_root,
+ base,
+ prefix: _,
+ remainder,
+ } => SortedLoosePaths::at(iter_root, base, remainder),
+ }
+ .peekable()
+ }
+
+ fn from_prefix(base: &'a Path, prefix: Cow<'a, Path>) -> std::io::Result<Self> {
+ if prefix.is_absolute() {
+ return Err(std::io::Error::new(
+ std::io::ErrorKind::InvalidInput,
+ "prefix must be a relative path, like 'refs/heads'",
+ ));
+ }
+ use std::path::Component::*;
+ if prefix.components().any(|c| matches!(c, CurDir | ParentDir)) {
+ return Err(std::io::Error::new(
+ std::io::ErrorKind::InvalidInput,
+ "Refusing to handle prefixes with relative path components",
+ ));
+ }
+ let iter_root = base.join(prefix.as_ref());
+ if iter_root.is_dir() {
+ Ok(IterInfo::BaseAndIterRoot {
+ base,
+ iter_root,
+ prefix,
+ })
+ } else {
+ let filename_prefix = iter_root
+ .file_name()
+ .map(ToOwned::to_owned)
+ .map(|p| {
+ gix_path::try_into_bstr(PathBuf::from(p))
+ .map(|p| p.into_owned())
+ .map_err(|_| {
+ std::io::Error::new(std::io::ErrorKind::InvalidInput, "prefix contains ill-formed UTF-8")
+ })
+ })
+ .transpose()?;
+ let iter_root = iter_root
+ .parent()
+ .expect("a parent is always there unless empty")
+ .to_owned();
+ Ok(IterInfo::ComputedIterationRoot {
+ base,
+ prefix,
+ iter_root,
+ remainder: filename_prefix,
+ })
+ }
+ }
+}
+
+impl file::Store {
+ /// Return an iterator over all references, loose or `packed`, sorted by their name.
+ ///
+    /// Errors are returned similarly to what would happen when loose and packed refs were iterated by themselves.
+ pub fn iter_packed<'s, 'p>(
+ &'s self,
+ packed: Option<&'p packed::Buffer>,
+ ) -> std::io::Result<LooseThenPacked<'p, 's>> {
+ match self.namespace.as_ref() {
+ Some(namespace) => self.iter_from_info(
+ IterInfo::PrefixAndBase {
+ base: self.git_dir(),
+ prefix: namespace.to_path(),
+ },
+ self.common_dir().map(|base| IterInfo::PrefixAndBase {
+ base,
+ prefix: namespace.to_path(),
+ }),
+ packed,
+ ),
+ None => self.iter_from_info(
+ IterInfo::Base { base: self.git_dir() },
+ self.common_dir().map(|base| IterInfo::Base { base }),
+ packed,
+ ),
+ }
+ }
+
+    /// As [`iter(…)`][file::Store::iter()], but filters by `prefix`, e.g. `"refs/heads"`.
+    ///
+    /// Please note that `"refs/heads"` or `"refs\\heads"` is equivalent to `"refs/heads/"`.
+ pub fn iter_prefixed_packed<'s, 'p>(
+ &'s self,
+ prefix: impl AsRef<Path>,
+ packed: Option<&'p packed::Buffer>,
+ ) -> std::io::Result<LooseThenPacked<'p, 's>> {
+ match self.namespace.as_ref() {
+ None => {
+ let prefix = prefix.as_ref();
+ let git_dir_info = IterInfo::from_prefix(self.git_dir(), prefix.into())?;
+ let common_dir_info = self
+ .common_dir()
+ .map(|base| IterInfo::from_prefix(base, prefix.into()))
+ .transpose()?;
+ self.iter_from_info(git_dir_info, common_dir_info, packed)
+ }
+ Some(namespace) => {
+ let prefix = namespace.to_owned().into_namespaced_prefix(prefix);
+ let git_dir_info = IterInfo::from_prefix(self.git_dir(), prefix.clone().into())?;
+ let common_dir_info = self
+ .common_dir()
+ .map(|base| IterInfo::from_prefix(base, prefix.into()))
+ .transpose()?;
+ self.iter_from_info(git_dir_info, common_dir_info, packed)
+ }
+ }
+ }
+
+ fn iter_from_info<'s, 'p>(
+ &'s self,
+ git_dir_info: IterInfo<'_>,
+ common_dir_info: Option<IterInfo<'_>>,
+ packed: Option<&'p packed::Buffer>,
+ ) -> std::io::Result<LooseThenPacked<'p, 's>> {
+ Ok(LooseThenPacked {
+ git_dir: self.git_dir(),
+ common_dir: self.common_dir(),
+ iter_packed: match packed {
+ Some(packed) => Some(
+ match git_dir_info.prefix() {
+ Some(prefix) => packed.iter_prefixed(path_to_name(prefix).into_owned()),
+ None => packed.iter(),
+ }
+ .map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))?
+ .peekable(),
+ ),
+ None => None,
+ },
+ iter_git_dir: git_dir_info.into_iter(),
+ iter_common_dir: common_dir_info.map(IterInfo::into_iter),
+ buf: Vec::new(),
+ namespace: self.namespace.as_ref(),
+ })
+ }
+}
+
+mod error {
+ use std::{io, path::PathBuf};
+
+ use gix_object::bstr::BString;
+
+ use crate::store_impl::file;
+
+ /// The error returned by the [`LooseThenPacked`][super::LooseThenPacked] iterator.
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("The file system could not be traversed")]
+ Traversal(#[source] io::Error),
+ #[error("The ref file {path:?} could not be read in full")]
+ ReadFileContents { source: io::Error, path: PathBuf },
+ #[error("The reference at \"{relative_path}\" could not be instantiated")]
+ ReferenceCreation {
+ source: file::loose::reference::decode::Error,
+ relative_path: PathBuf,
+ },
+ #[error("Invalid reference in line {line_number}: {invalid_line:?}")]
+ PackedReference { invalid_line: BString, line_number: usize },
+ }
+}
+pub use error::Error;
diff --git a/vendor/gix-ref/src/store/file/packed.rs b/vendor/gix-ref/src/store/file/packed.rs
new file mode 100644
index 000000000..271ec7f5a
--- /dev/null
+++ b/vendor/gix-ref/src/store/file/packed.rs
@@ -0,0 +1,97 @@
+use std::path::PathBuf;
+
+use crate::store_impl::{file, packed};
+
+impl file::Store {
+ /// Return a packed transaction ready to receive updates. Use this to create or update `packed-refs`.
+ /// Note that if you already have a [`packed::Buffer`] then use its [`packed::Buffer::into_transaction()`] method instead.
+ pub(crate) fn packed_transaction(
+ &self,
+ lock_mode: gix_lock::acquire::Fail,
+ ) -> Result<packed::Transaction, transaction::Error> {
+ let lock = gix_lock::File::acquire_to_update_resource(self.packed_refs_path(), lock_mode, None)?;
+        // We 'steal' the possibly existing packed buffer, which may save time if it's already there and fresh.
+ // If nothing else is happening, nobody will get to see the soon stale buffer either, but if so, they will pay
+ // for reloading it. That seems preferred over always loading up a new one.
+ Ok(packed::Transaction::new_from_pack_and_lock(
+ self.assure_packed_refs_uptodate()?,
+ lock,
+ ))
+ }
+
+ /// Try to open a new packed buffer. It's not an error if it doesn't exist, but yields `Ok(None)`.
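+    ///
+    /// A sketch of distinguishing a missing `packed-refs` file from actual errors
+    /// (illustrative only, with `store` assumed to exist):
+    ///
+    /// ```ignore
+    /// match store.open_packed_buffer()? {
+    ///     Some(buffer) => { /* look up or iterate packed references */ }
+    ///     None => { /* no packed-refs file present, which is not an error */ }
+    /// }
+    /// ```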
+ pub fn open_packed_buffer(&self) -> Result<Option<packed::Buffer>, packed::buffer::open::Error> {
+ let need_more_than_this_many_bytes_to_use_mmap = 32 * 1024;
+ match packed::Buffer::open(self.packed_refs_path(), need_more_than_this_many_bytes_to_use_mmap) {
+ Ok(buf) => Ok(Some(buf)),
+ Err(packed::buffer::open::Error::Io(err)) if err.kind() == std::io::ErrorKind::NotFound => Ok(None),
+ Err(err) => Err(err),
+ }
+ }
+
+ /// Return a possibly cached packed buffer with shared ownership. At retrieval it will assure it's up to date, but
+ /// after that it can be considered a snapshot as it cannot change anymore.
+ ///
+ /// Use this to make successive calls to [`file::Store::try_find_packed()`]
+ /// or obtain iterators using [`file::Store::iter_packed()`] in a way that assures the packed-refs content won't change.
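+    ///
+    /// A sketch of pinning the packed-refs content across multiple lookups, illustrative only
+    /// and with `store` assumed to exist:
+    ///
+    /// ```ignore
+    /// let snapshot = store.cached_packed_buffer()?;
+    /// let packed = snapshot.as_ref().map(|b| &***b);
+    /// // Both lookups are guaranteed to see the same packed-refs content.
+    /// let a = store.try_find_packed("main", packed)?;
+    /// let b = store.try_find_packed("HEAD", packed)?;
+    /// ```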
+ pub fn cached_packed_buffer(
+ &self,
+ ) -> Result<Option<file::packed::SharedBufferSnapshot>, packed::buffer::open::Error> {
+ self.assure_packed_refs_uptodate()
+ }
+
+ /// Return the path at which packed-refs would usually be stored
+ pub fn packed_refs_path(&self) -> PathBuf {
+ self.common_dir_resolved().join("packed-refs")
+ }
+
+ pub(crate) fn packed_refs_lock_path(&self) -> PathBuf {
+ let mut p = self.packed_refs_path();
+ p.set_extension("lock");
+ p
+ }
+}
+
+///
+pub mod transaction {
+
+ use crate::store_impl::packed;
+
+ /// The error returned by [`file::Transaction::prepare()`][crate::file::Transaction::prepare()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("An existing pack couldn't be opened or read when preparing a transaction")]
+ BufferOpen(#[from] packed::buffer::open::Error),
+ #[error("The lock for a packed transaction could not be obtained")]
+ TransactionLock(#[from] gix_lock::acquire::Error),
+ }
+}
+
+/// An up-to-date snapshot of the packed refs buffer.
+pub type SharedBufferSnapshot = gix_features::fs::SharedSnapshot<packed::Buffer>;
+
+pub(crate) mod modifiable {
+ use gix_features::threading::OwnShared;
+
+ use crate::{file, packed};
+
+ pub(crate) type MutableSharedBuffer = OwnShared<gix_features::fs::MutableSnapshot<packed::Buffer>>;
+
+ impl file::Store {
+ pub(crate) fn force_refresh_packed_buffer(&self) -> Result<(), packed::buffer::open::Error> {
+ self.packed.force_refresh(|| {
+ let modified = self.packed_refs_path().metadata()?.modified()?;
+ self.open_packed_buffer().map(|packed| Some(modified).zip(packed))
+ })
+ }
+ pub(crate) fn assure_packed_refs_uptodate(
+ &self,
+ ) -> Result<Option<super::SharedBufferSnapshot>, packed::buffer::open::Error> {
+ self.packed.recent_snapshot(
+ || self.packed_refs_path().metadata().and_then(|m| m.modified()).ok(),
+ || self.open_packed_buffer(),
+ )
+ }
+ }
+}
diff --git a/vendor/gix-ref/src/store/file/raw_ext.rs b/vendor/gix-ref/src/store/file/raw_ext.rs
new file mode 100644
index 000000000..8bdf8392d
--- /dev/null
+++ b/vendor/gix-ref/src/store/file/raw_ext.rs
@@ -0,0 +1,174 @@
+use std::collections::BTreeSet;
+
+use gix_hash::ObjectId;
+
+use crate::{
+ packed, peel,
+ raw::Reference,
+ store_impl::{file, file::log},
+ Target,
+};
+
+pub trait Sealed {}
+impl Sealed for crate::Reference {}
+
+/// A trait to extend [Reference][crate::Reference] with functionality requiring a [file::Store].
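+///
+/// The sketch below is illustrative only; it assumes a symbolic reference `head` (e.g. `HEAD`)
+/// and its `store` were obtained elsewhere.
+///
+/// ```ignore
+/// use gix_ref::file::ReferenceExt;
+///
+/// // Walk a symref chain to its leaf, one level at a time.
+/// let mut reference = head; // `head` is assumed to be a symbolic `Reference`
+/// while let Some(next) = reference.follow(&store) {
+///     reference = next?;
+/// }
+/// // `reference` now is the leaf of the chain and carries a peeled target.
+/// ```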
+pub trait ReferenceExt: Sealed {
+ /// A step towards obtaining forward or reverse iterators on reference logs.
+ fn log_iter<'a, 's>(&'a self, store: &'s file::Store) -> log::iter::Platform<'a, 's>;
+
+ /// For details, see [Reference::log_exists()].
+ fn log_exists(&self, store: &file::Store) -> bool;
+
+ /// For details, see [Reference::peel_to_id_in_place()].
+ fn peel_to_id_in_place<E: std::error::Error + Send + Sync + 'static>(
+ &mut self,
+ store: &file::Store,
+ find: impl FnMut(gix_hash::ObjectId, &mut Vec<u8>) -> Result<Option<(gix_object::Kind, &[u8])>, E>,
+ ) -> Result<ObjectId, peel::to_id::Error>;
+
+ /// For details, see [Reference::peel_to_id_in_place()], with support for a known stable packed buffer.
+ fn peel_to_id_in_place_packed<E: std::error::Error + Send + Sync + 'static>(
+ &mut self,
+ store: &file::Store,
+ find: impl FnMut(gix_hash::ObjectId, &mut Vec<u8>) -> Result<Option<(gix_object::Kind, &[u8])>, E>,
+ packed: Option<&packed::Buffer>,
+ ) -> Result<ObjectId, peel::to_id::Error>;
+
+ /// Follow this symbolic reference one level and return the ref it refers to.
+ ///
+ /// Returns `None` if this is not a symbolic reference, hence the leaf of the chain.
+ fn follow(&self, store: &file::Store) -> Option<Result<Reference, file::find::existing::Error>>;
+
+ /// Follow this symbolic reference one level and return the ref it refers to,
+ /// possibly providing access to `packed` references for lookup if it contains the referent.
+ ///
+ /// Returns `None` if this is not a symbolic reference, hence the leaf of the chain.
+ fn follow_packed(
+ &self,
+ store: &file::Store,
+ packed: Option<&packed::Buffer>,
+ ) -> Option<Result<Reference, file::find::existing::Error>>;
+}
+
+impl ReferenceExt for Reference {
+ fn log_iter<'a, 's>(&'a self, store: &'s file::Store) -> log::iter::Platform<'a, 's> {
+ log::iter::Platform {
+ store,
+ name: self.name.as_ref(),
+ buf: Vec::new(),
+ }
+ }
+
+ fn log_exists(&self, store: &file::Store) -> bool {
+ store
+ .reflog_exists(self.name.as_ref())
+ .expect("infallible name conversion")
+ }
+
+ fn peel_to_id_in_place<E: std::error::Error + Send + Sync + 'static>(
+ &mut self,
+ store: &file::Store,
+ find: impl FnMut(gix_hash::ObjectId, &mut Vec<u8>) -> Result<Option<(gix_object::Kind, &[u8])>, E>,
+ ) -> Result<ObjectId, peel::to_id::Error> {
+ let packed = store.assure_packed_refs_uptodate().map_err(|err| {
+ peel::to_id::Error::Follow(file::find::existing::Error::Find(file::find::Error::PackedOpen(err)))
+ })?;
+ self.peel_to_id_in_place_packed(store, find, packed.as_ref().map(|b| &***b))
+ }
+
+ fn peel_to_id_in_place_packed<E: std::error::Error + Send + Sync + 'static>(
+ &mut self,
+ store: &file::Store,
+ mut find: impl FnMut(gix_hash::ObjectId, &mut Vec<u8>) -> Result<Option<(gix_object::Kind, &[u8])>, E>,
+ packed: Option<&packed::Buffer>,
+ ) -> Result<ObjectId, peel::to_id::Error> {
+ match self.peeled {
+ Some(peeled) => {
+ self.target = Target::Peeled(peeled.to_owned());
+ Ok(peeled)
+ }
+ None => {
+ if self.target.kind() == crate::Kind::Symbolic {
+ let mut seen = BTreeSet::new();
+ let cursor = &mut *self;
+ while let Some(next) = cursor.follow_packed(store, packed) {
+ let next = next?;
+ if seen.contains(&next.name) {
+ return Err(peel::to_id::Error::Cycle {
+ start_absolute: store.reference_path(cursor.name.as_ref()),
+ });
+ }
+ *cursor = next;
+ seen.insert(cursor.name.clone());
+ const MAX_REF_DEPTH: usize = 5;
+ if seen.len() == MAX_REF_DEPTH {
+ return Err(peel::to_id::Error::DepthLimitExceeded {
+ max_depth: MAX_REF_DEPTH,
+ });
+ }
+ }
+ };
+ let mut buf = Vec::new();
+ let mut oid = self.target.try_id().expect("peeled ref").to_owned();
+ let peeled_id = loop {
+ let (kind, data) = find(oid, &mut buf)
+ .map_err(|err| Box::new(err) as Box<dyn std::error::Error + Send + Sync + 'static>)?
+ .ok_or_else(|| peel::to_id::Error::NotFound {
+ oid,
+ name: self.name.0.clone(),
+ })?;
+ match kind {
+ gix_object::Kind::Tag => {
+ oid = gix_object::TagRefIter::from_bytes(data).target_id().map_err(|_err| {
+ peel::to_id::Error::NotFound {
+ oid,
+ name: self.name.0.clone(),
+ }
+ })?;
+ }
+ _ => break oid,
+ };
+ };
+ self.peeled = Some(peeled_id);
+ self.target = Target::Peeled(peeled_id);
+ Ok(peeled_id)
+ }
+ }
+ }
+
+ fn follow(&self, store: &file::Store) -> Option<Result<Reference, file::find::existing::Error>> {
+ let packed = match store
+ .assure_packed_refs_uptodate()
+ .map_err(|err| file::find::existing::Error::Find(file::find::Error::PackedOpen(err)))
+ {
+ Ok(packed) => packed,
+ Err(err) => return Some(Err(err)),
+ };
+ self.follow_packed(store, packed.as_ref().map(|b| &***b))
+ }
+
+ fn follow_packed(
+ &self,
+ store: &file::Store,
+ packed: Option<&packed::Buffer>,
+ ) -> Option<Result<Reference, file::find::existing::Error>> {
+ match self.peeled {
+ Some(peeled) => Some(Ok(Reference {
+ name: self.name.clone(),
+ target: Target::Peeled(peeled),
+ peeled: None,
+ })),
+ None => match &self.target {
+ Target::Peeled(_) => None,
+ Target::Symbolic(full_name) => match store.try_find_packed(full_name.as_ref(), packed) {
+ Ok(Some(next)) => Some(Ok(next)),
+ Ok(None) => Some(Err(file::find::existing::Error::NotFound {
+ name: full_name.to_path().to_owned(),
+ })),
+ Err(err) => Some(Err(file::find::existing::Error::Find(err))),
+ },
+ },
+ }
+ }
+}
diff --git a/vendor/gix-ref/src/store/file/transaction/commit.rs b/vendor/gix-ref/src/store/file/transaction/commit.rs
new file mode 100644
index 000000000..58e6a7c99
--- /dev/null
+++ b/vendor/gix-ref/src/store/file/transaction/commit.rs
@@ -0,0 +1,201 @@
+use crate::{
+ store_impl::file::{transaction::PackedRefs, Transaction},
+ transaction::{Change, LogChange, RefEdit, RefLog},
+ Target,
+};
+
+impl<'s, 'p> Transaction<'s, 'p> {
+ /// Make all [prepared][Transaction::prepare()] permanent and return the performed edits which represent the current
+ /// state of the affected refs in the ref store in that instant. Please note that the obtained edits may have been
+ /// adjusted to contain more dependent edits or additional information.
+ /// `committer` is used in the reflog and only if the reflog is actually written, which is why it is optional. Please note
+ /// that if `None` is passed and the reflog needs to be written, the operation will be aborted late and a few refs may have been
+ /// successfully committed already, making clear the non-atomic nature of multi-file edits.
+ ///
+ /// On error the transaction may have been performed partially, depending on the nature of the error, and no attempt to roll back
+ /// partial changes is made.
+ ///
+ /// In this stage, we perform the following operations:
+ ///
+ /// * update the ref log
+ /// * move updated refs into place
+ /// * delete reflogs and empty parent directories
+ /// * delete packed refs
+    /// * delete their corresponding reference (if applicable),
+    ///   along with empty parent directories
+ ///
+    /// Note that the transaction must have been [prepared][Transaction::prepare()] beforehand.
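+    ///
+    /// A minimal sketch, not part of the original docs, assuming `prepared` is a successfully
+    /// prepared transaction and `sig` is a `gix_actor::SignatureRef`:
+    ///
+    /// ```ignore
+    /// let edits = prepared.commit(Some(sig))?;
+    /// for edit in &edits {
+    ///     println!("applied {:?}", edit.name);
+    /// }
+    /// ```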
+ pub fn commit<'a>(self, committer: impl Into<Option<gix_actor::SignatureRef<'a>>>) -> Result<Vec<RefEdit>, Error> {
+ self.commit_inner(committer.into())
+ }
+
+ fn commit_inner(self, committer: Option<gix_actor::SignatureRef<'_>>) -> Result<Vec<RefEdit>, Error> {
+ let mut updates = self.updates.expect("BUG: must call prepare before commit");
+ let delete_loose_refs = matches!(
+ self.packed_refs,
+ PackedRefs::DeletionsAndNonSymbolicUpdatesRemoveLooseSourceReference(_)
+ );
+
+ // Perform updates first so live commits remain referenced
+ for change in updates.iter_mut() {
+ assert!(!change.update.deref, "Deref mode is turned into splits and turned off");
+ match &change.update.change {
+ // reflog first, then reference
+ Change::Update { log, new, expected } => {
+ let lock = change.lock.take();
+ let (update_ref, update_reflog) = match log.mode {
+ RefLog::Only => (false, true),
+ RefLog::AndReference => (true, true),
+ };
+ if update_reflog {
+ let log_update = match new {
+ Target::Symbolic(_) => {
+                                // No reflog is written for symref changes, unless the ref is new and we can obtain a peeled id
+                                // from the expectation of what should be there, as is the case when cloning.
+ match expected {
+ PreviousValue::ExistingMustMatch(Target::Peeled(oid)) => {
+ Some((Some(gix_hash::ObjectId::null(oid.kind())), oid))
+ }
+ _ => None,
+ }
+ }
+ Target::Peeled(new_oid) => {
+ let previous = match expected {
+ PreviousValue::MustExistAndMatch(Target::Peeled(oid)) => Some(oid.to_owned()),
+ _ => None,
+ }
+ .or(change.leaf_referent_previous_oid);
+ Some((previous, new_oid))
+ }
+ };
+ if let Some((previous, new_oid)) = log_update {
+ let do_update = previous.as_ref().map_or(true, |previous| previous != new_oid);
+ if do_update {
+ self.store.reflog_create_or_append(
+ change.update.name.as_ref(),
+ previous,
+ new_oid,
+ committer,
+ log.message.as_ref(),
+ log.force_create_reflog,
+ )?;
+ }
+ }
+ }
+                    // Don't do anything else while keeping the lock after potentially updating the reflog.
+                    // We delay deletion of the reference and dropping the lock until after the packed-refs file
+                    // was safely written.
+ if delete_loose_refs && matches!(new, Target::Peeled(_)) {
+ change.lock = lock;
+ continue;
+ }
+ if update_ref {
+ if let Some(Err(err)) = lock.map(|l| l.commit()) {
+ // TODO: when Kind::IsADirectory becomes stable, use that.
+ let err = if err.instance.resource_path().is_dir() {
+ gix_tempfile::remove_dir::empty_depth_first(err.instance.resource_path())
+ .map_err(|io_err| std::io::Error::new(std::io::ErrorKind::Other, io_err))
+ .and_then(|_| err.instance.commit().map_err(|err| err.error))
+ .err()
+ } else {
+ Some(err.error)
+ };
+
+ if let Some(err) = err {
+ return Err(Error::LockCommit {
+ source: err,
+ full_name: change.name(),
+ });
+ }
+ };
+ }
+ }
+ Change::Delete { .. } => {}
+ }
+ }
+
+ for change in updates.iter_mut() {
+ let (reflog_root, relative_name) = self.store.reflog_base_and_relative_path(change.update.name.as_ref());
+ match &change.update.change {
+ Change::Update { .. } => {}
+ Change::Delete { .. } => {
+                    // Reflog deletion happens first: if it fails, a ref without a log is less terrible than
+                    // a log without a reference.
+ let reflog_path = reflog_root.join(relative_name);
+ if let Err(err) = std::fs::remove_file(&reflog_path) {
+ if err.kind() != std::io::ErrorKind::NotFound {
+ return Err(Error::DeleteReflog {
+ source: err,
+ full_name: change.name(),
+ });
+ }
+ } else {
+ gix_tempfile::remove_dir::empty_upward_until_boundary(
+ reflog_path.parent().expect("never without parent"),
+ &reflog_root,
+ )
+ .ok();
+ }
+ }
+ }
+ }
+
+ if let Some(t) = self.packed_transaction {
+ t.commit().map_err(Error::PackedTransactionCommit)?;
+ // Always refresh ourselves right away to avoid races. We ignore errors as there may be many reasons this fails, and it's not
+ // critical to be done here. In other words, the pack may be refreshed at a later time and then it might work.
+ self.store.force_refresh_packed_buffer().ok();
+ }
+
+ for change in updates.iter_mut() {
+ let take_lock_and_delete = match &change.update.change {
+ Change::Update {
+ log: LogChange { mode, .. },
+ new,
+ ..
+ } => delete_loose_refs && *mode == RefLog::AndReference && matches!(new, Target::Peeled(_)),
+ Change::Delete { log: mode, .. } => *mode == RefLog::AndReference,
+ };
+ if take_lock_and_delete {
+ let lock = change.lock.take();
+ let reference_path = self.store.reference_path(change.update.name.as_ref());
+ if let Err(err) = std::fs::remove_file(reference_path) {
+ if err.kind() != std::io::ErrorKind::NotFound {
+ return Err(Error::DeleteReference {
+ err,
+ full_name: change.name(),
+ });
+ }
+ }
+ drop(lock)
+ }
+ }
+ Ok(updates.into_iter().map(|edit| edit.update).collect())
+ }
+}
+mod error {
+ use gix_object::bstr::BString;
+
+ use crate::store_impl::{file, packed};
+
+ /// The error returned by various [`Transaction`][super::Transaction] methods.
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("The packed-ref transaction could not be committed")]
+ PackedTransactionCommit(#[source] packed::transaction::commit::Error),
+ #[error("Edit preprocessing failed with error")]
+ PreprocessingFailed { source: std::io::Error },
+ #[error("The change for reference {full_name:?} could not be committed")]
+ LockCommit { source: std::io::Error, full_name: BString },
+ #[error("The reference {full_name} could not be deleted")]
+ DeleteReference { full_name: BString, err: std::io::Error },
+ #[error("The reflog of reference {full_name:?} could not be deleted")]
+ DeleteReflog { full_name: BString, source: std::io::Error },
+ #[error("The reflog could not be created or updated")]
+ CreateOrUpdateRefLog(#[from] file::log::create_or_update::Error),
+ }
+}
+pub use error::Error;
+
+use crate::transaction::PreviousValue;
diff --git a/vendor/gix-ref/src/store/file/transaction/mod.rs b/vendor/gix-ref/src/store/file/transaction/mod.rs
new file mode 100644
index 000000000..712f0320d
--- /dev/null
+++ b/vendor/gix-ref/src/store/file/transaction/mod.rs
@@ -0,0 +1,108 @@
+use std::fmt::Formatter;
+
+use gix_hash::ObjectId;
+use gix_object::bstr::BString;
+
+use crate::{
+ store_impl::{file, file::Transaction},
+ transaction::RefEdit,
+};
+
+/// A function receiving an object id to resolve, returning its decompressed bytes,
+/// used to obtain the peeled object ids for storage in packed-refs files.
+///
+/// Resolution means to follow tag objects until the end of the chain.
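+///
+/// A sketch of providing such a function; `odb_lookup` is a hypothetical stand-in for an
+/// object database lookup and not part of this crate.
+///
+/// ```ignore
+/// let mut find: Box<FindObjectFn<'_>> = Box::new(|oid, buf| {
+///     // Fill `buf` with the decompressed object and return its kind, or `None` if it's absent.
+///     Ok(odb_lookup(oid, buf)?)
+/// });
+/// ```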
+pub type FindObjectFn<'a> = dyn FnMut(
+ gix_hash::ObjectId,
+ &mut Vec<u8>,
+ ) -> Result<Option<gix_object::Kind>, Box<dyn std::error::Error + Send + Sync + 'static>>
+ + 'a;
+
+/// How to handle packed refs during a transaction
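+///
+/// An illustrative sketch of selecting a mode, assuming `store` and a boxed `find` function
+/// (see [`FindObjectFn`]) exist:
+///
+/// ```ignore
+/// let tx = store
+///     .transaction()
+///     .packed_refs(PackedRefs::DeletionsAndNonSymbolicUpdates(find));
+/// ```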
+pub enum PackedRefs<'a> {
+    /// Only propagate deletions of references. This is the default.
+    DeletionsOnly,
+    /// Propagate deletions as well as updates to references which are peeled, that is, contain an object id.
+    DeletionsAndNonSymbolicUpdates(Box<FindObjectFn<'a>>),
+    /// Propagate deletions as well as updates to references which are peeled, that is, contain an object id. Furthermore, delete the
+    /// loose reference that is originally updated, if it exists. If it doesn't, the new value will be written into the packed ref right away.
+    /// Note that this doesn't affect symbolic references at all, which can't be placed into packed refs.
+    DeletionsAndNonSymbolicUpdatesRemoveLooseSourceReference(Box<FindObjectFn<'a>>),
+}
+
+impl Default for PackedRefs<'_> {
+ fn default() -> Self {
+ PackedRefs::DeletionsOnly
+ }
+}
+
+#[derive(Debug)]
+pub(in crate::store_impl::file) struct Edit {
+ update: RefEdit,
+ lock: Option<gix_lock::Marker>,
+ /// Set if this update is coming from a symbolic reference and used to make it appear like it is the one that is handled,
+ /// instead of the referent reference.
+ parent_index: Option<usize>,
+ /// For symbolic refs, this is the previous OID to put into the reflog instead of our own previous value. It's the
+ /// peeled value of the leaf referent.
+ leaf_referent_previous_oid: Option<ObjectId>,
+}
+
+impl Edit {
+ fn name(&self) -> BString {
+ self.update.name.0.clone()
+ }
+}
+
+impl std::borrow::Borrow<RefEdit> for Edit {
+ fn borrow(&self) -> &RefEdit {
+ &self.update
+ }
+}
+
+impl std::borrow::BorrowMut<RefEdit> for Edit {
+ fn borrow_mut(&mut self) -> &mut RefEdit {
+ &mut self.update
+ }
+}
+
+/// Edits
+impl file::Store {
+    /// Open a transaction whose `edits`, along with the way to fail if a `lock` cannot be obtained,
+    /// are supplied later via [`prepare(…)`][Transaction::prepare()].
+    /// A snapshot of packed references will be obtained automatically if needed to fulfill this transaction
+    /// and will be provided as the result of a successful transaction. Note that upon transaction failure, packed-refs
+    /// will never have been altered.
+ ///
+ /// The transaction inherits the parent namespace.
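+    ///
+    /// A sketch of the overall flow, illustrative only, with `edits` and `sig` assumed to be
+    /// provided by the caller:
+    ///
+    /// ```ignore
+    /// let applied = store
+    ///     .transaction()
+    ///     .prepare(edits, gix_lock::acquire::Fail::Immediately, gix_lock::acquire::Fail::Immediately)?
+    ///     .commit(Some(sig))?;
+    /// ```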
+ pub fn transaction(&self) -> Transaction<'_, '_> {
+ Transaction {
+ store: self,
+ packed_transaction: None,
+ updates: None,
+ packed_refs: PackedRefs::default(),
+ }
+ }
+}
+
+impl<'s, 'p> Transaction<'s, 'p> {
+ /// Configure the way packed refs are handled during the transaction
+ pub fn packed_refs(mut self, packed_refs: PackedRefs<'p>) -> Self {
+ self.packed_refs = packed_refs;
+ self
+ }
+}
+
+impl std::fmt::Debug for Transaction<'_, '_> {
+ fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+ f.debug_struct("Transaction")
+ .field("store", self.store)
+ .field("edits", &self.updates.as_ref().map(|u| u.len()))
+ .finish_non_exhaustive()
+ }
+}
+
+///
+pub mod prepare;
+
+///
+pub mod commit;
diff --git a/vendor/gix-ref/src/store/file/transaction/prepare.rs b/vendor/gix-ref/src/store/file/transaction/prepare.rs
new file mode 100644
index 000000000..fdb29619d
--- /dev/null
+++ b/vendor/gix-ref/src/store/file/transaction/prepare.rs
@@ -0,0 +1,478 @@
+use crate::{
+ packed,
+ packed::transaction::buffer_into_transaction,
+ store_impl::{
+ file,
+ file::{
+ loose,
+ transaction::{Edit, PackedRefs},
+ Transaction,
+ },
+ },
+ transaction::{Change, LogChange, PreviousValue, RefEdit, RefEditsExt, RefLog},
+ FullName, FullNameRef, Reference, Target,
+};
+
+impl<'s, 'p> Transaction<'s, 'p> {
+ fn lock_ref_and_apply_change(
+ store: &file::Store,
+ lock_fail_mode: gix_lock::acquire::Fail,
+ packed: Option<&packed::Buffer>,
+ change: &mut Edit,
+ has_global_lock: bool,
+ direct_to_packed_refs: bool,
+ ) -> Result<(), Error> {
+ use std::io::Write;
+ assert!(
+ change.lock.is_none(),
+ "locks can only be acquired once and it's all or nothing"
+ );
+
+ let existing_ref = store
+ .ref_contents(change.update.name.as_ref())
+ .map_err(Error::from)
+ .and_then(|maybe_loose| {
+ maybe_loose
+ .map(|buf| {
+ loose::Reference::try_from_path(change.update.name.clone(), &buf)
+ .map(Reference::from)
+ .map_err(Error::from)
+ })
+ .transpose()
+ })
+ .or_else(|err| match err {
+ Error::ReferenceDecode(_) => Ok(None),
+ other => Err(other),
+ })
+ .and_then(|maybe_loose| match (maybe_loose, packed) {
+ (None, Some(packed)) => packed
+ .try_find(change.update.name.as_ref())
+ .map(|opt| opt.map(Into::into))
+ .map_err(Error::from),
+ (None, None) => Ok(None),
+ (maybe_loose, _) => Ok(maybe_loose),
+ });
+ let lock = match &mut change.update.change {
+ Change::Delete { expected, .. } => {
+ let (base, relative_path) = store.reference_path_with_base(change.update.name.as_ref());
+ let lock = if has_global_lock {
+ None
+ } else {
+ gix_lock::Marker::acquire_to_hold_resource(
+ base.join(relative_path.as_ref()),
+ lock_fail_mode,
+ Some(base.clone().into_owned()),
+ )
+ .map_err(|err| Error::LockAcquire {
+ source: err,
+ full_name: "borrowcheck won't allow change.name()".into(),
+ })?
+ .into()
+ };
+
+ let existing_ref = existing_ref?;
+ match (&expected, &existing_ref) {
+ (PreviousValue::MustNotExist, _) => {
+ panic!("BUG: MustNotExist constraint makes no sense if references are to be deleted")
+ }
+ (PreviousValue::ExistingMustMatch(_), None)
+ | (PreviousValue::MustExist, Some(_))
+ | (PreviousValue::Any, Some(_))
+ | (PreviousValue::Any, None) => {}
+ (PreviousValue::MustExist, None) | (PreviousValue::MustExistAndMatch(_), None) => {
+ return Err(Error::DeleteReferenceMustExist {
+ full_name: change.name(),
+ })
+ }
+ (PreviousValue::MustExistAndMatch(previous), Some(existing))
+ | (PreviousValue::ExistingMustMatch(previous), Some(existing)) => {
+ let actual = existing.target.clone();
+ if *previous != actual {
+ let expected = previous.clone();
+ return Err(Error::ReferenceOutOfDate {
+ full_name: change.name(),
+ expected,
+ actual,
+ });
+ }
+ }
+ }
+
+ // Keep the previous value for the caller and ourselves. Maybe they want to keep a log of sorts.
+ if let Some(existing) = existing_ref {
+ *expected = PreviousValue::MustExistAndMatch(existing.target);
+ }
+
+ lock
+ }
+ Change::Update { expected, new, .. } => {
+ let (base, relative_path) = store.reference_path_with_base(change.update.name.as_ref());
+ let obtain_lock = || {
+ gix_lock::File::acquire_to_update_resource(
+ base.join(relative_path.as_ref()),
+ lock_fail_mode,
+ Some(base.clone().into_owned()),
+ )
+ .map_err(|err| Error::LockAcquire {
+ source: err,
+ full_name: "borrowcheck won't allow change.name() and this will be corrected by caller".into(),
+ })
+ };
+ let mut lock = (!has_global_lock).then(obtain_lock).transpose()?;
+
+ let existing_ref = existing_ref?;
+ match (&expected, &existing_ref) {
+ (PreviousValue::Any, _)
+ | (PreviousValue::MustExist, Some(_))
+ | (PreviousValue::MustNotExist, None)
+ | (PreviousValue::ExistingMustMatch(_), None) => {}
+ (PreviousValue::MustExist, None) => {
+ let expected = Target::Peeled(store.object_hash.null());
+ let full_name = change.name();
+ return Err(Error::MustExist { full_name, expected });
+ }
+ (PreviousValue::MustNotExist, Some(existing)) => {
+ if existing.target != *new {
+ let new = new.clone();
+ return Err(Error::MustNotExist {
+ full_name: change.name(),
+ actual: existing.target.clone(),
+ new,
+ });
+ }
+ }
+ (PreviousValue::MustExistAndMatch(previous), Some(existing))
+ | (PreviousValue::ExistingMustMatch(previous), Some(existing)) => {
+ if *previous != existing.target {
+ let actual = existing.target.clone();
+ let expected = previous.to_owned();
+ let full_name = change.name();
+ return Err(Error::ReferenceOutOfDate {
+ full_name,
+ actual,
+ expected,
+ });
+ }
+ }
+
+ (PreviousValue::MustExistAndMatch(previous), None) => {
+ let expected = previous.to_owned();
+ let full_name = change.name();
+ return Err(Error::MustExist { full_name, expected });
+ }
+ };
+
+ fn new_would_change_existing(new: &Target, existing: &Target) -> (bool, bool) {
+ match (new, existing) {
+ (Target::Peeled(new), Target::Peeled(old)) => (old != new, false),
+ (Target::Symbolic(new), Target::Symbolic(old)) => (old != new, true),
+ (Target::Peeled(_), _) => (true, false),
+ (Target::Symbolic(_), _) => (true, true),
+ }
+ }
+
+ let (is_effective, is_symbolic) = if let Some(existing) = existing_ref {
+ let (effective, is_symbolic) = new_would_change_existing(new, &existing.target);
+ *expected = PreviousValue::MustExistAndMatch(existing.target);
+ (effective, is_symbolic)
+ } else {
+ (true, matches!(new, Target::Symbolic(_)))
+ };
+
+ if (is_effective && !direct_to_packed_refs) || is_symbolic {
+ let mut lock = lock.take().map(Ok).unwrap_or_else(obtain_lock)?;
+
+ lock.with_mut(|file| match new {
+ Target::Peeled(oid) => write!(file, "{oid}"),
+ Target::Symbolic(name) => write!(file, "ref: {}", name.0),
+ })?;
+ Some(lock.close()?)
+ } else {
+ None
+ }
+ }
+ };
+ change.lock = lock;
+ Ok(())
+ }
+}
+
+impl<'s, 'p> Transaction<'s, 'p> {
+ /// Prepare for calling [`commit(…)`][Transaction::commit()] in a way that can be rolled back perfectly.
+ ///
+ /// If the operation succeeds, the transaction can be committed or dropped to cause a rollback automatically.
+ /// Rollbacks happen automatically on failure and they tend to be perfect.
+    /// Note that this method must not be called more than once per transaction.
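+    ///
+    /// Below is an illustrative sketch of preparing a single update; `store` and `new_id` are
+    /// assumed to exist, and all values shown are examples only.
+    ///
+    /// ```ignore
+    /// use gix_ref::transaction::{Change, LogChange, PreviousValue, RefEdit, RefLog};
+    ///
+    /// let edit = RefEdit {
+    ///     change: Change::Update {
+    ///         log: LogChange {
+    ///             mode: RefLog::AndReference,
+    ///             force_create_reflog: false,
+    ///             message: "example: update main".into(),
+    ///         },
+    ///         expected: PreviousValue::Any,
+    ///         new: gix_ref::Target::Peeled(new_id),
+    ///     },
+    ///     name: "refs/heads/main".try_into()?,
+    ///     deref: false,
+    /// };
+    /// let prepared = store.transaction().prepare(
+    ///     Some(edit),
+    ///     gix_lock::acquire::Fail::Immediately,
+    ///     gix_lock::acquire::Fail::Immediately,
+    /// )?;
+    /// ```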
+ pub fn prepare(
+ mut self,
+ edits: impl IntoIterator<Item = RefEdit>,
+ ref_files_lock_fail_mode: gix_lock::acquire::Fail,
+ packed_refs_lock_fail_mode: gix_lock::acquire::Fail,
+ ) -> Result<Self, Error> {
+ assert!(self.updates.is_none(), "BUG: Must not call prepare(…) multiple times");
+ let store = self.store;
+ let mut updates: Vec<_> = edits
+ .into_iter()
+ .map(|update| Edit {
+ update,
+ lock: None,
+ parent_index: None,
+ leaf_referent_previous_oid: None,
+ })
+ .collect();
+ updates
+ .pre_process(
+ |name| {
+ let symbolic_refs_are_never_packed = None;
+ store
+ .find_existing_inner(name, symbolic_refs_are_never_packed)
+ .map(|r| r.target)
+ .ok()
+ },
+ |idx, update| Edit {
+ update,
+ lock: None,
+ parent_index: Some(idx),
+ leaf_referent_previous_oid: None,
+ },
+ )
+ .map_err(Error::PreprocessingFailed)?;
+
+ let mut maybe_updates_for_packed_refs = match self.packed_refs {
+ PackedRefs::DeletionsAndNonSymbolicUpdates(_)
+ | PackedRefs::DeletionsAndNonSymbolicUpdatesRemoveLooseSourceReference(_) => Some(0_usize),
+ PackedRefs::DeletionsOnly => None,
+ };
+ if maybe_updates_for_packed_refs.is_some()
+ || self.store.packed_refs_path().is_file()
+ || self.store.packed_refs_lock_path().is_file()
+ {
+ let mut edits_for_packed_transaction = Vec::<RefEdit>::new();
+ let mut needs_packed_refs_lookups = false;
+ for edit in updates.iter() {
+ let log_mode = match edit.update.change {
+ Change::Update {
+ log: LogChange { mode, .. },
+ ..
+ } => mode,
+ Change::Delete { log, .. } => log,
+ };
+ if log_mode == RefLog::Only {
+ continue;
+ }
+ let name = match possibly_adjust_name_for_prefixes(edit.update.name.as_ref()) {
+ Some(n) => n,
+ None => continue,
+ };
+ if let Some(ref mut num_updates) = maybe_updates_for_packed_refs {
+ if let Change::Update {
+ new: Target::Peeled(_), ..
+ } = edit.update.change
+ {
+ edits_for_packed_transaction.push(RefEdit {
+ name,
+ ..edit.update.clone()
+ });
+ *num_updates += 1;
+ }
+ continue;
+ }
+ match edit.update.change {
+ Change::Update {
+ expected: PreviousValue::ExistingMustMatch(_) | PreviousValue::MustExistAndMatch(_),
+ ..
+ } => needs_packed_refs_lookups = true,
+ Change::Delete { .. } => {
+ edits_for_packed_transaction.push(RefEdit {
+ name,
+ ..edit.update.clone()
+ });
+ }
+ _ => {
+ needs_packed_refs_lookups = true;
+ }
+ }
+ }
+
+ if !edits_for_packed_transaction.is_empty() || needs_packed_refs_lookups {
+                // What follows means that we will only create a transaction if we have to access packed refs for looking
+                // up current ref values, and that we definitely have a transaction if we need to make updates. Otherwise
+                // we may have no transaction at all, which is fine as none is needed when we would only try deletions
+                // and no packed-refs file exists anyway.
+ let packed_transaction: Option<_> =
+ if maybe_updates_for_packed_refs.unwrap_or(0) > 0 || self.store.packed_refs_lock_path().is_file() {
+                        // We have to create a packed-refs file even if it doesn't exist yet
+ self.store
+ .packed_transaction(packed_refs_lock_fail_mode)
+ .map_err(|err| match err {
+ file::packed::transaction::Error::BufferOpen(err) => Error::from(err),
+ file::packed::transaction::Error::TransactionLock(err) => {
+ Error::PackedTransactionAcquire(err)
+ }
+ })?
+ .into()
+ } else {
+ // A packed transaction is optional - we only have deletions that can't be made if
+ // no packed-ref file exists anyway
+ self.store
+ .assure_packed_refs_uptodate()?
+ .map(|p| {
+ buffer_into_transaction(p, packed_refs_lock_fail_mode)
+ .map_err(Error::PackedTransactionAcquire)
+ })
+ .transpose()?
+ };
+ if let Some(transaction) = packed_transaction {
+ self.packed_transaction = Some(match &mut self.packed_refs {
+ PackedRefs::DeletionsAndNonSymbolicUpdatesRemoveLooseSourceReference(f)
+ | PackedRefs::DeletionsAndNonSymbolicUpdates(f) => {
+ transaction.prepare(edits_for_packed_transaction, f)?
+ }
+ PackedRefs::DeletionsOnly => transaction
+ .prepare(edits_for_packed_transaction, &mut |_, _| {
+ unreachable!("BUG: deletions never trigger object lookups")
+ })?,
+ });
+ }
+ }
+ }
+
+ for cid in 0..updates.len() {
+ let change = &mut updates[cid];
+ if let Err(err) = Self::lock_ref_and_apply_change(
+ self.store,
+ ref_files_lock_fail_mode,
+ self.packed_transaction.as_ref().and_then(|t| t.buffer()),
+ change,
+ self.packed_transaction.is_some(),
+ matches!(
+ self.packed_refs,
+ PackedRefs::DeletionsAndNonSymbolicUpdatesRemoveLooseSourceReference(_)
+ ),
+ ) {
+ let err = match err {
+ Error::LockAcquire {
+ source,
+ full_name: _bogus,
+ } => Error::LockAcquire {
+ source,
+ full_name: {
+ let mut cursor = change.parent_index;
+ let mut ref_name = change.name();
+ while let Some(parent_idx) = cursor {
+ let parent = &updates[parent_idx];
+ if parent.parent_index.is_none() {
+ ref_name = parent.name();
+ } else {
+ cursor = parent.parent_index;
+ }
+ }
+ ref_name
+ },
+ },
+ other => other,
+ };
+ return Err(err);
+ };
+
+ // traverse parent chain from leaf/peeled ref and set the leaf previous oid accordingly
+ // to help with their reflog entries
+ if let (Some(crate::TargetRef::Peeled(oid)), Some(parent_idx)) =
+ (change.update.change.previous_value(), change.parent_index)
+ {
+ let oid = oid.to_owned();
+ let mut parent_idx_cursor = Some(parent_idx);
+ while let Some(parent) = parent_idx_cursor.take().map(|idx| &mut updates[idx]) {
+ parent_idx_cursor = parent.parent_index;
+ parent.leaf_referent_previous_oid = Some(oid);
+ }
+ }
+ }
+ self.updates = Some(updates);
+ Ok(self)
+ }
+
+ /// Rollback all intermediate state and return the `RefEdits` as we know them thus far.
+ ///
+ /// Note that they have been altered compared to what was initially provided as they have
+ /// been split and know about their current state on disk.
+ ///
+ /// # Note
+ ///
+ /// A rollback happens automatically as this instance is dropped as well.
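+    ///
+    /// A minimal sketch, with `prepared` assumed to be a prepared transaction:
+    ///
+    /// ```ignore
+    /// let edits = prepared.rollback(); // releases all locks and returns the (possibly split) edits
+    /// ```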
+ pub fn rollback(self) -> Vec<RefEdit> {
+ self.updates
+ .map(|updates| updates.into_iter().map(|u| u.update).collect())
+ .unwrap_or_default()
+ }
+}
+
+fn possibly_adjust_name_for_prefixes(name: &FullNameRef) -> Option<FullName> {
+ match name.category_and_short_name() {
+ Some((c, sn)) => {
+ use crate::Category::*;
+ let sn = FullNameRef::new_unchecked(sn);
+ match c {
+ Bisect | Rewritten | WorktreePrivate | LinkedPseudoRef { .. } | PseudoRef | MainPseudoRef => None,
+ Tag | LocalBranch | RemoteBranch | Note => name.into(),
+ MainRef | LinkedRef { .. } => sn
+ .category()
+ .map_or(false, |cat| !cat.is_worktree_private())
+ .then_some(sn),
+ }
+ .map(|n| n.to_owned())
+ }
+ None => Some(name.to_owned()), // allow (uncategorized/very special) refs to be packed
+ }
+}
+
+mod error {
+ use gix_object::bstr::BString;
+
+ use crate::{
+ store_impl::{file, packed},
+ Target,
+ };
+
+ /// The error returned by various [`Transaction`][super::Transaction] methods.
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("The packed ref buffer could not be loaded")]
+ Packed(#[from] packed::buffer::open::Error),
+ #[error("The lock for the packed-ref file could not be obtained")]
+ PackedTransactionAcquire(#[source] gix_lock::acquire::Error),
+ #[error("The packed transaction could not be prepared")]
+ PackedTransactionPrepare(#[from] packed::transaction::prepare::Error),
+ #[error("The packed ref file could not be parsed")]
+ PackedFind(#[from] packed::find::Error),
+ #[error("Edit preprocessing failed with an error")]
+ PreprocessingFailed(#[source] std::io::Error),
+ #[error("A lock could not be obtained for reference {full_name:?}")]
+ LockAcquire {
+ source: gix_lock::acquire::Error,
+ full_name: BString,
+ },
+ #[error("An IO error occurred while applying an edit")]
+ Io(#[from] std::io::Error),
+ #[error("The reference {full_name:?} for deletion did not exist or could not be parsed")]
+ DeleteReferenceMustExist { full_name: BString },
+ #[error("Reference {full_name:?} was not supposed to exist when writing it with value {new:?}, but actual content was {actual:?}")]
+ MustNotExist {
+ full_name: BString,
+ actual: Target,
+ new: Target,
+ },
+ #[error("Reference {full_name:?} was supposed to exist with value {expected}, but didn't.")]
+ MustExist { full_name: BString, expected: Target },
+ #[error("The reference {full_name:?} should have content {expected}, actual content was {actual}")]
+ ReferenceOutOfDate {
+ full_name: BString,
+ expected: Target,
+ actual: Target,
+ },
+ #[error("Could not read reference")]
+ ReferenceDecode(#[from] file::loose::reference::decode::Error),
+ }
+}
+
+pub use error::Error;