summaryrefslogtreecommitdiffstats
path: root/vendor/gix-ref
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-05-04 12:41:35 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-05-04 12:41:35 +0000
commit7e5d7eea9c580ef4b41a765bde624af431942b96 (patch)
tree2c0d9ca12878fc4525650aa4e54d77a81a07cc09 /vendor/gix-ref
parentAdding debian version 1.70.0+dfsg1-9. (diff)
downloadrustc-7e5d7eea9c580ef4b41a765bde624af431942b96.tar.xz
rustc-7e5d7eea9c580ef4b41a765bde624af431942b96.zip
Merging upstream version 1.70.0+dfsg2.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'vendor/gix-ref')
-rw-r--r--vendor/gix-ref/.cargo-checksum.json1
-rw-r--r--vendor/gix-ref/Cargo.toml94
-rw-r--r--vendor/gix-ref/src/fullname.rs238
-rw-r--r--vendor/gix-ref/src/lib.rs205
-rw-r--r--vendor/gix-ref/src/log.rs16
-rw-r--r--vendor/gix-ref/src/name.rs273
-rw-r--r--vendor/gix-ref/src/namespace.rs52
-rw-r--r--vendor/gix-ref/src/parse.rs27
-rw-r--r--vendor/gix-ref/src/peel.rs32
-rw-r--r--vendor/gix-ref/src/raw.rs103
-rw-r--r--vendor/gix-ref/src/store/file/find.rs353
-rw-r--r--vendor/gix-ref/src/store/file/log/iter.rs245
-rw-r--r--vendor/gix-ref/src/store/file/log/line.rs285
-rw-r--r--vendor/gix-ref/src/store/file/log/mod.rs23
-rw-r--r--vendor/gix-ref/src/store/file/loose/iter.rs95
-rw-r--r--vendor/gix-ref/src/store/file/loose/mod.rs65
-rw-r--r--vendor/gix-ref/src/store/file/loose/reference/decode.rs83
-rw-r--r--vendor/gix-ref/src/store/file/loose/reference/logiter.rs47
-rw-r--r--vendor/gix-ref/src/store/file/loose/reference/mod.rs4
-rw-r--r--vendor/gix-ref/src/store/file/loose/reflog.rs244
-rw-r--r--vendor/gix-ref/src/store/file/loose/reflog/create_or_update/tests.rs155
-rw-r--r--vendor/gix-ref/src/store/file/mod.rs104
-rw-r--r--vendor/gix-ref/src/store/file/overlay_iter.rs432
-rw-r--r--vendor/gix-ref/src/store/file/packed.rs97
-rw-r--r--vendor/gix-ref/src/store/file/raw_ext.rs174
-rw-r--r--vendor/gix-ref/src/store/file/transaction/commit.rs201
-rw-r--r--vendor/gix-ref/src/store/file/transaction/mod.rs108
-rw-r--r--vendor/gix-ref/src/store/file/transaction/prepare.rs478
-rw-r--r--vendor/gix-ref/src/store/general/handle/find.rs82
-rw-r--r--vendor/gix-ref/src/store/general/handle/mod.rs43
-rw-r--r--vendor/gix-ref/src/store/general/init.rs38
-rw-r--r--vendor/gix-ref/src/store/general/mod.rs1
-rw-r--r--vendor/gix-ref/src/store/mod.rs5
-rw-r--r--vendor/gix-ref/src/store/packed/buffer.rs105
-rw-r--r--vendor/gix-ref/src/store/packed/decode.rs83
-rw-r--r--vendor/gix-ref/src/store/packed/decode/tests.rs125
-rw-r--r--vendor/gix-ref/src/store/packed/find.rs154
-rw-r--r--vendor/gix-ref/src/store/packed/iter.rs117
-rw-r--r--vendor/gix-ref/src/store/packed/mod.rs93
-rw-r--r--vendor/gix-ref/src/store/packed/transaction.rs267
-rw-r--r--vendor/gix-ref/src/target.rs154
-rw-r--r--vendor/gix-ref/src/transaction/ext.rs133
-rw-r--r--vendor/gix-ref/src/transaction/mod.rs143
43 files changed, 5777 insertions, 0 deletions
diff --git a/vendor/gix-ref/.cargo-checksum.json b/vendor/gix-ref/.cargo-checksum.json
new file mode 100644
index 000000000..dfc3b1e66
--- /dev/null
+++ b/vendor/gix-ref/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"Cargo.toml":"294774f5e1135792390a98b9b3483c83146ac30bf2543f3c155877e26c209388","src/fullname.rs":"bf5474ae029a6537d67380ba372521740ef01236d5e2c5cae14801cc2a55a615","src/lib.rs":"e09ca2a32a38bb2cf3f908d1eae8f35d01486b396253ada7fa82ae5c8afbc1d0","src/log.rs":"9c52e16a82ad8e59af00feff2206b46d5b08b007229dce8ad841d74639dcfa25","src/name.rs":"8319442bd162bb1475c236cc816ff48e5203310ef534e3772db4f917fe41620d","src/namespace.rs":"4526a9de69bc32a4fc57d386ab3e2706ebd9e8897cdc12a53c475b0a617cb3ac","src/parse.rs":"c6956a4303e996955285d3fc255bd38734c832bc877d5ae63a77ee292989c0cf","src/peel.rs":"fb576b55b4d17fe9abc12cf1fb55753937810feeb8f79a36c52ae2ac8a3abff0","src/raw.rs":"c45ab1953e2e44645c48df33abd6aa920182b39c92853bebb90aed1a67cddae0","src/store/file/find.rs":"a5e642b310e5dc8285c89f00cf0ed46fdfcb3f19e7ec59cb2264b8312bbd2223","src/store/file/log/iter.rs":"cfba6583d0fbf21e16d515e14d6e71a624291bc6910dde60642c309b24a9c275","src/store/file/log/line.rs":"fbfd0faff68ec950ef35b17f3cfd535ffe02b6b1f23540a93134daf81900a728","src/store/file/log/mod.rs":"072ad9e0864bdce813913a3421cce81bcac7ca4fe2ab514092b7f97b8082556c","src/store/file/loose/iter.rs":"f1236fd18e467863f1227862226643cc05af22708485ae1475ce1a69c0502287","src/store/file/loose/mod.rs":"a5f2ae47631c8f1b214eecd52ea749866732c740deea3e122eb8e33b6ce30152","src/store/file/loose/reference/decode.rs":"e0dee458982c0d24052b57a356a347ec0eb01ffd7e829e593551974c0fd4c038","src/store/file/loose/reference/logiter.rs":"dc25b669f26f9c6c8aaf2186e48714bec065e80e9d06112b31e462bbd5c0f2f2","src/store/file/loose/reference/mod.rs":"bfbf39a4c7b9ff8dac40a6fcc334992fbe778de86d86787cf8c7ac5467d65304","src/store/file/loose/reflog.rs":"2aeef1c5a64ee5bfb76008d8eda0010da121abd23b54582c95b2a980bd55dbbc","src/store/file/loose/reflog/create_or_update/tests.rs":"0a782c2fe0900c92aa02894d48b10b99835ce202964213acdaee167e59533cc0","src/store/file/mod.rs":"dab1cabd111bcd68c0ec62ba0b9034f9665a79673cae23bcff269fa4b162ceb9","src/store/file/overlay_iter.rs":"a512
4b85d23b5a5f198b98aa06f3c86216374be77f39c479f0b459610bfde650","src/store/file/packed.rs":"e07010dc64c8a42e2b35714330dcbc683ec89620b4201889bf508c807383f2ee","src/store/file/raw_ext.rs":"2478fb22c1129d72af102a08e5058af7e7e3224c91f05d485622fe4688ffd817","src/store/file/transaction/commit.rs":"bd0c3d2aa89814a4ba586ed17a4874ce282a64d1de97f7f2b3e66794a11f8f8e","src/store/file/transaction/mod.rs":"3c8a35ce97b852255337b4679ab4144464f8b76630cf75b4aa990361f671a314","src/store/file/transaction/prepare.rs":"13187385a24c6e935e29d19776c2ae35e79c0ae57722db73348935c8643b0c7b","src/store/general/handle/find.rs":"ae228a17df534d8231d68093c209d02de03d102fac1a76a8b12906248c50412e","src/store/general/handle/mod.rs":"5f6c9bc2a2a20bc08807bdf2324e2f22461dbed609cebe5dfa25d6c096514f40","src/store/general/init.rs":"107b62c8ca50d84f95b56c8894572147a5aee585ca52cd6db6213c87db91779d","src/store/general/mod.rs":"0ed6247438790f38b75ab62bffa70ddb73c873fd2d05552c19af1fdaf604773b","src/store/mod.rs":"d160b6f8a2388fe6978cd94675af910d85f2030168bba4a173a30384948ac3ec","src/store/packed/buffer.rs":"2aeabde07415b2f3f080b6a1a5501e637f2ff99d0c2630530851d0acbd82e1d6","src/store/packed/decode.rs":"380fab290cd3d81ba77b7f87f4610dd47c9755a896359b33579bf9b8f09a16db","src/store/packed/decode/tests.rs":"5e66f4ca73b65336dcac8580243b5a53b6a0bf842579396ab08dc9d6a8386dd8","src/store/packed/find.rs":"8e148068d8f077b235d259f7302c356ec029e0e145ee41e47030a552e740bbd8","src/store/packed/iter.rs":"6aaa45aac6141f6c9c93cf56384a4b77c9a75ffd430ebbf8329a08a72688a6e6","src/store/packed/mod.rs":"711cf43cf67dff52dab1ccc7624962f805b4f4c1337ce5bfca3d315261bdf607","src/store/packed/transaction.rs":"5c9bddcd756262791dc706c008d5766eb61227d016eb2fda95c798fa64ca6f58","src/target.rs":"dce5cedb767a4ec03d3882881c5aa4af4fbf2d68240233386650badee73086fe","src/transaction/ext.rs":"7936d319b9e2230b8040c0c55a06dc5282f0e0f75f8a654c0d5d0163ecb86af5","src/transaction/mod.rs":"590a68708dd98069504f2a9d58e9adfd4ad001f673b832798ad0ec71d36ca3b9"},"package":"
90a0ed29e581f04b904ecd0c32b11f33b8209b5a0af9c43f415249a4f2fba632"} \ No newline at end of file
diff --git a/vendor/gix-ref/Cargo.toml b/vendor/gix-ref/Cargo.toml
new file mode 100644
index 000000000..f6233ec47
--- /dev/null
+++ b/vendor/gix-ref/Cargo.toml
@@ -0,0 +1,94 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies.
+#
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
+
+[package]
+edition = "2021"
+rust-version = "1.64"
+name = "gix-ref"
+version = "0.26.0"
+authors = ["Sebastian Thiel <sebastian.thiel@icloud.com>"]
+include = ["src/**/*"]
+autotests = false
+description = "A crate to handle git references"
+license = "MIT/Apache-2.0"
+repository = "https://github.com/Byron/gitoxide"
+
+[package.metadata.docs.rs]
+features = [
+ "document-features",
+ "serde1",
+]
+rustdoc-args = [
+ "--cfg",
+ "docsrs",
+]
+
+[lib]
+test = true
+doctest = false
+
+[dependencies.document-features]
+version = "0.2.1"
+optional = true
+
+[dependencies.gix-actor]
+version = "^0.19.0"
+
+[dependencies.gix-features]
+version = "^0.28.0"
+features = ["walkdir"]
+
+[dependencies.gix-hash]
+version = "^0.10.3"
+
+[dependencies.gix-lock]
+version = "^4.0.0"
+
+[dependencies.gix-object]
+version = "^0.28.0"
+
+[dependencies.gix-path]
+version = "^0.7.2"
+
+[dependencies.gix-tempfile]
+version = "^4.0.0"
+default-features = false
+
+[dependencies.gix-validate]
+version = "^0.7.3"
+
+[dependencies.memmap2]
+version = "0.5.0"
+
+[dependencies.nom]
+version = "7"
+features = ["std"]
+default-features = false
+
+[dependencies.serde]
+version = "1.0.114"
+features = ["derive"]
+optional = true
+default-features = false
+
+[dependencies.thiserror]
+version = "1.0.34"
+
+[dev-dependencies.tempfile]
+version = "3.2.0"
+
+[features]
+serde1 = [
+ "serde",
+ "gix-hash/serde1",
+ "gix-actor/serde1",
+ "gix-object/serde1",
+]
diff --git a/vendor/gix-ref/src/fullname.rs b/vendor/gix-ref/src/fullname.rs
new file mode 100644
index 000000000..8870e6219
--- /dev/null
+++ b/vendor/gix-ref/src/fullname.rs
@@ -0,0 +1,238 @@
+use std::{borrow::Borrow, convert::TryFrom, path::Path};
+
+use gix_object::bstr::{BStr, BString, ByteSlice};
+
+use crate::{bstr::ByteVec, name::is_pseudo_ref, Category, FullName, FullNameRef, Namespace, PartialNameRef};
+
+impl TryFrom<&str> for FullName {
+ type Error = gix_validate::refname::Error;
+
+ fn try_from(value: &str) -> Result<Self, Self::Error> {
+ Ok(FullName(gix_validate::refname(value.as_bytes().as_bstr())?.into()))
+ }
+}
+
+impl TryFrom<String> for FullName {
+ type Error = gix_validate::refname::Error;
+
+ fn try_from(value: String) -> Result<Self, Self::Error> {
+ gix_validate::refname(value.as_bytes().as_bstr())?;
+ Ok(FullName(value.into()))
+ }
+}
+
+impl TryFrom<&BStr> for FullName {
+ type Error = gix_validate::refname::Error;
+
+ fn try_from(value: &BStr) -> Result<Self, Self::Error> {
+ Ok(FullName(gix_validate::refname(value)?.into()))
+ }
+}
+
+impl TryFrom<BString> for FullName {
+ type Error = gix_validate::refname::Error;
+
+ fn try_from(value: BString) -> Result<Self, Self::Error> {
+ gix_validate::refname(value.as_ref())?;
+ Ok(FullName(value))
+ }
+}
+
+impl TryFrom<&BString> for FullName {
+ type Error = gix_validate::refname::Error;
+
+ fn try_from(value: &BString) -> Result<Self, Self::Error> {
+ gix_validate::refname(value.as_ref())?;
+ Ok(FullName(value.clone()))
+ }
+}
+
+impl From<FullName> for BString {
+ fn from(name: FullName) -> Self {
+ name.0
+ }
+}
+
+impl<'a> From<&'a FullNameRef> for &'a BStr {
+ fn from(name: &'a FullNameRef) -> Self {
+ &name.0
+ }
+}
+
+impl<'a> From<&'a FullNameRef> for FullName {
+ fn from(value: &'a FullNameRef) -> Self {
+ FullName(value.as_bstr().into())
+ }
+}
+
+impl std::fmt::Display for FullName {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(&self.0, f)
+ }
+}
+
+impl FullNameRef {
+ /// Interpret this fully qualified reference name as partial name.
+ pub fn as_partial_name(&self) -> &PartialNameRef {
+ PartialNameRef::new_unchecked(self.0.as_bstr())
+ }
+
+ /// Convert this name into the relative path identifying the reference location.
+ pub fn to_path(&self) -> &Path {
+ gix_path::from_byte_slice(&self.0)
+ }
+
+ /// Return ourselves as byte string which is a valid refname
+ pub fn as_bstr(&self) -> &BStr {
+ &self.0
+ }
+
+ /// Strip well-known prefixes from the name and return it.
+ ///
+ /// If there is no such prefix, the original name is returned.
+ pub fn shorten(&self) -> &BStr {
+ self.category_and_short_name()
+ .map(|(_, short)| short)
+ .unwrap_or_else(|| self.0.as_bstr())
+ }
+
+ /// Classify this name, or return `None` if it's unclassified.
+ pub fn category(&self) -> Option<Category<'_>> {
+ self.category_and_short_name().map(|(cat, _)| cat)
+ }
+
+ /// Classify this name, or return `None` if it's unclassified. If `Some`,
+ /// the shortened name is returned as well.
+ pub fn category_and_short_name(&self) -> Option<(Category<'_>, &BStr)> {
+ let name = self.0.as_bstr();
+ for category in &[Category::Tag, Category::LocalBranch, Category::RemoteBranch] {
+ if let Some(shortened) = name.strip_prefix(category.prefix().as_bytes()) {
+ return Some((*category, shortened.as_bstr()));
+ }
+ }
+
+ for category in &[
+ Category::Note,
+ Category::Bisect,
+ Category::WorktreePrivate,
+ Category::Rewritten,
+ ] {
+ if name.starts_with(category.prefix().as_ref()) {
+ return Some((
+ *category,
+ name.strip_prefix(b"refs/")
+ .expect("we checked for refs/* above")
+ .as_bstr(),
+ ));
+ }
+ }
+
+ if is_pseudo_ref(name) {
+ Some((Category::PseudoRef, name))
+ } else if let Some(shortened) = name.strip_prefix(Category::MainPseudoRef.prefix().as_bytes()) {
+ if shortened.starts_with_str("refs/") {
+ (Category::MainRef, shortened.as_bstr()).into()
+ } else {
+ is_pseudo_ref(shortened).then(|| (Category::MainPseudoRef, shortened.as_bstr()))
+ }
+ } else if let Some(shortened_with_worktree_name) =
+ name.strip_prefix(Category::LinkedPseudoRef { name: "".into() }.prefix().as_bytes())
+ {
+ let (name, shortened) = shortened_with_worktree_name.find_byte(b'/').map(|pos| {
+ (
+ shortened_with_worktree_name[..pos].as_bstr(),
+ shortened_with_worktree_name[pos + 1..].as_bstr(),
+ )
+ })?;
+ if shortened.starts_with_str("refs/") {
+ (Category::LinkedRef { name }, shortened.as_bstr()).into()
+ } else {
+ is_pseudo_ref(shortened).then(|| (Category::LinkedPseudoRef { name }, shortened.as_bstr()))
+ }
+ } else {
+ None
+ }
+ }
+}
+
+impl FullName {
+ /// Convert this name into the relative path, lossily, identifying the reference location relative to a repository
+ pub fn to_path(&self) -> &Path {
+ gix_path::from_byte_slice(&self.0)
+ }
+
+ /// Dissolve this instance and return the buffer.
+ pub fn into_inner(self) -> BString {
+ self.0
+ }
+
+ /// Return ourselves as byte string which is a valid refname
+ pub fn as_bstr(&self) -> &BStr {
+ self.0.as_bstr()
+ }
+
+ /// Modify ourself so that we use `namespace` as prefix, if it is not yet in the `namespace`
+ pub fn prefix_namespace(&mut self, namespace: &Namespace) -> &mut Self {
+ if !self.0.starts_with_str(&namespace.0) {
+ self.0.insert_str(0, &namespace.0);
+ }
+ self
+ }
+
+ /// Strip the given `namespace` off the beginning of this name, if it is in this namespace.
+ pub fn strip_namespace(&mut self, namespace: &Namespace) -> &mut Self {
+ if self.0.starts_with_str(&namespace.0) {
+ let prev_len = self.0.len();
+ self.0.copy_within(namespace.0.len().., 0);
+ self.0.resize(prev_len - namespace.0.len(), 0);
+ }
+ self
+ }
+
+ /// Strip well-known prefixes from the name and return it.
+ ///
+ /// If there is no such prefix, the original name is returned.
+ pub fn shorten(&self) -> &BStr {
+ self.as_ref().shorten()
+ }
+
+ /// Classify this name, or return `None` if it's unclassified.
+ pub fn category(&self) -> Option<crate::Category<'_>> {
+ self.as_ref().category()
+ }
+
+ /// Classify this name, or return `None` if it's unclassified. If `Some`,
+ /// the shortened name is returned as well.
+ pub fn category_and_short_name(&self) -> Option<(crate::Category<'_>, &BStr)> {
+ self.as_ref().category_and_short_name()
+ }
+}
+
+impl FullNameRef {
+ /// Return the file name portion of a full name, for instance `main` if the
+ /// full name was `refs/heads/main`.
+ pub fn file_name(&self) -> &BStr {
+ self.0.rsplitn(2, |b| *b == b'/').next().expect("valid ref").as_bstr()
+ }
+}
+
+impl Borrow<FullNameRef> for FullName {
+ #[inline]
+ fn borrow(&self) -> &FullNameRef {
+ FullNameRef::new_unchecked(self.0.as_bstr())
+ }
+}
+
+impl AsRef<FullNameRef> for FullName {
+ fn as_ref(&self) -> &FullNameRef {
+ self.borrow()
+ }
+}
+
+impl ToOwned for FullNameRef {
+ type Owned = FullName;
+
+ fn to_owned(&self) -> Self::Owned {
+ FullName(self.0.to_owned())
+ }
+}
diff --git a/vendor/gix-ref/src/lib.rs b/vendor/gix-ref/src/lib.rs
new file mode 100644
index 000000000..b18d67c4a
--- /dev/null
+++ b/vendor/gix-ref/src/lib.rs
@@ -0,0 +1,205 @@
+//! A crate for handling the references stored in various formats in a git repository.
+//!
+//! References are also called _refs_ which are used interchangeably.
+//!
+//! Refs are the way to keep track of objects and come in two flavors.
+//!
+//! * symbolic refs are pointing to another reference
+//! * peeled refs point to the an object by its [ObjectId][gix_hash::ObjectId]
+//!
+//! They can be identified by a relative path and stored in various flavors.
+//!
+//! * **files**
+//! * **[loose][file::Store]**
+//! * one reference maps to a file on disk
+//! * **packed**
+//! * references are stored in a single human-readable file, along with their targets if they are symbolic.
+//!
+//! ## Feature Flags
+#![cfg_attr(
+ feature = "document-features",
+ cfg_attr(doc, doc = ::document_features::document_features!())
+)]
+#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))]
+#![deny(missing_docs, rust_2018_idioms, unsafe_code)]
+
+use std::borrow::Cow;
+
+use gix_hash::{oid, ObjectId};
+pub use gix_object::bstr;
+use gix_object::bstr::{BStr, BString};
+
+#[path = "store/mod.rs"]
+mod store_impl;
+pub use store_impl::{file, packed};
+
+mod fullname;
+///
+pub mod name;
+///
+pub mod namespace;
+///
+pub mod transaction;
+
+mod parse;
+mod raw;
+
+pub use raw::Reference;
+
+mod target;
+
+///
+pub mod log;
+
+///
+pub mod peel;
+
+///
+pub mod store {
+ /// The way a file store handles the reflog
+ #[derive(Debug, PartialOrd, PartialEq, Ord, Eq, Hash, Clone, Copy)]
+ pub enum WriteReflog {
+ /// Always write the reflog for all references for ref edits, unconditionally.
+ Always,
+ /// Write a ref log for ref edits according to the standard rules.
+ Normal,
+ /// Never write a ref log.
+ Disable,
+ }
+
+ impl Default for WriteReflog {
+ fn default() -> Self {
+ WriteReflog::Normal
+ }
+ }
+
+ /// A thread-local handle for interacting with a [`Store`][crate::Store] to find and iterate references.
+ #[derive(Clone)]
+ #[allow(dead_code)]
+ pub(crate) struct Handle {
+ /// A way to access shared state with the requirement that interior mutability doesn't leak or is incorporated into error types
+ /// if it could. The latter can't happen if references to said internal aren't ever returned.
+ state: handle::State,
+ }
+
+ #[allow(dead_code)]
+ pub(crate) enum State {
+ Loose { store: file::Store },
+ }
+
+ pub(crate) mod general;
+
+ ///
+ #[path = "general/handle/mod.rs"]
+ mod handle;
+ pub use handle::find;
+
+ use crate::file;
+}
+
+/// The git reference store.
+/// TODO: Figure out if handles are needed at all, which depends on the ref-table implementation.
+#[allow(dead_code)]
+pub(crate) struct Store {
+ inner: store::State,
+}
+
+/// Indicate that the given BString is a validate reference name or path that can be used as path on disk or written as target
+/// of a symbolic reference
+#[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd, Clone)]
+#[cfg_attr(feature = "serde1", derive(serde::Serialize, serde::Deserialize))]
+pub struct FullName(pub(crate) BString);
+
+/// A validated and potentially partial reference name - it can safely be used for common operations.
+#[derive(Hash, Debug, PartialEq, Eq, Ord, PartialOrd)]
+#[repr(transparent)]
+pub struct FullNameRef(BStr);
+
+/// A validated complete and fully qualified reference name, safe to use for all operations.
+#[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd, Clone)]
+pub struct PartialNameCow<'a>(Cow<'a, BStr>);
+
+/// A validated complete and fully qualified referenced reference name, safe to use for all operations.
+#[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd)]
+#[repr(transparent)]
+pub struct PartialNameRef(BStr);
+
+/// A validated complete and fully qualified owned reference name, safe to use for all operations.
+#[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd)]
+pub struct PartialName(BString);
+
+/// A _validated_ prefix for references to act as a namespace.
+#[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd, Clone)]
+pub struct Namespace(BString);
+
+/// Denotes the kind of reference.
+#[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd, Clone, Copy)]
+#[cfg_attr(feature = "serde1", derive(serde::Serialize, serde::Deserialize))]
+pub enum Kind {
+ /// A ref that points to an object id
+ Peeled,
+ /// A ref that points to another reference, adding a level of indirection.
+ ///
+ /// It can be resolved to an id using the [`peel_in_place_to_id()`][`crate::file::ReferenceExt::peel_to_id_in_place()`] method.
+ Symbolic,
+}
+
+/// The various known categories of references.
+///
+/// This translates into a prefix containing all references of a given category.
+#[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd, Clone, Copy)]
+pub enum Category<'a> {
+ /// A tag in `refs/tags`
+ Tag,
+ /// A branch in `refs/heads`
+ LocalBranch,
+ /// A branch in `refs/remotes`
+ RemoteBranch,
+ /// A tag in `refs/notes`
+ Note,
+ /// Something outside of `ref/` in the current worktree, typically `HEAD`.
+ PseudoRef,
+ /// A `PseudoRef`, but referenced so that it will always refer to the main worktree by
+ /// prefixing it with `main-worktree/`.
+ MainPseudoRef,
+ /// Any reference that is prefixed with `main-worktree/refs/`
+ MainRef,
+ /// A `PseudoRef` in another _linked_ worktree, never in the main one, like `worktrees/<id>/HEAD`.
+ LinkedPseudoRef {
+ /// The name of the worktree.
+ name: &'a BStr,
+ },
+ /// Any reference that is prefixed with `worktrees/<id>/refs/`.
+ LinkedRef {
+ /// The name of the worktree.
+ name: &'a BStr,
+ },
+ /// A ref that is private to each worktree (_linked_ or _main_), with `refs/bisect/` prefix
+ Bisect,
+ /// A ref that is private to each worktree (_linked_ or _main_), with `refs/rewritten/` prefix
+ Rewritten,
+ /// A ref that is private to each worktree (_linked_ or _main_), with `refs/worktree/` prefix
+ WorktreePrivate,
+ // REF_TYPE_NORMAL, /* normal/shared refs inside refs/ */
+}
+
+/// Denotes a ref target, equivalent to [`Kind`], but with mutable data.
+#[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd, Clone)]
+#[cfg_attr(feature = "serde1", derive(serde::Serialize, serde::Deserialize))]
+pub enum Target {
+ /// A ref that points to an object id
+ Peeled(ObjectId),
+ /// A ref that points to another reference by its validated name, adding a level of indirection.
+ ///
+ /// Note that this is an extension of gitoxide which will be helpful in logging all reference changes.
+ Symbolic(FullName),
+}
+
+/// Denotes a ref target, equivalent to [`Kind`], but with immutable data.
+#[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd, Clone, Copy)]
+pub enum TargetRef<'a> {
+ /// A ref that points to an object id
+ Peeled(&'a oid),
+ /// A ref that points to another reference by its validated name, adding a level of indirection.
+ Symbolic(&'a FullNameRef),
+}
diff --git a/vendor/gix-ref/src/log.rs b/vendor/gix-ref/src/log.rs
new file mode 100644
index 000000000..42ce97aa9
--- /dev/null
+++ b/vendor/gix-ref/src/log.rs
@@ -0,0 +1,16 @@
+use gix_hash::ObjectId;
+use gix_object::bstr::BString;
+
+/// A parsed ref log line that can be changed
+#[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd, Clone)]
+#[cfg_attr(feature = "serde1", derive(serde::Serialize, serde::Deserialize))]
+pub struct Line {
+ /// The previous object id. Can be a null-sha to indicate this is a line for a new ref.
+ pub previous_oid: ObjectId,
+ /// The new object id. Can be a null-sha to indicate this ref is being deleted.
+ pub new_oid: ObjectId,
+ /// The signature of the currently configured committer.
+ pub signature: gix_actor::Signature,
+ /// The message providing details about the operation performed in this log line.
+ pub message: BString,
+}
diff --git a/vendor/gix-ref/src/name.rs b/vendor/gix-ref/src/name.rs
new file mode 100644
index 000000000..dbf96c1e5
--- /dev/null
+++ b/vendor/gix-ref/src/name.rs
@@ -0,0 +1,273 @@
+use std::{convert, convert::Infallible, ffi::OsStr, path::Path};
+
+use gix_object::bstr::{BStr, BString, ByteSlice, ByteVec};
+
+use crate::{Category, FullName, FullNameRef, PartialName, PartialNameRef};
+
+/// The error used in the [`PartialNameRef`][super::PartialNameRef]::try_from(…) implementations.
+pub type Error = gix_validate::reference::name::Error;
+
+impl<'a> Category<'a> {
+ /// Return the prefix that would contain all references of our kind, or an empty string if the reference would
+ /// be directly inside of the [`git_dir()`][crate::file::Store::git_dir()].
+ pub fn prefix(&self) -> &BStr {
+ match self {
+ Category::Tag => b"refs/tags/".as_bstr(),
+ Category::LocalBranch => b"refs/heads/".as_bstr(),
+ Category::RemoteBranch => b"refs/remotes/".as_bstr(),
+ Category::Note => b"refs/notes/".as_bstr(),
+ Category::MainPseudoRef => b"main-worktree/".as_bstr(),
+ Category::MainRef => b"main-worktree/refs/".as_bstr(),
+ Category::PseudoRef => b"".as_bstr(),
+ Category::LinkedPseudoRef { .. } => b"worktrees/".as_bstr(),
+ Category::LinkedRef { .. } => b"worktrees/".as_bstr(),
+ Category::Bisect => b"refs/bisect/".as_bstr(),
+ Category::Rewritten => b"refs/rewritten/".as_bstr(),
+ Category::WorktreePrivate => b"refs/worktree/".as_bstr(),
+ }
+ }
+
+ /// Returns true if the category is private to their worktrees, and never shared with other worktrees.
+ pub fn is_worktree_private(&self) -> bool {
+ matches!(
+ self,
+ Category::MainPseudoRef
+ | Category::PseudoRef
+ | Category::LinkedPseudoRef { .. }
+ | Category::WorktreePrivate
+ | Category::Rewritten
+ | Category::Bisect
+ )
+ }
+}
+
+impl FullNameRef {
+ pub(crate) fn new_unchecked(v: &BStr) -> &Self {
+ // SAFETY: FullNameRef is transparent and equivalent to a &BStr if provided as reference
+ #[allow(unsafe_code)]
+ unsafe {
+ std::mem::transmute(v)
+ }
+ }
+}
+
+impl PartialNameRef {
+ pub(crate) fn new_unchecked(v: &BStr) -> &Self {
+ // SAFETY: PartialNameRef is transparent and equivalent to a &BStr if provided as reference
+ #[allow(unsafe_code)]
+ unsafe {
+ std::mem::transmute(v)
+ }
+ }
+}
+
+impl PartialNameRef {
+ pub(crate) fn looks_like_full_name(&self) -> bool {
+ let name = self.0.as_bstr();
+ name.starts_with_str("refs/")
+ || name.starts_with(Category::MainPseudoRef.prefix())
+ || name.starts_with(Category::LinkedPseudoRef { name: "".into() }.prefix())
+ || is_pseudo_ref(name)
+ }
+ pub(crate) fn construct_full_name_ref<'buf>(
+ &self,
+ add_refs_prefix: bool,
+ inbetween: &str,
+ buf: &'buf mut BString,
+ ) -> &'buf FullNameRef {
+ buf.clear();
+ if add_refs_prefix && !self.looks_like_full_name() {
+ buf.push_str("refs/");
+ }
+ if !inbetween.is_empty() {
+ buf.push_str(inbetween);
+ buf.push_byte(b'/');
+ }
+ buf.extend_from_slice(&self.0);
+ FullNameRef::new_unchecked(buf.as_bstr())
+ }
+}
+
+impl PartialNameRef {
+ /// Convert this name into the relative path possibly identifying the reference location.
+ /// Note that it may be only a partial path though.
+ pub fn to_partial_path(&self) -> &Path {
+ gix_path::from_byte_slice(self.0.as_bstr())
+ }
+
+ /// Provide the name as binary string which is known to be a valid partial ref name.
+ pub fn as_bstr(&self) -> &BStr {
+ &self.0
+ }
+}
+
+impl PartialName {
+ /// Append the `component` to ourselves and validate the newly created partial path.
+ pub fn join(self, component: impl AsRef<[u8]>) -> Result<Self, Error> {
+ let mut b = self.0;
+ b.push_byte(b'/');
+ b.extend(component.as_ref());
+ gix_validate::reference::name_partial(b.as_ref())?;
+ Ok(PartialName(b))
+ }
+}
+
+impl<'a> convert::TryFrom<&'a BStr> for &'a FullNameRef {
+ type Error = Error;
+
+ fn try_from(v: &'a BStr) -> Result<Self, Self::Error> {
+ Ok(FullNameRef::new_unchecked(gix_validate::reference::name(v)?))
+ }
+}
+
+impl<'a> From<&'a FullNameRef> for &'a PartialNameRef {
+ fn from(v: &'a FullNameRef) -> Self {
+ PartialNameRef::new_unchecked(v.0.as_bstr())
+ }
+}
+
+impl<'a> convert::TryFrom<&'a OsStr> for &'a PartialNameRef {
+ type Error = Error;
+
+ fn try_from(v: &'a OsStr) -> Result<Self, Self::Error> {
+ let v = gix_path::os_str_into_bstr(v).map_err(|_| {
+ Error::Tag(gix_validate::tag::name::Error::InvalidByte {
+ byte: "<unknown encoding>".into(),
+ })
+ })?;
+ Ok(PartialNameRef::new_unchecked(gix_validate::reference::name_partial(
+ v.as_bstr(),
+ )?))
+ }
+}
+
+mod impls {
+ use std::borrow::Borrow;
+
+ use crate::{bstr::ByteSlice, PartialName, PartialNameRef};
+
+ impl Borrow<PartialNameRef> for PartialName {
+ #[inline]
+ fn borrow(&self) -> &PartialNameRef {
+ PartialNameRef::new_unchecked(self.0.as_bstr())
+ }
+ }
+
+ impl AsRef<PartialNameRef> for PartialName {
+ fn as_ref(&self) -> &PartialNameRef {
+ self.borrow()
+ }
+ }
+
+ impl ToOwned for PartialNameRef {
+ type Owned = PartialName;
+
+ fn to_owned(&self) -> Self::Owned {
+ PartialName(self.0.to_owned())
+ }
+ }
+}
+
+impl<'a> convert::TryFrom<&'a BString> for &'a PartialNameRef {
+ type Error = Error;
+
+ fn try_from(v: &'a BString) -> Result<Self, Self::Error> {
+ Ok(PartialNameRef::new_unchecked(gix_validate::reference::name_partial(
+ v.as_ref(),
+ )?))
+ }
+}
+
+impl<'a> convert::TryFrom<&'a BStr> for &'a PartialNameRef {
+ type Error = Error;
+
+ fn try_from(v: &'a BStr) -> Result<Self, Self::Error> {
+ Ok(PartialNameRef::new_unchecked(gix_validate::reference::name_partial(v)?))
+ }
+}
+
+impl<'a> convert::TryFrom<&'a PartialName> for &'a PartialNameRef {
+ type Error = Error;
+
+ fn try_from(v: &'a PartialName) -> Result<Self, Self::Error> {
+ Ok(PartialNameRef::new_unchecked(v.0.as_bstr()))
+ }
+}
+
+impl<'a> convert::TryFrom<&'a str> for &'a FullNameRef {
+ type Error = Error;
+
+ fn try_from(v: &'a str) -> Result<Self, Self::Error> {
+ let v = v.as_bytes().as_bstr();
+ Ok(FullNameRef::new_unchecked(gix_validate::reference::name(v)?))
+ }
+}
+
+impl<'a> convert::TryFrom<&'a str> for &'a PartialNameRef {
+ type Error = Error;
+
+ fn try_from(v: &'a str) -> Result<Self, Self::Error> {
+ let v = v.as_bytes().as_bstr();
+ Ok(PartialNameRef::new_unchecked(gix_validate::reference::name_partial(v)?))
+ }
+}
+
+impl<'a> convert::TryFrom<&'a str> for PartialName {
+ type Error = Error;
+
+ fn try_from(v: &'a str) -> Result<Self, Self::Error> {
+ let v = v.as_bytes().as_bstr();
+ Ok(PartialName(gix_validate::reference::name_partial(v)?.to_owned()))
+ }
+}
+
+impl<'a> convert::TryFrom<&'a FullName> for &'a PartialNameRef {
+ type Error = Infallible;
+
+ fn try_from(v: &'a FullName) -> Result<Self, Self::Error> {
+ Ok(v.as_ref().as_partial_name())
+ }
+}
+
+impl<'a> convert::TryFrom<&'a String> for &'a FullNameRef {
+ type Error = Error;
+
+ fn try_from(v: &'a String) -> Result<Self, Self::Error> {
+ let v = v.as_bytes().as_bstr();
+ Ok(FullNameRef::new_unchecked(gix_validate::reference::name(v)?))
+ }
+}
+
+impl<'a> convert::TryFrom<&'a String> for &'a PartialNameRef {
+ type Error = Error;
+
+ fn try_from(v: &'a String) -> Result<Self, Self::Error> {
+ let v = v.as_bytes().as_bstr();
+ Ok(PartialNameRef::new_unchecked(gix_validate::reference::name_partial(v)?))
+ }
+}
+
+impl convert::TryFrom<String> for PartialName {
+ type Error = Error;
+
+ fn try_from(v: String) -> Result<Self, Self::Error> {
+ gix_validate::reference::name_partial(v.as_bytes().as_bstr())?;
+ Ok(PartialName(v.into()))
+ }
+}
+
+impl convert::TryFrom<BString> for PartialName {
+ type Error = Error;
+
+ fn try_from(v: BString) -> Result<Self, Self::Error> {
+ gix_validate::reference::name_partial(v.as_ref())?;
+ Ok(PartialName(v))
+ }
+}
+
+/// Note that this method is disagreeing with gix_validate as it allows dashes '-' for some reason.
+/// Since partial names cannot be created with dashes inside we adjusted this as it's probably unintended or git creates pseudo-refs
+/// which wouldn't pass its safety checks.
+pub(crate) fn is_pseudo_ref<'a>(name: impl Into<&'a BStr>) -> bool {
+ name.into().bytes().all(|b| b.is_ascii_uppercase() || b == b'_')
+}
diff --git a/vendor/gix-ref/src/namespace.rs b/vendor/gix-ref/src/namespace.rs
new file mode 100644
index 000000000..2723052ec
--- /dev/null
+++ b/vendor/gix-ref/src/namespace.rs
@@ -0,0 +1,52 @@
+use std::{
+ convert::TryInto,
+ path::{Path, PathBuf},
+};
+
+use gix_object::bstr::{BStr, BString, ByteSlice, ByteVec};
+
+use crate::{FullName, FullNameRef, Namespace, PartialNameRef};
+
+impl Namespace {
+ /// Dissolve ourselves into the interior representation
+ pub fn into_bstring(self) -> BString {
+ self.0
+ }
+ /// Return ourselves as a byte string.
+ pub fn as_bstr(&self) -> &BStr {
+ self.0.as_ref()
+ }
+ /// Return ourselves as a path for use within the filesystem.
+ pub fn to_path(&self) -> &Path {
+ gix_path::from_byte_slice(&self.0)
+ }
+ /// Append the given `prefix` to this namespace so it becomes usable for prefixed iteration.
+ pub fn into_namespaced_prefix(mut self, prefix: impl AsRef<Path>) -> PathBuf {
+ let path = prefix.as_ref();
+ let prefix = gix_path::into_bstr(path);
+ self.0.push_str(prefix.as_ref());
+ gix_path::to_native_path_on_windows(self.0).into_owned()
+ }
+ pub(crate) fn into_namespaced_name(mut self, name: &FullNameRef) -> FullName {
+ self.0.push_str(name.as_bstr());
+ FullName(self.0)
+ }
+}
+
+/// Given a `namespace` 'foo' we output 'refs/namespaces/foo', and given 'foo/bar' we output 'refs/namespaces/foo/refs/namespaces/bar'.
+///
+/// For more information, consult the [git namespace documentation](https://git-scm.com/docs/gitnamespaces).
+pub fn expand<'a, Name, E>(namespace: Name) -> Result<Namespace, gix_validate::refname::Error>
+where
+ Name: TryInto<&'a PartialNameRef, Error = E>,
+ gix_validate::refname::Error: From<E>,
+{
+ let namespace = &namespace.try_into()?.0;
+ let mut out = BString::default();
+ for component in namespace.split_str(b"/") {
+ out.push_str("refs/namespaces/");
+ out.push_str(component);
+ out.push_str(b"/");
+ }
+ Ok(Namespace(out))
+}
diff --git a/vendor/gix-ref/src/parse.rs b/vendor/gix-ref/src/parse.rs
new file mode 100644
index 000000000..9656c8197
--- /dev/null
+++ b/vendor/gix-ref/src/parse.rs
@@ -0,0 +1,27 @@
+use gix_object::bstr::{BStr, ByteSlice};
+use nom::{
+ branch::alt,
+ bytes::complete::{tag, take_while_m_n},
+ error::ParseError,
+ IResult,
+};
+
+fn is_hex_digit_lc(b: u8) -> bool {
+ matches!(b, b'0'..=b'9' | b'a'..=b'f')
+}
+
+/// Copy from https://github.com/Byron/gitoxide/blob/f270850ff92eab15258023b8e59346ec200303bd/gix-object/src/immutable/parse.rs#L64
+pub fn hex_hash<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], &'a BStr, E> {
+ // NOTE: It's important to be able to read all hashes, do not parameterize it. Hashes can be rejected at a later stage
+ // if needed.
+ take_while_m_n(
+ gix_hash::Kind::shortest().len_in_hex(),
+ gix_hash::Kind::longest().len_in_hex(),
+ is_hex_digit_lc,
+ )(i)
+ .map(|(i, hex)| (i, hex.as_bstr()))
+}
+
+pub fn newline<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], &'a [u8], E> {
+ alt((tag(b"\r\n"), tag(b"\n")))(i)
+}
diff --git a/vendor/gix-ref/src/peel.rs b/vendor/gix-ref/src/peel.rs
new file mode 100644
index 000000000..c8efb5f03
--- /dev/null
+++ b/vendor/gix-ref/src/peel.rs
@@ -0,0 +1,32 @@
+/// A function for use in [`crate::file::ReferenceExt::peel_to_id_in_place()`] to indicate no peeling should happen.
+pub fn none(
+ _id: gix_hash::ObjectId,
+ #[allow(clippy::ptr_arg)] _buf: &mut Vec<u8>,
+) -> Result<Option<(gix_object::Kind, &[u8])>, std::convert::Infallible> {
+ Ok(Some((gix_object::Kind::Commit, &[])))
+}
+
+///
+pub mod to_id {
+ use std::path::PathBuf;
+
+ use gix_object::bstr::BString;
+
+ use crate::file;
+
+ /// The error returned by [`crate::file::ReferenceExt::peel_to_id_in_place()`].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("Could not follow a single level of a symbolic reference")]
+ Follow(#[from] file::find::existing::Error),
+ #[error("Aborting due to reference cycle with first seen path being {start_absolute:?}")]
+ Cycle { start_absolute: PathBuf },
+ #[error("Refusing to follow more than {max_depth} levels of indirection")]
+ DepthLimitExceeded { max_depth: usize },
+ #[error("An error occurred when trying to resolve an object a reference points to")]
+ Find(#[from] Box<dyn std::error::Error + Send + Sync + 'static>),
+ #[error("Object {oid} as referred to by {name:?} could not be found")]
+ NotFound { oid: gix_hash::ObjectId, name: BString },
+ }
+}
diff --git a/vendor/gix-ref/src/raw.rs b/vendor/gix-ref/src/raw.rs
new file mode 100644
index 000000000..fd1f9db34
--- /dev/null
+++ b/vendor/gix-ref/src/raw.rs
@@ -0,0 +1,103 @@
+use gix_hash::ObjectId;
+
+use crate::{FullName, Target};
+
+/// A fully owned backend agnostic reference
+#[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd, Clone)]
+#[cfg_attr(feature = "serde1", derive(serde::Serialize, serde::Deserialize))]
+pub struct Reference {
+ /// The path to uniquely identify this ref within its store.
+ pub name: FullName,
+ /// The target of the reference, either a symbolic reference by full name or a possibly intermediate object by its id.
+ pub target: Target,
+ /// The fully peeled object to which this reference ultimately points to. Only guaranteed to be set after `peel_to_id_in_place()` was called.
+ pub peeled: Option<ObjectId>,
+}
+
+mod convert {
+ use gix_hash::ObjectId;
+
+ use crate::{
+ raw::Reference,
+ store_impl::{file::loose, packed},
+ Target,
+ };
+
+ impl From<Reference> for loose::Reference {
+ fn from(value: Reference) -> Self {
+ loose::Reference {
+ name: value.name,
+ target: value.target,
+ }
+ }
+ }
+
+ impl From<loose::Reference> for Reference {
+ fn from(value: loose::Reference) -> Self {
+ Reference {
+ name: value.name,
+ target: value.target,
+ peeled: None,
+ }
+ }
+ }
+
+ impl<'p> From<packed::Reference<'p>> for Reference {
+ fn from(value: packed::Reference<'p>) -> Self {
+ Reference {
+ name: value.name.into(),
+ target: Target::Peeled(value.target()),
+ peeled: value
+ .object
+ .map(|hex| ObjectId::from_hex(hex).expect("parser validation")),
+ }
+ }
+ }
+}
+
+mod access {
+ use gix_object::bstr::ByteSlice;
+
+ use crate::{raw::Reference, FullNameRef, Namespace, Target};
+
+ impl Reference {
+ /// Returns the kind of reference based on its target
+ pub fn kind(&self) -> crate::Kind {
+ self.target.kind()
+ }
+
+ /// Return the full validated name of the reference, with the given namespace stripped if possible.
+ ///
+ /// If the reference name wasn't prefixed with `namespace`, `None` is returned instead.
+ pub fn name_without_namespace(&self, namespace: &Namespace) -> Option<&FullNameRef> {
+ self.name
+ .0
+ .as_bstr()
+ .strip_prefix(namespace.0.as_bytes())
+ .map(|stripped| FullNameRef::new_unchecked(stripped.as_bstr()))
+ }
+
+ /// Strip the given namespace from our name and, if this is a symbolic reference, from the name of the reference we point to.
+ pub fn strip_namespace(&mut self, namespace: &Namespace) -> &mut Self {
+ self.name.strip_namespace(namespace);
+ if let Target::Symbolic(name) = &mut self.target {
+ name.strip_namespace(namespace);
+ }
+ self
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn size_of_reference() {
+ assert_eq!(
+ std::mem::size_of::<Reference>(),
+ 80,
+ "let's not let it change size undetected"
+ );
+ }
+}
diff --git a/vendor/gix-ref/src/store/file/find.rs b/vendor/gix-ref/src/store/file/find.rs
new file mode 100644
index 000000000..0c6d04b6c
--- /dev/null
+++ b/vendor/gix-ref/src/store/file/find.rs
@@ -0,0 +1,353 @@
+use std::{
+ borrow::Cow,
+ convert::TryInto,
+ io::{self, Read},
+ path::{Path, PathBuf},
+};
+
+pub use error::Error;
+
+use crate::{
+ file,
+ store_impl::{file::loose, packed},
+ BStr, BString, FullNameRef, PartialNameRef, Reference,
+};
+
+enum Transform {
+ EnforceRefsPrefix,
+ None,
+}
+
+impl file::Store {
+ /// Find a single reference by the given `path` which is required to be a valid reference name.
+ ///
+ /// Returns `Ok(None)` if no such ref exists.
+ ///
+ /// ### Note
+ ///
+ /// * The lookup algorithm follows the one in [the git documentation][git-lookup-docs].
+ /// * The packed buffer is checked for modifications each time the method is called. See [`file::Store::try_find_packed()`]
+ /// for a version with more control.
+ ///
+ /// [git-lookup-docs]: https://github.com/git/git/blob/5d5b1473453400224ebb126bf3947e0a3276bdf5/Documentation/revisions.txt#L34-L46
+ pub fn try_find<'a, Name, E>(&self, partial: Name) -> Result<Option<Reference>, Error>
+ where
+ Name: TryInto<&'a PartialNameRef, Error = E>,
+ Error: From<E>,
+ {
+ let packed = self.assure_packed_refs_uptodate()?;
+ self.find_one_with_verified_input(partial.try_into()?, packed.as_ref().map(|b| &***b))
+ }
+
+ /// Similar to [`file::Store::try_find()`], but only looks at loose references, returning `Ok(None)` if there is none.
+ ///
+ /// Find only loose references, that is references that aren't in the packed-refs buffer.
+ /// All symbolic references are loose references.
+ /// `HEAD` is always a loose reference.
+ pub fn try_find_loose<'a, Name, E>(&self, partial: Name) -> Result<Option<loose::Reference>, Error>
+ where
+ Name: TryInto<&'a PartialNameRef, Error = E>,
+ Error: From<E>,
+ {
+ self.find_one_with_verified_input(partial.try_into()?, None)
+ .map(|r| r.map(|r| r.try_into().expect("only loose refs are found without pack")))
+ }
+
+ /// Similar to [`file::Store::find()`], but allows to pass a snapshotted packed buffer instead.
+ pub fn try_find_packed<'a, Name, E>(
+ &self,
+ partial: Name,
+ packed: Option<&packed::Buffer>,
+ ) -> Result<Option<Reference>, Error>
+ where
+ Name: TryInto<&'a PartialNameRef, Error = E>,
+ Error: From<E>,
+ {
+ self.find_one_with_verified_input(partial.try_into()?, packed)
+ }
+
+ pub(crate) fn find_one_with_verified_input(
+ &self,
+ partial_name: &PartialNameRef,
+ packed: Option<&packed::Buffer>,
+ ) -> Result<Option<Reference>, Error> {
+ let mut buf = BString::default();
+ if partial_name.looks_like_full_name() {
+ if let Some(r) = self.find_inner("", partial_name, None, Transform::None, &mut buf)? {
+ return Ok(Some(r));
+ }
+ }
+
+ for inbetween in &["", "tags", "heads", "remotes"] {
+ match self.find_inner(inbetween, partial_name, packed, Transform::EnforceRefsPrefix, &mut buf) {
+ Ok(Some(r)) => return Ok(Some(r)),
+ Ok(None) => {
+ continue;
+ }
+ Err(err) => return Err(err),
+ }
+ }
+ self.find_inner(
+ "remotes",
+ partial_name
+ .to_owned()
+ .join("HEAD")
+ .expect("HEAD is valid name")
+ .as_ref(),
+ None,
+ Transform::EnforceRefsPrefix,
+ &mut buf,
+ )
+ }
+
+ fn find_inner(
+ &self,
+ inbetween: &str,
+ partial_name: &PartialNameRef,
+ packed: Option<&packed::Buffer>,
+ transform: Transform,
+ path_buf: &mut BString,
+ ) -> Result<Option<Reference>, Error> {
+ let add_refs_prefix = matches!(transform, Transform::EnforceRefsPrefix);
+ let full_name = partial_name.construct_full_name_ref(add_refs_prefix, inbetween, path_buf);
+ let content_buf = self.ref_contents(full_name).map_err(|err| Error::ReadFileContents {
+ source: err,
+ path: self.reference_path(full_name),
+ })?;
+
+ match content_buf {
+ None => {
+ if let Some(packed) = packed {
+ if let Some(full_name) = packed::find::transform_full_name_for_lookup(full_name) {
+ let full_name_backing;
+ let full_name = match &self.namespace {
+ Some(namespace) => {
+ full_name_backing = namespace.to_owned().into_namespaced_name(full_name);
+ full_name_backing.as_ref()
+ }
+ None => full_name,
+ };
+ if let Some(packed_ref) = packed.try_find_full_name(full_name)? {
+ let mut res: Reference = packed_ref.into();
+ if let Some(namespace) = &self.namespace {
+ res.strip_namespace(namespace);
+ }
+ return Ok(Some(res));
+ };
+ }
+ }
+ Ok(None)
+ }
+ Some(content) => Ok(Some(
+ loose::Reference::try_from_path(full_name.to_owned(), &content)
+ .map(Into::into)
+ .map(|mut r: Reference| {
+ if let Some(namespace) = &self.namespace {
+ r.strip_namespace(namespace);
+ }
+ r
+ })
+ .map_err(|err| Error::ReferenceCreation {
+ source: err,
+ relative_path: full_name.to_path().to_owned(),
+ })?,
+ )),
+ }
+ }
+}
+
+impl file::Store {
+ pub(crate) fn to_base_dir_and_relative_name<'a>(
+ &self,
+ name: &'a FullNameRef,
+ is_reflog: bool,
+ ) -> (Cow<'_, Path>, &'a FullNameRef) {
+ let commondir = self.common_dir_resolved();
+ let linked_git_dir =
+ |worktree_name: &BStr| commondir.join("worktrees").join(gix_path::from_bstr(worktree_name));
+ name.category_and_short_name()
+ .and_then(|(c, sn)| {
+ use crate::Category::*;
+ let sn = FullNameRef::new_unchecked(sn);
+ Some(match c {
+ LinkedPseudoRef { name: worktree_name } => is_reflog
+ .then(|| (linked_git_dir(worktree_name).into(), sn))
+ .unwrap_or((commondir.into(), name)),
+ Tag | LocalBranch | RemoteBranch | Note => (commondir.into(), name),
+ MainRef | MainPseudoRef => (commondir.into(), sn),
+ LinkedRef { name: worktree_name } => sn
+ .category()
+ .map_or(false, |cat| cat.is_worktree_private())
+ .then(|| {
+ if is_reflog {
+ (linked_git_dir(worktree_name).into(), sn)
+ } else {
+ (commondir.into(), name)
+ }
+ })
+ .unwrap_or((commondir.into(), sn)),
+ PseudoRef | Bisect | Rewritten | WorktreePrivate => return None,
+ })
+ })
+ .unwrap_or((self.git_dir.as_path().into(), name))
+ }
+
+ /// Implements the logic required to transform a fully qualified refname into a filesystem path
+ pub(crate) fn reference_path_with_base<'b>(&self, name: &'b FullNameRef) -> (Cow<'_, Path>, Cow<'b, Path>) {
+ let (base, name) = self.to_base_dir_and_relative_name(name, false);
+ (
+ base,
+ match &self.namespace {
+ None => gix_path::to_native_path_on_windows(name.as_bstr()),
+ Some(namespace) => {
+ gix_path::to_native_path_on_windows(namespace.to_owned().into_namespaced_name(name).into_inner())
+ }
+ },
+ )
+ }
+
+ /// Implements the logic required to transform a fully qualified refname into a filesystem path
+ pub(crate) fn reference_path(&self, name: &FullNameRef) -> PathBuf {
+ let (base, relative_path) = self.reference_path_with_base(name);
+ base.join(relative_path)
+ }
+
+ /// Read the file contents with a verified full reference path and return it in the given vector if possible.
+ pub(crate) fn ref_contents(&self, name: &FullNameRef) -> io::Result<Option<Vec<u8>>> {
+ let ref_path = self.reference_path(name);
+
+ match std::fs::File::open(&ref_path) {
+ Ok(mut file) => {
+ let mut buf = Vec::with_capacity(128);
+ if let Err(err) = file.read_to_end(&mut buf) {
+ return if ref_path.is_dir() { Ok(None) } else { Err(err) };
+ }
+ Ok(buf.into())
+ }
+ Err(err) if err.kind() == io::ErrorKind::NotFound => Ok(None),
+ #[cfg(windows)]
+ Err(err) if err.kind() == std::io::ErrorKind::PermissionDenied => Ok(None),
+ Err(err) => Err(err),
+ }
+ }
+}
+
+///
+pub mod existing {
+ use std::convert::TryInto;
+
+ pub use error::Error;
+
+ use crate::{
+ file::{self},
+ store_impl::{
+ file::{find, loose},
+ packed,
+ },
+ PartialNameRef, Reference,
+ };
+
+ impl file::Store {
+ /// Similar to [`file::Store::try_find()`] but a non-existing ref is treated as error.
+ pub fn find<'a, Name, E>(&self, partial: Name) -> Result<Reference, Error>
+ where
+ Name: TryInto<&'a PartialNameRef, Error = E>,
+ crate::name::Error: From<E>,
+ {
+ let packed = self.assure_packed_refs_uptodate().map_err(find::Error::PackedOpen)?;
+ self.find_existing_inner(partial, packed.as_ref().map(|b| &***b))
+ }
+
+ /// Similar to [`file::Store::find()`], but supports a stable packed buffer.
+ pub fn find_packed<'a, Name, E>(
+ &self,
+ partial: Name,
+ packed: Option<&packed::Buffer>,
+ ) -> Result<Reference, Error>
+ where
+ Name: TryInto<&'a PartialNameRef, Error = E>,
+ crate::name::Error: From<E>,
+ {
+ self.find_existing_inner(partial, packed)
+ }
+
+ /// Similar to [`file::Store::find()`], but won't handle packed-refs.
+ pub fn find_loose<'a, Name, E>(&self, partial: Name) -> Result<loose::Reference, Error>
+ where
+ Name: TryInto<&'a PartialNameRef, Error = E>,
+ crate::name::Error: From<E>,
+ {
+ self.find_existing_inner(partial, None)
+ .map(|r| r.try_into().expect("always loose without packed"))
+ }
+
+ /// Similar to [`file::Store::find()`] but a non-existing ref is treated as error.
+ pub(crate) fn find_existing_inner<'a, Name, E>(
+ &self,
+ partial: Name,
+ packed: Option<&packed::Buffer>,
+ ) -> Result<Reference, Error>
+ where
+ Name: TryInto<&'a PartialNameRef, Error = E>,
+ crate::name::Error: From<E>,
+ {
+ let path = partial
+ .try_into()
+ .map_err(|err| Error::Find(find::Error::RefnameValidation(err.into())))?;
+ match self.find_one_with_verified_input(path, packed) {
+ Ok(Some(r)) => Ok(r),
+ Ok(None) => Err(Error::NotFound {
+ name: path.to_partial_path().to_owned(),
+ }),
+ Err(err) => Err(err.into()),
+ }
+ }
+ }
+
+ mod error {
+ use std::path::PathBuf;
+
+ use crate::store_impl::file::find;
+
+ /// The error returned by [file::Store::find_existing()][crate::file::Store::find()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("An error occurred while trying to find a reference")]
+ Find(#[from] find::Error),
+ #[error("The ref partially named {name:?} could not be found")]
+ NotFound { name: PathBuf },
+ }
+ }
+}
+
+mod error {
+ use std::{convert::Infallible, io, path::PathBuf};
+
+ use crate::{file, store_impl::packed};
+
+ /// The error returned by [file::Store::find()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("The ref name or path is not a valid ref name")]
+ RefnameValidation(#[from] crate::name::Error),
+ #[error("The ref file {path:?} could not be read in full")]
+ ReadFileContents { source: io::Error, path: PathBuf },
+ #[error("The reference at \"{relative_path}\" could not be instantiated")]
+ ReferenceCreation {
+ source: file::loose::reference::decode::Error,
+ relative_path: PathBuf,
+ },
+ #[error("A packed ref lookup failed")]
+ PackedRef(#[from] packed::find::Error),
+ #[error("Could not open the packed refs buffer when trying to find references.")]
+ PackedOpen(#[from] packed::buffer::open::Error),
+ }
+
+ impl From<Infallible> for Error {
+ fn from(_: Infallible) -> Self {
+ unreachable!("this impl is needed to allow passing a known valid partial path as parameter")
+ }
+ }
+}
diff --git a/vendor/gix-ref/src/store/file/log/iter.rs b/vendor/gix-ref/src/store/file/log/iter.rs
new file mode 100644
index 000000000..d62df6800
--- /dev/null
+++ b/vendor/gix-ref/src/store/file/log/iter.rs
@@ -0,0 +1,245 @@
+use gix_object::bstr::ByteSlice;
+
+use crate::{
+ file,
+ file::loose::reference::logiter::must_be_io_err,
+ store_impl::file::{log, log::iter::decode::LineNumber},
+ FullNameRef,
+};
+
+///
+pub mod decode {
+ use crate::store_impl::file::log;
+
+ /// The error returned by items in the [forward][super::forward()] and [reverse][super::reverse()] iterators
+ #[derive(Debug)]
+ pub struct Error {
+ inner: log::line::decode::Error,
+ line: LineNumber,
+ }
+
+ impl std::fmt::Display for Error {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ write!(f, "In line {}: {}", self.line, self.inner)
+ }
+ }
+
+ impl std::error::Error for Error {}
+
+ impl Error {
+ pub(crate) fn new(err: log::line::decode::Error, line: LineNumber) -> Self {
+ Error { line, inner: err }
+ }
+ }
+
+ #[derive(Debug)]
+ pub(crate) enum LineNumber {
+ FromStart(usize),
+ FromEnd(usize),
+ }
+
+ impl std::fmt::Display for LineNumber {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ let (line, suffix) = match self {
+ LineNumber::FromStart(line) => (line, ""),
+ LineNumber::FromEnd(line) => (line, " from the end"),
+ };
+ write!(f, "{}{}", line + 1, suffix)
+ }
+ }
+}
+
+/// Returns a forward iterator over the given `lines`, starting from the first line in the file and ending at the last.
+///
+/// Note that `lines` are an entire reflog file.
+///
+/// This iterator is useful when the ref log file is going to be rewritten which forces processing of the entire file.
+/// It will continue parsing even if individual log entries failed to parse, leaving it to the driver to decide whether to
+/// abort or continue.
+pub fn forward(lines: &[u8]) -> Forward<'_> {
+ Forward {
+ inner: lines.as_bstr().lines().enumerate(),
+ }
+}
+
+/// An iterator yielding parsed lines in a file from start to end, oldest to newest.
+pub struct Forward<'a> {
+ inner: std::iter::Enumerate<gix_object::bstr::Lines<'a>>,
+}
+
+impl<'a> Iterator for Forward<'a> {
+ type Item = Result<log::LineRef<'a>, decode::Error>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.inner.next().map(|(ln, line)| {
+ log::LineRef::from_bytes(line).map_err(|err| decode::Error::new(err, decode::LineNumber::FromStart(ln)))
+ })
+ }
+}
+
+/// A platform to store a buffer to hold ref log lines for iteration.
+#[must_use = "Iterators should be obtained from this platform"]
+pub struct Platform<'a, 's> {
+ /// The store containing the reflogs
+ pub store: &'s file::Store,
+ /// The full name of the reference whose reflog to retrieve.
+ pub name: &'a FullNameRef,
+ /// A reusable buffer for storing log lines read from disk.
+ pub buf: Vec<u8>,
+}
+
+impl<'a, 's> Platform<'a, 's> {
+ /// Return a reverse iterator over all log-lines, most recent to oldest.
+ pub fn rev(&mut self) -> std::io::Result<Option<log::iter::Reverse<'_, std::fs::File>>> {
+ self.buf.clear();
+ self.buf.resize(512, 0);
+ self.store
+ .reflog_iter_rev(self.name, &mut self.buf)
+ .map_err(must_be_io_err)
+ }
+
+ /// Return a forward iterator over all log-lines, oldest to most recent.
+ pub fn all(&mut self) -> std::io::Result<Option<log::iter::Forward<'_>>> {
+ self.buf.clear();
+ self.store.reflog_iter(self.name, &mut self.buf).map_err(must_be_io_err)
+ }
+}
+
+/// An iterator yielding parsed lines in a file in reverse, most recent to oldest.
+pub struct Reverse<'a, F> {
+ buf: &'a mut [u8],
+ count: usize,
+ read_and_pos: Option<(F, u64)>,
+ last_nl_pos: Option<usize>,
+}
+
+/// An iterator over entries of the `log` file in reverse, using `buf` as sliding window.
+///
+/// Note that `buf` must be big enough to capture typical line length or else partial lines will be parsed and probably fail
+/// in the process.
+///
+/// This iterator is very expensive in terms of I/O operations and shouldn't be used to read more than the last few entries of the log.
+/// Use a forward iterator instead for these cases.
+///
+/// It will continue parsing even if individual log entries failed to parse, leaving it to the driver to decide whether to
+/// abort or continue.
+pub fn reverse<F>(mut log: F, buf: &mut [u8]) -> std::io::Result<Reverse<'_, F>>
+where
+ F: std::io::Read + std::io::Seek,
+{
+ let pos = log.seek(std::io::SeekFrom::End(0))?;
+ if buf.is_empty() {
+ return Err(std::io::Error::new(
+ std::io::ErrorKind::Other,
+ "Zero sized buffers are not allowed, use 256 bytes or more for typical logs",
+ ));
+ }
+ Ok(Reverse {
+ buf,
+ count: 0,
+ read_and_pos: Some((log, pos)),
+ last_nl_pos: None,
+ })
+}
+
+///
+pub mod reverse {
+
+ use super::decode;
+
+ /// The error returned by the [`Reverse`][super::Reverse] iterator
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("The buffer could not be filled to make more lines available")]
+ Io(#[from] std::io::Error),
+ #[error("Could not decode log line")]
+ Decode(#[from] decode::Error),
+ }
+}
+
+impl<'a, F> Iterator for Reverse<'a, F>
+where
+ F: std::io::Read + std::io::Seek,
+{
+ type Item = Result<crate::log::Line, reverse::Error>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ match (self.last_nl_pos.take(), self.read_and_pos.take()) {
+ // Initial state - load first data block
+ (None, Some((mut read, pos))) => {
+ let npos = pos.saturating_sub(self.buf.len() as u64);
+ if let Err(err) = read.seek(std::io::SeekFrom::Start(npos)) {
+ return Some(Err(err.into()));
+ }
+
+ let n = (pos - npos) as usize;
+ if n == 0 {
+ return None;
+ }
+ let buf = &mut self.buf[..n];
+ if let Err(err) = read.read_exact(buf) {
+ return Some(Err(err.into()));
+ };
+
+ let last_byte = *buf.last().expect("we have read non-zero bytes before");
+ self.last_nl_pos = Some(if last_byte != b'\n' { buf.len() } else { buf.len() - 1 });
+ self.read_and_pos = Some((read, npos));
+ self.next()
+ }
+ // Has data block and can extract lines from it, load new blocks as needed
+ (Some(end), Some(read_and_pos)) => match self.buf[..end].rfind_byte(b'\n') {
+ Some(start) => {
+ self.read_and_pos = Some(read_and_pos);
+ self.last_nl_pos = Some(start);
+ let buf = &self.buf[start + 1..end];
+ let res = Some(
+ log::LineRef::from_bytes(buf)
+ .map_err(|err| {
+ reverse::Error::Decode(decode::Error::new(err, LineNumber::FromEnd(self.count)))
+ })
+ .map(Into::into),
+ );
+ self.count += 1;
+ res
+ }
+ None => {
+ let (mut read, last_read_pos) = read_and_pos;
+ if last_read_pos == 0 {
+ let buf = &self.buf[..end];
+ Some(
+ log::LineRef::from_bytes(buf)
+ .map_err(|err| {
+ reverse::Error::Decode(decode::Error::new(err, LineNumber::FromEnd(self.count)))
+ })
+ .map(Into::into),
+ )
+ } else {
+ let npos = last_read_pos.saturating_sub((self.buf.len() - end) as u64);
+ if npos == last_read_pos {
+ return Some(Err(std::io::Error::new(
+ std::io::ErrorKind::Other,
+ "buffer too small for line size",
+ )
+ .into()));
+ }
+ let n = (last_read_pos - npos) as usize;
+ self.buf.copy_within(0..end, n);
+ if let Err(err) = read.seek(std::io::SeekFrom::Start(npos)) {
+ return Some(Err(err.into()));
+ }
+ if let Err(err) = read.read_exact(&mut self.buf[..n]) {
+ return Some(Err(err.into()));
+ }
+ self.read_and_pos = Some((read, npos));
+ self.last_nl_pos = Some(n + end);
+ self.next()
+ }
+ }
+ },
+ // depleted
+ (None, None) => None,
+ (Some(_), None) => unreachable!("BUG: Invalid state: we never discard only our file, always both."),
+ }
+ }
+}
diff --git a/vendor/gix-ref/src/store/file/log/line.rs b/vendor/gix-ref/src/store/file/log/line.rs
new file mode 100644
index 000000000..1ac45c75c
--- /dev/null
+++ b/vendor/gix-ref/src/store/file/log/line.rs
@@ -0,0 +1,285 @@
+use gix_hash::ObjectId;
+
+use crate::{log::Line, store_impl::file::log::LineRef};
+
+impl<'a> LineRef<'a> {
+ /// Convert this instance into its mutable counterpart
+ pub fn to_owned(&self) -> Line {
+ self.clone().into()
+ }
+}
+
+mod write {
+ use std::io;
+
+ use gix_object::bstr::{BStr, ByteSlice};
+
+ use crate::log::Line;
+
+ /// The Error produced by [`Line::write_to()`] (but wrapped in an io error).
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ enum Error {
+ #[error("Messages must not contain newlines\\n")]
+ IllegalCharacter,
+ }
+
+ impl From<Error> for io::Error {
+ fn from(err: Error) -> Self {
+ io::Error::new(io::ErrorKind::Other, err)
+ }
+ }
+
+ /// Output
+ impl Line {
+ /// Serialize this instance to `out` in the git serialization format for ref log lines.
+ pub fn write_to(&self, mut out: impl io::Write) -> io::Result<()> {
+ write!(out, "{} {} ", self.previous_oid, self.new_oid)?;
+ self.signature.write_to(&mut out)?;
+ writeln!(out, "\t{}", check_newlines(self.message.as_ref())?)
+ }
+ }
+
+ fn check_newlines(input: &BStr) -> Result<&BStr, Error> {
+ if input.find_byte(b'\n').is_some() {
+ return Err(Error::IllegalCharacter);
+ }
+ Ok(input)
+ }
+}
+
+impl<'a> LineRef<'a> {
+ /// The previous object id of the ref. It will be a null hash if there was no previous id as
+ /// this ref is being created.
+ pub fn previous_oid(&self) -> ObjectId {
+ ObjectId::from_hex(self.previous_oid).expect("parse validation")
+ }
+ /// The new object id of the ref, or a null hash if it is removed.
+ pub fn new_oid(&self) -> ObjectId {
+ ObjectId::from_hex(self.new_oid).expect("parse validation")
+ }
+}
+
+impl<'a> From<LineRef<'a>> for Line {
+ fn from(v: LineRef<'a>) -> Self {
+ Line {
+ previous_oid: v.previous_oid(),
+ new_oid: v.new_oid(),
+ signature: v.signature.into(),
+ message: v.message.into(),
+ }
+ }
+}
+
+///
+pub mod decode {
+ use gix_object::bstr::{BStr, ByteSlice};
+ use nom::{
+ bytes::complete::{tag, take_while},
+ combinator::opt,
+ error::{context, ContextError, ParseError},
+ sequence::{terminated, tuple},
+ IResult,
+ };
+
+ use crate::{file::log::LineRef, parse::hex_hash};
+
+ ///
+ mod error {
+ use gix_object::bstr::{BString, ByteSlice};
+
+ /// The error returned by [from_bytes(…)][super::LineRef::from_bytes()]
+ #[derive(Debug)]
+ pub struct Error {
+ pub input: BString,
+ }
+
+ impl std::fmt::Display for Error {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ write!(
+ f,
+ "{:?} did not match '<old-hexsha> <new-hexsha> <name> <<email>> <timestamp> <tz>\\t<message>'",
+ self.input
+ )
+ }
+ }
+
+ impl std::error::Error for Error {}
+
+ impl Error {
+ pub(crate) fn new(input: &[u8]) -> Self {
+ Error {
+ input: input.as_bstr().to_owned(),
+ }
+ }
+ }
+ }
+ pub use error::Error;
+
+ impl<'a> LineRef<'a> {
+ /// Decode a line from the given bytes which are expected to start at a hex sha.
+ pub fn from_bytes(input: &'a [u8]) -> Result<LineRef<'a>, Error> {
+ one::<()>(input).map(|(_, l)| l).map_err(|_| Error::new(input))
+ }
+ }
+
+ fn message<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], &'a BStr, E> {
+ if i.is_empty() {
+ Ok((&[], i.as_bstr()))
+ } else {
+ terminated(take_while(|c| c != b'\n'), opt(tag(b"\n")))(i).map(|(i, o)| (i, o.as_bstr()))
+ }
+ }
+
+ fn one<'a, E: ParseError<&'a [u8]> + ContextError<&'a [u8]>>(bytes: &'a [u8]) -> IResult<&[u8], LineRef<'a>, E> {
+ let (i, (old, new, signature, message_sep, message)) = context(
+ "<old-hexsha> <new-hexsha> <name> <<email>> <timestamp> <tz>\\t<message>",
+ tuple((
+ context("<old-hexsha>", terminated(hex_hash, tag(b" "))),
+ context("<new-hexsha>", terminated(hex_hash, tag(b" "))),
+ context("<name> <<email>> <timestamp>", gix_actor::signature::decode),
+ opt(tag(b"\t")),
+ context("<optional message>", message),
+ )),
+ )(bytes)?;
+
+ if message_sep.is_none() {
+ if let Some(first) = message.first() {
+ if !first.is_ascii_whitespace() {
+ return Err(nom::Err::Error(E::add_context(
+ i,
+ "log message must be separated from signature with whitespace",
+ E::from_error_kind(i, nom::error::ErrorKind::MapRes),
+ )));
+ }
+ }
+ }
+
+ Ok((
+ i,
+ LineRef {
+ previous_oid: old,
+ new_oid: new,
+ signature,
+ message,
+ },
+ ))
+ }
+
+ #[cfg(test)]
+ mod test {
+ use gix_actor::{Sign, Time};
+ use gix_object::bstr::ByteSlice;
+
+ use super::*;
+
+ /// Convert a hexadecimal hash into its corresponding `ObjectId` or _panic_.
+ fn hex_to_oid(hex: &str) -> gix_hash::ObjectId {
+ gix_hash::ObjectId::from_hex(hex.as_bytes()).expect("40 bytes hex")
+ }
+
+ fn with_newline(mut v: Vec<u8>) -> Vec<u8> {
+ v.push(b'\n');
+ v
+ }
+
+ mod invalid {
+ use gix_testtools::to_bstr_err;
+ use nom::error::VerboseError;
+
+ use super::one;
+
+ #[test]
+ fn completely_bogus_shows_error_with_context() {
+ let err = one::<VerboseError<&[u8]>>(b"definitely not a log entry")
+ .map_err(to_bstr_err)
+ .expect_err("this should fail");
+ assert!(err.to_string().contains("<old-hexsha> <new-hexsha>"));
+ }
+
+ #[test]
+ fn missing_whitespace_between_signature_and_message() {
+ let line = "0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 one <foo@example.com> 1234567890 -0000message";
+ let err = one::<VerboseError<&[u8]>>(line.as_bytes())
+ .map_err(to_bstr_err)
+ .expect_err("this should fail");
+ assert!(err
+ .to_string()
+ .contains("log message must be separated from signature with whitespace"));
+ }
+ }
+
+ const NULL_SHA1: &[u8] = b"0000000000000000000000000000000000000000";
+
+ #[test]
+ fn entry_with_empty_message() {
+ let line_without_nl: Vec<_> = b"0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 name <foo@example.com> 1234567890 -0000".to_vec();
+ let line_with_nl = with_newline(line_without_nl.clone());
+ for input in &[line_without_nl, line_with_nl] {
+ assert_eq!(
+ one::<nom::error::Error<_>>(input).expect("successful parsing").1,
+ LineRef {
+ previous_oid: NULL_SHA1.as_bstr(),
+ new_oid: NULL_SHA1.as_bstr(),
+ signature: gix_actor::SignatureRef {
+ name: b"name".as_bstr(),
+ email: b"foo@example.com".as_bstr(),
+ time: Time {
+ seconds_since_unix_epoch: 1234567890,
+ offset_in_seconds: 0,
+ sign: Sign::Minus
+ }
+ },
+ message: b"".as_bstr(),
+ }
+ );
+ }
+ }
+
+ #[test]
+ fn entry_with_message_without_newline_and_with_newline() {
+ let line_without_nl: Vec<_> = b"a5828ae6b52137b913b978e16cd2334482eb4c1f 89b43f80a514aee58b662ad606e6352e03eaeee4 Sebastian Thiel <foo@example.com> 1618030561 +0800\tpull --ff-only: Fast-forward".to_vec();
+ let line_with_nl = with_newline(line_without_nl.clone());
+
+ for input in &[line_without_nl, line_with_nl] {
+ let (remaining, res) = one::<nom::error::Error<_>>(input).expect("successful parsing");
+ assert!(remaining.is_empty(), "all consuming even without trailing newline");
+ let actual = LineRef {
+ previous_oid: b"a5828ae6b52137b913b978e16cd2334482eb4c1f".as_bstr(),
+ new_oid: b"89b43f80a514aee58b662ad606e6352e03eaeee4".as_bstr(),
+ signature: gix_actor::SignatureRef {
+ name: b"Sebastian Thiel".as_bstr(),
+ email: b"foo@example.com".as_bstr(),
+ time: Time {
+ seconds_since_unix_epoch: 1618030561,
+ offset_in_seconds: 28800,
+ sign: Sign::Plus,
+ },
+ },
+ message: b"pull --ff-only: Fast-forward".as_bstr(),
+ };
+ assert_eq!(res, actual);
+ assert_eq!(
+ actual.previous_oid(),
+ hex_to_oid("a5828ae6b52137b913b978e16cd2334482eb4c1f")
+ );
+ assert_eq!(actual.new_oid(), hex_to_oid("89b43f80a514aee58b662ad606e6352e03eaeee4"));
+ }
+ }
+
+ #[test]
+ fn two_lines_in_a_row_with_and_without_newline() {
+ let lines = b"0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 one <foo@example.com> 1234567890 -0000\t\n0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 two <foo@example.com> 1234567890 -0000\thello";
+ let (remainder, parsed) = one::<nom::error::Error<_>>(lines).expect("parse single line");
+ assert_eq!(parsed.message, b"".as_bstr(), "first message is empty");
+
+ let (remainder, parsed) = one::<nom::error::Error<_>>(remainder).expect("parse single line");
+ assert_eq!(
+ parsed.message,
+ b"hello".as_bstr(),
+ "second message is not and contains no newline"
+ );
+ assert!(remainder.is_empty());
+ }
+ }
+}
diff --git a/vendor/gix-ref/src/store/file/log/mod.rs b/vendor/gix-ref/src/store/file/log/mod.rs
new file mode 100644
index 000000000..5791358e4
--- /dev/null
+++ b/vendor/gix-ref/src/store/file/log/mod.rs
@@ -0,0 +1,23 @@
+use gix_object::bstr::BStr;
+
+pub use super::loose::reflog::{create_or_update, Error};
+
+///
+pub mod iter;
+mod line;
+
+/// A parsed ref log line.
+#[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd, Clone)]
+#[cfg_attr(feature = "serde1", derive(serde::Serialize, serde::Deserialize))]
+#[non_exhaustive]
+pub struct LineRef<'a> {
+ /// The previous object id in hexadecimal. Use [`LineRef::previous_oid()`] to get a more usable form.
+ pub previous_oid: &'a BStr,
+ /// The new object id in hexadecimal. Use [`LineRef::new_oid()`] to get a more usable form.
+ pub new_oid: &'a BStr,
+ /// The signature of the currently configured committer.
+ #[cfg_attr(feature = "serde1", serde(borrow))]
+ pub signature: gix_actor::SignatureRef<'a>,
+ /// The message providing details about the operation performed in this log line.
+ pub message: &'a BStr,
+}
diff --git a/vendor/gix-ref/src/store/file/loose/iter.rs b/vendor/gix-ref/src/store/file/loose/iter.rs
new file mode 100644
index 000000000..b4b46ccc4
--- /dev/null
+++ b/vendor/gix-ref/src/store/file/loose/iter.rs
@@ -0,0 +1,95 @@
+use std::path::{Path, PathBuf};
+
+use gix_features::fs::walkdir::DirEntryIter;
+use gix_object::bstr::ByteSlice;
+
+use crate::{file::iter::LooseThenPacked, store_impl::file, BString, FullName};
+
+/// An iterator over all valid loose reference paths as seen from a particular base directory.
+pub(in crate::store_impl::file) struct SortedLoosePaths {
+ pub(crate) base: PathBuf,
+ filename_prefix: Option<BString>,
+ file_walk: Option<DirEntryIter>,
+}
+
+impl SortedLoosePaths {
+ pub fn at(path: impl AsRef<Path>, base: impl Into<PathBuf>, filename_prefix: Option<BString>) -> Self {
+ let path = path.as_ref();
+ SortedLoosePaths {
+ base: base.into(),
+ filename_prefix,
+ file_walk: path.is_dir().then(|| {
+ // serial iteration as we expect most refs in packed-refs anyway.
+ gix_features::fs::walkdir_sorted_new(path, gix_features::fs::walkdir::Parallelism::Serial).into_iter()
+ }),
+ }
+ }
+}
+
+impl Iterator for SortedLoosePaths {
+ type Item = std::io::Result<(PathBuf, FullName)>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ for entry in self.file_walk.as_mut()?.by_ref() {
+ match entry {
+ Ok(entry) => {
+ if !entry.file_type().is_file() {
+ continue;
+ }
+ let full_path = entry.path().to_owned();
+ if let Some((prefix, name)) = self
+ .filename_prefix
+ .as_deref()
+ .and_then(|prefix| full_path.file_name().map(|name| (prefix, name)))
+ {
+ match gix_path::os_str_into_bstr(name) {
+ Ok(name) => {
+ if !name.starts_with(prefix) {
+ continue;
+ }
+ }
+ Err(_) => continue, // TODO: silently skipping ill-formed UTF-8 on windows - maybe this can be better?
+ }
+ }
+ let full_name = full_path
+ .strip_prefix(&self.base)
+ .expect("prefix-stripping cannot fail as prefix is our root");
+ let full_name = match gix_path::try_into_bstr(full_name) {
+ Ok(name) => {
+ let name = gix_path::to_unix_separators_on_windows(name);
+ name.into_owned()
+ }
+ Err(_) => continue, // TODO: silently skipping ill-formed UTF-8 on windows here, maybe there are better ways?
+ };
+
+ if gix_validate::reference::name_partial(full_name.as_bstr()).is_ok() {
+ let name = FullName(full_name);
+ return Some(Ok((full_path, name)));
+ } else {
+ continue;
+ }
+ }
+ Err(err) => return Some(Err(err.into_io_error().expect("no symlink related errors"))),
+ }
+ }
+ None
+ }
+}
+
+impl file::Store {
+ /// Return an iterator over all loose references, notably not including any packed ones, in lexical order.
+ /// Each of the references may fail to parse and the iterator will not stop if parsing fails, allowing the caller
+ /// to see all files that look like references whether valid or not.
+ ///
+ /// Reference files that do not constitute valid names will be silently ignored.
+ pub fn loose_iter(&self) -> std::io::Result<LooseThenPacked<'_, '_>> {
+ self.iter_packed(None)
+ }
+
+ /// Return an iterator over all loose references that start with the given `prefix`.
+ ///
+ /// Otherwise it's similar to [`loose_iter()`][file::Store::loose_iter()].
+ pub fn loose_iter_prefixed(&self, prefix: impl AsRef<Path>) -> std::io::Result<LooseThenPacked<'_, '_>> {
+ self.iter_prefixed_packed(prefix, None)
+ }
+}
diff --git a/vendor/gix-ref/src/store/file/loose/mod.rs b/vendor/gix-ref/src/store/file/loose/mod.rs
new file mode 100644
index 000000000..230641509
--- /dev/null
+++ b/vendor/gix-ref/src/store/file/loose/mod.rs
@@ -0,0 +1,65 @@
+use crate::{FullName, Kind, Target};
+
+/// A git _ref_ which is stored in a file.
+#[derive(Debug, PartialOrd, PartialEq, Ord, Eq, Hash, Clone)]
+pub struct Reference {
+ /// The path to uniquely identify this ref within its store.
+ pub name: FullName,
+ /// The target of the reference, either a symbolic reference by full name or an object by its id.
+ pub target: Target,
+}
+
+impl Reference {
+ /// Return the kind of ref.
+ pub fn kind(&self) -> Kind {
+ self.target.kind()
+ }
+}
+
+///
+pub(crate) mod reflog;
+
+///
+pub(crate) mod iter;
+///
+pub mod reference;
+
+mod init {
+ use std::path::PathBuf;
+
+ use crate::store_impl::file;
+
+ impl file::Store {
+ /// Create a new instance at the given `git_dir`, which commonly is a standard git repository with a
+ /// `refs/` subdirectory.
+ /// The `object_hash` defines which kind of hash we should recognize.
+ pub fn at(git_dir: impl Into<PathBuf>, write_reflog: file::WriteReflog, object_hash: gix_hash::Kind) -> Self {
+ file::Store {
+ git_dir: git_dir.into(),
+ common_dir: None,
+ write_reflog,
+ namespace: None,
+ packed: gix_features::fs::MutableSnapshot::new().into(),
+ object_hash,
+ }
+ }
+
+ /// Like [`at()`][file::Store::at()], but for _linked_ work-trees which use `git_dir` as private ref store and `common_dir` for
+ /// shared references.
+ pub fn for_linked_worktree(
+ git_dir: impl Into<PathBuf>,
+ common_dir: impl Into<PathBuf>,
+ write_reflog: file::WriteReflog,
+ object_hash: gix_hash::Kind,
+ ) -> Self {
+ file::Store {
+ git_dir: git_dir.into(),
+ common_dir: Some(common_dir.into()),
+ write_reflog,
+ namespace: None,
+ packed: gix_features::fs::MutableSnapshot::new().into(),
+ object_hash,
+ }
+ }
+ }
+}
diff --git a/vendor/gix-ref/src/store/file/loose/reference/decode.rs b/vendor/gix-ref/src/store/file/loose/reference/decode.rs
new file mode 100644
index 000000000..9bf2f7c29
--- /dev/null
+++ b/vendor/gix-ref/src/store/file/loose/reference/decode.rs
@@ -0,0 +1,83 @@
+use std::convert::{TryFrom, TryInto};
+
+use gix_hash::ObjectId;
+use gix_object::bstr::BString;
+use nom::{
+ bytes::complete::{tag, take_while},
+ combinator::{map, opt},
+ sequence::terminated,
+ IResult,
+};
+
+use crate::{
+ parse::{hex_hash, newline},
+ store_impl::file::loose::Reference,
+ FullName, Target,
+};
+
+enum MaybeUnsafeState {
+ Id(ObjectId),
+ UnvalidatedPath(BString),
+}
+
+/// The error returned by [`Reference::try_from_path()`].
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum Error {
+ #[error("{content:?} could not be parsed")]
+ Parse { content: BString },
+ #[error("The path {path:?} to a symbolic reference within a ref file is invalid")]
+ RefnameValidation {
+ source: gix_validate::reference::name::Error,
+ path: BString,
+ },
+}
+
+impl TryFrom<MaybeUnsafeState> for Target {
+ type Error = Error;
+
+ fn try_from(v: MaybeUnsafeState) -> Result<Self, Self::Error> {
+ Ok(match v {
+ MaybeUnsafeState::Id(id) => Target::Peeled(id),
+ MaybeUnsafeState::UnvalidatedPath(name) => Target::Symbolic(match gix_validate::refname(name.as_ref()) {
+ Ok(_) => FullName(name),
+ Err(err) => {
+ return Err(Error::RefnameValidation {
+ source: err,
+ path: name,
+ })
+ }
+ }),
+ })
+ }
+}
+
+impl Reference {
+    /// Create a new reference of the given `parent` store with `relative_path` serving as a unique identifier
+ /// at which the `path_contents` was read to obtain the refs value.
+ pub fn try_from_path(name: FullName, path_contents: &[u8]) -> Result<Self, Error> {
+ Ok(Reference {
+ name,
+ target: parse(path_contents)
+ .map_err(|_| Error::Parse {
+ content: path_contents.into(),
+ })?
+ .1
+ .try_into()?,
+ })
+ }
+}
+
+fn parse(bytes: &[u8]) -> IResult<&[u8], MaybeUnsafeState> {
+ let is_space = |b: u8| b == b' ';
+ if let (path, Some(_ref_prefix)) = opt(terminated(tag("ref: "), take_while(is_space)))(bytes)? {
+ map(
+ terminated(take_while(|b| b != b'\r' && b != b'\n'), opt(newline)),
+ |path| MaybeUnsafeState::UnvalidatedPath(path.into()),
+ )(path)
+ } else {
+ map(terminated(hex_hash, opt(newline)), |hex| {
+ MaybeUnsafeState::Id(ObjectId::from_hex(hex).expect("prior validation"))
+ })(bytes)
+ }
+}
diff --git a/vendor/gix-ref/src/store/file/loose/reference/logiter.rs b/vendor/gix-ref/src/store/file/loose/reference/logiter.rs
new file mode 100644
index 000000000..0bc81f22d
--- /dev/null
+++ b/vendor/gix-ref/src/store/file/loose/reference/logiter.rs
@@ -0,0 +1,47 @@
+use crate::store_impl::{
+ file,
+ file::{log, loose, loose::Reference},
+};
+
+pub(crate) fn must_be_io_err(err: loose::reflog::Error) -> std::io::Error {
+ match err {
+ loose::reflog::Error::Io(err) => err,
+ loose::reflog::Error::RefnameValidation(_) => unreachable!("we are called from a valid ref"),
+ }
+}
+
+impl Reference {
+ /// Returns true if a reflog exists in the given `store`.
+ ///
+ /// Please note that this method shouldn't be used to check if a log exists before trying to read it, but instead
+ /// is meant to be the fastest possible way to determine if a log exists or not.
+ /// If the caller needs to know if it's readable, try to read the log instead with a reverse or forward iterator.
+ pub fn log_exists(&self, store: &file::Store) -> bool {
+ store
+ .reflog_exists(self.name.as_ref())
+ .expect("name conversion infallible")
+ }
+ /// Return a reflog reverse iterator for this ref, reading chunks from the back into the fixed buffer `buf`, in the given `store`.
+ ///
+ /// The iterator will traverse log entries from most recent to oldest, reading the underlying file in chunks from the back.
+ /// Return `Ok(None)` if no reflog exists.
+ pub fn log_iter_rev<'b>(
+ &self,
+ store: &file::Store,
+ buf: &'b mut [u8],
+ ) -> std::io::Result<Option<log::iter::Reverse<'b, std::fs::File>>> {
+ store.reflog_iter_rev(self.name.as_ref(), buf).map_err(must_be_io_err)
+ }
+
+ /// Return a reflog forward iterator for this ref and write its file contents into `buf`, in the given `store`.
+ ///
+ /// The iterator will traverse log entries from oldest to newest.
+ /// Return `Ok(None)` if no reflog exists.
+ pub fn log_iter<'a, 'b: 'a>(
+ &'a self,
+ store: &file::Store,
+ buf: &'b mut Vec<u8>,
+ ) -> std::io::Result<Option<impl Iterator<Item = Result<log::LineRef<'b>, log::iter::decode::Error>> + 'a>> {
+ store.reflog_iter(self.name.as_ref(), buf).map_err(must_be_io_err)
+ }
+}
diff --git a/vendor/gix-ref/src/store/file/loose/reference/mod.rs b/vendor/gix-ref/src/store/file/loose/reference/mod.rs
new file mode 100644
index 000000000..3e5ce0683
--- /dev/null
+++ b/vendor/gix-ref/src/store/file/loose/reference/mod.rs
@@ -0,0 +1,4 @@
+pub(crate) mod logiter;
+
+///
+pub mod decode;
diff --git a/vendor/gix-ref/src/store/file/loose/reflog.rs b/vendor/gix-ref/src/store/file/loose/reflog.rs
new file mode 100644
index 000000000..a43d773fe
--- /dev/null
+++ b/vendor/gix-ref/src/store/file/loose/reflog.rs
@@ -0,0 +1,244 @@
+use std::{convert::TryInto, io::Read, path::PathBuf};
+
+use crate::{
+ store_impl::{file, file::log},
+ FullNameRef,
+};
+
+impl file::Store {
+ /// Returns true if a reflog exists for the given reference `name`.
+ ///
+ /// Please note that this method shouldn't be used to check if a log exists before trying to read it, but instead
+ /// is meant to be the fastest possible way to determine if a log exists or not.
+ /// If the caller needs to know if it's readable, try to read the log instead with a reverse or forward iterator.
+ pub fn reflog_exists<'a, Name, E>(&self, name: Name) -> Result<bool, E>
+ where
+ Name: TryInto<&'a FullNameRef, Error = E>,
+ crate::name::Error: From<E>,
+ {
+ Ok(self.reflog_path(name.try_into()?).is_file())
+ }
+
+ /// Return a reflog reverse iterator for the given fully qualified `name`, reading chunks from the back into the fixed buffer `buf`.
+ ///
+ /// The iterator will traverse log entries from most recent to oldest, reading the underlying file in chunks from the back.
+ /// Return `Ok(None)` if no reflog exists.
+ pub fn reflog_iter_rev<'a, 'b, Name, E>(
+ &self,
+ name: Name,
+ buf: &'b mut [u8],
+ ) -> Result<Option<log::iter::Reverse<'b, std::fs::File>>, Error>
+ where
+ Name: TryInto<&'a FullNameRef, Error = E>,
+ crate::name::Error: From<E>,
+ {
+ let name: &FullNameRef = name.try_into().map_err(|err| Error::RefnameValidation(err.into()))?;
+ let path = self.reflog_path(name);
+ if path.is_dir() {
+ return Ok(None);
+ }
+ match std::fs::File::open(&path) {
+ Ok(file) => Ok(Some(log::iter::reverse(file, buf)?)),
+ Err(err) if err.kind() == std::io::ErrorKind::NotFound => Ok(None),
+ Err(err) => Err(err.into()),
+ }
+ }
+
+ /// Return a reflog forward iterator for the given fully qualified `name` and write its file contents into `buf`.
+ ///
+ /// The iterator will traverse log entries from oldest to newest.
+ /// Return `Ok(None)` if no reflog exists.
+ pub fn reflog_iter<'a, 'b, Name, E>(
+ &self,
+ name: Name,
+ buf: &'b mut Vec<u8>,
+ ) -> Result<Option<log::iter::Forward<'b>>, Error>
+ where
+ Name: TryInto<&'a FullNameRef, Error = E>,
+ crate::name::Error: From<E>,
+ {
+ let name: &FullNameRef = name.try_into().map_err(|err| Error::RefnameValidation(err.into()))?;
+ let path = self.reflog_path(name);
+ match std::fs::File::open(&path) {
+ Ok(mut file) => {
+ buf.clear();
+ if let Err(err) = file.read_to_end(buf) {
+ return if path.is_dir() { Ok(None) } else { Err(err.into()) };
+ }
+ Ok(Some(log::iter::forward(buf)))
+ }
+ Err(err) if err.kind() == std::io::ErrorKind::NotFound => Ok(None),
+ #[cfg(windows)]
+ Err(err) if err.kind() == std::io::ErrorKind::PermissionDenied => Ok(None),
+ Err(err) => Err(err.into()),
+ }
+ }
+}
+
+impl file::Store {
+ /// Implements the logic required to transform a fully qualified refname into its log name
+ pub(crate) fn reflog_path(&self, name: &FullNameRef) -> PathBuf {
+ let (base, rela_path) = self.reflog_base_and_relative_path(name);
+ base.join(rela_path)
+ }
+}
+
+///
+pub mod create_or_update {
+ use std::{
+ borrow::Cow,
+ io::Write,
+ path::{Path, PathBuf},
+ };
+
+ use gix_hash::{oid, ObjectId};
+ use gix_object::bstr::BStr;
+
+ use crate::store_impl::{file, file::WriteReflog};
+
+ impl file::Store {
+ #[allow(clippy::too_many_arguments)]
+ pub(crate) fn reflog_create_or_append(
+ &self,
+ name: &FullNameRef,
+ previous_oid: Option<ObjectId>,
+ new: &oid,
+ committer: Option<gix_actor::SignatureRef<'_>>,
+ message: &BStr,
+ mut force_create_reflog: bool,
+ ) -> Result<(), Error> {
+ let (reflog_base, full_name) = self.reflog_base_and_relative_path(name);
+ match self.write_reflog {
+ WriteReflog::Normal | WriteReflog::Always => {
+ if self.write_reflog == WriteReflog::Always {
+ force_create_reflog = true;
+ }
+ let mut options = std::fs::OpenOptions::new();
+ options.append(true).read(false);
+ let log_path = reflog_base.join(&full_name);
+
+ if force_create_reflog || self.should_autocreate_reflog(&full_name) {
+ let parent_dir = log_path.parent().expect("always with parent directory");
+ gix_tempfile::create_dir::all(parent_dir, Default::default()).map_err(|err| {
+ Error::CreateLeadingDirectories {
+ source: err,
+ reflog_directory: parent_dir.to_owned(),
+ }
+ })?;
+ options.create(true);
+ };
+
+ let file_for_appending = match options.open(&log_path) {
+ Ok(f) => Some(f),
+ Err(err) if err.kind() == std::io::ErrorKind::NotFound => None,
+ Err(err) => {
+ // TODO: when Kind::IsADirectory becomes stable, use that.
+ if log_path.is_dir() {
+ gix_tempfile::remove_dir::empty_depth_first(&log_path)
+ .and_then(|_| options.open(&log_path))
+ .map(Some)
+ .map_err(|_| Error::Append {
+ source: err,
+ reflog_path: self.reflog_path(name),
+ })?
+ } else {
+ return Err(Error::Append {
+ source: err,
+ reflog_path: log_path,
+ });
+ }
+ }
+ };
+
+ if let Some(mut file) = file_for_appending {
+ let committer = committer.ok_or(Error::MissingCommitter)?;
+ write!(file, "{} {} ", previous_oid.unwrap_or_else(|| new.kind().null()), new)
+ .and_then(|_| committer.write_to(&mut file))
+ .and_then(|_| {
+ if !message.is_empty() {
+ writeln!(file, "\t{message}")
+ } else {
+ writeln!(file)
+ }
+ })
+ .map_err(|err| Error::Append {
+ source: err,
+ reflog_path: self.reflog_path(name),
+ })?;
+ }
+ Ok(())
+ }
+ WriteReflog::Disable => Ok(()),
+ }
+ }
+
+ fn should_autocreate_reflog(&self, full_name: &Path) -> bool {
+ full_name.starts_with("refs/heads/")
+ || full_name.starts_with("refs/remotes/")
+ || full_name.starts_with("refs/notes/")
+ || full_name.starts_with("refs/worktree/") // NOTE: git does not write reflogs for worktree private refs
+ || full_name == Path::new("HEAD")
+ }
+
+ /// Returns the base paths for all reflogs
+ pub(in crate::store_impl::file) fn reflog_base_and_relative_path<'a>(
+ &self,
+ name: &'a FullNameRef,
+ ) -> (PathBuf, Cow<'a, Path>) {
+ let is_reflog = true;
+ let (base, name) = self.to_base_dir_and_relative_name(name, is_reflog);
+ (
+ base.join("logs"),
+ match &self.namespace {
+ None => gix_path::to_native_path_on_windows(name.as_bstr()),
+ Some(namespace) => gix_path::to_native_path_on_windows(
+ namespace.to_owned().into_namespaced_name(name).into_inner(),
+ ),
+ },
+ )
+ }
+ }
+
+ #[cfg(test)]
+ mod tests;
+
+ mod error {
+ use std::path::PathBuf;
+
+ /// The error returned when creating or appending to a reflog
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("Could create one or more directories in {reflog_directory:?} to contain reflog file")]
+ CreateLeadingDirectories {
+ source: std::io::Error,
+ reflog_directory: PathBuf,
+ },
+ #[error("Could not open reflog file at {reflog_path:?} for appending")]
+ Append {
+ source: std::io::Error,
+ reflog_path: PathBuf,
+ },
+ #[error("reflog message must not contain newlines")]
+ MessageWithNewlines,
+ #[error("reflog messages need a committer which isn't set")]
+ MissingCommitter,
+ }
+ }
+ pub use error::Error;
+
+ use crate::FullNameRef;
+}
+
+mod error {
+ /// The error returned by [crate::file::Store::reflog_iter()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("The reflog name or path is not a valid ref name")]
+ RefnameValidation(#[from] crate::name::Error),
+ #[error("The reflog file could not read")]
+ Io(#[from] std::io::Error),
+ }
+}
+pub use error::Error;
diff --git a/vendor/gix-ref/src/store/file/loose/reflog/create_or_update/tests.rs b/vendor/gix-ref/src/store/file/loose/reflog/create_or_update/tests.rs
new file mode 100644
index 000000000..16b9b1492
--- /dev/null
+++ b/vendor/gix-ref/src/store/file/loose/reflog/create_or_update/tests.rs
@@ -0,0 +1,155 @@
+use std::{convert::TryInto, path::Path};
+
+use gix_actor::{Sign, Signature, Time};
+use gix_object::bstr::ByteSlice;
+use tempfile::TempDir;
+
+use super::*;
+use crate::{file::WriteReflog, FullNameRef};
+
+type Result<T = ()> = std::result::Result<T, Box<dyn std::error::Error>>;
+
+/// Convert a hexadecimal hash into its corresponding `ObjectId` or _panic_.
+fn hex_to_id(hex: &str) -> gix_hash::ObjectId {
+ gix_hash::ObjectId::from_hex(hex.as_bytes()).expect("40 bytes hex")
+}
+
+fn empty_store(writemode: WriteReflog) -> Result<(TempDir, file::Store)> {
+ let dir = TempDir::new()?;
+ let store = file::Store::at(dir.path(), writemode, gix_hash::Kind::Sha1);
+ Ok((dir, store))
+}
+
+fn reflog_lines(store: &file::Store, name: &str, buf: &mut Vec<u8>) -> Result<Vec<crate::log::Line>> {
+ store
+ .reflog_iter(name, buf)?
+ .expect("existing reflog")
+ .map(|l| l.map(crate::log::Line::from))
+ .collect::<std::result::Result<Vec<_>, _>>()
+ .map_err(Into::into)
+}
+
+const WRITE_MODES: &[WriteReflog] = &[WriteReflog::Normal, WriteReflog::Disable, WriteReflog::Always];
+
+#[test]
+fn should_autocreate_is_unaffected_by_writemode() -> Result {
+ let (_keep, store) = empty_store(WriteReflog::Disable)?;
+ for should_create_name in &["HEAD", "refs/heads/main", "refs/remotes/any", "refs/notes/any"] {
+ assert!(store.should_autocreate_reflog(Path::new(should_create_name)));
+ }
+ for should_not_create_name in &["FETCH_HEAD", "SOMETHING", "refs/special/this", "refs/tags/0.1.0"] {
+ assert!(!store.should_autocreate_reflog(Path::new(should_not_create_name)));
+ }
+ Ok(())
+}
+
+#[test]
+fn missing_reflog_creates_it_even_if_similarly_named_empty_dir_exists_and_append_log_lines() -> Result {
+ for mode in WRITE_MODES {
+ let (_keep, store) = empty_store(*mode)?;
+ let full_name_str = "refs/heads/main";
+ let full_name: &FullNameRef = full_name_str.try_into()?;
+ let new = hex_to_id("28ce6a8b26aa170e1de65536fe8abe1832bd3242");
+ let committer = Signature {
+ name: "committer".into(),
+ email: "committer@example.com".into(),
+ time: Time {
+ seconds_since_unix_epoch: 1234,
+ offset_in_seconds: 1800,
+ sign: Sign::Plus,
+ },
+ };
+ store.reflog_create_or_append(
+ full_name,
+ None,
+ &new,
+ committer.to_ref().into(),
+ b"the message".as_bstr(),
+ false,
+ )?;
+
+ let mut buf = Vec::new();
+ match mode {
+ WriteReflog::Normal | WriteReflog::Always => {
+ assert_eq!(
+ reflog_lines(&store, full_name_str, &mut buf)?,
+ vec![crate::log::Line {
+ previous_oid: gix_hash::Kind::Sha1.null(),
+ new_oid: new,
+ signature: committer.clone(),
+ message: "the message".into()
+ }]
+ );
+ let previous = hex_to_id("0000000000000000000000111111111111111111");
+ store.reflog_create_or_append(
+ full_name,
+ Some(previous),
+ &new,
+ committer.to_ref().into(),
+ b"next message".as_bstr(),
+ false,
+ )?;
+
+ let lines = reflog_lines(&store, full_name_str, &mut buf)?;
+ assert_eq!(lines.len(), 2, "now there is another line");
+ assert_eq!(
+ lines.last().expect("non-empty"),
+ &crate::log::Line {
+ previous_oid: previous,
+ new_oid: new,
+ signature: committer.clone(),
+ message: "next message".into()
+ }
+ );
+ }
+ WriteReflog::Disable => {
+ assert!(
+ store.reflog_iter(full_name, &mut buf)?.is_none(),
+ "there is no logs in disabled mode"
+ );
+ }
+ };
+
+ // create onto existing directory
+ let full_name_str = "refs/heads/other";
+ let full_name: &FullNameRef = full_name_str.try_into()?;
+ let reflog_path = store.reflog_path(full_name_str.try_into().expect("valid"));
+ let directory_in_place_of_reflog = reflog_path.join("empty-a").join("empty-b");
+ std::fs::create_dir_all(directory_in_place_of_reflog)?;
+
+ store.reflog_create_or_append(
+ full_name,
+ None,
+ &new,
+ committer.to_ref().into(),
+ b"more complicated reflog creation".as_bstr(),
+ false,
+ )?;
+
+ match mode {
+ WriteReflog::Normal | WriteReflog::Always => {
+ assert_eq!(
+ reflog_lines(&store, full_name_str, &mut buf)?.len(),
+ 1,
+ "reflog was written despite directory"
+ );
+ assert!(
+ reflog_path.is_file(),
+ "the empty directory was replaced with the reflog file"
+ );
+ }
+ WriteReflog::Disable => {
+ assert!(
+ store.reflog_iter(full_name_str, &mut buf)?.is_none(),
+ "reflog still doesn't exist"
+ );
+ assert!(
+ store.reflog_iter_rev(full_name_str, &mut buf)?.is_none(),
+ "reflog still doesn't exist"
+ );
+ assert!(reflog_path.is_dir(), "reflog directory wasn't touched");
+ }
+ }
+ }
+ Ok(())
+}
diff --git a/vendor/gix-ref/src/store/file/mod.rs b/vendor/gix-ref/src/store/file/mod.rs
new file mode 100644
index 000000000..cadc1d3b7
--- /dev/null
+++ b/vendor/gix-ref/src/store/file/mod.rs
@@ -0,0 +1,104 @@
+use std::{
+ borrow::Cow,
+ path::{Path, PathBuf},
+};
+
+use crate::{bstr::BStr, store::WriteReflog, Namespace};
+
+/// A store for references which uses plain files.
+///
+/// Each ref is represented as a single file on disk in a folder structure that follows the relative path
+/// used to identify [references][crate::Reference].
+#[derive(Debug, Clone)]
+pub struct Store {
+ /// The location at which loose references can be found as per conventions of a typical git repository.
+ ///
+ /// Typical base paths are `.git` repository folders.
+ git_dir: PathBuf,
+ /// Possibly the common directory at which to find shared references. Only set if this `Store` is for a work tree.
+ common_dir: Option<PathBuf>,
+ /// The kind of hash to assume in a couple of situations. Note that currently we are able to read any valid hash from files
+ /// which might want to change one day.
+ object_hash: gix_hash::Kind,
+
+ /// The way to handle reflog edits
+ pub write_reflog: WriteReflog,
+ /// The namespace to use for edits and reads
+ pub namespace: Option<Namespace>,
+ /// A packed buffer which can be mapped in one version and shared as such.
+ /// It's updated only in one spot, which is prior to reading it based on file stamps.
+ /// Doing it like this has the benefit of being able to hand snapshots out to people without blocking others from updating it.
+ packed: packed::modifiable::MutableSharedBuffer,
+}
+
+mod access {
+ use std::path::Path;
+
+ use crate::file;
+
+ impl file::Store {
+ /// Return the `.git` directory at which all references are loaded.
+ ///
+ /// For worktrees, this is the linked work-tree private ref location,
+ /// then [`common_dir()`][file::Store::common_dir()] is `Some(parent_git_dir)`.
+ pub fn git_dir(&self) -> &Path {
+ &self.git_dir
+ }
+
+ /// If this is a linked work tree, there will be `Some(git_dir)` pointing to the parent repository,
+ /// while [`git_dir()`][file::Store::git_dir()] points to the location holding linked work-tree private references.
+ pub fn common_dir(&self) -> Option<&Path> {
+ self.common_dir.as_deref()
+ }
+
+ /// Similar to [`common_dir()`][file::Store::common_dir()], but it will produce either the common-dir, or the git-dir if the former
+ /// isn't present.
+ ///
+ /// This is also the directory in which the packed references file would be placed.
+ pub fn common_dir_resolved(&self) -> &Path {
+ self.common_dir.as_deref().unwrap_or(&self.git_dir)
+ }
+ }
+}
+
+/// A transaction on a file store
+pub struct Transaction<'s, 'p> {
+ store: &'s Store,
+ packed_transaction: Option<crate::store_impl::packed::Transaction>,
+ updates: Option<Vec<transaction::Edit>>,
+ packed_refs: transaction::PackedRefs<'p>,
+}
+
+pub(in crate::store_impl::file) fn path_to_name<'a>(path: impl Into<Cow<'a, Path>>) -> Cow<'a, BStr> {
+ let path = gix_path::into_bstr(path.into());
+ gix_path::to_unix_separators_on_windows(path)
+}
+
+///
+pub mod loose;
+mod overlay_iter;
+
+///
+pub mod iter {
+ pub use super::overlay_iter::{LooseThenPacked, Platform};
+
+ ///
+ pub mod loose_then_packed {
+ pub use super::super::overlay_iter::Error;
+ }
+}
+
+///
+pub mod log;
+
+///
+pub mod find;
+
+///
+pub mod transaction;
+
+///
+pub mod packed;
+
+mod raw_ext;
+pub use raw_ext::ReferenceExt;
diff --git a/vendor/gix-ref/src/store/file/overlay_iter.rs b/vendor/gix-ref/src/store/file/overlay_iter.rs
new file mode 100644
index 000000000..51f290c7b
--- /dev/null
+++ b/vendor/gix-ref/src/store/file/overlay_iter.rs
@@ -0,0 +1,432 @@
+use std::{
+ borrow::Cow,
+ cmp::Ordering,
+ io::Read,
+ iter::Peekable,
+ path::{Path, PathBuf},
+};
+
+use crate::{
+ file::{loose, loose::iter::SortedLoosePaths, path_to_name},
+ store_impl::{file, packed},
+ BString, FullName, Namespace, Reference,
+};
+
+/// An iterator stepping through sorted input of loose references and packed references, preferring loose refs over otherwise
+/// equivalent packed references.
+///
+/// All errors will be returned verbatim, while packed errors are depleted first if loose refs also error.
+pub struct LooseThenPacked<'p, 's> {
+ /// The repository's git-dir, used to compute paths relative to it in error messages.
+ git_dir: &'s Path,
+ /// The shared directory of the parent repository, if this store belongs to a linked worktree.
+ common_dir: Option<&'s Path>,
+ /// If set, this namespace prefix is stripped from every returned reference.
+ namespace: Option<&'s Namespace>,
+ /// Peekable iterator over packed references, if a packed-refs buffer was provided.
+ iter_packed: Option<Peekable<packed::Iter<'p>>>,
+ /// Sorted loose reference paths found in the git-dir.
+ iter_git_dir: Peekable<SortedLoosePaths>,
+ /// Sorted loose reference paths found in the common-dir, if present.
+ #[allow(dead_code)]
+ iter_common_dir: Option<Peekable<SortedLoosePaths>>,
+ /// Reusable buffer for reading the content of loose reference files.
+ buf: Vec<u8>,
+}
+
+/// Identifies which loose iterator a peeked item came from, and how to advance it.
+enum IterKind {
+ /// Advance only the git-dir iterator.
+ Git,
+ /// Advance the git-dir iterator, and drop the equally-named entry from the common-dir iterator.
+ GitAndConsumeCommon,
+ /// Advance only the common-dir iterator.
+ Common,
+}
+
+/// An intermediate structure to hold shared state alive long enough for iteration to happen.
+#[must_use = "Iterators should be obtained from this platform"]
+pub struct Platform<'s> {
+ /// The store whose references will be iterated.
+ store: &'s file::Store,
+ /// A stable snapshot of the packed-refs buffer, if one exists, shared across iterations.
+ packed: Option<file::packed::SharedBufferSnapshot>,
+}
+
+impl<'p, 's> LooseThenPacked<'p, 's> {
+ /// Strip our namespace, if any, from `r` before yielding it.
+ fn strip_namespace(&self, mut r: Reference) -> Reference {
+ if let Some(namespace) = &self.namespace {
+ r.strip_namespace(namespace);
+ }
+ r
+ }
+
+ /// Return the loose iterator indicated by `kind`, advancing the common-dir iterator first when
+ /// the same name exists in both locations (`GitAndConsumeCommon`) so it isn't yielded twice.
+ fn loose_iter(&mut self, kind: IterKind) -> &mut Peekable<SortedLoosePaths> {
+ match kind {
+ IterKind::GitAndConsumeCommon => {
+ drop(self.iter_common_dir.as_mut().map(|iter| iter.next()));
+ &mut self.iter_git_dir
+ }
+ IterKind::Git => &mut self.iter_git_dir,
+ IterKind::Common => self
+ .iter_common_dir
+ .as_mut()
+ .expect("caller knows there is a common iter"),
+ }
+ }
+
+ /// Convert a packed-refs iteration result into our `Reference`/`Error` types,
+ /// stripping the namespace on success.
+ fn convert_packed(
+ &mut self,
+ packed: Result<packed::Reference<'p>, packed::iter::Error>,
+ ) -> Result<Reference, Error> {
+ packed
+ .map(Into::into)
+ .map(|r| self.strip_namespace(r))
+ .map_err(|err| match err {
+ packed::iter::Error::Reference {
+ invalid_line,
+ line_number,
+ } => Error::PackedReference {
+ invalid_line,
+ line_number,
+ },
+ // Header errors surface when the packed iterator is created, not while stepping it.
+ packed::iter::Error::Header { .. } => unreachable!("this one only happens on iteration creation"),
+ })
+ }
+
+ /// Read the loose reference file at the peeked path into `self.buf` and decode it,
+ /// stripping the namespace on success. Error paths are reported relative to the git-dir
+ /// or common-dir, whichever contains the file.
+ fn convert_loose(&mut self, res: std::io::Result<(PathBuf, FullName)>) -> Result<Reference, Error> {
+ let (refpath, name) = res.map_err(Error::Traversal)?;
+ std::fs::File::open(&refpath)
+ .and_then(|mut f| {
+ // Reuse the buffer across iterations to avoid per-item allocations.
+ self.buf.clear();
+ f.read_to_end(&mut self.buf)
+ })
+ .map_err(|err| Error::ReadFileContents {
+ source: err,
+ path: refpath.to_owned(),
+ })?;
+ loose::Reference::try_from_path(name, &self.buf)
+ .map_err(|err| {
+ let relative_path = refpath
+ .strip_prefix(self.git_dir)
+ .ok()
+ .or_else(|| {
+ self.common_dir
+ .and_then(|common_dir| refpath.strip_prefix(common_dir).ok())
+ })
+ .expect("one of our bases contains the path");
+ Error::ReferenceCreation {
+ source: err,
+ relative_path: relative_path.into(),
+ }
+ })
+ .map(Into::into)
+ .map(|r| self.strip_namespace(r))
+ }
+}
+
+impl<'p, 's> Iterator for LooseThenPacked<'p, 's> {
+ type Item = Result<Reference, Error>;
+
+ // A three-way sorted merge: git-dir loose refs, common-dir loose refs, and packed refs.
+ // On equal names, loose wins over packed, and git-dir wins over common-dir.
+ fn next(&mut self) -> Option<Self::Item> {
+ // Skip common-dir entries that are worktree-private (or have no recognizable category),
+ // as those must not be surfaced through a linked worktree's shared directory.
+ fn advance_to_non_private(iter: &mut Peekable<SortedLoosePaths>) {
+ while let Some(Ok((_path, name))) = iter.peek() {
+ if name.category().map_or(true, |cat| cat.is_worktree_private()) {
+ iter.next();
+ } else {
+ break;
+ }
+ }
+ }
+
+ // Peek the next loose candidate across git-dir and common-dir, returning which iterator
+ // it came from. Errors are surfaced eagerly; on equal names the git-dir entry wins and
+ // the common-dir duplicate is marked for consumption.
+ fn peek_loose<'a>(
+ git_dir: &'a mut Peekable<SortedLoosePaths>,
+ common_dir: Option<&'a mut Peekable<SortedLoosePaths>>,
+ ) -> Option<(&'a std::io::Result<(PathBuf, FullName)>, IterKind)> {
+ match common_dir {
+ Some(common_dir) => match (git_dir.peek(), {
+ advance_to_non_private(common_dir);
+ common_dir.peek()
+ }) {
+ (None, None) => None,
+ (None, Some(res)) | (Some(_), Some(res @ Err(_))) => Some((res, IterKind::Common)),
+ (Some(res), None) | (Some(res @ Err(_)), Some(_)) => Some((res, IterKind::Git)),
+ (Some(r_gitdir @ Ok((_, git_dir_name))), Some(r_cd @ Ok((_, common_dir_name)))) => {
+ match git_dir_name.cmp(common_dir_name) {
+ Ordering::Less => Some((r_gitdir, IterKind::Git)),
+ Ordering::Equal => Some((r_gitdir, IterKind::GitAndConsumeCommon)),
+ Ordering::Greater => Some((r_cd, IterKind::Common)),
+ }
+ }
+ },
+ None => git_dir.peek().map(|r| (r, IterKind::Git)),
+ }
+ }
+ match self.iter_packed.as_mut() {
+ Some(packed_iter) => match (
+ peek_loose(&mut self.iter_git_dir, self.iter_common_dir.as_mut()),
+ packed_iter.peek(),
+ ) {
+ (None, None) => None,
+ (None, Some(_)) | (Some(_), Some(Err(_))) => {
+ let res = packed_iter.next().expect("peeked value exists");
+ Some(self.convert_packed(res))
+ }
+ (Some((_, kind)), None) | (Some((Err(_), kind)), Some(_)) => {
+ let res = self.loose_iter(kind).next().expect("prior peek");
+ Some(self.convert_loose(res))
+ }
+ // Both sides have a valid candidate: yield the smaller name; on ties the loose
+ // reference shadows the packed one, whose entry is dropped.
+ (Some((Ok((_, loose_name)), kind)), Some(Ok(packed))) => match loose_name.as_ref().cmp(packed.name) {
+ Ordering::Less => {
+ let res = self.loose_iter(kind).next().expect("prior peek");
+ Some(self.convert_loose(res))
+ }
+ Ordering::Equal => {
+ drop(packed_iter.next());
+ let res = self.loose_iter(kind).next().expect("prior peek");
+ Some(self.convert_loose(res))
+ }
+ Ordering::Greater => {
+ let res = packed_iter.next().expect("name retrieval configured");
+ Some(self.convert_packed(res))
+ }
+ },
+ },
+ // No packed-refs buffer: yield loose references only.
+ None => match peek_loose(&mut self.iter_git_dir, self.iter_common_dir.as_mut()) {
+ None => None,
+ Some((_, kind)) => self.loose_iter(kind).next().map(|res| self.convert_loose(res)),
+ },
+ }
+ }
+}
+
+impl<'s> Platform<'s> {
+ /// Return an iterator over all references, loose or `packed`, sorted by their name.
+ ///
+ /// Errors are returned similarly to what would happen when loose and packed refs were iterated by themselves.
+ pub fn all(&self) -> std::io::Result<LooseThenPacked<'_, '_>> {
+ self.store.iter_packed(self.packed.as_ref().map(|b| &***b))
+ }
+
+ /// As [`iter(…)`][file::Store::iter()], but filters by `prefix`, i.e. "refs/heads".
+ ///
+ /// Please note that "refs/heads" or "refs\\heads" is equivalent to "refs/heads/"
+ pub fn prefixed(&self, prefix: impl AsRef<Path>) -> std::io::Result<LooseThenPacked<'_, '_>> {
+ self.store
+ .iter_prefixed_packed(prefix, self.packed.as_ref().map(|b| &***b))
+ }
+}
+
+impl file::Store {
+ /// Return a platform to obtain iterator over all references, or prefixed ones, loose or packed, sorted by their name.
+ ///
+ /// Errors are returned similarly to what would happen when loose and packed refs were iterated by themselves.
+ pub fn iter(&self) -> Result<Platform<'_>, packed::buffer::open::Error> {
+ Ok(Platform {
+ store: self,
+ // Snapshot the packed-refs buffer up front so iteration sees a consistent view.
+ packed: self.assure_packed_refs_uptodate()?,
+ })
+ }
+}
+
+/// Describes where a loose-reference traversal starts and how names are shortened afterwards.
+#[derive(Debug)]
+pub(crate) enum IterInfo<'a> {
+ /// Iterate the `refs/` directory beneath `base`, with no prefix filtering.
+ Base {
+ base: &'a Path,
+ },
+ /// Iterate `iter_root` (a directory derived from `prefix`) with names relative to `base`.
+ BaseAndIterRoot {
+ base: &'a Path,
+ iter_root: PathBuf,
+ prefix: Cow<'a, Path>,
+ },
+ /// Iterate `base.join(prefix)`, with names relative to `base`.
+ PrefixAndBase {
+ base: &'a Path,
+ prefix: &'a Path,
+ },
+ /// Like `BaseAndIterRoot`, but the prefix didn't name an existing directory, so a filename
+ /// remainder is used to filter entries during iteration.
+ ComputedIterationRoot {
+ /// The root to iterate over
+ iter_root: PathBuf,
+ /// The top-level directory as boundary of all references, used to create their short-names after iteration
+ base: &'a Path,
+ /// The original prefix
+ prefix: Cow<'a, Path>,
+ /// The remainder of the prefix that wasn't a valid path
+ remainder: Option<BString>,
+ },
+}
+
+impl<'a> IterInfo<'a> {
+ /// The prefix to filter packed references by, if any.
+ fn prefix(&self) -> Option<&Path> {
+ match self {
+ IterInfo::Base { .. } => None,
+ IterInfo::PrefixAndBase { prefix, .. } => Some(*prefix),
+ IterInfo::ComputedIterationRoot { prefix, .. } | IterInfo::BaseAndIterRoot { prefix, .. } => {
+ prefix.as_ref().into()
+ }
+ }
+ }
+
+ /// Turn this description into a peekable, sorted traversal of loose reference paths.
+ fn into_iter(self) -> Peekable<SortedLoosePaths> {
+ match self {
+ IterInfo::Base { base } => SortedLoosePaths::at(base.join("refs"), base, None),
+ IterInfo::BaseAndIterRoot {
+ base,
+ iter_root,
+ prefix: _,
+ } => SortedLoosePaths::at(iter_root, base, None),
+ IterInfo::PrefixAndBase { base, prefix } => SortedLoosePaths::at(base.join(prefix), base, None),
+ IterInfo::ComputedIterationRoot {
+ iter_root,
+ base,
+ prefix: _,
+ remainder,
+ } => SortedLoosePaths::at(iter_root, base, remainder),
+ }
+ .peekable()
+ }
+
+ /// Build iteration info for `prefix` beneath `base`.
+ ///
+ /// The prefix must be relative and free of `.`/`..` components. If `base/prefix` is a
+ /// directory it becomes the iteration root; otherwise the last path component is treated
+ /// as a filename prefix to filter entries in the parent directory.
+ fn from_prefix(base: &'a Path, prefix: Cow<'a, Path>) -> std::io::Result<Self> {
+ if prefix.is_absolute() {
+ return Err(std::io::Error::new(
+ std::io::ErrorKind::InvalidInput,
+ "prefix must be a relative path, like 'refs/heads'",
+ ));
+ }
+ use std::path::Component::*;
+ if prefix.components().any(|c| matches!(c, CurDir | ParentDir)) {
+ return Err(std::io::Error::new(
+ std::io::ErrorKind::InvalidInput,
+ "Refusing to handle prefixes with relative path components",
+ ));
+ }
+ let iter_root = base.join(prefix.as_ref());
+ if iter_root.is_dir() {
+ Ok(IterInfo::BaseAndIterRoot {
+ base,
+ iter_root,
+ prefix,
+ })
+ } else {
+ // The prefix ends in a partial name, e.g. "refs/heads/feat" — iterate the parent
+ // directory and keep the final component as a byte-string filter.
+ let filename_prefix = iter_root
+ .file_name()
+ .map(ToOwned::to_owned)
+ .map(|p| {
+ gix_path::try_into_bstr(PathBuf::from(p))
+ .map(|p| p.into_owned())
+ .map_err(|_| {
+ std::io::Error::new(std::io::ErrorKind::InvalidInput, "prefix contains ill-formed UTF-8")
+ })
+ })
+ .transpose()?;
+ let iter_root = iter_root
+ .parent()
+ .expect("a parent is always there unless empty")
+ .to_owned();
+ Ok(IterInfo::ComputedIterationRoot {
+ base,
+ prefix,
+ iter_root,
+ remainder: filename_prefix,
+ })
+ }
+ }
+}
+
+impl file::Store {
+ /// Return an iterator over all references, loose or `packed`, sorted by their name.
+ ///
+ /// Errors are returned similarly to what would happen when loose and packed refs were iterated by themselves.
+ pub fn iter_packed<'s, 'p>(
+ &'s self,
+ packed: Option<&'p packed::Buffer>,
+ ) -> std::io::Result<LooseThenPacked<'p, 's>> {
+ match self.namespace.as_ref() {
+ // With a namespace, only iterate the namespaced portion of either directory.
+ Some(namespace) => self.iter_from_info(
+ IterInfo::PrefixAndBase {
+ base: self.git_dir(),
+ prefix: namespace.to_path(),
+ },
+ self.common_dir().map(|base| IterInfo::PrefixAndBase {
+ base,
+ prefix: namespace.to_path(),
+ }),
+ packed,
+ ),
+ None => self.iter_from_info(
+ IterInfo::Base { base: self.git_dir() },
+ self.common_dir().map(|base| IterInfo::Base { base }),
+ packed,
+ ),
+ }
+ }
+
+ /// As [`iter(…)`][file::Store::iter()], but filters by `prefix`, i.e. "refs/heads".
+ ///
+ /// Please note that "refs/heads" or "refs\\heads" is equivalent to "refs/heads/"
+ pub fn iter_prefixed_packed<'s, 'p>(
+ &'s self,
+ prefix: impl AsRef<Path>,
+ packed: Option<&'p packed::Buffer>,
+ ) -> std::io::Result<LooseThenPacked<'p, 's>> {
+ match self.namespace.as_ref() {
+ None => {
+ let prefix = prefix.as_ref();
+ let git_dir_info = IterInfo::from_prefix(self.git_dir(), prefix.into())?;
+ let common_dir_info = self
+ .common_dir()
+ .map(|base| IterInfo::from_prefix(base, prefix.into()))
+ .transpose()?;
+ self.iter_from_info(git_dir_info, common_dir_info, packed)
+ }
+ Some(namespace) => {
+ // Rewrite the prefix to live inside the namespace before computing iteration roots.
+ let prefix = namespace.to_owned().into_namespaced_prefix(prefix);
+ let git_dir_info = IterInfo::from_prefix(self.git_dir(), prefix.clone().into())?;
+ let common_dir_info = self
+ .common_dir()
+ .map(|base| IterInfo::from_prefix(base, prefix.into()))
+ .transpose()?;
+ self.iter_from_info(git_dir_info, common_dir_info, packed)
+ }
+ }
+ }
+
+ /// Assemble the merged loose/packed iterator from the computed iteration roots,
+ /// applying the loose prefix to the packed iterator as well so both sides agree.
+ fn iter_from_info<'s, 'p>(
+ &'s self,
+ git_dir_info: IterInfo<'_>,
+ common_dir_info: Option<IterInfo<'_>>,
+ packed: Option<&'p packed::Buffer>,
+ ) -> std::io::Result<LooseThenPacked<'p, 's>> {
+ Ok(LooseThenPacked {
+ git_dir: self.git_dir(),
+ common_dir: self.common_dir(),
+ iter_packed: match packed {
+ Some(packed) => Some(
+ match git_dir_info.prefix() {
+ Some(prefix) => packed.iter_prefixed(path_to_name(prefix).into_owned()),
+ None => packed.iter(),
+ }
+ .map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))?
+ .peekable(),
+ ),
+ None => None,
+ },
+ iter_git_dir: git_dir_info.into_iter(),
+ iter_common_dir: common_dir_info.map(IterInfo::into_iter),
+ buf: Vec::new(),
+ namespace: self.namespace.as_ref(),
+ })
+ }
+}
+
+mod error {
+ use std::{io, path::PathBuf};
+
+ use gix_object::bstr::BString;
+
+ use crate::store_impl::file;
+
+ /// The error returned by the [`LooseThenPacked`][super::LooseThenPacked] iterator.
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("The file system could not be traversed")]
+ Traversal(#[source] io::Error),
+ #[error("The ref file {path:?} could not be read in full")]
+ ReadFileContents { source: io::Error, path: PathBuf },
+ #[error("The reference at \"{relative_path}\" could not be instantiated")]
+ ReferenceCreation {
+ source: file::loose::reference::decode::Error,
+ relative_path: PathBuf,
+ },
+ #[error("Invalid reference in line {line_number}: {invalid_line:?}")]
+ PackedReference { invalid_line: BString, line_number: usize },
+ }
+}
+pub use error::Error;
diff --git a/vendor/gix-ref/src/store/file/packed.rs b/vendor/gix-ref/src/store/file/packed.rs
new file mode 100644
index 000000000..271ec7f5a
--- /dev/null
+++ b/vendor/gix-ref/src/store/file/packed.rs
@@ -0,0 +1,97 @@
+use std::path::PathBuf;
+
+use crate::store_impl::{file, packed};
+
+impl file::Store {
+ /// Return a packed transaction ready to receive updates. Use this to create or update `packed-refs`.
+ /// Note that if you already have a [`packed::Buffer`] then use its [`packed::Buffer::into_transaction()`] method instead.
+ pub(crate) fn packed_transaction(
+ &self,
+ lock_mode: gix_lock::acquire::Fail,
+ ) -> Result<packed::Transaction, transaction::Error> {
+ let lock = gix_lock::File::acquire_to_update_resource(self.packed_refs_path(), lock_mode, None)?;
+ // We 'steal' the possibly existing packed buffer which may save time if it's already there and fresh.
+ // If nothing else is happening, nobody will get to see the soon stale buffer either, but if so, they will pay
+ // for reloading it. That seems preferred over always loading up a new one.
+ Ok(packed::Transaction::new_from_pack_and_lock(
+ self.assure_packed_refs_uptodate()?,
+ lock,
+ ))
+ }
+
+ /// Try to open a new packed buffer. It's not an error if it doesn't exist, but yields `Ok(None)`.
+ pub fn open_packed_buffer(&self) -> Result<Option<packed::Buffer>, packed::buffer::open::Error> {
+ // Small files are read into memory; only larger ones are worth memory-mapping.
+ let need_more_than_this_many_bytes_to_use_mmap = 32 * 1024;
+ match packed::Buffer::open(self.packed_refs_path(), need_more_than_this_many_bytes_to_use_mmap) {
+ Ok(buf) => Ok(Some(buf)),
+ Err(packed::buffer::open::Error::Io(err)) if err.kind() == std::io::ErrorKind::NotFound => Ok(None),
+ Err(err) => Err(err),
+ }
+ }
+
+ /// Return a possibly cached packed buffer with shared ownership. At retrieval it will assure it's up to date, but
+ /// after that it can be considered a snapshot as it cannot change anymore.
+ ///
+ /// Use this to make successive calls to [`file::Store::try_find_packed()`]
+ /// or obtain iterators using [`file::Store::iter_packed()`] in a way that assures the packed-refs content won't change.
+ pub fn cached_packed_buffer(
+ &self,
+ ) -> Result<Option<file::packed::SharedBufferSnapshot>, packed::buffer::open::Error> {
+ self.assure_packed_refs_uptodate()
+ }
+
+ /// Return the path at which packed-refs would usually be stored
+ pub fn packed_refs_path(&self) -> PathBuf {
+ self.common_dir_resolved().join("packed-refs")
+ }
+
+ /// The path of the lock file guarding updates to the packed-refs file.
+ pub(crate) fn packed_refs_lock_path(&self) -> PathBuf {
+ let mut p = self.packed_refs_path();
+ p.set_extension("lock");
+ p
+ }
+}
+
+/// Error types for packed-refs transactions initiated by the file store.
+pub mod transaction {
+
+ use crate::store_impl::packed;
+
+ /// The error returned by [`file::Transaction::prepare()`][crate::file::Transaction::prepare()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("An existing pack couldn't be opened or read when preparing a transaction")]
+ BufferOpen(#[from] packed::buffer::open::Error),
+ #[error("The lock for a packed transaction could not be obtained")]
+ TransactionLock(#[from] gix_lock::acquire::Error),
+ }
+}
+
+/// An up-to-date snapshot of the packed refs buffer.
+pub type SharedBufferSnapshot = gix_features::fs::SharedSnapshot<packed::Buffer>;
+
+pub(crate) mod modifiable {
+ use gix_features::threading::OwnShared;
+
+ use crate::{file, packed};
+
+ /// A shared, mutable snapshot of the packed-refs buffer, refreshed based on file modification time.
+ pub(crate) type MutableSharedBuffer = OwnShared<gix_features::fs::MutableSnapshot<packed::Buffer>>;
+
+ impl file::Store {
+ /// Unconditionally reload the cached packed-refs buffer from disk.
+ pub(crate) fn force_refresh_packed_buffer(&self) -> Result<(), packed::buffer::open::Error> {
+ self.packed.force_refresh(|| {
+ let modified = self.packed_refs_path().metadata()?.modified()?;
+ self.open_packed_buffer().map(|packed| Some(modified).zip(packed))
+ })
+ }
+ /// Return the cached packed-refs snapshot, reloading it if the file's modification time changed.
+ pub(crate) fn assure_packed_refs_uptodate(
+ &self,
+ ) -> Result<Option<super::SharedBufferSnapshot>, packed::buffer::open::Error> {
+ self.packed.recent_snapshot(
+ || self.packed_refs_path().metadata().and_then(|m| m.modified()).ok(),
+ || self.open_packed_buffer(),
+ )
+ }
+ }
+}
diff --git a/vendor/gix-ref/src/store/file/raw_ext.rs b/vendor/gix-ref/src/store/file/raw_ext.rs
new file mode 100644
index 000000000..8bdf8392d
--- /dev/null
+++ b/vendor/gix-ref/src/store/file/raw_ext.rs
@@ -0,0 +1,174 @@
+use std::collections::BTreeSet;
+
+use gix_hash::ObjectId;
+
+use crate::{
+ packed, peel,
+ raw::Reference,
+ store_impl::{file, file::log},
+ Target,
+};
+
+/// Sealing trait, preventing implementations of [`ReferenceExt`] outside this crate.
+pub trait Sealed {}
+impl Sealed for crate::Reference {}
+
+/// A trait to extend [Reference][crate::Reference] with functionality requiring a [file::Store].
+pub trait ReferenceExt: Sealed {
+ /// A step towards obtaining forward or reverse iterators on reference logs.
+ fn log_iter<'a, 's>(&'a self, store: &'s file::Store) -> log::iter::Platform<'a, 's>;
+
+ /// For details, see [Reference::log_exists()].
+ fn log_exists(&self, store: &file::Store) -> bool;
+
+ /// For details, see [Reference::peel_to_id_in_place()].
+ ///
+ /// `find` looks up an object by id into the given buffer, returning its kind and data,
+ /// or `None` if the object does not exist.
+ fn peel_to_id_in_place<E: std::error::Error + Send + Sync + 'static>(
+ &mut self,
+ store: &file::Store,
+ find: impl FnMut(gix_hash::ObjectId, &mut Vec<u8>) -> Result<Option<(gix_object::Kind, &[u8])>, E>,
+ ) -> Result<ObjectId, peel::to_id::Error>;
+
+ /// For details, see [Reference::peel_to_id_in_place()], with support for a known stable packed buffer.
+ fn peel_to_id_in_place_packed<E: std::error::Error + Send + Sync + 'static>(
+ &mut self,
+ store: &file::Store,
+ find: impl FnMut(gix_hash::ObjectId, &mut Vec<u8>) -> Result<Option<(gix_object::Kind, &[u8])>, E>,
+ packed: Option<&packed::Buffer>,
+ ) -> Result<ObjectId, peel::to_id::Error>;
+
+ /// Follow this symbolic reference one level and return the ref it refers to.
+ ///
+ /// Returns `None` if this is not a symbolic reference, hence the leaf of the chain.
+ fn follow(&self, store: &file::Store) -> Option<Result<Reference, file::find::existing::Error>>;
+
+ /// Follow this symbolic reference one level and return the ref it refers to,
+ /// possibly providing access to `packed` references for lookup if it contains the referent.
+ ///
+ /// Returns `None` if this is not a symbolic reference, hence the leaf of the chain.
+ fn follow_packed(
+ &self,
+ store: &file::Store,
+ packed: Option<&packed::Buffer>,
+ ) -> Option<Result<Reference, file::find::existing::Error>>;
+}
+
+impl ReferenceExt for Reference {
+ fn log_iter<'a, 's>(&'a self, store: &'s file::Store) -> log::iter::Platform<'a, 's> {
+ log::iter::Platform {
+ store,
+ name: self.name.as_ref(),
+ buf: Vec::new(),
+ }
+ }
+
+ fn log_exists(&self, store: &file::Store) -> bool {
+ store
+ .reflog_exists(self.name.as_ref())
+ .expect("infallible name conversion")
+ }
+
+ // Obtains a fresh packed-refs snapshot, then delegates to the packed-aware variant.
+ fn peel_to_id_in_place<E: std::error::Error + Send + Sync + 'static>(
+ &mut self,
+ store: &file::Store,
+ find: impl FnMut(gix_hash::ObjectId, &mut Vec<u8>) -> Result<Option<(gix_object::Kind, &[u8])>, E>,
+ ) -> Result<ObjectId, peel::to_id::Error> {
+ let packed = store.assure_packed_refs_uptodate().map_err(|err| {
+ peel::to_id::Error::Follow(file::find::existing::Error::Find(file::find::Error::PackedOpen(err)))
+ })?;
+ self.peel_to_id_in_place_packed(store, find, packed.as_ref().map(|b| &***b))
+ }
+
+ fn peel_to_id_in_place_packed<E: std::error::Error + Send + Sync + 'static>(
+ &mut self,
+ store: &file::Store,
+ mut find: impl FnMut(gix_hash::ObjectId, &mut Vec<u8>) -> Result<Option<(gix_object::Kind, &[u8])>, E>,
+ packed: Option<&packed::Buffer>,
+ ) -> Result<ObjectId, peel::to_id::Error> {
+ match self.peeled {
+ // Already peeled (e.g. from a packed-refs peel line) — just adopt the cached id.
+ Some(peeled) => {
+ self.target = Target::Peeled(peeled.to_owned());
+ Ok(peeled)
+ }
+ None => {
+ // Phase 1: resolve symbolic-ref chains until a peeled target is reached,
+ // guarding against cycles and overly deep chains.
+ if self.target.kind() == crate::Kind::Symbolic {
+ let mut seen = BTreeSet::new();
+ let cursor = &mut *self;
+ while let Some(next) = cursor.follow_packed(store, packed) {
+ let next = next?;
+ if seen.contains(&next.name) {
+ return Err(peel::to_id::Error::Cycle {
+ start_absolute: store.reference_path(cursor.name.as_ref()),
+ });
+ }
+ *cursor = next;
+ seen.insert(cursor.name.clone());
+ const MAX_REF_DEPTH: usize = 5;
+ if seen.len() == MAX_REF_DEPTH {
+ return Err(peel::to_id::Error::DepthLimitExceeded {
+ max_depth: MAX_REF_DEPTH,
+ });
+ }
+ }
+ };
+ // Phase 2: follow tag objects until a non-tag object is found.
+ let mut buf = Vec::new();
+ let mut oid = self.target.try_id().expect("peeled ref").to_owned();
+ let peeled_id = loop {
+ let (kind, data) = find(oid, &mut buf)
+ .map_err(|err| Box::new(err) as Box<dyn std::error::Error + Send + Sync + 'static>)?
+ .ok_or_else(|| peel::to_id::Error::NotFound {
+ oid,
+ name: self.name.0.clone(),
+ })?;
+ match kind {
+ gix_object::Kind::Tag => {
+ oid = gix_object::TagRefIter::from_bytes(data).target_id().map_err(|_err| {
+ peel::to_id::Error::NotFound {
+ oid,
+ name: self.name.0.clone(),
+ }
+ })?;
+ }
+ _ => break oid,
+ };
+ };
+ // Cache the result in place so subsequent peels are free.
+ self.peeled = Some(peeled_id);
+ self.target = Target::Peeled(peeled_id);
+ Ok(peeled_id)
+ }
+ }
+ }
+
+ fn follow(&self, store: &file::Store) -> Option<Result<Reference, file::find::existing::Error>> {
+ let packed = match store
+ .assure_packed_refs_uptodate()
+ .map_err(|err| file::find::existing::Error::Find(file::find::Error::PackedOpen(err)))
+ {
+ Ok(packed) => packed,
+ Err(err) => return Some(Err(err)),
+ };
+ self.follow_packed(store, packed.as_ref().map(|b| &***b))
+ }
+
+ fn follow_packed(
+ &self,
+ store: &file::Store,
+ packed: Option<&packed::Buffer>,
+ ) -> Option<Result<Reference, file::find::existing::Error>> {
+ match self.peeled {
+ // A cached peel short-circuits the lookup with a synthetic peeled reference.
+ Some(peeled) => Some(Ok(Reference {
+ name: self.name.clone(),
+ target: Target::Peeled(peeled),
+ peeled: None,
+ })),
+ None => match &self.target {
+ Target::Peeled(_) => None,
+ Target::Symbolic(full_name) => match store.try_find_packed(full_name.as_ref(), packed) {
+ Ok(Some(next)) => Some(Ok(next)),
+ Ok(None) => Some(Err(file::find::existing::Error::NotFound {
+ name: full_name.to_path().to_owned(),
+ })),
+ Err(err) => Some(Err(file::find::existing::Error::Find(err))),
+ },
+ },
+ }
+ }
+}
diff --git a/vendor/gix-ref/src/store/file/transaction/commit.rs b/vendor/gix-ref/src/store/file/transaction/commit.rs
new file mode 100644
index 000000000..58e6a7c99
--- /dev/null
+++ b/vendor/gix-ref/src/store/file/transaction/commit.rs
@@ -0,0 +1,201 @@
+use crate::{
+ store_impl::file::{transaction::PackedRefs, Transaction},
+ transaction::{Change, LogChange, RefEdit, RefLog},
+ Target,
+};
+
+impl<'s, 'p> Transaction<'s, 'p> {
+ /// Make all [prepared][Transaction::prepare()] permanent and return the performed edits which represent the current
+ /// state of the affected refs in the ref store in that instant. Please note that the obtained edits may have been
+ /// adjusted to contain more dependent edits or additional information.
+ /// `committer` is used in the reflog and only if the reflog is actually written, which is why it is optional. Please note
+ /// that if `None` is passed and the reflog needs to be written, the operation will be aborted late and a few refs may have been
+ /// successfully committed already, making clear the non-atomic nature of multi-file edits.
+ ///
+ /// On error the transaction may have been performed partially, depending on the nature of the error, and no attempt to roll back
+ /// partial changes is made.
+ ///
+ /// In this stage, we perform the following operations:
+ ///
+ /// * update the ref log
+ /// * move updated refs into place
+ /// * delete reflogs and empty parent directories
+ /// * delete packed refs
+ /// * delete their corresponding reference (if applicable)
+ /// along with empty parent directories
+ ///
+ /// Note that transactions will be prepared automatically as needed.
+ pub fn commit<'a>(self, committer: impl Into<Option<gix_actor::SignatureRef<'a>>>) -> Result<Vec<RefEdit>, Error> {
+ self.commit_inner(committer.into())
+ }
+
+ fn commit_inner(self, committer: Option<gix_actor::SignatureRef<'_>>) -> Result<Vec<RefEdit>, Error> {
+ let mut updates = self.updates.expect("BUG: must call prepare before commit");
+ // When updates are routed into packed-refs, the loose source references must be removed.
+ let delete_loose_refs = matches!(
+ self.packed_refs,
+ PackedRefs::DeletionsAndNonSymbolicUpdatesRemoveLooseSourceReference(_)
+ );
+
+ // Phase 1: perform updates first so live commits remain referenced
+ for change in updates.iter_mut() {
+ assert!(!change.update.deref, "Deref mode is turned into splits and turned off");
+ match &change.update.change {
+ // reflog first, then reference
+ Change::Update { log, new, expected } => {
+ let lock = change.lock.take();
+ let (update_ref, update_reflog) = match log.mode {
+ RefLog::Only => (false, true),
+ RefLog::AndReference => (true, true),
+ };
+ if update_reflog {
+ let log_update = match new {
+ Target::Symbolic(_) => {
+ // no reflog for symref changes, unless the ref is new and we can obtain a peeled id
+ // identified by the expectation of what could be there, as is the case when cloning.
+ match expected {
+ PreviousValue::ExistingMustMatch(Target::Peeled(oid)) => {
+ Some((Some(gix_hash::ObjectId::null(oid.kind())), oid))
+ }
+ _ => None,
+ }
+ }
+ Target::Peeled(new_oid) => {
+ let previous = match expected {
+ PreviousValue::MustExistAndMatch(Target::Peeled(oid)) => Some(oid.to_owned()),
+ _ => None,
+ }
+ .or(change.leaf_referent_previous_oid);
+ Some((previous, new_oid))
+ }
+ };
+ if let Some((previous, new_oid)) = log_update {
+ // Only write a log line if the value actually changes.
+ let do_update = previous.as_ref().map_or(true, |previous| previous != new_oid);
+ if do_update {
+ self.store.reflog_create_or_append(
+ change.update.name.as_ref(),
+ previous,
+ new_oid,
+ committer,
+ log.message.as_ref(),
+ log.force_create_reflog,
+ )?;
+ }
+ }
+ }
+ // Don't do anything else while keeping the lock after potentially updating the reflog.
+ // We delay deletion of the reference and dropping the lock to after the packed-refs were
+ // safely written.
+ if delete_loose_refs && matches!(new, Target::Peeled(_)) {
+ change.lock = lock;
+ continue;
+ }
+ if update_ref {
+ if let Some(Err(err)) = lock.map(|l| l.commit()) {
+ // TODO: when Kind::IsADirectory becomes stable, use that.
+ // A directory may be in the way, e.g. an empty leftover from a deleted ref —
+ // try removing it and committing the lock once more.
+ let err = if err.instance.resource_path().is_dir() {
+ gix_tempfile::remove_dir::empty_depth_first(err.instance.resource_path())
+ .map_err(|io_err| std::io::Error::new(std::io::ErrorKind::Other, io_err))
+ .and_then(|_| err.instance.commit().map_err(|err| err.error))
+ .err()
+ } else {
+ Some(err.error)
+ };
+
+ if let Some(err) = err {
+ return Err(Error::LockCommit {
+ source: err,
+ full_name: change.name(),
+ });
+ }
+ };
+ }
+ }
+ Change::Delete { .. } => {}
+ }
+ }
+
+ // Phase 2: delete reflogs of deleted references, cleaning up now-empty parent directories.
+ for change in updates.iter_mut() {
+ let (reflog_root, relative_name) = self.store.reflog_base_and_relative_path(change.update.name.as_ref());
+ match &change.update.change {
+ Change::Update { .. } => {}
+ Change::Delete { .. } => {
+ // Reflog deletion happens first — if it fails, a ref without a log is less terrible than
+ // a log without a reference.
+ let reflog_path = reflog_root.join(relative_name);
+ if let Err(err) = std::fs::remove_file(&reflog_path) {
+ if err.kind() != std::io::ErrorKind::NotFound {
+ return Err(Error::DeleteReflog {
+ source: err,
+ full_name: change.name(),
+ });
+ }
+ } else {
+ gix_tempfile::remove_dir::empty_upward_until_boundary(
+ reflog_path.parent().expect("never without parent"),
+ &reflog_root,
+ )
+ .ok();
+ }
+ }
+ }
+ }
+
+ // Phase 3: commit the packed-refs transaction, if any.
+ if let Some(t) = self.packed_transaction {
+ t.commit().map_err(Error::PackedTransactionCommit)?;
+ // Always refresh ourselves right away to avoid races. We ignore errors as there may be many reasons this fails, and it's not
+ // critical to be done here. In other words, the pack may be refreshed at a later time and then it might work.
+ self.store.force_refresh_packed_buffer().ok();
+ }
+
+ // Phase 4: now that packed-refs are safely written, delete loose reference files and release their locks.
+ for change in updates.iter_mut() {
+ let take_lock_and_delete = match &change.update.change {
+ Change::Update {
+ log: LogChange { mode, .. },
+ new,
+ ..
+ } => delete_loose_refs && *mode == RefLog::AndReference && matches!(new, Target::Peeled(_)),
+ Change::Delete { log: mode, .. } => *mode == RefLog::AndReference,
+ };
+ if take_lock_and_delete {
+ let lock = change.lock.take();
+ let reference_path = self.store.reference_path(change.update.name.as_ref());
+ if let Err(err) = std::fs::remove_file(reference_path) {
+ if err.kind() != std::io::ErrorKind::NotFound {
+ return Err(Error::DeleteReference {
+ err,
+ full_name: change.name(),
+ });
+ }
+ }
+ drop(lock)
+ }
+ }
+ Ok(updates.into_iter().map(|edit| edit.update).collect())
+ }
+}
+mod error {
+ use gix_object::bstr::BString;
+
+ use crate::store_impl::{file, packed};
+
+ /// The error returned by various [`Transaction`][super::Transaction] methods.
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("The packed-ref transaction could not be committed")]
+ PackedTransactionCommit(#[source] packed::transaction::commit::Error),
+ #[error("Edit preprocessing failed with error")]
+ PreprocessingFailed { source: std::io::Error },
+ #[error("The change for reference {full_name:?} could not be committed")]
+ LockCommit { source: std::io::Error, full_name: BString },
+ #[error("The reference {full_name} could not be deleted")]
+ DeleteReference { full_name: BString, err: std::io::Error },
+ #[error("The reflog of reference {full_name:?} could not be deleted")]
+ DeleteReflog { full_name: BString, source: std::io::Error },
+ #[error("The reflog could not be created or updated")]
+ CreateOrUpdateRefLog(#[from] file::log::create_or_update::Error),
+ }
+}
+pub use error::Error;
+
+use crate::transaction::PreviousValue;
diff --git a/vendor/gix-ref/src/store/file/transaction/mod.rs b/vendor/gix-ref/src/store/file/transaction/mod.rs
new file mode 100644
index 000000000..712f0320d
--- /dev/null
+++ b/vendor/gix-ref/src/store/file/transaction/mod.rs
@@ -0,0 +1,108 @@
+use std::fmt::Formatter;
+
+use gix_hash::ObjectId;
+use gix_object::bstr::BString;
+
+use crate::{
+ store_impl::{file, file::Transaction},
+ transaction::RefEdit,
+};
+
+/// A function receiving an object id to resolve, returning its decompressed bytes,
+/// used to obtain the peeled object ids for storage in packed-refs files.
+///
+/// Resolution means to follow tag objects until the end of the chain.
+pub type FindObjectFn<'a> = dyn FnMut(
+ gix_hash::ObjectId,
+ &mut Vec<u8>,
+ ) -> Result<Option<gix_object::Kind>, Box<dyn std::error::Error + Send + Sync + 'static>>
+ + 'a;
+
+/// How to handle packed refs during a transaction
+pub enum PackedRefs<'a> {
+ /// Only propagate deletions of references. This is the default
+ DeletionsOnly,
+ /// Propagate deletions as well as updates to references which are peeled, that is contain an object id
+ DeletionsAndNonSymbolicUpdates(Box<FindObjectFn<'a>>),
+ /// Propagate deletions as well as updates to references which are peeled, that is contain an object id. Furthermore delete the
+ /// reference which is originally updated if it exists. If it doesn't, the new value will be written into the packed ref right away.
+ /// Note that this doesn't affect symbolic references at all, which can't be placed into packed refs.
+ DeletionsAndNonSymbolicUpdatesRemoveLooseSourceReference(Box<FindObjectFn<'a>>),
+}
+
+impl Default for PackedRefs<'_> {
+ fn default() -> Self {
+ PackedRefs::DeletionsOnly
+ }
+}
+
+#[derive(Debug)]
+pub(in crate::store_impl::file) struct Edit {
+ update: RefEdit,
+ lock: Option<gix_lock::Marker>,
+ /// Set if this update is coming from a symbolic reference and used to make it appear like it is the one that is handled,
+ /// instead of the referent reference.
+ parent_index: Option<usize>,
+ /// For symbolic refs, this is the previous OID to put into the reflog instead of our own previous value. It's the
+ /// peeled value of the leaf referent.
+ leaf_referent_previous_oid: Option<ObjectId>,
+}
+
+impl Edit {
+ fn name(&self) -> BString {
+ self.update.name.0.clone()
+ }
+}
+
+impl std::borrow::Borrow<RefEdit> for Edit {
+ fn borrow(&self) -> &RefEdit {
+ &self.update
+ }
+}
+
+impl std::borrow::BorrowMut<RefEdit> for Edit {
+ fn borrow_mut(&mut self) -> &mut RefEdit {
+ &mut self.update
+ }
+}
+
+/// Edits
+impl file::Store {
+ /// Open a transaction with the given `edits`, and determine how to fail if a `lock` cannot be obtained.
+ /// A snapshot of packed references will be obtained automatically if needed to fulfill this transaction
+ /// and will be provided as result of a successful transaction. Note that upon transaction failure, packed-refs
+ /// will never have been altered.
+ ///
+ /// The transaction inherits the parent namespace.
+ pub fn transaction(&self) -> Transaction<'_, '_> {
+ Transaction {
+ store: self,
+ packed_transaction: None,
+ updates: None,
+ packed_refs: PackedRefs::default(),
+ }
+ }
+}
+
+impl<'s, 'p> Transaction<'s, 'p> {
+ /// Configure the way packed refs are handled during the transaction
+ pub fn packed_refs(mut self, packed_refs: PackedRefs<'p>) -> Self {
+ self.packed_refs = packed_refs;
+ self
+ }
+}
+
+impl std::fmt::Debug for Transaction<'_, '_> {
+ fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+ f.debug_struct("Transaction")
+ .field("store", self.store)
+ .field("edits", &self.updates.as_ref().map(|u| u.len()))
+ .finish_non_exhaustive()
+ }
+}
+
+///
+pub mod prepare;
+
+///
+pub mod commit;
diff --git a/vendor/gix-ref/src/store/file/transaction/prepare.rs b/vendor/gix-ref/src/store/file/transaction/prepare.rs
new file mode 100644
index 000000000..fdb29619d
--- /dev/null
+++ b/vendor/gix-ref/src/store/file/transaction/prepare.rs
@@ -0,0 +1,478 @@
+use crate::{
+ packed,
+ packed::transaction::buffer_into_transaction,
+ store_impl::{
+ file,
+ file::{
+ loose,
+ transaction::{Edit, PackedRefs},
+ Transaction,
+ },
+ },
+ transaction::{Change, LogChange, PreviousValue, RefEdit, RefEditsExt, RefLog},
+ FullName, FullNameRef, Reference, Target,
+};
+
+impl<'s, 'p> Transaction<'s, 'p> {
+ fn lock_ref_and_apply_change(
+ store: &file::Store,
+ lock_fail_mode: gix_lock::acquire::Fail,
+ packed: Option<&packed::Buffer>,
+ change: &mut Edit,
+ has_global_lock: bool,
+ direct_to_packed_refs: bool,
+ ) -> Result<(), Error> {
+ use std::io::Write;
+ assert!(
+ change.lock.is_none(),
+ "locks can only be acquired once and it's all or nothing"
+ );
+
+ let existing_ref = store
+ .ref_contents(change.update.name.as_ref())
+ .map_err(Error::from)
+ .and_then(|maybe_loose| {
+ maybe_loose
+ .map(|buf| {
+ loose::Reference::try_from_path(change.update.name.clone(), &buf)
+ .map(Reference::from)
+ .map_err(Error::from)
+ })
+ .transpose()
+ })
+ .or_else(|err| match err {
+ Error::ReferenceDecode(_) => Ok(None),
+ other => Err(other),
+ })
+ .and_then(|maybe_loose| match (maybe_loose, packed) {
+ (None, Some(packed)) => packed
+ .try_find(change.update.name.as_ref())
+ .map(|opt| opt.map(Into::into))
+ .map_err(Error::from),
+ (None, None) => Ok(None),
+ (maybe_loose, _) => Ok(maybe_loose),
+ });
+ let lock = match &mut change.update.change {
+ Change::Delete { expected, .. } => {
+ let (base, relative_path) = store.reference_path_with_base(change.update.name.as_ref());
+ let lock = if has_global_lock {
+ None
+ } else {
+ gix_lock::Marker::acquire_to_hold_resource(
+ base.join(relative_path.as_ref()),
+ lock_fail_mode,
+ Some(base.clone().into_owned()),
+ )
+ .map_err(|err| Error::LockAcquire {
+ source: err,
+ full_name: "borrowcheck won't allow change.name()".into(),
+ })?
+ .into()
+ };
+
+ let existing_ref = existing_ref?;
+ match (&expected, &existing_ref) {
+ (PreviousValue::MustNotExist, _) => {
+ panic!("BUG: MustNotExist constraint makes no sense if references are to be deleted")
+ }
+ (PreviousValue::ExistingMustMatch(_), None)
+ | (PreviousValue::MustExist, Some(_))
+ | (PreviousValue::Any, Some(_))
+ | (PreviousValue::Any, None) => {}
+ (PreviousValue::MustExist, None) | (PreviousValue::MustExistAndMatch(_), None) => {
+ return Err(Error::DeleteReferenceMustExist {
+ full_name: change.name(),
+ })
+ }
+ (PreviousValue::MustExistAndMatch(previous), Some(existing))
+ | (PreviousValue::ExistingMustMatch(previous), Some(existing)) => {
+ let actual = existing.target.clone();
+ if *previous != actual {
+ let expected = previous.clone();
+ return Err(Error::ReferenceOutOfDate {
+ full_name: change.name(),
+ expected,
+ actual,
+ });
+ }
+ }
+ }
+
+ // Keep the previous value for the caller and ourselves. Maybe they want to keep a log of sorts.
+ if let Some(existing) = existing_ref {
+ *expected = PreviousValue::MustExistAndMatch(existing.target);
+ }
+
+ lock
+ }
+ Change::Update { expected, new, .. } => {
+ let (base, relative_path) = store.reference_path_with_base(change.update.name.as_ref());
+ let obtain_lock = || {
+ gix_lock::File::acquire_to_update_resource(
+ base.join(relative_path.as_ref()),
+ lock_fail_mode,
+ Some(base.clone().into_owned()),
+ )
+ .map_err(|err| Error::LockAcquire {
+ source: err,
+ full_name: "borrowcheck won't allow change.name() and this will be corrected by caller".into(),
+ })
+ };
+ let mut lock = (!has_global_lock).then(obtain_lock).transpose()?;
+
+ let existing_ref = existing_ref?;
+ match (&expected, &existing_ref) {
+ (PreviousValue::Any, _)
+ | (PreviousValue::MustExist, Some(_))
+ | (PreviousValue::MustNotExist, None)
+ | (PreviousValue::ExistingMustMatch(_), None) => {}
+ (PreviousValue::MustExist, None) => {
+ let expected = Target::Peeled(store.object_hash.null());
+ let full_name = change.name();
+ return Err(Error::MustExist { full_name, expected });
+ }
+ (PreviousValue::MustNotExist, Some(existing)) => {
+ if existing.target != *new {
+ let new = new.clone();
+ return Err(Error::MustNotExist {
+ full_name: change.name(),
+ actual: existing.target.clone(),
+ new,
+ });
+ }
+ }
+ (PreviousValue::MustExistAndMatch(previous), Some(existing))
+ | (PreviousValue::ExistingMustMatch(previous), Some(existing)) => {
+ if *previous != existing.target {
+ let actual = existing.target.clone();
+ let expected = previous.to_owned();
+ let full_name = change.name();
+ return Err(Error::ReferenceOutOfDate {
+ full_name,
+ actual,
+ expected,
+ });
+ }
+ }
+
+ (PreviousValue::MustExistAndMatch(previous), None) => {
+ let expected = previous.to_owned();
+ let full_name = change.name();
+ return Err(Error::MustExist { full_name, expected });
+ }
+ };
+
+ fn new_would_change_existing(new: &Target, existing: &Target) -> (bool, bool) {
+ match (new, existing) {
+ (Target::Peeled(new), Target::Peeled(old)) => (old != new, false),
+ (Target::Symbolic(new), Target::Symbolic(old)) => (old != new, true),
+ (Target::Peeled(_), _) => (true, false),
+ (Target::Symbolic(_), _) => (true, true),
+ }
+ }
+
+ let (is_effective, is_symbolic) = if let Some(existing) = existing_ref {
+ let (effective, is_symbolic) = new_would_change_existing(new, &existing.target);
+ *expected = PreviousValue::MustExistAndMatch(existing.target);
+ (effective, is_symbolic)
+ } else {
+ (true, matches!(new, Target::Symbolic(_)))
+ };
+
+ if (is_effective && !direct_to_packed_refs) || is_symbolic {
+ let mut lock = lock.take().map(Ok).unwrap_or_else(obtain_lock)?;
+
+ lock.with_mut(|file| match new {
+ Target::Peeled(oid) => write!(file, "{oid}"),
+ Target::Symbolic(name) => write!(file, "ref: {}", name.0),
+ })?;
+ Some(lock.close()?)
+ } else {
+ None
+ }
+ }
+ };
+ change.lock = lock;
+ Ok(())
+ }
+}
+
+impl<'s, 'p> Transaction<'s, 'p> {
+ /// Prepare for calling [`commit(…)`][Transaction::commit()] in a way that can be rolled back perfectly.
+ ///
+ /// If the operation succeeds, the transaction can be committed or dropped to cause a rollback automatically.
+ /// Rollbacks happen automatically on failure and they tend to be perfect.
+ /// This method is idempotent.
+ pub fn prepare(
+ mut self,
+ edits: impl IntoIterator<Item = RefEdit>,
+ ref_files_lock_fail_mode: gix_lock::acquire::Fail,
+ packed_refs_lock_fail_mode: gix_lock::acquire::Fail,
+ ) -> Result<Self, Error> {
+ assert!(self.updates.is_none(), "BUG: Must not call prepare(…) multiple times");
+ let store = self.store;
+ let mut updates: Vec<_> = edits
+ .into_iter()
+ .map(|update| Edit {
+ update,
+ lock: None,
+ parent_index: None,
+ leaf_referent_previous_oid: None,
+ })
+ .collect();
+ updates
+ .pre_process(
+ |name| {
+ let symbolic_refs_are_never_packed = None;
+ store
+ .find_existing_inner(name, symbolic_refs_are_never_packed)
+ .map(|r| r.target)
+ .ok()
+ },
+ |idx, update| Edit {
+ update,
+ lock: None,
+ parent_index: Some(idx),
+ leaf_referent_previous_oid: None,
+ },
+ )
+ .map_err(Error::PreprocessingFailed)?;
+
+ let mut maybe_updates_for_packed_refs = match self.packed_refs {
+ PackedRefs::DeletionsAndNonSymbolicUpdates(_)
+ | PackedRefs::DeletionsAndNonSymbolicUpdatesRemoveLooseSourceReference(_) => Some(0_usize),
+ PackedRefs::DeletionsOnly => None,
+ };
+ if maybe_updates_for_packed_refs.is_some()
+ || self.store.packed_refs_path().is_file()
+ || self.store.packed_refs_lock_path().is_file()
+ {
+ let mut edits_for_packed_transaction = Vec::<RefEdit>::new();
+ let mut needs_packed_refs_lookups = false;
+ for edit in updates.iter() {
+ let log_mode = match edit.update.change {
+ Change::Update {
+ log: LogChange { mode, .. },
+ ..
+ } => mode,
+ Change::Delete { log, .. } => log,
+ };
+ if log_mode == RefLog::Only {
+ continue;
+ }
+ let name = match possibly_adjust_name_for_prefixes(edit.update.name.as_ref()) {
+ Some(n) => n,
+ None => continue,
+ };
+ if let Some(ref mut num_updates) = maybe_updates_for_packed_refs {
+ if let Change::Update {
+ new: Target::Peeled(_), ..
+ } = edit.update.change
+ {
+ edits_for_packed_transaction.push(RefEdit {
+ name,
+ ..edit.update.clone()
+ });
+ *num_updates += 1;
+ }
+ continue;
+ }
+ match edit.update.change {
+ Change::Update {
+ expected: PreviousValue::ExistingMustMatch(_) | PreviousValue::MustExistAndMatch(_),
+ ..
+ } => needs_packed_refs_lookups = true,
+ Change::Delete { .. } => {
+ edits_for_packed_transaction.push(RefEdit {
+ name,
+ ..edit.update.clone()
+ });
+ }
+ _ => {
+ needs_packed_refs_lookups = true;
+ }
+ }
+ }
+
+ if !edits_for_packed_transaction.is_empty() || needs_packed_refs_lookups {
+ // What follows means that we will only create a transaction if we have to access packed refs for looking
+ // up current ref values, or that we definitely have a transaction if we need to make updates. Otherwise
+ // we may have no transaction at all which isn't required if we had none and would only try making deletions.
+ let packed_transaction: Option<_> =
+ if maybe_updates_for_packed_refs.unwrap_or(0) > 0 || self.store.packed_refs_lock_path().is_file() {
+ // We have to create a packed-ref even if it doesn't exist
+ self.store
+ .packed_transaction(packed_refs_lock_fail_mode)
+ .map_err(|err| match err {
+ file::packed::transaction::Error::BufferOpen(err) => Error::from(err),
+ file::packed::transaction::Error::TransactionLock(err) => {
+ Error::PackedTransactionAcquire(err)
+ }
+ })?
+ .into()
+ } else {
+ // A packed transaction is optional - we only have deletions that can't be made if
+ // no packed-ref file exists anyway
+ self.store
+ .assure_packed_refs_uptodate()?
+ .map(|p| {
+ buffer_into_transaction(p, packed_refs_lock_fail_mode)
+ .map_err(Error::PackedTransactionAcquire)
+ })
+ .transpose()?
+ };
+ if let Some(transaction) = packed_transaction {
+ self.packed_transaction = Some(match &mut self.packed_refs {
+ PackedRefs::DeletionsAndNonSymbolicUpdatesRemoveLooseSourceReference(f)
+ | PackedRefs::DeletionsAndNonSymbolicUpdates(f) => {
+ transaction.prepare(edits_for_packed_transaction, f)?
+ }
+ PackedRefs::DeletionsOnly => transaction
+ .prepare(edits_for_packed_transaction, &mut |_, _| {
+ unreachable!("BUG: deletions never trigger object lookups")
+ })?,
+ });
+ }
+ }
+ }
+
+ for cid in 0..updates.len() {
+ let change = &mut updates[cid];
+ if let Err(err) = Self::lock_ref_and_apply_change(
+ self.store,
+ ref_files_lock_fail_mode,
+ self.packed_transaction.as_ref().and_then(|t| t.buffer()),
+ change,
+ self.packed_transaction.is_some(),
+ matches!(
+ self.packed_refs,
+ PackedRefs::DeletionsAndNonSymbolicUpdatesRemoveLooseSourceReference(_)
+ ),
+ ) {
+ let err = match err {
+ Error::LockAcquire {
+ source,
+ full_name: _bogus,
+ } => Error::LockAcquire {
+ source,
+ full_name: {
+ let mut cursor = change.parent_index;
+ let mut ref_name = change.name();
+ while let Some(parent_idx) = cursor {
+ let parent = &updates[parent_idx];
+ if parent.parent_index.is_none() {
+ ref_name = parent.name();
+ } else {
+ cursor = parent.parent_index;
+ }
+ }
+ ref_name
+ },
+ },
+ other => other,
+ };
+ return Err(err);
+ };
+
+ // traverse parent chain from leaf/peeled ref and set the leaf previous oid accordingly
+ // to help with their reflog entries
+ if let (Some(crate::TargetRef::Peeled(oid)), Some(parent_idx)) =
+ (change.update.change.previous_value(), change.parent_index)
+ {
+ let oid = oid.to_owned();
+ let mut parent_idx_cursor = Some(parent_idx);
+ while let Some(parent) = parent_idx_cursor.take().map(|idx| &mut updates[idx]) {
+ parent_idx_cursor = parent.parent_index;
+ parent.leaf_referent_previous_oid = Some(oid);
+ }
+ }
+ }
+ self.updates = Some(updates);
+ Ok(self)
+ }
+
+ /// Rollback all intermediate state and return the `RefEdits` as we know them thus far.
+ ///
+ /// Note that they have been altered compared to what was initially provided as they have
+ /// been split and know about their current state on disk.
+ ///
+ /// # Note
+ ///
+ /// A rollback happens automatically as this instance is dropped as well.
+ pub fn rollback(self) -> Vec<RefEdit> {
+ self.updates
+ .map(|updates| updates.into_iter().map(|u| u.update).collect())
+ .unwrap_or_default()
+ }
+}
+
+fn possibly_adjust_name_for_prefixes(name: &FullNameRef) -> Option<FullName> {
+ match name.category_and_short_name() {
+ Some((c, sn)) => {
+ use crate::Category::*;
+ let sn = FullNameRef::new_unchecked(sn);
+ match c {
+ Bisect | Rewritten | WorktreePrivate | LinkedPseudoRef { .. } | PseudoRef | MainPseudoRef => None,
+ Tag | LocalBranch | RemoteBranch | Note => name.into(),
+ MainRef | LinkedRef { .. } => sn
+ .category()
+ .map_or(false, |cat| !cat.is_worktree_private())
+ .then_some(sn),
+ }
+ .map(|n| n.to_owned())
+ }
+ None => Some(name.to_owned()), // allow (uncategorized/very special) refs to be packed
+ }
+}
+
+mod error {
+ use gix_object::bstr::BString;
+
+ use crate::{
+ store_impl::{file, packed},
+ Target,
+ };
+
+ /// The error returned by various [`Transaction`][super::Transaction] methods.
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("The packed ref buffer could not be loaded")]
+ Packed(#[from] packed::buffer::open::Error),
+ #[error("The lock for the packed-ref file could not be obtained")]
+ PackedTransactionAcquire(#[source] gix_lock::acquire::Error),
+ #[error("The packed transaction could not be prepared")]
+ PackedTransactionPrepare(#[from] packed::transaction::prepare::Error),
+ #[error("The packed ref file could not be parsed")]
+ PackedFind(#[from] packed::find::Error),
+ #[error("Edit preprocessing failed with an error")]
+ PreprocessingFailed(#[source] std::io::Error),
+ #[error("A lock could not be obtained for reference {full_name:?}")]
+ LockAcquire {
+ source: gix_lock::acquire::Error,
+ full_name: BString,
+ },
+ #[error("An IO error occurred while applying an edit")]
+ Io(#[from] std::io::Error),
+ #[error("The reference {full_name:?} for deletion did not exist or could not be parsed")]
+ DeleteReferenceMustExist { full_name: BString },
+ #[error("Reference {full_name:?} was not supposed to exist when writing it with value {new:?}, but actual content was {actual:?}")]
+ MustNotExist {
+ full_name: BString,
+ actual: Target,
+ new: Target,
+ },
+ #[error("Reference {full_name:?} was supposed to exist with value {expected}, but didn't.")]
+ MustExist { full_name: BString, expected: Target },
+ #[error("The reference {full_name:?} should have content {expected}, actual content was {actual}")]
+ ReferenceOutOfDate {
+ full_name: BString,
+ expected: Target,
+ actual: Target,
+ },
+ #[error("Could not read reference")]
+ ReferenceDecode(#[from] file::loose::reference::decode::Error),
+ }
+}
+
+pub use error::Error;
diff --git a/vendor/gix-ref/src/store/general/handle/find.rs b/vendor/gix-ref/src/store/general/handle/find.rs
new file mode 100644
index 000000000..9792b9b7d
--- /dev/null
+++ b/vendor/gix-ref/src/store/general/handle/find.rs
@@ -0,0 +1,82 @@
+use std::convert::TryInto;
+
+use crate::{store, PartialNameRef, Reference};
+
+mod error {
+ use std::convert::Infallible;
+
+ /// The error returned by [crate::file::Store::find_loose()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("An error occurred while finding a reference in the loose file database")]
+ Loose(#[from] crate::file::find::Error),
+ #[error("The ref name or path is not a valid ref name")]
+ RefnameValidation(#[from] crate::name::Error),
+ }
+
+ impl From<Infallible> for Error {
+ fn from(_: Infallible) -> Self {
+ unreachable!("this impl is needed to allow passing a known valid partial path as parameter")
+ }
+ }
+}
+
+pub use error::Error;
+
+use crate::store::handle;
+
+impl store::Handle {
+ /// TODO: actually implement this with handling of the packed buffer.
+ pub fn try_find<'a, Name, E>(&self, partial: Name) -> Result<Option<Reference>, Error>
+ where
+ Name: TryInto<&'a PartialNameRef, Error = E>,
+ Error: From<E>,
+ {
+ let _name = partial.try_into()?;
+ match &self.state {
+ handle::State::Loose { store: _, .. } => {
+ todo!()
+ }
+ }
+ }
+}
+
+mod existing {
+ mod error {
+ use std::path::PathBuf;
+
+ /// The error returned by [file::Store::find_existing()][crate::file::Store::find_existing()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("An error occurred while finding a reference in the database")]
+ Find(#[from] crate::store::find::Error),
+ #[error("The ref partially named {name:?} could not be found")]
+ NotFound { name: PathBuf },
+ }
+ }
+
+ use std::convert::TryInto;
+
+ pub use error::Error;
+
+ use crate::{store, PartialNameRef, Reference};
+
+ impl store::Handle {
+ /// Similar to [`crate::file::Store::find()`] but a non-existing ref is treated as error.
+ pub fn find<'a, Name, E>(&self, _partial: Name) -> Result<Reference, Error>
+ where
+ Name: TryInto<&'a PartialNameRef, Error = E>,
+ crate::name::Error: From<E>,
+ {
+ todo!()
+ // match self.try_find(partial) {}
+ // match self.find_one_with_verified_input(path.to_partial_path().as_ref(), packed) {
+ // Ok(Some(r)) => Ok(r),
+ // Ok(None) => Err(Error::NotFound(path.to_partial_path().into_owned())),
+ // Err(err) => Err(err.into()),
+ // }
+ }
+ }
+}
diff --git a/vendor/gix-ref/src/store/general/handle/mod.rs b/vendor/gix-ref/src/store/general/handle/mod.rs
new file mode 100644
index 000000000..44d9e060d
--- /dev/null
+++ b/vendor/gix-ref/src/store/general/handle/mod.rs
@@ -0,0 +1,43 @@
+#![allow(dead_code)]
+use crate::{store, Namespace};
+
+#[derive(Clone)]
+pub(crate) enum State {
+ Loose { store: crate::file::Store },
+}
+
+impl crate::Store {
+ /// Return a new handle which sees all references if `namespace` is `None` or all read and write operations are limited
+ /// to the given `namespace` if `Some`.
+ pub fn to_handle(&self) -> store::Handle {
+ Self::new_handle_inner(&self.inner, None)
+ }
+
+ /// As above, but supports a namespace to be set
+ pub fn to_handle_namespaced(&self, namespace: Option<Namespace>) -> store::Handle {
+ Self::new_handle_inner(&self.inner, namespace)
+ }
+
+ fn new_handle_inner(state: &store::State, namespace: Option<Namespace>) -> store::Handle {
+ store::Handle {
+ state: match state {
+ store::State::Loose { store } => store::handle::State::Loose {
+ store: {
+ let mut store = store.clone();
+ store.namespace = namespace;
+ store
+ },
+ },
+ },
+ }
+ }
+}
+
+///
+pub mod find;
+
+mod iter {
+ // impl store::Handle {
+ // pub fn iter<'p, 's>(&'s self, packed: Option<&'p packed::Buffer>) -> std::io::Result<LooseThenPacked<'p, 's>> {
+ // }
+}
diff --git a/vendor/gix-ref/src/store/general/init.rs b/vendor/gix-ref/src/store/general/init.rs
new file mode 100644
index 000000000..a9112c0a6
--- /dev/null
+++ b/vendor/gix-ref/src/store/general/init.rs
@@ -0,0 +1,38 @@
+use std::path::PathBuf;
+
+use crate::store::WriteReflog;
+
+mod error {
+ /// The error returned by [crate::Store::at()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("There was an error accessing the store's directory")]
+ Io(#[from] std::io::Error),
+ }
+}
+
+pub use error::Error;
+
+use crate::file;
+
+#[allow(dead_code)]
+impl crate::Store {
+ /// Create a new store at the given location, typically the `.git/` directory.
+ ///
+ /// `object_hash` defines the kind of hash to assume when dealing with refs.
+ pub fn at(
+ git_dir: impl Into<PathBuf>,
+ reflog_mode: WriteReflog,
+ object_hash: gix_hash::Kind,
+ ) -> Result<Self, Error> {
+ // for now, just try to read the directory - later we will do that naturally as we have to figure out if it's a ref-table or not.
+ let git_dir = git_dir.into();
+ std::fs::read_dir(&git_dir)?;
+ Ok(crate::Store {
+ inner: crate::store::State::Loose {
+ store: file::Store::at(git_dir, reflog_mode, object_hash),
+ },
+ })
+ }
+}
diff --git a/vendor/gix-ref/src/store/general/mod.rs b/vendor/gix-ref/src/store/general/mod.rs
new file mode 100644
index 000000000..6adc4f6e6
--- /dev/null
+++ b/vendor/gix-ref/src/store/general/mod.rs
@@ -0,0 +1 @@
+mod init;
diff --git a/vendor/gix-ref/src/store/mod.rs b/vendor/gix-ref/src/store/mod.rs
new file mode 100644
index 000000000..6691098e2
--- /dev/null
+++ b/vendor/gix-ref/src/store/mod.rs
@@ -0,0 +1,5 @@
+///
+pub mod file;
+
+///
+pub mod packed;
diff --git a/vendor/gix-ref/src/store/packed/buffer.rs b/vendor/gix-ref/src/store/packed/buffer.rs
new file mode 100644
index 000000000..6786e4a9f
--- /dev/null
+++ b/vendor/gix-ref/src/store/packed/buffer.rs
@@ -0,0 +1,105 @@
+use crate::store_impl::packed;
+
+impl AsRef<[u8]> for packed::Buffer {
+ fn as_ref(&self) -> &[u8] {
+ &self.data.as_ref()[self.offset..]
+ }
+}
+
+impl AsRef<[u8]> for packed::Backing {
+ fn as_ref(&self) -> &[u8] {
+ match self {
+ packed::Backing::InMemory(data) => data,
+ packed::Backing::Mapped(map) => map,
+ }
+ }
+}
+
+///
+pub mod open {
+ use std::path::PathBuf;
+
+ use memmap2::Mmap;
+
+ use crate::store_impl::packed;
+
+ /// Initialization
+ impl packed::Buffer {
+ /// Open the file at `path` and map it into memory if the file size is larger than `use_memory_map_if_larger_than_bytes`.
+ ///
+ /// In order to allow fast lookups and optimizations, the contents of the packed refs must be sorted.
+ /// If that's not the case, they will be sorted on the fly with the data being written into a memory buffer.
+ pub fn open(path: impl Into<PathBuf>, use_memory_map_if_larger_than_bytes: u64) -> Result<Self, Error> {
+ let path = path.into();
+ let (backing, offset) = {
+ let backing = if std::fs::metadata(&path)?.len() <= use_memory_map_if_larger_than_bytes {
+ packed::Backing::InMemory(std::fs::read(&path)?)
+ } else {
+ packed::Backing::Mapped(
+ // SAFETY: we have to take the risk of somebody changing the file underneath. Git never writes into the same file.
+ #[allow(unsafe_code)]
+ unsafe {
+ Mmap::map(&std::fs::File::open(&path)?)?
+ },
+ )
+ };
+
+ let (offset, sorted) = {
+ let data = backing.as_ref();
+ if *data.first().unwrap_or(&b' ') == b'#' {
+ let (records, header) = packed::decode::header::<()>(data).map_err(|_| Error::HeaderParsing)?;
+ let offset = records.as_ptr() as usize - data.as_ptr() as usize;
+ (offset, header.sorted)
+ } else {
+ (0, false)
+ }
+ };
+
+ if !sorted {
+ // this implementation is likely slower than what git does, but it's less code, too.
+ let mut entries = packed::Iter::new(&backing.as_ref()[offset..])?.collect::<Result<Vec<_>, _>>()?;
+ entries.sort_by_key(|e| e.name.as_bstr());
+ let mut serialized = Vec::<u8>::new();
+ for entry in entries {
+ serialized.extend_from_slice(entry.target);
+ serialized.push(b' ');
+ serialized.extend_from_slice(entry.name.as_bstr());
+ serialized.push(b'\n');
+ if let Some(object) = entry.object {
+ serialized.push(b'^');
+ serialized.extend_from_slice(object);
+ serialized.push(b'\n');
+ }
+ }
+ (Backing::InMemory(serialized), 0)
+ } else {
+ (backing, offset)
+ }
+ };
+ Ok(packed::Buffer {
+ offset,
+ data: backing,
+ path,
+ })
+ }
+ }
+
+ mod error {
+ use crate::packed;
+
+ /// The error returned by [`open()`][super::packed::Buffer::open()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("The packed-refs file did not have a header or wasn't sorted and could not be iterated")]
+ Iter(#[from] packed::iter::Error),
+ #[error("The header could not be parsed, even though first line started with '#'")]
+ HeaderParsing,
+ #[error("The buffer could not be opened or read")]
+ Io(#[from] std::io::Error),
+ }
+ }
+ pub use error::Error;
+
+ use crate::packed::Backing;
+}
diff --git a/vendor/gix-ref/src/store/packed/decode.rs b/vendor/gix-ref/src/store/packed/decode.rs
new file mode 100644
index 000000000..f8825459e
--- /dev/null
+++ b/vendor/gix-ref/src/store/packed/decode.rs
@@ -0,0 +1,83 @@
+use std::convert::TryInto;
+
+use gix_object::bstr::{BStr, ByteSlice};
+use nom::{
+ bytes::complete::{tag, take_while},
+ combinator::{map, map_res, opt},
+ error::{FromExternalError, ParseError},
+ sequence::{delimited, preceded, terminated, tuple},
+ IResult,
+};
+
+use crate::{
+ parse::{hex_hash, newline},
+ store_impl::packed,
+};
+
+#[derive(Debug, PartialEq, Eq)]
+enum Peeled {
+ Unspecified,
+ Partial,
+ Fully,
+}
+
+/// Information parsed from the header of a packed ref file
+#[derive(Debug, PartialEq, Eq)]
+pub struct Header {
+ peeled: Peeled,
+ pub sorted: bool,
+}
+
+impl Default for Header {
+ fn default() -> Self {
+ Header {
+ peeled: Peeled::Unspecified,
+ sorted: false,
+ }
+ }
+}
+
+fn until_newline<'a, E>(input: &'a [u8]) -> IResult<&'a [u8], &'a BStr, E>
+where
+ E: ParseError<&'a [u8]>,
+{
+ map(
+ terminated(take_while(|b: u8| b != b'\r' && b != b'\n'), newline),
+ |not_newline| not_newline.as_bstr(),
+ )(input)
+}
+
+pub fn header<'a, E>(input: &'a [u8]) -> IResult<&'a [u8], Header, E>
+where
+ E: ParseError<&'a [u8]>,
+{
+ let (rest, traits) = preceded(tag(b"# pack-refs with: "), until_newline)(input)?;
+
+ let mut peeled = Peeled::Unspecified;
+ let mut sorted = false;
+ for token in traits.as_bstr().split_str(b" ") {
+ if token == b"fully-peeled" {
+ peeled = Peeled::Fully;
+ } else if token == b"peeled" {
+ peeled = Peeled::Partial;
+ } else if token == b"sorted" {
+ sorted = true;
+ }
+ }
+
+ Ok((rest, Header { peeled, sorted }))
+}
+
+pub fn reference<'a, E: ParseError<&'a [u8]> + FromExternalError<&'a [u8], crate::name::Error>>(
+ input: &'a [u8],
+) -> IResult<&'a [u8], packed::Reference<'a>, E> {
+ let (input, (target, name)) = tuple((
+ terminated(hex_hash, tag(b" ")),
+ map_res(until_newline, TryInto::try_into),
+ ))(input)?;
+ let (rest, object) = opt(delimited(tag(b"^"), hex_hash, newline))(input)?;
+ Ok((rest, packed::Reference { name, target, object }))
+}
+
+#[cfg(test)]
+mod tests;
diff --git a/vendor/gix-ref/src/store/packed/decode/tests.rs b/vendor/gix-ref/src/store/packed/decode/tests.rs
new file mode 100644
index 000000000..6c8f315c1
--- /dev/null
+++ b/vendor/gix-ref/src/store/packed/decode/tests.rs
@@ -0,0 +1,125 @@
+type Result = std::result::Result<(), Box<dyn std::error::Error>>;
+
+mod reference {
+ use nom::error::VerboseError;
+
+ use super::Result;
+ use crate::{
+ store_impl::{packed, packed::decode},
+ FullNameRef,
+ };
+
+ /// Convert a hexadecimal hash into its corresponding `ObjectId` or _panic_.
+ fn hex_to_id(hex: &str) -> gix_hash::ObjectId {
+ gix_hash::ObjectId::from_hex(hex.as_bytes()).expect("40 bytes hex")
+ }
+
+ #[test]
+ fn invalid() {
+ assert!(decode::reference::<()>(b"# what looks like a comment",).is_err());
+ assert!(
+ decode::reference::<()>(b"^e9cdc958e7ce2290e2d7958cdb5aa9323ef35d37\n",).is_err(),
+ "lonely peel"
+ );
+ }
+
+ #[test]
+ fn two_refs_in_a_row() -> Result {
+ let input: &[u8] = b"d53c4b0f91f1b29769c9430f2d1c0bcab1170c75 refs/heads/alternates-after-packs-and-loose
+^e9cdc958e7ce2290e2d7958cdb5aa9323ef35d37\neaae9c1bc723209d793eb93f5587fa2604d5cd92 refs/heads/avoid-double-lookup\n";
+ let (input, parsed) = decode::reference::<VerboseError<_>>(input)?;
+
+ assert_eq!(
+ parsed,
+ packed::Reference {
+ name: FullNameRef::new_unchecked("refs/heads/alternates-after-packs-and-loose".into()),
+ target: "d53c4b0f91f1b29769c9430f2d1c0bcab1170c75".into(),
+ object: Some("e9cdc958e7ce2290e2d7958cdb5aa9323ef35d37".into())
+ }
+ );
+ assert_eq!(parsed.target(), hex_to_id("d53c4b0f91f1b29769c9430f2d1c0bcab1170c75"));
+ assert_eq!(parsed.object(), hex_to_id("e9cdc958e7ce2290e2d7958cdb5aa9323ef35d37"));
+
+ let (input, parsed) = decode::reference::<VerboseError<_>>(input)?;
+ assert!(input.is_empty(), "exhausted");
+ assert_eq!(
+ parsed.name,
+ FullNameRef::new_unchecked("refs/heads/avoid-double-lookup".into())
+ );
+ assert_eq!(parsed.target, "eaae9c1bc723209d793eb93f5587fa2604d5cd92");
+ assert!(parsed.object.is_none());
+ Ok(())
+ }
+}
+
+mod header {
+ use gix_object::bstr::ByteSlice;
+ use gix_testtools::to_bstr_err;
+
+ use super::Result;
+ use crate::store_impl::packed::{
+ decode,
+ decode::{Header, Peeled},
+ };
+
+ #[test]
+ fn invalid() {
+ assert!(
+ decode::header::<()>(b"# some user comment").is_err(),
+ "something the user put there"
+ );
+ assert!(decode::header::<()>(b"# pack-refs: ").is_err(), "looks right but isn't");
+ assert!(
+ decode::header::<()>(b" # pack-refs with: ").is_err(),
+ "does not start with #"
+ );
+ }
+
+ #[test]
+ fn valid_fully_peeled_stored() -> Result {
+ let input: &[u8] = b"# pack-refs with: peeled fully-peeled sorted \nsomething else";
+ let (rest, header) = decode::header::<nom::error::VerboseError<_>>(input).map_err(to_bstr_err)?;
+
+ assert_eq!(rest.as_bstr(), "something else", "remainder starts after newline");
+ assert_eq!(
+ header,
+ Header {
+ peeled: Peeled::Fully,
+ sorted: true
+ }
+ );
+ Ok(())
+ }
+
+ #[test]
+ fn valid_peeled_unsorted() -> Result {
+ let input: &[u8] = b"# pack-refs with: peeled\n";
+ let (rest, header) = decode::header::<()>(input)?;
+
+ assert!(rest.is_empty());
+ assert_eq!(
+ header,
+ Header {
+ peeled: Peeled::Partial,
+ sorted: false
+ }
+ );
+ Ok(())
+ }
+
+ #[test]
+ fn valid_empty() -> Result {
+ let input: &[u8] = b"# pack-refs with: \n";
+ let (rest, header) = decode::header::<()>(input)?;
+
+ assert!(rest.is_empty());
+ assert_eq!(
+ header,
+ Header {
+ peeled: Peeled::Unspecified,
+ sorted: false
+ }
+ );
+ Ok(())
+ }
+}
diff --git a/vendor/gix-ref/src/store/packed/find.rs b/vendor/gix-ref/src/store/packed/find.rs
new file mode 100644
index 000000000..abd35dfe2
--- /dev/null
+++ b/vendor/gix-ref/src/store/packed/find.rs
@@ -0,0 +1,154 @@
+use std::convert::TryInto;
+
+use gix_object::bstr::{BStr, BString, ByteSlice};
+
+use crate::{store_impl::packed, FullNameRef, PartialNameRef};
+
+/// packed-refs specific functionality
+impl packed::Buffer {
+ /// Find a reference with the given `name` and return it.
+ ///
+ /// Note that it will look it up verbatim and does not deal with namespaces or special prefixes like
+ /// `main-worktree/` or `worktrees/<name>/`, as this is left to the caller.
+ pub fn try_find<'a, Name, E>(&self, name: Name) -> Result<Option<packed::Reference<'_>>, Error>
+ where
+ Name: TryInto<&'a PartialNameRef, Error = E>,
+ Error: From<E>,
+ {
+ let name = name.try_into()?;
+ let mut buf = BString::default();
+ for inbetween in &["", "tags", "heads", "remotes"] {
+ let (name, was_absolute) = if name.looks_like_full_name() {
+ let name = FullNameRef::new_unchecked(name.as_bstr());
+ let name = match transform_full_name_for_lookup(name) {
+ None => return Ok(None),
+ Some(name) => name,
+ };
+ (name, true)
+ } else {
+ let full_name = name.construct_full_name_ref(true, inbetween, &mut buf);
+ (full_name, false)
+ };
+ match self.try_find_full_name(name)? {
+ Some(r) => return Ok(Some(r)),
+ None if was_absolute => return Ok(None),
+ None => continue,
+ }
+ }
+ Ok(None)
+ }
+
+ pub(crate) fn try_find_full_name(&self, name: &FullNameRef) -> Result<Option<packed::Reference<'_>>, Error> {
+ match self.binary_search_by(name.as_bstr()) {
+ Ok(line_start) => Ok(Some(
+ packed::decode::reference::<()>(&self.as_ref()[line_start..])
+ .map_err(|_| Error::Parse)?
+ .1,
+ )),
+ Err((parse_failure, _)) => {
+ if parse_failure {
+ Err(Error::Parse)
+ } else {
+ Ok(None)
+ }
+ }
+ }
+ }
+
+ /// Find a reference with the given `name` and return it.
+ pub fn find<'a, Name, E>(&self, name: Name) -> Result<packed::Reference<'_>, existing::Error>
+ where
+ Name: TryInto<&'a PartialNameRef, Error = E>,
+ Error: From<E>,
+ {
+ match self.try_find(name) {
+ Ok(Some(r)) => Ok(r),
+ Ok(None) => Err(existing::Error::NotFound),
+ Err(err) => Err(existing::Error::Find(err)),
+ }
+ }
+
+ /// Perform a binary search where `Ok(pos)` is the beginning of the line that matches `name` perfectly and `Err(pos)`
+ /// is the beginning of the line at which `name` could be inserted to still be in sort order.
+ pub(in crate::store_impl::packed) fn binary_search_by(&self, full_name: &BStr) -> Result<usize, (bool, usize)> {
+ let a = self.as_ref();
+ let search_start_of_record = |ofs: usize| {
+ a[..ofs]
+ .rfind(b"\n")
+ .and_then(|pos| {
+ let candidate = pos + 1;
+ a.get(candidate).and_then(|b| {
+ if *b == b'^' {
+ a[..pos].rfind(b"\n").map(|pos| pos + 1)
+ } else {
+ Some(candidate)
+ }
+ })
+ })
+ .unwrap_or(0)
+ };
+ let mut encountered_parse_failure = false;
+ a.binary_search_by_key(&full_name.as_ref(), |b: &u8| {
+ let ofs = b as *const u8 as usize - a.as_ptr() as usize;
+ let line = &a[search_start_of_record(ofs)..];
+ packed::decode::reference::<()>(line)
+ .map(|(_rest, r)| r.name.as_bstr().as_bytes())
+ .map_err(|err| {
+ encountered_parse_failure = true;
+ err
+ })
+ .unwrap_or(&[])
+ })
+ .map(search_start_of_record)
+ .map_err(|pos| (encountered_parse_failure, search_start_of_record(pos)))
+ }
+}
+
+mod error {
+ use std::convert::Infallible;
+
+ /// The error returned by [`find()`][super::packed::Buffer::find()]
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("The ref name or path is not a valid ref name")]
+ RefnameValidation(#[from] crate::name::Error),
+ #[error("The reference could not be parsed")]
+ Parse,
+ }
+
+ impl From<Infallible> for Error {
+ fn from(_: Infallible) -> Self {
+ unreachable!("this impl is needed to allow passing a known valid partial path as parameter")
+ }
+ }
+}
+pub use error::Error;
+
+///
+pub mod existing {
+
+ /// The error returned by [`find()`][super::packed::Buffer::find()]
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("The find operation failed")]
+ Find(#[from] super::Error),
+ #[error("The reference did not exist even though that was expected")]
+ NotFound,
+ }
+}
+
+pub(crate) fn transform_full_name_for_lookup(name: &FullNameRef) -> Option<&FullNameRef> {
+ match name.category_and_short_name() {
+ Some((c, sn)) => {
+ use crate::Category::*;
+ Some(match c {
+ MainRef | LinkedRef { .. } => FullNameRef::new_unchecked(sn),
+ Tag | RemoteBranch | LocalBranch | Bisect | Rewritten | Note => name,
+ MainPseudoRef | PseudoRef | LinkedPseudoRef { .. } | WorktreePrivate => return None,
+ })
+ }
+ None => Some(name),
+ }
+}
diff --git a/vendor/gix-ref/src/store/packed/iter.rs b/vendor/gix-ref/src/store/packed/iter.rs
new file mode 100644
index 000000000..d9c49956b
--- /dev/null
+++ b/vendor/gix-ref/src/store/packed/iter.rs
@@ -0,0 +1,117 @@
+use gix_object::bstr::{BString, ByteSlice};
+
+use crate::store_impl::{packed, packed::decode};
+
+/// packed-refs specific functionality
+impl packed::Buffer {
+ /// Return an iterator of references stored in this packed refs buffer, ordered by reference name.
+ ///
+ /// # Note
+ ///
+ /// There is no namespace support in packed iterators. It can be emulated using `iter_prefixed(…)`.
+ pub fn iter(&self) -> Result<packed::Iter<'_>, packed::iter::Error> {
+ packed::Iter::new(self.as_ref())
+ }
+
+ /// Return an iterator yielding only references matching the given prefix, ordered by reference name.
+ pub fn iter_prefixed(&self, prefix: impl Into<BString>) -> Result<packed::Iter<'_>, packed::iter::Error> {
+ let prefix = prefix.into();
+ let first_record_with_prefix = self.binary_search_by(prefix.as_bstr()).unwrap_or_else(|(_, pos)| pos);
+ packed::Iter::new_with_prefix(&self.as_ref()[first_record_with_prefix..], Some(prefix))
+ }
+}
+
+impl<'a> Iterator for packed::Iter<'a> {
+ type Item = Result<packed::Reference<'a>, Error>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ if self.cursor.is_empty() {
+ return None;
+ }
+
+ match decode::reference::<()>(self.cursor) {
+ Ok((rest, reference)) => {
+ self.cursor = rest;
+ self.current_line += 1;
+ if let Some(ref prefix) = self.prefix {
+ if !reference.name.as_bstr().starts_with_str(prefix) {
+ self.cursor = &[];
+ return None;
+ }
+ }
+ Some(Ok(reference))
+ }
+ Err(_) => {
+ let (failed_line, next_cursor) = self
+ .cursor
+ .find_byte(b'\n')
+ .map_or((self.cursor, &[][..]), |pos| self.cursor.split_at(pos + 1));
+ self.cursor = next_cursor;
+ let line_number = self.current_line;
+ self.current_line += 1;
+
+ Some(Err(Error::Reference {
+ invalid_line: failed_line
+ .get(..failed_line.len().saturating_sub(1))
+ .unwrap_or(failed_line)
+ .into(),
+ line_number,
+ }))
+ }
+ }
+ }
+}
+
+impl<'a> packed::Iter<'a> {
+ /// Return a new iterator after successfully parsing the possibly existing first line of the given `packed` refs buffer.
+ pub fn new(packed: &'a [u8]) -> Result<Self, Error> {
+ Self::new_with_prefix(packed, None)
+ }
+
+ /// Returns an iterator whose references will only match the given prefix.
+ ///
+ /// It assumes that the underlying `packed` buffer is indeed sorted.
+ pub(in crate::store_impl::packed) fn new_with_prefix(
+ packed: &'a [u8],
+ prefix: Option<BString>,
+ ) -> Result<Self, Error> {
+ if packed.is_empty() {
+ Ok(packed::Iter {
+ cursor: packed,
+ prefix,
+ current_line: 1,
+ })
+ } else if packed[0] == b'#' {
+ let (refs, _header) = decode::header::<()>(packed).map_err(|_| Error::Header {
+ invalid_first_line: packed.lines().next().unwrap_or(packed).into(),
+ })?;
+ Ok(packed::Iter {
+ cursor: refs,
+ prefix,
+ current_line: 2,
+ })
+ } else {
+ Ok(packed::Iter {
+ cursor: packed,
+ prefix,
+ current_line: 1,
+ })
+ }
+ }
+}
+
+mod error {
+ use gix_object::bstr::BString;
+
+ /// The error returned by [`Iter`][super::packed::Iter].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("The header existed but could not be parsed: {invalid_first_line:?}")]
+ Header { invalid_first_line: BString },
+ #[error("Invalid reference in line {line_number}: {invalid_line:?}")]
+ Reference { invalid_line: BString, line_number: usize },
+ }
+}
+
+pub use error::Error;
diff --git a/vendor/gix-ref/src/store/packed/mod.rs b/vendor/gix-ref/src/store/packed/mod.rs
new file mode 100644
index 000000000..53a077414
--- /dev/null
+++ b/vendor/gix-ref/src/store/packed/mod.rs
@@ -0,0 +1,93 @@
+use std::path::PathBuf;
+
+use gix_hash::ObjectId;
+use gix_object::bstr::{BStr, BString};
+use memmap2::Mmap;
+
+use crate::{file, transaction::RefEdit, FullNameRef};
+
+#[derive(Debug)]
+enum Backing {
+ /// The buffer is loaded entirely in memory, along with the `offset` to the first record past the header.
+ InMemory(Vec<u8>),
+ /// The buffer is mapping the file on disk, along with the offset to the first record past the header
+ Mapped(Mmap),
+}
+
+/// A buffer containing a packed-ref file that is either memory mapped or fully in-memory depending on a cutoff.
+///
+/// The buffer is guaranteed to be sorted as per the packed-ref rules which allows some operations to be more efficient.
+#[derive(Debug)]
+pub struct Buffer {
+ data: Backing,
+ /// The offset to the first record, how many bytes to skip past the header
+ offset: usize,
+ /// The path from which we were loaded
+ path: PathBuf,
+}
+
+struct Edit {
+ inner: RefEdit,
+ peeled: Option<ObjectId>,
+}
+
+/// A transaction for editing packed references
+pub(crate) struct Transaction {
+ buffer: Option<file::packed::SharedBufferSnapshot>,
+ edits: Option<Vec<Edit>>,
+ lock: Option<gix_lock::File>,
+ #[allow(dead_code)] // It just has to be kept alive, hence no reads
+ closed_lock: Option<gix_lock::Marker>,
+}
+
+/// A reference as parsed from the `packed-refs` file
+#[derive(Debug, PartialEq, Eq)]
+pub struct Reference<'a> {
+ /// The validated full name of the reference.
+ pub name: &'a FullNameRef,
+ /// The target object id of the reference, hex encoded.
+ pub target: &'a BStr,
+ /// The fully peeled object id, hex encoded, that the ref is ultimately pointing to
+ /// i.e. when all indirections are removed.
+ pub object: Option<&'a BStr>,
+}
+
+impl<'a> Reference<'a> {
+ /// Decode the target as object
+ pub fn target(&self) -> ObjectId {
+ gix_hash::ObjectId::from_hex(self.target).expect("parser validation")
+ }
+
+ /// Decode the object this reference is ultimately pointing to. Note that this is
+ /// the [`target()`][Reference::target()] if this is not a fully peeled reference like a tag.
+ pub fn object(&self) -> ObjectId {
+ self.object.map_or_else(
+ || self.target(),
+ |id| ObjectId::from_hex(id).expect("parser validation"),
+ )
+ }
+}
+
+/// An iterator over references in a packed refs file
+pub struct Iter<'a> {
+ /// The position at which to parse the next reference
+ cursor: &'a [u8],
+ /// The next line, starting at 1
+ current_line: usize,
+ /// If set, references returned will match the prefix; the first failed match will stop all iteration.
+ prefix: Option<BString>,
+}
+
+mod decode;
+
+///
+pub mod iter;
+
+///
+pub mod buffer;
+
+///
+pub mod find;
+
+///
+pub mod transaction;
diff --git a/vendor/gix-ref/src/store/packed/transaction.rs b/vendor/gix-ref/src/store/packed/transaction.rs
new file mode 100644
index 000000000..26cc84b9b
--- /dev/null
+++ b/vendor/gix-ref/src/store/packed/transaction.rs
@@ -0,0 +1,267 @@
+use std::{fmt::Formatter, io::Write};
+
+use crate::{
+ file,
+ store_impl::{file::transaction::FindObjectFn, packed, packed::Edit},
+ transaction::{Change, RefEdit},
+ Target,
+};
+
+pub(crate) const HEADER_LINE: &[u8] = b"# pack-refs with: peeled fully-peeled sorted \n";
+
+/// Access and instantiation
+impl packed::Transaction {
+ pub(crate) fn new_from_pack_and_lock(
+ buffer: Option<file::packed::SharedBufferSnapshot>,
+ lock: gix_lock::File,
+ ) -> Self {
+ packed::Transaction {
+ buffer,
+ edits: None,
+ lock: Some(lock),
+ closed_lock: None,
+ }
+ }
+}
+
+impl std::fmt::Debug for packed::Transaction {
+ fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+ f.debug_struct("packed::Transaction")
+ .field("edits", &self.edits.as_ref().map(|e| e.len()))
+ .field("lock", &self.lock)
+ .finish_non_exhaustive()
+ }
+}
+
+/// Access
+impl packed::Transaction {
+ /// Returns our packed buffer
+ pub fn buffer(&self) -> Option<&packed::Buffer> {
+ self.buffer.as_ref().map(|b| &***b)
+ }
+}
+
+/// Lifecycle
+impl packed::Transaction {
+ /// Prepare the transaction by checking all edits for applicability.
+ pub fn prepare(
+ mut self,
+ edits: impl IntoIterator<Item = RefEdit>,
+ find: &mut FindObjectFn<'_>,
+ ) -> Result<Self, prepare::Error> {
+ assert!(self.edits.is_none(), "BUG: cannot call prepare(…) more than once");
+ let buffer = &self.buffer;
+ // Remove all edits which are deletions that aren't here in the first place
+ let mut edits: Vec<Edit> = edits
+ .into_iter()
+ .filter(|edit| {
+ if let Change::Delete { .. } = edit.change {
+ buffer.as_ref().map_or(true, |b| b.find(edit.name.as_ref()).is_ok())
+ } else {
+ true
+ }
+ })
+ .map(|change| Edit {
+ inner: change,
+ peeled: None,
+ })
+ .collect();
+
+ let mut buf = Vec::new();
+ for edit in edits.iter_mut() {
+ if let Change::Update {
+ new: Target::Peeled(new),
+ ..
+ } = edit.inner.change
+ {
+ let mut next_id = new;
+ edit.peeled = loop {
+ let kind = find(next_id, &mut buf)?;
+ match kind {
+ Some(kind) if kind == gix_object::Kind::Tag => {
+ next_id = gix_object::TagRefIter::from_bytes(&buf).target_id().map_err(|_| {
+ prepare::Error::Resolve(
+ format!("Couldn't get target object id from tag {next_id}").into(),
+ )
+ })?;
+ }
+ Some(_) => {
+ break if next_id == new { None } else { Some(next_id) };
+ }
+ None => {
+ return Err(prepare::Error::Resolve(
+ format!("Couldn't find object with id {next_id}").into(),
+ ))
+ }
+ }
+ };
+ }
+ }
+
+ if edits.is_empty() {
+ self.closed_lock = self
+ .lock
+ .take()
+ .map(|l| l.close())
+ .transpose()
+ .map_err(prepare::Error::CloseLock)?;
+ } else {
+ // NOTE that we don't do any additional checks here but apply all edits unconditionally.
+ // This is because this transaction system is internal and will be used correctly from the
+ // loose ref store transactions, which do the necessary checking.
+ }
+ self.edits = Some(edits);
+ Ok(self)
+ }
+
+ /// Commit the prepared transaction.
+ ///
+ /// Please note that actual edits invalidate existing packed buffers.
+ /// Note: There is the potential to write changes into memory and return such a packed-refs buffer for reuse.
+ pub fn commit(self) -> Result<(), commit::Error> {
+ let mut edits = self.edits.expect("BUG: cannot call commit() before prepare(…)");
+ if edits.is_empty() {
+ return Ok(());
+ }
+
+ let mut file = self.lock.expect("a write lock for applying changes");
+ let refs_sorted: Box<dyn Iterator<Item = Result<packed::Reference<'_>, packed::iter::Error>>> =
+ match self.buffer.as_ref() {
+ Some(buffer) => Box::new(buffer.iter()?),
+ None => Box::new(std::iter::empty()),
+ };
+
+ let mut refs_sorted = refs_sorted.peekable();
+
+ edits.sort_by(|l, r| l.inner.name.as_bstr().cmp(r.inner.name.as_bstr()));
+ let mut peekable_sorted_edits = edits.iter().peekable();
+
+ file.with_mut(|f| f.write_all(HEADER_LINE))?;
+
+ let mut num_written_lines = 0;
+ loop {
+ match (refs_sorted.peek(), peekable_sorted_edits.peek()) {
+ (Some(Err(_)), _) => {
+ let err = refs_sorted.next().expect("next").expect_err("err");
+ return Err(commit::Error::Iteration(err));
+ }
+ (None, None) => {
+ break;
+ }
+ (Some(Ok(_)), None) => {
+ let pref = refs_sorted.next().expect("next").expect("no err");
+ num_written_lines += 1;
+ file.with_mut(|out| write_packed_ref(out, pref))?;
+ }
+ (Some(Ok(pref)), Some(edit)) => {
+ use std::cmp::Ordering::*;
+ match pref.name.as_bstr().cmp(edit.inner.name.as_bstr()) {
+ Less => {
+ let pref = refs_sorted.next().expect("next").expect("valid");
+ num_written_lines += 1;
+ file.with_mut(|out| write_packed_ref(out, pref))?;
+ }
+ Greater => {
+ let edit = peekable_sorted_edits.next().expect("next");
+ file.with_mut(|out| write_edit(out, edit, &mut num_written_lines))?;
+ }
+ Equal => {
+ let _pref = refs_sorted.next().expect("next").expect("valid");
+ let edit = peekable_sorted_edits.next().expect("next");
+ file.with_mut(|out| write_edit(out, edit, &mut num_written_lines))?;
+ }
+ }
+ }
+ (None, Some(_)) => {
+ let edit = peekable_sorted_edits.next().expect("next");
+ file.with_mut(|out| write_edit(out, edit, &mut num_written_lines))?;
+ }
+ }
+ }
+
+ if num_written_lines == 0 {
+ std::fs::remove_file(file.resource_path())?;
+ } else {
+ file.commit()?;
+ }
+ drop(refs_sorted);
+ Ok(())
+ }
+}
+
+fn write_packed_ref(mut out: impl std::io::Write, pref: packed::Reference<'_>) -> std::io::Result<()> {
+ write!(out, "{} ", pref.target)?;
+ out.write_all(pref.name.as_bstr())?;
+ out.write_all(b"\n")?;
+ if let Some(object) = pref.object {
+ writeln!(out, "^{object}")?;
+ }
+ Ok(())
+}
+
+fn write_edit(mut out: impl std::io::Write, edit: &Edit, lines_written: &mut i32) -> std::io::Result<()> {
+ match edit.inner.change {
+ Change::Delete { .. } => {}
+ Change::Update {
+ new: Target::Peeled(target_oid),
+ ..
+ } => {
+ write!(out, "{target_oid} ")?;
+ out.write_all(edit.inner.name.as_bstr())?;
+ out.write_all(b"\n")?;
+ if let Some(object) = edit.peeled {
+ writeln!(out, "^{object}")?;
+ }
+ *lines_written += 1;
+ }
+ Change::Update {
+ new: Target::Symbolic(_),
+ ..
+ } => unreachable!("BUG: packed refs cannot contain symbolic refs, catch that in prepare(…)"),
+ }
+ Ok(())
+}
+
+/// Convert this buffer to be used as the basis for a transaction.
+pub(crate) fn buffer_into_transaction(
+ buffer: file::packed::SharedBufferSnapshot,
+ lock_mode: gix_lock::acquire::Fail,
+) -> Result<packed::Transaction, gix_lock::acquire::Error> {
+ let lock = gix_lock::File::acquire_to_update_resource(&buffer.path, lock_mode, None)?;
+ Ok(packed::Transaction {
+ buffer: Some(buffer),
+ lock: Some(lock),
+ closed_lock: None,
+ edits: None,
+ })
+}
+
+///
+pub mod prepare {
+ /// The error used in [`Transaction::prepare(…)`][crate::file::Transaction::prepare()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("Could not close a lock which won't ever be committed")]
+ CloseLock(#[from] std::io::Error),
+ #[error("The lookup of an object failed while peeling it")]
+ Resolve(#[from] Box<dyn std::error::Error + Send + Sync + 'static>),
+ }
+}
+
+///
+pub mod commit {
+ use crate::store_impl::packed;
+
+ /// The error used in [`Transaction::commit(…)`][crate::file::Transaction::commit()].
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("Changes to the resource could not be committed")]
+ Commit(#[from] gix_lock::commit::Error<gix_lock::File>),
+ #[error("Some references in the packed refs buffer could not be parsed")]
+ Iteration(#[from] packed::iter::Error),
+ #[error("Failed to write a ref line to the packed ref file")]
+ Io(#[from] std::io::Error),
+ }
+}
diff --git a/vendor/gix-ref/src/target.rs b/vendor/gix-ref/src/target.rs
new file mode 100644
index 000000000..6d4f69991
--- /dev/null
+++ b/vendor/gix-ref/src/target.rs
@@ -0,0 +1,154 @@
+use std::{convert::TryFrom, fmt};
+
+use gix_hash::{oid, ObjectId};
+
+use crate::{FullName, FullNameRef, Kind, Target, TargetRef};
+
+impl<'a> TargetRef<'a> {
+ /// Returns the kind of the target the ref is pointing to.
+ pub fn kind(&self) -> Kind {
+ match self {
+ TargetRef::Symbolic(_) => Kind::Symbolic,
+ TargetRef::Peeled(_) => Kind::Peeled,
+ }
+ }
+ /// Interpret this target as an object id, which may be `None` if it is symbolic.
+ pub fn try_id(&self) -> Option<&oid> {
+ match self {
+ TargetRef::Symbolic(_) => None,
+ TargetRef::Peeled(oid) => Some(oid),
+ }
+ }
+ /// Interpret this target as object id or **panic** if it is symbolic.
+ pub fn id(&self) -> &oid {
+ match self {
+ TargetRef::Symbolic(_) => panic!("BUG: tries to obtain object id from symbolic target"),
+ TargetRef::Peeled(oid) => oid,
+ }
+ }
+ /// Interpret this target as the name of the reference it points to, which may be `None` if it is an object id.
+ pub fn try_name(&self) -> Option<&FullNameRef> {
+ match self {
+ TargetRef::Symbolic(name) => Some(name),
+ TargetRef::Peeled(_) => None,
+ }
+ }
+ /// Convert this instance into an owned version, without consuming it.
+ pub fn into_owned(self) -> Target {
+ self.into()
+ }
+}
+
+impl Target {
+ /// Returns the kind of the target the ref is pointing to.
+ pub fn kind(&self) -> Kind {
+ match self {
+ Target::Symbolic(_) => Kind::Symbolic,
+ Target::Peeled(_) => Kind::Peeled,
+ }
+ }
+
+ /// Return true if this is a peeled target with a null hash
+ pub fn is_null(&self) -> bool {
+ match self {
+ Target::Peeled(oid) => oid.is_null(),
+ Target::Symbolic(_) => false,
+ }
+ }
+
+ /// Interpret this owned Target as shared Target
+ pub fn to_ref(&self) -> TargetRef<'_> {
+ match self {
+ Target::Peeled(oid) => TargetRef::Peeled(oid),
+ Target::Symbolic(name) => TargetRef::Symbolic(name.as_ref()),
+ }
+ }
+
+ /// Interpret this target as an object id, which may be `None` if it is symbolic.
+ pub fn try_id(&self) -> Option<&oid> {
+ match self {
+ Target::Symbolic(_) => None,
+ Target::Peeled(oid) => Some(oid),
+ }
+ }
+ /// Interpret this target as object id or panic if it is symbolic.
+ pub fn id(&self) -> &oid {
+ match self {
+ Target::Symbolic(_) => panic!("BUG: tries to obtain object id from symbolic target"),
+ Target::Peeled(oid) => oid,
+ }
+ }
+ /// Return the contained object id or panic
+ pub fn into_id(self) -> ObjectId {
+ match self {
+ Target::Symbolic(_) => panic!("BUG: expected peeled reference target but found symbolic one"),
+ Target::Peeled(oid) => oid,
+ }
+ }
+
+ /// Return the contained object id if the target is peeled or itself if it is not.
+ pub fn try_into_id(self) -> Result<ObjectId, Self> {
+ match self {
+ Target::Symbolic(_) => Err(self),
+ Target::Peeled(oid) => Ok(oid),
+ }
+ }
+ /// Interpret this target as the name of the reference it points to, which may be `None` if it is an object id.
+ pub fn try_name(&self) -> Option<&FullNameRef> {
+ match self {
+ Target::Symbolic(name) => Some(name.as_ref()),
+ Target::Peeled(_) => None,
+ }
+ }
+}
+
+impl<'a> From<TargetRef<'a>> for Target {
+ fn from(src: TargetRef<'a>) -> Self {
+ match src {
+ TargetRef::Peeled(oid) => Target::Peeled(oid.to_owned()),
+ TargetRef::Symbolic(name) => Target::Symbolic(name.to_owned()),
+ }
+ }
+}
+
+impl<'a> PartialEq<TargetRef<'a>> for Target {
+ fn eq(&self, other: &TargetRef<'a>) -> bool {
+ match (self, other) {
+ (Target::Peeled(lhs), TargetRef::Peeled(rhs)) => lhs == rhs,
+ (Target::Symbolic(lhs), TargetRef::Symbolic(rhs)) => lhs.as_bstr() == rhs.as_bstr(),
+ _ => false,
+ }
+ }
+}
+
+impl From<ObjectId> for Target {
+ fn from(id: ObjectId) -> Self {
+ Target::Peeled(id)
+ }
+}
+
+impl TryFrom<Target> for ObjectId {
+ type Error = Target;
+
+ fn try_from(value: Target) -> Result<Self, Self::Error> {
+ match value {
+ Target::Peeled(id) => Ok(id),
+ Target::Symbolic(_) => Err(value),
+ }
+ }
+}
+
+impl From<FullName> for Target {
+ fn from(name: FullName) -> Self {
+ Target::Symbolic(name)
+ }
+}
+
+impl fmt::Display for Target {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ Target::Peeled(oid) => oid.fmt(f),
+ Target::Symbolic(name) => write!(f, "ref: {}", name.as_bstr()),
+ }
+ }
+}
diff --git a/vendor/gix-ref/src/transaction/ext.rs b/vendor/gix-ref/src/transaction/ext.rs
new file mode 100644
index 000000000..cb85d9d17
--- /dev/null
+++ b/vendor/gix-ref/src/transaction/ext.rs
@@ -0,0 +1,133 @@
+use gix_object::bstr::BString;
+
+use crate::{
+ transaction::{Change, LogChange, PreviousValue, RefEdit, RefLog, Target},
+ PartialNameRef,
+};
+
+/// An extension trait to perform commonly used operations on edits across different ref stores.
+pub trait RefEditsExt<T>
+where
+ T: std::borrow::Borrow<RefEdit> + std::borrow::BorrowMut<RefEdit>,
+{
+ /// Return true if each ref `name` has exactly one `edit` across multiple ref edits
+ fn assure_one_name_has_one_edit(&self) -> Result<(), BString>;
+
+ /// Split all symbolic refs into updates for the symbolic ref as well as all their referents if the `deref` flag is enabled.
+ ///
+ /// Note no action is performed if deref isn't specified.
+ fn extend_with_splits_of_symbolic_refs(
+ &mut self,
+ find: impl FnMut(&PartialNameRef) -> Option<Target>,
+ make_entry: impl FnMut(usize, RefEdit) -> T,
+ ) -> Result<(), std::io::Error>;
+
+ /// All processing steps in one and in the correct order.
+ ///
+ /// Users call this to assure derefs are honored and duplicate checks are done.
+ fn pre_process(
+ &mut self,
+ find: impl FnMut(&PartialNameRef) -> Option<Target>,
+ make_entry: impl FnMut(usize, RefEdit) -> T,
+ ) -> Result<(), std::io::Error> {
+ self.extend_with_splits_of_symbolic_refs(find, make_entry)?;
+ self.assure_one_name_has_one_edit().map_err(|name| {
+ std::io::Error::new(
+ std::io::ErrorKind::AlreadyExists,
+ format!("A reference named '{name}' has multiple edits"),
+ )
+ })
+ }
+}
+
+impl<E> RefEditsExt<E> for Vec<E>
+where
+ E: std::borrow::Borrow<RefEdit> + std::borrow::BorrowMut<RefEdit>,
+{
+ fn assure_one_name_has_one_edit(&self) -> Result<(), BString> {
+ let mut names: Vec<_> = self.iter().map(|e| &e.borrow().name).collect();
+ names.sort();
+ match names.windows(2).find(|v| v[0] == v[1]) {
+ Some(name) => Err(name[0].as_bstr().to_owned()),
+ None => Ok(()),
+ }
+ }
+
+ fn extend_with_splits_of_symbolic_refs(
+ &mut self,
+ mut find: impl FnMut(&PartialNameRef) -> Option<Target>,
+ mut make_entry: impl FnMut(usize, RefEdit) -> E,
+ ) -> Result<(), std::io::Error> {
+ let mut new_edits = Vec::new();
+ let mut first = 0;
+ let mut round = 1;
+ loop {
+ for (eid, edit) in self[first..].iter_mut().enumerate().map(|(eid, v)| (eid + first, v)) {
+ let edit = edit.borrow_mut();
+ if !edit.deref {
+ continue;
+ };
+
+ // we can't tell what happened and we are here because it's a non-existing ref or an invalid one.
+ // In any case, we don't want the following algorithms to try dereffing it and assume they deal with
+ // broken refs gracefully.
+ edit.deref = false;
+ if let Some(Target::Symbolic(referent)) = find(edit.name.as_ref().as_partial_name()) {
+ new_edits.push(make_entry(
+ eid,
+ match &mut edit.change {
+ Change::Delete {
+ expected: previous,
+ log: mode,
+ } => {
+ let current_mode = *mode;
+ *mode = RefLog::Only;
+ RefEdit {
+ change: Change::Delete {
+ expected: previous.clone(),
+ log: current_mode,
+ },
+ name: referent,
+ deref: true,
+ }
+ }
+ Change::Update { log, expected, new } => {
+ let current = std::mem::replace(
+ log,
+ LogChange {
+ message: log.message.clone(),
+ mode: RefLog::Only,
+ force_create_reflog: log.force_create_reflog,
+ },
+ );
+ let next = std::mem::replace(expected, PreviousValue::Any);
+ RefEdit {
+ change: Change::Update {
+ expected: next,
+ new: new.clone(),
+ log: current,
+ },
+ name: referent,
+ deref: true,
+ }
+ }
+ },
+ ));
+ }
+ }
+ if new_edits.is_empty() {
+ break Ok(());
+ }
+ if round == 5 {
+ break Err(std::io::Error::new(
+ std::io::ErrorKind::WouldBlock,
+ format!("Could not follow all splits after {round} rounds, assuming reference cycle"),
+ ));
+ }
+ round += 1;
+ first = self.len();
+
+ self.append(&mut new_edits);
+ }
+ }
+}
diff --git a/vendor/gix-ref/src/transaction/mod.rs b/vendor/gix-ref/src/transaction/mod.rs
new file mode 100644
index 000000000..d13ff2e70
--- /dev/null
+++ b/vendor/gix-ref/src/transaction/mod.rs
@@ -0,0 +1,143 @@
+//! **Transactions** are the only way to make changes to the ref store in order to increase the chance of consistency in a multi-threaded
+//! environment.
+//!
+//! Transactions currently allow one to…
+//!
+//! * create or update references
+//! * delete references
+//!
+//! The following guarantees are made:
+//!
+//! * transactions are prepared, which is when other writers are prevented from changing them
+//!   - errors during preparation will cause a perfect rollback
+//! * prepared transactions are committed to finalize the change
+//!   - errors when committing will leave the ref store in an inconsistent, but operational state.
+use gix_object::bstr::BString;
+
+use crate::{FullName, Target};
+
+/// A change to the reflog.
+#[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd, Clone)]
+pub struct LogChange {
+    /// How to treat the reference log.
+    pub mode: RefLog,
+    /// If set, create a reflog even though the general rules would otherwise prohibit it.
+    /// Note that reflog writing might be disabled for the entire repository, in which case this flag has no effect either.
+    pub force_create_reflog: bool,
+    /// The message to put into the reference log. It must be a single line, hence newlines are forbidden.
+    /// The string can be empty to indicate there should be no message at all.
+    pub message: BString,
+}
+
+impl Default for LogChange {
+    fn default() -> Self {
+        LogChange {
+            mode: RefLog::AndReference,
+            force_create_reflog: false,
+            message: Default::default(),
+        }
+    }
+}
+
+/// The expected state of a reference before a change is applied, acting as a safeguard for edits.
+#[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd, Clone)]
+pub enum PreviousValue {
+    /// No requirements are made towards the current value, and the new value is set unconditionally.
+    Any,
+    /// The reference must exist and may have any value.
+    MustExist,
+    /// Create the ref only, hence the reference must not exist.
+    MustNotExist,
+    /// The ref _must_ exist and have the given value.
+    MustExistAndMatch(Target),
+    /// The ref _may_ exist and have the given value, or may not exist at all.
+    ExistingMustMatch(Target),
+}
+
+/// A description of an edit to perform.
+#[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd, Clone)]
+pub enum Change {
+    /// If `expected` demands a match, the ref must exist and its current value must agree with it,
+    /// making this behave like a checked `update`.
+    /// Otherwise it functions as `create-or-update`.
+    Update {
+        /// The desired change to the reference log.
+        log: LogChange,
+        /// The expected value already present in the reference.
+        /// If a ref was existing previously it will be overwritten at `MustExistAndMatch(actual_value)` for use after
+        /// the transaction was committed successfully.
+        expected: PreviousValue,
+        /// The new state of the reference, either for updating an existing one or creating a new one.
+        new: Target,
+    },
+    /// Delete a reference, optionally validating its current value against `expected` first.
+    Delete {
+        /// The expected value of the reference, with the `MustNotExist` variant being invalid.
+        ///
+        /// If a previous ref existed, this value will be filled in automatically as `MustExistAndMatch(actual_value)` and
+        /// can be accessed if the transaction was committed successfully.
+        expected: PreviousValue,
+        /// How to treat the reference log during deletion.
+        log: RefLog,
+    },
+}
+
+impl Change {
+    /// Return the target value the reference will point to after the change is applied, if this is an update.
+    pub fn new_value(&self) -> Option<crate::TargetRef<'_>> {
+        match self {
+            Change::Update { new, .. } => new.to_ref().into(),
+            Change::Delete { .. } => None,
+        }
+    }
+
+    /// Return the previously observed or expected value, shared by all variants that record one.
+    pub fn previous_value(&self) -> Option<crate::TargetRef<'_>> {
+        match self {
+            // TODO: collapse into nested or-patterns once the MSRV is greater than 1.52 (they stabilized in 1.53)
+            Change::Update {
+                expected: PreviousValue::MustExistAndMatch(previous),
+                ..
+            }
+            | Change::Update {
+                expected: PreviousValue::ExistingMustMatch(previous),
+                ..
+            }
+            | Change::Delete {
+                expected: PreviousValue::MustExistAndMatch(previous),
+                ..
+            }
+            | Change::Delete {
+                expected: PreviousValue::ExistingMustMatch(previous),
+                ..
+            } => previous,
+            _ => return None,
+        }
+        .to_ref()
+        .into()
+    }
+}
+
+/// A reference that is to be changed.
+#[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd, Clone)]
+pub struct RefEdit {
+    /// The change itself.
+    pub change: Change,
+    /// The name of the reference to apply the change to.
+    pub name: FullName,
+    /// If set, symbolic references identified by `name` will be dereferenced to have the `change` applied to their target.
+    /// This flag has no effect if the reference isn't symbolic.
+    pub deref: bool,
+}
+
+/// The way to deal with the reference log, both during updates (see `LogChange`) and deletions.
+#[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd, Clone, Copy)]
+pub enum RefLog {
+    /// Delete or update the reference and the log.
+    AndReference,
+    /// Delete or update only the reflog.
+    Only,
+}
+
+mod ext;
+pub use ext::RefEditsExt;