summaryrefslogtreecommitdiffstats
path: root/vendor/gix/src/revision/spec/parse
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-05-04 12:41:41 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-05-04 12:41:41 +0000
commit10ee2acdd26a7f1298c6f6d6b7af9b469fe29b87 (patch)
treebdffd5d80c26cf4a7a518281a204be1ace85b4c1 /vendor/gix/src/revision/spec/parse
parentReleasing progress-linux version 1.70.0+dfsg1-9~progress7.99u1. (diff)
downloadrustc-10ee2acdd26a7f1298c6f6d6b7af9b469fe29b87.tar.xz
rustc-10ee2acdd26a7f1298c6f6d6b7af9b469fe29b87.zip
Merging upstream version 1.70.0+dfsg2.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'vendor/gix/src/revision/spec/parse')
-rw-r--r--vendor/gix/src/revision/spec/parse/delegate/mod.rs256
-rw-r--r--vendor/gix/src/revision/spec/parse/delegate/navigate.rs340
-rw-r--r--vendor/gix/src/revision/spec/parse/delegate/revision.rs225
-rw-r--r--vendor/gix/src/revision/spec/parse/error.rs130
-rw-r--r--vendor/gix/src/revision/spec/parse/mod.rs61
-rw-r--r--vendor/gix/src/revision/spec/parse/types.rs182
6 files changed, 1194 insertions, 0 deletions
diff --git a/vendor/gix/src/revision/spec/parse/delegate/mod.rs b/vendor/gix/src/revision/spec/parse/delegate/mod.rs
new file mode 100644
index 000000000..78e4ab9ee
--- /dev/null
+++ b/vendor/gix/src/revision/spec/parse/delegate/mod.rs
@@ -0,0 +1,256 @@
+use std::collections::HashSet;
+
+use gix_hash::ObjectId;
+use gix_revision::spec::{
+ parse,
+ parse::delegate::{self},
+};
+use smallvec::SmallVec;
+
+use super::{Delegate, Error, ObjectKindHint};
+use crate::{
+ ext::{ObjectIdExt, ReferenceExt},
+ Repository,
+};
+
+type Replacements = SmallVec<[(ObjectId, ObjectId); 1]>;
+
+/// Construction and conversion of the accumulated parse state into a final spec or error.
+impl<'repo> Delegate<'repo> {
+    /// Create a delegate that resolves rev-spec components against `repo`,
+    /// with `opts` controlling how ref/object ambiguity is handled.
+    pub fn new(repo: &'repo Repository, opts: crate::revision::spec::parse::Options) -> Self {
+        Delegate {
+            refs: Default::default(),
+            objs: Default::default(),
+            ambiguous_objects: Default::default(),
+            idx: 0,
+            kind: None,
+            err: Vec::new(),
+            prefix: Default::default(),
+            last_call_was_disambiguate_prefix: Default::default(),
+            opts,
+            repo,
+        }
+    }
+
+    /// Consume the delegate and fold all recorded errors into one.
+    /// Prefixes that remained ambiguous (more than one candidate) are turned into
+    /// ambiguity errors and prepended, in order, so they are reported first.
+    pub fn into_err(mut self) -> Error {
+        let repo = self.repo;
+        for err in self
+            .ambiguous_objects
+            .iter_mut()
+            .zip(self.prefix)
+            .filter_map(|(a, b)| a.take().filter(|candidates| candidates.len() > 1).zip(b))
+            .map(|(candidates, prefix)| Error::ambiguous(candidates, prefix, repo))
+            // reverse so that insert(0, …) keeps the original (first-to-second) order.
+            .rev()
+        {
+            self.err.insert(0, err);
+        }
+        Error::from_errors(self.err)
+    }
+
+    /// Consume the delegate and produce the resolved spec, or an error if a side
+    /// of the spec is missing, ambiguous, or errors were recorded during parsing.
+    pub fn into_rev_spec(mut self) -> Result<crate::revision::Spec<'repo>, Error> {
+        // Reduce each of the (up to two) candidate sets to at most a single object id,
+        // failing with an ambiguity error if more than one candidate remains.
+        fn zero_or_one_objects_or_ambiguity_err(
+            mut candidates: [Option<HashSet<ObjectId>>; 2],
+            prefix: [Option<gix_hash::Prefix>; 2],
+            mut errors: Vec<Error>,
+            repo: &Repository,
+        ) -> Result<[Option<ObjectId>; 2], Error> {
+            let mut out = [None, None];
+            for ((candidates, prefix), out) in candidates.iter_mut().zip(prefix).zip(out.iter_mut()) {
+                let candidates = candidates.take();
+                match candidates {
+                    None => *out = None,
+                    Some(candidates) => {
+                        match candidates.len() {
+                            0 => unreachable!(
+                                "BUG: let's avoid still being around if no candidate matched the requirements"
+                            ),
+                            1 => {
+                                *out = candidates.into_iter().next();
+                            }
+                            _ => {
+                                // Report ambiguity ahead of previously collected errors.
+                                errors.insert(
+                                    0,
+                                    Error::ambiguous(candidates, prefix.expect("set when obtaining candidates"), repo),
+                                );
+                                return Err(Error::from_errors(errors));
+                            }
+                        };
+                    }
+                };
+            }
+            Ok(out)
+        }
+
+        // Map the parsed spec kind plus the resolved object ids to the final spec shape,
+        // failing with `Malformed` if a required side is absent.
+        fn kind_to_spec(
+            kind: Option<gix_revision::spec::Kind>,
+            [first, second]: [Option<ObjectId>; 2],
+        ) -> Result<gix_revision::Spec, Error> {
+            use gix_revision::spec::Kind::*;
+            Ok(match kind.unwrap_or_default() {
+                IncludeReachable => gix_revision::Spec::Include(first.ok_or(Error::Malformed)?),
+                ExcludeReachable => gix_revision::Spec::Exclude(first.ok_or(Error::Malformed)?),
+                RangeBetween => gix_revision::Spec::Range {
+                    from: first.ok_or(Error::Malformed)?,
+                    to: second.ok_or(Error::Malformed)?,
+                },
+                ReachableToMergeBase => gix_revision::Spec::Merge {
+                    theirs: first.ok_or(Error::Malformed)?,
+                    ours: second.ok_or(Error::Malformed)?,
+                },
+                IncludeReachableFromParents => gix_revision::Spec::IncludeOnlyParents(first.ok_or(Error::Malformed)?),
+                ExcludeReachableFromParents => gix_revision::Spec::ExcludeParents(first.ok_or(Error::Malformed)?),
+            })
+        }
+
+        let range = zero_or_one_objects_or_ambiguity_err(self.objs, self.prefix, self.err, self.repo)?;
+        Ok(crate::revision::Spec {
+            first_ref: self.refs[0].take(),
+            second_ref: self.refs[1].take(),
+            inner: kind_to_spec(self.kind, range)?,
+            repo: self.repo,
+        })
+    }
+}
+
+impl<'repo> parse::Delegate for Delegate<'repo> {
+    /// Called by the parser once the spec is fully parsed: resolve any pending refs
+    /// to objects, then apply a final disambiguation pass. A committish hint takes
+    /// precedence when the spec kind demands commits; otherwise the configured
+    /// `object_kind_hint` (if any) is used.
+    fn done(&mut self) {
+        self.follow_refs_to_objects_if_needed();
+        self.disambiguate_objects_by_fallback_hint(
+            self.kind_implies_committish()
+                .then_some(ObjectKindHint::Committish)
+                .or(self.opts.object_kind_hint),
+        );
+    }
+}
+
+impl<'repo> delegate::Kind for Delegate<'repo> {
+    /// Record the kind of the spec (e.g. range, exclusion) as reported by the parser.
+    fn kind(&mut self, kind: gix_revision::spec::Kind) -> Option<()> {
+        use gix_revision::spec::Kind::*;
+        self.kind = Some(kind);
+
+        // Any non-default kind requires commits, so disambiguate accordingly right away.
+        if self.kind_implies_committish() {
+            self.disambiguate_objects_by_fallback_hint(ObjectKindHint::Committish.into());
+        }
+        // Two-sided specs switch accumulation to the second slot from here on.
+        if matches!(kind, RangeBetween | ReachableToMergeBase) {
+            self.idx += 1;
+        }
+
+        Some(())
+    }
+}
+
+/// Internal helpers shared by the various delegate trait implementations.
+impl<'repo> Delegate<'repo> {
+    /// True if the spec kind (default: `IncludeReachable`) requires commit objects,
+    /// i.e. anything other than the plain single-rev include.
+    fn kind_implies_committish(&self) -> bool {
+        self.kind.unwrap_or(gix_revision::spec::Kind::IncludeReachable) != gix_revision::spec::Kind::IncludeReachable
+    }
+    /// If the previous parser callback was `disambiguate_prefix()`, narrow the current
+    /// candidate set using `hint`, recording per-candidate errors for mismatches.
+    fn disambiguate_objects_by_fallback_hint(&mut self, hint: Option<ObjectKindHint>) {
+        // Verify `obj` is exactly of `kind`, without peeling.
+        fn require_object_kind(repo: &Repository, obj: &gix_hash::oid, kind: gix_object::Kind) -> Result<(), Error> {
+            let obj = repo.find_object(obj)?;
+            if obj.kind == kind {
+                Ok(())
+            } else {
+                Err(Error::ObjectKind {
+                    actual: obj.kind,
+                    expected: kind,
+                    oid: obj.id.attach(repo).shorten_or_id(),
+                })
+            }
+        }
+
+        if self.last_call_was_disambiguate_prefix[self.idx] {
+            self.unset_disambiguate_call();
+
+            if let Some(objs) = self.objs[self.idx].as_mut() {
+                let repo = self.repo;
+                let errors: Vec<_> = match hint {
+                    Some(kind_hint) => match kind_hint {
+                        // '…ish' hints peel candidates towards the desired kind.
+                        ObjectKindHint::Treeish | ObjectKindHint::Committish => {
+                            let kind = match kind_hint {
+                                ObjectKindHint::Treeish => gix_object::Kind::Tree,
+                                ObjectKindHint::Committish => gix_object::Kind::Commit,
+                                _ => unreachable!("BUG: we narrow possibilities above"),
+                            };
+                            objs.iter()
+                                .filter_map(|obj| peel(repo, obj, kind).err().map(|err| (*obj, err)))
+                                .collect()
+                        }
+                        // Exact-kind hints require the candidate to be of that kind as-is.
+                        ObjectKindHint::Tree | ObjectKindHint::Commit | ObjectKindHint::Blob => {
+                            let kind = match kind_hint {
+                                ObjectKindHint::Tree => gix_object::Kind::Tree,
+                                ObjectKindHint::Commit => gix_object::Kind::Commit,
+                                ObjectKindHint::Blob => gix_object::Kind::Blob,
+                                _ => unreachable!("BUG: we narrow possibilities above"),
+                            };
+                            objs.iter()
+                                .filter_map(|obj| require_object_kind(repo, obj, kind).err().map(|err| (*obj, err)))
+                                .collect()
+                        }
+                    },
+                    None => return,
+                };
+
+                // If every candidate failed, keep them all and surface the errors;
+                // otherwise drop only the failing candidates.
+                if errors.len() == objs.len() {
+                    self.err.extend(errors.into_iter().map(|(_, err)| err));
+                } else {
+                    for (obj, err) in errors {
+                        objs.remove(&obj);
+                        self.err.push(err);
+                    }
+                }
+            }
+        }
+    }
+    /// For each slot with a ref but no objects yet, resolve the ref to an object id
+    /// (direct target, or by peeling symbolic refs) and seed the candidate set with it.
+    fn follow_refs_to_objects_if_needed(&mut self) -> Option<()> {
+        assert_eq!(self.refs.len(), self.objs.len());
+        let repo = self.repo;
+        for (r, obj) in self.refs.iter().zip(self.objs.iter_mut()) {
+            if let (_ref_opt @ Some(ref_), obj_opt @ None) = (r, obj) {
+                if let Some(id) = ref_.target.try_id().map(ToOwned::to_owned).or_else(|| {
+                    ref_.clone()
+                        .attach(repo)
+                        .peel_to_id_in_place()
+                        .ok()
+                        .map(|id| id.detach())
+                }) {
+                    obj_opt.get_or_insert_with(HashSet::default).insert(id);
+                };
+            };
+        }
+        Some(())
+    }
+
+    /// Clear the 'last call was disambiguate_prefix' flag for the current slot.
+    fn unset_disambiguate_call(&mut self) {
+        self.last_call_was_disambiguate_prefix[self.idx] = false;
+    }
+}
+
+/// Look up `obj` in `repo` and peel it until an object of `kind` is reached,
+/// returning the id of the peeled object.
+fn peel(repo: &Repository, obj: &gix_hash::oid, kind: gix_object::Kind) -> Result<ObjectId, Error> {
+    let peeled = repo.find_object(obj)?.peel_to_kind(kind)?;
+    debug_assert_eq!(peeled.kind, kind, "bug in Object::peel_to_kind() which didn't deliver");
+    Ok(peeled.id)
+}
+
+/// Apply the outcome of a per-candidate operation to the candidate set `objs`.
+///
+/// If *every* candidate produced an error, all errors are moved into `destination`
+/// and `None` is returned to signal failure. Otherwise only the failing candidates
+/// are removed (their errors recorded), the queued `replacements` are applied
+/// (old id removed, new id inserted), and `Some(())` signals success.
+fn handle_errors_and_replacements(
+    destination: &mut Vec<Error>,
+    objs: &mut HashSet<ObjectId>,
+    errors: Vec<(ObjectId, Error)>,
+    replacements: &mut Replacements,
+) -> Option<()> {
+    let everything_failed = errors.len() == objs.len();
+    if everything_failed {
+        destination.extend(errors.into_iter().map(|(_, err)| err));
+        return None;
+    }
+    for (failed_obj, err) in errors {
+        objs.remove(&failed_obj);
+        destination.push(err);
+    }
+    for (old, new) in replacements.iter() {
+        objs.remove(old);
+        objs.insert(*new);
+    }
+    Some(())
+}
+
+mod navigate;
+mod revision;
diff --git a/vendor/gix/src/revision/spec/parse/delegate/navigate.rs b/vendor/gix/src/revision/spec/parse/delegate/navigate.rs
new file mode 100644
index 000000000..882c2835c
--- /dev/null
+++ b/vendor/gix/src/revision/spec/parse/delegate/navigate.rs
@@ -0,0 +1,340 @@
+use std::collections::HashSet;
+
+use gix_hash::ObjectId;
+use gix_revision::spec::parse::{
+ delegate,
+ delegate::{PeelTo, Traversal},
+};
+use gix_traverse::commit::Sorting;
+
+use crate::{
+ bstr::{BStr, ByteSlice},
+ ext::ObjectIdExt,
+ object,
+ revision::spec::parse::{
+ delegate::{handle_errors_and_replacements, peel, Replacements},
+ Delegate, Error,
+ },
+};
+
+/// Navigation callbacks: parent/ancestor traversal, peeling, regex search and index lookup.
+impl<'repo> delegate::Navigate for Delegate<'repo> {
+    /// Resolve `^<n>` (nth parent) and `~<n>` (nth ancestor) navigation for every
+    /// current candidate, queueing replacements or per-candidate errors.
+    fn traverse(&mut self, kind: Traversal) -> Option<()> {
+        self.unset_disambiguate_call();
+        self.follow_refs_to_objects_if_needed()?;
+
+        let mut replacements = Replacements::default();
+        let mut errors = Vec::new();
+        let objs = self.objs[self.idx].as_mut()?;
+        let repo = self.repo;
+
+        for obj in objs.iter() {
+            match kind {
+                Traversal::NthParent(num) => {
+                    // The candidate must be a commit to have parents.
+                    match self.repo.find_object(*obj).map_err(Error::from).and_then(|obj| {
+                        obj.try_into_commit().map_err(|err| {
+                            let object::try_into::Error { actual, expected, id } = err;
+                            Error::ObjectKind {
+                                oid: id.attach(repo).shorten_or_id(),
+                                actual,
+                                expected,
+                            }
+                        })
+                    }) {
+                        // `num` is 1-based; saturate so `^0` behaves like `^1` here.
+                        Ok(commit) => match commit.parent_ids().nth(num.saturating_sub(1)) {
+                            Some(id) => replacements.push((commit.id, id.detach())),
+                            None => errors.push((
+                                commit.id,
+                                Error::ParentOutOfRange {
+                                    oid: commit.id().shorten_or_id(),
+                                    desired: num,
+                                    available: commit.parent_ids().count(),
+                                },
+                            )),
+                        },
+                        Err(err) => errors.push((*obj, err)),
+                    }
+                }
+                Traversal::NthAncestor(num) => {
+                    // Walk `num` steps along the first-parent chain.
+                    let id = obj.attach(repo);
+                    match id
+                        .ancestors()
+                        .first_parent_only()
+                        .all()
+                        .expect("cannot fail without sorting")
+                        .skip(num)
+                        .filter_map(Result::ok)
+                        .next()
+                    {
+                        Some(id) => replacements.push((*obj, id.detach())),
+                        None => errors.push((
+                            *obj,
+                            Error::AncestorOutOfRange {
+                                oid: id.shorten_or_id(),
+                                desired: num,
+                                // Count remaining first-parent ancestors (excluding self) for the message.
+                                available: id
+                                    .ancestors()
+                                    .first_parent_only()
+                                    .all()
+                                    .expect("cannot fail without sorting")
+                                    .skip(1)
+                                    .count(),
+                            },
+                        )),
+                    }
+                }
+            }
+        }
+
+        handle_errors_and_replacements(&mut self.err, objs, errors, &mut replacements)
+    }
+
+    /// Resolve peeling suffixes like `^{commit}`, `^{}`, or `:<path>` for all candidates.
+    fn peel_until(&mut self, kind: PeelTo<'_>) -> Option<()> {
+        self.unset_disambiguate_call();
+        self.follow_refs_to_objects_if_needed()?;
+
+        let mut replacements = Replacements::default();
+        let mut errors = Vec::new();
+        let objs = self.objs[self.idx].as_mut()?;
+        let repo = self.repo;
+
+        match kind {
+            // `^{object}`: just verify the object exists; no replacement needed.
+            PeelTo::ValidObject => {
+                for obj in objs.iter() {
+                    match repo.find_object(*obj) {
+                        Ok(_) => {}
+                        Err(err) => {
+                            errors.push((*obj, err.into()));
+                        }
+                    };
+                }
+            }
+            // `^{<kind>}`: peel each candidate to the requested object kind.
+            PeelTo::ObjectKind(kind) => {
+                let peel = |obj| peel(repo, obj, kind);
+                for obj in objs.iter() {
+                    match peel(obj) {
+                        Ok(replace) => replacements.push((*obj, replace)),
+                        Err(err) => errors.push((*obj, err)),
+                    }
+                }
+            }
+            // `:<path>`: look up `path` within the candidate's tree; an empty path
+            // yields the tree itself.
+            PeelTo::Path(path) => {
+                let lookup_path = |obj: &ObjectId| {
+                    let tree_id = peel(repo, obj, gix_object::Kind::Tree)?;
+                    if path.is_empty() {
+                        return Ok(tree_id);
+                    }
+                    let tree = repo.find_object(tree_id)?.into_tree();
+                    let entry =
+                        tree.lookup_entry_by_path(gix_path::from_bstr(path))?
+                            .ok_or_else(|| Error::PathNotFound {
+                                path: path.into(),
+                                object: obj.attach(repo).shorten_or_id(),
+                                tree: tree_id.attach(repo).shorten_or_id(),
+                            })?;
+                    Ok(entry.object_id())
+                };
+                for obj in objs.iter() {
+                    match lookup_path(obj) {
+                        Ok(replace) => replacements.push((*obj, replace)),
+                        Err(err) => errors.push((*obj, err)),
+                    }
+                }
+            }
+            // `^{}`: recursively dereference tags until a non-tag object remains.
+            PeelTo::RecursiveTagObject => {
+                for oid in objs.iter() {
+                    match oid.attach(repo).object().and_then(|obj| obj.peel_tags_to_end()) {
+                        Ok(obj) => replacements.push((*oid, obj.id)),
+                        Err(err) => errors.push((*oid, err.into())),
+                    }
+                }
+            }
+        }
+
+        handle_errors_and_replacements(&mut self.err, objs, errors, &mut replacements)
+    }
+
+    /// Resolve `^{/regex}` (per candidate) or `:/regex` (across all refs): find the newest
+    /// commit whose message matches `regex`, honoring `negated` matching.
+    fn find(&mut self, regex: &BStr, negated: bool) -> Option<()> {
+        self.unset_disambiguate_call();
+        self.follow_refs_to_objects_if_needed()?;
+
+        // Without the "regex" feature, fall back to plain substring search.
+        #[cfg(not(feature = "regex"))]
+        let matches = |message: &BStr| -> bool { message.contains_str(regex) ^ negated };
+        // With the "regex" feature, only use the compiled regex if the pattern
+        // actually contains meta-characters; otherwise substring search is faster.
+        #[cfg(feature = "regex")]
+        let matches = match regex::bytes::Regex::new(regex.to_str_lossy().as_ref()) {
+            Ok(compiled) => {
+                let needs_regex = regex::escape(compiled.as_str()) != regex;
+                move |message: &BStr| -> bool {
+                    if needs_regex {
+                        compiled.is_match(message) ^ negated
+                    } else {
+                        message.contains_str(regex) ^ negated
+                    }
+                }
+            }
+            Err(err) => {
+                self.err.push(err.into());
+                return None;
+            }
+        };
+
+        match self.objs[self.idx].as_mut() {
+            // `^{/regex}`: search the ancestry of each existing candidate.
+            Some(objs) => {
+                let repo = self.repo;
+                let mut errors = Vec::new();
+                let mut replacements = Replacements::default();
+                for oid in objs.iter() {
+                    match oid
+                        .attach(repo)
+                        .ancestors()
+                        .sorting(Sorting::ByCommitTimeNewestFirst)
+                        .all()
+                    {
+                        Ok(iter) => {
+                            let mut matched = false;
+                            let mut count = 0;
+                            let commits = iter.map(|res| {
+                                res.map_err(Error::from).and_then(|commit_id| {
+                                    commit_id.object().map_err(Error::from).map(|obj| obj.into_commit())
+                                })
+                            });
+                            for commit in commits {
+                                count += 1;
+                                match commit {
+                                    Ok(commit) => {
+                                        if matches(commit.message_raw_sloppy()) {
+                                            replacements.push((*oid, commit.id));
+                                            matched = true;
+                                            break;
+                                        }
+                                    }
+                                    Err(err) => errors.push((*oid, err)),
+                                }
+                            }
+                            if !matched {
+                                errors.push((
+                                    *oid,
+                                    Error::NoRegexMatch {
+                                        regex: regex.into(),
+                                        commits_searched: count,
+                                        oid: oid.attach(repo).shorten_or_id(),
+                                    },
+                                ))
+                            }
+                        }
+                        Err(err) => errors.push((*oid, err.into())),
+                    }
+                }
+                handle_errors_and_replacements(&mut self.err, objs, errors, &mut replacements)
+            }
+            // `:/regex`: no candidates yet - search commits reachable from all references.
+            None => match self.repo.references() {
+                Ok(references) => match references.all() {
+                    Ok(references) => {
+                        match self
+                            .repo
+                            .rev_walk(
+                                references
+                                    .peeled()
+                                    .filter_map(Result::ok)
+                                    // Only commits can start a rev-walk; skip refs pointing at other kinds.
+                                    .filter(|r| {
+                                        r.id()
+                                            .object()
+                                            .ok()
+                                            .map(|obj| obj.kind == gix_object::Kind::Commit)
+                                            .unwrap_or(false)
+                                    })
+                                    .filter_map(|r| r.detach().peeled),
+                            )
+                            .sorting(Sorting::ByCommitTimeNewestFirst)
+                            .all()
+                        {
+                            Ok(iter) => {
+                                let mut matched = false;
+                                let mut count = 0;
+                                let commits = iter.map(|res| {
+                                    res.map_err(Error::from).and_then(|commit_id| {
+                                        commit_id.object().map_err(Error::from).map(|obj| obj.into_commit())
+                                    })
+                                });
+                                for commit in commits {
+                                    count += 1;
+                                    match commit {
+                                        Ok(commit) => {
+                                            if matches(commit.message_raw_sloppy()) {
+                                                self.objs[self.idx]
+                                                    .get_or_insert_with(HashSet::default)
+                                                    .insert(commit.id);
+                                                matched = true;
+                                                break;
+                                            }
+                                        }
+                                        Err(err) => self.err.push(err),
+                                    }
+                                }
+                                if matched {
+                                    Some(())
+                                } else {
+                                    self.err.push(Error::NoRegexMatchAllRefs {
+                                        regex: regex.into(),
+                                        commits_searched: count,
+                                    });
+                                    None
+                                }
+                            }
+                            Err(err) => {
+                                self.err.push(err.into());
+                                None
+                            }
+                        }
+                    }
+                    Err(err) => {
+                        self.err.push(err.into());
+                        None
+                    }
+                },
+                Err(err) => {
+                    self.err.push(err.into());
+                    None
+                }
+            },
+        }
+    }
+
+    /// Resolve `:<stage>:<path>`: look up the blob recorded for `path` at `stage`
+    /// in the repository index, producing a helpful error if it isn't there.
+    fn index_lookup(&mut self, path: &BStr, stage: u8) -> Option<()> {
+        self.unset_disambiguate_call();
+        match self.repo.index() {
+            Ok(index) => match index.entry_by_path_and_stage(path, stage.into()) {
+                Some(entry) => {
+                    self.objs[self.idx]
+                        .get_or_insert_with(HashSet::default)
+                        .insert(entry.id);
+                    Some(())
+                }
+                None => {
+                    // For a better error, check whether the path exists at another stage…
+                    let stage_hint = [0, 1, 2]
+                        .iter()
+                        .filter(|our_stage| **our_stage != stage)
+                        .find_map(|stage| {
+                            index
+                                .entry_index_by_path_and_stage(path, (*stage).into())
+                                .map(|_| (*stage).into())
+                        });
+                    // …and whether it exists on disk in the work-tree.
+                    let exists = self
+                        .repo
+                        .work_dir()
+                        .map_or(false, |root| root.join(gix_path::from_bstr(path)).exists());
+                    self.err.push(Error::IndexLookup {
+                        desired_path: path.into(),
+                        desired_stage: stage.into(),
+                        exists,
+                        stage_hint,
+                    });
+                    None
+                }
+            },
+            Err(err) => {
+                self.err.push(err.into());
+                None
+            }
+        }
+    }
+}
diff --git a/vendor/gix/src/revision/spec/parse/delegate/revision.rs b/vendor/gix/src/revision/spec/parse/delegate/revision.rs
new file mode 100644
index 000000000..7ea691a28
--- /dev/null
+++ b/vendor/gix/src/revision/spec/parse/delegate/revision.rs
@@ -0,0 +1,225 @@
+use std::collections::HashSet;
+
+use gix_hash::ObjectId;
+use gix_revision::spec::parse::{
+ delegate,
+ delegate::{ReflogLookup, SiblingBranch},
+};
+
+use crate::{
+ bstr::{BStr, BString, ByteSlice},
+ ext::ReferenceExt,
+ revision::spec::parse::{Delegate, Error, RefsHint},
+};
+
+/// Revision callbacks: ref lookup, prefix disambiguation, reflog and checkout-history queries.
+impl<'repo> delegate::Revision for Delegate<'repo> {
+    /// Resolve a full reference `name` and store it in the current slot.
+    fn find_ref(&mut self, name: &BStr) -> Option<()> {
+        self.unset_disambiguate_call();
+        // If errors were already recorded and a ref is set, don't overwrite prior state.
+        if !self.err.is_empty() && self.refs[self.idx].is_some() {
+            return None;
+        }
+        match self.repo.refs.find(name) {
+            Ok(r) => {
+                assert!(self.refs[self.idx].is_none(), "BUG: cannot set the same ref twice");
+                self.refs[self.idx] = Some(r);
+                Some(())
+            }
+            Err(err) => {
+                self.err.push(err.into());
+                None
+            }
+        }
+    }
+
+    /// Resolve a hex `prefix` to candidate objects, applying the configured
+    /// [`RefsHint`] policy when a reference of the same name also exists.
+    fn disambiguate_prefix(
+        &mut self,
+        prefix: gix_hash::Prefix,
+        _must_be_commit: Option<delegate::PrefixHint<'_>>,
+    ) -> Option<()> {
+        self.last_call_was_disambiguate_prefix[self.idx] = true;
+        let mut candidates = Some(HashSet::default());
+        self.prefix[self.idx] = Some(prefix);
+
+        // The empty tree is special: it exists even if not present in the object database.
+        let empty_tree_id = gix_hash::ObjectId::empty_tree(prefix.as_oid().kind());
+        let res = if prefix.as_oid() == empty_tree_id {
+            candidates.as_mut().expect("set").insert(empty_tree_id);
+            Ok(Some(Err(())))
+        } else {
+            self.repo.objects.lookup_prefix(prefix, candidates.as_mut())
+        };
+
+        match res {
+            Err(err) => {
+                self.err.push(err.into());
+                None
+            }
+            Ok(None) => {
+                self.err.push(Error::PrefixNotFound { prefix });
+                None
+            }
+            Ok(Some(Ok(_) | Err(()))) => {
+                assert!(self.objs[self.idx].is_none(), "BUG: cannot set the same prefix twice");
+                let candidates = candidates.expect("set above");
+                match self.opts.refs_hint {
+                    // Full-length hashes always win over same-named refs under this hint.
+                    RefsHint::PreferObjectOnFullLengthHexShaUseRefOtherwise
+                        if prefix.hex_len() == candidates.iter().next().expect("at least one").kind().len_in_hex() =>
+                    {
+                        self.ambiguous_objects[self.idx] = Some(candidates.clone());
+                        self.objs[self.idx] = Some(candidates);
+                        Some(())
+                    }
+                    RefsHint::PreferObject => {
+                        self.ambiguous_objects[self.idx] = Some(candidates.clone());
+                        self.objs[self.idx] = Some(candidates);
+                        Some(())
+                    }
+                    // Otherwise check if a ref with the prefix's exact name exists.
+                    RefsHint::PreferRef | RefsHint::PreferObjectOnFullLengthHexShaUseRefOtherwise | RefsHint::Fail => {
+                        match self.repo.refs.find(&prefix.to_string()) {
+                            Ok(ref_) => {
+                                assert!(self.refs[self.idx].is_none(), "BUG: cannot set the same ref twice");
+                                if self.opts.refs_hint == RefsHint::Fail {
+                                    self.refs[self.idx] = Some(ref_.clone());
+                                    self.err.push(Error::AmbiguousRefAndObject {
+                                        prefix,
+                                        reference: ref_,
+                                    });
+                                    self.err.push(Error::ambiguous(candidates, prefix, self.repo));
+                                    None
+                                } else {
+                                    self.refs[self.idx] = Some(ref_);
+                                    Some(())
+                                }
+                            }
+                            // No such ref - use the object candidates.
+                            Err(_) => {
+                                self.ambiguous_objects[self.idx] = Some(candidates.clone());
+                                self.objs[self.idx] = Some(candidates);
+                                Some(())
+                            }
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+    /// Resolve `@{n}` reflog lookups for the current ref (or `HEAD` if none is set).
+    /// Date-based lookups (`@{<date>}`) are not implemented yet.
+    fn reflog(&mut self, query: ReflogLookup) -> Option<()> {
+        self.unset_disambiguate_call();
+        match query {
+            ReflogLookup::Date(_date) => {
+                self.err.push(Error::Planned {
+                    dependency: "remote handling and ref-specs are fleshed out more",
+                });
+                None
+            }
+            ReflogLookup::Entry(no) => {
+                let r = match &mut self.refs[self.idx] {
+                    Some(r) => r.clone().attach(self.repo),
+                    // No ref yet - fall back to HEAD's referent and remember it.
+                    val @ None => match self.repo.head().map(|head| head.try_into_referent()) {
+                        Ok(Some(r)) => {
+                            *val = Some(r.clone().detach());
+                            r
+                        }
+                        Ok(None) => {
+                            self.err.push(Error::UnbornHeadsHaveNoRefLog);
+                            return None;
+                        }
+                        Err(err) => {
+                            self.err.push(err.into());
+                            return None;
+                        }
+                    },
+                };
+                let mut platform = r.log_iter();
+                match platform.rev().ok().flatten() {
+                    Some(mut it) => match it.nth(no).and_then(Result::ok) {
+                        Some(line) => {
+                            self.objs[self.idx]
+                                .get_or_insert_with(HashSet::default)
+                                .insert(line.new_oid);
+                            Some(())
+                        }
+                        None => {
+                            let available = platform.rev().ok().flatten().map_or(0, |it| it.count());
+                            self.err.push(Error::RefLogEntryOutOfRange {
+                                reference: r.detach(),
+                                desired: no,
+                                available,
+                            });
+                            None
+                        }
+                    },
+                    None => {
+                        self.err.push(Error::MissingRefLog {
+                            reference: r.name().as_bstr().into(),
+                            action: "lookup entry",
+                        });
+                        None
+                    }
+                }
+            }
+        }
+    }
+
+    /// Resolve `@{-n}`: the branch checked out `branch_no` checkouts ago,
+    /// reconstructed from HEAD's reflog 'checkout: moving from … to …' entries.
+    fn nth_checked_out_branch(&mut self, branch_no: usize) -> Option<()> {
+        self.unset_disambiguate_call();
+        // Iterate prior checkouts (newest first) as (source branch name, previous oid) pairs.
+        fn prior_checkouts_iter<'a>(
+            platform: &'a mut gix_ref::file::log::iter::Platform<'static, '_>,
+        ) -> Result<impl Iterator<Item = (BString, ObjectId)> + 'a, Error> {
+            match platform.rev().ok().flatten() {
+                Some(log) => Ok(log.filter_map(Result::ok).filter_map(|line| {
+                    line.message
+                        .strip_prefix(b"checkout: moving from ")
+                        .and_then(|from_to| from_to.find(" to ").map(|pos| &from_to[..pos]))
+                        .map(|from_branch| (from_branch.into(), line.previous_oid))
+                })),
+                None => Err(Error::MissingRefLog {
+                    reference: "HEAD".into(),
+                    action: "search prior checked out branch",
+                }),
+            }
+        }
+
+        let head = match self.repo.head() {
+            Ok(head) => head,
+            Err(err) => {
+                self.err.push(err.into());
+                return None;
+            }
+        };
+        match prior_checkouts_iter(&mut head.log_iter()).map(|mut it| it.nth(branch_no.saturating_sub(1))) {
+            Ok(Some((ref_name, id))) => {
+                // Prefer the branch's current tip if the branch still exists,
+                // falling back to the oid recorded in the reflog.
+                let id = match self.repo.find_reference(ref_name.as_bstr()) {
+                    Ok(mut r) => {
+                        let id = r.peel_to_id_in_place().map(|id| id.detach()).unwrap_or(id);
+                        self.refs[self.idx] = Some(r.detach());
+                        id
+                    }
+                    Err(_) => id,
+                };
+                self.objs[self.idx].get_or_insert_with(HashSet::default).insert(id);
+                Some(())
+            }
+            Ok(None) => {
+                self.err.push(Error::PriorCheckoutOutOfRange {
+                    desired: branch_no,
+                    available: prior_checkouts_iter(&mut head.log_iter())
+                        .map(|it| it.count())
+                        .unwrap_or(0),
+                });
+                None
+            }
+            Err(err) => {
+                self.err.push(err);
+                None
+            }
+        }
+    }
+
+    /// Resolve `@{upstream}`/`@{push}` style sibling branches - not implemented yet.
+    fn sibling_branch(&mut self, _kind: SiblingBranch) -> Option<()> {
+        self.unset_disambiguate_call();
+        self.err.push(Error::Planned {
+            dependency: "remote handling and ref-specs are fleshed out more",
+        });
+        None
+    }
+}
diff --git a/vendor/gix/src/revision/spec/parse/error.rs b/vendor/gix/src/revision/spec/parse/error.rs
new file mode 100644
index 000000000..3af4697b0
--- /dev/null
+++ b/vendor/gix/src/revision/spec/parse/error.rs
@@ -0,0 +1,130 @@
+use std::collections::HashSet;
+
+use gix_hash::ObjectId;
+
+use super::Error;
+use crate::{bstr, bstr::BString, ext::ObjectIdExt, Repository};
+
+/// Additional information about candidates that caused ambiguity.
+///
+/// Used in [`Error::AmbiguousPrefix`][super::Error::AmbiguousPrefix] to describe
+/// each matching object to the user.
+#[derive(Debug)]
+pub enum CandidateInfo {
+    /// An error occurred when looking up the object in the database.
+    FindError {
+        /// The reported error.
+        source: crate::object::find::existing::Error,
+    },
+    /// The candidate is an object of the given `kind` (used for trees and blobs).
+    Object {
+        /// The kind of the object.
+        kind: gix_object::Kind,
+    },
+    /// The candidate is a tag.
+    Tag {
+        /// The name of the tag.
+        name: BString,
+    },
+    /// The candidate is a commit.
+    Commit {
+        /// The date of the commit.
+        date: gix_date::Time,
+        /// The subject line.
+        title: BString,
+    },
+}
+
+/// Human-readable, single-line description of an ambiguity candidate.
+impl std::fmt::Display for CandidateInfo {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            CandidateInfo::FindError { source: err } => write!(f, "lookup error: {err}"),
+            CandidateInfo::Object { kind } => std::fmt::Display::fmt(kind, f),
+            CandidateInfo::Tag { name } => write!(f, "tag {name:?}"),
+            CandidateInfo::Commit { date, title } => {
+                write!(f, "commit {} {title:?}", date.format(gix_date::time::format::SHORT))
+            }
+        }
+    }
+}
+
+impl Error {
+    /// Build an [`Error::AmbiguousPrefix`] from the `candidates` matching `prefix`,
+    /// describing each candidate via a lookup in `repo`.
+    /// Candidates are listed in a stable order: tags, commits, trees, blobs,
+    /// then objects that failed to resolve; ties break by object id.
+    pub(crate) fn ambiguous(candidates: HashSet<ObjectId>, prefix: gix_hash::Prefix, repo: &Repository) -> Self {
+        // Display order of candidates by object kind; derives Ord from variant order.
+        #[derive(PartialOrd, Ord, Eq, PartialEq, Copy, Clone)]
+        enum Order {
+            Tag,
+            Commit,
+            Tree,
+            Blob,
+            Invalid,
+        }
+        let candidates = {
+            let mut c: Vec<_> = candidates
+                .into_iter()
+                .map(|oid| {
+                    let obj = repo.find_object(oid);
+                    let order = match &obj {
+                        Err(_) => Order::Invalid,
+                        Ok(obj) => match obj.kind {
+                            gix_object::Kind::Tag => Order::Tag,
+                            gix_object::Kind::Commit => Order::Commit,
+                            gix_object::Kind::Tree => Order::Tree,
+                            gix_object::Kind::Blob => Order::Blob,
+                        },
+                    };
+                    (oid, obj, order)
+                })
+                .collect();
+            c.sort_by(|lhs, rhs| lhs.2.cmp(&rhs.2).then_with(|| lhs.0.cmp(&rhs.0)));
+            c
+        };
+        Error::AmbiguousPrefix {
+            prefix,
+            info: candidates
+                .into_iter()
+                .map(|(oid, find_result, _)| {
+                    let info = match find_result {
+                        Ok(obj) => match obj.kind {
+                            gix_object::Kind::Tree | gix_object::Kind::Blob => CandidateInfo::Object { kind: obj.kind },
+                            gix_object::Kind::Tag => {
+                                let tag = obj.to_tag_ref();
+                                CandidateInfo::Tag { name: tag.name.into() }
+                            }
+                            gix_object::Kind::Commit => {
+                                use bstr::ByteSlice;
+                                let commit = obj.to_commit_ref();
+                                CandidateInfo::Commit {
+                                    date: commit.committer().time,
+                                    title: commit.message().title.trim().into(),
+                                }
+                            }
+                        },
+                        Err(err) => CandidateInfo::FindError { source: err },
+                    };
+                    // Show a shortened id if possible, the full id otherwise.
+                    (oid.attach(repo).shorten().unwrap_or_else(|_| oid.into()), info)
+                })
+                .collect(),
+        }
+    }
+
+    /// Collapse a non-empty list of errors into one: a single error is returned as-is,
+    /// multiple errors are chained via [`Error::Multi`] with the first error outermost.
+    pub(crate) fn from_errors(errors: Vec<Self>) -> Self {
+        assert!(!errors.is_empty());
+        match errors.len() {
+            0 => unreachable!(
+                "BUG: cannot create something from nothing, must have recorded some errors to call from_errors()"
+            ),
+            1 => errors.into_iter().next().expect("one"),
+            _ => {
+                // Build the `source()` chain back-to-front so the first error ends up on top.
+                let mut it = errors.into_iter().rev();
+                let mut recent = Error::Multi {
+                    current: Box::new(it.next().expect("at least one error")),
+                    next: None,
+                };
+                for err in it {
+                    recent = Error::Multi {
+                        current: Box::new(err),
+                        next: Some(Box::new(recent)),
+                    }
+                }
+                recent
+            }
+        }
+    }
+}
diff --git a/vendor/gix/src/revision/spec/parse/mod.rs b/vendor/gix/src/revision/spec/parse/mod.rs
new file mode 100644
index 000000000..f69ecc4af
--- /dev/null
+++ b/vendor/gix/src/revision/spec/parse/mod.rs
@@ -0,0 +1,61 @@
+use std::collections::HashSet;
+
+use gix_hash::ObjectId;
+use gix_revision::spec::parse;
+
+use crate::{bstr::BStr, revision::Spec, Repository};
+
+mod types;
+pub use types::{Error, ObjectKindHint, Options, RefsHint};
+
+/// Types related to resolving a rev-spec to exactly one object.
+pub mod single {
+    use crate::bstr::BString;
+
+    /// The error returned by [`crate::Repository::rev_parse_single()`].
+    #[derive(Debug, thiserror::Error)]
+    #[allow(missing_docs)]
+    pub enum Error {
+        #[error(transparent)]
+        Parse(#[from] super::Error),
+        #[error("revspec {spec:?} did not resolve to a single object")]
+        RangedRev { spec: BString },
+    }
+}
+
+///
+pub mod error;
+
+impl<'repo> Spec<'repo> {
+    /// Parse `spec` and use information from `repo` to resolve it, using `opts` to learn how to deal with ambiguity.
+    ///
+    /// Note that it's easier to use [`repo.rev_parse()`][Repository::rev_parse()] instead.
+    pub fn from_bstr<'a>(spec: impl Into<&'a BStr>, repo: &'repo Repository, opts: Options) -> Result<Self, Error> {
+        let mut delegate = Delegate::new(repo, opts);
+        match gix_revision::spec::parse(spec.into(), &mut delegate) {
+            // `Delegate` means our callbacks recorded the actual error(s) - extract them.
+            Err(parse::Error::Delegate) => Err(delegate.into_err()),
+            Err(err) => Err(err.into()),
+            Ok(()) => delegate.into_rev_spec(),
+        }
+    }
+}
+
+/// State accumulated while the rev-spec parser drives our delegate callbacks.
+/// All two-element arrays hold per-side state of a spec with up to two revisions (e.g. `a..b`).
+struct Delegate<'repo> {
+    /// The reference resolved for each side, if any.
+    refs: [Option<gix_ref::Reference>; 2],
+    /// The candidate object ids for each side.
+    objs: [Option<HashSet<ObjectId>>; 2],
+    /// The originally encountered ambiguous objects for potential later use in errors.
+    ambiguous_objects: [Option<HashSet<ObjectId>>; 2],
+    /// Which side (0 or 1) callbacks currently apply to.
+    idx: usize,
+    /// The kind of spec (range, exclusion, …) once reported by the parser.
+    kind: Option<gix_revision::spec::Kind>,
+
+    /// Options governing ref/object ambiguity resolution.
+    opts: Options,
+    /// Errors recorded by callbacks, to be folded into one on failure.
+    err: Vec<Error>,
+    /// The ambiguous prefix obtained during a call to `disambiguate_prefix()`.
+    prefix: [Option<gix_hash::Prefix>; 2],
+    /// If true, we didn't try to do any other transformation which might have helped with disambiguation.
+    last_call_was_disambiguate_prefix: [bool; 2],
+
+    /// The repository all lookups are performed against.
+    repo: &'repo Repository,
+}
+
+mod delegate;
diff --git a/vendor/gix/src/revision/spec/parse/types.rs b/vendor/gix/src/revision/spec/parse/types.rs
new file mode 100644
index 000000000..4e523ab14
--- /dev/null
+++ b/vendor/gix/src/revision/spec/parse/types.rs
@@ -0,0 +1,182 @@
+use crate::{bstr::BString, object, reference};
+
+/// A hint to know what to do if refs and object names are equal.
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+pub enum RefsHint {
+    /// This is the default: if the spec is a full-length hex hash of an existing object, use that object
+    /// even if a reference of the same name exists. The latter is not typical but can absolutely happen by accident.
+    /// If the object prefix is shorter than the maximum hash length of the repository, use the reference instead, which is
+    /// preferred as there are many valid object names like `beef` and `cafe` that are short and both valid and typical prefixes
+    /// for objects.
+    /// Git chooses this as default as well, even though it means that every object prefix is also looked up as ref.
+    PreferObjectOnFullLengthHexShaUseRefOtherwise,
+    /// No matter what, if it looks like an object prefix and has an object, use it.
+    /// Note that no ref-lookup is made here which is the fastest option.
+    PreferObject,
+    /// When an object is found for a given prefix, also check if a reference exists with that name and if it does,
+    /// use that moving forward.
+    PreferRef,
+    /// If there is an ambiguous situation, instead of silently choosing one over the other, fail instead.
+    Fail,
+}
+
+/// A hint to know which object kind to prefer if multiple objects match a prefix.
+///
+/// This disambiguation mechanism is applied only if there is no disambiguation hints in the spec itself,
+/// and may also be implied by the kind of the spec (e.g. ranges require committish objects).
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+pub enum ObjectKindHint {
+    /// Pick objects that are commits themselves.
+    Commit,
+    /// Pick objects that can be peeled into a commit, i.e. commits themselves or tags which are peeled until a commit is found.
+    Committish,
+    /// Pick objects that are trees themselves.
+    Tree,
+    /// Pick objects that can be peeled into a tree, i.e. trees themselves or tags which are peeled until a tree is found or commits
+    /// whose tree is chosen.
+    Treeish,
+    /// Pick objects that are blobs.
+    Blob,
+}
+
+/// The default mirrors Git's own behavior: full-length hashes resolve to objects,
+/// anything shorter prefers a reference of the same name.
+impl Default for RefsHint {
+    fn default() -> Self {
+        Self::PreferObjectOnFullLengthHexShaUseRefOtherwise
+    }
+}
+
+/// Options for use in [`revision::Spec::from_bstr()`][crate::revision::Spec::from_bstr()].
+#[derive(Debug, Default, Copy, Clone)]
+pub struct Options {
+    /// What to do if both refs and object names match the same input.
+    pub refs_hint: RefsHint,
+    /// The hint to use when encountering multiple objects matching a prefix.
+    ///
+    /// If `None`, the rev-spec itself must disambiguate the object by drilling down to desired kinds or applying
+    /// other disambiguating transformations.
+    pub object_kind_hint: Option<ObjectKindHint>,
+}
+
+/// The error returned by [`crate::Repository::rev_parse()`].
+// Most variants are self-describing through their `#[error]` message; `missing_docs`
+// is allowed for that reason.
+#[derive(Debug, thiserror::Error)]
+#[allow(missing_docs)]
+pub enum Error {
+    #[error("The rev-spec is malformed and misses a ref name")]
+    Malformed,
+    #[error("Unborn heads do not have a reflog yet")]
+    UnbornHeadsHaveNoRefLog,
+    #[error("This feature will be implemented once {dependency}")]
+    Planned { dependency: &'static str },
+    #[error("Reference {reference:?} does not have a reference log, cannot {action}")]
+    MissingRefLog { reference: BString, action: &'static str },
+    #[error("HEAD has {available} prior checkouts and checkout number {desired} is out of range")]
+    PriorCheckoutOutOfRange { desired: usize, available: usize },
+    #[error("Reference {:?} has {available} ref-log entries and entry number {desired} is out of range", reference.name.as_bstr())]
+    RefLogEntryOutOfRange {
+        reference: gix_ref::Reference,
+        desired: usize,
+        available: usize,
+    },
+    #[error(
+        "Commit {oid} has {available} ancestors along the first parent and ancestor number {desired} is out of range"
+    )]
+    AncestorOutOfRange {
+        oid: gix_hash::Prefix,
+        desired: usize,
+        available: usize,
+    },
+    #[error("Commit {oid} has {available} parents and parent number {desired} is out of range")]
+    ParentOutOfRange {
+        oid: gix_hash::Prefix,
+        desired: usize,
+        available: usize,
+    },
+    // The message appends hints about other stages and on-disk existence when available.
+    #[error("Path {desired_path:?} did not exist in index at stage {desired_stage}{}{}", stage_hint.map(|actual|format!(". It does exist at stage {actual}")).unwrap_or_default(), exists.then(|| ". It exists on disk").unwrap_or(". It does not exist on disk"))]
+    IndexLookup {
+        desired_path: BString,
+        desired_stage: gix_index::entry::Stage,
+        stage_hint: Option<gix_index::entry::Stage>,
+        exists: bool,
+    },
+    #[error(transparent)]
+    FindHead(#[from] reference::find::existing::Error),
+    #[error(transparent)]
+    Index(#[from] crate::worktree::open_index::Error),
+    #[error(transparent)]
+    RevWalkIterInit(#[from] crate::reference::iter::init::Error),
+    #[error(transparent)]
+    RevWalkAllReferences(#[from] gix_ref::packed::buffer::open::Error),
+    #[cfg(feature = "regex")]
+    #[error(transparent)]
+    InvalidRegex(#[from] regex::Error),
+    // The wording of the following two messages depends on whether real regex support is compiled in.
+    #[cfg_attr(
+        feature = "regex",
+        error("None of {commits_searched} commits from {oid} matched regex {regex:?}")
+    )]
+    #[cfg_attr(
+        not(feature = "regex"),
+        error("None of {commits_searched} commits from {oid} matched text {regex:?}")
+    )]
+    NoRegexMatch {
+        regex: BString,
+        oid: gix_hash::Prefix,
+        commits_searched: usize,
+    },
+    #[cfg_attr(
+        feature = "regex",
+        error("None of {commits_searched} commits reached from all references matched regex {regex:?}")
+    )]
+    #[cfg_attr(
+        not(feature = "regex"),
+        error("None of {commits_searched} commits reached from all references matched text {regex:?}")
+    )]
+    NoRegexMatchAllRefs { regex: BString, commits_searched: usize },
+    #[error(
+        "The short hash {prefix} matched both the reference {} and at least one object", reference.name)]
+    AmbiguousRefAndObject {
+        /// The prefix to look for.
+        prefix: gix_hash::Prefix,
+        /// The reference matching the prefix.
+        reference: gix_ref::Reference,
+    },
+    #[error(transparent)]
+    IdFromHex(#[from] gix_hash::decode::Error),
+    #[error(transparent)]
+    FindReference(#[from] gix_ref::file::find::existing::Error),
+    #[error(transparent)]
+    FindObject(#[from] object::find::existing::Error),
+    #[error(transparent)]
+    LookupPrefix(#[from] gix_odb::store::prefix::lookup::Error),
+    #[error(transparent)]
+    PeelToKind(#[from] object::peel::to_kind::Error),
+    #[error("Object {oid} was a {actual}, but needed it to be a {expected}")]
+    ObjectKind {
+        oid: gix_hash::Prefix,
+        actual: gix_object::Kind,
+        expected: gix_object::Kind,
+    },
+    #[error(transparent)]
+    Parse(#[from] gix_revision::spec::parse::Error),
+    #[error("An object prefixed {prefix} could not be found")]
+    PrefixNotFound { prefix: gix_hash::Prefix },
+    #[error("Short id {prefix} is ambiguous. Candidates are:\n{}", info.iter().map(|(oid, info)| format!("\t{oid} {info}")).collect::<Vec<_>>().join("\n"))]
+    AmbiguousPrefix {
+        prefix: gix_hash::Prefix,
+        info: Vec<(gix_hash::Prefix, super::error::CandidateInfo)>,
+    },
+    #[error("Could not find path {path:?} in tree {tree} of parent object {object}")]
+    PathNotFound {
+        object: gix_hash::Prefix,
+        tree: gix_hash::Prefix,
+        path: BString,
+    },
+    // Chain of errors produced by `Error::from_errors()`; `next` is exposed as `source()`.
+    #[error("{current}")]
+    Multi {
+        current: Box<dyn std::error::Error + Send + Sync + 'static>,
+        #[source]
+        next: Option<Box<dyn std::error::Error + Send + Sync + 'static>>,
+    },
+    #[error(transparent)]
+    Traverse(#[from] gix_traverse::commit::ancestors::Error),
+    #[error("Spec does not contain a single object id")]
+    SingleNotFound,
+}