Diffstat (limited to 'vendor/gix-odb/src')
-rw-r--r--  vendor/gix-odb/src/alternate/mod.rs                    15
-rw-r--r--  vendor/gix-odb/src/cache.rs                            38
-rw-r--r--  vendor/gix-odb/src/find.rs                             14
-rw-r--r--  vendor/gix-odb/src/lib.rs                              15
-rw-r--r--  vendor/gix-odb/src/sink.rs                             14
-rw-r--r--  vendor/gix-odb/src/store_impls/dynamic/find.rs         54
-rw-r--r--  vendor/gix-odb/src/store_impls/dynamic/handle.rs        7
-rw-r--r--  vendor/gix-odb/src/store_impls/dynamic/header.rs       18
-rw-r--r--  vendor/gix-odb/src/store_impls/dynamic/init.rs         10
-rw-r--r--  vendor/gix-odb/src/store_impls/dynamic/iter.rs          2
-rw-r--r--  vendor/gix-odb/src/store_impls/dynamic/load_index.rs   10
-rw-r--r--  vendor/gix-odb/src/store_impls/dynamic/mod.rs           2
-rw-r--r--  vendor/gix-odb/src/store_impls/dynamic/prefix.rs        5
-rw-r--r--  vendor/gix-odb/src/store_impls/dynamic/structure.rs     1
-rw-r--r--  vendor/gix-odb/src/store_impls/dynamic/types.rs         2
-rw-r--r--  vendor/gix-odb/src/store_impls/dynamic/verify.rs      225
-rw-r--r--  vendor/gix-odb/src/store_impls/dynamic/write.rs         7
-rw-r--r--  vendor/gix-odb/src/store_impls/loose/find.rs           18
-rw-r--r--  vendor/gix-odb/src/store_impls/loose/verify.rs          8
-rw-r--r--  vendor/gix-odb/src/store_impls/loose/write.rs          36
-rw-r--r--  vendor/gix-odb/src/traits.rs                          104
21 files changed, 291 insertions(+), 314 deletions(-)
diff --git a/vendor/gix-odb/src/alternate/mod.rs b/vendor/gix-odb/src/alternate/mod.rs
index c343ef5aa..c4e9fc8c0 100644
--- a/vendor/gix-odb/src/alternate/mod.rs
+++ b/vendor/gix-odb/src/alternate/mod.rs
@@ -42,21 +42,16 @@ pub enum Error {
/// If no alternate object database was resolved, the resulting `Vec` is empty (it is not an error
/// if there are no alternates).
/// It is an error once a repository is seen again as it would lead to a cycle.
-pub fn resolve(
- objects_directory: impl Into<PathBuf>,
- current_dir: impl AsRef<std::path::Path>,
-) -> Result<Vec<PathBuf>, Error> {
- let relative_base = objects_directory.into();
- let mut dirs = vec![(0, relative_base.clone())];
+pub fn resolve(objects_directory: PathBuf, current_dir: &std::path::Path) -> Result<Vec<PathBuf>, Error> {
+ let mut dirs = vec![(0, objects_directory.clone())];
let mut out = Vec::new();
- let cwd = current_dir.as_ref();
- let mut seen = vec![gix_path::realpath_opts(&relative_base, cwd, MAX_SYMLINKS)?];
+ let mut seen = vec![gix_path::realpath_opts(&objects_directory, current_dir, MAX_SYMLINKS)?];
while let Some((depth, dir)) = dirs.pop() {
match fs::read(dir.join("info").join("alternates")) {
Ok(input) => {
for path in parse::content(&input)?.into_iter() {
- let path = relative_base.join(path);
- let path_canonicalized = gix_path::realpath_opts(&path, cwd, MAX_SYMLINKS)?;
+ let path = objects_directory.join(path);
+ let path_canonicalized = gix_path::realpath_opts(&path, current_dir, MAX_SYMLINKS)?;
if seen.contains(&path_canonicalized) {
return Err(Error::Cycle(seen));
}
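Note: the `resolve()` signature above is now monomorphic, taking an owned `PathBuf` and a plain `&Path` instead of `impl Into`/`impl AsRef`, so the conversions move to the call site. A minimal caller sketch, mirroring the error mapping that `store_impls/dynamic/init.rs` applies further down; the `alternates_of` helper is illustrative only:

    use std::path::{Path, PathBuf};

    fn alternates_of(objects_dir: &Path) -> std::io::Result<Vec<PathBuf>> {
        let cwd = std::env::current_dir()?;
        // an owned PathBuf and a borrowed &Path are now required at the call site
        gix_odb::alternate::resolve(objects_dir.to_owned(), &cwd)
            .map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))
    }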
diff --git a/vendor/gix-odb/src/cache.rs b/vendor/gix-odb/src/cache.rs
index 8e108646f..87c8da4ed 100644
--- a/vendor/gix-odb/src/cache.rs
+++ b/vendor/gix-odb/src/cache.rs
@@ -133,7 +133,7 @@ impl<S> DerefMut for Cache<S> {
}
mod impls {
- use std::{io::Read, ops::DerefMut};
+ use std::{cell::RefCell, io::Read, ops::DerefMut};
use gix_hash::{oid, ObjectId};
use gix_object::{Data, Kind};
@@ -145,9 +145,7 @@ mod impls {
where
S: crate::Write,
{
- type Error = S::Error;
-
- fn write_stream(&self, kind: Kind, size: u64, from: impl Read) -> Result<ObjectId, Self::Error> {
+ fn write_stream(&self, kind: Kind, size: u64, from: &mut dyn Read) -> Result<ObjectId, crate::write::Error> {
self.inner.write_stream(kind, size, from)
}
}
@@ -156,13 +154,11 @@ mod impls {
where
S: gix_pack::Find,
{
- type Error = S::Error;
-
- fn contains(&self, id: impl AsRef<oid>) -> bool {
+ fn contains(&self, id: &oid) -> bool {
self.inner.contains(id)
}
- fn try_find<'a>(&self, id: impl AsRef<oid>, buffer: &'a mut Vec<u8>) -> Result<Option<Data<'a>>, Self::Error> {
+ fn try_find<'a>(&self, id: &oid, buffer: &'a mut Vec<u8>) -> Result<Option<Data<'a>>, crate::find::Error> {
gix_pack::Find::try_find(self, id, buffer).map(|t| t.map(|t| t.0))
}
}
@@ -171,9 +167,7 @@ mod impls {
where
S: crate::Header,
{
- type Error = S::Error;
-
- fn try_header(&self, id: impl AsRef<oid>) -> Result<Option<Header>, Self::Error> {
+ fn try_header(&self, id: &oid) -> Result<Option<Header>, crate::find::Error> {
self.inner.try_header(id)
}
}
@@ -182,18 +176,16 @@ mod impls {
where
S: gix_pack::Find,
{
- type Error = S::Error;
-
- fn contains(&self, id: impl AsRef<oid>) -> bool {
+ fn contains(&self, id: &oid) -> bool {
self.inner.contains(id)
}
fn try_find<'a>(
&self,
- id: impl AsRef<oid>,
+ id: &oid,
buffer: &'a mut Vec<u8>,
- ) -> Result<Option<(Data<'a>, Option<Location>)>, Self::Error> {
- match self.pack_cache.as_ref().map(|rc| rc.borrow_mut()) {
+ ) -> Result<Option<(Data<'a>, Option<Location>)>, crate::find::Error> {
+ match self.pack_cache.as_ref().map(RefCell::borrow_mut) {
Some(mut pack_cache) => self.try_find_cached(id, buffer, pack_cache.deref_mut()),
None => self.try_find_cached(id, buffer, &mut gix_pack::cache::Never),
}
@@ -201,25 +193,25 @@ mod impls {
fn try_find_cached<'a>(
&self,
- id: impl AsRef<oid>,
+ id: &oid,
buffer: &'a mut Vec<u8>,
- pack_cache: &mut impl gix_pack::cache::DecodeEntry,
- ) -> Result<Option<(Data<'a>, Option<gix_pack::data::entry::Location>)>, Self::Error> {
- if let Some(mut obj_cache) = self.object_cache.as_ref().map(|rc| rc.borrow_mut()) {
+ pack_cache: &mut dyn gix_pack::cache::DecodeEntry,
+ ) -> Result<Option<(Data<'a>, Option<gix_pack::data::entry::Location>)>, crate::find::Error> {
+ if let Some(mut obj_cache) = self.object_cache.as_ref().map(RefCell::borrow_mut) {
if let Some(kind) = obj_cache.get(&id.as_ref().to_owned(), buffer) {
return Ok(Some((Data::new(kind, buffer), None)));
}
}
let possibly_obj = self.inner.try_find_cached(id.as_ref(), buffer, pack_cache)?;
if let (Some(mut obj_cache), Some((obj, _location))) =
- (self.object_cache.as_ref().map(|rc| rc.borrow_mut()), &possibly_obj)
+ (self.object_cache.as_ref().map(RefCell::borrow_mut), &possibly_obj)
{
obj_cache.put(id.as_ref().to_owned(), obj.kind, obj.data);
}
Ok(possibly_obj)
}
- fn location_by_oid(&self, id: impl AsRef<oid>, buf: &mut Vec<u8>) -> Option<gix_pack::data::entry::Location> {
+ fn location_by_oid(&self, id: &oid, buf: &mut Vec<u8>) -> Option<gix_pack::data::entry::Location> {
self.inner.location_by_oid(id, buf)
}
diff --git a/vendor/gix-odb/src/find.rs b/vendor/gix-odb/src/find.rs
index 69eccbf04..bf807e27c 100644
--- a/vendor/gix-odb/src/find.rs
+++ b/vendor/gix-odb/src/find.rs
@@ -1,3 +1,5 @@
+/// The error type returned by the [`Find`](crate::Find) and [`Header`](crate::Header) traits.
+pub type Error = Box<dyn std::error::Error + Send + Sync + 'static>;
///
pub mod existing {
use gix_hash::ObjectId;
@@ -5,9 +7,9 @@ pub mod existing {
/// The error returned by the [`find(…)`][crate::FindExt::find()] trait methods.
#[derive(Debug, thiserror::Error)]
#[allow(missing_docs)]
- pub enum Error<T: std::error::Error + 'static> {
+ pub enum Error {
#[error(transparent)]
- Find(T),
+ Find(crate::find::Error),
#[error("An object with id {} could not be found", .oid)]
NotFound { oid: ObjectId },
}
@@ -20,9 +22,9 @@ pub mod existing_object {
/// The error returned by the various [`find_*()`][crate::FindExt::find_commit()] trait methods.
#[derive(Debug, thiserror::Error)]
#[allow(missing_docs)]
- pub enum Error<T: std::error::Error + 'static> {
+ pub enum Error {
#[error(transparent)]
- Find(T),
+ Find(crate::find::Error),
#[error(transparent)]
Decode(gix_object::decode::Error),
#[error("An object with id {oid} could not be found")]
@@ -39,9 +41,9 @@ pub mod existing_iter {
/// The error returned by the various [`find_*_iter()`][crate::FindExt::find_commit_iter()] trait methods.
#[derive(Debug, thiserror::Error)]
#[allow(missing_docs)]
- pub enum Error<T: std::error::Error + 'static> {
+ pub enum Error {
#[error(transparent)]
- Find(T),
+ Find(crate::find::Error),
#[error("An object with id {oid} could not be found")]
NotFound { oid: ObjectId },
#[error("Expected object of kind {expected}")]
diff --git a/vendor/gix-odb/src/lib.rs b/vendor/gix-odb/src/lib.rs
index e0beac548..5aee76ca2 100644
--- a/vendor/gix-odb/src/lib.rs
+++ b/vendor/gix-odb/src/lib.rs
@@ -76,6 +76,12 @@ mod traits;
pub use traits::{Find, FindExt, Header, HeaderExt, Write};
+///
+pub mod write {
+ /// The error type returned by the [`Write`](crate::Write) trait.
+ pub type Error = Box<dyn std::error::Error + Send + Sync + 'static>;
+}
+
/// A thread-local handle to access any object.
pub type Handle = Cache<store::Handle<OwnShared<Store>>>;
/// A thread-local handle to access any object, but thread-safe and independent of the actual type of `OwnShared` or feature toggles in `gix-features`.
@@ -142,11 +148,16 @@ pub fn at_opts(
replacements: impl IntoIterator<Item = (gix_hash::ObjectId, gix_hash::ObjectId)>,
options: store::init::Options,
) -> std::io::Result<Handle> {
- let handle = OwnShared::new(Store::at_opts(objects_dir, replacements, options)?).to_handle();
+ let handle = OwnShared::new(Store::at_opts(
+ objects_dir.into(),
+ &mut replacements.into_iter(),
+ options,
+ )?)
+ .to_handle();
Ok(Cache::from(handle))
}
/// Create a new cached handle to the object store.
pub fn at(objects_dir: impl Into<PathBuf>) -> std::io::Result<Handle> {
- at_opts(objects_dir, Vec::new().into_iter(), Default::default())
+ at_opts(objects_dir, Vec::new(), Default::default())
}
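Note: the public `at()`/`at_opts()` helpers keep their generic signatures and perform the `Into<PathBuf>`/iterator conversion themselves before calling the now-monomorphic `Store::at_opts()`. A hedged sketch of a caller; the `open_with_replacements` name is illustrative:

    fn open_with_replacements(
        objects_dir: &std::path::Path,
        replacements: Vec<(gix_hash::ObjectId, gix_hash::ObjectId)>,
    ) -> std::io::Result<gix_odb::Handle> {
        gix_odb::at_opts(objects_dir, replacements, Default::default())
    }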
diff --git a/vendor/gix-odb/src/sink.rs b/vendor/gix-odb/src/sink.rs
index 44a406151..f6334a51c 100644
--- a/vendor/gix-odb/src/sink.rs
+++ b/vendor/gix-odb/src/sink.rs
@@ -21,14 +21,12 @@ impl Sink {
}
impl crate::traits::Write for Sink {
- type Error = io::Error;
-
fn write_stream(
&self,
kind: gix_object::Kind,
size: u64,
- mut from: impl io::Read,
- ) -> Result<gix_hash::ObjectId, Self::Error> {
+ from: &mut dyn io::Read,
+ ) -> Result<gix_hash::ObjectId, crate::write::Error> {
let mut size = size.try_into().expect("object size to fit into usize");
let mut buf = [0u8; 8096];
let header = gix_object::encode::loose_header(kind, size);
@@ -42,18 +40,18 @@ impl crate::traits::Write for Sink {
let mut hasher = gix_features::hash::hasher(self.object_hash);
hasher.update(&header);
- possibly_compress(&header)?;
+ possibly_compress(&header).map_err(Box::new)?;
while size != 0 {
let bytes = size.min(buf.len());
- from.read_exact(&mut buf[..bytes])?;
+ from.read_exact(&mut buf[..bytes]).map_err(Box::new)?;
hasher.update(&buf[..bytes]);
- possibly_compress(&buf[..bytes])?;
+ possibly_compress(&buf[..bytes]).map_err(Box::new)?;
size -= bytes;
}
if let Some(compressor) = self.compressor.as_ref() {
let mut c = compressor.borrow_mut();
- c.flush()?;
+ c.flush().map_err(Box::new)?;
c.reset();
}
diff --git a/vendor/gix-odb/src/store_impls/dynamic/find.rs b/vendor/gix-odb/src/store_impls/dynamic/find.rs
index 88f95db51..b4cd25aeb 100644
--- a/vendor/gix-odb/src/store_impls/dynamic/find.rs
+++ b/vendor/gix-odb/src/store_impls/dynamic/find.rs
@@ -75,6 +75,7 @@ pub(crate) mod error {
}
}
pub use error::Error;
+use gix_features::zlib;
use crate::{store::types::PackId, Find};
@@ -86,7 +87,8 @@ where
&'b self,
mut id: &'b gix_hash::oid,
buffer: &'a mut Vec<u8>,
- pack_cache: &mut impl DecodeEntry,
+ inflate: &mut zlib::Inflate,
+ pack_cache: &mut dyn DecodeEntry,
snapshot: &mut load_index::Snapshot,
recursion: Option<error::DeltaBaseRecursion<'_>>,
) -> Result<Option<(gix_object::Data<'a>, Option<gix_pack::data::entry::Location>)>, Error> {
@@ -147,7 +149,8 @@ where
let res = match pack.decode_entry(
entry,
buffer,
- |id, _out| {
+ inflate,
+ &|id, _out| {
index_file.pack_offset_by_id(id).map(|pack_offset| {
gix_pack::data::decode::entry::ResolvedBase::InPack(pack.entry(pack_offset))
})
@@ -182,10 +185,11 @@ where
.try_find_cached_inner(
&base_id,
&mut buf,
+ inflate,
pack_cache,
snapshot,
recursion
- .map(|r| r.inc_depth())
+ .map(error::DeltaBaseRecursion::inc_depth)
.or_else(|| error::DeltaBaseRecursion::new(id).into()),
)
.map_err(|err| Error::DeltaBaseLookup {
@@ -231,7 +235,8 @@ where
pack.decode_entry(
entry,
buffer,
- |id, out| {
+ inflate,
+ &|id, out| {
index_file
.pack_offset_by_id(id)
.map(|pack_offset| {
@@ -306,11 +311,8 @@ impl<S> gix_pack::Find for super::Handle<S>
where
S: Deref<Target = super::Store> + Clone,
{
- type Error = Error;
-
// TODO: probably make this method fallible, but that would mean its own error type.
- fn contains(&self, id: impl AsRef<gix_hash::oid>) -> bool {
- let id = id.as_ref();
+ fn contains(&self, id: &gix_hash::oid) -> bool {
let mut snapshot = self.snapshot.borrow_mut();
loop {
for (idx, index) in snapshot.indices.iter().enumerate() {
@@ -341,20 +343,17 @@ where
fn try_find_cached<'a>(
&self,
- id: impl AsRef<gix_hash::oid>,
+ id: &gix_hash::oid,
buffer: &'a mut Vec<u8>,
- pack_cache: &mut impl DecodeEntry,
- ) -> Result<Option<(gix_object::Data<'a>, Option<gix_pack::data::entry::Location>)>, Self::Error> {
- let id = id.as_ref();
+ pack_cache: &mut dyn DecodeEntry,
+ ) -> Result<Option<(gix_object::Data<'a>, Option<gix_pack::data::entry::Location>)>, gix_pack::find::Error> {
let mut snapshot = self.snapshot.borrow_mut();
- self.try_find_cached_inner(id, buffer, pack_cache, &mut snapshot, None)
+ let mut inflate = self.inflate.borrow_mut();
+ self.try_find_cached_inner(id, buffer, &mut inflate, pack_cache, &mut snapshot, None)
+ .map_err(|err| Box::new(err) as _)
}
- fn location_by_oid(
- &self,
- id: impl AsRef<gix_hash::oid>,
- buf: &mut Vec<u8>,
- ) -> Option<gix_pack::data::entry::Location> {
+ fn location_by_oid(&self, id: &gix_hash::oid, buf: &mut Vec<u8>) -> Option<gix_pack::data::entry::Location> {
assert!(
matches!(self.token.as_ref(), Some(handle::Mode::KeepDeletedPacksAvailable)),
"BUG: handle must be configured to `prevent_pack_unload()` before using this method"
@@ -362,8 +361,8 @@ where
assert!(self.store_ref().replacements.is_empty() || self.ignore_replacements, "Everything related to packing must not use replacements. These are not used here, but it should be turned off for good measure.");
- let id = id.as_ref();
let mut snapshot = self.snapshot.borrow_mut();
+ let mut inflate = self.inflate.borrow_mut();
'outer: loop {
{
let marker = snapshot.marker;
@@ -404,13 +403,14 @@ where
buf.resize(entry.decompressed_size.try_into().expect("representable size"), 0);
assert_eq!(pack.id, pack_id.to_intrinsic_pack_id(), "both ids must always match");
- let res = pack.decompress_entry(&entry, buf).ok().map(|entry_size_past_header| {
- gix_pack::data::entry::Location {
+ let res = pack
+ .decompress_entry(&entry, &mut inflate, buf)
+ .ok()
+ .map(|entry_size_past_header| gix_pack::data::entry::Location {
pack_id: pack.id,
pack_offset,
entry_size: entry.header_size() + entry_size_past_header,
- }
- });
+ });
if idx != 0 {
snapshot.indices.swap(0, idx);
@@ -503,17 +503,15 @@ where
S: Deref<Target = super::Store> + Clone,
Self: gix_pack::Find,
{
- type Error = <Self as gix_pack::Find>::Error;
-
- fn contains(&self, id: impl AsRef<gix_hash::oid>) -> bool {
+ fn contains(&self, id: &gix_hash::oid) -> bool {
gix_pack::Find::contains(self, id)
}
fn try_find<'a>(
&self,
- id: impl AsRef<gix_hash::oid>,
+ id: &gix_hash::oid,
buffer: &'a mut Vec<u8>,
- ) -> Result<Option<gix_object::Data<'a>>, Self::Error> {
+ ) -> Result<Option<gix_object::Data<'a>>, crate::find::Error> {
gix_pack::Find::try_find(self, id, buffer).map(|t| t.map(|t| t.0))
}
}
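Note: since the pack cache is now passed as a `&mut dyn DecodeEntry` trait object, callers choose a concrete cache at the call site; `gix_pack::cache::Never` (used above) disables caching. A sketch of a direct lookup through the `gix_pack::Find` impl, assuming that `gix_pack::find::Error` and `gix_odb::find::Error` are the same boxed alias, as the impl above suggests:

    use gix_pack::Find as _;

    fn lookup<'a>(
        handle: &gix_odb::Handle,
        id: &gix_hash::oid,
        buf: &'a mut Vec<u8>,
    ) -> Result<Option<gix_object::Data<'a>>, gix_odb::find::Error> {
        // no caching; any DecodeEntry implementation could be substituted here
        let mut no_cache = gix_pack::cache::Never;
        handle
            .try_find_cached(id, buf, &mut no_cache)
            .map(|found| found.map(|(data, _location)| data))
    }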
diff --git a/vendor/gix-odb/src/store_impls/dynamic/handle.rs b/vendor/gix-odb/src/store_impls/dynamic/handle.rs
index 78efd4451..655bdfa43 100644
--- a/vendor/gix-odb/src/store_impls/dynamic/handle.rs
+++ b/vendor/gix-odb/src/store_impls/dynamic/handle.rs
@@ -257,6 +257,7 @@ impl super::Store {
refresh: RefreshMode::default(),
ignore_replacements: false,
token: Some(token),
+ inflate: RefCell::new(Default::default()),
snapshot: RefCell::new(self.collect_snapshot()),
max_recursion_depth: Self::INITIAL_MAX_RECURSION_DEPTH,
packed_object_count: Default::default(),
@@ -273,6 +274,7 @@ impl super::Store {
refresh: Default::default(),
ignore_replacements: false,
token: Some(token),
+ inflate: RefCell::new(Default::default()),
snapshot: RefCell::new(self.collect_snapshot()),
max_recursion_depth: Self::INITIAL_MAX_RECURSION_DEPTH,
packed_object_count: Default::default(),
@@ -344,8 +346,8 @@ impl TryFrom<&super::Store> for super::Store {
fn try_from(s: &super::Store) -> Result<Self, Self::Error> {
super::Store::at_opts(
- s.path(),
- s.replacements(),
+ s.path().into(),
+ &mut s.replacements(),
crate::store::init::Options {
slots: crate::store::init::Slots::Given(s.files.len().try_into().expect("BUG: too many slots")),
object_hash: Default::default(),
@@ -391,6 +393,7 @@ where
}
.into()
},
+ inflate: RefCell::new(Default::default()),
snapshot: RefCell::new(self.store.collect_snapshot()),
max_recursion_depth: self.max_recursion_depth,
packed_object_count: Default::default(),
diff --git a/vendor/gix-odb/src/store_impls/dynamic/header.rs b/vendor/gix-odb/src/store_impls/dynamic/header.rs
index d29a911f1..05ff7cf64 100644
--- a/vendor/gix-odb/src/store_impls/dynamic/header.rs
+++ b/vendor/gix-odb/src/store_impls/dynamic/header.rs
@@ -1,3 +1,4 @@
+use gix_features::zlib;
use std::ops::Deref;
use gix_hash::oid;
@@ -15,6 +16,7 @@ where
fn try_header_inner<'b>(
&'b self,
mut id: &'b gix_hash::oid,
+ inflate: &mut zlib::Inflate,
snapshot: &mut load_index::Snapshot,
recursion: Option<DeltaBaseRecursion<'_>>,
) -> Result<Option<Header>, Error> {
@@ -71,7 +73,7 @@ where
},
};
let entry = pack.entry(pack_offset);
- let res = match pack.decode_header(entry, |id| {
+ let res = match pack.decode_header(entry, inflate, &|id| {
index_file.pack_offset_by_id(id).map(|pack_offset| {
gix_pack::data::decode::header::ResolvedBase::InPack(pack.entry(pack_offset))
})
@@ -85,9 +87,10 @@ where
let hdr = self
.try_header_inner(
&base_id,
+ inflate,
snapshot,
recursion
- .map(|r| r.inc_depth())
+ .map(DeltaBaseRecursion::inc_depth)
.or_else(|| DeltaBaseRecursion::new(id).into()),
)
.map_err(|err| Error::DeltaBaseLookup {
@@ -127,7 +130,7 @@ where
.as_ref()
.expect("pack to still be available like just now");
let entry = pack.entry(pack_offset);
- pack.decode_header(entry, |id| {
+ pack.decode_header(entry, inflate, &|id| {
index_file
.pack_offset_by_id(id)
.map(|pack_offset| {
@@ -179,11 +182,10 @@ impl<S> crate::Header for super::Handle<S>
where
S: Deref<Target = super::Store> + Clone,
{
- type Error = Error;
-
- fn try_header(&self, id: impl AsRef<oid>) -> Result<Option<Header>, Self::Error> {
- let id = id.as_ref();
+ fn try_header(&self, id: &oid) -> Result<Option<Header>, crate::find::Error> {
let mut snapshot = self.snapshot.borrow_mut();
- self.try_header_inner(id, &mut snapshot, None)
+ let mut inflate = self.inflate.borrow_mut();
+ self.try_header_inner(id, &mut inflate, &mut snapshot, None)
+ .map_err(|err| Box::new(err) as _)
}
}
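Note: `try_header()` likewise drops the associated error type and takes a plain `&oid`. A small sketch of peeking at an object's header without decoding it, assuming `Header` derives `Debug`; the `describe` helper is made up:

    use gix_odb::Header as _;

    fn describe(odb: &gix_odb::Handle, id: &gix_hash::oid) -> Result<(), gix_odb::find::Error> {
        match odb.try_header(id)? {
            Some(header) => println!("{header:?}"), // kind and size, no full decode
            None => println!("object not present"),
        }
        Ok(())
    }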
diff --git a/vendor/gix-odb/src/store_impls/dynamic/init.rs b/vendor/gix-odb/src/store_impls/dynamic/init.rs
index 5e272dcff..d7e1d9d99 100644
--- a/vendor/gix-odb/src/store_impls/dynamic/init.rs
+++ b/vendor/gix-odb/src/store_impls/dynamic/init.rs
@@ -70,8 +70,8 @@ impl Store {
/// `replacements` is an iterator over pairs of old and new object ids for replacement support.
/// This means that when asking for object `X`, one will receive object `X-replaced` given an iterator like `Some((X, X-replaced))`.
pub fn at_opts(
- objects_dir: impl Into<PathBuf>,
- replacements: impl IntoIterator<Item = (gix_hash::ObjectId, gix_hash::ObjectId)>,
+ objects_dir: PathBuf,
+ replacements: &mut dyn Iterator<Item = (gix_hash::ObjectId, gix_hash::ObjectId)>,
Options {
slots,
object_hash,
@@ -79,7 +79,7 @@ impl Store {
current_dir,
}: Options,
) -> std::io::Result<Self> {
- let objects_dir = objects_dir.into();
+ let _span = gix_features::trace::detail!("gix_odb::Store::at()");
let current_dir = current_dir.map_or_else(std::env::current_dir, Ok)?;
if !objects_dir.is_dir() {
return Err(std::io::Error::new(
@@ -90,7 +90,7 @@ impl Store {
let slot_count = match slots {
Slots::Given(n) => n as usize,
Slots::AsNeededByDiskState { multiplier, minimum } => {
- let mut db_paths = crate::alternate::resolve(&objects_dir, &current_dir)
+ let mut db_paths = crate::alternate::resolve(objects_dir.clone(), &current_dir)
.map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))?;
db_paths.insert(0, objects_dir.clone());
let num_slots = super::Store::collect_indices_and_mtime_sorted_by_size(db_paths, None, None)
@@ -106,7 +106,7 @@ impl Store {
"Cannot use more than 1^15 slots",
));
}
- let mut replacements: Vec<_> = replacements.into_iter().collect();
+ let mut replacements: Vec<_> = replacements.collect();
replacements.sort_by(|a, b| a.0.cmp(&b.0));
Ok(Store {
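Note: `Store::at_opts()` is now fully monomorphic: an owned `PathBuf` plus a `&mut dyn Iterator` for the replacements. A minimal sketch of opening a store without replacements; the empty-iterator turbofish is needed because nothing else fixes its item type:

    fn open_store(objects_dir: std::path::PathBuf) -> std::io::Result<gix_odb::Store> {
        gix_odb::Store::at_opts(
            objects_dir,
            &mut std::iter::empty::<(gix_hash::ObjectId, gix_hash::ObjectId)>(),
            gix_odb::store::init::Options::default(),
        )
    }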
diff --git a/vendor/gix-odb/src/store_impls/dynamic/iter.rs b/vendor/gix-odb/src/store_impls/dynamic/iter.rs
index 2152dff71..2a7253aec 100644
--- a/vendor/gix-odb/src/store_impls/dynamic/iter.rs
+++ b/vendor/gix-odb/src/store_impls/dynamic/iter.rs
@@ -192,7 +192,7 @@ impl Iterator for AllObjects {
Some(id) => Some(id),
None => {
*index += 1;
- match self.loose_dbs.get(*index).map(|ldb| ldb.iter()) {
+ match self.loose_dbs.get(*index).map(loose::Store::iter) {
Some(new_iter) => {
*iter = new_iter;
self.next()
diff --git a/vendor/gix-odb/src/store_impls/dynamic/load_index.rs b/vendor/gix-odb/src/store_impls/dynamic/load_index.rs
index 84224dff1..4716a5806 100644
--- a/vendor/gix-odb/src/store_impls/dynamic/load_index.rs
+++ b/vendor/gix-odb/src/store_impls/dynamic/load_index.rs
@@ -206,7 +206,7 @@ impl super::Store {
self.num_disk_state_consolidation.fetch_add(1, Ordering::Relaxed);
let db_paths: Vec<_> = std::iter::once(objects_directory.to_owned())
- .chain(crate::alternate::resolve(objects_directory, &self.current_dir)?)
+ .chain(crate::alternate::resolve(objects_directory.clone(), &self.current_dir)?)
.collect();
// turn db paths into loose object databases. Reuse what's there, but only if it is in the right order.
@@ -470,8 +470,7 @@ impl super::Store {
})
.transpose()?;
if let Some((multi_index, mtime, flen)) = multi_index_info {
- let index_names_in_multi_index: Vec<_> =
- multi_index.index_names().iter().map(|p| p.as_path()).collect();
+ let index_names_in_multi_index: Vec<_> = multi_index.index_names().iter().map(AsRef::as_ref).collect();
let mut indices_not_in_multi_index: Vec<(Either, _, _)> = indices
.into_iter()
.filter_map(|(path, a, b)| {
@@ -494,6 +493,11 @@ impl super::Store {
// Unlike libgit2, do not sort by modification date, but by size and put the biggest indices first. That way
// the chance to hit an object should be higher. We leave it to the handle to sort by LRU.
// Git itself doesn't change the order which may save time, but we want it to be stable which also helps some tests.
+ // NOTE: this will work well for well-packed repos or those using geometric repacking, but force us to open a lot
+ // of files when dealing with new objects, as there is no notion of recency here as would be with unmaintained
+ // repositories. Different algorithms should be provided, like newest packs first, and possibly a mix of both
+ // with big packs first, then sorting by recency for smaller packs.
+ // We also want to implement `fetch.unpackLimit` to alleviate this issue a little.
indices_by_modification_time.sort_by(|l, r| l.2.cmp(&r.2).reverse());
Ok(indices_by_modification_time)
}
diff --git a/vendor/gix-odb/src/store_impls/dynamic/mod.rs b/vendor/gix-odb/src/store_impls/dynamic/mod.rs
index e992fada6..35d4f7ca6 100644
--- a/vendor/gix-odb/src/store_impls/dynamic/mod.rs
+++ b/vendor/gix-odb/src/store_impls/dynamic/mod.rs
@@ -1,4 +1,5 @@
//! The standard object store which should fit all needs.
+use gix_features::zlib;
use std::{cell::RefCell, ops::Deref};
use crate::Store;
@@ -24,6 +25,7 @@ where
pub(crate) token: Option<handle::Mode>,
snapshot: RefCell<load_index::Snapshot>,
+ inflate: RefCell<zlib::Inflate>,
packed_object_count: RefCell<Option<u64>>,
}
diff --git a/vendor/gix-odb/src/store_impls/dynamic/prefix.rs b/vendor/gix-odb/src/store_impls/dynamic/prefix.rs
index c0edeba3f..58a49416b 100644
--- a/vendor/gix-odb/src/store_impls/dynamic/prefix.rs
+++ b/vendor/gix-odb/src/store_impls/dynamic/prefix.rs
@@ -40,13 +40,13 @@ pub mod disambiguate {
/// matching this prefix.
pub fn new(id: impl Into<gix_hash::ObjectId>, hex_len: usize) -> Result<Self, gix_hash::prefix::Error> {
let id = id.into();
- gix_hash::Prefix::new(id, hex_len)?;
+ gix_hash::Prefix::new(&id, hex_len)?;
Ok(Candidate { id, hex_len })
}
/// Transform ourselves into a `Prefix` with our current hex lengths.
pub fn to_prefix(&self) -> gix_hash::Prefix {
- gix_hash::Prefix::new(self.id, self.hex_len).expect("our hex-len to always be in bounds")
+ gix_hash::Prefix::new(&self.id, self.hex_len).expect("our hex-len to always be in bounds")
}
pub(crate) fn inc_hex_len(&mut self) {
@@ -85,6 +85,7 @@ where
match *count {
Some(count) => Ok(count),
None => {
+ let _span = gix_features::trace::detail!("gix_odb::Handle::packed_object_count()");
let mut snapshot = self.snapshot.borrow_mut();
*snapshot = self.store.load_all_indices()?;
let mut obj_count = 0;
diff --git a/vendor/gix-odb/src/store_impls/dynamic/structure.rs b/vendor/gix-odb/src/store_impls/dynamic/structure.rs
index 687e74d6a..8e5adc23c 100644
--- a/vendor/gix-odb/src/store_impls/dynamic/structure.rs
+++ b/vendor/gix-odb/src/store_impls/dynamic/structure.rs
@@ -52,6 +52,7 @@ impl Store {
/// implementation. The likelihood of actual changes is low though as these still depend on something
/// changing on disk and somebody reading at the same time.
pub fn structure(&self) -> Result<Vec<Record>, load_index::Error> {
+ let _span = gix_features::trace::detail!("gix_odb::Store::structure()");
let index = self.index.load();
if !index.is_initialized() {
self.consolidate_with_disk_state(true, false /*load one new index*/)?;
diff --git a/vendor/gix-odb/src/store_impls/dynamic/types.rs b/vendor/gix-odb/src/store_impls/dynamic/types.rs
index c77cf2109..473c587bb 100644
--- a/vendor/gix-odb/src/store_impls/dynamic/types.rs
+++ b/vendor/gix-odb/src/store_impls/dynamic/types.rs
@@ -305,7 +305,7 @@ impl IndexAndPacks {
match self {
Self::Index(bundle) => bundle.index.is_disposable() || bundle.data.is_disposable(),
Self::MultiIndex(bundle) => {
- bundle.multi_index.is_disposable() || bundle.data.iter().any(|odf| odf.is_disposable())
+ bundle.multi_index.is_disposable() || bundle.data.iter().any(OnDiskFile::is_disposable)
}
}
}
diff --git a/vendor/gix-odb/src/store_impls/dynamic/verify.rs b/vendor/gix-odb/src/store_impls/dynamic/verify.rs
index 5156074ac..94a916b98 100644
--- a/vendor/gix-odb/src/store_impls/dynamic/verify.rs
+++ b/vendor/gix-odb/src/store_impls/dynamic/verify.rs
@@ -4,7 +4,7 @@ use std::{
time::Instant,
};
-use gix_features::progress::{MessageLevel, Progress};
+use gix_features::progress::{DynNestedProgress, MessageLevel, Progress};
use crate::{
pack,
@@ -73,13 +73,11 @@ pub mod integrity {
}
/// Returned by [`Store::verify_integrity()`][crate::Store::verify_integrity()].
- pub struct Outcome<P> {
+ pub struct Outcome {
/// Statistics for validated loose object stores.
pub loose_object_stores: Vec<LooseObjectStatistics>,
/// Pack traversal statistics for each index and their pack(s)
pub index_statistics: Vec<IndexStatistics>,
- /// The provided progress instance.
- pub progress: P,
}
/// The progress ids used in [`Store::verify_integrity()`][crate::Store::verify_integrity()].
@@ -111,17 +109,17 @@ impl super::Store {
///
/// Note that this will not force loading all indices or packs permanently, as we will only use the momentarily loaded disk state.
/// This does, however, include all alternates.
- pub fn verify_integrity<C, P, F>(
+ pub fn verify_integrity<C, F>(
&self,
- mut progress: P,
+ progress: &mut dyn DynNestedProgress,
should_interrupt: &AtomicBool,
options: integrity::Options<F>,
- ) -> Result<integrity::Outcome<P>, integrity::Error>
+ ) -> Result<integrity::Outcome, integrity::Error>
where
- P: Progress,
C: pack::cache::DecodeEntry,
F: Fn() -> C + Send + Clone,
{
+ let _span = gix_features::trace::coarse!("gix_odb::Store::verify_integrity()");
let mut index = self.index.load();
if !index.is_initialized() {
self.consolidate_with_disk_state(true, false)?;
@@ -141,126 +139,131 @@ impl super::Store {
format!(
"Checking integrity: {}",
path.file_name()
- .map_or_else(std::borrow::Cow::default, |f| f.to_string_lossy())
+ .map_or_else(Default::default, std::ffi::OsStr::to_string_lossy)
)
};
- for slot_index in &index.slot_indices {
- let slot = &self.files[*slot_index];
- if slot.generation.load(Ordering::SeqCst) != index.generation {
- return Err(integrity::Error::NeedsRetryDueToChangeOnDisk);
- }
- let files = slot.files.load();
- let files = Option::as_ref(&files).ok_or(integrity::Error::NeedsRetryDueToChangeOnDisk)?;
+ gix_features::trace::detail!("verify indices").into_scope(|| {
+ for slot_index in &index.slot_indices {
+ let slot = &self.files[*slot_index];
+ if slot.generation.load(Ordering::SeqCst) != index.generation {
+ return Err(integrity::Error::NeedsRetryDueToChangeOnDisk);
+ }
+ let files = slot.files.load();
+ let files = Option::as_ref(&files).ok_or(integrity::Error::NeedsRetryDueToChangeOnDisk)?;
- let start = Instant::now();
- let (mut child_progress, num_objects, index_path) = match files {
- IndexAndPacks::Index(bundle) => {
- let index;
- let index = match bundle.index.loaded() {
- Some(index) => index.deref(),
- None => {
- index = pack::index::File::at(bundle.index.path(), self.object_hash)?;
- &index
- }
- };
- let pack;
- let data = match bundle.data.loaded() {
- Some(pack) => pack.deref(),
- None => {
- pack = pack::data::File::at(bundle.data.path(), self.object_hash)?;
- &pack
- }
- };
- let outcome = index.verify_integrity(
- Some(pack::index::verify::PackContext {
- data,
- options: options.clone(),
- }),
- progress.add_child_with_id(
- "verify index",
+ let start = Instant::now();
+ let (mut child_progress, num_objects, index_path) = match files {
+ IndexAndPacks::Index(bundle) => {
+ let index;
+ let index = match bundle.index.loaded() {
+ Some(index) => index.deref(),
+ None => {
+ index = pack::index::File::at(bundle.index.path(), self.object_hash)?;
+ &index
+ }
+ };
+ let pack;
+ let data = match bundle.data.loaded() {
+ Some(pack) => pack.deref(),
+ None => {
+ pack = pack::data::File::at(bundle.data.path(), self.object_hash)?;
+ &pack
+ }
+ };
+ let mut child_progress = progress.add_child_with_id(
+ "verify index".into(),
integrity::ProgressId::VerifyIndex(Default::default()).into(),
- ),
- should_interrupt,
- )?;
- statistics.push(IndexStatistics {
- path: bundle.index.path().to_owned(),
- statistics: SingleOrMultiStatistics::Single(
- outcome
- .pack_traverse_statistics
- .expect("pack provided so there are stats"),
- ),
- });
- (outcome.progress, index.num_objects(), index.path().to_owned())
- }
- IndexAndPacks::MultiIndex(bundle) => {
- let index;
- let index = match bundle.multi_index.loaded() {
- Some(index) => index.deref(),
- None => {
- index = pack::multi_index::File::at(bundle.multi_index.path())?;
- &index
- }
- };
- let outcome = index.verify_integrity(
- progress.add_child_with_id(
- "verify multi-index",
+ );
+ let outcome = index.verify_integrity(
+ Some(pack::index::verify::PackContext {
+ data,
+ options: options.clone(),
+ }),
+ &mut child_progress,
+ should_interrupt,
+ )?;
+ statistics.push(IndexStatistics {
+ path: bundle.index.path().to_owned(),
+ statistics: SingleOrMultiStatistics::Single(
+ outcome
+ .pack_traverse_statistics
+ .expect("pack provided so there are stats"),
+ ),
+ });
+ (child_progress, index.num_objects(), index.path().to_owned())
+ }
+ IndexAndPacks::MultiIndex(bundle) => {
+ let index;
+ let index = match bundle.multi_index.loaded() {
+ Some(index) => index.deref(),
+ None => {
+ index = pack::multi_index::File::at(bundle.multi_index.path())?;
+ &index
+ }
+ };
+ let mut child_progress = progress.add_child_with_id(
+ "verify multi-index".into(),
integrity::ProgressId::VerifyMultiIndex(Default::default()).into(),
- ),
- should_interrupt,
- options.clone(),
- )?;
+ );
+ let outcome = index.verify_integrity(&mut child_progress, should_interrupt, options.clone())?;
- let index_dir = bundle.multi_index.path().parent().expect("file in a directory");
- statistics.push(IndexStatistics {
- path: Default::default(),
- statistics: SingleOrMultiStatistics::Multi(
- outcome
- .pack_traverse_statistics
- .into_iter()
- .zip(index.index_names())
- .map(|(statistics, index_name)| (index_dir.join(index_name), statistics))
- .collect(),
- ),
- });
- (outcome.progress, index.num_objects(), index.path().to_owned())
- }
- };
+ let index_dir = bundle.multi_index.path().parent().expect("file in a directory");
+ statistics.push(IndexStatistics {
+ path: Default::default(),
+ statistics: SingleOrMultiStatistics::Multi(
+ outcome
+ .pack_traverse_statistics
+ .into_iter()
+ .zip(index.index_names())
+ .map(|(statistics, index_name)| (index_dir.join(index_name), statistics))
+ .collect(),
+ ),
+ });
+ (child_progress, index.num_objects(), index.path().to_owned())
+ }
+ };
- child_progress.set_name(index_check_message(&index_path));
- child_progress.show_throughput_with(
- start,
- num_objects as usize,
- gix_features::progress::count("objects").expect("set"),
- MessageLevel::Success,
- );
- progress.inc();
- }
+ child_progress.set_name(index_check_message(&index_path));
+ child_progress.show_throughput_with(
+ start,
+ num_objects as usize,
+ gix_features::progress::count("objects").expect("set"),
+ MessageLevel::Success,
+ );
+ progress.inc();
+ }
+ Ok(())
+ })?;
progress.init(
Some(index.loose_dbs.len()),
gix_features::progress::count("loose object stores"),
);
let mut loose_object_stores = Vec::new();
- for loose_db in &*index.loose_dbs {
- let out = loose_db
- .verify_integrity(
- progress.add_child_with_id(
- loose_db.path().display().to_string(),
- integrity::ProgressId::VerifyLooseObjectDbPath.into(),
- ),
- should_interrupt,
- )
- .map(|statistics| integrity::LooseObjectStatistics {
- path: loose_db.path().to_owned(),
- statistics,
- })?;
- loose_object_stores.push(out);
- }
+ gix_features::trace::detail!("verify loose ODBs").into_scope(
+ || -> Result<_, crate::loose::verify::integrity::Error> {
+ for loose_db in &*index.loose_dbs {
+ let out = loose_db
+ .verify_integrity(
+ &mut progress.add_child_with_id(
+ loose_db.path().display().to_string(),
+ integrity::ProgressId::VerifyLooseObjectDbPath.into(),
+ ),
+ should_interrupt,
+ )
+ .map(|statistics| integrity::LooseObjectStatistics {
+ path: loose_db.path().to_owned(),
+ statistics,
+ })?;
+ loose_object_stores.push(out);
+ }
+ Ok(())
+ },
+ )?;
Ok(integrity::Outcome {
loose_object_stores,
index_statistics: statistics,
- progress,
})
}
}
diff --git a/vendor/gix-odb/src/store_impls/dynamic/write.rs b/vendor/gix-odb/src/store_impls/dynamic/write.rs
index a2e40eec4..ba615f351 100644
--- a/vendor/gix-odb/src/store_impls/dynamic/write.rs
+++ b/vendor/gix-odb/src/store_impls/dynamic/write.rs
@@ -28,16 +28,15 @@ impl<S> crate::Write for store::Handle<S>
where
S: Deref<Target = dynamic::Store> + Clone,
{
- type Error = Error;
-
- fn write_stream(&self, kind: Kind, size: u64, from: impl Read) -> Result<ObjectId, Self::Error> {
+ fn write_stream(&self, kind: Kind, size: u64, from: &mut dyn Read) -> Result<ObjectId, crate::write::Error> {
let mut snapshot = self.snapshot.borrow_mut();
Ok(match snapshot.loose_dbs.first() {
Some(ldb) => ldb.write_stream(kind, size, from)?,
None => {
let new_snapshot = self
.store
- .load_one_index(self.refresh, snapshot.marker)?
+ .load_one_index(self.refresh, snapshot.marker)
+ .map_err(Box::new)?
.expect("there is always at least one ODB, and this code runs only once for initialization");
*snapshot = new_snapshot;
snapshot.loose_dbs[0].write_stream(kind, size, from)?
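Note: with `write_stream()` taking `&mut dyn Read` and returning the boxed `crate::write::Error`, an `io::Error` converts via `?` through the standard `From` impl for boxed errors. A sketch of streaming a file into the store through a handle; `write_from_file` is illustrative:

    use gix_odb::Write as _;

    fn write_from_file(
        odb: &gix_odb::Handle,
        path: &std::path::Path,
    ) -> Result<gix_hash::ObjectId, gix_odb::write::Error> {
        let mut file = std::fs::File::open(path)?;
        let size = file.metadata()?.len();
        // the stream is passed as a trait object now
        odb.write_stream(gix_object::Kind::Blob, size, &mut file)
    }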
diff --git a/vendor/gix-odb/src/store_impls/loose/find.rs b/vendor/gix-odb/src/store_impls/loose/find.rs
index 04fabe61b..91bf0ba87 100644
--- a/vendor/gix-odb/src/store_impls/loose/find.rs
+++ b/vendor/gix-odb/src/store_impls/loose/find.rs
@@ -34,9 +34,9 @@ impl Store {
const OPEN_ACTION: &'static str = "open";
/// Returns true if the given id is contained in our repository.
- pub fn contains(&self, id: impl AsRef<gix_hash::oid>) -> bool {
- debug_assert_eq!(self.object_hash, id.as_ref().kind());
- hash_path(id.as_ref(), self.path.clone()).is_file()
+ pub fn contains(&self, id: &gix_hash::oid) -> bool {
+ debug_assert_eq!(self.object_hash, id.kind());
+ hash_path(id, self.path.clone()).is_file()
}
/// Given a `prefix`, find an object that matches it uniquely within this loose object
@@ -56,7 +56,7 @@ impl Store {
) -> Result<Option<crate::store::prefix::lookup::Outcome>, crate::loose::iter::Error> {
let single_directory_iter = crate::loose::Iter {
inner: gix_features::fs::walkdir_new(
- self.path.join(prefix.as_oid().to_hex_with_len(2).to_string()),
+ &self.path.join(prefix.as_oid().to_hex_with_len(2).to_string()),
gix_features::fs::walkdir::Parallelism::Serial,
)
.min_depth(1)
@@ -108,11 +108,11 @@ impl Store {
/// there was no such object.
pub fn try_find<'a>(
&self,
- id: impl AsRef<gix_hash::oid>,
+ id: &gix_hash::oid,
out: &'a mut Vec<u8>,
) -> Result<Option<gix_object::Data<'a>>, Error> {
- debug_assert_eq!(self.object_hash, id.as_ref().kind());
- match self.find_inner(id.as_ref(), out) {
+ debug_assert_eq!(self.object_hash, id.kind());
+ match self.find_inner(id, out) {
Ok(obj) => Ok(Some(obj)),
Err(err) => match err {
Error::Io {
@@ -137,10 +137,10 @@ impl Store {
/// Return only the decompressed size of the object and its kind, without fully reading it into memory, as a tuple of `(size, kind)`.
/// Returns `None` if `id` does not exist in the database.
- pub fn try_header(&self, id: impl AsRef<gix_hash::oid>) -> Result<Option<(usize, gix_object::Kind)>, Error> {
+ pub fn try_header(&self, id: &gix_hash::oid) -> Result<Option<(usize, gix_object::Kind)>, Error> {
const BUF_SIZE: usize = 256;
let mut buf = [0_u8; BUF_SIZE];
- let path = hash_path(id.as_ref(), self.path.clone());
+ let path = hash_path(id, self.path.clone());
let mut inflate = zlib::Inflate::default();
let mut istream = match fs::File::open(&path) {
diff --git a/vendor/gix-odb/src/store_impls/loose/verify.rs b/vendor/gix-odb/src/store_impls/loose/verify.rs
index 8ffbb7105..ae83c1d01 100644
--- a/vendor/gix-odb/src/store_impls/loose/verify.rs
+++ b/vendor/gix-odb/src/store_impls/loose/verify.rs
@@ -3,7 +3,7 @@ use std::{
time::Instant,
};
-use gix_features::progress::Progress;
+use gix_features::progress::{Count, DynNestedProgress, Progress};
use crate::{loose::Store, Write};
@@ -61,7 +61,7 @@ impl Store {
/// Check all loose objects for their integrity checking their hash matches the actual data and by decoding them fully.
pub fn verify_integrity(
&self,
- mut progress: impl Progress,
+ progress: &mut dyn DynNestedProgress,
should_interrupt: &AtomicBool,
) -> Result<integrity::Statistics, integrity::Error> {
let mut buf = Vec::new();
@@ -69,11 +69,11 @@ impl Store {
let mut num_objects = 0;
let start = Instant::now();
- let mut progress = progress.add_child_with_id("Validating", integrity::ProgressId::LooseObjects.into());
+ let mut progress = progress.add_child_with_id("Validating".into(), integrity::ProgressId::LooseObjects.into());
progress.init(None, gix_features::progress::count("loose objects"));
for id in self.iter().filter_map(Result::ok) {
let object = self
- .try_find(id, &mut buf)
+ .try_find(&id, &mut buf)
.map_err(|_| integrity::Error::Retry)?
.ok_or(integrity::Error::Retry)?;
let actual_id = sink.write_buf(object.kind, object.data).expect("sink never fails");
diff --git a/vendor/gix-odb/src/store_impls/loose/write.rs b/vendor/gix-odb/src/store_impls/loose/write.rs
index 912426bba..e537eda92 100644
--- a/vendor/gix-odb/src/store_impls/loose/write.rs
+++ b/vendor/gix-odb/src/store_impls/loose/write.rs
@@ -27,9 +27,7 @@ pub enum Error {
}
impl crate::traits::Write for Store {
- type Error = Error;
-
- fn write(&self, object: impl WriteTo) -> Result<gix_hash::ObjectId, Self::Error> {
+ fn write(&self, object: &dyn WriteTo) -> Result<gix_hash::ObjectId, crate::write::Error> {
let mut to = self.dest()?;
to.write_all(&object.loose_header()).map_err(|err| Error::Io {
source: err,
@@ -41,15 +39,15 @@ impl crate::traits::Write for Store {
message: "stream all data into tempfile in",
path: self.path.to_owned(),
})?;
- to.flush()?;
- self.finalize_object(to)
+ to.flush().map_err(Box::new)?;
+ Ok(self.finalize_object(to).map_err(Box::new)?)
}
/// Write the given buffer in `from` to disk in one syscall at best.
///
/// This will cost at least 4 IO operations.
- fn write_buf(&self, kind: gix_object::Kind, from: &[u8]) -> Result<gix_hash::ObjectId, Self::Error> {
- let mut to = self.dest()?;
+ fn write_buf(&self, kind: gix_object::Kind, from: &[u8]) -> Result<gix_hash::ObjectId, crate::write::Error> {
+ let mut to = self.dest().map_err(Box::new)?;
to.write_all(&gix_object::encode::loose_header(kind, from.len()))
.map_err(|err| Error::Io {
source: err,
@@ -63,7 +61,7 @@ impl crate::traits::Write for Store {
path: self.path.to_owned(),
})?;
to.flush()?;
- self.finalize_object(to)
+ Ok(self.finalize_object(to)?)
}
/// Write the given stream in `from` to disk with at least one syscall.
@@ -73,9 +71,9 @@ impl crate::traits::Write for Store {
&self,
kind: gix_object::Kind,
size: u64,
- mut from: impl io::Read,
- ) -> Result<gix_hash::ObjectId, Self::Error> {
- let mut to = self.dest()?;
+ mut from: &mut dyn io::Read,
+ ) -> Result<gix_hash::ObjectId, crate::write::Error> {
+ let mut to = self.dest().map_err(Box::new)?;
to.write_all(&gix_object::encode::loose_header(
kind,
size.try_into().expect("object size to fit into usize"),
@@ -86,13 +84,15 @@ impl crate::traits::Write for Store {
path: self.path.to_owned(),
})?;
- io::copy(&mut from, &mut to).map_err(|err| Error::Io {
- source: err,
- message: "stream all data into tempfile in",
- path: self.path.to_owned(),
- })?;
- to.flush()?;
- self.finalize_object(to)
+ io::copy(&mut from, &mut to)
+ .map_err(|err| Error::Io {
+ source: err,
+ message: "stream all data into tempfile in",
+ path: self.path.to_owned(),
+ })
+ .map_err(Box::new)?;
+ to.flush().map_err(Box::new)?;
+ Ok(self.finalize_object(to)?)
}
}
diff --git a/vendor/gix-odb/src/traits.rs b/vendor/gix-odb/src/traits.rs
index ddec78b8e..91d66c42d 100644
--- a/vendor/gix-odb/src/traits.rs
+++ b/vendor/gix-odb/src/traits.rs
@@ -4,21 +4,16 @@ use gix_object::WriteTo;
/// Describe the capability to write git objects into an object store.
pub trait Write {
- /// The error type used for all trait methods.
- ///
- /// _Note_ the default implementations require the `From<io::Error>` bound.
- type Error: std::error::Error + From<io::Error>;
-
/// Write objects using the intrinsic kind of [`hash`][gix_hash::Kind] into the database,
/// returning id to reference it in subsequent reads.
- fn write(&self, object: impl WriteTo) -> Result<gix_hash::ObjectId, Self::Error> {
+ fn write(&self, object: &dyn WriteTo) -> Result<gix_hash::ObjectId, crate::write::Error> {
let mut buf = Vec::with_capacity(2048);
object.write_to(&mut buf)?;
- self.write_stream(object.kind(), buf.len() as u64, buf.as_slice())
+ self.write_stream(object.kind(), buf.len() as u64, &mut buf.as_slice())
}
/// As [`write`][Write::write], but takes an [`object` kind][gix_object::Kind] along with its encoded bytes.
- fn write_buf(&self, object: gix_object::Kind, from: &[u8]) -> Result<gix_hash::ObjectId, Self::Error> {
- self.write_stream(object, from.len() as u64, from)
+ fn write_buf(&self, object: gix_object::Kind, mut from: &[u8]) -> Result<gix_hash::ObjectId, crate::write::Error> {
+ self.write_stream(object, from.len() as u64, &mut from)
}
/// As [`write`][Write::write], but takes an input stream.
/// This is commonly used for writing blobs directly without reading them to memory first.
@@ -26,8 +21,8 @@ pub trait Write {
&self,
kind: gix_object::Kind,
size: u64,
- from: impl io::Read,
- ) -> Result<gix_hash::ObjectId, Self::Error>;
+ from: &mut dyn io::Read,
+ ) -> Result<gix_hash::ObjectId, crate::write::Error>;
}
/// Describe how object can be located in an object store.
@@ -39,11 +34,8 @@ pub trait Write {
///
/// [issue]: https://github.com/rust-lang/rust/issues/44265
pub trait Find {
- /// The error returned by [`try_find()`][Find::try_find()]
- type Error: std::error::Error + 'static;
-
/// Returns true if the object exists in the database.
- fn contains(&self, id: impl AsRef<gix_hash::oid>) -> bool;
+ fn contains(&self, id: &gix_hash::oid) -> bool;
/// Find an object matching `id` in the database while placing its raw, possibly encoded data into `buffer`.
///
@@ -51,17 +43,15 @@ pub trait Find {
/// retrieval.
fn try_find<'a>(
&self,
- id: impl AsRef<gix_hash::oid>,
+ id: &gix_hash::oid,
buffer: &'a mut Vec<u8>,
- ) -> Result<Option<gix_object::Data<'a>>, Self::Error>;
+ ) -> Result<Option<gix_object::Data<'a>>, find::Error>;
}
/// A way to obtain object properties without fully decoding it.
pub trait Header {
- /// The error returned by [`try_header()`][Header::try_header()].
- type Error: std::error::Error + 'static;
/// Try to read the header of the object associated with `id` or return `None` if it could not be found.
- fn try_header(&self, id: impl AsRef<gix_hash::oid>) -> Result<Option<find::Header>, Self::Error>;
+ fn try_header(&self, id: &gix_hash::oid) -> Result<Option<find::Header>, find::Error>;
}
mod _impls {
@@ -76,17 +66,15 @@ mod _impls {
where
T: crate::Write,
{
- type Error = T::Error;
-
- fn write(&self, object: impl WriteTo) -> Result<ObjectId, Self::Error> {
+ fn write(&self, object: &dyn WriteTo) -> Result<ObjectId, crate::write::Error> {
(*self).write(object)
}
- fn write_buf(&self, object: Kind, from: &[u8]) -> Result<ObjectId, Self::Error> {
+ fn write_buf(&self, object: Kind, from: &[u8]) -> Result<ObjectId, crate::write::Error> {
(*self).write_buf(object, from)
}
- fn write_stream(&self, kind: Kind, size: u64, from: impl Read) -> Result<ObjectId, Self::Error> {
+ fn write_stream(&self, kind: Kind, size: u64, from: &mut dyn Read) -> Result<ObjectId, crate::write::Error> {
(*self).write_stream(kind, size, from)
}
}
@@ -95,17 +83,15 @@ mod _impls {
where
T: crate::Write,
{
- type Error = T::Error;
-
- fn write(&self, object: impl WriteTo) -> Result<ObjectId, Self::Error> {
+ fn write(&self, object: &dyn WriteTo) -> Result<ObjectId, crate::write::Error> {
self.deref().write(object)
}
- fn write_buf(&self, object: Kind, from: &[u8]) -> Result<ObjectId, Self::Error> {
+ fn write_buf(&self, object: Kind, from: &[u8]) -> Result<ObjectId, crate::write::Error> {
self.deref().write_buf(object, from)
}
- fn write_stream(&self, kind: Kind, size: u64, from: impl Read) -> Result<ObjectId, Self::Error> {
+ fn write_stream(&self, kind: Kind, size: u64, from: &mut dyn Read) -> Result<ObjectId, crate::write::Error> {
self.deref().write_stream(kind, size, from)
}
}
@@ -114,17 +100,15 @@ mod _impls {
where
T: crate::Write,
{
- type Error = T::Error;
-
- fn write(&self, object: impl WriteTo) -> Result<ObjectId, Self::Error> {
+ fn write(&self, object: &dyn WriteTo) -> Result<ObjectId, crate::write::Error> {
self.deref().write(object)
}
- fn write_buf(&self, object: Kind, from: &[u8]) -> Result<ObjectId, Self::Error> {
+ fn write_buf(&self, object: Kind, from: &[u8]) -> Result<ObjectId, crate::write::Error> {
self.deref().write_buf(object, from)
}
- fn write_stream(&self, kind: Kind, size: u64, from: impl Read) -> Result<ObjectId, Self::Error> {
+ fn write_stream(&self, kind: Kind, size: u64, from: &mut dyn Read) -> Result<ObjectId, crate::write::Error> {
self.deref().write_stream(kind, size, from)
}
}
@@ -133,13 +117,11 @@ mod _impls {
where
T: crate::Find,
{
- type Error = T::Error;
-
- fn contains(&self, id: impl AsRef<oid>) -> bool {
+ fn contains(&self, id: &oid) -> bool {
(*self).contains(id)
}
- fn try_find<'a>(&self, id: impl AsRef<oid>, buffer: &'a mut Vec<u8>) -> Result<Option<Data<'a>>, Self::Error> {
+ fn try_find<'a>(&self, id: &oid, buffer: &'a mut Vec<u8>) -> Result<Option<Data<'a>>, crate::find::Error> {
(*self).try_find(id, buffer)
}
}
@@ -148,9 +130,7 @@ mod _impls {
where
T: crate::Header,
{
- type Error = T::Error;
-
- fn try_header(&self, id: impl AsRef<oid>) -> Result<Option<Header>, Self::Error> {
+ fn try_header(&self, id: &oid) -> Result<Option<Header>, crate::find::Error> {
(*self).try_header(id)
}
}
@@ -159,13 +139,11 @@ mod _impls {
where
T: crate::Find,
{
- type Error = T::Error;
-
- fn contains(&self, id: impl AsRef<oid>) -> bool {
+ fn contains(&self, id: &oid) -> bool {
self.deref().contains(id)
}
- fn try_find<'a>(&self, id: impl AsRef<oid>, buffer: &'a mut Vec<u8>) -> Result<Option<Data<'a>>, Self::Error> {
+ fn try_find<'a>(&self, id: &oid, buffer: &'a mut Vec<u8>) -> Result<Option<Data<'a>>, crate::find::Error> {
self.deref().try_find(id, buffer)
}
}
@@ -174,9 +152,7 @@ mod _impls {
where
T: crate::Header,
{
- type Error = T::Error;
-
- fn try_header(&self, id: impl AsRef<oid>) -> Result<Option<Header>, Self::Error> {
+ fn try_header(&self, id: &oid) -> Result<Option<Header>, crate::find::Error> {
self.deref().try_header(id)
}
}
@@ -185,13 +161,11 @@ mod _impls {
where
T: crate::Find,
{
- type Error = T::Error;
-
- fn contains(&self, id: impl AsRef<oid>) -> bool {
+ fn contains(&self, id: &oid) -> bool {
self.deref().contains(id)
}
- fn try_find<'a>(&self, id: impl AsRef<oid>, buffer: &'a mut Vec<u8>) -> Result<Option<Data<'a>>, Self::Error> {
+ fn try_find<'a>(&self, id: &oid, buffer: &'a mut Vec<u8>) -> Result<Option<Data<'a>>, crate::find::Error> {
self.deref().try_find(id, buffer)
}
}
@@ -200,9 +174,7 @@ mod _impls {
where
T: crate::Header,
{
- type Error = T::Error;
-
- fn try_header(&self, id: impl AsRef<oid>) -> Result<Option<Header>, Self::Error> {
+ fn try_header(&self, id: &oid) -> Result<Option<Header>, crate::find::Error> {
self.deref().try_header(id)
}
}
@@ -219,10 +191,9 @@ mod ext {
/// while returning the desired object type.
fn $method<'a>(
&self,
- id: impl AsRef<gix_hash::oid>,
+ id: &gix_hash::oid,
buffer: &'a mut Vec<u8>,
- ) -> Result<$object_type, find::existing_object::Error<Self::Error>> {
- let id = id.as_ref();
+ ) -> Result<$object_type, find::existing_object::Error> {
self.try_find(id, buffer)
.map_err(find::existing_object::Error::Find)?
.ok_or_else(|| find::existing_object::Error::NotFound {
@@ -245,10 +216,9 @@ mod ext {
/// while returning the desired iterator type.
fn $method<'a>(
&self,
- id: impl AsRef<gix_hash::oid>,
+ id: &gix_hash::oid,
buffer: &'a mut Vec<u8>,
- ) -> Result<$object_type, find::existing_iter::Error<Self::Error>> {
- let id = id.as_ref();
+ ) -> Result<$object_type, find::existing_iter::Error> {
self.try_find(id, buffer)
.map_err(find::existing_iter::Error::Find)?
.ok_or_else(|| find::existing_iter::Error::NotFound {
@@ -267,10 +237,7 @@ mod ext {
/// An extension trait with convenience functions.
pub trait HeaderExt: super::Header {
/// Like [`try_header(…)`][super::Header::try_header()], but flattens the `Result<Option<_>>` into a single `Result` making a non-existing object an error.
- fn header(
- &self,
- id: impl AsRef<gix_hash::oid>,
- ) -> Result<crate::find::Header, find::existing::Error<Self::Error>> {
+ fn header(&self, id: impl AsRef<gix_hash::oid>) -> Result<find::Header, find::existing::Error> {
let id = id.as_ref();
self.try_header(id)
.map_err(find::existing::Error::Find)?
@@ -285,10 +252,9 @@ mod ext {
/// Like [`try_find(…)`][super::Find::try_find()], but flattens the `Result<Option<_>>` into a single `Result` making a non-existing object an error.
fn find<'a>(
&self,
- id: impl AsRef<gix_hash::oid>,
+ id: &gix_hash::oid,
buffer: &'a mut Vec<u8>,
- ) -> Result<gix_object::Data<'a>, find::existing::Error<Self::Error>> {
- let id = id.as_ref();
+ ) -> Result<gix_object::Data<'a>, find::existing::Error> {
self.try_find(id, buffer)
.map_err(find::existing::Error::Find)?
.ok_or_else(|| find::existing::Error::NotFound { oid: id.to_owned() })
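Note: the net effect of removing the associated `Error` types is that `Write`, `Find`, and `Header` become object-safe, so stores can be passed around as plain trait objects. A closing sketch under that assumption; the helper names are made up:

    fn store_blob(db: &dyn gix_odb::Write, data: &[u8]) -> Result<gix_hash::ObjectId, gix_odb::write::Error> {
        db.write_buf(gix_object::Kind::Blob, data)
    }

    fn has_object(db: &dyn gix_odb::Find, id: &gix_hash::oid) -> bool {
        db.contains(id)
    }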