summaryrefslogtreecommitdiffstats
path: root/vendor/kstring/src
diff options
context:
space:
mode:
Diffstat (limited to 'vendor/kstring/src')
-rw-r--r--vendor/kstring/src/backend.rs97
-rw-r--r--vendor/kstring/src/lib.rs78
-rw-r--r--vendor/kstring/src/stack.rs457
-rw-r--r--vendor/kstring/src/string.rs865
-rw-r--r--vendor/kstring/src/string_cow.rs383
-rw-r--r--vendor/kstring/src/string_ref.rs277
6 files changed, 2157 insertions, 0 deletions
diff --git a/vendor/kstring/src/backend.rs b/vendor/kstring/src/backend.rs
new file mode 100644
index 000000000..3827082f1
--- /dev/null
+++ b/vendor/kstring/src/backend.rs
@@ -0,0 +1,97 @@
+#[cfg(feature = "arc")]
+pub(crate) type DefaultStr = crate::backend::ArcStr;
+#[cfg(not(feature = "arc"))]
+pub(crate) type DefaultStr = crate::backend::BoxedStr;
+
+/// Fast allocations, O(n) clones
+pub type BoxedStr = Box<str>;
+static_assertions::assert_eq_size!(DefaultStr, BoxedStr);
+
+/// Cross-thread, O(1) clones
+pub type ArcStr = std::sync::Arc<str>;
+static_assertions::assert_eq_size!(DefaultStr, ArcStr);
+
+/// O(1) clones
+pub type RcStr = std::rc::Rc<str>;
+static_assertions::assert_eq_size!(DefaultStr, RcStr);
+
+/// Abstract over different type of heap-allocated strings
+pub trait HeapStr: std::fmt::Debug + Clone + private::Sealed {
+ fn from_str(other: &str) -> Self;
+ fn from_string(other: String) -> Self;
+ fn from_boxed_str(other: BoxedStr) -> Self;
+ fn as_str(&self) -> &str;
+}
+
+impl HeapStr for BoxedStr {
+ #[inline]
+ fn from_str(other: &str) -> Self {
+ other.into()
+ }
+
+ #[inline]
+ fn from_string(other: String) -> Self {
+ other.into_boxed_str()
+ }
+
+ #[inline]
+ fn from_boxed_str(other: BoxedStr) -> Self {
+ other
+ }
+
+ #[inline]
+ fn as_str(&self) -> &str {
+ self
+ }
+}
+
+impl HeapStr for ArcStr {
+ #[inline]
+ fn from_str(other: &str) -> Self {
+ other.into()
+ }
+
+ #[inline]
+ fn from_string(other: String) -> Self {
+ other.into_boxed_str().into()
+ }
+
+ #[inline]
+ fn from_boxed_str(other: BoxedStr) -> Self {
+ other.into()
+ }
+
+ #[inline]
+ fn as_str(&self) -> &str {
+ self
+ }
+}
+
+impl HeapStr for RcStr {
+ #[inline]
+ fn from_str(other: &str) -> Self {
+ other.into()
+ }
+
+ #[inline]
+ fn from_string(other: String) -> Self {
+ other.into_boxed_str().into()
+ }
+
+ #[inline]
+ fn from_boxed_str(other: BoxedStr) -> Self {
+ other.into()
+ }
+
+ #[inline]
+ fn as_str(&self) -> &str {
+ self
+ }
+}
+
+pub(crate) mod private {
+ pub trait Sealed {}
+ impl Sealed for super::BoxedStr {}
+ impl Sealed for super::ArcStr {}
+ impl Sealed for super::RcStr {}
+}
diff --git a/vendor/kstring/src/lib.rs b/vendor/kstring/src/lib.rs
new file mode 100644
index 000000000..dc1ffd608
--- /dev/null
+++ b/vendor/kstring/src/lib.rs
@@ -0,0 +1,78 @@
+//! Key String: Optimized for map keys.
+//!
+//! # Examples
+//!
+//! String creation
+//! ```rust
+//! // Explicit
+//! let literal = kstring::KString::from_static("literal");
+//! // Implicit
+//! let literal = kstring::KString::from("literal");
+//!
+//! // Explicit
+//! let inline = kstring::KString::try_inline("stack").unwrap();
+//! let inline = kstring::KString::from_ref("stack");
+//!
+//! let formatted: kstring::KStringCow = format!("Hello {} and {}", literal, inline).into();
+//! ```
+//!
+//! # Background
+//!
+//! Considerations:
+//! - Large maps
+//! - Most keys live and drop without being used in any other way
+//! - Most keys are relatively small (single to double digit bytes)
+//! - Keys are immutable
+//! - Allow zero-cost abstractions between structs and maps (e.g. no allocating
+//! when dealing with struct field names)
+//!
+//! Ramifications:
+//! - Inline small strings rather than going to the heap.
+//! - Preserve `&'static str` across strings ([`KString`]),
+//! references ([`KStringRef`]), and lifetime abstractions ([`KStringCow`]) to avoid
+//! allocating for struct field names.
+//! - Use `Box<str>` rather than `String` to use less memory.
+//!
+//! # Feature Flags
+//!
+#![cfg_attr(feature = "document-features", doc = document_features::document_features!())]
+#![cfg_attr(feature = "safe", forbid(unsafe_code))]
+
+#[cfg(not(feature = "std"))]
+compile_error!("`std` feature is required; reserved for future `no_std` support");
+
+mod stack;
+mod string;
+mod string_cow;
+mod string_ref;
+
+pub mod backend;
+
+pub use stack::StackString;
+pub use string::*;
+pub use string_cow::*;
+pub use string_ref::*;
+
+#[cfg(test)]
+mod test {
+ #[test]
+ fn test_size() {
+ println!(
+ "String: {}",
+ std::mem::size_of::<crate::string::StdString>()
+ );
+ println!(
+ "Box<str>: {}",
+ std::mem::size_of::<crate::backend::DefaultStr>()
+ );
+ println!(
+ "Box<Box<str>>: {}",
+ std::mem::size_of::<Box<crate::backend::DefaultStr>>()
+ );
+ println!("str: {}", std::mem::size_of::<&'static str>());
+ println!(
+ "Cow: {}",
+ std::mem::size_of::<std::borrow::Cow<'static, str>>()
+ );
+ }
+}
diff --git a/vendor/kstring/src/stack.rs b/vendor/kstring/src/stack.rs
new file mode 100644
index 000000000..93e2f0722
--- /dev/null
+++ b/vendor/kstring/src/stack.rs
@@ -0,0 +1,457 @@
+use std::fmt;
+
+pub(crate) type Len = u8;
+
+/// Fixed-size stack-allocated string
+#[derive(Copy, Clone)]
+pub struct StackString<const CAPACITY: usize> {
+ len: Len,
+ buffer: StrBuffer<CAPACITY>,
+}
+
+impl<const CAPACITY: usize> StackString<CAPACITY> {
+ pub const CAPACITY: usize = CAPACITY;
+ pub const EMPTY: Self = Self::empty();
+
+ const fn empty() -> Self {
+ Self {
+ len: 0,
+ buffer: StrBuffer::empty(),
+ }
+ }
+
+ /// Create a `StackString` from a `&str`, if it'll fit within `Self::CAPACITY`
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// let s = kstring::StackString::<3>::try_new("foo");
+ /// assert_eq!(s.as_deref(), Some("foo"));
+ /// let s = kstring::StackString::<3>::try_new("foobar");
+ /// assert_eq!(s, None);
+ /// ```
+ #[inline]
+ #[must_use]
+ pub fn try_new(s: &str) -> Option<Self> {
+ let len = s.as_bytes().len();
+ if len <= Self::CAPACITY {
+ #[cfg(feature = "unsafe")]
+ let stack = {
+ unsafe {
+ // SAFETY: We've confirmed `len` is within size
+ Self::new_unchecked(s)
+ }
+ };
+ #[cfg(not(feature = "unsafe"))]
+ let stack = { Self::new(s) };
+ Some(stack)
+ } else {
+ None
+ }
+ }
+
+ /// Create a `StackString` from a `&str`
+ ///
+ /// # Panics
+ ///
+ /// Calling this function with a string larger than `Self::CAPACITY` will panic
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// let s = kstring::StackString::<3>::new("foo");
+ /// assert_eq!(s, "foo");
+ /// ```
+ #[inline]
+ #[must_use]
+ pub fn new(s: &str) -> Self {
+ let len = s.as_bytes().len() as u8;
+ debug_assert!(Self::CAPACITY <= Len::MAX.into());
+ let buffer = StrBuffer::new(s);
+ Self { len, buffer }
+ }
+
+ /// Create a `StackString` from a `&str`
+ ///
+ /// # Safety
+ ///
+ /// Calling this function with a string larger than `Self::CAPACITY` is undefined behavior.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// let s = unsafe {
+ /// // SAFETY: Literal is short-enough
+ /// kstring::StackString::<3>::new_unchecked("foo")
+ /// };
+ /// assert_eq!(s, "foo");
+ /// ```
+ #[inline]
+ #[must_use]
+ #[cfg(feature = "unsafe")]
+ pub unsafe fn new_unchecked(s: &str) -> Self {
+ let len = s.as_bytes().len() as u8;
+ debug_assert!(Self::CAPACITY <= Len::MAX.into());
+ let buffer = StrBuffer::new_unchecked(s);
+ Self { len, buffer }
+ }
+
+ /// Extracts a string slice containing the entire `StackString`.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// let s = kstring::StackString::<3>::try_new("foo").unwrap();
+ ///
+ /// assert_eq!("foo", s.as_str());
+ /// ```
+ #[inline]
+ #[must_use]
+ pub fn as_str(&self) -> &str {
+ let len = self.len as usize;
+ #[cfg(feature = "unsafe")]
+ unsafe {
+ // SAFETY: Constructors guarantee that `buffer[..len]` is a `str`,
+ // and we don't mutate the data afterwards.
+ self.buffer.as_str_unchecked(len)
+ }
+ #[cfg(not(feature = "unsafe"))]
+ self.buffer.as_str(len)
+ }
+
+ /// Converts a `StackString` into a mutable string slice.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// let mut s = kstring::StackString::<6>::try_new("foobar").unwrap();
+ /// let s_mut_str = s.as_mut_str();
+ ///
+ /// s_mut_str.make_ascii_uppercase();
+ ///
+ /// assert_eq!("FOOBAR", s_mut_str);
+ /// ```
+ #[inline]
+ #[must_use]
+ pub fn as_mut_str(&mut self) -> &mut str {
+ let len = self.len as usize;
+ #[cfg(feature = "unsafe")]
+ unsafe {
+ // SAFETY: Constructors guarantee that `buffer[..len]` is a `str`,
+ // and we don't mutate the data afterwards.
+ self.buffer.as_mut_str_unchecked(len)
+ }
+ #[cfg(not(feature = "unsafe"))]
+ self.buffer.as_mut_str(len)
+ }
+
+ /// Returns the length of this `StackString`, in bytes, not [`char`]s or
+ /// graphemes. In other words, it might not be what a human considers the
+ /// length of the string.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// let a = kstring::StackString::<3>::try_new("foo").unwrap();
+ /// assert_eq!(a.len(), 3);
+ ///
+ /// let fancy_f = kstring::StackString::<4>::try_new("ƒoo").unwrap();
+ /// assert_eq!(fancy_f.len(), 4);
+ /// assert_eq!(fancy_f.chars().count(), 3);
+ /// ```
+ #[inline]
+ #[must_use]
+ pub fn len(&self) -> usize {
+ self.len as usize
+ }
+
+ /// Returns `true` if this `StackString` has a length of zero, and `false` otherwise.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// let mut v = kstring::StackString::<20>::EMPTY;
+ /// assert!(v.is_empty());
+ ///
+ /// let a = kstring::StackString::<3>::try_new("foo").unwrap();
+ /// assert!(!a.is_empty());
+ /// ```
+ #[inline]
+ #[must_use]
+ pub fn is_empty(&self) -> bool {
+ self.len() == 0
+ }
+
+ /// Truncates this `StackString`, removing all contents.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// let mut s = kstring::StackString::<3>::try_new("foo").unwrap();
+ ///
+ /// s.clear();
+ ///
+ /// assert!(s.is_empty());
+ /// assert_eq!(0, s.len());
+ /// ```
+ #[inline]
+ pub fn clear(&mut self) {
+ self.len = 0;
+ }
+
+ /// Shortens this `StackString` to the specified length.
+ ///
+ /// If `new_len` is greater than the string's current length, this has no
+ /// effect.
+ ///
+ /// Note that this method has no effect on the allocated capacity
+ /// of the string
+ ///
+ /// # Panics
+ ///
+ /// Panics if `new_len` does not lie on a [`char`] boundary.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// let mut s = kstring::StackString::<5>::try_new("hello").unwrap();
+ ///
+ /// s.truncate(2);
+ ///
+ /// assert_eq!(s, "he");
+ /// ```
+ #[inline]
+ pub fn truncate(&mut self, new_len: usize) {
+ if new_len <= self.len() {
+ assert!(self.is_char_boundary(new_len));
+ self.len = new_len as u8;
+ }
+ }
+}
+
+impl<const CAPACITY: usize> Default for StackString<CAPACITY> {
+ fn default() -> Self {
+ Self::empty()
+ }
+}
+
+impl<const CAPACITY: usize> std::ops::Deref for StackString<CAPACITY> {
+ type Target = str;
+
+ #[inline]
+ fn deref(&self) -> &str {
+ self.as_str()
+ }
+}
+
+impl<const CAPACITY: usize> Eq for StackString<CAPACITY> {}
+
+impl<const C1: usize, const C2: usize> PartialEq<StackString<C1>> for StackString<C2> {
+ #[inline]
+ fn eq(&self, other: &StackString<C1>) -> bool {
+ PartialEq::eq(self.as_str(), other.as_str())
+ }
+}
+
+impl<const CAPACITY: usize> PartialEq<str> for StackString<CAPACITY> {
+ #[inline]
+ fn eq(&self, other: &str) -> bool {
+ PartialEq::eq(self.as_str(), other)
+ }
+}
+
+impl<'s, const CAPACITY: usize> PartialEq<&'s str> for StackString<CAPACITY> {
+ #[inline]
+ fn eq(&self, other: &&str) -> bool {
+ PartialEq::eq(self.as_str(), *other)
+ }
+}
+
+impl<const CAPACITY: usize> PartialEq<String> for StackString<CAPACITY> {
+ #[inline]
+ fn eq(&self, other: &String) -> bool {
+ PartialEq::eq(self.as_str(), other.as_str())
+ }
+}
+
+impl<const CAPACITY: usize> Ord for StackString<CAPACITY> {
+ #[inline]
+ fn cmp(&self, other: &Self) -> std::cmp::Ordering {
+ self.as_str().cmp(other.as_str())
+ }
+}
+
+impl<const C1: usize, const C2: usize> PartialOrd<StackString<C1>> for StackString<C2> {
+ #[inline]
+ fn partial_cmp(&self, other: &StackString<C1>) -> Option<std::cmp::Ordering> {
+ self.as_str().partial_cmp(other.as_str())
+ }
+}
+
+impl<const CAPACITY: usize> PartialOrd<str> for StackString<CAPACITY> {
+ #[inline]
+ fn partial_cmp(&self, other: &str) -> Option<std::cmp::Ordering> {
+ self.as_str().partial_cmp(other)
+ }
+}
+
+impl<'s, const CAPACITY: usize> PartialOrd<&'s str> for StackString<CAPACITY> {
+ #[inline]
+ fn partial_cmp(&self, other: &&str) -> Option<std::cmp::Ordering> {
+ self.as_str().partial_cmp(other)
+ }
+}
+
+impl<const CAPACITY: usize> PartialOrd<String> for StackString<CAPACITY> {
+ #[inline]
+ fn partial_cmp(&self, other: &String) -> Option<std::cmp::Ordering> {
+ self.as_str().partial_cmp(other.as_str())
+ }
+}
+
+impl<const CAPACITY: usize> std::hash::Hash for StackString<CAPACITY> {
+ #[inline]
+ fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
+ self.as_str().hash(state);
+ }
+}
+
+impl<const CAPACITY: usize> fmt::Debug for StackString<CAPACITY> {
+ #[inline]
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Debug::fmt(self.as_str(), f)
+ }
+}
+
+impl<const CAPACITY: usize> fmt::Display for StackString<CAPACITY> {
+ #[inline]
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Display::fmt(self.as_str(), f)
+ }
+}
+
+impl<const CAPACITY: usize> AsRef<str> for StackString<CAPACITY> {
+ #[inline]
+ fn as_ref(&self) -> &str {
+ self.as_str()
+ }
+}
+
+impl<const CAPACITY: usize> AsRef<[u8]> for StackString<CAPACITY> {
+ #[inline]
+ fn as_ref(&self) -> &[u8] {
+ self.as_bytes()
+ }
+}
+
+impl<const CAPACITY: usize> AsRef<std::ffi::OsStr> for StackString<CAPACITY> {
+ #[inline]
+ fn as_ref(&self) -> &std::ffi::OsStr {
+ (&**self).as_ref()
+ }
+}
+
+impl<const CAPACITY: usize> AsRef<std::path::Path> for StackString<CAPACITY> {
+ #[inline]
+ fn as_ref(&self) -> &std::path::Path {
+ std::path::Path::new(self)
+ }
+}
+
+impl<const CAPACITY: usize> std::borrow::Borrow<str> for StackString<CAPACITY> {
+ #[inline]
+ fn borrow(&self) -> &str {
+ self.as_str()
+ }
+}
+
+#[derive(Copy, Clone)]
+#[repr(transparent)]
+pub(crate) struct StrBuffer<const CAPACITY: usize>([u8; CAPACITY]);
+
+impl<const CAPACITY: usize> StrBuffer<CAPACITY> {
+ pub(crate) const fn empty() -> Self {
+ let array = [0; CAPACITY];
+ StrBuffer(array)
+ }
+
+ #[inline]
+ pub(crate) fn new(s: &str) -> Self {
+ let len = s.as_bytes().len();
+ debug_assert!(len <= CAPACITY);
+ let mut buffer = Self::default();
+ if let Some(buffer) = buffer.0.get_mut(..len) {
+ buffer.copy_from_slice(s.as_bytes());
+ } else {
+ panic!("`{}` is larger than capacity {}", s, CAPACITY);
+ }
+ buffer
+ }
+
+ #[inline]
+ #[cfg(not(feature = "unsafe"))]
+ pub(crate) fn as_str(&self, len: usize) -> &str {
+ let slice = self.0.get(..len).unwrap();
+ std::str::from_utf8(slice).unwrap()
+ }
+
+ #[inline]
+ #[cfg(not(feature = "unsafe"))]
+ pub(crate) fn as_mut_str(&mut self, len: usize) -> &mut str {
+ let slice = self.0.get_mut(..len).unwrap();
+ std::str::from_utf8_mut(slice).unwrap()
+ }
+}
+
+impl<const CAPACITY: usize> StrBuffer<CAPACITY> {
+ #[inline]
+ #[cfg(feature = "unsafe")]
+ pub(crate) unsafe fn new_unchecked(s: &str) -> Self {
+ let len = s.as_bytes().len();
+ debug_assert!(len <= CAPACITY);
+ let mut buffer = Self::default();
+ buffer
+ .0
+ .get_unchecked_mut(..len)
+ .copy_from_slice(s.as_bytes());
+ buffer
+ }
+
+ #[inline]
+ #[cfg(feature = "unsafe")]
+ pub(crate) unsafe fn as_str_unchecked(&self, len: usize) -> &str {
+ let slice = self.0.get_unchecked(..len);
+ std::str::from_utf8_unchecked(slice)
+ }
+
+ #[inline]
+ #[cfg(feature = "unsafe")]
+ pub(crate) unsafe fn as_mut_str_unchecked(&mut self, len: usize) -> &mut str {
+ let slice = self.0.get_unchecked_mut(..len);
+ std::str::from_utf8_unchecked_mut(slice)
+ }
+}
+
+impl<const CAPACITY: usize> Default for StrBuffer<CAPACITY> {
+ fn default() -> Self {
+ Self::empty()
+ }
+}
diff --git a/vendor/kstring/src/string.rs b/vendor/kstring/src/string.rs
new file mode 100644
index 000000000..ed39d188a
--- /dev/null
+++ b/vendor/kstring/src/string.rs
@@ -0,0 +1,865 @@
+use std::{borrow::Cow, fmt};
+
+use crate::stack::StackString;
+use crate::KStringCowBase;
+use crate::KStringRef;
+
+pub(crate) type StdString = std::string::String;
+
+/// A UTF-8 encoded, immutable string.
+pub type KString = KStringBase<crate::backend::DefaultStr>;
+
+/// A UTF-8 encoded, immutable string.
+#[derive(Clone)]
+#[repr(transparent)]
+pub struct KStringBase<B> {
+ inner: KStringInner<B>,
+}
+
+impl<B> KStringBase<B> {
+ pub const EMPTY: Self = KStringBase::from_static("");
+
+ /// Create a new empty `KStringBase`.
+ #[inline]
+ #[must_use]
+ pub fn new() -> Self {
+ Self::EMPTY
+ }
+
+ /// Create a reference to a `'static` data.
+ #[inline]
+ #[must_use]
+ pub const fn from_static(other: &'static str) -> Self {
+ Self {
+ inner: KStringInner::from_static(other),
+ }
+ }
+
+ /// Create an inline string, if possible
+ #[inline]
+ #[must_use]
+ pub fn try_inline(other: &str) -> Option<Self> {
+ KStringInner::try_inline(other).map(|inner| Self { inner })
+ }
+}
+
+impl<B: crate::backend::HeapStr> KStringBase<B> {
+ /// Create an owned `KStringBase`.
+ #[inline]
+ #[must_use]
+ pub fn from_boxed(other: crate::backend::BoxedStr) -> Self {
+ Self {
+ inner: KStringInner::from_boxed(other),
+ }
+ }
+
+ /// Create an owned `KStringBase`.
+ #[inline]
+ #[must_use]
+ pub fn from_string(other: StdString) -> Self {
+ Self {
+ inner: KStringInner::from_string(other),
+ }
+ }
+
+ /// Create an owned `KStringBase` optimally from a reference.
+ #[inline]
+ #[must_use]
+ pub fn from_ref(other: &str) -> Self {
+ Self {
+ inner: KStringInner::from_ref(other),
+ }
+ }
+
+ /// Get a reference to the `KStringBase`.
+ #[inline]
+ #[must_use]
+ pub fn as_ref(&self) -> KStringRef<'_> {
+ self.inner.as_ref()
+ }
+
+ /// Extracts a string slice containing the entire `KStringBase`.
+ #[inline]
+ #[must_use]
+ pub fn as_str(&self) -> &str {
+ self.inner.as_str()
+ }
+
+ /// Convert to a mutable string type, cloning the data if necessary.
+ #[inline]
+ #[must_use]
+ pub fn into_string(self) -> StdString {
+ String::from(self.into_boxed_str())
+ }
+
+ /// Convert to a mutable string type, cloning the data if necessary.
+ #[inline]
+ #[must_use]
+ pub fn into_boxed_str(self) -> crate::backend::BoxedStr {
+ self.inner.into_boxed_str()
+ }
+
+ /// Convert to a Cow str
+ #[inline]
+ #[must_use]
+ pub fn into_cow_str(self) -> Cow<'static, str> {
+ self.inner.into_cow_str()
+ }
+}
+
+impl<B: crate::backend::HeapStr> std::ops::Deref for KStringBase<B> {
+ type Target = str;
+
+ #[inline]
+ fn deref(&self) -> &str {
+ self.as_str()
+ }
+}
+
+impl<B: crate::backend::HeapStr> Eq for KStringBase<B> {}
+
+impl<'s, B: crate::backend::HeapStr> PartialEq<KStringBase<B>> for KStringBase<B> {
+ #[inline]
+ fn eq(&self, other: &Self) -> bool {
+ PartialEq::eq(self.as_str(), other.as_str())
+ }
+}
+
+impl<'s, B: crate::backend::HeapStr> PartialEq<str> for KStringBase<B> {
+ #[inline]
+ fn eq(&self, other: &str) -> bool {
+ PartialEq::eq(self.as_str(), other)
+ }
+}
+
+impl<'s, B: crate::backend::HeapStr> PartialEq<&'s str> for KStringBase<B> {
+ #[inline]
+ fn eq(&self, other: &&str) -> bool {
+ PartialEq::eq(self.as_str(), *other)
+ }
+}
+
+impl<'s, B: crate::backend::HeapStr> PartialEq<String> for KStringBase<B> {
+ #[inline]
+ fn eq(&self, other: &StdString) -> bool {
+ PartialEq::eq(self.as_str(), other.as_str())
+ }
+}
+
+impl<B: crate::backend::HeapStr> Ord for KStringBase<B> {
+ #[inline]
+ fn cmp(&self, other: &Self) -> std::cmp::Ordering {
+ self.as_str().cmp(other.as_str())
+ }
+}
+
+impl<B: crate::backend::HeapStr> PartialOrd for KStringBase<B> {
+ #[inline]
+ fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
+ self.as_str().partial_cmp(other.as_str())
+ }
+}
+
+impl<B: crate::backend::HeapStr> std::hash::Hash for KStringBase<B> {
+ #[inline]
+ fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
+ self.as_str().hash(state);
+ }
+}
+
+impl<B: crate::backend::HeapStr> fmt::Debug for KStringBase<B> {
+ #[inline]
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.as_str().fmt(f)
+ }
+}
+
+impl<B: crate::backend::HeapStr> fmt::Display for KStringBase<B> {
+ #[inline]
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Display::fmt(self.as_str(), f)
+ }
+}
+
+impl<B: crate::backend::HeapStr> AsRef<str> for KStringBase<B> {
+ #[inline]
+ fn as_ref(&self) -> &str {
+ self.as_str()
+ }
+}
+
+impl<B: crate::backend::HeapStr> AsRef<[u8]> for KStringBase<B> {
+ #[inline]
+ fn as_ref(&self) -> &[u8] {
+ self.as_bytes()
+ }
+}
+
+impl<B: crate::backend::HeapStr> AsRef<std::ffi::OsStr> for KStringBase<B> {
+ #[inline]
+ fn as_ref(&self) -> &std::ffi::OsStr {
+ (&**self).as_ref()
+ }
+}
+
+impl<B: crate::backend::HeapStr> AsRef<std::path::Path> for KStringBase<B> {
+ #[inline]
+ fn as_ref(&self) -> &std::path::Path {
+ std::path::Path::new(self)
+ }
+}
+
+impl<B: crate::backend::HeapStr> std::borrow::Borrow<str> for KStringBase<B> {
+ #[inline]
+ fn borrow(&self) -> &str {
+ self.as_str()
+ }
+}
+
+impl<B: crate::backend::HeapStr> Default for KStringBase<B> {
+ #[inline]
+ fn default() -> Self {
+ Self::new()
+ }
+}
+
+impl<'s, B: crate::backend::HeapStr> From<KStringRef<'s>> for KStringBase<B> {
+ #[inline]
+ fn from(other: KStringRef<'s>) -> Self {
+ other.to_owned()
+ }
+}
+
+impl<'s, B: crate::backend::HeapStr> From<&'s KStringRef<'s>> for KStringBase<B> {
+ #[inline]
+ fn from(other: &'s KStringRef<'s>) -> Self {
+ other.to_owned()
+ }
+}
+
+impl<'s, B: crate::backend::HeapStr> From<KStringCowBase<'s, B>> for KStringBase<B> {
+ #[inline]
+ fn from(other: KStringCowBase<'s, B>) -> Self {
+ other.into_owned()
+ }
+}
+
+impl<'s, B: crate::backend::HeapStr> From<&'s KStringCowBase<'s, B>> for KStringBase<B> {
+ #[inline]
+ fn from(other: &'s KStringCowBase<'s, B>) -> Self {
+ other.clone().into_owned()
+ }
+}
+
+impl<B: crate::backend::HeapStr> From<StdString> for KStringBase<B> {
+ #[inline]
+ fn from(other: StdString) -> Self {
+ Self::from_string(other)
+ }
+}
+
+impl<'s, B: crate::backend::HeapStr> From<&'s StdString> for KStringBase<B> {
+ #[inline]
+ fn from(other: &'s StdString) -> Self {
+ Self::from_ref(other)
+ }
+}
+
+impl<B: crate::backend::HeapStr> From<crate::backend::BoxedStr> for KStringBase<B> {
+ #[inline]
+ fn from(other: crate::backend::BoxedStr) -> Self {
+ Self::from_boxed(other)
+ }
+}
+
+impl<'s, B: crate::backend::HeapStr> From<&'s crate::backend::BoxedStr> for KStringBase<B> {
+ #[inline]
+ fn from(other: &'s crate::backend::BoxedStr) -> Self {
+ Self::from_ref(other)
+ }
+}
+
+impl<B: crate::backend::HeapStr> From<&'static str> for KStringBase<B> {
+ #[inline]
+ fn from(other: &'static str) -> Self {
+ Self::from_static(other)
+ }
+}
+
+impl<B: crate::backend::HeapStr> std::str::FromStr for KStringBase<B> {
+ type Err = std::convert::Infallible;
+ #[inline]
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ Ok(Self::from_ref(s))
+ }
+}
+
+#[cfg(feature = "serde")]
+impl<B: crate::backend::HeapStr> serde::Serialize for KStringBase<B> {
+ #[inline]
+ fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+ where
+ S: serde::Serializer,
+ {
+ serializer.serialize_str(self.as_str())
+ }
+}
+
+#[cfg(feature = "serde")]
+impl<'de, B: crate::backend::HeapStr> serde::Deserialize<'de> for KStringBase<B> {
+ fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+ where
+ D: serde::Deserializer<'de>,
+ {
+ deserializer.deserialize_string(StringVisitor(std::marker::PhantomData))
+ }
+}
+
+#[cfg(feature = "serde")]
+struct StringVisitor<B>(std::marker::PhantomData<B>);
+
+#[cfg(feature = "serde")]
+impl<'de, B: crate::backend::HeapStr> serde::de::Visitor<'de> for StringVisitor<B> {
+ type Value = KStringBase<B>;
+
+ fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
+ formatter.write_str("a string")
+ }
+
+ fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
+ where
+ E: serde::de::Error,
+ {
+ Ok(Self::Value::from_ref(v))
+ }
+
+ fn visit_string<E>(self, v: String) -> Result<Self::Value, E>
+ where
+ E: serde::de::Error,
+ {
+ Ok(Self::Value::from_string(v))
+ }
+
+ fn visit_bytes<E>(self, v: &[u8]) -> Result<Self::Value, E>
+ where
+ E: serde::de::Error,
+ {
+ match std::str::from_utf8(v) {
+ Ok(s) => Ok(Self::Value::from_ref(s)),
+ Err(_) => Err(serde::de::Error::invalid_value(
+ serde::de::Unexpected::Bytes(v),
+ &self,
+ )),
+ }
+ }
+
+ fn visit_byte_buf<E>(self, v: Vec<u8>) -> Result<Self::Value, E>
+ where
+ E: serde::de::Error,
+ {
+ match String::from_utf8(v) {
+ Ok(s) => Ok(Self::Value::from_string(s)),
+ Err(e) => Err(serde::de::Error::invalid_value(
+ serde::de::Unexpected::Bytes(&e.into_bytes()),
+ &self,
+ )),
+ }
+ }
+}
+
+use inner::KStringInner;
+
+#[cfg(not(feature = "unsafe"))]
+mod inner {
+ use super::*;
+
+ pub(super) enum KStringInner<B> {
+ Singleton(&'static str),
+ Inline(StackString<CAPACITY>),
+ Owned(B),
+ }
+
+ impl<B> KStringInner<B> {
+ /// Create a reference to a `'static` data.
+ #[inline]
+ pub const fn from_static(other: &'static str) -> Self {
+ Self::Singleton(other)
+ }
+
+ #[inline]
+ pub fn try_inline(other: &str) -> Option<Self> {
+ StackString::try_new(other).map(Self::Inline)
+ }
+ }
+
+ impl<B: crate::backend::HeapStr> KStringInner<B> {
+ #[inline]
+ pub(super) fn from_boxed(other: crate::backend::BoxedStr) -> Self {
+ #[allow(clippy::useless_conversion)]
+ Self::Owned(B::from_boxed_str(other))
+ }
+
+ #[inline]
+ pub(super) fn from_string(other: StdString) -> Self {
+ if (0..=CAPACITY).contains(&other.len()) {
+ let inline = { StackString::new(other.as_str()) };
+ Self::Inline(inline)
+ } else {
+ Self::from_boxed(other.into_boxed_str())
+ }
+ }
+
+ #[inline]
+ pub(super) fn from_ref(other: &str) -> Self {
+ if (0..=CAPACITY).contains(&other.len()) {
+ let inline = { StackString::new(other) };
+ Self::Inline(inline)
+ } else {
+ Self::Owned(B::from_str(other))
+ }
+ }
+
+ #[inline]
+ pub(super) fn as_ref(&self) -> KStringRef<'_> {
+ match self {
+ Self::Singleton(s) => KStringRef::from_static(s),
+ Self::Inline(s) => KStringRef::from_ref(s.as_str()),
+ Self::Owned(s) => KStringRef::from_ref(s.as_str()),
+ }
+ }
+
+ #[inline]
+ pub(super) fn as_str(&self) -> &str {
+ match self {
+ Self::Singleton(s) => s,
+ Self::Inline(s) => s.as_str(),
+ Self::Owned(s) => s.as_str(),
+ }
+ }
+
+ #[inline]
+ pub(super) fn into_boxed_str(self) -> crate::backend::BoxedStr {
+ match self {
+ Self::Singleton(s) => crate::backend::BoxedStr::from(s),
+ Self::Inline(s) => crate::backend::BoxedStr::from(s.as_str()),
+ Self::Owned(s) => crate::backend::BoxedStr::from(s.as_str()),
+ }
+ }
+
+ /// Convert to a Cow str
+ #[inline]
+ pub(super) fn into_cow_str(self) -> Cow<'static, str> {
+ match self {
+ Self::Singleton(s) => Cow::Borrowed(s),
+ Self::Inline(s) => Cow::Owned(s.as_str().into()),
+ Self::Owned(s) => Cow::Owned(s.as_str().into()),
+ }
+ }
+ }
+
+ // Explicit to avoid inlining which cuts clone times in half.
+ //
+ // An automatically derived `clone()` has 10ns overhead while the explicit `Deref`/`as_str` has
+ // none of that. Being explicit and removing the `#[inline]` attribute dropped the overhead to
+ // 5ns.
+ //
+ // My only guess is that the `clone()` calls we delegate to are just that much bigger than
+ // `as_str()` that, when combined with a jump table, is blowing the icache, slowing things down.
+ impl<B: Clone> Clone for KStringInner<B> {
+ fn clone(&self) -> Self {
+ match self {
+ Self::Singleton(s) => Self::Singleton(s),
+ Self::Inline(s) => Self::Inline(*s),
+ Self::Owned(s) => Self::Owned(s.clone()),
+ }
+ }
+ }
+
+ #[allow(unused)]
+ const LEN_SIZE: usize = std::mem::size_of::<crate::stack::Len>();
+
+ #[allow(unused)]
+ const TAG_SIZE: usize = std::mem::size_of::<u8>();
+
+ #[allow(unused)]
+ const MAX_CAPACITY: usize =
+ std::mem::size_of::<crate::string::StdString>() - TAG_SIZE - LEN_SIZE;
+
+ // Performance seems to slow down when trying to occupy all of the padding left by `String`'s
+ // discriminant. The question is whether faster len=1-16 "allocations" outweighs going to the heap
+ // for len=17-22.
+ #[allow(unused)]
+ const ALIGNED_CAPACITY: usize = std::mem::size_of::<crate::backend::DefaultStr>() - LEN_SIZE;
+
+ #[cfg(feature = "max_inline")]
+ const CAPACITY: usize = MAX_CAPACITY;
+ #[cfg(not(feature = "max_inline"))]
+ const CAPACITY: usize = ALIGNED_CAPACITY;
+}
+
+#[cfg(feature = "unsafe")]
+mod inner {
+ use super::*;
+
+ pub(super) union KStringInner<B> {
+ tag: TagVariant,
+ singleton: SingletonVariant,
+ owned: std::mem::ManuallyDrop<OwnedVariant<B>>,
+ inline: InlineVariant,
+ }
+
+ impl<B> KStringInner<B> {
+ /// Create a reference to a `'static` data.
+ #[inline]
+ pub const fn from_static(other: &'static str) -> Self {
+ Self {
+ singleton: SingletonVariant::new(other),
+ }
+ }
+
+ #[inline]
+ pub fn try_inline(other: &str) -> Option<Self> {
+ StackString::try_new(other).map(|inline| Self {
+ inline: InlineVariant::new(inline),
+ })
+ }
+
+ #[inline]
+ const fn tag(&self) -> Tag {
+ unsafe {
+ // SAFETY: `tag` is in the same spot in each variant
+ self.tag.tag
+ }
+ }
+ }
+
+ impl<B: crate::backend::HeapStr> KStringInner<B> {
+ #[inline]
+ pub(super) fn from_boxed(other: crate::backend::BoxedStr) -> Self {
+ #[allow(clippy::useless_conversion)]
+ let payload = B::from_boxed_str(other);
+ Self {
+ owned: std::mem::ManuallyDrop::new(OwnedVariant::new(payload)),
+ }
+ }
+
+ #[inline]
+ pub(super) fn from_string(other: StdString) -> Self {
+ if (0..=CAPACITY).contains(&other.len()) {
+ let payload = unsafe {
+ // SAFETY: range check ensured this is always safe
+ StackString::new_unchecked(other.as_str())
+ };
+ Self {
+ inline: InlineVariant::new(payload),
+ }
+ } else {
+ Self::from_boxed(other.into_boxed_str())
+ }
+ }
+
+ #[inline]
+ pub(super) fn from_ref(other: &str) -> Self {
+ if (0..=CAPACITY).contains(&other.len()) {
+ let payload = unsafe {
+ // SAFETY: range check ensured this is always safe
+ StackString::new_unchecked(other)
+ };
+ Self {
+ inline: InlineVariant::new(payload),
+ }
+ } else {
+ #[allow(clippy::useless_conversion)]
+ let payload = B::from_str(other);
+ Self {
+ owned: std::mem::ManuallyDrop::new(OwnedVariant::new(payload)),
+ }
+ }
+ }
+
+ #[inline]
+ pub(super) fn as_ref(&self) -> KStringRef<'_> {
+ let tag = self.tag();
+ unsafe {
+ // SAFETY: `tag` ensures access to correct variant
+ if tag.is_singleton() {
+ KStringRef::from_static(self.singleton.payload)
+ } else if tag.is_owned() {
+ KStringRef::from_ref(self.owned.payload.as_str())
+ } else {
+ debug_assert!(tag.is_inline());
+ KStringRef::from_ref(self.inline.payload.as_str())
+ }
+ }
+ }
+
+ #[inline]
+ pub(super) fn as_str(&self) -> &str {
+ let tag = self.tag();
+ unsafe {
+ // SAFETY: `tag` ensures access to correct variant
+ if tag.is_singleton() {
+ self.singleton.payload
+ } else if tag.is_owned() {
+ self.owned.payload.as_str()
+ } else {
+ debug_assert!(tag.is_inline());
+ self.inline.payload.as_str()
+ }
+ }
+ }
+
+ #[inline]
+ pub(super) fn into_boxed_str(self) -> crate::backend::BoxedStr {
+ let tag = self.tag();
+ unsafe {
+ // SAFETY: `tag` ensures access to correct variant
+ if tag.is_singleton() {
+ crate::backend::BoxedStr::from(self.singleton.payload)
+ } else if tag.is_owned() {
+ crate::backend::BoxedStr::from(self.owned.payload.as_str())
+ } else {
+ debug_assert!(tag.is_inline());
+ crate::backend::BoxedStr::from(self.inline.payload.as_ref())
+ }
+ }
+ }
+
        /// Convert to a Cow str
        ///
        /// Only the singleton (`&'static str`) variant can stay borrowed;
        /// owned and inline payloads are copied into a fresh `String`.
        #[inline]
        pub(super) fn into_cow_str(self) -> Cow<'static, str> {
            let tag = self.tag();
            unsafe {
                // SAFETY: `tag` ensures access to correct variant
                if tag.is_singleton() {
                    Cow::Borrowed(self.singleton.payload)
                } else if tag.is_owned() {
                    Cow::Owned(self.owned.payload.as_str().into())
                } else {
                    debug_assert!(tag.is_inline());
                    Cow::Owned(self.inline.payload.as_str().into())
                }
            }
        }
+ }
+
    // Explicit to avoid inlining which cuts clone times in half.
    //
    // An automatically derived `clone()` has 10ns overhead while the explicit `Deref`/`as_str` has
    // none of that. Being explicit and removing the `#[inline]` attribute dropped the overhead to
    // 5ns.
    //
    // My only guess is that the `clone()` calls we delegate to are just that much bigger than
    // `as_str()` that, when combined with a jump table, is blowing the icache, slowing things down.
    impl<B: Clone> Clone for KStringInner<B> {
        fn clone(&self) -> Self {
            let tag = self.tag();
            if tag.is_owned() {
                unsafe {
                    // SAFETY: `tag` ensures access to correct variant
                    Self {
                        owned: std::mem::ManuallyDrop::new(OwnedVariant::new(
                            self.owned.payload.clone(),
                        )),
                    }
                }
            } else {
                unsafe {
                    // SAFETY: `tag` ensures access to correct variant
                    // SAFETY: non-owned types are copyable
                    // The bitwise copy also preserves the tag byte, so the
                    // clone decodes to the same variant as `self`.
                    std::mem::transmute_copy(self)
                }
            }
        }
    }
+
    // Only the owned variant holds a destructor-bearing payload; the
    // singleton and inline variants are plain `Copy` data and need no drop.
    impl<B> Drop for KStringInner<B> {
        fn drop(&mut self) {
            let tag = self.tag();
            if tag.is_owned() {
                unsafe {
                    // SAFETY: `tag` ensures we are using the right variant
                    std::mem::ManuallyDrop::drop(&mut self.owned)
                }
            }
        }
    }
+
    // Sizes used to compute how many bytes can be stored inline while keeping
    // every variant exactly the same size as `String` (`Target`).

    #[allow(unused)]
    const LEN_SIZE: usize = std::mem::size_of::<crate::stack::Len>();

    #[allow(unused)]
    const TAG_SIZE: usize = std::mem::size_of::<Tag>();

    // Size of the heap payload; `Payload` is an equally-sized padding blob
    // that overlays it in the non-owned variants.
    #[allow(unused)]
    const PAYLOAD_SIZE: usize = std::mem::size_of::<crate::backend::DefaultStr>();
    type Payload = Padding<PAYLOAD_SIZE>;

    // The whole union is sized to match `String`.
    #[allow(unused)]
    const TARGET_SIZE: usize = std::mem::size_of::<Target>();
    type Target = crate::string::StdString;

    // Largest inline capacity that still fits in `TARGET_SIZE` with a length
    // and a tag byte.
    #[allow(unused)]
    const MAX_CAPACITY: usize = TARGET_SIZE - LEN_SIZE - TAG_SIZE;

    // Performance seems to slow down when trying to occupy all of the padding left by `String`'s
    // discriminant. The question is whether faster len=1-16 "allocations" outweighs going to the heap
    // for len=17-22.
    #[allow(unused)]
    const ALIGNED_CAPACITY: usize = PAYLOAD_SIZE - LEN_SIZE;

    #[cfg(feature = "max_inline")]
    const CAPACITY: usize = MAX_CAPACITY;
    #[cfg(not(feature = "max_inline"))]
    const CAPACITY: usize = ALIGNED_CAPACITY;

    // Padding after each variant's payload so the tag byte lands at the same
    // offset in every variant.
    const PAYLOAD_PAD_SIZE: usize = TARGET_SIZE - PAYLOAD_SIZE - TAG_SIZE;
    const INLINE_PAD_SIZE: usize = TARGET_SIZE - CAPACITY - LEN_SIZE - TAG_SIZE;
+
    // Layout-only view of the union used to read the tag byte regardless of
    // which variant is active. `repr(C)` pins the field order so `tag` is at
    // the same offset as in the other variants.
    #[derive(Copy, Clone)]
    #[repr(C)]
    struct TagVariant {
        payload: Payload,
        pad: Padding<PAYLOAD_PAD_SIZE>,
        tag: Tag,
    }
    static_assertions::assert_eq_size!(Target, TagVariant);
+
    // Variant holding a `&'static str`; zero allocation, `Copy`, and
    // tagged `Tag::SINGLETON` in the shared trailing byte.
    #[derive(Copy, Clone)]
    #[repr(C)]
    struct SingletonVariant {
        payload: &'static str,
        pad: Padding<PAYLOAD_PAD_SIZE>,
        tag: Tag,
    }
    static_assertions::assert_eq_size!(Payload, &'static str);
    static_assertions::assert_eq_size!(Target, SingletonVariant);

    impl SingletonVariant {
        #[inline]
        const fn new(payload: &'static str) -> Self {
            Self {
                payload,
                pad: Padding::new(),
                tag: Tag::SINGLETON,
            }
        }
    }

    impl std::fmt::Debug for SingletonVariant {
        #[inline]
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
            self.payload.fmt(f)
        }
    }
+
    // Variant holding a heap-allocated backend payload `B`; tagged
    // `Tag::OWNED` and the only variant that needs `Drop`.
    #[derive(Clone)]
    #[repr(C)]
    struct OwnedVariant<B> {
        payload: B,
        pad: Padding<PAYLOAD_PAD_SIZE>,
        tag: Tag,
    }
    static_assertions::assert_eq_size!(Payload, crate::backend::DefaultStr);
    static_assertions::assert_eq_size!(Target, OwnedVariant<crate::backend::DefaultStr>);

    impl<B> OwnedVariant<B> {
        #[inline]
        const fn new(payload: B) -> Self {
            Self {
                payload,
                pad: Padding::new(),
                tag: Tag::OWNED,
            }
        }
    }

    impl<B: crate::backend::HeapStr> std::fmt::Debug for OwnedVariant<B> {
        #[inline]
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
            self.payload.fmt(f)
        }
    }
+
    // Variant storing up to `CAPACITY` bytes directly on the stack; `Copy`
    // and tagged `Tag::INLINE`.
    #[derive(Copy, Clone)]
    #[repr(C)]
    struct InlineVariant {
        payload: StackString<CAPACITY>,
        pad: Padding<INLINE_PAD_SIZE>,
        tag: Tag,
    }
    static_assertions::assert_eq_size!(Target, InlineVariant);

    impl InlineVariant {
        #[inline]
        const fn new(payload: StackString<CAPACITY>) -> Self {
            Self {
                payload,
                pad: Padding::new(),
                tag: Tag::INLINE,
            }
        }
    }

    impl std::fmt::Debug for InlineVariant {
        #[inline]
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
            self.payload.fmt(f)
        }
    }
+
+ #[derive(Copy, Clone, PartialEq, Eq)]
+ #[repr(transparent)]
+ struct Tag(u8);
+
+ impl Tag {
+ const SINGLETON: Tag = Tag(0);
+ const OWNED: Tag = Tag(u8::MAX);
+ const INLINE: Tag = Tag(1);
+
+ #[inline]
+ const fn is_singleton(self) -> bool {
+ self.0 == Self::SINGLETON.0
+ }
+
+ #[inline]
+ const fn is_owned(self) -> bool {
+ self.0 == Self::OWNED.0
+ }
+
+ #[inline]
+ const fn is_inline(self) -> bool {
+ !self.is_singleton() && !self.is_owned()
+ }
+ }
+
+ #[derive(Copy, Clone)]
+ #[repr(transparent)]
+ struct Padding<const L: usize>([std::mem::MaybeUninit<u8>; L]);
+
+ impl<const L: usize> Padding<L> {
+ const fn new() -> Self {
+ let padding = unsafe {
+ // SAFETY: Padding, never actually used
+ std::mem::MaybeUninit::uninit().assume_init()
+ };
+ Self(padding)
+ }
+ }
+
+ impl<const L: usize> Default for Padding<L> {
+ fn default() -> Self {
+ Self::new()
+ }
+ }
+}
+
#[cfg(test)]
mod test {
    use super::*;

    /// Print the size of `KString` so layout changes are visible in test output.
    #[test]
    fn test_size() {
        let size = std::mem::size_of::<KString>();
        println!("KString: {}", size);
    }
}
diff --git a/vendor/kstring/src/string_cow.rs b/vendor/kstring/src/string_cow.rs
new file mode 100644
index 000000000..6a1b4b89a
--- /dev/null
+++ b/vendor/kstring/src/string_cow.rs
@@ -0,0 +1,383 @@
+use std::{borrow::Cow, fmt};
+
+use crate::KStringBase;
+use crate::KStringRef;
+use crate::KStringRefInner;
+
+type StdString = std::string::String;
+type BoxedStr = Box<str>;
+
/// A reference to a UTF-8 encoded, immutable string.
pub type KStringCow<'s> = KStringCowBase<'s, crate::backend::DefaultStr>;

/// A reference to a UTF-8 encoded, immutable string.
///
/// Like `Cow<'s, str>`, but the owned form is a `KStringBase<B>` so it keeps
/// that type's small-string and shared-heap optimizations.
#[derive(Clone)]
#[repr(transparent)]
pub struct KStringCowBase<'s, B = crate::backend::DefaultStr> {
    pub(crate) inner: KStringCowInner<'s, B>,
}

// Internal representation: either borrowed from the caller or owned.
#[derive(Clone)]
pub(crate) enum KStringCowInner<'s, B> {
    Borrowed(&'s str),
    Owned(KStringBase<B>),
}
+
+impl<'s, B> KStringCowBase<'s, B> {
+ /// Create a new empty `KStringCowBase`.
+ #[inline]
+ #[must_use]
+ pub const fn new() -> Self {
+ Self::from_static("")
+ }
+
+ /// Create a reference to a `'static` data.
+ #[inline]
+ #[must_use]
+ pub const fn from_static(other: &'static str) -> Self {
+ Self {
+ inner: KStringCowInner::Owned(KStringBase::from_static(other)),
+ }
+ }
+}
+
+impl<'s, B: crate::backend::HeapStr> KStringCowBase<'s, B> {
+ /// Create an owned `KStringCowBase`.
+ #[inline]
+ #[must_use]
+ pub fn from_boxed(other: BoxedStr) -> Self {
+ Self {
+ inner: KStringCowInner::Owned(KStringBase::from_boxed(other)),
+ }
+ }
+
+ /// Create an owned `KStringCowBase`.
+ #[inline]
+ #[must_use]
+ pub fn from_string(other: StdString) -> Self {
+ Self {
+ inner: KStringCowInner::Owned(KStringBase::from_string(other)),
+ }
+ }
+
+ /// Create a reference to a borrowed data.
+ #[inline]
+ #[must_use]
+ pub fn from_ref(other: &'s str) -> Self {
+ Self {
+ inner: KStringCowInner::Borrowed(other),
+ }
+ }
+
+ /// Get a reference to the `KStringBase`.
+ #[inline]
+ #[must_use]
+ pub fn as_ref(&self) -> KStringRef<'_> {
+ self.inner.as_ref()
+ }
+
+ /// Clone the data into an owned-type.
+ #[inline]
+ #[must_use]
+ pub fn into_owned(self) -> KStringBase<B> {
+ self.inner.into_owned()
+ }
+
+ /// Extracts a string slice containing the entire `KStringCowBase`.
+ #[inline]
+ #[must_use]
+ pub fn as_str(&self) -> &str {
+ self.inner.as_str()
+ }
+
+ /// Convert to a mutable string type, cloning the data if necessary.
+ #[inline]
+ #[must_use]
+ pub fn into_string(self) -> StdString {
+ String::from(self.into_boxed_str())
+ }
+
+ /// Convert to a mutable string type, cloning the data if necessary.
+ #[inline]
+ #[must_use]
+ pub fn into_boxed_str(self) -> BoxedStr {
+ self.inner.into_boxed_str()
+ }
+
+ /// Convert to a Cow str
+ #[inline]
+ #[must_use]
+ pub fn into_cow_str(self) -> Cow<'s, str> {
+ self.inner.into_cow_str()
+ }
+}
+
+impl<'s, B: crate::backend::HeapStr> KStringCowInner<'s, B> {
+ #[inline]
+ fn as_ref(&self) -> KStringRef<'_> {
+ match self {
+ Self::Borrowed(s) => KStringRef::from_ref(s),
+ Self::Owned(s) => s.as_ref(),
+ }
+ }
+
+ #[inline]
+ fn into_owned(self) -> KStringBase<B> {
+ match self {
+ Self::Borrowed(s) => KStringBase::from_ref(s),
+ Self::Owned(s) => s,
+ }
+ }
+
+ #[inline]
+ fn as_str(&self) -> &str {
+ match self {
+ Self::Borrowed(s) => s,
+ Self::Owned(s) => s.as_str(),
+ }
+ }
+
+ #[inline]
+ fn into_boxed_str(self) -> BoxedStr {
+ match self {
+ Self::Borrowed(s) => BoxedStr::from(s),
+ Self::Owned(s) => s.into_boxed_str(),
+ }
+ }
+
+ /// Convert to a Cow str
+ #[inline]
+ fn into_cow_str(self) -> Cow<'s, str> {
+ match self {
+ Self::Borrowed(s) => Cow::Borrowed(s),
+ Self::Owned(s) => s.into_cow_str(),
+ }
+ }
+}
+
+impl<'s, B: crate::backend::HeapStr> std::ops::Deref for KStringCowBase<'s, B> {
+ type Target = str;
+
+ #[inline]
+ fn deref(&self) -> &str {
+ self.as_str()
+ }
+}
+
+impl<'s, B: crate::backend::HeapStr> Eq for KStringCowBase<'s, B> {}
+
+impl<'s, B: crate::backend::HeapStr> PartialEq<KStringCowBase<'s, B>> for KStringCowBase<'s, B> {
+ #[inline]
+ fn eq(&self, other: &KStringCowBase<'s, B>) -> bool {
+ PartialEq::eq(self.as_str(), other.as_str())
+ }
+}
+
+impl<'s, B: crate::backend::HeapStr> PartialEq<str> for KStringCowBase<'s, B> {
+ #[inline]
+ fn eq(&self, other: &str) -> bool {
+ PartialEq::eq(self.as_str(), other)
+ }
+}
+
+impl<'s, B: crate::backend::HeapStr> PartialEq<&'s str> for KStringCowBase<'s, B> {
+ #[inline]
+ fn eq(&self, other: &&str) -> bool {
+ PartialEq::eq(self.as_str(), *other)
+ }
+}
+
+impl<'s, B: crate::backend::HeapStr> PartialEq<String> for KStringCowBase<'s, B> {
+ #[inline]
+ fn eq(&self, other: &StdString) -> bool {
+ PartialEq::eq(self.as_str(), other.as_str())
+ }
+}
+
+impl<'s, B: crate::backend::HeapStr> Ord for KStringCowBase<'s, B> {
+ #[inline]
+ fn cmp(&self, other: &Self) -> std::cmp::Ordering {
+ self.as_str().cmp(other.as_str())
+ }
+}
+
+impl<'s, B: crate::backend::HeapStr> PartialOrd for KStringCowBase<'s, B> {
+ #[inline]
+ fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
+ self.as_str().partial_cmp(other.as_str())
+ }
+}
+
+impl<'s, B: crate::backend::HeapStr> std::hash::Hash for KStringCowBase<'s, B> {
+ #[inline]
+ fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
+ self.as_str().hash(state);
+ }
+}
+
+impl<'s, B: crate::backend::HeapStr> fmt::Debug for KStringCowBase<'s, B> {
+ #[inline]
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.as_str().fmt(f)
+ }
+}
+
+impl<'s, B: crate::backend::HeapStr> fmt::Display for KStringCowBase<'s, B> {
+ #[inline]
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Display::fmt(self.as_str(), f)
+ }
+}
+
+impl<'s, B: crate::backend::HeapStr> AsRef<str> for KStringCowBase<'s, B> {
+ #[inline]
+ fn as_ref(&self) -> &str {
+ self.as_str()
+ }
+}
+
+impl<'s, B: crate::backend::HeapStr> AsRef<[u8]> for KStringCowBase<'s, B> {
+ #[inline]
+ fn as_ref(&self) -> &[u8] {
+ self.as_bytes()
+ }
+}
+
+impl<'s, B: crate::backend::HeapStr> AsRef<std::ffi::OsStr> for KStringCowBase<'s, B> {
+ #[inline]
+ fn as_ref(&self) -> &std::ffi::OsStr {
+ (&**self).as_ref()
+ }
+}
+
+impl<'s, B: crate::backend::HeapStr> AsRef<std::path::Path> for KStringCowBase<'s, B> {
+ #[inline]
+ fn as_ref(&self) -> &std::path::Path {
+ std::path::Path::new(self)
+ }
+}
+
+impl<'s, B: crate::backend::HeapStr> std::borrow::Borrow<str> for KStringCowBase<'s, B> {
+ #[inline]
+ fn borrow(&self) -> &str {
+ self.as_str()
+ }
+}
+
+impl<'s, B> Default for KStringCowBase<'s, B> {
+ #[inline]
+ fn default() -> Self {
+ Self::new()
+ }
+}
+
+impl<'s, B: crate::backend::HeapStr> From<KStringBase<B>> for KStringCowBase<'s, B> {
+ #[inline]
+ fn from(other: KStringBase<B>) -> Self {
+ let inner = KStringCowInner::Owned(other);
+ Self { inner }
+ }
+}
+
+impl<'s, B: crate::backend::HeapStr> From<&'s KStringBase<B>> for KStringCowBase<'s, B> {
+ #[inline]
+ fn from(other: &'s KStringBase<B>) -> Self {
+ let other = other.as_ref();
+ other.into()
+ }
+}
+
+impl<'s, B: crate::backend::HeapStr> From<KStringRef<'s>> for KStringCowBase<'s, B> {
+ #[inline]
+ fn from(other: KStringRef<'s>) -> Self {
+ match other.inner {
+ KStringRefInner::Borrowed(s) => Self::from_ref(s),
+ KStringRefInner::Singleton(s) => Self::from_static(s),
+ }
+ }
+}
+
+impl<'s, B: crate::backend::HeapStr> From<&'s KStringRef<'s>> for KStringCowBase<'s, B> {
+ #[inline]
+ fn from(other: &'s KStringRef<'s>) -> Self {
+ match other.inner {
+ KStringRefInner::Borrowed(s) => Self::from_ref(s),
+ KStringRefInner::Singleton(s) => Self::from_static(s),
+ }
+ }
+}
+
+impl<'s, B: crate::backend::HeapStr> From<StdString> for KStringCowBase<'s, B> {
+ #[inline]
+ fn from(other: StdString) -> Self {
+ Self::from_string(other)
+ }
+}
+
+impl<'s, B: crate::backend::HeapStr> From<&'s StdString> for KStringCowBase<'s, B> {
+ #[inline]
+ fn from(other: &'s StdString) -> Self {
+ Self::from_ref(other.as_str())
+ }
+}
+
+impl<'s, B: crate::backend::HeapStr> From<BoxedStr> for KStringCowBase<'s, B> {
+ #[inline]
+ fn from(other: BoxedStr) -> Self {
+ // Since the memory is already allocated, don't bother moving it into a FixedString
+ Self::from_boxed(other)
+ }
+}
+
+impl<'s, B: crate::backend::HeapStr> From<&'s BoxedStr> for KStringCowBase<'s, B> {
+ #[inline]
+ fn from(other: &'s BoxedStr) -> Self {
+ Self::from_ref(other)
+ }
+}
+
+impl<'s, B: crate::backend::HeapStr> From<&'s str> for KStringCowBase<'s, B> {
+ #[inline]
+ fn from(other: &'s str) -> Self {
+ Self::from_ref(other)
+ }
+}
+
+impl<B: crate::backend::HeapStr> std::str::FromStr for KStringCowBase<'_, B> {
+ type Err = std::convert::Infallible;
+ #[inline]
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ Ok(Self::from_string(s.into()))
+ }
+}
+
// Serializes as a plain string.
#[cfg(feature = "serde")]
impl<'s, B: crate::backend::HeapStr> serde::Serialize for KStringCowBase<'s, B> {
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        serializer.serialize_str(self.as_str())
    }
}

// Deserializes via `KStringBase`, so the result is always the `Owned` arm
// (no zero-copy borrowing from the deserializer input).
#[cfg(feature = "serde")]
impl<'de, 's, B: crate::backend::HeapStr> serde::Deserialize<'de> for KStringCowBase<'s, B> {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        KStringBase::deserialize(deserializer).map(|s| s.into())
    }
}
+
#[cfg(test)]
mod test {
    use super::*;

    /// Print the size of `KStringCow` so layout changes are visible in test output.
    #[test]
    fn test_size() {
        let size = std::mem::size_of::<KStringCow<'static>>();
        println!("KStringCow: {}", size);
    }
}
diff --git a/vendor/kstring/src/string_ref.rs b/vendor/kstring/src/string_ref.rs
new file mode 100644
index 000000000..a79b9d46c
--- /dev/null
+++ b/vendor/kstring/src/string_ref.rs
@@ -0,0 +1,277 @@
+use std::fmt;
+
+use crate::KStringBase;
+use crate::KStringCowBase;
+
+type StdString = std::string::String;
+type BoxedStr = Box<str>;
+
/// A reference to a UTF-8 encoded, immutable string.
///
/// Tracks whether the underlying `&str` is `'static` (a "singleton") so a
/// later `to_owned()` can preserve the zero-allocation static representation.
#[derive(Copy, Clone)]
#[repr(transparent)]
pub struct KStringRef<'s> {
    pub(crate) inner: KStringRefInner<'s>,
}

// Internal representation: borrowed for `'s`, or a `'static` singleton.
#[derive(Copy, Clone, Debug)]
pub(crate) enum KStringRefInner<'s> {
    Borrowed(&'s str),
    Singleton(&'static str),
}
+
+impl<'s> KStringRef<'s> {
+ /// Create a new empty `KStringBase`.
+ #[inline]
+ #[must_use]
+ pub const fn new() -> Self {
+ Self::from_static("")
+ }
+
+ /// Create a reference to a `'static` data.
+ #[inline]
+ #[must_use]
+ pub const fn from_static(other: &'static str) -> Self {
+ Self {
+ inner: KStringRefInner::Singleton(other),
+ }
+ }
+
+ /// Create a reference to a borrowed data.
+ #[inline]
+ #[must_use]
+ pub fn from_ref(other: &'s str) -> Self {
+ Self {
+ inner: KStringRefInner::Borrowed(other),
+ }
+ }
+
+ /// Clone the data into an owned-type.
+ #[inline]
+ #[must_use]
+ #[allow(clippy::wrong_self_convention)]
+ pub fn to_owned<B: crate::backend::HeapStr>(&self) -> KStringBase<B> {
+ self.inner.to_owned()
+ }
+
+ /// Extracts a string slice containing the entire `KStringRef`.
+ #[inline]
+ #[must_use]
+ pub fn as_str(&self) -> &str {
+ self.inner.as_str()
+ }
+
+ /// Convert to a mutable string type, cloning the data if necessary.
+ #[inline]
+ #[must_use]
+ pub fn into_mut(self) -> StdString {
+ self.inner.into_mut()
+ }
+}
+
+impl<'s> KStringRefInner<'s> {
+ #[inline]
+ #[allow(clippy::wrong_self_convention)]
+ fn to_owned<B: crate::backend::HeapStr>(&self) -> KStringBase<B> {
+ match self {
+ Self::Borrowed(s) => KStringBase::from_ref(s),
+ Self::Singleton(s) => KStringBase::from_static(s),
+ }
+ }
+
+ #[inline]
+ fn as_str(&self) -> &str {
+ match self {
+ Self::Borrowed(s) => s,
+ Self::Singleton(s) => s,
+ }
+ }
+
+ #[inline]
+ fn into_mut(self) -> StdString {
+ self.as_str().to_owned()
+ }
+}
+
+impl<'s> std::ops::Deref for KStringRef<'s> {
+ type Target = str;
+
+ #[inline]
+ fn deref(&self) -> &str {
+ self.as_str()
+ }
+}
+
+impl<'s> Eq for KStringRef<'s> {}
+
+impl<'s> PartialEq<KStringRef<'s>> for KStringRef<'s> {
+ #[inline]
+ fn eq(&self, other: &KStringRef<'s>) -> bool {
+ PartialEq::eq(self.as_str(), other.as_str())
+ }
+}
+
+impl<'s> PartialEq<str> for KStringRef<'s> {
+ #[inline]
+ fn eq(&self, other: &str) -> bool {
+ PartialEq::eq(self.as_str(), other)
+ }
+}
+
+impl<'s> PartialEq<&'s str> for KStringRef<'s> {
+ #[inline]
+ fn eq(&self, other: &&str) -> bool {
+ PartialEq::eq(self.as_str(), *other)
+ }
+}
+
+impl<'s> PartialEq<String> for KStringRef<'s> {
+ #[inline]
+ fn eq(&self, other: &StdString) -> bool {
+ PartialEq::eq(self.as_str(), other.as_str())
+ }
+}
+
+impl<'s> Ord for KStringRef<'s> {
+ #[inline]
+ fn cmp(&self, other: &Self) -> std::cmp::Ordering {
+ self.as_str().cmp(other.as_str())
+ }
+}
+
+impl<'s> PartialOrd for KStringRef<'s> {
+ #[inline]
+ fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
+ self.as_str().partial_cmp(other.as_str())
+ }
+}
+
+impl<'s> std::hash::Hash for KStringRef<'s> {
+ #[inline]
+ fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
+ self.as_str().hash(state);
+ }
+}
+
+impl<'s> fmt::Debug for KStringRef<'s> {
+ #[inline]
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Debug::fmt(&self.inner, f)
+ }
+}
+
+impl<'s> fmt::Display for KStringRef<'s> {
+ #[inline]
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Display::fmt(self.as_str(), f)
+ }
+}
+
+impl<'s> AsRef<str> for KStringRef<'s> {
+ #[inline]
+ fn as_ref(&self) -> &str {
+ self.as_str()
+ }
+}
+
+impl<'s> AsRef<[u8]> for KStringRef<'s> {
+ #[inline]
+ fn as_ref(&self) -> &[u8] {
+ self.as_bytes()
+ }
+}
+
+impl<'s> AsRef<std::ffi::OsStr> for KStringRef<'s> {
+ #[inline]
+ fn as_ref(&self) -> &std::ffi::OsStr {
+ (&**self).as_ref()
+ }
+}
+
+impl<'s> AsRef<std::path::Path> for KStringRef<'s> {
+ #[inline]
+ fn as_ref(&self) -> &std::path::Path {
+ std::path::Path::new(self)
+ }
+}
+
+impl<'s> std::borrow::Borrow<str> for KStringRef<'s> {
+ #[inline]
+ fn borrow(&self) -> &str {
+ self.as_str()
+ }
+}
+
+impl<'s> Default for KStringRef<'s> {
+ #[inline]
+ fn default() -> Self {
+ Self::new()
+ }
+}
+
+impl<'s, B: crate::backend::HeapStr> From<&'s KStringBase<B>> for KStringRef<'s> {
+ #[inline]
+ fn from(other: &'s KStringBase<B>) -> Self {
+ other.as_ref()
+ }
+}
+
+impl<'s, B: crate::backend::HeapStr> From<&'s KStringCowBase<'s, B>> for KStringRef<'s> {
+ #[inline]
+ fn from(other: &'s KStringCowBase<'s, B>) -> Self {
+ other.as_ref()
+ }
+}
+
+impl<'s> From<&'s StdString> for KStringRef<'s> {
+ #[inline]
+ fn from(other: &'s StdString) -> Self {
+ KStringRef::from_ref(other.as_str())
+ }
+}
+
+impl<'s> From<&'s BoxedStr> for KStringRef<'s> {
+ #[inline]
+ fn from(other: &'s BoxedStr) -> Self {
+ Self::from_ref(other)
+ }
+}
+
+impl<'s> From<&'s str> for KStringRef<'s> {
+ #[inline]
+ fn from(other: &'s str) -> Self {
+ KStringRef::from_ref(other)
+ }
+}
+
// Serializes as a plain string.
#[cfg(feature = "serde")]
impl<'s> serde::Serialize for KStringRef<'s> {
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        serializer.serialize_str(self.as_str())
    }
}

// Zero-copy deserialization: requires the deserializer input to outlive `'s`
// and to be able to hand out `&str` directly (e.g. `from_str` sources).
#[cfg(feature = "serde")]
impl<'de: 's, 's> serde::Deserialize<'de> for KStringRef<'s> {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        let s: &'s str = serde::Deserialize::deserialize(deserializer)?;
        let s = KStringRef::from_ref(s);
        Ok(s)
    }
}
+
#[cfg(test)]
mod test {
    use super::*;

    /// Print the size of `KStringRef` so layout changes are visible in test output.
    #[test]
    fn test_size() {
        let size = std::mem::size_of::<KStringRef<'static>>();
        println!("KStringRef: {}", size);
    }
}