Diffstat (limited to 'third_party/rust/wgpu-core/src/init_tracker')
 -rw-r--r--  third_party/rust/wgpu-core/src/init_tracker/buffer.rs  |  39
 -rw-r--r--  third_party/rust/wgpu-core/src/init_tracker/mod.rs     | 384
 -rw-r--r--  third_party/rust/wgpu-core/src/init_tracker/texture.rs | 103
 3 files changed, 526 insertions, 0 deletions
diff --git a/third_party/rust/wgpu-core/src/init_tracker/buffer.rs b/third_party/rust/wgpu-core/src/init_tracker/buffer.rs
new file mode 100644
index 0000000000..2c0fa8d372
--- /dev/null
+++ b/third_party/rust/wgpu-core/src/init_tracker/buffer.rs
@@ -0,0 +1,39 @@
+use super::{InitTracker, MemoryInitKind};
+use crate::{hal_api::HalApi, resource::Buffer};
+use std::{ops::Range, sync::Arc};
+
+#[derive(Debug, Clone)]
+pub(crate) struct BufferInitTrackerAction<A: HalApi> {
+ pub buffer: Arc<Buffer<A>>,
+ pub range: Range<wgt::BufferAddress>,
+ pub kind: MemoryInitKind,
+}
+
+pub(crate) type BufferInitTracker = InitTracker<wgt::BufferAddress>;
+
+impl BufferInitTracker {
+ /// Checks if an action has/requires any effect on the initialization status
+ /// and shrinks its range if possible.
+ pub(crate) fn check_action<A: HalApi>(
+ &self,
+ action: &BufferInitTrackerAction<A>,
+ ) -> Option<BufferInitTrackerAction<A>> {
+ self.create_action(&action.buffer, action.range.clone(), action.kind)
+ }
+
+ /// Creates an action if it would have any effect on the initialization
+ /// status and shrinks the range if possible.
+ pub(crate) fn create_action<A: HalApi>(
+ &self,
+ buffer: &Arc<Buffer<A>>,
+ query_range: Range<wgt::BufferAddress>,
+ kind: MemoryInitKind,
+ ) -> Option<BufferInitTrackerAction<A>> {
+ self.check(query_range)
+ .map(|range| BufferInitTrackerAction {
+ buffer: buffer.clone(),
+ range,
+ kind,
+ })
+ }
+}
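
For context, a minimal sketch (not part of the vendored file) of the check/drain behaviour that BufferInitTracker inherits from InitTracker<wgt::BufferAddress>. The function name is hypothetical, and it uses the crate-private InitTracker directly, so it would only compile inside wgpu-core (e.g. next to the tests in mod.rs); the Arc<Buffer<A>> handle and HalApi plumbing are omitted because they require a full device.

    // Hypothetical in-crate sketch; wgt::BufferAddress is u64.
    fn buffer_tracker_sketch() {
        let mut tracker: InitTracker<u64> = InitTracker::new(256);

        // A fresh tracker treats the whole 0..256 range as uninitialized,
        // so a read of 64..128 needs a zero-init action covering 64..128.
        assert_eq!(tracker.check(64..128), Some(64..128));

        // Draining marks the range initialized (e.g. once the clear has been
        // recorded); the iterator yields the sub-ranges that actually needed it.
        let cleared: Vec<_> = tracker.drain(64..128).collect();
        assert_eq!(cleared, vec![64..128]);

        // The same query now needs nothing, and a wider query is shrunk to
        // the portion that is still uninitialized.
        assert_eq!(tracker.check(64..128), None);
        assert_eq!(tracker.check(0..96), Some(0..64));
    }
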
diff --git a/third_party/rust/wgpu-core/src/init_tracker/mod.rs b/third_party/rust/wgpu-core/src/init_tracker/mod.rs
new file mode 100644
index 0000000000..ccaac1e16f
--- /dev/null
+++ b/third_party/rust/wgpu-core/src/init_tracker/mod.rs
@@ -0,0 +1,384 @@
+/*! Lazy initialization of texture and buffer memory.
+
+The WebGPU specification requires all texture & buffer memory to be
+zero initialized on first read. To avoid unnecessary inits, we track
+the initialization status of every resource and perform inits lazily.
+
+The granularity is different for buffers and textures:
+
+- Buffer: Byte granularity to support use cases with large, partially
+ bound buffers well.
+
+- Texture: Mip-level per layer. That is, a 2D surface is either
+ completely initialized or not, subrects are not tracked.
+
+Every use of a buffer/texture generates an InitTrackerAction, which is
+recorded and later resolved at queue submit by merging it with the
+current state and the other recorded actions in execution order.
+
+It is important to note that from the point of view of the memory init
+system there are two kinds of writes:
+
+- **Full writes**: Any kind of memcpy operation. These cause a
+ `MemoryInitKind.ImplicitlyInitialized` action.
+
+- **(Potentially) partial writes**: For example, a write access in a
+ Shader. The system is not able to determine if a resource is fully
+ initialized afterwards but is no longer allowed to perform any
+ clears, therefore this leads to a
+ `MemoryInitKind.NeedsInitializedMemory` action, exactly like a read
+ would.
+
+ */
+
+use smallvec::SmallVec;
+use std::{fmt, iter, ops::Range};
+
+mod buffer;
+mod texture;
+
+pub(crate) use buffer::{BufferInitTracker, BufferInitTrackerAction};
+pub(crate) use texture::{
+ has_copy_partial_init_tracker_coverage, TextureInitRange, TextureInitTracker,
+ TextureInitTrackerAction,
+};
+
+#[derive(Debug, Clone, Copy)]
+pub(crate) enum MemoryInitKind {
+ // The memory range is going to be written by an already initialized source,
+ // thus doesn't need extra attention other than marking as initialized.
+ ImplicitlyInitialized,
+ // The memory range is going to be read, therefore needs to ensure prior
+ // initialization.
+ NeedsInitializedMemory,
+}
+
+// Most of the time a resource is either fully uninitialized (one element) or
+// initialized (zero elements).
+type UninitializedRangeVec<Idx> = SmallVec<[Range<Idx>; 1]>;
+
+/// Tracks initialization status of a linear range from 0..size
+#[derive(Debug, Clone)]
+pub(crate) struct InitTracker<Idx: Ord + Copy + Default> {
+ /// Non-overlapping list of all uninitialized ranges, sorted by
+ /// range end.
+ uninitialized_ranges: UninitializedRangeVec<Idx>,
+}
+
+pub(crate) struct InitTrackerDrain<'a, Idx: fmt::Debug + Ord + Copy> {
+ uninitialized_ranges: &'a mut UninitializedRangeVec<Idx>,
+ drain_range: Range<Idx>,
+ first_index: usize,
+ next_index: usize,
+}
+
+impl<'a, Idx> Iterator for InitTrackerDrain<'a, Idx>
+where
+ Idx: fmt::Debug + Ord + Copy,
+{
+ type Item = Range<Idx>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ if let Some(r) = self
+ .uninitialized_ranges
+ .get(self.next_index)
+ .and_then(|range| {
+ if range.start < self.drain_range.end {
+ Some(range.clone())
+ } else {
+ None
+ }
+ })
+ {
+ self.next_index += 1;
+ Some(r.start.max(self.drain_range.start)..r.end.min(self.drain_range.end))
+ } else {
+ let num_affected = self.next_index - self.first_index;
+ if num_affected == 0 {
+ return None;
+ }
+ let first_range = &mut self.uninitialized_ranges[self.first_index];
+
+ // Split one "big" uninitialized range?
+ if num_affected == 1
+ && first_range.start < self.drain_range.start
+ && first_range.end > self.drain_range.end
+ {
+ let old_start = first_range.start;
+ first_range.start = self.drain_range.end;
+ self.uninitialized_ranges
+ .insert(self.first_index, old_start..self.drain_range.start);
+ }
+ // Adjust border ranges and delete everything in-between.
+ else {
+ let remove_start = if first_range.start >= self.drain_range.start {
+ self.first_index
+ } else {
+ first_range.end = self.drain_range.start;
+ self.first_index + 1
+ };
+
+ let last_range = &mut self.uninitialized_ranges[self.next_index - 1];
+ let remove_end = if last_range.end <= self.drain_range.end {
+ self.next_index
+ } else {
+ last_range.start = self.drain_range.end;
+ self.next_index - 1
+ };
+
+ self.uninitialized_ranges.drain(remove_start..remove_end);
+ }
+
+ None
+ }
+ }
+}
+
+impl<'a, Idx> Drop for InitTrackerDrain<'a, Idx>
+where
+ Idx: fmt::Debug + Ord + Copy,
+{
+ fn drop(&mut self) {
+ if self.next_index <= self.first_index {
+ for _ in self {}
+ }
+ }
+}
+
+impl<Idx> InitTracker<Idx>
+where
+ Idx: fmt::Debug + Ord + Copy + Default,
+{
+ pub(crate) fn new(size: Idx) -> Self {
+ Self {
+ uninitialized_ranges: iter::once(Idx::default()..size).collect(),
+ }
+ }
+
+ /// Checks for uninitialized ranges within a given query range.
+ ///
+ /// If `query_range` includes any uninitialized portions of this init
+ /// tracker's resource, return the smallest subrange of `query_range` that
+ /// covers all uninitialized regions.
+ ///
+ /// The returned range may be larger than necessary, to keep this function
+ /// O(log n).
+ pub(crate) fn check(&self, query_range: Range<Idx>) -> Option<Range<Idx>> {
+ let index = self
+ .uninitialized_ranges
+ .partition_point(|r| r.end <= query_range.start);
+ self.uninitialized_ranges
+ .get(index)
+ .and_then(|start_range| {
+ if start_range.start < query_range.end {
+ let start = start_range.start.max(query_range.start);
+ match self.uninitialized_ranges.get(index + 1) {
+ Some(next_range) => {
+ if next_range.start < query_range.end {
+ // Would need to keep iterating for more
+ // accurate upper bound. Don't do that here.
+ Some(start..query_range.end)
+ } else {
+ Some(start..start_range.end.min(query_range.end))
+ }
+ }
+ None => Some(start..start_range.end.min(query_range.end)),
+ }
+ } else {
+ None
+ }
+ })
+ }
+
+ // Drains uninitialized ranges in a query range.
+ pub(crate) fn drain(&mut self, drain_range: Range<Idx>) -> InitTrackerDrain<Idx> {
+ let index = self
+ .uninitialized_ranges
+ .partition_point(|r| r.end <= drain_range.start);
+ InitTrackerDrain {
+ drain_range,
+ uninitialized_ranges: &mut self.uninitialized_ranges,
+ first_index: index,
+ next_index: index,
+ }
+ }
+}
+
+impl InitTracker<u32> {
+ // Makes a single entry uninitialized if not already uninitialized
+ #[allow(dead_code)]
+ pub(crate) fn discard(&mut self, pos: u32) {
+ // Find the first range with `end >= pos`.
+ let r_idx = self.uninitialized_ranges.partition_point(|r| r.end < pos);
+ if let Some(r) = self.uninitialized_ranges.get(r_idx) {
+ // Extend range at end
+ if r.end == pos {
+ // merge with next?
+ if let Some(right) = self.uninitialized_ranges.get(r_idx + 1) {
+ if right.start == pos + 1 {
+ self.uninitialized_ranges[r_idx] = r.start..right.end;
+ self.uninitialized_ranges.remove(r_idx + 1);
+ return;
+ }
+ }
+ self.uninitialized_ranges[r_idx] = r.start..(pos + 1);
+ } else if r.start > pos {
+ // may still extend range at beginning
+ if r.start == pos + 1 {
+ self.uninitialized_ranges[r_idx] = pos..r.end;
+ } else {
+ // The previous range's end must be smaller than `pos`, so no merge is possible.
+ self.uninitialized_ranges.push(pos..(pos + 1));
+ }
+ }
+ } else {
+ self.uninitialized_ranges.push(pos..(pos + 1));
+ }
+ }
+}
+
+#[cfg(test)]
+mod test {
+ use std::ops::Range;
+
+ type Tracker = super::InitTracker<u32>;
+
+ #[test]
+ fn check_for_newly_created_tracker() {
+ let tracker = Tracker::new(10);
+ assert_eq!(tracker.check(0..10), Some(0..10));
+ assert_eq!(tracker.check(0..3), Some(0..3));
+ assert_eq!(tracker.check(3..4), Some(3..4));
+ assert_eq!(tracker.check(4..10), Some(4..10));
+ }
+
+ #[test]
+ fn check_for_drained_tracker() {
+ let mut tracker = Tracker::new(10);
+ tracker.drain(0..10);
+ assert_eq!(tracker.check(0..10), None);
+ assert_eq!(tracker.check(0..3), None);
+ assert_eq!(tracker.check(3..4), None);
+ assert_eq!(tracker.check(4..10), None);
+ }
+
+ #[test]
+ fn check_for_partially_filled_tracker() {
+ let mut tracker = Tracker::new(25);
+ // Drain three ranges, leaving two uninitialized regions: 5..10 and 15..20.
+ tracker.drain(0..5);
+ tracker.drain(10..15);
+ tracker.drain(20..25);
+
+ assert_eq!(tracker.check(0..25), Some(5..25)); // entire range
+
+ assert_eq!(tracker.check(0..5), None); // left non-overlapping
+ assert_eq!(tracker.check(3..8), Some(5..8)); // left overlapping region
+ assert_eq!(tracker.check(3..17), Some(5..17)); // left overlapping region + contained region
+
+ // right overlapping region + contained region (note: `check` does not tighten the range end)
+ assert_eq!(tracker.check(8..22), Some(8..22));
+ // right overlapping region
+ assert_eq!(tracker.check(17..22), Some(17..20));
+ // right non-overlapping
+ assert_eq!(tracker.check(20..25), None);
+ }
+
+ #[test]
+ fn drain_already_drained() {
+ let mut tracker = Tracker::new(30);
+ tracker.drain(10..20);
+
+ // Overlapping with non-cleared
+ tracker.drain(5..15); // Left overlap
+ tracker.drain(15..25); // Right overlap
+ tracker.drain(0..30); // Inner overlap
+
+ // Clear fully cleared
+ tracker.drain(0..30);
+
+ assert_eq!(tracker.check(0..30), None);
+ }
+
+ #[test]
+ fn drain_never_returns_ranges_twice_for_same_range() {
+ let mut tracker = Tracker::new(19);
+ assert_eq!(tracker.drain(0..19).count(), 1);
+ assert_eq!(tracker.drain(0..19).count(), 0);
+
+ let mut tracker = Tracker::new(17);
+ assert_eq!(tracker.drain(5..8).count(), 1);
+ assert_eq!(tracker.drain(5..8).count(), 0);
+ assert_eq!(tracker.drain(1..3).count(), 1);
+ assert_eq!(tracker.drain(1..3).count(), 0);
+ assert_eq!(tracker.drain(7..13).count(), 1);
+ assert_eq!(tracker.drain(7..13).count(), 0);
+ }
+
+ #[test]
+ fn drain_splits_ranges_correctly() {
+ let mut tracker = Tracker::new(1337);
+ assert_eq!(
+ tracker.drain(21..42).collect::<Vec<Range<u32>>>(),
+ vec![21..42]
+ );
+ assert_eq!(
+ tracker.drain(900..1000).collect::<Vec<Range<u32>>>(),
+ vec![900..1000]
+ );
+
+ // Split ranges.
+ assert_eq!(
+ tracker.drain(5..1003).collect::<Vec<Range<u32>>>(),
+ vec![5..21, 42..900, 1000..1003]
+ );
+ assert_eq!(
+ tracker.drain(0..1337).collect::<Vec<Range<u32>>>(),
+ vec![0..5, 1003..1337]
+ );
+ }
+
+ #[test]
+ fn discard_adds_range_on_cleared() {
+ let mut tracker = Tracker::new(10);
+ tracker.drain(0..10);
+ tracker.discard(0);
+ tracker.discard(5);
+ tracker.discard(9);
+ assert_eq!(tracker.check(0..1), Some(0..1));
+ assert_eq!(tracker.check(1..5), None);
+ assert_eq!(tracker.check(5..6), Some(5..6));
+ assert_eq!(tracker.check(6..9), None);
+ assert_eq!(tracker.check(9..10), Some(9..10));
+ }
+
+ #[test]
+ fn discard_does_nothing_on_uncleared() {
+ let mut tracker = Tracker::new(10);
+ tracker.discard(0);
+ tracker.discard(5);
+ tracker.discard(9);
+ assert_eq!(tracker.uninitialized_ranges.len(), 1);
+ assert_eq!(tracker.uninitialized_ranges[0], 0..10);
+ }
+
+ #[test]
+ fn discard_extends_ranges() {
+ let mut tracker = Tracker::new(10);
+ tracker.drain(3..7);
+ tracker.discard(2);
+ tracker.discard(7);
+ assert_eq!(tracker.uninitialized_ranges.len(), 2);
+ assert_eq!(tracker.uninitialized_ranges[0], 0..3);
+ assert_eq!(tracker.uninitialized_ranges[1], 7..10);
+ }
+
+ #[test]
+ fn discard_merges_ranges() {
+ let mut tracker = Tracker::new(10);
+ tracker.drain(3..4);
+ tracker.discard(3);
+ assert_eq!(tracker.uninitialized_ranges.len(), 1);
+ assert_eq!(tracker.uninitialized_ranges[0], 0..10);
+ }
+}
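
The module docs above describe recording actions and resolving them at queue submit. As a rough illustration only (the real resolution lives in wgpu-core's queue/command code, not in this module, and resolve_actions is an invented name), the two MemoryInitKind values could be handled like this against a single tracker, again using the crate-private types:

    use std::ops::Range;

    // Hypothetical, simplified resolution loop over recorded actions in
    // execution order; returns the ranges that must be zero-filled before
    // the commands run.
    fn resolve_actions(
        tracker: &mut InitTracker<u64>,
        actions: &[(Range<u64>, MemoryInitKind)],
    ) -> Vec<Range<u64>> {
        let mut needs_zeroing = Vec::new();
        for (range, kind) in actions {
            match kind {
                // Reads (and potentially partial writes): every still-uninitialized
                // sub-range must be cleared first, and is marked initialized.
                MemoryInitKind::NeedsInitializedMemory => {
                    needs_zeroing.extend(tracker.drain(range.clone()));
                }
                // Full writes: the range becomes initialized as a side effect.
                // Dropping the drain iterator is enough; its Drop impl exhausts
                // it and removes the range from the tracker.
                MemoryInitKind::ImplicitlyInitialized => {
                    drop(tracker.drain(range.clone()));
                }
            }
        }
        needs_zeroing
    }

The ranges collected for NeedsInitializedMemory are what a queue implementation would turn into clear commands before executing the recorded work.
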
diff --git a/third_party/rust/wgpu-core/src/init_tracker/texture.rs b/third_party/rust/wgpu-core/src/init_tracker/texture.rs
new file mode 100644
index 0000000000..a859b5f784
--- /dev/null
+++ b/third_party/rust/wgpu-core/src/init_tracker/texture.rs
@@ -0,0 +1,103 @@
+use super::{InitTracker, MemoryInitKind};
+use crate::{hal_api::HalApi, resource::Texture, track::TextureSelector};
+use arrayvec::ArrayVec;
+use std::{ops::Range, sync::Arc};
+
+#[derive(Debug, Clone)]
+pub(crate) struct TextureInitRange {
+ pub(crate) mip_range: Range<u32>,
+ // Strictly array layers. We do *not* track volume slices separately.
+ pub(crate) layer_range: Range<u32>,
+}
+
+// Returns true if a copy operation doesn't fully cover the texture init
+// tracking granularity. I.e. if this function returns true for a pending copy
+// operation, the target texture needs to be initialized first!
+pub(crate) fn has_copy_partial_init_tracker_coverage(
+ copy_size: &wgt::Extent3d,
+ mip_level: u32,
+ desc: &wgt::TextureDescriptor<(), Vec<wgt::TextureFormat>>,
+) -> bool {
+ let target_size = desc.mip_level_size(mip_level).unwrap();
+ copy_size.width != target_size.width
+ || copy_size.height != target_size.height
+ || (desc.dimension == wgt::TextureDimension::D3
+ && copy_size.depth_or_array_layers != target_size.depth_or_array_layers)
+}
+
+impl From<TextureSelector> for TextureInitRange {
+ fn from(selector: TextureSelector) -> Self {
+ TextureInitRange {
+ mip_range: selector.mips,
+ layer_range: selector.layers,
+ }
+ }
+}
+
+#[derive(Debug, Clone)]
+pub(crate) struct TextureInitTrackerAction<A: HalApi> {
+ pub(crate) texture: Arc<Texture<A>>,
+ pub(crate) range: TextureInitRange,
+ pub(crate) kind: MemoryInitKind,
+}
+
+pub(crate) type TextureLayerInitTracker = InitTracker<u32>;
+
+#[derive(Debug)]
+pub(crate) struct TextureInitTracker {
+ pub mips: ArrayVec<TextureLayerInitTracker, { hal::MAX_MIP_LEVELS as usize }>,
+}
+
+impl TextureInitTracker {
+ pub(crate) fn new(mip_level_count: u32, depth_or_array_layers: u32) -> Self {
+ TextureInitTracker {
+ mips: std::iter::repeat(TextureLayerInitTracker::new(depth_or_array_layers))
+ .take(mip_level_count as usize)
+ .collect(),
+ }
+ }
+
+ pub(crate) fn check_action<A: HalApi>(
+ &self,
+ action: &TextureInitTrackerAction<A>,
+ ) -> Option<TextureInitTrackerAction<A>> {
+ let mut mip_range_start = std::usize::MAX;
+ let mut mip_range_end = std::usize::MIN;
+ let mut layer_range_start = std::u32::MAX;
+ let mut layer_range_end = std::u32::MIN;
+
+ for (i, mip_tracker) in self
+ .mips
+ .iter()
+ .enumerate()
+ .take(action.range.mip_range.end as usize)
+ .skip(action.range.mip_range.start as usize)
+ {
+ if let Some(uninitialized_layer_range) =
+ mip_tracker.check(action.range.layer_range.clone())
+ {
+ mip_range_start = mip_range_start.min(i);
+ mip_range_end = i + 1;
+ layer_range_start = layer_range_start.min(uninitialized_layer_range.start);
+ layer_range_end = layer_range_end.max(uninitialized_layer_range.end);
+ };
+ }
+
+ if mip_range_start < mip_range_end && layer_range_start < layer_range_end {
+ Some(TextureInitTrackerAction {
+ texture: action.texture.clone(),
+ range: TextureInitRange {
+ mip_range: mip_range_start as u32..mip_range_end as u32,
+ layer_range: layer_range_start..layer_range_end,
+ },
+ kind: action.kind,
+ })
+ } else {
+ None
+ }
+ }
+
+ pub(crate) fn discard(&mut self, mip_level: u32, layer: u32) {
+ self.mips[mip_level as usize].discard(layer);
+ }
+}
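
To illustrate the granularity described above (one InitTracker<u32> over the layer range per mip level), here is a small hypothetical in-crate sketch; texture_tracker_sketch is an invented name and, like the other sketches, it relies on crate-private types:

    fn texture_tracker_sketch() {
        // 3 mip levels, 6 array layers.
        let mut init = TextureInitTracker::new(3, 6);

        // Every layer of every mip starts out uninitialized.
        assert_eq!(init.mips[0].check(0..6), Some(0..6));

        // Mark all layers of mip 1 as initialized (e.g. after a full-size copy),
        // letting the drain's Drop impl apply the update.
        drop(init.mips[1].drain(0..6));
        assert_eq!(init.mips[1].check(0..6), None);

        // Discarding a single layer makes exactly that layer need init again.
        init.discard(1, 3);
        assert_eq!(init.mips[1].check(0..6), Some(3..4));
    }
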