From 31d292b1694098e74c1ecedbbd41e5d272cbb43e Mon Sep 17 00:00:00 2001
From: Andreas Reich
Date: Sat, 30 Jan 2021 10:09:33 +0100
Subject: [PATCH] added clear helper method to memory_init_tracker, renamed drain

---
 wgpu-core/src/device/mod.rs          | 15 +++--------
 wgpu-core/src/device/queue.rs        |  8 +++---
 wgpu-core/src/memory_init_tracker.rs | 40 ++++++++++++++++------------
 3 files changed, 29 insertions(+), 34 deletions(-)

diff --git a/wgpu-core/src/device/mod.rs b/wgpu-core/src/device/mod.rs
index 964b64e22..0cc5f46de 100644
--- a/wgpu-core/src/device/mod.rs
+++ b/wgpu-core/src/device/mod.rs
@@ -208,10 +208,7 @@ fn map_buffer<B: hal::Backend>(
     //
     // If this is a write mapping zeroing out the memory here is the only reasonable way as all data is pushed to GPU anyways.
     let zero_init_needs_flush_now = !block.is_coherent() && buffer.sync_mapped_writes.is_none(); // No need to flush if it is flushed later anyways.
-    for uninitialized_range in buffer
-        .initialization_status
-        .drain_uninitialized_ranges(offset..(size + offset))
-    {
+    for uninitialized_range in buffer.initialization_status.drain(offset..(size + offset)) {
         let num_bytes = uninitialized_range.end - uninitialized_range.start;
         unsafe {
             ptr::write_bytes(
@@ -2606,14 +2603,8 @@ impl<G: GlobalIdentityHandlerFactory> Global<G> {
             // Zero initialize memory and then mark both staging and buffer as initialized
             // (it's guaranteed that this is the case by the time the buffer is usable)
             unsafe { ptr::write_bytes(ptr.as_ptr(), 0, buffer.size as usize) };
-            buffer
-                .initialization_status
-                .drain_uninitialized_ranges(0..buffer.size)
-                .for_each(drop);
-            stage
-                .initialization_status
-                .drain_uninitialized_ranges(0..buffer.size)
-                .for_each(drop);
+            buffer.initialization_status.clear(0..buffer.size);
+            stage.initialization_status.clear(0..buffer.size);
 
             buffer.map_state = resource::BufferMapState::Init {
                 ptr,
diff --git a/wgpu-core/src/device/queue.rs b/wgpu-core/src/device/queue.rs
index 87e7c7f67..d89ce3bdf 100644
--- a/wgpu-core/src/device/queue.rs
+++ b/wgpu-core/src/device/queue.rs
@@ -276,8 +276,7 @@ impl<G: GlobalIdentityHandlerFactory> Global<G> {
         {
             let dst = buffer_guard.get_mut(buffer_id).unwrap();
             dst.initialization_status
-                .drain_uninitialized_ranges(buffer_offset..(buffer_offset + data_size))
-                .for_each(drop);
+                .clear(buffer_offset..(buffer_offset + data_size));
         }
 
         Ok(())
@@ -500,9 +499,8 @@ impl<G: GlobalIdentityHandlerFactory> Global<G> {
                         .get_mut(buffer_use.id)
                         .map_err(|_| QueueSubmitError::DestroyedBuffer(buffer_use.id))?;
 
-                    let uninitialized_ranges = buffer
-                        .initialization_status
-                        .drain_uninitialized_ranges(buffer_use.range.clone());
+                    let uninitialized_ranges =
+                        buffer.initialization_status.drain(buffer_use.range.clone());
                     match buffer_use.kind {
                         MemoryInitKind::ImplicitlyInitialized => {
                             uninitialized_ranges.for_each(drop);
diff --git a/wgpu-core/src/memory_init_tracker.rs b/wgpu-core/src/memory_init_tracker.rs
index 2e9780877..e06e690f6 100644
--- a/wgpu-core/src/memory_init_tracker.rs
+++ b/wgpu-core/src/memory_init_tracker.rs
@@ -91,8 +91,9 @@ impl MemoryInitTracker {
         }
     }
 
+    // Drains uninitialized ranges in a query range.
     #[must_use]
-    pub(crate) fn drain_uninitialized_ranges<'a>(
+    pub(crate) fn drain<'a>(
         &'a mut self,
         drain_range: Range<wgt::BufferAddress>,
     ) -> MemoryInitTrackerDrain<'a> {
@@ -108,6 +109,11 @@ impl MemoryInitTracker {
             uninitialized_ranges: &mut self.uninitialized_ranges,
         }
     }
+
+    // Clears uninitialized ranges in a query range.
+    pub(crate) fn clear(&mut self, drain_range: Range<wgt::BufferAddress>) {
+        self.drain(drain_range).for_each(drop);
+    }
 }
 
 #[cfg(test)]
@@ -127,7 +133,7 @@ mod test {
     #[test]
     fn is_initialized_for_filled_tracker() {
         let mut tracker = MemoryInitTracker::new(10);
-        tracker.drain_uninitialized_ranges(0..10).for_each(drop);
+        tracker.clear(0..10);
         assert!(tracker.is_initialized(&(0..10)));
         assert!(tracker.is_initialized(&(0..3)));
         assert!(tracker.is_initialized(&(3..4)));
@@ -137,7 +143,7 @@ mod test {
     #[test]
     fn is_initialized_for_partially_filled_tracker() {
         let mut tracker = MemoryInitTracker::new(10);
-        tracker.drain_uninitialized_ranges(4..6).for_each(drop);
+        tracker.clear(4..6);
         assert!(!tracker.is_initialized(&(0..10))); // entire range
         assert!(!tracker.is_initialized(&(0..4))); // left non-overlapping
         assert!(!tracker.is_initialized(&(3..5))); // left overlapping
@@ -149,32 +155,32 @@ mod test {
     }
 
     #[test]
-    fn drain_uninitialized_ranges_never_returns_ranges_twice_for_same_range() {
+    fn drain_never_returns_ranges_twice_for_same_range() {
         let mut tracker = MemoryInitTracker::new(19);
-        assert_eq!(tracker.drain_uninitialized_ranges(0..19).count(), 1);
-        assert_eq!(tracker.drain_uninitialized_ranges(0..19).count(), 0);
+        assert_eq!(tracker.drain(0..19).count(), 1);
+        assert_eq!(tracker.drain(0..19).count(), 0);
 
         let mut tracker = MemoryInitTracker::new(17);
-        assert_eq!(tracker.drain_uninitialized_ranges(5..8).count(), 1);
-        assert_eq!(tracker.drain_uninitialized_ranges(5..8).count(), 0);
-        assert_eq!(tracker.drain_uninitialized_ranges(1..3).count(), 1);
-        assert_eq!(tracker.drain_uninitialized_ranges(1..3).count(), 0);
-        assert_eq!(tracker.drain_uninitialized_ranges(7..13).count(), 1);
-        assert_eq!(tracker.drain_uninitialized_ranges(7..13).count(), 0);
+        assert_eq!(tracker.drain(5..8).count(), 1);
+        assert_eq!(tracker.drain(5..8).count(), 0);
+        assert_eq!(tracker.drain(1..3).count(), 1);
+        assert_eq!(tracker.drain(1..3).count(), 0);
+        assert_eq!(tracker.drain(7..13).count(), 1);
+        assert_eq!(tracker.drain(7..13).count(), 0);
     }
 
     #[test]
-    fn drain_uninitialized_ranges_splits_ranges_correctly() {
+    fn drain_splits_ranges_correctly() {
         let mut tracker = MemoryInitTracker::new(1337);
         assert_eq!(
             tracker
-                .drain_uninitialized_ranges(21..42)
+                .drain(21..42)
                 .collect::<Vec<Range<wgt::BufferAddress>>>(),
             vec![21..42]
         );
         assert_eq!(
             tracker
-                .drain_uninitialized_ranges(900..1000)
+                .drain(900..1000)
                 .collect::<Vec<Range<wgt::BufferAddress>>>(),
             vec![900..1000]
         );
@@ -182,13 +188,13 @@ mod test {
         // Splitted ranges.
         assert_eq!(
             tracker
-                .drain_uninitialized_ranges(5..1003)
+                .drain(5..1003)
                 .collect::<Vec<Range<wgt::BufferAddress>>>(),
             vec![5..21, 42..900, 1000..1003]
         );
         assert_eq!(
             tracker
-                .drain_uninitialized_ranges(0..1337)
+                .drain(0..1337)
                 .collect::<Vec<Range<wgt::BufferAddress>>>(),
             vec![0..5, 1003..1337]
         );
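
Note for reviewers: below is a minimal, self-contained sketch of the drain/clear pattern this patch settles on, for readers who want the two entry points side by side. It is not wgpu-core's implementation (the real MemoryInitTracker is pub(crate) and stores its ranges differently); the InitTracker type, its Vec-backed storage, and the sizes used in main are illustrative assumptions. Only the method names new, is_initialized, drain, and clear mirror the patch.

use std::ops::Range;

// Illustrative stand-in for the tracker touched by this patch; the name
// `InitTracker` and the Vec-backed storage are assumptions, not wgpu-core code.
struct InitTracker {
    // Ordered, non-overlapping ranges that are still uninitialized.
    uninitialized: Vec<Range<u64>>,
}

impl InitTracker {
    fn new(size: u64) -> Self {
        Self {
            uninitialized: vec![0..size],
        }
    }

    // True if no uninitialized range overlaps the query range.
    fn is_initialized(&self, query: &Range<u64>) -> bool {
        self.uninitialized
            .iter()
            .all(|r| r.end <= query.start || r.start >= query.end)
    }

    // Removes and returns the uninitialized pieces overlapping `range`,
    // marking them initialized (the renamed `drain` entry point).
    fn drain(&mut self, range: Range<u64>) -> Vec<Range<u64>> {
        let mut drained = Vec::new();
        let mut remaining = Vec::new();
        for r in self.uninitialized.drain(..) {
            if r.end <= range.start || r.start >= range.end {
                // No overlap: keep the range as-is.
                remaining.push(r);
            } else {
                // Keep the parts outside the query range uninitialized...
                if r.start < range.start {
                    remaining.push(r.start..range.start);
                }
                if r.end > range.end {
                    remaining.push(range.end..r.end);
                }
                // ...and hand back the overlapping part.
                drained.push(r.start.max(range.start)..r.end.min(range.end));
            }
        }
        self.uninitialized = remaining;
        drained
    }

    // The new helper: mark a range initialized without looking at which
    // pieces were still uninitialized (replaces `drain(..).for_each(drop)`).
    fn clear(&mut self, range: Range<u64>) {
        self.drain(range);
    }
}

fn main() {
    let mut tracker = InitTracker::new(1337);
    // A caller that has to zero memory inspects the drained pieces...
    assert_eq!(tracker.drain(5..1003), vec![5..1003]);
    // ...while a caller that only records "this is initialized now" uses clear.
    tracker.clear(1200..1300);
    assert!(tracker.is_initialized(&(5..1003)));
    assert!(tracker.is_initialized(&(1200..1300)));
    assert!(!tracker.is_initialized(&(0..5)));
}

The split is visible in the callers above: sites that only need to record "this range is initialized now" (queue_write_buffer, mapped-at-creation buffers in device/mod.rs) call clear, while queue_submit still calls drain and inspects the returned ranges to decide what needs zeroing.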