added clear helper method to memory_init_tracker, renamed drain

Andreas Reich 2021-01-30 10:09:33 +01:00
parent da86b3f401
commit 31d292b169
3 changed files with 29 additions and 34 deletions
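In short: call sites that previously did `initialization_status.drain_uninitialized_ranges(range).for_each(drop)` now call `initialization_status.clear(range)`, and the iterator-returning method is renamed to `drain`. The sketch below illustrates how the two methods relate. It is a simplified, hypothetical stand-in, not wgpu's actual MemoryInitTracker: it uses u64 instead of wgt::BufferAddress and an eager Vec instead of the lazy MemoryInitTrackerDrain iterator, so its clear discards the result directly rather than via .for_each(drop) as the real implementation in the diff does.

    use std::ops::Range;

    // Simplified stand-in for a memory-init tracker: it remembers which byte
    // ranges of a buffer have not been initialized yet.
    struct Tracker {
        uninitialized_ranges: Vec<Range<u64>>,
    }

    impl Tracker {
        fn new(size: u64) -> Self {
            Self {
                uninitialized_ranges: vec![0..size],
            }
        }

        // Removes the queried range from the tracker and returns the pieces of
        // it that were still uninitialized (callers typically zero-fill those).
        #[must_use]
        fn drain(&mut self, range: Range<u64>) -> Vec<Range<u64>> {
            let mut drained = Vec::new();
            let mut remaining = Vec::new();
            for r in self.uninitialized_ranges.drain(..) {
                let start = r.start.max(range.start);
                let end = r.end.min(range.end);
                if start < end {
                    // The overlapping part is reported as drained...
                    drained.push(start..end);
                    // ...while non-overlapping leftovers stay uninitialized.
                    if r.start < start {
                        remaining.push(r.start..start);
                    }
                    if end < r.end {
                        remaining.push(end..r.end);
                    }
                } else {
                    remaining.push(r);
                }
            }
            self.uninitialized_ranges = remaining;
            drained
        }

        // The new helper: mark a range as initialized, ignoring what was drained.
        // (In wgpu, drain returns a lazy iterator, so the real clear forces it
        // with .for_each(drop), as shown in the diff below.)
        fn clear(&mut self, range: Range<u64>) {
            let _ = self.drain(range);
        }
    }

    fn main() {
        let mut tracker = Tracker::new(10);
        tracker.clear(4..6);
        // Only the ranges that clear() did not touch are still uninitialized.
        assert_eq!(tracker.drain(0..10), vec![0..4, 6..10]);
    }

Keeping drain marked #[must_use] while offering clear makes the "just mark this range as initialized" intent explicit at call sites and removes the repeated .for_each(drop) boilerplate.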

View File

@@ -208,10 +208,7 @@ fn map_buffer<B: hal::Backend>(
     //
     // If this is a write mapping zeroing out the memory here is the only reasonable way as all data is pushed to GPU anyways.
     let zero_init_needs_flush_now = !block.is_coherent() && buffer.sync_mapped_writes.is_none(); // No need to flush if it is flushed later anyways.
-    for uninitialized_range in buffer
-        .initialization_status
-        .drain_uninitialized_ranges(offset..(size + offset))
-    {
+    for uninitialized_range in buffer.initialization_status.drain(offset..(size + offset)) {
         let num_bytes = uninitialized_range.end - uninitialized_range.start;
         unsafe {
             ptr::write_bytes(
@@ -2606,14 +2603,8 @@ impl<G: GlobalIdentityHandlerFactory> Global<G> {
             // Zero initialize memory and then mark both staging and buffer as initialized
             // (it's guaranteed that this is the case by the time the buffer is usable)
             unsafe { ptr::write_bytes(ptr.as_ptr(), 0, buffer.size as usize) };
-            buffer
-                .initialization_status
-                .drain_uninitialized_ranges(0..buffer.size)
-                .for_each(drop);
-            stage
-                .initialization_status
-                .drain_uninitialized_ranges(0..buffer.size)
-                .for_each(drop);
+            buffer.initialization_status.clear(0..buffer.size);
+            stage.initialization_status.clear(0..buffer.size);

             buffer.map_state = resource::BufferMapState::Init {
                 ptr,

View File

@@ -276,8 +276,7 @@ impl<G: GlobalIdentityHandlerFactory> Global<G> {
         {
             let dst = buffer_guard.get_mut(buffer_id).unwrap();
             dst.initialization_status
-                .drain_uninitialized_ranges(buffer_offset..(buffer_offset + data_size))
-                .for_each(drop);
+                .clear(buffer_offset..(buffer_offset + data_size));
         }

         Ok(())
@@ -500,9 +499,8 @@ impl<G: GlobalIdentityHandlerFactory> Global<G> {
                     let buffer = buffer_guard
                         .get_mut(buffer_use.id)
                         .map_err(|_| QueueSubmitError::DestroyedBuffer(buffer_use.id))?;
-                    let uninitialized_ranges = buffer
-                        .initialization_status
-                        .drain_uninitialized_ranges(buffer_use.range.clone());
+                    let uninitialized_ranges =
+                        buffer.initialization_status.drain(buffer_use.range.clone());
                     match buffer_use.kind {
                         MemoryInitKind::ImplicitlyInitialized => {
                             uninitialized_ranges.for_each(drop);

View File

@@ -91,8 +91,9 @@ impl MemoryInitTracker {
         }
     }

+    // Drains uninitialized ranges in a query range.
     #[must_use]
-    pub(crate) fn drain_uninitialized_ranges<'a>(
+    pub(crate) fn drain<'a>(
         &'a mut self,
         drain_range: Range<wgt::BufferAddress>,
     ) -> MemoryInitTrackerDrain<'a> {
@@ -108,6 +109,11 @@ impl MemoryInitTracker {
             uninitialized_ranges: &mut self.uninitialized_ranges,
         }
     }
+
+    // Clears uninitialized ranges in a query range.
+    pub(crate) fn clear(&mut self, drain_range: Range<wgt::BufferAddress>) {
+        self.drain(drain_range).for_each(drop);
+    }
 }

 #[cfg(test)]
@@ -127,7 +133,7 @@ mod test {
     #[test]
     fn is_initialized_for_filled_tracker() {
         let mut tracker = MemoryInitTracker::new(10);
-        tracker.drain_uninitialized_ranges(0..10).for_each(drop);
+        tracker.clear(0..10);
         assert!(tracker.is_initialized(&(0..10)));
         assert!(tracker.is_initialized(&(0..3)));
         assert!(tracker.is_initialized(&(3..4)));
@@ -137,7 +143,7 @@
     #[test]
     fn is_initialized_for_partially_filled_tracker() {
         let mut tracker = MemoryInitTracker::new(10);
-        tracker.drain_uninitialized_ranges(4..6).for_each(drop);
+        tracker.clear(4..6);
         assert!(!tracker.is_initialized(&(0..10))); // entire range
         assert!(!tracker.is_initialized(&(0..4))); // left non-overlapping
         assert!(!tracker.is_initialized(&(3..5))); // left overlapping
@@ -149,32 +155,32 @@
     }

     #[test]
-    fn drain_uninitialized_ranges_never_returns_ranges_twice_for_same_range() {
+    fn drain_never_returns_ranges_twice_for_same_range() {
         let mut tracker = MemoryInitTracker::new(19);
-        assert_eq!(tracker.drain_uninitialized_ranges(0..19).count(), 1);
-        assert_eq!(tracker.drain_uninitialized_ranges(0..19).count(), 0);
+        assert_eq!(tracker.drain(0..19).count(), 1);
+        assert_eq!(tracker.drain(0..19).count(), 0);

         let mut tracker = MemoryInitTracker::new(17);
-        assert_eq!(tracker.drain_uninitialized_ranges(5..8).count(), 1);
-        assert_eq!(tracker.drain_uninitialized_ranges(5..8).count(), 0);
-        assert_eq!(tracker.drain_uninitialized_ranges(1..3).count(), 1);
-        assert_eq!(tracker.drain_uninitialized_ranges(1..3).count(), 0);
-        assert_eq!(tracker.drain_uninitialized_ranges(7..13).count(), 1);
-        assert_eq!(tracker.drain_uninitialized_ranges(7..13).count(), 0);
+        assert_eq!(tracker.drain(5..8).count(), 1);
+        assert_eq!(tracker.drain(5..8).count(), 0);
+        assert_eq!(tracker.drain(1..3).count(), 1);
+        assert_eq!(tracker.drain(1..3).count(), 0);
+        assert_eq!(tracker.drain(7..13).count(), 1);
+        assert_eq!(tracker.drain(7..13).count(), 0);
     }

     #[test]
-    fn drain_uninitialized_ranges_splits_ranges_correctly() {
+    fn drain_splits_ranges_correctly() {
         let mut tracker = MemoryInitTracker::new(1337);
         assert_eq!(
             tracker
-                .drain_uninitialized_ranges(21..42)
+                .drain(21..42)
                 .collect::<Vec<Range<wgt::BufferAddress>>>(),
             vec![21..42]
         );
         assert_eq!(
             tracker
-                .drain_uninitialized_ranges(900..1000)
+                .drain(900..1000)
                 .collect::<Vec<Range<wgt::BufferAddress>>>(),
             vec![900..1000]
         );
@@ -182,13 +188,13 @@
         // Splitted ranges.
         assert_eq!(
             tracker
-                .drain_uninitialized_ranges(5..1003)
+                .drain(5..1003)
                 .collect::<Vec<Range<wgt::BufferAddress>>>(),
             vec![5..21, 42..900, 1000..1003]
         );
         assert_eq!(
             tracker
-                .drain_uninitialized_ranges(0..1337)
+                .drain(0..1337)
                 .collect::<Vec<Range<wgt::BufferAddress>>>(),
             vec![0..5, 1003..1337]
         );