Make BufferSlice's size non-optional. (#7640)

It used to be that `wgpu::Buffer` did not know its own size, so slices could not always know their endpoints. Now that buffers do, slices can too. This makes the code simpler without modifying the public API.
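
For orientation, a minimal caller-side sketch of the API that stays the same (assuming an already-created `wgpu::Device` named `device`; the buffer and sizes below are hypothetical, not part of this diff). The `BufferSlice` returned by `Buffer::slice` now always carries a concrete `BufferSize`:

    // Sketch only: `device` is assumed to be an existing wgpu::Device.
    let buffer = device.create_buffer(&wgpu::BufferDescriptor {
        label: Some("example"),
        size: 1024,
        usage: wgpu::BufferUsages::MAP_READ | wgpu::BufferUsages::COPY_DST,
        mapped_at_creation: false,
    });

    // An unbounded range resolves to the buffer's full length, because the
    // buffer knows its own size; the slice's size is no longer an Option.
    let whole = buffer.slice(..);
    assert_eq!(whole.size().get(), 1024);

    // Bounded sub-slices behave as before; empty or out-of-range slices panic
    // ("buffer slices can not be empty" / "slice offset ... is out of range").
    let part = buffer.slice(16..272);
    assert_eq!(part.size().get(), 256);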
Kevin Reid 2025-04-27 09:38:23 -07:00 committed by GitHub
parent 65eb10ed5a
commit 640e031919
5 changed files with 68 additions and 95 deletions

View File

@@ -240,8 +240,9 @@ impl Buffer {
     ///
     /// - If `bounds` is outside of the bounds of `self`.
     /// - If `bounds` has a length less than 1.
+    #[track_caller]
     pub fn slice<S: RangeBounds<BufferAddress>>(&self, bounds: S) -> BufferSlice<'_> {
-        let (offset, size) = range_to_offset_size(bounds);
+        let (offset, size) = range_to_offset_size(bounds, self.size);
         check_buffer_bounds(self.size, offset, size);
         BufferSlice {
             buffer: self,
@@ -438,7 +439,7 @@ impl Buffer {
 pub struct BufferSlice<'a> {
     pub(crate) buffer: &'a Buffer,
     pub(crate) offset: BufferAddress,
-    pub(crate) size: Option<BufferSize>,
+    pub(crate) size: BufferSize,
 }
 #[cfg(send_sync)]
 static_assertions::assert_impl_all!(BufferSlice<'_>: Send, Sync);
@@ -456,20 +457,14 @@ impl<'a> BufferSlice<'a> {
     ///
     /// - If `bounds` is outside of the bounds of `self`.
     /// - If `bounds` has a length less than 1.
+    #[track_caller]
     pub fn slice<S: RangeBounds<BufferAddress>>(&self, bounds: S) -> BufferSlice<'a> {
-        let (offset, size) = range_to_offset_size(bounds);
-        check_buffer_bounds(
-            match self.size {
-                Some(size) => size.get(),
-                None => self.buffer.size(),
-            },
-            offset,
-            size,
-        );
+        let (offset, size) = range_to_offset_size(bounds, self.size.get());
+        check_buffer_bounds(self.size.get(), offset, size);
         BufferSlice {
             buffer: self.buffer,
             offset: self.offset + offset, // check_buffer_bounds ensures this does not overflow
-            size: size.or(self.size), // check_buffer_bounds ensures this is essentially min()
+            size, // check_buffer_bounds ensures this is essentially min()
         }
     }
@@ -502,10 +497,7 @@ impl<'a> BufferSlice<'a> {
     ) {
         let mut mc = self.buffer.map_context.lock();
         assert_eq!(mc.initial_range, 0..0, "Buffer is already mapped");
-        let end = match self.size {
-            Some(s) => self.offset + s.get(),
-            None => mc.total_size,
-        };
+        let end = self.offset + self.size.get();
         mc.initial_range = self.offset..end;
         self.buffer
@@ -608,10 +600,7 @@ impl<'a> BufferSlice<'a> {
     /// Returns the size of this slice.
     pub fn size(&self) -> BufferSize {
-        self.size.unwrap_or_else(|| {
-            (|| BufferSize::new(self.buffer.size().checked_sub(self.offset)?))()
-                .expect("can't happen: slice has incorrect size for its buffer")
-        })
+        self.size
     }
 }
@@ -622,7 +611,7 @@ impl<'a> From<BufferSlice<'a>> for crate::BufferBinding<'a> {
         BufferBinding {
             buffer: value.buffer,
             offset: value.offset,
-            size: value.size,
+            size: Some(value.size),
         }
     }
 }
@@ -637,16 +626,9 @@ impl<'a> From<BufferSlice<'a>> for crate::BindingResource<'a> {
 /// The mapped portion of a buffer, if any, and its outstanding views.
 ///
-/// This ensures that views fall within the mapped range and don't overlap, and
-/// also takes care of turning `Option<BufferSize>` sizes into actual buffer
-/// offsets.
+/// This ensures that views fall within the mapped range and don't overlap.
 #[derive(Debug)]
 pub(crate) struct MapContext {
-    /// The overall size of the buffer.
-    ///
-    /// This is just a convenient copy of [`Buffer::size`].
-    pub(crate) total_size: BufferAddress,
     /// The range of the buffer that is mapped.
     ///
     /// This is `0..0` if the buffer is not mapped. This becomes non-empty when
@@ -664,9 +646,8 @@ pub(crate) struct MapContext {
 }
 impl MapContext {
-    pub(crate) fn new(total_size: BufferAddress) -> Self {
+    pub(crate) fn new() -> Self {
         Self {
-            total_size,
             initial_range: 0..0,
             sub_ranges: Vec::new(),
         }
@@ -689,11 +670,8 @@ impl MapContext {
     /// # Panics
     ///
     /// This panics if the given range overlaps with any existing range.
-    fn add(&mut self, offset: BufferAddress, size: Option<BufferSize>) -> BufferAddress {
-        let end = match size {
-            Some(s) => offset + s.get(),
-            None => self.initial_range.end,
-        };
+    fn add(&mut self, offset: BufferAddress, size: BufferSize) -> BufferAddress {
+        let end = offset + size.get();
         assert!(self.initial_range.start <= offset && end <= self.initial_range.end);
         // This check is essential for avoiding undefined behavior: it is the
         // only thing that ensures that `&mut` references to the buffer's
@@ -716,11 +694,8 @@ impl MapContext {
     /// passed to [`add`].
     ///
     /// [`add]`: MapContext::add
-    fn remove(&mut self, offset: BufferAddress, size: Option<BufferSize>) {
-        let end = match size {
-            Some(s) => offset + s.get(),
-            None => self.initial_range.end,
-        };
+    fn remove(&mut self, offset: BufferAddress, size: BufferSize) {
+        let end = offset + size.get();
         let index = self
             .sub_ranges
@@ -871,99 +846,100 @@ impl Drop for BufferViewMut<'_> {
     }
 }
+#[track_caller]
 fn check_buffer_bounds(
     buffer_size: BufferAddress,
-    offset: BufferAddress,
-    size: Option<BufferSize>,
+    slice_offset: BufferAddress,
+    slice_size: BufferSize,
 ) {
     // A slice of length 0 is invalid, so the offset must not be equal to or greater than the buffer size.
-    if offset >= buffer_size {
+    if slice_offset >= buffer_size {
         panic!(
             "slice offset {} is out of range for buffer of size {}",
-            offset, buffer_size
+            slice_offset, buffer_size
         );
     }
-    if let Some(size) = size {
-        // Detect integer overflow.
-        let end = offset.checked_add(size.get());
-        if end.is_none_or(|end| end > buffer_size) {
-            panic!(
-                "slice offset {} size {} is out of range for buffer of size {}",
-                offset, size, buffer_size
-            );
-        }
-    }
+    // Detect integer overflow.
+    let end = slice_offset.checked_add(slice_size.get());
+    if end.is_none_or(|end| end > buffer_size) {
+        panic!(
+            "slice offset {} size {} is out of range for buffer of size {}",
+            slice_offset, slice_size, buffer_size
+        );
+    }
 }
+#[track_caller]
 fn range_to_offset_size<S: RangeBounds<BufferAddress>>(
     bounds: S,
-) -> (BufferAddress, Option<BufferSize>) {
+    whole_size: BufferAddress,
+) -> (BufferAddress, BufferSize) {
     let offset = match bounds.start_bound() {
         Bound::Included(&bound) => bound,
         Bound::Excluded(&bound) => bound + 1,
         Bound::Unbounded => 0,
     };
-    let size = match bounds.end_bound() {
-        Bound::Included(&bound) => Some(bound + 1 - offset),
-        Bound::Excluded(&bound) => Some(bound - offset),
-        Bound::Unbounded => None,
-    }
-    .map(|size| BufferSize::new(size).expect("Buffer slices can not be empty"));
+    let size = BufferSize::new(match bounds.end_bound() {
+        Bound::Included(&bound) => bound + 1 - offset,
+        Bound::Excluded(&bound) => bound - offset,
+        Bound::Unbounded => whole_size - offset,
+    })
+    .expect("buffer slices can not be empty");
     (offset, size)
 }
 #[cfg(test)]
 mod tests {
-    use super::{check_buffer_bounds, range_to_offset_size, BufferSize};
+    use super::{check_buffer_bounds, range_to_offset_size, BufferAddress, BufferSize};
+    fn bs(value: BufferAddress) -> BufferSize {
+        BufferSize::new(value).unwrap()
+    }
     #[test]
     fn range_to_offset_size_works() {
-        assert_eq!(range_to_offset_size(0..2), (0, BufferSize::new(2)));
-        assert_eq!(range_to_offset_size(2..5), (2, BufferSize::new(3)));
-        assert_eq!(range_to_offset_size(..), (0, None));
-        assert_eq!(range_to_offset_size(21..), (21, None));
-        assert_eq!(range_to_offset_size(0..), (0, None));
-        assert_eq!(range_to_offset_size(..21), (0, BufferSize::new(21)));
+        let whole = 100;
+        assert_eq!(range_to_offset_size(0..2, whole), (0, bs(2)));
+        assert_eq!(range_to_offset_size(2..5, whole), (2, bs(3)));
+        assert_eq!(range_to_offset_size(.., whole), (0, bs(whole)));
+        assert_eq!(range_to_offset_size(21.., whole), (21, bs(whole - 21)));
+        assert_eq!(range_to_offset_size(0.., whole), (0, bs(whole)));
+        assert_eq!(range_to_offset_size(..21, whole), (0, bs(21)));
     }
     #[test]
-    #[should_panic]
+    #[should_panic = "buffer slices can not be empty"]
     fn range_to_offset_size_panics_for_empty_range() {
-        range_to_offset_size(123..123);
+        range_to_offset_size(123..123, 200);
     }
     #[test]
-    #[should_panic]
+    #[should_panic = "buffer slices can not be empty"]
     fn range_to_offset_size_panics_for_unbounded_empty_range() {
-        range_to_offset_size(..0);
+        range_to_offset_size(..0, 100);
     }
-    #[test]
-    #[should_panic]
-    fn check_buffer_bounds_panics_for_offset_at_size() {
-        check_buffer_bounds(100, 100, None);
-    }
     #[test]
     fn check_buffer_bounds_works_for_end_in_range() {
-        check_buffer_bounds(200, 100, BufferSize::new(50));
-        check_buffer_bounds(200, 100, BufferSize::new(100));
-        check_buffer_bounds(u64::MAX, u64::MAX - 100, BufferSize::new(100));
-        check_buffer_bounds(u64::MAX, 0, BufferSize::new(u64::MAX));
-        check_buffer_bounds(u64::MAX, 1, BufferSize::new(u64::MAX - 1));
+        check_buffer_bounds(200, 100, bs(50));
+        check_buffer_bounds(200, 100, bs(100));
+        check_buffer_bounds(u64::MAX, u64::MAX - 100, bs(100));
+        check_buffer_bounds(u64::MAX, 0, bs(u64::MAX));
+        check_buffer_bounds(u64::MAX, 1, bs(u64::MAX - 1));
     }
     #[test]
     #[should_panic]
     fn check_buffer_bounds_panics_for_end_over_size() {
-        check_buffer_bounds(200, 100, BufferSize::new(101));
+        check_buffer_bounds(200, 100, bs(101));
     }
     #[test]
     #[should_panic]
     fn check_buffer_bounds_panics_for_end_wraparound() {
-        check_buffer_bounds(u64::MAX, 1, BufferSize::new(u64::MAX));
+        check_buffer_bounds(u64::MAX, 1, bs(u64::MAX));
     }
 }

View File

@@ -254,7 +254,7 @@ impl Device {
     /// Creates a [`Buffer`].
     #[must_use]
     pub fn create_buffer(&self, desc: &BufferDescriptor<'_>) -> Buffer {
-        let mut map_context = MapContext::new(desc.size);
+        let mut map_context = MapContext::new();
         if desc.mapped_at_creation {
             map_context.initial_range = 0..desc.size;
         }
@@ -330,7 +330,7 @@ impl Device {
         hal_buffer: A::Buffer,
         desc: &BufferDescriptor<'_>,
     ) -> Buffer {
-        let mut map_context = MapContext::new(desc.size);
+        let mut map_context = MapContext::new();
         if desc.mapped_at_creation {
             map_context.initial_range = 0..desc.size;
         }

View File

@@ -93,7 +93,7 @@ impl<'a> RenderBundleEncoder<'a> {
             &buffer_slice.buffer.inner,
             index_format,
             buffer_slice.offset,
-            buffer_slice.size,
+            Some(buffer_slice.size),
         );
     }
@@ -112,7 +112,7 @@ impl<'a> RenderBundleEncoder<'a> {
             slot,
             &buffer_slice.buffer.inner,
             buffer_slice.offset,
-            buffer_slice.size,
+            Some(buffer_slice.size),
         );
     }

View File

@@ -100,7 +100,7 @@ impl RenderPass<'_> {
             &buffer_slice.buffer.inner,
             index_format,
             buffer_slice.offset,
-            buffer_slice.size,
+            Some(buffer_slice.size),
         );
     }
@@ -121,7 +121,7 @@ impl RenderPass<'_> {
             slot,
             &buffer_slice.buffer.inner,
             buffer_slice.offset,
-            buffer_slice.size,
+            Some(buffer_slice.size),
         );
     }

View File

@@ -98,10 +98,7 @@ impl DownloadBuffer {
         buffer: &super::BufferSlice<'_>,
         callback: impl FnOnce(Result<Self, super::BufferAsyncError>) + Send + 'static,
     ) {
-        let size = match buffer.size {
-            Some(size) => size.into(),
-            None => buffer.buffer.map_context.lock().total_size - buffer.offset,
-        };
+        let size = buffer.size.into();
         let download = device.create_buffer(&super::BufferDescriptor {
             size,