Unify set bind group.

This commit is contained in:
Vecvec 2025-06-22 12:40:06 +12:00 committed by Teodor Tanasoaia
parent 768d5f4879
commit 4d36a02691
6 changed files with 524 additions and 452 deletions

View File

@ -118,6 +118,7 @@ use crate::{
};
use super::{
pass,
render_command::{ArcRenderCommand, RenderCommand},
DrawKind,
};
@ -527,11 +528,13 @@ fn set_bind_group(
let max_bind_groups = state.device.limits.max_bind_groups;
if index >= max_bind_groups {
return Err(RenderCommandError::BindGroupIndexOutOfRange {
index,
max: max_bind_groups,
}
.into());
return Err(
RenderCommandError::BindGroupIndexOutOfRange(pass::BindGroupIndexOutOfRange {
index,
max: max_bind_groups,
})
.into(),
);
}
// Identify the next `num_dynamic_offsets` entries from `dynamic_offsets`.

View File

@ -4,34 +4,31 @@ use wgt::{BufferAddress, DynamicOffset};
use alloc::{borrow::Cow, boxed::Box, sync::Arc, vec::Vec};
use core::{fmt, str};
use crate::command::{EncoderStateError, PassStateError, TimestampWritesError};
use crate::ray_tracing::AsAction;
use crate::binding_model::BindError;
use crate::command::{pass, EncoderStateError, PassStateError, TimestampWritesError};
use crate::resource::DestroyedResourceError;
use crate::{
binding_model::{
BindError, BindGroup, LateMinBufferBindingSizeMismatch, PushConstantUploadError,
},
binding_model::{LateMinBufferBindingSizeMismatch, PushConstantUploadError},
command::{
bind::{Binder, BinderError},
compute_command::ArcComputeCommand,
end_pipeline_statistics_query,
memory_init::{
fixup_discarded_surfaces, CommandBufferTextureMemoryActions, SurfacesInDiscardState,
fixup_discarded_surfaces, SurfacesInDiscardState,
},
pass_base, pass_try, validate_and_begin_pipeline_statistics_query, ArcPassTimestampWrites,
BasePass, BindGroupStateChange, CommandBuffer, CommandEncoderError, MapPassErr,
PassErrorScope, PassTimestampWrites, QueryUseError, StateChange,
},
device::{Device, DeviceError, MissingDownlevelFlags, MissingFeatures},
device::{DeviceError, MissingDownlevelFlags, MissingFeatures},
global::Global,
hal_label, id,
init_tracker::{BufferInitTrackerAction, MemoryInitKind},
init_tracker::MemoryInitKind,
pipeline::ComputePipeline,
resource::{
self, Buffer, DestroyedResourceError, InvalidResourceError, Labeled,
MissingBufferUsageError, ParentDevice,
self, Buffer, InvalidResourceError, Labeled, MissingBufferUsageError, ParentDevice,
},
snatch::SnatchGuard,
track::{ResourceUsageCompatibilityError, Tracker, TrackerIndex, UsageScope},
track::{ResourceUsageCompatibilityError, Tracker, TrackerIndex},
Label,
};
@ -139,8 +136,8 @@ pub enum ComputePassErrorInner {
EncoderState(#[from] EncoderStateError),
#[error("Parent encoder is invalid")]
InvalidParentEncoder,
#[error("Bind group index {index} is greater than the device's requested `max_bind_group` limit {max}")]
BindGroupIndexOutOfRange { index: u32, max: u32 },
#[error(transparent)]
BindGroupIndexOutOfRange(#[from] pass::BindGroupIndexOutOfRange),
#[error(transparent)]
DestroyedResource(#[from] DestroyedResourceError),
#[error("Indirect buffer offset {0:?} is not a multiple of 4")]
@ -206,34 +203,17 @@ where
}
struct State<'scope, 'snatch_guard, 'cmd_buf, 'raw_encoder> {
binder: Binder,
pipeline: Option<Arc<ComputePipeline>>,
scope: UsageScope<'scope>,
debug_scope_depth: u32,
snatch_guard: SnatchGuard<'snatch_guard>,
general: pass::BaseState<'scope, 'snatch_guard, 'cmd_buf, 'raw_encoder>,
device: &'cmd_buf Arc<Device>,
raw_encoder: &'raw_encoder mut dyn hal::DynCommandEncoder,
tracker: &'cmd_buf mut Tracker,
buffer_memory_init_actions: &'cmd_buf mut Vec<BufferInitTrackerAction>,
texture_memory_actions: &'cmd_buf mut CommandBufferTextureMemoryActions,
as_actions: &'cmd_buf mut Vec<AsAction>,
temp_offsets: Vec<u32>,
dynamic_offset_count: usize,
string_offset: usize,
active_query: Option<(Arc<resource::QuerySet>, u32)>,
push_constants: Vec<u32>,
intermediate_trackers: Tracker,
/// Immediate texture inits required because of prior discards. Need to
/// be inserted before texture reads.
pending_discard_init_fixups: SurfacesInDiscardState,
}
impl<'scope, 'snatch_guard, 'cmd_buf, 'raw_encoder>
@ -241,8 +221,8 @@ impl<'scope, 'snatch_guard, 'cmd_buf, 'raw_encoder>
{
fn is_ready(&self) -> Result<(), DispatchError> {
if let Some(pipeline) = self.pipeline.as_ref() {
self.binder.check_compatibility(pipeline.as_ref())?;
self.binder.check_late_buffer_bindings()?;
self.general.binder.check_compatibility(pipeline.as_ref())?;
self.general.binder.check_late_buffer_bindings()?;
Ok(())
} else {
Err(DispatchError::MissingPipeline)
@ -255,16 +235,19 @@ impl<'scope, 'snatch_guard, 'cmd_buf, 'raw_encoder>
&mut self,
indirect_buffer: Option<TrackerIndex>,
) -> Result<(), ResourceUsageCompatibilityError> {
for bind_group in self.binder.list_active() {
unsafe { self.scope.merge_bind_group(&bind_group.used)? };
for bind_group in self.general.binder.list_active() {
unsafe { self.general.scope.merge_bind_group(&bind_group.used)? };
// Note: stateless trackers are not merged: the lifetime reference
// is held to the bind group itself.
}
for bind_group in self.binder.list_active() {
for bind_group in self.general.binder.list_active() {
unsafe {
self.intermediate_trackers
.set_and_remove_from_usage_scope_sparse(&mut self.scope, &bind_group.used)
.set_and_remove_from_usage_scope_sparse(
&mut self.general.scope,
&bind_group.used,
)
}
}
@ -272,13 +255,16 @@ impl<'scope, 'snatch_guard, 'cmd_buf, 'raw_encoder>
unsafe {
self.intermediate_trackers
.buffers
.set_and_remove_from_usage_scope_sparse(&mut self.scope.buffers, indirect_buffer);
.set_and_remove_from_usage_scope_sparse(
&mut self.general.scope.buffers,
indirect_buffer,
);
}
CommandBuffer::drain_barriers(
self.raw_encoder,
self.general.raw_encoder,
&mut self.intermediate_trackers,
&self.snatch_guard,
self.general.snatch_guard,
);
Ok(())
}
@ -496,36 +482,48 @@ impl Global {
.open_pass(base.label.as_deref())
.map_pass_err(pass_scope)?;
let snatch_guard = device.snatchable_lock.read();
let mut state = State {
binder: Binder::new(),
pipeline: None,
scope: device.new_usage_scope(),
debug_scope_depth: 0,
snatch_guard: device.snatchable_lock.read(),
general: pass::BaseState {
device,
raw_encoder,
tracker: &mut cmd_buf_data.trackers,
buffer_memory_init_actions: &mut cmd_buf_data.buffer_memory_init_actions,
texture_memory_actions: &mut cmd_buf_data.texture_memory_actions,
as_actions: &mut cmd_buf_data.as_actions,
binder: Binder::new(),
temp_offsets: Vec::new(),
dynamic_offset_count: 0,
device,
raw_encoder,
tracker: &mut cmd_buf_data.trackers,
buffer_memory_init_actions: &mut cmd_buf_data.buffer_memory_init_actions,
texture_memory_actions: &mut cmd_buf_data.texture_memory_actions,
as_actions: &mut cmd_buf_data.as_actions,
pending_discard_init_fixups: SurfacesInDiscardState::new(),
snatch_guard: &snatch_guard,
scope: device.new_usage_scope(),
},
temp_offsets: Vec::new(),
dynamic_offset_count: 0,
string_offset: 0,
active_query: None,
push_constants: Vec::new(),
intermediate_trackers: Tracker::new(),
pending_discard_init_fixups: SurfacesInDiscardState::new(),
};
let indices = &state.device.tracker_indices;
state.tracker.buffers.set_size(indices.buffers.size());
state.tracker.textures.set_size(indices.textures.size());
let indices = &state.general.device.tracker_indices;
state
.general
.tracker
.buffers
.set_size(indices.buffers.size());
state
.general
.tracker
.textures
.set_size(indices.textures.size());
let timestamp_writes: Option<hal::PassTimestampWrites<'_, dyn hal::DynQuerySet>> =
if let Some(tw) = pass.timestamp_writes.take() {
@ -533,7 +531,7 @@ impl Global {
.same_device_as(cmd_buf.as_ref())
.map_pass_err(pass_scope)?;
let query_set = state.tracker.query_sets.insert_single(tw.query_set);
let query_set = state.general.tracker.query_sets.insert_single(tw.query_set);
// Unlike in render passes we can't delay resetting the query sets since
// there is no auxiliary pass.
@ -550,7 +548,10 @@ impl Global {
// But no point in erroring over that nuance here!
if let Some(range) = range {
unsafe {
state.raw_encoder.reset_queries(query_set.raw(), range);
state
.general
.raw_encoder
.reset_queries(query_set.raw(), range);
}
}
@ -569,7 +570,7 @@ impl Global {
};
unsafe {
state.raw_encoder.begin_compute_pass(&hal_desc);
state.general.raw_encoder.begin_compute_pass(&hal_desc);
}
for command in base.commands.drain(..) {
@ -580,13 +581,14 @@ impl Global {
bind_group,
} => {
let scope = PassErrorScope::SetBindGroup;
set_bind_group(
&mut state,
pass::set_bind_group::<ComputePassErrorInner>(
&mut state.general,
cmd_buf.as_ref(),
&base.dynamic_offsets,
index,
num_dynamic_offsets,
bind_group,
false,
)
.map_pass_err(scope)?;
}
@ -643,8 +645,8 @@ impl Global {
let scope = PassErrorScope::BeginPipelineStatisticsQuery;
validate_and_begin_pipeline_statistics_query(
query_set,
state.raw_encoder,
&mut state.tracker.query_sets,
state.general.raw_encoder,
&mut state.general.tracker.query_sets,
cmd_buf.as_ref(),
query_index,
None,
@ -654,21 +656,27 @@ impl Global {
}
ArcComputeCommand::EndPipelineStatisticsQuery => {
let scope = PassErrorScope::EndPipelineStatisticsQuery;
end_pipeline_statistics_query(state.raw_encoder, &mut state.active_query)
.map_pass_err(scope)?;
end_pipeline_statistics_query(
state.general.raw_encoder,
&mut state.active_query,
)
.map_pass_err(scope)?;
}
}
}
unsafe {
state.raw_encoder.end_compute_pass();
state.general.raw_encoder.end_compute_pass();
}
let State {
snatch_guard,
tracker,
general:
pass::BaseState {
tracker,
pending_discard_init_fixups,
..
},
intermediate_trackers,
pending_discard_init_fixups,
..
} = state;
@ -702,88 +710,6 @@ impl Global {
}
}
fn set_bind_group(
state: &mut State,
cmd_buf: &CommandBuffer,
dynamic_offsets: &[DynamicOffset],
index: u32,
num_dynamic_offsets: usize,
bind_group: Option<Arc<BindGroup>>,
) -> Result<(), ComputePassErrorInner> {
let max_bind_groups = state.device.limits.max_bind_groups;
if index >= max_bind_groups {
return Err(ComputePassErrorInner::BindGroupIndexOutOfRange {
index,
max: max_bind_groups,
});
}
state.temp_offsets.clear();
state.temp_offsets.extend_from_slice(
&dynamic_offsets
[state.dynamic_offset_count..state.dynamic_offset_count + num_dynamic_offsets],
);
state.dynamic_offset_count += num_dynamic_offsets;
if bind_group.is_none() {
// TODO: Handle bind_group None.
return Ok(());
}
let bind_group = bind_group.unwrap();
let bind_group = state.tracker.bind_groups.insert_single(bind_group);
bind_group.same_device_as(cmd_buf)?;
bind_group.validate_dynamic_bindings(index, &state.temp_offsets)?;
state
.buffer_memory_init_actions
.extend(bind_group.used_buffer_ranges.iter().filter_map(|action| {
action
.buffer
.initialization_status
.read()
.check_action(action)
}));
for action in bind_group.used_texture_ranges.iter() {
state
.pending_discard_init_fixups
.extend(state.texture_memory_actions.register_init_action(action));
}
let used_resource = bind_group
.used
.acceleration_structures
.into_iter()
.map(|tlas| AsAction::UseTlas(tlas.clone()));
state.as_actions.extend(used_resource);
let pipeline_layout = state.binder.pipeline_layout.clone();
let entries = state
.binder
.assign_group(index as usize, bind_group, &state.temp_offsets);
if !entries.is_empty() && pipeline_layout.is_some() {
let pipeline_layout = pipeline_layout.as_ref().unwrap().raw();
for (i, e) in entries.iter().enumerate() {
if let Some(group) = e.group.as_ref() {
let raw_bg = group.try_raw(&state.snatch_guard)?;
unsafe {
state.raw_encoder.set_bind_group(
pipeline_layout,
index + i as u32,
Some(raw_bg),
&e.dynamic_offsets,
);
}
}
}
}
Ok(())
}
fn set_pipeline(
state: &mut State,
cmd_buf: &CommandBuffer,
@ -793,15 +719,23 @@ fn set_pipeline(
state.pipeline = Some(pipeline.clone());
let pipeline = state.tracker.compute_pipelines.insert_single(pipeline);
let pipeline = state
.general
.tracker
.compute_pipelines
.insert_single(pipeline);
unsafe {
state.raw_encoder.set_compute_pipeline(pipeline.raw());
state
.general
.raw_encoder
.set_compute_pipeline(pipeline.raw());
}
// Rebind resources
if state.binder.pipeline_layout.is_none()
if state.general.binder.pipeline_layout.is_none()
|| !state
.general
.binder
.pipeline_layout
.as_ref()
@ -809,14 +743,15 @@ fn set_pipeline(
.is_equal(&pipeline.layout)
{
let (start_index, entries) = state
.general
.binder
.change_pipeline_layout(&pipeline.layout, &pipeline.late_sized_buffer_groups);
if !entries.is_empty() {
for (i, e) in entries.iter().enumerate() {
if let Some(group) = e.group.as_ref() {
let raw_bg = group.try_raw(&state.snatch_guard)?;
let raw_bg = group.try_raw(state.general.snatch_guard)?;
unsafe {
state.raw_encoder.set_bind_group(
state.general.raw_encoder.set_bind_group(
pipeline.layout.raw(),
start_index as u32 + i as u32,
Some(raw_bg),
@ -849,7 +784,7 @@ fn set_pipeline(
let offset = range.range.start;
let size_bytes = range.range.end - offset;
super::push_constant_clear(offset, size_bytes, |clear_offset, clear_data| unsafe {
state.raw_encoder.set_push_constants(
state.general.raw_encoder.set_push_constants(
pipeline.layout.raw(),
wgt::ShaderStages::COMPUTE,
clear_offset,
@ -873,6 +808,7 @@ fn set_push_constant(
let data_slice = &push_constant_data[(values_offset as usize)..values_end_offset];
let pipeline_layout = state
.general
.binder
.pipeline_layout
.as_ref()
@ -892,7 +828,7 @@ fn set_push_constant(
state.push_constants[offset_in_elements..][..size_in_elements].copy_from_slice(data_slice);
unsafe {
state.raw_encoder.set_push_constants(
state.general.raw_encoder.set_push_constants(
pipeline_layout.raw(),
wgt::ShaderStages::COMPUTE,
offset,
@ -907,7 +843,11 @@ fn dispatch(state: &mut State, groups: [u32; 3]) -> Result<(), ComputePassErrorI
state.flush_states(None)?;
let groups_size_limit = state.device.limits.max_compute_workgroups_per_dimension;
let groups_size_limit = state
.general
.device
.limits
.max_compute_workgroups_per_dimension;
if groups[0] > groups_size_limit
|| groups[1] > groups_size_limit
@ -922,7 +862,7 @@ fn dispatch(state: &mut State, groups: [u32; 3]) -> Result<(), ComputePassErrorI
}
unsafe {
state.raw_encoder.dispatch(groups);
state.general.raw_encoder.dispatch(groups);
}
Ok(())
}
@ -938,11 +878,12 @@ fn dispatch_indirect(
state.is_ready()?;
state
.general
.device
.require_downlevel_flags(wgt::DownlevelFlags::INDIRECT_EXECUTION)?;
buffer.check_usage(wgt::BufferUsages::INDIRECT)?;
buffer.check_destroyed(&state.snatch_guard)?;
buffer.check_destroyed(state.general.snatch_guard)?;
if offset % 4 != 0 {
return Err(ComputePassErrorInner::UnalignedIndirectBufferOffset(offset));
@ -958,25 +899,29 @@ fn dispatch_indirect(
}
let stride = 3 * 4; // 3 integers, x/y/z group size
state
.buffer_memory_init_actions
.extend(buffer.initialization_status.read().create_action(
state.general.buffer_memory_init_actions.extend(
buffer.initialization_status.read().create_action(
&buffer,
offset..(offset + stride),
MemoryInitKind::NeedsInitializedMemory,
));
),
);
if let Some(ref indirect_validation) = state.device.indirect_validation {
let params = indirect_validation
.dispatch
.params(&state.device.limits, offset, buffer.size);
if let Some(ref indirect_validation) = state.general.device.indirect_validation {
let params =
indirect_validation
.dispatch
.params(&state.general.device.limits, offset, buffer.size);
unsafe {
state.raw_encoder.set_compute_pipeline(params.pipeline);
state
.general
.raw_encoder
.set_compute_pipeline(params.pipeline);
}
unsafe {
state.raw_encoder.set_push_constants(
state.general.raw_encoder.set_push_constants(
params.pipeline_layout,
wgt::ShaderStages::COMPUTE,
0,
@ -985,7 +930,7 @@ fn dispatch_indirect(
}
unsafe {
state.raw_encoder.set_bind_group(
state.general.raw_encoder.set_bind_group(
params.pipeline_layout,
0,
Some(params.dst_bind_group),
@ -993,13 +938,13 @@ fn dispatch_indirect(
);
}
unsafe {
state.raw_encoder.set_bind_group(
state.general.raw_encoder.set_bind_group(
params.pipeline_layout,
1,
Some(
buffer
.indirect_validation_bind_groups
.get(&state.snatch_guard)
.get(state.general.snatch_guard)
.unwrap()
.dispatch
.as_ref(),
@ -1012,24 +957,30 @@ fn dispatch_indirect(
.intermediate_trackers
.buffers
.set_single(&buffer, wgt::BufferUses::STORAGE_READ_ONLY);
let src_barrier =
src_transition.map(|transition| transition.into_hal(&buffer, &state.snatch_guard));
let src_barrier = src_transition
.map(|transition| transition.into_hal(&buffer, state.general.snatch_guard));
unsafe {
state.raw_encoder.transition_buffers(src_barrier.as_slice());
state
.general
.raw_encoder
.transition_buffers(src_barrier.as_slice());
}
unsafe {
state.raw_encoder.transition_buffers(&[hal::BufferBarrier {
buffer: params.dst_buffer,
usage: hal::StateTransition {
from: wgt::BufferUses::INDIRECT,
to: wgt::BufferUses::STORAGE_READ_WRITE,
},
}]);
state
.general
.raw_encoder
.transition_buffers(&[hal::BufferBarrier {
buffer: params.dst_buffer,
usage: hal::StateTransition {
from: wgt::BufferUses::INDIRECT,
to: wgt::BufferUses::STORAGE_READ_WRITE,
},
}]);
}
unsafe {
state.raw_encoder.dispatch([1, 1, 1]);
state.general.raw_encoder.dispatch([1, 1, 1]);
}
// reset state
@ -1037,12 +988,15 @@ fn dispatch_indirect(
let pipeline = state.pipeline.as_ref().unwrap();
unsafe {
state.raw_encoder.set_compute_pipeline(pipeline.raw());
state
.general
.raw_encoder
.set_compute_pipeline(pipeline.raw());
}
if !state.push_constants.is_empty() {
unsafe {
state.raw_encoder.set_push_constants(
state.general.raw_encoder.set_push_constants(
pipeline.layout.raw(),
wgt::ShaderStages::COMPUTE,
0,
@ -1051,11 +1005,11 @@ fn dispatch_indirect(
}
}
for (i, e) in state.binder.list_valid() {
for (i, e) in state.general.binder.list_valid() {
let group = e.group.as_ref().unwrap();
let raw_bg = group.try_raw(&state.snatch_guard)?;
let raw_bg = group.try_raw(state.general.snatch_guard)?;
unsafe {
state.raw_encoder.set_bind_group(
state.general.raw_encoder.set_bind_group(
pipeline.layout.raw(),
i as u32,
Some(raw_bg),
@ -1066,31 +1020,39 @@ fn dispatch_indirect(
}
unsafe {
state.raw_encoder.transition_buffers(&[hal::BufferBarrier {
buffer: params.dst_buffer,
usage: hal::StateTransition {
from: wgt::BufferUses::STORAGE_READ_WRITE,
to: wgt::BufferUses::INDIRECT,
},
}]);
state
.general
.raw_encoder
.transition_buffers(&[hal::BufferBarrier {
buffer: params.dst_buffer,
usage: hal::StateTransition {
from: wgt::BufferUses::STORAGE_READ_WRITE,
to: wgt::BufferUses::INDIRECT,
},
}]);
}
state.flush_states(None)?;
unsafe {
state.raw_encoder.dispatch_indirect(params.dst_buffer, 0);
state
.general
.raw_encoder
.dispatch_indirect(params.dst_buffer, 0);
}
} else {
state
.general
.scope
.buffers
.merge_single(&buffer, wgt::BufferUses::INDIRECT)?;
use crate::resource::Trackable;
state.flush_states(Some(buffer.tracker_index()))?;
state
.flush_states(Some(buffer.tracker_index()))?;
let buf_raw = buffer.try_raw(&state.snatch_guard)?;
let buf_raw = buffer.try_raw(state.general.snatch_guard)?;
unsafe {
state.raw_encoder.dispatch_indirect(buf_raw, offset);
state.general.raw_encoder.dispatch_indirect(buf_raw, offset);
}
}
@ -1100,6 +1062,7 @@ fn dispatch_indirect(
fn push_debug_group(state: &mut State, string_data: &[u8], len: usize) {
state.debug_scope_depth += 1;
if !state
.general
.device
.instance_flags
.contains(wgt::InstanceFlags::DISCARD_HAL_LABELS)
@ -1107,7 +1070,7 @@ fn push_debug_group(state: &mut State, string_data: &[u8], len: usize) {
let label =
str::from_utf8(&string_data[state.string_offset..state.string_offset + len]).unwrap();
unsafe {
state.raw_encoder.begin_debug_marker(label);
state.general.raw_encoder.begin_debug_marker(label);
}
}
state.string_offset += len;
@ -1119,12 +1082,13 @@ fn pop_debug_group(state: &mut State) -> Result<(), ComputePassErrorInner> {
}
state.debug_scope_depth -= 1;
if !state
.general
.device
.instance_flags
.contains(wgt::InstanceFlags::DISCARD_HAL_LABELS)
{
unsafe {
state.raw_encoder.end_debug_marker();
state.general.raw_encoder.end_debug_marker();
}
}
Ok(())
@ -1132,13 +1096,14 @@ fn pop_debug_group(state: &mut State) -> Result<(), ComputePassErrorInner> {
fn insert_debug_marker(state: &mut State, string_data: &[u8], len: usize) {
if !state
.general
.device
.instance_flags
.contains(wgt::InstanceFlags::DISCARD_HAL_LABELS)
{
let label =
str::from_utf8(&string_data[state.string_offset..state.string_offset + len]).unwrap();
unsafe { state.raw_encoder.insert_debug_marker(label) }
unsafe { state.general.raw_encoder.insert_debug_marker(label) }
}
state.string_offset += len;
}
@ -1152,12 +1117,13 @@ fn write_timestamp(
query_set.same_device_as(cmd_buf)?;
state
.general
.device
.require_features(wgt::Features::TIMESTAMP_QUERY_INSIDE_PASSES)?;
let query_set = state.tracker.query_sets.insert_single(query_set);
let query_set = state.general.tracker.query_sets.insert_single(query_set);
query_set.validate_and_write_timestamp(state.raw_encoder, query_index, None)?;
query_set.validate_and_write_timestamp(state.general.raw_encoder, query_index, None)?;
Ok(())
}

View File

@ -2,6 +2,8 @@ use alloc::boxed::Box;
use thiserror::Error;
use super::bind::BinderError;
use crate::command::pass;
use crate::{
binding_model::{LateMinBufferBindingSizeMismatch, PushConstantUploadError},
resource::{
@ -11,8 +13,6 @@ use crate::{
track::ResourceUsageCompatibilityError,
};
use super::bind::BinderError;
/// Error validating a draw call.
#[derive(Clone, Debug, Error)]
#[non_exhaustive]
@ -61,8 +61,8 @@ pub enum DrawError {
#[derive(Clone, Debug, Error)]
#[non_exhaustive]
pub enum RenderCommandError {
#[error("Bind group index {index} is greater than the device's requested `max_bind_group` limit {max}")]
BindGroupIndexOutOfRange { index: u32, max: u32 },
#[error(transparent)]
BindGroupIndexOutOfRange(#[from] pass::BindGroupIndexOutOfRange),
#[error("Vertex buffer index {index} is greater than the device's requested `max_vertex_buffers` limit {max}")]
VertexBufferIndexOutOfRange { index: u32, max: u32 },
#[error("Render pipeline targets are incompatible with render pass")]

View File

@ -6,6 +6,7 @@ mod compute;
mod compute_command;
mod draw;
mod memory_init;
mod pass;
mod query;
mod ray_tracing;
mod render;

View File

@ -0,0 +1,159 @@
//! Generic pass functions that both compute and render passes need.
use crate::api_log;
use crate::binding_model::{BindError, BindGroup};
use crate::command::bind::Binder;
use crate::command::memory_init::{CommandBufferTextureMemoryActions, SurfacesInDiscardState};
use crate::command::CommandBuffer;
use crate::device::{Device, DeviceError};
use crate::init_tracker::BufferInitTrackerAction;
use crate::ray_tracing::AsAction;
use crate::resource::{DestroyedResourceError, Labeled, ParentDevice};
use crate::snatch::SnatchGuard;
use crate::track::{ResourceUsageCompatibilityError, Tracker, UsageScope};
use alloc::sync::Arc;
use alloc::vec::Vec;
use thiserror::Error;
use wgt::DynamicOffset;
/// Error returned when a bind group index exceeds the device's requested
/// `max_bind_groups` limit.
///
/// Shared between compute and render passes: both `ComputePassErrorInner`
/// and `RenderCommandError` wrap it via `#[error(transparent)]` / `#[from]`.
#[derive(Clone, Debug, Error)]
#[error(
    "Bind group index {index} is greater than the device's requested `max_bind_group` limit {max}"
)]
pub struct BindGroupIndexOutOfRange {
    // The offending bind group index.
    pub index: u32,
    // The device's `max_bind_groups` limit that was exceeded.
    pub max: u32,
}
/// Recording state shared by both compute and render passes.
///
/// Embedded as the `general` field of each pass's own `State` struct, so the
/// pass-generic helpers in this module (e.g. [`set_bind_group`]) can operate
/// on either pass kind.
pub(crate) struct BaseState<'scope, 'snatch_guard, 'cmd_buf, 'raw_encoder> {
    // Device the pass runs on; used for limit/feature/flag checks.
    pub(crate) device: &'cmd_buf Arc<Device>,

    // Raw HAL encoder that pass commands are recorded into.
    pub(crate) raw_encoder: &'raw_encoder mut dyn hal::DynCommandEncoder,

    // The command buffer's resource tracker (`cmd_buf_data.trackers`).
    pub(crate) tracker: &'cmd_buf mut Tracker,

    // Buffer initialization actions accumulated while recording; extended
    // with each bound group's `used_buffer_ranges`.
    pub(crate) buffer_memory_init_actions: &'cmd_buf mut Vec<BufferInitTrackerAction>,

    // Texture memory-init action registry for the command buffer.
    pub(crate) texture_memory_actions: &'cmd_buf mut CommandBufferTextureMemoryActions,

    // Acceleration-structure actions (e.g. `AsAction::UseTlas`) recorded by
    // bound groups.
    pub(crate) as_actions: &'cmd_buf mut Vec<AsAction>,

    /// Immediate texture inits required because of prior discards. Need to
    /// be inserted before texture reads.
    pub(crate) pending_discard_init_fixups: SurfacesInDiscardState,

    // Usage scope that bind-group resource states are merged into.
    pub(crate) scope: UsageScope<'scope>,

    // Tracks currently bound groups and the active pipeline layout.
    pub(crate) binder: Binder,

    // Scratch buffer holding the dynamic offsets of the current
    // `set_bind_group` call.
    pub(crate) temp_offsets: Vec<u32>,

    // Cursor into the pass's flattened dynamic-offset list; advanced by
    // `num_dynamic_offsets` on every `set_bind_group`.
    pub(crate) dynamic_offset_count: usize,

    // Read guard over the device's snatchable lock, held for the whole pass.
    pub(crate) snatch_guard: &'snatch_guard SnatchGuard<'snatch_guard>,
}
/// Pass-generic implementation of `set_bind_group`, shared by compute and
/// render passes.
///
/// Validates `index` against the device's `max_bind_groups` limit, copies the
/// next `num_dynamic_offsets` entries out of `dynamic_offsets` (advancing the
/// pass's running cursor), then — for a `Some` bind group — validates it,
/// records its memory-init and acceleration-structure actions, and re-issues
/// `set_bind_group` on the raw encoder for every group the binder reports as
/// changed (only when a pipeline layout is already bound).
///
/// `merge_bind_groups`: when `true`, the group's resource usage is merged
/// into `state.scope` here; the compute pass passes `false` and instead
/// merges all active groups later in its `flush_states`.
///
/// The error type `E` is the caller's pass-specific error enum; it must be
/// convertible from each validation error this function can produce.
pub(crate) fn set_bind_group<E>(
    state: &mut BaseState,
    cmd_buf: &CommandBuffer,
    dynamic_offsets: &[DynamicOffset],
    index: u32,
    num_dynamic_offsets: usize,
    bind_group: Option<Arc<BindGroup>>,
    merge_bind_groups: bool,
) -> Result<(), E>
where
    E: From<DeviceError>
        + From<BindGroupIndexOutOfRange>
        + From<ResourceUsageCompatibilityError>
        + From<DestroyedResourceError>
        + From<BindError>,
{
    if bind_group.is_none() {
        api_log!("Pass::set_bind_group {index} None");
    } else {
        api_log!(
            "Pass::set_bind_group {index} {}",
            bind_group.as_ref().unwrap().error_ident()
        );
    }

    let max_bind_groups = state.device.limits.max_bind_groups;
    if index >= max_bind_groups {
        return Err(BindGroupIndexOutOfRange {
            index,
            max: max_bind_groups,
        }
        .into());
    }

    // Stash this call's dynamic offsets and advance the cursor into the
    // pass-wide flattened offset list.
    state.temp_offsets.clear();
    state.temp_offsets.extend_from_slice(
        &dynamic_offsets
            [state.dynamic_offset_count..state.dynamic_offset_count + num_dynamic_offsets],
    );
    state.dynamic_offset_count += num_dynamic_offsets;

    if bind_group.is_none() {
        // TODO: Handle bind_group None.
        return Ok(());
    }
    let bind_group = bind_group.unwrap();
    let bind_group = state.tracker.bind_groups.insert_single(bind_group);

    bind_group.same_device_as(cmd_buf)?;

    bind_group.validate_dynamic_bindings(index, &state.temp_offsets)?;

    if merge_bind_groups {
        // merge the resource tracker in
        unsafe {
            state.scope.merge_bind_group(&bind_group.used)?;
        }
    }
    // Note: stateless trackers are not merged: the lifetime reference
    // is held to the bind group itself.

    // Queue init actions for any buffers this group touches that still need
    // initialization.
    state
        .buffer_memory_init_actions
        .extend(bind_group.used_buffer_ranges.iter().filter_map(|action| {
            action
                .buffer
                .initialization_status
                .read()
                .check_action(action)
        }));
    for action in bind_group.used_texture_ranges.iter() {
        state
            .pending_discard_init_fixups
            .extend(state.texture_memory_actions.register_init_action(action));
    }

    // Record TLAS usage for ray-tracing validation.
    let used_resource = bind_group
        .used
        .acceleration_structures
        .into_iter()
        .map(|tlas| AsAction::UseTlas(tlas.clone()));

    state.as_actions.extend(used_resource);

    let pipeline_layout = state.binder.pipeline_layout.clone();
    let entries = state
        .binder
        .assign_group(index as usize, bind_group, &state.temp_offsets);
    // Only re-bind on the raw encoder once a pipeline layout is known;
    // otherwise the binder state is replayed when the pipeline is set.
    if !entries.is_empty() && pipeline_layout.is_some() {
        let pipeline_layout = pipeline_layout.as_ref().unwrap().raw();
        for (i, e) in entries.iter().enumerate() {
            if let Some(group) = e.group.as_ref() {
                let raw_bg = group.try_raw(state.snatch_guard)?;
                unsafe {
                    state.raw_encoder.set_bind_group(
                        pipeline_layout,
                        index + i as u32,
                        Some(raw_bg),
                        &e.dynamic_offsets,
                    );
                }
            }
        }
    }
    Ok(())
}

View File

@ -8,19 +8,16 @@ use wgt::{
TextureSelector, TextureUsages, TextureViewDimension, VertexStepMode,
};
use crate::binding_model::BindGroup;
use crate::command::{
pass_base, pass_try, validate_and_begin_occlusion_query,
pass, pass_base, pass_try, validate_and_begin_occlusion_query,
validate_and_begin_pipeline_statistics_query, EncoderStateError, PassStateError,
TimestampWritesError,
};
use crate::init_tracker::BufferInitTrackerAction;
use crate::pipeline::{RenderPipeline, VertexStep};
use crate::resource::{InvalidResourceError, ResourceErrorIdent};
use crate::snatch::SnatchGuard;
use crate::{
api_log,
binding_model::BindError,
command::{
bind::Binder,
end_occlusion_query, end_pipeline_statistics_query,
@ -57,6 +54,7 @@ use super::{
};
use super::{DrawKind, Rect};
use crate::binding_model::BindError;
pub use wgt::{LoadOp, StoreOp};
fn load_hal_ops<V>(load: LoadOp<V>) -> hal::AttachmentOps {
@ -496,7 +494,6 @@ impl VertexState {
struct State<'scope, 'snatch_guard, 'cmd_buf, 'raw_encoder> {
pipeline_flags: PipelineFlags,
binder: Binder,
blend_constant: OptionalState,
stencil_reference: u32,
pipeline: Option<Arc<RenderPipeline>>,
@ -504,20 +501,10 @@ struct State<'scope, 'snatch_guard, 'cmd_buf, 'raw_encoder> {
vertex: VertexState,
debug_scope_depth: u32,
info: RenderPassInfo<'scope>,
info: RenderPassInfo,
snatch_guard: &'snatch_guard SnatchGuard<'snatch_guard>,
general: pass::BaseState<'scope, 'snatch_guard, 'cmd_buf, 'raw_encoder>,
device: &'cmd_buf Arc<Device>,
raw_encoder: &'raw_encoder mut dyn hal::DynCommandEncoder,
tracker: &'cmd_buf mut Tracker,
buffer_memory_init_actions: &'cmd_buf mut Vec<BufferInitTrackerAction>,
texture_memory_actions: &'cmd_buf mut CommandBufferTextureMemoryActions,
temp_offsets: Vec<u32>,
dynamic_offset_count: usize,
string_offset: usize,
active_occlusion_query: Option<(Arc<QuerySet>, u32)>,
@ -529,8 +516,8 @@ impl<'scope, 'snatch_guard, 'cmd_buf, 'raw_encoder>
{
fn is_ready(&self, indexed: bool) -> Result<(), DrawError> {
if let Some(pipeline) = self.pipeline.as_ref() {
self.binder.check_compatibility(pipeline.as_ref())?;
self.binder.check_late_buffer_bindings()?;
self.general.binder.check_compatibility(pipeline.as_ref())?;
self.general.binder.check_late_buffer_bindings()?;
if self.blend_constant == OptionalState::Required {
return Err(DrawError::MissingBlendConstant);
@ -578,7 +565,7 @@ impl<'scope, 'snatch_guard, 'cmd_buf, 'raw_encoder>
/// Reset the `RenderBundle`-related states.
fn reset_bundle(&mut self) {
self.binder.reset();
self.general.binder.reset();
self.pipeline = None;
self.index.reset();
self.vertex = Default::default();
@ -796,6 +783,12 @@ impl From<MissingTextureUsageError> for RenderPassErrorInner {
}
}
// Route the shared pass-level error into the render pass's error enum via
// `RenderCommandError`, which owns the `BindGroupIndexOutOfRange` variant.
impl From<pass::BindGroupIndexOutOfRange> for RenderPassErrorInner {
    fn from(error: pass::BindGroupIndexOutOfRange) -> Self {
        Self::RenderCommand(RenderCommandError::BindGroupIndexOutOfRange(error))
    }
}
/// Error encountered when performing a render pass.
#[derive(Clone, Debug, Error)]
#[error("{scope}")]
@ -833,21 +826,19 @@ impl TextureView {
const MAX_TOTAL_ATTACHMENTS: usize = hal::MAX_COLOR_ATTACHMENTS + hal::MAX_COLOR_ATTACHMENTS + 1;
type AttachmentDataVec<T> = ArrayVec<T, MAX_TOTAL_ATTACHMENTS>;
struct RenderPassInfo<'d> {
struct RenderPassInfo {
context: RenderPassContext,
usage_scope: UsageScope<'d>,
/// All render attachments, including depth/stencil
render_attachments: AttachmentDataVec<RenderAttachment>,
is_depth_read_only: bool,
is_stencil_read_only: bool,
extent: wgt::Extent3d,
pending_discard_init_fixups: SurfacesInDiscardState,
divergent_discarded_depth_stencil_aspect: Option<(wgt::TextureAspect, Arc<TextureView>)>,
multiview: Option<NonZeroU32>,
}
impl<'d> RenderPassInfo<'d> {
impl RenderPassInfo {
fn add_pass_texture_init_actions<V>(
load_op: LoadOp<V>,
store_op: StoreOp,
@ -884,7 +875,7 @@ impl<'d> RenderPassInfo<'d> {
}
fn start(
device: &'d Arc<Device>,
device: &Arc<Device>,
hal_label: Option<&str>,
color_attachments: ArrayVec<
Option<ArcRenderPassColorAttachment>,
@ -897,6 +888,7 @@ impl<'d> RenderPassInfo<'d> {
trackers: &mut Tracker,
texture_memory_actions: &mut CommandBufferTextureMemoryActions,
pending_query_resets: &mut QueryResetMap,
pending_discard_init_fixups: &mut SurfacesInDiscardState,
snatch_guard: &SnatchGuard<'_>,
) -> Result<Self, RenderPassErrorInner> {
profiling::scope!("RenderPassInfo::start");
@ -909,7 +901,6 @@ impl<'d> RenderPassInfo<'d> {
let mut render_attachments = AttachmentDataVec::<RenderAttachment>::new();
let mut discarded_surfaces = AttachmentDataVec::new();
let mut pending_discard_init_fixups = SurfacesInDiscardState::new();
let mut divergent_discarded_depth_stencil_aspect = None;
let mut attachment_location = AttachmentErrorLocation::Color {
@ -1000,7 +991,7 @@ impl<'d> RenderPassInfo<'d> {
at.depth.store_op(),
texture_memory_actions,
view,
&mut pending_discard_init_fixups,
pending_discard_init_fixups,
);
} else if !ds_aspects.contains(hal::FormatAspects::DEPTH) {
Self::add_pass_texture_init_actions(
@ -1008,7 +999,7 @@ impl<'d> RenderPassInfo<'d> {
at.stencil.store_op(),
texture_memory_actions,
view,
&mut pending_discard_init_fixups,
pending_discard_init_fixups,
);
} else {
// This is the only place (anywhere in wgpu) where Stencil &
@ -1206,7 +1197,7 @@ impl<'d> RenderPassInfo<'d> {
at.store_op,
texture_memory_actions,
color_view,
&mut pending_discard_init_fixups,
pending_discard_init_fixups,
);
render_attachments
.push(color_view.to_render_attachment(wgt::TextureUses::COLOR_TARGET));
@ -1380,23 +1371,22 @@ impl<'d> RenderPassInfo<'d> {
Ok(Self {
context,
usage_scope: device.new_usage_scope(),
render_attachments,
is_depth_read_only,
is_stencil_read_only,
extent,
pending_discard_init_fixups,
divergent_discarded_depth_stencil_aspect,
multiview,
})
}
fn finish(
mut self,
self,
device: &Device,
raw: &mut dyn hal::DynCommandEncoder,
snatch_guard: &SnatchGuard,
) -> Result<(UsageScope<'d>, SurfacesInDiscardState), RenderPassErrorInner> {
scope: &mut UsageScope<'_>,
) -> Result<(), RenderPassErrorInner> {
profiling::scope!("RenderPassInfo::finish");
unsafe {
raw.end_render_pass();
@ -1408,11 +1398,9 @@ impl<'d> RenderPassInfo<'d> {
// the tracker set of the pass is always in "extend" mode
unsafe {
self.usage_scope.textures.merge_single(
texture,
Some(ra.selector.clone()),
ra.usage,
)?
scope
.textures
.merge_single(texture, Some(ra.selector.clone()), ra.usage)?
};
}
@ -1462,7 +1450,7 @@ impl<'d> RenderPassInfo<'d> {
}
}
Ok((self.usage_scope, self.pending_discard_init_fixups))
Ok(())
}
}
@ -1798,6 +1786,8 @@ impl Global {
.open_pass(base.label.as_deref())
.map_pass_err(pass_scope)?;
let mut pending_discard_init_fixups = SurfacesInDiscardState::new();
let info = RenderPassInfo::start(
device,
hal_label(base.label.as_deref(), device.instance_flags),
@ -1811,6 +1801,7 @@ impl Global {
tracker,
texture_memory_actions,
pending_query_resets,
&mut pending_discard_init_fixups,
snatch_guard,
)
.map_pass_err(pass_scope)?;
@ -1821,7 +1812,6 @@ impl Global {
let mut state = State {
pipeline_flags: PipelineFlags::empty(),
binder: Binder::new(),
blend_constant: OptionalState::Unused,
stencil_reference: 0,
pipeline: None,
@ -1831,16 +1821,22 @@ impl Global {
info,
snatch_guard,
general: pass::BaseState {
device,
raw_encoder: encoder.raw.as_mut(),
tracker,
buffer_memory_init_actions,
texture_memory_actions,
as_actions: &mut cmd_buf_data.as_actions,
pending_discard_init_fixups,
scope: device.new_usage_scope(),
binder: Binder::new(),
device,
raw_encoder: encoder.raw.as_mut(),
tracker,
buffer_memory_init_actions,
texture_memory_actions,
snatch_guard,
temp_offsets: Vec::new(),
dynamic_offset_count: 0,
temp_offsets: Vec::new(),
dynamic_offset_count: 0,
},
string_offset: 0,
active_occlusion_query: None,
@ -1855,13 +1851,14 @@ impl Global {
bind_group,
} => {
let scope = PassErrorScope::SetBindGroup;
set_bind_group(
&mut state,
&cmd_buf,
pass::set_bind_group::<RenderPassErrorInner>(
&mut state.general,
cmd_buf.as_ref(),
&base.dynamic_offsets,
index,
num_dynamic_offsets,
bind_group,
true,
)
.map_pass_err(scope)?;
}
@ -2061,8 +2058,8 @@ impl Global {
validate_and_begin_occlusion_query(
query_set,
state.raw_encoder,
&mut state.tracker.query_sets,
state.general.raw_encoder,
&mut state.general.tracker.query_sets,
query_index,
Some(&mut cmd_buf_data.pending_query_resets),
&mut state.active_occlusion_query,
@ -2074,7 +2071,7 @@ impl Global {
let scope = PassErrorScope::EndOcclusionQuery;
end_occlusion_query(
state.raw_encoder,
state.general.raw_encoder,
&mut state.active_occlusion_query,
)
.map_pass_err(scope)?;
@ -2091,8 +2088,8 @@ impl Global {
validate_and_begin_pipeline_statistics_query(
query_set,
state.raw_encoder,
&mut state.tracker.query_sets,
state.general.raw_encoder,
&mut state.general.tracker.query_sets,
cmd_buf.as_ref(),
query_index,
Some(&mut cmd_buf_data.pending_query_resets),
@ -2105,7 +2102,7 @@ impl Global {
let scope = PassErrorScope::EndPipelineStatisticsQuery;
end_pipeline_statistics_query(
state.raw_encoder,
state.general.raw_encoder,
&mut state.active_pipeline_statistics_query,
)
.map_pass_err(scope)?;
@ -2124,11 +2121,20 @@ impl Global {
}
}
let (trackers, pending_discard_init_fixups) = state
state
.info
.finish(device, state.raw_encoder, state.snatch_guard)
.finish(
device,
state.general.raw_encoder,
state.general.snatch_guard,
&mut state.general.scope,
)
.map_pass_err(pass_scope)?;
let trackers = state.general.scope;
let pending_discard_init_fixups = state.general.pending_discard_init_fixups;
encoder.close().map_pass_err(pass_scope)?;
(trackers, pending_discard_init_fixups)
};
@ -2175,97 +2181,6 @@ impl Global {
}
}
fn set_bind_group(
state: &mut State,
cmd_buf: &Arc<CommandBuffer>,
dynamic_offsets: &[DynamicOffset],
index: u32,
num_dynamic_offsets: usize,
bind_group: Option<Arc<BindGroup>>,
) -> Result<(), RenderPassErrorInner> {
if bind_group.is_none() {
api_log!("RenderPass::set_bind_group {index} None");
} else {
api_log!(
"RenderPass::set_bind_group {index} {}",
bind_group.as_ref().unwrap().error_ident()
);
}
let max_bind_groups = state.device.limits.max_bind_groups;
if index >= max_bind_groups {
return Err(RenderCommandError::BindGroupIndexOutOfRange {
index,
max: max_bind_groups,
}
.into());
}
state.temp_offsets.clear();
state.temp_offsets.extend_from_slice(
&dynamic_offsets
[state.dynamic_offset_count..state.dynamic_offset_count + num_dynamic_offsets],
);
state.dynamic_offset_count += num_dynamic_offsets;
if bind_group.is_none() {
// TODO: Handle bind_group None.
return Ok(());
}
let bind_group = bind_group.unwrap();
let bind_group = state.tracker.bind_groups.insert_single(bind_group);
bind_group.same_device_as(cmd_buf.as_ref())?;
bind_group.validate_dynamic_bindings(index, &state.temp_offsets)?;
// merge the resource tracker in
unsafe {
state.info.usage_scope.merge_bind_group(&bind_group.used)?;
}
//Note: stateless trackers are not merged: the lifetime reference
// is held to the bind group itself.
state
.buffer_memory_init_actions
.extend(bind_group.used_buffer_ranges.iter().filter_map(|action| {
action
.buffer
.initialization_status
.read()
.check_action(action)
}));
for action in bind_group.used_texture_ranges.iter() {
state
.info
.pending_discard_init_fixups
.extend(state.texture_memory_actions.register_init_action(action));
}
let pipeline_layout = state.binder.pipeline_layout.clone();
let entries = state
.binder
.assign_group(index as usize, bind_group, &state.temp_offsets);
if !entries.is_empty() && pipeline_layout.is_some() {
let pipeline_layout = pipeline_layout.as_ref().unwrap().raw();
for (i, e) in entries.iter().enumerate() {
if let Some(group) = e.group.as_ref() {
let raw_bg = group.try_raw(state.snatch_guard)?;
unsafe {
state.raw_encoder.set_bind_group(
pipeline_layout,
index + i as u32,
Some(raw_bg),
&e.dynamic_offsets,
);
}
}
}
}
Ok(())
}
fn set_pipeline(
state: &mut State,
cmd_buf: &Arc<CommandBuffer>,
@ -2275,7 +2190,11 @@ fn set_pipeline(
state.pipeline = Some(pipeline.clone());
let pipeline = state.tracker.render_pipelines.insert_single(pipeline);
let pipeline = state
.general
.tracker
.render_pipelines
.insert_single(pipeline);
pipeline.same_device_as(cmd_buf.as_ref())?;
@ -2299,20 +2218,25 @@ fn set_pipeline(
.require(pipeline.flags.contains(PipelineFlags::BLEND_CONSTANT));
unsafe {
state.raw_encoder.set_render_pipeline(pipeline.raw());
state
.general
.raw_encoder
.set_render_pipeline(pipeline.raw());
}
if pipeline.flags.contains(PipelineFlags::STENCIL_REFERENCE) {
unsafe {
state
.general
.raw_encoder
.set_stencil_reference(state.stencil_reference);
}
}
// Rebind resource
if state.binder.pipeline_layout.is_none()
if state.general.binder.pipeline_layout.is_none()
|| !state
.general
.binder
.pipeline_layout
.as_ref()
@ -2320,14 +2244,15 @@ fn set_pipeline(
.is_equal(&pipeline.layout)
{
let (start_index, entries) = state
.general
.binder
.change_pipeline_layout(&pipeline.layout, &pipeline.late_sized_buffer_groups);
if !entries.is_empty() {
for (i, e) in entries.iter().enumerate() {
if let Some(group) = e.group.as_ref() {
let raw_bg = group.try_raw(state.snatch_guard)?;
let raw_bg = group.try_raw(state.general.snatch_guard)?;
unsafe {
state.raw_encoder.set_bind_group(
state.general.raw_encoder.set_bind_group(
pipeline.layout.raw(),
start_index as u32 + i as u32,
Some(raw_bg),
@ -2345,7 +2270,7 @@ fn set_pipeline(
let offset = range.range.start;
let size_bytes = range.range.end - offset;
super::push_constant_clear(offset, size_bytes, |clear_offset, clear_data| unsafe {
state.raw_encoder.set_push_constants(
state.general.raw_encoder.set_push_constants(
pipeline.layout.raw(),
range.stages,
clear_offset,
@ -2371,15 +2296,15 @@ fn set_index_buffer(
api_log!("RenderPass::set_index_buffer {}", buffer.error_ident());
state
.info
.usage_scope
.general
.scope
.buffers
.merge_single(&buffer, wgt::BufferUses::INDEX)?;
buffer.same_device_as(cmd_buf.as_ref())?;
buffer.check_usage(BufferUsages::INDEX)?;
let buf_raw = buffer.try_raw(state.snatch_guard)?;
let buf_raw = buffer.try_raw(state.general.snatch_guard)?;
let end = match size {
Some(s) => offset + s.get(),
@ -2387,13 +2312,13 @@ fn set_index_buffer(
};
state.index.update_buffer(offset..end, index_format);
state
.buffer_memory_init_actions
.extend(buffer.initialization_status.read().create_action(
state.general.buffer_memory_init_actions.extend(
buffer.initialization_status.read().create_action(
&buffer,
offset..end,
MemoryInitKind::NeedsInitializedMemory,
));
),
);
let bb = hal::BufferBinding {
buffer: buf_raw,
@ -2401,7 +2326,7 @@ fn set_index_buffer(
size,
};
unsafe {
hal::DynCommandEncoder::set_index_buffer(state.raw_encoder, bb, index_format);
hal::DynCommandEncoder::set_index_buffer(state.general.raw_encoder, bb, index_format);
}
Ok(())
}
@ -2420,14 +2345,14 @@ fn set_vertex_buffer(
);
state
.info
.usage_scope
.general
.scope
.buffers
.merge_single(&buffer, wgt::BufferUses::VERTEX)?;
buffer.same_device_as(cmd_buf.as_ref())?;
let max_vertex_buffers = state.device.limits.max_vertex_buffers;
let max_vertex_buffers = state.general.device.limits.max_vertex_buffers;
if slot >= max_vertex_buffers {
return Err(RenderCommandError::VertexBufferIndexOutOfRange {
index: slot,
@ -2437,7 +2362,7 @@ fn set_vertex_buffer(
}
buffer.check_usage(BufferUsages::VERTEX)?;
let buf_raw = buffer.try_raw(state.snatch_guard)?;
let buf_raw = buffer.try_raw(state.general.snatch_guard)?;
//TODO: where are we checking that the offset is in bound?
let buffer_size = match size {
@ -2446,13 +2371,13 @@ fn set_vertex_buffer(
};
state.vertex.buffer_sizes[slot as usize] = Some(buffer_size);
state
.buffer_memory_init_actions
.extend(buffer.initialization_status.read().create_action(
state.general.buffer_memory_init_actions.extend(
buffer.initialization_status.read().create_action(
&buffer,
offset..(offset + buffer_size),
MemoryInitKind::NeedsInitializedMemory,
));
),
);
let bb = hal::BufferBinding {
buffer: buf_raw,
@ -2460,7 +2385,7 @@ fn set_vertex_buffer(
size,
};
unsafe {
hal::DynCommandEncoder::set_vertex_buffer(state.raw_encoder, slot, bb);
hal::DynCommandEncoder::set_vertex_buffer(state.general.raw_encoder, slot, bb);
}
if let Some(pipeline) = state.pipeline.as_ref() {
state.vertex.update_limits(&pipeline.vertex_steps);
@ -2479,7 +2404,7 @@ fn set_blend_constant(state: &mut State, color: &Color) {
color.a as f32,
];
unsafe {
state.raw_encoder.set_blend_constants(&array);
state.general.raw_encoder.set_blend_constants(&array);
}
}
@ -2492,7 +2417,7 @@ fn set_stencil_reference(state: &mut State, value: u32) {
.contains(PipelineFlags::STENCIL_REFERENCE)
{
unsafe {
state.raw_encoder.set_stencil_reference(value);
state.general.raw_encoder.set_stencil_reference(value);
}
}
}
@ -2507,18 +2432,18 @@ fn set_viewport(
if rect.w < 0.0
|| rect.h < 0.0
|| rect.w > state.device.limits.max_texture_dimension_2d as f32
|| rect.h > state.device.limits.max_texture_dimension_2d as f32
|| rect.w > state.general.device.limits.max_texture_dimension_2d as f32
|| rect.h > state.general.device.limits.max_texture_dimension_2d as f32
{
return Err(RenderCommandError::InvalidViewportRectSize {
w: rect.w,
h: rect.h,
max: state.device.limits.max_texture_dimension_2d,
max: state.general.device.limits.max_texture_dimension_2d,
}
.into());
}
let max_viewport_range = state.device.limits.max_texture_dimension_2d as f32 * 2.0;
let max_viewport_range = state.general.device.limits.max_texture_dimension_2d as f32 * 2.0;
if rect.x < -max_viewport_range
|| rect.y < -max_viewport_range
@ -2542,7 +2467,10 @@ fn set_viewport(
h: rect.h,
};
unsafe {
state.raw_encoder.set_viewport(&r, depth_min..depth_max);
state
.general
.raw_encoder
.set_viewport(&r, depth_min..depth_max);
}
Ok(())
}
@ -2564,6 +2492,7 @@ fn set_push_constant(
let data_slice = &push_constant_data[(values_offset as usize)..values_end_offset];
let pipeline_layout = state
.general
.binder
.pipeline_layout
.as_ref()
@ -2574,9 +2503,12 @@ fn set_push_constant(
.map_err(RenderCommandError::from)?;
unsafe {
state
.raw_encoder
.set_push_constants(pipeline_layout.raw(), stages, offset, data_slice)
state.general.raw_encoder.set_push_constants(
pipeline_layout.raw(),
stages,
offset,
data_slice,
)
}
Ok(())
}
@ -2596,7 +2528,7 @@ fn set_scissor(state: &mut State, rect: Rect<u32>) -> Result<(), RenderPassError
h: rect.h,
};
unsafe {
state.raw_encoder.set_scissor_rect(&r);
state.general.raw_encoder.set_scissor_rect(&r);
}
Ok(())
}
@ -2623,9 +2555,12 @@ fn draw(
unsafe {
if instance_count > 0 && vertex_count > 0 {
state
.raw_encoder
.draw(first_vertex, vertex_count, first_instance, instance_count);
state.general.raw_encoder.draw(
first_vertex,
vertex_count,
first_instance,
instance_count,
);
}
}
Ok(())
@ -2658,7 +2593,7 @@ fn draw_indexed(
unsafe {
if instance_count > 0 && index_count > 0 {
state.raw_encoder.draw_indexed(
state.general.raw_encoder.draw_indexed(
first_index,
index_count,
base_vertex,
@ -2689,17 +2624,19 @@ fn multi_draw_indirect(
if count != 1 {
state
.general
.device
.require_features(wgt::Features::MULTI_DRAW_INDIRECT)?;
}
state
.general
.device
.require_downlevel_flags(wgt::DownlevelFlags::INDIRECT_EXECUTION)?;
indirect_buffer.same_device_as(cmd_buf.as_ref())?;
indirect_buffer.check_usage(BufferUsages::INDIRECT)?;
indirect_buffer.check_destroyed(state.snatch_guard)?;
indirect_buffer.check_destroyed(state.general.snatch_guard)?;
if offset % 4 != 0 {
return Err(RenderPassErrorInner::UnalignedIndirectBufferOffset(offset));
@ -2717,7 +2654,7 @@ fn multi_draw_indirect(
});
}
state.buffer_memory_init_actions.extend(
state.general.buffer_memory_init_actions.extend(
indirect_buffer.initialization_status.read().create_action(
&indirect_buffer,
offset..end_offset,
@ -2742,10 +2679,10 @@ fn multi_draw_indirect(
}
}
if state.device.indirect_validation.is_some() {
if state.general.device.indirect_validation.is_some() {
state
.info
.usage_scope
.general
.scope
.buffers
.merge_single(&indirect_buffer, wgt::BufferUses::STORAGE_READ_ONLY)?;
@ -2800,8 +2737,8 @@ fn multi_draw_indirect(
}
let mut draw_ctx = DrawContext {
raw_encoder: state.raw_encoder,
device: state.device,
raw_encoder: state.general.raw_encoder,
device: state.general.device,
indirect_draw_validation_resources,
indirect_draw_validation_batcher,
indirect_buffer,
@ -2834,15 +2771,15 @@ fn multi_draw_indirect(
draw_ctx.draw(current_draw_data);
} else {
state
.info
.usage_scope
.general
.scope
.buffers
.merge_single(&indirect_buffer, wgt::BufferUses::INDIRECT)?;
draw(
state.raw_encoder,
state.general.raw_encoder,
indexed,
indirect_buffer.try_raw(state.snatch_guard)?,
indirect_buffer.try_raw(state.general.snatch_guard)?,
offset,
count,
);
@ -2872,9 +2809,11 @@ fn multi_draw_indirect_count(
let stride = get_stride_of_indirect_args(indexed);
state
.general
.device
.require_features(wgt::Features::MULTI_DRAW_INDIRECT_COUNT)?;
state
.general
.device
.require_downlevel_flags(wgt::DownlevelFlags::INDIRECT_EXECUTION)?;
@ -2882,22 +2821,22 @@ fn multi_draw_indirect_count(
count_buffer.same_device_as(cmd_buf.as_ref())?;
state
.info
.usage_scope
.general
.scope
.buffers
.merge_single(&indirect_buffer, wgt::BufferUses::INDIRECT)?;
indirect_buffer.check_usage(BufferUsages::INDIRECT)?;
let indirect_raw = indirect_buffer.try_raw(state.snatch_guard)?;
let indirect_raw = indirect_buffer.try_raw(state.general.snatch_guard)?;
state
.info
.usage_scope
.general
.scope
.buffers
.merge_single(&count_buffer, wgt::BufferUses::INDIRECT)?;
count_buffer.check_usage(BufferUsages::INDIRECT)?;
let count_raw = count_buffer.try_raw(state.snatch_guard)?;
let count_raw = count_buffer.try_raw(state.general.snatch_guard)?;
if offset % 4 != 0 {
return Err(RenderPassErrorInner::UnalignedIndirectBufferOffset(offset));
@ -2912,7 +2851,7 @@ fn multi_draw_indirect_count(
buffer_size: indirect_buffer.size,
});
}
state.buffer_memory_init_actions.extend(
state.general.buffer_memory_init_actions.extend(
indirect_buffer.initialization_status.read().create_action(
&indirect_buffer,
offset..end_offset,
@ -2929,7 +2868,7 @@ fn multi_draw_indirect_count(
count_buffer_size: count_buffer.size,
});
}
state.buffer_memory_init_actions.extend(
state.general.buffer_memory_init_actions.extend(
count_buffer.initialization_status.read().create_action(
&count_buffer,
count_buffer_offset..end_count_offset,
@ -2939,7 +2878,7 @@ fn multi_draw_indirect_count(
match indexed {
false => unsafe {
state.raw_encoder.draw_indirect_count(
state.general.raw_encoder.draw_indirect_count(
indirect_raw,
offset,
count_raw,
@ -2948,7 +2887,7 @@ fn multi_draw_indirect_count(
);
},
true => unsafe {
state.raw_encoder.draw_indexed_indirect_count(
state.general.raw_encoder.draw_indexed_indirect_count(
indirect_raw,
offset,
count_raw,
@ -2963,6 +2902,7 @@ fn multi_draw_indirect_count(
fn push_debug_group(state: &mut State, string_data: &[u8], len: usize) {
state.debug_scope_depth += 1;
if !state
.general
.device
.instance_flags
.contains(wgt::InstanceFlags::DISCARD_HAL_LABELS)
@ -2972,7 +2912,7 @@ fn push_debug_group(state: &mut State, string_data: &[u8], len: usize) {
api_log!("RenderPass::push_debug_group {label:?}");
unsafe {
state.raw_encoder.begin_debug_marker(label);
state.general.raw_encoder.begin_debug_marker(label);
}
}
state.string_offset += len;
@ -2986,12 +2926,13 @@ fn pop_debug_group(state: &mut State) -> Result<(), RenderPassErrorInner> {
}
state.debug_scope_depth -= 1;
if !state
.general
.device
.instance_flags
.contains(wgt::InstanceFlags::DISCARD_HAL_LABELS)
{
unsafe {
state.raw_encoder.end_debug_marker();
state.general.raw_encoder.end_debug_marker();
}
}
Ok(())
@ -2999,6 +2940,7 @@ fn pop_debug_group(state: &mut State) -> Result<(), RenderPassErrorInner> {
fn insert_debug_marker(state: &mut State, string_data: &[u8], len: usize) {
if !state
.general
.device
.instance_flags
.contains(wgt::InstanceFlags::DISCARD_HAL_LABELS)
@ -3007,7 +2949,7 @@ fn insert_debug_marker(state: &mut State, string_data: &[u8], len: usize) {
str::from_utf8(&string_data[state.string_offset..state.string_offset + len]).unwrap();
api_log!("RenderPass::insert_debug_marker {label:?}");
unsafe {
state.raw_encoder.insert_debug_marker(label);
state.general.raw_encoder.insert_debug_marker(label);
}
}
state.string_offset += len;
@ -3028,13 +2970,14 @@ fn write_timestamp(
query_set.same_device_as(cmd_buf)?;
state
.general
.device
.require_features(wgt::Features::TIMESTAMP_QUERY_INSIDE_PASSES)?;
let query_set = state.tracker.query_sets.insert_single(query_set);
let query_set = state.general.tracker.query_sets.insert_single(query_set);
query_set.validate_and_write_timestamp(
state.raw_encoder,
state.general.raw_encoder,
query_index,
Some(pending_query_resets),
)?;
@ -3050,7 +2993,7 @@ fn execute_bundle(
) -> Result<(), RenderPassErrorInner> {
api_log!("RenderPass::execute_bundle {}", bundle.error_ident());
let bundle = state.tracker.bundles.insert_single(bundle);
let bundle = state.general.tracker.bundles.insert_single(bundle);
bundle.same_device_as(cmd_buf.as_ref())?;
@ -3073,33 +3016,33 @@ fn execute_bundle(
);
}
state
.buffer_memory_init_actions
.extend(
bundle
.buffer_memory_init_actions
.iter()
.filter_map(|action| {
action
.buffer
.initialization_status
.read()
.check_action(action)
}),
);
state.general.buffer_memory_init_actions.extend(
bundle
.buffer_memory_init_actions
.iter()
.filter_map(|action| {
action
.buffer
.initialization_status
.read()
.check_action(action)
}),
);
for action in bundle.texture_memory_init_actions.iter() {
state
.info
.pending_discard_init_fixups
.extend(state.texture_memory_actions.register_init_action(action));
state.general.pending_discard_init_fixups.extend(
state
.general
.texture_memory_actions
.register_init_action(action),
);
}
unsafe {
bundle.execute(
state.raw_encoder,
state.general.raw_encoder,
indirect_draw_validation_resources,
indirect_draw_validation_batcher,
state.snatch_guard,
state.general.snatch_guard,
)
}
.map_err(|e| match e {
@ -3113,7 +3056,7 @@ fn execute_bundle(
})?;
unsafe {
state.info.usage_scope.merge_render_bundle(&bundle.used)?;
state.general.scope.merge_render_bundle(&bundle.used)?;
};
state.reset_bundle();
Ok(())