[core] Move as_hal methods to their own file

This commit is contained in:
Connor Fitzgerald 2025-06-26 18:13:29 -04:00
parent 8148ac6883
commit 795fe3bf27
3 changed files with 245 additions and 243 deletions

239
wgpu-core/src/as_hal.rs Normal file
View File

@ -0,0 +1,239 @@
use crate::{
global::Global,
hal_api::HalApi,
id::{
AdapterId, BlasId, BufferId, CommandEncoderId, DeviceId, QueueId, SurfaceId, TextureId,
TextureViewId, TlasId,
},
resource::AccelerationStructure,
};
impl Global {
    /// Invokes `hal_buffer_callback` with the backend-`A` HAL buffer behind `id`,
    /// or `None` if the id is invalid, the buffer was destroyed, or `A` is not
    /// the buffer's backend.
    ///
    /// # Safety
    ///
    /// - The raw buffer handle must not be manually destroyed
    pub unsafe fn buffer_as_hal<A: HalApi, F: FnOnce(Option<&A::Buffer>) -> R, R>(
        &self,
        id: BufferId,
        hal_buffer_callback: F,
    ) -> R {
        profiling::scope!("Buffer::as_hal");

        let hub = &self.hub;

        if let Ok(buffer) = hub.buffers.get(id).get() {
            // Hold the snatchable lock for the duration of the callback so the
            // raw handle cannot be snatched (destroyed) while it is observed.
            let snatch_guard = buffer.device.snatchable_lock.read();
            let hal_buffer = buffer
                .raw(&snatch_guard)
                .and_then(|b| b.as_any().downcast_ref());
            hal_buffer_callback(hal_buffer)
        } else {
            hal_buffer_callback(None)
        }
    }

    /// Invokes `hal_texture_callback` with the backend-`A` HAL texture behind
    /// `id`, or `None` if unavailable.
    ///
    /// # Safety
    ///
    /// - The raw texture handle must not be manually destroyed
    pub unsafe fn texture_as_hal<A: HalApi, F: FnOnce(Option<&A::Texture>) -> R, R>(
        &self,
        id: TextureId,
        hal_texture_callback: F,
    ) -> R {
        profiling::scope!("Texture::as_hal");

        let hub = &self.hub;

        if let Ok(texture) = hub.textures.get(id).get() {
            // Guard against the raw handle being snatched mid-callback.
            let snatch_guard = texture.device.snatchable_lock.read();
            let hal_texture = texture.raw(&snatch_guard);
            let hal_texture = hal_texture
                .as_ref()
                .and_then(|it| it.as_any().downcast_ref());
            hal_texture_callback(hal_texture)
        } else {
            hal_texture_callback(None)
        }
    }

    /// Invokes `hal_texture_view_callback` with the backend-`A` HAL texture
    /// view behind `id`, or `None` if unavailable.
    ///
    /// # Safety
    ///
    /// - The raw texture view handle must not be manually destroyed
    pub unsafe fn texture_view_as_hal<A: HalApi, F: FnOnce(Option<&A::TextureView>) -> R, R>(
        &self,
        id: TextureViewId,
        hal_texture_view_callback: F,
    ) -> R {
        profiling::scope!("TextureView::as_hal");

        let hub = &self.hub;

        if let Ok(texture_view) = hub.texture_views.get(id).get() {
            // Guard against the raw handle being snatched mid-callback.
            let snatch_guard = texture_view.device.snatchable_lock.read();
            let hal_texture_view = texture_view.raw(&snatch_guard);
            let hal_texture_view = hal_texture_view
                .as_ref()
                .and_then(|it| it.as_any().downcast_ref());
            hal_texture_view_callback(hal_texture_view)
        } else {
            hal_texture_view_callback(None)
        }
    }

    /// Invokes `hal_adapter_callback` with the backend-`A` HAL adapter behind
    /// `id`, or `None` if `A` is not the adapter's backend.
    ///
    /// # Safety
    ///
    /// - The raw adapter handle must not be manually destroyed
    pub unsafe fn adapter_as_hal<A: HalApi, F: FnOnce(Option<&A::Adapter>) -> R, R>(
        &self,
        id: AdapterId,
        hal_adapter_callback: F,
    ) -> R {
        profiling::scope!("Adapter::as_hal");

        let hub = &self.hub;
        let adapter = hub.adapters.get(id);
        // Downcast fails (yielding None) when `A` does not match the backend.
        let hal_adapter = adapter.raw.adapter.as_any().downcast_ref();

        hal_adapter_callback(hal_adapter)
    }

    /// Invokes `hal_device_callback` with the backend-`A` HAL device behind
    /// `id`, or `None` if `A` is not the device's backend.
    ///
    /// # Safety
    ///
    /// - The raw device handle must not be manually destroyed
    pub unsafe fn device_as_hal<A: HalApi, F: FnOnce(Option<&A::Device>) -> R, R>(
        &self,
        id: DeviceId,
        hal_device_callback: F,
    ) -> R {
        profiling::scope!("Device::as_hal");

        let device = self.hub.devices.get(id);
        let hal_device = device.raw().as_any().downcast_ref();

        hal_device_callback(hal_device)
    }

    /// Invokes `hal_fence_callback` with the backend-`A` HAL fence of the
    /// device behind `id`, or `None` if `A` is not the device's backend.
    ///
    /// # Safety
    ///
    /// - The raw fence handle must not be manually destroyed
    pub unsafe fn device_fence_as_hal<A: HalApi, F: FnOnce(Option<&A::Fence>) -> R, R>(
        &self,
        id: DeviceId,
        hal_fence_callback: F,
    ) -> R {
        profiling::scope!("Device::fence_as_hal");

        let device = self.hub.devices.get(id);
        // Keep the fence read lock held while the callback runs.
        let fence = device.fence.read();

        hal_fence_callback(fence.as_any().downcast_ref())
    }

    /// Invokes `hal_surface_callback` with the backend-`A` HAL surface behind
    /// `id`, or `None` if the surface has no raw handle for backend `A`.
    ///
    /// # Safety
    ///
    /// - The raw surface handle must not be manually destroyed
    pub unsafe fn surface_as_hal<A: HalApi, F: FnOnce(Option<&A::Surface>) -> R, R>(
        &self,
        id: SurfaceId,
        hal_surface_callback: F,
    ) -> R {
        profiling::scope!("Surface::as_hal");

        let surface = self.surfaces.get(id);
        let hal_surface = surface
            .raw(A::VARIANT)
            .and_then(|surface| surface.as_any().downcast_ref());

        hal_surface_callback(hal_surface)
    }

    /// Invokes `hal_command_encoder_callback` with a mutable reference to the
    /// backend-`A` HAL command encoder behind `id`, or `None` if the encoder
    /// cannot be opened or `A` is not its backend.
    ///
    /// # Safety
    ///
    /// - The raw command encoder handle must not be manually destroyed
    pub unsafe fn command_encoder_as_hal_mut<
        A: HalApi,
        F: FnOnce(Option<&mut A::CommandEncoder>) -> R,
        R,
    >(
        &self,
        id: CommandEncoderId,
        hal_command_encoder_callback: F,
    ) -> R {
        profiling::scope!("CommandEncoder::as_hal");

        let hub = &self.hub;

        let cmd_buf = hub.command_buffers.get(id.into_command_buffer_id());
        let mut cmd_buf_data = cmd_buf.data.lock();
        cmd_buf_data.record_as_hal_mut(|opt_cmd_buf| -> R {
            hal_command_encoder_callback(opt_cmd_buf.and_then(|cmd_buf| {
                cmd_buf
                    .encoder
                    .open()
                    .ok()
                    .and_then(|encoder| encoder.as_any_mut().downcast_mut())
            }))
        })
    }

    /// Invokes `hal_queue_callback` with the backend-`A` HAL queue behind
    /// `id`, or `None` if `A` is not the queue's backend.
    ///
    /// # Safety
    ///
    /// - The raw queue handle must not be manually destroyed
    pub unsafe fn queue_as_hal<A: HalApi, F, R>(&self, id: QueueId, hal_queue_callback: F) -> R
    where
        F: FnOnce(Option<&A::Queue>) -> R,
    {
        profiling::scope!("Queue::as_hal");

        let queue = self.hub.queues.get(id);
        let hal_queue = queue.raw().as_any().downcast_ref();

        hal_queue_callback(hal_queue)
    }

    /// Invokes `hal_blas_callback` with the backend-`A` HAL acceleration
    /// structure behind the BLAS `id`, or `None` if unavailable.
    ///
    /// # Safety
    ///
    /// - The raw blas handle must not be manually destroyed
    pub unsafe fn blas_as_hal<A: HalApi, F: FnOnce(Option<&A::AccelerationStructure>) -> R, R>(
        &self,
        id: BlasId,
        hal_blas_callback: F,
    ) -> R {
        profiling::scope!("Blas::as_hal");

        let hub = &self.hub;

        if let Ok(blas) = hub.blas_s.get(id).get() {
            // Guard against the raw handle being snatched mid-callback.
            let snatch_guard = blas.device.snatchable_lock.read();
            let hal_blas = blas
                .try_raw(&snatch_guard)
                .ok()
                .and_then(|b| b.as_any().downcast_ref());
            hal_blas_callback(hal_blas)
        } else {
            hal_blas_callback(None)
        }
    }

    /// Invokes `hal_tlas_callback` with the backend-`A` HAL acceleration
    /// structure behind the TLAS `id`, or `None` if unavailable.
    ///
    /// # Safety
    ///
    /// - The raw tlas handle must not be manually destroyed
    pub unsafe fn tlas_as_hal<A: HalApi, F: FnOnce(Option<&A::AccelerationStructure>) -> R, R>(
        &self,
        id: TlasId,
        hal_tlas_callback: F,
    ) -> R {
        // Fixed copy-paste bug: this scope was previously mislabeled "Blas::as_hal",
        // which made TLAS lookups indistinguishable from BLAS lookups in profiles.
        profiling::scope!("Tlas::as_hal");

        let hub = &self.hub;

        if let Ok(tlas) = hub.tlas_s.get(id).get() {
            // Guard against the raw handle being snatched mid-callback.
            let snatch_guard = tlas.device.snatchable_lock.read();
            let hal_tlas = tlas
                .try_raw(&snatch_guard)
                .ok()
                .and_then(|t| t.as_any().downcast_ref());
            hal_tlas_callback(hal_tlas)
        } else {
            hal_tlas_callback(None)
        }
    }
}

View File

@ -65,6 +65,7 @@ extern crate std;
extern crate wgpu_hal as hal;
extern crate wgpu_types as wgt;
mod as_hal;
pub mod binding_model;
pub mod command;
mod conv;

View File

@ -22,12 +22,6 @@ use crate::{
queue, resource::DeferredDestroy, BufferMapPendingClosure, Device, DeviceError,
DeviceMismatch, HostMap, MissingDownlevelFlags, MissingFeatures,
},
global::Global,
hal_api::HalApi,
id::{
AdapterId, BufferId, CommandEncoderId, DeviceId, QueueId, SurfaceId, TextureId,
TextureViewId,
},
init_tracker::{BufferInitTracker, TextureInitTracker},
lock::{rank, Mutex, RwLock},
ray_tracing::{BlasCompactReadyPendingClosure, BlasPrepareCompactError},
@ -39,8 +33,6 @@ use crate::{
Label, LabelHelpers, SubmissionIndex,
};
use crate::id::{BlasId, TlasId};
/// Information about the wgpu-core resource.
///
/// Each type representing a `wgpu-core` resource, like [`Device`],
@ -647,7 +639,7 @@ impl Buffer {
// Note: This must not be called while holding a lock.
pub(crate) fn unmap(
self: &Arc<Self>,
#[cfg(feature = "trace")] buffer_id: BufferId,
#[cfg(feature = "trace")] buffer_id: crate::id::BufferId,
) -> Result<(), BufferAccessError> {
if let Some((mut operation, status)) = self.unmap_inner(
#[cfg(feature = "trace")]
@ -663,7 +655,7 @@ impl Buffer {
fn unmap_inner(
self: &Arc<Self>,
#[cfg(feature = "trace")] buffer_id: BufferId,
#[cfg(feature = "trace")] buffer_id: crate::id::BufferId,
) -> Result<Option<BufferMapPendingClosure>, BufferAccessError> {
let device = &self.device;
let snatch_guard = device.snatchable_lock.read();
@ -915,9 +907,9 @@ unsafe impl Sync for StagingBuffer {}
/// freed once their associated operation's queue submission has finished
/// execution.
///
/// [`queue_create_staging_buffer`]: Global::queue_create_staging_buffer
/// [`queue_write_staging_buffer`]: Global::queue_write_staging_buffer
/// [`queue_write_texture`]: Global::queue_write_texture
/// [`queue_create_staging_buffer`]: crate::global::Global::queue_create_staging_buffer
/// [`queue_write_staging_buffer`]: crate::global::Global::queue_write_staging_buffer
/// [`queue_write_texture`]: crate::global::Global::queue_write_texture
/// [`Device::pending_writes`]: crate::device::Device
#[derive(Debug)]
pub struct StagingBuffer {
@ -1306,236 +1298,6 @@ impl Texture {
}
}
impl Global {
    /// Invokes `hal_buffer_callback` with the backend-`A` HAL buffer behind `id`,
    /// or `None` if the id is invalid, the buffer was destroyed, or `A` is not
    /// the buffer's backend.
    ///
    /// # Safety
    ///
    /// - The raw buffer handle must not be manually destroyed
    pub unsafe fn buffer_as_hal<A: HalApi, F: FnOnce(Option<&A::Buffer>) -> R, R>(
        &self,
        id: BufferId,
        hal_buffer_callback: F,
    ) -> R {
        profiling::scope!("Buffer::as_hal");

        let hub = &self.hub;

        if let Ok(buffer) = hub.buffers.get(id).get() {
            // Hold the snatchable lock for the duration of the callback so the
            // raw handle cannot be snatched (destroyed) while it is observed.
            let snatch_guard = buffer.device.snatchable_lock.read();
            let hal_buffer = buffer
                .raw(&snatch_guard)
                .and_then(|b| b.as_any().downcast_ref());
            hal_buffer_callback(hal_buffer)
        } else {
            hal_buffer_callback(None)
        }
    }

    /// Invokes `hal_texture_callback` with the backend-`A` HAL texture behind
    /// `id`, or `None` if unavailable.
    ///
    /// # Safety
    ///
    /// - The raw texture handle must not be manually destroyed
    pub unsafe fn texture_as_hal<A: HalApi, F: FnOnce(Option<&A::Texture>) -> R, R>(
        &self,
        id: TextureId,
        hal_texture_callback: F,
    ) -> R {
        profiling::scope!("Texture::as_hal");

        let hub = &self.hub;

        if let Ok(texture) = hub.textures.get(id).get() {
            // Guard against the raw handle being snatched mid-callback.
            let snatch_guard = texture.device.snatchable_lock.read();
            let hal_texture = texture.raw(&snatch_guard);
            let hal_texture = hal_texture
                .as_ref()
                .and_then(|it| it.as_any().downcast_ref());
            hal_texture_callback(hal_texture)
        } else {
            hal_texture_callback(None)
        }
    }

    /// Invokes `hal_texture_view_callback` with the backend-`A` HAL texture
    /// view behind `id`, or `None` if unavailable.
    ///
    /// # Safety
    ///
    /// - The raw texture view handle must not be manually destroyed
    pub unsafe fn texture_view_as_hal<A: HalApi, F: FnOnce(Option<&A::TextureView>) -> R, R>(
        &self,
        id: TextureViewId,
        hal_texture_view_callback: F,
    ) -> R {
        profiling::scope!("TextureView::as_hal");

        let hub = &self.hub;

        if let Ok(texture_view) = hub.texture_views.get(id).get() {
            // Guard against the raw handle being snatched mid-callback.
            let snatch_guard = texture_view.device.snatchable_lock.read();
            let hal_texture_view = texture_view.raw(&snatch_guard);
            let hal_texture_view = hal_texture_view
                .as_ref()
                .and_then(|it| it.as_any().downcast_ref());
            hal_texture_view_callback(hal_texture_view)
        } else {
            hal_texture_view_callback(None)
        }
    }

    /// Invokes `hal_adapter_callback` with the backend-`A` HAL adapter behind
    /// `id`, or `None` if `A` is not the adapter's backend.
    ///
    /// # Safety
    ///
    /// - The raw adapter handle must not be manually destroyed
    pub unsafe fn adapter_as_hal<A: HalApi, F: FnOnce(Option<&A::Adapter>) -> R, R>(
        &self,
        id: AdapterId,
        hal_adapter_callback: F,
    ) -> R {
        profiling::scope!("Adapter::as_hal");

        let hub = &self.hub;
        let adapter = hub.adapters.get(id);
        // Downcast fails (yielding None) when `A` does not match the backend.
        let hal_adapter = adapter.raw.adapter.as_any().downcast_ref();

        hal_adapter_callback(hal_adapter)
    }

    /// Invokes `hal_device_callback` with the backend-`A` HAL device behind
    /// `id`, or `None` if `A` is not the device's backend.
    ///
    /// # Safety
    ///
    /// - The raw device handle must not be manually destroyed
    pub unsafe fn device_as_hal<A: HalApi, F: FnOnce(Option<&A::Device>) -> R, R>(
        &self,
        id: DeviceId,
        hal_device_callback: F,
    ) -> R {
        profiling::scope!("Device::as_hal");

        let device = self.hub.devices.get(id);
        let hal_device = device.raw().as_any().downcast_ref();

        hal_device_callback(hal_device)
    }

    /// Invokes `hal_fence_callback` with the backend-`A` HAL fence of the
    /// device behind `id`, or `None` if `A` is not the device's backend.
    ///
    /// # Safety
    ///
    /// - The raw fence handle must not be manually destroyed
    pub unsafe fn device_fence_as_hal<A: HalApi, F: FnOnce(Option<&A::Fence>) -> R, R>(
        &self,
        id: DeviceId,
        hal_fence_callback: F,
    ) -> R {
        profiling::scope!("Device::fence_as_hal");

        let device = self.hub.devices.get(id);
        // Keep the fence read lock held while the callback runs.
        let fence = device.fence.read();

        hal_fence_callback(fence.as_any().downcast_ref())
    }

    /// Invokes `hal_surface_callback` with the backend-`A` HAL surface behind
    /// `id`, or `None` if the surface has no raw handle for backend `A`.
    ///
    /// # Safety
    ///
    /// - The raw surface handle must not be manually destroyed
    pub unsafe fn surface_as_hal<A: HalApi, F: FnOnce(Option<&A::Surface>) -> R, R>(
        &self,
        id: SurfaceId,
        hal_surface_callback: F,
    ) -> R {
        profiling::scope!("Surface::as_hal");

        let surface = self.surfaces.get(id);
        let hal_surface = surface
            .raw(A::VARIANT)
            .and_then(|surface| surface.as_any().downcast_ref());

        hal_surface_callback(hal_surface)
    }

    /// Invokes `hal_command_encoder_callback` with a mutable reference to the
    /// backend-`A` HAL command encoder behind `id`, or `None` if the encoder
    /// cannot be opened or `A` is not its backend.
    ///
    /// # Safety
    ///
    /// - The raw command encoder handle must not be manually destroyed
    pub unsafe fn command_encoder_as_hal_mut<
        A: HalApi,
        F: FnOnce(Option<&mut A::CommandEncoder>) -> R,
        R,
    >(
        &self,
        id: CommandEncoderId,
        hal_command_encoder_callback: F,
    ) -> R {
        profiling::scope!("CommandEncoder::as_hal");

        let hub = &self.hub;

        let cmd_buf = hub.command_buffers.get(id.into_command_buffer_id());
        let mut cmd_buf_data = cmd_buf.data.lock();
        cmd_buf_data.record_as_hal_mut(|opt_cmd_buf| -> R {
            hal_command_encoder_callback(opt_cmd_buf.and_then(|cmd_buf| {
                cmd_buf
                    .encoder
                    .open()
                    .ok()
                    .and_then(|encoder| encoder.as_any_mut().downcast_mut())
            }))
        })
    }

    /// Invokes `hal_queue_callback` with the backend-`A` HAL queue behind
    /// `id`, or `None` if `A` is not the queue's backend.
    ///
    /// # Safety
    ///
    /// - The raw queue handle must not be manually destroyed
    pub unsafe fn queue_as_hal<A: HalApi, F, R>(&self, id: QueueId, hal_queue_callback: F) -> R
    where
        F: FnOnce(Option<&A::Queue>) -> R,
    {
        profiling::scope!("Queue::as_hal");

        let queue = self.hub.queues.get(id);
        let hal_queue = queue.raw().as_any().downcast_ref();

        hal_queue_callback(hal_queue)
    }

    /// Invokes `hal_blas_callback` with the backend-`A` HAL acceleration
    /// structure behind the BLAS `id`, or `None` if unavailable.
    ///
    /// # Safety
    ///
    /// - The raw blas handle must not be manually destroyed
    pub unsafe fn blas_as_hal<A: HalApi, F: FnOnce(Option<&A::AccelerationStructure>) -> R, R>(
        &self,
        id: BlasId,
        hal_blas_callback: F,
    ) -> R {
        profiling::scope!("Blas::as_hal");

        let hub = &self.hub;

        if let Ok(blas) = hub.blas_s.get(id).get() {
            // Guard against the raw handle being snatched mid-callback.
            let snatch_guard = blas.device.snatchable_lock.read();
            let hal_blas = blas
                .try_raw(&snatch_guard)
                .ok()
                .and_then(|b| b.as_any().downcast_ref());
            hal_blas_callback(hal_blas)
        } else {
            hal_blas_callback(None)
        }
    }

    /// Invokes `hal_tlas_callback` with the backend-`A` HAL acceleration
    /// structure behind the TLAS `id`, or `None` if unavailable.
    ///
    /// # Safety
    ///
    /// - The raw tlas handle must not be manually destroyed
    pub unsafe fn tlas_as_hal<A: HalApi, F: FnOnce(Option<&A::AccelerationStructure>) -> R, R>(
        &self,
        id: TlasId,
        hal_tlas_callback: F,
    ) -> R {
        // Fixed copy-paste bug: this scope was previously mislabeled "Blas::as_hal",
        // which made TLAS lookups indistinguishable from BLAS lookups in profiles.
        profiling::scope!("Tlas::as_hal");

        let hub = &self.hub;

        if let Ok(tlas) = hub.tlas_s.get(id).get() {
            // Guard against the raw handle being snatched mid-callback.
            let snatch_guard = tlas.device.snatchable_lock.read();
            let hal_tlas = tlas
                .try_raw(&snatch_guard)
                .ok()
                .and_then(|t| t.as_any().downcast_ref());
            hal_tlas_callback(hal_tlas)
        } else {
            hal_tlas_callback(None)
        }
    }
}
/// A texture that has been marked as destroyed and is staged for actual deletion soon.
#[derive(Debug)]
pub struct DestroyedTexture {