[wgpu-hal] Upgrade to ash 0.38 (#5504)

Marijn Suijten 2024-05-13 17:57:44 +02:00 committed by GitHub
parent ca91744955
commit 8879733875
10 changed files with 607 additions and 714 deletions


@ -257,6 +257,7 @@ By @atlv24 and @cwfitzgerald in [#5154](https://github.com/gfx-rs/wgpu/pull/5154
- Set object labels when the DEBUG flag is set, even if the VALIDATION flag is disabled. By @DJMcNab in [#5345](https://github.com/gfx-rs/wgpu/pull/5345).
- Add safety check to `wgpu_hal::vulkan::CommandEncoder` to make sure `discard_encoding` is not called in the closed state. By @villuna in [#5557](https://github.com/gfx-rs/wgpu/pull/5557)
- Fix SPIR-V type capability requests to not depend on `LocalType` caching. By @atlv24 in [#5590](https://github.com/gfx-rs/wgpu/pull/5590)
- Upgrade `ash` to `0.38`. By @MarijnS95 in [#5504](https://github.com/gfx-rs/wgpu/pull/5504).
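A minimal sketch of the migration pattern this entry refers to, adapted from the command-buffer changes later in this diff: ash 0.38 drops the separate `Builder` types, so parameter structs start from `default()`, setters are chained on the struct itself, and the trailing `.build()` call goes away (the structs now carry a lifetime for any slices they borrow).

// ash 0.37
let vk_info = vk::CommandBufferBeginInfo::builder()
    .flags(vk::CommandBufferUsageFlags::ONE_TIME_SUBMIT)
    .build();

// ash 0.38
let vk_info = vk::CommandBufferBeginInfo::default()
    .flags(vk::CommandBufferUsageFlags::ONE_TIME_SUBMIT);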
#### Tests

Cargo.lock (generated)

@ -227,11 +227,11 @@ checksum = "175571dd1d178ced59193a6fc02dde1b972eb0bc56c892cde9beeceac5bf0f6b"
[[package]]
name = "ash"
version = "0.37.3+1.3.251"
version = "0.38.0+1.3.281"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "39e9c3835d686b0a6084ab4234fcd1b07dbf6e4767dce60874b12356a25ecd4a"
checksum = "0bb44936d800fea8f016d7f2311c6a4f97aebd5dc86f09906139ec848cf3a46f"
dependencies = [
"libloading 0.7.4",
"libloading 0.8.3",
]
[[package]]


@ -133,7 +133,7 @@ objc = "0.2.5"
# Vulkan dependencies
android_system_properties = "0.1.1"
ash = "0.37.3"
ash = "0.38.0"
gpu-alloc = "0.6"
gpu-descriptor = "0.3"


@ -55,6 +55,7 @@ gles = [
"dep:khronos-egl",
"dep:libloading",
"dep:ndk-sys",
"winapi/libloaderapi",
]
dx12 = [
"naga/hlsl-out",
@ -68,6 +69,7 @@ dx12 = [
"winapi/d3d12shader",
"winapi/d3d12sdklayers",
"winapi/dxgi1_6",
"winapi/errhandlingapi",
]
# TODO: This is a separate feature until Mozilla okays windows-rs, see https://github.com/gfx-rs/wgpu/issues/3207 for the tracking issue.
windows_rs = ["dep:gpu-allocator"]
@ -118,7 +120,7 @@ version = "0.20.0"
[target.'cfg(not(target_arch = "wasm32"))'.dependencies]
# backend: Vulkan
ash = { version = "0.37.3", optional = true }
ash = { version = "0.38.0", optional = true }
gpu-alloc = { version = "0.6", optional = true }
gpu-descriptor = { version = "0.3", optional = true }
smallvec = { version = "1", optional = true, features = ["union"] }
@ -146,7 +148,6 @@ glutin_wgl_sys = { version = "0.5", optional = true }
winapi = { version = "0.3", features = [
"profileapi",
"libloaderapi",
"windef",
"winuser",
"dcomp",


@ -115,24 +115,3 @@ impl crate::TextureCopy {
self.size = self.size.min(&max_src_size).min(&max_dst_size);
}
}
/// Construct a `CStr` from a byte slice, up to the first zero byte.
///
/// Return a `CStr` extending from the start of `bytes` up to and
/// including the first zero byte. If there is no zero byte in
/// `bytes`, return `None`.
///
/// This can be removed when `CStr::from_bytes_until_nul` is stabilized.
/// ([#95027](https://github.com/rust-lang/rust/issues/95027))
#[allow(dead_code)]
pub(crate) fn cstr_from_bytes_until_nul(bytes: &[std::os::raw::c_char]) -> Option<&std::ffi::CStr> {
if bytes.contains(&0) {
// Safety for `CStr::from_ptr`:
// - We've ensured that the slice does contain a null terminator.
// - The range is valid to read, because the slice covers it.
// - The memory won't be changed, because the slice borrows it.
unsafe { Some(std::ffi::CStr::from_ptr(bytes.as_ptr())) }
} else {
None
}
}
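With ash 0.38 this helper becomes redundant: the generated structs expose `*_as_c_str()` accessors for their fixed-size `c_char` name arrays, which the instance-extension filtering later in this diff switches to. A minimal sketch of such a call site (identifier names are illustrative):

// ash 0.38: the accessor walks the fixed-size array up to the first NUL and
// returns a Result, replacing the manual pointer cast above.
let available = instance_extensions
    .iter()
    .any(|inst_ext| inst_ext.extension_name_as_c_str() == Ok(wanted));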

File diff suppressed because it is too large.


@ -1,7 +1,7 @@
use super::conv;
use arrayvec::ArrayVec;
use ash::{extensions::ext, vk};
use ash::vk;
use std::{mem, ops::Range, slice};
@ -39,12 +39,6 @@ impl super::Texture {
}
}
impl super::DeviceShared {
fn debug_messenger(&self) -> Option<&ext::DebugUtils> {
Some(&self.instance.debug_utils.as_ref()?.extension)
}
}
impl super::CommandEncoder {
fn write_pass_end_timestamp_if_requested(&mut self) {
if let Some((query_set, index)) = self.end_of_pass_timer_query.take() {
@ -65,10 +59,9 @@ impl crate::CommandEncoder for super::CommandEncoder {
unsafe fn begin_encoding(&mut self, label: crate::Label) -> Result<(), crate::DeviceError> {
if self.free.is_empty() {
let vk_info = vk::CommandBufferAllocateInfo::builder()
let vk_info = vk::CommandBufferAllocateInfo::default()
.command_pool(self.raw)
.command_buffer_count(ALLOCATION_GRANULARITY)
.build();
.command_buffer_count(ALLOCATION_GRANULARITY);
let cmd_buf_vec = unsafe { self.device.raw.allocate_command_buffers(&vk_info)? };
self.free.extend(cmd_buf_vec);
}
@ -76,20 +69,13 @@ impl crate::CommandEncoder for super::CommandEncoder {
// Set the name unconditionally, since there might be a
// previous name assigned to this.
unsafe {
self.device.set_object_name(
vk::ObjectType::COMMAND_BUFFER,
raw,
label.unwrap_or_default(),
)
};
unsafe { self.device.set_object_name(raw, label.unwrap_or_default()) };
// Reset this in case the last renderpass was never ended.
self.rpass_debug_marker_active = false;
let vk_info = vk::CommandBufferBeginInfo::builder()
.flags(vk::CommandBufferUsageFlags::ONE_TIME_SUBMIT)
.build();
let vk_info = vk::CommandBufferBeginInfo::default()
.flags(vk::CommandBufferUsageFlags::ONE_TIME_SUBMIT);
unsafe { self.device.raw.begin_command_buffer(raw, &vk_info) }?;
self.active = raw;
@ -145,12 +131,11 @@ impl crate::CommandEncoder for super::CommandEncoder {
dst_stages |= dst_stage;
vk_barriers.push(
vk::BufferMemoryBarrier::builder()
vk::BufferMemoryBarrier::default()
.buffer(bar.buffer.raw)
.size(vk::WHOLE_SIZE)
.src_access_mask(src_access)
.dst_access_mask(dst_access)
.build(),
.dst_access_mask(dst_access),
)
}
@ -192,14 +177,13 @@ impl crate::CommandEncoder for super::CommandEncoder {
dst_stages |= dst_stage;
vk_barriers.push(
vk::ImageMemoryBarrier::builder()
vk::ImageMemoryBarrier::default()
.image(bar.texture.raw)
.subresource_range(range)
.src_access_mask(src_access)
.dst_access_mask(dst_access)
.old_layout(src_layout)
.new_layout(dst_layout)
.build(),
.new_layout(dst_layout),
);
}
@ -442,7 +426,7 @@ impl crate::CommandEncoder for super::CommandEncoder {
Some(buffer) => ray_tracing_functions
.buffer_device_address
.get_buffer_device_address(
&vk::BufferDeviceAddressInfo::builder().buffer(buffer.raw),
&vk::BufferDeviceAddressInfo::default().buffer(buffer.raw),
),
None => panic!("Buffers are required to build acceleration structures"),
}
@ -469,23 +453,24 @@ impl crate::CommandEncoder for super::CommandEncoder {
for desc in descriptors {
let (geometries, ranges) = match *desc.entries {
crate::AccelerationStructureEntries::Instances(ref instances) => {
let instance_data = vk::AccelerationStructureGeometryInstancesDataKHR::builder(
let instance_data = vk::AccelerationStructureGeometryInstancesDataKHR::default(
// TODO: Code is so large that rustfmt refuses to treat this... :(
)
.data(vk::DeviceOrHostAddressConstKHR {
device_address: get_device_address(instances.buffer),
});
let geometry = vk::AccelerationStructureGeometryKHR::builder()
let geometry = vk::AccelerationStructureGeometryKHR::default()
.geometry_type(vk::GeometryTypeKHR::INSTANCES)
.geometry(vk::AccelerationStructureGeometryDataKHR {
instances: *instance_data,
instances: instance_data,
});
let range = vk::AccelerationStructureBuildRangeInfoKHR::builder()
let range = vk::AccelerationStructureBuildRangeInfoKHR::default()
.primitive_count(instances.count)
.primitive_offset(instances.offset);
(smallvec::smallvec![*geometry], smallvec::smallvec![*range])
(smallvec::smallvec![geometry], smallvec::smallvec![range])
}
crate::AccelerationStructureEntries::Triangles(ref in_geometries) => {
let mut ranges = smallvec::SmallVec::<
@ -496,7 +481,7 @@ impl crate::CommandEncoder for super::CommandEncoder {
>::with_capacity(in_geometries.len());
for triangles in in_geometries {
let mut triangle_data =
vk::AccelerationStructureGeometryTrianglesDataKHR::builder()
vk::AccelerationStructureGeometryTrianglesDataKHR::default()
.vertex_data(vk::DeviceOrHostAddressConstKHR {
device_address: get_device_address(triangles.vertex_buffer),
})
@ -504,7 +489,7 @@ impl crate::CommandEncoder for super::CommandEncoder {
.max_vertex(triangles.vertex_count)
.vertex_stride(triangles.vertex_stride);
let mut range = vk::AccelerationStructureBuildRangeInfoKHR::builder();
let mut range = vk::AccelerationStructureBuildRangeInfoKHR::default();
if let Some(ref indices) = triangles.indices {
triangle_data = triangle_data
@ -528,7 +513,7 @@ impl crate::CommandEncoder for super::CommandEncoder {
ray_tracing_functions
.buffer_device_address
.get_buffer_device_address(
&vk::BufferDeviceAddressInfo::builder()
&vk::BufferDeviceAddressInfo::default()
.buffer(transform.buffer.raw),
)
};
@ -540,17 +525,17 @@ impl crate::CommandEncoder for super::CommandEncoder {
range = range.transform_offset(transform.offset);
}
let geometry = vk::AccelerationStructureGeometryKHR::builder()
let geometry = vk::AccelerationStructureGeometryKHR::default()
.geometry_type(vk::GeometryTypeKHR::TRIANGLES)
.geometry(vk::AccelerationStructureGeometryDataKHR {
triangles: *triangle_data,
triangles: triangle_data,
})
.flags(conv::map_acceleration_structure_geometry_flags(
triangles.flags,
));
geometries.push(*geometry);
ranges.push(*range);
geometries.push(geometry);
ranges.push(range);
}
(geometries, ranges)
}
@ -562,25 +547,25 @@ impl crate::CommandEncoder for super::CommandEncoder {
[vk::AccelerationStructureGeometryKHR; CAPACITY_INNER],
>::with_capacity(in_geometries.len());
for aabb in in_geometries {
let aabbs_data = vk::AccelerationStructureGeometryAabbsDataKHR::builder()
let aabbs_data = vk::AccelerationStructureGeometryAabbsDataKHR::default()
.data(vk::DeviceOrHostAddressConstKHR {
device_address: get_device_address(aabb.buffer),
})
.stride(aabb.stride);
let range = vk::AccelerationStructureBuildRangeInfoKHR::builder()
let range = vk::AccelerationStructureBuildRangeInfoKHR::default()
.primitive_count(aabb.count)
.primitive_offset(aabb.offset);
let geometry = vk::AccelerationStructureGeometryKHR::builder()
let geometry = vk::AccelerationStructureGeometryKHR::default()
.geometry_type(vk::GeometryTypeKHR::AABBS)
.geometry(vk::AccelerationStructureGeometryDataKHR {
aabbs: *aabbs_data,
aabbs: aabbs_data,
})
.flags(conv::map_acceleration_structure_geometry_flags(aabb.flags));
geometries.push(*geometry);
ranges.push(*range);
geometries.push(geometry);
ranges.push(range);
}
(geometries, ranges)
}
@ -593,7 +578,7 @@ impl crate::CommandEncoder for super::CommandEncoder {
ray_tracing_functions
.buffer_device_address
.get_buffer_device_address(
&vk::BufferDeviceAddressInfo::builder().buffer(desc.scratch_buffer.raw),
&vk::BufferDeviceAddressInfo::default().buffer(desc.scratch_buffer.raw),
)
};
let ty = match *desc.entries {
@ -602,7 +587,7 @@ impl crate::CommandEncoder for super::CommandEncoder {
}
_ => vk::AccelerationStructureTypeKHR::BOTTOM_LEVEL,
};
let mut geometry_info = vk::AccelerationStructureBuildGeometryInfoKHR::builder()
let mut geometry_info = vk::AccelerationStructureBuildGeometryInfoKHR::default()
.ty(ty)
.mode(conv::map_acceleration_structure_build_mode(desc.mode))
.flags(conv::map_acceleration_structure_flags(desc.flags))
@ -618,7 +603,7 @@ impl crate::CommandEncoder for super::CommandEncoder {
.raw;
}
geometry_infos.push(*geometry_info);
geometry_infos.push(geometry_info);
}
for (i, geometry_info) in geometry_infos.iter_mut().enumerate() {
@ -649,10 +634,9 @@ impl crate::CommandEncoder for super::CommandEncoder {
src_stage | vk::PipelineStageFlags::TOP_OF_PIPE,
dst_stage | vk::PipelineStageFlags::BOTTOM_OF_PIPE,
vk::DependencyFlags::empty(),
&[vk::MemoryBarrier::builder()
&[vk::MemoryBarrier::default()
.src_access_mask(src_access)
.dst_access_mask(dst_access)
.build()],
.dst_access_mask(dst_access)],
&[],
&[],
)
@ -754,17 +738,13 @@ impl crate::CommandEncoder for super::CommandEncoder {
.make_framebuffer(fb_key, raw_pass, desc.label)
.unwrap();
let mut vk_info = vk::RenderPassBeginInfo::builder()
let mut vk_info = vk::RenderPassBeginInfo::default()
.render_pass(raw_pass)
.render_area(render_area)
.clear_values(&vk_clear_values)
.framebuffer(raw_framebuffer);
let mut vk_attachment_info = if caps.imageless_framebuffers {
Some(
vk::RenderPassAttachmentBeginInfo::builder()
.attachments(&vk_image_views)
.build(),
)
Some(vk::RenderPassAttachmentBeginInfo::default().attachments(&vk_image_views))
} else {
None
};
@ -859,21 +839,21 @@ impl crate::CommandEncoder for super::CommandEncoder {
}
unsafe fn insert_debug_marker(&mut self, label: &str) {
if let Some(ext) = self.device.debug_messenger() {
if let Some(ext) = self.device.extension_fns.debug_utils.as_ref() {
let cstr = self.temp.make_c_str(label);
let vk_label = vk::DebugUtilsLabelEXT::builder().label_name(cstr).build();
let vk_label = vk::DebugUtilsLabelEXT::default().label_name(cstr);
unsafe { ext.cmd_insert_debug_utils_label(self.active, &vk_label) };
}
}
unsafe fn begin_debug_marker(&mut self, group_label: &str) {
if let Some(ext) = self.device.debug_messenger() {
if let Some(ext) = self.device.extension_fns.debug_utils.as_ref() {
let cstr = self.temp.make_c_str(group_label);
let vk_label = vk::DebugUtilsLabelEXT::builder().label_name(cstr).build();
let vk_label = vk::DebugUtilsLabelEXT::default().label_name(cstr);
unsafe { ext.cmd_begin_debug_utils_label(self.active, &vk_label) };
}
}
unsafe fn end_debug_marker(&mut self) {
if let Some(ext) = self.device.debug_messenger() {
if let Some(ext) = self.device.extension_fns.debug_utils.as_ref() {
unsafe { ext.cmd_end_debug_utils_label(self.active) };
}
}


@ -1,29 +1,23 @@
use super::conv;
use arrayvec::ArrayVec;
use ash::{extensions::khr, vk};
use naga::back::spv::ZeroInitializeWorkgroupMemoryMode;
use ash::{khr, vk};
use parking_lot::Mutex;
use std::{
borrow::Cow,
collections::{hash_map::Entry, BTreeMap},
ffi::{CStr, CString},
mem::MaybeUninit,
num::NonZeroU32,
ptr,
sync::Arc,
};
impl super::DeviceShared {
pub(super) unsafe fn set_object_name(
&self,
object_type: vk::ObjectType,
object: impl vk::Handle,
name: &str,
) {
let extension = match self.instance.debug_utils {
Some(ref debug_utils) => &debug_utils.extension,
None => return,
pub(super) unsafe fn set_object_name(&self, object: impl vk::Handle, name: &str) {
let Some(extension) = self.extension_fns.debug_utils.as_ref() else {
return;
};
// Keep variables outside the if-else block to ensure they do not
@ -54,10 +48,8 @@ impl super::DeviceShared {
let _result = unsafe {
extension.set_debug_utils_object_name(
self.raw.handle(),
&vk::DebugUtilsObjectNameInfoEXT::builder()
.object_type(object_type)
.object_handle(object.as_raw())
&vk::DebugUtilsObjectNameInfoEXT::default()
.object_handle(object)
.object_name(name),
)
};
@ -87,25 +79,23 @@ impl super::DeviceShared {
};
vk_attachments.push({
let (load_op, store_op) = conv::map_attachment_ops(cat.base.ops);
vk::AttachmentDescription::builder()
vk::AttachmentDescription::default()
.format(cat.base.format)
.samples(samples)
.load_op(load_op)
.store_op(store_op)
.initial_layout(cat.base.layout)
.final_layout(cat.base.layout)
.build()
});
let resolve_ref = if let Some(ref rat) = cat.resolve {
let (load_op, store_op) = conv::map_attachment_ops(rat.ops);
let vk_attachment = vk::AttachmentDescription::builder()
let vk_attachment = vk::AttachmentDescription::default()
.format(rat.format)
.samples(vk::SampleCountFlags::TYPE_1)
.load_op(load_op)
.store_op(store_op)
.initial_layout(rat.layout)
.final_layout(rat.layout)
.build();
.final_layout(rat.layout);
vk_attachments.push(vk_attachment);
vk::AttachmentReference {
@ -133,7 +123,7 @@ impl super::DeviceShared {
let (load_op, store_op) = conv::map_attachment_ops(ds.base.ops);
let (stencil_load_op, stencil_store_op) =
conv::map_attachment_ops(ds.stencil_ops);
let vk_attachment = vk::AttachmentDescription::builder()
let vk_attachment = vk::AttachmentDescription::default()
.format(ds.base.format)
.samples(samples)
.load_op(load_op)
@ -141,13 +131,12 @@ impl super::DeviceShared {
.stencil_load_op(stencil_load_op)
.stencil_store_op(stencil_store_op)
.initial_layout(ds.base.layout)
.final_layout(ds.base.layout)
.build();
.final_layout(ds.base.layout);
vk_attachments.push(vk_attachment);
}
let vk_subpasses = [{
let mut vk_subpass = vk::SubpassDescription::builder()
let mut vk_subpass = vk::SubpassDescription::default()
.pipeline_bind_point(vk::PipelineBindPoint::GRAPHICS)
.color_attachments(&color_refs)
.resolve_attachments(&resolve_refs);
@ -163,10 +152,10 @@ impl super::DeviceShared {
if let Some(ref reference) = ds_ref {
vk_subpass = vk_subpass.depth_stencil_attachment(reference)
}
vk_subpass.build()
vk_subpass
}];
let mut vk_info = vk::RenderPassCreateInfo::builder()
let mut vk_info = vk::RenderPassCreateInfo::default()
.attachments(&vk_attachments)
.subpasses(&vk_subpasses);
@ -183,10 +172,9 @@ impl super::DeviceShared {
mask = [(1 << multiview.get()) - 1];
// On Vulkan 1.1 or later, this is an alias for core functionality
multiview_info = vk::RenderPassMultiviewCreateInfoKHR::builder()
multiview_info = vk::RenderPassMultiviewCreateInfoKHR::default()
.view_masks(&mask)
.correlation_masks(&mask)
.build();
.correlation_masks(&mask);
vk_info = vk_info.push_next(&mut multiview_info);
}
@ -231,7 +219,7 @@ impl super::DeviceShared {
.iter()
.enumerate()
.map(|(i, at)| {
let mut info = vk::FramebufferAttachmentImageInfo::builder()
let mut info = vk::FramebufferAttachmentImageInfo::default()
.usage(conv::map_texture_usage(at.view_usage))
.flags(at.raw_image_flags)
.width(e.key().extent.width)
@ -243,14 +231,13 @@ impl super::DeviceShared {
} else {
info = info.view_formats(&vk_view_formats_list[i]);
};
info.build()
info
})
.collect::<ArrayVec<_, { super::MAX_TOTAL_ATTACHMENTS }>>();
let mut vk_attachment_info = vk::FramebufferAttachmentsCreateInfo::builder()
.attachment_image_infos(&vk_image_infos)
.build();
let mut vk_info = vk::FramebufferCreateInfo::builder()
let mut vk_attachment_info = vk::FramebufferAttachmentsCreateInfo::default()
.attachment_image_infos(&vk_image_infos);
let mut vk_info = vk::FramebufferCreateInfo::default()
.render_pass(raw_pass)
.width(e.key().extent.width)
.height(e.key().extent.height)
@ -269,7 +256,7 @@ impl super::DeviceShared {
*e.insert(unsafe {
let raw = self.raw.create_framebuffer(&vk_info, None).unwrap();
if let Some(label) = pass_label {
self.set_object_name(vk::ObjectType::FRAMEBUFFER, raw, label);
self.set_object_name(raw, label);
}
raw
})
@ -285,11 +272,10 @@ impl super::DeviceShared {
let block = buffer.block.as_ref()?.lock();
let mask = self.private_caps.non_coherent_map_mask;
Some(ranges.map(move |range| {
vk::MappedMemoryRange::builder()
vk::MappedMemoryRange::default()
.memory(*block.memory())
.offset((block.offset() + range.start) & !mask)
.size((range.end - range.start + mask) & !mask)
.build()
}))
}
@ -313,14 +299,14 @@ impl gpu_alloc::MemoryDevice<vk::DeviceMemory> for super::DeviceShared {
memory_type: u32,
flags: gpu_alloc::AllocationFlags,
) -> Result<vk::DeviceMemory, gpu_alloc::OutOfMemory> {
let mut info = vk::MemoryAllocateInfo::builder()
let mut info = vk::MemoryAllocateInfo::default()
.allocation_size(size)
.memory_type_index(memory_type);
let mut info_flags;
if flags.contains(gpu_alloc::AllocationFlags::DEVICE_ADDRESS) {
info_flags = vk::MemoryAllocateFlagsInfo::builder()
info_flags = vk::MemoryAllocateFlagsInfo::default()
.flags(vk::MemoryAllocateFlags::DEVICE_ADDRESS);
info = info.push_next(&mut info_flags);
}
@ -444,11 +430,10 @@ impl
if flags.contains(gpu_descriptor::DescriptorPoolCreateFlags::FREE_DESCRIPTOR_SET) {
vk_flags |= vk::DescriptorPoolCreateFlags::FREE_DESCRIPTOR_SET;
}
let vk_info = vk::DescriptorPoolCreateInfo::builder()
let vk_info = vk::DescriptorPoolCreateInfo::default()
.max_sets(max_sets)
.flags(vk_flags)
.pool_sizes(&filtered_counts)
.build();
.pool_sizes(&filtered_counts);
match unsafe { self.raw.create_descriptor_pool(&vk_info, None) } {
Ok(pool) => Ok(pool),
@ -480,14 +465,13 @@ impl
) -> Result<(), gpu_descriptor::DeviceAllocationError> {
let result = unsafe {
self.raw.allocate_descriptor_sets(
&vk::DescriptorSetAllocateInfo::builder()
&vk::DescriptorSetAllocateInfo::default()
.descriptor_pool(*pool)
.set_layouts(
&smallvec::SmallVec::<[vk::DescriptorSetLayout; 32]>::from_iter(
layouts.cloned(),
),
)
.build(),
),
)
};
@ -532,7 +516,7 @@ impl
}
struct CompiledStage {
create_info: vk::PipelineShaderStageCreateInfo,
create_info: vk::PipelineShaderStageCreateInfo<'static>,
_entry_point: CString,
temp_raw_module: Option<vk::ShaderModule>,
}
@ -545,7 +529,7 @@ impl super::Device {
provided_old_swapchain: Option<super::Swapchain>,
) -> Result<super::Swapchain, crate::SurfaceError> {
profiling::scope!("Device::create_swapchain");
let functor = khr::Swapchain::new(&surface.instance.raw, &self.shared.raw);
let functor = khr::swapchain::Device::new(&surface.instance.raw, &self.shared.raw);
let old_swapchain = match provided_old_swapchain {
Some(osc) => osc.raw,
@ -577,7 +561,7 @@ impl super::Device {
wgt_view_formats.push(config.format);
}
let mut info = vk::SwapchainCreateInfoKHR::builder()
let mut info = vk::SwapchainCreateInfoKHR::default()
.flags(raw_flags)
.surface(surface.raw)
.min_image_count(config.maximum_frame_latency + 1) // TODO: https://github.com/gfx-rs/wgpu/issues/2869
@ -596,7 +580,7 @@ impl super::Device {
.clipped(true)
.old_swapchain(old_swapchain);
let mut format_list_info = vk::ImageFormatListCreateInfo::builder();
let mut format_list_info = vk::ImageFormatListCreateInfo::default();
if !raw_view_formats.is_empty() {
format_list_info = format_list_info.view_formats(&raw_view_formats);
info = info.push_next(&mut format_list_info);
@ -635,7 +619,7 @@ impl super::Device {
.map(|_| unsafe {
self.shared
.raw
.create_semaphore(&vk::SemaphoreCreateInfo::builder(), None)
.create_semaphore(&vk::SemaphoreCreateInfo::default(), None)
})
.collect::<Result<Vec<_>, _>>()
.map_err(crate::DeviceError::from)?;
@ -708,7 +692,7 @@ impl super::Device {
&self,
spv: &[u32],
) -> Result<vk::ShaderModule, crate::DeviceError> {
let vk_info = vk::ShaderModuleCreateInfo::builder()
let vk_info = vk::ShaderModuleCreateInfo::default()
.flags(vk::ShaderModuleCreateFlags::empty())
.code(spv);
@ -764,7 +748,7 @@ impl super::Device {
}
if !stage.zero_initialize_workgroup_memory {
temp_options.zero_initialize_workgroup_memory =
ZeroInitializeWorkgroupMemoryMode::None;
naga::back::spv::ZeroInitializeWorkgroupMemoryMode::None;
}
&temp_options
@ -794,12 +778,13 @@ impl super::Device {
}
let entry_point = CString::new(stage.entry_point).unwrap();
let create_info = vk::PipelineShaderStageCreateInfo::builder()
let mut create_info = vk::PipelineShaderStageCreateInfo::default()
.flags(flags)
.stage(conv::map_shader_stage(stage_flags))
.module(vk_module)
.name(&entry_point)
.build();
.module(vk_module);
// Circumvent struct lifetime check because of a self-reference inside CompiledStage
create_info.p_name = entry_point.as_ptr();
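// Note: the `'static` lifetime on `CompiledStage::create_info` is only sound because
// the `CString` is kept alive next to it in `_entry_point`; dropping the string before
// the create info is consumed would leave `p_name` dangling.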
Ok(CompiledStage {
create_info,
@ -861,7 +846,7 @@ impl crate::Device for super::Device {
&self,
desc: &crate::BufferDescriptor,
) -> Result<super::Buffer, crate::DeviceError> {
let vk_info = vk::BufferCreateInfo::builder()
let vk_info = vk::BufferCreateInfo::default()
.size(desc.size)
.usage(conv::map_buffer_usage(desc.usage))
.sharing_mode(vk::SharingMode::EXCLUSIVE);
@ -920,10 +905,7 @@ impl crate::Device for super::Device {
};
if let Some(label) = desc.label {
unsafe {
self.shared
.set_object_name(vk::ObjectType::BUFFER, raw, label)
};
unsafe { self.shared.set_object_name(raw, label) };
}
Ok(super::Buffer {
@ -1031,7 +1013,7 @@ impl crate::Device for super::Device {
raw_flags |= vk::ImageCreateFlags::MUTABLE_FORMAT;
}
let mut vk_info = vk::ImageCreateInfo::builder()
let mut vk_info = vk::ImageCreateInfo::default()
.flags(raw_flags)
.image_type(conv::map_texture_dimension(desc.dimension))
.format(original_format)
@ -1044,7 +1026,7 @@ impl crate::Device for super::Device {
.sharing_mode(vk::SharingMode::EXCLUSIVE)
.initial_layout(vk::ImageLayout::UNDEFINED);
let mut format_list_info = vk::ImageFormatListCreateInfo::builder();
let mut format_list_info = vk::ImageFormatListCreateInfo::default();
if !vk_view_formats.is_empty() {
format_list_info = format_list_info.view_formats(&vk_view_formats);
vk_info = vk_info.push_next(&mut format_list_info);
@ -1072,10 +1054,7 @@ impl crate::Device for super::Device {
};
if let Some(label) = desc.label {
unsafe {
self.shared
.set_object_name(vk::ObjectType::IMAGE, raw, label)
};
unsafe { self.shared.set_object_name(raw, label) };
}
Ok(super::Texture {
@ -1104,7 +1083,7 @@ impl crate::Device for super::Device {
desc: &crate::TextureViewDescriptor,
) -> Result<super::TextureView, crate::DeviceError> {
let subresource_range = conv::map_subresource_range(&desc.range, texture.format);
let mut vk_info = vk::ImageViewCreateInfo::builder()
let mut vk_info = vk::ImageViewCreateInfo::default()
.flags(vk::ImageViewCreateFlags::empty())
.image(texture.raw)
.view_type(conv::map_view_dimension(desc.dimension))
@ -1115,9 +1094,8 @@ impl crate::Device for super::Device {
let mut image_view_info;
let view_usage = if self.shared.private_caps.image_view_usage && !desc.usage.is_empty() {
image_view_info = vk::ImageViewUsageCreateInfo::builder()
.usage(conv::map_texture_usage(desc.usage))
.build();
image_view_info =
vk::ImageViewUsageCreateInfo::default().usage(conv::map_texture_usage(desc.usage));
vk_info = vk_info.push_next(&mut image_view_info);
desc.usage
} else {
@ -1127,10 +1105,7 @@ impl crate::Device for super::Device {
let raw = unsafe { self.shared.raw.create_image_view(&vk_info, None) }?;
if let Some(label) = desc.label {
unsafe {
self.shared
.set_object_name(vk::ObjectType::IMAGE_VIEW, raw, label)
};
unsafe { self.shared.set_object_name(raw, label) };
}
let attachment = super::FramebufferAttachment {
@ -1172,7 +1147,7 @@ impl crate::Device for super::Device {
&self,
desc: &crate::SamplerDescriptor,
) -> Result<super::Sampler, crate::DeviceError> {
let mut vk_info = vk::SamplerCreateInfo::builder()
let mut vk_info = vk::SamplerCreateInfo::default()
.flags(vk::SamplerCreateFlags::empty())
.mag_filter(conv::map_filter_mode(desc.mag_filter))
.min_filter(conv::map_filter_mode(desc.min_filter))
@ -1204,10 +1179,7 @@ impl crate::Device for super::Device {
let raw = unsafe { self.shared.raw.create_sampler(&vk_info, None)? };
if let Some(label) = desc.label {
unsafe {
self.shared
.set_object_name(vk::ObjectType::SAMPLER, raw, label)
};
unsafe { self.shared.set_object_name(raw, label) };
}
Ok(super::Sampler { raw })
@ -1220,10 +1192,9 @@ impl crate::Device for super::Device {
&self,
desc: &crate::CommandEncoderDescriptor<super::Api>,
) -> Result<super::CommandEncoder, crate::DeviceError> {
let vk_info = vk::CommandPoolCreateInfo::builder()
let vk_info = vk::CommandPoolCreateInfo::default()
.queue_family_index(desc.queue.family_index)
.flags(vk::CommandPoolCreateFlags::TRANSIENT)
.build();
.flags(vk::CommandPoolCreateFlags::TRANSIENT);
let raw = unsafe { self.shared.raw.create_command_pool(&vk_info, None)? };
Ok(super::CommandEncoder {
@ -1313,10 +1284,11 @@ impl crate::Device for super::Device {
descriptor_count: types[entry.binding as usize].1,
stage_flags: conv::map_shader_stage(entry.visibility),
p_immutable_samplers: ptr::null(),
_marker: Default::default(),
})
.collect::<Vec<_>>();
let vk_info = vk::DescriptorSetLayoutCreateInfo::builder().bindings(&vk_bindings);
let vk_info = vk::DescriptorSetLayoutCreateInfo::default().bindings(&vk_bindings);
let binding_arrays = desc
.entries
@ -1347,7 +1319,7 @@ impl crate::Device for super::Device {
})
.collect::<Vec<_>>();
binding_flag_info = vk::DescriptorSetLayoutBindingFlagsCreateInfo::builder()
binding_flag_info = vk::DescriptorSetLayoutBindingFlagsCreateInfo::default()
.binding_flags(&binding_flag_vec);
vk_info.push_next(&mut binding_flag_info)
@ -1362,10 +1334,7 @@ impl crate::Device for super::Device {
};
if let Some(label) = desc.label {
unsafe {
self.shared
.set_object_name(vk::ObjectType::DESCRIPTOR_SET_LAYOUT, raw, label)
};
unsafe { self.shared.set_object_name(raw, label) };
}
Ok(super::BindGroupLayout {
@ -1403,7 +1372,7 @@ impl crate::Device for super::Device {
})
.collect::<Vec<_>>();
let vk_info = vk::PipelineLayoutCreateInfo::builder()
let vk_info = vk::PipelineLayoutCreateInfo::default()
.flags(vk::PipelineLayoutCreateFlags::empty())
.set_layouts(&vk_set_layouts)
.push_constant_ranges(&vk_push_constant_ranges);
@ -1414,10 +1383,7 @@ impl crate::Device for super::Device {
};
if let Some(label) = desc.label {
unsafe {
self.shared
.set_object_name(vk::ObjectType::PIPELINE_LAYOUT, raw, label)
};
unsafe { self.shared.set_object_name(raw, label) };
}
let mut binding_arrays = BTreeMap::new();
@ -1464,119 +1430,166 @@ impl crate::Device for super::Device {
let set = vk_sets.pop().unwrap();
if let Some(label) = desc.label {
unsafe { self.shared.set_object_name(*set.raw(), label) };
}
/// Helper for splitting off and initializing a given number of elements on a pre-allocated
/// stack, based on items returned from an [`ExactSizeIterator`]. Typically created from a
/// [`MaybeUninit`] slice (see [`Vec::spare_capacity_mut()`]).
/// The updated [`ExtendStack`] of remaining uninitialized elements is returned, safely
/// representing that the initialized and remaining elements are two independent mutable
/// borrows.
struct ExtendStack<'a, T> {
remainder: &'a mut [MaybeUninit<T>],
}
impl<'a, T> ExtendStack<'a, T> {
fn from_vec_capacity(vec: &'a mut Vec<T>) -> Self {
Self {
remainder: vec.spare_capacity_mut(),
}
}
fn extend_one(self, value: T) -> (Self, &'a mut T) {
let (to_init, remainder) = self.remainder.split_first_mut().unwrap();
let init = to_init.write(value);
(Self { remainder }, init)
}
fn extend(
self,
iter: impl IntoIterator<Item = T> + ExactSizeIterator,
) -> (Self, &'a mut [T]) {
let (to_init, remainder) = self.remainder.split_at_mut(iter.len());
for (value, to_init) in iter.into_iter().zip(to_init.iter_mut()) {
to_init.write(value);
}
// we can't use the safe (yet unstable) MaybeUninit::write_slice() here because of having an iterator to write
let init = {
#[allow(trivial_casts)]
// SAFETY: The loop above has initialized exactly as many items as to_init is
// long, so it is safe to cast away the MaybeUninit<T> wrapper into T.
// Additional safety docs from unstable slice_assume_init_mut
// SAFETY: similar to safety notes for `slice_get_ref`, but we have a
// mutable reference which is also guaranteed to be valid for writes.
unsafe {
self.shared
.set_object_name(vk::ObjectType::DESCRIPTOR_SET, *set.raw(), label)
&mut *(to_init as *mut [MaybeUninit<T>] as *mut [T])
}
};
(Self { remainder }, init)
}
}
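// Usage sketch (illustrative; mirrors the descriptor writes below): the stack wraps the
// Vec's pre-allocated spare capacity and hands back each written chunk as its own
// `&mut` slice, so a `vk::WriteDescriptorSet` can keep borrowing that chunk while later
// iterations continue filling the remainder (the two are independent mutable borrows):
//
//     let mut infos = Vec::with_capacity(total);
//     let mut stack = ExtendStack::from_vec_capacity(&mut infos);
//     let local; // `&mut [vk::DescriptorBufferInfo]` for this binding only
//     (stack, local) = stack.extend(per_binding_infos);
//     write = write.buffer_info(local);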
let mut writes = Vec::with_capacity(desc.entries.len());
let mut buffer_infos = Vec::with_capacity(desc.buffers.len());
let mut sampler_infos = Vec::with_capacity(desc.samplers.len());
let mut image_infos = Vec::with_capacity(desc.textures.len());
let mut buffer_infos = ExtendStack::from_vec_capacity(&mut buffer_infos);
let mut image_infos = Vec::with_capacity(desc.samplers.len() + desc.textures.len());
let mut image_infos = ExtendStack::from_vec_capacity(&mut image_infos);
// TODO: This length could be reduced to just the number of top-level acceleration
// structure bindings, where multiple consecutive TLAS bindings that are set via
// one `WriteDescriptorSet` count towards one "info" struct, not the total number of
// acceleration structure bindings to write:
let mut acceleration_structure_infos =
Vec::with_capacity(desc.acceleration_structures.len());
let mut acceleration_structure_infos =
ExtendStack::from_vec_capacity(&mut acceleration_structure_infos);
let mut raw_acceleration_structures =
Vec::with_capacity(desc.acceleration_structures.len());
let mut raw_acceleration_structures =
ExtendStack::from_vec_capacity(&mut raw_acceleration_structures);
for entry in desc.entries {
let (ty, size) = desc.layout.types[entry.binding as usize];
if size == 0 {
continue; // empty slot
}
let mut write = vk::WriteDescriptorSet::builder()
let mut write = vk::WriteDescriptorSet::default()
.dst_set(*set.raw())
.dst_binding(entry.binding)
.descriptor_type(ty);
let mut extra_descriptor_count = 0;
write = match ty {
vk::DescriptorType::SAMPLER => {
let index = sampler_infos.len();
let start = entry.resource_index;
let end = start + entry.count;
sampler_infos.extend(desc.samplers[start as usize..end as usize].iter().map(
|binding| {
vk::DescriptorImageInfo::builder()
.sampler(binding.raw)
.build()
},
let local_image_infos;
(image_infos, local_image_infos) =
image_infos.extend(desc.samplers[start as usize..end as usize].iter().map(
|sampler| vk::DescriptorImageInfo::default().sampler(sampler.raw),
));
write.image_info(&sampler_infos[index..])
write.image_info(local_image_infos)
}
vk::DescriptorType::SAMPLED_IMAGE | vk::DescriptorType::STORAGE_IMAGE => {
let index = image_infos.len();
let start = entry.resource_index;
let end = start + entry.count;
let local_image_infos;
(image_infos, local_image_infos) =
image_infos.extend(desc.textures[start as usize..end as usize].iter().map(
|binding| {
let layout = conv::derive_image_layout(
binding.usage,
binding.view.attachment.view_format,
);
vk::DescriptorImageInfo::builder()
vk::DescriptorImageInfo::default()
.image_view(binding.view.raw)
.image_layout(layout)
.build()
},
));
write.image_info(&image_infos[index..])
write.image_info(local_image_infos)
}
vk::DescriptorType::UNIFORM_BUFFER
| vk::DescriptorType::UNIFORM_BUFFER_DYNAMIC
| vk::DescriptorType::STORAGE_BUFFER
| vk::DescriptorType::STORAGE_BUFFER_DYNAMIC => {
let index = buffer_infos.len();
let start = entry.resource_index;
let end = start + entry.count;
let local_buffer_infos;
(buffer_infos, local_buffer_infos) =
buffer_infos.extend(desc.buffers[start as usize..end as usize].iter().map(
|binding| {
vk::DescriptorBufferInfo::builder()
vk::DescriptorBufferInfo::default()
.buffer(binding.buffer.raw)
.offset(binding.offset)
.range(binding.size.map_or(vk::WHOLE_SIZE, wgt::BufferSize::get))
.build()
.range(
binding.size.map_or(vk::WHOLE_SIZE, wgt::BufferSize::get),
)
},
));
write.buffer_info(&buffer_infos[index..])
write.buffer_info(local_buffer_infos)
}
vk::DescriptorType::ACCELERATION_STRUCTURE_KHR => {
let index = acceleration_structure_infos.len();
let start = entry.resource_index;
let end = start + entry.count;
let raw_start = raw_acceleration_structures.len();
raw_acceleration_structures.extend(
let local_raw_acceleration_structures;
(
raw_acceleration_structures,
local_raw_acceleration_structures,
) = raw_acceleration_structures.extend(
desc.acceleration_structures[start as usize..end as usize]
.iter()
.map(|acceleration_structure| acceleration_structure.raw),
);
let acceleration_structure_info =
vk::WriteDescriptorSetAccelerationStructureKHR::builder()
.acceleration_structures(&raw_acceleration_structures[raw_start..]);
// todo: Dereference the struct to get around lifetime issues. Safe as long as we never resize
// `raw_acceleration_structures`.
let acceleration_structure_info: vk::WriteDescriptorSetAccelerationStructureKHR = *acceleration_structure_info;
assert!(
index < desc.acceleration_structures.len(),
"Encountered more acceleration structures then expected"
let local_acceleration_structure_infos;
(
acceleration_structure_infos,
local_acceleration_structure_infos,
) = acceleration_structure_infos.extend_one(
vk::WriteDescriptorSetAccelerationStructureKHR::default()
.acceleration_structures(local_raw_acceleration_structures),
);
acceleration_structure_infos.push(acceleration_structure_info);
extra_descriptor_count += 1;
write.push_next(&mut acceleration_structure_infos[index])
write
.descriptor_count(entry.count)
.push_next(local_acceleration_structure_infos)
}
_ => unreachable!(),
};
let mut write = write.build();
write.descriptor_count += extra_descriptor_count;
writes.push(write);
}
@ -1643,10 +1656,7 @@ impl crate::Device for super::Device {
let raw = self.create_shader_module_impl(&spv)?;
if let Some(label) = desc.label {
unsafe {
self.shared
.set_object_name(vk::ObjectType::SHADER_MODULE, raw, label)
};
unsafe { self.shared.set_object_name(raw, label) };
}
Ok(super::ShaderModule::Raw(raw))
@ -1698,15 +1708,13 @@ impl crate::Device for super::Device {
}
}
let vk_vertex_input = vk::PipelineVertexInputStateCreateInfo::builder()
let vk_vertex_input = vk::PipelineVertexInputStateCreateInfo::default()
.vertex_binding_descriptions(&vertex_buffers)
.vertex_attribute_descriptions(&vertex_attributes)
.build();
.vertex_attribute_descriptions(&vertex_attributes);
let vk_input_assembly = vk::PipelineInputAssemblyStateCreateInfo::builder()
let vk_input_assembly = vk::PipelineInputAssemblyStateCreateInfo::default()
.topology(conv::map_topology(desc.primitive.topology))
.primitive_restart_enable(desc.primitive.strip_index_format.is_some())
.build();
.primitive_restart_enable(desc.primitive.strip_index_format.is_some());
let compiled_vs = self.compile_stage(
&desc.vertex_stage,
@ -1727,7 +1735,7 @@ impl crate::Device for super::Device {
None => None,
};
let mut vk_rasterization = vk::PipelineRasterizationStateCreateInfo::builder()
let mut vk_rasterization = vk::PipelineRasterizationStateCreateInfo::default()
.polygon_mode(conv::map_polygon_mode(desc.primitive.polygon_mode))
.front_face(conv::map_front_face(desc.primitive.front_face))
.line_width(1.0)
@ -1736,14 +1744,15 @@ impl crate::Device for super::Device {
vk_rasterization = vk_rasterization.cull_mode(conv::map_cull_face(face))
}
let mut vk_rasterization_conservative_state =
vk::PipelineRasterizationConservativeStateCreateInfoEXT::builder()
.conservative_rasterization_mode(vk::ConservativeRasterizationModeEXT::OVERESTIMATE)
.build();
vk::PipelineRasterizationConservativeStateCreateInfoEXT::default()
.conservative_rasterization_mode(
vk::ConservativeRasterizationModeEXT::OVERESTIMATE,
);
if desc.primitive.conservative {
vk_rasterization = vk_rasterization.push_next(&mut vk_rasterization_conservative_state);
}
let mut vk_depth_stencil = vk::PipelineDepthStencilStateCreateInfo::builder();
let mut vk_depth_stencil = vk::PipelineDepthStencilStateCreateInfo::default();
if let Some(ref ds) = desc.depth_stencil {
let vk_format = self.shared.private_caps.map_texture_format(ds.format);
let vk_layout = if ds.is_read_only(desc.primitive.cull_mode) {
@ -1781,26 +1790,24 @@ impl crate::Device for super::Device {
}
}
let vk_viewport = vk::PipelineViewportStateCreateInfo::builder()
let vk_viewport = vk::PipelineViewportStateCreateInfo::default()
.flags(vk::PipelineViewportStateCreateFlags::empty())
.scissor_count(1)
.viewport_count(1)
.build();
.viewport_count(1);
let vk_sample_mask = [
desc.multisample.mask as u32,
(desc.multisample.mask >> 32) as u32,
];
let vk_multisample = vk::PipelineMultisampleStateCreateInfo::builder()
let vk_multisample = vk::PipelineMultisampleStateCreateInfo::default()
.rasterization_samples(vk::SampleCountFlags::from_raw(desc.multisample.count))
.alpha_to_coverage_enable(desc.multisample.alpha_to_coverage_enabled)
.sample_mask(&vk_sample_mask)
.build();
.sample_mask(&vk_sample_mask);
let mut vk_attachments = Vec::with_capacity(desc.color_targets.len());
for cat in desc.color_targets {
let (key, attarchment) = if let Some(cat) = cat.as_ref() {
let mut vk_attachment = vk::PipelineColorBlendAttachmentState::builder()
let mut vk_attachment = vk::PipelineColorBlendAttachmentState::default()
.color_write_mask(vk::ColorComponentFlags::from_raw(cat.write_mask.bits()));
if let Some(ref blend) = cat.blend {
let (color_op, color_src, color_dst) = conv::map_blend_component(&blend.color);
@ -1824,7 +1831,7 @@ impl crate::Device for super::Device {
),
resolve: None,
}),
vk_attachment.build(),
vk_attachment,
)
} else {
(None, vk::PipelineColorBlendAttachmentState::default())
@ -1834,13 +1841,11 @@ impl crate::Device for super::Device {
vk_attachments.push(attarchment);
}
let vk_color_blend = vk::PipelineColorBlendStateCreateInfo::builder()
.attachments(&vk_attachments)
.build();
let vk_color_blend =
vk::PipelineColorBlendStateCreateInfo::default().attachments(&vk_attachments);
let vk_dynamic_state = vk::PipelineDynamicStateCreateInfo::builder()
.dynamic_states(&dynamic_states)
.build();
let vk_dynamic_state =
vk::PipelineDynamicStateCreateInfo::default().dynamic_states(&dynamic_states);
let raw_pass = self
.shared
@ -1848,7 +1853,7 @@ impl crate::Device for super::Device {
.map_err(crate::DeviceError::from)?;
let vk_infos = [{
vk::GraphicsPipelineCreateInfo::builder()
vk::GraphicsPipelineCreateInfo::default()
.layout(desc.layout.raw)
.stages(&stages)
.vertex_input_state(&vk_vertex_input)
@ -1860,7 +1865,6 @@ impl crate::Device for super::Device {
.color_blend_state(&vk_color_blend)
.dynamic_state(&vk_dynamic_state)
.render_pass(raw_pass)
.build()
}];
let mut raw_vec = {
@ -1875,10 +1879,7 @@ impl crate::Device for super::Device {
let raw = raw_vec.pop().unwrap();
if let Some(label) = desc.label {
unsafe {
self.shared
.set_object_name(vk::ObjectType::PIPELINE, raw, label)
};
unsafe { self.shared.set_object_name(raw, label) };
}
if let Some(raw_module) = compiled_vs.temp_raw_module {
@ -1909,10 +1910,9 @@ impl crate::Device for super::Device {
)?;
let vk_infos = [{
vk::ComputePipelineCreateInfo::builder()
vk::ComputePipelineCreateInfo::default()
.layout(desc.layout.raw)
.stage(compiled.create_info)
.build()
}];
let mut raw_vec = {
@ -1927,10 +1927,7 @@ impl crate::Device for super::Device {
let raw = raw_vec.pop().unwrap();
if let Some(label) = desc.label {
unsafe {
self.shared
.set_object_name(vk::ObjectType::PIPELINE, raw, label)
};
unsafe { self.shared.set_object_name(raw, label) };
}
if let Some(raw_module) = compiled.temp_raw_module {
@ -1962,18 +1959,14 @@ impl crate::Device for super::Device {
),
};
let vk_info = vk::QueryPoolCreateInfo::builder()
let vk_info = vk::QueryPoolCreateInfo::default()
.query_type(vk_type)
.query_count(desc.count)
.pipeline_statistics(pipeline_statistics)
.build();
.pipeline_statistics(pipeline_statistics);
let raw = unsafe { self.shared.raw.create_query_pool(&vk_info, None) }?;
if let Some(label) = desc.label {
unsafe {
self.shared
.set_object_name(vk::ObjectType::QUERY_POOL, raw, label)
};
unsafe { self.shared.set_object_name(raw, label) };
}
Ok(super::QuerySet { raw })
@ -1985,8 +1978,8 @@ impl crate::Device for super::Device {
unsafe fn create_fence(&self) -> Result<super::Fence, crate::DeviceError> {
Ok(if self.shared.private_caps.timeline_semaphores {
let mut sem_type_info =
vk::SemaphoreTypeCreateInfo::builder().semaphore_type(vk::SemaphoreType::TIMELINE);
let vk_info = vk::SemaphoreCreateInfo::builder().push_next(&mut sem_type_info);
vk::SemaphoreTypeCreateInfo::default().semaphore_type(vk::SemaphoreType::TIMELINE);
let vk_info = vk::SemaphoreCreateInfo::default().push_next(&mut sem_type_info);
let raw = unsafe { self.shared.raw.create_semaphore(&vk_info, None) }?;
super::Fence::TimelineSemaphore(raw)
} else {
@ -2036,7 +2029,7 @@ impl crate::Device for super::Device {
super::Fence::TimelineSemaphore(raw) => {
let semaphores = [raw];
let values = [wait_value];
let vk_info = vk::SemaphoreWaitInfo::builder()
let vk_info = vk::SemaphoreWaitInfo::default()
.semaphores(&semaphores)
.values(&values);
let result = match self.shared.extension_fns.timeline_semaphore {
@ -2129,14 +2122,14 @@ impl crate::Device for super::Device {
crate::AccelerationStructureEntries::Instances(ref instances) => {
let instance_data = vk::AccelerationStructureGeometryInstancesDataKHR::default();
let geometry = vk::AccelerationStructureGeometryKHR::builder()
let geometry = vk::AccelerationStructureGeometryKHR::default()
.geometry_type(vk::GeometryTypeKHR::INSTANCES)
.geometry(vk::AccelerationStructureGeometryDataKHR {
instances: instance_data,
});
(
smallvec::smallvec![*geometry],
smallvec::smallvec![geometry],
smallvec::smallvec![instances.count],
)
}
@ -2149,7 +2142,7 @@ impl crate::Device for super::Device {
for triangles in in_geometries {
let mut triangle_data =
vk::AccelerationStructureGeometryTrianglesDataKHR::builder()
vk::AccelerationStructureGeometryTrianglesDataKHR::default()
.vertex_format(conv::map_vertex_format(triangles.vertex_format))
.max_vertex(triangles.vertex_count)
.vertex_stride(triangles.vertex_stride);
@ -2162,16 +2155,16 @@ impl crate::Device for super::Device {
triangles.vertex_count
};
let geometry = vk::AccelerationStructureGeometryKHR::builder()
let geometry = vk::AccelerationStructureGeometryKHR::default()
.geometry_type(vk::GeometryTypeKHR::TRIANGLES)
.geometry(vk::AccelerationStructureGeometryDataKHR {
triangles: *triangle_data,
triangles: triangle_data,
})
.flags(conv::map_acceleration_structure_geometry_flags(
triangles.flags,
));
geometries.push(*geometry);
geometries.push(geometry);
primitive_counts.push(pritive_count);
}
(geometries, primitive_counts)
@ -2183,15 +2176,15 @@ impl crate::Device for super::Device {
[vk::AccelerationStructureGeometryKHR; CAPACITY],
>::with_capacity(in_geometries.len());
for aabb in in_geometries {
let aabbs_data = vk::AccelerationStructureGeometryAabbsDataKHR::builder()
let aabbs_data = vk::AccelerationStructureGeometryAabbsDataKHR::default()
.stride(aabb.stride);
let geometry = vk::AccelerationStructureGeometryKHR::builder()
let geometry = vk::AccelerationStructureGeometryKHR::default()
.geometry_type(vk::GeometryTypeKHR::AABBS)
.geometry(vk::AccelerationStructureGeometryDataKHR { aabbs: *aabbs_data })
.geometry(vk::AccelerationStructureGeometryDataKHR { aabbs: aabbs_data })
.flags(conv::map_acceleration_structure_geometry_flags(aabb.flags));
geometries.push(*geometry);
geometries.push(geometry);
primitive_counts.push(aabb.count);
}
(geometries, primitive_counts)
@ -2205,20 +2198,22 @@ impl crate::Device for super::Device {
_ => vk::AccelerationStructureTypeKHR::BOTTOM_LEVEL,
};
let geometry_info = vk::AccelerationStructureBuildGeometryInfoKHR::builder()
let geometry_info = vk::AccelerationStructureBuildGeometryInfoKHR::default()
.ty(ty)
.flags(conv::map_acceleration_structure_flags(desc.flags))
.geometries(&geometries);
let raw = unsafe {
let mut raw = Default::default();
unsafe {
ray_tracing_functions
.acceleration_structure
.get_acceleration_structure_build_sizes(
vk::AccelerationStructureBuildTypeKHR::DEVICE,
&geometry_info,
&primitive_counts,
&mut raw,
)
};
}
crate::AccelerationStructureBuildSizes {
acceleration_structure_size: raw.acceleration_structure_size,
@ -2242,7 +2237,7 @@ impl crate::Device for super::Device {
ray_tracing_functions
.acceleration_structure
.get_acceleration_structure_device_address(
&vk::AccelerationStructureDeviceAddressInfoKHR::builder()
&vk::AccelerationStructureDeviceAddressInfoKHR::default()
.acceleration_structure(acceleration_structure.raw),
)
}
@ -2259,7 +2254,7 @@ impl crate::Device for super::Device {
.as_ref()
.expect("Feature `RAY_TRACING` not enabled");
let vk_buffer_info = vk::BufferCreateInfo::builder()
let vk_buffer_info = vk::BufferCreateInfo::default()
.size(desc.size)
.usage(vk::BufferUsageFlags::ACCELERATION_STRUCTURE_STORAGE_KHR)
.sharing_mode(vk::SharingMode::EXCLUSIVE);
@ -2283,11 +2278,10 @@ impl crate::Device for super::Device {
.bind_buffer_memory(raw_buffer, *block.memory(), block.offset())?;
if let Some(label) = desc.label {
self.shared
.set_object_name(vk::ObjectType::BUFFER, raw_buffer, label);
self.shared.set_object_name(raw_buffer, label);
}
let vk_info = vk::AccelerationStructureCreateInfoKHR::builder()
let vk_info = vk::AccelerationStructureCreateInfoKHR::default()
.buffer(raw_buffer)
.offset(0)
.size(desc.size)
@ -2298,11 +2292,8 @@ impl crate::Device for super::Device {
.create_acceleration_structure(&vk_info, None)?;
if let Some(label) = desc.label {
self.shared.set_object_name(
vk::ObjectType::ACCELERATION_STRUCTURE_KHR,
raw_acceleration_structure,
label,
);
self.shared
.set_object_name(raw_acceleration_structure, label);
}
Ok(super::AccelerationStructure {


@ -7,10 +7,7 @@ use std::{
};
use arrayvec::ArrayVec;
use ash::{
extensions::{ext, khr},
vk,
};
use ash::{ext, khr, vk};
use parking_lot::RwLock;
unsafe extern "system" fn debug_utils_messenger_callback(
@ -33,10 +30,10 @@ unsafe extern "system" fn debug_utils_messenger_callback(
// https://github.com/KhronosGroup/Vulkan-ValidationLayers/issues/5671
// Versions 1.3.240 through 1.3.250 return a spurious error here if
// the debug range start and end appear in different command buffers.
let khronos_validation_layer =
CStr::from_bytes_with_nul(b"Khronos Validation Layer\0").unwrap();
const KHRONOS_VALIDATION_LAYER: &CStr =
unsafe { CStr::from_bytes_with_nul_unchecked(b"Khronos Validation Layer\0") };
if let Some(layer_properties) = user_data.validation_layer_properties.as_ref() {
if layer_properties.layer_description.as_ref() == khronos_validation_layer
if layer_properties.layer_description.as_ref() == KHRONOS_VALIDATION_LAYER
&& layer_properties.layer_spec_version >= vk::make_api_version(0, 1, 3, 240)
&& layer_properties.layer_spec_version <= vk::make_api_version(0, 1, 3, 250)
{
@ -47,7 +44,7 @@ unsafe extern "system" fn debug_utils_messenger_callback(
// Silence Vulkan Validation error "VUID-VkSwapchainCreateInfoKHR-pNext-07781"
// This happens when a surface is configured with a size outside the allowed extent.
// It's s false positive due to the inherent racy-ness of surface resizing.
// It's a false positive due to the inherent racy-ness of surface resizing.
const VUID_VKSWAPCHAINCREATEINFOKHR_PNEXT_07781: i32 = 0x4c8929c1;
if cd.message_id_number == VUID_VKSWAPCHAINCREATEINFOKHR_PNEXT_07781 {
return vk::FALSE;
@ -74,16 +71,9 @@ unsafe extern "system" fn debug_utils_messenger_callback(
_ => log::Level::Warn,
};
let message_id_name = if cd.p_message_id_name.is_null() {
Cow::from("")
} else {
unsafe { CStr::from_ptr(cd.p_message_id_name) }.to_string_lossy()
};
let message = if cd.p_message.is_null() {
Cow::from("")
} else {
unsafe { CStr::from_ptr(cd.p_message) }.to_string_lossy()
};
let message_id_name =
unsafe { cd.message_id_name_as_c_str() }.map_or(Cow::Borrowed(""), CStr::to_string_lossy);
let message = unsafe { cd.message_as_c_str() }.map_or(Cow::Borrowed(""), CStr::to_string_lossy);
let _ = std::panic::catch_unwind(|| {
log::log!(
@ -101,10 +91,7 @@ unsafe extern "system" fn debug_utils_messenger_callback(
unsafe { slice::from_raw_parts(cd.p_queue_labels, cd.queue_label_count as usize) };
let names = labels
.iter()
.flat_map(|dul_obj| {
unsafe { dul_obj.p_label_name.as_ref() }
.map(|lbl| unsafe { CStr::from_ptr(lbl) }.to_string_lossy())
})
.flat_map(|dul_obj| unsafe { dul_obj.label_name_as_c_str() }.map(CStr::to_string_lossy))
.collect::<Vec<_>>();
let _ = std::panic::catch_unwind(|| {
@ -117,10 +104,7 @@ unsafe extern "system" fn debug_utils_messenger_callback(
unsafe { slice::from_raw_parts(cd.p_cmd_buf_labels, cd.cmd_buf_label_count as usize) };
let names = labels
.iter()
.flat_map(|dul_obj| {
unsafe { dul_obj.p_label_name.as_ref() }
.map(|lbl| unsafe { CStr::from_ptr(lbl) }.to_string_lossy())
})
.flat_map(|dul_obj| unsafe { dul_obj.label_name_as_c_str() }.map(CStr::to_string_lossy))
.collect::<Vec<_>>();
let _ = std::panic::catch_unwind(|| {
@ -134,9 +118,8 @@ unsafe extern "system" fn debug_utils_messenger_callback(
let names = labels
.iter()
.map(|obj_info| {
let name = unsafe { obj_info.p_object_name.as_ref() }
.map(|name| unsafe { CStr::from_ptr(name) }.to_string_lossy())
.unwrap_or(Cow::Borrowed("?"));
let name = unsafe { obj_info.object_name_as_c_str() }
.map_or(Cow::Borrowed("?"), CStr::to_string_lossy);
format!(
"(type: {:?}, hndl: 0x{:x}, name: {})",
@ -158,9 +141,9 @@ unsafe extern "system" fn debug_utils_messenger_callback(
}
impl super::DebugUtilsCreateInfo {
fn to_vk_create_info(&self) -> vk::DebugUtilsMessengerCreateInfoEXTBuilder<'_> {
fn to_vk_create_info(&self) -> vk::DebugUtilsMessengerCreateInfoEXT<'_> {
let user_data_ptr: *const super::DebugUtilsMessengerUserData = &*self.callback_data;
vk::DebugUtilsMessengerCreateInfoEXT::builder()
vk::DebugUtilsMessengerCreateInfoEXT::default()
.message_severity(self.severity)
.message_type(self.message_type)
.user_data(user_data_ptr as *mut _)
@ -220,7 +203,7 @@ impl super::Instance {
) -> Result<Vec<vk::ExtensionProperties>, crate::InstanceError> {
let instance_extensions = {
profiling::scope!("vkEnumerateInstanceExtensionProperties");
entry.enumerate_instance_extension_properties(layer_name)
unsafe { entry.enumerate_instance_extension_properties(layer_name) }
};
instance_extensions.map_err(|e| {
crate::InstanceError::with_source(
@ -254,7 +237,7 @@ impl super::Instance {
let mut extensions: Vec<&'static CStr> = Vec::new();
// VK_KHR_surface
extensions.push(khr::Surface::name());
extensions.push(khr::surface::NAME);
// Platform-specific WSI extensions
if cfg!(all(
@ -263,45 +246,46 @@ impl super::Instance {
not(target_os = "macos")
)) {
// VK_KHR_xlib_surface
extensions.push(khr::XlibSurface::name());
extensions.push(khr::xlib_surface::NAME);
// VK_KHR_xcb_surface
extensions.push(khr::XcbSurface::name());
extensions.push(khr::xcb_surface::NAME);
// VK_KHR_wayland_surface
extensions.push(khr::WaylandSurface::name());
extensions.push(khr::wayland_surface::NAME);
}
if cfg!(target_os = "android") {
// VK_KHR_android_surface
extensions.push(khr::AndroidSurface::name());
extensions.push(khr::android_surface::NAME);
}
if cfg!(target_os = "windows") {
// VK_KHR_win32_surface
extensions.push(khr::Win32Surface::name());
extensions.push(khr::win32_surface::NAME);
}
if cfg!(target_os = "macos") {
// VK_EXT_metal_surface
extensions.push(ext::MetalSurface::name());
extensions.push(vk::KhrPortabilityEnumerationFn::name());
extensions.push(ext::metal_surface::NAME);
extensions.push(khr::portability_enumeration::NAME);
}
if flags.contains(wgt::InstanceFlags::DEBUG) {
// VK_EXT_debug_utils
extensions.push(ext::DebugUtils::name());
extensions.push(ext::debug_utils::NAME);
}
// VK_EXT_swapchain_colorspace
// Provides wide color gamut
extensions.push(vk::ExtSwapchainColorspaceFn::name());
extensions.push(ext::swapchain_colorspace::NAME);
// VK_KHR_get_physical_device_properties2
// Even though the extension was promoted to Vulkan 1.1, we still require the extension
// so that we don't have to conditionally use the functions provided by the 1.1 instance
extensions.push(vk::KhrGetPhysicalDeviceProperties2Fn::name());
extensions.push(khr::get_physical_device_properties2::NAME);
// Only keep available extensions.
extensions.retain(|&ext| {
if instance_extensions.iter().any(|inst_ext| {
crate::auxil::cstr_from_bytes_until_nul(&inst_ext.extension_name) == Some(ext)
}) {
if instance_extensions
.iter()
.any(|inst_ext| inst_ext.extension_name_as_c_str() == Ok(ext))
{
true
} else {
log::warn!("Unable to find extension: {}", ext.to_string_lossy());
@ -336,10 +320,10 @@ impl super::Instance {
log::debug!("Instance version: 0x{:x}", instance_api_version);
let debug_utils = if let Some(debug_utils_create_info) = debug_utils_create_info {
if extensions.contains(&ext::DebugUtils::name()) {
if extensions.contains(&ext::debug_utils::NAME) {
log::info!("Enabling debug utils");
let extension = ext::DebugUtils::new(&entry, &raw_instance);
let extension = ext::debug_utils::Instance::new(&entry, &raw_instance);
let vk_info = debug_utils_create_info.to_vk_create_info();
let messenger =
unsafe { extension.create_debug_utils_messenger(&vk_info, None) }.unwrap();
@ -362,9 +346,9 @@ impl super::Instance {
};
let get_physical_device_properties =
if extensions.contains(&khr::GetPhysicalDeviceProperties2::name()) {
if extensions.contains(&khr::get_physical_device_properties2::NAME) {
log::debug!("Enabling device properties2");
Some(khr::GetPhysicalDeviceProperties2::new(
Some(khr::get_physical_device_properties2::Instance::new(
&entry,
&raw_instance,
))
@ -388,21 +372,21 @@ impl super::Instance {
})
}
#[allow(dead_code)]
fn create_surface_from_xlib(
&self,
dpy: *mut vk::Display,
window: vk::Window,
) -> Result<super::Surface, crate::InstanceError> {
if !self.shared.extensions.contains(&khr::XlibSurface::name()) {
if !self.shared.extensions.contains(&khr::xlib_surface::NAME) {
return Err(crate::InstanceError::new(String::from(
"Vulkan driver does not support VK_KHR_xlib_surface",
)));
}
let surface = {
let xlib_loader = khr::XlibSurface::new(&self.shared.entry, &self.shared.raw);
let info = vk::XlibSurfaceCreateInfoKHR::builder()
let xlib_loader =
khr::xlib_surface::Instance::new(&self.shared.entry, &self.shared.raw);
let info = vk::XlibSurfaceCreateInfoKHR::default()
.flags(vk::XlibSurfaceCreateFlagsKHR::empty())
.window(window)
.dpy(dpy);
@ -414,21 +398,20 @@ impl super::Instance {
Ok(self.create_surface_from_vk_surface_khr(surface))
}
#[allow(dead_code)]
fn create_surface_from_xcb(
&self,
connection: *mut vk::xcb_connection_t,
window: vk::xcb_window_t,
) -> Result<super::Surface, crate::InstanceError> {
if !self.shared.extensions.contains(&khr::XcbSurface::name()) {
if !self.shared.extensions.contains(&khr::xcb_surface::NAME) {
return Err(crate::InstanceError::new(String::from(
"Vulkan driver does not support VK_KHR_xcb_surface",
)));
}
let surface = {
let xcb_loader = khr::XcbSurface::new(&self.shared.entry, &self.shared.raw);
let info = vk::XcbSurfaceCreateInfoKHR::builder()
let xcb_loader = khr::xcb_surface::Instance::new(&self.shared.entry, &self.shared.raw);
let info = vk::XcbSurfaceCreateInfoKHR::default()
.flags(vk::XcbSurfaceCreateFlagsKHR::empty())
.window(window)
.connection(connection);
@ -440,25 +423,21 @@ impl super::Instance {
Ok(self.create_surface_from_vk_surface_khr(surface))
}
#[allow(dead_code)]
fn create_surface_from_wayland(
&self,
display: *mut c_void,
surface: *mut c_void,
display: *mut vk::wl_display,
surface: *mut vk::wl_surface,
) -> Result<super::Surface, crate::InstanceError> {
if !self
.shared
.extensions
.contains(&khr::WaylandSurface::name())
{
if !self.shared.extensions.contains(&khr::wayland_surface::NAME) {
return Err(crate::InstanceError::new(String::from(
"Vulkan driver does not support VK_KHR_wayland_surface",
)));
}
let surface = {
let w_loader = khr::WaylandSurface::new(&self.shared.entry, &self.shared.raw);
let info = vk::WaylandSurfaceCreateInfoKHR::builder()
let w_loader =
khr::wayland_surface::Instance::new(&self.shared.entry, &self.shared.raw);
let info = vk::WaylandSurfaceCreateInfoKHR::default()
.flags(vk::WaylandSurfaceCreateFlagsKHR::empty())
.display(display)
.surface(surface);
@ -469,26 +448,22 @@ impl super::Instance {
Ok(self.create_surface_from_vk_surface_khr(surface))
}
#[allow(dead_code)]
fn create_surface_android(
&self,
window: *const c_void,
window: *mut vk::ANativeWindow,
) -> Result<super::Surface, crate::InstanceError> {
if !self
.shared
.extensions
.contains(&khr::AndroidSurface::name())
{
if !self.shared.extensions.contains(&khr::android_surface::NAME) {
return Err(crate::InstanceError::new(String::from(
"Vulkan driver does not support VK_KHR_android_surface",
)));
}
let surface = {
let a_loader = khr::AndroidSurface::new(&self.shared.entry, &self.shared.raw);
let info = vk::AndroidSurfaceCreateInfoKHR::builder()
let a_loader =
khr::android_surface::Instance::new(&self.shared.entry, &self.shared.raw);
let info = vk::AndroidSurfaceCreateInfoKHR::default()
.flags(vk::AndroidSurfaceCreateFlagsKHR::empty())
.window(window as *mut _);
.window(window);
unsafe { a_loader.create_android_surface(&info, None) }.expect("AndroidSurface failed")
};
@ -496,24 +471,24 @@ impl super::Instance {
Ok(self.create_surface_from_vk_surface_khr(surface))
}
#[allow(dead_code)]
fn create_surface_from_hwnd(
&self,
hinstance: *mut c_void,
hwnd: *mut c_void,
hinstance: vk::HINSTANCE,
hwnd: vk::HWND,
) -> Result<super::Surface, crate::InstanceError> {
if !self.shared.extensions.contains(&khr::Win32Surface::name()) {
if !self.shared.extensions.contains(&khr::win32_surface::NAME) {
return Err(crate::InstanceError::new(String::from(
"Vulkan driver does not support VK_KHR_win32_surface",
)));
}
let surface = {
let info = vk::Win32SurfaceCreateInfoKHR::builder()
let info = vk::Win32SurfaceCreateInfoKHR::default()
.flags(vk::Win32SurfaceCreateFlagsKHR::empty())
.hinstance(hinstance)
.hwnd(hwnd);
let win32_loader = khr::Win32Surface::new(&self.shared.entry, &self.shared.raw);
let win32_loader =
khr::win32_surface::Instance::new(&self.shared.entry, &self.shared.raw);
unsafe {
win32_loader
.create_win32_surface(&info, None)
@ -529,7 +504,7 @@ impl super::Instance {
&self,
view: *mut c_void,
) -> Result<super::Surface, crate::InstanceError> {
if !self.shared.extensions.contains(&ext::MetalSurface::name()) {
if !self.shared.extensions.contains(&ext::metal_surface::NAME) {
return Err(crate::InstanceError::new(String::from(
"Vulkan driver does not support VK_EXT_metal_surface",
)));
@ -540,11 +515,11 @@ impl super::Instance {
};
let surface = {
let metal_loader = ext::MetalSurface::new(&self.shared.entry, &self.shared.raw);
let vk_info = vk::MetalSurfaceCreateInfoEXT::builder()
let metal_loader =
ext::metal_surface::Instance::new(&self.shared.entry, &self.shared.raw);
let vk_info = vk::MetalSurfaceCreateInfoEXT::default()
.flags(vk::MetalSurfaceCreateFlagsEXT::empty())
.layer(layer as *mut _)
.build();
.layer(layer as *mut _);
unsafe { metal_loader.create_metal_surface(&vk_info, None).unwrap() }
};
@ -553,7 +528,7 @@ impl super::Instance {
}
fn create_surface_from_vk_surface_khr(&self, surface: vk::SurfaceKHR) -> super::Surface {
let functor = khr::Surface::new(&self.shared.entry, &self.shared.raw);
let functor = khr::surface::Instance::new(&self.shared.entry, &self.shared.raw);
super::Surface {
raw: surface,
functor,
@ -584,7 +559,6 @@ impl crate::Instance for super::Instance {
unsafe fn init(desc: &crate::InstanceDescriptor) -> Result<Self, crate::InstanceError> {
profiling::scope!("Init Vulkan Backend");
use crate::auxil::cstr_from_bytes_until_nul;
let entry = unsafe {
profiling::scope!("Load vk library");
@ -595,7 +569,7 @@ impl crate::Instance for super::Instance {
})?;
let version = {
profiling::scope!("vkEnumerateInstanceVersion");
entry.try_enumerate_instance_version()
unsafe { entry.try_enumerate_instance_version() }
};
let instance_api_version = match version {
// Vulkan 1.1+
@ -610,7 +584,7 @@ impl crate::Instance for super::Instance {
};
let app_name = CString::new(desc.name).unwrap();
let app_info = vk::ApplicationInfo::builder()
let app_info = vk::ApplicationInfo::default()
.application_name(app_name.as_c_str())
.application_version(1)
.engine_name(CStr::from_bytes_with_nul(b"wgpu-hal\0").unwrap())
@ -636,7 +610,7 @@ impl crate::Instance for super::Instance {
let instance_layers = {
profiling::scope!("vkEnumerateInstanceLayerProperties");
entry.enumerate_instance_layer_properties()
unsafe { entry.enumerate_instance_layer_properties() }
};
let instance_layers = instance_layers.map_err(|e| {
log::debug!("enumerate_instance_layer_properties: {:?}", e);
@ -652,7 +626,7 @@ impl crate::Instance for super::Instance {
) -> Option<&'layers vk::LayerProperties> {
instance_layers
.iter()
.find(|inst_layer| cstr_from_bytes_until_nul(&inst_layer.layer_name) == Some(name))
.find(|inst_layer| inst_layer.layer_name_as_c_str() == Ok(name))
}
let validation_layer_name =
@ -668,9 +642,9 @@ impl crate::Instance for super::Instance {
// Convert all the names of the extensions into an iterator of CStrs.
let mut ext_names = exts
.iter()
.filter_map(|ext| cstr_from_bytes_until_nul(&ext.extension_name));
.filter_map(|ext| ext.extension_name_as_c_str().ok());
// Find the validation features extension.
ext_names.any(|ext_name| ext_name == vk::ExtValidationFeaturesFn::name())
ext_names.any(|ext_name| ext_name == ext::validation_features::NAME)
} else {
false
};
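The `extension_name_as_c_str()` call used here is new in ash 0.38: the fixed-size `c_char` name arrays on properties structs grew fallible `*_as_c_str()` accessors, which is what lets this commit drop wgpu-hal's own `cstr_from_bytes_until_nul` helper. A small sketch of the lookup pattern (the helper name is illustrative, not from the commit):

use std::ffi::CStr;
use ash::vk;

// Returns true if `wanted` is among the enumerated instance extensions.
fn has_instance_extension(available: &[vk::ExtensionProperties], wanted: &CStr) -> bool {
    available
        .iter()
        // `extension_name_as_c_str()` only fails if the array holds no NUL terminator.
        .any(|ext| ext.extension_name_as_c_str() == Ok(wanted))
}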
@ -688,7 +662,7 @@ impl crate::Instance for super::Instance {
let mut layers: Vec<&'static CStr> = Vec::new();
let has_debug_extension = extensions.contains(&ext::DebugUtils::name());
let has_debug_extension = extensions.contains(&ext::debug_utils::NAME);
let mut debug_user_data = has_debug_extension.then(|| {
// Put the callback data on the heap, to ensure it will never be
// moved.
@ -708,9 +682,8 @@ impl crate::Instance for super::Instance {
if let Some(debug_user_data) = debug_user_data.as_mut() {
debug_user_data.validation_layer_properties =
Some(super::ValidationLayerProperties {
layer_description: cstr_from_bytes_until_nul(
&layer_properties.description,
)
layer_description: layer_properties
.description_as_c_str()
.unwrap()
.to_owned(),
layer_spec_version: layer_properties.spec_version,
@ -746,9 +719,7 @@ impl crate::Instance for super::Instance {
callback_data,
};
let vk_create_info = create_info.to_vk_create_info().build();
Some((create_info, vk_create_info))
Some(create_info)
} else {
None
};
@ -780,7 +751,7 @@ impl crate::Instance for super::Instance {
// Avoid VUID-VkInstanceCreateInfo-flags-06559: Only ask the instance to
// enumerate incomplete Vulkan implementations (which we need on Mac) if
// we managed to find the extension that provides the flag.
if extensions.contains(&vk::KhrPortabilityEnumerationFn::name()) {
if extensions.contains(&khr::portability_enumeration::NAME) {
flags |= vk::InstanceCreateFlags::ENUMERATE_PORTABILITY_KHR;
}
let vk_instance = {
@ -793,14 +764,17 @@ impl crate::Instance for super::Instance {
})
.collect::<Vec<_>>();
let mut create_info = vk::InstanceCreateInfo::builder()
let mut create_info = vk::InstanceCreateInfo::default()
.flags(flags)
.application_info(&app_info)
.enabled_layer_names(&str_pointers[..layers.len()])
.enabled_extension_names(&str_pointers[layers.len()..]);
if let Some(&mut (_, ref mut vk_create_info)) = debug_utils.as_mut() {
create_info = create_info.push_next(vk_create_info);
let mut debug_utils_create_info = debug_utils
.as_mut()
.map(|create_info| create_info.to_vk_create_info());
if let Some(debug_utils_create_info) = debug_utils_create_info.as_mut() {
create_info = create_info.push_next(debug_utils_create_info);
}
// Enable explicit validation features if available
@ -820,7 +794,7 @@ impl crate::Instance for super::Instance {
.push(vk::ValidationFeatureEnableEXT::GPU_ASSISTED_RESERVE_BINDING_SLOT);
}
validation_features = vk::ValidationFeaturesEXT::builder()
validation_features = vk::ValidationFeaturesEXT::default()
.enabled_validation_features(&validation_feature_list);
create_info = create_info.push_next(&mut validation_features);
}
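The builder changes in this hunk are mechanical: ash 0.38 removes the separate `Builder` types, so structs are built with `::default()` plus the same setters, carry a lifetime parameter for their borrows, and no longer need `.build()`. A minimal sketch of the 0.38 style (standalone, not part of this commit):

use ash::vk;

// The returned struct borrows `enabled`; the lifetime parameter records that borrow,
// and the value can be chained into an InstanceCreateInfo via push_next as above.
fn validation_features(
    enabled: &[vk::ValidationFeatureEnableEXT],
) -> vk::ValidationFeaturesEXT<'_> {
    vk::ValidationFeaturesEXT::default().enabled_validation_features(enabled)
}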
@ -843,7 +817,7 @@ impl crate::Instance for super::Instance {
vk_instance,
instance_api_version,
android_sdk_version,
debug_utils.map(|(i, _)| i),
debug_utils,
extensions,
desc.flags,
has_nv_optimus,
@ -859,13 +833,15 @@ impl crate::Instance for super::Instance {
) -> Result<super::Surface, crate::InstanceError> {
use raw_window_handle::{RawDisplayHandle as Rdh, RawWindowHandle as Rwh};
// TODO: Replace with ash-window, which also lazy-loads the extension based on handle type
match (window_handle, display_handle) {
(Rwh::Wayland(handle), Rdh::Wayland(display)) => {
self.create_surface_from_wayland(display.display.as_ptr(), handle.surface.as_ptr())
}
(Rwh::Xlib(handle), Rdh::Xlib(display)) => {
let display = display.display.expect("Display pointer is not set.");
self.create_surface_from_xlib(display.as_ptr() as *mut *const c_void, handle.window)
self.create_surface_from_xlib(display.as_ptr(), handle.window)
}
(Rwh::Xcb(handle), Rdh::Xcb(display)) => {
let connection = display.connection.expect("Pointer to X-Server is not set.");
@ -874,22 +850,23 @@ impl crate::Instance for super::Instance {
(Rwh::AndroidNdk(handle), _) => {
self.create_surface_android(handle.a_native_window.as_ptr())
}
#[cfg(windows)]
(Rwh::Win32(handle), _) => {
use winapi::um::libloaderapi::GetModuleHandleW;
let hinstance = unsafe { GetModuleHandleW(std::ptr::null()) };
self.create_surface_from_hwnd(hinstance as *mut _, handle.hwnd.get() as *mut _)
let hinstance = handle.hinstance.ok_or_else(|| {
crate::InstanceError::new(String::from(
"Vulkan requires raw-window-handle's Win32::hinstance to be set",
))
})?;
self.create_surface_from_hwnd(hinstance.get(), handle.hwnd.get())
}
#[cfg(all(target_os = "macos", feature = "metal"))]
(Rwh::AppKit(handle), _)
if self.shared.extensions.contains(&ext::MetalSurface::name()) =>
if self.shared.extensions.contains(&ext::metal_surface::NAME) =>
{
self.create_surface_from_view(handle.ns_view.as_ptr())
}
#[cfg(all(target_os = "ios", feature = "metal"))]
(Rwh::UiKit(handle), _)
if self.shared.extensions.contains(&ext::MetalSurface::name()) =>
if self.shared.extensions.contains(&ext::metal_surface::NAME) =>
{
self.create_surface_from_view(handle.ui_view.as_ptr())
}
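Related to the surface-creation hunks above: ash 0.38 also retypes the platform window handles (`vk::HINSTANCE`/`vk::HWND` are plain `isize`, and the Wayland/Xlib/Android pointers are typed), which is why values from raw-window-handle can be passed through without `*mut c_void` casts. A hedged sketch of the Win32 case (assumes raw-window-handle 0.6; not part of this commit):

use ash::vk;
use raw_window_handle::Win32WindowHandle;

// Build the create info directly from raw-window-handle's non-zero integer handles.
fn win32_surface_info(handle: &Win32WindowHandle) -> Option<vk::Win32SurfaceCreateInfoKHR<'static>> {
    let hinstance: vk::HINSTANCE = handle.hinstance?.get(); // isize in ash 0.38
    Some(
        vk::Win32SurfaceCreateInfoKHR::default()
            .hinstance(hinstance)
            .hwnd(handle.hwnd.get()),
    )
}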

wgpu-hal/src/vulkan/mod.rs
View File

@ -33,7 +33,7 @@ mod instance;
use std::{
borrow::Borrow,
ffi::CStr,
ffi::{CStr, CString},
fmt,
num::NonZeroU32,
sync::{
@ -43,10 +43,7 @@ use std::{
};
use arrayvec::ArrayVec;
use ash::{
extensions::{ext, khr},
vk,
};
use ash::{ext, khr, vk};
use parking_lot::{Mutex, RwLock};
const MILLIS_TO_NANOS: u64 = 1_000_000;
@ -83,7 +80,7 @@ impl crate::Api for Api {
}
struct DebugUtils {
extension: ext::DebugUtils,
extension: ext::debug_utils::Instance,
messenger: vk::DebugUtilsMessengerEXT,
/// Owning pointer to the debug messenger callback user data.
@ -106,7 +103,7 @@ pub struct DebugUtilsCreateInfo {
/// DebugUtilsMessenger for their workarounds
struct ValidationLayerProperties {
/// Validation layer description, from `vk::LayerProperties`.
layer_description: std::ffi::CString,
layer_description: CString,
/// Validation layer specification version, from `vk::LayerProperties`.
layer_spec_version: u32,
@ -132,7 +129,7 @@ pub struct InstanceShared {
drop_guard: Option<crate::DropGuard>,
flags: wgt::InstanceFlags,
debug_utils: Option<DebugUtils>,
get_physical_device_properties: Option<khr::GetPhysicalDeviceProperties2>,
get_physical_device_properties: Option<khr::get_physical_device_properties2::Instance>,
entry: ash::Entry,
has_nv_optimus: bool,
android_sdk_version: u32,
@ -152,7 +149,7 @@ pub struct Instance {
struct Swapchain {
raw: vk::SwapchainKHR,
raw_flags: vk::SwapchainCreateFlagsKHR,
functor: khr::Swapchain,
functor: khr::swapchain::Device,
device: Arc<DeviceShared>,
images: Vec<vk::Image>,
config: crate::SurfaceConfiguration,
@ -166,7 +163,7 @@ struct Swapchain {
pub struct Surface {
raw: vk::SurfaceKHR,
functor: khr::Surface,
functor: khr::surface::Instance,
instance: Arc<InstanceShared>,
swapchain: RwLock<Option<Swapchain>>,
}
@ -205,14 +202,15 @@ enum ExtensionFn<T> {
}
struct DeviceExtensionFunctions {
draw_indirect_count: Option<khr::DrawIndirectCount>,
timeline_semaphore: Option<ExtensionFn<khr::TimelineSemaphore>>,
debug_utils: Option<ext::debug_utils::Device>,
draw_indirect_count: Option<khr::draw_indirect_count::Device>,
timeline_semaphore: Option<ExtensionFn<khr::timeline_semaphore::Device>>,
ray_tracing: Option<RayTracingDeviceExtensionFunctions>,
}
struct RayTracingDeviceExtensionFunctions {
acceleration_structure: khr::AccelerationStructure,
buffer_device_address: khr::BufferDeviceAddress,
acceleration_structure: khr::acceleration_structure::Device,
buffer_device_address: khr::buffer_device_address::Device,
}
/// Set of internal capabilities, which don't show up in the exposed
@ -361,7 +359,7 @@ pub struct Device {
pub struct Queue {
raw: vk::Queue,
swapchain_fn: khr::Swapchain,
swapchain_fn: khr::swapchain::Device,
device: Arc<DeviceShared>,
family_index: u32,
/// We use a redundant chain of semaphores to pass on the signal
@ -452,13 +450,10 @@ pub struct BindGroup {
#[derive(Default)]
struct Temp {
marker: Vec<u8>,
buffer_barriers: Vec<vk::BufferMemoryBarrier>,
image_barriers: Vec<vk::ImageMemoryBarrier>,
buffer_barriers: Vec<vk::BufferMemoryBarrier<'static>>,
image_barriers: Vec<vk::ImageMemoryBarrier<'static>>,
}
unsafe impl Send for Temp {}
unsafe impl Sync for Temp {}
impl Temp {
fn clear(&mut self) {
self.marker.clear();
@ -638,7 +633,7 @@ impl Fence {
fn get_latest(
&self,
device: &ash::Device,
extension: Option<&ExtensionFn<khr::TimelineSemaphore>>,
extension: Option<&ExtensionFn<khr::timeline_semaphore::Device>>,
) -> Result<crate::FenceValue, crate::DeviceError> {
match *self {
Self::TimelineSemaphore(raw) => unsafe {
@ -684,9 +679,7 @@ impl Fence {
}
if free.len() != base_free {
active.retain(|&(value, _)| value > latest);
unsafe {
device.reset_fences(&free[base_free..])?;
}
unsafe { device.reset_fences(&free[base_free..]) }?
}
*last_completed = latest;
}
@ -749,7 +742,7 @@ impl crate::Queue for Queue {
None => unsafe {
self.device
.raw
.create_fence(&vk::FenceCreateInfo::builder(), None)?
.create_fence(&vk::FenceCreateInfo::default(), None)?
},
};
active.push((value, fence_raw));
@ -762,7 +755,7 @@ impl crate::Queue for Queue {
.map(|cmd| cmd.raw)
.collect::<Vec<_>>();
let mut vk_info = vk::SubmitInfo::builder().command_buffers(&vk_cmd_buffers);
let mut vk_info = vk::SubmitInfo::default().command_buffers(&vk_cmd_buffers);
vk_info = vk_info
.wait_semaphores(&wait_semaphores)
@ -773,7 +766,7 @@ impl crate::Queue for Queue {
if !signal_values.is_empty() {
vk_timeline_info =
vk::TimelineSemaphoreSubmitInfo::builder().signal_semaphore_values(&signal_values);
vk::TimelineSemaphoreSubmitInfo::default().signal_semaphore_values(&signal_values);
vk_info = vk_info.push_next(&mut vk_timeline_info);
}
@ -781,7 +774,7 @@ impl crate::Queue for Queue {
unsafe {
self.device
.raw
.queue_submit(self.raw, &[vk_info.build()], fence_raw)?
.queue_submit(self.raw, &[vk_info], fence_raw)?
};
Ok(())
}
@ -796,7 +789,7 @@ impl crate::Queue for Queue {
let swapchains = [ssc.raw];
let image_indices = [texture.index];
let mut vk_info = vk::PresentInfoKHR::builder()
let mut vk_info = vk::PresentInfoKHR::default()
.swapchains(&swapchains)
.image_indices(&image_indices);
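One consequence of the new lifetime parameters shows up in the `Temp` struct earlier in this file: ash 0.38 structs stored in long-lived containers must spell the lifetime out, and barriers that only hold handles and enums borrow nothing, hence `'static`. A minimal sketch (struct and function names are illustrative, not from the commit):

use ash::vk;

#[derive(Default)]
struct Scratch {
    // Owns no borrows, so the p_next lifetime can be 'static.
    image_barriers: Vec<vk::ImageMemoryBarrier<'static>>,
}

fn queue_layout_transition(scratch: &mut Scratch, image: vk::Image) {
    scratch.image_barriers.push(
        vk::ImageMemoryBarrier::default()
            .image(image)
            .old_layout(vk::ImageLayout::UNDEFINED)
            .new_layout(vk::ImageLayout::TRANSFER_DST_OPTIMAL),
    );
}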