wgpu_hal/vulkan/device.rs

use alloc::{borrow::ToOwned as _, collections::BTreeMap, ffi::CString, sync::Arc, vec::Vec};
use core::{
    ffi::CStr,
    mem::{self, MaybeUninit},
    num::NonZeroU32,
    ptr,
    time::Duration,
};

use arrayvec::ArrayVec;
use ash::{ext, vk};
use hashbrown::hash_map::Entry;
use parking_lot::Mutex;

use super::{conv, RawTlasInstance};
use crate::TlasInstance;

impl super::DeviceShared {
    /// Set the name of `object` to `name`.
    ///
    /// If `name` contains an interior null byte, then the name set will be truncated to that byte.
    ///
    /// # Safety
    ///
    /// This method inherits the safety contract from [`vkSetDebugUtilsObjectName`]. In particular:
    ///
    /// - `object` must be a valid handle for one of the following:
    ///   - An instance-level object from the same instance as this device.
    ///   - A physical-device-level object that descends from the same physical device as this
    ///     device.
    ///   - A device-level object that descends from this device.
    /// - `object` must be externally synchronized—only the calling thread should access it during
    ///   this call.
    ///
    /// [`vkSetDebugUtilsObjectName`]: https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetDebugUtilsObjectNameEXT.html
    pub(super) unsafe fn set_object_name(&self, object: impl vk::Handle, name: &str) {
        let Some(extension) = self.extension_fns.debug_utils.as_ref() else {
            return;
        };

        // Keep variables outside the if-else block to ensure they do not
        // go out of scope while we hold a pointer to them
        let mut buffer: [u8; 64] = [0u8; 64];
        let buffer_vec: Vec<u8>;

        // Append a null terminator to the string
        let name_bytes = if name.len() < buffer.len() {
            // Common case, string is very small. Allocate a copy on the stack.
            buffer[..name.len()].copy_from_slice(name.as_bytes());
            // Add null terminator
            buffer[name.len()] = 0;
            &buffer[..name.len() + 1]
        } else {
            // Less common case, the string is large.
            // This requires a heap allocation.
            buffer_vec = name
                .as_bytes()
                .iter()
                .cloned()
                .chain(core::iter::once(0))
                .collect();
            &buffer_vec
        };

        let name = CStr::from_bytes_until_nul(name_bytes).expect("We have added a null byte");

        let _result = unsafe {
            extension.set_debug_utils_object_name(
                &vk::DebugUtilsObjectNameInfoEXT::default()
                    .object_handle(object)
                    .object_name(name),
            )
        };
    }

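    /// Returns a raw render pass matching `key`, creating and caching it on first use.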
    pub fn make_render_pass(
        &self,
        key: super::RenderPassKey,
    ) -> Result<vk::RenderPass, crate::DeviceError> {
        Ok(match self.render_passes.lock().entry(key) {
            Entry::Occupied(e) => *e.get(),
            Entry::Vacant(e) => {
                let super::RenderPassKey {
                    ref colors,
                    ref depth_stencil,
                    sample_count,
                    multiview_mask,
                } = *e.key();

                let mut vk_attachments = Vec::new();
                let mut color_refs = Vec::with_capacity(colors.len());
                let mut resolve_refs = Vec::with_capacity(color_refs.capacity());
                let mut ds_ref = None;
                let samples = vk::SampleCountFlags::from_raw(sample_count);
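                // Reference used for color/resolve slots that have no attachment.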
                let unused = vk::AttachmentReference {
                    attachment: vk::ATTACHMENT_UNUSED,
                    layout: vk::ImageLayout::UNDEFINED,
                };
                for cat in colors.iter() {
                    let (color_ref, resolve_ref) =
                        if let Some(super::ColorAttachmentKey { base, resolve }) = cat {
                            let super::AttachmentKey {
                                format,
                                layout,
                                ops,
                            } = *base;

                            let color_ref = vk::AttachmentReference {
                                attachment: vk_attachments.len() as u32,
                                layout,
                            };
                            vk_attachments.push({
                                let (load_op, store_op) = conv::map_attachment_ops(ops);
                                vk::AttachmentDescription::default()
                                    .format(format)
                                    .samples(samples)
                                    .load_op(load_op)
                                    .store_op(store_op)
                                    .initial_layout(layout)
                                    .final_layout(layout)
                            });
                            let resolve_ref = if let Some(rat) = resolve {
                                let super::AttachmentKey {
                                    format,
                                    layout,
                                    ops,
                                } = *rat;

                                let (load_op, store_op) = conv::map_attachment_ops(ops);
                                let vk_attachment = vk::AttachmentDescription::default()
                                    .format(format)
                                    .samples(vk::SampleCountFlags::TYPE_1)
                                    .load_op(load_op)
                                    .store_op(store_op)
                                    .initial_layout(layout)
                                    .final_layout(layout);
                                vk_attachments.push(vk_attachment);

                                vk::AttachmentReference {
                                    attachment: vk_attachments.len() as u32 - 1,
                                    layout,
                                }
                            } else {
                                unused
                            };

                            (color_ref, resolve_ref)
                        } else {
                            (unused, unused)
                        };

                    color_refs.push(color_ref);
                    resolve_refs.push(resolve_ref);
                }

                if let Some(ds) = depth_stencil {
                    let super::DepthStencilAttachmentKey {
                        ref base,
                        stencil_ops,
                    } = *ds;

                    let super::AttachmentKey {
                        format,
                        layout,
                        ops,
                    } = *base;

                    ds_ref = Some(vk::AttachmentReference {
                        attachment: vk_attachments.len() as u32,
                        layout,
                    });
                    let (load_op, store_op) = conv::map_attachment_ops(ops);
                    let (stencil_load_op, stencil_store_op) = conv::map_attachment_ops(stencil_ops);
                    let vk_attachment = vk::AttachmentDescription::default()
                        .format(format)
                        .samples(samples)
                        .load_op(load_op)
                        .store_op(store_op)
                        .stencil_load_op(stencil_load_op)
                        .stencil_store_op(stencil_store_op)
                        .initial_layout(layout)
                        .final_layout(layout);
                    vk_attachments.push(vk_attachment);
                }

                let vk_subpasses = [{
                    let mut vk_subpass = vk::SubpassDescription::default()
                        .pipeline_bind_point(vk::PipelineBindPoint::GRAPHICS)
                        .color_attachments(&color_refs)
                        .resolve_attachments(&resolve_refs);

                    if self
                        .workarounds
                        .contains(super::Workarounds::EMPTY_RESOLVE_ATTACHMENT_LISTS)
                        && resolve_refs.is_empty()
                    {
                        vk_subpass.p_resolve_attachments = ptr::null();
                    }

                    if let Some(ref reference) = ds_ref {
                        vk_subpass = vk_subpass.depth_stencil_attachment(reference)
                    }
                    vk_subpass
                }];

                let mut vk_info = vk::RenderPassCreateInfo::default()
                    .attachments(&vk_attachments)
                    .subpasses(&vk_subpasses);

                let mut multiview_info;
                let mask;
                if let Some(multiview_mask) = multiview_mask {
                    mask = [multiview_mask.get()];

                    // On Vulkan 1.1 or later, this is an alias for core functionality
                    multiview_info = vk::RenderPassMultiviewCreateInfoKHR::default()
                        .view_masks(&mask)
                        .correlation_masks(&mask);
                    vk_info = vk_info.push_next(&mut multiview_info);
                }

                let raw = unsafe {
                    self.raw
                        .create_render_pass(&vk_info, None)
                        .map_err(super::map_host_device_oom_err)?
                };

                *e.insert(raw)
            }
        })
    }

    fn make_memory_ranges<'a, I: 'a + Iterator<Item = crate::MemoryRange>>(
        &self,
        buffer: &'a super::Buffer,
        ranges: I,
    ) -> Option<impl 'a + Iterator<Item = vk::MappedMemoryRange<'a>>> {
        let allocation = buffer.allocation.as_ref()?.lock();
        let mask = self.private_caps.non_coherent_map_mask;
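        // Round each range's offset down and its size up to the non-coherent atom size,
        // satisfying the alignment rules for flushing/invalidating non-coherent memory.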
        Some(ranges.map(move |range| {
            vk::MappedMemoryRange::default()
                .memory(allocation.memory())
                .offset((allocation.offset() + range.start) & !mask)
                .size((range.end - range.start + mask) & !mask)
        }))
    }
}

impl
    gpu_descriptor::DescriptorDevice<vk::DescriptorSetLayout, vk::DescriptorPool, vk::DescriptorSet>
    for super::DeviceShared
{
    unsafe fn create_descriptor_pool(
        &self,
        descriptor_count: &gpu_descriptor::DescriptorTotalCount,
        max_sets: u32,
        flags: gpu_descriptor::DescriptorPoolCreateFlags,
    ) -> Result<vk::DescriptorPool, gpu_descriptor::CreatePoolError> {
        //Note: ignoring other types, since they can't appear here
        let unfiltered_counts = [
            (vk::DescriptorType::SAMPLER, descriptor_count.sampler),
            (
                vk::DescriptorType::SAMPLED_IMAGE,
                descriptor_count.sampled_image,
            ),
            (
                vk::DescriptorType::STORAGE_IMAGE,
                descriptor_count.storage_image,
            ),
            (
                vk::DescriptorType::UNIFORM_BUFFER,
                descriptor_count.uniform_buffer,
            ),
            (
                vk::DescriptorType::UNIFORM_BUFFER_DYNAMIC,
                descriptor_count.uniform_buffer_dynamic,
            ),
            (
                vk::DescriptorType::STORAGE_BUFFER,
                descriptor_count.storage_buffer,
            ),
            (
                vk::DescriptorType::STORAGE_BUFFER_DYNAMIC,
                descriptor_count.storage_buffer_dynamic,
            ),
            (
                vk::DescriptorType::ACCELERATION_STRUCTURE_KHR,
                descriptor_count.acceleration_structure,
            ),
        ];

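        // Drop descriptor types with a zero count; Vulkan requires every pool size entry
        // to have a non-zero descriptor count.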
        let filtered_counts = unfiltered_counts
            .iter()
            .cloned()
            .filter(|&(_, count)| count != 0)
            .map(|(ty, count)| vk::DescriptorPoolSize {
                ty,
                descriptor_count: count,
            })
            .collect::<ArrayVec<_, 8>>();

        let mut vk_flags =
            if flags.contains(gpu_descriptor::DescriptorPoolCreateFlags::UPDATE_AFTER_BIND) {
                vk::DescriptorPoolCreateFlags::UPDATE_AFTER_BIND
            } else {
                vk::DescriptorPoolCreateFlags::empty()
            };
        if flags.contains(gpu_descriptor::DescriptorPoolCreateFlags::FREE_DESCRIPTOR_SET) {
            vk_flags |= vk::DescriptorPoolCreateFlags::FREE_DESCRIPTOR_SET;
        }
        let vk_info = vk::DescriptorPoolCreateInfo::default()
            .max_sets(max_sets)
            .flags(vk_flags)
            .pool_sizes(&filtered_counts);

        match unsafe { self.raw.create_descriptor_pool(&vk_info, None) } {
            Ok(pool) => Ok(pool),
            Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => {
                Err(gpu_descriptor::CreatePoolError::OutOfHostMemory)
            }
            Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
                Err(gpu_descriptor::CreatePoolError::OutOfDeviceMemory)
            }
            Err(vk::Result::ERROR_FRAGMENTATION) => {
                Err(gpu_descriptor::CreatePoolError::Fragmentation)
            }
            Err(err) => handle_unexpected(err),
        }
    }

    unsafe fn destroy_descriptor_pool(&self, pool: vk::DescriptorPool) {
        unsafe { self.raw.destroy_descriptor_pool(pool, None) }
    }

    unsafe fn alloc_descriptor_sets<'a>(
        &self,
        pool: &mut vk::DescriptorPool,
        layouts: impl ExactSizeIterator<Item = &'a vk::DescriptorSetLayout>,
        sets: &mut impl Extend<vk::DescriptorSet>,
    ) -> Result<(), gpu_descriptor::DeviceAllocationError> {
        let result = unsafe {
            self.raw.allocate_descriptor_sets(
                &vk::DescriptorSetAllocateInfo::default()
                    .descriptor_pool(*pool)
                    .set_layouts(
                        &smallvec::SmallVec::<[vk::DescriptorSetLayout; 32]>::from_iter(
                            layouts.cloned(),
                        ),
                    ),
            )
        };

        match result {
            Ok(vk_sets) => {
                sets.extend(vk_sets);
                Ok(())
            }
            Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY)
            | Err(vk::Result::ERROR_OUT_OF_POOL_MEMORY) => {
                Err(gpu_descriptor::DeviceAllocationError::OutOfHostMemory)
            }
            Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
                Err(gpu_descriptor::DeviceAllocationError::OutOfDeviceMemory)
            }
            Err(vk::Result::ERROR_FRAGMENTED_POOL) => {
                Err(gpu_descriptor::DeviceAllocationError::FragmentedPool)
            }
            Err(err) => handle_unexpected(err),
        }
    }

    unsafe fn dealloc_descriptor_sets<'a>(
        &self,
        pool: &mut vk::DescriptorPool,
        sets: impl Iterator<Item = vk::DescriptorSet>,
    ) {
        let result = unsafe {
            self.raw.free_descriptor_sets(
                *pool,
                &smallvec::SmallVec::<[vk::DescriptorSet; 32]>::from_iter(sets),
            )
        };
        match result {
            Ok(()) => {}
            Err(err) => handle_unexpected(err),
        }
    }
}

struct CompiledStage {
    create_info: vk::PipelineShaderStageCreateInfo<'static>,
    _entry_point: CString,
    temp_raw_module: Option<vk::ShaderModule>,
}

impl super::Device {
    /// # Safety
    ///
    /// - `vk_image` must be created respecting `desc`
    /// - If `drop_callback` is [`None`], wgpu-hal will take ownership of `vk_image`. If
    ///   `drop_callback` is [`Some`], `vk_image` must be valid until the callback is called.
    /// - If the `ImageCreateFlags` does not contain `MUTABLE_FORMAT`, the `view_formats` of `desc` must be empty.
    /// - If `memory` is not [`super::TextureMemory::External`], wgpu-hal will take ownership of the
    ///   memory (which is presumed to back `vk_image`). Otherwise, the memory must remain valid until
    ///   `drop_callback` is called.
    pub unsafe fn texture_from_raw(
        &self,
        vk_image: vk::Image,
        desc: &crate::TextureDescriptor,
        drop_callback: Option<crate::DropCallback>,
        memory: super::TextureMemory,
    ) -> super::Texture {
        let identity = self.shared.texture_identity_factory.next();
        let drop_guard = crate::DropGuard::from_option(drop_callback);

        if let Some(label) = desc.label {
            unsafe { self.shared.set_object_name(vk_image, label) };
        }

        super::Texture {
            raw: vk_image,
            drop_guard,
            memory,
            format: desc.format,
            copy_size: desc.copy_extent(),
            identity,
        }
    }

    fn find_memory_type_index(
        &self,
        type_bits_req: u32,
        flags_req: vk::MemoryPropertyFlags,
    ) -> Option<usize> {
        let mem_properties = unsafe {
            self.shared
                .instance
                .raw
                .get_physical_device_memory_properties(self.shared.physical_device)
        };

        // https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceMemoryProperties.html
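        // `type_bits_req` is the `memory_type_bits` mask from `vk::MemoryRequirements`:
        // bit `i` being set means memory type `i` is acceptable for the resource.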
        for (i, mem_ty) in mem_properties.memory_types_as_slice().iter().enumerate() {
            let types_bits = 1 << i;
            let is_required_memory_type = type_bits_req & types_bits != 0;
            let has_required_properties = mem_ty.property_flags & flags_req == flags_req;
            if is_required_memory_type && has_required_properties {
                return Some(i);
            }
        }

        None
    }

    fn create_image_without_memory(
        &self,
        desc: &crate::TextureDescriptor,
        external_memory_image_create_info: Option<&mut vk::ExternalMemoryImageCreateInfo>,
    ) -> Result<ImageWithoutMemory, crate::DeviceError> {
        let copy_size = desc.copy_extent();

        let mut raw_flags = vk::ImageCreateFlags::empty();
        if desc.dimension == wgt::TextureDimension::D3
            && desc.usage.contains(wgt::TextureUses::COLOR_TARGET)
        {
            raw_flags |= vk::ImageCreateFlags::TYPE_2D_ARRAY_COMPATIBLE;
        }
        if desc.is_cube_compatible() {
            raw_flags |= vk::ImageCreateFlags::CUBE_COMPATIBLE;
        }

        let original_format = self.shared.private_caps.map_texture_format(desc.format);
        let mut vk_view_formats = vec![];
        if !desc.view_formats.is_empty() {
            raw_flags |= vk::ImageCreateFlags::MUTABLE_FORMAT;

            if self.shared.private_caps.image_format_list {
                vk_view_formats = desc
                    .view_formats
                    .iter()
                    .map(|f| self.shared.private_caps.map_texture_format(*f))
                    .collect();
                vk_view_formats.push(original_format)
            }
        }
        if desc.format.is_multi_planar_format() {
            raw_flags |=
                vk::ImageCreateFlags::MUTABLE_FORMAT | vk::ImageCreateFlags::EXTENDED_USAGE;
        }

        let mut vk_info = vk::ImageCreateInfo::default()
            .flags(raw_flags)
            .image_type(conv::map_texture_dimension(desc.dimension))
            .format(original_format)
            .extent(conv::map_copy_extent(&copy_size))
            .mip_levels(desc.mip_level_count)
            .array_layers(desc.array_layer_count())
            .samples(vk::SampleCountFlags::from_raw(desc.sample_count))
            .tiling(vk::ImageTiling::OPTIMAL)
            .usage(conv::map_texture_usage(desc.usage))
            .sharing_mode(vk::SharingMode::EXCLUSIVE)
            .initial_layout(vk::ImageLayout::UNDEFINED);

        let mut format_list_info = vk::ImageFormatListCreateInfo::default();
        if !vk_view_formats.is_empty() {
            format_list_info = format_list_info.view_formats(&vk_view_formats);
            vk_info = vk_info.push_next(&mut format_list_info);
        }

        if let Some(ext_info) = external_memory_image_create_info {
            vk_info = vk_info.push_next(ext_info);
        }

        let raw = unsafe { self.shared.raw.create_image(&vk_info, None) }.map_err(map_err)?;
        fn map_err(err: vk::Result) -> crate::DeviceError {
            // We don't use VK_EXT_image_compression_control
            // VK_ERROR_COMPRESSION_EXHAUSTED_EXT
            super::map_host_device_oom_and_ioca_err(err)
        }
        let mut req = unsafe { self.shared.raw.get_image_memory_requirements(raw) };

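        // For transient attachments, prefer a lazily allocated memory type if the
        // implementation offers one by narrowing the acceptable memory types.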
        if desc.usage.contains(wgt::TextureUses::TRANSIENT) {
            let mem_type_index = self.find_memory_type_index(
                req.memory_type_bits,
                vk::MemoryPropertyFlags::LAZILY_ALLOCATED,
            );
            if let Some(mem_type_index) = mem_type_index {
                req.memory_type_bits = 1 << mem_type_index;
            }
        }

        Ok(ImageWithoutMemory {
            raw,
            requirements: req,
        })
    }

    /// # Safety
    ///
    /// - Requires Vulkan with the `VK_KHR_external_memory_win32` extension enabled.
    /// - The `d3d11_shared_handle` must be valid and consistent with `desc`.
    /// - The `VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT` flag is used because we need to hold a reference to the handle.
    #[cfg(windows)]
    pub unsafe fn texture_from_d3d11_shared_handle(
        &self,
        d3d11_shared_handle: windows::Win32::Foundation::HANDLE,
        desc: &crate::TextureDescriptor,
    ) -> Result<super::Texture, crate::DeviceError> {
        if !self
            .shared
            .features
            .contains(wgt::Features::VULKAN_EXTERNAL_MEMORY_WIN32)
        {
            log::error!("Vulkan driver does not support VK_KHR_external_memory_win32");
            return Err(crate::DeviceError::Unexpected);
        }

        let mut external_memory_image_info = vk::ExternalMemoryImageCreateInfo::default()
            .handle_types(vk::ExternalMemoryHandleTypeFlags::D3D11_TEXTURE);

        let image =
            self.create_image_without_memory(desc, Some(&mut external_memory_image_info))?;

        // Some external memory types require dedicated allocation
        // https://docs.vulkan.org/guide/latest/extensions/external.html#_importing_memory
        let mut dedicated_allocate_info =
            vk::MemoryDedicatedAllocateInfo::default().image(image.raw);

        let mut import_memory_info = vk::ImportMemoryWin32HandleInfoKHR::default()
            .handle_type(vk::ExternalMemoryHandleTypeFlags::D3D11_TEXTURE)
            .handle(d3d11_shared_handle.0 as _);
        // TODO: We should use `push_next` instead, but currently ash does not provide this method for the `ImportMemoryWin32HandleInfoKHR` type.
        #[allow(clippy::unnecessary_mut_passed)]
        {
            import_memory_info.p_next = <*const _>::cast(&mut dedicated_allocate_info);
        }

        let mem_type_index = self
            .find_memory_type_index(
                image.requirements.memory_type_bits,
                vk::MemoryPropertyFlags::DEVICE_LOCAL,
            )
            .ok_or(crate::DeviceError::Unexpected)?;

        let memory_allocate_info = vk::MemoryAllocateInfo::default()
            .allocation_size(image.requirements.size)
            .memory_type_index(mem_type_index as _)
            .push_next(&mut import_memory_info);
        let memory = unsafe { self.shared.raw.allocate_memory(&memory_allocate_info, None) }
            .map_err(super::map_host_device_oom_err)?;

        unsafe { self.shared.raw.bind_image_memory(image.raw, memory, 0) }
            .map_err(super::map_host_device_oom_err)?;

        Ok(unsafe {
            self.texture_from_raw(
                image.raw,
                desc,
                None,
                super::TextureMemory::Dedicated(memory),
            )
        })
    }

    fn create_shader_module_impl(
        &self,
        spv: &[u32],
        label: &crate::Label<'_>,
    ) -> Result<vk::ShaderModule, crate::DeviceError> {
        let vk_info = vk::ShaderModuleCreateInfo::default()
            .flags(vk::ShaderModuleCreateFlags::empty())
            .code(spv);

        let raw = unsafe {
            profiling::scope!("vkCreateShaderModule");
            self.shared
                .raw
                .create_shader_module(&vk_info, None)
                .map_err(map_err)?
        };
        fn map_err(err: vk::Result) -> crate::DeviceError {
            // We don't use VK_NV_glsl_shader
            // VK_ERROR_INVALID_SHADER_NV
            super::map_host_device_oom_err(err)
        }

        if let Some(label) = label {
            unsafe { self.shared.set_object_name(raw, label) };
        }

        Ok(raw)
    }

    fn compile_stage(
        &self,
        stage: &crate::ProgrammableStage<super::ShaderModule>,
        naga_stage: naga::ShaderStage,
        binding_map: &naga::back::spv::BindingMap,
    ) -> Result<CompiledStage, crate::PipelineError> {
        let stage_flags = crate::auxil::map_naga_stage(naga_stage);
        let vk_module = match *stage.module {
            super::ShaderModule::Raw(raw) => raw,
            super::ShaderModule::Intermediate {
                ref naga_shader,
                runtime_checks,
            } => {
                let pipeline_options = naga::back::spv::PipelineOptions {
                    entry_point: stage.entry_point.to_owned(),
                    shader_stage: naga_stage,
                };
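                // Only clone and patch the device-wide naga options if this stage needs
                // any of them overridden (disabled runtime checks, binding remaps, debug
                // info, or workgroup memory zero-initialization turned off).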
                let needs_temp_options = !runtime_checks.bounds_checks
                    || !runtime_checks.force_loop_bounding
                    || !runtime_checks.ray_query_initialization_tracking
                    || !binding_map.is_empty()
                    || naga_shader.debug_source.is_some()
                    || !stage.zero_initialize_workgroup_memory
                    || !runtime_checks.task_shader_dispatch_tracking
                    || !runtime_checks.mesh_shader_primitive_indices_clamp;

                let mut temp_options;
                let options = if needs_temp_options {
                    temp_options = self.naga_options.clone();
                    if !runtime_checks.bounds_checks {
                        temp_options.bounds_check_policies = naga::proc::BoundsCheckPolicies {
                            index: naga::proc::BoundsCheckPolicy::Unchecked,
                            buffer: naga::proc::BoundsCheckPolicy::Unchecked,
                            image_load: naga::proc::BoundsCheckPolicy::Unchecked,
                            binding_array: naga::proc::BoundsCheckPolicy::Unchecked,
                        };
                    }
                    if !runtime_checks.force_loop_bounding {
                        temp_options.force_loop_bounding = false;
                    }
                    if !runtime_checks.ray_query_initialization_tracking {
                        temp_options.ray_query_initialization_tracking = false;
                    }
                    if !binding_map.is_empty() {
                        temp_options.binding_map = binding_map.clone();
                    }

                    if let Some(ref debug) = naga_shader.debug_source {
                        temp_options.debug_info = Some(naga::back::spv::DebugInfo {
                            source_code: &debug.source_code,
                            file_name: debug.file_name.as_ref(),
                            language: naga::back::spv::SourceLanguage::WGSL,
                        })
                    }
                    if !stage.zero_initialize_workgroup_memory {
                        temp_options.zero_initialize_workgroup_memory =
                            naga::back::spv::ZeroInitializeWorkgroupMemoryMode::None;
                    }
                    if !runtime_checks.task_shader_dispatch_tracking {
                        temp_options.task_dispatch_limits = None;
                    }
                    temp_options.mesh_shader_primitive_indices_clamp =
                        runtime_checks.mesh_shader_primitive_indices_clamp;

                    &temp_options
                } else {
                    &self.naga_options
                };

                let (module, info) = naga::back::pipeline_constants::process_overrides(
                    &naga_shader.module,
                    &naga_shader.info,
                    Some((naga_stage, stage.entry_point)),
                    stage.constants,
                )
                .map_err(|e| {
                    crate::PipelineError::PipelineConstants(stage_flags, format!("{e}"))
                })?;

                let spv = {
                    profiling::scope!("naga::spv::write_vec");
                    naga::back::spv::write_vec(&module, &info, options, Some(&pipeline_options))
                }
                .map_err(|e| crate::PipelineError::Linkage(stage_flags, format!("{e}")))?;
                self.create_shader_module_impl(&spv, &None)?
            }
        };

        let mut flags = vk::PipelineShaderStageCreateFlags::empty();
        if self.shared.features.contains(wgt::Features::SUBGROUP) {
            flags |= vk::PipelineShaderStageCreateFlags::ALLOW_VARYING_SUBGROUP_SIZE
        }

        let entry_point = CString::new(stage.entry_point).unwrap();
        let mut create_info = vk::PipelineShaderStageCreateInfo::default()
            .flags(flags)
            .stage(conv::map_shader_stage(stage_flags))
            .module(vk_module);

        // Circumvent struct lifetime check because of a self-reference inside CompiledStage
        create_info.p_name = entry_point.as_ptr();

        Ok(CompiledStage {
            create_info,
            _entry_point: entry_point,
            temp_raw_module: match *stage.module {
                super::ShaderModule::Raw(_) => None,
                super::ShaderModule::Intermediate { .. } => Some(vk_module),
            },
        })
    }

    /// Returns the queue family index of the device's internal queue.
    ///
    /// This is useful for constructing memory barriers needed for queue family ownership transfer when
    /// external memory is involved (from/to `VK_QUEUE_FAMILY_EXTERNAL_KHR` and `VK_QUEUE_FAMILY_FOREIGN_EXT`
    /// for example).
    pub fn queue_family_index(&self) -> u32 {
        self.shared.family_index
    }

    pub fn queue_index(&self) -> u32 {
        self.shared.queue_index
    }

    pub fn raw_device(&self) -> &ash::Device {
        &self.shared.raw
    }

    pub fn raw_physical_device(&self) -> vk::PhysicalDevice {
        self.shared.physical_device
    }

    pub fn raw_queue(&self) -> vk::Queue {
        self.shared.raw_queue
    }

    pub fn enabled_device_extensions(&self) -> &[&'static CStr] {
        &self.shared.enabled_extensions
    }

    pub fn shared_instance(&self) -> &super::InstanceShared {
        &self.shared.instance
    }

    fn error_if_would_oom_on_resource_allocation(
        &self,
        needs_host_access: bool,
        size: u64,
    ) -> Result<(), crate::DeviceError> {
        let Some(threshold) = self
            .shared
            .instance
            .memory_budget_thresholds
            .for_resource_creation
        else {
            return Ok(());
        };

        if !self
            .shared
            .enabled_extensions
            .contains(&ext::memory_budget::NAME)
        {
            return Ok(());
        }

        let get_physical_device_properties = self
            .shared
            .instance
            .get_physical_device_properties
            .as_ref()
            .unwrap();

        let mut memory_budget_properties = vk::PhysicalDeviceMemoryBudgetPropertiesEXT::default();

        let mut memory_properties =
            vk::PhysicalDeviceMemoryProperties2::default().push_next(&mut memory_budget_properties);

        unsafe {
            get_physical_device_properties.get_physical_device_memory_properties2(
                self.shared.physical_device,
                &mut memory_properties,
            );
        }

        let mut host_visible_heaps = [false; vk::MAX_MEMORY_HEAPS];
        let mut device_local_heaps = [false; vk::MAX_MEMORY_HEAPS];

        let memory_properties = memory_properties.memory_properties;

        for i in 0..memory_properties.memory_type_count {
            let memory_type = memory_properties.memory_types[i as usize];
            let flags = memory_type.property_flags;

            if flags.intersects(
                vk::MemoryPropertyFlags::LAZILY_ALLOCATED | vk::MemoryPropertyFlags::PROTECTED,
            ) {
                continue; // not used by gpu-alloc
            }

            if flags.contains(vk::MemoryPropertyFlags::HOST_VISIBLE) {
                host_visible_heaps[memory_type.heap_index as usize] = true;
            }

            if flags.contains(vk::MemoryPropertyFlags::DEVICE_LOCAL) {
                device_local_heaps[memory_type.heap_index as usize] = true;
            }
        }

        let heaps = if needs_host_access {
            host_visible_heaps
        } else {
            device_local_heaps
        };

        // NOTE: We might end up checking multiple heaps since gpu-alloc doesn't have a way
        // for us to query the heap the resource will end up on. But this is unlikely:
        // there is usually only one heap on integrated GPUs and two on dedicated GPUs.

        for (i, check) in heaps.iter().enumerate() {
            if !check {
                continue;
            }

            let heap_usage = memory_budget_properties.heap_usage[i];
            let heap_budget = memory_budget_properties.heap_budget[i];

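            // Fail the allocation if it would push this heap's usage past `threshold`
            // percent of the heap's reported budget.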
            if heap_usage + size >= heap_budget / 100 * threshold as u64 {
                return Err(crate::DeviceError::OutOfMemory);
            }
        }

        Ok(())
    }
}

impl crate::Device for super::Device {
    type A = super::Api;

    unsafe fn create_buffer(
        &self,
        desc: &crate::BufferDescriptor,
    ) -> Result<super::Buffer, crate::DeviceError> {
        let vk_info = vk::BufferCreateInfo::default()
            .size(desc.size)
            .usage(conv::map_buffer_usage(desc.usage))
            .sharing_mode(vk::SharingMode::EXCLUSIVE);

        let raw = unsafe {
            self.shared
                .raw
                .create_buffer(&vk_info, None)
                .map_err(super::map_host_device_oom_and_ioca_err)?
        };

        let mut requirements = unsafe { self.shared.raw.get_buffer_memory_requirements(raw) };

        let is_cpu_read = desc.usage.contains(wgt::BufferUses::MAP_READ);
        let is_cpu_write = desc.usage.contains(wgt::BufferUses::MAP_WRITE);

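        // Pick the gpu_allocator memory location from the requested host access:
        // read-only mappings prefer GpuToCpu, any CPU-write access uses CpuToGpu,
        // and buffers that are never mapped stay GpuOnly.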
        let location = match (is_cpu_read, is_cpu_write) {
            (true, true) => gpu_allocator::MemoryLocation::CpuToGpu,
            (true, false) => gpu_allocator::MemoryLocation::GpuToCpu,
            (false, true) => gpu_allocator::MemoryLocation::CpuToGpu,
            (false, false) => gpu_allocator::MemoryLocation::GpuOnly,
        };

        let needs_host_access = is_cpu_read || is_cpu_write;

        self.error_if_would_oom_on_resource_allocation(needs_host_access, requirements.size)
            .inspect_err(|_| {
                unsafe { self.shared.raw.destroy_buffer(raw, None) };
            })?;

        let name = desc.label.unwrap_or("Unlabeled buffer");

        if desc
            .usage
            .contains(wgt::BufferUses::ACCELERATION_STRUCTURE_SCRATCH)
        {
            // There is no way to specify this usage to Vulkan so we must make sure the alignment requirement is large enough.
            requirements.alignment = requirements
                .alignment
                .max(self.shared.private_caps.scratch_buffer_alignment as u64);
        }

        let allocation = self
            .mem_allocator
            .lock()
            .allocate(&gpu_allocator::vulkan::AllocationCreateDesc {
                name,
                requirements: vk::MemoryRequirements {
                    memory_type_bits: requirements.memory_type_bits & self.valid_ash_memory_types,
                    ..requirements
                },
                location,
                linear: true, // Buffers are always linear
                allocation_scheme: gpu_allocator::vulkan::AllocationScheme::GpuAllocatorManaged,
            })
            .inspect_err(|_| {
                unsafe { self.shared.raw.destroy_buffer(raw, None) };
            })?;

        unsafe {
            self.shared
                .raw
                .bind_buffer_memory(raw, allocation.memory(), allocation.offset())
        }
        .map_err(super::map_host_device_oom_and_ioca_err)
        .inspect_err(|_| {
            unsafe { self.shared.raw.destroy_buffer(raw, None) };
        })?;

        if let Some(label) = desc.label {
            unsafe { self.shared.set_object_name(raw, label) };
        }

        self.counters.buffer_memory.add(allocation.size() as isize);
        self.counters.buffers.add(1);

        Ok(super::Buffer {
            raw,
            allocation: Some(Mutex::new(super::BufferMemoryBacking::Managed(allocation))),
        })
    }
    unsafe fn destroy_buffer(&self, buffer: super::Buffer) {
        unsafe { self.shared.raw.destroy_buffer(buffer.raw, None) };
        if let Some(allocation) = buffer.allocation {
            let allocation = allocation.into_inner();
            self.counters.buffer_memory.sub(allocation.size() as isize);
            match allocation {
                super::BufferMemoryBacking::Managed(allocation) => {
                    let result = self.mem_allocator.lock().free(allocation);
                    if let Err(err) = result {
                        log::warn!("Failed to free buffer allocation: {err}");
                    }
                }
                super::BufferMemoryBacking::VulkanMemory { memory, .. } => unsafe {
                    self.shared.raw.free_memory(memory, None);
                },
            }
        }

        self.counters.buffers.sub(1);
    }

    unsafe fn add_raw_buffer(&self, _buffer: &super::Buffer) {
        self.counters.buffers.add(1);
    }

    unsafe fn map_buffer(
        &self,
        buffer: &super::Buffer,
        range: crate::MemoryRange,
    ) -> Result<crate::BufferMapping, crate::DeviceError> {
        if let Some(ref allocation) = buffer.allocation {
            let mut allocation = allocation.lock();
            if let super::BufferMemoryBacking::Managed(ref mut allocation) = *allocation {
                let is_coherent = allocation
                    .memory_properties()
                    .contains(vk::MemoryPropertyFlags::HOST_COHERENT);
                Ok(crate::BufferMapping {
                    ptr: unsafe {
                        allocation
                            .mapped_ptr()
                            .unwrap()
                            .cast()
                            .offset(range.start as isize)
                    },
                    is_coherent,
                })
            } else {
                crate::hal_usage_error("tried to map externally created buffer")
            }
        } else {
            crate::hal_usage_error("tried to map external buffer")
        }
    }

    unsafe fn unmap_buffer(&self, buffer: &super::Buffer) {
        if buffer.allocation.is_some() {
            // gpu-allocator maps the buffer when it is allocated and unmaps it when it is freed
        } else {
            crate::hal_usage_error("tried to unmap external buffer")
        }
    }

    unsafe fn flush_mapped_ranges<I>(&self, buffer: &super::Buffer, ranges: I)
    where
        I: Iterator<Item = crate::MemoryRange>,
    {
        if let Some(vk_ranges) = self.shared.make_memory_ranges(buffer, ranges) {
            unsafe {
                self.shared
                    .raw
                    .flush_mapped_memory_ranges(
                        &smallvec::SmallVec::<[vk::MappedMemoryRange; 32]>::from_iter(vk_ranges),
                    )
            }
            .unwrap();
        }
    }
    unsafe fn invalidate_mapped_ranges<I>(&self, buffer: &super::Buffer, ranges: I)
    where
        I: Iterator<Item = crate::MemoryRange>,
    {
        if let Some(vk_ranges) = self.shared.make_memory_ranges(buffer, ranges) {
            unsafe {
                self.shared
                    .raw
                    .invalidate_mapped_memory_ranges(&smallvec::SmallVec::<
                        [vk::MappedMemoryRange; 32],
                    >::from_iter(vk_ranges))
            }
            .unwrap();
        }
    }

    unsafe fn create_texture(
        &self,
        desc: &crate::TextureDescriptor,
    ) -> Result<super::Texture, crate::DeviceError> {
        let image = self.create_image_without_memory(desc, None)?;

        self.error_if_would_oom_on_resource_allocation(false, image.requirements.size)
            .inspect_err(|_| {
                unsafe { self.shared.raw.destroy_image(image.raw, None) };
            })?;

        let name = desc.label.unwrap_or("Unlabeled texture");

        let allocation = self
            .mem_allocator
            .lock()
            .allocate(&gpu_allocator::vulkan::AllocationCreateDesc {
                name,
                requirements: vk::MemoryRequirements {
                    memory_type_bits: image.requirements.memory_type_bits
                        & self.valid_ash_memory_types,
                    ..image.requirements
                },
                location: gpu_allocator::MemoryLocation::GpuOnly,
                linear: false,
                allocation_scheme: gpu_allocator::vulkan::AllocationScheme::GpuAllocatorManaged,
            })
            .inspect_err(|_| {
                unsafe { self.shared.raw.destroy_image(image.raw, None) };
            })?;

        self.counters.texture_memory.add(allocation.size() as isize);

        unsafe {
            self.shared
                .raw
                .bind_image_memory(image.raw, allocation.memory(), allocation.offset())
        }
        .map_err(super::map_host_device_oom_err)
        .inspect_err(|_| {
            unsafe { self.shared.raw.destroy_image(image.raw, None) };
        })?;

        Ok(unsafe {
            self.texture_from_raw(
                image.raw,
                desc,
                None,
                super::TextureMemory::Allocation(allocation),
            )
        })
    }

    unsafe fn destroy_texture(&self, texture: super::Texture) {
        if texture.drop_guard.is_none() {
            unsafe { self.shared.raw.destroy_image(texture.raw, None) };
        }

        match texture.memory {
            super::TextureMemory::Allocation(allocation) => {
                self.counters.texture_memory.sub(allocation.size() as isize);
                let result = self.mem_allocator.lock().free(allocation);
                if let Err(err) = result {
                    log::warn!("Failed to free texture allocation: {err}");
                }
            }
            super::TextureMemory::Dedicated(memory) => unsafe {
                self.shared.raw.free_memory(memory, None);
            },
            super::TextureMemory::External => {}
        }

        self.counters.textures.sub(1);
    }

    unsafe fn add_raw_texture(&self, _texture: &super::Texture) {
        self.counters.textures.add(1);
    }

    unsafe fn create_texture_view(
        &self,
        texture: &super::Texture,
        desc: &crate::TextureViewDescriptor,
    ) -> Result<super::TextureView, crate::DeviceError> {
        let subresource_range = conv::map_subresource_range(&desc.range, texture.format);
        let raw_format = self.shared.private_caps.map_texture_format(desc.format);
        let mut vk_info = vk::ImageViewCreateInfo::default()
            .flags(vk::ImageViewCreateFlags::empty())
            .image(texture.raw)
            .view_type(conv::map_view_dimension(desc.dimension))
            .format(raw_format)
            .subresource_range(subresource_range);
        let layers =
            NonZeroU32::new(subresource_range.layer_count).expect("Unexpected zero layer count");

        let mut image_view_info;
        if self.shared.private_caps.image_view_usage && !desc.usage.is_empty() {
            image_view_info =
                vk::ImageViewUsageCreateInfo::default().usage(conv::map_texture_usage(desc.usage));
            vk_info = vk_info.push_next(&mut image_view_info);
        }

        let raw = unsafe { self.shared.raw.create_image_view(&vk_info, None) }
            .map_err(super::map_host_device_oom_and_ioca_err)?;

        if let Some(label) = desc.label {
            unsafe { self.shared.set_object_name(raw, label) };
        }

        let identity = self.shared.texture_view_identity_factory.next();

        self.counters.texture_views.add(1);

        Ok(super::TextureView {
            raw_texture: texture.raw,
            raw,
            _layers: layers,
            format: desc.format,
            raw_format,
            base_mip_level: desc.range.base_mip_level,
            dimension: desc.dimension,
            texture_identity: texture.identity,
            view_identity: identity,
        })
    }
    unsafe fn destroy_texture_view(&self, view: super::TextureView) {
        unsafe { self.shared.raw.destroy_image_view(view.raw, None) };

        self.counters.texture_views.sub(1);
    }

    unsafe fn create_sampler(
        &self,
        desc: &crate::SamplerDescriptor,
    ) -> Result<super::Sampler, crate::DeviceError> {
        let mut create_info = vk::SamplerCreateInfo::default()
            .flags(vk::SamplerCreateFlags::empty())
            .mag_filter(conv::map_filter_mode(desc.mag_filter))
            .min_filter(conv::map_filter_mode(desc.min_filter))
            .mipmap_mode(conv::map_mip_filter_mode(desc.mipmap_filter))
            .address_mode_u(conv::map_address_mode(desc.address_modes[0]))
            .address_mode_v(conv::map_address_mode(desc.address_modes[1]))
            .address_mode_w(conv::map_address_mode(desc.address_modes[2]))
            .min_lod(desc.lod_clamp.start)
            .max_lod(desc.lod_clamp.end);

        if let Some(fun) = desc.compare {
            create_info = create_info
                .compare_enable(true)
                .compare_op(conv::map_comparison(fun));
        }

        if desc.anisotropy_clamp != 1 {
            // We only enable anisotropy if it is supported, and the wgpu-hal interface guarantees
            // that the clamp is in the range [1, 16], which is always supported if anisotropy is.
1208            create_info = create_info
1209                .anisotropy_enable(true)
1210                .max_anisotropy(desc.anisotropy_clamp as f32);
1211        }
1212
1213        if let Some(color) = desc.border_color {
1214            create_info = create_info.border_color(conv::map_border_color(color));
1215        }
1216
1217        let mut sampler_cache_guard = self.shared.sampler_cache.lock();
1218
1219        let raw = sampler_cache_guard.create_sampler(&self.shared.raw, create_info)?;
1220
1221        // Note: Cached samplers will just continually overwrite the label
1222        //
1223        // https://github.com/gfx-rs/wgpu/issues/6867
1224        if let Some(label) = desc.label {
1225            // SAFETY: we are holding a lock on the sampler cache,
1226            // so we can only be setting the name from one thread.
1227            unsafe { self.shared.set_object_name(raw, label) };
1228        }
1229
1230        drop(sampler_cache_guard);
1231
1232        self.counters.samplers.add(1);
1233
1234        Ok(super::Sampler { raw, create_info })
1235    }
1236    unsafe fn destroy_sampler(&self, sampler: super::Sampler) {
1237        self.shared.sampler_cache.lock().destroy_sampler(
1238            &self.shared.raw,
1239            sampler.create_info,
1240            sampler.raw,
1241        );
1242
1243        self.counters.samplers.sub(1);
1244    }
1245
1246    unsafe fn create_command_encoder(
1247        &self,
1248        desc: &crate::CommandEncoderDescriptor<super::Queue>,
1249    ) -> Result<super::CommandEncoder, crate::DeviceError> {
1250        let vk_info = vk::CommandPoolCreateInfo::default()
1251            .queue_family_index(desc.queue.family_index)
1252            .flags(vk::CommandPoolCreateFlags::TRANSIENT);
1253
1254        let raw = unsafe {
1255            self.shared
1256                .raw
1257                .create_command_pool(&vk_info, None)
1258                .map_err(super::map_host_device_oom_err)?
1259        };
1260
1261        self.counters.command_encoders.add(1);
1262
1263        Ok(super::CommandEncoder {
1264            raw,
1265            device: Arc::clone(&self.shared),
1266            active: vk::CommandBuffer::null(),
1267            bind_point: vk::PipelineBindPoint::default(),
1268            temp: super::Temp::default(),
1269            free: Vec::new(),
1270            discarded: Vec::new(),
1271            rpass_debug_marker_active: false,
1272            end_of_pass_timer_query: None,
1273            framebuffers: Default::default(),
1274            temp_texture_views: Default::default(),
1275            counters: Arc::clone(&self.counters),
1276            current_pipeline_is_multiview: false,
1277        })
1278    }
1279
1280    unsafe fn create_bind_group_layout(
1281        &self,
1282        desc: &crate::BindGroupLayoutDescriptor,
1283    ) -> Result<super::BindGroupLayout, crate::DeviceError> {
1284        // Iterate through the entries and accumulate our Vulkan
1285        // DescriptorSetLayoutBindings and DescriptorBindingFlags, as well as
1286        // our binding map and our descriptor counts.
1287        // Note: not bothering with on stack arrays here as it's low frequency
1288        let mut vk_bindings = Vec::new();
1289        let mut binding_flags = Vec::new();
1290        let mut binding_map = Vec::new();
1291        let mut next_binding = 0;
1292        let mut contains_binding_arrays = false;
1293        let mut desc_count = gpu_descriptor::DescriptorTotalCount::default();
1294        for entry in desc.entries {
1295            if entry.count.is_some() {
1296                contains_binding_arrays = true;
1297            }
1298
1299            let partially_bound = desc
1300                .flags
1301                .contains(crate::BindGroupLayoutFlags::PARTIALLY_BOUND);
1302            let mut flags = vk::DescriptorBindingFlags::empty();
1303            if partially_bound && entry.count.is_some() {
1304                flags |= vk::DescriptorBindingFlags::PARTIALLY_BOUND;
1305            }
1306            if entry.count.is_some() {
1307                flags |= vk::DescriptorBindingFlags::UPDATE_AFTER_BIND;
1308            }
1309
1310            let count = entry.count.map_or(1, |c| c.get());
1311            match entry.ty {
1312                wgt::BindingType::ExternalTexture => unimplemented!(),
1313                _ => {
1314                    vk_bindings.push(vk::DescriptorSetLayoutBinding {
1315                        binding: next_binding,
1316                        descriptor_type: conv::map_binding_type(entry.ty),
1317                        descriptor_count: count,
1318                        stage_flags: conv::map_shader_stage(entry.visibility),
1319                        p_immutable_samplers: ptr::null(),
1320                        _marker: Default::default(),
1321                    });
1322                    binding_flags.push(flags);
1323                    binding_map.push((
1324                        entry.binding,
1325                        super::BindingInfo {
1326                            binding: next_binding,
1327                            binding_array_size: entry.count,
1328                        },
1329                    ));
1330                    next_binding += 1;
1331                }
1332            }
1333
1334            match entry.ty {
1335                wgt::BindingType::Buffer {
1336                    ty,
1337                    has_dynamic_offset,
1338                    ..
1339                } => match ty {
1340                    wgt::BufferBindingType::Uniform => {
1341                        if has_dynamic_offset {
1342                            desc_count.uniform_buffer_dynamic += count;
1343                        } else {
1344                            desc_count.uniform_buffer += count;
1345                        }
1346                    }
1347                    wgt::BufferBindingType::Storage { .. } => {
1348                        if has_dynamic_offset {
1349                            desc_count.storage_buffer_dynamic += count;
1350                        } else {
1351                            desc_count.storage_buffer += count;
1352                        }
1353                    }
1354                },
1355                wgt::BindingType::Sampler { .. } => {
1356                    desc_count.sampler += count;
1357                }
1358                wgt::BindingType::Texture { .. } => {
1359                    desc_count.sampled_image += count;
1360                }
1361                wgt::BindingType::StorageTexture { .. } => {
1362                    desc_count.storage_image += count;
1363                }
1364                wgt::BindingType::AccelerationStructure { .. } => {
1365                    desc_count.acceleration_structure += count;
1366                }
1367                wgt::BindingType::ExternalTexture => unimplemented!(),
1368            }
1369        }
1370
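        // Layouts containing binding arrays are allocated from update-after-bind
        // descriptor pools, so the layout must carry the matching create flag.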
1371        let vk_info = vk::DescriptorSetLayoutCreateInfo::default()
1372            .bindings(&vk_bindings)
1373            .flags(if contains_binding_arrays {
1374                vk::DescriptorSetLayoutCreateFlags::UPDATE_AFTER_BIND_POOL
1375            } else {
1376                vk::DescriptorSetLayoutCreateFlags::empty()
1377            });
1378
1379        let mut binding_flag_info =
1380            vk::DescriptorSetLayoutBindingFlagsCreateInfo::default().binding_flags(&binding_flags);
1381
1382        let vk_info = vk_info.push_next(&mut binding_flag_info);
1383
1384        let raw = unsafe {
1385            self.shared
1386                .raw
1387                .create_descriptor_set_layout(&vk_info, None)
1388                .map_err(super::map_host_device_oom_err)?
1389        };
1390
1391        if let Some(label) = desc.label {
1392            unsafe { self.shared.set_object_name(raw, label) };
1393        }
1394
1395        self.counters.bind_group_layouts.add(1);
1396
1397        Ok(super::BindGroupLayout {
1398            raw,
1399            desc_count,
1400            entries: desc.entries.into(),
1401            binding_map,
1402            contains_binding_arrays,
1403        })
1404    }
1405    unsafe fn destroy_bind_group_layout(&self, bg_layout: super::BindGroupLayout) {
1406        unsafe {
1407            self.shared
1408                .raw
1409                .destroy_descriptor_set_layout(bg_layout.raw, None)
1410        };
1411
1412        self.counters.bind_group_layouts.sub(1);
1413    }
1414
1415    unsafe fn create_pipeline_layout(
1416        &self,
1417        desc: &crate::PipelineLayoutDescriptor<super::BindGroupLayout>,
1418    ) -> Result<super::PipelineLayout, crate::DeviceError> {
1419        // Note: not bothering with a stack-allocated array here, as this is a low-frequency path.
1420        let vk_set_layouts = desc
1421            .bind_group_layouts
1422            .iter()
1423            .map(|bgl| bgl.raw)
1424            .collect::<Vec<_>>();
1425        let vk_immediates_ranges: Option<vk::PushConstantRange> = if desc.immediate_size != 0 {
1426            Some(vk::PushConstantRange {
1427                stage_flags: vk::ShaderStageFlags::ALL,
1428                offset: 0,
1429                size: desc.immediate_size,
1430            })
1431        } else {
1432            None
1433        };
1434
1435        let vk_info = vk::PipelineLayoutCreateInfo::default()
1436            .flags(vk::PipelineLayoutCreateFlags::empty())
1437            .set_layouts(&vk_set_layouts)
1438            .push_constant_ranges(vk_immediates_ranges.as_slice());
1439
1440        let raw = {
1441            profiling::scope!("vkCreatePipelineLayout");
1442            unsafe {
1443                self.shared
1444                    .raw
1445                    .create_pipeline_layout(&vk_info, None)
1446                    .map_err(super::map_host_device_oom_err)?
1447            }
1448        };
1449
1450        if let Some(label) = desc.label {
1451            unsafe { self.shared.set_object_name(raw, label) };
1452        }
1453
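        // Build the naga SPIR-V binding map: each wgpu (group, binding) pair maps to
        // the Vulkan descriptor set (the group index) and the flattened binding slot
        // that was assigned when the bind group layout was created.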
1454        let mut binding_map = BTreeMap::new();
1455        for (group, &layout) in desc.bind_group_layouts.iter().enumerate() {
1456            for &(binding, binding_info) in &layout.binding_map {
1457                binding_map.insert(
1458                    naga::ResourceBinding {
1459                        group: group as u32,
1460                        binding,
1461                    },
1462                    naga::back::spv::BindingInfo {
1463                        descriptor_set: group as u32,
1464                        binding: binding_info.binding,
1465                        binding_array_size: binding_info.binding_array_size.map(NonZeroU32::get),
1466                    },
1467                );
1468            }
1469        }
1470
1471        self.counters.pipeline_layouts.add(1);
1472        Ok(super::PipelineLayout { raw, binding_map })
1473    }
1474    unsafe fn destroy_pipeline_layout(&self, pipeline_layout: super::PipelineLayout) {
1475        unsafe {
1476            self.shared
1477                .raw
1478                .destroy_pipeline_layout(pipeline_layout.raw, None)
1479        };
1480
1481        self.counters.pipeline_layouts.sub(1);
1482    }
1483
1484    unsafe fn create_bind_group(
1485        &self,
1486        desc: &crate::BindGroupDescriptor<
1487            super::BindGroupLayout,
1488            super::Buffer,
1489            super::Sampler,
1490            super::TextureView,
1491            super::AccelerationStructure,
1492        >,
1493    ) -> Result<super::BindGroup, crate::DeviceError> {
1494        let desc_set_layout_flags = if desc.layout.contains_binding_arrays {
1495            gpu_descriptor::DescriptorSetLayoutCreateFlags::UPDATE_AFTER_BIND
1496        } else {
1497            gpu_descriptor::DescriptorSetLayoutCreateFlags::empty()
1498        };
1499
1500        let mut vk_sets = unsafe {
1501            self.desc_allocator.lock().allocate(
1502                &*self.shared,
1503                &desc.layout.raw,
1504                desc_set_layout_flags,
1505                &desc.layout.desc_count,
1506                1,
1507            )?
1508        };
1509
1510        let set = vk_sets.pop().unwrap();
1511        if let Some(label) = desc.label {
1512            unsafe { self.shared.set_object_name(*set.raw(), label) };
1513        }
1514
1515        /// Helper for splitting off and initializing a given number of elements from a
1516        /// pre-allocated stack, based on items returned from an [`ExactSizeIterator`]. Typically
1517        /// created from a [`MaybeUninit`] slice (see [`Vec::spare_capacity_mut()`]).
1518        /// The updated [`ExtendStack`] of remaining uninitialized elements is returned, safely
1519        /// representing that the initialized and remaining elements are two independent mutable
1520        /// borrows.
1521        struct ExtendStack<'a, T> {
1522            remainder: &'a mut [MaybeUninit<T>],
1523        }
1524
1525        impl<'a, T> ExtendStack<'a, T> {
1526            fn from_vec_capacity(vec: &'a mut Vec<T>) -> Self {
1527                Self {
1528                    remainder: vec.spare_capacity_mut(),
1529                }
1530            }
1531
1532            fn extend_one(self, value: T) -> (Self, &'a mut T) {
1533                let (to_init, remainder) = self.remainder.split_first_mut().unwrap();
1534                let init = to_init.write(value);
1535                (Self { remainder }, init)
1536            }
1537
1538            fn extend(
1539                self,
1540                iter: impl IntoIterator<Item = T> + ExactSizeIterator,
1541            ) -> (Self, &'a mut [T]) {
1542                let (to_init, remainder) = self.remainder.split_at_mut(iter.len());
1543
1544                for (value, to_init) in iter.into_iter().zip(to_init.iter_mut()) {
1545                    to_init.write(value);
1546                }
1547
1548                // We can't use the safe (but still unstable) MaybeUninit::write_slice() here because we're writing from an iterator, not a slice.
1549
1550                let init = {
1551                    // SAFETY: The loop above has initialized exactly as many items as to_init is
1552                    // long, so it is safe to cast away the MaybeUninit<T> wrapper into T.
1553
1554                    // Additional safety docs from unstable slice_assume_init_mut
1555                    // SAFETY: similar to safety notes for `slice_get_ref`, but we have a
1556                    // mutable reference which is also guaranteed to be valid for writes.
1557                    unsafe { mem::transmute::<&mut [MaybeUninit<T>], &mut [T]>(to_init) }
1558                };
1559                (Self { remainder }, init)
1560            }
1561        }
1562
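        // Illustrative sketch of how ExtendStack is used below (names are made up):
        //
        //     let mut infos = Vec::with_capacity(n);
        //     let stack = ExtendStack::from_vec_capacity(&mut infos);
        //     let (stack, one) = stack.extend_one(make_info());   // one: &mut T
        //     let (_stack, many) = stack.extend(iter_of_infos);   // many: &mut [T]
        //
        // The backing Vec's length is never bumped; elements live in its spare
        // capacity and the returned borrows are what the vk::WriteDescriptorSet
        // structs below point into.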
1563        let mut writes = Vec::with_capacity(desc.entries.len());
1564        let mut buffer_infos = Vec::with_capacity(desc.buffers.len());
1565        let mut buffer_infos = ExtendStack::from_vec_capacity(&mut buffer_infos);
1566        let mut image_infos = Vec::with_capacity(desc.samplers.len() + desc.textures.len());
1567        let mut image_infos = ExtendStack::from_vec_capacity(&mut image_infos);
1568        // TODO: This length could be reduced to just the number of top-level acceleration
1569        // structure bindings, where multiple consecutive TLAS bindings that are set via
1570        // one `WriteDescriptorSet` count towards one "info" struct, not the total number of
1571        // acceleration structure bindings to write:
1572        let mut acceleration_structure_infos =
1573            Vec::with_capacity(desc.acceleration_structures.len());
1574        let mut acceleration_structure_infos =
1575            ExtendStack::from_vec_capacity(&mut acceleration_structure_infos);
1576        let mut raw_acceleration_structures =
1577            Vec::with_capacity(desc.acceleration_structures.len());
1578        let mut raw_acceleration_structures =
1579            ExtendStack::from_vec_capacity(&mut raw_acceleration_structures);
1580
1581        let layout_and_entry_iter = desc.entries.iter().map(|entry| {
1582            let layout = desc
1583                .layout
1584                .entries
1585                .iter()
1586                .find(|layout_entry| layout_entry.binding == entry.binding)
1587                .expect("internal error: no layout entry found with binding slot");
1588            (layout, entry)
1589        });
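        // `next_binding` mirrors the sequential Vulkan binding slots assigned in
        // create_bind_group_layout; this relies on the bind group entries being
        // walked in the same order as the layout entries that produced them.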
1590        let mut next_binding = 0;
1591        for (layout, entry) in layout_and_entry_iter {
1592            let write = vk::WriteDescriptorSet::default().dst_set(*set.raw());
1593
1594            match layout.ty {
1595                wgt::BindingType::Sampler(_) => {
1596                    let start = entry.resource_index;
1597                    let end = start + entry.count;
1598                    let local_image_infos;
1599                    (image_infos, local_image_infos) =
1600                        image_infos.extend(desc.samplers[start as usize..end as usize].iter().map(
1601                            |sampler| vk::DescriptorImageInfo::default().sampler(sampler.raw),
1602                        ));
1603                    writes.push(
1604                        write
1605                            .dst_binding(next_binding)
1606                            .descriptor_type(conv::map_binding_type(layout.ty))
1607                            .image_info(local_image_infos),
1608                    );
1609                    next_binding += 1;
1610                }
1611                wgt::BindingType::Texture { .. } | wgt::BindingType::StorageTexture { .. } => {
1612                    let start = entry.resource_index;
1613                    let end = start + entry.count;
1614                    let local_image_infos;
1615                    (image_infos, local_image_infos) =
1616                        image_infos.extend(desc.textures[start as usize..end as usize].iter().map(
1617                            |binding| {
1618                                let layout =
1619                                    conv::derive_image_layout(binding.usage, binding.view.format);
1620                                vk::DescriptorImageInfo::default()
1621                                    .image_view(binding.view.raw)
1622                                    .image_layout(layout)
1623                            },
1624                        ));
1625                    writes.push(
1626                        write
1627                            .dst_binding(next_binding)
1628                            .descriptor_type(conv::map_binding_type(layout.ty))
1629                            .image_info(local_image_infos),
1630                    );
1631                    next_binding += 1;
1632                }
1633                wgt::BindingType::Buffer { .. } => {
1634                    let start = entry.resource_index;
1635                    let end = start + entry.count;
1636                    let local_buffer_infos;
1637                    (buffer_infos, local_buffer_infos) =
1638                        buffer_infos.extend(desc.buffers[start as usize..end as usize].iter().map(
1639                            |binding| {
1640                                vk::DescriptorBufferInfo::default()
1641                                    .buffer(binding.buffer.raw)
1642                                    .offset(binding.offset)
1643                                    .range(
1644                                        binding.size.map_or(vk::WHOLE_SIZE, wgt::BufferSize::get),
1645                                    )
1646                            },
1647                        ));
1648                    writes.push(
1649                        write
1650                            .dst_binding(next_binding)
1651                            .descriptor_type(conv::map_binding_type(layout.ty))
1652                            .buffer_info(local_buffer_infos),
1653                    );
1654                    next_binding += 1;
1655                }
1656                wgt::BindingType::AccelerationStructure { .. } => {
1657                    let start = entry.resource_index;
1658                    let end = start + entry.count;
1659
1660                    let local_raw_acceleration_structures;
1661                    (
1662                        raw_acceleration_structures,
1663                        local_raw_acceleration_structures,
1664                    ) = raw_acceleration_structures.extend(
1665                        desc.acceleration_structures[start as usize..end as usize]
1666                            .iter()
1667                            .map(|acceleration_structure| acceleration_structure.raw),
1668                    );
1669
1670                    let local_acceleration_structure_infos;
1671                    (
1672                        acceleration_structure_infos,
1673                        local_acceleration_structure_infos,
1674                    ) = acceleration_structure_infos.extend_one(
1675                        vk::WriteDescriptorSetAccelerationStructureKHR::default()
1676                            .acceleration_structures(local_raw_acceleration_structures),
1677                    );
1678
1679                    writes.push(
1680                        write
1681                            .dst_binding(next_binding)
1682                            .descriptor_type(conv::map_binding_type(layout.ty))
1683                            .descriptor_count(entry.count)
1684                            .push_next(local_acceleration_structure_infos),
1685                    );
1686                    next_binding += 1;
1687                }
1688                wgt::BindingType::ExternalTexture => unimplemented!(),
1689            }
1690        }
1691
1692        unsafe { self.shared.raw.update_descriptor_sets(&writes, &[]) };
1693
1694        self.counters.bind_groups.add(1);
1695
1696        Ok(super::BindGroup { set })
1697    }
1698
1699    unsafe fn destroy_bind_group(&self, group: super::BindGroup) {
1700        unsafe {
1701            self.desc_allocator
1702                .lock()
1703                .free(&*self.shared, Some(group.set))
1704        };
1705
1706        self.counters.bind_groups.sub(1);
1707    }
1708
1709    unsafe fn create_shader_module(
1710        &self,
1711        desc: &crate::ShaderModuleDescriptor,
1712        shader: crate::ShaderInput,
1713    ) -> Result<super::ShaderModule, crate::ShaderError> {
1714        let shader_module = match shader {
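            // If the driver needs one SPIR-V module per entry point, or the module still
            // contains unresolved pipeline overrides, keep the naga IR around and defer
            // SPIR-V generation to pipeline creation time.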
1715            crate::ShaderInput::Naga(naga_shader)
1716                if self
1717                    .shared
1718                    .workarounds
1719                    .contains(super::Workarounds::SEPARATE_ENTRY_POINTS)
1720                    || !naga_shader.module.overrides.is_empty() =>
1721            {
1722                super::ShaderModule::Intermediate {
1723                    naga_shader,
1724                    runtime_checks: desc.runtime_checks,
1725                }
1726            }
1727            crate::ShaderInput::Naga(naga_shader) => {
1728                let mut naga_options = self.naga_options.clone();
1729                naga_options.debug_info =
1730                    naga_shader
1731                        .debug_source
1732                        .as_ref()
1733                        .map(|d| naga::back::spv::DebugInfo {
1734                            source_code: d.source_code.as_ref(),
1735                            file_name: d.file_name.as_ref(),
1736                            language: naga::back::spv::SourceLanguage::WGSL,
1737                        });
1738                if !desc.runtime_checks.bounds_checks {
1739                    naga_options.bounds_check_policies = naga::proc::BoundsCheckPolicies {
1740                        index: naga::proc::BoundsCheckPolicy::Unchecked,
1741                        buffer: naga::proc::BoundsCheckPolicy::Unchecked,
1742                        image_load: naga::proc::BoundsCheckPolicy::Unchecked,
1743                        binding_array: naga::proc::BoundsCheckPolicy::Unchecked,
1744                    };
1745                }
1746                let spv = naga::back::spv::write_vec(
1747                    &naga_shader.module,
1748                    &naga_shader.info,
1749                    &naga_options,
1750                    None,
1751                )
1752                .map_err(|e| crate::ShaderError::Compilation(format!("{e}")))?;
1753                super::ShaderModule::Raw(self.create_shader_module_impl(&spv, &desc.label)?)
1754            }
1755            crate::ShaderInput::SpirV(data) => {
1756                super::ShaderModule::Raw(self.create_shader_module_impl(data, &desc.label)?)
1757            }
1758            crate::ShaderInput::MetalLib { .. }
1759            | crate::ShaderInput::Msl { .. }
1760            | crate::ShaderInput::Dxil { .. }
1761            | crate::ShaderInput::Hlsl { .. }
1762            | crate::ShaderInput::Glsl { .. } => unreachable!(),
1763        };
1764
1765        self.counters.shader_modules.add(1);
1766
1767        Ok(shader_module)
1768    }
1769
1770    unsafe fn destroy_shader_module(&self, module: super::ShaderModule) {
1771        match module {
1772            super::ShaderModule::Raw(raw) => {
1773                unsafe { self.shared.raw.destroy_shader_module(raw, None) };
1774            }
1775            super::ShaderModule::Intermediate { .. } => {}
1776        }
1777
1778        self.counters.shader_modules.sub(1);
1779    }
1780
1781    unsafe fn create_render_pipeline(
1782        &self,
1783        desc: &crate::RenderPipelineDescriptor<
1784            super::PipelineLayout,
1785            super::ShaderModule,
1786            super::PipelineCache,
1787        >,
1788    ) -> Result<super::RenderPipeline, crate::PipelineError> {
1789        let dynamic_states = [
1790            vk::DynamicState::VIEWPORT,
1791            vk::DynamicState::SCISSOR,
1792            vk::DynamicState::BLEND_CONSTANTS,
1793            vk::DynamicState::STENCIL_REFERENCE,
1794        ];
1795        let mut compatible_rp_key = super::RenderPassKey {
1796            sample_count: desc.multisample.count,
1797            multiview_mask: desc.multiview_mask,
1798            ..Default::default()
1799        };
1800        let mut stages = ArrayVec::<_, { crate::MAX_CONCURRENT_SHADER_STAGES }>::new();
1801        let mut vertex_buffers = Vec::new();
1802        let mut vertex_attributes = Vec::new();
1803
1804        if let crate::VertexProcessor::Standard {
1805            vertex_buffers: desc_vertex_buffers,
1806            vertex_stage: _,
1807        } = &desc.vertex_processor
1808        {
1809            vertex_buffers = Vec::with_capacity(desc_vertex_buffers.len());
1810            for (i, vb) in desc_vertex_buffers.iter().enumerate() {
1811                vertex_buffers.push(vk::VertexInputBindingDescription {
1812                    binding: i as u32,
1813                    stride: vb.array_stride as u32,
1814                    input_rate: match vb.step_mode {
1815                        wgt::VertexStepMode::Vertex => vk::VertexInputRate::VERTEX,
1816                        wgt::VertexStepMode::Instance => vk::VertexInputRate::INSTANCE,
1817                    },
1818                });
1819                for at in vb.attributes {
1820                    vertex_attributes.push(vk::VertexInputAttributeDescription {
1821                        location: at.shader_location,
1822                        binding: i as u32,
1823                        format: conv::map_vertex_format(at.format),
1824                        offset: at.offset as u32,
1825                    });
1826                }
1827            }
1828        }
1829
1830        let vk_vertex_input = vk::PipelineVertexInputStateCreateInfo::default()
1831            .vertex_binding_descriptions(&vertex_buffers)
1832            .vertex_attribute_descriptions(&vertex_attributes);
1833
1834        let vk_input_assembly = vk::PipelineInputAssemblyStateCreateInfo::default()
1835            .topology(conv::map_topology(desc.primitive.topology))
1836            .primitive_restart_enable(desc.primitive.strip_index_format.is_some());
1837
1838        let mut compiled_vs = None;
1839        let mut compiled_ms = None;
1840        let mut compiled_ts = None;
1841        match &desc.vertex_processor {
1842            crate::VertexProcessor::Standard {
1843                vertex_buffers: _,
1844                vertex_stage,
1845            } => {
1846                compiled_vs = Some(self.compile_stage(
1847                    vertex_stage,
1848                    naga::ShaderStage::Vertex,
1849                    &desc.layout.binding_map,
1850                )?);
1851                stages.push(compiled_vs.as_ref().unwrap().create_info);
1852            }
1853            crate::VertexProcessor::Mesh {
1854                task_stage,
1855                mesh_stage,
1856            } => {
1857                if let Some(t) = task_stage.as_ref() {
1858                    compiled_ts = Some(self.compile_stage(
1859                        t,
1860                        naga::ShaderStage::Task,
1861                        &desc.layout.binding_map,
1862                    )?);
1863                    stages.push(compiled_ts.as_ref().unwrap().create_info);
1864                }
1865                compiled_ms = Some(self.compile_stage(
1866                    mesh_stage,
1867                    naga::ShaderStage::Mesh,
1868                    &desc.layout.binding_map,
1869                )?);
1870                stages.push(compiled_ms.as_ref().unwrap().create_info);
1871            }
1872        }
1873        let compiled_fs = match desc.fragment_stage {
1874            Some(ref stage) => {
1875                let compiled = self.compile_stage(
1876                    stage,
1877                    naga::ShaderStage::Fragment,
1878                    &desc.layout.binding_map,
1879                )?;
1880                stages.push(compiled.create_info);
1881                Some(compiled)
1882            }
1883            None => None,
1884        };
1885
1886        let mut vk_rasterization = vk::PipelineRasterizationStateCreateInfo::default()
1887            .polygon_mode(conv::map_polygon_mode(desc.primitive.polygon_mode))
1888            .front_face(conv::map_front_face(desc.primitive.front_face))
1889            .line_width(1.0)
1890            .depth_clamp_enable(desc.primitive.unclipped_depth);
1891        if let Some(face) = desc.primitive.cull_mode {
1892            vk_rasterization = vk_rasterization.cull_mode(conv::map_cull_face(face))
1893        }
1894        let mut vk_rasterization_conservative_state =
1895            vk::PipelineRasterizationConservativeStateCreateInfoEXT::default()
1896                .conservative_rasterization_mode(
1897                    vk::ConservativeRasterizationModeEXT::OVERESTIMATE,
1898                );
1899        if desc.primitive.conservative {
1900            vk_rasterization = vk_rasterization.push_next(&mut vk_rasterization_conservative_state);
1901        }
1902
1903        let mut vk_depth_stencil = vk::PipelineDepthStencilStateCreateInfo::default();
1904        if let Some(ref ds) = desc.depth_stencil {
1905            let vk_format = self.shared.private_caps.map_texture_format(ds.format);
1906            let vk_layout = if ds.is_read_only(desc.primitive.cull_mode) {
1907                vk::ImageLayout::DEPTH_STENCIL_READ_ONLY_OPTIMAL
1908            } else {
1909                vk::ImageLayout::DEPTH_STENCIL_ATTACHMENT_OPTIMAL
1910            };
1911            compatible_rp_key.depth_stencil = Some(super::DepthStencilAttachmentKey {
1912                base: super::AttachmentKey::compatible(vk_format, vk_layout),
1913                stencil_ops: crate::AttachmentOps::all(),
1914            });
1915
1916            if ds.is_depth_enabled() {
1917                vk_depth_stencil = vk_depth_stencil
1918                    .depth_test_enable(true)
1919                    .depth_write_enable(ds.depth_write_enabled)
1920                    .depth_compare_op(conv::map_comparison(ds.depth_compare));
1921            }
1922            if ds.stencil.is_enabled() {
1923                let s = &ds.stencil;
1924                let front = conv::map_stencil_face(&s.front, s.read_mask, s.write_mask);
1925                let back = conv::map_stencil_face(&s.back, s.read_mask, s.write_mask);
1926                vk_depth_stencil = vk_depth_stencil
1927                    .stencil_test_enable(true)
1928                    .front(front)
1929                    .back(back);
1930            }
1931
1932            if ds.bias.is_enabled() {
1933                vk_rasterization = vk_rasterization
1934                    .depth_bias_enable(true)
1935                    .depth_bias_constant_factor(ds.bias.constant as f32)
1936                    .depth_bias_clamp(ds.bias.clamp)
1937                    .depth_bias_slope_factor(ds.bias.slope_scale);
1938            }
1939        }
1940
1941        let vk_viewport = vk::PipelineViewportStateCreateInfo::default()
1942            .flags(vk::PipelineViewportStateCreateFlags::empty())
1943            .scissor_count(1)
1944            .viewport_count(1);
1945
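        // Vulkan takes the 64-bit sample mask as an array of 32-bit words, low word first.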
1946        let vk_sample_mask = [
1947            desc.multisample.mask as u32,
1948            (desc.multisample.mask >> 32) as u32,
1949        ];
1950        let vk_multisample = vk::PipelineMultisampleStateCreateInfo::default()
1951            .rasterization_samples(vk::SampleCountFlags::from_raw(desc.multisample.count))
1952            .alpha_to_coverage_enable(desc.multisample.alpha_to_coverage_enabled)
1953            .sample_mask(&vk_sample_mask);
1954
1955        let mut vk_attachments = Vec::with_capacity(desc.color_targets.len());
1956        for cat in desc.color_targets {
1957            let (key, attachment) = if let Some(cat) = cat.as_ref() {
1958                let mut vk_attachment = vk::PipelineColorBlendAttachmentState::default()
1959                    .color_write_mask(vk::ColorComponentFlags::from_raw(cat.write_mask.bits()));
1960                if let Some(ref blend) = cat.blend {
1961                    let (color_op, color_src, color_dst) = conv::map_blend_component(&blend.color);
1962                    let (alpha_op, alpha_src, alpha_dst) = conv::map_blend_component(&blend.alpha);
1963                    vk_attachment = vk_attachment
1964                        .blend_enable(true)
1965                        .color_blend_op(color_op)
1966                        .src_color_blend_factor(color_src)
1967                        .dst_color_blend_factor(color_dst)
1968                        .alpha_blend_op(alpha_op)
1969                        .src_alpha_blend_factor(alpha_src)
1970                        .dst_alpha_blend_factor(alpha_dst);
1971                }
1972
1973                let vk_format = self.shared.private_caps.map_texture_format(cat.format);
1974                (
1975                    Some(super::ColorAttachmentKey {
1976                        base: super::AttachmentKey::compatible(
1977                            vk_format,
1978                            vk::ImageLayout::COLOR_ATTACHMENT_OPTIMAL,
1979                        ),
1980                        resolve: None,
1981                    }),
1982                    vk_attachment,
1983                )
1984            } else {
1985                (None, vk::PipelineColorBlendAttachmentState::default())
1986            };
1987
1988            compatible_rp_key.colors.push(key);
1989            vk_attachments.push(attachment);
1990        }
1991
1992        let vk_color_blend =
1993            vk::PipelineColorBlendStateCreateInfo::default().attachments(&vk_attachments);
1994
1995        let vk_dynamic_state =
1996            vk::PipelineDynamicStateCreateInfo::default().dynamic_states(&dynamic_states);
1997
1998        let raw_pass = self.shared.make_render_pass(compatible_rp_key)?;
1999
2000        let vk_infos = [{
2001            vk::GraphicsPipelineCreateInfo::default()
2002                .layout(desc.layout.raw)
2003                .stages(&stages)
2004                .vertex_input_state(&vk_vertex_input)
2005                .input_assembly_state(&vk_input_assembly)
2006                .rasterization_state(&vk_rasterization)
2007                .viewport_state(&vk_viewport)
2008                .multisample_state(&vk_multisample)
2009                .depth_stencil_state(&vk_depth_stencil)
2010                .color_blend_state(&vk_color_blend)
2011                .dynamic_state(&vk_dynamic_state)
2012                .render_pass(raw_pass)
2013        }];
2014
2015        let pipeline_cache = desc
2016            .cache
2017            .map(|it| it.raw)
2018            .unwrap_or(vk::PipelineCache::null());
2019
2020        let mut raw_vec = {
2021            profiling::scope!("vkCreateGraphicsPipelines");
2022            unsafe {
2023                self.shared
2024                    .raw
2025                    .create_graphics_pipelines(pipeline_cache, &vk_infos, None)
2026                    .map_err(|(_, e)| super::map_pipeline_err(e))
2027            }?
2028        };
2029
2030        let raw = raw_vec.pop().unwrap();
2031        if let Some(label) = desc.label {
2032            unsafe { self.shared.set_object_name(raw, label) };
2033        }
2034
2035        if let Some(CompiledStage {
2036            temp_raw_module: Some(raw_module),
2037            ..
2038        }) = compiled_vs
2039        {
2040            unsafe { self.shared.raw.destroy_shader_module(raw_module, None) };
2041        }
2042        if let Some(CompiledStage {
2043            temp_raw_module: Some(raw_module),
2044            ..
2045        }) = compiled_ts
2046        {
2047            unsafe { self.shared.raw.destroy_shader_module(raw_module, None) };
2048        }
2049        if let Some(CompiledStage {
2050            temp_raw_module: Some(raw_module),
2051            ..
2052        }) = compiled_ms
2053        {
2054            unsafe { self.shared.raw.destroy_shader_module(raw_module, None) };
2055        }
2056        if let Some(CompiledStage {
2057            temp_raw_module: Some(raw_module),
2058            ..
2059        }) = compiled_fs
2060        {
2061            unsafe { self.shared.raw.destroy_shader_module(raw_module, None) };
2062        }
2063
2064        self.counters.render_pipelines.add(1);
2065
2066        Ok(super::RenderPipeline {
2067            raw,
2068            is_multiview: desc.multiview_mask.is_some(),
2069        })
2070    }
2071
2072    unsafe fn destroy_render_pipeline(&self, pipeline: super::RenderPipeline) {
2073        unsafe { self.shared.raw.destroy_pipeline(pipeline.raw, None) };
2074
2075        self.counters.render_pipelines.sub(1);
2076    }
2077
2078    unsafe fn create_compute_pipeline(
2079        &self,
2080        desc: &crate::ComputePipelineDescriptor<
2081            super::PipelineLayout,
2082            super::ShaderModule,
2083            super::PipelineCache,
2084        >,
2085    ) -> Result<super::ComputePipeline, crate::PipelineError> {
2086        let compiled = self.compile_stage(
2087            &desc.stage,
2088            naga::ShaderStage::Compute,
2089            &desc.layout.binding_map,
2090        )?;
2091
2092        let vk_infos = [{
2093            vk::ComputePipelineCreateInfo::default()
2094                .layout(desc.layout.raw)
2095                .stage(compiled.create_info)
2096        }];
2097
2098        let pipeline_cache = desc
2099            .cache
2100            .map(|it| it.raw)
2101            .unwrap_or(vk::PipelineCache::null());
2102
2103        let mut raw_vec = {
2104            profiling::scope!("vkCreateComputePipelines");
2105            unsafe {
2106                self.shared
2107                    .raw
2108                    .create_compute_pipelines(pipeline_cache, &vk_infos, None)
2109                    .map_err(|(_, e)| super::map_pipeline_err(e))
2110            }?
2111        };
2112
2113        let raw = raw_vec.pop().unwrap();
2114        if let Some(label) = desc.label {
2115            unsafe { self.shared.set_object_name(raw, label) };
2116        }
2117
2118        if let Some(raw_module) = compiled.temp_raw_module {
2119            unsafe { self.shared.raw.destroy_shader_module(raw_module, None) };
2120        }
2121
2122        self.counters.compute_pipelines.add(1);
2123
2124        Ok(super::ComputePipeline { raw })
2125    }
2126
2127    unsafe fn destroy_compute_pipeline(&self, pipeline: super::ComputePipeline) {
2128        unsafe { self.shared.raw.destroy_pipeline(pipeline.raw, None) };
2129
2130        self.counters.compute_pipelines.sub(1);
2131    }
2132
2133    unsafe fn create_pipeline_cache(
2134        &self,
2135        desc: &crate::PipelineCacheDescriptor<'_>,
2136    ) -> Result<super::PipelineCache, crate::PipelineCacheError> {
2137        let mut info = vk::PipelineCacheCreateInfo::default();
2138        if let Some(data) = desc.data {
2139            info = info.initial_data(data)
2140        }
2141        profiling::scope!("vkCreatePipelineCache");
2142        let raw = unsafe { self.shared.raw.create_pipeline_cache(&info, None) }
2143            .map_err(super::map_host_device_oom_err)?;
2144
2145        Ok(super::PipelineCache { raw })
2146    }
2147    fn pipeline_cache_validation_key(&self) -> Option<[u8; 16]> {
2148        Some(self.shared.pipeline_cache_validation_key)
2149    }
2150    unsafe fn destroy_pipeline_cache(&self, cache: super::PipelineCache) {
2151        unsafe { self.shared.raw.destroy_pipeline_cache(cache.raw, None) }
2152    }
2153    unsafe fn create_query_set(
2154        &self,
2155        desc: &wgt::QuerySetDescriptor<crate::Label>,
2156    ) -> Result<super::QuerySet, crate::DeviceError> {
2157        // Assume each query occupies 256 bytes.
2158        // On an AMD W6800 with driver version 32.0.12030.9, occlusion queries are 256 bytes each.
2159        self.error_if_would_oom_on_resource_allocation(true, desc.count as u64 * 256)?;
2160
2161        let (vk_type, pipeline_statistics) = match desc.ty {
2162            wgt::QueryType::Occlusion => (
2163                vk::QueryType::OCCLUSION,
2164                vk::QueryPipelineStatisticFlags::empty(),
2165            ),
2166            wgt::QueryType::PipelineStatistics(statistics) => (
2167                vk::QueryType::PIPELINE_STATISTICS,
2168                conv::map_pipeline_statistics(statistics),
2169            ),
2170            wgt::QueryType::Timestamp => (
2171                vk::QueryType::TIMESTAMP,
2172                vk::QueryPipelineStatisticFlags::empty(),
2173            ),
2174        };
2175
2176        let vk_info = vk::QueryPoolCreateInfo::default()
2177            .query_type(vk_type)
2178            .query_count(desc.count)
2179            .pipeline_statistics(pipeline_statistics);
2180
2181        let raw = unsafe { self.shared.raw.create_query_pool(&vk_info, None) }
2182            .map_err(super::map_host_device_oom_err)?;
2183        if let Some(label) = desc.label {
2184            unsafe { self.shared.set_object_name(raw, label) };
2185        }
2186
2187        self.counters.query_sets.add(1);
2188
2189        Ok(super::QuerySet { raw })
2190    }
2191
2192    unsafe fn destroy_query_set(&self, set: super::QuerySet) {
2193        unsafe { self.shared.raw.destroy_query_pool(set.raw, None) };
2194
2195        self.counters.query_sets.sub(1);
2196    }
2197
2198    unsafe fn create_fence(&self) -> Result<super::Fence, crate::DeviceError> {
2199        self.counters.fences.add(1);
2200
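        // Prefer a single timeline semaphore when the device supports it; otherwise fall
        // back to a pool of binary fences tracked per signaled fence value.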
2201        Ok(if self.shared.private_caps.timeline_semaphores {
2202            let mut sem_type_info =
2203                vk::SemaphoreTypeCreateInfo::default().semaphore_type(vk::SemaphoreType::TIMELINE);
2204            let vk_info = vk::SemaphoreCreateInfo::default().push_next(&mut sem_type_info);
2205            let raw = unsafe { self.shared.raw.create_semaphore(&vk_info, None) }
2206                .map_err(super::map_host_device_oom_err)?;
2207
2208            super::Fence::TimelineSemaphore(raw)
2209        } else {
2210            super::Fence::FencePool {
2211                last_completed: 0,
2212                active: Vec::new(),
2213                free: Vec::new(),
2214            }
2215        })
2216    }
2217    unsafe fn destroy_fence(&self, fence: super::Fence) {
2218        match fence {
2219            super::Fence::TimelineSemaphore(raw) => {
2220                unsafe { self.shared.raw.destroy_semaphore(raw, None) };
2221            }
2222            super::Fence::FencePool {
2223                active,
2224                free,
2225                last_completed: _,
2226            } => {
2227                for (_, raw) in active {
2228                    unsafe { self.shared.raw.destroy_fence(raw, None) };
2229                }
2230                for raw in free {
2231                    unsafe { self.shared.raw.destroy_fence(raw, None) };
2232                }
2233            }
2234        }
2235
2236        self.counters.fences.sub(1);
2237    }
2238    unsafe fn get_fence_value(
2239        &self,
2240        fence: &super::Fence,
2241    ) -> Result<crate::FenceValue, crate::DeviceError> {
2242        fence.get_latest(
2243            &self.shared.raw,
2244            self.shared.extension_fns.timeline_semaphore.as_ref(),
2245        )
2246    }
2247    unsafe fn wait(
2248        &self,
2249        fence: &super::Fence,
2250        wait_value: crate::FenceValue,
2251        timeout: Option<Duration>,
2252    ) -> Result<bool, crate::DeviceError> {
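        // Convert the timeout to nanoseconds, clamping to u64::MAX; `None` means wait
        // indefinitely.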
2253        let timeout_ns = timeout
2254            .unwrap_or(Duration::MAX)
2255            .as_nanos()
2256            .min(u64::MAX as _) as u64;
2257        self.shared.wait_for_fence(fence, wait_value, timeout_ns)
2258    }
2259
2260    unsafe fn start_graphics_debugger_capture(&self) -> bool {
2261        #[cfg(feature = "renderdoc")]
2262        {
2263            // RenderDoc requires us to give it the pointer that vkInstance _points to_.
2264            let raw_vk_instance =
2265                vk::Handle::as_raw(self.shared.instance.raw.handle()) as *mut *mut _;
2266            let raw_vk_instance_dispatch_table = unsafe { *raw_vk_instance };
2267            unsafe {
2268                self.render_doc
2269                    .start_frame_capture(raw_vk_instance_dispatch_table, ptr::null_mut())
2270            }
2271        }
2272        #[cfg(not(feature = "renderdoc"))]
2273        false
2274    }
2275    unsafe fn stop_graphics_debugger_capture(&self) {
2276        #[cfg(feature = "renderdoc")]
2277        {
2278            // RenderDoc requires us to give it the pointer that vkInstance _points to_.
2279            let raw_vk_instance =
2280                vk::Handle::as_raw(self.shared.instance.raw.handle()) as *mut *mut _;
2281            let raw_vk_instance_dispatch_table = unsafe { *raw_vk_instance };
2282
2283            unsafe {
2284                self.render_doc
2285                    .end_frame_capture(raw_vk_instance_dispatch_table, ptr::null_mut())
2286            }
2287        }
2288    }
2289
2290    unsafe fn pipeline_cache_get_data(&self, cache: &super::PipelineCache) -> Option<Vec<u8>> {
2291        let data = unsafe { self.raw_device().get_pipeline_cache_data(cache.raw) };
2292        data.ok()
2293    }
2294
2295    unsafe fn get_acceleration_structure_build_sizes<'a>(
2296        &self,
2297        desc: &crate::GetAccelerationStructureBuildSizesDescriptor<'a, super::Buffer>,
2298    ) -> crate::AccelerationStructureBuildSizes {
2299        const CAPACITY: usize = 8;
2300
2301        let ray_tracing_functions = self
2302            .shared
2303            .extension_fns
2304            .ray_tracing
2305            .as_ref()
2306            .expect("Feature `RAY_TRACING` not enabled");
2307
2308        let (geometries, primitive_counts) = match *desc.entries {
2309            crate::AccelerationStructureEntries::Instances(ref instances) => {
2310                let instance_data = vk::AccelerationStructureGeometryInstancesDataKHR::default();
2311
2312                let geometry = vk::AccelerationStructureGeometryKHR::default()
2313                    .geometry_type(vk::GeometryTypeKHR::INSTANCES)
2314                    .geometry(vk::AccelerationStructureGeometryDataKHR {
2315                        instances: instance_data,
2316                    });
2317
2318                (
2319                    smallvec::smallvec![geometry],
2320                    smallvec::smallvec![instances.count],
2321                )
2322            }
2323            crate::AccelerationStructureEntries::Triangles(ref in_geometries) => {
2324                let mut primitive_counts =
2325                    smallvec::SmallVec::<[u32; CAPACITY]>::with_capacity(in_geometries.len());
2326                let mut geometries = smallvec::SmallVec::<
2327                    [vk::AccelerationStructureGeometryKHR; CAPACITY],
2328                >::with_capacity(in_geometries.len());
2329
2330                for triangles in in_geometries {
2331                    let mut triangle_data =
2332                        vk::AccelerationStructureGeometryTrianglesDataKHR::default()
2333                            .index_type(vk::IndexType::NONE_KHR)
2334                            .vertex_format(conv::map_vertex_format(triangles.vertex_format))
2335                            .max_vertex(triangles.vertex_count)
2336                            .vertex_stride(triangles.vertex_stride)
2337                            // The Vulkan spec suggests we could pass a non-zero invalid address here if fetching
2338                            // the real address has significant overhead, but we pass the real one to be on the
2339                            // safe side for now.
2340                            // from https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureBuildSizesKHR.html
2341                            // > The srcAccelerationStructure, dstAccelerationStructure, and mode members
2342                            // > of pBuildInfo are ignored. Any VkDeviceOrHostAddressKHR or VkDeviceOrHostAddressConstKHR
2343                            // > members of pBuildInfo are ignored by this command, except that the hostAddress
2344                            // > member of VkAccelerationStructureGeometryTrianglesDataKHR::transformData will
2345                            // > be examined to check if it is NULL.
2346                            .transform_data(vk::DeviceOrHostAddressConstKHR {
2347                                device_address: if desc
2348                                    .flags
2349                                    .contains(wgt::AccelerationStructureFlags::USE_TRANSFORM)
2350                                {
2351                                    unsafe {
2352                                        ray_tracing_functions
2353                                            .buffer_device_address
2354                                            .get_buffer_device_address(
2355                                                &vk::BufferDeviceAddressInfo::default().buffer(
2356                                                    triangles
2357                                                        .transform
2358                                                        .as_ref()
2359                                                        .unwrap()
2360                                                        .buffer
2361                                                        .raw,
2362                                                ),
2363                                            )
2364                                    }
2365                                } else {
2366                                    0
2367                                },
2368                            });
2369
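                    // One triangle per three indices (or per three vertices when the
                    // geometry is not indexed).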
2370                    let primitive_count = if let Some(ref indices) = triangles.indices {
2371                        triangle_data =
2372                            triangle_data.index_type(conv::map_index_format(indices.format));
2373                        indices.count / 3
2374                    } else {
2375                        triangles.vertex_count / 3
2376                    };
2377
2378                    let geometry = vk::AccelerationStructureGeometryKHR::default()
2379                        .geometry_type(vk::GeometryTypeKHR::TRIANGLES)
2380                        .geometry(vk::AccelerationStructureGeometryDataKHR {
2381                            triangles: triangle_data,
2382                        })
2383                        .flags(conv::map_acceleration_structure_geometry_flags(
2384                            triangles.flags,
2385                        ));
2386
2387                    geometries.push(geometry);
2388                    primitive_counts.push(primitive_count);
2389                }
2390                (geometries, primitive_counts)
2391            }
2392            crate::AccelerationStructureEntries::AABBs(ref in_geometries) => {
2393                let mut primitive_counts =
2394                    smallvec::SmallVec::<[u32; CAPACITY]>::with_capacity(in_geometries.len());
2395                let mut geometries = smallvec::SmallVec::<
2396                    [vk::AccelerationStructureGeometryKHR; CAPACITY],
2397                >::with_capacity(in_geometries.len());
2398                for aabb in in_geometries {
2399                    let aabbs_data = vk::AccelerationStructureGeometryAabbsDataKHR::default()
2400                        .stride(aabb.stride);
2401
2402                    let geometry = vk::AccelerationStructureGeometryKHR::default()
2403                        .geometry_type(vk::GeometryTypeKHR::AABBS)
2404                        .geometry(vk::AccelerationStructureGeometryDataKHR { aabbs: aabbs_data })
2405                        .flags(conv::map_acceleration_structure_geometry_flags(aabb.flags));
2406
2407                    geometries.push(geometry);
2408                    primitive_counts.push(aabb.count);
2409                }
2410                (geometries, primitive_counts)
2411            }
2412        };
2413
2414        let ty = match *desc.entries {
2415            crate::AccelerationStructureEntries::Instances(_) => {
2416                vk::AccelerationStructureTypeKHR::TOP_LEVEL
2417            }
2418            _ => vk::AccelerationStructureTypeKHR::BOTTOM_LEVEL,
2419        };
2420
2421        let geometry_info = vk::AccelerationStructureBuildGeometryInfoKHR::default()
2422            .ty(ty)
2423            .flags(conv::map_acceleration_structure_flags(desc.flags))
2424            .geometries(&geometries);
2425
2426        let mut raw = Default::default();
2427        unsafe {
2428            ray_tracing_functions
2429                .acceleration_structure
2430                .get_acceleration_structure_build_sizes(
2431                    vk::AccelerationStructureBuildTypeKHR::DEVICE,
2432                    &geometry_info,
2433                    &primitive_counts,
2434                    &mut raw,
2435                )
2436        }
2437
2438        crate::AccelerationStructureBuildSizes {
2439            acceleration_structure_size: raw.acceleration_structure_size,
2440            update_scratch_size: raw.update_scratch_size,
2441            build_scratch_size: raw.build_scratch_size,
2442        }
2443    }
2444
2445    unsafe fn get_acceleration_structure_device_address(
2446        &self,
2447        acceleration_structure: &super::AccelerationStructure,
2448    ) -> wgt::BufferAddress {
2449        let ray_tracing_functions = self
2450            .shared
2451            .extension_fns
2452            .ray_tracing
2453            .as_ref()
2454            .expect("Feature `RAY_TRACING` not enabled");
2455
2456        unsafe {
2457            ray_tracing_functions
2458                .acceleration_structure
2459                .get_acceleration_structure_device_address(
2460                    &vk::AccelerationStructureDeviceAddressInfoKHR::default()
2461                        .acceleration_structure(acceleration_structure.raw),
2462                )
2463        }
2464    }
2465
2466    unsafe fn create_acceleration_structure(
2467        &self,
2468        desc: &crate::AccelerationStructureDescriptor,
2469    ) -> Result<super::AccelerationStructure, crate::DeviceError> {
2470        let ray_tracing_functions = self
2471            .shared
2472            .extension_fns
2473            .ray_tracing
2474            .as_ref()
2475            .expect("Feature `RAY_TRACING` not enabled");
2476
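        // Acceleration structures are backed by a plain VkBuffer, which must allow both
        // acceleration-structure storage and device-address queries.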
2477        let vk_buffer_info = vk::BufferCreateInfo::default()
2478            .size(desc.size)
2479            .usage(
2480                vk::BufferUsageFlags::ACCELERATION_STRUCTURE_STORAGE_KHR
2481                    | vk::BufferUsageFlags::SHADER_DEVICE_ADDRESS,
2482            )
2483            .sharing_mode(vk::SharingMode::EXCLUSIVE);
2484
2485        unsafe {
2486            let raw_buffer = self
2487                .shared
2488                .raw
2489                .create_buffer(&vk_buffer_info, None)
2490                .map_err(super::map_host_device_oom_and_ioca_err)?;
2491
2492            let requirements = self.shared.raw.get_buffer_memory_requirements(raw_buffer);
2493
2494            self.error_if_would_oom_on_resource_allocation(false, requirements.size)
2495                .inspect_err(|_| {
2496                    self.shared.raw.destroy_buffer(raw_buffer, None);
2497                })?;
2498
2499            let name = desc
2500                .label
2501                .unwrap_or("Unlabeled acceleration structure buffer");
2502
2503            let allocation = self
2504                .mem_allocator
2505                .lock()
2506                .allocate(&gpu_allocator::vulkan::AllocationCreateDesc {
2507                    name,
2508                    requirements,
2509                    location: gpu_allocator::MemoryLocation::GpuOnly,
2510                    linear: true, // Buffers are always linear
2511                    allocation_scheme: gpu_allocator::vulkan::AllocationScheme::GpuAllocatorManaged,
2512                })
2513                .inspect_err(|_| {
2514                    self.shared.raw.destroy_buffer(raw_buffer, None);
2515                })?;
2516
2517            self.shared
2518                .raw
2519                .bind_buffer_memory(raw_buffer, allocation.memory(), allocation.offset())
2520                .map_err(super::map_host_device_oom_and_ioca_err)
2521                .inspect_err(|_| {
2522                    self.shared.raw.destroy_buffer(raw_buffer, None);
2523                })?;
2524
2525            if let Some(label) = desc.label {
2526                self.shared.set_object_name(raw_buffer, label);
2527            }
2528
2529            let vk_info = vk::AccelerationStructureCreateInfoKHR::default()
2530                .buffer(raw_buffer)
2531                .offset(0)
2532                .size(desc.size)
2533                .ty(conv::map_acceleration_structure_format(desc.format));
2534
2535            let raw_acceleration_structure = ray_tracing_functions
2536                .acceleration_structure
2537                .create_acceleration_structure(&vk_info, None)
2538                .map_err(super::map_host_oom_and_ioca_err)
2539                .inspect_err(|_| {
2540                    self.shared.raw.destroy_buffer(raw_buffer, None);
2541                })?;
2542
2543            if let Some(label) = desc.label {
2544                self.shared
2545                    .set_object_name(raw_acceleration_structure, label);
2546            }
2547
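            // When compaction is allowed, create a single-entry query pool up front so the
            // compacted size can be read back after the build.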
2548            let pool = if desc.allow_compaction {
2549                let vk_info = vk::QueryPoolCreateInfo::default()
2550                    .query_type(vk::QueryType::ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR)
2551                    .query_count(1);
2552
2553                let raw = self
2554                    .shared
2555                    .raw
2556                    .create_query_pool(&vk_info, None)
2557                    .map_err(super::map_host_device_oom_err)
2558                    .inspect_err(|_| {
2559                        ray_tracing_functions
2560                            .acceleration_structure
2561                            .destroy_acceleration_structure(raw_acceleration_structure, None);
2562                        self.shared.raw.destroy_buffer(raw_buffer, None);
2563                    })?;
2564                Some(raw)
2565            } else {
2566                None
2567            };
2568
2569            Ok(super::AccelerationStructure {
2570                raw: raw_acceleration_structure,
2571                buffer: raw_buffer,
2572                allocation,
2573                compacted_size_query: pool,
2574            })
2575        }
2576    }
2577
2578    unsafe fn destroy_acceleration_structure(
2579        &self,
2580        acceleration_structure: super::AccelerationStructure,
2581    ) {
2582        let ray_tracing_functions = self
2583            .shared
2584            .extension_fns
2585            .ray_tracing
2586            .as_ref()
2587            .expect("Feature `RAY_TRACING` not enabled");
2588
2589        unsafe {
2590            ray_tracing_functions
2591                .acceleration_structure
2592                .destroy_acceleration_structure(acceleration_structure.raw, None);
2593            self.shared
2594                .raw
2595                .destroy_buffer(acceleration_structure.buffer, None);
2596            let result = self
2597                .mem_allocator
2598                .lock()
2599                .free(acceleration_structure.allocation);
2600            if let Err(err) = result {
2601                log::warn!("Failed to free acceleration structure buffer: {err}");
2602            }
2603            if let Some(query) = acceleration_structure.compacted_size_query {
2604                self.shared.raw.destroy_query_pool(query, None)
2605            }
2606        }
2607    }
2608
2609    fn get_internal_counters(&self) -> wgt::HalCounters {
2610        self.counters
2611            .memory_allocations
2612            .set(self.shared.memory_allocations_counter.read());
2613
2614        self.counters.as_ref().clone()
2615    }
2616
    fn generate_allocator_report(&self) -> Option<wgt::AllocatorReport> {
        let gpu_allocator::AllocatorReport {
            allocations,
            blocks,
            total_allocated_bytes,
            total_capacity_bytes,
        } = self.mem_allocator.lock().generate_report();

        let allocations = allocations
            .into_iter()
            .map(|alloc| wgt::AllocationReport {
                name: alloc.name,
                offset: alloc.offset,
                size: alloc.size,
            })
            .collect();

        let blocks = blocks
            .into_iter()
            .map(|block| wgt::MemoryBlockReport {
                size: block.size,
                allocations: block.allocations.clone(),
            })
            .collect();

        Some(wgt::AllocatorReport {
            allocations,
            blocks,
            total_allocated_bytes,
            total_reserved_bytes: total_capacity_bytes,
        })
    }

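    /// Serializes a [`TlasInstance`] into the raw instance layout expected by Vulkan, packing the
    /// 24-bit custom index and the 8-bit visibility mask into a single word.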
    fn tlas_instance_to_bytes(&self, instance: TlasInstance) -> Vec<u8> {
        const MAX_U24: u32 = (1u32 << 24u32) - 1u32;
        let temp = RawTlasInstance {
            transform: instance.transform,
            custom_data_and_mask: (instance.custom_data & MAX_U24)
                | (u32::from(instance.mask) << 24),
            shader_binding_table_record_offset_and_flags: 0,
            acceleration_structure_reference: instance.blas_address,
        };
        bytemuck::bytes_of(&temp).to_vec()
    }

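    /// Checks memory heap usage against the configured device-loss budget threshold and returns
    /// [`crate::DeviceError::OutOfMemory`] if any heap has reached it. Does nothing unless a
    /// threshold is configured and `VK_EXT_memory_budget` is enabled.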
    fn check_if_oom(&self) -> Result<(), crate::DeviceError> {
        let Some(threshold) = self
            .shared
            .instance
            .memory_budget_thresholds
            .for_device_loss
        else {
            return Ok(());
        };

        if !self
            .shared
            .enabled_extensions
            .contains(&ext::memory_budget::NAME)
        {
            return Ok(());
        }

        let get_physical_device_properties = self
            .shared
            .instance
            .get_physical_device_properties
            .as_ref()
            .unwrap();

        let mut memory_budget_properties = vk::PhysicalDeviceMemoryBudgetPropertiesEXT::default();

        let mut memory_properties =
            vk::PhysicalDeviceMemoryProperties2::default().push_next(&mut memory_budget_properties);

        unsafe {
            get_physical_device_properties.get_physical_device_memory_properties2(
                self.shared.physical_device,
                &mut memory_properties,
            );
        }

        let memory_properties = memory_properties.memory_properties;

        for i in 0..memory_properties.memory_heap_count {
            let heap_usage = memory_budget_properties.heap_usage[i as usize];
            let heap_budget = memory_budget_properties.heap_budget[i as usize];

            if heap_usage >= heap_budget / 100 * threshold as u64 {
                return Err(crate::DeviceError::OutOfMemory);
            }
        }

        Ok(())
    }
}

impl super::DeviceShared {
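    /// Creates a binary semaphore and labels it with `name` for debugging.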
    pub(super) fn new_binary_semaphore(
        &self,
        name: &str,
    ) -> Result<vk::Semaphore, crate::DeviceError> {
        unsafe {
            let semaphore = self
                .raw
                .create_semaphore(&vk::SemaphoreCreateInfo::default(), None)
                .map_err(super::map_host_device_oom_err)?;

            self.set_object_name(semaphore, name);

            Ok(semaphore)
        }
    }

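    /// Waits for `fence` to reach `wait_value`, giving up after `timeout_ns` nanoseconds.
    ///
    /// Returns `Ok(true)` if the value was reached and `Ok(false)` if the wait timed out.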
    pub(super) fn wait_for_fence(
        &self,
        fence: &super::Fence,
        wait_value: crate::FenceValue,
        timeout_ns: u64,
    ) -> Result<bool, crate::DeviceError> {
        profiling::scope!("Device::wait");
        match *fence {
            super::Fence::TimelineSemaphore(raw) => {
                let semaphores = [raw];
                let values = [wait_value];
                let vk_info = vk::SemaphoreWaitInfo::default()
                    .semaphores(&semaphores)
                    .values(&values);
                let result = match self.extension_fns.timeline_semaphore {
                    Some(super::ExtensionFn::Extension(ref ext)) => unsafe {
                        ext.wait_semaphores(&vk_info, timeout_ns)
                    },
                    Some(super::ExtensionFn::Promoted) => unsafe {
                        self.raw.wait_semaphores(&vk_info, timeout_ns)
                    },
                    None => unreachable!(),
                };
                match result {
                    Ok(()) => Ok(true),
                    Err(vk::Result::TIMEOUT) => Ok(false),
                    Err(other) => Err(super::map_host_device_oom_and_lost_err(other)),
                }
            }
            super::Fence::FencePool {
                last_completed,
                ref active,
                free: _,
            } => {
                if wait_value <= last_completed {
                    Ok(true)
                } else {
                    match active.iter().find(|&&(value, _)| value >= wait_value) {
                        Some(&(_, raw)) => {
                            match unsafe { self.raw.wait_for_fences(&[raw], true, timeout_ns) } {
                                Ok(()) => Ok(true),
                                Err(vk::Result::TIMEOUT) => Ok(false),
                                Err(other) => Err(super::map_host_device_oom_and_lost_err(other)),
                            }
                        }
                        None => {
                            crate::hal_usage_error(format!(
                                "no signals reached value {wait_value}"
                            ));
                        }
                    }
                }
            }
        }
    }
}

impl From<gpu_descriptor::AllocationError> for crate::DeviceError {
    fn from(error: gpu_descriptor::AllocationError) -> Self {
        use gpu_descriptor::AllocationError as Ae;
        match error {
            Ae::OutOfDeviceMemory | Ae::OutOfHostMemory | Ae::Fragmentation => Self::OutOfMemory,
        }
    }
}

/// We usually map unexpected Vulkan errors to the [`crate::DeviceError::Unexpected`]
/// variant to stay robust even in cases where the driver is not
/// complying with the spec.
///
/// However, we implement a few trait methods that have no equivalent
/// error variant. In those cases we use this function.
fn handle_unexpected(err: vk::Result) -> ! {
    panic!("Unexpected Vulkan error: `{err}`")
}

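/// A raw Vulkan image that has not yet been bound to memory, together with its reported
/// memory requirements.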
struct ImageWithoutMemory {
    raw: vk::Image,
    requirements: vk::MemoryRequirements,
}