1use alloc::{borrow::ToOwned as _, collections::BTreeMap, ffi::CString, sync::Arc, vec::Vec};
2use core::{
3 ffi::CStr,
4 mem::{self, MaybeUninit},
5 num::NonZeroU32,
6 ptr,
7 time::Duration,
8};
9
10use arrayvec::ArrayVec;
11use ash::{ext, vk};
12use hashbrown::hash_map::Entry;
13use parking_lot::Mutex;
14
15use super::{conv, RawTlasInstance};
16use crate::TlasInstance;
17
18impl super::DeviceShared {
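 /// Attaches a human-readable debug name to `object` through
 /// `VK_EXT_debug_utils`, if that extension is loaded; otherwise this is a
 /// no-op. The name is copied into a small stack buffer (or a heap buffer for
 /// long names) so a NUL terminator can be appended for the Vulkan call.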
19 pub(super) unsafe fn set_object_name(&self, object: impl vk::Handle, name: &str) {
37 let Some(extension) = self.extension_fns.debug_utils.as_ref() else {
38 return;
39 };
40
41 let mut buffer: [u8; 64] = [0u8; 64];
44 let buffer_vec: Vec<u8>;
45
46 let name_bytes = if name.len() < buffer.len() {
48 buffer[..name.len()].copy_from_slice(name.as_bytes());
50 buffer[name.len()] = 0;
52 &buffer[..name.len() + 1]
53 } else {
54 buffer_vec = name
57 .as_bytes()
58 .iter()
59 .cloned()
60 .chain(core::iter::once(0))
61 .collect();
62 &buffer_vec
63 };
64
65 let name = CStr::from_bytes_until_nul(name_bytes).expect("We have added a null byte");
66
67 let _result = unsafe {
68 extension.set_debug_utils_object_name(
69 &vk::DebugUtilsObjectNameInfoEXT::default()
70 .object_handle(object)
71 .object_name(name),
72 )
73 };
74 }
75
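 /// Returns a render pass compatible with `key`, creating it on first use and
 /// caching it in `self.render_passes` so later requests with an equal key
 /// reuse the same `VkRenderPass`.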
76 pub fn make_render_pass(
77 &self,
78 key: super::RenderPassKey,
79 ) -> Result<vk::RenderPass, crate::DeviceError> {
80 Ok(match self.render_passes.lock().entry(key) {
81 Entry::Occupied(e) => *e.get(),
82 Entry::Vacant(e) => {
83 let super::RenderPassKey {
84 ref colors,
85 ref depth_stencil,
86 sample_count,
87 multiview_mask,
88 } = *e.key();
89
90 let mut vk_attachments = Vec::new();
91 let mut color_refs = Vec::with_capacity(colors.len());
92 let mut resolve_refs = Vec::with_capacity(color_refs.capacity());
93 let mut ds_ref = None;
94 let samples = vk::SampleCountFlags::from_raw(sample_count);
95 let unused = vk::AttachmentReference {
96 attachment: vk::ATTACHMENT_UNUSED,
97 layout: vk::ImageLayout::UNDEFINED,
98 };
99 for cat in colors.iter() {
100 let (color_ref, resolve_ref) =
101 if let Some(super::ColorAttachmentKey { base, resolve }) = cat {
102 let super::AttachmentKey {
103 format,
104 layout,
105 ops,
106 } = *base;
107
108 let color_ref = vk::AttachmentReference {
109 attachment: vk_attachments.len() as u32,
110 layout,
111 };
112 vk_attachments.push({
113 let (load_op, store_op) = conv::map_attachment_ops(ops);
114 vk::AttachmentDescription::default()
115 .format(format)
116 .samples(samples)
117 .load_op(load_op)
118 .store_op(store_op)
119 .initial_layout(layout)
120 .final_layout(layout)
121 });
122 let resolve_ref = if let Some(rat) = resolve {
123 let super::AttachmentKey {
124 format,
125 layout,
126 ops,
127 } = *rat;
128
129 let (load_op, store_op) = conv::map_attachment_ops(ops);
130 let vk_attachment = vk::AttachmentDescription::default()
131 .format(format)
132 .samples(vk::SampleCountFlags::TYPE_1)
133 .load_op(load_op)
134 .store_op(store_op)
135 .initial_layout(layout)
136 .final_layout(layout);
137 vk_attachments.push(vk_attachment);
138
139 vk::AttachmentReference {
140 attachment: vk_attachments.len() as u32 - 1,
141 layout,
142 }
143 } else {
144 unused
145 };
146
147 (color_ref, resolve_ref)
148 } else {
149 (unused, unused)
150 };
151
152 color_refs.push(color_ref);
153 resolve_refs.push(resolve_ref);
154 }
155
156 if let Some(ds) = depth_stencil {
157 let super::DepthStencilAttachmentKey {
158 ref base,
159 stencil_ops,
160 } = *ds;
161
162 let super::AttachmentKey {
163 format,
164 layout,
165 ops,
166 } = *base;
167
168 ds_ref = Some(vk::AttachmentReference {
169 attachment: vk_attachments.len() as u32,
170 layout,
171 });
172 let (load_op, store_op) = conv::map_attachment_ops(ops);
173 let (stencil_load_op, stencil_store_op) = conv::map_attachment_ops(stencil_ops);
174 let vk_attachment = vk::AttachmentDescription::default()
175 .format(format)
176 .samples(samples)
177 .load_op(load_op)
178 .store_op(store_op)
179 .stencil_load_op(stencil_load_op)
180 .stencil_store_op(stencil_store_op)
181 .initial_layout(layout)
182 .final_layout(layout);
183 vk_attachments.push(vk_attachment);
184 }
185
186 let vk_subpasses = [{
187 let mut vk_subpass = vk::SubpassDescription::default()
188 .pipeline_bind_point(vk::PipelineBindPoint::GRAPHICS)
189 .color_attachments(&color_refs)
190 .resolve_attachments(&resolve_refs);
191
192 if self
193 .workarounds
194 .contains(super::Workarounds::EMPTY_RESOLVE_ATTACHMENT_LISTS)
195 && resolve_refs.is_empty()
196 {
197 vk_subpass.p_resolve_attachments = ptr::null();
198 }
199
200 if let Some(ref reference) = ds_ref {
201 vk_subpass = vk_subpass.depth_stencil_attachment(reference)
202 }
203 vk_subpass
204 }];
205
206 let mut vk_info = vk::RenderPassCreateInfo::default()
207 .attachments(&vk_attachments)
208 .subpasses(&vk_subpasses);
209
210 let mut multiview_info;
211 let mask;
212 if let Some(multiview_mask) = multiview_mask {
213 mask = [multiview_mask.get()];
214
215 multiview_info = vk::RenderPassMultiviewCreateInfoKHR::default()
217 .view_masks(&mask)
218 .correlation_masks(&mask);
219 vk_info = vk_info.push_next(&mut multiview_info);
220 }
221
222 let raw = unsafe {
223 self.raw
224 .create_render_pass(&vk_info, None)
225 .map_err(super::map_host_device_oom_err)?
226 };
227
228 *e.insert(raw)
229 }
230 })
231 }
232
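 /// Translates buffer byte ranges into `VkMappedMemoryRange`s for flush or
 /// invalidate calls, widening each range to the device's non-coherent atom
 /// alignment. Returns `None` if the buffer has no backing allocation owned
 /// by us (externally created memory).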
233 fn make_memory_ranges<'a, I: 'a + Iterator<Item = crate::MemoryRange>>(
234 &self,
235 buffer: &'a super::Buffer,
236 ranges: I,
237 ) -> Option<impl 'a + Iterator<Item = vk::MappedMemoryRange<'a>>> {
238 let allocation = buffer.allocation.as_ref()?.lock();
239 let mask = self.private_caps.non_coherent_map_mask;
240 Some(ranges.map(move |range| {
241 vk::MappedMemoryRange::default()
242 .memory(allocation.memory())
243 .offset((allocation.offset() + range.start) & !mask)
244 .size((range.end - range.start + mask) & !mask)
245 }))
246 }
247}
248
249impl
250 gpu_descriptor::DescriptorDevice<vk::DescriptorSetLayout, vk::DescriptorPool, vk::DescriptorSet>
251 for super::DeviceShared
252{
253 unsafe fn create_descriptor_pool(
254 &self,
255 descriptor_count: &gpu_descriptor::DescriptorTotalCount,
256 max_sets: u32,
257 flags: gpu_descriptor::DescriptorPoolCreateFlags,
258 ) -> Result<vk::DescriptorPool, gpu_descriptor::CreatePoolError> {
259 let unfiltered_counts = [
261 (vk::DescriptorType::SAMPLER, descriptor_count.sampler),
262 (
263 vk::DescriptorType::SAMPLED_IMAGE,
264 descriptor_count.sampled_image,
265 ),
266 (
267 vk::DescriptorType::STORAGE_IMAGE,
268 descriptor_count.storage_image,
269 ),
270 (
271 vk::DescriptorType::UNIFORM_BUFFER,
272 descriptor_count.uniform_buffer,
273 ),
274 (
275 vk::DescriptorType::UNIFORM_BUFFER_DYNAMIC,
276 descriptor_count.uniform_buffer_dynamic,
277 ),
278 (
279 vk::DescriptorType::STORAGE_BUFFER,
280 descriptor_count.storage_buffer,
281 ),
282 (
283 vk::DescriptorType::STORAGE_BUFFER_DYNAMIC,
284 descriptor_count.storage_buffer_dynamic,
285 ),
286 (
287 vk::DescriptorType::ACCELERATION_STRUCTURE_KHR,
288 descriptor_count.acceleration_structure,
289 ),
290 ];
291
292 let filtered_counts = unfiltered_counts
293 .iter()
294 .cloned()
295 .filter(|&(_, count)| count != 0)
296 .map(|(ty, count)| vk::DescriptorPoolSize {
297 ty,
298 descriptor_count: count,
299 })
300 .collect::<ArrayVec<_, 8>>();
301
302 let mut vk_flags =
303 if flags.contains(gpu_descriptor::DescriptorPoolCreateFlags::UPDATE_AFTER_BIND) {
304 vk::DescriptorPoolCreateFlags::UPDATE_AFTER_BIND
305 } else {
306 vk::DescriptorPoolCreateFlags::empty()
307 };
308 if flags.contains(gpu_descriptor::DescriptorPoolCreateFlags::FREE_DESCRIPTOR_SET) {
309 vk_flags |= vk::DescriptorPoolCreateFlags::FREE_DESCRIPTOR_SET;
310 }
311 let vk_info = vk::DescriptorPoolCreateInfo::default()
312 .max_sets(max_sets)
313 .flags(vk_flags)
314 .pool_sizes(&filtered_counts);
315
316 match unsafe { self.raw.create_descriptor_pool(&vk_info, None) } {
317 Ok(pool) => Ok(pool),
318 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => {
319 Err(gpu_descriptor::CreatePoolError::OutOfHostMemory)
320 }
321 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
322 Err(gpu_descriptor::CreatePoolError::OutOfDeviceMemory)
323 }
324 Err(vk::Result::ERROR_FRAGMENTATION) => {
325 Err(gpu_descriptor::CreatePoolError::Fragmentation)
326 }
327 Err(err) => handle_unexpected(err),
328 }
329 }
330
331 unsafe fn destroy_descriptor_pool(&self, pool: vk::DescriptorPool) {
332 unsafe { self.raw.destroy_descriptor_pool(pool, None) }
333 }
334
335 unsafe fn alloc_descriptor_sets<'a>(
336 &self,
337 pool: &mut vk::DescriptorPool,
338 layouts: impl ExactSizeIterator<Item = &'a vk::DescriptorSetLayout>,
339 sets: &mut impl Extend<vk::DescriptorSet>,
340 ) -> Result<(), gpu_descriptor::DeviceAllocationError> {
341 let result = unsafe {
342 self.raw.allocate_descriptor_sets(
343 &vk::DescriptorSetAllocateInfo::default()
344 .descriptor_pool(*pool)
345 .set_layouts(
346 &smallvec::SmallVec::<[vk::DescriptorSetLayout; 32]>::from_iter(
347 layouts.cloned(),
348 ),
349 ),
350 )
351 };
352
353 match result {
354 Ok(vk_sets) => {
355 sets.extend(vk_sets);
356 Ok(())
357 }
358 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY)
359 | Err(vk::Result::ERROR_OUT_OF_POOL_MEMORY) => {
360 Err(gpu_descriptor::DeviceAllocationError::OutOfHostMemory)
361 }
362 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
363 Err(gpu_descriptor::DeviceAllocationError::OutOfDeviceMemory)
364 }
365 Err(vk::Result::ERROR_FRAGMENTED_POOL) => {
366 Err(gpu_descriptor::DeviceAllocationError::FragmentedPool)
367 }
368 Err(err) => handle_unexpected(err),
369 }
370 }
371
372 unsafe fn dealloc_descriptor_sets<'a>(
373 &self,
374 pool: &mut vk::DescriptorPool,
375 sets: impl Iterator<Item = vk::DescriptorSet>,
376 ) {
377 let result = unsafe {
378 self.raw.free_descriptor_sets(
379 *pool,
380 &smallvec::SmallVec::<[vk::DescriptorSet; 32]>::from_iter(sets),
381 )
382 };
383 match result {
384 Ok(()) => {}
385 Err(err) => handle_unexpected(err),
386 }
387 }
388}
389
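/// Result of compiling one shader stage: the Vulkan stage create info, the
/// owned entry-point `CString` that `create_info.p_name` points into, and a
/// temporary `VkShaderModule` (present only when the module was generated
/// from Naga IR) that must be destroyed once the pipeline has been created.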
390struct CompiledStage {
391 create_info: vk::PipelineShaderStageCreateInfo<'static>,
392 _entry_point: CString,
393 temp_raw_module: Option<vk::ShaderModule>,
394}
395
396impl super::Device {
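 /// Wraps an existing `VkImage` into a [`super::Texture`], assigning the debug
 /// label from `desc`. When a `drop_callback` is provided, the image is
 /// treated as externally owned and `destroy_texture` will not destroy it.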
397 pub unsafe fn texture_from_raw(
407 &self,
408 vk_image: vk::Image,
409 desc: &crate::TextureDescriptor,
410 drop_callback: Option<crate::DropCallback>,
411 memory: super::TextureMemory,
412 ) -> super::Texture {
413 let identity = self.shared.texture_identity_factory.next();
414 let drop_guard = crate::DropGuard::from_option(drop_callback);
415
416 if let Some(label) = desc.label {
417 unsafe { self.shared.set_object_name(vk_image, label) };
418 }
419
420 super::Texture {
421 raw: vk_image,
422 drop_guard,
423 memory,
424 format: desc.format,
425 copy_size: desc.copy_extent(),
426 identity,
427 }
428 }
429
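 /// Returns the index of the first memory type that is permitted by
 /// `type_bits_req` and has all of the properties in `flags_req`, or `None`
 /// if no such type exists.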
430 fn find_memory_type_index(
431 &self,
432 type_bits_req: u32,
433 flags_req: vk::MemoryPropertyFlags,
434 ) -> Option<usize> {
435 let mem_properties = unsafe {
436 self.shared
437 .instance
438 .raw
439 .get_physical_device_memory_properties(self.shared.physical_device)
440 };
441
442 for (i, mem_ty) in mem_properties.memory_types_as_slice().iter().enumerate() {
444 let types_bits = 1 << i;
445 let is_required_memory_type = type_bits_req & types_bits != 0;
446 let has_required_properties = mem_ty.property_flags & flags_req == flags_req;
447 if is_required_memory_type && has_required_properties {
448 return Some(i);
449 }
450 }
451
452 None
453 }
454
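 /// Creates a `VkImage` for `desc` without binding memory, returning the raw
 /// handle plus its memory requirements. For `TRANSIENT` textures the
 /// requirements are narrowed to a lazily-allocated memory type when one is
 /// available.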
455 fn create_image_without_memory(
456 &self,
457 desc: &crate::TextureDescriptor,
458 external_memory_image_create_info: Option<&mut vk::ExternalMemoryImageCreateInfo>,
459 ) -> Result<ImageWithoutMemory, crate::DeviceError> {
460 let copy_size = desc.copy_extent();
461
462 let mut raw_flags = vk::ImageCreateFlags::empty();
463 if desc.dimension == wgt::TextureDimension::D3
464 && desc.usage.contains(wgt::TextureUses::COLOR_TARGET)
465 {
466 raw_flags |= vk::ImageCreateFlags::TYPE_2D_ARRAY_COMPATIBLE;
467 }
468 if desc.is_cube_compatible() {
469 raw_flags |= vk::ImageCreateFlags::CUBE_COMPATIBLE;
470 }
471
472 let original_format = self.shared.private_caps.map_texture_format(desc.format);
473 let mut vk_view_formats = vec![];
474 if !desc.view_formats.is_empty() {
475 raw_flags |= vk::ImageCreateFlags::MUTABLE_FORMAT;
476
477 if self.shared.private_caps.image_format_list {
478 vk_view_formats = desc
479 .view_formats
480 .iter()
481 .map(|f| self.shared.private_caps.map_texture_format(*f))
482 .collect();
483 vk_view_formats.push(original_format)
484 }
485 }
486 if desc.format.is_multi_planar_format() {
487 raw_flags |=
488 vk::ImageCreateFlags::MUTABLE_FORMAT | vk::ImageCreateFlags::EXTENDED_USAGE;
489 }
490
491 let mut vk_info = vk::ImageCreateInfo::default()
492 .flags(raw_flags)
493 .image_type(conv::map_texture_dimension(desc.dimension))
494 .format(original_format)
 495 .extent(conv::map_copy_extent(&copy_size))
496 .mip_levels(desc.mip_level_count)
497 .array_layers(desc.array_layer_count())
498 .samples(vk::SampleCountFlags::from_raw(desc.sample_count))
499 .tiling(vk::ImageTiling::OPTIMAL)
500 .usage(conv::map_texture_usage(desc.usage))
501 .sharing_mode(vk::SharingMode::EXCLUSIVE)
502 .initial_layout(vk::ImageLayout::UNDEFINED);
503
504 let mut format_list_info = vk::ImageFormatListCreateInfo::default();
505 if !vk_view_formats.is_empty() {
506 format_list_info = format_list_info.view_formats(&vk_view_formats);
507 vk_info = vk_info.push_next(&mut format_list_info);
508 }
509
510 if let Some(ext_info) = external_memory_image_create_info {
511 vk_info = vk_info.push_next(ext_info);
512 }
513
514 let raw = unsafe { self.shared.raw.create_image(&vk_info, None) }.map_err(map_err)?;
515 fn map_err(err: vk::Result) -> crate::DeviceError {
516 super::map_host_device_oom_and_ioca_err(err)
519 }
520 let mut req = unsafe { self.shared.raw.get_image_memory_requirements(raw) };
521
522 if desc.usage.contains(wgt::TextureUses::TRANSIENT) {
523 let mem_type_index = self.find_memory_type_index(
524 req.memory_type_bits,
525 vk::MemoryPropertyFlags::LAZILY_ALLOCATED,
526 );
527 if let Some(mem_type_index) = mem_type_index {
528 req.memory_type_bits = 1 << mem_type_index;
529 }
530 }
531
532 Ok(ImageWithoutMemory {
533 raw,
534 requirements: req,
535 })
536 }
537
538 #[cfg(windows)]
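 /// Imports a D3D11 shared handle as a Vulkan texture via
 /// `VK_KHR_external_memory_win32`, binding a dedicated memory allocation to
 /// the imported image. Requires `Features::VULKAN_EXTERNAL_MEMORY_WIN32`.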
544 pub unsafe fn texture_from_d3d11_shared_handle(
545 &self,
546 d3d11_shared_handle: windows::Win32::Foundation::HANDLE,
547 desc: &crate::TextureDescriptor,
548 ) -> Result<super::Texture, crate::DeviceError> {
549 if !self
550 .shared
551 .features
552 .contains(wgt::Features::VULKAN_EXTERNAL_MEMORY_WIN32)
553 {
554 log::error!("Vulkan driver does not support VK_KHR_external_memory_win32");
555 return Err(crate::DeviceError::Unexpected);
556 }
557
558 let mut external_memory_image_info = vk::ExternalMemoryImageCreateInfo::default()
559 .handle_types(vk::ExternalMemoryHandleTypeFlags::D3D11_TEXTURE);
560
561 let image =
562 self.create_image_without_memory(desc, Some(&mut external_memory_image_info))?;
563
564 let mut dedicated_allocate_info =
567 vk::MemoryDedicatedAllocateInfo::default().image(image.raw);
568
569 let mut import_memory_info = vk::ImportMemoryWin32HandleInfoKHR::default()
570 .handle_type(vk::ExternalMemoryHandleTypeFlags::D3D11_TEXTURE)
571 .handle(d3d11_shared_handle.0 as _);
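 // The dedicated-allocation info is chained behind the import info by writing
 // `p_next` directly rather than going through the `push_next` builder.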
572 #[allow(clippy::unnecessary_mut_passed)]
574 {
575 import_memory_info.p_next = <*const _>::cast(&mut dedicated_allocate_info);
576 }
577
578 let mem_type_index = self
579 .find_memory_type_index(
580 image.requirements.memory_type_bits,
581 vk::MemoryPropertyFlags::DEVICE_LOCAL,
582 )
583 .ok_or(crate::DeviceError::Unexpected)?;
584
585 let memory_allocate_info = vk::MemoryAllocateInfo::default()
586 .allocation_size(image.requirements.size)
587 .memory_type_index(mem_type_index as _)
588 .push_next(&mut import_memory_info);
589 let memory = unsafe { self.shared.raw.allocate_memory(&memory_allocate_info, None) }
590 .map_err(super::map_host_device_oom_err)?;
591
592 unsafe { self.shared.raw.bind_image_memory(image.raw, memory, 0) }
593 .map_err(super::map_host_device_oom_err)?;
594
595 Ok(unsafe {
596 self.texture_from_raw(
597 image.raw,
598 desc,
599 None,
600 super::TextureMemory::Dedicated(memory),
601 )
602 })
603 }
604
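 /// Creates a raw `VkShaderModule` from SPIR-V words and applies the optional
 /// debug label.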
605 fn create_shader_module_impl(
606 &self,
607 spv: &[u32],
608 label: &crate::Label<'_>,
609 ) -> Result<vk::ShaderModule, crate::DeviceError> {
610 let vk_info = vk::ShaderModuleCreateInfo::default()
611 .flags(vk::ShaderModuleCreateFlags::empty())
612 .code(spv);
613
614 let raw = unsafe {
615 profiling::scope!("vkCreateShaderModule");
616 self.shared
617 .raw
618 .create_shader_module(&vk_info, None)
619 .map_err(map_err)?
620 };
621 fn map_err(err: vk::Result) -> crate::DeviceError {
622 super::map_host_device_oom_err(err)
625 }
626
627 if let Some(label) = label {
628 unsafe { self.shared.set_object_name(raw, label) };
629 }
630
631 Ok(raw)
632 }
633
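 /// Lowers one programmable stage into a `VkPipelineShaderStageCreateInfo`.
 /// Prebuilt SPIR-V modules are used as-is; Naga IR is compiled here, layering
 /// per-stage options (bounds-check policies, loop bounding, binding map,
 /// debug info, workgroup-memory zeroing) over the device's default Naga
 /// options only when they differ.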
634 fn compile_stage(
635 &self,
636 stage: &crate::ProgrammableStage<super::ShaderModule>,
637 naga_stage: naga::ShaderStage,
638 binding_map: &naga::back::spv::BindingMap,
639 ) -> Result<CompiledStage, crate::PipelineError> {
640 let stage_flags = crate::auxil::map_naga_stage(naga_stage);
641 let vk_module = match *stage.module {
642 super::ShaderModule::Raw(raw) => raw,
643 super::ShaderModule::Intermediate {
644 ref naga_shader,
645 runtime_checks,
646 } => {
647 let pipeline_options = naga::back::spv::PipelineOptions {
648 entry_point: stage.entry_point.to_owned(),
649 shader_stage: naga_stage,
650 };
651 let needs_temp_options = !runtime_checks.bounds_checks
652 || !runtime_checks.force_loop_bounding
653 || !runtime_checks.ray_query_initialization_tracking
654 || !binding_map.is_empty()
655 || naga_shader.debug_source.is_some()
656 || !stage.zero_initialize_workgroup_memory;
657 let mut temp_options;
658 let options = if needs_temp_options {
659 temp_options = self.naga_options.clone();
660 if !runtime_checks.bounds_checks {
661 temp_options.bounds_check_policies = naga::proc::BoundsCheckPolicies {
662 index: naga::proc::BoundsCheckPolicy::Unchecked,
663 buffer: naga::proc::BoundsCheckPolicy::Unchecked,
664 image_load: naga::proc::BoundsCheckPolicy::Unchecked,
665 binding_array: naga::proc::BoundsCheckPolicy::Unchecked,
666 };
667 }
668 if !runtime_checks.force_loop_bounding {
669 temp_options.force_loop_bounding = false;
670 }
671 if !runtime_checks.ray_query_initialization_tracking {
672 temp_options.ray_query_initialization_tracking = false;
673 }
674 if !binding_map.is_empty() {
675 temp_options.binding_map = binding_map.clone();
676 }
677
678 if let Some(ref debug) = naga_shader.debug_source {
679 temp_options.debug_info = Some(naga::back::spv::DebugInfo {
680 source_code: &debug.source_code,
681 file_name: debug.file_name.as_ref(),
682 language: naga::back::spv::SourceLanguage::WGSL,
683 })
684 }
685 if !stage.zero_initialize_workgroup_memory {
686 temp_options.zero_initialize_workgroup_memory =
687 naga::back::spv::ZeroInitializeWorkgroupMemoryMode::None;
688 }
689
690 &temp_options
691 } else {
692 &self.naga_options
693 };
694
695 let (module, info) = naga::back::pipeline_constants::process_overrides(
696 &naga_shader.module,
697 &naga_shader.info,
698 Some((naga_stage, stage.entry_point)),
699 stage.constants,
700 )
701 .map_err(|e| {
702 crate::PipelineError::PipelineConstants(stage_flags, format!("{e}"))
703 })?;
704
705 let spv = {
706 profiling::scope!("naga::spv::write_vec");
707 naga::back::spv::write_vec(&module, &info, options, Some(&pipeline_options))
708 }
709 .map_err(|e| crate::PipelineError::Linkage(stage_flags, format!("{e}")))?;
710 self.create_shader_module_impl(&spv, &None)?
711 }
712 };
713
714 let mut flags = vk::PipelineShaderStageCreateFlags::empty();
715 if self.shared.features.contains(wgt::Features::SUBGROUP) {
716 flags |= vk::PipelineShaderStageCreateFlags::ALLOW_VARYING_SUBGROUP_SIZE
717 }
718
719 let entry_point = CString::new(stage.entry_point).unwrap();
720 let mut create_info = vk::PipelineShaderStageCreateInfo::default()
721 .flags(flags)
722 .stage(conv::map_shader_stage(stage_flags))
723 .module(vk_module);
724
725 create_info.p_name = entry_point.as_ptr();
727
728 Ok(CompiledStage {
729 create_info,
730 _entry_point: entry_point,
731 temp_raw_module: match *stage.module {
732 super::ShaderModule::Raw(_) => None,
733 super::ShaderModule::Intermediate { .. } => Some(vk_module),
734 },
735 })
736 }
737
738 pub fn queue_family_index(&self) -> u32 {
744 self.shared.family_index
745 }
746
747 pub fn queue_index(&self) -> u32 {
748 self.shared.queue_index
749 }
750
751 pub fn raw_device(&self) -> &ash::Device {
752 &self.shared.raw
753 }
754
755 pub fn raw_physical_device(&self) -> vk::PhysicalDevice {
756 self.shared.physical_device
757 }
758
759 pub fn raw_queue(&self) -> vk::Queue {
760 self.shared.raw_queue
761 }
762
763 pub fn enabled_device_extensions(&self) -> &[&'static CStr] {
764 &self.shared.enabled_extensions
765 }
766
767 pub fn shared_instance(&self) -> &super::InstanceShared {
768 &self.shared.instance
769 }
770
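 /// Best-effort budget check run before creating a resource: if a
 /// resource-creation threshold is configured and `VK_EXT_memory_budget` is
 /// enabled, returns `OutOfMemory` when allocating `size` bytes would push a
 /// relevant heap (host-visible or device-local, depending on
 /// `needs_host_access`) past the configured percentage of its budget.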
771 fn error_if_would_oom_on_resource_allocation(
772 &self,
773 needs_host_access: bool,
774 size: u64,
775 ) -> Result<(), crate::DeviceError> {
776 let Some(threshold) = self
777 .shared
778 .instance
779 .memory_budget_thresholds
780 .for_resource_creation
781 else {
782 return Ok(());
783 };
784
785 if !self
786 .shared
787 .enabled_extensions
788 .contains(&ext::memory_budget::NAME)
789 {
790 return Ok(());
791 }
792
793 let get_physical_device_properties = self
794 .shared
795 .instance
796 .get_physical_device_properties
797 .as_ref()
798 .unwrap();
799
800 let mut memory_budget_properties = vk::PhysicalDeviceMemoryBudgetPropertiesEXT::default();
801
802 let mut memory_properties =
803 vk::PhysicalDeviceMemoryProperties2::default().push_next(&mut memory_budget_properties);
804
805 unsafe {
806 get_physical_device_properties.get_physical_device_memory_properties2(
807 self.shared.physical_device,
808 &mut memory_properties,
809 );
810 }
811
812 let mut host_visible_heaps = [false; vk::MAX_MEMORY_HEAPS];
813 let mut device_local_heaps = [false; vk::MAX_MEMORY_HEAPS];
814
815 let memory_properties = memory_properties.memory_properties;
816
817 for i in 0..memory_properties.memory_type_count {
818 let memory_type = memory_properties.memory_types[i as usize];
819 let flags = memory_type.property_flags;
820
821 if flags.intersects(
822 vk::MemoryPropertyFlags::LAZILY_ALLOCATED | vk::MemoryPropertyFlags::PROTECTED,
823 ) {
 continue;
 }
826
827 if flags.contains(vk::MemoryPropertyFlags::HOST_VISIBLE) {
828 host_visible_heaps[memory_type.heap_index as usize] = true;
829 }
830
831 if flags.contains(vk::MemoryPropertyFlags::DEVICE_LOCAL) {
832 device_local_heaps[memory_type.heap_index as usize] = true;
833 }
834 }
835
836 let heaps = if needs_host_access {
837 host_visible_heaps
838 } else {
839 device_local_heaps
840 };
841
842 for (i, check) in heaps.iter().enumerate() {
847 if !check {
848 continue;
849 }
850
851 let heap_usage = memory_budget_properties.heap_usage[i];
852 let heap_budget = memory_budget_properties.heap_budget[i];
853
854 if heap_usage + size >= heap_budget / 100 * threshold as u64 {
855 return Err(crate::DeviceError::OutOfMemory);
856 }
857 }
858
859 Ok(())
860 }
861}
862
863impl crate::Device for super::Device {
864 type A = super::Api;
865
866 unsafe fn create_buffer(
867 &self,
868 desc: &crate::BufferDescriptor,
869 ) -> Result<super::Buffer, crate::DeviceError> {
870 let vk_info = vk::BufferCreateInfo::default()
871 .size(desc.size)
872 .usage(conv::map_buffer_usage(desc.usage))
873 .sharing_mode(vk::SharingMode::EXCLUSIVE);
874
875 let raw = unsafe {
876 self.shared
877 .raw
878 .create_buffer(&vk_info, None)
879 .map_err(super::map_host_device_oom_and_ioca_err)?
880 };
881
882 let requirements = unsafe { self.shared.raw.get_buffer_memory_requirements(raw) };
883
884 let is_cpu_read = desc.usage.contains(wgt::BufferUses::MAP_READ);
885 let is_cpu_write = desc.usage.contains(wgt::BufferUses::MAP_WRITE);
886
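 // Pick a gpu-allocator location from the mapping usage: CPU-writable buffers
 // go to CpuToGpu, read-only mapped buffers to GpuToCpu, everything else
 // stays GpuOnly.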
887 let location = match (is_cpu_read, is_cpu_write) {
888 (true, true) => gpu_allocator::MemoryLocation::CpuToGpu,
889 (true, false) => gpu_allocator::MemoryLocation::GpuToCpu,
890 (false, true) => gpu_allocator::MemoryLocation::CpuToGpu,
891 (false, false) => gpu_allocator::MemoryLocation::GpuOnly,
892 };
893
894 let needs_host_access = is_cpu_read || is_cpu_write;
895
896 self.error_if_would_oom_on_resource_allocation(needs_host_access, requirements.size)
897 .inspect_err(|_| {
898 unsafe { self.shared.raw.destroy_buffer(raw, None) };
899 })?;
900
901 let name = desc.label.unwrap_or("Unlabeled buffer");
902
903 let allocation = self
904 .mem_allocator
905 .lock()
906 .allocate(&gpu_allocator::vulkan::AllocationCreateDesc {
907 name,
908 requirements: vk::MemoryRequirements {
909 memory_type_bits: requirements.memory_type_bits & self.valid_ash_memory_types,
910 ..requirements
911 },
912 location,
 linear: true,
 allocation_scheme: gpu_allocator::vulkan::AllocationScheme::GpuAllocatorManaged,
915 })
916 .inspect_err(|_| {
917 unsafe { self.shared.raw.destroy_buffer(raw, None) };
918 })?;
919
920 unsafe {
921 self.shared
922 .raw
923 .bind_buffer_memory(raw, allocation.memory(), allocation.offset())
924 }
925 .map_err(super::map_host_device_oom_and_ioca_err)
926 .inspect_err(|_| {
927 unsafe { self.shared.raw.destroy_buffer(raw, None) };
928 })?;
929
930 if let Some(label) = desc.label {
931 unsafe { self.shared.set_object_name(raw, label) };
932 }
933
934 self.counters.buffer_memory.add(allocation.size() as isize);
935 self.counters.buffers.add(1);
936
937 Ok(super::Buffer {
938 raw,
939 allocation: Some(Mutex::new(super::BufferMemoryBacking::Managed(allocation))),
940 })
941 }
942 unsafe fn destroy_buffer(&self, buffer: super::Buffer) {
943 unsafe { self.shared.raw.destroy_buffer(buffer.raw, None) };
944 if let Some(allocation) = buffer.allocation {
945 let allocation = allocation.into_inner();
946 self.counters.buffer_memory.sub(allocation.size() as isize);
947 match allocation {
948 super::BufferMemoryBacking::Managed(allocation) => {
949 let result = self.mem_allocator.lock().free(allocation);
950 if let Err(err) = result {
951 log::warn!("Failed to free buffer allocation: {err}");
952 }
953 }
954 super::BufferMemoryBacking::VulkanMemory { memory, .. } => unsafe {
955 self.shared.raw.free_memory(memory, None);
956 },
957 }
958 }
959
960 self.counters.buffers.sub(1);
961 }
962
963 unsafe fn add_raw_buffer(&self, _buffer: &super::Buffer) {
964 self.counters.buffers.add(1);
965 }
966
967 unsafe fn map_buffer(
968 &self,
969 buffer: &super::Buffer,
970 range: crate::MemoryRange,
971 ) -> Result<crate::BufferMapping, crate::DeviceError> {
972 if let Some(ref allocation) = buffer.allocation {
973 let mut allocation = allocation.lock();
974 if let super::BufferMemoryBacking::Managed(ref mut allocation) = *allocation {
975 let is_coherent = allocation
976 .memory_properties()
977 .contains(vk::MemoryPropertyFlags::HOST_COHERENT);
978 Ok(crate::BufferMapping {
979 ptr: unsafe {
980 allocation
981 .mapped_ptr()
982 .unwrap()
983 .cast()
984 .offset(range.start as isize)
985 },
986 is_coherent,
987 })
988 } else {
989 crate::hal_usage_error("tried to map externally created buffer")
990 }
991 } else {
992 crate::hal_usage_error("tried to map external buffer")
993 }
994 }
995
996 unsafe fn unmap_buffer(&self, buffer: &super::Buffer) {
997 if buffer.allocation.is_some() {
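 // gpu-allocator keeps its allocations persistently mapped, so there is
 // nothing to do here.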
998 } else {
1000 crate::hal_usage_error("tried to unmap external buffer")
1001 }
1002 }
1003
1004 unsafe fn flush_mapped_ranges<I>(&self, buffer: &super::Buffer, ranges: I)
1005 where
1006 I: Iterator<Item = crate::MemoryRange>,
1007 {
1008 if let Some(vk_ranges) = self.shared.make_memory_ranges(buffer, ranges) {
1009 unsafe {
1010 self.shared
1011 .raw
1012 .flush_mapped_memory_ranges(
1013 &smallvec::SmallVec::<[vk::MappedMemoryRange; 32]>::from_iter(vk_ranges),
1014 )
1015 }
1016 .unwrap();
1017 }
1018 }
1019 unsafe fn invalidate_mapped_ranges<I>(&self, buffer: &super::Buffer, ranges: I)
1020 where
1021 I: Iterator<Item = crate::MemoryRange>,
1022 {
1023 if let Some(vk_ranges) = self.shared.make_memory_ranges(buffer, ranges) {
1024 unsafe {
1025 self.shared
1026 .raw
1027 .invalidate_mapped_memory_ranges(&smallvec::SmallVec::<
1028 [vk::MappedMemoryRange; 32],
1029 >::from_iter(vk_ranges))
1030 }
1031 .unwrap();
1032 }
1033 }
1034
1035 unsafe fn create_texture(
1036 &self,
1037 desc: &crate::TextureDescriptor,
1038 ) -> Result<super::Texture, crate::DeviceError> {
1039 let image = self.create_image_without_memory(desc, None)?;
1040
1041 self.error_if_would_oom_on_resource_allocation(false, image.requirements.size)
1042 .inspect_err(|_| {
1043 unsafe { self.shared.raw.destroy_image(image.raw, None) };
1044 })?;
1045
1046 let name = desc.label.unwrap_or("Unlabeled texture");
1047
1048 let allocation = self
1049 .mem_allocator
1050 .lock()
1051 .allocate(&gpu_allocator::vulkan::AllocationCreateDesc {
1052 name,
1053 requirements: vk::MemoryRequirements {
1054 memory_type_bits: image.requirements.memory_type_bits
1055 & self.valid_ash_memory_types,
1056 ..image.requirements
1057 },
1058 location: gpu_allocator::MemoryLocation::GpuOnly,
1059 linear: false,
1060 allocation_scheme: gpu_allocator::vulkan::AllocationScheme::GpuAllocatorManaged,
1061 })
1062 .inspect_err(|_| {
1063 unsafe { self.shared.raw.destroy_image(image.raw, None) };
1064 })?;
1065
1066 self.counters.texture_memory.add(allocation.size() as isize);
1067
1068 unsafe {
1069 self.shared
1070 .raw
1071 .bind_image_memory(image.raw, allocation.memory(), allocation.offset())
1072 }
1073 .map_err(super::map_host_device_oom_err)
1074 .inspect_err(|_| {
1075 unsafe { self.shared.raw.destroy_image(image.raw, None) };
1076 })?;
1077
1078 Ok(unsafe {
1079 self.texture_from_raw(
1080 image.raw,
1081 desc,
1082 None,
1083 super::TextureMemory::Allocation(allocation),
1084 )
1085 })
1086 }
1087
1088 unsafe fn destroy_texture(&self, texture: super::Texture) {
1089 if texture.drop_guard.is_none() {
1090 unsafe { self.shared.raw.destroy_image(texture.raw, None) };
1091 }
1092
1093 match texture.memory {
1094 super::TextureMemory::Allocation(allocation) => {
1095 self.counters.texture_memory.sub(allocation.size() as isize);
1096 let result = self.mem_allocator.lock().free(allocation);
1097 if let Err(err) = result {
1098 log::warn!("Failed to free texture allocation: {err}");
1099 }
1100 }
1101 super::TextureMemory::Dedicated(memory) => unsafe {
1102 self.shared.raw.free_memory(memory, None);
1103 },
1104 super::TextureMemory::External => {}
1105 }
1106
1107 self.counters.textures.sub(1);
1108 }
1109
1110 unsafe fn add_raw_texture(&self, _texture: &super::Texture) {
1111 self.counters.textures.add(1);
1112 }
1113
1114 unsafe fn create_texture_view(
1115 &self,
1116 texture: &super::Texture,
1117 desc: &crate::TextureViewDescriptor,
1118 ) -> Result<super::TextureView, crate::DeviceError> {
1119 let subresource_range = conv::map_subresource_range(&desc.range, texture.format);
1120 let raw_format = self.shared.private_caps.map_texture_format(desc.format);
1121 let mut vk_info = vk::ImageViewCreateInfo::default()
1122 .flags(vk::ImageViewCreateFlags::empty())
1123 .image(texture.raw)
1124 .view_type(conv::map_view_dimension(desc.dimension))
1125 .format(raw_format)
1126 .subresource_range(subresource_range);
1127 let layers =
1128 NonZeroU32::new(subresource_range.layer_count).expect("Unexpected zero layer count");
1129
1130 let mut image_view_info;
1131 if self.shared.private_caps.image_view_usage && !desc.usage.is_empty() {
1132 image_view_info =
1133 vk::ImageViewUsageCreateInfo::default().usage(conv::map_texture_usage(desc.usage));
1134 vk_info = vk_info.push_next(&mut image_view_info);
1135 }
1136
1137 let raw = unsafe { self.shared.raw.create_image_view(&vk_info, None) }
1138 .map_err(super::map_host_device_oom_and_ioca_err)?;
1139
1140 if let Some(label) = desc.label {
1141 unsafe { self.shared.set_object_name(raw, label) };
1142 }
1143
1144 let identity = self.shared.texture_view_identity_factory.next();
1145
1146 self.counters.texture_views.add(1);
1147
1148 Ok(super::TextureView {
1149 raw_texture: texture.raw,
1150 raw,
1151 _layers: layers,
1152 format: desc.format,
1153 raw_format,
1154 base_mip_level: desc.range.base_mip_level,
1155 dimension: desc.dimension,
1156 texture_identity: texture.identity,
1157 view_identity: identity,
1158 })
1159 }
1160 unsafe fn destroy_texture_view(&self, view: super::TextureView) {
1161 unsafe { self.shared.raw.destroy_image_view(view.raw, None) };
1162
1163 self.counters.texture_views.sub(1);
1164 }
1165
1166 unsafe fn create_sampler(
1167 &self,
1168 desc: &crate::SamplerDescriptor,
1169 ) -> Result<super::Sampler, crate::DeviceError> {
1170 let mut create_info = vk::SamplerCreateInfo::default()
1171 .flags(vk::SamplerCreateFlags::empty())
1172 .mag_filter(conv::map_filter_mode(desc.mag_filter))
1173 .min_filter(conv::map_filter_mode(desc.min_filter))
1174 .mipmap_mode(conv::map_mip_filter_mode(desc.mipmap_filter))
1175 .address_mode_u(conv::map_address_mode(desc.address_modes[0]))
1176 .address_mode_v(conv::map_address_mode(desc.address_modes[1]))
1177 .address_mode_w(conv::map_address_mode(desc.address_modes[2]))
1178 .min_lod(desc.lod_clamp.start)
1179 .max_lod(desc.lod_clamp.end);
1180
1181 if let Some(fun) = desc.compare {
1182 create_info = create_info
1183 .compare_enable(true)
1184 .compare_op(conv::map_comparison(fun));
1185 }
1186
1187 if desc.anisotropy_clamp != 1 {
1188 create_info = create_info
1191 .anisotropy_enable(true)
1192 .max_anisotropy(desc.anisotropy_clamp as f32);
1193 }
1194
1195 if let Some(color) = desc.border_color {
1196 create_info = create_info.border_color(conv::map_border_color(color));
1197 }
1198
1199 let mut sampler_cache_guard = self.shared.sampler_cache.lock();
1200
1201 let raw = sampler_cache_guard.create_sampler(&self.shared.raw, create_info)?;
1202
1203 if let Some(label) = desc.label {
1207 unsafe { self.shared.set_object_name(raw, label) };
1210 }
1211
1212 drop(sampler_cache_guard);
1213
1214 self.counters.samplers.add(1);
1215
1216 Ok(super::Sampler { raw, create_info })
1217 }
1218 unsafe fn destroy_sampler(&self, sampler: super::Sampler) {
1219 self.shared.sampler_cache.lock().destroy_sampler(
1220 &self.shared.raw,
1221 sampler.create_info,
1222 sampler.raw,
1223 );
1224
1225 self.counters.samplers.sub(1);
1226 }
1227
1228 unsafe fn create_command_encoder(
1229 &self,
1230 desc: &crate::CommandEncoderDescriptor<super::Queue>,
1231 ) -> Result<super::CommandEncoder, crate::DeviceError> {
1232 let vk_info = vk::CommandPoolCreateInfo::default()
1233 .queue_family_index(desc.queue.family_index)
1234 .flags(vk::CommandPoolCreateFlags::TRANSIENT);
1235
1236 let raw = unsafe {
1237 self.shared
1238 .raw
1239 .create_command_pool(&vk_info, None)
1240 .map_err(super::map_host_device_oom_err)?
1241 };
1242
1243 self.counters.command_encoders.add(1);
1244
1245 Ok(super::CommandEncoder {
1246 raw,
1247 device: Arc::clone(&self.shared),
1248 active: vk::CommandBuffer::null(),
1249 bind_point: vk::PipelineBindPoint::default(),
1250 temp: super::Temp::default(),
1251 free: Vec::new(),
1252 discarded: Vec::new(),
1253 rpass_debug_marker_active: false,
1254 end_of_pass_timer_query: None,
1255 framebuffers: Default::default(),
1256 temp_texture_views: Default::default(),
1257 counters: Arc::clone(&self.counters),
1258 current_pipeline_is_multiview: false,
1259 })
1260 }
1261
1262 unsafe fn create_bind_group_layout(
1263 &self,
1264 desc: &crate::BindGroupLayoutDescriptor,
1265 ) -> Result<super::BindGroupLayout, crate::DeviceError> {
1266 let mut vk_bindings = Vec::new();
1271 let mut binding_flags = Vec::new();
1272 let mut binding_map = Vec::new();
1273 let mut next_binding = 0;
1274 let mut contains_binding_arrays = false;
1275 let mut desc_count = gpu_descriptor::DescriptorTotalCount::default();
1276 for entry in desc.entries {
1277 if entry.count.is_some() {
1278 contains_binding_arrays = true;
1279 }
1280
1281 let partially_bound = desc
1282 .flags
1283 .contains(crate::BindGroupLayoutFlags::PARTIALLY_BOUND);
1284 let mut flags = vk::DescriptorBindingFlags::empty();
1285 if partially_bound && entry.count.is_some() {
1286 flags |= vk::DescriptorBindingFlags::PARTIALLY_BOUND;
1287 }
1288 if entry.count.is_some() {
1289 flags |= vk::DescriptorBindingFlags::UPDATE_AFTER_BIND;
1290 }
1291
1292 let count = entry.count.map_or(1, |c| c.get());
1293 match entry.ty {
1294 wgt::BindingType::ExternalTexture => unimplemented!(),
1295 _ => {
1296 vk_bindings.push(vk::DescriptorSetLayoutBinding {
1297 binding: next_binding,
1298 descriptor_type: conv::map_binding_type(entry.ty),
1299 descriptor_count: count,
1300 stage_flags: conv::map_shader_stage(entry.visibility),
1301 p_immutable_samplers: ptr::null(),
1302 _marker: Default::default(),
1303 });
1304 binding_flags.push(flags);
1305 binding_map.push((
1306 entry.binding,
1307 super::BindingInfo {
1308 binding: next_binding,
1309 binding_array_size: entry.count,
1310 },
1311 ));
1312 next_binding += 1;
1313 }
1314 }
1315
1316 match entry.ty {
1317 wgt::BindingType::Buffer {
1318 ty,
1319 has_dynamic_offset,
1320 ..
1321 } => match ty {
1322 wgt::BufferBindingType::Uniform => {
1323 if has_dynamic_offset {
1324 desc_count.uniform_buffer_dynamic += count;
1325 } else {
1326 desc_count.uniform_buffer += count;
1327 }
1328 }
1329 wgt::BufferBindingType::Storage { .. } => {
1330 if has_dynamic_offset {
1331 desc_count.storage_buffer_dynamic += count;
1332 } else {
1333 desc_count.storage_buffer += count;
1334 }
1335 }
1336 },
1337 wgt::BindingType::Sampler { .. } => {
1338 desc_count.sampler += count;
1339 }
1340 wgt::BindingType::Texture { .. } => {
1341 desc_count.sampled_image += count;
1342 }
1343 wgt::BindingType::StorageTexture { .. } => {
1344 desc_count.storage_image += count;
1345 }
1346 wgt::BindingType::AccelerationStructure { .. } => {
1347 desc_count.acceleration_structure += count;
1348 }
1349 wgt::BindingType::ExternalTexture => unimplemented!(),
1350 }
1351 }
1352
1353 let vk_info = vk::DescriptorSetLayoutCreateInfo::default()
1354 .bindings(&vk_bindings)
1355 .flags(if contains_binding_arrays {
1356 vk::DescriptorSetLayoutCreateFlags::UPDATE_AFTER_BIND_POOL
1357 } else {
1358 vk::DescriptorSetLayoutCreateFlags::empty()
1359 });
1360
1361 let mut binding_flag_info =
1362 vk::DescriptorSetLayoutBindingFlagsCreateInfo::default().binding_flags(&binding_flags);
1363
1364 let vk_info = vk_info.push_next(&mut binding_flag_info);
1365
1366 let raw = unsafe {
1367 self.shared
1368 .raw
1369 .create_descriptor_set_layout(&vk_info, None)
1370 .map_err(super::map_host_device_oom_err)?
1371 };
1372
1373 if let Some(label) = desc.label {
1374 unsafe { self.shared.set_object_name(raw, label) };
1375 }
1376
1377 self.counters.bind_group_layouts.add(1);
1378
1379 Ok(super::BindGroupLayout {
1380 raw,
1381 desc_count,
1382 entries: desc.entries.into(),
1383 binding_map,
1384 contains_binding_arrays,
1385 })
1386 }
1387 unsafe fn destroy_bind_group_layout(&self, bg_layout: super::BindGroupLayout) {
1388 unsafe {
1389 self.shared
1390 .raw
1391 .destroy_descriptor_set_layout(bg_layout.raw, None)
1392 };
1393
1394 self.counters.bind_group_layouts.sub(1);
1395 }
1396
1397 unsafe fn create_pipeline_layout(
1398 &self,
1399 desc: &crate::PipelineLayoutDescriptor<super::BindGroupLayout>,
1400 ) -> Result<super::PipelineLayout, crate::DeviceError> {
1401 let vk_set_layouts = desc
1403 .bind_group_layouts
1404 .iter()
1405 .map(|bgl| bgl.raw)
1406 .collect::<Vec<_>>();
1407 let vk_immediates_ranges = desc
1408 .immediates_ranges
1409 .iter()
1410 .map(|pcr| vk::PushConstantRange {
1411 stage_flags: conv::map_shader_stage(pcr.stages),
1412 offset: pcr.range.start,
1413 size: pcr.range.end - pcr.range.start,
1414 })
1415 .collect::<Vec<_>>();
1416
1417 let vk_info = vk::PipelineLayoutCreateInfo::default()
1418 .flags(vk::PipelineLayoutCreateFlags::empty())
1419 .set_layouts(&vk_set_layouts)
1420 .push_constant_ranges(&vk_immediates_ranges);
1421
1422 let raw = {
1423 profiling::scope!("vkCreatePipelineLayout");
1424 unsafe {
1425 self.shared
1426 .raw
1427 .create_pipeline_layout(&vk_info, None)
1428 .map_err(super::map_host_device_oom_err)?
1429 }
1430 };
1431
1432 if let Some(label) = desc.label {
1433 unsafe { self.shared.set_object_name(raw, label) };
1434 }
1435
1436 let mut binding_map = BTreeMap::new();
1437 for (group, &layout) in desc.bind_group_layouts.iter().enumerate() {
1438 for &(binding, binding_info) in &layout.binding_map {
1439 binding_map.insert(
1440 naga::ResourceBinding {
1441 group: group as u32,
1442 binding,
1443 },
1444 naga::back::spv::BindingInfo {
1445 descriptor_set: group as u32,
1446 binding: binding_info.binding,
1447 binding_array_size: binding_info.binding_array_size.map(NonZeroU32::get),
1448 },
1449 );
1450 }
1451 }
1452
1453 self.counters.pipeline_layouts.add(1);
1454 Ok(super::PipelineLayout { raw, binding_map })
1455 }
1456 unsafe fn destroy_pipeline_layout(&self, pipeline_layout: super::PipelineLayout) {
1457 unsafe {
1458 self.shared
1459 .raw
1460 .destroy_pipeline_layout(pipeline_layout.raw, None)
1461 };
1462
1463 self.counters.pipeline_layouts.sub(1);
1464 }
1465
1466 unsafe fn create_bind_group(
1467 &self,
1468 desc: &crate::BindGroupDescriptor<
1469 super::BindGroupLayout,
1470 super::Buffer,
1471 super::Sampler,
1472 super::TextureView,
1473 super::AccelerationStructure,
1474 >,
1475 ) -> Result<super::BindGroup, crate::DeviceError> {
1476 let desc_set_layout_flags = if desc.layout.contains_binding_arrays {
1477 gpu_descriptor::DescriptorSetLayoutCreateFlags::UPDATE_AFTER_BIND
1478 } else {
1479 gpu_descriptor::DescriptorSetLayoutCreateFlags::empty()
1480 };
1481
1482 let mut vk_sets = unsafe {
1483 self.desc_allocator.lock().allocate(
1484 &*self.shared,
1485 &desc.layout.raw,
1486 desc_set_layout_flags,
1487 &desc.layout.desc_count,
1488 1,
1489 )?
1490 };
1491
1492 let set = vk_sets.pop().unwrap();
1493 if let Some(label) = desc.label {
1494 unsafe { self.shared.set_object_name(*set.raw(), label) };
1495 }
1496
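 // Helper that parcels out disjoint regions of a Vec's spare capacity, so the
 // descriptor info arrays borrowed by each `WriteDescriptorSet` below keep
 // stable addresses while the remaining writes are being built.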
1497 struct ExtendStack<'a, T> {
1504 remainder: &'a mut [MaybeUninit<T>],
1505 }
1506
1507 impl<'a, T> ExtendStack<'a, T> {
1508 fn from_vec_capacity(vec: &'a mut Vec<T>) -> Self {
1509 Self {
1510 remainder: vec.spare_capacity_mut(),
1511 }
1512 }
1513
1514 fn extend_one(self, value: T) -> (Self, &'a mut T) {
1515 let (to_init, remainder) = self.remainder.split_first_mut().unwrap();
1516 let init = to_init.write(value);
1517 (Self { remainder }, init)
1518 }
1519
1520 fn extend(
1521 self,
1522 iter: impl IntoIterator<Item = T> + ExactSizeIterator,
1523 ) -> (Self, &'a mut [T]) {
1524 let (to_init, remainder) = self.remainder.split_at_mut(iter.len());
1525
1526 for (value, to_init) in iter.into_iter().zip(to_init.iter_mut()) {
1527 to_init.write(value);
1528 }
1529
1530 let init = {
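 // SAFETY: the loop above wrote an initialized value into every element of
 // `to_init`.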
1533 unsafe { mem::transmute::<&mut [MaybeUninit<T>], &mut [T]>(to_init) }
1540 };
1541 (Self { remainder }, init)
1542 }
1543 }
1544
1545 let mut writes = Vec::with_capacity(desc.entries.len());
1546 let mut buffer_infos = Vec::with_capacity(desc.buffers.len());
1547 let mut buffer_infos = ExtendStack::from_vec_capacity(&mut buffer_infos);
1548 let mut image_infos = Vec::with_capacity(desc.samplers.len() + desc.textures.len());
1549 let mut image_infos = ExtendStack::from_vec_capacity(&mut image_infos);
1550 let mut acceleration_structure_infos =
1555 Vec::with_capacity(desc.acceleration_structures.len());
1556 let mut acceleration_structure_infos =
1557 ExtendStack::from_vec_capacity(&mut acceleration_structure_infos);
1558 let mut raw_acceleration_structures =
1559 Vec::with_capacity(desc.acceleration_structures.len());
1560 let mut raw_acceleration_structures =
1561 ExtendStack::from_vec_capacity(&mut raw_acceleration_structures);
1562
1563 let layout_and_entry_iter = desc.entries.iter().map(|entry| {
1564 let layout = desc
1565 .layout
1566 .entries
1567 .iter()
1568 .find(|layout_entry| layout_entry.binding == entry.binding)
1569 .expect("internal error: no layout entry found with binding slot");
1570 (layout, entry)
1571 });
1572 let mut next_binding = 0;
1573 for (layout, entry) in layout_and_entry_iter {
1574 let write = vk::WriteDescriptorSet::default().dst_set(*set.raw());
1575
1576 match layout.ty {
1577 wgt::BindingType::Sampler(_) => {
1578 let start = entry.resource_index;
1579 let end = start + entry.count;
1580 let local_image_infos;
1581 (image_infos, local_image_infos) =
1582 image_infos.extend(desc.samplers[start as usize..end as usize].iter().map(
1583 |sampler| vk::DescriptorImageInfo::default().sampler(sampler.raw),
1584 ));
1585 writes.push(
1586 write
1587 .dst_binding(next_binding)
1588 .descriptor_type(conv::map_binding_type(layout.ty))
1589 .image_info(local_image_infos),
1590 );
1591 next_binding += 1;
1592 }
1593 wgt::BindingType::Texture { .. } | wgt::BindingType::StorageTexture { .. } => {
1594 let start = entry.resource_index;
1595 let end = start + entry.count;
1596 let local_image_infos;
1597 (image_infos, local_image_infos) =
1598 image_infos.extend(desc.textures[start as usize..end as usize].iter().map(
1599 |binding| {
1600 let layout =
1601 conv::derive_image_layout(binding.usage, binding.view.format);
1602 vk::DescriptorImageInfo::default()
1603 .image_view(binding.view.raw)
1604 .image_layout(layout)
1605 },
1606 ));
1607 writes.push(
1608 write
1609 .dst_binding(next_binding)
1610 .descriptor_type(conv::map_binding_type(layout.ty))
1611 .image_info(local_image_infos),
1612 );
1613 next_binding += 1;
1614 }
1615 wgt::BindingType::Buffer { .. } => {
1616 let start = entry.resource_index;
1617 let end = start + entry.count;
1618 let local_buffer_infos;
1619 (buffer_infos, local_buffer_infos) =
1620 buffer_infos.extend(desc.buffers[start as usize..end as usize].iter().map(
1621 |binding| {
1622 vk::DescriptorBufferInfo::default()
1623 .buffer(binding.buffer.raw)
1624 .offset(binding.offset)
1625 .range(
1626 binding.size.map_or(vk::WHOLE_SIZE, wgt::BufferSize::get),
1627 )
1628 },
1629 ));
1630 writes.push(
1631 write
1632 .dst_binding(next_binding)
1633 .descriptor_type(conv::map_binding_type(layout.ty))
1634 .buffer_info(local_buffer_infos),
1635 );
1636 next_binding += 1;
1637 }
1638 wgt::BindingType::AccelerationStructure { .. } => {
1639 let start = entry.resource_index;
1640 let end = start + entry.count;
1641
1642 let local_raw_acceleration_structures;
1643 (
1644 raw_acceleration_structures,
1645 local_raw_acceleration_structures,
1646 ) = raw_acceleration_structures.extend(
1647 desc.acceleration_structures[start as usize..end as usize]
1648 .iter()
1649 .map(|acceleration_structure| acceleration_structure.raw),
1650 );
1651
1652 let local_acceleration_structure_infos;
1653 (
1654 acceleration_structure_infos,
1655 local_acceleration_structure_infos,
1656 ) = acceleration_structure_infos.extend_one(
1657 vk::WriteDescriptorSetAccelerationStructureKHR::default()
1658 .acceleration_structures(local_raw_acceleration_structures),
1659 );
1660
1661 writes.push(
1662 write
1663 .dst_binding(next_binding)
1664 .descriptor_type(conv::map_binding_type(layout.ty))
1665 .descriptor_count(entry.count)
1666 .push_next(local_acceleration_structure_infos),
1667 );
1668 next_binding += 1;
1669 }
1670 wgt::BindingType::ExternalTexture => unimplemented!(),
1671 }
1672 }
1673
1674 unsafe { self.shared.raw.update_descriptor_sets(&writes, &[]) };
1675
1676 self.counters.bind_groups.add(1);
1677
1678 Ok(super::BindGroup { set })
1679 }
1680
1681 unsafe fn destroy_bind_group(&self, group: super::BindGroup) {
1682 unsafe {
1683 self.desc_allocator
1684 .lock()
1685 .free(&*self.shared, Some(group.set))
1686 };
1687
1688 self.counters.bind_groups.sub(1);
1689 }
1690
1691 unsafe fn create_shader_module(
1692 &self,
1693 desc: &crate::ShaderModuleDescriptor,
1694 shader: crate::ShaderInput,
1695 ) -> Result<super::ShaderModule, crate::ShaderError> {
1696 let shader_module = match shader {
1697 crate::ShaderInput::Naga(naga_shader)
1698 if self
1699 .shared
1700 .workarounds
1701 .contains(super::Workarounds::SEPARATE_ENTRY_POINTS)
1702 || !naga_shader.module.overrides.is_empty() =>
1703 {
1704 super::ShaderModule::Intermediate {
1705 naga_shader,
1706 runtime_checks: desc.runtime_checks,
1707 }
1708 }
1709 crate::ShaderInput::Naga(naga_shader) => {
1710 let mut naga_options = self.naga_options.clone();
1711 naga_options.debug_info =
1712 naga_shader
1713 .debug_source
1714 .as_ref()
1715 .map(|d| naga::back::spv::DebugInfo {
1716 source_code: d.source_code.as_ref(),
1717 file_name: d.file_name.as_ref(),
1718 language: naga::back::spv::SourceLanguage::WGSL,
1719 });
1720 if !desc.runtime_checks.bounds_checks {
1721 naga_options.bounds_check_policies = naga::proc::BoundsCheckPolicies {
1722 index: naga::proc::BoundsCheckPolicy::Unchecked,
1723 buffer: naga::proc::BoundsCheckPolicy::Unchecked,
1724 image_load: naga::proc::BoundsCheckPolicy::Unchecked,
1725 binding_array: naga::proc::BoundsCheckPolicy::Unchecked,
1726 };
1727 }
1728 let spv = naga::back::spv::write_vec(
1729 &naga_shader.module,
1730 &naga_shader.info,
1731 &naga_options,
1732 None,
1733 )
1734 .map_err(|e| crate::ShaderError::Compilation(format!("{e}")))?;
1735 super::ShaderModule::Raw(self.create_shader_module_impl(&spv, &desc.label)?)
1736 }
1737 crate::ShaderInput::SpirV(data) => {
1738 super::ShaderModule::Raw(self.create_shader_module_impl(data, &desc.label)?)
1739 }
1740 crate::ShaderInput::Msl { .. }
1741 | crate::ShaderInput::Dxil { .. }
1742 | crate::ShaderInput::Hlsl { .. }
1743 | crate::ShaderInput::Glsl { .. } => unreachable!(),
1744 };
1745
1746 self.counters.shader_modules.add(1);
1747
1748 Ok(shader_module)
1749 }
1750
1751 unsafe fn destroy_shader_module(&self, module: super::ShaderModule) {
1752 match module {
1753 super::ShaderModule::Raw(raw) => {
1754 unsafe { self.shared.raw.destroy_shader_module(raw, None) };
1755 }
1756 super::ShaderModule::Intermediate { .. } => {}
1757 }
1758
1759 self.counters.shader_modules.sub(1);
1760 }
1761
1762 unsafe fn create_render_pipeline(
1763 &self,
1764 desc: &crate::RenderPipelineDescriptor<
1765 super::PipelineLayout,
1766 super::ShaderModule,
1767 super::PipelineCache,
1768 >,
1769 ) -> Result<super::RenderPipeline, crate::PipelineError> {
1770 let dynamic_states = [
1771 vk::DynamicState::VIEWPORT,
1772 vk::DynamicState::SCISSOR,
1773 vk::DynamicState::BLEND_CONSTANTS,
1774 vk::DynamicState::STENCIL_REFERENCE,
1775 ];
1776 let mut compatible_rp_key = super::RenderPassKey {
1777 sample_count: desc.multisample.count,
1778 multiview_mask: desc.multiview_mask,
1779 ..Default::default()
1780 };
1781 let mut stages = ArrayVec::<_, { crate::MAX_CONCURRENT_SHADER_STAGES }>::new();
1782 let mut vertex_buffers = Vec::new();
1783 let mut vertex_attributes = Vec::new();
1784
1785 if let crate::VertexProcessor::Standard {
1786 vertex_buffers: desc_vertex_buffers,
1787 vertex_stage: _,
1788 } = &desc.vertex_processor
1789 {
1790 vertex_buffers = Vec::with_capacity(desc_vertex_buffers.len());
1791 for (i, vb) in desc_vertex_buffers.iter().enumerate() {
1792 vertex_buffers.push(vk::VertexInputBindingDescription {
1793 binding: i as u32,
1794 stride: vb.array_stride as u32,
1795 input_rate: match vb.step_mode {
1796 wgt::VertexStepMode::Vertex => vk::VertexInputRate::VERTEX,
1797 wgt::VertexStepMode::Instance => vk::VertexInputRate::INSTANCE,
1798 },
1799 });
1800 for at in vb.attributes {
1801 vertex_attributes.push(vk::VertexInputAttributeDescription {
1802 location: at.shader_location,
1803 binding: i as u32,
1804 format: conv::map_vertex_format(at.format),
1805 offset: at.offset as u32,
1806 });
1807 }
1808 }
1809 }
1810
1811 let vk_vertex_input = vk::PipelineVertexInputStateCreateInfo::default()
1812 .vertex_binding_descriptions(&vertex_buffers)
1813 .vertex_attribute_descriptions(&vertex_attributes);
1814
1815 let vk_input_assembly = vk::PipelineInputAssemblyStateCreateInfo::default()
1816 .topology(conv::map_topology(desc.primitive.topology))
1817 .primitive_restart_enable(desc.primitive.strip_index_format.is_some());
1818
1819 let mut compiled_vs = None;
1820 let mut compiled_ms = None;
1821 let mut compiled_ts = None;
1822 match &desc.vertex_processor {
1823 crate::VertexProcessor::Standard {
1824 vertex_buffers: _,
1825 vertex_stage,
1826 } => {
1827 compiled_vs = Some(self.compile_stage(
1828 vertex_stage,
1829 naga::ShaderStage::Vertex,
1830 &desc.layout.binding_map,
1831 )?);
1832 stages.push(compiled_vs.as_ref().unwrap().create_info);
1833 }
1834 crate::VertexProcessor::Mesh {
1835 task_stage,
1836 mesh_stage,
1837 } => {
1838 if let Some(t) = task_stage.as_ref() {
1839 compiled_ts = Some(self.compile_stage(
1840 t,
1841 naga::ShaderStage::Task,
1842 &desc.layout.binding_map,
1843 )?);
1844 stages.push(compiled_ts.as_ref().unwrap().create_info);
1845 }
1846 compiled_ms = Some(self.compile_stage(
1847 mesh_stage,
1848 naga::ShaderStage::Mesh,
1849 &desc.layout.binding_map,
1850 )?);
1851 stages.push(compiled_ms.as_ref().unwrap().create_info);
1852 }
1853 }
1854 let compiled_fs = match desc.fragment_stage {
1855 Some(ref stage) => {
1856 let compiled = self.compile_stage(
1857 stage,
1858 naga::ShaderStage::Fragment,
1859 &desc.layout.binding_map,
1860 )?;
1861 stages.push(compiled.create_info);
1862 Some(compiled)
1863 }
1864 None => None,
1865 };
1866
1867 let mut vk_rasterization = vk::PipelineRasterizationStateCreateInfo::default()
1868 .polygon_mode(conv::map_polygon_mode(desc.primitive.polygon_mode))
1869 .front_face(conv::map_front_face(desc.primitive.front_face))
1870 .line_width(1.0)
1871 .depth_clamp_enable(desc.primitive.unclipped_depth);
1872 if let Some(face) = desc.primitive.cull_mode {
1873 vk_rasterization = vk_rasterization.cull_mode(conv::map_cull_face(face))
1874 }
1875 let mut vk_rasterization_conservative_state =
1876 vk::PipelineRasterizationConservativeStateCreateInfoEXT::default()
1877 .conservative_rasterization_mode(
1878 vk::ConservativeRasterizationModeEXT::OVERESTIMATE,
1879 );
1880 if desc.primitive.conservative {
1881 vk_rasterization = vk_rasterization.push_next(&mut vk_rasterization_conservative_state);
1882 }
1883
1884 let mut vk_depth_stencil = vk::PipelineDepthStencilStateCreateInfo::default();
1885 if let Some(ref ds) = desc.depth_stencil {
1886 let vk_format = self.shared.private_caps.map_texture_format(ds.format);
1887 let vk_layout = if ds.is_read_only(desc.primitive.cull_mode) {
1888 vk::ImageLayout::DEPTH_STENCIL_READ_ONLY_OPTIMAL
1889 } else {
1890 vk::ImageLayout::DEPTH_STENCIL_ATTACHMENT_OPTIMAL
1891 };
1892 compatible_rp_key.depth_stencil = Some(super::DepthStencilAttachmentKey {
1893 base: super::AttachmentKey::compatible(vk_format, vk_layout),
1894 stencil_ops: crate::AttachmentOps::all(),
1895 });
1896
1897 if ds.is_depth_enabled() {
1898 vk_depth_stencil = vk_depth_stencil
1899 .depth_test_enable(true)
1900 .depth_write_enable(ds.depth_write_enabled)
1901 .depth_compare_op(conv::map_comparison(ds.depth_compare));
1902 }
1903 if ds.stencil.is_enabled() {
1904 let s = &ds.stencil;
1905 let front = conv::map_stencil_face(&s.front, s.read_mask, s.write_mask);
1906 let back = conv::map_stencil_face(&s.back, s.read_mask, s.write_mask);
1907 vk_depth_stencil = vk_depth_stencil
1908 .stencil_test_enable(true)
1909 .front(front)
1910 .back(back);
1911 }
1912
1913 if ds.bias.is_enabled() {
1914 vk_rasterization = vk_rasterization
1915 .depth_bias_enable(true)
1916 .depth_bias_constant_factor(ds.bias.constant as f32)
1917 .depth_bias_clamp(ds.bias.clamp)
1918 .depth_bias_slope_factor(ds.bias.slope_scale);
1919 }
1920 }
1921
1922 let vk_viewport = vk::PipelineViewportStateCreateInfo::default()
1923 .flags(vk::PipelineViewportStateCreateFlags::empty())
1924 .scissor_count(1)
1925 .viewport_count(1);
1926
1927 let vk_sample_mask = [
1928 desc.multisample.mask as u32,
1929 (desc.multisample.mask >> 32) as u32,
1930 ];
1931 let vk_multisample = vk::PipelineMultisampleStateCreateInfo::default()
1932 .rasterization_samples(vk::SampleCountFlags::from_raw(desc.multisample.count))
1933 .alpha_to_coverage_enable(desc.multisample.alpha_to_coverage_enabled)
1934 .sample_mask(&vk_sample_mask);
1935
1936 let mut vk_attachments = Vec::with_capacity(desc.color_targets.len());
1937 for cat in desc.color_targets {
 1938 let (key, attachment) = if let Some(cat) = cat.as_ref() {
1939 let mut vk_attachment = vk::PipelineColorBlendAttachmentState::default()
1940 .color_write_mask(vk::ColorComponentFlags::from_raw(cat.write_mask.bits()));
1941 if let Some(ref blend) = cat.blend {
1942 let (color_op, color_src, color_dst) = conv::map_blend_component(&blend.color);
1943 let (alpha_op, alpha_src, alpha_dst) = conv::map_blend_component(&blend.alpha);
1944 vk_attachment = vk_attachment
1945 .blend_enable(true)
1946 .color_blend_op(color_op)
1947 .src_color_blend_factor(color_src)
1948 .dst_color_blend_factor(color_dst)
1949 .alpha_blend_op(alpha_op)
1950 .src_alpha_blend_factor(alpha_src)
1951 .dst_alpha_blend_factor(alpha_dst);
1952 }
1953
1954 let vk_format = self.shared.private_caps.map_texture_format(cat.format);
1955 (
1956 Some(super::ColorAttachmentKey {
1957 base: super::AttachmentKey::compatible(
1958 vk_format,
1959 vk::ImageLayout::COLOR_ATTACHMENT_OPTIMAL,
1960 ),
1961 resolve: None,
1962 }),
1963 vk_attachment,
1964 )
1965 } else {
1966 (None, vk::PipelineColorBlendAttachmentState::default())
1967 };
1968
1969 compatible_rp_key.colors.push(key);
1970 vk_attachments.push(attachment);
1971 }
1972
1973 let vk_color_blend =
1974 vk::PipelineColorBlendStateCreateInfo::default().attachments(&vk_attachments);
1975
1976 let vk_dynamic_state =
1977 vk::PipelineDynamicStateCreateInfo::default().dynamic_states(&dynamic_states);
1978
1979 let raw_pass = self.shared.make_render_pass(compatible_rp_key)?;
1980
1981 let vk_infos = [{
1982 vk::GraphicsPipelineCreateInfo::default()
1983 .layout(desc.layout.raw)
1984 .stages(&stages)
1985 .vertex_input_state(&vk_vertex_input)
1986 .input_assembly_state(&vk_input_assembly)
1987 .rasterization_state(&vk_rasterization)
1988 .viewport_state(&vk_viewport)
1989 .multisample_state(&vk_multisample)
1990 .depth_stencil_state(&vk_depth_stencil)
1991 .color_blend_state(&vk_color_blend)
1992 .dynamic_state(&vk_dynamic_state)
1993 .render_pass(raw_pass)
1994 }];
1995
1996 let pipeline_cache = desc
1997 .cache
1998 .map(|it| it.raw)
1999 .unwrap_or(vk::PipelineCache::null());
2000
2001 let mut raw_vec = {
2002 profiling::scope!("vkCreateGraphicsPipelines");
2003 unsafe {
2004 self.shared
2005 .raw
2006 .create_graphics_pipelines(pipeline_cache, &vk_infos, None)
2007 .map_err(|(_, e)| super::map_pipeline_err(e))
2008 }?
2009 };
2010
2011 let raw = raw_vec.pop().unwrap();
2012 if let Some(label) = desc.label {
2013 unsafe { self.shared.set_object_name(raw, label) };
2014 }
2015
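        // Shader modules that were compiled just for this pipeline are no longer needed once the pipeline exists, so destroy any temporaries now.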
2016 if let Some(CompiledStage {
2017 temp_raw_module: Some(raw_module),
2018 ..
2019 }) = compiled_vs
2020 {
2021 unsafe { self.shared.raw.destroy_shader_module(raw_module, None) };
2022 }
2023 if let Some(CompiledStage {
2024 temp_raw_module: Some(raw_module),
2025 ..
2026 }) = compiled_ts
2027 {
2028 unsafe { self.shared.raw.destroy_shader_module(raw_module, None) };
2029 }
2030 if let Some(CompiledStage {
2031 temp_raw_module: Some(raw_module),
2032 ..
2033 }) = compiled_ms
2034 {
2035 unsafe { self.shared.raw.destroy_shader_module(raw_module, None) };
2036 }
2037 if let Some(CompiledStage {
2038 temp_raw_module: Some(raw_module),
2039 ..
2040 }) = compiled_fs
2041 {
2042 unsafe { self.shared.raw.destroy_shader_module(raw_module, None) };
2043 }
2044
2045 self.counters.render_pipelines.add(1);
2046
2047 Ok(super::RenderPipeline {
2048 raw,
2049 is_multiview: desc.multiview_mask.is_some(),
2050 })
2051 }
2052
2053 unsafe fn destroy_render_pipeline(&self, pipeline: super::RenderPipeline) {
2054 unsafe { self.shared.raw.destroy_pipeline(pipeline.raw, None) };
2055
2056 self.counters.render_pipelines.sub(1);
2057 }
2058
2059 unsafe fn create_compute_pipeline(
2060 &self,
2061 desc: &crate::ComputePipelineDescriptor<
2062 super::PipelineLayout,
2063 super::ShaderModule,
2064 super::PipelineCache,
2065 >,
2066 ) -> Result<super::ComputePipeline, crate::PipelineError> {
2067 let compiled = self.compile_stage(
2068 &desc.stage,
2069 naga::ShaderStage::Compute,
2070 &desc.layout.binding_map,
2071 )?;
2072
2073 let vk_infos = [{
2074 vk::ComputePipelineCreateInfo::default()
2075 .layout(desc.layout.raw)
2076 .stage(compiled.create_info)
2077 }];
2078
2079 let pipeline_cache = desc
2080 .cache
2081 .map(|it| it.raw)
2082 .unwrap_or(vk::PipelineCache::null());
2083
2084 let mut raw_vec = {
2085 profiling::scope!("vkCreateComputePipelines");
2086 unsafe {
2087 self.shared
2088 .raw
2089 .create_compute_pipelines(pipeline_cache, &vk_infos, None)
2090 .map_err(|(_, e)| super::map_pipeline_err(e))
2091 }?
2092 };
2093
2094 let raw = raw_vec.pop().unwrap();
2095 if let Some(label) = desc.label {
2096 unsafe { self.shared.set_object_name(raw, label) };
2097 }
2098
2099 if let Some(raw_module) = compiled.temp_raw_module {
2100 unsafe { self.shared.raw.destroy_shader_module(raw_module, None) };
2101 }
2102
2103 self.counters.compute_pipelines.add(1);
2104
2105 Ok(super::ComputePipeline { raw })
2106 }
2107
2108 unsafe fn destroy_compute_pipeline(&self, pipeline: super::ComputePipeline) {
2109 unsafe { self.shared.raw.destroy_pipeline(pipeline.raw, None) };
2110
2111 self.counters.compute_pipelines.sub(1);
2112 }
2113
2114 unsafe fn create_pipeline_cache(
2115 &self,
2116 desc: &crate::PipelineCacheDescriptor<'_>,
2117 ) -> Result<super::PipelineCache, crate::PipelineCacheError> {
2118 let mut info = vk::PipelineCacheCreateInfo::default();
2119 if let Some(data) = desc.data {
2120 info = info.initial_data(data)
2121 }
2122 profiling::scope!("vkCreatePipelineCache");
2123 let raw = unsafe { self.shared.raw.create_pipeline_cache(&info, None) }
2124 .map_err(super::map_host_device_oom_err)?;
2125
2126 Ok(super::PipelineCache { raw })
2127 }
2128 fn pipeline_cache_validation_key(&self) -> Option<[u8; 16]> {
2129 Some(self.shared.pipeline_cache_validation_key)
2130 }
2131 unsafe fn destroy_pipeline_cache(&self, cache: super::PipelineCache) {
2132 unsafe { self.shared.raw.destroy_pipeline_cache(cache.raw, None) }
2133 }
2134 unsafe fn create_query_set(
2135 &self,
2136 desc: &wgt::QuerySetDescriptor<crate::Label>,
2137 ) -> Result<super::QuerySet, crate::DeviceError> {
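        // Budget check before creating the pool; conservatively assumes on the order of 256 bytes of device memory per query.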
2138 self.error_if_would_oom_on_resource_allocation(true, desc.count as u64 * 256)?;
2141
2142 let (vk_type, pipeline_statistics) = match desc.ty {
2143 wgt::QueryType::Occlusion => (
2144 vk::QueryType::OCCLUSION,
2145 vk::QueryPipelineStatisticFlags::empty(),
2146 ),
2147 wgt::QueryType::PipelineStatistics(statistics) => (
2148 vk::QueryType::PIPELINE_STATISTICS,
2149 conv::map_pipeline_statistics(statistics),
2150 ),
2151 wgt::QueryType::Timestamp => (
2152 vk::QueryType::TIMESTAMP,
2153 vk::QueryPipelineStatisticFlags::empty(),
2154 ),
2155 };
2156
2157 let vk_info = vk::QueryPoolCreateInfo::default()
2158 .query_type(vk_type)
2159 .query_count(desc.count)
2160 .pipeline_statistics(pipeline_statistics);
2161
2162 let raw = unsafe { self.shared.raw.create_query_pool(&vk_info, None) }
2163 .map_err(super::map_host_device_oom_err)?;
2164 if let Some(label) = desc.label {
2165 unsafe { self.shared.set_object_name(raw, label) };
2166 }
2167
2168 self.counters.query_sets.add(1);
2169
2170 Ok(super::QuerySet { raw })
2171 }
2172
2173 unsafe fn destroy_query_set(&self, set: super::QuerySet) {
2174 unsafe { self.shared.raw.destroy_query_pool(set.raw, None) };
2175
2176 self.counters.query_sets.sub(1);
2177 }
2178
2179 unsafe fn create_fence(&self) -> Result<super::Fence, crate::DeviceError> {
2180 self.counters.fences.add(1);
2181
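        // Use a timeline semaphore when the device supports it; otherwise emulate fence values with a pool of binary fences.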
2182 Ok(if self.shared.private_caps.timeline_semaphores {
2183 let mut sem_type_info =
2184 vk::SemaphoreTypeCreateInfo::default().semaphore_type(vk::SemaphoreType::TIMELINE);
2185 let vk_info = vk::SemaphoreCreateInfo::default().push_next(&mut sem_type_info);
2186 let raw = unsafe { self.shared.raw.create_semaphore(&vk_info, None) }
2187 .map_err(super::map_host_device_oom_err)?;
2188
2189 super::Fence::TimelineSemaphore(raw)
2190 } else {
2191 super::Fence::FencePool {
2192 last_completed: 0,
2193 active: Vec::new(),
2194 free: Vec::new(),
2195 }
2196 })
2197 }
2198 unsafe fn destroy_fence(&self, fence: super::Fence) {
2199 match fence {
2200 super::Fence::TimelineSemaphore(raw) => {
2201 unsafe { self.shared.raw.destroy_semaphore(raw, None) };
2202 }
2203 super::Fence::FencePool {
2204 active,
2205 free,
2206 last_completed: _,
2207 } => {
2208 for (_, raw) in active {
2209 unsafe { self.shared.raw.destroy_fence(raw, None) };
2210 }
2211 for raw in free {
2212 unsafe { self.shared.raw.destroy_fence(raw, None) };
2213 }
2214 }
2215 }
2216
2217 self.counters.fences.sub(1);
2218 }
2219 unsafe fn get_fence_value(
2220 &self,
2221 fence: &super::Fence,
2222 ) -> Result<crate::FenceValue, crate::DeviceError> {
2223 fence.get_latest(
2224 &self.shared.raw,
2225 self.shared.extension_fns.timeline_semaphore.as_ref(),
2226 )
2227 }
2228 unsafe fn wait(
2229 &self,
2230 fence: &super::Fence,
2231 wait_value: crate::FenceValue,
2232 timeout: Option<Duration>,
2233 ) -> Result<bool, crate::DeviceError> {
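        // Convert the timeout to nanoseconds, saturating at u64::MAX; `None` effectively means wait indefinitely.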
2234 let timeout_ns = timeout
2235 .unwrap_or(Duration::MAX)
2236 .as_nanos()
2237 .min(u64::MAX as _) as u64;
2238 self.shared.wait_for_fence(fence, wait_value, timeout_ns)
2239 }
2240
2241 unsafe fn start_graphics_debugger_capture(&self) -> bool {
2242 #[cfg(feature = "renderdoc")]
2243 {
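            // RenderDoc expects the pointer that the VkInstance handle points to (its dispatch table), not the handle value itself.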
2244 let raw_vk_instance =
2246 vk::Handle::as_raw(self.shared.instance.raw.handle()) as *mut *mut _;
2247 let raw_vk_instance_dispatch_table = unsafe { *raw_vk_instance };
2248 unsafe {
2249 self.render_doc
2250 .start_frame_capture(raw_vk_instance_dispatch_table, ptr::null_mut())
2251 }
2252 }
2253 #[cfg(not(feature = "renderdoc"))]
2254 false
2255 }
2256 unsafe fn stop_graphics_debugger_capture(&self) {
2257 #[cfg(feature = "renderdoc")]
2258 {
2259 let raw_vk_instance =
2261 vk::Handle::as_raw(self.shared.instance.raw.handle()) as *mut *mut _;
2262 let raw_vk_instance_dispatch_table = unsafe { *raw_vk_instance };
2263
2264 unsafe {
2265 self.render_doc
2266 .end_frame_capture(raw_vk_instance_dispatch_table, ptr::null_mut())
2267 }
2268 }
2269 }
2270
2271 unsafe fn pipeline_cache_get_data(&self, cache: &super::PipelineCache) -> Option<Vec<u8>> {
2272 let data = unsafe { self.raw_device().get_pipeline_cache_data(cache.raw) };
2273 data.ok()
2274 }
2275
2276 unsafe fn get_acceleration_structure_build_sizes<'a>(
2277 &self,
2278 desc: &crate::GetAccelerationStructureBuildSizesDescriptor<'a, super::Buffer>,
2279 ) -> crate::AccelerationStructureBuildSizes {
2280 const CAPACITY: usize = 8;
2281
2282 let ray_tracing_functions = self
2283 .shared
2284 .extension_fns
2285 .ray_tracing
2286 .as_ref()
2287 .expect("Feature `RAY_TRACING` not enabled");
2288
2289 let (geometries, primitive_counts) = match *desc.entries {
2290 crate::AccelerationStructureEntries::Instances(ref instances) => {
2291 let instance_data = vk::AccelerationStructureGeometryInstancesDataKHR::default();
2292
2293 let geometry = vk::AccelerationStructureGeometryKHR::default()
2294 .geometry_type(vk::GeometryTypeKHR::INSTANCES)
2295 .geometry(vk::AccelerationStructureGeometryDataKHR {
2296 instances: instance_data,
2297 });
2298
2299 (
2300 smallvec::smallvec![geometry],
2301 smallvec::smallvec![instances.count],
2302 )
2303 }
2304 crate::AccelerationStructureEntries::Triangles(ref in_geometries) => {
2305 let mut primitive_counts =
2306 smallvec::SmallVec::<[u32; CAPACITY]>::with_capacity(in_geometries.len());
2307 let mut geometries = smallvec::SmallVec::<
2308 [vk::AccelerationStructureGeometryKHR; CAPACITY],
2309 >::with_capacity(in_geometries.len());
2310
2311 for triangles in in_geometries {
2312 let mut triangle_data =
2313 vk::AccelerationStructureGeometryTrianglesDataKHR::default()
2314 .index_type(vk::IndexType::NONE_KHR)
2315 .vertex_format(conv::map_vertex_format(triangles.vertex_format))
2316 .max_vertex(triangles.vertex_count)
2317 .vertex_stride(triangles.vertex_stride)
2318 .transform_data(vk::DeviceOrHostAddressConstKHR {
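                            // Only the presence of transform data matters for the size query: supply the transform buffer's device address when USE_TRANSFORM is set, otherwise leave it null (0).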
2328 device_address: if desc
2329 .flags
2330 .contains(wgt::AccelerationStructureFlags::USE_TRANSFORM)
2331 {
2332 unsafe {
2333 ray_tracing_functions
2334 .buffer_device_address
2335 .get_buffer_device_address(
2336 &vk::BufferDeviceAddressInfo::default().buffer(
2337 triangles
2338 .transform
2339 .as_ref()
2340 .unwrap()
2341 .buffer
2342 .raw,
2343 ),
2344 )
2345 }
2346 } else {
2347 0
2348 },
2349 });
2350
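                    // Triangle count: indices / 3 when an index buffer is present, vertices / 3 otherwise.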
2351 let primitive_count = if let Some(ref indices) = triangles.indices {
2352 triangle_data =
2353 triangle_data.index_type(conv::map_index_format(indices.format));
2354 indices.count / 3
2355 } else {
2356 triangles.vertex_count / 3
2357 };
2358
2359 let geometry = vk::AccelerationStructureGeometryKHR::default()
2360 .geometry_type(vk::GeometryTypeKHR::TRIANGLES)
2361 .geometry(vk::AccelerationStructureGeometryDataKHR {
2362 triangles: triangle_data,
2363 })
2364 .flags(conv::map_acceleration_structure_geometry_flags(
2365 triangles.flags,
2366 ));
2367
2368 geometries.push(geometry);
2369 primitive_counts.push(primitive_count);
2370 }
2371 (geometries, primitive_counts)
2372 }
2373 crate::AccelerationStructureEntries::AABBs(ref in_geometries) => {
2374 let mut primitive_counts =
2375 smallvec::SmallVec::<[u32; CAPACITY]>::with_capacity(in_geometries.len());
2376 let mut geometries = smallvec::SmallVec::<
2377 [vk::AccelerationStructureGeometryKHR; CAPACITY],
2378 >::with_capacity(in_geometries.len());
2379 for aabb in in_geometries {
2380 let aabbs_data = vk::AccelerationStructureGeometryAabbsDataKHR::default()
2381 .stride(aabb.stride);
2382
2383 let geometry = vk::AccelerationStructureGeometryKHR::default()
2384 .geometry_type(vk::GeometryTypeKHR::AABBS)
2385 .geometry(vk::AccelerationStructureGeometryDataKHR { aabbs: aabbs_data })
2386 .flags(conv::map_acceleration_structure_geometry_flags(aabb.flags));
2387
2388 geometries.push(geometry);
2389 primitive_counts.push(aabb.count);
2390 }
2391 (geometries, primitive_counts)
2392 }
2393 };
2394
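        // Instance entries build a top-level acceleration structure; triangle and AABB geometry build bottom-level ones.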
2395 let ty = match *desc.entries {
2396 crate::AccelerationStructureEntries::Instances(_) => {
2397 vk::AccelerationStructureTypeKHR::TOP_LEVEL
2398 }
2399 _ => vk::AccelerationStructureTypeKHR::BOTTOM_LEVEL,
2400 };
2401
2402 let geometry_info = vk::AccelerationStructureBuildGeometryInfoKHR::default()
2403 .ty(ty)
2404 .flags(conv::map_acceleration_structure_flags(desc.flags))
2405 .geometries(&geometries);
2406
2407 let mut raw = Default::default();
2408 unsafe {
2409 ray_tracing_functions
2410 .acceleration_structure
2411 .get_acceleration_structure_build_sizes(
2412 vk::AccelerationStructureBuildTypeKHR::DEVICE,
2413 &geometry_info,
2414 &primitive_counts,
2415 &mut raw,
2416 )
2417 }
2418
2419 crate::AccelerationStructureBuildSizes {
2420 acceleration_structure_size: raw.acceleration_structure_size,
2421 update_scratch_size: raw.update_scratch_size,
2422 build_scratch_size: raw.build_scratch_size,
2423 }
2424 }
2425
2426 unsafe fn get_acceleration_structure_device_address(
2427 &self,
2428 acceleration_structure: &super::AccelerationStructure,
2429 ) -> wgt::BufferAddress {
2430 let ray_tracing_functions = self
2431 .shared
2432 .extension_fns
2433 .ray_tracing
2434 .as_ref()
2435 .expect("Feature `RAY_TRACING` not enabled");
2436
2437 unsafe {
2438 ray_tracing_functions
2439 .acceleration_structure
2440 .get_acceleration_structure_device_address(
2441 &vk::AccelerationStructureDeviceAddressInfoKHR::default()
2442 .acceleration_structure(acceleration_structure.raw),
2443 )
2444 }
2445 }
2446
2447 unsafe fn create_acceleration_structure(
2448 &self,
2449 desc: &crate::AccelerationStructureDescriptor,
2450 ) -> Result<super::AccelerationStructure, crate::DeviceError> {
2451 let ray_tracing_functions = self
2452 .shared
2453 .extension_fns
2454 .ray_tracing
2455 .as_ref()
2456 .expect("Feature `RAY_TRACING` not enabled");
2457
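        // The backing buffer must allow acceleration structure storage and device addresses; it is bound to GPU-only memory below.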
2458 let vk_buffer_info = vk::BufferCreateInfo::default()
2459 .size(desc.size)
2460 .usage(
2461 vk::BufferUsageFlags::ACCELERATION_STRUCTURE_STORAGE_KHR
2462 | vk::BufferUsageFlags::SHADER_DEVICE_ADDRESS,
2463 )
2464 .sharing_mode(vk::SharingMode::EXCLUSIVE);
2465
2466 unsafe {
2467 let raw_buffer = self
2468 .shared
2469 .raw
2470 .create_buffer(&vk_buffer_info, None)
2471 .map_err(super::map_host_device_oom_and_ioca_err)?;
2472
2473 let requirements = self.shared.raw.get_buffer_memory_requirements(raw_buffer);
2474
2475 self.error_if_would_oom_on_resource_allocation(false, requirements.size)
2476 .inspect_err(|_| {
2477 self.shared.raw.destroy_buffer(raw_buffer, None);
2478 })?;
2479
2480 let name = desc
2481 .label
2482 .unwrap_or("Unlabeled acceleration structure buffer");
2483
2484 let allocation = self
2485 .mem_allocator
2486 .lock()
2487 .allocate(&gpu_allocator::vulkan::AllocationCreateDesc {
2488 name,
2489 requirements,
2490 location: gpu_allocator::MemoryLocation::GpuOnly,
2491 linear: true,
2492 allocation_scheme: gpu_allocator::vulkan::AllocationScheme::GpuAllocatorManaged,
2493 })
2494 .inspect_err(|_| {
2495 self.shared.raw.destroy_buffer(raw_buffer, None);
2496 })?;
2497
2498 self.shared
2499 .raw
2500 .bind_buffer_memory(raw_buffer, allocation.memory(), allocation.offset())
2501 .map_err(super::map_host_device_oom_and_ioca_err)
2502 .inspect_err(|_| {
2503 self.shared.raw.destroy_buffer(raw_buffer, None);
2504 })?;
2505
2506 if let Some(label) = desc.label {
2507 self.shared.set_object_name(raw_buffer, label);
2508 }
2509
2510 let vk_info = vk::AccelerationStructureCreateInfoKHR::default()
2511 .buffer(raw_buffer)
2512 .offset(0)
2513 .size(desc.size)
2514 .ty(conv::map_acceleration_structure_format(desc.format));
2515
2516 let raw_acceleration_structure = ray_tracing_functions
2517 .acceleration_structure
2518 .create_acceleration_structure(&vk_info, None)
2519 .map_err(super::map_host_oom_and_ioca_err)
2520 .inspect_err(|_| {
2521 self.shared.raw.destroy_buffer(raw_buffer, None);
2522 })?;
2523
2524 if let Some(label) = desc.label {
2525 self.shared
2526 .set_object_name(raw_acceleration_structure, label);
2527 }
2528
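            // When compaction is allowed, create a single-entry query pool up front so the compacted size can be read back after the build.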
2529 let pool = if desc.allow_compaction {
2530 let vk_info = vk::QueryPoolCreateInfo::default()
2531 .query_type(vk::QueryType::ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR)
2532 .query_count(1);
2533
2534 let raw = self
2535 .shared
2536 .raw
2537 .create_query_pool(&vk_info, None)
2538 .map_err(super::map_host_device_oom_err)
2539 .inspect_err(|_| {
2540 ray_tracing_functions
2541 .acceleration_structure
2542 .destroy_acceleration_structure(raw_acceleration_structure, None);
2543 self.shared.raw.destroy_buffer(raw_buffer, None);
2544 })?;
2545 Some(raw)
2546 } else {
2547 None
2548 };
2549
2550 Ok(super::AccelerationStructure {
2551 raw: raw_acceleration_structure,
2552 buffer: raw_buffer,
2553 allocation,
2554 compacted_size_query: pool,
2555 })
2556 }
2557 }
2558
2559 unsafe fn destroy_acceleration_structure(
2560 &self,
2561 acceleration_structure: super::AccelerationStructure,
2562 ) {
2563 let ray_tracing_functions = self
2564 .shared
2565 .extension_fns
2566 .ray_tracing
2567 .as_ref()
2568 .expect("Feature `RAY_TRACING` not enabled");
2569
2570 unsafe {
2571 ray_tracing_functions
2572 .acceleration_structure
2573 .destroy_acceleration_structure(acceleration_structure.raw, None);
2574 self.shared
2575 .raw
2576 .destroy_buffer(acceleration_structure.buffer, None);
2577 let result = self
2578 .mem_allocator
2579 .lock()
2580 .free(acceleration_structure.allocation);
2581 if let Err(err) = result {
2582 log::warn!("Failed to free acceleration structure buffer allocation: {err}");
2583 }
2584 if let Some(query) = acceleration_structure.compacted_size_query {
2585 self.shared.raw.destroy_query_pool(query, None)
2586 }
2587 }
2588 }
2589
2590 fn get_internal_counters(&self) -> wgt::HalCounters {
2591 self.counters
2592 .memory_allocations
2593 .set(self.shared.memory_allocations_counter.read());
2594
2595 self.counters.as_ref().clone()
2596 }
2597
2598 fn generate_allocator_report(&self) -> Option<wgt::AllocatorReport> {
2599 let gpu_allocator::AllocatorReport {
2600 allocations,
2601 blocks,
2602 total_allocated_bytes,
2603 total_capacity_bytes,
2604 } = self.mem_allocator.lock().generate_report();
2605
2606 let allocations = allocations
2607 .into_iter()
2608 .map(|alloc| wgt::AllocationReport {
2609 name: alloc.name,
2610 offset: alloc.offset,
2611 size: alloc.size,
2612 })
2613 .collect();
2614
2615 let blocks = blocks
2616 .into_iter()
2617 .map(|block| wgt::MemoryBlockReport {
2618 size: block.size,
2619 allocations: block.allocations.clone(),
2620 })
2621 .collect();
2622
2623 Some(wgt::AllocatorReport {
2624 allocations,
2625 blocks,
2626 total_allocated_bytes,
2627 total_reserved_bytes: total_capacity_bytes,
2628 })
2629 }
2630
2631 fn tlas_instance_to_bytes(&self, instance: TlasInstance) -> Vec<u8> {
2632 const MAX_U24: u32 = (1u32 << 24u32) - 1u32;
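        // The custom index occupies the low 24 bits and the visibility mask the top 8, mirroring VkAccelerationStructureInstanceKHR's packing.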
2633 let temp = RawTlasInstance {
2634 transform: instance.transform,
2635 custom_data_and_mask: (instance.custom_data & MAX_U24)
2636 | (u32::from(instance.mask) << 24),
2637 shader_binding_table_record_offset_and_flags: 0,
2638 acceleration_structure_reference: instance.blas_address,
2639 };
2640 bytemuck::bytes_of(&temp).to_vec()
2641 }
2642
2643 fn check_if_oom(&self) -> Result<(), crate::DeviceError> {
2644 let Some(threshold) = self
2645 .shared
2646 .instance
2647 .memory_budget_thresholds
2648 .for_device_loss
2649 else {
2650 return Ok(());
2651 };
2652
2653 if !self
2654 .shared
2655 .enabled_extensions
2656 .contains(&ext::memory_budget::NAME)
2657 {
2658 return Ok(());
2659 }
2660
2661 let get_physical_device_properties = self
2662 .shared
2663 .instance
2664 .get_physical_device_properties
2665 .as_ref()
2666 .unwrap();
2667
2668 let mut memory_budget_properties = vk::PhysicalDeviceMemoryBudgetPropertiesEXT::default();
2669
2670 let mut memory_properties =
2671 vk::PhysicalDeviceMemoryProperties2::default().push_next(&mut memory_budget_properties);
2672
2673 unsafe {
2674 get_physical_device_properties.get_physical_device_memory_properties2(
2675 self.shared.physical_device,
2676 &mut memory_properties,
2677 );
2678 }
2679
2680 let memory_properties = memory_properties.memory_properties;
2681
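        // Treat the device as out of memory once any heap's usage reaches the configured percentage of its advertised budget.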
2682 for i in 0..memory_properties.memory_heap_count {
2683 let heap_usage = memory_budget_properties.heap_usage[i as usize];
2684 let heap_budget = memory_budget_properties.heap_budget[i as usize];
2685
2686 if heap_usage >= heap_budget / 100 * threshold as u64 {
2687 return Err(crate::DeviceError::OutOfMemory);
2688 }
2689 }
2690
2691 Ok(())
2692 }
2693}
2694
2695impl super::DeviceShared {
2696 pub(super) fn new_binary_semaphore(
2697 &self,
2698 name: &str,
2699 ) -> Result<vk::Semaphore, crate::DeviceError> {
2700 unsafe {
2701 let semaphore = self
2702 .raw
2703 .create_semaphore(&vk::SemaphoreCreateInfo::default(), None)
2704 .map_err(super::map_host_device_oom_err)?;
2705
2706 self.set_object_name(semaphore, name);
2707
2708 Ok(semaphore)
2709 }
2710 }
2711
2712 pub(super) fn wait_for_fence(
2713 &self,
2714 fence: &super::Fence,
2715 wait_value: crate::FenceValue,
2716 timeout_ns: u64,
2717 ) -> Result<bool, crate::DeviceError> {
2718 profiling::scope!("Device::wait");
2719 match *fence {
2720 super::Fence::TimelineSemaphore(raw) => {
2721 let semaphores = [raw];
2722 let values = [wait_value];
2723 let vk_info = vk::SemaphoreWaitInfo::default()
2724 .semaphores(&semaphores)
2725 .values(&values);
2726 let result = match self.extension_fns.timeline_semaphore {
2727 Some(super::ExtensionFn::Extension(ref ext)) => unsafe {
2728 ext.wait_semaphores(&vk_info, timeout_ns)
2729 },
2730 Some(super::ExtensionFn::Promoted) => unsafe {
2731 self.raw.wait_semaphores(&vk_info, timeout_ns)
2732 },
2733 None => unreachable!(),
2734 };
2735 match result {
2736 Ok(()) => Ok(true),
2737 Err(vk::Result::TIMEOUT) => Ok(false),
2738 Err(other) => Err(super::map_host_device_oom_and_lost_err(other)),
2739 }
2740 }
2741 super::Fence::FencePool {
2742 last_completed,
2743 ref active,
2744 free: _,
2745 } => {
2746 if wait_value <= last_completed {
2747 Ok(true)
2748 } else {
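                    // Wait on the first still-active fence whose submission value covers the requested wait value.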
2749 match active.iter().find(|&&(value, _)| value >= wait_value) {
2750 Some(&(_, raw)) => {
2751 match unsafe { self.raw.wait_for_fences(&[raw], true, timeout_ns) } {
2752 Ok(()) => Ok(true),
2753 Err(vk::Result::TIMEOUT) => Ok(false),
2754 Err(other) => Err(super::map_host_device_oom_and_lost_err(other)),
2755 }
2756 }
2757 None => {
2758 crate::hal_usage_error(format!(
2759 "no signals reached value {wait_value}"
2760 ));
2761 }
2762 }
2763 }
2764 }
2765 }
2766 }
2767}
2768
2769impl From<gpu_descriptor::AllocationError> for crate::DeviceError {
2770 fn from(error: gpu_descriptor::AllocationError) -> Self {
2771 use gpu_descriptor::AllocationError as Ae;
2772 match error {
2773 Ae::OutOfDeviceMemory | Ae::OutOfHostMemory | Ae::Fragmentation => Self::OutOfMemory,
2774 }
2775 }
2776}
2777
2778fn handle_unexpected(err: vk::Result) -> ! {
2785 panic!("Unexpected Vulkan error: `{err}`")
2786}
2787
2788struct ImageWithoutMemory {
2789 raw: vk::Image,
2790 requirements: vk::MemoryRequirements,
2791}