1use alloc::{borrow::ToOwned as _, collections::BTreeMap, ffi::CString, sync::Arc, vec::Vec};
2use core::{
3 ffi::CStr,
4 mem::{self, MaybeUninit},
5 num::NonZeroU32,
6 ptr,
7 time::Duration,
8};
9
10use arrayvec::ArrayVec;
11use ash::{ext, vk};
12use hashbrown::hash_map::Entry;
13use parking_lot::Mutex;
14
15use super::{conv, RawTlasInstance};
16use crate::TlasInstance;
17
18impl super::DeviceShared {
    /// Attaches a human-readable debug name to a Vulkan object via
    /// `VK_EXT_debug_utils`. Silently does nothing when the extension
    /// functions were not loaded.
    ///
    /// # Safety
    ///
    /// `object` must be a valid Vulkan handle belonging to this device.
    pub(super) unsafe fn set_object_name(&self, object: impl vk::Handle, name: &str) {
        let Some(extension) = self.extension_fns.debug_utils.as_ref() else {
            return;
        };

        // Fast path: build the NUL-terminated name on the stack for short
        // names; fall back to a heap allocation only for long ones.
        let mut buffer: [u8; 64] = [0u8; 64];
        let buffer_vec: Vec<u8>;

        let name_bytes = if name.len() < buffer.len() {
            buffer[..name.len()].copy_from_slice(name.as_bytes());
            // Append the NUL terminator required by the C API.
            buffer[name.len()] = 0;
            &buffer[..name.len() + 1]
        } else {
            buffer_vec = name
                .as_bytes()
                .iter()
                .cloned()
                .chain(core::iter::once(0))
                .collect();
            &buffer_vec
        };

        // Cannot fail: a NUL byte was appended above. If `name` itself
        // contains an interior NUL, the name is truncated there.
        let name = CStr::from_bytes_until_nul(name_bytes).expect("We have added a null byte");

        // Naming is purely diagnostic; a failure here is deliberately ignored.
        let _result = unsafe {
            extension.set_debug_utils_object_name(
                &vk::DebugUtilsObjectNameInfoEXT::default()
                    .object_handle(object)
                    .object_name(name),
            )
        };
    }
75
    /// Returns a `vk::RenderPass` matching `key`, creating and caching it on
    /// first use (subsequent calls with an equal key hit the
    /// `self.render_passes` cache).
    ///
    /// # Errors
    ///
    /// Returns a [`crate::DeviceError`] if `vkCreateRenderPass` reports an
    /// out-of-memory condition.
    pub fn make_render_pass(
        &self,
        key: super::RenderPassKey,
    ) -> Result<vk::RenderPass, crate::DeviceError> {
        Ok(match self.render_passes.lock().entry(key) {
            Entry::Occupied(e) => *e.get(),
            Entry::Vacant(e) => {
                let super::RenderPassKey {
                    ref colors,
                    ref depth_stencil,
                    sample_count,
                    multiview_mask,
                } = *e.key();

                let mut vk_attachments = Vec::new();
                let mut color_refs = Vec::with_capacity(colors.len());
                let mut resolve_refs = Vec::with_capacity(color_refs.capacity());
                let mut ds_ref = None;
                let samples = vk::SampleCountFlags::from_raw(sample_count);
                // Placeholder reference for holes in the attachment lists.
                let unused = vk::AttachmentReference {
                    attachment: vk::ATTACHMENT_UNUSED,
                    layout: vk::ImageLayout::UNDEFINED,
                };
                for cat in colors.iter() {
                    let (color_ref, resolve_ref) =
                        if let Some(super::ColorAttachmentKey { base, resolve }) = cat {
                            let super::AttachmentKey {
                                format,
                                layout,
                                ops,
                            } = *base;

                            // The reference index is the position the
                            // description is about to be pushed at.
                            let color_ref = vk::AttachmentReference {
                                attachment: vk_attachments.len() as u32,
                                layout,
                            };
                            vk_attachments.push({
                                let (load_op, store_op) = conv::map_attachment_ops(ops);
                                vk::AttachmentDescription::default()
                                    .format(format)
                                    .samples(samples)
                                    .load_op(load_op)
                                    .store_op(store_op)
                                    .initial_layout(layout)
                                    .final_layout(layout)
                            });
                            let resolve_ref = if let Some(rat) = resolve {
                                let super::AttachmentKey {
                                    format,
                                    layout,
                                    ops,
                                } = *rat;

                                // Resolve targets are always single-sampled.
                                let (load_op, store_op) = conv::map_attachment_ops(ops);
                                let vk_attachment = vk::AttachmentDescription::default()
                                    .format(format)
                                    .samples(vk::SampleCountFlags::TYPE_1)
                                    .load_op(load_op)
                                    .store_op(store_op)
                                    .initial_layout(layout)
                                    .final_layout(layout);
                                vk_attachments.push(vk_attachment);

                                vk::AttachmentReference {
                                    attachment: vk_attachments.len() as u32 - 1,
                                    layout,
                                }
                            } else {
                                unused
                            };

                            (color_ref, resolve_ref)
                        } else {
                            (unused, unused)
                        };

                    color_refs.push(color_ref);
                    resolve_refs.push(resolve_ref);
                }

                if let Some(ds) = depth_stencil {
                    let super::DepthStencilAttachmentKey {
                        ref base,
                        stencil_ops,
                    } = *ds;

                    let super::AttachmentKey {
                        format,
                        layout,
                        ops,
                    } = *base;

                    ds_ref = Some(vk::AttachmentReference {
                        attachment: vk_attachments.len() as u32,
                        layout,
                    });
                    let (load_op, store_op) = conv::map_attachment_ops(ops);
                    let (stencil_load_op, stencil_store_op) = conv::map_attachment_ops(stencil_ops);
                    let vk_attachment = vk::AttachmentDescription::default()
                        .format(format)
                        .samples(samples)
                        .load_op(load_op)
                        .store_op(store_op)
                        .stencil_load_op(stencil_load_op)
                        .stencil_store_op(stencil_store_op)
                        .initial_layout(layout)
                        .final_layout(layout);
                    vk_attachments.push(vk_attachment);
                }

                // A single graphics subpass using all attachments collected above.
                let vk_subpasses = [{
                    let mut vk_subpass = vk::SubpassDescription::default()
                        .pipeline_bind_point(vk::PipelineBindPoint::GRAPHICS)
                        .color_attachments(&color_refs)
                        .resolve_attachments(&resolve_refs);

                    // Driver workaround: some implementations reject an empty
                    // (but non-null) resolve attachment list, so null it out.
                    if self
                        .workarounds
                        .contains(super::Workarounds::EMPTY_RESOLVE_ATTACHMENT_LISTS)
                        && resolve_refs.is_empty()
                    {
                        vk_subpass.p_resolve_attachments = ptr::null();
                    }

                    if let Some(ref reference) = ds_ref {
                        vk_subpass = vk_subpass.depth_stencil_attachment(reference)
                    }
                    vk_subpass
                }];

                let mut vk_info = vk::RenderPassCreateInfo::default()
                    .attachments(&vk_attachments)
                    .subpasses(&vk_subpasses);

                // Chain in multiview state when a view mask was requested.
                // `mask` must outlive `multiview_info`, which borrows it.
                let mut multiview_info;
                let mask;
                if let Some(multiview_mask) = multiview_mask {
                    mask = [multiview_mask.get()];

                    multiview_info = vk::RenderPassMultiviewCreateInfoKHR::default()
                        .view_masks(&mask)
                        .correlation_masks(&mask);
                    vk_info = vk_info.push_next(&mut multiview_info);
                }

                let raw = unsafe {
                    self.raw
                        .create_render_pass(&vk_info, None)
                        .map_err(super::map_host_device_oom_err)?
                };

                // Cache for subsequent lookups with an equal key.
                *e.insert(raw)
            }
        })
    }
232
    /// Builds `vk::MappedMemoryRange`s for `ranges` of `buffer`, aligned to
    /// the device's non-coherent atom size (offsets rounded down, sizes
    /// rounded up via `non_coherent_map_mask`).
    ///
    /// Returns `None` when the buffer has no allocation we manage (e.g. it
    /// was created from an external handle), in which case the caller has
    /// nothing to flush/invalidate.
    fn make_memory_ranges<'a, I: 'a + Iterator<Item = crate::MemoryRange>>(
        &self,
        buffer: &'a super::Buffer,
        ranges: I,
    ) -> Option<impl 'a + Iterator<Item = vk::MappedMemoryRange<'a>>> {
        // The lock guard is moved into the closure and held while the
        // returned iterator is alive.
        let allocation = buffer.allocation.as_ref()?.lock();
        let mask = self.private_caps.non_coherent_map_mask;
        Some(ranges.map(move |range| {
            vk::MappedMemoryRange::default()
                .memory(allocation.memory())
                .offset((allocation.offset() + range.start) & !mask)
                .size((range.end - range.start + mask) & !mask)
        }))
    }
247}
248
249impl
250 gpu_descriptor::DescriptorDevice<vk::DescriptorSetLayout, vk::DescriptorPool, vk::DescriptorSet>
251 for super::DeviceShared
252{
    /// Creates a `vk::DescriptorPool` sized according to `descriptor_count`.
    ///
    /// Descriptor types with a zero count are filtered out, since Vulkan
    /// valid usage requires every `VkDescriptorPoolSize::descriptorCount`
    /// to be non-zero.
    unsafe fn create_descriptor_pool(
        &self,
        descriptor_count: &gpu_descriptor::DescriptorTotalCount,
        max_sets: u32,
        flags: gpu_descriptor::DescriptorPoolCreateFlags,
    ) -> Result<vk::DescriptorPool, gpu_descriptor::CreatePoolError> {
        // All descriptor types we may allocate, with their requested counts.
        let unfiltered_counts = [
            (vk::DescriptorType::SAMPLER, descriptor_count.sampler),
            (
                vk::DescriptorType::SAMPLED_IMAGE,
                descriptor_count.sampled_image,
            ),
            (
                vk::DescriptorType::STORAGE_IMAGE,
                descriptor_count.storage_image,
            ),
            (
                vk::DescriptorType::UNIFORM_BUFFER,
                descriptor_count.uniform_buffer,
            ),
            (
                vk::DescriptorType::UNIFORM_BUFFER_DYNAMIC,
                descriptor_count.uniform_buffer_dynamic,
            ),
            (
                vk::DescriptorType::STORAGE_BUFFER,
                descriptor_count.storage_buffer,
            ),
            (
                vk::DescriptorType::STORAGE_BUFFER_DYNAMIC,
                descriptor_count.storage_buffer_dynamic,
            ),
            (
                vk::DescriptorType::ACCELERATION_STRUCTURE_KHR,
                descriptor_count.acceleration_structure,
            ),
        ];

        // Capacity 8 matches the number of entries above; no heap allocation.
        let filtered_counts = unfiltered_counts
            .iter()
            .cloned()
            .filter(|&(_, count)| count != 0)
            .map(|(ty, count)| vk::DescriptorPoolSize {
                ty,
                descriptor_count: count,
            })
            .collect::<ArrayVec<_, 8>>();

        // Translate gpu-descriptor flags into their Vulkan equivalents.
        let mut vk_flags =
            if flags.contains(gpu_descriptor::DescriptorPoolCreateFlags::UPDATE_AFTER_BIND) {
                vk::DescriptorPoolCreateFlags::UPDATE_AFTER_BIND
            } else {
                vk::DescriptorPoolCreateFlags::empty()
            };
        if flags.contains(gpu_descriptor::DescriptorPoolCreateFlags::FREE_DESCRIPTOR_SET) {
            vk_flags |= vk::DescriptorPoolCreateFlags::FREE_DESCRIPTOR_SET;
        }
        let vk_info = vk::DescriptorPoolCreateInfo::default()
            .max_sets(max_sets)
            .flags(vk_flags)
            .pool_sizes(&filtered_counts);

        match unsafe { self.raw.create_descriptor_pool(&vk_info, None) } {
            Ok(pool) => Ok(pool),
            Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => {
                Err(gpu_descriptor::CreatePoolError::OutOfHostMemory)
            }
            Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
                Err(gpu_descriptor::CreatePoolError::OutOfDeviceMemory)
            }
            Err(vk::Result::ERROR_FRAGMENTATION) => {
                Err(gpu_descriptor::CreatePoolError::Fragmentation)
            }
            // Any other result is outside the spec for this call.
            Err(err) => handle_unexpected(err),
        }
    }
330
    /// Destroys a pool created by `create_descriptor_pool`; any sets still
    /// allocated from it are implicitly freed by Vulkan.
    unsafe fn destroy_descriptor_pool(&self, pool: vk::DescriptorPool) {
        unsafe { self.raw.destroy_descriptor_pool(pool, None) }
    }
334
    /// Allocates one descriptor set per layout in `layouts` out of `pool`,
    /// appending the resulting handles to `sets`.
    ///
    /// The layouts are gathered into a `SmallVec` (inline up to 32 entries)
    /// so the Vulkan call receives a contiguous slice without a heap
    /// allocation in the common case.
    unsafe fn alloc_descriptor_sets<'a>(
        &self,
        pool: &mut vk::DescriptorPool,
        layouts: impl ExactSizeIterator<Item = &'a vk::DescriptorSetLayout>,
        sets: &mut impl Extend<vk::DescriptorSet>,
    ) -> Result<(), gpu_descriptor::DeviceAllocationError> {
        let result = unsafe {
            self.raw.allocate_descriptor_sets(
                &vk::DescriptorSetAllocateInfo::default()
                    .descriptor_pool(*pool)
                    .set_layouts(
                        &smallvec::SmallVec::<[vk::DescriptorSetLayout; 32]>::from_iter(
                            layouts.cloned(),
                        ),
                    ),
            )
        };

        match result {
            Ok(vk_sets) => {
                sets.extend(vk_sets);
                Ok(())
            }
            // ERROR_OUT_OF_POOL_MEMORY is reported as OutOfHostMemory here;
            // gpu-descriptor's error enum has no dedicated pool-memory
            // variant for this path.
            Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY)
            | Err(vk::Result::ERROR_OUT_OF_POOL_MEMORY) => {
                Err(gpu_descriptor::DeviceAllocationError::OutOfHostMemory)
            }
            Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
                Err(gpu_descriptor::DeviceAllocationError::OutOfDeviceMemory)
            }
            Err(vk::Result::ERROR_FRAGMENTED_POOL) => {
                Err(gpu_descriptor::DeviceAllocationError::FragmentedPool)
            }
            Err(err) => handle_unexpected(err),
        }
    }
371
372 unsafe fn dealloc_descriptor_sets<'a>(
373 &self,
374 pool: &mut vk::DescriptorPool,
375 sets: impl Iterator<Item = vk::DescriptorSet>,
376 ) {
377 let result = unsafe {
378 self.raw.free_descriptor_sets(
379 *pool,
380 &smallvec::SmallVec::<[vk::DescriptorSet; 32]>::from_iter(sets),
381 )
382 };
383 match result {
384 Ok(()) => {}
385 Err(err) => handle_unexpected(err),
386 }
387 }
388}
389
/// A shader stage compiled and packaged for pipeline creation.
struct CompiledStage {
    // Vulkan stage description; its `p_name` field points into
    // `_entry_point` below (set manually in `compile_stage`).
    create_info: vk::PipelineShaderStageCreateInfo<'static>,
    // Owns the NUL-terminated entry-point name referenced by
    // `create_info.p_name`; kept alive so that raw pointer stays valid.
    _entry_point: CString,
    // Shader module created on the fly when the source was a naga IR shader;
    // `None` when the caller supplied a prebuilt module. Presumably destroyed
    // by the pipeline-creation code once no longer needed — confirm at call
    // sites.
    temp_raw_module: Option<vk::ShaderModule>,
}
395
396impl super::Device {
397 pub unsafe fn texture_from_raw(
407 &self,
408 vk_image: vk::Image,
409 desc: &crate::TextureDescriptor,
410 drop_callback: Option<crate::DropCallback>,
411 memory: super::TextureMemory,
412 ) -> super::Texture {
413 let identity = self.shared.texture_identity_factory.next();
414 let drop_guard = crate::DropGuard::from_option(drop_callback);
415
416 if let Some(label) = desc.label {
417 unsafe { self.shared.set_object_name(vk_image, label) };
418 }
419
420 super::Texture {
421 raw: vk_image,
422 drop_guard,
423 memory,
424 format: desc.format,
425 copy_size: desc.copy_extent(),
426 identity,
427 }
428 }
429
430 fn find_memory_type_index(
431 &self,
432 type_bits_req: u32,
433 flags_req: vk::MemoryPropertyFlags,
434 ) -> Option<usize> {
435 let mem_properties = unsafe {
436 self.shared
437 .instance
438 .raw
439 .get_physical_device_memory_properties(self.shared.physical_device)
440 };
441
442 for (i, mem_ty) in mem_properties.memory_types_as_slice().iter().enumerate() {
444 let types_bits = 1 << i;
445 let is_required_memory_type = type_bits_req & types_bits != 0;
446 let has_required_properties = mem_ty.property_flags & flags_req == flags_req;
447 if is_required_memory_type && has_required_properties {
448 return Some(i);
449 }
450 }
451
452 None
453 }
454
    /// Creates a `vk::Image` for `desc` without binding any memory to it,
    /// returning the raw image together with its memory requirements.
    ///
    /// `external_memory_image_create_info`, when supplied, is chained into
    /// the create info so the image can be bound to imported memory.
    fn create_image_without_memory(
        &self,
        desc: &crate::TextureDescriptor,
        external_memory_image_create_info: Option<&mut vk::ExternalMemoryImageCreateInfo>,
    ) -> Result<ImageWithoutMemory, crate::DeviceError> {
        let copy_size = desc.copy_extent();

        let mut raw_flags = vk::ImageCreateFlags::empty();
        // 3D color targets need to be viewable as 2D array slices.
        if desc.dimension == wgt::TextureDimension::D3
            && desc.usage.contains(wgt::TextureUses::COLOR_TARGET)
        {
            raw_flags |= vk::ImageCreateFlags::TYPE_2D_ARRAY_COMPATIBLE;
        }
        if desc.is_cube_compatible() {
            raw_flags |= vk::ImageCreateFlags::CUBE_COMPATIBLE;
        }

        let original_format = self.shared.private_caps.map_texture_format(desc.format);
        let mut vk_view_formats = vec![];
        if !desc.view_formats.is_empty() {
            // Reinterpreting views require MUTABLE_FORMAT; when the device
            // supports VK_KHR_image_format_list we also narrow the set of
            // allowed view formats for better driver optimization.
            raw_flags |= vk::ImageCreateFlags::MUTABLE_FORMAT;

            if self.shared.private_caps.image_format_list {
                vk_view_formats = desc
                    .view_formats
                    .iter()
                    .map(|f| self.shared.private_caps.map_texture_format(*f))
                    .collect();
                vk_view_formats.push(original_format)
            }
        }
        if desc.format.is_multi_planar_format() {
            raw_flags |=
                vk::ImageCreateFlags::MUTABLE_FORMAT | vk::ImageCreateFlags::EXTENDED_USAGE;
        }

        let mut vk_info = vk::ImageCreateInfo::default()
            .flags(raw_flags)
            .image_type(conv::map_texture_dimension(desc.dimension))
            .format(original_format)
            .extent(conv::map_copy_extent(&copy_size))
            .mip_levels(desc.mip_level_count)
            .array_layers(desc.array_layer_count())
            .samples(vk::SampleCountFlags::from_raw(desc.sample_count))
            .tiling(vk::ImageTiling::OPTIMAL)
            .usage(conv::map_texture_usage(desc.usage))
            .sharing_mode(vk::SharingMode::EXCLUSIVE)
            .initial_layout(vk::ImageLayout::UNDEFINED);

        // Declared before the push so it outlives `vk_info`'s p_next chain.
        let mut format_list_info = vk::ImageFormatListCreateInfo::default();
        if !vk_view_formats.is_empty() {
            format_list_info = format_list_info.view_formats(&vk_view_formats);
            vk_info = vk_info.push_next(&mut format_list_info);
        }

        if let Some(ext_info) = external_memory_image_create_info {
            vk_info = vk_info.push_next(ext_info);
        }

        let raw = unsafe { self.shared.raw.create_image(&vk_info, None) }.map_err(map_err)?;
        fn map_err(err: vk::Result) -> crate::DeviceError {
            super::map_host_device_oom_and_ioca_err(err)
        }
        let mut req = unsafe { self.shared.raw.get_image_memory_requirements(raw) };

        // Prefer lazily-allocated memory for transient attachments when such
        // a memory type exists; restrict the type bits to it.
        if desc.usage.contains(wgt::TextureUses::TRANSIENT) {
            let mem_type_index = self.find_memory_type_index(
                req.memory_type_bits,
                vk::MemoryPropertyFlags::LAZILY_ALLOCATED,
            );
            if let Some(mem_type_index) = mem_type_index {
                req.memory_type_bits = 1 << mem_type_index;
            }
        }

        Ok(ImageWithoutMemory {
            raw,
            requirements: req,
        })
    }
537
    #[cfg(windows)]
    /// Imports a D3D11 shared-handle texture as a Vulkan image backed by
    /// dedicated, imported device memory.
    ///
    /// # Safety
    ///
    /// `d3d11_shared_handle` must be a valid shared handle for a texture
    /// compatible with `desc`.
    ///
    /// # Errors
    ///
    /// Fails if `VK_KHR_external_memory_win32` is unavailable, if no
    /// device-local memory type fits the image, or on allocation/bind errors.
    pub unsafe fn texture_from_d3d11_shared_handle(
        &self,
        d3d11_shared_handle: windows::Win32::Foundation::HANDLE,
        desc: &crate::TextureDescriptor,
    ) -> Result<super::Texture, crate::DeviceError> {
        if !self
            .shared
            .features
            .contains(wgt::Features::VULKAN_EXTERNAL_MEMORY_WIN32)
        {
            log::error!("Vulkan driver does not support VK_KHR_external_memory_win32");
            return Err(crate::DeviceError::Unexpected);
        }

        let mut external_memory_image_info = vk::ExternalMemoryImageCreateInfo::default()
            .handle_types(vk::ExternalMemoryHandleTypeFlags::D3D11_TEXTURE);

        let image =
            self.create_image_without_memory(desc, Some(&mut external_memory_image_info))?;

        // Imported D3D11 memory must be dedicated to this image.
        let mut dedicated_allocate_info =
            vk::MemoryDedicatedAllocateInfo::default().image(image.raw);

        let mut import_memory_info = vk::ImportMemoryWin32HandleInfoKHR::default()
            .handle_type(vk::ExternalMemoryHandleTypeFlags::D3D11_TEXTURE)
            .handle(d3d11_shared_handle.0 as _);
        // Splice the dedicated-allocation struct into the chain by writing
        // `p_next` directly; ash's typed `push_next` does not cover this
        // extends-relationship here.
        #[allow(clippy::unnecessary_mut_passed)]
        {
            import_memory_info.p_next = <*const _>::cast(&mut dedicated_allocate_info);
        }

        let mem_type_index = self
            .find_memory_type_index(
                image.requirements.memory_type_bits,
                vk::MemoryPropertyFlags::DEVICE_LOCAL,
            )
            .ok_or(crate::DeviceError::Unexpected)?;

        let memory_allocate_info = vk::MemoryAllocateInfo::default()
            .allocation_size(image.requirements.size)
            .memory_type_index(mem_type_index as _)
            .push_next(&mut import_memory_info);
        let memory = unsafe { self.shared.raw.allocate_memory(&memory_allocate_info, None) }
            .map_err(super::map_host_device_oom_err)?;

        unsafe { self.shared.raw.bind_image_memory(image.raw, memory, 0) }
            .map_err(super::map_host_device_oom_err)?;

        Ok(unsafe {
            self.texture_from_raw(
                image.raw,
                desc,
                None,
                super::TextureMemory::Dedicated(memory),
            )
        })
    }
604
605 fn create_shader_module_impl(
606 &self,
607 spv: &[u32],
608 label: &crate::Label<'_>,
609 ) -> Result<vk::ShaderModule, crate::DeviceError> {
610 let vk_info = vk::ShaderModuleCreateInfo::default()
611 .flags(vk::ShaderModuleCreateFlags::empty())
612 .code(spv);
613
614 let raw = unsafe {
615 profiling::scope!("vkCreateShaderModule");
616 self.shared
617 .raw
618 .create_shader_module(&vk_info, None)
619 .map_err(map_err)?
620 };
621 fn map_err(err: vk::Result) -> crate::DeviceError {
622 super::map_host_device_oom_err(err)
625 }
626
627 if let Some(label) = label {
628 unsafe { self.shared.set_object_name(raw, label) };
629 }
630
631 Ok(raw)
632 }
633
    /// Turns a wgpu-hal programmable stage into a Vulkan pipeline stage.
    ///
    /// Prebuilt (`ShaderModule::Raw`) modules are used as-is; naga IR
    /// shaders are compiled to SPIR-V here, producing a temporary module
    /// that is returned in [`CompiledStage::temp_raw_module`].
    ///
    /// # Errors
    ///
    /// Returns [`crate::PipelineError`] on override-processing or SPIR-V
    /// generation failure.
    fn compile_stage(
        &self,
        stage: &crate::ProgrammableStage<super::ShaderModule>,
        naga_stage: naga::ShaderStage,
        binding_map: &naga::back::spv::BindingMap,
    ) -> Result<CompiledStage, crate::PipelineError> {
        let stage_flags = crate::auxil::map_naga_stage(naga_stage);
        let vk_module = match *stage.module {
            super::ShaderModule::Raw(raw) => raw,
            super::ShaderModule::Intermediate {
                ref naga_shader,
                runtime_checks,
            } => {
                let pipeline_options = naga::back::spv::PipelineOptions {
                    entry_point: stage.entry_point.to_owned(),
                    shader_stage: naga_stage,
                };
                // Only clone the (large) baseline options when any stage-
                // specific tweak deviates from the device defaults.
                let needs_temp_options = !runtime_checks.bounds_checks
                    || !runtime_checks.force_loop_bounding
                    || !runtime_checks.ray_query_initialization_tracking
                    || !binding_map.is_empty()
                    || naga_shader.debug_source.is_some()
                    || !stage.zero_initialize_workgroup_memory
                    || !runtime_checks.task_shader_dispatch_tracking
                    || !runtime_checks.mesh_shader_primitive_indices_clamp;

                let mut temp_options;
                let options = if needs_temp_options {
                    temp_options = self.naga_options.clone();
                    if !runtime_checks.bounds_checks {
                        temp_options.bounds_check_policies = naga::proc::BoundsCheckPolicies {
                            index: naga::proc::BoundsCheckPolicy::Unchecked,
                            buffer: naga::proc::BoundsCheckPolicy::Unchecked,
                            image_load: naga::proc::BoundsCheckPolicy::Unchecked,
                            binding_array: naga::proc::BoundsCheckPolicy::Unchecked,
                        };
                    }
                    if !runtime_checks.force_loop_bounding {
                        temp_options.force_loop_bounding = false;
                    }
                    if !runtime_checks.ray_query_initialization_tracking {
                        temp_options.ray_query_initialization_tracking = false;
                    }
                    if !binding_map.is_empty() {
                        temp_options.binding_map = binding_map.clone();
                    }

                    // Embed source-level debug info when the shader carried it.
                    if let Some(ref debug) = naga_shader.debug_source {
                        temp_options.debug_info = Some(naga::back::spv::DebugInfo {
                            source_code: &debug.source_code,
                            file_name: debug.file_name.as_ref(),
                            language: naga::back::spv::SourceLanguage::WGSL,
                        })
                    }
                    if !stage.zero_initialize_workgroup_memory {
                        temp_options.zero_initialize_workgroup_memory =
                            naga::back::spv::ZeroInitializeWorkgroupMemoryMode::None;
                    }
                    if !runtime_checks.task_shader_dispatch_tracking {
                        temp_options.task_dispatch_limits = None;
                    }
                    temp_options.mesh_shader_primitive_indices_clamp =
                        runtime_checks.mesh_shader_primitive_indices_clamp;

                    &temp_options
                } else {
                    &self.naga_options
                };

                // Resolve pipeline-overridable constants for this entry point.
                let (module, info) = naga::back::pipeline_constants::process_overrides(
                    &naga_shader.module,
                    &naga_shader.info,
                    Some((naga_stage, stage.entry_point)),
                    stage.constants,
                )
                .map_err(|e| {
                    crate::PipelineError::PipelineConstants(stage_flags, format!("{e}"))
                })?;

                let spv = {
                    profiling::scope!("naga::spv::write_vec");
                    naga::back::spv::write_vec(&module, &info, options, Some(&pipeline_options))
                }
                .map_err(|e| crate::PipelineError::Linkage(stage_flags, format!("{e}")))?;
                self.create_shader_module_impl(&spv, &None)?
            }
        };

        let mut flags = vk::PipelineShaderStageCreateFlags::empty();
        if self.shared.features.contains(wgt::Features::SUBGROUP) {
            flags |= vk::PipelineShaderStageCreateFlags::ALLOW_VARYING_SUBGROUP_SIZE
        }

        let entry_point = CString::new(stage.entry_point).unwrap();
        let mut create_info = vk::PipelineShaderStageCreateInfo::default()
            .flags(flags)
            .stage(conv::map_shader_stage(stage_flags))
            .module(vk_module);

        // Set the name pointer manually; `entry_point` is kept alive inside
        // the returned CompiledStage so this pointer remains valid.
        create_info.p_name = entry_point.as_ptr();

        Ok(CompiledStage {
            create_info,
            _entry_point: entry_point,
            // Only naga-compiled modules are temporary and owned by us.
            temp_raw_module: match *stage.module {
                super::ShaderModule::Raw(_) => None,
                super::ShaderModule::Intermediate { .. } => Some(vk_module),
            },
        })
    }
745
    /// Returns the index of the queue family this device's queue was created from.
    pub fn queue_family_index(&self) -> u32 {
        self.shared.family_index
    }
754
    /// Returns the index of the queue within its family.
    pub fn queue_index(&self) -> u32 {
        self.shared.queue_index
    }
758
    /// Returns the underlying `ash` logical device.
    pub fn raw_device(&self) -> &ash::Device {
        &self.shared.raw
    }
762
    /// Returns the raw physical device this logical device was created on.
    pub fn raw_physical_device(&self) -> vk::PhysicalDevice {
        self.shared.physical_device
    }
766
    /// Returns the raw Vulkan queue handle.
    pub fn raw_queue(&self) -> vk::Queue {
        self.shared.raw_queue
    }
770
    /// Returns the device extensions that were enabled at device creation.
    pub fn enabled_device_extensions(&self) -> &[&'static CStr] {
        &self.shared.enabled_extensions
    }
774
    /// Returns the shared instance state this device was created from.
    pub fn shared_instance(&self) -> &super::InstanceShared {
        &self.shared.instance
    }
778
    /// Preflight check: returns `OutOfMemory` if allocating `size` bytes
    /// would push the relevant memory heaps past the configured budget
    /// threshold (a percentage of the heap budget).
    ///
    /// A no-op (returns `Ok`) when no threshold is configured or when
    /// `VK_EXT_memory_budget` is not enabled.
    fn error_if_would_oom_on_resource_allocation(
        &self,
        needs_host_access: bool,
        size: u64,
    ) -> Result<(), crate::DeviceError> {
        let Some(threshold) = self
            .shared
            .instance
            .memory_budget_thresholds
            .for_resource_creation
        else {
            return Ok(());
        };

        if !self
            .shared
            .enabled_extensions
            .contains(&ext::memory_budget::NAME)
        {
            return Ok(());
        }

        let get_physical_device_properties = self
            .shared
            .instance
            .get_physical_device_properties
            .as_ref()
            .unwrap();

        // Query current per-heap usage and budget via VK_EXT_memory_budget.
        let mut memory_budget_properties = vk::PhysicalDeviceMemoryBudgetPropertiesEXT::default();

        let mut memory_properties =
            vk::PhysicalDeviceMemoryProperties2::default().push_next(&mut memory_budget_properties);

        unsafe {
            get_physical_device_properties.get_physical_device_memory_properties2(
                self.shared.physical_device,
                &mut memory_properties,
            );
        }

        // Classify heaps by whether any of their memory types is
        // host-visible and/or device-local.
        let mut host_visible_heaps = [false; vk::MAX_MEMORY_HEAPS];
        let mut device_local_heaps = [false; vk::MAX_MEMORY_HEAPS];

        let memory_properties = memory_properties.memory_properties;

        for i in 0..memory_properties.memory_type_count {
            let memory_type = memory_properties.memory_types[i as usize];
            let flags = memory_type.property_flags;

            // Lazily-allocated and protected memory types are not candidates
            // for regular resource allocations; skip their heaps.
            if flags.intersects(
                vk::MemoryPropertyFlags::LAZILY_ALLOCATED | vk::MemoryPropertyFlags::PROTECTED,
            ) {
                continue;
            }

            if flags.contains(vk::MemoryPropertyFlags::HOST_VISIBLE) {
                host_visible_heaps[memory_type.heap_index as usize] = true;
            }

            if flags.contains(vk::MemoryPropertyFlags::DEVICE_LOCAL) {
                device_local_heaps[memory_type.heap_index as usize] = true;
            }
        }

        let heaps = if needs_host_access {
            host_visible_heaps
        } else {
            device_local_heaps
        };

        for (i, check) in heaps.iter().enumerate() {
            if !check {
                continue;
            }

            let heap_usage = memory_budget_properties.heap_usage[i];
            let heap_budget = memory_budget_properties.heap_budget[i];

            // `threshold` is a percentage; divide before multiplying to
            // avoid overflowing u64 on large budgets.
            if heap_usage + size >= heap_budget / 100 * threshold as u64 {
                return Err(crate::DeviceError::OutOfMemory);
            }
        }

        Ok(())
    }
869}
870
871impl crate::Device for super::Device {
872 type A = super::Api;
873
    /// Creates a Vulkan buffer and binds freshly allocated memory to it.
    ///
    /// On any failure after `vkCreateBuffer` succeeds, the raw buffer is
    /// destroyed before the error is returned so no handle leaks.
    unsafe fn create_buffer(
        &self,
        desc: &crate::BufferDescriptor,
    ) -> Result<super::Buffer, crate::DeviceError> {
        let vk_info = vk::BufferCreateInfo::default()
            .size(desc.size)
            .usage(conv::map_buffer_usage(desc.usage))
            .sharing_mode(vk::SharingMode::EXCLUSIVE);

        let raw = unsafe {
            self.shared
                .raw
                .create_buffer(&vk_info, None)
                .map_err(super::map_host_device_oom_and_ioca_err)?
        };

        let mut requirements = unsafe { self.shared.raw.get_buffer_memory_requirements(raw) };

        let is_cpu_read = desc.usage.contains(wgt::BufferUses::MAP_READ);
        let is_cpu_write = desc.usage.contains(wgt::BufferUses::MAP_WRITE);

        // Choose an allocation location from the mapping pattern; read-only
        // mapping prefers GpuToCpu, anything writable prefers CpuToGpu.
        let location = match (is_cpu_read, is_cpu_write) {
            (true, true) => gpu_allocator::MemoryLocation::CpuToGpu,
            (true, false) => gpu_allocator::MemoryLocation::GpuToCpu,
            (false, true) => gpu_allocator::MemoryLocation::CpuToGpu,
            (false, false) => gpu_allocator::MemoryLocation::GpuOnly,
        };

        let needs_host_access = is_cpu_read || is_cpu_write;

        // Budget preflight; destroy the raw buffer on rejection.
        self.error_if_would_oom_on_resource_allocation(needs_host_access, requirements.size)
            .inspect_err(|_| {
                unsafe { self.shared.raw.destroy_buffer(raw, None) };
            })?;

        let name = desc.label.unwrap_or("Unlabeled buffer");

        // Scratch buffers for acceleration structure builds may need a
        // stricter alignment than the driver reported.
        if desc
            .usage
            .contains(wgt::BufferUses::ACCELERATION_STRUCTURE_SCRATCH)
        {
            requirements.alignment = requirements
                .alignment
                .max(self.shared.private_caps.scratch_buffer_alignment as u64);
        }

        let allocation = self
            .mem_allocator
            .lock()
            .allocate(&gpu_allocator::vulkan::AllocationCreateDesc {
                name,
                requirements: vk::MemoryRequirements {
                    // Restrict to memory types gpu-allocator can manage.
                    memory_type_bits: requirements.memory_type_bits & self.valid_ash_memory_types,
                    ..requirements
                },
                location,
                // Buffers use linear tiling.
                linear: true,
                allocation_scheme: gpu_allocator::vulkan::AllocationScheme::GpuAllocatorManaged,
            })
            .inspect_err(|_| {
                unsafe { self.shared.raw.destroy_buffer(raw, None) };
            })?;

        unsafe {
            self.shared
                .raw
                .bind_buffer_memory(raw, allocation.memory(), allocation.offset())
        }
        .map_err(super::map_host_device_oom_and_ioca_err)
        .inspect_err(|_| {
            unsafe { self.shared.raw.destroy_buffer(raw, None) };
        })?;

        if let Some(label) = desc.label {
            unsafe { self.shared.set_object_name(raw, label) };
        }

        self.counters.buffer_memory.add(allocation.size() as isize);
        self.counters.buffers.add(1);

        Ok(super::Buffer {
            raw,
            allocation: Some(Mutex::new(super::BufferMemoryBacking::Managed(allocation))),
        })
    }
    /// Destroys the buffer handle and releases its backing memory, updating
    /// the buffer and buffer-memory counters.
    unsafe fn destroy_buffer(&self, buffer: super::Buffer) {
        unsafe { self.shared.raw.destroy_buffer(buffer.raw, None) };
        if let Some(allocation) = buffer.allocation {
            let allocation = allocation.into_inner();
            self.counters.buffer_memory.sub(allocation.size() as isize);
            match allocation {
                // Memory managed by gpu-allocator: return it to the pool.
                super::BufferMemoryBacking::Managed(allocation) => {
                    let result = self.mem_allocator.lock().free(allocation);
                    if let Err(err) = result {
                        log::warn!("Failed to free buffer allocation: {err}");
                    }
                }
                // Memory we allocated directly from Vulkan.
                super::BufferMemoryBacking::VulkanMemory { memory, .. } => unsafe {
                    self.shared.raw.free_memory(memory, None);
                },
            }
        }

        self.counters.buffers.sub(1);
    }
980
    /// Accounts for an externally created buffer in the buffer counter.
    unsafe fn add_raw_buffer(&self, _buffer: &super::Buffer) {
        self.counters.buffers.add(1);
    }
984
    /// Returns a host pointer to `range` of the buffer's memory.
    ///
    /// Only works for gpu-allocator-managed allocations; externally backed
    /// buffers are a usage error. The `unwrap` on `mapped_ptr` relies on the
    /// allocation being persistently mapped — presumably guaranteed by the
    /// host-visible location chosen at creation; confirm against
    /// `create_buffer`.
    unsafe fn map_buffer(
        &self,
        buffer: &super::Buffer,
        range: crate::MemoryRange,
    ) -> Result<crate::BufferMapping, crate::DeviceError> {
        if let Some(ref allocation) = buffer.allocation {
            let mut allocation = allocation.lock();
            if let super::BufferMemoryBacking::Managed(ref mut allocation) = *allocation {
                // Coherent memory needs no explicit flush/invalidate.
                let is_coherent = allocation
                    .memory_properties()
                    .contains(vk::MemoryPropertyFlags::HOST_COHERENT);
                Ok(crate::BufferMapping {
                    ptr: unsafe {
                        allocation
                            .mapped_ptr()
                            .unwrap()
                            .cast()
                            .offset(range.start as isize)
                    },
                    is_coherent,
                })
            } else {
                crate::hal_usage_error("tried to map externally created buffer")
            }
        } else {
            crate::hal_usage_error("tried to map external buffer")
        }
    }
1013
1014 unsafe fn unmap_buffer(&self, buffer: &super::Buffer) {
1015 if buffer.allocation.is_some() {
1016 } else {
1018 crate::hal_usage_error("tried to unmap external buffer")
1019 }
1020 }
1021
    /// Flushes host writes in `ranges` so they become visible to the device.
    /// No-op for buffers without a managed allocation.
    unsafe fn flush_mapped_ranges<I>(&self, buffer: &super::Buffer, ranges: I)
    where
        I: Iterator<Item = crate::MemoryRange>,
    {
        if let Some(vk_ranges) = self.shared.make_memory_ranges(buffer, ranges) {
            unsafe {
                self.shared
                    .raw
                    .flush_mapped_memory_ranges(
                        // SmallVec keeps up to 32 ranges off the heap.
                        &smallvec::SmallVec::<[vk::MappedMemoryRange; 32]>::from_iter(vk_ranges),
                    )
            }
            .unwrap();
        }
    }
    /// Invalidates host caches for `ranges` so device writes become visible
    /// to the host. No-op for buffers without a managed allocation.
    unsafe fn invalidate_mapped_ranges<I>(&self, buffer: &super::Buffer, ranges: I)
    where
        I: Iterator<Item = crate::MemoryRange>,
    {
        if let Some(vk_ranges) = self.shared.make_memory_ranges(buffer, ranges) {
            unsafe {
                self.shared
                    .raw
                    .invalidate_mapped_memory_ranges(&smallvec::SmallVec::<
                        [vk::MappedMemoryRange; 32],
                    >::from_iter(vk_ranges))
            }
            .unwrap();
        }
    }
1052
    /// Creates a texture: image creation, budget preflight, memory
    /// allocation, and binding. The raw image is destroyed on any failure
    /// after creation so no handle leaks.
    unsafe fn create_texture(
        &self,
        desc: &crate::TextureDescriptor,
    ) -> Result<super::Texture, crate::DeviceError> {
        let image = self.create_image_without_memory(desc, None)?;

        self.error_if_would_oom_on_resource_allocation(false, image.requirements.size)
            .inspect_err(|_| {
                unsafe { self.shared.raw.destroy_image(image.raw, None) };
            })?;

        let name = desc.label.unwrap_or("Unlabeled texture");

        let allocation = self
            .mem_allocator
            .lock()
            .allocate(&gpu_allocator::vulkan::AllocationCreateDesc {
                name,
                requirements: vk::MemoryRequirements {
                    // Restrict to memory types gpu-allocator can manage.
                    memory_type_bits: image.requirements.memory_type_bits
                        & self.valid_ash_memory_types,
                    ..image.requirements
                },
                location: gpu_allocator::MemoryLocation::GpuOnly,
                // Optimal-tiling images are non-linear resources.
                linear: false,
                allocation_scheme: gpu_allocator::vulkan::AllocationScheme::GpuAllocatorManaged,
            })
            .inspect_err(|_| {
                unsafe { self.shared.raw.destroy_image(image.raw, None) };
            })?;

        self.counters.texture_memory.add(allocation.size() as isize);

        unsafe {
            self.shared
                .raw
                .bind_image_memory(image.raw, allocation.memory(), allocation.offset())
        }
        .map_err(super::map_host_device_oom_err)
        .inspect_err(|_| {
            unsafe { self.shared.raw.destroy_image(image.raw, None) };
        })?;

        Ok(unsafe {
            self.texture_from_raw(
                image.raw,
                desc,
                None,
                super::TextureMemory::Allocation(allocation),
            )
        })
    }
1105
    /// Destroys a texture and releases its memory according to how it is
    /// backed, then decrements the texture counter.
    unsafe fn destroy_texture(&self, texture: super::Texture) {
        // A present drop guard means the image is owned externally and must
        // not be destroyed by us.
        if texture.drop_guard.is_none() {
            unsafe { self.shared.raw.destroy_image(texture.raw, None) };
        }

        match texture.memory {
            // gpu-allocator-managed memory: return it to the pool.
            super::TextureMemory::Allocation(allocation) => {
                self.counters.texture_memory.sub(allocation.size() as isize);
                let result = self.mem_allocator.lock().free(allocation);
                if let Err(err) = result {
                    log::warn!("Failed to free texture allocation: {err}");
                }
            }
            // Dedicated allocation (e.g. imported D3D11 memory).
            super::TextureMemory::Dedicated(memory) => unsafe {
                self.shared.raw.free_memory(memory, None);
            },
            // Memory owned entirely outside of wgpu-hal.
            super::TextureMemory::External => {}
        }

        self.counters.textures.sub(1);
    }
1127
    /// Accounts for an externally created texture in the texture counter.
    unsafe fn add_raw_texture(&self, _texture: &super::Texture) {
        self.counters.textures.add(1);
    }
1131
    /// Creates an image view over `texture` as described by `desc`.
    unsafe fn create_texture_view(
        &self,
        texture: &super::Texture,
        desc: &crate::TextureViewDescriptor,
    ) -> Result<super::TextureView, crate::DeviceError> {
        let subresource_range = conv::map_subresource_range(&desc.range, texture.format);
        let raw_format = self.shared.private_caps.map_texture_format(desc.format);
        let mut vk_info = vk::ImageViewCreateInfo::default()
            .flags(vk::ImageViewCreateFlags::empty())
            .image(texture.raw)
            .view_type(conv::map_view_dimension(desc.dimension))
            .format(raw_format)
            .subresource_range(subresource_range);
        // The resolved layer count must be non-zero by this point.
        let layers =
            NonZeroU32::new(subresource_range.layer_count).expect("Unexpected zero layer count");

        // Narrow the view's allowed usage when the device supports
        // VK_KHR_maintenance2-style view usage.
        let mut image_view_info;
        if self.shared.private_caps.image_view_usage && !desc.usage.is_empty() {
            image_view_info =
                vk::ImageViewUsageCreateInfo::default().usage(conv::map_texture_usage(desc.usage));
            vk_info = vk_info.push_next(&mut image_view_info);
        }

        let raw = unsafe { self.shared.raw.create_image_view(&vk_info, None) }
            .map_err(super::map_host_device_oom_and_ioca_err)?;

        if let Some(label) = desc.label {
            unsafe { self.shared.set_object_name(raw, label) };
        }

        let identity = self.shared.texture_view_identity_factory.next();

        self.counters.texture_views.add(1);

        Ok(super::TextureView {
            raw_texture: texture.raw,
            raw,
            _layers: layers,
            format: desc.format,
            raw_format,
            base_mip_level: desc.range.base_mip_level,
            dimension: desc.dimension,
            texture_identity: texture.identity,
            view_identity: identity,
        })
    }
1178 unsafe fn destroy_texture_view(&self, view: super::TextureView) {
1179 unsafe { self.shared.raw.destroy_image_view(view.raw, None) };
1180
1181 self.counters.texture_views.sub(1);
1182 }
1183
1184 unsafe fn create_sampler(
1185 &self,
1186 desc: &crate::SamplerDescriptor,
1187 ) -> Result<super::Sampler, crate::DeviceError> {
1188 let mut create_info = vk::SamplerCreateInfo::default()
1189 .flags(vk::SamplerCreateFlags::empty())
1190 .mag_filter(conv::map_filter_mode(desc.mag_filter))
1191 .min_filter(conv::map_filter_mode(desc.min_filter))
1192 .mipmap_mode(conv::map_mip_filter_mode(desc.mipmap_filter))
1193 .address_mode_u(conv::map_address_mode(desc.address_modes[0]))
1194 .address_mode_v(conv::map_address_mode(desc.address_modes[1]))
1195 .address_mode_w(conv::map_address_mode(desc.address_modes[2]))
1196 .min_lod(desc.lod_clamp.start)
1197 .max_lod(desc.lod_clamp.end);
1198
1199 if let Some(fun) = desc.compare {
1200 create_info = create_info
1201 .compare_enable(true)
1202 .compare_op(conv::map_comparison(fun));
1203 }
1204
1205 if desc.anisotropy_clamp != 1 {
1206 create_info = create_info
1209 .anisotropy_enable(true)
1210 .max_anisotropy(desc.anisotropy_clamp as f32);
1211 }
1212
1213 if let Some(color) = desc.border_color {
1214 create_info = create_info.border_color(conv::map_border_color(color));
1215 }
1216
1217 let mut sampler_cache_guard = self.shared.sampler_cache.lock();
1218
1219 let raw = sampler_cache_guard.create_sampler(&self.shared.raw, create_info)?;
1220
1221 if let Some(label) = desc.label {
1225 unsafe { self.shared.set_object_name(raw, label) };
1228 }
1229
1230 drop(sampler_cache_guard);
1231
1232 self.counters.samplers.add(1);
1233
1234 Ok(super::Sampler { raw, create_info })
1235 }
1236 unsafe fn destroy_sampler(&self, sampler: super::Sampler) {
1237 self.shared.sampler_cache.lock().destroy_sampler(
1238 &self.shared.raw,
1239 sampler.create_info,
1240 sampler.raw,
1241 );
1242
1243 self.counters.samplers.sub(1);
1244 }
1245
1246 unsafe fn create_command_encoder(
1247 &self,
1248 desc: &crate::CommandEncoderDescriptor<super::Queue>,
1249 ) -> Result<super::CommandEncoder, crate::DeviceError> {
1250 let vk_info = vk::CommandPoolCreateInfo::default()
1251 .queue_family_index(desc.queue.family_index)
1252 .flags(vk::CommandPoolCreateFlags::TRANSIENT);
1253
1254 let raw = unsafe {
1255 self.shared
1256 .raw
1257 .create_command_pool(&vk_info, None)
1258 .map_err(super::map_host_device_oom_err)?
1259 };
1260
1261 self.counters.command_encoders.add(1);
1262
1263 Ok(super::CommandEncoder {
1264 raw,
1265 device: Arc::clone(&self.shared),
1266 active: vk::CommandBuffer::null(),
1267 bind_point: vk::PipelineBindPoint::default(),
1268 temp: super::Temp::default(),
1269 free: Vec::new(),
1270 discarded: Vec::new(),
1271 rpass_debug_marker_active: false,
1272 end_of_pass_timer_query: None,
1273 framebuffers: Default::default(),
1274 temp_texture_views: Default::default(),
1275 counters: Arc::clone(&self.counters),
1276 current_pipeline_is_multiview: false,
1277 })
1278 }
1279
    /// Creates a `VkDescriptorSetLayout` for `desc`, along with bookkeeping
    /// needed for descriptor-pool sizing and shader binding remapping.
    unsafe fn create_bind_group_layout(
        &self,
        desc: &crate::BindGroupLayoutDescriptor,
    ) -> Result<super::BindGroupLayout, crate::DeviceError> {
        // Vulkan binding slots are assigned densely in declaration order via
        // `next_binding`; `binding_map` records the wgpu binding number ->
        // Vulkan binding slot correspondence for later shader compilation.
        let mut vk_bindings = Vec::new();
        let mut binding_flags = Vec::new();
        let mut binding_map = Vec::new();
        let mut next_binding = 0;
        let mut contains_binding_arrays = false;
        // Running totals per descriptor type, used to size descriptor pools.
        let mut desc_count = gpu_descriptor::DescriptorTotalCount::default();
        for entry in desc.entries {
            // An entry with a `count` is a binding array.
            if entry.count.is_some() {
                contains_binding_arrays = true;
            }

            let partially_bound = desc
                .flags
                .contains(crate::BindGroupLayoutFlags::PARTIALLY_BOUND);
            let mut flags = vk::DescriptorBindingFlags::empty();
            // Binding arrays may be partially bound (when the layout opts in)
            // and always get UPDATE_AFTER_BIND.
            if partially_bound && entry.count.is_some() {
                flags |= vk::DescriptorBindingFlags::PARTIALLY_BOUND;
            }
            if entry.count.is_some() {
                flags |= vk::DescriptorBindingFlags::UPDATE_AFTER_BIND;
            }

            let count = entry.count.map_or(1, |c| c.get());
            match entry.ty {
                wgt::BindingType::ExternalTexture => unimplemented!(),
                _ => {
                    vk_bindings.push(vk::DescriptorSetLayoutBinding {
                        binding: next_binding,
                        descriptor_type: conv::map_binding_type(entry.ty),
                        descriptor_count: count,
                        stage_flags: conv::map_shader_stage(entry.visibility),
                        p_immutable_samplers: ptr::null(),
                        _marker: Default::default(),
                    });
                    binding_flags.push(flags);
                    binding_map.push((
                        entry.binding,
                        super::BindingInfo {
                            binding: next_binding,
                            binding_array_size: entry.count,
                        },
                    ));
                    next_binding += 1;
                }
            }

            // Tally descriptors by pool bucket for the descriptor allocator.
            match entry.ty {
                wgt::BindingType::Buffer {
                    ty,
                    has_dynamic_offset,
                    ..
                } => match ty {
                    wgt::BufferBindingType::Uniform => {
                        if has_dynamic_offset {
                            desc_count.uniform_buffer_dynamic += count;
                        } else {
                            desc_count.uniform_buffer += count;
                        }
                    }
                    wgt::BufferBindingType::Storage { .. } => {
                        if has_dynamic_offset {
                            desc_count.storage_buffer_dynamic += count;
                        } else {
                            desc_count.storage_buffer += count;
                        }
                    }
                },
                wgt::BindingType::Sampler { .. } => {
                    desc_count.sampler += count;
                }
                wgt::BindingType::Texture { .. } => {
                    desc_count.sampled_image += count;
                }
                wgt::BindingType::StorageTexture { .. } => {
                    desc_count.storage_image += count;
                }
                wgt::BindingType::AccelerationStructure { .. } => {
                    desc_count.acceleration_structure += count;
                }
                wgt::BindingType::ExternalTexture => unimplemented!(),
            }
        }

        let vk_info = vk::DescriptorSetLayoutCreateInfo::default()
            .bindings(&vk_bindings)
            .flags(if contains_binding_arrays {
                vk::DescriptorSetLayoutCreateFlags::UPDATE_AFTER_BIND_POOL
            } else {
                vk::DescriptorSetLayoutCreateFlags::empty()
            });

        // Must outlive `vk_info`, which chains a pointer to it via pNext.
        let mut binding_flag_info =
            vk::DescriptorSetLayoutBindingFlagsCreateInfo::default().binding_flags(&binding_flags);

        let vk_info = vk_info.push_next(&mut binding_flag_info);

        let raw = unsafe {
            self.shared
                .raw
                .create_descriptor_set_layout(&vk_info, None)
                .map_err(super::map_host_device_oom_err)?
        };

        if let Some(label) = desc.label {
            unsafe { self.shared.set_object_name(raw, label) };
        }

        self.counters.bind_group_layouts.add(1);

        Ok(super::BindGroupLayout {
            raw,
            desc_count,
            entries: desc.entries.into(),
            binding_map,
            contains_binding_arrays,
        })
    }
1405 unsafe fn destroy_bind_group_layout(&self, bg_layout: super::BindGroupLayout) {
1406 unsafe {
1407 self.shared
1408 .raw
1409 .destroy_descriptor_set_layout(bg_layout.raw, None)
1410 };
1411
1412 self.counters.bind_group_layouts.sub(1);
1413 }
1414
1415 unsafe fn create_pipeline_layout(
1416 &self,
1417 desc: &crate::PipelineLayoutDescriptor<super::BindGroupLayout>,
1418 ) -> Result<super::PipelineLayout, crate::DeviceError> {
1419 let vk_set_layouts = desc
1421 .bind_group_layouts
1422 .iter()
1423 .map(|bgl| match bgl {
1424 Some(bgl) => bgl.raw,
1425 None => {
1426 self.shared.empty_descriptor_set_layout
1434 }
1435 })
1436 .collect::<Vec<_>>();
1437 let vk_immediates_ranges: Option<vk::PushConstantRange> = if desc.immediate_size != 0 {
1438 Some(vk::PushConstantRange {
1439 stage_flags: vk::ShaderStageFlags::ALL,
1440 offset: 0,
1441 size: desc.immediate_size,
1442 })
1443 } else {
1444 None
1445 };
1446
1447 let vk_info = vk::PipelineLayoutCreateInfo::default()
1448 .flags(vk::PipelineLayoutCreateFlags::empty())
1449 .set_layouts(&vk_set_layouts)
1450 .push_constant_ranges(vk_immediates_ranges.as_slice());
1451
1452 let raw = {
1453 profiling::scope!("vkCreatePipelineLayout");
1454 unsafe {
1455 self.shared
1456 .raw
1457 .create_pipeline_layout(&vk_info, None)
1458 .map_err(super::map_host_device_oom_err)?
1459 }
1460 };
1461
1462 if let Some(label) = desc.label {
1463 unsafe { self.shared.set_object_name(raw, label) };
1464 }
1465
1466 let mut binding_map = BTreeMap::new();
1467 for (group, layout) in desc.bind_group_layouts.iter().enumerate() {
1468 let Some(layout) = layout else {
1469 continue;
1470 };
1471
1472 for &(binding, binding_info) in &layout.binding_map {
1473 binding_map.insert(
1474 naga::ResourceBinding {
1475 group: group as u32,
1476 binding,
1477 },
1478 naga::back::spv::BindingInfo {
1479 descriptor_set: group as u32,
1480 binding: binding_info.binding,
1481 binding_array_size: binding_info.binding_array_size.map(NonZeroU32::get),
1482 },
1483 );
1484 }
1485 }
1486
1487 self.counters.pipeline_layouts.add(1);
1488 Ok(super::PipelineLayout { raw, binding_map })
1489 }
1490 unsafe fn destroy_pipeline_layout(&self, pipeline_layout: super::PipelineLayout) {
1491 unsafe {
1492 self.shared
1493 .raw
1494 .destroy_pipeline_layout(pipeline_layout.raw, None)
1495 };
1496
1497 self.counters.pipeline_layouts.sub(1);
1498 }
1499
    /// Allocates a descriptor set for `desc.layout` and writes all supplied
    /// buffer/sampler/texture/acceleration-structure bindings into it.
    unsafe fn create_bind_group(
        &self,
        desc: &crate::BindGroupDescriptor<
            super::BindGroupLayout,
            super::Buffer,
            super::Sampler,
            super::TextureView,
            super::AccelerationStructure,
        >,
    ) -> Result<super::BindGroup, crate::DeviceError> {
        // Layouts containing binding arrays were created UPDATE_AFTER_BIND;
        // the allocator needs to pick a matching pool.
        let desc_set_layout_flags = if desc.layout.contains_binding_arrays {
            gpu_descriptor::DescriptorSetLayoutCreateFlags::UPDATE_AFTER_BIND
        } else {
            gpu_descriptor::DescriptorSetLayoutCreateFlags::empty()
        };

        let mut vk_sets = unsafe {
            self.desc_allocator.lock().allocate(
                &*self.shared,
                &desc.layout.raw,
                desc_set_layout_flags,
                &desc.layout.desc_count,
                1,
            )?
        };

        // Exactly one set was requested above.
        let set = vk_sets.pop().unwrap();
        if let Some(label) = desc.label {
            unsafe { self.shared.set_object_name(*set.raw(), label) };
        }

        // Helper that hands out initialized sub-slices of a Vec's *spare
        // capacity*. Because nothing is ever pushed onto the Vec itself, the
        // backing storage never reallocates, so the references stored inside
        // the `vk::WriteDescriptorSet`s below stay valid until the
        // `update_descriptor_sets` call.
        struct ExtendStack<'a, T> {
            remainder: &'a mut [MaybeUninit<T>],
        }

        impl<'a, T> ExtendStack<'a, T> {
            fn from_vec_capacity(vec: &'a mut Vec<T>) -> Self {
                Self {
                    remainder: vec.spare_capacity_mut(),
                }
            }

            // Writes one value into the spare capacity and returns the rest.
            fn extend_one(self, value: T) -> (Self, &'a mut T) {
                let (to_init, remainder) = self.remainder.split_first_mut().unwrap();
                let init = to_init.write(value);
                (Self { remainder }, init)
            }

            // Writes `iter.len()` values and returns them as an initialized
            // slice plus the remaining spare capacity.
            fn extend(
                self,
                iter: impl IntoIterator<Item = T> + ExactSizeIterator,
            ) -> (Self, &'a mut [T]) {
                let (to_init, remainder) = self.remainder.split_at_mut(iter.len());

                for (value, to_init) in iter.into_iter().zip(to_init.iter_mut()) {
                    to_init.write(value);
                }

                let init = {
                    // SAFETY: every element of `to_init` was written above;
                    // this relies on the `ExactSizeIterator` contract that the
                    // iterator yields exactly `len()` items.
                    unsafe { mem::transmute::<&mut [MaybeUninit<T>], &mut [T]>(to_init) }
                };
                (Self { remainder }, init)
            }
        }

        // Capacities are upper bounds on what the loop below can consume, so
        // the ExtendStacks never run out of spare capacity.
        let mut writes = Vec::with_capacity(desc.entries.len());
        let mut buffer_infos = Vec::with_capacity(desc.buffers.len());
        let mut buffer_infos = ExtendStack::from_vec_capacity(&mut buffer_infos);
        let mut image_infos = Vec::with_capacity(desc.samplers.len() + desc.textures.len());
        let mut image_infos = ExtendStack::from_vec_capacity(&mut image_infos);
        let mut acceleration_structure_infos =
            Vec::with_capacity(desc.acceleration_structures.len());
        let mut acceleration_structure_infos =
            ExtendStack::from_vec_capacity(&mut acceleration_structure_infos);
        let mut raw_acceleration_structures =
            Vec::with_capacity(desc.acceleration_structures.len());
        let mut raw_acceleration_structures =
            ExtendStack::from_vec_capacity(&mut raw_acceleration_structures);

        // Pair each bind group entry with its layout entry (matched by
        // wgpu binding slot).
        let layout_and_entry_iter = desc.entries.iter().map(|entry| {
            let layout = desc
                .layout
                .entries
                .iter()
                .find(|layout_entry| layout_entry.binding == entry.binding)
                .expect("internal error: no layout entry found with binding slot");
            (layout, entry)
        });
        // Vulkan binding slots are dense and in declaration order, mirroring
        // the numbering used in `create_bind_group_layout`.
        let mut next_binding = 0;
        for (layout, entry) in layout_and_entry_iter {
            let write = vk::WriteDescriptorSet::default().dst_set(*set.raw());

            match layout.ty {
                wgt::BindingType::Sampler(_) => {
                    let start = entry.resource_index;
                    let end = start + entry.count;
                    let local_image_infos;
                    (image_infos, local_image_infos) =
                        image_infos.extend(desc.samplers[start as usize..end as usize].iter().map(
                            |sampler| vk::DescriptorImageInfo::default().sampler(sampler.raw),
                        ));
                    writes.push(
                        write
                            .dst_binding(next_binding)
                            .descriptor_type(conv::map_binding_type(layout.ty))
                            .image_info(local_image_infos),
                    );
                    next_binding += 1;
                }
                wgt::BindingType::Texture { .. } | wgt::BindingType::StorageTexture { .. } => {
                    let start = entry.resource_index;
                    let end = start + entry.count;
                    let local_image_infos;
                    (image_infos, local_image_infos) =
                        image_infos.extend(desc.textures[start as usize..end as usize].iter().map(
                            |binding| {
                                let layout =
                                    conv::derive_image_layout(binding.usage, binding.view.format);
                                vk::DescriptorImageInfo::default()
                                    .image_view(binding.view.raw)
                                    .image_layout(layout)
                            },
                        ));
                    writes.push(
                        write
                            .dst_binding(next_binding)
                            .descriptor_type(conv::map_binding_type(layout.ty))
                            .image_info(local_image_infos),
                    );
                    next_binding += 1;
                }
                wgt::BindingType::Buffer { .. } => {
                    let start = entry.resource_index;
                    let end = start + entry.count;
                    let local_buffer_infos;
                    (buffer_infos, local_buffer_infos) =
                        buffer_infos.extend(desc.buffers[start as usize..end as usize].iter().map(
                            |binding| {
                                vk::DescriptorBufferInfo::default()
                                    .buffer(binding.buffer.raw)
                                    .offset(binding.offset)
                                    .range(
                                        binding.size.map_or(vk::WHOLE_SIZE, wgt::BufferSize::get),
                                    )
                            },
                        ));
                    writes.push(
                        write
                            .dst_binding(next_binding)
                            .descriptor_type(conv::map_binding_type(layout.ty))
                            .buffer_info(local_buffer_infos),
                    );
                    next_binding += 1;
                }
                wgt::BindingType::AccelerationStructure { .. } => {
                    let start = entry.resource_index;
                    let end = start + entry.count;

                    let local_raw_acceleration_structures;
                    (
                        raw_acceleration_structures,
                        local_raw_acceleration_structures,
                    ) = raw_acceleration_structures.extend(
                        desc.acceleration_structures[start as usize..end as usize]
                            .iter()
                            .map(|acceleration_structure| acceleration_structure.raw),
                    );

                    // Acceleration structures are written via a pNext
                    // extension struct rather than the image/buffer arrays.
                    let local_acceleration_structure_infos;
                    (
                        acceleration_structure_infos,
                        local_acceleration_structure_infos,
                    ) = acceleration_structure_infos.extend_one(
                        vk::WriteDescriptorSetAccelerationStructureKHR::default()
                            .acceleration_structures(local_raw_acceleration_structures),
                    );

                    writes.push(
                        write
                            .dst_binding(next_binding)
                            .descriptor_type(conv::map_binding_type(layout.ty))
                            .descriptor_count(entry.count)
                            .push_next(local_acceleration_structure_infos),
                    );
                    next_binding += 1;
                }
                wgt::BindingType::ExternalTexture => unimplemented!(),
            }
        }

        unsafe { self.shared.raw.update_descriptor_sets(&writes, &[]) };

        self.counters.bind_groups.add(1);

        Ok(super::BindGroup { set })
    }
1714
1715 unsafe fn destroy_bind_group(&self, group: super::BindGroup) {
1716 unsafe {
1717 self.desc_allocator
1718 .lock()
1719 .free(&*self.shared, Some(group.set))
1720 };
1721
1722 self.counters.bind_groups.sub(1);
1723 }
1724
    /// Builds a shader module from naga IR or raw SPIR-V.
    ///
    /// Naga input takes one of two paths:
    /// * If the `SEPARATE_ENTRY_POINTS` workaround is active, or the module
    ///   uses pipeline overrides, SPIR-V generation is deferred to pipeline
    ///   creation and the IR is kept as `ShaderModule::Intermediate`.
    /// * Otherwise the module is compiled to SPIR-V right away.
    unsafe fn create_shader_module(
        &self,
        desc: &crate::ShaderModuleDescriptor,
        shader: crate::ShaderInput,
    ) -> Result<super::ShaderModule, crate::ShaderError> {
        let shader_module = match shader {
            crate::ShaderInput::Naga(naga_shader)
                if self
                    .shared
                    .workarounds
                    .contains(super::Workarounds::SEPARATE_ENTRY_POINTS)
                    || !naga_shader.module.overrides.is_empty() =>
            {
                super::ShaderModule::Intermediate {
                    naga_shader,
                    runtime_checks: desc.runtime_checks,
                }
            }
            crate::ShaderInput::Naga(naga_shader) => {
                let mut naga_options = self.naga_options.clone();
                // Forward source-level debug info to the SPIR-V backend when
                // the shader carries it.
                naga_options.debug_info =
                    naga_shader
                        .debug_source
                        .as_ref()
                        .map(|d| naga::back::spv::DebugInfo {
                            source_code: d.source_code.as_ref(),
                            file_name: d.file_name.as_ref(),
                            language: naga::back::spv::SourceLanguage::WGSL,
                        });
                // Honor the caller's request to skip bounds checking.
                if !desc.runtime_checks.bounds_checks {
                    naga_options.bounds_check_policies = naga::proc::BoundsCheckPolicies {
                        index: naga::proc::BoundsCheckPolicy::Unchecked,
                        buffer: naga::proc::BoundsCheckPolicy::Unchecked,
                        image_load: naga::proc::BoundsCheckPolicy::Unchecked,
                        binding_array: naga::proc::BoundsCheckPolicy::Unchecked,
                    };
                }
                let spv = naga::back::spv::write_vec(
                    &naga_shader.module,
                    &naga_shader.info,
                    &naga_options,
                    None,
                )
                .map_err(|e| crate::ShaderError::Compilation(format!("{e}")))?;
                super::ShaderModule::Raw(self.create_shader_module_impl(&spv, &desc.label)?)
            }
            crate::ShaderInput::SpirV(data) => {
                super::ShaderModule::Raw(self.create_shader_module_impl(data, &desc.label)?)
            }
            // Inputs for other backends can never reach the Vulkan device.
            crate::ShaderInput::MetalLib { .. }
            | crate::ShaderInput::Msl { .. }
            | crate::ShaderInput::Dxil { .. }
            | crate::ShaderInput::Hlsl { .. }
            | crate::ShaderInput::Glsl { .. } => unreachable!(),
        };

        self.counters.shader_modules.add(1);

        Ok(shader_module)
    }
1785
1786 unsafe fn destroy_shader_module(&self, module: super::ShaderModule) {
1787 match module {
1788 super::ShaderModule::Raw(raw) => {
1789 unsafe { self.shared.raw.destroy_shader_module(raw, None) };
1790 }
1791 super::ShaderModule::Intermediate { .. } => {}
1792 }
1793
1794 self.counters.shader_modules.sub(1);
1795 }
1796
1797 unsafe fn create_render_pipeline(
1798 &self,
1799 desc: &crate::RenderPipelineDescriptor<
1800 super::PipelineLayout,
1801 super::ShaderModule,
1802 super::PipelineCache,
1803 >,
1804 ) -> Result<super::RenderPipeline, crate::PipelineError> {
1805 let dynamic_states = [
1806 vk::DynamicState::VIEWPORT,
1807 vk::DynamicState::SCISSOR,
1808 vk::DynamicState::BLEND_CONSTANTS,
1809 vk::DynamicState::STENCIL_REFERENCE,
1810 ];
1811 let mut compatible_rp_key = super::RenderPassKey {
1812 sample_count: desc.multisample.count,
1813 multiview_mask: desc.multiview_mask,
1814 ..Default::default()
1815 };
1816 let mut stages = ArrayVec::<_, { crate::MAX_CONCURRENT_SHADER_STAGES }>::new();
1817 let mut vertex_buffers = Vec::new();
1818 let mut vertex_attributes = Vec::new();
1819
1820 if let crate::VertexProcessor::Standard {
1821 vertex_buffers: desc_vertex_buffers,
1822 vertex_stage: _,
1823 } = &desc.vertex_processor
1824 {
1825 vertex_buffers = Vec::with_capacity(desc_vertex_buffers.len());
1826 for (i, vb) in desc_vertex_buffers.iter().enumerate() {
1827 vertex_buffers.push(vk::VertexInputBindingDescription {
1828 binding: i as u32,
1829 stride: vb.array_stride as u32,
1830 input_rate: match vb.step_mode {
1831 wgt::VertexStepMode::Vertex => vk::VertexInputRate::VERTEX,
1832 wgt::VertexStepMode::Instance => vk::VertexInputRate::INSTANCE,
1833 },
1834 });
1835 for at in vb.attributes {
1836 vertex_attributes.push(vk::VertexInputAttributeDescription {
1837 location: at.shader_location,
1838 binding: i as u32,
1839 format: conv::map_vertex_format(at.format),
1840 offset: at.offset as u32,
1841 });
1842 }
1843 }
1844 }
1845
1846 let vk_vertex_input = vk::PipelineVertexInputStateCreateInfo::default()
1847 .vertex_binding_descriptions(&vertex_buffers)
1848 .vertex_attribute_descriptions(&vertex_attributes);
1849
1850 let vk_input_assembly = vk::PipelineInputAssemblyStateCreateInfo::default()
1851 .topology(conv::map_topology(desc.primitive.topology))
1852 .primitive_restart_enable(desc.primitive.strip_index_format.is_some());
1853
1854 let mut compiled_vs = None;
1855 let mut compiled_ms = None;
1856 let mut compiled_ts = None;
1857 match &desc.vertex_processor {
1858 crate::VertexProcessor::Standard {
1859 vertex_buffers: _,
1860 vertex_stage,
1861 } => {
1862 compiled_vs = Some(self.compile_stage(
1863 vertex_stage,
1864 naga::ShaderStage::Vertex,
1865 &desc.layout.binding_map,
1866 )?);
1867 stages.push(compiled_vs.as_ref().unwrap().create_info);
1868 }
1869 crate::VertexProcessor::Mesh {
1870 task_stage,
1871 mesh_stage,
1872 } => {
1873 if let Some(t) = task_stage.as_ref() {
1874 compiled_ts = Some(self.compile_stage(
1875 t,
1876 naga::ShaderStage::Task,
1877 &desc.layout.binding_map,
1878 )?);
1879 stages.push(compiled_ts.as_ref().unwrap().create_info);
1880 }
1881 compiled_ms = Some(self.compile_stage(
1882 mesh_stage,
1883 naga::ShaderStage::Mesh,
1884 &desc.layout.binding_map,
1885 )?);
1886 stages.push(compiled_ms.as_ref().unwrap().create_info);
1887 }
1888 }
1889 let compiled_fs = match desc.fragment_stage {
1890 Some(ref stage) => {
1891 let compiled = self.compile_stage(
1892 stage,
1893 naga::ShaderStage::Fragment,
1894 &desc.layout.binding_map,
1895 )?;
1896 stages.push(compiled.create_info);
1897 Some(compiled)
1898 }
1899 None => None,
1900 };
1901
1902 let mut vk_rasterization = vk::PipelineRasterizationStateCreateInfo::default()
1903 .polygon_mode(conv::map_polygon_mode(desc.primitive.polygon_mode))
1904 .front_face(conv::map_front_face(desc.primitive.front_face))
1905 .line_width(1.0)
1906 .depth_clamp_enable(desc.primitive.unclipped_depth);
1907 if let Some(face) = desc.primitive.cull_mode {
1908 vk_rasterization = vk_rasterization.cull_mode(conv::map_cull_face(face))
1909 }
1910 let mut vk_rasterization_conservative_state =
1911 vk::PipelineRasterizationConservativeStateCreateInfoEXT::default()
1912 .conservative_rasterization_mode(
1913 vk::ConservativeRasterizationModeEXT::OVERESTIMATE,
1914 );
1915 if desc.primitive.conservative {
1916 vk_rasterization = vk_rasterization.push_next(&mut vk_rasterization_conservative_state);
1917 }
1918
1919 let mut vk_depth_stencil = vk::PipelineDepthStencilStateCreateInfo::default();
1920 if let Some(ref ds) = desc.depth_stencil {
1921 let vk_format = self.shared.private_caps.map_texture_format(ds.format);
1922 let vk_layout = if ds.is_read_only(desc.primitive.cull_mode) {
1923 vk::ImageLayout::DEPTH_STENCIL_READ_ONLY_OPTIMAL
1924 } else {
1925 vk::ImageLayout::DEPTH_STENCIL_ATTACHMENT_OPTIMAL
1926 };
1927 compatible_rp_key.depth_stencil = Some(super::DepthStencilAttachmentKey {
1928 base: super::AttachmentKey::compatible(vk_format, vk_layout),
1929 stencil_ops: crate::AttachmentOps::all(),
1930 });
1931
1932 if ds.is_depth_enabled() {
1933 vk_depth_stencil = vk_depth_stencil
1934 .depth_test_enable(true)
1935 .depth_write_enable(ds.depth_write_enabled.unwrap_or_default())
1936 .depth_compare_op(conv::map_comparison(ds.depth_compare.unwrap_or_default()));
1937 }
1938 if ds.stencil.is_enabled() {
1939 let s = &ds.stencil;
1940 let front = conv::map_stencil_face(&s.front, s.read_mask, s.write_mask);
1941 let back = conv::map_stencil_face(&s.back, s.read_mask, s.write_mask);
1942 vk_depth_stencil = vk_depth_stencil
1943 .stencil_test_enable(true)
1944 .front(front)
1945 .back(back);
1946 }
1947
1948 if ds.bias.is_enabled() {
1949 vk_rasterization = vk_rasterization
1950 .depth_bias_enable(true)
1951 .depth_bias_constant_factor(ds.bias.constant as f32)
1952 .depth_bias_clamp(ds.bias.clamp)
1953 .depth_bias_slope_factor(ds.bias.slope_scale);
1954 }
1955 }
1956
1957 let vk_viewport = vk::PipelineViewportStateCreateInfo::default()
1958 .flags(vk::PipelineViewportStateCreateFlags::empty())
1959 .scissor_count(1)
1960 .viewport_count(1);
1961
1962 let vk_sample_mask = [
1963 desc.multisample.mask as u32,
1964 (desc.multisample.mask >> 32) as u32,
1965 ];
1966 let vk_multisample = vk::PipelineMultisampleStateCreateInfo::default()
1967 .rasterization_samples(vk::SampleCountFlags::from_raw(desc.multisample.count))
1968 .alpha_to_coverage_enable(desc.multisample.alpha_to_coverage_enabled)
1969 .sample_mask(&vk_sample_mask);
1970
1971 let mut vk_attachments = Vec::with_capacity(desc.color_targets.len());
1972 for cat in desc.color_targets {
1973 let (key, attarchment) = if let Some(cat) = cat.as_ref() {
1974 let mut vk_attachment = vk::PipelineColorBlendAttachmentState::default()
1975 .color_write_mask(vk::ColorComponentFlags::from_raw(cat.write_mask.bits()));
1976 if let Some(ref blend) = cat.blend {
1977 let (color_op, color_src, color_dst) = conv::map_blend_component(&blend.color);
1978 let (alpha_op, alpha_src, alpha_dst) = conv::map_blend_component(&blend.alpha);
1979 vk_attachment = vk_attachment
1980 .blend_enable(true)
1981 .color_blend_op(color_op)
1982 .src_color_blend_factor(color_src)
1983 .dst_color_blend_factor(color_dst)
1984 .alpha_blend_op(alpha_op)
1985 .src_alpha_blend_factor(alpha_src)
1986 .dst_alpha_blend_factor(alpha_dst);
1987 }
1988
1989 let vk_format = self.shared.private_caps.map_texture_format(cat.format);
1990 (
1991 Some(super::ColorAttachmentKey {
1992 base: super::AttachmentKey::compatible(
1993 vk_format,
1994 vk::ImageLayout::COLOR_ATTACHMENT_OPTIMAL,
1995 ),
1996 resolve: None,
1997 }),
1998 vk_attachment,
1999 )
2000 } else {
2001 (None, vk::PipelineColorBlendAttachmentState::default())
2002 };
2003
2004 compatible_rp_key.colors.push(key);
2005 vk_attachments.push(attarchment);
2006 }
2007
2008 let vk_color_blend =
2009 vk::PipelineColorBlendStateCreateInfo::default().attachments(&vk_attachments);
2010
2011 let vk_dynamic_state =
2012 vk::PipelineDynamicStateCreateInfo::default().dynamic_states(&dynamic_states);
2013
2014 let raw_pass = self.shared.make_render_pass(compatible_rp_key)?;
2015
2016 let vk_infos = [{
2017 vk::GraphicsPipelineCreateInfo::default()
2018 .layout(desc.layout.raw)
2019 .stages(&stages)
2020 .vertex_input_state(&vk_vertex_input)
2021 .input_assembly_state(&vk_input_assembly)
2022 .rasterization_state(&vk_rasterization)
2023 .viewport_state(&vk_viewport)
2024 .multisample_state(&vk_multisample)
2025 .depth_stencil_state(&vk_depth_stencil)
2026 .color_blend_state(&vk_color_blend)
2027 .dynamic_state(&vk_dynamic_state)
2028 .render_pass(raw_pass)
2029 }];
2030
2031 let pipeline_cache = desc
2032 .cache
2033 .map(|it| it.raw)
2034 .unwrap_or(vk::PipelineCache::null());
2035
2036 let mut raw_vec = {
2037 profiling::scope!("vkCreateGraphicsPipelines");
2038 unsafe {
2039 self.shared
2040 .raw
2041 .create_graphics_pipelines(pipeline_cache, &vk_infos, None)
2042 .map_err(|(_, e)| super::map_pipeline_err(e))
2043 }?
2044 };
2045
2046 let raw = raw_vec.pop().unwrap();
2047 if let Some(label) = desc.label {
2048 unsafe { self.shared.set_object_name(raw, label) };
2049 }
2050
2051 if let Some(CompiledStage {
2052 temp_raw_module: Some(raw_module),
2053 ..
2054 }) = compiled_vs
2055 {
2056 unsafe { self.shared.raw.destroy_shader_module(raw_module, None) };
2057 }
2058 if let Some(CompiledStage {
2059 temp_raw_module: Some(raw_module),
2060 ..
2061 }) = compiled_ts
2062 {
2063 unsafe { self.shared.raw.destroy_shader_module(raw_module, None) };
2064 }
2065 if let Some(CompiledStage {
2066 temp_raw_module: Some(raw_module),
2067 ..
2068 }) = compiled_ms
2069 {
2070 unsafe { self.shared.raw.destroy_shader_module(raw_module, None) };
2071 }
2072 if let Some(CompiledStage {
2073 temp_raw_module: Some(raw_module),
2074 ..
2075 }) = compiled_fs
2076 {
2077 unsafe { self.shared.raw.destroy_shader_module(raw_module, None) };
2078 }
2079
2080 self.counters.render_pipelines.add(1);
2081
2082 Ok(super::RenderPipeline {
2083 raw,
2084 is_multiview: desc.multiview_mask.is_some(),
2085 })
2086 }
2087
2088 unsafe fn destroy_render_pipeline(&self, pipeline: super::RenderPipeline) {
2089 unsafe { self.shared.raw.destroy_pipeline(pipeline.raw, None) };
2090
2091 self.counters.render_pipelines.sub(1);
2092 }
2093
2094 unsafe fn create_compute_pipeline(
2095 &self,
2096 desc: &crate::ComputePipelineDescriptor<
2097 super::PipelineLayout,
2098 super::ShaderModule,
2099 super::PipelineCache,
2100 >,
2101 ) -> Result<super::ComputePipeline, crate::PipelineError> {
2102 let compiled = self.compile_stage(
2103 &desc.stage,
2104 naga::ShaderStage::Compute,
2105 &desc.layout.binding_map,
2106 )?;
2107
2108 let vk_infos = [{
2109 vk::ComputePipelineCreateInfo::default()
2110 .layout(desc.layout.raw)
2111 .stage(compiled.create_info)
2112 }];
2113
2114 let pipeline_cache = desc
2115 .cache
2116 .map(|it| it.raw)
2117 .unwrap_or(vk::PipelineCache::null());
2118
2119 let mut raw_vec = {
2120 profiling::scope!("vkCreateComputePipelines");
2121 unsafe {
2122 self.shared
2123 .raw
2124 .create_compute_pipelines(pipeline_cache, &vk_infos, None)
2125 .map_err(|(_, e)| super::map_pipeline_err(e))
2126 }?
2127 };
2128
2129 let raw = raw_vec.pop().unwrap();
2130 if let Some(label) = desc.label {
2131 unsafe { self.shared.set_object_name(raw, label) };
2132 }
2133
2134 if let Some(raw_module) = compiled.temp_raw_module {
2135 unsafe { self.shared.raw.destroy_shader_module(raw_module, None) };
2136 }
2137
2138 self.counters.compute_pipelines.add(1);
2139
2140 Ok(super::ComputePipeline { raw })
2141 }
2142
2143 unsafe fn destroy_compute_pipeline(&self, pipeline: super::ComputePipeline) {
2144 unsafe { self.shared.raw.destroy_pipeline(pipeline.raw, None) };
2145
2146 self.counters.compute_pipelines.sub(1);
2147 }
2148
2149 unsafe fn create_pipeline_cache(
2150 &self,
2151 desc: &crate::PipelineCacheDescriptor<'_>,
2152 ) -> Result<super::PipelineCache, crate::PipelineCacheError> {
2153 let mut info = vk::PipelineCacheCreateInfo::default();
2154 if let Some(data) = desc.data {
2155 info = info.initial_data(data)
2156 }
2157 profiling::scope!("vkCreatePipelineCache");
2158 let raw = unsafe { self.shared.raw.create_pipeline_cache(&info, None) }
2159 .map_err(super::map_host_device_oom_err)?;
2160
2161 Ok(super::PipelineCache { raw })
2162 }
    /// Returns the key used to validate serialized pipeline-cache blobs.
    ///
    /// Always `Some` on this backend; the key is stored on `DeviceShared`.
    fn pipeline_cache_validation_key(&self) -> Option<[u8; 16]> {
        Some(self.shared.pipeline_cache_validation_key)
    }
    /// Destroys the Vulkan pipeline cache object.
    ///
    /// Note: no internal counter tracks pipeline caches.
    unsafe fn destroy_pipeline_cache(&self, cache: super::PipelineCache) {
        unsafe { self.shared.raw.destroy_pipeline_cache(cache.raw, None) }
    }
    /// Creates a Vulkan query pool for the requested query type.
    ///
    /// Occlusion and timestamp queries carry empty pipeline-statistics
    /// flags; pipeline-statistics queries translate the requested mask.
    unsafe fn create_query_set(
        &self,
        desc: &wgt::QuerySetDescriptor<crate::Label>,
    ) -> Result<super::QuerySet, crate::DeviceError> {
        // 256 bytes per query is presumably a conservative worst-case size
        // estimate for the budget check — TODO confirm the factor.
        self.error_if_would_oom_on_resource_allocation(true, desc.count as u64 * 256)?;

        let (vk_type, pipeline_statistics) = match desc.ty {
            wgt::QueryType::Occlusion => (
                vk::QueryType::OCCLUSION,
                vk::QueryPipelineStatisticFlags::empty(),
            ),
            wgt::QueryType::PipelineStatistics(statistics) => (
                vk::QueryType::PIPELINE_STATISTICS,
                conv::map_pipeline_statistics(statistics),
            ),
            wgt::QueryType::Timestamp => (
                vk::QueryType::TIMESTAMP,
                vk::QueryPipelineStatisticFlags::empty(),
            ),
        };

        let vk_info = vk::QueryPoolCreateInfo::default()
            .query_type(vk_type)
            .query_count(desc.count)
            .pipeline_statistics(pipeline_statistics);

        let raw = unsafe { self.shared.raw.create_query_pool(&vk_info, None) }
            .map_err(super::map_host_device_oom_err)?;
        if let Some(label) = desc.label {
            unsafe { self.shared.set_object_name(raw, label) };
        }

        self.counters.query_sets.add(1);

        Ok(super::QuerySet { raw })
    }
2207
    /// Destroys the Vulkan query pool and decrements the query-set counter.
    unsafe fn destroy_query_set(&self, set: super::QuerySet) {
        unsafe { self.shared.raw.destroy_query_pool(set.raw, None) };

        self.counters.query_sets.sub(1);
    }
2213
    /// Creates a new fence.
    ///
    /// When the device supports timeline semaphores, a single timeline
    /// semaphore backs the fence; otherwise an (initially empty) pool of
    /// binary `vk::Fence` objects is used instead.
    unsafe fn create_fence(&self) -> Result<super::Fence, crate::DeviceError> {
        self.counters.fences.add(1);

        Ok(if self.shared.private_caps.timeline_semaphores {
            let mut sem_type_info =
                vk::SemaphoreTypeCreateInfo::default().semaphore_type(vk::SemaphoreType::TIMELINE);
            let vk_info = vk::SemaphoreCreateInfo::default().push_next(&mut sem_type_info);
            let raw = unsafe { self.shared.raw.create_semaphore(&vk_info, None) }
                .map_err(super::map_host_device_oom_err)?;

            super::Fence::TimelineSemaphore(raw)
        } else {
            super::Fence::FencePool {
                last_completed: 0,
                active: Vec::new(),
                free: Vec::new(),
            }
        })
    }
    /// Destroys a fence, releasing either the backing timeline semaphore or
    /// every `vk::Fence` (both active and free) in the pool.
    unsafe fn destroy_fence(&self, fence: super::Fence) {
        match fence {
            super::Fence::TimelineSemaphore(raw) => {
                unsafe { self.shared.raw.destroy_semaphore(raw, None) };
            }
            super::Fence::FencePool {
                active,
                free,
                last_completed: _,
            } => {
                for (_, raw) in active {
                    unsafe { self.shared.raw.destroy_fence(raw, None) };
                }
                for raw in free {
                    unsafe { self.shared.raw.destroy_fence(raw, None) };
                }
            }
        }

        self.counters.fences.sub(1);
    }
    /// Reads the latest value the fence has been signaled to.
    unsafe fn get_fence_value(
        &self,
        fence: &super::Fence,
    ) -> Result<crate::FenceValue, crate::DeviceError> {
        fence.get_latest(
            &self.shared.raw,
            self.shared.extension_fns.timeline_semaphore.as_ref(),
        )
    }
2263 unsafe fn wait(
2264 &self,
2265 fence: &super::Fence,
2266 wait_value: crate::FenceValue,
2267 timeout: Option<Duration>,
2268 ) -> Result<bool, crate::DeviceError> {
2269 let timeout_ns = timeout
2270 .unwrap_or(Duration::MAX)
2271 .as_nanos()
2272 .min(u64::MAX as _) as u64;
2273 self.shared.wait_for_fence(fence, wait_value, timeout_ns)
2274 }
2275
    /// Asks RenderDoc (when the `renderdoc` feature is enabled) to begin a
    /// frame capture; returns whether a capture actually started.
    ///
    /// Without the feature this is a no-op that reports `false`.
    unsafe fn start_graphics_debugger_capture(&self) -> bool {
        #[cfg(feature = "renderdoc")]
        {
            // A dispatchable `VkInstance` handle points at its dispatch
            // table; RenderDoc identifies the capture target by that table
            // pointer, so dereference the raw handle once to obtain it.
            let raw_vk_instance =
                vk::Handle::as_raw(self.shared.instance.raw.handle()) as *mut *mut _;
            let raw_vk_instance_dispatch_table = unsafe { *raw_vk_instance };
            unsafe {
                self.render_doc
                    .start_frame_capture(raw_vk_instance_dispatch_table, ptr::null_mut())
            }
        }
        #[cfg(not(feature = "renderdoc"))]
        false
    }
    /// Ends a RenderDoc frame capture previously started with
    /// `start_graphics_debugger_capture`; no-op without the feature.
    unsafe fn stop_graphics_debugger_capture(&self) {
        #[cfg(feature = "renderdoc")]
        {
            // Same dispatch-table-pointer dance as in
            // `start_graphics_debugger_capture`: the first pointer-sized
            // value behind the dispatchable handle identifies the instance.
            let raw_vk_instance =
                vk::Handle::as_raw(self.shared.instance.raw.handle()) as *mut *mut _;
            let raw_vk_instance_dispatch_table = unsafe { *raw_vk_instance };

            unsafe {
                self.render_doc
                    .end_frame_capture(raw_vk_instance_dispatch_table, ptr::null_mut())
            }
        }
    }
2305
    /// Serializes the pipeline cache contents, returning `None` if Vulkan
    /// fails to produce the data.
    unsafe fn pipeline_cache_get_data(&self, cache: &super::PipelineCache) -> Option<Vec<u8>> {
        let data = unsafe { self.raw_device().get_pipeline_cache_data(cache.raw) };
        data.ok()
    }
2310
2311 unsafe fn get_acceleration_structure_build_sizes<'a>(
2312 &self,
2313 desc: &crate::GetAccelerationStructureBuildSizesDescriptor<'a, super::Buffer>,
2314 ) -> crate::AccelerationStructureBuildSizes {
2315 const CAPACITY: usize = 8;
2316
2317 let ray_tracing_functions = self
2318 .shared
2319 .extension_fns
2320 .ray_tracing
2321 .as_ref()
2322 .expect("Feature `RAY_TRACING` not enabled");
2323
2324 let (geometries, primitive_counts) = match *desc.entries {
2325 crate::AccelerationStructureEntries::Instances(ref instances) => {
2326 let instance_data = vk::AccelerationStructureGeometryInstancesDataKHR::default();
2327
2328 let geometry = vk::AccelerationStructureGeometryKHR::default()
2329 .geometry_type(vk::GeometryTypeKHR::INSTANCES)
2330 .geometry(vk::AccelerationStructureGeometryDataKHR {
2331 instances: instance_data,
2332 });
2333
2334 (
2335 smallvec::smallvec![geometry],
2336 smallvec::smallvec![instances.count],
2337 )
2338 }
2339 crate::AccelerationStructureEntries::Triangles(ref in_geometries) => {
2340 let mut primitive_counts =
2341 smallvec::SmallVec::<[u32; CAPACITY]>::with_capacity(in_geometries.len());
2342 let mut geometries = smallvec::SmallVec::<
2343 [vk::AccelerationStructureGeometryKHR; CAPACITY],
2344 >::with_capacity(in_geometries.len());
2345
2346 for triangles in in_geometries {
2347 let mut triangle_data =
2348 vk::AccelerationStructureGeometryTrianglesDataKHR::default()
2349 .index_type(vk::IndexType::NONE_KHR)
2350 .vertex_format(conv::map_vertex_format(triangles.vertex_format))
2351 .max_vertex(triangles.vertex_count)
2352 .vertex_stride(triangles.vertex_stride)
2353 .transform_data(vk::DeviceOrHostAddressConstKHR {
2363 device_address: if desc
2364 .flags
2365 .contains(wgt::AccelerationStructureFlags::USE_TRANSFORM)
2366 {
2367 unsafe {
2368 ray_tracing_functions
2369 .buffer_device_address
2370 .get_buffer_device_address(
2371 &vk::BufferDeviceAddressInfo::default().buffer(
2372 triangles
2373 .transform
2374 .as_ref()
2375 .unwrap()
2376 .buffer
2377 .raw,
2378 ),
2379 )
2380 }
2381 } else {
2382 0
2383 },
2384 });
2385
2386 let pritive_count = if let Some(ref indices) = triangles.indices {
2387 triangle_data =
2388 triangle_data.index_type(conv::map_index_format(indices.format));
2389 indices.count / 3
2390 } else {
2391 triangles.vertex_count / 3
2392 };
2393
2394 let geometry = vk::AccelerationStructureGeometryKHR::default()
2395 .geometry_type(vk::GeometryTypeKHR::TRIANGLES)
2396 .geometry(vk::AccelerationStructureGeometryDataKHR {
2397 triangles: triangle_data,
2398 })
2399 .flags(conv::map_acceleration_structure_geometry_flags(
2400 triangles.flags,
2401 ));
2402
2403 geometries.push(geometry);
2404 primitive_counts.push(pritive_count);
2405 }
2406 (geometries, primitive_counts)
2407 }
2408 crate::AccelerationStructureEntries::AABBs(ref in_geometries) => {
2409 let mut primitive_counts =
2410 smallvec::SmallVec::<[u32; CAPACITY]>::with_capacity(in_geometries.len());
2411 let mut geometries = smallvec::SmallVec::<
2412 [vk::AccelerationStructureGeometryKHR; CAPACITY],
2413 >::with_capacity(in_geometries.len());
2414 for aabb in in_geometries {
2415 let aabbs_data = vk::AccelerationStructureGeometryAabbsDataKHR::default()
2416 .stride(aabb.stride);
2417
2418 let geometry = vk::AccelerationStructureGeometryKHR::default()
2419 .geometry_type(vk::GeometryTypeKHR::AABBS)
2420 .geometry(vk::AccelerationStructureGeometryDataKHR { aabbs: aabbs_data })
2421 .flags(conv::map_acceleration_structure_geometry_flags(aabb.flags));
2422
2423 geometries.push(geometry);
2424 primitive_counts.push(aabb.count);
2425 }
2426 (geometries, primitive_counts)
2427 }
2428 };
2429
2430 let ty = match *desc.entries {
2431 crate::AccelerationStructureEntries::Instances(_) => {
2432 vk::AccelerationStructureTypeKHR::TOP_LEVEL
2433 }
2434 _ => vk::AccelerationStructureTypeKHR::BOTTOM_LEVEL,
2435 };
2436
2437 let geometry_info = vk::AccelerationStructureBuildGeometryInfoKHR::default()
2438 .ty(ty)
2439 .flags(conv::map_acceleration_structure_flags(desc.flags))
2440 .geometries(&geometries);
2441
2442 let mut raw = Default::default();
2443 unsafe {
2444 ray_tracing_functions
2445 .acceleration_structure
2446 .get_acceleration_structure_build_sizes(
2447 vk::AccelerationStructureBuildTypeKHR::DEVICE,
2448 &geometry_info,
2449 &primitive_counts,
2450 &mut raw,
2451 )
2452 }
2453
2454 crate::AccelerationStructureBuildSizes {
2455 acceleration_structure_size: raw.acceleration_structure_size,
2456 update_scratch_size: raw.update_scratch_size,
2457 build_scratch_size: raw.build_scratch_size,
2458 }
2459 }
2460
2461 unsafe fn get_acceleration_structure_device_address(
2462 &self,
2463 acceleration_structure: &super::AccelerationStructure,
2464 ) -> wgt::BufferAddress {
2465 let ray_tracing_functions = self
2466 .shared
2467 .extension_fns
2468 .ray_tracing
2469 .as_ref()
2470 .expect("Feature `RAY_TRACING` not enabled");
2471
2472 unsafe {
2473 ray_tracing_functions
2474 .acceleration_structure
2475 .get_acceleration_structure_device_address(
2476 &vk::AccelerationStructureDeviceAddressInfoKHR::default()
2477 .acceleration_structure(acceleration_structure.raw),
2478 )
2479 }
2480 }
2481
    /// Creates an acceleration structure together with its backing buffer
    /// and memory allocation.
    ///
    /// Creation is a multi-step sequence (buffer → allocation → bind →
    /// acceleration structure → optional compaction query pool); each
    /// fallible step cleans up the resources created before it via
    /// `inspect_err` so a mid-sequence failure does not leak.
    ///
    /// # Panics
    /// Panics if the `RAY_TRACING` feature was not enabled on this device.
    unsafe fn create_acceleration_structure(
        &self,
        desc: &crate::AccelerationStructureDescriptor,
    ) -> Result<super::AccelerationStructure, crate::DeviceError> {
        let ray_tracing_functions = self
            .shared
            .extension_fns
            .ray_tracing
            .as_ref()
            .expect("Feature `RAY_TRACING` not enabled");

        let vk_buffer_info = vk::BufferCreateInfo::default()
            .size(desc.size)
            .usage(
                vk::BufferUsageFlags::ACCELERATION_STRUCTURE_STORAGE_KHR
                    | vk::BufferUsageFlags::SHADER_DEVICE_ADDRESS,
            )
            .sharing_mode(vk::SharingMode::EXCLUSIVE);

        unsafe {
            let raw_buffer = self
                .shared
                .raw
                .create_buffer(&vk_buffer_info, None)
                .map_err(super::map_host_device_oom_and_ioca_err)?;

            let requirements = self.shared.raw.get_buffer_memory_requirements(raw_buffer);

            // Budget check happens after buffer creation because it needs
            // the real memory requirements; destroy the buffer on failure.
            self.error_if_would_oom_on_resource_allocation(false, requirements.size)
                .inspect_err(|_| {
                    self.shared.raw.destroy_buffer(raw_buffer, None);
                })?;

            let name = desc
                .label
                .unwrap_or("Unlabeled acceleration structure buffer");

            let allocation = self
                .mem_allocator
                .lock()
                .allocate(&gpu_allocator::vulkan::AllocationCreateDesc {
                    name,
                    requirements,
                    location: gpu_allocator::MemoryLocation::GpuOnly,
                    linear: true, allocation_scheme: gpu_allocator::vulkan::AllocationScheme::GpuAllocatorManaged,
                })
                .inspect_err(|_| {
                    self.shared.raw.destroy_buffer(raw_buffer, None);
                })?;

            self.shared
                .raw
                .bind_buffer_memory(raw_buffer, allocation.memory(), allocation.offset())
                .map_err(super::map_host_device_oom_and_ioca_err)
                .inspect_err(|_| {
                    self.shared.raw.destroy_buffer(raw_buffer, None);
                })?;

            if let Some(label) = desc.label {
                self.shared.set_object_name(raw_buffer, label);
            }

            let vk_info = vk::AccelerationStructureCreateInfoKHR::default()
                .buffer(raw_buffer)
                .offset(0)
                .size(desc.size)
                .ty(conv::map_acceleration_structure_format(desc.format));

            let raw_acceleration_structure = ray_tracing_functions
                .acceleration_structure
                .create_acceleration_structure(&vk_info, None)
                .map_err(super::map_host_oom_and_ioca_err)
                .inspect_err(|_| {
                    self.shared.raw.destroy_buffer(raw_buffer, None);
                })?;

            if let Some(label) = desc.label {
                self.shared
                    .set_object_name(raw_acceleration_structure, label);
            }

            // Compaction needs a single-entry query pool to read back the
            // compacted size after the build.
            let pool = if desc.allow_compaction {
                let vk_info = vk::QueryPoolCreateInfo::default()
                    .query_type(vk::QueryType::ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR)
                    .query_count(1);

                let raw = self
                    .shared
                    .raw
                    .create_query_pool(&vk_info, None)
                    .map_err(super::map_host_device_oom_err)
                    .inspect_err(|_| {
                        ray_tracing_functions
                            .acceleration_structure
                            .destroy_acceleration_structure(raw_acceleration_structure, None);
                        self.shared.raw.destroy_buffer(raw_buffer, None);
                    })?;
                Some(raw)
            } else {
                None
            };

            Ok(super::AccelerationStructure {
                raw: raw_acceleration_structure,
                buffer: raw_buffer,
                allocation,
                compacted_size_query: pool,
            })
        }
    }
2593
2594 unsafe fn destroy_acceleration_structure(
2595 &self,
2596 acceleration_structure: super::AccelerationStructure,
2597 ) {
2598 let ray_tracing_functions = self
2599 .shared
2600 .extension_fns
2601 .ray_tracing
2602 .as_ref()
2603 .expect("Feature `RAY_TRACING` not enabled");
2604
2605 unsafe {
2606 ray_tracing_functions
2607 .acceleration_structure
2608 .destroy_acceleration_structure(acceleration_structure.raw, None);
2609 self.shared
2610 .raw
2611 .destroy_buffer(acceleration_structure.buffer, None);
2612 let result = self
2613 .mem_allocator
2614 .lock()
2615 .free(acceleration_structure.allocation);
2616 if let Err(err) = result {
2617 log::warn!("Failed to free buffer acceleration structure: {err}");
2618 }
2619 if let Some(query) = acceleration_structure.compacted_size_query {
2620 self.shared.raw.destroy_query_pool(query, None)
2621 }
2622 }
2623 }
2624
    /// Returns a snapshot of the HAL resource counters, refreshing the
    /// memory-allocation count from the shared device state first.
    fn get_internal_counters(&self) -> wgt::HalCounters {
        self.counters
            .memory_allocations
            .set(self.shared.memory_allocations_counter.read());

        self.counters.as_ref().clone()
    }
2632
2633 fn generate_allocator_report(&self) -> Option<wgt::AllocatorReport> {
2634 let gpu_allocator::AllocatorReport {
2635 allocations,
2636 blocks,
2637 total_allocated_bytes,
2638 total_capacity_bytes,
2639 } = self.mem_allocator.lock().generate_report();
2640
2641 let allocations = allocations
2642 .into_iter()
2643 .map(|alloc| wgt::AllocationReport {
2644 name: alloc.name,
2645 offset: alloc.offset,
2646 size: alloc.size,
2647 })
2648 .collect();
2649
2650 let blocks = blocks
2651 .into_iter()
2652 .map(|block| wgt::MemoryBlockReport {
2653 size: block.size,
2654 allocations: block.allocations.clone(),
2655 })
2656 .collect();
2657
2658 Some(wgt::AllocatorReport {
2659 allocations,
2660 blocks,
2661 total_allocated_bytes,
2662 total_reserved_bytes: total_capacity_bytes,
2663 })
2664 }
2665
2666 fn tlas_instance_to_bytes(&self, instance: TlasInstance) -> Vec<u8> {
2667 const MAX_U24: u32 = (1u32 << 24u32) - 1u32;
2668 let temp = RawTlasInstance {
2669 transform: instance.transform,
2670 custom_data_and_mask: (instance.custom_data & MAX_U24)
2671 | (u32::from(instance.mask) << 24),
2672 shader_binding_table_record_offset_and_flags: 0,
2673 acceleration_structure_reference: instance.blas_address,
2674 };
2675 bytemuck::bytes_of(&temp).to_vec()
2676 }
2677
    /// Checks whether any memory heap's usage has crossed the configured
    /// device-loss budget threshold, reporting `OutOfMemory` if so.
    ///
    /// Returns `Ok(())` when no threshold is configured or when the
    /// `VK_EXT_memory_budget` extension is not enabled.
    fn check_if_oom(&self) -> Result<(), crate::DeviceError> {
        let Some(threshold) = self
            .shared
            .instance
            .memory_budget_thresholds
            .for_device_loss
        else {
            return Ok(());
        };

        if !self
            .shared
            .enabled_extensions
            .contains(&ext::memory_budget::NAME)
        {
            return Ok(());
        }

        let get_physical_device_properties = self
            .shared
            .instance
            .get_physical_device_properties
            .as_ref()
            .unwrap();

        let mut memory_budget_properties = vk::PhysicalDeviceMemoryBudgetPropertiesEXT::default();

        let mut memory_properties =
            vk::PhysicalDeviceMemoryProperties2::default().push_next(&mut memory_budget_properties);

        unsafe {
            get_physical_device_properties.get_physical_device_memory_properties2(
                self.shared.physical_device,
                &mut memory_properties,
            );
        }

        let memory_properties = memory_properties.memory_properties;

        for i in 0..memory_properties.memory_heap_count {
            let heap_usage = memory_budget_properties.heap_usage[i as usize];
            let heap_budget = memory_budget_properties.heap_budget[i as usize];

            // `threshold` is a percentage; dividing the budget before
            // multiplying avoids overflowing `u64` for very large heaps.
            if heap_usage >= heap_budget / 100 * threshold as u64 {
                return Err(crate::DeviceError::OutOfMemory);
            }
        }

        Ok(())
    }
2728}
2729
impl super::DeviceShared {
    /// Creates a binary semaphore and attaches `name` as its debug label.
    pub(super) fn new_binary_semaphore(
        &self,
        name: &str,
    ) -> Result<vk::Semaphore, crate::DeviceError> {
        unsafe {
            let semaphore = self
                .raw
                .create_semaphore(&vk::SemaphoreCreateInfo::default(), None)
                .map_err(super::map_host_device_oom_err)?;

            self.set_object_name(semaphore, name);

            Ok(semaphore)
        }
    }

    /// Waits for `fence` to reach `wait_value`, for at most `timeout_ns`
    /// nanoseconds.
    ///
    /// Returns `Ok(true)` once the value is reached and `Ok(false)` on
    /// timeout.
    pub(super) fn wait_for_fence(
        &self,
        fence: &super::Fence,
        wait_value: crate::FenceValue,
        timeout_ns: u64,
    ) -> Result<bool, crate::DeviceError> {
        profiling::scope!("Device::wait");
        match *fence {
            super::Fence::TimelineSemaphore(raw) => {
                let semaphores = [raw];
                let values = [wait_value];
                let vk_info = vk::SemaphoreWaitInfo::default()
                    .semaphores(&semaphores)
                    .values(&values);
                // Use the extension entry point or the core (promoted)
                // function depending on how timeline semaphores were
                // enabled on this device.
                let result = match self.extension_fns.timeline_semaphore {
                    Some(super::ExtensionFn::Extension(ref ext)) => unsafe {
                        ext.wait_semaphores(&vk_info, timeout_ns)
                    },
                    Some(super::ExtensionFn::Promoted) => unsafe {
                        self.raw.wait_semaphores(&vk_info, timeout_ns)
                    },
                    // A `TimelineSemaphore` fence can only exist when the
                    // functionality is available.
                    None => unreachable!(),
                };
                match result {
                    Ok(()) => Ok(true),
                    Err(vk::Result::TIMEOUT) => Ok(false),
                    Err(other) => Err(super::map_host_device_oom_and_lost_err(other)),
                }
            }
            super::Fence::FencePool {
                last_completed,
                ref active,
                free: _,
            } => {
                if wait_value <= last_completed {
                    Ok(true)
                } else {
                    // Wait on any active fence that will be signaled at or
                    // after the requested value.
                    match active.iter().find(|&&(value, _)| value >= wait_value) {
                        Some(&(_, raw)) => {
                            match unsafe { self.raw.wait_for_fences(&[raw], true, timeout_ns) } {
                                Ok(()) => Ok(true),
                                Err(vk::Result::TIMEOUT) => Ok(false),
                                Err(other) => Err(super::map_host_device_oom_and_lost_err(other)),
                            }
                        }
                        None => {
                            crate::hal_usage_error(format!(
                                "no signals reached value {wait_value}"
                            ));
                        }
                    }
                }
            }
        }
    }
}
2803
/// Every descriptor-allocator failure maps to an out-of-memory device
/// error: host/device OOM directly, and pool fragmentation because it
/// likewise leaves no usable space for the allocation.
impl From<gpu_descriptor::AllocationError> for crate::DeviceError {
    fn from(error: gpu_descriptor::AllocationError) -> Self {
        use gpu_descriptor::AllocationError as Ae;
        match error {
            Ae::OutOfDeviceMemory | Ae::OutOfHostMemory | Ae::Fragmentation => Self::OutOfMemory,
        }
    }
}
2812
/// Aborts with a panic for Vulkan result codes the calling code does not
/// expect and cannot map to a `DeviceError`.
fn handle_unexpected(err: vk::Result) -> ! {
    panic!("Unexpected Vulkan error: `{err}`")
}
2822
/// A raw Vulkan image paired with its memory requirements, before any
/// device memory has been allocated and bound to it.
struct ImageWithoutMemory {
    // Raw image handle; not yet backed by memory.
    raw: vk::Image,
    // Size/alignment/memory-type requirements reported for `raw`.
    requirements: vk::MemoryRequirements,
}