1use alloc::{borrow::ToOwned as _, collections::BTreeMap, ffi::CString, sync::Arc, vec::Vec};
2use core::{
3 ffi::CStr,
4 mem::{self, MaybeUninit},
5 num::NonZeroU32,
6 ptr,
7 time::Duration,
8};
9
10use arrayvec::ArrayVec;
11use ash::{ext, vk};
12use hashbrown::hash_map::Entry;
13use parking_lot::Mutex;
14
15use super::{conv, RawTlasInstance};
16use crate::TlasInstance;
17
18impl super::DeviceShared {
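    /// Attaches a human-readable debug name to a Vulkan object via
    /// `VK_EXT_debug_utils`. This is a no-op when the extension is not loaded.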
19 pub(super) unsafe fn set_object_name(&self, object: impl vk::Handle, name: &str) {
37 let Some(extension) = self.extension_fns.debug_utils.as_ref() else {
38 return;
39 };
40
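        // Build a NUL-terminated copy of `name`: short names go through a
        // fixed-size stack buffer, longer names fall back to a heap allocation.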
41 let mut buffer: [u8; 64] = [0u8; 64];
44 let buffer_vec: Vec<u8>;
45
46 let name_bytes = if name.len() < buffer.len() {
48 buffer[..name.len()].copy_from_slice(name.as_bytes());
50 buffer[name.len()] = 0;
52 &buffer[..name.len() + 1]
53 } else {
54 buffer_vec = name
57 .as_bytes()
58 .iter()
59 .cloned()
60 .chain(core::iter::once(0))
61 .collect();
62 &buffer_vec
63 };
64
65 let name = CStr::from_bytes_until_nul(name_bytes).expect("We have added a null byte");
66
67 let _result = unsafe {
68 extension.set_debug_utils_object_name(
69 &vk::DebugUtilsObjectNameInfoEXT::default()
70 .object_handle(object)
71 .object_name(name),
72 )
73 };
74 }
75
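    /// Returns a render pass compatible with `key`, creating it and caching it
    /// in `self.render_passes` on first use.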
76 pub fn make_render_pass(
77 &self,
78 key: super::RenderPassKey,
79 ) -> Result<vk::RenderPass, crate::DeviceError> {
80 Ok(match self.render_passes.lock().entry(key) {
81 Entry::Occupied(e) => *e.get(),
82 Entry::Vacant(e) => {
83 let super::RenderPassKey {
84 ref colors,
85 ref depth_stencil,
86 sample_count,
87 multiview_mask,
88 } = *e.key();
89
90 let mut vk_attachments = Vec::new();
91 let mut color_refs = Vec::with_capacity(colors.len());
92 let mut resolve_refs = Vec::with_capacity(color_refs.capacity());
93 let mut ds_ref = None;
94 let samples = vk::SampleCountFlags::from_raw(sample_count);
95 let unused = vk::AttachmentReference {
96 attachment: vk::ATTACHMENT_UNUSED,
97 layout: vk::ImageLayout::UNDEFINED,
98 };
99 for cat in colors.iter() {
100 let (color_ref, resolve_ref) =
101 if let Some(super::ColorAttachmentKey { base, resolve }) = cat {
102 let super::AttachmentKey {
103 format,
104 layout,
105 ops,
106 } = *base;
107
108 let color_ref = vk::AttachmentReference {
109 attachment: vk_attachments.len() as u32,
110 layout,
111 };
112 vk_attachments.push({
113 let (load_op, store_op) = conv::map_attachment_ops(ops);
114 vk::AttachmentDescription::default()
115 .format(format)
116 .samples(samples)
117 .load_op(load_op)
118 .store_op(store_op)
119 .initial_layout(layout)
120 .final_layout(layout)
121 });
122 let resolve_ref = if let Some(rat) = resolve {
123 let super::AttachmentKey {
124 format,
125 layout,
126 ops,
127 } = *rat;
128
129 let (load_op, store_op) = conv::map_attachment_ops(ops);
130 let vk_attachment = vk::AttachmentDescription::default()
131 .format(format)
132 .samples(vk::SampleCountFlags::TYPE_1)
133 .load_op(load_op)
134 .store_op(store_op)
135 .initial_layout(layout)
136 .final_layout(layout);
137 vk_attachments.push(vk_attachment);
138
139 vk::AttachmentReference {
140 attachment: vk_attachments.len() as u32 - 1,
141 layout,
142 }
143 } else {
144 unused
145 };
146
147 (color_ref, resolve_ref)
148 } else {
149 (unused, unused)
150 };
151
152 color_refs.push(color_ref);
153 resolve_refs.push(resolve_ref);
154 }
155
156 if let Some(ds) = depth_stencil {
157 let super::DepthStencilAttachmentKey {
158 ref base,
159 stencil_ops,
160 } = *ds;
161
162 let super::AttachmentKey {
163 format,
164 layout,
165 ops,
166 } = *base;
167
168 ds_ref = Some(vk::AttachmentReference {
169 attachment: vk_attachments.len() as u32,
170 layout,
171 });
172 let (load_op, store_op) = conv::map_attachment_ops(ops);
173 let (stencil_load_op, stencil_store_op) = conv::map_attachment_ops(stencil_ops);
174 let vk_attachment = vk::AttachmentDescription::default()
175 .format(format)
176 .samples(samples)
177 .load_op(load_op)
178 .store_op(store_op)
179 .stencil_load_op(stencil_load_op)
180 .stencil_store_op(stencil_store_op)
181 .initial_layout(layout)
182 .final_layout(layout);
183 vk_attachments.push(vk_attachment);
184 }
185
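                    // Some drivers mishandle a non-null but empty resolve attachment
                    // list; the workaround below nulls out the pointer instead.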
186 let vk_subpasses = [{
187 let mut vk_subpass = vk::SubpassDescription::default()
188 .pipeline_bind_point(vk::PipelineBindPoint::GRAPHICS)
189 .color_attachments(&color_refs)
190 .resolve_attachments(&resolve_refs);
191
192 if self
193 .workarounds
194 .contains(super::Workarounds::EMPTY_RESOLVE_ATTACHMENT_LISTS)
195 && resolve_refs.is_empty()
196 {
197 vk_subpass.p_resolve_attachments = ptr::null();
198 }
199
200 if let Some(ref reference) = ds_ref {
201 vk_subpass = vk_subpass.depth_stencil_attachment(reference)
202 }
203 vk_subpass
204 }];
205
206 let mut vk_info = vk::RenderPassCreateInfo::default()
207 .attachments(&vk_attachments)
208 .subpasses(&vk_subpasses);
209
210 let mut multiview_info;
211 let mask;
212 if let Some(multiview_mask) = multiview_mask {
213 mask = [multiview_mask.get()];
214
215 multiview_info = vk::RenderPassMultiviewCreateInfoKHR::default()
217 .view_masks(&mask)
218 .correlation_masks(&mask);
219 vk_info = vk_info.push_next(&mut multiview_info);
220 }
221
222 let raw = unsafe {
223 self.raw
224 .create_render_pass(&vk_info, None)
225 .map_err(super::map_host_device_oom_err)?
226 };
227
228 *e.insert(raw)
229 }
230 })
231 }
232
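    /// Builds `vk::MappedMemoryRange`s for flushing/invalidating the given buffer
    /// ranges, expanded to the non-coherent map alignment. Returns `None` when the
    /// buffer has no managed memory block (e.g. externally owned buffers).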
233 fn make_memory_ranges<'a, I: 'a + Iterator<Item = crate::MemoryRange>>(
234 &self,
235 buffer: &'a super::Buffer,
236 ranges: I,
237 ) -> Option<impl 'a + Iterator<Item = vk::MappedMemoryRange<'a>>> {
238 let block = buffer.block.as_ref()?.lock();
239 let mask = self.private_caps.non_coherent_map_mask;
240 Some(ranges.map(move |range| {
241 vk::MappedMemoryRange::default()
242 .memory(*block.memory())
243 .offset((block.offset() + range.start) & !mask)
244 .size((range.end - range.start + mask) & !mask)
245 }))
246 }
247}
248
249impl gpu_alloc::MemoryDevice<vk::DeviceMemory> for super::DeviceShared {
250 unsafe fn allocate_memory(
251 &self,
252 size: u64,
253 memory_type: u32,
254 flags: gpu_alloc::AllocationFlags,
255 ) -> Result<vk::DeviceMemory, gpu_alloc::OutOfMemory> {
256 let mut info = vk::MemoryAllocateInfo::default()
257 .allocation_size(size)
258 .memory_type_index(memory_type);
259
260 let mut info_flags;
261
262 if flags.contains(gpu_alloc::AllocationFlags::DEVICE_ADDRESS) {
263 info_flags = vk::MemoryAllocateFlagsInfo::default()
264 .flags(vk::MemoryAllocateFlags::DEVICE_ADDRESS);
265 info = info.push_next(&mut info_flags);
266 }
267
268 match unsafe { self.raw.allocate_memory(&info, None) } {
269 Ok(memory) => {
270 self.memory_allocations_counter.add(1);
271 Ok(memory)
272 }
273 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
274 Err(gpu_alloc::OutOfMemory::OutOfDeviceMemory)
275 }
276 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => {
277 Err(gpu_alloc::OutOfMemory::OutOfHostMemory)
278 }
279 Err(err) => handle_unexpected(err),
284 }
285 }
286
287 unsafe fn deallocate_memory(&self, memory: vk::DeviceMemory) {
288 self.memory_allocations_counter.sub(1);
289
290 unsafe { self.raw.free_memory(memory, None) };
291 }
292
293 unsafe fn map_memory(
294 &self,
295 memory: &mut vk::DeviceMemory,
296 offset: u64,
297 size: u64,
298 ) -> Result<ptr::NonNull<u8>, gpu_alloc::DeviceMapError> {
299 match unsafe {
300 self.raw
301 .map_memory(*memory, offset, size, vk::MemoryMapFlags::empty())
302 } {
303 Ok(ptr) => Ok(ptr::NonNull::new(ptr.cast::<u8>())
304 .expect("Pointer to memory mapping must not be null")),
305 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
306 Err(gpu_alloc::DeviceMapError::OutOfDeviceMemory)
307 }
308 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => {
309 Err(gpu_alloc::DeviceMapError::OutOfHostMemory)
310 }
311 Err(vk::Result::ERROR_MEMORY_MAP_FAILED) => Err(gpu_alloc::DeviceMapError::MapFailed),
312 Err(err) => handle_unexpected(err),
313 }
314 }
315
316 unsafe fn unmap_memory(&self, memory: &mut vk::DeviceMemory) {
317 unsafe { self.raw.unmap_memory(*memory) };
318 }
319
320 unsafe fn invalidate_memory_ranges(
321 &self,
322 _ranges: &[gpu_alloc::MappedMemoryRange<'_, vk::DeviceMemory>],
323 ) -> Result<(), gpu_alloc::OutOfMemory> {
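        // Not expected to be reached: wgpu-hal invalidates mapped ranges itself via
        // `invalidate_mapped_ranges` on the device rather than through gpu_alloc.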
324 unimplemented!()
326 }
327
328 unsafe fn flush_memory_ranges(
329 &self,
330 _ranges: &[gpu_alloc::MappedMemoryRange<'_, vk::DeviceMemory>],
331 ) -> Result<(), gpu_alloc::OutOfMemory> {
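        // Not expected to be reached either; see `invalidate_memory_ranges` above.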
332 unimplemented!()
334 }
335}
336
337impl
338 gpu_descriptor::DescriptorDevice<vk::DescriptorSetLayout, vk::DescriptorPool, vk::DescriptorSet>
339 for super::DeviceShared
340{
341 unsafe fn create_descriptor_pool(
342 &self,
343 descriptor_count: &gpu_descriptor::DescriptorTotalCount,
344 max_sets: u32,
345 flags: gpu_descriptor::DescriptorPoolCreateFlags,
346 ) -> Result<vk::DescriptorPool, gpu_descriptor::CreatePoolError> {
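        // Collect per-type descriptor counts; zero counts are filtered out below,
        // since Vulkan requires every pool size to be non-zero.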
347 let unfiltered_counts = [
349 (vk::DescriptorType::SAMPLER, descriptor_count.sampler),
350 (
351 vk::DescriptorType::SAMPLED_IMAGE,
352 descriptor_count.sampled_image,
353 ),
354 (
355 vk::DescriptorType::STORAGE_IMAGE,
356 descriptor_count.storage_image,
357 ),
358 (
359 vk::DescriptorType::UNIFORM_BUFFER,
360 descriptor_count.uniform_buffer,
361 ),
362 (
363 vk::DescriptorType::UNIFORM_BUFFER_DYNAMIC,
364 descriptor_count.uniform_buffer_dynamic,
365 ),
366 (
367 vk::DescriptorType::STORAGE_BUFFER,
368 descriptor_count.storage_buffer,
369 ),
370 (
371 vk::DescriptorType::STORAGE_BUFFER_DYNAMIC,
372 descriptor_count.storage_buffer_dynamic,
373 ),
374 (
375 vk::DescriptorType::ACCELERATION_STRUCTURE_KHR,
376 descriptor_count.acceleration_structure,
377 ),
378 ];
379
380 let filtered_counts = unfiltered_counts
381 .iter()
382 .cloned()
383 .filter(|&(_, count)| count != 0)
384 .map(|(ty, count)| vk::DescriptorPoolSize {
385 ty,
386 descriptor_count: count,
387 })
388 .collect::<ArrayVec<_, 8>>();
389
390 let mut vk_flags =
391 if flags.contains(gpu_descriptor::DescriptorPoolCreateFlags::UPDATE_AFTER_BIND) {
392 vk::DescriptorPoolCreateFlags::UPDATE_AFTER_BIND
393 } else {
394 vk::DescriptorPoolCreateFlags::empty()
395 };
396 if flags.contains(gpu_descriptor::DescriptorPoolCreateFlags::FREE_DESCRIPTOR_SET) {
397 vk_flags |= vk::DescriptorPoolCreateFlags::FREE_DESCRIPTOR_SET;
398 }
399 let vk_info = vk::DescriptorPoolCreateInfo::default()
400 .max_sets(max_sets)
401 .flags(vk_flags)
402 .pool_sizes(&filtered_counts);
403
404 match unsafe { self.raw.create_descriptor_pool(&vk_info, None) } {
405 Ok(pool) => Ok(pool),
406 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => {
407 Err(gpu_descriptor::CreatePoolError::OutOfHostMemory)
408 }
409 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
410 Err(gpu_descriptor::CreatePoolError::OutOfDeviceMemory)
411 }
412 Err(vk::Result::ERROR_FRAGMENTATION) => {
413 Err(gpu_descriptor::CreatePoolError::Fragmentation)
414 }
415 Err(err) => handle_unexpected(err),
416 }
417 }
418
419 unsafe fn destroy_descriptor_pool(&self, pool: vk::DescriptorPool) {
420 unsafe { self.raw.destroy_descriptor_pool(pool, None) }
421 }
422
423 unsafe fn alloc_descriptor_sets<'a>(
424 &self,
425 pool: &mut vk::DescriptorPool,
426 layouts: impl ExactSizeIterator<Item = &'a vk::DescriptorSetLayout>,
427 sets: &mut impl Extend<vk::DescriptorSet>,
428 ) -> Result<(), gpu_descriptor::DeviceAllocationError> {
429 let result = unsafe {
430 self.raw.allocate_descriptor_sets(
431 &vk::DescriptorSetAllocateInfo::default()
432 .descriptor_pool(*pool)
433 .set_layouts(
434 &smallvec::SmallVec::<[vk::DescriptorSetLayout; 32]>::from_iter(
435 layouts.cloned(),
436 ),
437 ),
438 )
439 };
440
441 match result {
442 Ok(vk_sets) => {
443 sets.extend(vk_sets);
444 Ok(())
445 }
446 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY)
447 | Err(vk::Result::ERROR_OUT_OF_POOL_MEMORY) => {
448 Err(gpu_descriptor::DeviceAllocationError::OutOfHostMemory)
449 }
450 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
451 Err(gpu_descriptor::DeviceAllocationError::OutOfDeviceMemory)
452 }
453 Err(vk::Result::ERROR_FRAGMENTED_POOL) => {
454 Err(gpu_descriptor::DeviceAllocationError::FragmentedPool)
455 }
456 Err(err) => handle_unexpected(err),
457 }
458 }
459
460 unsafe fn dealloc_descriptor_sets<'a>(
461 &self,
462 pool: &mut vk::DescriptorPool,
463 sets: impl Iterator<Item = vk::DescriptorSet>,
464 ) {
465 let result = unsafe {
466 self.raw.free_descriptor_sets(
467 *pool,
468 &smallvec::SmallVec::<[vk::DescriptorSet; 32]>::from_iter(sets),
469 )
470 };
471 match result {
472 Ok(()) => {}
473 Err(err) => handle_unexpected(err),
474 }
475 }
476}
477
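/// A compiled shader stage: the stage create info, the `CString` its `p_name`
/// pointer borrows from, and, for intermediate (naga) modules, the temporary
/// `vk::ShaderModule` that is presumably destroyed once the pipeline is built.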
478struct CompiledStage {
479 create_info: vk::PipelineShaderStageCreateInfo<'static>,
480 _entry_point: CString,
481 temp_raw_module: Option<vk::ShaderModule>,
482}
483
484impl super::Device {
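    /// Wraps an externally created `vk::Image` into a [`super::Texture`] without
    /// allocating or binding any memory; `drop_callback`, if provided, takes over
    /// responsibility for destroying the image.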
485 pub unsafe fn texture_from_raw(
492 &self,
493 vk_image: vk::Image,
494 desc: &crate::TextureDescriptor,
495 drop_callback: Option<crate::DropCallback>,
496 ) -> super::Texture {
497 let mut raw_flags = vk::ImageCreateFlags::empty();
498 let mut view_formats = vec![];
499 for tf in desc.view_formats.iter() {
500 if *tf == desc.format {
501 continue;
502 }
503 view_formats.push(*tf);
504 }
505 if !view_formats.is_empty() {
506 raw_flags |=
507 vk::ImageCreateFlags::MUTABLE_FORMAT | vk::ImageCreateFlags::EXTENDED_USAGE;
508 view_formats.push(desc.format)
509 }
510 if desc.format.is_multi_planar_format() {
511 raw_flags |= vk::ImageCreateFlags::MUTABLE_FORMAT;
512 }
513
514 let identity = self.shared.texture_identity_factory.next();
515
516 let drop_guard = crate::DropGuard::from_option(drop_callback);
517
518 super::Texture {
519 raw: vk_image,
520 drop_guard,
521 external_memory: None,
522 block: None,
523 format: desc.format,
524 copy_size: desc.copy_extent(),
525 identity,
526 }
527 }
528
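    /// Finds the first memory type that is allowed by `type_bits_req` and has all
    /// of the properties in `flags_req`.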
529 fn find_memory_type_index(
530 &self,
531 type_bits_req: u32,
532 flags_req: vk::MemoryPropertyFlags,
533 ) -> Option<usize> {
534 let mem_properties = unsafe {
535 self.shared
536 .instance
537 .raw
538 .get_physical_device_memory_properties(self.shared.physical_device)
539 };
540
541 for (i, mem_ty) in mem_properties.memory_types_as_slice().iter().enumerate() {
543 let types_bits = 1 << i;
544 let is_required_memory_type = type_bits_req & types_bits != 0;
545 let has_required_properties = mem_ty.property_flags & flags_req == flags_req;
546 if is_required_memory_type && has_required_properties {
547 return Some(i);
548 }
549 }
550
551 None
552 }
553
554 fn create_image_without_memory(
555 &self,
556 desc: &crate::TextureDescriptor,
557 external_memory_image_create_info: Option<&mut vk::ExternalMemoryImageCreateInfo>,
558 ) -> Result<ImageWithoutMemory, crate::DeviceError> {
559 let copy_size = desc.copy_extent();
560
561 let mut raw_flags = vk::ImageCreateFlags::empty();
562 if desc.dimension == wgt::TextureDimension::D3
563 && desc.usage.contains(wgt::TextureUses::COLOR_TARGET)
564 {
565 raw_flags |= vk::ImageCreateFlags::TYPE_2D_ARRAY_COMPATIBLE;
566 }
567 if desc.is_cube_compatible() {
568 raw_flags |= vk::ImageCreateFlags::CUBE_COMPATIBLE;
569 }
570
571 let original_format = self.shared.private_caps.map_texture_format(desc.format);
572 let mut vk_view_formats = vec![];
573 if !desc.view_formats.is_empty() {
574 raw_flags |= vk::ImageCreateFlags::MUTABLE_FORMAT;
575
576 if self.shared.private_caps.image_format_list {
577 vk_view_formats = desc
578 .view_formats
579 .iter()
580 .map(|f| self.shared.private_caps.map_texture_format(*f))
581 .collect();
582 vk_view_formats.push(original_format)
583 }
584 }
585 if desc.format.is_multi_planar_format() {
586 raw_flags |=
587 vk::ImageCreateFlags::MUTABLE_FORMAT | vk::ImageCreateFlags::EXTENDED_USAGE;
588 }
589
590 let mut vk_info = vk::ImageCreateInfo::default()
591 .flags(raw_flags)
592 .image_type(conv::map_texture_dimension(desc.dimension))
593 .format(original_format)
594 .extent(conv::map_copy_extent(©_size))
595 .mip_levels(desc.mip_level_count)
596 .array_layers(desc.array_layer_count())
597 .samples(vk::SampleCountFlags::from_raw(desc.sample_count))
598 .tiling(vk::ImageTiling::OPTIMAL)
599 .usage(conv::map_texture_usage(desc.usage))
600 .sharing_mode(vk::SharingMode::EXCLUSIVE)
601 .initial_layout(vk::ImageLayout::UNDEFINED);
602
603 let mut format_list_info = vk::ImageFormatListCreateInfo::default();
604 if !vk_view_formats.is_empty() {
605 format_list_info = format_list_info.view_formats(&vk_view_formats);
606 vk_info = vk_info.push_next(&mut format_list_info);
607 }
608
609 if let Some(ext_info) = external_memory_image_create_info {
610 vk_info = vk_info.push_next(ext_info);
611 }
612
613 let raw = unsafe { self.shared.raw.create_image(&vk_info, None) }.map_err(map_err)?;
614 fn map_err(err: vk::Result) -> crate::DeviceError {
615 super::map_host_device_oom_and_ioca_err(err)
618 }
619 let mut req = unsafe { self.shared.raw.get_image_memory_requirements(raw) };
620
621 if desc.usage.contains(wgt::TextureUses::TRANSIENT) {
622 let mem_type_index = self.find_memory_type_index(
623 req.memory_type_bits,
624 vk::MemoryPropertyFlags::LAZILY_ALLOCATED,
625 );
626 if let Some(mem_type_index) = mem_type_index {
627 req.memory_type_bits = 1 << mem_type_index;
628 }
629 }
630
631 Ok(ImageWithoutMemory {
632 raw,
633 requirements: req,
634 copy_size,
635 })
636 }
637
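    /// Imports a D3D11 shared handle as external memory
    /// (`VK_KHR_external_memory_win32`) and binds it to a newly created image
    /// through a dedicated allocation.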
638 #[cfg(windows)]
644 pub unsafe fn texture_from_d3d11_shared_handle(
645 &self,
646 d3d11_shared_handle: windows::Win32::Foundation::HANDLE,
647 desc: &crate::TextureDescriptor,
648 ) -> Result<super::Texture, crate::DeviceError> {
649 if !self
650 .shared
651 .features
652 .contains(wgt::Features::VULKAN_EXTERNAL_MEMORY_WIN32)
653 {
654 log::error!("Vulkan driver does not support VK_KHR_external_memory_win32");
655 return Err(crate::DeviceError::Unexpected);
656 }
657
658 let mut external_memory_image_info = vk::ExternalMemoryImageCreateInfo::default()
659 .handle_types(vk::ExternalMemoryHandleTypeFlags::D3D11_TEXTURE);
660
661 let image =
662 self.create_image_without_memory(desc, Some(&mut external_memory_image_info))?;
663
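        // The dedicated-allocation info is chained behind the import info by
        // writing `p_next` directly; presumably the typed `push_next` API cannot
        // express this particular chain.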
664 let mut dedicated_allocate_info =
667 vk::MemoryDedicatedAllocateInfo::default().image(image.raw);
668
669 let mut import_memory_info = vk::ImportMemoryWin32HandleInfoKHR::default()
670 .handle_type(vk::ExternalMemoryHandleTypeFlags::D3D11_TEXTURE)
671 .handle(d3d11_shared_handle.0 as _);
672 #[allow(clippy::unnecessary_mut_passed)]
674 {
675 import_memory_info.p_next = <*const _>::cast(&mut dedicated_allocate_info);
676 }
677
678 let mem_type_index = self
679 .find_memory_type_index(
680 image.requirements.memory_type_bits,
681 vk::MemoryPropertyFlags::DEVICE_LOCAL,
682 )
683 .ok_or(crate::DeviceError::Unexpected)?;
684
685 let memory_allocate_info = vk::MemoryAllocateInfo::default()
686 .allocation_size(image.requirements.size)
687 .memory_type_index(mem_type_index as _)
688 .push_next(&mut import_memory_info);
689 let memory = unsafe { self.shared.raw.allocate_memory(&memory_allocate_info, None) }
690 .map_err(super::map_host_device_oom_err)?;
691
692 unsafe { self.shared.raw.bind_image_memory(image.raw, memory, 0) }
693 .map_err(super::map_host_device_oom_err)?;
694
695 if let Some(label) = desc.label {
696 unsafe { self.shared.set_object_name(image.raw, label) };
697 }
698
699 let identity = self.shared.texture_identity_factory.next();
700
701 self.counters.textures.add(1);
702
703 Ok(super::Texture {
704 raw: image.raw,
705 drop_guard: None,
706 external_memory: Some(memory),
707 block: None,
708 format: desc.format,
709 copy_size: image.copy_size,
710 identity,
711 })
712 }
713
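    /// Creates a `vk::ShaderModule` from SPIR-V words and applies the label, if
    /// any, as its debug name.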
714 fn create_shader_module_impl(
715 &self,
716 spv: &[u32],
717 label: &crate::Label<'_>,
718 ) -> Result<vk::ShaderModule, crate::DeviceError> {
719 let vk_info = vk::ShaderModuleCreateInfo::default()
720 .flags(vk::ShaderModuleCreateFlags::empty())
721 .code(spv);
722
723 let raw = unsafe {
724 profiling::scope!("vkCreateShaderModule");
725 self.shared
726 .raw
727 .create_shader_module(&vk_info, None)
728 .map_err(map_err)?
729 };
730 fn map_err(err: vk::Result) -> crate::DeviceError {
731 super::map_host_device_oom_err(err)
734 }
735
736 if let Some(label) = label {
737 unsafe { self.shared.set_object_name(raw, label) };
738 }
739
740 Ok(raw)
741 }
742
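    /// Compiles one programmable stage: raw SPIR-V modules are used as-is, while
    /// naga modules are translated to SPIR-V here with per-stage options (bounds
    /// checks, binding map, debug info, workgroup zero-initialization) after
    /// pipeline-constant overrides are applied.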
743 fn compile_stage(
744 &self,
745 stage: &crate::ProgrammableStage<super::ShaderModule>,
746 naga_stage: naga::ShaderStage,
747 binding_map: &naga::back::spv::BindingMap,
748 ) -> Result<CompiledStage, crate::PipelineError> {
749 let stage_flags = crate::auxil::map_naga_stage(naga_stage);
750 let vk_module = match *stage.module {
751 super::ShaderModule::Raw(raw) => raw,
752 super::ShaderModule::Intermediate {
753 ref naga_shader,
754 runtime_checks,
755 } => {
756 let pipeline_options = naga::back::spv::PipelineOptions {
757 entry_point: stage.entry_point.to_owned(),
758 shader_stage: naga_stage,
759 };
760 let needs_temp_options = !runtime_checks.bounds_checks
761 || !runtime_checks.force_loop_bounding
762 || !binding_map.is_empty()
763 || naga_shader.debug_source.is_some()
764 || !stage.zero_initialize_workgroup_memory;
765 let mut temp_options;
766 let options = if needs_temp_options {
767 temp_options = self.naga_options.clone();
768 if !runtime_checks.bounds_checks {
769 temp_options.bounds_check_policies = naga::proc::BoundsCheckPolicies {
770 index: naga::proc::BoundsCheckPolicy::Unchecked,
771 buffer: naga::proc::BoundsCheckPolicy::Unchecked,
772 image_load: naga::proc::BoundsCheckPolicy::Unchecked,
773 binding_array: naga::proc::BoundsCheckPolicy::Unchecked,
774 };
775 }
776 if !runtime_checks.force_loop_bounding {
777 temp_options.force_loop_bounding = false;
778 }
779 if !binding_map.is_empty() {
780 temp_options.binding_map = binding_map.clone();
781 }
782
783 if let Some(ref debug) = naga_shader.debug_source {
784 temp_options.debug_info = Some(naga::back::spv::DebugInfo {
785 source_code: &debug.source_code,
786 file_name: debug.file_name.as_ref(),
787 language: naga::back::spv::SourceLanguage::WGSL,
788 })
789 }
790 if !stage.zero_initialize_workgroup_memory {
791 temp_options.zero_initialize_workgroup_memory =
792 naga::back::spv::ZeroInitializeWorkgroupMemoryMode::None;
793 }
794
795 &temp_options
796 } else {
797 &self.naga_options
798 };
799
800 let (module, info) = naga::back::pipeline_constants::process_overrides(
801 &naga_shader.module,
802 &naga_shader.info,
803 Some((naga_stage, stage.entry_point)),
804 stage.constants,
805 )
806 .map_err(|e| {
807 crate::PipelineError::PipelineConstants(stage_flags, format!("{e}"))
808 })?;
809
810 let spv = {
811 profiling::scope!("naga::spv::write_vec");
812 naga::back::spv::write_vec(&module, &info, options, Some(&pipeline_options))
813 }
814 .map_err(|e| crate::PipelineError::Linkage(stage_flags, format!("{e}")))?;
815 self.create_shader_module_impl(&spv, &None)?
816 }
817 };
818
819 let mut flags = vk::PipelineShaderStageCreateFlags::empty();
820 if self.shared.features.contains(wgt::Features::SUBGROUP) {
821 flags |= vk::PipelineShaderStageCreateFlags::ALLOW_VARYING_SUBGROUP_SIZE
822 }
823
824 let entry_point = CString::new(stage.entry_point).unwrap();
825 let mut create_info = vk::PipelineShaderStageCreateInfo::default()
826 .flags(flags)
827 .stage(conv::map_shader_stage(stage_flags))
828 .module(vk_module);
829
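        // Set the entry point name through the raw pointer; the `CString` stays
        // alive in `CompiledStage::_entry_point`.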
830 create_info.p_name = entry_point.as_ptr();
832
833 Ok(CompiledStage {
834 create_info,
835 _entry_point: entry_point,
836 temp_raw_module: match *stage.module {
837 super::ShaderModule::Raw(_) => None,
838 super::ShaderModule::Intermediate { .. } => Some(vk_module),
839 },
840 })
841 }
842
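    /// Returns the queue family index of the device's internal queue.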
843 pub fn queue_family_index(&self) -> u32 {
849 self.shared.family_index
850 }
851
852 pub fn queue_index(&self) -> u32 {
853 self.shared.queue_index
854 }
855
856 pub fn raw_device(&self) -> &ash::Device {
857 &self.shared.raw
858 }
859
860 pub fn raw_physical_device(&self) -> vk::PhysicalDevice {
861 self.shared.physical_device
862 }
863
864 pub fn raw_queue(&self) -> vk::Queue {
865 self.shared.raw_queue
866 }
867
868 pub fn enabled_device_extensions(&self) -> &[&'static CStr] {
869 &self.shared.enabled_extensions
870 }
871
872 pub fn shared_instance(&self) -> &super::InstanceShared {
873 &self.shared.instance
874 }
875
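    /// If `VK_EXT_memory_budget` is enabled and a resource-creation threshold is
    /// configured, returns `OutOfMemory` when allocating `size` bytes would push
    /// any eligible heap past that percentage of its budget; otherwise returns
    /// `Ok(())`.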
876 fn error_if_would_oom_on_resource_allocation(
877 &self,
878 needs_host_access: bool,
879 size: u64,
880 ) -> Result<(), crate::DeviceError> {
881 let Some(threshold) = self
882 .shared
883 .instance
884 .memory_budget_thresholds
885 .for_resource_creation
886 else {
887 return Ok(());
888 };
889
890 if !self
891 .shared
892 .enabled_extensions
893 .contains(&ext::memory_budget::NAME)
894 {
895 return Ok(());
896 }
897
898 let get_physical_device_properties = self
899 .shared
900 .instance
901 .get_physical_device_properties
902 .as_ref()
903 .unwrap();
904
905 let mut memory_budget_properties = vk::PhysicalDeviceMemoryBudgetPropertiesEXT::default();
906
907 let mut memory_properties =
908 vk::PhysicalDeviceMemoryProperties2::default().push_next(&mut memory_budget_properties);
909
910 unsafe {
911 get_physical_device_properties.get_physical_device_memory_properties2(
912 self.shared.physical_device,
913 &mut memory_properties,
914 );
915 }
916
917 let mut host_visible_heaps = [false; vk::MAX_MEMORY_HEAPS];
918 let mut device_local_heaps = [false; vk::MAX_MEMORY_HEAPS];
919
920 let memory_properties = memory_properties.memory_properties;
921
922 for i in 0..memory_properties.memory_type_count {
923 let memory_type = memory_properties.memory_types[i as usize];
924 let flags = memory_type.property_flags;
925
926 if flags.intersects(
927 vk::MemoryPropertyFlags::LAZILY_ALLOCATED | vk::MemoryPropertyFlags::PROTECTED,
928 ) {
                continue;
            }
931
932 if flags.contains(vk::MemoryPropertyFlags::HOST_VISIBLE) {
933 host_visible_heaps[memory_type.heap_index as usize] = true;
934 }
935
936 if flags.contains(vk::MemoryPropertyFlags::DEVICE_LOCAL) {
937 device_local_heaps[memory_type.heap_index as usize] = true;
938 }
939 }
940
941 let heaps = if needs_host_access {
942 host_visible_heaps
943 } else {
944 device_local_heaps
945 };
946
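        // Fail if any heap we might allocate from would exceed `threshold` percent
        // of its reported budget.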
947 for (i, check) in heaps.iter().enumerate() {
952 if !check {
953 continue;
954 }
955
956 let heap_usage = memory_budget_properties.heap_usage[i];
957 let heap_budget = memory_budget_properties.heap_budget[i];
958
959 if heap_usage + size >= heap_budget / 100 * threshold as u64 {
960 return Err(crate::DeviceError::OutOfMemory);
961 }
962 }
963
964 Ok(())
965 }
966}
967
968impl crate::Device for super::Device {
969 type A = super::Api;
970
971 unsafe fn create_buffer(
972 &self,
973 desc: &crate::BufferDescriptor,
974 ) -> Result<super::Buffer, crate::DeviceError> {
975 let vk_info = vk::BufferCreateInfo::default()
976 .size(desc.size)
977 .usage(conv::map_buffer_usage(desc.usage))
978 .sharing_mode(vk::SharingMode::EXCLUSIVE);
979
980 let raw = unsafe {
981 self.shared
982 .raw
983 .create_buffer(&vk_info, None)
984 .map_err(super::map_host_device_oom_and_ioca_err)?
985 };
986 let req = unsafe { self.shared.raw.get_buffer_memory_requirements(raw) };
987
988 let mut alloc_usage = if desc
989 .usage
990 .intersects(wgt::BufferUses::MAP_READ | wgt::BufferUses::MAP_WRITE)
991 {
992 let mut flags = gpu_alloc::UsageFlags::HOST_ACCESS;
993 flags.set(
995 gpu_alloc::UsageFlags::DOWNLOAD,
996 desc.usage.contains(wgt::BufferUses::MAP_READ),
997 );
998 flags.set(
999 gpu_alloc::UsageFlags::UPLOAD,
1000 desc.usage.contains(wgt::BufferUses::MAP_WRITE),
1001 );
1002 flags
1003 } else {
1004 gpu_alloc::UsageFlags::FAST_DEVICE_ACCESS
1005 };
1006 alloc_usage.set(
1007 gpu_alloc::UsageFlags::TRANSIENT,
1008 desc.memory_flags.contains(crate::MemoryFlags::TRANSIENT),
1009 );
1010
1011 let needs_host_access = alloc_usage.contains(gpu_alloc::UsageFlags::HOST_ACCESS);
1012
1013 self.error_if_would_oom_on_resource_allocation(needs_host_access, req.size)
1014 .inspect_err(|_| {
1015 unsafe { self.shared.raw.destroy_buffer(raw, None) };
1016 })?;
1017
1018 let alignment_mask = req.alignment - 1;
1019
1020 let block = unsafe {
1021 self.mem_allocator.lock().alloc(
1022 &*self.shared,
1023 gpu_alloc::Request {
1024 size: req.size,
1025 align_mask: alignment_mask,
1026 usage: alloc_usage,
1027 memory_types: req.memory_type_bits & self.valid_ash_memory_types,
1028 },
1029 )
1030 }
1031 .inspect_err(|_| {
1032 unsafe { self.shared.raw.destroy_buffer(raw, None) };
1033 })?;
1034
1035 unsafe {
1036 self.shared
1037 .raw
1038 .bind_buffer_memory(raw, *block.memory(), block.offset())
1039 }
1040 .map_err(super::map_host_device_oom_and_ioca_err)
1041 .inspect_err(|_| {
1042 unsafe { self.shared.raw.destroy_buffer(raw, None) };
1043 })?;
1044
1045 if let Some(label) = desc.label {
1046 unsafe { self.shared.set_object_name(raw, label) };
1047 }
1048
1049 self.counters.buffer_memory.add(block.size() as isize);
1050 self.counters.buffers.add(1);
1051
1052 Ok(super::Buffer {
1053 raw,
1054 block: Some(Mutex::new(super::BufferMemoryBacking::Managed(block))),
1055 })
1056 }
1057 unsafe fn destroy_buffer(&self, buffer: super::Buffer) {
1058 unsafe { self.shared.raw.destroy_buffer(buffer.raw, None) };
1059 if let Some(block) = buffer.block {
1060 let block = block.into_inner();
1061 self.counters.buffer_memory.sub(block.size() as isize);
1062 match block {
1063 super::BufferMemoryBacking::Managed(block) => unsafe {
1064 self.mem_allocator.lock().dealloc(&*self.shared, block)
1065 },
1066 super::BufferMemoryBacking::VulkanMemory { memory, .. } => unsafe {
1067 self.shared.raw.free_memory(memory, None);
1068 },
1069 }
1070 }
1071
1072 self.counters.buffers.sub(1);
1073 }
1074
1075 unsafe fn add_raw_buffer(&self, _buffer: &super::Buffer) {
1076 self.counters.buffers.add(1);
1077 }
1078
1079 unsafe fn map_buffer(
1080 &self,
1081 buffer: &super::Buffer,
1082 range: crate::MemoryRange,
1083 ) -> Result<crate::BufferMapping, crate::DeviceError> {
1084 if let Some(ref block) = buffer.block {
1085 let size = range.end - range.start;
1086 let mut block = block.lock();
1087 if let super::BufferMemoryBacking::Managed(ref mut block) = *block {
1088 let ptr = unsafe { block.map(&*self.shared, range.start, size as usize)? };
1089 let is_coherent = block
1090 .props()
1091 .contains(gpu_alloc::MemoryPropertyFlags::HOST_COHERENT);
1092 Ok(crate::BufferMapping { ptr, is_coherent })
1093 } else {
1094 crate::hal_usage_error("tried to map externally created buffer")
1095 }
1096 } else {
1097 crate::hal_usage_error("tried to map external buffer")
1098 }
1099 }
1100 unsafe fn unmap_buffer(&self, buffer: &super::Buffer) {
1101 if let Some(ref block) = buffer.block {
1102 match &mut *block.lock() {
1103 super::BufferMemoryBacking::Managed(block) => unsafe { block.unmap(&*self.shared) },
1104 super::BufferMemoryBacking::VulkanMemory { .. } => {
1105 crate::hal_usage_error("tried to unmap externally created buffer")
1106 }
1107 };
1108 } else {
1109 crate::hal_usage_error("tried to unmap external buffer")
1110 }
1111 }
1112
1113 unsafe fn flush_mapped_ranges<I>(&self, buffer: &super::Buffer, ranges: I)
1114 where
1115 I: Iterator<Item = crate::MemoryRange>,
1116 {
1117 if let Some(vk_ranges) = self.shared.make_memory_ranges(buffer, ranges) {
1118 unsafe {
1119 self.shared
1120 .raw
1121 .flush_mapped_memory_ranges(
1122 &smallvec::SmallVec::<[vk::MappedMemoryRange; 32]>::from_iter(vk_ranges),
1123 )
1124 }
1125 .unwrap();
1126 }
1127 }
1128 unsafe fn invalidate_mapped_ranges<I>(&self, buffer: &super::Buffer, ranges: I)
1129 where
1130 I: Iterator<Item = crate::MemoryRange>,
1131 {
1132 if let Some(vk_ranges) = self.shared.make_memory_ranges(buffer, ranges) {
1133 unsafe {
1134 self.shared
1135 .raw
1136 .invalidate_mapped_memory_ranges(&smallvec::SmallVec::<
1137 [vk::MappedMemoryRange; 32],
1138 >::from_iter(vk_ranges))
1139 }
1140 .unwrap();
1141 }
1142 }
1143
1144 unsafe fn create_texture(
1145 &self,
1146 desc: &crate::TextureDescriptor,
1147 ) -> Result<super::Texture, crate::DeviceError> {
1148 let image = self.create_image_without_memory(desc, None)?;
1149
1150 self.error_if_would_oom_on_resource_allocation(false, image.requirements.size)
1151 .inspect_err(|_| {
1152 unsafe { self.shared.raw.destroy_image(image.raw, None) };
1153 })?;
1154
1155 let block = unsafe {
1156 self.mem_allocator.lock().alloc(
1157 &*self.shared,
1158 gpu_alloc::Request {
1159 size: image.requirements.size,
1160 align_mask: image.requirements.alignment - 1,
1161 usage: gpu_alloc::UsageFlags::FAST_DEVICE_ACCESS,
1162 memory_types: image.requirements.memory_type_bits & self.valid_ash_memory_types,
1163 },
1164 )
1165 }
1166 .inspect_err(|_| {
1167 unsafe { self.shared.raw.destroy_image(image.raw, None) };
1168 })?;
1169
1170 self.counters.texture_memory.add(block.size() as isize);
1171
1172 unsafe {
1173 self.shared
1174 .raw
1175 .bind_image_memory(image.raw, *block.memory(), block.offset())
1176 }
1177 .map_err(super::map_host_device_oom_err)
1178 .inspect_err(|_| {
1179 unsafe { self.shared.raw.destroy_image(image.raw, None) };
1180 })?;
1181
1182 if let Some(label) = desc.label {
1183 unsafe { self.shared.set_object_name(image.raw, label) };
1184 }
1185
1186 let identity = self.shared.texture_identity_factory.next();
1187
1188 self.counters.textures.add(1);
1189
1190 Ok(super::Texture {
1191 raw: image.raw,
1192 drop_guard: None,
1193 external_memory: None,
1194 block: Some(block),
1195 format: desc.format,
1196 copy_size: image.copy_size,
1197 identity,
1198 })
1199 }
1200 unsafe fn destroy_texture(&self, texture: super::Texture) {
1201 if texture.drop_guard.is_none() {
1202 unsafe { self.shared.raw.destroy_image(texture.raw, None) };
1203 }
1204 if let Some(memory) = texture.external_memory {
1205 unsafe { self.shared.raw.free_memory(memory, None) };
1206 }
1207 if let Some(block) = texture.block {
1208 self.counters.texture_memory.sub(block.size() as isize);
1209
1210 unsafe { self.mem_allocator.lock().dealloc(&*self.shared, block) };
1211 }
1212
1213 self.counters.textures.sub(1);
1214 }
1215
1216 unsafe fn add_raw_texture(&self, _texture: &super::Texture) {
1217 self.counters.textures.add(1);
1218 }
1219
1220 unsafe fn create_texture_view(
1221 &self,
1222 texture: &super::Texture,
1223 desc: &crate::TextureViewDescriptor,
1224 ) -> Result<super::TextureView, crate::DeviceError> {
1225 let subresource_range = conv::map_subresource_range(&desc.range, texture.format);
1226 let raw_format = self.shared.private_caps.map_texture_format(desc.format);
1227 let mut vk_info = vk::ImageViewCreateInfo::default()
1228 .flags(vk::ImageViewCreateFlags::empty())
1229 .image(texture.raw)
1230 .view_type(conv::map_view_dimension(desc.dimension))
1231 .format(raw_format)
1232 .subresource_range(subresource_range);
1233 let layers =
1234 NonZeroU32::new(subresource_range.layer_count).expect("Unexpected zero layer count");
1235
1236 let mut image_view_info;
1237 if self.shared.private_caps.image_view_usage && !desc.usage.is_empty() {
1238 image_view_info =
1239 vk::ImageViewUsageCreateInfo::default().usage(conv::map_texture_usage(desc.usage));
1240 vk_info = vk_info.push_next(&mut image_view_info);
1241 }
1242
1243 let raw = unsafe { self.shared.raw.create_image_view(&vk_info, None) }
1244 .map_err(super::map_host_device_oom_and_ioca_err)?;
1245
1246 if let Some(label) = desc.label {
1247 unsafe { self.shared.set_object_name(raw, label) };
1248 }
1249
1250 let identity = self.shared.texture_view_identity_factory.next();
1251
1252 self.counters.texture_views.add(1);
1253
1254 Ok(super::TextureView {
1255 raw_texture: texture.raw,
1256 raw,
1257 _layers: layers,
1258 format: desc.format,
1259 raw_format,
1260 base_mip_level: desc.range.base_mip_level,
1261 dimension: desc.dimension,
1262 texture_identity: texture.identity,
1263 view_identity: identity,
1264 })
1265 }
1266 unsafe fn destroy_texture_view(&self, view: super::TextureView) {
1267 unsafe { self.shared.raw.destroy_image_view(view.raw, None) };
1268
1269 self.counters.texture_views.sub(1);
1270 }
1271
1272 unsafe fn create_sampler(
1273 &self,
1274 desc: &crate::SamplerDescriptor,
1275 ) -> Result<super::Sampler, crate::DeviceError> {
1276 let mut create_info = vk::SamplerCreateInfo::default()
1277 .flags(vk::SamplerCreateFlags::empty())
1278 .mag_filter(conv::map_filter_mode(desc.mag_filter))
1279 .min_filter(conv::map_filter_mode(desc.min_filter))
1280 .mipmap_mode(conv::map_mip_filter_mode(desc.mipmap_filter))
1281 .address_mode_u(conv::map_address_mode(desc.address_modes[0]))
1282 .address_mode_v(conv::map_address_mode(desc.address_modes[1]))
1283 .address_mode_w(conv::map_address_mode(desc.address_modes[2]))
1284 .min_lod(desc.lod_clamp.start)
1285 .max_lod(desc.lod_clamp.end);
1286
1287 if let Some(fun) = desc.compare {
1288 create_info = create_info
1289 .compare_enable(true)
1290 .compare_op(conv::map_comparison(fun));
1291 }
1292
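        // An anisotropy clamp of 1 disables anisotropic filtering; the
        // `samplerAnisotropy` feature is assumed to have been validated upstream.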
1293 if desc.anisotropy_clamp != 1 {
1294 create_info = create_info
1297 .anisotropy_enable(true)
1298 .max_anisotropy(desc.anisotropy_clamp as f32);
1299 }
1300
1301 if let Some(color) = desc.border_color {
1302 create_info = create_info.border_color(conv::map_border_color(color));
1303 }
1304
1305 let mut sampler_cache_guard = self.shared.sampler_cache.lock();
1306
1307 let raw = sampler_cache_guard.create_sampler(&self.shared.raw, create_info)?;
1308
1309 if let Some(label) = desc.label {
1313 unsafe { self.shared.set_object_name(raw, label) };
1316 }
1317
1318 drop(sampler_cache_guard);
1319
1320 self.counters.samplers.add(1);
1321
1322 Ok(super::Sampler { raw, create_info })
1323 }
1324 unsafe fn destroy_sampler(&self, sampler: super::Sampler) {
1325 self.shared.sampler_cache.lock().destroy_sampler(
1326 &self.shared.raw,
1327 sampler.create_info,
1328 sampler.raw,
1329 );
1330
1331 self.counters.samplers.sub(1);
1332 }
1333
1334 unsafe fn create_command_encoder(
1335 &self,
1336 desc: &crate::CommandEncoderDescriptor<super::Queue>,
1337 ) -> Result<super::CommandEncoder, crate::DeviceError> {
1338 let vk_info = vk::CommandPoolCreateInfo::default()
1339 .queue_family_index(desc.queue.family_index)
1340 .flags(vk::CommandPoolCreateFlags::TRANSIENT);
1341
1342 let raw = unsafe {
1343 self.shared
1344 .raw
1345 .create_command_pool(&vk_info, None)
1346 .map_err(super::map_host_device_oom_err)?
1347 };
1348
1349 self.counters.command_encoders.add(1);
1350
1351 Ok(super::CommandEncoder {
1352 raw,
1353 device: Arc::clone(&self.shared),
1354 active: vk::CommandBuffer::null(),
1355 bind_point: vk::PipelineBindPoint::default(),
1356 temp: super::Temp::default(),
1357 free: Vec::new(),
1358 discarded: Vec::new(),
1359 rpass_debug_marker_active: false,
1360 end_of_pass_timer_query: None,
1361 framebuffers: Default::default(),
1362 temp_texture_views: Default::default(),
1363 counters: Arc::clone(&self.counters),
1364 current_pipeline_is_multiview: false,
1365 })
1366 }
1367
1368 unsafe fn create_bind_group_layout(
1369 &self,
1370 desc: &crate::BindGroupLayoutDescriptor,
1371 ) -> Result<super::BindGroupLayout, crate::DeviceError> {
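        // Translate each entry into a Vulkan binding, record a map from wgpu
        // binding slots to the flattened Vulkan binding indices, and tally the
        // descriptor counts needed for pool allocation.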
1372 let mut vk_bindings = Vec::new();
1377 let mut binding_flags = Vec::new();
1378 let mut binding_map = Vec::new();
1379 let mut next_binding = 0;
1380 let mut contains_binding_arrays = false;
1381 let mut desc_count = gpu_descriptor::DescriptorTotalCount::default();
1382 for entry in desc.entries {
1383 if entry.count.is_some() {
1384 contains_binding_arrays = true;
1385 }
1386
1387 let partially_bound = desc
1388 .flags
1389 .contains(crate::BindGroupLayoutFlags::PARTIALLY_BOUND);
1390 let mut flags = vk::DescriptorBindingFlags::empty();
1391 if partially_bound && entry.count.is_some() {
1392 flags |= vk::DescriptorBindingFlags::PARTIALLY_BOUND;
1393 }
1394 if entry.count.is_some() {
1395 flags |= vk::DescriptorBindingFlags::UPDATE_AFTER_BIND;
1396 }
1397
1398 let count = entry.count.map_or(1, |c| c.get());
1399 match entry.ty {
1400 wgt::BindingType::ExternalTexture => unimplemented!(),
1401 _ => {
1402 vk_bindings.push(vk::DescriptorSetLayoutBinding {
1403 binding: next_binding,
1404 descriptor_type: conv::map_binding_type(entry.ty),
1405 descriptor_count: count,
1406 stage_flags: conv::map_shader_stage(entry.visibility),
1407 p_immutable_samplers: ptr::null(),
1408 _marker: Default::default(),
1409 });
1410 binding_flags.push(flags);
1411 binding_map.push((
1412 entry.binding,
1413 super::BindingInfo {
1414 binding: next_binding,
1415 binding_array_size: entry.count,
1416 },
1417 ));
1418 next_binding += 1;
1419 }
1420 }
1421
1422 match entry.ty {
1423 wgt::BindingType::Buffer {
1424 ty,
1425 has_dynamic_offset,
1426 ..
1427 } => match ty {
1428 wgt::BufferBindingType::Uniform => {
1429 if has_dynamic_offset {
1430 desc_count.uniform_buffer_dynamic += count;
1431 } else {
1432 desc_count.uniform_buffer += count;
1433 }
1434 }
1435 wgt::BufferBindingType::Storage { .. } => {
1436 if has_dynamic_offset {
1437 desc_count.storage_buffer_dynamic += count;
1438 } else {
1439 desc_count.storage_buffer += count;
1440 }
1441 }
1442 },
1443 wgt::BindingType::Sampler { .. } => {
1444 desc_count.sampler += count;
1445 }
1446 wgt::BindingType::Texture { .. } => {
1447 desc_count.sampled_image += count;
1448 }
1449 wgt::BindingType::StorageTexture { .. } => {
1450 desc_count.storage_image += count;
1451 }
1452 wgt::BindingType::AccelerationStructure { .. } => {
1453 desc_count.acceleration_structure += count;
1454 }
1455 wgt::BindingType::ExternalTexture => unimplemented!(),
1456 }
1457 }
1458
1459 let vk_info = vk::DescriptorSetLayoutCreateInfo::default()
1460 .bindings(&vk_bindings)
1461 .flags(if contains_binding_arrays {
1462 vk::DescriptorSetLayoutCreateFlags::UPDATE_AFTER_BIND_POOL
1463 } else {
1464 vk::DescriptorSetLayoutCreateFlags::empty()
1465 });
1466
1467 let mut binding_flag_info =
1468 vk::DescriptorSetLayoutBindingFlagsCreateInfo::default().binding_flags(&binding_flags);
1469
1470 let vk_info = vk_info.push_next(&mut binding_flag_info);
1471
1472 let raw = unsafe {
1473 self.shared
1474 .raw
1475 .create_descriptor_set_layout(&vk_info, None)
1476 .map_err(super::map_host_device_oom_err)?
1477 };
1478
1479 if let Some(label) = desc.label {
1480 unsafe { self.shared.set_object_name(raw, label) };
1481 }
1482
1483 self.counters.bind_group_layouts.add(1);
1484
1485 Ok(super::BindGroupLayout {
1486 raw,
1487 desc_count,
1488 entries: desc.entries.into(),
1489 binding_map,
1490 contains_binding_arrays,
1491 })
1492 }
1493 unsafe fn destroy_bind_group_layout(&self, bg_layout: super::BindGroupLayout) {
1494 unsafe {
1495 self.shared
1496 .raw
1497 .destroy_descriptor_set_layout(bg_layout.raw, None)
1498 };
1499
1500 self.counters.bind_group_layouts.sub(1);
1501 }
1502
1503 unsafe fn create_pipeline_layout(
1504 &self,
1505 desc: &crate::PipelineLayoutDescriptor<super::BindGroupLayout>,
1506 ) -> Result<super::PipelineLayout, crate::DeviceError> {
1507 let vk_set_layouts = desc
1509 .bind_group_layouts
1510 .iter()
1511 .map(|bgl| bgl.raw)
1512 .collect::<Vec<_>>();
1513 let vk_push_constant_ranges = desc
1514 .push_constant_ranges
1515 .iter()
1516 .map(|pcr| vk::PushConstantRange {
1517 stage_flags: conv::map_shader_stage(pcr.stages),
1518 offset: pcr.range.start,
1519 size: pcr.range.end - pcr.range.start,
1520 })
1521 .collect::<Vec<_>>();
1522
1523 let vk_info = vk::PipelineLayoutCreateInfo::default()
1524 .flags(vk::PipelineLayoutCreateFlags::empty())
1525 .set_layouts(&vk_set_layouts)
1526 .push_constant_ranges(&vk_push_constant_ranges);
1527
1528 let raw = {
1529 profiling::scope!("vkCreatePipelineLayout");
1530 unsafe {
1531 self.shared
1532 .raw
1533 .create_pipeline_layout(&vk_info, None)
1534 .map_err(super::map_host_device_oom_err)?
1535 }
1536 };
1537
1538 if let Some(label) = desc.label {
1539 unsafe { self.shared.set_object_name(raw, label) };
1540 }
1541
1542 let mut binding_map = BTreeMap::new();
1543 for (group, &layout) in desc.bind_group_layouts.iter().enumerate() {
1544 for &(binding, binding_info) in &layout.binding_map {
1545 binding_map.insert(
1546 naga::ResourceBinding {
1547 group: group as u32,
1548 binding,
1549 },
1550 naga::back::spv::BindingInfo {
1551 descriptor_set: group as u32,
1552 binding: binding_info.binding,
1553 binding_array_size: binding_info.binding_array_size.map(NonZeroU32::get),
1554 },
1555 );
1556 }
1557 }
1558
1559 self.counters.pipeline_layouts.add(1);
1560 Ok(super::PipelineLayout { raw, binding_map })
1561 }
1562 unsafe fn destroy_pipeline_layout(&self, pipeline_layout: super::PipelineLayout) {
1563 unsafe {
1564 self.shared
1565 .raw
1566 .destroy_pipeline_layout(pipeline_layout.raw, None)
1567 };
1568
1569 self.counters.pipeline_layouts.sub(1);
1570 }
1571
1572 unsafe fn create_bind_group(
1573 &self,
1574 desc: &crate::BindGroupDescriptor<
1575 super::BindGroupLayout,
1576 super::Buffer,
1577 super::Sampler,
1578 super::TextureView,
1579 super::AccelerationStructure,
1580 >,
1581 ) -> Result<super::BindGroup, crate::DeviceError> {
1582 let desc_set_layout_flags = if desc.layout.contains_binding_arrays {
1583 gpu_descriptor::DescriptorSetLayoutCreateFlags::UPDATE_AFTER_BIND
1584 } else {
1585 gpu_descriptor::DescriptorSetLayoutCreateFlags::empty()
1586 };
1587
1588 let mut vk_sets = unsafe {
1589 self.desc_allocator.lock().allocate(
1590 &*self.shared,
1591 &desc.layout.raw,
1592 desc_set_layout_flags,
1593 &desc.layout.desc_count,
1594 1,
1595 )?
1596 };
1597
1598 let set = vk_sets.pop().unwrap();
1599 if let Some(label) = desc.label {
1600 unsafe { self.shared.set_object_name(*set.raw(), label) };
1601 }
1602
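        // Helper that hands out disjoint references into a Vec's spare capacity
        // (reserved up-front with `with_capacity`), so the descriptor write structs
        // below can borrow stable storage without the Vec ever reallocating.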
1603 struct ExtendStack<'a, T> {
1610 remainder: &'a mut [MaybeUninit<T>],
1611 }
1612
1613 impl<'a, T> ExtendStack<'a, T> {
1614 fn from_vec_capacity(vec: &'a mut Vec<T>) -> Self {
1615 Self {
1616 remainder: vec.spare_capacity_mut(),
1617 }
1618 }
1619
1620 fn extend_one(self, value: T) -> (Self, &'a mut T) {
1621 let (to_init, remainder) = self.remainder.split_first_mut().unwrap();
1622 let init = to_init.write(value);
1623 (Self { remainder }, init)
1624 }
1625
1626 fn extend(
1627 self,
1628 iter: impl IntoIterator<Item = T> + ExactSizeIterator,
1629 ) -> (Self, &'a mut [T]) {
1630 let (to_init, remainder) = self.remainder.split_at_mut(iter.len());
1631
1632 for (value, to_init) in iter.into_iter().zip(to_init.iter_mut()) {
1633 to_init.write(value);
1634 }
1635
1636 let init = {
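                    // SAFETY: every element of `to_init` was written in the loop
                    // above, so viewing it as `&mut [T]` is sound.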
1639 unsafe { mem::transmute::<&mut [MaybeUninit<T>], &mut [T]>(to_init) }
1646 };
1647 (Self { remainder }, init)
1648 }
1649 }
1650
1651 let mut writes = Vec::with_capacity(desc.entries.len());
1652 let mut buffer_infos = Vec::with_capacity(desc.buffers.len());
1653 let mut buffer_infos = ExtendStack::from_vec_capacity(&mut buffer_infos);
1654 let mut image_infos = Vec::with_capacity(desc.samplers.len() + desc.textures.len());
1655 let mut image_infos = ExtendStack::from_vec_capacity(&mut image_infos);
1656 let mut acceleration_structure_infos =
1661 Vec::with_capacity(desc.acceleration_structures.len());
1662 let mut acceleration_structure_infos =
1663 ExtendStack::from_vec_capacity(&mut acceleration_structure_infos);
1664 let mut raw_acceleration_structures =
1665 Vec::with_capacity(desc.acceleration_structures.len());
1666 let mut raw_acceleration_structures =
1667 ExtendStack::from_vec_capacity(&mut raw_acceleration_structures);
1668
1669 let layout_and_entry_iter = desc.entries.iter().map(|entry| {
1670 let layout = desc
1671 .layout
1672 .entries
1673 .iter()
1674 .find(|layout_entry| layout_entry.binding == entry.binding)
1675 .expect("internal error: no layout entry found with binding slot");
1676 (layout, entry)
1677 });
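        // Pair every bind group entry with its layout entry; `next_binding` tracks
        // the flattened Vulkan binding index, mirroring `create_bind_group_layout`.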
1678 let mut next_binding = 0;
1679 for (layout, entry) in layout_and_entry_iter {
1680 let write = vk::WriteDescriptorSet::default().dst_set(*set.raw());
1681
1682 match layout.ty {
1683 wgt::BindingType::Sampler(_) => {
1684 let start = entry.resource_index;
1685 let end = start + entry.count;
1686 let local_image_infos;
1687 (image_infos, local_image_infos) =
1688 image_infos.extend(desc.samplers[start as usize..end as usize].iter().map(
1689 |sampler| vk::DescriptorImageInfo::default().sampler(sampler.raw),
1690 ));
1691 writes.push(
1692 write
1693 .dst_binding(next_binding)
1694 .descriptor_type(conv::map_binding_type(layout.ty))
1695 .image_info(local_image_infos),
1696 );
1697 next_binding += 1;
1698 }
1699 wgt::BindingType::Texture { .. } | wgt::BindingType::StorageTexture { .. } => {
1700 let start = entry.resource_index;
1701 let end = start + entry.count;
1702 let local_image_infos;
1703 (image_infos, local_image_infos) =
1704 image_infos.extend(desc.textures[start as usize..end as usize].iter().map(
1705 |binding| {
1706 let layout =
1707 conv::derive_image_layout(binding.usage, binding.view.format);
1708 vk::DescriptorImageInfo::default()
1709 .image_view(binding.view.raw)
1710 .image_layout(layout)
1711 },
1712 ));
1713 writes.push(
1714 write
1715 .dst_binding(next_binding)
1716 .descriptor_type(conv::map_binding_type(layout.ty))
1717 .image_info(local_image_infos),
1718 );
1719 next_binding += 1;
1720 }
1721 wgt::BindingType::Buffer { .. } => {
1722 let start = entry.resource_index;
1723 let end = start + entry.count;
1724 let local_buffer_infos;
1725 (buffer_infos, local_buffer_infos) =
1726 buffer_infos.extend(desc.buffers[start as usize..end as usize].iter().map(
1727 |binding| {
1728 vk::DescriptorBufferInfo::default()
1729 .buffer(binding.buffer.raw)
1730 .offset(binding.offset)
1731 .range(
1732 binding.size.map_or(vk::WHOLE_SIZE, wgt::BufferSize::get),
1733 )
1734 },
1735 ));
1736 writes.push(
1737 write
1738 .dst_binding(next_binding)
1739 .descriptor_type(conv::map_binding_type(layout.ty))
1740 .buffer_info(local_buffer_infos),
1741 );
1742 next_binding += 1;
1743 }
1744 wgt::BindingType::AccelerationStructure { .. } => {
1745 let start = entry.resource_index;
1746 let end = start + entry.count;
1747
1748 let local_raw_acceleration_structures;
1749 (
1750 raw_acceleration_structures,
1751 local_raw_acceleration_structures,
1752 ) = raw_acceleration_structures.extend(
1753 desc.acceleration_structures[start as usize..end as usize]
1754 .iter()
1755 .map(|acceleration_structure| acceleration_structure.raw),
1756 );
1757
1758 let local_acceleration_structure_infos;
1759 (
1760 acceleration_structure_infos,
1761 local_acceleration_structure_infos,
1762 ) = acceleration_structure_infos.extend_one(
1763 vk::WriteDescriptorSetAccelerationStructureKHR::default()
1764 .acceleration_structures(local_raw_acceleration_structures),
1765 );
1766
1767 writes.push(
1768 write
1769 .dst_binding(next_binding)
1770 .descriptor_type(conv::map_binding_type(layout.ty))
1771 .descriptor_count(entry.count)
1772 .push_next(local_acceleration_structure_infos),
1773 );
1774 next_binding += 1;
1775 }
1776 wgt::BindingType::ExternalTexture => unimplemented!(),
1777 }
1778 }
1779
1780 unsafe { self.shared.raw.update_descriptor_sets(&writes, &[]) };
1781
1782 self.counters.bind_groups.add(1);
1783
1784 Ok(super::BindGroup { set })
1785 }
1786
1787 unsafe fn destroy_bind_group(&self, group: super::BindGroup) {
1788 unsafe {
1789 self.desc_allocator
1790 .lock()
1791 .free(&*self.shared, Some(group.set))
1792 };
1793
1794 self.counters.bind_groups.sub(1);
1795 }
1796
1797 unsafe fn create_shader_module(
1798 &self,
1799 desc: &crate::ShaderModuleDescriptor,
1800 shader: crate::ShaderInput,
1801 ) -> Result<super::ShaderModule, crate::ShaderError> {
1802 let shader_module = match shader {
1803 crate::ShaderInput::Naga(naga_shader)
1804 if self
1805 .shared
1806 .workarounds
1807 .contains(super::Workarounds::SEPARATE_ENTRY_POINTS)
1808 || !naga_shader.module.overrides.is_empty() =>
1809 {
1810 super::ShaderModule::Intermediate {
1811 naga_shader,
1812 runtime_checks: desc.runtime_checks,
1813 }
1814 }
1815 crate::ShaderInput::Naga(naga_shader) => {
1816 let mut naga_options = self.naga_options.clone();
1817 naga_options.debug_info =
1818 naga_shader
1819 .debug_source
1820 .as_ref()
1821 .map(|d| naga::back::spv::DebugInfo {
1822 source_code: d.source_code.as_ref(),
1823 file_name: d.file_name.as_ref(),
1824 language: naga::back::spv::SourceLanguage::WGSL,
1825 });
1826 if !desc.runtime_checks.bounds_checks {
1827 naga_options.bounds_check_policies = naga::proc::BoundsCheckPolicies {
1828 index: naga::proc::BoundsCheckPolicy::Unchecked,
1829 buffer: naga::proc::BoundsCheckPolicy::Unchecked,
1830 image_load: naga::proc::BoundsCheckPolicy::Unchecked,
1831 binding_array: naga::proc::BoundsCheckPolicy::Unchecked,
1832 };
1833 }
1834 let spv = naga::back::spv::write_vec(
1835 &naga_shader.module,
1836 &naga_shader.info,
1837 &naga_options,
1838 None,
1839 )
1840 .map_err(|e| crate::ShaderError::Compilation(format!("{e}")))?;
1841 super::ShaderModule::Raw(self.create_shader_module_impl(&spv, &desc.label)?)
1842 }
1843 crate::ShaderInput::SpirV(data) => {
1844 super::ShaderModule::Raw(self.create_shader_module_impl(data, &desc.label)?)
1845 }
1846 crate::ShaderInput::Msl { .. }
1847 | crate::ShaderInput::Dxil { .. }
1848 | crate::ShaderInput::Hlsl { .. }
1849 | crate::ShaderInput::Glsl { .. } => unreachable!(),
1850 };
1851
1852 self.counters.shader_modules.add(1);
1853
1854 Ok(shader_module)
1855 }
1856
1857 unsafe fn destroy_shader_module(&self, module: super::ShaderModule) {
1858 match module {
1859 super::ShaderModule::Raw(raw) => {
1860 unsafe { self.shared.raw.destroy_shader_module(raw, None) };
1861 }
1862 super::ShaderModule::Intermediate { .. } => {}
1863 }
1864
1865 self.counters.shader_modules.sub(1);
1866 }
1867
1868 unsafe fn create_render_pipeline(
1869 &self,
1870 desc: &crate::RenderPipelineDescriptor<
1871 super::PipelineLayout,
1872 super::ShaderModule,
1873 super::PipelineCache,
1874 >,
1875 ) -> Result<super::RenderPipeline, crate::PipelineError> {
1876 let dynamic_states = [
1877 vk::DynamicState::VIEWPORT,
1878 vk::DynamicState::SCISSOR,
1879 vk::DynamicState::BLEND_CONSTANTS,
1880 vk::DynamicState::STENCIL_REFERENCE,
1881 ];
1882 let mut compatible_rp_key = super::RenderPassKey {
1883 sample_count: desc.multisample.count,
1884 multiview_mask: desc.multiview_mask,
1885 ..Default::default()
1886 };
1887 let mut stages = ArrayVec::<_, { crate::MAX_CONCURRENT_SHADER_STAGES }>::new();
1888 let mut vertex_buffers = Vec::new();
1889 let mut vertex_attributes = Vec::new();
1890
1891 if let crate::VertexProcessor::Standard {
1892 vertex_buffers: desc_vertex_buffers,
1893 vertex_stage: _,
1894 } = &desc.vertex_processor
1895 {
1896 vertex_buffers = Vec::with_capacity(desc_vertex_buffers.len());
1897 for (i, vb) in desc_vertex_buffers.iter().enumerate() {
1898 vertex_buffers.push(vk::VertexInputBindingDescription {
1899 binding: i as u32,
1900 stride: vb.array_stride as u32,
1901 input_rate: match vb.step_mode {
1902 wgt::VertexStepMode::Vertex => vk::VertexInputRate::VERTEX,
1903 wgt::VertexStepMode::Instance => vk::VertexInputRate::INSTANCE,
1904 },
1905 });
1906 for at in vb.attributes {
1907 vertex_attributes.push(vk::VertexInputAttributeDescription {
1908 location: at.shader_location,
1909 binding: i as u32,
1910 format: conv::map_vertex_format(at.format),
1911 offset: at.offset as u32,
1912 });
1913 }
1914 }
1915 }
1916
1917 let vk_vertex_input = vk::PipelineVertexInputStateCreateInfo::default()
1918 .vertex_binding_descriptions(&vertex_buffers)
1919 .vertex_attribute_descriptions(&vertex_attributes);
1920
1921 let vk_input_assembly = vk::PipelineInputAssemblyStateCreateInfo::default()
1922 .topology(conv::map_topology(desc.primitive.topology))
1923 .primitive_restart_enable(desc.primitive.strip_index_format.is_some());
1924
1925 let mut compiled_vs = None;
1926 let mut compiled_ms = None;
1927 let mut compiled_ts = None;
1928 match &desc.vertex_processor {
1929 crate::VertexProcessor::Standard {
1930 vertex_buffers: _,
1931 vertex_stage,
1932 } => {
1933 compiled_vs = Some(self.compile_stage(
1934 vertex_stage,
1935 naga::ShaderStage::Vertex,
1936 &desc.layout.binding_map,
1937 )?);
1938 stages.push(compiled_vs.as_ref().unwrap().create_info);
1939 }
1940 crate::VertexProcessor::Mesh {
1941 task_stage,
1942 mesh_stage,
1943 } => {
1944 if let Some(t) = task_stage.as_ref() {
1945 compiled_ts = Some(self.compile_stage(
1946 t,
1947 naga::ShaderStage::Task,
1948 &desc.layout.binding_map,
1949 )?);
1950 stages.push(compiled_ts.as_ref().unwrap().create_info);
1951 }
1952 compiled_ms = Some(self.compile_stage(
1953 mesh_stage,
1954 naga::ShaderStage::Mesh,
1955 &desc.layout.binding_map,
1956 )?);
1957 stages.push(compiled_ms.as_ref().unwrap().create_info);
1958 }
1959 }
1960 let compiled_fs = match desc.fragment_stage {
1961 Some(ref stage) => {
1962 let compiled = self.compile_stage(
1963 stage,
1964 naga::ShaderStage::Fragment,
1965 &desc.layout.binding_map,
1966 )?;
1967 stages.push(compiled.create_info);
1968 Some(compiled)
1969 }
1970 None => None,
1971 };
1972
1973 let mut vk_rasterization = vk::PipelineRasterizationStateCreateInfo::default()
1974 .polygon_mode(conv::map_polygon_mode(desc.primitive.polygon_mode))
1975 .front_face(conv::map_front_face(desc.primitive.front_face))
1976 .line_width(1.0)
1977 .depth_clamp_enable(desc.primitive.unclipped_depth);
1978 if let Some(face) = desc.primitive.cull_mode {
1979 vk_rasterization = vk_rasterization.cull_mode(conv::map_cull_face(face))
1980 }
1981 let mut vk_rasterization_conservative_state =
1982 vk::PipelineRasterizationConservativeStateCreateInfoEXT::default()
1983 .conservative_rasterization_mode(
1984 vk::ConservativeRasterizationModeEXT::OVERESTIMATE,
1985 );
1986 if desc.primitive.conservative {
1987 vk_rasterization = vk_rasterization.push_next(&mut vk_rasterization_conservative_state);
1988 }
1989
1990 let mut vk_depth_stencil = vk::PipelineDepthStencilStateCreateInfo::default();
1991 if let Some(ref ds) = desc.depth_stencil {
1992 let vk_format = self.shared.private_caps.map_texture_format(ds.format);
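            // Pick a read-only layout when the pipeline cannot write depth or
            // stencil, so it stays compatible with read-only depth/stencil passes.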
1993 let vk_layout = if ds.is_read_only(desc.primitive.cull_mode) {
1994 vk::ImageLayout::DEPTH_STENCIL_READ_ONLY_OPTIMAL
1995 } else {
1996 vk::ImageLayout::DEPTH_STENCIL_ATTACHMENT_OPTIMAL
1997 };
1998 compatible_rp_key.depth_stencil = Some(super::DepthStencilAttachmentKey {
1999 base: super::AttachmentKey::compatible(vk_format, vk_layout),
2000 stencil_ops: crate::AttachmentOps::all(),
2001 });
2002
2003 if ds.is_depth_enabled() {
2004 vk_depth_stencil = vk_depth_stencil
2005 .depth_test_enable(true)
2006 .depth_write_enable(ds.depth_write_enabled)
2007 .depth_compare_op(conv::map_comparison(ds.depth_compare));
2008 }
2009 if ds.stencil.is_enabled() {
2010 let s = &ds.stencil;
2011 let front = conv::map_stencil_face(&s.front, s.read_mask, s.write_mask);
2012 let back = conv::map_stencil_face(&s.back, s.read_mask, s.write_mask);
2013 vk_depth_stencil = vk_depth_stencil
2014 .stencil_test_enable(true)
2015 .front(front)
2016 .back(back);
2017 }
2018
2019 if ds.bias.is_enabled() {
2020 vk_rasterization = vk_rasterization
2021 .depth_bias_enable(true)
2022 .depth_bias_constant_factor(ds.bias.constant as f32)
2023 .depth_bias_clamp(ds.bias.clamp)
2024 .depth_bias_slope_factor(ds.bias.slope_scale);
2025 }
2026 }
2027
2028 let vk_viewport = vk::PipelineViewportStateCreateInfo::default()
2029 .flags(vk::PipelineViewportStateCreateFlags::empty())
2030 .scissor_count(1)
2031 .viewport_count(1);
2032
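        // Vulkan takes the sample mask as an array of 32-bit words, so split the
        // 64-bit mask into its low and high halves.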
2033 let vk_sample_mask = [
2034 desc.multisample.mask as u32,
2035 (desc.multisample.mask >> 32) as u32,
2036 ];
2037 let vk_multisample = vk::PipelineMultisampleStateCreateInfo::default()
2038 .rasterization_samples(vk::SampleCountFlags::from_raw(desc.multisample.count))
2039 .alpha_to_coverage_enable(desc.multisample.alpha_to_coverage_enabled)
2040 .sample_mask(&vk_sample_mask);
2041
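        // Translate each color target into a blend-attachment state and record a
        // matching entry in the compatible render-pass key.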
2042 let mut vk_attachments = Vec::with_capacity(desc.color_targets.len());
2043 for cat in desc.color_targets {
2044             let (key, attachment) = if let Some(cat) = cat.as_ref() {
2045 let mut vk_attachment = vk::PipelineColorBlendAttachmentState::default()
2046 .color_write_mask(vk::ColorComponentFlags::from_raw(cat.write_mask.bits()));
2047 if let Some(ref blend) = cat.blend {
2048 let (color_op, color_src, color_dst) = conv::map_blend_component(&blend.color);
2049 let (alpha_op, alpha_src, alpha_dst) = conv::map_blend_component(&blend.alpha);
2050 vk_attachment = vk_attachment
2051 .blend_enable(true)
2052 .color_blend_op(color_op)
2053 .src_color_blend_factor(color_src)
2054 .dst_color_blend_factor(color_dst)
2055 .alpha_blend_op(alpha_op)
2056 .src_alpha_blend_factor(alpha_src)
2057 .dst_alpha_blend_factor(alpha_dst);
2058 }
2059
2060 let vk_format = self.shared.private_caps.map_texture_format(cat.format);
2061 (
2062 Some(super::ColorAttachmentKey {
2063 base: super::AttachmentKey::compatible(
2064 vk_format,
2065 vk::ImageLayout::COLOR_ATTACHMENT_OPTIMAL,
2066 ),
2067 resolve: None,
2068 }),
2069 vk_attachment,
2070 )
2071 } else {
2072 (None, vk::PipelineColorBlendAttachmentState::default())
2073 };
2074
2075 compatible_rp_key.colors.push(key);
2076             vk_attachments.push(attachment);
2077 }
2078
2079 let vk_color_blend =
2080 vk::PipelineColorBlendStateCreateInfo::default().attachments(&vk_attachments);
2081
2082 let vk_dynamic_state =
2083 vk::PipelineDynamicStateCreateInfo::default().dynamic_states(&dynamic_states);
2084
2085 let raw_pass = self.shared.make_render_pass(compatible_rp_key)?;
2086
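        // Assemble the final graphics pipeline description from the pieces above.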
2087 let vk_infos = [{
2088 vk::GraphicsPipelineCreateInfo::default()
2089 .layout(desc.layout.raw)
2090 .stages(&stages)
2091 .vertex_input_state(&vk_vertex_input)
2092 .input_assembly_state(&vk_input_assembly)
2093 .rasterization_state(&vk_rasterization)
2094 .viewport_state(&vk_viewport)
2095 .multisample_state(&vk_multisample)
2096 .depth_stencil_state(&vk_depth_stencil)
2097 .color_blend_state(&vk_color_blend)
2098 .dynamic_state(&vk_dynamic_state)
2099 .render_pass(raw_pass)
2100 }];
2101
2102 let pipeline_cache = desc
2103 .cache
2104 .map(|it| it.raw)
2105 .unwrap_or(vk::PipelineCache::null());
2106
2107 let mut raw_vec = {
2108 profiling::scope!("vkCreateGraphicsPipelines");
2109 unsafe {
2110 self.shared
2111 .raw
2112 .create_graphics_pipelines(pipeline_cache, &vk_infos, None)
2113 .map_err(|(_, e)| super::map_pipeline_err(e))
2114 }?
2115 };
2116
2117 let raw = raw_vec.pop().unwrap();
2118 if let Some(label) = desc.label {
2119 unsafe { self.shared.set_object_name(raw, label) };
2120 }
2121
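        // The temporary SPIR-V modules produced by compile_stage are no longer
        // needed once the pipeline object has been created.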
2122 if let Some(CompiledStage {
2123 temp_raw_module: Some(raw_module),
2124 ..
2125 }) = compiled_vs
2126 {
2127 unsafe { self.shared.raw.destroy_shader_module(raw_module, None) };
2128 }
2129 if let Some(CompiledStage {
2130 temp_raw_module: Some(raw_module),
2131 ..
2132 }) = compiled_ts
2133 {
2134 unsafe { self.shared.raw.destroy_shader_module(raw_module, None) };
2135 }
2136 if let Some(CompiledStage {
2137 temp_raw_module: Some(raw_module),
2138 ..
2139 }) = compiled_ms
2140 {
2141 unsafe { self.shared.raw.destroy_shader_module(raw_module, None) };
2142 }
2143 if let Some(CompiledStage {
2144 temp_raw_module: Some(raw_module),
2145 ..
2146 }) = compiled_fs
2147 {
2148 unsafe { self.shared.raw.destroy_shader_module(raw_module, None) };
2149 }
2150
2151 self.counters.render_pipelines.add(1);
2152
2153 Ok(super::RenderPipeline {
2154 raw,
2155 is_multiview: desc.multiview_mask.is_some(),
2156 })
2157 }
2158
2159 unsafe fn destroy_render_pipeline(&self, pipeline: super::RenderPipeline) {
2160 unsafe { self.shared.raw.destroy_pipeline(pipeline.raw, None) };
2161
2162 self.counters.render_pipelines.sub(1);
2163 }
2164
2165 unsafe fn create_compute_pipeline(
2166 &self,
2167 desc: &crate::ComputePipelineDescriptor<
2168 super::PipelineLayout,
2169 super::ShaderModule,
2170 super::PipelineCache,
2171 >,
2172 ) -> Result<super::ComputePipeline, crate::PipelineError> {
2173 let compiled = self.compile_stage(
2174 &desc.stage,
2175 naga::ShaderStage::Compute,
2176 &desc.layout.binding_map,
2177 )?;
2178
2179 let vk_infos = [{
2180 vk::ComputePipelineCreateInfo::default()
2181 .layout(desc.layout.raw)
2182 .stage(compiled.create_info)
2183 }];
2184
2185 let pipeline_cache = desc
2186 .cache
2187 .map(|it| it.raw)
2188 .unwrap_or(vk::PipelineCache::null());
2189
2190 let mut raw_vec = {
2191 profiling::scope!("vkCreateComputePipelines");
2192 unsafe {
2193 self.shared
2194 .raw
2195 .create_compute_pipelines(pipeline_cache, &vk_infos, None)
2196 .map_err(|(_, e)| super::map_pipeline_err(e))
2197 }?
2198 };
2199
2200 let raw = raw_vec.pop().unwrap();
2201 if let Some(label) = desc.label {
2202 unsafe { self.shared.set_object_name(raw, label) };
2203 }
2204
2205 if let Some(raw_module) = compiled.temp_raw_module {
2206 unsafe { self.shared.raw.destroy_shader_module(raw_module, None) };
2207 }
2208
2209 self.counters.compute_pipelines.add(1);
2210
2211 Ok(super::ComputePipeline { raw })
2212 }
2213
2214 unsafe fn destroy_compute_pipeline(&self, pipeline: super::ComputePipeline) {
2215 unsafe { self.shared.raw.destroy_pipeline(pipeline.raw, None) };
2216
2217 self.counters.compute_pipelines.sub(1);
2218 }
2219
2220 unsafe fn create_pipeline_cache(
2221 &self,
2222 desc: &crate::PipelineCacheDescriptor<'_>,
2223 ) -> Result<super::PipelineCache, crate::PipelineCacheError> {
2224 let mut info = vk::PipelineCacheCreateInfo::default();
2225 if let Some(data) = desc.data {
2226 info = info.initial_data(data)
2227 }
2228 profiling::scope!("vkCreatePipelineCache");
2229 let raw = unsafe { self.shared.raw.create_pipeline_cache(&info, None) }
2230 .map_err(super::map_host_device_oom_err)?;
2231
2232 Ok(super::PipelineCache { raw })
2233 }
2234 fn pipeline_cache_validation_key(&self) -> Option<[u8; 16]> {
2235 Some(self.shared.pipeline_cache_validation_key)
2236 }
2237 unsafe fn destroy_pipeline_cache(&self, cache: super::PipelineCache) {
2238 unsafe { self.shared.raw.destroy_pipeline_cache(cache.raw, None) }
2239 }
2240 unsafe fn create_query_set(
2241 &self,
2242 desc: &wgt::QuerySetDescriptor<crate::Label>,
2243 ) -> Result<super::QuerySet, crate::DeviceError> {
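        // Rough budget check: the per-query storage size is not queryable up front,
        // so a conservative estimate of 256 bytes per query is assumed here.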
2244 self.error_if_would_oom_on_resource_allocation(true, desc.count as u64 * 256)?;
2247
2248 let (vk_type, pipeline_statistics) = match desc.ty {
2249 wgt::QueryType::Occlusion => (
2250 vk::QueryType::OCCLUSION,
2251 vk::QueryPipelineStatisticFlags::empty(),
2252 ),
2253 wgt::QueryType::PipelineStatistics(statistics) => (
2254 vk::QueryType::PIPELINE_STATISTICS,
2255 conv::map_pipeline_statistics(statistics),
2256 ),
2257 wgt::QueryType::Timestamp => (
2258 vk::QueryType::TIMESTAMP,
2259 vk::QueryPipelineStatisticFlags::empty(),
2260 ),
2261 };
2262
2263 let vk_info = vk::QueryPoolCreateInfo::default()
2264 .query_type(vk_type)
2265 .query_count(desc.count)
2266 .pipeline_statistics(pipeline_statistics);
2267
2268 let raw = unsafe { self.shared.raw.create_query_pool(&vk_info, None) }
2269 .map_err(super::map_host_device_oom_err)?;
2270 if let Some(label) = desc.label {
2271 unsafe { self.shared.set_object_name(raw, label) };
2272 }
2273
2274 self.counters.query_sets.add(1);
2275
2276 Ok(super::QuerySet { raw })
2277 }
2278
2279 unsafe fn destroy_query_set(&self, set: super::QuerySet) {
2280 unsafe { self.shared.raw.destroy_query_pool(set.raw, None) };
2281
2282 self.counters.query_sets.sub(1);
2283 }
2284
2285 unsafe fn create_fence(&self) -> Result<super::Fence, crate::DeviceError> {
2286 self.counters.fences.add(1);
2287
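        // Prefer a single timeline semaphore when the device supports it; otherwise
        // fall back to a pool of binary fences, one per signalled value.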
2288 Ok(if self.shared.private_caps.timeline_semaphores {
2289 let mut sem_type_info =
2290 vk::SemaphoreTypeCreateInfo::default().semaphore_type(vk::SemaphoreType::TIMELINE);
2291 let vk_info = vk::SemaphoreCreateInfo::default().push_next(&mut sem_type_info);
2292 let raw = unsafe { self.shared.raw.create_semaphore(&vk_info, None) }
2293 .map_err(super::map_host_device_oom_err)?;
2294
2295 super::Fence::TimelineSemaphore(raw)
2296 } else {
2297 super::Fence::FencePool {
2298 last_completed: 0,
2299 active: Vec::new(),
2300 free: Vec::new(),
2301 }
2302 })
2303 }
2304 unsafe fn destroy_fence(&self, fence: super::Fence) {
2305 match fence {
2306 super::Fence::TimelineSemaphore(raw) => {
2307 unsafe { self.shared.raw.destroy_semaphore(raw, None) };
2308 }
2309 super::Fence::FencePool {
2310 active,
2311 free,
2312 last_completed: _,
2313 } => {
2314 for (_, raw) in active {
2315 unsafe { self.shared.raw.destroy_fence(raw, None) };
2316 }
2317 for raw in free {
2318 unsafe { self.shared.raw.destroy_fence(raw, None) };
2319 }
2320 }
2321 }
2322
2323 self.counters.fences.sub(1);
2324 }
2325 unsafe fn get_fence_value(
2326 &self,
2327 fence: &super::Fence,
2328 ) -> Result<crate::FenceValue, crate::DeviceError> {
2329 fence.get_latest(
2330 &self.shared.raw,
2331 self.shared.extension_fns.timeline_semaphore.as_ref(),
2332 )
2333 }
2334 unsafe fn wait(
2335 &self,
2336 fence: &super::Fence,
2337 wait_value: crate::FenceValue,
2338 timeout: Option<Duration>,
2339 ) -> Result<bool, crate::DeviceError> {
2340 let timeout_ns = timeout
2341 .unwrap_or(Duration::MAX)
2342 .as_nanos()
2343 .min(u64::MAX as _) as u64;
2344 self.shared.wait_for_fence(fence, wait_value, timeout_ns)
2345 }
2346
2347 unsafe fn start_graphics_debugger_capture(&self) -> bool {
2348 #[cfg(feature = "renderdoc")]
2349 {
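            // RenderDoc keys captures off the instance's dispatch-table pointer
            // (the value the dispatchable VkInstance handle points to), so the raw
            // handle is dereferenced once before being passed along.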
2350 let raw_vk_instance =
2352 vk::Handle::as_raw(self.shared.instance.raw.handle()) as *mut *mut _;
2353 let raw_vk_instance_dispatch_table = unsafe { *raw_vk_instance };
2354 unsafe {
2355 self.render_doc
2356 .start_frame_capture(raw_vk_instance_dispatch_table, ptr::null_mut())
2357 }
2358 }
2359 #[cfg(not(feature = "renderdoc"))]
2360 false
2361 }
2362 unsafe fn stop_graphics_debugger_capture(&self) {
2363 #[cfg(feature = "renderdoc")]
2364 {
2365 let raw_vk_instance =
2367 vk::Handle::as_raw(self.shared.instance.raw.handle()) as *mut *mut _;
2368 let raw_vk_instance_dispatch_table = unsafe { *raw_vk_instance };
2369
2370 unsafe {
2371 self.render_doc
2372 .end_frame_capture(raw_vk_instance_dispatch_table, ptr::null_mut())
2373 }
2374 }
2375 }
2376
2377 unsafe fn pipeline_cache_get_data(&self, cache: &super::PipelineCache) -> Option<Vec<u8>> {
2378 let data = unsafe { self.raw_device().get_pipeline_cache_data(cache.raw) };
2379 data.ok()
2380 }
2381
2382 unsafe fn get_acceleration_structure_build_sizes<'a>(
2383 &self,
2384 desc: &crate::GetAccelerationStructureBuildSizesDescriptor<'a, super::Buffer>,
2385 ) -> crate::AccelerationStructureBuildSizes {
2386 const CAPACITY: usize = 8;
2387
2388 let ray_tracing_functions = self
2389 .shared
2390 .extension_fns
2391 .ray_tracing
2392 .as_ref()
2393 .expect("Feature `RAY_TRACING` not enabled");
2394
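        // Build the geometry descriptions and per-geometry primitive counts that
        // vkGetAccelerationStructureBuildSizesKHR needs, for each entry kind.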
2395 let (geometries, primitive_counts) = match *desc.entries {
2396 crate::AccelerationStructureEntries::Instances(ref instances) => {
2397 let instance_data = vk::AccelerationStructureGeometryInstancesDataKHR::default();
2398
2399 let geometry = vk::AccelerationStructureGeometryKHR::default()
2400 .geometry_type(vk::GeometryTypeKHR::INSTANCES)
2401 .geometry(vk::AccelerationStructureGeometryDataKHR {
2402 instances: instance_data,
2403 });
2404
2405 (
2406 smallvec::smallvec![geometry],
2407 smallvec::smallvec![instances.count],
2408 )
2409 }
2410 crate::AccelerationStructureEntries::Triangles(ref in_geometries) => {
2411 let mut primitive_counts =
2412 smallvec::SmallVec::<[u32; CAPACITY]>::with_capacity(in_geometries.len());
2413 let mut geometries = smallvec::SmallVec::<
2414 [vk::AccelerationStructureGeometryKHR; CAPACITY],
2415 >::with_capacity(in_geometries.len());
2416
2417 for triangles in in_geometries {
2418 let mut triangle_data =
2419 vk::AccelerationStructureGeometryTrianglesDataKHR::default()
2420 .index_type(vk::IndexType::NONE_KHR)
2421 .vertex_format(conv::map_vertex_format(triangles.vertex_format))
2422 .max_vertex(triangles.vertex_count)
2423 .vertex_stride(triangles.vertex_stride)
2424 .transform_data(vk::DeviceOrHostAddressConstKHR {
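                            // The transform buffer's device address is only looked
                            // up when the build actually uses transforms; otherwise
                            // a zero (null) address is passed.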
2434 device_address: if desc
2435 .flags
2436 .contains(wgt::AccelerationStructureFlags::USE_TRANSFORM)
2437 {
2438 unsafe {
2439 ray_tracing_functions
2440 .buffer_device_address
2441 .get_buffer_device_address(
2442 &vk::BufferDeviceAddressInfo::default().buffer(
2443 triangles
2444 .transform
2445 .as_ref()
2446 .unwrap()
2447 .buffer
2448 .raw,
2449 ),
2450 )
2451 }
2452 } else {
2453 0
2454 },
2455 });
2456
2457                     let primitive_count = if let Some(ref indices) = triangles.indices {
2458 triangle_data =
2459 triangle_data.index_type(conv::map_index_format(indices.format));
2460 indices.count / 3
2461 } else {
2462 triangles.vertex_count / 3
2463 };
2464
2465 let geometry = vk::AccelerationStructureGeometryKHR::default()
2466 .geometry_type(vk::GeometryTypeKHR::TRIANGLES)
2467 .geometry(vk::AccelerationStructureGeometryDataKHR {
2468 triangles: triangle_data,
2469 })
2470 .flags(conv::map_acceleration_structure_geometry_flags(
2471 triangles.flags,
2472 ));
2473
2474 geometries.push(geometry);
2475                     primitive_counts.push(primitive_count);
2476 }
2477 (geometries, primitive_counts)
2478 }
2479 crate::AccelerationStructureEntries::AABBs(ref in_geometries) => {
2480 let mut primitive_counts =
2481 smallvec::SmallVec::<[u32; CAPACITY]>::with_capacity(in_geometries.len());
2482 let mut geometries = smallvec::SmallVec::<
2483 [vk::AccelerationStructureGeometryKHR; CAPACITY],
2484 >::with_capacity(in_geometries.len());
2485 for aabb in in_geometries {
2486 let aabbs_data = vk::AccelerationStructureGeometryAabbsDataKHR::default()
2487 .stride(aabb.stride);
2488
2489 let geometry = vk::AccelerationStructureGeometryKHR::default()
2490 .geometry_type(vk::GeometryTypeKHR::AABBS)
2491 .geometry(vk::AccelerationStructureGeometryDataKHR { aabbs: aabbs_data })
2492 .flags(conv::map_acceleration_structure_geometry_flags(aabb.flags));
2493
2494 geometries.push(geometry);
2495 primitive_counts.push(aabb.count);
2496 }
2497 (geometries, primitive_counts)
2498 }
2499 };
2500
2501 let ty = match *desc.entries {
2502 crate::AccelerationStructureEntries::Instances(_) => {
2503 vk::AccelerationStructureTypeKHR::TOP_LEVEL
2504 }
2505 _ => vk::AccelerationStructureTypeKHR::BOTTOM_LEVEL,
2506 };
2507
2508 let geometry_info = vk::AccelerationStructureBuildGeometryInfoKHR::default()
2509 .ty(ty)
2510 .flags(conv::map_acceleration_structure_flags(desc.flags))
2511 .geometries(&geometries);
2512
2513 let mut raw = Default::default();
2514 unsafe {
2515 ray_tracing_functions
2516 .acceleration_structure
2517 .get_acceleration_structure_build_sizes(
2518 vk::AccelerationStructureBuildTypeKHR::DEVICE,
2519 &geometry_info,
2520 &primitive_counts,
2521 &mut raw,
2522 )
2523 }
2524
2525 crate::AccelerationStructureBuildSizes {
2526 acceleration_structure_size: raw.acceleration_structure_size,
2527 update_scratch_size: raw.update_scratch_size,
2528 build_scratch_size: raw.build_scratch_size,
2529 }
2530 }
2531
2532 unsafe fn get_acceleration_structure_device_address(
2533 &self,
2534 acceleration_structure: &super::AccelerationStructure,
2535 ) -> wgt::BufferAddress {
2536 let ray_tracing_functions = self
2537 .shared
2538 .extension_fns
2539 .ray_tracing
2540 .as_ref()
2541 .expect("Feature `RAY_TRACING` not enabled");
2542
2543 unsafe {
2544 ray_tracing_functions
2545 .acceleration_structure
2546 .get_acceleration_structure_device_address(
2547 &vk::AccelerationStructureDeviceAddressInfoKHR::default()
2548 .acceleration_structure(acceleration_structure.raw),
2549 )
2550 }
2551 }
2552
2553 unsafe fn create_acceleration_structure(
2554 &self,
2555 desc: &crate::AccelerationStructureDescriptor,
2556 ) -> Result<super::AccelerationStructure, crate::DeviceError> {
2557 let ray_tracing_functions = self
2558 .shared
2559 .extension_fns
2560 .ray_tracing
2561 .as_ref()
2562 .expect("Feature `RAY_TRACING` not enabled");
2563
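        // Acceleration structures are backed by a plain device-local buffer:
        // create the buffer, allocate and bind its memory, then create the
        // acceleration structure on top of it.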
2564 let vk_buffer_info = vk::BufferCreateInfo::default()
2565 .size(desc.size)
2566 .usage(
2567 vk::BufferUsageFlags::ACCELERATION_STRUCTURE_STORAGE_KHR
2568 | vk::BufferUsageFlags::SHADER_DEVICE_ADDRESS,
2569 )
2570 .sharing_mode(vk::SharingMode::EXCLUSIVE);
2571
2572 unsafe {
2573 let raw_buffer = self
2574 .shared
2575 .raw
2576 .create_buffer(&vk_buffer_info, None)
2577 .map_err(super::map_host_device_oom_and_ioca_err)?;
2578 let req = self.shared.raw.get_buffer_memory_requirements(raw_buffer);
2579
2580 self.error_if_would_oom_on_resource_allocation(false, req.size)
2581 .inspect_err(|_| {
2582 self.shared.raw.destroy_buffer(raw_buffer, None);
2583 })?;
2584
2585 let block = self
2586 .mem_allocator
2587 .lock()
2588 .alloc(
2589 &*self.shared,
2590 gpu_alloc::Request {
2591 size: req.size,
2592 align_mask: req.alignment - 1,
2593 usage: gpu_alloc::UsageFlags::FAST_DEVICE_ACCESS,
2594 memory_types: req.memory_type_bits & self.valid_ash_memory_types,
2595 },
2596 )
2597 .inspect_err(|_| {
2598 self.shared.raw.destroy_buffer(raw_buffer, None);
2599 })?;
2600
2601 self.shared
2602 .raw
2603 .bind_buffer_memory(raw_buffer, *block.memory(), block.offset())
2604 .map_err(super::map_host_device_oom_and_ioca_err)
2605 .inspect_err(|_| {
2606 self.shared.raw.destroy_buffer(raw_buffer, None);
2607 })?;
2608
2609 if let Some(label) = desc.label {
2610 self.shared.set_object_name(raw_buffer, label);
2611 }
2612
2613 let vk_info = vk::AccelerationStructureCreateInfoKHR::default()
2614 .buffer(raw_buffer)
2615 .offset(0)
2616 .size(desc.size)
2617 .ty(conv::map_acceleration_structure_format(desc.format));
2618
2619 let raw_acceleration_structure = ray_tracing_functions
2620 .acceleration_structure
2621 .create_acceleration_structure(&vk_info, None)
2622 .map_err(super::map_host_oom_and_ioca_err)
2623 .inspect_err(|_| {
2624 self.shared.raw.destroy_buffer(raw_buffer, None);
2625 })?;
2626
2627 if let Some(label) = desc.label {
2628 self.shared
2629 .set_object_name(raw_acceleration_structure, label);
2630 }
2631
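            // When compaction is allowed, a single-entry query pool is created so
            // the compacted size can be read back after the build.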
2632 let pool = if desc.allow_compaction {
2633 let vk_info = vk::QueryPoolCreateInfo::default()
2634 .query_type(vk::QueryType::ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR)
2635 .query_count(1);
2636
2637 let raw = self
2638 .shared
2639 .raw
2640 .create_query_pool(&vk_info, None)
2641 .map_err(super::map_host_device_oom_err)
2642 .inspect_err(|_| {
2643 ray_tracing_functions
2644 .acceleration_structure
2645 .destroy_acceleration_structure(raw_acceleration_structure, None);
2646 self.shared.raw.destroy_buffer(raw_buffer, None);
2647 })?;
2648 Some(raw)
2649 } else {
2650 None
2651 };
2652
2653 Ok(super::AccelerationStructure {
2654 raw: raw_acceleration_structure,
2655 buffer: raw_buffer,
2656 block: Mutex::new(block),
2657 compacted_size_query: pool,
2658 })
2659 }
2660 }
2661
2662 unsafe fn destroy_acceleration_structure(
2663 &self,
2664 acceleration_structure: super::AccelerationStructure,
2665 ) {
2666 let ray_tracing_functions = self
2667 .shared
2668 .extension_fns
2669 .ray_tracing
2670 .as_ref()
2671 .expect("Feature `RAY_TRACING` not enabled");
2672
2673 unsafe {
2674 ray_tracing_functions
2675 .acceleration_structure
2676 .destroy_acceleration_structure(acceleration_structure.raw, None);
2677 self.shared
2678 .raw
2679 .destroy_buffer(acceleration_structure.buffer, None);
2680 self.mem_allocator
2681 .lock()
2682 .dealloc(&*self.shared, acceleration_structure.block.into_inner());
2683 if let Some(query) = acceleration_structure.compacted_size_query {
2684 self.shared.raw.destroy_query_pool(query, None)
2685 }
2686 }
2687 }
2688
2689 fn get_internal_counters(&self) -> wgt::HalCounters {
2690 self.counters
2691 .memory_allocations
2692 .set(self.shared.memory_allocations_counter.read());
2693
2694 self.counters.as_ref().clone()
2695 }
2696
2697 fn tlas_instance_to_bytes(&self, instance: TlasInstance) -> Vec<u8> {
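        // VkAccelerationStructureInstanceKHR packs the 24-bit instance custom index
        // and the 8-bit visibility mask into a single u32, so mask and shift here.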
2698 const MAX_U24: u32 = (1u32 << 24u32) - 1u32;
2699 let temp = RawTlasInstance {
2700 transform: instance.transform,
2701 custom_data_and_mask: (instance.custom_data & MAX_U24)
2702 | (u32::from(instance.mask) << 24),
2703 shader_binding_table_record_offset_and_flags: 0,
2704 acceleration_structure_reference: instance.blas_address,
2705 };
2706 bytemuck::bytes_of(&temp).to_vec()
2707 }
2708
2709 fn check_if_oom(&self) -> Result<(), crate::DeviceError> {
2710 let Some(threshold) = self
2711 .shared
2712 .instance
2713 .memory_budget_thresholds
2714 .for_device_loss
2715 else {
2716 return Ok(());
2717 };
2718
2719 if !self
2720 .shared
2721 .enabled_extensions
2722 .contains(&ext::memory_budget::NAME)
2723 {
2724 return Ok(());
2725 }
2726
2727 let get_physical_device_properties = self
2728 .shared
2729 .instance
2730 .get_physical_device_properties
2731 .as_ref()
2732 .unwrap();
2733
2734 let mut memory_budget_properties = vk::PhysicalDeviceMemoryBudgetPropertiesEXT::default();
2735
2736 let mut memory_properties =
2737 vk::PhysicalDeviceMemoryProperties2::default().push_next(&mut memory_budget_properties);
2738
2739 unsafe {
2740 get_physical_device_properties.get_physical_device_memory_properties2(
2741 self.shared.physical_device,
2742 &mut memory_properties,
2743 );
2744 }
2745
2746 let memory_properties = memory_properties.memory_properties;
2747
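        // Treat the device as out of memory once any heap's usage crosses the
        // configured percentage of its reported budget.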
2748 for i in 0..memory_properties.memory_heap_count {
2749 let heap_usage = memory_budget_properties.heap_usage[i as usize];
2750 let heap_budget = memory_budget_properties.heap_budget[i as usize];
2751
2752 if heap_usage >= heap_budget / 100 * threshold as u64 {
2753 return Err(crate::DeviceError::OutOfMemory);
2754 }
2755 }
2756
2757 Ok(())
2758 }
2759}
2760
2761impl super::DeviceShared {
2762 pub(super) fn new_binary_semaphore(
2763 &self,
2764 name: &str,
2765 ) -> Result<vk::Semaphore, crate::DeviceError> {
2766 unsafe {
2767 let semaphore = self
2768 .raw
2769 .create_semaphore(&vk::SemaphoreCreateInfo::default(), None)
2770 .map_err(super::map_host_device_oom_err)?;
2771
2772 self.set_object_name(semaphore, name);
2773
2774 Ok(semaphore)
2775 }
2776 }
2777
2778 pub(super) fn wait_for_fence(
2779 &self,
2780 fence: &super::Fence,
2781 wait_value: crate::FenceValue,
2782 timeout_ns: u64,
2783 ) -> Result<bool, crate::DeviceError> {
2784 profiling::scope!("Device::wait");
2785 match *fence {
2786 super::Fence::TimelineSemaphore(raw) => {
2787 let semaphores = [raw];
2788 let values = [wait_value];
2789 let vk_info = vk::SemaphoreWaitInfo::default()
2790 .semaphores(&semaphores)
2791 .values(&values);
2792 let result = match self.extension_fns.timeline_semaphore {
2793 Some(super::ExtensionFn::Extension(ref ext)) => unsafe {
2794 ext.wait_semaphores(&vk_info, timeout_ns)
2795 },
2796 Some(super::ExtensionFn::Promoted) => unsafe {
2797 self.raw.wait_semaphores(&vk_info, timeout_ns)
2798 },
2799 None => unreachable!(),
2800 };
2801 match result {
2802 Ok(()) => Ok(true),
2803 Err(vk::Result::TIMEOUT) => Ok(false),
2804 Err(other) => Err(super::map_host_device_oom_and_lost_err(other)),
2805 }
2806 }
2807 super::Fence::FencePool {
2808 last_completed,
2809 ref active,
2810 free: _,
2811 } => {
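                // Fence-pool path: values at or below `last_completed` are already
                // done; otherwise wait on the first active fence whose value covers
                // `wait_value`.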
2812 if wait_value <= last_completed {
2813 Ok(true)
2814 } else {
2815 match active.iter().find(|&&(value, _)| value >= wait_value) {
2816 Some(&(_, raw)) => {
2817 match unsafe { self.raw.wait_for_fences(&[raw], true, timeout_ns) } {
2818 Ok(()) => Ok(true),
2819 Err(vk::Result::TIMEOUT) => Ok(false),
2820 Err(other) => Err(super::map_host_device_oom_and_lost_err(other)),
2821 }
2822 }
2823 None => {
2824 crate::hal_usage_error(format!(
2825 "no signals reached value {wait_value}"
2826 ));
2827 }
2828 }
2829 }
2830 }
2831 }
2832 }
2833}
2834
2835impl From<gpu_alloc::AllocationError> for crate::DeviceError {
2836 fn from(error: gpu_alloc::AllocationError) -> Self {
2837 use gpu_alloc::AllocationError as Ae;
2838 match error {
2839 Ae::OutOfDeviceMemory | Ae::OutOfHostMemory | Ae::TooManyObjects => Self::OutOfMemory,
2840 Ae::NoCompatibleMemoryTypes => crate::hal_usage_error(error),
2841 }
2842 }
2843}
2844impl From<gpu_alloc::MapError> for crate::DeviceError {
2845 fn from(error: gpu_alloc::MapError) -> Self {
2846 use gpu_alloc::MapError as Me;
2847 match error {
2848 Me::OutOfDeviceMemory | Me::OutOfHostMemory | Me::MapFailed => Self::OutOfMemory,
2849 Me::NonHostVisible | Me::AlreadyMapped => crate::hal_usage_error(error),
2850 }
2851 }
2852}
2853impl From<gpu_descriptor::AllocationError> for crate::DeviceError {
2854 fn from(error: gpu_descriptor::AllocationError) -> Self {
2855 use gpu_descriptor::AllocationError as Ae;
2856 match error {
2857 Ae::OutOfDeviceMemory | Ae::OutOfHostMemory | Ae::Fragmentation => Self::OutOfMemory,
2858 }
2859 }
2860}
2861
2862fn handle_unexpected(err: vk::Result) -> ! {
2869 panic!("Unexpected Vulkan error: `{err}`")
2870}
2871
2872struct ImageWithoutMemory {
2873 raw: vk::Image,
2874 requirements: vk::MemoryRequirements,
2875 copy_size: crate::CopyExtent,
2876}