1use super::conv;
2use arrayvec::ArrayVec;
3use ash::vk;
4use core::{mem, ops::Range};
5use hashbrown::hash_map::Entry;
6
/// Number of command buffers allocated per pool refill, amortizing
/// `vkAllocateCommandBuffers` calls across many `begin_encoding`s.
const ALLOCATION_GRANULARITY: u32 = 16;
/// Image layout every copy destination is expected to be in.
const DST_IMAGE_LAYOUT: vk::ImageLayout = vk::ImageLayout::TRANSFER_DST_OPTIMAL;
9
10impl super::Texture {
11 fn map_buffer_copies<T>(&self, regions: T) -> impl Iterator<Item = vk::BufferImageCopy>
12 where
13 T: Iterator<Item = crate::BufferTextureCopy>,
14 {
15 let (block_width, block_height) = self.format.block_dimensions();
16 let format = self.format;
17 let copy_size = self.copy_size;
18 regions.map(move |r| {
19 let extent = r.texture_base.max_copy_size(©_size).min(&r.size);
20 let (image_subresource, image_offset) = conv::map_subresource_layers(&r.texture_base);
21 vk::BufferImageCopy {
22 buffer_offset: r.buffer_layout.offset,
23 buffer_row_length: r.buffer_layout.bytes_per_row.map_or(0, |bpr| {
24 let block_size = format
25 .block_copy_size(Some(r.texture_base.aspect.map()))
26 .unwrap();
27 block_width * (bpr / block_size)
28 }),
29 buffer_image_height: r
30 .buffer_layout
31 .rows_per_image
32 .map_or(0, |rpi| rpi * block_height),
33 image_subresource,
34 image_offset,
35 image_extent: conv::map_copy_extent(&extent),
36 }
37 })
38 }
39}
40
41impl super::CommandEncoder {
42 fn write_pass_end_timestamp_if_requested(&mut self) {
43 if let Some((query_set, index)) = self.end_of_pass_timer_query.take() {
44 unsafe {
45 self.device.raw.cmd_write_timestamp(
46 self.active,
47 vk::PipelineStageFlags::BOTTOM_OF_PIPE,
48 query_set,
49 index,
50 );
51 }
52 }
53 }
54
55 fn make_framebuffer(
56 &mut self,
57 key: super::FramebufferKey,
58 ) -> Result<vk::Framebuffer, crate::DeviceError> {
59 Ok(match self.framebuffers.entry(key) {
60 Entry::Occupied(e) => *e.get(),
61 Entry::Vacant(e) => {
62 let super::FramebufferKey {
63 raw_pass,
64 ref attachment_views,
65 attachment_identities: _,
66 extent,
67 } = *e.key();
68
69 let vk_info = vk::FramebufferCreateInfo::default()
70 .render_pass(raw_pass)
71 .width(extent.width)
72 .height(extent.height)
73 .layers(extent.depth_or_array_layers)
74 .attachments(attachment_views);
75
76 let raw = unsafe { self.device.raw.create_framebuffer(&vk_info, None).unwrap() };
77 *e.insert(raw)
78 }
79 })
80 }
81
82 fn make_temp_texture_view(
83 &mut self,
84 key: super::TempTextureViewKey,
85 ) -> Result<super::IdentifiedTextureView, crate::DeviceError> {
86 Ok(match self.temp_texture_views.entry(key) {
87 Entry::Occupied(e) => *e.get(),
88 Entry::Vacant(e) => {
89 let super::TempTextureViewKey {
90 texture,
91 texture_identity: _,
92 format,
93 mip_level,
94 depth_slice,
95 } = *e.key();
96
97 let vk_info = vk::ImageViewCreateInfo::default()
98 .image(texture)
99 .view_type(vk::ImageViewType::TYPE_2D)
100 .format(format)
101 .subresource_range(vk::ImageSubresourceRange {
102 aspect_mask: vk::ImageAspectFlags::COLOR,
103 base_mip_level: mip_level,
104 level_count: 1,
105 base_array_layer: depth_slice,
106 layer_count: 1,
107 });
108 let raw = unsafe { self.device.raw.create_image_view(&vk_info, None) }
109 .map_err(super::map_host_device_oom_and_ioca_err)?;
110
111 let identity = self.device.texture_view_identity_factory.next();
112
113 *e.insert(super::IdentifiedTextureView { raw, identity })
114 }
115 })
116 }
117}
118
119impl crate::CommandEncoder for super::CommandEncoder {
120 type A = super::Api;
121
    unsafe fn begin_encoding(&mut self, label: crate::Label) -> Result<(), crate::DeviceError> {
        // Refill the free list in batches of ALLOCATION_GRANULARITY so we
        // don't hit the Vulkan allocator on every encoder begin.
        if self.free.is_empty() {
            let vk_info = vk::CommandBufferAllocateInfo::default()
                .command_pool(self.raw)
                .command_buffer_count(ALLOCATION_GRANULARITY);
            let cmd_buf_vec = unsafe {
                self.device
                    .raw
                    .allocate_command_buffers(&vk_info)
                    .map_err(super::map_host_device_oom_err)?
            };
            self.free.extend(cmd_buf_vec);
        }
        let raw = self.free.pop().unwrap();

        // Attach the debug label (empty string when unlabeled).
        unsafe { self.device.set_object_name(raw, label.unwrap_or_default()) };

        self.rpass_debug_marker_active = false;

        // ONE_TIME_SUBMIT: each buffer is recorded once and recycled through
        // `reset_all` rather than being re-recorded.
        let vk_info = vk::CommandBufferBeginInfo::default()
            .flags(vk::CommandBufferUsageFlags::ONE_TIME_SUBMIT);
        unsafe { self.device.raw.begin_command_buffer(raw, &vk_info) }
            .map_err(super::map_host_device_oom_err)?;
        self.active = raw;

        Ok(())
    }
152
153 unsafe fn end_encoding(&mut self) -> Result<super::CommandBuffer, crate::DeviceError> {
154 let raw = self.active;
155 self.active = vk::CommandBuffer::null();
156 unsafe { self.device.raw.end_command_buffer(raw) }.map_err(map_err)?;
157 fn map_err(err: vk::Result) -> crate::DeviceError {
158 super::map_host_device_oom_err(err)
161 }
162 Ok(super::CommandBuffer { raw })
163 }
164
165 unsafe fn discard_encoding(&mut self) {
166 assert_ne!(self.active, vk::CommandBuffer::null());
170
171 self.discarded.push(self.active);
172 self.active = vk::CommandBuffer::null();
173 }
174
    unsafe fn reset_all<I>(&mut self, cmd_bufs: I)
    where
        I: Iterator<Item = super::CommandBuffer>,
    {
        // Per-recording scratch storage is no longer needed.
        self.temp.clear();
        // Both submitted and discarded buffers return to the free list; the
        // pool-level reset below puts them all back in the initial state.
        self.free
            .extend(cmd_bufs.into_iter().map(|cmd_buf| cmd_buf.raw));
        self.free.append(&mut self.discarded);
        // Drop the framebuffer cache built up during recording.
        // NOTE(review): `temp_texture_views` is not drained here — confirm
        // those image views are destroyed elsewhere.
        for (_, framebuffer) in self.framebuffers.drain() {
            unsafe { self.device.raw.destroy_framebuffer(framebuffer, None) };
        }
        // A failed pool reset is unrecoverable at this point; ignore it.
        let _ = unsafe {
            self.device
                .raw
                .reset_command_pool(self.raw, vk::CommandPoolResetFlags::default())
        };
    }
193
    unsafe fn transition_buffers<'a, T>(&mut self, barriers: T)
    where
        T: Iterator<Item = crate::BufferBarrier<'a, super::Buffer>>,
    {
        // Seed the stage masks with the pipeline endpoints, then OR in the
        // stages each individual barrier requires.
        let mut src_stages = vk::PipelineStageFlags::TOP_OF_PIPE;
        let mut dst_stages = vk::PipelineStageFlags::BOTTOM_OF_PIPE;
        // Reuse the encoder's scratch vector to avoid per-call allocation.
        let vk_barriers = &mut self.temp.buffer_barriers;
        vk_barriers.clear();

        for bar in barriers {
            let (src_stage, src_access) = conv::map_buffer_usage_to_barrier(bar.usage.from);
            src_stages |= src_stage;
            let (dst_stage, dst_access) = conv::map_buffer_usage_to_barrier(bar.usage.to);
            dst_stages |= dst_stage;

            // Whole-buffer barrier (offset 0, WHOLE_SIZE): usage is tracked
            // per buffer, not per sub-range.
            vk_barriers.push(
                vk::BufferMemoryBarrier::default()
                    .buffer(bar.buffer.raw)
                    .size(vk::WHOLE_SIZE)
                    .src_access_mask(src_access)
                    .dst_access_mask(dst_access),
            )
        }

        // Skip the Vulkan call entirely when no barriers were produced.
        if !vk_barriers.is_empty() {
            unsafe {
                self.device.raw.cmd_pipeline_barrier(
                    self.active,
                    src_stages,
                    dst_stages,
                    vk::DependencyFlags::empty(),
                    &[],
                    vk_barriers,
                    &[],
                )
            };
        }
    }
233
    unsafe fn transition_textures<'a, T>(&mut self, barriers: T)
    where
        T: Iterator<Item = crate::TextureBarrier<'a, super::Texture>>,
    {
        // Stage masks start empty; each barrier contributes its own stages,
        // and the pipeline barrier is only recorded if any were produced.
        let mut src_stages = vk::PipelineStageFlags::empty();
        let mut dst_stages = vk::PipelineStageFlags::empty();
        // Reuse the encoder's scratch vector to avoid per-call allocation.
        let vk_barriers = &mut self.temp.image_barriers;
        vk_barriers.clear();

        for bar in barriers {
            let range = conv::map_subresource_range_combined_aspect(
                &bar.range,
                bar.texture.format,
                &self.device.private_caps,
            );
            // Image layouts are derived from the usage on each side; the
            // barrier performs the layout transition as well.
            let (src_stage, src_access) = conv::map_texture_usage_to_barrier(bar.usage.from);
            let src_layout = conv::derive_image_layout(bar.usage.from, bar.texture.format);
            src_stages |= src_stage;
            let (dst_stage, dst_access) = conv::map_texture_usage_to_barrier(bar.usage.to);
            let dst_layout = conv::derive_image_layout(bar.usage.to, bar.texture.format);
            dst_stages |= dst_stage;

            vk_barriers.push(
                vk::ImageMemoryBarrier::default()
                    .image(bar.texture.raw)
                    .subresource_range(range)
                    .src_access_mask(src_access)
                    .dst_access_mask(dst_access)
                    .old_layout(src_layout)
                    .new_layout(dst_layout),
            );
        }

        if !vk_barriers.is_empty() {
            unsafe {
                self.device.raw.cmd_pipeline_barrier(
                    self.active,
                    src_stages,
                    dst_stages,
                    vk::DependencyFlags::empty(),
                    &[],
                    &[],
                    vk_barriers,
                )
            };
        }
    }
281
    unsafe fn clear_buffer(&mut self, buffer: &super::Buffer, range: crate::MemoryRange) {
        let range_size = range.end - range.start;
        // Driver workaround: on affected devices, large fills (>= 4096 bytes)
        // whose start offset is not 16-byte aligned must be split into an
        // unaligned prefix plus a 16-aligned remainder.
        if self.device.workarounds.contains(
            super::Workarounds::FORCE_FILL_BUFFER_WITH_SIZE_GREATER_4096_ALIGNED_OFFSET_16,
        ) && range_size >= 4096
            && !range.start.is_multiple_of(16)
        {
            let rounded_start = wgt::math::align_to(range.start, 16);
            let prefix_size = rounded_start - range.start;

            // Zero the unaligned prefix [range.start, rounded_start).
            unsafe {
                self.device.raw.cmd_fill_buffer(
                    self.active,
                    buffer.raw,
                    range.start,
                    prefix_size,
                    0,
                )
            };

            // Zero the aligned remainder [rounded_start, range.end).
            let suffix_size = range.end - rounded_start;

            unsafe {
                self.device.raw.cmd_fill_buffer(
                    self.active,
                    buffer.raw,
                    rounded_start,
                    suffix_size,
                    0,
                )
            };
        } else {
            // Common path: a single fill of the whole range with zeros.
            unsafe {
                self.device
                    .raw
                    .cmd_fill_buffer(self.active, buffer.raw, range.start, range_size, 0)
            };
        }
    }
322
323 unsafe fn copy_buffer_to_buffer<T>(
324 &mut self,
325 src: &super::Buffer,
326 dst: &super::Buffer,
327 regions: T,
328 ) where
329 T: Iterator<Item = crate::BufferCopy>,
330 {
331 let vk_regions_iter = regions.map(|r| vk::BufferCopy {
332 src_offset: r.src_offset,
333 dst_offset: r.dst_offset,
334 size: r.size.get(),
335 });
336
337 unsafe {
338 self.device.raw.cmd_copy_buffer(
339 self.active,
340 src.raw,
341 dst.raw,
342 &smallvec::SmallVec::<[vk::BufferCopy; 32]>::from_iter(vk_regions_iter),
343 )
344 };
345 }
346
    unsafe fn copy_texture_to_texture<T>(
        &mut self,
        src: &super::Texture,
        src_usage: wgt::TextureUses,
        dst: &super::Texture,
        regions: T,
    ) where
        T: Iterator<Item = crate::TextureCopy>,
    {
        // Source layout follows its current usage; the destination is always
        // expected in TRANSFER_DST_OPTIMAL.
        let src_layout = conv::derive_image_layout(src_usage, src.format);

        let vk_regions_iter = regions.map(|r| {
            let (src_subresource, src_offset) = conv::map_subresource_layers(&r.src_base);
            let (dst_subresource, dst_offset) = conv::map_subresource_layers(&r.dst_base);
            // Clamp the extent to what both subresources can actually hold.
            let extent = r
                .size
                .min(&r.src_base.max_copy_size(&src.copy_size))
                .min(&r.dst_base.max_copy_size(&dst.copy_size));
            vk::ImageCopy {
                src_subresource,
                src_offset,
                dst_subresource,
                dst_offset,
                extent: conv::map_copy_extent(&extent),
            }
        });

        unsafe {
            self.device.raw.cmd_copy_image(
                self.active,
                src.raw,
                src_layout,
                dst.raw,
                DST_IMAGE_LAYOUT,
                &smallvec::SmallVec::<[vk::ImageCopy; 32]>::from_iter(vk_regions_iter),
            )
        };
    }
385
386 unsafe fn copy_buffer_to_texture<T>(
387 &mut self,
388 src: &super::Buffer,
389 dst: &super::Texture,
390 regions: T,
391 ) where
392 T: Iterator<Item = crate::BufferTextureCopy>,
393 {
394 let vk_regions_iter = dst.map_buffer_copies(regions);
395
396 unsafe {
397 self.device.raw.cmd_copy_buffer_to_image(
398 self.active,
399 src.raw,
400 dst.raw,
401 DST_IMAGE_LAYOUT,
402 &smallvec::SmallVec::<[vk::BufferImageCopy; 32]>::from_iter(vk_regions_iter),
403 )
404 };
405 }
406
407 unsafe fn copy_texture_to_buffer<T>(
408 &mut self,
409 src: &super::Texture,
410 src_usage: wgt::TextureUses,
411 dst: &super::Buffer,
412 regions: T,
413 ) where
414 T: Iterator<Item = crate::BufferTextureCopy>,
415 {
416 let src_layout = conv::derive_image_layout(src_usage, src.format);
417 let vk_regions_iter = src.map_buffer_copies(regions);
418
419 unsafe {
420 self.device.raw.cmd_copy_image_to_buffer(
421 self.active,
422 src.raw,
423 src_layout,
424 dst.raw,
425 &smallvec::SmallVec::<[vk::BufferImageCopy; 32]>::from_iter(vk_regions_iter),
426 )
427 };
428 }
429
430 unsafe fn begin_query(&mut self, set: &super::QuerySet, index: u32) {
431 unsafe {
432 self.device.raw.cmd_begin_query(
433 self.active,
434 set.raw,
435 index,
436 vk::QueryControlFlags::empty(),
437 )
438 };
439 }
    /// Closes the query opened by `begin_query` at the same index.
    unsafe fn end_query(&mut self, set: &super::QuerySet, index: u32) {
        unsafe { self.device.raw.cmd_end_query(self.active, set.raw, index) };
    }
443 unsafe fn write_timestamp(&mut self, set: &super::QuerySet, index: u32) {
444 unsafe {
445 self.device.raw.cmd_write_timestamp(
446 self.active,
447 vk::PipelineStageFlags::BOTTOM_OF_PIPE,
448 set.raw,
449 index,
450 )
451 };
452 }
    /// Queries the compacted size of `acceleration_structure` and copies the
    /// 64-bit result into `buffer` at offset 0.
    unsafe fn read_acceleration_structure_compact_size(
        &mut self,
        acceleration_structure: &super::AccelerationStructure,
        buffer: &super::Buffer,
    ) {
        let ray_tracing_functions = self
            .device
            .extension_fns
            .ray_tracing
            .as_ref()
            .expect("Feature `RAY_TRACING` not enabled");
        // The dedicated query pool is assumed to exist when compaction was
        // requested at creation time — the unwrap relies on that invariant.
        let query_pool = acceleration_structure
            .compacted_size_query
            .as_ref()
            .unwrap();
        unsafe {
            // Reset query 0, write the compacted size into it, then copy the
            // result out, WAITing until it is available.
            self.device
                .raw
                .cmd_reset_query_pool(self.active, *query_pool, 0, 1);
            ray_tracing_functions
                .acceleration_structure
                .cmd_write_acceleration_structures_properties(
                    self.active,
                    &[acceleration_structure.raw],
                    vk::QueryType::ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR,
                    *query_pool,
                    0,
                );
            self.device.raw.cmd_copy_query_pool_results(
                self.active,
                *query_pool,
                0,
                1,
                buffer.raw,
                0,
                wgt::QUERY_SIZE as vk::DeviceSize,
                vk::QueryResultFlags::TYPE_64 | vk::QueryResultFlags::WAIT,
            )
        };
    }
493 unsafe fn reset_queries(&mut self, set: &super::QuerySet, range: Range<u32>) {
494 unsafe {
495 self.device.raw.cmd_reset_query_pool(
496 self.active,
497 set.raw,
498 range.start,
499 range.end - range.start,
500 )
501 };
502 }
    /// Copies results for the queries in `range` into `buffer`, one result
    /// every `stride` bytes starting at `offset`.
    unsafe fn copy_query_results(
        &mut self,
        set: &super::QuerySet,
        range: Range<u32>,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        stride: wgt::BufferSize,
    ) {
        unsafe {
            // TYPE_64: 64-bit results; WAIT: block until every query in the
            // range is available rather than writing availability flags.
            self.device.raw.cmd_copy_query_pool_results(
                self.active,
                set.raw,
                range.start,
                range.end - range.start,
                buffer.raw,
                offset,
                stride.get(),
                vk::QueryResultFlags::TYPE_64 | vk::QueryResultFlags::WAIT,
            )
        };
    }
524
    /// Records acceleration-structure builds for every descriptor.
    ///
    /// Geometry and range arrays are accumulated in owned storage first, and
    /// raw pointers into that storage are patched into the
    /// `geometry_infos` afterwards — the storages must therefore not be
    /// touched between the patch-up loop and the final build call.
    unsafe fn build_acceleration_structures<'a, T>(&mut self, descriptor_count: u32, descriptors: T)
    where
        super::Api: 'a,
        T: IntoIterator<
            Item = crate::BuildAccelerationStructureDescriptor<
                'a,
                super::Buffer,
                super::AccelerationStructure,
            >,
        >,
    {
        // Inline SmallVec capacities: typical workloads have few descriptors
        // with one geometry each.
        const CAPACITY_OUTER: usize = 8;
        const CAPACITY_INNER: usize = 1;
        let descriptor_count = descriptor_count as usize;

        let ray_tracing_functions = self
            .device
            .extension_fns
            .ray_tracing
            .as_ref()
            .expect("Feature `RAY_TRACING` not enabled");

        let get_device_address = |buffer: Option<&super::Buffer>| unsafe {
            match buffer {
                Some(buffer) => ray_tracing_functions
                    .buffer_device_address
                    .get_buffer_device_address(
                        &vk::BufferDeviceAddressInfo::default().buffer(buffer.raw),
                    ),
                None => panic!("Buffers are required to build acceleration structures"),
            }
        };

        // Pre-reserved so pushes below never reallocate (pointers into these
        // are taken later).
        let mut ranges_storage = smallvec::SmallVec::<
            [smallvec::SmallVec<[vk::AccelerationStructureBuildRangeInfoKHR; CAPACITY_INNER]>;
                CAPACITY_OUTER],
        >::with_capacity(descriptor_count);
        let mut geometries_storage = smallvec::SmallVec::<
            [smallvec::SmallVec<[vk::AccelerationStructureGeometryKHR; CAPACITY_INNER]>;
                CAPACITY_OUTER],
        >::with_capacity(descriptor_count);

        let mut geometry_infos = smallvec::SmallVec::<
            [vk::AccelerationStructureBuildGeometryInfoKHR; CAPACITY_OUTER],
        >::with_capacity(descriptor_count);
        let mut ranges_ptrs = smallvec::SmallVec::<
            [&[vk::AccelerationStructureBuildRangeInfoKHR]; CAPACITY_OUTER],
        >::with_capacity(descriptor_count);

        for desc in descriptors {
            // Translate each entry kind into (geometries, build ranges).
            let (geometries, ranges) = match *desc.entries {
                crate::AccelerationStructureEntries::Instances(ref instances) => {
                    let instance_data = vk::AccelerationStructureGeometryInstancesDataKHR::default(
                    )
                    .data(vk::DeviceOrHostAddressConstKHR {
                        device_address: get_device_address(instances.buffer),
                    });

                    let geometry = vk::AccelerationStructureGeometryKHR::default()
                        .geometry_type(vk::GeometryTypeKHR::INSTANCES)
                        .geometry(vk::AccelerationStructureGeometryDataKHR {
                            instances: instance_data,
                        });

                    let range = vk::AccelerationStructureBuildRangeInfoKHR::default()
                        .primitive_count(instances.count)
                        .primitive_offset(instances.offset);

                    (smallvec::smallvec![geometry], smallvec::smallvec![range])
                }
                crate::AccelerationStructureEntries::Triangles(ref in_geometries) => {
                    let mut ranges = smallvec::SmallVec::<
                        [vk::AccelerationStructureBuildRangeInfoKHR; CAPACITY_INNER],
                    >::with_capacity(in_geometries.len());
                    let mut geometries = smallvec::SmallVec::<
                        [vk::AccelerationStructureGeometryKHR; CAPACITY_INNER],
                    >::with_capacity(in_geometries.len());
                    for triangles in in_geometries {
                        // Default to non-indexed; overridden below when the
                        // geometry carries an index buffer.
                        let mut triangle_data =
                            vk::AccelerationStructureGeometryTrianglesDataKHR::default()
                                .index_type(vk::IndexType::NONE_KHR)
                                .vertex_data(vk::DeviceOrHostAddressConstKHR {
                                    device_address: get_device_address(triangles.vertex_buffer),
                                })
                                .vertex_format(conv::map_vertex_format(triangles.vertex_format))
                                .max_vertex(triangles.vertex_count)
                                .vertex_stride(triangles.vertex_stride);

                        let mut range = vk::AccelerationStructureBuildRangeInfoKHR::default();

                        if let Some(ref indices) = triangles.indices {
                            triangle_data = triangle_data
                                .index_data(vk::DeviceOrHostAddressConstKHR {
                                    device_address: get_device_address(indices.buffer),
                                })
                                .index_type(conv::map_index_format(indices.format));

                            range = range
                                .primitive_count(indices.count / 3)
                                .primitive_offset(indices.offset)
                                .first_vertex(triangles.first_vertex);
                        } else {
                            range = range
                                .primitive_count(triangles.vertex_count / 3)
                                .first_vertex(triangles.first_vertex);
                        }

                        // Optional per-geometry transform matrix, referenced
                        // by device address.
                        if let Some(ref transform) = triangles.transform {
                            let transform_device_address = unsafe {
                                ray_tracing_functions
                                    .buffer_device_address
                                    .get_buffer_device_address(
                                        &vk::BufferDeviceAddressInfo::default()
                                            .buffer(transform.buffer.raw),
                                    )
                            };
                            triangle_data =
                                triangle_data.transform_data(vk::DeviceOrHostAddressConstKHR {
                                    device_address: transform_device_address,
                                });

                            range = range.transform_offset(transform.offset);
                        }

                        let geometry = vk::AccelerationStructureGeometryKHR::default()
                            .geometry_type(vk::GeometryTypeKHR::TRIANGLES)
                            .geometry(vk::AccelerationStructureGeometryDataKHR {
                                triangles: triangle_data,
                            })
                            .flags(conv::map_acceleration_structure_geometry_flags(
                                triangles.flags,
                            ));

                        geometries.push(geometry);
                        ranges.push(range);
                    }
                    (geometries, ranges)
                }
                crate::AccelerationStructureEntries::AABBs(ref in_geometries) => {
                    let mut ranges = smallvec::SmallVec::<
                        [vk::AccelerationStructureBuildRangeInfoKHR; CAPACITY_INNER],
                    >::with_capacity(in_geometries.len());
                    let mut geometries = smallvec::SmallVec::<
                        [vk::AccelerationStructureGeometryKHR; CAPACITY_INNER],
                    >::with_capacity(in_geometries.len());
                    for aabb in in_geometries {
                        let aabbs_data = vk::AccelerationStructureGeometryAabbsDataKHR::default()
                            .data(vk::DeviceOrHostAddressConstKHR {
                                device_address: get_device_address(aabb.buffer),
                            })
                            .stride(aabb.stride);

                        let range = vk::AccelerationStructureBuildRangeInfoKHR::default()
                            .primitive_count(aabb.count)
                            .primitive_offset(aabb.offset);

                        let geometry = vk::AccelerationStructureGeometryKHR::default()
                            .geometry_type(vk::GeometryTypeKHR::AABBS)
                            .geometry(vk::AccelerationStructureGeometryDataKHR {
                                aabbs: aabbs_data,
                            })
                            .flags(conv::map_acceleration_structure_geometry_flags(aabb.flags));

                        geometries.push(geometry);
                        ranges.push(range);
                    }
                    (geometries, ranges)
                }
            };

            ranges_storage.push(ranges);
            geometries_storage.push(geometries);

            let scratch_device_address = unsafe {
                ray_tracing_functions
                    .buffer_device_address
                    .get_buffer_device_address(
                        &vk::BufferDeviceAddressInfo::default().buffer(desc.scratch_buffer.raw),
                    )
            };
            // Instance entries build a TLAS; triangles/AABBs build a BLAS.
            let ty = match *desc.entries {
                crate::AccelerationStructureEntries::Instances(_) => {
                    vk::AccelerationStructureTypeKHR::TOP_LEVEL
                }
                _ => vk::AccelerationStructureTypeKHR::BOTTOM_LEVEL,
            };
            let mut geometry_info = vk::AccelerationStructureBuildGeometryInfoKHR::default()
                .ty(ty)
                .mode(conv::map_acceleration_structure_build_mode(desc.mode))
                .flags(conv::map_acceleration_structure_flags(desc.flags))
                .dst_acceleration_structure(desc.destination_acceleration_structure.raw)
                .scratch_data(vk::DeviceOrHostAddressKHR {
                    device_address: scratch_device_address + desc.scratch_buffer_offset,
                });

            // Updates refine an existing structure; default to in-place
            // update of the destination when no source is given.
            if desc.mode == crate::AccelerationStructureBuildMode::Update {
                geometry_info.src_acceleration_structure = desc
                    .source_acceleration_structure
                    .unwrap_or(desc.destination_acceleration_structure)
                    .raw;
            }

            geometry_infos.push(geometry_info);
        }

        // Patch the geometry pointers in now that the storages are final and
        // will no longer move.
        for (i, geometry_info) in geometry_infos.iter_mut().enumerate() {
            geometry_info.geometry_count = geometries_storage[i].len() as u32;
            geometry_info.p_geometries = geometries_storage[i].as_ptr();
            ranges_ptrs.push(&ranges_storage[i]);
        }

        unsafe {
            ray_tracing_functions
                .acceleration_structure
                .cmd_build_acceleration_structures(self.active, &geometry_infos, &ranges_ptrs);
        }
    }
747
    /// Emits a global memory barrier between two acceleration-structure
    /// usage states.
    unsafe fn place_acceleration_structure_barrier(
        &mut self,
        barrier: crate::AccelerationStructureBarrier,
    ) {
        let (src_stage, src_access) = conv::map_acceleration_structure_usage_to_barrier(
            barrier.usage.from,
            self.device.features,
        );
        let (dst_stage, dst_access) = conv::map_acceleration_structure_usage_to_barrier(
            barrier.usage.to,
            self.device.features,
        );

        unsafe {
            // OR in the pipeline endpoints so neither stage mask can be
            // empty, which `vkCmdPipelineBarrier` does not allow.
            self.device.raw.cmd_pipeline_barrier(
                self.active,
                src_stage | vk::PipelineStageFlags::TOP_OF_PIPE,
                dst_stage | vk::PipelineStageFlags::BOTTOM_OF_PIPE,
                vk::DependencyFlags::empty(),
                &[vk::MemoryBarrier::default()
                    .src_access_mask(src_access)
                    .dst_access_mask(dst_access)],
                &[],
                &[],
            )
        };
    }
775
    /// Intentionally a no-op on Vulkan — presumably dependencies are
    /// expressed through explicit barriers
    /// (`place_acceleration_structure_barrier`) instead; confirm against
    /// other backends' implementations.
    unsafe fn set_acceleration_structure_dependencies(
        _command_buffers: &[&super::CommandBuffer],
        _dependencies: &[&super::AccelerationStructure],
    ) {
    }
781 unsafe fn begin_render_pass(
784 &mut self,
785 desc: &crate::RenderPassDescriptor<super::QuerySet, super::TextureView>,
786 ) -> Result<(), crate::DeviceError> {
787 let mut vk_clear_values =
788 ArrayVec::<vk::ClearValue, { super::MAX_TOTAL_ATTACHMENTS }>::new();
789 let mut rp_key = super::RenderPassKey {
790 colors: ArrayVec::default(),
791 depth_stencil: None,
792 sample_count: desc.sample_count,
793 multiview_mask: desc.multiview_mask,
794 };
795 let mut fb_key = super::FramebufferKey {
796 raw_pass: vk::RenderPass::null(),
797 attachment_views: ArrayVec::default(),
798 attachment_identities: ArrayVec::default(),
799 extent: desc.extent,
800 };
801
802 for cat in desc.color_attachments {
803 if let Some(cat) = cat.as_ref() {
804 let color_view = if cat.target.view.dimension == wgt::TextureViewDimension::D3 {
805 let key = super::TempTextureViewKey {
806 texture: cat.target.view.raw_texture,
807 texture_identity: cat.target.view.texture_identity,
808 format: cat.target.view.raw_format,
809 mip_level: cat.target.view.base_mip_level,
810 depth_slice: cat.depth_slice.unwrap(),
811 };
812 self.make_temp_texture_view(key)?
813 } else {
814 cat.target.view.identified_raw_view()
815 };
816
817 vk_clear_values.push(vk::ClearValue {
818 color: unsafe { cat.make_vk_clear_color() },
819 });
820 let color = super::ColorAttachmentKey {
821 base: cat.target.make_attachment_key(cat.ops),
822 resolve: cat.resolve_target.as_ref().map(|target| {
823 target.make_attachment_key(
824 crate::AttachmentOps::LOAD_CLEAR | crate::AttachmentOps::STORE,
825 )
826 }),
827 };
828
829 rp_key.colors.push(Some(color));
830 fb_key.push_view(color_view);
831 if let Some(ref at) = cat.resolve_target {
832 vk_clear_values.push(unsafe { mem::zeroed() });
833 fb_key.push_view(at.view.identified_raw_view());
834 }
835 } else {
836 rp_key.colors.push(None);
837 }
838 }
839 if let Some(ref ds) = desc.depth_stencil_attachment {
840 vk_clear_values.push(vk::ClearValue {
841 depth_stencil: vk::ClearDepthStencilValue {
842 depth: ds.clear_value.0,
843 stencil: ds.clear_value.1,
844 },
845 });
846 rp_key.depth_stencil = Some(super::DepthStencilAttachmentKey {
847 base: ds.target.make_attachment_key(ds.depth_ops),
848 stencil_ops: ds.stencil_ops,
849 });
850 fb_key.push_view(ds.target.view.identified_raw_view());
851 }
852
853 let render_area = vk::Rect2D {
854 offset: vk::Offset2D { x: 0, y: 0 },
855 extent: vk::Extent2D {
856 width: desc.extent.width,
857 height: desc.extent.height,
858 },
859 };
860 let vk_viewports = [vk::Viewport {
861 x: 0.0,
862 y: desc.extent.height as f32,
863 width: desc.extent.width as f32,
864 height: -(desc.extent.height as f32),
865 min_depth: 0.0,
866 max_depth: 1.0,
867 }];
868
869 let raw_pass = self.device.make_render_pass(rp_key).unwrap();
870 fb_key.raw_pass = raw_pass;
871 let raw_framebuffer = self.make_framebuffer(fb_key).unwrap();
872
873 let vk_info = vk::RenderPassBeginInfo::default()
874 .render_pass(raw_pass)
875 .render_area(render_area)
876 .clear_values(&vk_clear_values)
877 .framebuffer(raw_framebuffer);
878
879 if let Some(label) = desc.label {
880 unsafe { self.begin_debug_marker(label) };
881 self.rpass_debug_marker_active = true;
882 }
883
884 if let Some(timestamp_writes) = desc.timestamp_writes.as_ref() {
886 if let Some(index) = timestamp_writes.beginning_of_pass_write_index {
887 unsafe {
888 self.write_timestamp(timestamp_writes.query_set, index);
889 }
890 }
891 self.end_of_pass_timer_query = timestamp_writes
892 .end_of_pass_write_index
893 .map(|index| (timestamp_writes.query_set.raw, index));
894 }
895
896 unsafe {
897 self.device
898 .raw
899 .cmd_set_viewport(self.active, 0, &vk_viewports);
900 self.device
901 .raw
902 .cmd_set_scissor(self.active, 0, &[render_area]);
903 self.device.raw.cmd_begin_render_pass(
904 self.active,
905 &vk_info,
906 vk::SubpassContents::INLINE,
907 );
908 };
909
910 self.bind_point = vk::PipelineBindPoint::GRAPHICS;
911
912 Ok(())
913 }
    unsafe fn end_render_pass(&mut self) {
        unsafe {
            self.device.raw.cmd_end_render_pass(self.active);
        }

        // The deferred end-of-pass timestamp must land outside the pass.
        self.write_pass_end_timestamp_if_requested();

        // Close the debug label group opened in `begin_render_pass`, if any.
        if self.rpass_debug_marker_active {
            unsafe {
                self.end_debug_marker();
            }
            self.rpass_debug_marker_active = false;
        }
    }
929
930 unsafe fn set_bind_group(
931 &mut self,
932 layout: &super::PipelineLayout,
933 index: u32,
934 group: &super::BindGroup,
935 dynamic_offsets: &[wgt::DynamicOffset],
936 ) {
937 let sets = [*group.set.raw()];
938 unsafe {
939 self.device.raw.cmd_bind_descriptor_sets(
940 self.active,
941 self.bind_point,
942 layout.raw,
943 index,
944 &sets,
945 dynamic_offsets,
946 )
947 };
948 }
949 unsafe fn set_immediates(
950 &mut self,
951 layout: &super::PipelineLayout,
952 offset_bytes: u32,
953 data: &[u32],
954 ) {
955 unsafe {
956 self.device.raw.cmd_push_constants(
957 self.active,
958 layout.raw,
959 vk::ShaderStageFlags::ALL,
960 offset_bytes,
961 bytemuck::cast_slice(data),
962 )
963 };
964 }
965
966 unsafe fn insert_debug_marker(&mut self, label: &str) {
967 if let Some(ext) = self.device.extension_fns.debug_utils.as_ref() {
968 let cstr = self.temp.make_c_str(label);
969 let vk_label = vk::DebugUtilsLabelEXT::default().label_name(cstr);
970 unsafe { ext.cmd_insert_debug_utils_label(self.active, &vk_label) };
971 }
972 }
973 unsafe fn begin_debug_marker(&mut self, group_label: &str) {
974 if let Some(ext) = self.device.extension_fns.debug_utils.as_ref() {
975 let cstr = self.temp.make_c_str(group_label);
976 let vk_label = vk::DebugUtilsLabelEXT::default().label_name(cstr);
977 unsafe { ext.cmd_begin_debug_utils_label(self.active, &vk_label) };
978 }
979 }
980 unsafe fn end_debug_marker(&mut self) {
981 if let Some(ext) = self.device.extension_fns.debug_utils.as_ref() {
982 unsafe { ext.cmd_end_debug_utils_label(self.active) };
983 }
984 }
985
986 unsafe fn set_render_pipeline(&mut self, pipeline: &super::RenderPipeline) {
987 unsafe {
988 self.current_pipeline_is_multiview = pipeline.is_multiview;
989 self.device.raw.cmd_bind_pipeline(
990 self.active,
991 vk::PipelineBindPoint::GRAPHICS,
992 pipeline.raw,
993 )
994 };
995 }
996
997 unsafe fn set_index_buffer<'a>(
998 &mut self,
999 binding: crate::BufferBinding<'a, super::Buffer>,
1000 format: wgt::IndexFormat,
1001 ) {
1002 unsafe {
1003 self.device.raw.cmd_bind_index_buffer(
1004 self.active,
1005 binding.buffer.raw,
1006 binding.offset,
1007 conv::map_index_format(format),
1008 )
1009 };
1010 }
1011 unsafe fn set_vertex_buffer<'a>(
1012 &mut self,
1013 index: u32,
1014 binding: crate::BufferBinding<'a, super::Buffer>,
1015 ) {
1016 let vk_buffers = [binding.buffer.raw];
1017 let vk_offsets = [binding.offset];
1018 unsafe {
1019 self.device
1020 .raw
1021 .cmd_bind_vertex_buffers(self.active, index, &vk_buffers, &vk_offsets)
1022 };
1023 }
1024 unsafe fn set_viewport(&mut self, rect: &crate::Rect<f32>, depth_range: Range<f32>) {
1025 let vk_viewports = [vk::Viewport {
1026 x: rect.x,
1027 y: rect.y + rect.h,
1028 width: rect.w,
1029 height: -rect.h, min_depth: depth_range.start,
1031 max_depth: depth_range.end,
1032 }];
1033 unsafe {
1034 self.device
1035 .raw
1036 .cmd_set_viewport(self.active, 0, &vk_viewports)
1037 };
1038 }
1039 unsafe fn set_scissor_rect(&mut self, rect: &crate::Rect<u32>) {
1040 let vk_scissors = [vk::Rect2D {
1041 offset: vk::Offset2D {
1042 x: rect.x as i32,
1043 y: rect.y as i32,
1044 },
1045 extent: vk::Extent2D {
1046 width: rect.w,
1047 height: rect.h,
1048 },
1049 }];
1050 unsafe {
1051 self.device
1052 .raw
1053 .cmd_set_scissor(self.active, 0, &vk_scissors)
1054 };
1055 }
    /// Sets the stencil reference; wgpu exposes a single value, applied to
    /// both faces.
    unsafe fn set_stencil_reference(&mut self, value: u32) {
        unsafe {
            self.device.raw.cmd_set_stencil_reference(
                self.active,
                vk::StencilFaceFlags::FRONT_AND_BACK,
                value,
            )
        };
    }
    /// Sets the RGBA blend constants used by constant-color blend factors.
    unsafe fn set_blend_constants(&mut self, color: &[f32; 4]) {
        unsafe { self.device.raw.cmd_set_blend_constants(self.active, color) };
    }
1068
    unsafe fn draw(
        &mut self,
        first_vertex: u32,
        vertex_count: u32,
        first_instance: u32,
        instance_count: u32,
    ) {
        // Workaround for gfx-rs/wgpu#8333: multiview draws whose highest
        // instance index exceeds a device-specific limit misbehave, so fail
        // loudly instead of rendering garbage.
        if self.current_pipeline_is_multiview
            && (first_instance as u64 + instance_count as u64 - 1)
                > self.device.private_caps.multiview_instance_index_limit as u64
        {
            panic!("This vulkan device is affected by [#8333](https://github.com/gfx-rs/wgpu/issues/8333)");
        }
        unsafe {
            self.device.raw.cmd_draw(
                self.active,
                vertex_count,
                instance_count,
                first_vertex,
                first_instance,
            )
        };
    }
    unsafe fn draw_indexed(
        &mut self,
        first_index: u32,
        index_count: u32,
        base_vertex: i32,
        first_instance: u32,
        instance_count: u32,
    ) {
        // Same multiview instance-index guard as in `draw` (gfx-rs/wgpu#8333).
        if self.current_pipeline_is_multiview
            && (first_instance as u64 + instance_count as u64 - 1)
                > self.device.private_caps.multiview_instance_index_limit as u64
        {
            panic!("This vulkan device is affected by [#8333](https://github.com/gfx-rs/wgpu/issues/8333)");
        }
        unsafe {
            self.device.raw.cmd_draw_indexed(
                self.active,
                index_count,
                instance_count,
                first_index,
                base_vertex,
                first_instance,
            )
        };
    }
1117 unsafe fn draw_mesh_tasks(
1118 &mut self,
1119 group_count_x: u32,
1120 group_count_y: u32,
1121 group_count_z: u32,
1122 ) {
1123 if let Some(ref t) = self.device.extension_fns.mesh_shading {
1124 unsafe {
1125 t.cmd_draw_mesh_tasks(self.active, group_count_x, group_count_y, group_count_z);
1126 };
1127 } else {
1128 panic!("Feature `MESH_SHADING` not enabled");
1129 }
1130 }
    unsafe fn draw_indirect(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    ) {
        // Use the native multi-draw path when the device supports it and the
        // count fits within its limit...
        if draw_count >= 1
            && self.device.private_caps.multi_draw_indirect
            && draw_count <= self.device.private_caps.max_draw_indirect_count
        {
            unsafe {
                self.device.raw.cmd_draw_indirect(
                    self.active,
                    buffer.raw,
                    offset,
                    draw_count,
                    size_of::<wgt::DrawIndirectArgs>() as u32,
                )
            };
        } else {
            // ...otherwise emulate it with one indirect draw per element,
            // stepping through the argument buffer manually.
            for i in 0..draw_count {
                let indirect_offset = offset
                    + i as wgt::BufferAddress
                        * size_of::<wgt::DrawIndirectArgs>() as wgt::BufferAddress;
                unsafe {
                    self.device.raw.cmd_draw_indirect(
                        self.active,
                        buffer.raw,
                        indirect_offset,
                        1,
                        size_of::<wgt::DrawIndirectArgs>() as u32,
                    )
                };
            }
        }
    }
1167 unsafe fn draw_indexed_indirect(
1168 &mut self,
1169 buffer: &super::Buffer,
1170 offset: wgt::BufferAddress,
1171 draw_count: u32,
1172 ) {
1173 if draw_count >= 1
1174 && self.device.private_caps.multi_draw_indirect
1175 && draw_count <= self.device.private_caps.max_draw_indirect_count
1176 {
1177 unsafe {
1178 self.device.raw.cmd_draw_indexed_indirect(
1179 self.active,
1180 buffer.raw,
1181 offset,
1182 draw_count,
1183 size_of::<wgt::DrawIndexedIndirectArgs>() as u32,
1184 )
1185 };
1186 } else {
1187 for i in 0..draw_count {
1188 let indirect_offset = offset
1189 + i as wgt::BufferAddress
1190 * size_of::<wgt::DrawIndexedIndirectArgs>() as wgt::BufferAddress;
1191 unsafe {
1192 self.device.raw.cmd_draw_indexed_indirect(
1193 self.active,
1194 buffer.raw,
1195 indirect_offset,
1196 1,
1197 size_of::<wgt::DrawIndexedIndirectArgs>() as u32,
1198 )
1199 };
1200 }
1201 }
1202 }
1203 unsafe fn draw_mesh_tasks_indirect(
1204 &mut self,
1205 buffer: &<Self::A as crate::Api>::Buffer,
1206 offset: wgt::BufferAddress,
1207 draw_count: u32,
1208 ) {
1209 if let Some(ref t) = self.device.extension_fns.mesh_shading {
1210 unsafe {
1211 t.cmd_draw_mesh_tasks_indirect(
1212 self.active,
1213 buffer.raw,
1214 offset,
1215 draw_count,
1216 size_of::<wgt::DispatchIndirectArgs>() as u32,
1217 );
1218 };
1219 } else {
1220 panic!("Feature `MESH_SHADING` not enabled");
1221 }
1222 }
1223 unsafe fn draw_indirect_count(
1224 &mut self,
1225 buffer: &super::Buffer,
1226 offset: wgt::BufferAddress,
1227 count_buffer: &super::Buffer,
1228 count_offset: wgt::BufferAddress,
1229 max_count: u32,
1230 ) {
1231 let stride = size_of::<wgt::DrawIndirectArgs>() as u32;
1232 match self.device.extension_fns.draw_indirect_count {
1233 Some(ref t) => {
1234 unsafe {
1235 t.cmd_draw_indirect_count(
1236 self.active,
1237 buffer.raw,
1238 offset,
1239 count_buffer.raw,
1240 count_offset,
1241 max_count,
1242 stride,
1243 )
1244 };
1245 }
1246 None => panic!("Feature `DRAW_INDIRECT_COUNT` not enabled"),
1247 }
1248 }
1249 unsafe fn draw_indexed_indirect_count(
1250 &mut self,
1251 buffer: &super::Buffer,
1252 offset: wgt::BufferAddress,
1253 count_buffer: &super::Buffer,
1254 count_offset: wgt::BufferAddress,
1255 max_count: u32,
1256 ) {
1257 let stride = size_of::<wgt::DrawIndexedIndirectArgs>() as u32;
1258 match self.device.extension_fns.draw_indirect_count {
1259 Some(ref t) => {
1260 unsafe {
1261 t.cmd_draw_indexed_indirect_count(
1262 self.active,
1263 buffer.raw,
1264 offset,
1265 count_buffer.raw,
1266 count_offset,
1267 max_count,
1268 stride,
1269 )
1270 };
1271 }
1272 None => panic!("Feature `DRAW_INDIRECT_COUNT` not enabled"),
1273 }
1274 }
1275 unsafe fn draw_mesh_tasks_indirect_count(
1276 &mut self,
1277 buffer: &<Self::A as crate::Api>::Buffer,
1278 offset: wgt::BufferAddress,
1279 count_buffer: &super::Buffer,
1280 count_offset: wgt::BufferAddress,
1281 max_count: u32,
1282 ) {
1283 if self.device.extension_fns.draw_indirect_count.is_none() {
1284 panic!("Feature `DRAW_INDIRECT_COUNT` not enabled");
1285 }
1286 if let Some(ref t) = self.device.extension_fns.mesh_shading {
1287 unsafe {
1288 t.cmd_draw_mesh_tasks_indirect_count(
1289 self.active,
1290 buffer.raw,
1291 offset,
1292 count_buffer.raw,
1293 count_offset,
1294 max_count,
1295 size_of::<wgt::DispatchIndirectArgs>() as u32,
1296 );
1297 };
1298 } else {
1299 panic!("Feature `MESH_SHADING` not enabled");
1300 }
1301 }
1302
1303 unsafe fn begin_compute_pass(
1306 &mut self,
1307 desc: &crate::ComputePassDescriptor<'_, super::QuerySet>,
1308 ) {
1309 self.bind_point = vk::PipelineBindPoint::COMPUTE;
1310 if let Some(label) = desc.label {
1311 unsafe { self.begin_debug_marker(label) };
1312 self.rpass_debug_marker_active = true;
1313 }
1314
1315 if let Some(timestamp_writes) = desc.timestamp_writes.as_ref() {
1316 if let Some(index) = timestamp_writes.beginning_of_pass_write_index {
1317 unsafe {
1318 self.write_timestamp(timestamp_writes.query_set, index);
1319 }
1320 }
1321 self.end_of_pass_timer_query = timestamp_writes
1322 .end_of_pass_write_index
1323 .map(|index| (timestamp_writes.query_set.raw, index));
1324 }
1325 }
1326 unsafe fn end_compute_pass(&mut self) {
1327 self.write_pass_end_timestamp_if_requested();
1328
1329 if self.rpass_debug_marker_active {
1330 unsafe { self.end_debug_marker() };
1331 self.rpass_debug_marker_active = false
1332 }
1333 }
1334
1335 unsafe fn set_compute_pipeline(&mut self, pipeline: &super::ComputePipeline) {
1336 unsafe {
1337 self.device.raw.cmd_bind_pipeline(
1338 self.active,
1339 vk::PipelineBindPoint::COMPUTE,
1340 pipeline.raw,
1341 )
1342 };
1343 }
1344
1345 unsafe fn dispatch(&mut self, count: [u32; 3]) {
1346 unsafe {
1347 self.device
1348 .raw
1349 .cmd_dispatch(self.active, count[0], count[1], count[2])
1350 };
1351 }
1352 unsafe fn dispatch_indirect(&mut self, buffer: &super::Buffer, offset: wgt::BufferAddress) {
1353 unsafe {
1354 self.device
1355 .raw
1356 .cmd_dispatch_indirect(self.active, buffer.raw, offset)
1357 }
1358 }
1359
1360 unsafe fn copy_acceleration_structure_to_acceleration_structure(
1361 &mut self,
1362 src: &super::AccelerationStructure,
1363 dst: &super::AccelerationStructure,
1364 copy: wgt::AccelerationStructureCopy,
1365 ) {
1366 let ray_tracing_functions = self
1367 .device
1368 .extension_fns
1369 .ray_tracing
1370 .as_ref()
1371 .expect("Feature `RAY_TRACING` not enabled");
1372
1373 let mode = match copy {
1374 wgt::AccelerationStructureCopy::Clone => vk::CopyAccelerationStructureModeKHR::CLONE,
1375 wgt::AccelerationStructureCopy::Compact => {
1376 vk::CopyAccelerationStructureModeKHR::COMPACT
1377 }
1378 };
1379
1380 unsafe {
1381 ray_tracing_functions
1382 .acceleration_structure
1383 .cmd_copy_acceleration_structure(
1384 self.active,
1385 &vk::CopyAccelerationStructureInfoKHR {
1386 s_type: vk::StructureType::COPY_ACCELERATION_STRUCTURE_INFO_KHR,
1387 p_next: core::ptr::null(),
1388 src: src.raw,
1389 dst: dst.raw,
1390 mode,
1391 _marker: Default::default(),
1392 },
1393 );
1394 }
1395 }
1396}
1397
#[test]
fn check_dst_image_layout() {
    // The hard-coded transfer-destination layout must agree with what
    // `conv::derive_image_layout` derives for a COPY_DST texture use.
    let derived =
        conv::derive_image_layout(wgt::TextureUses::COPY_DST, wgt::TextureFormat::Rgba8Unorm);
    assert_eq!(derived, DST_IMAGE_LAYOUT);
}