use super::conv;
use arrayvec::ArrayVec;
use ash::vk;
use core::{mem, ops::Range};
use hashbrown::hash_map::Entry;

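/// How many command buffers are allocated from the pool at a time when the
/// free list runs dry (see `begin_encoding`).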
const ALLOCATION_GRANULARITY: u32 = 16;
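/// Layout that every copy-destination image is expected to be in; the test at
/// the bottom of this file keeps it in sync with `conv::derive_image_layout`.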
const DST_IMAGE_LAYOUT: vk::ImageLayout = vk::ImageLayout::TRANSFER_DST_OPTIMAL;

impl super::Texture {
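    /// Maps `BufferTextureCopy` regions to Vulkan buffer-image copies,
    /// clamping each region's extent to what the texture can actually supply
    /// and converting the buffer layout from bytes and rows into texel-block
    /// units.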
    fn map_buffer_copies<T>(&self, regions: T) -> impl Iterator<Item = vk::BufferImageCopy>
    where
        T: Iterator<Item = crate::BufferTextureCopy>,
    {
        let (block_width, block_height) = self.format.block_dimensions();
        let format = self.format;
        let copy_size = self.copy_size;
        regions.map(move |r| {
            let extent = r.texture_base.max_copy_size(&copy_size).min(&r.size);
            let (image_subresource, image_offset) = conv::map_subresource_layers(&r.texture_base);
            vk::BufferImageCopy {
                buffer_offset: r.buffer_layout.offset,
                buffer_row_length: r.buffer_layout.bytes_per_row.map_or(0, |bpr| {
                    let block_size = format
                        .block_copy_size(Some(r.texture_base.aspect.map()))
                        .unwrap();
                    block_width * (bpr / block_size)
                }),
                buffer_image_height: r
                    .buffer_layout
                    .rows_per_image
                    .map_or(0, |rpi| rpi * block_height),
                image_subresource,
                image_offset,
                image_extent: conv::map_copy_extent(&extent),
            }
        })
    }
}

impl super::CommandEncoder {
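    /// Writes the end-of-pass timestamp requested through `begin_render_pass`
    /// or `begin_compute_pass`, if any, and clears the request.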
    fn write_pass_end_timestamp_if_requested(&mut self) {
        if let Some((query_set, index)) = self.end_of_pass_timer_query.take() {
            unsafe {
                self.device.raw.cmd_write_timestamp(
                    self.active,
                    vk::PipelineStageFlags::BOTTOM_OF_PIPE,
                    query_set,
                    index,
                );
            }
        }
    }

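    /// Returns the framebuffer for `key`, creating and caching it on first
    /// use. Cached entries live until `reset_all` destroys them.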
    fn make_framebuffer(
        &mut self,
        key: super::FramebufferKey,
    ) -> Result<vk::Framebuffer, crate::DeviceError> {
        Ok(match self.framebuffers.entry(key) {
            Entry::Occupied(e) => *e.get(),
            Entry::Vacant(e) => {
                let super::FramebufferKey {
                    raw_pass,
                    ref attachment_views,
                    attachment_identities: _,
                    extent,
                } = *e.key();

                let vk_info = vk::FramebufferCreateInfo::default()
                    .render_pass(raw_pass)
                    .width(extent.width)
                    .height(extent.height)
                    .layers(extent.depth_or_array_layers)
                    .attachments(attachment_views);

                let raw = unsafe { self.device.raw.create_framebuffer(&vk_info, None) }
                    .map_err(super::map_host_device_oom_err)?;
                *e.insert(raw)
            }
        })
    }

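    /// Returns a cached 2D view of a single depth slice of a 3D texture;
    /// `begin_render_pass` uses these when a 3D texture slice is bound as a
    /// color attachment.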
    fn make_temp_texture_view(
        &mut self,
        key: super::TempTextureViewKey,
    ) -> Result<super::IdentifiedTextureView, crate::DeviceError> {
        Ok(match self.temp_texture_views.entry(key) {
            Entry::Occupied(e) => *e.get(),
            Entry::Vacant(e) => {
                let super::TempTextureViewKey {
                    texture,
                    texture_identity: _,
                    format,
                    mip_level,
                    depth_slice,
                } = *e.key();

                let vk_info = vk::ImageViewCreateInfo::default()
                    .image(texture)
                    .view_type(vk::ImageViewType::TYPE_2D)
                    .format(format)
                    .subresource_range(vk::ImageSubresourceRange {
                        aspect_mask: vk::ImageAspectFlags::COLOR,
                        base_mip_level: mip_level,
                        level_count: 1,
                        base_array_layer: depth_slice,
                        layer_count: 1,
                    });
                let raw = unsafe { self.device.raw.create_image_view(&vk_info, None) }
                    .map_err(super::map_host_device_oom_and_ioca_err)?;

                let identity = self.device.texture_view_identity_factory.next();

                *e.insert(super::IdentifiedTextureView { raw, identity })
            }
        })
    }
}

impl crate::CommandEncoder for super::CommandEncoder {
    type A = super::Api;

    unsafe fn begin_encoding(&mut self, label: crate::Label) -> Result<(), crate::DeviceError> {
        if self.free.is_empty() {
            let vk_info = vk::CommandBufferAllocateInfo::default()
                .command_pool(self.raw)
                .command_buffer_count(ALLOCATION_GRANULARITY);
            let cmd_buf_vec = unsafe {
                self.device
                    .raw
                    .allocate_command_buffers(&vk_info)
                    .map_err(super::map_host_device_oom_err)?
            };
            self.free.extend(cmd_buf_vec);
        }
        let raw = self.free.pop().unwrap();

        unsafe { self.device.set_object_name(raw, label.unwrap_or_default()) };

        self.rpass_debug_marker_active = false;

        let vk_info = vk::CommandBufferBeginInfo::default()
            .flags(vk::CommandBufferUsageFlags::ONE_TIME_SUBMIT);
        unsafe { self.device.raw.begin_command_buffer(raw, &vk_info) }
            .map_err(super::map_host_device_oom_err)?;
        self.active = raw;

        Ok(())
    }

    unsafe fn end_encoding(&mut self) -> Result<super::CommandBuffer, crate::DeviceError> {
        let raw = self.active;
        self.active = vk::CommandBuffer::null();
        unsafe { self.device.raw.end_command_buffer(raw) }.map_err(map_err)?;
        fn map_err(err: vk::Result) -> crate::DeviceError {
            super::map_host_device_oom_err(err)
        }
        Ok(super::CommandBuffer { raw })
    }

    unsafe fn discard_encoding(&mut self) {
        assert_ne!(self.active, vk::CommandBuffer::null());

        self.discarded.push(self.active);
        self.active = vk::CommandBuffer::null();
    }

    unsafe fn reset_all<I>(&mut self, cmd_bufs: I)
    where
        I: Iterator<Item = super::CommandBuffer>,
    {
        self.temp.clear();
        self.free
            .extend(cmd_bufs.into_iter().map(|cmd_buf| cmd_buf.raw));
        self.free.append(&mut self.discarded);
        for (_, framebuffer) in self.framebuffers.drain() {
            unsafe { self.device.raw.destroy_framebuffer(framebuffer, None) };
        }
        let _ = unsafe {
            self.device
                .raw
                .reset_command_pool(self.raw, vk::CommandPoolResetFlags::default())
        };
    }

    unsafe fn transition_buffers<'a, T>(&mut self, barriers: T)
    where
        T: Iterator<Item = crate::BufferBarrier<'a, super::Buffer>>,
    {
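        // Seed the stage masks with TOP_OF_PIPE/BOTTOM_OF_PIPE so they can
        // never end up empty: `vkCmdPipelineBarrier` requires non-zero stage
        // masks.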
        let mut src_stages = vk::PipelineStageFlags::TOP_OF_PIPE;
        let mut dst_stages = vk::PipelineStageFlags::BOTTOM_OF_PIPE;
        let vk_barriers = &mut self.temp.buffer_barriers;
        vk_barriers.clear();

        for bar in barriers {
            let (src_stage, src_access) = conv::map_buffer_usage_to_barrier(bar.usage.from);
            src_stages |= src_stage;
            let (dst_stage, dst_access) = conv::map_buffer_usage_to_barrier(bar.usage.to);
            dst_stages |= dst_stage;

            vk_barriers.push(
                vk::BufferMemoryBarrier::default()
                    .buffer(bar.buffer.raw)
                    .size(vk::WHOLE_SIZE)
                    .src_access_mask(src_access)
                    .dst_access_mask(dst_access),
            )
        }

        if !vk_barriers.is_empty() {
            unsafe {
                self.device.raw.cmd_pipeline_barrier(
                    self.active,
                    src_stages,
                    dst_stages,
                    vk::DependencyFlags::empty(),
                    &[],
                    vk_barriers,
                    &[],
                )
            };
        }
    }

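    // Unlike buffer barriers, each image barrier also carries the old/new
    // image layouts derived from the usage transition.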
    unsafe fn transition_textures<'a, T>(&mut self, barriers: T)
    where
        T: Iterator<Item = crate::TextureBarrier<'a, super::Texture>>,
    {
        let mut src_stages = vk::PipelineStageFlags::empty();
        let mut dst_stages = vk::PipelineStageFlags::empty();
        let vk_barriers = &mut self.temp.image_barriers;
        vk_barriers.clear();

        for bar in barriers {
            let range = conv::map_subresource_range_combined_aspect(
                &bar.range,
                bar.texture.format,
                &self.device.private_caps,
            );
            let (src_stage, src_access) = conv::map_texture_usage_to_barrier(bar.usage.from);
            let src_layout = conv::derive_image_layout(bar.usage.from, bar.texture.format);
            src_stages |= src_stage;
            let (dst_stage, dst_access) = conv::map_texture_usage_to_barrier(bar.usage.to);
            let dst_layout = conv::derive_image_layout(bar.usage.to, bar.texture.format);
            dst_stages |= dst_stage;

            vk_barriers.push(
                vk::ImageMemoryBarrier::default()
                    .image(bar.texture.raw)
                    .subresource_range(range)
                    .src_access_mask(src_access)
                    .dst_access_mask(dst_access)
                    .old_layout(src_layout)
                    .new_layout(dst_layout),
            );
        }

        if !vk_barriers.is_empty() {
            unsafe {
                self.device.raw.cmd_pipeline_barrier(
                    self.active,
                    src_stages,
                    dst_stages,
                    vk::DependencyFlags::empty(),
                    &[],
                    &[],
                    vk_barriers,
                )
            };
        }
    }

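    // Workaround: some drivers misbehave when `vkCmdFillBuffer` is recorded
    // with a size of 4096+ bytes and an offset that is not 16-byte aligned
    // (see the flag name below), so split such fills at an aligned boundary.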
    unsafe fn clear_buffer(&mut self, buffer: &super::Buffer, range: crate::MemoryRange) {
        let range_size = range.end - range.start;
        if self.device.workarounds.contains(
            super::Workarounds::FORCE_FILL_BUFFER_WITH_SIZE_GREATER_4096_ALIGNED_OFFSET_16,
        ) && range_size >= 4096
            && range.start % 16 != 0
        {
            let rounded_start = wgt::math::align_to(range.start, 16);
            let prefix_size = rounded_start - range.start;

            unsafe {
                self.device.raw.cmd_fill_buffer(
                    self.active,
                    buffer.raw,
                    range.start,
                    prefix_size,
                    0,
                )
            };

            let suffix_size = range.end - rounded_start;

            unsafe {
                self.device.raw.cmd_fill_buffer(
                    self.active,
                    buffer.raw,
                    rounded_start,
                    suffix_size,
                    0,
                )
            };
        } else {
            unsafe {
                self.device
                    .raw
                    .cmd_fill_buffer(self.active, buffer.raw, range.start, range_size, 0)
            };
        }
    }

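    // The copy functions below collect their region iterators into `SmallVec`s
    // with inline capacity for 32 regions, so the common case of a handful of
    // regions per copy never touches the heap.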
    unsafe fn copy_buffer_to_buffer<T>(
        &mut self,
        src: &super::Buffer,
        dst: &super::Buffer,
        regions: T,
    ) where
        T: Iterator<Item = crate::BufferCopy>,
    {
        let vk_regions_iter = regions.map(|r| vk::BufferCopy {
            src_offset: r.src_offset,
            dst_offset: r.dst_offset,
            size: r.size.get(),
        });

        unsafe {
            self.device.raw.cmd_copy_buffer(
                self.active,
                src.raw,
                dst.raw,
                &smallvec::SmallVec::<[vk::BufferCopy; 32]>::from_iter(vk_regions_iter),
            )
        };
    }

    unsafe fn copy_texture_to_texture<T>(
        &mut self,
        src: &super::Texture,
        src_usage: wgt::TextureUses,
        dst: &super::Texture,
        regions: T,
    ) where
        T: Iterator<Item = crate::TextureCopy>,
    {
        let src_layout = conv::derive_image_layout(src_usage, src.format);

        let vk_regions_iter = regions.map(|r| {
            let (src_subresource, src_offset) = conv::map_subresource_layers(&r.src_base);
            let (dst_subresource, dst_offset) = conv::map_subresource_layers(&r.dst_base);
            let extent = r
                .size
                .min(&r.src_base.max_copy_size(&src.copy_size))
                .min(&r.dst_base.max_copy_size(&dst.copy_size));
            vk::ImageCopy {
                src_subresource,
                src_offset,
                dst_subresource,
                dst_offset,
                extent: conv::map_copy_extent(&extent),
            }
        });

        unsafe {
            self.device.raw.cmd_copy_image(
                self.active,
                src.raw,
                src_layout,
                dst.raw,
                DST_IMAGE_LAYOUT,
                &smallvec::SmallVec::<[vk::ImageCopy; 32]>::from_iter(vk_regions_iter),
            )
        };
    }

    unsafe fn copy_buffer_to_texture<T>(
        &mut self,
        src: &super::Buffer,
        dst: &super::Texture,
        regions: T,
    ) where
        T: Iterator<Item = crate::BufferTextureCopy>,
    {
        let vk_regions_iter = dst.map_buffer_copies(regions);

        unsafe {
            self.device.raw.cmd_copy_buffer_to_image(
                self.active,
                src.raw,
                dst.raw,
                DST_IMAGE_LAYOUT,
                &smallvec::SmallVec::<[vk::BufferImageCopy; 32]>::from_iter(vk_regions_iter),
            )
        };
    }

    unsafe fn copy_texture_to_buffer<T>(
        &mut self,
        src: &super::Texture,
        src_usage: wgt::TextureUses,
        dst: &super::Buffer,
        regions: T,
    ) where
        T: Iterator<Item = crate::BufferTextureCopy>,
    {
        let src_layout = conv::derive_image_layout(src_usage, src.format);
        let vk_regions_iter = src.map_buffer_copies(regions);

        unsafe {
            self.device.raw.cmd_copy_image_to_buffer(
                self.active,
                src.raw,
                src_layout,
                dst.raw,
                &smallvec::SmallVec::<[vk::BufferImageCopy; 32]>::from_iter(vk_regions_iter),
            )
        };
    }

    unsafe fn begin_query(&mut self, set: &super::QuerySet, index: u32) {
        unsafe {
            self.device.raw.cmd_begin_query(
                self.active,
                set.raw,
                index,
                vk::QueryControlFlags::empty(),
            )
        };
    }
    unsafe fn end_query(&mut self, set: &super::QuerySet, index: u32) {
        unsafe { self.device.raw.cmd_end_query(self.active, set.raw, index) };
    }
    unsafe fn write_timestamp(&mut self, set: &super::QuerySet, index: u32) {
        unsafe {
            self.device.raw.cmd_write_timestamp(
                self.active,
                vk::PipelineStageFlags::BOTTOM_OF_PIPE,
                set.raw,
                index,
            )
        };
    }
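    // Resets the acceleration structure's compacted-size query, writes it, and
    // copies the 64-bit result into `buffer`, waiting for the result to become
    // available via `QueryResultFlags::WAIT`.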
    unsafe fn read_acceleration_structure_compact_size(
        &mut self,
        acceleration_structure: &super::AccelerationStructure,
        buffer: &super::Buffer,
    ) {
        let ray_tracing_functions = self
            .device
            .extension_fns
            .ray_tracing
            .as_ref()
            .expect("Feature `RAY_TRACING` not enabled");
        let query_pool = acceleration_structure
            .compacted_size_query
            .as_ref()
            .unwrap();
        unsafe {
            self.device
                .raw
                .cmd_reset_query_pool(self.active, *query_pool, 0, 1);
            ray_tracing_functions
                .acceleration_structure
                .cmd_write_acceleration_structures_properties(
                    self.active,
                    &[acceleration_structure.raw],
                    vk::QueryType::ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR,
                    *query_pool,
                    0,
                );
            self.device.raw.cmd_copy_query_pool_results(
                self.active,
                *query_pool,
                0,
                1,
                buffer.raw,
                0,
                wgt::QUERY_SIZE as vk::DeviceSize,
                vk::QueryResultFlags::TYPE_64 | vk::QueryResultFlags::WAIT,
            )
        };
    }
    unsafe fn reset_queries(&mut self, set: &super::QuerySet, range: Range<u32>) {
        unsafe {
            self.device.raw.cmd_reset_query_pool(
                self.active,
                set.raw,
                range.start,
                range.end - range.start,
            )
        };
    }
    unsafe fn copy_query_results(
        &mut self,
        set: &super::QuerySet,
        range: Range<u32>,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        stride: wgt::BufferSize,
    ) {
        unsafe {
            self.device.raw.cmd_copy_query_pool_results(
                self.active,
                set.raw,
                range.start,
                range.end - range.start,
                buffer.raw,
                offset,
                stride.get(),
                vk::QueryResultFlags::TYPE_64 | vk::QueryResultFlags::WAIT,
            )
        };
    }

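    // Translates each descriptor's instance/triangle/AABB entries into Vulkan
    // geometry and range structs, gathered into `SmallVec` storage sized by
    // `CAPACITY_*` so typical builds stay off the heap, then records a single
    // `vkCmdBuildAccelerationStructuresKHR` call for the whole batch.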
    unsafe fn build_acceleration_structures<'a, T>(&mut self, descriptor_count: u32, descriptors: T)
    where
        super::Api: 'a,
        T: IntoIterator<
            Item = crate::BuildAccelerationStructureDescriptor<
                'a,
                super::Buffer,
                super::AccelerationStructure,
            >,
        >,
    {
        const CAPACITY_OUTER: usize = 8;
        const CAPACITY_INNER: usize = 1;
        let descriptor_count = descriptor_count as usize;

        let ray_tracing_functions = self
            .device
            .extension_fns
            .ray_tracing
            .as_ref()
            .expect("Feature `RAY_TRACING` not enabled");

        let get_device_address = |buffer: Option<&super::Buffer>| unsafe {
            match buffer {
                Some(buffer) => ray_tracing_functions
                    .buffer_device_address
                    .get_buffer_device_address(
                        &vk::BufferDeviceAddressInfo::default().buffer(buffer.raw),
                    ),
                None => panic!("Buffers are required to build acceleration structures"),
            }
        };

        let mut ranges_storage = smallvec::SmallVec::<
            [smallvec::SmallVec<[vk::AccelerationStructureBuildRangeInfoKHR; CAPACITY_INNER]>;
                CAPACITY_OUTER],
        >::with_capacity(descriptor_count);
        let mut geometries_storage = smallvec::SmallVec::<
            [smallvec::SmallVec<[vk::AccelerationStructureGeometryKHR; CAPACITY_INNER]>;
                CAPACITY_OUTER],
        >::with_capacity(descriptor_count);

        let mut geometry_infos = smallvec::SmallVec::<
            [vk::AccelerationStructureBuildGeometryInfoKHR; CAPACITY_OUTER],
        >::with_capacity(descriptor_count);
        let mut ranges_ptrs = smallvec::SmallVec::<
            [&[vk::AccelerationStructureBuildRangeInfoKHR]; CAPACITY_OUTER],
        >::with_capacity(descriptor_count);

        for desc in descriptors {
            let (geometries, ranges) = match *desc.entries {
                crate::AccelerationStructureEntries::Instances(ref instances) => {
                    let instance_data =
                        vk::AccelerationStructureGeometryInstancesDataKHR::default().data(
                            vk::DeviceOrHostAddressConstKHR {
                                device_address: get_device_address(instances.buffer),
                            },
                        );

                    let geometry = vk::AccelerationStructureGeometryKHR::default()
                        .geometry_type(vk::GeometryTypeKHR::INSTANCES)
                        .geometry(vk::AccelerationStructureGeometryDataKHR {
                            instances: instance_data,
                        });

                    let range = vk::AccelerationStructureBuildRangeInfoKHR::default()
                        .primitive_count(instances.count)
                        .primitive_offset(instances.offset);

                    (smallvec::smallvec![geometry], smallvec::smallvec![range])
                }
                crate::AccelerationStructureEntries::Triangles(ref in_geometries) => {
                    let mut ranges = smallvec::SmallVec::<
                        [vk::AccelerationStructureBuildRangeInfoKHR; CAPACITY_INNER],
                    >::with_capacity(in_geometries.len());
                    let mut geometries = smallvec::SmallVec::<
                        [vk::AccelerationStructureGeometryKHR; CAPACITY_INNER],
                    >::with_capacity(in_geometries.len());
                    for triangles in in_geometries {
                        let mut triangle_data =
                            vk::AccelerationStructureGeometryTrianglesDataKHR::default()
                                .index_type(vk::IndexType::NONE_KHR)
                                .vertex_data(vk::DeviceOrHostAddressConstKHR {
                                    device_address: get_device_address(triangles.vertex_buffer),
                                })
                                .vertex_format(conv::map_vertex_format(triangles.vertex_format))
                                .max_vertex(triangles.vertex_count)
                                .vertex_stride(triangles.vertex_stride);

                        let mut range = vk::AccelerationStructureBuildRangeInfoKHR::default();

                        if let Some(ref indices) = triangles.indices {
                            triangle_data = triangle_data
                                .index_data(vk::DeviceOrHostAddressConstKHR {
                                    device_address: get_device_address(indices.buffer),
                                })
                                .index_type(conv::map_index_format(indices.format));

                            range = range
                                .primitive_count(indices.count / 3)
                                .primitive_offset(indices.offset)
                                .first_vertex(triangles.first_vertex);
                        } else {
                            range = range
                                .primitive_count(triangles.vertex_count / 3)
                                .first_vertex(triangles.first_vertex);
                        }

                        if let Some(ref transform) = triangles.transform {
                            let transform_device_address = unsafe {
                                ray_tracing_functions
                                    .buffer_device_address
                                    .get_buffer_device_address(
                                        &vk::BufferDeviceAddressInfo::default()
                                            .buffer(transform.buffer.raw),
                                    )
                            };
                            triangle_data =
                                triangle_data.transform_data(vk::DeviceOrHostAddressConstKHR {
                                    device_address: transform_device_address,
                                });

                            range = range.transform_offset(transform.offset);
                        }

                        let geometry = vk::AccelerationStructureGeometryKHR::default()
                            .geometry_type(vk::GeometryTypeKHR::TRIANGLES)
                            .geometry(vk::AccelerationStructureGeometryDataKHR {
                                triangles: triangle_data,
                            })
                            .flags(conv::map_acceleration_structure_geometry_flags(
                                triangles.flags,
                            ));

                        geometries.push(geometry);
                        ranges.push(range);
                    }
                    (geometries, ranges)
                }
                crate::AccelerationStructureEntries::AABBs(ref in_geometries) => {
                    let mut ranges = smallvec::SmallVec::<
                        [vk::AccelerationStructureBuildRangeInfoKHR; CAPACITY_INNER],
                    >::with_capacity(in_geometries.len());
                    let mut geometries = smallvec::SmallVec::<
                        [vk::AccelerationStructureGeometryKHR; CAPACITY_INNER],
                    >::with_capacity(in_geometries.len());
                    for aabb in in_geometries {
                        let aabbs_data = vk::AccelerationStructureGeometryAabbsDataKHR::default()
                            .data(vk::DeviceOrHostAddressConstKHR {
                                device_address: get_device_address(aabb.buffer),
                            })
                            .stride(aabb.stride);

                        let range = vk::AccelerationStructureBuildRangeInfoKHR::default()
                            .primitive_count(aabb.count)
                            .primitive_offset(aabb.offset);

                        let geometry = vk::AccelerationStructureGeometryKHR::default()
                            .geometry_type(vk::GeometryTypeKHR::AABBS)
                            .geometry(vk::AccelerationStructureGeometryDataKHR {
                                aabbs: aabbs_data,
                            })
                            .flags(conv::map_acceleration_structure_geometry_flags(aabb.flags));

                        geometries.push(geometry);
                        ranges.push(range);
                    }
                    (geometries, ranges)
                }
            };

            ranges_storage.push(ranges);
            geometries_storage.push(geometries);

            let scratch_device_address = unsafe {
                ray_tracing_functions
                    .buffer_device_address
                    .get_buffer_device_address(
                        &vk::BufferDeviceAddressInfo::default().buffer(desc.scratch_buffer.raw),
                    )
            };
            let ty = match *desc.entries {
                crate::AccelerationStructureEntries::Instances(_) => {
                    vk::AccelerationStructureTypeKHR::TOP_LEVEL
                }
                _ => vk::AccelerationStructureTypeKHR::BOTTOM_LEVEL,
            };
            let mut geometry_info = vk::AccelerationStructureBuildGeometryInfoKHR::default()
                .ty(ty)
                .mode(conv::map_acceleration_structure_build_mode(desc.mode))
                .flags(conv::map_acceleration_structure_flags(desc.flags))
                .dst_acceleration_structure(desc.destination_acceleration_structure.raw)
                .scratch_data(vk::DeviceOrHostAddressKHR {
                    device_address: scratch_device_address + desc.scratch_buffer_offset,
                });

            if desc.mode == crate::AccelerationStructureBuildMode::Update {
                geometry_info.src_acceleration_structure = desc
                    .source_acceleration_structure
                    .unwrap_or(desc.destination_acceleration_structure)
                    .raw;
            }

            geometry_infos.push(geometry_info);
        }

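        // Patch in the geometry pointers only now, after all pushes are done:
        // taking them earlier could leave dangling pointers if a `SmallVec`
        // reallocated while growing.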
        for (i, geometry_info) in geometry_infos.iter_mut().enumerate() {
            geometry_info.geometry_count = geometries_storage[i].len() as u32;
            geometry_info.p_geometries = geometries_storage[i].as_ptr();
            ranges_ptrs.push(&ranges_storage[i]);
        }

        unsafe {
            ray_tracing_functions
                .acceleration_structure
                .cmd_build_acceleration_structures(self.active, &geometry_infos, &ranges_ptrs);
        }
    }

    unsafe fn place_acceleration_structure_barrier(
        &mut self,
        barrier: crate::AccelerationStructureBarrier,
    ) {
        let (src_stage, src_access) = conv::map_acceleration_structure_usage_to_barrier(
            barrier.usage.from,
            self.device.features,
        );
        let (dst_stage, dst_access) = conv::map_acceleration_structure_usage_to_barrier(
            barrier.usage.to,
            self.device.features,
        );

        unsafe {
            self.device.raw.cmd_pipeline_barrier(
                self.active,
                src_stage | vk::PipelineStageFlags::TOP_OF_PIPE,
                dst_stage | vk::PipelineStageFlags::BOTTOM_OF_PIPE,
                vk::DependencyFlags::empty(),
                &[vk::MemoryBarrier::default()
                    .src_access_mask(src_access)
                    .dst_access_mask(dst_access)],
                &[],
                &[],
            )
        };
    }

    unsafe fn begin_render_pass(
        &mut self,
        desc: &crate::RenderPassDescriptor<super::QuerySet, super::TextureView>,
    ) -> Result<(), crate::DeviceError> {
        let mut vk_clear_values =
            ArrayVec::<vk::ClearValue, { super::MAX_TOTAL_ATTACHMENTS }>::new();
        let mut rp_key = super::RenderPassKey {
            colors: ArrayVec::default(),
            depth_stencil: None,
            sample_count: desc.sample_count,
            multiview_mask: desc.multiview_mask,
        };
        let mut fb_key = super::FramebufferKey {
            raw_pass: vk::RenderPass::null(),
            attachment_views: ArrayVec::default(),
            attachment_identities: ArrayVec::default(),
            extent: desc.extent,
        };

        for cat in desc.color_attachments {
            if let Some(cat) = cat.as_ref() {
                let color_view = if cat.target.view.dimension == wgt::TextureViewDimension::D3 {
                    let key = super::TempTextureViewKey {
                        texture: cat.target.view.raw_texture,
                        texture_identity: cat.target.view.texture_identity,
                        format: cat.target.view.raw_format,
                        mip_level: cat.target.view.base_mip_level,
                        depth_slice: cat.depth_slice.unwrap(),
                    };
                    self.make_temp_texture_view(key)?
                } else {
                    cat.target.view.identified_raw_view()
                };

                vk_clear_values.push(vk::ClearValue {
                    color: unsafe { cat.make_vk_clear_color() },
                });
                let color = super::ColorAttachmentKey {
                    base: cat.target.make_attachment_key(cat.ops),
                    resolve: cat
                        .resolve_target
                        .as_ref()
                        .map(|target| target.make_attachment_key(crate::AttachmentOps::STORE)),
                };

                rp_key.colors.push(Some(color));
                fb_key.push_view(color_view);
                if let Some(ref at) = cat.resolve_target {
                    vk_clear_values.push(unsafe { mem::zeroed() });
                    fb_key.push_view(at.view.identified_raw_view());
                }
            } else {
                rp_key.colors.push(None);
            }
        }
        if let Some(ref ds) = desc.depth_stencil_attachment {
            vk_clear_values.push(vk::ClearValue {
                depth_stencil: vk::ClearDepthStencilValue {
                    depth: ds.clear_value.0,
                    stencil: ds.clear_value.1,
                },
            });
            rp_key.depth_stencil = Some(super::DepthStencilAttachmentKey {
                base: ds.target.make_attachment_key(ds.depth_ops),
                stencil_ops: ds.stencil_ops,
            });
            fb_key.push_view(ds.target.view.identified_raw_view());
        }

        let render_area = vk::Rect2D {
            offset: vk::Offset2D { x: 0, y: 0 },
            extent: vk::Extent2D {
                width: desc.extent.width,
                height: desc.extent.height,
            },
        };
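        // A negative viewport height (with the Y origin moved to the bottom
        // edge) flips the viewport, mapping wgpu's Y-up NDC onto Vulkan's
        // Y-down convention; negative heights require VK_KHR_maintenance1,
        // core since Vulkan 1.1.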
        let vk_viewports = [vk::Viewport {
            x: 0.0,
            y: desc.extent.height as f32,
            width: desc.extent.width as f32,
            height: -(desc.extent.height as f32),
            min_depth: 0.0,
            max_depth: 1.0,
        }];

        let raw_pass = self.device.make_render_pass(rp_key)?;
        fb_key.raw_pass = raw_pass;
        let raw_framebuffer = self.make_framebuffer(fb_key)?;

        let vk_info = vk::RenderPassBeginInfo::default()
            .render_pass(raw_pass)
            .render_area(render_area)
            .clear_values(&vk_clear_values)
            .framebuffer(raw_framebuffer);

        if let Some(label) = desc.label {
            unsafe { self.begin_debug_marker(label) };
            self.rpass_debug_marker_active = true;
        }

        if let Some(timestamp_writes) = desc.timestamp_writes.as_ref() {
            if let Some(index) = timestamp_writes.beginning_of_pass_write_index {
                unsafe {
                    self.write_timestamp(timestamp_writes.query_set, index);
                }
            }
            self.end_of_pass_timer_query = timestamp_writes
                .end_of_pass_write_index
                .map(|index| (timestamp_writes.query_set.raw, index));
        }

        unsafe {
            self.device
                .raw
                .cmd_set_viewport(self.active, 0, &vk_viewports);
            self.device
                .raw
                .cmd_set_scissor(self.active, 0, &[render_area]);
            self.device.raw.cmd_begin_render_pass(
                self.active,
                &vk_info,
                vk::SubpassContents::INLINE,
            );
        };

        self.bind_point = vk::PipelineBindPoint::GRAPHICS;

        Ok(())
    }
    unsafe fn end_render_pass(&mut self) {
        unsafe {
            self.device.raw.cmd_end_render_pass(self.active);
        }

        self.write_pass_end_timestamp_if_requested();

        if self.rpass_debug_marker_active {
            unsafe {
                self.end_debug_marker();
            }
            self.rpass_debug_marker_active = false;
        }
    }

    unsafe fn set_bind_group(
        &mut self,
        layout: &super::PipelineLayout,
        index: u32,
        group: &super::BindGroup,
        dynamic_offsets: &[wgt::DynamicOffset],
    ) {
        let sets = [*group.set.raw()];
        unsafe {
            self.device.raw.cmd_bind_descriptor_sets(
                self.active,
                self.bind_point,
                layout.raw,
                index,
                &sets,
                dynamic_offsets,
            )
        };
    }
    unsafe fn set_push_constants(
        &mut self,
        layout: &super::PipelineLayout,
        stages: wgt::ShaderStages,
        offset_bytes: u32,
        data: &[u32],
    ) {
        unsafe {
            self.device.raw.cmd_push_constants(
                self.active,
                layout.raw,
                conv::map_shader_stage(stages),
                offset_bytes,
                bytemuck::cast_slice(data),
            )
        };
    }

    unsafe fn insert_debug_marker(&mut self, label: &str) {
        if let Some(ext) = self.device.extension_fns.debug_utils.as_ref() {
            let cstr = self.temp.make_c_str(label);
            let vk_label = vk::DebugUtilsLabelEXT::default().label_name(cstr);
            unsafe { ext.cmd_insert_debug_utils_label(self.active, &vk_label) };
        }
    }
    unsafe fn begin_debug_marker(&mut self, group_label: &str) {
        if let Some(ext) = self.device.extension_fns.debug_utils.as_ref() {
            let cstr = self.temp.make_c_str(group_label);
            let vk_label = vk::DebugUtilsLabelEXT::default().label_name(cstr);
            unsafe { ext.cmd_begin_debug_utils_label(self.active, &vk_label) };
        }
    }
    unsafe fn end_debug_marker(&mut self) {
        if let Some(ext) = self.device.extension_fns.debug_utils.as_ref() {
            unsafe { ext.cmd_end_debug_utils_label(self.active) };
        }
    }

    unsafe fn set_render_pipeline(&mut self, pipeline: &super::RenderPipeline) {
        unsafe {
            self.current_pipeline_is_multiview = pipeline.is_multiview;
            self.device.raw.cmd_bind_pipeline(
                self.active,
                vk::PipelineBindPoint::GRAPHICS,
                pipeline.raw,
            )
        };
    }

    unsafe fn set_index_buffer<'a>(
        &mut self,
        binding: crate::BufferBinding<'a, super::Buffer>,
        format: wgt::IndexFormat,
    ) {
        unsafe {
            self.device.raw.cmd_bind_index_buffer(
                self.active,
                binding.buffer.raw,
                binding.offset,
                conv::map_index_format(format),
            )
        };
    }
    unsafe fn set_vertex_buffer<'a>(
        &mut self,
        index: u32,
        binding: crate::BufferBinding<'a, super::Buffer>,
    ) {
        let vk_buffers = [binding.buffer.raw];
        let vk_offsets = [binding.offset];
        unsafe {
            self.device
                .raw
                .cmd_bind_vertex_buffers(self.active, index, &vk_buffers, &vk_offsets)
        };
    }
    unsafe fn set_viewport(&mut self, rect: &crate::Rect<f32>, depth_range: Range<f32>) {
        let vk_viewports = [vk::Viewport {
            x: rect.x,
            y: rect.y + rect.h,
            width: rect.w,
            height: -rect.h,
            min_depth: depth_range.start,
            max_depth: depth_range.end,
        }];
        unsafe {
            self.device
                .raw
                .cmd_set_viewport(self.active, 0, &vk_viewports)
        };
    }
    unsafe fn set_scissor_rect(&mut self, rect: &crate::Rect<u32>) {
        let vk_scissors = [vk::Rect2D {
            offset: vk::Offset2D {
                x: rect.x as i32,
                y: rect.y as i32,
            },
            extent: vk::Extent2D {
                width: rect.w,
                height: rect.h,
            },
        }];
        unsafe {
            self.device
                .raw
                .cmd_set_scissor(self.active, 0, &vk_scissors)
        };
    }
    unsafe fn set_stencil_reference(&mut self, value: u32) {
        unsafe {
            self.device.raw.cmd_set_stencil_reference(
                self.active,
                vk::StencilFaceFlags::FRONT_AND_BACK,
                value,
            )
        };
    }
    unsafe fn set_blend_constants(&mut self, color: &[f32; 4]) {
        unsafe { self.device.raw.cmd_set_blend_constants(self.active, color) };
    }

    unsafe fn draw(
        &mut self,
        first_vertex: u32,
        vertex_count: u32,
        first_instance: u32,
        instance_count: u32,
    ) {
        if self.current_pipeline_is_multiview
            && (first_instance as u64 + instance_count as u64 - 1)
                > self.device.private_caps.multiview_instance_index_limit as u64
        {
            panic!("This vulkan device is affected by [#8333](https://github.com/gfx-rs/wgpu/issues/8333)");
        }
        unsafe {
            self.device.raw.cmd_draw(
                self.active,
                vertex_count,
                instance_count,
                first_vertex,
                first_instance,
            )
        };
    }
    unsafe fn draw_indexed(
        &mut self,
        first_index: u32,
        index_count: u32,
        base_vertex: i32,
        first_instance: u32,
        instance_count: u32,
    ) {
        if self.current_pipeline_is_multiview
            && (first_instance as u64 + instance_count as u64 - 1)
                > self.device.private_caps.multiview_instance_index_limit as u64
        {
            panic!("This vulkan device is affected by [#8333](https://github.com/gfx-rs/wgpu/issues/8333)");
        }
        unsafe {
            self.device.raw.cmd_draw_indexed(
                self.active,
                index_count,
                instance_count,
                first_index,
                base_vertex,
                first_instance,
            )
        };
    }
    unsafe fn draw_mesh_tasks(
        &mut self,
        group_count_x: u32,
        group_count_y: u32,
        group_count_z: u32,
    ) {
        if let Some(ref t) = self.device.extension_fns.mesh_shading {
            unsafe {
                t.cmd_draw_mesh_tasks(self.active, group_count_x, group_count_y, group_count_z);
            };
        } else {
            panic!("Feature `MESH_SHADING` not enabled");
        }
    }
    unsafe fn draw_indirect(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    ) {
        unsafe {
            self.device.raw.cmd_draw_indirect(
                self.active,
                buffer.raw,
                offset,
                draw_count,
                size_of::<wgt::DrawIndirectArgs>() as u32,
            )
        };
    }
    unsafe fn draw_indexed_indirect(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    ) {
        let stride = size_of::<wgt::DrawIndexedIndirectArgs>() as u32;
        // Vulkan only allows `drawCount > 1` when the `multiDrawIndirect`
        // feature is enabled; otherwise emulate the multi-draw with one
        // single-draw call per command, stepping the buffer offset by the
        // stride each time.
        if draw_count <= 1 || self.device.private_caps.multi_draw_indirect {
            unsafe {
                self.device.raw.cmd_draw_indexed_indirect(
                    self.active,
                    buffer.raw,
                    offset,
                    draw_count,
                    stride,
                )
            };
        } else {
            for i in 0..draw_count {
                unsafe {
                    self.device.raw.cmd_draw_indexed_indirect(
                        self.active,
                        buffer.raw,
                        offset + u64::from(i) * u64::from(stride),
                        1,
                        stride,
                    )
                };
            }
        }
    }
    unsafe fn draw_mesh_tasks_indirect(
        &mut self,
        buffer: &<Self::A as crate::Api>::Buffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    ) {
        if let Some(ref t) = self.device.extension_fns.mesh_shading {
            unsafe {
                t.cmd_draw_mesh_tasks_indirect(
                    self.active,
                    buffer.raw,
                    offset,
                    draw_count,
                    size_of::<wgt::DispatchIndirectArgs>() as u32,
                );
            };
        } else {
            panic!("Feature `MESH_SHADING` not enabled");
        }
    }
    unsafe fn draw_indirect_count(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        count_buffer: &super::Buffer,
        count_offset: wgt::BufferAddress,
        max_count: u32,
    ) {
        let stride = size_of::<wgt::DrawIndirectArgs>() as u32;
        match self.device.extension_fns.draw_indirect_count {
            Some(ref t) => {
                unsafe {
                    t.cmd_draw_indirect_count(
                        self.active,
                        buffer.raw,
                        offset,
                        count_buffer.raw,
                        count_offset,
                        max_count,
                        stride,
                    )
                };
            }
            None => panic!("Feature `DRAW_INDIRECT_COUNT` not enabled"),
        }
    }
    unsafe fn draw_indexed_indirect_count(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        count_buffer: &super::Buffer,
        count_offset: wgt::BufferAddress,
        max_count: u32,
    ) {
        let stride = size_of::<wgt::DrawIndexedIndirectArgs>() as u32;
        match self.device.extension_fns.draw_indirect_count {
            Some(ref t) => {
                unsafe {
                    t.cmd_draw_indexed_indirect_count(
                        self.active,
                        buffer.raw,
                        offset,
                        count_buffer.raw,
                        count_offset,
                        max_count,
                        stride,
                    )
                };
            }
            None => panic!("Feature `DRAW_INDIRECT_COUNT` not enabled"),
        }
    }
    unsafe fn draw_mesh_tasks_indirect_count(
        &mut self,
        buffer: &<Self::A as crate::Api>::Buffer,
        offset: wgt::BufferAddress,
        count_buffer: &super::Buffer,
        count_offset: wgt::BufferAddress,
        max_count: u32,
    ) {
        if self.device.extension_fns.draw_indirect_count.is_none() {
            panic!("Feature `DRAW_INDIRECT_COUNT` not enabled");
        }
        if let Some(ref t) = self.device.extension_fns.mesh_shading {
            unsafe {
                t.cmd_draw_mesh_tasks_indirect_count(
                    self.active,
                    buffer.raw,
                    offset,
                    count_buffer.raw,
                    count_offset,
                    max_count,
                    size_of::<wgt::DispatchIndirectArgs>() as u32,
                );
            };
        } else {
            panic!("Feature `MESH_SHADING` not enabled");
        }
    }

    unsafe fn begin_compute_pass(
        &mut self,
        desc: &crate::ComputePassDescriptor<'_, super::QuerySet>,
    ) {
        self.bind_point = vk::PipelineBindPoint::COMPUTE;
        if let Some(label) = desc.label {
            unsafe { self.begin_debug_marker(label) };
            self.rpass_debug_marker_active = true;
        }

        if let Some(timestamp_writes) = desc.timestamp_writes.as_ref() {
            if let Some(index) = timestamp_writes.beginning_of_pass_write_index {
                unsafe {
                    self.write_timestamp(timestamp_writes.query_set, index);
                }
            }
            self.end_of_pass_timer_query = timestamp_writes
                .end_of_pass_write_index
                .map(|index| (timestamp_writes.query_set.raw, index));
        }
    }
    unsafe fn end_compute_pass(&mut self) {
        self.write_pass_end_timestamp_if_requested();

        if self.rpass_debug_marker_active {
            unsafe { self.end_debug_marker() };
            self.rpass_debug_marker_active = false;
        }
    }

    unsafe fn set_compute_pipeline(&mut self, pipeline: &super::ComputePipeline) {
        unsafe {
            self.device.raw.cmd_bind_pipeline(
                self.active,
                vk::PipelineBindPoint::COMPUTE,
                pipeline.raw,
            )
        };
    }

    unsafe fn dispatch(&mut self, count: [u32; 3]) {
        unsafe {
            self.device
                .raw
                .cmd_dispatch(self.active, count[0], count[1], count[2])
        };
    }
    unsafe fn dispatch_indirect(&mut self, buffer: &super::Buffer, offset: wgt::BufferAddress) {
        unsafe {
            self.device
                .raw
                .cmd_dispatch_indirect(self.active, buffer.raw, offset)
        }
    }

    unsafe fn copy_acceleration_structure_to_acceleration_structure(
        &mut self,
        src: &super::AccelerationStructure,
        dst: &super::AccelerationStructure,
        copy: wgt::AccelerationStructureCopy,
    ) {
        let ray_tracing_functions = self
            .device
            .extension_fns
            .ray_tracing
            .as_ref()
            .expect("Feature `RAY_TRACING` not enabled");

        let mode = match copy {
            wgt::AccelerationStructureCopy::Clone => vk::CopyAccelerationStructureModeKHR::CLONE,
            wgt::AccelerationStructureCopy::Compact => {
                vk::CopyAccelerationStructureModeKHR::COMPACT
            }
        };

        unsafe {
            ray_tracing_functions
                .acceleration_structure
                .cmd_copy_acceleration_structure(
                    self.active,
                    &vk::CopyAccelerationStructureInfoKHR {
                        s_type: vk::StructureType::COPY_ACCELERATION_STRUCTURE_INFO_KHR,
                        p_next: core::ptr::null(),
                        src: src.raw,
                        dst: dst.raw,
                        mode,
                        _marker: Default::default(),
                    },
                );
        }
    }
}

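// `DST_IMAGE_LAYOUT` is hard-coded into the copy functions above; this check
// keeps it from drifting out of sync with what `derive_image_layout` reports
// for `COPY_DST`.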
#[test]
fn check_dst_image_layout() {
    assert_eq!(
        conv::derive_image_layout(wgt::TextureUses::COPY_DST, wgt::TextureFormat::Rgba8Unorm),
        DST_IMAGE_LAYOUT
    );
}