1use alloc::{boxed::Box, vec::Vec};
2use core::ops::Range;
3
4use crate::{
5 AccelerationStructureBarrier, Api, Attachment, BufferBarrier, BufferBinding, BufferCopy,
6 BufferTextureCopy, BuildAccelerationStructureDescriptor, ColorAttachment, CommandEncoder,
7 ComputePassDescriptor, DepthStencilAttachment, DeviceError, Label, MemoryRange,
8 PassTimestampWrites, Rect, RenderPassDescriptor, TextureBarrier, TextureCopy,
9};
10
11use super::{
12 DynAccelerationStructure, DynBindGroup, DynBuffer, DynCommandBuffer, DynComputePipeline,
13 DynPipelineLayout, DynQuerySet, DynRenderPipeline, DynResource, DynResourceExt as _,
14 DynTexture, DynTextureView,
15};
16
/// Object-safe, type-erased counterpart to [`CommandEncoder`].
///
/// Every method mirrors a method on [`CommandEncoder`], with the backend's
/// associated resource types replaced by `dyn` trait objects
/// (`dyn DynBuffer`, `dyn DynTexture`, ...). The blanket impl in this module
/// forwards each call to the concrete encoder, downcasting the erased
/// resources back to the backend's concrete types.
///
/// # Safety
///
/// These methods carry the same safety contracts as their [`CommandEncoder`]
/// counterparts; see that trait for the per-method requirements. In addition,
/// every `dyn` resource handed to these methods must belong to the same
/// backend as the encoder — the `expect_downcast*` calls in the blanket impl
/// are expected to panic on a backend mismatch.
pub trait DynCommandEncoder: DynResource + core::fmt::Debug {
    // --- Encoding lifecycle ---

    unsafe fn begin_encoding(&mut self, label: Label) -> Result<(), DeviceError>;

    unsafe fn discard_encoding(&mut self);

    /// Finishes recording; the resulting command buffer is boxed and
    /// type-erased so it can travel through `dyn`-based code paths.
    unsafe fn end_encoding(&mut self) -> Result<Box<dyn DynCommandBuffer>, DeviceError>;

    /// Reclaims a batch of previously submitted, type-erased command buffers.
    unsafe fn reset_all(&mut self, command_buffers: Vec<Box<dyn DynCommandBuffer>>);

    // --- Barriers ---

    unsafe fn transition_buffers(&mut self, barriers: &[BufferBarrier<'_, dyn DynBuffer>]);
    unsafe fn transition_textures(&mut self, barriers: &[TextureBarrier<'_, dyn DynTexture>]);

    // --- Transfers ---

    unsafe fn clear_buffer(&mut self, buffer: &dyn DynBuffer, range: MemoryRange);

    unsafe fn copy_buffer_to_buffer(
        &mut self,
        src: &dyn DynBuffer,
        dst: &dyn DynBuffer,
        regions: &[BufferCopy],
    );

    unsafe fn copy_texture_to_texture(
        &mut self,
        src: &dyn DynTexture,
        src_usage: wgt::TextureUses,
        dst: &dyn DynTexture,
        regions: &[TextureCopy],
    );

    unsafe fn copy_buffer_to_texture(
        &mut self,
        src: &dyn DynBuffer,
        dst: &dyn DynTexture,
        regions: &[BufferTextureCopy],
    );

    unsafe fn copy_texture_to_buffer(
        &mut self,
        src: &dyn DynTexture,
        src_usage: wgt::TextureUses,
        dst: &dyn DynBuffer,
        regions: &[BufferTextureCopy],
    );

    // --- Binding state (shared by render and compute passes) ---

    unsafe fn set_bind_group(
        &mut self,
        layout: &dyn DynPipelineLayout,
        index: u32,
        group: &dyn DynBindGroup,
        dynamic_offsets: &[wgt::DynamicOffset],
    );

    unsafe fn set_immediates(
        &mut self,
        layout: &dyn DynPipelineLayout,
        offset_bytes: u32,
        data: &[u32],
    );

    // --- Debug markers ---

    unsafe fn insert_debug_marker(&mut self, label: &str);
    unsafe fn begin_debug_marker(&mut self, group_label: &str);
    unsafe fn end_debug_marker(&mut self);

    // --- Queries ---

    unsafe fn begin_query(&mut self, set: &dyn DynQuerySet, index: u32);
    unsafe fn end_query(&mut self, set: &dyn DynQuerySet, index: u32);
    unsafe fn write_timestamp(&mut self, set: &dyn DynQuerySet, index: u32);
    unsafe fn reset_queries(&mut self, set: &dyn DynQuerySet, range: Range<u32>);
    unsafe fn copy_query_results(
        &mut self,
        set: &dyn DynQuerySet,
        range: Range<u32>,
        buffer: &dyn DynBuffer,
        offset: wgt::BufferAddress,
        stride: wgt::BufferSize,
    );

    // --- Render passes ---

    unsafe fn begin_render_pass(
        &mut self,
        desc: &RenderPassDescriptor<dyn DynQuerySet, dyn DynTextureView>,
    ) -> Result<(), DeviceError>;
    unsafe fn end_render_pass(&mut self);

    unsafe fn set_render_pipeline(&mut self, pipeline: &dyn DynRenderPipeline);

    unsafe fn set_index_buffer<'a>(
        &mut self,
        binding: BufferBinding<'a, dyn DynBuffer>,
        format: wgt::IndexFormat,
    );

    unsafe fn set_vertex_buffer<'a>(
        &mut self,
        index: u32,
        binding: BufferBinding<'a, dyn DynBuffer>,
    );
    unsafe fn set_viewport(&mut self, rect: &Rect<f32>, depth_range: Range<f32>);
    unsafe fn set_scissor_rect(&mut self, rect: &Rect<u32>);
    unsafe fn set_stencil_reference(&mut self, value: u32);
    unsafe fn set_blend_constants(&mut self, color: &[f32; 4]);

    // --- Draw calls ---

    unsafe fn draw(
        &mut self,
        first_vertex: u32,
        vertex_count: u32,
        first_instance: u32,
        instance_count: u32,
    );
    unsafe fn draw_indexed(
        &mut self,
        first_index: u32,
        index_count: u32,
        base_vertex: i32,
        first_instance: u32,
        instance_count: u32,
    );
    unsafe fn draw_mesh_tasks(
        &mut self,
        group_count_x: u32,
        group_count_y: u32,
        group_count_z: u32,
    );
    unsafe fn draw_indirect(
        &mut self,
        buffer: &dyn DynBuffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    );
    unsafe fn draw_indexed_indirect(
        &mut self,
        buffer: &dyn DynBuffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    );
    unsafe fn draw_mesh_tasks_indirect(
        &mut self,
        buffer: &dyn DynBuffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    );
    unsafe fn draw_indirect_count(
        &mut self,
        buffer: &dyn DynBuffer,
        offset: wgt::BufferAddress,
        count_buffer: &dyn DynBuffer,
        count_offset: wgt::BufferAddress,
        max_count: u32,
    );
    unsafe fn draw_indexed_indirect_count(
        &mut self,
        buffer: &dyn DynBuffer,
        offset: wgt::BufferAddress,
        count_buffer: &dyn DynBuffer,
        count_offset: wgt::BufferAddress,
        max_count: u32,
    );
    unsafe fn draw_mesh_tasks_indirect_count(
        &mut self,
        buffer: &dyn DynBuffer,
        offset: wgt::BufferAddress,
        count_buffer: &dyn DynBuffer,
        count_offset: wgt::BufferAddress,
        max_count: u32,
    );

    // --- Compute passes ---

    unsafe fn begin_compute_pass(&mut self, desc: &ComputePassDescriptor<dyn DynQuerySet>);
    unsafe fn end_compute_pass(&mut self);

    unsafe fn set_compute_pipeline(&mut self, pipeline: &dyn DynComputePipeline);

    unsafe fn dispatch_workgroups(&mut self, count: [u32; 3]);
    unsafe fn dispatch_workgroups_indirect(
        &mut self,
        buffer: &dyn DynBuffer,
        offset: wgt::BufferAddress,
    );

    // --- Ray-tracing acceleration structures ---

    unsafe fn build_acceleration_structures<'a>(
        &mut self,
        descriptors: &'a [BuildAccelerationStructureDescriptor<
            'a,
            dyn DynBuffer,
            dyn DynAccelerationStructure,
        >],
    );
    unsafe fn place_acceleration_structure_barrier(
        &mut self,
        barrier: AccelerationStructureBarrier,
    );
    unsafe fn copy_acceleration_structure_to_acceleration_structure(
        &mut self,
        src: &dyn DynAccelerationStructure,
        dst: &dyn DynAccelerationStructure,
        copy: wgt::AccelerationStructureCopy,
    );
    /// Writes the compacted size of `acceleration_structure` into `buf`.
    unsafe fn read_acceleration_structure_compact_size(
        &mut self,
        acceleration_structure: &dyn DynAccelerationStructure,
        buf: &dyn DynBuffer,
    );
    // Note: takes `&self` (not `&mut self`), unlike the other methods here.
    unsafe fn set_acceleration_structure_dependencies(
        &self,
        command_buffers: &[Box<dyn DynCommandBuffer>],
        dependencies: &[&dyn DynAccelerationStructure],
    );
}
222
/// Blanket implementation: any concrete [`CommandEncoder`] that is also a
/// [`DynResource`] can be used as a type-erased [`DynCommandEncoder`].
///
/// Each method downcasts the incoming `dyn` resources to the backend's
/// concrete types (via `expect_downcast*`, which is expected to panic on a
/// backend mismatch) and forwards to the matching [`CommandEncoder`] method.
impl<C: CommandEncoder + DynResource> DynCommandEncoder for C {
    unsafe fn begin_encoding(&mut self, label: Label) -> Result<(), DeviceError> {
        unsafe { C::begin_encoding(self, label) }
    }

    unsafe fn discard_encoding(&mut self) {
        unsafe { C::discard_encoding(self) }
    }

    unsafe fn end_encoding(&mut self) -> Result<Box<dyn DynCommandBuffer>, DeviceError> {
        unsafe { C::end_encoding(self) }.map(|cb| {
            // Box the backend's concrete command buffer, then coerce the box
            // to the type-erased trait object.
            let boxed_command_buffer: Box<<C::A as Api>::CommandBuffer> = Box::new(cb);
            let boxed_command_buffer: Box<dyn DynCommandBuffer> = boxed_command_buffer;
            boxed_command_buffer
        })
    }

    unsafe fn reset_all(&mut self, command_buffers: Vec<Box<dyn DynCommandBuffer>>) {
        // `unbox` recovers the concrete command buffer type from each erased box.
        unsafe { C::reset_all(self, command_buffers.into_iter().map(|cb| cb.unbox())) }
    }

    unsafe fn transition_buffers(&mut self, barriers: &[BufferBarrier<'_, dyn DynBuffer>]) {
        let barriers = barriers.iter().map(|barrier| BufferBarrier {
            buffer: barrier.buffer.expect_downcast_ref(),
            usage: barrier.usage.clone(),
        });
        // `barriers` is an iterator here, so this resolves to the concrete
        // `CommandEncoder::transition_buffers` — not a recursive call into
        // this trait's slice-taking method.
        unsafe { self.transition_buffers(barriers) };
    }

    unsafe fn transition_textures(&mut self, barriers: &[TextureBarrier<'_, dyn DynTexture>]) {
        let barriers = barriers.iter().map(|barrier| TextureBarrier {
            texture: barrier.texture.expect_downcast_ref(),
            usage: barrier.usage.clone(),
            range: barrier.range,
        });
        // Iterator argument — resolves to `CommandEncoder::transition_textures`.
        unsafe { self.transition_textures(barriers) };
    }

    unsafe fn clear_buffer(&mut self, buffer: &dyn DynBuffer, range: MemoryRange) {
        let buffer = buffer.expect_downcast_ref();
        unsafe { C::clear_buffer(self, buffer, range) };
    }

    unsafe fn copy_buffer_to_buffer(
        &mut self,
        src: &dyn DynBuffer,
        dst: &dyn DynBuffer,
        regions: &[BufferCopy],
    ) {
        let src = src.expect_downcast_ref();
        let dst = dst.expect_downcast_ref();
        unsafe {
            C::copy_buffer_to_buffer(self, src, dst, regions.iter().copied());
        }
    }

    unsafe fn copy_texture_to_texture(
        &mut self,
        src: &dyn DynTexture,
        src_usage: wgt::TextureUses,
        dst: &dyn DynTexture,
        regions: &[TextureCopy],
    ) {
        let src = src.expect_downcast_ref();
        let dst = dst.expect_downcast_ref();
        unsafe {
            C::copy_texture_to_texture(self, src, src_usage, dst, regions.iter().cloned());
        }
    }

    unsafe fn copy_buffer_to_texture(
        &mut self,
        src: &dyn DynBuffer,
        dst: &dyn DynTexture,
        regions: &[BufferTextureCopy],
    ) {
        let src = src.expect_downcast_ref();
        let dst = dst.expect_downcast_ref();
        unsafe {
            C::copy_buffer_to_texture(self, src, dst, regions.iter().cloned());
        }
    }

    unsafe fn copy_texture_to_buffer(
        &mut self,
        src: &dyn DynTexture,
        src_usage: wgt::TextureUses,
        dst: &dyn DynBuffer,
        regions: &[BufferTextureCopy],
    ) {
        let src = src.expect_downcast_ref();
        let dst = dst.expect_downcast_ref();
        unsafe {
            C::copy_texture_to_buffer(self, src, src_usage, dst, regions.iter().cloned());
        }
    }

    unsafe fn set_bind_group(
        &mut self,
        layout: &dyn DynPipelineLayout,
        index: u32,
        group: &dyn DynBindGroup,
        dynamic_offsets: &[wgt::DynamicOffset],
    ) {
        let layout = layout.expect_downcast_ref();
        let group = group.expect_downcast_ref();
        unsafe { C::set_bind_group(self, layout, index, group, dynamic_offsets) };
    }

    unsafe fn set_immediates(
        &mut self,
        layout: &dyn DynPipelineLayout,
        offset_bytes: u32,
        data: &[u32],
    ) {
        let layout = layout.expect_downcast_ref();
        unsafe { C::set_immediates(self, layout, offset_bytes, data) };
    }

    unsafe fn insert_debug_marker(&mut self, label: &str) {
        unsafe {
            C::insert_debug_marker(self, label);
        }
    }

    unsafe fn begin_debug_marker(&mut self, group_label: &str) {
        unsafe {
            C::begin_debug_marker(self, group_label);
        }
    }

    unsafe fn end_debug_marker(&mut self) {
        unsafe {
            C::end_debug_marker(self);
        }
    }

    unsafe fn begin_query(&mut self, set: &dyn DynQuerySet, index: u32) {
        let set = set.expect_downcast_ref();
        unsafe { C::begin_query(self, set, index) };
    }

    unsafe fn end_query(&mut self, set: &dyn DynQuerySet, index: u32) {
        let set = set.expect_downcast_ref();
        unsafe { C::end_query(self, set, index) };
    }

    unsafe fn write_timestamp(&mut self, set: &dyn DynQuerySet, index: u32) {
        let set = set.expect_downcast_ref();
        unsafe { C::write_timestamp(self, set, index) };
    }

    unsafe fn reset_queries(&mut self, set: &dyn DynQuerySet, range: Range<u32>) {
        let set = set.expect_downcast_ref();
        unsafe { C::reset_queries(self, set, range) };
    }

    unsafe fn copy_query_results(
        &mut self,
        set: &dyn DynQuerySet,
        range: Range<u32>,
        buffer: &dyn DynBuffer,
        offset: wgt::BufferAddress,
        stride: wgt::BufferSize,
    ) {
        let set = set.expect_downcast_ref();
        let buffer = buffer.expect_downcast_ref();
        unsafe { C::copy_query_results(self, set, range, buffer, offset, stride) };
    }

    unsafe fn begin_render_pass(
        &mut self,
        desc: &RenderPassDescriptor<dyn DynQuerySet, dyn DynTextureView>,
    ) -> Result<(), DeviceError> {
        // Downcast the color attachments into an owned Vec first: the
        // concrete descriptor below borrows this collection.
        let color_attachments = desc
            .color_attachments
            .iter()
            .map(|attachment| {
                attachment
                    .as_ref()
                    .map(|attachment| attachment.expect_downcast())
            })
            .collect::<Vec<_>>();

        let desc: RenderPassDescriptor<<C::A as Api>::QuerySet, <C::A as Api>::TextureView> =
            RenderPassDescriptor {
                label: desc.label,
                extent: desc.extent,
                sample_count: desc.sample_count,
                color_attachments: &color_attachments,
                depth_stencil_attachment: desc
                    .depth_stencil_attachment
                    .as_ref()
                    .map(|ds| ds.expect_downcast()),
                multiview_mask: desc.multiview_mask,
                timestamp_writes: desc
                    .timestamp_writes
                    .as_ref()
                    .map(|writes| writes.expect_downcast()),
                occlusion_query_set: desc
                    .occlusion_query_set
                    .map(|set| set.expect_downcast_ref()),
            };
        unsafe { C::begin_render_pass(self, &desc) }
    }

    unsafe fn end_render_pass(&mut self) {
        unsafe {
            C::end_render_pass(self);
        }
    }

    unsafe fn set_viewport(&mut self, rect: &Rect<f32>, depth_range: Range<f32>) {
        unsafe {
            C::set_viewport(self, rect, depth_range);
        }
    }

    unsafe fn set_scissor_rect(&mut self, rect: &Rect<u32>) {
        unsafe {
            C::set_scissor_rect(self, rect);
        }
    }

    unsafe fn set_stencil_reference(&mut self, value: u32) {
        unsafe {
            C::set_stencil_reference(self, value);
        }
    }

    unsafe fn set_blend_constants(&mut self, color: &[f32; 4]) {
        unsafe { C::set_blend_constants(self, color) };
    }

    unsafe fn draw(
        &mut self,
        first_vertex: u32,
        vertex_count: u32,
        first_instance: u32,
        instance_count: u32,
    ) {
        unsafe {
            C::draw(
                self,
                first_vertex,
                vertex_count,
                first_instance,
                instance_count,
            )
        };
    }

    unsafe fn draw_indexed(
        &mut self,
        first_index: u32,
        index_count: u32,
        base_vertex: i32,
        first_instance: u32,
        instance_count: u32,
    ) {
        unsafe {
            C::draw_indexed(
                self,
                first_index,
                index_count,
                base_vertex,
                first_instance,
                instance_count,
            )
        };
    }

    unsafe fn draw_mesh_tasks(
        &mut self,
        group_count_x: u32,
        group_count_y: u32,
        group_count_z: u32,
    ) {
        unsafe { C::draw_mesh_tasks(self, group_count_x, group_count_y, group_count_z) };
    }

    unsafe fn draw_indirect(
        &mut self,
        buffer: &dyn DynBuffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    ) {
        let buffer = buffer.expect_downcast_ref();
        unsafe { C::draw_indirect(self, buffer, offset, draw_count) };
    }

    unsafe fn draw_indexed_indirect(
        &mut self,
        buffer: &dyn DynBuffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    ) {
        let buffer = buffer.expect_downcast_ref();
        unsafe { C::draw_indexed_indirect(self, buffer, offset, draw_count) };
    }

    unsafe fn draw_mesh_tasks_indirect(
        &mut self,
        buffer: &dyn DynBuffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    ) {
        let buffer = buffer.expect_downcast_ref();
        unsafe { C::draw_mesh_tasks_indirect(self, buffer, offset, draw_count) };
    }

    unsafe fn draw_indirect_count(
        &mut self,
        buffer: &dyn DynBuffer,
        offset: wgt::BufferAddress,
        count_buffer: &dyn DynBuffer,
        count_offset: wgt::BufferAddress,
        max_count: u32,
    ) {
        let buffer = buffer.expect_downcast_ref();
        let count_buffer = count_buffer.expect_downcast_ref();
        unsafe {
            C::draw_indirect_count(self, buffer, offset, count_buffer, count_offset, max_count)
        };
    }

    unsafe fn draw_indexed_indirect_count(
        &mut self,
        buffer: &dyn DynBuffer,
        offset: wgt::BufferAddress,
        count_buffer: &dyn DynBuffer,
        count_offset: wgt::BufferAddress,
        max_count: u32,
    ) {
        let buffer = buffer.expect_downcast_ref();
        let count_buffer = count_buffer.expect_downcast_ref();
        unsafe {
            C::draw_indexed_indirect_count(
                self,
                buffer,
                offset,
                count_buffer,
                count_offset,
                max_count,
            )
        };
    }

    unsafe fn draw_mesh_tasks_indirect_count(
        &mut self,
        buffer: &dyn DynBuffer,
        offset: wgt::BufferAddress,
        count_buffer: &dyn DynBuffer,
        count_offset: wgt::BufferAddress,
        max_count: u32,
    ) {
        let buffer = buffer.expect_downcast_ref();
        let count_buffer = count_buffer.expect_downcast_ref();
        unsafe {
            C::draw_mesh_tasks_indirect_count(
                self,
                buffer,
                offset,
                count_buffer,
                count_offset,
                max_count,
            )
        };
    }

    unsafe fn begin_compute_pass(&mut self, desc: &ComputePassDescriptor<dyn DynQuerySet>) {
        let desc = ComputePassDescriptor {
            label: desc.label,
            timestamp_writes: desc
                .timestamp_writes
                .as_ref()
                .map(|writes| writes.expect_downcast()),
        };
        unsafe { C::begin_compute_pass(self, &desc) };
    }

    unsafe fn end_compute_pass(&mut self) {
        unsafe { C::end_compute_pass(self) };
    }

    unsafe fn set_compute_pipeline(&mut self, pipeline: &dyn DynComputePipeline) {
        let pipeline = pipeline.expect_downcast_ref();
        unsafe { C::set_compute_pipeline(self, pipeline) };
    }

    unsafe fn dispatch_workgroups(&mut self, count: [u32; 3]) {
        unsafe { C::dispatch_workgroups(self, count) };
    }

    unsafe fn dispatch_workgroups_indirect(
        &mut self,
        buffer: &dyn DynBuffer,
        offset: wgt::BufferAddress,
    ) {
        let buffer = buffer.expect_downcast_ref();
        unsafe { C::dispatch_workgroups_indirect(self, buffer, offset) };
    }

    unsafe fn set_render_pipeline(&mut self, pipeline: &dyn DynRenderPipeline) {
        let pipeline = pipeline.expect_downcast_ref();
        unsafe { C::set_render_pipeline(self, pipeline) };
    }

    unsafe fn set_index_buffer<'a>(
        &mut self,
        binding: BufferBinding<'a, dyn DynBuffer>,
        format: wgt::IndexFormat,
    ) {
        let binding = binding.expect_downcast();
        // Downcast binding type selects `CommandEncoder::set_index_buffer`.
        unsafe { self.set_index_buffer(binding, format) };
    }

    unsafe fn set_vertex_buffer<'a>(
        &mut self,
        index: u32,
        binding: BufferBinding<'a, dyn DynBuffer>,
    ) {
        let binding = binding.expect_downcast();
        // Downcast binding type selects `CommandEncoder::set_vertex_buffer`.
        unsafe { self.set_vertex_buffer(index, binding) };
    }

    unsafe fn build_acceleration_structures<'a>(
        &mut self,
        descriptors: &'a [BuildAccelerationStructureDescriptor<
            'a,
            dyn DynBuffer,
            dyn DynAccelerationStructure,
        >],
    ) {
        // Two passes: the downcast entry arrays are collected into an owned
        // Vec first, so the concrete descriptors built below can borrow them.
        let descriptor_entries = descriptors
            .iter()
            .map(|d| d.entries.expect_downcast())
            .collect::<Vec<_>>();
        let descriptors = descriptors
            .iter()
            .zip(descriptor_entries.iter())
            .map(|(d, entries)| BuildAccelerationStructureDescriptor::<
                <C::A as Api>::Buffer,
                <C::A as Api>::AccelerationStructure,
            > {
                entries,
                mode: d.mode,
                flags: d.flags,
                source_acceleration_structure: d
                    .source_acceleration_structure
                    .map(|a| a.expect_downcast_ref()),
                destination_acceleration_structure: d
                    .destination_acceleration_structure
                    .expect_downcast_ref(),
                scratch_buffer: d.scratch_buffer.expect_downcast_ref(),
                scratch_buffer_offset: d.scratch_buffer_offset,
            });
        unsafe { C::build_acceleration_structures(self, descriptors.len() as _, descriptors) };
    }

    unsafe fn place_acceleration_structure_barrier(
        &mut self,
        barrier: AccelerationStructureBarrier,
    ) {
        unsafe { C::place_acceleration_structure_barrier(self, barrier) };
    }

    unsafe fn copy_acceleration_structure_to_acceleration_structure(
        &mut self,
        src: &dyn DynAccelerationStructure,
        dst: &dyn DynAccelerationStructure,
        copy: wgt::AccelerationStructureCopy,
    ) {
        let src = src.expect_downcast_ref();
        let dst = dst.expect_downcast_ref();
        unsafe { C::copy_acceleration_structure_to_acceleration_structure(self, src, dst, copy) };
    }
    unsafe fn read_acceleration_structure_compact_size(
        &mut self,
        acceleration_structure: &dyn DynAccelerationStructure,
        buf: &dyn DynBuffer,
    ) {
        let acceleration_structure = acceleration_structure.expect_downcast_ref();
        let buf = buf.expect_downcast_ref();
        unsafe { C::read_acceleration_structure_compact_size(self, acceleration_structure, buf) }
    }

    unsafe fn set_acceleration_structure_dependencies(
        &self,
        command_buffers: &[Box<dyn DynCommandBuffer>],
        dependencies: &[&dyn DynAccelerationStructure],
    ) {
        let command_buffers: Vec<&<C::A as Api>::CommandBuffer> = command_buffers
            .iter()
            .map(|command_buffer| command_buffer.expect_downcast_ref())
            .collect();
        let dependencies: Vec<&<C::A as Api>::AccelerationStructure> = dependencies
            .iter()
            .map(|dependency| dependency.expect_downcast_ref())
            .collect();
        // Note: forwarded without `self` — the concrete function takes only
        // the command buffers and their dependencies.
        unsafe { C::set_acceleration_structure_dependencies(&command_buffers, &dependencies) }
    }
}
728
729impl<'a> PassTimestampWrites<'a, dyn DynQuerySet> {
730 pub fn expect_downcast<B: DynQuerySet>(&self) -> PassTimestampWrites<'a, B> {
731 PassTimestampWrites {
732 query_set: self.query_set.expect_downcast_ref(),
733 beginning_of_pass_write_index: self.beginning_of_pass_write_index,
734 end_of_pass_write_index: self.end_of_pass_write_index,
735 }
736 }
737}
738
739impl<'a> Attachment<'a, dyn DynTextureView> {
740 pub fn expect_downcast<B: DynTextureView>(&self) -> Attachment<'a, B> {
741 Attachment {
742 view: self.view.expect_downcast_ref(),
743 usage: self.usage,
744 }
745 }
746}
747
748impl<'a> ColorAttachment<'a, dyn DynTextureView> {
749 pub fn expect_downcast<B: DynTextureView>(&self) -> ColorAttachment<'a, B> {
750 ColorAttachment {
751 target: self.target.expect_downcast(),
752 depth_slice: self.depth_slice,
753 resolve_target: self.resolve_target.as_ref().map(|rt| rt.expect_downcast()),
754 ops: self.ops,
755 clear_value: self.clear_value,
756 }
757 }
758}
759
760impl<'a> DepthStencilAttachment<'a, dyn DynTextureView> {
761 pub fn expect_downcast<B: DynTextureView>(&self) -> DepthStencilAttachment<'a, B> {
762 DepthStencilAttachment {
763 target: self.target.expect_downcast(),
764 depth_ops: self.depth_ops,
765 stencil_ops: self.stencil_ops,
766 clear_value: self.clear_value,
767 }
768 }
769}