1use alloc::{boxed::Box, vec::Vec};
2use core::ops::Range;
3
4use crate::{
5 AccelerationStructureBarrier, Api, Attachment, BufferBarrier, BufferBinding, BufferCopy,
6 BufferTextureCopy, BuildAccelerationStructureDescriptor, ColorAttachment, CommandEncoder,
7 ComputePassDescriptor, DepthStencilAttachment, DeviceError, Label, MemoryRange,
8 PassTimestampWrites, Rect, RenderPassDescriptor, TextureBarrier, TextureCopy,
9};
10
11use super::{
12 DynAccelerationStructure, DynBindGroup, DynBuffer, DynCommandBuffer, DynComputePipeline,
13 DynPipelineLayout, DynQuerySet, DynRenderPipeline, DynResource, DynResourceExt as _,
14 DynTexture, DynTextureView,
15};
16
/// Type-erased counterpart of [`CommandEncoder`].
///
/// Every method here mirrors one on [`CommandEncoder`], but takes `dyn`
/// resource trait objects (`dyn DynBuffer`, `dyn DynTexture`, …) instead of
/// the backend's concrete associated types. The blanket impl below downcasts
/// each argument back to the concrete backend type and forwards the call.
///
/// # Safety
///
/// All methods are `unsafe` and carry the same contracts as the matching
/// [`CommandEncoder`] methods. In addition, resources passed to the blanket
/// impl must belong to the same backend as `self` — the downcasts
/// (`expect_downcast*`) otherwise panic.
pub trait DynCommandEncoder: DynResource + core::fmt::Debug {
    /// Begins recording commands; see [`CommandEncoder::begin_encoding`].
    unsafe fn begin_encoding(&mut self, label: Label) -> Result<(), DeviceError>;

    /// Abandons the current recording; see [`CommandEncoder::discard_encoding`].
    unsafe fn discard_encoding(&mut self);

    /// Finishes recording, yielding a boxed, type-erased command buffer.
    unsafe fn end_encoding(&mut self) -> Result<Box<dyn DynCommandBuffer>, DeviceError>;

    /// Reclaims the given command buffers for reuse by this encoder.
    unsafe fn reset_all(&mut self, command_buffers: Vec<Box<dyn DynCommandBuffer>>);

    // Resource state transitions.
    unsafe fn transition_buffers(&mut self, barriers: &[BufferBarrier<'_, dyn DynBuffer>]);
    unsafe fn transition_textures(&mut self, barriers: &[TextureBarrier<'_, dyn DynTexture>]);

    /// Fills `range` of `buffer` with zeroes-equivalent clear semantics of the backend.
    unsafe fn clear_buffer(&mut self, buffer: &dyn DynBuffer, range: MemoryRange);

    unsafe fn copy_buffer_to_buffer(
        &mut self,
        src: &dyn DynBuffer,
        dst: &dyn DynBuffer,
        regions: &[BufferCopy],
    );

    unsafe fn copy_texture_to_texture(
        &mut self,
        src: &dyn DynTexture,
        src_usage: wgt::TextureUses,
        dst: &dyn DynTexture,
        regions: &[TextureCopy],
    );

    unsafe fn copy_buffer_to_texture(
        &mut self,
        src: &dyn DynBuffer,
        dst: &dyn DynTexture,
        regions: &[BufferTextureCopy],
    );

    unsafe fn copy_texture_to_buffer(
        &mut self,
        src: &dyn DynTexture,
        src_usage: wgt::TextureUses,
        dst: &dyn DynBuffer,
        regions: &[BufferTextureCopy],
    );

    /// Binds `group` at `index` for the given pipeline `layout`.
    unsafe fn set_bind_group(
        &mut self,
        layout: &dyn DynPipelineLayout,
        index: u32,
        group: &dyn DynBindGroup,
        dynamic_offsets: &[wgt::DynamicOffset],
    );

    /// Writes immediate (push-constant style) data at `offset_bytes`.
    unsafe fn set_immediates(
        &mut self,
        layout: &dyn DynPipelineLayout,
        offset_bytes: u32,
        data: &[u32],
    );

    // Debug-marker helpers.
    unsafe fn insert_debug_marker(&mut self, label: &str);
    unsafe fn begin_debug_marker(&mut self, group_label: &str);
    unsafe fn end_debug_marker(&mut self);

    // Query operations.
    unsafe fn begin_query(&mut self, set: &dyn DynQuerySet, index: u32);
    unsafe fn end_query(&mut self, set: &dyn DynQuerySet, index: u32);
    unsafe fn write_timestamp(&mut self, set: &dyn DynQuerySet, index: u32);
    unsafe fn reset_queries(&mut self, set: &dyn DynQuerySet, range: Range<u32>);
    unsafe fn copy_query_results(
        &mut self,
        set: &dyn DynQuerySet,
        range: Range<u32>,
        buffer: &dyn DynBuffer,
        offset: wgt::BufferAddress,
        stride: wgt::BufferSize,
    );

    // Render passes.
    unsafe fn begin_render_pass(
        &mut self,
        desc: &RenderPassDescriptor<dyn DynQuerySet, dyn DynTextureView>,
    ) -> Result<(), DeviceError>;
    unsafe fn end_render_pass(&mut self);

    unsafe fn set_render_pipeline(&mut self, pipeline: &dyn DynRenderPipeline);

    unsafe fn set_index_buffer<'a>(
        &mut self,
        binding: BufferBinding<'a, dyn DynBuffer>,
        format: wgt::IndexFormat,
    );

    unsafe fn set_vertex_buffer<'a>(
        &mut self,
        index: u32,
        binding: BufferBinding<'a, dyn DynBuffer>,
    );
    unsafe fn set_viewport(&mut self, rect: &Rect<f32>, depth_range: Range<f32>);
    unsafe fn set_scissor_rect(&mut self, rect: &Rect<u32>);
    unsafe fn set_stencil_reference(&mut self, value: u32);
    unsafe fn set_blend_constants(&mut self, color: &[f32; 4]);

    // Draw commands. Note the parameter order (`first_*` before `*_count`)
    // mirrors [`CommandEncoder`]'s signatures.
    unsafe fn draw(
        &mut self,
        first_vertex: u32,
        vertex_count: u32,
        first_instance: u32,
        instance_count: u32,
    );
    unsafe fn draw_indexed(
        &mut self,
        first_index: u32,
        index_count: u32,
        base_vertex: i32,
        first_instance: u32,
        instance_count: u32,
    );
    unsafe fn draw_mesh_tasks(
        &mut self,
        group_count_x: u32,
        group_count_y: u32,
        group_count_z: u32,
    );
    unsafe fn draw_indirect(
        &mut self,
        buffer: &dyn DynBuffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    );
    unsafe fn draw_indexed_indirect(
        &mut self,
        buffer: &dyn DynBuffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    );
    unsafe fn draw_mesh_tasks_indirect(
        &mut self,
        buffer: &dyn DynBuffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    );
    unsafe fn draw_indirect_count(
        &mut self,
        buffer: &dyn DynBuffer,
        offset: wgt::BufferAddress,
        count_buffer: &dyn DynBuffer,
        count_offset: wgt::BufferAddress,
        max_count: u32,
    );
    unsafe fn draw_indexed_indirect_count(
        &mut self,
        buffer: &dyn DynBuffer,
        offset: wgt::BufferAddress,
        count_buffer: &dyn DynBuffer,
        count_offset: wgt::BufferAddress,
        max_count: u32,
    );
    unsafe fn draw_mesh_tasks_indirect_count(
        &mut self,
        buffer: &dyn DynBuffer,
        offset: wgt::BufferAddress,
        count_buffer: &dyn DynBuffer,
        count_offset: wgt::BufferAddress,
        max_count: u32,
    );

    // Compute passes.
    unsafe fn begin_compute_pass(&mut self, desc: &ComputePassDescriptor<dyn DynQuerySet>);
    unsafe fn end_compute_pass(&mut self);

    unsafe fn set_compute_pipeline(&mut self, pipeline: &dyn DynComputePipeline);

    unsafe fn dispatch(&mut self, count: [u32; 3]);
    unsafe fn dispatch_indirect(&mut self, buffer: &dyn DynBuffer, offset: wgt::BufferAddress);

    // Ray-tracing acceleration structure commands.
    unsafe fn build_acceleration_structures<'a>(
        &mut self,
        descriptors: &'a [BuildAccelerationStructureDescriptor<
            'a,
            dyn DynBuffer,
            dyn DynAccelerationStructure,
        >],
    );
    unsafe fn place_acceleration_structure_barrier(
        &mut self,
        barrier: AccelerationStructureBarrier,
    );
    unsafe fn copy_acceleration_structure_to_acceleration_structure(
        &mut self,
        src: &dyn DynAccelerationStructure,
        dst: &dyn DynAccelerationStructure,
        copy: wgt::AccelerationStructureCopy,
    );
    /// Writes the compacted size of `acceleration_structure` into `buf`.
    unsafe fn read_acceleration_structure_compact_size(
        &mut self,
        acceleration_structure: &dyn DynAccelerationStructure,
        buf: &dyn DynBuffer,
    );
    // Note: takes `&self`, unlike the other methods — the blanket impl
    // forwards without passing the encoder itself.
    unsafe fn set_acceleration_structure_dependencies(
        &self,
        command_buffers: &[Box<dyn DynCommandBuffer>],
        dependencies: &[&dyn DynAccelerationStructure],
    );
}
218
/// Blanket implementation: any concrete [`CommandEncoder`] (that is also a
/// [`DynResource`]) is usable through the type-erased [`DynCommandEncoder`]
/// interface. Each method downcasts its `dyn` arguments to the backend's
/// concrete types (panicking on a backend mismatch) and forwards to the
/// concrete `C::` method.
impl<C: CommandEncoder + DynResource> DynCommandEncoder for C {
    unsafe fn begin_encoding(&mut self, label: Label) -> Result<(), DeviceError> {
        unsafe { C::begin_encoding(self, label) }
    }

    unsafe fn discard_encoding(&mut self) {
        unsafe { C::discard_encoding(self) }
    }

    unsafe fn end_encoding(&mut self) -> Result<Box<dyn DynCommandBuffer>, DeviceError> {
        unsafe { C::end_encoding(self) }.map(|cb| {
            // Box the concrete command buffer first, then unsize-coerce the
            // box into the `dyn DynCommandBuffer` trait object.
            let boxed_command_buffer: Box<<C::A as Api>::CommandBuffer> = Box::new(cb);
            let boxed_command_buffer: Box<dyn DynCommandBuffer> = boxed_command_buffer;
            boxed_command_buffer
        })
    }

    unsafe fn reset_all(&mut self, command_buffers: Vec<Box<dyn DynCommandBuffer>>) {
        // `unbox` recovers the concrete command buffer from each trait object.
        unsafe { C::reset_all(self, command_buffers.into_iter().map(|cb| cb.unbox())) }
    }

    unsafe fn transition_buffers(&mut self, barriers: &[BufferBarrier<'_, dyn DynBuffer>]) {
        // Lazily downcast each barrier's buffer; the concrete
        // `CommandEncoder::transition_buffers` consumes the iterator.
        let barriers = barriers.iter().map(|barrier| BufferBarrier {
            buffer: barrier.buffer.expect_downcast_ref(),
            usage: barrier.usage.clone(),
        });
        unsafe { self.transition_buffers(barriers) };
    }

    unsafe fn transition_textures(&mut self, barriers: &[TextureBarrier<'_, dyn DynTexture>]) {
        let barriers = barriers.iter().map(|barrier| TextureBarrier {
            texture: barrier.texture.expect_downcast_ref(),
            usage: barrier.usage.clone(),
            range: barrier.range,
        });
        unsafe { self.transition_textures(barriers) };
    }

    unsafe fn clear_buffer(&mut self, buffer: &dyn DynBuffer, range: MemoryRange) {
        let buffer = buffer.expect_downcast_ref();
        unsafe { C::clear_buffer(self, buffer, range) };
    }

    unsafe fn copy_buffer_to_buffer(
        &mut self,
        src: &dyn DynBuffer,
        dst: &dyn DynBuffer,
        regions: &[BufferCopy],
    ) {
        let src = src.expect_downcast_ref();
        let dst = dst.expect_downcast_ref();
        unsafe {
            C::copy_buffer_to_buffer(self, src, dst, regions.iter().copied());
        }
    }

    unsafe fn copy_texture_to_texture(
        &mut self,
        src: &dyn DynTexture,
        src_usage: wgt::TextureUses,
        dst: &dyn DynTexture,
        regions: &[TextureCopy],
    ) {
        let src = src.expect_downcast_ref();
        let dst = dst.expect_downcast_ref();
        unsafe {
            C::copy_texture_to_texture(self, src, src_usage, dst, regions.iter().cloned());
        }
    }

    unsafe fn copy_buffer_to_texture(
        &mut self,
        src: &dyn DynBuffer,
        dst: &dyn DynTexture,
        regions: &[BufferTextureCopy],
    ) {
        let src = src.expect_downcast_ref();
        let dst = dst.expect_downcast_ref();
        unsafe {
            C::copy_buffer_to_texture(self, src, dst, regions.iter().cloned());
        }
    }

    unsafe fn copy_texture_to_buffer(
        &mut self,
        src: &dyn DynTexture,
        src_usage: wgt::TextureUses,
        dst: &dyn DynBuffer,
        regions: &[BufferTextureCopy],
    ) {
        let src = src.expect_downcast_ref();
        let dst = dst.expect_downcast_ref();
        unsafe {
            C::copy_texture_to_buffer(self, src, src_usage, dst, regions.iter().cloned());
        }
    }

    unsafe fn set_bind_group(
        &mut self,
        layout: &dyn DynPipelineLayout,
        index: u32,
        group: &dyn DynBindGroup,
        dynamic_offsets: &[wgt::DynamicOffset],
    ) {
        let layout = layout.expect_downcast_ref();
        let group = group.expect_downcast_ref();
        unsafe { C::set_bind_group(self, layout, index, group, dynamic_offsets) };
    }

    unsafe fn set_immediates(
        &mut self,
        layout: &dyn DynPipelineLayout,
        offset_bytes: u32,
        data: &[u32],
    ) {
        let layout = layout.expect_downcast_ref();
        unsafe { C::set_immediates(self, layout, offset_bytes, data) };
    }

    unsafe fn insert_debug_marker(&mut self, label: &str) {
        unsafe {
            C::insert_debug_marker(self, label);
        }
    }

    unsafe fn begin_debug_marker(&mut self, group_label: &str) {
        unsafe {
            C::begin_debug_marker(self, group_label);
        }
    }

    unsafe fn end_debug_marker(&mut self) {
        unsafe {
            C::end_debug_marker(self);
        }
    }

    unsafe fn begin_query(&mut self, set: &dyn DynQuerySet, index: u32) {
        let set = set.expect_downcast_ref();
        unsafe { C::begin_query(self, set, index) };
    }

    unsafe fn end_query(&mut self, set: &dyn DynQuerySet, index: u32) {
        let set = set.expect_downcast_ref();
        unsafe { C::end_query(self, set, index) };
    }

    unsafe fn write_timestamp(&mut self, set: &dyn DynQuerySet, index: u32) {
        let set = set.expect_downcast_ref();
        unsafe { C::write_timestamp(self, set, index) };
    }

    unsafe fn reset_queries(&mut self, set: &dyn DynQuerySet, range: Range<u32>) {
        let set = set.expect_downcast_ref();
        unsafe { C::reset_queries(self, set, range) };
    }

    unsafe fn copy_query_results(
        &mut self,
        set: &dyn DynQuerySet,
        range: Range<u32>,
        buffer: &dyn DynBuffer,
        offset: wgt::BufferAddress,
        stride: wgt::BufferSize,
    ) {
        let set = set.expect_downcast_ref();
        let buffer = buffer.expect_downcast_ref();
        unsafe { C::copy_query_results(self, set, range, buffer, offset, stride) };
    }

    unsafe fn begin_render_pass(
        &mut self,
        desc: &RenderPassDescriptor<dyn DynQuerySet, dyn DynTextureView>,
    ) -> Result<(), DeviceError> {
        // Downcast all color attachments into an owned Vec first, so the
        // rebuilt descriptor below can borrow them as a slice.
        let color_attachments = desc
            .color_attachments
            .iter()
            .map(|attachment| {
                attachment
                    .as_ref()
                    .map(|attachment| attachment.expect_downcast())
            })
            .collect::<Vec<_>>();

        let desc: RenderPassDescriptor<<C::A as Api>::QuerySet, <C::A as Api>::TextureView> =
            RenderPassDescriptor {
                label: desc.label,
                extent: desc.extent,
                sample_count: desc.sample_count,
                color_attachments: &color_attachments,
                depth_stencil_attachment: desc
                    .depth_stencil_attachment
                    .as_ref()
                    .map(|ds| ds.expect_downcast()),
                multiview_mask: desc.multiview_mask,
                timestamp_writes: desc
                    .timestamp_writes
                    .as_ref()
                    .map(|writes| writes.expect_downcast()),
                occlusion_query_set: desc
                    .occlusion_query_set
                    .map(|set| set.expect_downcast_ref()),
            };
        unsafe { C::begin_render_pass(self, &desc) }
    }

    unsafe fn end_render_pass(&mut self) {
        unsafe {
            C::end_render_pass(self);
        }
    }

    unsafe fn set_viewport(&mut self, rect: &Rect<f32>, depth_range: Range<f32>) {
        unsafe {
            C::set_viewport(self, rect, depth_range);
        }
    }

    unsafe fn set_scissor_rect(&mut self, rect: &Rect<u32>) {
        unsafe {
            C::set_scissor_rect(self, rect);
        }
    }

    unsafe fn set_stencil_reference(&mut self, value: u32) {
        unsafe {
            C::set_stencil_reference(self, value);
        }
    }

    unsafe fn set_blend_constants(&mut self, color: &[f32; 4]) {
        unsafe { C::set_blend_constants(self, color) };
    }

    unsafe fn draw(
        &mut self,
        first_vertex: u32,
        vertex_count: u32,
        first_instance: u32,
        instance_count: u32,
    ) {
        unsafe {
            C::draw(
                self,
                first_vertex,
                vertex_count,
                first_instance,
                instance_count,
            )
        };
    }

    unsafe fn draw_indexed(
        &mut self,
        first_index: u32,
        index_count: u32,
        base_vertex: i32,
        first_instance: u32,
        instance_count: u32,
    ) {
        unsafe {
            C::draw_indexed(
                self,
                first_index,
                index_count,
                base_vertex,
                first_instance,
                instance_count,
            )
        };
    }

    unsafe fn draw_mesh_tasks(
        &mut self,
        group_count_x: u32,
        group_count_y: u32,
        group_count_z: u32,
    ) {
        unsafe { C::draw_mesh_tasks(self, group_count_x, group_count_y, group_count_z) };
    }

    unsafe fn draw_indirect(
        &mut self,
        buffer: &dyn DynBuffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    ) {
        let buffer = buffer.expect_downcast_ref();
        unsafe { C::draw_indirect(self, buffer, offset, draw_count) };
    }

    unsafe fn draw_indexed_indirect(
        &mut self,
        buffer: &dyn DynBuffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    ) {
        let buffer = buffer.expect_downcast_ref();
        unsafe { C::draw_indexed_indirect(self, buffer, offset, draw_count) };
    }

    unsafe fn draw_mesh_tasks_indirect(
        &mut self,
        buffer: &dyn DynBuffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    ) {
        let buffer = buffer.expect_downcast_ref();
        unsafe { C::draw_mesh_tasks_indirect(self, buffer, offset, draw_count) };
    }

    unsafe fn draw_indirect_count(
        &mut self,
        buffer: &dyn DynBuffer,
        offset: wgt::BufferAddress,
        count_buffer: &dyn DynBuffer,
        count_offset: wgt::BufferAddress,
        max_count: u32,
    ) {
        let buffer = buffer.expect_downcast_ref();
        let count_buffer = count_buffer.expect_downcast_ref();
        unsafe {
            C::draw_indirect_count(self, buffer, offset, count_buffer, count_offset, max_count)
        };
    }

    unsafe fn draw_indexed_indirect_count(
        &mut self,
        buffer: &dyn DynBuffer,
        offset: wgt::BufferAddress,
        count_buffer: &dyn DynBuffer,
        count_offset: wgt::BufferAddress,
        max_count: u32,
    ) {
        let buffer = buffer.expect_downcast_ref();
        let count_buffer = count_buffer.expect_downcast_ref();
        unsafe {
            C::draw_indexed_indirect_count(
                self,
                buffer,
                offset,
                count_buffer,
                count_offset,
                max_count,
            )
        };
    }

    unsafe fn draw_mesh_tasks_indirect_count(
        &mut self,
        buffer: &dyn DynBuffer,
        offset: wgt::BufferAddress,
        count_buffer: &dyn DynBuffer,
        count_offset: wgt::BufferAddress,
        max_count: u32,
    ) {
        let buffer = buffer.expect_downcast_ref();
        let count_buffer = count_buffer.expect_downcast_ref();
        unsafe {
            C::draw_mesh_tasks_indirect_count(
                self,
                buffer,
                offset,
                count_buffer,
                count_offset,
                max_count,
            )
        };
    }

    unsafe fn begin_compute_pass(&mut self, desc: &ComputePassDescriptor<dyn DynQuerySet>) {
        // Rebuild the descriptor with the query set downcast to the backend type.
        let desc = ComputePassDescriptor {
            label: desc.label,
            timestamp_writes: desc
                .timestamp_writes
                .as_ref()
                .map(|writes| writes.expect_downcast()),
        };
        unsafe { C::begin_compute_pass(self, &desc) };
    }

    unsafe fn end_compute_pass(&mut self) {
        unsafe { C::end_compute_pass(self) };
    }

    unsafe fn set_compute_pipeline(&mut self, pipeline: &dyn DynComputePipeline) {
        let pipeline = pipeline.expect_downcast_ref();
        unsafe { C::set_compute_pipeline(self, pipeline) };
    }

    unsafe fn dispatch(&mut self, count: [u32; 3]) {
        unsafe { C::dispatch(self, count) };
    }

    unsafe fn dispatch_indirect(&mut self, buffer: &dyn DynBuffer, offset: wgt::BufferAddress) {
        let buffer = buffer.expect_downcast_ref();
        unsafe { C::dispatch_indirect(self, buffer, offset) };
    }

    unsafe fn set_render_pipeline(&mut self, pipeline: &dyn DynRenderPipeline) {
        let pipeline = pipeline.expect_downcast_ref();
        unsafe { C::set_render_pipeline(self, pipeline) };
    }

    unsafe fn set_index_buffer<'a>(
        &mut self,
        binding: BufferBinding<'a, dyn DynBuffer>,
        format: wgt::IndexFormat,
    ) {
        let binding = binding.expect_downcast();
        unsafe { self.set_index_buffer(binding, format) };
    }

    unsafe fn set_vertex_buffer<'a>(
        &mut self,
        index: u32,
        binding: BufferBinding<'a, dyn DynBuffer>,
    ) {
        let binding = binding.expect_downcast();
        unsafe { self.set_vertex_buffer(index, binding) };
    }

    unsafe fn build_acceleration_structures<'a>(
        &mut self,
        descriptors: &'a [BuildAccelerationStructureDescriptor<
            'a,
            dyn DynBuffer,
            dyn DynAccelerationStructure,
        >],
    ) {
        // Two-pass rebuild: downcast all entry lists into an owned Vec first,
        // so the descriptors built below can borrow them for long enough.
        let descriptor_entries = descriptors
            .iter()
            .map(|d| d.entries.expect_downcast())
            .collect::<Vec<_>>();
        let descriptors = descriptors
            .iter()
            .zip(descriptor_entries.iter())
            .map(|(d, entries)| BuildAccelerationStructureDescriptor::<
                <C::A as Api>::Buffer,
                <C::A as Api>::AccelerationStructure,
            > {
                entries,
                mode: d.mode,
                flags: d.flags,
                source_acceleration_structure: d
                    .source_acceleration_structure
                    .map(|a| a.expect_downcast_ref()),
                destination_acceleration_structure: d
                    .destination_acceleration_structure
                    .expect_downcast_ref(),
                scratch_buffer: d.scratch_buffer.expect_downcast_ref(),
                scratch_buffer_offset: d.scratch_buffer_offset,
            });
        // `descriptors.len()` comes from the exact-size zip/map iterator.
        unsafe { C::build_acceleration_structures(self, descriptors.len() as _, descriptors) };
    }

    unsafe fn place_acceleration_structure_barrier(
        &mut self,
        barrier: AccelerationStructureBarrier,
    ) {
        unsafe { C::place_acceleration_structure_barrier(self, barrier) };
    }

    unsafe fn copy_acceleration_structure_to_acceleration_structure(
        &mut self,
        src: &dyn DynAccelerationStructure,
        dst: &dyn DynAccelerationStructure,
        copy: wgt::AccelerationStructureCopy,
    ) {
        let src = src.expect_downcast_ref();
        let dst = dst.expect_downcast_ref();
        unsafe { C::copy_acceleration_structure_to_acceleration_structure(self, src, dst, copy) };
    }
    unsafe fn read_acceleration_structure_compact_size(
        &mut self,
        acceleration_structure: &dyn DynAccelerationStructure,
        buf: &dyn DynBuffer,
    ) {
        let acceleration_structure = acceleration_structure.expect_downcast_ref();
        let buf = buf.expect_downcast_ref();
        unsafe { C::read_acceleration_structure_compact_size(self, acceleration_structure, buf) }
    }

    unsafe fn set_acceleration_structure_dependencies(
        &self,
        command_buffers: &[Box<dyn DynCommandBuffer>],
        dependencies: &[&dyn DynAccelerationStructure],
    ) {
        // Collect downcast references, then forward; note the concrete
        // `C::set_acceleration_structure_dependencies` is called without `self`.
        let command_buffers: Vec<&<C::A as Api>::CommandBuffer> = command_buffers
            .iter()
            .map(|command_buffer| command_buffer.expect_downcast_ref())
            .collect();
        let dependencies: Vec<&<C::A as Api>::AccelerationStructure> = dependencies
            .iter()
            .map(|dependency| dependency.expect_downcast_ref())
            .collect();
        unsafe { C::set_acceleration_structure_dependencies(&command_buffers, &dependencies) }
    }
}
720
721impl<'a> PassTimestampWrites<'a, dyn DynQuerySet> {
722 pub fn expect_downcast<B: DynQuerySet>(&self) -> PassTimestampWrites<'a, B> {
723 PassTimestampWrites {
724 query_set: self.query_set.expect_downcast_ref(),
725 beginning_of_pass_write_index: self.beginning_of_pass_write_index,
726 end_of_pass_write_index: self.end_of_pass_write_index,
727 }
728 }
729}
730
731impl<'a> Attachment<'a, dyn DynTextureView> {
732 pub fn expect_downcast<B: DynTextureView>(&self) -> Attachment<'a, B> {
733 Attachment {
734 view: self.view.expect_downcast_ref(),
735 usage: self.usage,
736 }
737 }
738}
739
740impl<'a> ColorAttachment<'a, dyn DynTextureView> {
741 pub fn expect_downcast<B: DynTextureView>(&self) -> ColorAttachment<'a, B> {
742 ColorAttachment {
743 target: self.target.expect_downcast(),
744 depth_slice: self.depth_slice,
745 resolve_target: self.resolve_target.as_ref().map(|rt| rt.expect_downcast()),
746 ops: self.ops,
747 clear_value: self.clear_value,
748 }
749 }
750}
751
752impl<'a> DepthStencilAttachment<'a, dyn DynTextureView> {
753 pub fn expect_downcast<B: DynTextureView>(&self) -> DepthStencilAttachment<'a, B> {
754 DepthStencilAttachment {
755 target: self.target.expect_downcast(),
756 depth_ops: self.depth_ops,
757 stencil_ops: self.stencil_ops,
758 clear_value: self.clear_value,
759 }
760 }
761}