use alloc::vec::Vec;
use core::mem;
use core::ops::Range;

use super::{Api, Buffer, DeviceResult, Resource};

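/// A recorded list of commands, replayed on the CPU by [`CommandBuffer::execute`].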
#[derive(Debug)]
pub struct CommandBuffer {
    commands: Vec<Command>,
}

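/// A single recorded operation. Only buffer clears and buffer-to-buffer copies
/// are recorded; every other `CommandEncoder` method below is a no-op.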
#[derive(Debug)]
enum Command {
    ClearBuffer {
        buffer: Buffer,
        range: crate::MemoryRange,
    },
    CopyBufferToBuffer {
        src: Buffer,
        dst: Buffer,
        regions: Vec<crate::BufferCopy>,
    },
}

impl CommandBuffer {
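    /// Replays the recorded commands in order.
    ///
    /// Each command reads and writes buffer contents through raw pointers, so
    /// the caller must ensure the referenced buffers are still alive and are
    /// not accessed concurrently while this runs.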
    pub(crate) unsafe fn execute(&self) {
        for command in &self.commands {
            unsafe { command.execute() };
        }
    }

    pub(crate) fn new() -> Self {
        Self {
            commands: Vec::new(),
        }
    }
}

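// Only `clear_buffer` and `copy_buffer_to_buffer` record commands; the
// remaining `CommandEncoder` methods are accepted but do nothing.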
impl crate::CommandEncoder for CommandBuffer {
    type A = Api;

    unsafe fn begin_encoding(&mut self, label: crate::Label) -> DeviceResult<()> {
        assert!(self.commands.is_empty());
        Ok(())
    }
    unsafe fn discard_encoding(&mut self) {
        self.commands.clear();
    }
    unsafe fn end_encoding(&mut self) -> DeviceResult<CommandBuffer> {
        Ok(CommandBuffer {
            commands: mem::take(&mut self.commands),
        })
    }
    unsafe fn reset_all<I>(&mut self, command_buffers: I) {}

    unsafe fn transition_buffers<'a, T>(&mut self, barriers: T)
    where
        T: Iterator<Item = crate::BufferBarrier<'a, Buffer>>,
    {
    }

    unsafe fn transition_textures<'a, T>(&mut self, barriers: T)
    where
        T: Iterator<Item = crate::TextureBarrier<'a, Resource>>,
    {
    }

    unsafe fn clear_buffer(&mut self, buffer: &Buffer, range: crate::MemoryRange) {
        self.commands.push(Command::ClearBuffer {
            buffer: buffer.clone(),
            range,
        })
    }

    unsafe fn copy_buffer_to_buffer<T>(&mut self, src: &Buffer, dst: &Buffer, regions: T)
    where
        T: Iterator<Item = crate::BufferCopy>,
    {
        self.commands.push(Command::CopyBufferToBuffer {
            src: src.clone(),
            dst: dst.clone(),
            regions: regions.collect(),
        });
    }

    #[cfg(webgl)]
    unsafe fn copy_external_image_to_texture<T>(
        &mut self,
        src: &wgt::CopyExternalImageSourceInfo,
        dst: &Resource,
        dst_premultiplication: bool,
        regions: T,
    ) where
        T: Iterator<Item = crate::TextureCopy>,
    {
    }

    unsafe fn copy_texture_to_texture<T>(
        &mut self,
        src: &Resource,
        src_usage: wgt::TextureUses,
        dst: &Resource,
        regions: T,
    ) {
    }

    unsafe fn copy_buffer_to_texture<T>(&mut self, src: &Buffer, dst: &Resource, regions: T) {
    }

    unsafe fn copy_texture_to_buffer<T>(
        &mut self,
        src: &Resource,
        src_usage: wgt::TextureUses,
        dst: &Buffer,
        regions: T,
    ) {
    }

    unsafe fn begin_query(&mut self, set: &Resource, index: u32) {}
    unsafe fn end_query(&mut self, set: &Resource, index: u32) {}
    unsafe fn write_timestamp(&mut self, set: &Resource, index: u32) {}
    unsafe fn read_acceleration_structure_compact_size(
        &mut self,
        acceleration_structure: &Resource,
        buf: &Buffer,
    ) {
    }
    unsafe fn reset_queries(&mut self, set: &Resource, range: Range<u32>) {}
    unsafe fn copy_query_results(
        &mut self,
        set: &Resource,
        range: Range<u32>,
        buffer: &Buffer,
        offset: wgt::BufferAddress,
        stride: wgt::BufferSize,
    ) {
    }

    unsafe fn begin_render_pass(
        &mut self,
        desc: &crate::RenderPassDescriptor<Resource, Resource>,
    ) -> DeviceResult<()> {
        Ok(())
    }
    unsafe fn end_render_pass(&mut self) {}

    unsafe fn set_bind_group(
        &mut self,
        layout: &Resource,
        index: u32,
        group: &Resource,
        dynamic_offsets: &[wgt::DynamicOffset],
    ) {
    }
    unsafe fn set_push_constants(
        &mut self,
        layout: &Resource,
        stages: wgt::ShaderStages,
        offset_bytes: u32,
        data: &[u32],
    ) {
    }

    unsafe fn insert_debug_marker(&mut self, label: &str) {}
    unsafe fn begin_debug_marker(&mut self, group_label: &str) {}
    unsafe fn end_debug_marker(&mut self) {}

    unsafe fn set_render_pipeline(&mut self, pipeline: &Resource) {}

    unsafe fn set_index_buffer<'a>(
        &mut self,
        binding: crate::BufferBinding<'a, Buffer>,
        format: wgt::IndexFormat,
    ) {
    }
    unsafe fn set_vertex_buffer<'a>(
        &mut self,
        index: u32,
        binding: crate::BufferBinding<'a, Buffer>,
    ) {
    }
    unsafe fn set_viewport(&mut self, rect: &crate::Rect<f32>, depth_range: Range<f32>) {}
    unsafe fn set_scissor_rect(&mut self, rect: &crate::Rect<u32>) {}
    unsafe fn set_stencil_reference(&mut self, value: u32) {}
    unsafe fn set_blend_constants(&mut self, color: &[f32; 4]) {}

    unsafe fn draw(
        &mut self,
        first_vertex: u32,
        vertex_count: u32,
        first_instance: u32,
        instance_count: u32,
    ) {
    }
    unsafe fn draw_indexed(
        &mut self,
        first_index: u32,
        index_count: u32,
        base_vertex: i32,
        first_instance: u32,
        instance_count: u32,
    ) {
    }
    unsafe fn draw_mesh_tasks(
        &mut self,
        group_count_x: u32,
        group_count_y: u32,
        group_count_z: u32,
    ) {
    }
    unsafe fn draw_indirect(
        &mut self,
        buffer: &Buffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    ) {
    }
    unsafe fn draw_indexed_indirect(
        &mut self,
        buffer: &Buffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    ) {
    }
    unsafe fn draw_mesh_tasks_indirect(
        &mut self,
        buffer: &<Self::A as crate::Api>::Buffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    ) {
    }
    unsafe fn draw_indirect_count(
        &mut self,
        buffer: &Buffer,
        offset: wgt::BufferAddress,
        count_buffer: &Buffer,
        count_offset: wgt::BufferAddress,
        max_count: u32,
    ) {
    }
    unsafe fn draw_indexed_indirect_count(
        &mut self,
        buffer: &Buffer,
        offset: wgt::BufferAddress,
        count_buffer: &Buffer,
        count_offset: wgt::BufferAddress,
        max_count: u32,
    ) {
    }
    unsafe fn draw_mesh_tasks_indirect_count(
        &mut self,
        buffer: &<Self::A as crate::Api>::Buffer,
        offset: wgt::BufferAddress,
        count_buffer: &<Self::A as crate::Api>::Buffer,
        count_offset: wgt::BufferAddress,
        max_count: u32,
    ) {
    }

    unsafe fn begin_compute_pass(&mut self, desc: &crate::ComputePassDescriptor<Resource>) {}
    unsafe fn end_compute_pass(&mut self) {}

    unsafe fn set_compute_pipeline(&mut self, pipeline: &Resource) {}

    unsafe fn dispatch(&mut self, count: [u32; 3]) {}
    unsafe fn dispatch_indirect(&mut self, buffer: &Buffer, offset: wgt::BufferAddress) {}

    unsafe fn build_acceleration_structures<'a, T>(
        &mut self,
        _descriptor_count: u32,
        descriptors: T,
    ) where
        Api: 'a,
        T: IntoIterator<Item = crate::BuildAccelerationStructureDescriptor<'a, Buffer, Resource>>,
    {
    }

    unsafe fn place_acceleration_structure_barrier(
        &mut self,
        _barriers: crate::AccelerationStructureBarrier,
    ) {
    }

    unsafe fn copy_acceleration_structure_to_acceleration_structure(
        &mut self,
        src: &Resource,
        dst: &Resource,
        copy: wgt::AccelerationStructureCopy,
    ) {
    }
}

impl Command {
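    /// Performs this command's effect directly on the buffers' underlying memory.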
    unsafe fn execute(&self) {
        match self {
            Command::ClearBuffer { ref buffer, range } => {
                // Clearing writes zeroes over the selected byte range.
                let buffer_slice: &mut [u8] =
                    unsafe { &mut *buffer.get_slice_ptr(range.clone()) };
                buffer_slice.fill(0);
            }

            Command::CopyBufferToBuffer { src, dst, regions } => {
                for &crate::BufferCopy {
                    src_offset,
                    dst_offset,
                    size,
                } in regions
                {
                    // Holding a shared and a mutable slice at the same time
                    // relies on the copied ranges not aliasing each other.
                    let src_region: &[u8] =
                        unsafe { &*src.get_slice_ptr(src_offset..src_offset + size.get()) };
                    let dst_region: &mut [u8] =
                        unsafe { &mut *dst.get_slice_ptr(dst_offset..dst_offset + size.get()) };
                    dst_region.copy_from_slice(src_region);
                }
            }
        }
    }
}