1use alloc::string::String;
2use core::{mem, ops::Range};
3
4use arrayvec::ArrayVec;
5
6use super::{conv, Command as C};
7
/// Cached binding state for a single texture unit.
#[derive(Clone, Copy, Debug, Default)]
struct TextureSlotDesc {
    /// Bind target (e.g. 2D, cube) the texture in this unit uses.
    tex_target: super::BindTarget,
    /// Index into `State::samplers` of the sampler paired with this unit by
    /// the current program's sampler map, if any.
    sampler_index: Option<u8>,
}
13
/// CPU-side shadow of the encoder's state while recording.
///
/// Commands are recorded into a `CommandBuffer` and replayed later, so the
/// encoder tracks enough state here to emit commands lazily (vertex buffers,
/// stencil, immediates) and to avoid pushing redundant state changes.
pub(super) struct State {
    /// GL primitive topology of the current pipeline.
    topology: u32,
    /// Rasterizer state of the current pipeline, for redundancy elision.
    primitive: super::PrimitiveState,
    /// Format of the currently bound index buffer.
    index_format: wgt::IndexFormat,
    /// Byte offset into the currently bound index buffer.
    index_offset: wgt::BufferAddress,
    /// Per-slot pairing of the pipeline's buffer layout (`.0`) with the
    /// application-bound buffer (`.1`); a slot is only emitted once both are set.
    vertex_buffers: [(
        Option<super::VertexBufferDesc>,
        Option<super::BufferBinding>,
    ); crate::MAX_VERTEX_BUFFERS],
    /// Attributes of the current pipeline (used on devices without
    /// `VERTEX_BUFFER_LAYOUT` support).
    vertex_attributes: ArrayVec<super::AttributeDesc, { super::MAX_VERTEX_ATTRIBUTES }>,
    /// Color target state of the current pipeline, for redundancy elision.
    color_targets: ArrayVec<super::ColorTargetDesc, { crate::MAX_COLOR_ATTACHMENTS }>,
    /// Current stencil state; re-emitted when the reference value changes.
    stencil: super::StencilState,
    /// Current depth bias, for redundancy elision.
    depth_bias: wgt::DepthBiasState,
    /// Whether alpha-to-coverage is currently enabled.
    alpha_to_coverage_enabled: bool,
    /// Samplers bound via bind groups, indexed by sampler slot.
    samplers: [Option<glow::Sampler>; super::MAX_SAMPLERS],
    /// Per-texture-unit state pairing textures with sampler slots.
    texture_slots: [TextureSlotDesc; super::MAX_TEXTURE_SLOTS],
    /// Extent of the current render pass, used for resolves at pass end.
    render_size: wgt::Extent3d,
    /// MSAA resolve operations queued for `end_render_pass`.
    resolve_attachments: ArrayVec<(u32, super::TextureView), { crate::MAX_COLOR_ATTACHMENTS }>,
    /// Attachments to invalidate at pass end; colors plus depth and
    /// stencil, hence the `+ 2` capacity.
    invalidate_attachments: ArrayVec<u32, { crate::MAX_COLOR_ATTACHMENTS + 2 }>,
    /// Whether the pass pushed a debug group that must be popped at pass end.
    has_pass_label: bool,
    /// Bitmask of vertex buffer slots with per-instance step mode.
    instance_vbuf_mask: usize,
    /// Bitmask of vertex buffer slots whose bindings must be re-emitted
    /// before the next draw.
    dirty_vbuf_mask: usize,
    /// `first_instance` value the instanced bindings were last emitted for
    /// (used when `first_instance` is emulated via buffer offsets).
    active_first_instance: u32,
    /// Uniform location of the emulated `first_instance` builtin, if the
    /// current program has one.
    first_instance_location: Option<glow::UniformLocation>,
    /// Immediate-data (push-constant-like) uniforms of the current program.
    immediates_descs: ArrayVec<super::ImmediateDesc, { super::MAX_IMMEDIATES_COMMANDS }>,
    /// CPU copy of the immediates data, in 32-bit words.
    current_immediates_data: [u32; super::MAX_IMMEDIATES],
    /// Query to record when the pass ends, from `timestamp_writes`.
    end_of_pass_timestamp: Option<glow::Query>,
    /// Number of clip distances enabled for the current pipeline.
    clip_distance_count: u32,
}
44
impl Default for State {
    /// All-empty initial state for a fresh encoding.
    ///
    /// Written out manually (rather than `#[derive(Default)]`) so that
    /// `current_immediates_data` can be initialized explicitly — presumably
    /// the array length exceeds what a derived `Default` supports.
    fn default() -> Self {
        Self {
            topology: Default::default(),
            primitive: Default::default(),
            index_format: Default::default(),
            index_offset: Default::default(),
            vertex_buffers: Default::default(),
            vertex_attributes: Default::default(),
            color_targets: Default::default(),
            stencil: Default::default(),
            depth_bias: Default::default(),
            alpha_to_coverage_enabled: Default::default(),
            samplers: Default::default(),
            texture_slots: Default::default(),
            render_size: Default::default(),
            resolve_attachments: Default::default(),
            invalidate_attachments: Default::default(),
            has_pass_label: Default::default(),
            instance_vbuf_mask: Default::default(),
            dirty_vbuf_mask: Default::default(),
            active_first_instance: Default::default(),
            first_instance_location: Default::default(),
            immediates_descs: Default::default(),
            // Explicit zero-fill; see the impl-level comment.
            current_immediates_data: [0; super::MAX_IMMEDIATES],
            end_of_pass_timestamp: Default::default(),
            clip_distance_count: Default::default(),
        }
    }
}
75
76impl super::CommandBuffer {
77 fn clear(&mut self) {
78 self.label = None;
79 self.commands.clear();
80 self.data_bytes.clear();
81 self.queries.clear();
82 }
83
84 fn add_marker(&mut self, marker: &str) -> Range<u32> {
85 let start = self.data_bytes.len() as u32;
86 self.data_bytes.extend(marker.as_bytes());
87 start..self.data_bytes.len() as u32
88 }
89
90 fn add_immediates_data(&mut self, data: &[u32]) -> Range<u32> {
91 let data_raw = bytemuck::cast_slice(data);
92 let start = self.data_bytes.len();
93 assert!(start < u32::MAX as usize);
94 self.data_bytes.extend_from_slice(data_raw);
95 let end = self.data_bytes.len();
96 assert!(end < u32::MAX as usize);
97 (start as u32)..(end as u32)
98 }
99}
100
impl Drop for super::CommandEncoder {
    fn drop(&mut self) {
        use crate::CommandEncoder;
        // Throw away any half-recorded commands so they are never replayed.
        unsafe { self.discard_encoding() }
        // Keep the device's internal counters in sync.
        self.counters.command_encoders.sub(1);
    }
}
108
impl super::CommandEncoder {
    /// Re-emits stencil-function commands from the shadowed stencil state.
    ///
    /// Collapses the two faces into one `FRONT_AND_BACK` command when their
    /// function, read mask and reference all agree.
    fn rebind_stencil_func(&mut self) {
        fn make(s: &super::StencilSide, face: u32) -> C {
            C::SetStencilFunc {
                face,
                function: s.function,
                reference: s.reference,
                read_mask: s.mask_read,
            }
        }

        let s = &self.state.stencil;
        if s.front.function == s.back.function
            && s.front.mask_read == s.back.mask_read
            && s.front.reference == s.back.reference
        {
            self.cmd_buffer
                .commands
                .push(make(&s.front, glow::FRONT_AND_BACK));
        } else {
            self.cmd_buffer.commands.push(make(&s.front, glow::FRONT));
            self.cmd_buffer.commands.push(make(&s.back, glow::BACK));
        }
    }

    /// Emits vertex-buffer / vertex-attribute commands for every slot marked
    /// in `state.dirty_vbuf_mask`, clearing the bits it services.
    ///
    /// `first_instance` is baked into per-instance buffer/attribute offsets —
    /// this is how `first_instance` is emulated on devices without native
    /// support (see `prepare_draw`).
    fn rebind_vertex_data(&mut self, first_instance: u32) {
        if self
            .private_caps
            .contains(super::PrivateCapabilities::VERTEX_BUFFER_LAYOUT)
        {
            // Capable devices: one `SetVertexBuffer` command per dirty slot.
            for (index, pair) in self.state.vertex_buffers.iter().enumerate() {
                if self.state.dirty_vbuf_mask & (1 << index) == 0 {
                    continue;
                }
                let (buffer_desc, vb) = match *pair {
                    (Some(ref vb_desc), Some(ref vb)) => (vb_desc.clone(), vb),
                    // A slot stays dirty until both the pipeline's layout and
                    // an application-bound buffer are present.
                    (_, _) => continue,
                };
                let instance_offset = match buffer_desc.step {
                    wgt::VertexStepMode::Vertex => 0,
                    wgt::VertexStepMode::Instance => first_instance * buffer_desc.stride,
                };

                self.cmd_buffer.commands.push(C::SetVertexBuffer {
                    index: index as u32,
                    buffer: super::BufferBinding {
                        raw: vb.raw,
                        offset: vb.offset + instance_offset as wgt::BufferAddress,
                    },
                    buffer_desc,
                });
                self.state.dirty_vbuf_mask ^= 1 << index;
            }
        } else {
            // Fallback: re-specify each attribute sourced from a dirty
            // buffer, folding the buffer offset into the attribute offset.
            let mut vbuf_mask = 0;
            for attribute in self.state.vertex_attributes.iter() {
                if self.state.dirty_vbuf_mask & (1 << attribute.buffer_index) == 0 {
                    continue;
                }
                let (buffer_desc, vb) =
                    match self.state.vertex_buffers[attribute.buffer_index as usize] {
                        (Some(ref vb_desc), Some(ref vb)) => (vb_desc.clone(), vb),
                        (_, _) => continue,
                    };

                let mut attribute_desc = attribute.clone();
                attribute_desc.offset += vb.offset as u32;
                if buffer_desc.step == wgt::VertexStepMode::Instance {
                    attribute_desc.offset += buffer_desc.stride * first_instance;
                }

                self.cmd_buffer.commands.push(C::SetVertexAttribute {
                    buffer: Some(vb.raw),
                    buffer_desc,
                    attribute_desc,
                });
                vbuf_mask |= 1 << attribute.buffer_index;
            }
            self.state.dirty_vbuf_mask ^= vbuf_mask;
        }
    }

    /// Pushes `BindSampler` commands for every texture unit whose texture or
    /// whose associated sampler changed, per the given dirty bitmasks.
    fn rebind_sampler_states(&mut self, dirty_textures: u32, dirty_samplers: u32) {
        for (texture_index, slot) in self.state.texture_slots.iter().enumerate() {
            if dirty_textures & (1 << texture_index) != 0
                || slot
                    .sampler_index
                    .is_some_and(|si| dirty_samplers & (1 << si) != 0)
            {
                let sampler = slot
                    .sampler_index
                    .and_then(|si| self.state.samplers[si as usize]);
                self.cmd_buffer
                    .commands
                    .push(C::BindSampler(texture_index as u32, sampler));
            }
        }
    }

    /// Flushes lazily-tracked vertex state before recording a draw.
    ///
    /// With `FULLY_FEATURED_INSTANCING` the hardware handles `first_instance`
    /// itself; otherwise instanced vertex buffers must be rebound with
    /// adjusted offsets whenever the emulated `first_instance` changes.
    fn prepare_draw(&mut self, first_instance: u32) {
        let emulated_first_instance_value = if self
            .private_caps
            .contains(super::PrivateCapabilities::FULLY_FEATURED_INSTANCING)
        {
            0
        } else {
            first_instance
        };

        if emulated_first_instance_value != self.state.active_first_instance {
            // The emulated value changed: every instanced buffer needs its
            // offset recomputed.
            self.state.dirty_vbuf_mask |= self.state.instance_vbuf_mask;
            self.state.active_first_instance = emulated_first_instance_value;
        }
        if self.state.dirty_vbuf_mask != 0 {
            self.rebind_vertex_data(emulated_first_instance_value);
        }
    }

    /// Applies program-level state when a pipeline is bound: the program
    /// object, the `first_instance` uniform location, the immediates layout,
    /// and the program's texture-to-sampler pairing.
    fn set_pipeline_inner(&mut self, inner: &super::PipelineInner) {
        self.cmd_buffer.commands.push(C::SetProgram(inner.program));

        self.state
            .first_instance_location
            .clone_from(&inner.first_instance_location);
        self.state
            .immediates_descs
            .clone_from(&inner.immediates_descs);

        // Re-pair texture units with the new program's sampler map and
        // refresh only the units that actually changed.
        let mut dirty_textures = 0u32;
        for (texture_index, (slot, &sampler_index)) in self
            .state
            .texture_slots
            .iter_mut()
            .zip(inner.sampler_map.iter())
            .enumerate()
        {
            if slot.sampler_index != sampler_index {
                slot.sampler_index = sampler_index;
                dirty_textures |= 1 << texture_index;
            }
        }
        if dirty_textures != 0 {
            self.rebind_sampler_states(dirty_textures, 0);
        }
    }
}
261
262impl crate::CommandEncoder for super::CommandEncoder {
263 type A = super::Api;
264
265 unsafe fn begin_encoding(&mut self, label: crate::Label) -> Result<(), crate::DeviceError> {
266 self.state = State::default();
267 self.cmd_buffer.label = label.map(String::from);
268 Ok(())
269 }
    unsafe fn discard_encoding(&mut self) {
        // Drop everything recorded so far; the buffer can be re-recorded.
        self.cmd_buffer.clear();
    }
    unsafe fn end_encoding(&mut self) -> Result<super::CommandBuffer, crate::DeviceError> {
        // Hand the recorded buffer to the caller, leaving a fresh empty one
        // in its place for the next encoding.
        Ok(mem::take(&mut self.cmd_buffer))
    }
    unsafe fn reset_all<I>(&mut self, _command_buffers: I) {
        // No-op: command buffers own plain heap storage that is freed on
        // drop, so there is nothing to recycle here.
    }
279
280 unsafe fn transition_buffers<'a, T>(&mut self, barriers: T)
281 where
282 T: Iterator<Item = crate::BufferBarrier<'a, super::Buffer>>,
283 {
284 if !self
285 .private_caps
286 .contains(super::PrivateCapabilities::MEMORY_BARRIERS)
287 {
288 return;
289 }
290 for bar in barriers {
291 if !bar.usage.from.contains(wgt::BufferUses::STORAGE_READ_WRITE) {
293 continue;
294 }
295 self.cmd_buffer
296 .commands
297 .push(C::BufferBarrier(bar.buffer.raw.unwrap(), bar.usage.to));
298 }
299 }
300
301 unsafe fn transition_textures<'a, T>(&mut self, barriers: T)
302 where
303 T: Iterator<Item = crate::TextureBarrier<'a, super::Texture>>,
304 {
305 if !self
306 .private_caps
307 .contains(super::PrivateCapabilities::MEMORY_BARRIERS)
308 {
309 return;
310 }
311
312 let mut combined_usage = wgt::TextureUses::empty();
313 for bar in barriers {
314 if !bar.usage.from.intersects(
317 wgt::TextureUses::STORAGE_READ_WRITE | wgt::TextureUses::STORAGE_WRITE_ONLY,
318 ) {
319 continue;
320 }
321 combined_usage |= bar.usage.to;
324 }
325
326 if !combined_usage.is_empty() {
327 self.cmd_buffer
328 .commands
329 .push(C::TextureBarrier(combined_usage));
330 }
331 }
332
    unsafe fn clear_buffer(&mut self, buffer: &super::Buffer, range: crate::MemoryRange) {
        // Record the fill; the actual clear happens at replay time.
        self.cmd_buffer.commands.push(C::ClearBuffer {
            dst: buffer.clone(),
            dst_target: buffer.target,
            range,
        });
    }
340
341 unsafe fn copy_buffer_to_buffer<T>(
342 &mut self,
343 src: &super::Buffer,
344 dst: &super::Buffer,
345 regions: T,
346 ) where
347 T: Iterator<Item = crate::BufferCopy>,
348 {
349 let (src_target, dst_target) = if src.target == dst.target {
350 (glow::COPY_READ_BUFFER, glow::COPY_WRITE_BUFFER)
351 } else {
352 (src.target, dst.target)
353 };
354 for copy in regions {
355 self.cmd_buffer.commands.push(C::CopyBufferToBuffer {
356 src: src.clone(),
357 src_target,
358 dst: dst.clone(),
359 dst_target,
360 copy,
361 })
362 }
363 }
364
    #[cfg(webgl)]
    unsafe fn copy_external_image_to_texture<T>(
        &mut self,
        src: &wgt::CopyExternalImageSourceInfo,
        dst: &super::Texture,
        dst_premultiplication: bool,
        regions: T,
    ) where
        T: Iterator<Item = crate::TextureCopy>,
    {
        // WebGL-only path: copy browser-supplied image sources (canvas,
        // video, ...) into a texture, one command per region.
        let (dst_raw, dst_target) = dst.inner.as_native();
        for copy in regions {
            self.cmd_buffer
                .commands
                .push(C::CopyExternalImageToTexture {
                    src: src.clone(),
                    dst: dst_raw,
                    dst_target,
                    dst_format: dst.format,
                    dst_premultiplication,
                    copy,
                })
        }
    }
389
390 unsafe fn copy_texture_to_texture<T>(
391 &mut self,
392 src: &super::Texture,
393 _src_usage: wgt::TextureUses,
394 dst: &super::Texture,
395 regions: T,
396 ) where
397 T: Iterator<Item = crate::TextureCopy>,
398 {
399 let (src_raw, src_target) = src.inner.as_native();
400 let (dst_raw, dst_target) = dst.inner.as_native();
401 for mut copy in regions {
402 copy.clamp_size_to_virtual(&src.copy_size, &dst.copy_size);
403 self.cmd_buffer.commands.push(C::CopyTextureToTexture {
404 src: src_raw,
405 src_target,
406 dst: dst_raw,
407 dst_target,
408 copy,
409 })
410 }
411 }
412
413 unsafe fn copy_buffer_to_texture<T>(
414 &mut self,
415 src: &super::Buffer,
416 dst: &super::Texture,
417 regions: T,
418 ) where
419 T: Iterator<Item = crate::BufferTextureCopy>,
420 {
421 let (dst_raw, dst_target) = dst.inner.as_native();
422
423 for mut copy in regions {
424 copy.clamp_size_to_virtual(&dst.copy_size);
425 self.cmd_buffer.commands.push(C::CopyBufferToTexture {
426 src: src.clone(),
427 src_target: src.target,
428 dst: dst_raw,
429 dst_target,
430 dst_format: dst.format,
431 copy,
432 })
433 }
434 }
435
436 unsafe fn copy_texture_to_buffer<T>(
437 &mut self,
438 src: &super::Texture,
439 _src_usage: wgt::TextureUses,
440 dst: &super::Buffer,
441 regions: T,
442 ) where
443 T: Iterator<Item = crate::BufferTextureCopy>,
444 {
445 let (src_raw, src_target) = src.inner.as_native();
446 for mut copy in regions {
447 copy.clamp_size_to_virtual(&src.copy_size);
448 self.cmd_buffer.commands.push(C::CopyTextureToBuffer {
449 src: src_raw,
450 src_target,
451 src_format: src.format,
452 dst: dst.clone(),
453 dst_target: dst.target,
454 copy,
455 })
456 }
457 }
458
    unsafe fn begin_query(&mut self, set: &super::QuerySet, index: u32) {
        // Resolve the raw query object now; replay only sees the handle.
        let query = set.queries[index as usize];
        self.cmd_buffer
            .commands
            .push(C::BeginQuery(query, set.target));
    }
    unsafe fn end_query(&mut self, set: &super::QuerySet, _index: u32) {
        // Only the target is needed to end the active query on it, so the
        // index is unused.
        self.cmd_buffer.commands.push(C::EndQuery(set.target));
    }
    unsafe fn write_timestamp(&mut self, set: &super::QuerySet, index: u32) {
        // Record a timestamp into the query object at `index`.
        let query = set.queries[index as usize];
        self.cmd_buffer.commands.push(C::TimestampQuery(query));
    }
    unsafe fn reset_queries(&mut self, _set: &super::QuerySet, _range: Range<u32>) {
        // No-op: query objects are simply overwritten on their next use, so
        // no explicit reset command is recorded.
    }
475 unsafe fn copy_query_results(
476 &mut self,
477 set: &super::QuerySet,
478 range: Range<u32>,
479 buffer: &super::Buffer,
480 offset: wgt::BufferAddress,
481 _stride: wgt::BufferSize,
482 ) {
483 let start = self.cmd_buffer.queries.len();
484 self.cmd_buffer
485 .queries
486 .extend_from_slice(&set.queries[range.start as usize..range.end as usize]);
487 let query_range = start as u32..self.cmd_buffer.queries.len() as u32;
488 self.cmd_buffer.commands.push(C::CopyQueryResults {
489 query_range,
490 dst: buffer.clone(),
491 dst_target: buffer.target,
492 dst_offset: offset,
493 });
494 }
495
    /// Records the setup for a render pass: timestamps, framebuffer
    /// attachments, viewport/scissor, draw buffers, and load-op clears.
    /// Resolve and invalidate work is queued in `self.state` for
    /// `end_render_pass` to emit.
    unsafe fn begin_render_pass(
        &mut self,
        desc: &crate::RenderPassDescriptor<super::QuerySet, super::TextureView>,
    ) -> Result<(), crate::DeviceError> {
        debug_assert!(self.state.end_of_pass_timestamp.is_none());
        if let Some(ref t) = desc.timestamp_writes {
            if let Some(index) = t.beginning_of_pass_write_index {
                unsafe { self.write_timestamp(t.query_set, index) }
            }
            // The end-of-pass query is stashed and emitted in `end_render_pass`.
            self.state.end_of_pass_timestamp = t
                .end_of_pass_write_index
                .map(|index| t.query_set.queries[index as usize]);
        }

        self.state.render_size = desc.extent;
        self.state.resolve_attachments.clear();
        self.state.invalidate_attachments.clear();
        if let Some(label) = desc.label {
            let range = self.cmd_buffer.add_marker(label);
            self.cmd_buffer.commands.push(C::PushDebugGroup(range));
            self.state.has_pass_label = true;
        }

        // External framebuffers come pre-assembled; we must not rebuild
        // their attachments ourselves.
        let rendering_to_external_framebuffer = desc
            .color_attachments
            .iter()
            .filter_map(|at| at.as_ref())
            .any(|at| match at.target.view.inner {
                #[cfg(webgl)]
                super::TextureInner::ExternalFramebuffer { .. } => true,
                #[cfg(native)]
                super::TextureInner::ExternalNativeFramebuffer { .. } => true,
                _ => false,
            });

        if rendering_to_external_framebuffer && desc.color_attachments.len() != 1 {
            panic!("Multiple render attachments with external framebuffers are not supported.");
        }

        // Sanity bound — attachment indices must fit the masks used below.
        assert!(desc.color_attachments.len() <= 32);

        match desc
            .color_attachments
            .first()
            .filter(|at| at.is_some())
            .and_then(|at| at.as_ref().map(|at| &at.target.view.inner))
        {
            // Default framebuffer: nothing to attach manually.
            Some(&super::TextureInner::DefaultRenderbuffer) => {
                self.cmd_buffer
                    .commands
                    .push(C::ResetFramebuffer { is_default: true });
            }
            _ => {
                // Our own FBO: bind every attachment explicitly.
                self.cmd_buffer
                    .commands
                    .push(C::ResetFramebuffer { is_default: false });

                for (i, cat) in desc.color_attachments.iter().enumerate() {
                    if let Some(cat) = cat.as_ref() {
                        let attachment = glow::COLOR_ATTACHMENT0 + i as u32;
                        if let Some(ref rat) = cat.resolve_target {
                            // Fast path: if the MSAA contents are not stored,
                            // render straight into the resolve target via
                            // multisampled-render-to-texture and skip the
                            // separate resolve at pass end.
                            if matches!(rat.view.inner, super::TextureInner::Texture { .. })
                                && self.private_caps.contains(
                                    super::PrivateCapabilities::MULTISAMPLED_RENDER_TO_TEXTURE,
                                )
                                && !cat.ops.contains(crate::AttachmentOps::STORE)
                                && i == 0
                            {
                                self.cmd_buffer.commands.push(C::BindAttachment {
                                    attachment,
                                    view: rat.view.clone(),
                                    depth_slice: None,
                                    sample_count: desc.sample_count,
                                });
                                continue;
                            }
                        }
                        self.cmd_buffer.commands.push(C::BindAttachment {
                            attachment,
                            view: cat.target.view.clone(),
                            depth_slice: cat.depth_slice,
                            sample_count: 1,
                        });
                        // Queue resolve / invalidate work for pass end.
                        if let Some(ref rat) = cat.resolve_target {
                            self.state
                                .resolve_attachments
                                .push((attachment, rat.view.clone()));
                        }
                        if cat.ops.contains(crate::AttachmentOps::STORE_DISCARD) {
                            self.state.invalidate_attachments.push(attachment);
                        }
                    }
                }
                if let Some(ref dsat) = desc.depth_stencil_attachment {
                    let aspects = dsat.target.view.aspects;
                    // Pick the attachment point matching the view's aspects.
                    let attachment = match aspects {
                        crate::FormatAspects::DEPTH => glow::DEPTH_ATTACHMENT,
                        crate::FormatAspects::STENCIL => glow::STENCIL_ATTACHMENT,
                        _ => glow::DEPTH_STENCIL_ATTACHMENT,
                    };
                    self.cmd_buffer.commands.push(C::BindAttachment {
                        attachment,
                        view: dsat.target.view.clone(),
                        depth_slice: None,
                        sample_count: 1,
                    });
                    if aspects.contains(crate::FormatAspects::DEPTH)
                        && dsat.depth_ops.contains(crate::AttachmentOps::STORE_DISCARD)
                    {
                        self.state
                            .invalidate_attachments
                            .push(glow::DEPTH_ATTACHMENT);
                    }
                    if aspects.contains(crate::FormatAspects::STENCIL)
                        && dsat
                            .stencil_ops
                            .contains(crate::AttachmentOps::STORE_DISCARD)
                    {
                        self.state
                            .invalidate_attachments
                            .push(glow::STENCIL_ATTACHMENT);
                    }
                }
            }
        }

        // Viewport and scissor default to the full pass extent.
        let rect = crate::Rect {
            x: 0,
            y: 0,
            w: desc.extent.width as i32,
            h: desc.extent.height as i32,
        };
        self.cmd_buffer.commands.push(C::SetScissor(rect.clone()));
        self.cmd_buffer.commands.push(C::SetViewport {
            rect,
            depth: 0.0..1.0,
        });

        if !rendering_to_external_framebuffer {
            // Enable draw buffers for all color attachments.
            self.cmd_buffer
                .commands
                .push(C::SetDrawColorBuffers(desc.color_attachments.len() as u8));
        }

        // Issue load-op clears for color attachments, choosing the clear
        // command variant by the attachment's sample type.
        for (i, cat) in desc
            .color_attachments
            .iter()
            .filter_map(|at| at.as_ref())
            .enumerate()
        {
            if cat.ops.contains(crate::AttachmentOps::LOAD_CLEAR) {
                let c = &cat.clear_value;
                self.cmd_buffer.commands.push(
                    match cat.target.view.format.sample_type(None, None).unwrap() {
                        wgt::TextureSampleType::Float { .. } => C::ClearColorF {
                            draw_buffer: i as u32,
                            color: [c.r as f32, c.g as f32, c.b as f32, c.a as f32],
                            is_srgb: cat.target.view.format.is_srgb(),
                        },
                        wgt::TextureSampleType::Uint => C::ClearColorU(
                            i as u32,
                            [c.r as u32, c.g as u32, c.b as u32, c.a as u32],
                        ),
                        wgt::TextureSampleType::Sint => C::ClearColorI(
                            i as u32,
                            [c.r as i32, c.g as i32, c.b as i32, c.a as i32],
                        ),
                        wgt::TextureSampleType::Depth => unreachable!(),
                    },
                );
            }
        }

        // Issue the depth/stencil load-op clears, combined when possible.
        if let Some(ref dsat) = desc.depth_stencil_attachment {
            let clear_depth = dsat.depth_ops.contains(crate::AttachmentOps::LOAD_CLEAR);
            let clear_stencil = dsat.stencil_ops.contains(crate::AttachmentOps::LOAD_CLEAR);

            if clear_depth && clear_stencil {
                self.cmd_buffer.commands.push(C::ClearDepthAndStencil(
                    dsat.clear_value.0,
                    dsat.clear_value.1,
                ));
            } else if clear_depth {
                self.cmd_buffer
                    .commands
                    .push(C::ClearDepth(dsat.clear_value.0));
            } else if clear_stencil {
                self.cmd_buffer
                    .commands
                    .push(C::ClearStencil(dsat.clear_value.1));
            }
        }
        Ok(())
    }
    /// Finishes the pass: emits queued resolves and invalidations, pops the
    /// debug group, resets per-pass state, and records the end-of-pass
    /// timestamp if one was requested.
    unsafe fn end_render_pass(&mut self) {
        // Emit the MSAA resolves queued by `begin_render_pass`.
        for (attachment, dst) in self.state.resolve_attachments.drain(..) {
            self.cmd_buffer.commands.push(C::ResolveAttachment {
                attachment,
                dst,
                size: self.state.render_size,
            });
        }
        // Discard attachments whose contents were marked STORE_DISCARD.
        if !self.state.invalidate_attachments.is_empty() {
            self.cmd_buffer.commands.push(C::InvalidateAttachments(
                self.state.invalidate_attachments.clone(),
            ));
            self.state.invalidate_attachments.clear();
        }
        if self.state.has_pass_label {
            self.cmd_buffer.commands.push(C::PopDebugGroup);
            self.state.has_pass_label = false;
        }
        // Reset per-pass vertex/pipeline tracking so the next pass starts clean.
        self.state.instance_vbuf_mask = 0;
        self.state.dirty_vbuf_mask = 0;
        self.state.active_first_instance = 0;
        self.state.color_targets.clear();
        for vat in &self.state.vertex_attributes {
            self.cmd_buffer
                .commands
                .push(C::UnsetVertexAttribute(vat.location));
        }
        self.state.vertex_attributes.clear();
        self.state.primitive = super::PrimitiveState::default();

        if let Some(query) = self.state.end_of_pass_timestamp.take() {
            self.cmd_buffer.commands.push(C::TimestampQuery(query));
        }
    }
733
    /// Records binding commands for one bind group, applying dynamic offsets
    /// in binding order and tracking texture/sampler pairings in shadow state.
    unsafe fn set_bind_group(
        &mut self,
        layout: &super::PipelineLayout,
        index: u32,
        group: &super::BindGroup,
        dynamic_offsets: &[wgt::DynamicOffset],
    ) {
        // `do_index` walks `dynamic_offsets`: offsets are consumed in the
        // order dynamic-offset bindings appear in the group.
        let mut do_index = 0;
        let mut dirty_textures = 0u32;
        let mut dirty_samplers = 0u32;
        let group_info = layout.group_infos[index as usize].as_ref().unwrap();

        for (binding_layout, raw_binding) in group_info.entries.iter().zip(group.contents.iter()) {
            // Map the binding number to the flat slot chosen at layout time.
            let slot = group_info.binding_to_slot[binding_layout.binding as usize] as u32;
            match *raw_binding {
                super::RawBinding::Buffer {
                    raw,
                    offset: base_offset,
                    size,
                } => {
                    let mut offset = base_offset;
                    let target = match binding_layout.ty {
                        wgt::BindingType::Buffer {
                            ty,
                            has_dynamic_offset,
                            min_binding_size: _,
                        } => {
                            if has_dynamic_offset {
                                offset += dynamic_offsets[do_index] as i32;
                                do_index += 1;
                            }
                            match ty {
                                wgt::BufferBindingType::Uniform => glow::UNIFORM_BUFFER,
                                wgt::BufferBindingType::Storage { .. } => {
                                    glow::SHADER_STORAGE_BUFFER
                                }
                            }
                        }
                        // Buffer raw bindings only occur for buffer layouts.
                        _ => unreachable!(),
                    };
                    self.cmd_buffer.commands.push(C::BindBuffer {
                        target,
                        slot,
                        buffer: raw,
                        offset,
                        size,
                    });
                }
                super::RawBinding::Sampler(sampler) => {
                    // Samplers are shadowed; actual binds happen via
                    // `rebind_sampler_states` below.
                    dirty_samplers |= 1 << slot;
                    self.state.samplers[slot as usize] = Some(sampler);
                }
                super::RawBinding::Texture {
                    raw,
                    target,
                    aspects,
                    ref mip_levels,
                } => {
                    dirty_textures |= 1 << slot;
                    self.state.texture_slots[slot as usize].tex_target = target;
                    self.cmd_buffer.commands.push(C::BindTexture {
                        slot,
                        texture: raw,
                        target,
                        aspects,
                        mip_levels: mip_levels.clone(),
                    });
                }
                super::RawBinding::Image(ref binding) => {
                    self.cmd_buffer.commands.push(C::BindImage {
                        slot,
                        binding: binding.clone(),
                    });
                }
            }
        }

        // Texture/sampler pairings may have changed; refresh affected units.
        self.rebind_sampler_states(dirty_textures, dirty_samplers);
    }
813
814 unsafe fn set_immediates(
815 &mut self,
816 _layout: &super::PipelineLayout,
817 offset_bytes: u32,
818 data: &[u32],
819 ) {
820 let start_words = offset_bytes / 4;
828 let end_words = start_words + data.len() as u32;
829 self.state.current_immediates_data[start_words as usize..end_words as usize]
830 .copy_from_slice(data);
831
832 for uniform in self.state.immediates_descs.iter().cloned() {
838 let uniform_size_words = uniform.size_bytes / 4;
839 let uniform_start_words = uniform.offset / 4;
840 let uniform_end_words = uniform_start_words + uniform_size_words;
841
842 let needs_updating =
844 start_words < uniform_end_words || uniform_start_words <= end_words;
845
846 if needs_updating {
847 let uniform_data = &self.state.current_immediates_data
848 [uniform_start_words as usize..uniform_end_words as usize];
849
850 let range = self.cmd_buffer.add_immediates_data(uniform_data);
851
852 self.cmd_buffer.commands.push(C::SetImmediates {
853 uniform,
854 offset: range.start,
855 });
856 }
857 }
858 }
859
    unsafe fn insert_debug_marker(&mut self, label: &str) {
        // The label text lives in the command buffer's data bytes; the
        // command refers to it by byte range.
        let range = self.cmd_buffer.add_marker(label);
        self.cmd_buffer.commands.push(C::InsertDebugMarker(range));
    }
    unsafe fn begin_debug_marker(&mut self, group_label: &str) {
        // Opens a debug group; must be balanced by `end_debug_marker`.
        let range = self.cmd_buffer.add_marker(group_label);
        self.cmd_buffer.commands.push(C::PushDebugGroup(range));
    }
    unsafe fn end_debug_marker(&mut self) {
        // Closes the group opened by the matching `begin_debug_marker`.
        self.cmd_buffer.commands.push(C::PopDebugGroup);
    }
871
    /// Binds a render pipeline, diffing against the shadowed state so only
    /// actually-changed GL state produces commands.
    unsafe fn set_render_pipeline(&mut self, pipeline: &super::RenderPipeline) {
        self.state.topology = conv::map_primitive_topology(pipeline.primitive.topology);

        if self
            .private_caps
            .contains(super::PrivateCapabilities::VERTEX_BUFFER_LAYOUT)
        {
            // Capable devices take the attribute layout directly (no buffer
            // bound yet — that happens in `rebind_vertex_data`).
            for vat in pipeline.vertex_attributes.iter() {
                let vb = pipeline.vertex_buffers[vat.buffer_index as usize]
                    .as_ref()
                    .unwrap();
                self.cmd_buffer.commands.push(C::SetVertexAttribute {
                    buffer: None,
                    buffer_desc: vb.clone(),
                    attribute_desc: vat.clone(),
                });
            }
        } else {
            // Fallback: unset the previous pipeline's attributes and mark
            // the new ones dirty so they get re-specified before drawing.
            for vat in &self.state.vertex_attributes {
                self.cmd_buffer
                    .commands
                    .push(C::UnsetVertexAttribute(vat.location));
            }
            self.state.vertex_attributes.clear();

            self.state.dirty_vbuf_mask = 0;
            for vat in pipeline.vertex_attributes.iter() {
                self.state.dirty_vbuf_mask |= 1 << vat.buffer_index;
                self.state.vertex_attributes.push(vat.clone());
            }
        }

        // Adopt the pipeline's vertex-buffer layouts, tracking which slots
        // are instanced and which now differ from the shadowed layout.
        self.state.instance_vbuf_mask = 0;
        for (index, (&mut (ref mut state_desc, _), pipe_desc)) in self
            .state
            .vertex_buffers
            .iter_mut()
            .zip(pipeline.vertex_buffers.iter())
            .enumerate()
        {
            let Some(pipe_desc) = pipe_desc else {
                continue;
            };
            if pipe_desc.step == wgt::VertexStepMode::Instance {
                self.state.instance_vbuf_mask |= 1 << index;
            }
            if state_desc.as_ref() != Some(pipe_desc) {
                self.state.dirty_vbuf_mask |= 1 << index;
                *state_desc = Some(pipe_desc.clone());
            }
        }

        // Program object, immediates layout, sampler map.
        self.set_pipeline_inner(&pipeline.inner);

        // Rasterizer state, only when changed.
        let prim_state = conv::map_primitive_state(&pipeline.primitive);
        if prim_state != self.state.primitive {
            self.cmd_buffer
                .commands
                .push(C::SetPrimitive(prim_state.clone()));
            self.state.primitive = prim_state;
        }

        // Depth/stencil state.
        let mut aspects = crate::FormatAspects::empty();
        if pipeline.depth_bias != self.state.depth_bias {
            self.state.depth_bias = pipeline.depth_bias;
            self.cmd_buffer
                .commands
                .push(C::SetDepthBias(pipeline.depth_bias));
        }
        if let Some(ref depth) = pipeline.depth {
            aspects |= crate::FormatAspects::DEPTH;
            self.cmd_buffer.commands.push(C::SetDepth(depth.clone()));
        }
        if let Some(ref stencil) = pipeline.stencil {
            aspects |= crate::FormatAspects::STENCIL;
            self.state.stencil = stencil.clone();
            self.rebind_stencil_func();
            // Collapse identical front/back ops into one command.
            if stencil.front.ops == stencil.back.ops
                && stencil.front.mask_write == stencil.back.mask_write
            {
                self.cmd_buffer.commands.push(C::SetStencilOps {
                    face: glow::FRONT_AND_BACK,
                    write_mask: stencil.front.mask_write,
                    ops: stencil.front.ops.clone(),
                });
            } else {
                self.cmd_buffer.commands.push(C::SetStencilOps {
                    face: glow::FRONT,
                    write_mask: stencil.front.mask_write,
                    ops: stencil.front.ops.clone(),
                });
                self.cmd_buffer.commands.push(C::SetStencilOps {
                    face: glow::BACK,
                    write_mask: stencil.back.mask_write,
                    ops: stencil.back.ops.clone(),
                });
            }
        }
        self.cmd_buffer
            .commands
            .push(C::ConfigureDepthStencil(aspects));

        // Multisampling state, only when changed.
        if pipeline.alpha_to_coverage_enabled != self.state.alpha_to_coverage_enabled {
            self.state.alpha_to_coverage_enabled = pipeline.alpha_to_coverage_enabled;
            self.cmd_buffer
                .commands
                .push(C::SetAlphaToCoverage(pipeline.alpha_to_coverage_enabled));
        }

        // Color-target/blend state: per-draw-buffer when targets differ,
        // otherwise a single command covering all draw buffers.
        if self.state.color_targets[..] != pipeline.color_targets[..] {
            if pipeline
                .color_targets
                .iter()
                .skip(1)
                .any(|ct| *ct != pipeline.color_targets[0])
            {
                for (index, ct) in pipeline.color_targets.iter().enumerate() {
                    self.cmd_buffer.commands.push(C::SetColorTarget {
                        draw_buffer_index: Some(index as u32),
                        desc: ct.clone(),
                    });
                }
            } else {
                self.cmd_buffer.commands.push(C::SetColorTarget {
                    draw_buffer_index: None,
                    desc: pipeline.color_targets.first().cloned().unwrap_or_default(),
                });
            }
        }
        self.state.color_targets.clear();
        for ct in pipeline.color_targets.iter() {
            self.state.color_targets.push(ct.clone());
        }

        // Clip distances, only when the count changed.
        if pipeline.inner.clip_distance_count != self.state.clip_distance_count {
            self.cmd_buffer.commands.push(C::SetClipDistances {
                old_count: self.state.clip_distance_count,
                new_count: pipeline.inner.clip_distance_count,
            });
            self.state.clip_distance_count = pipeline.inner.clip_distance_count;
        }
    }
1023
1024 unsafe fn set_index_buffer<'a>(
1025 &mut self,
1026 binding: crate::BufferBinding<'a, super::Buffer>,
1027 format: wgt::IndexFormat,
1028 ) {
1029 self.state.index_offset = binding.offset;
1030 self.state.index_format = format;
1031 self.cmd_buffer
1032 .commands
1033 .push(C::SetIndexBuffer(binding.buffer.raw.unwrap()));
1034 }
1035 unsafe fn set_vertex_buffer<'a>(
1036 &mut self,
1037 index: u32,
1038 binding: crate::BufferBinding<'a, super::Buffer>,
1039 ) {
1040 self.state.dirty_vbuf_mask |= 1 << index;
1041 let (_, ref mut vb) = self.state.vertex_buffers[index as usize];
1042 *vb = Some(super::BufferBinding {
1043 raw: binding.buffer.raw.unwrap(),
1044 offset: binding.offset,
1045 });
1046 }
1047 unsafe fn set_viewport(&mut self, rect: &crate::Rect<f32>, depth: Range<f32>) {
1048 self.cmd_buffer.commands.push(C::SetViewport {
1049 rect: crate::Rect {
1050 x: rect.x as i32,
1051 y: rect.y as i32,
1052 w: rect.w as i32,
1053 h: rect.h as i32,
1054 },
1055 depth,
1056 });
1057 }
1058 unsafe fn set_scissor_rect(&mut self, rect: &crate::Rect<u32>) {
1059 self.cmd_buffer.commands.push(C::SetScissor(crate::Rect {
1060 x: rect.x as i32,
1061 y: rect.y as i32,
1062 w: rect.w as i32,
1063 h: rect.h as i32,
1064 }));
1065 }
    unsafe fn set_stencil_reference(&mut self, value: u32) {
        // The reference is part of the stencil-func command in GL, so update
        // the shadowed state and re-emit the whole function.
        self.state.stencil.front.reference = value;
        self.state.stencil.back.reference = value;
        self.rebind_stencil_func();
    }
    unsafe fn set_blend_constants(&mut self, color: &[f32; 4]) {
        // RGBA blend constant, recorded directly.
        self.cmd_buffer.commands.push(C::SetBlendConstant(*color));
    }
1074
    unsafe fn draw(
        &mut self,
        first_vertex: u32,
        vertex_count: u32,
        first_instance: u32,
        instance_count: u32,
    ) {
        // Flush lazily-tracked vertex state; on devices without native
        // `first_instance` support this rebinds instanced buffers with
        // adjusted offsets.
        self.prepare_draw(first_instance);
        #[allow(
            clippy::clone_on_copy,
            reason = "False positive when cloning glow::UniformLocation"
        )]
        self.cmd_buffer.commands.push(C::Draw {
            topology: self.state.topology,
            first_vertex,
            vertex_count,
            first_instance,
            instance_count,
            first_instance_location: self.state.first_instance_location.clone(),
        });
    }
    unsafe fn draw_indexed(
        &mut self,
        first_index: u32,
        index_count: u32,
        base_vertex: i32,
        first_instance: u32,
        instance_count: u32,
    ) {
        self.prepare_draw(first_instance);
        // Translate the index format into a GL type and an element size so
        // `first_index` can be folded into a byte offset.
        let (index_size, index_type) = match self.state.index_format {
            wgt::IndexFormat::Uint16 => (2, glow::UNSIGNED_SHORT),
            wgt::IndexFormat::Uint32 => (4, glow::UNSIGNED_INT),
        };
        let index_offset = self.state.index_offset + index_size * first_index as wgt::BufferAddress;
        #[allow(
            clippy::clone_on_copy,
            reason = "False positive when cloning glow::UniformLocation"
        )]
        self.cmd_buffer.commands.push(C::DrawIndexed {
            topology: self.state.topology,
            index_type,
            index_offset,
            index_count,
            base_vertex,
            first_instance,
            instance_count,
            first_instance_location: self.state.first_instance_location.clone(),
        });
    }
    unsafe fn draw_mesh_tasks(
        &mut self,
        _group_count_x: u32,
        _group_count_y: u32,
        _group_count_z: u32,
    ) {
        // Mesh shading is never exposed by this backend, so this entry
        // point should be unreachable — TODO confirm against feature flags.
        unreachable!()
    }
    unsafe fn draw_indirect(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    ) {
        // Indirect draws cannot participate in `first_instance` emulation,
        // so bindings are prepared for a first instance of 0.
        self.prepare_draw(0);
        // Multi-draw is unrolled into one command per draw record.
        for draw in 0..draw_count as wgt::BufferAddress {
            let indirect_offset =
                offset + draw * size_of::<wgt::DrawIndirectArgs>() as wgt::BufferAddress;
            #[allow(
                clippy::clone_on_copy,
                reason = "False positive when cloning glow::UniformLocation"
            )]
            self.cmd_buffer.commands.push(C::DrawIndirect {
                topology: self.state.topology,
                indirect_buf: buffer.raw.unwrap(),
                indirect_offset,
                first_instance_location: self.state.first_instance_location.clone(),
            });
        }
    }
1155 unsafe fn draw_indexed_indirect(
1156 &mut self,
1157 buffer: &super::Buffer,
1158 offset: wgt::BufferAddress,
1159 draw_count: u32,
1160 ) {
1161 self.prepare_draw(0);
1162 let index_type = match self.state.index_format {
1163 wgt::IndexFormat::Uint16 => glow::UNSIGNED_SHORT,
1164 wgt::IndexFormat::Uint32 => glow::UNSIGNED_INT,
1165 };
1166 for draw in 0..draw_count as wgt::BufferAddress {
1167 let indirect_offset =
1168 offset + draw * size_of::<wgt::DrawIndexedIndirectArgs>() as wgt::BufferAddress;
1169 #[allow(
1170 clippy::clone_on_copy,
1171 reason = "False positive when cloning glow::UniformLocation"
1172 )]
1173 self.cmd_buffer.commands.push(C::DrawIndexedIndirect {
1174 topology: self.state.topology,
1175 index_type,
1176 indirect_buf: buffer.raw.unwrap(),
1177 indirect_offset,
1178 first_instance_location: self.state.first_instance_location.clone(),
1179 });
1180 }
1181 }
    // Indirect mesh-shader draws are not supported by this backend; see the
    // note on `draw_mesh_tasks`. NOTE(review): presumably gated off by the
    // adapter's feature set — confirm.
    unsafe fn draw_mesh_tasks_indirect(
        &mut self,
        _buffer: &<Self::A as crate::Api>::Buffer,
        _offset: wgt::BufferAddress,
        _draw_count: u32,
    ) {
        unreachable!()
    }
    // GPU-driven `draw_indirect_count` is not supported by this backend.
    // NOTE(review): presumably the multi-draw-indirect-count feature is never
    // advertised here, so this cannot be reached — confirm against the
    // adapter's reported features.
    unsafe fn draw_indirect_count(
        &mut self,
        _buffer: &super::Buffer,
        _offset: wgt::BufferAddress,
        _count_buffer: &super::Buffer,
        _count_offset: wgt::BufferAddress,
        _max_count: u32,
    ) {
        unreachable!()
    }
    // GPU-driven `draw_indexed_indirect_count` is not supported by this
    // backend; see the note on `draw_indirect_count`.
    unsafe fn draw_indexed_indirect_count(
        &mut self,
        _buffer: &super::Buffer,
        _offset: wgt::BufferAddress,
        _count_buffer: &super::Buffer,
        _count_offset: wgt::BufferAddress,
        _max_count: u32,
    ) {
        unreachable!()
    }
    // GPU-driven mesh-shader draws are not supported by this backend; see the
    // notes on `draw_mesh_tasks` and `draw_indirect_count`.
    unsafe fn draw_mesh_tasks_indirect_count(
        &mut self,
        _buffer: &<Self::A as crate::Api>::Buffer,
        _offset: wgt::BufferAddress,
        _count_buffer: &<Self::A as crate::Api>::Buffer,
        _count_offset: wgt::BufferAddress,
        _max_count: u32,
    ) {
        unreachable!()
    }
1220
1221 unsafe fn begin_compute_pass(&mut self, desc: &crate::ComputePassDescriptor<super::QuerySet>) {
1224 debug_assert!(self.state.end_of_pass_timestamp.is_none());
1225 if let Some(ref t) = desc.timestamp_writes {
1226 if let Some(index) = t.beginning_of_pass_write_index {
1227 unsafe { self.write_timestamp(t.query_set, index) }
1228 }
1229 self.state.end_of_pass_timestamp = t
1230 .end_of_pass_write_index
1231 .map(|index| t.query_set.queries[index as usize]);
1232 }
1233
1234 if let Some(label) = desc.label {
1235 let range = self.cmd_buffer.add_marker(label);
1236 self.cmd_buffer.commands.push(C::PushDebugGroup(range));
1237 self.state.has_pass_label = true;
1238 }
1239 }
1240 unsafe fn end_compute_pass(&mut self) {
1241 if self.state.has_pass_label {
1242 self.cmd_buffer.commands.push(C::PopDebugGroup);
1243 self.state.has_pass_label = false;
1244 }
1245
1246 if let Some(query) = self.state.end_of_pass_timestamp.take() {
1247 self.cmd_buffer.commands.push(C::TimestampQuery(query));
1248 }
1249 }
1250
    // Bind a compute pipeline; the plumbing shared with render pipelines
    // lives in `set_pipeline_inner`.
    unsafe fn set_compute_pipeline(&mut self, pipeline: &super::ComputePipeline) {
        self.set_pipeline_inner(&pipeline.inner);
    }
1254
1255 unsafe fn dispatch_workgroups(&mut self, count: [u32; 3]) {
1256 if count.contains(&0) {
1258 return;
1259 }
1260 self.cmd_buffer.commands.push(C::Dispatch(count));
1261 }
1262 unsafe fn dispatch_workgroups_indirect(
1263 &mut self,
1264 buffer: &super::Buffer,
1265 offset: wgt::BufferAddress,
1266 ) {
1267 self.cmd_buffer.commands.push(C::DispatchIndirect {
1268 indirect_buf: buffer.raw.unwrap(),
1269 indirect_offset: offset,
1270 });
1271 }
1272
    // Ray-tracing acceleration structures are not implemented for this
    // backend; any call panics via `unimplemented!`.
    unsafe fn build_acceleration_structures<'a, T>(
        &mut self,
        _descriptor_count: u32,
        _descriptors: T,
    ) where
        super::Api: 'a,
        T: IntoIterator<
            Item = crate::BuildAccelerationStructureDescriptor<
                'a,
                super::Buffer,
                super::AccelerationStructure,
            >,
        >,
    {
        unimplemented!()
    }
1289
    // Ray-tracing acceleration structures are not implemented for this
    // backend; see `build_acceleration_structures`.
    unsafe fn place_acceleration_structure_barrier(
        &mut self,
        _barriers: crate::AccelerationStructureBarrier,
    ) {
        unimplemented!()
    }
1296
    // Ray-tracing acceleration structures are not implemented for this
    // backend; see `build_acceleration_structures`.
    unsafe fn copy_acceleration_structure_to_acceleration_structure(
        &mut self,
        _src: &super::AccelerationStructure,
        _dst: &super::AccelerationStructure,
        _copy: wgt::AccelerationStructureCopy,
    ) {
        unimplemented!()
    }
1305
    // Ray-tracing acceleration structures are not implemented for this
    // backend; see `build_acceleration_structures`.
    unsafe fn read_acceleration_structure_compact_size(
        &mut self,
        _acceleration_structure: &super::AccelerationStructure,
        _buf: &super::Buffer,
    ) {
        unimplemented!()
    }
1313
    // Ray-tracing acceleration structures are not implemented for this
    // backend; see `build_acceleration_structures`.
    unsafe fn set_acceleration_structure_dependencies(
        _command_buffers: &[&super::CommandBuffer],
        _dependencies: &[&super::AccelerationStructure],
    ) {
        unimplemented!()
    }
1320}