1use alloc::string::String;
2use core::{mem, ops::Range};
3
4use arrayvec::ArrayVec;
5
6use super::{conv, Command as C};
7
/// Cached description of one texture slot: the GL bind target of the bound
/// texture and the sampler slot (if any) paired with it.
#[derive(Clone, Copy, Debug, Default)]
struct TextureSlotDesc {
    // GL bind target (e.g. `TEXTURE_2D`) of the texture bound to this slot.
    tex_target: super::BindTarget,
    // Index into `State::samplers`, or `None` if no sampler is associated.
    sampler_index: Option<u8>,
}
13
/// Encoder-side shadow of the GL pipeline state.
///
/// Commands are recorded against this cache so redundant state changes can be
/// skipped and deferred (dirty-masked) rebinds can be flushed lazily at draw
/// time by `prepare_draw`.
pub(super) struct State {
    // Current GL primitive topology (e.g. `glow::TRIANGLES`).
    topology: u32,
    // Primitive state of the currently bound render pipeline.
    primitive: super::PrimitiveState,
    // Format of the currently bound index buffer.
    index_format: wgt::IndexFormat,
    // Base byte offset into the bound index buffer.
    index_offset: wgt::BufferAddress,
    // Per-slot vertex-buffer layout plus the bound buffer, if any.
    vertex_buffers:
        [(super::VertexBufferDesc, Option<super::BufferBinding>); crate::MAX_VERTEX_BUFFERS],
    // Vertex attributes of the current pipeline.
    vertex_attributes: ArrayVec<super::AttributeDesc, { super::MAX_VERTEX_ATTRIBUTES }>,
    // Color target descriptors of the current pipeline.
    color_targets: ArrayVec<super::ColorTargetDesc, { crate::MAX_COLOR_ATTACHMENTS }>,
    // Cached front/back stencil state.
    stencil: super::StencilState,
    // Cached depth-bias state.
    depth_bias: wgt::DepthBiasState,
    // Whether alpha-to-coverage is currently enabled.
    alpha_to_coverage_enabled: bool,
    // Sampler objects bound per sampler slot.
    samplers: [Option<glow::Sampler>; super::MAX_SAMPLERS],
    // Per-texture-slot target and associated sampler index.
    texture_slots: [TextureSlotDesc; super::MAX_TEXTURE_SLOTS],
    // Extent of the current render pass (used when resolving attachments).
    render_size: wgt::Extent3d,
    // (attachment, view) pairs to resolve when the pass ends.
    resolve_attachments: ArrayVec<(u32, super::TextureView), { crate::MAX_COLOR_ATTACHMENTS }>,
    // Attachments to invalidate when the pass ends; +2 for depth and stencil.
    invalidate_attachments: ArrayVec<u32, { crate::MAX_COLOR_ATTACHMENTS + 2 }>,
    // Whether the pass pushed a debug group that must be popped at pass end.
    has_pass_label: bool,
    // Bitmask of vertex-buffer slots that use instance step mode.
    instance_vbuf_mask: usize,
    // Bitmask of vertex-buffer slots needing a rebind before the next draw.
    dirty_vbuf_mask: usize,
    // The `first_instance` value currently baked into instance-stepped
    // bindings (for emulated instancing).
    active_first_instance: u32,
    // Uniform location used to pass `first_instance` to the shader, if any.
    first_instance_location: Option<glow::UniformLocation>,
    // Immediate-data (push-constant-like) uniform descriptors of the
    // current pipeline.
    immediates_descs: ArrayVec<super::ImmediateDesc, { super::MAX_IMMEDIATES_COMMANDS }>,
    // CPU-side shadow copy of all immediate data words.
    current_immediates_data: [u32; super::MAX_IMMEDIATES],
    // Query to write a timestamp into when the pass ends, if requested.
    end_of_pass_timestamp: Option<glow::Query>,
    // Number of user clip distances enabled by the current pipeline.
    clip_distance_count: u32,
}
42
impl Default for State {
    /// Produces an empty, all-defaults encoder state.
    ///
    /// NOTE(review): written out by hand — presumably because the length of
    /// `current_immediates_data` prevents deriving `Default` for the array;
    /// confirm before replacing with `#[derive(Default)]`.
    fn default() -> Self {
        Self {
            topology: Default::default(),
            primitive: Default::default(),
            index_format: Default::default(),
            index_offset: Default::default(),
            vertex_buffers: Default::default(),
            vertex_attributes: Default::default(),
            color_targets: Default::default(),
            stencil: Default::default(),
            depth_bias: Default::default(),
            alpha_to_coverage_enabled: Default::default(),
            samplers: Default::default(),
            texture_slots: Default::default(),
            render_size: Default::default(),
            resolve_attachments: Default::default(),
            invalidate_attachments: Default::default(),
            has_pass_label: Default::default(),
            instance_vbuf_mask: Default::default(),
            dirty_vbuf_mask: Default::default(),
            active_first_instance: Default::default(),
            first_instance_location: Default::default(),
            immediates_descs: Default::default(),
            // Explicit zero-init; the array has no derived `Default`.
            current_immediates_data: [0; super::MAX_IMMEDIATES],
            end_of_pass_timestamp: Default::default(),
            clip_distance_count: Default::default(),
        }
    }
}
73
74impl super::CommandBuffer {
75 fn clear(&mut self) {
76 self.label = None;
77 self.commands.clear();
78 self.data_bytes.clear();
79 self.queries.clear();
80 }
81
82 fn add_marker(&mut self, marker: &str) -> Range<u32> {
83 let start = self.data_bytes.len() as u32;
84 self.data_bytes.extend(marker.as_bytes());
85 start..self.data_bytes.len() as u32
86 }
87
88 fn add_immediates_data(&mut self, data: &[u32]) -> Range<u32> {
89 let data_raw = bytemuck::cast_slice(data);
90 let start = self.data_bytes.len();
91 assert!(start < u32::MAX as usize);
92 self.data_bytes.extend_from_slice(data_raw);
93 let end = self.data_bytes.len();
94 assert!(end < u32::MAX as usize);
95 (start as u32)..(end as u32)
96 }
97}
98
impl Drop for super::CommandEncoder {
    fn drop(&mut self) {
        use crate::CommandEncoder;
        // Throw away any partially recorded commands so they are not leaked,
        // then decrement the global encoder counter.
        unsafe { self.discard_encoding() }
        self.counters.command_encoders.sub(1);
    }
}
106
impl super::CommandEncoder {
    /// Re-emits stencil-function commands from the cached `state.stencil`.
    ///
    /// If front and back faces share function, read mask, and reference, a
    /// single `FRONT_AND_BACK` command is recorded; otherwise one per face.
    fn rebind_stencil_func(&mut self) {
        // Helper building a `SetStencilFunc` command for one face.
        fn make(s: &super::StencilSide, face: u32) -> C {
            C::SetStencilFunc {
                face,
                function: s.function,
                reference: s.reference,
                read_mask: s.mask_read,
            }
        }

        let s = &self.state.stencil;
        if s.front.function == s.back.function
            && s.front.mask_read == s.back.mask_read
            && s.front.reference == s.back.reference
        {
            self.cmd_buffer
                .commands
                .push(make(&s.front, glow::FRONT_AND_BACK));
        } else {
            self.cmd_buffer.commands.push(make(&s.front, glow::FRONT));
            self.cmd_buffer.commands.push(make(&s.back, glow::BACK));
        }
    }

    /// Re-binds the vertex buffers/attributes flagged in
    /// `state.dirty_vbuf_mask`, folding `first_instance` into the offsets of
    /// instance-stepped buffers (to emulate `first_instance` on targets
    /// without native support).
    fn rebind_vertex_data(&mut self, first_instance: u32) {
        if self
            .private_caps
            .contains(super::PrivateCapabilities::VERTEX_BUFFER_LAYOUT)
        {
            // Fast path: driver supports separate vertex-buffer layout state,
            // so one `SetVertexBuffer` per dirty slot suffices.
            for (index, pair) in self.state.vertex_buffers.iter().enumerate() {
                if self.state.dirty_vbuf_mask & (1 << index) == 0 {
                    continue;
                }
                let (buffer_desc, vb) = match *pair {
                    // No buffer bound to this slot yet; leave the bit set.
                    (_, None) => continue,
                    (ref vb_desc, Some(ref vb)) => (vb_desc.clone(), vb),
                };
                // Instance-stepped buffers bake `first_instance` into the
                // binding offset.
                let instance_offset = match buffer_desc.step {
                    wgt::VertexStepMode::Vertex => 0,
                    wgt::VertexStepMode::Instance => first_instance * buffer_desc.stride,
                };

                self.cmd_buffer.commands.push(C::SetVertexBuffer {
                    index: index as u32,
                    buffer: super::BufferBinding {
                        raw: vb.raw,
                        offset: vb.offset + instance_offset as wgt::BufferAddress,
                    },
                    buffer_desc,
                });
                // Clear this slot's dirty bit.
                self.state.dirty_vbuf_mask ^= 1 << index;
            }
        } else {
            // Slow path: re-specify each attribute individually, folding the
            // buffer offset (and any instance offset) into the attribute
            // offset.
            let mut vbuf_mask = 0;
            for attribute in self.state.vertex_attributes.iter() {
                if self.state.dirty_vbuf_mask & (1 << attribute.buffer_index) == 0 {
                    continue;
                }
                let (buffer_desc, vb) =
                    match self.state.vertex_buffers[attribute.buffer_index as usize] {
                        (_, None) => continue,
                        (ref vb_desc, Some(ref vb)) => (vb_desc.clone(), vb),
                    };

                let mut attribute_desc = attribute.clone();
                attribute_desc.offset += vb.offset as u32;
                if buffer_desc.step == wgt::VertexStepMode::Instance {
                    attribute_desc.offset += buffer_desc.stride * first_instance;
                }

                self.cmd_buffer.commands.push(C::SetVertexAttribute {
                    buffer: Some(vb.raw),
                    buffer_desc,
                    attribute_desc,
                });
                vbuf_mask |= 1 << attribute.buffer_index;
            }
            // Clear only the bits actually rebound above.
            self.state.dirty_vbuf_mask ^= vbuf_mask;
        }
    }

    /// Emits `BindSampler` commands for every texture slot whose texture or
    /// associated sampler is flagged dirty in the given bitmasks.
    fn rebind_sampler_states(&mut self, dirty_textures: u32, dirty_samplers: u32) {
        for (texture_index, slot) in self.state.texture_slots.iter().enumerate() {
            if dirty_textures & (1 << texture_index) != 0
                || slot
                    .sampler_index
                    .is_some_and(|si| dirty_samplers & (1 << si) != 0)
            {
                let sampler = slot
                    .sampler_index
                    .and_then(|si| self.state.samplers[si as usize]);
                self.cmd_buffer
                    .commands
                    .push(C::BindSampler(texture_index as u32, sampler));
            }
        }
    }

    /// Called before every draw: re-derives instance-stepped vertex bindings
    /// when `first_instance` must be emulated, then flushes any dirty slots.
    fn prepare_draw(&mut self, first_instance: u32) {
        // With native first-instance support no emulation offset is needed,
        // so treat it as 0 for rebinding purposes.
        let emulated_first_instance_value = if self
            .private_caps
            .contains(super::PrivateCapabilities::FULLY_FEATURED_INSTANCING)
        {
            0
        } else {
            first_instance
        };

        if emulated_first_instance_value != self.state.active_first_instance {
            // The emulated value changed: mark all instance-stepped buffers
            // dirty so their baked-in offsets get recomputed.
            self.state.dirty_vbuf_mask |= self.state.instance_vbuf_mask;
            self.state.active_first_instance = emulated_first_instance_value;
        }
        if self.state.dirty_vbuf_mask != 0 {
            self.rebind_vertex_data(emulated_first_instance_value);
        }
    }

    /// Activates `inner`'s program and refreshes pipeline-derived cached
    /// state: first-instance uniform location, immediates layout, and the
    /// sampler-to-texture-slot mapping (rebinding slots whose pairing
    /// changed).
    #[allow(clippy::clone_on_copy)] fn set_pipeline_inner(&mut self, inner: &super::PipelineInner) {
        self.cmd_buffer.commands.push(C::SetProgram(inner.program));

        self.state
            .first_instance_location
            .clone_from(&inner.first_instance_location);
        self.state
            .immediates_descs
            .clone_from(&inner.immediates_descs);

        // Mark textures dirty whose paired sampler slot differs under the
        // new pipeline's sampler map.
        let mut dirty_textures = 0u32;
        for (texture_index, (slot, &sampler_index)) in self
            .state
            .texture_slots
            .iter_mut()
            .zip(inner.sampler_map.iter())
            .enumerate()
        {
            if slot.sampler_index != sampler_index {
                slot.sampler_index = sampler_index;
                dirty_textures |= 1 << texture_index;
            }
        }
        if dirty_textures != 0 {
            self.rebind_sampler_states(dirty_textures, 0);
        }
    }
}
260
261impl crate::CommandEncoder for super::CommandEncoder {
262 type A = super::Api;
263
264 unsafe fn begin_encoding(&mut self, label: crate::Label) -> Result<(), crate::DeviceError> {
265 self.state = State::default();
266 self.cmd_buffer.label = label.map(String::from);
267 Ok(())
268 }
    unsafe fn discard_encoding(&mut self) {
        // Drop everything recorded since `begin_encoding`.
        self.cmd_buffer.clear();
    }
    unsafe fn end_encoding(&mut self) -> Result<super::CommandBuffer, crate::DeviceError> {
        // Hand the recorded buffer to the caller, leaving a fresh default
        // buffer in place for the next recording.
        Ok(mem::take(&mut self.cmd_buffer))
    }
    unsafe fn reset_all<I>(&mut self, _command_buffers: I) {
        // Intentionally a no-op: command buffers here are plain recorded data
        // with no GL-side resources to reset.
    }
278
    unsafe fn transition_buffers<'a, T>(&mut self, barriers: T)
    where
        T: Iterator<Item = crate::BufferBarrier<'a, super::Buffer>>,
    {
        // Without memory-barrier support there is nothing meaningful to emit.
        if !self
            .private_caps
            .contains(super::PrivateCapabilities::MEMORY_BARRIERS)
        {
            return;
        }
        for bar in barriers {
            // Only transitions away from writable-storage use need a GL
            // barrier; reads and other states are already coherent.
            if !bar.usage.from.contains(wgt::BufferUses::STORAGE_READ_WRITE) {
                continue;
            }
            self.cmd_buffer
                .commands
                .push(C::BufferBarrier(bar.buffer.raw.unwrap(), bar.usage.to));
        }
    }
299
    unsafe fn transition_textures<'a, T>(&mut self, barriers: T)
    where
        T: Iterator<Item = crate::TextureBarrier<'a, super::Texture>>,
    {
        // Without memory-barrier support there is nothing meaningful to emit.
        if !self
            .private_caps
            .contains(super::PrivateCapabilities::MEMORY_BARRIERS)
        {
            return;
        }

        // Unlike buffers, all texture transitions are folded into a single
        // barrier command covering the union of destination usages.
        let mut combined_usage = wgt::TextureUses::empty();
        for bar in barriers {
            // Only transitions away from storage-writable use need a barrier.
            if !bar.usage.from.intersects(
                wgt::TextureUses::STORAGE_READ_WRITE | wgt::TextureUses::STORAGE_WRITE_ONLY,
            ) {
                continue;
            }
            combined_usage |= bar.usage.to;
        }

        if !combined_usage.is_empty() {
            self.cmd_buffer
                .commands
                .push(C::TextureBarrier(combined_usage));
        }
    }
331
332 unsafe fn clear_buffer(&mut self, buffer: &super::Buffer, range: crate::MemoryRange) {
333 self.cmd_buffer.commands.push(C::ClearBuffer {
334 dst: buffer.clone(),
335 dst_target: buffer.target,
336 range,
337 });
338 }
339
340 unsafe fn copy_buffer_to_buffer<T>(
341 &mut self,
342 src: &super::Buffer,
343 dst: &super::Buffer,
344 regions: T,
345 ) where
346 T: Iterator<Item = crate::BufferCopy>,
347 {
348 let (src_target, dst_target) = if src.target == dst.target {
349 (glow::COPY_READ_BUFFER, glow::COPY_WRITE_BUFFER)
350 } else {
351 (src.target, dst.target)
352 };
353 for copy in regions {
354 self.cmd_buffer.commands.push(C::CopyBufferToBuffer {
355 src: src.clone(),
356 src_target,
357 dst: dst.clone(),
358 dst_target,
359 copy,
360 })
361 }
362 }
363
    /// Records copies from an external Web image source (canvas, video, …)
    /// into `dst`, one command per region, optionally premultiplying alpha.
    /// WebGL builds only.
    #[cfg(webgl)]
    unsafe fn copy_external_image_to_texture<T>(
        &mut self,
        src: &wgt::CopyExternalImageSourceInfo,
        dst: &super::Texture,
        dst_premultiplication: bool,
        regions: T,
    ) where
        T: Iterator<Item = crate::TextureCopy>,
    {
        let (dst_raw, dst_target) = dst.inner.as_native();
        for copy in regions {
            self.cmd_buffer
                .commands
                .push(C::CopyExternalImageToTexture {
                    src: src.clone(),
                    dst: dst_raw,
                    dst_target,
                    dst_format: dst.format,
                    dst_premultiplication,
                    copy,
                })
        }
    }
388
    /// Records texture-to-texture copies, one command per region, clamping
    /// each region against both textures' copy sizes.
    unsafe fn copy_texture_to_texture<T>(
        &mut self,
        src: &super::Texture,
        _src_usage: wgt::TextureUses,
        dst: &super::Texture,
        regions: T,
    ) where
        T: Iterator<Item = crate::TextureCopy>,
    {
        let (src_raw, src_target) = src.inner.as_native();
        let (dst_raw, dst_target) = dst.inner.as_native();
        for mut copy in regions {
            // Keep the copy inside both textures' physical extents.
            copy.clamp_size_to_virtual(&src.copy_size, &dst.copy_size);
            self.cmd_buffer.commands.push(C::CopyTextureToTexture {
                src: src_raw,
                src_target,
                dst: dst_raw,
                dst_target,
                copy,
            })
        }
    }
411
    /// Records buffer-to-texture copies, one command per region, clamping
    /// each region against the destination texture's copy size.
    unsafe fn copy_buffer_to_texture<T>(
        &mut self,
        src: &super::Buffer,
        dst: &super::Texture,
        regions: T,
    ) where
        T: Iterator<Item = crate::BufferTextureCopy>,
    {
        let (dst_raw, dst_target) = dst.inner.as_native();

        for mut copy in regions {
            copy.clamp_size_to_virtual(&dst.copy_size);
            self.cmd_buffer.commands.push(C::CopyBufferToTexture {
                src: src.clone(),
                src_target: src.target,
                dst: dst_raw,
                dst_target,
                dst_format: dst.format,
                copy,
            })
        }
    }
434
    /// Records texture-to-buffer copies, one command per region, clamping
    /// each region against the source texture's copy size.
    unsafe fn copy_texture_to_buffer<T>(
        &mut self,
        src: &super::Texture,
        _src_usage: wgt::TextureUses,
        dst: &super::Buffer,
        regions: T,
    ) where
        T: Iterator<Item = crate::BufferTextureCopy>,
    {
        let (src_raw, src_target) = src.inner.as_native();
        for mut copy in regions {
            copy.clamp_size_to_virtual(&src.copy_size);
            self.cmd_buffer.commands.push(C::CopyTextureToBuffer {
                src: src_raw,
                src_target,
                src_format: src.format,
                dst: dst.clone(),
                dst_target: dst.target,
                copy,
            })
        }
    }
457
    unsafe fn begin_query(&mut self, set: &super::QuerySet, index: u32) {
        // Begin the GL query object backing `set[index]`.
        let query = set.queries[index as usize];
        self.cmd_buffer
            .commands
            .push(C::BeginQuery(query, set.target));
    }
    unsafe fn end_query(&mut self, set: &super::QuerySet, _index: u32) {
        // GL ends the active query per target, so only the target is needed;
        // the index is irrelevant here.
        self.cmd_buffer.commands.push(C::EndQuery(set.target));
    }
    unsafe fn write_timestamp(&mut self, set: &super::QuerySet, index: u32) {
        let query = set.queries[index as usize];
        self.cmd_buffer.commands.push(C::TimestampQuery(query));
    }
    unsafe fn reset_queries(&mut self, _set: &super::QuerySet, _range: Range<u32>) {
        // Intentionally a no-op: GL query objects need no explicit reset
        // before reuse.
    }
    /// Copies the results of queries `range` of `set` into `buffer` at
    /// `offset`.
    ///
    /// The query handles are staged into `cmd_buffer.queries`; the recorded
    /// command references them by index range so the command stays `Copy`-
    /// friendly. `_stride` is ignored by this backend.
    /// NOTE(review): confirm callers only rely on tightly packed results.
    unsafe fn copy_query_results(
        &mut self,
        set: &super::QuerySet,
        range: Range<u32>,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        _stride: wgt::BufferSize,
    ) {
        let start = self.cmd_buffer.queries.len();
        self.cmd_buffer
            .queries
            .extend_from_slice(&set.queries[range.start as usize..range.end as usize]);
        let query_range = start as u32..self.cmd_buffer.queries.len() as u32;
        self.cmd_buffer.commands.push(C::CopyQueryResults {
            query_range,
            dst: buffer.clone(),
            dst_target: buffer.target,
            dst_offset: offset,
        });
    }
494
    /// Begins a render pass: records framebuffer setup, attachment binding,
    /// viewport/scissor defaults, and load-op clears, and caches pass state
    /// (render size, resolves, invalidations) for `end_render_pass`.
    unsafe fn begin_render_pass(
        &mut self,
        desc: &crate::RenderPassDescriptor<super::QuerySet, super::TextureView>,
    ) -> Result<(), crate::DeviceError> {
        // Timestamp writes: begin-of-pass is emitted now; end-of-pass is
        // stashed in state and emitted by `end_render_pass`.
        debug_assert!(self.state.end_of_pass_timestamp.is_none());
        if let Some(ref t) = desc.timestamp_writes {
            if let Some(index) = t.beginning_of_pass_write_index {
                unsafe { self.write_timestamp(t.query_set, index) }
            }
            self.state.end_of_pass_timestamp = t
                .end_of_pass_write_index
                .map(|index| t.query_set.queries[index as usize]);
        }

        self.state.render_size = desc.extent;
        self.state.resolve_attachments.clear();
        self.state.invalidate_attachments.clear();
        if let Some(label) = desc.label {
            let range = self.cmd_buffer.add_marker(label);
            self.cmd_buffer.commands.push(C::PushDebugGroup(range));
            self.state.has_pass_label = true;
        }

        // External framebuffers (e.g. the browser's default framebuffer)
        // cannot be combined with other attachments.
        let rendering_to_external_framebuffer = desc
            .color_attachments
            .iter()
            .filter_map(|at| at.as_ref())
            .any(|at| match at.target.view.inner {
                #[cfg(webgl)]
                super::TextureInner::ExternalFramebuffer { .. } => true,
                #[cfg(native)]
                super::TextureInner::ExternalNativeFramebuffer { .. } => true,
                _ => false,
            });

        if rendering_to_external_framebuffer && desc.color_attachments.len() != 1 {
            panic!("Multiple render attachments with external framebuffers are not supported.");
        }

        // Attachment indices are tracked in 32-bit masks elsewhere.
        assert!(desc.color_attachments.len() <= 32);

        match desc
            .color_attachments
            .first()
            .filter(|at| at.is_some())
            .and_then(|at| at.as_ref().map(|at| &at.target.view.inner))
        {
            // Rendering to the default framebuffer: no attachments to bind.
            Some(&super::TextureInner::DefaultRenderbuffer) => {
                self.cmd_buffer
                    .commands
                    .push(C::ResetFramebuffer { is_default: true });
            }
            _ => {
                // Custom framebuffer: bind every color attachment, recording
                // resolve targets and store-discard invalidations for pass end.
                self.cmd_buffer
                    .commands
                    .push(C::ResetFramebuffer { is_default: false });

                for (i, cat) in desc.color_attachments.iter().enumerate() {
                    if let Some(cat) = cat.as_ref() {
                        let attachment = glow::COLOR_ATTACHMENT0 + i as u32;
                        self.cmd_buffer.commands.push(C::BindAttachment {
                            attachment,
                            view: cat.target.view.clone(),
                            depth_slice: cat.depth_slice,
                        });
                        if let Some(ref rat) = cat.resolve_target {
                            self.state
                                .resolve_attachments
                                .push((attachment, rat.view.clone()));
                        }
                        if cat.ops.contains(crate::AttachmentOps::STORE_DISCARD) {
                            self.state.invalidate_attachments.push(attachment);
                        }
                    }
                }
                if let Some(ref dsat) = desc.depth_stencil_attachment {
                    // Pick the attachment point by which aspects the view has.
                    let aspects = dsat.target.view.aspects;
                    let attachment = match aspects {
                        crate::FormatAspects::DEPTH => glow::DEPTH_ATTACHMENT,
                        crate::FormatAspects::STENCIL => glow::STENCIL_ATTACHMENT,
                        _ => glow::DEPTH_STENCIL_ATTACHMENT,
                    };
                    self.cmd_buffer.commands.push(C::BindAttachment {
                        attachment,
                        view: dsat.target.view.clone(),
                        depth_slice: None,
                    });
                    // Depth and stencil discards are tracked separately since
                    // their store ops are independent.
                    if aspects.contains(crate::FormatAspects::DEPTH)
                        && dsat.depth_ops.contains(crate::AttachmentOps::STORE_DISCARD)
                    {
                        self.state
                            .invalidate_attachments
                            .push(glow::DEPTH_ATTACHMENT);
                    }
                    if aspects.contains(crate::FormatAspects::STENCIL)
                        && dsat
                            .stencil_ops
                            .contains(crate::AttachmentOps::STORE_DISCARD)
                    {
                        self.state
                            .invalidate_attachments
                            .push(glow::STENCIL_ATTACHMENT);
                    }
                }
            }
        }

        // Default viewport and scissor cover the whole pass extent.
        let rect = crate::Rect {
            x: 0,
            y: 0,
            w: desc.extent.width as i32,
            h: desc.extent.height as i32,
        };
        self.cmd_buffer.commands.push(C::SetScissor(rect.clone()));
        self.cmd_buffer.commands.push(C::SetViewport {
            rect,
            depth: 0.0..1.0,
        });

        if !rendering_to_external_framebuffer {
            // Configure the draw-buffer list to match the attachment count.
            self.cmd_buffer
                .commands
                .push(C::SetDrawColorBuffers(desc.color_attachments.len() as u8));
        }

        // Emit load-op clears for color attachments, dispatched on the
        // attachment's sample type (float / uint / sint).
        for (i, cat) in desc
            .color_attachments
            .iter()
            .filter_map(|at| at.as_ref())
            .enumerate()
        {
            if cat.ops.contains(crate::AttachmentOps::LOAD_CLEAR) {
                let c = &cat.clear_value;
                self.cmd_buffer.commands.push(
                    match cat.target.view.format.sample_type(None, None).unwrap() {
                        wgt::TextureSampleType::Float { .. } => C::ClearColorF {
                            draw_buffer: i as u32,
                            color: [c.r as f32, c.g as f32, c.b as f32, c.a as f32],
                            is_srgb: cat.target.view.format.is_srgb(),
                        },
                        wgt::TextureSampleType::Uint => C::ClearColorU(
                            i as u32,
                            [c.r as u32, c.g as u32, c.b as u32, c.a as u32],
                        ),
                        wgt::TextureSampleType::Sint => C::ClearColorI(
                            i as u32,
                            [c.r as i32, c.g as i32, c.b as i32, c.a as i32],
                        ),
                        // Depth formats cannot appear as color attachments.
                        wgt::TextureSampleType::Depth => unreachable!(),
                    },
                );
            }
        }

        // Depth/stencil load-op clears, combined into one command when both
        // aspects are cleared.
        if let Some(ref dsat) = desc.depth_stencil_attachment {
            let clear_depth = dsat.depth_ops.contains(crate::AttachmentOps::LOAD_CLEAR);
            let clear_stencil = dsat.stencil_ops.contains(crate::AttachmentOps::LOAD_CLEAR);

            if clear_depth && clear_stencil {
                self.cmd_buffer.commands.push(C::ClearDepthAndStencil(
                    dsat.clear_value.0,
                    dsat.clear_value.1,
                ));
            } else if clear_depth {
                self.cmd_buffer
                    .commands
                    .push(C::ClearDepth(dsat.clear_value.0));
            } else if clear_stencil {
                self.cmd_buffer
                    .commands
                    .push(C::ClearStencil(dsat.clear_value.1));
            }
        }
        Ok(())
    }
    /// Ends the current render pass: performs pending resolves and
    /// invalidations, closes the pass debug group, and resets the per-pass
    /// vertex/attachment state cached on the encoder.
    unsafe fn end_render_pass(&mut self) {
        // Resolve MSAA attachments recorded by `begin_render_pass`.
        for (attachment, dst) in self.state.resolve_attachments.drain(..) {
            self.cmd_buffer.commands.push(C::ResolveAttachment {
                attachment,
                dst,
                size: self.state.render_size,
            });
        }
        // Invalidate attachments whose store op was discard.
        if !self.state.invalidate_attachments.is_empty() {
            self.cmd_buffer.commands.push(C::InvalidateAttachments(
                self.state.invalidate_attachments.clone(),
            ));
            self.state.invalidate_attachments.clear();
        }
        if self.state.has_pass_label {
            self.cmd_buffer.commands.push(C::PopDebugGroup);
            self.state.has_pass_label = false;
        }
        // Reset cached vertex/instancing state so the next pass starts clean.
        self.state.instance_vbuf_mask = 0;
        self.state.dirty_vbuf_mask = 0;
        self.state.active_first_instance = 0;
        self.state.color_targets.clear();
        // Disable every attribute that was enabled during this pass.
        for vat in &self.state.vertex_attributes {
            self.cmd_buffer
                .commands
                .push(C::UnsetVertexAttribute(vat.location));
        }
        self.state.vertex_attributes.clear();
        self.state.primitive = super::PrimitiveState::default();

        // Emit the deferred end-of-pass timestamp, if one was requested.
        if let Some(query) = self.state.end_of_pass_timestamp.take() {
            self.cmd_buffer.commands.push(C::TimestampQuery(query));
        }
    }
711
    /// Binds `group` at `index`, applying `dynamic_offsets` in declaration
    /// order to the buffers that declared `has_dynamic_offset`, then rebinds
    /// any sampler states made dirty by new texture or sampler bindings.
    unsafe fn set_bind_group(
        &mut self,
        layout: &super::PipelineLayout,
        index: u32,
        group: &super::BindGroup,
        dynamic_offsets: &[wgt::DynamicOffset],
    ) {
        // Cursor into `dynamic_offsets`; consumed one per dynamic buffer.
        let mut do_index = 0;
        let mut dirty_textures = 0u32;
        let mut dirty_samplers = 0u32;
        let group_info = &layout.group_infos[index as usize];

        for (binding_layout, raw_binding) in group_info.entries.iter().zip(group.contents.iter()) {
            // Map the WebGPU binding number to the flat GL slot.
            let slot = group_info.binding_to_slot[binding_layout.binding as usize] as u32;
            match *raw_binding {
                super::RawBinding::Buffer {
                    raw,
                    offset: base_offset,
                    size,
                } => {
                    let mut offset = base_offset;
                    let target = match binding_layout.ty {
                        wgt::BindingType::Buffer {
                            ty,
                            has_dynamic_offset,
                            min_binding_size: _,
                        } => {
                            if has_dynamic_offset {
                                offset += dynamic_offsets[do_index] as i32;
                                do_index += 1;
                            }
                            match ty {
                                wgt::BufferBindingType::Uniform => glow::UNIFORM_BUFFER,
                                wgt::BufferBindingType::Storage { .. } => {
                                    glow::SHADER_STORAGE_BUFFER
                                }
                            }
                        }
                        // A `RawBinding::Buffer` always comes from a buffer
                        // binding layout.
                        _ => unreachable!(),
                    };
                    self.cmd_buffer.commands.push(C::BindBuffer {
                        target,
                        slot,
                        buffer: raw,
                        offset,
                        size,
                    });
                }
                super::RawBinding::Sampler(sampler) => {
                    // Defer the GL bind: sampler slots are resolved against
                    // texture slots in `rebind_sampler_states` below.
                    dirty_samplers |= 1 << slot;
                    self.state.samplers[slot as usize] = Some(sampler);
                }
                super::RawBinding::Texture {
                    raw,
                    target,
                    aspects,
                    ref mip_levels,
                } => {
                    dirty_textures |= 1 << slot;
                    self.state.texture_slots[slot as usize].tex_target = target;
                    self.cmd_buffer.commands.push(C::BindTexture {
                        slot,
                        texture: raw,
                        target,
                        aspects,
                        mip_levels: mip_levels.clone(),
                    });
                }
                super::RawBinding::Image(ref binding) => {
                    self.cmd_buffer.commands.push(C::BindImage {
                        slot,
                        binding: binding.clone(),
                    });
                }
            }
        }

        self.rebind_sampler_states(dirty_textures, dirty_samplers);
    }
791
792 unsafe fn set_immediates(
793 &mut self,
794 _layout: &super::PipelineLayout,
795 _stages: wgt::ShaderStages,
796 offset_bytes: u32,
797 data: &[u32],
798 ) {
799 let start_words = offset_bytes / 4;
807 let end_words = start_words + data.len() as u32;
808 self.state.current_immediates_data[start_words as usize..end_words as usize]
809 .copy_from_slice(data);
810
811 for uniform in self.state.immediates_descs.iter().cloned() {
817 let uniform_size_words = uniform.size_bytes / 4;
818 let uniform_start_words = uniform.offset / 4;
819 let uniform_end_words = uniform_start_words + uniform_size_words;
820
821 let needs_updating =
823 start_words < uniform_end_words || uniform_start_words <= end_words;
824
825 if needs_updating {
826 let uniform_data = &self.state.current_immediates_data
827 [uniform_start_words as usize..uniform_end_words as usize];
828
829 let range = self.cmd_buffer.add_immediates_data(uniform_data);
830
831 self.cmd_buffer.commands.push(C::SetImmediates {
832 uniform,
833 offset: range.start,
834 });
835 }
836 }
837 }
838
    unsafe fn insert_debug_marker(&mut self, label: &str) {
        // Stash the marker text in the data side-buffer; the command stores
        // only the byte range.
        let range = self.cmd_buffer.add_marker(label);
        self.cmd_buffer.commands.push(C::InsertDebugMarker(range));
    }
    unsafe fn begin_debug_marker(&mut self, group_label: &str) {
        let range = self.cmd_buffer.add_marker(group_label);
        self.cmd_buffer.commands.push(C::PushDebugGroup(range));
    }
    unsafe fn end_debug_marker(&mut self) {
        self.cmd_buffer.commands.push(C::PopDebugGroup);
    }
850
    /// Binds a render pipeline, diffing each piece of pipeline state against
    /// the encoder cache and recording commands only for what changed.
    unsafe fn set_render_pipeline(&mut self, pipeline: &super::RenderPipeline) {
        self.state.topology = conv::map_primitive_topology(pipeline.primitive.topology);

        // Vertex attributes: with native vertex-buffer-layout support the
        // attribute formats can be set now, decoupled from buffer bindings;
        // otherwise attributes are cached and re-specified at draw time.
        if self
            .private_caps
            .contains(super::PrivateCapabilities::VERTEX_BUFFER_LAYOUT)
        {
            for vat in pipeline.vertex_attributes.iter() {
                let vb = &pipeline.vertex_buffers[vat.buffer_index as usize];
                // Attribute layout only; the buffer is bound separately.
                self.cmd_buffer.commands.push(C::SetVertexAttribute {
                    buffer: None,
                    buffer_desc: vb.clone(),
                    attribute_desc: vat.clone(),
                });
            }
        } else {
            // Disable the previous pipeline's attributes before caching the
            // new set, marking their buffers dirty for `rebind_vertex_data`.
            for vat in &self.state.vertex_attributes {
                self.cmd_buffer
                    .commands
                    .push(C::UnsetVertexAttribute(vat.location));
            }
            self.state.vertex_attributes.clear();

            self.state.dirty_vbuf_mask = 0;
            for vat in pipeline.vertex_attributes.iter() {
                // Attribute offsets include the buffer binding offset, so
                // each referenced buffer must be rebound.
                self.state.dirty_vbuf_mask |= 1 << vat.buffer_index;
                self.state.vertex_attributes.push(vat.clone());
            }
        }

        // Refresh per-slot vertex-buffer layouts, tracking which slots are
        // instance-stepped and which changed (and therefore need rebinding).
        self.state.instance_vbuf_mask = 0;
        for (index, (&mut (ref mut state_desc, _), pipe_desc)) in self
            .state
            .vertex_buffers
            .iter_mut()
            .zip(pipeline.vertex_buffers.iter())
            .enumerate()
        {
            if pipe_desc.step == wgt::VertexStepMode::Instance {
                self.state.instance_vbuf_mask |= 1 << index;
            }
            if state_desc != pipe_desc {
                self.state.dirty_vbuf_mask |= 1 << index;
                *state_desc = pipe_desc.clone();
            }
        }

        self.set_pipeline_inner(&pipeline.inner);

        // Rasterizer state: only emit when it differs from the cache.
        let prim_state = conv::map_primitive_state(&pipeline.primitive);
        if prim_state != self.state.primitive {
            self.cmd_buffer
                .commands
                .push(C::SetPrimitive(prim_state.clone()));
            self.state.primitive = prim_state;
        }

        // Depth & stencil: collect which aspects the pipeline uses so the
        // final `ConfigureDepthStencil` can enable/disable the tests.
        let mut aspects = crate::FormatAspects::empty();
        if pipeline.depth_bias != self.state.depth_bias {
            self.state.depth_bias = pipeline.depth_bias;
            self.cmd_buffer
                .commands
                .push(C::SetDepthBias(pipeline.depth_bias));
        }
        if let Some(ref depth) = pipeline.depth {
            aspects |= crate::FormatAspects::DEPTH;
            self.cmd_buffer.commands.push(C::SetDepth(depth.clone()));
        }
        if let Some(ref stencil) = pipeline.stencil {
            aspects |= crate::FormatAspects::STENCIL;
            self.state.stencil = stencil.clone();
            self.rebind_stencil_func();
            // Collapse to a single FRONT_AND_BACK command when both faces
            // share ops and write mask.
            if stencil.front.ops == stencil.back.ops
                && stencil.front.mask_write == stencil.back.mask_write
            {
                self.cmd_buffer.commands.push(C::SetStencilOps {
                    face: glow::FRONT_AND_BACK,
                    write_mask: stencil.front.mask_write,
                    ops: stencil.front.ops.clone(),
                });
            } else {
                self.cmd_buffer.commands.push(C::SetStencilOps {
                    face: glow::FRONT,
                    write_mask: stencil.front.mask_write,
                    ops: stencil.front.ops.clone(),
                });
                self.cmd_buffer.commands.push(C::SetStencilOps {
                    face: glow::BACK,
                    write_mask: stencil.back.mask_write,
                    ops: stencil.back.ops.clone(),
                });
            }
        }
        self.cmd_buffer
            .commands
            .push(C::ConfigureDepthStencil(aspects));

        if pipeline.alpha_to_coverage_enabled != self.state.alpha_to_coverage_enabled {
            self.state.alpha_to_coverage_enabled = pipeline.alpha_to_coverage_enabled;
            self.cmd_buffer
                .commands
                .push(C::SetAlphaToCoverage(pipeline.alpha_to_coverage_enabled));
        }

        // Color targets: if all targets are identical, one broadcast command
        // (draw_buffer_index: None) suffices; otherwise one per target.
        if self.state.color_targets[..] != pipeline.color_targets[..] {
            if pipeline
                .color_targets
                .iter()
                .skip(1)
                .any(|ct| *ct != pipeline.color_targets[0])
            {
                for (index, ct) in pipeline.color_targets.iter().enumerate() {
                    self.cmd_buffer.commands.push(C::SetColorTarget {
                        draw_buffer_index: Some(index as u32),
                        desc: ct.clone(),
                    });
                }
            } else {
                self.cmd_buffer.commands.push(C::SetColorTarget {
                    draw_buffer_index: None,
                    desc: pipeline.color_targets.first().cloned().unwrap_or_default(),
                });
            }
        }
        self.state.color_targets.clear();
        for ct in pipeline.color_targets.iter() {
            self.state.color_targets.push(ct.clone());
        }

        // User clip distances: enable/disable only the delta.
        if pipeline.inner.clip_distance_count != self.state.clip_distance_count {
            self.cmd_buffer.commands.push(C::SetClipDistances {
                old_count: self.state.clip_distance_count,
                new_count: pipeline.inner.clip_distance_count,
            });
            self.state.clip_distance_count = pipeline.inner.clip_distance_count;
        }
    }
997
    unsafe fn set_index_buffer<'a>(
        &mut self,
        binding: crate::BufferBinding<'a, super::Buffer>,
        format: wgt::IndexFormat,
    ) {
        // Cache offset and format; draw commands compute the final index
        // pointer from these at record time.
        self.state.index_offset = binding.offset;
        self.state.index_format = format;
        self.cmd_buffer
            .commands
            .push(C::SetIndexBuffer(binding.buffer.raw.unwrap()));
    }
    unsafe fn set_vertex_buffer<'a>(
        &mut self,
        index: u32,
        binding: crate::BufferBinding<'a, super::Buffer>,
    ) {
        // Only mark the slot dirty; the actual GL binding is deferred to
        // `prepare_draw`/`rebind_vertex_data` before the next draw.
        self.state.dirty_vbuf_mask |= 1 << index;
        let (_, ref mut vb) = self.state.vertex_buffers[index as usize];
        *vb = Some(super::BufferBinding {
            raw: binding.buffer.raw.unwrap(),
            offset: binding.offset,
        });
    }
1021 unsafe fn set_viewport(&mut self, rect: &crate::Rect<f32>, depth: Range<f32>) {
1022 self.cmd_buffer.commands.push(C::SetViewport {
1023 rect: crate::Rect {
1024 x: rect.x as i32,
1025 y: rect.y as i32,
1026 w: rect.w as i32,
1027 h: rect.h as i32,
1028 },
1029 depth,
1030 });
1031 }
1032 unsafe fn set_scissor_rect(&mut self, rect: &crate::Rect<u32>) {
1033 self.cmd_buffer.commands.push(C::SetScissor(crate::Rect {
1034 x: rect.x as i32,
1035 y: rect.y as i32,
1036 w: rect.w as i32,
1037 h: rect.h as i32,
1038 }));
1039 }
    unsafe fn set_stencil_reference(&mut self, value: u32) {
        // Same reference value for both faces; re-emit the stencil-func
        // commands so GL picks it up.
        self.state.stencil.front.reference = value;
        self.state.stencil.back.reference = value;
        self.rebind_stencil_func();
    }
    unsafe fn set_blend_constants(&mut self, color: &[f32; 4]) {
        self.cmd_buffer.commands.push(C::SetBlendConstant(*color));
    }
1048
    /// Records a non-indexed draw; `prepare_draw` first flushes any dirty
    /// vertex bindings (including emulated `first_instance` offsets).
    unsafe fn draw(
        &mut self,
        first_vertex: u32,
        vertex_count: u32,
        first_instance: u32,
        instance_count: u32,
    ) {
        self.prepare_draw(first_instance);
        #[allow(clippy::clone_on_copy)] self.cmd_buffer.commands.push(C::Draw {
            topology: self.state.topology,
            first_vertex,
            vertex_count,
            first_instance,
            instance_count,
            first_instance_location: self.state.first_instance_location.clone(),
        });
    }
    /// Records an indexed draw, translating `first_index` into a byte offset
    /// relative to the bound index buffer using the cached format and offset.
    unsafe fn draw_indexed(
        &mut self,
        first_index: u32,
        index_count: u32,
        base_vertex: i32,
        first_instance: u32,
        instance_count: u32,
    ) {
        self.prepare_draw(first_instance);
        let (index_size, index_type) = match self.state.index_format {
            wgt::IndexFormat::Uint16 => (2, glow::UNSIGNED_SHORT),
            wgt::IndexFormat::Uint32 => (4, glow::UNSIGNED_INT),
        };
        let index_offset = self.state.index_offset + index_size * first_index as wgt::BufferAddress;
        #[allow(clippy::clone_on_copy)] self.cmd_buffer.commands.push(C::DrawIndexed {
            topology: self.state.topology,
            index_type,
            index_offset,
            index_count,
            base_vertex,
            first_instance,
            instance_count,
            first_instance_location: self.state.first_instance_location.clone(),
        });
    }
    unsafe fn draw_mesh_tasks(
        &mut self,
        _group_count_x: u32,
        _group_count_y: u32,
        _group_count_z: u32,
    ) {
        // Mesh shading is not supported by this backend; callers are
        // expected to be feature-gated before reaching here.
        unreachable!()
    }
    /// Records indirect draws, one command per argument struct; GL consumes a
    /// single indirect record per call, so multi-draw is unrolled here.
    /// `first_instance` emulation is unavailable for indirect draws, hence
    /// `prepare_draw(0)`.
    unsafe fn draw_indirect(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    ) {
        self.prepare_draw(0);
        for draw in 0..draw_count as wgt::BufferAddress {
            // Argument structs are tightly packed in the indirect buffer.
            let indirect_offset =
                offset + draw * size_of::<wgt::DrawIndirectArgs>() as wgt::BufferAddress;
            #[allow(clippy::clone_on_copy)] self.cmd_buffer.commands.push(C::DrawIndirect {
                topology: self.state.topology,
                indirect_buf: buffer.raw.unwrap(),
                indirect_offset,
                first_instance_location: self.state.first_instance_location.clone(),
            });
        }
    }
    /// Indexed variant of `draw_indirect`; the index type comes from the
    /// cached index-buffer format.
    unsafe fn draw_indexed_indirect(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    ) {
        self.prepare_draw(0);
        let index_type = match self.state.index_format {
            wgt::IndexFormat::Uint16 => glow::UNSIGNED_SHORT,
            wgt::IndexFormat::Uint32 => glow::UNSIGNED_INT,
        };
        for draw in 0..draw_count as wgt::BufferAddress {
            let indirect_offset =
                offset + draw * size_of::<wgt::DrawIndexedIndirectArgs>() as wgt::BufferAddress;
            #[allow(clippy::clone_on_copy)] self.cmd_buffer.commands.push(C::DrawIndexedIndirect {
                topology: self.state.topology,
                index_type,
                indirect_buf: buffer.raw.unwrap(),
                indirect_offset,
                first_instance_location: self.state.first_instance_location.clone(),
            });
        }
    }
    /// Indirect mesh-task draws are not implemented by this backend.
    ///
    /// NOTE(review): presumably unreachable because mesh shading is rejected
    /// before reaching this encoder — confirm upstream.
    unsafe fn draw_mesh_tasks_indirect(
        &mut self,
        _buffer: &<Self::A as crate::Api>::Buffer,
        _offset: wgt::BufferAddress,
        _draw_count: u32,
    ) {
        unreachable!()
    }
    /// Indirect draws with a GPU-side draw count are not supported here;
    /// this path must never be taken (hence `unreachable!`).
    unsafe fn draw_indirect_count(
        &mut self,
        _buffer: &super::Buffer,
        _offset: wgt::BufferAddress,
        _count_buffer: &super::Buffer,
        _count_offset: wgt::BufferAddress,
        _max_count: u32,
    ) {
        unreachable!()
    }
    /// Indexed indirect draws with a GPU-side draw count are not supported
    /// here; this path must never be taken (hence `unreachable!`).
    unsafe fn draw_indexed_indirect_count(
        &mut self,
        _buffer: &super::Buffer,
        _offset: wgt::BufferAddress,
        _count_buffer: &super::Buffer,
        _count_offset: wgt::BufferAddress,
        _max_count: u32,
    ) {
        unreachable!()
    }
    /// Mesh-task indirect draws with a GPU-side count are not implemented by
    /// this backend; this path must never be taken (hence `unreachable!`).
    unsafe fn draw_mesh_tasks_indirect_count(
        &mut self,
        _buffer: &<Self::A as crate::Api>::Buffer,
        _offset: wgt::BufferAddress,
        _count_buffer: &<Self::A as crate::Api>::Buffer,
        _count_offset: wgt::BufferAddress,
        _max_count: u32,
    ) {
        unreachable!()
    }
1182
1183 unsafe fn begin_compute_pass(&mut self, desc: &crate::ComputePassDescriptor<super::QuerySet>) {
1186 debug_assert!(self.state.end_of_pass_timestamp.is_none());
1187 if let Some(ref t) = desc.timestamp_writes {
1188 if let Some(index) = t.beginning_of_pass_write_index {
1189 unsafe { self.write_timestamp(t.query_set, index) }
1190 }
1191 self.state.end_of_pass_timestamp = t
1192 .end_of_pass_write_index
1193 .map(|index| t.query_set.queries[index as usize]);
1194 }
1195
1196 if let Some(label) = desc.label {
1197 let range = self.cmd_buffer.add_marker(label);
1198 self.cmd_buffer.commands.push(C::PushDebugGroup(range));
1199 self.state.has_pass_label = true;
1200 }
1201 }
1202 unsafe fn end_compute_pass(&mut self) {
1203 if self.state.has_pass_label {
1204 self.cmd_buffer.commands.push(C::PopDebugGroup);
1205 self.state.has_pass_label = false;
1206 }
1207
1208 if let Some(query) = self.state.end_of_pass_timestamp.take() {
1209 self.cmd_buffer.commands.push(C::TimestampQuery(query));
1210 }
1211 }
1212
    /// Bind the given compute pipeline by delegating to `set_pipeline_inner`.
    unsafe fn set_compute_pipeline(&mut self, pipeline: &super::ComputePipeline) {
        self.set_pipeline_inner(&pipeline.inner);
    }
1216
1217 unsafe fn dispatch(&mut self, count: [u32; 3]) {
1218 if count.contains(&0) {
1220 return;
1221 }
1222 self.cmd_buffer.commands.push(C::Dispatch(count));
1223 }
    /// Record an indirect dispatch whose group counts are read from `buffer`
    /// at `offset` when the command buffer is executed.
    unsafe fn dispatch_indirect(&mut self, buffer: &super::Buffer, offset: wgt::BufferAddress) {
        self.cmd_buffer.commands.push(C::DispatchIndirect {
            indirect_buf: buffer.raw.unwrap(),
            indirect_offset: offset,
        });
    }
1230
    /// Acceleration-structure builds (ray tracing) are not implemented for
    /// this backend.
    unsafe fn build_acceleration_structures<'a, T>(
        &mut self,
        _descriptor_count: u32,
        _descriptors: T,
    ) where
        super::Api: 'a,
        T: IntoIterator<
            Item = crate::BuildAccelerationStructureDescriptor<
                'a,
                super::Buffer,
                super::AccelerationStructure,
            >,
        >,
    {
        unimplemented!()
    }
1247
    /// Acceleration-structure barriers (ray tracing) are not implemented for
    /// this backend.
    unsafe fn place_acceleration_structure_barrier(
        &mut self,
        _barriers: crate::AccelerationStructureBarrier,
    ) {
        unimplemented!()
    }
1254
    /// Acceleration-structure copies (ray tracing) are not implemented for
    /// this backend.
    unsafe fn copy_acceleration_structure_to_acceleration_structure(
        &mut self,
        _src: &super::AccelerationStructure,
        _dst: &super::AccelerationStructure,
        _copy: wgt::AccelerationStructureCopy,
    ) {
        unimplemented!()
    }
1263
    /// Reading an acceleration structure's compacted size (ray tracing) is
    /// not implemented for this backend.
    unsafe fn read_acceleration_structure_compact_size(
        &mut self,
        _acceleration_structure: &super::AccelerationStructure,
        _buf: &super::Buffer,
    ) {
        unimplemented!()
    }
1271}