1use alloc::{
2 borrow::Cow::{self, Borrowed},
3 boxed::Box,
4 format,
5 string::{String, ToString as _},
6 sync::Arc,
7 vec,
8 vec::Vec,
9};
10use core::{
11 error::Error,
12 fmt,
13 future::ready,
14 ops::{Deref, Range},
15 pin::Pin,
16 ptr::NonNull,
17 slice,
18};
19use hashbrown::HashMap;
20
21use arrayvec::ArrayVec;
22use smallvec::SmallVec;
23use wgc::{
24 command::bundle_ffi::*, error::ContextErrorSource, pipeline::CreateShaderModuleError,
25 resource::BlasPrepareCompactResult,
26};
27use wgt::{
28 error::{ErrorType, WebGpuError},
29 WasmNotSendSync,
30};
31
32use crate::{
33 api,
34 dispatch::{self, BlasCompactCallback, BufferMappedRangeInterface},
35 BindingResource, Blas, BufferBinding, BufferDescriptor, CompilationInfo, CompilationMessage,
36 CompilationMessageType, ErrorSource, Features, Label, LoadOp, MapMode, Operations,
37 ShaderSource, SurfaceTargetUnsafe, TextureDescriptor, Tlas, WriteOnly,
38};
39use crate::{dispatch::DispatchAdapter, util::Mutex};
40
41mod thread_id;
42
/// The `wgpu-core`-backed context implementation.
///
/// A thin, cheaply-clonable handle: all state lives in the shared
/// `wgc::global::Global` behind the `Arc`.
#[derive(Clone)]
pub struct ContextWgpuCore(Arc<wgc::global::Global>);
45
impl Drop for ContextWgpuCore {
    fn drop(&mut self) {
        // Intentionally empty: teardown happens when the inner `Arc<Global>`
        // releases its last reference. NOTE(review): an explicit empty Drop
        // also forbids moving the tuple field out — presumably deliberate;
        // confirm before removing this impl.
    }
}
51
52impl fmt::Debug for ContextWgpuCore {
53 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
54 f.debug_struct("ContextWgpuCore")
55 .field("type", &"Native")
56 .finish()
57 }
58}
59
impl ContextWgpuCore {
    /// Wraps an existing `wgpu-hal` instance for backend `A` in a new context.
    /// # Safety
    /// Same contract as `wgc::global::Global::from_hal_instance`.
    pub unsafe fn from_hal_instance<A: hal::Api>(hal_instance: A::Instance) -> Self {
        Self(unsafe {
            Arc::new(wgc::global::Global::from_hal_instance::<A>(
                "wgpu",
                hal_instance,
            ))
        })
    }

    /// Returns the underlying HAL instance for backend `A`, if present.
    /// # Safety
    /// Same contract as `wgc::global::Global::instance_as_hal`.
    pub unsafe fn instance_as_hal<A: hal::Api>(&self) -> Option<&A::Instance> {
        unsafe { self.0.instance_as_hal::<A>() }
    }

    /// Builds a context around an already-created `wgpu-core` instance.
    /// # Safety
    /// Same contract as `wgc::global::Global::from_instance`.
    pub unsafe fn from_core_instance(core_instance: wgc::instance::Instance) -> Self {
        Self(unsafe { Arc::new(wgc::global::Global::from_instance(core_instance)) })
    }

    /// Lists the ids of all adapters available on the requested `backends`.
    #[cfg(wgpu_core)]
    pub fn enumerate_adapters(&self, backends: wgt::Backends) -> Vec<wgc::id::AdapterId> {
        self.0
            .enumerate_adapters(backends, false )
    }

    /// Registers a HAL adapter with the core and returns its new id.
    /// # Safety
    /// `hal_adapter` must belong to this context's instance.
    pub unsafe fn create_adapter_from_hal<A: hal::Api>(
        &self,
        hal_adapter: hal::ExposedAdapter<A>,
    ) -> wgc::id::AdapterId {
        unsafe { self.0.create_adapter_from_hal(hal_adapter.into(), None) }
    }

    /// Borrows the HAL adapter behind `adapter`, if it uses backend `A`.
    /// # Safety
    /// Same contract as `wgc::global::Global::adapter_as_hal`.
    pub unsafe fn adapter_as_hal<A: hal::Api>(
        &self,
        adapter: &CoreAdapter,
    ) -> Option<impl Deref<Target = A::Adapter> + WasmNotSendSync> {
        unsafe { self.0.adapter_as_hal::<A>(adapter.id) }
    }

    /// Borrows the HAL buffer behind `buffer`, if it uses backend `A`.
    /// # Safety
    /// Same contract as `wgc::global::Global::buffer_as_hal`.
    pub unsafe fn buffer_as_hal<A: hal::Api>(
        &self,
        buffer: &CoreBuffer,
    ) -> Option<impl Deref<Target = A::Buffer>> {
        unsafe { self.0.buffer_as_hal::<A>(buffer.id) }
    }

    /// Registers an already-open HAL device/queue pair and wraps it in
    /// wgpu-level `CoreDevice`/`CoreQueue` handles.
    /// # Safety
    /// `hal_device` must have been opened from `adapter`.
    pub unsafe fn create_device_from_hal<A: hal::Api>(
        &self,
        adapter: &CoreAdapter,
        hal_device: hal::OpenDevice<A>,
        desc: &crate::DeviceDescriptor<'_>,
    ) -> Result<(CoreDevice, CoreQueue), crate::RequestDeviceError> {
        let (device_id, queue_id) = unsafe {
            self.0.create_device_from_hal(
                adapter.id,
                hal_device.into(),
                &desc.map_label(|l| l.map(Borrowed)),
                None,
                None,
            )
        }?;
        // Device and queue share a single error sink, so errors from either
        // flow through the same error-scope stack and uncaptured handler.
        let error_sink = Arc::new(Mutex::new(ErrorSinkRaw::new()));
        let device = CoreDevice {
            context: self.clone(),
            id: device_id,
            error_sink: error_sink.clone(),
            features: desc.required_features,
        };
        let queue = CoreQueue {
            context: self.clone(),
            id: queue_id,
            error_sink,
        };
        Ok((device, queue))
    }

    /// Wraps an externally-created HAL texture as a `CoreTexture` owned by
    /// `device`; creation errors are reported through the device's error sink.
    /// # Safety
    /// `hal_texture` must match `desc` and belong to `device`.
    pub unsafe fn create_texture_from_hal<A: hal::Api>(
        &self,
        hal_texture: A::Texture,
        device: &CoreDevice,
        desc: &TextureDescriptor<'_>,
    ) -> CoreTexture {
        let descriptor = desc.map_label_and_view_formats(|l| l.map(Borrowed), |v| v.to_vec());
        let (id, error) = unsafe {
            self.0
                .create_texture_from_hal(Box::new(hal_texture), device.id, &descriptor, None)
        };
        if let Some(cause) = error {
            self.handle_error(
                &device.error_sink,
                cause,
                desc.label,
                "Device::create_texture_from_hal",
            );
        }
        CoreTexture {
            context: self.clone(),
            id,
            error_sink: Arc::clone(&device.error_sink),
        }
    }

    /// Wraps an externally-created HAL buffer as a `CoreBuffer` owned by
    /// `device`; creation errors are reported through the device's error sink.
    /// # Safety
    /// `hal_buffer` must match `desc` and belong to `device`.
    pub unsafe fn create_buffer_from_hal<A: hal::Api>(
        &self,
        hal_buffer: A::Buffer,
        device: &CoreDevice,
        desc: &BufferDescriptor<'_>,
    ) -> CoreBuffer {
        let (id, error) = unsafe {
            self.0.create_buffer_from_hal::<A>(
                hal_buffer,
                device.id,
                &desc.map_label(|l| l.map(Borrowed)),
                None,
            )
        };
        if let Some(cause) = error {
            self.handle_error(
                &device.error_sink,
                cause,
                desc.label,
                "Device::create_buffer_from_hal",
            );
        }
        CoreBuffer {
            context: self.clone(),
            id,
            error_sink: Arc::clone(&device.error_sink),
        }
    }

    /// Borrows the HAL device behind `device`, if it uses backend `A`.
    /// # Safety
    /// Same contract as `wgc::global::Global::device_as_hal`.
    pub unsafe fn device_as_hal<A: hal::Api>(
        &self,
        device: &CoreDevice,
    ) -> Option<impl Deref<Target = A::Device>> {
        unsafe { self.0.device_as_hal::<A>(device.id) }
    }

    /// Borrows the HAL surface behind `surface`, if it uses backend `A`.
    /// # Safety
    /// Same contract as `wgc::global::Global::surface_as_hal`.
    pub unsafe fn surface_as_hal<A: hal::Api>(
        &self,
        surface: &CoreSurface,
    ) -> Option<impl Deref<Target = A::Surface>> {
        unsafe { self.0.surface_as_hal::<A>(surface.id) }
    }

    /// Borrows the HAL texture behind `texture`, if it uses backend `A`.
    /// # Safety
    /// Same contract as `wgc::global::Global::texture_as_hal`.
    pub unsafe fn texture_as_hal<A: hal::Api>(
        &self,
        texture: &CoreTexture,
    ) -> Option<impl Deref<Target = A::Texture>> {
        unsafe { self.0.texture_as_hal::<A>(texture.id) }
    }

    /// Borrows the HAL texture view behind `texture_view`, if backend `A`.
    /// # Safety
    /// Same contract as `wgc::global::Global::texture_view_as_hal`.
    pub unsafe fn texture_view_as_hal<A: hal::Api>(
        &self,
        texture_view: &CoreTextureView,
    ) -> Option<impl Deref<Target = A::TextureView>> {
        unsafe { self.0.texture_view_as_hal::<A>(texture_view.id) }
    }

    /// Runs `hal_command_encoder_callback` with mutable access to the HAL
    /// command encoder (or `None` if the backend does not match `A`).
    /// # Safety
    /// Same contract as `wgc::global::Global::command_encoder_as_hal_mut`.
    pub unsafe fn command_encoder_as_hal_mut<
        A: hal::Api,
        F: FnOnce(Option<&mut A::CommandEncoder>) -> R,
        R,
    >(
        &self,
        command_encoder: &CoreCommandEncoder,
        hal_command_encoder_callback: F,
    ) -> R {
        unsafe {
            self.0.command_encoder_as_hal_mut::<A, F, R>(
                command_encoder.id,
                hal_command_encoder_callback,
            )
        }
    }

    /// Borrows the HAL acceleration structure behind `blas`, if backend `A`.
    /// # Safety
    /// Same contract as `wgc::global::Global::blas_as_hal`.
    pub unsafe fn blas_as_hal<A: hal::Api>(
        &self,
        blas: &CoreBlas,
    ) -> Option<impl Deref<Target = A::AccelerationStructure>> {
        unsafe { self.0.blas_as_hal::<A>(blas.id) }
    }

    /// Borrows the HAL acceleration structure behind `tlas`, if backend `A`.
    /// # Safety
    /// Same contract as `wgc::global::Global::tlas_as_hal`.
    pub unsafe fn tlas_as_hal<A: hal::Api>(
        &self,
        tlas: &CoreTlas,
    ) -> Option<impl Deref<Target = A::AccelerationStructure>> {
        unsafe { self.0.tlas_as_hal::<A>(tlas.id) }
    }

    /// Produces a snapshot report of the core's internal resource registries.
    pub fn generate_report(&self) -> wgc::global::GlobalReport {
        self.0.generate_report()
    }

    /// Shared slow path for error reporting: wraps `source` in a
    /// `ContextError`, classifies it, and hands it to the sink.
    /// Marked `#[cold]`/`#[inline(never)]` to keep it off the hot path.
    #[cold]
    #[track_caller]
    #[inline(never)]
    fn handle_error_inner(
        &self,
        sink_mutex: &Mutex<ErrorSinkRaw>,
        error_type: ErrorType,
        source: ContextErrorSource,
        label: Label<'_>,
        fn_ident: &'static str,
    ) {
        let source: ErrorSource = Box::new(wgc::error::ContextError {
            fn_ident,
            source,
            label: label.unwrap_or_default().to_string(),
        });
        let final_error_handling = {
            let mut sink = sink_mutex.lock();
            // Lazily formats the full error chain; only Internal/Validation
            // errors carry a human-readable description.
            let description = || self.format_error(&*source);
            let error = match error_type {
                ErrorType::Internal => {
                    let description = description();
                    crate::Error::Internal {
                        source,
                        description,
                    }
                }
                ErrorType::OutOfMemory => crate::Error::OutOfMemory { source },
                ErrorType::Validation => {
                    let description = description();
                    crate::Error::Validation {
                        source,
                        description,
                    }
                }
                // Device loss is not routed through error scopes; nothing to
                // record here.
                ErrorType::DeviceLost => return, };
            sink.handle_error_or_return_handler(error)
        };

        // Invoke any uncaptured-error handler only after the sink lock above
        // has been released, so the handler may safely re-enter the sink.
        if let Some(f) = final_error_handling {
            f();
        }
    }

    /// Reports `source` against `sink_mutex` with an associated `label`.
    #[inline]
    #[track_caller]
    fn handle_error(
        &self,
        sink_mutex: &Mutex<ErrorSinkRaw>,
        source: impl WebGpuError + WasmNotSendSync + 'static,
        label: Label<'_>,
        fn_ident: &'static str,
    ) {
        let error_type = source.webgpu_error_type();
        self.handle_error_inner(sink_mutex, error_type, Box::new(source), label, fn_ident)
    }

    /// Like [`Self::handle_error`], for call sites that have no label.
    #[inline]
    #[track_caller]
    fn handle_error_nolabel(
        &self,
        sink_mutex: &Mutex<ErrorSinkRaw>,
        source: impl WebGpuError + WasmNotSendSync + 'static,
        fn_ident: &'static str,
    ) {
        let error_type = source.webgpu_error_type();
        self.handle_error_inner(sink_mutex, error_type, Box::new(source), None, fn_ident)
    }

    /// Reports an unrecoverable error by panicking with the formatted chain.
    #[track_caller]
    #[cold]
    fn handle_error_fatal(
        &self,
        cause: impl Error + WasmNotSendSync + 'static,
        operation: &'static str,
    ) -> ! {
        panic!("Error in {operation}: {f}", f = self.format_error(&cause));
    }

    /// Renders `err` and its `source()` chain as an indented tree, flattening
    /// `wgc::error::MultiError` nodes into their constituent errors.
    #[inline(never)]
    fn format_error(&self, err: &(dyn Error + 'static)) -> String {
        let mut output = String::new();
        let mut level = 1;

        // Recursive helper: `level` controls the indent depth of each line.
        fn print_tree(output: &mut String, level: &mut usize, e: &(dyn Error + 'static)) {
            let mut print = |e: &(dyn Error + 'static)| {
                use core::fmt::Write;
                writeln!(output, "{}{}", " ".repeat(*level * 2), e).unwrap();

                if let Some(e) = e.source() {
                    *level += 1;
                    print_tree(output, level, e);
                    *level -= 1;
                }
            };
            if let Some(multi) = e.downcast_ref::<wgc::error::MultiError>() {
                for e in multi.errors() {
                    print(e);
                }
            } else {
                print(e);
            }
        }

        print_tree(&mut output, &mut level, err);

        format!("Validation Error\n\nCaused by:\n{output}")
    }

    /// Borrows the HAL queue behind `queue`, if it uses backend `A`.
    /// # Safety
    /// Same contract as `wgc::global::Global::queue_as_hal`.
    pub unsafe fn queue_as_hal<A: hal::Api>(
        &self,
        queue: &CoreQueue,
    ) -> Option<impl Deref<Target = A::Queue> + WasmNotSendSync> {
        unsafe { self.0.queue_as_hal::<A>(queue.id) }
    }
}
383
384fn map_buffer_copy_view(
385 view: crate::TexelCopyBufferInfo<'_>,
386) -> wgt::TexelCopyBufferInfo<wgc::id::BufferId> {
387 wgt::TexelCopyBufferInfo {
388 buffer: view.buffer.inner.as_core().id,
389 layout: view.layout,
390 }
391}
392
393fn map_texture_copy_view(
394 view: crate::TexelCopyTextureInfo<'_>,
395) -> wgt::TexelCopyTextureInfo<wgc::id::TextureId> {
396 wgt::TexelCopyTextureInfo {
397 texture: view.texture.inner.as_core().id,
398 mip_level: view.mip_level,
399 origin: view.origin,
400 aspect: view.aspect,
401 }
402}
403
404#[cfg_attr(not(webgl), expect(unused))]
405fn map_texture_tagged_copy_view(
406 view: crate::CopyExternalImageDestInfo<&api::Texture>,
407) -> wgt::CopyExternalImageDestInfo<wgc::id::TextureId> {
408 wgt::CopyExternalImageDestInfo {
409 texture: view.texture.inner.as_core().id,
410 mip_level: view.mip_level,
411 origin: view.origin,
412 aspect: view.aspect,
413 color_space: view.color_space,
414 premultiplied_alpha: view.premultiplied_alpha,
415 }
416}
417
418fn map_load_op<V: Copy>(load: &LoadOp<V>) -> LoadOp<Option<V>> {
419 match *load {
420 LoadOp::Clear(clear_value) => LoadOp::Clear(Some(clear_value)),
421 LoadOp::DontCare(token) => LoadOp::DontCare(token),
422 LoadOp::Load => LoadOp::Load,
423 }
424}
425
426fn map_pass_channel<V: Copy>(ops: Option<&Operations<V>>) -> wgc::command::PassChannel<Option<V>> {
427 match ops {
428 Some(&Operations { load, store }) => wgc::command::PassChannel {
429 load_op: Some(map_load_op(&load)),
430 store_op: Some(store),
431 read_only: false,
432 },
433 None => wgc::command::PassChannel {
434 load_op: None,
435 store_op: None,
436 read_only: true,
437 },
438 }
439}
440
/// `wgpu-core` handle for a presentation surface.
#[derive(Debug)]
pub struct CoreSurface {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::SurfaceId,
    // Device this surface was last configured with, if any.
    // NOTE(review): consumers are outside this view — presumably read by the
    // present/get-current-texture path; confirm.
    configured_device: Mutex<Option<wgc::id::DeviceId>>,
    // Error sink of the configuring device; `None` until the surface is
    // configured (creation initializes it to `Mutex::default()`).
    error_sink: Mutex<Option<ErrorSink>>,
}
452
/// `wgpu-core` handle for an adapter.
#[derive(Debug)]
pub struct CoreAdapter {
    pub(crate) context: ContextWgpuCore,
    pub(crate) id: wgc::id::AdapterId,
}

/// `wgpu-core` handle for a device. Shares its `error_sink` with the queue
/// created alongside it so both report through the same scope stack.
#[derive(Debug)]
pub struct CoreDevice {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::DeviceId,
    error_sink: ErrorSink,
    // Features requested at device creation; used to gate optional binding
    // paths (e.g. binding arrays) without querying the core.
    features: Features,
}

/// `wgpu-core` handle for a buffer.
#[derive(Debug)]
pub struct CoreBuffer {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BufferId,
    error_sink: ErrorSink,
}
473
/// `wgpu-core` handle for a shader module, carrying the compilation
/// diagnostics captured at creation time.
#[derive(Debug)]
pub struct CoreShaderModule {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::ShaderModuleId,
    compilation_info: CompilationInfo,
}

/// `wgpu-core` handle for a bind group layout.
#[derive(Debug)]
pub struct CoreBindGroupLayout {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BindGroupLayoutId,
}

/// `wgpu-core` handle for a bind group.
#[derive(Debug)]
pub struct CoreBindGroup {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BindGroupId,
}

/// `wgpu-core` handle for a texture.
#[derive(Debug)]
pub struct CoreTexture {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::TextureId,
    error_sink: ErrorSink,
}

/// `wgpu-core` handle for a texture view.
#[derive(Debug)]
pub struct CoreTextureView {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::TextureViewId,
}

/// `wgpu-core` handle for an external texture.
#[derive(Debug)]
pub struct CoreExternalTexture {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::ExternalTextureId,
}

/// `wgpu-core` handle for a sampler.
#[derive(Debug)]
pub struct CoreSampler {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::SamplerId,
}

/// `wgpu-core` handle for a query set.
#[derive(Debug)]
pub struct CoreQuerySet {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::QuerySetId,
}
523
/// `wgpu-core` handle for a pipeline layout.
#[derive(Debug)]
pub struct CorePipelineLayout {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::PipelineLayoutId,
}

/// `wgpu-core` handle for a pipeline cache.
#[derive(Debug)]
pub struct CorePipelineCache {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::PipelineCacheId,
}

/// `wgpu-core` handle for a finished command buffer.
#[derive(Debug)]
pub struct CoreCommandBuffer {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::CommandBufferId,
}

/// `wgpu-core` handle for an in-progress render bundle encoder. Unlike most
/// wrappers it owns the encoder state directly rather than a core id.
#[derive(Debug)]
pub struct CoreRenderBundleEncoder {
    pub(crate) context: ContextWgpuCore,
    encoder: wgc::command::RenderBundleEncoder,
    // Identity token used only for equality/ordering/hashing of encoders.
    id: crate::cmp::Identifier,
}

/// `wgpu-core` handle for a finished render bundle.
#[derive(Debug)]
pub struct CoreRenderBundle {
    context: ContextWgpuCore,
    id: wgc::id::RenderBundleId,
}

/// `wgpu-core` handle for a queue; shares its `error_sink` with its device.
#[derive(Debug)]
pub struct CoreQueue {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::QueueId,
    error_sink: ErrorSink,
}

/// `wgpu-core` handle for a compute pipeline.
#[derive(Debug)]
pub struct CoreComputePipeline {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::ComputePipelineId,
    error_sink: ErrorSink,
}

/// `wgpu-core` handle for a render pipeline.
#[derive(Debug)]
pub struct CoreRenderPipeline {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::RenderPipelineId,
    error_sink: ErrorSink,
}
575
/// `wgpu-core` handle for a recording compute pass; owns the pass state.
#[derive(Debug)]
pub struct CoreComputePass {
    pub(crate) context: ContextWgpuCore,
    pass: wgc::command::ComputePass,
    error_sink: ErrorSink,
    // Identity token used only for equality/ordering/hashing of passes.
    id: crate::cmp::Identifier,
}

/// `wgpu-core` handle for a recording render pass; owns the pass state.
#[derive(Debug)]
pub struct CoreRenderPass {
    pub(crate) context: ContextWgpuCore,
    pass: wgc::command::RenderPass,
    error_sink: ErrorSink,
    // Identity token used only for equality/ordering/hashing of passes.
    id: crate::cmp::Identifier,
}

/// `wgpu-core` handle for a command encoder.
#[derive(Debug)]
pub struct CoreCommandEncoder {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::CommandEncoderId,
    error_sink: ErrorSink,
}

/// `wgpu-core` handle for a bottom-level acceleration structure.
#[derive(Debug)]
pub struct CoreBlas {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BlasId,
    error_sink: ErrorSink,
}

/// `wgpu-core` handle for a top-level acceleration structure.
#[derive(Debug)]
pub struct CoreTlas {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::TlasId,
 }

/// Backend payload attached to a surface's acquired-frame output; keeps the
/// surface id and error sink needed to present or discard the frame.
#[derive(Debug)]
pub struct CoreSurfaceOutputDetail {
    context: ContextWgpuCore,
    surface_id: wgc::id::SurfaceId,
    error_sink: ErrorSink,
}
619
/// Shared, lockable error sink handed out to every resource of a device.
type ErrorSink = Arc<Mutex<ErrorSinkRaw>>;

/// One entry of the error-scope stack: captures at most one error matching
/// its filter.
struct ErrorScope {
    // First error captured by this scope, if any.
    error: Option<crate::Error>,
    // Which error class this scope captures.
    filter: crate::ErrorFilter,
}

/// Mutable error-routing state: per-thread stacks of open error scopes plus
/// an optional handler for errors no scope captures.
struct ErrorSinkRaw {
    scopes: HashMap<thread_id::ThreadId, Vec<ErrorScope>>,
    uncaptured_handler: Option<Arc<dyn crate::UncapturedErrorHandler>>,
}
631
impl ErrorSinkRaw {
    /// Creates an empty sink: no open scopes, no custom uncaptured handler.
    fn new() -> ErrorSinkRaw {
        ErrorSinkRaw {
            scopes: HashMap::new(),
            uncaptured_handler: None,
        }
    }

    /// Records `err` in the innermost open scope of the current thread whose
    /// filter matches; a scope keeps only the first error it captures.
    ///
    /// If no scope matches, returns the installed uncaptured-error handler
    /// bound to `err` so the caller can invoke it after releasing this sink's
    /// lock; without a custom handler this diverges via
    /// `default_error_handler`.
    #[track_caller]
    #[must_use]
    fn handle_error_or_return_handler(&mut self, err: crate::Error) -> Option<impl FnOnce()> {
        // Classify the error into the filter class a scope may capture.
        let filter = match err {
            crate::Error::OutOfMemory { .. } => crate::ErrorFilter::OutOfMemory,
            crate::Error::Validation { .. } => crate::ErrorFilter::Validation,
            crate::Error::Internal { .. } => crate::ErrorFilter::Internal,
        };
        // Scopes are tracked per thread.
        let thread_id = thread_id::ThreadId::current();
        let scopes = self.scopes.entry(thread_id).or_default();
        // Search innermost (top of stack) to outermost for a matching scope.
        match scopes.iter_mut().rev().find(|scope| scope.filter == filter) {
            Some(scope) => {
                if scope.error.is_none() {
                    scope.error = Some(err);
                }
                None
            }
            None => {
                if let Some(custom_handler) = &self.uncaptured_handler {
                    let custom_handler = Arc::clone(custom_handler);
                    Some(move || (custom_handler)(err))
                } else {
                    // Diverges (panics), so this arm never produces a value.
                    default_error_handler(err)
                }
            }
        }
    }
}
678
679impl fmt::Debug for ErrorSinkRaw {
680 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
681 write!(f, "ErrorSink")
682 }
683}
684
/// Fallback for errors that escape every error scope when no custom
/// uncaptured-error handler is installed: log, then abort via panic.
#[track_caller]
fn default_error_handler(err: crate::Error) -> ! {
    log::error!("Handling wgpu errors as fatal by default");
    panic!("wgpu error: {err}\n");
}
690
impl From<CreateShaderModuleError> for CompilationInfo {
    /// Converts a shader-module creation error into WebGPU-style compilation
    /// messages for the `CompilationInfo` API.
    fn from(value: CreateShaderModuleError) -> Self {
        match value {
            // Front-end parse/validation errors carry structured diagnostics
            // and have their own `Into<CompilationInfo>` conversions.
            #[cfg(feature = "wgsl")]
            CreateShaderModuleError::Parsing(v) => v.into(),
            #[cfg(feature = "glsl")]
            CreateShaderModuleError::ParsingGlsl(v) => v.into(),
            #[cfg(feature = "spirv")]
            CreateShaderModuleError::ParsingSpirV(v) => v.into(),
            CreateShaderModuleError::Validation(v) => v.into(),
            // Device-level failures are not compilation problems, so they
            // yield no messages.
            CreateShaderModuleError::Device(_) | CreateShaderModuleError::Generation => {
                CompilationInfo {
                    messages: Vec::new(),
                }
            }
            // Anything else becomes a single error message with no source
            // location.
            _ => CompilationInfo {
                messages: vec![CompilationMessage {
                    message: value.to_string(),
                    message_type: CompilationMessageType::Error,
                    location: None,
                }],
            },
        }
    }
}
719
/// Staging buffer handed out by `Queue::write_buffer_with`, pairing the core
/// staging-buffer id with its mapped memory range.
#[derive(Debug)]
pub struct CoreQueueWriteBuffer {
    buffer_id: wgc::id::StagingBufferId,
    mapping: CoreBufferMappedRange,
}

/// A raw view into mapped buffer memory: base pointer plus length in bytes.
/// Does not own the memory it points to (see the empty `Drop` below).
#[derive(Debug)]
pub struct CoreBufferMappedRange {
    ptr: NonNull<u8>,
    size: usize,
}
731
// SAFETY: `NonNull<u8>` is neither Send nor Sync by default. NOTE(review):
// these impls assume the mapped memory stays valid and that wgpu-core
// serializes conflicting access to the mapping — invariants upheld outside
// this view; confirm before changing mapping lifetimes.
#[cfg(send_sync)]
unsafe impl Send for CoreBufferMappedRange {}
#[cfg(send_sync)]
unsafe impl Sync for CoreBufferMappedRange {}
736
impl Drop for CoreBufferMappedRange {
    fn drop(&mut self) {
        // Intentionally empty: the range does not own its memory, so there is
        // nothing to free. NOTE(review): unmapping presumably happens through
        // the owning buffer/staging buffer — confirm.
    }
}
743
// Identity-based comparison and hashing for every wrapper type: the context
// compares by the address of its inner `Arc`, and each resource wrapper by
// its underlying id (or owning pointer, for mapped ranges) — never by
// contents.
crate::cmp::impl_eq_ord_hash_arc_address!(ContextWgpuCore => .0);
crate::cmp::impl_eq_ord_hash_proxy!(CoreAdapter => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreDevice => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreQueue => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreShaderModule => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBindGroupLayout => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBindGroup => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreTextureView => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreSampler => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBuffer => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreTexture => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreExternalTexture => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBlas => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreTlas => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreQuerySet => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CorePipelineLayout => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderPipeline => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreComputePipeline => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CorePipelineCache => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreCommandEncoder => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreComputePass => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderPass => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreCommandBuffer => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderBundleEncoder => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderBundle => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreSurface => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreSurfaceOutputDetail => .surface_id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreQueueWriteBuffer => .mapping.ptr);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBufferMappedRange => .ptr);
773
impl dispatch::InstanceInterface for ContextWgpuCore {
    /// Creates a fresh wgpu-core global for this instance descriptor.
    fn new(desc: wgt::InstanceDescriptor) -> Self
    where
        Self: Sized,
    {
        Self(Arc::new(wgc::global::Global::new("wgpu", desc, None)))
    }

    /// Creates a surface from one of the raw platform targets; unsupported
    /// target/platform combinations are rejected at compile time via `cfg`.
    unsafe fn create_surface(
        &self,
        target: crate::api::SurfaceTargetUnsafe,
    ) -> Result<dispatch::DispatchSurface, crate::CreateSurfaceError> {
        let id = match target {
            SurfaceTargetUnsafe::RawHandle {
                raw_display_handle,
                raw_window_handle,
            } => unsafe {
                self.0
                    .instance_create_surface(raw_display_handle, raw_window_handle, None)
            },

            // DRM surfaces: non-Apple unix, excluding wasm and NetBSD.
            #[cfg(all(
                unix,
                not(target_vendor = "apple"),
                not(target_family = "wasm"),
                not(target_os = "netbsd")
            ))]
            SurfaceTargetUnsafe::Drm {
                fd,
                plane,
                connector_id,
                width,
                height,
                refresh_rate,
            } => unsafe {
                self.0.instance_create_surface_from_drm(
                    fd,
                    plane,
                    connector_id,
                    width,
                    height,
                    refresh_rate,
                    None,
                )
            },

            #[cfg(metal)]
            SurfaceTargetUnsafe::CoreAnimationLayer(layer) => unsafe {
                self.0.instance_create_surface_metal(layer, None)
            },

            // DRM is not supported on NetBSD; report the backend as disabled.
            #[cfg(target_os = "netbsd")]
            SurfaceTargetUnsafe::Drm { .. } => Err(
                wgc::instance::CreateSurfaceError::BackendNotEnabled(wgt::Backend::Vulkan),
            ),

            #[cfg(dx12)]
            SurfaceTargetUnsafe::CompositionVisual(visual) => unsafe {
                self.0.instance_create_surface_from_visual(visual, None)
            },

            #[cfg(dx12)]
            SurfaceTargetUnsafe::SurfaceHandle(surface_handle) => unsafe {
                self.0
                    .instance_create_surface_from_surface_handle(surface_handle, None)
            },

            #[cfg(dx12)]
            SurfaceTargetUnsafe::SwapChainPanel(swap_chain_panel) => unsafe {
                self.0
                    .instance_create_surface_from_swap_chain_panel(swap_chain_panel, None)
            },
        }?;

        // The surface starts unconfigured: no device and no error sink yet.
        Ok(CoreSurface {
            context: self.clone(),
            id,
            configured_device: Mutex::default(),
            error_sink: Mutex::default(),
        }
        .into())
    }

    /// Picks an adapter matching `options`. The future is always immediately
    /// ready: wgpu-core resolves the request synchronously.
    fn request_adapter(
        &self,
        options: &crate::api::RequestAdapterOptions<'_, '_>,
    ) -> Pin<Box<dyn dispatch::RequestAdapterFuture>> {
        let id = self.0.request_adapter(
            &wgc::instance::RequestAdapterOptions {
                power_preference: options.power_preference,
                force_fallback_adapter: options.force_fallback_adapter,
                compatible_surface: options
                    .compatible_surface
                    .map(|surface| surface.inner.as_core().id),
                apply_limit_buckets: false,
            },
            wgt::Backends::all(),
            None,
        );
        let adapter = id.map(|id| {
            let core = CoreAdapter {
                context: self.clone(),
                id,
            };
            let generic: dispatch::DispatchAdapter = core.into();
            generic
        });
        Box::pin(ready(adapter))
    }

    /// Polls every device; returns whether all queues are empty. A polling
    /// failure here is unrecoverable and panics.
    fn poll_all_devices(&self, force_wait: bool) -> bool {
        match self.0.poll_all_devices(force_wait) {
            Ok(all_queue_empty) => all_queue_empty,
            Err(err) => self.handle_error_fatal(err, "Instance::poll_all_devices"),
        }
    }

    /// Reports the WGSL language extensions implemented by naga's front end,
    /// mapped onto the public feature flags.
    #[cfg(feature = "wgsl")]
    fn wgsl_language_features(&self) -> crate::WgslLanguageFeatures {
        use wgc::naga::front::wgsl::ImplementedLanguageExtension;
        ImplementedLanguageExtension::all().iter().copied().fold(
            crate::WgslLanguageFeatures::empty(),
            |acc, wle| {
                acc | match wle {
                    ImplementedLanguageExtension::ReadOnlyAndReadWriteStorageTextures => {
                        crate::WgslLanguageFeatures::ReadOnlyAndReadWriteStorageTextures
                    }
                    ImplementedLanguageExtension::Packed4x8IntegerDotProduct => {
                        crate::WgslLanguageFeatures::Packed4x8IntegerDotProduct
                    }
                    ImplementedLanguageExtension::PointerCompositeAccess => {
                        crate::WgslLanguageFeatures::PointerCompositeAccess
                    }
                }
            },
        )
    }

    /// Enumerates adapters on `backends`, wrapped as dispatch handles. The
    /// future is immediately ready (delegates to the inherent
    /// `enumerate_adapters`).
    fn enumerate_adapters(
        &self,
        backends: crate::Backends,
    ) -> Pin<Box<dyn dispatch::EnumerateAdapterFuture>> {
        let adapters: Vec<DispatchAdapter> = self
            .enumerate_adapters(backends)
            .into_iter()
            .map(|adapter| {
                let core = crate::backend::wgpu_core::CoreAdapter {
                    context: self.clone(),
                    id: adapter,
                };
                core.into()
            })
            .collect();
        Box::pin(ready(adapters))
    }
}
930
impl dispatch::AdapterInterface for CoreAdapter {
    /// Requests a device/queue pair from this adapter. The future is always
    /// immediately ready: wgpu-core resolves the request synchronously.
    fn request_device(
        &self,
        desc: &crate::DeviceDescriptor<'_>,
    ) -> Pin<Box<dyn dispatch::RequestDeviceFuture>> {
        let res = self.context.0.adapter_request_device(
            self.id,
            &desc.map_label(|l| l.map(Borrowed)),
            None,
            None,
        );
        let (device_id, queue_id) = match res {
            Ok(ids) => ids,
            Err(err) => {
                return Box::pin(ready(Err(err.into())));
            }
        };
        // Device and queue share one error sink so errors from either flow
        // through the same scope stack and uncaptured handler.
        let error_sink = Arc::new(Mutex::new(ErrorSinkRaw::new()));
        let device = CoreDevice {
            context: self.context.clone(),
            id: device_id,
            error_sink: error_sink.clone(),
            features: desc.required_features,
        };
        let queue = CoreQueue {
            context: self.context.clone(),
            id: queue_id,
            error_sink,
        };
        Box::pin(ready(Ok((device.into(), queue.into()))))
    }

    /// Whether this adapter can present to the given surface.
    fn is_surface_supported(&self, surface: &dispatch::DispatchSurface) -> bool {
        let surface = surface.as_core();

        self.context
            .0
            .adapter_is_surface_supported(self.id, surface.id)
    }

    /// Features supported by this adapter.
    fn features(&self) -> crate::Features {
        self.context.0.adapter_features(self.id)
    }

    /// Best-case limits supported by this adapter.
    fn limits(&self) -> crate::Limits {
        self.context.0.adapter_limits(self.id)
    }

    /// Downlevel capability flags for this adapter.
    fn downlevel_capabilities(&self) -> crate::DownlevelCapabilities {
        self.context.0.adapter_downlevel_capabilities(self.id)
    }

    /// Identifying information (name, backend, driver, ...).
    fn get_info(&self) -> crate::AdapterInfo {
        self.context.0.adapter_get_info(self.id)
    }

    /// Per-format capability flags beyond the spec-guaranteed baseline.
    fn get_texture_format_features(
        &self,
        format: crate::TextureFormat,
    ) -> crate::TextureFormatFeatures {
        self.context
            .0
            .adapter_get_texture_format_features(self.id, format)
    }

    /// Current timestamp of the presentation timeline.
    fn get_presentation_timestamp(&self) -> crate::PresentationTimestamp {
        self.context.0.adapter_get_presentation_timestamp(self.id)
    }

    /// Supported cooperative-matrix configurations, if any.
    fn cooperative_matrix_properties(&self) -> Vec<crate::wgt::CooperativeMatrixProperties> {
        self.context
            .0
            .adapter_cooperative_matrix_properties(self.id)
    }
}
1006
impl Drop for CoreAdapter {
    fn drop(&mut self) {
        // Release the core-side registry entry when the last wrapper goes away.
        self.context.0.adapter_drop(self.id)
    }
}
1012
1013impl dispatch::DeviceInterface for CoreDevice {
1014 fn features(&self) -> crate::Features {
1015 self.context.0.device_features(self.id)
1016 }
1017
1018 fn limits(&self) -> crate::Limits {
1019 self.context.0.device_limits(self.id)
1020 }
1021
1022 fn adapter_info(&self) -> crate::AdapterInfo {
1023 self.context.0.device_adapter_info(self.id)
1024 }
1025
1026 #[cfg_attr(
1028 not(any(
1029 feature = "spirv",
1030 feature = "glsl",
1031 feature = "wgsl",
1032 feature = "naga-ir"
1033 )),
1034 expect(unused)
1035 )]
1036 fn create_shader_module(
1037 &self,
1038 desc: crate::ShaderModuleDescriptor<'_>,
1039 shader_bound_checks: wgt::ShaderRuntimeChecks,
1040 ) -> dispatch::DispatchShaderModule {
1041 let descriptor = wgc::pipeline::ShaderModuleDescriptor {
1042 label: desc.label.map(Borrowed),
1043 runtime_checks: shader_bound_checks,
1044 };
1045 let source = match desc.source {
1046 #[cfg(feature = "spirv")]
1047 ShaderSource::SpirV(ref spv) => {
1048 let options = naga::front::spv::Options {
1050 adjust_coordinate_space: false, strict_capabilities: true,
1052 block_ctx_dump_prefix: None,
1053 };
1054 wgc::pipeline::ShaderModuleSource::SpirV(Borrowed(spv), options)
1055 }
1056 #[cfg(feature = "glsl")]
1057 ShaderSource::Glsl {
1058 ref shader,
1059 stage,
1060 defines,
1061 } => {
1062 let options = naga::front::glsl::Options {
1063 stage,
1064 defines: defines
1065 .iter()
1066 .map(|&(key, value)| (String::from(key), String::from(value)))
1067 .collect(),
1068 };
1069 wgc::pipeline::ShaderModuleSource::Glsl(Borrowed(shader), options)
1070 }
1071 #[cfg(feature = "wgsl")]
1072 ShaderSource::Wgsl(ref code) => wgc::pipeline::ShaderModuleSource::Wgsl(Borrowed(code)),
1073 #[cfg(feature = "naga-ir")]
1074 ShaderSource::Naga(module) => wgc::pipeline::ShaderModuleSource::Naga(module),
1075 ShaderSource::Dummy(_) => panic!("found `ShaderSource::Dummy`"),
1076 };
1077 let (id, error) =
1078 self.context
1079 .0
1080 .device_create_shader_module(self.id, &descriptor, source, None);
1081 let compilation_info = match error {
1082 Some(cause) => {
1083 self.context.handle_error(
1084 &self.error_sink,
1085 cause.clone(),
1086 desc.label,
1087 "Device::create_shader_module",
1088 );
1089 CompilationInfo::from(cause)
1090 }
1091 None => CompilationInfo { messages: vec![] },
1092 };
1093
1094 CoreShaderModule {
1095 context: self.context.clone(),
1096 id,
1097 compilation_info,
1098 }
1099 .into()
1100 }
1101
1102 unsafe fn create_shader_module_passthrough(
1103 &self,
1104 desc: &crate::ShaderModuleDescriptorPassthrough<'_>,
1105 ) -> dispatch::DispatchShaderModule {
1106 let desc = desc.map_label(|l| l.map(Cow::from));
1107 let (id, error) = unsafe {
1108 self.context
1109 .0
1110 .device_create_shader_module_passthrough(self.id, &desc, None)
1111 };
1112
1113 let compilation_info = match error {
1114 Some(cause) => {
1115 self.context.handle_error(
1116 &self.error_sink,
1117 cause.clone(),
1118 desc.label.as_deref(),
1119 "Device::create_shader_module_passthrough",
1120 );
1121 CompilationInfo::from(cause)
1122 }
1123 None => CompilationInfo { messages: vec![] },
1124 };
1125
1126 CoreShaderModule {
1127 context: self.context.clone(),
1128 id,
1129 compilation_info,
1130 }
1131 .into()
1132 }
1133
1134 fn create_bind_group_layout(
1135 &self,
1136 desc: &crate::BindGroupLayoutDescriptor<'_>,
1137 ) -> dispatch::DispatchBindGroupLayout {
1138 let descriptor = wgc::binding_model::BindGroupLayoutDescriptor {
1139 label: desc.label.map(Borrowed),
1140 entries: Borrowed(desc.entries),
1141 };
1142 let (id, error) =
1143 self.context
1144 .0
1145 .device_create_bind_group_layout(self.id, &descriptor, None);
1146 if let Some(cause) = error {
1147 self.context.handle_error(
1148 &self.error_sink,
1149 cause,
1150 desc.label,
1151 "Device::create_bind_group_layout",
1152 );
1153 }
1154 CoreBindGroupLayout {
1155 context: self.context.clone(),
1156 id,
1157 }
1158 .into()
1159 }
1160
    // Creates a bind group. wgpu-core's descriptor borrows contiguous slices
    // for *array* bindings, so each array-capable resource kind is first
    // flattened into an owned buffer (one pre-pass per feature), and the main
    // mapping pass then carves per-entry sub-slices back out. Both passes walk
    // `desc.entries` in the same order, which is what keeps the cursors below
    // aligned with the entries.
    fn create_bind_group(
        &self,
        desc: &crate::BindGroupDescriptor<'_>,
    ) -> dispatch::DispatchBindGroup {
        use wgc::binding_model as bm;

        // Pre-pass 1: flatten texture-view and sampler arrays (only legal
        // when TEXTURE_BINDING_ARRAY is enabled, so skip the walk otherwise).
        let mut arrayed_texture_views = Vec::new();
        let mut arrayed_samplers = Vec::new();
        if self.features.contains(Features::TEXTURE_BINDING_ARRAY) {
            for entry in desc.entries.iter() {
                if let BindingResource::TextureViewArray(array) = entry.resource {
                    arrayed_texture_views.extend(array.iter().map(|view| view.inner.as_core().id));
                }
                if let BindingResource::SamplerArray(array) = entry.resource {
                    arrayed_samplers.extend(array.iter().map(|sampler| sampler.inner.as_core().id));
                }
            }
        }
        // Cursors over the flattened buffers, advanced as entries are mapped.
        let mut remaining_arrayed_texture_views = &arrayed_texture_views[..];
        let mut remaining_arrayed_samplers = &arrayed_samplers[..];

        // Pre-pass 2: flatten buffer-binding arrays.
        let mut arrayed_buffer_bindings = Vec::new();
        if self.features.contains(Features::BUFFER_BINDING_ARRAY) {
            for entry in desc.entries.iter() {
                if let BindingResource::BufferArray(array) = entry.resource {
                    arrayed_buffer_bindings.extend(array.iter().map(|binding| bm::BufferBinding {
                        buffer: binding.buffer.inner.as_core().id,
                        offset: binding.offset,
                        size: binding.size.map(wgt::BufferSize::get),
                    }));
                }
            }
        }
        let mut remaining_arrayed_buffer_bindings = &arrayed_buffer_bindings[..];

        // Pre-pass 3: flatten acceleration-structure (TLAS) arrays.
        let mut arrayed_acceleration_structures = Vec::new();
        if self
            .features
            .contains(Features::ACCELERATION_STRUCTURE_BINDING_ARRAY)
        {
            for entry in desc.entries.iter() {
                if let BindingResource::AccelerationStructureArray(array) = entry.resource {
                    arrayed_acceleration_structures
                        .extend(array.iter().map(|tlas| tlas.inner.as_core().id));
                }
            }
        }
        let mut remaining_arrayed_acceleration_structures = &arrayed_acceleration_structures[..];

        // Main pass: translate each entry, taking `array.len()` items off the
        // front of the corresponding flattened buffer for array bindings.
        let entries = desc
            .entries
            .iter()
            .map(|entry| bm::BindGroupEntry {
                binding: entry.binding,
                resource: match entry.resource {
                    BindingResource::Buffer(BufferBinding {
                        buffer,
                        offset,
                        size,
                    }) => bm::BindingResource::Buffer(bm::BufferBinding {
                        buffer: buffer.inner.as_core().id,
                        offset,
                        size: size.map(wgt::BufferSize::get),
                    }),
                    BindingResource::BufferArray(array) => {
                        let slice = &remaining_arrayed_buffer_bindings[..array.len()];
                        remaining_arrayed_buffer_bindings =
                            &remaining_arrayed_buffer_bindings[array.len()..];
                        bm::BindingResource::BufferArray(Borrowed(slice))
                    }
                    BindingResource::Sampler(sampler) => {
                        bm::BindingResource::Sampler(sampler.inner.as_core().id)
                    }
                    BindingResource::SamplerArray(array) => {
                        let slice = &remaining_arrayed_samplers[..array.len()];
                        remaining_arrayed_samplers = &remaining_arrayed_samplers[array.len()..];
                        bm::BindingResource::SamplerArray(Borrowed(slice))
                    }
                    BindingResource::TextureView(texture_view) => {
                        bm::BindingResource::TextureView(texture_view.inner.as_core().id)
                    }
                    BindingResource::TextureViewArray(array) => {
                        let slice = &remaining_arrayed_texture_views[..array.len()];
                        remaining_arrayed_texture_views =
                            &remaining_arrayed_texture_views[array.len()..];
                        bm::BindingResource::TextureViewArray(Borrowed(slice))
                    }
                    BindingResource::AccelerationStructure(acceleration_structure) => {
                        bm::BindingResource::AccelerationStructure(
                            acceleration_structure.inner.as_core().id,
                        )
                    }
                    BindingResource::AccelerationStructureArray(array) => {
                        let slice = &remaining_arrayed_acceleration_structures[..array.len()];
                        remaining_arrayed_acceleration_structures =
                            &remaining_arrayed_acceleration_structures[array.len()..];
                        bm::BindingResource::AccelerationStructureArray(Borrowed(slice))
                    }
                    BindingResource::ExternalTexture(external_texture) => {
                        bm::BindingResource::ExternalTexture(external_texture.inner.as_core().id)
                    }
                },
            })
            .collect::<Vec<_>>();
        let descriptor = bm::BindGroupDescriptor {
            label: desc.label.as_ref().map(|label| Borrowed(&label[..])),
            layout: desc.layout.inner.as_core().id,
            entries: Borrowed(&entries),
        };

        let (id, error) = self
            .context
            .0
            .device_create_bind_group(self.id, &descriptor, None);
        // Errors are routed to the device's error sink; the returned id is
        // still wrapped so callers get a usable (error-tagged) handle.
        if let Some(cause) = error {
            self.context.handle_error(
                &self.error_sink,
                cause,
                desc.label,
                "Device::create_bind_group",
            );
        }
        CoreBindGroup {
            context: self.context.clone(),
            id,
        }
        .into()
    }
1292
1293 fn create_pipeline_layout(
1294 &self,
1295 desc: &crate::PipelineLayoutDescriptor<'_>,
1296 ) -> dispatch::DispatchPipelineLayout {
1297 assert!(
1300 desc.bind_group_layouts.len() <= wgc::MAX_BIND_GROUPS,
1301 "Bind group layout count {} exceeds device bind group limit {}",
1302 desc.bind_group_layouts.len(),
1303 wgc::MAX_BIND_GROUPS
1304 );
1305
1306 let temp_layouts = desc
1307 .bind_group_layouts
1308 .iter()
1309 .map(|bgl| bgl.map(|bgl| bgl.inner.as_core().id))
1310 .collect::<ArrayVec<_, { wgc::MAX_BIND_GROUPS }>>();
1311 let descriptor = wgc::binding_model::PipelineLayoutDescriptor {
1312 label: desc.label.map(Borrowed),
1313 bind_group_layouts: Borrowed(&temp_layouts),
1314 immediate_size: desc.immediate_size,
1315 };
1316
1317 let (id, error) = self
1318 .context
1319 .0
1320 .device_create_pipeline_layout(self.id, &descriptor, None);
1321 if let Some(cause) = error {
1322 self.context.handle_error(
1323 &self.error_sink,
1324 cause,
1325 desc.label,
1326 "Device::create_pipeline_layout",
1327 );
1328 }
1329 CorePipelineLayout {
1330 context: self.context.clone(),
1331 id,
1332 }
1333 .into()
1334 }
1335
    // Creates a render pipeline. The public descriptor is translated into
    // wgpu-core's form: vertex buffer layouts and per-stage
    // pipeline-overridable constants are collected into owned storage first so
    // the core descriptor can borrow them.
    fn create_render_pipeline(
        &self,
        desc: &crate::RenderPipelineDescriptor<'_>,
    ) -> dispatch::DispatchRenderPipeline {
        use wgc::pipeline as pipe;

        let vertex_buffers: ArrayVec<_, { wgc::MAX_VERTEX_BUFFERS }> = desc
            .vertex
            .buffers
            .iter()
            .map(|vbuf| pipe::VertexBufferLayout {
                array_stride: vbuf.array_stride,
                step_mode: vbuf.step_mode,
                attributes: Borrowed(vbuf.attributes),
            })
            .collect();

        // Override constants for the vertex stage, keyed by name.
        let vert_constants = desc
            .vertex
            .compilation_options
            .constants
            .iter()
            .map(|&(key, value)| (String::from(key), value))
            .collect();

        let descriptor = pipe::RenderPipelineDescriptor {
            label: desc.label.map(Borrowed),
            layout: desc.layout.map(|layout| layout.inner.as_core().id),
            vertex: pipe::VertexState {
                stage: pipe::ProgrammableStageDescriptor {
                    module: desc.vertex.module.inner.as_core().id,
                    entry_point: desc.vertex.entry_point.map(Borrowed),
                    constants: vert_constants,
                    zero_initialize_workgroup_memory: desc
                        .vertex
                        .compilation_options
                        .zero_initialize_workgroup_memory,
                },
                buffers: Borrowed(&vertex_buffers),
            },
            primitive: desc.primitive,
            depth_stencil: desc.depth_stencil.clone(),
            multisample: desc.multisample,
            // The fragment stage is optional; its constants are collected
            // inside the closure so nothing is built when it is absent.
            fragment: desc.fragment.as_ref().map(|frag| {
                let frag_constants = frag
                    .compilation_options
                    .constants
                    .iter()
                    .map(|&(key, value)| (String::from(key), value))
                    .collect();
                pipe::FragmentState {
                    stage: pipe::ProgrammableStageDescriptor {
                        module: frag.module.inner.as_core().id,
                        entry_point: frag.entry_point.map(Borrowed),
                        constants: frag_constants,
                        zero_initialize_workgroup_memory: frag
                            .compilation_options
                            .zero_initialize_workgroup_memory,
                    },
                    targets: Borrowed(frag.targets),
                }
            }),
            multiview_mask: desc.multiview_mask,
            cache: desc.cache.map(|cache| cache.inner.as_core().id),
        };

        let (id, error) = self
            .context
            .0
            .device_create_render_pipeline(self.id, &descriptor, None);
        if let Some(cause) = error {
            // Internal (shader translation) failures indicate a wgpu bug, so
            // log them loudly before the regular error path runs.
            if let wgc::pipeline::CreateRenderPipelineError::Internal { stage, ref error } = cause {
                log::error!("Shader translation error for stage {stage:?}: {error}");
                log::error!("Please report it to https://github.com/gfx-rs/wgpu");
            }
            self.context.handle_error(
                &self.error_sink,
                cause,
                desc.label,
                "Device::create_render_pipeline",
            );
        }
        CoreRenderPipeline {
            context: self.context.clone(),
            id,
            error_sink: Arc::clone(&self.error_sink),
        }
        .into()
    }
1425
1426 fn create_mesh_pipeline(
1427 &self,
1428 desc: &crate::MeshPipelineDescriptor<'_>,
1429 ) -> dispatch::DispatchRenderPipeline {
1430 use wgc::pipeline as pipe;
1431
1432 let mesh_constants = desc
1433 .mesh
1434 .compilation_options
1435 .constants
1436 .iter()
1437 .map(|&(key, value)| (String::from(key), value))
1438 .collect();
1439 let descriptor = pipe::MeshPipelineDescriptor {
1440 label: desc.label.map(Borrowed),
1441 task: desc.task.as_ref().map(|task| {
1442 let task_constants = task
1443 .compilation_options
1444 .constants
1445 .iter()
1446 .map(|&(key, value)| (String::from(key), value))
1447 .collect();
1448 pipe::TaskState {
1449 stage: pipe::ProgrammableStageDescriptor {
1450 module: task.module.inner.as_core().id,
1451 entry_point: task.entry_point.map(Borrowed),
1452 constants: task_constants,
1453 zero_initialize_workgroup_memory: desc
1454 .mesh
1455 .compilation_options
1456 .zero_initialize_workgroup_memory,
1457 },
1458 }
1459 }),
1460 mesh: pipe::MeshState {
1461 stage: pipe::ProgrammableStageDescriptor {
1462 module: desc.mesh.module.inner.as_core().id,
1463 entry_point: desc.mesh.entry_point.map(Borrowed),
1464 constants: mesh_constants,
1465 zero_initialize_workgroup_memory: desc
1466 .mesh
1467 .compilation_options
1468 .zero_initialize_workgroup_memory,
1469 },
1470 },
1471 layout: desc.layout.map(|layout| layout.inner.as_core().id),
1472 primitive: desc.primitive,
1473 depth_stencil: desc.depth_stencil.clone(),
1474 multisample: desc.multisample,
1475 fragment: desc.fragment.as_ref().map(|frag| {
1476 let frag_constants = frag
1477 .compilation_options
1478 .constants
1479 .iter()
1480 .map(|&(key, value)| (String::from(key), value))
1481 .collect();
1482 pipe::FragmentState {
1483 stage: pipe::ProgrammableStageDescriptor {
1484 module: frag.module.inner.as_core().id,
1485 entry_point: frag.entry_point.map(Borrowed),
1486 constants: frag_constants,
1487 zero_initialize_workgroup_memory: frag
1488 .compilation_options
1489 .zero_initialize_workgroup_memory,
1490 },
1491 targets: Borrowed(frag.targets),
1492 }
1493 }),
1494 multiview: desc.multiview,
1495 cache: desc.cache.map(|cache| cache.inner.as_core().id),
1496 };
1497
1498 let (id, error) = self
1499 .context
1500 .0
1501 .device_create_mesh_pipeline(self.id, &descriptor, None);
1502 if let Some(cause) = error {
1503 if let wgc::pipeline::CreateRenderPipelineError::Internal { stage, ref error } = cause {
1504 log::error!("Shader translation error for stage {stage:?}: {error}");
1505 log::error!("Please report it to https://github.com/gfx-rs/wgpu");
1506 }
1507 self.context.handle_error(
1508 &self.error_sink,
1509 cause,
1510 desc.label,
1511 "Device::create_render_pipeline",
1512 );
1513 }
1514 CoreRenderPipeline {
1515 context: self.context.clone(),
1516 id,
1517 error_sink: Arc::clone(&self.error_sink),
1518 }
1519 .into()
1520 }
1521
1522 fn create_compute_pipeline(
1523 &self,
1524 desc: &crate::ComputePipelineDescriptor<'_>,
1525 ) -> dispatch::DispatchComputePipeline {
1526 use wgc::pipeline as pipe;
1527
1528 let constants = desc
1529 .compilation_options
1530 .constants
1531 .iter()
1532 .map(|&(key, value)| (String::from(key), value))
1533 .collect();
1534
1535 let descriptor = pipe::ComputePipelineDescriptor {
1536 label: desc.label.map(Borrowed),
1537 layout: desc.layout.map(|pll| pll.inner.as_core().id),
1538 stage: pipe::ProgrammableStageDescriptor {
1539 module: desc.module.inner.as_core().id,
1540 entry_point: desc.entry_point.map(Borrowed),
1541 constants,
1542 zero_initialize_workgroup_memory: desc
1543 .compilation_options
1544 .zero_initialize_workgroup_memory,
1545 },
1546 cache: desc.cache.map(|cache| cache.inner.as_core().id),
1547 };
1548
1549 let (id, error) = self
1550 .context
1551 .0
1552 .device_create_compute_pipeline(self.id, &descriptor, None);
1553 if let Some(cause) = error {
1554 if let wgc::pipeline::CreateComputePipelineError::Internal(ref error) = cause {
1555 log::error!(
1556 "Shader translation error for stage {:?}: {}",
1557 wgt::ShaderStages::COMPUTE,
1558 error
1559 );
1560 log::error!("Please report it to https://github.com/gfx-rs/wgpu");
1561 }
1562 self.context.handle_error(
1563 &self.error_sink,
1564 cause,
1565 desc.label,
1566 "Device::create_compute_pipeline",
1567 );
1568 }
1569 CoreComputePipeline {
1570 context: self.context.clone(),
1571 id,
1572 error_sink: Arc::clone(&self.error_sink),
1573 }
1574 .into()
1575 }
1576
1577 unsafe fn create_pipeline_cache(
1578 &self,
1579 desc: &crate::PipelineCacheDescriptor<'_>,
1580 ) -> dispatch::DispatchPipelineCache {
1581 use wgc::pipeline as pipe;
1582
1583 let descriptor = pipe::PipelineCacheDescriptor {
1584 label: desc.label.map(Borrowed),
1585 data: desc.data.map(Borrowed),
1586 fallback: desc.fallback,
1587 };
1588 let (id, error) = unsafe {
1589 self.context
1590 .0
1591 .device_create_pipeline_cache(self.id, &descriptor, None)
1592 };
1593 if let Some(cause) = error {
1594 self.context.handle_error(
1595 &self.error_sink,
1596 cause,
1597 desc.label,
1598 "Device::device_create_pipeline_cache_init",
1599 );
1600 }
1601 CorePipelineCache {
1602 context: self.context.clone(),
1603 id,
1604 }
1605 .into()
1606 }
1607
1608 fn create_buffer(&self, desc: &crate::BufferDescriptor<'_>) -> dispatch::DispatchBuffer {
1609 let (id, error) = self.context.0.device_create_buffer(
1610 self.id,
1611 &desc.map_label(|l| l.map(Borrowed)),
1612 None,
1613 );
1614 if let Some(cause) = error {
1615 self.context
1616 .handle_error(&self.error_sink, cause, desc.label, "Device::create_buffer");
1617 }
1618
1619 CoreBuffer {
1620 context: self.context.clone(),
1621 id,
1622 error_sink: Arc::clone(&self.error_sink),
1623 }
1624 .into()
1625 }
1626
1627 fn create_texture(&self, desc: &crate::TextureDescriptor<'_>) -> dispatch::DispatchTexture {
1628 let wgt_desc = desc.map_label_and_view_formats(|l| l.map(Borrowed), |v| v.to_vec());
1629 let (id, error) = self
1630 .context
1631 .0
1632 .device_create_texture(self.id, &wgt_desc, None);
1633 if let Some(cause) = error {
1634 self.context.handle_error(
1635 &self.error_sink,
1636 cause,
1637 desc.label,
1638 "Device::create_texture",
1639 );
1640 }
1641
1642 CoreTexture {
1643 context: self.context.clone(),
1644 id,
1645 error_sink: Arc::clone(&self.error_sink),
1646 }
1647 .into()
1648 }
1649
1650 fn create_external_texture(
1651 &self,
1652 desc: &crate::ExternalTextureDescriptor<'_>,
1653 planes: &[&crate::TextureView],
1654 ) -> dispatch::DispatchExternalTexture {
1655 let wgt_desc = desc.map_label(|l| l.map(Borrowed));
1656 let planes = planes
1657 .iter()
1658 .map(|plane| plane.inner.as_core().id)
1659 .collect::<Vec<_>>();
1660 let (id, error) = self
1661 .context
1662 .0
1663 .device_create_external_texture(self.id, &wgt_desc, &planes, None);
1664 if let Some(cause) = error {
1665 self.context.handle_error(
1666 &self.error_sink,
1667 cause,
1668 desc.label,
1669 "Device::create_external_texture",
1670 );
1671 }
1672
1673 CoreExternalTexture {
1674 context: self.context.clone(),
1675 id,
1676 }
1677 .into()
1678 }
1679
1680 fn create_blas(
1681 &self,
1682 desc: &crate::CreateBlasDescriptor<'_>,
1683 sizes: crate::BlasGeometrySizeDescriptors,
1684 ) -> (Option<u64>, dispatch::DispatchBlas) {
1685 let global = &self.context.0;
1686 let (id, handle, error) =
1687 global.device_create_blas(self.id, &desc.map_label(|l| l.map(Borrowed)), sizes, None);
1688 if let Some(cause) = error {
1689 self.context
1690 .handle_error(&self.error_sink, cause, desc.label, "Device::create_blas");
1691 }
1692 (
1693 handle,
1694 CoreBlas {
1695 context: self.context.clone(),
1696 id,
1697 error_sink: Arc::clone(&self.error_sink),
1698 }
1699 .into(),
1700 )
1701 }
1702
1703 fn create_tlas(&self, desc: &crate::CreateTlasDescriptor<'_>) -> dispatch::DispatchTlas {
1704 let global = &self.context.0;
1705 let (id, error) =
1706 global.device_create_tlas(self.id, &desc.map_label(|l| l.map(Borrowed)), None);
1707 if let Some(cause) = error {
1708 self.context
1709 .handle_error(&self.error_sink, cause, desc.label, "Device::create_tlas");
1710 }
1711 CoreTlas {
1712 context: self.context.clone(),
1713 id,
1714 }
1716 .into()
1717 }
1718
    // Creates a sampler. The public descriptor's individual address-mode
    // fields are packed into wgpu-core's three-element array form.
    fn create_sampler(&self, desc: &crate::SamplerDescriptor<'_>) -> dispatch::DispatchSampler {
        let descriptor = wgc::resource::SamplerDescriptor {
            label: desc.label.map(Borrowed),
            address_modes: [
                desc.address_mode_u,
                desc.address_mode_v,
                desc.address_mode_w,
            ],
            mag_filter: desc.mag_filter,
            min_filter: desc.min_filter,
            mipmap_filter: desc.mipmap_filter,
            lod_min_clamp: desc.lod_min_clamp,
            lod_max_clamp: desc.lod_max_clamp,
            compare: desc.compare,
            anisotropy_clamp: desc.anisotropy_clamp,
            border_color: desc.border_color,
        };

        let (id, error) = self
            .context
            .0
            .device_create_sampler(self.id, &descriptor, None);
        if let Some(cause) = error {
            self.context.handle_error(
                &self.error_sink,
                cause,
                desc.label,
                "Device::create_sampler",
            );
        }
        CoreSampler {
            context: self.context.clone(),
            id,
        }
        .into()
    }
1755
1756 fn create_query_set(&self, desc: &crate::QuerySetDescriptor<'_>) -> dispatch::DispatchQuerySet {
1757 let (id, error) = self.context.0.device_create_query_set(
1758 self.id,
1759 &desc.map_label(|l| l.map(Borrowed)),
1760 None,
1761 );
1762 if let Some(cause) = error {
1763 self.context
1764 .handle_error_nolabel(&self.error_sink, cause, "Device::create_query_set");
1765 }
1766 CoreQuerySet {
1767 context: self.context.clone(),
1768 id,
1769 }
1770 .into()
1771 }
1772
1773 fn create_command_encoder(
1774 &self,
1775 desc: &crate::CommandEncoderDescriptor<'_>,
1776 ) -> dispatch::DispatchCommandEncoder {
1777 let (id, error) = self.context.0.device_create_command_encoder(
1778 self.id,
1779 &desc.map_label(|l| l.map(Borrowed)),
1780 None,
1781 );
1782 if let Some(cause) = error {
1783 self.context.handle_error(
1784 &self.error_sink,
1785 cause,
1786 desc.label,
1787 "Device::create_command_encoder",
1788 );
1789 }
1790
1791 CoreCommandEncoder {
1792 context: self.context.clone(),
1793 id,
1794 error_sink: Arc::clone(&self.error_sink),
1795 }
1796 .into()
1797 }
1798
1799 fn create_render_bundle_encoder(
1800 &self,
1801 desc: &crate::RenderBundleEncoderDescriptor<'_>,
1802 ) -> dispatch::DispatchRenderBundleEncoder {
1803 let descriptor = wgc::command::RenderBundleEncoderDescriptor {
1804 label: desc.label.map(Borrowed),
1805 color_formats: Borrowed(desc.color_formats),
1806 depth_stencil: desc.depth_stencil,
1807 sample_count: desc.sample_count,
1808 multiview: desc.multiview,
1809 };
1810 let encoder = match wgc::command::RenderBundleEncoder::new(&descriptor, self.id) {
1811 Ok(encoder) => encoder,
1812 Err(e) => panic!("Error in Device::create_render_bundle_encoder: {e}"),
1813 };
1814
1815 CoreRenderBundleEncoder {
1816 context: self.context.clone(),
1817 encoder,
1818 id: crate::cmp::Identifier::create(),
1819 }
1820 .into()
1821 }
1822
1823 fn set_device_lost_callback(&self, device_lost_callback: dispatch::BoxDeviceLostCallback) {
1824 self.context
1825 .0
1826 .device_set_device_lost_closure(self.id, device_lost_callback);
1827 }
1828
1829 fn on_uncaptured_error(&self, handler: Arc<dyn crate::UncapturedErrorHandler>) {
1830 let mut error_sink = self.error_sink.lock();
1831 error_sink.uncaptured_handler = Some(handler);
1832 }
1833
1834 fn push_error_scope(&self, filter: crate::ErrorFilter) -> u32 {
1835 let mut error_sink = self.error_sink.lock();
1836 let thread_id = thread_id::ThreadId::current();
1837 let scopes = error_sink.scopes.entry(thread_id).or_default();
1838 let index = scopes
1839 .len()
1840 .try_into()
1841 .expect("Greater than 2^32 nested error scopes");
1842 scopes.push(ErrorScope {
1843 error: None,
1844 filter,
1845 });
1846 index
1847 }
1848
    // Pops the error scope whose depth `index` was returned by the matching
    // `push_error_scope`, resolving to its captured error (if any).
    //
    // Scopes are thread-local (see `push_error_scope`), and a mismatched pop
    // is a programming error: it panics — unless the thread is already
    // panicking, in which case we quietly return `None` to avoid a double
    // panic (which would abort the process).
    fn pop_error_scope(&self, index: u32) -> Pin<Box<dyn dispatch::PopErrorScopeFuture>> {
        let mut error_sink = self.error_sink.lock();

        let is_panicking = crate::util::is_panicking();
        let thread_id = thread_id::ThreadId::current();
        let err = "Mismatched pop_error_scope call: no error scope for this thread. Error scopes are thread-local.";
        let scopes = match error_sink.scopes.get_mut(&thread_id) {
            Some(s) => s,
            None => {
                if !is_panicking {
                    panic!("{err}");
                } else {
                    return Box::pin(ready(None));
                }
            }
        };
        if scopes.is_empty() && !is_panicking {
            panic!("{err}");
        }
        // `index` must refer to the innermost (most recently pushed) scope.
        if index as usize != scopes.len() - 1 && !is_panicking {
            panic!(
                "Mismatched pop_error_scope call: error scopes must be popped in reverse order."
            );
        }

        let scope = match scopes.pop() {
            Some(s) => s,
            // An empty stack can only be reached here when the emptiness
            // panic above was skipped because the thread is unwinding.
            None if !is_panicking => unreachable!(),
            None => return Box::pin(ready(None)),
        };

        Box::pin(ready(scope.error))
    }
1888
    // Begins a graphics-debugger capture by forwarding to wgpu-core. The
    // safety requirements are inherited from the underlying backend capture
    // API — NOTE(review): confirm exact contract against the trait docs.
    unsafe fn start_graphics_debugger_capture(&self) {
        unsafe {
            self.context
                .0
                .device_start_graphics_debugger_capture(self.id)
        };
    }
1896
    // Ends a graphics-debugger capture previously started with
    // `start_graphics_debugger_capture`; simply forwards to wgpu-core.
    unsafe fn stop_graphics_debugger_capture(&self) {
        unsafe {
            self.context
                .0
                .device_stop_graphics_debugger_capture(self.id)
        };
    }
1904
1905 fn poll(&self, poll_type: wgt::PollType<u64>) -> Result<crate::PollStatus, crate::PollError> {
1906 match self.context.0.device_poll(self.id, poll_type) {
1907 Ok(status) => Ok(status),
1908 Err(err) => {
1909 if let Some(poll_error) = err.to_poll_error() {
1910 return Err(poll_error);
1911 }
1912
1913 self.context.handle_error_fatal(err, "Device::poll")
1914 }
1915 }
1916 }
1917
    // Snapshot of wgpu-core's internal counters for this device.
    fn get_internal_counters(&self) -> crate::InternalCounters {
        self.context.0.device_get_internal_counters(self.id)
    }
1921
    // Allocator report from wgpu-core, if the backend can produce one.
    fn generate_allocator_report(&self) -> Option<wgt::AllocatorReport> {
        self.context.0.device_generate_allocator_report(self.id)
    }
1925
    // Eagerly destroys the device; the wrapper itself remains valid until drop.
    fn destroy(&self) {
        self.context.0.device_destroy(self.id);
    }
1929}
1930
impl Drop for CoreDevice {
    fn drop(&mut self) {
        // Release this wrapper's reference to the wgpu-core device.
        self.context.0.device_drop(self.id)
    }
}
1936
impl dispatch::QueueInterface for CoreQueue {
    // Schedules a copy of `data` into `buffer` at `offset`; errors go to the
    // queue's error sink.
    fn write_buffer(
        &self,
        buffer: &dispatch::DispatchBuffer,
        offset: crate::BufferAddress,
        data: &[u8],
    ) {
        let buffer = buffer.as_core();

        match self
            .context
            .0
            .queue_write_buffer(self.id, buffer.id, offset, data)
        {
            Ok(()) => (),
            Err(err) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, err, "Queue::write_buffer")
            }
        }
    }

    // Allocates a queue-owned staging buffer of `size` bytes and returns a
    // handle wrapping its mapped pointer; `None` on failure (already
    // reported). This and the two methods below all report under the
    // "Queue::write_buffer_with" context label they share.
    fn create_staging_buffer(
        &self,
        size: crate::BufferSize,
    ) -> Option<dispatch::DispatchQueueWriteBuffer> {
        match self
            .context
            .0
            .queue_create_staging_buffer(self.id, size, None)
        {
            Ok((buffer_id, ptr)) => Some(
                CoreQueueWriteBuffer {
                    buffer_id,
                    mapping: CoreBufferMappedRange {
                        ptr,
                        size: size.get() as usize,
                    },
                }
                .into(),
            ),
            Err(err) => {
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    err,
                    "Queue::write_buffer_with",
                );
                None
            }
        }
    }

    // Checks that a write of `size` bytes at `offset` into `buffer` would be
    // valid; `None` signals a (reported) validation failure.
    fn validate_write_buffer(
        &self,
        buffer: &dispatch::DispatchBuffer,
        offset: wgt::BufferAddress,
        size: wgt::BufferSize,
    ) -> Option<()> {
        let buffer = buffer.as_core();

        match self
            .context
            .0
            .queue_validate_write_buffer(self.id, buffer.id, offset, size)
        {
            Ok(()) => Some(()),
            Err(err) => {
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    err,
                    "Queue::write_buffer_with",
                );
                None
            }
        }
    }

    // Schedules the contents of a previously created staging buffer to be
    // copied into `buffer` at `offset`.
    fn write_staging_buffer(
        &self,
        buffer: &dispatch::DispatchBuffer,
        offset: crate::BufferAddress,
        staging_buffer: &dispatch::DispatchQueueWriteBuffer,
    ) {
        let buffer = buffer.as_core();
        let staging_buffer = staging_buffer.as_core();

        match self.context.0.queue_write_staging_buffer(
            self.id,
            buffer.id,
            offset,
            staging_buffer.buffer_id,
        ) {
            Ok(()) => (),
            Err(err) => {
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    err,
                    "Queue::write_buffer_with",
                );
            }
        }
    }

    // Schedules a CPU-to-texture upload of `data`, interpreted per
    // `data_layout`, into the region described by `texture` and `size`.
    fn write_texture(
        &self,
        texture: crate::TexelCopyTextureInfo<'_>,
        data: &[u8],
        data_layout: crate::TexelCopyBufferLayout,
        size: crate::Extent3d,
    ) {
        match self.context.0.queue_write_texture(
            self.id,
            &map_texture_copy_view(texture),
            data,
            &data_layout,
            &size,
        ) {
            Ok(()) => (),
            Err(err) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, err, "Queue::write_texture")
            }
        }
    }

    // Only compiled on web targets; the body is additionally gated on webgl,
    // so on non-webgl web builds the parameters are intentionally unused.
    #[cfg(web)]
    #[cfg_attr(not(webgl), expect(unused_variables))]
    fn copy_external_image_to_texture(
        &self,
        source: &crate::CopyExternalImageSourceInfo,
        dest: crate::CopyExternalImageDestInfo<&crate::api::Texture>,
        size: crate::Extent3d,
    ) {
        #[cfg(webgl)]
        match self.context.0.queue_copy_external_image_to_texture(
            self.id,
            source,
            map_texture_tagged_copy_view(dest),
            size,
        ) {
            Ok(()) => (),
            Err(err) => self.context.handle_error_nolabel(
                &self.error_sink,
                err,
                "Queue::copy_external_image_to_texture",
            ),
        }
    }

    // Submits the command buffers and returns the submission index. Even on
    // error, the index wgpu-core reports is returned so callers can poll.
    fn submit(
        &self,
        command_buffers: &mut dyn Iterator<Item = dispatch::DispatchCommandBuffer>,
    ) -> u64 {
        let temp_command_buffers = command_buffers.collect::<SmallVec<[_; 4]>>();
        let command_buffer_ids = temp_command_buffers
            .iter()
            .map(|cmdbuf| cmdbuf.as_core().id)
            .collect::<SmallVec<[_; 4]>>();

        let index = match self.context.0.queue_submit(self.id, &command_buffer_ids) {
            Ok(index) => index,
            Err((index, err)) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, err, "Queue::submit");
                index
            }
        };

        // Keep the command-buffer handles alive until after `queue_submit`
        // has consumed their ids.
        drop(temp_command_buffers);

        index
    }

    // Timestamp-query period for this queue, as reported by wgpu-core.
    fn get_timestamp_period(&self) -> f32 {
        self.context.0.queue_get_timestamp_period(self.id)
    }

    // Registers `callback` to run once currently submitted work completes.
    fn on_submitted_work_done(&self, callback: dispatch::BoxSubmittedWorkDoneCallback) {
        self.context
            .0
            .queue_on_submitted_work_done(self.id, callback);
    }

    // Builds a compacted copy of `blas` on this queue, returning the optional
    // raw handle and the new BLAS wrapper.
    fn compact_blas(&self, blas: &dispatch::DispatchBlas) -> (Option<u64>, dispatch::DispatchBlas) {
        let (id, handle, error) =
            self.context
                .0
                .queue_compact_blas(self.id, blas.as_core().id, None);

        if let Some(cause) = error {
            self.context
                .handle_error_nolabel(&self.error_sink, cause, "Queue::compact_blas");
        }
        (
            handle,
            CoreBlas {
                context: self.context.clone(),
                id,
                error_sink: Arc::clone(&self.error_sink),
            }
            .into(),
        )
    }
}
2143
impl Drop for CoreQueue {
    fn drop(&mut self) {
        // Release this wrapper's reference to the wgpu-core queue.
        self.context.0.queue_drop(self.id)
    }
}
2149
2150impl dispatch::ShaderModuleInterface for CoreShaderModule {
2151 fn get_compilation_info(&self) -> Pin<Box<dyn dispatch::ShaderCompilationInfoFuture>> {
2152 Box::pin(ready(self.compilation_info.clone()))
2153 }
2154}
2155
impl Drop for CoreShaderModule {
    fn drop(&mut self) {
        // Release this wrapper's reference to the wgpu-core shader module.
        self.context.0.shader_module_drop(self.id)
    }
}
2161
// No trait methods to implement; cleanup happens in the `Drop` impl below.
impl dispatch::BindGroupLayoutInterface for CoreBindGroupLayout {}
2163
impl Drop for CoreBindGroupLayout {
    fn drop(&mut self) {
        // Release this wrapper's reference to the wgpu-core bind group layout.
        self.context.0.bind_group_layout_drop(self.id)
    }
}
2169
// No trait methods to implement; cleanup happens in the `Drop` impl below.
impl dispatch::BindGroupInterface for CoreBindGroup {}
2171
impl Drop for CoreBindGroup {
    fn drop(&mut self) {
        // Release this wrapper's reference to the wgpu-core bind group.
        self.context.0.bind_group_drop(self.id)
    }
}
2177
// No trait methods to implement; cleanup happens in the `Drop` impl below.
impl dispatch::TextureViewInterface for CoreTextureView {}
2179
impl Drop for CoreTextureView {
    fn drop(&mut self) {
        // Release this wrapper's reference to the wgpu-core texture view.
        self.context.0.texture_view_drop(self.id);
    }
}
2185
impl dispatch::ExternalTextureInterface for CoreExternalTexture {
    // Eagerly destroys the external texture; the wrapper stays valid until drop.
    fn destroy(&self) {
        self.context.0.external_texture_destroy(self.id);
    }
}
2191
impl Drop for CoreExternalTexture {
    fn drop(&mut self) {
        // Release this wrapper's reference to the wgpu-core external texture.
        self.context.0.external_texture_drop(self.id);
    }
}
2197
// No trait methods to implement; cleanup happens in the `Drop` impl below.
impl dispatch::SamplerInterface for CoreSampler {}
2199
impl Drop for CoreSampler {
    fn drop(&mut self) {
        // Release this wrapper's reference to the wgpu-core sampler.
        self.context.0.sampler_drop(self.id)
    }
}
2205
impl dispatch::BufferInterface for CoreBuffer {
    // Begins an asynchronous map of `range` for reading or writing; the
    // callback fires when the map resolves.
    fn map_async(
        &self,
        mode: crate::MapMode,
        range: Range<crate::BufferAddress>,
        callback: dispatch::BufferMapCallback,
    ) {
        let operation = wgc::resource::BufferMapOperation {
            host: match mode {
                MapMode::Read => wgc::device::HostMap::Read,
                MapMode::Write => wgc::device::HostMap::Write,
            },
            // Collapse wgpu-core's detailed status into the public
            // all-or-nothing `BufferAsyncError`.
            callback: Some(Box::new(|status| {
                let res = status.map_err(|_| crate::BufferAsyncError);
                callback(res);
            })),
        };

        match self.context.0.buffer_map_async(
            self.id,
            range.start,
            Some(range.end - range.start),
            operation,
        ) {
            Ok(_) => (),
            Err(cause) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, cause, "Buffer::map_async")
            }
        }
    }

    // Returns a pointer/length handle over an already-mapped sub-range. A
    // failure here is treated as fatal rather than routed to the error sink.
    fn get_mapped_range(
        &self,
        sub_range: Range<crate::BufferAddress>,
    ) -> dispatch::DispatchBufferMappedRange {
        let size = sub_range.end - sub_range.start;
        match self
            .context
            .0
            .buffer_get_mapped_range(self.id, sub_range.start, Some(size))
        {
            Ok((ptr, size)) => CoreBufferMappedRange {
                ptr,
                size: size as usize,
            }
            .into(),
            Err(err) => self
                .context
                .handle_error_fatal(err, "Buffer::get_mapped_range"),
        }
    }

    // Unmaps the buffer; failures are routed to the error sink.
    fn unmap(&self) {
        match self.context.0.buffer_unmap(self.id) {
            Ok(()) => (),
            Err(cause) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, cause, "Buffer::buffer_unmap")
            }
        }
    }

    // Eagerly destroys the buffer; the wrapper stays valid until drop.
    fn destroy(&self) {
        self.context.0.buffer_destroy(self.id);
    }
}
2273
impl Drop for CoreBuffer {
    fn drop(&mut self) {
        // Release this wrapper's reference to the wgpu-core buffer.
        self.context.0.buffer_drop(self.id)
    }
}
2279
2280impl dispatch::TextureInterface for CoreTexture {
2281 fn create_view(
2282 &self,
2283 desc: &crate::TextureViewDescriptor<'_>,
2284 ) -> dispatch::DispatchTextureView {
2285 let descriptor = wgc::resource::TextureViewDescriptor {
2286 label: desc.label.map(Borrowed),
2287 format: desc.format,
2288 dimension: desc.dimension,
2289 usage: desc.usage,
2290 range: wgt::ImageSubresourceRange {
2291 aspect: desc.aspect,
2292 base_mip_level: desc.base_mip_level,
2293 mip_level_count: desc.mip_level_count,
2294 base_array_layer: desc.base_array_layer,
2295 array_layer_count: desc.array_layer_count,
2296 },
2297 };
2298 let (id, error) = self
2299 .context
2300 .0
2301 .texture_create_view(self.id, &descriptor, None);
2302 if let Some(cause) = error {
2303 self.context
2304 .handle_error(&self.error_sink, cause, desc.label, "Texture::create_view");
2305 }
2306 CoreTextureView {
2307 context: self.context.clone(),
2308 id,
2309 }
2310 .into()
2311 }
2312
2313 fn destroy(&self) {
2314 self.context.0.texture_destroy(self.id);
2315 }
2316}
2317
impl Drop for CoreTexture {
    fn drop(&mut self) {
        // Release this wrapper's reference to the wgpu-core texture.
        self.context.0.texture_drop(self.id)
    }
}
2323
impl dispatch::BlasInterface for CoreBlas {
    // Asks wgpu-core to prepare this BLAS for compaction; `callback` fires
    // when preparation finishes, with errors collapsed into the public
    // all-or-nothing `BlasAsyncError`.
    fn prepare_compact_async(&self, callback: BlasCompactCallback) {
        let callback: Option<wgc::resource::BlasCompactCallback> =
            Some(Box::new(|status: BlasPrepareCompactResult| {
                let res = status.map_err(|_| crate::BlasAsyncError);
                callback(res);
            }));

        match self.context.0.blas_prepare_compact_async(self.id, callback) {
            Ok(_) => (),
            Err(cause) => self.context.handle_error_nolabel(
                &self.error_sink,
                cause,
                "Blas::prepare_compact_async",
            ),
        }
    }

    // Whether compaction preparation has completed; returns `false` when the
    // query itself fails (after reporting the error).
    fn ready_for_compaction(&self) -> bool {
        match self.context.0.ready_for_compaction(self.id) {
            Ok(ready) => ready,
            Err(cause) => {
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    cause,
                    "Blas::ready_for_compaction",
                );
                false
            }
        }
    }
}
2357
impl Drop for CoreBlas {
    fn drop(&mut self) {
        // Release this wrapper's reference to the wgpu-core BLAS.
        self.context.0.blas_drop(self.id)
    }
}
2363
// No trait methods to implement; cleanup happens in the `Drop` impl below.
impl dispatch::TlasInterface for CoreTlas {}
2365
impl Drop for CoreTlas {
    fn drop(&mut self) {
        // Release this wrapper's reference to the wgpu-core TLAS.
        self.context.0.tlas_drop(self.id)
    }
}
2371
// No trait methods to implement; cleanup happens in the `Drop` impl below.
impl dispatch::QuerySetInterface for CoreQuerySet {}
2373
impl Drop for CoreQuerySet {
    fn drop(&mut self) {
        // Release this wrapper's reference to the wgpu-core query set.
        self.context.0.query_set_drop(self.id)
    }
}
2379
// No trait methods to implement; cleanup happens in the `Drop` impl below.
impl dispatch::PipelineLayoutInterface for CorePipelineLayout {}
2381
impl Drop for CorePipelineLayout {
    fn drop(&mut self) {
        // Release this wrapper's reference to the wgpu-core pipeline layout.
        self.context.0.pipeline_layout_drop(self.id)
    }
}
2387
2388impl dispatch::RenderPipelineInterface for CoreRenderPipeline {
2389 fn get_bind_group_layout(&self, index: u32) -> dispatch::DispatchBindGroupLayout {
2390 let (id, error) = self
2391 .context
2392 .0
2393 .render_pipeline_get_bind_group_layout(self.id, index, None);
2394 if let Some(err) = error {
2395 self.context.handle_error_nolabel(
2396 &self.error_sink,
2397 err,
2398 "RenderPipeline::get_bind_group_layout",
2399 )
2400 }
2401 CoreBindGroupLayout {
2402 context: self.context.clone(),
2403 id,
2404 }
2405 .into()
2406 }
2407}
2408
impl Drop for CoreRenderPipeline {
    fn drop(&mut self) {
        // Release this handle's reference to the wgpu-core render pipeline.
        self.context.0.render_pipeline_drop(self.id)
    }
}
2414
2415impl dispatch::ComputePipelineInterface for CoreComputePipeline {
2416 fn get_bind_group_layout(&self, index: u32) -> dispatch::DispatchBindGroupLayout {
2417 let (id, error) = self
2418 .context
2419 .0
2420 .compute_pipeline_get_bind_group_layout(self.id, index, None);
2421 if let Some(err) = error {
2422 self.context.handle_error_nolabel(
2423 &self.error_sink,
2424 err,
2425 "ComputePipeline::get_bind_group_layout",
2426 )
2427 }
2428 CoreBindGroupLayout {
2429 context: self.context.clone(),
2430 id,
2431 }
2432 .into()
2433 }
2434}
2435
impl Drop for CoreComputePipeline {
    fn drop(&mut self) {
        // Release this handle's reference to the wgpu-core compute pipeline.
        self.context.0.compute_pipeline_drop(self.id)
    }
}
2441
impl dispatch::PipelineCacheInterface for CorePipelineCache {
    /// Serializes the pipeline cache contents, if the backend supports it.
    fn get_data(&self) -> Option<Vec<u8>> {
        self.context.0.pipeline_cache_get_data(self.id)
    }
}
2447
impl Drop for CorePipelineCache {
    fn drop(&mut self) {
        // Release this handle's reference to the wgpu-core pipeline cache.
        self.context.0.pipeline_cache_drop(self.id)
    }
}
2453
2454impl dispatch::CommandEncoderInterface for CoreCommandEncoder {
2455 fn copy_buffer_to_buffer(
2456 &self,
2457 source: &dispatch::DispatchBuffer,
2458 source_offset: crate::BufferAddress,
2459 destination: &dispatch::DispatchBuffer,
2460 destination_offset: crate::BufferAddress,
2461 copy_size: Option<crate::BufferAddress>,
2462 ) {
2463 let source = source.as_core();
2464 let destination = destination.as_core();
2465
2466 if let Err(cause) = self.context.0.command_encoder_copy_buffer_to_buffer(
2467 self.id,
2468 source.id,
2469 source_offset,
2470 destination.id,
2471 destination_offset,
2472 copy_size,
2473 ) {
2474 self.context.handle_error_nolabel(
2475 &self.error_sink,
2476 cause,
2477 "CommandEncoder::copy_buffer_to_buffer",
2478 );
2479 }
2480 }
2481
2482 fn copy_buffer_to_texture(
2483 &self,
2484 source: crate::TexelCopyBufferInfo<'_>,
2485 destination: crate::TexelCopyTextureInfo<'_>,
2486 copy_size: crate::Extent3d,
2487 ) {
2488 if let Err(cause) = self.context.0.command_encoder_copy_buffer_to_texture(
2489 self.id,
2490 &map_buffer_copy_view(source),
2491 &map_texture_copy_view(destination),
2492 ©_size,
2493 ) {
2494 self.context.handle_error_nolabel(
2495 &self.error_sink,
2496 cause,
2497 "CommandEncoder::copy_buffer_to_texture",
2498 );
2499 }
2500 }
2501
2502 fn copy_texture_to_buffer(
2503 &self,
2504 source: crate::TexelCopyTextureInfo<'_>,
2505 destination: crate::TexelCopyBufferInfo<'_>,
2506 copy_size: crate::Extent3d,
2507 ) {
2508 if let Err(cause) = self.context.0.command_encoder_copy_texture_to_buffer(
2509 self.id,
2510 &map_texture_copy_view(source),
2511 &map_buffer_copy_view(destination),
2512 ©_size,
2513 ) {
2514 self.context.handle_error_nolabel(
2515 &self.error_sink,
2516 cause,
2517 "CommandEncoder::copy_texture_to_buffer",
2518 );
2519 }
2520 }
2521
2522 fn copy_texture_to_texture(
2523 &self,
2524 source: crate::TexelCopyTextureInfo<'_>,
2525 destination: crate::TexelCopyTextureInfo<'_>,
2526 copy_size: crate::Extent3d,
2527 ) {
2528 if let Err(cause) = self.context.0.command_encoder_copy_texture_to_texture(
2529 self.id,
2530 &map_texture_copy_view(source),
2531 &map_texture_copy_view(destination),
2532 ©_size,
2533 ) {
2534 self.context.handle_error_nolabel(
2535 &self.error_sink,
2536 cause,
2537 "CommandEncoder::copy_texture_to_texture",
2538 );
2539 }
2540 }
2541
2542 fn begin_compute_pass(
2543 &self,
2544 desc: &crate::ComputePassDescriptor<'_>,
2545 ) -> dispatch::DispatchComputePass {
2546 let timestamp_writes =
2547 desc.timestamp_writes
2548 .as_ref()
2549 .map(|tw| wgc::command::PassTimestampWrites {
2550 query_set: tw.query_set.inner.as_core().id,
2551 beginning_of_pass_write_index: tw.beginning_of_pass_write_index,
2552 end_of_pass_write_index: tw.end_of_pass_write_index,
2553 });
2554
2555 let (pass, err) = self.context.0.command_encoder_begin_compute_pass(
2556 self.id,
2557 &wgc::command::ComputePassDescriptor {
2558 label: desc.label.map(Borrowed),
2559 timestamp_writes,
2560 },
2561 );
2562
2563 if let Some(cause) = err {
2564 self.context.handle_error(
2565 &self.error_sink,
2566 cause,
2567 desc.label,
2568 "CommandEncoder::begin_compute_pass",
2569 );
2570 }
2571
2572 CoreComputePass {
2573 context: self.context.clone(),
2574 pass,
2575 error_sink: self.error_sink.clone(),
2576 id: crate::cmp::Identifier::create(),
2577 }
2578 .into()
2579 }
2580
2581 fn begin_render_pass(
2582 &self,
2583 desc: &crate::RenderPassDescriptor<'_>,
2584 ) -> dispatch::DispatchRenderPass {
2585 let colors = desc
2586 .color_attachments
2587 .iter()
2588 .map(|ca| {
2589 ca.as_ref()
2590 .map(|at| wgc::command::RenderPassColorAttachment {
2591 view: at.view.inner.as_core().id,
2592 depth_slice: at.depth_slice,
2593 resolve_target: at.resolve_target.map(|view| view.inner.as_core().id),
2594 load_op: at.ops.load,
2595 store_op: at.ops.store,
2596 })
2597 })
2598 .collect::<Vec<_>>();
2599
2600 let depth_stencil = desc.depth_stencil_attachment.as_ref().map(|dsa| {
2601 wgc::command::RenderPassDepthStencilAttachment {
2602 view: dsa.view.inner.as_core().id,
2603 depth: map_pass_channel(dsa.depth_ops.as_ref()),
2604 stencil: map_pass_channel(dsa.stencil_ops.as_ref()),
2605 }
2606 });
2607
2608 let timestamp_writes =
2609 desc.timestamp_writes
2610 .as_ref()
2611 .map(|tw| wgc::command::PassTimestampWrites {
2612 query_set: tw.query_set.inner.as_core().id,
2613 beginning_of_pass_write_index: tw.beginning_of_pass_write_index,
2614 end_of_pass_write_index: tw.end_of_pass_write_index,
2615 });
2616
2617 let (pass, err) = self.context.0.command_encoder_begin_render_pass(
2618 self.id,
2619 &wgc::command::RenderPassDescriptor {
2620 label: desc.label.map(Borrowed),
2621 timestamp_writes: timestamp_writes.as_ref(),
2622 color_attachments: Borrowed(&colors),
2623 depth_stencil_attachment: depth_stencil.as_ref(),
2624 occlusion_query_set: desc.occlusion_query_set.map(|qs| qs.inner.as_core().id),
2625 multiview_mask: desc.multiview_mask,
2626 },
2627 );
2628
2629 if let Some(cause) = err {
2630 self.context.handle_error(
2631 &self.error_sink,
2632 cause,
2633 desc.label,
2634 "CommandEncoder::begin_render_pass",
2635 );
2636 }
2637
2638 CoreRenderPass {
2639 context: self.context.clone(),
2640 pass,
2641 error_sink: self.error_sink.clone(),
2642 id: crate::cmp::Identifier::create(),
2643 }
2644 .into()
2645 }
2646
2647 fn finish(&mut self) -> dispatch::DispatchCommandBuffer {
2648 let descriptor = wgt::CommandBufferDescriptor::default();
2649 let (id, opt_label_and_error) =
2650 self.context
2651 .0
2652 .command_encoder_finish(self.id, &descriptor, None);
2653 if let Some((label, cause)) = opt_label_and_error {
2654 self.context
2655 .handle_error(&self.error_sink, cause, Some(&label), "a CommandEncoder");
2656 }
2657 CoreCommandBuffer {
2658 context: self.context.clone(),
2659 id,
2660 }
2661 .into()
2662 }
2663
2664 fn clear_texture(
2665 &self,
2666 texture: &dispatch::DispatchTexture,
2667 subresource_range: &crate::ImageSubresourceRange,
2668 ) {
2669 let texture = texture.as_core();
2670
2671 if let Err(cause) =
2672 self.context
2673 .0
2674 .command_encoder_clear_texture(self.id, texture.id, subresource_range)
2675 {
2676 self.context.handle_error_nolabel(
2677 &self.error_sink,
2678 cause,
2679 "CommandEncoder::clear_texture",
2680 );
2681 }
2682 }
2683
2684 fn clear_buffer(
2685 &self,
2686 buffer: &dispatch::DispatchBuffer,
2687 offset: crate::BufferAddress,
2688 size: Option<crate::BufferAddress>,
2689 ) {
2690 let buffer = buffer.as_core();
2691
2692 if let Err(cause) = self
2693 .context
2694 .0
2695 .command_encoder_clear_buffer(self.id, buffer.id, offset, size)
2696 {
2697 self.context.handle_error_nolabel(
2698 &self.error_sink,
2699 cause,
2700 "CommandEncoder::fill_buffer",
2701 );
2702 }
2703 }
2704
2705 fn insert_debug_marker(&self, label: &str) {
2706 if let Err(cause) = self
2707 .context
2708 .0
2709 .command_encoder_insert_debug_marker(self.id, label)
2710 {
2711 self.context.handle_error_nolabel(
2712 &self.error_sink,
2713 cause,
2714 "CommandEncoder::insert_debug_marker",
2715 );
2716 }
2717 }
2718
2719 fn push_debug_group(&self, label: &str) {
2720 if let Err(cause) = self
2721 .context
2722 .0
2723 .command_encoder_push_debug_group(self.id, label)
2724 {
2725 self.context.handle_error_nolabel(
2726 &self.error_sink,
2727 cause,
2728 "CommandEncoder::push_debug_group",
2729 );
2730 }
2731 }
2732
2733 fn pop_debug_group(&self) {
2734 if let Err(cause) = self.context.0.command_encoder_pop_debug_group(self.id) {
2735 self.context.handle_error_nolabel(
2736 &self.error_sink,
2737 cause,
2738 "CommandEncoder::pop_debug_group",
2739 );
2740 }
2741 }
2742
2743 fn write_timestamp(&self, query_set: &dispatch::DispatchQuerySet, query_index: u32) {
2744 let query_set = query_set.as_core();
2745
2746 if let Err(cause) =
2747 self.context
2748 .0
2749 .command_encoder_write_timestamp(self.id, query_set.id, query_index)
2750 {
2751 self.context.handle_error_nolabel(
2752 &self.error_sink,
2753 cause,
2754 "CommandEncoder::write_timestamp",
2755 );
2756 }
2757 }
2758
2759 fn resolve_query_set(
2760 &self,
2761 query_set: &dispatch::DispatchQuerySet,
2762 first_query: u32,
2763 query_count: u32,
2764 destination: &dispatch::DispatchBuffer,
2765 destination_offset: crate::BufferAddress,
2766 ) {
2767 let query_set = query_set.as_core();
2768 let destination = destination.as_core();
2769
2770 if let Err(cause) = self.context.0.command_encoder_resolve_query_set(
2771 self.id,
2772 query_set.id,
2773 first_query,
2774 query_count,
2775 destination.id,
2776 destination_offset,
2777 ) {
2778 self.context.handle_error_nolabel(
2779 &self.error_sink,
2780 cause,
2781 "CommandEncoder::resolve_query_set",
2782 );
2783 }
2784 }
2785
2786 fn mark_acceleration_structures_built<'a>(
2787 &self,
2788 blas: &mut dyn Iterator<Item = &'a Blas>,
2789 tlas: &mut dyn Iterator<Item = &'a Tlas>,
2790 ) {
2791 let blas = blas
2792 .map(|b| b.inner.as_core().id)
2793 .collect::<SmallVec<[_; 4]>>();
2794 let tlas = tlas
2795 .map(|t| t.inner.as_core().id)
2796 .collect::<SmallVec<[_; 4]>>();
2797 if let Err(cause) = self
2798 .context
2799 .0
2800 .command_encoder_mark_acceleration_structures_built(self.id, &blas, &tlas)
2801 {
2802 self.context.handle_error_nolabel(
2803 &self.error_sink,
2804 cause,
2805 "CommandEncoder::build_acceleration_structures_unsafe_tlas",
2806 );
2807 }
2808 }
2809
2810 fn build_acceleration_structures<'a>(
2811 &self,
2812 blas: &mut dyn Iterator<Item = &'a crate::BlasBuildEntry<'a>>,
2813 tlas: &mut dyn Iterator<Item = &'a crate::Tlas>,
2814 ) {
2815 let blas = blas.map(|e: &crate::BlasBuildEntry<'_>| {
2816 let geometries = match e.geometry {
2817 crate::BlasGeometries::TriangleGeometries(ref triangle_geometries) => {
2818 let iter = triangle_geometries.iter().map(|tg| {
2819 wgc::ray_tracing::BlasTriangleGeometry {
2820 vertex_buffer: tg.vertex_buffer.inner.as_core().id,
2821 index_buffer: tg.index_buffer.map(|buf| buf.inner.as_core().id),
2822 transform_buffer: tg.transform_buffer.map(|buf| buf.inner.as_core().id),
2823 size: tg.size,
2824 transform_buffer_offset: tg.transform_buffer_offset,
2825 first_vertex: tg.first_vertex,
2826 vertex_stride: tg.vertex_stride,
2827 first_index: tg.first_index,
2828 }
2829 });
2830 wgc::ray_tracing::BlasGeometries::TriangleGeometries(Box::new(iter))
2831 }
2832 };
2833 wgc::ray_tracing::BlasBuildEntry {
2834 blas_id: e.blas.inner.as_core().id,
2835 geometries,
2836 }
2837 });
2838
2839 let tlas = tlas.into_iter().map(|e| {
2840 let instances = e
2841 .instances
2842 .iter()
2843 .map(|instance: &Option<crate::TlasInstance>| {
2844 instance
2845 .as_ref()
2846 .map(|instance| wgc::ray_tracing::TlasInstance {
2847 blas_id: instance.blas.as_core().id,
2848 transform: &instance.transform,
2849 custom_data: instance.custom_data,
2850 mask: instance.mask,
2851 })
2852 });
2853 wgc::ray_tracing::TlasPackage {
2854 tlas_id: e.inner.as_core().id,
2855 instances: Box::new(instances),
2856 lowest_unmodified: e.lowest_unmodified,
2857 }
2858 });
2859
2860 if let Err(cause) = self
2861 .context
2862 .0
2863 .command_encoder_build_acceleration_structures(self.id, blas, tlas)
2864 {
2865 self.context.handle_error_nolabel(
2866 &self.error_sink,
2867 cause,
2868 "CommandEncoder::build_acceleration_structures_unsafe_tlas",
2869 );
2870 }
2871 }
2872
2873 fn transition_resources<'a>(
2874 &mut self,
2875 buffer_transitions: &mut dyn Iterator<
2876 Item = wgt::BufferTransition<&'a dispatch::DispatchBuffer>,
2877 >,
2878 texture_transitions: &mut dyn Iterator<
2879 Item = wgt::TextureTransition<&'a dispatch::DispatchTexture>,
2880 >,
2881 ) {
2882 let result = self.context.0.command_encoder_transition_resources(
2883 self.id,
2884 buffer_transitions.map(|t| wgt::BufferTransition {
2885 buffer: t.buffer.as_core().id,
2886 state: t.state,
2887 }),
2888 texture_transitions.map(|t| wgt::TextureTransition {
2889 texture: t.texture.as_core().id,
2890 selector: t.selector.clone(),
2891 state: t.state,
2892 }),
2893 );
2894
2895 if let Err(cause) = result {
2896 self.context.handle_error_nolabel(
2897 &self.error_sink,
2898 cause,
2899 "CommandEncoder::transition_resources",
2900 );
2901 }
2902 }
2903}
2904
impl Drop for CoreCommandEncoder {
    fn drop(&mut self) {
        // Release this handle's reference to the wgpu-core command encoder.
        self.context.0.command_encoder_drop(self.id)
    }
}
2910
// Command buffers are opaque once finished; they are only submitted or dropped.
impl dispatch::CommandBufferInterface for CoreCommandBuffer {}
2912
impl Drop for CoreCommandBuffer {
    fn drop(&mut self) {
        // Release this handle's reference to the wgpu-core command buffer.
        self.context.0.command_buffer_drop(self.id)
    }
}
2918
/// wgpu-core backend for recording compute-pass commands.
///
/// Each method forwards to the matching `compute_pass_*` entry point on the
/// global; failures are routed to the error sink (labeled with the pass's own
/// label) instead of being returned, per WebGPU's error-scope model.
impl dispatch::ComputePassInterface for CoreComputePass {
    fn set_pipeline(&mut self, pipeline: &dispatch::DispatchComputePipeline) {
        let pipeline = pipeline.as_core();

        if let Err(cause) = self
            .context
            .0
            .compute_pass_set_pipeline(&mut self.pass, pipeline.id)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "ComputePass::set_pipeline",
            );
        }
    }

    fn set_bind_group(
        &mut self,
        index: u32,
        bind_group: Option<&dispatch::DispatchBindGroup>,
        offsets: &[crate::DynamicOffset],
    ) {
        // `None` unsets the bind group at `index`.
        let bg = bind_group.map(|bg| bg.as_core().id);

        if let Err(cause) =
            self.context
                .0
                .compute_pass_set_bind_group(&mut self.pass, index, bg, offsets)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "ComputePass::set_bind_group",
            );
        }
    }

    fn set_immediates(&mut self, offset: u32, data: &[u8]) {
        if let Err(cause) = self
            .context
            .0
            .compute_pass_set_immediates(&mut self.pass, offset, data)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "ComputePass::set_immediates",
            );
        }
    }

    fn insert_debug_marker(&mut self, label: &str) {
        // Trailing 0 is the (unused) marker color.
        if let Err(cause) =
            self.context
                .0
                .compute_pass_insert_debug_marker(&mut self.pass, label, 0)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "ComputePass::insert_debug_marker",
            );
        }
    }

    fn push_debug_group(&mut self, group_label: &str) {
        // Trailing 0 is the (unused) group color.
        if let Err(cause) =
            self.context
                .0
                .compute_pass_push_debug_group(&mut self.pass, group_label, 0)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "ComputePass::push_debug_group",
            );
        }
    }

    fn pop_debug_group(&mut self) {
        if let Err(cause) = self.context.0.compute_pass_pop_debug_group(&mut self.pass) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "ComputePass::pop_debug_group",
            );
        }
    }

    fn write_timestamp(&mut self, query_set: &dispatch::DispatchQuerySet, query_index: u32) {
        let query_set = query_set.as_core();

        if let Err(cause) =
            self.context
                .0
                .compute_pass_write_timestamp(&mut self.pass, query_set.id, query_index)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "ComputePass::write_timestamp",
            );
        }
    }

    fn begin_pipeline_statistics_query(
        &mut self,
        query_set: &dispatch::DispatchQuerySet,
        query_index: u32,
    ) {
        let query_set = query_set.as_core();

        if let Err(cause) = self.context.0.compute_pass_begin_pipeline_statistics_query(
            &mut self.pass,
            query_set.id,
            query_index,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "ComputePass::begin_pipeline_statistics_query",
            );
        }
    }

    fn end_pipeline_statistics_query(&mut self) {
        if let Err(cause) = self
            .context
            .0
            .compute_pass_end_pipeline_statistics_query(&mut self.pass)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "ComputePass::end_pipeline_statistics_query",
            );
        }
    }

    fn dispatch_workgroups(&mut self, x: u32, y: u32, z: u32) {
        if let Err(cause) = self
            .context
            .0
            .compute_pass_dispatch_workgroups(&mut self.pass, x, y, z)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "ComputePass::dispatch_workgroups",
            );
        }
    }

    fn dispatch_workgroups_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.compute_pass_dispatch_workgroups_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "ComputePass::dispatch_workgroups_indirect",
            );
        }
    }
}
3104
impl Drop for CoreComputePass {
    fn drop(&mut self) {
        // Ending the pass on drop guarantees it is closed exactly once, even
        // if the user never called an explicit end; errors still go to the sink.
        if let Err(cause) = self.context.0.compute_pass_end(&mut self.pass) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "ComputePass::end",
            );
        }
    }
}
3117
/// wgpu-core backend for recording render-pass commands.
///
/// Each method forwards to the matching `render_pass_*` entry point on the
/// global; failures are routed to the error sink (labeled with the pass's own
/// label) instead of being returned, per WebGPU's error-scope model.
impl dispatch::RenderPassInterface for CoreRenderPass {
    fn set_pipeline(&mut self, pipeline: &dispatch::DispatchRenderPipeline) {
        let pipeline = pipeline.as_core();

        if let Err(cause) = self
            .context
            .0
            .render_pass_set_pipeline(&mut self.pass, pipeline.id)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_pipeline",
            );
        }
    }

    fn set_bind_group(
        &mut self,
        index: u32,
        bind_group: Option<&dispatch::DispatchBindGroup>,
        offsets: &[crate::DynamicOffset],
    ) {
        // `None` unsets the bind group at `index`.
        let bg = bind_group.map(|bg| bg.as_core().id);

        if let Err(cause) =
            self.context
                .0
                .render_pass_set_bind_group(&mut self.pass, index, bg, offsets)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_bind_group",
            );
        }
    }

    fn set_index_buffer(
        &mut self,
        buffer: &dispatch::DispatchBuffer,
        index_format: crate::IndexFormat,
        offset: crate::BufferAddress,
        size: Option<crate::BufferSize>,
    ) {
        let buffer = buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_set_index_buffer(
            &mut self.pass,
            buffer.id,
            index_format,
            offset,
            size,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_index_buffer",
            );
        }
    }

    fn set_vertex_buffer(
        &mut self,
        slot: u32,
        buffer: &dispatch::DispatchBuffer,
        offset: crate::BufferAddress,
        size: Option<crate::BufferSize>,
    ) {
        let buffer = buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_set_vertex_buffer(
            &mut self.pass,
            slot,
            buffer.id,
            offset,
            size,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_vertex_buffer",
            );
        }
    }

    fn set_immediates(&mut self, offset: u32, data: &[u8]) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_set_immediates(&mut self.pass, offset, data)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_immediates",
            );
        }
    }

    fn set_blend_constant(&mut self, color: crate::Color) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_set_blend_constant(&mut self.pass, color)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_blend_constant",
            );
        }
    }

    fn set_scissor_rect(&mut self, x: u32, y: u32, width: u32, height: u32) {
        if let Err(cause) =
            self.context
                .0
                .render_pass_set_scissor_rect(&mut self.pass, x, y, width, height)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_scissor_rect",
            );
        }
    }

    fn set_viewport(
        &mut self,
        x: f32,
        y: f32,
        width: f32,
        height: f32,
        min_depth: f32,
        max_depth: f32,
    ) {
        if let Err(cause) = self.context.0.render_pass_set_viewport(
            &mut self.pass,
            x,
            y,
            width,
            height,
            min_depth,
            max_depth,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_viewport",
            );
        }
    }

    fn set_stencil_reference(&mut self, reference: u32) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_set_stencil_reference(&mut self.pass, reference)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_stencil_reference",
            );
        }
    }

    // The public API takes half-open ranges; wgpu-core takes (count, first),
    // hence the end-minus-start conversions below.
    fn draw(&mut self, vertices: Range<u32>, instances: Range<u32>) {
        if let Err(cause) = self.context.0.render_pass_draw(
            &mut self.pass,
            vertices.end - vertices.start,
            instances.end - instances.start,
            vertices.start,
            instances.start,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw",
            );
        }
    }

    fn draw_indexed(&mut self, indices: Range<u32>, base_vertex: i32, instances: Range<u32>) {
        if let Err(cause) = self.context.0.render_pass_draw_indexed(
            &mut self.pass,
            indices.end - indices.start,
            instances.end - instances.start,
            indices.start,
            base_vertex,
            instances.start,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_indexed",
            );
        }
    }

    fn draw_mesh_tasks(&mut self, group_count_x: u32, group_count_y: u32, group_count_z: u32) {
        if let Err(cause) = self.context.0.render_pass_draw_mesh_tasks(
            &mut self.pass,
            group_count_x,
            group_count_y,
            group_count_z,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_mesh_tasks",
            );
        }
    }

    fn draw_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_draw_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_indirect",
            );
        }
    }

    fn draw_indexed_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_draw_indexed_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_indexed_indirect",
            );
        }
    }

    fn draw_mesh_tasks_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_draw_mesh_tasks_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_mesh_tasks_indirect",
            );
        }
    }

    fn multi_draw_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_multi_draw_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
            count,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_indirect",
            );
        }
    }

    fn multi_draw_indexed_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_multi_draw_indexed_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
            count,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_indexed_indirect",
            );
        }
    }

    fn multi_draw_mesh_tasks_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_multi_draw_mesh_tasks_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
            count,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_mesh_tasks_indirect",
            );
        }
    }

    fn multi_draw_indirect_count(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count_buffer: &dispatch::DispatchBuffer,
        count_buffer_offset: crate::BufferAddress,
        max_count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();
        let count_buffer = count_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_multi_draw_indirect_count(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
            count_buffer.id,
            count_buffer_offset,
            max_count,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_indirect_count",
            );
        }
    }

    fn multi_draw_indexed_indirect_count(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count_buffer: &dispatch::DispatchBuffer,
        count_buffer_offset: crate::BufferAddress,
        max_count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();
        let count_buffer = count_buffer.as_core();

        if let Err(cause) = self
            .context
            .0
            .render_pass_multi_draw_indexed_indirect_count(
                &mut self.pass,
                indirect_buffer.id,
                indirect_offset,
                count_buffer.id,
                count_buffer_offset,
                max_count,
            )
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_indexed_indirect_count",
            );
        }
    }

    fn multi_draw_mesh_tasks_indirect_count(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count_buffer: &dispatch::DispatchBuffer,
        count_buffer_offset: crate::BufferAddress,
        max_count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();
        let count_buffer = count_buffer.as_core();

        if let Err(cause) = self
            .context
            .0
            .render_pass_multi_draw_mesh_tasks_indirect_count(
                &mut self.pass,
                indirect_buffer.id,
                indirect_offset,
                count_buffer.id,
                count_buffer_offset,
                max_count,
            )
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_mesh_tasks_indirect_count",
            );
        }
    }

    fn insert_debug_marker(&mut self, label: &str) {
        // Trailing 0 is the (unused) marker color.
        if let Err(cause) = self
            .context
            .0
            .render_pass_insert_debug_marker(&mut self.pass, label, 0)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::insert_debug_marker",
            );
        }
    }

    fn push_debug_group(&mut self, group_label: &str) {
        // Trailing 0 is the (unused) group color.
        if let Err(cause) =
            self.context
                .0
                .render_pass_push_debug_group(&mut self.pass, group_label, 0)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::push_debug_group",
            );
        }
    }

    fn pop_debug_group(&mut self) {
        if let Err(cause) = self.context.0.render_pass_pop_debug_group(&mut self.pass) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::pop_debug_group",
            );
        }
    }

    fn write_timestamp(&mut self, query_set: &dispatch::DispatchQuerySet, query_index: u32) {
        let query_set = query_set.as_core();

        if let Err(cause) =
            self.context
                .0
                .render_pass_write_timestamp(&mut self.pass, query_set.id, query_index)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::write_timestamp",
            );
        }
    }

    fn begin_occlusion_query(&mut self, query_index: u32) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_begin_occlusion_query(&mut self.pass, query_index)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::begin_occlusion_query",
            );
        }
    }

    fn end_occlusion_query(&mut self) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_end_occlusion_query(&mut self.pass)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::end_occlusion_query",
            );
        }
    }

    fn begin_pipeline_statistics_query(
        &mut self,
        query_set: &dispatch::DispatchQuerySet,
        query_index: u32,
    ) {
        let query_set = query_set.as_core();

        if let Err(cause) = self.context.0.render_pass_begin_pipeline_statistics_query(
            &mut self.pass,
            query_set.id,
            query_index,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::begin_pipeline_statistics_query",
            );
        }
    }

    fn end_pipeline_statistics_query(&mut self) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_end_pipeline_statistics_query(&mut self.pass)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::end_pipeline_statistics_query",
            );
        }
    }

    fn execute_bundles(
        &mut self,
        render_bundles: &mut dyn Iterator<Item = &dispatch::DispatchRenderBundle>,
    ) {
        // Gather ids into a stack-friendly buffer before the FFI-style call.
        let temp_render_bundles = render_bundles
            .map(|rb| rb.as_core().id)
            .collect::<SmallVec<[_; 4]>>();
        if let Err(cause) = self
            .context
            .0
            .render_pass_execute_bundles(&mut self.pass, &temp_render_bundles)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::execute_bundles",
            );
        }
    }
}
3715
3716impl Drop for CoreRenderPass {
3717 fn drop(&mut self) {
3718 if let Err(cause) = self.context.0.render_pass_end(&mut self.pass) {
3719 self.context.handle_error(
3720 &self.error_sink,
3721 cause,
3722 self.pass.label(),
3723 "RenderPass::end",
3724 );
3725 }
3726 }
3727}
3728
/// Render-bundle recording: each method forwards to the `bundle_ffi` free
/// functions (or encoder methods) that mutate the wgpu-core
/// `RenderBundleEncoder` in place. No error sink here — recording errors
/// surface when the bundle is finished.
impl dispatch::RenderBundleEncoderInterface for CoreRenderBundleEncoder {
    fn set_pipeline(&mut self, pipeline: &dispatch::DispatchRenderPipeline) {
        let pipeline = pipeline.as_core();

        wgpu_render_bundle_set_pipeline(&mut self.encoder, pipeline.id)
    }

    fn set_bind_group(
        &mut self,
        index: u32,
        bind_group: Option<&dispatch::DispatchBindGroup>,
        offsets: &[crate::DynamicOffset],
    ) {
        // `None` clears the bind group at `index`.
        let bg = bind_group.map(|bg| bg.as_core().id);

        // SAFETY: `offsets.as_ptr()`/`offsets.len()` come from a live slice
        // that stays borrowed for the duration of this call.
        unsafe {
            wgpu_render_bundle_set_bind_group(
                &mut self.encoder,
                index,
                bg,
                offsets.as_ptr(),
                offsets.len(),
            )
        }
    }

    fn set_index_buffer(
        &mut self,
        buffer: &dispatch::DispatchBuffer,
        index_format: crate::IndexFormat,
        offset: crate::BufferAddress,
        size: Option<crate::BufferSize>,
    ) {
        let buffer = buffer.as_core();

        // `size: None` means "to the end of the buffer" at the wgpu-core level.
        self.encoder
            .set_index_buffer(buffer.id, index_format, offset, size)
    }

    fn set_vertex_buffer(
        &mut self,
        slot: u32,
        buffer: &dispatch::DispatchBuffer,
        offset: crate::BufferAddress,
        size: Option<crate::BufferSize>,
    ) {
        let buffer = buffer.as_core();

        wgpu_render_bundle_set_vertex_buffer(&mut self.encoder, slot, buffer.id, offset, size)
    }

    fn set_immediates(&mut self, offset: u32, data: &[u8]) {
        // SAFETY: `data.as_ptr()` points at `data.len()` live bytes for the
        // duration of the call. The length is narrowed to the FFI integer
        // type; `try_into().unwrap()` panics if `data` is too large for it.
        unsafe {
            wgpu_render_bundle_set_immediates(
                &mut self.encoder,
                offset,
                data.len().try_into().unwrap(),
                data.as_ptr(),
            )
        }
    }

    fn draw(&mut self, vertices: Range<u32>, instances: Range<u32>) {
        // Convert the half-open ranges into (count, first) pairs.
        wgpu_render_bundle_draw(
            &mut self.encoder,
            vertices.end - vertices.start,
            instances.end - instances.start,
            vertices.start,
            instances.start,
        )
    }

    fn draw_indexed(&mut self, indices: Range<u32>, base_vertex: i32, instances: Range<u32>) {
        // Same range -> (count, first) conversion, plus the signed base vertex.
        wgpu_render_bundle_draw_indexed(
            &mut self.encoder,
            indices.end - indices.start,
            instances.end - instances.start,
            indices.start,
            base_vertex,
            instances.start,
        )
    }

    fn draw_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        wgpu_render_bundle_draw_indirect(&mut self.encoder, indirect_buffer.id, indirect_offset)
    }

    fn draw_indexed_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        wgpu_render_bundle_draw_indexed_indirect(
            &mut self.encoder,
            indirect_buffer.id,
            indirect_offset,
        )
    }

    fn finish(self, desc: &crate::RenderBundleDescriptor<'_>) -> dispatch::DispatchRenderBundle
    where
        Self: Sized,
    {
        // Consumes the encoder. Since bundle encoders carry no error sink,
        // a failure here is reported as fatal.
        let (id, error) = self.context.0.render_bundle_encoder_finish(
            self.encoder,
            &desc.map_label(|l| l.map(Borrowed)),
            None,
        );
        if let Some(err) = error {
            self.context
                .handle_error_fatal(err, "RenderBundleEncoder::finish");
        }
        CoreRenderBundle {
            context: self.context.clone(),
            id,
        }
        .into()
    }
}
3856
// Marker impl: finished render bundles expose no operations of their own.
impl dispatch::RenderBundleInterface for CoreRenderBundle {}
3858
3859impl Drop for CoreRenderBundle {
3860 fn drop(&mut self) {
3861 self.context.0.render_bundle_drop(self.id)
3862 }
3863}
3864
impl dispatch::SurfaceInterface for CoreSurface {
    fn get_capabilities(&self, adapter: &dispatch::DispatchAdapter) -> wgt::SurfaceCapabilities {
        let adapter = adapter.as_core();

        // A query failure yields empty/default capabilities instead of an error.
        self.context
            .0
            .surface_get_capabilities(self.id, adapter.id)
            .unwrap_or_default()
    }

    fn configure(&self, device: &dispatch::DispatchDevice, config: &crate::SurfaceConfiguration) {
        let device = device.as_core();

        let error = self.context.0.surface_configure(self.id, device.id, config);
        if let Some(e) = error {
            self.context
                .handle_error_nolabel(&device.error_sink, e, "Surface::configure");
        } else {
            // Only on success: remember the configuring device and adopt its
            // error sink for subsequent surface operations.
            *self.configured_device.lock() = Some(device.id);
            *self.error_sink.lock() = Some(device.error_sink.clone());
        }
    }

    fn get_current_texture(
        &self,
    ) -> (
        Option<dispatch::DispatchTexture>,
        crate::SurfaceStatus,
        dispatch::DispatchSurfaceOutputDetail,
    ) {
        // Use the sink installed by `configure`; if the surface was never
        // configured, fall back to a fresh, unconnected sink.
        let error_sink = if let Some(error_sink) = self.error_sink.lock().as_ref() {
            error_sink.clone()
        } else {
            Arc::new(Mutex::new(ErrorSinkRaw::new()))
        };

        // Built up front so both the success and error paths can return it.
        let output_detail = CoreSurfaceOutputDetail {
            context: self.context.clone(),
            surface_id: self.id,
            error_sink: error_sink.clone(),
        }
        .into();

        match self.context.0.surface_get_current_texture(self.id, None) {
            Ok(wgc::present::SurfaceOutput {
                status,
                texture: texture_id,
            }) => {
                // The texture id may be absent even on success; `status`
                // conveys the outcome to the caller.
                let data = texture_id
                    .map(|id| CoreTexture {
                        context: self.context.clone(),
                        id,
                        error_sink,
                    })
                    .map(Into::into);

                (data, status, output_detail)
            }
            Err(err) => {
                // With a configured sink, report the error there and let the
                // caller observe a Validation status; with no sink the error
                // has nowhere to go and is treated as fatal.
                let error_sink = self.error_sink.lock();
                match error_sink.as_ref() {
                    Some(error_sink) => {
                        self.context.handle_error_nolabel(
                            error_sink,
                            err,
                            "Surface::get_current_texture_view",
                        );
                        (None, crate::SurfaceStatus::Validation, output_detail)
                    }
                    None => self
                        .context
                        .handle_error_fatal(err, "Surface::get_current_texture_view"),
                }
            }
        }
    }
}
3942
3943impl Drop for CoreSurface {
3944 fn drop(&mut self) {
3945 self.context.0.surface_drop(self.id)
3946 }
3947}
3948
3949impl dispatch::SurfaceOutputDetailInterface for CoreSurfaceOutputDetail {
3950 fn present(&self) {
3951 match self.context.0.surface_present(self.surface_id) {
3952 Ok(_status) => (),
3953 Err(err) => {
3954 self.context
3955 .handle_error_nolabel(&self.error_sink, err, "Surface::present");
3956 }
3957 }
3958 }
3959
3960 fn texture_discard(&self) {
3961 match self.context.0.surface_texture_discard(self.surface_id) {
3962 Ok(_status) => (),
3963 Err(err) => self
3964 .context
3965 .handle_error_fatal(err, "Surface::discard_texture"),
3966 }
3967 }
3968}
impl Drop for CoreSurfaceOutputDetail {
    fn drop(&mut self) {
        // Intentionally empty: presentation or discard of the surface texture
        // happens via the explicit `present`/`texture_discard` calls above.
        // NOTE(review): assumed intentional — confirm nothing needs releasing
        // when an output detail is dropped without presenting.
    }
}
3976
/// Staging-buffer view for `Queue::write_buffer_with`: delegates to the
/// inner mapped range.
impl dispatch::QueueWriteBufferInterface for CoreQueueWriteBuffer {
    #[inline]
    fn len(&self) -> usize {
        // Size of the staging mapping, in bytes.
        self.mapping.len()
    }

    #[inline]
    unsafe fn write_slice(&mut self) -> WriteOnly<'_, [u8]> {
        // SAFETY: forwarded unchanged to the inner mapping; the caller is
        // responsible for upholding the trait method's contract.
        unsafe { self.mapping.write_slice() }
    }
}
impl Drop for CoreQueueWriteBuffer {
    fn drop(&mut self) {
        // Intentionally empty.
        // NOTE(review): assumed the staging buffer's lifetime is managed by
        // its owning fields' own Drop impls — confirm no explicit unmap or
        // release is required here.
    }
}
3995
/// View over a mapped buffer range, described by a raw `ptr` + `size` pair.
impl dispatch::BufferMappedRangeInterface for CoreBufferMappedRange {
    #[inline]
    fn len(&self) -> usize {
        // Length of the mapped range, in bytes.
        self.size
    }

    #[inline]
    unsafe fn read_slice(&self) -> &[u8] {
        // SAFETY: `ptr`/`size` describe this mapped range; per the unsafe
        // trait method's contract the caller guarantees the mapping is still
        // valid and not concurrently written — TODO confirm trait docs.
        unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.size) }
    }

    #[inline]
    unsafe fn write_slice(&mut self) -> WriteOnly<'_, [u8]> {
        // SAFETY: same `ptr`/`size` provenance as `read_slice`; `&mut self`
        // plus the caller's contract provide exclusivity for the returned
        // write-only view.
        unsafe { WriteOnly::new(NonNull::slice_from_raw_parts(self.ptr, self.size)) }
    }

    #[cfg(webgpu)]
    fn as_uint8array(&self) -> &js_sys::Uint8Array {
        // This is the wgpu-core backend; a JS-backed view only exists on the
        // WebGPU backend, so reaching this is a caller bug.
        panic!("Only available on WebGPU")
    }
}