// wgpu/backend/wgpu_core.rs

1use alloc::{
2    borrow::Cow::{self, Borrowed},
3    boxed::Box,
4    format,
5    string::{String, ToString as _},
6    sync::Arc,
7    vec,
8    vec::Vec,
9};
10use core::{
11    error::Error,
12    fmt,
13    future::ready,
14    ops::{Deref, Range},
15    pin::Pin,
16    ptr::NonNull,
17    slice,
18};
19
20use arrayvec::ArrayVec;
21use smallvec::SmallVec;
22use wgc::{
23    command::bundle_ffi::*, error::ContextErrorSource, pipeline::CreateShaderModuleError,
24    resource::BlasPrepareCompactResult,
25};
26use wgt::{
27    error::{ErrorType, WebGpuError},
28    WasmNotSendSync,
29};
30
31use crate::{
32    api,
33    dispatch::{self, BlasCompactCallback, BufferMappedRangeInterface},
34    BindingResource, Blas, BufferBinding, BufferDescriptor, CompilationInfo, CompilationMessage,
35    CompilationMessageType, ErrorSource, Features, Label, LoadOp, MapMode, Operations,
36    ShaderSource, SurfaceTargetUnsafe, TextureDescriptor, Tlas,
37};
38use crate::{dispatch::DispatchAdapter, util::Mutex};
39
/// The `wgpu-core`-backed context: a cheaply clonable handle to the shared
/// [`wgc::global::Global`] that owns all backend state.
#[derive(Clone)]
pub struct ContextWgpuCore(Arc<wgc::global::Global>);
42
impl Drop for ContextWgpuCore {
    fn drop(&mut self) {
        // Intentionally empty: the inner `Arc<Global>` handles teardown.
        // NOTE(review): presumably this explicit no-op `Drop` exists to keep a
        // stable place for future cleanup logic — confirm before removing.
        //nothing
    }
}
48
49impl fmt::Debug for ContextWgpuCore {
50    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
51        f.debug_struct("ContextWgpuCore")
52            .field("type", &"Native")
53            .finish()
54    }
55}
56
impl ContextWgpuCore {
    /// Wraps a raw `wgpu-hal` instance of backend `A` in a fresh `wgpu-core`
    /// global named "wgpu".
    ///
    /// # Safety
    ///
    /// NOTE(review): assumed to require that `hal_instance` is valid and not
    /// owned elsewhere — confirm against `wgc::global::Global::from_hal_instance`.
    pub unsafe fn from_hal_instance<A: hal::Api>(hal_instance: A::Instance) -> Self {
        Self(unsafe {
            Arc::new(wgc::global::Global::from_hal_instance::<A>(
                "wgpu",
                hal_instance,
            ))
        })
    }

    /// # Safety
    ///
    /// - The raw instance handle returned must not be manually destroyed.
    pub unsafe fn instance_as_hal<A: hal::Api>(&self) -> Option<&A::Instance> {
        unsafe { self.0.instance_as_hal::<A>() }
    }

    /// Wraps an already-constructed `wgpu-core` instance in a new global.
    pub unsafe fn from_core_instance(core_instance: wgc::instance::Instance) -> Self {
        Self(unsafe { Arc::new(wgc::global::Global::from_instance(core_instance)) })
    }

    /// Lists the ids of all adapters available on the requested `backends`.
    #[cfg(wgpu_core)]
    pub fn enumerate_adapters(&self, backends: wgt::Backends) -> Vec<wgc::id::AdapterId> {
        self.0.enumerate_adapters(backends)
    }

    /// Registers an adapter exposed by `wgpu-hal` with the global and returns
    /// its `wgpu-core` id.
    pub unsafe fn create_adapter_from_hal<A: hal::Api>(
        &self,
        hal_adapter: hal::ExposedAdapter<A>,
    ) -> wgc::id::AdapterId {
        unsafe { self.0.create_adapter_from_hal(hal_adapter.into(), None) }
    }

    /// Borrows the underlying `wgpu-hal` adapter for `adapter`, if it belongs
    /// to backend `A`.
    pub unsafe fn adapter_as_hal<A: hal::Api>(
        &self,
        adapter: &CoreAdapter,
    ) -> Option<impl Deref<Target = A::Adapter> + WasmNotSendSync> {
        unsafe { self.0.adapter_as_hal::<A>(adapter.id) }
    }

    /// Borrows the underlying `wgpu-hal` buffer for `buffer`, if it belongs
    /// to backend `A`.
    pub unsafe fn buffer_as_hal<A: hal::Api>(
        &self,
        buffer: &CoreBuffer,
    ) -> Option<impl Deref<Target = A::Buffer>> {
        unsafe { self.0.buffer_as_hal::<A>(buffer.id) }
    }

    /// Wraps an open `wgpu-hal` device/queue pair as `wgpu-core`-backed
    /// device and queue handles sharing one error sink.
    pub unsafe fn create_device_from_hal<A: hal::Api>(
        &self,
        adapter: &CoreAdapter,
        hal_device: hal::OpenDevice<A>,
        desc: &crate::DeviceDescriptor<'_>,
    ) -> Result<(CoreDevice, CoreQueue), crate::RequestDeviceError> {
        // Tracing is currently unsupported; warn loudly instead of failing.
        if !matches!(desc.trace, wgt::Trace::Off) {
            log::error!(
                "
                Feature 'trace' has been removed temporarily; \
                see https://github.com/gfx-rs/wgpu/issues/5974. \
                The `trace` parameter will have no effect."
            );
        }

        let (device_id, queue_id) = unsafe {
            self.0.create_device_from_hal(
                adapter.id,
                hal_device.into(),
                &desc.map_label(|l| l.map(Borrowed)),
                None,
                None,
            )
        }?;
        // Device and queue report errors through the same sink.
        let error_sink = Arc::new(Mutex::new(ErrorSinkRaw::new()));
        let device = CoreDevice {
            context: self.clone(),
            id: device_id,
            error_sink: error_sink.clone(),
            features: desc.required_features,
        };
        let queue = CoreQueue {
            context: self.clone(),
            id: queue_id,
            error_sink,
        };
        Ok((device, queue))
    }

    /// Registers a raw `wgpu-hal` texture with `device` and wraps it in a
    /// `CoreTexture`. Creation errors go to the device's error sink.
    pub unsafe fn create_texture_from_hal<A: hal::Api>(
        &self,
        hal_texture: A::Texture,
        device: &CoreDevice,
        desc: &TextureDescriptor<'_>,
    ) -> CoreTexture {
        let descriptor = desc.map_label_and_view_formats(|l| l.map(Borrowed), |v| v.to_vec());
        let (id, error) = unsafe {
            self.0
                .create_texture_from_hal(Box::new(hal_texture), device.id, &descriptor, None)
        };
        // An id is returned even on error; the error is routed to the sink.
        if let Some(cause) = error {
            self.handle_error(
                &device.error_sink,
                cause,
                desc.label,
                "Device::create_texture_from_hal",
            );
        }
        CoreTexture {
            context: self.clone(),
            id,
            error_sink: Arc::clone(&device.error_sink),
        }
    }

    /// # Safety
    ///
    /// - `hal_buffer` must be created from `device`.
    /// - `hal_buffer` must be created respecting `desc`
    /// - `hal_buffer` must be initialized
    /// - `hal_buffer` must not have zero size.
    pub unsafe fn create_buffer_from_hal<A: hal::Api>(
        &self,
        hal_buffer: A::Buffer,
        device: &CoreDevice,
        desc: &BufferDescriptor<'_>,
    ) -> CoreBuffer {
        let (id, error) = unsafe {
            self.0.create_buffer_from_hal::<A>(
                hal_buffer,
                device.id,
                &desc.map_label(|l| l.map(Borrowed)),
                None,
            )
        };
        // An id is returned even on error; the error is routed to the sink.
        if let Some(cause) = error {
            self.handle_error(
                &device.error_sink,
                cause,
                desc.label,
                "Device::create_buffer_from_hal",
            );
        }
        CoreBuffer {
            context: self.clone(),
            id,
            error_sink: Arc::clone(&device.error_sink),
        }
    }

    /// Borrows the underlying `wgpu-hal` device, if it belongs to backend `A`.
    pub unsafe fn device_as_hal<A: hal::Api>(
        &self,
        device: &CoreDevice,
    ) -> Option<impl Deref<Target = A::Device>> {
        unsafe { self.0.device_as_hal::<A>(device.id) }
    }

    /// Borrows the underlying `wgpu-hal` surface, if it belongs to backend `A`.
    pub unsafe fn surface_as_hal<A: hal::Api>(
        &self,
        surface: &CoreSurface,
    ) -> Option<impl Deref<Target = A::Surface>> {
        unsafe { self.0.surface_as_hal::<A>(surface.id) }
    }

    /// Borrows the underlying `wgpu-hal` texture, if it belongs to backend `A`.
    pub unsafe fn texture_as_hal<A: hal::Api>(
        &self,
        texture: &CoreTexture,
    ) -> Option<impl Deref<Target = A::Texture>> {
        unsafe { self.0.texture_as_hal::<A>(texture.id) }
    }

    /// Borrows the underlying `wgpu-hal` texture view, if it belongs to
    /// backend `A`.
    pub unsafe fn texture_view_as_hal<A: hal::Api>(
        &self,
        texture_view: &CoreTextureView,
    ) -> Option<impl Deref<Target = A::TextureView>> {
        unsafe { self.0.texture_view_as_hal::<A>(texture_view.id) }
    }

    /// This method will start the wgpu_core level command recording.
    pub unsafe fn command_encoder_as_hal_mut<
        A: hal::Api,
        F: FnOnce(Option<&mut A::CommandEncoder>) -> R,
        R,
    >(
        &self,
        command_encoder: &CoreCommandEncoder,
        hal_command_encoder_callback: F,
    ) -> R {
        unsafe {
            self.0.command_encoder_as_hal_mut::<A, F, R>(
                command_encoder.id,
                hal_command_encoder_callback,
            )
        }
    }

    /// Borrows the underlying `wgpu-hal` acceleration structure for `blas`,
    /// if it belongs to backend `A`.
    pub unsafe fn blas_as_hal<A: hal::Api>(
        &self,
        blas: &CoreBlas,
    ) -> Option<impl Deref<Target = A::AccelerationStructure>> {
        unsafe { self.0.blas_as_hal::<A>(blas.id) }
    }

    /// Borrows the underlying `wgpu-hal` acceleration structure for `tlas`,
    /// if it belongs to backend `A`.
    pub unsafe fn tlas_as_hal<A: hal::Api>(
        &self,
        tlas: &CoreTlas,
    ) -> Option<impl Deref<Target = A::AccelerationStructure>> {
        unsafe { self.0.tlas_as_hal::<A>(tlas.id) }
    }

    /// Produces a report of the global's current resource usage.
    pub fn generate_report(&self) -> wgc::global::GlobalReport {
        self.0.generate_report()
    }

    /// Shared slow path for error delivery: wraps `source` in a
    /// `ContextError`, classifies it, and hands it to the sink.
    ///
    /// `#[cold]` + `#[inline(never)]` keep this off callers' hot paths.
    #[cold]
    #[track_caller]
    #[inline(never)]
    fn handle_error_inner(
        &self,
        sink_mutex: &Mutex<ErrorSinkRaw>,
        error_type: ErrorType,
        source: ContextErrorSource,
        label: Label<'_>,
        fn_ident: &'static str,
    ) {
        let source: ErrorSource = Box::new(wgc::error::ContextError {
            fn_ident,
            source,
            label: label.unwrap_or_default().to_string(),
        });
        let final_error_handling = {
            let mut sink = sink_mutex.lock();
            // Lazily formatted: OutOfMemory carries no description.
            let description = || self.format_error(&*source);
            let error = match error_type {
                ErrorType::Internal => {
                    let description = description();
                    crate::Error::Internal {
                        source,
                        description,
                    }
                }
                ErrorType::OutOfMemory => crate::Error::OutOfMemory { source },
                ErrorType::Validation => {
                    let description = description();
                    crate::Error::Validation {
                        source,
                        description,
                    }
                }
                ErrorType::DeviceLost => return, // will be surfaced via callback
            };
            sink.handle_error_or_return_handler(error)
        };

        if let Some(f) = final_error_handling {
            // If the user has provided their own `uncaptured_handler` callback, invoke it now,
            // having released our lock on `sink_mutex`. See the comments on
            // `handle_error_or_return_handler` for details.
            f();
        }
    }

    /// Reports `source` (with a resource `label`) through the sink,
    /// classifying it via its `WebGpuError` type.
    #[inline]
    #[track_caller]
    fn handle_error(
        &self,
        sink_mutex: &Mutex<ErrorSinkRaw>,
        source: impl WebGpuError + WasmNotSendSync + 'static,
        label: Label<'_>,
        fn_ident: &'static str,
    ) {
        let error_type = source.webgpu_error_type();
        self.handle_error_inner(sink_mutex, error_type, Box::new(source), label, fn_ident)
    }

    /// Like [`Self::handle_error`], for errors with no associated label.
    #[inline]
    #[track_caller]
    fn handle_error_nolabel(
        &self,
        sink_mutex: &Mutex<ErrorSinkRaw>,
        source: impl WebGpuError + WasmNotSendSync + 'static,
        fn_ident: &'static str,
    ) {
        let error_type = source.webgpu_error_type();
        self.handle_error_inner(sink_mutex, error_type, Box::new(source), None, fn_ident)
    }

    /// Panics with a formatted rendering of `cause`; used for errors that
    /// cannot be delivered through an error sink.
    #[track_caller]
    #[cold]
    fn handle_error_fatal(
        &self,
        cause: impl Error + WasmNotSendSync + 'static,
        operation: &'static str,
    ) -> ! {
        panic!("Error in {operation}: {f}", f = self.format_error(&cause));
    }

    /// Formats `err` and its whole `source()` chain as an indented tree,
    /// expanding `MultiError`s into one branch per contained error.
    #[inline(never)]
    fn format_error(&self, err: &(dyn Error + 'static)) -> String {
        let mut output = String::new();
        let mut level = 1;

        // Recursive helper: prints `e` at the current indent level, then
        // descends into its source chain one level deeper.
        fn print_tree(output: &mut String, level: &mut usize, e: &(dyn Error + 'static)) {
            let mut print = |e: &(dyn Error + 'static)| {
                use core::fmt::Write;
                writeln!(output, "{}{}", " ".repeat(*level * 2), e).unwrap();

                if let Some(e) = e.source() {
                    *level += 1;
                    print_tree(output, level, e);
                    *level -= 1;
                }
            };
            if let Some(multi) = e.downcast_ref::<wgc::error::MultiError>() {
                for e in multi.errors() {
                    print(e);
                }
            } else {
                print(e);
            }
        }

        print_tree(&mut output, &mut level, err);

        format!("Validation Error\n\nCaused by:\n{output}")
    }

    /// Borrows the underlying `wgpu-hal` queue, if it belongs to backend `A`.
    pub unsafe fn queue_as_hal<A: hal::Api>(
        &self,
        queue: &CoreQueue,
    ) -> Option<impl Deref<Target = A::Queue> + WasmNotSendSync> {
        unsafe { self.0.queue_as_hal::<A>(queue.id) }
    }
}
388
389fn map_buffer_copy_view(
390    view: crate::TexelCopyBufferInfo<'_>,
391) -> wgt::TexelCopyBufferInfo<wgc::id::BufferId> {
392    wgt::TexelCopyBufferInfo {
393        buffer: view.buffer.inner.as_core().id,
394        layout: view.layout,
395    }
396}
397
398fn map_texture_copy_view(
399    view: crate::TexelCopyTextureInfo<'_>,
400) -> wgt::TexelCopyTextureInfo<wgc::id::TextureId> {
401    wgt::TexelCopyTextureInfo {
402        texture: view.texture.inner.as_core().id,
403        mip_level: view.mip_level,
404        origin: view.origin,
405        aspect: view.aspect,
406    }
407}
408
409#[cfg_attr(not(webgl), expect(unused))]
410fn map_texture_tagged_copy_view(
411    view: crate::CopyExternalImageDestInfo<&api::Texture>,
412) -> wgt::CopyExternalImageDestInfo<wgc::id::TextureId> {
413    wgt::CopyExternalImageDestInfo {
414        texture: view.texture.inner.as_core().id,
415        mip_level: view.mip_level,
416        origin: view.origin,
417        aspect: view.aspect,
418        color_space: view.color_space,
419        premultiplied_alpha: view.premultiplied_alpha,
420    }
421}
422
423fn map_load_op<V: Copy>(load: &LoadOp<V>) -> LoadOp<Option<V>> {
424    match load {
425        LoadOp::Clear(clear_value) => LoadOp::Clear(Some(*clear_value)),
426        LoadOp::Load => LoadOp::Load,
427    }
428}
429
430fn map_pass_channel<V: Copy>(ops: Option<&Operations<V>>) -> wgc::command::PassChannel<Option<V>> {
431    match ops {
432        Some(&Operations { load, store }) => wgc::command::PassChannel {
433            load_op: Some(map_load_op(&load)),
434            store_op: Some(store),
435            read_only: false,
436        },
437        None => wgc::command::PassChannel {
438            load_op: None,
439            store_op: None,
440            read_only: true,
441        },
442    }
443}
444
/// `wgpu-core` backing state for a surface handle.
#[derive(Debug)]
pub struct CoreSurface {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::SurfaceId,
    /// Configured device is needed to know which backend
    /// code to execute when acquiring a new frame.
    configured_device: Mutex<Option<wgc::id::DeviceId>>,
    /// The error sink with which to report errors.
    /// `None` if the surface has not been configured.
    error_sink: Mutex<Option<ErrorSink>>,
}
456
/// `wgpu-core` backing state for an adapter handle: context plus id.
#[derive(Debug)]
pub struct CoreAdapter {
    pub(crate) context: ContextWgpuCore,
    pub(crate) id: wgc::id::AdapterId,
}
462
/// `wgpu-core` backing state for a device handle.
#[derive(Debug)]
pub struct CoreDevice {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::DeviceId,
    // Shared with the queue and all resources created from this device.
    error_sink: ErrorSink,
    // Features requested at creation time.
    features: Features,
}
470
/// `wgpu-core` backing state for a buffer handle.
#[derive(Debug)]
pub struct CoreBuffer {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BufferId,
    error_sink: ErrorSink,
}
477
/// `wgpu-core` backing state for a shader module handle.
#[derive(Debug)]
pub struct CoreShaderModule {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::ShaderModuleId,
    // Captured at creation so compilation info can be queried later.
    compilation_info: CompilationInfo,
}
484
/// `wgpu-core` backing state for a bind group layout handle.
#[derive(Debug)]
pub struct CoreBindGroupLayout {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BindGroupLayoutId,
}
490
/// `wgpu-core` backing state for a bind group handle.
#[derive(Debug)]
pub struct CoreBindGroup {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BindGroupId,
}
496
/// `wgpu-core` backing state for a texture handle.
#[derive(Debug)]
pub struct CoreTexture {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::TextureId,
    error_sink: ErrorSink,
}
503
/// `wgpu-core` backing state for a texture view handle.
#[derive(Debug)]
pub struct CoreTextureView {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::TextureViewId,
}
509
/// `wgpu-core` backing state for an external texture handle.
#[derive(Debug)]
pub struct CoreExternalTexture {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::ExternalTextureId,
}
515
/// `wgpu-core` backing state for a sampler handle.
#[derive(Debug)]
pub struct CoreSampler {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::SamplerId,
}
521
/// `wgpu-core` backing state for a query set handle.
#[derive(Debug)]
pub struct CoreQuerySet {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::QuerySetId,
}
527
/// `wgpu-core` backing state for a pipeline layout handle.
#[derive(Debug)]
pub struct CorePipelineLayout {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::PipelineLayoutId,
}
533
/// `wgpu-core` backing state for a pipeline cache handle.
#[derive(Debug)]
pub struct CorePipelineCache {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::PipelineCacheId,
}
539
/// `wgpu-core` backing state for a finished command buffer handle.
#[derive(Debug)]
pub struct CoreCommandBuffer {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::CommandBufferId,
}
545
/// `wgpu-core` backing state for a render bundle encoder.
#[derive(Debug)]
pub struct CoreRenderBundleEncoder {
    pub(crate) context: ContextWgpuCore,
    // Owned encoder state rather than an id: encoding happens client-side.
    encoder: wgc::command::RenderBundleEncoder,
    id: crate::cmp::Identifier,
}
552
/// `wgpu-core` backing state for a finished render bundle.
#[derive(Debug)]
pub struct CoreRenderBundle {
    id: wgc::id::RenderBundleId,
}
557
/// `wgpu-core` backing state for a queue handle; shares its error sink
/// with the device it was created alongside.
#[derive(Debug)]
pub struct CoreQueue {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::QueueId,
    error_sink: ErrorSink,
}
564
/// `wgpu-core` backing state for a compute pipeline handle.
#[derive(Debug)]
pub struct CoreComputePipeline {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::ComputePipelineId,
    error_sink: ErrorSink,
}
571
/// `wgpu-core` backing state for a render pipeline handle.
#[derive(Debug)]
pub struct CoreRenderPipeline {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::RenderPipelineId,
    error_sink: ErrorSink,
}
578
/// `wgpu-core` backing state for an in-progress compute pass.
#[derive(Debug)]
pub struct CoreComputePass {
    pub(crate) context: ContextWgpuCore,
    // Owned pass state rather than an id: recording happens client-side.
    pass: wgc::command::ComputePass,
    error_sink: ErrorSink,
    id: crate::cmp::Identifier,
}
586
/// `wgpu-core` backing state for an in-progress render pass.
#[derive(Debug)]
pub struct CoreRenderPass {
    pub(crate) context: ContextWgpuCore,
    // Owned pass state rather than an id: recording happens client-side.
    pass: wgc::command::RenderPass,
    error_sink: ErrorSink,
    id: crate::cmp::Identifier,
}
594
/// `wgpu-core` backing state for a command encoder handle.
#[derive(Debug)]
pub struct CoreCommandEncoder {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::CommandEncoderId,
    error_sink: ErrorSink,
}
601
/// `wgpu-core` backing state for a bottom-level acceleration structure.
#[derive(Debug)]
pub struct CoreBlas {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BlasId,
    error_sink: ErrorSink,
}
608
/// `wgpu-core` backing state for a top-level acceleration structure.
#[derive(Debug)]
pub struct CoreTlas {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::TlasId,
    // NOTE(review): no error sink yet, unlike `CoreBlas` — presumably no
    // TLAS operation reports through one; confirm before adding.
    // error_sink: ErrorSink,
}
615
/// Per-frame detail handed back with an acquired surface texture.
#[derive(Debug)]
pub struct CoreSurfaceOutputDetail {
    context: ContextWgpuCore,
    surface_id: wgc::id::SurfaceId,
}
621
/// Shared, lockable error sink handed to every resource of a device.
type ErrorSink = Arc<Mutex<ErrorSinkRaw>>;

/// One entry of the WebGPU error-scope stack.
struct ErrorScope {
    // First error captured in this scope, if any; later ones are dropped.
    error: Option<crate::Error>,
    // Which error category this scope captures.
    filter: crate::ErrorFilter,
}
628
/// Mutable state behind an [`ErrorSink`]: the error-scope stack plus an
/// optional user-installed handler for uncaptured errors.
struct ErrorSinkRaw {
    scopes: Vec<ErrorScope>,
    uncaptured_handler: Option<Arc<dyn crate::UncapturedErrorHandler>>,
}
633
634impl ErrorSinkRaw {
635    fn new() -> ErrorSinkRaw {
636        ErrorSinkRaw {
637            scopes: Vec::new(),
638            uncaptured_handler: None,
639        }
640    }
641
642    /// Deliver the error to
643    ///
644    /// * the innermost error scope, if any, or
645    /// * the uncaptured error handler, if there is one, or
646    /// * [`default_error_handler()`].
647    ///
648    /// If a closure is returned, the caller should call it immediately after dropping the
649    /// [`ErrorSink`] mutex guard. This makes sure that the user callback is not called with
650    /// a wgpu mutex held.
651    #[track_caller]
652    #[must_use]
653    fn handle_error_or_return_handler(&mut self, err: crate::Error) -> Option<impl FnOnce()> {
654        let filter = match err {
655            crate::Error::OutOfMemory { .. } => crate::ErrorFilter::OutOfMemory,
656            crate::Error::Validation { .. } => crate::ErrorFilter::Validation,
657            crate::Error::Internal { .. } => crate::ErrorFilter::Internal,
658        };
659        match self
660            .scopes
661            .iter_mut()
662            .rev()
663            .find(|scope| scope.filter == filter)
664        {
665            Some(scope) => {
666                if scope.error.is_none() {
667                    scope.error = Some(err);
668                }
669                None
670            }
671            None => {
672                if let Some(custom_handler) = &self.uncaptured_handler {
673                    let custom_handler = Arc::clone(custom_handler);
674                    Some(move || (custom_handler)(err))
675                } else {
676                    // direct call preserves #[track_caller] where dyn can't
677                    default_error_handler(err)
678                }
679            }
680        }
681    }
682}
683
684impl fmt::Debug for ErrorSinkRaw {
685    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
686        write!(f, "ErrorSink")
687    }
688}
689
/// Fallback for uncaptured errors when no user handler is installed:
/// logs a notice and panics with the error's display text.
#[track_caller]
fn default_error_handler(err: crate::Error) -> ! {
    log::error!("Handling wgpu errors as fatal by default");
    panic!("wgpu error: {err}\n");
}
695
impl From<CreateShaderModuleError> for CompilationInfo {
    /// Converts a shader-module creation error into the compilation info
    /// reported to the user; only frontend/validation errors yield messages.
    fn from(value: CreateShaderModuleError) -> Self {
        match value {
            // Frontend parse and validation errors carry source locations
            // and convert to rich compilation messages.
            #[cfg(feature = "wgsl")]
            CreateShaderModuleError::Parsing(v) => v.into(),
            #[cfg(feature = "glsl")]
            CreateShaderModuleError::ParsingGlsl(v) => v.into(),
            #[cfg(feature = "spirv")]
            CreateShaderModuleError::ParsingSpirV(v) => v.into(),
            CreateShaderModuleError::Validation(v) => v.into(),
            // Device errors are reported through the error sink, and are not compilation errors.
            // Same goes for native shader module generation errors.
            CreateShaderModuleError::Device(_) | CreateShaderModuleError::Generation => {
                CompilationInfo {
                    messages: Vec::new(),
                }
            }
            // Everything else is an error message without location information.
            _ => CompilationInfo {
                messages: vec![CompilationMessage {
                    message: value.to_string(),
                    message_type: CompilationMessageType::Error,
                    location: None,
                }],
            },
        }
    }
}
724
/// Staging buffer used by `Queue::write_buffer_with`: the `wgpu-core`
/// staging-buffer id plus its mapped memory.
#[derive(Debug)]
pub struct CoreQueueWriteBuffer {
    buffer_id: wgc::id::StagingBufferId,
    mapping: CoreBufferMappedRange,
}
730
/// A mapped region of buffer memory: raw pointer plus length in bytes.
#[derive(Debug)]
pub struct CoreBufferMappedRange {
    ptr: NonNull<u8>,
    size: usize,
}
736
// SAFETY: NOTE(review): presumably `ptr` refers to a mapped allocation that
// remains valid and uniquely referenced for the mapping's lifetime, making
// cross-thread access sound — confirm against the buffer-mapping code.
#[cfg(send_sync)]
unsafe impl Send for CoreBufferMappedRange {}
#[cfg(send_sync)]
unsafe impl Sync for CoreBufferMappedRange {}
741
impl Drop for CoreBufferMappedRange {
    fn drop(&mut self) {
        // Intentionally left blank so that `BufferMappedRange` still
        // implements `Drop`, to match the web backend
    }
}
748
// Identity-based equality/ordering/hashing for every handle type: the
// context compares by `Arc` address, everything else by its wgpu-core id
// (or, for client-side recorders, their local identifier / pointer).
crate::cmp::impl_eq_ord_hash_arc_address!(ContextWgpuCore => .0);
crate::cmp::impl_eq_ord_hash_proxy!(CoreAdapter => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreDevice => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreQueue => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreShaderModule => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBindGroupLayout => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBindGroup => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreTextureView => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreSampler => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBuffer => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreTexture => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreExternalTexture => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBlas => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreTlas => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreQuerySet => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CorePipelineLayout => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderPipeline => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreComputePipeline => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CorePipelineCache => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreCommandEncoder => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreComputePass => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderPass => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreCommandBuffer => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderBundleEncoder => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderBundle => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreSurface => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreSurfaceOutputDetail => .surface_id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreQueueWriteBuffer => .mapping.ptr);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBufferMappedRange => .ptr);
778
impl dispatch::InstanceInterface for ContextWgpuCore {
    /// Builds a fresh `wgpu-core` global from the instance descriptor.
    fn new(desc: &wgt::InstanceDescriptor) -> Self
    where
        Self: Sized,
    {
        Self(Arc::new(wgc::global::Global::new("wgpu", desc)))
    }

    unsafe fn create_surface(
        &self,
        target: crate::api::SurfaceTargetUnsafe,
    ) -> Result<dispatch::DispatchSurface, crate::CreateSurfaceError> {
        // Dispatch on the platform-specific kind of raw surface target;
        // each arm registers the surface with the global and yields its id.
        let id = match target {
            SurfaceTargetUnsafe::RawHandle {
                raw_display_handle,
                raw_window_handle,
            } => unsafe {
                self.0
                    .instance_create_surface(raw_display_handle, raw_window_handle, None)
            },

            #[cfg(all(unix, not(target_vendor = "apple"), not(target_family = "wasm")))]
            SurfaceTargetUnsafe::Drm {
                fd,
                plane,
                connector_id,
                width,
                height,
                refresh_rate,
            } => unsafe {
                self.0.instance_create_surface_from_drm(
                    fd,
                    plane,
                    connector_id,
                    width,
                    height,
                    refresh_rate,
                    None,
                )
            },

            #[cfg(metal)]
            SurfaceTargetUnsafe::CoreAnimationLayer(layer) => unsafe {
                self.0.instance_create_surface_metal(layer, None)
            },

            #[cfg(dx12)]
            SurfaceTargetUnsafe::CompositionVisual(visual) => unsafe {
                self.0.instance_create_surface_from_visual(visual, None)
            },

            #[cfg(dx12)]
            SurfaceTargetUnsafe::SurfaceHandle(surface_handle) => unsafe {
                self.0
                    .instance_create_surface_from_surface_handle(surface_handle, None)
            },

            #[cfg(dx12)]
            SurfaceTargetUnsafe::SwapChainPanel(swap_chain_panel) => unsafe {
                self.0
                    .instance_create_surface_from_swap_chain_panel(swap_chain_panel, None)
            },
        }?;

        Ok(CoreSurface {
            context: self.clone(),
            id,
            // Both remain unset until the surface is configured (see the
            // field docs on `CoreSurface`).
            configured_device: Mutex::default(),
            error_sink: Mutex::default(),
        }
        .into())
    }

    /// Requests an adapter; the result is immediately ready since
    /// `wgpu-core` adapter selection is synchronous.
    fn request_adapter(
        &self,
        options: &crate::api::RequestAdapterOptions<'_, '_>,
    ) -> Pin<Box<dyn dispatch::RequestAdapterFuture>> {
        let id = self.0.request_adapter(
            &wgc::instance::RequestAdapterOptions {
                power_preference: options.power_preference,
                force_fallback_adapter: options.force_fallback_adapter,
                compatible_surface: options
                    .compatible_surface
                    .map(|surface| surface.inner.as_core().id),
            },
            wgt::Backends::all(),
            None,
        );
        let adapter = id.map(|id| {
            let core = CoreAdapter {
                context: self.clone(),
                id,
            };
            let generic: dispatch::DispatchAdapter = core.into();
            generic
        });
        Box::pin(ready(adapter))
    }

    /// Polls every device; returns whether all queues are empty. A poll
    /// failure here is unrecoverable and panics.
    fn poll_all_devices(&self, force_wait: bool) -> bool {
        match self.0.poll_all_devices(force_wait) {
            Ok(all_queue_empty) => all_queue_empty,
            Err(err) => self.handle_error_fatal(err, "Instance::poll_all_devices"),
        }
    }

    /// Reports the WGSL language extensions implemented by naga's frontend,
    /// translated into the public `WgslLanguageFeatures` flags.
    #[cfg(feature = "wgsl")]
    fn wgsl_language_features(&self) -> crate::WgslLanguageFeatures {
        use wgc::naga::front::wgsl::ImplementedLanguageExtension;
        ImplementedLanguageExtension::all().iter().copied().fold(
            crate::WgslLanguageFeatures::empty(),
            |acc, wle| {
                acc | match wle {
                    ImplementedLanguageExtension::ReadOnlyAndReadWriteStorageTextures => {
                        crate::WgslLanguageFeatures::ReadOnlyAndReadWriteStorageTextures
                    }
                    ImplementedLanguageExtension::Packed4x8IntegerDotProduct => {
                        crate::WgslLanguageFeatures::Packed4x8IntegerDotProduct
                    }
                    ImplementedLanguageExtension::PointerCompositeAccess => {
                        crate::WgslLanguageFeatures::PointerCompositeAccess
                    }
                }
            },
        )
    }

    /// Enumerates adapters on `backends`; ready immediately (synchronous
    /// under the hood), wrapping each id in a dispatch adapter.
    fn enumerate_adapters(
        &self,
        backends: crate::Backends,
    ) -> Pin<Box<dyn dispatch::EnumerateAdapterFuture>> {
        let adapters: Vec<DispatchAdapter> = self
            .enumerate_adapters(backends)
            .into_iter()
            .map(|adapter| {
                let core = crate::backend::wgpu_core::CoreAdapter {
                    context: self.clone(),
                    id: adapter,
                };
                core.into()
            })
            .collect();
        Box::pin(ready(adapters))
    }
}
924
impl dispatch::AdapterInterface for CoreAdapter {
    /// Opens a device/queue pair on this adapter; the future is immediately
    /// ready since `wgpu-core` device creation is synchronous.
    fn request_device(
        &self,
        desc: &crate::DeviceDescriptor<'_>,
    ) -> Pin<Box<dyn dispatch::RequestDeviceFuture>> {
        // Tracing is currently unsupported; warn loudly instead of failing.
        // (Same check as `ContextWgpuCore::create_device_from_hal`.)
        if !matches!(desc.trace, wgt::Trace::Off) {
            log::error!(
                "
                Feature 'trace' has been removed temporarily; \
                see https://github.com/gfx-rs/wgpu/issues/5974. \
                The `trace` parameter will have no effect."
            );
        }

        let res = self.context.0.adapter_request_device(
            self.id,
            &desc.map_label(|l| l.map(Borrowed)),
            None,
            None,
        );
        let (device_id, queue_id) = match res {
            Ok(ids) => ids,
            Err(err) => {
                return Box::pin(ready(Err(err.into())));
            }
        };
        // Device and queue report errors through the same sink.
        let error_sink = Arc::new(Mutex::new(ErrorSinkRaw::new()));
        let device = CoreDevice {
            context: self.context.clone(),
            id: device_id,
            error_sink: error_sink.clone(),
            features: desc.required_features,
        };
        let queue = CoreQueue {
            context: self.context.clone(),
            id: queue_id,
            error_sink,
        };
        Box::pin(ready(Ok((device.into(), queue.into()))))
    }

    /// Whether this adapter can present to the given surface.
    fn is_surface_supported(&self, surface: &dispatch::DispatchSurface) -> bool {
        let surface = surface.as_core();

        self.context
            .0
            .adapter_is_surface_supported(self.id, surface.id)
    }

    fn features(&self) -> crate::Features {
        self.context.0.adapter_features(self.id)
    }

    fn limits(&self) -> crate::Limits {
        self.context.0.adapter_limits(self.id)
    }

    fn downlevel_capabilities(&self) -> crate::DownlevelCapabilities {
        self.context.0.adapter_downlevel_capabilities(self.id)
    }

    fn get_info(&self) -> crate::AdapterInfo {
        self.context.0.adapter_get_info(self.id)
    }

    fn get_texture_format_features(
        &self,
        format: crate::TextureFormat,
    ) -> crate::TextureFormatFeatures {
        self.context
            .0
            .adapter_get_texture_format_features(self.id, format)
    }

    fn get_presentation_timestamp(&self) -> crate::PresentationTimestamp {
        self.context.0.adapter_get_presentation_timestamp(self.id)
    }
}
1003
1004impl Drop for CoreAdapter {
1005    fn drop(&mut self) {
1006        self.context.0.adapter_drop(self.id)
1007    }
1008}
1009
1010impl dispatch::DeviceInterface for CoreDevice {
1011    fn features(&self) -> crate::Features {
1012        self.context.0.device_features(self.id)
1013    }
1014
1015    fn limits(&self) -> crate::Limits {
1016        self.context.0.device_limits(self.id)
1017    }
1018
    // If we have no way to create a shader module, we can't return one, and so most of the function is unreachable.
    #[cfg_attr(
        not(any(
            feature = "spirv",
            feature = "glsl",
            feature = "wgsl",
            feature = "naga-ir"
        )),
        expect(unused)
    )]
    fn create_shader_module(
        &self,
        desc: crate::ShaderModuleDescriptor<'_>,
        shader_bound_checks: wgt::ShaderRuntimeChecks,
    ) -> dispatch::DispatchShaderModule {
        // Creates a shader module from whichever source language variants are
        // compiled in, reporting creation errors through the device error sink
        // and also surfacing them to the caller as compilation info.
        let descriptor = wgc::pipeline::ShaderModuleDescriptor {
            label: desc.label.map(Borrowed),
            runtime_checks: shader_bound_checks,
        };
        // Translate the frontend-specific source into the wgpu-core
        // representation; each frontend arm exists only when its cargo
        // feature is enabled.
        let source = match desc.source {
            #[cfg(feature = "spirv")]
            ShaderSource::SpirV(ref spv) => {
                // Parse the given shader code and store its representation.
                let options = naga::front::spv::Options {
                    adjust_coordinate_space: false, // we require NDC_Y_UP feature
                    strict_capabilities: true,
                    block_ctx_dump_prefix: None,
                };
                wgc::pipeline::ShaderModuleSource::SpirV(Borrowed(spv), options)
            }
            #[cfg(feature = "glsl")]
            ShaderSource::Glsl {
                ref shader,
                stage,
                defines,
            } => {
                // GLSL needs the target stage plus preprocessor defines,
                // converted to owned strings for the naga frontend.
                let options = naga::front::glsl::Options {
                    stage,
                    defines: defines
                        .iter()
                        .map(|&(key, value)| (String::from(key), String::from(value)))
                        .collect(),
                };
                wgc::pipeline::ShaderModuleSource::Glsl(Borrowed(shader), options)
            }
            #[cfg(feature = "wgsl")]
            ShaderSource::Wgsl(ref code) => wgc::pipeline::ShaderModuleSource::Wgsl(Borrowed(code)),
            #[cfg(feature = "naga-ir")]
            ShaderSource::Naga(module) => wgc::pipeline::ShaderModuleSource::Naga(module),
            // A `Dummy` source reaching this backend is a bug, hence the panic.
            ShaderSource::Dummy(_) => panic!("found `ShaderSource::Dummy`"),
        };
        let (id, error) =
            self.context
                .0
                .device_create_shader_module(self.id, &descriptor, source, None);
        // On failure the same cause is both routed to the error sink and
        // converted into `CompilationInfo` for the returned module.
        let compilation_info = match error {
            Some(cause) => {
                self.context.handle_error(
                    &self.error_sink,
                    cause.clone(),
                    desc.label,
                    "Device::create_shader_module",
                );
                CompilationInfo::from(cause)
            }
            None => CompilationInfo { messages: vec![] },
        };

        CoreShaderModule {
            context: self.context.clone(),
            id,
            compilation_info,
        }
        .into()
    }
1094
1095    unsafe fn create_shader_module_passthrough(
1096        &self,
1097        desc: &crate::ShaderModuleDescriptorPassthrough<'_>,
1098    ) -> dispatch::DispatchShaderModule {
1099        let desc = desc.map_label(|l| l.map(Cow::from));
1100        let (id, error) = unsafe {
1101            self.context
1102                .0
1103                .device_create_shader_module_passthrough(self.id, &desc, None)
1104        };
1105
1106        let compilation_info = match error {
1107            Some(cause) => {
1108                self.context.handle_error(
1109                    &self.error_sink,
1110                    cause.clone(),
1111                    desc.label.as_deref(),
1112                    "Device::create_shader_module_passthrough",
1113                );
1114                CompilationInfo::from(cause)
1115            }
1116            None => CompilationInfo { messages: vec![] },
1117        };
1118
1119        CoreShaderModule {
1120            context: self.context.clone(),
1121            id,
1122            compilation_info,
1123        }
1124        .into()
1125    }
1126
1127    fn create_bind_group_layout(
1128        &self,
1129        desc: &crate::BindGroupLayoutDescriptor<'_>,
1130    ) -> dispatch::DispatchBindGroupLayout {
1131        let descriptor = wgc::binding_model::BindGroupLayoutDescriptor {
1132            label: desc.label.map(Borrowed),
1133            entries: Borrowed(desc.entries),
1134        };
1135        let (id, error) =
1136            self.context
1137                .0
1138                .device_create_bind_group_layout(self.id, &descriptor, None);
1139        if let Some(cause) = error {
1140            self.context.handle_error(
1141                &self.error_sink,
1142                cause,
1143                desc.label,
1144                "Device::create_bind_group_layout",
1145            );
1146        }
1147        CoreBindGroupLayout {
1148            context: self.context.clone(),
1149            id,
1150        }
1151        .into()
1152    }
1153
    fn create_bind_group(
        &self,
        desc: &crate::BindGroupDescriptor<'_>,
    ) -> dispatch::DispatchBindGroup {
        // Creates a bind group. The core `BindingResource::*Array` variants
        // borrow slices of ids, so all arrayed resources are first flattened
        // into owned vectors that outlive the per-entry mapping below.
        use wgc::binding_model as bm;

        let mut arrayed_texture_views = Vec::new();
        let mut arrayed_samplers = Vec::new();
        if self.features.contains(Features::TEXTURE_BINDING_ARRAY) {
            // gather all the array view IDs first
            for entry in desc.entries.iter() {
                if let BindingResource::TextureViewArray(array) = entry.resource {
                    arrayed_texture_views.extend(array.iter().map(|view| view.inner.as_core().id));
                }
                if let BindingResource::SamplerArray(array) = entry.resource {
                    arrayed_samplers.extend(array.iter().map(|sampler| sampler.inner.as_core().id));
                }
            }
        }
        // Cursors over the gathered ids; the mapping below consumes them in
        // the same entry order they were pushed, `array.len()` at a time, so
        // the slices stay aligned with their entries.
        let mut remaining_arrayed_texture_views = &arrayed_texture_views[..];
        let mut remaining_arrayed_samplers = &arrayed_samplers[..];

        let mut arrayed_buffer_bindings = Vec::new();
        if self.features.contains(Features::BUFFER_BINDING_ARRAY) {
            // gather all the buffers first
            for entry in desc.entries.iter() {
                if let BindingResource::BufferArray(array) = entry.resource {
                    arrayed_buffer_bindings.extend(array.iter().map(|binding| bm::BufferBinding {
                        buffer: binding.buffer.inner.as_core().id,
                        offset: binding.offset,
                        size: binding.size,
                    }));
                }
            }
        }
        let mut remaining_arrayed_buffer_bindings = &arrayed_buffer_bindings[..];

        // Translate each wgpu-level entry into its wgpu-core counterpart,
        // carving array slices off the cursors as they are encountered.
        let entries = desc
            .entries
            .iter()
            .map(|entry| bm::BindGroupEntry {
                binding: entry.binding,
                resource: match entry.resource {
                    BindingResource::Buffer(BufferBinding {
                        buffer,
                        offset,
                        size,
                    }) => bm::BindingResource::Buffer(bm::BufferBinding {
                        buffer: buffer.inner.as_core().id,
                        offset,
                        size,
                    }),
                    BindingResource::BufferArray(array) => {
                        let slice = &remaining_arrayed_buffer_bindings[..array.len()];
                        remaining_arrayed_buffer_bindings =
                            &remaining_arrayed_buffer_bindings[array.len()..];
                        bm::BindingResource::BufferArray(Borrowed(slice))
                    }
                    BindingResource::Sampler(sampler) => {
                        bm::BindingResource::Sampler(sampler.inner.as_core().id)
                    }
                    BindingResource::SamplerArray(array) => {
                        let slice = &remaining_arrayed_samplers[..array.len()];
                        remaining_arrayed_samplers = &remaining_arrayed_samplers[array.len()..];
                        bm::BindingResource::SamplerArray(Borrowed(slice))
                    }
                    BindingResource::TextureView(texture_view) => {
                        bm::BindingResource::TextureView(texture_view.inner.as_core().id)
                    }
                    BindingResource::TextureViewArray(array) => {
                        let slice = &remaining_arrayed_texture_views[..array.len()];
                        remaining_arrayed_texture_views =
                            &remaining_arrayed_texture_views[array.len()..];
                        bm::BindingResource::TextureViewArray(Borrowed(slice))
                    }
                    BindingResource::AccelerationStructure(acceleration_structure) => {
                        bm::BindingResource::AccelerationStructure(
                            acceleration_structure.inner.as_core().id,
                        )
                    }
                    BindingResource::ExternalTexture(external_texture) => {
                        bm::BindingResource::ExternalTexture(external_texture.inner.as_core().id)
                    }
                },
            })
            .collect::<Vec<_>>();
        let descriptor = bm::BindGroupDescriptor {
            label: desc.label.as_ref().map(|label| Borrowed(&label[..])),
            layout: desc.layout.inner.as_core().id,
            entries: Borrowed(&entries),
        };

        let (id, error) = self
            .context
            .0
            .device_create_bind_group(self.id, &descriptor, None);
        if let Some(cause) = error {
            self.context.handle_error(
                &self.error_sink,
                cause,
                desc.label,
                "Device::create_bind_group",
            );
        }
        CoreBindGroup {
            context: self.context.clone(),
            id,
        }
        .into()
    }
1264
1265    fn create_pipeline_layout(
1266        &self,
1267        desc: &crate::PipelineLayoutDescriptor<'_>,
1268    ) -> dispatch::DispatchPipelineLayout {
1269        // Limit is always less or equal to hal::MAX_BIND_GROUPS, so this is always right
1270        // Guards following ArrayVec
1271        assert!(
1272            desc.bind_group_layouts.len() <= wgc::MAX_BIND_GROUPS,
1273            "Bind group layout count {} exceeds device bind group limit {}",
1274            desc.bind_group_layouts.len(),
1275            wgc::MAX_BIND_GROUPS
1276        );
1277
1278        let temp_layouts = desc
1279            .bind_group_layouts
1280            .iter()
1281            .map(|bgl| bgl.inner.as_core().id)
1282            .collect::<ArrayVec<_, { wgc::MAX_BIND_GROUPS }>>();
1283        let descriptor = wgc::binding_model::PipelineLayoutDescriptor {
1284            label: desc.label.map(Borrowed),
1285            bind_group_layouts: Borrowed(&temp_layouts),
1286            push_constant_ranges: Borrowed(desc.push_constant_ranges),
1287        };
1288
1289        let (id, error) = self
1290            .context
1291            .0
1292            .device_create_pipeline_layout(self.id, &descriptor, None);
1293        if let Some(cause) = error {
1294            self.context.handle_error(
1295                &self.error_sink,
1296                cause,
1297                desc.label,
1298                "Device::create_pipeline_layout",
1299            );
1300        }
1301        CorePipelineLayout {
1302            context: self.context.clone(),
1303            id,
1304        }
1305        .into()
1306    }
1307
    fn create_render_pipeline(
        &self,
        desc: &crate::RenderPipelineDescriptor<'_>,
    ) -> dispatch::DispatchRenderPipeline {
        // Creates a render pipeline, translating the wgpu-level descriptor
        // into its wgpu-core form and routing errors to the device error sink.
        use wgc::pipeline as pipe;

        // Vertex buffer layouts, borrowing the caller's attribute slices.
        let vertex_buffers: ArrayVec<_, { wgc::MAX_VERTEX_BUFFERS }> = desc
            .vertex
            .buffers
            .iter()
            .map(|vbuf| pipe::VertexBufferLayout {
                array_stride: vbuf.array_stride,
                step_mode: vbuf.step_mode,
                attributes: Borrowed(vbuf.attributes),
            })
            .collect();

        // Pipeline-override constants for the vertex stage, keyed by name.
        let vert_constants = desc
            .vertex
            .compilation_options
            .constants
            .iter()
            .map(|&(key, value)| (String::from(key), value))
            .collect();

        let descriptor = pipe::RenderPipelineDescriptor {
            label: desc.label.map(Borrowed),
            layout: desc.layout.map(|layout| layout.inner.as_core().id),
            vertex: pipe::VertexState {
                stage: pipe::ProgrammableStageDescriptor {
                    module: desc.vertex.module.inner.as_core().id,
                    entry_point: desc.vertex.entry_point.map(Borrowed),
                    constants: vert_constants,
                    zero_initialize_workgroup_memory: desc
                        .vertex
                        .compilation_options
                        .zero_initialize_workgroup_memory,
                },
                buffers: Borrowed(&vertex_buffers),
            },
            primitive: desc.primitive,
            depth_stencil: desc.depth_stencil.clone(),
            multisample: desc.multisample,
            // Fragment stage is optional; each stage maps its own
            // compilation options.
            fragment: desc.fragment.as_ref().map(|frag| {
                let frag_constants = frag
                    .compilation_options
                    .constants
                    .iter()
                    .map(|&(key, value)| (String::from(key), value))
                    .collect();
                pipe::FragmentState {
                    stage: pipe::ProgrammableStageDescriptor {
                        module: frag.module.inner.as_core().id,
                        entry_point: frag.entry_point.map(Borrowed),
                        constants: frag_constants,
                        zero_initialize_workgroup_memory: frag
                            .compilation_options
                            .zero_initialize_workgroup_memory,
                    },
                    targets: Borrowed(frag.targets),
                }
            }),
            multiview: desc.multiview,
            cache: desc.cache.map(|cache| cache.inner.as_core().id),
        };

        let (id, error) = self
            .context
            .0
            .device_create_render_pipeline(self.id, &descriptor, None);
        if let Some(cause) = error {
            // `Internal` means shader translation failed inside wgpu itself,
            // so users are asked to report it upstream.
            if let wgc::pipeline::CreateRenderPipelineError::Internal { stage, ref error } = cause {
                log::error!("Shader translation error for stage {stage:?}: {error}");
                log::error!("Please report it to https://github.com/gfx-rs/wgpu");
            }
            self.context.handle_error(
                &self.error_sink,
                cause,
                desc.label,
                "Device::create_render_pipeline",
            );
        }
        CoreRenderPipeline {
            context: self.context.clone(),
            id,
            error_sink: Arc::clone(&self.error_sink),
        }
        .into()
    }
1397
1398    fn create_mesh_pipeline(
1399        &self,
1400        desc: &crate::MeshPipelineDescriptor<'_>,
1401    ) -> dispatch::DispatchRenderPipeline {
1402        use wgc::pipeline as pipe;
1403
1404        let mesh_constants = desc
1405            .mesh
1406            .compilation_options
1407            .constants
1408            .iter()
1409            .map(|&(key, value)| (String::from(key), value))
1410            .collect();
1411        let descriptor = pipe::MeshPipelineDescriptor {
1412            label: desc.label.map(Borrowed),
1413            task: desc.task.as_ref().map(|task| {
1414                let task_constants = task
1415                    .compilation_options
1416                    .constants
1417                    .iter()
1418                    .map(|&(key, value)| (String::from(key), value))
1419                    .collect();
1420                pipe::TaskState {
1421                    stage: pipe::ProgrammableStageDescriptor {
1422                        module: task.module.inner.as_core().id,
1423                        entry_point: task.entry_point.map(Borrowed),
1424                        constants: task_constants,
1425                        zero_initialize_workgroup_memory: desc
1426                            .mesh
1427                            .compilation_options
1428                            .zero_initialize_workgroup_memory,
1429                    },
1430                }
1431            }),
1432            mesh: pipe::MeshState {
1433                stage: pipe::ProgrammableStageDescriptor {
1434                    module: desc.mesh.module.inner.as_core().id,
1435                    entry_point: desc.mesh.entry_point.map(Borrowed),
1436                    constants: mesh_constants,
1437                    zero_initialize_workgroup_memory: desc
1438                        .mesh
1439                        .compilation_options
1440                        .zero_initialize_workgroup_memory,
1441                },
1442            },
1443            layout: desc.layout.map(|layout| layout.inner.as_core().id),
1444            primitive: desc.primitive,
1445            depth_stencil: desc.depth_stencil.clone(),
1446            multisample: desc.multisample,
1447            fragment: desc.fragment.as_ref().map(|frag| {
1448                let frag_constants = frag
1449                    .compilation_options
1450                    .constants
1451                    .iter()
1452                    .map(|&(key, value)| (String::from(key), value))
1453                    .collect();
1454                pipe::FragmentState {
1455                    stage: pipe::ProgrammableStageDescriptor {
1456                        module: frag.module.inner.as_core().id,
1457                        entry_point: frag.entry_point.map(Borrowed),
1458                        constants: frag_constants,
1459                        zero_initialize_workgroup_memory: frag
1460                            .compilation_options
1461                            .zero_initialize_workgroup_memory,
1462                    },
1463                    targets: Borrowed(frag.targets),
1464                }
1465            }),
1466            multiview: desc.multiview,
1467            cache: desc.cache.map(|cache| cache.inner.as_core().id),
1468        };
1469
1470        let (id, error) = self
1471            .context
1472            .0
1473            .device_create_mesh_pipeline(self.id, &descriptor, None);
1474        if let Some(cause) = error {
1475            if let wgc::pipeline::CreateRenderPipelineError::Internal { stage, ref error } = cause {
1476                log::error!("Shader translation error for stage {stage:?}: {error}");
1477                log::error!("Please report it to https://github.com/gfx-rs/wgpu");
1478            }
1479            self.context.handle_error(
1480                &self.error_sink,
1481                cause,
1482                desc.label,
1483                "Device::create_render_pipeline",
1484            );
1485        }
1486        CoreRenderPipeline {
1487            context: self.context.clone(),
1488            id,
1489            error_sink: Arc::clone(&self.error_sink),
1490        }
1491        .into()
1492    }
1493
1494    fn create_compute_pipeline(
1495        &self,
1496        desc: &crate::ComputePipelineDescriptor<'_>,
1497    ) -> dispatch::DispatchComputePipeline {
1498        use wgc::pipeline as pipe;
1499
1500        let constants = desc
1501            .compilation_options
1502            .constants
1503            .iter()
1504            .map(|&(key, value)| (String::from(key), value))
1505            .collect();
1506
1507        let descriptor = pipe::ComputePipelineDescriptor {
1508            label: desc.label.map(Borrowed),
1509            layout: desc.layout.map(|pll| pll.inner.as_core().id),
1510            stage: pipe::ProgrammableStageDescriptor {
1511                module: desc.module.inner.as_core().id,
1512                entry_point: desc.entry_point.map(Borrowed),
1513                constants,
1514                zero_initialize_workgroup_memory: desc
1515                    .compilation_options
1516                    .zero_initialize_workgroup_memory,
1517            },
1518            cache: desc.cache.map(|cache| cache.inner.as_core().id),
1519        };
1520
1521        let (id, error) = self
1522            .context
1523            .0
1524            .device_create_compute_pipeline(self.id, &descriptor, None);
1525        if let Some(cause) = error {
1526            if let wgc::pipeline::CreateComputePipelineError::Internal(ref error) = cause {
1527                log::error!(
1528                    "Shader translation error for stage {:?}: {}",
1529                    wgt::ShaderStages::COMPUTE,
1530                    error
1531                );
1532                log::error!("Please report it to https://github.com/gfx-rs/wgpu");
1533            }
1534            self.context.handle_error(
1535                &self.error_sink,
1536                cause,
1537                desc.label,
1538                "Device::create_compute_pipeline",
1539            );
1540        }
1541        CoreComputePipeline {
1542            context: self.context.clone(),
1543            id,
1544            error_sink: Arc::clone(&self.error_sink),
1545        }
1546        .into()
1547    }
1548
1549    unsafe fn create_pipeline_cache(
1550        &self,
1551        desc: &crate::PipelineCacheDescriptor<'_>,
1552    ) -> dispatch::DispatchPipelineCache {
1553        use wgc::pipeline as pipe;
1554
1555        let descriptor = pipe::PipelineCacheDescriptor {
1556            label: desc.label.map(Borrowed),
1557            data: desc.data.map(Borrowed),
1558            fallback: desc.fallback,
1559        };
1560        let (id, error) = unsafe {
1561            self.context
1562                .0
1563                .device_create_pipeline_cache(self.id, &descriptor, None)
1564        };
1565        if let Some(cause) = error {
1566            self.context.handle_error(
1567                &self.error_sink,
1568                cause,
1569                desc.label,
1570                "Device::device_create_pipeline_cache_init",
1571            );
1572        }
1573        CorePipelineCache {
1574            context: self.context.clone(),
1575            id,
1576        }
1577        .into()
1578    }
1579
1580    fn create_buffer(&self, desc: &crate::BufferDescriptor<'_>) -> dispatch::DispatchBuffer {
1581        let (id, error) = self.context.0.device_create_buffer(
1582            self.id,
1583            &desc.map_label(|l| l.map(Borrowed)),
1584            None,
1585        );
1586        if let Some(cause) = error {
1587            self.context
1588                .handle_error(&self.error_sink, cause, desc.label, "Device::create_buffer");
1589        }
1590
1591        CoreBuffer {
1592            context: self.context.clone(),
1593            id,
1594            error_sink: Arc::clone(&self.error_sink),
1595        }
1596        .into()
1597    }
1598
1599    fn create_texture(&self, desc: &crate::TextureDescriptor<'_>) -> dispatch::DispatchTexture {
1600        let wgt_desc = desc.map_label_and_view_formats(|l| l.map(Borrowed), |v| v.to_vec());
1601        let (id, error) = self
1602            .context
1603            .0
1604            .device_create_texture(self.id, &wgt_desc, None);
1605        if let Some(cause) = error {
1606            self.context.handle_error(
1607                &self.error_sink,
1608                cause,
1609                desc.label,
1610                "Device::create_texture",
1611            );
1612        }
1613
1614        CoreTexture {
1615            context: self.context.clone(),
1616            id,
1617            error_sink: Arc::clone(&self.error_sink),
1618        }
1619        .into()
1620    }
1621
1622    fn create_external_texture(
1623        &self,
1624        desc: &crate::ExternalTextureDescriptor<'_>,
1625        planes: &[&crate::TextureView],
1626    ) -> dispatch::DispatchExternalTexture {
1627        let wgt_desc = desc.map_label(|l| l.map(Borrowed));
1628        let planes = planes
1629            .iter()
1630            .map(|plane| plane.inner.as_core().id)
1631            .collect::<Vec<_>>();
1632        let (id, error) = self
1633            .context
1634            .0
1635            .device_create_external_texture(self.id, &wgt_desc, &planes, None);
1636        if let Some(cause) = error {
1637            self.context.handle_error(
1638                &self.error_sink,
1639                cause,
1640                desc.label,
1641                "Device::create_external_texture",
1642            );
1643        }
1644
1645        CoreExternalTexture {
1646            context: self.context.clone(),
1647            id,
1648        }
1649        .into()
1650    }
1651
1652    fn create_blas(
1653        &self,
1654        desc: &crate::CreateBlasDescriptor<'_>,
1655        sizes: crate::BlasGeometrySizeDescriptors,
1656    ) -> (Option<u64>, dispatch::DispatchBlas) {
1657        let global = &self.context.0;
1658        let (id, handle, error) =
1659            global.device_create_blas(self.id, &desc.map_label(|l| l.map(Borrowed)), sizes, None);
1660        if let Some(cause) = error {
1661            self.context
1662                .handle_error(&self.error_sink, cause, desc.label, "Device::create_blas");
1663        }
1664        (
1665            handle,
1666            CoreBlas {
1667                context: self.context.clone(),
1668                id,
1669                error_sink: Arc::clone(&self.error_sink),
1670            }
1671            .into(),
1672        )
1673    }
1674
1675    fn create_tlas(&self, desc: &crate::CreateTlasDescriptor<'_>) -> dispatch::DispatchTlas {
1676        let global = &self.context.0;
1677        let (id, error) =
1678            global.device_create_tlas(self.id, &desc.map_label(|l| l.map(Borrowed)), None);
1679        if let Some(cause) = error {
1680            self.context
1681                .handle_error(&self.error_sink, cause, desc.label, "Device::create_tlas");
1682        }
1683        CoreTlas {
1684            context: self.context.clone(),
1685            id,
1686            // error_sink: Arc::clone(&self.error_sink),
1687        }
1688        .into()
1689    }
1690
1691    fn create_sampler(&self, desc: &crate::SamplerDescriptor<'_>) -> dispatch::DispatchSampler {
1692        let descriptor = wgc::resource::SamplerDescriptor {
1693            label: desc.label.map(Borrowed),
1694            address_modes: [
1695                desc.address_mode_u,
1696                desc.address_mode_v,
1697                desc.address_mode_w,
1698            ],
1699            mag_filter: desc.mag_filter,
1700            min_filter: desc.min_filter,
1701            mipmap_filter: desc.mipmap_filter,
1702            lod_min_clamp: desc.lod_min_clamp,
1703            lod_max_clamp: desc.lod_max_clamp,
1704            compare: desc.compare,
1705            anisotropy_clamp: desc.anisotropy_clamp,
1706            border_color: desc.border_color,
1707        };
1708
1709        let (id, error) = self
1710            .context
1711            .0
1712            .device_create_sampler(self.id, &descriptor, None);
1713        if let Some(cause) = error {
1714            self.context.handle_error(
1715                &self.error_sink,
1716                cause,
1717                desc.label,
1718                "Device::create_sampler",
1719            );
1720        }
1721        CoreSampler {
1722            context: self.context.clone(),
1723            id,
1724        }
1725        .into()
1726    }
1727
1728    fn create_query_set(&self, desc: &crate::QuerySetDescriptor<'_>) -> dispatch::DispatchQuerySet {
1729        let (id, error) = self.context.0.device_create_query_set(
1730            self.id,
1731            &desc.map_label(|l| l.map(Borrowed)),
1732            None,
1733        );
1734        if let Some(cause) = error {
1735            self.context
1736                .handle_error_nolabel(&self.error_sink, cause, "Device::create_query_set");
1737        }
1738        CoreQuerySet {
1739            context: self.context.clone(),
1740            id,
1741        }
1742        .into()
1743    }
1744
1745    fn create_command_encoder(
1746        &self,
1747        desc: &crate::CommandEncoderDescriptor<'_>,
1748    ) -> dispatch::DispatchCommandEncoder {
1749        let (id, error) = self.context.0.device_create_command_encoder(
1750            self.id,
1751            &desc.map_label(|l| l.map(Borrowed)),
1752            None,
1753        );
1754        if let Some(cause) = error {
1755            self.context.handle_error(
1756                &self.error_sink,
1757                cause,
1758                desc.label,
1759                "Device::create_command_encoder",
1760            );
1761        }
1762
1763        CoreCommandEncoder {
1764            context: self.context.clone(),
1765            id,
1766            error_sink: Arc::clone(&self.error_sink),
1767        }
1768        .into()
1769    }
1770
1771    fn create_render_bundle_encoder(
1772        &self,
1773        desc: &crate::RenderBundleEncoderDescriptor<'_>,
1774    ) -> dispatch::DispatchRenderBundleEncoder {
1775        let descriptor = wgc::command::RenderBundleEncoderDescriptor {
1776            label: desc.label.map(Borrowed),
1777            color_formats: Borrowed(desc.color_formats),
1778            depth_stencil: desc.depth_stencil,
1779            sample_count: desc.sample_count,
1780            multiview: desc.multiview,
1781        };
1782        let encoder = match wgc::command::RenderBundleEncoder::new(&descriptor, self.id, None) {
1783            Ok(encoder) => encoder,
1784            Err(e) => panic!("Error in Device::create_render_bundle_encoder: {e}"),
1785        };
1786
1787        CoreRenderBundleEncoder {
1788            context: self.context.clone(),
1789            encoder,
1790            id: crate::cmp::Identifier::create(),
1791        }
1792        .into()
1793    }
1794
    // Registers the closure wgpu-core invokes when this device is lost.
    fn set_device_lost_callback(&self, device_lost_callback: dispatch::BoxDeviceLostCallback) {
        self.context
            .0
            .device_set_device_lost_closure(self.id, device_lost_callback);
    }
1800
1801    fn on_uncaptured_error(&self, handler: Arc<dyn crate::UncapturedErrorHandler>) {
1802        let mut error_sink = self.error_sink.lock();
1803        error_sink.uncaptured_handler = Some(handler);
1804    }
1805
1806    fn push_error_scope(&self, filter: crate::ErrorFilter) {
1807        let mut error_sink = self.error_sink.lock();
1808        error_sink.scopes.push(ErrorScope {
1809            error: None,
1810            filter,
1811        });
1812    }
1813
1814    fn pop_error_scope(&self) -> Pin<Box<dyn dispatch::PopErrorScopeFuture>> {
1815        let mut error_sink = self.error_sink.lock();
1816        let scope = error_sink.scopes.pop().unwrap();
1817        Box::pin(ready(scope.error))
1818    }
1819
    // Begins a capture in an attached graphics debugger (safety contract is
    // declared on the trait's `unsafe fn`).
    unsafe fn start_graphics_debugger_capture(&self) {
        unsafe {
            self.context
                .0
                .device_start_graphics_debugger_capture(self.id)
        };
    }
1827
    // Ends a capture previously started with `start_graphics_debugger_capture`.
    unsafe fn stop_graphics_debugger_capture(&self) {
        unsafe {
            self.context
                .0
                .device_stop_graphics_debugger_capture(self.id)
        };
    }
1835
1836    fn poll(&self, poll_type: wgt::PollType<u64>) -> Result<crate::PollStatus, crate::PollError> {
1837        match self.context.0.device_poll(self.id, poll_type) {
1838            Ok(status) => Ok(status),
1839            Err(err) => {
1840                if let Some(poll_error) = err.to_poll_error() {
1841                    return Err(poll_error);
1842                }
1843
1844                self.context.handle_error_fatal(err, "Device::poll")
1845            }
1846        }
1847    }
1848
    // Snapshot of wgpu-core's internal counters for this device.
    fn get_internal_counters(&self) -> crate::InternalCounters {
        self.context.0.device_get_internal_counters(self.id)
    }
1852
    // Memory-allocator report for this device; `None` when the backend
    // provides no report.
    fn generate_allocator_report(&self) -> Option<wgt::AllocatorReport> {
        self.context.0.device_generate_allocator_report(self.id)
    }
1856
    // Explicitly destroys the underlying device resources.
    fn destroy(&self) {
        self.context.0.device_destroy(self.id);
    }
1860}
1861
impl Drop for CoreDevice {
    fn drop(&mut self) {
        // Release this handle's reference to the wgpu-core device.
        self.context.0.device_drop(self.id)
    }
}
1867
impl dispatch::QueueInterface for CoreQueue {
    fn write_buffer(
        &self,
        buffer: &dispatch::DispatchBuffer,
        offset: crate::BufferAddress,
        data: &[u8],
    ) {
        let buffer = buffer.as_core();

        // Errors surface through the error sink instead of a return value.
        match self
            .context
            .0
            .queue_write_buffer(self.id, buffer.id, offset, data)
        {
            Ok(()) => (),
            Err(err) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, err, "Queue::write_buffer")
            }
        }
    }

    fn create_staging_buffer(
        &self,
        size: crate::BufferSize,
    ) -> Option<dispatch::DispatchQueueWriteBuffer> {
        // Allocates a mapped staging buffer of `size` bytes; `None` on error.
        // The error label is "Queue::write_buffer_with" because this (and the
        // two methods below) back the public `write_buffer_with` API.
        match self
            .context
            .0
            .queue_create_staging_buffer(self.id, size, None)
        {
            Ok((buffer_id, ptr)) => Some(
                CoreQueueWriteBuffer {
                    buffer_id,
                    mapping: CoreBufferMappedRange {
                        ptr,
                        size: size.get() as usize,
                    },
                }
                .into(),
            ),
            Err(err) => {
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    err,
                    "Queue::write_buffer_with",
                );
                None
            }
        }
    }

    fn validate_write_buffer(
        &self,
        buffer: &dispatch::DispatchBuffer,
        offset: wgt::BufferAddress,
        size: wgt::BufferSize,
    ) -> Option<()> {
        let buffer = buffer.as_core();

        // `Some(())` when the (buffer, offset, size) combination is a valid
        // write target; validation errors go to the error sink.
        match self
            .context
            .0
            .queue_validate_write_buffer(self.id, buffer.id, offset, size)
        {
            Ok(()) => Some(()),
            Err(err) => {
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    err,
                    "Queue::write_buffer_with",
                );
                None
            }
        }
    }

    fn write_staging_buffer(
        &self,
        buffer: &dispatch::DispatchBuffer,
        offset: crate::BufferAddress,
        staging_buffer: &dispatch::DispatchQueueWriteBuffer,
    ) {
        let buffer = buffer.as_core();
        let staging_buffer = staging_buffer.as_core();

        // Schedules the copy from the staging buffer (see
        // `create_staging_buffer`) into `buffer` at `offset`.
        match self.context.0.queue_write_staging_buffer(
            self.id,
            buffer.id,
            offset,
            staging_buffer.buffer_id,
        ) {
            Ok(()) => (),
            Err(err) => {
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    err,
                    "Queue::write_buffer_with",
                );
            }
        }
    }

    fn write_texture(
        &self,
        texture: crate::TexelCopyTextureInfo<'_>,
        data: &[u8],
        data_layout: crate::TexelCopyBufferLayout,
        size: crate::Extent3d,
    ) {
        match self.context.0.queue_write_texture(
            self.id,
            &map_texture_copy_view(texture),
            data,
            &data_layout,
            &size,
        ) {
            Ok(()) => (),
            Err(err) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, err, "Queue::write_texture")
            }
        }
    }

    // This method needs to exist if either webgpu or webgl is enabled,
    // but we only actually have an implementation if webgl is enabled.
    #[cfg(web)]
    #[cfg_attr(not(webgl), expect(unused_variables))]
    fn copy_external_image_to_texture(
        &self,
        source: &crate::CopyExternalImageSourceInfo,
        dest: crate::CopyExternalImageDestInfo<&crate::api::Texture>,
        size: crate::Extent3d,
    ) {
        #[cfg(webgl)]
        match self.context.0.queue_copy_external_image_to_texture(
            self.id,
            source,
            map_texture_tagged_copy_view(dest),
            size,
        ) {
            Ok(()) => (),
            Err(err) => self.context.handle_error_nolabel(
                &self.error_sink,
                err,
                "Queue::copy_external_image_to_texture",
            ),
        }
    }

    fn submit(
        &self,
        command_buffers: &mut dyn Iterator<Item = dispatch::DispatchCommandBuffer>,
    ) -> u64 {
        // Collect the buffers first so they stay alive for the whole submit;
        // on error wgpu-core still hands back the submission index.
        let temp_command_buffers = command_buffers.collect::<SmallVec<[_; 4]>>();
        let command_buffer_ids = temp_command_buffers
            .iter()
            .map(|cmdbuf| cmdbuf.as_core().id)
            .collect::<SmallVec<[_; 4]>>();

        let index = match self.context.0.queue_submit(self.id, &command_buffer_ids) {
            Ok(index) => index,
            Err((index, err)) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, err, "Queue::submit");
                index
            }
        };

        // Explicit drop: the command buffers must outlive `queue_submit`.
        drop(temp_command_buffers);

        index
    }

    fn get_timestamp_period(&self) -> f32 {
        self.context.0.queue_get_timestamp_period(self.id)
    }

    fn on_submitted_work_done(&self, callback: dispatch::BoxSubmittedWorkDoneCallback) {
        self.context
            .0
            .queue_on_submitted_work_done(self.id, callback);
    }

    fn compact_blas(&self, blas: &dispatch::DispatchBlas) -> (Option<u64>, dispatch::DispatchBlas) {
        // Returns the compacted-size handle (if available) plus a new BLAS
        // handle wrapping the compacted acceleration structure.
        let (id, handle, error) =
            self.context
                .0
                .queue_compact_blas(self.id, blas.as_core().id, None);

        if let Some(cause) = error {
            self.context
                .handle_error_nolabel(&self.error_sink, cause, "Queue::compact_blas");
        }
        (
            handle,
            CoreBlas {
                context: self.context.clone(),
                id,
                error_sink: Arc::clone(&self.error_sink),
            }
            .into(),
        )
    }
}
2074
impl Drop for CoreQueue {
    fn drop(&mut self) {
        // Release this handle's reference to the wgpu-core queue.
        self.context.0.queue_drop(self.id)
    }
}
2080
impl dispatch::ShaderModuleInterface for CoreShaderModule {
    fn get_compilation_info(&self) -> Pin<Box<dyn dispatch::ShaderCompilationInfoFuture>> {
        // Compilation info was captured at module creation; resolve instantly.
        Box::pin(ready(self.compilation_info.clone()))
    }
}
2086
impl Drop for CoreShaderModule {
    fn drop(&mut self) {
        // Release this handle's reference to the wgpu-core shader module.
        self.context.0.shader_module_drop(self.id)
    }
}
2092
2093impl dispatch::BindGroupLayoutInterface for CoreBindGroupLayout {}
2094
impl Drop for CoreBindGroupLayout {
    fn drop(&mut self) {
        // Release this handle's reference to the wgpu-core bind group layout.
        self.context.0.bind_group_layout_drop(self.id)
    }
}
2100
2101impl dispatch::BindGroupInterface for CoreBindGroup {}
2102
impl Drop for CoreBindGroup {
    fn drop(&mut self) {
        // Release this handle's reference to the wgpu-core bind group.
        self.context.0.bind_group_drop(self.id)
    }
}
2108
2109impl dispatch::TextureViewInterface for CoreTextureView {}
2110
impl Drop for CoreTextureView {
    fn drop(&mut self) {
        // TODO: We don't use this error at all?
        // `texture_view_drop` can report an error; it is deliberately ignored
        // here since Drop has no way to surface it.
        let _ = self.context.0.texture_view_drop(self.id);
    }
}
2117
impl dispatch::ExternalTextureInterface for CoreExternalTexture {
    // Explicitly destroys the underlying external texture resources.
    fn destroy(&self) {
        self.context.0.external_texture_destroy(self.id);
    }
}
2123
impl Drop for CoreExternalTexture {
    fn drop(&mut self) {
        // Release this handle's reference to the wgpu-core external texture.
        self.context.0.external_texture_drop(self.id);
    }
}
2129
2130impl dispatch::SamplerInterface for CoreSampler {}
2131
impl Drop for CoreSampler {
    fn drop(&mut self) {
        // Release this handle's reference to the wgpu-core sampler.
        self.context.0.sampler_drop(self.id)
    }
}
2137
impl dispatch::BufferInterface for CoreBuffer {
    fn map_async(
        &self,
        mode: crate::MapMode,
        range: Range<crate::BufferAddress>,
        callback: dispatch::BufferMapCallback,
    ) {
        let operation = wgc::resource::BufferMapOperation {
            host: match mode {
                MapMode::Read => wgc::device::HostMap::Read,
                MapMode::Write => wgc::device::HostMap::Write,
            },
            // Collapse wgpu-core's detailed map error into the public opaque
            // `BufferAsyncError` before invoking the user callback.
            callback: Some(Box::new(|status| {
                let res = status.map_err(|_| crate::BufferAsyncError);
                callback(res);
            })),
        };

        match self.context.0.buffer_map_async(
            self.id,
            range.start,
            Some(range.end - range.start),
            operation,
        ) {
            Ok(_) => (),
            Err(cause) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, cause, "Buffer::map_async")
            }
        }
    }

    fn get_mapped_range(
        &self,
        sub_range: Range<crate::BufferAddress>,
    ) -> dispatch::DispatchBufferMappedRange {
        // A failure here is a programming error (buffer not mapped, bad
        // range), so it is treated as fatal rather than sent to the sink.
        let size = sub_range.end - sub_range.start;
        match self
            .context
            .0
            .buffer_get_mapped_range(self.id, sub_range.start, Some(size))
        {
            Ok((ptr, size)) => CoreBufferMappedRange {
                ptr,
                size: size as usize,
            }
            .into(),
            Err(err) => self
                .context
                .handle_error_fatal(err, "Buffer::get_mapped_range"),
        }
    }

    fn unmap(&self) {
        match self.context.0.buffer_unmap(self.id) {
            Ok(()) => (),
            Err(cause) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, cause, "Buffer::buffer_unmap")
            }
        }
    }

    // Explicitly destroys the underlying buffer resources.
    fn destroy(&self) {
        self.context.0.buffer_destroy(self.id);
    }
}
2205
impl Drop for CoreBuffer {
    fn drop(&mut self) {
        // Release this handle's reference to the wgpu-core buffer.
        self.context.0.buffer_drop(self.id)
    }
}
2211
impl dispatch::TextureInterface for CoreTexture {
    fn create_view(
        &self,
        desc: &crate::TextureViewDescriptor<'_>,
    ) -> dispatch::DispatchTextureView {
        // Translate the public descriptor into wgpu-core's form; errors are
        // routed to the error sink with the view's label attached.
        let descriptor = wgc::resource::TextureViewDescriptor {
            label: desc.label.map(Borrowed),
            format: desc.format,
            dimension: desc.dimension,
            usage: desc.usage,
            range: wgt::ImageSubresourceRange {
                aspect: desc.aspect,
                base_mip_level: desc.base_mip_level,
                mip_level_count: desc.mip_level_count,
                base_array_layer: desc.base_array_layer,
                array_layer_count: desc.array_layer_count,
            },
        };
        let (id, error) = self
            .context
            .0
            .texture_create_view(self.id, &descriptor, None);
        if let Some(cause) = error {
            self.context
                .handle_error(&self.error_sink, cause, desc.label, "Texture::create_view");
        }
        CoreTextureView {
            context: self.context.clone(),
            id,
        }
        .into()
    }

    // Explicitly destroys the underlying texture resources.
    fn destroy(&self) {
        self.context.0.texture_destroy(self.id);
    }
}
2249
impl Drop for CoreTexture {
    fn drop(&mut self) {
        // Release this handle's reference to the wgpu-core texture.
        self.context.0.texture_drop(self.id)
    }
}
2255
impl dispatch::BlasInterface for CoreBlas {
    fn prepare_compact_async(&self, callback: BlasCompactCallback) {
        // Collapse wgpu-core's detailed error into the public opaque
        // `BlasAsyncError` before invoking the user callback.
        let callback: Option<wgc::resource::BlasCompactCallback> =
            Some(Box::new(|status: BlasPrepareCompactResult| {
                let res = status.map_err(|_| crate::BlasAsyncError);
                callback(res);
            }));

        match self.context.0.blas_prepare_compact_async(self.id, callback) {
            Ok(_) => (),
            Err(cause) => self.context.handle_error_nolabel(
                &self.error_sink,
                cause,
                "Blas::prepare_compact_async",
            ),
        }
    }

    fn ready_for_compaction(&self) -> bool {
        match self.context.0.ready_for_compaction(self.id) {
            Ok(ready) => ready,
            Err(cause) => {
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    cause,
                    "Blas::ready_for_compaction",
                );
                // A BLAS is definitely not ready for compaction if it's not valid
                false
            }
        }
    }
}
2289
impl Drop for CoreBlas {
    fn drop(&mut self) {
        // Release this handle's reference to the wgpu-core BLAS.
        self.context.0.blas_drop(self.id)
    }
}
2295
2296impl dispatch::TlasInterface for CoreTlas {}
2297
impl Drop for CoreTlas {
    fn drop(&mut self) {
        // Release this handle's reference to the wgpu-core TLAS.
        self.context.0.tlas_drop(self.id)
    }
}
2303
2304impl dispatch::QuerySetInterface for CoreQuerySet {}
2305
impl Drop for CoreQuerySet {
    fn drop(&mut self) {
        // Release this handle's reference to the wgpu-core query set.
        self.context.0.query_set_drop(self.id)
    }
}
2311
2312impl dispatch::PipelineLayoutInterface for CorePipelineLayout {}
2313
impl Drop for CorePipelineLayout {
    fn drop(&mut self) {
        // Release this handle's reference to the wgpu-core pipeline layout.
        self.context.0.pipeline_layout_drop(self.id)
    }
}
2319
impl dispatch::RenderPipelineInterface for CoreRenderPipeline {
    // Fetches the bind group layout at `index`; errors (e.g. out-of-range
    // index) are routed to the error sink and an invalid handle is returned.
    fn get_bind_group_layout(&self, index: u32) -> dispatch::DispatchBindGroupLayout {
        let (id, error) = self
            .context
            .0
            .render_pipeline_get_bind_group_layout(self.id, index, None);
        if let Some(err) = error {
            self.context.handle_error_nolabel(
                &self.error_sink,
                err,
                "RenderPipeline::get_bind_group_layout",
            )
        }
        CoreBindGroupLayout {
            context: self.context.clone(),
            id,
        }
        .into()
    }
}
2340
impl Drop for CoreRenderPipeline {
    fn drop(&mut self) {
        // Release this handle's reference to the wgpu-core render pipeline.
        self.context.0.render_pipeline_drop(self.id)
    }
}
2346
impl dispatch::ComputePipelineInterface for CoreComputePipeline {
    // Fetches the bind group layout at `index`; mirrors the render-pipeline
    // implementation above, with errors routed to the error sink.
    fn get_bind_group_layout(&self, index: u32) -> dispatch::DispatchBindGroupLayout {
        let (id, error) = self
            .context
            .0
            .compute_pipeline_get_bind_group_layout(self.id, index, None);
        if let Some(err) = error {
            self.context.handle_error_nolabel(
                &self.error_sink,
                err,
                "ComputePipeline::get_bind_group_layout",
            )
        }
        CoreBindGroupLayout {
            context: self.context.clone(),
            id,
        }
        .into()
    }
}
2367
impl Drop for CoreComputePipeline {
    fn drop(&mut self) {
        // Release this handle's reference to the wgpu-core compute pipeline.
        self.context.0.compute_pipeline_drop(self.id)
    }
}
2373
impl dispatch::PipelineCacheInterface for CorePipelineCache {
    // Serialized cache contents, or `None` when no data is available.
    fn get_data(&self) -> Option<Vec<u8>> {
        self.context.0.pipeline_cache_get_data(self.id)
    }
}
2379
impl Drop for CorePipelineCache {
    fn drop(&mut self) {
        // Release this handle's reference to the wgpu-core pipeline cache.
        self.context.0.pipeline_cache_drop(self.id)
    }
}
2385
2386impl dispatch::CommandEncoderInterface for CoreCommandEncoder {
    // Records a buffer-to-buffer copy; `None` size copies to the end of the
    // source. Errors go to the error sink.
    fn copy_buffer_to_buffer(
        &self,
        source: &dispatch::DispatchBuffer,
        source_offset: crate::BufferAddress,
        destination: &dispatch::DispatchBuffer,
        destination_offset: crate::BufferAddress,
        copy_size: Option<crate::BufferAddress>,
    ) {
        let source = source.as_core();
        let destination = destination.as_core();

        if let Err(cause) = self.context.0.command_encoder_copy_buffer_to_buffer(
            self.id,
            source.id,
            source_offset,
            destination.id,
            destination_offset,
            copy_size,
        ) {
            self.context.handle_error_nolabel(
                &self.error_sink,
                cause,
                "CommandEncoder::copy_buffer_to_buffer",
            );
        }
    }
2413
    // Records a buffer-to-texture copy; errors go to the error sink.
    fn copy_buffer_to_texture(
        &self,
        source: crate::TexelCopyBufferInfo<'_>,
        destination: crate::TexelCopyTextureInfo<'_>,
        copy_size: crate::Extent3d,
    ) {
        if let Err(cause) = self.context.0.command_encoder_copy_buffer_to_texture(
            self.id,
            &map_buffer_copy_view(source),
            &map_texture_copy_view(destination),
            &copy_size,
        ) {
            self.context.handle_error_nolabel(
                &self.error_sink,
                cause,
                "CommandEncoder::copy_buffer_to_texture",
            );
        }
    }
2433
    // Records a texture-to-buffer copy; errors go to the error sink.
    fn copy_texture_to_buffer(
        &self,
        source: crate::TexelCopyTextureInfo<'_>,
        destination: crate::TexelCopyBufferInfo<'_>,
        copy_size: crate::Extent3d,
    ) {
        if let Err(cause) = self.context.0.command_encoder_copy_texture_to_buffer(
            self.id,
            &map_texture_copy_view(source),
            &map_buffer_copy_view(destination),
            &copy_size,
        ) {
            self.context.handle_error_nolabel(
                &self.error_sink,
                cause,
                "CommandEncoder::copy_texture_to_buffer",
            );
        }
    }
2453
    // Records a texture-to-texture copy; errors go to the error sink.
    fn copy_texture_to_texture(
        &self,
        source: crate::TexelCopyTextureInfo<'_>,
        destination: crate::TexelCopyTextureInfo<'_>,
        copy_size: crate::Extent3d,
    ) {
        if let Err(cause) = self.context.0.command_encoder_copy_texture_to_texture(
            self.id,
            &map_texture_copy_view(source),
            &map_texture_copy_view(destination),
            &copy_size,
        ) {
            self.context.handle_error_nolabel(
                &self.error_sink,
                cause,
                "CommandEncoder::copy_texture_to_texture",
            );
        }
    }
2473
    // Opens a compute pass on this encoder. A begin error is reported to the
    // sink and an (invalid) pass object is still returned, so later recording
    // calls fail gracefully instead of panicking.
    fn begin_compute_pass(
        &self,
        desc: &crate::ComputePassDescriptor<'_>,
    ) -> dispatch::DispatchComputePass {
        let timestamp_writes =
            desc.timestamp_writes
                .as_ref()
                .map(|tw| wgc::command::PassTimestampWrites {
                    query_set: tw.query_set.inner.as_core().id,
                    beginning_of_pass_write_index: tw.beginning_of_pass_write_index,
                    end_of_pass_write_index: tw.end_of_pass_write_index,
                });

        let (pass, err) = self.context.0.command_encoder_begin_compute_pass(
            self.id,
            &wgc::command::ComputePassDescriptor {
                label: desc.label.map(Borrowed),
                timestamp_writes,
            },
        );

        if let Some(cause) = err {
            self.context.handle_error(
                &self.error_sink,
                cause,
                desc.label,
                "CommandEncoder::begin_compute_pass",
            );
        }

        CoreComputePass {
            context: self.context.clone(),
            pass,
            error_sink: self.error_sink.clone(),
            id: crate::cmp::Identifier::create(),
        }
        .into()
    }
2512
    // Opens a render pass on this encoder, translating all attachments and
    // timestamp writes into wgpu-core form. As with begin_compute_pass, a
    // begin error is reported to the sink and a pass object is still returned.
    fn begin_render_pass(
        &self,
        desc: &crate::RenderPassDescriptor<'_>,
    ) -> dispatch::DispatchRenderPass {
        // Color attachments may contain holes (`None` entries are preserved).
        let colors = desc
            .color_attachments
            .iter()
            .map(|ca| {
                ca.as_ref()
                    .map(|at| wgc::command::RenderPassColorAttachment {
                        view: at.view.inner.as_core().id,
                        depth_slice: at.depth_slice,
                        resolve_target: at.resolve_target.map(|view| view.inner.as_core().id),
                        load_op: at.ops.load,
                        store_op: at.ops.store,
                    })
            })
            .collect::<Vec<_>>();

        let depth_stencil = desc.depth_stencil_attachment.as_ref().map(|dsa| {
            wgc::command::RenderPassDepthStencilAttachment {
                view: dsa.view.inner.as_core().id,
                depth: map_pass_channel(dsa.depth_ops.as_ref()),
                stencil: map_pass_channel(dsa.stencil_ops.as_ref()),
            }
        });

        let timestamp_writes =
            desc.timestamp_writes
                .as_ref()
                .map(|tw| wgc::command::PassTimestampWrites {
                    query_set: tw.query_set.inner.as_core().id,
                    beginning_of_pass_write_index: tw.beginning_of_pass_write_index,
                    end_of_pass_write_index: tw.end_of_pass_write_index,
                });

        let (pass, err) = self.context.0.command_encoder_begin_render_pass(
            self.id,
            &wgc::command::RenderPassDescriptor {
                label: desc.label.map(Borrowed),
                timestamp_writes: timestamp_writes.as_ref(),
                color_attachments: Borrowed(&colors),
                depth_stencil_attachment: depth_stencil.as_ref(),
                occlusion_query_set: desc.occlusion_query_set.map(|qs| qs.inner.as_core().id),
            },
        );

        if let Some(cause) = err {
            self.context.handle_error(
                &self.error_sink,
                cause,
                desc.label,
                "CommandEncoder::begin_render_pass",
            );
        }

        CoreRenderPass {
            context: self.context.clone(),
            pass,
            error_sink: self.error_sink.clone(),
            id: crate::cmp::Identifier::create(),
        }
        .into()
    }
2577
    // Finalizes recording and produces a command buffer; errors go to the
    // error sink (labeled simply "a CommandEncoder" — the encoder itself has
    // no label by this point).
    fn finish(&mut self) -> dispatch::DispatchCommandBuffer {
        let descriptor = wgt::CommandBufferDescriptor::default();
        let (id, error) = self
            .context
            .0
            .command_encoder_finish(self.id, &descriptor, None);
        if let Some(cause) = error {
            self.context
                .handle_error_nolabel(&self.error_sink, cause, "a CommandEncoder");
        }
        CoreCommandBuffer {
            context: self.context.clone(),
            id,
        }
        .into()
    }
2594
    // Records a clear of the given texture subresources; errors go to the
    // error sink.
    fn clear_texture(
        &self,
        texture: &dispatch::DispatchTexture,
        subresource_range: &crate::ImageSubresourceRange,
    ) {
        let texture = texture.as_core();

        if let Err(cause) =
            self.context
                .0
                .command_encoder_clear_texture(self.id, texture.id, subresource_range)
        {
            self.context.handle_error_nolabel(
                &self.error_sink,
                cause,
                "CommandEncoder::clear_texture",
            );
        }
    }
2614
2615    fn clear_buffer(
2616        &self,
2617        buffer: &dispatch::DispatchBuffer,
2618        offset: crate::BufferAddress,
2619        size: Option<crate::BufferAddress>,
2620    ) {
2621        let buffer = buffer.as_core();
2622
2623        if let Err(cause) = self
2624            .context
2625            .0
2626            .command_encoder_clear_buffer(self.id, buffer.id, offset, size)
2627        {
2628            self.context.handle_error_nolabel(
2629                &self.error_sink,
2630                cause,
2631                "CommandEncoder::fill_buffer",
2632            );
2633        }
2634    }
2635
2636    fn insert_debug_marker(&self, label: &str) {
2637        if let Err(cause) = self
2638            .context
2639            .0
2640            .command_encoder_insert_debug_marker(self.id, label)
2641        {
2642            self.context.handle_error_nolabel(
2643                &self.error_sink,
2644                cause,
2645                "CommandEncoder::insert_debug_marker",
2646            );
2647        }
2648    }
2649
    // Opens a labeled debug group (closed by pop_debug_group); errors go to
    // the error sink.
    fn push_debug_group(&self, label: &str) {
        if let Err(cause) = self
            .context
            .0
            .command_encoder_push_debug_group(self.id, label)
        {
            self.context.handle_error_nolabel(
                &self.error_sink,
                cause,
                "CommandEncoder::push_debug_group",
            );
        }
    }
2663
2664    fn pop_debug_group(&self) {
2665        if let Err(cause) = self.context.0.command_encoder_pop_debug_group(self.id) {
2666            self.context.handle_error_nolabel(
2667                &self.error_sink,
2668                cause,
2669                "CommandEncoder::pop_debug_group",
2670            );
2671        }
2672    }
2673
    // Records a timestamp into `query_set` at `query_index`; errors go to the
    // error sink.
    fn write_timestamp(&self, query_set: &dispatch::DispatchQuerySet, query_index: u32) {
        let query_set = query_set.as_core();

        if let Err(cause) =
            self.context
                .0
                .command_encoder_write_timestamp(self.id, query_set.id, query_index)
        {
            self.context.handle_error_nolabel(
                &self.error_sink,
                cause,
                "CommandEncoder::write_timestamp",
            );
        }
    }
2689
    // Records resolution of `query_count` queries starting at `first_query`
    // into `destination` at `destination_offset`; errors go to the error sink.
    fn resolve_query_set(
        &self,
        query_set: &dispatch::DispatchQuerySet,
        first_query: u32,
        query_count: u32,
        destination: &dispatch::DispatchBuffer,
        destination_offset: crate::BufferAddress,
    ) {
        let query_set = query_set.as_core();
        let destination = destination.as_core();

        if let Err(cause) = self.context.0.command_encoder_resolve_query_set(
            self.id,
            query_set.id,
            first_query,
            query_count,
            destination.id,
            destination_offset,
        ) {
            self.context.handle_error_nolabel(
                &self.error_sink,
                cause,
                "CommandEncoder::resolve_query_set",
            );
        }
    }
2716
2717    fn mark_acceleration_structures_built<'a>(
2718        &self,
2719        blas: &mut dyn Iterator<Item = &'a Blas>,
2720        tlas: &mut dyn Iterator<Item = &'a Tlas>,
2721    ) {
2722        let blas = blas
2723            .map(|b| b.inner.as_core().id)
2724            .collect::<SmallVec<[_; 4]>>();
2725        let tlas = tlas
2726            .map(|t| t.inner.as_core().id)
2727            .collect::<SmallVec<[_; 4]>>();
2728        if let Err(cause) = self
2729            .context
2730            .0
2731            .command_encoder_mark_acceleration_structures_built(self.id, &blas, &tlas)
2732        {
2733            self.context.handle_error_nolabel(
2734                &self.error_sink,
2735                cause,
2736                "CommandEncoder::build_acceleration_structures_unsafe_tlas",
2737            );
2738        }
2739    }
2740
2741    fn build_acceleration_structures<'a>(
2742        &self,
2743        blas: &mut dyn Iterator<Item = &'a crate::BlasBuildEntry<'a>>,
2744        tlas: &mut dyn Iterator<Item = &'a crate::Tlas>,
2745    ) {
2746        let blas = blas.map(|e: &crate::BlasBuildEntry<'_>| {
2747            let geometries = match e.geometry {
2748                crate::BlasGeometries::TriangleGeometries(ref triangle_geometries) => {
2749                    let iter = triangle_geometries.iter().map(|tg| {
2750                        wgc::ray_tracing::BlasTriangleGeometry {
2751                            vertex_buffer: tg.vertex_buffer.inner.as_core().id,
2752                            index_buffer: tg.index_buffer.map(|buf| buf.inner.as_core().id),
2753                            transform_buffer: tg.transform_buffer.map(|buf| buf.inner.as_core().id),
2754                            size: tg.size,
2755                            transform_buffer_offset: tg.transform_buffer_offset,
2756                            first_vertex: tg.first_vertex,
2757                            vertex_stride: tg.vertex_stride,
2758                            first_index: tg.first_index,
2759                        }
2760                    });
2761                    wgc::ray_tracing::BlasGeometries::TriangleGeometries(Box::new(iter))
2762                }
2763            };
2764            wgc::ray_tracing::BlasBuildEntry {
2765                blas_id: e.blas.inner.as_core().id,
2766                geometries,
2767            }
2768        });
2769
2770        let tlas = tlas.into_iter().map(|e| {
2771            let instances = e
2772                .instances
2773                .iter()
2774                .map(|instance: &Option<crate::TlasInstance>| {
2775                    instance
2776                        .as_ref()
2777                        .map(|instance| wgc::ray_tracing::TlasInstance {
2778                            blas_id: instance.blas.as_core().id,
2779                            transform: &instance.transform,
2780                            custom_data: instance.custom_data,
2781                            mask: instance.mask,
2782                        })
2783                });
2784            wgc::ray_tracing::TlasPackage {
2785                tlas_id: e.inner.as_core().id,
2786                instances: Box::new(instances),
2787                lowest_unmodified: e.lowest_unmodified,
2788            }
2789        });
2790
2791        if let Err(cause) = self
2792            .context
2793            .0
2794            .command_encoder_build_acceleration_structures(self.id, blas, tlas)
2795        {
2796            self.context.handle_error_nolabel(
2797                &self.error_sink,
2798                cause,
2799                "CommandEncoder::build_acceleration_structures_unsafe_tlas",
2800            );
2801        }
2802    }
2803
2804    fn transition_resources<'a>(
2805        &mut self,
2806        buffer_transitions: &mut dyn Iterator<
2807            Item = wgt::BufferTransition<&'a dispatch::DispatchBuffer>,
2808        >,
2809        texture_transitions: &mut dyn Iterator<
2810            Item = wgt::TextureTransition<&'a dispatch::DispatchTexture>,
2811        >,
2812    ) {
2813        let result = self.context.0.command_encoder_transition_resources(
2814            self.id,
2815            buffer_transitions.map(|t| wgt::BufferTransition {
2816                buffer: t.buffer.as_core().id,
2817                state: t.state,
2818            }),
2819            texture_transitions.map(|t| wgt::TextureTransition {
2820                texture: t.texture.as_core().id,
2821                selector: t.selector.clone(),
2822                state: t.state,
2823            }),
2824        );
2825
2826        if let Err(cause) = result {
2827            self.context.handle_error_nolabel(
2828                &self.error_sink,
2829                cause,
2830                "CommandEncoder::transition_resources",
2831            );
2832        }
2833    }
2834}
2835
impl Drop for CoreCommandEncoder {
    fn drop(&mut self) {
        // Release the wgpu-core handle; `self.id` is invalid after this call.
        self.context.0.command_encoder_drop(self.id)
    }
}
2841
// The command-buffer interface declares no methods of its own; this is a
// marker impl so `CoreCommandBuffer` can be used through the dispatch layer.
impl dispatch::CommandBufferInterface for CoreCommandBuffer {}
2843
impl Drop for CoreCommandBuffer {
    fn drop(&mut self) {
        // Release the wgpu-core handle; `self.id` is invalid after this call.
        self.context.0.command_buffer_drop(self.id)
    }
}
2849
2850impl dispatch::ComputePassInterface for CoreComputePass {
2851    fn set_pipeline(&mut self, pipeline: &dispatch::DispatchComputePipeline) {
2852        let pipeline = pipeline.as_core();
2853
2854        if let Err(cause) = self
2855            .context
2856            .0
2857            .compute_pass_set_pipeline(&mut self.pass, pipeline.id)
2858        {
2859            self.context.handle_error(
2860                &self.error_sink,
2861                cause,
2862                self.pass.label(),
2863                "ComputePass::set_pipeline",
2864            );
2865        }
2866    }
2867
2868    fn set_bind_group(
2869        &mut self,
2870        index: u32,
2871        bind_group: Option<&dispatch::DispatchBindGroup>,
2872        offsets: &[crate::DynamicOffset],
2873    ) {
2874        let bg = bind_group.map(|bg| bg.as_core().id);
2875
2876        if let Err(cause) =
2877            self.context
2878                .0
2879                .compute_pass_set_bind_group(&mut self.pass, index, bg, offsets)
2880        {
2881            self.context.handle_error(
2882                &self.error_sink,
2883                cause,
2884                self.pass.label(),
2885                "ComputePass::set_bind_group",
2886            );
2887        }
2888    }
2889
2890    fn set_push_constants(&mut self, offset: u32, data: &[u8]) {
2891        if let Err(cause) =
2892            self.context
2893                .0
2894                .compute_pass_set_push_constants(&mut self.pass, offset, data)
2895        {
2896            self.context.handle_error(
2897                &self.error_sink,
2898                cause,
2899                self.pass.label(),
2900                "ComputePass::set_push_constant",
2901            );
2902        }
2903    }
2904
2905    fn insert_debug_marker(&mut self, label: &str) {
2906        if let Err(cause) =
2907            self.context
2908                .0
2909                .compute_pass_insert_debug_marker(&mut self.pass, label, 0)
2910        {
2911            self.context.handle_error(
2912                &self.error_sink,
2913                cause,
2914                self.pass.label(),
2915                "ComputePass::insert_debug_marker",
2916            );
2917        }
2918    }
2919
2920    fn push_debug_group(&mut self, group_label: &str) {
2921        if let Err(cause) =
2922            self.context
2923                .0
2924                .compute_pass_push_debug_group(&mut self.pass, group_label, 0)
2925        {
2926            self.context.handle_error(
2927                &self.error_sink,
2928                cause,
2929                self.pass.label(),
2930                "ComputePass::push_debug_group",
2931            );
2932        }
2933    }
2934
2935    fn pop_debug_group(&mut self) {
2936        if let Err(cause) = self.context.0.compute_pass_pop_debug_group(&mut self.pass) {
2937            self.context.handle_error(
2938                &self.error_sink,
2939                cause,
2940                self.pass.label(),
2941                "ComputePass::pop_debug_group",
2942            );
2943        }
2944    }
2945
2946    fn write_timestamp(&mut self, query_set: &dispatch::DispatchQuerySet, query_index: u32) {
2947        let query_set = query_set.as_core();
2948
2949        if let Err(cause) =
2950            self.context
2951                .0
2952                .compute_pass_write_timestamp(&mut self.pass, query_set.id, query_index)
2953        {
2954            self.context.handle_error(
2955                &self.error_sink,
2956                cause,
2957                self.pass.label(),
2958                "ComputePass::write_timestamp",
2959            );
2960        }
2961    }
2962
2963    fn begin_pipeline_statistics_query(
2964        &mut self,
2965        query_set: &dispatch::DispatchQuerySet,
2966        query_index: u32,
2967    ) {
2968        let query_set = query_set.as_core();
2969
2970        if let Err(cause) = self.context.0.compute_pass_begin_pipeline_statistics_query(
2971            &mut self.pass,
2972            query_set.id,
2973            query_index,
2974        ) {
2975            self.context.handle_error(
2976                &self.error_sink,
2977                cause,
2978                self.pass.label(),
2979                "ComputePass::begin_pipeline_statistics_query",
2980            );
2981        }
2982    }
2983
2984    fn end_pipeline_statistics_query(&mut self) {
2985        if let Err(cause) = self
2986            .context
2987            .0
2988            .compute_pass_end_pipeline_statistics_query(&mut self.pass)
2989        {
2990            self.context.handle_error(
2991                &self.error_sink,
2992                cause,
2993                self.pass.label(),
2994                "ComputePass::end_pipeline_statistics_query",
2995            );
2996        }
2997    }
2998
2999    fn dispatch_workgroups(&mut self, x: u32, y: u32, z: u32) {
3000        if let Err(cause) = self
3001            .context
3002            .0
3003            .compute_pass_dispatch_workgroups(&mut self.pass, x, y, z)
3004        {
3005            self.context.handle_error(
3006                &self.error_sink,
3007                cause,
3008                self.pass.label(),
3009                "ComputePass::dispatch_workgroups",
3010            );
3011        }
3012    }
3013
3014    fn dispatch_workgroups_indirect(
3015        &mut self,
3016        indirect_buffer: &dispatch::DispatchBuffer,
3017        indirect_offset: crate::BufferAddress,
3018    ) {
3019        let indirect_buffer = indirect_buffer.as_core();
3020
3021        if let Err(cause) = self.context.0.compute_pass_dispatch_workgroups_indirect(
3022            &mut self.pass,
3023            indirect_buffer.id,
3024            indirect_offset,
3025        ) {
3026            self.context.handle_error(
3027                &self.error_sink,
3028                cause,
3029                self.pass.label(),
3030                "ComputePass::dispatch_workgroups_indirect",
3031            );
3032        }
3033    }
3034
3035    fn end(&mut self) {
3036        if let Err(cause) = self.context.0.compute_pass_end(&mut self.pass) {
3037            self.context.handle_error(
3038                &self.error_sink,
3039                cause,
3040                self.pass.label(),
3041                "ComputePass::end",
3042            );
3043        }
3044    }
3045}
3046
impl Drop for CoreComputePass {
    fn drop(&mut self) {
        // Ensure the pass is ended even if the user never called `end`
        // explicitly; any error is reported through the pass's error sink.
        dispatch::ComputePassInterface::end(self);
    }
}
3052
3053impl dispatch::RenderPassInterface for CoreRenderPass {
3054    fn set_pipeline(&mut self, pipeline: &dispatch::DispatchRenderPipeline) {
3055        let pipeline = pipeline.as_core();
3056
3057        if let Err(cause) = self
3058            .context
3059            .0
3060            .render_pass_set_pipeline(&mut self.pass, pipeline.id)
3061        {
3062            self.context.handle_error(
3063                &self.error_sink,
3064                cause,
3065                self.pass.label(),
3066                "RenderPass::set_pipeline",
3067            );
3068        }
3069    }
3070
3071    fn set_bind_group(
3072        &mut self,
3073        index: u32,
3074        bind_group: Option<&dispatch::DispatchBindGroup>,
3075        offsets: &[crate::DynamicOffset],
3076    ) {
3077        let bg = bind_group.map(|bg| bg.as_core().id);
3078
3079        if let Err(cause) =
3080            self.context
3081                .0
3082                .render_pass_set_bind_group(&mut self.pass, index, bg, offsets)
3083        {
3084            self.context.handle_error(
3085                &self.error_sink,
3086                cause,
3087                self.pass.label(),
3088                "RenderPass::set_bind_group",
3089            );
3090        }
3091    }
3092
3093    fn set_index_buffer(
3094        &mut self,
3095        buffer: &dispatch::DispatchBuffer,
3096        index_format: crate::IndexFormat,
3097        offset: crate::BufferAddress,
3098        size: Option<crate::BufferSize>,
3099    ) {
3100        let buffer = buffer.as_core();
3101
3102        if let Err(cause) = self.context.0.render_pass_set_index_buffer(
3103            &mut self.pass,
3104            buffer.id,
3105            index_format,
3106            offset,
3107            size,
3108        ) {
3109            self.context.handle_error(
3110                &self.error_sink,
3111                cause,
3112                self.pass.label(),
3113                "RenderPass::set_index_buffer",
3114            );
3115        }
3116    }
3117
3118    fn set_vertex_buffer(
3119        &mut self,
3120        slot: u32,
3121        buffer: &dispatch::DispatchBuffer,
3122        offset: crate::BufferAddress,
3123        size: Option<crate::BufferSize>,
3124    ) {
3125        let buffer = buffer.as_core();
3126
3127        if let Err(cause) = self.context.0.render_pass_set_vertex_buffer(
3128            &mut self.pass,
3129            slot,
3130            buffer.id,
3131            offset,
3132            size,
3133        ) {
3134            self.context.handle_error(
3135                &self.error_sink,
3136                cause,
3137                self.pass.label(),
3138                "RenderPass::set_vertex_buffer",
3139            );
3140        }
3141    }
3142
3143    fn set_push_constants(&mut self, stages: crate::ShaderStages, offset: u32, data: &[u8]) {
3144        if let Err(cause) =
3145            self.context
3146                .0
3147                .render_pass_set_push_constants(&mut self.pass, stages, offset, data)
3148        {
3149            self.context.handle_error(
3150                &self.error_sink,
3151                cause,
3152                self.pass.label(),
3153                "RenderPass::set_push_constants",
3154            );
3155        }
3156    }
3157
3158    fn set_blend_constant(&mut self, color: crate::Color) {
3159        if let Err(cause) = self
3160            .context
3161            .0
3162            .render_pass_set_blend_constant(&mut self.pass, color)
3163        {
3164            self.context.handle_error(
3165                &self.error_sink,
3166                cause,
3167                self.pass.label(),
3168                "RenderPass::set_blend_constant",
3169            );
3170        }
3171    }
3172
3173    fn set_scissor_rect(&mut self, x: u32, y: u32, width: u32, height: u32) {
3174        if let Err(cause) =
3175            self.context
3176                .0
3177                .render_pass_set_scissor_rect(&mut self.pass, x, y, width, height)
3178        {
3179            self.context.handle_error(
3180                &self.error_sink,
3181                cause,
3182                self.pass.label(),
3183                "RenderPass::set_scissor_rect",
3184            );
3185        }
3186    }
3187
3188    fn set_viewport(
3189        &mut self,
3190        x: f32,
3191        y: f32,
3192        width: f32,
3193        height: f32,
3194        min_depth: f32,
3195        max_depth: f32,
3196    ) {
3197        if let Err(cause) = self.context.0.render_pass_set_viewport(
3198            &mut self.pass,
3199            x,
3200            y,
3201            width,
3202            height,
3203            min_depth,
3204            max_depth,
3205        ) {
3206            self.context.handle_error(
3207                &self.error_sink,
3208                cause,
3209                self.pass.label(),
3210                "RenderPass::set_viewport",
3211            );
3212        }
3213    }
3214
3215    fn set_stencil_reference(&mut self, reference: u32) {
3216        if let Err(cause) = self
3217            .context
3218            .0
3219            .render_pass_set_stencil_reference(&mut self.pass, reference)
3220        {
3221            self.context.handle_error(
3222                &self.error_sink,
3223                cause,
3224                self.pass.label(),
3225                "RenderPass::set_stencil_reference",
3226            );
3227        }
3228    }
3229
3230    fn draw(&mut self, vertices: Range<u32>, instances: Range<u32>) {
3231        if let Err(cause) = self.context.0.render_pass_draw(
3232            &mut self.pass,
3233            vertices.end - vertices.start,
3234            instances.end - instances.start,
3235            vertices.start,
3236            instances.start,
3237        ) {
3238            self.context.handle_error(
3239                &self.error_sink,
3240                cause,
3241                self.pass.label(),
3242                "RenderPass::draw",
3243            );
3244        }
3245    }
3246
3247    fn draw_indexed(&mut self, indices: Range<u32>, base_vertex: i32, instances: Range<u32>) {
3248        if let Err(cause) = self.context.0.render_pass_draw_indexed(
3249            &mut self.pass,
3250            indices.end - indices.start,
3251            instances.end - instances.start,
3252            indices.start,
3253            base_vertex,
3254            instances.start,
3255        ) {
3256            self.context.handle_error(
3257                &self.error_sink,
3258                cause,
3259                self.pass.label(),
3260                "RenderPass::draw_indexed",
3261            );
3262        }
3263    }
3264
3265    fn draw_mesh_tasks(&mut self, group_count_x: u32, group_count_y: u32, group_count_z: u32) {
3266        if let Err(cause) = self.context.0.render_pass_draw_mesh_tasks(
3267            &mut self.pass,
3268            group_count_x,
3269            group_count_y,
3270            group_count_z,
3271        ) {
3272            self.context.handle_error(
3273                &self.error_sink,
3274                cause,
3275                self.pass.label(),
3276                "RenderPass::draw_mesh_tasks",
3277            );
3278        }
3279    }
3280
3281    fn draw_indirect(
3282        &mut self,
3283        indirect_buffer: &dispatch::DispatchBuffer,
3284        indirect_offset: crate::BufferAddress,
3285    ) {
3286        let indirect_buffer = indirect_buffer.as_core();
3287
3288        if let Err(cause) = self.context.0.render_pass_draw_indirect(
3289            &mut self.pass,
3290            indirect_buffer.id,
3291            indirect_offset,
3292        ) {
3293            self.context.handle_error(
3294                &self.error_sink,
3295                cause,
3296                self.pass.label(),
3297                "RenderPass::draw_indirect",
3298            );
3299        }
3300    }
3301
3302    fn draw_indexed_indirect(
3303        &mut self,
3304        indirect_buffer: &dispatch::DispatchBuffer,
3305        indirect_offset: crate::BufferAddress,
3306    ) {
3307        let indirect_buffer = indirect_buffer.as_core();
3308
3309        if let Err(cause) = self.context.0.render_pass_draw_indexed_indirect(
3310            &mut self.pass,
3311            indirect_buffer.id,
3312            indirect_offset,
3313        ) {
3314            self.context.handle_error(
3315                &self.error_sink,
3316                cause,
3317                self.pass.label(),
3318                "RenderPass::draw_indexed_indirect",
3319            );
3320        }
3321    }
3322
3323    fn draw_mesh_tasks_indirect(
3324        &mut self,
3325        indirect_buffer: &dispatch::DispatchBuffer,
3326        indirect_offset: crate::BufferAddress,
3327    ) {
3328        let indirect_buffer = indirect_buffer.as_core();
3329
3330        if let Err(cause) = self.context.0.render_pass_draw_mesh_tasks_indirect(
3331            &mut self.pass,
3332            indirect_buffer.id,
3333            indirect_offset,
3334        ) {
3335            self.context.handle_error(
3336                &self.error_sink,
3337                cause,
3338                self.pass.label(),
3339                "RenderPass::draw_mesh_tasks_indirect",
3340            );
3341        }
3342    }
3343
3344    fn multi_draw_indirect(
3345        &mut self,
3346        indirect_buffer: &dispatch::DispatchBuffer,
3347        indirect_offset: crate::BufferAddress,
3348        count: u32,
3349    ) {
3350        let indirect_buffer = indirect_buffer.as_core();
3351
3352        if let Err(cause) = self.context.0.render_pass_multi_draw_indirect(
3353            &mut self.pass,
3354            indirect_buffer.id,
3355            indirect_offset,
3356            count,
3357        ) {
3358            self.context.handle_error(
3359                &self.error_sink,
3360                cause,
3361                self.pass.label(),
3362                "RenderPass::multi_draw_indirect",
3363            );
3364        }
3365    }
3366
3367    fn multi_draw_indexed_indirect(
3368        &mut self,
3369        indirect_buffer: &dispatch::DispatchBuffer,
3370        indirect_offset: crate::BufferAddress,
3371        count: u32,
3372    ) {
3373        let indirect_buffer = indirect_buffer.as_core();
3374
3375        if let Err(cause) = self.context.0.render_pass_multi_draw_indexed_indirect(
3376            &mut self.pass,
3377            indirect_buffer.id,
3378            indirect_offset,
3379            count,
3380        ) {
3381            self.context.handle_error(
3382                &self.error_sink,
3383                cause,
3384                self.pass.label(),
3385                "RenderPass::multi_draw_indexed_indirect",
3386            );
3387        }
3388    }
3389
3390    fn multi_draw_mesh_tasks_indirect(
3391        &mut self,
3392        indirect_buffer: &dispatch::DispatchBuffer,
3393        indirect_offset: crate::BufferAddress,
3394        count: u32,
3395    ) {
3396        let indirect_buffer = indirect_buffer.as_core();
3397
3398        if let Err(cause) = self.context.0.render_pass_multi_draw_mesh_tasks_indirect(
3399            &mut self.pass,
3400            indirect_buffer.id,
3401            indirect_offset,
3402            count,
3403        ) {
3404            self.context.handle_error(
3405                &self.error_sink,
3406                cause,
3407                self.pass.label(),
3408                "RenderPass::multi_draw_mesh_tasks_indirect",
3409            );
3410        }
3411    }
3412
3413    fn multi_draw_indirect_count(
3414        &mut self,
3415        indirect_buffer: &dispatch::DispatchBuffer,
3416        indirect_offset: crate::BufferAddress,
3417        count_buffer: &dispatch::DispatchBuffer,
3418        count_buffer_offset: crate::BufferAddress,
3419        max_count: u32,
3420    ) {
3421        let indirect_buffer = indirect_buffer.as_core();
3422        let count_buffer = count_buffer.as_core();
3423
3424        if let Err(cause) = self.context.0.render_pass_multi_draw_indirect_count(
3425            &mut self.pass,
3426            indirect_buffer.id,
3427            indirect_offset,
3428            count_buffer.id,
3429            count_buffer_offset,
3430            max_count,
3431        ) {
3432            self.context.handle_error(
3433                &self.error_sink,
3434                cause,
3435                self.pass.label(),
3436                "RenderPass::multi_draw_indirect_count",
3437            );
3438        }
3439    }
3440
3441    fn multi_draw_indexed_indirect_count(
3442        &mut self,
3443        indirect_buffer: &dispatch::DispatchBuffer,
3444        indirect_offset: crate::BufferAddress,
3445        count_buffer: &dispatch::DispatchBuffer,
3446        count_buffer_offset: crate::BufferAddress,
3447        max_count: u32,
3448    ) {
3449        let indirect_buffer = indirect_buffer.as_core();
3450        let count_buffer = count_buffer.as_core();
3451
3452        if let Err(cause) = self
3453            .context
3454            .0
3455            .render_pass_multi_draw_indexed_indirect_count(
3456                &mut self.pass,
3457                indirect_buffer.id,
3458                indirect_offset,
3459                count_buffer.id,
3460                count_buffer_offset,
3461                max_count,
3462            )
3463        {
3464            self.context.handle_error(
3465                &self.error_sink,
3466                cause,
3467                self.pass.label(),
3468                "RenderPass::multi_draw_indexed_indirect_count",
3469            );
3470        }
3471    }
3472
3473    fn multi_draw_mesh_tasks_indirect_count(
3474        &mut self,
3475        indirect_buffer: &dispatch::DispatchBuffer,
3476        indirect_offset: crate::BufferAddress,
3477        count_buffer: &dispatch::DispatchBuffer,
3478        count_buffer_offset: crate::BufferAddress,
3479        max_count: u32,
3480    ) {
3481        let indirect_buffer = indirect_buffer.as_core();
3482        let count_buffer = count_buffer.as_core();
3483
3484        if let Err(cause) = self
3485            .context
3486            .0
3487            .render_pass_multi_draw_mesh_tasks_indirect_count(
3488                &mut self.pass,
3489                indirect_buffer.id,
3490                indirect_offset,
3491                count_buffer.id,
3492                count_buffer_offset,
3493                max_count,
3494            )
3495        {
3496            self.context.handle_error(
3497                &self.error_sink,
3498                cause,
3499                self.pass.label(),
3500                "RenderPass::multi_draw_mesh_tasks_indirect_count",
3501            );
3502        }
3503    }
3504
3505    fn insert_debug_marker(&mut self, label: &str) {
3506        if let Err(cause) = self
3507            .context
3508            .0
3509            .render_pass_insert_debug_marker(&mut self.pass, label, 0)
3510        {
3511            self.context.handle_error(
3512                &self.error_sink,
3513                cause,
3514                self.pass.label(),
3515                "RenderPass::insert_debug_marker",
3516            );
3517        }
3518    }
3519
3520    fn push_debug_group(&mut self, group_label: &str) {
3521        if let Err(cause) =
3522            self.context
3523                .0
3524                .render_pass_push_debug_group(&mut self.pass, group_label, 0)
3525        {
3526            self.context.handle_error(
3527                &self.error_sink,
3528                cause,
3529                self.pass.label(),
3530                "RenderPass::push_debug_group",
3531            );
3532        }
3533    }
3534
3535    fn pop_debug_group(&mut self) {
3536        if let Err(cause) = self.context.0.render_pass_pop_debug_group(&mut self.pass) {
3537            self.context.handle_error(
3538                &self.error_sink,
3539                cause,
3540                self.pass.label(),
3541                "RenderPass::pop_debug_group",
3542            );
3543        }
3544    }
3545
3546    fn write_timestamp(&mut self, query_set: &dispatch::DispatchQuerySet, query_index: u32) {
3547        let query_set = query_set.as_core();
3548
3549        if let Err(cause) =
3550            self.context
3551                .0
3552                .render_pass_write_timestamp(&mut self.pass, query_set.id, query_index)
3553        {
3554            self.context.handle_error(
3555                &self.error_sink,
3556                cause,
3557                self.pass.label(),
3558                "RenderPass::write_timestamp",
3559            );
3560        }
3561    }
3562
3563    fn begin_occlusion_query(&mut self, query_index: u32) {
3564        if let Err(cause) = self
3565            .context
3566            .0
3567            .render_pass_begin_occlusion_query(&mut self.pass, query_index)
3568        {
3569            self.context.handle_error(
3570                &self.error_sink,
3571                cause,
3572                self.pass.label(),
3573                "RenderPass::begin_occlusion_query",
3574            );
3575        }
3576    }
3577
3578    fn end_occlusion_query(&mut self) {
3579        if let Err(cause) = self
3580            .context
3581            .0
3582            .render_pass_end_occlusion_query(&mut self.pass)
3583        {
3584            self.context.handle_error(
3585                &self.error_sink,
3586                cause,
3587                self.pass.label(),
3588                "RenderPass::end_occlusion_query",
3589            );
3590        }
3591    }
3592
3593    fn begin_pipeline_statistics_query(
3594        &mut self,
3595        query_set: &dispatch::DispatchQuerySet,
3596        query_index: u32,
3597    ) {
3598        let query_set = query_set.as_core();
3599
3600        if let Err(cause) = self.context.0.render_pass_begin_pipeline_statistics_query(
3601            &mut self.pass,
3602            query_set.id,
3603            query_index,
3604        ) {
3605            self.context.handle_error(
3606                &self.error_sink,
3607                cause,
3608                self.pass.label(),
3609                "RenderPass::begin_pipeline_statistics_query",
3610            );
3611        }
3612    }
3613
3614    fn end_pipeline_statistics_query(&mut self) {
3615        if let Err(cause) = self
3616            .context
3617            .0
3618            .render_pass_end_pipeline_statistics_query(&mut self.pass)
3619        {
3620            self.context.handle_error(
3621                &self.error_sink,
3622                cause,
3623                self.pass.label(),
3624                "RenderPass::end_pipeline_statistics_query",
3625            );
3626        }
3627    }
3628
3629    fn execute_bundles(
3630        &mut self,
3631        render_bundles: &mut dyn Iterator<Item = &dispatch::DispatchRenderBundle>,
3632    ) {
3633        let temp_render_bundles = render_bundles
3634            .map(|rb| rb.as_core().id)
3635            .collect::<SmallVec<[_; 4]>>();
3636        if let Err(cause) = self
3637            .context
3638            .0
3639            .render_pass_execute_bundles(&mut self.pass, &temp_render_bundles)
3640        {
3641            self.context.handle_error(
3642                &self.error_sink,
3643                cause,
3644                self.pass.label(),
3645                "RenderPass::execute_bundles",
3646            );
3647        }
3648    }
3649
3650    fn end(&mut self) {
3651        if let Err(cause) = self.context.0.render_pass_end(&mut self.pass) {
3652            self.context.handle_error(
3653                &self.error_sink,
3654                cause,
3655                self.pass.label(),
3656                "RenderPass::end",
3657            );
3658        }
3659    }
3660}
3661
impl Drop for CoreRenderPass {
    fn drop(&mut self) {
        // Ensure the pass is ended even if the user never called `end`
        // explicitly; any error is reported through the pass's error sink.
        dispatch::RenderPassInterface::end(self);
    }
}
3667
3668impl dispatch::RenderBundleEncoderInterface for CoreRenderBundleEncoder {
3669    fn set_pipeline(&mut self, pipeline: &dispatch::DispatchRenderPipeline) {
3670        let pipeline = pipeline.as_core();
3671
3672        wgpu_render_bundle_set_pipeline(&mut self.encoder, pipeline.id)
3673    }
3674
    // Records a bind-group change into the bundle. `None` unbinds the slot.
    fn set_bind_group(
        &mut self,
        index: u32,
        bind_group: Option<&dispatch::DispatchBindGroup>,
        offsets: &[crate::DynamicOffset],
    ) {
        let bg = bind_group.map(|bg| bg.as_core().id);

        // SAFETY: `offsets.as_ptr()` / `offsets.len()` describe the live
        // `offsets` slice, which outlives this call.
        unsafe {
            wgpu_render_bundle_set_bind_group(
                &mut self.encoder,
                index,
                bg,
                offsets.as_ptr(),
                offsets.len(),
            )
        }
    }
3693
3694    fn set_index_buffer(
3695        &mut self,
3696        buffer: &dispatch::DispatchBuffer,
3697        index_format: crate::IndexFormat,
3698        offset: crate::BufferAddress,
3699        size: Option<crate::BufferSize>,
3700    ) {
3701        let buffer = buffer.as_core();
3702
3703        self.encoder
3704            .set_index_buffer(buffer.id, index_format, offset, size)
3705    }
3706
3707    fn set_vertex_buffer(
3708        &mut self,
3709        slot: u32,
3710        buffer: &dispatch::DispatchBuffer,
3711        offset: crate::BufferAddress,
3712        size: Option<crate::BufferSize>,
3713    ) {
3714        let buffer = buffer.as_core();
3715
3716        wgpu_render_bundle_set_vertex_buffer(&mut self.encoder, slot, buffer.id, offset, size)
3717    }
3718
3719    fn set_push_constants(&mut self, stages: crate::ShaderStages, offset: u32, data: &[u8]) {
3720        unsafe {
3721            wgpu_render_bundle_set_push_constants(
3722                &mut self.encoder,
3723                stages,
3724                offset,
3725                data.len().try_into().unwrap(),
3726                data.as_ptr(),
3727            )
3728        }
3729    }
3730
3731    fn draw(&mut self, vertices: Range<u32>, instances: Range<u32>) {
3732        wgpu_render_bundle_draw(
3733            &mut self.encoder,
3734            vertices.end - vertices.start,
3735            instances.end - instances.start,
3736            vertices.start,
3737            instances.start,
3738        )
3739    }
3740
3741    fn draw_indexed(&mut self, indices: Range<u32>, base_vertex: i32, instances: Range<u32>) {
3742        wgpu_render_bundle_draw_indexed(
3743            &mut self.encoder,
3744            indices.end - indices.start,
3745            instances.end - instances.start,
3746            indices.start,
3747            base_vertex,
3748            instances.start,
3749        )
3750    }
3751
3752    fn draw_indirect(
3753        &mut self,
3754        indirect_buffer: &dispatch::DispatchBuffer,
3755        indirect_offset: crate::BufferAddress,
3756    ) {
3757        let indirect_buffer = indirect_buffer.as_core();
3758
3759        wgpu_render_bundle_draw_indirect(&mut self.encoder, indirect_buffer.id, indirect_offset)
3760    }
3761
3762    fn draw_indexed_indirect(
3763        &mut self,
3764        indirect_buffer: &dispatch::DispatchBuffer,
3765        indirect_offset: crate::BufferAddress,
3766    ) {
3767        let indirect_buffer = indirect_buffer.as_core();
3768
3769        wgpu_render_bundle_draw_indexed_indirect(
3770            &mut self.encoder,
3771            indirect_buffer.id,
3772            indirect_offset,
3773        )
3774    }
3775
3776    fn finish(self, desc: &crate::RenderBundleDescriptor<'_>) -> dispatch::DispatchRenderBundle
3777    where
3778        Self: Sized,
3779    {
3780        let (id, error) = self.context.0.render_bundle_encoder_finish(
3781            self.encoder,
3782            &desc.map_label(|l| l.map(Borrowed)),
3783            None,
3784        );
3785        if let Some(err) = error {
3786            self.context
3787                .handle_error_fatal(err, "RenderBundleEncoder::finish");
3788        }
3789        CoreRenderBundle { id }.into()
3790    }
3791}
3792
3793impl dispatch::RenderBundleInterface for CoreRenderBundle {}
3794
3795impl dispatch::SurfaceInterface for CoreSurface {
3796    fn get_capabilities(&self, adapter: &dispatch::DispatchAdapter) -> wgt::SurfaceCapabilities {
3797        let adapter = adapter.as_core();
3798
3799        self.context
3800            .0
3801            .surface_get_capabilities(self.id, adapter.id)
3802            .unwrap_or_default()
3803    }
3804
3805    fn configure(&self, device: &dispatch::DispatchDevice, config: &crate::SurfaceConfiguration) {
3806        let device = device.as_core();
3807
3808        let error = self.context.0.surface_configure(self.id, device.id, config);
3809        if let Some(e) = error {
3810            self.context
3811                .handle_error_nolabel(&device.error_sink, e, "Surface::configure");
3812        } else {
3813            *self.configured_device.lock() = Some(device.id);
3814            *self.error_sink.lock() = Some(device.error_sink.clone());
3815        }
3816    }
3817
3818    fn get_current_texture(
3819        &self,
3820    ) -> (
3821        Option<dispatch::DispatchTexture>,
3822        crate::SurfaceStatus,
3823        dispatch::DispatchSurfaceOutputDetail,
3824    ) {
3825        let output_detail = CoreSurfaceOutputDetail {
3826            context: self.context.clone(),
3827            surface_id: self.id,
3828        }
3829        .into();
3830
3831        match self.context.0.surface_get_current_texture(self.id, None) {
3832            Ok(wgc::present::SurfaceOutput {
3833                status,
3834                texture: texture_id,
3835            }) => {
3836                let data = texture_id
3837                    .map(|id| CoreTexture {
3838                        context: self.context.clone(),
3839                        id,
3840                        error_sink: Arc::new(Mutex::new(ErrorSinkRaw::new())),
3841                    })
3842                    .map(Into::into);
3843
3844                (data, status, output_detail)
3845            }
3846            Err(err) => {
3847                let error_sink = self.error_sink.lock();
3848                match error_sink.as_ref() {
3849                    Some(error_sink) => {
3850                        self.context.handle_error_nolabel(
3851                            error_sink,
3852                            err,
3853                            "Surface::get_current_texture_view",
3854                        );
3855                        (None, crate::SurfaceStatus::Unknown, output_detail)
3856                    }
3857                    None => self
3858                        .context
3859                        .handle_error_fatal(err, "Surface::get_current_texture_view"),
3860                }
3861            }
3862        }
3863    }
3864}
3865
impl Drop for CoreSurface {
    fn drop(&mut self) {
        // Release the wgpu-core side of the surface when the wrapper goes away.
        self.context.0.surface_drop(self.id)
    }
}
3871
3872impl dispatch::SurfaceOutputDetailInterface for CoreSurfaceOutputDetail {
3873    fn present(&self) {
3874        match self.context.0.surface_present(self.surface_id) {
3875            Ok(_status) => (),
3876            Err(err) => self.context.handle_error_fatal(err, "Surface::present"),
3877        }
3878    }
3879
3880    fn texture_discard(&self) {
3881        match self.context.0.surface_texture_discard(self.surface_id) {
3882            Ok(_status) => (),
3883            Err(err) => self
3884                .context
3885                .handle_error_fatal(err, "Surface::discard_texture"),
3886        }
3887    }
3888}
impl Drop for CoreSurfaceOutputDetail {
    fn drop(&mut self) {
        // Intentionally a no-op: present/discard are driven explicitly by the
        // api-level surface-texture struct, which invokes `texture_discard`
        // itself when a texture is dropped without being presented.

        // no-op
    }
}
3896
3897impl dispatch::QueueWriteBufferInterface for CoreQueueWriteBuffer {
3898    fn slice(&self) -> &[u8] {
3899        panic!()
3900    }
3901
3902    #[inline]
3903    fn slice_mut(&mut self) -> &mut [u8] {
3904        self.mapping.slice_mut()
3905    }
3906}
impl Drop for CoreQueueWriteBuffer {
    fn drop(&mut self) {
        // Intentionally a no-op: the api-level struct submits the staging
        // buffer via `queue.write_staging_buffer`, so nothing needs to happen
        // here.

        // no-op
    }
}
3914
impl dispatch::BufferMappedRangeInterface for CoreBufferMappedRange {
    /// Shared byte view of the mapped region.
    #[inline]
    fn slice(&self) -> &[u8] {
        // SAFETY: `ptr`/`size` describe the mapped region this wrapper was
        // constructed from; the mapping is presumed to remain valid for the
        // wrapper's lifetime — invariant upheld by the buffer-mapping code
        // that creates `CoreBufferMappedRange` (TODO(review): confirm).
        unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.size) }
    }

    /// Exclusive byte view of the mapped region.
    #[inline]
    fn slice_mut(&mut self) -> &mut [u8] {
        // SAFETY: same region validity as `slice`; additionally `&mut self`
        // guarantees no other slice borrowed from this wrapper is live, so
        // the mutable aliasing rules of `from_raw_parts_mut` are satisfied.
        unsafe { slice::from_raw_parts_mut(self.ptr.as_ptr(), self.size) }
    }

    // Only meaningful on the WebGPU (wasm) backend; the core backend never
    // holds a JS-side buffer, so reaching this is a dispatch bug.
    #[cfg(webgpu)]
    fn as_uint8array(&self) -> &js_sys::Uint8Array {
        panic!("Only available on WebGPU")
    }
}