wgpu/backend/wgpu_core.rs

1use alloc::{
2    borrow::Cow::{self, Borrowed},
3    boxed::Box,
4    format,
5    string::{String, ToString as _},
6    sync::Arc,
7    vec,
8    vec::Vec,
9};
10use core::{
11    error::Error,
12    fmt,
13    future::ready,
14    ops::{Deref, Range},
15    pin::Pin,
16    ptr::NonNull,
17    slice,
18};
19
20use arrayvec::ArrayVec;
21use smallvec::SmallVec;
22use wgc::{
23    command::bundle_ffi::*, error::ContextErrorSource, pipeline::CreateShaderModuleError,
24    resource::BlasPrepareCompactResult,
25};
26use wgt::{
27    error::{ErrorType, WebGpuError},
28    WasmNotSendSync,
29};
30
31use crate::util::Mutex;
32use crate::{
33    api,
34    dispatch::{self, BlasCompactCallback, BufferMappedRangeInterface},
35    BindingResource, Blas, BufferBinding, BufferDescriptor, CompilationInfo, CompilationMessage,
36    CompilationMessageType, ErrorSource, Features, Label, LoadOp, MapMode, Operations,
37    ShaderSource, SurfaceTargetUnsafe, TextureDescriptor, Tlas,
38};
39
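/// `wgpu-core`-backed implementation of the wgpu context.
///
/// This is a cheaply clonable handle to the `wgc::global::Global` that owns
/// all backend resources; identity comparisons use the `Arc` address.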
40#[derive(Clone)]
41pub struct ContextWgpuCore(Arc<wgc::global::Global>);
42
43impl Drop for ContextWgpuCore {
44    fn drop(&mut self) {
45        // Intentionally left empty.
46    }
47}
48
49impl fmt::Debug for ContextWgpuCore {
50    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
51        f.debug_struct("ContextWgpuCore")
52            .field("type", &"Native")
53            .finish()
54    }
55}
56
57impl ContextWgpuCore {
58    pub unsafe fn from_hal_instance<A: hal::Api>(hal_instance: A::Instance) -> Self {
59        Self(unsafe {
60            Arc::new(wgc::global::Global::from_hal_instance::<A>(
61                "wgpu",
62                hal_instance,
63            ))
64        })
65    }
66
67    /// # Safety
68    ///
69    /// - The raw instance handle returned must not be manually destroyed.
70    pub unsafe fn instance_as_hal<A: hal::Api>(&self) -> Option<&A::Instance> {
71        unsafe { self.0.instance_as_hal::<A>() }
72    }
73
74    pub unsafe fn from_core_instance(core_instance: wgc::instance::Instance) -> Self {
75        Self(unsafe { Arc::new(wgc::global::Global::from_instance(core_instance)) })
76    }
77
78    #[cfg(wgpu_core)]
79    pub fn enumerate_adapters(&self, backends: wgt::Backends) -> Vec<wgc::id::AdapterId> {
80        self.0.enumerate_adapters(backends)
81    }
82
83    pub unsafe fn create_adapter_from_hal<A: hal::Api>(
84        &self,
85        hal_adapter: hal::ExposedAdapter<A>,
86    ) -> wgc::id::AdapterId {
87        unsafe { self.0.create_adapter_from_hal(hal_adapter.into(), None) }
88    }
89
90    pub unsafe fn adapter_as_hal<A: hal::Api>(
91        &self,
92        adapter: &CoreAdapter,
93    ) -> Option<impl Deref<Target = A::Adapter> + WasmNotSendSync> {
94        unsafe { self.0.adapter_as_hal::<A>(adapter.id) }
95    }
96
97    pub unsafe fn buffer_as_hal<A: hal::Api>(
98        &self,
99        buffer: &CoreBuffer,
100    ) -> Option<impl Deref<Target = A::Buffer>> {
101        unsafe { self.0.buffer_as_hal::<A>(buffer.id) }
102    }
103
104    pub unsafe fn create_device_from_hal<A: hal::Api>(
105        &self,
106        adapter: &CoreAdapter,
107        hal_device: hal::OpenDevice<A>,
108        desc: &crate::DeviceDescriptor<'_>,
109    ) -> Result<(CoreDevice, CoreQueue), crate::RequestDeviceError> {
110        if !matches!(desc.trace, wgt::Trace::Off) {
111            log::error!(
112                "
113                Feature 'trace' has been removed temporarily; \
114                see https://github.com/gfx-rs/wgpu/issues/5974. \
115                The `trace` parameter will have no effect."
116            );
117        }
118
119        let (device_id, queue_id) = unsafe {
120            self.0.create_device_from_hal(
121                adapter.id,
122                hal_device.into(),
123                &desc.map_label(|l| l.map(Borrowed)),
124                None,
125                None,
126            )
127        }?;
128        let error_sink = Arc::new(Mutex::new(ErrorSinkRaw::new()));
129        let device = CoreDevice {
130            context: self.clone(),
131            id: device_id,
132            error_sink: error_sink.clone(),
133            features: desc.required_features,
134        };
135        let queue = CoreQueue {
136            context: self.clone(),
137            id: queue_id,
138            error_sink,
139        };
140        Ok((device, queue))
141    }
142
143    pub unsafe fn create_texture_from_hal<A: hal::Api>(
144        &self,
145        hal_texture: A::Texture,
146        device: &CoreDevice,
147        desc: &TextureDescriptor<'_>,
148    ) -> CoreTexture {
149        let descriptor = desc.map_label_and_view_formats(|l| l.map(Borrowed), |v| v.to_vec());
150        let (id, error) = unsafe {
151            self.0
152                .create_texture_from_hal(Box::new(hal_texture), device.id, &descriptor, None)
153        };
154        if let Some(cause) = error {
155            self.handle_error(
156                &device.error_sink,
157                cause,
158                desc.label,
159                "Device::create_texture_from_hal",
160            );
161        }
162        CoreTexture {
163            context: self.clone(),
164            id,
165            error_sink: Arc::clone(&device.error_sink),
166        }
167    }
168
169    /// # Safety
170    ///
171    /// - `hal_buffer` must be created from `device`.
172    /// - `hal_buffer` must be created respecting `desc`.
173    /// - `hal_buffer` must be initialized.
174    /// - `hal_buffer` must not have zero size.
175    pub unsafe fn create_buffer_from_hal<A: hal::Api>(
176        &self,
177        hal_buffer: A::Buffer,
178        device: &CoreDevice,
179        desc: &BufferDescriptor<'_>,
180    ) -> CoreBuffer {
181        let (id, error) = unsafe {
182            self.0.create_buffer_from_hal::<A>(
183                hal_buffer,
184                device.id,
185                &desc.map_label(|l| l.map(Borrowed)),
186                None,
187            )
188        };
189        if let Some(cause) = error {
190            self.handle_error(
191                &device.error_sink,
192                cause,
193                desc.label,
194                "Device::create_buffer_from_hal",
195            );
196        }
197        CoreBuffer {
198            context: self.clone(),
199            id,
200            error_sink: Arc::clone(&device.error_sink),
201        }
202    }
203
204    pub unsafe fn device_as_hal<A: hal::Api>(
205        &self,
206        device: &CoreDevice,
207    ) -> Option<impl Deref<Target = A::Device>> {
208        unsafe { self.0.device_as_hal::<A>(device.id) }
209    }
210
211    pub unsafe fn surface_as_hal<A: hal::Api>(
212        &self,
213        surface: &CoreSurface,
214    ) -> Option<impl Deref<Target = A::Surface>> {
215        unsafe { self.0.surface_as_hal::<A>(surface.id) }
216    }
217
218    pub unsafe fn texture_as_hal<A: hal::Api>(
219        &self,
220        texture: &CoreTexture,
221    ) -> Option<impl Deref<Target = A::Texture>> {
222        unsafe { self.0.texture_as_hal::<A>(texture.id) }
223    }
224
225    pub unsafe fn texture_view_as_hal<A: hal::Api>(
226        &self,
227        texture_view: &CoreTextureView,
228    ) -> Option<impl Deref<Target = A::TextureView>> {
229        unsafe { self.0.texture_view_as_hal::<A>(texture_view.id) }
230    }
231
232    /// Note that calling this method starts wgpu_core-level command recording.
233    pub unsafe fn command_encoder_as_hal_mut<
234        A: hal::Api,
235        F: FnOnce(Option<&mut A::CommandEncoder>) -> R,
236        R,
237    >(
238        &self,
239        command_encoder: &CoreCommandEncoder,
240        hal_command_encoder_callback: F,
241    ) -> R {
242        unsafe {
243            self.0.command_encoder_as_hal_mut::<A, F, R>(
244                command_encoder.id,
245                hal_command_encoder_callback,
246            )
247        }
248    }
249
250    pub unsafe fn blas_as_hal<A: hal::Api>(
251        &self,
252        blas: &CoreBlas,
253    ) -> Option<impl Deref<Target = A::AccelerationStructure>> {
254        unsafe { self.0.blas_as_hal::<A>(blas.id) }
255    }
256
257    pub unsafe fn tlas_as_hal<A: hal::Api>(
258        &self,
259        tlas: &CoreTlas,
260    ) -> Option<impl Deref<Target = A::AccelerationStructure>> {
261        unsafe { self.0.tlas_as_hal::<A>(tlas.id) }
262    }
263
264    pub fn generate_report(&self) -> wgc::global::GlobalReport {
265        self.0.generate_report()
266    }
267
268    #[cold]
269    #[track_caller]
270    #[inline(never)]
271    fn handle_error_inner(
272        &self,
273        sink_mutex: &Mutex<ErrorSinkRaw>,
274        error_type: ErrorType,
275        source: ContextErrorSource,
276        label: Label<'_>,
277        fn_ident: &'static str,
278    ) {
279        let source: ErrorSource = Box::new(wgc::error::ContextError {
280            fn_ident,
281            source,
282            label: label.unwrap_or_default().to_string(),
283        });
284        let final_error_handling = {
285            let mut sink = sink_mutex.lock();
286            let description = || self.format_error(&*source);
287            let error = match error_type {
288                ErrorType::Internal => {
289                    let description = description();
290                    crate::Error::Internal {
291                        source,
292                        description,
293                    }
294                }
295                ErrorType::OutOfMemory => crate::Error::OutOfMemory { source },
296                ErrorType::Validation => {
297                    let description = description();
298                    crate::Error::Validation {
299                        source,
300                        description,
301                    }
302                }
303                ErrorType::DeviceLost => return, // will be surfaced via callback
304            };
305            sink.handle_error_or_return_handler(error)
306        };
307
308        if let Some(f) = final_error_handling {
309            // If the user has provided their own `uncaptured_handler` callback, invoke it now,
310            // having released our lock on `sink_mutex`. See the comments on
311            // `handle_error_or_return_handler` for details.
312            f();
313        }
314    }
315
316    #[inline]
317    #[track_caller]
318    fn handle_error(
319        &self,
320        sink_mutex: &Mutex<ErrorSinkRaw>,
321        source: impl WebGpuError + WasmNotSendSync + 'static,
322        label: Label<'_>,
323        fn_ident: &'static str,
324    ) {
325        let error_type = source.webgpu_error_type();
326        self.handle_error_inner(sink_mutex, error_type, Box::new(source), label, fn_ident)
327    }
328
329    #[inline]
330    #[track_caller]
331    fn handle_error_nolabel(
332        &self,
333        sink_mutex: &Mutex<ErrorSinkRaw>,
334        source: impl WebGpuError + WasmNotSendSync + 'static,
335        fn_ident: &'static str,
336    ) {
337        let error_type = source.webgpu_error_type();
338        self.handle_error_inner(sink_mutex, error_type, Box::new(source), None, fn_ident)
339    }
340
341    #[track_caller]
342    #[cold]
343    fn handle_error_fatal(
344        &self,
345        cause: impl Error + WasmNotSendSync + 'static,
346        operation: &'static str,
347    ) -> ! {
348        panic!("Error in {operation}: {f}", f = self.format_error(&cause));
349    }
350
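    /// Render `err` and its `source()` chain as an indented tree, expanding a
    /// `wgc::error::MultiError` into its constituent errors.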
351    #[inline(never)]
352    fn format_error(&self, err: &(dyn Error + 'static)) -> String {
353        let mut output = String::new();
354        let mut level = 1;
355
356        fn print_tree(output: &mut String, level: &mut usize, e: &(dyn Error + 'static)) {
357            let mut print = |e: &(dyn Error + 'static)| {
358                use core::fmt::Write;
359                writeln!(output, "{}{}", " ".repeat(*level * 2), e).unwrap();
360
361                if let Some(e) = e.source() {
362                    *level += 1;
363                    print_tree(output, level, e);
364                    *level -= 1;
365                }
366            };
367            if let Some(multi) = e.downcast_ref::<wgc::error::MultiError>() {
368                for e in multi.errors() {
369                    print(e);
370                }
371            } else {
372                print(e);
373            }
374        }
375
376        print_tree(&mut output, &mut level, err);
377
378        format!("Validation Error\n\nCaused by:\n{output}")
379    }
380
381    pub unsafe fn queue_as_hal<A: hal::Api>(
382        &self,
383        queue: &CoreQueue,
384    ) -> Option<impl Deref<Target = A::Queue> + WasmNotSendSync> {
385        unsafe { self.0.queue_as_hal::<A>(queue.id) }
386    }
387}
388
389fn map_buffer_copy_view(view: crate::TexelCopyBufferInfo<'_>) -> wgc::command::TexelCopyBufferInfo {
390    wgc::command::TexelCopyBufferInfo {
391        buffer: view.buffer.inner.as_core().id,
392        layout: view.layout,
393    }
394}
395
396fn map_texture_copy_view(
397    view: crate::TexelCopyTextureInfo<'_>,
398) -> wgc::command::TexelCopyTextureInfo {
399    wgc::command::TexelCopyTextureInfo {
400        texture: view.texture.inner.as_core().id,
401        mip_level: view.mip_level,
402        origin: view.origin,
403        aspect: view.aspect,
404    }
405}
406
407#[cfg_attr(not(webgl), expect(unused))]
408fn map_texture_tagged_copy_view(
409    view: crate::CopyExternalImageDestInfo<&api::Texture>,
410) -> wgc::command::CopyExternalImageDestInfo {
411    wgc::command::CopyExternalImageDestInfo {
412        texture: view.texture.inner.as_core().id,
413        mip_level: view.mip_level,
414        origin: view.origin,
415        aspect: view.aspect,
416        color_space: view.color_space,
417        premultiplied_alpha: view.premultiplied_alpha,
418    }
419}
420
421fn map_load_op<V: Copy>(load: &LoadOp<V>) -> LoadOp<Option<V>> {
422    match load {
423        LoadOp::Clear(clear_value) => LoadOp::Clear(Some(*clear_value)),
424        LoadOp::Load => LoadOp::Load,
425    }
426}
427
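/// Convert optional `Operations` into a `wgc` pass channel: `Some` carries the
/// load/store ops, while `None` marks the channel as read-only with no ops.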
428fn map_pass_channel<V: Copy>(ops: Option<&Operations<V>>) -> wgc::command::PassChannel<Option<V>> {
429    match ops {
430        Some(&Operations { load, store }) => wgc::command::PassChannel {
431            load_op: Some(map_load_op(&load)),
432            store_op: Some(store),
433            read_only: false,
434        },
435        None => wgc::command::PassChannel {
436            load_op: None,
437            store_op: None,
438            read_only: true,
439        },
440    }
441}
442
443#[derive(Debug)]
444pub struct CoreSurface {
445    pub(crate) context: ContextWgpuCore,
446    id: wgc::id::SurfaceId,
447    /// The device this surface was configured with; needed to know which
448    /// backend code to execute when acquiring a new frame.
449    configured_device: Mutex<Option<wgc::id::DeviceId>>,
450    /// The error sink with which to report errors.
451    /// `None` if the surface has not been configured.
452    error_sink: Mutex<Option<ErrorSink>>,
453}
454
455#[derive(Debug)]
456pub struct CoreAdapter {
457    pub(crate) context: ContextWgpuCore,
458    pub(crate) id: wgc::id::AdapterId,
459}
460
461#[derive(Debug)]
462pub struct CoreDevice {
463    pub(crate) context: ContextWgpuCore,
464    id: wgc::id::DeviceId,
465    error_sink: ErrorSink,
466    features: Features,
467}
468
469#[derive(Debug)]
470pub struct CoreBuffer {
471    pub(crate) context: ContextWgpuCore,
472    id: wgc::id::BufferId,
473    error_sink: ErrorSink,
474}
475
476#[derive(Debug)]
477pub struct CoreShaderModule {
478    pub(crate) context: ContextWgpuCore,
479    id: wgc::id::ShaderModuleId,
480    compilation_info: CompilationInfo,
481}
482
483#[derive(Debug)]
484pub struct CoreBindGroupLayout {
485    pub(crate) context: ContextWgpuCore,
486    id: wgc::id::BindGroupLayoutId,
487}
488
489#[derive(Debug)]
490pub struct CoreBindGroup {
491    pub(crate) context: ContextWgpuCore,
492    id: wgc::id::BindGroupId,
493}
494
495#[derive(Debug)]
496pub struct CoreTexture {
497    pub(crate) context: ContextWgpuCore,
498    id: wgc::id::TextureId,
499    error_sink: ErrorSink,
500}
501
502#[derive(Debug)]
503pub struct CoreTextureView {
504    pub(crate) context: ContextWgpuCore,
505    id: wgc::id::TextureViewId,
506}
507
508#[derive(Debug)]
509pub struct CoreExternalTexture {
510    pub(crate) context: ContextWgpuCore,
511    id: wgc::id::ExternalTextureId,
512}
513
514#[derive(Debug)]
515pub struct CoreSampler {
516    pub(crate) context: ContextWgpuCore,
517    id: wgc::id::SamplerId,
518}
519
520#[derive(Debug)]
521pub struct CoreQuerySet {
522    pub(crate) context: ContextWgpuCore,
523    id: wgc::id::QuerySetId,
524}
525
526#[derive(Debug)]
527pub struct CorePipelineLayout {
528    pub(crate) context: ContextWgpuCore,
529    id: wgc::id::PipelineLayoutId,
530}
531
532#[derive(Debug)]
533pub struct CorePipelineCache {
534    pub(crate) context: ContextWgpuCore,
535    id: wgc::id::PipelineCacheId,
536}
537
538#[derive(Debug)]
539pub struct CoreCommandBuffer {
540    pub(crate) context: ContextWgpuCore,
541    id: wgc::id::CommandBufferId,
542}
543
544#[derive(Debug)]
545pub struct CoreRenderBundleEncoder {
546    pub(crate) context: ContextWgpuCore,
547    encoder: wgc::command::RenderBundleEncoder,
548    id: crate::cmp::Identifier,
549}
550
551#[derive(Debug)]
552pub struct CoreRenderBundle {
553    id: wgc::id::RenderBundleId,
554}
555
556#[derive(Debug)]
557pub struct CoreQueue {
558    pub(crate) context: ContextWgpuCore,
559    id: wgc::id::QueueId,
560    error_sink: ErrorSink,
561}
562
563#[derive(Debug)]
564pub struct CoreComputePipeline {
565    pub(crate) context: ContextWgpuCore,
566    id: wgc::id::ComputePipelineId,
567    error_sink: ErrorSink,
568}
569
570#[derive(Debug)]
571pub struct CoreRenderPipeline {
572    pub(crate) context: ContextWgpuCore,
573    id: wgc::id::RenderPipelineId,
574    error_sink: ErrorSink,
575}
576
577#[derive(Debug)]
578pub struct CoreComputePass {
579    pub(crate) context: ContextWgpuCore,
580    pass: wgc::command::ComputePass,
581    error_sink: ErrorSink,
582    id: crate::cmp::Identifier,
583}
584
585#[derive(Debug)]
586pub struct CoreRenderPass {
587    pub(crate) context: ContextWgpuCore,
588    pass: wgc::command::RenderPass,
589    error_sink: ErrorSink,
590    id: crate::cmp::Identifier,
591}
592
593#[derive(Debug)]
594pub struct CoreCommandEncoder {
595    pub(crate) context: ContextWgpuCore,
596    id: wgc::id::CommandEncoderId,
597    error_sink: ErrorSink,
598}
599
600#[derive(Debug)]
601pub struct CoreBlas {
602    pub(crate) context: ContextWgpuCore,
603    id: wgc::id::BlasId,
604    error_sink: ErrorSink,
605}
606
607#[derive(Debug)]
608pub struct CoreTlas {
609    pub(crate) context: ContextWgpuCore,
610    id: wgc::id::TlasId,
611    // error_sink: ErrorSink,
612}
613
614#[derive(Debug)]
615pub struct CoreSurfaceOutputDetail {
616    context: ContextWgpuCore,
617    surface_id: wgc::id::SurfaceId,
618}
619
620type ErrorSink = Arc<Mutex<ErrorSinkRaw>>;
621
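/// One entry on a device's error-scope stack: the filter the scope was pushed
/// with, plus the first matching error captured while the scope was open.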
622struct ErrorScope {
623    error: Option<crate::Error>,
624    filter: crate::ErrorFilter,
625}
626
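/// Per-device error state shared between the device, its queue, and resources:
/// the stack of open error scopes and the optional uncaptured-error handler.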
627struct ErrorSinkRaw {
628    scopes: Vec<ErrorScope>,
629    uncaptured_handler: Option<Arc<dyn crate::UncapturedErrorHandler>>,
630}
631
632impl ErrorSinkRaw {
633    fn new() -> ErrorSinkRaw {
634        ErrorSinkRaw {
635            scopes: Vec::new(),
636            uncaptured_handler: None,
637        }
638    }
639
640    /// Deliver the error to
641    ///
642    /// * the innermost error scope whose filter matches the error, if any, or
643    /// * the uncaptured error handler, if there is one, or
644    /// * [`default_error_handler()`].
645    ///
646    /// If a closure is returned, the caller should call it immediately after dropping the
647    /// [`ErrorSink`] mutex guard. This ensures that the user callback is never
648    /// invoked while a wgpu mutex is held.
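    ///
    /// A minimal sketch of the intended calling pattern (mirroring
    /// `handle_error_inner`; variable names are illustrative only):
    ///
    /// ```ignore
    /// let deferred = {
    ///     let mut sink = sink_mutex.lock();
    ///     sink.handle_error_or_return_handler(error)
    /// }; // the mutex guard is dropped here
    /// if let Some(callback) = deferred {
    ///     // Invoke the user's uncaptured-error handler without holding the lock.
    ///     callback();
    /// }
    /// ```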
649    #[track_caller]
650    #[must_use]
651    fn handle_error_or_return_handler(&mut self, err: crate::Error) -> Option<impl FnOnce()> {
652        let filter = match err {
653            crate::Error::OutOfMemory { .. } => crate::ErrorFilter::OutOfMemory,
654            crate::Error::Validation { .. } => crate::ErrorFilter::Validation,
655            crate::Error::Internal { .. } => crate::ErrorFilter::Internal,
656        };
657        match self
658            .scopes
659            .iter_mut()
660            .rev()
661            .find(|scope| scope.filter == filter)
662        {
663            Some(scope) => {
664                if scope.error.is_none() {
665                    scope.error = Some(err);
666                }
667                None
668            }
669            None => {
670                if let Some(custom_handler) = &self.uncaptured_handler {
671                    let custom_handler = Arc::clone(custom_handler);
672                    Some(move || (custom_handler)(err))
673                } else {
674                    // A direct call preserves #[track_caller], which a call through `dyn` cannot.
675                    default_error_handler(err)
676                }
677            }
678        }
679    }
680}
681
682impl fmt::Debug for ErrorSinkRaw {
683    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
684        write!(f, "ErrorSink")
685    }
686}
687
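/// Last-resort handler for errors that no error scope or user-provided handler
/// captured: it logs the failure and panics.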
688#[track_caller]
689fn default_error_handler(err: crate::Error) -> ! {
690    log::error!("Handling wgpu errors as fatal by default");
691    panic!("wgpu error: {err}\n");
692}
693
694impl From<CreateShaderModuleError> for CompilationInfo {
695    fn from(value: CreateShaderModuleError) -> Self {
696        match value {
697            #[cfg(feature = "wgsl")]
698            CreateShaderModuleError::Parsing(v) => v.into(),
699            #[cfg(feature = "glsl")]
700            CreateShaderModuleError::ParsingGlsl(v) => v.into(),
701            #[cfg(feature = "spirv")]
702            CreateShaderModuleError::ParsingSpirV(v) => v.into(),
703            CreateShaderModuleError::Validation(v) => v.into(),
704            // Device errors are reported through the error sink, and are not compilation errors.
705            // Same goes for native shader module generation errors.
706            CreateShaderModuleError::Device(_) | CreateShaderModuleError::Generation => {
707                CompilationInfo {
708                    messages: Vec::new(),
709                }
710            }
711            // Everything else is an error message without location information.
712            _ => CompilationInfo {
713                messages: vec![CompilationMessage {
714                    message: value.to_string(),
715                    message_type: CompilationMessageType::Error,
716                    location: None,
717                }],
718            },
719        }
720    }
721}
722
723#[derive(Debug)]
724pub struct CoreQueueWriteBuffer {
725    buffer_id: wgc::id::StagingBufferId,
726    mapping: CoreBufferMappedRange,
727}
728
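/// A raw pointer/length pair describing a currently mapped region of a buffer.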
729#[derive(Debug)]
730pub struct CoreBufferMappedRange {
731    ptr: NonNull<u8>,
732    size: usize,
733}
734
735#[cfg(send_sync)]
736unsafe impl Send for CoreBufferMappedRange {}
737#[cfg(send_sync)]
738unsafe impl Sync for CoreBufferMappedRange {}
739
740impl Drop for CoreBufferMappedRange {
741    fn drop(&mut self) {
742        // Intentionally left blank so that `BufferMappedRange` still
743        // implements `Drop`, to match the web backend
744    }
745}
746
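// Equality, ordering, and hashing for these wrappers are proxied to the
// underlying wgc id (or, for the context itself, to the `Arc` address).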
747crate::cmp::impl_eq_ord_hash_arc_address!(ContextWgpuCore => .0);
748crate::cmp::impl_eq_ord_hash_proxy!(CoreAdapter => .id);
749crate::cmp::impl_eq_ord_hash_proxy!(CoreDevice => .id);
750crate::cmp::impl_eq_ord_hash_proxy!(CoreQueue => .id);
751crate::cmp::impl_eq_ord_hash_proxy!(CoreShaderModule => .id);
752crate::cmp::impl_eq_ord_hash_proxy!(CoreBindGroupLayout => .id);
753crate::cmp::impl_eq_ord_hash_proxy!(CoreBindGroup => .id);
754crate::cmp::impl_eq_ord_hash_proxy!(CoreTextureView => .id);
755crate::cmp::impl_eq_ord_hash_proxy!(CoreSampler => .id);
756crate::cmp::impl_eq_ord_hash_proxy!(CoreBuffer => .id);
757crate::cmp::impl_eq_ord_hash_proxy!(CoreTexture => .id);
758crate::cmp::impl_eq_ord_hash_proxy!(CoreExternalTexture => .id);
759crate::cmp::impl_eq_ord_hash_proxy!(CoreBlas => .id);
760crate::cmp::impl_eq_ord_hash_proxy!(CoreTlas => .id);
761crate::cmp::impl_eq_ord_hash_proxy!(CoreQuerySet => .id);
762crate::cmp::impl_eq_ord_hash_proxy!(CorePipelineLayout => .id);
763crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderPipeline => .id);
764crate::cmp::impl_eq_ord_hash_proxy!(CoreComputePipeline => .id);
765crate::cmp::impl_eq_ord_hash_proxy!(CorePipelineCache => .id);
766crate::cmp::impl_eq_ord_hash_proxy!(CoreCommandEncoder => .id);
767crate::cmp::impl_eq_ord_hash_proxy!(CoreComputePass => .id);
768crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderPass => .id);
769crate::cmp::impl_eq_ord_hash_proxy!(CoreCommandBuffer => .id);
770crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderBundleEncoder => .id);
771crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderBundle => .id);
772crate::cmp::impl_eq_ord_hash_proxy!(CoreSurface => .id);
773crate::cmp::impl_eq_ord_hash_proxy!(CoreSurfaceOutputDetail => .surface_id);
774crate::cmp::impl_eq_ord_hash_proxy!(CoreQueueWriteBuffer => .mapping.ptr);
775crate::cmp::impl_eq_ord_hash_proxy!(CoreBufferMappedRange => .ptr);
776
777impl dispatch::InstanceInterface for ContextWgpuCore {
778    fn new(desc: &wgt::InstanceDescriptor) -> Self
779    where
780        Self: Sized,
781    {
782        Self(Arc::new(wgc::global::Global::new("wgpu", desc)))
783    }
784
785    unsafe fn create_surface(
786        &self,
787        target: crate::api::SurfaceTargetUnsafe,
788    ) -> Result<dispatch::DispatchSurface, crate::CreateSurfaceError> {
789        let id = match target {
790            SurfaceTargetUnsafe::RawHandle {
791                raw_display_handle,
792                raw_window_handle,
793            } => unsafe {
794                self.0
795                    .instance_create_surface(raw_display_handle, raw_window_handle, None)
796            },
797
798            #[cfg(all(unix, not(target_vendor = "apple"), not(target_family = "wasm")))]
799            SurfaceTargetUnsafe::Drm {
800                fd,
801                plane,
802                connector_id,
803                width,
804                height,
805                refresh_rate,
806            } => unsafe {
807                self.0.instance_create_surface_from_drm(
808                    fd,
809                    plane,
810                    connector_id,
811                    width,
812                    height,
813                    refresh_rate,
814                    None,
815                )
816            },
817
818            #[cfg(metal)]
819            SurfaceTargetUnsafe::CoreAnimationLayer(layer) => unsafe {
820                self.0.instance_create_surface_metal(layer, None)
821            },
822
823            #[cfg(dx12)]
824            SurfaceTargetUnsafe::CompositionVisual(visual) => unsafe {
825                self.0.instance_create_surface_from_visual(visual, None)
826            },
827
828            #[cfg(dx12)]
829            SurfaceTargetUnsafe::SurfaceHandle(surface_handle) => unsafe {
830                self.0
831                    .instance_create_surface_from_surface_handle(surface_handle, None)
832            },
833
834            #[cfg(dx12)]
835            SurfaceTargetUnsafe::SwapChainPanel(swap_chain_panel) => unsafe {
836                self.0
837                    .instance_create_surface_from_swap_chain_panel(swap_chain_panel, None)
838            },
839        }?;
840
841        Ok(CoreSurface {
842            context: self.clone(),
843            id,
844            configured_device: Mutex::default(),
845            error_sink: Mutex::default(),
846        }
847        .into())
848    }
849
850    fn request_adapter(
851        &self,
852        options: &crate::api::RequestAdapterOptions<'_, '_>,
853    ) -> Pin<Box<dyn dispatch::RequestAdapterFuture>> {
854        let id = self.0.request_adapter(
855            &wgc::instance::RequestAdapterOptions {
856                power_preference: options.power_preference,
857                force_fallback_adapter: options.force_fallback_adapter,
858                compatible_surface: options
859                    .compatible_surface
860                    .map(|surface| surface.inner.as_core().id),
861            },
862            wgt::Backends::all(),
863            None,
864        );
865        let adapter = id.map(|id| {
866            let core = CoreAdapter {
867                context: self.clone(),
868                id,
869            };
870            let generic: dispatch::DispatchAdapter = core.into();
871            generic
872        });
873        Box::pin(ready(adapter))
874    }
875
876    fn poll_all_devices(&self, force_wait: bool) -> bool {
877        match self.0.poll_all_devices(force_wait) {
878            Ok(all_queue_empty) => all_queue_empty,
879            Err(err) => self.handle_error_fatal(err, "Instance::poll_all_devices"),
880        }
881    }
882
883    #[cfg(feature = "wgsl")]
884    fn wgsl_language_features(&self) -> crate::WgslLanguageFeatures {
885        use wgc::naga::front::wgsl::ImplementedLanguageExtension;
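        // Map every WGSL language extension implemented by naga onto the
        // corresponding public `WgslLanguageFeatures` flag.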
886        ImplementedLanguageExtension::all().iter().copied().fold(
887            crate::WgslLanguageFeatures::empty(),
888            |acc, wle| {
889                acc | match wle {
890                    ImplementedLanguageExtension::ReadOnlyAndReadWriteStorageTextures => {
891                        crate::WgslLanguageFeatures::ReadOnlyAndReadWriteStorageTextures
892                    }
893                    ImplementedLanguageExtension::Packed4x8IntegerDotProduct => {
894                        crate::WgslLanguageFeatures::Packed4x8IntegerDotProduct
895                    }
896                    ImplementedLanguageExtension::PointerCompositeAccess => {
897                        crate::WgslLanguageFeatures::PointerCompositeAccess
898                    }
899                }
900            },
901        )
902    }
903}
904
905impl dispatch::AdapterInterface for CoreAdapter {
906    fn request_device(
907        &self,
908        desc: &crate::DeviceDescriptor<'_>,
909    ) -> Pin<Box<dyn dispatch::RequestDeviceFuture>> {
910        if !matches!(desc.trace, wgt::Trace::Off) {
911            log::error!(
912                "
913                Feature 'trace' has been removed temporarily; \
914                see https://github.com/gfx-rs/wgpu/issues/5974. \
915                The `trace` parameter will have no effect."
916            );
917        }
918
919        let res = self.context.0.adapter_request_device(
920            self.id,
921            &desc.map_label(|l| l.map(Borrowed)),
922            None,
923            None,
924        );
925        let (device_id, queue_id) = match res {
926            Ok(ids) => ids,
927            Err(err) => {
928                return Box::pin(ready(Err(err.into())));
929            }
930        };
931        let error_sink = Arc::new(Mutex::new(ErrorSinkRaw::new()));
932        let device = CoreDevice {
933            context: self.context.clone(),
934            id: device_id,
935            error_sink: error_sink.clone(),
936            features: desc.required_features,
937        };
938        let queue = CoreQueue {
939            context: self.context.clone(),
940            id: queue_id,
941            error_sink,
942        };
943        Box::pin(ready(Ok((device.into(), queue.into()))))
944    }
945
946    fn is_surface_supported(&self, surface: &dispatch::DispatchSurface) -> bool {
947        let surface = surface.as_core();
948
949        self.context
950            .0
951            .adapter_is_surface_supported(self.id, surface.id)
952    }
953
954    fn features(&self) -> crate::Features {
955        self.context.0.adapter_features(self.id)
956    }
957
958    fn limits(&self) -> crate::Limits {
959        self.context.0.adapter_limits(self.id)
960    }
961
962    fn downlevel_capabilities(&self) -> crate::DownlevelCapabilities {
963        self.context.0.adapter_downlevel_capabilities(self.id)
964    }
965
966    fn get_info(&self) -> crate::AdapterInfo {
967        self.context.0.adapter_get_info(self.id)
968    }
969
970    fn get_texture_format_features(
971        &self,
972        format: crate::TextureFormat,
973    ) -> crate::TextureFormatFeatures {
974        self.context
975            .0
976            .adapter_get_texture_format_features(self.id, format)
977    }
978
979    fn get_presentation_timestamp(&self) -> crate::PresentationTimestamp {
980        self.context.0.adapter_get_presentation_timestamp(self.id)
981    }
982}
983
984impl Drop for CoreAdapter {
985    fn drop(&mut self) {
986        self.context.0.adapter_drop(self.id)
987    }
988}
989
990impl dispatch::DeviceInterface for CoreDevice {
991    fn features(&self) -> crate::Features {
992        self.context.0.device_features(self.id)
993    }
994
995    fn limits(&self) -> crate::Limits {
996        self.context.0.device_limits(self.id)
997    }
998
999    // If no shader-source feature is enabled we cannot construct a module, so most of this function is unreachable.
1000    #[cfg_attr(
1001        not(any(
1002            feature = "spirv",
1003            feature = "glsl",
1004            feature = "wgsl",
1005            feature = "naga-ir"
1006        )),
1007        expect(unused)
1008    )]
1009    fn create_shader_module(
1010        &self,
1011        desc: crate::ShaderModuleDescriptor<'_>,
1012        shader_bound_checks: wgt::ShaderRuntimeChecks,
1013    ) -> dispatch::DispatchShaderModule {
1014        let descriptor = wgc::pipeline::ShaderModuleDescriptor {
1015            label: desc.label.map(Borrowed),
1016            runtime_checks: shader_bound_checks,
1017        };
1018        let source = match desc.source {
1019            #[cfg(feature = "spirv")]
1020            ShaderSource::SpirV(ref spv) => {
1021                // Parse the given shader code and store its representation.
1022                let options = naga::front::spv::Options {
1023                    adjust_coordinate_space: false, // we require NDC_Y_UP feature
1024                    strict_capabilities: true,
1025                    block_ctx_dump_prefix: None,
1026                };
1027                wgc::pipeline::ShaderModuleSource::SpirV(Borrowed(spv), options)
1028            }
1029            #[cfg(feature = "glsl")]
1030            ShaderSource::Glsl {
1031                ref shader,
1032                stage,
1033                defines,
1034            } => {
1035                let options = naga::front::glsl::Options {
1036                    stage,
1037                    defines: defines
1038                        .iter()
1039                        .map(|&(key, value)| (String::from(key), String::from(value)))
1040                        .collect(),
1041                };
1042                wgc::pipeline::ShaderModuleSource::Glsl(Borrowed(shader), options)
1043            }
1044            #[cfg(feature = "wgsl")]
1045            ShaderSource::Wgsl(ref code) => wgc::pipeline::ShaderModuleSource::Wgsl(Borrowed(code)),
1046            #[cfg(feature = "naga-ir")]
1047            ShaderSource::Naga(module) => wgc::pipeline::ShaderModuleSource::Naga(module),
1048            ShaderSource::Dummy(_) => panic!("found `ShaderSource::Dummy`"),
1049        };
1050        let (id, error) =
1051            self.context
1052                .0
1053                .device_create_shader_module(self.id, &descriptor, source, None);
1054        let compilation_info = match error {
1055            Some(cause) => {
1056                self.context.handle_error(
1057                    &self.error_sink,
1058                    cause.clone(),
1059                    desc.label,
1060                    "Device::create_shader_module",
1061                );
1062                CompilationInfo::from(cause)
1063            }
1064            None => CompilationInfo { messages: vec![] },
1065        };
1066
1067        CoreShaderModule {
1068            context: self.context.clone(),
1069            id,
1070            compilation_info,
1071        }
1072        .into()
1073    }
1074
1075    unsafe fn create_shader_module_passthrough(
1076        &self,
1077        desc: &crate::ShaderModuleDescriptorPassthrough<'_>,
1078    ) -> dispatch::DispatchShaderModule {
1079        let desc = desc.map_label(|l| l.map(Cow::from));
1080        let (id, error) = unsafe {
1081            self.context
1082                .0
1083                .device_create_shader_module_passthrough(self.id, &desc, None)
1084        };
1085
1086        let compilation_info = match error {
1087            Some(cause) => {
1088                self.context.handle_error(
1089                    &self.error_sink,
1090                    cause.clone(),
1091                    desc.label.as_deref(),
1092                    "Device::create_shader_module_passthrough",
1093                );
1094                CompilationInfo::from(cause)
1095            }
1096            None => CompilationInfo { messages: vec![] },
1097        };
1098
1099        CoreShaderModule {
1100            context: self.context.clone(),
1101            id,
1102            compilation_info,
1103        }
1104        .into()
1105    }
1106
1107    fn create_bind_group_layout(
1108        &self,
1109        desc: &crate::BindGroupLayoutDescriptor<'_>,
1110    ) -> dispatch::DispatchBindGroupLayout {
1111        let descriptor = wgc::binding_model::BindGroupLayoutDescriptor {
1112            label: desc.label.map(Borrowed),
1113            entries: Borrowed(desc.entries),
1114        };
1115        let (id, error) =
1116            self.context
1117                .0
1118                .device_create_bind_group_layout(self.id, &descriptor, None);
1119        if let Some(cause) = error {
1120            self.context.handle_error(
1121                &self.error_sink,
1122                cause,
1123                desc.label,
1124                "Device::create_bind_group_layout",
1125            );
1126        }
1127        CoreBindGroupLayout {
1128            context: self.context.clone(),
1129            id,
1130        }
1131        .into()
1132    }
1133
1134    fn create_bind_group(
1135        &self,
1136        desc: &crate::BindGroupDescriptor<'_>,
1137    ) -> dispatch::DispatchBindGroup {
1138        use wgc::binding_model as bm;
1139
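        // The wgc descriptor borrows slices for array bindings, so gather all
        // array elements into owned Vecs up front and hand out sub-slices of
        // them while mapping the entries below.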
1140        let mut arrayed_texture_views = Vec::new();
1141        let mut arrayed_samplers = Vec::new();
1142        if self.features.contains(Features::TEXTURE_BINDING_ARRAY) {
1143            // gather all the array view IDs first
1144            for entry in desc.entries.iter() {
1145                if let BindingResource::TextureViewArray(array) = entry.resource {
1146                    arrayed_texture_views.extend(array.iter().map(|view| view.inner.as_core().id));
1147                }
1148                if let BindingResource::SamplerArray(array) = entry.resource {
1149                    arrayed_samplers.extend(array.iter().map(|sampler| sampler.inner.as_core().id));
1150                }
1151            }
1152        }
1153        let mut remaining_arrayed_texture_views = &arrayed_texture_views[..];
1154        let mut remaining_arrayed_samplers = &arrayed_samplers[..];
1155
1156        let mut arrayed_buffer_bindings = Vec::new();
1157        if self.features.contains(Features::BUFFER_BINDING_ARRAY) {
1158            // gather all the buffers first
1159            for entry in desc.entries.iter() {
1160                if let BindingResource::BufferArray(array) = entry.resource {
1161                    arrayed_buffer_bindings.extend(array.iter().map(|binding| bm::BufferBinding {
1162                        buffer: binding.buffer.inner.as_core().id,
1163                        offset: binding.offset,
1164                        size: binding.size,
1165                    }));
1166                }
1167            }
1168        }
1169        let mut remaining_arrayed_buffer_bindings = &arrayed_buffer_bindings[..];
1170
1171        let entries = desc
1172            .entries
1173            .iter()
1174            .map(|entry| bm::BindGroupEntry {
1175                binding: entry.binding,
1176                resource: match entry.resource {
1177                    BindingResource::Buffer(BufferBinding {
1178                        buffer,
1179                        offset,
1180                        size,
1181                    }) => bm::BindingResource::Buffer(bm::BufferBinding {
1182                        buffer: buffer.inner.as_core().id,
1183                        offset,
1184                        size,
1185                    }),
1186                    BindingResource::BufferArray(array) => {
1187                        let slice = &remaining_arrayed_buffer_bindings[..array.len()];
1188                        remaining_arrayed_buffer_bindings =
1189                            &remaining_arrayed_buffer_bindings[array.len()..];
1190                        bm::BindingResource::BufferArray(Borrowed(slice))
1191                    }
1192                    BindingResource::Sampler(sampler) => {
1193                        bm::BindingResource::Sampler(sampler.inner.as_core().id)
1194                    }
1195                    BindingResource::SamplerArray(array) => {
1196                        let slice = &remaining_arrayed_samplers[..array.len()];
1197                        remaining_arrayed_samplers = &remaining_arrayed_samplers[array.len()..];
1198                        bm::BindingResource::SamplerArray(Borrowed(slice))
1199                    }
1200                    BindingResource::TextureView(texture_view) => {
1201                        bm::BindingResource::TextureView(texture_view.inner.as_core().id)
1202                    }
1203                    BindingResource::TextureViewArray(array) => {
1204                        let slice = &remaining_arrayed_texture_views[..array.len()];
1205                        remaining_arrayed_texture_views =
1206                            &remaining_arrayed_texture_views[array.len()..];
1207                        bm::BindingResource::TextureViewArray(Borrowed(slice))
1208                    }
1209                    BindingResource::AccelerationStructure(acceleration_structure) => {
1210                        bm::BindingResource::AccelerationStructure(
1211                            acceleration_structure.inner.as_core().id,
1212                        )
1213                    }
1214                    BindingResource::ExternalTexture(external_texture) => {
1215                        bm::BindingResource::ExternalTexture(external_texture.inner.as_core().id)
1216                    }
1217                },
1218            })
1219            .collect::<Vec<_>>();
1220        let descriptor = bm::BindGroupDescriptor {
1221            label: desc.label.as_ref().map(|label| Borrowed(&label[..])),
1222            layout: desc.layout.inner.as_core().id,
1223            entries: Borrowed(&entries),
1224        };
1225
1226        let (id, error) = self
1227            .context
1228            .0
1229            .device_create_bind_group(self.id, &descriptor, None);
1230        if let Some(cause) = error {
1231            self.context.handle_error(
1232                &self.error_sink,
1233                cause,
1234                desc.label,
1235                "Device::create_bind_group",
1236            );
1237        }
1238        CoreBindGroup {
1239            context: self.context.clone(),
1240            id,
1241        }
1242        .into()
1243    }
1244
1245    fn create_pipeline_layout(
1246        &self,
1247        desc: &crate::PipelineLayoutDescriptor<'_>,
1248    ) -> dispatch::DispatchPipelineLayout {
1249        // The device's bind group limit is always less than or equal to hal::MAX_BIND_GROUPS,
1250        // so this assertion also guards the ArrayVec collection below.
1251        assert!(
1252            desc.bind_group_layouts.len() <= wgc::MAX_BIND_GROUPS,
1253            "Bind group layout count {} exceeds device bind group limit {}",
1254            desc.bind_group_layouts.len(),
1255            wgc::MAX_BIND_GROUPS
1256        );
1257
1258        let temp_layouts = desc
1259            .bind_group_layouts
1260            .iter()
1261            .map(|bgl| bgl.inner.as_core().id)
1262            .collect::<ArrayVec<_, { wgc::MAX_BIND_GROUPS }>>();
1263        let descriptor = wgc::binding_model::PipelineLayoutDescriptor {
1264            label: desc.label.map(Borrowed),
1265            bind_group_layouts: Borrowed(&temp_layouts),
1266            push_constant_ranges: Borrowed(desc.push_constant_ranges),
1267        };
1268
1269        let (id, error) = self
1270            .context
1271            .0
1272            .device_create_pipeline_layout(self.id, &descriptor, None);
1273        if let Some(cause) = error {
1274            self.context.handle_error(
1275                &self.error_sink,
1276                cause,
1277                desc.label,
1278                "Device::create_pipeline_layout",
1279            );
1280        }
1281        CorePipelineLayout {
1282            context: self.context.clone(),
1283            id,
1284        }
1285        .into()
1286    }
1287
1288    fn create_render_pipeline(
1289        &self,
1290        desc: &crate::RenderPipelineDescriptor<'_>,
1291    ) -> dispatch::DispatchRenderPipeline {
1292        use wgc::pipeline as pipe;
1293
1294        let vertex_buffers: ArrayVec<_, { wgc::MAX_VERTEX_BUFFERS }> = desc
1295            .vertex
1296            .buffers
1297            .iter()
1298            .map(|vbuf| pipe::VertexBufferLayout {
1299                array_stride: vbuf.array_stride,
1300                step_mode: vbuf.step_mode,
1301                attributes: Borrowed(vbuf.attributes),
1302            })
1303            .collect();
1304
1305        let vert_constants = desc
1306            .vertex
1307            .compilation_options
1308            .constants
1309            .iter()
1310            .map(|&(key, value)| (String::from(key), value))
1311            .collect();
1312
1313        let descriptor = pipe::RenderPipelineDescriptor {
1314            label: desc.label.map(Borrowed),
1315            layout: desc.layout.map(|layout| layout.inner.as_core().id),
1316            vertex: pipe::VertexState {
1317                stage: pipe::ProgrammableStageDescriptor {
1318                    module: desc.vertex.module.inner.as_core().id,
1319                    entry_point: desc.vertex.entry_point.map(Borrowed),
1320                    constants: vert_constants,
1321                    zero_initialize_workgroup_memory: desc
1322                        .vertex
1323                        .compilation_options
1324                        .zero_initialize_workgroup_memory,
1325                },
1326                buffers: Borrowed(&vertex_buffers),
1327            },
1328            primitive: desc.primitive,
1329            depth_stencil: desc.depth_stencil.clone(),
1330            multisample: desc.multisample,
1331            fragment: desc.fragment.as_ref().map(|frag| {
1332                let frag_constants = frag
1333                    .compilation_options
1334                    .constants
1335                    .iter()
1336                    .map(|&(key, value)| (String::from(key), value))
1337                    .collect();
1338                pipe::FragmentState {
1339                    stage: pipe::ProgrammableStageDescriptor {
1340                        module: frag.module.inner.as_core().id,
1341                        entry_point: frag.entry_point.map(Borrowed),
1342                        constants: frag_constants,
1343                        zero_initialize_workgroup_memory: frag
1344                            .compilation_options
1345                            .zero_initialize_workgroup_memory,
1346                    },
1347                    targets: Borrowed(frag.targets),
1348                }
1349            }),
1350            multiview: desc.multiview,
1351            cache: desc.cache.map(|cache| cache.inner.as_core().id),
1352        };
1353
1354        let (id, error) = self
1355            .context
1356            .0
1357            .device_create_render_pipeline(self.id, &descriptor, None);
1358        if let Some(cause) = error {
1359            if let wgc::pipeline::CreateRenderPipelineError::Internal { stage, ref error } = cause {
1360                log::error!("Shader translation error for stage {stage:?}: {error}");
1361                log::error!("Please report it to https://github.com/gfx-rs/wgpu");
1362            }
1363            self.context.handle_error(
1364                &self.error_sink,
1365                cause,
1366                desc.label,
1367                "Device::create_render_pipeline",
1368            );
1369        }
1370        CoreRenderPipeline {
1371            context: self.context.clone(),
1372            id,
1373            error_sink: Arc::clone(&self.error_sink),
1374        }
1375        .into()
1376    }
1377
1378    fn create_mesh_pipeline(
1379        &self,
1380        desc: &crate::MeshPipelineDescriptor<'_>,
1381    ) -> dispatch::DispatchRenderPipeline {
1382        use wgc::pipeline as pipe;
1383
1384        let mesh_constants = desc
1385            .mesh
1386            .compilation_options
1387            .constants
1388            .iter()
1389            .map(|&(key, value)| (String::from(key), value))
1390            .collect();
1391        let descriptor = pipe::MeshPipelineDescriptor {
1392            label: desc.label.map(Borrowed),
1393            task: desc.task.as_ref().map(|task| {
1394                let task_constants = task
1395                    .compilation_options
1396                    .constants
1397                    .iter()
1398                    .map(|&(key, value)| (String::from(key), value))
1399                    .collect();
1400                pipe::TaskState {
1401                    stage: pipe::ProgrammableStageDescriptor {
1402                        module: task.module.inner.as_core().id,
1403                        entry_point: task.entry_point.map(Borrowed),
1404                        constants: task_constants,
1405                        zero_initialize_workgroup_memory: task
1406                            .compilation_options
1407                            .zero_initialize_workgroup_memory,
1409                    },
1410                }
1411            }),
1412            mesh: pipe::MeshState {
1413                stage: pipe::ProgrammableStageDescriptor {
1414                    module: desc.mesh.module.inner.as_core().id,
1415                    entry_point: desc.mesh.entry_point.map(Borrowed),
1416                    constants: mesh_constants,
1417                    zero_initialize_workgroup_memory: desc
1418                        .mesh
1419                        .compilation_options
1420                        .zero_initialize_workgroup_memory,
1421                },
1422            },
1423            layout: desc.layout.map(|layout| layout.inner.as_core().id),
1424            primitive: desc.primitive,
1425            depth_stencil: desc.depth_stencil.clone(),
1426            multisample: desc.multisample,
1427            fragment: desc.fragment.as_ref().map(|frag| {
1428                let frag_constants = frag
1429                    .compilation_options
1430                    .constants
1431                    .iter()
1432                    .map(|&(key, value)| (String::from(key), value))
1433                    .collect();
1434                pipe::FragmentState {
1435                    stage: pipe::ProgrammableStageDescriptor {
1436                        module: frag.module.inner.as_core().id,
1437                        entry_point: frag.entry_point.map(Borrowed),
1438                        constants: frag_constants,
1439                        zero_initialize_workgroup_memory: frag
1440                            .compilation_options
1441                            .zero_initialize_workgroup_memory,
1442                    },
1443                    targets: Borrowed(frag.targets),
1444                }
1445            }),
1446            multiview: desc.multiview,
1447            cache: desc.cache.map(|cache| cache.inner.as_core().id),
1448        };
1449
1450        let (id, error) = self
1451            .context
1452            .0
1453            .device_create_mesh_pipeline(self.id, &descriptor, None);
1454        if let Some(cause) = error {
1455            if let wgc::pipeline::CreateRenderPipelineError::Internal { stage, ref error } = cause {
1456                log::error!("Shader translation error for stage {stage:?}: {error}");
1457                log::error!("Please report it to https://github.com/gfx-rs/wgpu");
1458            }
1459            self.context.handle_error(
1460                &self.error_sink,
1461                cause,
1462                desc.label,
1463                "Device::create_mesh_pipeline",
1464            );
1465        }
1466        CoreRenderPipeline {
1467            context: self.context.clone(),
1468            id,
1469            error_sink: Arc::clone(&self.error_sink),
1470        }
1471        .into()
1472    }
1473
1474    fn create_compute_pipeline(
1475        &self,
1476        desc: &crate::ComputePipelineDescriptor<'_>,
1477    ) -> dispatch::DispatchComputePipeline {
1478        use wgc::pipeline as pipe;
1479
1480        let constants = desc
1481            .compilation_options
1482            .constants
1483            .iter()
1484            .map(|&(key, value)| (String::from(key), value))
1485            .collect();
1486
1487        let descriptor = pipe::ComputePipelineDescriptor {
1488            label: desc.label.map(Borrowed),
1489            layout: desc.layout.map(|pll| pll.inner.as_core().id),
1490            stage: pipe::ProgrammableStageDescriptor {
1491                module: desc.module.inner.as_core().id,
1492                entry_point: desc.entry_point.map(Borrowed),
1493                constants,
1494                zero_initialize_workgroup_memory: desc
1495                    .compilation_options
1496                    .zero_initialize_workgroup_memory,
1497            },
1498            cache: desc.cache.map(|cache| cache.inner.as_core().id),
1499        };
1500
1501        let (id, error) = self
1502            .context
1503            .0
1504            .device_create_compute_pipeline(self.id, &descriptor, None);
1505        if let Some(cause) = error {
1506            if let wgc::pipeline::CreateComputePipelineError::Internal(ref error) = cause {
1507                log::error!(
1508                    "Shader translation error for stage {:?}: {}",
1509                    wgt::ShaderStages::COMPUTE,
1510                    error
1511                );
1512                log::error!("Please report it to https://github.com/gfx-rs/wgpu");
1513            }
1514            self.context.handle_error(
1515                &self.error_sink,
1516                cause,
1517                desc.label,
1518                "Device::create_compute_pipeline",
1519            );
1520        }
1521        CoreComputePipeline {
1522            context: self.context.clone(),
1523            id,
1524            error_sink: Arc::clone(&self.error_sink),
1525        }
1526        .into()
1527    }
1528
1529    unsafe fn create_pipeline_cache(
1530        &self,
1531        desc: &crate::PipelineCacheDescriptor<'_>,
1532    ) -> dispatch::DispatchPipelineCache {
1533        use wgc::pipeline as pipe;
1534
1535        let descriptor = pipe::PipelineCacheDescriptor {
1536            label: desc.label.map(Borrowed),
1537            data: desc.data.map(Borrowed),
1538            fallback: desc.fallback,
1539        };
1540        let (id, error) = unsafe {
1541            self.context
1542                .0
1543                .device_create_pipeline_cache(self.id, &descriptor, None)
1544        };
1545        if let Some(cause) = error {
1546            self.context.handle_error(
1547                &self.error_sink,
1548                cause,
1549                desc.label,
1550                "Device::create_pipeline_cache",
1551            );
1552        }
1553        CorePipelineCache {
1554            context: self.context.clone(),
1555            id,
1556        }
1557        .into()
1558    }
1559
1560    fn create_buffer(&self, desc: &crate::BufferDescriptor<'_>) -> dispatch::DispatchBuffer {
1561        let (id, error) = self.context.0.device_create_buffer(
1562            self.id,
1563            &desc.map_label(|l| l.map(Borrowed)),
1564            None,
1565        );
1566        if let Some(cause) = error {
1567            self.context
1568                .handle_error(&self.error_sink, cause, desc.label, "Device::create_buffer");
1569        }
1570
1571        CoreBuffer {
1572            context: self.context.clone(),
1573            id,
1574            error_sink: Arc::clone(&self.error_sink),
1575        }
1576        .into()
1577    }
1578
1579    fn create_texture(&self, desc: &crate::TextureDescriptor<'_>) -> dispatch::DispatchTexture {
1580        let wgt_desc = desc.map_label_and_view_formats(|l| l.map(Borrowed), |v| v.to_vec());
1581        let (id, error) = self
1582            .context
1583            .0
1584            .device_create_texture(self.id, &wgt_desc, None);
1585        if let Some(cause) = error {
1586            self.context.handle_error(
1587                &self.error_sink,
1588                cause,
1589                desc.label,
1590                "Device::create_texture",
1591            );
1592        }
1593
1594        CoreTexture {
1595            context: self.context.clone(),
1596            id,
1597            error_sink: Arc::clone(&self.error_sink),
1598        }
1599        .into()
1600    }
1601
1602    fn create_external_texture(
1603        &self,
1604        desc: &crate::ExternalTextureDescriptor<'_>,
1605        planes: &[&crate::TextureView],
1606    ) -> dispatch::DispatchExternalTexture {
1607        let wgt_desc = desc.map_label(|l| l.map(Borrowed));
1608        let planes = planes
1609            .iter()
1610            .map(|plane| plane.inner.as_core().id)
1611            .collect::<Vec<_>>();
1612        let (id, error) = self
1613            .context
1614            .0
1615            .device_create_external_texture(self.id, &wgt_desc, &planes, None);
1616        if let Some(cause) = error {
1617            self.context.handle_error(
1618                &self.error_sink,
1619                cause,
1620                desc.label,
1621                "Device::create_external_texture",
1622            );
1623        }
1624
1625        CoreExternalTexture {
1626            context: self.context.clone(),
1627            id,
1628        }
1629        .into()
1630    }
1631
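    // Creates a bottom-level acceleration structure. Besides the BLAS wrapper this also
    // returns the optional raw acceleration-structure handle reported by wgpu-core, when
    // the backend exposes one.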
1632    fn create_blas(
1633        &self,
1634        desc: &crate::CreateBlasDescriptor<'_>,
1635        sizes: crate::BlasGeometrySizeDescriptors,
1636    ) -> (Option<u64>, dispatch::DispatchBlas) {
1637        let global = &self.context.0;
1638        let (id, handle, error) =
1639            global.device_create_blas(self.id, &desc.map_label(|l| l.map(Borrowed)), sizes, None);
1640        if let Some(cause) = error {
1641            self.context
1642                .handle_error(&self.error_sink, cause, desc.label, "Device::create_blas");
1643        }
1644        (
1645            handle,
1646            CoreBlas {
1647                context: self.context.clone(),
1648                id,
1649                error_sink: Arc::clone(&self.error_sink),
1650            }
1651            .into(),
1652        )
1653    }
1654
1655    fn create_tlas(&self, desc: &crate::CreateTlasDescriptor<'_>) -> dispatch::DispatchTlas {
1656        let global = &self.context.0;
1657        let (id, error) =
1658            global.device_create_tlas(self.id, &desc.map_label(|l| l.map(Borrowed)), None);
1659        if let Some(cause) = error {
1660            self.context
1661                .handle_error(&self.error_sink, cause, desc.label, "Device::create_tlas");
1662        }
1663        CoreTlas {
1664            context: self.context.clone(),
1665            id,
1666            // error_sink: Arc::clone(&self.error_sink),
1667        }
1668        .into()
1669    }
1670
1671    fn create_sampler(&self, desc: &crate::SamplerDescriptor<'_>) -> dispatch::DispatchSampler {
1672        let descriptor = wgc::resource::SamplerDescriptor {
1673            label: desc.label.map(Borrowed),
1674            address_modes: [
1675                desc.address_mode_u,
1676                desc.address_mode_v,
1677                desc.address_mode_w,
1678            ],
1679            mag_filter: desc.mag_filter,
1680            min_filter: desc.min_filter,
1681            mipmap_filter: desc.mipmap_filter,
1682            lod_min_clamp: desc.lod_min_clamp,
1683            lod_max_clamp: desc.lod_max_clamp,
1684            compare: desc.compare,
1685            anisotropy_clamp: desc.anisotropy_clamp,
1686            border_color: desc.border_color,
1687        };
1688
1689        let (id, error) = self
1690            .context
1691            .0
1692            .device_create_sampler(self.id, &descriptor, None);
1693        if let Some(cause) = error {
1694            self.context.handle_error(
1695                &self.error_sink,
1696                cause,
1697                desc.label,
1698                "Device::create_sampler",
1699            );
1700        }
1701        CoreSampler {
1702            context: self.context.clone(),
1703            id,
1704        }
1705        .into()
1706    }
1707
1708    fn create_query_set(&self, desc: &crate::QuerySetDescriptor<'_>) -> dispatch::DispatchQuerySet {
1709        let (id, error) = self.context.0.device_create_query_set(
1710            self.id,
1711            &desc.map_label(|l| l.map(Borrowed)),
1712            None,
1713        );
1714        if let Some(cause) = error {
1715            self.context
1716                .handle_error_nolabel(&self.error_sink, cause, "Device::create_query_set");
1717        }
1718        CoreQuerySet {
1719            context: self.context.clone(),
1720            id,
1721        }
1722        .into()
1723    }
1724
1725    fn create_command_encoder(
1726        &self,
1727        desc: &crate::CommandEncoderDescriptor<'_>,
1728    ) -> dispatch::DispatchCommandEncoder {
1729        let (id, error) = self.context.0.device_create_command_encoder(
1730            self.id,
1731            &desc.map_label(|l| l.map(Borrowed)),
1732            None,
1733        );
1734        if let Some(cause) = error {
1735            self.context.handle_error(
1736                &self.error_sink,
1737                cause,
1738                desc.label,
1739                "Device::create_command_encoder",
1740            );
1741        }
1742
1743        CoreCommandEncoder {
1744            context: self.context.clone(),
1745            id,
1746            error_sink: Arc::clone(&self.error_sink),
1747        }
1748        .into()
1749    }
1750
1751    fn create_render_bundle_encoder(
1752        &self,
1753        desc: &crate::RenderBundleEncoderDescriptor<'_>,
1754    ) -> dispatch::DispatchRenderBundleEncoder {
1755        let descriptor = wgc::command::RenderBundleEncoderDescriptor {
1756            label: desc.label.map(Borrowed),
1757            color_formats: Borrowed(desc.color_formats),
1758            depth_stencil: desc.depth_stencil,
1759            sample_count: desc.sample_count,
1760            multiview: desc.multiview,
1761        };
1762        let encoder = match wgc::command::RenderBundleEncoder::new(&descriptor, self.id, None) {
1763            Ok(encoder) => encoder,
1764            Err(e) => panic!("Error in Device::create_render_bundle_encoder: {e}"),
1765        };
1766
1767        CoreRenderBundleEncoder {
1768            context: self.context.clone(),
1769            encoder,
1770            id: crate::cmp::Identifier::create(),
1771        }
1772        .into()
1773    }
1774
1775    fn set_device_lost_callback(&self, device_lost_callback: dispatch::BoxDeviceLostCallback) {
1776        self.context
1777            .0
1778            .device_set_device_lost_closure(self.id, device_lost_callback);
1779    }
1780
1781    fn on_uncaptured_error(&self, handler: Arc<dyn crate::UncapturedErrorHandler>) {
1782        let mut error_sink = self.error_sink.lock();
1783        error_sink.uncaptured_handler = Some(handler);
1784    }
1785
1786    fn push_error_scope(&self, filter: crate::ErrorFilter) {
1787        let mut error_sink = self.error_sink.lock();
1788        error_sink.scopes.push(ErrorScope {
1789            error: None,
1790            filter,
1791        });
1792    }
1793
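    // Pops the innermost error scope and resolves immediately with whatever error (if
    // any) was captured while it was open. Scopes are a plain stack on the error sink,
    // so push/pop must stay balanced; popping with no open scope panics on the unwrap.
    //
    // Illustrative wgpu-level usage (a sketch, not code from this file):
    //     device.push_error_scope(wgpu::ErrorFilter::Validation);
    //     let buffer = device.create_buffer(&desc);
    //     let maybe_error = device.pop_error_scope().await;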
1794    fn pop_error_scope(&self) -> Pin<Box<dyn dispatch::PopErrorScopeFuture>> {
1795        let mut error_sink = self.error_sink.lock();
1796        let scope = error_sink.scopes.pop().unwrap();
1797        Box::pin(ready(scope.error))
1798    }
1799
1800    unsafe fn start_graphics_debugger_capture(&self) {
1801        unsafe {
1802            self.context
1803                .0
1804                .device_start_graphics_debugger_capture(self.id)
1805        };
1806    }
1807
1808    unsafe fn stop_graphics_debugger_capture(&self) {
1809        unsafe {
1810            self.context
1811                .0
1812                .device_stop_graphics_debugger_capture(self.id)
1813        };
1814    }
1815
1816    fn poll(&self, poll_type: wgt::PollType<u64>) -> Result<crate::PollStatus, crate::PollError> {
1817        match self.context.0.device_poll(self.id, poll_type) {
1818            Ok(status) => Ok(status),
1819            Err(err) => {
1820                if let Some(poll_error) = err.to_poll_error() {
1821                    return Err(poll_error);
1822                }
1823
1824                self.context.handle_error_fatal(err, "Device::poll")
1825            }
1826        }
1827    }
1828
1829    fn get_internal_counters(&self) -> crate::InternalCounters {
1830        self.context.0.device_get_internal_counters(self.id)
1831    }
1832
1833    fn generate_allocator_report(&self) -> Option<wgt::AllocatorReport> {
1834        self.context.0.device_generate_allocator_report(self.id)
1835    }
1836
1837    fn destroy(&self) {
1838        self.context.0.device_destroy(self.id);
1839    }
1840}
1841
1842impl Drop for CoreDevice {
1843    fn drop(&mut self) {
1844        self.context.0.device_drop(self.id)
1845    }
1846}
1847
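// Queue operations. Most wgpu-core queue entry points return a Result; failures are not
// surfaced to the caller but converted into uncaptured errors / error-scope entries via
// `handle_error_nolabel`, matching WebGPU's error model.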
1848impl dispatch::QueueInterface for CoreQueue {
1849    fn write_buffer(
1850        &self,
1851        buffer: &dispatch::DispatchBuffer,
1852        offset: crate::BufferAddress,
1853        data: &[u8],
1854    ) {
1855        let buffer = buffer.as_core();
1856
1857        match self
1858            .context
1859            .0
1860            .queue_write_buffer(self.id, buffer.id, offset, data)
1861        {
1862            Ok(()) => (),
1863            Err(err) => {
1864                self.context
1865                    .handle_error_nolabel(&self.error_sink, err, "Queue::write_buffer")
1866            }
1867        }
1868    }
1869
1870    fn create_staging_buffer(
1871        &self,
1872        size: crate::BufferSize,
1873    ) -> Option<dispatch::DispatchQueueWriteBuffer> {
1874        match self
1875            .context
1876            .0
1877            .queue_create_staging_buffer(self.id, size, None)
1878        {
1879            Ok((buffer_id, ptr)) => Some(
1880                CoreQueueWriteBuffer {
1881                    buffer_id,
1882                    mapping: CoreBufferMappedRange {
1883                        ptr,
1884                        size: size.get() as usize,
1885                    },
1886                }
1887                .into(),
1888            ),
1889            Err(err) => {
1890                self.context.handle_error_nolabel(
1891                    &self.error_sink,
1892                    err,
1893                    "Queue::write_buffer_with",
1894                );
1895                None
1896            }
1897        }
1898    }
1899
1900    fn validate_write_buffer(
1901        &self,
1902        buffer: &dispatch::DispatchBuffer,
1903        offset: wgt::BufferAddress,
1904        size: wgt::BufferSize,
1905    ) -> Option<()> {
1906        let buffer = buffer.as_core();
1907
1908        match self
1909            .context
1910            .0
1911            .queue_validate_write_buffer(self.id, buffer.id, offset, size)
1912        {
1913            Ok(()) => Some(()),
1914            Err(err) => {
1915                self.context.handle_error_nolabel(
1916                    &self.error_sink,
1917                    err,
1918                    "Queue::write_buffer_with",
1919                );
1920                None
1921            }
1922        }
1923    }
1924
1925    fn write_staging_buffer(
1926        &self,
1927        buffer: &dispatch::DispatchBuffer,
1928        offset: crate::BufferAddress,
1929        staging_buffer: &dispatch::DispatchQueueWriteBuffer,
1930    ) {
1931        let buffer = buffer.as_core();
1932        let staging_buffer = staging_buffer.as_core();
1933
1934        match self.context.0.queue_write_staging_buffer(
1935            self.id,
1936            buffer.id,
1937            offset,
1938            staging_buffer.buffer_id,
1939        ) {
1940            Ok(()) => (),
1941            Err(err) => {
1942                self.context.handle_error_nolabel(
1943                    &self.error_sink,
1944                    err,
1945                    "Queue::write_buffer_with",
1946                );
1947            }
1948        }
1949    }
1950
1951    fn write_texture(
1952        &self,
1953        texture: crate::TexelCopyTextureInfo<'_>,
1954        data: &[u8],
1955        data_layout: crate::TexelCopyBufferLayout,
1956        size: crate::Extent3d,
1957    ) {
1958        match self.context.0.queue_write_texture(
1959            self.id,
1960            &map_texture_copy_view(texture),
1961            data,
1962            &data_layout,
1963            &size,
1964        ) {
1965            Ok(()) => (),
1966            Err(err) => {
1967                self.context
1968                    .handle_error_nolabel(&self.error_sink, err, "Queue::write_texture")
1969            }
1970        }
1971    }
1972
1973    // This method needs to exist if either webgpu or webgl is enabled,
1974    // but we only actually have an implementation if webgl is enabled.
1975    #[cfg(web)]
1976    #[cfg_attr(not(webgl), expect(unused_variables))]
1977    fn copy_external_image_to_texture(
1978        &self,
1979        source: &crate::CopyExternalImageSourceInfo,
1980        dest: crate::CopyExternalImageDestInfo<&crate::api::Texture>,
1981        size: crate::Extent3d,
1982    ) {
1983        #[cfg(webgl)]
1984        match self.context.0.queue_copy_external_image_to_texture(
1985            self.id,
1986            source,
1987            map_texture_tagged_copy_view(dest),
1988            size,
1989        ) {
1990            Ok(()) => (),
1991            Err(err) => self.context.handle_error_nolabel(
1992                &self.error_sink,
1993                err,
1994                "Queue::copy_external_image_to_texture",
1995            ),
1996        }
1997    }
1998
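    // Collects the command buffers into a SmallVec so their ids can be submitted as one
    // slice. The wrappers are intentionally kept alive until after `queue_submit`
    // returns; dropping them afterwards is what releases each command buffer.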
1999    fn submit(
2000        &self,
2001        command_buffers: &mut dyn Iterator<Item = dispatch::DispatchCommandBuffer>,
2002    ) -> u64 {
2003        let temp_command_buffers = command_buffers.collect::<SmallVec<[_; 4]>>();
2004        let command_buffer_ids = temp_command_buffers
2005            .iter()
2006            .map(|cmdbuf| cmdbuf.as_core().id)
2007            .collect::<SmallVec<[_; 4]>>();
2008
2009        let index = match self.context.0.queue_submit(self.id, &command_buffer_ids) {
2010            Ok(index) => index,
2011            Err((index, err)) => {
2012                self.context
2013                    .handle_error_nolabel(&self.error_sink, err, "Queue::submit");
2014                index
2015            }
2016        };
2017
2018        drop(temp_command_buffers);
2019
2020        index
2021    }
2022
2023    fn get_timestamp_period(&self) -> f32 {
2024        self.context.0.queue_get_timestamp_period(self.id)
2025    }
2026
2027    fn on_submitted_work_done(&self, callback: dispatch::BoxSubmittedWorkDoneCallback) {
2028        self.context
2029            .0
2030            .queue_on_submitted_work_done(self.id, callback);
2031    }
2032
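    // Enqueues a compaction copy of a BLAS and returns the compacted BLAS plus its
    // optional raw handle. The source BLAS is expected to have been prepared via
    // `Blas::prepare_compact_async` first; otherwise wgpu-core reports an error here.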
2033    fn compact_blas(&self, blas: &dispatch::DispatchBlas) -> (Option<u64>, dispatch::DispatchBlas) {
2034        let (id, handle, error) =
2035            self.context
2036                .0
2037                .queue_compact_blas(self.id, blas.as_core().id, None);
2038
2039        if let Some(cause) = error {
2040            self.context
2041                .handle_error_nolabel(&self.error_sink, cause, "Queue::compact_blas");
2042        }
2043        (
2044            handle,
2045            CoreBlas {
2046                context: self.context.clone(),
2047                id,
2048                error_sink: Arc::clone(&self.error_sink),
2049            }
2050            .into(),
2051        )
2052    }
2053}
2054
2055impl Drop for CoreQueue {
2056    fn drop(&mut self) {
2057        self.context.0.queue_drop(self.id)
2058    }
2059}
2060
2061impl dispatch::ShaderModuleInterface for CoreShaderModule {
2062    fn get_compilation_info(&self) -> Pin<Box<dyn dispatch::ShaderCompilationInfoFuture>> {
2063        Box::pin(ready(self.compilation_info.clone()))
2064    }
2065}
2066
2067impl Drop for CoreShaderModule {
2068    fn drop(&mut self) {
2069        self.context.0.shader_module_drop(self.id)
2070    }
2071}
2072
2073impl dispatch::BindGroupLayoutInterface for CoreBindGroupLayout {}
2074
2075impl Drop for CoreBindGroupLayout {
2076    fn drop(&mut self) {
2077        self.context.0.bind_group_layout_drop(self.id)
2078    }
2079}
2080
2081impl dispatch::BindGroupInterface for CoreBindGroup {}
2082
2083impl Drop for CoreBindGroup {
2084    fn drop(&mut self) {
2085        self.context.0.bind_group_drop(self.id)
2086    }
2087}
2088
2089impl dispatch::TextureViewInterface for CoreTextureView {}
2090
2091impl Drop for CoreTextureView {
2092    fn drop(&mut self) {
2093        // TODO: We don't use this error at all?
2094        let _ = self.context.0.texture_view_drop(self.id);
2095    }
2096}
2097
2098impl dispatch::ExternalTextureInterface for CoreExternalTexture {
2099    fn destroy(&self) {
2100        self.context.0.external_texture_destroy(self.id);
2101    }
2102}
2103
2104impl Drop for CoreExternalTexture {
2105    fn drop(&mut self) {
2106        self.context.0.external_texture_drop(self.id);
2107    }
2108}
2109
2110impl dispatch::SamplerInterface for CoreSampler {}
2111
2112impl Drop for CoreSampler {
2113    fn drop(&mut self) {
2114        self.context.0.sampler_drop(self.id)
2115    }
2116}
2117
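// Buffer mapping. `map_async` registers a wgpu-core map operation whose callback fires
// once the request completes (typically driven by polling the device); `get_mapped_range`
// then exposes the mapped pointer and size as a CoreBufferMappedRange.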
2118impl dispatch::BufferInterface for CoreBuffer {
2119    fn map_async(
2120        &self,
2121        mode: crate::MapMode,
2122        range: Range<crate::BufferAddress>,
2123        callback: dispatch::BufferMapCallback,
2124    ) {
2125        let operation = wgc::resource::BufferMapOperation {
2126            host: match mode {
2127                MapMode::Read => wgc::device::HostMap::Read,
2128                MapMode::Write => wgc::device::HostMap::Write,
2129            },
2130            callback: Some(Box::new(|status| {
2131                let res = status.map_err(|_| crate::BufferAsyncError);
2132                callback(res);
2133            })),
2134        };
2135
2136        match self.context.0.buffer_map_async(
2137            self.id,
2138            range.start,
2139            Some(range.end - range.start),
2140            operation,
2141        ) {
2142            Ok(_) => (),
2143            Err(cause) => {
2144                self.context
2145                    .handle_error_nolabel(&self.error_sink, cause, "Buffer::map_async")
2146            }
2147        }
2148    }
2149
2150    fn get_mapped_range(
2151        &self,
2152        sub_range: Range<crate::BufferAddress>,
2153    ) -> dispatch::DispatchBufferMappedRange {
2154        let size = sub_range.end - sub_range.start;
2155        match self
2156            .context
2157            .0
2158            .buffer_get_mapped_range(self.id, sub_range.start, Some(size))
2159        {
2160            Ok((ptr, size)) => CoreBufferMappedRange {
2161                ptr,
2162                size: size as usize,
2163            }
2164            .into(),
2165            Err(err) => self
2166                .context
2167                .handle_error_fatal(err, "Buffer::get_mapped_range"),
2168        }
2169    }
2170
2171    fn unmap(&self) {
2172        match self.context.0.buffer_unmap(self.id) {
2173            Ok(()) => (),
2174            Err(cause) => {
2175                self.context
2176                    .handle_error_nolabel(&self.error_sink, cause, "Buffer::unmap")
2177            }
2178        }
2179    }
2180
2181    fn destroy(&self) {
2182        self.context.0.buffer_destroy(self.id);
2183    }
2184}
2185
2186impl Drop for CoreBuffer {
2187    fn drop(&mut self) {
2188        self.context.0.buffer_drop(self.id)
2189    }
2190}
2191
2192impl dispatch::TextureInterface for CoreTexture {
2193    fn create_view(
2194        &self,
2195        desc: &crate::TextureViewDescriptor<'_>,
2196    ) -> dispatch::DispatchTextureView {
2197        let descriptor = wgc::resource::TextureViewDescriptor {
2198            label: desc.label.map(Borrowed),
2199            format: desc.format,
2200            dimension: desc.dimension,
2201            usage: desc.usage,
2202            range: wgt::ImageSubresourceRange {
2203                aspect: desc.aspect,
2204                base_mip_level: desc.base_mip_level,
2205                mip_level_count: desc.mip_level_count,
2206                base_array_layer: desc.base_array_layer,
2207                array_layer_count: desc.array_layer_count,
2208            },
2209        };
2210        let (id, error) = self
2211            .context
2212            .0
2213            .texture_create_view(self.id, &descriptor, None);
2214        if let Some(cause) = error {
2215            self.context
2216                .handle_error(&self.error_sink, cause, desc.label, "Texture::create_view");
2217        }
2218        CoreTextureView {
2219            context: self.context.clone(),
2220            id,
2221        }
2222        .into()
2223    }
2224
2225    fn destroy(&self) {
2226        self.context.0.texture_destroy(self.id);
2227    }
2228}
2229
2230impl Drop for CoreTexture {
2231    fn drop(&mut self) {
2232        self.context.0.texture_drop(self.id)
2233    }
2234}
2235
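// BLAS compaction preparation. The user callback is wrapped so wgpu-core's
// BlasPrepareCompactResult is reduced to Ok / BlasAsyncError before reaching the public
// API; like buffer mapping, completion is delivered asynchronously.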
2236impl dispatch::BlasInterface for CoreBlas {
2237    fn prepare_compact_async(&self, callback: BlasCompactCallback) {
2238        let callback: Option<wgc::resource::BlasCompactCallback> =
2239            Some(Box::new(|status: BlasPrepareCompactResult| {
2240                let res = status.map_err(|_| crate::BlasAsyncError);
2241                callback(res);
2242            }));
2243
2244        match self.context.0.blas_prepare_compact_async(self.id, callback) {
2245            Ok(_) => (),
2246            Err(cause) => self.context.handle_error_nolabel(
2247                &self.error_sink,
2248                cause,
2249                "Blas::prepare_compact_async",
2250            ),
2251        }
2252    }
2253
2254    fn ready_for_compaction(&self) -> bool {
2255        match self.context.0.ready_for_compaction(self.id) {
2256            Ok(ready) => ready,
2257            Err(cause) => {
2258                self.context.handle_error_nolabel(
2259                    &self.error_sink,
2260                    cause,
2261                    "Blas::ready_for_compaction",
2262                );
2263                // A BLAS is definitely not ready for compaction if it's not valid
2264                false
2265            }
2266        }
2267    }
2268}
2269
2270impl Drop for CoreBlas {
2271    fn drop(&mut self) {
2272        self.context.0.blas_drop(self.id)
2273    }
2274}
2275
2276impl dispatch::TlasInterface for CoreTlas {}
2277
2278impl Drop for CoreTlas {
2279    fn drop(&mut self) {
2280        self.context.0.tlas_drop(self.id)
2281    }
2282}
2283
2284impl dispatch::QuerySetInterface for CoreQuerySet {}
2285
2286impl Drop for CoreQuerySet {
2287    fn drop(&mut self) {
2288        self.context.0.query_set_drop(self.id)
2289    }
2290}
2291
2292impl dispatch::PipelineLayoutInterface for CorePipelineLayout {}
2293
2294impl Drop for CorePipelineLayout {
2295    fn drop(&mut self) {
2296        self.context.0.pipeline_layout_drop(self.id)
2297    }
2298}
2299
2300impl dispatch::RenderPipelineInterface for CoreRenderPipeline {
2301    fn get_bind_group_layout(&self, index: u32) -> dispatch::DispatchBindGroupLayout {
2302        let (id, error) = self
2303            .context
2304            .0
2305            .render_pipeline_get_bind_group_layout(self.id, index, None);
2306        if let Some(err) = error {
2307            self.context.handle_error_nolabel(
2308                &self.error_sink,
2309                err,
2310                "RenderPipeline::get_bind_group_layout",
2311            )
2312        }
2313        CoreBindGroupLayout {
2314            context: self.context.clone(),
2315            id,
2316        }
2317        .into()
2318    }
2319}
2320
2321impl Drop for CoreRenderPipeline {
2322    fn drop(&mut self) {
2323        self.context.0.render_pipeline_drop(self.id)
2324    }
2325}
2326
2327impl dispatch::ComputePipelineInterface for CoreComputePipeline {
2328    fn get_bind_group_layout(&self, index: u32) -> dispatch::DispatchBindGroupLayout {
2329        let (id, error) = self
2330            .context
2331            .0
2332            .compute_pipeline_get_bind_group_layout(self.id, index, None);
2333        if let Some(err) = error {
2334            self.context.handle_error_nolabel(
2335                &self.error_sink,
2336                err,
2337                "ComputePipeline::get_bind_group_layout",
2338            )
2339        }
2340        CoreBindGroupLayout {
2341            context: self.context.clone(),
2342            id,
2343        }
2344        .into()
2345    }
2346}
2347
2348impl Drop for CoreComputePipeline {
2349    fn drop(&mut self) {
2350        self.context.0.compute_pipeline_drop(self.id)
2351    }
2352}
2353
2354impl dispatch::PipelineCacheInterface for CorePipelineCache {
2355    fn get_data(&self) -> Option<Vec<u8>> {
2356        self.context.0.pipeline_cache_get_data(self.id)
2357    }
2358}
2359
2360impl Drop for CorePipelineCache {
2361    fn drop(&mut self) {
2362        self.context.0.pipeline_cache_drop(self.id)
2363    }
2364}
2365
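// Command encoding. Each method forwards to the matching wgpu-core `command_encoder_*`
// call; errors are recorded against the shared error sink rather than returned, so the
// encoder object itself stays usable after a validation failure.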
2366impl dispatch::CommandEncoderInterface for CoreCommandEncoder {
2367    fn copy_buffer_to_buffer(
2368        &self,
2369        source: &dispatch::DispatchBuffer,
2370        source_offset: crate::BufferAddress,
2371        destination: &dispatch::DispatchBuffer,
2372        destination_offset: crate::BufferAddress,
2373        copy_size: Option<crate::BufferAddress>,
2374    ) {
2375        let source = source.as_core();
2376        let destination = destination.as_core();
2377
2378        if let Err(cause) = self.context.0.command_encoder_copy_buffer_to_buffer(
2379            self.id,
2380            source.id,
2381            source_offset,
2382            destination.id,
2383            destination_offset,
2384            copy_size,
2385        ) {
2386            self.context.handle_error_nolabel(
2387                &self.error_sink,
2388                cause,
2389                "CommandEncoder::copy_buffer_to_buffer",
2390            );
2391        }
2392    }
2393
2394    fn copy_buffer_to_texture(
2395        &self,
2396        source: crate::TexelCopyBufferInfo<'_>,
2397        destination: crate::TexelCopyTextureInfo<'_>,
2398        copy_size: crate::Extent3d,
2399    ) {
2400        if let Err(cause) = self.context.0.command_encoder_copy_buffer_to_texture(
2401            self.id,
2402            &map_buffer_copy_view(source),
2403            &map_texture_copy_view(destination),
2404            &copy_size,
2405        ) {
2406            self.context.handle_error_nolabel(
2407                &self.error_sink,
2408                cause,
2409                "CommandEncoder::copy_buffer_to_texture",
2410            );
2411        }
2412    }
2413
2414    fn copy_texture_to_buffer(
2415        &self,
2416        source: crate::TexelCopyTextureInfo<'_>,
2417        destination: crate::TexelCopyBufferInfo<'_>,
2418        copy_size: crate::Extent3d,
2419    ) {
2420        if let Err(cause) = self.context.0.command_encoder_copy_texture_to_buffer(
2421            self.id,
2422            &map_texture_copy_view(source),
2423            &map_buffer_copy_view(destination),
2424            &copy_size,
2425        ) {
2426            self.context.handle_error_nolabel(
2427                &self.error_sink,
2428                cause,
2429                "CommandEncoder::copy_texture_to_buffer",
2430            );
2431        }
2432    }
2433
2434    fn copy_texture_to_texture(
2435        &self,
2436        source: crate::TexelCopyTextureInfo<'_>,
2437        destination: crate::TexelCopyTextureInfo<'_>,
2438        copy_size: crate::Extent3d,
2439    ) {
2440        if let Err(cause) = self.context.0.command_encoder_copy_texture_to_texture(
2441            self.id,
2442            &map_texture_copy_view(source),
2443            &map_texture_copy_view(destination),
2444            &copy_size,
2445        ) {
2446            self.context.handle_error_nolabel(
2447                &self.error_sink,
2448                cause,
2449                "CommandEncoder::copy_texture_to_texture",
2450            );
2451        }
2452    }
2453
2454    fn begin_compute_pass(
2455        &self,
2456        desc: &crate::ComputePassDescriptor<'_>,
2457    ) -> dispatch::DispatchComputePass {
2458        let timestamp_writes =
2459            desc.timestamp_writes
2460                .as_ref()
2461                .map(|tw| wgc::command::PassTimestampWrites {
2462                    query_set: tw.query_set.inner.as_core().id,
2463                    beginning_of_pass_write_index: tw.beginning_of_pass_write_index,
2464                    end_of_pass_write_index: tw.end_of_pass_write_index,
2465                });
2466
2467        let (pass, err) = self.context.0.command_encoder_begin_compute_pass(
2468            self.id,
2469            &wgc::command::ComputePassDescriptor {
2470                label: desc.label.map(Borrowed),
2471                timestamp_writes,
2472            },
2473        );
2474
2475        if let Some(cause) = err {
2476            self.context.handle_error(
2477                &self.error_sink,
2478                cause,
2479                desc.label,
2480                "CommandEncoder::begin_compute_pass",
2481            );
2482        }
2483
2484        CoreComputePass {
2485            context: self.context.clone(),
2486            pass,
2487            error_sink: self.error_sink.clone(),
2488            id: crate::cmp::Identifier::create(),
2489        }
2490        .into()
2491    }
2492
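    // Translates the color / depth-stencil attachments and timestamp writes into their
    // wgpu-core forms before opening the pass; the returned CoreRenderPass owns the
    // wgc::command::RenderPass and records into it until `end`.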
2493    fn begin_render_pass(
2494        &self,
2495        desc: &crate::RenderPassDescriptor<'_>,
2496    ) -> dispatch::DispatchRenderPass {
2497        let colors = desc
2498            .color_attachments
2499            .iter()
2500            .map(|ca| {
2501                ca.as_ref()
2502                    .map(|at| wgc::command::RenderPassColorAttachment {
2503                        view: at.view.inner.as_core().id,
2504                        depth_slice: at.depth_slice,
2505                        resolve_target: at.resolve_target.map(|view| view.inner.as_core().id),
2506                        load_op: at.ops.load,
2507                        store_op: at.ops.store,
2508                    })
2509            })
2510            .collect::<Vec<_>>();
2511
2512        let depth_stencil = desc.depth_stencil_attachment.as_ref().map(|dsa| {
2513            wgc::command::RenderPassDepthStencilAttachment {
2514                view: dsa.view.inner.as_core().id,
2515                depth: map_pass_channel(dsa.depth_ops.as_ref()),
2516                stencil: map_pass_channel(dsa.stencil_ops.as_ref()),
2517            }
2518        });
2519
2520        let timestamp_writes =
2521            desc.timestamp_writes
2522                .as_ref()
2523                .map(|tw| wgc::command::PassTimestampWrites {
2524                    query_set: tw.query_set.inner.as_core().id,
2525                    beginning_of_pass_write_index: tw.beginning_of_pass_write_index,
2526                    end_of_pass_write_index: tw.end_of_pass_write_index,
2527                });
2528
2529        let (pass, err) = self.context.0.command_encoder_begin_render_pass(
2530            self.id,
2531            &wgc::command::RenderPassDescriptor {
2532                label: desc.label.map(Borrowed),
2533                timestamp_writes: timestamp_writes.as_ref(),
2534                color_attachments: Borrowed(&colors),
2535                depth_stencil_attachment: depth_stencil.as_ref(),
2536                occlusion_query_set: desc.occlusion_query_set.map(|qs| qs.inner.as_core().id),
2537            },
2538        );
2539
2540        if let Some(cause) = err {
2541            self.context.handle_error(
2542                &self.error_sink,
2543                cause,
2544                desc.label,
2545                "CommandEncoder::begin_render_pass",
2546            );
2547        }
2548
2549        CoreRenderPass {
2550            context: self.context.clone(),
2551            pass,
2552            error_sink: self.error_sink.clone(),
2553            id: crate::cmp::Identifier::create(),
2554        }
2555        .into()
2556    }
2557
2558    fn finish(&mut self) -> dispatch::DispatchCommandBuffer {
2559        let descriptor = wgt::CommandBufferDescriptor::default();
2560        let (id, error) = self
2561            .context
2562            .0
2563            .command_encoder_finish(self.id, &descriptor, None);
2564        if let Some(cause) = error {
2565            self.context
2566                .handle_error_nolabel(&self.error_sink, cause, "a CommandEncoder");
2567        }
2568        CoreCommandBuffer {
2569            context: self.context.clone(),
2570            id,
2571        }
2572        .into()
2573    }
2574
2575    fn clear_texture(
2576        &self,
2577        texture: &dispatch::DispatchTexture,
2578        subresource_range: &crate::ImageSubresourceRange,
2579    ) {
2580        let texture = texture.as_core();
2581
2582        if let Err(cause) =
2583            self.context
2584                .0
2585                .command_encoder_clear_texture(self.id, texture.id, subresource_range)
2586        {
2587            self.context.handle_error_nolabel(
2588                &self.error_sink,
2589                cause,
2590                "CommandEncoder::clear_texture",
2591            );
2592        }
2593    }
2594
2595    fn clear_buffer(
2596        &self,
2597        buffer: &dispatch::DispatchBuffer,
2598        offset: crate::BufferAddress,
2599        size: Option<crate::BufferAddress>,
2600    ) {
2601        let buffer = buffer.as_core();
2602
2603        if let Err(cause) = self
2604            .context
2605            .0
2606            .command_encoder_clear_buffer(self.id, buffer.id, offset, size)
2607        {
2608            self.context.handle_error_nolabel(
2609                &self.error_sink,
2610                cause,
2611                "CommandEncoder::clear_buffer",
2612            );
2613        }
2614    }
2615
2616    fn insert_debug_marker(&self, label: &str) {
2617        if let Err(cause) = self
2618            .context
2619            .0
2620            .command_encoder_insert_debug_marker(self.id, label)
2621        {
2622            self.context.handle_error_nolabel(
2623                &self.error_sink,
2624                cause,
2625                "CommandEncoder::insert_debug_marker",
2626            );
2627        }
2628    }
2629
2630    fn push_debug_group(&self, label: &str) {
2631        if let Err(cause) = self
2632            .context
2633            .0
2634            .command_encoder_push_debug_group(self.id, label)
2635        {
2636            self.context.handle_error_nolabel(
2637                &self.error_sink,
2638                cause,
2639                "CommandEncoder::push_debug_group",
2640            );
2641        }
2642    }
2643
2644    fn pop_debug_group(&self) {
2645        if let Err(cause) = self.context.0.command_encoder_pop_debug_group(self.id) {
2646            self.context.handle_error_nolabel(
2647                &self.error_sink,
2648                cause,
2649                "CommandEncoder::pop_debug_group",
2650            );
2651        }
2652    }
2653
2654    fn write_timestamp(&self, query_set: &dispatch::DispatchQuerySet, query_index: u32) {
2655        let query_set = query_set.as_core();
2656
2657        if let Err(cause) =
2658            self.context
2659                .0
2660                .command_encoder_write_timestamp(self.id, query_set.id, query_index)
2661        {
2662            self.context.handle_error_nolabel(
2663                &self.error_sink,
2664                cause,
2665                "CommandEncoder::write_timestamp",
2666            );
2667        }
2668    }
2669
2670    fn resolve_query_set(
2671        &self,
2672        query_set: &dispatch::DispatchQuerySet,
2673        first_query: u32,
2674        query_count: u32,
2675        destination: &dispatch::DispatchBuffer,
2676        destination_offset: crate::BufferAddress,
2677    ) {
2678        let query_set = query_set.as_core();
2679        let destination = destination.as_core();
2680
2681        if let Err(cause) = self.context.0.command_encoder_resolve_query_set(
2682            self.id,
2683            query_set.id,
2684            first_query,
2685            query_count,
2686            destination.id,
2687            destination_offset,
2688        ) {
2689            self.context.handle_error_nolabel(
2690                &self.error_sink,
2691                cause,
2692                "CommandEncoder::resolve_query_set",
2693            );
2694        }
2695    }
2696
2697    fn mark_acceleration_structures_built<'a>(
2698        &self,
2699        blas: &mut dyn Iterator<Item = &'a Blas>,
2700        tlas: &mut dyn Iterator<Item = &'a Tlas>,
2701    ) {
2702        let blas = blas
2703            .map(|b| b.inner.as_core().id)
2704            .collect::<SmallVec<[_; 4]>>();
2705        let tlas = tlas
2706            .map(|t| t.inner.as_core().id)
2707            .collect::<SmallVec<[_; 4]>>();
2708        if let Err(cause) = self
2709            .context
2710            .0
2711            .command_encoder_mark_acceleration_structures_built(self.id, &blas, &tlas)
2712        {
2713            self.context.handle_error_nolabel(
2714                &self.error_sink,
2715                cause,
2716                "CommandEncoder::mark_acceleration_structures_built",
2717            );
2718        }
2719    }
2720
2721    fn build_acceleration_structures<'a>(
2722        &self,
2723        blas: &mut dyn Iterator<Item = &'a crate::BlasBuildEntry<'a>>,
2724        tlas: &mut dyn Iterator<Item = &'a crate::Tlas>,
2725    ) {
2726        let blas = blas.map(|e: &crate::BlasBuildEntry<'_>| {
2727            let geometries = match e.geometry {
2728                crate::BlasGeometries::TriangleGeometries(ref triangle_geometries) => {
2729                    let iter = triangle_geometries.iter().map(|tg| {
2730                        wgc::ray_tracing::BlasTriangleGeometry {
2731                            vertex_buffer: tg.vertex_buffer.inner.as_core().id,
2732                            index_buffer: tg.index_buffer.map(|buf| buf.inner.as_core().id),
2733                            transform_buffer: tg.transform_buffer.map(|buf| buf.inner.as_core().id),
2734                            size: tg.size,
2735                            transform_buffer_offset: tg.transform_buffer_offset,
2736                            first_vertex: tg.first_vertex,
2737                            vertex_stride: tg.vertex_stride,
2738                            first_index: tg.first_index,
2739                        }
2740                    });
2741                    wgc::ray_tracing::BlasGeometries::TriangleGeometries(Box::new(iter))
2742                }
2743            };
2744            wgc::ray_tracing::BlasBuildEntry {
2745                blas_id: e.blas.inner.as_core().id,
2746                geometries,
2747            }
2748        });
2749
2750        let tlas = tlas.into_iter().map(|e| {
2751            let instances = e
2752                .instances
2753                .iter()
2754                .map(|instance: &Option<crate::TlasInstance>| {
2755                    instance
2756                        .as_ref()
2757                        .map(|instance| wgc::ray_tracing::TlasInstance {
2758                            blas_id: instance.blas.as_core().id,
2759                            transform: &instance.transform,
2760                            custom_data: instance.custom_data,
2761                            mask: instance.mask,
2762                        })
2763                });
2764            wgc::ray_tracing::TlasPackage {
2765                tlas_id: e.inner.as_core().id,
2766                instances: Box::new(instances),
2767                lowest_unmodified: e.lowest_unmodified,
2768            }
2769        });
2770
2771        if let Err(cause) = self
2772            .context
2773            .0
2774            .command_encoder_build_acceleration_structures(self.id, blas, tlas)
2775        {
2776            self.context.handle_error_nolabel(
2777                &self.error_sink,
2778                cause,
2779                "CommandEncoder::build_acceleration_structures",
2780            );
2781        }
2782    }
2783
2784    fn transition_resources<'a>(
2785        &mut self,
2786        buffer_transitions: &mut dyn Iterator<
2787            Item = wgt::BufferTransition<&'a dispatch::DispatchBuffer>,
2788        >,
2789        texture_transitions: &mut dyn Iterator<
2790            Item = wgt::TextureTransition<&'a dispatch::DispatchTexture>,
2791        >,
2792    ) {
2793        let result = self.context.0.command_encoder_transition_resources(
2794            self.id,
2795            buffer_transitions.map(|t| wgt::BufferTransition {
2796                buffer: t.buffer.as_core().id,
2797                state: t.state,
2798            }),
2799            texture_transitions.map(|t| wgt::TextureTransition {
2800                texture: t.texture.as_core().id,
2801                selector: t.selector.clone(),
2802                state: t.state,
2803            }),
2804        );
2805
2806        if let Err(cause) = result {
2807            self.context.handle_error_nolabel(
2808                &self.error_sink,
2809                cause,
2810                "CommandEncoder::transition_resources",
2811            );
2812        }
2813    }
2814}
2815
2816impl Drop for CoreCommandEncoder {
2817    fn drop(&mut self) {
2818        self.context.0.command_encoder_drop(self.id)
2819    }
2820}
2821
2822impl dispatch::CommandBufferInterface for CoreCommandBuffer {}
2823
2824impl Drop for CoreCommandBuffer {
2825    fn drop(&mut self) {
2826        self.context.0.command_buffer_drop(self.id)
2827    }
2828}
2829
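// Compute pass recording. The pass owns a wgc ComputePass; commands are appended through
// `compute_pass_*` calls and errors are attributed to the pass label. `end` is also
// called from Drop, so a pass is closed even if the user never ends it explicitly.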
2830impl dispatch::ComputePassInterface for CoreComputePass {
2831    fn set_pipeline(&mut self, pipeline: &dispatch::DispatchComputePipeline) {
2832        let pipeline = pipeline.as_core();
2833
2834        if let Err(cause) = self
2835            .context
2836            .0
2837            .compute_pass_set_pipeline(&mut self.pass, pipeline.id)
2838        {
2839            self.context.handle_error(
2840                &self.error_sink,
2841                cause,
2842                self.pass.label(),
2843                "ComputePass::set_pipeline",
2844            );
2845        }
2846    }
2847
2848    fn set_bind_group(
2849        &mut self,
2850        index: u32,
2851        bind_group: Option<&dispatch::DispatchBindGroup>,
2852        offsets: &[crate::DynamicOffset],
2853    ) {
2854        let bg = bind_group.map(|bg| bg.as_core().id);
2855
2856        if let Err(cause) =
2857            self.context
2858                .0
2859                .compute_pass_set_bind_group(&mut self.pass, index, bg, offsets)
2860        {
2861            self.context.handle_error(
2862                &self.error_sink,
2863                cause,
2864                self.pass.label(),
2865                "ComputePass::set_bind_group",
2866            );
2867        }
2868    }
2869
2870    fn set_push_constants(&mut self, offset: u32, data: &[u8]) {
2871        if let Err(cause) =
2872            self.context
2873                .0
2874                .compute_pass_set_push_constants(&mut self.pass, offset, data)
2875        {
2876            self.context.handle_error(
2877                &self.error_sink,
2878                cause,
2879                self.pass.label(),
2880                "ComputePass::set_push_constants",
2881            );
2882        }
2883    }
2884
2885    fn insert_debug_marker(&mut self, label: &str) {
2886        if let Err(cause) =
2887            self.context
2888                .0
2889                .compute_pass_insert_debug_marker(&mut self.pass, label, 0)
2890        {
2891            self.context.handle_error(
2892                &self.error_sink,
2893                cause,
2894                self.pass.label(),
2895                "ComputePass::insert_debug_marker",
2896            );
2897        }
2898    }
2899
2900    fn push_debug_group(&mut self, group_label: &str) {
2901        if let Err(cause) =
2902            self.context
2903                .0
2904                .compute_pass_push_debug_group(&mut self.pass, group_label, 0)
2905        {
2906            self.context.handle_error(
2907                &self.error_sink,
2908                cause,
2909                self.pass.label(),
2910                "ComputePass::push_debug_group",
2911            );
2912        }
2913    }
2914
2915    fn pop_debug_group(&mut self) {
2916        if let Err(cause) = self.context.0.compute_pass_pop_debug_group(&mut self.pass) {
2917            self.context.handle_error(
2918                &self.error_sink,
2919                cause,
2920                self.pass.label(),
2921                "ComputePass::pop_debug_group",
2922            );
2923        }
2924    }
2925
2926    fn write_timestamp(&mut self, query_set: &dispatch::DispatchQuerySet, query_index: u32) {
2927        let query_set = query_set.as_core();
2928
2929        if let Err(cause) =
2930            self.context
2931                .0
2932                .compute_pass_write_timestamp(&mut self.pass, query_set.id, query_index)
2933        {
2934            self.context.handle_error(
2935                &self.error_sink,
2936                cause,
2937                self.pass.label(),
2938                "ComputePass::write_timestamp",
2939            );
2940        }
2941    }
2942
2943    fn begin_pipeline_statistics_query(
2944        &mut self,
2945        query_set: &dispatch::DispatchQuerySet,
2946        query_index: u32,
2947    ) {
2948        let query_set = query_set.as_core();
2949
2950        if let Err(cause) = self.context.0.compute_pass_begin_pipeline_statistics_query(
2951            &mut self.pass,
2952            query_set.id,
2953            query_index,
2954        ) {
2955            self.context.handle_error(
2956                &self.error_sink,
2957                cause,
2958                self.pass.label(),
2959                "ComputePass::begin_pipeline_statistics_query",
2960            );
2961        }
2962    }
2963
2964    fn end_pipeline_statistics_query(&mut self) {
2965        if let Err(cause) = self
2966            .context
2967            .0
2968            .compute_pass_end_pipeline_statistics_query(&mut self.pass)
2969        {
2970            self.context.handle_error(
2971                &self.error_sink,
2972                cause,
2973                self.pass.label(),
2974                "ComputePass::end_pipeline_statistics_query",
2975            );
2976        }
2977    }
2978
2979    fn dispatch_workgroups(&mut self, x: u32, y: u32, z: u32) {
2980        if let Err(cause) = self
2981            .context
2982            .0
2983            .compute_pass_dispatch_workgroups(&mut self.pass, x, y, z)
2984        {
2985            self.context.handle_error(
2986                &self.error_sink,
2987                cause,
2988                self.pass.label(),
2989                "ComputePass::dispatch_workgroups",
2990            );
2991        }
2992    }
2993
2994    fn dispatch_workgroups_indirect(
2995        &mut self,
2996        indirect_buffer: &dispatch::DispatchBuffer,
2997        indirect_offset: crate::BufferAddress,
2998    ) {
2999        let indirect_buffer = indirect_buffer.as_core();
3000
3001        if let Err(cause) = self.context.0.compute_pass_dispatch_workgroups_indirect(
3002            &mut self.pass,
3003            indirect_buffer.id,
3004            indirect_offset,
3005        ) {
3006            self.context.handle_error(
3007                &self.error_sink,
3008                cause,
3009                self.pass.label(),
3010                "ComputePass::dispatch_workgroups_indirect",
3011            );
3012        }
3013    }
3014
3015    fn end(&mut self) {
3016        if let Err(cause) = self.context.0.compute_pass_end(&mut self.pass) {
3017            self.context.handle_error(
3018                &self.error_sink,
3019                cause,
3020                self.pass.label(),
3021                "ComputePass::end",
3022            );
3023        }
3024    }
3025}
3026
3027impl Drop for CoreComputePass {
3028    fn drop(&mut self) {
3029        dispatch::ComputePassInterface::end(self);
3030    }
3031}
3032
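// Render pass recording, mirroring the compute-pass wrapper above: each method appends
// to the wgpu-core RenderPass and funnels errors through the shared error sink with the
// pass label attached.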
3033impl dispatch::RenderPassInterface for CoreRenderPass {
3034    fn set_pipeline(&mut self, pipeline: &dispatch::DispatchRenderPipeline) {
3035        let pipeline = pipeline.as_core();
3036
3037        if let Err(cause) = self
3038            .context
3039            .0
3040            .render_pass_set_pipeline(&mut self.pass, pipeline.id)
3041        {
3042            self.context.handle_error(
3043                &self.error_sink,
3044                cause,
3045                self.pass.label(),
3046                "RenderPass::set_pipeline",
3047            );
3048        }
3049    }
3050
3051    fn set_bind_group(
3052        &mut self,
3053        index: u32,
3054        bind_group: Option<&dispatch::DispatchBindGroup>,
3055        offsets: &[crate::DynamicOffset],
3056    ) {
3057        let bg = bind_group.map(|bg| bg.as_core().id);
3058
3059        if let Err(cause) =
3060            self.context
3061                .0
3062                .render_pass_set_bind_group(&mut self.pass, index, bg, offsets)
3063        {
3064            self.context.handle_error(
3065                &self.error_sink,
3066                cause,
3067                self.pass.label(),
3068                "RenderPass::set_bind_group",
3069            );
3070        }
3071    }
3072
3073    fn set_index_buffer(
3074        &mut self,
3075        buffer: &dispatch::DispatchBuffer,
3076        index_format: crate::IndexFormat,
3077        offset: crate::BufferAddress,
3078        size: Option<crate::BufferSize>,
3079    ) {
3080        let buffer = buffer.as_core();
3081
3082        if let Err(cause) = self.context.0.render_pass_set_index_buffer(
3083            &mut self.pass,
3084            buffer.id,
3085            index_format,
3086            offset,
3087            size,
3088        ) {
3089            self.context.handle_error(
3090                &self.error_sink,
3091                cause,
3092                self.pass.label(),
3093                "RenderPass::set_index_buffer",
3094            );
3095        }
3096    }
3097
3098    fn set_vertex_buffer(
3099        &mut self,
3100        slot: u32,
3101        buffer: &dispatch::DispatchBuffer,
3102        offset: crate::BufferAddress,
3103        size: Option<crate::BufferSize>,
3104    ) {
3105        let buffer = buffer.as_core();
3106
3107        if let Err(cause) = self.context.0.render_pass_set_vertex_buffer(
3108            &mut self.pass,
3109            slot,
3110            buffer.id,
3111            offset,
3112            size,
3113        ) {
3114            self.context.handle_error(
3115                &self.error_sink,
3116                cause,
3117                self.pass.label(),
3118                "RenderPass::set_vertex_buffer",
3119            );
3120        }
3121    }
3122
3123    fn set_push_constants(&mut self, stages: crate::ShaderStages, offset: u32, data: &[u8]) {
3124        if let Err(cause) =
3125            self.context
3126                .0
3127                .render_pass_set_push_constants(&mut self.pass, stages, offset, data)
3128        {
3129            self.context.handle_error(
3130                &self.error_sink,
3131                cause,
3132                self.pass.label(),
3133                "RenderPass::set_push_constants",
3134            );
3135        }
3136    }
3137
3138    fn set_blend_constant(&mut self, color: crate::Color) {
3139        if let Err(cause) = self
3140            .context
3141            .0
3142            .render_pass_set_blend_constant(&mut self.pass, color)
3143        {
3144            self.context.handle_error(
3145                &self.error_sink,
3146                cause,
3147                self.pass.label(),
3148                "RenderPass::set_blend_constant",
3149            );
3150        }
3151    }
3152
3153    fn set_scissor_rect(&mut self, x: u32, y: u32, width: u32, height: u32) {
3154        if let Err(cause) =
3155            self.context
3156                .0
3157                .render_pass_set_scissor_rect(&mut self.pass, x, y, width, height)
3158        {
3159            self.context.handle_error(
3160                &self.error_sink,
3161                cause,
3162                self.pass.label(),
3163                "RenderPass::set_scissor_rect",
3164            );
3165        }
3166    }
3167
3168    fn set_viewport(
3169        &mut self,
3170        x: f32,
3171        y: f32,
3172        width: f32,
3173        height: f32,
3174        min_depth: f32,
3175        max_depth: f32,
3176    ) {
3177        if let Err(cause) = self.context.0.render_pass_set_viewport(
3178            &mut self.pass,
3179            x,
3180            y,
3181            width,
3182            height,
3183            min_depth,
3184            max_depth,
3185        ) {
3186            self.context.handle_error(
3187                &self.error_sink,
3188                cause,
3189                self.pass.label(),
3190                "RenderPass::set_viewport",
3191            );
3192        }
3193    }
3194
3195    fn set_stencil_reference(&mut self, reference: u32) {
3196        if let Err(cause) = self
3197            .context
3198            .0
3199            .render_pass_set_stencil_reference(&mut self.pass, reference)
3200        {
3201            self.context.handle_error(
3202                &self.error_sink,
3203                cause,
3204                self.pass.label(),
3205                "RenderPass::set_stencil_reference",
3206            );
3207        }
3208    }
3209
    fn draw(&mut self, vertices: Range<u32>, instances: Range<u32>) {
        if let Err(cause) = self.context.0.render_pass_draw(
            &mut self.pass,
            vertices.end - vertices.start,
            instances.end - instances.start,
            vertices.start,
            instances.start,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw",
            );
        }
    }

    fn draw_indexed(&mut self, indices: Range<u32>, base_vertex: i32, instances: Range<u32>) {
        if let Err(cause) = self.context.0.render_pass_draw_indexed(
            &mut self.pass,
            indices.end - indices.start,
            instances.end - instances.start,
            indices.start,
            base_vertex,
            instances.start,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_indexed",
            );
        }
    }

    fn draw_mesh_tasks(&mut self, group_count_x: u32, group_count_y: u32, group_count_z: u32) {
        if let Err(cause) = self.context.0.render_pass_draw_mesh_tasks(
            &mut self.pass,
            group_count_x,
            group_count_y,
            group_count_z,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_mesh_tasks",
            );
        }
    }

    fn draw_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_draw_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_indirect",
            );
        }
    }

    fn draw_indexed_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_draw_indexed_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_indexed_indirect",
            );
        }
    }

    fn draw_mesh_tasks_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_draw_mesh_tasks_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_mesh_tasks_indirect",
            );
        }
    }

    fn multi_draw_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_multi_draw_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
            count,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_indirect",
            );
        }
    }

    fn multi_draw_indexed_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_multi_draw_indexed_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
            count,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_indexed_indirect",
            );
        }
    }

    fn multi_draw_mesh_tasks_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_multi_draw_mesh_tasks_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
            count,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_mesh_tasks_indirect",
            );
        }
    }

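    // For the *_count variants, the actual number of draws is read from
    // `count_buffer` at `count_buffer_offset` at execution time; `max_count`
    // only bounds how many draws may be issued.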
    fn multi_draw_indirect_count(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count_buffer: &dispatch::DispatchBuffer,
        count_buffer_offset: crate::BufferAddress,
        max_count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();
        let count_buffer = count_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_multi_draw_indirect_count(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
            count_buffer.id,
            count_buffer_offset,
            max_count,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_indirect_count",
            );
        }
    }

    fn multi_draw_indexed_indirect_count(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count_buffer: &dispatch::DispatchBuffer,
        count_buffer_offset: crate::BufferAddress,
        max_count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();
        let count_buffer = count_buffer.as_core();

        if let Err(cause) = self
            .context
            .0
            .render_pass_multi_draw_indexed_indirect_count(
                &mut self.pass,
                indirect_buffer.id,
                indirect_offset,
                count_buffer.id,
                count_buffer_offset,
                max_count,
            )
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_indexed_indirect_count",
            );
        }
    }

    fn multi_draw_mesh_tasks_indirect_count(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count_buffer: &dispatch::DispatchBuffer,
        count_buffer_offset: crate::BufferAddress,
        max_count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();
        let count_buffer = count_buffer.as_core();

        if let Err(cause) = self
            .context
            .0
            .render_pass_multi_draw_mesh_tasks_indirect_count(
                &mut self.pass,
                indirect_buffer.id,
                indirect_offset,
                count_buffer.id,
                count_buffer_offset,
                max_count,
            )
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_mesh_tasks_indirect_count",
            );
        }
    }

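    // The trailing `0` passed to the debug marker and debug group calls below
    // is a color value; this backend does not assign one.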
    fn insert_debug_marker(&mut self, label: &str) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_insert_debug_marker(&mut self.pass, label, 0)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::insert_debug_marker",
            );
        }
    }

    fn push_debug_group(&mut self, group_label: &str) {
        if let Err(cause) =
            self.context
                .0
                .render_pass_push_debug_group(&mut self.pass, group_label, 0)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::push_debug_group",
            );
        }
    }

    fn pop_debug_group(&mut self) {
        if let Err(cause) = self.context.0.render_pass_pop_debug_group(&mut self.pass) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::pop_debug_group",
            );
        }
    }

    fn write_timestamp(&mut self, query_set: &dispatch::DispatchQuerySet, query_index: u32) {
        let query_set = query_set.as_core();

        if let Err(cause) =
            self.context
                .0
                .render_pass_write_timestamp(&mut self.pass, query_set.id, query_index)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::write_timestamp",
            );
        }
    }

    fn begin_occlusion_query(&mut self, query_index: u32) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_begin_occlusion_query(&mut self.pass, query_index)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::begin_occlusion_query",
            );
        }
    }

    fn end_occlusion_query(&mut self) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_end_occlusion_query(&mut self.pass)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::end_occlusion_query",
            );
        }
    }

    fn begin_pipeline_statistics_query(
        &mut self,
        query_set: &dispatch::DispatchQuerySet,
        query_index: u32,
    ) {
        let query_set = query_set.as_core();

        if let Err(cause) = self.context.0.render_pass_begin_pipeline_statistics_query(
            &mut self.pass,
            query_set.id,
            query_index,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::begin_pipeline_statistics_query",
            );
        }
    }

    fn end_pipeline_statistics_query(&mut self) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_end_pipeline_statistics_query(&mut self.pass)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::end_pipeline_statistics_query",
            );
        }
    }

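    // Bundle ids are gathered into a SmallVec so the common case of only a
    // handful of bundles avoids a heap allocation.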
    fn execute_bundles(
        &mut self,
        render_bundles: &mut dyn Iterator<Item = &dispatch::DispatchRenderBundle>,
    ) {
        let temp_render_bundles = render_bundles
            .map(|rb| rb.as_core().id)
            .collect::<SmallVec<[_; 4]>>();
        if let Err(cause) = self
            .context
            .0
            .render_pass_execute_bundles(&mut self.pass, &temp_render_bundles)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::execute_bundles",
            );
        }
    }

    fn end(&mut self) {
        if let Err(cause) = self.context.0.render_pass_end(&mut self.pass) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::end",
            );
        }
    }
}

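// Dropping the pass ends it, so recording is finalized even if `end` was
// never called explicitly.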
impl Drop for CoreRenderPass {
    fn drop(&mut self) {
        dispatch::RenderPassInterface::end(self);
    }
}

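// Unlike the pass types above, the bundle encoder records through wgpu-core's
// `bundle_ffi` free functions rather than through methods on the `Global`.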
impl dispatch::RenderBundleEncoderInterface for CoreRenderBundleEncoder {
    fn set_pipeline(&mut self, pipeline: &dispatch::DispatchRenderPipeline) {
        let pipeline = pipeline.as_core();

        wgpu_render_bundle_set_pipeline(&mut self.encoder, pipeline.id)
    }

    fn set_bind_group(
        &mut self,
        index: u32,
        bind_group: Option<&dispatch::DispatchBindGroup>,
        offsets: &[crate::DynamicOffset],
    ) {
        let bg = bind_group.map(|bg| bg.as_core().id);

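        // SAFETY: the pointer and length are taken from the live `offsets`
        // slice, so they describe valid, initialized memory for the call.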
        unsafe {
            wgpu_render_bundle_set_bind_group(
                &mut self.encoder,
                index,
                bg,
                offsets.as_ptr(),
                offsets.len(),
            )
        }
    }

    fn set_index_buffer(
        &mut self,
        buffer: &dispatch::DispatchBuffer,
        index_format: crate::IndexFormat,
        offset: crate::BufferAddress,
        size: Option<crate::BufferSize>,
    ) {
        let buffer = buffer.as_core();

        self.encoder
            .set_index_buffer(buffer.id, index_format, offset, size)
    }

    fn set_vertex_buffer(
        &mut self,
        slot: u32,
        buffer: &dispatch::DispatchBuffer,
        offset: crate::BufferAddress,
        size: Option<crate::BufferSize>,
    ) {
        let buffer = buffer.as_core();

        wgpu_render_bundle_set_vertex_buffer(&mut self.encoder, slot, buffer.id, offset, size)
    }

    fn set_push_constants(&mut self, stages: crate::ShaderStages, offset: u32, data: &[u8]) {
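        // SAFETY: the pointer and length both come from the `data` slice; the
        // length is converted to the u32 byte count the call expects.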
        unsafe {
            wgpu_render_bundle_set_push_constants(
                &mut self.encoder,
                stages,
                offset,
                data.len().try_into().unwrap(),
                data.as_ptr(),
            )
        }
    }

    fn draw(&mut self, vertices: Range<u32>, instances: Range<u32>) {
        wgpu_render_bundle_draw(
            &mut self.encoder,
            vertices.end - vertices.start,
            instances.end - instances.start,
            vertices.start,
            instances.start,
        )
    }

    fn draw_indexed(&mut self, indices: Range<u32>, base_vertex: i32, instances: Range<u32>) {
        wgpu_render_bundle_draw_indexed(
            &mut self.encoder,
            indices.end - indices.start,
            instances.end - instances.start,
            indices.start,
            base_vertex,
            instances.start,
        )
    }

    fn draw_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        wgpu_render_bundle_draw_indirect(&mut self.encoder, indirect_buffer.id, indirect_offset)
    }

    fn draw_indexed_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        wgpu_render_bundle_draw_indexed_indirect(
            &mut self.encoder,
            indirect_buffer.id,
            indirect_offset,
        )
    }

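    // Errors from finishing the bundle are reported via `handle_error_fatal`
    // rather than a device error sink.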
    fn finish(self, desc: &crate::RenderBundleDescriptor<'_>) -> dispatch::DispatchRenderBundle
    where
        Self: Sized,
    {
        let (id, error) = self.context.0.render_bundle_encoder_finish(
            self.encoder,
            &desc.map_label(|l| l.map(Borrowed)),
            None,
        );
        if let Some(err) = error {
            self.context
                .handle_error_fatal(err, "RenderBundleEncoder::finish");
        }
        CoreRenderBundle { id }.into()
    }
}

impl dispatch::RenderBundleInterface for CoreRenderBundle {}

impl dispatch::SurfaceInterface for CoreSurface {
    fn get_capabilities(&self, adapter: &dispatch::DispatchAdapter) -> wgt::SurfaceCapabilities {
        let adapter = adapter.as_core();

        self.context
            .0
            .surface_get_capabilities(self.id, adapter.id)
            .unwrap_or_default()
    }

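    // A successful configure records the device id and clones its error sink
    // onto the surface so later surface errors can be reported against that device.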
    fn configure(&self, device: &dispatch::DispatchDevice, config: &crate::SurfaceConfiguration) {
        let device = device.as_core();

        let error = self.context.0.surface_configure(self.id, device.id, config);
        if let Some(e) = error {
            self.context
                .handle_error_nolabel(&device.error_sink, e, "Surface::configure");
        } else {
            *self.configured_device.lock() = Some(device.id);
            *self.error_sink.lock() = Some(device.error_sink.clone());
        }
    }

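    // A successfully acquired texture gets a fresh error sink of its own; on
    // failure the error is routed to the configured device's sink if there is
    // one, and treated as fatal otherwise.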
    fn get_current_texture(
        &self,
    ) -> (
        Option<dispatch::DispatchTexture>,
        crate::SurfaceStatus,
        dispatch::DispatchSurfaceOutputDetail,
    ) {
        let output_detail = CoreSurfaceOutputDetail {
            context: self.context.clone(),
            surface_id: self.id,
        }
        .into();

        match self.context.0.surface_get_current_texture(self.id, None) {
            Ok(wgc::present::SurfaceOutput {
                status,
                texture: texture_id,
            }) => {
                let data = texture_id
                    .map(|id| CoreTexture {
                        context: self.context.clone(),
                        id,
                        error_sink: Arc::new(Mutex::new(ErrorSinkRaw::new())),
                    })
                    .map(Into::into);

                (data, status, output_detail)
            }
            Err(err) => {
                let error_sink = self.error_sink.lock();
                match error_sink.as_ref() {
                    Some(error_sink) => {
                        self.context.handle_error_nolabel(
                            error_sink,
                            err,
                            "Surface::get_current_texture",
                        );
                        (None, crate::SurfaceStatus::Unknown, output_detail)
                    }
                    None => self
                        .context
                        .handle_error_fatal(err, "Surface::get_current_texture"),
                }
            }
        }
    }
}

impl Drop for CoreSurface {
    fn drop(&mut self) {
        self.context.0.surface_drop(self.id)
    }
}

impl dispatch::SurfaceOutputDetailInterface for CoreSurfaceOutputDetail {
    fn present(&self) {
        match self.context.0.surface_present(self.surface_id) {
            Ok(_status) => (),
            Err(err) => self.context.handle_error_fatal(err, "Surface::present"),
        }
    }

    fn texture_discard(&self) {
        match self.context.0.surface_texture_discard(self.surface_id) {
            Ok(_status) => (),
            Err(err) => self
                .context
                .handle_error_fatal(err, "Surface::discard_texture"),
        }
    }
}
impl Drop for CoreSurfaceOutputDetail {
    fn drop(&mut self) {
        // Discard gets called by the api struct

        // no-op
    }
}

impl dispatch::QueueWriteBufferInterface for CoreQueueWriteBuffer {
    fn slice(&self) -> &[u8] {
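        // Reading back the staging memory is not supported by this backend;
        // the write buffer only exposes `slice_mut`.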
        panic!()
    }

    #[inline]
    fn slice_mut(&mut self) -> &mut [u8] {
        self.mapping.slice_mut()
    }
}
impl Drop for CoreQueueWriteBuffer {
    fn drop(&mut self) {
        // The api struct calls queue.write_staging_buffer

        // no-op
    }
}

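// `ptr` and `size` describe the mapped range handed out by wgpu-core when the
// buffer was mapped, which is what makes the raw-slice constructions below sound.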
impl dispatch::BufferMappedRangeInterface for CoreBufferMappedRange {
    #[inline]
    fn slice(&self) -> &[u8] {
        unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.size) }
    }

    #[inline]
    fn slice_mut(&mut self) -> &mut [u8] {
        unsafe { slice::from_raw_parts_mut(self.ptr.as_ptr(), self.size) }
    }

    #[cfg(webgpu)]
    fn as_uint8array(&self) -> &js_sys::Uint8Array {
        panic!("Only available on WebGPU")
    }
}