// wgpu/backend/wgpu_core.rs — `wgpu-core` backend implementation.

1use alloc::{
2    borrow::Cow::{self, Borrowed},
3    boxed::Box,
4    format,
5    string::{String, ToString as _},
6    sync::Arc,
7    vec,
8    vec::Vec,
9};
10use core::{
11    error::Error,
12    fmt,
13    future::ready,
14    ops::{Deref, Range},
15    pin::Pin,
16    ptr::NonNull,
17    slice,
18};
19
20use arrayvec::ArrayVec;
21use smallvec::SmallVec;
22use wgc::{
23    command::bundle_ffi::*, error::ContextErrorSource, pipeline::CreateShaderModuleError,
24    resource::BlasPrepareCompactResult,
25};
26use wgt::{
27    error::{ErrorType, WebGpuError},
28    WasmNotSendSync,
29};
30
31use crate::util::Mutex;
32use crate::{
33    api,
34    dispatch::{self, BlasCompactCallback, BufferMappedRangeInterface},
35    BindingResource, Blas, BufferBinding, BufferDescriptor, CompilationInfo, CompilationMessage,
36    CompilationMessageType, ErrorSource, Features, Label, LoadOp, MapMode, Operations,
37    ShaderSource, SurfaceTargetUnsafe, TextureDescriptor, Tlas,
38};
39
/// The `wgpu-core`-backed context.
///
/// A thin, cheaply clonable handle: clones share the single
/// `wgc::global::Global` through the wrapped `Arc`.
#[derive(Clone)]
pub struct ContextWgpuCore(Arc<wgc::global::Global>);
42
impl Drop for ContextWgpuCore {
    fn drop(&mut self) {
        // Intentionally empty: dropping the inner `Arc` releases the global
        // state once the last clone goes away. The explicit impl is kept so
        // the type has a `Drop` — presumably to match other backends and to
        // forbid destructuring; TODO confirm the original motivation.
        //nothing
    }
}
48
impl fmt::Debug for ContextWgpuCore {
    // Opaque debug representation: the inner `Global` is not printable,
    // so we only report that this is the native (wgpu-core) context.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("ContextWgpuCore")
            .field("type", &"Native")
            .finish()
    }
}
56
impl ContextWgpuCore {
    /// Wrap an existing wgpu-hal instance in a new core context.
    ///
    /// # Safety
    ///
    /// - `hal_instance` must satisfy the contract of
    ///   `wgc::global::Global::from_hal_instance` — NOTE(review): exact
    ///   obligations are not visible from this file; confirm upstream.
    pub unsafe fn from_hal_instance<A: hal::Api>(hal_instance: A::Instance) -> Self {
        Self(unsafe {
            Arc::new(wgc::global::Global::from_hal_instance::<A>(
                "wgpu",
                hal_instance,
            ))
        })
    }

    /// # Safety
    ///
    /// - The raw instance handle returned must not be manually destroyed.
    pub unsafe fn instance_as_hal<A: hal::Api>(&self) -> Option<&A::Instance> {
        unsafe { self.0.instance_as_hal::<A>() }
    }

    /// Wrap an existing wgpu-core instance in a new context.
    ///
    /// # Safety
    ///
    /// - See `wgc::global::Global::from_instance` for the caller's
    ///   obligations — NOTE(review): contract not visible here.
    pub unsafe fn from_core_instance(core_instance: wgc::instance::Instance) -> Self {
        Self(unsafe { Arc::new(wgc::global::Global::from_instance(core_instance)) })
    }

    /// List the ids of all adapters available on the requested `backends`.
    #[cfg(wgpu_core)]
    pub fn enumerate_adapters(&self, backends: wgt::Backends) -> Vec<wgc::id::AdapterId> {
        self.0.enumerate_adapters(backends)
    }

    /// Register a wgpu-hal adapter with the global and return its new id.
    ///
    /// # Safety
    ///
    /// - `hal_adapter` must be valid for this instance — TODO confirm.
    pub unsafe fn create_adapter_from_hal<A: hal::Api>(
        &self,
        hal_adapter: hal::ExposedAdapter<A>,
    ) -> wgc::id::AdapterId {
        unsafe { self.0.create_adapter_from_hal(hal_adapter.into(), None) }
    }

    /// Borrow the wgpu-hal adapter backing `adapter`, if it uses backend `A`.
    pub unsafe fn adapter_as_hal<A: hal::Api>(
        &self,
        adapter: &CoreAdapter,
    ) -> Option<impl Deref<Target = A::Adapter> + WasmNotSendSync> {
        unsafe { self.0.adapter_as_hal::<A>(adapter.id) }
    }

    /// Borrow the wgpu-hal buffer backing `buffer`, if it uses backend `A`.
    pub unsafe fn buffer_as_hal<A: hal::Api>(
        &self,
        buffer: &CoreBuffer,
    ) -> Option<impl Deref<Target = A::Buffer>> {
        unsafe { self.0.buffer_as_hal::<A>(buffer.id) }
    }

    /// Build a `CoreDevice`/`CoreQueue` pair from an already-opened
    /// wgpu-hal device. The device and queue share one error sink.
    ///
    /// # Safety
    ///
    /// - `hal_device` must have been created from `adapter` — TODO confirm
    ///   against `wgc::global::Global::create_device_from_hal`.
    pub unsafe fn create_device_from_hal<A: hal::Api>(
        &self,
        adapter: &CoreAdapter,
        hal_device: hal::OpenDevice<A>,
        desc: &crate::DeviceDescriptor<'_>,
    ) -> Result<(CoreDevice, CoreQueue), crate::RequestDeviceError> {
        // Tracing was removed upstream; warn loudly instead of silently
        // ignoring a requested trace.
        if !matches!(desc.trace, wgt::Trace::Off) {
            log::error!(
                "
                Feature 'trace' has been removed temporarily; \
                see https://github.com/gfx-rs/wgpu/issues/5974. \
                The `trace` parameter will have no effect."
            );
        }

        let (device_id, queue_id) = unsafe {
            self.0.create_device_from_hal(
                adapter.id,
                hal_device.into(),
                &desc.map_label(|l| l.map(Borrowed)),
                None,
                None,
            )
        }?;
        // One sink serves both device and queue so errors from either
        // surface through the same scope stack / uncaptured handler.
        let error_sink = Arc::new(Mutex::new(ErrorSinkRaw::new()));
        let device = CoreDevice {
            context: self.clone(),
            id: device_id,
            error_sink: error_sink.clone(),
            features: desc.required_features,
        };
        let queue = CoreQueue {
            context: self.clone(),
            id: queue_id,
            error_sink,
        };
        Ok((device, queue))
    }

    /// Register an existing wgpu-hal texture with `device`. Creation
    /// errors are routed through the device's error sink rather than
    /// returned; a `CoreTexture` handle is produced either way.
    ///
    /// # Safety
    ///
    /// - `hal_texture` must be created from `device` and match `desc` —
    ///   TODO confirm exact contract upstream.
    pub unsafe fn create_texture_from_hal<A: hal::Api>(
        &self,
        hal_texture: A::Texture,
        device: &CoreDevice,
        desc: &TextureDescriptor<'_>,
    ) -> CoreTexture {
        let descriptor = desc.map_label_and_view_formats(|l| l.map(Borrowed), |v| v.to_vec());
        let (id, error) = unsafe {
            self.0
                .create_texture_from_hal(Box::new(hal_texture), device.id, &descriptor, None)
        };
        if let Some(cause) = error {
            self.handle_error(
                &device.error_sink,
                cause,
                desc.label,
                "Device::create_texture_from_hal",
            );
        }
        CoreTexture {
            context: self.clone(),
            id,
            error_sink: Arc::clone(&device.error_sink),
        }
    }

    /// # Safety
    ///
    /// - `hal_buffer` must be created from `device`.
    /// - `hal_buffer` must be created respecting `desc`
    /// - `hal_buffer` must be initialized
    /// - `hal_buffer` must not have zero size.
    pub unsafe fn create_buffer_from_hal<A: hal::Api>(
        &self,
        hal_buffer: A::Buffer,
        device: &CoreDevice,
        desc: &BufferDescriptor<'_>,
    ) -> CoreBuffer {
        let (id, error) = unsafe {
            self.0.create_buffer_from_hal::<A>(
                hal_buffer,
                device.id,
                &desc.map_label(|l| l.map(Borrowed)),
                None,
            )
        };
        // Errors are reported via the sink; the returned handle is still
        // usable as an error id, mirroring WebGPU's error model.
        if let Some(cause) = error {
            self.handle_error(
                &device.error_sink,
                cause,
                desc.label,
                "Device::create_buffer_from_hal",
            );
        }
        CoreBuffer {
            context: self.clone(),
            id,
            error_sink: Arc::clone(&device.error_sink),
        }
    }

    /// Borrow the wgpu-hal device backing `device`, if it uses backend `A`.
    pub unsafe fn device_as_hal<A: hal::Api>(
        &self,
        device: &CoreDevice,
    ) -> Option<impl Deref<Target = A::Device>> {
        unsafe { self.0.device_as_hal::<A>(device.id) }
    }

    /// Borrow the wgpu-hal surface backing `surface`, if it uses backend `A`.
    pub unsafe fn surface_as_hal<A: hal::Api>(
        &self,
        surface: &CoreSurface,
    ) -> Option<impl Deref<Target = A::Surface>> {
        unsafe { self.0.surface_as_hal::<A>(surface.id) }
    }

    /// Borrow the wgpu-hal texture backing `texture`, if it uses backend `A`.
    pub unsafe fn texture_as_hal<A: hal::Api>(
        &self,
        texture: &CoreTexture,
    ) -> Option<impl Deref<Target = A::Texture>> {
        unsafe { self.0.texture_as_hal::<A>(texture.id) }
    }

    /// Borrow the wgpu-hal view backing `texture_view`, if it uses backend `A`.
    pub unsafe fn texture_view_as_hal<A: hal::Api>(
        &self,
        texture_view: &CoreTextureView,
    ) -> Option<impl Deref<Target = A::TextureView>> {
        unsafe { self.0.texture_view_as_hal::<A>(texture_view.id) }
    }

    /// This method will start the wgpu_core level command recording.
    pub unsafe fn command_encoder_as_hal_mut<
        A: hal::Api,
        F: FnOnce(Option<&mut A::CommandEncoder>) -> R,
        R,
    >(
        &self,
        command_encoder: &CoreCommandEncoder,
        hal_command_encoder_callback: F,
    ) -> R {
        unsafe {
            self.0.command_encoder_as_hal_mut::<A, F, R>(
                command_encoder.id,
                hal_command_encoder_callback,
            )
        }
    }

    /// Borrow the acceleration structure backing `blas` for backend `A`.
    pub unsafe fn blas_as_hal<A: hal::Api>(
        &self,
        blas: &CoreBlas,
    ) -> Option<impl Deref<Target = A::AccelerationStructure>> {
        unsafe { self.0.blas_as_hal::<A>(blas.id) }
    }

    /// Borrow the acceleration structure backing `tlas` for backend `A`.
    pub unsafe fn tlas_as_hal<A: hal::Api>(
        &self,
        tlas: &CoreTlas,
    ) -> Option<impl Deref<Target = A::AccelerationStructure>> {
        unsafe { self.0.tlas_as_hal::<A>(tlas.id) }
    }

    /// Snapshot of the global's internal resource bookkeeping.
    pub fn generate_report(&self) -> wgc::global::GlobalReport {
        self.0.generate_report()
    }

    /// Shared slow path for error reporting: wraps `source` in a
    /// `ContextError` (capturing `fn_ident` and `label`), converts it to a
    /// `crate::Error` per `error_type`, and hands it to the sink.
    /// `DeviceLost` is dropped here — it reaches the user via the
    /// device-lost callback instead.
    #[cold]
    #[track_caller]
    #[inline(never)]
    fn handle_error_inner(
        &self,
        sink_mutex: &Mutex<ErrorSinkRaw>,
        error_type: ErrorType,
        source: ContextErrorSource,
        label: Label<'_>,
        fn_ident: &'static str,
    ) {
        let source: ErrorSource = Box::new(wgc::error::ContextError {
            fn_ident,
            source,
            label: label.unwrap_or_default().to_string(),
        });
        // Scope block: the sink lock must be released before invoking any
        // user-supplied handler below.
        let final_error_handling = {
            let mut sink = sink_mutex.lock();
            // Lazily formatted: OutOfMemory carries no description.
            let description = || self.format_error(&*source);
            let error = match error_type {
                ErrorType::Internal => {
                    let description = description();
                    crate::Error::Internal {
                        source,
                        description,
                    }
                }
                ErrorType::OutOfMemory => crate::Error::OutOfMemory { source },
                ErrorType::Validation => {
                    let description = description();
                    crate::Error::Validation {
                        source,
                        description,
                    }
                }
                ErrorType::DeviceLost => return, // will be surfaced via callback
            };
            sink.handle_error_or_return_handler(error)
        };

        if let Some(f) = final_error_handling {
            // If the user has provided their own `uncaptured_handler` callback, invoke it now,
            // having released our lock on `sink_mutex`. See the comments on
            // `handle_error_or_return_handler` for details.
            f();
        }
    }

    /// Report a labeled error from operation `fn_ident` through the sink.
    #[inline]
    #[track_caller]
    fn handle_error(
        &self,
        sink_mutex: &Mutex<ErrorSinkRaw>,
        source: impl WebGpuError + WasmNotSendSync + 'static,
        label: Label<'_>,
        fn_ident: &'static str,
    ) {
        let error_type = source.webgpu_error_type();
        self.handle_error_inner(sink_mutex, error_type, Box::new(source), label, fn_ident)
    }

    /// Like [`Self::handle_error`], for operations with no resource label.
    #[inline]
    #[track_caller]
    fn handle_error_nolabel(
        &self,
        sink_mutex: &Mutex<ErrorSinkRaw>,
        source: impl WebGpuError + WasmNotSendSync + 'static,
        fn_ident: &'static str,
    ) {
        let error_type = source.webgpu_error_type();
        self.handle_error_inner(sink_mutex, error_type, Box::new(source), None, fn_ident)
    }

    /// Abort on errors that cannot be surfaced through the error sink.
    #[track_caller]
    #[cold]
    fn handle_error_fatal(
        &self,
        cause: impl Error + WasmNotSendSync + 'static,
        operation: &'static str,
    ) -> ! {
        panic!("Error in {operation}: {f}", f = self.format_error(&cause));
    }

    /// Render `err` and its `source()` chain as an indented tree,
    /// flattening `MultiError`s into sibling entries.
    /// NOTE(review): the header always says "Validation Error", even when
    /// this is called for internal or fatal errors — confirm intended.
    #[inline(never)]
    fn format_error(&self, err: &(dyn Error + 'static)) -> String {
        let mut output = String::new();
        let mut level = 1;

        // Recursive walk; `level` tracks indentation depth (2 spaces each).
        fn print_tree(output: &mut String, level: &mut usize, e: &(dyn Error + 'static)) {
            let mut print = |e: &(dyn Error + 'static)| {
                use core::fmt::Write;
                writeln!(output, "{}{}", " ".repeat(*level * 2), e).unwrap();

                if let Some(e) = e.source() {
                    *level += 1;
                    print_tree(output, level, e);
                    *level -= 1;
                }
            };
            if let Some(multi) = e.downcast_ref::<wgc::error::MultiError>() {
                for e in multi.errors() {
                    print(e);
                }
            } else {
                print(e);
            }
        }

        print_tree(&mut output, &mut level, err);

        format!("Validation Error\n\nCaused by:\n{output}")
    }

    /// Borrow the wgpu-hal queue backing `queue`, if it uses backend `A`.
    pub unsafe fn queue_as_hal<A: hal::Api>(
        &self,
        queue: &CoreQueue,
    ) -> Option<impl Deref<Target = A::Queue> + WasmNotSendSync> {
        unsafe { self.0.queue_as_hal::<A>(queue.id) }
    }
}
388
389fn map_buffer_copy_view(
390    view: crate::TexelCopyBufferInfo<'_>,
391) -> wgt::TexelCopyBufferInfo<wgc::id::BufferId> {
392    wgt::TexelCopyBufferInfo {
393        buffer: view.buffer.inner.as_core().id,
394        layout: view.layout,
395    }
396}
397
398fn map_texture_copy_view(
399    view: crate::TexelCopyTextureInfo<'_>,
400) -> wgc::command::TexelCopyTextureInfo {
401    wgc::command::TexelCopyTextureInfo {
402        texture: view.texture.inner.as_core().id,
403        mip_level: view.mip_level,
404        origin: view.origin,
405        aspect: view.aspect,
406    }
407}
408
409#[cfg_attr(not(webgl), expect(unused))]
410fn map_texture_tagged_copy_view(
411    view: crate::CopyExternalImageDestInfo<&api::Texture>,
412) -> wgt::CopyExternalImageDestInfo<wgc::id::TextureId> {
413    wgt::CopyExternalImageDestInfo {
414        texture: view.texture.inner.as_core().id,
415        mip_level: view.mip_level,
416        origin: view.origin,
417        aspect: view.aspect,
418        color_space: view.color_space,
419        premultiplied_alpha: view.premultiplied_alpha,
420    }
421}
422
423fn map_load_op<V: Copy>(load: &LoadOp<V>) -> LoadOp<Option<V>> {
424    match load {
425        LoadOp::Clear(clear_value) => LoadOp::Clear(Some(*clear_value)),
426        LoadOp::Load => LoadOp::Load,
427    }
428}
429
430fn map_pass_channel<V: Copy>(ops: Option<&Operations<V>>) -> wgc::command::PassChannel<Option<V>> {
431    match ops {
432        Some(&Operations { load, store }) => wgc::command::PassChannel {
433            load_op: Some(map_load_op(&load)),
434            store_op: Some(store),
435            read_only: false,
436        },
437        None => wgc::command::PassChannel {
438            load_op: None,
439            store_op: None,
440            read_only: true,
441        },
442    }
443}
444
/// Core handle to a presentation surface.
#[derive(Debug)]
pub struct CoreSurface {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::SurfaceId,
    /// Configured device is needed to know which backend
    /// code to execute when acquiring a new frame.
    configured_device: Mutex<Option<wgc::id::DeviceId>>,
    /// The error sink with which to report errors.
    /// `None` if the surface has not been configured.
    error_sink: Mutex<Option<ErrorSink>>,
}
456
/// Core handle to an adapter (physical device).
#[derive(Debug)]
pub struct CoreAdapter {
    pub(crate) context: ContextWgpuCore,
    pub(crate) id: wgc::id::AdapterId,
}

/// Core handle to a logical device.
#[derive(Debug)]
pub struct CoreDevice {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::DeviceId,
    // Shared with the queue created alongside this device.
    error_sink: ErrorSink,
    // Features requested at device creation (`desc.required_features`).
    features: Features,
}

/// Core handle to a GPU buffer; errors route to the owning device's sink.
#[derive(Debug)]
pub struct CoreBuffer {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BufferId,
    error_sink: ErrorSink,
}

/// Core handle to a compiled shader module, with the compilation
/// messages captured at creation time.
#[derive(Debug)]
pub struct CoreShaderModule {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::ShaderModuleId,
    compilation_info: CompilationInfo,
}

/// Core handle to a bind group layout.
#[derive(Debug)]
pub struct CoreBindGroupLayout {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BindGroupLayoutId,
}

/// Core handle to a bind group.
#[derive(Debug)]
pub struct CoreBindGroup {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BindGroupId,
}

/// Core handle to a texture; errors route to the owning device's sink.
#[derive(Debug)]
pub struct CoreTexture {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::TextureId,
    error_sink: ErrorSink,
}

/// Core handle to a texture view.
#[derive(Debug)]
pub struct CoreTextureView {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::TextureViewId,
}

/// Core handle to an external texture.
#[derive(Debug)]
pub struct CoreExternalTexture {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::ExternalTextureId,
}

/// Core handle to a sampler.
#[derive(Debug)]
pub struct CoreSampler {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::SamplerId,
}

/// Core handle to a query set.
#[derive(Debug)]
pub struct CoreQuerySet {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::QuerySetId,
}
527
/// Core handle to a pipeline layout.
#[derive(Debug)]
pub struct CorePipelineLayout {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::PipelineLayoutId,
}

/// Core handle to a pipeline cache.
#[derive(Debug)]
pub struct CorePipelineCache {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::PipelineCacheId,
}

/// Core handle to a finished command buffer.
#[derive(Debug)]
pub struct CoreCommandBuffer {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::CommandBufferId,
}

/// Core render-bundle encoder; `id` exists only to give the handle an
/// identity for comparison/hashing (see `impl_eq_ord_hash_proxy!` below).
#[derive(Debug)]
pub struct CoreRenderBundleEncoder {
    pub(crate) context: ContextWgpuCore,
    encoder: wgc::command::RenderBundleEncoder,
    id: crate::cmp::Identifier,
}

/// Core handle to a finished render bundle.
#[derive(Debug)]
pub struct CoreRenderBundle {
    id: wgc::id::RenderBundleId,
}

/// Core handle to a queue; shares its error sink with its device.
#[derive(Debug)]
pub struct CoreQueue {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::QueueId,
    error_sink: ErrorSink,
}
564
/// Core handle to a compute pipeline.
#[derive(Debug)]
pub struct CoreComputePipeline {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::ComputePipelineId,
    error_sink: ErrorSink,
}

/// Core handle to a render pipeline.
#[derive(Debug)]
pub struct CoreRenderPipeline {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::RenderPipelineId,
    error_sink: ErrorSink,
}

/// In-progress compute pass; `id` provides comparison/hash identity only.
#[derive(Debug)]
pub struct CoreComputePass {
    pub(crate) context: ContextWgpuCore,
    pass: wgc::command::ComputePass,
    error_sink: ErrorSink,
    id: crate::cmp::Identifier,
}

/// In-progress render pass; `id` provides comparison/hash identity only.
#[derive(Debug)]
pub struct CoreRenderPass {
    pub(crate) context: ContextWgpuCore,
    pass: wgc::command::RenderPass,
    error_sink: ErrorSink,
    id: crate::cmp::Identifier,
}

/// Core handle to a command encoder.
#[derive(Debug)]
pub struct CoreCommandEncoder {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::CommandEncoderId,
    error_sink: ErrorSink,
}

/// Core handle to a bottom-level acceleration structure.
#[derive(Debug)]
pub struct CoreBlas {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BlasId,
    error_sink: ErrorSink,
}

/// Core handle to a top-level acceleration structure.
#[derive(Debug)]
pub struct CoreTlas {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::TlasId,
    // error_sink: ErrorSink,
}

/// Per-frame bookkeeping returned alongside a surface texture,
/// used to present or discard the frame.
#[derive(Debug)]
pub struct CoreSurfaceOutputDetail {
    context: ContextWgpuCore,
    surface_id: wgc::id::SurfaceId,
}
621
/// Shared, lockable error sink; cloned between a device, its queue, and
/// the resources created from them.
type ErrorSink = Arc<Mutex<ErrorSinkRaw>>;

/// One pushed error scope: captures the first error matching `filter`.
struct ErrorScope {
    // First matching error seen while this scope was innermost, if any.
    error: Option<crate::Error>,
    // Which error category this scope captures.
    filter: crate::ErrorFilter,
}

/// The actual sink state behind [`ErrorSink`].
struct ErrorSinkRaw {
    // Stack of active error scopes; innermost is last.
    scopes: Vec<ErrorScope>,
    // User-installed handler for errors no scope captures.
    uncaptured_handler: Option<Arc<dyn crate::UncapturedErrorHandler>>,
}
633
impl ErrorSinkRaw {
    /// Empty sink: no scopes, no custom uncaptured-error handler.
    fn new() -> ErrorSinkRaw {
        ErrorSinkRaw {
            scopes: Vec::new(),
            uncaptured_handler: None,
        }
    }

    /// Deliver the error to
    ///
    /// * the innermost error scope, if any, or
    /// * the uncaptured error handler, if there is one, or
    /// * [`default_error_handler()`].
    ///
    /// If a closure is returned, the caller should call it immediately after dropping the
    /// [`ErrorSink`] mutex guard. This makes sure that the user callback is not called with
    /// a wgpu mutex held.
    #[track_caller]
    #[must_use]
    fn handle_error_or_return_handler(&mut self, err: crate::Error) -> Option<impl FnOnce()> {
        // Map the error variant to the scope filter that captures it.
        let filter = match err {
            crate::Error::OutOfMemory { .. } => crate::ErrorFilter::OutOfMemory,
            crate::Error::Validation { .. } => crate::ErrorFilter::Validation,
            crate::Error::Internal { .. } => crate::ErrorFilter::Internal,
        };
        // Innermost (most recently pushed) matching scope wins.
        match self
            .scopes
            .iter_mut()
            .rev()
            .find(|scope| scope.filter == filter)
        {
            Some(scope) => {
                // A scope only records the first error it sees.
                if scope.error.is_none() {
                    scope.error = Some(err);
                }
                None
            }
            None => {
                if let Some(custom_handler) = &self.uncaptured_handler {
                    // Defer to the caller so the user callback runs without
                    // the sink lock held.
                    let custom_handler = Arc::clone(custom_handler);
                    Some(move || (custom_handler)(err))
                } else {
                    // direct call preserves #[track_caller] where dyn can't
                    default_error_handler(err)
                }
            }
        }
    }
}
683
684impl fmt::Debug for ErrorSinkRaw {
685    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
686        write!(f, "ErrorSink")
687    }
688}
689
/// Last-resort handler for errors not captured by any scope and with no
/// user-installed uncaptured-error handler: log, then panic.
#[track_caller]
fn default_error_handler(err: crate::Error) -> ! {
    log::error!("Handling wgpu errors as fatal by default");
    panic!("wgpu error: {err}\n");
}
695
/// Convert shader-module creation failures into WebGPU-style compilation
/// info, so callers get per-message diagnostics where available.
impl From<CreateShaderModuleError> for CompilationInfo {
    fn from(value: CreateShaderModuleError) -> Self {
        match value {
            // Front-end parse errors carry source locations; delegate to
            // their own `Into<CompilationInfo>` impls.
            #[cfg(feature = "wgsl")]
            CreateShaderModuleError::Parsing(v) => v.into(),
            #[cfg(feature = "glsl")]
            CreateShaderModuleError::ParsingGlsl(v) => v.into(),
            #[cfg(feature = "spirv")]
            CreateShaderModuleError::ParsingSpirV(v) => v.into(),
            CreateShaderModuleError::Validation(v) => v.into(),
            // Device errors are reported through the error sink, and are not compilation errors.
            // Same goes for native shader module generation errors.
            CreateShaderModuleError::Device(_) | CreateShaderModuleError::Generation => {
                CompilationInfo {
                    messages: Vec::new(),
                }
            }
            // Everything else is an error message without location information.
            _ => CompilationInfo {
                messages: vec![CompilationMessage {
                    message: value.to_string(),
                    message_type: CompilationMessageType::Error,
                    location: None,
                }],
            },
        }
    }
}
724
/// Staging buffer handed out by `Queue::write_buffer_with`, paired with
/// its mapped memory range.
#[derive(Debug)]
pub struct CoreQueueWriteBuffer {
    buffer_id: wgc::id::StagingBufferId,
    mapping: CoreBufferMappedRange,
}
730
/// A mapped slice of buffer memory: raw pointer plus length in bytes.
#[derive(Debug)]
pub struct CoreBufferMappedRange {
    ptr: NonNull<u8>,
    size: usize,
}
736
// SAFETY: NOTE(review) — these assert that the mapped pointer may be
// shared across threads; presumably sound because the mapping is owned by
// wgpu-core for its full lifetime. Confirm against wgpu-core's mapping
// guarantees.
#[cfg(send_sync)]
unsafe impl Send for CoreBufferMappedRange {}
#[cfg(send_sync)]
unsafe impl Sync for CoreBufferMappedRange {}
741
impl Drop for CoreBufferMappedRange {
    fn drop(&mut self) {
        // Intentionally left blank so that `BufferMappedRange` still
        // implements `Drop`, to match the web backend
    }
}
748
// Identity-based Eq/Ord/Hash for all core wrapper types: equality follows
// the underlying id (or the Arc address for the context, or the mapped
// pointer for staging buffers), never structural contents.
crate::cmp::impl_eq_ord_hash_arc_address!(ContextWgpuCore => .0);
crate::cmp::impl_eq_ord_hash_proxy!(CoreAdapter => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreDevice => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreQueue => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreShaderModule => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBindGroupLayout => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBindGroup => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreTextureView => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreSampler => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBuffer => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreTexture => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreExternalTexture => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBlas => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreTlas => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreQuerySet => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CorePipelineLayout => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderPipeline => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreComputePipeline => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CorePipelineCache => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreCommandEncoder => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreComputePass => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderPass => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreCommandBuffer => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderBundleEncoder => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderBundle => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreSurface => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreSurfaceOutputDetail => .surface_id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreQueueWriteBuffer => .mapping.ptr);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBufferMappedRange => .ptr);
778
impl dispatch::InstanceInterface for ContextWgpuCore {
    /// Create a fresh wgpu-core global for this instance descriptor.
    fn new(desc: &wgt::InstanceDescriptor) -> Self
    where
        Self: Sized,
    {
        Self(Arc::new(wgc::global::Global::new("wgpu", desc)))
    }

    /// Create a surface from a raw platform target. Platform-specific
    /// variants are compiled in per backend cfg; an unconfigured
    /// `CoreSurface` is returned on success.
    unsafe fn create_surface(
        &self,
        target: crate::api::SurfaceTargetUnsafe,
    ) -> Result<dispatch::DispatchSurface, crate::CreateSurfaceError> {
        let id = match target {
            SurfaceTargetUnsafe::RawHandle {
                raw_display_handle,
                raw_window_handle,
            } => unsafe {
                self.0
                    .instance_create_surface(raw_display_handle, raw_window_handle, None)
            },

            // Direct rendering manager target (non-Apple unix only).
            #[cfg(all(unix, not(target_vendor = "apple"), not(target_family = "wasm")))]
            SurfaceTargetUnsafe::Drm {
                fd,
                plane,
                connector_id,
                width,
                height,
                refresh_rate,
            } => unsafe {
                self.0.instance_create_surface_from_drm(
                    fd,
                    plane,
                    connector_id,
                    width,
                    height,
                    refresh_rate,
                    None,
                )
            },

            #[cfg(metal)]
            SurfaceTargetUnsafe::CoreAnimationLayer(layer) => unsafe {
                self.0.instance_create_surface_metal(layer, None)
            },

            #[cfg(dx12)]
            SurfaceTargetUnsafe::CompositionVisual(visual) => unsafe {
                self.0.instance_create_surface_from_visual(visual, None)
            },

            #[cfg(dx12)]
            SurfaceTargetUnsafe::SurfaceHandle(surface_handle) => unsafe {
                self.0
                    .instance_create_surface_from_surface_handle(surface_handle, None)
            },

            #[cfg(dx12)]
            SurfaceTargetUnsafe::SwapChainPanel(swap_chain_panel) => unsafe {
                self.0
                    .instance_create_surface_from_swap_chain_panel(swap_chain_panel, None)
            },
        }?;

        Ok(CoreSurface {
            context: self.clone(),
            id,
            // Populated on first `configure`.
            configured_device: Mutex::default(),
            error_sink: Mutex::default(),
        }
        .into())
    }

    /// Pick an adapter matching `options`. The core call is synchronous;
    /// the returned future is already resolved.
    fn request_adapter(
        &self,
        options: &crate::api::RequestAdapterOptions<'_, '_>,
    ) -> Pin<Box<dyn dispatch::RequestAdapterFuture>> {
        let id = self.0.request_adapter(
            &wgc::instance::RequestAdapterOptions {
                power_preference: options.power_preference,
                force_fallback_adapter: options.force_fallback_adapter,
                compatible_surface: options
                    .compatible_surface
                    .map(|surface| surface.inner.as_core().id),
            },
            wgt::Backends::all(),
            None,
        );
        let adapter = id.map(|id| {
            let core = CoreAdapter {
                context: self.clone(),
                id,
            };
            let generic: dispatch::DispatchAdapter = core.into();
            generic
        });
        Box::pin(ready(adapter))
    }

    /// Poll every device on every backend; returns whether all queues are
    /// empty. Poll failures are treated as fatal (panic).
    fn poll_all_devices(&self, force_wait: bool) -> bool {
        match self.0.poll_all_devices(force_wait) {
            Ok(all_queue_empty) => all_queue_empty,
            Err(err) => self.handle_error_fatal(err, "Instance::poll_all_devices"),
        }
    }

    /// Report which WGSL language extensions naga implements, mapped to
    /// the public feature flags.
    #[cfg(feature = "wgsl")]
    fn wgsl_language_features(&self) -> crate::WgslLanguageFeatures {
        use wgc::naga::front::wgsl::ImplementedLanguageExtension;
        ImplementedLanguageExtension::all().iter().copied().fold(
            crate::WgslLanguageFeatures::empty(),
            |acc, wle| {
                acc | match wle {
                    ImplementedLanguageExtension::ReadOnlyAndReadWriteStorageTextures => {
                        crate::WgslLanguageFeatures::ReadOnlyAndReadWriteStorageTextures
                    }
                    ImplementedLanguageExtension::Packed4x8IntegerDotProduct => {
                        crate::WgslLanguageFeatures::Packed4x8IntegerDotProduct
                    }
                    ImplementedLanguageExtension::PointerCompositeAccess => {
                        crate::WgslLanguageFeatures::PointerCompositeAccess
                    }
                }
            },
        )
    }
}
906
impl dispatch::AdapterInterface for CoreAdapter {
    /// Open a device and queue on this adapter. The core call is
    /// synchronous; the returned future is already resolved. Device and
    /// queue share one error sink.
    fn request_device(
        &self,
        desc: &crate::DeviceDescriptor<'_>,
    ) -> Pin<Box<dyn dispatch::RequestDeviceFuture>> {
        // Tracing was removed upstream; warn loudly instead of silently
        // ignoring a requested trace.
        if !matches!(desc.trace, wgt::Trace::Off) {
            log::error!(
                "
                Feature 'trace' has been removed temporarily; \
                see https://github.com/gfx-rs/wgpu/issues/5974. \
                The `trace` parameter will have no effect."
            );
        }

        let res = self.context.0.adapter_request_device(
            self.id,
            &desc.map_label(|l| l.map(Borrowed)),
            None,
            None,
        );
        let (device_id, queue_id) = match res {
            Ok(ids) => ids,
            Err(err) => {
                return Box::pin(ready(Err(err.into())));
            }
        };
        let error_sink = Arc::new(Mutex::new(ErrorSinkRaw::new()));
        let device = CoreDevice {
            context: self.context.clone(),
            id: device_id,
            error_sink: error_sink.clone(),
            features: desc.required_features,
        };
        let queue = CoreQueue {
            context: self.context.clone(),
            id: queue_id,
            error_sink,
        };
        Box::pin(ready(Ok((device.into(), queue.into()))))
    }

    /// Whether this adapter can present to the given surface.
    fn is_surface_supported(&self, surface: &dispatch::DispatchSurface) -> bool {
        let surface = surface.as_core();

        self.context
            .0
            .adapter_is_surface_supported(self.id, surface.id)
    }

    fn features(&self) -> crate::Features {
        self.context.0.adapter_features(self.id)
    }

    fn limits(&self) -> crate::Limits {
        self.context.0.adapter_limits(self.id)
    }

    fn downlevel_capabilities(&self) -> crate::DownlevelCapabilities {
        self.context.0.adapter_downlevel_capabilities(self.id)
    }

    fn get_info(&self) -> crate::AdapterInfo {
        self.context.0.adapter_get_info(self.id)
    }

    /// Per-format feature support beyond the baseline guarantees.
    fn get_texture_format_features(
        &self,
        format: crate::TextureFormat,
    ) -> crate::TextureFormatFeatures {
        self.context
            .0
            .adapter_get_texture_format_features(self.id, format)
    }

    fn get_presentation_timestamp(&self) -> crate::PresentationTimestamp {
        self.context.0.adapter_get_presentation_timestamp(self.id)
    }
}
985
986impl Drop for CoreAdapter {
987    fn drop(&mut self) {
988        self.context.0.adapter_drop(self.id)
989    }
990}
991
992impl dispatch::DeviceInterface for CoreDevice {
993    fn features(&self) -> crate::Features {
994        self.context.0.device_features(self.id)
995    }
996
997    fn limits(&self) -> crate::Limits {
998        self.context.0.device_limits(self.id)
999    }
1000
    // If we have no way to create a shader module, we can't return one, and so most of the function is unreachable.
    #[cfg_attr(
        not(any(
            feature = "spirv",
            feature = "glsl",
            feature = "wgsl",
            feature = "naga-ir"
        )),
        expect(unused)
    )]
    // Creates a shader module from any enabled frontend source, reporting
    // compilation errors both through the error sink and as `CompilationInfo`
    // stored on the returned module.
    fn create_shader_module(
        &self,
        desc: crate::ShaderModuleDescriptor<'_>,
        shader_bound_checks: wgt::ShaderRuntimeChecks,
    ) -> dispatch::DispatchShaderModule {
        let descriptor = wgc::pipeline::ShaderModuleDescriptor {
            label: desc.label.map(Borrowed),
            runtime_checks: shader_bound_checks,
        };
        // Translate the frontend-agnostic `ShaderSource` into wgpu-core's source
        // enum; each arm exists only when its frontend feature is compiled in.
        let source = match desc.source {
            #[cfg(feature = "spirv")]
            ShaderSource::SpirV(ref spv) => {
                // Parse the given shader code and store its representation.
                let options = naga::front::spv::Options {
                    adjust_coordinate_space: false, // we require NDC_Y_UP feature
                    strict_capabilities: true,
                    block_ctx_dump_prefix: None,
                };
                wgc::pipeline::ShaderModuleSource::SpirV(Borrowed(spv), options)
            }
            #[cfg(feature = "glsl")]
            ShaderSource::Glsl {
                ref shader,
                stage,
                defines,
            } => {
                // GLSL needs the target stage plus preprocessor defines.
                let options = naga::front::glsl::Options {
                    stage,
                    defines: defines
                        .iter()
                        .map(|&(key, value)| (String::from(key), String::from(value)))
                        .collect(),
                };
                wgc::pipeline::ShaderModuleSource::Glsl(Borrowed(shader), options)
            }
            #[cfg(feature = "wgsl")]
            ShaderSource::Wgsl(ref code) => wgc::pipeline::ShaderModuleSource::Wgsl(Borrowed(code)),
            #[cfg(feature = "naga-ir")]
            ShaderSource::Naga(module) => wgc::pipeline::ShaderModuleSource::Naga(module),
            // `Dummy` only exists to keep the enum non-empty with no frontends
            // enabled; reaching it at runtime is a bug.
            ShaderSource::Dummy(_) => panic!("found `ShaderSource::Dummy`"),
        };
        let (id, error) =
            self.context
                .0
                .device_create_shader_module(self.id, &descriptor, source, None);
        // On failure, report through the sink AND keep the diagnostics so the
        // caller can query them via `get_compilation_info`.
        let compilation_info = match error {
            Some(cause) => {
                self.context.handle_error(
                    &self.error_sink,
                    cause.clone(),
                    desc.label,
                    "Device::create_shader_module",
                );
                CompilationInfo::from(cause)
            }
            None => CompilationInfo { messages: vec![] },
        };

        CoreShaderModule {
            context: self.context.clone(),
            id,
            compilation_info,
        }
        .into()
    }
1076
1077    unsafe fn create_shader_module_passthrough(
1078        &self,
1079        desc: &crate::ShaderModuleDescriptorPassthrough<'_>,
1080    ) -> dispatch::DispatchShaderModule {
1081        let desc = desc.map_label(|l| l.map(Cow::from));
1082        let (id, error) = unsafe {
1083            self.context
1084                .0
1085                .device_create_shader_module_passthrough(self.id, &desc, None)
1086        };
1087
1088        let compilation_info = match error {
1089            Some(cause) => {
1090                self.context.handle_error(
1091                    &self.error_sink,
1092                    cause.clone(),
1093                    desc.label.as_deref(),
1094                    "Device::create_shader_module_passthrough",
1095                );
1096                CompilationInfo::from(cause)
1097            }
1098            None => CompilationInfo { messages: vec![] },
1099        };
1100
1101        CoreShaderModule {
1102            context: self.context.clone(),
1103            id,
1104            compilation_info,
1105        }
1106        .into()
1107    }
1108
1109    fn create_bind_group_layout(
1110        &self,
1111        desc: &crate::BindGroupLayoutDescriptor<'_>,
1112    ) -> dispatch::DispatchBindGroupLayout {
1113        let descriptor = wgc::binding_model::BindGroupLayoutDescriptor {
1114            label: desc.label.map(Borrowed),
1115            entries: Borrowed(desc.entries),
1116        };
1117        let (id, error) =
1118            self.context
1119                .0
1120                .device_create_bind_group_layout(self.id, &descriptor, None);
1121        if let Some(cause) = error {
1122            self.context.handle_error(
1123                &self.error_sink,
1124                cause,
1125                desc.label,
1126                "Device::create_bind_group_layout",
1127            );
1128        }
1129        CoreBindGroupLayout {
1130            context: self.context.clone(),
1131            id,
1132        }
1133        .into()
1134    }
1135
    // Creates a bind group. Array bindings (texture-view / sampler / buffer
    // arrays) are flattened in two passes: first all arrayed resource ids are
    // gathered into contiguous Vecs, then the per-entry mapping borrows
    // consecutive sub-slices of those Vecs. Both passes iterate `desc.entries`
    // in the same order, which is what keeps the `remaining_*` slices aligned
    // with their entries — do not reorder.
    fn create_bind_group(
        &self,
        desc: &crate::BindGroupDescriptor<'_>,
    ) -> dispatch::DispatchBindGroup {
        use wgc::binding_model as bm;

        let mut arrayed_texture_views = Vec::new();
        let mut arrayed_samplers = Vec::new();
        // Only gather when the feature is on; otherwise the Vecs stay empty and
        // the array arms below would slice out of an empty slice (validation
        // rejects such descriptors anyway).
        if self.features.contains(Features::TEXTURE_BINDING_ARRAY) {
            // gather all the array view IDs first
            for entry in desc.entries.iter() {
                if let BindingResource::TextureViewArray(array) = entry.resource {
                    arrayed_texture_views.extend(array.iter().map(|view| view.inner.as_core().id));
                }
                if let BindingResource::SamplerArray(array) = entry.resource {
                    arrayed_samplers.extend(array.iter().map(|sampler| sampler.inner.as_core().id));
                }
            }
        }
        // Cursors into the gathered ids; each array entry consumes its prefix.
        let mut remaining_arrayed_texture_views = &arrayed_texture_views[..];
        let mut remaining_arrayed_samplers = &arrayed_samplers[..];

        let mut arrayed_buffer_bindings = Vec::new();
        if self.features.contains(Features::BUFFER_BINDING_ARRAY) {
            // gather all the buffers first
            for entry in desc.entries.iter() {
                if let BindingResource::BufferArray(array) = entry.resource {
                    arrayed_buffer_bindings.extend(array.iter().map(|binding| bm::BufferBinding {
                        buffer: binding.buffer.inner.as_core().id,
                        offset: binding.offset,
                        size: binding.size,
                    }));
                }
            }
        }
        let mut remaining_arrayed_buffer_bindings = &arrayed_buffer_bindings[..];

        // Second pass: translate each public entry into a wgpu-core entry,
        // borrowing from the gathered Vecs for the array variants.
        let entries = desc
            .entries
            .iter()
            .map(|entry| bm::BindGroupEntry {
                binding: entry.binding,
                resource: match entry.resource {
                    BindingResource::Buffer(BufferBinding {
                        buffer,
                        offset,
                        size,
                    }) => bm::BindingResource::Buffer(bm::BufferBinding {
                        buffer: buffer.inner.as_core().id,
                        offset,
                        size,
                    }),
                    BindingResource::BufferArray(array) => {
                        // Take this entry's bindings off the front of the cursor.
                        let slice = &remaining_arrayed_buffer_bindings[..array.len()];
                        remaining_arrayed_buffer_bindings =
                            &remaining_arrayed_buffer_bindings[array.len()..];
                        bm::BindingResource::BufferArray(Borrowed(slice))
                    }
                    BindingResource::Sampler(sampler) => {
                        bm::BindingResource::Sampler(sampler.inner.as_core().id)
                    }
                    BindingResource::SamplerArray(array) => {
                        let slice = &remaining_arrayed_samplers[..array.len()];
                        remaining_arrayed_samplers = &remaining_arrayed_samplers[array.len()..];
                        bm::BindingResource::SamplerArray(Borrowed(slice))
                    }
                    BindingResource::TextureView(texture_view) => {
                        bm::BindingResource::TextureView(texture_view.inner.as_core().id)
                    }
                    BindingResource::TextureViewArray(array) => {
                        let slice = &remaining_arrayed_texture_views[..array.len()];
                        remaining_arrayed_texture_views =
                            &remaining_arrayed_texture_views[array.len()..];
                        bm::BindingResource::TextureViewArray(Borrowed(slice))
                    }
                    BindingResource::AccelerationStructure(acceleration_structure) => {
                        bm::BindingResource::AccelerationStructure(
                            acceleration_structure.inner.as_core().id,
                        )
                    }
                    BindingResource::ExternalTexture(external_texture) => {
                        bm::BindingResource::ExternalTexture(external_texture.inner.as_core().id)
                    }
                },
            })
            .collect::<Vec<_>>();
        let descriptor = bm::BindGroupDescriptor {
            label: desc.label.as_ref().map(|label| Borrowed(&label[..])),
            layout: desc.layout.inner.as_core().id,
            entries: Borrowed(&entries),
        };

        let (id, error) = self
            .context
            .0
            .device_create_bind_group(self.id, &descriptor, None);
        if let Some(cause) = error {
            self.context.handle_error(
                &self.error_sink,
                cause,
                desc.label,
                "Device::create_bind_group",
            );
        }
        CoreBindGroup {
            context: self.context.clone(),
            id,
        }
        .into()
    }
1246
1247    fn create_pipeline_layout(
1248        &self,
1249        desc: &crate::PipelineLayoutDescriptor<'_>,
1250    ) -> dispatch::DispatchPipelineLayout {
1251        // Limit is always less or equal to hal::MAX_BIND_GROUPS, so this is always right
1252        // Guards following ArrayVec
1253        assert!(
1254            desc.bind_group_layouts.len() <= wgc::MAX_BIND_GROUPS,
1255            "Bind group layout count {} exceeds device bind group limit {}",
1256            desc.bind_group_layouts.len(),
1257            wgc::MAX_BIND_GROUPS
1258        );
1259
1260        let temp_layouts = desc
1261            .bind_group_layouts
1262            .iter()
1263            .map(|bgl| bgl.inner.as_core().id)
1264            .collect::<ArrayVec<_, { wgc::MAX_BIND_GROUPS }>>();
1265        let descriptor = wgc::binding_model::PipelineLayoutDescriptor {
1266            label: desc.label.map(Borrowed),
1267            bind_group_layouts: Borrowed(&temp_layouts),
1268            push_constant_ranges: Borrowed(desc.push_constant_ranges),
1269        };
1270
1271        let (id, error) = self
1272            .context
1273            .0
1274            .device_create_pipeline_layout(self.id, &descriptor, None);
1275        if let Some(cause) = error {
1276            self.context.handle_error(
1277                &self.error_sink,
1278                cause,
1279                desc.label,
1280                "Device::create_pipeline_layout",
1281            );
1282        }
1283        CorePipelineLayout {
1284            context: self.context.clone(),
1285            id,
1286        }
1287        .into()
1288    }
1289
    // Creates a render pipeline. Maps the public descriptor into wgpu-core's
    // form (resolving all wrapper objects to ids), logs internal shader
    // translation failures as wgpu bugs, and reports errors to the sink.
    fn create_render_pipeline(
        &self,
        desc: &crate::RenderPipelineDescriptor<'_>,
    ) -> dispatch::DispatchRenderPipeline {
        use wgc::pipeline as pipe;

        let vertex_buffers: ArrayVec<_, { wgc::MAX_VERTEX_BUFFERS }> = desc
            .vertex
            .buffers
            .iter()
            .map(|vbuf| pipe::VertexBufferLayout {
                array_stride: vbuf.array_stride,
                step_mode: vbuf.step_mode,
                attributes: Borrowed(vbuf.attributes),
            })
            .collect();

        // Pipeline-overridable constants for the vertex stage, keyed by name.
        let vert_constants = desc
            .vertex
            .compilation_options
            .constants
            .iter()
            .map(|&(key, value)| (String::from(key), value))
            .collect();

        let descriptor = pipe::RenderPipelineDescriptor {
            label: desc.label.map(Borrowed),
            layout: desc.layout.map(|layout| layout.inner.as_core().id),
            vertex: pipe::VertexState {
                stage: pipe::ProgrammableStageDescriptor {
                    module: desc.vertex.module.inner.as_core().id,
                    entry_point: desc.vertex.entry_point.map(Borrowed),
                    constants: vert_constants,
                    zero_initialize_workgroup_memory: desc
                        .vertex
                        .compilation_options
                        .zero_initialize_workgroup_memory,
                },
                buffers: Borrowed(&vertex_buffers),
            },
            primitive: desc.primitive,
            depth_stencil: desc.depth_stencil.clone(),
            multisample: desc.multisample,
            // Fragment stage is optional (e.g. depth-only pipelines).
            fragment: desc.fragment.as_ref().map(|frag| {
                let frag_constants = frag
                    .compilation_options
                    .constants
                    .iter()
                    .map(|&(key, value)| (String::from(key), value))
                    .collect();
                pipe::FragmentState {
                    stage: pipe::ProgrammableStageDescriptor {
                        module: frag.module.inner.as_core().id,
                        entry_point: frag.entry_point.map(Borrowed),
                        constants: frag_constants,
                        zero_initialize_workgroup_memory: frag
                            .compilation_options
                            .zero_initialize_workgroup_memory,
                    },
                    targets: Borrowed(frag.targets),
                }
            }),
            multiview: desc.multiview,
            cache: desc.cache.map(|cache| cache.inner.as_core().id),
        };

        let (id, error) = self
            .context
            .0
            .device_create_render_pipeline(self.id, &descriptor, None);
        if let Some(cause) = error {
            // Internal errors mean shader translation failed inside wgpu itself
            // (not invalid user input) — log them as reportable bugs.
            if let wgc::pipeline::CreateRenderPipelineError::Internal { stage, ref error } = cause {
                log::error!("Shader translation error for stage {stage:?}: {error}");
                log::error!("Please report it to https://github.com/gfx-rs/wgpu");
            }
            self.context.handle_error(
                &self.error_sink,
                cause,
                desc.label,
                "Device::create_render_pipeline",
            );
        }
        CoreRenderPipeline {
            context: self.context.clone(),
            id,
            error_sink: Arc::clone(&self.error_sink),
        }
        .into()
    }
1379
1380    fn create_mesh_pipeline(
1381        &self,
1382        desc: &crate::MeshPipelineDescriptor<'_>,
1383    ) -> dispatch::DispatchRenderPipeline {
1384        use wgc::pipeline as pipe;
1385
1386        let mesh_constants = desc
1387            .mesh
1388            .compilation_options
1389            .constants
1390            .iter()
1391            .map(|&(key, value)| (String::from(key), value))
1392            .collect();
1393        let descriptor = pipe::MeshPipelineDescriptor {
1394            label: desc.label.map(Borrowed),
1395            task: desc.task.as_ref().map(|task| {
1396                let task_constants = task
1397                    .compilation_options
1398                    .constants
1399                    .iter()
1400                    .map(|&(key, value)| (String::from(key), value))
1401                    .collect();
1402                pipe::TaskState {
1403                    stage: pipe::ProgrammableStageDescriptor {
1404                        module: task.module.inner.as_core().id,
1405                        entry_point: task.entry_point.map(Borrowed),
1406                        constants: task_constants,
1407                        zero_initialize_workgroup_memory: desc
1408                            .mesh
1409                            .compilation_options
1410                            .zero_initialize_workgroup_memory,
1411                    },
1412                }
1413            }),
1414            mesh: pipe::MeshState {
1415                stage: pipe::ProgrammableStageDescriptor {
1416                    module: desc.mesh.module.inner.as_core().id,
1417                    entry_point: desc.mesh.entry_point.map(Borrowed),
1418                    constants: mesh_constants,
1419                    zero_initialize_workgroup_memory: desc
1420                        .mesh
1421                        .compilation_options
1422                        .zero_initialize_workgroup_memory,
1423                },
1424            },
1425            layout: desc.layout.map(|layout| layout.inner.as_core().id),
1426            primitive: desc.primitive,
1427            depth_stencil: desc.depth_stencil.clone(),
1428            multisample: desc.multisample,
1429            fragment: desc.fragment.as_ref().map(|frag| {
1430                let frag_constants = frag
1431                    .compilation_options
1432                    .constants
1433                    .iter()
1434                    .map(|&(key, value)| (String::from(key), value))
1435                    .collect();
1436                pipe::FragmentState {
1437                    stage: pipe::ProgrammableStageDescriptor {
1438                        module: frag.module.inner.as_core().id,
1439                        entry_point: frag.entry_point.map(Borrowed),
1440                        constants: frag_constants,
1441                        zero_initialize_workgroup_memory: frag
1442                            .compilation_options
1443                            .zero_initialize_workgroup_memory,
1444                    },
1445                    targets: Borrowed(frag.targets),
1446                }
1447            }),
1448            multiview: desc.multiview,
1449            cache: desc.cache.map(|cache| cache.inner.as_core().id),
1450        };
1451
1452        let (id, error) = self
1453            .context
1454            .0
1455            .device_create_mesh_pipeline(self.id, &descriptor, None);
1456        if let Some(cause) = error {
1457            if let wgc::pipeline::CreateRenderPipelineError::Internal { stage, ref error } = cause {
1458                log::error!("Shader translation error for stage {stage:?}: {error}");
1459                log::error!("Please report it to https://github.com/gfx-rs/wgpu");
1460            }
1461            self.context.handle_error(
1462                &self.error_sink,
1463                cause,
1464                desc.label,
1465                "Device::create_render_pipeline",
1466            );
1467        }
1468        CoreRenderPipeline {
1469            context: self.context.clone(),
1470            id,
1471            error_sink: Arc::clone(&self.error_sink),
1472        }
1473        .into()
1474    }
1475
1476    fn create_compute_pipeline(
1477        &self,
1478        desc: &crate::ComputePipelineDescriptor<'_>,
1479    ) -> dispatch::DispatchComputePipeline {
1480        use wgc::pipeline as pipe;
1481
1482        let constants = desc
1483            .compilation_options
1484            .constants
1485            .iter()
1486            .map(|&(key, value)| (String::from(key), value))
1487            .collect();
1488
1489        let descriptor = pipe::ComputePipelineDescriptor {
1490            label: desc.label.map(Borrowed),
1491            layout: desc.layout.map(|pll| pll.inner.as_core().id),
1492            stage: pipe::ProgrammableStageDescriptor {
1493                module: desc.module.inner.as_core().id,
1494                entry_point: desc.entry_point.map(Borrowed),
1495                constants,
1496                zero_initialize_workgroup_memory: desc
1497                    .compilation_options
1498                    .zero_initialize_workgroup_memory,
1499            },
1500            cache: desc.cache.map(|cache| cache.inner.as_core().id),
1501        };
1502
1503        let (id, error) = self
1504            .context
1505            .0
1506            .device_create_compute_pipeline(self.id, &descriptor, None);
1507        if let Some(cause) = error {
1508            if let wgc::pipeline::CreateComputePipelineError::Internal(ref error) = cause {
1509                log::error!(
1510                    "Shader translation error for stage {:?}: {}",
1511                    wgt::ShaderStages::COMPUTE,
1512                    error
1513                );
1514                log::error!("Please report it to https://github.com/gfx-rs/wgpu");
1515            }
1516            self.context.handle_error(
1517                &self.error_sink,
1518                cause,
1519                desc.label,
1520                "Device::create_compute_pipeline",
1521            );
1522        }
1523        CoreComputePipeline {
1524            context: self.context.clone(),
1525            id,
1526            error_sink: Arc::clone(&self.error_sink),
1527        }
1528        .into()
1529    }
1530
    // Creates a pipeline cache, optionally seeded with previously serialized
    // cache data.
    //
    // # Safety
    //
    // Forwards wgpu-core's contract for `desc.data`: the caller must uphold
    // whatever validity requirements `device_create_pipeline_cache` imposes on
    // externally supplied cache bytes.
    unsafe fn create_pipeline_cache(
        &self,
        desc: &crate::PipelineCacheDescriptor<'_>,
    ) -> dispatch::DispatchPipelineCache {
        use wgc::pipeline as pipe;

        let descriptor = pipe::PipelineCacheDescriptor {
            label: desc.label.map(Borrowed),
            data: desc.data.map(Borrowed),
            fallback: desc.fallback,
        };
        // SAFETY: forwarded from this method's own unsafe contract.
        let (id, error) = unsafe {
            self.context
                .0
                .device_create_pipeline_cache(self.id, &descriptor, None)
        };
        if let Some(cause) = error {
            // NOTE(review): context string doesn't follow the
            // "Device::create_*" pattern used everywhere else — confirm
            // whether "Device::create_pipeline_cache" was intended.
            self.context.handle_error(
                &self.error_sink,
                cause,
                desc.label,
                "Device::device_create_pipeline_cache_init",
            );
        }
        CorePipelineCache {
            context: self.context.clone(),
            id,
        }
        .into()
    }
1561
1562    fn create_buffer(&self, desc: &crate::BufferDescriptor<'_>) -> dispatch::DispatchBuffer {
1563        let (id, error) = self.context.0.device_create_buffer(
1564            self.id,
1565            &desc.map_label(|l| l.map(Borrowed)),
1566            None,
1567        );
1568        if let Some(cause) = error {
1569            self.context
1570                .handle_error(&self.error_sink, cause, desc.label, "Device::create_buffer");
1571        }
1572
1573        CoreBuffer {
1574            context: self.context.clone(),
1575            id,
1576            error_sink: Arc::clone(&self.error_sink),
1577        }
1578        .into()
1579    }
1580
1581    fn create_texture(&self, desc: &crate::TextureDescriptor<'_>) -> dispatch::DispatchTexture {
1582        let wgt_desc = desc.map_label_and_view_formats(|l| l.map(Borrowed), |v| v.to_vec());
1583        let (id, error) = self
1584            .context
1585            .0
1586            .device_create_texture(self.id, &wgt_desc, None);
1587        if let Some(cause) = error {
1588            self.context.handle_error(
1589                &self.error_sink,
1590                cause,
1591                desc.label,
1592                "Device::create_texture",
1593            );
1594        }
1595
1596        CoreTexture {
1597            context: self.context.clone(),
1598            id,
1599            error_sink: Arc::clone(&self.error_sink),
1600        }
1601        .into()
1602    }
1603
1604    fn create_external_texture(
1605        &self,
1606        desc: &crate::ExternalTextureDescriptor<'_>,
1607        planes: &[&crate::TextureView],
1608    ) -> dispatch::DispatchExternalTexture {
1609        let wgt_desc = desc.map_label(|l| l.map(Borrowed));
1610        let planes = planes
1611            .iter()
1612            .map(|plane| plane.inner.as_core().id)
1613            .collect::<Vec<_>>();
1614        let (id, error) = self
1615            .context
1616            .0
1617            .device_create_external_texture(self.id, &wgt_desc, &planes, None);
1618        if let Some(cause) = error {
1619            self.context.handle_error(
1620                &self.error_sink,
1621                cause,
1622                desc.label,
1623                "Device::create_external_texture",
1624            );
1625        }
1626
1627        CoreExternalTexture {
1628            context: self.context.clone(),
1629            id,
1630        }
1631        .into()
1632    }
1633
1634    fn create_blas(
1635        &self,
1636        desc: &crate::CreateBlasDescriptor<'_>,
1637        sizes: crate::BlasGeometrySizeDescriptors,
1638    ) -> (Option<u64>, dispatch::DispatchBlas) {
1639        let global = &self.context.0;
1640        let (id, handle, error) =
1641            global.device_create_blas(self.id, &desc.map_label(|l| l.map(Borrowed)), sizes, None);
1642        if let Some(cause) = error {
1643            self.context
1644                .handle_error(&self.error_sink, cause, desc.label, "Device::create_blas");
1645        }
1646        (
1647            handle,
1648            CoreBlas {
1649                context: self.context.clone(),
1650                id,
1651                error_sink: Arc::clone(&self.error_sink),
1652            }
1653            .into(),
1654        )
1655    }
1656
1657    fn create_tlas(&self, desc: &crate::CreateTlasDescriptor<'_>) -> dispatch::DispatchTlas {
1658        let global = &self.context.0;
1659        let (id, error) =
1660            global.device_create_tlas(self.id, &desc.map_label(|l| l.map(Borrowed)), None);
1661        if let Some(cause) = error {
1662            self.context
1663                .handle_error(&self.error_sink, cause, desc.label, "Device::create_tlas");
1664        }
1665        CoreTlas {
1666            context: self.context.clone(),
1667            id,
1668            // error_sink: Arc::clone(&self.error_sink),
1669        }
1670        .into()
1671    }
1672
1673    fn create_sampler(&self, desc: &crate::SamplerDescriptor<'_>) -> dispatch::DispatchSampler {
1674        let descriptor = wgc::resource::SamplerDescriptor {
1675            label: desc.label.map(Borrowed),
1676            address_modes: [
1677                desc.address_mode_u,
1678                desc.address_mode_v,
1679                desc.address_mode_w,
1680            ],
1681            mag_filter: desc.mag_filter,
1682            min_filter: desc.min_filter,
1683            mipmap_filter: desc.mipmap_filter,
1684            lod_min_clamp: desc.lod_min_clamp,
1685            lod_max_clamp: desc.lod_max_clamp,
1686            compare: desc.compare,
1687            anisotropy_clamp: desc.anisotropy_clamp,
1688            border_color: desc.border_color,
1689        };
1690
1691        let (id, error) = self
1692            .context
1693            .0
1694            .device_create_sampler(self.id, &descriptor, None);
1695        if let Some(cause) = error {
1696            self.context.handle_error(
1697                &self.error_sink,
1698                cause,
1699                desc.label,
1700                "Device::create_sampler",
1701            );
1702        }
1703        CoreSampler {
1704            context: self.context.clone(),
1705            id,
1706        }
1707        .into()
1708    }
1709
1710    fn create_query_set(&self, desc: &crate::QuerySetDescriptor<'_>) -> dispatch::DispatchQuerySet {
1711        let (id, error) = self.context.0.device_create_query_set(
1712            self.id,
1713            &desc.map_label(|l| l.map(Borrowed)),
1714            None,
1715        );
1716        if let Some(cause) = error {
1717            self.context
1718                .handle_error_nolabel(&self.error_sink, cause, "Device::create_query_set");
1719        }
1720        CoreQuerySet {
1721            context: self.context.clone(),
1722            id,
1723        }
1724        .into()
1725    }
1726
1727    fn create_command_encoder(
1728        &self,
1729        desc: &crate::CommandEncoderDescriptor<'_>,
1730    ) -> dispatch::DispatchCommandEncoder {
1731        let (id, error) = self.context.0.device_create_command_encoder(
1732            self.id,
1733            &desc.map_label(|l| l.map(Borrowed)),
1734            None,
1735        );
1736        if let Some(cause) = error {
1737            self.context.handle_error(
1738                &self.error_sink,
1739                cause,
1740                desc.label,
1741                "Device::create_command_encoder",
1742            );
1743        }
1744
1745        CoreCommandEncoder {
1746            context: self.context.clone(),
1747            id,
1748            error_sink: Arc::clone(&self.error_sink),
1749        }
1750        .into()
1751    }
1752
1753    fn create_render_bundle_encoder(
1754        &self,
1755        desc: &crate::RenderBundleEncoderDescriptor<'_>,
1756    ) -> dispatch::DispatchRenderBundleEncoder {
1757        let descriptor = wgc::command::RenderBundleEncoderDescriptor {
1758            label: desc.label.map(Borrowed),
1759            color_formats: Borrowed(desc.color_formats),
1760            depth_stencil: desc.depth_stencil,
1761            sample_count: desc.sample_count,
1762            multiview: desc.multiview,
1763        };
1764        let encoder = match wgc::command::RenderBundleEncoder::new(&descriptor, self.id, None) {
1765            Ok(encoder) => encoder,
1766            Err(e) => panic!("Error in Device::create_render_bundle_encoder: {e}"),
1767        };
1768
1769        CoreRenderBundleEncoder {
1770            context: self.context.clone(),
1771            encoder,
1772            id: crate::cmp::Identifier::create(),
1773        }
1774        .into()
1775    }
1776
    /// Registers the closure to be invoked when this device is lost.
    fn set_device_lost_callback(&self, device_lost_callback: dispatch::BoxDeviceLostCallback) {
        self.context
            .0
            .device_set_device_lost_closure(self.id, device_lost_callback);
    }
1782
    /// Installs (or replaces) the handler invoked for errors that no error
    /// scope captures.
    fn on_uncaptured_error(&self, handler: Arc<dyn crate::UncapturedErrorHandler>) {
        let mut error_sink = self.error_sink.lock();
        error_sink.uncaptured_handler = Some(handler);
    }
1787
    /// Pushes a new, initially-empty error scope with the given filter onto
    /// this device's scope stack.
    fn push_error_scope(&self, filter: crate::ErrorFilter) {
        let mut error_sink = self.error_sink.lock();
        error_sink.scopes.push(ErrorScope {
            error: None,
            filter,
        });
    }
1795
1796    fn pop_error_scope(&self) -> Pin<Box<dyn dispatch::PopErrorScopeFuture>> {
1797        let mut error_sink = self.error_sink.lock();
1798        let scope = error_sink.scopes.pop().unwrap();
1799        Box::pin(ready(scope.error))
1800    }
1801
    /// Starts a graphics-debugger capture for this device.
    /// Safety contract is inherited from the trait's `unsafe fn` declaration.
    unsafe fn start_graphics_debugger_capture(&self) {
        unsafe {
            self.context
                .0
                .device_start_graphics_debugger_capture(self.id)
        };
    }
1809
    /// Stops a previously started graphics-debugger capture for this device.
    /// Safety contract is inherited from the trait's `unsafe fn` declaration.
    unsafe fn stop_graphics_debugger_capture(&self) {
        unsafe {
            self.context
                .0
                .device_stop_graphics_debugger_capture(self.id)
        };
    }
1817
    /// Polls the device. Errors that translate into a `PollError` are
    /// returned to the caller; any other error is handed to
    /// `handle_error_fatal`.
    fn poll(&self, poll_type: wgt::PollType<u64>) -> Result<crate::PollStatus, crate::PollError> {
        match self.context.0.device_poll(self.id, poll_type) {
            Ok(status) => Ok(status),
            Err(err) => {
                // Recoverable subset first: only some wgc errors map onto the
                // public PollError type.
                if let Some(poll_error) = err.to_poll_error() {
                    return Err(poll_error);
                }

                self.context.handle_error_fatal(err, "Device::poll")
            }
        }
    }
1830
    /// Returns wgpu-core's internal resource/usage counters for this device.
    fn get_internal_counters(&self) -> crate::InternalCounters {
        self.context.0.device_get_internal_counters(self.id)
    }
1834
    /// Produces a GPU-memory allocator report, if the backend provides one.
    fn generate_allocator_report(&self) -> Option<wgt::AllocatorReport> {
        self.context.0.device_generate_allocator_report(self.id)
    }
1838
    /// Explicitly destroys the device, ahead of its handle being dropped.
    fn destroy(&self) {
        self.context.0.device_destroy(self.id);
    }
1842}
1843
// Dropping the handle releases the wgc-side device reference.
impl Drop for CoreDevice {
    fn drop(&mut self) {
        self.context.0.device_drop(self.id)
    }
}
1849
impl dispatch::QueueInterface for CoreQueue {
    /// Schedules `data` to be written into `buffer` at `offset`; errors are
    /// reported through the queue's error sink.
    fn write_buffer(
        &self,
        buffer: &dispatch::DispatchBuffer,
        offset: crate::BufferAddress,
        data: &[u8],
    ) {
        let buffer = buffer.as_core();

        match self
            .context
            .0
            .queue_write_buffer(self.id, buffer.id, offset, data)
        {
            Ok(()) => (),
            Err(err) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, err, "Queue::write_buffer")
            }
        }
    }

    /// Allocates a staging buffer of `size` bytes for `write_buffer_with`;
    /// returns `None` (after reporting) on failure.
    fn create_staging_buffer(
        &self,
        size: crate::BufferSize,
    ) -> Option<dispatch::DispatchQueueWriteBuffer> {
        match self
            .context
            .0
            .queue_create_staging_buffer(self.id, size, None)
        {
            Ok((buffer_id, ptr)) => Some(
                CoreQueueWriteBuffer {
                    buffer_id,
                    // The mapped range spans the whole staging allocation.
                    mapping: CoreBufferMappedRange {
                        ptr,
                        size: size.get() as usize,
                    },
                }
                .into(),
            ),
            Err(err) => {
                // Labeled with the public entry point this supports.
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    err,
                    "Queue::write_buffer_with",
                );
                None
            }
        }
    }

    /// Validates a prospective `write_buffer_with` destination range;
    /// `None` (after reporting) means the write would be invalid.
    fn validate_write_buffer(
        &self,
        buffer: &dispatch::DispatchBuffer,
        offset: wgt::BufferAddress,
        size: wgt::BufferSize,
    ) -> Option<()> {
        let buffer = buffer.as_core();

        match self
            .context
            .0
            .queue_validate_write_buffer(self.id, buffer.id, offset, size)
        {
            Ok(()) => Some(()),
            Err(err) => {
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    err,
                    "Queue::write_buffer_with",
                );
                None
            }
        }
    }

    /// Copies a previously filled staging buffer into `buffer` at `offset`
    /// (the final step of `write_buffer_with`).
    fn write_staging_buffer(
        &self,
        buffer: &dispatch::DispatchBuffer,
        offset: crate::BufferAddress,
        staging_buffer: &dispatch::DispatchQueueWriteBuffer,
    ) {
        let buffer = buffer.as_core();
        let staging_buffer = staging_buffer.as_core();

        match self.context.0.queue_write_staging_buffer(
            self.id,
            buffer.id,
            offset,
            staging_buffer.buffer_id,
        ) {
            Ok(()) => (),
            Err(err) => {
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    err,
                    "Queue::write_buffer_with",
                );
            }
        }
    }

    /// Schedules `data` (laid out per `data_layout`) to be written into the
    /// given texture region.
    fn write_texture(
        &self,
        texture: crate::TexelCopyTextureInfo<'_>,
        data: &[u8],
        data_layout: crate::TexelCopyBufferLayout,
        size: crate::Extent3d,
    ) {
        match self.context.0.queue_write_texture(
            self.id,
            &map_texture_copy_view(texture),
            data,
            &data_layout,
            &size,
        ) {
            Ok(()) => (),
            Err(err) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, err, "Queue::write_texture")
            }
        }
    }

    // This method needs to exist if either webgpu or webgl is enabled,
    // but we only actually have an implementation if webgl is enabled.
    #[cfg(web)]
    #[cfg_attr(not(webgl), expect(unused_variables))]
    fn copy_external_image_to_texture(
        &self,
        source: &crate::CopyExternalImageSourceInfo,
        dest: crate::CopyExternalImageDestInfo<&crate::api::Texture>,
        size: crate::Extent3d,
    ) {
        #[cfg(webgl)]
        match self.context.0.queue_copy_external_image_to_texture(
            self.id,
            source,
            map_texture_tagged_copy_view(dest),
            size,
        ) {
            Ok(()) => (),
            Err(err) => self.context.handle_error_nolabel(
                &self.error_sink,
                err,
                "Queue::copy_external_image_to_texture",
            ),
        }
    }

    /// Submits the command buffers and returns the submission index.
    /// On error, the wgc layer still reports an index, which is returned
    /// after the error is sent to the sink.
    fn submit(
        &self,
        command_buffers: &mut dyn Iterator<Item = dispatch::DispatchCommandBuffer>,
    ) -> u64 {
        // Keep the buffers alive until after queue_submit; only their ids are
        // passed down.
        let temp_command_buffers = command_buffers.collect::<SmallVec<[_; 4]>>();
        let command_buffer_ids = temp_command_buffers
            .iter()
            .map(|cmdbuf| cmdbuf.as_core().id)
            .collect::<SmallVec<[_; 4]>>();

        let index = match self.context.0.queue_submit(self.id, &command_buffer_ids) {
            Ok(index) => index,
            Err((index, err)) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, err, "Queue::submit");
                index
            }
        };

        // Explicit drop documents that the buffers must outlive the submit
        // call above.
        drop(temp_command_buffers);

        index
    }

    /// Nanoseconds per timestamp-query tick for this queue.
    fn get_timestamp_period(&self) -> f32 {
        self.context.0.queue_get_timestamp_period(self.id)
    }

    /// Registers `callback` to fire once currently submitted work completes.
    fn on_submitted_work_done(&self, callback: dispatch::BoxSubmittedWorkDoneCallback) {
        self.context
            .0
            .queue_on_submitted_work_done(self.id, callback);
    }

    /// Compacts a BLAS on this queue, returning the (optional) native handle
    /// and a new BLAS wrapper for the compacted structure.
    fn compact_blas(&self, blas: &dispatch::DispatchBlas) -> (Option<u64>, dispatch::DispatchBlas) {
        let (id, handle, error) =
            self.context
                .0
                .queue_compact_blas(self.id, blas.as_core().id, None);

        if let Some(cause) = error {
            self.context
                .handle_error_nolabel(&self.error_sink, cause, "Queue::compact_blas");
        }
        (
            handle,
            CoreBlas {
                context: self.context.clone(),
                id,
                error_sink: Arc::clone(&self.error_sink),
            }
            .into(),
        )
    }
}
2056
// Dropping the handle releases the wgc-side queue reference.
impl Drop for CoreQueue {
    fn drop(&mut self) {
        self.context.0.queue_drop(self.id)
    }
}
2062
impl dispatch::ShaderModuleInterface for CoreShaderModule {
    /// Returns the stored compilation info as an already-resolved future.
    fn get_compilation_info(&self) -> Pin<Box<dyn dispatch::ShaderCompilationInfoFuture>> {
        Box::pin(ready(self.compilation_info.clone()))
    }
}
2068
// Dropping the handle releases the wgc-side shader module.
impl Drop for CoreShaderModule {
    fn drop(&mut self) {
        self.context.0.shader_module_drop(self.id)
    }
}
2074
// Empty impl: no behavior beyond the trait's defaults is needed here.
impl dispatch::BindGroupLayoutInterface for CoreBindGroupLayout {}
2076
// Dropping the handle releases the wgc-side bind group layout.
impl Drop for CoreBindGroupLayout {
    fn drop(&mut self) {
        self.context.0.bind_group_layout_drop(self.id)
    }
}
2082
// Empty impl: no behavior beyond the trait's defaults is needed here.
impl dispatch::BindGroupInterface for CoreBindGroup {}
2084
// Dropping the handle releases the wgc-side bind group.
impl Drop for CoreBindGroup {
    fn drop(&mut self) {
        self.context.0.bind_group_drop(self.id)
    }
}
2090
// Empty impl: no behavior beyond the trait's defaults is needed here.
impl dispatch::TextureViewInterface for CoreTextureView {}
2092
impl Drop for CoreTextureView {
    fn drop(&mut self) {
        // TODO: We don't use this error at all?
        // The result is deliberately discarded: a destructor has no caller
        // to report a failure to.
        let _ = self.context.0.texture_view_drop(self.id);
    }
}
2099
impl dispatch::ExternalTextureInterface for CoreExternalTexture {
    /// Explicitly destroys the external texture ahead of its handle drop.
    fn destroy(&self) {
        self.context.0.external_texture_destroy(self.id);
    }
}
2105
// Dropping the handle releases the wgc-side external texture.
impl Drop for CoreExternalTexture {
    fn drop(&mut self) {
        self.context.0.external_texture_drop(self.id);
    }
}
2111
// Empty impl: no behavior beyond the trait's defaults is needed here.
impl dispatch::SamplerInterface for CoreSampler {}
2113
// Dropping the handle releases the wgc-side sampler.
impl Drop for CoreSampler {
    fn drop(&mut self) {
        self.context.0.sampler_drop(self.id)
    }
}
2119
2120impl dispatch::BufferInterface for CoreBuffer {
2121    fn map_async(
2122        &self,
2123        mode: crate::MapMode,
2124        range: Range<crate::BufferAddress>,
2125        callback: dispatch::BufferMapCallback,
2126    ) {
2127        let operation = wgc::resource::BufferMapOperation {
2128            host: match mode {
2129                MapMode::Read => wgc::device::HostMap::Read,
2130                MapMode::Write => wgc::device::HostMap::Write,
2131            },
2132            callback: Some(Box::new(|status| {
2133                let res = status.map_err(|_| crate::BufferAsyncError);
2134                callback(res);
2135            })),
2136        };
2137
2138        match self.context.0.buffer_map_async(
2139            self.id,
2140            range.start,
2141            Some(range.end - range.start),
2142            operation,
2143        ) {
2144            Ok(_) => (),
2145            Err(cause) => {
2146                self.context
2147                    .handle_error_nolabel(&self.error_sink, cause, "Buffer::map_async")
2148            }
2149        }
2150    }
2151
2152    fn get_mapped_range(
2153        &self,
2154        sub_range: Range<crate::BufferAddress>,
2155    ) -> dispatch::DispatchBufferMappedRange {
2156        let size = sub_range.end - sub_range.start;
2157        match self
2158            .context
2159            .0
2160            .buffer_get_mapped_range(self.id, sub_range.start, Some(size))
2161        {
2162            Ok((ptr, size)) => CoreBufferMappedRange {
2163                ptr,
2164                size: size as usize,
2165            }
2166            .into(),
2167            Err(err) => self
2168                .context
2169                .handle_error_fatal(err, "Buffer::get_mapped_range"),
2170        }
2171    }
2172
2173    fn unmap(&self) {
2174        match self.context.0.buffer_unmap(self.id) {
2175            Ok(()) => (),
2176            Err(cause) => {
2177                self.context
2178                    .handle_error_nolabel(&self.error_sink, cause, "Buffer::buffer_unmap")
2179            }
2180        }
2181    }
2182
2183    fn destroy(&self) {
2184        self.context.0.buffer_destroy(self.id);
2185    }
2186}
2187
// Dropping the handle releases the wgc-side buffer.
impl Drop for CoreBuffer {
    fn drop(&mut self) {
        self.context.0.buffer_drop(self.id)
    }
}
2193
2194impl dispatch::TextureInterface for CoreTexture {
2195    fn create_view(
2196        &self,
2197        desc: &crate::TextureViewDescriptor<'_>,
2198    ) -> dispatch::DispatchTextureView {
2199        let descriptor = wgc::resource::TextureViewDescriptor {
2200            label: desc.label.map(Borrowed),
2201            format: desc.format,
2202            dimension: desc.dimension,
2203            usage: desc.usage,
2204            range: wgt::ImageSubresourceRange {
2205                aspect: desc.aspect,
2206                base_mip_level: desc.base_mip_level,
2207                mip_level_count: desc.mip_level_count,
2208                base_array_layer: desc.base_array_layer,
2209                array_layer_count: desc.array_layer_count,
2210            },
2211        };
2212        let (id, error) = self
2213            .context
2214            .0
2215            .texture_create_view(self.id, &descriptor, None);
2216        if let Some(cause) = error {
2217            self.context
2218                .handle_error(&self.error_sink, cause, desc.label, "Texture::create_view");
2219        }
2220        CoreTextureView {
2221            context: self.context.clone(),
2222            id,
2223        }
2224        .into()
2225    }
2226
2227    fn destroy(&self) {
2228        self.context.0.texture_destroy(self.id);
2229    }
2230}
2231
// Dropping the handle releases the wgc-side texture.
impl Drop for CoreTexture {
    fn drop(&mut self) {
        self.context.0.texture_drop(self.id)
    }
}
2237
2238impl dispatch::BlasInterface for CoreBlas {
2239    fn prepare_compact_async(&self, callback: BlasCompactCallback) {
2240        let callback: Option<wgc::resource::BlasCompactCallback> =
2241            Some(Box::new(|status: BlasPrepareCompactResult| {
2242                let res = status.map_err(|_| crate::BlasAsyncError);
2243                callback(res);
2244            }));
2245
2246        match self.context.0.blas_prepare_compact_async(self.id, callback) {
2247            Ok(_) => (),
2248            Err(cause) => self.context.handle_error_nolabel(
2249                &self.error_sink,
2250                cause,
2251                "Blas::prepare_compact_async",
2252            ),
2253        }
2254    }
2255
2256    fn ready_for_compaction(&self) -> bool {
2257        match self.context.0.ready_for_compaction(self.id) {
2258            Ok(ready) => ready,
2259            Err(cause) => {
2260                self.context.handle_error_nolabel(
2261                    &self.error_sink,
2262                    cause,
2263                    "Blas::ready_for_compaction",
2264                );
2265                // A BLAS is definitely not ready for compaction if it's not valid
2266                false
2267            }
2268        }
2269    }
2270}
2271
// Dropping the handle releases the wgc-side BLAS.
impl Drop for CoreBlas {
    fn drop(&mut self) {
        self.context.0.blas_drop(self.id)
    }
}
2277
// Empty impl: no behavior beyond the trait's defaults is needed here.
impl dispatch::TlasInterface for CoreTlas {}
2279
// Dropping the handle releases the wgc-side TLAS.
impl Drop for CoreTlas {
    fn drop(&mut self) {
        self.context.0.tlas_drop(self.id)
    }
}
2285
// Empty impl: no behavior beyond the trait's defaults is needed here.
impl dispatch::QuerySetInterface for CoreQuerySet {}
2287
// Dropping the handle releases the wgc-side query set.
impl Drop for CoreQuerySet {
    fn drop(&mut self) {
        self.context.0.query_set_drop(self.id)
    }
}
2293
// Empty impl: no behavior beyond the trait's defaults is needed here.
impl dispatch::PipelineLayoutInterface for CorePipelineLayout {}
2295
// Dropping the handle releases the wgc-side pipeline layout.
impl Drop for CorePipelineLayout {
    fn drop(&mut self) {
        self.context.0.pipeline_layout_drop(self.id)
    }
}
2301
2302impl dispatch::RenderPipelineInterface for CoreRenderPipeline {
2303    fn get_bind_group_layout(&self, index: u32) -> dispatch::DispatchBindGroupLayout {
2304        let (id, error) = self
2305            .context
2306            .0
2307            .render_pipeline_get_bind_group_layout(self.id, index, None);
2308        if let Some(err) = error {
2309            self.context.handle_error_nolabel(
2310                &self.error_sink,
2311                err,
2312                "RenderPipeline::get_bind_group_layout",
2313            )
2314        }
2315        CoreBindGroupLayout {
2316            context: self.context.clone(),
2317            id,
2318        }
2319        .into()
2320    }
2321}
2322
// Dropping the handle releases the wgc-side render pipeline.
impl Drop for CoreRenderPipeline {
    fn drop(&mut self) {
        self.context.0.render_pipeline_drop(self.id)
    }
}
2328
2329impl dispatch::ComputePipelineInterface for CoreComputePipeline {
2330    fn get_bind_group_layout(&self, index: u32) -> dispatch::DispatchBindGroupLayout {
2331        let (id, error) = self
2332            .context
2333            .0
2334            .compute_pipeline_get_bind_group_layout(self.id, index, None);
2335        if let Some(err) = error {
2336            self.context.handle_error_nolabel(
2337                &self.error_sink,
2338                err,
2339                "ComputePipeline::get_bind_group_layout",
2340            )
2341        }
2342        CoreBindGroupLayout {
2343            context: self.context.clone(),
2344            id,
2345        }
2346        .into()
2347    }
2348}
2349
// Dropping the handle releases the wgc-side compute pipeline.
impl Drop for CoreComputePipeline {
    fn drop(&mut self) {
        self.context.0.compute_pipeline_drop(self.id)
    }
}
2355
impl dispatch::PipelineCacheInterface for CorePipelineCache {
    /// Serializes the pipeline cache contents, if the backend supports it.
    fn get_data(&self) -> Option<Vec<u8>> {
        self.context.0.pipeline_cache_get_data(self.id)
    }
}
2361
// Dropping the handle releases the wgc-side pipeline cache.
impl Drop for CorePipelineCache {
    fn drop(&mut self) {
        self.context.0.pipeline_cache_drop(self.id)
    }
}
2367
2368impl dispatch::CommandEncoderInterface for CoreCommandEncoder {
2369    fn copy_buffer_to_buffer(
2370        &self,
2371        source: &dispatch::DispatchBuffer,
2372        source_offset: crate::BufferAddress,
2373        destination: &dispatch::DispatchBuffer,
2374        destination_offset: crate::BufferAddress,
2375        copy_size: Option<crate::BufferAddress>,
2376    ) {
2377        let source = source.as_core();
2378        let destination = destination.as_core();
2379
2380        if let Err(cause) = self.context.0.command_encoder_copy_buffer_to_buffer(
2381            self.id,
2382            source.id,
2383            source_offset,
2384            destination.id,
2385            destination_offset,
2386            copy_size,
2387        ) {
2388            self.context.handle_error_nolabel(
2389                &self.error_sink,
2390                cause,
2391                "CommandEncoder::copy_buffer_to_buffer",
2392            );
2393        }
2394    }
2395
2396    fn copy_buffer_to_texture(
2397        &self,
2398        source: crate::TexelCopyBufferInfo<'_>,
2399        destination: crate::TexelCopyTextureInfo<'_>,
2400        copy_size: crate::Extent3d,
2401    ) {
2402        if let Err(cause) = self.context.0.command_encoder_copy_buffer_to_texture(
2403            self.id,
2404            &map_buffer_copy_view(source),
2405            &map_texture_copy_view(destination),
2406            &copy_size,
2407        ) {
2408            self.context.handle_error_nolabel(
2409                &self.error_sink,
2410                cause,
2411                "CommandEncoder::copy_buffer_to_texture",
2412            );
2413        }
2414    }
2415
2416    fn copy_texture_to_buffer(
2417        &self,
2418        source: crate::TexelCopyTextureInfo<'_>,
2419        destination: crate::TexelCopyBufferInfo<'_>,
2420        copy_size: crate::Extent3d,
2421    ) {
2422        if let Err(cause) = self.context.0.command_encoder_copy_texture_to_buffer(
2423            self.id,
2424            &map_texture_copy_view(source),
2425            &map_buffer_copy_view(destination),
2426            &copy_size,
2427        ) {
2428            self.context.handle_error_nolabel(
2429                &self.error_sink,
2430                cause,
2431                "CommandEncoder::copy_texture_to_buffer",
2432            );
2433        }
2434    }
2435
2436    fn copy_texture_to_texture(
2437        &self,
2438        source: crate::TexelCopyTextureInfo<'_>,
2439        destination: crate::TexelCopyTextureInfo<'_>,
2440        copy_size: crate::Extent3d,
2441    ) {
2442        if let Err(cause) = self.context.0.command_encoder_copy_texture_to_texture(
2443            self.id,
2444            &map_texture_copy_view(source),
2445            &map_texture_copy_view(destination),
2446            &copy_size,
2447        ) {
2448            self.context.handle_error_nolabel(
2449                &self.error_sink,
2450                cause,
2451                "CommandEncoder::copy_texture_to_texture",
2452            );
2453        }
2454    }
2455
2456    fn begin_compute_pass(
2457        &self,
2458        desc: &crate::ComputePassDescriptor<'_>,
2459    ) -> dispatch::DispatchComputePass {
2460        let timestamp_writes =
2461            desc.timestamp_writes
2462                .as_ref()
2463                .map(|tw| wgc::command::PassTimestampWrites {
2464                    query_set: tw.query_set.inner.as_core().id,
2465                    beginning_of_pass_write_index: tw.beginning_of_pass_write_index,
2466                    end_of_pass_write_index: tw.end_of_pass_write_index,
2467                });
2468
2469        let (pass, err) = self.context.0.command_encoder_begin_compute_pass(
2470            self.id,
2471            &wgc::command::ComputePassDescriptor {
2472                label: desc.label.map(Borrowed),
2473                timestamp_writes,
2474            },
2475        );
2476
2477        if let Some(cause) = err {
2478            self.context.handle_error(
2479                &self.error_sink,
2480                cause,
2481                desc.label,
2482                "CommandEncoder::begin_compute_pass",
2483            );
2484        }
2485
2486        CoreComputePass {
2487            context: self.context.clone(),
2488            pass,
2489            error_sink: self.error_sink.clone(),
2490            id: crate::cmp::Identifier::create(),
2491        }
2492        .into()
2493    }
2494
    /// Opens a render pass on this encoder, translating the public
    /// descriptor into wgc's form. Errors are reported with the pass label
    /// through the error sink; the pass object is returned regardless.
    fn begin_render_pass(
        &self,
        desc: &crate::RenderPassDescriptor<'_>,
    ) -> dispatch::DispatchRenderPass {
        // Translate each (optional) color attachment to its wgc id form.
        let colors = desc
            .color_attachments
            .iter()
            .map(|ca| {
                ca.as_ref()
                    .map(|at| wgc::command::RenderPassColorAttachment {
                        view: at.view.inner.as_core().id,
                        depth_slice: at.depth_slice,
                        resolve_target: at.resolve_target.map(|view| view.inner.as_core().id),
                        load_op: at.ops.load,
                        store_op: at.ops.store,
                    })
            })
            .collect::<Vec<_>>();

        let depth_stencil = desc.depth_stencil_attachment.as_ref().map(|dsa| {
            wgc::command::RenderPassDepthStencilAttachment {
                view: dsa.view.inner.as_core().id,
                depth: map_pass_channel(dsa.depth_ops.as_ref()),
                stencil: map_pass_channel(dsa.stencil_ops.as_ref()),
            }
        });

        let timestamp_writes =
            desc.timestamp_writes
                .as_ref()
                .map(|tw| wgc::command::PassTimestampWrites {
                    query_set: tw.query_set.inner.as_core().id,
                    beginning_of_pass_write_index: tw.beginning_of_pass_write_index,
                    end_of_pass_write_index: tw.end_of_pass_write_index,
                });

        // The wgc descriptor borrows the locals built above, so they must
        // stay alive across this call.
        let (pass, err) = self.context.0.command_encoder_begin_render_pass(
            self.id,
            &wgc::command::RenderPassDescriptor {
                label: desc.label.map(Borrowed),
                timestamp_writes: timestamp_writes.as_ref(),
                color_attachments: Borrowed(&colors),
                depth_stencil_attachment: depth_stencil.as_ref(),
                occlusion_query_set: desc.occlusion_query_set.map(|qs| qs.inner.as_core().id),
            },
        );

        if let Some(cause) = err {
            self.context.handle_error(
                &self.error_sink,
                cause,
                desc.label,
                "CommandEncoder::begin_render_pass",
            );
        }

        CoreRenderPass {
            context: self.context.clone(),
            pass,
            error_sink: self.error_sink.clone(),
            id: crate::cmp::Identifier::create(),
        }
        .into()
    }
2559
2560    fn finish(&mut self) -> dispatch::DispatchCommandBuffer {
2561        let descriptor = wgt::CommandBufferDescriptor::default();
2562        let (id, error) = self
2563            .context
2564            .0
2565            .command_encoder_finish(self.id, &descriptor, None);
2566        if let Some(cause) = error {
2567            self.context
2568                .handle_error_nolabel(&self.error_sink, cause, "a CommandEncoder");
2569        }
2570        CoreCommandBuffer {
2571            context: self.context.clone(),
2572            id,
2573        }
2574        .into()
2575    }
2576
2577    fn clear_texture(
2578        &self,
2579        texture: &dispatch::DispatchTexture,
2580        subresource_range: &crate::ImageSubresourceRange,
2581    ) {
2582        let texture = texture.as_core();
2583
2584        if let Err(cause) =
2585            self.context
2586                .0
2587                .command_encoder_clear_texture(self.id, texture.id, subresource_range)
2588        {
2589            self.context.handle_error_nolabel(
2590                &self.error_sink,
2591                cause,
2592                "CommandEncoder::clear_texture",
2593            );
2594        }
2595    }
2596
2597    fn clear_buffer(
2598        &self,
2599        buffer: &dispatch::DispatchBuffer,
2600        offset: crate::BufferAddress,
2601        size: Option<crate::BufferAddress>,
2602    ) {
2603        let buffer = buffer.as_core();
2604
2605        if let Err(cause) = self
2606            .context
2607            .0
2608            .command_encoder_clear_buffer(self.id, buffer.id, offset, size)
2609        {
2610            self.context.handle_error_nolabel(
2611                &self.error_sink,
2612                cause,
2613                "CommandEncoder::fill_buffer",
2614            );
2615        }
2616    }
2617
2618    fn insert_debug_marker(&self, label: &str) {
2619        if let Err(cause) = self
2620            .context
2621            .0
2622            .command_encoder_insert_debug_marker(self.id, label)
2623        {
2624            self.context.handle_error_nolabel(
2625                &self.error_sink,
2626                cause,
2627                "CommandEncoder::insert_debug_marker",
2628            );
2629        }
2630    }
2631
2632    fn push_debug_group(&self, label: &str) {
2633        if let Err(cause) = self
2634            .context
2635            .0
2636            .command_encoder_push_debug_group(self.id, label)
2637        {
2638            self.context.handle_error_nolabel(
2639                &self.error_sink,
2640                cause,
2641                "CommandEncoder::push_debug_group",
2642            );
2643        }
2644    }
2645
2646    fn pop_debug_group(&self) {
2647        if let Err(cause) = self.context.0.command_encoder_pop_debug_group(self.id) {
2648            self.context.handle_error_nolabel(
2649                &self.error_sink,
2650                cause,
2651                "CommandEncoder::pop_debug_group",
2652            );
2653        }
2654    }
2655
2656    fn write_timestamp(&self, query_set: &dispatch::DispatchQuerySet, query_index: u32) {
2657        let query_set = query_set.as_core();
2658
2659        if let Err(cause) =
2660            self.context
2661                .0
2662                .command_encoder_write_timestamp(self.id, query_set.id, query_index)
2663        {
2664            self.context.handle_error_nolabel(
2665                &self.error_sink,
2666                cause,
2667                "CommandEncoder::write_timestamp",
2668            );
2669        }
2670    }
2671
2672    fn resolve_query_set(
2673        &self,
2674        query_set: &dispatch::DispatchQuerySet,
2675        first_query: u32,
2676        query_count: u32,
2677        destination: &dispatch::DispatchBuffer,
2678        destination_offset: crate::BufferAddress,
2679    ) {
2680        let query_set = query_set.as_core();
2681        let destination = destination.as_core();
2682
2683        if let Err(cause) = self.context.0.command_encoder_resolve_query_set(
2684            self.id,
2685            query_set.id,
2686            first_query,
2687            query_count,
2688            destination.id,
2689            destination_offset,
2690        ) {
2691            self.context.handle_error_nolabel(
2692                &self.error_sink,
2693                cause,
2694                "CommandEncoder::resolve_query_set",
2695            );
2696        }
2697    }
2698
2699    fn mark_acceleration_structures_built<'a>(
2700        &self,
2701        blas: &mut dyn Iterator<Item = &'a Blas>,
2702        tlas: &mut dyn Iterator<Item = &'a Tlas>,
2703    ) {
2704        let blas = blas
2705            .map(|b| b.inner.as_core().id)
2706            .collect::<SmallVec<[_; 4]>>();
2707        let tlas = tlas
2708            .map(|t| t.inner.as_core().id)
2709            .collect::<SmallVec<[_; 4]>>();
2710        if let Err(cause) = self
2711            .context
2712            .0
2713            .command_encoder_mark_acceleration_structures_built(self.id, &blas, &tlas)
2714        {
2715            self.context.handle_error_nolabel(
2716                &self.error_sink,
2717                cause,
2718                "CommandEncoder::build_acceleration_structures_unsafe_tlas",
2719            );
2720        }
2721    }
2722
2723    fn build_acceleration_structures<'a>(
2724        &self,
2725        blas: &mut dyn Iterator<Item = &'a crate::BlasBuildEntry<'a>>,
2726        tlas: &mut dyn Iterator<Item = &'a crate::Tlas>,
2727    ) {
2728        let blas = blas.map(|e: &crate::BlasBuildEntry<'_>| {
2729            let geometries = match e.geometry {
2730                crate::BlasGeometries::TriangleGeometries(ref triangle_geometries) => {
2731                    let iter = triangle_geometries.iter().map(|tg| {
2732                        wgc::ray_tracing::BlasTriangleGeometry {
2733                            vertex_buffer: tg.vertex_buffer.inner.as_core().id,
2734                            index_buffer: tg.index_buffer.map(|buf| buf.inner.as_core().id),
2735                            transform_buffer: tg.transform_buffer.map(|buf| buf.inner.as_core().id),
2736                            size: tg.size,
2737                            transform_buffer_offset: tg.transform_buffer_offset,
2738                            first_vertex: tg.first_vertex,
2739                            vertex_stride: tg.vertex_stride,
2740                            first_index: tg.first_index,
2741                        }
2742                    });
2743                    wgc::ray_tracing::BlasGeometries::TriangleGeometries(Box::new(iter))
2744                }
2745            };
2746            wgc::ray_tracing::BlasBuildEntry {
2747                blas_id: e.blas.inner.as_core().id,
2748                geometries,
2749            }
2750        });
2751
2752        let tlas = tlas.into_iter().map(|e| {
2753            let instances = e
2754                .instances
2755                .iter()
2756                .map(|instance: &Option<crate::TlasInstance>| {
2757                    instance
2758                        .as_ref()
2759                        .map(|instance| wgc::ray_tracing::TlasInstance {
2760                            blas_id: instance.blas.as_core().id,
2761                            transform: &instance.transform,
2762                            custom_data: instance.custom_data,
2763                            mask: instance.mask,
2764                        })
2765                });
2766            wgc::ray_tracing::TlasPackage {
2767                tlas_id: e.inner.as_core().id,
2768                instances: Box::new(instances),
2769                lowest_unmodified: e.lowest_unmodified,
2770            }
2771        });
2772
2773        if let Err(cause) = self
2774            .context
2775            .0
2776            .command_encoder_build_acceleration_structures(self.id, blas, tlas)
2777        {
2778            self.context.handle_error_nolabel(
2779                &self.error_sink,
2780                cause,
2781                "CommandEncoder::build_acceleration_structures_unsafe_tlas",
2782            );
2783        }
2784    }
2785
2786    fn transition_resources<'a>(
2787        &mut self,
2788        buffer_transitions: &mut dyn Iterator<
2789            Item = wgt::BufferTransition<&'a dispatch::DispatchBuffer>,
2790        >,
2791        texture_transitions: &mut dyn Iterator<
2792            Item = wgt::TextureTransition<&'a dispatch::DispatchTexture>,
2793        >,
2794    ) {
2795        let result = self.context.0.command_encoder_transition_resources(
2796            self.id,
2797            buffer_transitions.map(|t| wgt::BufferTransition {
2798                buffer: t.buffer.as_core().id,
2799                state: t.state,
2800            }),
2801            texture_transitions.map(|t| wgt::TextureTransition {
2802                texture: t.texture.as_core().id,
2803                selector: t.selector.clone(),
2804                state: t.state,
2805            }),
2806        );
2807
2808        if let Err(cause) = result {
2809            self.context.handle_error_nolabel(
2810                &self.error_sink,
2811                cause,
2812                "CommandEncoder::transition_resources",
2813            );
2814        }
2815    }
2816}
2817
2818impl Drop for CoreCommandEncoder {
2819    fn drop(&mut self) {
2820        self.context.0.command_encoder_drop(self.id)
2821    }
2822}
2823
// `CommandBufferInterface` is a pure marker trait here: a finished command
// buffer has no operations of its own in this backend, so the impl is empty.
impl dispatch::CommandBufferInterface for CoreCommandBuffer {}
2825
2826impl Drop for CoreCommandBuffer {
2827    fn drop(&mut self) {
2828        self.context.0.command_buffer_drop(self.id)
2829    }
2830}
2831
2832impl dispatch::ComputePassInterface for CoreComputePass {
2833    fn set_pipeline(&mut self, pipeline: &dispatch::DispatchComputePipeline) {
2834        let pipeline = pipeline.as_core();
2835
2836        if let Err(cause) = self
2837            .context
2838            .0
2839            .compute_pass_set_pipeline(&mut self.pass, pipeline.id)
2840        {
2841            self.context.handle_error(
2842                &self.error_sink,
2843                cause,
2844                self.pass.label(),
2845                "ComputePass::set_pipeline",
2846            );
2847        }
2848    }
2849
2850    fn set_bind_group(
2851        &mut self,
2852        index: u32,
2853        bind_group: Option<&dispatch::DispatchBindGroup>,
2854        offsets: &[crate::DynamicOffset],
2855    ) {
2856        let bg = bind_group.map(|bg| bg.as_core().id);
2857
2858        if let Err(cause) =
2859            self.context
2860                .0
2861                .compute_pass_set_bind_group(&mut self.pass, index, bg, offsets)
2862        {
2863            self.context.handle_error(
2864                &self.error_sink,
2865                cause,
2866                self.pass.label(),
2867                "ComputePass::set_bind_group",
2868            );
2869        }
2870    }
2871
2872    fn set_push_constants(&mut self, offset: u32, data: &[u8]) {
2873        if let Err(cause) =
2874            self.context
2875                .0
2876                .compute_pass_set_push_constants(&mut self.pass, offset, data)
2877        {
2878            self.context.handle_error(
2879                &self.error_sink,
2880                cause,
2881                self.pass.label(),
2882                "ComputePass::set_push_constant",
2883            );
2884        }
2885    }
2886
2887    fn insert_debug_marker(&mut self, label: &str) {
2888        if let Err(cause) =
2889            self.context
2890                .0
2891                .compute_pass_insert_debug_marker(&mut self.pass, label, 0)
2892        {
2893            self.context.handle_error(
2894                &self.error_sink,
2895                cause,
2896                self.pass.label(),
2897                "ComputePass::insert_debug_marker",
2898            );
2899        }
2900    }
2901
2902    fn push_debug_group(&mut self, group_label: &str) {
2903        if let Err(cause) =
2904            self.context
2905                .0
2906                .compute_pass_push_debug_group(&mut self.pass, group_label, 0)
2907        {
2908            self.context.handle_error(
2909                &self.error_sink,
2910                cause,
2911                self.pass.label(),
2912                "ComputePass::push_debug_group",
2913            );
2914        }
2915    }
2916
2917    fn pop_debug_group(&mut self) {
2918        if let Err(cause) = self.context.0.compute_pass_pop_debug_group(&mut self.pass) {
2919            self.context.handle_error(
2920                &self.error_sink,
2921                cause,
2922                self.pass.label(),
2923                "ComputePass::pop_debug_group",
2924            );
2925        }
2926    }
2927
2928    fn write_timestamp(&mut self, query_set: &dispatch::DispatchQuerySet, query_index: u32) {
2929        let query_set = query_set.as_core();
2930
2931        if let Err(cause) =
2932            self.context
2933                .0
2934                .compute_pass_write_timestamp(&mut self.pass, query_set.id, query_index)
2935        {
2936            self.context.handle_error(
2937                &self.error_sink,
2938                cause,
2939                self.pass.label(),
2940                "ComputePass::write_timestamp",
2941            );
2942        }
2943    }
2944
2945    fn begin_pipeline_statistics_query(
2946        &mut self,
2947        query_set: &dispatch::DispatchQuerySet,
2948        query_index: u32,
2949    ) {
2950        let query_set = query_set.as_core();
2951
2952        if let Err(cause) = self.context.0.compute_pass_begin_pipeline_statistics_query(
2953            &mut self.pass,
2954            query_set.id,
2955            query_index,
2956        ) {
2957            self.context.handle_error(
2958                &self.error_sink,
2959                cause,
2960                self.pass.label(),
2961                "ComputePass::begin_pipeline_statistics_query",
2962            );
2963        }
2964    }
2965
2966    fn end_pipeline_statistics_query(&mut self) {
2967        if let Err(cause) = self
2968            .context
2969            .0
2970            .compute_pass_end_pipeline_statistics_query(&mut self.pass)
2971        {
2972            self.context.handle_error(
2973                &self.error_sink,
2974                cause,
2975                self.pass.label(),
2976                "ComputePass::end_pipeline_statistics_query",
2977            );
2978        }
2979    }
2980
2981    fn dispatch_workgroups(&mut self, x: u32, y: u32, z: u32) {
2982        if let Err(cause) = self
2983            .context
2984            .0
2985            .compute_pass_dispatch_workgroups(&mut self.pass, x, y, z)
2986        {
2987            self.context.handle_error(
2988                &self.error_sink,
2989                cause,
2990                self.pass.label(),
2991                "ComputePass::dispatch_workgroups",
2992            );
2993        }
2994    }
2995
2996    fn dispatch_workgroups_indirect(
2997        &mut self,
2998        indirect_buffer: &dispatch::DispatchBuffer,
2999        indirect_offset: crate::BufferAddress,
3000    ) {
3001        let indirect_buffer = indirect_buffer.as_core();
3002
3003        if let Err(cause) = self.context.0.compute_pass_dispatch_workgroups_indirect(
3004            &mut self.pass,
3005            indirect_buffer.id,
3006            indirect_offset,
3007        ) {
3008            self.context.handle_error(
3009                &self.error_sink,
3010                cause,
3011                self.pass.label(),
3012                "ComputePass::dispatch_workgroups_indirect",
3013            );
3014        }
3015    }
3016
3017    fn end(&mut self) {
3018        if let Err(cause) = self.context.0.compute_pass_end(&mut self.pass) {
3019            self.context.handle_error(
3020                &self.error_sink,
3021                cause,
3022                self.pass.label(),
3023                "ComputePass::end",
3024            );
3025        }
3026    }
3027}
3028
3029impl Drop for CoreComputePass {
3030    fn drop(&mut self) {
3031        dispatch::ComputePassInterface::end(self);
3032    }
3033}
3034
// Each method below records one render-pass command into `self.pass`.
// wgpu-core reports validation/recording failures as `Err` values; they are
// not propagated to the caller but routed to `handle_error`, tagged with the
// pass label and the public API entry point that produced them.
impl dispatch::RenderPassInterface for CoreRenderPass {
    fn set_pipeline(&mut self, pipeline: &dispatch::DispatchRenderPipeline) {
        let pipeline = pipeline.as_core();

        if let Err(cause) = self
            .context
            .0
            .render_pass_set_pipeline(&mut self.pass, pipeline.id)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_pipeline",
            );
        }
    }

    fn set_bind_group(
        &mut self,
        index: u32,
        bind_group: Option<&dispatch::DispatchBindGroup>,
        offsets: &[crate::DynamicOffset],
    ) {
        // `None` is forwarded as-is: wgpu-core accepts an optional id to
        // clear bind point `index`.
        let bg = bind_group.map(|bg| bg.as_core().id);

        if let Err(cause) =
            self.context
                .0
                .render_pass_set_bind_group(&mut self.pass, index, bg, offsets)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_bind_group",
            );
        }
    }

    fn set_index_buffer(
        &mut self,
        buffer: &dispatch::DispatchBuffer,
        index_format: crate::IndexFormat,
        offset: crate::BufferAddress,
        size: Option<crate::BufferSize>,
    ) {
        let buffer = buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_set_index_buffer(
            &mut self.pass,
            buffer.id,
            index_format,
            offset,
            size,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_index_buffer",
            );
        }
    }

    fn set_vertex_buffer(
        &mut self,
        slot: u32,
        buffer: &dispatch::DispatchBuffer,
        offset: crate::BufferAddress,
        size: Option<crate::BufferSize>,
    ) {
        let buffer = buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_set_vertex_buffer(
            &mut self.pass,
            slot,
            buffer.id,
            offset,
            size,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_vertex_buffer",
            );
        }
    }

    fn set_push_constants(&mut self, stages: crate::ShaderStages, offset: u32, data: &[u8]) {
        if let Err(cause) =
            self.context
                .0
                .render_pass_set_push_constants(&mut self.pass, stages, offset, data)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_push_constants",
            );
        }
    }

    fn set_blend_constant(&mut self, color: crate::Color) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_set_blend_constant(&mut self.pass, color)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_blend_constant",
            );
        }
    }

    fn set_scissor_rect(&mut self, x: u32, y: u32, width: u32, height: u32) {
        if let Err(cause) =
            self.context
                .0
                .render_pass_set_scissor_rect(&mut self.pass, x, y, width, height)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_scissor_rect",
            );
        }
    }

    fn set_viewport(
        &mut self,
        x: f32,
        y: f32,
        width: f32,
        height: f32,
        min_depth: f32,
        max_depth: f32,
    ) {
        if let Err(cause) = self.context.0.render_pass_set_viewport(
            &mut self.pass,
            x,
            y,
            width,
            height,
            min_depth,
            max_depth,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_viewport",
            );
        }
    }

    fn set_stencil_reference(&mut self, reference: u32) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_set_stencil_reference(&mut self.pass, reference)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_stencil_reference",
            );
        }
    }

    // The half-open `Range` arguments of the draw calls below are converted to
    // the (count, first) pairs that wgpu-core's id-based API expects.
    fn draw(&mut self, vertices: Range<u32>, instances: Range<u32>) {
        if let Err(cause) = self.context.0.render_pass_draw(
            &mut self.pass,
            vertices.end - vertices.start,
            instances.end - instances.start,
            vertices.start,
            instances.start,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw",
            );
        }
    }

    fn draw_indexed(&mut self, indices: Range<u32>, base_vertex: i32, instances: Range<u32>) {
        if let Err(cause) = self.context.0.render_pass_draw_indexed(
            &mut self.pass,
            indices.end - indices.start,
            instances.end - instances.start,
            indices.start,
            base_vertex,
            instances.start,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_indexed",
            );
        }
    }

    fn draw_mesh_tasks(&mut self, group_count_x: u32, group_count_y: u32, group_count_z: u32) {
        if let Err(cause) = self.context.0.render_pass_draw_mesh_tasks(
            &mut self.pass,
            group_count_x,
            group_count_y,
            group_count_z,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_mesh_tasks",
            );
        }
    }

    fn draw_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_draw_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_indirect",
            );
        }
    }

    fn draw_indexed_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_draw_indexed_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_indexed_indirect",
            );
        }
    }

    fn draw_mesh_tasks_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_draw_mesh_tasks_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_mesh_tasks_indirect",
            );
        }
    }

    fn multi_draw_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_multi_draw_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
            count,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_indirect",
            );
        }
    }

    fn multi_draw_indexed_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_multi_draw_indexed_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
            count,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_indexed_indirect",
            );
        }
    }

    fn multi_draw_mesh_tasks_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_multi_draw_mesh_tasks_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
            count,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_mesh_tasks_indirect",
            );
        }
    }

    fn multi_draw_indirect_count(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count_buffer: &dispatch::DispatchBuffer,
        count_buffer_offset: crate::BufferAddress,
        max_count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();
        let count_buffer = count_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_multi_draw_indirect_count(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
            count_buffer.id,
            count_buffer_offset,
            max_count,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_indirect_count",
            );
        }
    }

    fn multi_draw_indexed_indirect_count(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count_buffer: &dispatch::DispatchBuffer,
        count_buffer_offset: crate::BufferAddress,
        max_count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();
        let count_buffer = count_buffer.as_core();

        if let Err(cause) = self
            .context
            .0
            .render_pass_multi_draw_indexed_indirect_count(
                &mut self.pass,
                indirect_buffer.id,
                indirect_offset,
                count_buffer.id,
                count_buffer_offset,
                max_count,
            )
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_indexed_indirect_count",
            );
        }
    }

    fn multi_draw_mesh_tasks_indirect_count(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count_buffer: &dispatch::DispatchBuffer,
        count_buffer_offset: crate::BufferAddress,
        max_count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();
        let count_buffer = count_buffer.as_core();

        if let Err(cause) = self
            .context
            .0
            .render_pass_multi_draw_mesh_tasks_indirect_count(
                &mut self.pass,
                indirect_buffer.id,
                indirect_offset,
                count_buffer.id,
                count_buffer_offset,
                max_count,
            )
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_mesh_tasks_indirect_count",
            );
        }
    }

    // NOTE(review): the trailing `0` passed to the two debug-marker calls
    // below appears to be wgpu-core's marker color argument — confirm against
    // wgc's `render_pass_insert_debug_marker`/`render_pass_push_debug_group`
    // signatures.
    fn insert_debug_marker(&mut self, label: &str) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_insert_debug_marker(&mut self.pass, label, 0)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::insert_debug_marker",
            );
        }
    }

    fn push_debug_group(&mut self, group_label: &str) {
        if let Err(cause) =
            self.context
                .0
                .render_pass_push_debug_group(&mut self.pass, group_label, 0)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::push_debug_group",
            );
        }
    }

    fn pop_debug_group(&mut self) {
        if let Err(cause) = self.context.0.render_pass_pop_debug_group(&mut self.pass) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::pop_debug_group",
            );
        }
    }

    fn write_timestamp(&mut self, query_set: &dispatch::DispatchQuerySet, query_index: u32) {
        let query_set = query_set.as_core();

        if let Err(cause) =
            self.context
                .0
                .render_pass_write_timestamp(&mut self.pass, query_set.id, query_index)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::write_timestamp",
            );
        }
    }

    fn begin_occlusion_query(&mut self, query_index: u32) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_begin_occlusion_query(&mut self.pass, query_index)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::begin_occlusion_query",
            );
        }
    }

    fn end_occlusion_query(&mut self) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_end_occlusion_query(&mut self.pass)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::end_occlusion_query",
            );
        }
    }

    fn begin_pipeline_statistics_query(
        &mut self,
        query_set: &dispatch::DispatchQuerySet,
        query_index: u32,
    ) {
        let query_set = query_set.as_core();

        if let Err(cause) = self.context.0.render_pass_begin_pipeline_statistics_query(
            &mut self.pass,
            query_set.id,
            query_index,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::begin_pipeline_statistics_query",
            );
        }
    }

    fn end_pipeline_statistics_query(&mut self) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_end_pipeline_statistics_query(&mut self.pass)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::end_pipeline_statistics_query",
            );
        }
    }

    fn execute_bundles(
        &mut self,
        render_bundles: &mut dyn Iterator<Item = &dispatch::DispatchRenderBundle>,
    ) {
        // Collect bundle ids into inline storage (up to 4 without a heap
        // allocation) before handing them to wgpu-core as a slice.
        let temp_render_bundles = render_bundles
            .map(|rb| rb.as_core().id)
            .collect::<SmallVec<[_; 4]>>();
        if let Err(cause) = self
            .context
            .0
            .render_pass_execute_bundles(&mut self.pass, &temp_render_bundles)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::execute_bundles",
            );
        }
    }

    // `end` is also invoked by `CoreRenderPass`'s `Drop` impl, so the pass is
    // closed out even when the user never calls it explicitly.
    fn end(&mut self) {
        if let Err(cause) = self.context.0.render_pass_end(&mut self.pass) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::end",
            );
        }
    }
}
3643
3644impl Drop for CoreRenderPass {
3645    fn drop(&mut self) {
3646        dispatch::RenderPassInterface::end(self);
3647    }
3648}
3649
3650impl dispatch::RenderBundleEncoderInterface for CoreRenderBundleEncoder {
3651    fn set_pipeline(&mut self, pipeline: &dispatch::DispatchRenderPipeline) {
3652        let pipeline = pipeline.as_core();
3653
3654        wgpu_render_bundle_set_pipeline(&mut self.encoder, pipeline.id)
3655    }
3656
    /// Records a bind-group assignment at bind point `index` (`None` clears
    /// the slot), together with its dynamic offsets.
    fn set_bind_group(
        &mut self,
        index: u32,
        bind_group: Option<&dispatch::DispatchBindGroup>,
        offsets: &[crate::DynamicOffset],
    ) {
        let bg = bind_group.map(|bg| bg.as_core().id);

        // SAFETY: `offsets` is a live slice for the whole call, so the
        // pointer/length pair handed to the FFI-style helper is valid.
        unsafe {
            wgpu_render_bundle_set_bind_group(
                &mut self.encoder,
                index,
                bg,
                offsets.as_ptr(),
                offsets.len(),
            )
        }
    }
3675
3676    fn set_index_buffer(
3677        &mut self,
3678        buffer: &dispatch::DispatchBuffer,
3679        index_format: crate::IndexFormat,
3680        offset: crate::BufferAddress,
3681        size: Option<crate::BufferSize>,
3682    ) {
3683        let buffer = buffer.as_core();
3684
3685        self.encoder
3686            .set_index_buffer(buffer.id, index_format, offset, size)
3687    }
3688
3689    fn set_vertex_buffer(
3690        &mut self,
3691        slot: u32,
3692        buffer: &dispatch::DispatchBuffer,
3693        offset: crate::BufferAddress,
3694        size: Option<crate::BufferSize>,
3695    ) {
3696        let buffer = buffer.as_core();
3697
3698        wgpu_render_bundle_set_vertex_buffer(&mut self.encoder, slot, buffer.id, offset, size)
3699    }
3700
3701    fn set_push_constants(&mut self, stages: crate::ShaderStages, offset: u32, data: &[u8]) {
3702        unsafe {
3703            wgpu_render_bundle_set_push_constants(
3704                &mut self.encoder,
3705                stages,
3706                offset,
3707                data.len().try_into().unwrap(),
3708                data.as_ptr(),
3709            )
3710        }
3711    }
3712
3713    fn draw(&mut self, vertices: Range<u32>, instances: Range<u32>) {
3714        wgpu_render_bundle_draw(
3715            &mut self.encoder,
3716            vertices.end - vertices.start,
3717            instances.end - instances.start,
3718            vertices.start,
3719            instances.start,
3720        )
3721    }
3722
3723    fn draw_indexed(&mut self, indices: Range<u32>, base_vertex: i32, instances: Range<u32>) {
3724        wgpu_render_bundle_draw_indexed(
3725            &mut self.encoder,
3726            indices.end - indices.start,
3727            instances.end - instances.start,
3728            indices.start,
3729            base_vertex,
3730            instances.start,
3731        )
3732    }
3733
3734    fn draw_indirect(
3735        &mut self,
3736        indirect_buffer: &dispatch::DispatchBuffer,
3737        indirect_offset: crate::BufferAddress,
3738    ) {
3739        let indirect_buffer = indirect_buffer.as_core();
3740
3741        wgpu_render_bundle_draw_indirect(&mut self.encoder, indirect_buffer.id, indirect_offset)
3742    }
3743
3744    fn draw_indexed_indirect(
3745        &mut self,
3746        indirect_buffer: &dispatch::DispatchBuffer,
3747        indirect_offset: crate::BufferAddress,
3748    ) {
3749        let indirect_buffer = indirect_buffer.as_core();
3750
3751        wgpu_render_bundle_draw_indexed_indirect(
3752            &mut self.encoder,
3753            indirect_buffer.id,
3754            indirect_offset,
3755        )
3756    }
3757
3758    fn finish(self, desc: &crate::RenderBundleDescriptor<'_>) -> dispatch::DispatchRenderBundle
3759    where
3760        Self: Sized,
3761    {
3762        let (id, error) = self.context.0.render_bundle_encoder_finish(
3763            self.encoder,
3764            &desc.map_label(|l| l.map(Borrowed)),
3765            None,
3766        );
3767        if let Some(err) = error {
3768            self.context
3769                .handle_error_fatal(err, "RenderBundleEncoder::finish");
3770        }
3771        CoreRenderBundle { id }.into()
3772    }
3773}
3774
// Marker-style impl: no methods are overridden for the core backend.
impl dispatch::RenderBundleInterface for CoreRenderBundle {}
3776
3777impl dispatch::SurfaceInterface for CoreSurface {
3778    fn get_capabilities(&self, adapter: &dispatch::DispatchAdapter) -> wgt::SurfaceCapabilities {
3779        let adapter = adapter.as_core();
3780
3781        self.context
3782            .0
3783            .surface_get_capabilities(self.id, adapter.id)
3784            .unwrap_or_default()
3785    }
3786
3787    fn configure(&self, device: &dispatch::DispatchDevice, config: &crate::SurfaceConfiguration) {
3788        let device = device.as_core();
3789
3790        let error = self.context.0.surface_configure(self.id, device.id, config);
3791        if let Some(e) = error {
3792            self.context
3793                .handle_error_nolabel(&device.error_sink, e, "Surface::configure");
3794        } else {
3795            *self.configured_device.lock() = Some(device.id);
3796            *self.error_sink.lock() = Some(device.error_sink.clone());
3797        }
3798    }
3799
3800    fn get_current_texture(
3801        &self,
3802    ) -> (
3803        Option<dispatch::DispatchTexture>,
3804        crate::SurfaceStatus,
3805        dispatch::DispatchSurfaceOutputDetail,
3806    ) {
3807        let output_detail = CoreSurfaceOutputDetail {
3808            context: self.context.clone(),
3809            surface_id: self.id,
3810        }
3811        .into();
3812
3813        match self.context.0.surface_get_current_texture(self.id, None) {
3814            Ok(wgc::present::SurfaceOutput {
3815                status,
3816                texture: texture_id,
3817            }) => {
3818                let data = texture_id
3819                    .map(|id| CoreTexture {
3820                        context: self.context.clone(),
3821                        id,
3822                        error_sink: Arc::new(Mutex::new(ErrorSinkRaw::new())),
3823                    })
3824                    .map(Into::into);
3825
3826                (data, status, output_detail)
3827            }
3828            Err(err) => {
3829                let error_sink = self.error_sink.lock();
3830                match error_sink.as_ref() {
3831                    Some(error_sink) => {
3832                        self.context.handle_error_nolabel(
3833                            error_sink,
3834                            err,
3835                            "Surface::get_current_texture_view",
3836                        );
3837                        (None, crate::SurfaceStatus::Unknown, output_detail)
3838                    }
3839                    None => self
3840                        .context
3841                        .handle_error_fatal(err, "Surface::get_current_texture_view"),
3842                }
3843            }
3844        }
3845    }
3846}
3847
impl Drop for CoreSurface {
    fn drop(&mut self) {
        // Release the wgpu-core surface; `self.id` must not be used afterwards.
        self.context.0.surface_drop(self.id)
    }
}
3853
3854impl dispatch::SurfaceOutputDetailInterface for CoreSurfaceOutputDetail {
3855    fn present(&self) {
3856        match self.context.0.surface_present(self.surface_id) {
3857            Ok(_status) => (),
3858            Err(err) => self.context.handle_error_fatal(err, "Surface::present"),
3859        }
3860    }
3861
3862    fn texture_discard(&self) {
3863        match self.context.0.surface_texture_discard(self.surface_id) {
3864            Ok(_status) => (),
3865            Err(err) => self
3866                .context
3867                .handle_error_fatal(err, "Surface::discard_texture"),
3868        }
3869    }
3870}
impl Drop for CoreSurfaceOutputDetail {
    fn drop(&mut self) {
        // Intentionally a no-op: discarding (or presenting) the surface
        // texture is driven by the owning api struct, not by this handle.
    }
}
3878
3879impl dispatch::QueueWriteBufferInterface for CoreQueueWriteBuffer {
3880    fn slice(&self) -> &[u8] {
3881        panic!()
3882    }
3883
3884    #[inline]
3885    fn slice_mut(&mut self) -> &mut [u8] {
3886        self.mapping.slice_mut()
3887    }
3888}
impl Drop for CoreQueueWriteBuffer {
    fn drop(&mut self) {
        // Intentionally a no-op: the owning api struct submits the staged data
        // via `queue.write_staging_buffer`, so there is nothing to clean up.
    }
}
3896
impl dispatch::BufferMappedRangeInterface for CoreBufferMappedRange {
    #[inline]
    fn slice(&self) -> &[u8] {
        // SAFETY: `ptr` (NonNull) and `size` were captured when this range was
        // created from a buffer mapping; we rely on that mapping staying valid
        // and unaliased for `self`'s lifetime (established at construction —
        // TODO(review): confirm the mapping cannot be dropped while this range
        // is alive). `&self` rules out a simultaneous `slice_mut` on `self`.
        unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.size) }
    }

    #[inline]
    fn slice_mut(&mut self) -> &mut [u8] {
        // SAFETY: same invariants as `slice`; additionally `&mut self`
        // guarantees exclusive access to the mapped bytes through this range.
        unsafe { slice::from_raw_parts_mut(self.ptr.as_ptr(), self.size) }
    }

    #[cfg(webgpu)]
    fn as_uint8array(&self) -> &js_sys::Uint8Array {
        // The JS-backed view only exists on the WebGPU backend; reaching this
        // on the core backend is a dispatch bug, so panic loudly.
        panic!("Only available on WebGPU")
    }
}