// wgpu/backend/wgpu_core.rs — native `wgpu-core` backend implementation.

1use alloc::{
2    borrow::Cow::{self, Borrowed},
3    boxed::Box,
4    format,
5    string::{String, ToString as _},
6    sync::Arc,
7    vec,
8    vec::Vec,
9};
10use core::{
11    error::Error,
12    fmt,
13    future::ready,
14    ops::{Deref, Range},
15    pin::Pin,
16    ptr::NonNull,
17    slice,
18};
19use hashbrown::HashMap;
20
21use arrayvec::ArrayVec;
22use smallvec::SmallVec;
23use wgc::{
24    command::bundle_ffi::*, error::ContextErrorSource, pipeline::CreateShaderModuleError,
25    resource::BlasPrepareCompactResult,
26};
27use wgt::{
28    error::{ErrorType, WebGpuError},
29    WasmNotSendSync,
30};
31
32use crate::{
33    api,
34    dispatch::{self, BlasCompactCallback, BufferMappedRangeInterface},
35    BindingResource, Blas, BufferBinding, BufferDescriptor, CompilationInfo, CompilationMessage,
36    CompilationMessageType, ErrorSource, Features, Label, LoadOp, MapMode, Operations,
37    ShaderSource, SurfaceTargetUnsafe, TextureDescriptor, Tlas,
38};
39use crate::{dispatch::DispatchAdapter, util::Mutex};
40
// Per-thread id tracking; used by `ErrorSinkRaw` to keep a separate stack of
// error scopes for each thread.
mod thread_id;
42
/// The `wgpu-core`-backed context: a shared handle to the wgpu-core global
/// state. Cloning is cheap — it only bumps the inner `Arc` refcount.
#[derive(Clone)]
pub struct ContextWgpuCore(Arc<wgc::global::Global>);
45
impl Drop for ContextWgpuCore {
    fn drop(&mut self) {
        // Intentionally empty — the wrapped `Arc` frees the global when the
        // last clone goes away. NOTE(review): the explicit `Drop` impl is
        // presumably kept deliberately; confirm before removing it.
    }
}
51
52impl fmt::Debug for ContextWgpuCore {
53    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
54        f.debug_struct("ContextWgpuCore")
55            .field("type", &"Native")
56            .finish()
57    }
58}
59
impl ContextWgpuCore {
    /// Builds a context from an existing `wgpu-hal` instance.
    ///
    /// # Safety
    ///
    /// See `wgc::global::Global::from_hal_instance` for the requirements on
    /// `hal_instance`.
    pub unsafe fn from_hal_instance<A: hal::Api>(hal_instance: A::Instance) -> Self {
        Self(unsafe {
            Arc::new(wgc::global::Global::from_hal_instance::<A>(
                "wgpu",
                hal_instance,
            ))
        })
    }

    /// # Safety
    ///
    /// - The raw instance handle returned must not be manually destroyed.
    pub unsafe fn instance_as_hal<A: hal::Api>(&self) -> Option<&A::Instance> {
        unsafe { self.0.instance_as_hal::<A>() }
    }

    /// Builds a context from an existing `wgpu-core` instance.
    ///
    /// # Safety
    ///
    /// See `wgc::global::Global::from_instance`.
    pub unsafe fn from_core_instance(core_instance: wgc::instance::Instance) -> Self {
        Self(unsafe { Arc::new(wgc::global::Global::from_instance(core_instance)) })
    }

    /// Lists the ids of all adapters available on the requested `backends`.
    #[cfg(wgpu_core)]
    pub fn enumerate_adapters(&self, backends: wgt::Backends) -> Vec<wgc::id::AdapterId> {
        self.0.enumerate_adapters(backends)
    }

    /// Registers a `wgpu-hal` adapter with this context and returns its id.
    ///
    /// # Safety
    ///
    /// See `wgc::global::Global::create_adapter_from_hal`.
    pub unsafe fn create_adapter_from_hal<A: hal::Api>(
        &self,
        hal_adapter: hal::ExposedAdapter<A>,
    ) -> wgc::id::AdapterId {
        unsafe { self.0.create_adapter_from_hal(hal_adapter.into(), None) }
    }

    /// Borrows the underlying hal adapter, if it belongs to backend `A`.
    ///
    /// # Safety
    ///
    /// See `wgc::global::Global::adapter_as_hal`.
    pub unsafe fn adapter_as_hal<A: hal::Api>(
        &self,
        adapter: &CoreAdapter,
    ) -> Option<impl Deref<Target = A::Adapter> + WasmNotSendSync> {
        unsafe { self.0.adapter_as_hal::<A>(adapter.id) }
    }

    /// Borrows the underlying hal buffer, if it belongs to backend `A`.
    ///
    /// # Safety
    ///
    /// See `wgc::global::Global::buffer_as_hal`.
    pub unsafe fn buffer_as_hal<A: hal::Api>(
        &self,
        buffer: &CoreBuffer,
    ) -> Option<impl Deref<Target = A::Buffer>> {
        unsafe { self.0.buffer_as_hal::<A>(buffer.id) }
    }

    /// Wraps an already-open hal device/queue pair in wgpu-core handles.
    ///
    /// On success the returned device and queue share one freshly created
    /// error sink.
    ///
    /// # Safety
    ///
    /// See `wgc::global::Global::create_device_from_hal`.
    pub unsafe fn create_device_from_hal<A: hal::Api>(
        &self,
        adapter: &CoreAdapter,
        hal_device: hal::OpenDevice<A>,
        desc: &crate::DeviceDescriptor<'_>,
    ) -> Result<(CoreDevice, CoreQueue), crate::RequestDeviceError> {
        let (device_id, queue_id) = unsafe {
            self.0.create_device_from_hal(
                adapter.id,
                hal_device.into(),
                &desc.map_label(|l| l.map(Borrowed)),
                None,
                None,
            )
        }?;
        // The device and its queue report through the same sink.
        let error_sink = Arc::new(Mutex::new(ErrorSinkRaw::new()));
        let device = CoreDevice {
            context: self.clone(),
            id: device_id,
            error_sink: error_sink.clone(),
            features: desc.required_features,
        };
        let queue = CoreQueue {
            context: self.clone(),
            id: queue_id,
            error_sink,
        };
        Ok((device, queue))
    }

    /// Wraps an existing hal texture in a wgpu-core texture.
    ///
    /// Creation errors are routed to `device`'s error sink rather than
    /// returned; a handle is produced either way.
    ///
    /// # Safety
    ///
    /// See `wgc::global::Global::create_texture_from_hal`.
    pub unsafe fn create_texture_from_hal<A: hal::Api>(
        &self,
        hal_texture: A::Texture,
        device: &CoreDevice,
        desc: &TextureDescriptor<'_>,
    ) -> CoreTexture {
        let descriptor = desc.map_label_and_view_formats(|l| l.map(Borrowed), |v| v.to_vec());
        let (id, error) = unsafe {
            self.0
                .create_texture_from_hal(Box::new(hal_texture), device.id, &descriptor, None)
        };
        if let Some(cause) = error {
            self.handle_error(
                &device.error_sink,
                cause,
                desc.label,
                "Device::create_texture_from_hal",
            );
        }
        CoreTexture {
            context: self.clone(),
            id,
            error_sink: Arc::clone(&device.error_sink),
        }
    }

    /// Wraps an existing hal buffer in a wgpu-core buffer.
    ///
    /// Creation errors are routed to `device`'s error sink rather than
    /// returned; a handle is produced either way.
    ///
    /// # Safety
    ///
    /// - `hal_buffer` must be created from `device`.
    /// - `hal_buffer` must be created respecting `desc`
    /// - `hal_buffer` must be initialized
    /// - `hal_buffer` must not have zero size.
    pub unsafe fn create_buffer_from_hal<A: hal::Api>(
        &self,
        hal_buffer: A::Buffer,
        device: &CoreDevice,
        desc: &BufferDescriptor<'_>,
    ) -> CoreBuffer {
        let (id, error) = unsafe {
            self.0.create_buffer_from_hal::<A>(
                hal_buffer,
                device.id,
                &desc.map_label(|l| l.map(Borrowed)),
                None,
            )
        };
        if let Some(cause) = error {
            self.handle_error(
                &device.error_sink,
                cause,
                desc.label,
                "Device::create_buffer_from_hal",
            );
        }
        CoreBuffer {
            context: self.clone(),
            id,
            error_sink: Arc::clone(&device.error_sink),
        }
    }

    /// # Safety
    ///
    /// See `wgc::global::Global::device_as_hal`.
    pub unsafe fn device_as_hal<A: hal::Api>(
        &self,
        device: &CoreDevice,
    ) -> Option<impl Deref<Target = A::Device>> {
        unsafe { self.0.device_as_hal::<A>(device.id) }
    }

    /// # Safety
    ///
    /// See `wgc::global::Global::surface_as_hal`.
    pub unsafe fn surface_as_hal<A: hal::Api>(
        &self,
        surface: &CoreSurface,
    ) -> Option<impl Deref<Target = A::Surface>> {
        unsafe { self.0.surface_as_hal::<A>(surface.id) }
    }

    /// # Safety
    ///
    /// See `wgc::global::Global::texture_as_hal`.
    pub unsafe fn texture_as_hal<A: hal::Api>(
        &self,
        texture: &CoreTexture,
    ) -> Option<impl Deref<Target = A::Texture>> {
        unsafe { self.0.texture_as_hal::<A>(texture.id) }
    }

    /// # Safety
    ///
    /// See `wgc::global::Global::texture_view_as_hal`.
    pub unsafe fn texture_view_as_hal<A: hal::Api>(
        &self,
        texture_view: &CoreTextureView,
    ) -> Option<impl Deref<Target = A::TextureView>> {
        unsafe { self.0.texture_view_as_hal::<A>(texture_view.id) }
    }

    /// This method will start the wgpu_core level command recording.
    ///
    /// # Safety
    ///
    /// See `wgc::global::Global::command_encoder_as_hal_mut`.
    pub unsafe fn command_encoder_as_hal_mut<
        A: hal::Api,
        F: FnOnce(Option<&mut A::CommandEncoder>) -> R,
        R,
    >(
        &self,
        command_encoder: &CoreCommandEncoder,
        hal_command_encoder_callback: F,
    ) -> R {
        unsafe {
            self.0.command_encoder_as_hal_mut::<A, F, R>(
                command_encoder.id,
                hal_command_encoder_callback,
            )
        }
    }

    /// # Safety
    ///
    /// See `wgc::global::Global::blas_as_hal`.
    pub unsafe fn blas_as_hal<A: hal::Api>(
        &self,
        blas: &CoreBlas,
    ) -> Option<impl Deref<Target = A::AccelerationStructure>> {
        unsafe { self.0.blas_as_hal::<A>(blas.id) }
    }

    /// # Safety
    ///
    /// See `wgc::global::Global::tlas_as_hal`.
    pub unsafe fn tlas_as_hal<A: hal::Api>(
        &self,
        tlas: &CoreTlas,
    ) -> Option<impl Deref<Target = A::AccelerationStructure>> {
        unsafe { self.0.tlas_as_hal::<A>(tlas.id) }
    }

    /// Produces a report of the wgpu-core global's internal state.
    pub fn generate_report(&self) -> wgc::global::GlobalReport {
        self.0.generate_report()
    }

    /// Routes a classified error into the given sink, deferring any
    /// user-provided uncaptured-error callback until the sink lock is
    /// released.
    ///
    /// `#[cold]`/`#[inline(never)]` keep this slow path out of callers.
    #[cold]
    #[track_caller]
    #[inline(never)]
    fn handle_error_inner(
        &self,
        sink_mutex: &Mutex<ErrorSinkRaw>,
        error_type: ErrorType,
        source: ContextErrorSource,
        label: Label<'_>,
        fn_ident: &'static str,
    ) {
        // Wrap the raw source with the calling function and resource label.
        let source: ErrorSource = Box::new(wgc::error::ContextError {
            fn_ident,
            source,
            label: label.unwrap_or_default().to_string(),
        });
        let final_error_handling = {
            let mut sink = sink_mutex.lock();
            // Formatting is deferred: only Internal/Validation errors pay for it.
            let description = || self.format_error(&*source);
            let error = match error_type {
                ErrorType::Internal => {
                    let description = description();
                    crate::Error::Internal {
                        source,
                        description,
                    }
                }
                ErrorType::OutOfMemory => crate::Error::OutOfMemory { source },
                ErrorType::Validation => {
                    let description = description();
                    crate::Error::Validation {
                        source,
                        description,
                    }
                }
                ErrorType::DeviceLost => return, // will be surfaced via callback
            };
            sink.handle_error_or_return_handler(error)
        };

        if let Some(f) = final_error_handling {
            // If the user has provided their own `uncaptured_handler` callback, invoke it now,
            // having released our lock on `sink_mutex`. See the comments on
            // `handle_error_or_return_handler` for details.
            f();
        }
    }

    /// Reports `source` (with a resource label) to `sink_mutex`, classifying
    /// it via its `WebGpuError` type.
    #[inline]
    #[track_caller]
    fn handle_error(
        &self,
        sink_mutex: &Mutex<ErrorSinkRaw>,
        source: impl WebGpuError + WasmNotSendSync + 'static,
        label: Label<'_>,
        fn_ident: &'static str,
    ) {
        let error_type = source.webgpu_error_type();
        self.handle_error_inner(sink_mutex, error_type, Box::new(source), label, fn_ident)
    }

    /// Like [`Self::handle_error`], for call sites that have no resource label.
    #[inline]
    #[track_caller]
    fn handle_error_nolabel(
        &self,
        sink_mutex: &Mutex<ErrorSinkRaw>,
        source: impl WebGpuError + WasmNotSendSync + 'static,
        fn_ident: &'static str,
    ) {
        let error_type = source.webgpu_error_type();
        self.handle_error_inner(sink_mutex, error_type, Box::new(source), None, fn_ident)
    }

    /// Handles an unrecoverable error: formats the cause chain and panics.
    #[track_caller]
    #[cold]
    fn handle_error_fatal(
        &self,
        cause: impl Error + WasmNotSendSync + 'static,
        operation: &'static str,
    ) -> ! {
        panic!("Error in {operation}: {f}", f = self.format_error(&cause));
    }

    /// Renders `err` and its `source()` chain as an indented tree, expanding
    /// a `wgc::error::MultiError` into one entry per contained error.
    #[inline(never)]
    fn format_error(&self, err: &(dyn Error + 'static)) -> String {
        let mut output = String::new();
        let mut level = 1;

        // Recursively prints one error plus its sources, indenting two spaces
        // per nesting level.
        fn print_tree(output: &mut String, level: &mut usize, e: &(dyn Error + 'static)) {
            let mut print = |e: &(dyn Error + 'static)| {
                use core::fmt::Write;
                writeln!(output, "{}{}", " ".repeat(*level * 2), e).unwrap();

                if let Some(e) = e.source() {
                    *level += 1;
                    print_tree(output, level, e);
                    *level -= 1;
                }
            };
            if let Some(multi) = e.downcast_ref::<wgc::error::MultiError>() {
                for e in multi.errors() {
                    print(e);
                }
            } else {
                print(e);
            }
        }

        print_tree(&mut output, &mut level, err);

        format!("Validation Error\n\nCaused by:\n{output}")
    }

    /// # Safety
    ///
    /// See `wgc::global::Global::queue_as_hal`.
    pub unsafe fn queue_as_hal<A: hal::Api>(
        &self,
        queue: &CoreQueue,
    ) -> Option<impl Deref<Target = A::Queue> + WasmNotSendSync> {
        unsafe { self.0.queue_as_hal::<A>(queue.id) }
    }
}
382
383fn map_buffer_copy_view(
384    view: crate::TexelCopyBufferInfo<'_>,
385) -> wgt::TexelCopyBufferInfo<wgc::id::BufferId> {
386    wgt::TexelCopyBufferInfo {
387        buffer: view.buffer.inner.as_core().id,
388        layout: view.layout,
389    }
390}
391
392fn map_texture_copy_view(
393    view: crate::TexelCopyTextureInfo<'_>,
394) -> wgt::TexelCopyTextureInfo<wgc::id::TextureId> {
395    wgt::TexelCopyTextureInfo {
396        texture: view.texture.inner.as_core().id,
397        mip_level: view.mip_level,
398        origin: view.origin,
399        aspect: view.aspect,
400    }
401}
402
403#[cfg_attr(not(webgl), expect(unused))]
404fn map_texture_tagged_copy_view(
405    view: crate::CopyExternalImageDestInfo<&api::Texture>,
406) -> wgt::CopyExternalImageDestInfo<wgc::id::TextureId> {
407    wgt::CopyExternalImageDestInfo {
408        texture: view.texture.inner.as_core().id,
409        mip_level: view.mip_level,
410        origin: view.origin,
411        aspect: view.aspect,
412        color_space: view.color_space,
413        premultiplied_alpha: view.premultiplied_alpha,
414    }
415}
416
417fn map_load_op<V: Copy>(load: &LoadOp<V>) -> LoadOp<Option<V>> {
418    match *load {
419        LoadOp::Clear(clear_value) => LoadOp::Clear(Some(clear_value)),
420        LoadOp::DontCare(token) => LoadOp::DontCare(token),
421        LoadOp::Load => LoadOp::Load,
422    }
423}
424
425fn map_pass_channel<V: Copy>(ops: Option<&Operations<V>>) -> wgc::command::PassChannel<Option<V>> {
426    match ops {
427        Some(&Operations { load, store }) => wgc::command::PassChannel {
428            load_op: Some(map_load_op(&load)),
429            store_op: Some(store),
430            read_only: false,
431        },
432        None => wgc::command::PassChannel {
433            load_op: None,
434            store_op: None,
435            read_only: true,
436        },
437    }
438}
439
/// wgpu-core handle to a surface, plus the state that only exists once the
/// surface has been configured against a device.
#[derive(Debug)]
pub struct CoreSurface {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::SurfaceId,
    /// Configured device is needed to know which backend
    /// code to execute when acquiring a new frame.
    configured_device: Mutex<Option<wgc::id::DeviceId>>,
    /// The error sink with which to report errors.
    /// `None` if the surface has not been configured.
    error_sink: Mutex<Option<ErrorSink>>,
}
451
/// Core handle to an adapter: the shared context plus the `wgc` adapter id.
#[derive(Debug)]
pub struct CoreAdapter {
    pub(crate) context: ContextWgpuCore,
    pub(crate) id: wgc::id::AdapterId,
}

/// Core handle to a device, with the error sink its resources report through
/// and the features it was requested with.
#[derive(Debug)]
pub struct CoreDevice {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::DeviceId,
    error_sink: ErrorSink,
    features: Features,
}

/// Core handle to a buffer; errors are reported via the owning device's sink.
#[derive(Debug)]
pub struct CoreBuffer {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BufferId,
    error_sink: ErrorSink,
}

/// Core handle to a shader module, carrying the compilation info captured at
/// creation time.
#[derive(Debug)]
pub struct CoreShaderModule {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::ShaderModuleId,
    compilation_info: CompilationInfo,
}

/// Core handle to a bind group layout.
#[derive(Debug)]
pub struct CoreBindGroupLayout {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BindGroupLayoutId,
}

/// Core handle to a bind group.
#[derive(Debug)]
pub struct CoreBindGroup {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BindGroupId,
}

/// Core handle to a texture; errors are reported via the owning device's sink.
#[derive(Debug)]
pub struct CoreTexture {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::TextureId,
    error_sink: ErrorSink,
}

/// Core handle to a texture view.
#[derive(Debug)]
pub struct CoreTextureView {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::TextureViewId,
}

/// Core handle to an external texture.
#[derive(Debug)]
pub struct CoreExternalTexture {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::ExternalTextureId,
}

/// Core handle to a sampler.
#[derive(Debug)]
pub struct CoreSampler {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::SamplerId,
}

/// Core handle to a query set.
#[derive(Debug)]
pub struct CoreQuerySet {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::QuerySetId,
}
522
/// Core handle to a pipeline layout.
#[derive(Debug)]
pub struct CorePipelineLayout {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::PipelineLayoutId,
}

/// Core handle to a pipeline cache.
#[derive(Debug)]
pub struct CorePipelineCache {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::PipelineCacheId,
}

/// Core handle to a finished command buffer.
#[derive(Debug)]
pub struct CoreCommandBuffer {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::CommandBufferId,
}

/// Render bundle encoder state, plus an `Identifier` used for the
/// equality/ordering/hashing impls generated further below.
#[derive(Debug)]
pub struct CoreRenderBundleEncoder {
    pub(crate) context: ContextWgpuCore,
    encoder: wgc::command::RenderBundleEncoder,
    id: crate::cmp::Identifier,
}

/// Core handle to a finished render bundle.
#[derive(Debug)]
pub struct CoreRenderBundle {
    context: ContextWgpuCore,
    id: wgc::id::RenderBundleId,
}

/// Core handle to a queue; shares its error sink with the owning device.
#[derive(Debug)]
pub struct CoreQueue {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::QueueId,
    error_sink: ErrorSink,
}
560
/// Core handle to a compute pipeline; errors go to the device's sink.
#[derive(Debug)]
pub struct CoreComputePipeline {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::ComputePipelineId,
    error_sink: ErrorSink,
}

/// Core handle to a render pipeline; errors go to the device's sink.
#[derive(Debug)]
pub struct CoreRenderPipeline {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::RenderPipelineId,
    error_sink: ErrorSink,
}

/// In-progress compute pass state, plus an `Identifier` for comparisons.
#[derive(Debug)]
pub struct CoreComputePass {
    pub(crate) context: ContextWgpuCore,
    pass: wgc::command::ComputePass,
    error_sink: ErrorSink,
    id: crate::cmp::Identifier,
}

/// In-progress render pass state, plus an `Identifier` for comparisons.
#[derive(Debug)]
pub struct CoreRenderPass {
    pub(crate) context: ContextWgpuCore,
    pass: wgc::command::RenderPass,
    error_sink: ErrorSink,
    id: crate::cmp::Identifier,
}

/// Core handle to a command encoder; errors go to the device's sink.
#[derive(Debug)]
pub struct CoreCommandEncoder {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::CommandEncoderId,
    error_sink: ErrorSink,
}

/// Core handle to a bottom-level acceleration structure.
#[derive(Debug)]
pub struct CoreBlas {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BlasId,
    error_sink: ErrorSink,
}

/// Core handle to a top-level acceleration structure.
#[derive(Debug)]
pub struct CoreTlas {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::TlasId,
    // error_sink: ErrorSink,
}

/// Per-frame data backing a surface texture: how to present or discard it.
#[derive(Debug)]
pub struct CoreSurfaceOutputDetail {
    context: ContextWgpuCore,
    surface_id: wgc::id::SurfaceId,
    error_sink: ErrorSink,
}
618
/// Shared, mutex-guarded error sink handed out to device-owned resources.
type ErrorSink = Arc<Mutex<ErrorSinkRaw>>;

/// One pushed error scope: the filter it captures and the first matching
/// error recorded into it (if any).
struct ErrorScope {
    error: Option<crate::Error>,
    filter: crate::ErrorFilter,
}

/// Error-routing state: a stack of error scopes per thread, plus an optional
/// user-installed handler for uncaptured errors.
struct ErrorSinkRaw {
    scopes: HashMap<thread_id::ThreadId, Vec<ErrorScope>>,
    uncaptured_handler: Option<Arc<dyn crate::UncapturedErrorHandler>>,
}
630
impl ErrorSinkRaw {
    /// Creates an empty sink: no per-thread scopes, no custom handler.
    fn new() -> ErrorSinkRaw {
        ErrorSinkRaw {
            scopes: HashMap::new(),
            uncaptured_handler: None,
        }
    }

    /// Deliver the error to
    ///
    /// * the innermost error scope, if any, or
    /// * the uncaptured error handler, if there is one, or
    /// * [`default_error_handler()`].
    ///
    /// If a closure is returned, the caller should call it immediately after dropping the
    /// [`ErrorSink`] mutex guard. This makes sure that the user callback is not called with
    /// a wgpu mutex held.
    #[track_caller]
    #[must_use]
    fn handle_error_or_return_handler(&mut self, err: crate::Error) -> Option<impl FnOnce()> {
        // Map the concrete error kind to the scope filter that can capture it.
        let filter = match err {
            crate::Error::OutOfMemory { .. } => crate::ErrorFilter::OutOfMemory,
            crate::Error::Validation { .. } => crate::ErrorFilter::Validation,
            crate::Error::Internal { .. } => crate::ErrorFilter::Internal,
        };
        // Scopes are tracked per thread: only the current thread's stack is
        // consulted.
        let thread_id = thread_id::ThreadId::current();
        let scopes = self.scopes.entry(thread_id).or_default();
        // The innermost (most recently pushed) matching scope captures the error.
        match scopes.iter_mut().rev().find(|scope| scope.filter == filter) {
            Some(scope) => {
                // A scope only keeps the first error it captures.
                if scope.error.is_none() {
                    scope.error = Some(err);
                }
                None
            }
            None => {
                if let Some(custom_handler) = &self.uncaptured_handler {
                    let custom_handler = Arc::clone(custom_handler);
                    Some(move || (custom_handler)(err))
                } else {
                    // direct call preserves #[track_caller] where dyn can't
                    default_error_handler(err)
                }
            }
        }
    }
}
677
678impl fmt::Debug for ErrorSinkRaw {
679    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
680        write!(f, "ErrorSink")
681    }
682}
683
/// Last-resort handler for uncaptured errors when the user has not installed
/// their own: logs a note and panics with the error.
#[track_caller]
fn default_error_handler(err: crate::Error) -> ! {
    log::error!("Handling wgpu errors as fatal by default");
    panic!("wgpu error: {err}\n");
}
689
// Turns shader-module creation errors into WebGPU-style compilation info so
// they can be surfaced through `get_compilation_info()`.
impl From<CreateShaderModuleError> for CompilationInfo {
    fn from(value: CreateShaderModuleError) -> Self {
        match value {
            // Front-end parse errors carry source locations; each variant's
            // own conversion preserves them.
            #[cfg(feature = "wgsl")]
            CreateShaderModuleError::Parsing(v) => v.into(),
            #[cfg(feature = "glsl")]
            CreateShaderModuleError::ParsingGlsl(v) => v.into(),
            #[cfg(feature = "spirv")]
            CreateShaderModuleError::ParsingSpirV(v) => v.into(),
            CreateShaderModuleError::Validation(v) => v.into(),
            // Device errors are reported through the error sink, and are not compilation errors.
            // Same goes for native shader module generation errors.
            CreateShaderModuleError::Device(_) | CreateShaderModuleError::Generation => {
                CompilationInfo {
                    messages: Vec::new(),
                }
            }
            // Everything else is an error message without location information.
            _ => CompilationInfo {
                messages: vec![CompilationMessage {
                    message: value.to_string(),
                    message_type: CompilationMessageType::Error,
                    location: None,
                }],
            },
        }
    }
}
718
/// A staging buffer id paired with its mapped range.
#[derive(Debug)]
pub struct CoreQueueWriteBuffer {
    buffer_id: wgc::id::StagingBufferId,
    mapping: CoreBufferMappedRange,
}

/// Raw pointer + length view of a mapped buffer region.
#[derive(Debug)]
pub struct CoreBufferMappedRange {
    ptr: NonNull<u8>,
    size: usize,
}
730
// SAFETY(review): `CoreBufferMappedRange` is only a raw pointer + length into
// mapped buffer memory; these impls assert that accessing the mapping from
// other threads is sound when the `send_sync` cfg is active — presumably
// backed by wgpu-core's mapping guarantees; confirm before relying on it.
#[cfg(send_sync)]
unsafe impl Send for CoreBufferMappedRange {}
#[cfg(send_sync)]
unsafe impl Sync for CoreBufferMappedRange {}
735
impl Drop for CoreBufferMappedRange {
    fn drop(&mut self) {
        // Intentionally left blank so that `BufferMappedRange` still
        // implements `Drop`, to match the web backend
    }
}
742
// Equality / ordering / hashing for the handle types delegates to the
// underlying id (or, for the context, the `Arc` address), so two wrappers
// compare equal exactly when they refer to the same core object.
crate::cmp::impl_eq_ord_hash_arc_address!(ContextWgpuCore => .0);
crate::cmp::impl_eq_ord_hash_proxy!(CoreAdapter => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreDevice => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreQueue => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreShaderModule => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBindGroupLayout => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBindGroup => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreTextureView => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreSampler => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBuffer => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreTexture => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreExternalTexture => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBlas => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreTlas => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreQuerySet => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CorePipelineLayout => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderPipeline => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreComputePipeline => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CorePipelineCache => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreCommandEncoder => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreComputePass => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderPass => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreCommandBuffer => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderBundleEncoder => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderBundle => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreSurface => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreSurfaceOutputDetail => .surface_id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreQueueWriteBuffer => .mapping.ptr);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBufferMappedRange => .ptr);
772
impl dispatch::InstanceInterface for ContextWgpuCore {
    /// Creates a brand-new wgpu-core global for this instance descriptor.
    fn new(desc: wgt::InstanceDescriptor) -> Self
    where
        Self: Sized,
    {
        Self(Arc::new(wgc::global::Global::new("wgpu", desc, None)))
    }

    /// Creates a surface for the given raw target.
    ///
    /// Each target kind dispatches to the matching wgpu-core constructor;
    /// which arms exist depends on platform/backend cfgs.
    ///
    /// # Safety
    ///
    /// See the documentation of each `SurfaceTargetUnsafe` variant for the
    /// validity requirements on the raw handles it carries.
    unsafe fn create_surface(
        &self,
        target: crate::api::SurfaceTargetUnsafe,
    ) -> Result<dispatch::DispatchSurface, crate::CreateSurfaceError> {
        let id = match target {
            SurfaceTargetUnsafe::RawHandle {
                raw_display_handle,
                raw_window_handle,
            } => unsafe {
                self.0
                    .instance_create_surface(raw_display_handle, raw_window_handle, None)
            },

            #[cfg(all(
                unix,
                not(target_vendor = "apple"),
                not(target_family = "wasm"),
                not(target_os = "netbsd")
            ))]
            SurfaceTargetUnsafe::Drm {
                fd,
                plane,
                connector_id,
                width,
                height,
                refresh_rate,
            } => unsafe {
                self.0.instance_create_surface_from_drm(
                    fd,
                    plane,
                    connector_id,
                    width,
                    height,
                    refresh_rate,
                    None,
                )
            },

            #[cfg(metal)]
            SurfaceTargetUnsafe::CoreAnimationLayer(layer) => unsafe {
                self.0.instance_create_surface_metal(layer, None)
            },

            // DRM targets are not supported on NetBSD.
            #[cfg(target_os = "netbsd")]
            SurfaceTargetUnsafe::Drm { .. } => Err(
                wgc::instance::CreateSurfaceError::BackendNotEnabled(wgt::Backend::Vulkan),
            ),

            #[cfg(dx12)]
            SurfaceTargetUnsafe::CompositionVisual(visual) => unsafe {
                self.0.instance_create_surface_from_visual(visual, None)
            },

            #[cfg(dx12)]
            SurfaceTargetUnsafe::SurfaceHandle(surface_handle) => unsafe {
                self.0
                    .instance_create_surface_from_surface_handle(surface_handle, None)
            },

            #[cfg(dx12)]
            SurfaceTargetUnsafe::SwapChainPanel(swap_chain_panel) => unsafe {
                self.0
                    .instance_create_surface_from_swap_chain_panel(swap_chain_panel, None)
            },
        }?;

        // A fresh surface is unconfigured: no device and no error sink yet.
        Ok(CoreSurface {
            context: self.clone(),
            id,
            configured_device: Mutex::default(),
            error_sink: Mutex::default(),
        }
        .into())
    }

    /// Requests an adapter. Resolution is synchronous here — the returned
    /// future is already `ready`.
    fn request_adapter(
        &self,
        options: &crate::api::RequestAdapterOptions<'_, '_>,
    ) -> Pin<Box<dyn dispatch::RequestAdapterFuture>> {
        let id = self.0.request_adapter(
            &wgc::instance::RequestAdapterOptions {
                power_preference: options.power_preference,
                force_fallback_adapter: options.force_fallback_adapter,
                compatible_surface: options
                    .compatible_surface
                    .map(|surface| surface.inner.as_core().id),
            },
            wgt::Backends::all(),
            None,
        );
        let adapter = id.map(|id| {
            let core = CoreAdapter {
                context: self.clone(),
                id,
            };
            let generic: dispatch::DispatchAdapter = core.into();
            generic
        });
        Box::pin(ready(adapter))
    }

    /// Polls every device; returns whether all queues are empty. Poll
    /// failures are not recoverable here and are treated as fatal.
    fn poll_all_devices(&self, force_wait: bool) -> bool {
        match self.0.poll_all_devices(force_wait) {
            Ok(all_queue_empty) => all_queue_empty,
            Err(err) => self.handle_error_fatal(err, "Instance::poll_all_devices"),
        }
    }

    /// Collects the WGSL language extensions implemented by naga into the
    /// public `WgslLanguageFeatures` bitflags.
    #[cfg(feature = "wgsl")]
    fn wgsl_language_features(&self) -> crate::WgslLanguageFeatures {
        use wgc::naga::front::wgsl::ImplementedLanguageExtension;
        ImplementedLanguageExtension::all().iter().copied().fold(
            crate::WgslLanguageFeatures::empty(),
            |acc, wle| {
                acc | match wle {
                    ImplementedLanguageExtension::ReadOnlyAndReadWriteStorageTextures => {
                        crate::WgslLanguageFeatures::ReadOnlyAndReadWriteStorageTextures
                    }
                    ImplementedLanguageExtension::Packed4x8IntegerDotProduct => {
                        crate::WgslLanguageFeatures::Packed4x8IntegerDotProduct
                    }
                    ImplementedLanguageExtension::PointerCompositeAccess => {
                        crate::WgslLanguageFeatures::PointerCompositeAccess
                    }
                }
            },
        )
    }

    /// Wraps each enumerated adapter id in a `CoreAdapter` handle. Resolution
    /// is synchronous — the returned future is already `ready`.
    fn enumerate_adapters(
        &self,
        backends: crate::Backends,
    ) -> Pin<Box<dyn dispatch::EnumerateAdapterFuture>> {
        let adapters: Vec<DispatchAdapter> = self
            .enumerate_adapters(backends)
            .into_iter()
            .map(|adapter| {
                let core = crate::backend::wgpu_core::CoreAdapter {
                    context: self.clone(),
                    id: adapter,
                };
                core.into()
            })
            .collect();
        Box::pin(ready(adapters))
    }
}
928
impl dispatch::AdapterInterface for CoreAdapter {
    /// Requests a device/queue pair. Resolution is synchronous — the returned
    /// future is already `ready`.
    fn request_device(
        &self,
        desc: &crate::DeviceDescriptor<'_>,
    ) -> Pin<Box<dyn dispatch::RequestDeviceFuture>> {
        let res = self.context.0.adapter_request_device(
            self.id,
            &desc.map_label(|l| l.map(Borrowed)),
            None,
            None,
        );
        let (device_id, queue_id) = match res {
            Ok(ids) => ids,
            Err(err) => {
                return Box::pin(ready(Err(err.into())));
            }
        };
        // The device and its queue share a single freshly created error sink,
        // mirroring `ContextWgpuCore::create_device_from_hal`.
        let error_sink = Arc::new(Mutex::new(ErrorSinkRaw::new()));
        let device = CoreDevice {
            context: self.context.clone(),
            id: device_id,
            error_sink: error_sink.clone(),
            features: desc.required_features,
        };
        let queue = CoreQueue {
            context: self.context.clone(),
            id: queue_id,
            error_sink,
        };
        Box::pin(ready(Ok((device.into(), queue.into()))))
    }

    fn is_surface_supported(&self, surface: &dispatch::DispatchSurface) -> bool {
        let surface = surface.as_core();

        self.context
            .0
            .adapter_is_surface_supported(self.id, surface.id)
    }

    // The remaining methods are thin forwards to the wgpu-core global's
    // `adapter_*` entry points.

    fn features(&self) -> crate::Features {
        self.context.0.adapter_features(self.id)
    }

    fn limits(&self) -> crate::Limits {
        self.context.0.adapter_limits(self.id)
    }

    fn downlevel_capabilities(&self) -> crate::DownlevelCapabilities {
        self.context.0.adapter_downlevel_capabilities(self.id)
    }

    fn get_info(&self) -> crate::AdapterInfo {
        self.context.0.adapter_get_info(self.id)
    }

    fn get_texture_format_features(
        &self,
        format: crate::TextureFormat,
    ) -> crate::TextureFormatFeatures {
        self.context
            .0
            .adapter_get_texture_format_features(self.id, format)
    }

    fn get_presentation_timestamp(&self) -> crate::PresentationTimestamp {
        self.context.0.adapter_get_presentation_timestamp(self.id)
    }

    fn cooperative_matrix_properties(&self) -> Vec<crate::wgt::CooperativeMatrixProperties> {
        self.context
            .0
            .adapter_cooperative_matrix_properties(self.id)
    }
}
1004
1005impl Drop for CoreAdapter {
1006    fn drop(&mut self) {
1007        self.context.0.adapter_drop(self.id)
1008    }
1009}
1010
1011impl dispatch::DeviceInterface for CoreDevice {
1012    fn features(&self) -> crate::Features {
1013        self.context.0.device_features(self.id)
1014    }
1015
1016    fn limits(&self) -> crate::Limits {
1017        self.context.0.device_limits(self.id)
1018    }
1019
1020    fn adapter_info(&self) -> crate::AdapterInfo {
1021        self.context.0.device_adapter_info(self.id)
1022    }
1023
    // If we have no way to create a shader module, we can't return one, and so most of the function is unreachable.
    #[cfg_attr(
        not(any(
            feature = "spirv",
            feature = "glsl",
            feature = "wgsl",
            feature = "naga-ir"
        )),
        expect(unused)
    )]
    // Creates a shader module from one of the supported frontend sources.
    // Compilation errors are routed to the device's error sink, and the
    // resulting diagnostics are retained on the returned module as
    // `CompilationInfo`.
    fn create_shader_module(
        &self,
        desc: crate::ShaderModuleDescriptor<'_>,
        shader_bound_checks: wgt::ShaderRuntimeChecks,
    ) -> dispatch::DispatchShaderModule {
        let descriptor = wgc::pipeline::ShaderModuleDescriptor {
            label: desc.label.map(Borrowed),
            runtime_checks: shader_bound_checks,
        };
        // Translate the public source enum into wgpu-core's source enum; each
        // arm is compiled in only when the matching frontend feature is enabled.
        let source = match desc.source {
            #[cfg(feature = "spirv")]
            ShaderSource::SpirV(ref spv) => {
                // Parse the given shader code and store its representation.
                let options = naga::front::spv::Options {
                    adjust_coordinate_space: false, // we require NDC_Y_UP feature
                    strict_capabilities: true,
                    block_ctx_dump_prefix: None,
                };
                wgc::pipeline::ShaderModuleSource::SpirV(Borrowed(spv), options)
            }
            #[cfg(feature = "glsl")]
            ShaderSource::Glsl {
                ref shader,
                stage,
                defines,
            } => {
                // GLSL preprocessor defines are passed as owned key/value pairs.
                let options = naga::front::glsl::Options {
                    stage,
                    defines: defines
                        .iter()
                        .map(|&(key, value)| (String::from(key), String::from(value)))
                        .collect(),
                };
                wgc::pipeline::ShaderModuleSource::Glsl(Borrowed(shader), options)
            }
            #[cfg(feature = "wgsl")]
            ShaderSource::Wgsl(ref code) => wgc::pipeline::ShaderModuleSource::Wgsl(Borrowed(code)),
            #[cfg(feature = "naga-ir")]
            ShaderSource::Naga(module) => wgc::pipeline::ShaderModuleSource::Naga(module),
            // `Dummy` only exists to keep the enum non-empty when no frontend
            // feature is enabled; it must never reach this backend.
            ShaderSource::Dummy(_) => panic!("found `ShaderSource::Dummy`"),
        };
        let (id, error) =
            self.context
                .0
                .device_create_shader_module(self.id, &descriptor, source, None);
        // Even on failure an (invalid) id is returned; report the error and
        // keep the compiler diagnostics for `get_compilation_info`.
        let compilation_info = match error {
            Some(cause) => {
                self.context.handle_error(
                    &self.error_sink,
                    cause.clone(),
                    desc.label,
                    "Device::create_shader_module",
                );
                CompilationInfo::from(cause)
            }
            None => CompilationInfo { messages: vec![] },
        };

        CoreShaderModule {
            context: self.context.clone(),
            id,
            compilation_info,
        }
        .into()
    }
1099
1100    unsafe fn create_shader_module_passthrough(
1101        &self,
1102        desc: &crate::ShaderModuleDescriptorPassthrough<'_>,
1103    ) -> dispatch::DispatchShaderModule {
1104        let desc = desc.map_label(|l| l.map(Cow::from));
1105        let (id, error) = unsafe {
1106            self.context
1107                .0
1108                .device_create_shader_module_passthrough(self.id, &desc, None)
1109        };
1110
1111        let compilation_info = match error {
1112            Some(cause) => {
1113                self.context.handle_error(
1114                    &self.error_sink,
1115                    cause.clone(),
1116                    desc.label.as_deref(),
1117                    "Device::create_shader_module_passthrough",
1118                );
1119                CompilationInfo::from(cause)
1120            }
1121            None => CompilationInfo { messages: vec![] },
1122        };
1123
1124        CoreShaderModule {
1125            context: self.context.clone(),
1126            id,
1127            compilation_info,
1128        }
1129        .into()
1130    }
1131
1132    fn create_bind_group_layout(
1133        &self,
1134        desc: &crate::BindGroupLayoutDescriptor<'_>,
1135    ) -> dispatch::DispatchBindGroupLayout {
1136        let descriptor = wgc::binding_model::BindGroupLayoutDescriptor {
1137            label: desc.label.map(Borrowed),
1138            entries: Borrowed(desc.entries),
1139        };
1140        let (id, error) =
1141            self.context
1142                .0
1143                .device_create_bind_group_layout(self.id, &descriptor, None);
1144        if let Some(cause) = error {
1145            self.context.handle_error(
1146                &self.error_sink,
1147                cause,
1148                desc.label,
1149                "Device::create_bind_group_layout",
1150            );
1151        }
1152        CoreBindGroupLayout {
1153            context: self.context.clone(),
1154            id,
1155        }
1156        .into()
1157    }
1158
    /// Creates a bind group, translating `wgpu` binding resources into
    /// wgpu-core ids.
    ///
    /// Arrayed resources (texture-view / sampler / buffer arrays) are first
    /// flattened into contiguous vectors, because wgpu-core's arrayed binding
    /// variants borrow slices of ids. Each entry then takes a disjoint
    /// subslice of those vectors, in entry order.
    fn create_bind_group(
        &self,
        desc: &crate::BindGroupDescriptor<'_>,
    ) -> dispatch::DispatchBindGroup {
        use wgc::binding_model as bm;

        // Pass 1a: flatten texture-view and sampler arrays. Only done when the
        // feature is enabled, since arrayed bindings are gated on it.
        let mut arrayed_texture_views = Vec::new();
        let mut arrayed_samplers = Vec::new();
        if self.features.contains(Features::TEXTURE_BINDING_ARRAY) {
            // gather all the array view IDs first
            for entry in desc.entries.iter() {
                if let BindingResource::TextureViewArray(array) = entry.resource {
                    arrayed_texture_views.extend(array.iter().map(|view| view.inner.as_core().id));
                }
                if let BindingResource::SamplerArray(array) = entry.resource {
                    arrayed_samplers.extend(array.iter().map(|sampler| sampler.inner.as_core().id));
                }
            }
        }
        // Cursors over the flattened vectors; advanced as entries consume them.
        let mut remaining_arrayed_texture_views = &arrayed_texture_views[..];
        let mut remaining_arrayed_samplers = &arrayed_samplers[..];

        // Pass 1b: flatten buffer-binding arrays, likewise feature-gated.
        let mut arrayed_buffer_bindings = Vec::new();
        if self.features.contains(Features::BUFFER_BINDING_ARRAY) {
            // gather all the buffers first
            for entry in desc.entries.iter() {
                if let BindingResource::BufferArray(array) = entry.resource {
                    arrayed_buffer_bindings.extend(array.iter().map(|binding| bm::BufferBinding {
                        buffer: binding.buffer.inner.as_core().id,
                        offset: binding.offset,
                        size: binding.size.map(wgt::BufferSize::get),
                    }));
                }
            }
        }
        let mut remaining_arrayed_buffer_bindings = &arrayed_buffer_bindings[..];

        // Pass 2: build the core entries. Arrayed variants slice off the front
        // of the corresponding `remaining_*` cursor, which works because both
        // passes visit entries in the same order.
        let entries = desc
            .entries
            .iter()
            .map(|entry| bm::BindGroupEntry {
                binding: entry.binding,
                resource: match entry.resource {
                    BindingResource::Buffer(BufferBinding {
                        buffer,
                        offset,
                        size,
                    }) => bm::BindingResource::Buffer(bm::BufferBinding {
                        buffer: buffer.inner.as_core().id,
                        offset,
                        size: size.map(wgt::BufferSize::get),
                    }),
                    BindingResource::BufferArray(array) => {
                        let slice = &remaining_arrayed_buffer_bindings[..array.len()];
                        remaining_arrayed_buffer_bindings =
                            &remaining_arrayed_buffer_bindings[array.len()..];
                        bm::BindingResource::BufferArray(Borrowed(slice))
                    }
                    BindingResource::Sampler(sampler) => {
                        bm::BindingResource::Sampler(sampler.inner.as_core().id)
                    }
                    BindingResource::SamplerArray(array) => {
                        let slice = &remaining_arrayed_samplers[..array.len()];
                        remaining_arrayed_samplers = &remaining_arrayed_samplers[array.len()..];
                        bm::BindingResource::SamplerArray(Borrowed(slice))
                    }
                    BindingResource::TextureView(texture_view) => {
                        bm::BindingResource::TextureView(texture_view.inner.as_core().id)
                    }
                    BindingResource::TextureViewArray(array) => {
                        let slice = &remaining_arrayed_texture_views[..array.len()];
                        remaining_arrayed_texture_views =
                            &remaining_arrayed_texture_views[array.len()..];
                        bm::BindingResource::TextureViewArray(Borrowed(slice))
                    }
                    BindingResource::AccelerationStructure(acceleration_structure) => {
                        bm::BindingResource::AccelerationStructure(
                            acceleration_structure.inner.as_core().id,
                        )
                    }
                    BindingResource::ExternalTexture(external_texture) => {
                        bm::BindingResource::ExternalTexture(external_texture.inner.as_core().id)
                    }
                },
            })
            .collect::<Vec<_>>();
        let descriptor = bm::BindGroupDescriptor {
            label: desc.label.as_ref().map(|label| Borrowed(&label[..])),
            layout: desc.layout.inner.as_core().id,
            entries: Borrowed(&entries),
        };

        let (id, error) = self
            .context
            .0
            .device_create_bind_group(self.id, &descriptor, None);
        if let Some(cause) = error {
            self.context.handle_error(
                &self.error_sink,
                cause,
                desc.label,
                "Device::create_bind_group",
            );
        }
        CoreBindGroup {
            context: self.context.clone(),
            id,
        }
        .into()
    }
1269
1270    fn create_pipeline_layout(
1271        &self,
1272        desc: &crate::PipelineLayoutDescriptor<'_>,
1273    ) -> dispatch::DispatchPipelineLayout {
1274        // Limit is always less or equal to hal::MAX_BIND_GROUPS, so this is always right
1275        // Guards following ArrayVec
1276        assert!(
1277            desc.bind_group_layouts.len() <= wgc::MAX_BIND_GROUPS,
1278            "Bind group layout count {} exceeds device bind group limit {}",
1279            desc.bind_group_layouts.len(),
1280            wgc::MAX_BIND_GROUPS
1281        );
1282
1283        let temp_layouts = desc
1284            .bind_group_layouts
1285            .iter()
1286            .map(|bgl| bgl.inner.as_core().id)
1287            .collect::<ArrayVec<_, { wgc::MAX_BIND_GROUPS }>>();
1288        let descriptor = wgc::binding_model::PipelineLayoutDescriptor {
1289            label: desc.label.map(Borrowed),
1290            bind_group_layouts: Borrowed(&temp_layouts),
1291            immediate_size: desc.immediate_size,
1292        };
1293
1294        let (id, error) = self
1295            .context
1296            .0
1297            .device_create_pipeline_layout(self.id, &descriptor, None);
1298        if let Some(cause) = error {
1299            self.context.handle_error(
1300                &self.error_sink,
1301                cause,
1302                desc.label,
1303                "Device::create_pipeline_layout",
1304            );
1305        }
1306        CorePipelineLayout {
1307            context: self.context.clone(),
1308            id,
1309        }
1310        .into()
1311    }
1312
    /// Creates a render pipeline, translating the public descriptor
    /// field-for-field into wgpu-core's borrowed descriptor.
    fn create_render_pipeline(
        &self,
        desc: &crate::RenderPipelineDescriptor<'_>,
    ) -> dispatch::DispatchRenderPipeline {
        use wgc::pipeline as pipe;

        // Vertex buffer layouts are bounded by MAX_VERTEX_BUFFERS, so an
        // ArrayVec avoids a heap allocation here.
        let vertex_buffers: ArrayVec<_, { wgc::MAX_VERTEX_BUFFERS }> = desc
            .vertex
            .buffers
            .iter()
            .map(|vbuf| pipe::VertexBufferLayout {
                array_stride: vbuf.array_stride,
                step_mode: vbuf.step_mode,
                attributes: Borrowed(vbuf.attributes),
            })
            .collect();

        // Pipeline-overridable constants are passed as owned (name, value) pairs.
        let vert_constants = desc
            .vertex
            .compilation_options
            .constants
            .iter()
            .map(|&(key, value)| (String::from(key), value))
            .collect();

        let descriptor = pipe::RenderPipelineDescriptor {
            label: desc.label.map(Borrowed),
            layout: desc.layout.map(|layout| layout.inner.as_core().id),
            vertex: pipe::VertexState {
                stage: pipe::ProgrammableStageDescriptor {
                    module: desc.vertex.module.inner.as_core().id,
                    entry_point: desc.vertex.entry_point.map(Borrowed),
                    constants: vert_constants,
                    zero_initialize_workgroup_memory: desc
                        .vertex
                        .compilation_options
                        .zero_initialize_workgroup_memory,
                },
                buffers: Borrowed(&vertex_buffers),
            },
            primitive: desc.primitive,
            depth_stencil: desc.depth_stencil.clone(),
            multisample: desc.multisample,
            // The fragment stage is optional (e.g. depth-only pipelines).
            fragment: desc.fragment.as_ref().map(|frag| {
                let frag_constants = frag
                    .compilation_options
                    .constants
                    .iter()
                    .map(|&(key, value)| (String::from(key), value))
                    .collect();
                pipe::FragmentState {
                    stage: pipe::ProgrammableStageDescriptor {
                        module: frag.module.inner.as_core().id,
                        entry_point: frag.entry_point.map(Borrowed),
                        constants: frag_constants,
                        zero_initialize_workgroup_memory: frag
                            .compilation_options
                            .zero_initialize_workgroup_memory,
                    },
                    targets: Borrowed(frag.targets),
                }
            }),
            multiview_mask: desc.multiview_mask,
            cache: desc.cache.map(|cache| cache.inner.as_core().id),
        };

        let (id, error) = self
            .context
            .0
            .device_create_render_pipeline(self.id, &descriptor, None);
        if let Some(cause) = error {
            // Internal translation failures are wgpu bugs rather than user
            // errors; log loudly and ask for a report before routing to the sink.
            if let wgc::pipeline::CreateRenderPipelineError::Internal { stage, ref error } = cause {
                log::error!("Shader translation error for stage {stage:?}: {error}");
                log::error!("Please report it to https://github.com/gfx-rs/wgpu");
            }
            self.context.handle_error(
                &self.error_sink,
                cause,
                desc.label,
                "Device::create_render_pipeline",
            );
        }
        CoreRenderPipeline {
            context: self.context.clone(),
            id,
            error_sink: Arc::clone(&self.error_sink),
        }
        .into()
    }
1402
1403    fn create_mesh_pipeline(
1404        &self,
1405        desc: &crate::MeshPipelineDescriptor<'_>,
1406    ) -> dispatch::DispatchRenderPipeline {
1407        use wgc::pipeline as pipe;
1408
1409        let mesh_constants = desc
1410            .mesh
1411            .compilation_options
1412            .constants
1413            .iter()
1414            .map(|&(key, value)| (String::from(key), value))
1415            .collect();
1416        let descriptor = pipe::MeshPipelineDescriptor {
1417            label: desc.label.map(Borrowed),
1418            task: desc.task.as_ref().map(|task| {
1419                let task_constants = task
1420                    .compilation_options
1421                    .constants
1422                    .iter()
1423                    .map(|&(key, value)| (String::from(key), value))
1424                    .collect();
1425                pipe::TaskState {
1426                    stage: pipe::ProgrammableStageDescriptor {
1427                        module: task.module.inner.as_core().id,
1428                        entry_point: task.entry_point.map(Borrowed),
1429                        constants: task_constants,
1430                        zero_initialize_workgroup_memory: desc
1431                            .mesh
1432                            .compilation_options
1433                            .zero_initialize_workgroup_memory,
1434                    },
1435                }
1436            }),
1437            mesh: pipe::MeshState {
1438                stage: pipe::ProgrammableStageDescriptor {
1439                    module: desc.mesh.module.inner.as_core().id,
1440                    entry_point: desc.mesh.entry_point.map(Borrowed),
1441                    constants: mesh_constants,
1442                    zero_initialize_workgroup_memory: desc
1443                        .mesh
1444                        .compilation_options
1445                        .zero_initialize_workgroup_memory,
1446                },
1447            },
1448            layout: desc.layout.map(|layout| layout.inner.as_core().id),
1449            primitive: desc.primitive,
1450            depth_stencil: desc.depth_stencil.clone(),
1451            multisample: desc.multisample,
1452            fragment: desc.fragment.as_ref().map(|frag| {
1453                let frag_constants = frag
1454                    .compilation_options
1455                    .constants
1456                    .iter()
1457                    .map(|&(key, value)| (String::from(key), value))
1458                    .collect();
1459                pipe::FragmentState {
1460                    stage: pipe::ProgrammableStageDescriptor {
1461                        module: frag.module.inner.as_core().id,
1462                        entry_point: frag.entry_point.map(Borrowed),
1463                        constants: frag_constants,
1464                        zero_initialize_workgroup_memory: frag
1465                            .compilation_options
1466                            .zero_initialize_workgroup_memory,
1467                    },
1468                    targets: Borrowed(frag.targets),
1469                }
1470            }),
1471            multiview: desc.multiview,
1472            cache: desc.cache.map(|cache| cache.inner.as_core().id),
1473        };
1474
1475        let (id, error) = self
1476            .context
1477            .0
1478            .device_create_mesh_pipeline(self.id, &descriptor, None);
1479        if let Some(cause) = error {
1480            if let wgc::pipeline::CreateRenderPipelineError::Internal { stage, ref error } = cause {
1481                log::error!("Shader translation error for stage {stage:?}: {error}");
1482                log::error!("Please report it to https://github.com/gfx-rs/wgpu");
1483            }
1484            self.context.handle_error(
1485                &self.error_sink,
1486                cause,
1487                desc.label,
1488                "Device::create_render_pipeline",
1489            );
1490        }
1491        CoreRenderPipeline {
1492            context: self.context.clone(),
1493            id,
1494            error_sink: Arc::clone(&self.error_sink),
1495        }
1496        .into()
1497    }
1498
1499    fn create_compute_pipeline(
1500        &self,
1501        desc: &crate::ComputePipelineDescriptor<'_>,
1502    ) -> dispatch::DispatchComputePipeline {
1503        use wgc::pipeline as pipe;
1504
1505        let constants = desc
1506            .compilation_options
1507            .constants
1508            .iter()
1509            .map(|&(key, value)| (String::from(key), value))
1510            .collect();
1511
1512        let descriptor = pipe::ComputePipelineDescriptor {
1513            label: desc.label.map(Borrowed),
1514            layout: desc.layout.map(|pll| pll.inner.as_core().id),
1515            stage: pipe::ProgrammableStageDescriptor {
1516                module: desc.module.inner.as_core().id,
1517                entry_point: desc.entry_point.map(Borrowed),
1518                constants,
1519                zero_initialize_workgroup_memory: desc
1520                    .compilation_options
1521                    .zero_initialize_workgroup_memory,
1522            },
1523            cache: desc.cache.map(|cache| cache.inner.as_core().id),
1524        };
1525
1526        let (id, error) = self
1527            .context
1528            .0
1529            .device_create_compute_pipeline(self.id, &descriptor, None);
1530        if let Some(cause) = error {
1531            if let wgc::pipeline::CreateComputePipelineError::Internal(ref error) = cause {
1532                log::error!(
1533                    "Shader translation error for stage {:?}: {}",
1534                    wgt::ShaderStages::COMPUTE,
1535                    error
1536                );
1537                log::error!("Please report it to https://github.com/gfx-rs/wgpu");
1538            }
1539            self.context.handle_error(
1540                &self.error_sink,
1541                cause,
1542                desc.label,
1543                "Device::create_compute_pipeline",
1544            );
1545        }
1546        CoreComputePipeline {
1547            context: self.context.clone(),
1548            id,
1549            error_sink: Arc::clone(&self.error_sink),
1550        }
1551        .into()
1552    }
1553
1554    unsafe fn create_pipeline_cache(
1555        &self,
1556        desc: &crate::PipelineCacheDescriptor<'_>,
1557    ) -> dispatch::DispatchPipelineCache {
1558        use wgc::pipeline as pipe;
1559
1560        let descriptor = pipe::PipelineCacheDescriptor {
1561            label: desc.label.map(Borrowed),
1562            data: desc.data.map(Borrowed),
1563            fallback: desc.fallback,
1564        };
1565        let (id, error) = unsafe {
1566            self.context
1567                .0
1568                .device_create_pipeline_cache(self.id, &descriptor, None)
1569        };
1570        if let Some(cause) = error {
1571            self.context.handle_error(
1572                &self.error_sink,
1573                cause,
1574                desc.label,
1575                "Device::device_create_pipeline_cache_init",
1576            );
1577        }
1578        CorePipelineCache {
1579            context: self.context.clone(),
1580            id,
1581        }
1582        .into()
1583    }
1584
1585    fn create_buffer(&self, desc: &crate::BufferDescriptor<'_>) -> dispatch::DispatchBuffer {
1586        let (id, error) = self.context.0.device_create_buffer(
1587            self.id,
1588            &desc.map_label(|l| l.map(Borrowed)),
1589            None,
1590        );
1591        if let Some(cause) = error {
1592            self.context
1593                .handle_error(&self.error_sink, cause, desc.label, "Device::create_buffer");
1594        }
1595
1596        CoreBuffer {
1597            context: self.context.clone(),
1598            id,
1599            error_sink: Arc::clone(&self.error_sink),
1600        }
1601        .into()
1602    }
1603
1604    fn create_texture(&self, desc: &crate::TextureDescriptor<'_>) -> dispatch::DispatchTexture {
1605        let wgt_desc = desc.map_label_and_view_formats(|l| l.map(Borrowed), |v| v.to_vec());
1606        let (id, error) = self
1607            .context
1608            .0
1609            .device_create_texture(self.id, &wgt_desc, None);
1610        if let Some(cause) = error {
1611            self.context.handle_error(
1612                &self.error_sink,
1613                cause,
1614                desc.label,
1615                "Device::create_texture",
1616            );
1617        }
1618
1619        CoreTexture {
1620            context: self.context.clone(),
1621            id,
1622            error_sink: Arc::clone(&self.error_sink),
1623        }
1624        .into()
1625    }
1626
1627    fn create_external_texture(
1628        &self,
1629        desc: &crate::ExternalTextureDescriptor<'_>,
1630        planes: &[&crate::TextureView],
1631    ) -> dispatch::DispatchExternalTexture {
1632        let wgt_desc = desc.map_label(|l| l.map(Borrowed));
1633        let planes = planes
1634            .iter()
1635            .map(|plane| plane.inner.as_core().id)
1636            .collect::<Vec<_>>();
1637        let (id, error) = self
1638            .context
1639            .0
1640            .device_create_external_texture(self.id, &wgt_desc, &planes, None);
1641        if let Some(cause) = error {
1642            self.context.handle_error(
1643                &self.error_sink,
1644                cause,
1645                desc.label,
1646                "Device::create_external_texture",
1647            );
1648        }
1649
1650        CoreExternalTexture {
1651            context: self.context.clone(),
1652            id,
1653        }
1654        .into()
1655    }
1656
1657    fn create_blas(
1658        &self,
1659        desc: &crate::CreateBlasDescriptor<'_>,
1660        sizes: crate::BlasGeometrySizeDescriptors,
1661    ) -> (Option<u64>, dispatch::DispatchBlas) {
1662        let global = &self.context.0;
1663        let (id, handle, error) =
1664            global.device_create_blas(self.id, &desc.map_label(|l| l.map(Borrowed)), sizes, None);
1665        if let Some(cause) = error {
1666            self.context
1667                .handle_error(&self.error_sink, cause, desc.label, "Device::create_blas");
1668        }
1669        (
1670            handle,
1671            CoreBlas {
1672                context: self.context.clone(),
1673                id,
1674                error_sink: Arc::clone(&self.error_sink),
1675            }
1676            .into(),
1677        )
1678    }
1679
1680    fn create_tlas(&self, desc: &crate::CreateTlasDescriptor<'_>) -> dispatch::DispatchTlas {
1681        let global = &self.context.0;
1682        let (id, error) =
1683            global.device_create_tlas(self.id, &desc.map_label(|l| l.map(Borrowed)), None);
1684        if let Some(cause) = error {
1685            self.context
1686                .handle_error(&self.error_sink, cause, desc.label, "Device::create_tlas");
1687        }
1688        CoreTlas {
1689            context: self.context.clone(),
1690            id,
1691            // error_sink: Arc::clone(&self.error_sink),
1692        }
1693        .into()
1694    }
1695
1696    fn create_sampler(&self, desc: &crate::SamplerDescriptor<'_>) -> dispatch::DispatchSampler {
1697        let descriptor = wgc::resource::SamplerDescriptor {
1698            label: desc.label.map(Borrowed),
1699            address_modes: [
1700                desc.address_mode_u,
1701                desc.address_mode_v,
1702                desc.address_mode_w,
1703            ],
1704            mag_filter: desc.mag_filter,
1705            min_filter: desc.min_filter,
1706            mipmap_filter: desc.mipmap_filter,
1707            lod_min_clamp: desc.lod_min_clamp,
1708            lod_max_clamp: desc.lod_max_clamp,
1709            compare: desc.compare,
1710            anisotropy_clamp: desc.anisotropy_clamp,
1711            border_color: desc.border_color,
1712        };
1713
1714        let (id, error) = self
1715            .context
1716            .0
1717            .device_create_sampler(self.id, &descriptor, None);
1718        if let Some(cause) = error {
1719            self.context.handle_error(
1720                &self.error_sink,
1721                cause,
1722                desc.label,
1723                "Device::create_sampler",
1724            );
1725        }
1726        CoreSampler {
1727            context: self.context.clone(),
1728            id,
1729        }
1730        .into()
1731    }
1732
1733    fn create_query_set(&self, desc: &crate::QuerySetDescriptor<'_>) -> dispatch::DispatchQuerySet {
1734        let (id, error) = self.context.0.device_create_query_set(
1735            self.id,
1736            &desc.map_label(|l| l.map(Borrowed)),
1737            None,
1738        );
1739        if let Some(cause) = error {
1740            self.context
1741                .handle_error_nolabel(&self.error_sink, cause, "Device::create_query_set");
1742        }
1743        CoreQuerySet {
1744            context: self.context.clone(),
1745            id,
1746        }
1747        .into()
1748    }
1749
1750    fn create_command_encoder(
1751        &self,
1752        desc: &crate::CommandEncoderDescriptor<'_>,
1753    ) -> dispatch::DispatchCommandEncoder {
1754        let (id, error) = self.context.0.device_create_command_encoder(
1755            self.id,
1756            &desc.map_label(|l| l.map(Borrowed)),
1757            None,
1758        );
1759        if let Some(cause) = error {
1760            self.context.handle_error(
1761                &self.error_sink,
1762                cause,
1763                desc.label,
1764                "Device::create_command_encoder",
1765            );
1766        }
1767
1768        CoreCommandEncoder {
1769            context: self.context.clone(),
1770            id,
1771            error_sink: Arc::clone(&self.error_sink),
1772        }
1773        .into()
1774    }
1775
1776    fn create_render_bundle_encoder(
1777        &self,
1778        desc: &crate::RenderBundleEncoderDescriptor<'_>,
1779    ) -> dispatch::DispatchRenderBundleEncoder {
1780        let descriptor = wgc::command::RenderBundleEncoderDescriptor {
1781            label: desc.label.map(Borrowed),
1782            color_formats: Borrowed(desc.color_formats),
1783            depth_stencil: desc.depth_stencil,
1784            sample_count: desc.sample_count,
1785            multiview: desc.multiview,
1786        };
1787        let encoder = match wgc::command::RenderBundleEncoder::new(&descriptor, self.id) {
1788            Ok(encoder) => encoder,
1789            Err(e) => panic!("Error in Device::create_render_bundle_encoder: {e}"),
1790        };
1791
1792        CoreRenderBundleEncoder {
1793            context: self.context.clone(),
1794            encoder,
1795            id: crate::cmp::Identifier::create(),
1796        }
1797        .into()
1798    }
1799
    fn set_device_lost_callback(&self, device_lost_callback: dispatch::BoxDeviceLostCallback) {
        // Register the closure wgpu-core will invoke when this device is lost.
        self.context
            .0
            .device_set_device_lost_closure(self.id, device_lost_callback);
    }
1805
1806    fn on_uncaptured_error(&self, handler: Arc<dyn crate::UncapturedErrorHandler>) {
1807        let mut error_sink = self.error_sink.lock();
1808        error_sink.uncaptured_handler = Some(handler);
1809    }
1810
1811    fn push_error_scope(&self, filter: crate::ErrorFilter) -> u32 {
1812        let mut error_sink = self.error_sink.lock();
1813        let thread_id = thread_id::ThreadId::current();
1814        let scopes = error_sink.scopes.entry(thread_id).or_default();
1815        let index = scopes
1816            .len()
1817            .try_into()
1818            .expect("Greater than 2^32 nested error scopes");
1819        scopes.push(ErrorScope {
1820            error: None,
1821            filter,
1822        });
1823        index
1824    }
1825
    /// Pops the error scope identified by `index` and resolves to whatever
    /// error it captured (if any). Scopes are thread-local, so the pop must
    /// happen on the same thread that pushed.
    fn pop_error_scope(&self, index: u32) -> Pin<Box<dyn dispatch::PopErrorScopeFuture>> {
        let mut error_sink = self.error_sink.lock();

        // We go out of our way to avoid panicking while unwinding, because that would abort the process,
        // and we are supposed to just drop the error scope on the floor.
        let is_panicking = crate::util::is_panicking();
        let thread_id = thread_id::ThreadId::current();
        let err = "Mismatched pop_error_scope call: no error scope for this thread. Error scopes are thread-local.";
        // Popping on a thread that never pushed is a caller bug (unless we are
        // already unwinding, in which case we silently resolve to `None`).
        let scopes = match error_sink.scopes.get_mut(&thread_id) {
            Some(s) => s,
            None => {
                if !is_panicking {
                    panic!("{err}");
                } else {
                    return Box::pin(ready(None));
                }
            }
        };
        if scopes.is_empty() && !is_panicking {
            panic!("{err}");
        }
        // `index` is the token returned by `push_error_scope`; it must match
        // the top of the stack, i.e. scopes pop strictly in reverse order.
        if index as usize != scopes.len() - 1 && !is_panicking {
            panic!(
                "Mismatched pop_error_scope call: error scopes must be popped in reverse order."
            );
        }

        // It would be more correct in this case to use `remove` here so that when unwinding is occurring
        // we would remove the correct error scope, but we don't have such a primitive on the web
        // and having consistent behavior here is more important. If you are unwinding and it unwinds
        // the guards in the wrong order, it's totally reasonable to have incorrect behavior.
        let scope = match scopes.pop() {
            Some(s) => s,
            None if !is_panicking => unreachable!(),
            None => return Box::pin(ready(None)),
        };

        // The future is already resolved: errors are recorded synchronously.
        Box::pin(ready(scope.error))
    }
1865
    unsafe fn start_graphics_debugger_capture(&self) {
        // Forwarded directly to wgpu-core; see the trait for the safety contract.
        unsafe {
            self.context
                .0
                .device_start_graphics_debugger_capture(self.id)
        };
    }
1873
    unsafe fn stop_graphics_debugger_capture(&self) {
        // Forwarded directly to wgpu-core; see the trait for the safety contract.
        unsafe {
            self.context
                .0
                .device_stop_graphics_debugger_capture(self.id)
        };
    }
1881
1882    fn poll(&self, poll_type: wgt::PollType<u64>) -> Result<crate::PollStatus, crate::PollError> {
1883        match self.context.0.device_poll(self.id, poll_type) {
1884            Ok(status) => Ok(status),
1885            Err(err) => {
1886                if let Some(poll_error) = err.to_poll_error() {
1887                    return Err(poll_error);
1888                }
1889
1890                self.context.handle_error_fatal(err, "Device::poll")
1891            }
1892        }
1893    }
1894
    // Thin forwarder to wgpu-core's per-device counters.
    fn get_internal_counters(&self) -> crate::InternalCounters {
        self.context.0.device_get_internal_counters(self.id)
    }
1898
    // Thin forwarder; `None` when wgpu-core has no report to offer.
    fn generate_allocator_report(&self) -> Option<wgt::AllocatorReport> {
        self.context.0.device_generate_allocator_report(self.id)
    }
1902
    // Explicitly destroy the device; the handle itself is released on drop.
    fn destroy(&self) {
        self.context.0.device_destroy(self.id);
    }
1906}
1907
impl Drop for CoreDevice {
    fn drop(&mut self) {
        // Release this handle's reference to the wgpu-core device.
        self.context.0.device_drop(self.id)
    }
}
1913
1914impl dispatch::QueueInterface for CoreQueue {
1915    fn write_buffer(
1916        &self,
1917        buffer: &dispatch::DispatchBuffer,
1918        offset: crate::BufferAddress,
1919        data: &[u8],
1920    ) {
1921        let buffer = buffer.as_core();
1922
1923        match self
1924            .context
1925            .0
1926            .queue_write_buffer(self.id, buffer.id, offset, data)
1927        {
1928            Ok(()) => (),
1929            Err(err) => {
1930                self.context
1931                    .handle_error_nolabel(&self.error_sink, err, "Queue::write_buffer")
1932            }
1933        }
1934    }
1935
1936    fn create_staging_buffer(
1937        &self,
1938        size: crate::BufferSize,
1939    ) -> Option<dispatch::DispatchQueueWriteBuffer> {
1940        match self
1941            .context
1942            .0
1943            .queue_create_staging_buffer(self.id, size, None)
1944        {
1945            Ok((buffer_id, ptr)) => Some(
1946                CoreQueueWriteBuffer {
1947                    buffer_id,
1948                    mapping: CoreBufferMappedRange {
1949                        ptr,
1950                        size: size.get() as usize,
1951                    },
1952                }
1953                .into(),
1954            ),
1955            Err(err) => {
1956                self.context.handle_error_nolabel(
1957                    &self.error_sink,
1958                    err,
1959                    "Queue::write_buffer_with",
1960                );
1961                None
1962            }
1963        }
1964    }
1965
1966    fn validate_write_buffer(
1967        &self,
1968        buffer: &dispatch::DispatchBuffer,
1969        offset: wgt::BufferAddress,
1970        size: wgt::BufferSize,
1971    ) -> Option<()> {
1972        let buffer = buffer.as_core();
1973
1974        match self
1975            .context
1976            .0
1977            .queue_validate_write_buffer(self.id, buffer.id, offset, size)
1978        {
1979            Ok(()) => Some(()),
1980            Err(err) => {
1981                self.context.handle_error_nolabel(
1982                    &self.error_sink,
1983                    err,
1984                    "Queue::write_buffer_with",
1985                );
1986                None
1987            }
1988        }
1989    }
1990
1991    fn write_staging_buffer(
1992        &self,
1993        buffer: &dispatch::DispatchBuffer,
1994        offset: crate::BufferAddress,
1995        staging_buffer: &dispatch::DispatchQueueWriteBuffer,
1996    ) {
1997        let buffer = buffer.as_core();
1998        let staging_buffer = staging_buffer.as_core();
1999
2000        match self.context.0.queue_write_staging_buffer(
2001            self.id,
2002            buffer.id,
2003            offset,
2004            staging_buffer.buffer_id,
2005        ) {
2006            Ok(()) => (),
2007            Err(err) => {
2008                self.context.handle_error_nolabel(
2009                    &self.error_sink,
2010                    err,
2011                    "Queue::write_buffer_with",
2012                );
2013            }
2014        }
2015    }
2016
2017    fn write_texture(
2018        &self,
2019        texture: crate::TexelCopyTextureInfo<'_>,
2020        data: &[u8],
2021        data_layout: crate::TexelCopyBufferLayout,
2022        size: crate::Extent3d,
2023    ) {
2024        match self.context.0.queue_write_texture(
2025            self.id,
2026            &map_texture_copy_view(texture),
2027            data,
2028            &data_layout,
2029            &size,
2030        ) {
2031            Ok(()) => (),
2032            Err(err) => {
2033                self.context
2034                    .handle_error_nolabel(&self.error_sink, err, "Queue::write_texture")
2035            }
2036        }
2037    }
2038
2039    // This method needs to exist if either webgpu or webgl is enabled,
2040    // but we only actually have an implementation if webgl is enabled.
2041    #[cfg(web)]
2042    #[cfg_attr(not(webgl), expect(unused_variables))]
2043    fn copy_external_image_to_texture(
2044        &self,
2045        source: &crate::CopyExternalImageSourceInfo,
2046        dest: crate::CopyExternalImageDestInfo<&crate::api::Texture>,
2047        size: crate::Extent3d,
2048    ) {
2049        #[cfg(webgl)]
2050        match self.context.0.queue_copy_external_image_to_texture(
2051            self.id,
2052            source,
2053            map_texture_tagged_copy_view(dest),
2054            size,
2055        ) {
2056            Ok(()) => (),
2057            Err(err) => self.context.handle_error_nolabel(
2058                &self.error_sink,
2059                err,
2060                "Queue::copy_external_image_to_texture",
2061            ),
2062        }
2063    }
2064
2065    fn submit(
2066        &self,
2067        command_buffers: &mut dyn Iterator<Item = dispatch::DispatchCommandBuffer>,
2068    ) -> u64 {
2069        let temp_command_buffers = command_buffers.collect::<SmallVec<[_; 4]>>();
2070        let command_buffer_ids = temp_command_buffers
2071            .iter()
2072            .map(|cmdbuf| cmdbuf.as_core().id)
2073            .collect::<SmallVec<[_; 4]>>();
2074
2075        let index = match self.context.0.queue_submit(self.id, &command_buffer_ids) {
2076            Ok(index) => index,
2077            Err((index, err)) => {
2078                self.context
2079                    .handle_error_nolabel(&self.error_sink, err, "Queue::submit");
2080                index
2081            }
2082        };
2083
2084        drop(temp_command_buffers);
2085
2086        index
2087    }
2088
2089    fn get_timestamp_period(&self) -> f32 {
2090        self.context.0.queue_get_timestamp_period(self.id)
2091    }
2092
2093    fn on_submitted_work_done(&self, callback: dispatch::BoxSubmittedWorkDoneCallback) {
2094        self.context
2095            .0
2096            .queue_on_submitted_work_done(self.id, callback);
2097    }
2098
2099    fn compact_blas(&self, blas: &dispatch::DispatchBlas) -> (Option<u64>, dispatch::DispatchBlas) {
2100        let (id, handle, error) =
2101            self.context
2102                .0
2103                .queue_compact_blas(self.id, blas.as_core().id, None);
2104
2105        if let Some(cause) = error {
2106            self.context
2107                .handle_error_nolabel(&self.error_sink, cause, "Queue::compact_blas");
2108        }
2109        (
2110            handle,
2111            CoreBlas {
2112                context: self.context.clone(),
2113                id,
2114                error_sink: Arc::clone(&self.error_sink),
2115            }
2116            .into(),
2117        )
2118    }
2119}
2120
impl Drop for CoreQueue {
    fn drop(&mut self) {
        // Release this handle's reference to the wgpu-core queue.
        self.context.0.queue_drop(self.id)
    }
}
2126
impl dispatch::ShaderModuleInterface for CoreShaderModule {
    fn get_compilation_info(&self) -> Pin<Box<dyn dispatch::ShaderCompilationInfoFuture>> {
        // Compilation info was captured when the module was created, so the
        // future is already resolved.
        Box::pin(ready(self.compilation_info.clone()))
    }
}
2132
impl Drop for CoreShaderModule {
    fn drop(&mut self) {
        // Release this handle's reference to the wgpu-core shader module.
        self.context.0.shader_module_drop(self.id)
    }
}
2138
// The dispatch trait requires no type-specific methods here; the empty impl
// just marks CoreBindGroupLayout as dispatchable.
impl dispatch::BindGroupLayoutInterface for CoreBindGroupLayout {}
2140
impl Drop for CoreBindGroupLayout {
    fn drop(&mut self) {
        // Release this handle's reference to the wgpu-core bind group layout.
        self.context.0.bind_group_layout_drop(self.id)
    }
}
2146
// No required methods; marks CoreBindGroup as dispatchable.
impl dispatch::BindGroupInterface for CoreBindGroup {}
2148
impl Drop for CoreBindGroup {
    fn drop(&mut self) {
        // Release this handle's reference to the wgpu-core bind group.
        self.context.0.bind_group_drop(self.id)
    }
}
2154
// No required methods; marks CoreTextureView as dispatchable.
impl dispatch::TextureViewInterface for CoreTextureView {}
2156
impl Drop for CoreTextureView {
    fn drop(&mut self) {
        // The returned error is deliberately discarded: there is nowhere to
        // report it from a destructor.
        // TODO: We don't use this error at all?
        let _ = self.context.0.texture_view_drop(self.id);
    }
}
2163
impl dispatch::ExternalTextureInterface for CoreExternalTexture {
    fn destroy(&self) {
        // Explicitly destroy the resource; the handle itself is released on drop.
        self.context.0.external_texture_destroy(self.id);
    }
}
2169
impl Drop for CoreExternalTexture {
    fn drop(&mut self) {
        // Release this handle's reference to the wgpu-core external texture.
        self.context.0.external_texture_drop(self.id);
    }
}
2175
// No required methods; marks CoreSampler as dispatchable.
impl dispatch::SamplerInterface for CoreSampler {}
2177
impl Drop for CoreSampler {
    fn drop(&mut self) {
        // Release this handle's reference to the wgpu-core sampler.
        self.context.0.sampler_drop(self.id)
    }
}
2183
2184impl dispatch::BufferInterface for CoreBuffer {
2185    fn map_async(
2186        &self,
2187        mode: crate::MapMode,
2188        range: Range<crate::BufferAddress>,
2189        callback: dispatch::BufferMapCallback,
2190    ) {
2191        let operation = wgc::resource::BufferMapOperation {
2192            host: match mode {
2193                MapMode::Read => wgc::device::HostMap::Read,
2194                MapMode::Write => wgc::device::HostMap::Write,
2195            },
2196            callback: Some(Box::new(|status| {
2197                let res = status.map_err(|_| crate::BufferAsyncError);
2198                callback(res);
2199            })),
2200        };
2201
2202        match self.context.0.buffer_map_async(
2203            self.id,
2204            range.start,
2205            Some(range.end - range.start),
2206            operation,
2207        ) {
2208            Ok(_) => (),
2209            Err(cause) => {
2210                self.context
2211                    .handle_error_nolabel(&self.error_sink, cause, "Buffer::map_async")
2212            }
2213        }
2214    }
2215
2216    fn get_mapped_range(
2217        &self,
2218        sub_range: Range<crate::BufferAddress>,
2219    ) -> dispatch::DispatchBufferMappedRange {
2220        let size = sub_range.end - sub_range.start;
2221        match self
2222            .context
2223            .0
2224            .buffer_get_mapped_range(self.id, sub_range.start, Some(size))
2225        {
2226            Ok((ptr, size)) => CoreBufferMappedRange {
2227                ptr,
2228                size: size as usize,
2229            }
2230            .into(),
2231            Err(err) => self
2232                .context
2233                .handle_error_fatal(err, "Buffer::get_mapped_range"),
2234        }
2235    }
2236
2237    fn unmap(&self) {
2238        match self.context.0.buffer_unmap(self.id) {
2239            Ok(()) => (),
2240            Err(cause) => {
2241                self.context
2242                    .handle_error_nolabel(&self.error_sink, cause, "Buffer::buffer_unmap")
2243            }
2244        }
2245    }
2246
2247    fn destroy(&self) {
2248        self.context.0.buffer_destroy(self.id);
2249    }
2250}
2251
impl Drop for CoreBuffer {
    fn drop(&mut self) {
        // Release this handle's reference to the wgpu-core buffer.
        self.context.0.buffer_drop(self.id)
    }
}
2257
2258impl dispatch::TextureInterface for CoreTexture {
2259    fn create_view(
2260        &self,
2261        desc: &crate::TextureViewDescriptor<'_>,
2262    ) -> dispatch::DispatchTextureView {
2263        let descriptor = wgc::resource::TextureViewDescriptor {
2264            label: desc.label.map(Borrowed),
2265            format: desc.format,
2266            dimension: desc.dimension,
2267            usage: desc.usage,
2268            range: wgt::ImageSubresourceRange {
2269                aspect: desc.aspect,
2270                base_mip_level: desc.base_mip_level,
2271                mip_level_count: desc.mip_level_count,
2272                base_array_layer: desc.base_array_layer,
2273                array_layer_count: desc.array_layer_count,
2274            },
2275        };
2276        let (id, error) = self
2277            .context
2278            .0
2279            .texture_create_view(self.id, &descriptor, None);
2280        if let Some(cause) = error {
2281            self.context
2282                .handle_error(&self.error_sink, cause, desc.label, "Texture::create_view");
2283        }
2284        CoreTextureView {
2285            context: self.context.clone(),
2286            id,
2287        }
2288        .into()
2289    }
2290
2291    fn destroy(&self) {
2292        self.context.0.texture_destroy(self.id);
2293    }
2294}
2295
impl Drop for CoreTexture {
    fn drop(&mut self) {
        // Release this handle's reference to the wgpu-core texture.
        self.context.0.texture_drop(self.id)
    }
}
2301
2302impl dispatch::BlasInterface for CoreBlas {
2303    fn prepare_compact_async(&self, callback: BlasCompactCallback) {
2304        let callback: Option<wgc::resource::BlasCompactCallback> =
2305            Some(Box::new(|status: BlasPrepareCompactResult| {
2306                let res = status.map_err(|_| crate::BlasAsyncError);
2307                callback(res);
2308            }));
2309
2310        match self.context.0.blas_prepare_compact_async(self.id, callback) {
2311            Ok(_) => (),
2312            Err(cause) => self.context.handle_error_nolabel(
2313                &self.error_sink,
2314                cause,
2315                "Blas::prepare_compact_async",
2316            ),
2317        }
2318    }
2319
2320    fn ready_for_compaction(&self) -> bool {
2321        match self.context.0.ready_for_compaction(self.id) {
2322            Ok(ready) => ready,
2323            Err(cause) => {
2324                self.context.handle_error_nolabel(
2325                    &self.error_sink,
2326                    cause,
2327                    "Blas::ready_for_compaction",
2328                );
2329                // A BLAS is definitely not ready for compaction if it's not valid
2330                false
2331            }
2332        }
2333    }
2334}
2335
impl Drop for CoreBlas {
    fn drop(&mut self) {
        // Release this handle's reference to the wgpu-core BLAS.
        self.context.0.blas_drop(self.id)
    }
}
2341
// No required methods; marks CoreTlas as dispatchable.
impl dispatch::TlasInterface for CoreTlas {}
2343
impl Drop for CoreTlas {
    fn drop(&mut self) {
        // Release this handle's reference to the wgpu-core TLAS.
        self.context.0.tlas_drop(self.id)
    }
}
2349
// No required methods; marks CoreQuerySet as dispatchable.
impl dispatch::QuerySetInterface for CoreQuerySet {}
2351
impl Drop for CoreQuerySet {
    fn drop(&mut self) {
        // Release this handle's reference to the wgpu-core query set.
        self.context.0.query_set_drop(self.id)
    }
}
2357
// No required methods; marks CorePipelineLayout as dispatchable.
impl dispatch::PipelineLayoutInterface for CorePipelineLayout {}
2359
impl Drop for CorePipelineLayout {
    fn drop(&mut self) {
        // Release this handle's reference to the wgpu-core pipeline layout.
        self.context.0.pipeline_layout_drop(self.id)
    }
}
2365
2366impl dispatch::RenderPipelineInterface for CoreRenderPipeline {
2367    fn get_bind_group_layout(&self, index: u32) -> dispatch::DispatchBindGroupLayout {
2368        let (id, error) = self
2369            .context
2370            .0
2371            .render_pipeline_get_bind_group_layout(self.id, index, None);
2372        if let Some(err) = error {
2373            self.context.handle_error_nolabel(
2374                &self.error_sink,
2375                err,
2376                "RenderPipeline::get_bind_group_layout",
2377            )
2378        }
2379        CoreBindGroupLayout {
2380            context: self.context.clone(),
2381            id,
2382        }
2383        .into()
2384    }
2385}
2386
impl Drop for CoreRenderPipeline {
    fn drop(&mut self) {
        // Release this handle's reference to the wgpu-core render pipeline.
        self.context.0.render_pipeline_drop(self.id)
    }
}
2392
2393impl dispatch::ComputePipelineInterface for CoreComputePipeline {
2394    fn get_bind_group_layout(&self, index: u32) -> dispatch::DispatchBindGroupLayout {
2395        let (id, error) = self
2396            .context
2397            .0
2398            .compute_pipeline_get_bind_group_layout(self.id, index, None);
2399        if let Some(err) = error {
2400            self.context.handle_error_nolabel(
2401                &self.error_sink,
2402                err,
2403                "ComputePipeline::get_bind_group_layout",
2404            )
2405        }
2406        CoreBindGroupLayout {
2407            context: self.context.clone(),
2408            id,
2409        }
2410        .into()
2411    }
2412}
2413
impl Drop for CoreComputePipeline {
    fn drop(&mut self) {
        // Release this handle's reference to the wgpu-core compute pipeline.
        self.context.0.compute_pipeline_drop(self.id)
    }
}
2419
impl dispatch::PipelineCacheInterface for CorePipelineCache {
    fn get_data(&self) -> Option<Vec<u8>> {
        // Serialized cache contents, if wgpu-core has any to offer.
        self.context.0.pipeline_cache_get_data(self.id)
    }
}
2425
impl Drop for CorePipelineCache {
    fn drop(&mut self) {
        // Release this handle's reference to the wgpu-core pipeline cache.
        self.context.0.pipeline_cache_drop(self.id)
    }
}
2431
2432impl dispatch::CommandEncoderInterface for CoreCommandEncoder {
2433    fn copy_buffer_to_buffer(
2434        &self,
2435        source: &dispatch::DispatchBuffer,
2436        source_offset: crate::BufferAddress,
2437        destination: &dispatch::DispatchBuffer,
2438        destination_offset: crate::BufferAddress,
2439        copy_size: Option<crate::BufferAddress>,
2440    ) {
2441        let source = source.as_core();
2442        let destination = destination.as_core();
2443
2444        if let Err(cause) = self.context.0.command_encoder_copy_buffer_to_buffer(
2445            self.id,
2446            source.id,
2447            source_offset,
2448            destination.id,
2449            destination_offset,
2450            copy_size,
2451        ) {
2452            self.context.handle_error_nolabel(
2453                &self.error_sink,
2454                cause,
2455                "CommandEncoder::copy_buffer_to_buffer",
2456            );
2457        }
2458    }
2459
2460    fn copy_buffer_to_texture(
2461        &self,
2462        source: crate::TexelCopyBufferInfo<'_>,
2463        destination: crate::TexelCopyTextureInfo<'_>,
2464        copy_size: crate::Extent3d,
2465    ) {
2466        if let Err(cause) = self.context.0.command_encoder_copy_buffer_to_texture(
2467            self.id,
2468            &map_buffer_copy_view(source),
2469            &map_texture_copy_view(destination),
2470            &copy_size,
2471        ) {
2472            self.context.handle_error_nolabel(
2473                &self.error_sink,
2474                cause,
2475                "CommandEncoder::copy_buffer_to_texture",
2476            );
2477        }
2478    }
2479
2480    fn copy_texture_to_buffer(
2481        &self,
2482        source: crate::TexelCopyTextureInfo<'_>,
2483        destination: crate::TexelCopyBufferInfo<'_>,
2484        copy_size: crate::Extent3d,
2485    ) {
2486        if let Err(cause) = self.context.0.command_encoder_copy_texture_to_buffer(
2487            self.id,
2488            &map_texture_copy_view(source),
2489            &map_buffer_copy_view(destination),
2490            &copy_size,
2491        ) {
2492            self.context.handle_error_nolabel(
2493                &self.error_sink,
2494                cause,
2495                "CommandEncoder::copy_texture_to_buffer",
2496            );
2497        }
2498    }
2499
2500    fn copy_texture_to_texture(
2501        &self,
2502        source: crate::TexelCopyTextureInfo<'_>,
2503        destination: crate::TexelCopyTextureInfo<'_>,
2504        copy_size: crate::Extent3d,
2505    ) {
2506        if let Err(cause) = self.context.0.command_encoder_copy_texture_to_texture(
2507            self.id,
2508            &map_texture_copy_view(source),
2509            &map_texture_copy_view(destination),
2510            &copy_size,
2511        ) {
2512            self.context.handle_error_nolabel(
2513                &self.error_sink,
2514                cause,
2515                "CommandEncoder::copy_texture_to_texture",
2516            );
2517        }
2518    }
2519
2520    fn begin_compute_pass(
2521        &self,
2522        desc: &crate::ComputePassDescriptor<'_>,
2523    ) -> dispatch::DispatchComputePass {
2524        let timestamp_writes =
2525            desc.timestamp_writes
2526                .as_ref()
2527                .map(|tw| wgc::command::PassTimestampWrites {
2528                    query_set: tw.query_set.inner.as_core().id,
2529                    beginning_of_pass_write_index: tw.beginning_of_pass_write_index,
2530                    end_of_pass_write_index: tw.end_of_pass_write_index,
2531                });
2532
2533        let (pass, err) = self.context.0.command_encoder_begin_compute_pass(
2534            self.id,
2535            &wgc::command::ComputePassDescriptor {
2536                label: desc.label.map(Borrowed),
2537                timestamp_writes,
2538            },
2539        );
2540
2541        if let Some(cause) = err {
2542            self.context.handle_error(
2543                &self.error_sink,
2544                cause,
2545                desc.label,
2546                "CommandEncoder::begin_compute_pass",
2547            );
2548        }
2549
2550        CoreComputePass {
2551            context: self.context.clone(),
2552            pass,
2553            error_sink: self.error_sink.clone(),
2554            id: crate::cmp::Identifier::create(),
2555        }
2556        .into()
2557    }
2558
    fn begin_render_pass(
        &self,
        desc: &crate::RenderPassDescriptor<'_>,
    ) -> dispatch::DispatchRenderPass {
        // Translate each (optional) color attachment into wgpu-core's form.
        let colors = desc
            .color_attachments
            .iter()
            .map(|ca| {
                ca.as_ref()
                    .map(|at| wgc::command::RenderPassColorAttachment {
                        view: at.view.inner.as_core().id,
                        depth_slice: at.depth_slice,
                        resolve_target: at.resolve_target.map(|view| view.inner.as_core().id),
                        load_op: at.ops.load,
                        store_op: at.ops.store,
                    })
            })
            .collect::<Vec<_>>();

        let depth_stencil = desc.depth_stencil_attachment.as_ref().map(|dsa| {
            wgc::command::RenderPassDepthStencilAttachment {
                view: dsa.view.inner.as_core().id,
                depth: map_pass_channel(dsa.depth_ops.as_ref()),
                stencil: map_pass_channel(dsa.stencil_ops.as_ref()),
            }
        });

        let timestamp_writes =
            desc.timestamp_writes
                .as_ref()
                .map(|tw| wgc::command::PassTimestampWrites {
                    query_set: tw.query_set.inner.as_core().id,
                    beginning_of_pass_write_index: tw.beginning_of_pass_write_index,
                    end_of_pass_write_index: tw.end_of_pass_write_index,
                });

        // The locals above are bound separately because the descriptor below
        // borrows them (via `Borrowed` and `as_ref`).
        let (pass, err) = self.context.0.command_encoder_begin_render_pass(
            self.id,
            &wgc::command::RenderPassDescriptor {
                label: desc.label.map(Borrowed),
                timestamp_writes: timestamp_writes.as_ref(),
                color_attachments: Borrowed(&colors),
                depth_stencil_attachment: depth_stencil.as_ref(),
                occlusion_query_set: desc.occlusion_query_set.map(|qs| qs.inner.as_core().id),
                multiview_mask: desc.multiview_mask,
            },
        );

        // A pass object is returned alongside any error; the error is
        // reported through the sink with the pass label attached.
        if let Some(cause) = err {
            self.context.handle_error(
                &self.error_sink,
                cause,
                desc.label,
                "CommandEncoder::begin_render_pass",
            );
        }

        CoreRenderPass {
            context: self.context.clone(),
            pass,
            error_sink: self.error_sink.clone(),
            id: crate::cmp::Identifier::create(),
        }
        .into()
    }
2624
2625    fn finish(&mut self) -> dispatch::DispatchCommandBuffer {
2626        let descriptor = wgt::CommandBufferDescriptor::default();
2627        let (id, opt_label_and_error) =
2628            self.context
2629                .0
2630                .command_encoder_finish(self.id, &descriptor, None);
2631        if let Some((label, cause)) = opt_label_and_error {
2632            self.context
2633                .handle_error(&self.error_sink, cause, Some(&label), "a CommandEncoder");
2634        }
2635        CoreCommandBuffer {
2636            context: self.context.clone(),
2637            id,
2638        }
2639        .into()
2640    }
2641
2642    fn clear_texture(
2643        &self,
2644        texture: &dispatch::DispatchTexture,
2645        subresource_range: &crate::ImageSubresourceRange,
2646    ) {
2647        let texture = texture.as_core();
2648
2649        if let Err(cause) =
2650            self.context
2651                .0
2652                .command_encoder_clear_texture(self.id, texture.id, subresource_range)
2653        {
2654            self.context.handle_error_nolabel(
2655                &self.error_sink,
2656                cause,
2657                "CommandEncoder::clear_texture",
2658            );
2659        }
2660    }
2661
2662    fn clear_buffer(
2663        &self,
2664        buffer: &dispatch::DispatchBuffer,
2665        offset: crate::BufferAddress,
2666        size: Option<crate::BufferAddress>,
2667    ) {
2668        let buffer = buffer.as_core();
2669
2670        if let Err(cause) = self
2671            .context
2672            .0
2673            .command_encoder_clear_buffer(self.id, buffer.id, offset, size)
2674        {
2675            self.context.handle_error_nolabel(
2676                &self.error_sink,
2677                cause,
2678                "CommandEncoder::fill_buffer",
2679            );
2680        }
2681    }
2682
2683    fn insert_debug_marker(&self, label: &str) {
2684        if let Err(cause) = self
2685            .context
2686            .0
2687            .command_encoder_insert_debug_marker(self.id, label)
2688        {
2689            self.context.handle_error_nolabel(
2690                &self.error_sink,
2691                cause,
2692                "CommandEncoder::insert_debug_marker",
2693            );
2694        }
2695    }
2696
2697    fn push_debug_group(&self, label: &str) {
2698        if let Err(cause) = self
2699            .context
2700            .0
2701            .command_encoder_push_debug_group(self.id, label)
2702        {
2703            self.context.handle_error_nolabel(
2704                &self.error_sink,
2705                cause,
2706                "CommandEncoder::push_debug_group",
2707            );
2708        }
2709    }
2710
2711    fn pop_debug_group(&self) {
2712        if let Err(cause) = self.context.0.command_encoder_pop_debug_group(self.id) {
2713            self.context.handle_error_nolabel(
2714                &self.error_sink,
2715                cause,
2716                "CommandEncoder::pop_debug_group",
2717            );
2718        }
2719    }
2720
2721    fn write_timestamp(&self, query_set: &dispatch::DispatchQuerySet, query_index: u32) {
2722        let query_set = query_set.as_core();
2723
2724        if let Err(cause) =
2725            self.context
2726                .0
2727                .command_encoder_write_timestamp(self.id, query_set.id, query_index)
2728        {
2729            self.context.handle_error_nolabel(
2730                &self.error_sink,
2731                cause,
2732                "CommandEncoder::write_timestamp",
2733            );
2734        }
2735    }
2736
2737    fn resolve_query_set(
2738        &self,
2739        query_set: &dispatch::DispatchQuerySet,
2740        first_query: u32,
2741        query_count: u32,
2742        destination: &dispatch::DispatchBuffer,
2743        destination_offset: crate::BufferAddress,
2744    ) {
2745        let query_set = query_set.as_core();
2746        let destination = destination.as_core();
2747
2748        if let Err(cause) = self.context.0.command_encoder_resolve_query_set(
2749            self.id,
2750            query_set.id,
2751            first_query,
2752            query_count,
2753            destination.id,
2754            destination_offset,
2755        ) {
2756            self.context.handle_error_nolabel(
2757                &self.error_sink,
2758                cause,
2759                "CommandEncoder::resolve_query_set",
2760            );
2761        }
2762    }
2763
2764    fn mark_acceleration_structures_built<'a>(
2765        &self,
2766        blas: &mut dyn Iterator<Item = &'a Blas>,
2767        tlas: &mut dyn Iterator<Item = &'a Tlas>,
2768    ) {
2769        let blas = blas
2770            .map(|b| b.inner.as_core().id)
2771            .collect::<SmallVec<[_; 4]>>();
2772        let tlas = tlas
2773            .map(|t| t.inner.as_core().id)
2774            .collect::<SmallVec<[_; 4]>>();
2775        if let Err(cause) = self
2776            .context
2777            .0
2778            .command_encoder_mark_acceleration_structures_built(self.id, &blas, &tlas)
2779        {
2780            self.context.handle_error_nolabel(
2781                &self.error_sink,
2782                cause,
2783                "CommandEncoder::build_acceleration_structures_unsafe_tlas",
2784            );
2785        }
2786    }
2787
2788    fn build_acceleration_structures<'a>(
2789        &self,
2790        blas: &mut dyn Iterator<Item = &'a crate::BlasBuildEntry<'a>>,
2791        tlas: &mut dyn Iterator<Item = &'a crate::Tlas>,
2792    ) {
2793        let blas = blas.map(|e: &crate::BlasBuildEntry<'_>| {
2794            let geometries = match e.geometry {
2795                crate::BlasGeometries::TriangleGeometries(ref triangle_geometries) => {
2796                    let iter = triangle_geometries.iter().map(|tg| {
2797                        wgc::ray_tracing::BlasTriangleGeometry {
2798                            vertex_buffer: tg.vertex_buffer.inner.as_core().id,
2799                            index_buffer: tg.index_buffer.map(|buf| buf.inner.as_core().id),
2800                            transform_buffer: tg.transform_buffer.map(|buf| buf.inner.as_core().id),
2801                            size: tg.size,
2802                            transform_buffer_offset: tg.transform_buffer_offset,
2803                            first_vertex: tg.first_vertex,
2804                            vertex_stride: tg.vertex_stride,
2805                            first_index: tg.first_index,
2806                        }
2807                    });
2808                    wgc::ray_tracing::BlasGeometries::TriangleGeometries(Box::new(iter))
2809                }
2810            };
2811            wgc::ray_tracing::BlasBuildEntry {
2812                blas_id: e.blas.inner.as_core().id,
2813                geometries,
2814            }
2815        });
2816
2817        let tlas = tlas.into_iter().map(|e| {
2818            let instances = e
2819                .instances
2820                .iter()
2821                .map(|instance: &Option<crate::TlasInstance>| {
2822                    instance
2823                        .as_ref()
2824                        .map(|instance| wgc::ray_tracing::TlasInstance {
2825                            blas_id: instance.blas.as_core().id,
2826                            transform: &instance.transform,
2827                            custom_data: instance.custom_data,
2828                            mask: instance.mask,
2829                        })
2830                });
2831            wgc::ray_tracing::TlasPackage {
2832                tlas_id: e.inner.as_core().id,
2833                instances: Box::new(instances),
2834                lowest_unmodified: e.lowest_unmodified,
2835            }
2836        });
2837
2838        if let Err(cause) = self
2839            .context
2840            .0
2841            .command_encoder_build_acceleration_structures(self.id, blas, tlas)
2842        {
2843            self.context.handle_error_nolabel(
2844                &self.error_sink,
2845                cause,
2846                "CommandEncoder::build_acceleration_structures_unsafe_tlas",
2847            );
2848        }
2849    }
2850
2851    fn transition_resources<'a>(
2852        &mut self,
2853        buffer_transitions: &mut dyn Iterator<
2854            Item = wgt::BufferTransition<&'a dispatch::DispatchBuffer>,
2855        >,
2856        texture_transitions: &mut dyn Iterator<
2857            Item = wgt::TextureTransition<&'a dispatch::DispatchTexture>,
2858        >,
2859    ) {
2860        let result = self.context.0.command_encoder_transition_resources(
2861            self.id,
2862            buffer_transitions.map(|t| wgt::BufferTransition {
2863                buffer: t.buffer.as_core().id,
2864                state: t.state,
2865            }),
2866            texture_transitions.map(|t| wgt::TextureTransition {
2867                texture: t.texture.as_core().id,
2868                selector: t.selector.clone(),
2869                state: t.state,
2870            }),
2871        );
2872
2873        if let Err(cause) = result {
2874            self.context.handle_error_nolabel(
2875                &self.error_sink,
2876                cause,
2877                "CommandEncoder::transition_resources",
2878            );
2879        }
2880    }
2881}
2882
impl Drop for CoreCommandEncoder {
    fn drop(&mut self) {
        // Release the wgpu-core side of this encoder when the wrapper is
        // dropped; any cleanup semantics live in `command_encoder_drop`.
        self.context.0.command_encoder_drop(self.id)
    }
}
2888
// No methods are overridden here — presumably `CommandBufferInterface` is a
// marker trait (or all its items have defaults); see `crate::dispatch`.
impl dispatch::CommandBufferInterface for CoreCommandBuffer {}
2890
impl Drop for CoreCommandBuffer {
    fn drop(&mut self) {
        // Release the wgpu-core command buffer backing this wrapper.
        self.context.0.command_buffer_drop(self.id)
    }
}
2896
/// Forwards each `ComputePassInterface` method to the corresponding wgpu-core
/// `compute_pass_*` entry point.
///
/// Every method follows the same pattern: translate dispatch wrappers to
/// wgpu-core ids where needed, invoke wgpu-core against `self.pass`, and on
/// failure report the error through the pass's error sink tagged with the
/// pass label and the method name — errors are never returned to the caller.
impl dispatch::ComputePassInterface for CoreComputePass {
    fn set_pipeline(&mut self, pipeline: &dispatch::DispatchComputePipeline) {
        let pipeline = pipeline.as_core();

        if let Err(cause) = self
            .context
            .0
            .compute_pass_set_pipeline(&mut self.pass, pipeline.id)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "ComputePass::set_pipeline",
            );
        }
    }

    fn set_bind_group(
        &mut self,
        index: u32,
        bind_group: Option<&dispatch::DispatchBindGroup>,
        offsets: &[crate::DynamicOffset],
    ) {
        // `None` unbinds the slot; wgpu-core takes an optional id.
        let bg = bind_group.map(|bg| bg.as_core().id);

        if let Err(cause) =
            self.context
                .0
                .compute_pass_set_bind_group(&mut self.pass, index, bg, offsets)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "ComputePass::set_bind_group",
            );
        }
    }

    fn set_immediates(&mut self, offset: u32, data: &[u8]) {
        if let Err(cause) = self
            .context
            .0
            .compute_pass_set_immediates(&mut self.pass, offset, data)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "ComputePass::set_immediates",
            );
        }
    }

    fn insert_debug_marker(&mut self, label: &str) {
        // NOTE(review): the trailing `0` looks like a color/flags argument of
        // the wgc call — confirm against `compute_pass_insert_debug_marker`.
        if let Err(cause) =
            self.context
                .0
                .compute_pass_insert_debug_marker(&mut self.pass, label, 0)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "ComputePass::insert_debug_marker",
            );
        }
    }

    fn push_debug_group(&mut self, group_label: &str) {
        if let Err(cause) =
            self.context
                .0
                .compute_pass_push_debug_group(&mut self.pass, group_label, 0)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "ComputePass::push_debug_group",
            );
        }
    }

    fn pop_debug_group(&mut self) {
        if let Err(cause) = self.context.0.compute_pass_pop_debug_group(&mut self.pass) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "ComputePass::pop_debug_group",
            );
        }
    }

    fn write_timestamp(&mut self, query_set: &dispatch::DispatchQuerySet, query_index: u32) {
        let query_set = query_set.as_core();

        if let Err(cause) =
            self.context
                .0
                .compute_pass_write_timestamp(&mut self.pass, query_set.id, query_index)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "ComputePass::write_timestamp",
            );
        }
    }

    fn begin_pipeline_statistics_query(
        &mut self,
        query_set: &dispatch::DispatchQuerySet,
        query_index: u32,
    ) {
        let query_set = query_set.as_core();

        if let Err(cause) = self.context.0.compute_pass_begin_pipeline_statistics_query(
            &mut self.pass,
            query_set.id,
            query_index,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "ComputePass::begin_pipeline_statistics_query",
            );
        }
    }

    fn end_pipeline_statistics_query(&mut self) {
        if let Err(cause) = self
            .context
            .0
            .compute_pass_end_pipeline_statistics_query(&mut self.pass)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "ComputePass::end_pipeline_statistics_query",
            );
        }
    }

    fn dispatch_workgroups(&mut self, x: u32, y: u32, z: u32) {
        if let Err(cause) = self
            .context
            .0
            .compute_pass_dispatch_workgroups(&mut self.pass, x, y, z)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "ComputePass::dispatch_workgroups",
            );
        }
    }

    fn dispatch_workgroups_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.compute_pass_dispatch_workgroups_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "ComputePass::dispatch_workgroups_indirect",
            );
        }
    }
}
3082
impl Drop for CoreComputePass {
    fn drop(&mut self) {
        // Ending the pass on drop finalizes the recorded commands; an end
        // error is routed to the error sink like any other pass error.
        if let Err(cause) = self.context.0.compute_pass_end(&mut self.pass) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "ComputePass::end",
            );
        }
    }
}
3095
/// Forwards each `RenderPassInterface` method to the corresponding wgpu-core
/// `render_pass_*` entry point.
///
/// Every method follows the same pattern: translate dispatch wrappers to
/// wgpu-core ids where needed, invoke wgpu-core against `self.pass`, and on
/// failure report the error through the pass's error sink tagged with the
/// pass label and the method name — errors are never returned to the caller.
impl dispatch::RenderPassInterface for CoreRenderPass {
    fn set_pipeline(&mut self, pipeline: &dispatch::DispatchRenderPipeline) {
        let pipeline = pipeline.as_core();

        if let Err(cause) = self
            .context
            .0
            .render_pass_set_pipeline(&mut self.pass, pipeline.id)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_pipeline",
            );
        }
    }

    fn set_bind_group(
        &mut self,
        index: u32,
        bind_group: Option<&dispatch::DispatchBindGroup>,
        offsets: &[crate::DynamicOffset],
    ) {
        // `None` unbinds the slot; wgpu-core takes an optional id.
        let bg = bind_group.map(|bg| bg.as_core().id);

        if let Err(cause) =
            self.context
                .0
                .render_pass_set_bind_group(&mut self.pass, index, bg, offsets)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_bind_group",
            );
        }
    }

    fn set_index_buffer(
        &mut self,
        buffer: &dispatch::DispatchBuffer,
        index_format: crate::IndexFormat,
        offset: crate::BufferAddress,
        size: Option<crate::BufferSize>,
    ) {
        let buffer = buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_set_index_buffer(
            &mut self.pass,
            buffer.id,
            index_format,
            offset,
            size,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_index_buffer",
            );
        }
    }

    fn set_vertex_buffer(
        &mut self,
        slot: u32,
        buffer: &dispatch::DispatchBuffer,
        offset: crate::BufferAddress,
        size: Option<crate::BufferSize>,
    ) {
        let buffer = buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_set_vertex_buffer(
            &mut self.pass,
            slot,
            buffer.id,
            offset,
            size,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_vertex_buffer",
            );
        }
    }

    fn set_immediates(&mut self, offset: u32, data: &[u8]) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_set_immediates(&mut self.pass, offset, data)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_immediates",
            );
        }
    }

    fn set_blend_constant(&mut self, color: crate::Color) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_set_blend_constant(&mut self.pass, color)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_blend_constant",
            );
        }
    }

    fn set_scissor_rect(&mut self, x: u32, y: u32, width: u32, height: u32) {
        if let Err(cause) =
            self.context
                .0
                .render_pass_set_scissor_rect(&mut self.pass, x, y, width, height)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_scissor_rect",
            );
        }
    }

    fn set_viewport(
        &mut self,
        x: f32,
        y: f32,
        width: f32,
        height: f32,
        min_depth: f32,
        max_depth: f32,
    ) {
        if let Err(cause) = self.context.0.render_pass_set_viewport(
            &mut self.pass,
            x,
            y,
            width,
            height,
            min_depth,
            max_depth,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_viewport",
            );
        }
    }

    fn set_stencil_reference(&mut self, reference: u32) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_set_stencil_reference(&mut self.pass, reference)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_stencil_reference",
            );
        }
    }

    fn draw(&mut self, vertices: Range<u32>, instances: Range<u32>) {
        // wgpu-core takes (count, first) pairs rather than `Range`s; convert.
        if let Err(cause) = self.context.0.render_pass_draw(
            &mut self.pass,
            vertices.end - vertices.start,
            instances.end - instances.start,
            vertices.start,
            instances.start,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw",
            );
        }
    }

    fn draw_indexed(&mut self, indices: Range<u32>, base_vertex: i32, instances: Range<u32>) {
        // Same `Range` → (count, first) conversion as `draw`.
        if let Err(cause) = self.context.0.render_pass_draw_indexed(
            &mut self.pass,
            indices.end - indices.start,
            instances.end - instances.start,
            indices.start,
            base_vertex,
            instances.start,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_indexed",
            );
        }
    }

    fn draw_mesh_tasks(&mut self, group_count_x: u32, group_count_y: u32, group_count_z: u32) {
        if let Err(cause) = self.context.0.render_pass_draw_mesh_tasks(
            &mut self.pass,
            group_count_x,
            group_count_y,
            group_count_z,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_mesh_tasks",
            );
        }
    }

    fn draw_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_draw_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_indirect",
            );
        }
    }

    fn draw_indexed_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_draw_indexed_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_indexed_indirect",
            );
        }
    }

    fn draw_mesh_tasks_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_draw_mesh_tasks_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_mesh_tasks_indirect",
            );
        }
    }

    fn multi_draw_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_multi_draw_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
            count,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_indirect",
            );
        }
    }

    fn multi_draw_indexed_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_multi_draw_indexed_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
            count,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_indexed_indirect",
            );
        }
    }

    fn multi_draw_mesh_tasks_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_multi_draw_mesh_tasks_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
            count,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_mesh_tasks_indirect",
            );
        }
    }

    fn multi_draw_indirect_count(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count_buffer: &dispatch::DispatchBuffer,
        count_buffer_offset: crate::BufferAddress,
        max_count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();
        let count_buffer = count_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_multi_draw_indirect_count(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
            count_buffer.id,
            count_buffer_offset,
            max_count,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_indirect_count",
            );
        }
    }

    fn multi_draw_indexed_indirect_count(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count_buffer: &dispatch::DispatchBuffer,
        count_buffer_offset: crate::BufferAddress,
        max_count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();
        let count_buffer = count_buffer.as_core();

        if let Err(cause) = self
            .context
            .0
            .render_pass_multi_draw_indexed_indirect_count(
                &mut self.pass,
                indirect_buffer.id,
                indirect_offset,
                count_buffer.id,
                count_buffer_offset,
                max_count,
            )
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_indexed_indirect_count",
            );
        }
    }

    fn multi_draw_mesh_tasks_indirect_count(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count_buffer: &dispatch::DispatchBuffer,
        count_buffer_offset: crate::BufferAddress,
        max_count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();
        let count_buffer = count_buffer.as_core();

        if let Err(cause) = self
            .context
            .0
            .render_pass_multi_draw_mesh_tasks_indirect_count(
                &mut self.pass,
                indirect_buffer.id,
                indirect_offset,
                count_buffer.id,
                count_buffer_offset,
                max_count,
            )
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_mesh_tasks_indirect_count",
            );
        }
    }

    fn insert_debug_marker(&mut self, label: &str) {
        // NOTE(review): the trailing `0` looks like a color/flags argument of
        // the wgc call — confirm against `render_pass_insert_debug_marker`.
        if let Err(cause) = self
            .context
            .0
            .render_pass_insert_debug_marker(&mut self.pass, label, 0)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::insert_debug_marker",
            );
        }
    }

    fn push_debug_group(&mut self, group_label: &str) {
        if let Err(cause) =
            self.context
                .0
                .render_pass_push_debug_group(&mut self.pass, group_label, 0)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::push_debug_group",
            );
        }
    }

    fn pop_debug_group(&mut self) {
        if let Err(cause) = self.context.0.render_pass_pop_debug_group(&mut self.pass) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::pop_debug_group",
            );
        }
    }

    fn write_timestamp(&mut self, query_set: &dispatch::DispatchQuerySet, query_index: u32) {
        let query_set = query_set.as_core();

        if let Err(cause) =
            self.context
                .0
                .render_pass_write_timestamp(&mut self.pass, query_set.id, query_index)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::write_timestamp",
            );
        }
    }

    fn begin_occlusion_query(&mut self, query_index: u32) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_begin_occlusion_query(&mut self.pass, query_index)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::begin_occlusion_query",
            );
        }
    }

    fn end_occlusion_query(&mut self) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_end_occlusion_query(&mut self.pass)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::end_occlusion_query",
            );
        }
    }

    fn begin_pipeline_statistics_query(
        &mut self,
        query_set: &dispatch::DispatchQuerySet,
        query_index: u32,
    ) {
        let query_set = query_set.as_core();

        if let Err(cause) = self.context.0.render_pass_begin_pipeline_statistics_query(
            &mut self.pass,
            query_set.id,
            query_index,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::begin_pipeline_statistics_query",
            );
        }
    }

    fn end_pipeline_statistics_query(&mut self) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_end_pipeline_statistics_query(&mut self.pass)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::end_pipeline_statistics_query",
            );
        }
    }

    fn execute_bundles(
        &mut self,
        render_bundles: &mut dyn Iterator<Item = &dispatch::DispatchRenderBundle>,
    ) {
        // Gather bundle ids into a small inline buffer before the core call.
        let temp_render_bundles = render_bundles
            .map(|rb| rb.as_core().id)
            .collect::<SmallVec<[_; 4]>>();
        if let Err(cause) = self
            .context
            .0
            .render_pass_execute_bundles(&mut self.pass, &temp_render_bundles)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::execute_bundles",
            );
        }
    }
}
3693
3694impl Drop for CoreRenderPass {
3695    fn drop(&mut self) {
3696        if let Err(cause) = self.context.0.render_pass_end(&mut self.pass) {
3697            self.context.handle_error(
3698                &self.error_sink,
3699                cause,
3700                self.pass.label(),
3701                "RenderPass::end",
3702            );
3703        }
3704    }
3705}
3706
3707impl dispatch::RenderBundleEncoderInterface for CoreRenderBundleEncoder {
3708    fn set_pipeline(&mut self, pipeline: &dispatch::DispatchRenderPipeline) {
3709        let pipeline = pipeline.as_core();
3710
3711        wgpu_render_bundle_set_pipeline(&mut self.encoder, pipeline.id)
3712    }
3713
3714    fn set_bind_group(
3715        &mut self,
3716        index: u32,
3717        bind_group: Option<&dispatch::DispatchBindGroup>,
3718        offsets: &[crate::DynamicOffset],
3719    ) {
3720        let bg = bind_group.map(|bg| bg.as_core().id);
3721
3722        unsafe {
3723            wgpu_render_bundle_set_bind_group(
3724                &mut self.encoder,
3725                index,
3726                bg,
3727                offsets.as_ptr(),
3728                offsets.len(),
3729            )
3730        }
3731    }
3732
3733    fn set_index_buffer(
3734        &mut self,
3735        buffer: &dispatch::DispatchBuffer,
3736        index_format: crate::IndexFormat,
3737        offset: crate::BufferAddress,
3738        size: Option<crate::BufferSize>,
3739    ) {
3740        let buffer = buffer.as_core();
3741
3742        self.encoder
3743            .set_index_buffer(buffer.id, index_format, offset, size)
3744    }
3745
3746    fn set_vertex_buffer(
3747        &mut self,
3748        slot: u32,
3749        buffer: &dispatch::DispatchBuffer,
3750        offset: crate::BufferAddress,
3751        size: Option<crate::BufferSize>,
3752    ) {
3753        let buffer = buffer.as_core();
3754
3755        wgpu_render_bundle_set_vertex_buffer(&mut self.encoder, slot, buffer.id, offset, size)
3756    }
3757
3758    fn set_immediates(&mut self, offset: u32, data: &[u8]) {
3759        unsafe {
3760            wgpu_render_bundle_set_immediates(
3761                &mut self.encoder,
3762                offset,
3763                data.len().try_into().unwrap(),
3764                data.as_ptr(),
3765            )
3766        }
3767    }
3768
3769    fn draw(&mut self, vertices: Range<u32>, instances: Range<u32>) {
3770        wgpu_render_bundle_draw(
3771            &mut self.encoder,
3772            vertices.end - vertices.start,
3773            instances.end - instances.start,
3774            vertices.start,
3775            instances.start,
3776        )
3777    }
3778
3779    fn draw_indexed(&mut self, indices: Range<u32>, base_vertex: i32, instances: Range<u32>) {
3780        wgpu_render_bundle_draw_indexed(
3781            &mut self.encoder,
3782            indices.end - indices.start,
3783            instances.end - instances.start,
3784            indices.start,
3785            base_vertex,
3786            instances.start,
3787        )
3788    }
3789
3790    fn draw_indirect(
3791        &mut self,
3792        indirect_buffer: &dispatch::DispatchBuffer,
3793        indirect_offset: crate::BufferAddress,
3794    ) {
3795        let indirect_buffer = indirect_buffer.as_core();
3796
3797        wgpu_render_bundle_draw_indirect(&mut self.encoder, indirect_buffer.id, indirect_offset)
3798    }
3799
3800    fn draw_indexed_indirect(
3801        &mut self,
3802        indirect_buffer: &dispatch::DispatchBuffer,
3803        indirect_offset: crate::BufferAddress,
3804    ) {
3805        let indirect_buffer = indirect_buffer.as_core();
3806
3807        wgpu_render_bundle_draw_indexed_indirect(
3808            &mut self.encoder,
3809            indirect_buffer.id,
3810            indirect_offset,
3811        )
3812    }
3813
3814    fn finish(self, desc: &crate::RenderBundleDescriptor<'_>) -> dispatch::DispatchRenderBundle
3815    where
3816        Self: Sized,
3817    {
3818        let (id, error) = self.context.0.render_bundle_encoder_finish(
3819            self.encoder,
3820            &desc.map_label(|l| l.map(Borrowed)),
3821            None,
3822        );
3823        if let Some(err) = error {
3824            self.context
3825                .handle_error_fatal(err, "RenderBundleEncoder::finish");
3826        }
3827        CoreRenderBundle {
3828            context: self.context.clone(),
3829            id,
3830        }
3831        .into()
3832    }
3833}
3834
// Marker impl: the trait requires no methods for this backend; bundle
// lifetime management is handled by `Drop for CoreRenderBundle` below.
impl dispatch::RenderBundleInterface for CoreRenderBundle {}
3836
impl Drop for CoreRenderBundle {
    fn drop(&mut self) {
        // Release wgpu-core's reference to the bundle resource.
        self.context.0.render_bundle_drop(self.id)
    }
}
3842
3843impl dispatch::SurfaceInterface for CoreSurface {
3844    fn get_capabilities(&self, adapter: &dispatch::DispatchAdapter) -> wgt::SurfaceCapabilities {
3845        let adapter = adapter.as_core();
3846
3847        self.context
3848            .0
3849            .surface_get_capabilities(self.id, adapter.id)
3850            .unwrap_or_default()
3851    }
3852
3853    fn configure(&self, device: &dispatch::DispatchDevice, config: &crate::SurfaceConfiguration) {
3854        let device = device.as_core();
3855
3856        let error = self.context.0.surface_configure(self.id, device.id, config);
3857        if let Some(e) = error {
3858            self.context
3859                .handle_error_nolabel(&device.error_sink, e, "Surface::configure");
3860        } else {
3861            *self.configured_device.lock() = Some(device.id);
3862            *self.error_sink.lock() = Some(device.error_sink.clone());
3863        }
3864    }
3865
3866    fn get_current_texture(
3867        &self,
3868    ) -> (
3869        Option<dispatch::DispatchTexture>,
3870        crate::SurfaceStatus,
3871        dispatch::DispatchSurfaceOutputDetail,
3872    ) {
3873        let error_sink = if let Some(error_sink) = self.error_sink.lock().as_ref() {
3874            error_sink.clone()
3875        } else {
3876            Arc::new(Mutex::new(ErrorSinkRaw::new()))
3877        };
3878
3879        let output_detail = CoreSurfaceOutputDetail {
3880            context: self.context.clone(),
3881            surface_id: self.id,
3882            error_sink: error_sink.clone(),
3883        }
3884        .into();
3885
3886        match self.context.0.surface_get_current_texture(self.id, None) {
3887            Ok(wgc::present::SurfaceOutput {
3888                status,
3889                texture: texture_id,
3890            }) => {
3891                let data = texture_id
3892                    .map(|id| CoreTexture {
3893                        context: self.context.clone(),
3894                        id,
3895                        error_sink,
3896                    })
3897                    .map(Into::into);
3898
3899                (data, status, output_detail)
3900            }
3901            Err(err) => {
3902                let error_sink = self.error_sink.lock();
3903                match error_sink.as_ref() {
3904                    Some(error_sink) => {
3905                        self.context.handle_error_nolabel(
3906                            error_sink,
3907                            err,
3908                            "Surface::get_current_texture_view",
3909                        );
3910                        (None, crate::SurfaceStatus::Unknown, output_detail)
3911                    }
3912                    None => self
3913                        .context
3914                        .handle_error_fatal(err, "Surface::get_current_texture_view"),
3915                }
3916            }
3917        }
3918    }
3919}
3920
impl Drop for CoreSurface {
    fn drop(&mut self) {
        // Release wgpu-core's reference to the surface resource.
        self.context.0.surface_drop(self.id)
    }
}
3926
3927impl dispatch::SurfaceOutputDetailInterface for CoreSurfaceOutputDetail {
3928    fn present(&self) {
3929        match self.context.0.surface_present(self.surface_id) {
3930            Ok(_status) => (),
3931            Err(err) => {
3932                self.context
3933                    .handle_error_nolabel(&self.error_sink, err, "Surface::present");
3934            }
3935        }
3936    }
3937
3938    fn texture_discard(&self) {
3939        match self.context.0.surface_texture_discard(self.surface_id) {
3940            Ok(_status) => (),
3941            Err(err) => self
3942                .context
3943                .handle_error_fatal(err, "Surface::discard_texture"),
3944        }
3945    }
3946}
impl Drop for CoreSurfaceOutputDetail {
    fn drop(&mut self) {
        // Intentionally a no-op: `texture_discard` is invoked by the owning
        // api struct, so there is nothing left to clean up here.
    }
}
3954
3955impl dispatch::QueueWriteBufferInterface for CoreQueueWriteBuffer {
3956    fn slice(&self) -> &[u8] {
3957        panic!()
3958    }
3959
3960    #[inline]
3961    fn slice_mut(&mut self) -> &mut [u8] {
3962        self.mapping.slice_mut()
3963    }
3964}
impl Drop for CoreQueueWriteBuffer {
    fn drop(&mut self) {
        // Intentionally a no-op: the api struct consumes this buffer by
        // calling `queue.write_staging_buffer`, which handles submission.
    }
}
3972
/// View over a mapped buffer region, described by a raw pointer and a size.
impl dispatch::BufferMappedRangeInterface for CoreBufferMappedRange {
    #[inline]
    fn slice(&self) -> &[u8] {
        // SAFETY: `ptr` is non-null and, together with `size`, describes the
        // mapped region this value was created from; the returned slice
        // borrows `self`, so it cannot outlive the range. (Assumes the
        // mapping remains valid while this value exists — upheld by the
        // surrounding mapping API.)
        unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.size) }
    }

    #[inline]
    fn slice_mut(&mut self) -> &mut [u8] {
        // SAFETY: as in `slice`; additionally the `&mut self` borrow
        // guarantees exclusive access to the mapped bytes for the slice's
        // lifetime.
        unsafe { slice::from_raw_parts_mut(self.ptr.as_ptr(), self.size) }
    }

    // Stub for the webgpu-only accessor; this backend has no JS-side buffer.
    #[cfg(webgpu)]
    fn as_uint8array(&self) -> &js_sys::Uint8Array {
        panic!("Only available on WebGPU")
    }
}