// wgpu/backend/wgpu_core.rs

1use alloc::{
2    borrow::Cow::{self, Borrowed},
3    boxed::Box,
4    format,
5    string::{String, ToString as _},
6    sync::Arc,
7    vec,
8    vec::Vec,
9};
10use core::{
11    error::Error,
12    fmt,
13    future::ready,
14    ops::{Deref, Range},
15    pin::Pin,
16    ptr::NonNull,
17    slice,
18};
19use hashbrown::HashMap;
20
21use arrayvec::ArrayVec;
22use smallvec::SmallVec;
23use wgc::{
24    command::bundle_ffi::*, error::ContextErrorSource, pipeline::CreateShaderModuleError,
25    resource::BlasPrepareCompactResult,
26};
27use wgt::{
28    error::{ErrorType, WebGpuError},
29    WasmNotSendSync,
30};
31
32use crate::{
33    api,
34    dispatch::{self, BlasCompactCallback, BufferMappedRangeInterface},
35    BindingResource, Blas, BufferBinding, BufferDescriptor, CompilationInfo, CompilationMessage,
36    CompilationMessageType, ErrorSource, Features, Label, LoadOp, MapMode, Operations,
37    ShaderSource, SurfaceTargetUnsafe, TextureDescriptor, Tlas,
38};
39use crate::{dispatch::DispatchAdapter, util::Mutex};
40
// Portable thread-id helper; used to key the per-thread error-scope map in
// `ErrorSinkRaw`.
mod thread_id;
42
/// The `wgpu-core`-backed context implementation.
///
/// A cheap, clonable handle: all state lives in the shared
/// [`wgc::global::Global`], so cloning just bumps the `Arc` refcount.
#[derive(Clone)]
pub struct ContextWgpuCore(Arc<wgc::global::Global>);
45
impl Drop for ContextWgpuCore {
    fn drop(&mut self) {
        // Intentionally empty: the inner `Arc` performs all cleanup.
        // NOTE(review): the explicit impl appears to exist only as a hook /
        // to forbid destructuring moves out of the context — confirm intent.
        //nothing
    }
}
51
52impl fmt::Debug for ContextWgpuCore {
53    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
54        f.debug_struct("ContextWgpuCore")
55            .field("type", &"Native")
56            .finish()
57    }
58}
59
impl ContextWgpuCore {
    /// Wrap an existing `wgpu-hal` instance in a new `wgpu-core` global.
    ///
    /// # Safety
    ///
    /// NOTE(review): inherits the contract of
    /// `wgc::global::Global::from_hal_instance`; `hal_instance` must be a
    /// valid instance for backend `A` — confirm against wgpu-core docs.
    pub unsafe fn from_hal_instance<A: hal::Api>(hal_instance: A::Instance) -> Self {
        Self(unsafe {
            Arc::new(wgc::global::Global::from_hal_instance::<A>(
                "wgpu",
                hal_instance,
            ))
        })
    }

    /// # Safety
    ///
    /// - The raw instance handle returned must not be manually destroyed.
    pub unsafe fn instance_as_hal<A: hal::Api>(&self) -> Option<&A::Instance> {
        unsafe { self.0.instance_as_hal::<A>() }
    }

    /// Wrap an existing `wgpu-core` instance in a new global.
    ///
    /// # Safety
    ///
    /// NOTE(review): inherits the contract of
    /// `wgc::global::Global::from_instance` — confirm against wgpu-core docs.
    pub unsafe fn from_core_instance(core_instance: wgc::instance::Instance) -> Self {
        Self(unsafe { Arc::new(wgc::global::Global::from_instance(core_instance)) })
    }

    /// List adapter ids for all adapters available on the given backends.
    #[cfg(wgpu_core)]
    pub fn enumerate_adapters(&self, backends: wgt::Backends) -> Vec<wgc::id::AdapterId> {
        self.0.enumerate_adapters(backends)
    }

    /// Register a pre-opened HAL adapter with the global and return its id.
    pub unsafe fn create_adapter_from_hal<A: hal::Api>(
        &self,
        hal_adapter: hal::ExposedAdapter<A>,
    ) -> wgc::id::AdapterId {
        unsafe { self.0.create_adapter_from_hal(hal_adapter.into(), None) }
    }

    /// Borrow the HAL adapter underlying `adapter`, if it is backed by `A`.
    pub unsafe fn adapter_as_hal<A: hal::Api>(
        &self,
        adapter: &CoreAdapter,
    ) -> Option<impl Deref<Target = A::Adapter> + WasmNotSendSync> {
        unsafe { self.0.adapter_as_hal::<A>(adapter.id) }
    }

    /// Borrow the HAL buffer underlying `buffer`, if it is backed by `A`.
    pub unsafe fn buffer_as_hal<A: hal::Api>(
        &self,
        buffer: &CoreBuffer,
    ) -> Option<impl Deref<Target = A::Buffer>> {
        unsafe { self.0.buffer_as_hal::<A>(buffer.id) }
    }

    /// Register a pre-opened HAL device/queue pair and wrap them in
    /// wgpu-level handles that share a single freshly created error sink.
    pub unsafe fn create_device_from_hal<A: hal::Api>(
        &self,
        adapter: &CoreAdapter,
        hal_device: hal::OpenDevice<A>,
        desc: &crate::DeviceDescriptor<'_>,
    ) -> Result<(CoreDevice, CoreQueue), crate::RequestDeviceError> {
        let (device_id, queue_id) = unsafe {
            self.0.create_device_from_hal(
                adapter.id,
                hal_device.into(),
                &desc.map_label(|l| l.map(Borrowed)),
                None,
                None,
            )
        }?;
        // Device and queue share one error sink so errors from either are
        // routed to the same scopes / uncaptured-error handler.
        let error_sink = Arc::new(Mutex::new(ErrorSinkRaw::new()));
        let device = CoreDevice {
            context: self.clone(),
            id: device_id,
            error_sink: error_sink.clone(),
            features: desc.required_features,
        };
        let queue = CoreQueue {
            context: self.clone(),
            id: queue_id,
            error_sink,
        };
        Ok((device, queue))
    }

    /// Register a pre-created HAL texture on `device`.
    ///
    /// Any creation error is reported through the device's error sink rather
    /// than returned; the handle is produced unconditionally.
    pub unsafe fn create_texture_from_hal<A: hal::Api>(
        &self,
        hal_texture: A::Texture,
        device: &CoreDevice,
        desc: &TextureDescriptor<'_>,
    ) -> CoreTexture {
        let descriptor = desc.map_label_and_view_formats(|l| l.map(Borrowed), |v| v.to_vec());
        let (id, error) = unsafe {
            self.0
                .create_texture_from_hal(Box::new(hal_texture), device.id, &descriptor, None)
        };
        if let Some(cause) = error {
            self.handle_error(
                &device.error_sink,
                cause,
                desc.label,
                "Device::create_texture_from_hal",
            );
        }
        CoreTexture {
            context: self.clone(),
            id,
            error_sink: Arc::clone(&device.error_sink),
        }
    }

    /// # Safety
    ///
    /// - `hal_buffer` must be created from `device`.
    /// - `hal_buffer` must be created respecting `desc`
    /// - `hal_buffer` must be initialized
    /// - `hal_buffer` must not have zero size.
    pub unsafe fn create_buffer_from_hal<A: hal::Api>(
        &self,
        hal_buffer: A::Buffer,
        device: &CoreDevice,
        desc: &BufferDescriptor<'_>,
    ) -> CoreBuffer {
        let (id, error) = unsafe {
            self.0.create_buffer_from_hal::<A>(
                hal_buffer,
                device.id,
                &desc.map_label(|l| l.map(Borrowed)),
                None,
            )
        };
        // Like `create_texture_from_hal`: errors go to the sink, the handle
        // is returned regardless.
        if let Some(cause) = error {
            self.handle_error(
                &device.error_sink,
                cause,
                desc.label,
                "Device::create_buffer_from_hal",
            );
        }
        CoreBuffer {
            context: self.clone(),
            id,
            error_sink: Arc::clone(&device.error_sink),
        }
    }

    /// Borrow the HAL device underlying `device`, if it is backed by `A`.
    pub unsafe fn device_as_hal<A: hal::Api>(
        &self,
        device: &CoreDevice,
    ) -> Option<impl Deref<Target = A::Device>> {
        unsafe { self.0.device_as_hal::<A>(device.id) }
    }

    /// Borrow the HAL surface underlying `surface`, if it is backed by `A`.
    pub unsafe fn surface_as_hal<A: hal::Api>(
        &self,
        surface: &CoreSurface,
    ) -> Option<impl Deref<Target = A::Surface>> {
        unsafe { self.0.surface_as_hal::<A>(surface.id) }
    }

    /// Borrow the HAL texture underlying `texture`, if it is backed by `A`.
    pub unsafe fn texture_as_hal<A: hal::Api>(
        &self,
        texture: &CoreTexture,
    ) -> Option<impl Deref<Target = A::Texture>> {
        unsafe { self.0.texture_as_hal::<A>(texture.id) }
    }

    /// Borrow the HAL view underlying `texture_view`, if backed by `A`.
    pub unsafe fn texture_view_as_hal<A: hal::Api>(
        &self,
        texture_view: &CoreTextureView,
    ) -> Option<impl Deref<Target = A::TextureView>> {
        unsafe { self.0.texture_view_as_hal::<A>(texture_view.id) }
    }

    /// This method will start the wgpu_core level command recording.
    pub unsafe fn command_encoder_as_hal_mut<
        A: hal::Api,
        F: FnOnce(Option<&mut A::CommandEncoder>) -> R,
        R,
    >(
        &self,
        command_encoder: &CoreCommandEncoder,
        hal_command_encoder_callback: F,
    ) -> R {
        unsafe {
            self.0.command_encoder_as_hal_mut::<A, F, R>(
                command_encoder.id,
                hal_command_encoder_callback,
            )
        }
    }

    /// Borrow the HAL acceleration structure underlying `blas`.
    pub unsafe fn blas_as_hal<A: hal::Api>(
        &self,
        blas: &CoreBlas,
    ) -> Option<impl Deref<Target = A::AccelerationStructure>> {
        unsafe { self.0.blas_as_hal::<A>(blas.id) }
    }

    /// Borrow the HAL acceleration structure underlying `tlas`.
    pub unsafe fn tlas_as_hal<A: hal::Api>(
        &self,
        tlas: &CoreTlas,
    ) -> Option<impl Deref<Target = A::AccelerationStructure>> {
        unsafe { self.0.tlas_as_hal::<A>(tlas.id) }
    }

    /// Produce a diagnostic report of all live wgpu-core resources.
    pub fn generate_report(&self) -> wgc::global::GlobalReport {
        self.0.generate_report()
    }

    /// Shared slow path for error delivery: wrap `source` in a
    /// `ContextError`, pick the `crate::Error` variant from `error_type`,
    /// and hand it to the sink. `#[cold]`/`#[inline(never)]` keep this off
    /// the callers' hot paths.
    #[cold]
    #[track_caller]
    #[inline(never)]
    fn handle_error_inner(
        &self,
        sink_mutex: &Mutex<ErrorSinkRaw>,
        error_type: ErrorType,
        source: ContextErrorSource,
        label: Label<'_>,
        fn_ident: &'static str,
    ) {
        let source: ErrorSource = Box::new(wgc::error::ContextError {
            fn_ident,
            source,
            label: label.unwrap_or_default().to_string(),
        });
        let final_error_handling = {
            let mut sink = sink_mutex.lock();
            // Lazily formatted: only Internal/Validation errors pay for it.
            let description = || self.format_error(&*source);
            let error = match error_type {
                ErrorType::Internal => {
                    let description = description();
                    crate::Error::Internal {
                        source,
                        description,
                    }
                }
                ErrorType::OutOfMemory => crate::Error::OutOfMemory { source },
                ErrorType::Validation => {
                    let description = description();
                    crate::Error::Validation {
                        source,
                        description,
                    }
                }
                ErrorType::DeviceLost => return, // will be surfaced via callback
            };
            sink.handle_error_or_return_handler(error)
        };

        if let Some(f) = final_error_handling {
            // If the user has provided their own `uncaptured_handler` callback, invoke it now,
            // having released our lock on `sink_mutex`. See the comments on
            // `handle_error_or_return_handler` for details.
            f();
        }
    }

    /// Report `source` against the given sink, tagged with a resource label.
    #[inline]
    #[track_caller]
    fn handle_error(
        &self,
        sink_mutex: &Mutex<ErrorSinkRaw>,
        source: impl WebGpuError + WasmNotSendSync + 'static,
        label: Label<'_>,
        fn_ident: &'static str,
    ) {
        let error_type = source.webgpu_error_type();
        self.handle_error_inner(sink_mutex, error_type, Box::new(source), label, fn_ident)
    }

    /// Like [`Self::handle_error`], for operations with no associated label.
    #[inline]
    #[track_caller]
    fn handle_error_nolabel(
        &self,
        sink_mutex: &Mutex<ErrorSinkRaw>,
        source: impl WebGpuError + WasmNotSendSync + 'static,
        fn_ident: &'static str,
    ) {
        let error_type = source.webgpu_error_type();
        self.handle_error_inner(sink_mutex, error_type, Box::new(source), None, fn_ident)
    }

    /// Report an unrecoverable error by panicking with the formatted cause.
    #[track_caller]
    #[cold]
    fn handle_error_fatal(
        &self,
        cause: impl Error + WasmNotSendSync + 'static,
        operation: &'static str,
    ) -> ! {
        panic!("Error in {operation}: {f}", f = self.format_error(&cause));
    }

    /// Render `err` and its `source()` chain as an indented tree.
    ///
    /// `MultiError`s are flattened so each inner error gets its own branch.
    /// Note the header is always "Validation Error", whatever the cause.
    #[inline(never)]
    fn format_error(&self, err: &(dyn Error + 'static)) -> String {
        let mut output = String::new();
        let mut level = 1;

        // Recursive walk; `level` tracks the current indentation depth.
        fn print_tree(output: &mut String, level: &mut usize, e: &(dyn Error + 'static)) {
            let mut print = |e: &(dyn Error + 'static)| {
                use core::fmt::Write;
                writeln!(output, "{}{}", " ".repeat(*level * 2), e).unwrap();

                if let Some(e) = e.source() {
                    *level += 1;
                    print_tree(output, level, e);
                    *level -= 1;
                }
            };
            if let Some(multi) = e.downcast_ref::<wgc::error::MultiError>() {
                for e in multi.errors() {
                    print(e);
                }
            } else {
                print(e);
            }
        }

        print_tree(&mut output, &mut level, err);

        format!("Validation Error\n\nCaused by:\n{output}")
    }

    /// Borrow the HAL queue underlying `queue`, if it is backed by `A`.
    pub unsafe fn queue_as_hal<A: hal::Api>(
        &self,
        queue: &CoreQueue,
    ) -> Option<impl Deref<Target = A::Queue> + WasmNotSendSync> {
        unsafe { self.0.queue_as_hal::<A>(queue.id) }
    }
}
382
383fn map_buffer_copy_view(
384    view: crate::TexelCopyBufferInfo<'_>,
385) -> wgt::TexelCopyBufferInfo<wgc::id::BufferId> {
386    wgt::TexelCopyBufferInfo {
387        buffer: view.buffer.inner.as_core().id,
388        layout: view.layout,
389    }
390}
391
392fn map_texture_copy_view(
393    view: crate::TexelCopyTextureInfo<'_>,
394) -> wgt::TexelCopyTextureInfo<wgc::id::TextureId> {
395    wgt::TexelCopyTextureInfo {
396        texture: view.texture.inner.as_core().id,
397        mip_level: view.mip_level,
398        origin: view.origin,
399        aspect: view.aspect,
400    }
401}
402
403#[cfg_attr(not(webgl), expect(unused))]
404fn map_texture_tagged_copy_view(
405    view: crate::CopyExternalImageDestInfo<&api::Texture>,
406) -> wgt::CopyExternalImageDestInfo<wgc::id::TextureId> {
407    wgt::CopyExternalImageDestInfo {
408        texture: view.texture.inner.as_core().id,
409        mip_level: view.mip_level,
410        origin: view.origin,
411        aspect: view.aspect,
412        color_space: view.color_space,
413        premultiplied_alpha: view.premultiplied_alpha,
414    }
415}
416
417fn map_load_op<V: Copy>(load: &LoadOp<V>) -> LoadOp<Option<V>> {
418    match *load {
419        LoadOp::Clear(clear_value) => LoadOp::Clear(Some(clear_value)),
420        LoadOp::DontCare(token) => LoadOp::DontCare(token),
421        LoadOp::Load => LoadOp::Load,
422    }
423}
424
425fn map_pass_channel<V: Copy>(ops: Option<&Operations<V>>) -> wgc::command::PassChannel<Option<V>> {
426    match ops {
427        Some(&Operations { load, store }) => wgc::command::PassChannel {
428            load_op: Some(map_load_op(&load)),
429            store_op: Some(store),
430            read_only: false,
431        },
432        None => wgc::command::PassChannel {
433            load_op: None,
434            store_op: None,
435            read_only: true,
436        },
437    }
438}
439
/// A surface handle backed by `wgpu-core`.
#[derive(Debug)]
pub struct CoreSurface {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::SurfaceId,
    /// Configured device is needed to know which backend
    /// code to execute when acquiring a new frame.
    configured_device: Mutex<Option<wgc::id::DeviceId>>,
    /// The error sink with which to report errors.
    /// `None` if the surface has not been configured.
    error_sink: Mutex<Option<ErrorSink>>,
}
451
/// An adapter handle: the owning context plus the core adapter id.
#[derive(Debug)]
pub struct CoreAdapter {
    pub(crate) context: ContextWgpuCore,
    pub(crate) id: wgc::id::AdapterId,
}
457
/// A device handle with its error sink and requested feature set.
#[derive(Debug)]
pub struct CoreDevice {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::DeviceId,
    // Shared with the queue and resources created on this device so all
    // errors funnel into the same scopes / uncaptured handler.
    error_sink: ErrorSink,
    // The features requested in the `DeviceDescriptor` at creation time.
    features: Features,
}
465
/// A buffer handle; errors it raises go to the owning device's sink.
#[derive(Debug)]
pub struct CoreBuffer {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BufferId,
    error_sink: ErrorSink,
}
472
/// A shader module handle, carrying its captured compilation diagnostics.
#[derive(Debug)]
pub struct CoreShaderModule {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::ShaderModuleId,
    compilation_info: CompilationInfo,
}
479
/// A bind group layout handle.
#[derive(Debug)]
pub struct CoreBindGroupLayout {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BindGroupLayoutId,
}
485
/// A bind group handle.
#[derive(Debug)]
pub struct CoreBindGroup {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BindGroupId,
}
491
/// A texture handle; errors it raises go to the owning device's sink.
#[derive(Debug)]
pub struct CoreTexture {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::TextureId,
    error_sink: ErrorSink,
}
498
/// A texture view handle.
#[derive(Debug)]
pub struct CoreTextureView {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::TextureViewId,
}
504
/// An external texture handle.
#[derive(Debug)]
pub struct CoreExternalTexture {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::ExternalTextureId,
}
510
/// A sampler handle.
#[derive(Debug)]
pub struct CoreSampler {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::SamplerId,
}
516
/// A query set handle.
#[derive(Debug)]
pub struct CoreQuerySet {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::QuerySetId,
}
522
/// A pipeline layout handle.
#[derive(Debug)]
pub struct CorePipelineLayout {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::PipelineLayoutId,
}
528
/// A pipeline cache handle.
#[derive(Debug)]
pub struct CorePipelineCache {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::PipelineCacheId,
}
534
/// A finished command buffer handle.
#[derive(Debug)]
pub struct CoreCommandBuffer {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::CommandBufferId,
}
540
/// A render bundle encoder. Owns the core encoder state directly; `id` is a
/// CPU-side identifier used only for equality/ordering/hashing.
#[derive(Debug)]
pub struct CoreRenderBundleEncoder {
    pub(crate) context: ContextWgpuCore,
    encoder: wgc::command::RenderBundleEncoder,
    id: crate::cmp::Identifier,
}
547
/// A finished render bundle handle.
#[derive(Debug)]
pub struct CoreRenderBundle {
    context: ContextWgpuCore,
    id: wgc::id::RenderBundleId,
}
553
/// A queue handle; shares its error sink with the device it was created with.
#[derive(Debug)]
pub struct CoreQueue {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::QueueId,
    error_sink: ErrorSink,
}
560
/// A compute pipeline handle.
#[derive(Debug)]
pub struct CoreComputePipeline {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::ComputePipelineId,
    error_sink: ErrorSink,
}
567
/// A render pipeline handle.
#[derive(Debug)]
pub struct CoreRenderPipeline {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::RenderPipelineId,
    error_sink: ErrorSink,
}
574
/// An open compute pass. Owns the core pass state; `id` is a CPU-side
/// identifier used only for equality/ordering/hashing.
#[derive(Debug)]
pub struct CoreComputePass {
    pub(crate) context: ContextWgpuCore,
    pass: wgc::command::ComputePass,
    error_sink: ErrorSink,
    id: crate::cmp::Identifier,
}
582
/// An open render pass. Owns the core pass state; `id` is a CPU-side
/// identifier used only for equality/ordering/hashing.
#[derive(Debug)]
pub struct CoreRenderPass {
    pub(crate) context: ContextWgpuCore,
    pass: wgc::command::RenderPass,
    error_sink: ErrorSink,
    id: crate::cmp::Identifier,
}
590
/// A command encoder handle.
#[derive(Debug)]
pub struct CoreCommandEncoder {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::CommandEncoderId,
    error_sink: ErrorSink,
}
597
/// A bottom-level acceleration structure handle.
#[derive(Debug)]
pub struct CoreBlas {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BlasId,
    error_sink: ErrorSink,
}
604
/// A top-level acceleration structure handle. Unlike `CoreBlas`, it carries
/// no error sink (field kept commented out below, apparently unused so far).
#[derive(Debug)]
pub struct CoreTlas {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::TlasId,
    // error_sink: ErrorSink,
}
611
/// Bookkeeping attached to an acquired surface frame, identifying the
/// surface it came from and where to report presentation errors.
#[derive(Debug)]
pub struct CoreSurfaceOutputDetail {
    context: ContextWgpuCore,
    surface_id: wgc::id::SurfaceId,
    error_sink: ErrorSink,
}
618
/// Shared, lock-protected error sink handed out to every device resource.
type ErrorSink = Arc<Mutex<ErrorSinkRaw>>;
620
/// One pushed error scope: the filter it catches and the first matching
/// error it has captured (if any).
struct ErrorScope {
    error: Option<crate::Error>,
    filter: crate::ErrorFilter,
}
625
/// The state behind an [`ErrorSink`]: a per-thread stack of error scopes
/// plus an optional user-installed uncaptured-error handler.
struct ErrorSinkRaw {
    scopes: HashMap<thread_id::ThreadId, Vec<ErrorScope>>,
    uncaptured_handler: Option<Arc<dyn crate::UncapturedErrorHandler>>,
}
630
impl ErrorSinkRaw {
    /// Create an empty sink: no open scopes, no custom handler installed.
    fn new() -> ErrorSinkRaw {
        ErrorSinkRaw {
            scopes: HashMap::new(),
            uncaptured_handler: None,
        }
    }

    /// Deliver the error to
    ///
    /// * the innermost error scope, if any, or
    /// * the uncaptured error handler, if there is one, or
    /// * [`default_error_handler()`].
    ///
    /// If a closure is returned, the caller should call it immediately after dropping the
    /// [`ErrorSink`] mutex guard. This makes sure that the user callback is not called with
    /// a wgpu mutex held.
    #[track_caller]
    #[must_use]
    fn handle_error_or_return_handler(&mut self, err: crate::Error) -> Option<impl FnOnce()> {
        // Map the concrete error variant to the scope filter that can catch it.
        let filter = match err {
            crate::Error::OutOfMemory { .. } => crate::ErrorFilter::OutOfMemory,
            crate::Error::Validation { .. } => crate::ErrorFilter::Validation,
            crate::Error::Internal { .. } => crate::ErrorFilter::Internal,
        };
        // Scopes are kept per thread; search the current thread's stack from
        // innermost (last pushed) outward.
        let thread_id = thread_id::ThreadId::current();
        let scopes = self.scopes.entry(thread_id).or_default();
        match scopes.iter_mut().rev().find(|scope| scope.filter == filter) {
            Some(scope) => {
                // A scope records only the first matching error it sees.
                if scope.error.is_none() {
                    scope.error = Some(err);
                }
                None
            }
            None => {
                if let Some(custom_handler) = &self.uncaptured_handler {
                    let custom_handler = Arc::clone(custom_handler);
                    Some(move || (custom_handler)(err))
                } else {
                    // direct call preserves #[track_caller] where dyn can't
                    default_error_handler(err)
                }
            }
        }
    }
}
677
678impl fmt::Debug for ErrorSinkRaw {
679    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
680        write!(f, "ErrorSink")
681    }
682}
683
/// Fallback for errors with no scope and no user handler: log, then panic.
/// `#[track_caller]` attributes the panic to the original API call site.
#[track_caller]
fn default_error_handler(err: crate::Error) -> ! {
    log::error!("Handling wgpu errors as fatal by default");
    panic!("wgpu error: {err}\n");
}
689
/// Convert a shader-module creation error into WebGPU-style compilation
/// info. Front-end parse/validation errors carry location data and get
/// dedicated conversions (behind their feature gates); everything else
/// becomes either an empty message list or a single location-less error.
impl From<CreateShaderModuleError> for CompilationInfo {
    fn from(value: CreateShaderModuleError) -> Self {
        match value {
            #[cfg(feature = "wgsl")]
            CreateShaderModuleError::Parsing(v) => v.into(),
            #[cfg(feature = "glsl")]
            CreateShaderModuleError::ParsingGlsl(v) => v.into(),
            #[cfg(feature = "spirv")]
            CreateShaderModuleError::ParsingSpirV(v) => v.into(),
            CreateShaderModuleError::Validation(v) => v.into(),
            // Device errors are reported through the error sink, and are not compilation errors.
            // Same goes for native shader module generation errors.
            CreateShaderModuleError::Device(_) | CreateShaderModuleError::Generation => {
                CompilationInfo {
                    messages: Vec::new(),
                }
            }
            // Everything else is an error message without location information.
            _ => CompilationInfo {
                messages: vec![CompilationMessage {
                    message: value.to_string(),
                    message_type: CompilationMessageType::Error,
                    location: None,
                }],
            },
        }
    }
}
718
/// An in-flight `Queue::write_buffer_with` staging allocation: the staging
/// buffer id plus the CPU-visible mapped range backing it.
#[derive(Debug)]
pub struct CoreQueueWriteBuffer {
    buffer_id: wgc::id::StagingBufferId,
    mapping: CoreBufferMappedRange,
}
724
/// A raw (pointer, length) view into a mapped buffer's memory.
#[derive(Debug)]
pub struct CoreBufferMappedRange {
    ptr: NonNull<u8>,
    size: usize,
}
730
// SAFETY: NOTE(review): soundness of sharing this raw pointer+length view
// across threads relies on wgpu's buffer-mapping rules keeping the mapping
// alive and access-disciplined for the view's lifetime; that invariant is
// upheld elsewhere in the crate and should be confirmed there.
#[cfg(send_sync)]
unsafe impl Send for CoreBufferMappedRange {}
#[cfg(send_sync)]
unsafe impl Sync for CoreBufferMappedRange {}
735
impl Drop for CoreBufferMappedRange {
    fn drop(&mut self) {
        // Intentionally left blank so that `BufferMappedRange` still
        // implements `Drop`, to match the web backend
    }
}
742
// Identity comparisons: the context compares by `Arc` address; every other
// handle compares by its `wgpu-core` id, or by a proxy field for CPU-side
// objects (surface-output detail → surface id, staging write → mapping ptr).
crate::cmp::impl_eq_ord_hash_arc_address!(ContextWgpuCore => .0);
crate::cmp::impl_eq_ord_hash_proxy!(CoreAdapter => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreDevice => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreQueue => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreShaderModule => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBindGroupLayout => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBindGroup => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreTextureView => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreSampler => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBuffer => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreTexture => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreExternalTexture => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBlas => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreTlas => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreQuerySet => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CorePipelineLayout => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderPipeline => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreComputePipeline => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CorePipelineCache => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreCommandEncoder => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreComputePass => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderPass => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreCommandBuffer => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderBundleEncoder => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderBundle => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreSurface => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreSurfaceOutputDetail => .surface_id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreQueueWriteBuffer => .mapping.ptr);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBufferMappedRange => .ptr);
772
impl dispatch::InstanceInterface for ContextWgpuCore {
    /// Build a fresh `wgpu-core` global from an instance descriptor.
    fn new(desc: wgt::InstanceDescriptor) -> Self
    where
        Self: Sized,
    {
        Self(Arc::new(wgc::global::Global::new("wgpu", desc, None)))
    }

    /// Create a surface from a platform-specific target.
    ///
    /// Each target variant maps to a backend-specific constructor; the
    /// variants (and their handlers) are compiled in only on the matching
    /// platform/backend configuration.
    unsafe fn create_surface(
        &self,
        target: crate::api::SurfaceTargetUnsafe,
    ) -> Result<dispatch::DispatchSurface, crate::CreateSurfaceError> {
        let id = match target {
            SurfaceTargetUnsafe::RawHandle {
                raw_display_handle,
                raw_window_handle,
            } => unsafe {
                self.0
                    .instance_create_surface(raw_display_handle, raw_window_handle, None)
            },

            #[cfg(all(
                unix,
                not(target_vendor = "apple"),
                not(target_family = "wasm"),
                not(target_os = "netbsd")
            ))]
            SurfaceTargetUnsafe::Drm {
                fd,
                plane,
                connector_id,
                width,
                height,
                refresh_rate,
            } => unsafe {
                self.0.instance_create_surface_from_drm(
                    fd,
                    plane,
                    connector_id,
                    width,
                    height,
                    refresh_rate,
                    None,
                )
            },

            #[cfg(metal)]
            SurfaceTargetUnsafe::CoreAnimationLayer(layer) => unsafe {
                self.0.instance_create_surface_metal(layer, None)
            },

            // DRM surfaces are not supported on NetBSD; report the backend
            // as unavailable instead of attempting creation.
            #[cfg(target_os = "netbsd")]
            SurfaceTargetUnsafe::Drm { .. } => Err(
                wgc::instance::CreateSurfaceError::BackendNotEnabled(wgt::Backend::Vulkan),
            ),

            #[cfg(dx12)]
            SurfaceTargetUnsafe::CompositionVisual(visual) => unsafe {
                self.0.instance_create_surface_from_visual(visual, None)
            },

            #[cfg(dx12)]
            SurfaceTargetUnsafe::SurfaceHandle(surface_handle) => unsafe {
                self.0
                    .instance_create_surface_from_surface_handle(surface_handle, None)
            },

            #[cfg(dx12)]
            SurfaceTargetUnsafe::SwapChainPanel(swap_chain_panel) => unsafe {
                self.0
                    .instance_create_surface_from_swap_chain_panel(swap_chain_panel, None)
            },
        }?;

        Ok(CoreSurface {
            context: self.clone(),
            id,
            // Not configured yet: no device and no error sink until the
            // surface is configured.
            configured_device: Mutex::default(),
            error_sink: Mutex::default(),
        }
        .into())
    }

    /// Request an adapter. wgpu-core resolves this synchronously, so the
    /// returned future is immediately ready.
    fn request_adapter(
        &self,
        options: &crate::api::RequestAdapterOptions<'_, '_>,
    ) -> Pin<Box<dyn dispatch::RequestAdapterFuture>> {
        let id = self.0.request_adapter(
            &wgc::instance::RequestAdapterOptions {
                power_preference: options.power_preference,
                force_fallback_adapter: options.force_fallback_adapter,
                compatible_surface: options
                    .compatible_surface
                    .map(|surface| surface.inner.as_core().id),
            },
            wgt::Backends::all(),
            None,
        );
        let adapter = id.map(|id| {
            let core = CoreAdapter {
                context: self.clone(),
                id,
            };
            let generic: dispatch::DispatchAdapter = core.into();
            generic
        });
        Box::pin(ready(adapter))
    }

    /// Poll every device; returns whether all queues are empty. A poll
    /// failure here is unrecoverable and panics.
    fn poll_all_devices(&self, force_wait: bool) -> bool {
        match self.0.poll_all_devices(force_wait) {
            Ok(all_queue_empty) => all_queue_empty,
            Err(err) => self.handle_error_fatal(err, "Instance::poll_all_devices"),
        }
    }

    /// Collect the WGSL language extensions naga implements into the
    /// public `WgslLanguageFeatures` bitflags.
    #[cfg(feature = "wgsl")]
    fn wgsl_language_features(&self) -> crate::WgslLanguageFeatures {
        use wgc::naga::front::wgsl::ImplementedLanguageExtension;
        ImplementedLanguageExtension::all().iter().copied().fold(
            crate::WgslLanguageFeatures::empty(),
            |acc, wle| {
                acc | match wle {
                    ImplementedLanguageExtension::ReadOnlyAndReadWriteStorageTextures => {
                        crate::WgslLanguageFeatures::ReadOnlyAndReadWriteStorageTextures
                    }
                    ImplementedLanguageExtension::Packed4x8IntegerDotProduct => {
                        crate::WgslLanguageFeatures::Packed4x8IntegerDotProduct
                    }
                    ImplementedLanguageExtension::PointerCompositeAccess => {
                        crate::WgslLanguageFeatures::PointerCompositeAccess
                    }
                }
            },
        )
    }

    /// Enumerate adapters and wrap each id in a dispatchable handle;
    /// resolved synchronously (the future is immediately ready).
    fn enumerate_adapters(
        &self,
        backends: crate::Backends,
    ) -> Pin<Box<dyn dispatch::EnumerateAdapterFuture>> {
        let adapters: Vec<DispatchAdapter> = self
            .enumerate_adapters(backends)
            .into_iter()
            .map(|adapter| {
                let core = crate::backend::wgpu_core::CoreAdapter {
                    context: self.clone(),
                    id: adapter,
                };
                core.into()
            })
            .collect();
        Box::pin(ready(adapters))
    }
}
928
impl dispatch::AdapterInterface for CoreAdapter {
    /// Open a device (and its queue) on this adapter. Resolved
    /// synchronously; the returned future is immediately ready.
    fn request_device(
        &self,
        desc: &crate::DeviceDescriptor<'_>,
    ) -> Pin<Box<dyn dispatch::RequestDeviceFuture>> {
        let res = self.context.0.adapter_request_device(
            self.id,
            &desc.map_label(|l| l.map(Borrowed)),
            None,
            None,
        );
        let (device_id, queue_id) = match res {
            Ok(ids) => ids,
            Err(err) => {
                return Box::pin(ready(Err(err.into())));
            }
        };
        // Device and queue share one error sink so errors from either are
        // routed to the same scopes / uncaptured-error handler.
        let error_sink = Arc::new(Mutex::new(ErrorSinkRaw::new()));
        let device = CoreDevice {
            context: self.context.clone(),
            id: device_id,
            error_sink: error_sink.clone(),
            features: desc.required_features,
        };
        let queue = CoreQueue {
            context: self.context.clone(),
            id: queue_id,
            error_sink,
        };
        Box::pin(ready(Ok((device.into(), queue.into()))))
    }

    /// Whether this adapter can present to the given surface.
    fn is_surface_supported(&self, surface: &dispatch::DispatchSurface) -> bool {
        let surface = surface.as_core();

        self.context
            .0
            .adapter_is_surface_supported(self.id, surface.id)
    }

    /// Features supported by this adapter.
    fn features(&self) -> crate::Features {
        self.context.0.adapter_features(self.id)
    }

    /// Best-case limits supported by this adapter.
    fn limits(&self) -> crate::Limits {
        self.context.0.adapter_limits(self.id)
    }

    /// Downlevel capability flags for this adapter.
    fn downlevel_capabilities(&self) -> crate::DownlevelCapabilities {
        self.context.0.adapter_downlevel_capabilities(self.id)
    }

    /// Descriptive information (name, backend, driver, …) for this adapter.
    fn get_info(&self) -> crate::AdapterInfo {
        self.context.0.adapter_get_info(self.id)
    }

    /// Per-format feature flags supported by this adapter.
    fn get_texture_format_features(
        &self,
        format: crate::TextureFormat,
    ) -> crate::TextureFormatFeatures {
        self.context
            .0
            .adapter_get_texture_format_features(self.id, format)
    }

    /// Current presentation timestamp from the adapter's clock domain.
    fn get_presentation_timestamp(&self) -> crate::PresentationTimestamp {
        self.context.0.adapter_get_presentation_timestamp(self.id)
    }

    /// Supported cooperative-matrix configurations for this adapter.
    fn cooperative_matrix_properties(&self) -> Vec<crate::wgt::CooperativeMatrixProperties> {
        self.context
            .0
            .adapter_cooperative_matrix_properties(self.id)
    }
}
1004
impl Drop for CoreAdapter {
    fn drop(&mut self) {
        // Release this adapter's id back to wgpu-core so its backing
        // resources can be reclaimed once no references remain.
        self.context.0.adapter_drop(self.id)
    }
}
1010
1011impl dispatch::DeviceInterface for CoreDevice {
    fn features(&self) -> crate::Features {
        // Delegates to wgpu-core for this device's feature set.
        self.context.0.device_features(self.id)
    }
1015
    fn limits(&self) -> crate::Limits {
        // Delegates to wgpu-core for this device's limits.
        self.context.0.device_limits(self.id)
    }
1019
    fn adapter_info(&self) -> crate::AdapterInfo {
        // Delegates to wgpu-core: info about the adapter backing this device.
        self.context.0.device_adapter_info(self.id)
    }
1023
    // If we have no way to create a shader module, we can't return one, and so most of the function is unreachable.
    #[cfg_attr(
        not(any(
            feature = "spirv",
            feature = "glsl",
            feature = "wgsl",
            feature = "naga-ir"
        )),
        expect(unused)
    )]
    fn create_shader_module(
        &self,
        desc: crate::ShaderModuleDescriptor<'_>,
        shader_bound_checks: wgt::ShaderRuntimeChecks,
    ) -> dispatch::DispatchShaderModule {
        let descriptor = wgc::pipeline::ShaderModuleDescriptor {
            label: desc.label.map(Borrowed),
            runtime_checks: shader_bound_checks,
        };
        // Lower the frontend-specific source into wgpu-core's representation.
        // Each frontend is feature-gated; `Dummy` is never a valid input here.
        let source = match desc.source {
            #[cfg(feature = "spirv")]
            ShaderSource::SpirV(ref spv) => {
                // Parse the given shader code and store its representation.
                let options = naga::front::spv::Options {
                    adjust_coordinate_space: false, // we require NDC_Y_UP feature
                    strict_capabilities: true,
                    block_ctx_dump_prefix: None,
                };
                wgc::pipeline::ShaderModuleSource::SpirV(Borrowed(spv), options)
            }
            #[cfg(feature = "glsl")]
            ShaderSource::Glsl {
                ref shader,
                stage,
                defines,
            } => {
                // GLSL defines are forwarded as owned (name, value) pairs.
                let options = naga::front::glsl::Options {
                    stage,
                    defines: defines
                        .iter()
                        .map(|&(key, value)| (String::from(key), String::from(value)))
                        .collect(),
                };
                wgc::pipeline::ShaderModuleSource::Glsl(Borrowed(shader), options)
            }
            #[cfg(feature = "wgsl")]
            ShaderSource::Wgsl(ref code) => wgc::pipeline::ShaderModuleSource::Wgsl(Borrowed(code)),
            #[cfg(feature = "naga-ir")]
            ShaderSource::Naga(module) => wgc::pipeline::ShaderModuleSource::Naga(module),
            ShaderSource::Dummy(_) => panic!("found `ShaderSource::Dummy`"),
        };
        let (id, error) =
            self.context
                .0
                .device_create_shader_module(self.id, &descriptor, source, None);
        // On failure the error is routed to the error sink AND converted into
        // the module's compilation info so callers can inspect the messages.
        let compilation_info = match error {
            Some(cause) => {
                self.context.handle_error(
                    &self.error_sink,
                    cause.clone(),
                    desc.label,
                    "Device::create_shader_module",
                );
                CompilationInfo::from(cause)
            }
            None => CompilationInfo { messages: vec![] },
        };

        CoreShaderModule {
            context: self.context.clone(),
            id,
            compilation_info,
        }
        .into()
    }
1099
1100    unsafe fn create_shader_module_passthrough(
1101        &self,
1102        desc: &crate::ShaderModuleDescriptorPassthrough<'_>,
1103    ) -> dispatch::DispatchShaderModule {
1104        let desc = desc.map_label(|l| l.map(Cow::from));
1105        let (id, error) = unsafe {
1106            self.context
1107                .0
1108                .device_create_shader_module_passthrough(self.id, &desc, None)
1109        };
1110
1111        let compilation_info = match error {
1112            Some(cause) => {
1113                self.context.handle_error(
1114                    &self.error_sink,
1115                    cause.clone(),
1116                    desc.label.as_deref(),
1117                    "Device::create_shader_module_passthrough",
1118                );
1119                CompilationInfo::from(cause)
1120            }
1121            None => CompilationInfo { messages: vec![] },
1122        };
1123
1124        CoreShaderModule {
1125            context: self.context.clone(),
1126            id,
1127            compilation_info,
1128        }
1129        .into()
1130    }
1131
1132    fn create_bind_group_layout(
1133        &self,
1134        desc: &crate::BindGroupLayoutDescriptor<'_>,
1135    ) -> dispatch::DispatchBindGroupLayout {
1136        let descriptor = wgc::binding_model::BindGroupLayoutDescriptor {
1137            label: desc.label.map(Borrowed),
1138            entries: Borrowed(desc.entries),
1139        };
1140        let (id, error) =
1141            self.context
1142                .0
1143                .device_create_bind_group_layout(self.id, &descriptor, None);
1144        if let Some(cause) = error {
1145            self.context.handle_error(
1146                &self.error_sink,
1147                cause,
1148                desc.label,
1149                "Device::create_bind_group_layout",
1150            );
1151        }
1152        CoreBindGroupLayout {
1153            context: self.context.clone(),
1154            id,
1155        }
1156        .into()
1157    }
1158
    fn create_bind_group(
        &self,
        desc: &crate::BindGroupDescriptor<'_>,
    ) -> dispatch::DispatchBindGroup {
        use wgc::binding_model as bm;

        // wgpu-core wants arrayed resources as borrowed slices of ids. We
        // therefore make two passes: first flatten every arrayed resource
        // into one backing Vec per kind, then (below) carve per-entry
        // subslices back out in the same iteration order.
        let mut arrayed_texture_views = Vec::new();
        let mut arrayed_samplers = Vec::new();
        if self.features.contains(Features::TEXTURE_BINDING_ARRAY) {
            // gather all the array view IDs first
            for entry in desc.entries.iter() {
                if let BindingResource::TextureViewArray(array) = entry.resource {
                    arrayed_texture_views.extend(array.iter().map(|view| view.inner.as_core().id));
                }
                if let BindingResource::SamplerArray(array) = entry.resource {
                    arrayed_samplers.extend(array.iter().map(|sampler| sampler.inner.as_core().id));
                }
            }
        }
        // Cursors into the backing Vecs, advanced as entries are translated.
        let mut remaining_arrayed_texture_views = &arrayed_texture_views[..];
        let mut remaining_arrayed_samplers = &arrayed_samplers[..];

        let mut arrayed_buffer_bindings = Vec::new();
        if self.features.contains(Features::BUFFER_BINDING_ARRAY) {
            // gather all the buffers first
            for entry in desc.entries.iter() {
                if let BindingResource::BufferArray(array) = entry.resource {
                    arrayed_buffer_bindings.extend(array.iter().map(|binding| bm::BufferBinding {
                        buffer: binding.buffer.inner.as_core().id,
                        offset: binding.offset,
                        size: binding.size.map(wgt::BufferSize::get),
                    }));
                }
            }
        }
        let mut remaining_arrayed_buffer_bindings = &arrayed_buffer_bindings[..];

        // Second pass: translate each entry, slicing arrayed resources off
        // the front of the corresponding `remaining_*` cursor. This relies on
        // iterating `desc.entries` in the same order as the gather passes.
        let entries = desc
            .entries
            .iter()
            .map(|entry| bm::BindGroupEntry {
                binding: entry.binding,
                resource: match entry.resource {
                    BindingResource::Buffer(BufferBinding {
                        buffer,
                        offset,
                        size,
                    }) => bm::BindingResource::Buffer(bm::BufferBinding {
                        buffer: buffer.inner.as_core().id,
                        offset,
                        size: size.map(wgt::BufferSize::get),
                    }),
                    BindingResource::BufferArray(array) => {
                        let slice = &remaining_arrayed_buffer_bindings[..array.len()];
                        remaining_arrayed_buffer_bindings =
                            &remaining_arrayed_buffer_bindings[array.len()..];
                        bm::BindingResource::BufferArray(Borrowed(slice))
                    }
                    BindingResource::Sampler(sampler) => {
                        bm::BindingResource::Sampler(sampler.inner.as_core().id)
                    }
                    BindingResource::SamplerArray(array) => {
                        let slice = &remaining_arrayed_samplers[..array.len()];
                        remaining_arrayed_samplers = &remaining_arrayed_samplers[array.len()..];
                        bm::BindingResource::SamplerArray(Borrowed(slice))
                    }
                    BindingResource::TextureView(texture_view) => {
                        bm::BindingResource::TextureView(texture_view.inner.as_core().id)
                    }
                    BindingResource::TextureViewArray(array) => {
                        let slice = &remaining_arrayed_texture_views[..array.len()];
                        remaining_arrayed_texture_views =
                            &remaining_arrayed_texture_views[array.len()..];
                        bm::BindingResource::TextureViewArray(Borrowed(slice))
                    }
                    BindingResource::AccelerationStructure(acceleration_structure) => {
                        bm::BindingResource::AccelerationStructure(
                            acceleration_structure.inner.as_core().id,
                        )
                    }
                    BindingResource::ExternalTexture(external_texture) => {
                        bm::BindingResource::ExternalTexture(external_texture.inner.as_core().id)
                    }
                },
            })
            .collect::<Vec<_>>();
        let descriptor = bm::BindGroupDescriptor {
            label: desc.label.as_ref().map(|label| Borrowed(&label[..])),
            layout: desc.layout.inner.as_core().id,
            entries: Borrowed(&entries),
        };

        let (id, error) = self
            .context
            .0
            .device_create_bind_group(self.id, &descriptor, None);
        if let Some(cause) = error {
            self.context.handle_error(
                &self.error_sink,
                cause,
                desc.label,
                "Device::create_bind_group",
            );
        }
        CoreBindGroup {
            context: self.context.clone(),
            id,
        }
        .into()
    }
1269
1270    fn create_pipeline_layout(
1271        &self,
1272        desc: &crate::PipelineLayoutDescriptor<'_>,
1273    ) -> dispatch::DispatchPipelineLayout {
1274        // Limit is always less or equal to hal::MAX_BIND_GROUPS, so this is always right
1275        // Guards following ArrayVec
1276        assert!(
1277            desc.bind_group_layouts.len() <= wgc::MAX_BIND_GROUPS,
1278            "Bind group layout count {} exceeds device bind group limit {}",
1279            desc.bind_group_layouts.len(),
1280            wgc::MAX_BIND_GROUPS
1281        );
1282
1283        let temp_layouts = desc
1284            .bind_group_layouts
1285            .iter()
1286            .map(|bgl| bgl.map(|bgl| bgl.inner.as_core().id))
1287            .collect::<ArrayVec<_, { wgc::MAX_BIND_GROUPS }>>();
1288        let descriptor = wgc::binding_model::PipelineLayoutDescriptor {
1289            label: desc.label.map(Borrowed),
1290            bind_group_layouts: Borrowed(&temp_layouts),
1291            immediate_size: desc.immediate_size,
1292        };
1293
1294        let (id, error) = self
1295            .context
1296            .0
1297            .device_create_pipeline_layout(self.id, &descriptor, None);
1298        if let Some(cause) = error {
1299            self.context.handle_error(
1300                &self.error_sink,
1301                cause,
1302                desc.label,
1303                "Device::create_pipeline_layout",
1304            );
1305        }
1306        CorePipelineLayout {
1307            context: self.context.clone(),
1308            id,
1309        }
1310        .into()
1311    }
1312
    fn create_render_pipeline(
        &self,
        desc: &crate::RenderPipelineDescriptor<'_>,
    ) -> dispatch::DispatchRenderPipeline {
        use wgc::pipeline as pipe;

        // Translate the public vertex-buffer layouts into wgpu-core's form,
        // borrowing the attribute slices rather than copying them.
        let vertex_buffers: ArrayVec<_, { wgc::MAX_VERTEX_BUFFERS }> = desc
            .vertex
            .buffers
            .iter()
            .map(|vbuf| pipe::VertexBufferLayout {
                array_stride: vbuf.array_stride,
                step_mode: vbuf.step_mode,
                attributes: Borrowed(vbuf.attributes),
            })
            .collect();

        // Pipeline-overridable constants are passed as owned (name, value) pairs.
        let vert_constants = desc
            .vertex
            .compilation_options
            .constants
            .iter()
            .map(|&(key, value)| (String::from(key), value))
            .collect();

        let descriptor = pipe::RenderPipelineDescriptor {
            label: desc.label.map(Borrowed),
            layout: desc.layout.map(|layout| layout.inner.as_core().id),
            vertex: pipe::VertexState {
                stage: pipe::ProgrammableStageDescriptor {
                    module: desc.vertex.module.inner.as_core().id,
                    entry_point: desc.vertex.entry_point.map(Borrowed),
                    constants: vert_constants,
                    zero_initialize_workgroup_memory: desc
                        .vertex
                        .compilation_options
                        .zero_initialize_workgroup_memory,
                },
                buffers: Borrowed(&vertex_buffers),
            },
            primitive: desc.primitive,
            depth_stencil: desc.depth_stencil.clone(),
            multisample: desc.multisample,
            // The fragment stage is optional.
            fragment: desc.fragment.as_ref().map(|frag| {
                let frag_constants = frag
                    .compilation_options
                    .constants
                    .iter()
                    .map(|&(key, value)| (String::from(key), value))
                    .collect();
                pipe::FragmentState {
                    stage: pipe::ProgrammableStageDescriptor {
                        module: frag.module.inner.as_core().id,
                        entry_point: frag.entry_point.map(Borrowed),
                        constants: frag_constants,
                        zero_initialize_workgroup_memory: frag
                            .compilation_options
                            .zero_initialize_workgroup_memory,
                    },
                    targets: Borrowed(frag.targets),
                }
            }),
            multiview_mask: desc.multiview_mask,
            cache: desc.cache.map(|cache| cache.inner.as_core().id),
        };

        let (id, error) = self
            .context
            .0
            .device_create_render_pipeline(self.id, &descriptor, None);
        if let Some(cause) = error {
            // `Internal` signals a shader-translation failure inside
            // wgpu/naga rather than invalid user input, so log it loudly.
            if let wgc::pipeline::CreateRenderPipelineError::Internal { stage, ref error } = cause {
                log::error!("Shader translation error for stage {stage:?}: {error}");
                log::error!("Please report it to https://github.com/gfx-rs/wgpu");
            }
            self.context.handle_error(
                &self.error_sink,
                cause,
                desc.label,
                "Device::create_render_pipeline",
            );
        }
        CoreRenderPipeline {
            context: self.context.clone(),
            id,
            error_sink: Arc::clone(&self.error_sink),
        }
        .into()
    }
1402
1403    fn create_mesh_pipeline(
1404        &self,
1405        desc: &crate::MeshPipelineDescriptor<'_>,
1406    ) -> dispatch::DispatchRenderPipeline {
1407        use wgc::pipeline as pipe;
1408
1409        let mesh_constants = desc
1410            .mesh
1411            .compilation_options
1412            .constants
1413            .iter()
1414            .map(|&(key, value)| (String::from(key), value))
1415            .collect();
1416        let descriptor = pipe::MeshPipelineDescriptor {
1417            label: desc.label.map(Borrowed),
1418            task: desc.task.as_ref().map(|task| {
1419                let task_constants = task
1420                    .compilation_options
1421                    .constants
1422                    .iter()
1423                    .map(|&(key, value)| (String::from(key), value))
1424                    .collect();
1425                pipe::TaskState {
1426                    stage: pipe::ProgrammableStageDescriptor {
1427                        module: task.module.inner.as_core().id,
1428                        entry_point: task.entry_point.map(Borrowed),
1429                        constants: task_constants,
1430                        zero_initialize_workgroup_memory: desc
1431                            .mesh
1432                            .compilation_options
1433                            .zero_initialize_workgroup_memory,
1434                    },
1435                }
1436            }),
1437            mesh: pipe::MeshState {
1438                stage: pipe::ProgrammableStageDescriptor {
1439                    module: desc.mesh.module.inner.as_core().id,
1440                    entry_point: desc.mesh.entry_point.map(Borrowed),
1441                    constants: mesh_constants,
1442                    zero_initialize_workgroup_memory: desc
1443                        .mesh
1444                        .compilation_options
1445                        .zero_initialize_workgroup_memory,
1446                },
1447            },
1448            layout: desc.layout.map(|layout| layout.inner.as_core().id),
1449            primitive: desc.primitive,
1450            depth_stencil: desc.depth_stencil.clone(),
1451            multisample: desc.multisample,
1452            fragment: desc.fragment.as_ref().map(|frag| {
1453                let frag_constants = frag
1454                    .compilation_options
1455                    .constants
1456                    .iter()
1457                    .map(|&(key, value)| (String::from(key), value))
1458                    .collect();
1459                pipe::FragmentState {
1460                    stage: pipe::ProgrammableStageDescriptor {
1461                        module: frag.module.inner.as_core().id,
1462                        entry_point: frag.entry_point.map(Borrowed),
1463                        constants: frag_constants,
1464                        zero_initialize_workgroup_memory: frag
1465                            .compilation_options
1466                            .zero_initialize_workgroup_memory,
1467                    },
1468                    targets: Borrowed(frag.targets),
1469                }
1470            }),
1471            multiview: desc.multiview,
1472            cache: desc.cache.map(|cache| cache.inner.as_core().id),
1473        };
1474
1475        let (id, error) = self
1476            .context
1477            .0
1478            .device_create_mesh_pipeline(self.id, &descriptor, None);
1479        if let Some(cause) = error {
1480            if let wgc::pipeline::CreateRenderPipelineError::Internal { stage, ref error } = cause {
1481                log::error!("Shader translation error for stage {stage:?}: {error}");
1482                log::error!("Please report it to https://github.com/gfx-rs/wgpu");
1483            }
1484            self.context.handle_error(
1485                &self.error_sink,
1486                cause,
1487                desc.label,
1488                "Device::create_render_pipeline",
1489            );
1490        }
1491        CoreRenderPipeline {
1492            context: self.context.clone(),
1493            id,
1494            error_sink: Arc::clone(&self.error_sink),
1495        }
1496        .into()
1497    }
1498
1499    fn create_compute_pipeline(
1500        &self,
1501        desc: &crate::ComputePipelineDescriptor<'_>,
1502    ) -> dispatch::DispatchComputePipeline {
1503        use wgc::pipeline as pipe;
1504
1505        let constants = desc
1506            .compilation_options
1507            .constants
1508            .iter()
1509            .map(|&(key, value)| (String::from(key), value))
1510            .collect();
1511
1512        let descriptor = pipe::ComputePipelineDescriptor {
1513            label: desc.label.map(Borrowed),
1514            layout: desc.layout.map(|pll| pll.inner.as_core().id),
1515            stage: pipe::ProgrammableStageDescriptor {
1516                module: desc.module.inner.as_core().id,
1517                entry_point: desc.entry_point.map(Borrowed),
1518                constants,
1519                zero_initialize_workgroup_memory: desc
1520                    .compilation_options
1521                    .zero_initialize_workgroup_memory,
1522            },
1523            cache: desc.cache.map(|cache| cache.inner.as_core().id),
1524        };
1525
1526        let (id, error) = self
1527            .context
1528            .0
1529            .device_create_compute_pipeline(self.id, &descriptor, None);
1530        if let Some(cause) = error {
1531            if let wgc::pipeline::CreateComputePipelineError::Internal(ref error) = cause {
1532                log::error!(
1533                    "Shader translation error for stage {:?}: {}",
1534                    wgt::ShaderStages::COMPUTE,
1535                    error
1536                );
1537                log::error!("Please report it to https://github.com/gfx-rs/wgpu");
1538            }
1539            self.context.handle_error(
1540                &self.error_sink,
1541                cause,
1542                desc.label,
1543                "Device::create_compute_pipeline",
1544            );
1545        }
1546        CoreComputePipeline {
1547            context: self.context.clone(),
1548            id,
1549            error_sink: Arc::clone(&self.error_sink),
1550        }
1551        .into()
1552    }
1553
1554    unsafe fn create_pipeline_cache(
1555        &self,
1556        desc: &crate::PipelineCacheDescriptor<'_>,
1557    ) -> dispatch::DispatchPipelineCache {
1558        use wgc::pipeline as pipe;
1559
1560        let descriptor = pipe::PipelineCacheDescriptor {
1561            label: desc.label.map(Borrowed),
1562            data: desc.data.map(Borrowed),
1563            fallback: desc.fallback,
1564        };
1565        let (id, error) = unsafe {
1566            self.context
1567                .0
1568                .device_create_pipeline_cache(self.id, &descriptor, None)
1569        };
1570        if let Some(cause) = error {
1571            self.context.handle_error(
1572                &self.error_sink,
1573                cause,
1574                desc.label,
1575                "Device::device_create_pipeline_cache_init",
1576            );
1577        }
1578        CorePipelineCache {
1579            context: self.context.clone(),
1580            id,
1581        }
1582        .into()
1583    }
1584
1585    fn create_buffer(&self, desc: &crate::BufferDescriptor<'_>) -> dispatch::DispatchBuffer {
1586        let (id, error) = self.context.0.device_create_buffer(
1587            self.id,
1588            &desc.map_label(|l| l.map(Borrowed)),
1589            None,
1590        );
1591        if let Some(cause) = error {
1592            self.context
1593                .handle_error(&self.error_sink, cause, desc.label, "Device::create_buffer");
1594        }
1595
1596        CoreBuffer {
1597            context: self.context.clone(),
1598            id,
1599            error_sink: Arc::clone(&self.error_sink),
1600        }
1601        .into()
1602    }
1603
1604    fn create_texture(&self, desc: &crate::TextureDescriptor<'_>) -> dispatch::DispatchTexture {
1605        let wgt_desc = desc.map_label_and_view_formats(|l| l.map(Borrowed), |v| v.to_vec());
1606        let (id, error) = self
1607            .context
1608            .0
1609            .device_create_texture(self.id, &wgt_desc, None);
1610        if let Some(cause) = error {
1611            self.context.handle_error(
1612                &self.error_sink,
1613                cause,
1614                desc.label,
1615                "Device::create_texture",
1616            );
1617        }
1618
1619        CoreTexture {
1620            context: self.context.clone(),
1621            id,
1622            error_sink: Arc::clone(&self.error_sink),
1623        }
1624        .into()
1625    }
1626
1627    fn create_external_texture(
1628        &self,
1629        desc: &crate::ExternalTextureDescriptor<'_>,
1630        planes: &[&crate::TextureView],
1631    ) -> dispatch::DispatchExternalTexture {
1632        let wgt_desc = desc.map_label(|l| l.map(Borrowed));
1633        let planes = planes
1634            .iter()
1635            .map(|plane| plane.inner.as_core().id)
1636            .collect::<Vec<_>>();
1637        let (id, error) = self
1638            .context
1639            .0
1640            .device_create_external_texture(self.id, &wgt_desc, &planes, None);
1641        if let Some(cause) = error {
1642            self.context.handle_error(
1643                &self.error_sink,
1644                cause,
1645                desc.label,
1646                "Device::create_external_texture",
1647            );
1648        }
1649
1650        CoreExternalTexture {
1651            context: self.context.clone(),
1652            id,
1653        }
1654        .into()
1655    }
1656
1657    fn create_blas(
1658        &self,
1659        desc: &crate::CreateBlasDescriptor<'_>,
1660        sizes: crate::BlasGeometrySizeDescriptors,
1661    ) -> (Option<u64>, dispatch::DispatchBlas) {
1662        let global = &self.context.0;
1663        let (id, handle, error) =
1664            global.device_create_blas(self.id, &desc.map_label(|l| l.map(Borrowed)), sizes, None);
1665        if let Some(cause) = error {
1666            self.context
1667                .handle_error(&self.error_sink, cause, desc.label, "Device::create_blas");
1668        }
1669        (
1670            handle,
1671            CoreBlas {
1672                context: self.context.clone(),
1673                id,
1674                error_sink: Arc::clone(&self.error_sink),
1675            }
1676            .into(),
1677        )
1678    }
1679
1680    fn create_tlas(&self, desc: &crate::CreateTlasDescriptor<'_>) -> dispatch::DispatchTlas {
1681        let global = &self.context.0;
1682        let (id, error) =
1683            global.device_create_tlas(self.id, &desc.map_label(|l| l.map(Borrowed)), None);
1684        if let Some(cause) = error {
1685            self.context
1686                .handle_error(&self.error_sink, cause, desc.label, "Device::create_tlas");
1687        }
1688        CoreTlas {
1689            context: self.context.clone(),
1690            id,
1691            // error_sink: Arc::clone(&self.error_sink),
1692        }
1693        .into()
1694    }
1695
1696    fn create_sampler(&self, desc: &crate::SamplerDescriptor<'_>) -> dispatch::DispatchSampler {
1697        let descriptor = wgc::resource::SamplerDescriptor {
1698            label: desc.label.map(Borrowed),
1699            address_modes: [
1700                desc.address_mode_u,
1701                desc.address_mode_v,
1702                desc.address_mode_w,
1703            ],
1704            mag_filter: desc.mag_filter,
1705            min_filter: desc.min_filter,
1706            mipmap_filter: desc.mipmap_filter,
1707            lod_min_clamp: desc.lod_min_clamp,
1708            lod_max_clamp: desc.lod_max_clamp,
1709            compare: desc.compare,
1710            anisotropy_clamp: desc.anisotropy_clamp,
1711            border_color: desc.border_color,
1712        };
1713
1714        let (id, error) = self
1715            .context
1716            .0
1717            .device_create_sampler(self.id, &descriptor, None);
1718        if let Some(cause) = error {
1719            self.context.handle_error(
1720                &self.error_sink,
1721                cause,
1722                desc.label,
1723                "Device::create_sampler",
1724            );
1725        }
1726        CoreSampler {
1727            context: self.context.clone(),
1728            id,
1729        }
1730        .into()
1731    }
1732
1733    fn create_query_set(&self, desc: &crate::QuerySetDescriptor<'_>) -> dispatch::DispatchQuerySet {
1734        let (id, error) = self.context.0.device_create_query_set(
1735            self.id,
1736            &desc.map_label(|l| l.map(Borrowed)),
1737            None,
1738        );
1739        if let Some(cause) = error {
1740            self.context
1741                .handle_error_nolabel(&self.error_sink, cause, "Device::create_query_set");
1742        }
1743        CoreQuerySet {
1744            context: self.context.clone(),
1745            id,
1746        }
1747        .into()
1748    }
1749
1750    fn create_command_encoder(
1751        &self,
1752        desc: &crate::CommandEncoderDescriptor<'_>,
1753    ) -> dispatch::DispatchCommandEncoder {
1754        let (id, error) = self.context.0.device_create_command_encoder(
1755            self.id,
1756            &desc.map_label(|l| l.map(Borrowed)),
1757            None,
1758        );
1759        if let Some(cause) = error {
1760            self.context.handle_error(
1761                &self.error_sink,
1762                cause,
1763                desc.label,
1764                "Device::create_command_encoder",
1765            );
1766        }
1767
1768        CoreCommandEncoder {
1769            context: self.context.clone(),
1770            id,
1771            error_sink: Arc::clone(&self.error_sink),
1772        }
1773        .into()
1774    }
1775
1776    fn create_render_bundle_encoder(
1777        &self,
1778        desc: &crate::RenderBundleEncoderDescriptor<'_>,
1779    ) -> dispatch::DispatchRenderBundleEncoder {
1780        let descriptor = wgc::command::RenderBundleEncoderDescriptor {
1781            label: desc.label.map(Borrowed),
1782            color_formats: Borrowed(desc.color_formats),
1783            depth_stencil: desc.depth_stencil,
1784            sample_count: desc.sample_count,
1785            multiview: desc.multiview,
1786        };
1787        let encoder = match wgc::command::RenderBundleEncoder::new(&descriptor, self.id) {
1788            Ok(encoder) => encoder,
1789            Err(e) => panic!("Error in Device::create_render_bundle_encoder: {e}"),
1790        };
1791
1792        CoreRenderBundleEncoder {
1793            context: self.context.clone(),
1794            encoder,
1795            id: crate::cmp::Identifier::create(),
1796        }
1797        .into()
1798    }
1799
1800    fn set_device_lost_callback(&self, device_lost_callback: dispatch::BoxDeviceLostCallback) {
1801        self.context
1802            .0
1803            .device_set_device_lost_closure(self.id, device_lost_callback);
1804    }
1805
1806    fn on_uncaptured_error(&self, handler: Arc<dyn crate::UncapturedErrorHandler>) {
1807        let mut error_sink = self.error_sink.lock();
1808        error_sink.uncaptured_handler = Some(handler);
1809    }
1810
1811    fn push_error_scope(&self, filter: crate::ErrorFilter) -> u32 {
1812        let mut error_sink = self.error_sink.lock();
1813        let thread_id = thread_id::ThreadId::current();
1814        let scopes = error_sink.scopes.entry(thread_id).or_default();
1815        let index = scopes
1816            .len()
1817            .try_into()
1818            .expect("Greater than 2^32 nested error scopes");
1819        scopes.push(ErrorScope {
1820            error: None,
1821            filter,
1822        });
1823        index
1824    }
1825
    // Pops the error scope pushed at `index` on the current thread, resolving
    // immediately to the error the scope captured (if any). Scopes are
    // thread-local and must be popped in reverse order of pushing; misuse
    // panics — unless the thread is already unwinding, in which case the
    // scope is silently dropped to avoid a double-panic abort.
    fn pop_error_scope(&self, index: u32) -> Pin<Box<dyn dispatch::PopErrorScopeFuture>> {
        let mut error_sink = self.error_sink.lock();

        // We go out of our way to avoid panicking while unwinding, because that would abort the process,
        // and we are supposed to just drop the error scope on the floor.
        let is_panicking = crate::util::is_panicking();
        let thread_id = thread_id::ThreadId::current();
        let err = "Mismatched pop_error_scope call: no error scope for this thread. Error scopes are thread-local.";
        let scopes = match error_sink.scopes.get_mut(&thread_id) {
            Some(s) => s,
            None => {
                if !is_panicking {
                    panic!("{err}");
                } else {
                    // Unwinding: quietly resolve to "no error".
                    return Box::pin(ready(None));
                }
            }
        };
        if scopes.is_empty() && !is_panicking {
            panic!("{err}");
        }
        // `index` must name the innermost (last-pushed) scope on this thread.
        if index as usize != scopes.len() - 1 && !is_panicking {
            panic!(
                "Mismatched pop_error_scope call: error scopes must be popped in reverse order."
            );
        }

        // It would be more correct in this case to use `remove` here so that when unwinding is occurring
        // we would remove the correct error scope, but we don't have such a primitive on the web
        // and having consistent behavior here is more important. If you are unwinding and it unwinds
        // the guards in the wrong order, it's totally reasonable to have incorrect behavior.
        let scope = match scopes.pop() {
            Some(s) => s,
            None if !is_panicking => unreachable!(),
            None => return Box::pin(ready(None)),
        };

        // The result is available synchronously; wrap it in a ready future.
        Box::pin(ready(scope.error))
    }
1865
    // Begins a capture in an attached graphics debugger.
    //
    // # Safety
    // Thin forward to wgpu-core's unsafe capture hook; the caller must uphold
    // whatever requirements the underlying backend imposes.
    unsafe fn start_graphics_debugger_capture(&self) {
        unsafe {
            self.context
                .0
                .device_start_graphics_debugger_capture(self.id)
        };
    }

    // Ends a capture previously started with
    // `start_graphics_debugger_capture`. Same safety contract as above.
    unsafe fn stop_graphics_debugger_capture(&self) {
        unsafe {
            self.context
                .0
                .device_stop_graphics_debugger_capture(self.id)
        };
    }
1881
1882    fn poll(&self, poll_type: wgt::PollType<u64>) -> Result<crate::PollStatus, crate::PollError> {
1883        match self.context.0.device_poll(self.id, poll_type) {
1884            Ok(status) => Ok(status),
1885            Err(err) => {
1886                if let Some(poll_error) = err.to_poll_error() {
1887                    return Err(poll_error);
1888                }
1889
1890                self.context.handle_error_fatal(err, "Device::poll")
1891            }
1892        }
1893    }
1894
1895    fn get_internal_counters(&self) -> crate::InternalCounters {
1896        self.context.0.device_get_internal_counters(self.id)
1897    }
1898
1899    fn generate_allocator_report(&self) -> Option<wgt::AllocatorReport> {
1900        self.context.0.device_generate_allocator_report(self.id)
1901    }
1902
1903    fn destroy(&self) {
1904        self.context.0.device_destroy(self.id);
1905    }
1906}
1907
// Dropping the wrapper releases the wgpu-core device handle.
impl Drop for CoreDevice {
    fn drop(&mut self) {
        self.context.0.device_drop(self.id)
    }
}
1913
impl dispatch::QueueInterface for CoreQueue {
    // Schedules a write of `data` into `buffer` at `offset`; validation
    // errors are routed to the error sink rather than returned.
    fn write_buffer(
        &self,
        buffer: &dispatch::DispatchBuffer,
        offset: crate::BufferAddress,
        data: &[u8],
    ) {
        let buffer = buffer.as_core();

        match self
            .context
            .0
            .queue_write_buffer(self.id, buffer.id, offset, data)
        {
            Ok(()) => (),
            Err(err) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, err, "Queue::write_buffer")
            }
        }
    }

    // Allocates a mapped staging buffer of `size` bytes backing the public
    // `write_buffer_with` API (hence the error labels below). Returns `None`
    // after reporting if allocation fails.
    fn create_staging_buffer(
        &self,
        size: crate::BufferSize,
    ) -> Option<dispatch::DispatchQueueWriteBuffer> {
        match self
            .context
            .0
            .queue_create_staging_buffer(self.id, size, None)
        {
            Ok((buffer_id, ptr)) => Some(
                CoreQueueWriteBuffer {
                    buffer_id,
                    mapping: CoreBufferMappedRange {
                        ptr,
                        size: size.get() as usize,
                    },
                }
                .into(),
            ),
            Err(err) => {
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    err,
                    "Queue::write_buffer_with",
                );
                None
            }
        }
    }

    // Validates a prospective `write_buffer_with` destination range without
    // performing the write; `None` signals (already reported) failure.
    fn validate_write_buffer(
        &self,
        buffer: &dispatch::DispatchBuffer,
        offset: wgt::BufferAddress,
        size: wgt::BufferSize,
    ) -> Option<()> {
        let buffer = buffer.as_core();

        match self
            .context
            .0
            .queue_validate_write_buffer(self.id, buffer.id, offset, size)
        {
            Ok(()) => Some(()),
            Err(err) => {
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    err,
                    "Queue::write_buffer_with",
                );
                None
            }
        }
    }

    // Copies a previously-filled staging buffer into `buffer` at `offset`
    // (the completion path of `write_buffer_with`).
    fn write_staging_buffer(
        &self,
        buffer: &dispatch::DispatchBuffer,
        offset: crate::BufferAddress,
        staging_buffer: &dispatch::DispatchQueueWriteBuffer,
    ) {
        let buffer = buffer.as_core();
        let staging_buffer = staging_buffer.as_core();

        match self.context.0.queue_write_staging_buffer(
            self.id,
            buffer.id,
            offset,
            staging_buffer.buffer_id,
        ) {
            Ok(()) => (),
            Err(err) => {
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    err,
                    "Queue::write_buffer_with",
                );
            }
        }
    }

    // Schedules a write of `data` into a texture region.
    fn write_texture(
        &self,
        texture: crate::TexelCopyTextureInfo<'_>,
        data: &[u8],
        data_layout: crate::TexelCopyBufferLayout,
        size: crate::Extent3d,
    ) {
        match self.context.0.queue_write_texture(
            self.id,
            &map_texture_copy_view(texture),
            data,
            &data_layout,
            &size,
        ) {
            Ok(()) => (),
            Err(err) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, err, "Queue::write_texture")
            }
        }
    }

    // This method needs to exist if either webgpu or webgl is enabled,
    // but we only actually have an implementation if webgl is enabled.
    #[cfg(web)]
    #[cfg_attr(not(webgl), expect(unused_variables))]
    fn copy_external_image_to_texture(
        &self,
        source: &crate::CopyExternalImageSourceInfo,
        dest: crate::CopyExternalImageDestInfo<&crate::api::Texture>,
        size: crate::Extent3d,
    ) {
        #[cfg(webgl)]
        match self.context.0.queue_copy_external_image_to_texture(
            self.id,
            source,
            map_texture_tagged_copy_view(dest),
            size,
        ) {
            Ok(()) => (),
            Err(err) => self.context.handle_error_nolabel(
                &self.error_sink,
                err,
                "Queue::copy_external_image_to_texture",
            ),
        }
    }

    // Submits the given command buffers and returns the submission index.
    // On error the index is still returned after reporting.
    fn submit(
        &self,
        command_buffers: &mut dyn Iterator<Item = dispatch::DispatchCommandBuffer>,
    ) -> u64 {
        let temp_command_buffers = command_buffers.collect::<SmallVec<[_; 4]>>();
        let command_buffer_ids = temp_command_buffers
            .iter()
            .map(|cmdbuf| cmdbuf.as_core().id)
            .collect::<SmallVec<[_; 4]>>();

        let index = match self.context.0.queue_submit(self.id, &command_buffer_ids) {
            Ok(index) => index,
            Err((index, err)) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, err, "Queue::submit");
                index
            }
        };

        // Explicit drop: the command buffers are kept alive until after
        // `queue_submit` has consumed their ids.
        drop(temp_command_buffers);

        index
    }

    // Nanoseconds per timestamp-query tick for this queue.
    fn get_timestamp_period(&self) -> f32 {
        self.context.0.queue_get_timestamp_period(self.id)
    }

    // Registers a callback fired once all currently-submitted work completes.
    fn on_submitted_work_done(&self, callback: dispatch::BoxSubmittedWorkDoneCallback) {
        self.context
            .0
            .queue_on_submitted_work_done(self.id, callback);
    }

    // Compacts a prepared BLAS, returning the new handle (and an opaque
    // value from wgpu-core — presumably the compacted handle/size; confirm
    // against `queue_compact_blas`'s signature).
    fn compact_blas(&self, blas: &dispatch::DispatchBlas) -> (Option<u64>, dispatch::DispatchBlas) {
        let (id, handle, error) =
            self.context
                .0
                .queue_compact_blas(self.id, blas.as_core().id, None);

        if let Some(cause) = error {
            self.context
                .handle_error_nolabel(&self.error_sink, cause, "Queue::compact_blas");
        }
        (
            handle,
            CoreBlas {
                context: self.context.clone(),
                id,
                error_sink: Arc::clone(&self.error_sink),
            }
            .into(),
        )
    }
}
2120
// Dropping the wrapper releases the wgpu-core queue handle.
impl Drop for CoreQueue {
    fn drop(&mut self) {
        self.context.0.queue_drop(self.id)
    }
}
2126
impl dispatch::ShaderModuleInterface for CoreShaderModule {
    // Compilation info is captured at module creation, so the "future"
    // resolves immediately with a clone of the stored value.
    fn get_compilation_info(&self) -> Pin<Box<dyn dispatch::ShaderCompilationInfoFuture>> {
        Box::pin(ready(self.compilation_info.clone()))
    }
}

// Dropping the wrapper releases the wgpu-core shader module handle.
impl Drop for CoreShaderModule {
    fn drop(&mut self) {
        self.context.0.shader_module_drop(self.id)
    }
}
2138
// No methods required beyond the trait's defaults — pure marker impl.
impl dispatch::BindGroupLayoutInterface for CoreBindGroupLayout {}

// Dropping the wrapper releases the wgpu-core bind group layout handle.
impl Drop for CoreBindGroupLayout {
    fn drop(&mut self) {
        self.context.0.bind_group_layout_drop(self.id)
    }
}
2146
// No methods required beyond the trait's defaults — pure marker impl.
impl dispatch::BindGroupInterface for CoreBindGroup {}

// Dropping the wrapper releases the wgpu-core bind group handle.
impl Drop for CoreBindGroup {
    fn drop(&mut self) {
        self.context.0.bind_group_drop(self.id)
    }
}
2154
// No methods required beyond the trait's defaults — pure marker impl.
impl dispatch::TextureViewInterface for CoreTextureView {}

// Dropping the wrapper releases the wgpu-core texture view handle.
impl Drop for CoreTextureView {
    fn drop(&mut self) {
        self.context.0.texture_view_drop(self.id);
    }
}
2162
impl dispatch::ExternalTextureInterface for CoreExternalTexture {
    // Destroys the underlying external texture resource immediately,
    // independent of any remaining wrapper references.
    fn destroy(&self) {
        self.context.0.external_texture_destroy(self.id);
    }
}

// Dropping the wrapper releases the wgpu-core external texture handle.
impl Drop for CoreExternalTexture {
    fn drop(&mut self) {
        self.context.0.external_texture_drop(self.id);
    }
}
2174
// No methods required beyond the trait's defaults — pure marker impl.
impl dispatch::SamplerInterface for CoreSampler {}

// Dropping the wrapper releases the wgpu-core sampler handle.
impl Drop for CoreSampler {
    fn drop(&mut self) {
        self.context.0.sampler_drop(self.id)
    }
}
2182
2183impl dispatch::BufferInterface for CoreBuffer {
2184    fn map_async(
2185        &self,
2186        mode: crate::MapMode,
2187        range: Range<crate::BufferAddress>,
2188        callback: dispatch::BufferMapCallback,
2189    ) {
2190        let operation = wgc::resource::BufferMapOperation {
2191            host: match mode {
2192                MapMode::Read => wgc::device::HostMap::Read,
2193                MapMode::Write => wgc::device::HostMap::Write,
2194            },
2195            callback: Some(Box::new(|status| {
2196                let res = status.map_err(|_| crate::BufferAsyncError);
2197                callback(res);
2198            })),
2199        };
2200
2201        match self.context.0.buffer_map_async(
2202            self.id,
2203            range.start,
2204            Some(range.end - range.start),
2205            operation,
2206        ) {
2207            Ok(_) => (),
2208            Err(cause) => {
2209                self.context
2210                    .handle_error_nolabel(&self.error_sink, cause, "Buffer::map_async")
2211            }
2212        }
2213    }
2214
2215    fn get_mapped_range(
2216        &self,
2217        sub_range: Range<crate::BufferAddress>,
2218    ) -> dispatch::DispatchBufferMappedRange {
2219        let size = sub_range.end - sub_range.start;
2220        match self
2221            .context
2222            .0
2223            .buffer_get_mapped_range(self.id, sub_range.start, Some(size))
2224        {
2225            Ok((ptr, size)) => CoreBufferMappedRange {
2226                ptr,
2227                size: size as usize,
2228            }
2229            .into(),
2230            Err(err) => self
2231                .context
2232                .handle_error_fatal(err, "Buffer::get_mapped_range"),
2233        }
2234    }
2235
2236    fn unmap(&self) {
2237        match self.context.0.buffer_unmap(self.id) {
2238            Ok(()) => (),
2239            Err(cause) => {
2240                self.context
2241                    .handle_error_nolabel(&self.error_sink, cause, "Buffer::buffer_unmap")
2242            }
2243        }
2244    }
2245
2246    fn destroy(&self) {
2247        self.context.0.buffer_destroy(self.id);
2248    }
2249}
2250
// Dropping the wrapper releases the wgpu-core buffer handle.
impl Drop for CoreBuffer {
    fn drop(&mut self) {
        self.context.0.buffer_drop(self.id)
    }
}
2256
2257impl dispatch::TextureInterface for CoreTexture {
2258    fn create_view(
2259        &self,
2260        desc: &crate::TextureViewDescriptor<'_>,
2261    ) -> dispatch::DispatchTextureView {
2262        let descriptor = wgc::resource::TextureViewDescriptor {
2263            label: desc.label.map(Borrowed),
2264            format: desc.format,
2265            dimension: desc.dimension,
2266            usage: desc.usage,
2267            range: wgt::ImageSubresourceRange {
2268                aspect: desc.aspect,
2269                base_mip_level: desc.base_mip_level,
2270                mip_level_count: desc.mip_level_count,
2271                base_array_layer: desc.base_array_layer,
2272                array_layer_count: desc.array_layer_count,
2273            },
2274        };
2275        let (id, error) = self
2276            .context
2277            .0
2278            .texture_create_view(self.id, &descriptor, None);
2279        if let Some(cause) = error {
2280            self.context
2281                .handle_error(&self.error_sink, cause, desc.label, "Texture::create_view");
2282        }
2283        CoreTextureView {
2284            context: self.context.clone(),
2285            id,
2286        }
2287        .into()
2288    }
2289
2290    fn destroy(&self) {
2291        self.context.0.texture_destroy(self.id);
2292    }
2293}
2294
// Dropping the wrapper releases the wgpu-core texture handle.
impl Drop for CoreTexture {
    fn drop(&mut self) {
        self.context.0.texture_drop(self.id)
    }
}
2300
2301impl dispatch::BlasInterface for CoreBlas {
2302    fn prepare_compact_async(&self, callback: BlasCompactCallback) {
2303        let callback: Option<wgc::resource::BlasCompactCallback> =
2304            Some(Box::new(|status: BlasPrepareCompactResult| {
2305                let res = status.map_err(|_| crate::BlasAsyncError);
2306                callback(res);
2307            }));
2308
2309        match self.context.0.blas_prepare_compact_async(self.id, callback) {
2310            Ok(_) => (),
2311            Err(cause) => self.context.handle_error_nolabel(
2312                &self.error_sink,
2313                cause,
2314                "Blas::prepare_compact_async",
2315            ),
2316        }
2317    }
2318
2319    fn ready_for_compaction(&self) -> bool {
2320        match self.context.0.ready_for_compaction(self.id) {
2321            Ok(ready) => ready,
2322            Err(cause) => {
2323                self.context.handle_error_nolabel(
2324                    &self.error_sink,
2325                    cause,
2326                    "Blas::ready_for_compaction",
2327                );
2328                // A BLAS is definitely not ready for compaction if it's not valid
2329                false
2330            }
2331        }
2332    }
2333}
2334
// Dropping the wrapper releases the wgpu-core BLAS handle.
impl Drop for CoreBlas {
    fn drop(&mut self) {
        self.context.0.blas_drop(self.id)
    }
}
2340
// No methods required beyond the trait's defaults — pure marker impl.
impl dispatch::TlasInterface for CoreTlas {}

// Dropping the wrapper releases the wgpu-core TLAS handle.
impl Drop for CoreTlas {
    fn drop(&mut self) {
        self.context.0.tlas_drop(self.id)
    }
}
2348
// No methods required beyond the trait's defaults — pure marker impl.
impl dispatch::QuerySetInterface for CoreQuerySet {}

// Dropping the wrapper releases the wgpu-core query set handle.
impl Drop for CoreQuerySet {
    fn drop(&mut self) {
        self.context.0.query_set_drop(self.id)
    }
}
2356
// No methods required beyond the trait's defaults — pure marker impl.
impl dispatch::PipelineLayoutInterface for CorePipelineLayout {}

// Dropping the wrapper releases the wgpu-core pipeline layout handle.
impl Drop for CorePipelineLayout {
    fn drop(&mut self) {
        self.context.0.pipeline_layout_drop(self.id)
    }
}
2364
2365impl dispatch::RenderPipelineInterface for CoreRenderPipeline {
2366    fn get_bind_group_layout(&self, index: u32) -> dispatch::DispatchBindGroupLayout {
2367        let (id, error) = self
2368            .context
2369            .0
2370            .render_pipeline_get_bind_group_layout(self.id, index, None);
2371        if let Some(err) = error {
2372            self.context.handle_error_nolabel(
2373                &self.error_sink,
2374                err,
2375                "RenderPipeline::get_bind_group_layout",
2376            )
2377        }
2378        CoreBindGroupLayout {
2379            context: self.context.clone(),
2380            id,
2381        }
2382        .into()
2383    }
2384}
2385
2386impl Drop for CoreRenderPipeline {
2387    fn drop(&mut self) {
2388        self.context.0.render_pipeline_drop(self.id)
2389    }
2390}
2391
2392impl dispatch::ComputePipelineInterface for CoreComputePipeline {
2393    fn get_bind_group_layout(&self, index: u32) -> dispatch::DispatchBindGroupLayout {
2394        let (id, error) = self
2395            .context
2396            .0
2397            .compute_pipeline_get_bind_group_layout(self.id, index, None);
2398        if let Some(err) = error {
2399            self.context.handle_error_nolabel(
2400                &self.error_sink,
2401                err,
2402                "ComputePipeline::get_bind_group_layout",
2403            )
2404        }
2405        CoreBindGroupLayout {
2406            context: self.context.clone(),
2407            id,
2408        }
2409        .into()
2410    }
2411}
2412
2413impl Drop for CoreComputePipeline {
2414    fn drop(&mut self) {
2415        self.context.0.compute_pipeline_drop(self.id)
2416    }
2417}
2418
impl dispatch::PipelineCacheInterface for CorePipelineCache {
    // Serializes the cache contents for persistence, if the backend supports it.
    fn get_data(&self) -> Option<Vec<u8>> {
        self.context.0.pipeline_cache_get_data(self.id)
    }
}

// Dropping the wrapper releases the wgpu-core pipeline cache handle.
impl Drop for CorePipelineCache {
    fn drop(&mut self) {
        self.context.0.pipeline_cache_drop(self.id)
    }
}
2430
2431impl dispatch::CommandEncoderInterface for CoreCommandEncoder {
2432    fn copy_buffer_to_buffer(
2433        &self,
2434        source: &dispatch::DispatchBuffer,
2435        source_offset: crate::BufferAddress,
2436        destination: &dispatch::DispatchBuffer,
2437        destination_offset: crate::BufferAddress,
2438        copy_size: Option<crate::BufferAddress>,
2439    ) {
2440        let source = source.as_core();
2441        let destination = destination.as_core();
2442
2443        if let Err(cause) = self.context.0.command_encoder_copy_buffer_to_buffer(
2444            self.id,
2445            source.id,
2446            source_offset,
2447            destination.id,
2448            destination_offset,
2449            copy_size,
2450        ) {
2451            self.context.handle_error_nolabel(
2452                &self.error_sink,
2453                cause,
2454                "CommandEncoder::copy_buffer_to_buffer",
2455            );
2456        }
2457    }
2458
2459    fn copy_buffer_to_texture(
2460        &self,
2461        source: crate::TexelCopyBufferInfo<'_>,
2462        destination: crate::TexelCopyTextureInfo<'_>,
2463        copy_size: crate::Extent3d,
2464    ) {
2465        if let Err(cause) = self.context.0.command_encoder_copy_buffer_to_texture(
2466            self.id,
2467            &map_buffer_copy_view(source),
2468            &map_texture_copy_view(destination),
2469            &copy_size,
2470        ) {
2471            self.context.handle_error_nolabel(
2472                &self.error_sink,
2473                cause,
2474                "CommandEncoder::copy_buffer_to_texture",
2475            );
2476        }
2477    }
2478
2479    fn copy_texture_to_buffer(
2480        &self,
2481        source: crate::TexelCopyTextureInfo<'_>,
2482        destination: crate::TexelCopyBufferInfo<'_>,
2483        copy_size: crate::Extent3d,
2484    ) {
2485        if let Err(cause) = self.context.0.command_encoder_copy_texture_to_buffer(
2486            self.id,
2487            &map_texture_copy_view(source),
2488            &map_buffer_copy_view(destination),
2489            &copy_size,
2490        ) {
2491            self.context.handle_error_nolabel(
2492                &self.error_sink,
2493                cause,
2494                "CommandEncoder::copy_texture_to_buffer",
2495            );
2496        }
2497    }
2498
2499    fn copy_texture_to_texture(
2500        &self,
2501        source: crate::TexelCopyTextureInfo<'_>,
2502        destination: crate::TexelCopyTextureInfo<'_>,
2503        copy_size: crate::Extent3d,
2504    ) {
2505        if let Err(cause) = self.context.0.command_encoder_copy_texture_to_texture(
2506            self.id,
2507            &map_texture_copy_view(source),
2508            &map_texture_copy_view(destination),
2509            &copy_size,
2510        ) {
2511            self.context.handle_error_nolabel(
2512                &self.error_sink,
2513                cause,
2514                "CommandEncoder::copy_texture_to_texture",
2515            );
2516        }
2517    }
2518
2519    fn begin_compute_pass(
2520        &self,
2521        desc: &crate::ComputePassDescriptor<'_>,
2522    ) -> dispatch::DispatchComputePass {
2523        let timestamp_writes =
2524            desc.timestamp_writes
2525                .as_ref()
2526                .map(|tw| wgc::command::PassTimestampWrites {
2527                    query_set: tw.query_set.inner.as_core().id,
2528                    beginning_of_pass_write_index: tw.beginning_of_pass_write_index,
2529                    end_of_pass_write_index: tw.end_of_pass_write_index,
2530                });
2531
2532        let (pass, err) = self.context.0.command_encoder_begin_compute_pass(
2533            self.id,
2534            &wgc::command::ComputePassDescriptor {
2535                label: desc.label.map(Borrowed),
2536                timestamp_writes,
2537            },
2538        );
2539
2540        if let Some(cause) = err {
2541            self.context.handle_error(
2542                &self.error_sink,
2543                cause,
2544                desc.label,
2545                "CommandEncoder::begin_compute_pass",
2546            );
2547        }
2548
2549        CoreComputePass {
2550            context: self.context.clone(),
2551            pass,
2552            error_sink: self.error_sink.clone(),
2553            id: crate::cmp::Identifier::create(),
2554        }
2555        .into()
2556    }
2557
    // Begins a render pass on this encoder. All attachments and timestamp
    // writes are translated into wgpu-core's borrowed descriptor forms;
    // errors are reported through the error sink and an (invalid) pass
    // object is still returned so recording can continue.
    fn begin_render_pass(
        &self,
        desc: &crate::RenderPassDescriptor<'_>,
    ) -> dispatch::DispatchRenderPass {
        // Translate each (optional) color attachment; `None` slots are kept
        // to preserve attachment indices.
        let colors = desc
            .color_attachments
            .iter()
            .map(|ca| {
                ca.as_ref()
                    .map(|at| wgc::command::RenderPassColorAttachment {
                        view: at.view.inner.as_core().id,
                        depth_slice: at.depth_slice,
                        resolve_target: at.resolve_target.map(|view| view.inner.as_core().id),
                        load_op: at.ops.load,
                        store_op: at.ops.store,
                    })
            })
            .collect::<Vec<_>>();

        let depth_stencil = desc.depth_stencil_attachment.as_ref().map(|dsa| {
            wgc::command::RenderPassDepthStencilAttachment {
                view: dsa.view.inner.as_core().id,
                depth: map_pass_channel(dsa.depth_ops.as_ref()),
                stencil: map_pass_channel(dsa.stencil_ops.as_ref()),
            }
        });

        let timestamp_writes =
            desc.timestamp_writes
                .as_ref()
                .map(|tw| wgc::command::PassTimestampWrites {
                    query_set: tw.query_set.inner.as_core().id,
                    beginning_of_pass_write_index: tw.beginning_of_pass_write_index,
                    end_of_pass_write_index: tw.end_of_pass_write_index,
                });

        // The descriptor borrows the locals above, so they must outlive this call.
        let (pass, err) = self.context.0.command_encoder_begin_render_pass(
            self.id,
            &wgc::command::RenderPassDescriptor {
                label: desc.label.map(Borrowed),
                timestamp_writes: timestamp_writes.as_ref(),
                color_attachments: Borrowed(&colors),
                depth_stencil_attachment: depth_stencil.as_ref(),
                occlusion_query_set: desc.occlusion_query_set.map(|qs| qs.inner.as_core().id),
                multiview_mask: desc.multiview_mask,
            },
        );

        if let Some(cause) = err {
            self.context.handle_error(
                &self.error_sink,
                cause,
                desc.label,
                "CommandEncoder::begin_render_pass",
            );
        }

        CoreRenderPass {
            context: self.context.clone(),
            pass,
            error_sink: self.error_sink.clone(),
            id: crate::cmp::Identifier::create(),
        }
        .into()
    }
2623
2624    fn finish(&mut self) -> dispatch::DispatchCommandBuffer {
2625        let descriptor = wgt::CommandBufferDescriptor::default();
2626        let (id, opt_label_and_error) =
2627            self.context
2628                .0
2629                .command_encoder_finish(self.id, &descriptor, None);
2630        if let Some((label, cause)) = opt_label_and_error {
2631            self.context
2632                .handle_error(&self.error_sink, cause, Some(&label), "a CommandEncoder");
2633        }
2634        CoreCommandBuffer {
2635            context: self.context.clone(),
2636            id,
2637        }
2638        .into()
2639    }
2640
2641    fn clear_texture(
2642        &self,
2643        texture: &dispatch::DispatchTexture,
2644        subresource_range: &crate::ImageSubresourceRange,
2645    ) {
2646        let texture = texture.as_core();
2647
2648        if let Err(cause) =
2649            self.context
2650                .0
2651                .command_encoder_clear_texture(self.id, texture.id, subresource_range)
2652        {
2653            self.context.handle_error_nolabel(
2654                &self.error_sink,
2655                cause,
2656                "CommandEncoder::clear_texture",
2657            );
2658        }
2659    }
2660
2661    fn clear_buffer(
2662        &self,
2663        buffer: &dispatch::DispatchBuffer,
2664        offset: crate::BufferAddress,
2665        size: Option<crate::BufferAddress>,
2666    ) {
2667        let buffer = buffer.as_core();
2668
2669        if let Err(cause) = self
2670            .context
2671            .0
2672            .command_encoder_clear_buffer(self.id, buffer.id, offset, size)
2673        {
2674            self.context.handle_error_nolabel(
2675                &self.error_sink,
2676                cause,
2677                "CommandEncoder::fill_buffer",
2678            );
2679        }
2680    }
2681
2682    fn insert_debug_marker(&self, label: &str) {
2683        if let Err(cause) = self
2684            .context
2685            .0
2686            .command_encoder_insert_debug_marker(self.id, label)
2687        {
2688            self.context.handle_error_nolabel(
2689                &self.error_sink,
2690                cause,
2691                "CommandEncoder::insert_debug_marker",
2692            );
2693        }
2694    }
2695
2696    fn push_debug_group(&self, label: &str) {
2697        if let Err(cause) = self
2698            .context
2699            .0
2700            .command_encoder_push_debug_group(self.id, label)
2701        {
2702            self.context.handle_error_nolabel(
2703                &self.error_sink,
2704                cause,
2705                "CommandEncoder::push_debug_group",
2706            );
2707        }
2708    }
2709
2710    fn pop_debug_group(&self) {
2711        if let Err(cause) = self.context.0.command_encoder_pop_debug_group(self.id) {
2712            self.context.handle_error_nolabel(
2713                &self.error_sink,
2714                cause,
2715                "CommandEncoder::pop_debug_group",
2716            );
2717        }
2718    }
2719
2720    fn write_timestamp(&self, query_set: &dispatch::DispatchQuerySet, query_index: u32) {
2721        let query_set = query_set.as_core();
2722
2723        if let Err(cause) =
2724            self.context
2725                .0
2726                .command_encoder_write_timestamp(self.id, query_set.id, query_index)
2727        {
2728            self.context.handle_error_nolabel(
2729                &self.error_sink,
2730                cause,
2731                "CommandEncoder::write_timestamp",
2732            );
2733        }
2734    }
2735
2736    fn resolve_query_set(
2737        &self,
2738        query_set: &dispatch::DispatchQuerySet,
2739        first_query: u32,
2740        query_count: u32,
2741        destination: &dispatch::DispatchBuffer,
2742        destination_offset: crate::BufferAddress,
2743    ) {
2744        let query_set = query_set.as_core();
2745        let destination = destination.as_core();
2746
2747        if let Err(cause) = self.context.0.command_encoder_resolve_query_set(
2748            self.id,
2749            query_set.id,
2750            first_query,
2751            query_count,
2752            destination.id,
2753            destination_offset,
2754        ) {
2755            self.context.handle_error_nolabel(
2756                &self.error_sink,
2757                cause,
2758                "CommandEncoder::resolve_query_set",
2759            );
2760        }
2761    }
2762
2763    fn mark_acceleration_structures_built<'a>(
2764        &self,
2765        blas: &mut dyn Iterator<Item = &'a Blas>,
2766        tlas: &mut dyn Iterator<Item = &'a Tlas>,
2767    ) {
2768        let blas = blas
2769            .map(|b| b.inner.as_core().id)
2770            .collect::<SmallVec<[_; 4]>>();
2771        let tlas = tlas
2772            .map(|t| t.inner.as_core().id)
2773            .collect::<SmallVec<[_; 4]>>();
2774        if let Err(cause) = self
2775            .context
2776            .0
2777            .command_encoder_mark_acceleration_structures_built(self.id, &blas, &tlas)
2778        {
2779            self.context.handle_error_nolabel(
2780                &self.error_sink,
2781                cause,
2782                "CommandEncoder::build_acceleration_structures_unsafe_tlas",
2783            );
2784        }
2785    }
2786
2787    fn build_acceleration_structures<'a>(
2788        &self,
2789        blas: &mut dyn Iterator<Item = &'a crate::BlasBuildEntry<'a>>,
2790        tlas: &mut dyn Iterator<Item = &'a crate::Tlas>,
2791    ) {
2792        let blas = blas.map(|e: &crate::BlasBuildEntry<'_>| {
2793            let geometries = match e.geometry {
2794                crate::BlasGeometries::TriangleGeometries(ref triangle_geometries) => {
2795                    let iter = triangle_geometries.iter().map(|tg| {
2796                        wgc::ray_tracing::BlasTriangleGeometry {
2797                            vertex_buffer: tg.vertex_buffer.inner.as_core().id,
2798                            index_buffer: tg.index_buffer.map(|buf| buf.inner.as_core().id),
2799                            transform_buffer: tg.transform_buffer.map(|buf| buf.inner.as_core().id),
2800                            size: tg.size,
2801                            transform_buffer_offset: tg.transform_buffer_offset,
2802                            first_vertex: tg.first_vertex,
2803                            vertex_stride: tg.vertex_stride,
2804                            first_index: tg.first_index,
2805                        }
2806                    });
2807                    wgc::ray_tracing::BlasGeometries::TriangleGeometries(Box::new(iter))
2808                }
2809            };
2810            wgc::ray_tracing::BlasBuildEntry {
2811                blas_id: e.blas.inner.as_core().id,
2812                geometries,
2813            }
2814        });
2815
2816        let tlas = tlas.into_iter().map(|e| {
2817            let instances = e
2818                .instances
2819                .iter()
2820                .map(|instance: &Option<crate::TlasInstance>| {
2821                    instance
2822                        .as_ref()
2823                        .map(|instance| wgc::ray_tracing::TlasInstance {
2824                            blas_id: instance.blas.as_core().id,
2825                            transform: &instance.transform,
2826                            custom_data: instance.custom_data,
2827                            mask: instance.mask,
2828                        })
2829                });
2830            wgc::ray_tracing::TlasPackage {
2831                tlas_id: e.inner.as_core().id,
2832                instances: Box::new(instances),
2833                lowest_unmodified: e.lowest_unmodified,
2834            }
2835        });
2836
2837        if let Err(cause) = self
2838            .context
2839            .0
2840            .command_encoder_build_acceleration_structures(self.id, blas, tlas)
2841        {
2842            self.context.handle_error_nolabel(
2843                &self.error_sink,
2844                cause,
2845                "CommandEncoder::build_acceleration_structures_unsafe_tlas",
2846            );
2847        }
2848    }
2849
2850    fn transition_resources<'a>(
2851        &mut self,
2852        buffer_transitions: &mut dyn Iterator<
2853            Item = wgt::BufferTransition<&'a dispatch::DispatchBuffer>,
2854        >,
2855        texture_transitions: &mut dyn Iterator<
2856            Item = wgt::TextureTransition<&'a dispatch::DispatchTexture>,
2857        >,
2858    ) {
2859        let result = self.context.0.command_encoder_transition_resources(
2860            self.id,
2861            buffer_transitions.map(|t| wgt::BufferTransition {
2862                buffer: t.buffer.as_core().id,
2863                state: t.state,
2864            }),
2865            texture_transitions.map(|t| wgt::TextureTransition {
2866                texture: t.texture.as_core().id,
2867                selector: t.selector.clone(),
2868                state: t.state,
2869            }),
2870        );
2871
2872        if let Err(cause) = result {
2873            self.context.handle_error_nolabel(
2874                &self.error_sink,
2875                cause,
2876                "CommandEncoder::transition_resources",
2877            );
2878        }
2879    }
2880}
2881
2882impl Drop for CoreCommandEncoder {
2883    fn drop(&mut self) {
2884        self.context.0.command_encoder_drop(self.id)
2885    }
2886}
2887
// Marker impl: no methods are required of a finished command buffer here.
impl dispatch::CommandBufferInterface for CoreCommandBuffer {}
2889
2890impl Drop for CoreCommandBuffer {
2891    fn drop(&mut self) {
2892        self.context.0.command_buffer_drop(self.id)
2893    }
2894}
2895
2896impl dispatch::ComputePassInterface for CoreComputePass {
2897    fn set_pipeline(&mut self, pipeline: &dispatch::DispatchComputePipeline) {
2898        let pipeline = pipeline.as_core();
2899
2900        if let Err(cause) = self
2901            .context
2902            .0
2903            .compute_pass_set_pipeline(&mut self.pass, pipeline.id)
2904        {
2905            self.context.handle_error(
2906                &self.error_sink,
2907                cause,
2908                self.pass.label(),
2909                "ComputePass::set_pipeline",
2910            );
2911        }
2912    }
2913
2914    fn set_bind_group(
2915        &mut self,
2916        index: u32,
2917        bind_group: Option<&dispatch::DispatchBindGroup>,
2918        offsets: &[crate::DynamicOffset],
2919    ) {
2920        let bg = bind_group.map(|bg| bg.as_core().id);
2921
2922        if let Err(cause) =
2923            self.context
2924                .0
2925                .compute_pass_set_bind_group(&mut self.pass, index, bg, offsets)
2926        {
2927            self.context.handle_error(
2928                &self.error_sink,
2929                cause,
2930                self.pass.label(),
2931                "ComputePass::set_bind_group",
2932            );
2933        }
2934    }
2935
2936    fn set_immediates(&mut self, offset: u32, data: &[u8]) {
2937        if let Err(cause) = self
2938            .context
2939            .0
2940            .compute_pass_set_immediates(&mut self.pass, offset, data)
2941        {
2942            self.context.handle_error(
2943                &self.error_sink,
2944                cause,
2945                self.pass.label(),
2946                "ComputePass::set_immediates",
2947            );
2948        }
2949    }
2950
2951    fn insert_debug_marker(&mut self, label: &str) {
2952        if let Err(cause) =
2953            self.context
2954                .0
2955                .compute_pass_insert_debug_marker(&mut self.pass, label, 0)
2956        {
2957            self.context.handle_error(
2958                &self.error_sink,
2959                cause,
2960                self.pass.label(),
2961                "ComputePass::insert_debug_marker",
2962            );
2963        }
2964    }
2965
2966    fn push_debug_group(&mut self, group_label: &str) {
2967        if let Err(cause) =
2968            self.context
2969                .0
2970                .compute_pass_push_debug_group(&mut self.pass, group_label, 0)
2971        {
2972            self.context.handle_error(
2973                &self.error_sink,
2974                cause,
2975                self.pass.label(),
2976                "ComputePass::push_debug_group",
2977            );
2978        }
2979    }
2980
2981    fn pop_debug_group(&mut self) {
2982        if let Err(cause) = self.context.0.compute_pass_pop_debug_group(&mut self.pass) {
2983            self.context.handle_error(
2984                &self.error_sink,
2985                cause,
2986                self.pass.label(),
2987                "ComputePass::pop_debug_group",
2988            );
2989        }
2990    }
2991
2992    fn write_timestamp(&mut self, query_set: &dispatch::DispatchQuerySet, query_index: u32) {
2993        let query_set = query_set.as_core();
2994
2995        if let Err(cause) =
2996            self.context
2997                .0
2998                .compute_pass_write_timestamp(&mut self.pass, query_set.id, query_index)
2999        {
3000            self.context.handle_error(
3001                &self.error_sink,
3002                cause,
3003                self.pass.label(),
3004                "ComputePass::write_timestamp",
3005            );
3006        }
3007    }
3008
3009    fn begin_pipeline_statistics_query(
3010        &mut self,
3011        query_set: &dispatch::DispatchQuerySet,
3012        query_index: u32,
3013    ) {
3014        let query_set = query_set.as_core();
3015
3016        if let Err(cause) = self.context.0.compute_pass_begin_pipeline_statistics_query(
3017            &mut self.pass,
3018            query_set.id,
3019            query_index,
3020        ) {
3021            self.context.handle_error(
3022                &self.error_sink,
3023                cause,
3024                self.pass.label(),
3025                "ComputePass::begin_pipeline_statistics_query",
3026            );
3027        }
3028    }
3029
3030    fn end_pipeline_statistics_query(&mut self) {
3031        if let Err(cause) = self
3032            .context
3033            .0
3034            .compute_pass_end_pipeline_statistics_query(&mut self.pass)
3035        {
3036            self.context.handle_error(
3037                &self.error_sink,
3038                cause,
3039                self.pass.label(),
3040                "ComputePass::end_pipeline_statistics_query",
3041            );
3042        }
3043    }
3044
3045    fn dispatch_workgroups(&mut self, x: u32, y: u32, z: u32) {
3046        if let Err(cause) = self
3047            .context
3048            .0
3049            .compute_pass_dispatch_workgroups(&mut self.pass, x, y, z)
3050        {
3051            self.context.handle_error(
3052                &self.error_sink,
3053                cause,
3054                self.pass.label(),
3055                "ComputePass::dispatch_workgroups",
3056            );
3057        }
3058    }
3059
3060    fn dispatch_workgroups_indirect(
3061        &mut self,
3062        indirect_buffer: &dispatch::DispatchBuffer,
3063        indirect_offset: crate::BufferAddress,
3064    ) {
3065        let indirect_buffer = indirect_buffer.as_core();
3066
3067        if let Err(cause) = self.context.0.compute_pass_dispatch_workgroups_indirect(
3068            &mut self.pass,
3069            indirect_buffer.id,
3070            indirect_offset,
3071        ) {
3072            self.context.handle_error(
3073                &self.error_sink,
3074                cause,
3075                self.pass.label(),
3076                "ComputePass::dispatch_workgroups_indirect",
3077            );
3078        }
3079    }
3080}
3081
3082impl Drop for CoreComputePass {
3083    fn drop(&mut self) {
3084        if let Err(cause) = self.context.0.compute_pass_end(&mut self.pass) {
3085            self.context.handle_error(
3086                &self.error_sink,
3087                cause,
3088                self.pass.label(),
3089                "ComputePass::end",
3090            );
3091        }
3092    }
3093}
3094
3095impl dispatch::RenderPassInterface for CoreRenderPass {
3096    fn set_pipeline(&mut self, pipeline: &dispatch::DispatchRenderPipeline) {
3097        let pipeline = pipeline.as_core();
3098
3099        if let Err(cause) = self
3100            .context
3101            .0
3102            .render_pass_set_pipeline(&mut self.pass, pipeline.id)
3103        {
3104            self.context.handle_error(
3105                &self.error_sink,
3106                cause,
3107                self.pass.label(),
3108                "RenderPass::set_pipeline",
3109            );
3110        }
3111    }
3112
3113    fn set_bind_group(
3114        &mut self,
3115        index: u32,
3116        bind_group: Option<&dispatch::DispatchBindGroup>,
3117        offsets: &[crate::DynamicOffset],
3118    ) {
3119        let bg = bind_group.map(|bg| bg.as_core().id);
3120
3121        if let Err(cause) =
3122            self.context
3123                .0
3124                .render_pass_set_bind_group(&mut self.pass, index, bg, offsets)
3125        {
3126            self.context.handle_error(
3127                &self.error_sink,
3128                cause,
3129                self.pass.label(),
3130                "RenderPass::set_bind_group",
3131            );
3132        }
3133    }
3134
3135    fn set_index_buffer(
3136        &mut self,
3137        buffer: &dispatch::DispatchBuffer,
3138        index_format: crate::IndexFormat,
3139        offset: crate::BufferAddress,
3140        size: Option<crate::BufferSize>,
3141    ) {
3142        let buffer = buffer.as_core();
3143
3144        if let Err(cause) = self.context.0.render_pass_set_index_buffer(
3145            &mut self.pass,
3146            buffer.id,
3147            index_format,
3148            offset,
3149            size,
3150        ) {
3151            self.context.handle_error(
3152                &self.error_sink,
3153                cause,
3154                self.pass.label(),
3155                "RenderPass::set_index_buffer",
3156            );
3157        }
3158    }
3159
3160    fn set_vertex_buffer(
3161        &mut self,
3162        slot: u32,
3163        buffer: &dispatch::DispatchBuffer,
3164        offset: crate::BufferAddress,
3165        size: Option<crate::BufferSize>,
3166    ) {
3167        let buffer = buffer.as_core();
3168
3169        if let Err(cause) = self.context.0.render_pass_set_vertex_buffer(
3170            &mut self.pass,
3171            slot,
3172            buffer.id,
3173            offset,
3174            size,
3175        ) {
3176            self.context.handle_error(
3177                &self.error_sink,
3178                cause,
3179                self.pass.label(),
3180                "RenderPass::set_vertex_buffer",
3181            );
3182        }
3183    }
3184
3185    fn set_immediates(&mut self, offset: u32, data: &[u8]) {
3186        if let Err(cause) = self
3187            .context
3188            .0
3189            .render_pass_set_immediates(&mut self.pass, offset, data)
3190        {
3191            self.context.handle_error(
3192                &self.error_sink,
3193                cause,
3194                self.pass.label(),
3195                "RenderPass::set_immediates",
3196            );
3197        }
3198    }
3199
3200    fn set_blend_constant(&mut self, color: crate::Color) {
3201        if let Err(cause) = self
3202            .context
3203            .0
3204            .render_pass_set_blend_constant(&mut self.pass, color)
3205        {
3206            self.context.handle_error(
3207                &self.error_sink,
3208                cause,
3209                self.pass.label(),
3210                "RenderPass::set_blend_constant",
3211            );
3212        }
3213    }
3214
3215    fn set_scissor_rect(&mut self, x: u32, y: u32, width: u32, height: u32) {
3216        if let Err(cause) =
3217            self.context
3218                .0
3219                .render_pass_set_scissor_rect(&mut self.pass, x, y, width, height)
3220        {
3221            self.context.handle_error(
3222                &self.error_sink,
3223                cause,
3224                self.pass.label(),
3225                "RenderPass::set_scissor_rect",
3226            );
3227        }
3228    }
3229
3230    fn set_viewport(
3231        &mut self,
3232        x: f32,
3233        y: f32,
3234        width: f32,
3235        height: f32,
3236        min_depth: f32,
3237        max_depth: f32,
3238    ) {
3239        if let Err(cause) = self.context.0.render_pass_set_viewport(
3240            &mut self.pass,
3241            x,
3242            y,
3243            width,
3244            height,
3245            min_depth,
3246            max_depth,
3247        ) {
3248            self.context.handle_error(
3249                &self.error_sink,
3250                cause,
3251                self.pass.label(),
3252                "RenderPass::set_viewport",
3253            );
3254        }
3255    }
3256
3257    fn set_stencil_reference(&mut self, reference: u32) {
3258        if let Err(cause) = self
3259            .context
3260            .0
3261            .render_pass_set_stencil_reference(&mut self.pass, reference)
3262        {
3263            self.context.handle_error(
3264                &self.error_sink,
3265                cause,
3266                self.pass.label(),
3267                "RenderPass::set_stencil_reference",
3268            );
3269        }
3270    }
3271
3272    fn draw(&mut self, vertices: Range<u32>, instances: Range<u32>) {
3273        if let Err(cause) = self.context.0.render_pass_draw(
3274            &mut self.pass,
3275            vertices.end - vertices.start,
3276            instances.end - instances.start,
3277            vertices.start,
3278            instances.start,
3279        ) {
3280            self.context.handle_error(
3281                &self.error_sink,
3282                cause,
3283                self.pass.label(),
3284                "RenderPass::draw",
3285            );
3286        }
3287    }
3288
3289    fn draw_indexed(&mut self, indices: Range<u32>, base_vertex: i32, instances: Range<u32>) {
3290        if let Err(cause) = self.context.0.render_pass_draw_indexed(
3291            &mut self.pass,
3292            indices.end - indices.start,
3293            instances.end - instances.start,
3294            indices.start,
3295            base_vertex,
3296            instances.start,
3297        ) {
3298            self.context.handle_error(
3299                &self.error_sink,
3300                cause,
3301                self.pass.label(),
3302                "RenderPass::draw_indexed",
3303            );
3304        }
3305    }
3306
3307    fn draw_mesh_tasks(&mut self, group_count_x: u32, group_count_y: u32, group_count_z: u32) {
3308        if let Err(cause) = self.context.0.render_pass_draw_mesh_tasks(
3309            &mut self.pass,
3310            group_count_x,
3311            group_count_y,
3312            group_count_z,
3313        ) {
3314            self.context.handle_error(
3315                &self.error_sink,
3316                cause,
3317                self.pass.label(),
3318                "RenderPass::draw_mesh_tasks",
3319            );
3320        }
3321    }
3322
3323    fn draw_indirect(
3324        &mut self,
3325        indirect_buffer: &dispatch::DispatchBuffer,
3326        indirect_offset: crate::BufferAddress,
3327    ) {
3328        let indirect_buffer = indirect_buffer.as_core();
3329
3330        if let Err(cause) = self.context.0.render_pass_draw_indirect(
3331            &mut self.pass,
3332            indirect_buffer.id,
3333            indirect_offset,
3334        ) {
3335            self.context.handle_error(
3336                &self.error_sink,
3337                cause,
3338                self.pass.label(),
3339                "RenderPass::draw_indirect",
3340            );
3341        }
3342    }
3343
3344    fn draw_indexed_indirect(
3345        &mut self,
3346        indirect_buffer: &dispatch::DispatchBuffer,
3347        indirect_offset: crate::BufferAddress,
3348    ) {
3349        let indirect_buffer = indirect_buffer.as_core();
3350
3351        if let Err(cause) = self.context.0.render_pass_draw_indexed_indirect(
3352            &mut self.pass,
3353            indirect_buffer.id,
3354            indirect_offset,
3355        ) {
3356            self.context.handle_error(
3357                &self.error_sink,
3358                cause,
3359                self.pass.label(),
3360                "RenderPass::draw_indexed_indirect",
3361            );
3362        }
3363    }
3364
3365    fn draw_mesh_tasks_indirect(
3366        &mut self,
3367        indirect_buffer: &dispatch::DispatchBuffer,
3368        indirect_offset: crate::BufferAddress,
3369    ) {
3370        let indirect_buffer = indirect_buffer.as_core();
3371
3372        if let Err(cause) = self.context.0.render_pass_draw_mesh_tasks_indirect(
3373            &mut self.pass,
3374            indirect_buffer.id,
3375            indirect_offset,
3376        ) {
3377            self.context.handle_error(
3378                &self.error_sink,
3379                cause,
3380                self.pass.label(),
3381                "RenderPass::draw_mesh_tasks_indirect",
3382            );
3383        }
3384    }
3385
3386    fn multi_draw_indirect(
3387        &mut self,
3388        indirect_buffer: &dispatch::DispatchBuffer,
3389        indirect_offset: crate::BufferAddress,
3390        count: u32,
3391    ) {
3392        let indirect_buffer = indirect_buffer.as_core();
3393
3394        if let Err(cause) = self.context.0.render_pass_multi_draw_indirect(
3395            &mut self.pass,
3396            indirect_buffer.id,
3397            indirect_offset,
3398            count,
3399        ) {
3400            self.context.handle_error(
3401                &self.error_sink,
3402                cause,
3403                self.pass.label(),
3404                "RenderPass::multi_draw_indirect",
3405            );
3406        }
3407    }
3408
3409    fn multi_draw_indexed_indirect(
3410        &mut self,
3411        indirect_buffer: &dispatch::DispatchBuffer,
3412        indirect_offset: crate::BufferAddress,
3413        count: u32,
3414    ) {
3415        let indirect_buffer = indirect_buffer.as_core();
3416
3417        if let Err(cause) = self.context.0.render_pass_multi_draw_indexed_indirect(
3418            &mut self.pass,
3419            indirect_buffer.id,
3420            indirect_offset,
3421            count,
3422        ) {
3423            self.context.handle_error(
3424                &self.error_sink,
3425                cause,
3426                self.pass.label(),
3427                "RenderPass::multi_draw_indexed_indirect",
3428            );
3429        }
3430    }
3431
3432    fn multi_draw_mesh_tasks_indirect(
3433        &mut self,
3434        indirect_buffer: &dispatch::DispatchBuffer,
3435        indirect_offset: crate::BufferAddress,
3436        count: u32,
3437    ) {
3438        let indirect_buffer = indirect_buffer.as_core();
3439
3440        if let Err(cause) = self.context.0.render_pass_multi_draw_mesh_tasks_indirect(
3441            &mut self.pass,
3442            indirect_buffer.id,
3443            indirect_offset,
3444            count,
3445        ) {
3446            self.context.handle_error(
3447                &self.error_sink,
3448                cause,
3449                self.pass.label(),
3450                "RenderPass::multi_draw_mesh_tasks_indirect",
3451            );
3452        }
3453    }
3454
3455    fn multi_draw_indirect_count(
3456        &mut self,
3457        indirect_buffer: &dispatch::DispatchBuffer,
3458        indirect_offset: crate::BufferAddress,
3459        count_buffer: &dispatch::DispatchBuffer,
3460        count_buffer_offset: crate::BufferAddress,
3461        max_count: u32,
3462    ) {
3463        let indirect_buffer = indirect_buffer.as_core();
3464        let count_buffer = count_buffer.as_core();
3465
3466        if let Err(cause) = self.context.0.render_pass_multi_draw_indirect_count(
3467            &mut self.pass,
3468            indirect_buffer.id,
3469            indirect_offset,
3470            count_buffer.id,
3471            count_buffer_offset,
3472            max_count,
3473        ) {
3474            self.context.handle_error(
3475                &self.error_sink,
3476                cause,
3477                self.pass.label(),
3478                "RenderPass::multi_draw_indirect_count",
3479            );
3480        }
3481    }
3482
3483    fn multi_draw_indexed_indirect_count(
3484        &mut self,
3485        indirect_buffer: &dispatch::DispatchBuffer,
3486        indirect_offset: crate::BufferAddress,
3487        count_buffer: &dispatch::DispatchBuffer,
3488        count_buffer_offset: crate::BufferAddress,
3489        max_count: u32,
3490    ) {
3491        let indirect_buffer = indirect_buffer.as_core();
3492        let count_buffer = count_buffer.as_core();
3493
3494        if let Err(cause) = self
3495            .context
3496            .0
3497            .render_pass_multi_draw_indexed_indirect_count(
3498                &mut self.pass,
3499                indirect_buffer.id,
3500                indirect_offset,
3501                count_buffer.id,
3502                count_buffer_offset,
3503                max_count,
3504            )
3505        {
3506            self.context.handle_error(
3507                &self.error_sink,
3508                cause,
3509                self.pass.label(),
3510                "RenderPass::multi_draw_indexed_indirect_count",
3511            );
3512        }
3513    }
3514
3515    fn multi_draw_mesh_tasks_indirect_count(
3516        &mut self,
3517        indirect_buffer: &dispatch::DispatchBuffer,
3518        indirect_offset: crate::BufferAddress,
3519        count_buffer: &dispatch::DispatchBuffer,
3520        count_buffer_offset: crate::BufferAddress,
3521        max_count: u32,
3522    ) {
3523        let indirect_buffer = indirect_buffer.as_core();
3524        let count_buffer = count_buffer.as_core();
3525
3526        if let Err(cause) = self
3527            .context
3528            .0
3529            .render_pass_multi_draw_mesh_tasks_indirect_count(
3530                &mut self.pass,
3531                indirect_buffer.id,
3532                indirect_offset,
3533                count_buffer.id,
3534                count_buffer_offset,
3535                max_count,
3536            )
3537        {
3538            self.context.handle_error(
3539                &self.error_sink,
3540                cause,
3541                self.pass.label(),
3542                "RenderPass::multi_draw_mesh_tasks_indirect_count",
3543            );
3544        }
3545    }
3546
3547    fn insert_debug_marker(&mut self, label: &str) {
3548        if let Err(cause) = self
3549            .context
3550            .0
3551            .render_pass_insert_debug_marker(&mut self.pass, label, 0)
3552        {
3553            self.context.handle_error(
3554                &self.error_sink,
3555                cause,
3556                self.pass.label(),
3557                "RenderPass::insert_debug_marker",
3558            );
3559        }
3560    }
3561
3562    fn push_debug_group(&mut self, group_label: &str) {
3563        if let Err(cause) =
3564            self.context
3565                .0
3566                .render_pass_push_debug_group(&mut self.pass, group_label, 0)
3567        {
3568            self.context.handle_error(
3569                &self.error_sink,
3570                cause,
3571                self.pass.label(),
3572                "RenderPass::push_debug_group",
3573            );
3574        }
3575    }
3576
3577    fn pop_debug_group(&mut self) {
3578        if let Err(cause) = self.context.0.render_pass_pop_debug_group(&mut self.pass) {
3579            self.context.handle_error(
3580                &self.error_sink,
3581                cause,
3582                self.pass.label(),
3583                "RenderPass::pop_debug_group",
3584            );
3585        }
3586    }
3587
3588    fn write_timestamp(&mut self, query_set: &dispatch::DispatchQuerySet, query_index: u32) {
3589        let query_set = query_set.as_core();
3590
3591        if let Err(cause) =
3592            self.context
3593                .0
3594                .render_pass_write_timestamp(&mut self.pass, query_set.id, query_index)
3595        {
3596            self.context.handle_error(
3597                &self.error_sink,
3598                cause,
3599                self.pass.label(),
3600                "RenderPass::write_timestamp",
3601            );
3602        }
3603    }
3604
3605    fn begin_occlusion_query(&mut self, query_index: u32) {
3606        if let Err(cause) = self
3607            .context
3608            .0
3609            .render_pass_begin_occlusion_query(&mut self.pass, query_index)
3610        {
3611            self.context.handle_error(
3612                &self.error_sink,
3613                cause,
3614                self.pass.label(),
3615                "RenderPass::begin_occlusion_query",
3616            );
3617        }
3618    }
3619
3620    fn end_occlusion_query(&mut self) {
3621        if let Err(cause) = self
3622            .context
3623            .0
3624            .render_pass_end_occlusion_query(&mut self.pass)
3625        {
3626            self.context.handle_error(
3627                &self.error_sink,
3628                cause,
3629                self.pass.label(),
3630                "RenderPass::end_occlusion_query",
3631            );
3632        }
3633    }
3634
3635    fn begin_pipeline_statistics_query(
3636        &mut self,
3637        query_set: &dispatch::DispatchQuerySet,
3638        query_index: u32,
3639    ) {
3640        let query_set = query_set.as_core();
3641
3642        if let Err(cause) = self.context.0.render_pass_begin_pipeline_statistics_query(
3643            &mut self.pass,
3644            query_set.id,
3645            query_index,
3646        ) {
3647            self.context.handle_error(
3648                &self.error_sink,
3649                cause,
3650                self.pass.label(),
3651                "RenderPass::begin_pipeline_statistics_query",
3652            );
3653        }
3654    }
3655
3656    fn end_pipeline_statistics_query(&mut self) {
3657        if let Err(cause) = self
3658            .context
3659            .0
3660            .render_pass_end_pipeline_statistics_query(&mut self.pass)
3661        {
3662            self.context.handle_error(
3663                &self.error_sink,
3664                cause,
3665                self.pass.label(),
3666                "RenderPass::end_pipeline_statistics_query",
3667            );
3668        }
3669    }
3670
3671    fn execute_bundles(
3672        &mut self,
3673        render_bundles: &mut dyn Iterator<Item = &dispatch::DispatchRenderBundle>,
3674    ) {
3675        let temp_render_bundles = render_bundles
3676            .map(|rb| rb.as_core().id)
3677            .collect::<SmallVec<[_; 4]>>();
3678        if let Err(cause) = self
3679            .context
3680            .0
3681            .render_pass_execute_bundles(&mut self.pass, &temp_render_bundles)
3682        {
3683            self.context.handle_error(
3684                &self.error_sink,
3685                cause,
3686                self.pass.label(),
3687                "RenderPass::execute_bundles",
3688            );
3689        }
3690    }
3691}
3692
3693impl Drop for CoreRenderPass {
3694    fn drop(&mut self) {
3695        if let Err(cause) = self.context.0.render_pass_end(&mut self.pass) {
3696            self.context.handle_error(
3697                &self.error_sink,
3698                cause,
3699                self.pass.label(),
3700                "RenderPass::end",
3701            );
3702        }
3703    }
3704}
3705
impl dispatch::RenderBundleEncoderInterface for CoreRenderBundleEncoder {
    // Thin wrappers over the C-compatible `wgc::command::bundle_ffi` recording
    // functions. Commands are recorded without validation here; errors surface
    // when the bundle is validated in `finish()`.

    fn set_pipeline(&mut self, pipeline: &dispatch::DispatchRenderPipeline) {
        let pipeline = pipeline.as_core();

        wgpu_render_bundle_set_pipeline(&mut self.encoder, pipeline.id)
    }

    fn set_bind_group(
        &mut self,
        index: u32,
        bind_group: Option<&dispatch::DispatchBindGroup>,
        offsets: &[crate::DynamicOffset],
    ) {
        // `None` unbinds the group at `index`.
        let bg = bind_group.map(|bg| bg.as_core().id);

        // SAFETY: `offsets` is a live slice for the duration of this call, so
        // the pointer/length pair handed to the FFI function is valid.
        unsafe {
            wgpu_render_bundle_set_bind_group(
                &mut self.encoder,
                index,
                bg,
                offsets.as_ptr(),
                offsets.len(),
            )
        }
    }

    fn set_index_buffer(
        &mut self,
        buffer: &dispatch::DispatchBuffer,
        index_format: crate::IndexFormat,
        offset: crate::BufferAddress,
        size: Option<crate::BufferSize>,
    ) {
        let buffer = buffer.as_core();

        // `size: None` means "to the end of the buffer".
        self.encoder
            .set_index_buffer(buffer.id, index_format, offset, size)
    }

    fn set_vertex_buffer(
        &mut self,
        slot: u32,
        buffer: &dispatch::DispatchBuffer,
        offset: crate::BufferAddress,
        size: Option<crate::BufferSize>,
    ) {
        let buffer = buffer.as_core();

        wgpu_render_bundle_set_vertex_buffer(&mut self.encoder, slot, buffer.id, offset, size)
    }

    fn set_immediates(&mut self, offset: u32, data: &[u8]) {
        // SAFETY: `data` is a live slice; the pointer and (converted) length
        // describe exactly that memory. The `try_into().unwrap()` panics only
        // if `data.len()` does not fit the FFI length type, which would be a
        // caller bug.
        unsafe {
            wgpu_render_bundle_set_immediates(
                &mut self.encoder,
                offset,
                data.len().try_into().unwrap(),
                data.as_ptr(),
            )
        }
    }

    fn draw(&mut self, vertices: Range<u32>, instances: Range<u32>) {
        // The FFI layer takes (count, start) pairs rather than ranges.
        wgpu_render_bundle_draw(
            &mut self.encoder,
            vertices.end - vertices.start,
            instances.end - instances.start,
            vertices.start,
            instances.start,
        )
    }

    fn draw_indexed(&mut self, indices: Range<u32>, base_vertex: i32, instances: Range<u32>) {
        wgpu_render_bundle_draw_indexed(
            &mut self.encoder,
            indices.end - indices.start,
            instances.end - instances.start,
            indices.start,
            base_vertex,
            instances.start,
        )
    }

    fn draw_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        wgpu_render_bundle_draw_indirect(&mut self.encoder, indirect_buffer.id, indirect_offset)
    }

    fn draw_indexed_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        wgpu_render_bundle_draw_indexed_indirect(
            &mut self.encoder,
            indirect_buffer.id,
            indirect_offset,
        )
    }

    // Consumes the encoder, validating the recorded commands into a reusable
    // render bundle. Validation failure here is reported as fatal.
    fn finish(self, desc: &crate::RenderBundleDescriptor<'_>) -> dispatch::DispatchRenderBundle
    where
        Self: Sized,
    {
        let (id, error) = self.context.0.render_bundle_encoder_finish(
            self.encoder,
            &desc.map_label(|l| l.map(Borrowed)),
            None,
        );
        if let Some(err) = error {
            self.context
                .handle_error_fatal(err, "RenderBundleEncoder::finish");
        }
        CoreRenderBundle {
            context: self.context.clone(),
            id,
        }
        .into()
    }
}
3833
// Marker impl: a finished render bundle exposes no operations of its own
// beyond being executed by a pass and dropped.
impl dispatch::RenderBundleInterface for CoreRenderBundle {}
3835
3836impl Drop for CoreRenderBundle {
3837    fn drop(&mut self) {
3838        self.context.0.render_bundle_drop(self.id)
3839    }
3840}
3841
impl dispatch::SurfaceInterface for CoreSurface {
    // Queries the formats/present modes/usages this surface supports on
    // `adapter`; an error from wgpu-core is reported as "no capabilities".
    fn get_capabilities(&self, adapter: &dispatch::DispatchAdapter) -> wgt::SurfaceCapabilities {
        let adapter = adapter.as_core();

        self.context
            .0
            .surface_get_capabilities(self.id, adapter.id)
            .unwrap_or_default()
    }

    // (Re)configures the surface for `device`. On success, remembers which
    // device configured it and adopts that device's error sink for later
    // texture acquisition; on failure the previous configuration is left
    // untouched.
    fn configure(&self, device: &dispatch::DispatchDevice, config: &crate::SurfaceConfiguration) {
        let device = device.as_core();

        let error = self.context.0.surface_configure(self.id, device.id, config);
        if let Some(e) = error {
            self.context
                .handle_error_nolabel(&device.error_sink, e, "Surface::configure");
        } else {
            *self.configured_device.lock() = Some(device.id);
            *self.error_sink.lock() = Some(device.error_sink.clone());
        }
    }

    // Acquires the next swapchain texture. Returns the texture (if one was
    // produced), the acquisition status, and an output-detail handle used
    // later for present/discard.
    fn get_current_texture(
        &self,
    ) -> (
        Option<dispatch::DispatchTexture>,
        crate::SurfaceStatus,
        dispatch::DispatchSurfaceOutputDetail,
    ) {
        // Use the configuring device's error sink if the surface has been
        // configured; otherwise fall back to a fresh, unconnected sink.
        let error_sink = if let Some(error_sink) = self.error_sink.lock().as_ref() {
            error_sink.clone()
        } else {
            Arc::new(Mutex::new(ErrorSinkRaw::new()))
        };

        // Built unconditionally: callers need the detail handle even when
        // acquisition fails below.
        let output_detail = CoreSurfaceOutputDetail {
            context: self.context.clone(),
            surface_id: self.id,
            error_sink: error_sink.clone(),
        }
        .into();

        match self.context.0.surface_get_current_texture(self.id, None) {
            Ok(wgc::present::SurfaceOutput {
                status,
                texture: texture_id,
            }) => {
                // A successful acquisition may still yield no texture
                // (e.g. depending on `status`), hence the inner Option.
                let data = texture_id
                    .map(|id| CoreTexture {
                        context: self.context.clone(),
                        id,
                        error_sink,
                    })
                    .map(Into::into);

                (data, status, output_detail)
            }
            Err(err) => {
                // Re-check the surface's own sink: if the surface was never
                // configured there is nowhere to report the error, so it is
                // escalated to a fatal error instead.
                let error_sink = self.error_sink.lock();
                match error_sink.as_ref() {
                    Some(error_sink) => {
                        self.context.handle_error_nolabel(
                            error_sink,
                            err,
                            "Surface::get_current_texture_view",
                        );
                        (None, crate::SurfaceStatus::Unknown, output_detail)
                    }
                    None => self
                        .context
                        .handle_error_fatal(err, "Surface::get_current_texture_view"),
                }
            }
        }
    }
}
3919
3920impl Drop for CoreSurface {
3921    fn drop(&mut self) {
3922        self.context.0.surface_drop(self.id)
3923    }
3924}
3925
3926impl dispatch::SurfaceOutputDetailInterface for CoreSurfaceOutputDetail {
3927    fn present(&self) {
3928        match self.context.0.surface_present(self.surface_id) {
3929            Ok(_status) => (),
3930            Err(err) => {
3931                self.context
3932                    .handle_error_nolabel(&self.error_sink, err, "Surface::present");
3933            }
3934        }
3935    }
3936
3937    fn texture_discard(&self) {
3938        match self.context.0.surface_texture_discard(self.surface_id) {
3939            Ok(_status) => (),
3940            Err(err) => self
3941                .context
3942                .handle_error_fatal(err, "Surface::discard_texture"),
3943        }
3944    }
3945}
impl Drop for CoreSurfaceOutputDetail {
    fn drop(&mut self) {
        // Discard gets called by the api struct
        // (the owning surface-texture wrapper invokes present/discard before
        // this detail handle is dropped), so there is nothing to release here.

        // no-op
    }
}
3953
3954impl dispatch::QueueWriteBufferInterface for CoreQueueWriteBuffer {
3955    fn slice(&self) -> &[u8] {
3956        panic!()
3957    }
3958
3959    #[inline]
3960    fn slice_mut(&mut self) -> &mut [u8] {
3961        self.mapping.slice_mut()
3962    }
3963}
impl Drop for CoreQueueWriteBuffer {
    fn drop(&mut self) {
        // The api struct calls queue.write_staging_buffer
        // (i.e. submission of the staged data happens in the owning wrapper
        // before this is dropped), so there is nothing to do here.

        // no-op
    }
}
3971
impl dispatch::BufferMappedRangeInterface for CoreBufferMappedRange {
    /// Read-only view of the mapped buffer memory.
    #[inline]
    fn slice(&self) -> &[u8] {
        // SAFETY: `self.ptr`/`self.size` describe the mapped range this
        // struct represents; the mapping is presumed to stay valid for the
        // lifetime of `self` (enforced where this struct is constructed —
        // TODO(review): confirm, the constructor is not visible here).
        unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.size) }
    }

    /// Mutable view of the mapped buffer memory.
    #[inline]
    fn slice_mut(&mut self) -> &mut [u8] {
        // SAFETY: same mapping as `slice`; `&mut self` gives exclusive access
        // to this wrapper, so the unique slice handed out cannot alias another
        // view obtained through it.
        unsafe { slice::from_raw_parts_mut(self.ptr.as_ptr(), self.size) }
    }

    // Only meaningful on the WebGPU (wasm) backend, which can expose the
    // mapping as a JS typed array; unreachable on the wgpu-core backend.
    #[cfg(webgpu)]
    fn as_uint8array(&self) -> &js_sys::Uint8Array {
        panic!("Only available on WebGPU")
    }
}