// wgpu/backend/wgpu_core.rs

1use alloc::{
2    borrow::Cow::{self, Borrowed},
3    boxed::Box,
4    format,
5    string::{String, ToString as _},
6    sync::Arc,
7    vec,
8    vec::Vec,
9};
10use core::{
11    error::Error,
12    fmt,
13    future::ready,
14    ops::{Deref, Range},
15    pin::Pin,
16    ptr::NonNull,
17    slice,
18};
19use hashbrown::HashMap;
20
21use arrayvec::ArrayVec;
22use smallvec::SmallVec;
23use wgc::{
24    command::bundle_ffi::*, error::ContextErrorSource, pipeline::CreateShaderModuleError,
25    resource::BlasPrepareCompactResult,
26};
27use wgt::{
28    error::{ErrorType, WebGpuError},
29    WasmNotSendSync,
30};
31
32use crate::{
33    api,
34    dispatch::{self, BlasCompactCallback, BufferMappedRangeInterface},
35    BindingResource, Blas, BufferBinding, BufferDescriptor, CompilationInfo, CompilationMessage,
36    CompilationMessageType, ErrorSource, Features, Label, LoadOp, MapMode, Operations,
37    ShaderSource, SurfaceTargetUnsafe, TextureDescriptor, Tlas,
38};
39use crate::{dispatch::DispatchAdapter, util::Mutex};
40
41mod thread_id;
42
/// The `wgpu-core`-backed context: a cheaply clonable shared handle to the
/// global `wgpu-core` hub through which all resource operations are routed.
#[derive(Clone)]
pub struct ContextWgpuCore(Arc<wgc::global::Global>);
45
impl Drop for ContextWgpuCore {
    // NOTE(review): deliberately a no-op — the inner `Arc` performs all
    // cleanup. The explicit impl presumably exists as a marker/hook point;
    // confirm before removing it.
    fn drop(&mut self) {
        //nothing
    }
}
51
52impl fmt::Debug for ContextWgpuCore {
53    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
54        f.debug_struct("ContextWgpuCore")
55            .field("type", &"Native")
56            .finish()
57    }
58}
59
impl ContextWgpuCore {
    /// Wraps an existing `wgpu-hal` instance of backend `A` in a new context.
    ///
    /// # Safety
    ///
    /// Inherits the contract of [`wgc::global::Global::from_hal_instance`];
    /// `hal_instance` must be a valid instance for backend `A`.
    pub unsafe fn from_hal_instance<A: hal::Api>(hal_instance: A::Instance) -> Self {
        Self(unsafe {
            Arc::new(wgc::global::Global::from_hal_instance::<A>(
                "wgpu",
                hal_instance,
            ))
        })
    }

    /// # Safety
    ///
    /// - The raw instance handle returned must not be manually destroyed.
    pub unsafe fn instance_as_hal<A: hal::Api>(&self) -> Option<&A::Instance> {
        unsafe { self.0.instance_as_hal::<A>() }
    }

    /// Wraps an existing `wgpu-core` instance in a new context.
    ///
    /// # Safety
    ///
    /// Inherits the contract of [`wgc::global::Global::from_instance`].
    pub unsafe fn from_core_instance(core_instance: wgc::instance::Instance) -> Self {
        Self(unsafe { Arc::new(wgc::global::Global::from_instance(core_instance)) })
    }

    /// Lists the ids of all adapters available on the requested `backends`.
    #[cfg(wgpu_core)]
    pub fn enumerate_adapters(&self, backends: wgt::Backends) -> Vec<wgc::id::AdapterId> {
        self.0.enumerate_adapters(backends)
    }

    /// Registers a `wgpu-hal` adapter with the hub, returning its new id.
    ///
    /// # Safety
    ///
    /// Inherits the contract of the underlying
    /// `wgc::global::Global::create_adapter_from_hal`.
    pub unsafe fn create_adapter_from_hal<A: hal::Api>(
        &self,
        hal_adapter: hal::ExposedAdapter<A>,
    ) -> wgc::id::AdapterId {
        unsafe { self.0.create_adapter_from_hal(hal_adapter.into(), None) }
    }

    /// Borrows the underlying `wgpu-hal` adapter, if it belongs to backend `A`.
    ///
    /// # Safety
    ///
    /// Inherits the contract of the underlying `adapter_as_hal`.
    pub unsafe fn adapter_as_hal<A: hal::Api>(
        &self,
        adapter: &CoreAdapter,
    ) -> Option<impl Deref<Target = A::Adapter> + WasmNotSendSync> {
        unsafe { self.0.adapter_as_hal::<A>(adapter.id) }
    }

    /// Borrows the underlying `wgpu-hal` buffer, if it belongs to backend `A`.
    ///
    /// # Safety
    ///
    /// Inherits the contract of the underlying `buffer_as_hal`.
    pub unsafe fn buffer_as_hal<A: hal::Api>(
        &self,
        buffer: &CoreBuffer,
    ) -> Option<impl Deref<Target = A::Buffer>> {
        unsafe { self.0.buffer_as_hal::<A>(buffer.id) }
    }

    /// Registers an already-open `wgpu-hal` device (and its queue) with the
    /// hub, wiring both wrappers up to a freshly created shared error sink.
    ///
    /// # Safety
    ///
    /// Inherits the contract of the underlying `create_device_from_hal`;
    /// `hal_device` must have been opened from `adapter`.
    pub unsafe fn create_device_from_hal<A: hal::Api>(
        &self,
        adapter: &CoreAdapter,
        hal_device: hal::OpenDevice<A>,
        desc: &crate::DeviceDescriptor<'_>,
    ) -> Result<(CoreDevice, CoreQueue), crate::RequestDeviceError> {
        let (device_id, queue_id) = unsafe {
            self.0.create_device_from_hal(
                adapter.id,
                hal_device.into(),
                &desc.map_label(|l| l.map(Borrowed)),
                None,
                None,
            )
        }?;
        // Device and queue share one error sink so errors from either are
        // routed through the same scope stack / uncaptured handler.
        let error_sink = Arc::new(Mutex::new(ErrorSinkRaw::new()));
        let device = CoreDevice {
            context: self.clone(),
            id: device_id,
            error_sink: error_sink.clone(),
            features: desc.required_features,
        };
        let queue = CoreQueue {
            context: self.clone(),
            id: queue_id,
            error_sink,
        };
        Ok((device, queue))
    }

    /// Registers an existing `wgpu-hal` texture with the hub. Creation errors
    /// are reported through the device's error sink rather than returned.
    ///
    /// # Safety
    ///
    /// Inherits the contract of the underlying `create_texture_from_hal`;
    /// `hal_texture` must belong to `device` and match `desc`.
    pub unsafe fn create_texture_from_hal<A: hal::Api>(
        &self,
        hal_texture: A::Texture,
        device: &CoreDevice,
        desc: &TextureDescriptor<'_>,
    ) -> CoreTexture {
        let descriptor = desc.map_label_and_view_formats(|l| l.map(Borrowed), |v| v.to_vec());
        let (id, error) = unsafe {
            self.0
                .create_texture_from_hal(Box::new(hal_texture), device.id, &descriptor, None)
        };
        if let Some(cause) = error {
            self.handle_error(
                &device.error_sink,
                cause,
                desc.label,
                "Device::create_texture_from_hal",
            );
        }
        CoreTexture {
            context: self.clone(),
            id,
            error_sink: Arc::clone(&device.error_sink),
        }
    }

    /// # Safety
    ///
    /// - `hal_buffer` must be created from `device`.
    /// - `hal_buffer` must be created respecting `desc`
    /// - `hal_buffer` must be initialized
    /// - `hal_buffer` must not have zero size.
    pub unsafe fn create_buffer_from_hal<A: hal::Api>(
        &self,
        hal_buffer: A::Buffer,
        device: &CoreDevice,
        desc: &BufferDescriptor<'_>,
    ) -> CoreBuffer {
        let (id, error) = unsafe {
            self.0.create_buffer_from_hal::<A>(
                hal_buffer,
                device.id,
                &desc.map_label(|l| l.map(Borrowed)),
                None,
            )
        };
        // Errors surface through the sink; an id is still returned so the
        // wrapper below is always constructed.
        if let Some(cause) = error {
            self.handle_error(
                &device.error_sink,
                cause,
                desc.label,
                "Device::create_buffer_from_hal",
            );
        }
        CoreBuffer {
            context: self.clone(),
            id,
            error_sink: Arc::clone(&device.error_sink),
        }
    }

    /// Borrows the underlying `wgpu-hal` device, if it belongs to backend `A`.
    ///
    /// # Safety
    ///
    /// Inherits the contract of the underlying `device_as_hal`.
    pub unsafe fn device_as_hal<A: hal::Api>(
        &self,
        device: &CoreDevice,
    ) -> Option<impl Deref<Target = A::Device>> {
        unsafe { self.0.device_as_hal::<A>(device.id) }
    }

    /// Borrows the underlying `wgpu-hal` surface, if it belongs to backend `A`.
    ///
    /// # Safety
    ///
    /// Inherits the contract of the underlying `surface_as_hal`.
    pub unsafe fn surface_as_hal<A: hal::Api>(
        &self,
        surface: &CoreSurface,
    ) -> Option<impl Deref<Target = A::Surface>> {
        unsafe { self.0.surface_as_hal::<A>(surface.id) }
    }

    /// Borrows the underlying `wgpu-hal` texture, if it belongs to backend `A`.
    ///
    /// # Safety
    ///
    /// Inherits the contract of the underlying `texture_as_hal`.
    pub unsafe fn texture_as_hal<A: hal::Api>(
        &self,
        texture: &CoreTexture,
    ) -> Option<impl Deref<Target = A::Texture>> {
        unsafe { self.0.texture_as_hal::<A>(texture.id) }
    }

    /// Borrows the underlying `wgpu-hal` texture view, if it belongs to
    /// backend `A`.
    ///
    /// # Safety
    ///
    /// Inherits the contract of the underlying `texture_view_as_hal`.
    pub unsafe fn texture_view_as_hal<A: hal::Api>(
        &self,
        texture_view: &CoreTextureView,
    ) -> Option<impl Deref<Target = A::TextureView>> {
        unsafe { self.0.texture_view_as_hal::<A>(texture_view.id) }
    }

    /// This method will start the wgpu_core level command recording.
    ///
    /// # Safety
    ///
    /// Inherits the contract of the underlying `command_encoder_as_hal_mut`.
    pub unsafe fn command_encoder_as_hal_mut<
        A: hal::Api,
        F: FnOnce(Option<&mut A::CommandEncoder>) -> R,
        R,
    >(
        &self,
        command_encoder: &CoreCommandEncoder,
        hal_command_encoder_callback: F,
    ) -> R {
        unsafe {
            self.0.command_encoder_as_hal_mut::<A, F, R>(
                command_encoder.id,
                hal_command_encoder_callback,
            )
        }
    }

    /// Borrows the underlying `wgpu-hal` acceleration structure of a BLAS,
    /// if it belongs to backend `A`.
    ///
    /// # Safety
    ///
    /// Inherits the contract of the underlying `blas_as_hal`.
    pub unsafe fn blas_as_hal<A: hal::Api>(
        &self,
        blas: &CoreBlas,
    ) -> Option<impl Deref<Target = A::AccelerationStructure>> {
        unsafe { self.0.blas_as_hal::<A>(blas.id) }
    }

    /// Borrows the underlying `wgpu-hal` acceleration structure of a TLAS,
    /// if it belongs to backend `A`.
    ///
    /// # Safety
    ///
    /// Inherits the contract of the underlying `tlas_as_hal`.
    pub unsafe fn tlas_as_hal<A: hal::Api>(
        &self,
        tlas: &CoreTlas,
    ) -> Option<impl Deref<Target = A::AccelerationStructure>> {
        unsafe { self.0.tlas_as_hal::<A>(tlas.id) }
    }

    /// Produces a usage/leak report for the whole hub.
    pub fn generate_report(&self) -> wgc::global::GlobalReport {
        self.0.generate_report()
    }

    // Routes a classified error into the sink, then (after the lock is
    // released) invokes the user's uncaptured-error handler if one applies.
    // Cold/never-inlined: keeps the error path out of callers' hot code.
    #[cold]
    #[track_caller]
    #[inline(never)]
    fn handle_error_inner(
        &self,
        sink_mutex: &Mutex<ErrorSinkRaw>,
        error_type: ErrorType,
        source: ContextErrorSource,
        label: Label<'_>,
        fn_ident: &'static str,
    ) {
        let source: ErrorSource = Box::new(wgc::error::ContextError {
            fn_ident,
            source,
            label: label.unwrap_or_default().to_string(),
        });
        let final_error_handling = {
            let mut sink = sink_mutex.lock();
            // Lazy: the formatted description is only built for variants
            // that carry one.
            let description = || self.format_error(&*source);
            let error = match error_type {
                ErrorType::Internal => {
                    let description = description();
                    crate::Error::Internal {
                        source,
                        description,
                    }
                }
                ErrorType::OutOfMemory => crate::Error::OutOfMemory { source },
                ErrorType::Validation => {
                    let description = description();
                    crate::Error::Validation {
                        source,
                        description,
                    }
                }
                ErrorType::DeviceLost => return, // will be surfaced via callback
            };
            sink.handle_error_or_return_handler(error)
        };

        if let Some(f) = final_error_handling {
            // If the user has provided their own `uncaptured_handler` callback, invoke it now,
            // having released our lock on `sink_mutex`. See the comments on
            // `handle_error_or_return_handler` for details.
            f();
        }
    }

    // Classifies `source` via its `WebGpuError` impl and forwards it, with a
    // label, to `handle_error_inner`.
    #[inline]
    #[track_caller]
    fn handle_error(
        &self,
        sink_mutex: &Mutex<ErrorSinkRaw>,
        source: impl WebGpuError + WasmNotSendSync + 'static,
        label: Label<'_>,
        fn_ident: &'static str,
    ) {
        let error_type = source.webgpu_error_type();
        self.handle_error_inner(sink_mutex, error_type, Box::new(source), label, fn_ident)
    }

    // Same as `handle_error` but for call sites that have no resource label.
    #[inline]
    #[track_caller]
    fn handle_error_nolabel(
        &self,
        sink_mutex: &Mutex<ErrorSinkRaw>,
        source: impl WebGpuError + WasmNotSendSync + 'static,
        fn_ident: &'static str,
    ) {
        let error_type = source.webgpu_error_type();
        self.handle_error_inner(sink_mutex, error_type, Box::new(source), None, fn_ident)
    }

    // For errors that cannot be recovered from at this layer: panics with the
    // formatted error chain.
    #[track_caller]
    #[cold]
    fn handle_error_fatal(
        &self,
        cause: impl Error + WasmNotSendSync + 'static,
        operation: &'static str,
    ) -> ! {
        panic!("Error in {operation}: {f}", f = self.format_error(&cause));
    }

    // Renders an error and its `source()` chain as an indented tree,
    // flattening `wgc::error::MultiError` into its constituent errors.
    #[inline(never)]
    fn format_error(&self, err: &(dyn Error + 'static)) -> String {
        let mut output = String::new();
        let mut level = 1;

        fn print_tree(output: &mut String, level: &mut usize, e: &(dyn Error + 'static)) {
            let mut print = |e: &(dyn Error + 'static)| {
                use core::fmt::Write;
                writeln!(output, "{}{}", " ".repeat(*level * 2), e).unwrap();

                if let Some(e) = e.source() {
                    *level += 1;
                    print_tree(output, level, e);
                    *level -= 1;
                }
            };
            if let Some(multi) = e.downcast_ref::<wgc::error::MultiError>() {
                for e in multi.errors() {
                    print(e);
                }
            } else {
                print(e);
            }
        }

        print_tree(&mut output, &mut level, err);

        format!("Validation Error\n\nCaused by:\n{output}")
    }

    /// Borrows the underlying `wgpu-hal` queue, if it belongs to backend `A`.
    ///
    /// # Safety
    ///
    /// Inherits the contract of the underlying `queue_as_hal`.
    pub unsafe fn queue_as_hal<A: hal::Api>(
        &self,
        queue: &CoreQueue,
    ) -> Option<impl Deref<Target = A::Queue> + WasmNotSendSync> {
        unsafe { self.0.queue_as_hal::<A>(queue.id) }
    }
}
382
383fn map_buffer_copy_view(
384    view: crate::TexelCopyBufferInfo<'_>,
385) -> wgt::TexelCopyBufferInfo<wgc::id::BufferId> {
386    wgt::TexelCopyBufferInfo {
387        buffer: view.buffer.inner.as_core().id,
388        layout: view.layout,
389    }
390}
391
392fn map_texture_copy_view(
393    view: crate::TexelCopyTextureInfo<'_>,
394) -> wgt::TexelCopyTextureInfo<wgc::id::TextureId> {
395    wgt::TexelCopyTextureInfo {
396        texture: view.texture.inner.as_core().id,
397        mip_level: view.mip_level,
398        origin: view.origin,
399        aspect: view.aspect,
400    }
401}
402
403#[cfg_attr(not(webgl), expect(unused))]
404fn map_texture_tagged_copy_view(
405    view: crate::CopyExternalImageDestInfo<&api::Texture>,
406) -> wgt::CopyExternalImageDestInfo<wgc::id::TextureId> {
407    wgt::CopyExternalImageDestInfo {
408        texture: view.texture.inner.as_core().id,
409        mip_level: view.mip_level,
410        origin: view.origin,
411        aspect: view.aspect,
412        color_space: view.color_space,
413        premultiplied_alpha: view.premultiplied_alpha,
414    }
415}
416
417fn map_load_op<V: Copy>(load: &LoadOp<V>) -> LoadOp<Option<V>> {
418    match *load {
419        LoadOp::Clear(clear_value) => LoadOp::Clear(Some(clear_value)),
420        LoadOp::DontCare(token) => LoadOp::DontCare(token),
421        LoadOp::Load => LoadOp::Load,
422    }
423}
424
425fn map_pass_channel<V: Copy>(ops: Option<&Operations<V>>) -> wgc::command::PassChannel<Option<V>> {
426    match ops {
427        Some(&Operations { load, store }) => wgc::command::PassChannel {
428            load_op: Some(map_load_op(&load)),
429            store_op: Some(store),
430            read_only: false,
431        },
432        None => wgc::command::PassChannel {
433            load_op: None,
434            store_op: None,
435            read_only: true,
436        },
437    }
438}
439
/// `wgpu-core` handle to a surface, plus the state needed to present to it.
#[derive(Debug)]
pub struct CoreSurface {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::SurfaceId,
    /// Configured device is needed to know which backend
    /// code to execute when acquiring a new frame.
    configured_device: Mutex<Option<wgc::id::DeviceId>>,
    /// The error sink with which to report errors.
    /// `None` if the surface has not been configured.
    error_sink: Mutex<Option<ErrorSink>>,
}
451
/// `wgpu-core` handle to an adapter.
#[derive(Debug)]
pub struct CoreAdapter {
    pub(crate) context: ContextWgpuCore,
    pub(crate) id: wgc::id::AdapterId,
}
457
/// `wgpu-core` handle to a device, with the error sink shared with its queue
/// and the features it was requested with.
#[derive(Debug)]
pub struct CoreDevice {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::DeviceId,
    error_sink: ErrorSink,
    features: Features,
}
465
/// `wgpu-core` handle to a buffer.
#[derive(Debug)]
pub struct CoreBuffer {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BufferId,
    error_sink: ErrorSink,
}
472
/// `wgpu-core` handle to a shader module, retaining the compilation info
/// gathered at creation time.
#[derive(Debug)]
pub struct CoreShaderModule {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::ShaderModuleId,
    compilation_info: CompilationInfo,
}
479
/// `wgpu-core` handle to a bind group layout.
#[derive(Debug)]
pub struct CoreBindGroupLayout {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BindGroupLayoutId,
}
485
/// `wgpu-core` handle to a bind group.
#[derive(Debug)]
pub struct CoreBindGroup {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BindGroupId,
}
491
/// `wgpu-core` handle to a texture.
#[derive(Debug)]
pub struct CoreTexture {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::TextureId,
    error_sink: ErrorSink,
}
498
/// `wgpu-core` handle to a texture view.
#[derive(Debug)]
pub struct CoreTextureView {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::TextureViewId,
}
504
/// `wgpu-core` handle to an external texture.
#[derive(Debug)]
pub struct CoreExternalTexture {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::ExternalTextureId,
}
510
/// `wgpu-core` handle to a sampler.
#[derive(Debug)]
pub struct CoreSampler {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::SamplerId,
}
516
/// `wgpu-core` handle to a query set.
#[derive(Debug)]
pub struct CoreQuerySet {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::QuerySetId,
}
522
/// `wgpu-core` handle to a pipeline layout.
#[derive(Debug)]
pub struct CorePipelineLayout {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::PipelineLayoutId,
}
528
/// `wgpu-core` handle to a pipeline cache.
#[derive(Debug)]
pub struct CorePipelineCache {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::PipelineCacheId,
}
534
/// `wgpu-core` handle to a finished command buffer.
#[derive(Debug)]
pub struct CoreCommandBuffer {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::CommandBufferId,
}
540
/// In-progress render bundle recording. Owns the `wgpu-core` encoder state
/// directly rather than referring to it by id.
#[derive(Debug)]
pub struct CoreRenderBundleEncoder {
    pub(crate) context: ContextWgpuCore,
    encoder: wgc::command::RenderBundleEncoder,
    // Identity used only for equality/ordering/hashing of encoders.
    id: crate::cmp::Identifier,
}
547
/// `wgpu-core` handle to a finished render bundle.
#[derive(Debug)]
pub struct CoreRenderBundle {
    context: ContextWgpuCore,
    id: wgc::id::RenderBundleId,
}
553
/// `wgpu-core` handle to a queue; shares its error sink with its device.
#[derive(Debug)]
pub struct CoreQueue {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::QueueId,
    error_sink: ErrorSink,
}
560
/// `wgpu-core` handle to a compute pipeline.
#[derive(Debug)]
pub struct CoreComputePipeline {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::ComputePipelineId,
    error_sink: ErrorSink,
}
567
/// `wgpu-core` handle to a render pipeline.
#[derive(Debug)]
pub struct CoreRenderPipeline {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::RenderPipelineId,
    error_sink: ErrorSink,
}
574
/// In-progress compute pass; owns the `wgpu-core` pass state directly.
#[derive(Debug)]
pub struct CoreComputePass {
    pub(crate) context: ContextWgpuCore,
    pass: wgc::command::ComputePass,
    error_sink: ErrorSink,
    // Identity used only for equality/ordering/hashing of passes.
    id: crate::cmp::Identifier,
}
582
/// In-progress render pass; owns the `wgpu-core` pass state directly.
#[derive(Debug)]
pub struct CoreRenderPass {
    pub(crate) context: ContextWgpuCore,
    pass: wgc::command::RenderPass,
    error_sink: ErrorSink,
    // Identity used only for equality/ordering/hashing of passes.
    id: crate::cmp::Identifier,
}
590
/// `wgpu-core` handle to a command encoder.
#[derive(Debug)]
pub struct CoreCommandEncoder {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::CommandEncoderId,
    error_sink: ErrorSink,
}
597
/// `wgpu-core` handle to a bottom-level acceleration structure.
#[derive(Debug)]
pub struct CoreBlas {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BlasId,
    error_sink: ErrorSink,
}
604
/// `wgpu-core` handle to a top-level acceleration structure.
#[derive(Debug)]
pub struct CoreTlas {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::TlasId,
    // error_sink: ErrorSink,
}
611
/// Per-frame presentation state returned alongside an acquired surface
/// texture.
#[derive(Debug)]
pub struct CoreSurfaceOutputDetail {
    context: ContextWgpuCore,
    surface_id: wgc::id::SurfaceId,
    error_sink: ErrorSink,
}
618
/// Shared, mutex-guarded error sink handed to every resource that reports errors.
type ErrorSink = Arc<Mutex<ErrorSinkRaw>>;
620
/// One pushed error scope: captures the first error matching `filter`.
struct ErrorScope {
    // First captured error, if any; later matching errors are dropped.
    error: Option<crate::Error>,
    filter: crate::ErrorFilter,
}
625
/// Mutable error-routing state behind an [`ErrorSink`] mutex: per-thread
/// error-scope stacks plus the optional uncaptured-error handler.
struct ErrorSinkRaw {
    scopes: HashMap<thread_id::ThreadId, Vec<ErrorScope>>,
    uncaptured_handler: Option<Arc<dyn crate::UncapturedErrorHandler>>,
}
630
631impl ErrorSinkRaw {
632    fn new() -> ErrorSinkRaw {
633        ErrorSinkRaw {
634            scopes: HashMap::new(),
635            uncaptured_handler: None,
636        }
637    }
638
639    /// Deliver the error to
640    ///
641    /// * the innermost error scope, if any, or
642    /// * the uncaptured error handler, if there is one, or
643    /// * [`default_error_handler()`].
644    ///
645    /// If a closure is returned, the caller should call it immediately after dropping the
646    /// [`ErrorSink`] mutex guard. This makes sure that the user callback is not called with
647    /// a wgpu mutex held.
648    #[track_caller]
649    #[must_use]
650    fn handle_error_or_return_handler(&mut self, err: crate::Error) -> Option<impl FnOnce()> {
651        let filter = match err {
652            crate::Error::OutOfMemory { .. } => crate::ErrorFilter::OutOfMemory,
653            crate::Error::Validation { .. } => crate::ErrorFilter::Validation,
654            crate::Error::Internal { .. } => crate::ErrorFilter::Internal,
655        };
656        let thread_id = thread_id::ThreadId::current();
657        let scopes = self.scopes.entry(thread_id).or_default();
658        match scopes.iter_mut().rev().find(|scope| scope.filter == filter) {
659            Some(scope) => {
660                if scope.error.is_none() {
661                    scope.error = Some(err);
662                }
663                None
664            }
665            None => {
666                if let Some(custom_handler) = &self.uncaptured_handler {
667                    let custom_handler = Arc::clone(custom_handler);
668                    Some(move || (custom_handler)(err))
669                } else {
670                    // direct call preserves #[track_caller] where dyn can't
671                    default_error_handler(err)
672                }
673            }
674        }
675    }
676}
677
678impl fmt::Debug for ErrorSinkRaw {
679    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
680        write!(f, "ErrorSink")
681    }
682}
683
/// Fallback for errors with no scope and no user handler: log, then abort the
/// program via panic. `#[track_caller]` keeps the panic location at the
/// original error site.
#[track_caller]
fn default_error_handler(err: crate::Error) -> ! {
    log::error!("Handling wgpu errors as fatal by default");
    panic!("wgpu error: {err}\n");
}
689
// Lowers shader-module creation failures into WebGPU-style compilation info:
// front-end parse/validation errors keep their source locations, device-level
// failures produce no messages, and anything else becomes a single
// location-less error message.
impl From<CreateShaderModuleError> for CompilationInfo {
    fn from(value: CreateShaderModuleError) -> Self {
        match value {
            #[cfg(feature = "wgsl")]
            CreateShaderModuleError::Parsing(v) => v.into(),
            #[cfg(feature = "glsl")]
            CreateShaderModuleError::ParsingGlsl(v) => v.into(),
            #[cfg(feature = "spirv")]
            CreateShaderModuleError::ParsingSpirV(v) => v.into(),
            CreateShaderModuleError::Validation(v) => v.into(),
            // Device errors are reported through the error sink, and are not compilation errors.
            // Same goes for native shader module generation errors.
            CreateShaderModuleError::Device(_) | CreateShaderModuleError::Generation => {
                CompilationInfo {
                    messages: Vec::new(),
                }
            }
            // Everything else is an error message without location information.
            _ => CompilationInfo {
                messages: vec![CompilationMessage {
                    message: value.to_string(),
                    message_type: CompilationMessageType::Error,
                    location: None,
                }],
            },
        }
    }
}
718
/// Staging buffer used by `Queue::write_buffer_with`, with its mapped range.
#[derive(Debug)]
pub struct CoreQueueWriteBuffer {
    buffer_id: wgc::id::StagingBufferId,
    mapping: CoreBufferMappedRange,
}
724
/// Raw pointer + length view of a mapped buffer region.
#[derive(Debug)]
pub struct CoreBufferMappedRange {
    ptr: NonNull<u8>,
    // Length in bytes of the mapped region starting at `ptr`.
    size: usize,
}
730
// SAFETY: NOTE(review) — these impls assert the raw mapping pointer may be
// shared/sent across threads while the buffer stays mapped; confirm against
// wgpu-core's mapping rules before relying on this elsewhere.
#[cfg(send_sync)]
unsafe impl Send for CoreBufferMappedRange {}
#[cfg(send_sync)]
unsafe impl Sync for CoreBufferMappedRange {}
735
impl Drop for CoreBufferMappedRange {
    fn drop(&mut self) {
        // Intentionally left blank so that `BufferMappedRange` still
        // implements `Drop`, to match the web backend
    }
}
742
// Identity semantics for Eq/Ord/Hash: the context compares by the address of
// its shared `Arc`; every handle type compares by its wgpu-core id (or, for
// types without an id, another uniquely-identifying field).
crate::cmp::impl_eq_ord_hash_arc_address!(ContextWgpuCore => .0);
crate::cmp::impl_eq_ord_hash_proxy!(CoreAdapter => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreDevice => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreQueue => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreShaderModule => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBindGroupLayout => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBindGroup => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreTextureView => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreSampler => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBuffer => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreTexture => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreExternalTexture => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBlas => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreTlas => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreQuerySet => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CorePipelineLayout => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderPipeline => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreComputePipeline => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CorePipelineCache => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreCommandEncoder => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreComputePass => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderPass => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreCommandBuffer => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderBundleEncoder => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderBundle => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreSurface => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreSurfaceOutputDetail => .surface_id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreQueueWriteBuffer => .mapping.ptr);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBufferMappedRange => .ptr);
772
impl dispatch::InstanceInterface for ContextWgpuCore {
    /// Creates a fresh `wgpu-core` global for this instance descriptor.
    fn new(desc: wgt::InstanceDescriptor) -> Self
    where
        Self: Sized,
    {
        Self(Arc::new(wgc::global::Global::new("wgpu", desc, None)))
    }

    /// Creates a surface for the given raw target, dispatching to the
    /// platform-specific `wgpu-core` entry point for each target kind.
    ///
    /// # Safety
    ///
    /// The raw handles/pointers in `target` must be valid for the duration of
    /// the surface, per the contract of the corresponding `wgpu-core` call.
    unsafe fn create_surface(
        &self,
        target: crate::api::SurfaceTargetUnsafe,
    ) -> Result<dispatch::DispatchSurface, crate::CreateSurfaceError> {
        let id = match target {
            SurfaceTargetUnsafe::RawHandle {
                raw_display_handle,
                raw_window_handle,
            } => unsafe {
                self.0
                    .instance_create_surface(raw_display_handle, raw_window_handle, None)
            },

            #[cfg(all(unix, not(target_vendor = "apple"), not(target_family = "wasm")))]
            SurfaceTargetUnsafe::Drm {
                fd,
                plane,
                connector_id,
                width,
                height,
                refresh_rate,
            } => unsafe {
                self.0.instance_create_surface_from_drm(
                    fd,
                    plane,
                    connector_id,
                    width,
                    height,
                    refresh_rate,
                    None,
                )
            },

            #[cfg(metal)]
            SurfaceTargetUnsafe::CoreAnimationLayer(layer) => unsafe {
                self.0.instance_create_surface_metal(layer, None)
            },

            #[cfg(dx12)]
            SurfaceTargetUnsafe::CompositionVisual(visual) => unsafe {
                self.0.instance_create_surface_from_visual(visual, None)
            },

            #[cfg(dx12)]
            SurfaceTargetUnsafe::SurfaceHandle(surface_handle) => unsafe {
                self.0
                    .instance_create_surface_from_surface_handle(surface_handle, None)
            },

            #[cfg(dx12)]
            SurfaceTargetUnsafe::SwapChainPanel(swap_chain_panel) => unsafe {
                self.0
                    .instance_create_surface_from_swap_chain_panel(swap_chain_panel, None)
            },
        }?;

        // Device and error sink are filled in when the surface is configured.
        Ok(CoreSurface {
            context: self.clone(),
            id,
            configured_device: Mutex::default(),
            error_sink: Mutex::default(),
        }
        .into())
    }

    /// Requests an adapter; the returned future is already resolved, since
    /// wgpu-core adapter selection is synchronous.
    fn request_adapter(
        &self,
        options: &crate::api::RequestAdapterOptions<'_, '_>,
    ) -> Pin<Box<dyn dispatch::RequestAdapterFuture>> {
        let id = self.0.request_adapter(
            &wgc::instance::RequestAdapterOptions {
                power_preference: options.power_preference,
                force_fallback_adapter: options.force_fallback_adapter,
                compatible_surface: options
                    .compatible_surface
                    .map(|surface| surface.inner.as_core().id),
            },
            wgt::Backends::all(),
            None,
        );
        let adapter = id.map(|id| {
            let core = CoreAdapter {
                context: self.clone(),
                id,
            };
            let generic: dispatch::DispatchAdapter = core.into();
            generic
        });
        Box::pin(ready(adapter))
    }

    /// Polls every device in the hub; `true` means all queues are empty.
    /// Polling failures are treated as fatal (panic).
    fn poll_all_devices(&self, force_wait: bool) -> bool {
        match self.0.poll_all_devices(force_wait) {
            Ok(all_queue_empty) => all_queue_empty,
            Err(err) => self.handle_error_fatal(err, "Instance::poll_all_devices"),
        }
    }

    /// Maps naga's implemented WGSL language extensions onto the public
    /// `WgslLanguageFeatures` bitflags, one by one (exhaustively, so a new
    /// naga extension forces an update here).
    #[cfg(feature = "wgsl")]
    fn wgsl_language_features(&self) -> crate::WgslLanguageFeatures {
        use wgc::naga::front::wgsl::ImplementedLanguageExtension;
        ImplementedLanguageExtension::all().iter().copied().fold(
            crate::WgslLanguageFeatures::empty(),
            |acc, wle| {
                acc | match wle {
                    ImplementedLanguageExtension::ReadOnlyAndReadWriteStorageTextures => {
                        crate::WgslLanguageFeatures::ReadOnlyAndReadWriteStorageTextures
                    }
                    ImplementedLanguageExtension::Packed4x8IntegerDotProduct => {
                        crate::WgslLanguageFeatures::Packed4x8IntegerDotProduct
                    }
                    ImplementedLanguageExtension::PointerCompositeAccess => {
                        crate::WgslLanguageFeatures::PointerCompositeAccess
                    }
                }
            },
        )
    }

    /// Enumerates adapter ids (via the inherent method) and wraps each in a
    /// dispatch handle; the returned future is already resolved.
    fn enumerate_adapters(
        &self,
        backends: crate::Backends,
    ) -> Pin<Box<dyn dispatch::EnumerateAdapterFuture>> {
        let adapters: Vec<DispatchAdapter> = self
            .enumerate_adapters(backends)
            .into_iter()
            .map(|adapter| {
                let core = crate::backend::wgpu_core::CoreAdapter {
                    context: self.clone(),
                    id: adapter,
                };
                core.into()
            })
            .collect();
        Box::pin(ready(adapters))
    }
}
918
impl dispatch::AdapterInterface for CoreAdapter {
    /// Opens a device (and queue) on this adapter. The future is already
    /// resolved; device and queue share one newly created error sink.
    fn request_device(
        &self,
        desc: &crate::DeviceDescriptor<'_>,
    ) -> Pin<Box<dyn dispatch::RequestDeviceFuture>> {
        let res = self.context.0.adapter_request_device(
            self.id,
            &desc.map_label(|l| l.map(Borrowed)),
            None,
            None,
        );
        let (device_id, queue_id) = match res {
            Ok(ids) => ids,
            Err(err) => {
                return Box::pin(ready(Err(err.into())));
            }
        };
        let error_sink = Arc::new(Mutex::new(ErrorSinkRaw::new()));
        let device = CoreDevice {
            context: self.context.clone(),
            id: device_id,
            error_sink: error_sink.clone(),
            features: desc.required_features,
        };
        let queue = CoreQueue {
            context: self.context.clone(),
            id: queue_id,
            error_sink,
        };
        Box::pin(ready(Ok((device.into(), queue.into()))))
    }

    /// Whether this adapter can present to the given surface.
    fn is_surface_supported(&self, surface: &dispatch::DispatchSurface) -> bool {
        let surface = surface.as_core();

        self.context
            .0
            .adapter_is_surface_supported(self.id, surface.id)
    }

    /// Features supported by this adapter.
    fn features(&self) -> crate::Features {
        self.context.0.adapter_features(self.id)
    }

    /// Best limits supported by this adapter.
    fn limits(&self) -> crate::Limits {
        self.context.0.adapter_limits(self.id)
    }

    /// Downlevel capability flags of this adapter.
    fn downlevel_capabilities(&self) -> crate::DownlevelCapabilities {
        self.context.0.adapter_downlevel_capabilities(self.id)
    }

    /// Driver/device identification info for this adapter.
    fn get_info(&self) -> crate::AdapterInfo {
        self.context.0.adapter_get_info(self.id)
    }

    /// Per-format feature flags for `format` on this adapter.
    fn get_texture_format_features(
        &self,
        format: crate::TextureFormat,
    ) -> crate::TextureFormatFeatures {
        self.context
            .0
            .adapter_get_texture_format_features(self.id, format)
    }

    /// Current presentation timestamp from the adapter's clock.
    fn get_presentation_timestamp(&self) -> crate::PresentationTimestamp {
        self.context.0.adapter_get_presentation_timestamp(self.id)
    }

    /// Supported cooperative-matrix configurations for this adapter.
    fn cooperative_matrix_properties(&self) -> Vec<crate::wgt::CooperativeMatrixProperties> {
        self.context
            .0
            .adapter_cooperative_matrix_properties(self.id)
    }
}
994
995impl Drop for CoreAdapter {
996    fn drop(&mut self) {
997        self.context.0.adapter_drop(self.id)
998    }
999}
1000
1001impl dispatch::DeviceInterface for CoreDevice {
1002    fn features(&self) -> crate::Features {
1003        self.context.0.device_features(self.id)
1004    }
1005
1006    fn limits(&self) -> crate::Limits {
1007        self.context.0.device_limits(self.id)
1008    }
1009
1010    fn adapter_info(&self) -> crate::AdapterInfo {
1011        self.context.0.device_adapter_info(self.id)
1012    }
1013
    // If we have no way to create a shader module, we can't return one, and so most of the function is unreachable.
    #[cfg_attr(
        not(any(
            feature = "spirv",
            feature = "glsl",
            feature = "wgsl",
            feature = "naga-ir"
        )),
        expect(unused)
    )]
    /// Creates a shader module from any of the feature-gated source kinds
    /// (SPIR-V, GLSL, WGSL, naga IR), routing creation errors through the
    /// shared error sink and capturing them as `CompilationInfo`.
    fn create_shader_module(
        &self,
        desc: crate::ShaderModuleDescriptor<'_>,
        shader_bound_checks: wgt::ShaderRuntimeChecks,
    ) -> dispatch::DispatchShaderModule {
        let descriptor = wgc::pipeline::ShaderModuleDescriptor {
            label: desc.label.map(Borrowed),
            runtime_checks: shader_bound_checks,
        };
        // Translate the frontend-agnostic `ShaderSource` into the wgpu-core
        // source enum; each arm only exists when its cargo feature is enabled.
        let source = match desc.source {
            #[cfg(feature = "spirv")]
            ShaderSource::SpirV(ref spv) => {
                // Parse the given shader code and store its representation.
                let options = naga::front::spv::Options {
                    adjust_coordinate_space: false, // we require NDC_Y_UP feature
                    strict_capabilities: true,
                    block_ctx_dump_prefix: None,
                };
                wgc::pipeline::ShaderModuleSource::SpirV(Borrowed(spv), options)
            }
            #[cfg(feature = "glsl")]
            ShaderSource::Glsl {
                ref shader,
                stage,
                defines,
            } => {
                // GLSL defines are passed to naga as owned (name, value) strings.
                let options = naga::front::glsl::Options {
                    stage,
                    defines: defines
                        .iter()
                        .map(|&(key, value)| (String::from(key), String::from(value)))
                        .collect(),
                };
                wgc::pipeline::ShaderModuleSource::Glsl(Borrowed(shader), options)
            }
            #[cfg(feature = "wgsl")]
            ShaderSource::Wgsl(ref code) => wgc::pipeline::ShaderModuleSource::Wgsl(Borrowed(code)),
            #[cfg(feature = "naga-ir")]
            ShaderSource::Naga(module) => wgc::pipeline::ShaderModuleSource::Naga(module),
            // `Dummy` is a placeholder variant that must never reach a backend.
            ShaderSource::Dummy(_) => panic!("found `ShaderSource::Dummy`"),
        };
        let (id, error) =
            self.context
                .0
                .device_create_shader_module(self.id, &descriptor, source, None);
        // A failed creation is both reported through the error sink and
        // preserved as compilation messages on the returned module.
        let compilation_info = match error {
            Some(cause) => {
                self.context.handle_error(
                    &self.error_sink,
                    cause.clone(),
                    desc.label,
                    "Device::create_shader_module",
                );
                CompilationInfo::from(cause)
            }
            None => CompilationInfo { messages: vec![] },
        };

        CoreShaderModule {
            context: self.context.clone(),
            id,
            compilation_info,
        }
        .into()
    }
1089
1090    unsafe fn create_shader_module_passthrough(
1091        &self,
1092        desc: &crate::ShaderModuleDescriptorPassthrough<'_>,
1093    ) -> dispatch::DispatchShaderModule {
1094        let desc = desc.map_label(|l| l.map(Cow::from));
1095        let (id, error) = unsafe {
1096            self.context
1097                .0
1098                .device_create_shader_module_passthrough(self.id, &desc, None)
1099        };
1100
1101        let compilation_info = match error {
1102            Some(cause) => {
1103                self.context.handle_error(
1104                    &self.error_sink,
1105                    cause.clone(),
1106                    desc.label.as_deref(),
1107                    "Device::create_shader_module_passthrough",
1108                );
1109                CompilationInfo::from(cause)
1110            }
1111            None => CompilationInfo { messages: vec![] },
1112        };
1113
1114        CoreShaderModule {
1115            context: self.context.clone(),
1116            id,
1117            compilation_info,
1118        }
1119        .into()
1120    }
1121
1122    fn create_bind_group_layout(
1123        &self,
1124        desc: &crate::BindGroupLayoutDescriptor<'_>,
1125    ) -> dispatch::DispatchBindGroupLayout {
1126        let descriptor = wgc::binding_model::BindGroupLayoutDescriptor {
1127            label: desc.label.map(Borrowed),
1128            entries: Borrowed(desc.entries),
1129        };
1130        let (id, error) =
1131            self.context
1132                .0
1133                .device_create_bind_group_layout(self.id, &descriptor, None);
1134        if let Some(cause) = error {
1135            self.context.handle_error(
1136                &self.error_sink,
1137                cause,
1138                desc.label,
1139                "Device::create_bind_group_layout",
1140            );
1141        }
1142        CoreBindGroupLayout {
1143            context: self.context.clone(),
1144            id,
1145        }
1146        .into()
1147    }
1148
    /// Creates a bind group, translating user-facing binding resources into
    /// wgpu-core ids.
    ///
    /// Arrayed bindings (texture-view/sampler/buffer arrays) need borrowed
    /// slices of ids, so their ids are gathered into flat `Vec`s in a first
    /// pass; the second pass then carves per-entry sub-slices off those Vecs
    /// in the same order. The entry order of both passes must therefore match.
    fn create_bind_group(
        &self,
        desc: &crate::BindGroupDescriptor<'_>,
    ) -> dispatch::DispatchBindGroup {
        use wgc::binding_model as bm;

        // Pass 1a: flatten all arrayed texture views / samplers into id Vecs.
        // Only relevant when the corresponding feature is enabled.
        let mut arrayed_texture_views = Vec::new();
        let mut arrayed_samplers = Vec::new();
        if self.features.contains(Features::TEXTURE_BINDING_ARRAY) {
            // gather all the array view IDs first
            for entry in desc.entries.iter() {
                if let BindingResource::TextureViewArray(array) = entry.resource {
                    arrayed_texture_views.extend(array.iter().map(|view| view.inner.as_core().id));
                }
                if let BindingResource::SamplerArray(array) = entry.resource {
                    arrayed_samplers.extend(array.iter().map(|sampler| sampler.inner.as_core().id));
                }
            }
        }
        // Cursors into the flattened id Vecs, consumed front-to-back below.
        let mut remaining_arrayed_texture_views = &arrayed_texture_views[..];
        let mut remaining_arrayed_samplers = &arrayed_samplers[..];

        // Pass 1b: same flattening for buffer binding arrays.
        let mut arrayed_buffer_bindings = Vec::new();
        if self.features.contains(Features::BUFFER_BINDING_ARRAY) {
            // gather all the buffers first
            for entry in desc.entries.iter() {
                if let BindingResource::BufferArray(array) = entry.resource {
                    arrayed_buffer_bindings.extend(array.iter().map(|binding| bm::BufferBinding {
                        buffer: binding.buffer.inner.as_core().id,
                        offset: binding.offset,
                        size: binding.size.map(wgt::BufferSize::get),
                    }));
                }
            }
        }
        let mut remaining_arrayed_buffer_bindings = &arrayed_buffer_bindings[..];

        // Pass 2: build the wgpu-core entries. Array resources borrow the
        // next `array.len()` ids from the matching cursor and advance it.
        let entries = desc
            .entries
            .iter()
            .map(|entry| bm::BindGroupEntry {
                binding: entry.binding,
                resource: match entry.resource {
                    BindingResource::Buffer(BufferBinding {
                        buffer,
                        offset,
                        size,
                    }) => bm::BindingResource::Buffer(bm::BufferBinding {
                        buffer: buffer.inner.as_core().id,
                        offset,
                        size: size.map(wgt::BufferSize::get),
                    }),
                    BindingResource::BufferArray(array) => {
                        let slice = &remaining_arrayed_buffer_bindings[..array.len()];
                        remaining_arrayed_buffer_bindings =
                            &remaining_arrayed_buffer_bindings[array.len()..];
                        bm::BindingResource::BufferArray(Borrowed(slice))
                    }
                    BindingResource::Sampler(sampler) => {
                        bm::BindingResource::Sampler(sampler.inner.as_core().id)
                    }
                    BindingResource::SamplerArray(array) => {
                        let slice = &remaining_arrayed_samplers[..array.len()];
                        remaining_arrayed_samplers = &remaining_arrayed_samplers[array.len()..];
                        bm::BindingResource::SamplerArray(Borrowed(slice))
                    }
                    BindingResource::TextureView(texture_view) => {
                        bm::BindingResource::TextureView(texture_view.inner.as_core().id)
                    }
                    BindingResource::TextureViewArray(array) => {
                        let slice = &remaining_arrayed_texture_views[..array.len()];
                        remaining_arrayed_texture_views =
                            &remaining_arrayed_texture_views[array.len()..];
                        bm::BindingResource::TextureViewArray(Borrowed(slice))
                    }
                    BindingResource::AccelerationStructure(acceleration_structure) => {
                        bm::BindingResource::AccelerationStructure(
                            acceleration_structure.inner.as_core().id,
                        )
                    }
                    BindingResource::ExternalTexture(external_texture) => {
                        bm::BindingResource::ExternalTexture(external_texture.inner.as_core().id)
                    }
                },
            })
            .collect::<Vec<_>>();
        let descriptor = bm::BindGroupDescriptor {
            label: desc.label.as_ref().map(|label| Borrowed(&label[..])),
            layout: desc.layout.inner.as_core().id,
            entries: Borrowed(&entries),
        };

        let (id, error) = self
            .context
            .0
            .device_create_bind_group(self.id, &descriptor, None);
        if let Some(cause) = error {
            self.context.handle_error(
                &self.error_sink,
                cause,
                desc.label,
                "Device::create_bind_group",
            );
        }
        CoreBindGroup {
            context: self.context.clone(),
            id,
        }
        .into()
    }
1259
1260    fn create_pipeline_layout(
1261        &self,
1262        desc: &crate::PipelineLayoutDescriptor<'_>,
1263    ) -> dispatch::DispatchPipelineLayout {
1264        // Limit is always less or equal to hal::MAX_BIND_GROUPS, so this is always right
1265        // Guards following ArrayVec
1266        assert!(
1267            desc.bind_group_layouts.len() <= wgc::MAX_BIND_GROUPS,
1268            "Bind group layout count {} exceeds device bind group limit {}",
1269            desc.bind_group_layouts.len(),
1270            wgc::MAX_BIND_GROUPS
1271        );
1272
1273        let temp_layouts = desc
1274            .bind_group_layouts
1275            .iter()
1276            .map(|bgl| bgl.inner.as_core().id)
1277            .collect::<ArrayVec<_, { wgc::MAX_BIND_GROUPS }>>();
1278        let descriptor = wgc::binding_model::PipelineLayoutDescriptor {
1279            label: desc.label.map(Borrowed),
1280            bind_group_layouts: Borrowed(&temp_layouts),
1281            immediate_size: desc.immediate_size,
1282        };
1283
1284        let (id, error) = self
1285            .context
1286            .0
1287            .device_create_pipeline_layout(self.id, &descriptor, None);
1288        if let Some(cause) = error {
1289            self.context.handle_error(
1290                &self.error_sink,
1291                cause,
1292                desc.label,
1293                "Device::create_pipeline_layout",
1294            );
1295        }
1296        CorePipelineLayout {
1297            context: self.context.clone(),
1298            id,
1299        }
1300        .into()
1301    }
1302
    /// Creates a render pipeline, translating the user-facing descriptor
    /// (vertex buffers, pipeline constants, optional fragment state) into the
    /// wgpu-core form. Errors are routed through the shared error sink;
    /// internal shader-translation failures are additionally logged as bugs.
    fn create_render_pipeline(
        &self,
        desc: &crate::RenderPipelineDescriptor<'_>,
    ) -> dispatch::DispatchRenderPipeline {
        use wgc::pipeline as pipe;

        // Vertex buffer layouts are re-built so attribute slices can be
        // borrowed rather than copied.
        let vertex_buffers: ArrayVec<_, { wgc::MAX_VERTEX_BUFFERS }> = desc
            .vertex
            .buffers
            .iter()
            .map(|vbuf| pipe::VertexBufferLayout {
                array_stride: vbuf.array_stride,
                step_mode: vbuf.step_mode,
                attributes: Borrowed(vbuf.attributes),
            })
            .collect();

        // Pipeline-overridable constants become owned (name, value) pairs.
        let vert_constants = desc
            .vertex
            .compilation_options
            .constants
            .iter()
            .map(|&(key, value)| (String::from(key), value))
            .collect();

        let descriptor = pipe::RenderPipelineDescriptor {
            label: desc.label.map(Borrowed),
            layout: desc.layout.map(|layout| layout.inner.as_core().id),
            vertex: pipe::VertexState {
                stage: pipe::ProgrammableStageDescriptor {
                    module: desc.vertex.module.inner.as_core().id,
                    entry_point: desc.vertex.entry_point.map(Borrowed),
                    constants: vert_constants,
                    zero_initialize_workgroup_memory: desc
                        .vertex
                        .compilation_options
                        .zero_initialize_workgroup_memory,
                },
                buffers: Borrowed(&vertex_buffers),
            },
            primitive: desc.primitive,
            depth_stencil: desc.depth_stencil.clone(),
            multisample: desc.multisample,
            // The fragment stage is optional (e.g. depth-only pipelines).
            fragment: desc.fragment.as_ref().map(|frag| {
                let frag_constants = frag
                    .compilation_options
                    .constants
                    .iter()
                    .map(|&(key, value)| (String::from(key), value))
                    .collect();
                pipe::FragmentState {
                    stage: pipe::ProgrammableStageDescriptor {
                        module: frag.module.inner.as_core().id,
                        entry_point: frag.entry_point.map(Borrowed),
                        constants: frag_constants,
                        zero_initialize_workgroup_memory: frag
                            .compilation_options
                            .zero_initialize_workgroup_memory,
                    },
                    targets: Borrowed(frag.targets),
                }
            }),
            multiview_mask: desc.multiview_mask,
            cache: desc.cache.map(|cache| cache.inner.as_core().id),
        };

        let (id, error) = self
            .context
            .0
            .device_create_render_pipeline(self.id, &descriptor, None);
        if let Some(cause) = error {
            // Internal translation errors indicate a wgpu bug, not user error;
            // surface them loudly before normal error handling.
            if let wgc::pipeline::CreateRenderPipelineError::Internal { stage, ref error } = cause {
                log::error!("Shader translation error for stage {stage:?}: {error}");
                log::error!("Please report it to https://github.com/gfx-rs/wgpu");
            }
            self.context.handle_error(
                &self.error_sink,
                cause,
                desc.label,
                "Device::create_render_pipeline",
            );
        }
        CoreRenderPipeline {
            context: self.context.clone(),
            id,
            error_sink: Arc::clone(&self.error_sink),
        }
        .into()
    }
1392
1393    fn create_mesh_pipeline(
1394        &self,
1395        desc: &crate::MeshPipelineDescriptor<'_>,
1396    ) -> dispatch::DispatchRenderPipeline {
1397        use wgc::pipeline as pipe;
1398
1399        let mesh_constants = desc
1400            .mesh
1401            .compilation_options
1402            .constants
1403            .iter()
1404            .map(|&(key, value)| (String::from(key), value))
1405            .collect();
1406        let descriptor = pipe::MeshPipelineDescriptor {
1407            label: desc.label.map(Borrowed),
1408            task: desc.task.as_ref().map(|task| {
1409                let task_constants = task
1410                    .compilation_options
1411                    .constants
1412                    .iter()
1413                    .map(|&(key, value)| (String::from(key), value))
1414                    .collect();
1415                pipe::TaskState {
1416                    stage: pipe::ProgrammableStageDescriptor {
1417                        module: task.module.inner.as_core().id,
1418                        entry_point: task.entry_point.map(Borrowed),
1419                        constants: task_constants,
1420                        zero_initialize_workgroup_memory: desc
1421                            .mesh
1422                            .compilation_options
1423                            .zero_initialize_workgroup_memory,
1424                    },
1425                }
1426            }),
1427            mesh: pipe::MeshState {
1428                stage: pipe::ProgrammableStageDescriptor {
1429                    module: desc.mesh.module.inner.as_core().id,
1430                    entry_point: desc.mesh.entry_point.map(Borrowed),
1431                    constants: mesh_constants,
1432                    zero_initialize_workgroup_memory: desc
1433                        .mesh
1434                        .compilation_options
1435                        .zero_initialize_workgroup_memory,
1436                },
1437            },
1438            layout: desc.layout.map(|layout| layout.inner.as_core().id),
1439            primitive: desc.primitive,
1440            depth_stencil: desc.depth_stencil.clone(),
1441            multisample: desc.multisample,
1442            fragment: desc.fragment.as_ref().map(|frag| {
1443                let frag_constants = frag
1444                    .compilation_options
1445                    .constants
1446                    .iter()
1447                    .map(|&(key, value)| (String::from(key), value))
1448                    .collect();
1449                pipe::FragmentState {
1450                    stage: pipe::ProgrammableStageDescriptor {
1451                        module: frag.module.inner.as_core().id,
1452                        entry_point: frag.entry_point.map(Borrowed),
1453                        constants: frag_constants,
1454                        zero_initialize_workgroup_memory: frag
1455                            .compilation_options
1456                            .zero_initialize_workgroup_memory,
1457                    },
1458                    targets: Borrowed(frag.targets),
1459                }
1460            }),
1461            multiview: desc.multiview,
1462            cache: desc.cache.map(|cache| cache.inner.as_core().id),
1463        };
1464
1465        let (id, error) = self
1466            .context
1467            .0
1468            .device_create_mesh_pipeline(self.id, &descriptor, None);
1469        if let Some(cause) = error {
1470            if let wgc::pipeline::CreateRenderPipelineError::Internal { stage, ref error } = cause {
1471                log::error!("Shader translation error for stage {stage:?}: {error}");
1472                log::error!("Please report it to https://github.com/gfx-rs/wgpu");
1473            }
1474            self.context.handle_error(
1475                &self.error_sink,
1476                cause,
1477                desc.label,
1478                "Device::create_render_pipeline",
1479            );
1480        }
1481        CoreRenderPipeline {
1482            context: self.context.clone(),
1483            id,
1484            error_sink: Arc::clone(&self.error_sink),
1485        }
1486        .into()
1487    }
1488
1489    fn create_compute_pipeline(
1490        &self,
1491        desc: &crate::ComputePipelineDescriptor<'_>,
1492    ) -> dispatch::DispatchComputePipeline {
1493        use wgc::pipeline as pipe;
1494
1495        let constants = desc
1496            .compilation_options
1497            .constants
1498            .iter()
1499            .map(|&(key, value)| (String::from(key), value))
1500            .collect();
1501
1502        let descriptor = pipe::ComputePipelineDescriptor {
1503            label: desc.label.map(Borrowed),
1504            layout: desc.layout.map(|pll| pll.inner.as_core().id),
1505            stage: pipe::ProgrammableStageDescriptor {
1506                module: desc.module.inner.as_core().id,
1507                entry_point: desc.entry_point.map(Borrowed),
1508                constants,
1509                zero_initialize_workgroup_memory: desc
1510                    .compilation_options
1511                    .zero_initialize_workgroup_memory,
1512            },
1513            cache: desc.cache.map(|cache| cache.inner.as_core().id),
1514        };
1515
1516        let (id, error) = self
1517            .context
1518            .0
1519            .device_create_compute_pipeline(self.id, &descriptor, None);
1520        if let Some(cause) = error {
1521            if let wgc::pipeline::CreateComputePipelineError::Internal(ref error) = cause {
1522                log::error!(
1523                    "Shader translation error for stage {:?}: {}",
1524                    wgt::ShaderStages::COMPUTE,
1525                    error
1526                );
1527                log::error!("Please report it to https://github.com/gfx-rs/wgpu");
1528            }
1529            self.context.handle_error(
1530                &self.error_sink,
1531                cause,
1532                desc.label,
1533                "Device::create_compute_pipeline",
1534            );
1535        }
1536        CoreComputePipeline {
1537            context: self.context.clone(),
1538            id,
1539            error_sink: Arc::clone(&self.error_sink),
1540        }
1541        .into()
1542    }
1543
1544    unsafe fn create_pipeline_cache(
1545        &self,
1546        desc: &crate::PipelineCacheDescriptor<'_>,
1547    ) -> dispatch::DispatchPipelineCache {
1548        use wgc::pipeline as pipe;
1549
1550        let descriptor = pipe::PipelineCacheDescriptor {
1551            label: desc.label.map(Borrowed),
1552            data: desc.data.map(Borrowed),
1553            fallback: desc.fallback,
1554        };
1555        let (id, error) = unsafe {
1556            self.context
1557                .0
1558                .device_create_pipeline_cache(self.id, &descriptor, None)
1559        };
1560        if let Some(cause) = error {
1561            self.context.handle_error(
1562                &self.error_sink,
1563                cause,
1564                desc.label,
1565                "Device::device_create_pipeline_cache_init",
1566            );
1567        }
1568        CorePipelineCache {
1569            context: self.context.clone(),
1570            id,
1571        }
1572        .into()
1573    }
1574
1575    fn create_buffer(&self, desc: &crate::BufferDescriptor<'_>) -> dispatch::DispatchBuffer {
1576        let (id, error) = self.context.0.device_create_buffer(
1577            self.id,
1578            &desc.map_label(|l| l.map(Borrowed)),
1579            None,
1580        );
1581        if let Some(cause) = error {
1582            self.context
1583                .handle_error(&self.error_sink, cause, desc.label, "Device::create_buffer");
1584        }
1585
1586        CoreBuffer {
1587            context: self.context.clone(),
1588            id,
1589            error_sink: Arc::clone(&self.error_sink),
1590        }
1591        .into()
1592    }
1593
1594    fn create_texture(&self, desc: &crate::TextureDescriptor<'_>) -> dispatch::DispatchTexture {
1595        let wgt_desc = desc.map_label_and_view_formats(|l| l.map(Borrowed), |v| v.to_vec());
1596        let (id, error) = self
1597            .context
1598            .0
1599            .device_create_texture(self.id, &wgt_desc, None);
1600        if let Some(cause) = error {
1601            self.context.handle_error(
1602                &self.error_sink,
1603                cause,
1604                desc.label,
1605                "Device::create_texture",
1606            );
1607        }
1608
1609        CoreTexture {
1610            context: self.context.clone(),
1611            id,
1612            error_sink: Arc::clone(&self.error_sink),
1613        }
1614        .into()
1615    }
1616
1617    fn create_external_texture(
1618        &self,
1619        desc: &crate::ExternalTextureDescriptor<'_>,
1620        planes: &[&crate::TextureView],
1621    ) -> dispatch::DispatchExternalTexture {
1622        let wgt_desc = desc.map_label(|l| l.map(Borrowed));
1623        let planes = planes
1624            .iter()
1625            .map(|plane| plane.inner.as_core().id)
1626            .collect::<Vec<_>>();
1627        let (id, error) = self
1628            .context
1629            .0
1630            .device_create_external_texture(self.id, &wgt_desc, &planes, None);
1631        if let Some(cause) = error {
1632            self.context.handle_error(
1633                &self.error_sink,
1634                cause,
1635                desc.label,
1636                "Device::create_external_texture",
1637            );
1638        }
1639
1640        CoreExternalTexture {
1641            context: self.context.clone(),
1642            id,
1643        }
1644        .into()
1645    }
1646
1647    fn create_blas(
1648        &self,
1649        desc: &crate::CreateBlasDescriptor<'_>,
1650        sizes: crate::BlasGeometrySizeDescriptors,
1651    ) -> (Option<u64>, dispatch::DispatchBlas) {
1652        let global = &self.context.0;
1653        let (id, handle, error) =
1654            global.device_create_blas(self.id, &desc.map_label(|l| l.map(Borrowed)), sizes, None);
1655        if let Some(cause) = error {
1656            self.context
1657                .handle_error(&self.error_sink, cause, desc.label, "Device::create_blas");
1658        }
1659        (
1660            handle,
1661            CoreBlas {
1662                context: self.context.clone(),
1663                id,
1664                error_sink: Arc::clone(&self.error_sink),
1665            }
1666            .into(),
1667        )
1668    }
1669
1670    fn create_tlas(&self, desc: &crate::CreateTlasDescriptor<'_>) -> dispatch::DispatchTlas {
1671        let global = &self.context.0;
1672        let (id, error) =
1673            global.device_create_tlas(self.id, &desc.map_label(|l| l.map(Borrowed)), None);
1674        if let Some(cause) = error {
1675            self.context
1676                .handle_error(&self.error_sink, cause, desc.label, "Device::create_tlas");
1677        }
1678        CoreTlas {
1679            context: self.context.clone(),
1680            id,
1681            // error_sink: Arc::clone(&self.error_sink),
1682        }
1683        .into()
1684    }
1685
1686    fn create_sampler(&self, desc: &crate::SamplerDescriptor<'_>) -> dispatch::DispatchSampler {
1687        let descriptor = wgc::resource::SamplerDescriptor {
1688            label: desc.label.map(Borrowed),
1689            address_modes: [
1690                desc.address_mode_u,
1691                desc.address_mode_v,
1692                desc.address_mode_w,
1693            ],
1694            mag_filter: desc.mag_filter,
1695            min_filter: desc.min_filter,
1696            mipmap_filter: desc.mipmap_filter,
1697            lod_min_clamp: desc.lod_min_clamp,
1698            lod_max_clamp: desc.lod_max_clamp,
1699            compare: desc.compare,
1700            anisotropy_clamp: desc.anisotropy_clamp,
1701            border_color: desc.border_color,
1702        };
1703
1704        let (id, error) = self
1705            .context
1706            .0
1707            .device_create_sampler(self.id, &descriptor, None);
1708        if let Some(cause) = error {
1709            self.context.handle_error(
1710                &self.error_sink,
1711                cause,
1712                desc.label,
1713                "Device::create_sampler",
1714            );
1715        }
1716        CoreSampler {
1717            context: self.context.clone(),
1718            id,
1719        }
1720        .into()
1721    }
1722
1723    fn create_query_set(&self, desc: &crate::QuerySetDescriptor<'_>) -> dispatch::DispatchQuerySet {
1724        let (id, error) = self.context.0.device_create_query_set(
1725            self.id,
1726            &desc.map_label(|l| l.map(Borrowed)),
1727            None,
1728        );
1729        if let Some(cause) = error {
1730            self.context
1731                .handle_error_nolabel(&self.error_sink, cause, "Device::create_query_set");
1732        }
1733        CoreQuerySet {
1734            context: self.context.clone(),
1735            id,
1736        }
1737        .into()
1738    }
1739
1740    fn create_command_encoder(
1741        &self,
1742        desc: &crate::CommandEncoderDescriptor<'_>,
1743    ) -> dispatch::DispatchCommandEncoder {
1744        let (id, error) = self.context.0.device_create_command_encoder(
1745            self.id,
1746            &desc.map_label(|l| l.map(Borrowed)),
1747            None,
1748        );
1749        if let Some(cause) = error {
1750            self.context.handle_error(
1751                &self.error_sink,
1752                cause,
1753                desc.label,
1754                "Device::create_command_encoder",
1755            );
1756        }
1757
1758        CoreCommandEncoder {
1759            context: self.context.clone(),
1760            id,
1761            error_sink: Arc::clone(&self.error_sink),
1762        }
1763        .into()
1764    }
1765
1766    fn create_render_bundle_encoder(
1767        &self,
1768        desc: &crate::RenderBundleEncoderDescriptor<'_>,
1769    ) -> dispatch::DispatchRenderBundleEncoder {
1770        let descriptor = wgc::command::RenderBundleEncoderDescriptor {
1771            label: desc.label.map(Borrowed),
1772            color_formats: Borrowed(desc.color_formats),
1773            depth_stencil: desc.depth_stencil,
1774            sample_count: desc.sample_count,
1775            multiview: desc.multiview,
1776        };
1777        let encoder = match wgc::command::RenderBundleEncoder::new(&descriptor, self.id) {
1778            Ok(encoder) => encoder,
1779            Err(e) => panic!("Error in Device::create_render_bundle_encoder: {e}"),
1780        };
1781
1782        CoreRenderBundleEncoder {
1783            context: self.context.clone(),
1784            encoder,
1785            id: crate::cmp::Identifier::create(),
1786        }
1787        .into()
1788    }
1789
    /// Register the closure wgpu-core will invoke when this device is lost.
    fn set_device_lost_callback(&self, device_lost_callback: dispatch::BoxDeviceLostCallback) {
        self.context
            .0
            .device_set_device_lost_closure(self.id, device_lost_callback);
    }
1795
    /// Install the handler invoked for errors not captured by any error scope.
    /// Replaces any previously installed handler.
    fn on_uncaptured_error(&self, handler: Arc<dyn crate::UncapturedErrorHandler>) {
        let mut error_sink = self.error_sink.lock();
        error_sink.uncaptured_handler = Some(handler);
    }
1800
1801    fn push_error_scope(&self, filter: crate::ErrorFilter) -> u32 {
1802        let mut error_sink = self.error_sink.lock();
1803        let thread_id = thread_id::ThreadId::current();
1804        let scopes = error_sink.scopes.entry(thread_id).or_default();
1805        let index = scopes
1806            .len()
1807            .try_into()
1808            .expect("Greater than 2^32 nested error scopes");
1809        scopes.push(ErrorScope {
1810            error: None,
1811            filter,
1812        });
1813        index
1814    }
1815
1816    fn pop_error_scope(&self, index: u32) -> Pin<Box<dyn dispatch::PopErrorScopeFuture>> {
1817        let mut error_sink = self.error_sink.lock();
1818
1819        // We go out of our way to avoid panicking while unwinding, because that would abort the process,
1820        // and we are supposed to just drop the error scope on the floor.
1821        let is_panicking = crate::util::is_panicking();
1822        let thread_id = thread_id::ThreadId::current();
1823        let err = "Mismatched pop_error_scope call: no error scope for this thread. Error scopes are thread-local.";
1824        let scopes = match error_sink.scopes.get_mut(&thread_id) {
1825            Some(s) => s,
1826            None => {
1827                if !is_panicking {
1828                    panic!("{err}");
1829                } else {
1830                    return Box::pin(ready(None));
1831                }
1832            }
1833        };
1834        if scopes.is_empty() && !is_panicking {
1835            panic!("{err}");
1836        }
1837        if index as usize != scopes.len() - 1 && !is_panicking {
1838            panic!(
1839                "Mismatched pop_error_scope call: error scopes must be popped in reverse order."
1840            );
1841        }
1842
1843        // It would be more correct in this case to use `remove` here so that when unwinding is occurring
1844        // we would remove the correct error scope, but we don't have such a primitive on the web
1845        // and having consistent behavior here is more important. If you are unwinding and it unwinds
1846        // the guards in the wrong order, it's totally reasonable to have incorrect behavior.
1847        let scope = match scopes.pop() {
1848            Some(s) => s,
1849            None if !is_panicking => unreachable!(),
1850            None => return Box::pin(ready(None)),
1851        };
1852
1853        Box::pin(ready(scope.error))
1854    }
1855
    /// Begin a graphics-debugger capture (e.g. RenderDoc-style tooling) for
    /// this device. Forwards directly to wgpu-core.
    unsafe fn start_graphics_debugger_capture(&self) {
        // SAFETY: the caller upholds the contract of the underlying
        // wgpu-core capture API.
        unsafe {
            self.context
                .0
                .device_start_graphics_debugger_capture(self.id)
        };
    }
1863
    /// End a graphics-debugger capture previously started with
    /// `start_graphics_debugger_capture`. Forwards directly to wgpu-core.
    unsafe fn stop_graphics_debugger_capture(&self) {
        // SAFETY: the caller upholds the contract of the underlying
        // wgpu-core capture API.
        unsafe {
            self.context
                .0
                .device_stop_graphics_debugger_capture(self.id)
        };
    }
1871
1872    fn poll(&self, poll_type: wgt::PollType<u64>) -> Result<crate::PollStatus, crate::PollError> {
1873        match self.context.0.device_poll(self.id, poll_type) {
1874            Ok(status) => Ok(status),
1875            Err(err) => {
1876                if let Some(poll_error) = err.to_poll_error() {
1877                    return Err(poll_error);
1878                }
1879
1880                self.context.handle_error_fatal(err, "Device::poll")
1881            }
1882        }
1883    }
1884
    /// Snapshot wgpu-core's internal counters for this device.
    fn get_internal_counters(&self) -> crate::InternalCounters {
        self.context.0.device_get_internal_counters(self.id)
    }
1888
    /// Ask wgpu-core for a memory-allocator report; `None` when the backend
    /// does not provide one.
    fn generate_allocator_report(&self) -> Option<wgt::AllocatorReport> {
        self.context.0.device_generate_allocator_report(self.id)
    }
1892
    /// Eagerly destroy the device's resources, without waiting for the
    /// wrapper to be dropped.
    fn destroy(&self) {
        self.context.0.device_destroy(self.id);
    }
1896}
1897
impl Drop for CoreDevice {
    fn drop(&mut self) {
        // Release this wrapper's reference to the device in wgpu-core.
        self.context.0.device_drop(self.id)
    }
}
1903
impl dispatch::QueueInterface for CoreQueue {
    /// Schedule a write of `data` into `buffer` at `offset`.
    ///
    /// Failures are reported through the queue's error sink rather than
    /// returned to the caller.
    fn write_buffer(
        &self,
        buffer: &dispatch::DispatchBuffer,
        offset: crate::BufferAddress,
        data: &[u8],
    ) {
        let buffer = buffer.as_core();

        match self
            .context
            .0
            .queue_write_buffer(self.id, buffer.id, offset, data)
        {
            Ok(()) => (),
            Err(err) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, err, "Queue::write_buffer")
            }
        }
    }

    /// Create a `size`-byte staging buffer used by `write_buffer_with`;
    /// returns `None` (after reporting the error) if creation fails.
    fn create_staging_buffer(
        &self,
        size: crate::BufferSize,
    ) -> Option<dispatch::DispatchQueueWriteBuffer> {
        match self
            .context
            .0
            .queue_create_staging_buffer(self.id, size, None)
        {
            Ok((buffer_id, ptr)) => Some(
                CoreQueueWriteBuffer {
                    buffer_id,
                    // Wrap the raw mapping pointer so callers can fill the
                    // staging memory through the mapped-range interface.
                    mapping: CoreBufferMappedRange {
                        ptr,
                        size: size.get() as usize,
                    },
                }
                .into(),
            ),
            Err(err) => {
                // Reported as "write_buffer_with": staging buffers only
                // exist as part of that operation.
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    err,
                    "Queue::write_buffer_with",
                );
                None
            }
        }
    }

    /// Validate that a `write_buffer_with`-style write of `size` bytes at
    /// `offset` into `buffer` would be legal; `None` means validation
    /// failed (and the error was reported).
    fn validate_write_buffer(
        &self,
        buffer: &dispatch::DispatchBuffer,
        offset: wgt::BufferAddress,
        size: wgt::BufferSize,
    ) -> Option<()> {
        let buffer = buffer.as_core();

        match self
            .context
            .0
            .queue_validate_write_buffer(self.id, buffer.id, offset, size)
        {
            Ok(()) => Some(()),
            Err(err) => {
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    err,
                    "Queue::write_buffer_with",
                );
                None
            }
        }
    }

    /// Flush a previously filled staging buffer into `buffer` at `offset`,
    /// completing a `write_buffer_with` operation.
    fn write_staging_buffer(
        &self,
        buffer: &dispatch::DispatchBuffer,
        offset: crate::BufferAddress,
        staging_buffer: &dispatch::DispatchQueueWriteBuffer,
    ) {
        let buffer = buffer.as_core();
        let staging_buffer = staging_buffer.as_core();

        match self.context.0.queue_write_staging_buffer(
            self.id,
            buffer.id,
            offset,
            staging_buffer.buffer_id,
        ) {
            Ok(()) => (),
            Err(err) => {
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    err,
                    "Queue::write_buffer_with",
                );
            }
        }
    }

    /// Schedule a write of `data` into a texture region described by
    /// `texture`/`size`, interpreting `data` per `data_layout`.
    fn write_texture(
        &self,
        texture: crate::TexelCopyTextureInfo<'_>,
        data: &[u8],
        data_layout: crate::TexelCopyBufferLayout,
        size: crate::Extent3d,
    ) {
        match self.context.0.queue_write_texture(
            self.id,
            &map_texture_copy_view(texture),
            data,
            &data_layout,
            &size,
        ) {
            Ok(()) => (),
            Err(err) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, err, "Queue::write_texture")
            }
        }
    }

    // This method needs to exist if either webgpu or webgl is enabled,
    // but we only actually have an implementation if webgl is enabled.
    #[cfg(web)]
    #[cfg_attr(not(webgl), expect(unused_variables))]
    fn copy_external_image_to_texture(
        &self,
        source: &crate::CopyExternalImageSourceInfo,
        dest: crate::CopyExternalImageDestInfo<&crate::api::Texture>,
        size: crate::Extent3d,
    ) {
        // On non-webgl web targets this compiles to an empty body; the
        // parameters are deliberately unused (see the cfg_attr above).
        #[cfg(webgl)]
        match self.context.0.queue_copy_external_image_to_texture(
            self.id,
            source,
            map_texture_tagged_copy_view(dest),
            size,
        ) {
            Ok(()) => (),
            Err(err) => self.context.handle_error_nolabel(
                &self.error_sink,
                err,
                "Queue::copy_external_image_to_texture",
            ),
        }
    }

    /// Submit a batch of command buffers; returns the submission index.
    ///
    /// Even on error, wgpu-core supplies a submission index, which is
    /// returned after the error is reported.
    fn submit(
        &self,
        command_buffers: &mut dyn Iterator<Item = dispatch::DispatchCommandBuffer>,
    ) -> u64 {
        // Collect first so the wrappers stay alive for the duration of the
        // submit call; small submissions avoid a heap allocation.
        let temp_command_buffers = command_buffers.collect::<SmallVec<[_; 4]>>();
        let command_buffer_ids = temp_command_buffers
            .iter()
            .map(|cmdbuf| cmdbuf.as_core().id)
            .collect::<SmallVec<[_; 4]>>();

        let index = match self.context.0.queue_submit(self.id, &command_buffer_ids) {
            Ok(index) => index,
            Err((index, err)) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, err, "Queue::submit");
                index
            }
        };

        // Explicitly drop the wrappers only after submission has consumed
        // their ids.
        drop(temp_command_buffers);

        index
    }

    /// Nanoseconds per timestamp-query tick for this queue.
    fn get_timestamp_period(&self) -> f32 {
        self.context.0.queue_get_timestamp_period(self.id)
    }

    /// Register `callback` to fire once all currently submitted work on
    /// this queue has completed.
    fn on_submitted_work_done(&self, callback: dispatch::BoxSubmittedWorkDoneCallback) {
        self.context
            .0
            .queue_on_submitted_work_done(self.id, callback);
    }

    /// Compact `blas`, returning the new BLAS wrapper plus an optional
    /// handle value from wgpu-core. Errors are reported through the error
    /// sink; a wrapper is returned regardless.
    fn compact_blas(&self, blas: &dispatch::DispatchBlas) -> (Option<u64>, dispatch::DispatchBlas) {
        let (id, handle, error) =
            self.context
                .0
                .queue_compact_blas(self.id, blas.as_core().id, None);

        if let Some(cause) = error {
            self.context
                .handle_error_nolabel(&self.error_sink, cause, "Queue::compact_blas");
        }
        (
            handle,
            CoreBlas {
                context: self.context.clone(),
                id,
                error_sink: Arc::clone(&self.error_sink),
            }
            .into(),
        )
    }
}
2110
impl Drop for CoreQueue {
    fn drop(&mut self) {
        // Release this wrapper's reference to the queue in wgpu-core.
        self.context.0.queue_drop(self.id)
    }
}
2116
impl dispatch::ShaderModuleInterface for CoreShaderModule {
    fn get_compilation_info(&self) -> Pin<Box<dyn dispatch::ShaderCompilationInfoFuture>> {
        // Compilation info is stored on the wrapper at creation time, so
        // the future resolves immediately with a clone of it.
        Box::pin(ready(self.compilation_info.clone()))
    }
}
2122
impl Drop for CoreShaderModule {
    fn drop(&mut self) {
        // Release this wrapper's reference to the shader module in wgpu-core.
        self.context.0.shader_module_drop(self.id)
    }
}
2128
2129impl dispatch::BindGroupLayoutInterface for CoreBindGroupLayout {}
2130
impl Drop for CoreBindGroupLayout {
    fn drop(&mut self) {
        // Release this wrapper's reference to the layout in wgpu-core.
        self.context.0.bind_group_layout_drop(self.id)
    }
}
2136
2137impl dispatch::BindGroupInterface for CoreBindGroup {}
2138
impl Drop for CoreBindGroup {
    fn drop(&mut self) {
        // Release this wrapper's reference to the bind group in wgpu-core.
        self.context.0.bind_group_drop(self.id)
    }
}
2144
2145impl dispatch::TextureViewInterface for CoreTextureView {}
2146
impl Drop for CoreTextureView {
    fn drop(&mut self) {
        // TODO: We don't use this error at all?
        // The result of dropping the view is deliberately discarded; there
        // is no caller to report it to from a destructor.
        let _ = self.context.0.texture_view_drop(self.id);
    }
}
2153
impl dispatch::ExternalTextureInterface for CoreExternalTexture {
    /// Eagerly destroy the external texture, without waiting for the
    /// wrapper to be dropped.
    fn destroy(&self) {
        self.context.0.external_texture_destroy(self.id);
    }
}
2159
impl Drop for CoreExternalTexture {
    fn drop(&mut self) {
        // Release this wrapper's reference to the external texture.
        self.context.0.external_texture_drop(self.id);
    }
}
2165
2166impl dispatch::SamplerInterface for CoreSampler {}
2167
impl Drop for CoreSampler {
    fn drop(&mut self) {
        // Release this wrapper's reference to the sampler in wgpu-core.
        self.context.0.sampler_drop(self.id)
    }
}
2173
impl dispatch::BufferInterface for CoreBuffer {
    /// Request an asynchronous mapping of `range` for reading or writing.
    ///
    /// `callback` fires when the map attempt resolves; map failures reach
    /// the callback as `BufferAsyncError`, while submission errors are
    /// reported through the error sink.
    fn map_async(
        &self,
        mode: crate::MapMode,
        range: Range<crate::BufferAddress>,
        callback: dispatch::BufferMapCallback,
    ) {
        let operation = wgc::resource::BufferMapOperation {
            host: match mode {
                MapMode::Read => wgc::device::HostMap::Read,
                MapMode::Write => wgc::device::HostMap::Write,
            },
            // Adapt wgpu-core's status into the public error type before
            // handing it to the user's callback.
            callback: Some(Box::new(|status| {
                let res = status.map_err(|_| crate::BufferAsyncError);
                callback(res);
            })),
        };

        match self.context.0.buffer_map_async(
            self.id,
            range.start,
            Some(range.end - range.start),
            operation,
        ) {
            Ok(_) => (),
            Err(cause) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, cause, "Buffer::map_async")
            }
        }
    }

    /// Return the mapped memory for `sub_range` of an already-mapped buffer.
    ///
    /// Failure here is fatal: the caller holds no way to recover a missing
    /// mapping, so the error is escalated via `handle_error_fatal`.
    fn get_mapped_range(
        &self,
        sub_range: Range<crate::BufferAddress>,
    ) -> dispatch::DispatchBufferMappedRange {
        let size = sub_range.end - sub_range.start;
        match self
            .context
            .0
            .buffer_get_mapped_range(self.id, sub_range.start, Some(size))
        {
            // wgpu-core reports the actual mapped size alongside the pointer.
            Ok((ptr, size)) => CoreBufferMappedRange {
                ptr,
                size: size as usize,
            }
            .into(),
            Err(err) => self
                .context
                .handle_error_fatal(err, "Buffer::get_mapped_range"),
        }
    }

    /// Unmap the buffer, flushing any mapped-for-write contents.
    fn unmap(&self) {
        match self.context.0.buffer_unmap(self.id) {
            Ok(()) => (),
            Err(cause) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, cause, "Buffer::buffer_unmap")
            }
        }
    }

    /// Eagerly destroy the buffer, without waiting for the wrapper drop.
    fn destroy(&self) {
        self.context.0.buffer_destroy(self.id);
    }
}
2241
impl Drop for CoreBuffer {
    fn drop(&mut self) {
        // Release this wrapper's reference to the buffer in wgpu-core.
        self.context.0.buffer_drop(self.id)
    }
}
2247
impl dispatch::TextureInterface for CoreTexture {
    /// Create a view over this texture described by `desc`.
    ///
    /// An id is returned even on failure; errors are reported through the
    /// error sink with the descriptor's label attached.
    fn create_view(
        &self,
        desc: &crate::TextureViewDescriptor<'_>,
    ) -> dispatch::DispatchTextureView {
        // Translate the public descriptor into wgpu-core's form, borrowing
        // the label.
        let descriptor = wgc::resource::TextureViewDescriptor {
            label: desc.label.map(Borrowed),
            format: desc.format,
            dimension: desc.dimension,
            usage: desc.usage,
            range: wgt::ImageSubresourceRange {
                aspect: desc.aspect,
                base_mip_level: desc.base_mip_level,
                mip_level_count: desc.mip_level_count,
                base_array_layer: desc.base_array_layer,
                array_layer_count: desc.array_layer_count,
            },
        };
        let (id, error) = self
            .context
            .0
            .texture_create_view(self.id, &descriptor, None);
        if let Some(cause) = error {
            self.context
                .handle_error(&self.error_sink, cause, desc.label, "Texture::create_view");
        }
        CoreTextureView {
            context: self.context.clone(),
            id,
        }
        .into()
    }

    /// Eagerly destroy the texture, without waiting for the wrapper drop.
    fn destroy(&self) {
        self.context.0.texture_destroy(self.id);
    }
}
2285
impl Drop for CoreTexture {
    fn drop(&mut self) {
        // Release this wrapper's reference to the texture in wgpu-core.
        self.context.0.texture_drop(self.id)
    }
}
2291
impl dispatch::BlasInterface for CoreBlas {
    /// Ask wgpu-core to prepare this BLAS for compaction; `callback` fires
    /// when preparation resolves.
    fn prepare_compact_async(&self, callback: BlasCompactCallback) {
        // Adapt wgpu-core's result type into the public error type before
        // invoking the user's callback.
        let callback: Option<wgc::resource::BlasCompactCallback> =
            Some(Box::new(|status: BlasPrepareCompactResult| {
                let res = status.map_err(|_| crate::BlasAsyncError);
                callback(res);
            }));

        match self.context.0.blas_prepare_compact_async(self.id, callback) {
            Ok(_) => (),
            Err(cause) => self.context.handle_error_nolabel(
                &self.error_sink,
                cause,
                "Blas::prepare_compact_async",
            ),
        }
    }

    /// Whether this BLAS is currently ready to be compacted.
    fn ready_for_compaction(&self) -> bool {
        match self.context.0.ready_for_compaction(self.id) {
            Ok(ready) => ready,
            Err(cause) => {
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    cause,
                    "Blas::ready_for_compaction",
                );
                // A BLAS is definitely not ready for compaction if it's not valid
                false
            }
        }
    }
}
2325
impl Drop for CoreBlas {
    fn drop(&mut self) {
        // Release this wrapper's reference to the BLAS in wgpu-core.
        self.context.0.blas_drop(self.id)
    }
}
2331
2332impl dispatch::TlasInterface for CoreTlas {}
2333
impl Drop for CoreTlas {
    fn drop(&mut self) {
        // Release this wrapper's reference to the TLAS in wgpu-core.
        self.context.0.tlas_drop(self.id)
    }
}
2339
2340impl dispatch::QuerySetInterface for CoreQuerySet {}
2341
impl Drop for CoreQuerySet {
    fn drop(&mut self) {
        // Release this wrapper's reference to the query set in wgpu-core.
        self.context.0.query_set_drop(self.id)
    }
}
2347
2348impl dispatch::PipelineLayoutInterface for CorePipelineLayout {}
2349
impl Drop for CorePipelineLayout {
    fn drop(&mut self) {
        // Release this wrapper's reference to the pipeline layout.
        self.context.0.pipeline_layout_drop(self.id)
    }
}
2355
2356impl dispatch::RenderPipelineInterface for CoreRenderPipeline {
2357    fn get_bind_group_layout(&self, index: u32) -> dispatch::DispatchBindGroupLayout {
2358        let (id, error) = self
2359            .context
2360            .0
2361            .render_pipeline_get_bind_group_layout(self.id, index, None);
2362        if let Some(err) = error {
2363            self.context.handle_error_nolabel(
2364                &self.error_sink,
2365                err,
2366                "RenderPipeline::get_bind_group_layout",
2367            )
2368        }
2369        CoreBindGroupLayout {
2370            context: self.context.clone(),
2371            id,
2372        }
2373        .into()
2374    }
2375}
2376
impl Drop for CoreRenderPipeline {
    fn drop(&mut self) {
        // Release this wrapper's reference to the render pipeline.
        self.context.0.render_pipeline_drop(self.id)
    }
}
2382
2383impl dispatch::ComputePipelineInterface for CoreComputePipeline {
2384    fn get_bind_group_layout(&self, index: u32) -> dispatch::DispatchBindGroupLayout {
2385        let (id, error) = self
2386            .context
2387            .0
2388            .compute_pipeline_get_bind_group_layout(self.id, index, None);
2389        if let Some(err) = error {
2390            self.context.handle_error_nolabel(
2391                &self.error_sink,
2392                err,
2393                "ComputePipeline::get_bind_group_layout",
2394            )
2395        }
2396        CoreBindGroupLayout {
2397            context: self.context.clone(),
2398            id,
2399        }
2400        .into()
2401    }
2402}
2403
impl Drop for CoreComputePipeline {
    fn drop(&mut self) {
        // Release this wrapper's reference to the compute pipeline.
        self.context.0.compute_pipeline_drop(self.id)
    }
}
2409
impl dispatch::PipelineCacheInterface for CorePipelineCache {
    /// Serialize the pipeline cache contents, if wgpu-core can provide them.
    fn get_data(&self) -> Option<Vec<u8>> {
        self.context.0.pipeline_cache_get_data(self.id)
    }
}
2415
impl Drop for CorePipelineCache {
    fn drop(&mut self) {
        // Release this wrapper's reference to the pipeline cache.
        self.context.0.pipeline_cache_drop(self.id)
    }
}
2421
2422impl dispatch::CommandEncoderInterface for CoreCommandEncoder {
2423    fn copy_buffer_to_buffer(
2424        &self,
2425        source: &dispatch::DispatchBuffer,
2426        source_offset: crate::BufferAddress,
2427        destination: &dispatch::DispatchBuffer,
2428        destination_offset: crate::BufferAddress,
2429        copy_size: Option<crate::BufferAddress>,
2430    ) {
2431        let source = source.as_core();
2432        let destination = destination.as_core();
2433
2434        if let Err(cause) = self.context.0.command_encoder_copy_buffer_to_buffer(
2435            self.id,
2436            source.id,
2437            source_offset,
2438            destination.id,
2439            destination_offset,
2440            copy_size,
2441        ) {
2442            self.context.handle_error_nolabel(
2443                &self.error_sink,
2444                cause,
2445                "CommandEncoder::copy_buffer_to_buffer",
2446            );
2447        }
2448    }
2449
2450    fn copy_buffer_to_texture(
2451        &self,
2452        source: crate::TexelCopyBufferInfo<'_>,
2453        destination: crate::TexelCopyTextureInfo<'_>,
2454        copy_size: crate::Extent3d,
2455    ) {
2456        if let Err(cause) = self.context.0.command_encoder_copy_buffer_to_texture(
2457            self.id,
2458            &map_buffer_copy_view(source),
2459            &map_texture_copy_view(destination),
2460            &copy_size,
2461        ) {
2462            self.context.handle_error_nolabel(
2463                &self.error_sink,
2464                cause,
2465                "CommandEncoder::copy_buffer_to_texture",
2466            );
2467        }
2468    }
2469
2470    fn copy_texture_to_buffer(
2471        &self,
2472        source: crate::TexelCopyTextureInfo<'_>,
2473        destination: crate::TexelCopyBufferInfo<'_>,
2474        copy_size: crate::Extent3d,
2475    ) {
2476        if let Err(cause) = self.context.0.command_encoder_copy_texture_to_buffer(
2477            self.id,
2478            &map_texture_copy_view(source),
2479            &map_buffer_copy_view(destination),
2480            &copy_size,
2481        ) {
2482            self.context.handle_error_nolabel(
2483                &self.error_sink,
2484                cause,
2485                "CommandEncoder::copy_texture_to_buffer",
2486            );
2487        }
2488    }
2489
2490    fn copy_texture_to_texture(
2491        &self,
2492        source: crate::TexelCopyTextureInfo<'_>,
2493        destination: crate::TexelCopyTextureInfo<'_>,
2494        copy_size: crate::Extent3d,
2495    ) {
2496        if let Err(cause) = self.context.0.command_encoder_copy_texture_to_texture(
2497            self.id,
2498            &map_texture_copy_view(source),
2499            &map_texture_copy_view(destination),
2500            &copy_size,
2501        ) {
2502            self.context.handle_error_nolabel(
2503                &self.error_sink,
2504                cause,
2505                "CommandEncoder::copy_texture_to_texture",
2506            );
2507        }
2508    }
2509
2510    fn begin_compute_pass(
2511        &self,
2512        desc: &crate::ComputePassDescriptor<'_>,
2513    ) -> dispatch::DispatchComputePass {
2514        let timestamp_writes =
2515            desc.timestamp_writes
2516                .as_ref()
2517                .map(|tw| wgc::command::PassTimestampWrites {
2518                    query_set: tw.query_set.inner.as_core().id,
2519                    beginning_of_pass_write_index: tw.beginning_of_pass_write_index,
2520                    end_of_pass_write_index: tw.end_of_pass_write_index,
2521                });
2522
2523        let (pass, err) = self.context.0.command_encoder_begin_compute_pass(
2524            self.id,
2525            &wgc::command::ComputePassDescriptor {
2526                label: desc.label.map(Borrowed),
2527                timestamp_writes,
2528            },
2529        );
2530
2531        if let Some(cause) = err {
2532            self.context.handle_error(
2533                &self.error_sink,
2534                cause,
2535                desc.label,
2536                "CommandEncoder::begin_compute_pass",
2537            );
2538        }
2539
2540        CoreComputePass {
2541            context: self.context.clone(),
2542            pass,
2543            error_sink: self.error_sink.clone(),
2544            id: crate::cmp::Identifier::create(),
2545        }
2546        .into()
2547    }
2548
2549    fn begin_render_pass(
2550        &self,
2551        desc: &crate::RenderPassDescriptor<'_>,
2552    ) -> dispatch::DispatchRenderPass {
2553        let colors = desc
2554            .color_attachments
2555            .iter()
2556            .map(|ca| {
2557                ca.as_ref()
2558                    .map(|at| wgc::command::RenderPassColorAttachment {
2559                        view: at.view.inner.as_core().id,
2560                        depth_slice: at.depth_slice,
2561                        resolve_target: at.resolve_target.map(|view| view.inner.as_core().id),
2562                        load_op: at.ops.load,
2563                        store_op: at.ops.store,
2564                    })
2565            })
2566            .collect::<Vec<_>>();
2567
2568        let depth_stencil = desc.depth_stencil_attachment.as_ref().map(|dsa| {
2569            wgc::command::RenderPassDepthStencilAttachment {
2570                view: dsa.view.inner.as_core().id,
2571                depth: map_pass_channel(dsa.depth_ops.as_ref()),
2572                stencil: map_pass_channel(dsa.stencil_ops.as_ref()),
2573            }
2574        });
2575
2576        let timestamp_writes =
2577            desc.timestamp_writes
2578                .as_ref()
2579                .map(|tw| wgc::command::PassTimestampWrites {
2580                    query_set: tw.query_set.inner.as_core().id,
2581                    beginning_of_pass_write_index: tw.beginning_of_pass_write_index,
2582                    end_of_pass_write_index: tw.end_of_pass_write_index,
2583                });
2584
2585        let (pass, err) = self.context.0.command_encoder_begin_render_pass(
2586            self.id,
2587            &wgc::command::RenderPassDescriptor {
2588                label: desc.label.map(Borrowed),
2589                timestamp_writes: timestamp_writes.as_ref(),
2590                color_attachments: Borrowed(&colors),
2591                depth_stencil_attachment: depth_stencil.as_ref(),
2592                occlusion_query_set: desc.occlusion_query_set.map(|qs| qs.inner.as_core().id),
2593                multiview_mask: desc.multiview_mask,
2594            },
2595        );
2596
2597        if let Some(cause) = err {
2598            self.context.handle_error(
2599                &self.error_sink,
2600                cause,
2601                desc.label,
2602                "CommandEncoder::begin_render_pass",
2603            );
2604        }
2605
2606        CoreRenderPass {
2607            context: self.context.clone(),
2608            pass,
2609            error_sink: self.error_sink.clone(),
2610            id: crate::cmp::Identifier::create(),
2611        }
2612        .into()
2613    }
2614
2615    fn finish(&mut self) -> dispatch::DispatchCommandBuffer {
2616        let descriptor = wgt::CommandBufferDescriptor::default();
2617        let (id, opt_label_and_error) =
2618            self.context
2619                .0
2620                .command_encoder_finish(self.id, &descriptor, None);
2621        if let Some((label, cause)) = opt_label_and_error {
2622            self.context
2623                .handle_error(&self.error_sink, cause, Some(&label), "a CommandEncoder");
2624        }
2625        CoreCommandBuffer {
2626            context: self.context.clone(),
2627            id,
2628        }
2629        .into()
2630    }
2631
2632    fn clear_texture(
2633        &self,
2634        texture: &dispatch::DispatchTexture,
2635        subresource_range: &crate::ImageSubresourceRange,
2636    ) {
2637        let texture = texture.as_core();
2638
2639        if let Err(cause) =
2640            self.context
2641                .0
2642                .command_encoder_clear_texture(self.id, texture.id, subresource_range)
2643        {
2644            self.context.handle_error_nolabel(
2645                &self.error_sink,
2646                cause,
2647                "CommandEncoder::clear_texture",
2648            );
2649        }
2650    }
2651
2652    fn clear_buffer(
2653        &self,
2654        buffer: &dispatch::DispatchBuffer,
2655        offset: crate::BufferAddress,
2656        size: Option<crate::BufferAddress>,
2657    ) {
2658        let buffer = buffer.as_core();
2659
2660        if let Err(cause) = self
2661            .context
2662            .0
2663            .command_encoder_clear_buffer(self.id, buffer.id, offset, size)
2664        {
2665            self.context.handle_error_nolabel(
2666                &self.error_sink,
2667                cause,
2668                "CommandEncoder::fill_buffer",
2669            );
2670        }
2671    }
2672
2673    fn insert_debug_marker(&self, label: &str) {
2674        if let Err(cause) = self
2675            .context
2676            .0
2677            .command_encoder_insert_debug_marker(self.id, label)
2678        {
2679            self.context.handle_error_nolabel(
2680                &self.error_sink,
2681                cause,
2682                "CommandEncoder::insert_debug_marker",
2683            );
2684        }
2685    }
2686
2687    fn push_debug_group(&self, label: &str) {
2688        if let Err(cause) = self
2689            .context
2690            .0
2691            .command_encoder_push_debug_group(self.id, label)
2692        {
2693            self.context.handle_error_nolabel(
2694                &self.error_sink,
2695                cause,
2696                "CommandEncoder::push_debug_group",
2697            );
2698        }
2699    }
2700
2701    fn pop_debug_group(&self) {
2702        if let Err(cause) = self.context.0.command_encoder_pop_debug_group(self.id) {
2703            self.context.handle_error_nolabel(
2704                &self.error_sink,
2705                cause,
2706                "CommandEncoder::pop_debug_group",
2707            );
2708        }
2709    }
2710
2711    fn write_timestamp(&self, query_set: &dispatch::DispatchQuerySet, query_index: u32) {
2712        let query_set = query_set.as_core();
2713
2714        if let Err(cause) =
2715            self.context
2716                .0
2717                .command_encoder_write_timestamp(self.id, query_set.id, query_index)
2718        {
2719            self.context.handle_error_nolabel(
2720                &self.error_sink,
2721                cause,
2722                "CommandEncoder::write_timestamp",
2723            );
2724        }
2725    }
2726
2727    fn resolve_query_set(
2728        &self,
2729        query_set: &dispatch::DispatchQuerySet,
2730        first_query: u32,
2731        query_count: u32,
2732        destination: &dispatch::DispatchBuffer,
2733        destination_offset: crate::BufferAddress,
2734    ) {
2735        let query_set = query_set.as_core();
2736        let destination = destination.as_core();
2737
2738        if let Err(cause) = self.context.0.command_encoder_resolve_query_set(
2739            self.id,
2740            query_set.id,
2741            first_query,
2742            query_count,
2743            destination.id,
2744            destination_offset,
2745        ) {
2746            self.context.handle_error_nolabel(
2747                &self.error_sink,
2748                cause,
2749                "CommandEncoder::resolve_query_set",
2750            );
2751        }
2752    }
2753
2754    fn mark_acceleration_structures_built<'a>(
2755        &self,
2756        blas: &mut dyn Iterator<Item = &'a Blas>,
2757        tlas: &mut dyn Iterator<Item = &'a Tlas>,
2758    ) {
2759        let blas = blas
2760            .map(|b| b.inner.as_core().id)
2761            .collect::<SmallVec<[_; 4]>>();
2762        let tlas = tlas
2763            .map(|t| t.inner.as_core().id)
2764            .collect::<SmallVec<[_; 4]>>();
2765        if let Err(cause) = self
2766            .context
2767            .0
2768            .command_encoder_mark_acceleration_structures_built(self.id, &blas, &tlas)
2769        {
2770            self.context.handle_error_nolabel(
2771                &self.error_sink,
2772                cause,
2773                "CommandEncoder::build_acceleration_structures_unsafe_tlas",
2774            );
2775        }
2776    }
2777
2778    fn build_acceleration_structures<'a>(
2779        &self,
2780        blas: &mut dyn Iterator<Item = &'a crate::BlasBuildEntry<'a>>,
2781        tlas: &mut dyn Iterator<Item = &'a crate::Tlas>,
2782    ) {
2783        let blas = blas.map(|e: &crate::BlasBuildEntry<'_>| {
2784            let geometries = match e.geometry {
2785                crate::BlasGeometries::TriangleGeometries(ref triangle_geometries) => {
2786                    let iter = triangle_geometries.iter().map(|tg| {
2787                        wgc::ray_tracing::BlasTriangleGeometry {
2788                            vertex_buffer: tg.vertex_buffer.inner.as_core().id,
2789                            index_buffer: tg.index_buffer.map(|buf| buf.inner.as_core().id),
2790                            transform_buffer: tg.transform_buffer.map(|buf| buf.inner.as_core().id),
2791                            size: tg.size,
2792                            transform_buffer_offset: tg.transform_buffer_offset,
2793                            first_vertex: tg.first_vertex,
2794                            vertex_stride: tg.vertex_stride,
2795                            first_index: tg.first_index,
2796                        }
2797                    });
2798                    wgc::ray_tracing::BlasGeometries::TriangleGeometries(Box::new(iter))
2799                }
2800            };
2801            wgc::ray_tracing::BlasBuildEntry {
2802                blas_id: e.blas.inner.as_core().id,
2803                geometries,
2804            }
2805        });
2806
2807        let tlas = tlas.into_iter().map(|e| {
2808            let instances = e
2809                .instances
2810                .iter()
2811                .map(|instance: &Option<crate::TlasInstance>| {
2812                    instance
2813                        .as_ref()
2814                        .map(|instance| wgc::ray_tracing::TlasInstance {
2815                            blas_id: instance.blas.as_core().id,
2816                            transform: &instance.transform,
2817                            custom_data: instance.custom_data,
2818                            mask: instance.mask,
2819                        })
2820                });
2821            wgc::ray_tracing::TlasPackage {
2822                tlas_id: e.inner.as_core().id,
2823                instances: Box::new(instances),
2824                lowest_unmodified: e.lowest_unmodified,
2825            }
2826        });
2827
2828        if let Err(cause) = self
2829            .context
2830            .0
2831            .command_encoder_build_acceleration_structures(self.id, blas, tlas)
2832        {
2833            self.context.handle_error_nolabel(
2834                &self.error_sink,
2835                cause,
2836                "CommandEncoder::build_acceleration_structures_unsafe_tlas",
2837            );
2838        }
2839    }
2840
2841    fn transition_resources<'a>(
2842        &mut self,
2843        buffer_transitions: &mut dyn Iterator<
2844            Item = wgt::BufferTransition<&'a dispatch::DispatchBuffer>,
2845        >,
2846        texture_transitions: &mut dyn Iterator<
2847            Item = wgt::TextureTransition<&'a dispatch::DispatchTexture>,
2848        >,
2849    ) {
2850        let result = self.context.0.command_encoder_transition_resources(
2851            self.id,
2852            buffer_transitions.map(|t| wgt::BufferTransition {
2853                buffer: t.buffer.as_core().id,
2854                state: t.state,
2855            }),
2856            texture_transitions.map(|t| wgt::TextureTransition {
2857                texture: t.texture.as_core().id,
2858                selector: t.selector.clone(),
2859                state: t.state,
2860            }),
2861        );
2862
2863        if let Err(cause) = result {
2864            self.context.handle_error_nolabel(
2865                &self.error_sink,
2866                cause,
2867                "CommandEncoder::transition_resources",
2868            );
2869        }
2870    }
2871}
2872
impl Drop for CoreCommandEncoder {
    fn drop(&mut self) {
        // Release the wgpu-core side of this encoder; its id is invalid afterwards.
        self.context.0.command_encoder_drop(self.id)
    }
}
2878
// `CommandBufferInterface` declares no required methods; the marker impl suffices.
impl dispatch::CommandBufferInterface for CoreCommandBuffer {}
2880
impl Drop for CoreCommandBuffer {
    fn drop(&mut self) {
        // Release the wgpu-core side of this command buffer.
        self.context.0.command_buffer_drop(self.id)
    }
}
2886
2887impl dispatch::ComputePassInterface for CoreComputePass {
2888    fn set_pipeline(&mut self, pipeline: &dispatch::DispatchComputePipeline) {
2889        let pipeline = pipeline.as_core();
2890
2891        if let Err(cause) = self
2892            .context
2893            .0
2894            .compute_pass_set_pipeline(&mut self.pass, pipeline.id)
2895        {
2896            self.context.handle_error(
2897                &self.error_sink,
2898                cause,
2899                self.pass.label(),
2900                "ComputePass::set_pipeline",
2901            );
2902        }
2903    }
2904
2905    fn set_bind_group(
2906        &mut self,
2907        index: u32,
2908        bind_group: Option<&dispatch::DispatchBindGroup>,
2909        offsets: &[crate::DynamicOffset],
2910    ) {
2911        let bg = bind_group.map(|bg| bg.as_core().id);
2912
2913        if let Err(cause) =
2914            self.context
2915                .0
2916                .compute_pass_set_bind_group(&mut self.pass, index, bg, offsets)
2917        {
2918            self.context.handle_error(
2919                &self.error_sink,
2920                cause,
2921                self.pass.label(),
2922                "ComputePass::set_bind_group",
2923            );
2924        }
2925    }
2926
2927    fn set_immediates(&mut self, offset: u32, data: &[u8]) {
2928        if let Err(cause) = self
2929            .context
2930            .0
2931            .compute_pass_set_immediates(&mut self.pass, offset, data)
2932        {
2933            self.context.handle_error(
2934                &self.error_sink,
2935                cause,
2936                self.pass.label(),
2937                "ComputePass::set_immediates",
2938            );
2939        }
2940    }
2941
2942    fn insert_debug_marker(&mut self, label: &str) {
2943        if let Err(cause) =
2944            self.context
2945                .0
2946                .compute_pass_insert_debug_marker(&mut self.pass, label, 0)
2947        {
2948            self.context.handle_error(
2949                &self.error_sink,
2950                cause,
2951                self.pass.label(),
2952                "ComputePass::insert_debug_marker",
2953            );
2954        }
2955    }
2956
2957    fn push_debug_group(&mut self, group_label: &str) {
2958        if let Err(cause) =
2959            self.context
2960                .0
2961                .compute_pass_push_debug_group(&mut self.pass, group_label, 0)
2962        {
2963            self.context.handle_error(
2964                &self.error_sink,
2965                cause,
2966                self.pass.label(),
2967                "ComputePass::push_debug_group",
2968            );
2969        }
2970    }
2971
2972    fn pop_debug_group(&mut self) {
2973        if let Err(cause) = self.context.0.compute_pass_pop_debug_group(&mut self.pass) {
2974            self.context.handle_error(
2975                &self.error_sink,
2976                cause,
2977                self.pass.label(),
2978                "ComputePass::pop_debug_group",
2979            );
2980        }
2981    }
2982
2983    fn write_timestamp(&mut self, query_set: &dispatch::DispatchQuerySet, query_index: u32) {
2984        let query_set = query_set.as_core();
2985
2986        if let Err(cause) =
2987            self.context
2988                .0
2989                .compute_pass_write_timestamp(&mut self.pass, query_set.id, query_index)
2990        {
2991            self.context.handle_error(
2992                &self.error_sink,
2993                cause,
2994                self.pass.label(),
2995                "ComputePass::write_timestamp",
2996            );
2997        }
2998    }
2999
3000    fn begin_pipeline_statistics_query(
3001        &mut self,
3002        query_set: &dispatch::DispatchQuerySet,
3003        query_index: u32,
3004    ) {
3005        let query_set = query_set.as_core();
3006
3007        if let Err(cause) = self.context.0.compute_pass_begin_pipeline_statistics_query(
3008            &mut self.pass,
3009            query_set.id,
3010            query_index,
3011        ) {
3012            self.context.handle_error(
3013                &self.error_sink,
3014                cause,
3015                self.pass.label(),
3016                "ComputePass::begin_pipeline_statistics_query",
3017            );
3018        }
3019    }
3020
3021    fn end_pipeline_statistics_query(&mut self) {
3022        if let Err(cause) = self
3023            .context
3024            .0
3025            .compute_pass_end_pipeline_statistics_query(&mut self.pass)
3026        {
3027            self.context.handle_error(
3028                &self.error_sink,
3029                cause,
3030                self.pass.label(),
3031                "ComputePass::end_pipeline_statistics_query",
3032            );
3033        }
3034    }
3035
3036    fn dispatch_workgroups(&mut self, x: u32, y: u32, z: u32) {
3037        if let Err(cause) = self
3038            .context
3039            .0
3040            .compute_pass_dispatch_workgroups(&mut self.pass, x, y, z)
3041        {
3042            self.context.handle_error(
3043                &self.error_sink,
3044                cause,
3045                self.pass.label(),
3046                "ComputePass::dispatch_workgroups",
3047            );
3048        }
3049    }
3050
3051    fn dispatch_workgroups_indirect(
3052        &mut self,
3053        indirect_buffer: &dispatch::DispatchBuffer,
3054        indirect_offset: crate::BufferAddress,
3055    ) {
3056        let indirect_buffer = indirect_buffer.as_core();
3057
3058        if let Err(cause) = self.context.0.compute_pass_dispatch_workgroups_indirect(
3059            &mut self.pass,
3060            indirect_buffer.id,
3061            indirect_offset,
3062        ) {
3063            self.context.handle_error(
3064                &self.error_sink,
3065                cause,
3066                self.pass.label(),
3067                "ComputePass::dispatch_workgroups_indirect",
3068            );
3069        }
3070    }
3071
3072    fn end(&mut self) {
3073        if let Err(cause) = self.context.0.compute_pass_end(&mut self.pass) {
3074            self.context.handle_error(
3075                &self.error_sink,
3076                cause,
3077                self.pass.label(),
3078                "ComputePass::end",
3079            );
3080        }
3081    }
3082}
3083
impl Drop for CoreComputePass {
    fn drop(&mut self) {
        // End the pass on drop so recording is finalized even if the user
        // never called `end` explicitly. (`end` tolerates repeated calls via
        // the error sink rather than panicking — TODO confirm against wgc.)
        dispatch::ComputePassInterface::end(self);
    }
}
3089
3090impl dispatch::RenderPassInterface for CoreRenderPass {
3091    fn set_pipeline(&mut self, pipeline: &dispatch::DispatchRenderPipeline) {
3092        let pipeline = pipeline.as_core();
3093
3094        if let Err(cause) = self
3095            .context
3096            .0
3097            .render_pass_set_pipeline(&mut self.pass, pipeline.id)
3098        {
3099            self.context.handle_error(
3100                &self.error_sink,
3101                cause,
3102                self.pass.label(),
3103                "RenderPass::set_pipeline",
3104            );
3105        }
3106    }
3107
3108    fn set_bind_group(
3109        &mut self,
3110        index: u32,
3111        bind_group: Option<&dispatch::DispatchBindGroup>,
3112        offsets: &[crate::DynamicOffset],
3113    ) {
3114        let bg = bind_group.map(|bg| bg.as_core().id);
3115
3116        if let Err(cause) =
3117            self.context
3118                .0
3119                .render_pass_set_bind_group(&mut self.pass, index, bg, offsets)
3120        {
3121            self.context.handle_error(
3122                &self.error_sink,
3123                cause,
3124                self.pass.label(),
3125                "RenderPass::set_bind_group",
3126            );
3127        }
3128    }
3129
3130    fn set_index_buffer(
3131        &mut self,
3132        buffer: &dispatch::DispatchBuffer,
3133        index_format: crate::IndexFormat,
3134        offset: crate::BufferAddress,
3135        size: Option<crate::BufferSize>,
3136    ) {
3137        let buffer = buffer.as_core();
3138
3139        if let Err(cause) = self.context.0.render_pass_set_index_buffer(
3140            &mut self.pass,
3141            buffer.id,
3142            index_format,
3143            offset,
3144            size,
3145        ) {
3146            self.context.handle_error(
3147                &self.error_sink,
3148                cause,
3149                self.pass.label(),
3150                "RenderPass::set_index_buffer",
3151            );
3152        }
3153    }
3154
3155    fn set_vertex_buffer(
3156        &mut self,
3157        slot: u32,
3158        buffer: &dispatch::DispatchBuffer,
3159        offset: crate::BufferAddress,
3160        size: Option<crate::BufferSize>,
3161    ) {
3162        let buffer = buffer.as_core();
3163
3164        if let Err(cause) = self.context.0.render_pass_set_vertex_buffer(
3165            &mut self.pass,
3166            slot,
3167            buffer.id,
3168            offset,
3169            size,
3170        ) {
3171            self.context.handle_error(
3172                &self.error_sink,
3173                cause,
3174                self.pass.label(),
3175                "RenderPass::set_vertex_buffer",
3176            );
3177        }
3178    }
3179
3180    fn set_immediates(&mut self, offset: u32, data: &[u8]) {
3181        if let Err(cause) = self
3182            .context
3183            .0
3184            .render_pass_set_immediates(&mut self.pass, offset, data)
3185        {
3186            self.context.handle_error(
3187                &self.error_sink,
3188                cause,
3189                self.pass.label(),
3190                "RenderPass::set_immediates",
3191            );
3192        }
3193    }
3194
3195    fn set_blend_constant(&mut self, color: crate::Color) {
3196        if let Err(cause) = self
3197            .context
3198            .0
3199            .render_pass_set_blend_constant(&mut self.pass, color)
3200        {
3201            self.context.handle_error(
3202                &self.error_sink,
3203                cause,
3204                self.pass.label(),
3205                "RenderPass::set_blend_constant",
3206            );
3207        }
3208    }
3209
3210    fn set_scissor_rect(&mut self, x: u32, y: u32, width: u32, height: u32) {
3211        if let Err(cause) =
3212            self.context
3213                .0
3214                .render_pass_set_scissor_rect(&mut self.pass, x, y, width, height)
3215        {
3216            self.context.handle_error(
3217                &self.error_sink,
3218                cause,
3219                self.pass.label(),
3220                "RenderPass::set_scissor_rect",
3221            );
3222        }
3223    }
3224
3225    fn set_viewport(
3226        &mut self,
3227        x: f32,
3228        y: f32,
3229        width: f32,
3230        height: f32,
3231        min_depth: f32,
3232        max_depth: f32,
3233    ) {
3234        if let Err(cause) = self.context.0.render_pass_set_viewport(
3235            &mut self.pass,
3236            x,
3237            y,
3238            width,
3239            height,
3240            min_depth,
3241            max_depth,
3242        ) {
3243            self.context.handle_error(
3244                &self.error_sink,
3245                cause,
3246                self.pass.label(),
3247                "RenderPass::set_viewport",
3248            );
3249        }
3250    }
3251
3252    fn set_stencil_reference(&mut self, reference: u32) {
3253        if let Err(cause) = self
3254            .context
3255            .0
3256            .render_pass_set_stencil_reference(&mut self.pass, reference)
3257        {
3258            self.context.handle_error(
3259                &self.error_sink,
3260                cause,
3261                self.pass.label(),
3262                "RenderPass::set_stencil_reference",
3263            );
3264        }
3265    }
3266
3267    fn draw(&mut self, vertices: Range<u32>, instances: Range<u32>) {
3268        if let Err(cause) = self.context.0.render_pass_draw(
3269            &mut self.pass,
3270            vertices.end - vertices.start,
3271            instances.end - instances.start,
3272            vertices.start,
3273            instances.start,
3274        ) {
3275            self.context.handle_error(
3276                &self.error_sink,
3277                cause,
3278                self.pass.label(),
3279                "RenderPass::draw",
3280            );
3281        }
3282    }
3283
3284    fn draw_indexed(&mut self, indices: Range<u32>, base_vertex: i32, instances: Range<u32>) {
3285        if let Err(cause) = self.context.0.render_pass_draw_indexed(
3286            &mut self.pass,
3287            indices.end - indices.start,
3288            instances.end - instances.start,
3289            indices.start,
3290            base_vertex,
3291            instances.start,
3292        ) {
3293            self.context.handle_error(
3294                &self.error_sink,
3295                cause,
3296                self.pass.label(),
3297                "RenderPass::draw_indexed",
3298            );
3299        }
3300    }
3301
3302    fn draw_mesh_tasks(&mut self, group_count_x: u32, group_count_y: u32, group_count_z: u32) {
3303        if let Err(cause) = self.context.0.render_pass_draw_mesh_tasks(
3304            &mut self.pass,
3305            group_count_x,
3306            group_count_y,
3307            group_count_z,
3308        ) {
3309            self.context.handle_error(
3310                &self.error_sink,
3311                cause,
3312                self.pass.label(),
3313                "RenderPass::draw_mesh_tasks",
3314            );
3315        }
3316    }
3317
3318    fn draw_indirect(
3319        &mut self,
3320        indirect_buffer: &dispatch::DispatchBuffer,
3321        indirect_offset: crate::BufferAddress,
3322    ) {
3323        let indirect_buffer = indirect_buffer.as_core();
3324
3325        if let Err(cause) = self.context.0.render_pass_draw_indirect(
3326            &mut self.pass,
3327            indirect_buffer.id,
3328            indirect_offset,
3329        ) {
3330            self.context.handle_error(
3331                &self.error_sink,
3332                cause,
3333                self.pass.label(),
3334                "RenderPass::draw_indirect",
3335            );
3336        }
3337    }
3338
3339    fn draw_indexed_indirect(
3340        &mut self,
3341        indirect_buffer: &dispatch::DispatchBuffer,
3342        indirect_offset: crate::BufferAddress,
3343    ) {
3344        let indirect_buffer = indirect_buffer.as_core();
3345
3346        if let Err(cause) = self.context.0.render_pass_draw_indexed_indirect(
3347            &mut self.pass,
3348            indirect_buffer.id,
3349            indirect_offset,
3350        ) {
3351            self.context.handle_error(
3352                &self.error_sink,
3353                cause,
3354                self.pass.label(),
3355                "RenderPass::draw_indexed_indirect",
3356            );
3357        }
3358    }
3359
3360    fn draw_mesh_tasks_indirect(
3361        &mut self,
3362        indirect_buffer: &dispatch::DispatchBuffer,
3363        indirect_offset: crate::BufferAddress,
3364    ) {
3365        let indirect_buffer = indirect_buffer.as_core();
3366
3367        if let Err(cause) = self.context.0.render_pass_draw_mesh_tasks_indirect(
3368            &mut self.pass,
3369            indirect_buffer.id,
3370            indirect_offset,
3371        ) {
3372            self.context.handle_error(
3373                &self.error_sink,
3374                cause,
3375                self.pass.label(),
3376                "RenderPass::draw_mesh_tasks_indirect",
3377            );
3378        }
3379    }
3380
3381    fn multi_draw_indirect(
3382        &mut self,
3383        indirect_buffer: &dispatch::DispatchBuffer,
3384        indirect_offset: crate::BufferAddress,
3385        count: u32,
3386    ) {
3387        let indirect_buffer = indirect_buffer.as_core();
3388
3389        if let Err(cause) = self.context.0.render_pass_multi_draw_indirect(
3390            &mut self.pass,
3391            indirect_buffer.id,
3392            indirect_offset,
3393            count,
3394        ) {
3395            self.context.handle_error(
3396                &self.error_sink,
3397                cause,
3398                self.pass.label(),
3399                "RenderPass::multi_draw_indirect",
3400            );
3401        }
3402    }
3403
3404    fn multi_draw_indexed_indirect(
3405        &mut self,
3406        indirect_buffer: &dispatch::DispatchBuffer,
3407        indirect_offset: crate::BufferAddress,
3408        count: u32,
3409    ) {
3410        let indirect_buffer = indirect_buffer.as_core();
3411
3412        if let Err(cause) = self.context.0.render_pass_multi_draw_indexed_indirect(
3413            &mut self.pass,
3414            indirect_buffer.id,
3415            indirect_offset,
3416            count,
3417        ) {
3418            self.context.handle_error(
3419                &self.error_sink,
3420                cause,
3421                self.pass.label(),
3422                "RenderPass::multi_draw_indexed_indirect",
3423            );
3424        }
3425    }
3426
3427    fn multi_draw_mesh_tasks_indirect(
3428        &mut self,
3429        indirect_buffer: &dispatch::DispatchBuffer,
3430        indirect_offset: crate::BufferAddress,
3431        count: u32,
3432    ) {
3433        let indirect_buffer = indirect_buffer.as_core();
3434
3435        if let Err(cause) = self.context.0.render_pass_multi_draw_mesh_tasks_indirect(
3436            &mut self.pass,
3437            indirect_buffer.id,
3438            indirect_offset,
3439            count,
3440        ) {
3441            self.context.handle_error(
3442                &self.error_sink,
3443                cause,
3444                self.pass.label(),
3445                "RenderPass::multi_draw_mesh_tasks_indirect",
3446            );
3447        }
3448    }
3449
3450    fn multi_draw_indirect_count(
3451        &mut self,
3452        indirect_buffer: &dispatch::DispatchBuffer,
3453        indirect_offset: crate::BufferAddress,
3454        count_buffer: &dispatch::DispatchBuffer,
3455        count_buffer_offset: crate::BufferAddress,
3456        max_count: u32,
3457    ) {
3458        let indirect_buffer = indirect_buffer.as_core();
3459        let count_buffer = count_buffer.as_core();
3460
3461        if let Err(cause) = self.context.0.render_pass_multi_draw_indirect_count(
3462            &mut self.pass,
3463            indirect_buffer.id,
3464            indirect_offset,
3465            count_buffer.id,
3466            count_buffer_offset,
3467            max_count,
3468        ) {
3469            self.context.handle_error(
3470                &self.error_sink,
3471                cause,
3472                self.pass.label(),
3473                "RenderPass::multi_draw_indirect_count",
3474            );
3475        }
3476    }
3477
3478    fn multi_draw_indexed_indirect_count(
3479        &mut self,
3480        indirect_buffer: &dispatch::DispatchBuffer,
3481        indirect_offset: crate::BufferAddress,
3482        count_buffer: &dispatch::DispatchBuffer,
3483        count_buffer_offset: crate::BufferAddress,
3484        max_count: u32,
3485    ) {
3486        let indirect_buffer = indirect_buffer.as_core();
3487        let count_buffer = count_buffer.as_core();
3488
3489        if let Err(cause) = self
3490            .context
3491            .0
3492            .render_pass_multi_draw_indexed_indirect_count(
3493                &mut self.pass,
3494                indirect_buffer.id,
3495                indirect_offset,
3496                count_buffer.id,
3497                count_buffer_offset,
3498                max_count,
3499            )
3500        {
3501            self.context.handle_error(
3502                &self.error_sink,
3503                cause,
3504                self.pass.label(),
3505                "RenderPass::multi_draw_indexed_indirect_count",
3506            );
3507        }
3508    }
3509
3510    fn multi_draw_mesh_tasks_indirect_count(
3511        &mut self,
3512        indirect_buffer: &dispatch::DispatchBuffer,
3513        indirect_offset: crate::BufferAddress,
3514        count_buffer: &dispatch::DispatchBuffer,
3515        count_buffer_offset: crate::BufferAddress,
3516        max_count: u32,
3517    ) {
3518        let indirect_buffer = indirect_buffer.as_core();
3519        let count_buffer = count_buffer.as_core();
3520
3521        if let Err(cause) = self
3522            .context
3523            .0
3524            .render_pass_multi_draw_mesh_tasks_indirect_count(
3525                &mut self.pass,
3526                indirect_buffer.id,
3527                indirect_offset,
3528                count_buffer.id,
3529                count_buffer_offset,
3530                max_count,
3531            )
3532        {
3533            self.context.handle_error(
3534                &self.error_sink,
3535                cause,
3536                self.pass.label(),
3537                "RenderPass::multi_draw_mesh_tasks_indirect_count",
3538            );
3539        }
3540    }
3541
3542    fn insert_debug_marker(&mut self, label: &str) {
3543        if let Err(cause) = self
3544            .context
3545            .0
3546            .render_pass_insert_debug_marker(&mut self.pass, label, 0)
3547        {
3548            self.context.handle_error(
3549                &self.error_sink,
3550                cause,
3551                self.pass.label(),
3552                "RenderPass::insert_debug_marker",
3553            );
3554        }
3555    }
3556
3557    fn push_debug_group(&mut self, group_label: &str) {
3558        if let Err(cause) =
3559            self.context
3560                .0
3561                .render_pass_push_debug_group(&mut self.pass, group_label, 0)
3562        {
3563            self.context.handle_error(
3564                &self.error_sink,
3565                cause,
3566                self.pass.label(),
3567                "RenderPass::push_debug_group",
3568            );
3569        }
3570    }
3571
3572    fn pop_debug_group(&mut self) {
3573        if let Err(cause) = self.context.0.render_pass_pop_debug_group(&mut self.pass) {
3574            self.context.handle_error(
3575                &self.error_sink,
3576                cause,
3577                self.pass.label(),
3578                "RenderPass::pop_debug_group",
3579            );
3580        }
3581    }
3582
3583    fn write_timestamp(&mut self, query_set: &dispatch::DispatchQuerySet, query_index: u32) {
3584        let query_set = query_set.as_core();
3585
3586        if let Err(cause) =
3587            self.context
3588                .0
3589                .render_pass_write_timestamp(&mut self.pass, query_set.id, query_index)
3590        {
3591            self.context.handle_error(
3592                &self.error_sink,
3593                cause,
3594                self.pass.label(),
3595                "RenderPass::write_timestamp",
3596            );
3597        }
3598    }
3599
3600    fn begin_occlusion_query(&mut self, query_index: u32) {
3601        if let Err(cause) = self
3602            .context
3603            .0
3604            .render_pass_begin_occlusion_query(&mut self.pass, query_index)
3605        {
3606            self.context.handle_error(
3607                &self.error_sink,
3608                cause,
3609                self.pass.label(),
3610                "RenderPass::begin_occlusion_query",
3611            );
3612        }
3613    }
3614
3615    fn end_occlusion_query(&mut self) {
3616        if let Err(cause) = self
3617            .context
3618            .0
3619            .render_pass_end_occlusion_query(&mut self.pass)
3620        {
3621            self.context.handle_error(
3622                &self.error_sink,
3623                cause,
3624                self.pass.label(),
3625                "RenderPass::end_occlusion_query",
3626            );
3627        }
3628    }
3629
3630    fn begin_pipeline_statistics_query(
3631        &mut self,
3632        query_set: &dispatch::DispatchQuerySet,
3633        query_index: u32,
3634    ) {
3635        let query_set = query_set.as_core();
3636
3637        if let Err(cause) = self.context.0.render_pass_begin_pipeline_statistics_query(
3638            &mut self.pass,
3639            query_set.id,
3640            query_index,
3641        ) {
3642            self.context.handle_error(
3643                &self.error_sink,
3644                cause,
3645                self.pass.label(),
3646                "RenderPass::begin_pipeline_statistics_query",
3647            );
3648        }
3649    }
3650
3651    fn end_pipeline_statistics_query(&mut self) {
3652        if let Err(cause) = self
3653            .context
3654            .0
3655            .render_pass_end_pipeline_statistics_query(&mut self.pass)
3656        {
3657            self.context.handle_error(
3658                &self.error_sink,
3659                cause,
3660                self.pass.label(),
3661                "RenderPass::end_pipeline_statistics_query",
3662            );
3663        }
3664    }
3665
3666    fn execute_bundles(
3667        &mut self,
3668        render_bundles: &mut dyn Iterator<Item = &dispatch::DispatchRenderBundle>,
3669    ) {
3670        let temp_render_bundles = render_bundles
3671            .map(|rb| rb.as_core().id)
3672            .collect::<SmallVec<[_; 4]>>();
3673        if let Err(cause) = self
3674            .context
3675            .0
3676            .render_pass_execute_bundles(&mut self.pass, &temp_render_bundles)
3677        {
3678            self.context.handle_error(
3679                &self.error_sink,
3680                cause,
3681                self.pass.label(),
3682                "RenderPass::execute_bundles",
3683            );
3684        }
3685    }
3686
3687    fn end(&mut self) {
3688        if let Err(cause) = self.context.0.render_pass_end(&mut self.pass) {
3689            self.context.handle_error(
3690                &self.error_sink,
3691                cause,
3692                self.pass.label(),
3693                "RenderPass::end",
3694            );
3695        }
3696    }
3697}
3698
impl Drop for CoreRenderPass {
    fn drop(&mut self) {
        // End the pass when the wrapper is dropped, so a pass the user never
        // explicitly ended is still closed and its errors are still reported
        // through the error sink.
        dispatch::RenderPassInterface::end(self);
    }
}
3704
// Thin wrappers over wgpu-core's render-bundle recording helpers
// (`bundle_ffi`). Unlike render passes, no error sink is threaded through
// recording here; the error (if any) surfaces in `finish`.
impl dispatch::RenderBundleEncoderInterface for CoreRenderBundleEncoder {
    fn set_pipeline(&mut self, pipeline: &dispatch::DispatchRenderPipeline) {
        let pipeline = pipeline.as_core();

        wgpu_render_bundle_set_pipeline(&mut self.encoder, pipeline.id)
    }

    fn set_bind_group(
        &mut self,
        index: u32,
        bind_group: Option<&dispatch::DispatchBindGroup>,
        offsets: &[crate::DynamicOffset],
    ) {
        // `None` unsets the bind group at `index`.
        let bg = bind_group.map(|bg| bg.as_core().id);

        // SAFETY: `offsets` is a live slice borrowed for the duration of this
        // call, so the pointer/length pair handed to the FFI helper is valid.
        unsafe {
            wgpu_render_bundle_set_bind_group(
                &mut self.encoder,
                index,
                bg,
                offsets.as_ptr(),
                offsets.len(),
            )
        }
    }

    fn set_index_buffer(
        &mut self,
        buffer: &dispatch::DispatchBuffer,
        index_format: crate::IndexFormat,
        offset: crate::BufferAddress,
        size: Option<crate::BufferSize>,
    ) {
        let buffer = buffer.as_core();

        // `size == None` binds from `offset` to the end of the buffer.
        self.encoder
            .set_index_buffer(buffer.id, index_format, offset, size)
    }

    fn set_vertex_buffer(
        &mut self,
        slot: u32,
        buffer: &dispatch::DispatchBuffer,
        offset: crate::BufferAddress,
        size: Option<crate::BufferSize>,
    ) {
        let buffer = buffer.as_core();

        wgpu_render_bundle_set_vertex_buffer(&mut self.encoder, slot, buffer.id, offset, size)
    }

    fn set_immediates(&mut self, offset: u32, data: &[u8]) {
        // SAFETY: `data` is a live slice for the duration of the call. The
        // length is converted to the FFI's expected integer type; the
        // `unwrap` panics if it does not fit (practically unreachable for
        // immediates-sized payloads, but worth noting).
        unsafe {
            wgpu_render_bundle_set_immediates(
                &mut self.encoder,
                offset,
                data.len().try_into().unwrap(),
                data.as_ptr(),
            )
        }
    }

    fn draw(&mut self, vertices: Range<u32>, instances: Range<u32>) {
        // Convert half-open ranges into the (count, first) form the FFI takes.
        wgpu_render_bundle_draw(
            &mut self.encoder,
            vertices.end - vertices.start,
            instances.end - instances.start,
            vertices.start,
            instances.start,
        )
    }

    fn draw_indexed(&mut self, indices: Range<u32>, base_vertex: i32, instances: Range<u32>) {
        // Same range-to-(count, first) conversion as `draw`, plus the signed
        // base-vertex offset applied to each index.
        wgpu_render_bundle_draw_indexed(
            &mut self.encoder,
            indices.end - indices.start,
            instances.end - instances.start,
            indices.start,
            base_vertex,
            instances.start,
        )
    }

    fn draw_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        wgpu_render_bundle_draw_indirect(&mut self.encoder, indirect_buffer.id, indirect_offset)
    }

    fn draw_indexed_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        wgpu_render_bundle_draw_indexed_indirect(
            &mut self.encoder,
            indirect_buffer.id,
            indirect_offset,
        )
    }

    // Consumes the encoder and bakes the recorded commands into a bundle.
    // Any error accumulated during recording is handled as fatal here, since
    // no error sink is attached to bundle encoding.
    fn finish(self, desc: &crate::RenderBundleDescriptor<'_>) -> dispatch::DispatchRenderBundle
    where
        Self: Sized,
    {
        let (id, error) = self.context.0.render_bundle_encoder_finish(
            self.encoder,
            &desc.map_label(|l| l.map(Borrowed)),
            None,
        );
        if let Some(err) = error {
            self.context
                .handle_error_fatal(err, "RenderBundleEncoder::finish");
        }
        CoreRenderBundle {
            context: self.context.clone(),
            id,
        }
        .into()
    }
}
3832
// Marker impl: finished render bundles expose no backend-specific operations
// beyond creation (`RenderBundleEncoderInterface::finish`) and destruction
// (the `Drop` impl below in the original file).
impl dispatch::RenderBundleInterface for CoreRenderBundle {}
3834
impl Drop for CoreRenderBundle {
    fn drop(&mut self) {
        // Release the wgpu-core render bundle tracked by this id.
        self.context.0.render_bundle_drop(self.id)
    }
}
3840
3841impl dispatch::SurfaceInterface for CoreSurface {
3842    fn get_capabilities(&self, adapter: &dispatch::DispatchAdapter) -> wgt::SurfaceCapabilities {
3843        let adapter = adapter.as_core();
3844
3845        self.context
3846            .0
3847            .surface_get_capabilities(self.id, adapter.id)
3848            .unwrap_or_default()
3849    }
3850
3851    fn configure(&self, device: &dispatch::DispatchDevice, config: &crate::SurfaceConfiguration) {
3852        let device = device.as_core();
3853
3854        let error = self.context.0.surface_configure(self.id, device.id, config);
3855        if let Some(e) = error {
3856            self.context
3857                .handle_error_nolabel(&device.error_sink, e, "Surface::configure");
3858        } else {
3859            *self.configured_device.lock() = Some(device.id);
3860            *self.error_sink.lock() = Some(device.error_sink.clone());
3861        }
3862    }
3863
3864    fn get_current_texture(
3865        &self,
3866    ) -> (
3867        Option<dispatch::DispatchTexture>,
3868        crate::SurfaceStatus,
3869        dispatch::DispatchSurfaceOutputDetail,
3870    ) {
3871        let error_sink = if let Some(error_sink) = self.error_sink.lock().as_ref() {
3872            error_sink.clone()
3873        } else {
3874            Arc::new(Mutex::new(ErrorSinkRaw::new()))
3875        };
3876
3877        let output_detail = CoreSurfaceOutputDetail {
3878            context: self.context.clone(),
3879            surface_id: self.id,
3880            error_sink: error_sink.clone(),
3881        }
3882        .into();
3883
3884        match self.context.0.surface_get_current_texture(self.id, None) {
3885            Ok(wgc::present::SurfaceOutput {
3886                status,
3887                texture: texture_id,
3888            }) => {
3889                let data = texture_id
3890                    .map(|id| CoreTexture {
3891                        context: self.context.clone(),
3892                        id,
3893                        error_sink,
3894                    })
3895                    .map(Into::into);
3896
3897                (data, status, output_detail)
3898            }
3899            Err(err) => {
3900                let error_sink = self.error_sink.lock();
3901                match error_sink.as_ref() {
3902                    Some(error_sink) => {
3903                        self.context.handle_error_nolabel(
3904                            error_sink,
3905                            err,
3906                            "Surface::get_current_texture_view",
3907                        );
3908                        (None, crate::SurfaceStatus::Unknown, output_detail)
3909                    }
3910                    None => self
3911                        .context
3912                        .handle_error_fatal(err, "Surface::get_current_texture_view"),
3913                }
3914            }
3915        }
3916    }
3917}
3918
impl Drop for CoreSurface {
    fn drop(&mut self) {
        // Release the wgpu-core surface tracked by this id.
        self.context.0.surface_drop(self.id)
    }
}
3924
3925impl dispatch::SurfaceOutputDetailInterface for CoreSurfaceOutputDetail {
3926    fn present(&self) {
3927        match self.context.0.surface_present(self.surface_id) {
3928            Ok(_status) => (),
3929            Err(err) => {
3930                self.context
3931                    .handle_error_nolabel(&self.error_sink, err, "Surface::present");
3932            }
3933        }
3934    }
3935
3936    fn texture_discard(&self) {
3937        match self.context.0.surface_texture_discard(self.surface_id) {
3938            Ok(_status) => (),
3939            Err(err) => self
3940                .context
3941                .handle_error_fatal(err, "Surface::discard_texture"),
3942        }
3943    }
3944}
impl Drop for CoreSurfaceOutputDetail {
    fn drop(&mut self) {
        // Intentionally a no-op: presenting or discarding the surface texture
        // is driven by the api-level struct before this detail is dropped, so
        // there is nothing to clean up here.
    }
}
3952
3953impl dispatch::QueueWriteBufferInterface for CoreQueueWriteBuffer {
3954    fn slice(&self) -> &[u8] {
3955        panic!()
3956    }
3957
3958    #[inline]
3959    fn slice_mut(&mut self) -> &mut [u8] {
3960        self.mapping.slice_mut()
3961    }
3962}
impl Drop for CoreQueueWriteBuffer {
    fn drop(&mut self) {
        // Intentionally a no-op: the api-level struct is responsible for
        // submitting this staging buffer via `queue.write_staging_buffer`,
        // so there is nothing to release here.
    }
}
3970
impl dispatch::BufferMappedRangeInterface for CoreBufferMappedRange {
    #[inline]
    fn slice(&self) -> &[u8] {
        // SAFETY: `ptr` and `size` describe the mapped region this wrapper
        // was created from; the mapping is assumed to remain valid for the
        // lifetime of `self` — NOTE(review): invariant established at
        // construction, confirm it is upheld wherever this type is built.
        unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.size) }
    }

    #[inline]
    fn slice_mut(&mut self) -> &mut [u8] {
        // SAFETY: same validity invariant as `slice`; `&mut self` gives this
        // wrapper exclusive access to the mapped bytes for the borrow.
        unsafe { slice::from_raw_parts_mut(self.ptr.as_ptr(), self.size) }
    }

    #[cfg(webgpu)]
    fn as_uint8array(&self) -> &js_sys::Uint8Array {
        // This is the native backend; the Uint8Array view only exists on the
        // WebGPU (wasm) backend.
        panic!("Only available on WebGPU")
    }
}