1use alloc::{
2 borrow::Cow::{self, Borrowed},
3 boxed::Box,
4 format,
5 string::{String, ToString as _},
6 sync::Arc,
7 vec,
8 vec::Vec,
9};
10use core::{
11 error::Error,
12 fmt,
13 future::ready,
14 ops::{Deref, Range},
15 pin::Pin,
16 ptr::NonNull,
17 slice,
18};
19
20use arrayvec::ArrayVec;
21use smallvec::SmallVec;
22use wgc::{
23 command::bundle_ffi::*, error::ContextErrorSource, pipeline::CreateShaderModuleError,
24 resource::BlasPrepareCompactResult,
25};
26use wgt::{
27 error::{ErrorType, WebGpuError},
28 WasmNotSendSync,
29};
30
31use crate::{
32 api,
33 dispatch::{self, BlasCompactCallback, BufferMappedRangeInterface},
34 BindingResource, Blas, BufferBinding, BufferDescriptor, CompilationInfo, CompilationMessage,
35 CompilationMessageType, ErrorSource, Features, Label, LoadOp, MapMode, Operations,
36 ShaderSource, SurfaceTargetUnsafe, TextureDescriptor, Tlas,
37};
38use crate::{dispatch::DispatchAdapter, util::Mutex};
39
/// The `wgpu-core`-backed implementation of the dispatch context.
///
/// A cheap, clonable handle: all clones share the same
/// [`wgc::global::Global`], which owns every instance/adapter/device
/// resource registry.
#[derive(Clone)]
pub struct ContextWgpuCore(Arc<wgc::global::Global>);
42
impl Drop for ContextWgpuCore {
    fn drop(&mut self) {
        // Intentionally empty: the inner `Arc<wgc::global::Global>` tears
        // itself down when the last clone is dropped.
        // NOTE(review): an explicit empty `Drop` impl looks like a
        // placeholder or a hook for future cleanup — confirm it is still
        // needed.
    }
}
48
49impl fmt::Debug for ContextWgpuCore {
50 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
51 f.debug_struct("ContextWgpuCore")
52 .field("type", &"Native")
53 .finish()
54 }
55}
56
impl ContextWgpuCore {
    /// Wraps an existing `wgpu-hal` instance of backend `A`.
    ///
    /// # Safety
    /// Caller must uphold the requirements of
    /// `wgc::global::Global::from_hal_instance` for `hal_instance`.
    pub unsafe fn from_hal_instance<A: hal::Api>(hal_instance: A::Instance) -> Self {
        Self(unsafe {
            Arc::new(wgc::global::Global::from_hal_instance::<A>(
                "wgpu",
                hal_instance,
            ))
        })
    }

    /// Returns the underlying `wgpu-hal` instance for backend `A`, if any.
    ///
    /// # Safety
    /// See `wgc::global::Global::instance_as_hal`.
    pub unsafe fn instance_as_hal<A: hal::Api>(&self) -> Option<&A::Instance> {
        unsafe { self.0.instance_as_hal::<A>() }
    }

    /// Wraps an existing `wgpu-core` instance in a new context.
    ///
    /// # Safety
    /// See `wgc::global::Global::from_instance`.
    pub unsafe fn from_core_instance(core_instance: wgc::instance::Instance) -> Self {
        Self(unsafe { Arc::new(wgc::global::Global::from_instance(core_instance)) })
    }

    /// Lists the ids of all adapters available on the requested `backends`.
    #[cfg(wgpu_core)]
    pub fn enumerate_adapters(&self, backends: wgt::Backends) -> Vec<wgc::id::AdapterId> {
        self.0.enumerate_adapters(backends)
    }

    /// Registers a `wgpu-hal` adapter with this context and returns its id.
    ///
    /// # Safety
    /// See `wgc::global::Global::create_adapter_from_hal`.
    pub unsafe fn create_adapter_from_hal<A: hal::Api>(
        &self,
        hal_adapter: hal::ExposedAdapter<A>,
    ) -> wgc::id::AdapterId {
        unsafe { self.0.create_adapter_from_hal(hal_adapter.into(), None) }
    }

    /// Borrows the hal adapter behind `adapter`, if it uses backend `A`.
    ///
    /// # Safety
    /// See `wgc::global::Global::adapter_as_hal`.
    pub unsafe fn adapter_as_hal<A: hal::Api>(
        &self,
        adapter: &CoreAdapter,
    ) -> Option<impl Deref<Target = A::Adapter> + WasmNotSendSync> {
        unsafe { self.0.adapter_as_hal::<A>(adapter.id) }
    }

    /// Borrows the hal buffer behind `buffer`, if it uses backend `A`.
    ///
    /// # Safety
    /// See `wgc::global::Global::buffer_as_hal`.
    pub unsafe fn buffer_as_hal<A: hal::Api>(
        &self,
        buffer: &CoreBuffer,
    ) -> Option<impl Deref<Target = A::Buffer>> {
        unsafe { self.0.buffer_as_hal::<A>(buffer.id) }
    }

    /// Registers an already-open hal device/queue pair and wraps them in
    /// core handles that share a freshly created error sink.
    ///
    /// # Safety
    /// See `wgc::global::Global::create_device_from_hal`.
    pub unsafe fn create_device_from_hal<A: hal::Api>(
        &self,
        adapter: &CoreAdapter,
        hal_device: hal::OpenDevice<A>,
        desc: &crate::DeviceDescriptor<'_>,
    ) -> Result<(CoreDevice, CoreQueue), crate::RequestDeviceError> {
        // Tracing was removed upstream; warn loudly instead of silently
        // ignoring the request.
        if !matches!(desc.trace, wgt::Trace::Off) {
            log::error!(
                "
                Feature 'trace' has been removed temporarily; \
                see https://github.com/gfx-rs/wgpu/issues/5974. \
                The `trace` parameter will have no effect."
            );
        }

        let (device_id, queue_id) = unsafe {
            self.0.create_device_from_hal(
                adapter.id,
                hal_device.into(),
                &desc.map_label(|l| l.map(Borrowed)),
                None,
                None,
            )
        }?;
        // Device and queue share one error sink so errors from either are
        // routed through the same scope stack / uncaptured handler.
        let error_sink = Arc::new(Mutex::new(ErrorSinkRaw::new()));
        let device = CoreDevice {
            context: self.clone(),
            id: device_id,
            error_sink: error_sink.clone(),
            features: desc.required_features,
        };
        let queue = CoreQueue {
            context: self.clone(),
            id: queue_id,
            error_sink,
        };
        Ok((device, queue))
    }

    /// Wraps an existing hal texture in a core texture owned by `device`.
    /// Creation errors are reported through the device's error sink rather
    /// than returned.
    ///
    /// # Safety
    /// See `wgc::global::Global::create_texture_from_hal`.
    pub unsafe fn create_texture_from_hal<A: hal::Api>(
        &self,
        hal_texture: A::Texture,
        device: &CoreDevice,
        desc: &TextureDescriptor<'_>,
    ) -> CoreTexture {
        let descriptor = desc.map_label_and_view_formats(|l| l.map(Borrowed), |v| v.to_vec());
        let (id, error) = unsafe {
            self.0
                .create_texture_from_hal(Box::new(hal_texture), device.id, &descriptor, None)
        };
        if let Some(cause) = error {
            self.handle_error(
                &device.error_sink,
                cause,
                desc.label,
                "Device::create_texture_from_hal",
            );
        }
        CoreTexture {
            context: self.clone(),
            id,
            error_sink: Arc::clone(&device.error_sink),
        }
    }

    /// Wraps an existing hal buffer in a core buffer owned by `device`.
    /// Creation errors are reported through the device's error sink rather
    /// than returned.
    ///
    /// # Safety
    /// See `wgc::global::Global::create_buffer_from_hal`.
    pub unsafe fn create_buffer_from_hal<A: hal::Api>(
        &self,
        hal_buffer: A::Buffer,
        device: &CoreDevice,
        desc: &BufferDescriptor<'_>,
    ) -> CoreBuffer {
        let (id, error) = unsafe {
            self.0.create_buffer_from_hal::<A>(
                hal_buffer,
                device.id,
                &desc.map_label(|l| l.map(Borrowed)),
                None,
            )
        };
        if let Some(cause) = error {
            self.handle_error(
                &device.error_sink,
                cause,
                desc.label,
                "Device::create_buffer_from_hal",
            );
        }
        CoreBuffer {
            context: self.clone(),
            id,
            error_sink: Arc::clone(&device.error_sink),
        }
    }

    /// Borrows the hal device behind `device`, if it uses backend `A`.
    ///
    /// # Safety
    /// See `wgc::global::Global::device_as_hal`.
    pub unsafe fn device_as_hal<A: hal::Api>(
        &self,
        device: &CoreDevice,
    ) -> Option<impl Deref<Target = A::Device>> {
        unsafe { self.0.device_as_hal::<A>(device.id) }
    }

    /// Borrows the hal surface behind `surface`, if it uses backend `A`.
    ///
    /// # Safety
    /// See `wgc::global::Global::surface_as_hal`.
    pub unsafe fn surface_as_hal<A: hal::Api>(
        &self,
        surface: &CoreSurface,
    ) -> Option<impl Deref<Target = A::Surface>> {
        unsafe { self.0.surface_as_hal::<A>(surface.id) }
    }

    /// Borrows the hal texture behind `texture`, if it uses backend `A`.
    ///
    /// # Safety
    /// See `wgc::global::Global::texture_as_hal`.
    pub unsafe fn texture_as_hal<A: hal::Api>(
        &self,
        texture: &CoreTexture,
    ) -> Option<impl Deref<Target = A::Texture>> {
        unsafe { self.0.texture_as_hal::<A>(texture.id) }
    }

    /// Borrows the hal texture view behind `texture_view`, if it uses
    /// backend `A`.
    ///
    /// # Safety
    /// See `wgc::global::Global::texture_view_as_hal`.
    pub unsafe fn texture_view_as_hal<A: hal::Api>(
        &self,
        texture_view: &CoreTextureView,
    ) -> Option<impl Deref<Target = A::TextureView>> {
        unsafe { self.0.texture_view_as_hal::<A>(texture_view.id) }
    }

    /// Runs `hal_command_encoder_callback` with mutable access to the hal
    /// command encoder behind `command_encoder` (`None` if the backend
    /// doesn't match `A`).
    ///
    /// # Safety
    /// See `wgc::global::Global::command_encoder_as_hal_mut`.
    pub unsafe fn command_encoder_as_hal_mut<
        A: hal::Api,
        F: FnOnce(Option<&mut A::CommandEncoder>) -> R,
        R,
    >(
        &self,
        command_encoder: &CoreCommandEncoder,
        hal_command_encoder_callback: F,
    ) -> R {
        unsafe {
            self.0.command_encoder_as_hal_mut::<A, F, R>(
                command_encoder.id,
                hal_command_encoder_callback,
            )
        }
    }

    /// Borrows the hal acceleration structure behind `blas`, if it uses
    /// backend `A`.
    ///
    /// # Safety
    /// See `wgc::global::Global::blas_as_hal`.
    pub unsafe fn blas_as_hal<A: hal::Api>(
        &self,
        blas: &CoreBlas,
    ) -> Option<impl Deref<Target = A::AccelerationStructure>> {
        unsafe { self.0.blas_as_hal::<A>(blas.id) }
    }

    /// Borrows the hal acceleration structure behind `tlas`, if it uses
    /// backend `A`.
    ///
    /// # Safety
    /// See `wgc::global::Global::tlas_as_hal`.
    pub unsafe fn tlas_as_hal<A: hal::Api>(
        &self,
        tlas: &CoreTlas,
    ) -> Option<impl Deref<Target = A::AccelerationStructure>> {
        unsafe { self.0.tlas_as_hal::<A>(tlas.id) }
    }

    /// Produces a usage/leak report over all registered resources.
    pub fn generate_report(&self) -> wgc::global::GlobalReport {
        self.0.generate_report()
    }

    /// Core error-reporting path: wraps `source` in a `ContextError`, maps
    /// its WebGPU error type to a `crate::Error`, and hands it to the sink.
    ///
    /// Marked `#[cold]`/`#[inline(never)]` to keep the error path out of
    /// callers' hot code.
    #[cold]
    #[track_caller]
    #[inline(never)]
    fn handle_error_inner(
        &self,
        sink_mutex: &Mutex<ErrorSinkRaw>,
        error_type: ErrorType,
        source: ContextErrorSource,
        label: Label<'_>,
        fn_ident: &'static str,
    ) {
        let source: ErrorSource = Box::new(wgc::error::ContextError {
            fn_ident,
            source,
            label: label.unwrap_or_default().to_string(),
        });
        let final_error_handling = {
            let mut sink = sink_mutex.lock();
            // Lazily formatted: OutOfMemory carries no description string.
            let description = || self.format_error(&*source);
            let error = match error_type {
                ErrorType::Internal => {
                    let description = description();
                    crate::Error::Internal {
                        source,
                        description,
                    }
                }
                ErrorType::OutOfMemory => crate::Error::OutOfMemory { source },
                ErrorType::Validation => {
                    let description = description();
                    crate::Error::Validation {
                        source,
                        description,
                    }
                }
                // Device loss is not routed through the error-scope
                // machinery here.
                ErrorType::DeviceLost => return,
            };
            sink.handle_error_or_return_handler(error)
        };

        // The uncaptured-error handler is invoked only after the sink lock
        // is released (end of the block above) — presumably so a handler
        // that itself triggers errors cannot deadlock on the sink.
        if let Some(f) = final_error_handling {
            f();
        }
    }

    /// Reports `source` (with a resource `label`) to the given error sink.
    #[inline]
    #[track_caller]
    fn handle_error(
        &self,
        sink_mutex: &Mutex<ErrorSinkRaw>,
        source: impl WebGpuError + WasmNotSendSync + 'static,
        label: Label<'_>,
        fn_ident: &'static str,
    ) {
        let error_type = source.webgpu_error_type();
        self.handle_error_inner(sink_mutex, error_type, Box::new(source), label, fn_ident)
    }

    /// Like [`Self::handle_error`], for operations with no label to attach.
    #[inline]
    #[track_caller]
    fn handle_error_nolabel(
        &self,
        sink_mutex: &Mutex<ErrorSinkRaw>,
        source: impl WebGpuError + WasmNotSendSync + 'static,
        fn_ident: &'static str,
    ) {
        let error_type = source.webgpu_error_type();
        self.handle_error_inner(sink_mutex, error_type, Box::new(source), None, fn_ident)
    }

    /// Escalates an unrecoverable error to a panic, with the full formatted
    /// cause chain.
    #[track_caller]
    #[cold]
    fn handle_error_fatal(
        &self,
        cause: impl Error + WasmNotSendSync + 'static,
        operation: &'static str,
    ) -> ! {
        panic!("Error in {operation}: {f}", f = self.format_error(&cause));
    }

    /// Renders `err` and its `source()` chain as an indented tree,
    /// flattening `wgc::error::MultiError` nodes into sibling entries.
    #[inline(never)]
    fn format_error(&self, err: &(dyn Error + 'static)) -> String {
        let mut output = String::new();
        // Indentation depth; starts at 1 so the top-level cause is already
        // indented under the "Caused by:" header.
        let mut level = 1;

        fn print_tree(output: &mut String, level: &mut usize, e: &(dyn Error + 'static)) {
            let mut print = |e: &(dyn Error + 'static)| {
                use core::fmt::Write;
                writeln!(output, "{}{}", " ".repeat(*level * 2), e).unwrap();

                if let Some(e) = e.source() {
                    *level += 1;
                    print_tree(output, level, e);
                    *level -= 1;
                }
            };
            if let Some(multi) = e.downcast_ref::<wgc::error::MultiError>() {
                for e in multi.errors() {
                    print(e);
                }
            } else {
                print(e);
            }
        }

        print_tree(&mut output, &mut level, err);

        format!("Validation Error\n\nCaused by:\n{output}")
    }

    /// Borrows the hal queue behind `queue`, if it uses backend `A`.
    ///
    /// # Safety
    /// See `wgc::global::Global::queue_as_hal`.
    pub unsafe fn queue_as_hal<A: hal::Api>(
        &self,
        queue: &CoreQueue,
    ) -> Option<impl Deref<Target = A::Queue> + WasmNotSendSync> {
        unsafe { self.0.queue_as_hal::<A>(queue.id) }
    }
}
388
389fn map_buffer_copy_view(
390 view: crate::TexelCopyBufferInfo<'_>,
391) -> wgt::TexelCopyBufferInfo<wgc::id::BufferId> {
392 wgt::TexelCopyBufferInfo {
393 buffer: view.buffer.inner.as_core().id,
394 layout: view.layout,
395 }
396}
397
398fn map_texture_copy_view(
399 view: crate::TexelCopyTextureInfo<'_>,
400) -> wgt::TexelCopyTextureInfo<wgc::id::TextureId> {
401 wgt::TexelCopyTextureInfo {
402 texture: view.texture.inner.as_core().id,
403 mip_level: view.mip_level,
404 origin: view.origin,
405 aspect: view.aspect,
406 }
407}
408
409#[cfg_attr(not(webgl), expect(unused))]
410fn map_texture_tagged_copy_view(
411 view: crate::CopyExternalImageDestInfo<&api::Texture>,
412) -> wgt::CopyExternalImageDestInfo<wgc::id::TextureId> {
413 wgt::CopyExternalImageDestInfo {
414 texture: view.texture.inner.as_core().id,
415 mip_level: view.mip_level,
416 origin: view.origin,
417 aspect: view.aspect,
418 color_space: view.color_space,
419 premultiplied_alpha: view.premultiplied_alpha,
420 }
421}
422
423fn map_load_op<V: Copy>(load: &LoadOp<V>) -> LoadOp<Option<V>> {
424 match load {
425 LoadOp::Clear(clear_value) => LoadOp::Clear(Some(*clear_value)),
426 LoadOp::Load => LoadOp::Load,
427 }
428}
429
430fn map_pass_channel<V: Copy>(ops: Option<&Operations<V>>) -> wgc::command::PassChannel<Option<V>> {
431 match ops {
432 Some(&Operations { load, store }) => wgc::command::PassChannel {
433 load_op: Some(map_load_op(&load)),
434 store_op: Some(store),
435 read_only: false,
436 },
437 None => wgc::command::PassChannel {
438 load_op: None,
439 store_op: None,
440 read_only: true,
441 },
442 }
443}
444
/// Core-backend handle to a presentation surface.
#[derive(Debug)]
pub struct CoreSurface {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::SurfaceId,
    /// Device the surface was most recently configured with, if any.
    configured_device: Mutex<Option<wgc::id::DeviceId>>,
    /// Error sink of the configuring device; `None` until configured.
    /// NOTE(review): presumably populated on surface configuration — the
    /// configure path is not visible in this chunk.
    error_sink: Mutex<Option<ErrorSink>>,
}
456
/// Core-backend handle to an adapter.
#[derive(Debug)]
pub struct CoreAdapter {
    pub(crate) context: ContextWgpuCore,
    pub(crate) id: wgc::id::AdapterId,
}
462
/// Core-backend handle to a device.
#[derive(Debug)]
pub struct CoreDevice {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::DeviceId,
    /// Sink shared with the device's queue and resources for error-scope
    /// routing.
    error_sink: ErrorSink,
    /// Features requested at creation; consulted when translating bind
    /// group resources (e.g. binding arrays).
    features: Features,
}
470
/// Core-backend handle to a buffer.
#[derive(Debug)]
pub struct CoreBuffer {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BufferId,
    /// Error sink of the owning device.
    error_sink: ErrorSink,
}
477
/// Core-backend handle to a shader module, with the compilation messages
/// captured at creation time.
#[derive(Debug)]
pub struct CoreShaderModule {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::ShaderModuleId,
    compilation_info: CompilationInfo,
}
484
/// Core-backend handle to a bind group layout.
#[derive(Debug)]
pub struct CoreBindGroupLayout {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BindGroupLayoutId,
}
490
/// Core-backend handle to a bind group.
#[derive(Debug)]
pub struct CoreBindGroup {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BindGroupId,
}
496
/// Core-backend handle to a texture.
#[derive(Debug)]
pub struct CoreTexture {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::TextureId,
    /// Error sink of the owning device.
    error_sink: ErrorSink,
}
503
/// Core-backend handle to a texture view.
#[derive(Debug)]
pub struct CoreTextureView {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::TextureViewId,
}
509
/// Core-backend handle to an external texture.
#[derive(Debug)]
pub struct CoreExternalTexture {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::ExternalTextureId,
}
515
/// Core-backend handle to a sampler.
#[derive(Debug)]
pub struct CoreSampler {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::SamplerId,
}
521
/// Core-backend handle to a query set.
#[derive(Debug)]
pub struct CoreQuerySet {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::QuerySetId,
}
527
/// Core-backend handle to a pipeline layout.
#[derive(Debug)]
pub struct CorePipelineLayout {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::PipelineLayoutId,
}
533
/// Core-backend handle to a pipeline cache.
#[derive(Debug)]
pub struct CorePipelineCache {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::PipelineCacheId,
}
539
/// Core-backend handle to a finished command buffer.
#[derive(Debug)]
pub struct CoreCommandBuffer {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::CommandBufferId,
}
545
/// Core-backend render bundle encoder. Owns the encoder state directly
/// rather than an id; `id` is only used for identity comparisons.
#[derive(Debug)]
pub struct CoreRenderBundleEncoder {
    pub(crate) context: ContextWgpuCore,
    encoder: wgc::command::RenderBundleEncoder,
    id: crate::cmp::Identifier,
}
552
/// Core-backend handle to a finished render bundle.
#[derive(Debug)]
pub struct CoreRenderBundle {
    id: wgc::id::RenderBundleId,
}
557
/// Core-backend handle to a queue. Shares its error sink with the device
/// it was created with.
#[derive(Debug)]
pub struct CoreQueue {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::QueueId,
    error_sink: ErrorSink,
}
564
/// Core-backend handle to a compute pipeline.
#[derive(Debug)]
pub struct CoreComputePipeline {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::ComputePipelineId,
    error_sink: ErrorSink,
}
571
/// Core-backend handle to a render pipeline.
#[derive(Debug)]
pub struct CoreRenderPipeline {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::RenderPipelineId,
    error_sink: ErrorSink,
}
578
/// Core-backend compute pass. Owns the recording state directly; `id` is
/// only used for identity comparisons.
#[derive(Debug)]
pub struct CoreComputePass {
    pub(crate) context: ContextWgpuCore,
    pass: wgc::command::ComputePass,
    error_sink: ErrorSink,
    id: crate::cmp::Identifier,
}
586
/// Core-backend render pass. Owns the recording state directly; `id` is
/// only used for identity comparisons.
#[derive(Debug)]
pub struct CoreRenderPass {
    pub(crate) context: ContextWgpuCore,
    pass: wgc::command::RenderPass,
    error_sink: ErrorSink,
    id: crate::cmp::Identifier,
}
594
/// Core-backend handle to a command encoder.
#[derive(Debug)]
pub struct CoreCommandEncoder {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::CommandEncoderId,
    error_sink: ErrorSink,
}
601
/// Core-backend handle to a bottom-level acceleration structure.
#[derive(Debug)]
pub struct CoreBlas {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BlasId,
    error_sink: ErrorSink,
}
608
/// Core-backend handle to a top-level acceleration structure.
#[derive(Debug)]
pub struct CoreTlas {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::TlasId,
}
615
/// Per-frame presentation detail returned alongside a surface texture;
/// keeps what is needed to present or discard the frame.
#[derive(Debug)]
pub struct CoreSurfaceOutputDetail {
    context: ContextWgpuCore,
    surface_id: wgc::id::SurfaceId,
}
621
/// Shared, mutex-guarded error sink, cloned into every handle that can
/// report device errors.
type ErrorSink = Arc<Mutex<ErrorSinkRaw>>;
623
/// One entry of the WebGPU error-scope stack
/// (`pushErrorScope`/`popErrorScope`).
struct ErrorScope {
    /// First error captured by this scope, if any.
    error: Option<crate::Error>,
    /// Error category this scope captures.
    filter: crate::ErrorFilter,
}
628
struct ErrorSinkRaw {
    /// Stack of currently-pushed error scopes; the innermost scope is last.
    scopes: Vec<ErrorScope>,
    /// Handler for errors no scope captures; `None` falls back to the
    /// default (panicking) handler.
    uncaptured_handler: Option<Arc<dyn crate::UncapturedErrorHandler>>,
}
633
634impl ErrorSinkRaw {
635 fn new() -> ErrorSinkRaw {
636 ErrorSinkRaw {
637 scopes: Vec::new(),
638 uncaptured_handler: None,
639 }
640 }
641
642 #[track_caller]
652 #[must_use]
653 fn handle_error_or_return_handler(&mut self, err: crate::Error) -> Option<impl FnOnce()> {
654 let filter = match err {
655 crate::Error::OutOfMemory { .. } => crate::ErrorFilter::OutOfMemory,
656 crate::Error::Validation { .. } => crate::ErrorFilter::Validation,
657 crate::Error::Internal { .. } => crate::ErrorFilter::Internal,
658 };
659 match self
660 .scopes
661 .iter_mut()
662 .rev()
663 .find(|scope| scope.filter == filter)
664 {
665 Some(scope) => {
666 if scope.error.is_none() {
667 scope.error = Some(err);
668 }
669 None
670 }
671 None => {
672 if let Some(custom_handler) = &self.uncaptured_handler {
673 let custom_handler = Arc::clone(custom_handler);
674 Some(move || (custom_handler)(err))
675 } else {
676 default_error_handler(err)
678 }
679 }
680 }
681 }
682}
683
684impl fmt::Debug for ErrorSinkRaw {
685 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
686 write!(f, "ErrorSink")
687 }
688}
689
/// Fallback for uncaptured errors when no custom handler is installed:
/// logs, then panics with the formatted error.
#[track_caller]
fn default_error_handler(err: crate::Error) -> ! {
    log::error!("Handling wgpu errors as fatal by default");
    panic!("wgpu error: {err}\n");
}
695
impl From<CreateShaderModuleError> for CompilationInfo {
    /// Converts a shader-module creation error into WebGPU-style
    /// compilation messages.
    fn from(value: CreateShaderModuleError) -> Self {
        match value {
            // Front-end parse/validation errors carry structured message
            // lists of their own.
            #[cfg(feature = "wgsl")]
            CreateShaderModuleError::Parsing(v) => v.into(),
            #[cfg(feature = "glsl")]
            CreateShaderModuleError::ParsingGlsl(v) => v.into(),
            #[cfg(feature = "spirv")]
            CreateShaderModuleError::ParsingSpirV(v) => v.into(),
            CreateShaderModuleError::Validation(v) => v.into(),
            // Not compilation problems — produce no messages.
            CreateShaderModuleError::Device(_) | CreateShaderModuleError::Generation => {
                CompilationInfo {
                    messages: Vec::new(),
                }
            }
            // Anything else becomes a single error-level message with the
            // error's display text.
            _ => CompilationInfo {
                messages: vec![CompilationMessage {
                    message: value.to_string(),
                    message_type: CompilationMessageType::Error,
                    location: None,
                }],
            },
        }
    }
}
724
/// Staging buffer handed out by `Queue::write_buffer_with`, together with
/// its mapped memory view.
#[derive(Debug)]
pub struct CoreQueueWriteBuffer {
    buffer_id: wgc::id::StagingBufferId,
    mapping: CoreBufferMappedRange,
}
730
/// Raw view into a mapped buffer region: base pointer plus length in
/// bytes.
#[derive(Debug)]
pub struct CoreBufferMappedRange {
    ptr: NonNull<u8>,
    size: usize,
}
736
// SAFETY: `CoreBufferMappedRange` is just a pointer + length into mapped
// buffer memory. NOTE(review): soundness relies on wgpu-core's mapping
// rules keeping the region valid and free of unsynchronized aliasing for
// the mapping's lifetime — confirm against wgpu-core's buffer-mapping
// guarantees.
#[cfg(send_sync)]
unsafe impl Send for CoreBufferMappedRange {}
#[cfg(send_sync)]
unsafe impl Sync for CoreBufferMappedRange {}
741
impl Drop for CoreBufferMappedRange {
    fn drop(&mut self) {
        // Intentionally empty: the mapped memory is owned by the buffer it
        // came from and is released by unmapping, not by this view.
        // NOTE(review): empty `Drop` impl kept as-is — confirm it is
        // intentional and not a leftover placeholder.
    }
}
748
// Identity-based equality/ordering/hashing for every handle type: the
// context compares by `Arc` address, all other handles by the field named
// after `=>` (their wgpu-core id or an equivalent unique token).
crate::cmp::impl_eq_ord_hash_arc_address!(ContextWgpuCore => .0);
crate::cmp::impl_eq_ord_hash_proxy!(CoreAdapter => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreDevice => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreQueue => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreShaderModule => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBindGroupLayout => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBindGroup => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreTextureView => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreSampler => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBuffer => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreTexture => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreExternalTexture => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBlas => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreTlas => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreQuerySet => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CorePipelineLayout => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderPipeline => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreComputePipeline => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CorePipelineCache => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreCommandEncoder => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreComputePass => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderPass => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreCommandBuffer => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderBundleEncoder => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderBundle => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreSurface => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreSurfaceOutputDetail => .surface_id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreQueueWriteBuffer => .mapping.ptr);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBufferMappedRange => .ptr);
778
impl dispatch::InstanceInterface for ContextWgpuCore {
    /// Creates a fresh wgpu-core global from the instance descriptor.
    fn new(desc: &wgt::InstanceDescriptor) -> Self
    where
        Self: Sized,
    {
        Self(Arc::new(wgc::global::Global::new("wgpu", desc)))
    }

    /// Creates a surface from a platform-specific target. Most target
    /// variants are compile-time gated to the backends that support them.
    unsafe fn create_surface(
        &self,
        target: crate::api::SurfaceTargetUnsafe,
    ) -> Result<dispatch::DispatchSurface, crate::CreateSurfaceError> {
        let id = match target {
            SurfaceTargetUnsafe::RawHandle {
                raw_display_handle,
                raw_window_handle,
            } => unsafe {
                self.0
                    .instance_create_surface(raw_display_handle, raw_window_handle, None)
            },

            #[cfg(all(unix, not(target_vendor = "apple"), not(target_family = "wasm")))]
            SurfaceTargetUnsafe::Drm {
                fd,
                plane,
                connector_id,
                width,
                height,
                refresh_rate,
            } => unsafe {
                self.0.instance_create_surface_from_drm(
                    fd,
                    plane,
                    connector_id,
                    width,
                    height,
                    refresh_rate,
                    None,
                )
            },

            #[cfg(metal)]
            SurfaceTargetUnsafe::CoreAnimationLayer(layer) => unsafe {
                self.0.instance_create_surface_metal(layer, None)
            },

            #[cfg(dx12)]
            SurfaceTargetUnsafe::CompositionVisual(visual) => unsafe {
                self.0.instance_create_surface_from_visual(visual, None)
            },

            #[cfg(dx12)]
            SurfaceTargetUnsafe::SurfaceHandle(surface_handle) => unsafe {
                self.0
                    .instance_create_surface_from_surface_handle(surface_handle, None)
            },

            #[cfg(dx12)]
            SurfaceTargetUnsafe::SwapChainPanel(swap_chain_panel) => unsafe {
                self.0
                    .instance_create_surface_from_swap_chain_panel(swap_chain_panel, None)
            },
        }?;

        // A new surface starts unconfigured: no device, no error sink yet.
        Ok(CoreSurface {
            context: self.clone(),
            id,
            configured_device: Mutex::default(),
            error_sink: Mutex::default(),
        }
        .into())
    }

    /// Requests an adapter. wgpu-core resolves this synchronously, so the
    /// returned future is already ready.
    fn request_adapter(
        &self,
        options: &crate::api::RequestAdapterOptions<'_, '_>,
    ) -> Pin<Box<dyn dispatch::RequestAdapterFuture>> {
        let id = self.0.request_adapter(
            &wgc::instance::RequestAdapterOptions {
                power_preference: options.power_preference,
                force_fallback_adapter: options.force_fallback_adapter,
                compatible_surface: options
                    .compatible_surface
                    .map(|surface| surface.inner.as_core().id),
            },
            wgt::Backends::all(),
            None,
        );
        let adapter = id.map(|id| {
            let core = CoreAdapter {
                context: self.clone(),
                id,
            };
            let generic: dispatch::DispatchAdapter = core.into();
            generic
        });
        Box::pin(ready(adapter))
    }

    /// Polls every device; returns whether all queues are empty. A poll
    /// failure is treated as fatal (panics).
    fn poll_all_devices(&self, force_wait: bool) -> bool {
        match self.0.poll_all_devices(force_wait) {
            Ok(all_queue_empty) => all_queue_empty,
            Err(err) => self.handle_error_fatal(err, "Instance::poll_all_devices"),
        }
    }

    /// Reports which WGSL language extensions naga's front end implements,
    /// folded into the public bitflags type.
    #[cfg(feature = "wgsl")]
    fn wgsl_language_features(&self) -> crate::WgslLanguageFeatures {
        use wgc::naga::front::wgsl::ImplementedLanguageExtension;
        ImplementedLanguageExtension::all().iter().copied().fold(
            crate::WgslLanguageFeatures::empty(),
            |acc, wle| {
                acc | match wle {
                    ImplementedLanguageExtension::ReadOnlyAndReadWriteStorageTextures => {
                        crate::WgslLanguageFeatures::ReadOnlyAndReadWriteStorageTextures
                    }
                    ImplementedLanguageExtension::Packed4x8IntegerDotProduct => {
                        crate::WgslLanguageFeatures::Packed4x8IntegerDotProduct
                    }
                    ImplementedLanguageExtension::PointerCompositeAccess => {
                        crate::WgslLanguageFeatures::PointerCompositeAccess
                    }
                }
            },
        )
    }

    /// Enumerates adapters on `backends`; resolves synchronously via the
    /// inherent `enumerate_adapters` method.
    fn enumerate_adapters(
        &self,
        backends: crate::Backends,
    ) -> Pin<Box<dyn dispatch::EnumerateAdapterFuture>> {
        let adapters: Vec<DispatchAdapter> = self
            .enumerate_adapters(backends)
            .into_iter()
            .map(|adapter| {
                let core = crate::backend::wgpu_core::CoreAdapter {
                    context: self.clone(),
                    id: adapter,
                };
                core.into()
            })
            .collect();
        Box::pin(ready(adapters))
    }
}
924
impl dispatch::AdapterInterface for CoreAdapter {
    /// Requests a device + queue pair. Resolves synchronously; on success
    /// the two handles share a freshly created error sink.
    fn request_device(
        &self,
        desc: &crate::DeviceDescriptor<'_>,
    ) -> Pin<Box<dyn dispatch::RequestDeviceFuture>> {
        // Tracing was removed upstream; warn loudly instead of silently
        // ignoring the request.
        if !matches!(desc.trace, wgt::Trace::Off) {
            log::error!(
                "
                Feature 'trace' has been removed temporarily; \
                see https://github.com/gfx-rs/wgpu/issues/5974. \
                The `trace` parameter will have no effect."
            );
        }

        let res = self.context.0.adapter_request_device(
            self.id,
            &desc.map_label(|l| l.map(Borrowed)),
            None,
            None,
        );
        let (device_id, queue_id) = match res {
            Ok(ids) => ids,
            Err(err) => {
                return Box::pin(ready(Err(err.into())));
            }
        };
        let error_sink = Arc::new(Mutex::new(ErrorSinkRaw::new()));
        let device = CoreDevice {
            context: self.context.clone(),
            id: device_id,
            error_sink: error_sink.clone(),
            features: desc.required_features,
        };
        let queue = CoreQueue {
            context: self.context.clone(),
            id: queue_id,
            error_sink,
        };
        Box::pin(ready(Ok((device.into(), queue.into()))))
    }

    /// Whether this adapter can present to the given surface.
    fn is_surface_supported(&self, surface: &dispatch::DispatchSurface) -> bool {
        let surface = surface.as_core();

        self.context
            .0
            .adapter_is_surface_supported(self.id, surface.id)
    }

    /// Features supported by this adapter.
    fn features(&self) -> crate::Features {
        self.context.0.adapter_features(self.id)
    }

    /// Best limits this adapter supports.
    fn limits(&self) -> crate::Limits {
        self.context.0.adapter_limits(self.id)
    }

    /// Downlevel (reduced-capability) support flags.
    fn downlevel_capabilities(&self) -> crate::DownlevelCapabilities {
        self.context.0.adapter_downlevel_capabilities(self.id)
    }

    /// Identifying information (name, vendor, backend, …).
    fn get_info(&self) -> crate::AdapterInfo {
        self.context.0.adapter_get_info(self.id)
    }

    /// Per-format capability flags beyond the spec-guaranteed baseline.
    fn get_texture_format_features(
        &self,
        format: crate::TextureFormat,
    ) -> crate::TextureFormatFeatures {
        self.context
            .0
            .adapter_get_texture_format_features(self.id, format)
    }

    /// Current timestamp in the adapter's presentation clock domain.
    fn get_presentation_timestamp(&self) -> crate::PresentationTimestamp {
        self.context.0.adapter_get_presentation_timestamp(self.id)
    }
}
1003
impl Drop for CoreAdapter {
    fn drop(&mut self) {
        // Release the wgpu-core registry entry for this adapter id.
        self.context.0.adapter_drop(self.id)
    }
}
1009
1010impl dispatch::DeviceInterface for CoreDevice {
    /// Features this device was created with.
    fn features(&self) -> crate::Features {
        self.context.0.device_features(self.id)
    }
1014
    /// Limits this device was created with.
    fn limits(&self) -> crate::Limits {
        self.context.0.device_limits(self.id)
    }
1018
    /// Creates a shader module from one of the enabled source languages.
    /// Compilation errors are reported through the device's error sink and
    /// additionally captured as `CompilationInfo` on the returned module.
    #[cfg_attr(
        not(any(
            feature = "spirv",
            feature = "glsl",
            feature = "wgsl",
            feature = "naga-ir"
        )),
        expect(unused)
    )]
    fn create_shader_module(
        &self,
        desc: crate::ShaderModuleDescriptor<'_>,
        shader_bound_checks: wgt::ShaderRuntimeChecks,
    ) -> dispatch::DispatchShaderModule {
        let descriptor = wgc::pipeline::ShaderModuleDescriptor {
            label: desc.label.map(Borrowed),
            runtime_checks: shader_bound_checks,
        };
        let source = match desc.source {
            #[cfg(feature = "spirv")]
            ShaderSource::SpirV(ref spv) => {
                let options = naga::front::spv::Options {
                    adjust_coordinate_space: false,
                    strict_capabilities: true,
                    block_ctx_dump_prefix: None,
                };
                wgc::pipeline::ShaderModuleSource::SpirV(Borrowed(spv), options)
            }
            #[cfg(feature = "glsl")]
            ShaderSource::Glsl {
                ref shader,
                stage,
                defines,
            } => {
                let options = naga::front::glsl::Options {
                    stage,
                    defines: defines
                        .iter()
                        .map(|&(key, value)| (String::from(key), String::from(value)))
                        .collect(),
                };
                wgc::pipeline::ShaderModuleSource::Glsl(Borrowed(shader), options)
            }
            #[cfg(feature = "wgsl")]
            ShaderSource::Wgsl(ref code) => wgc::pipeline::ShaderModuleSource::Wgsl(Borrowed(code)),
            #[cfg(feature = "naga-ir")]
            ShaderSource::Naga(module) => wgc::pipeline::ShaderModuleSource::Naga(module),
            ShaderSource::Dummy(_) => panic!("found `ShaderSource::Dummy`"),
        };
        let (id, error) =
            self.context
                .0
                .device_create_shader_module(self.id, &descriptor, source, None);
        let compilation_info = match error {
            Some(cause) => {
                // Report the error, then also translate it into
                // WebGPU-style compilation messages.
                self.context.handle_error(
                    &self.error_sink,
                    cause.clone(),
                    desc.label,
                    "Device::create_shader_module",
                );
                CompilationInfo::from(cause)
            }
            None => CompilationInfo { messages: vec![] },
        };

        CoreShaderModule {
            context: self.context.clone(),
            id,
            compilation_info,
        }
        .into()
    }
1094
    /// Creates a shader module from pre-compiled backend input without
    /// front-end validation.
    ///
    /// # Safety
    /// Caller must guarantee the passthrough source is valid for the
    /// backend, per the dispatch trait's contract.
    unsafe fn create_shader_module_passthrough(
        &self,
        desc: &crate::ShaderModuleDescriptorPassthrough<'_>,
    ) -> dispatch::DispatchShaderModule {
        let desc = desc.map_label(|l| l.map(Cow::from));
        let (id, error) = unsafe {
            self.context
                .0
                .device_create_shader_module_passthrough(self.id, &desc, None)
        };

        let compilation_info = match error {
            Some(cause) => {
                // Report the error, then also translate it into
                // WebGPU-style compilation messages.
                self.context.handle_error(
                    &self.error_sink,
                    cause.clone(),
                    desc.label.as_deref(),
                    "Device::create_shader_module_passthrough",
                );
                CompilationInfo::from(cause)
            }
            None => CompilationInfo { messages: vec![] },
        };

        CoreShaderModule {
            context: self.context.clone(),
            id,
            compilation_info,
        }
        .into()
    }
1126
    /// Creates a bind group layout; errors are reported through the
    /// device's error sink (an invalid-id handle is still returned).
    fn create_bind_group_layout(
        &self,
        desc: &crate::BindGroupLayoutDescriptor<'_>,
    ) -> dispatch::DispatchBindGroupLayout {
        let descriptor = wgc::binding_model::BindGroupLayoutDescriptor {
            label: desc.label.map(Borrowed),
            entries: Borrowed(desc.entries),
        };
        let (id, error) =
            self.context
                .0
                .device_create_bind_group_layout(self.id, &descriptor, None);
        if let Some(cause) = error {
            self.context.handle_error(
                &self.error_sink,
                cause,
                desc.label,
                "Device::create_bind_group_layout",
            );
        }
        CoreBindGroupLayout {
            context: self.context.clone(),
            id,
        }
        .into()
    }
1153
    /// Creates a bind group, lowering wgpu-level binding resources to
    /// wgpu-core's id-based form.
    ///
    /// Array bindings need owned id slices to borrow from, so the method
    /// makes two passes: first it collects all array elements into backing
    /// vectors, then the per-entry translation slices those vectors in the
    /// same order (tracked by the `remaining_*` cursors).
    fn create_bind_group(
        &self,
        desc: &crate::BindGroupDescriptor<'_>,
    ) -> dispatch::DispatchBindGroup {
        use wgc::binding_model as bm;

        // Pass 1a: gather texture-view and sampler array elements (only
        // meaningful when the binding-array feature is enabled).
        let mut arrayed_texture_views = Vec::new();
        let mut arrayed_samplers = Vec::new();
        if self.features.contains(Features::TEXTURE_BINDING_ARRAY) {
            for entry in desc.entries.iter() {
                if let BindingResource::TextureViewArray(array) = entry.resource {
                    arrayed_texture_views.extend(array.iter().map(|view| view.inner.as_core().id));
                }
                if let BindingResource::SamplerArray(array) = entry.resource {
                    arrayed_samplers.extend(array.iter().map(|sampler| sampler.inner.as_core().id));
                }
            }
        }
        let mut remaining_arrayed_texture_views = &arrayed_texture_views[..];
        let mut remaining_arrayed_samplers = &arrayed_samplers[..];

        // Pass 1b: gather buffer-binding array elements.
        let mut arrayed_buffer_bindings = Vec::new();
        if self.features.contains(Features::BUFFER_BINDING_ARRAY) {
            for entry in desc.entries.iter() {
                if let BindingResource::BufferArray(array) = entry.resource {
                    arrayed_buffer_bindings.extend(array.iter().map(|binding| bm::BufferBinding {
                        buffer: binding.buffer.inner.as_core().id,
                        offset: binding.offset,
                        size: binding.size,
                    }));
                }
            }
        }
        let mut remaining_arrayed_buffer_bindings = &arrayed_buffer_bindings[..];

        // Pass 2: translate each entry, consuming array slices from the
        // fronts of the backing vectors in entry order.
        let entries = desc
            .entries
            .iter()
            .map(|entry| bm::BindGroupEntry {
                binding: entry.binding,
                resource: match entry.resource {
                    BindingResource::Buffer(BufferBinding {
                        buffer,
                        offset,
                        size,
                    }) => bm::BindingResource::Buffer(bm::BufferBinding {
                        buffer: buffer.inner.as_core().id,
                        offset,
                        size,
                    }),
                    BindingResource::BufferArray(array) => {
                        let slice = &remaining_arrayed_buffer_bindings[..array.len()];
                        remaining_arrayed_buffer_bindings =
                            &remaining_arrayed_buffer_bindings[array.len()..];
                        bm::BindingResource::BufferArray(Borrowed(slice))
                    }
                    BindingResource::Sampler(sampler) => {
                        bm::BindingResource::Sampler(sampler.inner.as_core().id)
                    }
                    BindingResource::SamplerArray(array) => {
                        let slice = &remaining_arrayed_samplers[..array.len()];
                        remaining_arrayed_samplers = &remaining_arrayed_samplers[array.len()..];
                        bm::BindingResource::SamplerArray(Borrowed(slice))
                    }
                    BindingResource::TextureView(texture_view) => {
                        bm::BindingResource::TextureView(texture_view.inner.as_core().id)
                    }
                    BindingResource::TextureViewArray(array) => {
                        let slice = &remaining_arrayed_texture_views[..array.len()];
                        remaining_arrayed_texture_views =
                            &remaining_arrayed_texture_views[array.len()..];
                        bm::BindingResource::TextureViewArray(Borrowed(slice))
                    }
                    BindingResource::AccelerationStructure(acceleration_structure) => {
                        bm::BindingResource::AccelerationStructure(
                            acceleration_structure.inner.as_core().id,
                        )
                    }
                    BindingResource::ExternalTexture(external_texture) => {
                        bm::BindingResource::ExternalTexture(external_texture.inner.as_core().id)
                    }
                },
            })
            .collect::<Vec<_>>();
        let descriptor = bm::BindGroupDescriptor {
            label: desc.label.as_ref().map(|label| Borrowed(&label[..])),
            layout: desc.layout.inner.as_core().id,
            entries: Borrowed(&entries),
        };

        let (id, error) = self
            .context
            .0
            .device_create_bind_group(self.id, &descriptor, None);
        if let Some(cause) = error {
            self.context.handle_error(
                &self.error_sink,
                cause,
                desc.label,
                "Device::create_bind_group",
            );
        }
        CoreBindGroup {
            context: self.context.clone(),
            id,
        }
        .into()
    }
1264
1265 fn create_pipeline_layout(
1266 &self,
1267 desc: &crate::PipelineLayoutDescriptor<'_>,
1268 ) -> dispatch::DispatchPipelineLayout {
1269 assert!(
1272 desc.bind_group_layouts.len() <= wgc::MAX_BIND_GROUPS,
1273 "Bind group layout count {} exceeds device bind group limit {}",
1274 desc.bind_group_layouts.len(),
1275 wgc::MAX_BIND_GROUPS
1276 );
1277
1278 let temp_layouts = desc
1279 .bind_group_layouts
1280 .iter()
1281 .map(|bgl| bgl.inner.as_core().id)
1282 .collect::<ArrayVec<_, { wgc::MAX_BIND_GROUPS }>>();
1283 let descriptor = wgc::binding_model::PipelineLayoutDescriptor {
1284 label: desc.label.map(Borrowed),
1285 bind_group_layouts: Borrowed(&temp_layouts),
1286 push_constant_ranges: Borrowed(desc.push_constant_ranges),
1287 };
1288
1289 let (id, error) = self
1290 .context
1291 .0
1292 .device_create_pipeline_layout(self.id, &descriptor, None);
1293 if let Some(cause) = error {
1294 self.context.handle_error(
1295 &self.error_sink,
1296 cause,
1297 desc.label,
1298 "Device::create_pipeline_layout",
1299 );
1300 }
1301 CorePipelineLayout {
1302 context: self.context.clone(),
1303 id,
1304 }
1305 .into()
1306 }
1307
    /// Creates a classic (vertex + optional fragment) render pipeline,
    /// translating the public descriptor into wgpu-core's representation.
    fn create_render_pipeline(
        &self,
        desc: &crate::RenderPipelineDescriptor<'_>,
    ) -> dispatch::DispatchRenderPipeline {
        use wgc::pipeline as pipe;

        // Vertex buffer layouts are bounded by MAX_VERTEX_BUFFERS, so an
        // ArrayVec keeps them off the heap.
        let vertex_buffers: ArrayVec<_, { wgc::MAX_VERTEX_BUFFERS }> = desc
            .vertex
            .buffers
            .iter()
            .map(|vbuf| pipe::VertexBufferLayout {
                array_stride: vbuf.array_stride,
                step_mode: vbuf.step_mode,
                attributes: Borrowed(vbuf.attributes),
            })
            .collect();

        // Owned copies of the vertex-stage override constants, keyed by name.
        let vert_constants = desc
            .vertex
            .compilation_options
            .constants
            .iter()
            .map(|&(key, value)| (String::from(key), value))
            .collect();

        let descriptor = pipe::RenderPipelineDescriptor {
            label: desc.label.map(Borrowed),
            layout: desc.layout.map(|layout| layout.inner.as_core().id),
            vertex: pipe::VertexState {
                stage: pipe::ProgrammableStageDescriptor {
                    module: desc.vertex.module.inner.as_core().id,
                    entry_point: desc.vertex.entry_point.map(Borrowed),
                    constants: vert_constants,
                    zero_initialize_workgroup_memory: desc
                        .vertex
                        .compilation_options
                        .zero_initialize_workgroup_memory,
                },
                buffers: Borrowed(&vertex_buffers),
            },
            primitive: desc.primitive,
            depth_stencil: desc.depth_stencil.clone(),
            multisample: desc.multisample,
            // The fragment stage is optional (e.g. depth-only pipelines).
            fragment: desc.fragment.as_ref().map(|frag| {
                let frag_constants = frag
                    .compilation_options
                    .constants
                    .iter()
                    .map(|&(key, value)| (String::from(key), value))
                    .collect();
                pipe::FragmentState {
                    stage: pipe::ProgrammableStageDescriptor {
                        module: frag.module.inner.as_core().id,
                        entry_point: frag.entry_point.map(Borrowed),
                        constants: frag_constants,
                        zero_initialize_workgroup_memory: frag
                            .compilation_options
                            .zero_initialize_workgroup_memory,
                    },
                    targets: Borrowed(frag.targets),
                }
            }),
            multiview_mask: desc.multiview_mask,
            cache: desc.cache.map(|cache| cache.inner.as_core().id),
        };

        let (id, error) = self
            .context
            .0
            .device_create_render_pipeline(self.id, &descriptor, None);
        if let Some(cause) = error {
            // Internal errors are shader-translation failures inside wgpu
            // itself (not invalid user input), so log them loudly.
            if let wgc::pipeline::CreateRenderPipelineError::Internal { stage, ref error } = cause {
                log::error!("Shader translation error for stage {stage:?}: {error}");
                log::error!("Please report it to https://github.com/gfx-rs/wgpu");
            }
            self.context.handle_error(
                &self.error_sink,
                cause,
                desc.label,
                "Device::create_render_pipeline",
            );
        }
        // Even on error a (possibly invalid) id is returned; errors surface
        // later through the error sink.
        CoreRenderPipeline {
            context: self.context.clone(),
            id,
            error_sink: Arc::clone(&self.error_sink),
        }
        .into()
    }
1397
1398 fn create_mesh_pipeline(
1399 &self,
1400 desc: &crate::MeshPipelineDescriptor<'_>,
1401 ) -> dispatch::DispatchRenderPipeline {
1402 use wgc::pipeline as pipe;
1403
1404 let mesh_constants = desc
1405 .mesh
1406 .compilation_options
1407 .constants
1408 .iter()
1409 .map(|&(key, value)| (String::from(key), value))
1410 .collect();
1411 let descriptor = pipe::MeshPipelineDescriptor {
1412 label: desc.label.map(Borrowed),
1413 task: desc.task.as_ref().map(|task| {
1414 let task_constants = task
1415 .compilation_options
1416 .constants
1417 .iter()
1418 .map(|&(key, value)| (String::from(key), value))
1419 .collect();
1420 pipe::TaskState {
1421 stage: pipe::ProgrammableStageDescriptor {
1422 module: task.module.inner.as_core().id,
1423 entry_point: task.entry_point.map(Borrowed),
1424 constants: task_constants,
1425 zero_initialize_workgroup_memory: desc
1426 .mesh
1427 .compilation_options
1428 .zero_initialize_workgroup_memory,
1429 },
1430 }
1431 }),
1432 mesh: pipe::MeshState {
1433 stage: pipe::ProgrammableStageDescriptor {
1434 module: desc.mesh.module.inner.as_core().id,
1435 entry_point: desc.mesh.entry_point.map(Borrowed),
1436 constants: mesh_constants,
1437 zero_initialize_workgroup_memory: desc
1438 .mesh
1439 .compilation_options
1440 .zero_initialize_workgroup_memory,
1441 },
1442 },
1443 layout: desc.layout.map(|layout| layout.inner.as_core().id),
1444 primitive: desc.primitive,
1445 depth_stencil: desc.depth_stencil.clone(),
1446 multisample: desc.multisample,
1447 fragment: desc.fragment.as_ref().map(|frag| {
1448 let frag_constants = frag
1449 .compilation_options
1450 .constants
1451 .iter()
1452 .map(|&(key, value)| (String::from(key), value))
1453 .collect();
1454 pipe::FragmentState {
1455 stage: pipe::ProgrammableStageDescriptor {
1456 module: frag.module.inner.as_core().id,
1457 entry_point: frag.entry_point.map(Borrowed),
1458 constants: frag_constants,
1459 zero_initialize_workgroup_memory: frag
1460 .compilation_options
1461 .zero_initialize_workgroup_memory,
1462 },
1463 targets: Borrowed(frag.targets),
1464 }
1465 }),
1466 multiview: desc.multiview,
1467 cache: desc.cache.map(|cache| cache.inner.as_core().id),
1468 };
1469
1470 let (id, error) = self
1471 .context
1472 .0
1473 .device_create_mesh_pipeline(self.id, &descriptor, None);
1474 if let Some(cause) = error {
1475 if let wgc::pipeline::CreateRenderPipelineError::Internal { stage, ref error } = cause {
1476 log::error!("Shader translation error for stage {stage:?}: {error}");
1477 log::error!("Please report it to https://github.com/gfx-rs/wgpu");
1478 }
1479 self.context.handle_error(
1480 &self.error_sink,
1481 cause,
1482 desc.label,
1483 "Device::create_render_pipeline",
1484 );
1485 }
1486 CoreRenderPipeline {
1487 context: self.context.clone(),
1488 id,
1489 error_sink: Arc::clone(&self.error_sink),
1490 }
1491 .into()
1492 }
1493
1494 fn create_compute_pipeline(
1495 &self,
1496 desc: &crate::ComputePipelineDescriptor<'_>,
1497 ) -> dispatch::DispatchComputePipeline {
1498 use wgc::pipeline as pipe;
1499
1500 let constants = desc
1501 .compilation_options
1502 .constants
1503 .iter()
1504 .map(|&(key, value)| (String::from(key), value))
1505 .collect();
1506
1507 let descriptor = pipe::ComputePipelineDescriptor {
1508 label: desc.label.map(Borrowed),
1509 layout: desc.layout.map(|pll| pll.inner.as_core().id),
1510 stage: pipe::ProgrammableStageDescriptor {
1511 module: desc.module.inner.as_core().id,
1512 entry_point: desc.entry_point.map(Borrowed),
1513 constants,
1514 zero_initialize_workgroup_memory: desc
1515 .compilation_options
1516 .zero_initialize_workgroup_memory,
1517 },
1518 cache: desc.cache.map(|cache| cache.inner.as_core().id),
1519 };
1520
1521 let (id, error) = self
1522 .context
1523 .0
1524 .device_create_compute_pipeline(self.id, &descriptor, None);
1525 if let Some(cause) = error {
1526 if let wgc::pipeline::CreateComputePipelineError::Internal(ref error) = cause {
1527 log::error!(
1528 "Shader translation error for stage {:?}: {}",
1529 wgt::ShaderStages::COMPUTE,
1530 error
1531 );
1532 log::error!("Please report it to https://github.com/gfx-rs/wgpu");
1533 }
1534 self.context.handle_error(
1535 &self.error_sink,
1536 cause,
1537 desc.label,
1538 "Device::create_compute_pipeline",
1539 );
1540 }
1541 CoreComputePipeline {
1542 context: self.context.clone(),
1543 id,
1544 error_sink: Arc::clone(&self.error_sink),
1545 }
1546 .into()
1547 }
1548
1549 unsafe fn create_pipeline_cache(
1550 &self,
1551 desc: &crate::PipelineCacheDescriptor<'_>,
1552 ) -> dispatch::DispatchPipelineCache {
1553 use wgc::pipeline as pipe;
1554
1555 let descriptor = pipe::PipelineCacheDescriptor {
1556 label: desc.label.map(Borrowed),
1557 data: desc.data.map(Borrowed),
1558 fallback: desc.fallback,
1559 };
1560 let (id, error) = unsafe {
1561 self.context
1562 .0
1563 .device_create_pipeline_cache(self.id, &descriptor, None)
1564 };
1565 if let Some(cause) = error {
1566 self.context.handle_error(
1567 &self.error_sink,
1568 cause,
1569 desc.label,
1570 "Device::device_create_pipeline_cache_init",
1571 );
1572 }
1573 CorePipelineCache {
1574 context: self.context.clone(),
1575 id,
1576 }
1577 .into()
1578 }
1579
1580 fn create_buffer(&self, desc: &crate::BufferDescriptor<'_>) -> dispatch::DispatchBuffer {
1581 let (id, error) = self.context.0.device_create_buffer(
1582 self.id,
1583 &desc.map_label(|l| l.map(Borrowed)),
1584 None,
1585 );
1586 if let Some(cause) = error {
1587 self.context
1588 .handle_error(&self.error_sink, cause, desc.label, "Device::create_buffer");
1589 }
1590
1591 CoreBuffer {
1592 context: self.context.clone(),
1593 id,
1594 error_sink: Arc::clone(&self.error_sink),
1595 }
1596 .into()
1597 }
1598
1599 fn create_texture(&self, desc: &crate::TextureDescriptor<'_>) -> dispatch::DispatchTexture {
1600 let wgt_desc = desc.map_label_and_view_formats(|l| l.map(Borrowed), |v| v.to_vec());
1601 let (id, error) = self
1602 .context
1603 .0
1604 .device_create_texture(self.id, &wgt_desc, None);
1605 if let Some(cause) = error {
1606 self.context.handle_error(
1607 &self.error_sink,
1608 cause,
1609 desc.label,
1610 "Device::create_texture",
1611 );
1612 }
1613
1614 CoreTexture {
1615 context: self.context.clone(),
1616 id,
1617 error_sink: Arc::clone(&self.error_sink),
1618 }
1619 .into()
1620 }
1621
1622 fn create_external_texture(
1623 &self,
1624 desc: &crate::ExternalTextureDescriptor<'_>,
1625 planes: &[&crate::TextureView],
1626 ) -> dispatch::DispatchExternalTexture {
1627 let wgt_desc = desc.map_label(|l| l.map(Borrowed));
1628 let planes = planes
1629 .iter()
1630 .map(|plane| plane.inner.as_core().id)
1631 .collect::<Vec<_>>();
1632 let (id, error) = self
1633 .context
1634 .0
1635 .device_create_external_texture(self.id, &wgt_desc, &planes, None);
1636 if let Some(cause) = error {
1637 self.context.handle_error(
1638 &self.error_sink,
1639 cause,
1640 desc.label,
1641 "Device::create_external_texture",
1642 );
1643 }
1644
1645 CoreExternalTexture {
1646 context: self.context.clone(),
1647 id,
1648 }
1649 .into()
1650 }
1651
1652 fn create_blas(
1653 &self,
1654 desc: &crate::CreateBlasDescriptor<'_>,
1655 sizes: crate::BlasGeometrySizeDescriptors,
1656 ) -> (Option<u64>, dispatch::DispatchBlas) {
1657 let global = &self.context.0;
1658 let (id, handle, error) =
1659 global.device_create_blas(self.id, &desc.map_label(|l| l.map(Borrowed)), sizes, None);
1660 if let Some(cause) = error {
1661 self.context
1662 .handle_error(&self.error_sink, cause, desc.label, "Device::create_blas");
1663 }
1664 (
1665 handle,
1666 CoreBlas {
1667 context: self.context.clone(),
1668 id,
1669 error_sink: Arc::clone(&self.error_sink),
1670 }
1671 .into(),
1672 )
1673 }
1674
1675 fn create_tlas(&self, desc: &crate::CreateTlasDescriptor<'_>) -> dispatch::DispatchTlas {
1676 let global = &self.context.0;
1677 let (id, error) =
1678 global.device_create_tlas(self.id, &desc.map_label(|l| l.map(Borrowed)), None);
1679 if let Some(cause) = error {
1680 self.context
1681 .handle_error(&self.error_sink, cause, desc.label, "Device::create_tlas");
1682 }
1683 CoreTlas {
1684 context: self.context.clone(),
1685 id,
1686 }
1688 .into()
1689 }
1690
1691 fn create_sampler(&self, desc: &crate::SamplerDescriptor<'_>) -> dispatch::DispatchSampler {
1692 let descriptor = wgc::resource::SamplerDescriptor {
1693 label: desc.label.map(Borrowed),
1694 address_modes: [
1695 desc.address_mode_u,
1696 desc.address_mode_v,
1697 desc.address_mode_w,
1698 ],
1699 mag_filter: desc.mag_filter,
1700 min_filter: desc.min_filter,
1701 mipmap_filter: desc.mipmap_filter,
1702 lod_min_clamp: desc.lod_min_clamp,
1703 lod_max_clamp: desc.lod_max_clamp,
1704 compare: desc.compare,
1705 anisotropy_clamp: desc.anisotropy_clamp,
1706 border_color: desc.border_color,
1707 };
1708
1709 let (id, error) = self
1710 .context
1711 .0
1712 .device_create_sampler(self.id, &descriptor, None);
1713 if let Some(cause) = error {
1714 self.context.handle_error(
1715 &self.error_sink,
1716 cause,
1717 desc.label,
1718 "Device::create_sampler",
1719 );
1720 }
1721 CoreSampler {
1722 context: self.context.clone(),
1723 id,
1724 }
1725 .into()
1726 }
1727
1728 fn create_query_set(&self, desc: &crate::QuerySetDescriptor<'_>) -> dispatch::DispatchQuerySet {
1729 let (id, error) = self.context.0.device_create_query_set(
1730 self.id,
1731 &desc.map_label(|l| l.map(Borrowed)),
1732 None,
1733 );
1734 if let Some(cause) = error {
1735 self.context
1736 .handle_error_nolabel(&self.error_sink, cause, "Device::create_query_set");
1737 }
1738 CoreQuerySet {
1739 context: self.context.clone(),
1740 id,
1741 }
1742 .into()
1743 }
1744
1745 fn create_command_encoder(
1746 &self,
1747 desc: &crate::CommandEncoderDescriptor<'_>,
1748 ) -> dispatch::DispatchCommandEncoder {
1749 let (id, error) = self.context.0.device_create_command_encoder(
1750 self.id,
1751 &desc.map_label(|l| l.map(Borrowed)),
1752 None,
1753 );
1754 if let Some(cause) = error {
1755 self.context.handle_error(
1756 &self.error_sink,
1757 cause,
1758 desc.label,
1759 "Device::create_command_encoder",
1760 );
1761 }
1762
1763 CoreCommandEncoder {
1764 context: self.context.clone(),
1765 id,
1766 error_sink: Arc::clone(&self.error_sink),
1767 }
1768 .into()
1769 }
1770
1771 fn create_render_bundle_encoder(
1772 &self,
1773 desc: &crate::RenderBundleEncoderDescriptor<'_>,
1774 ) -> dispatch::DispatchRenderBundleEncoder {
1775 let descriptor = wgc::command::RenderBundleEncoderDescriptor {
1776 label: desc.label.map(Borrowed),
1777 color_formats: Borrowed(desc.color_formats),
1778 depth_stencil: desc.depth_stencil,
1779 sample_count: desc.sample_count,
1780 multiview: desc.multiview,
1781 };
1782 let encoder = match wgc::command::RenderBundleEncoder::new(&descriptor, self.id) {
1783 Ok(encoder) => encoder,
1784 Err(e) => panic!("Error in Device::create_render_bundle_encoder: {e}"),
1785 };
1786
1787 CoreRenderBundleEncoder {
1788 context: self.context.clone(),
1789 encoder,
1790 id: crate::cmp::Identifier::create(),
1791 }
1792 .into()
1793 }
1794
    /// Registers the closure invoked if/when this device is lost.
    fn set_device_lost_callback(&self, device_lost_callback: dispatch::BoxDeviceLostCallback) {
        self.context
            .0
            .device_set_device_lost_closure(self.id, device_lost_callback);
    }
1800
    /// Installs the handler for errors not captured by any error scope.
    fn on_uncaptured_error(&self, handler: Arc<dyn crate::UncapturedErrorHandler>) {
        let mut error_sink = self.error_sink.lock();
        error_sink.uncaptured_handler = Some(handler);
    }
1805
    /// Pushes a new error scope that will capture errors matching `filter`.
    fn push_error_scope(&self, filter: crate::ErrorFilter) {
        let mut error_sink = self.error_sink.lock();
        error_sink.scopes.push(ErrorScope {
            error: None,
            filter,
        });
    }
1813
    /// Pops the innermost error scope; the returned future resolves
    /// immediately with any captured error.
    /// Panics if no scope is on the stack (unbalanced push/pop is API misuse).
    fn pop_error_scope(&self) -> Pin<Box<dyn dispatch::PopErrorScopeFuture>> {
        let mut error_sink = self.error_sink.lock();
        let scope = error_sink.scopes.pop().unwrap();
        Box::pin(ready(scope.error))
    }
1819
    /// Begins a capture in an attached graphics debugger.
    ///
    /// # Safety
    /// The caller upholds the trait's contract for debugger capture.
    unsafe fn start_graphics_debugger_capture(&self) {
        // SAFETY: contract forwarded from our own unsafe signature.
        unsafe {
            self.context
                .0
                .device_start_graphics_debugger_capture(self.id)
        };
    }
1827
    /// Ends a capture previously started with
    /// `start_graphics_debugger_capture`.
    ///
    /// # Safety
    /// The caller upholds the trait's contract for debugger capture.
    unsafe fn stop_graphics_debugger_capture(&self) {
        // SAFETY: contract forwarded from our own unsafe signature.
        unsafe {
            self.context
                .0
                .device_stop_graphics_debugger_capture(self.id)
        };
    }
1835
    /// Polls the device per `poll_type`, forwarding to wgpu-core.
    fn poll(&self, poll_type: wgt::PollType<u64>) -> Result<crate::PollStatus, crate::PollError> {
        match self.context.0.device_poll(self.id, poll_type) {
            Ok(status) => Ok(status),
            Err(err) => {
                // Errors that map to a public PollError are recoverable and
                // returned to the caller...
                if let Some(poll_error) = err.to_poll_error() {
                    return Err(poll_error);
                }

                // ...anything else is treated as fatal.
                self.context.handle_error_fatal(err, "Device::poll")
            }
        }
    }
1848
    /// Snapshot of wgpu-core's internal per-device counters.
    fn get_internal_counters(&self) -> crate::InternalCounters {
        self.context.0.device_get_internal_counters(self.id)
    }
1852
    /// Allocator usage report, if the backend provides one.
    fn generate_allocator_report(&self) -> Option<wgt::AllocatorReport> {
        self.context.0.device_generate_allocator_report(self.id)
    }
1856
    /// Eagerly destroys the device; the id itself is released on Drop.
    fn destroy(&self) {
        self.context.0.device_destroy(self.id);
    }
1860}
1861
// Releases the wgpu-core device id when the wrapper goes away.
impl Drop for CoreDevice {
    fn drop(&mut self) {
        self.context.0.device_drop(self.id)
    }
}
1867
impl dispatch::QueueInterface for CoreQueue {
    /// Schedules `data` to be copied into `buffer` at `offset`.
    fn write_buffer(
        &self,
        buffer: &dispatch::DispatchBuffer,
        offset: crate::BufferAddress,
        data: &[u8],
    ) {
        let buffer = buffer.as_core();

        match self
            .context
            .0
            .queue_write_buffer(self.id, buffer.id, offset, data)
        {
            Ok(()) => (),
            Err(err) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, err, "Queue::write_buffer")
            }
        }
    }

    /// Allocates a staging buffer of `size` bytes for `write_buffer_with`;
    /// returns `None` on failure (after reporting the error).
    fn create_staging_buffer(
        &self,
        size: crate::BufferSize,
    ) -> Option<dispatch::DispatchQueueWriteBuffer> {
        match self
            .context
            .0
            .queue_create_staging_buffer(self.id, size, None)
        {
            Ok((buffer_id, ptr)) => Some(
                CoreQueueWriteBuffer {
                    buffer_id,
                    mapping: CoreBufferMappedRange {
                        ptr,
                        size: size.get() as usize,
                    },
                }
                .into(),
            ),
            Err(err) => {
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    err,
                    "Queue::write_buffer_with",
                );
                None
            }
        }
    }

    /// Validates a prospective `write_buffer_with` destination range;
    /// `None` means validation failed (the error has been reported).
    fn validate_write_buffer(
        &self,
        buffer: &dispatch::DispatchBuffer,
        offset: wgt::BufferAddress,
        size: wgt::BufferSize,
    ) -> Option<()> {
        let buffer = buffer.as_core();

        match self
            .context
            .0
            .queue_validate_write_buffer(self.id, buffer.id, offset, size)
        {
            Ok(()) => Some(()),
            Err(err) => {
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    err,
                    "Queue::write_buffer_with",
                );
                None
            }
        }
    }

    /// Consumes a staging buffer from `create_staging_buffer`, copying its
    /// contents into `buffer` at `offset`.
    fn write_staging_buffer(
        &self,
        buffer: &dispatch::DispatchBuffer,
        offset: crate::BufferAddress,
        staging_buffer: &dispatch::DispatchQueueWriteBuffer,
    ) {
        let buffer = buffer.as_core();
        let staging_buffer = staging_buffer.as_core();

        match self.context.0.queue_write_staging_buffer(
            self.id,
            buffer.id,
            offset,
            staging_buffer.buffer_id,
        ) {
            Ok(()) => (),
            Err(err) => {
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    err,
                    "Queue::write_buffer_with",
                );
            }
        }
    }

    /// Schedules a CPU -> texture copy of `data`, interpreted per `data_layout`.
    fn write_texture(
        &self,
        texture: crate::TexelCopyTextureInfo<'_>,
        data: &[u8],
        data_layout: crate::TexelCopyBufferLayout,
        size: crate::Extent3d,
    ) {
        match self.context.0.queue_write_texture(
            self.id,
            &map_texture_copy_view(texture),
            data,
            &data_layout,
            &size,
        ) {
            Ok(()) => (),
            Err(err) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, err, "Queue::write_texture")
            }
        }
    }

    // Only compiled on web targets; on non-webgl web builds the body below is
    // cfg'd out entirely, hence the expect(unused_variables).
    #[cfg(web)]
    #[cfg_attr(not(webgl), expect(unused_variables))]
    fn copy_external_image_to_texture(
        &self,
        source: &crate::CopyExternalImageSourceInfo,
        dest: crate::CopyExternalImageDestInfo<&crate::api::Texture>,
        size: crate::Extent3d,
    ) {
        #[cfg(webgl)]
        match self.context.0.queue_copy_external_image_to_texture(
            self.id,
            source,
            map_texture_tagged_copy_view(dest),
            size,
        ) {
            Ok(()) => (),
            Err(err) => self.context.handle_error_nolabel(
                &self.error_sink,
                err,
                "Queue::copy_external_image_to_texture",
            ),
        }
    }

    /// Submits the given command buffers, returning the submission index.
    fn submit(
        &self,
        command_buffers: &mut dyn Iterator<Item = dispatch::DispatchCommandBuffer>,
    ) -> u64 {
        let temp_command_buffers = command_buffers.collect::<SmallVec<[_; 4]>>();
        let command_buffer_ids = temp_command_buffers
            .iter()
            .map(|cmdbuf| cmdbuf.as_core().id)
            .collect::<SmallVec<[_; 4]>>();

        let index = match self.context.0.queue_submit(self.id, &command_buffer_ids) {
            Ok(index) => index,
            Err((index, err)) => {
                // A failed submit still yields a usable submission index.
                self.context
                    .handle_error_nolabel(&self.error_sink, err, "Queue::submit");
                index
            }
        };

        // Explicitly dropped only after the submit call returns, so the
        // wrappers outlive the submission itself.
        drop(temp_command_buffers);

        index
    }

    /// Conversion factor for raw timestamp-query values on this queue.
    fn get_timestamp_period(&self) -> f32 {
        self.context.0.queue_get_timestamp_period(self.id)
    }

    /// Registers `callback` to run once currently submitted work completes.
    fn on_submitted_work_done(&self, callback: dispatch::BoxSubmittedWorkDoneCallback) {
        self.context
            .0
            .queue_on_submitted_work_done(self.id, callback);
    }

    /// Builds a compacted copy of `blas`, returning its raw handle (if any)
    /// alongside the new BLAS wrapper.
    fn compact_blas(&self, blas: &dispatch::DispatchBlas) -> (Option<u64>, dispatch::DispatchBlas) {
        let (id, handle, error) =
            self.context
                .0
                .queue_compact_blas(self.id, blas.as_core().id, None);

        if let Some(cause) = error {
            self.context
                .handle_error_nolabel(&self.error_sink, cause, "Queue::compact_blas");
        }
        (
            handle,
            CoreBlas {
                context: self.context.clone(),
                id,
                error_sink: Arc::clone(&self.error_sink),
            }
            .into(),
        )
    }
}
2074
// Releases the wgpu-core queue id when the wrapper goes away.
impl Drop for CoreQueue {
    fn drop(&mut self) {
        self.context.0.queue_drop(self.id)
    }
}
2080
impl dispatch::ShaderModuleInterface for CoreShaderModule {
    // Compilation info is cached on the wrapper at creation time, so the
    // returned future resolves immediately with a clone of it.
    fn get_compilation_info(&self) -> Pin<Box<dyn dispatch::ShaderCompilationInfoFuture>> {
        Box::pin(ready(self.compilation_info.clone()))
    }
}
2086
// Releases the wgpu-core shader module id when the wrapper goes away.
impl Drop for CoreShaderModule {
    fn drop(&mut self) {
        self.context.0.shader_module_drop(self.id)
    }
}
2092
// Marker impl: bind group layouts expose no operations beyond lifetime management.
impl dispatch::BindGroupLayoutInterface for CoreBindGroupLayout {}
2094
// Releases the wgpu-core bind group layout id when the wrapper goes away.
impl Drop for CoreBindGroupLayout {
    fn drop(&mut self) {
        self.context.0.bind_group_layout_drop(self.id)
    }
}
2100
// Marker impl: bind groups expose no operations beyond lifetime management.
impl dispatch::BindGroupInterface for CoreBindGroup {}
2102
// Releases the wgpu-core bind group id when the wrapper goes away.
impl Drop for CoreBindGroup {
    fn drop(&mut self) {
        self.context.0.bind_group_drop(self.id)
    }
}
2108
// Marker impl: texture views expose no operations beyond lifetime management.
impl dispatch::TextureViewInterface for CoreTextureView {}
2110
// Releases the wgpu-core texture view id when the wrapper goes away.
impl Drop for CoreTextureView {
    fn drop(&mut self) {
        // Any error from dropping the view is deliberately ignored here.
        let _ = self.context.0.texture_view_drop(self.id);
    }
}
2117
impl dispatch::ExternalTextureInterface for CoreExternalTexture {
    /// Eagerly destroys the external texture; the id is released on Drop.
    fn destroy(&self) {
        self.context.0.external_texture_destroy(self.id);
    }
}
2123
// Releases the wgpu-core external texture id when the wrapper goes away.
impl Drop for CoreExternalTexture {
    fn drop(&mut self) {
        self.context.0.external_texture_drop(self.id);
    }
}
2129
// Marker impl: samplers expose no operations beyond lifetime management.
impl dispatch::SamplerInterface for CoreSampler {}
2131
// Releases the wgpu-core sampler id when the wrapper goes away.
impl Drop for CoreSampler {
    fn drop(&mut self) {
        self.context.0.sampler_drop(self.id)
    }
}
2137
impl dispatch::BufferInterface for CoreBuffer {
    /// Starts an asynchronous map of `range`; `callback` fires once the
    /// mapping completes or fails.
    fn map_async(
        &self,
        mode: crate::MapMode,
        range: Range<crate::BufferAddress>,
        callback: dispatch::BufferMapCallback,
    ) {
        let operation = wgc::resource::BufferMapOperation {
            host: match mode {
                MapMode::Read => wgc::device::HostMap::Read,
                MapMode::Write => wgc::device::HostMap::Write,
            },
            // Collapse wgpu-core's detailed error into the public opaque type.
            callback: Some(Box::new(|status| {
                let res = status.map_err(|_| crate::BufferAsyncError);
                callback(res);
            })),
        };

        match self.context.0.buffer_map_async(
            self.id,
            range.start,
            Some(range.end - range.start),
            operation,
        ) {
            Ok(_) => (),
            Err(cause) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, cause, "Buffer::map_async")
            }
        }
    }

    /// Returns a pointer/length pair into an already-mapped sub-range.
    fn get_mapped_range(
        &self,
        sub_range: Range<crate::BufferAddress>,
    ) -> dispatch::DispatchBufferMappedRange {
        let size = sub_range.end - sub_range.start;
        match self
            .context
            .0
            .buffer_get_mapped_range(self.id, sub_range.start, Some(size))
        {
            Ok((ptr, size)) => CoreBufferMappedRange {
                ptr,
                size: size as usize,
            }
            .into(),
            // Unlike the other methods here, a failure is treated as fatal
            // rather than routed through the error sink.
            Err(err) => self
                .context
                .handle_error_fatal(err, "Buffer::get_mapped_range"),
        }
    }

    /// Unmaps the buffer, making its contents available to the GPU again.
    fn unmap(&self) {
        match self.context.0.buffer_unmap(self.id) {
            Ok(()) => (),
            Err(cause) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, cause, "Buffer::buffer_unmap")
            }
        }
    }

    /// Eagerly frees the buffer's GPU allocation; the id is released on Drop.
    fn destroy(&self) {
        self.context.0.buffer_destroy(self.id);
    }
}
2205
// Releases the wgpu-core buffer id when the wrapper goes away.
impl Drop for CoreBuffer {
    fn drop(&mut self) {
        self.context.0.buffer_drop(self.id)
    }
}
2211
2212impl dispatch::TextureInterface for CoreTexture {
2213 fn create_view(
2214 &self,
2215 desc: &crate::TextureViewDescriptor<'_>,
2216 ) -> dispatch::DispatchTextureView {
2217 let descriptor = wgc::resource::TextureViewDescriptor {
2218 label: desc.label.map(Borrowed),
2219 format: desc.format,
2220 dimension: desc.dimension,
2221 usage: desc.usage,
2222 range: wgt::ImageSubresourceRange {
2223 aspect: desc.aspect,
2224 base_mip_level: desc.base_mip_level,
2225 mip_level_count: desc.mip_level_count,
2226 base_array_layer: desc.base_array_layer,
2227 array_layer_count: desc.array_layer_count,
2228 },
2229 };
2230 let (id, error) = self
2231 .context
2232 .0
2233 .texture_create_view(self.id, &descriptor, None);
2234 if let Some(cause) = error {
2235 self.context
2236 .handle_error(&self.error_sink, cause, desc.label, "Texture::create_view");
2237 }
2238 CoreTextureView {
2239 context: self.context.clone(),
2240 id,
2241 }
2242 .into()
2243 }
2244
2245 fn destroy(&self) {
2246 self.context.0.texture_destroy(self.id);
2247 }
2248}
2249
// Releases the wgpu-core texture id when the wrapper goes away.
impl Drop for CoreTexture {
    fn drop(&mut self) {
        self.context.0.texture_drop(self.id)
    }
}
2255
impl dispatch::BlasInterface for CoreBlas {
    /// Asynchronously prepares this BLAS for compaction; `callback` fires
    /// with the outcome once preparation finishes.
    fn prepare_compact_async(&self, callback: BlasCompactCallback) {
        // Collapse wgpu-core's detailed result into the public opaque type.
        let callback: Option<wgc::resource::BlasCompactCallback> =
            Some(Box::new(|status: BlasPrepareCompactResult| {
                let res = status.map_err(|_| crate::BlasAsyncError);
                callback(res);
            }));

        match self.context.0.blas_prepare_compact_async(self.id, callback) {
            Ok(_) => (),
            Err(cause) => self.context.handle_error_nolabel(
                &self.error_sink,
                cause,
                "Blas::prepare_compact_async",
            ),
        }
    }

    /// Whether this BLAS has finished preparation and can be compacted.
    fn ready_for_compaction(&self) -> bool {
        match self.context.0.ready_for_compaction(self.id) {
            Ok(ready) => ready,
            Err(cause) => {
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    cause,
                    "Blas::ready_for_compaction",
                );
                // Report the error and conservatively answer "not ready".
                false
            }
        }
    }
}
2289
// Releases the wgpu-core BLAS id when the wrapper goes away.
impl Drop for CoreBlas {
    fn drop(&mut self) {
        self.context.0.blas_drop(self.id)
    }
}
2295
// Marker impl: TLASes expose no operations beyond lifetime management.
impl dispatch::TlasInterface for CoreTlas {}
2297
// Releases the wgpu-core TLAS id when the wrapper goes away.
impl Drop for CoreTlas {
    fn drop(&mut self) {
        self.context.0.tlas_drop(self.id)
    }
}
2303
// Marker impl: query sets expose no operations beyond lifetime management.
impl dispatch::QuerySetInterface for CoreQuerySet {}
2305
// Releases the wgpu-core query set id when the wrapper goes away.
impl Drop for CoreQuerySet {
    fn drop(&mut self) {
        self.context.0.query_set_drop(self.id)
    }
}
2311
// Marker impl: pipeline layouts expose no operations beyond lifetime management.
impl dispatch::PipelineLayoutInterface for CorePipelineLayout {}
2313
// Releases the wgpu-core pipeline layout id when the wrapper goes away.
impl Drop for CorePipelineLayout {
    fn drop(&mut self) {
        self.context.0.pipeline_layout_drop(self.id)
    }
}
2319
2320impl dispatch::RenderPipelineInterface for CoreRenderPipeline {
2321 fn get_bind_group_layout(&self, index: u32) -> dispatch::DispatchBindGroupLayout {
2322 let (id, error) = self
2323 .context
2324 .0
2325 .render_pipeline_get_bind_group_layout(self.id, index, None);
2326 if let Some(err) = error {
2327 self.context.handle_error_nolabel(
2328 &self.error_sink,
2329 err,
2330 "RenderPipeline::get_bind_group_layout",
2331 )
2332 }
2333 CoreBindGroupLayout {
2334 context: self.context.clone(),
2335 id,
2336 }
2337 .into()
2338 }
2339}
2340
// Releases the wgpu-core render pipeline id when the wrapper goes away.
impl Drop for CoreRenderPipeline {
    fn drop(&mut self) {
        self.context.0.render_pipeline_drop(self.id)
    }
}
2346
2347impl dispatch::ComputePipelineInterface for CoreComputePipeline {
2348 fn get_bind_group_layout(&self, index: u32) -> dispatch::DispatchBindGroupLayout {
2349 let (id, error) = self
2350 .context
2351 .0
2352 .compute_pipeline_get_bind_group_layout(self.id, index, None);
2353 if let Some(err) = error {
2354 self.context.handle_error_nolabel(
2355 &self.error_sink,
2356 err,
2357 "ComputePipeline::get_bind_group_layout",
2358 )
2359 }
2360 CoreBindGroupLayout {
2361 context: self.context.clone(),
2362 id,
2363 }
2364 .into()
2365 }
2366}
2367
// Releases the wgpu-core compute pipeline id when the wrapper goes away.
impl Drop for CoreComputePipeline {
    fn drop(&mut self) {
        self.context.0.compute_pipeline_drop(self.id)
    }
}
2373
impl dispatch::PipelineCacheInterface for CorePipelineCache {
    /// Serializes the cache contents for later reuse, if the backend
    /// supports it.
    fn get_data(&self) -> Option<Vec<u8>> {
        self.context.0.pipeline_cache_get_data(self.id)
    }
}
2379
// Releases the wgpu-core pipeline cache id when the wrapper goes away.
impl Drop for CorePipelineCache {
    fn drop(&mut self) {
        self.context.0.pipeline_cache_drop(self.id)
    }
}
2385
2386impl dispatch::CommandEncoderInterface for CoreCommandEncoder {
2387 fn copy_buffer_to_buffer(
2388 &self,
2389 source: &dispatch::DispatchBuffer,
2390 source_offset: crate::BufferAddress,
2391 destination: &dispatch::DispatchBuffer,
2392 destination_offset: crate::BufferAddress,
2393 copy_size: Option<crate::BufferAddress>,
2394 ) {
2395 let source = source.as_core();
2396 let destination = destination.as_core();
2397
2398 if let Err(cause) = self.context.0.command_encoder_copy_buffer_to_buffer(
2399 self.id,
2400 source.id,
2401 source_offset,
2402 destination.id,
2403 destination_offset,
2404 copy_size,
2405 ) {
2406 self.context.handle_error_nolabel(
2407 &self.error_sink,
2408 cause,
2409 "CommandEncoder::copy_buffer_to_buffer",
2410 );
2411 }
2412 }
2413
2414 fn copy_buffer_to_texture(
2415 &self,
2416 source: crate::TexelCopyBufferInfo<'_>,
2417 destination: crate::TexelCopyTextureInfo<'_>,
2418 copy_size: crate::Extent3d,
2419 ) {
2420 if let Err(cause) = self.context.0.command_encoder_copy_buffer_to_texture(
2421 self.id,
2422 &map_buffer_copy_view(source),
2423 &map_texture_copy_view(destination),
2424 ©_size,
2425 ) {
2426 self.context.handle_error_nolabel(
2427 &self.error_sink,
2428 cause,
2429 "CommandEncoder::copy_buffer_to_texture",
2430 );
2431 }
2432 }
2433
2434 fn copy_texture_to_buffer(
2435 &self,
2436 source: crate::TexelCopyTextureInfo<'_>,
2437 destination: crate::TexelCopyBufferInfo<'_>,
2438 copy_size: crate::Extent3d,
2439 ) {
2440 if let Err(cause) = self.context.0.command_encoder_copy_texture_to_buffer(
2441 self.id,
2442 &map_texture_copy_view(source),
2443 &map_buffer_copy_view(destination),
2444 ©_size,
2445 ) {
2446 self.context.handle_error_nolabel(
2447 &self.error_sink,
2448 cause,
2449 "CommandEncoder::copy_texture_to_buffer",
2450 );
2451 }
2452 }
2453
2454 fn copy_texture_to_texture(
2455 &self,
2456 source: crate::TexelCopyTextureInfo<'_>,
2457 destination: crate::TexelCopyTextureInfo<'_>,
2458 copy_size: crate::Extent3d,
2459 ) {
2460 if let Err(cause) = self.context.0.command_encoder_copy_texture_to_texture(
2461 self.id,
2462 &map_texture_copy_view(source),
2463 &map_texture_copy_view(destination),
2464 ©_size,
2465 ) {
2466 self.context.handle_error_nolabel(
2467 &self.error_sink,
2468 cause,
2469 "CommandEncoder::copy_texture_to_texture",
2470 );
2471 }
2472 }
2473
2474 fn begin_compute_pass(
2475 &self,
2476 desc: &crate::ComputePassDescriptor<'_>,
2477 ) -> dispatch::DispatchComputePass {
2478 let timestamp_writes =
2479 desc.timestamp_writes
2480 .as_ref()
2481 .map(|tw| wgc::command::PassTimestampWrites {
2482 query_set: tw.query_set.inner.as_core().id,
2483 beginning_of_pass_write_index: tw.beginning_of_pass_write_index,
2484 end_of_pass_write_index: tw.end_of_pass_write_index,
2485 });
2486
2487 let (pass, err) = self.context.0.command_encoder_begin_compute_pass(
2488 self.id,
2489 &wgc::command::ComputePassDescriptor {
2490 label: desc.label.map(Borrowed),
2491 timestamp_writes,
2492 },
2493 );
2494
2495 if let Some(cause) = err {
2496 self.context.handle_error(
2497 &self.error_sink,
2498 cause,
2499 desc.label,
2500 "CommandEncoder::begin_compute_pass",
2501 );
2502 }
2503
2504 CoreComputePass {
2505 context: self.context.clone(),
2506 pass,
2507 error_sink: self.error_sink.clone(),
2508 id: crate::cmp::Identifier::create(),
2509 }
2510 .into()
2511 }
2512
2513 fn begin_render_pass(
2514 &self,
2515 desc: &crate::RenderPassDescriptor<'_>,
2516 ) -> dispatch::DispatchRenderPass {
2517 let colors = desc
2518 .color_attachments
2519 .iter()
2520 .map(|ca| {
2521 ca.as_ref()
2522 .map(|at| wgc::command::RenderPassColorAttachment {
2523 view: at.view.inner.as_core().id,
2524 depth_slice: at.depth_slice,
2525 resolve_target: at.resolve_target.map(|view| view.inner.as_core().id),
2526 load_op: at.ops.load,
2527 store_op: at.ops.store,
2528 })
2529 })
2530 .collect::<Vec<_>>();
2531
2532 let depth_stencil = desc.depth_stencil_attachment.as_ref().map(|dsa| {
2533 wgc::command::RenderPassDepthStencilAttachment {
2534 view: dsa.view.inner.as_core().id,
2535 depth: map_pass_channel(dsa.depth_ops.as_ref()),
2536 stencil: map_pass_channel(dsa.stencil_ops.as_ref()),
2537 }
2538 });
2539
2540 let timestamp_writes =
2541 desc.timestamp_writes
2542 .as_ref()
2543 .map(|tw| wgc::command::PassTimestampWrites {
2544 query_set: tw.query_set.inner.as_core().id,
2545 beginning_of_pass_write_index: tw.beginning_of_pass_write_index,
2546 end_of_pass_write_index: tw.end_of_pass_write_index,
2547 });
2548
2549 let (pass, err) = self.context.0.command_encoder_begin_render_pass(
2550 self.id,
2551 &wgc::command::RenderPassDescriptor {
2552 label: desc.label.map(Borrowed),
2553 timestamp_writes: timestamp_writes.as_ref(),
2554 color_attachments: Borrowed(&colors),
2555 depth_stencil_attachment: depth_stencil.as_ref(),
2556 occlusion_query_set: desc.occlusion_query_set.map(|qs| qs.inner.as_core().id),
2557 multiview_mask: desc.multiview_mask,
2558 },
2559 );
2560
2561 if let Some(cause) = err {
2562 self.context.handle_error(
2563 &self.error_sink,
2564 cause,
2565 desc.label,
2566 "CommandEncoder::begin_render_pass",
2567 );
2568 }
2569
2570 CoreRenderPass {
2571 context: self.context.clone(),
2572 pass,
2573 error_sink: self.error_sink.clone(),
2574 id: crate::cmp::Identifier::create(),
2575 }
2576 .into()
2577 }
2578
2579 fn finish(&mut self) -> dispatch::DispatchCommandBuffer {
2580 let descriptor = wgt::CommandBufferDescriptor::default();
2581 let (id, opt_label_and_error) =
2582 self.context
2583 .0
2584 .command_encoder_finish(self.id, &descriptor, None);
2585 if let Some((label, cause)) = opt_label_and_error {
2586 self.context
2587 .handle_error(&self.error_sink, cause, Some(&label), "a CommandEncoder");
2588 }
2589 CoreCommandBuffer {
2590 context: self.context.clone(),
2591 id,
2592 }
2593 .into()
2594 }
2595
2596 fn clear_texture(
2597 &self,
2598 texture: &dispatch::DispatchTexture,
2599 subresource_range: &crate::ImageSubresourceRange,
2600 ) {
2601 let texture = texture.as_core();
2602
2603 if let Err(cause) =
2604 self.context
2605 .0
2606 .command_encoder_clear_texture(self.id, texture.id, subresource_range)
2607 {
2608 self.context.handle_error_nolabel(
2609 &self.error_sink,
2610 cause,
2611 "CommandEncoder::clear_texture",
2612 );
2613 }
2614 }
2615
2616 fn clear_buffer(
2617 &self,
2618 buffer: &dispatch::DispatchBuffer,
2619 offset: crate::BufferAddress,
2620 size: Option<crate::BufferAddress>,
2621 ) {
2622 let buffer = buffer.as_core();
2623
2624 if let Err(cause) = self
2625 .context
2626 .0
2627 .command_encoder_clear_buffer(self.id, buffer.id, offset, size)
2628 {
2629 self.context.handle_error_nolabel(
2630 &self.error_sink,
2631 cause,
2632 "CommandEncoder::fill_buffer",
2633 );
2634 }
2635 }
2636
2637 fn insert_debug_marker(&self, label: &str) {
2638 if let Err(cause) = self
2639 .context
2640 .0
2641 .command_encoder_insert_debug_marker(self.id, label)
2642 {
2643 self.context.handle_error_nolabel(
2644 &self.error_sink,
2645 cause,
2646 "CommandEncoder::insert_debug_marker",
2647 );
2648 }
2649 }
2650
2651 fn push_debug_group(&self, label: &str) {
2652 if let Err(cause) = self
2653 .context
2654 .0
2655 .command_encoder_push_debug_group(self.id, label)
2656 {
2657 self.context.handle_error_nolabel(
2658 &self.error_sink,
2659 cause,
2660 "CommandEncoder::push_debug_group",
2661 );
2662 }
2663 }
2664
2665 fn pop_debug_group(&self) {
2666 if let Err(cause) = self.context.0.command_encoder_pop_debug_group(self.id) {
2667 self.context.handle_error_nolabel(
2668 &self.error_sink,
2669 cause,
2670 "CommandEncoder::pop_debug_group",
2671 );
2672 }
2673 }
2674
2675 fn write_timestamp(&self, query_set: &dispatch::DispatchQuerySet, query_index: u32) {
2676 let query_set = query_set.as_core();
2677
2678 if let Err(cause) =
2679 self.context
2680 .0
2681 .command_encoder_write_timestamp(self.id, query_set.id, query_index)
2682 {
2683 self.context.handle_error_nolabel(
2684 &self.error_sink,
2685 cause,
2686 "CommandEncoder::write_timestamp",
2687 );
2688 }
2689 }
2690
2691 fn resolve_query_set(
2692 &self,
2693 query_set: &dispatch::DispatchQuerySet,
2694 first_query: u32,
2695 query_count: u32,
2696 destination: &dispatch::DispatchBuffer,
2697 destination_offset: crate::BufferAddress,
2698 ) {
2699 let query_set = query_set.as_core();
2700 let destination = destination.as_core();
2701
2702 if let Err(cause) = self.context.0.command_encoder_resolve_query_set(
2703 self.id,
2704 query_set.id,
2705 first_query,
2706 query_count,
2707 destination.id,
2708 destination_offset,
2709 ) {
2710 self.context.handle_error_nolabel(
2711 &self.error_sink,
2712 cause,
2713 "CommandEncoder::resolve_query_set",
2714 );
2715 }
2716 }
2717
2718 fn mark_acceleration_structures_built<'a>(
2719 &self,
2720 blas: &mut dyn Iterator<Item = &'a Blas>,
2721 tlas: &mut dyn Iterator<Item = &'a Tlas>,
2722 ) {
2723 let blas = blas
2724 .map(|b| b.inner.as_core().id)
2725 .collect::<SmallVec<[_; 4]>>();
2726 let tlas = tlas
2727 .map(|t| t.inner.as_core().id)
2728 .collect::<SmallVec<[_; 4]>>();
2729 if let Err(cause) = self
2730 .context
2731 .0
2732 .command_encoder_mark_acceleration_structures_built(self.id, &blas, &tlas)
2733 {
2734 self.context.handle_error_nolabel(
2735 &self.error_sink,
2736 cause,
2737 "CommandEncoder::build_acceleration_structures_unsafe_tlas",
2738 );
2739 }
2740 }
2741
2742 fn build_acceleration_structures<'a>(
2743 &self,
2744 blas: &mut dyn Iterator<Item = &'a crate::BlasBuildEntry<'a>>,
2745 tlas: &mut dyn Iterator<Item = &'a crate::Tlas>,
2746 ) {
2747 let blas = blas.map(|e: &crate::BlasBuildEntry<'_>| {
2748 let geometries = match e.geometry {
2749 crate::BlasGeometries::TriangleGeometries(ref triangle_geometries) => {
2750 let iter = triangle_geometries.iter().map(|tg| {
2751 wgc::ray_tracing::BlasTriangleGeometry {
2752 vertex_buffer: tg.vertex_buffer.inner.as_core().id,
2753 index_buffer: tg.index_buffer.map(|buf| buf.inner.as_core().id),
2754 transform_buffer: tg.transform_buffer.map(|buf| buf.inner.as_core().id),
2755 size: tg.size,
2756 transform_buffer_offset: tg.transform_buffer_offset,
2757 first_vertex: tg.first_vertex,
2758 vertex_stride: tg.vertex_stride,
2759 first_index: tg.first_index,
2760 }
2761 });
2762 wgc::ray_tracing::BlasGeometries::TriangleGeometries(Box::new(iter))
2763 }
2764 };
2765 wgc::ray_tracing::BlasBuildEntry {
2766 blas_id: e.blas.inner.as_core().id,
2767 geometries,
2768 }
2769 });
2770
2771 let tlas = tlas.into_iter().map(|e| {
2772 let instances = e
2773 .instances
2774 .iter()
2775 .map(|instance: &Option<crate::TlasInstance>| {
2776 instance
2777 .as_ref()
2778 .map(|instance| wgc::ray_tracing::TlasInstance {
2779 blas_id: instance.blas.as_core().id,
2780 transform: &instance.transform,
2781 custom_data: instance.custom_data,
2782 mask: instance.mask,
2783 })
2784 });
2785 wgc::ray_tracing::TlasPackage {
2786 tlas_id: e.inner.as_core().id,
2787 instances: Box::new(instances),
2788 lowest_unmodified: e.lowest_unmodified,
2789 }
2790 });
2791
2792 if let Err(cause) = self
2793 .context
2794 .0
2795 .command_encoder_build_acceleration_structures(self.id, blas, tlas)
2796 {
2797 self.context.handle_error_nolabel(
2798 &self.error_sink,
2799 cause,
2800 "CommandEncoder::build_acceleration_structures_unsafe_tlas",
2801 );
2802 }
2803 }
2804
2805 fn transition_resources<'a>(
2806 &mut self,
2807 buffer_transitions: &mut dyn Iterator<
2808 Item = wgt::BufferTransition<&'a dispatch::DispatchBuffer>,
2809 >,
2810 texture_transitions: &mut dyn Iterator<
2811 Item = wgt::TextureTransition<&'a dispatch::DispatchTexture>,
2812 >,
2813 ) {
2814 let result = self.context.0.command_encoder_transition_resources(
2815 self.id,
2816 buffer_transitions.map(|t| wgt::BufferTransition {
2817 buffer: t.buffer.as_core().id,
2818 state: t.state,
2819 }),
2820 texture_transitions.map(|t| wgt::TextureTransition {
2821 texture: t.texture.as_core().id,
2822 selector: t.selector.clone(),
2823 state: t.state,
2824 }),
2825 );
2826
2827 if let Err(cause) = result {
2828 self.context.handle_error_nolabel(
2829 &self.error_sink,
2830 cause,
2831 "CommandEncoder::transition_resources",
2832 );
2833 }
2834 }
2835}
2836
impl Drop for CoreCommandEncoder {
    fn drop(&mut self) {
        // Tell wgpu-core this encoder id is no longer referenced by the user.
        self.context.0.command_encoder_drop(self.id)
    }
}
2842
// Marker impl: `CommandBufferInterface` has no required methods for this backend.
impl dispatch::CommandBufferInterface for CoreCommandBuffer {}
2844
impl Drop for CoreCommandBuffer {
    fn drop(&mut self) {
        // Tell wgpu-core this command buffer id is no longer referenced by the user.
        self.context.0.command_buffer_drop(self.id)
    }
}
2850
2851impl dispatch::ComputePassInterface for CoreComputePass {
2852 fn set_pipeline(&mut self, pipeline: &dispatch::DispatchComputePipeline) {
2853 let pipeline = pipeline.as_core();
2854
2855 if let Err(cause) = self
2856 .context
2857 .0
2858 .compute_pass_set_pipeline(&mut self.pass, pipeline.id)
2859 {
2860 self.context.handle_error(
2861 &self.error_sink,
2862 cause,
2863 self.pass.label(),
2864 "ComputePass::set_pipeline",
2865 );
2866 }
2867 }
2868
2869 fn set_bind_group(
2870 &mut self,
2871 index: u32,
2872 bind_group: Option<&dispatch::DispatchBindGroup>,
2873 offsets: &[crate::DynamicOffset],
2874 ) {
2875 let bg = bind_group.map(|bg| bg.as_core().id);
2876
2877 if let Err(cause) =
2878 self.context
2879 .0
2880 .compute_pass_set_bind_group(&mut self.pass, index, bg, offsets)
2881 {
2882 self.context.handle_error(
2883 &self.error_sink,
2884 cause,
2885 self.pass.label(),
2886 "ComputePass::set_bind_group",
2887 );
2888 }
2889 }
2890
2891 fn set_push_constants(&mut self, offset: u32, data: &[u8]) {
2892 if let Err(cause) =
2893 self.context
2894 .0
2895 .compute_pass_set_push_constants(&mut self.pass, offset, data)
2896 {
2897 self.context.handle_error(
2898 &self.error_sink,
2899 cause,
2900 self.pass.label(),
2901 "ComputePass::set_push_constant",
2902 );
2903 }
2904 }
2905
2906 fn insert_debug_marker(&mut self, label: &str) {
2907 if let Err(cause) =
2908 self.context
2909 .0
2910 .compute_pass_insert_debug_marker(&mut self.pass, label, 0)
2911 {
2912 self.context.handle_error(
2913 &self.error_sink,
2914 cause,
2915 self.pass.label(),
2916 "ComputePass::insert_debug_marker",
2917 );
2918 }
2919 }
2920
2921 fn push_debug_group(&mut self, group_label: &str) {
2922 if let Err(cause) =
2923 self.context
2924 .0
2925 .compute_pass_push_debug_group(&mut self.pass, group_label, 0)
2926 {
2927 self.context.handle_error(
2928 &self.error_sink,
2929 cause,
2930 self.pass.label(),
2931 "ComputePass::push_debug_group",
2932 );
2933 }
2934 }
2935
2936 fn pop_debug_group(&mut self) {
2937 if let Err(cause) = self.context.0.compute_pass_pop_debug_group(&mut self.pass) {
2938 self.context.handle_error(
2939 &self.error_sink,
2940 cause,
2941 self.pass.label(),
2942 "ComputePass::pop_debug_group",
2943 );
2944 }
2945 }
2946
2947 fn write_timestamp(&mut self, query_set: &dispatch::DispatchQuerySet, query_index: u32) {
2948 let query_set = query_set.as_core();
2949
2950 if let Err(cause) =
2951 self.context
2952 .0
2953 .compute_pass_write_timestamp(&mut self.pass, query_set.id, query_index)
2954 {
2955 self.context.handle_error(
2956 &self.error_sink,
2957 cause,
2958 self.pass.label(),
2959 "ComputePass::write_timestamp",
2960 );
2961 }
2962 }
2963
2964 fn begin_pipeline_statistics_query(
2965 &mut self,
2966 query_set: &dispatch::DispatchQuerySet,
2967 query_index: u32,
2968 ) {
2969 let query_set = query_set.as_core();
2970
2971 if let Err(cause) = self.context.0.compute_pass_begin_pipeline_statistics_query(
2972 &mut self.pass,
2973 query_set.id,
2974 query_index,
2975 ) {
2976 self.context.handle_error(
2977 &self.error_sink,
2978 cause,
2979 self.pass.label(),
2980 "ComputePass::begin_pipeline_statistics_query",
2981 );
2982 }
2983 }
2984
2985 fn end_pipeline_statistics_query(&mut self) {
2986 if let Err(cause) = self
2987 .context
2988 .0
2989 .compute_pass_end_pipeline_statistics_query(&mut self.pass)
2990 {
2991 self.context.handle_error(
2992 &self.error_sink,
2993 cause,
2994 self.pass.label(),
2995 "ComputePass::end_pipeline_statistics_query",
2996 );
2997 }
2998 }
2999
3000 fn dispatch_workgroups(&mut self, x: u32, y: u32, z: u32) {
3001 if let Err(cause) = self
3002 .context
3003 .0
3004 .compute_pass_dispatch_workgroups(&mut self.pass, x, y, z)
3005 {
3006 self.context.handle_error(
3007 &self.error_sink,
3008 cause,
3009 self.pass.label(),
3010 "ComputePass::dispatch_workgroups",
3011 );
3012 }
3013 }
3014
3015 fn dispatch_workgroups_indirect(
3016 &mut self,
3017 indirect_buffer: &dispatch::DispatchBuffer,
3018 indirect_offset: crate::BufferAddress,
3019 ) {
3020 let indirect_buffer = indirect_buffer.as_core();
3021
3022 if let Err(cause) = self.context.0.compute_pass_dispatch_workgroups_indirect(
3023 &mut self.pass,
3024 indirect_buffer.id,
3025 indirect_offset,
3026 ) {
3027 self.context.handle_error(
3028 &self.error_sink,
3029 cause,
3030 self.pass.label(),
3031 "ComputePass::dispatch_workgroups_indirect",
3032 );
3033 }
3034 }
3035
3036 fn end(&mut self) {
3037 if let Err(cause) = self.context.0.compute_pass_end(&mut self.pass) {
3038 self.context.handle_error(
3039 &self.error_sink,
3040 cause,
3041 self.pass.label(),
3042 "ComputePass::end",
3043 );
3044 }
3045 }
3046}
3047
impl Drop for CoreComputePass {
    fn drop(&mut self) {
        // Ensure the pass is ended even if the user never called `end`;
        // any resulting error goes through the usual error sink.
        dispatch::ComputePassInterface::end(self);
    }
}
3053
/// `RenderPassInterface` backed by `wgpu-core`.
///
/// Each method records a command into the in-progress pass via the `Global`
/// `render_pass_*` entry points. Errors are not returned; they are routed to
/// the shared `error_sink`, tagged with the pass label and the name of the
/// originating `RenderPass` method.
impl dispatch::RenderPassInterface for CoreRenderPass {
    fn set_pipeline(&mut self, pipeline: &dispatch::DispatchRenderPipeline) {
        let pipeline = pipeline.as_core();

        if let Err(cause) = self
            .context
            .0
            .render_pass_set_pipeline(&mut self.pass, pipeline.id)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_pipeline",
            );
        }
    }

    fn set_bind_group(
        &mut self,
        index: u32,
        bind_group: Option<&dispatch::DispatchBindGroup>,
        offsets: &[crate::DynamicOffset],
    ) {
        // `None` unbinds the group at `index`.
        let bg = bind_group.map(|bg| bg.as_core().id);

        if let Err(cause) =
            self.context
                .0
                .render_pass_set_bind_group(&mut self.pass, index, bg, offsets)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_bind_group",
            );
        }
    }

    fn set_index_buffer(
        &mut self,
        buffer: &dispatch::DispatchBuffer,
        index_format: crate::IndexFormat,
        offset: crate::BufferAddress,
        size: Option<crate::BufferSize>,
    ) {
        let buffer = buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_set_index_buffer(
            &mut self.pass,
            buffer.id,
            index_format,
            offset,
            size,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_index_buffer",
            );
        }
    }

    fn set_vertex_buffer(
        &mut self,
        slot: u32,
        buffer: &dispatch::DispatchBuffer,
        offset: crate::BufferAddress,
        size: Option<crate::BufferSize>,
    ) {
        let buffer = buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_set_vertex_buffer(
            &mut self.pass,
            slot,
            buffer.id,
            offset,
            size,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_vertex_buffer",
            );
        }
    }

    fn set_push_constants(&mut self, stages: crate::ShaderStages, offset: u32, data: &[u8]) {
        if let Err(cause) =
            self.context
                .0
                .render_pass_set_push_constants(&mut self.pass, stages, offset, data)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_push_constants",
            );
        }
    }

    fn set_blend_constant(&mut self, color: crate::Color) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_set_blend_constant(&mut self.pass, color)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_blend_constant",
            );
        }
    }

    fn set_scissor_rect(&mut self, x: u32, y: u32, width: u32, height: u32) {
        if let Err(cause) =
            self.context
                .0
                .render_pass_set_scissor_rect(&mut self.pass, x, y, width, height)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_scissor_rect",
            );
        }
    }

    fn set_viewport(
        &mut self,
        x: f32,
        y: f32,
        width: f32,
        height: f32,
        min_depth: f32,
        max_depth: f32,
    ) {
        if let Err(cause) = self.context.0.render_pass_set_viewport(
            &mut self.pass,
            x,
            y,
            width,
            height,
            min_depth,
            max_depth,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_viewport",
            );
        }
    }

    fn set_stencil_reference(&mut self, reference: u32) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_set_stencil_reference(&mut self.pass, reference)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_stencil_reference",
            );
        }
    }

    fn draw(&mut self, vertices: Range<u32>, instances: Range<u32>) {
        // The dispatch API takes ranges; wgpu-core takes (count, first) pairs.
        if let Err(cause) = self.context.0.render_pass_draw(
            &mut self.pass,
            vertices.end - vertices.start,
            instances.end - instances.start,
            vertices.start,
            instances.start,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw",
            );
        }
    }

    fn draw_indexed(&mut self, indices: Range<u32>, base_vertex: i32, instances: Range<u32>) {
        if let Err(cause) = self.context.0.render_pass_draw_indexed(
            &mut self.pass,
            indices.end - indices.start,
            instances.end - instances.start,
            indices.start,
            base_vertex,
            instances.start,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_indexed",
            );
        }
    }

    fn draw_mesh_tasks(&mut self, group_count_x: u32, group_count_y: u32, group_count_z: u32) {
        if let Err(cause) = self.context.0.render_pass_draw_mesh_tasks(
            &mut self.pass,
            group_count_x,
            group_count_y,
            group_count_z,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_mesh_tasks",
            );
        }
    }

    fn draw_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_draw_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_indirect",
            );
        }
    }

    fn draw_indexed_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_draw_indexed_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_indexed_indirect",
            );
        }
    }

    fn draw_mesh_tasks_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_draw_mesh_tasks_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_mesh_tasks_indirect",
            );
        }
    }

    fn multi_draw_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_multi_draw_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
            count,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_indirect",
            );
        }
    }

    fn multi_draw_indexed_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_multi_draw_indexed_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
            count,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_indexed_indirect",
            );
        }
    }

    fn multi_draw_mesh_tasks_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_multi_draw_mesh_tasks_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
            count,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_mesh_tasks_indirect",
            );
        }
    }

    fn multi_draw_indirect_count(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count_buffer: &dispatch::DispatchBuffer,
        count_buffer_offset: crate::BufferAddress,
        max_count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();
        let count_buffer = count_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_multi_draw_indirect_count(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
            count_buffer.id,
            count_buffer_offset,
            max_count,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_indirect_count",
            );
        }
    }

    fn multi_draw_indexed_indirect_count(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count_buffer: &dispatch::DispatchBuffer,
        count_buffer_offset: crate::BufferAddress,
        max_count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();
        let count_buffer = count_buffer.as_core();

        if let Err(cause) = self
            .context
            .0
            .render_pass_multi_draw_indexed_indirect_count(
                &mut self.pass,
                indirect_buffer.id,
                indirect_offset,
                count_buffer.id,
                count_buffer_offset,
                max_count,
            )
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_indexed_indirect_count",
            );
        }
    }

    fn multi_draw_mesh_tasks_indirect_count(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count_buffer: &dispatch::DispatchBuffer,
        count_buffer_offset: crate::BufferAddress,
        max_count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();
        let count_buffer = count_buffer.as_core();

        if let Err(cause) = self
            .context
            .0
            .render_pass_multi_draw_mesh_tasks_indirect_count(
                &mut self.pass,
                indirect_buffer.id,
                indirect_offset,
                count_buffer.id,
                count_buffer_offset,
                max_count,
            )
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_mesh_tasks_indirect_count",
            );
        }
    }

    fn insert_debug_marker(&mut self, label: &str) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_insert_debug_marker(&mut self.pass, label, 0)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::insert_debug_marker",
            );
        }
    }

    fn push_debug_group(&mut self, group_label: &str) {
        if let Err(cause) =
            self.context
                .0
                .render_pass_push_debug_group(&mut self.pass, group_label, 0)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::push_debug_group",
            );
        }
    }

    fn pop_debug_group(&mut self) {
        if let Err(cause) = self.context.0.render_pass_pop_debug_group(&mut self.pass) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::pop_debug_group",
            );
        }
    }

    fn write_timestamp(&mut self, query_set: &dispatch::DispatchQuerySet, query_index: u32) {
        let query_set = query_set.as_core();

        if let Err(cause) =
            self.context
                .0
                .render_pass_write_timestamp(&mut self.pass, query_set.id, query_index)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::write_timestamp",
            );
        }
    }

    fn begin_occlusion_query(&mut self, query_index: u32) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_begin_occlusion_query(&mut self.pass, query_index)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::begin_occlusion_query",
            );
        }
    }

    fn end_occlusion_query(&mut self) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_end_occlusion_query(&mut self.pass)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::end_occlusion_query",
            );
        }
    }

    fn begin_pipeline_statistics_query(
        &mut self,
        query_set: &dispatch::DispatchQuerySet,
        query_index: u32,
    ) {
        let query_set = query_set.as_core();

        if let Err(cause) = self.context.0.render_pass_begin_pipeline_statistics_query(
            &mut self.pass,
            query_set.id,
            query_index,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::begin_pipeline_statistics_query",
            );
        }
    }

    fn end_pipeline_statistics_query(&mut self) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_end_pipeline_statistics_query(&mut self.pass)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::end_pipeline_statistics_query",
            );
        }
    }

    fn execute_bundles(
        &mut self,
        render_bundles: &mut dyn Iterator<Item = &dispatch::DispatchRenderBundle>,
    ) {
        // Collect bundle ids up front; small counts stay on the stack.
        let temp_render_bundles = render_bundles
            .map(|rb| rb.as_core().id)
            .collect::<SmallVec<[_; 4]>>();
        if let Err(cause) = self
            .context
            .0
            .render_pass_execute_bundles(&mut self.pass, &temp_render_bundles)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::execute_bundles",
            );
        }
    }

    fn end(&mut self) {
        if let Err(cause) = self.context.0.render_pass_end(&mut self.pass) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::end",
            );
        }
    }
}
3662
impl Drop for CoreRenderPass {
    fn drop(&mut self) {
        // Ensure the pass is ended even if the user never called `end`;
        // any resulting error goes through the usual error sink.
        dispatch::RenderPassInterface::end(self);
    }
}
3668
3669impl dispatch::RenderBundleEncoderInterface for CoreRenderBundleEncoder {
    /// Records a pipeline switch into the bundle via the bundle FFI helper.
    fn set_pipeline(&mut self, pipeline: &dispatch::DispatchRenderPipeline) {
        let pipeline = pipeline.as_core();

        wgpu_render_bundle_set_pipeline(&mut self.encoder, pipeline.id)
    }
3675
    /// Records a bind-group change into the bundle. `None` unbinds `index`.
    fn set_bind_group(
        &mut self,
        index: u32,
        bind_group: Option<&dispatch::DispatchBindGroup>,
        offsets: &[crate::DynamicOffset],
    ) {
        let bg = bind_group.map(|bg| bg.as_core().id);

        // SAFETY: the pointer/length pair comes straight from the `offsets`
        // slice, which outlives the call.
        unsafe {
            wgpu_render_bundle_set_bind_group(
                &mut self.encoder,
                index,
                bg,
                offsets.as_ptr(),
                offsets.len(),
            )
        }
    }
3694
    /// Records the index buffer binding into the bundle.
    fn set_index_buffer(
        &mut self,
        buffer: &dispatch::DispatchBuffer,
        index_format: crate::IndexFormat,
        offset: crate::BufferAddress,
        size: Option<crate::BufferSize>,
    ) {
        let buffer = buffer.as_core();

        self.encoder
            .set_index_buffer(buffer.id, index_format, offset, size)
    }
3707
3708 fn set_vertex_buffer(
3709 &mut self,
3710 slot: u32,
3711 buffer: &dispatch::DispatchBuffer,
3712 offset: crate::BufferAddress,
3713 size: Option<crate::BufferSize>,
3714 ) {
3715 let buffer = buffer.as_core();
3716
3717 wgpu_render_bundle_set_vertex_buffer(&mut self.encoder, slot, buffer.id, offset, size)
3718 }
3719
3720 fn set_push_constants(&mut self, stages: crate::ShaderStages, offset: u32, data: &[u8]) {
3721 unsafe {
3722 wgpu_render_bundle_set_push_constants(
3723 &mut self.encoder,
3724 stages,
3725 offset,
3726 data.len().try_into().unwrap(),
3727 data.as_ptr(),
3728 )
3729 }
3730 }
3731
3732 fn draw(&mut self, vertices: Range<u32>, instances: Range<u32>) {
3733 wgpu_render_bundle_draw(
3734 &mut self.encoder,
3735 vertices.end - vertices.start,
3736 instances.end - instances.start,
3737 vertices.start,
3738 instances.start,
3739 )
3740 }
3741
3742 fn draw_indexed(&mut self, indices: Range<u32>, base_vertex: i32, instances: Range<u32>) {
3743 wgpu_render_bundle_draw_indexed(
3744 &mut self.encoder,
3745 indices.end - indices.start,
3746 instances.end - instances.start,
3747 indices.start,
3748 base_vertex,
3749 instances.start,
3750 )
3751 }
3752
3753 fn draw_indirect(
3754 &mut self,
3755 indirect_buffer: &dispatch::DispatchBuffer,
3756 indirect_offset: crate::BufferAddress,
3757 ) {
3758 let indirect_buffer = indirect_buffer.as_core();
3759
3760 wgpu_render_bundle_draw_indirect(&mut self.encoder, indirect_buffer.id, indirect_offset)
3761 }
3762
3763 fn draw_indexed_indirect(
3764 &mut self,
3765 indirect_buffer: &dispatch::DispatchBuffer,
3766 indirect_offset: crate::BufferAddress,
3767 ) {
3768 let indirect_buffer = indirect_buffer.as_core();
3769
3770 wgpu_render_bundle_draw_indexed_indirect(
3771 &mut self.encoder,
3772 indirect_buffer.id,
3773 indirect_offset,
3774 )
3775 }
3776
3777 fn finish(self, desc: &crate::RenderBundleDescriptor<'_>) -> dispatch::DispatchRenderBundle
3778 where
3779 Self: Sized,
3780 {
3781 let (id, error) = self.context.0.render_bundle_encoder_finish(
3782 self.encoder,
3783 &desc.map_label(|l| l.map(Borrowed)),
3784 None,
3785 );
3786 if let Some(err) = error {
3787 self.context
3788 .handle_error_fatal(err, "RenderBundleEncoder::finish");
3789 }
3790 CoreRenderBundle { id }.into()
3791 }
3792}
3793
// Marker impl: a finished render bundle needs no backend-specific behavior.
impl dispatch::RenderBundleInterface for CoreRenderBundle {}
3795
3796impl dispatch::SurfaceInterface for CoreSurface {
3797 fn get_capabilities(&self, adapter: &dispatch::DispatchAdapter) -> wgt::SurfaceCapabilities {
3798 let adapter = adapter.as_core();
3799
3800 self.context
3801 .0
3802 .surface_get_capabilities(self.id, adapter.id)
3803 .unwrap_or_default()
3804 }
3805
3806 fn configure(&self, device: &dispatch::DispatchDevice, config: &crate::SurfaceConfiguration) {
3807 let device = device.as_core();
3808
3809 let error = self.context.0.surface_configure(self.id, device.id, config);
3810 if let Some(e) = error {
3811 self.context
3812 .handle_error_nolabel(&device.error_sink, e, "Surface::configure");
3813 } else {
3814 *self.configured_device.lock() = Some(device.id);
3815 *self.error_sink.lock() = Some(device.error_sink.clone());
3816 }
3817 }
3818
3819 fn get_current_texture(
3820 &self,
3821 ) -> (
3822 Option<dispatch::DispatchTexture>,
3823 crate::SurfaceStatus,
3824 dispatch::DispatchSurfaceOutputDetail,
3825 ) {
3826 let output_detail = CoreSurfaceOutputDetail {
3827 context: self.context.clone(),
3828 surface_id: self.id,
3829 }
3830 .into();
3831
3832 match self.context.0.surface_get_current_texture(self.id, None) {
3833 Ok(wgc::present::SurfaceOutput {
3834 status,
3835 texture: texture_id,
3836 }) => {
3837 let data = texture_id
3838 .map(|id| CoreTexture {
3839 context: self.context.clone(),
3840 id,
3841 error_sink: Arc::new(Mutex::new(ErrorSinkRaw::new())),
3842 })
3843 .map(Into::into);
3844
3845 (data, status, output_detail)
3846 }
3847 Err(err) => {
3848 let error_sink = self.error_sink.lock();
3849 match error_sink.as_ref() {
3850 Some(error_sink) => {
3851 self.context.handle_error_nolabel(
3852 error_sink,
3853 err,
3854 "Surface::get_current_texture_view",
3855 );
3856 (None, crate::SurfaceStatus::Unknown, output_detail)
3857 }
3858 None => self
3859 .context
3860 .handle_error_fatal(err, "Surface::get_current_texture_view"),
3861 }
3862 }
3863 }
3864 }
3865}
3866
impl Drop for CoreSurface {
    fn drop(&mut self) {
        // Release the core-side surface object when the wrapper goes away.
        self.context.0.surface_drop(self.id)
    }
}
3872
3873impl dispatch::SurfaceOutputDetailInterface for CoreSurfaceOutputDetail {
3874 fn present(&self) {
3875 match self.context.0.surface_present(self.surface_id) {
3876 Ok(_status) => (),
3877 Err(err) => self.context.handle_error_fatal(err, "Surface::present"),
3878 }
3879 }
3880
3881 fn texture_discard(&self) {
3882 match self.context.0.surface_texture_discard(self.surface_id) {
3883 Ok(_status) => (),
3884 Err(err) => self
3885 .context
3886 .handle_error_fatal(err, "Surface::discard_texture"),
3887 }
3888 }
3889}
impl Drop for CoreSurfaceOutputDetail {
    fn drop(&mut self) {
        // Intentionally empty: presenting or discarding the acquired surface
        // texture is an explicit operation (`present` / `texture_discard`).
        // NOTE(review): nothing core-side appears to need cleanup on drop --
        // TODO confirm no implicit discard is expected here.
    }
}
3897
3898impl dispatch::QueueWriteBufferInterface for CoreQueueWriteBuffer {
3899 fn slice(&self) -> &[u8] {
3900 panic!()
3901 }
3902
3903 #[inline]
3904 fn slice_mut(&mut self) -> &mut [u8] {
3905 self.mapping.slice_mut()
3906 }
3907}
impl Drop for CoreQueueWriteBuffer {
    fn drop(&mut self) {
        // Intentionally empty: the staged contents are consumed by the queue
        // write that uses this buffer, and `self.mapping` presumably releases
        // its resources in its own Drop -- TODO confirm.
    }
}
3915
impl dispatch::BufferMappedRangeInterface for CoreBufferMappedRange {
    #[inline]
    fn slice(&self) -> &[u8] {
        // SAFETY: `ptr`/`size` are assumed to describe a mapped buffer range
        // valid for `size` bytes for the lifetime of `self`; invariant upheld
        // where this struct is constructed -- TODO confirm at construction
        // sites.
        unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.size) }
    }

    #[inline]
    fn slice_mut(&mut self) -> &mut [u8] {
        // SAFETY: same validity assumption as `slice`; additionally `&mut
        // self` gives exclusive access, so the returned `&mut [u8]` cannot
        // alias another live reference derived from this range.
        unsafe { slice::from_raw_parts_mut(self.ptr.as_ptr(), self.size) }
    }

    #[cfg(webgpu)]
    fn as_uint8array(&self) -> &js_sys::Uint8Array {
        // JS-backed views exist only on the WebGPU backend; the core backend
        // never provides one.
        panic!("Only available on WebGPU")
    }
}