1use alloc::{
2 borrow::Cow::{self, Borrowed},
3 boxed::Box,
4 format,
5 string::{String, ToString as _},
6 sync::Arc,
7 vec,
8 vec::Vec,
9};
10use core::{
11 error::Error,
12 fmt,
13 future::ready,
14 ops::{Deref, Range},
15 pin::Pin,
16 ptr::NonNull,
17 slice,
18};
19
20use arrayvec::ArrayVec;
21use smallvec::SmallVec;
22use wgc::{
23 command::bundle_ffi::*, error::ContextErrorSource, pipeline::CreateShaderModuleError,
24 resource::BlasPrepareCompactResult,
25};
26use wgt::{
27 error::{ErrorType, WebGpuError},
28 WasmNotSendSync,
29};
30
31use crate::util::Mutex;
32use crate::{
33 api,
34 dispatch::{self, BlasCompactCallback, BufferMappedRangeInterface},
35 BindingResource, Blas, BufferBinding, BufferDescriptor, CompilationInfo, CompilationMessage,
36 CompilationMessageType, ErrorSource, Features, Label, LoadOp, MapMode, Operations,
37 ShaderSource, SurfaceTargetUnsafe, TextureDescriptor, Tlas,
38};
39
/// The `wgpu-core`-backed context: a shared handle to the global `wgc` hub
/// through which every resource operation is dispatched.
///
/// Cloning is cheap — it only bumps the inner `Arc`'s refcount.
#[derive(Clone)]
pub struct ContextWgpuCore(Arc<wgc::global::Global>);
42
impl Drop for ContextWgpuCore {
    fn drop(&mut self) {
        // Intentionally empty: the Arc'd `Global` is released implicitly.
        // NOTE(review): presumably this explicit impl exists to keep a
        // customization point / make drop non-trivial — confirm upstream intent.
    }
}
48
49impl fmt::Debug for ContextWgpuCore {
50 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
51 f.debug_struct("ContextWgpuCore")
52 .field("type", &"Native")
53 .finish()
54 }
55}
56
impl ContextWgpuCore {
    /// Wraps an existing `wgpu-hal` instance in a fresh `wgpu-core` global.
    ///
    /// # Safety
    /// Callers must uphold the contract of
    /// `wgc::global::Global::from_hal_instance` (NOTE(review): the hal
    /// instance must be valid and exclusively owned — confirm against wgc docs).
    pub unsafe fn from_hal_instance<A: hal::Api>(hal_instance: A::Instance) -> Self {
        Self(unsafe {
            Arc::new(wgc::global::Global::from_hal_instance::<A>(
                "wgpu",
                hal_instance,
            ))
        })
    }

    /// Returns the underlying `wgpu-hal` instance for backend `A`, if any.
    ///
    /// # Safety
    /// See `wgc::global::Global::instance_as_hal` for the caller's contract.
    pub unsafe fn instance_as_hal<A: hal::Api>(&self) -> Option<&A::Instance> {
        unsafe { self.0.instance_as_hal::<A>() }
    }

    /// Wraps an already-constructed `wgpu-core` instance.
    ///
    /// # Safety
    /// See `wgc::global::Global::from_instance`.
    pub unsafe fn from_core_instance(core_instance: wgc::instance::Instance) -> Self {
        Self(unsafe { Arc::new(wgc::global::Global::from_instance(core_instance)) })
    }

    /// Lists the ids of all adapters available on the requested backends.
    #[cfg(wgpu_core)]
    pub fn enumerate_adapters(&self, backends: wgt::Backends) -> Vec<wgc::id::AdapterId> {
        self.0.enumerate_adapters(backends)
    }

    /// Registers an adapter exposed by `wgpu-hal` and returns its new id.
    ///
    /// # Safety
    /// See `wgc::global::Global::create_adapter_from_hal`.
    pub unsafe fn create_adapter_from_hal<A: hal::Api>(
        &self,
        hal_adapter: hal::ExposedAdapter<A>,
    ) -> wgc::id::AdapterId {
        unsafe { self.0.create_adapter_from_hal(hal_adapter.into(), None) }
    }

    /// Borrows the `wgpu-hal` adapter backing `adapter`, if it belongs to
    /// backend `A`.
    ///
    /// # Safety
    /// See `wgc::global::Global::adapter_as_hal`.
    pub unsafe fn adapter_as_hal<A: hal::Api>(
        &self,
        adapter: &CoreAdapter,
    ) -> Option<impl Deref<Target = A::Adapter> + WasmNotSendSync> {
        unsafe { self.0.adapter_as_hal::<A>(adapter.id) }
    }

    /// Borrows the `wgpu-hal` buffer backing `buffer` (backend `A`).
    ///
    /// # Safety
    /// See `wgc::global::Global::buffer_as_hal`.
    pub unsafe fn buffer_as_hal<A: hal::Api>(
        &self,
        buffer: &CoreBuffer,
    ) -> Option<impl Deref<Target = A::Buffer>> {
        unsafe { self.0.buffer_as_hal::<A>(buffer.id) }
    }

    /// Registers an already-open `wgpu-hal` device/queue pair and wraps the
    /// results in core handles sharing a single error sink.
    ///
    /// # Safety
    /// See `wgc::global::Global::create_device_from_hal`.
    pub unsafe fn create_device_from_hal<A: hal::Api>(
        &self,
        adapter: &CoreAdapter,
        hal_device: hal::OpenDevice<A>,
        desc: &crate::DeviceDescriptor<'_>,
    ) -> Result<(CoreDevice, CoreQueue), crate::RequestDeviceError> {
        // API tracing is temporarily unsupported; warn loudly rather than
        // silently ignoring the request.
        if !matches!(desc.trace, wgt::Trace::Off) {
            log::error!(
                "
                Feature 'trace' has been removed temporarily; \
                see https://github.com/gfx-rs/wgpu/issues/5974. \
                The `trace` parameter will have no effect."
            );
        }

        let (device_id, queue_id) = unsafe {
            self.0.create_device_from_hal(
                adapter.id,
                hal_device.into(),
                &desc.map_label(|l| l.map(Borrowed)),
                None,
                None,
            )
        }?;
        // One shared sink: errors from either the device or its queue land in
        // the same error-scope stack.
        let error_sink = Arc::new(Mutex::new(ErrorSinkRaw::new()));
        let device = CoreDevice {
            context: self.clone(),
            id: device_id,
            error_sink: error_sink.clone(),
            features: desc.required_features,
        };
        let queue = CoreQueue {
            context: self.clone(),
            id: queue_id,
            error_sink,
        };
        Ok((device, queue))
    }

    /// Registers a `wgpu-hal` texture under `device`; creation errors are
    /// routed to the device's error sink rather than returned.
    ///
    /// # Safety
    /// See `wgc::global::Global::create_texture_from_hal`.
    pub unsafe fn create_texture_from_hal<A: hal::Api>(
        &self,
        hal_texture: A::Texture,
        device: &CoreDevice,
        desc: &TextureDescriptor<'_>,
    ) -> CoreTexture {
        let descriptor = desc.map_label_and_view_formats(|l| l.map(Borrowed), |v| v.to_vec());
        let (id, error) = unsafe {
            self.0
                .create_texture_from_hal(Box::new(hal_texture), device.id, &descriptor, None)
        };
        if let Some(cause) = error {
            self.handle_error(
                &device.error_sink,
                cause,
                desc.label,
                "Device::create_texture_from_hal",
            );
        }
        CoreTexture {
            context: self.clone(),
            id,
            error_sink: Arc::clone(&device.error_sink),
        }
    }

    /// Registers a `wgpu-hal` buffer under `device`; like
    /// [`Self::create_texture_from_hal`], errors go to the device's sink.
    ///
    /// # Safety
    /// See `wgc::global::Global::create_buffer_from_hal`.
    pub unsafe fn create_buffer_from_hal<A: hal::Api>(
        &self,
        hal_buffer: A::Buffer,
        device: &CoreDevice,
        desc: &BufferDescriptor<'_>,
    ) -> CoreBuffer {
        let (id, error) = unsafe {
            self.0.create_buffer_from_hal::<A>(
                hal_buffer,
                device.id,
                &desc.map_label(|l| l.map(Borrowed)),
                None,
            )
        };
        if let Some(cause) = error {
            self.handle_error(
                &device.error_sink,
                cause,
                desc.label,
                "Device::create_buffer_from_hal",
            );
        }
        CoreBuffer {
            context: self.clone(),
            id,
            error_sink: Arc::clone(&device.error_sink),
        }
    }

    /// Borrows the `wgpu-hal` device backing `device` (backend `A`).
    ///
    /// # Safety
    /// See `wgc::global::Global::device_as_hal`.
    pub unsafe fn device_as_hal<A: hal::Api>(
        &self,
        device: &CoreDevice,
    ) -> Option<impl Deref<Target = A::Device>> {
        unsafe { self.0.device_as_hal::<A>(device.id) }
    }

    /// Borrows the `wgpu-hal` surface backing `surface` (backend `A`).
    ///
    /// # Safety
    /// See `wgc::global::Global::surface_as_hal`.
    pub unsafe fn surface_as_hal<A: hal::Api>(
        &self,
        surface: &CoreSurface,
    ) -> Option<impl Deref<Target = A::Surface>> {
        unsafe { self.0.surface_as_hal::<A>(surface.id) }
    }

    /// Borrows the `wgpu-hal` texture backing `texture` (backend `A`).
    ///
    /// # Safety
    /// See `wgc::global::Global::texture_as_hal`.
    pub unsafe fn texture_as_hal<A: hal::Api>(
        &self,
        texture: &CoreTexture,
    ) -> Option<impl Deref<Target = A::Texture>> {
        unsafe { self.0.texture_as_hal::<A>(texture.id) }
    }

    /// Borrows the `wgpu-hal` texture view backing `texture_view` (backend `A`).
    ///
    /// # Safety
    /// See `wgc::global::Global::texture_view_as_hal`.
    pub unsafe fn texture_view_as_hal<A: hal::Api>(
        &self,
        texture_view: &CoreTextureView,
    ) -> Option<impl Deref<Target = A::TextureView>> {
        unsafe { self.0.texture_view_as_hal::<A>(texture_view.id) }
    }

    /// Runs `hal_command_encoder_callback` with mutable access to the
    /// `wgpu-hal` command encoder backing `command_encoder`, or with `None`
    /// if the backend does not match `A`.
    ///
    /// # Safety
    /// See `wgc::global::Global::command_encoder_as_hal_mut`.
    pub unsafe fn command_encoder_as_hal_mut<
        A: hal::Api,
        F: FnOnce(Option<&mut A::CommandEncoder>) -> R,
        R,
    >(
        &self,
        command_encoder: &CoreCommandEncoder,
        hal_command_encoder_callback: F,
    ) -> R {
        unsafe {
            self.0.command_encoder_as_hal_mut::<A, F, R>(
                command_encoder.id,
                hal_command_encoder_callback,
            )
        }
    }

    /// Borrows the `wgpu-hal` acceleration structure backing `blas`.
    ///
    /// # Safety
    /// See `wgc::global::Global::blas_as_hal`.
    pub unsafe fn blas_as_hal<A: hal::Api>(
        &self,
        blas: &CoreBlas,
    ) -> Option<impl Deref<Target = A::AccelerationStructure>> {
        unsafe { self.0.blas_as_hal::<A>(blas.id) }
    }

    /// Borrows the `wgpu-hal` acceleration structure backing `tlas`.
    ///
    /// # Safety
    /// See `wgc::global::Global::tlas_as_hal`.
    pub unsafe fn tlas_as_hal<A: hal::Api>(
        &self,
        tlas: &CoreTlas,
    ) -> Option<impl Deref<Target = A::AccelerationStructure>> {
        unsafe { self.0.tlas_as_hal::<A>(tlas.id) }
    }

    /// Produces a report of the hub's current resource usage.
    pub fn generate_report(&self) -> wgc::global::GlobalReport {
        self.0.generate_report()
    }

    /// Wraps `source` in a `ContextError` (recording the originating function
    /// and label), converts it into the public error type, and pushes it into
    /// the sink.
    ///
    /// Cold and never inlined: error reporting is kept off the hot path.
    #[cold]
    #[track_caller]
    #[inline(never)]
    fn handle_error_inner(
        &self,
        sink_mutex: &Mutex<ErrorSinkRaw>,
        error_type: ErrorType,
        source: ContextErrorSource,
        label: Label<'_>,
        fn_ident: &'static str,
    ) {
        let source: ErrorSource = Box::new(wgc::error::ContextError {
            fn_ident,
            source,
            label: label.unwrap_or_default().to_string(),
        });
        let mut sink = sink_mutex.lock();
        // Formatting the whole error chain is relatively expensive, so it is
        // deferred until a variant actually needs a description.
        let description = || self.format_error(&*source);
        let error = match error_type {
            ErrorType::Internal => {
                let description = description();
                crate::Error::Internal {
                    source,
                    description,
                }
            }
            ErrorType::OutOfMemory => crate::Error::OutOfMemory { source },
            ErrorType::Validation => {
                let description = description();
                crate::Error::Validation {
                    source,
                    description,
                }
            }
            // Device loss is not delivered through the error sink; the error
            // is dropped here. NOTE(review): presumably reported via the
            // device-lost callback path instead — confirm.
            ErrorType::DeviceLost => return,
        };
        sink.handle_error(error);
    }

    /// Classifies `source` by its WebGPU error type and reports it with the
    /// given label attached.
    #[inline]
    #[track_caller]
    fn handle_error(
        &self,
        sink_mutex: &Mutex<ErrorSinkRaw>,
        source: impl WebGpuError + WasmNotSendSync + 'static,
        label: Label<'_>,
        fn_ident: &'static str,
    ) {
        let error_type = source.webgpu_error_type();
        self.handle_error_inner(sink_mutex, error_type, Box::new(source), label, fn_ident)
    }

    /// Like [`Self::handle_error`], for call sites that have no label.
    #[inline]
    #[track_caller]
    fn handle_error_nolabel(
        &self,
        sink_mutex: &Mutex<ErrorSinkRaw>,
        source: impl WebGpuError + WasmNotSendSync + 'static,
        fn_ident: &'static str,
    ) {
        let error_type = source.webgpu_error_type();
        self.handle_error_inner(sink_mutex, error_type, Box::new(source), None, fn_ident)
    }

    /// Reports an unrecoverable error by panicking with the formatted chain.
    #[track_caller]
    #[cold]
    fn handle_error_fatal(
        &self,
        cause: impl Error + WasmNotSendSync + 'static,
        operation: &'static str,
    ) -> ! {
        panic!("Error in {operation}: {f}", f = self.format_error(&cause));
    }

    /// Renders `err` and its `source()` chain as an indented tree, expanding
    /// `MultiError`s into one line per constituent error.
    ///
    /// Note: the header always reads "Validation Error" regardless of the
    /// actual error type.
    #[inline(never)]
    fn format_error(&self, err: &(dyn Error + 'static)) -> String {
        let mut output = String::new();
        let mut level = 1;

        // Recursive pretty-printer; `level` controls the indent depth.
        fn print_tree(output: &mut String, level: &mut usize, e: &(dyn Error + 'static)) {
            let mut print = |e: &(dyn Error + 'static)| {
                use core::fmt::Write;
                writeln!(output, "{}{}", " ".repeat(*level * 2), e).unwrap();

                if let Some(e) = e.source() {
                    *level += 1;
                    print_tree(output, level, e);
                    *level -= 1;
                }
            };
            if let Some(multi) = e.downcast_ref::<wgc::error::MultiError>() {
                for e in multi.errors() {
                    print(e);
                }
            } else {
                print(e);
            }
        }

        print_tree(&mut output, &mut level, err);

        format!("Validation Error\n\nCaused by:\n{output}")
    }

    /// Borrows the `wgpu-hal` queue backing `queue` (backend `A`).
    ///
    /// # Safety
    /// See `wgc::global::Global::queue_as_hal`.
    pub unsafe fn queue_as_hal<A: hal::Api>(
        &self,
        queue: &CoreQueue,
    ) -> Option<impl Deref<Target = A::Queue> + WasmNotSendSync> {
        unsafe { self.0.queue_as_hal::<A>(queue.id) }
    }
}
379
380fn map_buffer_copy_view(view: crate::TexelCopyBufferInfo<'_>) -> wgc::command::TexelCopyBufferInfo {
381 wgc::command::TexelCopyBufferInfo {
382 buffer: view.buffer.inner.as_core().id,
383 layout: view.layout,
384 }
385}
386
387fn map_texture_copy_view(
388 view: crate::TexelCopyTextureInfo<'_>,
389) -> wgc::command::TexelCopyTextureInfo {
390 wgc::command::TexelCopyTextureInfo {
391 texture: view.texture.inner.as_core().id,
392 mip_level: view.mip_level,
393 origin: view.origin,
394 aspect: view.aspect,
395 }
396}
397
398#[cfg_attr(not(webgl), expect(unused))]
399fn map_texture_tagged_copy_view(
400 view: crate::CopyExternalImageDestInfo<&api::Texture>,
401) -> wgc::command::CopyExternalImageDestInfo {
402 wgc::command::CopyExternalImageDestInfo {
403 texture: view.texture.inner.as_core().id,
404 mip_level: view.mip_level,
405 origin: view.origin,
406 aspect: view.aspect,
407 color_space: view.color_space,
408 premultiplied_alpha: view.premultiplied_alpha,
409 }
410}
411
412fn map_load_op<V: Copy>(load: &LoadOp<V>) -> LoadOp<Option<V>> {
413 match load {
414 LoadOp::Clear(clear_value) => LoadOp::Clear(Some(*clear_value)),
415 LoadOp::Load => LoadOp::Load,
416 }
417}
418
419fn map_pass_channel<V: Copy>(ops: Option<&Operations<V>>) -> wgc::command::PassChannel<Option<V>> {
420 match ops {
421 Some(&Operations { load, store }) => wgc::command::PassChannel {
422 load_op: Some(map_load_op(&load)),
423 store_op: Some(store),
424 read_only: false,
425 },
426 None => wgc::command::PassChannel {
427 load_op: None,
428 store_op: None,
429 read_only: true,
430 },
431 }
432}
433
/// Core-backend surface handle: the owning context, the `wgc` surface id,
/// and state recorded at configuration time.
#[derive(Debug)]
pub struct CoreSurface {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::SurfaceId,
    // Device id this surface is associated with; `None` until set.
    // NOTE(review): presumably recorded when the surface is configured — confirm.
    configured_device: Mutex<Option<wgc::id::DeviceId>>,
    // Error sink used to report surface errors; `None` until set.
    // NOTE(review): presumably the configuring device's sink — confirm.
    error_sink: Mutex<Option<ErrorSink>>,
}
445
/// Core-backend adapter handle: the owning context plus its `wgc` id.
#[derive(Debug)]
pub struct CoreAdapter {
    pub(crate) context: ContextWgpuCore,
    pub(crate) id: wgc::id::AdapterId,
}
451
/// Core-backend device handle.
#[derive(Debug)]
pub struct CoreDevice {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::DeviceId,
    // Sink shared with the queue and device-created resources for error reporting.
    error_sink: ErrorSink,
    // Features requested at device creation; consulted e.g. when building bind groups.
    features: Features,
}
459
/// Core-backend buffer handle; errors are reported through the owning
/// device's shared sink.
#[derive(Debug)]
pub struct CoreBuffer {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BufferId,
    error_sink: ErrorSink,
}
466
/// Core-backend shader module handle, carrying the compilation diagnostics
/// gathered at creation time.
#[derive(Debug)]
pub struct CoreShaderModule {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::ShaderModuleId,
    compilation_info: CompilationInfo,
}
473
/// Core-backend bind group layout handle.
#[derive(Debug)]
pub struct CoreBindGroupLayout {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BindGroupLayoutId,
}
479
/// Core-backend bind group handle.
#[derive(Debug)]
pub struct CoreBindGroup {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BindGroupId,
}
485
/// Core-backend texture handle; errors are reported through the owning
/// device's shared sink.
#[derive(Debug)]
pub struct CoreTexture {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::TextureId,
    error_sink: ErrorSink,
}
492
/// Core-backend texture view handle.
#[derive(Debug)]
pub struct CoreTextureView {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::TextureViewId,
}
498
/// Core-backend external texture handle.
#[derive(Debug)]
pub struct CoreExternalTexture {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::ExternalTextureId,
}
504
/// Core-backend sampler handle.
#[derive(Debug)]
pub struct CoreSampler {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::SamplerId,
}
510
/// Core-backend query set handle.
#[derive(Debug)]
pub struct CoreQuerySet {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::QuerySetId,
}
516
/// Core-backend pipeline layout handle.
#[derive(Debug)]
pub struct CorePipelineLayout {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::PipelineLayoutId,
}
522
/// Core-backend pipeline cache handle.
#[derive(Debug)]
pub struct CorePipelineCache {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::PipelineCacheId,
}
528
/// Core-backend command buffer handle.
#[derive(Debug)]
pub struct CoreCommandBuffer {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::CommandBufferId,
}
534
/// Core-backend render bundle encoder; owns the in-progress `wgc` encoder
/// directly rather than an id.
#[derive(Debug)]
pub struct CoreRenderBundleEncoder {
    pub(crate) context: ContextWgpuCore,
    encoder: wgc::command::RenderBundleEncoder,
    // Identity token used only for the Eq/Ord/Hash impls below.
    id: crate::cmp::Identifier,
}
541
/// Core-backend render bundle handle.
#[derive(Debug)]
pub struct CoreRenderBundle {
    id: wgc::id::RenderBundleId,
}
546
/// Core-backend queue handle; shares its error sink with the owning device.
#[derive(Debug)]
pub struct CoreQueue {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::QueueId,
    error_sink: ErrorSink,
}
553
/// Core-backend compute pipeline handle.
#[derive(Debug)]
pub struct CoreComputePipeline {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::ComputePipelineId,
    error_sink: ErrorSink,
}
560
/// Core-backend render pipeline handle.
#[derive(Debug)]
pub struct CoreRenderPipeline {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::RenderPipelineId,
    error_sink: ErrorSink,
}
567
/// Core-backend compute pass; owns the in-progress `wgc` pass state.
#[derive(Debug)]
pub struct CoreComputePass {
    pub(crate) context: ContextWgpuCore,
    pass: wgc::command::ComputePass,
    error_sink: ErrorSink,
    // Identity token used only for the Eq/Ord/Hash impls below.
    id: crate::cmp::Identifier,
}
575
/// Core-backend render pass; owns the in-progress `wgc` pass state.
#[derive(Debug)]
pub struct CoreRenderPass {
    pub(crate) context: ContextWgpuCore,
    pass: wgc::command::RenderPass,
    error_sink: ErrorSink,
    // Identity token used only for the Eq/Ord/Hash impls below.
    id: crate::cmp::Identifier,
}
583
/// Core-backend command encoder handle.
#[derive(Debug)]
pub struct CoreCommandEncoder {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::CommandEncoderId,
    error_sink: ErrorSink,
}
590
/// Core-backend bottom-level acceleration structure handle.
#[derive(Debug)]
pub struct CoreBlas {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BlasId,
    error_sink: ErrorSink,
}
597
/// Core-backend top-level acceleration structure handle.
#[derive(Debug)]
pub struct CoreTlas {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::TlasId,
}
604
/// Bookkeeping attached to a surface texture so presentation can find its
/// surface again.
#[derive(Debug)]
pub struct CoreSurfaceOutputDetail {
    context: ContextWgpuCore,
    surface_id: wgc::id::SurfaceId,
}
610
/// Shared, mutex-guarded error sink handed to every handle that can report
/// errors back to its device.
type ErrorSink = Arc<Mutex<ErrorSinkRaw>>;
612
/// One entry in the error-scope stack.
struct ErrorScope {
    // First matching error captured by this scope, if any (later matches are dropped).
    error: Option<crate::Error>,
    // Which class of errors this scope captures.
    filter: crate::ErrorFilter,
}
617
/// Destination for device errors: a stack of error scopes plus an optional
/// handler for errors no scope captures.
struct ErrorSinkRaw {
    // Active error scopes; the innermost scope is last.
    scopes: Vec<ErrorScope>,
    // Custom uncaptured-error handler; `None` falls back to the default
    // (panicking) handler.
    uncaptured_handler: Option<Box<dyn crate::UncapturedErrorHandler>>,
}
622
623impl ErrorSinkRaw {
624 fn new() -> ErrorSinkRaw {
625 ErrorSinkRaw {
626 scopes: Vec::new(),
627 uncaptured_handler: None,
628 }
629 }
630
631 #[track_caller]
632 fn handle_error(&mut self, err: crate::Error) {
633 let filter = match err {
634 crate::Error::OutOfMemory { .. } => crate::ErrorFilter::OutOfMemory,
635 crate::Error::Validation { .. } => crate::ErrorFilter::Validation,
636 crate::Error::Internal { .. } => crate::ErrorFilter::Internal,
637 };
638 match self
639 .scopes
640 .iter_mut()
641 .rev()
642 .find(|scope| scope.filter == filter)
643 {
644 Some(scope) => {
645 if scope.error.is_none() {
646 scope.error = Some(err);
647 }
648 }
649 None => {
650 if let Some(custom_handler) = self.uncaptured_handler.as_ref() {
651 (custom_handler)(err);
652 } else {
653 default_error_handler(err);
655 }
656 }
657 }
658 }
659}
660
661impl fmt::Debug for ErrorSinkRaw {
662 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
663 write!(f, "ErrorSink")
664 }
665}
666
667#[track_caller]
668fn default_error_handler(err: crate::Error) {
669 log::error!("Handling wgpu errors as fatal by default");
670 panic!("wgpu error: {err}\n");
671}
672
impl From<CreateShaderModuleError> for CompilationInfo {
    /// Converts a shader-module creation error into WebGPU-style compilation
    /// info messages.
    fn from(value: CreateShaderModuleError) -> Self {
        match value {
            // Front-end parse/validation errors carry rich diagnostics;
            // delegate to their own `Into<CompilationInfo>` conversions.
            #[cfg(feature = "wgsl")]
            CreateShaderModuleError::Parsing(v) => v.into(),
            #[cfg(feature = "glsl")]
            CreateShaderModuleError::ParsingGlsl(v) => v.into(),
            #[cfg(feature = "spirv")]
            CreateShaderModuleError::ParsingSpirV(v) => v.into(),
            CreateShaderModuleError::Validation(v) => v.into(),
            // Device and generation failures yield no per-message diagnostics.
            CreateShaderModuleError::Device(_) | CreateShaderModuleError::Generation => {
                CompilationInfo {
                    messages: Vec::new(),
                }
            }
            // Anything else becomes a single error message with no source
            // location.
            _ => CompilationInfo {
                messages: vec![CompilationMessage {
                    message: value.to_string(),
                    message_type: CompilationMessageType::Error,
                    location: None,
                }],
            },
        }
    }
}
701
/// A staging buffer id paired with its CPU-mapped range.
#[derive(Debug)]
pub struct CoreQueueWriteBuffer {
    buffer_id: wgc::id::StagingBufferId,
    mapping: CoreBufferMappedRange,
}
707
/// Raw view of a mapped buffer region: a non-null base pointer plus its size.
#[derive(Debug)]
pub struct CoreBufferMappedRange {
    ptr: NonNull<u8>,
    size: usize,
}
713
// SAFETY: NOTE(review) — `ptr` is a raw pointer, so these impls assert that
// the mapped memory may be accessed from other threads. Presumably the
// mapping is kept alive by the owning buffer/staging allocation for the
// range's lifetime — confirm before relying on this.
#[cfg(send_sync)]
unsafe impl Send for CoreBufferMappedRange {}
#[cfg(send_sync)]
unsafe impl Sync for CoreBufferMappedRange {}
718
impl Drop for CoreBufferMappedRange {
    fn drop(&mut self) {
        // Intentionally empty: this handle does not own the mapped memory.
        // NOTE(review): presumably the mapping is released when the buffer is
        // unmapped/dropped elsewhere — confirm.
    }
}
725
// Equality, ordering, and hashing for every handle type are identity-based:
// the context compares by `Arc` address, and each resource handle by its
// uniquely-identifying field (usually its `wgc` id).
crate::cmp::impl_eq_ord_hash_arc_address!(ContextWgpuCore => .0);
crate::cmp::impl_eq_ord_hash_proxy!(CoreAdapter => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreDevice => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreQueue => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreShaderModule => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBindGroupLayout => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBindGroup => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreTextureView => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreSampler => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBuffer => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreTexture => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreExternalTexture => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBlas => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreTlas => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreQuerySet => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CorePipelineLayout => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderPipeline => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreComputePipeline => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CorePipelineCache => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreCommandEncoder => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreComputePass => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderPass => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreCommandBuffer => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderBundleEncoder => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderBundle => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreSurface => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreSurfaceOutputDetail => .surface_id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreQueueWriteBuffer => .mapping.ptr);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBufferMappedRange => .ptr);
755
impl dispatch::InstanceInterface for ContextWgpuCore {
    /// Builds a fresh `wgpu-core` global from an instance descriptor.
    fn new(desc: &wgt::InstanceDescriptor) -> Self
    where
        Self: Sized,
    {
        Self(Arc::new(wgc::global::Global::new("wgpu", desc)))
    }

    /// Creates a surface from a raw, platform-specific target. Each target
    /// variant is only available on the platforms whose `cfg` compiles it in.
    ///
    /// # Safety
    /// The raw handles/pointers in `target` must be valid; see the
    /// corresponding `wgc` `instance_create_surface*` methods for the exact
    /// contract each variant requires.
    unsafe fn create_surface(
        &self,
        target: crate::api::SurfaceTargetUnsafe,
    ) -> Result<dispatch::DispatchSurface, crate::CreateSurfaceError> {
        let id = match target {
            SurfaceTargetUnsafe::RawHandle {
                raw_display_handle,
                raw_window_handle,
            } => unsafe {
                self.0
                    .instance_create_surface(raw_display_handle, raw_window_handle, None)
            },

            // DRM plane surfaces: Unix, but neither Apple nor wasm targets.
            #[cfg(all(unix, not(target_vendor = "apple"), not(target_family = "wasm")))]
            SurfaceTargetUnsafe::Drm {
                fd,
                plane,
                connector_id,
                width,
                height,
                refresh_rate,
            } => unsafe {
                self.0.instance_create_surface_from_drm(
                    fd,
                    plane,
                    connector_id,
                    width,
                    height,
                    refresh_rate,
                    None,
                )
            },

            #[cfg(metal)]
            SurfaceTargetUnsafe::CoreAnimationLayer(layer) => unsafe {
                self.0.instance_create_surface_metal(layer, None)
            },

            #[cfg(dx12)]
            SurfaceTargetUnsafe::CompositionVisual(visual) => unsafe {
                self.0.instance_create_surface_from_visual(visual, None)
            },

            #[cfg(dx12)]
            SurfaceTargetUnsafe::SurfaceHandle(surface_handle) => unsafe {
                self.0
                    .instance_create_surface_from_surface_handle(surface_handle, None)
            },

            #[cfg(dx12)]
            SurfaceTargetUnsafe::SwapChainPanel(swap_chain_panel) => unsafe {
                self.0
                    .instance_create_surface_from_swap_chain_panel(swap_chain_panel, None)
            },
        }?;

        // Configuration state and the error sink stay empty until the surface
        // is actually configured.
        Ok(CoreSurface {
            context: self.clone(),
            id,
            configured_device: Mutex::default(),
            error_sink: Mutex::default(),
        }
        .into())
    }

    /// Requests an adapter. `wgpu-core` resolves this synchronously, so the
    /// returned future is already complete.
    fn request_adapter(
        &self,
        options: &crate::api::RequestAdapterOptions<'_, '_>,
    ) -> Pin<Box<dyn dispatch::RequestAdapterFuture>> {
        let id = self.0.request_adapter(
            &wgc::instance::RequestAdapterOptions {
                power_preference: options.power_preference,
                force_fallback_adapter: options.force_fallback_adapter,
                compatible_surface: options
                    .compatible_surface
                    .map(|surface| surface.inner.as_core().id),
            },
            wgt::Backends::all(),
            None,
        );
        let adapter = id.map(|id| {
            let core = CoreAdapter {
                context: self.clone(),
                id,
            };
            let generic: dispatch::DispatchAdapter = core.into();
            generic
        });
        Box::pin(ready(adapter))
    }

    /// Polls every device; returns whether all queues are empty. Poll
    /// failures are treated as fatal (panics).
    fn poll_all_devices(&self, force_wait: bool) -> bool {
        match self.0.poll_all_devices(force_wait) {
            Ok(all_queue_empty) => all_queue_empty,
            Err(err) => self.handle_error_fatal(err, "Instance::poll_all_devices"),
        }
    }

    /// Folds every WGSL language extension implemented by naga into the
    /// public feature-flags type.
    #[cfg(feature = "wgsl")]
    fn wgsl_language_features(&self) -> crate::WgslLanguageFeatures {
        use wgc::naga::front::wgsl::ImplementedLanguageExtension;
        ImplementedLanguageExtension::all().iter().copied().fold(
            crate::WgslLanguageFeatures::empty(),
            |acc, wle| {
                acc | match wle {
                    ImplementedLanguageExtension::ReadOnlyAndReadWriteStorageTextures => {
                        crate::WgslLanguageFeatures::ReadOnlyAndReadWriteStorageTextures
                    }
                    ImplementedLanguageExtension::Packed4x8IntegerDotProduct => {
                        crate::WgslLanguageFeatures::Packed4x8IntegerDotProduct
                    }
                    ImplementedLanguageExtension::PointerCompositeAccess => {
                        crate::WgslLanguageFeatures::PointerCompositeAccess
                    }
                }
            },
        )
    }
}
883
impl dispatch::AdapterInterface for CoreAdapter {
    /// Requests a device/queue pair. Resolution is synchronous in
    /// `wgpu-core`, so the returned future is already complete.
    fn request_device(
        &self,
        desc: &crate::DeviceDescriptor<'_>,
    ) -> Pin<Box<dyn dispatch::RequestDeviceFuture>> {
        // API tracing is temporarily unsupported; warn loudly rather than
        // silently ignoring the request.
        if !matches!(desc.trace, wgt::Trace::Off) {
            log::error!(
                "
                Feature 'trace' has been removed temporarily; \
                see https://github.com/gfx-rs/wgpu/issues/5974. \
                The `trace` parameter will have no effect."
            );
        }

        let res = self.context.0.adapter_request_device(
            self.id,
            &desc.map_label(|l| l.map(Borrowed)),
            None,
            None,
        );
        let (device_id, queue_id) = match res {
            Ok(ids) => ids,
            Err(err) => {
                return Box::pin(ready(Err(err.into())));
            }
        };
        // Device and queue share one error sink so errors from either funnel
        // into the same error-scope stack.
        let error_sink = Arc::new(Mutex::new(ErrorSinkRaw::new()));
        let device = CoreDevice {
            context: self.context.clone(),
            id: device_id,
            error_sink: error_sink.clone(),
            features: desc.required_features,
        };
        let queue = CoreQueue {
            context: self.context.clone(),
            id: queue_id,
            error_sink,
        };
        Box::pin(ready(Ok((device.into(), queue.into()))))
    }

    /// Whether this adapter can present to the given surface.
    fn is_surface_supported(&self, surface: &dispatch::DispatchSurface) -> bool {
        let surface = surface.as_core();

        self.context
            .0
            .adapter_is_surface_supported(self.id, surface.id)
    }

    /// Features supported by this adapter.
    fn features(&self) -> crate::Features {
        self.context.0.adapter_features(self.id)
    }

    /// Limits supported by this adapter.
    fn limits(&self) -> crate::Limits {
        self.context.0.adapter_limits(self.id)
    }

    /// Downlevel capabilities of this adapter.
    fn downlevel_capabilities(&self) -> crate::DownlevelCapabilities {
        self.context.0.adapter_downlevel_capabilities(self.id)
    }

    /// Descriptive information (name, backend, etc.) for this adapter.
    fn get_info(&self) -> crate::AdapterInfo {
        self.context.0.adapter_get_info(self.id)
    }

    /// Per-format feature support for this adapter.
    fn get_texture_format_features(
        &self,
        format: crate::TextureFormat,
    ) -> crate::TextureFormatFeatures {
        self.context
            .0
            .adapter_get_texture_format_features(self.id, format)
    }

    /// Current presentation timestamp from this adapter.
    fn get_presentation_timestamp(&self) -> crate::PresentationTimestamp {
        self.context.0.adapter_get_presentation_timestamp(self.id)
    }
}
962
impl Drop for CoreAdapter {
    fn drop(&mut self) {
        // Release the wgpu-core side of the adapter when the handle goes away.
        self.context.0.adapter_drop(self.id)
    }
}
968
969impl dispatch::DeviceInterface for CoreDevice {
    /// Features enabled on this device, as reported by `wgpu-core`.
    fn features(&self) -> crate::Features {
        self.context.0.device_features(self.id)
    }
973
    /// Limits in effect on this device, as reported by `wgpu-core`.
    fn limits(&self) -> crate::Limits {
        self.context.0.device_limits(self.id)
    }
977
    // Without any shader front-end feature enabled, the parameters go unused.
    #[cfg_attr(
        not(any(
            feature = "spirv",
            feature = "glsl",
            feature = "wgsl",
            feature = "naga-ir"
        )),
        expect(unused)
    )]
    /// Creates a shader module from any enabled source language; creation
    /// errors are reported to the error sink and also folded into the
    /// module's compilation info.
    fn create_shader_module(
        &self,
        desc: crate::ShaderModuleDescriptor<'_>,
        shader_bound_checks: wgt::ShaderRuntimeChecks,
    ) -> dispatch::DispatchShaderModule {
        let descriptor = wgc::pipeline::ShaderModuleDescriptor {
            label: desc.label.map(Borrowed),
            runtime_checks: shader_bound_checks,
        };
        // Map each public source kind onto wgpu-core's module source; every
        // arm is feature-gated to keep its naga front-end optional.
        let source = match desc.source {
            #[cfg(feature = "spirv")]
            ShaderSource::SpirV(ref spv) => {
                let options = naga::front::spv::Options {
                    // NOTE(review): coordinate-space adjustment is disabled
                    // here — presumably handled elsewhere in the stack; confirm.
                    adjust_coordinate_space: false,
                    strict_capabilities: true,
                    block_ctx_dump_prefix: None,
                };
                wgc::pipeline::ShaderModuleSource::SpirV(Borrowed(spv), options)
            }
            #[cfg(feature = "glsl")]
            ShaderSource::Glsl {
                ref shader,
                stage,
                defines,
            } => {
                let options = naga::front::glsl::Options {
                    stage,
                    defines: defines
                        .iter()
                        .map(|&(key, value)| (String::from(key), String::from(value)))
                        .collect(),
                };
                wgc::pipeline::ShaderModuleSource::Glsl(Borrowed(shader), options)
            }
            #[cfg(feature = "wgsl")]
            ShaderSource::Wgsl(ref code) => wgc::pipeline::ShaderModuleSource::Wgsl(Borrowed(code)),
            #[cfg(feature = "naga-ir")]
            ShaderSource::Naga(module) => wgc::pipeline::ShaderModuleSource::Naga(module),
            ShaderSource::Dummy(_) => panic!("found `ShaderSource::Dummy`"),
        };
        let (id, error) =
            self.context
                .0
                .device_create_shader_module(self.id, &descriptor, source, None);
        let compilation_info = match error {
            Some(cause) => {
                self.context.handle_error(
                    &self.error_sink,
                    cause.clone(),
                    desc.label,
                    "Device::create_shader_module",
                );
                CompilationInfo::from(cause)
            }
            None => CompilationInfo { messages: vec![] },
        };

        CoreShaderModule {
            context: self.context.clone(),
            id,
            compilation_info,
        }
        .into()
    }
1053
    /// Creates a shader module from a backend-specific (passthrough) source;
    /// errors are reported to the sink and folded into the compilation info.
    ///
    /// # Safety
    /// See `wgc`'s `device_create_shader_module_passthrough` for the contract
    /// on the passed-through shader data.
    unsafe fn create_shader_module_passthrough(
        &self,
        desc: &crate::ShaderModuleDescriptorPassthrough<'_>,
    ) -> dispatch::DispatchShaderModule {
        let desc = desc.map_label(|l| l.map(Cow::from));
        let (id, error) = unsafe {
            self.context
                .0
                .device_create_shader_module_passthrough(self.id, &desc, None)
        };

        let compilation_info = match error {
            Some(cause) => {
                self.context.handle_error(
                    &self.error_sink,
                    cause.clone(),
                    desc.label().as_deref(),
                    "Device::create_shader_module_passthrough",
                );
                CompilationInfo::from(cause)
            }
            None => CompilationInfo { messages: vec![] },
        };

        CoreShaderModule {
            context: self.context.clone(),
            id,
            compilation_info,
        }
        .into()
    }
1085
1086 fn create_bind_group_layout(
1087 &self,
1088 desc: &crate::BindGroupLayoutDescriptor<'_>,
1089 ) -> dispatch::DispatchBindGroupLayout {
1090 let descriptor = wgc::binding_model::BindGroupLayoutDescriptor {
1091 label: desc.label.map(Borrowed),
1092 entries: Borrowed(desc.entries),
1093 };
1094 let (id, error) =
1095 self.context
1096 .0
1097 .device_create_bind_group_layout(self.id, &descriptor, None);
1098 if let Some(cause) = error {
1099 self.context.handle_error(
1100 &self.error_sink,
1101 cause,
1102 desc.label,
1103 "Device::create_bind_group_layout",
1104 );
1105 }
1106 CoreBindGroupLayout {
1107 context: self.context.clone(),
1108 id,
1109 }
1110 .into()
1111 }
1112
    /// Creates a bind group; creation errors are routed to this device's
    /// error sink rather than returned.
    fn create_bind_group(
        &self,
        desc: &crate::BindGroupDescriptor<'_>,
    ) -> dispatch::DispatchBindGroup {
        use wgc::binding_model as bm;

        // Array bindings must be passed to wgpu-core as borrowed slices of
        // ids, so first flatten every array's contents into buffers that
        // outlive the per-entry mapping below.
        let mut arrayed_texture_views = Vec::new();
        let mut arrayed_samplers = Vec::new();
        if self.features.contains(Features::TEXTURE_BINDING_ARRAY) {
            for entry in desc.entries.iter() {
                if let BindingResource::TextureViewArray(array) = entry.resource {
                    arrayed_texture_views.extend(array.iter().map(|view| view.inner.as_core().id));
                }
                if let BindingResource::SamplerArray(array) = entry.resource {
                    arrayed_samplers.extend(array.iter().map(|sampler| sampler.inner.as_core().id));
                }
            }
        }
        let mut remaining_arrayed_texture_views = &arrayed_texture_views[..];
        let mut remaining_arrayed_samplers = &arrayed_samplers[..];

        let mut arrayed_buffer_bindings = Vec::new();
        if self.features.contains(Features::BUFFER_BINDING_ARRAY) {
            for entry in desc.entries.iter() {
                if let BindingResource::BufferArray(array) = entry.resource {
                    arrayed_buffer_bindings.extend(array.iter().map(|binding| bm::BufferBinding {
                        buffer: binding.buffer.inner.as_core().id,
                        offset: binding.offset,
                        size: binding.size,
                    }));
                }
            }
        }
        let mut remaining_arrayed_buffer_bindings = &arrayed_buffer_bindings[..];

        // Second pass: build the wgc entries. Each array binding carves its
        // slice off the front of the corresponding flat buffer, relying on the
        // two passes visiting entries in the same order.
        let entries = desc
            .entries
            .iter()
            .map(|entry| bm::BindGroupEntry {
                binding: entry.binding,
                resource: match entry.resource {
                    BindingResource::Buffer(BufferBinding {
                        buffer,
                        offset,
                        size,
                    }) => bm::BindingResource::Buffer(bm::BufferBinding {
                        buffer: buffer.inner.as_core().id,
                        offset,
                        size,
                    }),
                    BindingResource::BufferArray(array) => {
                        let slice = &remaining_arrayed_buffer_bindings[..array.len()];
                        remaining_arrayed_buffer_bindings =
                            &remaining_arrayed_buffer_bindings[array.len()..];
                        bm::BindingResource::BufferArray(Borrowed(slice))
                    }
                    BindingResource::Sampler(sampler) => {
                        bm::BindingResource::Sampler(sampler.inner.as_core().id)
                    }
                    BindingResource::SamplerArray(array) => {
                        let slice = &remaining_arrayed_samplers[..array.len()];
                        remaining_arrayed_samplers = &remaining_arrayed_samplers[array.len()..];
                        bm::BindingResource::SamplerArray(Borrowed(slice))
                    }
                    BindingResource::TextureView(texture_view) => {
                        bm::BindingResource::TextureView(texture_view.inner.as_core().id)
                    }
                    BindingResource::TextureViewArray(array) => {
                        let slice = &remaining_arrayed_texture_views[..array.len()];
                        remaining_arrayed_texture_views =
                            &remaining_arrayed_texture_views[array.len()..];
                        bm::BindingResource::TextureViewArray(Borrowed(slice))
                    }
                    BindingResource::AccelerationStructure(acceleration_structure) => {
                        bm::BindingResource::AccelerationStructure(
                            acceleration_structure.inner.as_core().id,
                        )
                    }
                    BindingResource::ExternalTexture(external_texture) => {
                        bm::BindingResource::ExternalTexture(external_texture.inner.as_core().id)
                    }
                },
            })
            .collect::<Vec<_>>();
        let descriptor = bm::BindGroupDescriptor {
            label: desc.label.as_ref().map(|label| Borrowed(&label[..])),
            layout: desc.layout.inner.as_core().id,
            entries: Borrowed(&entries),
        };

        let (id, error) = self
            .context
            .0
            .device_create_bind_group(self.id, &descriptor, None);
        if let Some(cause) = error {
            self.context.handle_error(
                &self.error_sink,
                cause,
                desc.label,
                "Device::create_bind_group",
            );
        }
        CoreBindGroup {
            context: self.context.clone(),
            id,
        }
        .into()
    }
1223
1224 fn create_pipeline_layout(
1225 &self,
1226 desc: &crate::PipelineLayoutDescriptor<'_>,
1227 ) -> dispatch::DispatchPipelineLayout {
1228 assert!(
1231 desc.bind_group_layouts.len() <= wgc::MAX_BIND_GROUPS,
1232 "Bind group layout count {} exceeds device bind group limit {}",
1233 desc.bind_group_layouts.len(),
1234 wgc::MAX_BIND_GROUPS
1235 );
1236
1237 let temp_layouts = desc
1238 .bind_group_layouts
1239 .iter()
1240 .map(|bgl| bgl.inner.as_core().id)
1241 .collect::<ArrayVec<_, { wgc::MAX_BIND_GROUPS }>>();
1242 let descriptor = wgc::binding_model::PipelineLayoutDescriptor {
1243 label: desc.label.map(Borrowed),
1244 bind_group_layouts: Borrowed(&temp_layouts),
1245 push_constant_ranges: Borrowed(desc.push_constant_ranges),
1246 };
1247
1248 let (id, error) = self
1249 .context
1250 .0
1251 .device_create_pipeline_layout(self.id, &descriptor, None);
1252 if let Some(cause) = error {
1253 self.context.handle_error(
1254 &self.error_sink,
1255 cause,
1256 desc.label,
1257 "Device::create_pipeline_layout",
1258 );
1259 }
1260 CorePipelineLayout {
1261 context: self.context.clone(),
1262 id,
1263 }
1264 .into()
1265 }
1266
    /// Creates a render pipeline, translating the public descriptor into
    /// wgpu-core's form. Errors are reported through the error sink; internal
    /// shader-translation failures are additionally logged as likely wgpu bugs.
    fn create_render_pipeline(
        &self,
        desc: &crate::RenderPipelineDescriptor<'_>,
    ) -> dispatch::DispatchRenderPipeline {
        use wgc::pipeline as pipe;

        // Vertex buffer layouts are bounded by MAX_VERTEX_BUFFERS, so an
        // ArrayVec keeps them on the stack.
        let vertex_buffers: ArrayVec<_, { wgc::MAX_VERTEX_BUFFERS }> = desc
            .vertex
            .buffers
            .iter()
            .map(|vbuf| pipe::VertexBufferLayout {
                array_stride: vbuf.array_stride,
                step_mode: vbuf.step_mode,
                attributes: Borrowed(vbuf.attributes),
            })
            .collect();

        // Pipeline-overridable constants for the vertex stage, keyed by name.
        let vert_constants = desc
            .vertex
            .compilation_options
            .constants
            .iter()
            .map(|&(key, value)| (String::from(key), value))
            .collect();

        let descriptor = pipe::RenderPipelineDescriptor {
            label: desc.label.map(Borrowed),
            layout: desc.layout.map(|layout| layout.inner.as_core().id),
            vertex: pipe::VertexState {
                stage: pipe::ProgrammableStageDescriptor {
                    module: desc.vertex.module.inner.as_core().id,
                    entry_point: desc.vertex.entry_point.map(Borrowed),
                    constants: vert_constants,
                    zero_initialize_workgroup_memory: desc
                        .vertex
                        .compilation_options
                        .zero_initialize_workgroup_memory,
                },
                buffers: Borrowed(&vertex_buffers),
            },
            primitive: desc.primitive,
            depth_stencil: desc.depth_stencil.clone(),
            multisample: desc.multisample,
            // The fragment stage is optional; its constants are collected the
            // same way as the vertex stage's.
            fragment: desc.fragment.as_ref().map(|frag| {
                let frag_constants = frag
                    .compilation_options
                    .constants
                    .iter()
                    .map(|&(key, value)| (String::from(key), value))
                    .collect();
                pipe::FragmentState {
                    stage: pipe::ProgrammableStageDescriptor {
                        module: frag.module.inner.as_core().id,
                        entry_point: frag.entry_point.map(Borrowed),
                        constants: frag_constants,
                        zero_initialize_workgroup_memory: frag
                            .compilation_options
                            .zero_initialize_workgroup_memory,
                    },
                    targets: Borrowed(frag.targets),
                }
            }),
            multiview: desc.multiview,
            cache: desc.cache.map(|cache| cache.inner.as_core().id),
        };

        let (id, error) = self
            .context
            .0
            .device_create_render_pipeline(self.id, &descriptor, None);
        if let Some(cause) = error {
            // Internal errors indicate a shader-translation bug in wgpu
            // itself, not invalid user input, so log them prominently.
            if let wgc::pipeline::CreateRenderPipelineError::Internal { stage, ref error } = cause {
                log::error!("Shader translation error for stage {stage:?}: {error}");
                log::error!("Please report it to https://github.com/gfx-rs/wgpu");
            }
            self.context.handle_error(
                &self.error_sink,
                cause,
                desc.label,
                "Device::create_render_pipeline",
            );
        }
        CoreRenderPipeline {
            context: self.context.clone(),
            id,
            error_sink: Arc::clone(&self.error_sink),
        }
        .into()
    }
1356
1357 fn create_mesh_pipeline(
1358 &self,
1359 desc: &crate::MeshPipelineDescriptor<'_>,
1360 ) -> dispatch::DispatchRenderPipeline {
1361 use wgc::pipeline as pipe;
1362
1363 let mesh_constants = desc
1364 .mesh
1365 .compilation_options
1366 .constants
1367 .iter()
1368 .map(|&(key, value)| (String::from(key), value))
1369 .collect();
1370 let descriptor = pipe::MeshPipelineDescriptor {
1371 label: desc.label.map(Borrowed),
1372 task: desc.task.as_ref().map(|task| {
1373 let task_constants = task
1374 .compilation_options
1375 .constants
1376 .iter()
1377 .map(|&(key, value)| (String::from(key), value))
1378 .collect();
1379 pipe::TaskState {
1380 stage: pipe::ProgrammableStageDescriptor {
1381 module: task.module.inner.as_core().id,
1382 entry_point: task.entry_point.map(Borrowed),
1383 constants: task_constants,
1384 zero_initialize_workgroup_memory: desc
1385 .mesh
1386 .compilation_options
1387 .zero_initialize_workgroup_memory,
1388 },
1389 }
1390 }),
1391 mesh: pipe::MeshState {
1392 stage: pipe::ProgrammableStageDescriptor {
1393 module: desc.mesh.module.inner.as_core().id,
1394 entry_point: desc.mesh.entry_point.map(Borrowed),
1395 constants: mesh_constants,
1396 zero_initialize_workgroup_memory: desc
1397 .mesh
1398 .compilation_options
1399 .zero_initialize_workgroup_memory,
1400 },
1401 },
1402 layout: desc.layout.map(|layout| layout.inner.as_core().id),
1403 primitive: desc.primitive,
1404 depth_stencil: desc.depth_stencil.clone(),
1405 multisample: desc.multisample,
1406 fragment: desc.fragment.as_ref().map(|frag| {
1407 let frag_constants = frag
1408 .compilation_options
1409 .constants
1410 .iter()
1411 .map(|&(key, value)| (String::from(key), value))
1412 .collect();
1413 pipe::FragmentState {
1414 stage: pipe::ProgrammableStageDescriptor {
1415 module: frag.module.inner.as_core().id,
1416 entry_point: frag.entry_point.map(Borrowed),
1417 constants: frag_constants,
1418 zero_initialize_workgroup_memory: frag
1419 .compilation_options
1420 .zero_initialize_workgroup_memory,
1421 },
1422 targets: Borrowed(frag.targets),
1423 }
1424 }),
1425 multiview: desc.multiview,
1426 cache: desc.cache.map(|cache| cache.inner.as_core().id),
1427 };
1428
1429 let (id, error) = self
1430 .context
1431 .0
1432 .device_create_mesh_pipeline(self.id, &descriptor, None);
1433 if let Some(cause) = error {
1434 if let wgc::pipeline::CreateRenderPipelineError::Internal { stage, ref error } = cause {
1435 log::error!("Shader translation error for stage {stage:?}: {error}");
1436 log::error!("Please report it to https://github.com/gfx-rs/wgpu");
1437 }
1438 self.context.handle_error(
1439 &self.error_sink,
1440 cause,
1441 desc.label,
1442 "Device::create_render_pipeline",
1443 );
1444 }
1445 CoreRenderPipeline {
1446 context: self.context.clone(),
1447 id,
1448 error_sink: Arc::clone(&self.error_sink),
1449 }
1450 .into()
1451 }
1452
1453 fn create_compute_pipeline(
1454 &self,
1455 desc: &crate::ComputePipelineDescriptor<'_>,
1456 ) -> dispatch::DispatchComputePipeline {
1457 use wgc::pipeline as pipe;
1458
1459 let constants = desc
1460 .compilation_options
1461 .constants
1462 .iter()
1463 .map(|&(key, value)| (String::from(key), value))
1464 .collect();
1465
1466 let descriptor = pipe::ComputePipelineDescriptor {
1467 label: desc.label.map(Borrowed),
1468 layout: desc.layout.map(|pll| pll.inner.as_core().id),
1469 stage: pipe::ProgrammableStageDescriptor {
1470 module: desc.module.inner.as_core().id,
1471 entry_point: desc.entry_point.map(Borrowed),
1472 constants,
1473 zero_initialize_workgroup_memory: desc
1474 .compilation_options
1475 .zero_initialize_workgroup_memory,
1476 },
1477 cache: desc.cache.map(|cache| cache.inner.as_core().id),
1478 };
1479
1480 let (id, error) = self
1481 .context
1482 .0
1483 .device_create_compute_pipeline(self.id, &descriptor, None);
1484 if let Some(cause) = error {
1485 if let wgc::pipeline::CreateComputePipelineError::Internal(ref error) = cause {
1486 log::error!(
1487 "Shader translation error for stage {:?}: {}",
1488 wgt::ShaderStages::COMPUTE,
1489 error
1490 );
1491 log::error!("Please report it to https://github.com/gfx-rs/wgpu");
1492 }
1493 self.context.handle_error(
1494 &self.error_sink,
1495 cause,
1496 desc.label,
1497 "Device::create_compute_pipeline",
1498 );
1499 }
1500 CoreComputePipeline {
1501 context: self.context.clone(),
1502 id,
1503 error_sink: Arc::clone(&self.error_sink),
1504 }
1505 .into()
1506 }
1507
1508 unsafe fn create_pipeline_cache(
1509 &self,
1510 desc: &crate::PipelineCacheDescriptor<'_>,
1511 ) -> dispatch::DispatchPipelineCache {
1512 use wgc::pipeline as pipe;
1513
1514 let descriptor = pipe::PipelineCacheDescriptor {
1515 label: desc.label.map(Borrowed),
1516 data: desc.data.map(Borrowed),
1517 fallback: desc.fallback,
1518 };
1519 let (id, error) = unsafe {
1520 self.context
1521 .0
1522 .device_create_pipeline_cache(self.id, &descriptor, None)
1523 };
1524 if let Some(cause) = error {
1525 self.context.handle_error(
1526 &self.error_sink,
1527 cause,
1528 desc.label,
1529 "Device::device_create_pipeline_cache_init",
1530 );
1531 }
1532 CorePipelineCache {
1533 context: self.context.clone(),
1534 id,
1535 }
1536 .into()
1537 }
1538
1539 fn create_buffer(&self, desc: &crate::BufferDescriptor<'_>) -> dispatch::DispatchBuffer {
1540 let (id, error) = self.context.0.device_create_buffer(
1541 self.id,
1542 &desc.map_label(|l| l.map(Borrowed)),
1543 None,
1544 );
1545 if let Some(cause) = error {
1546 self.context
1547 .handle_error(&self.error_sink, cause, desc.label, "Device::create_buffer");
1548 }
1549
1550 CoreBuffer {
1551 context: self.context.clone(),
1552 id,
1553 error_sink: Arc::clone(&self.error_sink),
1554 }
1555 .into()
1556 }
1557
1558 fn create_texture(&self, desc: &crate::TextureDescriptor<'_>) -> dispatch::DispatchTexture {
1559 let wgt_desc = desc.map_label_and_view_formats(|l| l.map(Borrowed), |v| v.to_vec());
1560 let (id, error) = self
1561 .context
1562 .0
1563 .device_create_texture(self.id, &wgt_desc, None);
1564 if let Some(cause) = error {
1565 self.context.handle_error(
1566 &self.error_sink,
1567 cause,
1568 desc.label,
1569 "Device::create_texture",
1570 );
1571 }
1572
1573 CoreTexture {
1574 context: self.context.clone(),
1575 id,
1576 error_sink: Arc::clone(&self.error_sink),
1577 }
1578 .into()
1579 }
1580
1581 fn create_external_texture(
1582 &self,
1583 desc: &crate::ExternalTextureDescriptor<'_>,
1584 planes: &[&crate::TextureView],
1585 ) -> dispatch::DispatchExternalTexture {
1586 let wgt_desc = desc.map_label(|l| l.map(Borrowed));
1587 let planes = planes
1588 .iter()
1589 .map(|plane| plane.inner.as_core().id)
1590 .collect::<Vec<_>>();
1591 let (id, error) = self
1592 .context
1593 .0
1594 .device_create_external_texture(self.id, &wgt_desc, &planes, None);
1595 if let Some(cause) = error {
1596 self.context.handle_error(
1597 &self.error_sink,
1598 cause,
1599 desc.label,
1600 "Device::create_external_texture",
1601 );
1602 }
1603
1604 CoreExternalTexture {
1605 context: self.context.clone(),
1606 id,
1607 }
1608 .into()
1609 }
1610
1611 fn create_blas(
1612 &self,
1613 desc: &crate::CreateBlasDescriptor<'_>,
1614 sizes: crate::BlasGeometrySizeDescriptors,
1615 ) -> (Option<u64>, dispatch::DispatchBlas) {
1616 let global = &self.context.0;
1617 let (id, handle, error) =
1618 global.device_create_blas(self.id, &desc.map_label(|l| l.map(Borrowed)), sizes, None);
1619 if let Some(cause) = error {
1620 self.context
1621 .handle_error(&self.error_sink, cause, desc.label, "Device::create_blas");
1622 }
1623 (
1624 handle,
1625 CoreBlas {
1626 context: self.context.clone(),
1627 id,
1628 error_sink: Arc::clone(&self.error_sink),
1629 }
1630 .into(),
1631 )
1632 }
1633
1634 fn create_tlas(&self, desc: &crate::CreateTlasDescriptor<'_>) -> dispatch::DispatchTlas {
1635 let global = &self.context.0;
1636 let (id, error) =
1637 global.device_create_tlas(self.id, &desc.map_label(|l| l.map(Borrowed)), None);
1638 if let Some(cause) = error {
1639 self.context
1640 .handle_error(&self.error_sink, cause, desc.label, "Device::create_tlas");
1641 }
1642 CoreTlas {
1643 context: self.context.clone(),
1644 id,
1645 }
1647 .into()
1648 }
1649
1650 fn create_sampler(&self, desc: &crate::SamplerDescriptor<'_>) -> dispatch::DispatchSampler {
1651 let descriptor = wgc::resource::SamplerDescriptor {
1652 label: desc.label.map(Borrowed),
1653 address_modes: [
1654 desc.address_mode_u,
1655 desc.address_mode_v,
1656 desc.address_mode_w,
1657 ],
1658 mag_filter: desc.mag_filter,
1659 min_filter: desc.min_filter,
1660 mipmap_filter: desc.mipmap_filter,
1661 lod_min_clamp: desc.lod_min_clamp,
1662 lod_max_clamp: desc.lod_max_clamp,
1663 compare: desc.compare,
1664 anisotropy_clamp: desc.anisotropy_clamp,
1665 border_color: desc.border_color,
1666 };
1667
1668 let (id, error) = self
1669 .context
1670 .0
1671 .device_create_sampler(self.id, &descriptor, None);
1672 if let Some(cause) = error {
1673 self.context.handle_error(
1674 &self.error_sink,
1675 cause,
1676 desc.label,
1677 "Device::create_sampler",
1678 );
1679 }
1680 CoreSampler {
1681 context: self.context.clone(),
1682 id,
1683 }
1684 .into()
1685 }
1686
1687 fn create_query_set(&self, desc: &crate::QuerySetDescriptor<'_>) -> dispatch::DispatchQuerySet {
1688 let (id, error) = self.context.0.device_create_query_set(
1689 self.id,
1690 &desc.map_label(|l| l.map(Borrowed)),
1691 None,
1692 );
1693 if let Some(cause) = error {
1694 self.context
1695 .handle_error_nolabel(&self.error_sink, cause, "Device::create_query_set");
1696 }
1697 CoreQuerySet {
1698 context: self.context.clone(),
1699 id,
1700 }
1701 .into()
1702 }
1703
1704 fn create_command_encoder(
1705 &self,
1706 desc: &crate::CommandEncoderDescriptor<'_>,
1707 ) -> dispatch::DispatchCommandEncoder {
1708 let (id, error) = self.context.0.device_create_command_encoder(
1709 self.id,
1710 &desc.map_label(|l| l.map(Borrowed)),
1711 None,
1712 );
1713 if let Some(cause) = error {
1714 self.context.handle_error(
1715 &self.error_sink,
1716 cause,
1717 desc.label,
1718 "Device::create_command_encoder",
1719 );
1720 }
1721
1722 CoreCommandEncoder {
1723 context: self.context.clone(),
1724 id,
1725 error_sink: Arc::clone(&self.error_sink),
1726 }
1727 .into()
1728 }
1729
1730 fn create_render_bundle_encoder(
1731 &self,
1732 desc: &crate::RenderBundleEncoderDescriptor<'_>,
1733 ) -> dispatch::DispatchRenderBundleEncoder {
1734 let descriptor = wgc::command::RenderBundleEncoderDescriptor {
1735 label: desc.label.map(Borrowed),
1736 color_formats: Borrowed(desc.color_formats),
1737 depth_stencil: desc.depth_stencil,
1738 sample_count: desc.sample_count,
1739 multiview: desc.multiview,
1740 };
1741 let encoder = match wgc::command::RenderBundleEncoder::new(&descriptor, self.id, None) {
1742 Ok(encoder) => encoder,
1743 Err(e) => panic!("Error in Device::create_render_bundle_encoder: {e}"),
1744 };
1745
1746 CoreRenderBundleEncoder {
1747 context: self.context.clone(),
1748 encoder,
1749 id: crate::cmp::Identifier::create(),
1750 }
1751 .into()
1752 }
1753
1754 fn set_device_lost_callback(&self, device_lost_callback: dispatch::BoxDeviceLostCallback) {
1755 self.context
1756 .0
1757 .device_set_device_lost_closure(self.id, device_lost_callback);
1758 }
1759
1760 fn on_uncaptured_error(&self, handler: Box<dyn crate::UncapturedErrorHandler>) {
1761 let mut error_sink = self.error_sink.lock();
1762 error_sink.uncaptured_handler = Some(handler);
1763 }
1764
1765 fn push_error_scope(&self, filter: crate::ErrorFilter) {
1766 let mut error_sink = self.error_sink.lock();
1767 error_sink.scopes.push(ErrorScope {
1768 error: None,
1769 filter,
1770 });
1771 }
1772
1773 fn pop_error_scope(&self) -> Pin<Box<dyn dispatch::PopErrorScopeFuture>> {
1774 let mut error_sink = self.error_sink.lock();
1775 let scope = error_sink.scopes.pop().unwrap();
1776 Box::pin(ready(scope.error))
1777 }
1778
    /// Forwards to wgpu-core to begin a capture in an attached graphics
    /// debugger.
    ///
    /// # Safety
    /// Forwarded unchanged to wgpu-core; the caller must uphold the trait's
    /// safety contract for this method.
    unsafe fn start_graphics_debugger_capture(&self) {
        // SAFETY: upheld by this function's own safety contract.
        unsafe {
            self.context
                .0
                .device_start_graphics_debugger_capture(self.id)
        };
    }

    /// Forwards to wgpu-core to end a graphics-debugger capture started with
    /// `start_graphics_debugger_capture`.
    ///
    /// # Safety
    /// Forwarded unchanged to wgpu-core; the caller must uphold the trait's
    /// safety contract for this method.
    unsafe fn stop_graphics_debugger_capture(&self) {
        // SAFETY: upheld by this function's own safety contract.
        unsafe {
            self.context
                .0
                .device_stop_graphics_debugger_capture(self.id)
        };
    }
1794
1795 fn poll(&self, poll_type: wgt::PollType<u64>) -> Result<crate::PollStatus, crate::PollError> {
1796 match self.context.0.device_poll(self.id, poll_type) {
1797 Ok(status) => Ok(status),
1798 Err(err) => {
1799 if let Some(poll_error) = err.to_poll_error() {
1800 return Err(poll_error);
1801 }
1802
1803 self.context.handle_error_fatal(err, "Device::poll")
1804 }
1805 }
1806 }
1807
    /// Returns wgpu-core's internal counters for this device.
    fn get_internal_counters(&self) -> crate::InternalCounters {
        self.context.0.device_get_internal_counters(self.id)
    }

    /// Returns an allocator report from wgpu-core, when one is available.
    fn generate_allocator_report(&self) -> Option<wgt::AllocatorReport> {
        self.context.0.device_generate_allocator_report(self.id)
    }

    /// Forwards to wgpu-core's `device_destroy` (distinct from `Drop`, which
    /// calls `device_drop` to release this handle).
    fn destroy(&self) {
        self.context.0.device_destroy(self.id);
    }
1819}
1820
impl Drop for CoreDevice {
    fn drop(&mut self) {
        // Release this handle's reference to the device in wgpu-core.
        self.context.0.device_drop(self.id)
    }
}
1826
/// wgpu-core implementation of the queue interface. All fallible operations
/// report failures through the queue's error sink rather than returning them.
impl dispatch::QueueInterface for CoreQueue {
    /// Schedules a write of `data` into `buffer` at `offset`.
    fn write_buffer(
        &self,
        buffer: &dispatch::DispatchBuffer,
        offset: crate::BufferAddress,
        data: &[u8],
    ) {
        let buffer = buffer.as_core();

        match self
            .context
            .0
            .queue_write_buffer(self.id, buffer.id, offset, data)
        {
            Ok(()) => (),
            Err(err) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, err, "Queue::write_buffer")
            }
        }
    }

    /// Creates a staging buffer of `size` bytes for `write_buffer_with`,
    /// returning `None` (after reporting the error) on failure.
    fn create_staging_buffer(
        &self,
        size: crate::BufferSize,
    ) -> Option<dispatch::DispatchQueueWriteBuffer> {
        match self
            .context
            .0
            .queue_create_staging_buffer(self.id, size, None)
        {
            Ok((buffer_id, ptr)) => Some(
                CoreQueueWriteBuffer {
                    buffer_id,
                    mapping: CoreBufferMappedRange {
                        ptr,
                        size: size.get() as usize,
                    },
                }
                .into(),
            ),
            Err(err) => {
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    err,
                    "Queue::write_buffer_with",
                );
                None
            }
        }
    }

    /// Validates a prospective `write_buffer_with` range; `None` means the
    /// write would be invalid (the error has already been reported).
    fn validate_write_buffer(
        &self,
        buffer: &dispatch::DispatchBuffer,
        offset: wgt::BufferAddress,
        size: wgt::BufferSize,
    ) -> Option<()> {
        let buffer = buffer.as_core();

        match self
            .context
            .0
            .queue_validate_write_buffer(self.id, buffer.id, offset, size)
        {
            Ok(()) => Some(()),
            Err(err) => {
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    err,
                    "Queue::write_buffer_with",
                );
                None
            }
        }
    }

    /// Copies a previously created staging buffer into `buffer` at `offset`.
    fn write_staging_buffer(
        &self,
        buffer: &dispatch::DispatchBuffer,
        offset: crate::BufferAddress,
        staging_buffer: &dispatch::DispatchQueueWriteBuffer,
    ) {
        let buffer = buffer.as_core();
        let staging_buffer = staging_buffer.as_core();

        match self.context.0.queue_write_staging_buffer(
            self.id,
            buffer.id,
            offset,
            staging_buffer.buffer_id,
        ) {
            Ok(()) => (),
            Err(err) => {
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    err,
                    "Queue::write_buffer_with",
                );
            }
        }
    }

    /// Schedules a write of `data` into a texture region.
    fn write_texture(
        &self,
        texture: crate::TexelCopyTextureInfo<'_>,
        data: &[u8],
        data_layout: crate::TexelCopyBufferLayout,
        size: crate::Extent3d,
    ) {
        match self.context.0.queue_write_texture(
            self.id,
            &map_texture_copy_view(texture),
            data,
            &data_layout,
            &size,
        ) {
            Ok(()) => (),
            Err(err) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, err, "Queue::write_texture")
            }
        }
    }

    // Only compiled for web targets; on non-webgl web targets the parameters
    // are intentionally unused (the body below is webgl-only).
    #[cfg(web)]
    #[cfg_attr(not(webgl), expect(unused_variables))]
    fn copy_external_image_to_texture(
        &self,
        source: &crate::CopyExternalImageSourceInfo,
        dest: crate::CopyExternalImageDestInfo<&crate::api::Texture>,
        size: crate::Extent3d,
    ) {
        #[cfg(webgl)]
        match self.context.0.queue_copy_external_image_to_texture(
            self.id,
            source,
            map_texture_tagged_copy_view(dest),
            size,
        ) {
            Ok(()) => (),
            Err(err) => self.context.handle_error_nolabel(
                &self.error_sink,
                err,
                "Queue::copy_external_image_to_texture",
            ),
        }
    }

    /// Submits the given command buffers; returns the submission index even
    /// when submission failed (the error is reported via the sink).
    fn submit(
        &self,
        command_buffers: &mut dyn Iterator<Item = dispatch::DispatchCommandBuffer>,
    ) -> u64 {
        let temp_command_buffers = command_buffers.collect::<SmallVec<[_; 4]>>();
        let command_buffer_ids = temp_command_buffers
            .iter()
            .map(|cmdbuf| cmdbuf.as_core().id)
            .collect::<SmallVec<[_; 4]>>();

        let index = match self.context.0.queue_submit(self.id, &command_buffer_ids) {
            Ok(index) => index,
            Err((index, err)) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, err, "Queue::submit");
                index
            }
        };

        // Keep the wrappers alive across the submit call so their Drop impls
        // run only after submission.
        drop(temp_command_buffers);

        index
    }

    /// Returns the timestamp-query period for this queue, in nanoseconds per
    /// timestamp tick.
    fn get_timestamp_period(&self) -> f32 {
        self.context.0.queue_get_timestamp_period(self.id)
    }

    /// Registers `callback` to run once all currently submitted work completes.
    fn on_submitted_work_done(&self, callback: dispatch::BoxSubmittedWorkDoneCallback) {
        self.context
            .0
            .queue_on_submitted_work_done(self.id, callback);
    }

    /// Compacts a prepared BLAS, returning the optional raw handle and the
    /// new compacted BLAS wrapper.
    fn compact_blas(&self, blas: &dispatch::DispatchBlas) -> (Option<u64>, dispatch::DispatchBlas) {
        let (id, handle, error) =
            self.context
                .0
                .queue_compact_blas(self.id, blas.as_core().id, None);

        if let Some(cause) = error {
            self.context
                .handle_error_nolabel(&self.error_sink, cause, "Queue::compact_blas");
        }
        (
            handle,
            CoreBlas {
                context: self.context.clone(),
                id,
                error_sink: Arc::clone(&self.error_sink),
            }
            .into(),
        )
    }
}
2033
impl Drop for CoreQueue {
    fn drop(&mut self) {
        // Release this handle's reference to the queue in wgpu-core.
        self.context.0.queue_drop(self.id)
    }
}

impl dispatch::ShaderModuleInterface for CoreShaderModule {
    /// Compilation info is captured at module creation, so the future
    /// resolves immediately with a clone of the stored value.
    fn get_compilation_info(&self) -> Pin<Box<dyn dispatch::ShaderCompilationInfoFuture>> {
        Box::pin(ready(self.compilation_info.clone()))
    }
}

impl Drop for CoreShaderModule {
    fn drop(&mut self) {
        self.context.0.shader_module_drop(self.id)
    }
}

// Marker impl: bind group layouts have no extra dispatch methods.
impl dispatch::BindGroupLayoutInterface for CoreBindGroupLayout {}

impl Drop for CoreBindGroupLayout {
    fn drop(&mut self) {
        self.context.0.bind_group_layout_drop(self.id)
    }
}

// Marker impl: bind groups have no extra dispatch methods.
impl dispatch::BindGroupInterface for CoreBindGroup {}

impl Drop for CoreBindGroup {
    fn drop(&mut self) {
        self.context.0.bind_group_drop(self.id)
    }
}

// Marker impl: texture views have no extra dispatch methods.
impl dispatch::TextureViewInterface for CoreTextureView {}

impl Drop for CoreTextureView {
    fn drop(&mut self) {
        // The drop result is deliberately ignored; there is no way to report
        // an error from Drop.
        let _ = self.context.0.texture_view_drop(self.id);
    }
}

impl dispatch::ExternalTextureInterface for CoreExternalTexture {
    /// Eager destruction via wgpu-core, independent of handle lifetime.
    fn destroy(&self) {
        self.context.0.external_texture_destroy(self.id);
    }
}

impl Drop for CoreExternalTexture {
    fn drop(&mut self) {
        self.context.0.external_texture_drop(self.id);
    }
}

// Marker impl: samplers have no extra dispatch methods.
impl dispatch::SamplerInterface for CoreSampler {}

impl Drop for CoreSampler {
    fn drop(&mut self) {
        self.context.0.sampler_drop(self.id)
    }
}
2096
2097impl dispatch::BufferInterface for CoreBuffer {
2098 fn map_async(
2099 &self,
2100 mode: crate::MapMode,
2101 range: Range<crate::BufferAddress>,
2102 callback: dispatch::BufferMapCallback,
2103 ) {
2104 let operation = wgc::resource::BufferMapOperation {
2105 host: match mode {
2106 MapMode::Read => wgc::device::HostMap::Read,
2107 MapMode::Write => wgc::device::HostMap::Write,
2108 },
2109 callback: Some(Box::new(|status| {
2110 let res = status.map_err(|_| crate::BufferAsyncError);
2111 callback(res);
2112 })),
2113 };
2114
2115 match self.context.0.buffer_map_async(
2116 self.id,
2117 range.start,
2118 Some(range.end - range.start),
2119 operation,
2120 ) {
2121 Ok(_) => (),
2122 Err(cause) => {
2123 self.context
2124 .handle_error_nolabel(&self.error_sink, cause, "Buffer::map_async")
2125 }
2126 }
2127 }
2128
2129 fn get_mapped_range(
2130 &self,
2131 sub_range: Range<crate::BufferAddress>,
2132 ) -> dispatch::DispatchBufferMappedRange {
2133 let size = sub_range.end - sub_range.start;
2134 match self
2135 .context
2136 .0
2137 .buffer_get_mapped_range(self.id, sub_range.start, Some(size))
2138 {
2139 Ok((ptr, size)) => CoreBufferMappedRange {
2140 ptr,
2141 size: size as usize,
2142 }
2143 .into(),
2144 Err(err) => self
2145 .context
2146 .handle_error_fatal(err, "Buffer::get_mapped_range"),
2147 }
2148 }
2149
2150 fn unmap(&self) {
2151 match self.context.0.buffer_unmap(self.id) {
2152 Ok(()) => (),
2153 Err(cause) => {
2154 self.context
2155 .handle_error_nolabel(&self.error_sink, cause, "Buffer::buffer_unmap")
2156 }
2157 }
2158 }
2159
2160 fn destroy(&self) {
2161 self.context.0.buffer_destroy(self.id);
2162 }
2163}
2164
impl Drop for CoreBuffer {
    fn drop(&mut self) {
        // Release this handle's reference to the buffer in wgpu-core.
        self.context.0.buffer_drop(self.id)
    }
}
2170
2171impl dispatch::TextureInterface for CoreTexture {
2172 fn create_view(
2173 &self,
2174 desc: &crate::TextureViewDescriptor<'_>,
2175 ) -> dispatch::DispatchTextureView {
2176 let descriptor = wgc::resource::TextureViewDescriptor {
2177 label: desc.label.map(Borrowed),
2178 format: desc.format,
2179 dimension: desc.dimension,
2180 usage: desc.usage,
2181 range: wgt::ImageSubresourceRange {
2182 aspect: desc.aspect,
2183 base_mip_level: desc.base_mip_level,
2184 mip_level_count: desc.mip_level_count,
2185 base_array_layer: desc.base_array_layer,
2186 array_layer_count: desc.array_layer_count,
2187 },
2188 };
2189 let (id, error) = self
2190 .context
2191 .0
2192 .texture_create_view(self.id, &descriptor, None);
2193 if let Some(cause) = error {
2194 self.context
2195 .handle_error(&self.error_sink, cause, desc.label, "Texture::create_view");
2196 }
2197 CoreTextureView {
2198 context: self.context.clone(),
2199 id,
2200 }
2201 .into()
2202 }
2203
2204 fn destroy(&self) {
2205 self.context.0.texture_destroy(self.id);
2206 }
2207}
2208
impl Drop for CoreTexture {
    fn drop(&mut self) {
        // Release this handle's reference to the texture in wgpu-core.
        self.context.0.texture_drop(self.id)
    }
}
2214
2215impl dispatch::BlasInterface for CoreBlas {
2216 fn prepare_compact_async(&self, callback: BlasCompactCallback) {
2217 let callback: Option<wgc::resource::BlasCompactCallback> =
2218 Some(Box::new(|status: BlasPrepareCompactResult| {
2219 let res = status.map_err(|_| crate::BlasAsyncError);
2220 callback(res);
2221 }));
2222
2223 match self.context.0.blas_prepare_compact_async(self.id, callback) {
2224 Ok(_) => (),
2225 Err(cause) => self.context.handle_error_nolabel(
2226 &self.error_sink,
2227 cause,
2228 "Blas::prepare_compact_async",
2229 ),
2230 }
2231 }
2232
2233 fn ready_for_compaction(&self) -> bool {
2234 match self.context.0.ready_for_compaction(self.id) {
2235 Ok(ready) => ready,
2236 Err(cause) => {
2237 self.context.handle_error_nolabel(
2238 &self.error_sink,
2239 cause,
2240 "Blas::ready_for_compaction",
2241 );
2242 false
2244 }
2245 }
2246 }
2247}
2248
impl Drop for CoreBlas {
    fn drop(&mut self) {
        // Release this handle's reference to the BLAS in wgpu-core.
        self.context.0.blas_drop(self.id)
    }
}

// Marker impl: TLASes have no extra dispatch methods.
impl dispatch::TlasInterface for CoreTlas {}

impl Drop for CoreTlas {
    fn drop(&mut self) {
        self.context.0.tlas_drop(self.id)
    }
}

// Marker impl: query sets have no extra dispatch methods.
impl dispatch::QuerySetInterface for CoreQuerySet {}

impl Drop for CoreQuerySet {
    fn drop(&mut self) {
        self.context.0.query_set_drop(self.id)
    }
}

// Marker impl: pipeline layouts have no extra dispatch methods.
impl dispatch::PipelineLayoutInterface for CorePipelineLayout {}

impl Drop for CorePipelineLayout {
    fn drop(&mut self) {
        self.context.0.pipeline_layout_drop(self.id)
    }
}
2278
2279impl dispatch::RenderPipelineInterface for CoreRenderPipeline {
2280 fn get_bind_group_layout(&self, index: u32) -> dispatch::DispatchBindGroupLayout {
2281 let (id, error) = self
2282 .context
2283 .0
2284 .render_pipeline_get_bind_group_layout(self.id, index, None);
2285 if let Some(err) = error {
2286 self.context.handle_error_nolabel(
2287 &self.error_sink,
2288 err,
2289 "RenderPipeline::get_bind_group_layout",
2290 )
2291 }
2292 CoreBindGroupLayout {
2293 context: self.context.clone(),
2294 id,
2295 }
2296 .into()
2297 }
2298}
2299
impl Drop for CoreRenderPipeline {
    fn drop(&mut self) {
        // Release this handle's reference to the pipeline in wgpu-core.
        self.context.0.render_pipeline_drop(self.id)
    }
}
2305
2306impl dispatch::ComputePipelineInterface for CoreComputePipeline {
2307 fn get_bind_group_layout(&self, index: u32) -> dispatch::DispatchBindGroupLayout {
2308 let (id, error) = self
2309 .context
2310 .0
2311 .compute_pipeline_get_bind_group_layout(self.id, index, None);
2312 if let Some(err) = error {
2313 self.context.handle_error_nolabel(
2314 &self.error_sink,
2315 err,
2316 "ComputePipeline::get_bind_group_layout",
2317 )
2318 }
2319 CoreBindGroupLayout {
2320 context: self.context.clone(),
2321 id,
2322 }
2323 .into()
2324 }
2325}
2326
impl Drop for CoreComputePipeline {
    fn drop(&mut self) {
        // Release this handle's reference to the pipeline in wgpu-core.
        self.context.0.compute_pipeline_drop(self.id)
    }
}

impl dispatch::PipelineCacheInterface for CorePipelineCache {
    /// Returns the cache's serialized contents, when the backend can
    /// provide them.
    fn get_data(&self) -> Option<Vec<u8>> {
        self.context.0.pipeline_cache_get_data(self.id)
    }
}

impl Drop for CorePipelineCache {
    fn drop(&mut self) {
        self.context.0.pipeline_cache_drop(self.id)
    }
}
2344
2345impl dispatch::CommandEncoderInterface for CoreCommandEncoder {
2346 fn copy_buffer_to_buffer(
2347 &self,
2348 source: &dispatch::DispatchBuffer,
2349 source_offset: crate::BufferAddress,
2350 destination: &dispatch::DispatchBuffer,
2351 destination_offset: crate::BufferAddress,
2352 copy_size: Option<crate::BufferAddress>,
2353 ) {
2354 let source = source.as_core();
2355 let destination = destination.as_core();
2356
2357 if let Err(cause) = self.context.0.command_encoder_copy_buffer_to_buffer(
2358 self.id,
2359 source.id,
2360 source_offset,
2361 destination.id,
2362 destination_offset,
2363 copy_size,
2364 ) {
2365 self.context.handle_error_nolabel(
2366 &self.error_sink,
2367 cause,
2368 "CommandEncoder::copy_buffer_to_buffer",
2369 );
2370 }
2371 }
2372
2373 fn copy_buffer_to_texture(
2374 &self,
2375 source: crate::TexelCopyBufferInfo<'_>,
2376 destination: crate::TexelCopyTextureInfo<'_>,
2377 copy_size: crate::Extent3d,
2378 ) {
2379 if let Err(cause) = self.context.0.command_encoder_copy_buffer_to_texture(
2380 self.id,
2381 &map_buffer_copy_view(source),
2382 &map_texture_copy_view(destination),
2383 ©_size,
2384 ) {
2385 self.context.handle_error_nolabel(
2386 &self.error_sink,
2387 cause,
2388 "CommandEncoder::copy_buffer_to_texture",
2389 );
2390 }
2391 }
2392
2393 fn copy_texture_to_buffer(
2394 &self,
2395 source: crate::TexelCopyTextureInfo<'_>,
2396 destination: crate::TexelCopyBufferInfo<'_>,
2397 copy_size: crate::Extent3d,
2398 ) {
2399 if let Err(cause) = self.context.0.command_encoder_copy_texture_to_buffer(
2400 self.id,
2401 &map_texture_copy_view(source),
2402 &map_buffer_copy_view(destination),
2403 ©_size,
2404 ) {
2405 self.context.handle_error_nolabel(
2406 &self.error_sink,
2407 cause,
2408 "CommandEncoder::copy_texture_to_buffer",
2409 );
2410 }
2411 }
2412
2413 fn copy_texture_to_texture(
2414 &self,
2415 source: crate::TexelCopyTextureInfo<'_>,
2416 destination: crate::TexelCopyTextureInfo<'_>,
2417 copy_size: crate::Extent3d,
2418 ) {
2419 if let Err(cause) = self.context.0.command_encoder_copy_texture_to_texture(
2420 self.id,
2421 &map_texture_copy_view(source),
2422 &map_texture_copy_view(destination),
2423 ©_size,
2424 ) {
2425 self.context.handle_error_nolabel(
2426 &self.error_sink,
2427 cause,
2428 "CommandEncoder::copy_texture_to_texture",
2429 );
2430 }
2431 }
2432
2433 fn begin_compute_pass(
2434 &self,
2435 desc: &crate::ComputePassDescriptor<'_>,
2436 ) -> dispatch::DispatchComputePass {
2437 let timestamp_writes =
2438 desc.timestamp_writes
2439 .as_ref()
2440 .map(|tw| wgc::command::PassTimestampWrites {
2441 query_set: tw.query_set.inner.as_core().id,
2442 beginning_of_pass_write_index: tw.beginning_of_pass_write_index,
2443 end_of_pass_write_index: tw.end_of_pass_write_index,
2444 });
2445
2446 let (pass, err) = self.context.0.command_encoder_begin_compute_pass(
2447 self.id,
2448 &wgc::command::ComputePassDescriptor {
2449 label: desc.label.map(Borrowed),
2450 timestamp_writes,
2451 },
2452 );
2453
2454 if let Some(cause) = err {
2455 self.context.handle_error(
2456 &self.error_sink,
2457 cause,
2458 desc.label,
2459 "CommandEncoder::begin_compute_pass",
2460 );
2461 }
2462
2463 CoreComputePass {
2464 context: self.context.clone(),
2465 pass,
2466 error_sink: self.error_sink.clone(),
2467 id: crate::cmp::Identifier::create(),
2468 }
2469 .into()
2470 }
2471
2472 fn begin_render_pass(
2473 &self,
2474 desc: &crate::RenderPassDescriptor<'_>,
2475 ) -> dispatch::DispatchRenderPass {
2476 let colors = desc
2477 .color_attachments
2478 .iter()
2479 .map(|ca| {
2480 ca.as_ref()
2481 .map(|at| wgc::command::RenderPassColorAttachment {
2482 view: at.view.inner.as_core().id,
2483 depth_slice: at.depth_slice,
2484 resolve_target: at.resolve_target.map(|view| view.inner.as_core().id),
2485 load_op: at.ops.load,
2486 store_op: at.ops.store,
2487 })
2488 })
2489 .collect::<Vec<_>>();
2490
2491 let depth_stencil = desc.depth_stencil_attachment.as_ref().map(|dsa| {
2492 wgc::command::RenderPassDepthStencilAttachment {
2493 view: dsa.view.inner.as_core().id,
2494 depth: map_pass_channel(dsa.depth_ops.as_ref()),
2495 stencil: map_pass_channel(dsa.stencil_ops.as_ref()),
2496 }
2497 });
2498
2499 let timestamp_writes =
2500 desc.timestamp_writes
2501 .as_ref()
2502 .map(|tw| wgc::command::PassTimestampWrites {
2503 query_set: tw.query_set.inner.as_core().id,
2504 beginning_of_pass_write_index: tw.beginning_of_pass_write_index,
2505 end_of_pass_write_index: tw.end_of_pass_write_index,
2506 });
2507
2508 let (pass, err) = self.context.0.command_encoder_begin_render_pass(
2509 self.id,
2510 &wgc::command::RenderPassDescriptor {
2511 label: desc.label.map(Borrowed),
2512 timestamp_writes: timestamp_writes.as_ref(),
2513 color_attachments: Borrowed(&colors),
2514 depth_stencil_attachment: depth_stencil.as_ref(),
2515 occlusion_query_set: desc.occlusion_query_set.map(|qs| qs.inner.as_core().id),
2516 },
2517 );
2518
2519 if let Some(cause) = err {
2520 self.context.handle_error(
2521 &self.error_sink,
2522 cause,
2523 desc.label,
2524 "CommandEncoder::begin_render_pass",
2525 );
2526 }
2527
2528 CoreRenderPass {
2529 context: self.context.clone(),
2530 pass,
2531 error_sink: self.error_sink.clone(),
2532 id: crate::cmp::Identifier::create(),
2533 }
2534 .into()
2535 }
2536
2537 fn finish(&mut self) -> dispatch::DispatchCommandBuffer {
2538 let descriptor = wgt::CommandBufferDescriptor::default();
2539 let (id, error) = self
2540 .context
2541 .0
2542 .command_encoder_finish(self.id, &descriptor, None);
2543 if let Some(cause) = error {
2544 self.context
2545 .handle_error_nolabel(&self.error_sink, cause, "a CommandEncoder");
2546 }
2547 CoreCommandBuffer {
2548 context: self.context.clone(),
2549 id,
2550 }
2551 .into()
2552 }
2553
2554 fn clear_texture(
2555 &self,
2556 texture: &dispatch::DispatchTexture,
2557 subresource_range: &crate::ImageSubresourceRange,
2558 ) {
2559 let texture = texture.as_core();
2560
2561 if let Err(cause) =
2562 self.context
2563 .0
2564 .command_encoder_clear_texture(self.id, texture.id, subresource_range)
2565 {
2566 self.context.handle_error_nolabel(
2567 &self.error_sink,
2568 cause,
2569 "CommandEncoder::clear_texture",
2570 );
2571 }
2572 }
2573
2574 fn clear_buffer(
2575 &self,
2576 buffer: &dispatch::DispatchBuffer,
2577 offset: crate::BufferAddress,
2578 size: Option<crate::BufferAddress>,
2579 ) {
2580 let buffer = buffer.as_core();
2581
2582 if let Err(cause) = self
2583 .context
2584 .0
2585 .command_encoder_clear_buffer(self.id, buffer.id, offset, size)
2586 {
2587 self.context.handle_error_nolabel(
2588 &self.error_sink,
2589 cause,
2590 "CommandEncoder::fill_buffer",
2591 );
2592 }
2593 }
2594
2595 fn insert_debug_marker(&self, label: &str) {
2596 if let Err(cause) = self
2597 .context
2598 .0
2599 .command_encoder_insert_debug_marker(self.id, label)
2600 {
2601 self.context.handle_error_nolabel(
2602 &self.error_sink,
2603 cause,
2604 "CommandEncoder::insert_debug_marker",
2605 );
2606 }
2607 }
2608
2609 fn push_debug_group(&self, label: &str) {
2610 if let Err(cause) = self
2611 .context
2612 .0
2613 .command_encoder_push_debug_group(self.id, label)
2614 {
2615 self.context.handle_error_nolabel(
2616 &self.error_sink,
2617 cause,
2618 "CommandEncoder::push_debug_group",
2619 );
2620 }
2621 }
2622
2623 fn pop_debug_group(&self) {
2624 if let Err(cause) = self.context.0.command_encoder_pop_debug_group(self.id) {
2625 self.context.handle_error_nolabel(
2626 &self.error_sink,
2627 cause,
2628 "CommandEncoder::pop_debug_group",
2629 );
2630 }
2631 }
2632
2633 fn write_timestamp(&self, query_set: &dispatch::DispatchQuerySet, query_index: u32) {
2634 let query_set = query_set.as_core();
2635
2636 if let Err(cause) =
2637 self.context
2638 .0
2639 .command_encoder_write_timestamp(self.id, query_set.id, query_index)
2640 {
2641 self.context.handle_error_nolabel(
2642 &self.error_sink,
2643 cause,
2644 "CommandEncoder::write_timestamp",
2645 );
2646 }
2647 }
2648
2649 fn resolve_query_set(
2650 &self,
2651 query_set: &dispatch::DispatchQuerySet,
2652 first_query: u32,
2653 query_count: u32,
2654 destination: &dispatch::DispatchBuffer,
2655 destination_offset: crate::BufferAddress,
2656 ) {
2657 let query_set = query_set.as_core();
2658 let destination = destination.as_core();
2659
2660 if let Err(cause) = self.context.0.command_encoder_resolve_query_set(
2661 self.id,
2662 query_set.id,
2663 first_query,
2664 query_count,
2665 destination.id,
2666 destination_offset,
2667 ) {
2668 self.context.handle_error_nolabel(
2669 &self.error_sink,
2670 cause,
2671 "CommandEncoder::resolve_query_set",
2672 );
2673 }
2674 }
2675
2676 fn mark_acceleration_structures_built<'a>(
2677 &self,
2678 blas: &mut dyn Iterator<Item = &'a Blas>,
2679 tlas: &mut dyn Iterator<Item = &'a Tlas>,
2680 ) {
2681 let blas = blas
2682 .map(|b| b.inner.as_core().id)
2683 .collect::<SmallVec<[_; 4]>>();
2684 let tlas = tlas
2685 .map(|t| t.inner.as_core().id)
2686 .collect::<SmallVec<[_; 4]>>();
2687 if let Err(cause) = self
2688 .context
2689 .0
2690 .command_encoder_mark_acceleration_structures_built(self.id, &blas, &tlas)
2691 {
2692 self.context.handle_error_nolabel(
2693 &self.error_sink,
2694 cause,
2695 "CommandEncoder::build_acceleration_structures_unsafe_tlas",
2696 );
2697 }
2698 }
2699
2700 fn build_acceleration_structures<'a>(
2701 &self,
2702 blas: &mut dyn Iterator<Item = &'a crate::BlasBuildEntry<'a>>,
2703 tlas: &mut dyn Iterator<Item = &'a crate::Tlas>,
2704 ) {
2705 let blas = blas.map(|e: &crate::BlasBuildEntry<'_>| {
2706 let geometries = match e.geometry {
2707 crate::BlasGeometries::TriangleGeometries(ref triangle_geometries) => {
2708 let iter = triangle_geometries.iter().map(|tg| {
2709 wgc::ray_tracing::BlasTriangleGeometry {
2710 vertex_buffer: tg.vertex_buffer.inner.as_core().id,
2711 index_buffer: tg.index_buffer.map(|buf| buf.inner.as_core().id),
2712 transform_buffer: tg.transform_buffer.map(|buf| buf.inner.as_core().id),
2713 size: tg.size,
2714 transform_buffer_offset: tg.transform_buffer_offset,
2715 first_vertex: tg.first_vertex,
2716 vertex_stride: tg.vertex_stride,
2717 first_index: tg.first_index,
2718 }
2719 });
2720 wgc::ray_tracing::BlasGeometries::TriangleGeometries(Box::new(iter))
2721 }
2722 };
2723 wgc::ray_tracing::BlasBuildEntry {
2724 blas_id: e.blas.inner.as_core().id,
2725 geometries,
2726 }
2727 });
2728
2729 let tlas = tlas.into_iter().map(|e| {
2730 let instances = e
2731 .instances
2732 .iter()
2733 .map(|instance: &Option<crate::TlasInstance>| {
2734 instance
2735 .as_ref()
2736 .map(|instance| wgc::ray_tracing::TlasInstance {
2737 blas_id: instance.blas.as_core().id,
2738 transform: &instance.transform,
2739 custom_data: instance.custom_data,
2740 mask: instance.mask,
2741 })
2742 });
2743 wgc::ray_tracing::TlasPackage {
2744 tlas_id: e.inner.as_core().id,
2745 instances: Box::new(instances),
2746 lowest_unmodified: e.lowest_unmodified,
2747 }
2748 });
2749
2750 if let Err(cause) = self
2751 .context
2752 .0
2753 .command_encoder_build_acceleration_structures(self.id, blas, tlas)
2754 {
2755 self.context.handle_error_nolabel(
2756 &self.error_sink,
2757 cause,
2758 "CommandEncoder::build_acceleration_structures_unsafe_tlas",
2759 );
2760 }
2761 }
2762
2763 fn transition_resources<'a>(
2764 &mut self,
2765 buffer_transitions: &mut dyn Iterator<
2766 Item = wgt::BufferTransition<&'a dispatch::DispatchBuffer>,
2767 >,
2768 texture_transitions: &mut dyn Iterator<
2769 Item = wgt::TextureTransition<&'a dispatch::DispatchTexture>,
2770 >,
2771 ) {
2772 let result = self.context.0.command_encoder_transition_resources(
2773 self.id,
2774 buffer_transitions.map(|t| wgt::BufferTransition {
2775 buffer: t.buffer.as_core().id,
2776 state: t.state,
2777 }),
2778 texture_transitions.map(|t| wgt::TextureTransition {
2779 texture: t.texture.as_core().id,
2780 selector: t.selector.clone(),
2781 state: t.state,
2782 }),
2783 );
2784
2785 if let Err(cause) = result {
2786 self.context.handle_error_nolabel(
2787 &self.error_sink,
2788 cause,
2789 "CommandEncoder::transition_resources",
2790 );
2791 }
2792 }
2793}
2794
2795impl Drop for CoreCommandEncoder {
2796 fn drop(&mut self) {
2797 self.context.0.command_encoder_drop(self.id)
2798 }
2799}
2800
// Marker impl: the trait apparently requires no methods for command buffers
// here — a finished command buffer is only handed off for submission.
impl dispatch::CommandBufferInterface for CoreCommandBuffer {}
2802
2803impl Drop for CoreCommandBuffer {
2804 fn drop(&mut self) {
2805 self.context.0.command_buffer_drop(self.id)
2806 }
2807}
2808
2809impl dispatch::ComputePassInterface for CoreComputePass {
2810 fn set_pipeline(&mut self, pipeline: &dispatch::DispatchComputePipeline) {
2811 let pipeline = pipeline.as_core();
2812
2813 if let Err(cause) = self
2814 .context
2815 .0
2816 .compute_pass_set_pipeline(&mut self.pass, pipeline.id)
2817 {
2818 self.context.handle_error(
2819 &self.error_sink,
2820 cause,
2821 self.pass.label(),
2822 "ComputePass::set_pipeline",
2823 );
2824 }
2825 }
2826
2827 fn set_bind_group(
2828 &mut self,
2829 index: u32,
2830 bind_group: Option<&dispatch::DispatchBindGroup>,
2831 offsets: &[crate::DynamicOffset],
2832 ) {
2833 let bg = bind_group.map(|bg| bg.as_core().id);
2834
2835 if let Err(cause) =
2836 self.context
2837 .0
2838 .compute_pass_set_bind_group(&mut self.pass, index, bg, offsets)
2839 {
2840 self.context.handle_error(
2841 &self.error_sink,
2842 cause,
2843 self.pass.label(),
2844 "ComputePass::set_bind_group",
2845 );
2846 }
2847 }
2848
2849 fn set_push_constants(&mut self, offset: u32, data: &[u8]) {
2850 if let Err(cause) =
2851 self.context
2852 .0
2853 .compute_pass_set_push_constants(&mut self.pass, offset, data)
2854 {
2855 self.context.handle_error(
2856 &self.error_sink,
2857 cause,
2858 self.pass.label(),
2859 "ComputePass::set_push_constant",
2860 );
2861 }
2862 }
2863
2864 fn insert_debug_marker(&mut self, label: &str) {
2865 if let Err(cause) =
2866 self.context
2867 .0
2868 .compute_pass_insert_debug_marker(&mut self.pass, label, 0)
2869 {
2870 self.context.handle_error(
2871 &self.error_sink,
2872 cause,
2873 self.pass.label(),
2874 "ComputePass::insert_debug_marker",
2875 );
2876 }
2877 }
2878
2879 fn push_debug_group(&mut self, group_label: &str) {
2880 if let Err(cause) =
2881 self.context
2882 .0
2883 .compute_pass_push_debug_group(&mut self.pass, group_label, 0)
2884 {
2885 self.context.handle_error(
2886 &self.error_sink,
2887 cause,
2888 self.pass.label(),
2889 "ComputePass::push_debug_group",
2890 );
2891 }
2892 }
2893
2894 fn pop_debug_group(&mut self) {
2895 if let Err(cause) = self.context.0.compute_pass_pop_debug_group(&mut self.pass) {
2896 self.context.handle_error(
2897 &self.error_sink,
2898 cause,
2899 self.pass.label(),
2900 "ComputePass::pop_debug_group",
2901 );
2902 }
2903 }
2904
2905 fn write_timestamp(&mut self, query_set: &dispatch::DispatchQuerySet, query_index: u32) {
2906 let query_set = query_set.as_core();
2907
2908 if let Err(cause) =
2909 self.context
2910 .0
2911 .compute_pass_write_timestamp(&mut self.pass, query_set.id, query_index)
2912 {
2913 self.context.handle_error(
2914 &self.error_sink,
2915 cause,
2916 self.pass.label(),
2917 "ComputePass::write_timestamp",
2918 );
2919 }
2920 }
2921
2922 fn begin_pipeline_statistics_query(
2923 &mut self,
2924 query_set: &dispatch::DispatchQuerySet,
2925 query_index: u32,
2926 ) {
2927 let query_set = query_set.as_core();
2928
2929 if let Err(cause) = self.context.0.compute_pass_begin_pipeline_statistics_query(
2930 &mut self.pass,
2931 query_set.id,
2932 query_index,
2933 ) {
2934 self.context.handle_error(
2935 &self.error_sink,
2936 cause,
2937 self.pass.label(),
2938 "ComputePass::begin_pipeline_statistics_query",
2939 );
2940 }
2941 }
2942
2943 fn end_pipeline_statistics_query(&mut self) {
2944 if let Err(cause) = self
2945 .context
2946 .0
2947 .compute_pass_end_pipeline_statistics_query(&mut self.pass)
2948 {
2949 self.context.handle_error(
2950 &self.error_sink,
2951 cause,
2952 self.pass.label(),
2953 "ComputePass::end_pipeline_statistics_query",
2954 );
2955 }
2956 }
2957
2958 fn dispatch_workgroups(&mut self, x: u32, y: u32, z: u32) {
2959 if let Err(cause) = self
2960 .context
2961 .0
2962 .compute_pass_dispatch_workgroups(&mut self.pass, x, y, z)
2963 {
2964 self.context.handle_error(
2965 &self.error_sink,
2966 cause,
2967 self.pass.label(),
2968 "ComputePass::dispatch_workgroups",
2969 );
2970 }
2971 }
2972
2973 fn dispatch_workgroups_indirect(
2974 &mut self,
2975 indirect_buffer: &dispatch::DispatchBuffer,
2976 indirect_offset: crate::BufferAddress,
2977 ) {
2978 let indirect_buffer = indirect_buffer.as_core();
2979
2980 if let Err(cause) = self.context.0.compute_pass_dispatch_workgroups_indirect(
2981 &mut self.pass,
2982 indirect_buffer.id,
2983 indirect_offset,
2984 ) {
2985 self.context.handle_error(
2986 &self.error_sink,
2987 cause,
2988 self.pass.label(),
2989 "ComputePass::dispatch_workgroups_indirect",
2990 );
2991 }
2992 }
2993
2994 fn end(&mut self) {
2995 if let Err(cause) = self.context.0.compute_pass_end(&mut self.pass) {
2996 self.context.handle_error(
2997 &self.error_sink,
2998 cause,
2999 self.pass.label(),
3000 "ComputePass::end",
3001 );
3002 }
3003 }
3004}
3005
3006impl Drop for CoreComputePass {
3007 fn drop(&mut self) {
3008 dispatch::ComputePassInterface::end(self);
3009 }
3010}
3011
// `RenderPassInterface` backed by wgpu-core.
//
// Every method forwards to the matching `wgc::global::Global`
// `render_pass_*` entry point, recording into `self.pass`. Errors are not
// returned; they are reported through `self.error_sink`, tagged with the
// pass's label and the method's name.
impl dispatch::RenderPassInterface for CoreRenderPass {
    fn set_pipeline(&mut self, pipeline: &dispatch::DispatchRenderPipeline) {
        let pipeline = pipeline.as_core();

        if let Err(cause) = self
            .context
            .0
            .render_pass_set_pipeline(&mut self.pass, pipeline.id)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_pipeline",
            );
        }
    }

    fn set_bind_group(
        &mut self,
        index: u32,
        bind_group: Option<&dispatch::DispatchBindGroup>,
        offsets: &[crate::DynamicOffset],
    ) {
        // `None` unbinds the group at `index`.
        let bg = bind_group.map(|bg| bg.as_core().id);

        if let Err(cause) =
            self.context
                .0
                .render_pass_set_bind_group(&mut self.pass, index, bg, offsets)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_bind_group",
            );
        }
    }

    fn set_index_buffer(
        &mut self,
        buffer: &dispatch::DispatchBuffer,
        index_format: crate::IndexFormat,
        offset: crate::BufferAddress,
        size: Option<crate::BufferSize>,
    ) {
        let buffer = buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_set_index_buffer(
            &mut self.pass,
            buffer.id,
            index_format,
            offset,
            size,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_index_buffer",
            );
        }
    }

    fn set_vertex_buffer(
        &mut self,
        slot: u32,
        buffer: &dispatch::DispatchBuffer,
        offset: crate::BufferAddress,
        size: Option<crate::BufferSize>,
    ) {
        let buffer = buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_set_vertex_buffer(
            &mut self.pass,
            slot,
            buffer.id,
            offset,
            size,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_vertex_buffer",
            );
        }
    }

    fn set_push_constants(&mut self, stages: crate::ShaderStages, offset: u32, data: &[u8]) {
        if let Err(cause) =
            self.context
                .0
                .render_pass_set_push_constants(&mut self.pass, stages, offset, data)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_push_constants",
            );
        }
    }

    fn set_blend_constant(&mut self, color: crate::Color) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_set_blend_constant(&mut self.pass, color)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_blend_constant",
            );
        }
    }

    fn set_scissor_rect(&mut self, x: u32, y: u32, width: u32, height: u32) {
        if let Err(cause) =
            self.context
                .0
                .render_pass_set_scissor_rect(&mut self.pass, x, y, width, height)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_scissor_rect",
            );
        }
    }

    fn set_viewport(
        &mut self,
        x: f32,
        y: f32,
        width: f32,
        height: f32,
        min_depth: f32,
        max_depth: f32,
    ) {
        if let Err(cause) = self.context.0.render_pass_set_viewport(
            &mut self.pass,
            x,
            y,
            width,
            height,
            min_depth,
            max_depth,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_viewport",
            );
        }
    }

    fn set_stencil_reference(&mut self, reference: u32) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_set_stencil_reference(&mut self.pass, reference)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_stencil_reference",
            );
        }
    }

    fn draw(&mut self, vertices: Range<u32>, instances: Range<u32>) {
        // The public API takes ranges; wgpu-core takes (count, first) pairs.
        if let Err(cause) = self.context.0.render_pass_draw(
            &mut self.pass,
            vertices.end - vertices.start,
            instances.end - instances.start,
            vertices.start,
            instances.start,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw",
            );
        }
    }

    fn draw_indexed(&mut self, indices: Range<u32>, base_vertex: i32, instances: Range<u32>) {
        if let Err(cause) = self.context.0.render_pass_draw_indexed(
            &mut self.pass,
            indices.end - indices.start,
            instances.end - instances.start,
            indices.start,
            base_vertex,
            instances.start,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_indexed",
            );
        }
    }

    fn draw_mesh_tasks(&mut self, group_count_x: u32, group_count_y: u32, group_count_z: u32) {
        if let Err(cause) = self.context.0.render_pass_draw_mesh_tasks(
            &mut self.pass,
            group_count_x,
            group_count_y,
            group_count_z,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_mesh_tasks",
            );
        }
    }

    fn draw_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_draw_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_indirect",
            );
        }
    }

    fn draw_indexed_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_draw_indexed_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_indexed_indirect",
            );
        }
    }

    fn draw_mesh_tasks_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_draw_mesh_tasks_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_mesh_tasks_indirect",
            );
        }
    }

    fn multi_draw_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_multi_draw_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
            count,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_indirect",
            );
        }
    }

    fn multi_draw_indexed_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_multi_draw_indexed_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
            count,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_indexed_indirect",
            );
        }
    }

    fn multi_draw_mesh_tasks_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_multi_draw_mesh_tasks_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
            count,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_mesh_tasks_indirect",
            );
        }
    }

    fn multi_draw_indirect_count(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count_buffer: &dispatch::DispatchBuffer,
        count_buffer_offset: crate::BufferAddress,
        max_count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();
        let count_buffer = count_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_multi_draw_indirect_count(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
            count_buffer.id,
            count_buffer_offset,
            max_count,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_indirect_count",
            );
        }
    }

    fn multi_draw_indexed_indirect_count(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count_buffer: &dispatch::DispatchBuffer,
        count_buffer_offset: crate::BufferAddress,
        max_count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();
        let count_buffer = count_buffer.as_core();

        if let Err(cause) = self
            .context
            .0
            .render_pass_multi_draw_indexed_indirect_count(
                &mut self.pass,
                indirect_buffer.id,
                indirect_offset,
                count_buffer.id,
                count_buffer_offset,
                max_count,
            )
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_indexed_indirect_count",
            );
        }
    }

    fn multi_draw_mesh_tasks_indirect_count(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count_buffer: &dispatch::DispatchBuffer,
        count_buffer_offset: crate::BufferAddress,
        max_count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();
        let count_buffer = count_buffer.as_core();

        if let Err(cause) = self
            .context
            .0
            .render_pass_multi_draw_mesh_tasks_indirect_count(
                &mut self.pass,
                indirect_buffer.id,
                indirect_offset,
                count_buffer.id,
                count_buffer_offset,
                max_count,
            )
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_mesh_tasks_indirect_count",
            );
        }
    }

    fn insert_debug_marker(&mut self, label: &str) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_insert_debug_marker(&mut self.pass, label, 0)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::insert_debug_marker",
            );
        }
    }

    fn push_debug_group(&mut self, group_label: &str) {
        if let Err(cause) =
            self.context
                .0
                .render_pass_push_debug_group(&mut self.pass, group_label, 0)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::push_debug_group",
            );
        }
    }

    fn pop_debug_group(&mut self) {
        if let Err(cause) = self.context.0.render_pass_pop_debug_group(&mut self.pass) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::pop_debug_group",
            );
        }
    }

    fn write_timestamp(&mut self, query_set: &dispatch::DispatchQuerySet, query_index: u32) {
        let query_set = query_set.as_core();

        if let Err(cause) =
            self.context
                .0
                .render_pass_write_timestamp(&mut self.pass, query_set.id, query_index)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::write_timestamp",
            );
        }
    }

    fn begin_occlusion_query(&mut self, query_index: u32) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_begin_occlusion_query(&mut self.pass, query_index)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::begin_occlusion_query",
            );
        }
    }

    fn end_occlusion_query(&mut self) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_end_occlusion_query(&mut self.pass)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::end_occlusion_query",
            );
        }
    }

    fn begin_pipeline_statistics_query(
        &mut self,
        query_set: &dispatch::DispatchQuerySet,
        query_index: u32,
    ) {
        let query_set = query_set.as_core();

        if let Err(cause) = self.context.0.render_pass_begin_pipeline_statistics_query(
            &mut self.pass,
            query_set.id,
            query_index,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::begin_pipeline_statistics_query",
            );
        }
    }

    fn end_pipeline_statistics_query(&mut self) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_end_pipeline_statistics_query(&mut self.pass)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::end_pipeline_statistics_query",
            );
        }
    }

    fn execute_bundles(
        &mut self,
        render_bundles: &mut dyn Iterator<Item = &dispatch::DispatchRenderBundle>,
    ) {
        // SmallVec keeps the common few-bundle case off the heap.
        let temp_render_bundles = render_bundles
            .map(|rb| rb.as_core().id)
            .collect::<SmallVec<[_; 4]>>();
        if let Err(cause) = self
            .context
            .0
            .render_pass_execute_bundles(&mut self.pass, &temp_render_bundles)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::execute_bundles",
            );
        }
    }

    fn end(&mut self) {
        if let Err(cause) = self.context.0.render_pass_end(&mut self.pass) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::end",
            );
        }
    }
}
3620
3621impl Drop for CoreRenderPass {
3622 fn drop(&mut self) {
3623 dispatch::RenderPassInterface::end(self);
3624 }
3625}
3626
// `RenderBundleEncoderInterface` backed by wgpu-core.
//
// Unlike pass recording above, these forward to the `bundle_ffi` free
// functions, which record infallibly; validation happens in `finish`.
impl dispatch::RenderBundleEncoderInterface for CoreRenderBundleEncoder {
    fn set_pipeline(&mut self, pipeline: &dispatch::DispatchRenderPipeline) {
        let pipeline = pipeline.as_core();

        wgpu_render_bundle_set_pipeline(&mut self.encoder, pipeline.id)
    }

    fn set_bind_group(
        &mut self,
        index: u32,
        bind_group: Option<&dispatch::DispatchBindGroup>,
        offsets: &[crate::DynamicOffset],
    ) {
        // `None` unbinds the group at `index`.
        let bg = bind_group.map(|bg| bg.as_core().id);

        // SAFETY: `offsets.as_ptr()` and `offsets.len()` describe a valid,
        // live slice for the duration of the call.
        unsafe {
            wgpu_render_bundle_set_bind_group(
                &mut self.encoder,
                index,
                bg,
                offsets.as_ptr(),
                offsets.len(),
            )
        }
    }

    fn set_index_buffer(
        &mut self,
        buffer: &dispatch::DispatchBuffer,
        index_format: crate::IndexFormat,
        offset: crate::BufferAddress,
        size: Option<crate::BufferSize>,
    ) {
        let buffer = buffer.as_core();

        self.encoder
            .set_index_buffer(buffer.id, index_format, offset, size)
    }

    fn set_vertex_buffer(
        &mut self,
        slot: u32,
        buffer: &dispatch::DispatchBuffer,
        offset: crate::BufferAddress,
        size: Option<crate::BufferSize>,
    ) {
        let buffer = buffer.as_core();

        wgpu_render_bundle_set_vertex_buffer(&mut self.encoder, slot, buffer.id, offset, size)
    }

    fn set_push_constants(&mut self, stages: crate::ShaderStages, offset: u32, data: &[u8]) {
        // SAFETY: `data.as_ptr()` points to `data.len()` valid bytes for the
        // duration of the call; the length is converted to the FFI's integer
        // type. The `unwrap` panics only if the slice length overflows that
        // type, which would itself be invalid push-constant data.
        unsafe {
            wgpu_render_bundle_set_push_constants(
                &mut self.encoder,
                stages,
                offset,
                data.len().try_into().unwrap(),
                data.as_ptr(),
            )
        }
    }

    fn draw(&mut self, vertices: Range<u32>, instances: Range<u32>) {
        // Ranges are converted to (count, first) pairs for wgpu-core.
        wgpu_render_bundle_draw(
            &mut self.encoder,
            vertices.end - vertices.start,
            instances.end - instances.start,
            vertices.start,
            instances.start,
        )
    }

    fn draw_indexed(&mut self, indices: Range<u32>, base_vertex: i32, instances: Range<u32>) {
        wgpu_render_bundle_draw_indexed(
            &mut self.encoder,
            indices.end - indices.start,
            instances.end - instances.start,
            indices.start,
            base_vertex,
            instances.start,
        )
    }

    fn draw_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        wgpu_render_bundle_draw_indirect(&mut self.encoder, indirect_buffer.id, indirect_offset)
    }

    fn draw_indexed_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        wgpu_render_bundle_draw_indexed_indirect(
            &mut self.encoder,
            indirect_buffer.id,
            indirect_offset,
        )
    }

    fn finish(self, desc: &crate::RenderBundleDescriptor<'_>) -> dispatch::DispatchRenderBundle
    where
        Self: Sized,
    {
        let (id, error) = self.context.0.render_bundle_encoder_finish(
            self.encoder,
            &desc.map_label(|l| l.map(Borrowed)),
            None,
        );
        // NOTE: unlike pass methods, a finish error is treated as fatal
        // rather than sent to an error sink.
        if let Some(err) = error {
            self.context
                .handle_error_fatal(err, "RenderBundleEncoder::finish");
        }
        CoreRenderBundle { id }.into()
    }
}
3751
// Marker impl: the dispatch trait requires no methods for a finished bundle.
impl dispatch::RenderBundleInterface for CoreRenderBundle {}
3753
3754impl dispatch::SurfaceInterface for CoreSurface {
3755 fn get_capabilities(&self, adapter: &dispatch::DispatchAdapter) -> wgt::SurfaceCapabilities {
3756 let adapter = adapter.as_core();
3757
3758 self.context
3759 .0
3760 .surface_get_capabilities(self.id, adapter.id)
3761 .unwrap_or_default()
3762 }
3763
3764 fn configure(&self, device: &dispatch::DispatchDevice, config: &crate::SurfaceConfiguration) {
3765 let device = device.as_core();
3766
3767 let error = self.context.0.surface_configure(self.id, device.id, config);
3768 if let Some(e) = error {
3769 self.context
3770 .handle_error_nolabel(&device.error_sink, e, "Surface::configure");
3771 } else {
3772 *self.configured_device.lock() = Some(device.id);
3773 *self.error_sink.lock() = Some(device.error_sink.clone());
3774 }
3775 }
3776
3777 fn get_current_texture(
3778 &self,
3779 ) -> (
3780 Option<dispatch::DispatchTexture>,
3781 crate::SurfaceStatus,
3782 dispatch::DispatchSurfaceOutputDetail,
3783 ) {
3784 let output_detail = CoreSurfaceOutputDetail {
3785 context: self.context.clone(),
3786 surface_id: self.id,
3787 }
3788 .into();
3789
3790 match self.context.0.surface_get_current_texture(self.id, None) {
3791 Ok(wgc::present::SurfaceOutput {
3792 status,
3793 texture: texture_id,
3794 }) => {
3795 let data = texture_id
3796 .map(|id| CoreTexture {
3797 context: self.context.clone(),
3798 id,
3799 error_sink: Arc::new(Mutex::new(ErrorSinkRaw::new())),
3800 })
3801 .map(Into::into);
3802
3803 (data, status, output_detail)
3804 }
3805 Err(err) => {
3806 let error_sink = self.error_sink.lock();
3807 match error_sink.as_ref() {
3808 Some(error_sink) => {
3809 self.context.handle_error_nolabel(
3810 error_sink,
3811 err,
3812 "Surface::get_current_texture_view",
3813 );
3814 (None, crate::SurfaceStatus::Unknown, output_detail)
3815 }
3816 None => self
3817 .context
3818 .handle_error_fatal(err, "Surface::get_current_texture_view"),
3819 }
3820 }
3821 }
3822 }
3823}
3824
impl Drop for CoreSurface {
    fn drop(&mut self) {
        // Release the wgpu-core surface handle associated with this wrapper.
        self.context.0.surface_drop(self.id)
    }
}
3830
3831impl dispatch::SurfaceOutputDetailInterface for CoreSurfaceOutputDetail {
3832 fn present(&self) {
3833 match self.context.0.surface_present(self.surface_id) {
3834 Ok(_status) => (),
3835 Err(err) => self.context.handle_error_fatal(err, "Surface::present"),
3836 }
3837 }
3838
3839 fn texture_discard(&self) {
3840 match self.context.0.surface_texture_discard(self.surface_id) {
3841 Ok(_status) => (),
3842 Err(err) => self
3843 .context
3844 .handle_error_fatal(err, "Surface::discard_texture"),
3845 }
3846 }
3847}
impl Drop for CoreSurfaceOutputDetail {
    fn drop(&mut self) {
        // Intentionally empty: the frame's lifecycle is driven explicitly via
        // `present` / `texture_discard` on this type.
        // NOTE(review): confirm no wgpu-core cleanup is needed when a detail
        // handle is dropped without either call being made.
    }
}
3855
3856impl dispatch::QueueWriteBufferInterface for CoreQueueWriteBuffer {
3857 fn slice(&self) -> &[u8] {
3858 panic!()
3859 }
3860
3861 #[inline]
3862 fn slice_mut(&mut self) -> &mut [u8] {
3863 self.mapping.slice_mut()
3864 }
3865}
impl Drop for CoreQueueWriteBuffer {
    fn drop(&mut self) {
        // Intentionally empty.
        // NOTE(review): presumably the `mapping` (and any buffer field) clean
        // up through their own Drop impls — confirm nothing more is required.
    }
}
3873
/// Byte-slice views over a mapped buffer range held as a raw `ptr` + `size`.
impl dispatch::BufferMappedRangeInterface for CoreBufferMappedRange {
    #[inline]
    fn slice(&self) -> &[u8] {
        // SAFETY: `self.ptr`/`self.size` describe the mapped range this struct
        // wraps; the mapping is assumed to remain valid and unaliased for
        // `self`'s lifetime. NOTE(review): confirm the mapping cannot be
        // invalidated (e.g. by unmap) while this wrapper is alive.
        unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.size) }
    }

    #[inline]
    fn slice_mut(&mut self) -> &mut [u8] {
        // SAFETY: same range invariant as `slice`; `&mut self` guarantees
        // exclusive access through this wrapper for the borrow's duration.
        unsafe { slice::from_raw_parts_mut(self.ptr.as_ptr(), self.size) }
    }

    // Only meaningful on the WebGPU (wasm) backend; unreachable here.
    #[cfg(webgpu)]
    fn as_uint8array(&self) -> &js_sys::Uint8Array {
        panic!("Only available on WebGPU")
    }
}