1use alloc::{
2 borrow::Cow::{self, Borrowed},
3 boxed::Box,
4 format,
5 string::{String, ToString as _},
6 sync::Arc,
7 vec,
8 vec::Vec,
9};
10use core::{
11 error::Error,
12 fmt,
13 future::ready,
14 ops::{Deref, Range},
15 pin::Pin,
16 ptr::NonNull,
17 slice,
18};
19use hashbrown::HashMap;
20
21use arrayvec::ArrayVec;
22use smallvec::SmallVec;
23use wgc::{
24 command::bundle_ffi::*, error::ContextErrorSource, pipeline::CreateShaderModuleError,
25 resource::BlasPrepareCompactResult,
26};
27use wgt::{
28 error::{ErrorType, WebGpuError},
29 WasmNotSendSync,
30};
31
32use crate::{
33 api,
34 dispatch::{self, BlasCompactCallback, BufferMappedRangeInterface},
35 BindingResource, Blas, BufferBinding, BufferDescriptor, CompilationInfo, CompilationMessage,
36 CompilationMessageType, ErrorSource, Features, Label, LoadOp, MapMode, Operations,
37 ShaderSource, SurfaceTargetUnsafe, TextureDescriptor, Tlas, WriteOnly,
38};
39use crate::{dispatch::DispatchAdapter, util::Mutex};
40
41mod thread_id;
42
/// The `wgpu-core`-backed implementation of the wgpu context.
///
/// Cheap to clone: it is a thin wrapper around an `Arc<wgc::global::Global>`,
/// so clones share the same underlying hub of resources.
#[derive(Clone)]
pub struct ContextWgpuCore(Arc<wgc::global::Global>);
45
impl Drop for ContextWgpuCore {
    fn drop(&mut self) {
        // Intentionally empty: the inner `Arc<Global>` handles teardown when the
        // last clone is dropped.
        // NOTE(review): this impl currently adds nothing over the default drop —
        // confirm whether it is a deliberate placeholder for future cleanup.
    }
}
51
52impl fmt::Debug for ContextWgpuCore {
53 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
54 f.debug_struct("ContextWgpuCore")
55 .field("type", &"Native")
56 .finish()
57 }
58}
59
impl ContextWgpuCore {
    /// Wraps an existing `wgpu-hal` instance of backend `A` in a new context.
    ///
    /// # Safety
    /// Callers must uphold the contract of
    /// `wgc::global::Global::from_hal_instance` for the provided instance.
    pub unsafe fn from_hal_instance<A: hal::Api>(hal_instance: A::Instance) -> Self {
        Self(unsafe {
            Arc::new(wgc::global::Global::from_hal_instance::<A>(
                "wgpu",
                hal_instance,
            ))
        })
    }

    /// Returns the underlying `wgpu-hal` instance for backend `A`, if that
    /// backend is active.
    ///
    /// # Safety
    /// See `wgc::global::Global::instance_as_hal`.
    pub unsafe fn instance_as_hal<A: hal::Api>(&self) -> Option<&A::Instance> {
        unsafe { self.0.instance_as_hal::<A>() }
    }

    /// Builds a context from an already-constructed `wgpu-core` instance.
    ///
    /// # Safety
    /// See `wgc::global::Global::from_instance`.
    pub unsafe fn from_core_instance(core_instance: wgc::instance::Instance) -> Self {
        Self(unsafe { Arc::new(wgc::global::Global::from_instance(core_instance)) })
    }

    /// Lists the ids of all adapters available on the requested backends.
    #[cfg(wgpu_core)]
    pub fn enumerate_adapters(&self, backends: wgt::Backends) -> Vec<wgc::id::AdapterId> {
        self.0.enumerate_adapters(backends)
    }

    /// Registers a `wgpu-hal` adapter with this context and returns its id.
    ///
    /// # Safety
    /// See `wgc::global::Global::create_adapter_from_hal`.
    pub unsafe fn create_adapter_from_hal<A: hal::Api>(
        &self,
        hal_adapter: hal::ExposedAdapter<A>,
    ) -> wgc::id::AdapterId {
        unsafe { self.0.create_adapter_from_hal(hal_adapter.into(), None) }
    }

    /// Borrows the `wgpu-hal` adapter behind `adapter`, if it belongs to
    /// backend `A`.
    ///
    /// # Safety
    /// See `wgc::global::Global::adapter_as_hal`.
    pub unsafe fn adapter_as_hal<A: hal::Api>(
        &self,
        adapter: &CoreAdapter,
    ) -> Option<impl Deref<Target = A::Adapter> + WasmNotSendSync> {
        unsafe { self.0.adapter_as_hal::<A>(adapter.id) }
    }

    /// Borrows the `wgpu-hal` buffer behind `buffer`, if it belongs to
    /// backend `A`.
    ///
    /// # Safety
    /// See `wgc::global::Global::buffer_as_hal`.
    pub unsafe fn buffer_as_hal<A: hal::Api>(
        &self,
        buffer: &CoreBuffer,
    ) -> Option<impl Deref<Target = A::Buffer>> {
        unsafe { self.0.buffer_as_hal::<A>(buffer.id) }
    }

    /// Registers an already-open `wgpu-hal` device and returns wrapped
    /// device/queue handles that share a freshly created error sink.
    ///
    /// # Errors
    /// Propagates `wgc`'s device-creation failure as
    /// `crate::RequestDeviceError`.
    ///
    /// # Safety
    /// See `wgc::global::Global::create_device_from_hal`.
    pub unsafe fn create_device_from_hal<A: hal::Api>(
        &self,
        adapter: &CoreAdapter,
        hal_device: hal::OpenDevice<A>,
        desc: &crate::DeviceDescriptor<'_>,
    ) -> Result<(CoreDevice, CoreQueue), crate::RequestDeviceError> {
        let (device_id, queue_id) = unsafe {
            self.0.create_device_from_hal(
                adapter.id,
                hal_device.into(),
                &desc.map_label(|l| l.map(Borrowed)),
                None,
                None,
            )
        }?;
        // The device and its queue share one error sink so errors raised on
        // either are routed through the same scope stack / handler.
        let error_sink = Arc::new(Mutex::new(ErrorSinkRaw::new()));
        let device = CoreDevice {
            context: self.clone(),
            id: device_id,
            error_sink: error_sink.clone(),
            features: desc.required_features,
        };
        let queue = CoreQueue {
            context: self.clone(),
            id: queue_id,
            error_sink,
        };
        Ok((device, queue))
    }

    /// Wraps a raw `wgpu-hal` texture in a `CoreTexture` owned by `device`.
    /// Creation errors are reported through the device's error sink rather
    /// than returned.
    ///
    /// # Safety
    /// See `wgc::global::Global::create_texture_from_hal`.
    pub unsafe fn create_texture_from_hal<A: hal::Api>(
        &self,
        hal_texture: A::Texture,
        device: &CoreDevice,
        desc: &TextureDescriptor<'_>,
    ) -> CoreTexture {
        let descriptor = desc.map_label_and_view_formats(|l| l.map(Borrowed), |v| v.to_vec());
        let (id, error) = unsafe {
            self.0
                .create_texture_from_hal(Box::new(hal_texture), device.id, &descriptor, None)
        };
        if let Some(cause) = error {
            self.handle_error(
                &device.error_sink,
                cause,
                desc.label,
                "Device::create_texture_from_hal",
            );
        }
        CoreTexture {
            context: self.clone(),
            id,
            error_sink: Arc::clone(&device.error_sink),
        }
    }

    /// Wraps a raw `wgpu-hal` buffer in a `CoreBuffer` owned by `device`.
    /// Creation errors are reported through the device's error sink rather
    /// than returned.
    ///
    /// # Safety
    /// See `wgc::global::Global::create_buffer_from_hal`.
    pub unsafe fn create_buffer_from_hal<A: hal::Api>(
        &self,
        hal_buffer: A::Buffer,
        device: &CoreDevice,
        desc: &BufferDescriptor<'_>,
    ) -> CoreBuffer {
        let (id, error) = unsafe {
            self.0.create_buffer_from_hal::<A>(
                hal_buffer,
                device.id,
                &desc.map_label(|l| l.map(Borrowed)),
                None,
            )
        };
        if let Some(cause) = error {
            self.handle_error(
                &device.error_sink,
                cause,
                desc.label,
                "Device::create_buffer_from_hal",
            );
        }
        CoreBuffer {
            context: self.clone(),
            id,
            error_sink: Arc::clone(&device.error_sink),
        }
    }

    /// Borrows the `wgpu-hal` device behind `device`, if backend `A` matches.
    ///
    /// # Safety
    /// See `wgc::global::Global::device_as_hal`.
    pub unsafe fn device_as_hal<A: hal::Api>(
        &self,
        device: &CoreDevice,
    ) -> Option<impl Deref<Target = A::Device>> {
        unsafe { self.0.device_as_hal::<A>(device.id) }
    }

    /// Borrows the `wgpu-hal` surface behind `surface`, if backend `A` matches.
    ///
    /// # Safety
    /// See `wgc::global::Global::surface_as_hal`.
    pub unsafe fn surface_as_hal<A: hal::Api>(
        &self,
        surface: &CoreSurface,
    ) -> Option<impl Deref<Target = A::Surface>> {
        unsafe { self.0.surface_as_hal::<A>(surface.id) }
    }

    /// Borrows the `wgpu-hal` texture behind `texture`, if backend `A` matches.
    ///
    /// # Safety
    /// See `wgc::global::Global::texture_as_hal`.
    pub unsafe fn texture_as_hal<A: hal::Api>(
        &self,
        texture: &CoreTexture,
    ) -> Option<impl Deref<Target = A::Texture>> {
        unsafe { self.0.texture_as_hal::<A>(texture.id) }
    }

    /// Borrows the `wgpu-hal` texture view behind `texture_view`, if backend
    /// `A` matches.
    ///
    /// # Safety
    /// See `wgc::global::Global::texture_view_as_hal`.
    pub unsafe fn texture_view_as_hal<A: hal::Api>(
        &self,
        texture_view: &CoreTextureView,
    ) -> Option<impl Deref<Target = A::TextureView>> {
        unsafe { self.0.texture_view_as_hal::<A>(texture_view.id) }
    }

    /// Runs `hal_command_encoder_callback` with mutable access to the raw
    /// `wgpu-hal` command encoder (or `None` if backend `A` does not match).
    ///
    /// # Safety
    /// See `wgc::global::Global::command_encoder_as_hal_mut`.
    pub unsafe fn command_encoder_as_hal_mut<
        A: hal::Api,
        F: FnOnce(Option<&mut A::CommandEncoder>) -> R,
        R,
    >(
        &self,
        command_encoder: &CoreCommandEncoder,
        hal_command_encoder_callback: F,
    ) -> R {
        unsafe {
            self.0.command_encoder_as_hal_mut::<A, F, R>(
                command_encoder.id,
                hal_command_encoder_callback,
            )
        }
    }

    /// Borrows the `wgpu-hal` acceleration structure behind `blas`, if backend
    /// `A` matches.
    ///
    /// # Safety
    /// See `wgc::global::Global::blas_as_hal`.
    pub unsafe fn blas_as_hal<A: hal::Api>(
        &self,
        blas: &CoreBlas,
    ) -> Option<impl Deref<Target = A::AccelerationStructure>> {
        unsafe { self.0.blas_as_hal::<A>(blas.id) }
    }

    /// Borrows the `wgpu-hal` acceleration structure behind `tlas`, if backend
    /// `A` matches.
    ///
    /// # Safety
    /// See `wgc::global::Global::tlas_as_hal`.
    pub unsafe fn tlas_as_hal<A: hal::Api>(
        &self,
        tlas: &CoreTlas,
    ) -> Option<impl Deref<Target = A::AccelerationStructure>> {
        unsafe { self.0.tlas_as_hal::<A>(tlas.id) }
    }

    /// Produces a diagnostic report of every resource tracked by the global.
    pub fn generate_report(&self) -> wgc::global::GlobalReport {
        self.0.generate_report()
    }

    /// Slow path shared by all error reporting: wraps `source` in a
    /// `ContextError`, converts it into the matching `crate::Error` variant,
    /// and pushes it into the sink.
    ///
    /// Marked `#[cold]`/`#[inline(never)]` to keep this off the hot path of
    /// every fallible call.
    #[cold]
    #[track_caller]
    #[inline(never)]
    fn handle_error_inner(
        &self,
        sink_mutex: &Mutex<ErrorSinkRaw>,
        error_type: ErrorType,
        source: ContextErrorSource,
        label: Label<'_>,
        fn_ident: &'static str,
    ) {
        let source: ErrorSource = Box::new(wgc::error::ContextError {
            fn_ident,
            source,
            label: label.unwrap_or_default().to_string(),
        });
        let final_error_handling = {
            let mut sink = sink_mutex.lock();
            // Formatting the full error tree is expensive; defer it so the
            // OutOfMemory variant (which carries no description) skips it.
            let description = || self.format_error(&*source);
            let error = match error_type {
                ErrorType::Internal => {
                    let description = description();
                    crate::Error::Internal {
                        source,
                        description,
                    }
                }
                ErrorType::OutOfMemory => crate::Error::OutOfMemory { source },
                ErrorType::Validation => {
                    let description = description();
                    crate::Error::Validation {
                        source,
                        description,
                    }
                }
                // Device-lost errors are not routed through the sink here.
                ErrorType::DeviceLost => return,
            };
            sink.handle_error_or_return_handler(error)
        };

        // Run any uncaptured-error handler *after* releasing the sink lock, so
        // the handler may itself touch the sink without deadlocking.
        if let Some(f) = final_error_handling {
            f();
        }
    }

    /// Reports `source` (tagged with the resource `label`) through the sink.
    #[inline]
    #[track_caller]
    fn handle_error(
        &self,
        sink_mutex: &Mutex<ErrorSinkRaw>,
        source: impl WebGpuError + WasmNotSendSync + 'static,
        label: Label<'_>,
        fn_ident: &'static str,
    ) {
        let error_type = source.webgpu_error_type();
        self.handle_error_inner(sink_mutex, error_type, Box::new(source), label, fn_ident)
    }

    /// Like [`Self::handle_error`] for call sites that have no label.
    #[inline]
    #[track_caller]
    fn handle_error_nolabel(
        &self,
        sink_mutex: &Mutex<ErrorSinkRaw>,
        source: impl WebGpuError + WasmNotSendSync + 'static,
        fn_ident: &'static str,
    ) {
        let error_type = source.webgpu_error_type();
        self.handle_error_inner(sink_mutex, error_type, Box::new(source), None, fn_ident)
    }

    /// For errors that cannot be surfaced to the user: formats the cause and
    /// panics.
    ///
    /// # Panics
    /// Always.
    #[track_caller]
    #[cold]
    fn handle_error_fatal(
        &self,
        cause: impl Error + WasmNotSendSync + 'static,
        operation: &'static str,
    ) -> ! {
        panic!("Error in {operation}: {f}", f = self.format_error(&cause));
    }

    /// Formats `err` and its `source()` chain as an indented "Caused by" tree;
    /// `wgc::error::MultiError` is expanded into one subtree per inner error.
    #[inline(never)]
    fn format_error(&self, err: &(dyn Error + 'static)) -> String {
        let mut output = String::new();
        let mut level = 1;

        // Recursively prints `e` at the current indent `level`, descending
        // into each `source()` one level deeper.
        fn print_tree(output: &mut String, level: &mut usize, e: &(dyn Error + 'static)) {
            let mut print = |e: &(dyn Error + 'static)| {
                use core::fmt::Write;
                writeln!(output, "{}{}", " ".repeat(*level * 2), e).unwrap();

                if let Some(e) = e.source() {
                    *level += 1;
                    print_tree(output, level, e);
                    *level -= 1;
                }
            };
            if let Some(multi) = e.downcast_ref::<wgc::error::MultiError>() {
                for e in multi.errors() {
                    print(e);
                }
            } else {
                print(e);
            }
        }

        print_tree(&mut output, &mut level, err);

        format!("Validation Error\n\nCaused by:\n{output}")
    }

    /// Borrows the `wgpu-hal` queue behind `queue`, if backend `A` matches.
    ///
    /// # Safety
    /// See `wgc::global::Global::queue_as_hal`.
    pub unsafe fn queue_as_hal<A: hal::Api>(
        &self,
        queue: &CoreQueue,
    ) -> Option<impl Deref<Target = A::Queue> + WasmNotSendSync> {
        unsafe { self.0.queue_as_hal::<A>(queue.id) }
    }
}
382
383fn map_buffer_copy_view(
384 view: crate::TexelCopyBufferInfo<'_>,
385) -> wgt::TexelCopyBufferInfo<wgc::id::BufferId> {
386 wgt::TexelCopyBufferInfo {
387 buffer: view.buffer.inner.as_core().id,
388 layout: view.layout,
389 }
390}
391
392fn map_texture_copy_view(
393 view: crate::TexelCopyTextureInfo<'_>,
394) -> wgt::TexelCopyTextureInfo<wgc::id::TextureId> {
395 wgt::TexelCopyTextureInfo {
396 texture: view.texture.inner.as_core().id,
397 mip_level: view.mip_level,
398 origin: view.origin,
399 aspect: view.aspect,
400 }
401}
402
403#[cfg_attr(not(webgl), expect(unused))]
404fn map_texture_tagged_copy_view(
405 view: crate::CopyExternalImageDestInfo<&api::Texture>,
406) -> wgt::CopyExternalImageDestInfo<wgc::id::TextureId> {
407 wgt::CopyExternalImageDestInfo {
408 texture: view.texture.inner.as_core().id,
409 mip_level: view.mip_level,
410 origin: view.origin,
411 aspect: view.aspect,
412 color_space: view.color_space,
413 premultiplied_alpha: view.premultiplied_alpha,
414 }
415}
416
417fn map_load_op<V: Copy>(load: &LoadOp<V>) -> LoadOp<Option<V>> {
418 match *load {
419 LoadOp::Clear(clear_value) => LoadOp::Clear(Some(clear_value)),
420 LoadOp::DontCare(token) => LoadOp::DontCare(token),
421 LoadOp::Load => LoadOp::Load,
422 }
423}
424
425fn map_pass_channel<V: Copy>(ops: Option<&Operations<V>>) -> wgc::command::PassChannel<Option<V>> {
426 match ops {
427 Some(&Operations { load, store }) => wgc::command::PassChannel {
428 load_op: Some(map_load_op(&load)),
429 store_op: Some(store),
430 read_only: false,
431 },
432 None => wgc::command::PassChannel {
433 load_op: None,
434 store_op: None,
435 read_only: true,
436 },
437 }
438}
439
/// wgpu-core-backed surface handle.
#[derive(Debug)]
pub struct CoreSurface {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::SurfaceId,
    // Device this surface was configured against; `None` until configured.
    configured_device: Mutex<Option<wgc::id::DeviceId>>,
    // Error sink taken from the configuring device; `None` until configured.
    error_sink: Mutex<Option<ErrorSink>>,
}
451
/// wgpu-core-backed adapter handle.
#[derive(Debug)]
pub struct CoreAdapter {
    pub(crate) context: ContextWgpuCore,
    pub(crate) id: wgc::id::AdapterId,
}
457
/// wgpu-core-backed device handle.
#[derive(Debug)]
pub struct CoreDevice {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::DeviceId,
    // Shared with the device's queue and every resource created from it.
    error_sink: ErrorSink,
    // Features requested at device creation; consulted when building bind groups.
    features: Features,
}
465
/// wgpu-core-backed buffer handle.
#[derive(Debug)]
pub struct CoreBuffer {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BufferId,
    error_sink: ErrorSink,
}
472
/// wgpu-core-backed shader module, with the compilation messages captured at
/// creation time.
#[derive(Debug)]
pub struct CoreShaderModule {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::ShaderModuleId,
    compilation_info: CompilationInfo,
}
479
/// wgpu-core-backed bind group layout handle.
#[derive(Debug)]
pub struct CoreBindGroupLayout {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BindGroupLayoutId,
}
485
/// wgpu-core-backed bind group handle.
#[derive(Debug)]
pub struct CoreBindGroup {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BindGroupId,
}
491
/// wgpu-core-backed texture handle.
#[derive(Debug)]
pub struct CoreTexture {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::TextureId,
    error_sink: ErrorSink,
}
498
/// wgpu-core-backed texture view handle.
#[derive(Debug)]
pub struct CoreTextureView {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::TextureViewId,
}
504
/// wgpu-core-backed external texture handle.
#[derive(Debug)]
pub struct CoreExternalTexture {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::ExternalTextureId,
}
510
/// wgpu-core-backed sampler handle.
#[derive(Debug)]
pub struct CoreSampler {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::SamplerId,
}
516
/// wgpu-core-backed query set handle.
#[derive(Debug)]
pub struct CoreQuerySet {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::QuerySetId,
}
522
/// wgpu-core-backed pipeline layout handle.
#[derive(Debug)]
pub struct CorePipelineLayout {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::PipelineLayoutId,
}
528
/// wgpu-core-backed pipeline cache handle.
#[derive(Debug)]
pub struct CorePipelineCache {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::PipelineCacheId,
}
534
/// wgpu-core-backed command buffer handle.
#[derive(Debug)]
pub struct CoreCommandBuffer {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::CommandBufferId,
}
540
/// wgpu-core-backed render bundle encoder. Holds the encoder by value (it is
/// not an id-tracked resource) plus a comparison identifier.
#[derive(Debug)]
pub struct CoreRenderBundleEncoder {
    pub(crate) context: ContextWgpuCore,
    encoder: wgc::command::RenderBundleEncoder,
    id: crate::cmp::Identifier,
}
547
/// wgpu-core-backed render bundle handle.
#[derive(Debug)]
pub struct CoreRenderBundle {
    context: ContextWgpuCore,
    id: wgc::id::RenderBundleId,
}
553
/// wgpu-core-backed queue handle; shares its error sink with its device.
#[derive(Debug)]
pub struct CoreQueue {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::QueueId,
    error_sink: ErrorSink,
}
560
/// wgpu-core-backed compute pipeline handle.
#[derive(Debug)]
pub struct CoreComputePipeline {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::ComputePipelineId,
    error_sink: ErrorSink,
}
567
/// wgpu-core-backed render pipeline handle.
#[derive(Debug)]
pub struct CoreRenderPipeline {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::RenderPipelineId,
    error_sink: ErrorSink,
}
574
/// wgpu-core-backed compute pass. Holds the pass by value plus a comparison
/// identifier.
#[derive(Debug)]
pub struct CoreComputePass {
    pub(crate) context: ContextWgpuCore,
    pass: wgc::command::ComputePass,
    error_sink: ErrorSink,
    id: crate::cmp::Identifier,
}
582
/// wgpu-core-backed render pass. Holds the pass by value plus a comparison
/// identifier.
#[derive(Debug)]
pub struct CoreRenderPass {
    pub(crate) context: ContextWgpuCore,
    pass: wgc::command::RenderPass,
    error_sink: ErrorSink,
    id: crate::cmp::Identifier,
}
590
/// wgpu-core-backed command encoder handle.
#[derive(Debug)]
pub struct CoreCommandEncoder {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::CommandEncoderId,
    error_sink: ErrorSink,
}
597
/// wgpu-core-backed bottom-level acceleration structure handle.
#[derive(Debug)]
pub struct CoreBlas {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BlasId,
    error_sink: ErrorSink,
}
604
/// wgpu-core-backed top-level acceleration structure handle.
#[derive(Debug)]
pub struct CoreTlas {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::TlasId,
}
611
/// Per-frame bookkeeping attached to a surface texture acquisition.
#[derive(Debug)]
pub struct CoreSurfaceOutputDetail {
    context: ContextWgpuCore,
    surface_id: wgc::id::SurfaceId,
    error_sink: ErrorSink,
}
618
/// Shared, lockable error sink: one per device, cloned into every child resource.
type ErrorSink = Arc<Mutex<ErrorSinkRaw>>;
620
/// One entry of a WebGPU-style error-scope stack: the filter that was pushed
/// and the first matching error captured while the scope was open.
struct ErrorScope {
    error: Option<crate::Error>,
    filter: crate::ErrorFilter,
}
625
/// Backing state of an [`ErrorSink`]: per-thread error-scope stacks plus an
/// optional handler for errors no scope captures.
struct ErrorSinkRaw {
    scopes: HashMap<thread_id::ThreadId, Vec<ErrorScope>>,
    uncaptured_handler: Option<Arc<dyn crate::UncapturedErrorHandler>>,
}
630
631impl ErrorSinkRaw {
632 fn new() -> ErrorSinkRaw {
633 ErrorSinkRaw {
634 scopes: HashMap::new(),
635 uncaptured_handler: None,
636 }
637 }
638
639 #[track_caller]
649 #[must_use]
650 fn handle_error_or_return_handler(&mut self, err: crate::Error) -> Option<impl FnOnce()> {
651 let filter = match err {
652 crate::Error::OutOfMemory { .. } => crate::ErrorFilter::OutOfMemory,
653 crate::Error::Validation { .. } => crate::ErrorFilter::Validation,
654 crate::Error::Internal { .. } => crate::ErrorFilter::Internal,
655 };
656 let thread_id = thread_id::ThreadId::current();
657 let scopes = self.scopes.entry(thread_id).or_default();
658 match scopes.iter_mut().rev().find(|scope| scope.filter == filter) {
659 Some(scope) => {
660 if scope.error.is_none() {
661 scope.error = Some(err);
662 }
663 None
664 }
665 None => {
666 if let Some(custom_handler) = &self.uncaptured_handler {
667 let custom_handler = Arc::clone(custom_handler);
668 Some(move || (custom_handler)(err))
669 } else {
670 default_error_handler(err)
672 }
673 }
674 }
675 }
676}
677
678impl fmt::Debug for ErrorSinkRaw {
679 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
680 write!(f, "ErrorSink")
681 }
682}
683
/// Fallback for uncaptured errors when no handler is installed: logs a note
/// and aborts via panic.
///
/// # Panics
/// Always.
#[track_caller]
fn default_error_handler(err: crate::Error) -> ! {
    log::error!("Handling wgpu errors as fatal by default");
    panic!("wgpu error: {err}\n");
}
689
impl From<CreateShaderModuleError> for CompilationInfo {
    /// Converts a shader-module creation error into WebGPU compilation info.
    ///
    /// Frontend parse/validation errors carry structured diagnostics and use
    /// their own conversions; device loss/generation failures yield no
    /// messages; anything else becomes a single error message with no source
    /// location.
    fn from(value: CreateShaderModuleError) -> Self {
        match value {
            #[cfg(feature = "wgsl")]
            CreateShaderModuleError::Parsing(v) => v.into(),
            #[cfg(feature = "glsl")]
            CreateShaderModuleError::ParsingGlsl(v) => v.into(),
            #[cfg(feature = "spirv")]
            CreateShaderModuleError::ParsingSpirV(v) => v.into(),
            CreateShaderModuleError::Validation(v) => v.into(),
            CreateShaderModuleError::Device(_) | CreateShaderModuleError::Generation => {
                // Not a shader problem; nothing useful to report per-message.
                CompilationInfo {
                    messages: Vec::new(),
                }
            }
            _ => CompilationInfo {
                messages: vec![CompilationMessage {
                    message: value.to_string(),
                    message_type: CompilationMessageType::Error,
                    location: None,
                }],
            },
        }
    }
}
718
/// Staging buffer handed out by `Queue::write_buffer_with`, paired with its
/// mapped memory range.
#[derive(Debug)]
pub struct CoreQueueWriteBuffer {
    buffer_id: wgc::id::StagingBufferId,
    mapping: CoreBufferMappedRange,
}
724
/// Raw pointer + length view into a mapped buffer region.
#[derive(Debug)]
pub struct CoreBufferMappedRange {
    ptr: NonNull<u8>,
    size: usize,
}
730
// SAFETY: `CoreBufferMappedRange` is just a pointer/length pair; it performs no
// interior mutation of its own. NOTE(review): soundness also relies on the
// mapped buffer memory being safe to access across threads — confirm this
// invariant is upheld by wgpu-core's mapping rules.
#[cfg(send_sync)]
unsafe impl Send for CoreBufferMappedRange {}
// SAFETY: see the `Send` impl above.
#[cfg(send_sync)]
unsafe impl Sync for CoreBufferMappedRange {}
735
impl Drop for CoreBufferMappedRange {
    fn drop(&mut self) {
        // Intentionally empty: the mapping's lifetime is managed by the owning
        // buffer/staging-buffer, not by this view.
        // NOTE(review): currently a no-op — confirm whether this placeholder is
        // still required.
    }
}
742
// Identity comparisons: the context compares by the address of its inner `Arc`;
// every wrapper type compares/hashes by its backing wgc id (or by pointer for
// write-buffer/mapped-range types, which have no id).
crate::cmp::impl_eq_ord_hash_arc_address!(ContextWgpuCore => .0);
crate::cmp::impl_eq_ord_hash_proxy!(CoreAdapter => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreDevice => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreQueue => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreShaderModule => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBindGroupLayout => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBindGroup => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreTextureView => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreSampler => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBuffer => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreTexture => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreExternalTexture => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBlas => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreTlas => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreQuerySet => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CorePipelineLayout => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderPipeline => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreComputePipeline => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CorePipelineCache => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreCommandEncoder => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreComputePass => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderPass => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreCommandBuffer => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderBundleEncoder => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderBundle => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreSurface => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreSurfaceOutputDetail => .surface_id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreQueueWriteBuffer => .mapping.ptr);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBufferMappedRange => .ptr);
772
impl dispatch::InstanceInterface for ContextWgpuCore {
    /// Creates a fresh wgpu-core global from the instance descriptor.
    fn new(desc: wgt::InstanceDescriptor) -> Self
    where
        Self: Sized,
    {
        Self(Arc::new(wgc::global::Global::new("wgpu", desc, None)))
    }

    /// Creates a surface from one of the platform-specific raw targets.
    /// Which variants exist and compile depends on platform/backend cfgs.
    ///
    /// # Safety
    /// The raw handles in `target` must be valid for the duration of the
    /// surface; see the corresponding `Global::instance_create_surface*` docs.
    unsafe fn create_surface(
        &self,
        target: crate::api::SurfaceTargetUnsafe,
    ) -> Result<dispatch::DispatchSurface, crate::CreateSurfaceError> {
        let id = match target {
            SurfaceTargetUnsafe::RawHandle {
                raw_display_handle,
                raw_window_handle,
            } => unsafe {
                self.0
                    .instance_create_surface(raw_display_handle, raw_window_handle, None)
            },

            // DRM surfaces: non-Apple unix targets only.
            #[cfg(all(
                unix,
                not(target_vendor = "apple"),
                not(target_family = "wasm"),
                not(target_os = "netbsd")
            ))]
            SurfaceTargetUnsafe::Drm {
                fd,
                plane,
                connector_id,
                width,
                height,
                refresh_rate,
            } => unsafe {
                self.0.instance_create_surface_from_drm(
                    fd,
                    plane,
                    connector_id,
                    width,
                    height,
                    refresh_rate,
                    None,
                )
            },

            #[cfg(metal)]
            SurfaceTargetUnsafe::CoreAnimationLayer(layer) => unsafe {
                self.0.instance_create_surface_metal(layer, None)
            },

            // NetBSD has no DRM support path; surface creation is rejected.
            #[cfg(target_os = "netbsd")]
            SurfaceTargetUnsafe::Drm { .. } => Err(
                wgc::instance::CreateSurfaceError::BackendNotEnabled(wgt::Backend::Vulkan),
            ),

            #[cfg(dx12)]
            SurfaceTargetUnsafe::CompositionVisual(visual) => unsafe {
                self.0.instance_create_surface_from_visual(visual, None)
            },

            #[cfg(dx12)]
            SurfaceTargetUnsafe::SurfaceHandle(surface_handle) => unsafe {
                self.0
                    .instance_create_surface_from_surface_handle(surface_handle, None)
            },

            #[cfg(dx12)]
            SurfaceTargetUnsafe::SwapChainPanel(swap_chain_panel) => unsafe {
                self.0
                    .instance_create_surface_from_swap_chain_panel(swap_chain_panel, None)
            },
        }?;

        Ok(CoreSurface {
            context: self.clone(),
            id,
            // Filled in when the surface is first configured.
            configured_device: Mutex::default(),
            error_sink: Mutex::default(),
        }
        .into())
    }

    /// Requests an adapter matching `options` across all backends. The
    /// underlying call is synchronous; the future is immediately ready.
    fn request_adapter(
        &self,
        options: &crate::api::RequestAdapterOptions<'_, '_>,
    ) -> Pin<Box<dyn dispatch::RequestAdapterFuture>> {
        let id = self.0.request_adapter(
            &wgc::instance::RequestAdapterOptions {
                power_preference: options.power_preference,
                force_fallback_adapter: options.force_fallback_adapter,
                compatible_surface: options
                    .compatible_surface
                    .map(|surface| surface.inner.as_core().id),
            },
            wgt::Backends::all(),
            None,
        );
        let adapter = id.map(|id| {
            let core = CoreAdapter {
                context: self.clone(),
                id,
            };
            let generic: dispatch::DispatchAdapter = core.into();
            generic
        });
        Box::pin(ready(adapter))
    }

    /// Polls every device; returns whether all queues are empty.
    ///
    /// # Panics
    /// Treats a poll failure as fatal.
    fn poll_all_devices(&self, force_wait: bool) -> bool {
        match self.0.poll_all_devices(force_wait) {
            Ok(all_queue_empty) => all_queue_empty,
            Err(err) => self.handle_error_fatal(err, "Instance::poll_all_devices"),
        }
    }

    /// Reports which WGSL language extensions naga implements, mapped to the
    /// public `WgslLanguageFeatures` flags.
    #[cfg(feature = "wgsl")]
    fn wgsl_language_features(&self) -> crate::WgslLanguageFeatures {
        use wgc::naga::front::wgsl::ImplementedLanguageExtension;
        ImplementedLanguageExtension::all().iter().copied().fold(
            crate::WgslLanguageFeatures::empty(),
            |acc, wle| {
                acc | match wle {
                    ImplementedLanguageExtension::ReadOnlyAndReadWriteStorageTextures => {
                        crate::WgslLanguageFeatures::ReadOnlyAndReadWriteStorageTextures
                    }
                    ImplementedLanguageExtension::Packed4x8IntegerDotProduct => {
                        crate::WgslLanguageFeatures::Packed4x8IntegerDotProduct
                    }
                    ImplementedLanguageExtension::PointerCompositeAccess => {
                        crate::WgslLanguageFeatures::PointerCompositeAccess
                    }
                }
            },
        )
    }

    /// Enumerates adapters on the given backends, wrapped as dispatch
    /// adapters. Synchronous; the future is immediately ready.
    fn enumerate_adapters(
        &self,
        backends: crate::Backends,
    ) -> Pin<Box<dyn dispatch::EnumerateAdapterFuture>> {
        let adapters: Vec<DispatchAdapter> = self
            .enumerate_adapters(backends)
            .into_iter()
            .map(|adapter| {
                let core = crate::backend::wgpu_core::CoreAdapter {
                    context: self.clone(),
                    id: adapter,
                };
                core.into()
            })
            .collect();
        Box::pin(ready(adapters))
    }
}
928
impl dispatch::AdapterInterface for CoreAdapter {
    /// Opens a device (and its queue) on this adapter. The underlying call is
    /// synchronous; the returned future is immediately ready.
    fn request_device(
        &self,
        desc: &crate::DeviceDescriptor<'_>,
    ) -> Pin<Box<dyn dispatch::RequestDeviceFuture>> {
        let res = self.context.0.adapter_request_device(
            self.id,
            &desc.map_label(|l| l.map(Borrowed)),
            None,
            None,
        );
        let (device_id, queue_id) = match res {
            Ok(ids) => ids,
            Err(err) => {
                return Box::pin(ready(Err(err.into())));
            }
        };
        // Device and queue share a single error sink (see create_device_from_hal).
        let error_sink = Arc::new(Mutex::new(ErrorSinkRaw::new()));
        let device = CoreDevice {
            context: self.context.clone(),
            id: device_id,
            error_sink: error_sink.clone(),
            features: desc.required_features,
        };
        let queue = CoreQueue {
            context: self.context.clone(),
            id: queue_id,
            error_sink,
        };
        Box::pin(ready(Ok((device.into(), queue.into()))))
    }

    /// Whether this adapter can present to the given surface.
    fn is_surface_supported(&self, surface: &dispatch::DispatchSurface) -> bool {
        let surface = surface.as_core();

        self.context
            .0
            .adapter_is_surface_supported(self.id, surface.id)
    }

    /// Features supported by this adapter.
    fn features(&self) -> crate::Features {
        self.context.0.adapter_features(self.id)
    }

    /// Resource limits supported by this adapter.
    fn limits(&self) -> crate::Limits {
        self.context.0.adapter_limits(self.id)
    }

    /// Downlevel capability flags for this adapter.
    fn downlevel_capabilities(&self) -> crate::DownlevelCapabilities {
        self.context.0.adapter_downlevel_capabilities(self.id)
    }

    /// Descriptive information (name, backend, driver, …) for this adapter.
    fn get_info(&self) -> crate::AdapterInfo {
        self.context.0.adapter_get_info(self.id)
    }

    /// Per-format capability flags for `format` on this adapter.
    fn get_texture_format_features(
        &self,
        format: crate::TextureFormat,
    ) -> crate::TextureFormatFeatures {
        self.context
            .0
            .adapter_get_texture_format_features(self.id, format)
    }

    /// Current presentation timestamp from the adapter's clock.
    fn get_presentation_timestamp(&self) -> crate::PresentationTimestamp {
        self.context.0.adapter_get_presentation_timestamp(self.id)
    }

    /// Supported cooperative-matrix configurations for this adapter.
    fn cooperative_matrix_properties(&self) -> Vec<crate::wgt::CooperativeMatrixProperties> {
        self.context
            .0
            .adapter_cooperative_matrix_properties(self.id)
    }
}
1004
impl Drop for CoreAdapter {
    fn drop(&mut self) {
        // Release the wgc-side adapter registration for this id.
        self.context.0.adapter_drop(self.id)
    }
}
1010
1011impl dispatch::DeviceInterface for CoreDevice {
    /// Features enabled on this device.
    fn features(&self) -> crate::Features {
        self.context.0.device_features(self.id)
    }
1015
    /// Resource limits in effect on this device.
    fn limits(&self) -> crate::Limits {
        self.context.0.device_limits(self.id)
    }
1019
    /// Information about the adapter this device was created from.
    fn adapter_info(&self) -> crate::AdapterInfo {
        self.context.0.device_adapter_info(self.id)
    }
1023
    /// Creates a shader module from one of the enabled source languages.
    /// Compilation errors are routed through the device error sink; the
    /// returned module carries the captured compilation messages either way.
    ///
    /// # Panics
    /// On `ShaderSource::Dummy`, which must never reach a real backend.
    #[cfg_attr(
        not(any(
            feature = "spirv",
            feature = "glsl",
            feature = "wgsl",
            feature = "naga-ir"
        )),
        expect(unused)
    )]
    fn create_shader_module(
        &self,
        desc: crate::ShaderModuleDescriptor<'_>,
        shader_bound_checks: wgt::ShaderRuntimeChecks,
    ) -> dispatch::DispatchShaderModule {
        let descriptor = wgc::pipeline::ShaderModuleDescriptor {
            label: desc.label.map(Borrowed),
            runtime_checks: shader_bound_checks,
        };
        let source = match desc.source {
            #[cfg(feature = "spirv")]
            ShaderSource::SpirV(ref spv) => {
                let options = naga::front::spv::Options {
                    // Leave coordinate space untouched; require declared
                    // capabilities to be strictly supported.
                    adjust_coordinate_space: false,
                    strict_capabilities: true,
                    block_ctx_dump_prefix: None,
                };
                wgc::pipeline::ShaderModuleSource::SpirV(Borrowed(spv), options)
            }
            #[cfg(feature = "glsl")]
            ShaderSource::Glsl {
                ref shader,
                stage,
                defines,
            } => {
                let options = naga::front::glsl::Options {
                    stage,
                    defines: defines
                        .iter()
                        .map(|&(key, value)| (String::from(key), String::from(value)))
                        .collect(),
                };
                wgc::pipeline::ShaderModuleSource::Glsl(Borrowed(shader), options)
            }
            #[cfg(feature = "wgsl")]
            ShaderSource::Wgsl(ref code) => wgc::pipeline::ShaderModuleSource::Wgsl(Borrowed(code)),
            #[cfg(feature = "naga-ir")]
            ShaderSource::Naga(module) => wgc::pipeline::ShaderModuleSource::Naga(module),
            ShaderSource::Dummy(_) => panic!("found `ShaderSource::Dummy`"),
        };
        let (id, error) =
            self.context
                .0
                .device_create_shader_module(self.id, &descriptor, source, None);
        let compilation_info = match error {
            Some(cause) => {
                // Report through the sink *and* keep the diagnostics on the module.
                self.context.handle_error(
                    &self.error_sink,
                    cause.clone(),
                    desc.label,
                    "Device::create_shader_module",
                );
                CompilationInfo::from(cause)
            }
            None => CompilationInfo { messages: vec![] },
        };

        CoreShaderModule {
            context: self.context.clone(),
            id,
            compilation_info,
        }
        .into()
    }
1099
    /// Creates a shader module from pre-compiled, backend-specific input
    /// without frontend validation. Errors go through the device error sink
    /// and are also captured as compilation messages.
    ///
    /// # Safety
    /// See `Global::device_create_shader_module_passthrough`: the passthrough
    /// source bypasses validation, so the caller must guarantee it is valid.
    unsafe fn create_shader_module_passthrough(
        &self,
        desc: &crate::ShaderModuleDescriptorPassthrough<'_>,
    ) -> dispatch::DispatchShaderModule {
        let desc = desc.map_label(|l| l.map(Cow::from));
        let (id, error) = unsafe {
            self.context
                .0
                .device_create_shader_module_passthrough(self.id, &desc, None)
        };

        let compilation_info = match error {
            Some(cause) => {
                self.context.handle_error(
                    &self.error_sink,
                    cause.clone(),
                    desc.label.as_deref(),
                    "Device::create_shader_module_passthrough",
                );
                CompilationInfo::from(cause)
            }
            None => CompilationInfo { messages: vec![] },
        };

        CoreShaderModule {
            context: self.context.clone(),
            id,
            compilation_info,
        }
        .into()
    }
1131
1132 fn create_bind_group_layout(
1133 &self,
1134 desc: &crate::BindGroupLayoutDescriptor<'_>,
1135 ) -> dispatch::DispatchBindGroupLayout {
1136 let descriptor = wgc::binding_model::BindGroupLayoutDescriptor {
1137 label: desc.label.map(Borrowed),
1138 entries: Borrowed(desc.entries),
1139 };
1140 let (id, error) =
1141 self.context
1142 .0
1143 .device_create_bind_group_layout(self.id, &descriptor, None);
1144 if let Some(cause) = error {
1145 self.context.handle_error(
1146 &self.error_sink,
1147 cause,
1148 desc.label,
1149 "Device::create_bind_group_layout",
1150 );
1151 }
1152 CoreBindGroupLayout {
1153 context: self.context.clone(),
1154 id,
1155 }
1156 .into()
1157 }
1158
    /// Creates a bind group, translating wgpu-level resources in `desc` into
    /// wgpu-core ids.
    ///
    /// Array bindings (`*Array` resource variants) must borrow slices in the
    /// wgpu-core descriptor, so they are first flattened into owned `Vec`s
    /// below; the `remaining_*` slice cursors are then consumed in entry
    /// order while building the entry list, relying on the gather loops and
    /// the `map` visiting `desc.entries` in the same order.
    fn create_bind_group(
        &self,
        desc: &crate::BindGroupDescriptor<'_>,
    ) -> dispatch::DispatchBindGroup {
        use wgc::binding_model as bm;

        // Gather texture-view and sampler arrays up front so the descriptor
        // can borrow stable slices. Only done when the feature is enabled.
        // NOTE(review): if an `*Array` binding is supplied while the matching
        // feature is disabled, the `[..array.len()]` slicing below would
        // panic on the empty gather vec — presumably validation rejects this
        // earlier; confirm against the feature checks upstream.
        let mut arrayed_texture_views = Vec::new();
        let mut arrayed_samplers = Vec::new();
        if self.features.contains(Features::TEXTURE_BINDING_ARRAY) {
            for entry in desc.entries.iter() {
                if let BindingResource::TextureViewArray(array) = entry.resource {
                    arrayed_texture_views.extend(array.iter().map(|view| view.inner.as_core().id));
                }
                if let BindingResource::SamplerArray(array) = entry.resource {
                    arrayed_samplers.extend(array.iter().map(|sampler| sampler.inner.as_core().id));
                }
            }
        }
        let mut remaining_arrayed_texture_views = &arrayed_texture_views[..];
        let mut remaining_arrayed_samplers = &arrayed_samplers[..];

        // Same flattening for buffer binding arrays.
        let mut arrayed_buffer_bindings = Vec::new();
        if self.features.contains(Features::BUFFER_BINDING_ARRAY) {
            for entry in desc.entries.iter() {
                if let BindingResource::BufferArray(array) = entry.resource {
                    arrayed_buffer_bindings.extend(array.iter().map(|binding| bm::BufferBinding {
                        buffer: binding.buffer.inner.as_core().id,
                        offset: binding.offset,
                        size: binding.size.map(wgt::BufferSize::get),
                    }));
                }
            }
        }
        let mut remaining_arrayed_buffer_bindings = &arrayed_buffer_bindings[..];

        // And for acceleration-structure (TLAS) arrays.
        let mut arrayed_acceleration_structures = Vec::new();
        if self
            .features
            .contains(Features::ACCELERATION_STRUCTURE_BINDING_ARRAY)
        {
            for entry in desc.entries.iter() {
                if let BindingResource::AccelerationStructureArray(array) = entry.resource {
                    arrayed_acceleration_structures
                        .extend(array.iter().map(|tlas| tlas.inner.as_core().id));
                }
            }
        }
        let mut remaining_arrayed_acceleration_structures = &arrayed_acceleration_structures[..];

        // Build the wgpu-core entry list, slicing each `*Array` binding's
        // elements off the front of the corresponding `remaining_*` cursor.
        let entries = desc
            .entries
            .iter()
            .map(|entry| bm::BindGroupEntry {
                binding: entry.binding,
                resource: match entry.resource {
                    BindingResource::Buffer(BufferBinding {
                        buffer,
                        offset,
                        size,
                    }) => bm::BindingResource::Buffer(bm::BufferBinding {
                        buffer: buffer.inner.as_core().id,
                        offset,
                        size: size.map(wgt::BufferSize::get),
                    }),
                    BindingResource::BufferArray(array) => {
                        let slice = &remaining_arrayed_buffer_bindings[..array.len()];
                        remaining_arrayed_buffer_bindings =
                            &remaining_arrayed_buffer_bindings[array.len()..];
                        bm::BindingResource::BufferArray(Borrowed(slice))
                    }
                    BindingResource::Sampler(sampler) => {
                        bm::BindingResource::Sampler(sampler.inner.as_core().id)
                    }
                    BindingResource::SamplerArray(array) => {
                        let slice = &remaining_arrayed_samplers[..array.len()];
                        remaining_arrayed_samplers = &remaining_arrayed_samplers[array.len()..];
                        bm::BindingResource::SamplerArray(Borrowed(slice))
                    }
                    BindingResource::TextureView(texture_view) => {
                        bm::BindingResource::TextureView(texture_view.inner.as_core().id)
                    }
                    BindingResource::TextureViewArray(array) => {
                        let slice = &remaining_arrayed_texture_views[..array.len()];
                        remaining_arrayed_texture_views =
                            &remaining_arrayed_texture_views[array.len()..];
                        bm::BindingResource::TextureViewArray(Borrowed(slice))
                    }
                    BindingResource::AccelerationStructure(acceleration_structure) => {
                        bm::BindingResource::AccelerationStructure(
                            acceleration_structure.inner.as_core().id,
                        )
                    }
                    BindingResource::AccelerationStructureArray(array) => {
                        let slice = &remaining_arrayed_acceleration_structures[..array.len()];
                        remaining_arrayed_acceleration_structures =
                            &remaining_arrayed_acceleration_structures[array.len()..];
                        bm::BindingResource::AccelerationStructureArray(Borrowed(slice))
                    }
                    BindingResource::ExternalTexture(external_texture) => {
                        bm::BindingResource::ExternalTexture(external_texture.inner.as_core().id)
                    }
                },
            })
            .collect::<Vec<_>>();
        let descriptor = bm::BindGroupDescriptor {
            label: desc.label.as_ref().map(|label| Borrowed(&label[..])),
            layout: desc.layout.inner.as_core().id,
            entries: Borrowed(&entries),
        };

        let (id, error) = self
            .context
            .0
            .device_create_bind_group(self.id, &descriptor, None);
        // Errors are routed through the error sink; an (invalid) handle is
        // still returned, per WebGPU's error-propagation model.
        if let Some(cause) = error {
            self.context.handle_error(
                &self.error_sink,
                cause,
                desc.label,
                "Device::create_bind_group",
            );
        }
        CoreBindGroup {
            context: self.context.clone(),
            id,
        }
        .into()
    }
1290
1291 fn create_pipeline_layout(
1292 &self,
1293 desc: &crate::PipelineLayoutDescriptor<'_>,
1294 ) -> dispatch::DispatchPipelineLayout {
1295 assert!(
1298 desc.bind_group_layouts.len() <= wgc::MAX_BIND_GROUPS,
1299 "Bind group layout count {} exceeds device bind group limit {}",
1300 desc.bind_group_layouts.len(),
1301 wgc::MAX_BIND_GROUPS
1302 );
1303
1304 let temp_layouts = desc
1305 .bind_group_layouts
1306 .iter()
1307 .map(|bgl| bgl.map(|bgl| bgl.inner.as_core().id))
1308 .collect::<ArrayVec<_, { wgc::MAX_BIND_GROUPS }>>();
1309 let descriptor = wgc::binding_model::PipelineLayoutDescriptor {
1310 label: desc.label.map(Borrowed),
1311 bind_group_layouts: Borrowed(&temp_layouts),
1312 immediate_size: desc.immediate_size,
1313 };
1314
1315 let (id, error) = self
1316 .context
1317 .0
1318 .device_create_pipeline_layout(self.id, &descriptor, None);
1319 if let Some(cause) = error {
1320 self.context.handle_error(
1321 &self.error_sink,
1322 cause,
1323 desc.label,
1324 "Device::create_pipeline_layout",
1325 );
1326 }
1327 CorePipelineLayout {
1328 context: self.context.clone(),
1329 id,
1330 }
1331 .into()
1332 }
1333
    /// Creates a render pipeline, translating the wgpu-level descriptor into
    /// wgpu-core form (ids in place of handles, owned constant maps, borrowed
    /// vertex-buffer/fragment-target slices).
    fn create_render_pipeline(
        &self,
        desc: &crate::RenderPipelineDescriptor<'_>,
    ) -> dispatch::DispatchRenderPipeline {
        use wgc::pipeline as pipe;

        // Vertex buffer layouts, bounded by the device-wide maximum.
        let vertex_buffers: ArrayVec<_, { wgc::MAX_VERTEX_BUFFERS }> = desc
            .vertex
            .buffers
            .iter()
            .map(|vbuf| pipe::VertexBufferLayout {
                array_stride: vbuf.array_stride,
                step_mode: vbuf.step_mode,
                attributes: Borrowed(vbuf.attributes),
            })
            .collect();

        // Pipeline-overridable constants for the vertex stage, keyed by name.
        let vert_constants = desc
            .vertex
            .compilation_options
            .constants
            .iter()
            .map(|&(key, value)| (String::from(key), value))
            .collect();

        let descriptor = pipe::RenderPipelineDescriptor {
            label: desc.label.map(Borrowed),
            layout: desc.layout.map(|layout| layout.inner.as_core().id),
            vertex: pipe::VertexState {
                stage: pipe::ProgrammableStageDescriptor {
                    module: desc.vertex.module.inner.as_core().id,
                    entry_point: desc.vertex.entry_point.map(Borrowed),
                    constants: vert_constants,
                    zero_initialize_workgroup_memory: desc
                        .vertex
                        .compilation_options
                        .zero_initialize_workgroup_memory,
                },
                buffers: Borrowed(&vertex_buffers),
            },
            primitive: desc.primitive,
            depth_stencil: desc.depth_stencil.clone(),
            multisample: desc.multisample,
            // The fragment stage is optional; its constants are collected the
            // same way as the vertex stage's.
            fragment: desc.fragment.as_ref().map(|frag| {
                let frag_constants = frag
                    .compilation_options
                    .constants
                    .iter()
                    .map(|&(key, value)| (String::from(key), value))
                    .collect();
                pipe::FragmentState {
                    stage: pipe::ProgrammableStageDescriptor {
                        module: frag.module.inner.as_core().id,
                        entry_point: frag.entry_point.map(Borrowed),
                        constants: frag_constants,
                        zero_initialize_workgroup_memory: frag
                            .compilation_options
                            .zero_initialize_workgroup_memory,
                    },
                    targets: Borrowed(frag.targets),
                }
            }),
            multiview_mask: desc.multiview_mask,
            cache: desc.cache.map(|cache| cache.inner.as_core().id),
        };

        let (id, error) = self
            .context
            .0
            .device_create_render_pipeline(self.id, &descriptor, None);
        if let Some(cause) = error {
            // Internal (shader translation) failures indicate a wgpu bug, so
            // they are additionally logged with a report request.
            if let wgc::pipeline::CreateRenderPipelineError::Internal { stage, ref error } = cause {
                log::error!("Shader translation error for stage {stage:?}: {error}");
                log::error!("Please report it to https://github.com/gfx-rs/wgpu");
            }
            self.context.handle_error(
                &self.error_sink,
                cause,
                desc.label,
                "Device::create_render_pipeline",
            );
        }
        CoreRenderPipeline {
            context: self.context.clone(),
            id,
            error_sink: Arc::clone(&self.error_sink),
        }
        .into()
    }
1423
1424 fn create_mesh_pipeline(
1425 &self,
1426 desc: &crate::MeshPipelineDescriptor<'_>,
1427 ) -> dispatch::DispatchRenderPipeline {
1428 use wgc::pipeline as pipe;
1429
1430 let mesh_constants = desc
1431 .mesh
1432 .compilation_options
1433 .constants
1434 .iter()
1435 .map(|&(key, value)| (String::from(key), value))
1436 .collect();
1437 let descriptor = pipe::MeshPipelineDescriptor {
1438 label: desc.label.map(Borrowed),
1439 task: desc.task.as_ref().map(|task| {
1440 let task_constants = task
1441 .compilation_options
1442 .constants
1443 .iter()
1444 .map(|&(key, value)| (String::from(key), value))
1445 .collect();
1446 pipe::TaskState {
1447 stage: pipe::ProgrammableStageDescriptor {
1448 module: task.module.inner.as_core().id,
1449 entry_point: task.entry_point.map(Borrowed),
1450 constants: task_constants,
1451 zero_initialize_workgroup_memory: desc
1452 .mesh
1453 .compilation_options
1454 .zero_initialize_workgroup_memory,
1455 },
1456 }
1457 }),
1458 mesh: pipe::MeshState {
1459 stage: pipe::ProgrammableStageDescriptor {
1460 module: desc.mesh.module.inner.as_core().id,
1461 entry_point: desc.mesh.entry_point.map(Borrowed),
1462 constants: mesh_constants,
1463 zero_initialize_workgroup_memory: desc
1464 .mesh
1465 .compilation_options
1466 .zero_initialize_workgroup_memory,
1467 },
1468 },
1469 layout: desc.layout.map(|layout| layout.inner.as_core().id),
1470 primitive: desc.primitive,
1471 depth_stencil: desc.depth_stencil.clone(),
1472 multisample: desc.multisample,
1473 fragment: desc.fragment.as_ref().map(|frag| {
1474 let frag_constants = frag
1475 .compilation_options
1476 .constants
1477 .iter()
1478 .map(|&(key, value)| (String::from(key), value))
1479 .collect();
1480 pipe::FragmentState {
1481 stage: pipe::ProgrammableStageDescriptor {
1482 module: frag.module.inner.as_core().id,
1483 entry_point: frag.entry_point.map(Borrowed),
1484 constants: frag_constants,
1485 zero_initialize_workgroup_memory: frag
1486 .compilation_options
1487 .zero_initialize_workgroup_memory,
1488 },
1489 targets: Borrowed(frag.targets),
1490 }
1491 }),
1492 multiview: desc.multiview,
1493 cache: desc.cache.map(|cache| cache.inner.as_core().id),
1494 };
1495
1496 let (id, error) = self
1497 .context
1498 .0
1499 .device_create_mesh_pipeline(self.id, &descriptor, None);
1500 if let Some(cause) = error {
1501 if let wgc::pipeline::CreateRenderPipelineError::Internal { stage, ref error } = cause {
1502 log::error!("Shader translation error for stage {stage:?}: {error}");
1503 log::error!("Please report it to https://github.com/gfx-rs/wgpu");
1504 }
1505 self.context.handle_error(
1506 &self.error_sink,
1507 cause,
1508 desc.label,
1509 "Device::create_render_pipeline",
1510 );
1511 }
1512 CoreRenderPipeline {
1513 context: self.context.clone(),
1514 id,
1515 error_sink: Arc::clone(&self.error_sink),
1516 }
1517 .into()
1518 }
1519
1520 fn create_compute_pipeline(
1521 &self,
1522 desc: &crate::ComputePipelineDescriptor<'_>,
1523 ) -> dispatch::DispatchComputePipeline {
1524 use wgc::pipeline as pipe;
1525
1526 let constants = desc
1527 .compilation_options
1528 .constants
1529 .iter()
1530 .map(|&(key, value)| (String::from(key), value))
1531 .collect();
1532
1533 let descriptor = pipe::ComputePipelineDescriptor {
1534 label: desc.label.map(Borrowed),
1535 layout: desc.layout.map(|pll| pll.inner.as_core().id),
1536 stage: pipe::ProgrammableStageDescriptor {
1537 module: desc.module.inner.as_core().id,
1538 entry_point: desc.entry_point.map(Borrowed),
1539 constants,
1540 zero_initialize_workgroup_memory: desc
1541 .compilation_options
1542 .zero_initialize_workgroup_memory,
1543 },
1544 cache: desc.cache.map(|cache| cache.inner.as_core().id),
1545 };
1546
1547 let (id, error) = self
1548 .context
1549 .0
1550 .device_create_compute_pipeline(self.id, &descriptor, None);
1551 if let Some(cause) = error {
1552 if let wgc::pipeline::CreateComputePipelineError::Internal(ref error) = cause {
1553 log::error!(
1554 "Shader translation error for stage {:?}: {}",
1555 wgt::ShaderStages::COMPUTE,
1556 error
1557 );
1558 log::error!("Please report it to https://github.com/gfx-rs/wgpu");
1559 }
1560 self.context.handle_error(
1561 &self.error_sink,
1562 cause,
1563 desc.label,
1564 "Device::create_compute_pipeline",
1565 );
1566 }
1567 CoreComputePipeline {
1568 context: self.context.clone(),
1569 id,
1570 error_sink: Arc::clone(&self.error_sink),
1571 }
1572 .into()
1573 }
1574
1575 unsafe fn create_pipeline_cache(
1576 &self,
1577 desc: &crate::PipelineCacheDescriptor<'_>,
1578 ) -> dispatch::DispatchPipelineCache {
1579 use wgc::pipeline as pipe;
1580
1581 let descriptor = pipe::PipelineCacheDescriptor {
1582 label: desc.label.map(Borrowed),
1583 data: desc.data.map(Borrowed),
1584 fallback: desc.fallback,
1585 };
1586 let (id, error) = unsafe {
1587 self.context
1588 .0
1589 .device_create_pipeline_cache(self.id, &descriptor, None)
1590 };
1591 if let Some(cause) = error {
1592 self.context.handle_error(
1593 &self.error_sink,
1594 cause,
1595 desc.label,
1596 "Device::device_create_pipeline_cache_init",
1597 );
1598 }
1599 CorePipelineCache {
1600 context: self.context.clone(),
1601 id,
1602 }
1603 .into()
1604 }
1605
1606 fn create_buffer(&self, desc: &crate::BufferDescriptor<'_>) -> dispatch::DispatchBuffer {
1607 let (id, error) = self.context.0.device_create_buffer(
1608 self.id,
1609 &desc.map_label(|l| l.map(Borrowed)),
1610 None,
1611 );
1612 if let Some(cause) = error {
1613 self.context
1614 .handle_error(&self.error_sink, cause, desc.label, "Device::create_buffer");
1615 }
1616
1617 CoreBuffer {
1618 context: self.context.clone(),
1619 id,
1620 error_sink: Arc::clone(&self.error_sink),
1621 }
1622 .into()
1623 }
1624
1625 fn create_texture(&self, desc: &crate::TextureDescriptor<'_>) -> dispatch::DispatchTexture {
1626 let wgt_desc = desc.map_label_and_view_formats(|l| l.map(Borrowed), |v| v.to_vec());
1627 let (id, error) = self
1628 .context
1629 .0
1630 .device_create_texture(self.id, &wgt_desc, None);
1631 if let Some(cause) = error {
1632 self.context.handle_error(
1633 &self.error_sink,
1634 cause,
1635 desc.label,
1636 "Device::create_texture",
1637 );
1638 }
1639
1640 CoreTexture {
1641 context: self.context.clone(),
1642 id,
1643 error_sink: Arc::clone(&self.error_sink),
1644 }
1645 .into()
1646 }
1647
1648 fn create_external_texture(
1649 &self,
1650 desc: &crate::ExternalTextureDescriptor<'_>,
1651 planes: &[&crate::TextureView],
1652 ) -> dispatch::DispatchExternalTexture {
1653 let wgt_desc = desc.map_label(|l| l.map(Borrowed));
1654 let planes = planes
1655 .iter()
1656 .map(|plane| plane.inner.as_core().id)
1657 .collect::<Vec<_>>();
1658 let (id, error) = self
1659 .context
1660 .0
1661 .device_create_external_texture(self.id, &wgt_desc, &planes, None);
1662 if let Some(cause) = error {
1663 self.context.handle_error(
1664 &self.error_sink,
1665 cause,
1666 desc.label,
1667 "Device::create_external_texture",
1668 );
1669 }
1670
1671 CoreExternalTexture {
1672 context: self.context.clone(),
1673 id,
1674 }
1675 .into()
1676 }
1677
1678 fn create_blas(
1679 &self,
1680 desc: &crate::CreateBlasDescriptor<'_>,
1681 sizes: crate::BlasGeometrySizeDescriptors,
1682 ) -> (Option<u64>, dispatch::DispatchBlas) {
1683 let global = &self.context.0;
1684 let (id, handle, error) =
1685 global.device_create_blas(self.id, &desc.map_label(|l| l.map(Borrowed)), sizes, None);
1686 if let Some(cause) = error {
1687 self.context
1688 .handle_error(&self.error_sink, cause, desc.label, "Device::create_blas");
1689 }
1690 (
1691 handle,
1692 CoreBlas {
1693 context: self.context.clone(),
1694 id,
1695 error_sink: Arc::clone(&self.error_sink),
1696 }
1697 .into(),
1698 )
1699 }
1700
1701 fn create_tlas(&self, desc: &crate::CreateTlasDescriptor<'_>) -> dispatch::DispatchTlas {
1702 let global = &self.context.0;
1703 let (id, error) =
1704 global.device_create_tlas(self.id, &desc.map_label(|l| l.map(Borrowed)), None);
1705 if let Some(cause) = error {
1706 self.context
1707 .handle_error(&self.error_sink, cause, desc.label, "Device::create_tlas");
1708 }
1709 CoreTlas {
1710 context: self.context.clone(),
1711 id,
1712 }
1714 .into()
1715 }
1716
1717 fn create_sampler(&self, desc: &crate::SamplerDescriptor<'_>) -> dispatch::DispatchSampler {
1718 let descriptor = wgc::resource::SamplerDescriptor {
1719 label: desc.label.map(Borrowed),
1720 address_modes: [
1721 desc.address_mode_u,
1722 desc.address_mode_v,
1723 desc.address_mode_w,
1724 ],
1725 mag_filter: desc.mag_filter,
1726 min_filter: desc.min_filter,
1727 mipmap_filter: desc.mipmap_filter,
1728 lod_min_clamp: desc.lod_min_clamp,
1729 lod_max_clamp: desc.lod_max_clamp,
1730 compare: desc.compare,
1731 anisotropy_clamp: desc.anisotropy_clamp,
1732 border_color: desc.border_color,
1733 };
1734
1735 let (id, error) = self
1736 .context
1737 .0
1738 .device_create_sampler(self.id, &descriptor, None);
1739 if let Some(cause) = error {
1740 self.context.handle_error(
1741 &self.error_sink,
1742 cause,
1743 desc.label,
1744 "Device::create_sampler",
1745 );
1746 }
1747 CoreSampler {
1748 context: self.context.clone(),
1749 id,
1750 }
1751 .into()
1752 }
1753
1754 fn create_query_set(&self, desc: &crate::QuerySetDescriptor<'_>) -> dispatch::DispatchQuerySet {
1755 let (id, error) = self.context.0.device_create_query_set(
1756 self.id,
1757 &desc.map_label(|l| l.map(Borrowed)),
1758 None,
1759 );
1760 if let Some(cause) = error {
1761 self.context
1762 .handle_error_nolabel(&self.error_sink, cause, "Device::create_query_set");
1763 }
1764 CoreQuerySet {
1765 context: self.context.clone(),
1766 id,
1767 }
1768 .into()
1769 }
1770
1771 fn create_command_encoder(
1772 &self,
1773 desc: &crate::CommandEncoderDescriptor<'_>,
1774 ) -> dispatch::DispatchCommandEncoder {
1775 let (id, error) = self.context.0.device_create_command_encoder(
1776 self.id,
1777 &desc.map_label(|l| l.map(Borrowed)),
1778 None,
1779 );
1780 if let Some(cause) = error {
1781 self.context.handle_error(
1782 &self.error_sink,
1783 cause,
1784 desc.label,
1785 "Device::create_command_encoder",
1786 );
1787 }
1788
1789 CoreCommandEncoder {
1790 context: self.context.clone(),
1791 id,
1792 error_sink: Arc::clone(&self.error_sink),
1793 }
1794 .into()
1795 }
1796
1797 fn create_render_bundle_encoder(
1798 &self,
1799 desc: &crate::RenderBundleEncoderDescriptor<'_>,
1800 ) -> dispatch::DispatchRenderBundleEncoder {
1801 let descriptor = wgc::command::RenderBundleEncoderDescriptor {
1802 label: desc.label.map(Borrowed),
1803 color_formats: Borrowed(desc.color_formats),
1804 depth_stencil: desc.depth_stencil,
1805 sample_count: desc.sample_count,
1806 multiview: desc.multiview,
1807 };
1808 let encoder = match wgc::command::RenderBundleEncoder::new(&descriptor, self.id) {
1809 Ok(encoder) => encoder,
1810 Err(e) => panic!("Error in Device::create_render_bundle_encoder: {e}"),
1811 };
1812
1813 CoreRenderBundleEncoder {
1814 context: self.context.clone(),
1815 encoder,
1816 id: crate::cmp::Identifier::create(),
1817 }
1818 .into()
1819 }
1820
    /// Installs the closure wgpu-core invokes when this device is lost.
    fn set_device_lost_callback(&self, device_lost_callback: dispatch::BoxDeviceLostCallback) {
        self.context
            .0
            .device_set_device_lost_closure(self.id, device_lost_callback);
    }
1826
1827 fn on_uncaptured_error(&self, handler: Arc<dyn crate::UncapturedErrorHandler>) {
1828 let mut error_sink = self.error_sink.lock();
1829 error_sink.uncaptured_handler = Some(handler);
1830 }
1831
1832 fn push_error_scope(&self, filter: crate::ErrorFilter) -> u32 {
1833 let mut error_sink = self.error_sink.lock();
1834 let thread_id = thread_id::ThreadId::current();
1835 let scopes = error_sink.scopes.entry(thread_id).or_default();
1836 let index = scopes
1837 .len()
1838 .try_into()
1839 .expect("Greater than 2^32 nested error scopes");
1840 scopes.push(ErrorScope {
1841 error: None,
1842 filter,
1843 });
1844 index
1845 }
1846
    /// Pops the error scope pushed at `index` on the current thread and
    /// resolves immediately with any error it captured.
    ///
    /// Mismatched pops normally panic; if a panic is already unwinding
    /// (`is_panicking`), the method degrades to returning `None` so a `Drop`
    /// impl calling this cannot cause a double panic / abort.
    fn pop_error_scope(&self, index: u32) -> Pin<Box<dyn dispatch::PopErrorScopeFuture>> {
        let mut error_sink = self.error_sink.lock();

        let is_panicking = crate::util::is_panicking();
        // Scopes are thread-local: look up this thread's stack only.
        let thread_id = thread_id::ThreadId::current();
        let err = "Mismatched pop_error_scope call: no error scope for this thread. Error scopes are thread-local.";
        let scopes = match error_sink.scopes.get_mut(&thread_id) {
            Some(s) => s,
            None => {
                if !is_panicking {
                    panic!("{err}");
                } else {
                    return Box::pin(ready(None));
                }
            }
        };
        if scopes.is_empty() && !is_panicking {
            panic!("{err}");
        }
        // `index` must match the top of the stack (LIFO discipline).
        // NOTE(review): if `scopes` is empty while panicking, `len() - 1`
        // underflows here before the `pop()` fallback is reached — confirm
        // this path is unreachable or guard it.
        if index as usize != scopes.len() - 1 && !is_panicking {
            panic!(
                "Mismatched pop_error_scope call: error scopes must be popped in reverse order."
            );
        }

        let scope = match scopes.pop() {
            Some(s) => s,
            // Emptiness was already handled above unless we are panicking.
            None if !is_panicking => unreachable!(),
            None => return Box::pin(ready(None)),
        };

        Box::pin(ready(scope.error))
    }
1886
    /// Begins a capture in an attached graphics debugger.
    // SAFETY: forwards the caller-upheld contract of the dispatch trait
    // method to the wgpu-core implementation unchanged.
    unsafe fn start_graphics_debugger_capture(&self) {
        unsafe {
            self.context
                .0
                .device_start_graphics_debugger_capture(self.id)
        };
    }
1894
    /// Ends a capture in an attached graphics debugger.
    // SAFETY: forwards the caller-upheld contract of the dispatch trait
    // method to the wgpu-core implementation unchanged.
    unsafe fn stop_graphics_debugger_capture(&self) {
        unsafe {
            self.context
                .0
                .device_stop_graphics_debugger_capture(self.id)
        };
    }
1902
1903 fn poll(&self, poll_type: wgt::PollType<u64>) -> Result<crate::PollStatus, crate::PollError> {
1904 match self.context.0.device_poll(self.id, poll_type) {
1905 Ok(status) => Ok(status),
1906 Err(err) => {
1907 if let Some(poll_error) = err.to_poll_error() {
1908 return Err(poll_error);
1909 }
1910
1911 self.context.handle_error_fatal(err, "Device::poll")
1912 }
1913 }
1914 }
1915
    /// Returns wgpu-core's internal counters for this device.
    fn get_internal_counters(&self) -> crate::InternalCounters {
        self.context.0.device_get_internal_counters(self.id)
    }
1919
    /// Returns a memory-allocator report, if the backend provides one.
    fn generate_allocator_report(&self) -> Option<wgt::AllocatorReport> {
        self.context.0.device_generate_allocator_report(self.id)
    }
1923
    /// Eagerly destroys the underlying device resources.
    fn destroy(&self) {
        self.context.0.device_destroy(self.id);
    }
1927}
1928
impl Drop for CoreDevice {
    fn drop(&mut self) {
        // Release the wgpu-core device handle when the wrapper goes away.
        self.context.0.device_drop(self.id)
    }
}
1934
impl dispatch::QueueInterface for CoreQueue {
    /// Schedules a write of `data` into `buffer` at `offset`; errors go
    /// through the queue's error sink.
    fn write_buffer(
        &self,
        buffer: &dispatch::DispatchBuffer,
        offset: crate::BufferAddress,
        data: &[u8],
    ) {
        let buffer = buffer.as_core();

        match self
            .context
            .0
            .queue_write_buffer(self.id, buffer.id, offset, data)
        {
            Ok(()) => (),
            Err(err) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, err, "Queue::write_buffer")
            }
        }
    }

    /// Allocates a staging buffer for `Queue::write_buffer_with`, returning
    /// `None` (after reporting) on failure.
    fn create_staging_buffer(
        &self,
        size: crate::BufferSize,
    ) -> Option<dispatch::DispatchQueueWriteBuffer> {
        match self
            .context
            .0
            .queue_create_staging_buffer(self.id, size, None)
        {
            Ok((buffer_id, ptr)) => Some(
                CoreQueueWriteBuffer {
                    buffer_id,
                    mapping: CoreBufferMappedRange {
                        ptr,
                        size: size.get() as usize,
                    },
                }
                .into(),
            ),
            Err(err) => {
                // Reported under the user-facing API name `write_buffer_with`,
                // which this staging path backs.
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    err,
                    "Queue::write_buffer_with",
                );
                None
            }
        }
    }

    /// Validates a pending `write_buffer_with` destination range; `None`
    /// means validation failed (already reported).
    fn validate_write_buffer(
        &self,
        buffer: &dispatch::DispatchBuffer,
        offset: wgt::BufferAddress,
        size: wgt::BufferSize,
    ) -> Option<()> {
        let buffer = buffer.as_core();

        match self
            .context
            .0
            .queue_validate_write_buffer(self.id, buffer.id, offset, size)
        {
            Ok(()) => Some(()),
            Err(err) => {
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    err,
                    "Queue::write_buffer_with",
                );
                None
            }
        }
    }

    /// Flushes a previously created staging buffer into `buffer` at `offset`.
    fn write_staging_buffer(
        &self,
        buffer: &dispatch::DispatchBuffer,
        offset: crate::BufferAddress,
        staging_buffer: &dispatch::DispatchQueueWriteBuffer,
    ) {
        let buffer = buffer.as_core();
        let staging_buffer = staging_buffer.as_core();

        match self.context.0.queue_write_staging_buffer(
            self.id,
            buffer.id,
            offset,
            staging_buffer.buffer_id,
        ) {
            Ok(()) => (),
            Err(err) => {
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    err,
                    "Queue::write_buffer_with",
                );
            }
        }
    }

    /// Schedules a write of `data` into a texture region.
    fn write_texture(
        &self,
        texture: crate::TexelCopyTextureInfo<'_>,
        data: &[u8],
        data_layout: crate::TexelCopyBufferLayout,
        size: crate::Extent3d,
    ) {
        match self.context.0.queue_write_texture(
            self.id,
            &map_texture_copy_view(texture),
            data,
            &data_layout,
            &size,
        ) {
            Ok(()) => (),
            Err(err) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, err, "Queue::write_texture")
            }
        }
    }

    // Only compiled for web targets; a no-op (with unused params) unless the
    // WebGL backend is enabled.
    #[cfg(web)]
    #[cfg_attr(not(webgl), expect(unused_variables))]
    fn copy_external_image_to_texture(
        &self,
        source: &crate::CopyExternalImageSourceInfo,
        dest: crate::CopyExternalImageDestInfo<&crate::api::Texture>,
        size: crate::Extent3d,
    ) {
        #[cfg(webgl)]
        match self.context.0.queue_copy_external_image_to_texture(
            self.id,
            source,
            map_texture_tagged_copy_view(dest),
            size,
        ) {
            Ok(()) => (),
            Err(err) => self.context.handle_error_nolabel(
                &self.error_sink,
                err,
                "Queue::copy_external_image_to_texture",
            ),
        }
    }

    /// Submits command buffers and returns the submission index (wgpu-core
    /// returns an index even on error, so callers can still wait on it).
    fn submit(
        &self,
        command_buffers: &mut dyn Iterator<Item = dispatch::DispatchCommandBuffer>,
    ) -> u64 {
        // Keep the wrappers alive until after submission, then drop them.
        let temp_command_buffers = command_buffers.collect::<SmallVec<[_; 4]>>();
        let command_buffer_ids = temp_command_buffers
            .iter()
            .map(|cmdbuf| cmdbuf.as_core().id)
            .collect::<SmallVec<[_; 4]>>();

        let index = match self.context.0.queue_submit(self.id, &command_buffer_ids) {
            Ok(index) => index,
            Err((index, err)) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, err, "Queue::submit");
                index
            }
        };

        drop(temp_command_buffers);

        index
    }

    /// Nanoseconds per timestamp-query tick for this queue.
    fn get_timestamp_period(&self) -> f32 {
        self.context.0.queue_get_timestamp_period(self.id)
    }

    /// Registers a callback fired once currently submitted work completes.
    fn on_submitted_work_done(&self, callback: dispatch::BoxSubmittedWorkDoneCallback) {
        self.context
            .0
            .queue_on_submitted_work_done(self.id, callback);
    }

    /// Compacts a prepared BLAS, returning the optional compaction handle
    /// and the new (possibly invalid, if an error was reported) BLAS.
    fn compact_blas(&self, blas: &dispatch::DispatchBlas) -> (Option<u64>, dispatch::DispatchBlas) {
        let (id, handle, error) =
            self.context
                .0
                .queue_compact_blas(self.id, blas.as_core().id, None);

        if let Some(cause) = error {
            self.context
                .handle_error_nolabel(&self.error_sink, cause, "Queue::compact_blas");
        }
        (
            handle,
            CoreBlas {
                context: self.context.clone(),
                id,
                error_sink: Arc::clone(&self.error_sink),
            }
            .into(),
        )
    }
}
2141
impl Drop for CoreQueue {
    fn drop(&mut self) {
        // Release the wgpu-core queue handle.
        self.context.0.queue_drop(self.id)
    }
}
2147
impl dispatch::ShaderModuleInterface for CoreShaderModule {
    fn get_compilation_info(&self) -> Pin<Box<dyn dispatch::ShaderCompilationInfoFuture>> {
        // Compilation info was stored at module creation, so the future
        // resolves immediately with a clone of it.
        Box::pin(ready(self.compilation_info.clone()))
    }
}
2153
impl Drop for CoreShaderModule {
    fn drop(&mut self) {
        // Release the wgpu-core shader module handle.
        self.context.0.shader_module_drop(self.id)
    }
}
2159
// Marker impl: no methods beyond the shared dispatch surface; cleanup is in `Drop`.
impl dispatch::BindGroupLayoutInterface for CoreBindGroupLayout {}
2161
impl Drop for CoreBindGroupLayout {
    fn drop(&mut self) {
        // Release the wgpu-core bind group layout handle.
        self.context.0.bind_group_layout_drop(self.id)
    }
}
2167
// Marker impl: no methods beyond the shared dispatch surface; cleanup is in `Drop`.
impl dispatch::BindGroupInterface for CoreBindGroup {}
2169
impl Drop for CoreBindGroup {
    fn drop(&mut self) {
        // Release the wgpu-core bind group handle.
        self.context.0.bind_group_drop(self.id)
    }
}
2175
// Marker impl: no methods beyond the shared dispatch surface; cleanup is in `Drop`.
impl dispatch::TextureViewInterface for CoreTextureView {}
2177
impl Drop for CoreTextureView {
    fn drop(&mut self) {
        // Release the wgpu-core texture view handle.
        self.context.0.texture_view_drop(self.id);
    }
}
2183
impl dispatch::ExternalTextureInterface for CoreExternalTexture {
    /// Eagerly destroys the underlying external texture resource.
    fn destroy(&self) {
        self.context.0.external_texture_destroy(self.id);
    }
}
2189
impl Drop for CoreExternalTexture {
    fn drop(&mut self) {
        // Release the wgpu-core external texture handle.
        self.context.0.external_texture_drop(self.id);
    }
}
2195
// Marker impl: no methods beyond the shared dispatch surface; cleanup is in `Drop`.
impl dispatch::SamplerInterface for CoreSampler {}
2197
impl Drop for CoreSampler {
    fn drop(&mut self) {
        // Release the wgpu-core sampler handle.
        self.context.0.sampler_drop(self.id)
    }
}
2203
impl dispatch::BufferInterface for CoreBuffer {
    /// Begins an async map of `range`; `callback` receives `Ok(())` or a
    /// type-erased `BufferAsyncError` once the map resolves.
    fn map_async(
        &self,
        mode: crate::MapMode,
        range: Range<crate::BufferAddress>,
        callback: dispatch::BufferMapCallback,
    ) {
        let operation = wgc::resource::BufferMapOperation {
            host: match mode {
                MapMode::Read => wgc::device::HostMap::Read,
                MapMode::Write => wgc::device::HostMap::Write,
            },
            // The detailed wgpu-core error is collapsed into the opaque
            // wgpu-level BufferAsyncError.
            callback: Some(Box::new(|status| {
                let res = status.map_err(|_| crate::BufferAsyncError);
                callback(res);
            })),
        };

        match self.context.0.buffer_map_async(
            self.id,
            range.start,
            Some(range.end - range.start),
            operation,
        ) {
            Ok(_) => (),
            Err(cause) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, cause, "Buffer::map_async")
            }
        }
    }

    /// Returns a pointer/length view over a mapped sub-range. A failed
    /// lookup is fatal (panics via `handle_error_fatal`), matching wgpu's
    /// contract that the range must already be mapped.
    fn get_mapped_range(
        &self,
        sub_range: Range<crate::BufferAddress>,
    ) -> dispatch::DispatchBufferMappedRange {
        let size = sub_range.end - sub_range.start;
        match self
            .context
            .0
            .buffer_get_mapped_range(self.id, sub_range.start, Some(size))
        {
            Ok((ptr, size)) => CoreBufferMappedRange {
                ptr,
                size: size as usize,
            }
            .into(),
            Err(err) => self
                .context
                .handle_error_fatal(err, "Buffer::get_mapped_range"),
        }
    }

    /// Unmaps the buffer, flushing any mapped-for-write contents.
    fn unmap(&self) {
        match self.context.0.buffer_unmap(self.id) {
            Ok(()) => (),
            Err(cause) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, cause, "Buffer::buffer_unmap")
            }
        }
    }

    /// Eagerly destroys the underlying buffer resources.
    fn destroy(&self) {
        self.context.0.buffer_destroy(self.id);
    }
}
2271
impl Drop for CoreBuffer {
    fn drop(&mut self) {
        // Release the wgpu-core buffer handle.
        self.context.0.buffer_drop(self.id)
    }
}
2277
2278impl dispatch::TextureInterface for CoreTexture {
2279 fn create_view(
2280 &self,
2281 desc: &crate::TextureViewDescriptor<'_>,
2282 ) -> dispatch::DispatchTextureView {
2283 let descriptor = wgc::resource::TextureViewDescriptor {
2284 label: desc.label.map(Borrowed),
2285 format: desc.format,
2286 dimension: desc.dimension,
2287 usage: desc.usage,
2288 range: wgt::ImageSubresourceRange {
2289 aspect: desc.aspect,
2290 base_mip_level: desc.base_mip_level,
2291 mip_level_count: desc.mip_level_count,
2292 base_array_layer: desc.base_array_layer,
2293 array_layer_count: desc.array_layer_count,
2294 },
2295 };
2296 let (id, error) = self
2297 .context
2298 .0
2299 .texture_create_view(self.id, &descriptor, None);
2300 if let Some(cause) = error {
2301 self.context
2302 .handle_error(&self.error_sink, cause, desc.label, "Texture::create_view");
2303 }
2304 CoreTextureView {
2305 context: self.context.clone(),
2306 id,
2307 }
2308 .into()
2309 }
2310
2311 fn destroy(&self) {
2312 self.context.0.texture_destroy(self.id);
2313 }
2314}
2315
impl Drop for CoreTexture {
    fn drop(&mut self) {
        // Release the wgpu-core texture handle.
        self.context.0.texture_drop(self.id)
    }
}
2321
impl dispatch::BlasInterface for CoreBlas {
    /// Asynchronously prepares this BLAS for compaction; `callback` receives
    /// `Ok(())` or a type-erased `BlasAsyncError` once preparation resolves.
    fn prepare_compact_async(&self, callback: BlasCompactCallback) {
        let callback: Option<wgc::resource::BlasCompactCallback> =
            Some(Box::new(|status: BlasPrepareCompactResult| {
                // Collapse the detailed wgpu-core result into the opaque
                // wgpu-level error type.
                let res = status.map_err(|_| crate::BlasAsyncError);
                callback(res);
            }));

        match self.context.0.blas_prepare_compact_async(self.id, callback) {
            Ok(_) => (),
            Err(cause) => self.context.handle_error_nolabel(
                &self.error_sink,
                cause,
                "Blas::prepare_compact_async",
            ),
        }
    }

    /// Whether this BLAS is ready to be compacted; errors are reported and
    /// treated as "not ready".
    fn ready_for_compaction(&self) -> bool {
        match self.context.0.ready_for_compaction(self.id) {
            Ok(ready) => ready,
            Err(cause) => {
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    cause,
                    "Blas::ready_for_compaction",
                );
                false
            }
        }
    }
}
2355
impl Drop for CoreBlas {
    fn drop(&mut self) {
        // Release the wgpu-core BLAS handle.
        self.context.0.blas_drop(self.id)
    }
}
2361
// Marker impl: no methods beyond the shared dispatch surface; cleanup is in `Drop`.
impl dispatch::TlasInterface for CoreTlas {}
2363
impl Drop for CoreTlas {
    fn drop(&mut self) {
        // Release the wgpu-core TLAS handle.
        self.context.0.tlas_drop(self.id)
    }
}
2369
// Marker impl: no methods beyond the shared dispatch surface; cleanup is in `Drop`.
impl dispatch::QuerySetInterface for CoreQuerySet {}
2371
impl Drop for CoreQuerySet {
    fn drop(&mut self) {
        // Release the wgpu-core query set handle.
        self.context.0.query_set_drop(self.id)
    }
}
2377
// Marker impl: no methods beyond the shared dispatch surface; cleanup is in `Drop`.
impl dispatch::PipelineLayoutInterface for CorePipelineLayout {}
2379
impl Drop for CorePipelineLayout {
    fn drop(&mut self) {
        // Release the wgpu-core pipeline layout handle.
        self.context.0.pipeline_layout_drop(self.id)
    }
}
2385
2386impl dispatch::RenderPipelineInterface for CoreRenderPipeline {
2387 fn get_bind_group_layout(&self, index: u32) -> dispatch::DispatchBindGroupLayout {
2388 let (id, error) = self
2389 .context
2390 .0
2391 .render_pipeline_get_bind_group_layout(self.id, index, None);
2392 if let Some(err) = error {
2393 self.context.handle_error_nolabel(
2394 &self.error_sink,
2395 err,
2396 "RenderPipeline::get_bind_group_layout",
2397 )
2398 }
2399 CoreBindGroupLayout {
2400 context: self.context.clone(),
2401 id,
2402 }
2403 .into()
2404 }
2405}
2406
impl Drop for CoreRenderPipeline {
    fn drop(&mut self) {
        // Notify wgpu-core that this render pipeline handle is no longer in use.
        self.context.0.render_pipeline_drop(self.id)
    }
}
2412
2413impl dispatch::ComputePipelineInterface for CoreComputePipeline {
2414 fn get_bind_group_layout(&self, index: u32) -> dispatch::DispatchBindGroupLayout {
2415 let (id, error) = self
2416 .context
2417 .0
2418 .compute_pipeline_get_bind_group_layout(self.id, index, None);
2419 if let Some(err) = error {
2420 self.context.handle_error_nolabel(
2421 &self.error_sink,
2422 err,
2423 "ComputePipeline::get_bind_group_layout",
2424 )
2425 }
2426 CoreBindGroupLayout {
2427 context: self.context.clone(),
2428 id,
2429 }
2430 .into()
2431 }
2432}
2433
impl Drop for CoreComputePipeline {
    fn drop(&mut self) {
        // Notify wgpu-core that this compute pipeline handle is no longer in use.
        self.context.0.compute_pipeline_drop(self.id)
    }
}
2439
impl dispatch::PipelineCacheInterface for CorePipelineCache {
    /// Fetch the serialized cache contents, if wgpu-core can provide them.
    fn get_data(&self) -> Option<Vec<u8>> {
        self.context.0.pipeline_cache_get_data(self.id)
    }
}
2445
impl Drop for CorePipelineCache {
    fn drop(&mut self) {
        // Notify wgpu-core that this pipeline cache handle is no longer in use.
        self.context.0.pipeline_cache_drop(self.id)
    }
}
2451
2452impl dispatch::CommandEncoderInterface for CoreCommandEncoder {
2453 fn copy_buffer_to_buffer(
2454 &self,
2455 source: &dispatch::DispatchBuffer,
2456 source_offset: crate::BufferAddress,
2457 destination: &dispatch::DispatchBuffer,
2458 destination_offset: crate::BufferAddress,
2459 copy_size: Option<crate::BufferAddress>,
2460 ) {
2461 let source = source.as_core();
2462 let destination = destination.as_core();
2463
2464 if let Err(cause) = self.context.0.command_encoder_copy_buffer_to_buffer(
2465 self.id,
2466 source.id,
2467 source_offset,
2468 destination.id,
2469 destination_offset,
2470 copy_size,
2471 ) {
2472 self.context.handle_error_nolabel(
2473 &self.error_sink,
2474 cause,
2475 "CommandEncoder::copy_buffer_to_buffer",
2476 );
2477 }
2478 }
2479
2480 fn copy_buffer_to_texture(
2481 &self,
2482 source: crate::TexelCopyBufferInfo<'_>,
2483 destination: crate::TexelCopyTextureInfo<'_>,
2484 copy_size: crate::Extent3d,
2485 ) {
2486 if let Err(cause) = self.context.0.command_encoder_copy_buffer_to_texture(
2487 self.id,
2488 &map_buffer_copy_view(source),
2489 &map_texture_copy_view(destination),
2490 ©_size,
2491 ) {
2492 self.context.handle_error_nolabel(
2493 &self.error_sink,
2494 cause,
2495 "CommandEncoder::copy_buffer_to_texture",
2496 );
2497 }
2498 }
2499
2500 fn copy_texture_to_buffer(
2501 &self,
2502 source: crate::TexelCopyTextureInfo<'_>,
2503 destination: crate::TexelCopyBufferInfo<'_>,
2504 copy_size: crate::Extent3d,
2505 ) {
2506 if let Err(cause) = self.context.0.command_encoder_copy_texture_to_buffer(
2507 self.id,
2508 &map_texture_copy_view(source),
2509 &map_buffer_copy_view(destination),
2510 ©_size,
2511 ) {
2512 self.context.handle_error_nolabel(
2513 &self.error_sink,
2514 cause,
2515 "CommandEncoder::copy_texture_to_buffer",
2516 );
2517 }
2518 }
2519
2520 fn copy_texture_to_texture(
2521 &self,
2522 source: crate::TexelCopyTextureInfo<'_>,
2523 destination: crate::TexelCopyTextureInfo<'_>,
2524 copy_size: crate::Extent3d,
2525 ) {
2526 if let Err(cause) = self.context.0.command_encoder_copy_texture_to_texture(
2527 self.id,
2528 &map_texture_copy_view(source),
2529 &map_texture_copy_view(destination),
2530 ©_size,
2531 ) {
2532 self.context.handle_error_nolabel(
2533 &self.error_sink,
2534 cause,
2535 "CommandEncoder::copy_texture_to_texture",
2536 );
2537 }
2538 }
2539
2540 fn begin_compute_pass(
2541 &self,
2542 desc: &crate::ComputePassDescriptor<'_>,
2543 ) -> dispatch::DispatchComputePass {
2544 let timestamp_writes =
2545 desc.timestamp_writes
2546 .as_ref()
2547 .map(|tw| wgc::command::PassTimestampWrites {
2548 query_set: tw.query_set.inner.as_core().id,
2549 beginning_of_pass_write_index: tw.beginning_of_pass_write_index,
2550 end_of_pass_write_index: tw.end_of_pass_write_index,
2551 });
2552
2553 let (pass, err) = self.context.0.command_encoder_begin_compute_pass(
2554 self.id,
2555 &wgc::command::ComputePassDescriptor {
2556 label: desc.label.map(Borrowed),
2557 timestamp_writes,
2558 },
2559 );
2560
2561 if let Some(cause) = err {
2562 self.context.handle_error(
2563 &self.error_sink,
2564 cause,
2565 desc.label,
2566 "CommandEncoder::begin_compute_pass",
2567 );
2568 }
2569
2570 CoreComputePass {
2571 context: self.context.clone(),
2572 pass,
2573 error_sink: self.error_sink.clone(),
2574 id: crate::cmp::Identifier::create(),
2575 }
2576 .into()
2577 }
2578
2579 fn begin_render_pass(
2580 &self,
2581 desc: &crate::RenderPassDescriptor<'_>,
2582 ) -> dispatch::DispatchRenderPass {
2583 let colors = desc
2584 .color_attachments
2585 .iter()
2586 .map(|ca| {
2587 ca.as_ref()
2588 .map(|at| wgc::command::RenderPassColorAttachment {
2589 view: at.view.inner.as_core().id,
2590 depth_slice: at.depth_slice,
2591 resolve_target: at.resolve_target.map(|view| view.inner.as_core().id),
2592 load_op: at.ops.load,
2593 store_op: at.ops.store,
2594 })
2595 })
2596 .collect::<Vec<_>>();
2597
2598 let depth_stencil = desc.depth_stencil_attachment.as_ref().map(|dsa| {
2599 wgc::command::RenderPassDepthStencilAttachment {
2600 view: dsa.view.inner.as_core().id,
2601 depth: map_pass_channel(dsa.depth_ops.as_ref()),
2602 stencil: map_pass_channel(dsa.stencil_ops.as_ref()),
2603 }
2604 });
2605
2606 let timestamp_writes =
2607 desc.timestamp_writes
2608 .as_ref()
2609 .map(|tw| wgc::command::PassTimestampWrites {
2610 query_set: tw.query_set.inner.as_core().id,
2611 beginning_of_pass_write_index: tw.beginning_of_pass_write_index,
2612 end_of_pass_write_index: tw.end_of_pass_write_index,
2613 });
2614
2615 let (pass, err) = self.context.0.command_encoder_begin_render_pass(
2616 self.id,
2617 &wgc::command::RenderPassDescriptor {
2618 label: desc.label.map(Borrowed),
2619 timestamp_writes: timestamp_writes.as_ref(),
2620 color_attachments: Borrowed(&colors),
2621 depth_stencil_attachment: depth_stencil.as_ref(),
2622 occlusion_query_set: desc.occlusion_query_set.map(|qs| qs.inner.as_core().id),
2623 multiview_mask: desc.multiview_mask,
2624 },
2625 );
2626
2627 if let Some(cause) = err {
2628 self.context.handle_error(
2629 &self.error_sink,
2630 cause,
2631 desc.label,
2632 "CommandEncoder::begin_render_pass",
2633 );
2634 }
2635
2636 CoreRenderPass {
2637 context: self.context.clone(),
2638 pass,
2639 error_sink: self.error_sink.clone(),
2640 id: crate::cmp::Identifier::create(),
2641 }
2642 .into()
2643 }
2644
2645 fn finish(&mut self) -> dispatch::DispatchCommandBuffer {
2646 let descriptor = wgt::CommandBufferDescriptor::default();
2647 let (id, opt_label_and_error) =
2648 self.context
2649 .0
2650 .command_encoder_finish(self.id, &descriptor, None);
2651 if let Some((label, cause)) = opt_label_and_error {
2652 self.context
2653 .handle_error(&self.error_sink, cause, Some(&label), "a CommandEncoder");
2654 }
2655 CoreCommandBuffer {
2656 context: self.context.clone(),
2657 id,
2658 }
2659 .into()
2660 }
2661
2662 fn clear_texture(
2663 &self,
2664 texture: &dispatch::DispatchTexture,
2665 subresource_range: &crate::ImageSubresourceRange,
2666 ) {
2667 let texture = texture.as_core();
2668
2669 if let Err(cause) =
2670 self.context
2671 .0
2672 .command_encoder_clear_texture(self.id, texture.id, subresource_range)
2673 {
2674 self.context.handle_error_nolabel(
2675 &self.error_sink,
2676 cause,
2677 "CommandEncoder::clear_texture",
2678 );
2679 }
2680 }
2681
2682 fn clear_buffer(
2683 &self,
2684 buffer: &dispatch::DispatchBuffer,
2685 offset: crate::BufferAddress,
2686 size: Option<crate::BufferAddress>,
2687 ) {
2688 let buffer = buffer.as_core();
2689
2690 if let Err(cause) = self
2691 .context
2692 .0
2693 .command_encoder_clear_buffer(self.id, buffer.id, offset, size)
2694 {
2695 self.context.handle_error_nolabel(
2696 &self.error_sink,
2697 cause,
2698 "CommandEncoder::fill_buffer",
2699 );
2700 }
2701 }
2702
2703 fn insert_debug_marker(&self, label: &str) {
2704 if let Err(cause) = self
2705 .context
2706 .0
2707 .command_encoder_insert_debug_marker(self.id, label)
2708 {
2709 self.context.handle_error_nolabel(
2710 &self.error_sink,
2711 cause,
2712 "CommandEncoder::insert_debug_marker",
2713 );
2714 }
2715 }
2716
2717 fn push_debug_group(&self, label: &str) {
2718 if let Err(cause) = self
2719 .context
2720 .0
2721 .command_encoder_push_debug_group(self.id, label)
2722 {
2723 self.context.handle_error_nolabel(
2724 &self.error_sink,
2725 cause,
2726 "CommandEncoder::push_debug_group",
2727 );
2728 }
2729 }
2730
2731 fn pop_debug_group(&self) {
2732 if let Err(cause) = self.context.0.command_encoder_pop_debug_group(self.id) {
2733 self.context.handle_error_nolabel(
2734 &self.error_sink,
2735 cause,
2736 "CommandEncoder::pop_debug_group",
2737 );
2738 }
2739 }
2740
2741 fn write_timestamp(&self, query_set: &dispatch::DispatchQuerySet, query_index: u32) {
2742 let query_set = query_set.as_core();
2743
2744 if let Err(cause) =
2745 self.context
2746 .0
2747 .command_encoder_write_timestamp(self.id, query_set.id, query_index)
2748 {
2749 self.context.handle_error_nolabel(
2750 &self.error_sink,
2751 cause,
2752 "CommandEncoder::write_timestamp",
2753 );
2754 }
2755 }
2756
2757 fn resolve_query_set(
2758 &self,
2759 query_set: &dispatch::DispatchQuerySet,
2760 first_query: u32,
2761 query_count: u32,
2762 destination: &dispatch::DispatchBuffer,
2763 destination_offset: crate::BufferAddress,
2764 ) {
2765 let query_set = query_set.as_core();
2766 let destination = destination.as_core();
2767
2768 if let Err(cause) = self.context.0.command_encoder_resolve_query_set(
2769 self.id,
2770 query_set.id,
2771 first_query,
2772 query_count,
2773 destination.id,
2774 destination_offset,
2775 ) {
2776 self.context.handle_error_nolabel(
2777 &self.error_sink,
2778 cause,
2779 "CommandEncoder::resolve_query_set",
2780 );
2781 }
2782 }
2783
2784 fn mark_acceleration_structures_built<'a>(
2785 &self,
2786 blas: &mut dyn Iterator<Item = &'a Blas>,
2787 tlas: &mut dyn Iterator<Item = &'a Tlas>,
2788 ) {
2789 let blas = blas
2790 .map(|b| b.inner.as_core().id)
2791 .collect::<SmallVec<[_; 4]>>();
2792 let tlas = tlas
2793 .map(|t| t.inner.as_core().id)
2794 .collect::<SmallVec<[_; 4]>>();
2795 if let Err(cause) = self
2796 .context
2797 .0
2798 .command_encoder_mark_acceleration_structures_built(self.id, &blas, &tlas)
2799 {
2800 self.context.handle_error_nolabel(
2801 &self.error_sink,
2802 cause,
2803 "CommandEncoder::build_acceleration_structures_unsafe_tlas",
2804 );
2805 }
2806 }
2807
2808 fn build_acceleration_structures<'a>(
2809 &self,
2810 blas: &mut dyn Iterator<Item = &'a crate::BlasBuildEntry<'a>>,
2811 tlas: &mut dyn Iterator<Item = &'a crate::Tlas>,
2812 ) {
2813 let blas = blas.map(|e: &crate::BlasBuildEntry<'_>| {
2814 let geometries = match e.geometry {
2815 crate::BlasGeometries::TriangleGeometries(ref triangle_geometries) => {
2816 let iter = triangle_geometries.iter().map(|tg| {
2817 wgc::ray_tracing::BlasTriangleGeometry {
2818 vertex_buffer: tg.vertex_buffer.inner.as_core().id,
2819 index_buffer: tg.index_buffer.map(|buf| buf.inner.as_core().id),
2820 transform_buffer: tg.transform_buffer.map(|buf| buf.inner.as_core().id),
2821 size: tg.size,
2822 transform_buffer_offset: tg.transform_buffer_offset,
2823 first_vertex: tg.first_vertex,
2824 vertex_stride: tg.vertex_stride,
2825 first_index: tg.first_index,
2826 }
2827 });
2828 wgc::ray_tracing::BlasGeometries::TriangleGeometries(Box::new(iter))
2829 }
2830 };
2831 wgc::ray_tracing::BlasBuildEntry {
2832 blas_id: e.blas.inner.as_core().id,
2833 geometries,
2834 }
2835 });
2836
2837 let tlas = tlas.into_iter().map(|e| {
2838 let instances = e
2839 .instances
2840 .iter()
2841 .map(|instance: &Option<crate::TlasInstance>| {
2842 instance
2843 .as_ref()
2844 .map(|instance| wgc::ray_tracing::TlasInstance {
2845 blas_id: instance.blas.as_core().id,
2846 transform: &instance.transform,
2847 custom_data: instance.custom_data,
2848 mask: instance.mask,
2849 })
2850 });
2851 wgc::ray_tracing::TlasPackage {
2852 tlas_id: e.inner.as_core().id,
2853 instances: Box::new(instances),
2854 lowest_unmodified: e.lowest_unmodified,
2855 }
2856 });
2857
2858 if let Err(cause) = self
2859 .context
2860 .0
2861 .command_encoder_build_acceleration_structures(self.id, blas, tlas)
2862 {
2863 self.context.handle_error_nolabel(
2864 &self.error_sink,
2865 cause,
2866 "CommandEncoder::build_acceleration_structures_unsafe_tlas",
2867 );
2868 }
2869 }
2870
2871 fn transition_resources<'a>(
2872 &mut self,
2873 buffer_transitions: &mut dyn Iterator<
2874 Item = wgt::BufferTransition<&'a dispatch::DispatchBuffer>,
2875 >,
2876 texture_transitions: &mut dyn Iterator<
2877 Item = wgt::TextureTransition<&'a dispatch::DispatchTexture>,
2878 >,
2879 ) {
2880 let result = self.context.0.command_encoder_transition_resources(
2881 self.id,
2882 buffer_transitions.map(|t| wgt::BufferTransition {
2883 buffer: t.buffer.as_core().id,
2884 state: t.state,
2885 }),
2886 texture_transitions.map(|t| wgt::TextureTransition {
2887 texture: t.texture.as_core().id,
2888 selector: t.selector.clone(),
2889 state: t.state,
2890 }),
2891 );
2892
2893 if let Err(cause) = result {
2894 self.context.handle_error_nolabel(
2895 &self.error_sink,
2896 cause,
2897 "CommandEncoder::transition_resources",
2898 );
2899 }
2900 }
2901}
2902
impl Drop for CoreCommandEncoder {
    fn drop(&mut self) {
        // Notify wgpu-core that this command encoder handle is no longer in use.
        self.context.0.command_encoder_drop(self.id)
    }
}
2908
// No command-buffer-specific dispatch methods are required; marker impl.
impl dispatch::CommandBufferInterface for CoreCommandBuffer {}
2910
impl Drop for CoreCommandBuffer {
    fn drop(&mut self) {
        // Notify wgpu-core that this command buffer handle is no longer in use.
        self.context.0.command_buffer_drop(self.id)
    }
}
2916
// Compute-pass dispatch: every method forwards to the wgpu-core pass encoder
// and routes any validation error to the error sink, tagged with the pass's
// label so the report can be attributed.
impl dispatch::ComputePassInterface for CoreComputePass {
    fn set_pipeline(&mut self, pipeline: &dispatch::DispatchComputePipeline) {
        let pipeline = pipeline.as_core();

        if let Err(cause) = self
            .context
            .0
            .compute_pass_set_pipeline(&mut self.pass, pipeline.id)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "ComputePass::set_pipeline",
            );
        }
    }

    fn set_bind_group(
        &mut self,
        index: u32,
        bind_group: Option<&dispatch::DispatchBindGroup>,
        offsets: &[crate::DynamicOffset],
    ) {
        // `None` unbinds the slot; otherwise translate to the core id.
        let bg = bind_group.map(|bg| bg.as_core().id);

        if let Err(cause) =
            self.context
                .0
                .compute_pass_set_bind_group(&mut self.pass, index, bg, offsets)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "ComputePass::set_bind_group",
            );
        }
    }

    fn set_immediates(&mut self, offset: u32, data: &[u8]) {
        if let Err(cause) = self
            .context
            .0
            .compute_pass_set_immediates(&mut self.pass, offset, data)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "ComputePass::set_immediates",
            );
        }
    }

    fn insert_debug_marker(&mut self, label: &str) {
        // Trailing 0: marker color argument — NOTE(review): presumably unused
        // by wgpu-core; confirm against the core API.
        if let Err(cause) =
            self.context
                .0
                .compute_pass_insert_debug_marker(&mut self.pass, label, 0)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "ComputePass::insert_debug_marker",
            );
        }
    }

    fn push_debug_group(&mut self, group_label: &str) {
        if let Err(cause) =
            self.context
                .0
                .compute_pass_push_debug_group(&mut self.pass, group_label, 0)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "ComputePass::push_debug_group",
            );
        }
    }

    fn pop_debug_group(&mut self) {
        if let Err(cause) = self.context.0.compute_pass_pop_debug_group(&mut self.pass) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "ComputePass::pop_debug_group",
            );
        }
    }

    fn write_timestamp(&mut self, query_set: &dispatch::DispatchQuerySet, query_index: u32) {
        let query_set = query_set.as_core();

        if let Err(cause) =
            self.context
                .0
                .compute_pass_write_timestamp(&mut self.pass, query_set.id, query_index)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "ComputePass::write_timestamp",
            );
        }
    }

    fn begin_pipeline_statistics_query(
        &mut self,
        query_set: &dispatch::DispatchQuerySet,
        query_index: u32,
    ) {
        let query_set = query_set.as_core();

        if let Err(cause) = self.context.0.compute_pass_begin_pipeline_statistics_query(
            &mut self.pass,
            query_set.id,
            query_index,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "ComputePass::begin_pipeline_statistics_query",
            );
        }
    }

    fn end_pipeline_statistics_query(&mut self) {
        if let Err(cause) = self
            .context
            .0
            .compute_pass_end_pipeline_statistics_query(&mut self.pass)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "ComputePass::end_pipeline_statistics_query",
            );
        }
    }

    fn dispatch_workgroups(&mut self, x: u32, y: u32, z: u32) {
        if let Err(cause) = self
            .context
            .0
            .compute_pass_dispatch_workgroups(&mut self.pass, x, y, z)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "ComputePass::dispatch_workgroups",
            );
        }
    }

    fn dispatch_workgroups_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.compute_pass_dispatch_workgroups_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "ComputePass::dispatch_workgroups_indirect",
            );
        }
    }
}
3102
impl Drop for CoreComputePass {
    fn drop(&mut self) {
        // Dropping the wrapper ends the pass in wgpu-core; any validation
        // error raised by ending it is reported through the error sink.
        if let Err(cause) = self.context.0.compute_pass_end(&mut self.pass) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "ComputePass::end",
            );
        }
    }
}
3115
// Render-pass dispatch: every method forwards to the wgpu-core pass encoder
// and routes any validation error to the error sink, tagged with the pass's
// label so the report can be attributed.
impl dispatch::RenderPassInterface for CoreRenderPass {
    fn set_pipeline(&mut self, pipeline: &dispatch::DispatchRenderPipeline) {
        let pipeline = pipeline.as_core();

        if let Err(cause) = self
            .context
            .0
            .render_pass_set_pipeline(&mut self.pass, pipeline.id)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_pipeline",
            );
        }
    }

    fn set_bind_group(
        &mut self,
        index: u32,
        bind_group: Option<&dispatch::DispatchBindGroup>,
        offsets: &[crate::DynamicOffset],
    ) {
        // `None` unbinds the slot; otherwise translate to the core id.
        let bg = bind_group.map(|bg| bg.as_core().id);

        if let Err(cause) =
            self.context
                .0
                .render_pass_set_bind_group(&mut self.pass, index, bg, offsets)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_bind_group",
            );
        }
    }

    fn set_index_buffer(
        &mut self,
        buffer: &dispatch::DispatchBuffer,
        index_format: crate::IndexFormat,
        offset: crate::BufferAddress,
        size: Option<crate::BufferSize>,
    ) {
        let buffer = buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_set_index_buffer(
            &mut self.pass,
            buffer.id,
            index_format,
            offset,
            size,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_index_buffer",
            );
        }
    }

    fn set_vertex_buffer(
        &mut self,
        slot: u32,
        buffer: &dispatch::DispatchBuffer,
        offset: crate::BufferAddress,
        size: Option<crate::BufferSize>,
    ) {
        let buffer = buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_set_vertex_buffer(
            &mut self.pass,
            slot,
            buffer.id,
            offset,
            size,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_vertex_buffer",
            );
        }
    }

    fn set_immediates(&mut self, offset: u32, data: &[u8]) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_set_immediates(&mut self.pass, offset, data)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_immediates",
            );
        }
    }

    fn set_blend_constant(&mut self, color: crate::Color) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_set_blend_constant(&mut self.pass, color)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_blend_constant",
            );
        }
    }

    fn set_scissor_rect(&mut self, x: u32, y: u32, width: u32, height: u32) {
        if let Err(cause) =
            self.context
                .0
                .render_pass_set_scissor_rect(&mut self.pass, x, y, width, height)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_scissor_rect",
            );
        }
    }

    fn set_viewport(
        &mut self,
        x: f32,
        y: f32,
        width: f32,
        height: f32,
        min_depth: f32,
        max_depth: f32,
    ) {
        if let Err(cause) = self.context.0.render_pass_set_viewport(
            &mut self.pass,
            x,
            y,
            width,
            height,
            min_depth,
            max_depth,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_viewport",
            );
        }
    }

    fn set_stencil_reference(&mut self, reference: u32) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_set_stencil_reference(&mut self.pass, reference)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_stencil_reference",
            );
        }
    }

    // The public draw API takes half-open ranges; wgpu-core takes
    // (count, first) pairs, hence the end - start conversions below.
    fn draw(&mut self, vertices: Range<u32>, instances: Range<u32>) {
        if let Err(cause) = self.context.0.render_pass_draw(
            &mut self.pass,
            vertices.end - vertices.start,
            instances.end - instances.start,
            vertices.start,
            instances.start,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw",
            );
        }
    }

    fn draw_indexed(&mut self, indices: Range<u32>, base_vertex: i32, instances: Range<u32>) {
        if let Err(cause) = self.context.0.render_pass_draw_indexed(
            &mut self.pass,
            indices.end - indices.start,
            instances.end - instances.start,
            indices.start,
            base_vertex,
            instances.start,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_indexed",
            );
        }
    }

    fn draw_mesh_tasks(&mut self, group_count_x: u32, group_count_y: u32, group_count_z: u32) {
        if let Err(cause) = self.context.0.render_pass_draw_mesh_tasks(
            &mut self.pass,
            group_count_x,
            group_count_y,
            group_count_z,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_mesh_tasks",
            );
        }
    }

    fn draw_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_draw_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_indirect",
            );
        }
    }

    fn draw_indexed_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_draw_indexed_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_indexed_indirect",
            );
        }
    }

    fn draw_mesh_tasks_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_draw_mesh_tasks_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_mesh_tasks_indirect",
            );
        }
    }

    fn multi_draw_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_multi_draw_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
            count,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_indirect",
            );
        }
    }

    fn multi_draw_indexed_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_multi_draw_indexed_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
            count,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_indexed_indirect",
            );
        }
    }

    fn multi_draw_mesh_tasks_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_multi_draw_mesh_tasks_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
            count,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_mesh_tasks_indirect",
            );
        }
    }

    fn multi_draw_indirect_count(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count_buffer: &dispatch::DispatchBuffer,
        count_buffer_offset: crate::BufferAddress,
        max_count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();
        let count_buffer = count_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_multi_draw_indirect_count(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
            count_buffer.id,
            count_buffer_offset,
            max_count,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_indirect_count",
            );
        }
    }

    fn multi_draw_indexed_indirect_count(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count_buffer: &dispatch::DispatchBuffer,
        count_buffer_offset: crate::BufferAddress,
        max_count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();
        let count_buffer = count_buffer.as_core();

        if let Err(cause) = self
            .context
            .0
            .render_pass_multi_draw_indexed_indirect_count(
                &mut self.pass,
                indirect_buffer.id,
                indirect_offset,
                count_buffer.id,
                count_buffer_offset,
                max_count,
            )
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_indexed_indirect_count",
            );
        }
    }

    fn multi_draw_mesh_tasks_indirect_count(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count_buffer: &dispatch::DispatchBuffer,
        count_buffer_offset: crate::BufferAddress,
        max_count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core()
;
        let count_buffer = count_buffer.as_core();

        if let Err(cause) = self
            .context
            .0
            .render_pass_multi_draw_mesh_tasks_indirect_count(
                &mut self.pass,
                indirect_buffer.id,
                indirect_offset,
                count_buffer.id,
                count_buffer_offset,
                max_count,
            )
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_mesh_tasks_indirect_count",
            );
        }
    }

    fn insert_debug_marker(&mut self, label: &str) {
        // Trailing 0: marker color argument — NOTE(review): presumably unused
        // by wgpu-core; confirm against the core API.
        if let Err(cause) = self
            .context
            .0
            .render_pass_insert_debug_marker(&mut self.pass, label, 0)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::insert_debug_marker",
            );
        }
    }

    fn push_debug_group(&mut self, group_label: &str) {
        if let Err(cause) =
            self.context
                .0
                .render_pass_push_debug_group(&mut self.pass, group_label, 0)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::push_debug_group",
            );
        }
    }

    fn pop_debug_group(&mut self) {
        if let Err(cause) = self.context.0.render_pass_pop_debug_group(&mut self.pass) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::pop_debug_group",
            );
        }
    }

    fn write_timestamp(&mut self, query_set: &dispatch::DispatchQuerySet, query_index: u32) {
        let query_set = query_set.as_core();

        if let Err(cause) =
            self.context
                .0
                .render_pass_write_timestamp(&mut self.pass, query_set.id, query_index)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::write_timestamp",
            );
        }
    }

    fn begin_occlusion_query(&mut self, query_index: u32) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_begin_occlusion_query(&mut self.pass, query_index)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::begin_occlusion_query",
            );
        }
    }

    fn end_occlusion_query(&mut self) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_end_occlusion_query(&mut self.pass)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::end_occlusion_query",
            );
        }
    }

    fn begin_pipeline_statistics_query(
        &mut self,
        query_set: &dispatch::DispatchQuerySet,
        query_index: u32,
    ) {
        let query_set = query_set.as_core();

        if let Err(cause) = self.context.0.render_pass_begin_pipeline_statistics_query(
            &mut self.pass,
            query_set.id,
            query_index,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::begin_pipeline_statistics_query",
            );
        }
    }

    fn end_pipeline_statistics_query(&mut self) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_end_pipeline_statistics_query(&mut self.pass)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::end_pipeline_statistics_query",
            );
        }
    }

    fn execute_bundles(
        &mut self,
        render_bundles: &mut dyn Iterator<Item = &dispatch::DispatchRenderBundle>,
    ) {
        // Collected on the stack for the common few-bundle case.
        let temp_render_bundles = render_bundles
            .map(|rb| rb.as_core().id)
            .collect::<SmallVec<[_; 4]>>();
        if let Err(cause) = self
            .context
            .0
            .render_pass_execute_bundles(&mut self.pass, &temp_render_bundles)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::execute_bundles",
            );
        }
    }
}
3713
3714impl Drop for CoreRenderPass {
3715 fn drop(&mut self) {
3716 if let Err(cause) = self.context.0.render_pass_end(&mut self.pass) {
3717 self.context.handle_error(
3718 &self.error_sink,
3719 cause,
3720 self.pass.label(),
3721 "RenderPass::end",
3722 );
3723 }
3724 }
3725}
3726
3727impl dispatch::RenderBundleEncoderInterface for CoreRenderBundleEncoder {
3728 fn set_pipeline(&mut self, pipeline: &dispatch::DispatchRenderPipeline) {
3729 let pipeline = pipeline.as_core();
3730
3731 wgpu_render_bundle_set_pipeline(&mut self.encoder, pipeline.id)
3732 }
3733
3734 fn set_bind_group(
3735 &mut self,
3736 index: u32,
3737 bind_group: Option<&dispatch::DispatchBindGroup>,
3738 offsets: &[crate::DynamicOffset],
3739 ) {
3740 let bg = bind_group.map(|bg| bg.as_core().id);
3741
3742 unsafe {
3743 wgpu_render_bundle_set_bind_group(
3744 &mut self.encoder,
3745 index,
3746 bg,
3747 offsets.as_ptr(),
3748 offsets.len(),
3749 )
3750 }
3751 }
3752
3753 fn set_index_buffer(
3754 &mut self,
3755 buffer: &dispatch::DispatchBuffer,
3756 index_format: crate::IndexFormat,
3757 offset: crate::BufferAddress,
3758 size: Option<crate::BufferSize>,
3759 ) {
3760 let buffer = buffer.as_core();
3761
3762 self.encoder
3763 .set_index_buffer(buffer.id, index_format, offset, size)
3764 }
3765
3766 fn set_vertex_buffer(
3767 &mut self,
3768 slot: u32,
3769 buffer: &dispatch::DispatchBuffer,
3770 offset: crate::BufferAddress,
3771 size: Option<crate::BufferSize>,
3772 ) {
3773 let buffer = buffer.as_core();
3774
3775 wgpu_render_bundle_set_vertex_buffer(&mut self.encoder, slot, buffer.id, offset, size)
3776 }
3777
3778 fn set_immediates(&mut self, offset: u32, data: &[u8]) {
3779 unsafe {
3780 wgpu_render_bundle_set_immediates(
3781 &mut self.encoder,
3782 offset,
3783 data.len().try_into().unwrap(),
3784 data.as_ptr(),
3785 )
3786 }
3787 }
3788
3789 fn draw(&mut self, vertices: Range<u32>, instances: Range<u32>) {
3790 wgpu_render_bundle_draw(
3791 &mut self.encoder,
3792 vertices.end - vertices.start,
3793 instances.end - instances.start,
3794 vertices.start,
3795 instances.start,
3796 )
3797 }
3798
3799 fn draw_indexed(&mut self, indices: Range<u32>, base_vertex: i32, instances: Range<u32>) {
3800 wgpu_render_bundle_draw_indexed(
3801 &mut self.encoder,
3802 indices.end - indices.start,
3803 instances.end - instances.start,
3804 indices.start,
3805 base_vertex,
3806 instances.start,
3807 )
3808 }
3809
3810 fn draw_indirect(
3811 &mut self,
3812 indirect_buffer: &dispatch::DispatchBuffer,
3813 indirect_offset: crate::BufferAddress,
3814 ) {
3815 let indirect_buffer = indirect_buffer.as_core();
3816
3817 wgpu_render_bundle_draw_indirect(&mut self.encoder, indirect_buffer.id, indirect_offset)
3818 }
3819
3820 fn draw_indexed_indirect(
3821 &mut self,
3822 indirect_buffer: &dispatch::DispatchBuffer,
3823 indirect_offset: crate::BufferAddress,
3824 ) {
3825 let indirect_buffer = indirect_buffer.as_core();
3826
3827 wgpu_render_bundle_draw_indexed_indirect(
3828 &mut self.encoder,
3829 indirect_buffer.id,
3830 indirect_offset,
3831 )
3832 }
3833
3834 fn finish(self, desc: &crate::RenderBundleDescriptor<'_>) -> dispatch::DispatchRenderBundle
3835 where
3836 Self: Sized,
3837 {
3838 let (id, error) = self.context.0.render_bundle_encoder_finish(
3839 self.encoder,
3840 &desc.map_label(|l| l.map(Borrowed)),
3841 None,
3842 );
3843 if let Some(err) = error {
3844 self.context
3845 .handle_error_fatal(err, "RenderBundleEncoder::finish");
3846 }
3847 CoreRenderBundle {
3848 context: self.context.clone(),
3849 id,
3850 }
3851 .into()
3852 }
3853}
3854
// Marker impl: the empty body satisfies the dispatch trait, registering
// `CoreRenderBundle` as this backend's render-bundle type.
impl dispatch::RenderBundleInterface for CoreRenderBundle {}
3856
impl Drop for CoreRenderBundle {
    fn drop(&mut self) {
        // Release the wgpu-core handle backing this render bundle.
        self.context.0.render_bundle_drop(self.id)
    }
}
3862
3863impl dispatch::SurfaceInterface for CoreSurface {
3864 fn get_capabilities(&self, adapter: &dispatch::DispatchAdapter) -> wgt::SurfaceCapabilities {
3865 let adapter = adapter.as_core();
3866
3867 self.context
3868 .0
3869 .surface_get_capabilities(self.id, adapter.id)
3870 .unwrap_or_default()
3871 }
3872
3873 fn configure(&self, device: &dispatch::DispatchDevice, config: &crate::SurfaceConfiguration) {
3874 let device = device.as_core();
3875
3876 let error = self.context.0.surface_configure(self.id, device.id, config);
3877 if let Some(e) = error {
3878 self.context
3879 .handle_error_nolabel(&device.error_sink, e, "Surface::configure");
3880 } else {
3881 *self.configured_device.lock() = Some(device.id);
3882 *self.error_sink.lock() = Some(device.error_sink.clone());
3883 }
3884 }
3885
3886 fn get_current_texture(
3887 &self,
3888 ) -> (
3889 Option<dispatch::DispatchTexture>,
3890 crate::SurfaceStatus,
3891 dispatch::DispatchSurfaceOutputDetail,
3892 ) {
3893 let error_sink = if let Some(error_sink) = self.error_sink.lock().as_ref() {
3894 error_sink.clone()
3895 } else {
3896 Arc::new(Mutex::new(ErrorSinkRaw::new()))
3897 };
3898
3899 let output_detail = CoreSurfaceOutputDetail {
3900 context: self.context.clone(),
3901 surface_id: self.id,
3902 error_sink: error_sink.clone(),
3903 }
3904 .into();
3905
3906 match self.context.0.surface_get_current_texture(self.id, None) {
3907 Ok(wgc::present::SurfaceOutput {
3908 status,
3909 texture: texture_id,
3910 }) => {
3911 let data = texture_id
3912 .map(|id| CoreTexture {
3913 context: self.context.clone(),
3914 id,
3915 error_sink,
3916 })
3917 .map(Into::into);
3918
3919 (data, status, output_detail)
3920 }
3921 Err(err) => {
3922 let error_sink = self.error_sink.lock();
3923 match error_sink.as_ref() {
3924 Some(error_sink) => {
3925 self.context.handle_error_nolabel(
3926 error_sink,
3927 err,
3928 "Surface::get_current_texture_view",
3929 );
3930 (None, crate::SurfaceStatus::Unknown, output_detail)
3931 }
3932 None => self
3933 .context
3934 .handle_error_fatal(err, "Surface::get_current_texture_view"),
3935 }
3936 }
3937 }
3938 }
3939}
3940
impl Drop for CoreSurface {
    fn drop(&mut self) {
        // Release the wgpu-core handle backing this surface.
        self.context.0.surface_drop(self.id)
    }
}
3946
3947impl dispatch::SurfaceOutputDetailInterface for CoreSurfaceOutputDetail {
3948 fn present(&self) {
3949 match self.context.0.surface_present(self.surface_id) {
3950 Ok(_status) => (),
3951 Err(err) => {
3952 self.context
3953 .handle_error_nolabel(&self.error_sink, err, "Surface::present");
3954 }
3955 }
3956 }
3957
3958 fn texture_discard(&self) {
3959 match self.context.0.surface_texture_discard(self.surface_id) {
3960 Ok(_status) => (),
3961 Err(err) => self
3962 .context
3963 .handle_error_fatal(err, "Surface::discard_texture"),
3964 }
3965 }
3966}
impl Drop for CoreSurfaceOutputDetail {
    fn drop(&mut self) {
        // Intentionally empty: presentation/discard are explicit operations
        // and the remaining fields clean up via their own Drop impls.
        // NOTE(review): presumably an explicit-cleanup body once lived here —
        // confirm nothing else needs releasing on drop.
    }
}
3974
impl dispatch::QueueWriteBufferInterface for CoreQueueWriteBuffer {
    /// Length in bytes of the staging mapping.
    #[inline]
    fn len(&self) -> usize {
        self.mapping.len()
    }

    /// Write-only view of the staging mapping; simply delegates to the inner
    /// mapped range. The caller must uphold that method's safety contract.
    #[inline]
    unsafe fn write_slice(&mut self) -> WriteOnly<'_, [u8]> {
        unsafe { self.mapping.write_slice() }
    }
}
impl Drop for CoreQueueWriteBuffer {
    fn drop(&mut self) {
        // Intentionally empty: the contained mapping/buffer fields release
        // their resources via their own Drop impls.
        // NOTE(review): cleanup lines appear to have been elided here —
        // confirm no explicit release is required.
    }
}
3993
impl dispatch::BufferMappedRangeInterface for CoreBufferMappedRange {
    /// Size in bytes of the mapped range.
    #[inline]
    fn len(&self) -> usize {
        self.size
    }

    #[inline]
    unsafe fn read_slice(&self) -> &[u8] {
        // SAFETY: `self.ptr`/`self.size` are assumed to describe a live
        // buffer mapping (established when this range was constructed —
        // outside this view); the caller upholds the trait contract that the
        // mapping stays valid and unaliased for the returned lifetime.
        unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.size) }
    }

    #[inline]
    unsafe fn write_slice(&mut self) -> WriteOnly<'_, [u8]> {
        // SAFETY: same provenance as `read_slice`; the `WriteOnly` wrapper
        // presumably exists to forbid reads of not-yet-written mapped memory
        // — see its definition for the exact guarantee.
        unsafe { WriteOnly::new(NonNull::slice_from_raw_parts(self.ptr, self.size)) }
    }

    // This is the wgpu-core backend; the WebGPU-only accessor can never be
    // meaningfully called here, so it panics by construction.
    #[cfg(webgpu)]
    fn as_uint8array(&self) -> &js_sys::Uint8Array {
        panic!("Only available on WebGPU")
    }
}
4014}