1use alloc::{
2 borrow::Cow::{self, Borrowed},
3 boxed::Box,
4 format,
5 string::{String, ToString as _},
6 sync::Arc,
7 vec,
8 vec::Vec,
9};
10use core::{
11 error::Error,
12 fmt,
13 future::ready,
14 ops::{Deref, Range},
15 pin::Pin,
16 ptr::NonNull,
17 slice,
18};
19use hashbrown::HashMap;
20
21use arrayvec::ArrayVec;
22use smallvec::SmallVec;
23use wgc::{
24 command::bundle_ffi::*, error::ContextErrorSource, pipeline::CreateShaderModuleError,
25 resource::BlasPrepareCompactResult,
26};
27use wgt::{
28 error::{ErrorType, WebGpuError},
29 WasmNotSendSync,
30};
31
32use crate::{
33 api,
34 dispatch::{self, BlasCompactCallback, BufferMappedRangeInterface},
35 BindingResource, Blas, BufferBinding, BufferDescriptor, CompilationInfo, CompilationMessage,
36 CompilationMessageType, ErrorSource, Features, Label, LoadOp, MapMode, Operations,
37 ShaderSource, SurfaceTargetUnsafe, TextureDescriptor, Tlas, WriteOnly,
38};
39use crate::{dispatch::DispatchAdapter, util::Mutex};
40
// Platform-appropriate thread-ID abstraction; used below to key per-thread
// error-scope stacks in `ErrorSinkRaw`.
mod thread_id;
42
/// The `wgpu-core`-backed implementation of the wgpu dispatch layer: a cheap,
/// clonable handle to the shared `wgc::global::Global` that owns all resources.
#[derive(Clone)]
pub struct ContextWgpuCore(Arc<wgc::global::Global>);
45
impl Drop for ContextWgpuCore {
    fn drop(&mut self) {
        // Intentionally empty in this build. NOTE(review): presumably a
        // placeholder for cfg-gated teardown elsewhere — confirm before
        // removing, since the explicit `Drop` impl also affects drop-check.
    }
}
51
52impl fmt::Debug for ContextWgpuCore {
53 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
54 f.debug_struct("ContextWgpuCore")
55 .field("type", &"Native")
56 .finish()
57 }
58}
59
impl ContextWgpuCore {
    /// Builds a context around a pre-existing `wgpu-hal` instance of backend `A`.
    ///
    /// # Safety
    /// Same contract as `wgc::global::Global::from_hal_instance`.
    pub unsafe fn from_hal_instance<A: hal::Api>(hal_instance: A::Instance) -> Self {
        Self(unsafe {
            Arc::new(wgc::global::Global::from_hal_instance::<A>(
                "wgpu",
                hal_instance,
            ))
        })
    }

    /// Borrows the underlying `wgpu-hal` instance for backend `A`, if present.
    ///
    /// # Safety
    /// Defers to `wgc::global::Global::instance_as_hal`; see its contract.
    pub unsafe fn instance_as_hal<A: hal::Api>(&self) -> Option<&A::Instance> {
        unsafe { self.0.instance_as_hal::<A>() }
    }

    /// Builds a context around a pre-existing `wgpu-core` instance.
    ///
    /// # Safety
    /// Same contract as `wgc::global::Global::from_instance`.
    pub unsafe fn from_core_instance(core_instance: wgc::instance::Instance) -> Self {
        Self(unsafe { Arc::new(wgc::global::Global::from_instance(core_instance)) })
    }

    /// Lists adapter ids for the requested backends.
    #[cfg(wgpu_core)]
    pub fn enumerate_adapters(&self, backends: wgt::Backends) -> Vec<wgc::id::AdapterId> {
        // NOTE(review): the meaning of the `false` flag is not visible here —
        // check `wgc::Global::enumerate_adapters` before changing it.
        self.0.enumerate_adapters(backends, false)
    }

    /// Registers a `wgpu-hal` adapter with the global and returns its id.
    ///
    /// # Safety
    /// Same contract as `wgc::Global::create_adapter_from_hal`.
    pub unsafe fn create_adapter_from_hal<A: hal::Api>(
        &self,
        hal_adapter: hal::ExposedAdapter<A>,
    ) -> wgc::id::AdapterId {
        unsafe { self.0.create_adapter_from_hal(hal_adapter.into(), None) }
    }

    /// Borrows the hal adapter backing `adapter`, if it belongs to backend `A`.
    ///
    /// # Safety
    /// Defers to `wgc::Global::adapter_as_hal`.
    pub unsafe fn adapter_as_hal<A: hal::Api>(
        &self,
        adapter: &CoreAdapter,
    ) -> Option<impl Deref<Target = A::Adapter> + WasmNotSendSync> {
        unsafe { self.0.adapter_as_hal::<A>(adapter.id) }
    }

    /// Borrows the hal buffer backing `buffer`, if it belongs to backend `A`.
    ///
    /// # Safety
    /// Defers to `wgc::Global::buffer_as_hal`.
    pub unsafe fn buffer_as_hal<A: hal::Api>(
        &self,
        buffer: &CoreBuffer,
    ) -> Option<impl Deref<Target = A::Buffer>> {
        unsafe { self.0.buffer_as_hal::<A>(buffer.id) }
    }

    /// Wraps an already-open hal device/queue pair in frontend handles.
    /// The returned device and queue share one error sink, so errors raised on
    /// either surface through the same scope stack.
    ///
    /// # Safety
    /// Same contract as `wgc::Global::create_device_from_hal`.
    pub unsafe fn create_device_from_hal<A: hal::Api>(
        &self,
        adapter: &CoreAdapter,
        hal_device: hal::OpenDevice<A>,
        desc: &crate::DeviceDescriptor<'_>,
    ) -> Result<(CoreDevice, CoreQueue), crate::RequestDeviceError> {
        let (device_id, queue_id) = unsafe {
            self.0.create_device_from_hal(
                adapter.id,
                hal_device.into(),
                &desc.map_label(|l| l.map(Borrowed)),
                None,
                None,
            )
        }?;
        let error_sink = Arc::new(Mutex::new(ErrorSinkRaw::new()));
        let device = CoreDevice {
            context: self.clone(),
            id: device_id,
            error_sink: error_sink.clone(),
            features: desc.required_features,
        };
        let queue = CoreQueue {
            context: self.clone(),
            id: queue_id,
            error_sink,
        };
        Ok((device, queue))
    }

    /// Wraps a hal texture in a frontend handle. Creation errors are routed to
    /// `device`'s error sink instead of being returned; a handle is produced
    /// either way.
    ///
    /// # Safety
    /// Same contract as `wgc::Global::create_texture_from_hal`.
    pub unsafe fn create_texture_from_hal<A: hal::Api>(
        &self,
        hal_texture: A::Texture,
        device: &CoreDevice,
        desc: &TextureDescriptor<'_>,
    ) -> CoreTexture {
        let descriptor = desc.map_label_and_view_formats(|l| l.map(Borrowed), |v| v.to_vec());
        let (id, error) = unsafe {
            self.0
                .create_texture_from_hal(Box::new(hal_texture), device.id, &descriptor, None)
        };
        if let Some(cause) = error {
            self.handle_error(
                &device.error_sink,
                cause,
                desc.label,
                "Device::create_texture_from_hal",
            );
        }
        CoreTexture {
            context: self.clone(),
            id,
            error_sink: Arc::clone(&device.error_sink),
        }
    }

    /// Wraps a hal buffer in a frontend handle; error handling mirrors
    /// [`Self::create_texture_from_hal`].
    ///
    /// # Safety
    /// Same contract as `wgc::Global::create_buffer_from_hal`.
    pub unsafe fn create_buffer_from_hal<A: hal::Api>(
        &self,
        hal_buffer: A::Buffer,
        device: &CoreDevice,
        desc: &BufferDescriptor<'_>,
    ) -> CoreBuffer {
        let (id, error) = unsafe {
            self.0.create_buffer_from_hal::<A>(
                hal_buffer,
                device.id,
                &desc.map_label(|l| l.map(Borrowed)),
                None,
            )
        };
        if let Some(cause) = error {
            self.handle_error(
                &device.error_sink,
                cause,
                desc.label,
                "Device::create_buffer_from_hal",
            );
        }
        CoreBuffer {
            context: self.clone(),
            id,
            error_sink: Arc::clone(&device.error_sink),
        }
    }

    /// Borrows the hal device backing `device`, if it belongs to backend `A`.
    ///
    /// # Safety
    /// Defers to `wgc::Global::device_as_hal`.
    pub unsafe fn device_as_hal<A: hal::Api>(
        &self,
        device: &CoreDevice,
    ) -> Option<impl Deref<Target = A::Device>> {
        unsafe { self.0.device_as_hal::<A>(device.id) }
    }

    /// Borrows the hal surface backing `surface`, if it belongs to backend `A`.
    ///
    /// # Safety
    /// Defers to `wgc::Global::surface_as_hal`.
    pub unsafe fn surface_as_hal<A: hal::Api>(
        &self,
        surface: &CoreSurface,
    ) -> Option<impl Deref<Target = A::Surface>> {
        unsafe { self.0.surface_as_hal::<A>(surface.id) }
    }

    /// Borrows the hal texture backing `texture`, if it belongs to backend `A`.
    ///
    /// # Safety
    /// Defers to `wgc::Global::texture_as_hal`.
    pub unsafe fn texture_as_hal<A: hal::Api>(
        &self,
        texture: &CoreTexture,
    ) -> Option<impl Deref<Target = A::Texture>> {
        unsafe { self.0.texture_as_hal::<A>(texture.id) }
    }

    /// Borrows the hal texture view backing `texture_view`, if it belongs to
    /// backend `A`.
    ///
    /// # Safety
    /// Defers to `wgc::Global::texture_view_as_hal`.
    pub unsafe fn texture_view_as_hal<A: hal::Api>(
        &self,
        texture_view: &CoreTextureView,
    ) -> Option<impl Deref<Target = A::TextureView>> {
        unsafe { self.0.texture_view_as_hal::<A>(texture_view.id) }
    }

    /// Runs `hal_command_encoder_callback` with mutable access to the hal
    /// command encoder backing `command_encoder` (or `None` on backend
    /// mismatch), returning the callback's result.
    ///
    /// # Safety
    /// Defers to `wgc::Global::command_encoder_as_hal_mut`.
    pub unsafe fn command_encoder_as_hal_mut<
        A: hal::Api,
        F: FnOnce(Option<&mut A::CommandEncoder>) -> R,
        R,
    >(
        &self,
        command_encoder: &CoreCommandEncoder,
        hal_command_encoder_callback: F,
    ) -> R {
        unsafe {
            self.0.command_encoder_as_hal_mut::<A, F, R>(
                command_encoder.id,
                hal_command_encoder_callback,
            )
        }
    }

    /// Borrows the hal acceleration structure backing `blas`.
    ///
    /// # Safety
    /// Defers to `wgc::Global::blas_as_hal`.
    pub unsafe fn blas_as_hal<A: hal::Api>(
        &self,
        blas: &CoreBlas,
    ) -> Option<impl Deref<Target = A::AccelerationStructure>> {
        unsafe { self.0.blas_as_hal::<A>(blas.id) }
    }

    /// Borrows the hal acceleration structure backing `tlas`.
    ///
    /// # Safety
    /// Defers to `wgc::Global::tlas_as_hal`.
    pub unsafe fn tlas_as_hal<A: hal::Api>(
        &self,
        tlas: &CoreTlas,
    ) -> Option<impl Deref<Target = A::AccelerationStructure>> {
        unsafe { self.0.tlas_as_hal::<A>(tlas.id) }
    }

    /// Produces a snapshot report of all resources tracked by the global.
    pub fn generate_report(&self) -> wgc::global::GlobalReport {
        self.0.generate_report()
    }

    /// Core of error reporting: wraps `source` in a `ContextError` carrying the
    /// call site and label, converts it to a `crate::Error` by WebGPU type, and
    /// pushes it into the sink. `#[cold]`/`#[inline(never)]` keep this off the
    /// callers' hot paths.
    #[cold]
    #[track_caller]
    #[inline(never)]
    fn handle_error_inner(
        &self,
        sink_mutex: &Mutex<ErrorSinkRaw>,
        error_type: ErrorType,
        source: ContextErrorSource,
        label: Label<'_>,
        fn_ident: &'static str,
    ) {
        let source: ErrorSource = Box::new(wgc::error::ContextError {
            fn_ident,
            source,
            label: label.unwrap_or_default().to_string(),
        });
        let final_error_handling = {
            let mut sink = sink_mutex.lock();
            let description = || self.format_error(&*source);
            let error = match error_type {
                ErrorType::Internal => {
                    let description = description();
                    crate::Error::Internal {
                        source,
                        description,
                    }
                }
                ErrorType::OutOfMemory => crate::Error::OutOfMemory { source },
                ErrorType::Validation => {
                    let description = description();
                    crate::Error::Validation {
                        source,
                        description,
                    }
                }
                // Device-lost errors are not reported through the sink;
                // NOTE(review): presumably surfaced via the device-lost
                // callback instead — confirm.
                ErrorType::DeviceLost => return,
            };
            sink.handle_error_or_return_handler(error)
        };

        // Run the uncaptured-error handler only after the sink lock (scoped to
        // the block above) has been released, so the handler may re-enter wgpu.
        if let Some(f) = final_error_handling {
            f();
        }
    }

    /// Reports a labeled error originating in `fn_ident` to the given sink.
    #[inline]
    #[track_caller]
    fn handle_error(
        &self,
        sink_mutex: &Mutex<ErrorSinkRaw>,
        source: impl WebGpuError + WasmNotSendSync + 'static,
        label: Label<'_>,
        fn_ident: &'static str,
    ) {
        let error_type = source.webgpu_error_type();
        self.handle_error_inner(sink_mutex, error_type, Box::new(source), label, fn_ident)
    }

    /// Like [`Self::handle_error`], for operations without a resource label.
    #[inline]
    #[track_caller]
    fn handle_error_nolabel(
        &self,
        sink_mutex: &Mutex<ErrorSinkRaw>,
        source: impl WebGpuError + WasmNotSendSync + 'static,
        fn_ident: &'static str,
    ) {
        let error_type = source.webgpu_error_type();
        self.handle_error_inner(sink_mutex, error_type, Box::new(source), None, fn_ident)
    }

    /// Escalates an unrecoverable error into a panic naming the operation.
    #[track_caller]
    #[cold]
    fn handle_error_fatal(
        &self,
        cause: impl Error + WasmNotSendSync + 'static,
        operation: &'static str,
    ) -> ! {
        panic!("Error in {operation}: {f}", f = self.format_error(&cause));
    }

    /// Pretty-prints an error and its `source()` chain as an indented tree
    /// (two spaces per level), flattening `MultiError` into sibling entries.
    #[inline(never)]
    fn format_error(&self, err: &(dyn Error + 'static)) -> String {
        let mut output = String::new();
        let mut level = 1;

        fn print_tree(output: &mut String, level: &mut usize, e: &(dyn Error + 'static)) {
            let mut print = |e: &(dyn Error + 'static)| {
                use core::fmt::Write;
                writeln!(output, "{}{}", " ".repeat(*level * 2), e).unwrap();

                if let Some(e) = e.source() {
                    *level += 1;
                    print_tree(output, level, e);
                    *level -= 1;
                }
            };
            if let Some(multi) = e.downcast_ref::<wgc::error::MultiError>() {
                for e in multi.errors() {
                    print(e);
                }
            } else {
                print(e);
            }
        }

        print_tree(&mut output, &mut level, err);

        format!("Validation Error\n\nCaused by:\n{output}")
    }

    /// Borrows the hal queue backing `queue`, if it belongs to backend `A`.
    ///
    /// # Safety
    /// Defers to `wgc::Global::queue_as_hal`.
    pub unsafe fn queue_as_hal<A: hal::Api>(
        &self,
        queue: &CoreQueue,
    ) -> Option<impl Deref<Target = A::Queue> + WasmNotSendSync> {
        unsafe { self.0.queue_as_hal::<A>(queue.id) }
    }
}
383
384fn map_buffer_copy_view(
385 view: crate::TexelCopyBufferInfo<'_>,
386) -> wgt::TexelCopyBufferInfo<wgc::id::BufferId> {
387 wgt::TexelCopyBufferInfo {
388 buffer: view.buffer.inner.as_core().id,
389 layout: view.layout,
390 }
391}
392
393fn map_texture_copy_view(
394 view: crate::TexelCopyTextureInfo<'_>,
395) -> wgt::TexelCopyTextureInfo<wgc::id::TextureId> {
396 wgt::TexelCopyTextureInfo {
397 texture: view.texture.inner.as_core().id,
398 mip_level: view.mip_level,
399 origin: view.origin,
400 aspect: view.aspect,
401 }
402}
403
/// Lowers a frontend external-image copy destination to the id-based `wgt`
/// form. Only referenced on the `webgl` configuration, hence the lint opt-out.
#[cfg_attr(not(webgl), expect(unused))]
fn map_texture_tagged_copy_view(
    view: crate::CopyExternalImageDestInfo<&api::Texture>,
) -> wgt::CopyExternalImageDestInfo<wgc::id::TextureId> {
    wgt::CopyExternalImageDestInfo {
        texture: view.texture.inner.as_core().id,
        mip_level: view.mip_level,
        origin: view.origin,
        aspect: view.aspect,
        color_space: view.color_space,
        premultiplied_alpha: view.premultiplied_alpha,
    }
}
417
418fn map_load_op<V: Copy>(load: &LoadOp<V>) -> LoadOp<Option<V>> {
419 match *load {
420 LoadOp::Clear(clear_value) => LoadOp::Clear(Some(clear_value)),
421 LoadOp::DontCare(token) => LoadOp::DontCare(token),
422 LoadOp::Load => LoadOp::Load,
423 }
424}
425
426fn map_pass_channel<V: Copy>(ops: Option<&Operations<V>>) -> wgc::command::PassChannel<Option<V>> {
427 match ops {
428 Some(&Operations { load, store }) => wgc::command::PassChannel {
429 load_op: Some(map_load_op(&load)),
430 store_op: Some(store),
431 read_only: false,
432 },
433 None => wgc::command::PassChannel {
434 load_op: None,
435 store_op: None,
436 read_only: true,
437 },
438 }
439}
440
/// Handle to a `wgc` surface.
#[derive(Debug)]
pub struct CoreSurface {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::SurfaceId,
    /// Device this surface is currently configured with, if any.
    configured_device: Mutex<Option<wgc::id::DeviceId>>,
    /// Error sink for surface operations; NOTE(review): presumably installed
    /// when the surface is configured — confirm at the configure call site.
    error_sink: Mutex<Option<ErrorSink>>,
}
452
/// Handle to a `wgc` adapter.
#[derive(Debug)]
pub struct CoreAdapter {
    pub(crate) context: ContextWgpuCore,
    pub(crate) id: wgc::id::AdapterId,
}
458
/// Handle to a `wgc` device, carrying the error sink it shares with its queue
/// and the feature set it was requested with.
#[derive(Debug)]
pub struct CoreDevice {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::DeviceId,
    error_sink: ErrorSink,
    /// Features requested at device creation; consulted when lowering
    /// feature-gated binding resources (see `create_bind_group`).
    features: Features,
}
466
/// Handle to a `wgc` buffer; errors report to the owning device's sink.
#[derive(Debug)]
pub struct CoreBuffer {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BufferId,
    error_sink: ErrorSink,
}
473
/// Handle to a `wgc` shader module plus the compilation diagnostics captured
/// at creation time.
#[derive(Debug)]
pub struct CoreShaderModule {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::ShaderModuleId,
    compilation_info: CompilationInfo,
}
480
/// Handle to a `wgc` bind group layout.
#[derive(Debug)]
pub struct CoreBindGroupLayout {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BindGroupLayoutId,
}
486
/// Handle to a `wgc` bind group.
#[derive(Debug)]
pub struct CoreBindGroup {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BindGroupId,
}
492
/// Handle to a `wgc` texture; errors report to the owning device's sink.
#[derive(Debug)]
pub struct CoreTexture {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::TextureId,
    error_sink: ErrorSink,
}
499
/// Handle to a `wgc` texture view.
#[derive(Debug)]
pub struct CoreTextureView {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::TextureViewId,
}
505
/// Handle to a `wgc` external texture.
#[derive(Debug)]
pub struct CoreExternalTexture {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::ExternalTextureId,
}
511
/// Handle to a `wgc` sampler.
#[derive(Debug)]
pub struct CoreSampler {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::SamplerId,
}
517
/// Handle to a `wgc` query set.
#[derive(Debug)]
pub struct CoreQuerySet {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::QuerySetId,
}
523
/// Handle to a `wgc` pipeline layout.
#[derive(Debug)]
pub struct CorePipelineLayout {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::PipelineLayoutId,
}
529
/// Handle to a `wgc` pipeline cache.
#[derive(Debug)]
pub struct CorePipelineCache {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::PipelineCacheId,
}
535
/// Handle to a finished `wgc` command buffer.
#[derive(Debug)]
pub struct CoreCommandBuffer {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::CommandBufferId,
}
541
/// An open render-bundle encoder. Unlike most handles it owns the encoder
/// state directly (boxed) rather than referring to it by `wgc` id; the `id`
/// field here only serves identity comparison.
#[derive(Debug)]
pub struct CoreRenderBundleEncoder {
    pub(crate) context: ContextWgpuCore,
    encoder: Box<wgc::command::RenderBundleEncoder>,
    id: crate::cmp::Identifier,
}
548
/// Handle to a finished `wgc` render bundle.
#[derive(Debug)]
pub struct CoreRenderBundle {
    context: ContextWgpuCore,
    id: wgc::id::RenderBundleId,
}
554
/// Handle to a `wgc` queue; shares its error sink with the owning device.
#[derive(Debug)]
pub struct CoreQueue {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::QueueId,
    error_sink: ErrorSink,
}
561
/// Handle to a `wgc` compute pipeline.
#[derive(Debug)]
pub struct CoreComputePipeline {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::ComputePipelineId,
    error_sink: ErrorSink,
}
568
/// Handle to a `wgc` render pipeline.
#[derive(Debug)]
pub struct CoreRenderPipeline {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::RenderPipelineId,
    error_sink: ErrorSink,
}
575
/// An open compute pass. Owns the recording state directly; `id` exists only
/// for identity comparison.
#[derive(Debug)]
pub struct CoreComputePass {
    pub(crate) context: ContextWgpuCore,
    pass: wgc::command::ComputePass,
    error_sink: ErrorSink,
    id: crate::cmp::Identifier,
}
583
/// An open render pass. Owns the recording state directly; `id` exists only
/// for identity comparison.
#[derive(Debug)]
pub struct CoreRenderPass {
    pub(crate) context: ContextWgpuCore,
    pass: wgc::command::RenderPass,
    error_sink: ErrorSink,
    id: crate::cmp::Identifier,
}
591
/// Handle to an open `wgc` command encoder.
#[derive(Debug)]
pub struct CoreCommandEncoder {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::CommandEncoderId,
    error_sink: ErrorSink,
}
598
/// Handle to a `wgc` bottom-level acceleration structure.
#[derive(Debug)]
pub struct CoreBlas {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BlasId,
    error_sink: ErrorSink,
}
605
/// Handle to a `wgc` top-level acceleration structure.
#[derive(Debug)]
pub struct CoreTlas {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::TlasId,
}
612
/// Per-frame bookkeeping returned with an acquired surface texture, used when
/// presenting or discarding the frame.
#[derive(Debug)]
pub struct CoreSurfaceOutputDetail {
    context: ContextWgpuCore,
    surface_id: wgc::id::SurfaceId,
    error_sink: ErrorSink,
}
619
/// Shared, lock-protected error sink; one per device, shared by the device's
/// queue and resources.
type ErrorSink = Arc<Mutex<ErrorSinkRaw>>;
621
/// One pushed error scope: the filter it was opened with and the first error
/// it captured, if any.
struct ErrorScope {
    error: Option<crate::Error>,
    filter: crate::ErrorFilter,
}
626
/// Mutable state behind an [`ErrorSink`]: a stack of error scopes per thread
/// plus the optional user-installed uncaptured-error handler.
struct ErrorSinkRaw {
    scopes: HashMap<thread_id::ThreadId, Vec<ErrorScope>>,
    uncaptured_handler: Option<Arc<dyn crate::UncapturedErrorHandler>>,
}
631
632impl ErrorSinkRaw {
633 fn new() -> ErrorSinkRaw {
634 ErrorSinkRaw {
635 scopes: HashMap::new(),
636 uncaptured_handler: None,
637 }
638 }
639
640 #[track_caller]
650 #[must_use]
651 fn handle_error_or_return_handler(&mut self, err: crate::Error) -> Option<impl FnOnce()> {
652 let filter = match err {
653 crate::Error::OutOfMemory { .. } => crate::ErrorFilter::OutOfMemory,
654 crate::Error::Validation { .. } => crate::ErrorFilter::Validation,
655 crate::Error::Internal { .. } => crate::ErrorFilter::Internal,
656 };
657 let thread_id = thread_id::ThreadId::current();
658 let scopes = self.scopes.entry(thread_id).or_default();
659 match scopes.iter_mut().rev().find(|scope| scope.filter == filter) {
660 Some(scope) => {
661 if scope.error.is_none() {
662 scope.error = Some(err);
663 }
664 None
665 }
666 None => {
667 if let Some(custom_handler) = &self.uncaptured_handler {
668 let custom_handler = Arc::clone(custom_handler);
669 Some(move || (custom_handler)(err))
670 } else {
671 default_error_handler(err)
673 }
674 }
675 }
676 }
677}
678
679impl fmt::Debug for ErrorSinkRaw {
680 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
681 write!(f, "ErrorSink")
682 }
683}
684
685#[track_caller]
686fn default_error_handler(err: crate::Error) -> ! {
687 log::error!("Handling wgpu errors as fatal by default");
688 panic!("wgpu error: {err}\n");
689}
690
impl From<CreateShaderModuleError> for CompilationInfo {
    /// Converts a shader-module creation error into WebGPU-style compilation
    /// info: frontend parse/validation errors carry structured messages,
    /// device loss / id-generation failures yield empty info, and anything
    /// else becomes a single error-level message with the error's text.
    fn from(value: CreateShaderModuleError) -> Self {
        match value {
            #[cfg(feature = "wgsl")]
            CreateShaderModuleError::Parsing(v) => v.into(),
            #[cfg(feature = "glsl")]
            CreateShaderModuleError::ParsingGlsl(v) => v.into(),
            #[cfg(feature = "spirv")]
            CreateShaderModuleError::ParsingSpirV(v) => v.into(),
            CreateShaderModuleError::Validation(v) => v.into(),
            // Not compilation problems per se, so no messages are attached.
            CreateShaderModuleError::Device(_) | CreateShaderModuleError::Generation => {
                CompilationInfo {
                    messages: Vec::new(),
                }
            }
            // Fallback: surface the error text without source location info.
            _ => CompilationInfo {
                messages: vec![CompilationMessage {
                    message: value.to_string(),
                    message_type: CompilationMessageType::Error,
                    location: None,
                }],
            },
        }
    }
}
719
/// Staging buffer handed out by `Queue::write_buffer_with`: the `wgc` staging
/// buffer id plus its mapped memory range.
#[derive(Debug)]
pub struct CoreQueueWriteBuffer {
    buffer_id: wgc::id::StagingBufferId,
    mapping: CoreBufferMappedRange,
}
725
/// A raw pointer + length describing a mapped region of buffer memory.
#[derive(Debug)]
pub struct CoreBufferMappedRange {
    ptr: NonNull<u8>,
    size: usize,
}
731
// SAFETY: NOTE(review): `NonNull<u8>` is neither Send nor Sync by default;
// these impls assert the mapped memory is plain bytes kept alive by wgpu-core
// for the mapping's duration — confirm against the map/unmap call sites.
#[cfg(send_sync)]
unsafe impl Send for CoreBufferMappedRange {}
#[cfg(send_sync)]
unsafe impl Sync for CoreBufferMappedRange {}
736
impl Drop for CoreBufferMappedRange {
    fn drop(&mut self) {
        // Intentionally empty: the mapping itself is released through the
        // owning buffer's unmap path, not through this handle. NOTE(review):
        // presumably kept as an explicit no-op for documentation/drop-check —
        // confirm before removing.
    }
}
743
// Identity-based equality/ordering/hashing for all handle types: the context
// compares by `Arc` address, the pass/bundle-encoder types by their local
// `Identifier`, mapped ranges by pointer, and everything else by its `wgc` id.
crate::cmp::impl_eq_ord_hash_arc_address!(ContextWgpuCore => .0);
crate::cmp::impl_eq_ord_hash_proxy!(CoreAdapter => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreDevice => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreQueue => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreShaderModule => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBindGroupLayout => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBindGroup => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreTextureView => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreSampler => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBuffer => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreTexture => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreExternalTexture => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBlas => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreTlas => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreQuerySet => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CorePipelineLayout => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderPipeline => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreComputePipeline => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CorePipelineCache => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreCommandEncoder => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreComputePass => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderPass => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreCommandBuffer => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderBundleEncoder => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderBundle => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreSurface => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreSurfaceOutputDetail => .surface_id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreQueueWriteBuffer => .mapping.ptr);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBufferMappedRange => .ptr);
773
impl dispatch::InstanceInterface for ContextWgpuCore {
    /// Creates a fresh `wgc` global from an instance descriptor.
    fn new(desc: wgt::InstanceDescriptor) -> Self
    where
        Self: Sized,
    {
        Self(Arc::new(wgc::global::Global::new("wgpu", desc, None)))
    }

    /// Creates a surface from one of the platform-specific unsafe targets.
    /// Each arm forwards to the matching `wgc` entry point; on NetBSD the DRM
    /// path is reported as backend-not-enabled rather than attempted.
    ///
    /// # Safety
    /// The raw handles in `target` must be valid; see `SurfaceTargetUnsafe`.
    unsafe fn create_surface(
        &self,
        target: crate::api::SurfaceTargetUnsafe,
    ) -> Result<dispatch::DispatchSurface, crate::CreateSurfaceError> {
        let id = match target {
            SurfaceTargetUnsafe::RawHandle {
                raw_display_handle,
                raw_window_handle,
            } => unsafe {
                self.0
                    .instance_create_surface(raw_display_handle, raw_window_handle, None)
            },

            #[cfg(all(drm, not(target_os = "netbsd")))]
            SurfaceTargetUnsafe::Drm {
                fd,
                plane,
                connector_id,
                width,
                height,
                refresh_rate,
            } => unsafe {
                self.0.instance_create_surface_from_drm(
                    fd,
                    plane,
                    connector_id,
                    width,
                    height,
                    refresh_rate,
                    None,
                )
            },

            #[cfg(metal)]
            SurfaceTargetUnsafe::CoreAnimationLayer(layer) => unsafe {
                self.0.instance_create_surface_metal(layer, None)
            },

            #[cfg(all(drm, target_os = "netbsd"))]
            SurfaceTargetUnsafe::Drm { .. } => Err(
                wgc::instance::CreateSurfaceError::BackendNotEnabled(wgt::Backend::Vulkan),
            ),

            #[cfg(dx12)]
            SurfaceTargetUnsafe::CompositionVisual(visual) => unsafe {
                self.0.instance_create_surface_from_visual(visual, None)
            },

            #[cfg(dx12)]
            SurfaceTargetUnsafe::SurfaceHandle(surface_handle) => unsafe {
                self.0
                    .instance_create_surface_from_surface_handle(surface_handle, None)
            },

            #[cfg(dx12)]
            SurfaceTargetUnsafe::SwapChainPanel(swap_chain_panel) => unsafe {
                self.0
                    .instance_create_surface_from_swap_chain_panel(swap_chain_panel, None)
            },
        }?;

        // Fresh surfaces start unconfigured and without an error sink.
        Ok(CoreSurface {
            context: self.clone(),
            id,
            configured_device: Mutex::default(),
            error_sink: Mutex::default(),
        }
        .into())
    }

    /// Requests an adapter matching `options`, searching all backends. The
    /// `wgc` call is synchronous, so the returned future is already resolved.
    fn request_adapter(
        &self,
        options: &crate::api::RequestAdapterOptions<'_, '_>,
    ) -> Pin<Box<dyn dispatch::RequestAdapterFuture>> {
        let id = self.0.request_adapter(
            &wgc::instance::RequestAdapterOptions {
                power_preference: options.power_preference,
                force_fallback_adapter: options.force_fallback_adapter,
                compatible_surface: options
                    .compatible_surface
                    .map(|surface| surface.inner.as_core().id),
                // NOTE(review): semantics of `apply_limit_buckets` are not
                // visible here — see wgc's RequestAdapterOptions docs.
                apply_limit_buckets: false,
            },
            wgt::Backends::all(),
            None,
        );
        let adapter = id.map(|id| {
            let core = CoreAdapter {
                context: self.clone(),
                id,
            };
            let generic: dispatch::DispatchAdapter = core.into();
            generic
        });
        Box::pin(ready(adapter))
    }

    /// Polls every device; returns whether all queues are empty. Poll errors
    /// are treated as fatal (panic).
    fn poll_all_devices(&self, force_wait: bool) -> bool {
        match self.0.poll_all_devices(force_wait) {
            Ok(all_queue_empty) => all_queue_empty,
            Err(err) => self.handle_error_fatal(err, "Instance::poll_all_devices"),
        }
    }

    /// Reports the WGSL language extensions implemented by the bundled naga
    /// frontend, translated into the frontend's flag type. The exhaustive
    /// match forces this mapping to be updated when naga adds an extension.
    #[cfg(feature = "wgsl")]
    fn wgsl_language_features(&self) -> crate::WgslLanguageFeatures {
        use wgc::naga::front::wgsl::ImplementedLanguageExtension;
        ImplementedLanguageExtension::all().iter().copied().fold(
            crate::WgslLanguageFeatures::empty(),
            |acc, wle| {
                acc | match wle {
                    ImplementedLanguageExtension::ReadOnlyAndReadWriteStorageTextures => {
                        crate::WgslLanguageFeatures::ReadOnlyAndReadWriteStorageTextures
                    }
                    ImplementedLanguageExtension::Packed4x8IntegerDotProduct => {
                        crate::WgslLanguageFeatures::Packed4x8IntegerDotProduct
                    }
                    ImplementedLanguageExtension::PointerCompositeAccess => {
                        crate::WgslLanguageFeatures::PointerCompositeAccess
                    }
                }
            },
        )
    }

    /// Enumerates adapters for `backends`, wrapping each id in a frontend
    /// handle; like `request_adapter`, the future is already resolved.
    fn enumerate_adapters(
        &self,
        backends: crate::Backends,
    ) -> Pin<Box<dyn dispatch::EnumerateAdapterFuture>> {
        let adapters: Vec<DispatchAdapter> = self
            .enumerate_adapters(backends)
            .into_iter()
            .map(|adapter| {
                let core = crate::backend::wgpu_core::CoreAdapter {
                    context: self.clone(),
                    id: adapter,
                };
                core.into()
            })
            .collect();
        Box::pin(ready(adapters))
    }
}
925
impl dispatch::AdapterInterface for CoreAdapter {
    /// Requests a device/queue pair from this adapter. The pair shares a
    /// single error sink. The `wgc` call is synchronous, so the returned
    /// future is already resolved.
    fn request_device(
        &self,
        desc: &crate::DeviceDescriptor<'_>,
    ) -> Pin<Box<dyn dispatch::RequestDeviceFuture>> {
        let res = self.context.0.adapter_request_device(
            self.id,
            &desc.map_label(|l| l.map(Borrowed)),
            None,
            None,
        );
        let (device_id, queue_id) = match res {
            Ok(ids) => ids,
            Err(err) => {
                return Box::pin(ready(Err(err.into())));
            }
        };
        let error_sink = Arc::new(Mutex::new(ErrorSinkRaw::new()));
        let device = CoreDevice {
            context: self.context.clone(),
            id: device_id,
            error_sink: error_sink.clone(),
            features: desc.required_features,
        };
        let queue = CoreQueue {
            context: self.context.clone(),
            id: queue_id,
            error_sink,
        };
        Box::pin(ready(Ok((device.into(), queue.into()))))
    }

    /// Whether this adapter can present to the given surface.
    fn is_surface_supported(&self, surface: &dispatch::DispatchSurface) -> bool {
        let surface = surface.as_core();

        self.context
            .0
            .adapter_is_surface_supported(self.id, surface.id)
    }

    /// Features supported by this adapter.
    fn features(&self) -> crate::Features {
        self.context.0.adapter_features(self.id)
    }

    /// Best-case limits supported by this adapter.
    fn limits(&self) -> crate::Limits {
        self.context.0.adapter_limits(self.id)
    }

    /// Downlevel capability flags for this adapter.
    fn downlevel_capabilities(&self) -> crate::DownlevelCapabilities {
        self.context.0.adapter_downlevel_capabilities(self.id)
    }

    /// Descriptive information (name, backend, driver, …).
    fn get_info(&self) -> crate::AdapterInfo {
        self.context.0.adapter_get_info(self.id)
    }

    /// Per-format capability flags beyond the spec-guaranteed baseline.
    fn get_texture_format_features(
        &self,
        format: crate::TextureFormat,
    ) -> crate::TextureFormatFeatures {
        self.context
            .0
            .adapter_get_texture_format_features(self.id, format)
    }

    /// Current presentation timestamp from the adapter's clock.
    fn get_presentation_timestamp(&self) -> crate::PresentationTimestamp {
        self.context.0.adapter_get_presentation_timestamp(self.id)
    }

    /// Supported cooperative-matrix configurations, if any.
    fn cooperative_matrix_properties(&self) -> Vec<crate::wgt::CooperativeMatrixProperties> {
        self.context
            .0
            .adapter_cooperative_matrix_properties(self.id)
    }
}
1001
impl Drop for CoreAdapter {
    fn drop(&mut self) {
        // Release this adapter's entry in the wgc hub when the frontend
        // handle goes away.
        self.context.0.adapter_drop(self.id)
    }
}
1007
1008impl dispatch::DeviceInterface for CoreDevice {
1009 fn features(&self) -> crate::Features {
1010 self.context.0.device_features(self.id)
1011 }
1012
1013 fn limits(&self) -> crate::Limits {
1014 self.context.0.device_limits(self.id)
1015 }
1016
1017 fn adapter_info(&self) -> crate::AdapterInfo {
1018 self.context.0.device_adapter_info(self.id)
1019 }
1020
1021 #[cfg_attr(
1023 not(any(
1024 feature = "spirv",
1025 feature = "glsl",
1026 feature = "wgsl",
1027 feature = "naga-ir"
1028 )),
1029 expect(unused)
1030 )]
1031 fn create_shader_module(
1032 &self,
1033 desc: crate::ShaderModuleDescriptor<'_>,
1034 shader_bound_checks: wgt::ShaderRuntimeChecks,
1035 ) -> dispatch::DispatchShaderModule {
1036 let descriptor = wgc::pipeline::ShaderModuleDescriptor {
1037 label: desc.label.map(Borrowed),
1038 runtime_checks: shader_bound_checks,
1039 };
1040 let source = match desc.source {
1041 #[cfg(feature = "spirv")]
1042 ShaderSource::SpirV(ref spv) => {
1043 let options = naga::front::spv::Options {
1045 adjust_coordinate_space: false, strict_capabilities: true,
1047 block_ctx_dump_prefix: None,
1048 };
1049 wgc::pipeline::ShaderModuleSource::SpirV(Borrowed(spv), options)
1050 }
1051 #[cfg(feature = "glsl")]
1052 ShaderSource::Glsl {
1053 ref shader,
1054 stage,
1055 defines,
1056 } => {
1057 let options = naga::front::glsl::Options {
1058 stage,
1059 defines: defines
1060 .iter()
1061 .map(|&(key, value)| (String::from(key), String::from(value)))
1062 .collect(),
1063 };
1064 wgc::pipeline::ShaderModuleSource::Glsl(Borrowed(shader), options)
1065 }
1066 #[cfg(feature = "wgsl")]
1067 ShaderSource::Wgsl(ref code) => wgc::pipeline::ShaderModuleSource::Wgsl(Borrowed(code)),
1068 #[cfg(feature = "naga-ir")]
1069 ShaderSource::Naga(module) => wgc::pipeline::ShaderModuleSource::Naga(module),
1070 ShaderSource::Dummy(_) => panic!("found `ShaderSource::Dummy`"),
1071 };
1072 let (id, error) =
1073 self.context
1074 .0
1075 .device_create_shader_module(self.id, &descriptor, source, None);
1076 let compilation_info = match error {
1077 Some(cause) => {
1078 self.context.handle_error(
1079 &self.error_sink,
1080 cause.clone(),
1081 desc.label,
1082 "Device::create_shader_module",
1083 );
1084 CompilationInfo::from(cause)
1085 }
1086 None => CompilationInfo { messages: vec![] },
1087 };
1088
1089 CoreShaderModule {
1090 context: self.context.clone(),
1091 id,
1092 compilation_info,
1093 }
1094 .into()
1095 }
1096
1097 unsafe fn create_shader_module_passthrough(
1098 &self,
1099 desc: &crate::ShaderModuleDescriptorPassthrough<'_>,
1100 ) -> dispatch::DispatchShaderModule {
1101 let desc = desc.map_label(|l| l.map(Cow::from));
1102 let (id, error) = unsafe {
1103 self.context
1104 .0
1105 .device_create_shader_module_passthrough(self.id, &desc, None)
1106 };
1107
1108 let compilation_info = match error {
1109 Some(cause) => {
1110 self.context.handle_error(
1111 &self.error_sink,
1112 cause.clone(),
1113 desc.label.as_deref(),
1114 "Device::create_shader_module_passthrough",
1115 );
1116 CompilationInfo::from(cause)
1117 }
1118 None => CompilationInfo { messages: vec![] },
1119 };
1120
1121 CoreShaderModule {
1122 context: self.context.clone(),
1123 id,
1124 compilation_info,
1125 }
1126 .into()
1127 }
1128
1129 fn create_bind_group_layout(
1130 &self,
1131 desc: &crate::BindGroupLayoutDescriptor<'_>,
1132 ) -> dispatch::DispatchBindGroupLayout {
1133 let descriptor = wgc::binding_model::BindGroupLayoutDescriptor {
1134 label: desc.label.map(Borrowed),
1135 entries: Borrowed(desc.entries),
1136 };
1137 let (id, error) =
1138 self.context
1139 .0
1140 .device_create_bind_group_layout(self.id, &descriptor, None);
1141 if let Some(cause) = error {
1142 self.context.handle_error(
1143 &self.error_sink,
1144 cause,
1145 desc.label,
1146 "Device::create_bind_group_layout",
1147 );
1148 }
1149 CoreBindGroupLayout {
1150 context: self.context.clone(),
1151 id,
1152 }
1153 .into()
1154 }
1155
    /// Creates a bind group from the user-facing descriptor.
    ///
    /// Array bindings (texture views, samplers, buffers, acceleration
    /// structures) are first flattened into contiguous id vectors, then
    /// re-sliced per entry in a second pass. Both passes iterate
    /// `desc.entries` in the same order, which is what keeps the slice
    /// bookkeeping (`remaining_*` cursors) consistent.
    fn create_bind_group(
        &self,
        desc: &crate::BindGroupDescriptor<'_>,
    ) -> dispatch::DispatchBindGroup {
        use wgc::binding_model as bm;

        // Pass 1a: flatten texture-view and sampler arrays into id vectors.
        // Gated on the feature flag so we do no work when arrays can't occur.
        let mut arrayed_texture_views = Vec::new();
        let mut arrayed_samplers = Vec::new();
        if self.features.contains(Features::TEXTURE_BINDING_ARRAY) {
            for entry in desc.entries.iter() {
                if let BindingResource::TextureViewArray(array) = entry.resource {
                    arrayed_texture_views.extend(array.iter().map(|view| view.inner.as_core().id));
                }
                if let BindingResource::SamplerArray(array) = entry.resource {
                    arrayed_samplers.extend(array.iter().map(|sampler| sampler.inner.as_core().id));
                }
            }
        }
        // Cursors into the flattened vectors, advanced entry by entry below.
        let mut remaining_arrayed_texture_views = &arrayed_texture_views[..];
        let mut remaining_arrayed_samplers = &arrayed_samplers[..];

        // Pass 1b: flatten buffer-binding arrays.
        let mut arrayed_buffer_bindings = Vec::new();
        if self.features.contains(Features::BUFFER_BINDING_ARRAY) {
            for entry in desc.entries.iter() {
                if let BindingResource::BufferArray(array) = entry.resource {
                    arrayed_buffer_bindings.extend(array.iter().map(|binding| bm::BufferBinding {
                        buffer: binding.buffer.inner.as_core().id,
                        offset: binding.offset,
                        size: binding.size.map(wgt::BufferSize::get),
                    }));
                }
            }
        }
        let mut remaining_arrayed_buffer_bindings = &arrayed_buffer_bindings[..];

        // Pass 1c: flatten acceleration-structure arrays.
        let mut arrayed_acceleration_structures = Vec::new();
        if self
            .features
            .contains(Features::ACCELERATION_STRUCTURE_BINDING_ARRAY)
        {
            for entry in desc.entries.iter() {
                if let BindingResource::AccelerationStructureArray(array) = entry.resource {
                    arrayed_acceleration_structures
                        .extend(array.iter().map(|tlas| tlas.inner.as_core().id));
                }
            }
        }
        let mut remaining_arrayed_acceleration_structures = &arrayed_acceleration_structures[..];

        // Pass 2: map each entry into wgpu-core's representation, carving the
        // per-entry slices off the front of the flattened vectors.
        // NOTE(review): if an *Array resource is supplied while the matching
        // feature is disabled, pass 1 skipped it and the `[..array.len()]`
        // slicing below would panic — presumably rejected upstream; confirm.
        let entries = desc
            .entries
            .iter()
            .map(|entry| bm::BindGroupEntry {
                binding: entry.binding,
                resource: match entry.resource {
                    BindingResource::Buffer(BufferBinding {
                        buffer,
                        offset,
                        size,
                    }) => bm::BindingResource::Buffer(bm::BufferBinding {
                        buffer: buffer.inner.as_core().id,
                        offset,
                        size: size.map(wgt::BufferSize::get),
                    }),
                    BindingResource::BufferArray(array) => {
                        let slice = &remaining_arrayed_buffer_bindings[..array.len()];
                        remaining_arrayed_buffer_bindings =
                            &remaining_arrayed_buffer_bindings[array.len()..];
                        bm::BindingResource::BufferArray(Borrowed(slice))
                    }
                    BindingResource::Sampler(sampler) => {
                        bm::BindingResource::Sampler(sampler.inner.as_core().id)
                    }
                    BindingResource::SamplerArray(array) => {
                        let slice = &remaining_arrayed_samplers[..array.len()];
                        remaining_arrayed_samplers = &remaining_arrayed_samplers[array.len()..];
                        bm::BindingResource::SamplerArray(Borrowed(slice))
                    }
                    BindingResource::TextureView(texture_view) => {
                        bm::BindingResource::TextureView(texture_view.inner.as_core().id)
                    }
                    BindingResource::TextureViewArray(array) => {
                        let slice = &remaining_arrayed_texture_views[..array.len()];
                        remaining_arrayed_texture_views =
                            &remaining_arrayed_texture_views[array.len()..];
                        bm::BindingResource::TextureViewArray(Borrowed(slice))
                    }
                    BindingResource::AccelerationStructure(acceleration_structure) => {
                        bm::BindingResource::AccelerationStructure(
                            acceleration_structure.inner.as_core().id,
                        )
                    }
                    BindingResource::AccelerationStructureArray(array) => {
                        let slice = &remaining_arrayed_acceleration_structures[..array.len()];
                        remaining_arrayed_acceleration_structures =
                            &remaining_arrayed_acceleration_structures[array.len()..];
                        bm::BindingResource::AccelerationStructureArray(Borrowed(slice))
                    }
                    BindingResource::ExternalTexture(external_texture) => {
                        bm::BindingResource::ExternalTexture(external_texture.inner.as_core().id)
                    }
                },
            })
            .collect::<Vec<_>>();
        let descriptor = bm::BindGroupDescriptor {
            label: desc.label.as_ref().map(|label| Borrowed(&label[..])),
            layout: desc.layout.inner.as_core().id,
            entries: Borrowed(&entries),
        };

        // An id is returned even on failure; errors flow to the error sink.
        let (id, error) = self
            .context
            .0
            .device_create_bind_group(self.id, &descriptor, None);
        if let Some(cause) = error {
            self.context.handle_error(
                &self.error_sink,
                cause,
                desc.label,
                "Device::create_bind_group",
            );
        }
        CoreBindGroup {
            context: self.context.clone(),
            id,
        }
        .into()
    }
1287
1288 fn create_pipeline_layout(
1289 &self,
1290 desc: &crate::PipelineLayoutDescriptor<'_>,
1291 ) -> dispatch::DispatchPipelineLayout {
1292 assert!(
1295 desc.bind_group_layouts.len() <= wgc::MAX_BIND_GROUPS,
1296 "Bind group layout count {} exceeds device bind group limit {}",
1297 desc.bind_group_layouts.len(),
1298 wgc::MAX_BIND_GROUPS
1299 );
1300
1301 let temp_layouts = desc
1302 .bind_group_layouts
1303 .iter()
1304 .map(|bgl| bgl.map(|bgl| bgl.inner.as_core().id))
1305 .collect::<ArrayVec<_, { wgc::MAX_BIND_GROUPS }>>();
1306 let descriptor = wgc::binding_model::PipelineLayoutDescriptor {
1307 label: desc.label.map(Borrowed),
1308 bind_group_layouts: Borrowed(&temp_layouts),
1309 immediate_size: desc.immediate_size,
1310 };
1311
1312 let (id, error) = self
1313 .context
1314 .0
1315 .device_create_pipeline_layout(self.id, &descriptor, None);
1316 if let Some(cause) = error {
1317 self.context.handle_error(
1318 &self.error_sink,
1319 cause,
1320 desc.label,
1321 "Device::create_pipeline_layout",
1322 );
1323 }
1324 CorePipelineLayout {
1325 context: self.context.clone(),
1326 id,
1327 }
1328 .into()
1329 }
1330
1331 fn create_render_pipeline(
1332 &self,
1333 desc: &crate::RenderPipelineDescriptor<'_>,
1334 ) -> dispatch::DispatchRenderPipeline {
1335 use wgc::pipeline as pipe;
1336
1337 let vertex_buffers: ArrayVec<_, { wgc::MAX_VERTEX_BUFFERS }> = desc
1338 .vertex
1339 .buffers
1340 .iter()
1341 .map(|vbuf| pipe::VertexBufferLayout {
1342 array_stride: vbuf.array_stride,
1343 step_mode: vbuf.step_mode,
1344 attributes: Borrowed(vbuf.attributes),
1345 })
1346 .collect();
1347
1348 let vert_constants = desc
1349 .vertex
1350 .compilation_options
1351 .constants
1352 .iter()
1353 .map(|&(key, value)| (String::from(key), value))
1354 .collect();
1355
1356 let descriptor = pipe::RenderPipelineDescriptor {
1357 label: desc.label.map(Borrowed),
1358 layout: desc.layout.map(|layout| layout.inner.as_core().id),
1359 vertex: pipe::VertexState {
1360 stage: pipe::ProgrammableStageDescriptor {
1361 module: desc.vertex.module.inner.as_core().id,
1362 entry_point: desc.vertex.entry_point.map(Borrowed),
1363 constants: vert_constants,
1364 zero_initialize_workgroup_memory: desc
1365 .vertex
1366 .compilation_options
1367 .zero_initialize_workgroup_memory,
1368 },
1369 buffers: Borrowed(&vertex_buffers),
1370 },
1371 primitive: desc.primitive,
1372 depth_stencil: desc.depth_stencil.clone(),
1373 multisample: desc.multisample,
1374 fragment: desc.fragment.as_ref().map(|frag| {
1375 let frag_constants = frag
1376 .compilation_options
1377 .constants
1378 .iter()
1379 .map(|&(key, value)| (String::from(key), value))
1380 .collect();
1381 pipe::FragmentState {
1382 stage: pipe::ProgrammableStageDescriptor {
1383 module: frag.module.inner.as_core().id,
1384 entry_point: frag.entry_point.map(Borrowed),
1385 constants: frag_constants,
1386 zero_initialize_workgroup_memory: frag
1387 .compilation_options
1388 .zero_initialize_workgroup_memory,
1389 },
1390 targets: Borrowed(frag.targets),
1391 }
1392 }),
1393 multiview_mask: desc.multiview_mask,
1394 cache: desc.cache.map(|cache| cache.inner.as_core().id),
1395 };
1396
1397 let (id, error) = self
1398 .context
1399 .0
1400 .device_create_render_pipeline(self.id, &descriptor, None);
1401 if let Some(cause) = error {
1402 if let wgc::pipeline::CreateRenderPipelineError::Internal { stage, ref error } = cause {
1403 log::error!("Shader translation error for stage {stage:?}: {error}");
1404 log::error!("Please report it to https://github.com/gfx-rs/wgpu");
1405 }
1406 self.context.handle_error(
1407 &self.error_sink,
1408 cause,
1409 desc.label,
1410 "Device::create_render_pipeline",
1411 );
1412 }
1413 CoreRenderPipeline {
1414 context: self.context.clone(),
1415 id,
1416 error_sink: Arc::clone(&self.error_sink),
1417 }
1418 .into()
1419 }
1420
1421 fn create_mesh_pipeline(
1422 &self,
1423 desc: &crate::MeshPipelineDescriptor<'_>,
1424 ) -> dispatch::DispatchRenderPipeline {
1425 use wgc::pipeline as pipe;
1426
1427 let mesh_constants = desc
1428 .mesh
1429 .compilation_options
1430 .constants
1431 .iter()
1432 .map(|&(key, value)| (String::from(key), value))
1433 .collect();
1434 let descriptor = pipe::MeshPipelineDescriptor {
1435 label: desc.label.map(Borrowed),
1436 task: desc.task.as_ref().map(|task| {
1437 let task_constants = task
1438 .compilation_options
1439 .constants
1440 .iter()
1441 .map(|&(key, value)| (String::from(key), value))
1442 .collect();
1443 pipe::TaskState {
1444 stage: pipe::ProgrammableStageDescriptor {
1445 module: task.module.inner.as_core().id,
1446 entry_point: task.entry_point.map(Borrowed),
1447 constants: task_constants,
1448 zero_initialize_workgroup_memory: desc
1449 .mesh
1450 .compilation_options
1451 .zero_initialize_workgroup_memory,
1452 },
1453 }
1454 }),
1455 mesh: pipe::MeshState {
1456 stage: pipe::ProgrammableStageDescriptor {
1457 module: desc.mesh.module.inner.as_core().id,
1458 entry_point: desc.mesh.entry_point.map(Borrowed),
1459 constants: mesh_constants,
1460 zero_initialize_workgroup_memory: desc
1461 .mesh
1462 .compilation_options
1463 .zero_initialize_workgroup_memory,
1464 },
1465 },
1466 layout: desc.layout.map(|layout| layout.inner.as_core().id),
1467 primitive: desc.primitive,
1468 depth_stencil: desc.depth_stencil.clone(),
1469 multisample: desc.multisample,
1470 fragment: desc.fragment.as_ref().map(|frag| {
1471 let frag_constants = frag
1472 .compilation_options
1473 .constants
1474 .iter()
1475 .map(|&(key, value)| (String::from(key), value))
1476 .collect();
1477 pipe::FragmentState {
1478 stage: pipe::ProgrammableStageDescriptor {
1479 module: frag.module.inner.as_core().id,
1480 entry_point: frag.entry_point.map(Borrowed),
1481 constants: frag_constants,
1482 zero_initialize_workgroup_memory: frag
1483 .compilation_options
1484 .zero_initialize_workgroup_memory,
1485 },
1486 targets: Borrowed(frag.targets),
1487 }
1488 }),
1489 multiview: desc.multiview,
1490 cache: desc.cache.map(|cache| cache.inner.as_core().id),
1491 };
1492
1493 let (id, error) = self
1494 .context
1495 .0
1496 .device_create_mesh_pipeline(self.id, &descriptor, None);
1497 if let Some(cause) = error {
1498 if let wgc::pipeline::CreateRenderPipelineError::Internal { stage, ref error } = cause {
1499 log::error!("Shader translation error for stage {stage:?}: {error}");
1500 log::error!("Please report it to https://github.com/gfx-rs/wgpu");
1501 }
1502 self.context.handle_error(
1503 &self.error_sink,
1504 cause,
1505 desc.label,
1506 "Device::create_render_pipeline",
1507 );
1508 }
1509 CoreRenderPipeline {
1510 context: self.context.clone(),
1511 id,
1512 error_sink: Arc::clone(&self.error_sink),
1513 }
1514 .into()
1515 }
1516
1517 fn create_compute_pipeline(
1518 &self,
1519 desc: &crate::ComputePipelineDescriptor<'_>,
1520 ) -> dispatch::DispatchComputePipeline {
1521 use wgc::pipeline as pipe;
1522
1523 let constants = desc
1524 .compilation_options
1525 .constants
1526 .iter()
1527 .map(|&(key, value)| (String::from(key), value))
1528 .collect();
1529
1530 let descriptor = pipe::ComputePipelineDescriptor {
1531 label: desc.label.map(Borrowed),
1532 layout: desc.layout.map(|pll| pll.inner.as_core().id),
1533 stage: pipe::ProgrammableStageDescriptor {
1534 module: desc.module.inner.as_core().id,
1535 entry_point: desc.entry_point.map(Borrowed),
1536 constants,
1537 zero_initialize_workgroup_memory: desc
1538 .compilation_options
1539 .zero_initialize_workgroup_memory,
1540 },
1541 cache: desc.cache.map(|cache| cache.inner.as_core().id),
1542 };
1543
1544 let (id, error) = self
1545 .context
1546 .0
1547 .device_create_compute_pipeline(self.id, &descriptor, None);
1548 if let Some(cause) = error {
1549 if let wgc::pipeline::CreateComputePipelineError::Internal(ref error) = cause {
1550 log::error!(
1551 "Shader translation error for stage {:?}: {}",
1552 wgt::ShaderStages::COMPUTE,
1553 error
1554 );
1555 log::error!("Please report it to https://github.com/gfx-rs/wgpu");
1556 }
1557 self.context.handle_error(
1558 &self.error_sink,
1559 cause,
1560 desc.label,
1561 "Device::create_compute_pipeline",
1562 );
1563 }
1564 CoreComputePipeline {
1565 context: self.context.clone(),
1566 id,
1567 error_sink: Arc::clone(&self.error_sink),
1568 }
1569 .into()
1570 }
1571
1572 unsafe fn create_pipeline_cache(
1573 &self,
1574 desc: &crate::PipelineCacheDescriptor<'_>,
1575 ) -> dispatch::DispatchPipelineCache {
1576 use wgc::pipeline as pipe;
1577
1578 let descriptor = pipe::PipelineCacheDescriptor {
1579 label: desc.label.map(Borrowed),
1580 data: desc.data.map(Borrowed),
1581 fallback: desc.fallback,
1582 };
1583 let (id, error) = unsafe {
1584 self.context
1585 .0
1586 .device_create_pipeline_cache(self.id, &descriptor, None)
1587 };
1588 if let Some(cause) = error {
1589 self.context.handle_error(
1590 &self.error_sink,
1591 cause,
1592 desc.label,
1593 "Device::device_create_pipeline_cache_init",
1594 );
1595 }
1596 CorePipelineCache {
1597 context: self.context.clone(),
1598 id,
1599 }
1600 .into()
1601 }
1602
1603 fn create_buffer(&self, desc: &crate::BufferDescriptor<'_>) -> dispatch::DispatchBuffer {
1604 let (id, error) = self.context.0.device_create_buffer(
1605 self.id,
1606 &desc.map_label(|l| l.map(Borrowed)),
1607 None,
1608 );
1609 if let Some(cause) = error {
1610 self.context
1611 .handle_error(&self.error_sink, cause, desc.label, "Device::create_buffer");
1612 }
1613
1614 CoreBuffer {
1615 context: self.context.clone(),
1616 id,
1617 error_sink: Arc::clone(&self.error_sink),
1618 }
1619 .into()
1620 }
1621
1622 fn create_texture(&self, desc: &crate::TextureDescriptor<'_>) -> dispatch::DispatchTexture {
1623 let wgt_desc = desc.map_label_and_view_formats(|l| l.map(Borrowed), |v| v.to_vec());
1624 let (id, error) = self
1625 .context
1626 .0
1627 .device_create_texture(self.id, &wgt_desc, None);
1628 if let Some(cause) = error {
1629 self.context.handle_error(
1630 &self.error_sink,
1631 cause,
1632 desc.label,
1633 "Device::create_texture",
1634 );
1635 }
1636
1637 CoreTexture {
1638 context: self.context.clone(),
1639 id,
1640 error_sink: Arc::clone(&self.error_sink),
1641 }
1642 .into()
1643 }
1644
1645 fn create_external_texture(
1646 &self,
1647 desc: &crate::ExternalTextureDescriptor<'_>,
1648 planes: &[&crate::TextureView],
1649 ) -> dispatch::DispatchExternalTexture {
1650 let wgt_desc = desc.map_label(|l| l.map(Borrowed));
1651 let planes = planes
1652 .iter()
1653 .map(|plane| plane.inner.as_core().id)
1654 .collect::<Vec<_>>();
1655 let (id, error) = self
1656 .context
1657 .0
1658 .device_create_external_texture(self.id, &wgt_desc, &planes, None);
1659 if let Some(cause) = error {
1660 self.context.handle_error(
1661 &self.error_sink,
1662 cause,
1663 desc.label,
1664 "Device::create_external_texture",
1665 );
1666 }
1667
1668 CoreExternalTexture {
1669 context: self.context.clone(),
1670 id,
1671 }
1672 .into()
1673 }
1674
1675 fn create_blas(
1676 &self,
1677 desc: &crate::CreateBlasDescriptor<'_>,
1678 sizes: crate::BlasGeometrySizeDescriptors,
1679 ) -> (Option<u64>, dispatch::DispatchBlas) {
1680 let global = &self.context.0;
1681 let (id, handle, error) =
1682 global.device_create_blas(self.id, &desc.map_label(|l| l.map(Borrowed)), sizes, None);
1683 if let Some(cause) = error {
1684 self.context
1685 .handle_error(&self.error_sink, cause, desc.label, "Device::create_blas");
1686 }
1687 (
1688 handle,
1689 CoreBlas {
1690 context: self.context.clone(),
1691 id,
1692 error_sink: Arc::clone(&self.error_sink),
1693 }
1694 .into(),
1695 )
1696 }
1697
1698 fn create_tlas(&self, desc: &crate::CreateTlasDescriptor<'_>) -> dispatch::DispatchTlas {
1699 let global = &self.context.0;
1700 let (id, error) =
1701 global.device_create_tlas(self.id, &desc.map_label(|l| l.map(Borrowed)), None);
1702 if let Some(cause) = error {
1703 self.context
1704 .handle_error(&self.error_sink, cause, desc.label, "Device::create_tlas");
1705 }
1706 CoreTlas {
1707 context: self.context.clone(),
1708 id,
1709 }
1711 .into()
1712 }
1713
1714 fn create_sampler(&self, desc: &crate::SamplerDescriptor<'_>) -> dispatch::DispatchSampler {
1715 let descriptor = wgc::resource::SamplerDescriptor {
1716 label: desc.label.map(Borrowed),
1717 address_modes: [
1718 desc.address_mode_u,
1719 desc.address_mode_v,
1720 desc.address_mode_w,
1721 ],
1722 mag_filter: desc.mag_filter,
1723 min_filter: desc.min_filter,
1724 mipmap_filter: desc.mipmap_filter,
1725 lod_min_clamp: desc.lod_min_clamp,
1726 lod_max_clamp: desc.lod_max_clamp,
1727 compare: desc.compare,
1728 anisotropy_clamp: desc.anisotropy_clamp,
1729 border_color: desc.border_color,
1730 };
1731
1732 let (id, error) = self
1733 .context
1734 .0
1735 .device_create_sampler(self.id, &descriptor, None);
1736 if let Some(cause) = error {
1737 self.context.handle_error(
1738 &self.error_sink,
1739 cause,
1740 desc.label,
1741 "Device::create_sampler",
1742 );
1743 }
1744 CoreSampler {
1745 context: self.context.clone(),
1746 id,
1747 }
1748 .into()
1749 }
1750
1751 fn create_query_set(&self, desc: &crate::QuerySetDescriptor<'_>) -> dispatch::DispatchQuerySet {
1752 let (id, error) = self.context.0.device_create_query_set(
1753 self.id,
1754 &desc.map_label(|l| l.map(Borrowed)),
1755 None,
1756 );
1757 if let Some(cause) = error {
1758 self.context
1759 .handle_error_nolabel(&self.error_sink, cause, "Device::create_query_set");
1760 }
1761 CoreQuerySet {
1762 context: self.context.clone(),
1763 id,
1764 }
1765 .into()
1766 }
1767
1768 fn create_command_encoder(
1769 &self,
1770 desc: &crate::CommandEncoderDescriptor<'_>,
1771 ) -> dispatch::DispatchCommandEncoder {
1772 let (id, error) = self.context.0.device_create_command_encoder(
1773 self.id,
1774 &desc.map_label(|l| l.map(Borrowed)),
1775 None,
1776 );
1777 if let Some(cause) = error {
1778 self.context.handle_error(
1779 &self.error_sink,
1780 cause,
1781 desc.label,
1782 "Device::create_command_encoder",
1783 );
1784 }
1785
1786 CoreCommandEncoder {
1787 context: self.context.clone(),
1788 id,
1789 error_sink: Arc::clone(&self.error_sink),
1790 }
1791 .into()
1792 }
1793
1794 fn create_render_bundle_encoder(
1795 &self,
1796 desc: &crate::RenderBundleEncoderDescriptor<'_>,
1797 ) -> dispatch::DispatchRenderBundleEncoder {
1798 let descriptor = wgc::command::RenderBundleEncoderDescriptor {
1799 label: desc.label.map(Borrowed),
1800 color_formats: Borrowed(desc.color_formats),
1801 depth_stencil: desc.depth_stencil,
1802 sample_count: desc.sample_count,
1803 multiview: desc.multiview,
1804 };
1805 let (encoder, error) = self
1806 .context
1807 .0
1808 .device_create_render_bundle_encoder(self.id, &descriptor);
1809 if let Some(cause) = error {
1810 self.context.handle_error(
1811 &self.error_sink,
1812 cause,
1813 desc.label,
1814 "Device::create_render_bundle_encoder",
1815 );
1816 }
1817
1818 CoreRenderBundleEncoder {
1819 context: self.context.clone(),
1820 encoder,
1821 id: crate::cmp::Identifier::create(),
1822 }
1823 .into()
1824 }
1825
1826 fn set_device_lost_callback(&self, device_lost_callback: dispatch::BoxDeviceLostCallback) {
1827 self.context
1828 .0
1829 .device_set_device_lost_closure(self.id, device_lost_callback);
1830 }
1831
1832 fn on_uncaptured_error(&self, handler: Arc<dyn crate::UncapturedErrorHandler>) {
1833 let mut error_sink = self.error_sink.lock();
1834 error_sink.uncaptured_handler = Some(handler);
1835 }
1836
1837 fn push_error_scope(&self, filter: crate::ErrorFilter) -> u32 {
1838 let mut error_sink = self.error_sink.lock();
1839 let thread_id = thread_id::ThreadId::current();
1840 let scopes = error_sink.scopes.entry(thread_id).or_default();
1841 let index = scopes
1842 .len()
1843 .try_into()
1844 .expect("Greater than 2^32 nested error scopes");
1845 scopes.push(ErrorScope {
1846 error: None,
1847 filter,
1848 });
1849 index
1850 }
1851
1852 fn pop_error_scope(&self, index: u32) -> Pin<Box<dyn dispatch::PopErrorScopeFuture>> {
1853 let mut error_sink = self.error_sink.lock();
1854
1855 let is_panicking = crate::util::is_panicking();
1858 let thread_id = thread_id::ThreadId::current();
1859 let err = "Mismatched pop_error_scope call: no error scope for this thread. Error scopes are thread-local.";
1860 let scopes = match error_sink.scopes.get_mut(&thread_id) {
1861 Some(s) => s,
1862 None => {
1863 if !is_panicking {
1864 panic!("{err}");
1865 } else {
1866 return Box::pin(ready(None));
1867 }
1868 }
1869 };
1870 if scopes.is_empty() && !is_panicking {
1871 panic!("{err}");
1872 }
1873 if index as usize != scopes.len() - 1 && !is_panicking {
1874 panic!(
1875 "Mismatched pop_error_scope call: error scopes must be popped in reverse order."
1876 );
1877 }
1878
1879 let scope = match scopes.pop() {
1884 Some(s) => s,
1885 None if !is_panicking => unreachable!(),
1886 None => return Box::pin(ready(None)),
1887 };
1888
1889 Box::pin(ready(scope.error))
1890 }
1891
1892 unsafe fn start_graphics_debugger_capture(&self) {
1893 unsafe {
1894 self.context
1895 .0
1896 .device_start_graphics_debugger_capture(self.id)
1897 };
1898 }
1899
1900 unsafe fn stop_graphics_debugger_capture(&self) {
1901 unsafe {
1902 self.context
1903 .0
1904 .device_stop_graphics_debugger_capture(self.id)
1905 };
1906 }
1907
1908 fn poll(&self, poll_type: wgt::PollType<u64>) -> Result<crate::PollStatus, crate::PollError> {
1909 match self.context.0.device_poll(self.id, poll_type) {
1910 Ok(status) => Ok(status),
1911 Err(err) => {
1912 if let Some(poll_error) = err.to_poll_error() {
1913 return Err(poll_error);
1914 }
1915
1916 self.context.handle_error_fatal(err, "Device::poll")
1917 }
1918 }
1919 }
1920
1921 fn get_internal_counters(&self) -> crate::InternalCounters {
1922 self.context.0.device_get_internal_counters(self.id)
1923 }
1924
1925 fn generate_allocator_report(&self) -> Option<wgt::AllocatorReport> {
1926 self.context.0.device_generate_allocator_report(self.id)
1927 }
1928
1929 fn destroy(&self) {
1930 self.context.0.device_destroy(self.id);
1931 }
1932}
1933
1934impl Drop for CoreDevice {
1935 fn drop(&mut self) {
1936 self.context.0.device_drop(self.id)
1937 }
1938}
1939
impl dispatch::QueueInterface for CoreQueue {
    /// Schedules a write of `data` into `buffer` at `offset`; errors are
    /// routed to this queue's error sink.
    fn write_buffer(
        &self,
        buffer: &dispatch::DispatchBuffer,
        offset: crate::BufferAddress,
        data: &[u8],
    ) {
        let buffer = buffer.as_core();

        match self
            .context
            .0
            .queue_write_buffer(self.id, buffer.id, offset, data)
        {
            Ok(()) => (),
            Err(err) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, err, "Queue::write_buffer")
            }
        }
    }

    /// Creates a staging buffer of `size` bytes with a mapped host pointer;
    /// returns `None` (after reporting the error) on failure.
    fn create_staging_buffer(
        &self,
        size: crate::BufferSize,
    ) -> Option<dispatch::DispatchQueueWriteBuffer> {
        match self
            .context
            .0
            .queue_create_staging_buffer(self.id, size, None)
        {
            Ok((buffer_id, ptr)) => Some(
                CoreQueueWriteBuffer {
                    buffer_id,
                    mapping: CoreBufferMappedRange {
                        ptr,
                        size: size.get() as usize,
                    },
                }
                .into(),
            ),
            Err(err) => {
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    err,
                    "Queue::write_buffer_with",
                );
                None
            }
        }
    }

    /// Validates a prospective `write_buffer_with`-style write; returns
    /// `None` (after reporting the error) if it is invalid.
    fn validate_write_buffer(
        &self,
        buffer: &dispatch::DispatchBuffer,
        offset: wgt::BufferAddress,
        size: wgt::BufferSize,
    ) -> Option<()> {
        let buffer = buffer.as_core();

        match self
            .context
            .0
            .queue_validate_write_buffer(self.id, buffer.id, offset, size)
        {
            Ok(()) => Some(()),
            Err(err) => {
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    err,
                    "Queue::write_buffer_with",
                );
                None
            }
        }
    }

    /// Copies a previously filled staging buffer into `buffer` at `offset`.
    fn write_staging_buffer(
        &self,
        buffer: &dispatch::DispatchBuffer,
        offset: crate::BufferAddress,
        staging_buffer: &dispatch::DispatchQueueWriteBuffer,
    ) {
        let buffer = buffer.as_core();
        let staging_buffer = staging_buffer.as_core();

        match self.context.0.queue_write_staging_buffer(
            self.id,
            buffer.id,
            offset,
            staging_buffer.buffer_id,
        ) {
            Ok(()) => (),
            Err(err) => {
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    err,
                    "Queue::write_buffer_with",
                );
            }
        }
    }

    /// Schedules a write of `data` into the given texture region.
    fn write_texture(
        &self,
        texture: crate::TexelCopyTextureInfo<'_>,
        data: &[u8],
        data_layout: crate::TexelCopyBufferLayout,
        size: crate::Extent3d,
    ) {
        match self.context.0.queue_write_texture(
            self.id,
            &map_texture_copy_view(texture),
            data,
            &data_layout,
            &size,
        ) {
            Ok(()) => (),
            Err(err) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, err, "Queue::write_texture")
            }
        }
    }

    // Only compiled for web targets; without `webgl` the body below is
    // cfg'd out, hence the expect(unused_variables).
    #[cfg(web)]
    #[cfg_attr(not(webgl), expect(unused_variables))]
    fn copy_external_image_to_texture(
        &self,
        source: &crate::CopyExternalImageSourceInfo,
        dest: crate::CopyExternalImageDestInfo<&crate::api::Texture>,
        size: crate::Extent3d,
    ) {
        #[cfg(webgl)]
        match self.context.0.queue_copy_external_image_to_texture(
            self.id,
            source,
            map_texture_tagged_copy_view(dest),
            size,
        ) {
            Ok(()) => (),
            Err(err) => self.context.handle_error_nolabel(
                &self.error_sink,
                err,
                "Queue::copy_external_image_to_texture",
            ),
        }
    }

    /// Submits the given command buffers and returns the submission index.
    /// Even on error, wgpu-core reports an index, which is returned.
    fn submit(
        &self,
        command_buffers: &mut dyn Iterator<Item = dispatch::DispatchCommandBuffer>,
    ) -> u64 {
        // Collect first so the command buffers stay alive across the submit
        // call; they are dropped explicitly afterwards.
        let temp_command_buffers = command_buffers.collect::<SmallVec<[_; 4]>>();
        let command_buffer_ids = temp_command_buffers
            .iter()
            .map(|cmdbuf| cmdbuf.as_core().id)
            .collect::<SmallVec<[_; 4]>>();

        let index = match self.context.0.queue_submit(self.id, &command_buffer_ids) {
            Ok(index) => index,
            Err((index, err)) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, err, "Queue::submit");
                index
            }
        };

        drop(temp_command_buffers);

        index
    }

    /// Returns the timestamp period (nanoseconds per timestamp tick) reported
    /// by wgpu-core for this queue.
    fn get_timestamp_period(&self) -> f32 {
        self.context.0.queue_get_timestamp_period(self.id)
    }

    /// Registers a callback invoked when currently submitted work completes.
    fn on_submitted_work_done(&self, callback: dispatch::BoxSubmittedWorkDoneCallback) {
        self.context
            .0
            .queue_on_submitted_work_done(self.id, callback);
    }

    /// Compacts a prepared BLAS, returning the optional raw handle and the
    /// new wrapper.
    fn compact_blas(&self, blas: &dispatch::DispatchBlas) -> (Option<u64>, dispatch::DispatchBlas) {
        let (id, handle, error) =
            self.context
                .0
                .queue_compact_blas(self.id, blas.as_core().id, None);

        if let Some(cause) = error {
            self.context
                .handle_error_nolabel(&self.error_sink, cause, "Queue::compact_blas");
        }
        (
            handle,
            CoreBlas {
                context: self.context.clone(),
                id,
                error_sink: Arc::clone(&self.error_sink),
            }
            .into(),
        )
    }

    /// Presents the surface associated with the given output detail.
    fn present(&self, detail: &dispatch::DispatchSurfaceOutputDetail) {
        let detail = detail.as_core();
        match self.context.0.surface_present(detail.surface_id) {
            Ok(_status) => (),
            Err(err) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, err, "Queue::present");
            }
        }
    }
}
2157
2158impl Drop for CoreQueue {
2159 fn drop(&mut self) {
2160 self.context.0.queue_drop(self.id)
2161 }
2162}
2163
2164impl dispatch::ShaderModuleInterface for CoreShaderModule {
2165 fn get_compilation_info(&self) -> Pin<Box<dyn dispatch::ShaderCompilationInfoFuture>> {
2166 Box::pin(ready(self.compilation_info.clone()))
2167 }
2168}
2169
2170impl Drop for CoreShaderModule {
2171 fn drop(&mut self) {
2172 self.context.0.shader_module_drop(self.id)
2173 }
2174}
2175
// Marker impl: bind group layouts expose no extra dispatch methods.
impl dispatch::BindGroupLayoutInterface for CoreBindGroupLayout {}
2177
2178impl Drop for CoreBindGroupLayout {
2179 fn drop(&mut self) {
2180 self.context.0.bind_group_layout_drop(self.id)
2181 }
2182}
2183
// Marker impl: bind groups expose no extra dispatch methods.
impl dispatch::BindGroupInterface for CoreBindGroup {}
2185
2186impl Drop for CoreBindGroup {
2187 fn drop(&mut self) {
2188 self.context.0.bind_group_drop(self.id)
2189 }
2190}
2191
// Marker impl: texture views expose no extra dispatch methods.
impl dispatch::TextureViewInterface for CoreTextureView {}
2193
2194impl Drop for CoreTextureView {
2195 fn drop(&mut self) {
2196 self.context.0.texture_view_drop(self.id);
2197 }
2198}
2199
2200impl dispatch::ExternalTextureInterface for CoreExternalTexture {
2201 fn destroy(&self) {
2202 self.context.0.external_texture_destroy(self.id);
2203 }
2204}
2205
2206impl Drop for CoreExternalTexture {
2207 fn drop(&mut self) {
2208 self.context.0.external_texture_drop(self.id);
2209 }
2210}
2211
// Marker impl: samplers expose no extra dispatch methods.
impl dispatch::SamplerInterface for CoreSampler {}
2213
2214impl Drop for CoreSampler {
2215 fn drop(&mut self) {
2216 self.context.0.sampler_drop(self.id)
2217 }
2218}
2219
2220impl dispatch::BufferInterface for CoreBuffer {
2221 fn map_async(
2222 &self,
2223 mode: crate::MapMode,
2224 range: Range<crate::BufferAddress>,
2225 callback: dispatch::BufferMapCallback,
2226 ) {
2227 let operation = wgc::resource::BufferMapOperation {
2228 host: match mode {
2229 MapMode::Read => wgc::device::HostMap::Read,
2230 MapMode::Write => wgc::device::HostMap::Write,
2231 },
2232 callback: Some(Box::new(|status| {
2233 let res = status.map_err(|_| crate::BufferAsyncError);
2234 callback(res);
2235 })),
2236 };
2237
2238 match self.context.0.buffer_map_async(
2239 self.id,
2240 range.start,
2241 Some(range.end - range.start),
2242 operation,
2243 ) {
2244 Ok(_) => (),
2245 Err(cause) => {
2246 self.context
2247 .handle_error_nolabel(&self.error_sink, cause, "Buffer::map_async")
2248 }
2249 }
2250 }
2251
2252 fn get_mapped_range(
2253 &self,
2254 sub_range: Range<crate::BufferAddress>,
2255 ) -> Result<dispatch::DispatchBufferMappedRange, crate::MapRangeError> {
2256 let size = sub_range.end - sub_range.start;
2257 self.context
2258 .0
2259 .buffer_get_mapped_range(self.id, sub_range.start, Some(size))
2260 .map(|(ptr, size)| {
2261 CoreBufferMappedRange {
2262 ptr,
2263 size: size as usize,
2264 }
2265 .into()
2266 })
2267 .map_err(|err| crate::MapRangeError(self.context.format_error(&err)))
2268 }
2269
2270 fn unmap(&self) {
2271 match self.context.0.buffer_unmap(self.id) {
2272 Ok(()) => (),
2273 Err(cause) => {
2274 self.context
2275 .handle_error_nolabel(&self.error_sink, cause, "Buffer::buffer_unmap")
2276 }
2277 }
2278 }
2279
2280 fn destroy(&self) {
2281 self.context.0.buffer_destroy(self.id);
2282 }
2283}
2284
2285impl Drop for CoreBuffer {
2286 fn drop(&mut self) {
2287 self.context.0.buffer_drop(self.id)
2288 }
2289}
2290
2291impl dispatch::TextureInterface for CoreTexture {
2292 fn create_view(
2293 &self,
2294 desc: &crate::TextureViewDescriptor<'_>,
2295 ) -> dispatch::DispatchTextureView {
2296 let descriptor = wgc::resource::TextureViewDescriptor {
2297 label: desc.label.map(Borrowed),
2298 format: desc.format,
2299 dimension: desc.dimension,
2300 usage: desc.usage,
2301 range: wgt::ImageSubresourceRange {
2302 aspect: desc.aspect,
2303 base_mip_level: desc.base_mip_level,
2304 mip_level_count: desc.mip_level_count,
2305 base_array_layer: desc.base_array_layer,
2306 array_layer_count: desc.array_layer_count,
2307 },
2308 };
2309 let (id, error) = self
2310 .context
2311 .0
2312 .texture_create_view(self.id, &descriptor, None);
2313 if let Some(cause) = error {
2314 self.context
2315 .handle_error(&self.error_sink, cause, desc.label, "Texture::create_view");
2316 }
2317 CoreTextureView {
2318 context: self.context.clone(),
2319 id,
2320 }
2321 .into()
2322 }
2323
2324 fn destroy(&self) {
2325 self.context.0.texture_destroy(self.id);
2326 }
2327}
2328
2329impl Drop for CoreTexture {
2330 fn drop(&mut self) {
2331 self.context.0.texture_drop(self.id)
2332 }
2333}
2334
2335impl dispatch::BlasInterface for CoreBlas {
2336 fn prepare_compact_async(&self, callback: BlasCompactCallback) {
2337 let callback: Option<wgc::resource::BlasCompactCallback> =
2338 Some(Box::new(|status: BlasPrepareCompactResult| {
2339 let res = status.map_err(|_| crate::BlasAsyncError);
2340 callback(res);
2341 }));
2342
2343 match self.context.0.blas_prepare_compact_async(self.id, callback) {
2344 Ok(_) => (),
2345 Err(cause) => self.context.handle_error_nolabel(
2346 &self.error_sink,
2347 cause,
2348 "Blas::prepare_compact_async",
2349 ),
2350 }
2351 }
2352
2353 fn ready_for_compaction(&self) -> bool {
2354 match self.context.0.ready_for_compaction(self.id) {
2355 Ok(ready) => ready,
2356 Err(cause) => {
2357 self.context.handle_error_nolabel(
2358 &self.error_sink,
2359 cause,
2360 "Blas::ready_for_compaction",
2361 );
2362 false
2364 }
2365 }
2366 }
2367}
2368
2369impl Drop for CoreBlas {
2370 fn drop(&mut self) {
2371 self.context.0.blas_drop(self.id)
2372 }
2373}
2374
// Marker impl: TLASes expose no extra dispatch methods.
impl dispatch::TlasInterface for CoreTlas {}
2376
2377impl Drop for CoreTlas {
2378 fn drop(&mut self) {
2379 self.context.0.tlas_drop(self.id)
2380 }
2381}
2382
// Marker impl: query sets expose no extra dispatch methods.
impl dispatch::QuerySetInterface for CoreQuerySet {}
2384
2385impl Drop for CoreQuerySet {
2386 fn drop(&mut self) {
2387 self.context.0.query_set_drop(self.id)
2388 }
2389}
2390
// Marker impl: pipeline layouts expose no extra dispatch methods.
impl dispatch::PipelineLayoutInterface for CorePipelineLayout {}
2392
impl Drop for CorePipelineLayout {
    fn drop(&mut self) {
        // Release this handle's reference to the core pipeline layout.
        self.context.0.pipeline_layout_drop(self.id)
    }
}
2398
2399impl dispatch::RenderPipelineInterface for CoreRenderPipeline {
2400 fn get_bind_group_layout(&self, index: u32) -> dispatch::DispatchBindGroupLayout {
2401 let (id, error) = self
2402 .context
2403 .0
2404 .render_pipeline_get_bind_group_layout(self.id, index, None);
2405 if let Some(err) = error {
2406 self.context.handle_error_nolabel(
2407 &self.error_sink,
2408 err,
2409 "RenderPipeline::get_bind_group_layout",
2410 )
2411 }
2412 CoreBindGroupLayout {
2413 context: self.context.clone(),
2414 id,
2415 }
2416 .into()
2417 }
2418}
2419
impl Drop for CoreRenderPipeline {
    fn drop(&mut self) {
        // Release this handle's reference to the core render pipeline.
        self.context.0.render_pipeline_drop(self.id)
    }
}
2425
2426impl dispatch::ComputePipelineInterface for CoreComputePipeline {
2427 fn get_bind_group_layout(&self, index: u32) -> dispatch::DispatchBindGroupLayout {
2428 let (id, error) = self
2429 .context
2430 .0
2431 .compute_pipeline_get_bind_group_layout(self.id, index, None);
2432 if let Some(err) = error {
2433 self.context.handle_error_nolabel(
2434 &self.error_sink,
2435 err,
2436 "ComputePipeline::get_bind_group_layout",
2437 )
2438 }
2439 CoreBindGroupLayout {
2440 context: self.context.clone(),
2441 id,
2442 }
2443 .into()
2444 }
2445}
2446
impl Drop for CoreComputePipeline {
    fn drop(&mut self) {
        // Release this handle's reference to the core compute pipeline.
        self.context.0.compute_pipeline_drop(self.id)
    }
}
2452
impl dispatch::PipelineCacheInterface for CorePipelineCache {
    /// Serialized cache contents for persistence; `None` when the backend
    /// has no data to offer.
    fn get_data(&self) -> Option<Vec<u8>> {
        self.context.0.pipeline_cache_get_data(self.id)
    }
}
2458
impl Drop for CorePipelineCache {
    fn drop(&mut self) {
        // Release this handle's reference to the core pipeline cache.
        self.context.0.pipeline_cache_drop(self.id)
    }
}
2464
2465impl dispatch::CommandEncoderInterface for CoreCommandEncoder {
2466 fn copy_buffer_to_buffer(
2467 &self,
2468 source: &dispatch::DispatchBuffer,
2469 source_offset: crate::BufferAddress,
2470 destination: &dispatch::DispatchBuffer,
2471 destination_offset: crate::BufferAddress,
2472 copy_size: Option<crate::BufferAddress>,
2473 ) {
2474 let source = source.as_core();
2475 let destination = destination.as_core();
2476
2477 if let Err(cause) = self.context.0.command_encoder_copy_buffer_to_buffer(
2478 self.id,
2479 source.id,
2480 source_offset,
2481 destination.id,
2482 destination_offset,
2483 copy_size,
2484 ) {
2485 self.context.handle_error_nolabel(
2486 &self.error_sink,
2487 cause,
2488 "CommandEncoder::copy_buffer_to_buffer",
2489 );
2490 }
2491 }
2492
2493 fn copy_buffer_to_texture(
2494 &self,
2495 source: crate::TexelCopyBufferInfo<'_>,
2496 destination: crate::TexelCopyTextureInfo<'_>,
2497 copy_size: crate::Extent3d,
2498 ) {
2499 if let Err(cause) = self.context.0.command_encoder_copy_buffer_to_texture(
2500 self.id,
2501 &map_buffer_copy_view(source),
2502 &map_texture_copy_view(destination),
2503 ©_size,
2504 ) {
2505 self.context.handle_error_nolabel(
2506 &self.error_sink,
2507 cause,
2508 "CommandEncoder::copy_buffer_to_texture",
2509 );
2510 }
2511 }
2512
2513 fn copy_texture_to_buffer(
2514 &self,
2515 source: crate::TexelCopyTextureInfo<'_>,
2516 destination: crate::TexelCopyBufferInfo<'_>,
2517 copy_size: crate::Extent3d,
2518 ) {
2519 if let Err(cause) = self.context.0.command_encoder_copy_texture_to_buffer(
2520 self.id,
2521 &map_texture_copy_view(source),
2522 &map_buffer_copy_view(destination),
2523 ©_size,
2524 ) {
2525 self.context.handle_error_nolabel(
2526 &self.error_sink,
2527 cause,
2528 "CommandEncoder::copy_texture_to_buffer",
2529 );
2530 }
2531 }
2532
2533 fn copy_texture_to_texture(
2534 &self,
2535 source: crate::TexelCopyTextureInfo<'_>,
2536 destination: crate::TexelCopyTextureInfo<'_>,
2537 copy_size: crate::Extent3d,
2538 ) {
2539 if let Err(cause) = self.context.0.command_encoder_copy_texture_to_texture(
2540 self.id,
2541 &map_texture_copy_view(source),
2542 &map_texture_copy_view(destination),
2543 ©_size,
2544 ) {
2545 self.context.handle_error_nolabel(
2546 &self.error_sink,
2547 cause,
2548 "CommandEncoder::copy_texture_to_texture",
2549 );
2550 }
2551 }
2552
2553 fn begin_compute_pass(
2554 &self,
2555 desc: &crate::ComputePassDescriptor<'_>,
2556 ) -> dispatch::DispatchComputePass {
2557 let timestamp_writes =
2558 desc.timestamp_writes
2559 .as_ref()
2560 .map(|tw| wgc::command::PassTimestampWrites {
2561 query_set: tw.query_set.inner.as_core().id,
2562 beginning_of_pass_write_index: tw.beginning_of_pass_write_index,
2563 end_of_pass_write_index: tw.end_of_pass_write_index,
2564 });
2565
2566 let (pass, err) = self.context.0.command_encoder_begin_compute_pass(
2567 self.id,
2568 &wgc::command::ComputePassDescriptor {
2569 label: desc.label.map(Borrowed),
2570 timestamp_writes,
2571 },
2572 );
2573
2574 if let Some(cause) = err {
2575 self.context.handle_error(
2576 &self.error_sink,
2577 cause,
2578 desc.label,
2579 "CommandEncoder::begin_compute_pass",
2580 );
2581 }
2582
2583 CoreComputePass {
2584 context: self.context.clone(),
2585 pass,
2586 error_sink: self.error_sink.clone(),
2587 id: crate::cmp::Identifier::create(),
2588 }
2589 .into()
2590 }
2591
2592 fn begin_render_pass(
2593 &self,
2594 desc: &crate::RenderPassDescriptor<'_>,
2595 ) -> dispatch::DispatchRenderPass {
2596 let colors = desc
2597 .color_attachments
2598 .iter()
2599 .map(|ca| {
2600 ca.as_ref()
2601 .map(|at| wgc::command::RenderPassColorAttachment {
2602 view: at.view.inner.as_core().id,
2603 depth_slice: at.depth_slice,
2604 resolve_target: at.resolve_target.map(|view| view.inner.as_core().id),
2605 load_op: at.ops.load,
2606 store_op: at.ops.store,
2607 })
2608 })
2609 .collect::<Vec<_>>();
2610
2611 let depth_stencil = desc.depth_stencil_attachment.as_ref().map(|dsa| {
2612 wgc::command::RenderPassDepthStencilAttachment {
2613 view: dsa.view.inner.as_core().id,
2614 depth: map_pass_channel(dsa.depth_ops.as_ref()),
2615 stencil: map_pass_channel(dsa.stencil_ops.as_ref()),
2616 }
2617 });
2618
2619 let timestamp_writes =
2620 desc.timestamp_writes
2621 .as_ref()
2622 .map(|tw| wgc::command::PassTimestampWrites {
2623 query_set: tw.query_set.inner.as_core().id,
2624 beginning_of_pass_write_index: tw.beginning_of_pass_write_index,
2625 end_of_pass_write_index: tw.end_of_pass_write_index,
2626 });
2627
2628 let (pass, err) = self.context.0.command_encoder_begin_render_pass(
2629 self.id,
2630 &wgc::command::RenderPassDescriptor {
2631 label: desc.label.map(Borrowed),
2632 timestamp_writes: timestamp_writes.as_ref(),
2633 color_attachments: Borrowed(&colors),
2634 depth_stencil_attachment: depth_stencil.as_ref(),
2635 occlusion_query_set: desc.occlusion_query_set.map(|qs| qs.inner.as_core().id),
2636 multiview_mask: desc.multiview_mask,
2637 },
2638 );
2639
2640 if let Some(cause) = err {
2641 self.context.handle_error(
2642 &self.error_sink,
2643 cause,
2644 desc.label,
2645 "CommandEncoder::begin_render_pass",
2646 );
2647 }
2648
2649 CoreRenderPass {
2650 context: self.context.clone(),
2651 pass,
2652 error_sink: self.error_sink.clone(),
2653 id: crate::cmp::Identifier::create(),
2654 }
2655 .into()
2656 }
2657
2658 fn finish(&mut self) -> dispatch::DispatchCommandBuffer {
2659 let descriptor = wgt::CommandBufferDescriptor::default();
2660 let (id, opt_label_and_error) =
2661 self.context
2662 .0
2663 .command_encoder_finish(self.id, &descriptor, None);
2664 if let Some((label, cause)) = opt_label_and_error {
2665 self.context
2666 .handle_error(&self.error_sink, cause, Some(&label), "a CommandEncoder");
2667 }
2668 CoreCommandBuffer {
2669 context: self.context.clone(),
2670 id,
2671 }
2672 .into()
2673 }
2674
2675 fn clear_texture(
2676 &self,
2677 texture: &dispatch::DispatchTexture,
2678 subresource_range: &crate::ImageSubresourceRange,
2679 ) {
2680 let texture = texture.as_core();
2681
2682 if let Err(cause) =
2683 self.context
2684 .0
2685 .command_encoder_clear_texture(self.id, texture.id, subresource_range)
2686 {
2687 self.context.handle_error_nolabel(
2688 &self.error_sink,
2689 cause,
2690 "CommandEncoder::clear_texture",
2691 );
2692 }
2693 }
2694
2695 fn clear_buffer(
2696 &self,
2697 buffer: &dispatch::DispatchBuffer,
2698 offset: crate::BufferAddress,
2699 size: Option<crate::BufferAddress>,
2700 ) {
2701 let buffer = buffer.as_core();
2702
2703 if let Err(cause) = self
2704 .context
2705 .0
2706 .command_encoder_clear_buffer(self.id, buffer.id, offset, size)
2707 {
2708 self.context.handle_error_nolabel(
2709 &self.error_sink,
2710 cause,
2711 "CommandEncoder::fill_buffer",
2712 );
2713 }
2714 }
2715
2716 fn insert_debug_marker(&self, label: &str) {
2717 if let Err(cause) = self
2718 .context
2719 .0
2720 .command_encoder_insert_debug_marker(self.id, label)
2721 {
2722 self.context.handle_error_nolabel(
2723 &self.error_sink,
2724 cause,
2725 "CommandEncoder::insert_debug_marker",
2726 );
2727 }
2728 }
2729
2730 fn push_debug_group(&self, label: &str) {
2731 if let Err(cause) = self
2732 .context
2733 .0
2734 .command_encoder_push_debug_group(self.id, label)
2735 {
2736 self.context.handle_error_nolabel(
2737 &self.error_sink,
2738 cause,
2739 "CommandEncoder::push_debug_group",
2740 );
2741 }
2742 }
2743
2744 fn pop_debug_group(&self) {
2745 if let Err(cause) = self.context.0.command_encoder_pop_debug_group(self.id) {
2746 self.context.handle_error_nolabel(
2747 &self.error_sink,
2748 cause,
2749 "CommandEncoder::pop_debug_group",
2750 );
2751 }
2752 }
2753
2754 fn write_timestamp(&self, query_set: &dispatch::DispatchQuerySet, query_index: u32) {
2755 let query_set = query_set.as_core();
2756
2757 if let Err(cause) =
2758 self.context
2759 .0
2760 .command_encoder_write_timestamp(self.id, query_set.id, query_index)
2761 {
2762 self.context.handle_error_nolabel(
2763 &self.error_sink,
2764 cause,
2765 "CommandEncoder::write_timestamp",
2766 );
2767 }
2768 }
2769
2770 fn resolve_query_set(
2771 &self,
2772 query_set: &dispatch::DispatchQuerySet,
2773 first_query: u32,
2774 query_count: u32,
2775 destination: &dispatch::DispatchBuffer,
2776 destination_offset: crate::BufferAddress,
2777 ) {
2778 let query_set = query_set.as_core();
2779 let destination = destination.as_core();
2780
2781 if let Err(cause) = self.context.0.command_encoder_resolve_query_set(
2782 self.id,
2783 query_set.id,
2784 first_query,
2785 query_count,
2786 destination.id,
2787 destination_offset,
2788 ) {
2789 self.context.handle_error_nolabel(
2790 &self.error_sink,
2791 cause,
2792 "CommandEncoder::resolve_query_set",
2793 );
2794 }
2795 }
2796
2797 fn mark_acceleration_structures_built<'a>(
2798 &self,
2799 blas: &mut dyn Iterator<Item = &'a Blas>,
2800 tlas: &mut dyn Iterator<Item = &'a Tlas>,
2801 ) {
2802 let blas = blas
2803 .map(|b| b.inner.as_core().id)
2804 .collect::<SmallVec<[_; 4]>>();
2805 let tlas = tlas
2806 .map(|t| t.inner.as_core().id)
2807 .collect::<SmallVec<[_; 4]>>();
2808 if let Err(cause) = self
2809 .context
2810 .0
2811 .command_encoder_mark_acceleration_structures_built(self.id, &blas, &tlas)
2812 {
2813 self.context.handle_error_nolabel(
2814 &self.error_sink,
2815 cause,
2816 "CommandEncoder::build_acceleration_structures_unsafe_tlas",
2817 );
2818 }
2819 }
2820
2821 fn build_acceleration_structures<'a>(
2822 &self,
2823 blas: &mut dyn Iterator<Item = &'a crate::BlasBuildEntry<'a>>,
2824 tlas: &mut dyn Iterator<Item = &'a crate::Tlas>,
2825 ) {
2826 let blas = blas.map(|e: &crate::BlasBuildEntry<'_>| {
2827 let geometries = match e.geometry {
2828 crate::BlasGeometries::TriangleGeometries(ref triangle_geometries) => {
2829 let iter = triangle_geometries.iter().map(|tg| {
2830 wgc::ray_tracing::BlasTriangleGeometry {
2831 vertex_buffer: tg.vertex_buffer.inner.as_core().id,
2832 index_buffer: tg.index_buffer.map(|buf| buf.inner.as_core().id),
2833 transform_buffer: tg.transform_buffer.map(|buf| buf.inner.as_core().id),
2834 size: tg.size,
2835 transform_buffer_offset: tg.transform_buffer_offset,
2836 first_vertex: tg.first_vertex,
2837 vertex_stride: tg.vertex_stride,
2838 first_index: tg.first_index,
2839 }
2840 });
2841 wgc::ray_tracing::BlasGeometries::TriangleGeometries(Box::new(iter))
2842 }
2843 crate::BlasGeometries::AabbGeometries(ref aabb_geometries) => {
2844 let iter =
2845 aabb_geometries
2846 .iter()
2847 .map(|ag| wgc::ray_tracing::BlasAabbGeometry {
2848 aabb_buffer: ag.aabb_buffer.inner.as_core().id,
2849 stride: ag.stride,
2850 size: ag.size,
2851 primitive_offset: ag.primitive_offset,
2852 });
2853 wgc::ray_tracing::BlasGeometries::AabbGeometries(Box::new(iter))
2854 }
2855 };
2856 wgc::ray_tracing::BlasBuildEntry {
2857 blas_id: e.blas.inner.as_core().id,
2858 geometries,
2859 }
2860 });
2861
2862 let tlas = tlas.into_iter().map(|e| {
2863 let instances = e
2864 .instances
2865 .iter()
2866 .map(|instance: &Option<crate::TlasInstance>| {
2867 instance
2868 .as_ref()
2869 .map(|instance| wgc::ray_tracing::TlasInstance {
2870 blas_id: instance.blas.as_core().id,
2871 transform: &instance.transform,
2872 custom_data: instance.custom_data,
2873 mask: instance.mask,
2874 })
2875 });
2876 wgc::ray_tracing::TlasPackage {
2877 tlas_id: e.inner.as_core().id,
2878 instances: Box::new(instances),
2879 lowest_unmodified: e.lowest_unmodified,
2880 }
2881 });
2882
2883 if let Err(cause) = self
2884 .context
2885 .0
2886 .command_encoder_build_acceleration_structures(self.id, blas, tlas)
2887 {
2888 self.context.handle_error_nolabel(
2889 &self.error_sink,
2890 cause,
2891 "CommandEncoder::build_acceleration_structures_unsafe_tlas",
2892 );
2893 }
2894 }
2895
2896 fn transition_resources<'a>(
2897 &mut self,
2898 buffer_transitions: &mut dyn Iterator<
2899 Item = wgt::BufferTransition<&'a dispatch::DispatchBuffer>,
2900 >,
2901 texture_transitions: &mut dyn Iterator<
2902 Item = wgt::TextureTransition<&'a dispatch::DispatchTexture>,
2903 >,
2904 ) {
2905 let result = self.context.0.command_encoder_transition_resources(
2906 self.id,
2907 buffer_transitions.map(|t| wgt::BufferTransition {
2908 buffer: t.buffer.as_core().id,
2909 state: t.state,
2910 }),
2911 texture_transitions.map(|t| wgt::TextureTransition {
2912 texture: t.texture.as_core().id,
2913 selector: t.selector.clone(),
2914 state: t.state,
2915 }),
2916 );
2917
2918 if let Err(cause) = result {
2919 self.context.handle_error_nolabel(
2920 &self.error_sink,
2921 cause,
2922 "CommandEncoder::transition_resources",
2923 );
2924 }
2925 }
2926}
2927
impl Drop for CoreCommandEncoder {
    fn drop(&mut self) {
        // Release this handle's reference to the core command encoder.
        self.context.0.command_encoder_drop(self.id)
    }
}
2933
// `CoreCommandBuffer` needs no methods beyond the shared dispatch plumbing.
impl dispatch::CommandBufferInterface for CoreCommandBuffer {}
2935
impl Drop for CoreCommandBuffer {
    fn drop(&mut self) {
        // Release this handle's reference to the core command buffer.
        self.context.0.command_buffer_drop(self.id)
    }
}
2941
2942impl dispatch::ComputePassInterface for CoreComputePass {
2943 fn set_pipeline(&mut self, pipeline: &dispatch::DispatchComputePipeline) {
2944 let pipeline = pipeline.as_core();
2945
2946 if let Err(cause) = self
2947 .context
2948 .0
2949 .compute_pass_set_pipeline(&mut self.pass, pipeline.id)
2950 {
2951 self.context.handle_error(
2952 &self.error_sink,
2953 cause,
2954 self.pass.label(),
2955 "ComputePass::set_pipeline",
2956 );
2957 }
2958 }
2959
2960 fn set_bind_group(
2961 &mut self,
2962 index: u32,
2963 bind_group: Option<&dispatch::DispatchBindGroup>,
2964 offsets: &[crate::DynamicOffset],
2965 ) {
2966 let bg = bind_group.map(|bg| bg.as_core().id);
2967
2968 if let Err(cause) =
2969 self.context
2970 .0
2971 .compute_pass_set_bind_group(&mut self.pass, index, bg, offsets)
2972 {
2973 self.context.handle_error(
2974 &self.error_sink,
2975 cause,
2976 self.pass.label(),
2977 "ComputePass::set_bind_group",
2978 );
2979 }
2980 }
2981
2982 fn set_immediates(&mut self, offset: u32, data: &[u8]) {
2983 if let Err(cause) = self
2984 .context
2985 .0
2986 .compute_pass_set_immediates(&mut self.pass, offset, data)
2987 {
2988 self.context.handle_error(
2989 &self.error_sink,
2990 cause,
2991 self.pass.label(),
2992 "ComputePass::set_immediates",
2993 );
2994 }
2995 }
2996
2997 fn insert_debug_marker(&mut self, label: &str) {
2998 if let Err(cause) =
2999 self.context
3000 .0
3001 .compute_pass_insert_debug_marker(&mut self.pass, label, 0)
3002 {
3003 self.context.handle_error(
3004 &self.error_sink,
3005 cause,
3006 self.pass.label(),
3007 "ComputePass::insert_debug_marker",
3008 );
3009 }
3010 }
3011
3012 fn push_debug_group(&mut self, group_label: &str) {
3013 if let Err(cause) =
3014 self.context
3015 .0
3016 .compute_pass_push_debug_group(&mut self.pass, group_label, 0)
3017 {
3018 self.context.handle_error(
3019 &self.error_sink,
3020 cause,
3021 self.pass.label(),
3022 "ComputePass::push_debug_group",
3023 );
3024 }
3025 }
3026
3027 fn pop_debug_group(&mut self) {
3028 if let Err(cause) = self.context.0.compute_pass_pop_debug_group(&mut self.pass) {
3029 self.context.handle_error(
3030 &self.error_sink,
3031 cause,
3032 self.pass.label(),
3033 "ComputePass::pop_debug_group",
3034 );
3035 }
3036 }
3037
3038 fn write_timestamp(&mut self, query_set: &dispatch::DispatchQuerySet, query_index: u32) {
3039 let query_set = query_set.as_core();
3040
3041 if let Err(cause) =
3042 self.context
3043 .0
3044 .compute_pass_write_timestamp(&mut self.pass, query_set.id, query_index)
3045 {
3046 self.context.handle_error(
3047 &self.error_sink,
3048 cause,
3049 self.pass.label(),
3050 "ComputePass::write_timestamp",
3051 );
3052 }
3053 }
3054
3055 fn begin_pipeline_statistics_query(
3056 &mut self,
3057 query_set: &dispatch::DispatchQuerySet,
3058 query_index: u32,
3059 ) {
3060 let query_set = query_set.as_core();
3061
3062 if let Err(cause) = self.context.0.compute_pass_begin_pipeline_statistics_query(
3063 &mut self.pass,
3064 query_set.id,
3065 query_index,
3066 ) {
3067 self.context.handle_error(
3068 &self.error_sink,
3069 cause,
3070 self.pass.label(),
3071 "ComputePass::begin_pipeline_statistics_query",
3072 );
3073 }
3074 }
3075
3076 fn end_pipeline_statistics_query(&mut self) {
3077 if let Err(cause) = self
3078 .context
3079 .0
3080 .compute_pass_end_pipeline_statistics_query(&mut self.pass)
3081 {
3082 self.context.handle_error(
3083 &self.error_sink,
3084 cause,
3085 self.pass.label(),
3086 "ComputePass::end_pipeline_statistics_query",
3087 );
3088 }
3089 }
3090
3091 fn dispatch_workgroups(&mut self, x: u32, y: u32, z: u32) {
3092 if let Err(cause) = self
3093 .context
3094 .0
3095 .compute_pass_dispatch_workgroups(&mut self.pass, x, y, z)
3096 {
3097 self.context.handle_error(
3098 &self.error_sink,
3099 cause,
3100 self.pass.label(),
3101 "ComputePass::dispatch_workgroups",
3102 );
3103 }
3104 }
3105
3106 fn dispatch_workgroups_indirect(
3107 &mut self,
3108 indirect_buffer: &dispatch::DispatchBuffer,
3109 indirect_offset: crate::BufferAddress,
3110 ) {
3111 let indirect_buffer = indirect_buffer.as_core();
3112
3113 if let Err(cause) = self.context.0.compute_pass_dispatch_workgroups_indirect(
3114 &mut self.pass,
3115 indirect_buffer.id,
3116 indirect_offset,
3117 ) {
3118 self.context.handle_error(
3119 &self.error_sink,
3120 cause,
3121 self.pass.label(),
3122 "ComputePass::dispatch_workgroups_indirect",
3123 );
3124 }
3125 }
3126
3127 fn transition_resources<'a>(
3128 &mut self,
3129 buffer_transitions: &mut dyn Iterator<
3130 Item = wgt::BufferTransition<&'a dispatch::DispatchBuffer>,
3131 >,
3132 texture_transitions: &mut dyn Iterator<
3133 Item = wgt::TextureTransition<&'a dispatch::DispatchTextureView>,
3134 >,
3135 ) {
3136 let result = self.context.0.compute_pass_transition_resources(
3137 &mut self.pass,
3138 buffer_transitions.map(|t| wgt::BufferTransition {
3139 buffer: t.buffer.as_core().id,
3140 state: t.state,
3141 }),
3142 texture_transitions.map(|t| wgt::TextureTransition {
3143 texture: t.texture.as_core().id,
3144 selector: t.selector.clone(),
3145 state: t.state,
3146 }),
3147 );
3148
3149 if let Err(cause) = result {
3150 self.context.handle_error(
3151 &self.error_sink,
3152 cause,
3153 self.pass.label(),
3154 "ComputePass::transition_resources",
3155 );
3156 }
3157 }
3158}
3159
impl Drop for CoreComputePass {
    fn drop(&mut self) {
        // Dropping the pass ends it; an error here (e.g. invalid recording)
        // is reported through the error sink like any other pass command.
        if let Err(cause) = self.context.0.compute_pass_end(&mut self.pass) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "ComputePass::end",
            );
        }
    }
}
3172
3173impl dispatch::RenderPassInterface for CoreRenderPass {
3174 fn set_pipeline(&mut self, pipeline: &dispatch::DispatchRenderPipeline) {
3175 let pipeline = pipeline.as_core();
3176
3177 if let Err(cause) = self
3178 .context
3179 .0
3180 .render_pass_set_pipeline(&mut self.pass, pipeline.id)
3181 {
3182 self.context.handle_error(
3183 &self.error_sink,
3184 cause,
3185 self.pass.label(),
3186 "RenderPass::set_pipeline",
3187 );
3188 }
3189 }
3190
3191 fn set_bind_group(
3192 &mut self,
3193 index: u32,
3194 bind_group: Option<&dispatch::DispatchBindGroup>,
3195 offsets: &[crate::DynamicOffset],
3196 ) {
3197 let bg = bind_group.map(|bg| bg.as_core().id);
3198
3199 if let Err(cause) =
3200 self.context
3201 .0
3202 .render_pass_set_bind_group(&mut self.pass, index, bg, offsets)
3203 {
3204 self.context.handle_error(
3205 &self.error_sink,
3206 cause,
3207 self.pass.label(),
3208 "RenderPass::set_bind_group",
3209 );
3210 }
3211 }
3212
3213 fn set_index_buffer(
3214 &mut self,
3215 buffer: &dispatch::DispatchBuffer,
3216 index_format: crate::IndexFormat,
3217 offset: crate::BufferAddress,
3218 size: Option<crate::BufferSize>,
3219 ) {
3220 let buffer = buffer.as_core();
3221
3222 if let Err(cause) = self.context.0.render_pass_set_index_buffer(
3223 &mut self.pass,
3224 buffer.id,
3225 index_format,
3226 offset,
3227 size,
3228 ) {
3229 self.context.handle_error(
3230 &self.error_sink,
3231 cause,
3232 self.pass.label(),
3233 "RenderPass::set_index_buffer",
3234 );
3235 }
3236 }
3237
3238 fn set_vertex_buffer(
3239 &mut self,
3240 slot: u32,
3241 buffer: &dispatch::DispatchBuffer,
3242 offset: crate::BufferAddress,
3243 size: Option<crate::BufferSize>,
3244 ) {
3245 let buffer = buffer.as_core();
3246
3247 if let Err(cause) = self.context.0.render_pass_set_vertex_buffer(
3248 &mut self.pass,
3249 slot,
3250 buffer.id,
3251 offset,
3252 size,
3253 ) {
3254 self.context.handle_error(
3255 &self.error_sink,
3256 cause,
3257 self.pass.label(),
3258 "RenderPass::set_vertex_buffer",
3259 );
3260 }
3261 }
3262
3263 fn set_immediates(&mut self, offset: u32, data: &[u8]) {
3264 if let Err(cause) = self
3265 .context
3266 .0
3267 .render_pass_set_immediates(&mut self.pass, offset, data)
3268 {
3269 self.context.handle_error(
3270 &self.error_sink,
3271 cause,
3272 self.pass.label(),
3273 "RenderPass::set_immediates",
3274 );
3275 }
3276 }
3277
3278 fn set_blend_constant(&mut self, color: crate::Color) {
3279 if let Err(cause) = self
3280 .context
3281 .0
3282 .render_pass_set_blend_constant(&mut self.pass, color)
3283 {
3284 self.context.handle_error(
3285 &self.error_sink,
3286 cause,
3287 self.pass.label(),
3288 "RenderPass::set_blend_constant",
3289 );
3290 }
3291 }
3292
3293 fn set_scissor_rect(&mut self, x: u32, y: u32, width: u32, height: u32) {
3294 if let Err(cause) =
3295 self.context
3296 .0
3297 .render_pass_set_scissor_rect(&mut self.pass, x, y, width, height)
3298 {
3299 self.context.handle_error(
3300 &self.error_sink,
3301 cause,
3302 self.pass.label(),
3303 "RenderPass::set_scissor_rect",
3304 );
3305 }
3306 }
3307
3308 fn set_viewport(
3309 &mut self,
3310 x: f32,
3311 y: f32,
3312 width: f32,
3313 height: f32,
3314 min_depth: f32,
3315 max_depth: f32,
3316 ) {
3317 if let Err(cause) = self.context.0.render_pass_set_viewport(
3318 &mut self.pass,
3319 x,
3320 y,
3321 width,
3322 height,
3323 min_depth,
3324 max_depth,
3325 ) {
3326 self.context.handle_error(
3327 &self.error_sink,
3328 cause,
3329 self.pass.label(),
3330 "RenderPass::set_viewport",
3331 );
3332 }
3333 }
3334
3335 fn set_stencil_reference(&mut self, reference: u32) {
3336 if let Err(cause) = self
3337 .context
3338 .0
3339 .render_pass_set_stencil_reference(&mut self.pass, reference)
3340 {
3341 self.context.handle_error(
3342 &self.error_sink,
3343 cause,
3344 self.pass.label(),
3345 "RenderPass::set_stencil_reference",
3346 );
3347 }
3348 }
3349
3350 fn draw(&mut self, vertices: Range<u32>, instances: Range<u32>) {
3351 if let Err(cause) = self.context.0.render_pass_draw(
3352 &mut self.pass,
3353 vertices.end - vertices.start,
3354 instances.end - instances.start,
3355 vertices.start,
3356 instances.start,
3357 ) {
3358 self.context.handle_error(
3359 &self.error_sink,
3360 cause,
3361 self.pass.label(),
3362 "RenderPass::draw",
3363 );
3364 }
3365 }
3366
3367 fn draw_indexed(&mut self, indices: Range<u32>, base_vertex: i32, instances: Range<u32>) {
3368 if let Err(cause) = self.context.0.render_pass_draw_indexed(
3369 &mut self.pass,
3370 indices.end - indices.start,
3371 instances.end - instances.start,
3372 indices.start,
3373 base_vertex,
3374 instances.start,
3375 ) {
3376 self.context.handle_error(
3377 &self.error_sink,
3378 cause,
3379 self.pass.label(),
3380 "RenderPass::draw_indexed",
3381 );
3382 }
3383 }
3384
3385 fn draw_mesh_tasks(&mut self, group_count_x: u32, group_count_y: u32, group_count_z: u32) {
3386 if let Err(cause) = self.context.0.render_pass_draw_mesh_tasks(
3387 &mut self.pass,
3388 group_count_x,
3389 group_count_y,
3390 group_count_z,
3391 ) {
3392 self.context.handle_error(
3393 &self.error_sink,
3394 cause,
3395 self.pass.label(),
3396 "RenderPass::draw_mesh_tasks",
3397 );
3398 }
3399 }
3400
3401 fn draw_indirect(
3402 &mut self,
3403 indirect_buffer: &dispatch::DispatchBuffer,
3404 indirect_offset: crate::BufferAddress,
3405 ) {
3406 let indirect_buffer = indirect_buffer.as_core();
3407
3408 if let Err(cause) = self.context.0.render_pass_draw_indirect(
3409 &mut self.pass,
3410 indirect_buffer.id,
3411 indirect_offset,
3412 ) {
3413 self.context.handle_error(
3414 &self.error_sink,
3415 cause,
3416 self.pass.label(),
3417 "RenderPass::draw_indirect",
3418 );
3419 }
3420 }
3421
3422 fn draw_indexed_indirect(
3423 &mut self,
3424 indirect_buffer: &dispatch::DispatchBuffer,
3425 indirect_offset: crate::BufferAddress,
3426 ) {
3427 let indirect_buffer = indirect_buffer.as_core();
3428
3429 if let Err(cause) = self.context.0.render_pass_draw_indexed_indirect(
3430 &mut self.pass,
3431 indirect_buffer.id,
3432 indirect_offset,
3433 ) {
3434 self.context.handle_error(
3435 &self.error_sink,
3436 cause,
3437 self.pass.label(),
3438 "RenderPass::draw_indexed_indirect",
3439 );
3440 }
3441 }
3442
3443 fn draw_mesh_tasks_indirect(
3444 &mut self,
3445 indirect_buffer: &dispatch::DispatchBuffer,
3446 indirect_offset: crate::BufferAddress,
3447 ) {
3448 let indirect_buffer = indirect_buffer.as_core();
3449
3450 if let Err(cause) = self.context.0.render_pass_draw_mesh_tasks_indirect(
3451 &mut self.pass,
3452 indirect_buffer.id,
3453 indirect_offset,
3454 ) {
3455 self.context.handle_error(
3456 &self.error_sink,
3457 cause,
3458 self.pass.label(),
3459 "RenderPass::draw_mesh_tasks_indirect",
3460 );
3461 }
3462 }
3463
3464 fn multi_draw_indirect(
3465 &mut self,
3466 indirect_buffer: &dispatch::DispatchBuffer,
3467 indirect_offset: crate::BufferAddress,
3468 count: u32,
3469 ) {
3470 let indirect_buffer = indirect_buffer.as_core();
3471
3472 if let Err(cause) = self.context.0.render_pass_multi_draw_indirect(
3473 &mut self.pass,
3474 indirect_buffer.id,
3475 indirect_offset,
3476 count,
3477 ) {
3478 self.context.handle_error(
3479 &self.error_sink,
3480 cause,
3481 self.pass.label(),
3482 "RenderPass::multi_draw_indirect",
3483 );
3484 }
3485 }
3486
3487 fn multi_draw_indexed_indirect(
3488 &mut self,
3489 indirect_buffer: &dispatch::DispatchBuffer,
3490 indirect_offset: crate::BufferAddress,
3491 count: u32,
3492 ) {
3493 let indirect_buffer = indirect_buffer.as_core();
3494
3495 if let Err(cause) = self.context.0.render_pass_multi_draw_indexed_indirect(
3496 &mut self.pass,
3497 indirect_buffer.id,
3498 indirect_offset,
3499 count,
3500 ) {
3501 self.context.handle_error(
3502 &self.error_sink,
3503 cause,
3504 self.pass.label(),
3505 "RenderPass::multi_draw_indexed_indirect",
3506 );
3507 }
3508 }
3509
3510 fn multi_draw_mesh_tasks_indirect(
3511 &mut self,
3512 indirect_buffer: &dispatch::DispatchBuffer,
3513 indirect_offset: crate::BufferAddress,
3514 count: u32,
3515 ) {
3516 let indirect_buffer = indirect_buffer.as_core();
3517
3518 if let Err(cause) = self.context.0.render_pass_multi_draw_mesh_tasks_indirect(
3519 &mut self.pass,
3520 indirect_buffer.id,
3521 indirect_offset,
3522 count,
3523 ) {
3524 self.context.handle_error(
3525 &self.error_sink,
3526 cause,
3527 self.pass.label(),
3528 "RenderPass::multi_draw_mesh_tasks_indirect",
3529 );
3530 }
3531 }
3532
3533 fn multi_draw_indirect_count(
3534 &mut self,
3535 indirect_buffer: &dispatch::DispatchBuffer,
3536 indirect_offset: crate::BufferAddress,
3537 count_buffer: &dispatch::DispatchBuffer,
3538 count_buffer_offset: crate::BufferAddress,
3539 max_count: u32,
3540 ) {
3541 let indirect_buffer = indirect_buffer.as_core();
3542 let count_buffer = count_buffer.as_core();
3543
3544 if let Err(cause) = self.context.0.render_pass_multi_draw_indirect_count(
3545 &mut self.pass,
3546 indirect_buffer.id,
3547 indirect_offset,
3548 count_buffer.id,
3549 count_buffer_offset,
3550 max_count,
3551 ) {
3552 self.context.handle_error(
3553 &self.error_sink,
3554 cause,
3555 self.pass.label(),
3556 "RenderPass::multi_draw_indirect_count",
3557 );
3558 }
3559 }
3560
3561 fn multi_draw_indexed_indirect_count(
3562 &mut self,
3563 indirect_buffer: &dispatch::DispatchBuffer,
3564 indirect_offset: crate::BufferAddress,
3565 count_buffer: &dispatch::DispatchBuffer,
3566 count_buffer_offset: crate::BufferAddress,
3567 max_count: u32,
3568 ) {
3569 let indirect_buffer = indirect_buffer.as_core();
3570 let count_buffer = count_buffer.as_core();
3571
3572 if let Err(cause) = self
3573 .context
3574 .0
3575 .render_pass_multi_draw_indexed_indirect_count(
3576 &mut self.pass,
3577 indirect_buffer.id,
3578 indirect_offset,
3579 count_buffer.id,
3580 count_buffer_offset,
3581 max_count,
3582 )
3583 {
3584 self.context.handle_error(
3585 &self.error_sink,
3586 cause,
3587 self.pass.label(),
3588 "RenderPass::multi_draw_indexed_indirect_count",
3589 );
3590 }
3591 }
3592
3593 fn multi_draw_mesh_tasks_indirect_count(
3594 &mut self,
3595 indirect_buffer: &dispatch::DispatchBuffer,
3596 indirect_offset: crate::BufferAddress,
3597 count_buffer: &dispatch::DispatchBuffer,
3598 count_buffer_offset: crate::BufferAddress,
3599 max_count: u32,
3600 ) {
3601 let indirect_buffer = indirect_buffer.as_core();
3602 let count_buffer = count_buffer.as_core();
3603
3604 if let Err(cause) = self
3605 .context
3606 .0
3607 .render_pass_multi_draw_mesh_tasks_indirect_count(
3608 &mut self.pass,
3609 indirect_buffer.id,
3610 indirect_offset,
3611 count_buffer.id,
3612 count_buffer_offset,
3613 max_count,
3614 )
3615 {
3616 self.context.handle_error(
3617 &self.error_sink,
3618 cause,
3619 self.pass.label(),
3620 "RenderPass::multi_draw_mesh_tasks_indirect_count",
3621 );
3622 }
3623 }
3624
3625 fn insert_debug_marker(&mut self, label: &str) {
3626 if let Err(cause) = self
3627 .context
3628 .0
3629 .render_pass_insert_debug_marker(&mut self.pass, label, 0)
3630 {
3631 self.context.handle_error(
3632 &self.error_sink,
3633 cause,
3634 self.pass.label(),
3635 "RenderPass::insert_debug_marker",
3636 );
3637 }
3638 }
3639
3640 fn push_debug_group(&mut self, group_label: &str) {
3641 if let Err(cause) =
3642 self.context
3643 .0
3644 .render_pass_push_debug_group(&mut self.pass, group_label, 0)
3645 {
3646 self.context.handle_error(
3647 &self.error_sink,
3648 cause,
3649 self.pass.label(),
3650 "RenderPass::push_debug_group",
3651 );
3652 }
3653 }
3654
3655 fn pop_debug_group(&mut self) {
3656 if let Err(cause) = self.context.0.render_pass_pop_debug_group(&mut self.pass) {
3657 self.context.handle_error(
3658 &self.error_sink,
3659 cause,
3660 self.pass.label(),
3661 "RenderPass::pop_debug_group",
3662 );
3663 }
3664 }
3665
3666 fn write_timestamp(&mut self, query_set: &dispatch::DispatchQuerySet, query_index: u32) {
3667 let query_set = query_set.as_core();
3668
3669 if let Err(cause) =
3670 self.context
3671 .0
3672 .render_pass_write_timestamp(&mut self.pass, query_set.id, query_index)
3673 {
3674 self.context.handle_error(
3675 &self.error_sink,
3676 cause,
3677 self.pass.label(),
3678 "RenderPass::write_timestamp",
3679 );
3680 }
3681 }
3682
3683 fn begin_occlusion_query(&mut self, query_index: u32) {
3684 if let Err(cause) = self
3685 .context
3686 .0
3687 .render_pass_begin_occlusion_query(&mut self.pass, query_index)
3688 {
3689 self.context.handle_error(
3690 &self.error_sink,
3691 cause,
3692 self.pass.label(),
3693 "RenderPass::begin_occlusion_query",
3694 );
3695 }
3696 }
3697
3698 fn end_occlusion_query(&mut self) {
3699 if let Err(cause) = self
3700 .context
3701 .0
3702 .render_pass_end_occlusion_query(&mut self.pass)
3703 {
3704 self.context.handle_error(
3705 &self.error_sink,
3706 cause,
3707 self.pass.label(),
3708 "RenderPass::end_occlusion_query",
3709 );
3710 }
3711 }
3712
3713 fn begin_pipeline_statistics_query(
3714 &mut self,
3715 query_set: &dispatch::DispatchQuerySet,
3716 query_index: u32,
3717 ) {
3718 let query_set = query_set.as_core();
3719
3720 if let Err(cause) = self.context.0.render_pass_begin_pipeline_statistics_query(
3721 &mut self.pass,
3722 query_set.id,
3723 query_index,
3724 ) {
3725 self.context.handle_error(
3726 &self.error_sink,
3727 cause,
3728 self.pass.label(),
3729 "RenderPass::begin_pipeline_statistics_query",
3730 );
3731 }
3732 }
3733
3734 fn end_pipeline_statistics_query(&mut self) {
3735 if let Err(cause) = self
3736 .context
3737 .0
3738 .render_pass_end_pipeline_statistics_query(&mut self.pass)
3739 {
3740 self.context.handle_error(
3741 &self.error_sink,
3742 cause,
3743 self.pass.label(),
3744 "RenderPass::end_pipeline_statistics_query",
3745 );
3746 }
3747 }
3748
3749 fn execute_bundles(
3750 &mut self,
3751 render_bundles: &mut dyn Iterator<Item = &dispatch::DispatchRenderBundle>,
3752 ) {
3753 let temp_render_bundles = render_bundles
3754 .map(|rb| rb.as_core().id)
3755 .collect::<SmallVec<[_; 4]>>();
3756 if let Err(cause) = self
3757 .context
3758 .0
3759 .render_pass_execute_bundles(&mut self.pass, &temp_render_bundles)
3760 {
3761 self.context.handle_error(
3762 &self.error_sink,
3763 cause,
3764 self.pass.label(),
3765 "RenderPass::execute_bundles",
3766 );
3767 }
3768 }
3769}
3770
3771impl Drop for CoreRenderPass {
3772 fn drop(&mut self) {
3773 if let Err(cause) = self.context.0.render_pass_end(&mut self.pass) {
3774 self.context.handle_error(
3775 &self.error_sink,
3776 cause,
3777 self.pass.label(),
3778 "RenderPass::end",
3779 );
3780 }
3781 }
3782}
3783
3784impl dispatch::RenderBundleEncoderInterface for CoreRenderBundleEncoder {
3785 fn set_pipeline(&mut self, pipeline: &dispatch::DispatchRenderPipeline) {
3786 let pipeline = pipeline.as_core();
3787
3788 wgpu_render_bundle_set_pipeline(&mut self.encoder, pipeline.id)
3789 }
3790
3791 fn set_bind_group(
3792 &mut self,
3793 index: u32,
3794 bind_group: Option<&dispatch::DispatchBindGroup>,
3795 offsets: &[crate::DynamicOffset],
3796 ) {
3797 let bg = bind_group.map(|bg| bg.as_core().id);
3798
3799 unsafe {
3800 wgpu_render_bundle_set_bind_group(
3801 &mut self.encoder,
3802 index,
3803 bg,
3804 offsets.as_ptr(),
3805 offsets.len(),
3806 )
3807 }
3808 }
3809
3810 fn set_index_buffer(
3811 &mut self,
3812 buffer: &dispatch::DispatchBuffer,
3813 index_format: crate::IndexFormat,
3814 offset: crate::BufferAddress,
3815 size: Option<crate::BufferSize>,
3816 ) {
3817 let buffer = buffer.as_core();
3818
3819 self.encoder
3820 .set_index_buffer(buffer.id, index_format, offset, size)
3821 }
3822
3823 fn set_vertex_buffer(
3824 &mut self,
3825 slot: u32,
3826 buffer: &dispatch::DispatchBuffer,
3827 offset: crate::BufferAddress,
3828 size: Option<crate::BufferSize>,
3829 ) {
3830 let buffer = buffer.as_core();
3831
3832 wgpu_render_bundle_set_vertex_buffer(&mut self.encoder, slot, buffer.id, offset, size)
3833 }
3834
3835 fn set_immediates(&mut self, offset: u32, data: &[u8]) {
3836 unsafe {
3837 wgpu_render_bundle_set_immediates(
3838 &mut self.encoder,
3839 offset,
3840 data.len().try_into().unwrap(),
3841 data.as_ptr(),
3842 )
3843 }
3844 }
3845
3846 fn draw(&mut self, vertices: Range<u32>, instances: Range<u32>) {
3847 wgpu_render_bundle_draw(
3848 &mut self.encoder,
3849 vertices.end - vertices.start,
3850 instances.end - instances.start,
3851 vertices.start,
3852 instances.start,
3853 )
3854 }
3855
3856 fn draw_indexed(&mut self, indices: Range<u32>, base_vertex: i32, instances: Range<u32>) {
3857 wgpu_render_bundle_draw_indexed(
3858 &mut self.encoder,
3859 indices.end - indices.start,
3860 instances.end - instances.start,
3861 indices.start,
3862 base_vertex,
3863 instances.start,
3864 )
3865 }
3866
3867 fn draw_indirect(
3868 &mut self,
3869 indirect_buffer: &dispatch::DispatchBuffer,
3870 indirect_offset: crate::BufferAddress,
3871 ) {
3872 let indirect_buffer = indirect_buffer.as_core();
3873
3874 wgpu_render_bundle_draw_indirect(&mut self.encoder, indirect_buffer.id, indirect_offset)
3875 }
3876
3877 fn draw_indexed_indirect(
3878 &mut self,
3879 indirect_buffer: &dispatch::DispatchBuffer,
3880 indirect_offset: crate::BufferAddress,
3881 ) {
3882 let indirect_buffer = indirect_buffer.as_core();
3883
3884 wgpu_render_bundle_draw_indexed_indirect(
3885 &mut self.encoder,
3886 indirect_buffer.id,
3887 indirect_offset,
3888 )
3889 }
3890
3891 fn finish(self, desc: &crate::RenderBundleDescriptor<'_>) -> dispatch::DispatchRenderBundle
3892 where
3893 Self: Sized,
3894 {
3895 let (id, error) = self.context.0.render_bundle_encoder_finish(
3896 self.encoder,
3897 &desc.map_label(|l| l.map(Borrowed)),
3898 None,
3899 );
3900 if let Some(err) = error {
3901 self.context
3902 .handle_error_fatal(err, "RenderBundleEncoder::finish");
3903 }
3904 CoreRenderBundle {
3905 context: self.context.clone(),
3906 id,
3907 }
3908 .into()
3909 }
3910}
3911
// Marker impl: `RenderBundleInterface` requires no methods of its own here.
impl dispatch::RenderBundleInterface for CoreRenderBundle {}
3913
impl Drop for CoreRenderBundle {
    // Release the wgc-side bundle when the wrapper goes away.
    fn drop(&mut self) {
        self.context.0.render_bundle_drop(self.id)
    }
}
3919
3920impl dispatch::SurfaceInterface for CoreSurface {
3921 fn get_capabilities(&self, adapter: &dispatch::DispatchAdapter) -> wgt::SurfaceCapabilities {
3922 let adapter = adapter.as_core();
3923
3924 self.context
3925 .0
3926 .surface_get_capabilities(self.id, adapter.id)
3927 .unwrap_or_default()
3928 }
3929
3930 fn configure(&self, device: &dispatch::DispatchDevice, config: &crate::SurfaceConfiguration) {
3931 let device = device.as_core();
3932
3933 let error = self.context.0.surface_configure(self.id, device.id, config);
3934 if let Some(e) = error {
3935 self.context
3936 .handle_error_nolabel(&device.error_sink, e, "Surface::configure");
3937 } else {
3938 *self.configured_device.lock() = Some(device.id);
3939 *self.error_sink.lock() = Some(device.error_sink.clone());
3940 }
3941 }
3942
3943 fn get_current_texture(
3944 &self,
3945 ) -> (
3946 Option<dispatch::DispatchTexture>,
3947 crate::SurfaceStatus,
3948 dispatch::DispatchSurfaceOutputDetail,
3949 ) {
3950 let error_sink = if let Some(error_sink) = self.error_sink.lock().as_ref() {
3951 error_sink.clone()
3952 } else {
3953 Arc::new(Mutex::new(ErrorSinkRaw::new()))
3954 };
3955
3956 let output_detail = CoreSurfaceOutputDetail {
3957 context: self.context.clone(),
3958 surface_id: self.id,
3959 error_sink: error_sink.clone(),
3960 }
3961 .into();
3962
3963 match self.context.0.surface_get_current_texture(self.id, None) {
3964 Ok(wgc::present::SurfaceOutput {
3965 status,
3966 texture: texture_id,
3967 }) => {
3968 let data = texture_id
3969 .map(|id| CoreTexture {
3970 context: self.context.clone(),
3971 id,
3972 error_sink,
3973 })
3974 .map(Into::into);
3975
3976 (data, status, output_detail)
3977 }
3978 Err(err) => {
3979 let error_sink = self.error_sink.lock();
3980 match error_sink.as_ref() {
3981 Some(error_sink) => {
3982 self.context.handle_error_nolabel(
3983 error_sink,
3984 err,
3985 "Surface::get_current_texture_view",
3986 );
3987 (None, crate::SurfaceStatus::Validation, output_detail)
3988 }
3989 None => self
3990 .context
3991 .handle_error_fatal(err, "Surface::get_current_texture_view"),
3992 }
3993 }
3994 }
3995 }
3996}
3997
impl Drop for CoreSurface {
    // Release the wgc-side surface when the wrapper goes away.
    fn drop(&mut self) {
        self.context.0.surface_drop(self.id)
    }
}
4003
4004impl dispatch::SurfaceOutputDetailInterface for CoreSurfaceOutputDetail {
4005 fn texture_discard(&self) {
4006 match self.context.0.surface_texture_discard(self.surface_id) {
4007 Ok(_status) => (),
4008 Err(err) => {
4009 self.context
4010 .handle_error_nolabel(&self.error_sink, err, "Surface::discard_texture")
4011 }
4012 }
4013 }
4014}
impl Drop for CoreSurfaceOutputDetail {
    fn drop(&mut self) {
        // NOTE(review): body is empty here — presumably presentation/discard
        // handles cleanup elsewhere; confirm no per-output teardown is needed.
    }
}
4022
impl dispatch::QueueWriteBufferInterface for CoreQueueWriteBuffer {
    // Size in bytes of the staging mapping.
    #[inline]
    fn len(&self) -> usize {
        self.mapping.len()
    }

    // Delegates to the underlying mapped range.
    // SAFETY: the caller upholds the `write_slice` contract; this impl adds
    // no requirements beyond the inner mapping's.
    #[inline]
    unsafe fn write_slice(&mut self) -> WriteOnly<'_, [u8]> {
        unsafe { self.mapping.write_slice() }
    }
}
impl Drop for CoreQueueWriteBuffer {
    fn drop(&mut self) {
        // NOTE(review): body is empty here — presumably the staging buffer is
        // reclaimed when the mapping/queue drops; confirm no explicit free is
        // required.
    }
}
4041
impl dispatch::BufferMappedRangeInterface for CoreBufferMappedRange {
    // Length in bytes of the mapped range.
    #[inline]
    fn len(&self) -> usize {
        self.size
    }

    #[inline]
    unsafe fn read_slice(&self) -> &[u8] {
        // SAFETY: per the trait contract, `self.ptr` points to a live mapping
        // of at least `self.size` bytes that stays valid for the returned
        // borrow's lifetime.
        unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.size) }
    }

    #[inline]
    unsafe fn write_slice(&mut self) -> WriteOnly<'_, [u8]> {
        // SAFETY: same mapping invariant as `read_slice`; the write-only
        // wrapper is built from a NonNull slice pointer covering `size` bytes.
        unsafe { WriteOnly::new(NonNull::slice_from_raw_parts(self.ptr, self.size)) }
    }

    // WebGPU-only accessor: the core (native) backend has no JS-backed view.
    #[cfg(webgpu)]
    fn as_uint8array(&self) -> &js_sys::Uint8Array {
        panic!("Only available on WebGPU")
    }
}