1use alloc::{
2 borrow::Cow::{self, Borrowed},
3 boxed::Box,
4 format,
5 string::{String, ToString as _},
6 sync::Arc,
7 vec,
8 vec::Vec,
9};
10use core::{
11 error::Error,
12 fmt,
13 future::ready,
14 ops::{Deref, Range},
15 pin::Pin,
16 ptr::NonNull,
17 slice,
18};
19use hashbrown::HashMap;
20
21use arrayvec::ArrayVec;
22use smallvec::SmallVec;
23use wgc::{
24 command::bundle_ffi::*, error::ContextErrorSource, pipeline::CreateShaderModuleError,
25 resource::BlasPrepareCompactResult,
26};
27use wgt::{
28 error::{ErrorType, WebGpuError},
29 WasmNotSendSync,
30};
31
32use crate::{
33 api,
34 dispatch::{self, BlasCompactCallback, BufferMappedRangeInterface},
35 BindingResource, Blas, BufferBinding, BufferDescriptor, CompilationInfo, CompilationMessage,
36 CompilationMessageType, ErrorSource, Features, Label, LoadOp, MapMode, Operations,
37 ShaderSource, SurfaceTargetUnsafe, TextureDescriptor, Tlas, WriteOnly,
38};
39use crate::{dispatch::DispatchAdapter, util::Mutex};
40
41mod thread_id;
42
/// The wgpu-core–backed context: a cheaply clonable handle to the process-wide
/// `wgc::global::Global` that owns every instance/device/resource table.
/// Cloning bumps the `Arc` refcount; all clones address the same global state.
#[derive(Clone)]
pub struct ContextWgpuCore(Arc<wgc::global::Global>);
45
impl Drop for ContextWgpuCore {
    fn drop(&mut self) {
        // Intentionally empty as far as this file shows; the inner `Arc<Global>`
        // drops itself. NOTE(review): an empty `Drop` impl still has language
        // effects (it forbids field destructuring moves) — presumably kept on
        // purpose as a hook/marker; confirm before removing.
    }
}
51
52impl fmt::Debug for ContextWgpuCore {
53 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
54 f.debug_struct("ContextWgpuCore")
55 .field("type", &"Native")
56 .finish()
57 }
58}
59
impl ContextWgpuCore {
    /// Wraps a pre-existing wgpu-hal instance of backend `A` in a new context.
    ///
    /// # Safety
    /// The hal instance must be valid and match backend `A` (contract inherited
    /// from `Global::from_hal_instance`).
    pub unsafe fn from_hal_instance<A: hal::Api>(hal_instance: A::Instance) -> Self {
        Self(unsafe {
            Arc::new(wgc::global::Global::from_hal_instance::<A>(
                "wgpu",
                hal_instance,
            ))
        })
    }

    /// Borrows the underlying hal instance for backend `A`, if that backend is
    /// active in this context.
    ///
    /// # Safety
    /// Contract inherited from `Global::instance_as_hal`.
    pub unsafe fn instance_as_hal<A: hal::Api>(&self) -> Option<&A::Instance> {
        unsafe { self.0.instance_as_hal::<A>() }
    }

    /// Wraps a pre-existing wgpu-core instance in a new context.
    ///
    /// # Safety
    /// Contract inherited from `Global::from_instance`.
    pub unsafe fn from_core_instance(core_instance: wgc::instance::Instance) -> Self {
        Self(unsafe { Arc::new(wgc::global::Global::from_instance(core_instance)) })
    }

    /// Lists adapter ids for the requested backends.
    // NOTE(review): the meaning of the trailing `false` flag is defined by the
    // wgc API, not visible in this file — confirm against `Global::enumerate_adapters`.
    #[cfg(wgpu_core)]
    pub fn enumerate_adapters(&self, backends: wgt::Backends) -> Vec<wgc::id::AdapterId> {
        self.0.enumerate_adapters(backends, false)
    }

    /// Registers a hal adapter with the global and returns its id.
    ///
    /// # Safety
    /// Contract inherited from `Global::create_adapter_from_hal`.
    pub unsafe fn create_adapter_from_hal<A: hal::Api>(
        &self,
        hal_adapter: hal::ExposedAdapter<A>,
    ) -> wgc::id::AdapterId {
        unsafe { self.0.create_adapter_from_hal(hal_adapter.into(), None) }
    }

    // The `*_as_hal` family below each borrow the backend-`A` hal object behind
    // a core id, returning `None` when the resource does not belong to backend
    // `A`. Safety contracts are inherited from the corresponding wgc methods.

    pub unsafe fn adapter_as_hal<A: hal::Api>(
        &self,
        adapter: &CoreAdapter,
    ) -> Option<impl Deref<Target = A::Adapter> + WasmNotSendSync> {
        unsafe { self.0.adapter_as_hal::<A>(adapter.id) }
    }

    pub unsafe fn buffer_as_hal<A: hal::Api>(
        &self,
        buffer: &CoreBuffer,
    ) -> Option<impl Deref<Target = A::Buffer>> {
        unsafe { self.0.buffer_as_hal::<A>(buffer.id) }
    }

    /// Builds a core device + queue pair from an already-open hal device,
    /// wiring both to a freshly created, shared error sink.
    ///
    /// # Safety
    /// Contract inherited from `Global::create_device_from_hal`.
    pub unsafe fn create_device_from_hal<A: hal::Api>(
        &self,
        adapter: &CoreAdapter,
        hal_device: hal::OpenDevice<A>,
        desc: &crate::DeviceDescriptor<'_>,
    ) -> Result<(CoreDevice, CoreQueue), crate::RequestDeviceError> {
        let (device_id, queue_id) = unsafe {
            self.0.create_device_from_hal(
                adapter.id,
                hal_device.into(),
                &desc.map_label(|l| l.map(Borrowed)),
                None,
                None,
            )
        }?;
        // Device and queue deliberately share one error sink so error scopes
        // and uncaptured-error handling see both streams.
        let error_sink = Arc::new(Mutex::new(ErrorSinkRaw::new()));
        let device = CoreDevice {
            context: self.clone(),
            id: device_id,
            error_sink: error_sink.clone(),
            features: desc.required_features,
        };
        let queue = CoreQueue {
            context: self.clone(),
            id: queue_id,
            error_sink,
        };
        Ok((device, queue))
    }

    /// Registers a hal texture under `device` and wraps the id. Creation errors
    /// are routed into the device's error sink rather than returned.
    ///
    /// # Safety
    /// Contract inherited from `Global::create_texture_from_hal`.
    pub unsafe fn create_texture_from_hal<A: hal::Api>(
        &self,
        hal_texture: A::Texture,
        device: &CoreDevice,
        desc: &TextureDescriptor<'_>,
    ) -> CoreTexture {
        let descriptor = desc.map_label_and_view_formats(|l| l.map(Borrowed), |v| v.to_vec());
        let (id, error) = unsafe {
            self.0
                .create_texture_from_hal(Box::new(hal_texture), device.id, &descriptor, None)
        };
        if let Some(cause) = error {
            self.handle_error(
                &device.error_sink,
                cause,
                desc.label,
                "Device::create_texture_from_hal",
            );
        }
        // Note: an id is returned even on error (wgc's error-id scheme).
        CoreTexture {
            context: self.clone(),
            id,
            error_sink: Arc::clone(&device.error_sink),
        }
    }

    /// Registers a hal buffer under `device` and wraps the id. Creation errors
    /// are routed into the device's error sink rather than returned.
    ///
    /// # Safety
    /// Contract inherited from `Global::create_buffer_from_hal`.
    pub unsafe fn create_buffer_from_hal<A: hal::Api>(
        &self,
        hal_buffer: A::Buffer,
        device: &CoreDevice,
        desc: &BufferDescriptor<'_>,
    ) -> CoreBuffer {
        let (id, error) = unsafe {
            self.0.create_buffer_from_hal::<A>(
                hal_buffer,
                device.id,
                &desc.map_label(|l| l.map(Borrowed)),
                None,
            )
        };
        if let Some(cause) = error {
            self.handle_error(
                &device.error_sink,
                cause,
                desc.label,
                "Device::create_buffer_from_hal",
            );
        }
        CoreBuffer {
            context: self.clone(),
            id,
            error_sink: Arc::clone(&device.error_sink),
        }
    }

    pub unsafe fn device_as_hal<A: hal::Api>(
        &self,
        device: &CoreDevice,
    ) -> Option<impl Deref<Target = A::Device>> {
        unsafe { self.0.device_as_hal::<A>(device.id) }
    }

    pub unsafe fn surface_as_hal<A: hal::Api>(
        &self,
        surface: &CoreSurface,
    ) -> Option<impl Deref<Target = A::Surface>> {
        unsafe { self.0.surface_as_hal::<A>(surface.id) }
    }

    pub unsafe fn texture_as_hal<A: hal::Api>(
        &self,
        texture: &CoreTexture,
    ) -> Option<impl Deref<Target = A::Texture>> {
        unsafe { self.0.texture_as_hal::<A>(texture.id) }
    }

    pub unsafe fn texture_view_as_hal<A: hal::Api>(
        &self,
        texture_view: &CoreTextureView,
    ) -> Option<impl Deref<Target = A::TextureView>> {
        unsafe { self.0.texture_view_as_hal::<A>(texture_view.id) }
    }

    /// Runs `hal_command_encoder_callback` with mutable access to the hal
    /// command encoder (or `None` if the backend does not match `A`).
    ///
    /// # Safety
    /// Contract inherited from `Global::command_encoder_as_hal_mut`.
    pub unsafe fn command_encoder_as_hal_mut<
        A: hal::Api,
        F: FnOnce(Option<&mut A::CommandEncoder>) -> R,
        R,
    >(
        &self,
        command_encoder: &CoreCommandEncoder,
        hal_command_encoder_callback: F,
    ) -> R {
        unsafe {
            self.0.command_encoder_as_hal_mut::<A, F, R>(
                command_encoder.id,
                hal_command_encoder_callback,
            )
        }
    }

    pub unsafe fn blas_as_hal<A: hal::Api>(
        &self,
        blas: &CoreBlas,
    ) -> Option<impl Deref<Target = A::AccelerationStructure>> {
        unsafe { self.0.blas_as_hal::<A>(blas.id) }
    }

    pub unsafe fn tlas_as_hal<A: hal::Api>(
        &self,
        tlas: &CoreTlas,
    ) -> Option<impl Deref<Target = A::AccelerationStructure>> {
        unsafe { self.0.tlas_as_hal::<A>(tlas.id) }
    }

    /// Produces a snapshot of the global's resource-usage report.
    pub fn generate_report(&self) -> wgc::global::GlobalReport {
        self.0.generate_report()
    }

    /// Slow path shared by all error reporting: wraps `source` in a
    /// `ContextError` (function name + label), classifies it, and hands it to
    /// the sink. `#[cold]`/`#[inline(never)]` keep this off the hot path;
    /// `#[track_caller]` preserves the call site for the eventual panic.
    #[cold]
    #[track_caller]
    #[inline(never)]
    fn handle_error_inner(
        &self,
        sink_mutex: &Mutex<ErrorSinkRaw>,
        error_type: ErrorType,
        source: ContextErrorSource,
        label: Label<'_>,
        fn_ident: &'static str,
    ) {
        let source: ErrorSource = Box::new(wgc::error::ContextError {
            fn_ident,
            source,
            label: label.unwrap_or_default().to_string(),
        });
        let final_error_handling = {
            let mut sink = sink_mutex.lock();
            // Formatting the whole error tree is expensive; only done for the
            // variants that carry a description.
            let description = || self.format_error(&*source);
            let error = match error_type {
                ErrorType::Internal => {
                    let description = description();
                    crate::Error::Internal {
                        source,
                        description,
                    }
                }
                ErrorType::OutOfMemory => crate::Error::OutOfMemory { source },
                ErrorType::Validation => {
                    let description = description();
                    crate::Error::Validation {
                        source,
                        description,
                    }
                }
                // Device-lost errors are not surfaced through this path.
                ErrorType::DeviceLost => return,
            };
            sink.handle_error_or_return_handler(error)
        };

        // Run any custom uncaptured-error handler *after* releasing the sink
        // lock, so the handler may safely re-enter error handling.
        if let Some(f) = final_error_handling {
            f();
        }
    }

    /// Reports `source` (with a resource label) to the given error sink.
    #[inline]
    #[track_caller]
    fn handle_error(
        &self,
        sink_mutex: &Mutex<ErrorSinkRaw>,
        source: impl WebGpuError + WasmNotSendSync + 'static,
        label: Label<'_>,
        fn_ident: &'static str,
    ) {
        let error_type = source.webgpu_error_type();
        self.handle_error_inner(sink_mutex, error_type, Box::new(source), label, fn_ident)
    }

    /// Like [`Self::handle_error`] but for operations with no label to attach.
    #[inline]
    #[track_caller]
    fn handle_error_nolabel(
        &self,
        sink_mutex: &Mutex<ErrorSinkRaw>,
        source: impl WebGpuError + WasmNotSendSync + 'static,
        fn_ident: &'static str,
    ) {
        let error_type = source.webgpu_error_type();
        self.handle_error_inner(sink_mutex, error_type, Box::new(source), None, fn_ident)
    }

    /// For errors with no recovery path: formats the cause and panics.
    #[track_caller]
    #[cold]
    fn handle_error_fatal(
        &self,
        cause: impl Error + WasmNotSendSync + 'static,
        operation: &'static str,
    ) -> ! {
        panic!("Error in {operation}: {f}", f = self.format_error(&cause));
    }

    /// Pretty-prints an error and its full `source()` chain as an indented
    /// tree, expanding `MultiError` into one branch per contained error.
    #[inline(never)]
    fn format_error(&self, err: &(dyn Error + 'static)) -> String {
        let mut output = String::new();
        let mut level = 1;

        fn print_tree(output: &mut String, level: &mut usize, e: &(dyn Error + 'static)) {
            let mut print = |e: &(dyn Error + 'static)| {
                use core::fmt::Write;
                // Two spaces of indent per nesting level.
                writeln!(output, "{}{}", " ".repeat(*level * 2), e).unwrap();

                if let Some(e) = e.source() {
                    *level += 1;
                    print_tree(output, level, e);
                    *level -= 1;
                }
            };
            if let Some(multi) = e.downcast_ref::<wgc::error::MultiError>() {
                for e in multi.errors() {
                    print(e);
                }
            } else {
                print(e);
            }
        }

        print_tree(&mut output, &mut level, err);

        format!("Validation Error\n\nCaused by:\n{output}")
    }

    pub unsafe fn queue_as_hal<A: hal::Api>(
        &self,
        queue: &CoreQueue,
    ) -> Option<impl Deref<Target = A::Queue> + WasmNotSendSync> {
        unsafe { self.0.queue_as_hal::<A>(queue.id) }
    }
}
383
384fn map_buffer_copy_view(
385 view: crate::TexelCopyBufferInfo<'_>,
386) -> wgt::TexelCopyBufferInfo<wgc::id::BufferId> {
387 wgt::TexelCopyBufferInfo {
388 buffer: view.buffer.inner.as_core().id,
389 layout: view.layout,
390 }
391}
392
393fn map_texture_copy_view(
394 view: crate::TexelCopyTextureInfo<'_>,
395) -> wgt::TexelCopyTextureInfo<wgc::id::TextureId> {
396 wgt::TexelCopyTextureInfo {
397 texture: view.texture.inner.as_core().id,
398 mip_level: view.mip_level,
399 origin: view.origin,
400 aspect: view.aspect,
401 }
402}
403
404#[cfg_attr(not(webgl), expect(unused))]
405fn map_texture_tagged_copy_view(
406 view: crate::CopyExternalImageDestInfo<&api::Texture>,
407) -> wgt::CopyExternalImageDestInfo<wgc::id::TextureId> {
408 wgt::CopyExternalImageDestInfo {
409 texture: view.texture.inner.as_core().id,
410 mip_level: view.mip_level,
411 origin: view.origin,
412 aspect: view.aspect,
413 color_space: view.color_space,
414 premultiplied_alpha: view.premultiplied_alpha,
415 }
416}
417
418fn map_load_op<V: Copy>(load: &LoadOp<V>) -> LoadOp<Option<V>> {
419 match *load {
420 LoadOp::Clear(clear_value) => LoadOp::Clear(Some(clear_value)),
421 LoadOp::DontCare(token) => LoadOp::DontCare(token),
422 LoadOp::Load => LoadOp::Load,
423 }
424}
425
426fn map_pass_channel<V: Copy>(ops: Option<&Operations<V>>) -> wgc::command::PassChannel<Option<V>> {
427 match ops {
428 Some(&Operations { load, store }) => wgc::command::PassChannel {
429 load_op: Some(map_load_op(&load)),
430 store_op: Some(store),
431 read_only: false,
432 },
433 None => wgc::command::PassChannel {
434 load_op: None,
435 store_op: None,
436 read_only: true,
437 },
438 }
439}
440
/// Core handle to a presentation surface.
#[derive(Debug)]
pub struct CoreSurface {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::SurfaceId,
    // Device this surface was last configured with, if any — presumably set
    // during surface configuration (not visible in this chunk; confirm).
    configured_device: Mutex<Option<wgc::id::DeviceId>>,
    // Error sink of the configuring device; `None` until configured
    // (created as `Mutex::default()` in `create_surface`).
    error_sink: Mutex<Option<ErrorSink>>,
}
452
/// Core handle to an adapter id.
#[derive(Debug)]
pub struct CoreAdapter {
    pub(crate) context: ContextWgpuCore,
    pub(crate) id: wgc::id::AdapterId,
}

/// Core handle to a device, carrying the error sink it shares with its queue
/// and the features it was requested with.
#[derive(Debug)]
pub struct CoreDevice {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::DeviceId,
    error_sink: ErrorSink,
    // Features requested at device creation; consulted e.g. by
    // `create_bind_group` to decide which binding-array paths are active.
    features: Features,
}
466
/// Core handle to a buffer; errors are reported through the owning device's sink.
#[derive(Debug)]
pub struct CoreBuffer {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BufferId,
    error_sink: ErrorSink,
}

/// Core handle to a shader module, retaining the compilation messages
/// produced at creation time.
#[derive(Debug)]
pub struct CoreShaderModule {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::ShaderModuleId,
    compilation_info: CompilationInfo,
}
480
// Thin core handles: each pairs a context clone with the wgpu-core id of one
// resource kind. Those that can surface deferred errors also carry an
// `ErrorSink` (see `CoreTexture` below).

#[derive(Debug)]
pub struct CoreBindGroupLayout {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BindGroupLayoutId,
}

#[derive(Debug)]
pub struct CoreBindGroup {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BindGroupId,
}

#[derive(Debug)]
pub struct CoreTexture {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::TextureId,
    error_sink: ErrorSink,
}

#[derive(Debug)]
pub struct CoreTextureView {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::TextureViewId,
}

#[derive(Debug)]
pub struct CoreExternalTexture {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::ExternalTextureId,
}

#[derive(Debug)]
pub struct CoreSampler {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::SamplerId,
}

#[derive(Debug)]
pub struct CoreQuerySet {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::QuerySetId,
}
523
#[derive(Debug)]
pub struct CorePipelineLayout {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::PipelineLayoutId,
}

#[derive(Debug)]
pub struct CorePipelineCache {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::PipelineCacheId,
}

#[derive(Debug)]
pub struct CoreCommandBuffer {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::CommandBufferId,
}

/// Render-bundle encoder: owns the boxed wgpu-core encoder state directly
/// (not just an id) plus a comparison identifier used by the cmp macros below.
#[derive(Debug)]
pub struct CoreRenderBundleEncoder {
    pub(crate) context: ContextWgpuCore,
    encoder: Box<wgc::command::RenderBundleEncoder>,
    id: crate::cmp::Identifier,
}

#[derive(Debug)]
pub struct CoreRenderBundle {
    context: ContextWgpuCore,
    id: wgc::id::RenderBundleId,
}

/// Core handle to a queue; shares its error sink with the owning device.
#[derive(Debug)]
pub struct CoreQueue {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::QueueId,
    error_sink: ErrorSink,
}
561
#[derive(Debug)]
pub struct CoreComputePipeline {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::ComputePipelineId,
    error_sink: ErrorSink,
}

#[derive(Debug)]
pub struct CoreRenderPipeline {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::RenderPipelineId,
    error_sink: ErrorSink,
}

/// Open compute pass: owns the in-progress wgpu-core pass state plus a
/// comparison identifier used by the cmp macros below.
#[derive(Debug)]
pub struct CoreComputePass {
    pub(crate) context: ContextWgpuCore,
    pass: wgc::command::ComputePass,
    error_sink: ErrorSink,
    id: crate::cmp::Identifier,
}

/// Open render pass; same shape as [`CoreComputePass`].
#[derive(Debug)]
pub struct CoreRenderPass {
    pub(crate) context: ContextWgpuCore,
    pass: wgc::command::RenderPass,
    error_sink: ErrorSink,
    id: crate::cmp::Identifier,
}
591
#[derive(Debug)]
pub struct CoreCommandEncoder {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::CommandEncoderId,
    error_sink: ErrorSink,
}

/// Bottom-level acceleration structure handle.
#[derive(Debug)]
pub struct CoreBlas {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BlasId,
    error_sink: ErrorSink,
}

/// Top-level acceleration structure handle.
#[derive(Debug)]
pub struct CoreTlas {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::TlasId,
}

/// Bookkeeping returned alongside a surface texture, used to present/discard it.
#[derive(Debug)]
pub struct CoreSurfaceOutputDetail {
    context: ContextWgpuCore,
    surface_id: wgc::id::SurfaceId,
    error_sink: ErrorSink,
}
619
/// Shared, lockable error sink; one per device, shared with its queue and
/// resources so all their errors land in the same scope stack.
type ErrorSink = Arc<Mutex<ErrorSinkRaw>>;

/// One pushed error scope: captures at most one error matching `filter`.
struct ErrorScope {
    // First matching error captured in this scope, if any.
    error: Option<crate::Error>,
    filter: crate::ErrorFilter,
}
626
/// Error-sink state: a per-thread stack of error scopes plus an optional
/// handler for errors no scope captures.
struct ErrorSinkRaw {
    // Scope stacks are keyed by thread so concurrently pushed scopes on
    // different threads don't capture each other's errors.
    scopes: HashMap<thread_id::ThreadId, Vec<ErrorScope>>,
    uncaptured_handler: Option<Arc<dyn crate::UncapturedErrorHandler>>,
}
631
impl ErrorSinkRaw {
    /// Creates an empty sink: no scopes, no custom uncaptured-error handler.
    fn new() -> ErrorSinkRaw {
        ErrorSinkRaw {
            scopes: HashMap::new(),
            uncaptured_handler: None,
        }
    }

    /// Routes `err` to the innermost error scope (on the current thread) whose
    /// filter matches. A scope keeps only the *first* matching error. If no
    /// scope matches, returns the custom uncaptured-error handler as a closure
    /// to be run *after* the sink lock is released — or, with no custom
    /// handler, diverges via [`default_error_handler`].
    #[track_caller]
    #[must_use]
    fn handle_error_or_return_handler(&mut self, err: crate::Error) -> Option<impl FnOnce()> {
        let filter = match err {
            crate::Error::OutOfMemory { .. } => crate::ErrorFilter::OutOfMemory,
            crate::Error::Validation { .. } => crate::ErrorFilter::Validation,
            crate::Error::Internal { .. } => crate::ErrorFilter::Internal,
        };
        let thread_id = thread_id::ThreadId::current();
        let scopes = self.scopes.entry(thread_id).or_default();
        // Search innermost-first (rev) so nested scopes shadow outer ones.
        match scopes.iter_mut().rev().find(|scope| scope.filter == filter) {
            Some(scope) => {
                if scope.error.is_none() {
                    scope.error = Some(err);
                }
                None
            }
            None => {
                if let Some(custom_handler) = &self.uncaptured_handler {
                    let custom_handler = Arc::clone(custom_handler);
                    Some(move || (custom_handler)(err))
                } else {
                    // Diverges (panics), so no handler is ever returned here.
                    default_error_handler(err)
                }
            }
        }
    }
}
678
679impl fmt::Debug for ErrorSinkRaw {
680 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
681 write!(f, "ErrorSink")
682 }
683}
684
/// Fallback for errors that escape every error scope when no custom
/// uncaptured-error handler is installed: logs, then panics with the error.
#[track_caller]
fn default_error_handler(err: crate::Error) -> ! {
    log::error!("Handling wgpu errors as fatal by default");
    panic!("wgpu error: {err}\n");
}
690
/// Converts a shader-module creation error into WebGPU-style compilation info.
/// Frontend parse/validation errors carry structured per-message data; device
/// loss / generation errors yield no messages; everything else is flattened
/// into a single error-severity message with no source location.
impl From<CreateShaderModuleError> for CompilationInfo {
    fn from(value: CreateShaderModuleError) -> Self {
        match value {
            #[cfg(feature = "wgsl")]
            CreateShaderModuleError::Parsing(v) => v.into(),
            #[cfg(feature = "glsl")]
            CreateShaderModuleError::ParsingGlsl(v) => v.into(),
            #[cfg(feature = "spirv")]
            CreateShaderModuleError::ParsingSpirV(v) => v.into(),
            CreateShaderModuleError::Validation(v) => v.into(),
            CreateShaderModuleError::Device(_) | CreateShaderModuleError::Generation => {
                // Not compilation problems per se — report no messages.
                CompilationInfo {
                    messages: Vec::new(),
                }
            }
            _ => CompilationInfo {
                messages: vec![CompilationMessage {
                    message: value.to_string(),
                    message_type: CompilationMessageType::Error,
                    location: None,
                }],
            },
        }
    }
}
719
/// Staging buffer handed out by `Queue::write_buffer_with`: the staging-buffer
/// id plus the CPU-visible mapping the caller writes into.
#[derive(Debug)]
pub struct CoreQueueWriteBuffer {
    buffer_id: wgc::id::StagingBufferId,
    mapping: CoreBufferMappedRange,
}

/// Raw view of a mapped buffer region: base pointer and length in bytes.
/// Validity of `ptr` is guaranteed by wgpu-core for the mapping's lifetime
/// — TODO(review) confirm; this chunk only stores it.
#[derive(Debug)]
pub struct CoreBufferMappedRange {
    ptr: NonNull<u8>,
    size: usize,
}
731
// SAFETY: `NonNull<u8>` is not auto-Send/Sync; these impls assert the mapped
// region may be accessed from other threads. NOTE(review): soundness relies on
// wgpu-core's mapping guarantees (exclusive access for the mapping's
// lifetime), which are not visible in this file — confirm upstream.
#[cfg(send_sync)]
unsafe impl Send for CoreBufferMappedRange {}
#[cfg(send_sync)]
unsafe impl Sync for CoreBufferMappedRange {}
736
impl Drop for CoreBufferMappedRange {
    fn drop(&mut self) {
        // Intentionally empty as far as this file shows: the memory behind
        // `ptr` is owned by wgpu-core, presumably reclaimed on unmap/submit
        // rather than here — TODO(review) confirm.
    }
}
743
// Identity-based comparison/hashing for every core handle type. The context
// compares by `Arc` address; id-bearing handles proxy through their id; the
// two mapped-memory types proxy through their base pointer.
crate::cmp::impl_eq_ord_hash_arc_address!(ContextWgpuCore => .0);
crate::cmp::impl_eq_ord_hash_proxy!(CoreAdapter => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreDevice => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreQueue => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreShaderModule => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBindGroupLayout => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBindGroup => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreTextureView => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreSampler => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBuffer => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreTexture => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreExternalTexture => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBlas => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreTlas => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreQuerySet => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CorePipelineLayout => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderPipeline => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreComputePipeline => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CorePipelineCache => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreCommandEncoder => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreComputePass => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderPass => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreCommandBuffer => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderBundleEncoder => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderBundle => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreSurface => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreSurfaceOutputDetail => .surface_id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreQueueWriteBuffer => .mapping.ptr);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBufferMappedRange => .ptr);
773
impl dispatch::InstanceInterface for ContextWgpuCore {
    /// Creates a fresh wgpu-core global from an instance descriptor.
    fn new(desc: wgt::InstanceDescriptor) -> Self
    where
        Self: Sized,
    {
        Self(Arc::new(wgc::global::Global::new("wgpu", desc, None)))
    }

    /// Creates a surface from one of the platform-specific unsafe targets.
    /// Which arms exist depends on the enabled backends (cfg below).
    ///
    /// # Safety
    /// The raw handles/pointers in `target` must be valid; contract inherited
    /// from the respective `instance_create_surface*` methods.
    unsafe fn create_surface(
        &self,
        target: crate::api::SurfaceTargetUnsafe,
    ) -> Result<dispatch::DispatchSurface, crate::CreateSurfaceError> {
        let id = match target {
            SurfaceTargetUnsafe::RawHandle {
                raw_display_handle,
                raw_window_handle,
            } => unsafe {
                self.0
                    .instance_create_surface(raw_display_handle, raw_window_handle, None)
            },

            #[cfg(all(drm, not(target_os = "netbsd")))]
            SurfaceTargetUnsafe::Drm {
                fd,
                plane,
                connector_id,
                width,
                height,
                refresh_rate,
            } => unsafe {
                self.0.instance_create_surface_from_drm(
                    fd,
                    plane,
                    connector_id,
                    width,
                    height,
                    refresh_rate,
                    None,
                )
            },

            #[cfg(metal)]
            SurfaceTargetUnsafe::CoreAnimationLayer(layer) => unsafe {
                self.0.instance_create_surface_metal(layer, None)
            },

            // DRM surfaces are unsupported on NetBSD; report the backend as
            // not enabled instead of attempting creation.
            #[cfg(all(drm, target_os = "netbsd"))]
            SurfaceTargetUnsafe::Drm { .. } => Err(
                wgc::instance::CreateSurfaceError::BackendNotEnabled(wgt::Backend::Vulkan),
            ),

            #[cfg(dx12)]
            SurfaceTargetUnsafe::CompositionVisual(visual) => unsafe {
                self.0.instance_create_surface_from_visual(visual, None)
            },

            #[cfg(dx12)]
            SurfaceTargetUnsafe::SurfaceHandle(surface_handle) => unsafe {
                self.0
                    .instance_create_surface_from_surface_handle(surface_handle, None)
            },

            #[cfg(dx12)]
            SurfaceTargetUnsafe::SwapChainPanel(swap_chain_panel) => unsafe {
                self.0
                    .instance_create_surface_from_swap_chain_panel(swap_chain_panel, None)
            },
        }?;

        // The surface starts unconfigured: no device, no error sink yet.
        Ok(CoreSurface {
            context: self.clone(),
            id,
            configured_device: Mutex::default(),
            error_sink: Mutex::default(),
        }
        .into())
    }

    /// Requests an adapter matching `options`. The wgc call is synchronous;
    /// the result is wrapped in an already-ready future to satisfy the
    /// dispatch interface.
    fn request_adapter(
        &self,
        options: &crate::api::RequestAdapterOptions<'_, '_>,
    ) -> Pin<Box<dyn dispatch::RequestAdapterFuture>> {
        let id = self.0.request_adapter(
            &wgc::instance::RequestAdapterOptions {
                power_preference: options.power_preference,
                force_fallback_adapter: options.force_fallback_adapter,
                compatible_surface: options
                    .compatible_surface
                    .map(|surface| surface.inner.as_core().id),
                apply_limit_buckets: false,
            },
            wgt::Backends::all(),
            None,
        );
        let adapter = id.map(|id| {
            let core = CoreAdapter {
                context: self.clone(),
                id,
            };
            let generic: dispatch::DispatchAdapter = core.into();
            generic
        });
        Box::pin(ready(adapter))
    }

    /// Polls every device; returns whether all queues are empty.
    /// A polling error is treated as fatal (panics).
    fn poll_all_devices(&self, force_wait: bool) -> bool {
        match self.0.poll_all_devices(force_wait) {
            Ok(all_queue_empty) => all_queue_empty,
            Err(err) => self.handle_error_fatal(err, "Instance::poll_all_devices"),
        }
    }

    /// Maps naga's implemented WGSL language extensions onto the public
    /// `WgslLanguageFeatures` bitflags. Exhaustive match: adding an extension
    /// upstream forces an update here.
    #[cfg(feature = "wgsl")]
    fn wgsl_language_features(&self) -> crate::WgslLanguageFeatures {
        use wgc::naga::front::wgsl::ImplementedLanguageExtension;
        ImplementedLanguageExtension::all().iter().copied().fold(
            crate::WgslLanguageFeatures::empty(),
            |acc, wle| {
                acc | match wle {
                    ImplementedLanguageExtension::ReadOnlyAndReadWriteStorageTextures => {
                        crate::WgslLanguageFeatures::ReadOnlyAndReadWriteStorageTextures
                    }
                    ImplementedLanguageExtension::Packed4x8IntegerDotProduct => {
                        crate::WgslLanguageFeatures::Packed4x8IntegerDotProduct
                    }
                    ImplementedLanguageExtension::PointerCompositeAccess => {
                        crate::WgslLanguageFeatures::PointerCompositeAccess
                    }
                }
            },
        )
    }

    /// Enumerates adapters (delegating to the inherent method) and wraps each
    /// id as a dispatch adapter, returned via an already-ready future.
    fn enumerate_adapters(
        &self,
        backends: crate::Backends,
    ) -> Pin<Box<dyn dispatch::EnumerateAdapterFuture>> {
        let adapters: Vec<DispatchAdapter> = self
            .enumerate_adapters(backends)
            .into_iter()
            .map(|adapter| {
                let core = crate::backend::wgpu_core::CoreAdapter {
                    context: self.clone(),
                    id: adapter,
                };
                core.into()
            })
            .collect();
        Box::pin(ready(adapters))
    }
}
925
impl dispatch::AdapterInterface for CoreAdapter {
    /// Opens a device + queue on this adapter. Mirrors
    /// `ContextWgpuCore::create_device_from_hal`: on success the pair shares a
    /// freshly created error sink; failures resolve the future to `Err`.
    fn request_device(
        &self,
        desc: &crate::DeviceDescriptor<'_>,
    ) -> Pin<Box<dyn dispatch::RequestDeviceFuture>> {
        let res = self.context.0.adapter_request_device(
            self.id,
            &desc.map_label(|l| l.map(Borrowed)),
            None,
            None,
        );
        let (device_id, queue_id) = match res {
            Ok(ids) => ids,
            Err(err) => {
                return Box::pin(ready(Err(err.into())));
            }
        };
        let error_sink = Arc::new(Mutex::new(ErrorSinkRaw::new()));
        let device = CoreDevice {
            context: self.context.clone(),
            id: device_id,
            error_sink: error_sink.clone(),
            features: desc.required_features,
        };
        let queue = CoreQueue {
            context: self.context.clone(),
            id: queue_id,
            error_sink,
        };
        Box::pin(ready(Ok((device.into(), queue.into()))))
    }

    /// Whether this adapter can present to the given surface.
    fn is_surface_supported(&self, surface: &dispatch::DispatchSurface) -> bool {
        let surface = surface.as_core();

        self.context
            .0
            .adapter_is_surface_supported(self.id, surface.id)
    }

    // The remaining methods are straight delegations to wgc adapter queries.

    fn features(&self) -> crate::Features {
        self.context.0.adapter_features(self.id)
    }

    fn limits(&self) -> crate::Limits {
        self.context.0.adapter_limits(self.id)
    }

    fn downlevel_capabilities(&self) -> crate::DownlevelCapabilities {
        self.context.0.adapter_downlevel_capabilities(self.id)
    }

    fn get_info(&self) -> crate::AdapterInfo {
        self.context.0.adapter_get_info(self.id)
    }

    fn get_texture_format_features(
        &self,
        format: crate::TextureFormat,
    ) -> crate::TextureFormatFeatures {
        self.context
            .0
            .adapter_get_texture_format_features(self.id, format)
    }

    fn get_presentation_timestamp(&self) -> crate::PresentationTimestamp {
        self.context.0.adapter_get_presentation_timestamp(self.id)
    }

    fn cooperative_matrix_properties(&self) -> Vec<crate::wgt::CooperativeMatrixProperties> {
        self.context
            .0
            .adapter_cooperative_matrix_properties(self.id)
    }
}
1001
impl Drop for CoreAdapter {
    fn drop(&mut self) {
        // Release the adapter's entry in the global registry.
        self.context.0.adapter_drop(self.id)
    }
}
1007
1008impl dispatch::DeviceInterface for CoreDevice {
    // Straight delegations to wgc device queries.

    fn features(&self) -> crate::Features {
        self.context.0.device_features(self.id)
    }

    fn limits(&self) -> crate::Limits {
        self.context.0.device_limits(self.id)
    }

    fn adapter_info(&self) -> crate::AdapterInfo {
        self.context.0.device_adapter_info(self.id)
    }
1020
    // With no shader frontend feature enabled, the descriptor/desc bindings
    // below go unused — silence that case explicitly.
    #[cfg_attr(
        not(any(
            feature = "spirv",
            feature = "glsl",
            feature = "wgsl",
            feature = "naga-ir"
        )),
        expect(unused)
    )]
    /// Compiles a shader module from whichever source kind the descriptor
    /// carries. Errors are pushed to the device's error sink; the returned
    /// module still carries the (error-derived) compilation messages.
    fn create_shader_module(
        &self,
        desc: crate::ShaderModuleDescriptor<'_>,
        shader_bound_checks: wgt::ShaderRuntimeChecks,
    ) -> dispatch::DispatchShaderModule {
        let descriptor = wgc::pipeline::ShaderModuleDescriptor {
            label: desc.label.map(Borrowed),
            runtime_checks: shader_bound_checks,
        };
        let source = match desc.source {
            #[cfg(feature = "spirv")]
            ShaderSource::SpirV(ref spv) => {
                let options = naga::front::spv::Options {
                    // NOTE(review): coordinate-space adjustment off and strict
                    // capabilities on — policy choices inherited from wgpu's
                    // SPIR-V ingestion; confirm against naga's docs.
                    adjust_coordinate_space: false,
                    strict_capabilities: true,
                    block_ctx_dump_prefix: None,
                };
                wgc::pipeline::ShaderModuleSource::SpirV(Borrowed(spv), options)
            }
            #[cfg(feature = "glsl")]
            ShaderSource::Glsl {
                ref shader,
                stage,
                defines,
            } => {
                let options = naga::front::glsl::Options {
                    stage,
                    defines: defines
                        .iter()
                        .map(|&(key, value)| (String::from(key), String::from(value)))
                        .collect(),
                };
                wgc::pipeline::ShaderModuleSource::Glsl(Borrowed(shader), options)
            }
            #[cfg(feature = "wgsl")]
            ShaderSource::Wgsl(ref code) => wgc::pipeline::ShaderModuleSource::Wgsl(Borrowed(code)),
            #[cfg(feature = "naga-ir")]
            ShaderSource::Naga(module) => wgc::pipeline::ShaderModuleSource::Naga(module),
            // Placeholder variant that must never reach the backend.
            ShaderSource::Dummy(_) => panic!("found `ShaderSource::Dummy`"),
        };
        let (id, error) =
            self.context
                .0
                .device_create_shader_module(self.id, &descriptor, source, None);
        let compilation_info = match error {
            Some(cause) => {
                // Report through the sink *and* keep the messages on the module.
                self.context.handle_error(
                    &self.error_sink,
                    cause.clone(),
                    desc.label,
                    "Device::create_shader_module",
                );
                CompilationInfo::from(cause)
            }
            None => CompilationInfo { messages: vec![] },
        };

        CoreShaderModule {
            context: self.context.clone(),
            id,
            compilation_info,
        }
        .into()
    }
1096
    /// Creates a shader module from a backend-specific passthrough descriptor,
    /// bypassing naga frontends. Errors are routed into the error sink.
    ///
    /// # Safety
    /// Contract inherited from `device_create_shader_module_passthrough`
    /// (the passthrough source is not validated here).
    unsafe fn create_shader_module_passthrough(
        &self,
        desc: &crate::ShaderModuleDescriptorPassthrough<'_>,
    ) -> dispatch::DispatchShaderModule {
        let desc = desc.map_label(|l| l.map(Cow::from));
        let (id, error) = unsafe {
            self.context
                .0
                .device_create_shader_module_passthrough(self.id, &desc, None)
        };

        let compilation_info = match error {
            Some(cause) => {
                self.context.handle_error(
                    &self.error_sink,
                    cause.clone(),
                    desc.label.as_deref(),
                    "Device::create_shader_module_passthrough",
                );
                CompilationInfo::from(cause)
            }
            None => CompilationInfo { messages: vec![] },
        };

        CoreShaderModule {
            context: self.context.clone(),
            id,
            compilation_info,
        }
        .into()
    }
1128
    /// Creates a bind group layout; entries are borrowed, not copied. Creation
    /// errors go to the error sink (an id is still returned by wgc).
    fn create_bind_group_layout(
        &self,
        desc: &crate::BindGroupLayoutDescriptor<'_>,
    ) -> dispatch::DispatchBindGroupLayout {
        let descriptor = wgc::binding_model::BindGroupLayoutDescriptor {
            label: desc.label.map(Borrowed),
            entries: Borrowed(desc.entries),
        };
        let (id, error) =
            self.context
                .0
                .device_create_bind_group_layout(self.id, &descriptor, None);
        if let Some(cause) = error {
            self.context.handle_error(
                &self.error_sink,
                cause,
                desc.label,
                "Device::create_bind_group_layout",
            );
        }
        CoreBindGroupLayout {
            context: self.context.clone(),
            id,
        }
        .into()
    }
1155
    /// Translates a wgpu [`crate::BindGroupDescriptor`] into the wgpu-core form and
    /// creates the bind group; any validation error is reported through the
    /// device's error sink (an invalid-ID placeholder is still returned).
    fn create_bind_group(
        &self,
        desc: &crate::BindGroupDescriptor<'_>,
    ) -> dispatch::DispatchBindGroup {
        use wgc::binding_model as bm;

        // Array bindings must be passed to wgpu-core as borrowed slices, so we
        // first flatten every array resource into contiguous Vecs of core IDs.
        // The Vecs stay alive for the whole function; the `remaining_*` cursors
        // below hand out sub-slices of them. When the feature is disabled the
        // Vecs stay empty and the cursors are never advanced.
        let mut arrayed_texture_views = Vec::new();
        let mut arrayed_samplers = Vec::new();
        if self.features.contains(Features::TEXTURE_BINDING_ARRAY) {
            for entry in desc.entries.iter() {
                if let BindingResource::TextureViewArray(array) = entry.resource {
                    arrayed_texture_views.extend(array.iter().map(|view| view.inner.as_core().id));
                }
                if let BindingResource::SamplerArray(array) = entry.resource {
                    arrayed_samplers.extend(array.iter().map(|sampler| sampler.inner.as_core().id));
                }
            }
        }
        // Cursors into the flattened storage; entries consume prefixes in the
        // same iteration order used to build the Vecs above.
        let mut remaining_arrayed_texture_views = &arrayed_texture_views[..];
        let mut remaining_arrayed_samplers = &arrayed_samplers[..];

        let mut arrayed_buffer_bindings = Vec::new();
        if self.features.contains(Features::BUFFER_BINDING_ARRAY) {
            for entry in desc.entries.iter() {
                if let BindingResource::BufferArray(array) = entry.resource {
                    arrayed_buffer_bindings.extend(array.iter().map(|binding| bm::BufferBinding {
                        buffer: binding.buffer.inner.as_core().id,
                        offset: binding.offset,
                        size: binding.size.map(wgt::BufferSize::get),
                    }));
                }
            }
        }
        let mut remaining_arrayed_buffer_bindings = &arrayed_buffer_bindings[..];

        let mut arrayed_acceleration_structures = Vec::new();
        if self
            .features
            .contains(Features::ACCELERATION_STRUCTURE_BINDING_ARRAY)
        {
            for entry in desc.entries.iter() {
                if let BindingResource::AccelerationStructureArray(array) = entry.resource {
                    arrayed_acceleration_structures
                        .extend(array.iter().map(|tlas| tlas.inner.as_core().id));
                }
            }
        }
        let mut remaining_arrayed_acceleration_structures = &arrayed_acceleration_structures[..];

        // Second pass: build the core entries. Array resources split off the
        // next `array.len()` items from the matching cursor, which works
        // because both passes visit `desc.entries` in the same order.
        let entries = desc
            .entries
            .iter()
            .map(|entry| bm::BindGroupEntry {
                binding: entry.binding,
                resource: match entry.resource {
                    BindingResource::Buffer(BufferBinding {
                        buffer,
                        offset,
                        size,
                    }) => bm::BindingResource::Buffer(bm::BufferBinding {
                        buffer: buffer.inner.as_core().id,
                        offset,
                        size: size.map(wgt::BufferSize::get),
                    }),
                    BindingResource::BufferArray(array) => {
                        let slice = &remaining_arrayed_buffer_bindings[..array.len()];
                        remaining_arrayed_buffer_bindings =
                            &remaining_arrayed_buffer_bindings[array.len()..];
                        bm::BindingResource::BufferArray(Borrowed(slice))
                    }
                    BindingResource::Sampler(sampler) => {
                        bm::BindingResource::Sampler(sampler.inner.as_core().id)
                    }
                    BindingResource::SamplerArray(array) => {
                        let slice = &remaining_arrayed_samplers[..array.len()];
                        remaining_arrayed_samplers = &remaining_arrayed_samplers[array.len()..];
                        bm::BindingResource::SamplerArray(Borrowed(slice))
                    }
                    BindingResource::TextureView(texture_view) => {
                        bm::BindingResource::TextureView(texture_view.inner.as_core().id)
                    }
                    BindingResource::TextureViewArray(array) => {
                        let slice = &remaining_arrayed_texture_views[..array.len()];
                        remaining_arrayed_texture_views =
                            &remaining_arrayed_texture_views[array.len()..];
                        bm::BindingResource::TextureViewArray(Borrowed(slice))
                    }
                    BindingResource::AccelerationStructure(acceleration_structure) => {
                        bm::BindingResource::AccelerationStructure(
                            acceleration_structure.inner.as_core().id,
                        )
                    }
                    BindingResource::AccelerationStructureArray(array) => {
                        let slice = &remaining_arrayed_acceleration_structures[..array.len()];
                        remaining_arrayed_acceleration_structures =
                            &remaining_arrayed_acceleration_structures[array.len()..];
                        bm::BindingResource::AccelerationStructureArray(Borrowed(slice))
                    }
                    BindingResource::ExternalTexture(external_texture) => {
                        bm::BindingResource::ExternalTexture(external_texture.inner.as_core().id)
                    }
                },
            })
            .collect::<Vec<_>>();
        let descriptor = bm::BindGroupDescriptor {
            label: desc.label.as_ref().map(|label| Borrowed(&label[..])),
            layout: desc.layout.inner.as_core().id,
            entries: Borrowed(&entries),
        };

        let (id, error) = self
            .context
            .0
            .device_create_bind_group(self.id, &descriptor, None);
        if let Some(cause) = error {
            self.context.handle_error(
                &self.error_sink,
                cause,
                desc.label,
                "Device::create_bind_group",
            );
        }
        CoreBindGroup {
            context: self.context.clone(),
            id,
        }
        .into()
    }
1287
1288 fn create_pipeline_layout(
1289 &self,
1290 desc: &crate::PipelineLayoutDescriptor<'_>,
1291 ) -> dispatch::DispatchPipelineLayout {
1292 assert!(
1295 desc.bind_group_layouts.len() <= wgc::MAX_BIND_GROUPS,
1296 "Bind group layout count {} exceeds device bind group limit {}",
1297 desc.bind_group_layouts.len(),
1298 wgc::MAX_BIND_GROUPS
1299 );
1300
1301 let temp_layouts = desc
1302 .bind_group_layouts
1303 .iter()
1304 .map(|bgl| bgl.map(|bgl| bgl.inner.as_core().id))
1305 .collect::<ArrayVec<_, { wgc::MAX_BIND_GROUPS }>>();
1306 let descriptor = wgc::binding_model::PipelineLayoutDescriptor {
1307 label: desc.label.map(Borrowed),
1308 bind_group_layouts: Borrowed(&temp_layouts),
1309 immediate_size: desc.immediate_size,
1310 };
1311
1312 let (id, error) = self
1313 .context
1314 .0
1315 .device_create_pipeline_layout(self.id, &descriptor, None);
1316 if let Some(cause) = error {
1317 self.context.handle_error(
1318 &self.error_sink,
1319 cause,
1320 desc.label,
1321 "Device::create_pipeline_layout",
1322 );
1323 }
1324 CorePipelineLayout {
1325 context: self.context.clone(),
1326 id,
1327 }
1328 .into()
1329 }
1330
    /// Maps a wgpu render pipeline descriptor onto the wgpu-core form and
    /// creates the pipeline. Internal shader-translation failures are logged
    /// (they indicate a wgpu bug, not user error) in addition to being
    /// reported through the device's error sink.
    fn create_render_pipeline(
        &self,
        desc: &crate::RenderPipelineDescriptor<'_>,
    ) -> dispatch::DispatchRenderPipeline {
        use wgc::pipeline as pipe;

        // Per-slot vertex buffer layouts; `None` slots are preserved.
        let vertex_buffers: ArrayVec<_, { wgc::MAX_VERTEX_BUFFERS }> = desc
            .vertex
            .buffers
            .iter()
            .map(|vbuf| {
                vbuf.as_ref().map(|vbuf| pipe::VertexBufferLayout {
                    array_stride: vbuf.array_stride,
                    step_mode: vbuf.step_mode,
                    attributes: Borrowed(vbuf.attributes),
                })
            })
            .collect();

        // Pipeline-overridable constants for the vertex stage, keyed by name.
        let vert_constants = desc
            .vertex
            .compilation_options
            .constants
            .iter()
            .map(|&(key, value)| (String::from(key), value))
            .collect();

        let descriptor = pipe::RenderPipelineDescriptor {
            label: desc.label.map(Borrowed),
            layout: desc.layout.map(|layout| layout.inner.as_core().id),
            vertex: pipe::VertexState {
                stage: pipe::ProgrammableStageDescriptor {
                    module: desc.vertex.module.inner.as_core().id,
                    entry_point: desc.vertex.entry_point.map(Borrowed),
                    constants: vert_constants,
                    zero_initialize_workgroup_memory: desc
                        .vertex
                        .compilation_options
                        .zero_initialize_workgroup_memory,
                },
                buffers: Borrowed(&vertex_buffers),
            },
            primitive: desc.primitive,
            depth_stencil: desc.depth_stencil.clone(),
            multisample: desc.multisample,
            // The fragment stage is optional (depth/stencil-only pipelines).
            fragment: desc.fragment.as_ref().map(|frag| {
                let frag_constants = frag
                    .compilation_options
                    .constants
                    .iter()
                    .map(|&(key, value)| (String::from(key), value))
                    .collect();
                pipe::FragmentState {
                    stage: pipe::ProgrammableStageDescriptor {
                        module: frag.module.inner.as_core().id,
                        entry_point: frag.entry_point.map(Borrowed),
                        constants: frag_constants,
                        zero_initialize_workgroup_memory: frag
                            .compilation_options
                            .zero_initialize_workgroup_memory,
                    },
                    targets: Borrowed(frag.targets),
                }
            }),
            multiview_mask: desc.multiview_mask,
            cache: desc.cache.map(|cache| cache.inner.as_core().id),
        };

        let (id, error) = self
            .context
            .0
            .device_create_render_pipeline(self.id, &descriptor, None);
        if let Some(cause) = error {
            // `Internal` means naga/backend translation failed — a wgpu bug.
            if let wgc::pipeline::CreateRenderPipelineError::Internal { stage, ref error } = cause {
                log::error!("Shader translation error for stage {stage:?}: {error}");
                log::error!("Please report it to https://github.com/gfx-rs/wgpu");
            }
            self.context.handle_error(
                &self.error_sink,
                cause,
                desc.label,
                "Device::create_render_pipeline",
            );
        }
        CoreRenderPipeline {
            context: self.context.clone(),
            id,
            error_sink: Arc::clone(&self.error_sink),
        }
        .into()
    }
1422
1423 fn create_mesh_pipeline(
1424 &self,
1425 desc: &crate::MeshPipelineDescriptor<'_>,
1426 ) -> dispatch::DispatchRenderPipeline {
1427 use wgc::pipeline as pipe;
1428
1429 let mesh_constants = desc
1430 .mesh
1431 .compilation_options
1432 .constants
1433 .iter()
1434 .map(|&(key, value)| (String::from(key), value))
1435 .collect();
1436 let descriptor = pipe::MeshPipelineDescriptor {
1437 label: desc.label.map(Borrowed),
1438 task: desc.task.as_ref().map(|task| {
1439 let task_constants = task
1440 .compilation_options
1441 .constants
1442 .iter()
1443 .map(|&(key, value)| (String::from(key), value))
1444 .collect();
1445 pipe::TaskState {
1446 stage: pipe::ProgrammableStageDescriptor {
1447 module: task.module.inner.as_core().id,
1448 entry_point: task.entry_point.map(Borrowed),
1449 constants: task_constants,
1450 zero_initialize_workgroup_memory: desc
1451 .mesh
1452 .compilation_options
1453 .zero_initialize_workgroup_memory,
1454 },
1455 }
1456 }),
1457 mesh: pipe::MeshState {
1458 stage: pipe::ProgrammableStageDescriptor {
1459 module: desc.mesh.module.inner.as_core().id,
1460 entry_point: desc.mesh.entry_point.map(Borrowed),
1461 constants: mesh_constants,
1462 zero_initialize_workgroup_memory: desc
1463 .mesh
1464 .compilation_options
1465 .zero_initialize_workgroup_memory,
1466 },
1467 },
1468 layout: desc.layout.map(|layout| layout.inner.as_core().id),
1469 primitive: desc.primitive,
1470 depth_stencil: desc.depth_stencil.clone(),
1471 multisample: desc.multisample,
1472 fragment: desc.fragment.as_ref().map(|frag| {
1473 let frag_constants = frag
1474 .compilation_options
1475 .constants
1476 .iter()
1477 .map(|&(key, value)| (String::from(key), value))
1478 .collect();
1479 pipe::FragmentState {
1480 stage: pipe::ProgrammableStageDescriptor {
1481 module: frag.module.inner.as_core().id,
1482 entry_point: frag.entry_point.map(Borrowed),
1483 constants: frag_constants,
1484 zero_initialize_workgroup_memory: frag
1485 .compilation_options
1486 .zero_initialize_workgroup_memory,
1487 },
1488 targets: Borrowed(frag.targets),
1489 }
1490 }),
1491 multiview: desc.multiview,
1492 cache: desc.cache.map(|cache| cache.inner.as_core().id),
1493 };
1494
1495 let (id, error) = self
1496 .context
1497 .0
1498 .device_create_mesh_pipeline(self.id, &descriptor, None);
1499 if let Some(cause) = error {
1500 if let wgc::pipeline::CreateRenderPipelineError::Internal { stage, ref error } = cause {
1501 log::error!("Shader translation error for stage {stage:?}: {error}");
1502 log::error!("Please report it to https://github.com/gfx-rs/wgpu");
1503 }
1504 self.context.handle_error(
1505 &self.error_sink,
1506 cause,
1507 desc.label,
1508 "Device::create_render_pipeline",
1509 );
1510 }
1511 CoreRenderPipeline {
1512 context: self.context.clone(),
1513 id,
1514 error_sink: Arc::clone(&self.error_sink),
1515 }
1516 .into()
1517 }
1518
1519 fn create_compute_pipeline(
1520 &self,
1521 desc: &crate::ComputePipelineDescriptor<'_>,
1522 ) -> dispatch::DispatchComputePipeline {
1523 use wgc::pipeline as pipe;
1524
1525 let constants = desc
1526 .compilation_options
1527 .constants
1528 .iter()
1529 .map(|&(key, value)| (String::from(key), value))
1530 .collect();
1531
1532 let descriptor = pipe::ComputePipelineDescriptor {
1533 label: desc.label.map(Borrowed),
1534 layout: desc.layout.map(|pll| pll.inner.as_core().id),
1535 stage: pipe::ProgrammableStageDescriptor {
1536 module: desc.module.inner.as_core().id,
1537 entry_point: desc.entry_point.map(Borrowed),
1538 constants,
1539 zero_initialize_workgroup_memory: desc
1540 .compilation_options
1541 .zero_initialize_workgroup_memory,
1542 },
1543 cache: desc.cache.map(|cache| cache.inner.as_core().id),
1544 };
1545
1546 let (id, error) = self
1547 .context
1548 .0
1549 .device_create_compute_pipeline(self.id, &descriptor, None);
1550 if let Some(cause) = error {
1551 if let wgc::pipeline::CreateComputePipelineError::Internal(ref error) = cause {
1552 log::error!(
1553 "Shader translation error for stage {:?}: {}",
1554 wgt::ShaderStages::COMPUTE,
1555 error
1556 );
1557 log::error!("Please report it to https://github.com/gfx-rs/wgpu");
1558 }
1559 self.context.handle_error(
1560 &self.error_sink,
1561 cause,
1562 desc.label,
1563 "Device::create_compute_pipeline",
1564 );
1565 }
1566 CoreComputePipeline {
1567 context: self.context.clone(),
1568 id,
1569 error_sink: Arc::clone(&self.error_sink),
1570 }
1571 .into()
1572 }
1573
1574 unsafe fn create_pipeline_cache(
1575 &self,
1576 desc: &crate::PipelineCacheDescriptor<'_>,
1577 ) -> dispatch::DispatchPipelineCache {
1578 use wgc::pipeline as pipe;
1579
1580 let descriptor = pipe::PipelineCacheDescriptor {
1581 label: desc.label.map(Borrowed),
1582 data: desc.data.map(Borrowed),
1583 fallback: desc.fallback,
1584 };
1585 let (id, error) = unsafe {
1586 self.context
1587 .0
1588 .device_create_pipeline_cache(self.id, &descriptor, None)
1589 };
1590 if let Some(cause) = error {
1591 self.context.handle_error(
1592 &self.error_sink,
1593 cause,
1594 desc.label,
1595 "Device::device_create_pipeline_cache_init",
1596 );
1597 }
1598 CorePipelineCache {
1599 context: self.context.clone(),
1600 id,
1601 }
1602 .into()
1603 }
1604
1605 fn create_buffer(&self, desc: &crate::BufferDescriptor<'_>) -> dispatch::DispatchBuffer {
1606 let (id, error) = self.context.0.device_create_buffer(
1607 self.id,
1608 &desc.map_label(|l| l.map(Borrowed)),
1609 None,
1610 );
1611 if let Some(cause) = error {
1612 self.context
1613 .handle_error(&self.error_sink, cause, desc.label, "Device::create_buffer");
1614 }
1615
1616 CoreBuffer {
1617 context: self.context.clone(),
1618 id,
1619 error_sink: Arc::clone(&self.error_sink),
1620 }
1621 .into()
1622 }
1623
1624 fn create_texture(&self, desc: &crate::TextureDescriptor<'_>) -> dispatch::DispatchTexture {
1625 let wgt_desc = desc.map_label_and_view_formats(|l| l.map(Borrowed), |v| v.to_vec());
1626 let (id, error) = self
1627 .context
1628 .0
1629 .device_create_texture(self.id, &wgt_desc, None);
1630 if let Some(cause) = error {
1631 self.context.handle_error(
1632 &self.error_sink,
1633 cause,
1634 desc.label,
1635 "Device::create_texture",
1636 );
1637 }
1638
1639 CoreTexture {
1640 context: self.context.clone(),
1641 id,
1642 error_sink: Arc::clone(&self.error_sink),
1643 }
1644 .into()
1645 }
1646
1647 fn create_external_texture(
1648 &self,
1649 desc: &crate::ExternalTextureDescriptor<'_>,
1650 planes: &[&crate::TextureView],
1651 ) -> dispatch::DispatchExternalTexture {
1652 let wgt_desc = desc.map_label(|l| l.map(Borrowed));
1653 let planes = planes
1654 .iter()
1655 .map(|plane| plane.inner.as_core().id)
1656 .collect::<Vec<_>>();
1657 let (id, error) = self
1658 .context
1659 .0
1660 .device_create_external_texture(self.id, &wgt_desc, &planes, None);
1661 if let Some(cause) = error {
1662 self.context.handle_error(
1663 &self.error_sink,
1664 cause,
1665 desc.label,
1666 "Device::create_external_texture",
1667 );
1668 }
1669
1670 CoreExternalTexture {
1671 context: self.context.clone(),
1672 id,
1673 }
1674 .into()
1675 }
1676
1677 fn create_blas(
1678 &self,
1679 desc: &crate::CreateBlasDescriptor<'_>,
1680 sizes: crate::BlasGeometrySizeDescriptors,
1681 ) -> (Option<u64>, dispatch::DispatchBlas) {
1682 let global = &self.context.0;
1683 let (id, handle, error) =
1684 global.device_create_blas(self.id, &desc.map_label(|l| l.map(Borrowed)), sizes, None);
1685 if let Some(cause) = error {
1686 self.context
1687 .handle_error(&self.error_sink, cause, desc.label, "Device::create_blas");
1688 }
1689 (
1690 handle,
1691 CoreBlas {
1692 context: self.context.clone(),
1693 id,
1694 error_sink: Arc::clone(&self.error_sink),
1695 }
1696 .into(),
1697 )
1698 }
1699
1700 fn create_tlas(&self, desc: &crate::CreateTlasDescriptor<'_>) -> dispatch::DispatchTlas {
1701 let global = &self.context.0;
1702 let (id, error) =
1703 global.device_create_tlas(self.id, &desc.map_label(|l| l.map(Borrowed)), None);
1704 if let Some(cause) = error {
1705 self.context
1706 .handle_error(&self.error_sink, cause, desc.label, "Device::create_tlas");
1707 }
1708 CoreTlas {
1709 context: self.context.clone(),
1710 id,
1711 }
1713 .into()
1714 }
1715
1716 fn create_sampler(&self, desc: &crate::SamplerDescriptor<'_>) -> dispatch::DispatchSampler {
1717 let descriptor = wgc::resource::SamplerDescriptor {
1718 label: desc.label.map(Borrowed),
1719 address_modes: [
1720 desc.address_mode_u,
1721 desc.address_mode_v,
1722 desc.address_mode_w,
1723 ],
1724 mag_filter: desc.mag_filter,
1725 min_filter: desc.min_filter,
1726 mipmap_filter: desc.mipmap_filter,
1727 lod_min_clamp: desc.lod_min_clamp,
1728 lod_max_clamp: desc.lod_max_clamp,
1729 compare: desc.compare,
1730 anisotropy_clamp: desc.anisotropy_clamp,
1731 border_color: desc.border_color,
1732 };
1733
1734 let (id, error) = self
1735 .context
1736 .0
1737 .device_create_sampler(self.id, &descriptor, None);
1738 if let Some(cause) = error {
1739 self.context.handle_error(
1740 &self.error_sink,
1741 cause,
1742 desc.label,
1743 "Device::create_sampler",
1744 );
1745 }
1746 CoreSampler {
1747 context: self.context.clone(),
1748 id,
1749 }
1750 .into()
1751 }
1752
1753 fn create_query_set(&self, desc: &crate::QuerySetDescriptor<'_>) -> dispatch::DispatchQuerySet {
1754 let (id, error) = self.context.0.device_create_query_set(
1755 self.id,
1756 &desc.map_label(|l| l.map(Borrowed)),
1757 None,
1758 );
1759 if let Some(cause) = error {
1760 self.context
1761 .handle_error_nolabel(&self.error_sink, cause, "Device::create_query_set");
1762 }
1763 CoreQuerySet {
1764 context: self.context.clone(),
1765 id,
1766 }
1767 .into()
1768 }
1769
1770 fn create_command_encoder(
1771 &self,
1772 desc: &crate::CommandEncoderDescriptor<'_>,
1773 ) -> dispatch::DispatchCommandEncoder {
1774 let (id, error) = self.context.0.device_create_command_encoder(
1775 self.id,
1776 &desc.map_label(|l| l.map(Borrowed)),
1777 None,
1778 );
1779 if let Some(cause) = error {
1780 self.context.handle_error(
1781 &self.error_sink,
1782 cause,
1783 desc.label,
1784 "Device::create_command_encoder",
1785 );
1786 }
1787
1788 CoreCommandEncoder {
1789 context: self.context.clone(),
1790 id,
1791 error_sink: Arc::clone(&self.error_sink),
1792 }
1793 .into()
1794 }
1795
1796 fn create_render_bundle_encoder(
1797 &self,
1798 desc: &crate::RenderBundleEncoderDescriptor<'_>,
1799 ) -> dispatch::DispatchRenderBundleEncoder {
1800 let descriptor = wgc::command::RenderBundleEncoderDescriptor {
1801 label: desc.label.map(Borrowed),
1802 color_formats: Borrowed(desc.color_formats),
1803 depth_stencil: desc.depth_stencil,
1804 sample_count: desc.sample_count,
1805 multiview: desc.multiview,
1806 };
1807 let (encoder, error) = self
1808 .context
1809 .0
1810 .device_create_render_bundle_encoder(self.id, &descriptor);
1811 if let Some(cause) = error {
1812 self.context.handle_error(
1813 &self.error_sink,
1814 cause,
1815 desc.label,
1816 "Device::create_render_bundle_encoder",
1817 );
1818 }
1819
1820 CoreRenderBundleEncoder {
1821 context: self.context.clone(),
1822 encoder,
1823 id: crate::cmp::Identifier::create(),
1824 }
1825 .into()
1826 }
1827
    /// Registers the closure wgpu-core invokes when this device is lost.
    fn set_device_lost_callback(&self, device_lost_callback: dispatch::BoxDeviceLostCallback) {
        self.context
            .0
            .device_set_device_lost_closure(self.id, device_lost_callback);
    }

    /// Installs the handler for errors not captured by any error scope;
    /// replaces any previously installed handler.
    fn on_uncaptured_error(&self, handler: Arc<dyn crate::UncapturedErrorHandler>) {
        let mut error_sink = self.error_sink.lock();
        error_sink.uncaptured_handler = Some(handler);
    }
1838
1839 fn push_error_scope(&self, filter: crate::ErrorFilter) -> u32 {
1840 let mut error_sink = self.error_sink.lock();
1841 let thread_id = thread_id::ThreadId::current();
1842 let scopes = error_sink.scopes.entry(thread_id).or_default();
1843 let index = scopes
1844 .len()
1845 .try_into()
1846 .expect("Greater than 2^32 nested error scopes");
1847 scopes.push(ErrorScope {
1848 error: None,
1849 filter,
1850 });
1851 index
1852 }
1853
    /// Pops the error scope previously pushed with the returned `index` on the
    /// *same thread*, resolving to the error it captured (if any).
    ///
    /// Mismatched pops panic — except while the thread is already panicking,
    /// where raising a second panic would abort the process; in that case the
    /// call degrades to returning `None`.
    fn pop_error_scope(&self, index: u32) -> Pin<Box<dyn dispatch::PopErrorScopeFuture>> {
        let mut error_sink = self.error_sink.lock();

        let is_panicking = crate::util::is_panicking();
        let thread_id = thread_id::ThreadId::current();
        let err = "Mismatched pop_error_scope call: no error scope for this thread. Error scopes are thread-local.";
        let scopes = match error_sink.scopes.get_mut(&thread_id) {
            Some(s) => s,
            None => {
                if !is_panicking {
                    panic!("{err}");
                } else {
                    // Already unwinding: don't double-panic.
                    return Box::pin(ready(None));
                }
            }
        };
        if scopes.is_empty() && !is_panicking {
            panic!("{err}");
        }
        // Scopes must be popped strictly LIFO; `index` must name the top.
        if index as usize != scopes.len() - 1 && !is_panicking {
            panic!(
                "Mismatched pop_error_scope call: error scopes must be popped in reverse order."
            );
        }
        let scope = match scopes.pop() {
            Some(s) => s,
            // Empty stack was already handled above unless we're panicking.
            None if !is_panicking => unreachable!(),
            None => return Box::pin(ready(None)),
        };

        // The result is known synchronously; wrap it in a resolved future.
        Box::pin(ready(scope.error))
    }
1893
    /// Begins a capture in an attached graphics debugger, if any.
    ///
    /// # Safety
    /// Forwarded to the wgpu-core/hal entry point of the same name.
    unsafe fn start_graphics_debugger_capture(&self) {
        unsafe {
            self.context
                .0
                .device_start_graphics_debugger_capture(self.id)
        };
    }

    /// Ends a capture previously started with
    /// [`Self::start_graphics_debugger_capture`].
    ///
    /// # Safety
    /// Forwarded to the wgpu-core/hal entry point of the same name.
    unsafe fn stop_graphics_debugger_capture(&self) {
        unsafe {
            self.context
                .0
                .device_stop_graphics_debugger_capture(self.id)
        };
    }
1909
1910 fn poll(&self, poll_type: wgt::PollType<u64>) -> Result<crate::PollStatus, crate::PollError> {
1911 match self.context.0.device_poll(self.id, poll_type) {
1912 Ok(status) => Ok(status),
1913 Err(err) => {
1914 if let Some(poll_error) = err.to_poll_error() {
1915 return Err(poll_error);
1916 }
1917
1918 self.context.handle_error_fatal(err, "Device::poll")
1919 }
1920 }
1921 }
1922
    /// Returns wgpu-core's internal resource/usage counters for this device.
    fn get_internal_counters(&self) -> crate::InternalCounters {
        self.context.0.device_get_internal_counters(self.id)
    }

    /// Returns an allocator report when the backend supports one.
    fn generate_allocator_report(&self) -> Option<wgt::AllocatorReport> {
        self.context.0.device_generate_allocator_report(self.id)
    }

    /// Eagerly destroys the device's resources (distinct from `Drop`, which
    /// merely releases this handle's reference).
    fn destroy(&self) {
        self.context.0.device_destroy(self.id);
    }
1934}
1935
/// Releases this handle's reference to the wgpu-core device.
impl Drop for CoreDevice {
    fn drop(&mut self) {
        self.context.0.device_drop(self.id)
    }
}
1941
impl dispatch::QueueInterface for CoreQueue {
    /// Schedules a write of `data` into `buffer` at `offset`; errors are
    /// reported through the queue's error sink.
    fn write_buffer(
        &self,
        buffer: &dispatch::DispatchBuffer,
        offset: crate::BufferAddress,
        data: &[u8],
    ) {
        let buffer = buffer.as_core();

        match self
            .context
            .0
            .queue_write_buffer(self.id, buffer.id, offset, data)
        {
            Ok(()) => (),
            Err(err) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, err, "Queue::write_buffer")
            }
        }
    }

    /// Allocates a mapped staging buffer of `size` bytes, or `None` on error.
    ///
    /// NOTE: this and the two methods below report errors under the label
    /// "Queue::write_buffer_with" — presumably because they are the internals
    /// of that public API; confirm before changing.
    fn create_staging_buffer(
        &self,
        size: crate::BufferSize,
    ) -> Option<dispatch::DispatchQueueWriteBuffer> {
        match self
            .context
            .0
            .queue_create_staging_buffer(self.id, size, None)
        {
            Ok((buffer_id, ptr)) => Some(
                CoreQueueWriteBuffer {
                    buffer_id,
                    mapping: CoreBufferMappedRange {
                        ptr,
                        size: size.get() as usize,
                    },
                }
                .into(),
            ),
            Err(err) => {
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    err,
                    "Queue::write_buffer_with",
                );
                None
            }
        }
    }

    /// Validates that `offset`/`size` describe a writable region of `buffer`;
    /// `None` means validation failed (the error went to the sink).
    fn validate_write_buffer(
        &self,
        buffer: &dispatch::DispatchBuffer,
        offset: wgt::BufferAddress,
        size: wgt::BufferSize,
    ) -> Option<()> {
        let buffer = buffer.as_core();

        match self
            .context
            .0
            .queue_validate_write_buffer(self.id, buffer.id, offset, size)
        {
            Ok(()) => Some(()),
            Err(err) => {
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    err,
                    "Queue::write_buffer_with",
                );
                None
            }
        }
    }

    /// Copies a previously-filled staging buffer into `buffer` at `offset`.
    fn write_staging_buffer(
        &self,
        buffer: &dispatch::DispatchBuffer,
        offset: crate::BufferAddress,
        staging_buffer: &dispatch::DispatchQueueWriteBuffer,
    ) {
        let buffer = buffer.as_core();
        let staging_buffer = staging_buffer.as_core();

        match self.context.0.queue_write_staging_buffer(
            self.id,
            buffer.id,
            offset,
            staging_buffer.buffer_id,
        ) {
            Ok(()) => (),
            Err(err) => {
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    err,
                    "Queue::write_buffer_with",
                );
            }
        }
    }

    /// Schedules a write of `data` (laid out per `data_layout`) into the
    /// texture region described by `texture`/`size`.
    fn write_texture(
        &self,
        texture: crate::TexelCopyTextureInfo<'_>,
        data: &[u8],
        data_layout: crate::TexelCopyBufferLayout,
        size: crate::Extent3d,
    ) {
        match self.context.0.queue_write_texture(
            self.id,
            &map_texture_copy_view(texture),
            data,
            &data_layout,
            &size,
        ) {
            Ok(()) => (),
            Err(err) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, err, "Queue::write_texture")
            }
        }
    }

    /// Copies a browser-side external image into a texture. Only compiled for
    /// web targets; the body is a no-op unless the WebGL backend is enabled
    /// (hence the `expect(unused_variables)` for non-webgl web builds).
    #[cfg(web)]
    #[cfg_attr(not(webgl), expect(unused_variables))]
    fn copy_external_image_to_texture(
        &self,
        source: &crate::CopyExternalImageSourceInfo,
        dest: crate::CopyExternalImageDestInfo<&crate::api::Texture>,
        size: crate::Extent3d,
    ) {
        #[cfg(webgl)]
        match self.context.0.queue_copy_external_image_to_texture(
            self.id,
            source,
            map_texture_tagged_copy_view(dest),
            size,
        ) {
            Ok(()) => (),
            Err(err) => self.context.handle_error_nolabel(
                &self.error_sink,
                err,
                "Queue::copy_external_image_to_texture",
            ),
        }
    }

    /// Submits the given command buffers and returns the submission index.
    /// On error the index reported by wgpu-core is still returned.
    fn submit(
        &self,
        command_buffers: &mut dyn Iterator<Item = dispatch::DispatchCommandBuffer>,
    ) -> u64 {
        // Keep the command buffers alive until after submission; their IDs
        // are what wgpu-core consumes.
        let temp_command_buffers = command_buffers.collect::<SmallVec<[_; 4]>>();
        let command_buffer_ids = temp_command_buffers
            .iter()
            .map(|cmdbuf| cmdbuf.as_core().id)
            .collect::<SmallVec<[_; 4]>>();

        let index = match self.context.0.queue_submit(self.id, &command_buffer_ids) {
            Ok(index) => index,
            Err((index, err)) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, err, "Queue::submit");
                index
            }
        };

        // Explicit drop documents that the wrappers outlive the submit call.
        drop(temp_command_buffers);

        index
    }

    /// Nanoseconds per timestamp-query tick for this queue.
    fn get_timestamp_period(&self) -> f32 {
        self.context.0.queue_get_timestamp_period(self.id)
    }

    /// Registers `callback` to run once all currently-submitted work finishes.
    fn on_submitted_work_done(&self, callback: dispatch::BoxSubmittedWorkDoneCallback) {
        self.context
            .0
            .queue_on_submitted_work_done(self.id, callback);
    }

    /// Compacts a prepared BLAS on this queue, returning the optional driver
    /// handle and the new BLAS wrapper.
    fn compact_blas(&self, blas: &dispatch::DispatchBlas) -> (Option<u64>, dispatch::DispatchBlas) {
        let (id, handle, error) =
            self.context
                .0
                .queue_compact_blas(self.id, blas.as_core().id, None);

        if let Some(cause) = error {
            self.context
                .handle_error_nolabel(&self.error_sink, cause, "Queue::compact_blas");
        }
        (
            handle,
            CoreBlas {
                context: self.context.clone(),
                id,
                error_sink: Arc::clone(&self.error_sink),
            }
            .into(),
        )
    }

    /// Presents the surface associated with `detail`.
    fn present(&self, detail: &dispatch::DispatchSurfaceOutputDetail) {
        let detail = detail.as_core();
        match self.context.0.surface_present(detail.surface_id) {
            Ok(_status) => (),
            Err(err) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, err, "Queue::present");
            }
        }
    }
}
2159
/// Releases this handle's reference to the wgpu-core queue.
impl Drop for CoreQueue {
    fn drop(&mut self) {
        self.context.0.queue_drop(self.id)
    }
}
2165
impl dispatch::ShaderModuleInterface for CoreShaderModule {
    /// Compilation info is produced when the module is created, so this just
    /// hands back the cached value wrapped in an already-resolved future.
    fn get_compilation_info(&self) -> Pin<Box<dyn dispatch::ShaderCompilationInfoFuture>> {
        Box::pin(ready(self.compilation_info.clone()))
    }
}

/// Releases the wgpu-core shader module.
impl Drop for CoreShaderModule {
    fn drop(&mut self) {
        self.context.0.shader_module_drop(self.id)
    }
}
2177
// The following interface impls are empty markers: the core backend needs no
// methods beyond what the wrapper types already carry. Each `Drop` releases
// the corresponding wgpu-core resource reference.

impl dispatch::BindGroupLayoutInterface for CoreBindGroupLayout {}

impl Drop for CoreBindGroupLayout {
    fn drop(&mut self) {
        self.context.0.bind_group_layout_drop(self.id)
    }
}

impl dispatch::BindGroupInterface for CoreBindGroup {}

impl Drop for CoreBindGroup {
    fn drop(&mut self) {
        self.context.0.bind_group_drop(self.id)
    }
}

impl dispatch::TextureViewInterface for CoreTextureView {}

impl Drop for CoreTextureView {
    fn drop(&mut self) {
        self.context.0.texture_view_drop(self.id);
    }
}

impl dispatch::ExternalTextureInterface for CoreExternalTexture {
    /// Eagerly destroys the external texture (distinct from `Drop`, which
    /// only releases this handle's reference).
    fn destroy(&self) {
        self.context.0.external_texture_destroy(self.id);
    }
}

impl Drop for CoreExternalTexture {
    fn drop(&mut self) {
        self.context.0.external_texture_drop(self.id);
    }
}

impl dispatch::SamplerInterface for CoreSampler {}

impl Drop for CoreSampler {
    fn drop(&mut self) {
        self.context.0.sampler_drop(self.id)
    }
}
2221
2222impl dispatch::BufferInterface for CoreBuffer {
2223 fn map_async(
2224 &self,
2225 mode: crate::MapMode,
2226 range: Range<crate::BufferAddress>,
2227 callback: dispatch::BufferMapCallback,
2228 ) {
2229 let operation = wgc::resource::BufferMapOperation {
2230 host: match mode {
2231 MapMode::Read => wgc::device::HostMap::Read,
2232 MapMode::Write => wgc::device::HostMap::Write,
2233 },
2234 callback: Some(Box::new(|status| {
2235 let res = status.map_err(|_| crate::BufferAsyncError);
2236 callback(res);
2237 })),
2238 };
2239
2240 match self.context.0.buffer_map_async(
2241 self.id,
2242 range.start,
2243 Some(range.end - range.start),
2244 operation,
2245 ) {
2246 Ok(_) => (),
2247 Err(cause) => {
2248 self.context
2249 .handle_error_nolabel(&self.error_sink, cause, "Buffer::map_async")
2250 }
2251 }
2252 }
2253
2254 fn get_mapped_range(
2255 &self,
2256 sub_range: Range<crate::BufferAddress>,
2257 ) -> Result<dispatch::DispatchBufferMappedRange, crate::MapRangeError> {
2258 let size = sub_range.end - sub_range.start;
2259 self.context
2260 .0
2261 .buffer_get_mapped_range(self.id, sub_range.start, Some(size))
2262 .map(|(ptr, size)| {
2263 CoreBufferMappedRange {
2264 ptr,
2265 size: size as usize,
2266 }
2267 .into()
2268 })
2269 .map_err(|err| crate::MapRangeError(self.context.format_error(&err)))
2270 }
2271
2272 fn unmap(&self) {
2273 match self.context.0.buffer_unmap(self.id) {
2274 Ok(()) => (),
2275 Err(cause) => {
2276 self.context
2277 .handle_error_nolabel(&self.error_sink, cause, "Buffer::buffer_unmap")
2278 }
2279 }
2280 }
2281
2282 fn destroy(&self) {
2283 self.context.0.buffer_destroy(self.id);
2284 }
2285}
2286
/// Releases this handle's reference to the wgpu-core buffer.
impl Drop for CoreBuffer {
    fn drop(&mut self) {
        self.context.0.buffer_drop(self.id)
    }
}
2292
2293impl dispatch::TextureInterface for CoreTexture {
2294 fn create_view(
2295 &self,
2296 desc: &crate::TextureViewDescriptor<'_>,
2297 ) -> dispatch::DispatchTextureView {
2298 let descriptor = wgc::resource::TextureViewDescriptor {
2299 label: desc.label.map(Borrowed),
2300 format: desc.format,
2301 dimension: desc.dimension,
2302 usage: desc.usage,
2303 range: wgt::ImageSubresourceRange {
2304 aspect: desc.aspect,
2305 base_mip_level: desc.base_mip_level,
2306 mip_level_count: desc.mip_level_count,
2307 base_array_layer: desc.base_array_layer,
2308 array_layer_count: desc.array_layer_count,
2309 },
2310 };
2311 let (id, error) = self
2312 .context
2313 .0
2314 .texture_create_view(self.id, &descriptor, None);
2315 if let Some(cause) = error {
2316 self.context
2317 .handle_error(&self.error_sink, cause, desc.label, "Texture::create_view");
2318 }
2319 CoreTextureView {
2320 context: self.context.clone(),
2321 id,
2322 }
2323 .into()
2324 }
2325
2326 fn destroy(&self) {
2327 self.context.0.texture_destroy(self.id);
2328 }
2329}
2330
/// Releases this handle's reference to the wgpu-core texture.
impl Drop for CoreTexture {
    fn drop(&mut self) {
        self.context.0.texture_drop(self.id)
    }
}
2336
2337impl dispatch::BlasInterface for CoreBlas {
2338 fn prepare_compact_async(&self, callback: BlasCompactCallback) {
2339 let callback: Option<wgc::resource::BlasCompactCallback> =
2340 Some(Box::new(|status: BlasPrepareCompactResult| {
2341 let res = status.map_err(|_| crate::BlasAsyncError);
2342 callback(res);
2343 }));
2344
2345 match self.context.0.blas_prepare_compact_async(self.id, callback) {
2346 Ok(_) => (),
2347 Err(cause) => self.context.handle_error_nolabel(
2348 &self.error_sink,
2349 cause,
2350 "Blas::prepare_compact_async",
2351 ),
2352 }
2353 }
2354
2355 fn ready_for_compaction(&self) -> bool {
2356 match self.context.0.ready_for_compaction(self.id) {
2357 Ok(ready) => ready,
2358 Err(cause) => {
2359 self.context.handle_error_nolabel(
2360 &self.error_sink,
2361 cause,
2362 "Blas::ready_for_compaction",
2363 );
2364 false
2366 }
2367 }
2368 }
2369}
2370
/// Releases this handle's reference to the wgpu-core BLAS.
impl Drop for CoreBlas {
    fn drop(&mut self) {
        self.context.0.blas_drop(self.id)
    }
}

// Marker impl: the core backend needs no extra TLAS methods.
impl dispatch::TlasInterface for CoreTlas {}

impl Drop for CoreTlas {
    fn drop(&mut self) {
        self.context.0.tlas_drop(self.id)
    }
}

// Marker impl: the core backend needs no extra query-set methods.
impl dispatch::QuerySetInterface for CoreQuerySet {}

impl Drop for CoreQuerySet {
    fn drop(&mut self) {
        self.context.0.query_set_drop(self.id)
    }
}

// Marker impl: the core backend needs no extra pipeline-layout methods.
impl dispatch::PipelineLayoutInterface for CorePipelineLayout {}

impl Drop for CorePipelineLayout {
    fn drop(&mut self) {
        self.context.0.pipeline_layout_drop(self.id)
    }
}
2400
2401impl dispatch::RenderPipelineInterface for CoreRenderPipeline {
2402 fn get_bind_group_layout(&self, index: u32) -> dispatch::DispatchBindGroupLayout {
2403 let (id, error) = self
2404 .context
2405 .0
2406 .render_pipeline_get_bind_group_layout(self.id, index, None);
2407 if let Some(err) = error {
2408 self.context.handle_error_nolabel(
2409 &self.error_sink,
2410 err,
2411 "RenderPipeline::get_bind_group_layout",
2412 )
2413 }
2414 CoreBindGroupLayout {
2415 context: self.context.clone(),
2416 id,
2417 }
2418 .into()
2419 }
2420}
2421
2422impl Drop for CoreRenderPipeline {
2423 fn drop(&mut self) {
2424 self.context.0.render_pipeline_drop(self.id)
2425 }
2426}
2427
2428impl dispatch::ComputePipelineInterface for CoreComputePipeline {
2429 fn get_bind_group_layout(&self, index: u32) -> dispatch::DispatchBindGroupLayout {
2430 let (id, error) = self
2431 .context
2432 .0
2433 .compute_pipeline_get_bind_group_layout(self.id, index, None);
2434 if let Some(err) = error {
2435 self.context.handle_error_nolabel(
2436 &self.error_sink,
2437 err,
2438 "ComputePipeline::get_bind_group_layout",
2439 )
2440 }
2441 CoreBindGroupLayout {
2442 context: self.context.clone(),
2443 id,
2444 }
2445 .into()
2446 }
2447}
2448
2449impl Drop for CoreComputePipeline {
2450 fn drop(&mut self) {
2451 self.context.0.compute_pipeline_drop(self.id)
2452 }
2453}
2454
2455impl dispatch::PipelineCacheInterface for CorePipelineCache {
2456 fn get_data(&self) -> Option<Vec<u8>> {
2457 self.context.0.pipeline_cache_get_data(self.id)
2458 }
2459}
2460
2461impl Drop for CorePipelineCache {
2462 fn drop(&mut self) {
2463 self.context.0.pipeline_cache_drop(self.id)
2464 }
2465}
2466
2467impl dispatch::CommandEncoderInterface for CoreCommandEncoder {
2468 fn copy_buffer_to_buffer(
2469 &self,
2470 source: &dispatch::DispatchBuffer,
2471 source_offset: crate::BufferAddress,
2472 destination: &dispatch::DispatchBuffer,
2473 destination_offset: crate::BufferAddress,
2474 copy_size: Option<crate::BufferAddress>,
2475 ) {
2476 let source = source.as_core();
2477 let destination = destination.as_core();
2478
2479 if let Err(cause) = self.context.0.command_encoder_copy_buffer_to_buffer(
2480 self.id,
2481 source.id,
2482 source_offset,
2483 destination.id,
2484 destination_offset,
2485 copy_size,
2486 ) {
2487 self.context.handle_error_nolabel(
2488 &self.error_sink,
2489 cause,
2490 "CommandEncoder::copy_buffer_to_buffer",
2491 );
2492 }
2493 }
2494
2495 fn copy_buffer_to_texture(
2496 &self,
2497 source: crate::TexelCopyBufferInfo<'_>,
2498 destination: crate::TexelCopyTextureInfo<'_>,
2499 copy_size: crate::Extent3d,
2500 ) {
2501 if let Err(cause) = self.context.0.command_encoder_copy_buffer_to_texture(
2502 self.id,
2503 &map_buffer_copy_view(source),
2504 &map_texture_copy_view(destination),
2505 ©_size,
2506 ) {
2507 self.context.handle_error_nolabel(
2508 &self.error_sink,
2509 cause,
2510 "CommandEncoder::copy_buffer_to_texture",
2511 );
2512 }
2513 }
2514
2515 fn copy_texture_to_buffer(
2516 &self,
2517 source: crate::TexelCopyTextureInfo<'_>,
2518 destination: crate::TexelCopyBufferInfo<'_>,
2519 copy_size: crate::Extent3d,
2520 ) {
2521 if let Err(cause) = self.context.0.command_encoder_copy_texture_to_buffer(
2522 self.id,
2523 &map_texture_copy_view(source),
2524 &map_buffer_copy_view(destination),
2525 ©_size,
2526 ) {
2527 self.context.handle_error_nolabel(
2528 &self.error_sink,
2529 cause,
2530 "CommandEncoder::copy_texture_to_buffer",
2531 );
2532 }
2533 }
2534
2535 fn copy_texture_to_texture(
2536 &self,
2537 source: crate::TexelCopyTextureInfo<'_>,
2538 destination: crate::TexelCopyTextureInfo<'_>,
2539 copy_size: crate::Extent3d,
2540 ) {
2541 if let Err(cause) = self.context.0.command_encoder_copy_texture_to_texture(
2542 self.id,
2543 &map_texture_copy_view(source),
2544 &map_texture_copy_view(destination),
2545 ©_size,
2546 ) {
2547 self.context.handle_error_nolabel(
2548 &self.error_sink,
2549 cause,
2550 "CommandEncoder::copy_texture_to_texture",
2551 );
2552 }
2553 }
2554
2555 fn begin_compute_pass(
2556 &self,
2557 desc: &crate::ComputePassDescriptor<'_>,
2558 ) -> dispatch::DispatchComputePass {
2559 let timestamp_writes =
2560 desc.timestamp_writes
2561 .as_ref()
2562 .map(|tw| wgc::command::PassTimestampWrites {
2563 query_set: tw.query_set.inner.as_core().id,
2564 beginning_of_pass_write_index: tw.beginning_of_pass_write_index,
2565 end_of_pass_write_index: tw.end_of_pass_write_index,
2566 });
2567
2568 let (pass, err) = self.context.0.command_encoder_begin_compute_pass(
2569 self.id,
2570 &wgc::command::ComputePassDescriptor {
2571 label: desc.label.map(Borrowed),
2572 timestamp_writes,
2573 },
2574 );
2575
2576 if let Some(cause) = err {
2577 self.context.handle_error(
2578 &self.error_sink,
2579 cause,
2580 desc.label,
2581 "CommandEncoder::begin_compute_pass",
2582 );
2583 }
2584
2585 CoreComputePass {
2586 context: self.context.clone(),
2587 pass,
2588 error_sink: self.error_sink.clone(),
2589 id: crate::cmp::Identifier::create(),
2590 }
2591 .into()
2592 }
2593
2594 fn begin_render_pass(
2595 &self,
2596 desc: &crate::RenderPassDescriptor<'_>,
2597 ) -> dispatch::DispatchRenderPass {
2598 let colors = desc
2599 .color_attachments
2600 .iter()
2601 .map(|ca| {
2602 ca.as_ref()
2603 .map(|at| wgc::command::RenderPassColorAttachment {
2604 view: at.view.inner.as_core().id,
2605 depth_slice: at.depth_slice,
2606 resolve_target: at.resolve_target.map(|view| view.inner.as_core().id),
2607 load_op: at.ops.load,
2608 store_op: at.ops.store,
2609 })
2610 })
2611 .collect::<Vec<_>>();
2612
2613 let depth_stencil = desc.depth_stencil_attachment.as_ref().map(|dsa| {
2614 wgc::command::RenderPassDepthStencilAttachment {
2615 view: dsa.view.inner.as_core().id,
2616 depth: map_pass_channel(dsa.depth_ops.as_ref()),
2617 stencil: map_pass_channel(dsa.stencil_ops.as_ref()),
2618 }
2619 });
2620
2621 let timestamp_writes =
2622 desc.timestamp_writes
2623 .as_ref()
2624 .map(|tw| wgc::command::PassTimestampWrites {
2625 query_set: tw.query_set.inner.as_core().id,
2626 beginning_of_pass_write_index: tw.beginning_of_pass_write_index,
2627 end_of_pass_write_index: tw.end_of_pass_write_index,
2628 });
2629
2630 let (pass, err) = self.context.0.command_encoder_begin_render_pass(
2631 self.id,
2632 &wgc::command::RenderPassDescriptor {
2633 label: desc.label.map(Borrowed),
2634 timestamp_writes: timestamp_writes.as_ref(),
2635 color_attachments: Borrowed(&colors),
2636 depth_stencil_attachment: depth_stencil.as_ref(),
2637 occlusion_query_set: desc.occlusion_query_set.map(|qs| qs.inner.as_core().id),
2638 multiview_mask: desc.multiview_mask,
2639 },
2640 );
2641
2642 if let Some(cause) = err {
2643 self.context.handle_error(
2644 &self.error_sink,
2645 cause,
2646 desc.label,
2647 "CommandEncoder::begin_render_pass",
2648 );
2649 }
2650
2651 CoreRenderPass {
2652 context: self.context.clone(),
2653 pass,
2654 error_sink: self.error_sink.clone(),
2655 id: crate::cmp::Identifier::create(),
2656 }
2657 .into()
2658 }
2659
2660 fn finish(&mut self) -> dispatch::DispatchCommandBuffer {
2661 let descriptor = wgt::CommandBufferDescriptor::default();
2662 let (id, opt_label_and_error) =
2663 self.context
2664 .0
2665 .command_encoder_finish(self.id, &descriptor, None);
2666 if let Some((label, cause)) = opt_label_and_error {
2667 self.context
2668 .handle_error(&self.error_sink, cause, Some(&label), "a CommandEncoder");
2669 }
2670 CoreCommandBuffer {
2671 context: self.context.clone(),
2672 id,
2673 }
2674 .into()
2675 }
2676
2677 fn clear_texture(
2678 &self,
2679 texture: &dispatch::DispatchTexture,
2680 subresource_range: &crate::ImageSubresourceRange,
2681 ) {
2682 let texture = texture.as_core();
2683
2684 if let Err(cause) =
2685 self.context
2686 .0
2687 .command_encoder_clear_texture(self.id, texture.id, subresource_range)
2688 {
2689 self.context.handle_error_nolabel(
2690 &self.error_sink,
2691 cause,
2692 "CommandEncoder::clear_texture",
2693 );
2694 }
2695 }
2696
2697 fn clear_buffer(
2698 &self,
2699 buffer: &dispatch::DispatchBuffer,
2700 offset: crate::BufferAddress,
2701 size: Option<crate::BufferAddress>,
2702 ) {
2703 let buffer = buffer.as_core();
2704
2705 if let Err(cause) = self
2706 .context
2707 .0
2708 .command_encoder_clear_buffer(self.id, buffer.id, offset, size)
2709 {
2710 self.context.handle_error_nolabel(
2711 &self.error_sink,
2712 cause,
2713 "CommandEncoder::fill_buffer",
2714 );
2715 }
2716 }
2717
2718 fn insert_debug_marker(&self, label: &str) {
2719 if let Err(cause) = self
2720 .context
2721 .0
2722 .command_encoder_insert_debug_marker(self.id, label)
2723 {
2724 self.context.handle_error_nolabel(
2725 &self.error_sink,
2726 cause,
2727 "CommandEncoder::insert_debug_marker",
2728 );
2729 }
2730 }
2731
2732 fn push_debug_group(&self, label: &str) {
2733 if let Err(cause) = self
2734 .context
2735 .0
2736 .command_encoder_push_debug_group(self.id, label)
2737 {
2738 self.context.handle_error_nolabel(
2739 &self.error_sink,
2740 cause,
2741 "CommandEncoder::push_debug_group",
2742 );
2743 }
2744 }
2745
2746 fn pop_debug_group(&self) {
2747 if let Err(cause) = self.context.0.command_encoder_pop_debug_group(self.id) {
2748 self.context.handle_error_nolabel(
2749 &self.error_sink,
2750 cause,
2751 "CommandEncoder::pop_debug_group",
2752 );
2753 }
2754 }
2755
2756 fn write_timestamp(&self, query_set: &dispatch::DispatchQuerySet, query_index: u32) {
2757 let query_set = query_set.as_core();
2758
2759 if let Err(cause) =
2760 self.context
2761 .0
2762 .command_encoder_write_timestamp(self.id, query_set.id, query_index)
2763 {
2764 self.context.handle_error_nolabel(
2765 &self.error_sink,
2766 cause,
2767 "CommandEncoder::write_timestamp",
2768 );
2769 }
2770 }
2771
2772 fn resolve_query_set(
2773 &self,
2774 query_set: &dispatch::DispatchQuerySet,
2775 first_query: u32,
2776 query_count: u32,
2777 destination: &dispatch::DispatchBuffer,
2778 destination_offset: crate::BufferAddress,
2779 ) {
2780 let query_set = query_set.as_core();
2781 let destination = destination.as_core();
2782
2783 if let Err(cause) = self.context.0.command_encoder_resolve_query_set(
2784 self.id,
2785 query_set.id,
2786 first_query,
2787 query_count,
2788 destination.id,
2789 destination_offset,
2790 ) {
2791 self.context.handle_error_nolabel(
2792 &self.error_sink,
2793 cause,
2794 "CommandEncoder::resolve_query_set",
2795 );
2796 }
2797 }
2798
2799 fn mark_acceleration_structures_built<'a>(
2800 &self,
2801 blas: &mut dyn Iterator<Item = &'a Blas>,
2802 tlas: &mut dyn Iterator<Item = &'a Tlas>,
2803 ) {
2804 let blas = blas
2805 .map(|b| b.inner.as_core().id)
2806 .collect::<SmallVec<[_; 4]>>();
2807 let tlas = tlas
2808 .map(|t| t.inner.as_core().id)
2809 .collect::<SmallVec<[_; 4]>>();
2810 if let Err(cause) = self
2811 .context
2812 .0
2813 .command_encoder_mark_acceleration_structures_built(self.id, &blas, &tlas)
2814 {
2815 self.context.handle_error_nolabel(
2816 &self.error_sink,
2817 cause,
2818 "CommandEncoder::build_acceleration_structures_unsafe_tlas",
2819 );
2820 }
2821 }
2822
2823 fn build_acceleration_structures<'a>(
2824 &self,
2825 blas: &mut dyn Iterator<Item = &'a crate::BlasBuildEntry<'a>>,
2826 tlas: &mut dyn Iterator<Item = &'a crate::Tlas>,
2827 ) {
2828 let blas = blas.map(|e: &crate::BlasBuildEntry<'_>| {
2829 let geometries = match e.geometry {
2830 crate::BlasGeometries::TriangleGeometries(ref triangle_geometries) => {
2831 let iter = triangle_geometries.iter().map(|tg| {
2832 wgc::ray_tracing::BlasTriangleGeometry {
2833 vertex_buffer: tg.vertex_buffer.inner.as_core().id,
2834 index_buffer: tg.index_buffer.map(|buf| buf.inner.as_core().id),
2835 transform_buffer: tg.transform_buffer.map(|buf| buf.inner.as_core().id),
2836 size: tg.size,
2837 transform_buffer_offset: tg.transform_buffer_offset,
2838 first_vertex: tg.first_vertex,
2839 vertex_stride: tg.vertex_stride,
2840 first_index: tg.first_index,
2841 }
2842 });
2843 wgc::ray_tracing::BlasGeometries::TriangleGeometries(Box::new(iter))
2844 }
2845 crate::BlasGeometries::AabbGeometries(ref aabb_geometries) => {
2846 let iter =
2847 aabb_geometries
2848 .iter()
2849 .map(|ag| wgc::ray_tracing::BlasAabbGeometry {
2850 aabb_buffer: ag.aabb_buffer.inner.as_core().id,
2851 stride: ag.stride,
2852 size: ag.size,
2853 primitive_offset: ag.primitive_offset,
2854 });
2855 wgc::ray_tracing::BlasGeometries::AabbGeometries(Box::new(iter))
2856 }
2857 };
2858 wgc::ray_tracing::BlasBuildEntry {
2859 blas_id: e.blas.inner.as_core().id,
2860 geometries,
2861 }
2862 });
2863
2864 let tlas = tlas.into_iter().map(|e| {
2865 let instances = e
2866 .instances
2867 .iter()
2868 .map(|instance: &Option<crate::TlasInstance>| {
2869 instance
2870 .as_ref()
2871 .map(|instance| wgc::ray_tracing::TlasInstance {
2872 blas_id: instance.blas.as_core().id,
2873 transform: &instance.transform,
2874 custom_data: instance.custom_data,
2875 mask: instance.mask,
2876 })
2877 });
2878 wgc::ray_tracing::TlasPackage {
2879 tlas_id: e.inner.as_core().id,
2880 instances: Box::new(instances),
2881 lowest_unmodified: e.lowest_unmodified,
2882 }
2883 });
2884
2885 if let Err(cause) = self
2886 .context
2887 .0
2888 .command_encoder_build_acceleration_structures(self.id, blas, tlas)
2889 {
2890 self.context.handle_error_nolabel(
2891 &self.error_sink,
2892 cause,
2893 "CommandEncoder::build_acceleration_structures_unsafe_tlas",
2894 );
2895 }
2896 }
2897
2898 fn transition_resources<'a>(
2899 &mut self,
2900 buffer_transitions: &mut dyn Iterator<
2901 Item = wgt::BufferTransition<&'a dispatch::DispatchBuffer>,
2902 >,
2903 texture_transitions: &mut dyn Iterator<
2904 Item = wgt::TextureTransition<&'a dispatch::DispatchTexture>,
2905 >,
2906 ) {
2907 let result = self.context.0.command_encoder_transition_resources(
2908 self.id,
2909 buffer_transitions.map(|t| wgt::BufferTransition {
2910 buffer: t.buffer.as_core().id,
2911 state: t.state,
2912 }),
2913 texture_transitions.map(|t| wgt::TextureTransition {
2914 texture: t.texture.as_core().id,
2915 selector: t.selector.clone(),
2916 state: t.state,
2917 }),
2918 );
2919
2920 if let Err(cause) = result {
2921 self.context.handle_error_nolabel(
2922 &self.error_sink,
2923 cause,
2924 "CommandEncoder::transition_resources",
2925 );
2926 }
2927 }
2928}
2929
2930impl Drop for CoreCommandEncoder {
2931 fn drop(&mut self) {
2932 self.context.0.command_encoder_drop(self.id)
2933 }
2934}
2935
// Marker impl: the trait requires no methods for the core backend.
impl dispatch::CommandBufferInterface for CoreCommandBuffer {}
2937
2938impl Drop for CoreCommandBuffer {
2939 fn drop(&mut self) {
2940 self.context.0.command_buffer_drop(self.id)
2941 }
2942}
2943
2944impl dispatch::ComputePassInterface for CoreComputePass {
2945 fn set_pipeline(&mut self, pipeline: &dispatch::DispatchComputePipeline) {
2946 let pipeline = pipeline.as_core();
2947
2948 if let Err(cause) = self
2949 .context
2950 .0
2951 .compute_pass_set_pipeline(&mut self.pass, pipeline.id)
2952 {
2953 self.context.handle_error(
2954 &self.error_sink,
2955 cause,
2956 self.pass.label(),
2957 "ComputePass::set_pipeline",
2958 );
2959 }
2960 }
2961
2962 fn set_bind_group(
2963 &mut self,
2964 index: u32,
2965 bind_group: Option<&dispatch::DispatchBindGroup>,
2966 offsets: &[crate::DynamicOffset],
2967 ) {
2968 let bg = bind_group.map(|bg| bg.as_core().id);
2969
2970 if let Err(cause) =
2971 self.context
2972 .0
2973 .compute_pass_set_bind_group(&mut self.pass, index, bg, offsets)
2974 {
2975 self.context.handle_error(
2976 &self.error_sink,
2977 cause,
2978 self.pass.label(),
2979 "ComputePass::set_bind_group",
2980 );
2981 }
2982 }
2983
2984 fn set_immediates(&mut self, offset: u32, data: &[u8]) {
2985 if let Err(cause) = self
2986 .context
2987 .0
2988 .compute_pass_set_immediates(&mut self.pass, offset, data)
2989 {
2990 self.context.handle_error(
2991 &self.error_sink,
2992 cause,
2993 self.pass.label(),
2994 "ComputePass::set_immediates",
2995 );
2996 }
2997 }
2998
2999 fn insert_debug_marker(&mut self, label: &str) {
3000 if let Err(cause) =
3001 self.context
3002 .0
3003 .compute_pass_insert_debug_marker(&mut self.pass, label, 0)
3004 {
3005 self.context.handle_error(
3006 &self.error_sink,
3007 cause,
3008 self.pass.label(),
3009 "ComputePass::insert_debug_marker",
3010 );
3011 }
3012 }
3013
3014 fn push_debug_group(&mut self, group_label: &str) {
3015 if let Err(cause) =
3016 self.context
3017 .0
3018 .compute_pass_push_debug_group(&mut self.pass, group_label, 0)
3019 {
3020 self.context.handle_error(
3021 &self.error_sink,
3022 cause,
3023 self.pass.label(),
3024 "ComputePass::push_debug_group",
3025 );
3026 }
3027 }
3028
3029 fn pop_debug_group(&mut self) {
3030 if let Err(cause) = self.context.0.compute_pass_pop_debug_group(&mut self.pass) {
3031 self.context.handle_error(
3032 &self.error_sink,
3033 cause,
3034 self.pass.label(),
3035 "ComputePass::pop_debug_group",
3036 );
3037 }
3038 }
3039
3040 fn write_timestamp(&mut self, query_set: &dispatch::DispatchQuerySet, query_index: u32) {
3041 let query_set = query_set.as_core();
3042
3043 if let Err(cause) =
3044 self.context
3045 .0
3046 .compute_pass_write_timestamp(&mut self.pass, query_set.id, query_index)
3047 {
3048 self.context.handle_error(
3049 &self.error_sink,
3050 cause,
3051 self.pass.label(),
3052 "ComputePass::write_timestamp",
3053 );
3054 }
3055 }
3056
3057 fn begin_pipeline_statistics_query(
3058 &mut self,
3059 query_set: &dispatch::DispatchQuerySet,
3060 query_index: u32,
3061 ) {
3062 let query_set = query_set.as_core();
3063
3064 if let Err(cause) = self.context.0.compute_pass_begin_pipeline_statistics_query(
3065 &mut self.pass,
3066 query_set.id,
3067 query_index,
3068 ) {
3069 self.context.handle_error(
3070 &self.error_sink,
3071 cause,
3072 self.pass.label(),
3073 "ComputePass::begin_pipeline_statistics_query",
3074 );
3075 }
3076 }
3077
3078 fn end_pipeline_statistics_query(&mut self) {
3079 if let Err(cause) = self
3080 .context
3081 .0
3082 .compute_pass_end_pipeline_statistics_query(&mut self.pass)
3083 {
3084 self.context.handle_error(
3085 &self.error_sink,
3086 cause,
3087 self.pass.label(),
3088 "ComputePass::end_pipeline_statistics_query",
3089 );
3090 }
3091 }
3092
3093 fn dispatch_workgroups(&mut self, x: u32, y: u32, z: u32) {
3094 if let Err(cause) = self
3095 .context
3096 .0
3097 .compute_pass_dispatch_workgroups(&mut self.pass, x, y, z)
3098 {
3099 self.context.handle_error(
3100 &self.error_sink,
3101 cause,
3102 self.pass.label(),
3103 "ComputePass::dispatch_workgroups",
3104 );
3105 }
3106 }
3107
3108 fn dispatch_workgroups_indirect(
3109 &mut self,
3110 indirect_buffer: &dispatch::DispatchBuffer,
3111 indirect_offset: crate::BufferAddress,
3112 ) {
3113 let indirect_buffer = indirect_buffer.as_core();
3114
3115 if let Err(cause) = self.context.0.compute_pass_dispatch_workgroups_indirect(
3116 &mut self.pass,
3117 indirect_buffer.id,
3118 indirect_offset,
3119 ) {
3120 self.context.handle_error(
3121 &self.error_sink,
3122 cause,
3123 self.pass.label(),
3124 "ComputePass::dispatch_workgroups_indirect",
3125 );
3126 }
3127 }
3128
3129 fn transition_resources<'a>(
3130 &mut self,
3131 buffer_transitions: &mut dyn Iterator<
3132 Item = wgt::BufferTransition<&'a dispatch::DispatchBuffer>,
3133 >,
3134 texture_transitions: &mut dyn Iterator<
3135 Item = wgt::TextureTransition<&'a dispatch::DispatchTextureView>,
3136 >,
3137 ) {
3138 let result = self.context.0.compute_pass_transition_resources(
3139 &mut self.pass,
3140 buffer_transitions.map(|t| wgt::BufferTransition {
3141 buffer: t.buffer.as_core().id,
3142 state: t.state,
3143 }),
3144 texture_transitions.map(|t| wgt::TextureTransition {
3145 texture: t.texture.as_core().id,
3146 selector: t.selector.clone(),
3147 state: t.state,
3148 }),
3149 );
3150
3151 if let Err(cause) = result {
3152 self.context.handle_error(
3153 &self.error_sink,
3154 cause,
3155 self.pass.label(),
3156 "ComputePass::transition_resources",
3157 );
3158 }
3159 }
3160}
3161
3162impl Drop for CoreComputePass {
3163 fn drop(&mut self) {
3164 if let Err(cause) = self.context.0.compute_pass_end(&mut self.pass) {
3165 self.context.handle_error(
3166 &self.error_sink,
3167 cause,
3168 self.pass.label(),
3169 "ComputePass::end",
3170 );
3171 }
3172 }
3173}
3174
3175impl dispatch::RenderPassInterface for CoreRenderPass {
3176 fn set_pipeline(&mut self, pipeline: &dispatch::DispatchRenderPipeline) {
3177 let pipeline = pipeline.as_core();
3178
3179 if let Err(cause) = self
3180 .context
3181 .0
3182 .render_pass_set_pipeline(&mut self.pass, pipeline.id)
3183 {
3184 self.context.handle_error(
3185 &self.error_sink,
3186 cause,
3187 self.pass.label(),
3188 "RenderPass::set_pipeline",
3189 );
3190 }
3191 }
3192
3193 fn set_bind_group(
3194 &mut self,
3195 index: u32,
3196 bind_group: Option<&dispatch::DispatchBindGroup>,
3197 offsets: &[crate::DynamicOffset],
3198 ) {
3199 let bg = bind_group.map(|bg| bg.as_core().id);
3200
3201 if let Err(cause) =
3202 self.context
3203 .0
3204 .render_pass_set_bind_group(&mut self.pass, index, bg, offsets)
3205 {
3206 self.context.handle_error(
3207 &self.error_sink,
3208 cause,
3209 self.pass.label(),
3210 "RenderPass::set_bind_group",
3211 );
3212 }
3213 }
3214
3215 fn set_index_buffer(
3216 &mut self,
3217 buffer: &dispatch::DispatchBuffer,
3218 index_format: crate::IndexFormat,
3219 offset: crate::BufferAddress,
3220 size: Option<crate::BufferSize>,
3221 ) {
3222 let buffer = buffer.as_core();
3223
3224 if let Err(cause) = self.context.0.render_pass_set_index_buffer(
3225 &mut self.pass,
3226 buffer.id,
3227 index_format,
3228 offset,
3229 size,
3230 ) {
3231 self.context.handle_error(
3232 &self.error_sink,
3233 cause,
3234 self.pass.label(),
3235 "RenderPass::set_index_buffer",
3236 );
3237 }
3238 }
3239
3240 fn set_vertex_buffer(
3241 &mut self,
3242 slot: u32,
3243 buffer: Option<&dispatch::DispatchBuffer>,
3244 offset: crate::BufferAddress,
3245 size: Option<crate::BufferSize>,
3246 ) {
3247 let buffer = buffer.map(|buffer| buffer.as_core().id);
3248
3249 if let Err(cause) =
3250 self.context
3251 .0
3252 .render_pass_set_vertex_buffer(&mut self.pass, slot, buffer, offset, size)
3253 {
3254 self.context.handle_error(
3255 &self.error_sink,
3256 cause,
3257 self.pass.label(),
3258 "RenderPass::set_vertex_buffer",
3259 );
3260 }
3261 }
3262
3263 fn set_immediates(&mut self, offset: u32, data: &[u8]) {
3264 if let Err(cause) = self
3265 .context
3266 .0
3267 .render_pass_set_immediates(&mut self.pass, offset, data)
3268 {
3269 self.context.handle_error(
3270 &self.error_sink,
3271 cause,
3272 self.pass.label(),
3273 "RenderPass::set_immediates",
3274 );
3275 }
3276 }
3277
3278 fn set_blend_constant(&mut self, color: crate::Color) {
3279 if let Err(cause) = self
3280 .context
3281 .0
3282 .render_pass_set_blend_constant(&mut self.pass, color)
3283 {
3284 self.context.handle_error(
3285 &self.error_sink,
3286 cause,
3287 self.pass.label(),
3288 "RenderPass::set_blend_constant",
3289 );
3290 }
3291 }
3292
3293 fn set_scissor_rect(&mut self, x: u32, y: u32, width: u32, height: u32) {
3294 if let Err(cause) =
3295 self.context
3296 .0
3297 .render_pass_set_scissor_rect(&mut self.pass, x, y, width, height)
3298 {
3299 self.context.handle_error(
3300 &self.error_sink,
3301 cause,
3302 self.pass.label(),
3303 "RenderPass::set_scissor_rect",
3304 );
3305 }
3306 }
3307
3308 fn set_viewport(
3309 &mut self,
3310 x: f32,
3311 y: f32,
3312 width: f32,
3313 height: f32,
3314 min_depth: f32,
3315 max_depth: f32,
3316 ) {
3317 if let Err(cause) = self.context.0.render_pass_set_viewport(
3318 &mut self.pass,
3319 x,
3320 y,
3321 width,
3322 height,
3323 min_depth,
3324 max_depth,
3325 ) {
3326 self.context.handle_error(
3327 &self.error_sink,
3328 cause,
3329 self.pass.label(),
3330 "RenderPass::set_viewport",
3331 );
3332 }
3333 }
3334
3335 fn set_stencil_reference(&mut self, reference: u32) {
3336 if let Err(cause) = self
3337 .context
3338 .0
3339 .render_pass_set_stencil_reference(&mut self.pass, reference)
3340 {
3341 self.context.handle_error(
3342 &self.error_sink,
3343 cause,
3344 self.pass.label(),
3345 "RenderPass::set_stencil_reference",
3346 );
3347 }
3348 }
3349
3350 fn draw(&mut self, vertices: Range<u32>, instances: Range<u32>) {
3351 if let Err(cause) = self.context.0.render_pass_draw(
3352 &mut self.pass,
3353 vertices.end - vertices.start,
3354 instances.end - instances.start,
3355 vertices.start,
3356 instances.start,
3357 ) {
3358 self.context.handle_error(
3359 &self.error_sink,
3360 cause,
3361 self.pass.label(),
3362 "RenderPass::draw",
3363 );
3364 }
3365 }
3366
3367 fn draw_indexed(&mut self, indices: Range<u32>, base_vertex: i32, instances: Range<u32>) {
3368 if let Err(cause) = self.context.0.render_pass_draw_indexed(
3369 &mut self.pass,
3370 indices.end - indices.start,
3371 instances.end - instances.start,
3372 indices.start,
3373 base_vertex,
3374 instances.start,
3375 ) {
3376 self.context.handle_error(
3377 &self.error_sink,
3378 cause,
3379 self.pass.label(),
3380 "RenderPass::draw_indexed",
3381 );
3382 }
3383 }
3384
3385 fn draw_mesh_tasks(&mut self, group_count_x: u32, group_count_y: u32, group_count_z: u32) {
3386 if let Err(cause) = self.context.0.render_pass_draw_mesh_tasks(
3387 &mut self.pass,
3388 group_count_x,
3389 group_count_y,
3390 group_count_z,
3391 ) {
3392 self.context.handle_error(
3393 &self.error_sink,
3394 cause,
3395 self.pass.label(),
3396 "RenderPass::draw_mesh_tasks",
3397 );
3398 }
3399 }
3400
3401 fn draw_indirect(
3402 &mut self,
3403 indirect_buffer: &dispatch::DispatchBuffer,
3404 indirect_offset: crate::BufferAddress,
3405 ) {
3406 let indirect_buffer = indirect_buffer.as_core();
3407
3408 if let Err(cause) = self.context.0.render_pass_draw_indirect(
3409 &mut self.pass,
3410 indirect_buffer.id,
3411 indirect_offset,
3412 ) {
3413 self.context.handle_error(
3414 &self.error_sink,
3415 cause,
3416 self.pass.label(),
3417 "RenderPass::draw_indirect",
3418 );
3419 }
3420 }
3421
3422 fn draw_indexed_indirect(
3423 &mut self,
3424 indirect_buffer: &dispatch::DispatchBuffer,
3425 indirect_offset: crate::BufferAddress,
3426 ) {
3427 let indirect_buffer = indirect_buffer.as_core();
3428
3429 if let Err(cause) = self.context.0.render_pass_draw_indexed_indirect(
3430 &mut self.pass,
3431 indirect_buffer.id,
3432 indirect_offset,
3433 ) {
3434 self.context.handle_error(
3435 &self.error_sink,
3436 cause,
3437 self.pass.label(),
3438 "RenderPass::draw_indexed_indirect",
3439 );
3440 }
3441 }
3442
3443 fn draw_mesh_tasks_indirect(
3444 &mut self,
3445 indirect_buffer: &dispatch::DispatchBuffer,
3446 indirect_offset: crate::BufferAddress,
3447 ) {
3448 let indirect_buffer = indirect_buffer.as_core();
3449
3450 if let Err(cause) = self.context.0.render_pass_draw_mesh_tasks_indirect(
3451 &mut self.pass,
3452 indirect_buffer.id,
3453 indirect_offset,
3454 ) {
3455 self.context.handle_error(
3456 &self.error_sink,
3457 cause,
3458 self.pass.label(),
3459 "RenderPass::draw_mesh_tasks_indirect",
3460 );
3461 }
3462 }
3463
3464 fn multi_draw_indirect(
3465 &mut self,
3466 indirect_buffer: &dispatch::DispatchBuffer,
3467 indirect_offset: crate::BufferAddress,
3468 count: u32,
3469 ) {
3470 let indirect_buffer = indirect_buffer.as_core();
3471
3472 if let Err(cause) = self.context.0.render_pass_multi_draw_indirect(
3473 &mut self.pass,
3474 indirect_buffer.id,
3475 indirect_offset,
3476 count,
3477 ) {
3478 self.context.handle_error(
3479 &self.error_sink,
3480 cause,
3481 self.pass.label(),
3482 "RenderPass::multi_draw_indirect",
3483 );
3484 }
3485 }
3486
3487 fn multi_draw_indexed_indirect(
3488 &mut self,
3489 indirect_buffer: &dispatch::DispatchBuffer,
3490 indirect_offset: crate::BufferAddress,
3491 count: u32,
3492 ) {
3493 let indirect_buffer = indirect_buffer.as_core();
3494
3495 if let Err(cause) = self.context.0.render_pass_multi_draw_indexed_indirect(
3496 &mut self.pass,
3497 indirect_buffer.id,
3498 indirect_offset,
3499 count,
3500 ) {
3501 self.context.handle_error(
3502 &self.error_sink,
3503 cause,
3504 self.pass.label(),
3505 "RenderPass::multi_draw_indexed_indirect",
3506 );
3507 }
3508 }
3509
3510 fn multi_draw_mesh_tasks_indirect(
3511 &mut self,
3512 indirect_buffer: &dispatch::DispatchBuffer,
3513 indirect_offset: crate::BufferAddress,
3514 count: u32,
3515 ) {
3516 let indirect_buffer = indirect_buffer.as_core();
3517
3518 if let Err(cause) = self.context.0.render_pass_multi_draw_mesh_tasks_indirect(
3519 &mut self.pass,
3520 indirect_buffer.id,
3521 indirect_offset,
3522 count,
3523 ) {
3524 self.context.handle_error(
3525 &self.error_sink,
3526 cause,
3527 self.pass.label(),
3528 "RenderPass::multi_draw_mesh_tasks_indirect",
3529 );
3530 }
3531 }
3532
3533 fn multi_draw_indirect_count(
3534 &mut self,
3535 indirect_buffer: &dispatch::DispatchBuffer,
3536 indirect_offset: crate::BufferAddress,
3537 count_buffer: &dispatch::DispatchBuffer,
3538 count_buffer_offset: crate::BufferAddress,
3539 max_count: u32,
3540 ) {
3541 let indirect_buffer = indirect_buffer.as_core();
3542 let count_buffer = count_buffer.as_core();
3543
3544 if let Err(cause) = self.context.0.render_pass_multi_draw_indirect_count(
3545 &mut self.pass,
3546 indirect_buffer.id,
3547 indirect_offset,
3548 count_buffer.id,
3549 count_buffer_offset,
3550 max_count,
3551 ) {
3552 self.context.handle_error(
3553 &self.error_sink,
3554 cause,
3555 self.pass.label(),
3556 "RenderPass::multi_draw_indirect_count",
3557 );
3558 }
3559 }
3560
3561 fn multi_draw_indexed_indirect_count(
3562 &mut self,
3563 indirect_buffer: &dispatch::DispatchBuffer,
3564 indirect_offset: crate::BufferAddress,
3565 count_buffer: &dispatch::DispatchBuffer,
3566 count_buffer_offset: crate::BufferAddress,
3567 max_count: u32,
3568 ) {
3569 let indirect_buffer = indirect_buffer.as_core();
3570 let count_buffer = count_buffer.as_core();
3571
3572 if let Err(cause) = self
3573 .context
3574 .0
3575 .render_pass_multi_draw_indexed_indirect_count(
3576 &mut self.pass,
3577 indirect_buffer.id,
3578 indirect_offset,
3579 count_buffer.id,
3580 count_buffer_offset,
3581 max_count,
3582 )
3583 {
3584 self.context.handle_error(
3585 &self.error_sink,
3586 cause,
3587 self.pass.label(),
3588 "RenderPass::multi_draw_indexed_indirect_count",
3589 );
3590 }
3591 }
3592
3593 fn multi_draw_mesh_tasks_indirect_count(
3594 &mut self,
3595 indirect_buffer: &dispatch::DispatchBuffer,
3596 indirect_offset: crate::BufferAddress,
3597 count_buffer: &dispatch::DispatchBuffer,
3598 count_buffer_offset: crate::BufferAddress,
3599 max_count: u32,
3600 ) {
3601 let indirect_buffer = indirect_buffer.as_core();
3602 let count_buffer = count_buffer.as_core();
3603
3604 if let Err(cause) = self
3605 .context
3606 .0
3607 .render_pass_multi_draw_mesh_tasks_indirect_count(
3608 &mut self.pass,
3609 indirect_buffer.id,
3610 indirect_offset,
3611 count_buffer.id,
3612 count_buffer_offset,
3613 max_count,
3614 )
3615 {
3616 self.context.handle_error(
3617 &self.error_sink,
3618 cause,
3619 self.pass.label(),
3620 "RenderPass::multi_draw_mesh_tasks_indirect_count",
3621 );
3622 }
3623 }
3624
3625 fn insert_debug_marker(&mut self, label: &str) {
3626 if let Err(cause) = self
3627 .context
3628 .0
3629 .render_pass_insert_debug_marker(&mut self.pass, label, 0)
3630 {
3631 self.context.handle_error(
3632 &self.error_sink,
3633 cause,
3634 self.pass.label(),
3635 "RenderPass::insert_debug_marker",
3636 );
3637 }
3638 }
3639
3640 fn push_debug_group(&mut self, group_label: &str) {
3641 if let Err(cause) =
3642 self.context
3643 .0
3644 .render_pass_push_debug_group(&mut self.pass, group_label, 0)
3645 {
3646 self.context.handle_error(
3647 &self.error_sink,
3648 cause,
3649 self.pass.label(),
3650 "RenderPass::push_debug_group",
3651 );
3652 }
3653 }
3654
3655 fn pop_debug_group(&mut self) {
3656 if let Err(cause) = self.context.0.render_pass_pop_debug_group(&mut self.pass) {
3657 self.context.handle_error(
3658 &self.error_sink,
3659 cause,
3660 self.pass.label(),
3661 "RenderPass::pop_debug_group",
3662 );
3663 }
3664 }
3665
3666 fn write_timestamp(&mut self, query_set: &dispatch::DispatchQuerySet, query_index: u32) {
3667 let query_set = query_set.as_core();
3668
3669 if let Err(cause) =
3670 self.context
3671 .0
3672 .render_pass_write_timestamp(&mut self.pass, query_set.id, query_index)
3673 {
3674 self.context.handle_error(
3675 &self.error_sink,
3676 cause,
3677 self.pass.label(),
3678 "RenderPass::write_timestamp",
3679 );
3680 }
3681 }
3682
3683 fn begin_occlusion_query(&mut self, query_index: u32) {
3684 if let Err(cause) = self
3685 .context
3686 .0
3687 .render_pass_begin_occlusion_query(&mut self.pass, query_index)
3688 {
3689 self.context.handle_error(
3690 &self.error_sink,
3691 cause,
3692 self.pass.label(),
3693 "RenderPass::begin_occlusion_query",
3694 );
3695 }
3696 }
3697
3698 fn end_occlusion_query(&mut self) {
3699 if let Err(cause) = self
3700 .context
3701 .0
3702 .render_pass_end_occlusion_query(&mut self.pass)
3703 {
3704 self.context.handle_error(
3705 &self.error_sink,
3706 cause,
3707 self.pass.label(),
3708 "RenderPass::end_occlusion_query",
3709 );
3710 }
3711 }
3712
3713 fn begin_pipeline_statistics_query(
3714 &mut self,
3715 query_set: &dispatch::DispatchQuerySet,
3716 query_index: u32,
3717 ) {
3718 let query_set = query_set.as_core();
3719
3720 if let Err(cause) = self.context.0.render_pass_begin_pipeline_statistics_query(
3721 &mut self.pass,
3722 query_set.id,
3723 query_index,
3724 ) {
3725 self.context.handle_error(
3726 &self.error_sink,
3727 cause,
3728 self.pass.label(),
3729 "RenderPass::begin_pipeline_statistics_query",
3730 );
3731 }
3732 }
3733
3734 fn end_pipeline_statistics_query(&mut self) {
3735 if let Err(cause) = self
3736 .context
3737 .0
3738 .render_pass_end_pipeline_statistics_query(&mut self.pass)
3739 {
3740 self.context.handle_error(
3741 &self.error_sink,
3742 cause,
3743 self.pass.label(),
3744 "RenderPass::end_pipeline_statistics_query",
3745 );
3746 }
3747 }
3748
3749 fn execute_bundles(
3750 &mut self,
3751 render_bundles: &mut dyn Iterator<Item = &dispatch::DispatchRenderBundle>,
3752 ) {
3753 let temp_render_bundles = render_bundles
3754 .map(|rb| rb.as_core().id)
3755 .collect::<SmallVec<[_; 4]>>();
3756 if let Err(cause) = self
3757 .context
3758 .0
3759 .render_pass_execute_bundles(&mut self.pass, &temp_render_bundles)
3760 {
3761 self.context.handle_error(
3762 &self.error_sink,
3763 cause,
3764 self.pass.label(),
3765 "RenderPass::execute_bundles",
3766 );
3767 }
3768 }
3769}
3770
3771impl Drop for CoreRenderPass {
3772 fn drop(&mut self) {
3773 if let Err(cause) = self.context.0.render_pass_end(&mut self.pass) {
3774 self.context.handle_error(
3775 &self.error_sink,
3776 cause,
3777 self.pass.label(),
3778 "RenderPass::end",
3779 );
3780 }
3781 }
3782}
3783
3784impl dispatch::RenderBundleEncoderInterface for CoreRenderBundleEncoder {
3785 fn set_pipeline(&mut self, pipeline: &dispatch::DispatchRenderPipeline) {
3786 let pipeline = pipeline.as_core();
3787
3788 wgpu_render_bundle_set_pipeline(&mut self.encoder, pipeline.id)
3789 }
3790
3791 fn set_bind_group(
3792 &mut self,
3793 index: u32,
3794 bind_group: Option<&dispatch::DispatchBindGroup>,
3795 offsets: &[crate::DynamicOffset],
3796 ) {
3797 let bg = bind_group.map(|bg| bg.as_core().id);
3798
3799 unsafe {
3800 wgpu_render_bundle_set_bind_group(
3801 &mut self.encoder,
3802 index,
3803 bg,
3804 offsets.as_ptr(),
3805 offsets.len(),
3806 )
3807 }
3808 }
3809
3810 fn set_index_buffer(
3811 &mut self,
3812 buffer: &dispatch::DispatchBuffer,
3813 index_format: crate::IndexFormat,
3814 offset: crate::BufferAddress,
3815 size: Option<crate::BufferSize>,
3816 ) {
3817 let buffer = buffer.as_core();
3818
3819 self.encoder
3820 .set_index_buffer(buffer.id, index_format, offset, size)
3821 }
3822
3823 fn set_vertex_buffer(
3824 &mut self,
3825 slot: u32,
3826 buffer: Option<&dispatch::DispatchBuffer>,
3827 offset: crate::BufferAddress,
3828 size: Option<crate::BufferSize>,
3829 ) {
3830 let buffer = buffer.map(|buffer| buffer.as_core().id);
3831
3832 wgpu_render_bundle_set_vertex_buffer(&mut self.encoder, slot, buffer, offset, size)
3833 }
3834
3835 fn set_immediates(&mut self, offset: u32, data: &[u8]) {
3836 unsafe {
3837 wgpu_render_bundle_set_immediates(
3838 &mut self.encoder,
3839 offset,
3840 data.len().try_into().unwrap(),
3841 data.as_ptr(),
3842 )
3843 }
3844 }
3845
3846 fn draw(&mut self, vertices: Range<u32>, instances: Range<u32>) {
3847 wgpu_render_bundle_draw(
3848 &mut self.encoder,
3849 vertices.end - vertices.start,
3850 instances.end - instances.start,
3851 vertices.start,
3852 instances.start,
3853 )
3854 }
3855
3856 fn draw_indexed(&mut self, indices: Range<u32>, base_vertex: i32, instances: Range<u32>) {
3857 wgpu_render_bundle_draw_indexed(
3858 &mut self.encoder,
3859 indices.end - indices.start,
3860 instances.end - instances.start,
3861 indices.start,
3862 base_vertex,
3863 instances.start,
3864 )
3865 }
3866
3867 fn draw_indirect(
3868 &mut self,
3869 indirect_buffer: &dispatch::DispatchBuffer,
3870 indirect_offset: crate::BufferAddress,
3871 ) {
3872 let indirect_buffer = indirect_buffer.as_core();
3873
3874 wgpu_render_bundle_draw_indirect(&mut self.encoder, indirect_buffer.id, indirect_offset)
3875 }
3876
3877 fn draw_indexed_indirect(
3878 &mut self,
3879 indirect_buffer: &dispatch::DispatchBuffer,
3880 indirect_offset: crate::BufferAddress,
3881 ) {
3882 let indirect_buffer = indirect_buffer.as_core();
3883
3884 wgpu_render_bundle_draw_indexed_indirect(
3885 &mut self.encoder,
3886 indirect_buffer.id,
3887 indirect_offset,
3888 )
3889 }
3890
3891 fn finish(self, desc: &crate::RenderBundleDescriptor<'_>) -> dispatch::DispatchRenderBundle
3892 where
3893 Self: Sized,
3894 {
3895 let (id, error) = self.context.0.render_bundle_encoder_finish(
3896 self.encoder,
3897 &desc.map_label(|l| l.map(Borrowed)),
3898 None,
3899 );
3900 if let Some(err) = error {
3901 self.context
3902 .handle_error_fatal(err, "RenderBundleEncoder::finish");
3903 }
3904 CoreRenderBundle {
3905 context: self.context.clone(),
3906 id,
3907 }
3908 .into()
3909 }
3910}
3911
// Intentionally empty: no methods are provided here. NOTE(review): presumably
// `dispatch::RenderBundleInterface` needs only the supertrait bounds — confirm
// against the trait definition.
impl dispatch::RenderBundleInterface for CoreRenderBundle {}
3913
impl Drop for CoreRenderBundle {
    fn drop(&mut self) {
        // Tell wgpu-core this wrapper no longer references the bundle id.
        self.context.0.render_bundle_drop(self.id)
    }
}
3919
3920impl dispatch::SurfaceInterface for CoreSurface {
3921 fn get_capabilities(&self, adapter: &dispatch::DispatchAdapter) -> wgt::SurfaceCapabilities {
3922 let adapter = adapter.as_core();
3923
3924 self.context
3925 .0
3926 .surface_get_capabilities(self.id, adapter.id)
3927 .unwrap_or_default()
3928 }
3929
3930 fn configure(&self, device: &dispatch::DispatchDevice, config: &crate::SurfaceConfiguration) {
3931 let device = device.as_core();
3932
3933 let error = self.context.0.surface_configure(self.id, device.id, config);
3934 if let Some(e) = error {
3935 self.context
3936 .handle_error_nolabel(&device.error_sink, e, "Surface::configure");
3937 } else {
3938 *self.configured_device.lock() = Some(device.id);
3939 *self.error_sink.lock() = Some(device.error_sink.clone());
3940 }
3941 }
3942
3943 fn get_current_texture(
3944 &self,
3945 ) -> (
3946 Option<dispatch::DispatchTexture>,
3947 crate::SurfaceStatus,
3948 dispatch::DispatchSurfaceOutputDetail,
3949 ) {
3950 let error_sink = if let Some(error_sink) = self.error_sink.lock().as_ref() {
3951 error_sink.clone()
3952 } else {
3953 Arc::new(Mutex::new(ErrorSinkRaw::new()))
3954 };
3955
3956 let output_detail = CoreSurfaceOutputDetail {
3957 context: self.context.clone(),
3958 surface_id: self.id,
3959 error_sink: error_sink.clone(),
3960 }
3961 .into();
3962
3963 match self.context.0.surface_get_current_texture(self.id, None) {
3964 Ok(wgc::present::SurfaceOutput {
3965 status,
3966 texture: texture_id,
3967 }) => {
3968 let data = texture_id
3969 .map(|id| CoreTexture {
3970 context: self.context.clone(),
3971 id,
3972 error_sink,
3973 })
3974 .map(Into::into);
3975
3976 (data, status, output_detail)
3977 }
3978 Err(err) => {
3979 let error_sink = self.error_sink.lock();
3980 match error_sink.as_ref() {
3981 Some(error_sink) => {
3982 self.context.handle_error_nolabel(
3983 error_sink,
3984 err,
3985 "Surface::get_current_texture_view",
3986 );
3987 (None, crate::SurfaceStatus::Validation, output_detail)
3988 }
3989 None => self
3990 .context
3991 .handle_error_fatal(err, "Surface::get_current_texture_view"),
3992 }
3993 }
3994 }
3995 }
3996}
3997
impl Drop for CoreSurface {
    fn drop(&mut self) {
        // Tell wgpu-core this wrapper no longer references the surface id.
        self.context.0.surface_drop(self.id)
    }
}
4003
4004impl dispatch::SurfaceOutputDetailInterface for CoreSurfaceOutputDetail {
4005 fn texture_discard(&self) {
4006 match self.context.0.surface_texture_discard(self.surface_id) {
4007 Ok(_status) => (),
4008 Err(err) => {
4009 self.context
4010 .handle_error_nolabel(&self.error_sink, err, "Surface::discard_texture")
4011 }
4012 }
4013 }
4014}
impl Drop for CoreSurfaceOutputDetail {
    fn drop(&mut self) {
        // Intentionally empty: no cleanup is performed when the output detail
        // is dropped. NOTE(review): presumably the surface texture's lifetime
        // is handled by present/discard and the texture's own Drop — confirm
        // nothing needs to be released here.
    }
}
4022
impl dispatch::QueueWriteBufferInterface for CoreQueueWriteBuffer {
    /// Length in bytes of the staging mapping.
    #[inline]
    fn len(&self) -> usize {
        self.mapping.len()
    }

    /// Exposes the staging memory as a write-only byte slice.
    ///
    /// # Safety
    /// Delegates to the inner mapping's `write_slice`; the caller must uphold
    /// whatever contract that method requires.
    #[inline]
    unsafe fn write_slice(&mut self) -> WriteOnly<'_, [u8]> {
        unsafe { self.mapping.write_slice() }
    }
}
impl Drop for CoreQueueWriteBuffer {
    fn drop(&mut self) {
        // Intentionally empty: no explicit cleanup here. NOTE(review):
        // presumably the inner mapping/buffer fields release their resources in
        // their own Drop impls — confirm nothing must be returned to wgc.
    }
}
4041
impl dispatch::BufferMappedRangeInterface for CoreBufferMappedRange {
    /// Size in bytes of the mapped range.
    #[inline]
    fn len(&self) -> usize {
        self.size
    }

    #[inline]
    unsafe fn read_slice(&self) -> &[u8] {
        // SAFETY: relies on `self.ptr`/`self.size` describing a live mapped
        // region of at least `size` readable bytes; that invariant is
        // established where this struct is constructed (not visible here).
        unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.size) }
    }

    #[inline]
    unsafe fn write_slice(&mut self) -> WriteOnly<'_, [u8]> {
        // SAFETY: same pointer/size invariant as `read_slice`. The `WriteOnly`
        // wrapper presumably restricts access to writes — confirm its contract.
        unsafe { WriteOnly::new(NonNull::slice_from_raw_parts(self.ptr, self.size)) }
    }

    // Only meaningful on the WebGPU backend; a wgpu-core mapping has no
    // JS-side buffer, so this path is unreachable for core-backed ranges.
    #[cfg(webgpu)]
    fn as_uint8array(&self) -> &js_sys::Uint8Array {
        panic!("Only available on WebGPU")
    }
}