1use alloc::{
2 borrow::Cow::{self, Borrowed},
3 boxed::Box,
4 format,
5 string::{String, ToString as _},
6 sync::Arc,
7 vec,
8 vec::Vec,
9};
10use core::{
11 error::Error,
12 fmt,
13 future::ready,
14 ops::{Deref, Range},
15 pin::Pin,
16 ptr::NonNull,
17 slice,
18};
19use hashbrown::HashMap;
20
21use arrayvec::ArrayVec;
22use smallvec::SmallVec;
23use wgc::{
24 command::bundle_ffi::*, error::ContextErrorSource, pipeline::CreateShaderModuleError,
25 resource::BlasPrepareCompactResult,
26};
27use wgt::{
28 error::{ErrorType, WebGpuError},
29 WasmNotSendSync,
30};
31
32use crate::{
33 api,
34 dispatch::{self, BlasCompactCallback, BufferMappedRangeInterface},
35 BindingResource, Blas, BufferBinding, BufferDescriptor, CompilationInfo, CompilationMessage,
36 CompilationMessageType, ErrorSource, Features, Label, LoadOp, MapMode, Operations,
37 ShaderSource, SurfaceTargetUnsafe, TextureDescriptor, Tlas,
38};
39use crate::{dispatch::DispatchAdapter, util::Mutex};
40
41mod thread_id;
42
/// The `wgpu-core`-backed context: a cheap, clonable handle to the shared
/// [`wgc::global::Global`] that owns every GPU object registry.
///
/// Cloning only bumps the `Arc` refcount; all clones refer to the same global.
#[derive(Clone)]
pub struct ContextWgpuCore(Arc<wgc::global::Global>);
45
impl Drop for ContextWgpuCore {
    fn drop(&mut self) {
        // Intentionally empty: the inner `Arc<wgc::global::Global>` performs all
        // teardown when the last clone goes away.
        // NOTE(review): an explicit empty `Drop` impl is unusual — presumably a
        // placeholder or left over from removed cleanup logic; confirm it is
        // still needed before removing.
    }
}
51
52impl fmt::Debug for ContextWgpuCore {
53 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
54 f.debug_struct("ContextWgpuCore")
55 .field("type", &"Native")
56 .finish()
57 }
58}
59
impl ContextWgpuCore {
    /// Creates a context from an existing `wgpu-hal` instance for backend `A`.
    ///
    /// # Safety
    /// See [`wgc::global::Global::from_hal_instance`].
    pub unsafe fn from_hal_instance<A: hal::Api>(hal_instance: A::Instance) -> Self {
        Self(unsafe {
            Arc::new(wgc::global::Global::from_hal_instance::<A>(
                "wgpu",
                hal_instance,
            ))
        })
    }

    /// Returns the underlying `wgpu-hal` instance for backend `A`, if present.
    ///
    /// # Safety
    /// See [`wgc::global::Global::instance_as_hal`].
    pub unsafe fn instance_as_hal<A: hal::Api>(&self) -> Option<&A::Instance> {
        unsafe { self.0.instance_as_hal::<A>() }
    }

    /// Creates a context that takes ownership of an existing `wgpu-core` instance.
    ///
    /// # Safety
    /// See [`wgc::global::Global::from_instance`].
    pub unsafe fn from_core_instance(core_instance: wgc::instance::Instance) -> Self {
        Self(unsafe { Arc::new(wgc::global::Global::from_instance(core_instance)) })
    }

    /// Lists the ids of all adapters available on the requested `backends`.
    #[cfg(wgpu_core)]
    pub fn enumerate_adapters(&self, backends: wgt::Backends) -> Vec<wgc::id::AdapterId> {
        self.0.enumerate_adapters(backends)
    }

    /// Registers a `wgpu-hal` adapter with this context and returns its id.
    ///
    /// # Safety
    /// See [`wgc::global::Global::create_adapter_from_hal`].
    pub unsafe fn create_adapter_from_hal<A: hal::Api>(
        &self,
        hal_adapter: hal::ExposedAdapter<A>,
    ) -> wgc::id::AdapterId {
        unsafe { self.0.create_adapter_from_hal(hal_adapter.into(), None) }
    }

    /// Borrows the `wgpu-hal` adapter behind `adapter`, if it belongs to backend `A`.
    ///
    /// # Safety
    /// See [`wgc::global::Global::adapter_as_hal`].
    pub unsafe fn adapter_as_hal<A: hal::Api>(
        &self,
        adapter: &CoreAdapter,
    ) -> Option<impl Deref<Target = A::Adapter> + WasmNotSendSync> {
        unsafe { self.0.adapter_as_hal::<A>(adapter.id) }
    }

    /// Borrows the `wgpu-hal` buffer behind `buffer`, if it belongs to backend `A`.
    ///
    /// # Safety
    /// See [`wgc::global::Global::buffer_as_hal`].
    pub unsafe fn buffer_as_hal<A: hal::Api>(
        &self,
        buffer: &CoreBuffer,
    ) -> Option<impl Deref<Target = A::Buffer>> {
        unsafe { self.0.buffer_as_hal::<A>(buffer.id) }
    }

    /// Wraps an already-open `wgpu-hal` device as a core device/queue pair.
    ///
    /// The returned device and queue share a single error sink, so errors raised
    /// through either surface in the same scope stack.
    ///
    /// # Safety
    /// See [`wgc::global::Global::create_device_from_hal`].
    pub unsafe fn create_device_from_hal<A: hal::Api>(
        &self,
        adapter: &CoreAdapter,
        hal_device: hal::OpenDevice<A>,
        desc: &crate::DeviceDescriptor<'_>,
    ) -> Result<(CoreDevice, CoreQueue), crate::RequestDeviceError> {
        // Tracing was removed upstream; warn loudly instead of silently ignoring it.
        if !matches!(desc.trace, wgt::Trace::Off) {
            log::error!(
                "
                Feature 'trace' has been removed temporarily; \
                see https://github.com/gfx-rs/wgpu/issues/5974. \
                The `trace` parameter will have no effect."
            );
        }

        let (device_id, queue_id) = unsafe {
            self.0.create_device_from_hal(
                adapter.id,
                hal_device.into(),
                &desc.map_label(|l| l.map(Borrowed)),
                None,
                None,
            )
        }?;
        let error_sink = Arc::new(Mutex::new(ErrorSinkRaw::new()));
        let device = CoreDevice {
            context: self.clone(),
            id: device_id,
            error_sink: error_sink.clone(),
            features: desc.required_features,
        };
        let queue = CoreQueue {
            context: self.clone(),
            id: queue_id,
            error_sink,
        };
        Ok((device, queue))
    }

    /// Wraps an existing `wgpu-hal` texture as a core texture on `device`.
    ///
    /// Creation errors are routed to the device's error sink rather than returned.
    ///
    /// # Safety
    /// See [`wgc::global::Global::create_texture_from_hal`].
    pub unsafe fn create_texture_from_hal<A: hal::Api>(
        &self,
        hal_texture: A::Texture,
        device: &CoreDevice,
        desc: &TextureDescriptor<'_>,
    ) -> CoreTexture {
        let descriptor = desc.map_label_and_view_formats(|l| l.map(Borrowed), |v| v.to_vec());
        let (id, error) = unsafe {
            self.0
                .create_texture_from_hal(Box::new(hal_texture), device.id, &descriptor, None)
        };
        if let Some(cause) = error {
            self.handle_error(
                &device.error_sink,
                cause,
                desc.label,
                "Device::create_texture_from_hal",
            );
        }
        CoreTexture {
            context: self.clone(),
            id,
            error_sink: Arc::clone(&device.error_sink),
        }
    }

    /// Wraps an existing `wgpu-hal` buffer as a core buffer on `device`.
    ///
    /// Creation errors are routed to the device's error sink rather than returned.
    ///
    /// # Safety
    /// See [`wgc::global::Global::create_buffer_from_hal`].
    pub unsafe fn create_buffer_from_hal<A: hal::Api>(
        &self,
        hal_buffer: A::Buffer,
        device: &CoreDevice,
        desc: &BufferDescriptor<'_>,
    ) -> CoreBuffer {
        let (id, error) = unsafe {
            self.0.create_buffer_from_hal::<A>(
                hal_buffer,
                device.id,
                &desc.map_label(|l| l.map(Borrowed)),
                None,
            )
        };
        if let Some(cause) = error {
            self.handle_error(
                &device.error_sink,
                cause,
                desc.label,
                "Device::create_buffer_from_hal",
            );
        }
        CoreBuffer {
            context: self.clone(),
            id,
            error_sink: Arc::clone(&device.error_sink),
        }
    }

    /// Borrows the `wgpu-hal` device behind `device`, if it belongs to backend `A`.
    ///
    /// # Safety
    /// See [`wgc::global::Global::device_as_hal`].
    pub unsafe fn device_as_hal<A: hal::Api>(
        &self,
        device: &CoreDevice,
    ) -> Option<impl Deref<Target = A::Device>> {
        unsafe { self.0.device_as_hal::<A>(device.id) }
    }

    /// Borrows the `wgpu-hal` surface behind `surface`, if it belongs to backend `A`.
    ///
    /// # Safety
    /// See [`wgc::global::Global::surface_as_hal`].
    pub unsafe fn surface_as_hal<A: hal::Api>(
        &self,
        surface: &CoreSurface,
    ) -> Option<impl Deref<Target = A::Surface>> {
        unsafe { self.0.surface_as_hal::<A>(surface.id) }
    }

    /// Borrows the `wgpu-hal` texture behind `texture`, if it belongs to backend `A`.
    ///
    /// # Safety
    /// See [`wgc::global::Global::texture_as_hal`].
    pub unsafe fn texture_as_hal<A: hal::Api>(
        &self,
        texture: &CoreTexture,
    ) -> Option<impl Deref<Target = A::Texture>> {
        unsafe { self.0.texture_as_hal::<A>(texture.id) }
    }

    /// Borrows the `wgpu-hal` texture view behind `texture_view`, if it belongs
    /// to backend `A`.
    ///
    /// # Safety
    /// See [`wgc::global::Global::texture_view_as_hal`].
    pub unsafe fn texture_view_as_hal<A: hal::Api>(
        &self,
        texture_view: &CoreTextureView,
    ) -> Option<impl Deref<Target = A::TextureView>> {
        unsafe { self.0.texture_view_as_hal::<A>(texture_view.id) }
    }

    /// Runs `hal_command_encoder_callback` with mutable access to the encoder's
    /// `wgpu-hal` representation (or `None` if the backend does not match).
    ///
    /// # Safety
    /// See [`wgc::global::Global::command_encoder_as_hal_mut`].
    pub unsafe fn command_encoder_as_hal_mut<
        A: hal::Api,
        F: FnOnce(Option<&mut A::CommandEncoder>) -> R,
        R,
    >(
        &self,
        command_encoder: &CoreCommandEncoder,
        hal_command_encoder_callback: F,
    ) -> R {
        unsafe {
            self.0.command_encoder_as_hal_mut::<A, F, R>(
                command_encoder.id,
                hal_command_encoder_callback,
            )
        }
    }

    /// Borrows the `wgpu-hal` acceleration structure behind `blas`, if it
    /// belongs to backend `A`.
    ///
    /// # Safety
    /// See [`wgc::global::Global::blas_as_hal`].
    pub unsafe fn blas_as_hal<A: hal::Api>(
        &self,
        blas: &CoreBlas,
    ) -> Option<impl Deref<Target = A::AccelerationStructure>> {
        unsafe { self.0.blas_as_hal::<A>(blas.id) }
    }

    /// Borrows the `wgpu-hal` acceleration structure behind `tlas`, if it
    /// belongs to backend `A`.
    ///
    /// # Safety
    /// See [`wgc::global::Global::tlas_as_hal`].
    pub unsafe fn tlas_as_hal<A: hal::Api>(
        &self,
        tlas: &CoreTlas,
    ) -> Option<impl Deref<Target = A::AccelerationStructure>> {
        unsafe { self.0.tlas_as_hal::<A>(tlas.id) }
    }

    /// Produces a snapshot report of every registry in the global.
    pub fn generate_report(&self) -> wgc::global::GlobalReport {
        self.0.generate_report()
    }

    /// Slow path shared by all error reporting: wraps `source` in a
    /// [`wgc::error::ContextError`], converts it to the matching `crate::Error`
    /// variant, and hands it to the sink.
    ///
    /// Marked `#[cold]`/`#[inline(never)]` to keep this machinery out of the
    /// callers' hot paths.
    #[cold]
    #[track_caller]
    #[inline(never)]
    fn handle_error_inner(
        &self,
        sink_mutex: &Mutex<ErrorSinkRaw>,
        error_type: ErrorType,
        source: ContextErrorSource,
        label: Label<'_>,
        fn_ident: &'static str,
    ) {
        let source: ErrorSource = Box::new(wgc::error::ContextError {
            fn_ident,
            source,
            label: label.unwrap_or_default().to_string(),
        });
        let final_error_handling = {
            let mut sink = sink_mutex.lock();
            // Formatting the full error tree is expensive; defer it so variants
            // that don't carry a description don't pay for it.
            let description = || self.format_error(&*source);
            let error = match error_type {
                ErrorType::Internal => {
                    let description = description();
                    crate::Error::Internal {
                        source,
                        description,
                    }
                }
                ErrorType::OutOfMemory => crate::Error::OutOfMemory { source },
                ErrorType::Validation => {
                    let description = description();
                    crate::Error::Validation {
                        source,
                        description,
                    }
                }
                // Device-lost errors bypass the sink entirely — presumably they
                // are surfaced through the device-lost callback instead; this
                // path just drops them.
                ErrorType::DeviceLost => return,
            };
            sink.handle_error_or_return_handler(error)
        };

        // Invoke any uncaptured-error handler only after the sink lock above is
        // released, so the handler can safely re-enter the error machinery.
        if let Some(f) = final_error_handling {
            f();
        }
    }

    /// Reports `source` (with a resource `label`) to `sink_mutex`, classifying
    /// it via its [`WebGpuError`] error type.
    #[inline]
    #[track_caller]
    fn handle_error(
        &self,
        sink_mutex: &Mutex<ErrorSinkRaw>,
        source: impl WebGpuError + WasmNotSendSync + 'static,
        label: Label<'_>,
        fn_ident: &'static str,
    ) {
        let error_type = source.webgpu_error_type();
        self.handle_error_inner(sink_mutex, error_type, Box::new(source), label, fn_ident)
    }

    /// Like [`Self::handle_error`], for errors with no associated resource label.
    #[inline]
    #[track_caller]
    fn handle_error_nolabel(
        &self,
        sink_mutex: &Mutex<ErrorSinkRaw>,
        source: impl WebGpuError + WasmNotSendSync + 'static,
        fn_ident: &'static str,
    ) {
        let error_type = source.webgpu_error_type();
        self.handle_error_inner(sink_mutex, error_type, Box::new(source), None, fn_ident)
    }

    /// Reports an unrecoverable error by panicking with the formatted error tree.
    #[track_caller]
    #[cold]
    fn handle_error_fatal(
        &self,
        cause: impl Error + WasmNotSendSync + 'static,
        operation: &'static str,
    ) -> ! {
        panic!("Error in {operation}: {f}", f = self.format_error(&cause));
    }

    /// Renders `err` and its `source()` chain as an indented tree, flattening
    /// [`wgc::error::MultiError`] nodes into sibling entries.
    ///
    /// NOTE(review): the header is always "Validation Error", even when the
    /// root cause is not a validation failure — confirm this wording is
    /// intentional.
    #[inline(never)]
    fn format_error(&self, err: &(dyn Error + 'static)) -> String {
        let mut output = String::new();
        let mut level = 1;

        // Depth-first walk; indentation is two spaces per nesting level.
        fn print_tree(output: &mut String, level: &mut usize, e: &(dyn Error + 'static)) {
            let mut print = |e: &(dyn Error + 'static)| {
                use core::fmt::Write;
                writeln!(output, "{}{}", " ".repeat(*level * 2), e).unwrap();

                if let Some(e) = e.source() {
                    *level += 1;
                    print_tree(output, level, e);
                    *level -= 1;
                }
            };
            if let Some(multi) = e.downcast_ref::<wgc::error::MultiError>() {
                for e in multi.errors() {
                    print(e);
                }
            } else {
                print(e);
            }
        }

        print_tree(&mut output, &mut level, err);

        format!("Validation Error\n\nCaused by:\n{output}")
    }

    /// Borrows the `wgpu-hal` queue behind `queue`, if it belongs to backend `A`.
    ///
    /// # Safety
    /// See [`wgc::global::Global::queue_as_hal`].
    pub unsafe fn queue_as_hal<A: hal::Api>(
        &self,
        queue: &CoreQueue,
    ) -> Option<impl Deref<Target = A::Queue> + WasmNotSendSync> {
        unsafe { self.0.queue_as_hal::<A>(queue.id) }
    }
}
391
392fn map_buffer_copy_view(
393 view: crate::TexelCopyBufferInfo<'_>,
394) -> wgt::TexelCopyBufferInfo<wgc::id::BufferId> {
395 wgt::TexelCopyBufferInfo {
396 buffer: view.buffer.inner.as_core().id,
397 layout: view.layout,
398 }
399}
400
401fn map_texture_copy_view(
402 view: crate::TexelCopyTextureInfo<'_>,
403) -> wgt::TexelCopyTextureInfo<wgc::id::TextureId> {
404 wgt::TexelCopyTextureInfo {
405 texture: view.texture.inner.as_core().id,
406 mip_level: view.mip_level,
407 origin: view.origin,
408 aspect: view.aspect,
409 }
410}
411
412#[cfg_attr(not(webgl), expect(unused))]
413fn map_texture_tagged_copy_view(
414 view: crate::CopyExternalImageDestInfo<&api::Texture>,
415) -> wgt::CopyExternalImageDestInfo<wgc::id::TextureId> {
416 wgt::CopyExternalImageDestInfo {
417 texture: view.texture.inner.as_core().id,
418 mip_level: view.mip_level,
419 origin: view.origin,
420 aspect: view.aspect,
421 color_space: view.color_space,
422 premultiplied_alpha: view.premultiplied_alpha,
423 }
424}
425
426fn map_load_op<V: Copy>(load: &LoadOp<V>) -> LoadOp<Option<V>> {
427 match *load {
428 LoadOp::Clear(clear_value) => LoadOp::Clear(Some(clear_value)),
429 LoadOp::DontCare(token) => LoadOp::DontCare(token),
430 LoadOp::Load => LoadOp::Load,
431 }
432}
433
434fn map_pass_channel<V: Copy>(ops: Option<&Operations<V>>) -> wgc::command::PassChannel<Option<V>> {
435 match ops {
436 Some(&Operations { load, store }) => wgc::command::PassChannel {
437 load_op: Some(map_load_op(&load)),
438 store_op: Some(store),
439 read_only: false,
440 },
441 None => wgc::command::PassChannel {
442 load_op: None,
443 store_op: None,
444 read_only: true,
445 },
446 }
447}
448
/// Core-backend surface handle; tracks which device (if any) configured it.
#[derive(Debug)]
pub struct CoreSurface {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::SurfaceId,
    // Set on configure; used to relate presentation errors back to a device.
    configured_device: Mutex<Option<wgc::id::DeviceId>>,
    // Populated once the surface is configured against a device's error sink.
    error_sink: Mutex<Option<ErrorSink>>,
}

/// Core-backend adapter handle.
#[derive(Debug)]
pub struct CoreAdapter {
    pub(crate) context: ContextWgpuCore,
    pub(crate) id: wgc::id::AdapterId,
}

/// Core-backend device handle. The error sink is shared with the device's
/// queue and with resources created on this device.
#[derive(Debug)]
pub struct CoreDevice {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::DeviceId,
    error_sink: ErrorSink,
    // Cached at creation; consulted for feature-gated paths (e.g. binding arrays).
    features: Features,
}

/// Core-backend buffer handle.
#[derive(Debug)]
pub struct CoreBuffer {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BufferId,
    error_sink: ErrorSink,
}

/// Core-backend shader module handle, with the compilation diagnostics
/// captured at creation time.
#[derive(Debug)]
pub struct CoreShaderModule {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::ShaderModuleId,
    compilation_info: CompilationInfo,
}

/// Core-backend bind group layout handle.
#[derive(Debug)]
pub struct CoreBindGroupLayout {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BindGroupLayoutId,
}

/// Core-backend bind group handle.
#[derive(Debug)]
pub struct CoreBindGroup {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BindGroupId,
}

/// Core-backend texture handle.
#[derive(Debug)]
pub struct CoreTexture {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::TextureId,
    error_sink: ErrorSink,
}

/// Core-backend texture view handle.
#[derive(Debug)]
pub struct CoreTextureView {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::TextureViewId,
}

/// Core-backend external texture handle.
#[derive(Debug)]
pub struct CoreExternalTexture {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::ExternalTextureId,
}

/// Core-backend sampler handle.
#[derive(Debug)]
pub struct CoreSampler {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::SamplerId,
}

/// Core-backend query set handle.
#[derive(Debug)]
pub struct CoreQuerySet {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::QuerySetId,
}

/// Core-backend pipeline layout handle.
#[derive(Debug)]
pub struct CorePipelineLayout {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::PipelineLayoutId,
}

/// Core-backend pipeline cache handle.
#[derive(Debug)]
pub struct CorePipelineCache {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::PipelineCacheId,
}

/// Core-backend finished command buffer handle.
#[derive(Debug)]
pub struct CoreCommandBuffer {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::CommandBufferId,
}

/// Core-backend render bundle encoder; owns the in-progress encoder state.
#[derive(Debug)]
pub struct CoreRenderBundleEncoder {
    pub(crate) context: ContextWgpuCore,
    encoder: wgc::command::RenderBundleEncoder,
    // Identity used only for Eq/Ord/Hash (see the cmp proxy impls below).
    id: crate::cmp::Identifier,
}

/// Core-backend finished render bundle handle.
#[derive(Debug)]
pub struct CoreRenderBundle {
    context: ContextWgpuCore,
    id: wgc::id::RenderBundleId,
}

/// Core-backend queue handle; shares its error sink with the owning device.
#[derive(Debug)]
pub struct CoreQueue {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::QueueId,
    error_sink: ErrorSink,
}

/// Core-backend compute pipeline handle.
#[derive(Debug)]
pub struct CoreComputePipeline {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::ComputePipelineId,
    error_sink: ErrorSink,
}

/// Core-backend render pipeline handle.
#[derive(Debug)]
pub struct CoreRenderPipeline {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::RenderPipelineId,
    error_sink: ErrorSink,
}

/// Core-backend in-progress compute pass; owns the recorded pass state.
#[derive(Debug)]
pub struct CoreComputePass {
    pub(crate) context: ContextWgpuCore,
    pass: wgc::command::ComputePass,
    error_sink: ErrorSink,
    // Identity used only for Eq/Ord/Hash (see the cmp proxy impls below).
    id: crate::cmp::Identifier,
}

/// Core-backend in-progress render pass; owns the recorded pass state.
#[derive(Debug)]
pub struct CoreRenderPass {
    pub(crate) context: ContextWgpuCore,
    pass: wgc::command::RenderPass,
    error_sink: ErrorSink,
    // Identity used only for Eq/Ord/Hash (see the cmp proxy impls below).
    id: crate::cmp::Identifier,
}

/// Core-backend command encoder handle.
#[derive(Debug)]
pub struct CoreCommandEncoder {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::CommandEncoderId,
    error_sink: ErrorSink,
}

/// Core-backend bottom-level acceleration structure handle.
#[derive(Debug)]
pub struct CoreBlas {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BlasId,
    error_sink: ErrorSink,
}

/// Core-backend top-level acceleration structure handle.
#[derive(Debug)]
pub struct CoreTlas {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::TlasId,
}

/// Per-frame presentation bookkeeping returned alongside a surface texture.
#[derive(Debug)]
pub struct CoreSurfaceOutputDetail {
    context: ContextWgpuCore,
    surface_id: wgc::id::SurfaceId,
    error_sink: ErrorSink,
}
627
/// Shared, lockable error sink; one per device, shared by its queue and resources.
type ErrorSink = Arc<Mutex<ErrorSinkRaw>>;

/// One pushed error scope: the filter it was pushed with and the first error
/// (if any) captured while it was on the stack.
struct ErrorScope {
    error: Option<crate::Error>,
    filter: crate::ErrorFilter,
}

/// Mutable error-sink state: per-thread stacks of error scopes plus an
/// optional handler for errors no scope captures.
struct ErrorSinkRaw {
    scopes: HashMap<thread_id::ThreadId, Vec<ErrorScope>>,
    uncaptured_handler: Option<Arc<dyn crate::UncapturedErrorHandler>>,
}
639
640impl ErrorSinkRaw {
641 fn new() -> ErrorSinkRaw {
642 ErrorSinkRaw {
643 scopes: HashMap::new(),
644 uncaptured_handler: None,
645 }
646 }
647
648 #[track_caller]
658 #[must_use]
659 fn handle_error_or_return_handler(&mut self, err: crate::Error) -> Option<impl FnOnce()> {
660 let filter = match err {
661 crate::Error::OutOfMemory { .. } => crate::ErrorFilter::OutOfMemory,
662 crate::Error::Validation { .. } => crate::ErrorFilter::Validation,
663 crate::Error::Internal { .. } => crate::ErrorFilter::Internal,
664 };
665 let thread_id = thread_id::ThreadId::current();
666 let scopes = self.scopes.entry(thread_id).or_default();
667 match scopes.iter_mut().rev().find(|scope| scope.filter == filter) {
668 Some(scope) => {
669 if scope.error.is_none() {
670 scope.error = Some(err);
671 }
672 None
673 }
674 None => {
675 if let Some(custom_handler) = &self.uncaptured_handler {
676 let custom_handler = Arc::clone(custom_handler);
677 Some(move || (custom_handler)(err))
678 } else {
679 default_error_handler(err)
681 }
682 }
683 }
684 }
685}
686
687impl fmt::Debug for ErrorSinkRaw {
688 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
689 write!(f, "ErrorSink")
690 }
691}
692
/// Fallback for uncaptured errors when no handler is registered: logs a note
/// and panics with the error's display text.
#[track_caller]
fn default_error_handler(err: crate::Error) -> ! {
    log::error!("Handling wgpu errors as fatal by default");
    panic!("wgpu error: {err}\n");
}
698
/// Converts a shader-module creation error into WebGPU-style compilation info.
///
/// Front-end parse/validation errors carry their own message lists (via the
/// feature-gated `From` impls); device loss and id-generation failures yield
/// an empty message list; anything else becomes a single error message with
/// the error's display text and no source location.
impl From<CreateShaderModuleError> for CompilationInfo {
    fn from(value: CreateShaderModuleError) -> Self {
        match value {
            #[cfg(feature = "wgsl")]
            CreateShaderModuleError::Parsing(v) => v.into(),
            #[cfg(feature = "glsl")]
            CreateShaderModuleError::ParsingGlsl(v) => v.into(),
            #[cfg(feature = "spirv")]
            CreateShaderModuleError::ParsingSpirV(v) => v.into(),
            CreateShaderModuleError::Validation(v) => v.into(),
            CreateShaderModuleError::Device(_) | CreateShaderModuleError::Generation => {
                // Not compilation problems per se — report no messages.
                CompilationInfo {
                    messages: Vec::new(),
                }
            }
            _ => CompilationInfo {
                messages: vec![CompilationMessage {
                    message: value.to_string(),
                    message_type: CompilationMessageType::Error,
                    location: None,
                }],
            },
        }
    }
}
727
/// Staging buffer handed out by `Queue::write_buffer_with`, paired with its
/// CPU-visible mapping.
#[derive(Debug)]
pub struct CoreQueueWriteBuffer {
    buffer_id: wgc::id::StagingBufferId,
    mapping: CoreBufferMappedRange,
}

/// A raw view into mapped buffer memory: base pointer plus length in bytes.
#[derive(Debug)]
pub struct CoreBufferMappedRange {
    ptr: NonNull<u8>,
    size: usize,
}

// SAFETY: NOTE(review) — `NonNull` is not Send/Sync by default; these impls
// assert the mapped memory may be accessed from other threads. Presumably the
// `send_sync` cfg is only set when the rest of the stack guarantees this —
// confirm against the crate's cfg setup.
#[cfg(send_sync)]
unsafe impl Send for CoreBufferMappedRange {}
#[cfg(send_sync)]
unsafe impl Sync for CoreBufferMappedRange {}
744
impl Drop for CoreBufferMappedRange {
    fn drop(&mut self) {
        // Intentionally empty: the pointed-to memory is owned elsewhere
        // (presumably by wgpu-core's mapping/staging machinery), so there is
        // nothing to free here. NOTE(review): confirm the empty impl is still
        // required rather than vestigial.
    }
}
751
// Identity-based Eq/Ord/Hash for every handle type: the context compares by
// the address of its inner `Arc`, and each resource wrapper delegates to the
// field named after `=>` (its core id, surface id, or mapping pointer).
crate::cmp::impl_eq_ord_hash_arc_address!(ContextWgpuCore => .0);
crate::cmp::impl_eq_ord_hash_proxy!(CoreAdapter => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreDevice => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreQueue => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreShaderModule => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBindGroupLayout => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBindGroup => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreTextureView => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreSampler => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBuffer => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreTexture => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreExternalTexture => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBlas => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreTlas => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreQuerySet => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CorePipelineLayout => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderPipeline => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreComputePipeline => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CorePipelineCache => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreCommandEncoder => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreComputePass => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderPass => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreCommandBuffer => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderBundleEncoder => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderBundle => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreSurface => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreSurfaceOutputDetail => .surface_id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreQueueWriteBuffer => .mapping.ptr);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBufferMappedRange => .ptr);
781
impl dispatch::InstanceInterface for ContextWgpuCore {
    /// Creates a fresh `wgpu-core` global from the public instance descriptor.
    fn new(desc: &wgt::InstanceDescriptor) -> Self
    where
        Self: Sized,
    {
        Self(Arc::new(wgc::global::Global::new("wgpu", desc)))
    }

    /// Creates a surface for one of the raw platform targets.
    ///
    /// Each target variant maps to the matching backend-specific constructor
    /// on the global; unsupported variants are compiled out by cfg.
    ///
    /// # Safety
    /// The raw handles in `target` must be valid for the lifetime of the
    /// surface; see the respective `instance_create_surface*` methods.
    unsafe fn create_surface(
        &self,
        target: crate::api::SurfaceTargetUnsafe,
    ) -> Result<dispatch::DispatchSurface, crate::CreateSurfaceError> {
        let id = match target {
            SurfaceTargetUnsafe::RawHandle {
                raw_display_handle,
                raw_window_handle,
            } => unsafe {
                self.0
                    .instance_create_surface(raw_display_handle, raw_window_handle, None)
            },

            #[cfg(all(unix, not(target_vendor = "apple"), not(target_family = "wasm")))]
            SurfaceTargetUnsafe::Drm {
                fd,
                plane,
                connector_id,
                width,
                height,
                refresh_rate,
            } => unsafe {
                self.0.instance_create_surface_from_drm(
                    fd,
                    plane,
                    connector_id,
                    width,
                    height,
                    refresh_rate,
                    None,
                )
            },

            #[cfg(metal)]
            SurfaceTargetUnsafe::CoreAnimationLayer(layer) => unsafe {
                self.0.instance_create_surface_metal(layer, None)
            },

            #[cfg(dx12)]
            SurfaceTargetUnsafe::CompositionVisual(visual) => unsafe {
                self.0.instance_create_surface_from_visual(visual, None)
            },

            #[cfg(dx12)]
            SurfaceTargetUnsafe::SurfaceHandle(surface_handle) => unsafe {
                self.0
                    .instance_create_surface_from_surface_handle(surface_handle, None)
            },

            #[cfg(dx12)]
            SurfaceTargetUnsafe::SwapChainPanel(swap_chain_panel) => unsafe {
                self.0
                    .instance_create_surface_from_swap_chain_panel(swap_chain_panel, None)
            },
        }?;

        // Device association and error sink are filled in later, on configure.
        Ok(CoreSurface {
            context: self.clone(),
            id,
            configured_device: Mutex::default(),
            error_sink: Mutex::default(),
        }
        .into())
    }

    /// Requests an adapter matching `options` across all backends.
    ///
    /// Adapter selection in wgpu-core is synchronous; the returned future is
    /// already resolved.
    fn request_adapter(
        &self,
        options: &crate::api::RequestAdapterOptions<'_, '_>,
    ) -> Pin<Box<dyn dispatch::RequestAdapterFuture>> {
        let id = self.0.request_adapter(
            &wgc::instance::RequestAdapterOptions {
                power_preference: options.power_preference,
                force_fallback_adapter: options.force_fallback_adapter,
                compatible_surface: options
                    .compatible_surface
                    .map(|surface| surface.inner.as_core().id),
            },
            wgt::Backends::all(),
            None,
        );
        let adapter = id.map(|id| {
            let core = CoreAdapter {
                context: self.clone(),
                id,
            };
            let generic: dispatch::DispatchAdapter = core.into();
            generic
        });
        Box::pin(ready(adapter))
    }

    /// Polls every device on every backend; returns whether all queues are empty.
    /// Any polling error is treated as fatal (panics).
    fn poll_all_devices(&self, force_wait: bool) -> bool {
        match self.0.poll_all_devices(force_wait) {
            Ok(all_queue_empty) => all_queue_empty,
            Err(err) => self.handle_error_fatal(err, "Instance::poll_all_devices"),
        }
    }

    /// Reports which WGSL language extensions naga implements, translated to
    /// the public `WgslLanguageFeatures` flags. Exhaustive match ensures new
    /// extensions cause a compile error here until mapped.
    #[cfg(feature = "wgsl")]
    fn wgsl_language_features(&self) -> crate::WgslLanguageFeatures {
        use wgc::naga::front::wgsl::ImplementedLanguageExtension;
        ImplementedLanguageExtension::all().iter().copied().fold(
            crate::WgslLanguageFeatures::empty(),
            |acc, wle| {
                acc | match wle {
                    ImplementedLanguageExtension::ReadOnlyAndReadWriteStorageTextures => {
                        crate::WgslLanguageFeatures::ReadOnlyAndReadWriteStorageTextures
                    }
                    ImplementedLanguageExtension::Packed4x8IntegerDotProduct => {
                        crate::WgslLanguageFeatures::Packed4x8IntegerDotProduct
                    }
                    ImplementedLanguageExtension::PointerCompositeAccess => {
                        crate::WgslLanguageFeatures::PointerCompositeAccess
                    }
                }
            },
        )
    }

    /// Enumerates adapters on `backends`, wrapping each id as a dispatch
    /// adapter. Synchronous; the returned future is already resolved.
    fn enumerate_adapters(
        &self,
        backends: crate::Backends,
    ) -> Pin<Box<dyn dispatch::EnumerateAdapterFuture>> {
        let adapters: Vec<DispatchAdapter> = self
            .enumerate_adapters(backends)
            .into_iter()
            .map(|adapter| {
                let core = crate::backend::wgpu_core::CoreAdapter {
                    context: self.clone(),
                    id: adapter,
                };
                core.into()
            })
            .collect();
        Box::pin(ready(adapters))
    }
}
927
impl dispatch::AdapterInterface for CoreAdapter {
    /// Opens a device (and its queue) on this adapter.
    ///
    /// Device creation in wgpu-core is synchronous; the returned future is
    /// already resolved. The device and queue share one error sink.
    fn request_device(
        &self,
        desc: &crate::DeviceDescriptor<'_>,
    ) -> Pin<Box<dyn dispatch::RequestDeviceFuture>> {
        // Tracing was removed upstream; warn loudly instead of silently ignoring it.
        if !matches!(desc.trace, wgt::Trace::Off) {
            log::error!(
                "
                Feature 'trace' has been removed temporarily; \
                see https://github.com/gfx-rs/wgpu/issues/5974. \
                The `trace` parameter will have no effect."
            );
        }

        let res = self.context.0.adapter_request_device(
            self.id,
            &desc.map_label(|l| l.map(Borrowed)),
            None,
            None,
        );
        let (device_id, queue_id) = match res {
            Ok(ids) => ids,
            Err(err) => {
                return Box::pin(ready(Err(err.into())));
            }
        };
        let error_sink = Arc::new(Mutex::new(ErrorSinkRaw::new()));
        let device = CoreDevice {
            context: self.context.clone(),
            id: device_id,
            error_sink: error_sink.clone(),
            features: desc.required_features,
        };
        let queue = CoreQueue {
            context: self.context.clone(),
            id: queue_id,
            error_sink,
        };
        Box::pin(ready(Ok((device.into(), queue.into()))))
    }

    /// Whether this adapter can present to the given surface.
    fn is_surface_supported(&self, surface: &dispatch::DispatchSurface) -> bool {
        let surface = surface.as_core();

        self.context
            .0
            .adapter_is_surface_supported(self.id, surface.id)
    }

    /// The feature set this adapter supports.
    fn features(&self) -> crate::Features {
        self.context.0.adapter_features(self.id)
    }

    /// The resource limits this adapter supports.
    fn limits(&self) -> crate::Limits {
        self.context.0.adapter_limits(self.id)
    }

    /// Downlevel (compatibility) capabilities of this adapter.
    fn downlevel_capabilities(&self) -> crate::DownlevelCapabilities {
        self.context.0.adapter_downlevel_capabilities(self.id)
    }

    /// Identifying information (name, vendor, backend, …) for this adapter.
    fn get_info(&self) -> crate::AdapterInfo {
        self.context.0.adapter_get_info(self.id)
    }

    /// Per-format capability flags for `format` on this adapter.
    fn get_texture_format_features(
        &self,
        format: crate::TextureFormat,
    ) -> crate::TextureFormatFeatures {
        self.context
            .0
            .adapter_get_texture_format_features(self.id, format)
    }

    /// A timestamp from the adapter's presentation clock.
    fn get_presentation_timestamp(&self) -> crate::PresentationTimestamp {
        self.context.0.adapter_get_presentation_timestamp(self.id)
    }
}
1006
impl Drop for CoreAdapter {
    fn drop(&mut self) {
        // Release this adapter's entry in the global registry.
        self.context.0.adapter_drop(self.id)
    }
}
1012
1013impl dispatch::DeviceInterface for CoreDevice {
1014 fn features(&self) -> crate::Features {
1015 self.context.0.device_features(self.id)
1016 }
1017
1018 fn limits(&self) -> crate::Limits {
1019 self.context.0.device_limits(self.id)
1020 }
1021
1022 #[cfg_attr(
1024 not(any(
1025 feature = "spirv",
1026 feature = "glsl",
1027 feature = "wgsl",
1028 feature = "naga-ir"
1029 )),
1030 expect(unused)
1031 )]
1032 fn create_shader_module(
1033 &self,
1034 desc: crate::ShaderModuleDescriptor<'_>,
1035 shader_bound_checks: wgt::ShaderRuntimeChecks,
1036 ) -> dispatch::DispatchShaderModule {
1037 let descriptor = wgc::pipeline::ShaderModuleDescriptor {
1038 label: desc.label.map(Borrowed),
1039 runtime_checks: shader_bound_checks,
1040 };
1041 let source = match desc.source {
1042 #[cfg(feature = "spirv")]
1043 ShaderSource::SpirV(ref spv) => {
1044 let options = naga::front::spv::Options {
1046 adjust_coordinate_space: false, strict_capabilities: true,
1048 block_ctx_dump_prefix: None,
1049 };
1050 wgc::pipeline::ShaderModuleSource::SpirV(Borrowed(spv), options)
1051 }
1052 #[cfg(feature = "glsl")]
1053 ShaderSource::Glsl {
1054 ref shader,
1055 stage,
1056 defines,
1057 } => {
1058 let options = naga::front::glsl::Options {
1059 stage,
1060 defines: defines
1061 .iter()
1062 .map(|&(key, value)| (String::from(key), String::from(value)))
1063 .collect(),
1064 };
1065 wgc::pipeline::ShaderModuleSource::Glsl(Borrowed(shader), options)
1066 }
1067 #[cfg(feature = "wgsl")]
1068 ShaderSource::Wgsl(ref code) => wgc::pipeline::ShaderModuleSource::Wgsl(Borrowed(code)),
1069 #[cfg(feature = "naga-ir")]
1070 ShaderSource::Naga(module) => wgc::pipeline::ShaderModuleSource::Naga(module),
1071 ShaderSource::Dummy(_) => panic!("found `ShaderSource::Dummy`"),
1072 };
1073 let (id, error) =
1074 self.context
1075 .0
1076 .device_create_shader_module(self.id, &descriptor, source, None);
1077 let compilation_info = match error {
1078 Some(cause) => {
1079 self.context.handle_error(
1080 &self.error_sink,
1081 cause.clone(),
1082 desc.label,
1083 "Device::create_shader_module",
1084 );
1085 CompilationInfo::from(cause)
1086 }
1087 None => CompilationInfo { messages: vec![] },
1088 };
1089
1090 CoreShaderModule {
1091 context: self.context.clone(),
1092 id,
1093 compilation_info,
1094 }
1095 .into()
1096 }
1097
1098 unsafe fn create_shader_module_passthrough(
1099 &self,
1100 desc: &crate::ShaderModuleDescriptorPassthrough<'_>,
1101 ) -> dispatch::DispatchShaderModule {
1102 let desc = desc.map_label(|l| l.map(Cow::from));
1103 let (id, error) = unsafe {
1104 self.context
1105 .0
1106 .device_create_shader_module_passthrough(self.id, &desc, None)
1107 };
1108
1109 let compilation_info = match error {
1110 Some(cause) => {
1111 self.context.handle_error(
1112 &self.error_sink,
1113 cause.clone(),
1114 desc.label.as_deref(),
1115 "Device::create_shader_module_passthrough",
1116 );
1117 CompilationInfo::from(cause)
1118 }
1119 None => CompilationInfo { messages: vec![] },
1120 };
1121
1122 CoreShaderModule {
1123 context: self.context.clone(),
1124 id,
1125 compilation_info,
1126 }
1127 .into()
1128 }
1129
1130 fn create_bind_group_layout(
1131 &self,
1132 desc: &crate::BindGroupLayoutDescriptor<'_>,
1133 ) -> dispatch::DispatchBindGroupLayout {
1134 let descriptor = wgc::binding_model::BindGroupLayoutDescriptor {
1135 label: desc.label.map(Borrowed),
1136 entries: Borrowed(desc.entries),
1137 };
1138 let (id, error) =
1139 self.context
1140 .0
1141 .device_create_bind_group_layout(self.id, &descriptor, None);
1142 if let Some(cause) = error {
1143 self.context.handle_error(
1144 &self.error_sink,
1145 cause,
1146 desc.label,
1147 "Device::create_bind_group_layout",
1148 );
1149 }
1150 CoreBindGroupLayout {
1151 context: self.context.clone(),
1152 id,
1153 }
1154 .into()
1155 }
1156
    /// Creates a bind group, translating each wgpu-rs `BindingResource` into
    /// its wgpu-core equivalent.
    fn create_bind_group(
        &self,
        desc: &crate::BindGroupDescriptor<'_>,
    ) -> dispatch::DispatchBindGroup {
        use wgc::binding_model as bm;

        // wgpu-core wants arrayed bindings as contiguous slices of core ids,
        // while the caller hands us slices of wrapper types. First pass:
        // flatten every array resource of each kind into one backing Vec.
        // (Gated on the feature so the common case allocates nothing.)
        let mut arrayed_texture_views = Vec::new();
        let mut arrayed_samplers = Vec::new();
        if self.features.contains(Features::TEXTURE_BINDING_ARRAY) {
            for entry in desc.entries.iter() {
                if let BindingResource::TextureViewArray(array) = entry.resource {
                    arrayed_texture_views.extend(array.iter().map(|view| view.inner.as_core().id));
                }
                if let BindingResource::SamplerArray(array) = entry.resource {
                    arrayed_samplers.extend(array.iter().map(|sampler| sampler.inner.as_core().id));
                }
            }
        }
        // Cursors over the backing Vecs; each arrayed entry below consumes its
        // own prefix. Entry iteration order here matches the first pass, so
        // the carved sub-slices line up with the right entries.
        let mut remaining_arrayed_texture_views = &arrayed_texture_views[..];
        let mut remaining_arrayed_samplers = &arrayed_samplers[..];

        // Same flatten-then-carve scheme for buffer binding arrays.
        let mut arrayed_buffer_bindings = Vec::new();
        if self.features.contains(Features::BUFFER_BINDING_ARRAY) {
            for entry in desc.entries.iter() {
                if let BindingResource::BufferArray(array) = entry.resource {
                    arrayed_buffer_bindings.extend(array.iter().map(|binding| bm::BufferBinding {
                        buffer: binding.buffer.inner.as_core().id,
                        offset: binding.offset,
                        size: binding.size,
                    }));
                }
            }
        }
        let mut remaining_arrayed_buffer_bindings = &arrayed_buffer_bindings[..];

        // Second pass: build the core entry list, carving per-entry sub-slices
        // out of the flattened Vecs for the arrayed variants.
        let entries = desc
            .entries
            .iter()
            .map(|entry| bm::BindGroupEntry {
                binding: entry.binding,
                resource: match entry.resource {
                    BindingResource::Buffer(BufferBinding {
                        buffer,
                        offset,
                        size,
                    }) => bm::BindingResource::Buffer(bm::BufferBinding {
                        buffer: buffer.inner.as_core().id,
                        offset,
                        size,
                    }),
                    BindingResource::BufferArray(array) => {
                        // Take this entry's prefix, advance the cursor.
                        let slice = &remaining_arrayed_buffer_bindings[..array.len()];
                        remaining_arrayed_buffer_bindings =
                            &remaining_arrayed_buffer_bindings[array.len()..];
                        bm::BindingResource::BufferArray(Borrowed(slice))
                    }
                    BindingResource::Sampler(sampler) => {
                        bm::BindingResource::Sampler(sampler.inner.as_core().id)
                    }
                    BindingResource::SamplerArray(array) => {
                        let slice = &remaining_arrayed_samplers[..array.len()];
                        remaining_arrayed_samplers = &remaining_arrayed_samplers[array.len()..];
                        bm::BindingResource::SamplerArray(Borrowed(slice))
                    }
                    BindingResource::TextureView(texture_view) => {
                        bm::BindingResource::TextureView(texture_view.inner.as_core().id)
                    }
                    BindingResource::TextureViewArray(array) => {
                        let slice = &remaining_arrayed_texture_views[..array.len()];
                        remaining_arrayed_texture_views =
                            &remaining_arrayed_texture_views[array.len()..];
                        bm::BindingResource::TextureViewArray(Borrowed(slice))
                    }
                    BindingResource::AccelerationStructure(acceleration_structure) => {
                        bm::BindingResource::AccelerationStructure(
                            acceleration_structure.inner.as_core().id,
                        )
                    }
                    BindingResource::ExternalTexture(external_texture) => {
                        bm::BindingResource::ExternalTexture(external_texture.inner.as_core().id)
                    }
                },
            })
            .collect::<Vec<_>>();
        let descriptor = bm::BindGroupDescriptor {
            label: desc.label.as_ref().map(|label| Borrowed(&label[..])),
            layout: desc.layout.inner.as_core().id,
            entries: Borrowed(&entries),
        };

        let (id, error) = self
            .context
            .0
            .device_create_bind_group(self.id, &descriptor, None);
        // On failure the error goes through the sink; the id still names an
        // (invalid) resource so the wrapper can be returned regardless.
        if let Some(cause) = error {
            self.context.handle_error(
                &self.error_sink,
                cause,
                desc.label,
                "Device::create_bind_group",
            );
        }
        CoreBindGroup {
            context: self.context.clone(),
            id,
        }
        .into()
    }
1267
1268 fn create_pipeline_layout(
1269 &self,
1270 desc: &crate::PipelineLayoutDescriptor<'_>,
1271 ) -> dispatch::DispatchPipelineLayout {
1272 assert!(
1275 desc.bind_group_layouts.len() <= wgc::MAX_BIND_GROUPS,
1276 "Bind group layout count {} exceeds device bind group limit {}",
1277 desc.bind_group_layouts.len(),
1278 wgc::MAX_BIND_GROUPS
1279 );
1280
1281 let temp_layouts = desc
1282 .bind_group_layouts
1283 .iter()
1284 .map(|bgl| bgl.inner.as_core().id)
1285 .collect::<ArrayVec<_, { wgc::MAX_BIND_GROUPS }>>();
1286 let descriptor = wgc::binding_model::PipelineLayoutDescriptor {
1287 label: desc.label.map(Borrowed),
1288 bind_group_layouts: Borrowed(&temp_layouts),
1289 immediates_ranges: Borrowed(desc.immediates_ranges),
1290 };
1291
1292 let (id, error) = self
1293 .context
1294 .0
1295 .device_create_pipeline_layout(self.id, &descriptor, None);
1296 if let Some(cause) = error {
1297 self.context.handle_error(
1298 &self.error_sink,
1299 cause,
1300 desc.label,
1301 "Device::create_pipeline_layout",
1302 );
1303 }
1304 CorePipelineLayout {
1305 context: self.context.clone(),
1306 id,
1307 }
1308 .into()
1309 }
1310
    /// Creates a render pipeline, translating the wgpu-rs descriptor into the
    /// wgpu-core `RenderPipelineDescriptor` form.
    fn create_render_pipeline(
        &self,
        desc: &crate::RenderPipelineDescriptor<'_>,
    ) -> dispatch::DispatchRenderPipeline {
        use wgc::pipeline as pipe;

        // Vertex buffer layouts; capacity mirrors wgpu-core's hard limit.
        let vertex_buffers: ArrayVec<_, { wgc::MAX_VERTEX_BUFFERS }> = desc
            .vertex
            .buffers
            .iter()
            .map(|vbuf| pipe::VertexBufferLayout {
                array_stride: vbuf.array_stride,
                step_mode: vbuf.step_mode,
                attributes: Borrowed(vbuf.attributes),
            })
            .collect();

        // Pipeline-overridable constants for the vertex stage; wgpu-core wants
        // owned `String` keys.
        let vert_constants = desc
            .vertex
            .compilation_options
            .constants
            .iter()
            .map(|&(key, value)| (String::from(key), value))
            .collect();

        let descriptor = pipe::RenderPipelineDescriptor {
            label: desc.label.map(Borrowed),
            layout: desc.layout.map(|layout| layout.inner.as_core().id),
            vertex: pipe::VertexState {
                stage: pipe::ProgrammableStageDescriptor {
                    module: desc.vertex.module.inner.as_core().id,
                    entry_point: desc.vertex.entry_point.map(Borrowed),
                    constants: vert_constants,
                    zero_initialize_workgroup_memory: desc
                        .vertex
                        .compilation_options
                        .zero_initialize_workgroup_memory,
                },
                buffers: Borrowed(&vertex_buffers),
            },
            primitive: desc.primitive,
            depth_stencil: desc.depth_stencil.clone(),
            multisample: desc.multisample,
            // The fragment stage is optional (e.g. depth-only pipelines).
            fragment: desc.fragment.as_ref().map(|frag| {
                let frag_constants = frag
                    .compilation_options
                    .constants
                    .iter()
                    .map(|&(key, value)| (String::from(key), value))
                    .collect();
                pipe::FragmentState {
                    stage: pipe::ProgrammableStageDescriptor {
                        module: frag.module.inner.as_core().id,
                        entry_point: frag.entry_point.map(Borrowed),
                        constants: frag_constants,
                        zero_initialize_workgroup_memory: frag
                            .compilation_options
                            .zero_initialize_workgroup_memory,
                    },
                    targets: Borrowed(frag.targets),
                }
            }),
            multiview_mask: desc.multiview_mask,
            cache: desc.cache.map(|cache| cache.inner.as_core().id),
        };

        let (id, error) = self
            .context
            .0
            .device_create_render_pipeline(self.id, &descriptor, None);
        if let Some(cause) = error {
            // `Internal` means shader translation failed inside wgpu itself
            // (a wgpu bug, not a user error) — log extra detail first.
            if let wgc::pipeline::CreateRenderPipelineError::Internal { stage, ref error } = cause {
                log::error!("Shader translation error for stage {stage:?}: {error}");
                log::error!("Please report it to https://github.com/gfx-rs/wgpu");
            }
            self.context.handle_error(
                &self.error_sink,
                cause,
                desc.label,
                "Device::create_render_pipeline",
            );
        }
        CoreRenderPipeline {
            context: self.context.clone(),
            id,
            error_sink: Arc::clone(&self.error_sink),
        }
        .into()
    }
1400
1401 fn create_mesh_pipeline(
1402 &self,
1403 desc: &crate::MeshPipelineDescriptor<'_>,
1404 ) -> dispatch::DispatchRenderPipeline {
1405 use wgc::pipeline as pipe;
1406
1407 let mesh_constants = desc
1408 .mesh
1409 .compilation_options
1410 .constants
1411 .iter()
1412 .map(|&(key, value)| (String::from(key), value))
1413 .collect();
1414 let descriptor = pipe::MeshPipelineDescriptor {
1415 label: desc.label.map(Borrowed),
1416 task: desc.task.as_ref().map(|task| {
1417 let task_constants = task
1418 .compilation_options
1419 .constants
1420 .iter()
1421 .map(|&(key, value)| (String::from(key), value))
1422 .collect();
1423 pipe::TaskState {
1424 stage: pipe::ProgrammableStageDescriptor {
1425 module: task.module.inner.as_core().id,
1426 entry_point: task.entry_point.map(Borrowed),
1427 constants: task_constants,
1428 zero_initialize_workgroup_memory: desc
1429 .mesh
1430 .compilation_options
1431 .zero_initialize_workgroup_memory,
1432 },
1433 }
1434 }),
1435 mesh: pipe::MeshState {
1436 stage: pipe::ProgrammableStageDescriptor {
1437 module: desc.mesh.module.inner.as_core().id,
1438 entry_point: desc.mesh.entry_point.map(Borrowed),
1439 constants: mesh_constants,
1440 zero_initialize_workgroup_memory: desc
1441 .mesh
1442 .compilation_options
1443 .zero_initialize_workgroup_memory,
1444 },
1445 },
1446 layout: desc.layout.map(|layout| layout.inner.as_core().id),
1447 primitive: desc.primitive,
1448 depth_stencil: desc.depth_stencil.clone(),
1449 multisample: desc.multisample,
1450 fragment: desc.fragment.as_ref().map(|frag| {
1451 let frag_constants = frag
1452 .compilation_options
1453 .constants
1454 .iter()
1455 .map(|&(key, value)| (String::from(key), value))
1456 .collect();
1457 pipe::FragmentState {
1458 stage: pipe::ProgrammableStageDescriptor {
1459 module: frag.module.inner.as_core().id,
1460 entry_point: frag.entry_point.map(Borrowed),
1461 constants: frag_constants,
1462 zero_initialize_workgroup_memory: frag
1463 .compilation_options
1464 .zero_initialize_workgroup_memory,
1465 },
1466 targets: Borrowed(frag.targets),
1467 }
1468 }),
1469 multiview: desc.multiview,
1470 cache: desc.cache.map(|cache| cache.inner.as_core().id),
1471 };
1472
1473 let (id, error) = self
1474 .context
1475 .0
1476 .device_create_mesh_pipeline(self.id, &descriptor, None);
1477 if let Some(cause) = error {
1478 if let wgc::pipeline::CreateRenderPipelineError::Internal { stage, ref error } = cause {
1479 log::error!("Shader translation error for stage {stage:?}: {error}");
1480 log::error!("Please report it to https://github.com/gfx-rs/wgpu");
1481 }
1482 self.context.handle_error(
1483 &self.error_sink,
1484 cause,
1485 desc.label,
1486 "Device::create_render_pipeline",
1487 );
1488 }
1489 CoreRenderPipeline {
1490 context: self.context.clone(),
1491 id,
1492 error_sink: Arc::clone(&self.error_sink),
1493 }
1494 .into()
1495 }
1496
1497 fn create_compute_pipeline(
1498 &self,
1499 desc: &crate::ComputePipelineDescriptor<'_>,
1500 ) -> dispatch::DispatchComputePipeline {
1501 use wgc::pipeline as pipe;
1502
1503 let constants = desc
1504 .compilation_options
1505 .constants
1506 .iter()
1507 .map(|&(key, value)| (String::from(key), value))
1508 .collect();
1509
1510 let descriptor = pipe::ComputePipelineDescriptor {
1511 label: desc.label.map(Borrowed),
1512 layout: desc.layout.map(|pll| pll.inner.as_core().id),
1513 stage: pipe::ProgrammableStageDescriptor {
1514 module: desc.module.inner.as_core().id,
1515 entry_point: desc.entry_point.map(Borrowed),
1516 constants,
1517 zero_initialize_workgroup_memory: desc
1518 .compilation_options
1519 .zero_initialize_workgroup_memory,
1520 },
1521 cache: desc.cache.map(|cache| cache.inner.as_core().id),
1522 };
1523
1524 let (id, error) = self
1525 .context
1526 .0
1527 .device_create_compute_pipeline(self.id, &descriptor, None);
1528 if let Some(cause) = error {
1529 if let wgc::pipeline::CreateComputePipelineError::Internal(ref error) = cause {
1530 log::error!(
1531 "Shader translation error for stage {:?}: {}",
1532 wgt::ShaderStages::COMPUTE,
1533 error
1534 );
1535 log::error!("Please report it to https://github.com/gfx-rs/wgpu");
1536 }
1537 self.context.handle_error(
1538 &self.error_sink,
1539 cause,
1540 desc.label,
1541 "Device::create_compute_pipeline",
1542 );
1543 }
1544 CoreComputePipeline {
1545 context: self.context.clone(),
1546 id,
1547 error_sink: Arc::clone(&self.error_sink),
1548 }
1549 .into()
1550 }
1551
1552 unsafe fn create_pipeline_cache(
1553 &self,
1554 desc: &crate::PipelineCacheDescriptor<'_>,
1555 ) -> dispatch::DispatchPipelineCache {
1556 use wgc::pipeline as pipe;
1557
1558 let descriptor = pipe::PipelineCacheDescriptor {
1559 label: desc.label.map(Borrowed),
1560 data: desc.data.map(Borrowed),
1561 fallback: desc.fallback,
1562 };
1563 let (id, error) = unsafe {
1564 self.context
1565 .0
1566 .device_create_pipeline_cache(self.id, &descriptor, None)
1567 };
1568 if let Some(cause) = error {
1569 self.context.handle_error(
1570 &self.error_sink,
1571 cause,
1572 desc.label,
1573 "Device::device_create_pipeline_cache_init",
1574 );
1575 }
1576 CorePipelineCache {
1577 context: self.context.clone(),
1578 id,
1579 }
1580 .into()
1581 }
1582
1583 fn create_buffer(&self, desc: &crate::BufferDescriptor<'_>) -> dispatch::DispatchBuffer {
1584 let (id, error) = self.context.0.device_create_buffer(
1585 self.id,
1586 &desc.map_label(|l| l.map(Borrowed)),
1587 None,
1588 );
1589 if let Some(cause) = error {
1590 self.context
1591 .handle_error(&self.error_sink, cause, desc.label, "Device::create_buffer");
1592 }
1593
1594 CoreBuffer {
1595 context: self.context.clone(),
1596 id,
1597 error_sink: Arc::clone(&self.error_sink),
1598 }
1599 .into()
1600 }
1601
1602 fn create_texture(&self, desc: &crate::TextureDescriptor<'_>) -> dispatch::DispatchTexture {
1603 let wgt_desc = desc.map_label_and_view_formats(|l| l.map(Borrowed), |v| v.to_vec());
1604 let (id, error) = self
1605 .context
1606 .0
1607 .device_create_texture(self.id, &wgt_desc, None);
1608 if let Some(cause) = error {
1609 self.context.handle_error(
1610 &self.error_sink,
1611 cause,
1612 desc.label,
1613 "Device::create_texture",
1614 );
1615 }
1616
1617 CoreTexture {
1618 context: self.context.clone(),
1619 id,
1620 error_sink: Arc::clone(&self.error_sink),
1621 }
1622 .into()
1623 }
1624
1625 fn create_external_texture(
1626 &self,
1627 desc: &crate::ExternalTextureDescriptor<'_>,
1628 planes: &[&crate::TextureView],
1629 ) -> dispatch::DispatchExternalTexture {
1630 let wgt_desc = desc.map_label(|l| l.map(Borrowed));
1631 let planes = planes
1632 .iter()
1633 .map(|plane| plane.inner.as_core().id)
1634 .collect::<Vec<_>>();
1635 let (id, error) = self
1636 .context
1637 .0
1638 .device_create_external_texture(self.id, &wgt_desc, &planes, None);
1639 if let Some(cause) = error {
1640 self.context.handle_error(
1641 &self.error_sink,
1642 cause,
1643 desc.label,
1644 "Device::create_external_texture",
1645 );
1646 }
1647
1648 CoreExternalTexture {
1649 context: self.context.clone(),
1650 id,
1651 }
1652 .into()
1653 }
1654
1655 fn create_blas(
1656 &self,
1657 desc: &crate::CreateBlasDescriptor<'_>,
1658 sizes: crate::BlasGeometrySizeDescriptors,
1659 ) -> (Option<u64>, dispatch::DispatchBlas) {
1660 let global = &self.context.0;
1661 let (id, handle, error) =
1662 global.device_create_blas(self.id, &desc.map_label(|l| l.map(Borrowed)), sizes, None);
1663 if let Some(cause) = error {
1664 self.context
1665 .handle_error(&self.error_sink, cause, desc.label, "Device::create_blas");
1666 }
1667 (
1668 handle,
1669 CoreBlas {
1670 context: self.context.clone(),
1671 id,
1672 error_sink: Arc::clone(&self.error_sink),
1673 }
1674 .into(),
1675 )
1676 }
1677
1678 fn create_tlas(&self, desc: &crate::CreateTlasDescriptor<'_>) -> dispatch::DispatchTlas {
1679 let global = &self.context.0;
1680 let (id, error) =
1681 global.device_create_tlas(self.id, &desc.map_label(|l| l.map(Borrowed)), None);
1682 if let Some(cause) = error {
1683 self.context
1684 .handle_error(&self.error_sink, cause, desc.label, "Device::create_tlas");
1685 }
1686 CoreTlas {
1687 context: self.context.clone(),
1688 id,
1689 }
1691 .into()
1692 }
1693
1694 fn create_sampler(&self, desc: &crate::SamplerDescriptor<'_>) -> dispatch::DispatchSampler {
1695 let descriptor = wgc::resource::SamplerDescriptor {
1696 label: desc.label.map(Borrowed),
1697 address_modes: [
1698 desc.address_mode_u,
1699 desc.address_mode_v,
1700 desc.address_mode_w,
1701 ],
1702 mag_filter: desc.mag_filter,
1703 min_filter: desc.min_filter,
1704 mipmap_filter: desc.mipmap_filter,
1705 lod_min_clamp: desc.lod_min_clamp,
1706 lod_max_clamp: desc.lod_max_clamp,
1707 compare: desc.compare,
1708 anisotropy_clamp: desc.anisotropy_clamp,
1709 border_color: desc.border_color,
1710 };
1711
1712 let (id, error) = self
1713 .context
1714 .0
1715 .device_create_sampler(self.id, &descriptor, None);
1716 if let Some(cause) = error {
1717 self.context.handle_error(
1718 &self.error_sink,
1719 cause,
1720 desc.label,
1721 "Device::create_sampler",
1722 );
1723 }
1724 CoreSampler {
1725 context: self.context.clone(),
1726 id,
1727 }
1728 .into()
1729 }
1730
1731 fn create_query_set(&self, desc: &crate::QuerySetDescriptor<'_>) -> dispatch::DispatchQuerySet {
1732 let (id, error) = self.context.0.device_create_query_set(
1733 self.id,
1734 &desc.map_label(|l| l.map(Borrowed)),
1735 None,
1736 );
1737 if let Some(cause) = error {
1738 self.context
1739 .handle_error_nolabel(&self.error_sink, cause, "Device::create_query_set");
1740 }
1741 CoreQuerySet {
1742 context: self.context.clone(),
1743 id,
1744 }
1745 .into()
1746 }
1747
1748 fn create_command_encoder(
1749 &self,
1750 desc: &crate::CommandEncoderDescriptor<'_>,
1751 ) -> dispatch::DispatchCommandEncoder {
1752 let (id, error) = self.context.0.device_create_command_encoder(
1753 self.id,
1754 &desc.map_label(|l| l.map(Borrowed)),
1755 None,
1756 );
1757 if let Some(cause) = error {
1758 self.context.handle_error(
1759 &self.error_sink,
1760 cause,
1761 desc.label,
1762 "Device::create_command_encoder",
1763 );
1764 }
1765
1766 CoreCommandEncoder {
1767 context: self.context.clone(),
1768 id,
1769 error_sink: Arc::clone(&self.error_sink),
1770 }
1771 .into()
1772 }
1773
1774 fn create_render_bundle_encoder(
1775 &self,
1776 desc: &crate::RenderBundleEncoderDescriptor<'_>,
1777 ) -> dispatch::DispatchRenderBundleEncoder {
1778 let descriptor = wgc::command::RenderBundleEncoderDescriptor {
1779 label: desc.label.map(Borrowed),
1780 color_formats: Borrowed(desc.color_formats),
1781 depth_stencil: desc.depth_stencil,
1782 sample_count: desc.sample_count,
1783 multiview: desc.multiview,
1784 };
1785 let encoder = match wgc::command::RenderBundleEncoder::new(&descriptor, self.id) {
1786 Ok(encoder) => encoder,
1787 Err(e) => panic!("Error in Device::create_render_bundle_encoder: {e}"),
1788 };
1789
1790 CoreRenderBundleEncoder {
1791 context: self.context.clone(),
1792 encoder,
1793 id: crate::cmp::Identifier::create(),
1794 }
1795 .into()
1796 }
1797
1798 fn set_device_lost_callback(&self, device_lost_callback: dispatch::BoxDeviceLostCallback) {
1799 self.context
1800 .0
1801 .device_set_device_lost_closure(self.id, device_lost_callback);
1802 }
1803
1804 fn on_uncaptured_error(&self, handler: Arc<dyn crate::UncapturedErrorHandler>) {
1805 let mut error_sink = self.error_sink.lock();
1806 error_sink.uncaptured_handler = Some(handler);
1807 }
1808
1809 fn push_error_scope(&self, filter: crate::ErrorFilter) -> u32 {
1810 let mut error_sink = self.error_sink.lock();
1811 let thread_id = thread_id::ThreadId::current();
1812 let scopes = error_sink.scopes.entry(thread_id).or_default();
1813 let index = scopes
1814 .len()
1815 .try_into()
1816 .expect("Greater than 2^32 nested error scopes");
1817 scopes.push(ErrorScope {
1818 error: None,
1819 filter,
1820 });
1821 index
1822 }
1823
    /// Pops the error scope at `index` from the current thread's stack and
    /// resolves to the error it captured, if any.
    ///
    /// Mismatched pops normally panic; but if the thread is already
    /// unwinding (e.g. a Drop impl popping during a panic), the mismatch is
    /// swallowed and `None` is returned to avoid a double panic / abort.
    fn pop_error_scope(&self, index: u32) -> Pin<Box<dyn dispatch::PopErrorScopeFuture>> {
        let mut error_sink = self.error_sink.lock();

        let is_panicking = crate::util::is_panicking();
        let thread_id = thread_id::ThreadId::current();
        let err = "Mismatched pop_error_scope call: no error scope for this thread. Error scopes are thread-local.";
        // Scopes are thread-local: a pop on a thread that never pushed is a
        // usage error.
        let scopes = match error_sink.scopes.get_mut(&thread_id) {
            Some(s) => s,
            None => {
                if !is_panicking {
                    panic!("{err}");
                } else {
                    return Box::pin(ready(None));
                }
            }
        };
        if scopes.is_empty() && !is_panicking {
            panic!("{err}");
        }
        // Scopes must be popped strictly LIFO; `index` came from the matching
        // `push_error_scope` call.
        if index as usize != scopes.len() - 1 && !is_panicking {
            panic!(
                "Mismatched pop_error_scope call: error scopes must be popped in reverse order."
            );
        }

        let scope = match scopes.pop() {
            Some(s) => s,
            // Empty stack was already checked above when not panicking.
            None if !is_panicking => unreachable!(),
            None => return Box::pin(ready(None)),
        };

        // The result is already known, so the future resolves immediately.
        Box::pin(ready(scope.error))
    }
1863
1864 unsafe fn start_graphics_debugger_capture(&self) {
1865 unsafe {
1866 self.context
1867 .0
1868 .device_start_graphics_debugger_capture(self.id)
1869 };
1870 }
1871
1872 unsafe fn stop_graphics_debugger_capture(&self) {
1873 unsafe {
1874 self.context
1875 .0
1876 .device_stop_graphics_debugger_capture(self.id)
1877 };
1878 }
1879
1880 fn poll(&self, poll_type: wgt::PollType<u64>) -> Result<crate::PollStatus, crate::PollError> {
1881 match self.context.0.device_poll(self.id, poll_type) {
1882 Ok(status) => Ok(status),
1883 Err(err) => {
1884 if let Some(poll_error) = err.to_poll_error() {
1885 return Err(poll_error);
1886 }
1887
1888 self.context.handle_error_fatal(err, "Device::poll")
1889 }
1890 }
1891 }
1892
1893 fn get_internal_counters(&self) -> crate::InternalCounters {
1894 self.context.0.device_get_internal_counters(self.id)
1895 }
1896
1897 fn generate_allocator_report(&self) -> Option<wgt::AllocatorReport> {
1898 self.context.0.device_generate_allocator_report(self.id)
1899 }
1900
1901 fn destroy(&self) {
1902 self.context.0.device_destroy(self.id);
1903 }
1904}
1905
1906impl Drop for CoreDevice {
1907 fn drop(&mut self) {
1908 self.context.0.device_drop(self.id)
1909 }
1910}
1911
impl dispatch::QueueInterface for CoreQueue {
    /// Schedules a write of `data` into `buffer` at `offset`.
    fn write_buffer(
        &self,
        buffer: &dispatch::DispatchBuffer,
        offset: crate::BufferAddress,
        data: &[u8],
    ) {
        let buffer = buffer.as_core();

        match self
            .context
            .0
            .queue_write_buffer(self.id, buffer.id, offset, data)
        {
            Ok(()) => (),
            Err(err) => {
                // No descriptor label is available here, so report without one.
                self.context
                    .handle_error_nolabel(&self.error_sink, err, "Queue::write_buffer")
            }
        }
    }

    /// Allocates a staging buffer; returns `None` (after reporting the error)
    /// on failure. Backs the public `write_buffer_with` path.
    fn create_staging_buffer(
        &self,
        size: crate::BufferSize,
    ) -> Option<dispatch::DispatchQueueWriteBuffer> {
        match self
            .context
            .0
            .queue_create_staging_buffer(self.id, size, None)
        {
            Ok((buffer_id, ptr)) => Some(
                CoreQueueWriteBuffer {
                    buffer_id,
                    mapping: CoreBufferMappedRange {
                        ptr,
                        size: size.get() as usize,
                    },
                }
                .into(),
            ),
            Err(err) => {
                // Reported under the public entry point that uses staging
                // buffers, not this internal helper's name.
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    err,
                    "Queue::write_buffer_with",
                );
                None
            }
        }
    }

    /// Validates a pending `write_buffer_with`-style write; `None` (after
    /// reporting) if the write would be invalid.
    fn validate_write_buffer(
        &self,
        buffer: &dispatch::DispatchBuffer,
        offset: wgt::BufferAddress,
        size: wgt::BufferSize,
    ) -> Option<()> {
        let buffer = buffer.as_core();

        match self
            .context
            .0
            .queue_validate_write_buffer(self.id, buffer.id, offset, size)
        {
            Ok(()) => Some(()),
            Err(err) => {
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    err,
                    "Queue::write_buffer_with",
                );
                None
            }
        }
    }

    /// Copies a previously filled staging buffer into `buffer` at `offset`.
    fn write_staging_buffer(
        &self,
        buffer: &dispatch::DispatchBuffer,
        offset: crate::BufferAddress,
        staging_buffer: &dispatch::DispatchQueueWriteBuffer,
    ) {
        let buffer = buffer.as_core();
        let staging_buffer = staging_buffer.as_core();

        match self.context.0.queue_write_staging_buffer(
            self.id,
            buffer.id,
            offset,
            staging_buffer.buffer_id,
        ) {
            Ok(()) => (),
            Err(err) => {
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    err,
                    "Queue::write_buffer_with",
                );
            }
        }
    }

    /// Schedules a write of `data` into a texture region.
    fn write_texture(
        &self,
        texture: crate::TexelCopyTextureInfo<'_>,
        data: &[u8],
        data_layout: crate::TexelCopyBufferLayout,
        size: crate::Extent3d,
    ) {
        match self.context.0.queue_write_texture(
            self.id,
            &map_texture_copy_view(texture),
            data,
            &data_layout,
            &size,
        ) {
            Ok(()) => (),
            Err(err) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, err, "Queue::write_texture")
            }
        }
    }

    // Only compiled for web targets; the body is a no-op (hence the unused
    // variables) unless the webgl backend is enabled.
    #[cfg(web)]
    #[cfg_attr(not(webgl), expect(unused_variables))]
    fn copy_external_image_to_texture(
        &self,
        source: &crate::CopyExternalImageSourceInfo,
        dest: crate::CopyExternalImageDestInfo<&crate::api::Texture>,
        size: crate::Extent3d,
    ) {
        #[cfg(webgl)]
        match self.context.0.queue_copy_external_image_to_texture(
            self.id,
            source,
            map_texture_tagged_copy_view(dest),
            size,
        ) {
            Ok(()) => (),
            Err(err) => self.context.handle_error_nolabel(
                &self.error_sink,
                err,
                "Queue::copy_external_image_to_texture",
            ),
        }
    }

    /// Submits the given command buffers and returns the submission index.
    fn submit(
        &self,
        command_buffers: &mut dyn Iterator<Item = dispatch::DispatchCommandBuffer>,
    ) -> u64 {
        let temp_command_buffers = command_buffers.collect::<SmallVec<[_; 4]>>();
        let command_buffer_ids = temp_command_buffers
            .iter()
            .map(|cmdbuf| cmdbuf.as_core().id)
            .collect::<SmallVec<[_; 4]>>();

        // On error, wgpu-core still reports the submission index alongside
        // the failure, so both arms yield an index.
        let index = match self.context.0.queue_submit(self.id, &command_buffer_ids) {
            Ok(index) => index,
            Err((index, err)) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, err, "Queue::submit");
                index
            }
        };

        // Explicitly keep the command-buffer wrappers alive until after the
        // submit call; their ids were used above.
        drop(temp_command_buffers);

        index
    }

    /// Nanoseconds per timestamp-query tick for this queue.
    fn get_timestamp_period(&self) -> f32 {
        self.context.0.queue_get_timestamp_period(self.id)
    }

    /// Registers `callback` to fire once currently submitted work completes.
    fn on_submitted_work_done(&self, callback: dispatch::BoxSubmittedWorkDoneCallback) {
        self.context
            .0
            .queue_on_submitted_work_done(self.id, callback);
    }

    /// Compacts a prepared BLAS, returning the new handle value (if any) and
    /// the compacted BLAS wrapper.
    fn compact_blas(&self, blas: &dispatch::DispatchBlas) -> (Option<u64>, dispatch::DispatchBlas) {
        let (id, handle, error) =
            self.context
                .0
                .queue_compact_blas(self.id, blas.as_core().id, None);

        if let Some(cause) = error {
            self.context
                .handle_error_nolabel(&self.error_sink, cause, "Queue::compact_blas");
        }
        (
            handle,
            CoreBlas {
                context: self.context.clone(),
                id,
                error_sink: Arc::clone(&self.error_sink),
            }
            .into(),
        )
    }
}
2118
2119impl Drop for CoreQueue {
2120 fn drop(&mut self) {
2121 self.context.0.queue_drop(self.id)
2122 }
2123}
2124
2125impl dispatch::ShaderModuleInterface for CoreShaderModule {
2126 fn get_compilation_info(&self) -> Pin<Box<dyn dispatch::ShaderCompilationInfoFuture>> {
2127 Box::pin(ready(self.compilation_info.clone()))
2128 }
2129}
2130
2131impl Drop for CoreShaderModule {
2132 fn drop(&mut self) {
2133 self.context.0.shader_module_drop(self.id)
2134 }
2135}
2136
// Marker impl: no extra behavior is needed for bind group layouts.
impl dispatch::BindGroupLayoutInterface for CoreBindGroupLayout {}
2138
2139impl Drop for CoreBindGroupLayout {
2140 fn drop(&mut self) {
2141 self.context.0.bind_group_layout_drop(self.id)
2142 }
2143}
2144
// Marker impl: no extra behavior is needed for bind groups.
impl dispatch::BindGroupInterface for CoreBindGroup {}
2146
2147impl Drop for CoreBindGroup {
2148 fn drop(&mut self) {
2149 self.context.0.bind_group_drop(self.id)
2150 }
2151}
2152
// Marker impl: no extra behavior is needed for texture views.
impl dispatch::TextureViewInterface for CoreTextureView {}
2154
2155impl Drop for CoreTextureView {
2156 fn drop(&mut self) {
2157 let _ = self.context.0.texture_view_drop(self.id);
2159 }
2160}
2161
2162impl dispatch::ExternalTextureInterface for CoreExternalTexture {
2163 fn destroy(&self) {
2164 self.context.0.external_texture_destroy(self.id);
2165 }
2166}
2167
2168impl Drop for CoreExternalTexture {
2169 fn drop(&mut self) {
2170 self.context.0.external_texture_drop(self.id);
2171 }
2172}
2173
// Marker impl: no extra behavior is needed for samplers.
impl dispatch::SamplerInterface for CoreSampler {}
2175
2176impl Drop for CoreSampler {
2177 fn drop(&mut self) {
2178 self.context.0.sampler_drop(self.id)
2179 }
2180}
2181
impl dispatch::BufferInterface for CoreBuffer {
    /// Begins an asynchronous map of `range`; `callback` fires when the map
    /// operation resolves (successfully or not).
    fn map_async(
        &self,
        mode: crate::MapMode,
        range: Range<crate::BufferAddress>,
        callback: dispatch::BufferMapCallback,
    ) {
        let operation = wgc::resource::BufferMapOperation {
            host: match mode {
                MapMode::Read => wgc::device::HostMap::Read,
                MapMode::Write => wgc::device::HostMap::Write,
            },
            // Adapt wgpu-core's status into the public error type before
            // invoking the user's callback (which is moved into this closure).
            callback: Some(Box::new(|status| {
                let res = status.map_err(|_| crate::BufferAsyncError);
                callback(res);
            })),
        };

        match self.context.0.buffer_map_async(
            self.id,
            range.start,
            Some(range.end - range.start),
            operation,
        ) {
            Ok(_) => (),
            Err(cause) => {
                // Synchronous setup failures are reported through the sink;
                // asynchronous outcomes go through the callback above.
                self.context
                    .handle_error_nolabel(&self.error_sink, cause, "Buffer::map_async")
            }
        }
    }

    /// Returns the mapped memory covering `sub_range`. The buffer must be
    /// mapped; failure here is treated as fatal rather than sink-reported.
    fn get_mapped_range(
        &self,
        sub_range: Range<crate::BufferAddress>,
    ) -> dispatch::DispatchBufferMappedRange {
        let size = sub_range.end - sub_range.start;
        match self
            .context
            .0
            .buffer_get_mapped_range(self.id, sub_range.start, Some(size))
        {
            Ok((ptr, size)) => CoreBufferMappedRange {
                ptr,
                size: size as usize,
            }
            .into(),
            Err(err) => self
                .context
                .handle_error_fatal(err, "Buffer::get_mapped_range"),
        }
    }

    /// Unmaps the buffer, invalidating any outstanding mapped ranges.
    fn unmap(&self) {
        match self.context.0.buffer_unmap(self.id) {
            Ok(()) => (),
            Err(cause) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, cause, "Buffer::buffer_unmap")
            }
        }
    }

    /// Eagerly destroys the buffer's resources without waiting for drop.
    fn destroy(&self) {
        self.context.0.buffer_destroy(self.id);
    }
}
2249
2250impl Drop for CoreBuffer {
2251 fn drop(&mut self) {
2252 self.context.0.buffer_drop(self.id)
2253 }
2254}
2255
2256impl dispatch::TextureInterface for CoreTexture {
2257 fn create_view(
2258 &self,
2259 desc: &crate::TextureViewDescriptor<'_>,
2260 ) -> dispatch::DispatchTextureView {
2261 let descriptor = wgc::resource::TextureViewDescriptor {
2262 label: desc.label.map(Borrowed),
2263 format: desc.format,
2264 dimension: desc.dimension,
2265 usage: desc.usage,
2266 range: wgt::ImageSubresourceRange {
2267 aspect: desc.aspect,
2268 base_mip_level: desc.base_mip_level,
2269 mip_level_count: desc.mip_level_count,
2270 base_array_layer: desc.base_array_layer,
2271 array_layer_count: desc.array_layer_count,
2272 },
2273 };
2274 let (id, error) = self
2275 .context
2276 .0
2277 .texture_create_view(self.id, &descriptor, None);
2278 if let Some(cause) = error {
2279 self.context
2280 .handle_error(&self.error_sink, cause, desc.label, "Texture::create_view");
2281 }
2282 CoreTextureView {
2283 context: self.context.clone(),
2284 id,
2285 }
2286 .into()
2287 }
2288
2289 fn destroy(&self) {
2290 self.context.0.texture_destroy(self.id);
2291 }
2292}
2293
2294impl Drop for CoreTexture {
2295 fn drop(&mut self) {
2296 self.context.0.texture_drop(self.id)
2297 }
2298}
2299
impl dispatch::BlasInterface for CoreBlas {
    /// Asks wgpu-core to prepare this BLAS for compaction; `callback` fires
    /// when the preparation resolves.
    fn prepare_compact_async(&self, callback: BlasCompactCallback) {
        // Adapt wgpu-core's result type into the public error type before
        // invoking the user's callback (moved into this closure). Note the
        // shadowing: `callback` below is the wrapped core callback.
        let callback: Option<wgc::resource::BlasCompactCallback> =
            Some(Box::new(|status: BlasPrepareCompactResult| {
                let res = status.map_err(|_| crate::BlasAsyncError);
                callback(res);
            }));

        match self.context.0.blas_prepare_compact_async(self.id, callback) {
            Ok(_) => (),
            Err(cause) => self.context.handle_error_nolabel(
                &self.error_sink,
                cause,
                "Blas::prepare_compact_async",
            ),
        }
    }

    /// Whether this BLAS has finished preparing and can be compacted.
    fn ready_for_compaction(&self) -> bool {
        match self.context.0.ready_for_compaction(self.id) {
            Ok(ready) => ready,
            Err(cause) => {
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    cause,
                    "Blas::ready_for_compaction",
                );
                // On error, conservatively report "not ready".
                false
            }
        }
    }
}
2333
2334impl Drop for CoreBlas {
2335 fn drop(&mut self) {
2336 self.context.0.blas_drop(self.id)
2337 }
2338}
2339
// Marker impl: no extra behavior is needed for TLAS objects.
impl dispatch::TlasInterface for CoreTlas {}
2341
2342impl Drop for CoreTlas {
2343 fn drop(&mut self) {
2344 self.context.0.tlas_drop(self.id)
2345 }
2346}
2347
// Marker impl: no extra behavior is needed for query sets.
impl dispatch::QuerySetInterface for CoreQuerySet {}
2349
2350impl Drop for CoreQuerySet {
2351 fn drop(&mut self) {
2352 self.context.0.query_set_drop(self.id)
2353 }
2354}
2355
// Marker impl: no extra behavior is needed for pipeline layouts.
impl dispatch::PipelineLayoutInterface for CorePipelineLayout {}
2357
2358impl Drop for CorePipelineLayout {
2359 fn drop(&mut self) {
2360 self.context.0.pipeline_layout_drop(self.id)
2361 }
2362}
2363
2364impl dispatch::RenderPipelineInterface for CoreRenderPipeline {
2365 fn get_bind_group_layout(&self, index: u32) -> dispatch::DispatchBindGroupLayout {
2366 let (id, error) = self
2367 .context
2368 .0
2369 .render_pipeline_get_bind_group_layout(self.id, index, None);
2370 if let Some(err) = error {
2371 self.context.handle_error_nolabel(
2372 &self.error_sink,
2373 err,
2374 "RenderPipeline::get_bind_group_layout",
2375 )
2376 }
2377 CoreBindGroupLayout {
2378 context: self.context.clone(),
2379 id,
2380 }
2381 .into()
2382 }
2383}
2384
2385impl Drop for CoreRenderPipeline {
2386 fn drop(&mut self) {
2387 self.context.0.render_pipeline_drop(self.id)
2388 }
2389}
2390
2391impl dispatch::ComputePipelineInterface for CoreComputePipeline {
2392 fn get_bind_group_layout(&self, index: u32) -> dispatch::DispatchBindGroupLayout {
2393 let (id, error) = self
2394 .context
2395 .0
2396 .compute_pipeline_get_bind_group_layout(self.id, index, None);
2397 if let Some(err) = error {
2398 self.context.handle_error_nolabel(
2399 &self.error_sink,
2400 err,
2401 "ComputePipeline::get_bind_group_layout",
2402 )
2403 }
2404 CoreBindGroupLayout {
2405 context: self.context.clone(),
2406 id,
2407 }
2408 .into()
2409 }
2410}
2411
2412impl Drop for CoreComputePipeline {
2413 fn drop(&mut self) {
2414 self.context.0.compute_pipeline_drop(self.id)
2415 }
2416}
2417
2418impl dispatch::PipelineCacheInterface for CorePipelineCache {
2419 fn get_data(&self) -> Option<Vec<u8>> {
2420 self.context.0.pipeline_cache_get_data(self.id)
2421 }
2422}
2423
2424impl Drop for CorePipelineCache {
2425 fn drop(&mut self) {
2426 self.context.0.pipeline_cache_drop(self.id)
2427 }
2428}
2429
2430impl dispatch::CommandEncoderInterface for CoreCommandEncoder {
2431 fn copy_buffer_to_buffer(
2432 &self,
2433 source: &dispatch::DispatchBuffer,
2434 source_offset: crate::BufferAddress,
2435 destination: &dispatch::DispatchBuffer,
2436 destination_offset: crate::BufferAddress,
2437 copy_size: Option<crate::BufferAddress>,
2438 ) {
2439 let source = source.as_core();
2440 let destination = destination.as_core();
2441
2442 if let Err(cause) = self.context.0.command_encoder_copy_buffer_to_buffer(
2443 self.id,
2444 source.id,
2445 source_offset,
2446 destination.id,
2447 destination_offset,
2448 copy_size,
2449 ) {
2450 self.context.handle_error_nolabel(
2451 &self.error_sink,
2452 cause,
2453 "CommandEncoder::copy_buffer_to_buffer",
2454 );
2455 }
2456 }
2457
2458 fn copy_buffer_to_texture(
2459 &self,
2460 source: crate::TexelCopyBufferInfo<'_>,
2461 destination: crate::TexelCopyTextureInfo<'_>,
2462 copy_size: crate::Extent3d,
2463 ) {
2464 if let Err(cause) = self.context.0.command_encoder_copy_buffer_to_texture(
2465 self.id,
2466 &map_buffer_copy_view(source),
2467 &map_texture_copy_view(destination),
2468 ©_size,
2469 ) {
2470 self.context.handle_error_nolabel(
2471 &self.error_sink,
2472 cause,
2473 "CommandEncoder::copy_buffer_to_texture",
2474 );
2475 }
2476 }
2477
2478 fn copy_texture_to_buffer(
2479 &self,
2480 source: crate::TexelCopyTextureInfo<'_>,
2481 destination: crate::TexelCopyBufferInfo<'_>,
2482 copy_size: crate::Extent3d,
2483 ) {
2484 if let Err(cause) = self.context.0.command_encoder_copy_texture_to_buffer(
2485 self.id,
2486 &map_texture_copy_view(source),
2487 &map_buffer_copy_view(destination),
2488 ©_size,
2489 ) {
2490 self.context.handle_error_nolabel(
2491 &self.error_sink,
2492 cause,
2493 "CommandEncoder::copy_texture_to_buffer",
2494 );
2495 }
2496 }
2497
2498 fn copy_texture_to_texture(
2499 &self,
2500 source: crate::TexelCopyTextureInfo<'_>,
2501 destination: crate::TexelCopyTextureInfo<'_>,
2502 copy_size: crate::Extent3d,
2503 ) {
2504 if let Err(cause) = self.context.0.command_encoder_copy_texture_to_texture(
2505 self.id,
2506 &map_texture_copy_view(source),
2507 &map_texture_copy_view(destination),
2508 ©_size,
2509 ) {
2510 self.context.handle_error_nolabel(
2511 &self.error_sink,
2512 cause,
2513 "CommandEncoder::copy_texture_to_texture",
2514 );
2515 }
2516 }
2517
2518 fn begin_compute_pass(
2519 &self,
2520 desc: &crate::ComputePassDescriptor<'_>,
2521 ) -> dispatch::DispatchComputePass {
2522 let timestamp_writes =
2523 desc.timestamp_writes
2524 .as_ref()
2525 .map(|tw| wgc::command::PassTimestampWrites {
2526 query_set: tw.query_set.inner.as_core().id,
2527 beginning_of_pass_write_index: tw.beginning_of_pass_write_index,
2528 end_of_pass_write_index: tw.end_of_pass_write_index,
2529 });
2530
2531 let (pass, err) = self.context.0.command_encoder_begin_compute_pass(
2532 self.id,
2533 &wgc::command::ComputePassDescriptor {
2534 label: desc.label.map(Borrowed),
2535 timestamp_writes,
2536 },
2537 );
2538
2539 if let Some(cause) = err {
2540 self.context.handle_error(
2541 &self.error_sink,
2542 cause,
2543 desc.label,
2544 "CommandEncoder::begin_compute_pass",
2545 );
2546 }
2547
2548 CoreComputePass {
2549 context: self.context.clone(),
2550 pass,
2551 error_sink: self.error_sink.clone(),
2552 id: crate::cmp::Identifier::create(),
2553 }
2554 .into()
2555 }
2556
2557 fn begin_render_pass(
2558 &self,
2559 desc: &crate::RenderPassDescriptor<'_>,
2560 ) -> dispatch::DispatchRenderPass {
2561 let colors = desc
2562 .color_attachments
2563 .iter()
2564 .map(|ca| {
2565 ca.as_ref()
2566 .map(|at| wgc::command::RenderPassColorAttachment {
2567 view: at.view.inner.as_core().id,
2568 depth_slice: at.depth_slice,
2569 resolve_target: at.resolve_target.map(|view| view.inner.as_core().id),
2570 load_op: at.ops.load,
2571 store_op: at.ops.store,
2572 })
2573 })
2574 .collect::<Vec<_>>();
2575
2576 let depth_stencil = desc.depth_stencil_attachment.as_ref().map(|dsa| {
2577 wgc::command::RenderPassDepthStencilAttachment {
2578 view: dsa.view.inner.as_core().id,
2579 depth: map_pass_channel(dsa.depth_ops.as_ref()),
2580 stencil: map_pass_channel(dsa.stencil_ops.as_ref()),
2581 }
2582 });
2583
2584 let timestamp_writes =
2585 desc.timestamp_writes
2586 .as_ref()
2587 .map(|tw| wgc::command::PassTimestampWrites {
2588 query_set: tw.query_set.inner.as_core().id,
2589 beginning_of_pass_write_index: tw.beginning_of_pass_write_index,
2590 end_of_pass_write_index: tw.end_of_pass_write_index,
2591 });
2592
2593 let (pass, err) = self.context.0.command_encoder_begin_render_pass(
2594 self.id,
2595 &wgc::command::RenderPassDescriptor {
2596 label: desc.label.map(Borrowed),
2597 timestamp_writes: timestamp_writes.as_ref(),
2598 color_attachments: Borrowed(&colors),
2599 depth_stencil_attachment: depth_stencil.as_ref(),
2600 occlusion_query_set: desc.occlusion_query_set.map(|qs| qs.inner.as_core().id),
2601 multiview_mask: desc.multiview_mask,
2602 },
2603 );
2604
2605 if let Some(cause) = err {
2606 self.context.handle_error(
2607 &self.error_sink,
2608 cause,
2609 desc.label,
2610 "CommandEncoder::begin_render_pass",
2611 );
2612 }
2613
2614 CoreRenderPass {
2615 context: self.context.clone(),
2616 pass,
2617 error_sink: self.error_sink.clone(),
2618 id: crate::cmp::Identifier::create(),
2619 }
2620 .into()
2621 }
2622
2623 fn finish(&mut self) -> dispatch::DispatchCommandBuffer {
2624 let descriptor = wgt::CommandBufferDescriptor::default();
2625 let (id, opt_label_and_error) =
2626 self.context
2627 .0
2628 .command_encoder_finish(self.id, &descriptor, None);
2629 if let Some((label, cause)) = opt_label_and_error {
2630 self.context
2631 .handle_error(&self.error_sink, cause, Some(&label), "a CommandEncoder");
2632 }
2633 CoreCommandBuffer {
2634 context: self.context.clone(),
2635 id,
2636 }
2637 .into()
2638 }
2639
2640 fn clear_texture(
2641 &self,
2642 texture: &dispatch::DispatchTexture,
2643 subresource_range: &crate::ImageSubresourceRange,
2644 ) {
2645 let texture = texture.as_core();
2646
2647 if let Err(cause) =
2648 self.context
2649 .0
2650 .command_encoder_clear_texture(self.id, texture.id, subresource_range)
2651 {
2652 self.context.handle_error_nolabel(
2653 &self.error_sink,
2654 cause,
2655 "CommandEncoder::clear_texture",
2656 );
2657 }
2658 }
2659
2660 fn clear_buffer(
2661 &self,
2662 buffer: &dispatch::DispatchBuffer,
2663 offset: crate::BufferAddress,
2664 size: Option<crate::BufferAddress>,
2665 ) {
2666 let buffer = buffer.as_core();
2667
2668 if let Err(cause) = self
2669 .context
2670 .0
2671 .command_encoder_clear_buffer(self.id, buffer.id, offset, size)
2672 {
2673 self.context.handle_error_nolabel(
2674 &self.error_sink,
2675 cause,
2676 "CommandEncoder::fill_buffer",
2677 );
2678 }
2679 }
2680
2681 fn insert_debug_marker(&self, label: &str) {
2682 if let Err(cause) = self
2683 .context
2684 .0
2685 .command_encoder_insert_debug_marker(self.id, label)
2686 {
2687 self.context.handle_error_nolabel(
2688 &self.error_sink,
2689 cause,
2690 "CommandEncoder::insert_debug_marker",
2691 );
2692 }
2693 }
2694
2695 fn push_debug_group(&self, label: &str) {
2696 if let Err(cause) = self
2697 .context
2698 .0
2699 .command_encoder_push_debug_group(self.id, label)
2700 {
2701 self.context.handle_error_nolabel(
2702 &self.error_sink,
2703 cause,
2704 "CommandEncoder::push_debug_group",
2705 );
2706 }
2707 }
2708
2709 fn pop_debug_group(&self) {
2710 if let Err(cause) = self.context.0.command_encoder_pop_debug_group(self.id) {
2711 self.context.handle_error_nolabel(
2712 &self.error_sink,
2713 cause,
2714 "CommandEncoder::pop_debug_group",
2715 );
2716 }
2717 }
2718
2719 fn write_timestamp(&self, query_set: &dispatch::DispatchQuerySet, query_index: u32) {
2720 let query_set = query_set.as_core();
2721
2722 if let Err(cause) =
2723 self.context
2724 .0
2725 .command_encoder_write_timestamp(self.id, query_set.id, query_index)
2726 {
2727 self.context.handle_error_nolabel(
2728 &self.error_sink,
2729 cause,
2730 "CommandEncoder::write_timestamp",
2731 );
2732 }
2733 }
2734
2735 fn resolve_query_set(
2736 &self,
2737 query_set: &dispatch::DispatchQuerySet,
2738 first_query: u32,
2739 query_count: u32,
2740 destination: &dispatch::DispatchBuffer,
2741 destination_offset: crate::BufferAddress,
2742 ) {
2743 let query_set = query_set.as_core();
2744 let destination = destination.as_core();
2745
2746 if let Err(cause) = self.context.0.command_encoder_resolve_query_set(
2747 self.id,
2748 query_set.id,
2749 first_query,
2750 query_count,
2751 destination.id,
2752 destination_offset,
2753 ) {
2754 self.context.handle_error_nolabel(
2755 &self.error_sink,
2756 cause,
2757 "CommandEncoder::resolve_query_set",
2758 );
2759 }
2760 }
2761
2762 fn mark_acceleration_structures_built<'a>(
2763 &self,
2764 blas: &mut dyn Iterator<Item = &'a Blas>,
2765 tlas: &mut dyn Iterator<Item = &'a Tlas>,
2766 ) {
2767 let blas = blas
2768 .map(|b| b.inner.as_core().id)
2769 .collect::<SmallVec<[_; 4]>>();
2770 let tlas = tlas
2771 .map(|t| t.inner.as_core().id)
2772 .collect::<SmallVec<[_; 4]>>();
2773 if let Err(cause) = self
2774 .context
2775 .0
2776 .command_encoder_mark_acceleration_structures_built(self.id, &blas, &tlas)
2777 {
2778 self.context.handle_error_nolabel(
2779 &self.error_sink,
2780 cause,
2781 "CommandEncoder::build_acceleration_structures_unsafe_tlas",
2782 );
2783 }
2784 }
2785
2786 fn build_acceleration_structures<'a>(
2787 &self,
2788 blas: &mut dyn Iterator<Item = &'a crate::BlasBuildEntry<'a>>,
2789 tlas: &mut dyn Iterator<Item = &'a crate::Tlas>,
2790 ) {
2791 let blas = blas.map(|e: &crate::BlasBuildEntry<'_>| {
2792 let geometries = match e.geometry {
2793 crate::BlasGeometries::TriangleGeometries(ref triangle_geometries) => {
2794 let iter = triangle_geometries.iter().map(|tg| {
2795 wgc::ray_tracing::BlasTriangleGeometry {
2796 vertex_buffer: tg.vertex_buffer.inner.as_core().id,
2797 index_buffer: tg.index_buffer.map(|buf| buf.inner.as_core().id),
2798 transform_buffer: tg.transform_buffer.map(|buf| buf.inner.as_core().id),
2799 size: tg.size,
2800 transform_buffer_offset: tg.transform_buffer_offset,
2801 first_vertex: tg.first_vertex,
2802 vertex_stride: tg.vertex_stride,
2803 first_index: tg.first_index,
2804 }
2805 });
2806 wgc::ray_tracing::BlasGeometries::TriangleGeometries(Box::new(iter))
2807 }
2808 };
2809 wgc::ray_tracing::BlasBuildEntry {
2810 blas_id: e.blas.inner.as_core().id,
2811 geometries,
2812 }
2813 });
2814
2815 let tlas = tlas.into_iter().map(|e| {
2816 let instances = e
2817 .instances
2818 .iter()
2819 .map(|instance: &Option<crate::TlasInstance>| {
2820 instance
2821 .as_ref()
2822 .map(|instance| wgc::ray_tracing::TlasInstance {
2823 blas_id: instance.blas.as_core().id,
2824 transform: &instance.transform,
2825 custom_data: instance.custom_data,
2826 mask: instance.mask,
2827 })
2828 });
2829 wgc::ray_tracing::TlasPackage {
2830 tlas_id: e.inner.as_core().id,
2831 instances: Box::new(instances),
2832 lowest_unmodified: e.lowest_unmodified,
2833 }
2834 });
2835
2836 if let Err(cause) = self
2837 .context
2838 .0
2839 .command_encoder_build_acceleration_structures(self.id, blas, tlas)
2840 {
2841 self.context.handle_error_nolabel(
2842 &self.error_sink,
2843 cause,
2844 "CommandEncoder::build_acceleration_structures_unsafe_tlas",
2845 );
2846 }
2847 }
2848
2849 fn transition_resources<'a>(
2850 &mut self,
2851 buffer_transitions: &mut dyn Iterator<
2852 Item = wgt::BufferTransition<&'a dispatch::DispatchBuffer>,
2853 >,
2854 texture_transitions: &mut dyn Iterator<
2855 Item = wgt::TextureTransition<&'a dispatch::DispatchTexture>,
2856 >,
2857 ) {
2858 let result = self.context.0.command_encoder_transition_resources(
2859 self.id,
2860 buffer_transitions.map(|t| wgt::BufferTransition {
2861 buffer: t.buffer.as_core().id,
2862 state: t.state,
2863 }),
2864 texture_transitions.map(|t| wgt::TextureTransition {
2865 texture: t.texture.as_core().id,
2866 selector: t.selector.clone(),
2867 state: t.state,
2868 }),
2869 );
2870
2871 if let Err(cause) = result {
2872 self.context.handle_error_nolabel(
2873 &self.error_sink,
2874 cause,
2875 "CommandEncoder::transition_resources",
2876 );
2877 }
2878 }
2879}
2880
2881impl Drop for CoreCommandEncoder {
2882 fn drop(&mut self) {
2883 self.context.0.command_encoder_drop(self.id)
2884 }
2885}
2886
// Marker impl: a finished command buffer is only submitted or dropped; it
// exposes no further operations through the dispatch layer.
impl dispatch::CommandBufferInterface for CoreCommandBuffer {}
2888
2889impl Drop for CoreCommandBuffer {
2890 fn drop(&mut self) {
2891 self.context.0.command_buffer_drop(self.id)
2892 }
2893}
2894
2895impl dispatch::ComputePassInterface for CoreComputePass {
2896 fn set_pipeline(&mut self, pipeline: &dispatch::DispatchComputePipeline) {
2897 let pipeline = pipeline.as_core();
2898
2899 if let Err(cause) = self
2900 .context
2901 .0
2902 .compute_pass_set_pipeline(&mut self.pass, pipeline.id)
2903 {
2904 self.context.handle_error(
2905 &self.error_sink,
2906 cause,
2907 self.pass.label(),
2908 "ComputePass::set_pipeline",
2909 );
2910 }
2911 }
2912
2913 fn set_bind_group(
2914 &mut self,
2915 index: u32,
2916 bind_group: Option<&dispatch::DispatchBindGroup>,
2917 offsets: &[crate::DynamicOffset],
2918 ) {
2919 let bg = bind_group.map(|bg| bg.as_core().id);
2920
2921 if let Err(cause) =
2922 self.context
2923 .0
2924 .compute_pass_set_bind_group(&mut self.pass, index, bg, offsets)
2925 {
2926 self.context.handle_error(
2927 &self.error_sink,
2928 cause,
2929 self.pass.label(),
2930 "ComputePass::set_bind_group",
2931 );
2932 }
2933 }
2934
2935 fn set_immediates(&mut self, offset: u32, data: &[u8]) {
2936 if let Err(cause) = self
2937 .context
2938 .0
2939 .compute_pass_set_immediates(&mut self.pass, offset, data)
2940 {
2941 self.context.handle_error(
2942 &self.error_sink,
2943 cause,
2944 self.pass.label(),
2945 "ComputePass::set_immediates",
2946 );
2947 }
2948 }
2949
2950 fn insert_debug_marker(&mut self, label: &str) {
2951 if let Err(cause) =
2952 self.context
2953 .0
2954 .compute_pass_insert_debug_marker(&mut self.pass, label, 0)
2955 {
2956 self.context.handle_error(
2957 &self.error_sink,
2958 cause,
2959 self.pass.label(),
2960 "ComputePass::insert_debug_marker",
2961 );
2962 }
2963 }
2964
2965 fn push_debug_group(&mut self, group_label: &str) {
2966 if let Err(cause) =
2967 self.context
2968 .0
2969 .compute_pass_push_debug_group(&mut self.pass, group_label, 0)
2970 {
2971 self.context.handle_error(
2972 &self.error_sink,
2973 cause,
2974 self.pass.label(),
2975 "ComputePass::push_debug_group",
2976 );
2977 }
2978 }
2979
2980 fn pop_debug_group(&mut self) {
2981 if let Err(cause) = self.context.0.compute_pass_pop_debug_group(&mut self.pass) {
2982 self.context.handle_error(
2983 &self.error_sink,
2984 cause,
2985 self.pass.label(),
2986 "ComputePass::pop_debug_group",
2987 );
2988 }
2989 }
2990
2991 fn write_timestamp(&mut self, query_set: &dispatch::DispatchQuerySet, query_index: u32) {
2992 let query_set = query_set.as_core();
2993
2994 if let Err(cause) =
2995 self.context
2996 .0
2997 .compute_pass_write_timestamp(&mut self.pass, query_set.id, query_index)
2998 {
2999 self.context.handle_error(
3000 &self.error_sink,
3001 cause,
3002 self.pass.label(),
3003 "ComputePass::write_timestamp",
3004 );
3005 }
3006 }
3007
3008 fn begin_pipeline_statistics_query(
3009 &mut self,
3010 query_set: &dispatch::DispatchQuerySet,
3011 query_index: u32,
3012 ) {
3013 let query_set = query_set.as_core();
3014
3015 if let Err(cause) = self.context.0.compute_pass_begin_pipeline_statistics_query(
3016 &mut self.pass,
3017 query_set.id,
3018 query_index,
3019 ) {
3020 self.context.handle_error(
3021 &self.error_sink,
3022 cause,
3023 self.pass.label(),
3024 "ComputePass::begin_pipeline_statistics_query",
3025 );
3026 }
3027 }
3028
3029 fn end_pipeline_statistics_query(&mut self) {
3030 if let Err(cause) = self
3031 .context
3032 .0
3033 .compute_pass_end_pipeline_statistics_query(&mut self.pass)
3034 {
3035 self.context.handle_error(
3036 &self.error_sink,
3037 cause,
3038 self.pass.label(),
3039 "ComputePass::end_pipeline_statistics_query",
3040 );
3041 }
3042 }
3043
3044 fn dispatch_workgroups(&mut self, x: u32, y: u32, z: u32) {
3045 if let Err(cause) = self
3046 .context
3047 .0
3048 .compute_pass_dispatch_workgroups(&mut self.pass, x, y, z)
3049 {
3050 self.context.handle_error(
3051 &self.error_sink,
3052 cause,
3053 self.pass.label(),
3054 "ComputePass::dispatch_workgroups",
3055 );
3056 }
3057 }
3058
3059 fn dispatch_workgroups_indirect(
3060 &mut self,
3061 indirect_buffer: &dispatch::DispatchBuffer,
3062 indirect_offset: crate::BufferAddress,
3063 ) {
3064 let indirect_buffer = indirect_buffer.as_core();
3065
3066 if let Err(cause) = self.context.0.compute_pass_dispatch_workgroups_indirect(
3067 &mut self.pass,
3068 indirect_buffer.id,
3069 indirect_offset,
3070 ) {
3071 self.context.handle_error(
3072 &self.error_sink,
3073 cause,
3074 self.pass.label(),
3075 "ComputePass::dispatch_workgroups_indirect",
3076 );
3077 }
3078 }
3079
3080 fn end(&mut self) {
3081 if let Err(cause) = self.context.0.compute_pass_end(&mut self.pass) {
3082 self.context.handle_error(
3083 &self.error_sink,
3084 cause,
3085 self.pass.label(),
3086 "ComputePass::end",
3087 );
3088 }
3089 }
3090}
3091
3092impl Drop for CoreComputePass {
3093 fn drop(&mut self) {
3094 dispatch::ComputePassInterface::end(self);
3095 }
3096}
3097
/// `RenderPassInterface` backed by a wgpu-core pass encoder.
///
/// Every command is recorded into `self.pass` via the corresponding
/// `Global::render_pass_*` entry point. Recording errors are not returned to
/// the caller; they are funneled into the device error sink, tagged with the
/// pass label and the API entry point that failed.
impl dispatch::RenderPassInterface for CoreRenderPass {
    fn set_pipeline(&mut self, pipeline: &dispatch::DispatchRenderPipeline) {
        let pipeline = pipeline.as_core();

        if let Err(cause) = self
            .context
            .0
            .render_pass_set_pipeline(&mut self.pass, pipeline.id)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_pipeline",
            );
        }
    }

    fn set_bind_group(
        &mut self,
        index: u32,
        bind_group: Option<&dispatch::DispatchBindGroup>,
        offsets: &[crate::DynamicOffset],
    ) {
        // `None` unbinds the slot.
        let bg = bind_group.map(|bg| bg.as_core().id);

        if let Err(cause) =
            self.context
                .0
                .render_pass_set_bind_group(&mut self.pass, index, bg, offsets)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_bind_group",
            );
        }
    }

    fn set_index_buffer(
        &mut self,
        buffer: &dispatch::DispatchBuffer,
        index_format: crate::IndexFormat,
        offset: crate::BufferAddress,
        size: Option<crate::BufferSize>,
    ) {
        let buffer = buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_set_index_buffer(
            &mut self.pass,
            buffer.id,
            index_format,
            offset,
            size,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_index_buffer",
            );
        }
    }

    fn set_vertex_buffer(
        &mut self,
        slot: u32,
        buffer: &dispatch::DispatchBuffer,
        offset: crate::BufferAddress,
        size: Option<crate::BufferSize>,
    ) {
        let buffer = buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_set_vertex_buffer(
            &mut self.pass,
            slot,
            buffer.id,
            offset,
            size,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_vertex_buffer",
            );
        }
    }

    fn set_immediates(&mut self, stages: crate::ShaderStages, offset: u32, data: &[u8]) {
        if let Err(cause) =
            self.context
                .0
                .render_pass_set_immediates(&mut self.pass, stages, offset, data)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_immediates",
            );
        }
    }

    fn set_blend_constant(&mut self, color: crate::Color) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_set_blend_constant(&mut self.pass, color)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_blend_constant",
            );
        }
    }

    fn set_scissor_rect(&mut self, x: u32, y: u32, width: u32, height: u32) {
        if let Err(cause) =
            self.context
                .0
                .render_pass_set_scissor_rect(&mut self.pass, x, y, width, height)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_scissor_rect",
            );
        }
    }

    fn set_viewport(
        &mut self,
        x: f32,
        y: f32,
        width: f32,
        height: f32,
        min_depth: f32,
        max_depth: f32,
    ) {
        if let Err(cause) = self.context.0.render_pass_set_viewport(
            &mut self.pass,
            x,
            y,
            width,
            height,
            min_depth,
            max_depth,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_viewport",
            );
        }
    }

    fn set_stencil_reference(&mut self, reference: u32) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_set_stencil_reference(&mut self.pass, reference)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_stencil_reference",
            );
        }
    }

    fn draw(&mut self, vertices: Range<u32>, instances: Range<u32>) {
        // The public API takes ranges; wgpu-core wants (count, first) pairs.
        if let Err(cause) = self.context.0.render_pass_draw(
            &mut self.pass,
            vertices.end - vertices.start,
            instances.end - instances.start,
            vertices.start,
            instances.start,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw",
            );
        }
    }

    fn draw_indexed(&mut self, indices: Range<u32>, base_vertex: i32, instances: Range<u32>) {
        // Same range -> (count, first) conversion as `draw`.
        if let Err(cause) = self.context.0.render_pass_draw_indexed(
            &mut self.pass,
            indices.end - indices.start,
            instances.end - instances.start,
            indices.start,
            base_vertex,
            instances.start,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_indexed",
            );
        }
    }

    fn draw_mesh_tasks(&mut self, group_count_x: u32, group_count_y: u32, group_count_z: u32) {
        if let Err(cause) = self.context.0.render_pass_draw_mesh_tasks(
            &mut self.pass,
            group_count_x,
            group_count_y,
            group_count_z,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_mesh_tasks",
            );
        }
    }

    fn draw_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_draw_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_indirect",
            );
        }
    }

    fn draw_indexed_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_draw_indexed_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_indexed_indirect",
            );
        }
    }

    fn draw_mesh_tasks_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_draw_mesh_tasks_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_mesh_tasks_indirect",
            );
        }
    }

    fn multi_draw_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_multi_draw_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
            count,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_indirect",
            );
        }
    }

    fn multi_draw_indexed_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_multi_draw_indexed_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
            count,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_indexed_indirect",
            );
        }
    }

    fn multi_draw_mesh_tasks_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_multi_draw_mesh_tasks_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
            count,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_mesh_tasks_indirect",
            );
        }
    }

    fn multi_draw_indirect_count(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count_buffer: &dispatch::DispatchBuffer,
        count_buffer_offset: crate::BufferAddress,
        max_count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();
        let count_buffer = count_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_multi_draw_indirect_count(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
            count_buffer.id,
            count_buffer_offset,
            max_count,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_indirect_count",
            );
        }
    }

    fn multi_draw_indexed_indirect_count(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count_buffer: &dispatch::DispatchBuffer,
        count_buffer_offset: crate::BufferAddress,
        max_count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();
        let count_buffer = count_buffer.as_core();

        if let Err(cause) = self
            .context
            .0
            .render_pass_multi_draw_indexed_indirect_count(
                &mut self.pass,
                indirect_buffer.id,
                indirect_offset,
                count_buffer.id,
                count_buffer_offset,
                max_count,
            )
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_indexed_indirect_count",
            );
        }
    }

    fn multi_draw_mesh_tasks_indirect_count(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count_buffer: &dispatch::DispatchBuffer,
        count_buffer_offset: crate::BufferAddress,
        max_count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();
        let count_buffer = count_buffer.as_core();

        if let Err(cause) = self
            .context
            .0
            .render_pass_multi_draw_mesh_tasks_indirect_count(
                &mut self.pass,
                indirect_buffer.id,
                indirect_offset,
                count_buffer.id,
                count_buffer_offset,
                max_count,
            )
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_mesh_tasks_indirect_count",
            );
        }
    }

    fn insert_debug_marker(&mut self, label: &str) {
        // The trailing 0 is the marker color (unused by wgpu-core).
        if let Err(cause) = self
            .context
            .0
            .render_pass_insert_debug_marker(&mut self.pass, label, 0)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::insert_debug_marker",
            );
        }
    }

    fn push_debug_group(&mut self, group_label: &str) {
        // The trailing 0 is the group color (unused by wgpu-core).
        if let Err(cause) =
            self.context
                .0
                .render_pass_push_debug_group(&mut self.pass, group_label, 0)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::push_debug_group",
            );
        }
    }

    fn pop_debug_group(&mut self) {
        if let Err(cause) = self.context.0.render_pass_pop_debug_group(&mut self.pass) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::pop_debug_group",
            );
        }
    }

    fn write_timestamp(&mut self, query_set: &dispatch::DispatchQuerySet, query_index: u32) {
        let query_set = query_set.as_core();

        if let Err(cause) =
            self.context
                .0
                .render_pass_write_timestamp(&mut self.pass, query_set.id, query_index)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::write_timestamp",
            );
        }
    }

    fn begin_occlusion_query(&mut self, query_index: u32) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_begin_occlusion_query(&mut self.pass, query_index)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::begin_occlusion_query",
            );
        }
    }

    fn end_occlusion_query(&mut self) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_end_occlusion_query(&mut self.pass)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::end_occlusion_query",
            );
        }
    }

    fn begin_pipeline_statistics_query(
        &mut self,
        query_set: &dispatch::DispatchQuerySet,
        query_index: u32,
    ) {
        let query_set = query_set.as_core();

        if let Err(cause) = self.context.0.render_pass_begin_pipeline_statistics_query(
            &mut self.pass,
            query_set.id,
            query_index,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::begin_pipeline_statistics_query",
            );
        }
    }

    fn end_pipeline_statistics_query(&mut self) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_end_pipeline_statistics_query(&mut self.pass)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::end_pipeline_statistics_query",
            );
        }
    }

    fn execute_bundles(
        &mut self,
        render_bundles: &mut dyn Iterator<Item = &dispatch::DispatchRenderBundle>,
    ) {
        // Inline buffer: bundle lists are typically small.
        let temp_render_bundles = render_bundles
            .map(|rb| rb.as_core().id)
            .collect::<SmallVec<[_; 4]>>();
        if let Err(cause) = self
            .context
            .0
            .render_pass_execute_bundles(&mut self.pass, &temp_render_bundles)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::execute_bundles",
            );
        }
    }

    fn end(&mut self) {
        if let Err(cause) = self.context.0.render_pass_end(&mut self.pass) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::end",
            );
        }
    }
}
3706
impl Drop for CoreRenderPass {
    fn drop(&mut self) {
        // Ensure the pass is ended even if the user never called `end`
        // explicitly; `end` routes any failure to the pass's error sink.
        dispatch::RenderPassInterface::end(self);
    }
}
3712
impl dispatch::RenderBundleEncoderInterface for CoreRenderBundleEncoder {
    /// Records a render-pipeline switch into the bundle.
    fn set_pipeline(&mut self, pipeline: &dispatch::DispatchRenderPipeline) {
        let pipeline = pipeline.as_core();

        wgpu_render_bundle_set_pipeline(&mut self.encoder, pipeline.id)
    }

    /// Records a bind-group assignment for slot `index`, with the given
    /// dynamic `offsets`. `None` clears the slot.
    fn set_bind_group(
        &mut self,
        index: u32,
        bind_group: Option<&dispatch::DispatchBindGroup>,
        offsets: &[crate::DynamicOffset],
    ) {
        let bg = bind_group.map(|bg| bg.as_core().id);

        // SAFETY: `offsets` is a live slice borrowed for the duration of
        // this call, so the pointer/length pair passed to the FFI helper
        // is valid while it runs.
        unsafe {
            wgpu_render_bundle_set_bind_group(
                &mut self.encoder,
                index,
                bg,
                offsets.as_ptr(),
                offsets.len(),
            )
        }
    }

    /// Records the index buffer binding for subsequent indexed draws.
    fn set_index_buffer(
        &mut self,
        buffer: &dispatch::DispatchBuffer,
        index_format: crate::IndexFormat,
        offset: crate::BufferAddress,
        size: Option<crate::BufferSize>,
    ) {
        let buffer = buffer.as_core();

        self.encoder
            .set_index_buffer(buffer.id, index_format, offset, size)
    }

    /// Records the vertex buffer binding for `slot`.
    fn set_vertex_buffer(
        &mut self,
        slot: u32,
        buffer: &dispatch::DispatchBuffer,
        offset: crate::BufferAddress,
        size: Option<crate::BufferSize>,
    ) {
        let buffer = buffer.as_core();

        wgpu_render_bundle_set_vertex_buffer(&mut self.encoder, slot, buffer.id, offset, size)
    }

    /// Records an immediates data upload visible to `stages`.
    fn set_immediates(&mut self, stages: crate::ShaderStages, offset: u32, data: &[u8]) {
        // SAFETY: `data` is a live slice for the duration of this call, so
        // the pointer/length pair passed to the FFI helper is valid.
        // NOTE(review): `try_into().unwrap()` panics if `data.len()` exceeds
        // the integer width the FFI helper expects — presumably unreachable
        // for immediates-sized data; confirm against the helper's signature.
        unsafe {
            wgpu_render_bundle_set_immediates(
                &mut self.encoder,
                stages,
                offset,
                data.len().try_into().unwrap(),
                data.as_ptr(),
            )
        }
    }

    /// Records a non-indexed draw; the `Range` arguments are converted to
    /// (count, first) form expected by the FFI helper.
    fn draw(&mut self, vertices: Range<u32>, instances: Range<u32>) {
        wgpu_render_bundle_draw(
            &mut self.encoder,
            vertices.end - vertices.start,
            instances.end - instances.start,
            vertices.start,
            instances.start,
        )
    }

    /// Records an indexed draw; ranges are converted to (count, first) form.
    fn draw_indexed(&mut self, indices: Range<u32>, base_vertex: i32, instances: Range<u32>) {
        wgpu_render_bundle_draw_indexed(
            &mut self.encoder,
            indices.end - indices.start,
            instances.end - instances.start,
            indices.start,
            base_vertex,
            instances.start,
        )
    }

    /// Records an indirect draw whose arguments live in `indirect_buffer`
    /// at `indirect_offset`.
    fn draw_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        wgpu_render_bundle_draw_indirect(&mut self.encoder, indirect_buffer.id, indirect_offset)
    }

    /// Records an indirect indexed draw whose arguments live in
    /// `indirect_buffer` at `indirect_offset`.
    fn draw_indexed_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        wgpu_render_bundle_draw_indexed_indirect(
            &mut self.encoder,
            indirect_buffer.id,
            indirect_offset,
        )
    }

    /// Finishes recording, consuming the encoder and producing the bundle.
    ///
    /// A creation error here is treated as fatal (`handle_error_fatal`)
    /// rather than being routed to an error sink.
    fn finish(self, desc: &crate::RenderBundleDescriptor<'_>) -> dispatch::DispatchRenderBundle
    where
        Self: Sized,
    {
        let (id, error) = self.context.0.render_bundle_encoder_finish(
            self.encoder,
            &desc.map_label(|l| l.map(Borrowed)),
            None,
        );
        if let Some(err) = error {
            self.context
                .handle_error_fatal(err, "RenderBundleEncoder::finish");
        }
        CoreRenderBundle {
            context: self.context.clone(),
            id,
        }
        .into()
    }
}
3841
// Marker implementation: finished render bundles expose no operations
// through this dispatch interface.
impl dispatch::RenderBundleInterface for CoreRenderBundle {}
3843
impl Drop for CoreRenderBundle {
    fn drop(&mut self) {
        // Release the wgpu-core resource backing this bundle.
        self.context.0.render_bundle_drop(self.id)
    }
}
3849
impl dispatch::SurfaceInterface for CoreSurface {
    /// Queries this surface's capabilities for `adapter`.
    ///
    /// An error from wgpu-core yields default (empty) capabilities instead
    /// of failing.
    fn get_capabilities(&self, adapter: &dispatch::DispatchAdapter) -> wgt::SurfaceCapabilities {
        let adapter = adapter.as_core();

        self.context
            .0
            .surface_get_capabilities(self.id, adapter.id)
            .unwrap_or_default()
    }

    /// (Re)configures the surface for `device` using `config`.
    ///
    /// On success the configuring device id and its error sink are stored so
    /// later surface operations can report errors against that device; on
    /// failure the error is reported (without a label) and the stored state
    /// is left untouched.
    fn configure(&self, device: &dispatch::DispatchDevice, config: &crate::SurfaceConfiguration) {
        let device = device.as_core();

        let error = self.context.0.surface_configure(self.id, device.id, config);
        if let Some(e) = error {
            self.context
                .handle_error_nolabel(&device.error_sink, e, "Surface::configure");
        } else {
            *self.configured_device.lock() = Some(device.id);
            *self.error_sink.lock() = Some(device.error_sink.clone());
        }
    }

    /// Acquires the next surface texture, returning it together with the
    /// acquisition status and a detail handle used later for
    /// `present`/`texture_discard`.
    fn get_current_texture(
        &self,
    ) -> (
        Option<dispatch::DispatchTexture>,
        crate::SurfaceStatus,
        dispatch::DispatchSurfaceOutputDetail,
    ) {
        // Use the error sink stored by `configure`; fall back to a fresh,
        // unconnected sink if the surface was never configured.
        let error_sink = if let Some(error_sink) = self.error_sink.lock().as_ref() {
            error_sink.clone()
        } else {
            Arc::new(Mutex::new(ErrorSinkRaw::new()))
        };

        // Built up-front so both the success and error paths can return it.
        let output_detail = CoreSurfaceOutputDetail {
            context: self.context.clone(),
            surface_id: self.id,
            error_sink: error_sink.clone(),
        }
        .into();

        match self.context.0.surface_get_current_texture(self.id, None) {
            Ok(wgc::present::SurfaceOutput {
                status,
                texture: texture_id,
            }) => {
                // The texture id may be absent even on Ok; the status still
                // conveys the outcome to the caller.
                let data = texture_id
                    .map(|id| CoreTexture {
                        context: self.context.clone(),
                        id,
                        error_sink,
                    })
                    .map(Into::into);

                (data, status, output_detail)
            }
            Err(err) => {
                // With a stored sink (surface was configured), report the
                // error there and return an Unknown status; otherwise there
                // is nowhere to route it, so it is fatal.
                let error_sink = self.error_sink.lock();
                match error_sink.as_ref() {
                    Some(error_sink) => {
                        self.context.handle_error_nolabel(
                            error_sink,
                            err,
                            "Surface::get_current_texture_view",
                        );
                        (None, crate::SurfaceStatus::Unknown, output_detail)
                    }
                    None => self
                        .context
                        .handle_error_fatal(err, "Surface::get_current_texture_view"),
                }
            }
        }
    }
}
3927
impl Drop for CoreSurface {
    fn drop(&mut self) {
        // Release the wgpu-core resource backing this surface.
        self.context.0.surface_drop(self.id)
    }
}
3933
3934impl dispatch::SurfaceOutputDetailInterface for CoreSurfaceOutputDetail {
3935 fn present(&self) {
3936 match self.context.0.surface_present(self.surface_id) {
3937 Ok(_status) => (),
3938 Err(err) => {
3939 self.context
3940 .handle_error_nolabel(&self.error_sink, err, "Surface::present");
3941 }
3942 }
3943 }
3944
3945 fn texture_discard(&self) {
3946 match self.context.0.surface_texture_discard(self.surface_id) {
3947 Ok(_status) => (),
3948 Err(err) => self
3949 .context
3950 .handle_error_fatal(err, "Surface::discard_texture"),
3951 }
3952 }
3953}
impl Drop for CoreSurfaceOutputDetail {
    fn drop(&mut self) {
        // Intentionally a no-op: presenting or discarding the surface texture
        // is driven explicitly via `present`/`texture_discard`.
        // NOTE(review): the body appears to have been emptied; confirm no
        // cleanup (e.g. auto-discarding an unpresented texture) belongs here.
    }
}
3961
3962impl dispatch::QueueWriteBufferInterface for CoreQueueWriteBuffer {
3963 fn slice(&self) -> &[u8] {
3964 panic!()
3965 }
3966
3967 #[inline]
3968 fn slice_mut(&mut self) -> &mut [u8] {
3969 self.mapping.slice_mut()
3970 }
3971}
impl Drop for CoreQueueWriteBuffer {
    fn drop(&mut self) {
        // Intentionally a no-op: the struct's fields (including the staging
        // `mapping`) are dropped automatically after this body runs.
        // NOTE(review): the body appears to have been emptied; confirm that
        // dropping without submitting does not leak the staging allocation.
    }
}
3979
impl dispatch::BufferMappedRangeInterface for CoreBufferMappedRange {
    /// Views the mapped range as an immutable byte slice.
    #[inline]
    fn slice(&self) -> &[u8] {
        // SAFETY: `ptr` and `size` describe the mapped region this value was
        // constructed over; the `&self` borrow ties the slice's lifetime to
        // the mapping handle.
        // NOTE(review): the invariant is established at construction, outside
        // this chunk — confirm `size` never exceeds the mapped region.
        unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.size) }
    }

    /// Views the mapped range as a mutable byte slice.
    #[inline]
    fn slice_mut(&mut self) -> &mut [u8] {
        // SAFETY: same `ptr`/`size` invariant as `slice`; `&mut self` ensures
        // exclusive access for the mutable view.
        unsafe { slice::from_raw_parts_mut(self.ptr.as_ptr(), self.size) }
    }

    /// WebGPU-only accessor; this wgpu-core backend has no JS-backed storage,
    /// so calling it is a programming error.
    #[cfg(webgpu)]
    fn as_uint8array(&self) -> &js_sys::Uint8Array {
        panic!("Only available on WebGPU")
    }
}
3995}