// wgpu_hal/noop/buffer.rs

1use alloc::vec::Vec;
2use core::{cell::UnsafeCell, ops::Range, ptr};
3
4cfg_if::cfg_if! {
5    if #[cfg(supports_ptr_atomics)] {
6        use alloc::sync::Arc;
7    } else if #[cfg(feature = "portable-atomic")] {
8        use portable_atomic_util::Arc;
9    }
10}
11
/// CPU-memory-backed buffer for the noop backend: a fixed-size, shared byte
/// allocation with interior mutability.
///
/// `Clone` is cheap: clones share the same underlying storage through the
/// [`Arc`], so all clones observe the same bytes.
#[derive(Clone, Debug)]
pub struct Buffer {
    /// This data is potentially accessed mutably in arbitrary non-overlapping slices,
    /// so we must store it in `UnsafeCell` to avoid making any too-strong no-aliasing claims.
    storage: Arc<UnsafeCell<[u8]>>,

    /// Size of the allocation.
    ///
    /// This is redundant with `storage.get().len()`, but that method is not
    /// available until our MSRV is 1.79 or greater.
    size: usize,
}
24
// `UnsafeCell` is `!Sync`, which suppresses the automatic `Send`/`Sync`
// impls for `Buffer`; they are reinstated manually here.
/// SAFETY:
/// This shared mutable data will not be accessed in a way which causes data races;
/// the obligation to do so is on the caller of the HAL API.
/// For safe code, `wgpu-core` validation manages appropriate access.
unsafe impl Send for Buffer {}
unsafe impl Sync for Buffer {}
31
impl Buffer {
    /// Creates a zero-initialized allocation of `desc.size` bytes.
    ///
    /// Only the `size` field of the descriptor affects this backend; label,
    /// usage, and memory flags are deliberately ignored.
    ///
    /// # Errors
    ///
    /// Returns [`crate::DeviceError::OutOfMemory`] if `desc.size` does not fit
    /// in `usize` or if the allocation itself fails.
    pub(super) fn new(desc: &crate::BufferDescriptor) -> Result<Self, crate::DeviceError> {
        // Exhaustive destructuring: adding a field to `BufferDescriptor`
        // forces a decision here about whether this backend must handle it.
        let &crate::BufferDescriptor {
            label: _,
            size,
            usage: _,
            memory_flags: _,
        } = desc;

        // A size too large for the address space is reported as an
        // out-of-memory error rather than a panic.
        let size = usize::try_from(size).map_err(|_| crate::DeviceError::OutOfMemory)?;

        // `try_reserve_exact` (rather than `with_capacity`) lets a failed
        // allocation surface as a recoverable error instead of an abort.
        let mut vector: Vec<u8> = Vec::new();
        vector
            .try_reserve_exact(size)
            .map_err(|_| crate::DeviceError::OutOfMemory)?;
        // Zero-fill; this cannot reallocate since capacity was reserved above.
        vector.resize(size, 0);
        let storage: Arc<[u8]> = Arc::from(vector);
        debug_assert_eq!(storage.len(), size);

        // SAFETY: `UnsafeCell<[u8]>` and `[u8]` have the same layout.
        // This is just adding a wrapper type without changing any layout,
        // because there is not currently a safe language/`std` way to accomplish this.
        let storage: Arc<UnsafeCell<[u8]>> =
            unsafe { Arc::from_raw(Arc::into_raw(storage) as *mut UnsafeCell<[u8]>) };

        Ok(Buffer { storage, size })
    }

    /// Returns a pointer to the memory owned by this buffer within the given `range`.
    ///
    /// This may be used to create any number of simultaneous pointers;
    /// aliasing is only a concern when actually reading, writing, or converting the pointer
    /// to a reference.
    ///
    /// Panics if `range` is out of bounds or too large for `usize`
    /// (see `range_to_usize`).
    pub(super) fn get_slice_ptr(&self, range: crate::MemoryRange) -> *mut [u8] {
        // `UnsafeCell::get` yields a raw pointer without creating a reference,
        // so no aliasing claim is made here.
        let base_ptr = self.storage.get();
        let range = range_to_usize(range, self.size);

        // We must obtain a slice pointer without ever creating a slice reference
        // that could alias with another slice.
        ptr::slice_from_raw_parts_mut(
            // SAFETY: `range_to_usize` bounds checks this addition.
            unsafe { base_ptr.cast::<u8>().add(range.start) },
            range.len(),
        )
    }
}
78
79/// Convert a [`crate::MemoryRange`] to `Range<usize>` and bounds check it.
80fn range_to_usize(range: crate::MemoryRange, upper_bound: usize) -> Range<usize> {
81    // Note: these assertions should be impossible to trigger from safe code.
82    // We're doing them anyway since this entire backend is for testing
83    // (except for when it is an unused placeholder)
84    let start = usize::try_from(range.start).expect("range too large");
85    let end = usize::try_from(range.end).expect("range too large");
86    assert!(start <= end && end <= upper_bound, "range out of bounds");
87    start..end
88}