wgpu_hal/vulkan/
sampler.rs

//! Sampler cache for Vulkan backend.
//!
//! Nearly identical to the DX12 sampler cache, without descriptor heap management.
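//!
//! A minimal usage sketch (illustrative only; assumes `device` is a valid [`ash::Device`],
//! `desc` is a fully populated [`vk::SamplerCreateInfo`], and `max_samplers` comes from the
//! device's sampler allocation limit):
//!
//! ```ignore
//! let mut cache = SamplerCache::new(max_samplers);
//! let sampler = cache.create_sampler(&device, desc)?;
//! // ... bind and use the sampler ...
//! cache.destroy_sampler(&device, desc, sampler);
//! ```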

use ash::vk;
use hashbrown::{hash_map::Entry, HashMap};
use ordered_float::OrderedFloat;

/// If the allowed sampler count is above this value, the sampler cache is disabled.
const ENABLE_SAMPLER_CACHE_CUTOFF: u32 = 1 << 20;

/// [`vk::SamplerCreateInfo`] is not hashable, so we wrap it in a newtype that is.
///
/// We use [`OrderedFloat`] to allow for floating point values to be compared and
/// hashed in a defined way.
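///
/// For example, this is an illustrative property of [`OrderedFloat`]'s total ordering (two
/// NaN values compare equal, and hashing is consistent with that equality, so the wrapped
/// description can serve as a hash-map key):
///
/// ```ignore
/// use ordered_float::OrderedFloat;
/// assert_eq!(OrderedFloat(f32::NAN), OrderedFloat(f32::NAN));
/// ```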
#[derive(Copy, Clone)]
struct HashableSamplerCreateInfo(vk::SamplerCreateInfo<'static>);

impl PartialEq for HashableSamplerCreateInfo {
    fn eq(&self, other: &Self) -> bool {
        self.0.flags == other.0.flags
            && self.0.mag_filter == other.0.mag_filter
            && self.0.min_filter == other.0.min_filter
            && self.0.mipmap_mode == other.0.mipmap_mode
            && self.0.address_mode_u == other.0.address_mode_u
            && self.0.address_mode_v == other.0.address_mode_v
            && self.0.address_mode_w == other.0.address_mode_w
            && OrderedFloat(self.0.mip_lod_bias) == OrderedFloat(other.0.mip_lod_bias)
            && self.0.anisotropy_enable == other.0.anisotropy_enable
            && OrderedFloat(self.0.max_anisotropy) == OrderedFloat(other.0.max_anisotropy)
            && self.0.compare_enable == other.0.compare_enable
            && self.0.compare_op == other.0.compare_op
            && OrderedFloat(self.0.min_lod) == OrderedFloat(other.0.min_lod)
            && OrderedFloat(self.0.max_lod) == OrderedFloat(other.0.max_lod)
            && self.0.border_color == other.0.border_color
            && self.0.unnormalized_coordinates == other.0.unnormalized_coordinates
    }
}

impl Eq for HashableSamplerCreateInfo {}

impl core::hash::Hash for HashableSamplerCreateInfo {
    fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
        self.0.flags.hash(state);
        self.0.mag_filter.hash(state);
        self.0.min_filter.hash(state);
        self.0.mipmap_mode.hash(state);
        self.0.address_mode_u.hash(state);
        self.0.address_mode_v.hash(state);
        self.0.address_mode_w.hash(state);
        OrderedFloat(self.0.mip_lod_bias).hash(state);
        self.0.anisotropy_enable.hash(state);
        OrderedFloat(self.0.max_anisotropy).hash(state);
        self.0.compare_enable.hash(state);
        self.0.compare_op.hash(state);
        OrderedFloat(self.0.min_lod).hash(state);
        OrderedFloat(self.0.max_lod).hash(state);
        self.0.border_color.hash(state);
        self.0.unnormalized_coordinates.hash(state);
    }
}

/// Entry in the sampler cache.
struct CacheEntry {
    sampler: vk::Sampler,
    ref_count: u32,
}

/// Global sampler cache.
///
/// As some devices have a low limit (4000) on the number of unique samplers that can be created,
/// we need to cache samplers to avoid running out if people eagerly create duplicate samplers.
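///
/// If the device's sampler allocation limit is so high that caching is pointless (see
/// [`ENABLE_SAMPLER_CACHE_CUTOFF`]), the cache runs in passthrough mode and simply forwards
/// create/destroy calls to the driver.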
pub(crate) struct SamplerCache {
    /// Mapping from the sampler description to sampler and reference count.
    samplers: HashMap<HashableSamplerCreateInfo, CacheEntry>,
    /// Maximum number of unique samplers that can be created.
    total_capacity: u32,
    /// If true, the sampler cache is disabled and all samplers are created on demand.
    passthrough: bool,
}

impl SamplerCache {
    pub fn new(total_capacity: u32) -> Self {
        let passthrough = total_capacity >= ENABLE_SAMPLER_CACHE_CUTOFF;
        Self {
            samplers: HashMap::new(),
            total_capacity,
            passthrough,
        }
    }

    /// Create a sampler, or return a cached one if an identical sampler has already been created.
    ///
    /// If the sampler already exists, its reference count is incremented.
    ///
    /// If the sampler does not exist, a new sampler is created and inserted into the cache.
    ///
    /// If the cache is full, an error is returned.
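    ///
    /// An illustrative sketch of the resulting contract (hypothetical `device` and `desc`
    /// values; not taken from real code):
    ///
    /// ```ignore
    /// let a = cache.create_sampler(&device, desc)?;
    /// let b = cache.create_sampler(&device, desc)?; // identical description: cache hit
    /// assert_eq!(a, b);
    /// // Every `create_sampler` call must be balanced by a `destroy_sampler` call.
    /// cache.destroy_sampler(&device, desc, a);
    /// cache.destroy_sampler(&device, desc, b);
    /// ```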
    pub fn create_sampler(
        &mut self,
        device: &ash::Device,
        create_info: vk::SamplerCreateInfo<'static>,
    ) -> Result<vk::Sampler, crate::DeviceError> {
        if self.passthrough {
            return unsafe { device.create_sampler(&create_info, None) }
                .map_err(super::map_host_device_oom_and_ioca_err);
        }

        // Get the number of used samplers. This must be read before the `entry` call to appease the borrow checker.
        let used_samplers = self.samplers.len();

        match self.samplers.entry(HashableSamplerCreateInfo(create_info)) {
            Entry::Occupied(occupied_entry) => {
                // We have found a match, so increment the refcount and return the existing sampler.
                let value = occupied_entry.into_mut();
                value.ref_count += 1;
                Ok(value.sampler)
            }
            Entry::Vacant(vacant_entry) => {
                // We need to create a new sampler.

                // First, check that there is room for another unique sampler.
                if used_samplers >= self.total_capacity as usize {
                    log::error!("There is no more room in the sampler cache for more unique samplers. Your device supports a maximum of {} unique samplers.", self.total_capacity);
                    return Err(crate::DeviceError::OutOfMemory);
                }

                // Create the sampler.
                let sampler = unsafe { device.create_sampler(&create_info, None) }
                    .map_err(super::map_host_device_oom_and_ioca_err)?;

                // Insert the new sampler into the mapping.
                vacant_entry.insert(CacheEntry {
                    sampler,
                    ref_count: 1,
                });

                Ok(sampler)
            }
        }
    }

    /// Decrease the reference count of a sampler and destroy it if the reference count reaches 0.
    ///
    /// The provided sampler is checked against the sampler in the cache to catch mismatched create/destroy calls.
    pub fn destroy_sampler(
        &mut self,
        device: &ash::Device,
        create_info: vk::SamplerCreateInfo<'static>,
        provided_sampler: vk::Sampler,
    ) {
        if self.passthrough {
            unsafe { device.destroy_sampler(provided_sampler, None) };
            return;
        }

        let Entry::Occupied(mut hash_map_entry) =
            self.samplers.entry(HashableSamplerCreateInfo(create_info))
        else {
            log::error!("Trying to destroy a sampler that does not exist.");
            return;
        };
        let cache_entry = hash_map_entry.get_mut();

        assert_eq!(
            cache_entry.sampler, provided_sampler,
            "Provided sampler does not match the sampler in the cache."
        );

        cache_entry.ref_count -= 1;

        if cache_entry.ref_count == 0 {
            unsafe { device.destroy_sampler(cache_entry.sampler, None) };
            hash_map_entry.remove();
        }
    }
}