1//! Sampler cache for Vulkan backend.
2//!
3//! Nearly identical to the DX12 sampler cache, without descriptor heap management.
45use ash::vk;
6use hashbrown::{hash_map::Entry, HashMap};
7use ordered_float::OrderedFloat;
/// If the allowed sampler count is above this value, the sampler cache is disabled.
///
/// Devices with a limit this high do not risk running out of unique samplers,
/// so the bookkeeping overhead of caching buys nothing; the cache then runs in
/// passthrough mode and creates/destroys samplers directly.
const ENABLE_SAMPLER_CACHE_CUTOFF: u32 = 1 << 20;
/// [`vk::SamplerCreateInfo`] is not hashable, so we wrap it in a newtype that is.
///
/// We use [`OrderedFloat`] to allow for floating point values to be compared and
/// hashed in a defined way.
///
/// Note that the manual `PartialEq`/`Hash` impls below only consider the plain
/// value fields of the create info; the `p_next` extension chain pointer is
/// deliberately not part of the key.
#[derive(Copy, Clone)]
struct HashableSamplerCreateInfo(vk::SamplerCreateInfo<'static>);
1819impl PartialEq for HashableSamplerCreateInfo {
20fn eq(&self, other: &Self) -> bool {
21self.0.flags == other.0.flags
22 && self.0.mag_filter == other.0.mag_filter
23 && self.0.min_filter == other.0.min_filter
24 && self.0.mipmap_mode == other.0.mipmap_mode
25 && self.0.address_mode_u == other.0.address_mode_u
26 && self.0.address_mode_v == other.0.address_mode_v
27 && self.0.address_mode_w == other.0.address_mode_w
28 && OrderedFloat(self.0.mip_lod_bias) == OrderedFloat(other.0.mip_lod_bias)
29 && self.0.anisotropy_enable == other.0.anisotropy_enable
30 && OrderedFloat(self.0.max_anisotropy) == OrderedFloat(other.0.max_anisotropy)
31 && self.0.compare_enable == other.0.compare_enable
32 && self.0.compare_op == other.0.compare_op
33 && OrderedFloat(self.0.min_lod) == OrderedFloat(other.0.min_lod)
34 && OrderedFloat(self.0.max_lod) == OrderedFloat(other.0.max_lod)
35 && self.0.border_color == other.0.border_color
36 && self.0.unnormalized_coordinates == other.0.unnormalized_coordinates
37 }
38}
3940impl Eq for HashableSamplerCreateInfo {}
4142impl core::hash::Hash for HashableSamplerCreateInfo {
43fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
44self.0.flags.hash(state);
45self.0.mag_filter.hash(state);
46self.0.min_filter.hash(state);
47self.0.mipmap_mode.hash(state);
48self.0.address_mode_u.hash(state);
49self.0.address_mode_v.hash(state);
50self.0.address_mode_w.hash(state);
51 OrderedFloat(self.0.mip_lod_bias).hash(state);
52self.0.anisotropy_enable.hash(state);
53 OrderedFloat(self.0.max_anisotropy).hash(state);
54self.0.compare_enable.hash(state);
55self.0.compare_op.hash(state);
56 OrderedFloat(self.0.min_lod).hash(state);
57 OrderedFloat(self.0.max_lod).hash(state);
58self.0.border_color.hash(state);
59self.0.unnormalized_coordinates.hash(state);
60 }
61}
/// Entry in the sampler cache.
struct CacheEntry {
    /// The cached Vulkan sampler handle.
    sampler: vk::Sampler,
    /// Number of outstanding users of this sampler; the sampler is destroyed
    /// when this drops back to zero.
    ref_count: u32,
}
/// Global sampler cache.
///
/// As some devices have a low limit (4000) on the number of unique samplers that can be created,
/// we need to cache samplers to avoid running out if people eagerly create duplicate samplers.
pub(crate) struct SamplerCache {
    /// Mapping from the sampler description to sampler and reference count.
    samplers: HashMap<HashableSamplerCreateInfo, CacheEntry>,
    /// Maximum number of unique samplers that can be created.
    total_capacity: u32,
    /// If true, the sampler cache is disabled and all samplers are created on demand.
    /// Set when `total_capacity` is at least `ENABLE_SAMPLER_CACHE_CUTOFF`.
    passthrough: bool,
}
8182impl SamplerCache {
83pub fn new(total_capacity: u32) -> Self {
84let passthrough = total_capacity >= ENABLE_SAMPLER_CACHE_CUTOFF;
85Self {
86 samplers: HashMap::new(),
87 total_capacity,
88 passthrough,
89 }
90 }
9192/// Create a sampler, or return an existing one if it already exists.
93 ///
94 /// If the sampler already exists, the reference count is incremented.
95 ///
96 /// If the sampler does not exist, a new sampler is created and inserted into the cache.
97 ///
98 /// If the cache is full, an error is returned.
99pub fn create_sampler(
100&mut self,
101 device: &ash::Device,
102 create_info: vk::SamplerCreateInfo<'static>,
103 ) -> Result<vk::Sampler, crate::DeviceError> {
104if self.passthrough {
105return unsafe { device.create_sampler(&create_info, None) }
106 .map_err(super::map_host_device_oom_and_ioca_err);
107 };
108109// Get the number of used samplers. Needs to be done before to appease the borrow checker.
110let used_samplers = self.samplers.len();
111112match self.samplers.entry(HashableSamplerCreateInfo(create_info)) {
113 Entry::Occupied(occupied_entry) => {
114// We have found a match, so increment the refcount and return the index.
115let value = occupied_entry.into_mut();
116 value.ref_count += 1;
117Ok(value.sampler)
118 }
119 Entry::Vacant(vacant_entry) => {
120// We need to create a new sampler.
121122 // We need to check if we can create more samplers.
123if used_samplers >= self.total_capacity as usize {
124log::error!("There is no more room in the global sampler heap for more unique samplers. Your device supports a maximum of {} unique samplers.", self.samplers.len());
125return Err(crate::DeviceError::OutOfMemory);
126 }
127128// Create the sampler.
129let sampler = unsafe { device.create_sampler(&create_info, None) }
130 .map_err(super::map_host_device_oom_and_ioca_err)?;
131132// Insert the new sampler into the mapping.
133vacant_entry.insert(CacheEntry {
134 sampler,
135 ref_count: 1,
136 });
137138Ok(sampler)
139 }
140 }
141 }
142143/// Decrease the reference count of a sampler and destroy it if the reference count reaches 0.
144 ///
145 /// The provided sampler is checked against the sampler in the cache to ensure there is no clerical error.
146pub fn destroy_sampler(
147&mut self,
148 device: &ash::Device,
149 create_info: vk::SamplerCreateInfo<'static>,
150 provided_sampler: vk::Sampler,
151 ) {
152if self.passthrough {
153unsafe { device.destroy_sampler(provided_sampler, None) };
154return;
155 };
156157let Entry::Occupied(mut hash_map_entry) =
158self.samplers.entry(HashableSamplerCreateInfo(create_info))
159else {
160log::error!("Trying to destroy a sampler that does not exist.");
161return;
162 };
163let cache_entry = hash_map_entry.get_mut();
164165assert_eq!(
166 cache_entry.sampler, provided_sampler,
167"Provided sampler does not match the sampler in the cache."
168);
169170 cache_entry.ref_count -= 1;
171172if cache_entry.ref_count == 0 {
173unsafe { device.destroy_sampler(cache_entry.sampler, None) };
174 hash_map_entry.remove();
175 }
176 }
177}