1use alloc::{
2 sync::Arc,
3 vec::{Drain, Vec},
4};
5use core::ops::Range;
6
7use hashbrown::hash_map::Entry;
8
9use crate::{
10 device::Device,
11 init_tracker::*,
12 resource::{DestroyedResourceError, ParentDevice, RawResourceAccess, Texture, Trackable},
13 snatch::SnatchGuard,
14 track::{DeviceTracker, TextureTracker},
15 FastHashMap,
16};
17
18use super::{clear::clear_texture, BakedCommands, ClearError};
19
/// A single texture surface — one mip level of one array layer — whose
/// contents were marked as discarded by a command recorded earlier in the
/// command buffer.
#[derive(Clone)]
pub(crate) struct TextureSurfaceDiscard {
    // Texture the discarded surface belongs to.
    pub texture: Arc<Texture>,
    // Mip level of the discarded surface.
    pub mip_level: u32,
    // Array layer of the discarded surface.
    pub layer: u32,
}
28
/// Discarded surfaces that must be cleared *before* the command that needs
/// initialized memory runs — produced by
/// `CommandBufferTextureMemoryActions::register_init_action` and consumed by
/// `fixup_discarded_surfaces`.
pub(crate) type SurfacesInDiscardState = Vec<TextureSurfaceDiscard>;
30
/// Accumulates texture memory-initialization bookkeeping while a command
/// buffer is being recorded.
#[derive(Default)]
pub(crate) struct CommandBufferTextureMemoryActions {
    // Init actions queued for resolution when the command buffer is baked;
    // drained via `drain_init_actions`.
    init_actions: Vec<TextureInitTrackerAction>,
    // Surfaces discarded so far while recording this command buffer.
    discards: Vec<TextureSurfaceDiscard>,
}
41
42impl CommandBufferTextureMemoryActions {
43 pub(crate) fn drain_init_actions(&mut self) -> Drain<'_, TextureInitTrackerAction> {
44 self.init_actions.drain(..)
45 }
46
47 pub(crate) fn discard(&mut self, discard: TextureSurfaceDiscard) {
48 self.discards.push(discard);
49 }
50
51 #[must_use]
55 pub(crate) fn register_init_action(
56 &mut self,
57 action: &TextureInitTrackerAction,
58 ) -> SurfacesInDiscardState {
59 let mut immediately_necessary_clears = SurfacesInDiscardState::new();
60
61 self.init_actions.extend(
69 action
70 .texture
71 .initialization_status
72 .read()
73 .check_action(action),
74 );
75
76 let init_actions = &mut self.init_actions;
80 self.discards.retain(|discarded_surface| {
81 if discarded_surface.texture.is_equal(&action.texture)
82 && action.range.layer_range.contains(&discarded_surface.layer)
83 && action
84 .range
85 .mip_range
86 .contains(&discarded_surface.mip_level)
87 {
88 if let MemoryInitKind::NeedsInitializedMemory = action.kind {
89 immediately_necessary_clears.push(discarded_surface.clone());
90
91 init_actions.push(TextureInitTrackerAction {
95 texture: discarded_surface.texture.clone(),
96 range: TextureInitRange {
97 mip_range: discarded_surface.mip_level
98 ..(discarded_surface.mip_level + 1),
99 layer_range: discarded_surface.layer..(discarded_surface.layer + 1),
100 },
101 kind: MemoryInitKind::ImplicitlyInitialized,
102 });
103 }
104 false
105 } else {
106 true
107 }
108 });
109
110 immediately_necessary_clears
111 }
112
113 pub(crate) fn register_implicit_init(
116 &mut self,
117 texture: &Arc<Texture>,
118 range: TextureInitRange,
119 ) {
120 let must_be_empty = self.register_init_action(&TextureInitTrackerAction {
121 texture: texture.clone(),
122 range,
123 kind: MemoryInitKind::ImplicitlyInitialized,
124 });
125 assert!(must_be_empty.is_empty());
126 }
127}
128
129pub(crate) fn fixup_discarded_surfaces<InitIter: Iterator<Item = TextureSurfaceDiscard>>(
134 inits: InitIter,
135 encoder: &mut dyn hal::DynCommandEncoder,
136 texture_tracker: &mut TextureTracker,
137 device: &Device,
138 snatch_guard: &SnatchGuard<'_>,
139) {
140 for init in inits {
141 clear_texture(
142 &init.texture,
143 TextureInitRange {
144 mip_range: init.mip_level..(init.mip_level + 1),
145 layer_range: init.layer..(init.layer + 1),
146 },
147 encoder,
148 texture_tracker,
149 &device.alignments,
150 device.zero_buffer.as_ref(),
151 snatch_guard,
152 device.instance_flags,
153 )
154 .unwrap();
155 }
156}
157
impl BakedCommands {
    /// Resolves the buffer init actions recorded during command encoding:
    /// marks the affected ranges initialized in each buffer's init tracker,
    /// and issues zero-clears (plus the barriers they need) for ranges that
    /// commands will *read* while still uninitialized.
    ///
    /// Returns `DestroyedResourceError` if an affected buffer was destroyed
    /// between recording and baking.
    pub(crate) fn initialize_buffer_memory(
        &mut self,
        device_tracker: &mut DeviceTracker,
        snatch_guard: &SnatchGuard<'_>,
    ) -> Result<(), DestroyedResourceError> {
        profiling::scope!("initialize_buffer_memory");

        // Collect still-uninitialized ranges per buffer, keyed by the
        // buffer's tracker index; the value holds the buffer itself plus
        // its ranges.
        let mut uninitialized_ranges_per_buffer = FastHashMap::default();
        for buffer_use in self.buffer_memory_init_actions.drain(..) {
            let mut initialization_status = buffer_use.buffer.initialization_status.write();

            // Round the range end up to COPY_BUFFER_ALIGNMENT so the clears
            // issued below stay properly aligned (see the asserts further
            // down).
            let end_remainder = buffer_use.range.end % wgt::COPY_BUFFER_ALIGNMENT;
            let end = if end_remainder == 0 {
                buffer_use.range.end
            } else {
                buffer_use.range.end + wgt::COPY_BUFFER_ALIGNMENT - end_remainder
            };
            // Whatever the action kind, the range counts as initialized from
            // now on; `drain` yields the parts that were not yet initialized.
            let uninitialized_ranges = initialization_status.drain(buffer_use.range.start..end);

            match buffer_use.kind {
                // The encoded commands themselves will write this memory:
                // nothing to clear.
                MemoryInitKind::ImplicitlyInitialized => {}
                // The encoded commands read this memory, so it must be
                // zeroed before they run.
                MemoryInitKind::NeedsInitializedMemory => {
                    match uninitialized_ranges_per_buffer.entry(buffer_use.buffer.tracker_index()) {
                        Entry::Vacant(e) => {
                            e.insert((
                                buffer_use.buffer.clone(),
                                uninitialized_ranges.collect::<Vec<Range<wgt::BufferAddress>>>(),
                            ));
                        }
                        Entry::Occupied(mut e) => {
                            e.get_mut().1.extend(uninitialized_ranges);
                        }
                    }
                }
            }
        }

        for (buffer, mut ranges) in uninitialized_ranges_per_buffer.into_values() {
            // Merge touching ranges so we issue as few clears as possible.
            ranges.sort_by_key(|r| r.start);
            for i in (1..ranges.len()).rev() {
                // Ranges coming out of the init tracker never overlap.
                assert!(ranges[i - 1].end <= ranges[i].start);
                if ranges[i].start == ranges[i - 1].end {
                    ranges[i - 1].end = ranges[i].end;
                    // NOTE: swap_remove while iterating in reverse — indices
                    // below `i` are untouched, and the element swapped into
                    // slot `i` was already examined, so no merge is missed.
                    ranges.swap_remove(i); }
            }

            // The zero-clears below are copy operations, so the buffer must
            // transition into COPY_DST first.
            let transition = device_tracker
                .buffers
                .set_single(&buffer, wgt::BufferUses::COPY_DST);

            let raw_buf = buffer.try_raw(snatch_guard)?;

            unsafe {
                self.encoder.raw.transition_buffers(
                    transition
                        .map(|pending| pending.into_hal(&buffer, snatch_guard))
                        .as_slice(),
                );
            }

            for range in ranges.iter() {
                // The init tracker plus the rounding above should only ever
                // yield COPY_BUFFER_ALIGNMENT-aligned ranges; a violation is
                // an internal bug.
                assert!(
                    range.start % wgt::COPY_BUFFER_ALIGNMENT == 0,
                    "Buffer {:?} has an uninitialized range with a start \
                    not aligned to 4 (start was {})",
                    raw_buf,
                    range.start
                );
                assert!(
                    range.end % wgt::COPY_BUFFER_ALIGNMENT == 0,
                    "Buffer {:?} has an uninitialized range with an end \
                    not aligned to 4 (end was {})",
                    raw_buf,
                    range.end
                );

                unsafe {
                    self.encoder.raw.clear_buffer(raw_buf, range.clone());
                }
            }
        }
        Ok(())
    }

    /// Resolves the texture init actions recorded during command encoding:
    /// marks affected (mip, layer) ranges initialized, zero-clears ranges
    /// that commands will read while still uninitialized, and finally applies
    /// the recorded surface discards to the textures' init trackers.
    ///
    /// Returns `DestroyedResourceError` if a texture needing a clear was
    /// destroyed between recording and baking.
    pub(crate) fn initialize_texture_memory(
        &mut self,
        device_tracker: &mut DeviceTracker,
        device: &Device,
        snatch_guard: &SnatchGuard<'_>,
    ) -> Result<(), DestroyedResourceError> {
        profiling::scope!("initialize_texture_memory");

        // Scratch buffer reused (drained) across actions to avoid
        // re-allocating per texture use.
        let mut ranges: Vec<TextureInitRange> = Vec::new();
        for texture_use in self.texture_memory_actions.drain_init_actions() {
            let mut initialization_status = texture_use.texture.initialization_status.write();
            let use_range = texture_use.range;
            // Iterator over the per-mip trackers covered by this action,
            // paired with their mip level index.
            let affected_mip_trackers = initialization_status
                .mips
                .iter_mut()
                .enumerate()
                .skip(use_range.mip_range.start as usize)
                .take((use_range.mip_range.end - use_range.mip_range.start) as usize);

            match texture_use.kind {
                MemoryInitKind::ImplicitlyInitialized => {
                    // The commands will write the memory; just mark the
                    // layer range initialized in each affected mip tracker.
                    for (_, mip_tracker) in affected_mip_trackers {
                        mip_tracker.drain(use_range.layer_range.clone());
                    }
                }
                MemoryInitKind::NeedsInitializedMemory => {
                    // Collect every still-uninitialized (mip, layer) range
                    // so it can be cleared below.
                    for (mip_level, mip_tracker) in affected_mip_trackers {
                        for layer_range in mip_tracker.drain(use_range.layer_range.clone()) {
                            ranges.push(TextureInitRange {
                                mip_range: (mip_level as u32)..(mip_level as u32 + 1),
                                layer_range,
                            });
                        }
                    }
                }
            }

            for range in ranges.drain(..) {
                let clear_result = clear_texture(
                    &texture_use.texture,
                    range,
                    self.encoder.raw.as_mut(),
                    &mut device_tracker.textures,
                    &device.alignments,
                    device.zero_buffer.as_ref(),
                    snatch_guard,
                    device.instance_flags,
                );

                // A texture destroyed between recording and baking is a
                // recoverable error reported to the caller.
                if let Err(ClearError::DestroyedResource(e)) = clear_result {
                    return Err(e);
                }

                // Any other clear failure at this point is an internal bug.
                if let Err(error) = clear_result {
                    panic!("{error}");
                }
            }
        }

        // With all init actions resolved, apply the recorded discards to the
        // init trackers so later submissions see these surfaces as
        // uninitialized again.
        for surface_discard in self.texture_memory_actions.discards.iter() {
            surface_discard
                .texture
                .initialization_status
                .write()
                .discard(surface_discard.mip_level, surface_discard.layer);
        }

        Ok(())
    }
}