wgpu_core/command/memory_init.rs

use alloc::{
    sync::Arc,
    vec::{Drain, Vec},
};
use core::ops::Range;

use hashbrown::hash_map::Entry;

use crate::{
    device::Device,
    init_tracker::*,
    resource::{DestroyedResourceError, ParentDevice, RawResourceAccess, Texture, Trackable},
    snatch::SnatchGuard,
    track::{DeviceTracker, TextureTracker},
    FastHashMap,
};

use super::{clear::clear_texture, BakedCommands, ClearError};

/// Surface that was discarded by `StoreOp::Discard` of a preceding render pass.
/// Any read access to this surface needs to be preceded by a texture initialization.
#[derive(Clone)]
pub(crate) struct TextureSurfaceDiscard {
    pub texture: Arc<Texture>,
    pub mip_level: u32,
    pub layer: u32,
}

pub(crate) type SurfacesInDiscardState = Vec<TextureSurfaceDiscard>;

#[derive(Default)]
pub(crate) struct CommandBufferTextureMemoryActions {
    /// The tracker actions that need to be executed before the command
    /// buffer is executed.
    init_actions: Vec<TextureInitTrackerAction>,
    /// All the discards that haven't been followed by an init again within the
    /// command buffer, i.e. everything in this list resets the texture init
    /// state *after* the command buffer's execution.
    discards: Vec<TextureSurfaceDiscard>,
}

impl CommandBufferTextureMemoryActions {
    pub(crate) fn drain_init_actions(&mut self) -> Drain<'_, TextureInitTrackerAction> {
        self.init_actions.drain(..)
    }

    pub(crate) fn discard(&mut self, discard: TextureSurfaceDiscard) {
        self.discards.push(discard);
    }

    // Registers a TextureInitTrackerAction.
    // Returns previously discarded surfaces that need to be initialized *immediately* now.
    // Only returns a non-empty list if the action is MemoryInitKind::NeedsInitializedMemory.
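    //
    // A minimal usage sketch (illustrative only, not part of this module;
    // assumes a `texture: Arc<Texture>` with at least one mip level and one
    // array layer is in scope):
    //
    //     let mut actions = CommandBufferTextureMemoryActions::default();
    //     actions.discard(TextureSurfaceDiscard {
    //         texture: texture.clone(),
    //         mip_level: 0,
    //         layer: 0,
    //     });
    //     // A later read of the same subresource returns it for immediate clearing:
    //     let clears = actions.register_init_action(&TextureInitTrackerAction {
    //         texture,
    //         range: TextureInitRange { mip_range: 0..1, layer_range: 0..1 },
    //         kind: MemoryInitKind::NeedsInitializedMemory,
    //     });
    //     assert_eq!(clears.len(), 1);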
    #[must_use]
    pub(crate) fn register_init_action(
        &mut self,
        action: &TextureInitTrackerAction,
    ) -> SurfacesInDiscardState {
        let mut immediately_necessary_clears = SurfacesInDiscardState::new();

        // Note that within a command buffer we may stack arbitrary memory init
        // actions on the same texture. Since we react to them in sequence, they
        // are going to be dropped again at queue submit.
        //
        // We don't need to add MemoryInitKind::NeedsInitializedMemory to
        // init_actions if a surface is part of the discard list. But that would
        // mean splitting up the action, which is more effort than it would save
        // here.
        self.init_actions.extend(
            action
                .texture
                .initialization_status
                .read()
                .check_action(action),
        );

        // We expect very few discarded surfaces at any point in time, which is
        // why a simple linear search is likely best. (I.e. most of the time
        // self.discards is empty!)
        let init_actions = &mut self.init_actions;
        self.discards.retain(|discarded_surface| {
            if discarded_surface.texture.is_equal(&action.texture)
                && action.range.layer_range.contains(&discarded_surface.layer)
                && action
                    .range
                    .mip_range
                    .contains(&discarded_surface.mip_level)
            {
                if let MemoryInitKind::NeedsInitializedMemory = action.kind {
                    immediately_necessary_clears.push(discarded_surface.clone());

                    // Mark the surface as implicitly initialized (this is
                    // relevant because it might have been uninitialized prior
                    // to discarding).
                    init_actions.push(TextureInitTrackerAction {
                        texture: discarded_surface.texture.clone(),
                        range: TextureInitRange {
                            mip_range: discarded_surface.mip_level
                                ..(discarded_surface.mip_level + 1),
                            layer_range: discarded_surface.layer..(discarded_surface.layer + 1),
                        },
                        kind: MemoryInitKind::ImplicitlyInitialized,
                    });
                }
                false
            } else {
                true
            }
        });

        immediately_necessary_clears
    }

    // Shortcut for register_init_action when it is known that the action is an
    // implicit init, not requiring any immediate resource init.
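    //
    // For illustration (hypothetical values), marking the first mip/layer of a
    // texture as initialized without recording any clear:
    //
    //     actions.register_implicit_init(
    //         &texture,
    //         TextureInitRange { mip_range: 0..1, layer_range: 0..1 },
    //     );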
    pub(crate) fn register_implicit_init(
        &mut self,
        texture: &Arc<Texture>,
        range: TextureInitRange,
    ) {
        let must_be_empty = self.register_init_action(&TextureInitTrackerAction {
            texture: texture.clone(),
            range,
            kind: MemoryInitKind::ImplicitlyInitialized,
        });
        assert!(must_be_empty.is_empty());
    }
}

// Utility function that takes discarded surfaces from (several calls to)
// register_init_action and initializes them on the spot.
//
// Takes care of barriers as well!
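//
// A sketch of the intended call pattern (names like `encoder`, `tracker`,
// `device`, and `snatch_guard` are assumed to be in scope; they are not
// defined in this module):
//
//     let discards = texture_memory_actions.register_init_action(&action);
//     fixup_discarded_surfaces(discards.into_iter(), encoder, tracker, device, snatch_guard);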
pub(crate) fn fixup_discarded_surfaces<InitIter: Iterator<Item = TextureSurfaceDiscard>>(
    inits: InitIter,
    encoder: &mut dyn hal::DynCommandEncoder,
    texture_tracker: &mut TextureTracker,
    device: &Device,
    snatch_guard: &SnatchGuard<'_>,
) {
    for init in inits {
        clear_texture(
            &init.texture,
            TextureInitRange {
                mip_range: init.mip_level..(init.mip_level + 1),
                layer_range: init.layer..(init.layer + 1),
            },
            encoder,
            texture_tracker,
            &device.alignments,
            device.zero_buffer.as_ref(),
            snatch_guard,
            device.instance_flags,
        )
        .unwrap();
    }
}

impl BakedCommands {
    // Inserts all buffer initializations that are going to be needed for
    // executing the commands, and updates resource init states accordingly.
    pub(crate) fn initialize_buffer_memory(
        &mut self,
        device_tracker: &mut DeviceTracker,
        snatch_guard: &SnatchGuard<'_>,
    ) -> Result<(), DestroyedResourceError> {
        profiling::scope!("initialize_buffer_memory");

        // Gather init ranges for each buffer so we can collapse them.
        // It is not possible to do this at an earlier point since previously
        // executed command buffers change the resource init state.
        let mut uninitialized_ranges_per_buffer = FastHashMap::default();
        for buffer_use in self.buffer_memory_init_actions.drain(..) {
            let mut initialization_status = buffer_use.buffer.initialization_status.write();

            // Align the end of the range up to COPY_BUFFER_ALIGNMENT (4).
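            // For example, with an alignment of 4: an end of 5 has remainder 1
            // and is rounded up to 5 + 4 - 1 == 8, while an already-aligned
            // end of 8 is left unchanged.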
            let end_remainder = buffer_use.range.end % wgt::COPY_BUFFER_ALIGNMENT;
            let end = if end_remainder == 0 {
                buffer_use.range.end
            } else {
                buffer_use.range.end + wgt::COPY_BUFFER_ALIGNMENT - end_remainder
            };
            let uninitialized_ranges = initialization_status.drain(buffer_use.range.start..end);

            match buffer_use.kind {
                MemoryInitKind::ImplicitlyInitialized => {}
                MemoryInitKind::NeedsInitializedMemory => {
                    match uninitialized_ranges_per_buffer.entry(buffer_use.buffer.tracker_index()) {
                        Entry::Vacant(e) => {
                            e.insert((
                                buffer_use.buffer.clone(),
                                uninitialized_ranges.collect::<Vec<Range<wgt::BufferAddress>>>(),
                            ));
                        }
                        Entry::Occupied(mut e) => {
                            e.get_mut().1.extend(uninitialized_ranges);
                        }
                    }
                }
            }
        }

        for (buffer, mut ranges) in uninitialized_ranges_per_buffer.into_values() {
            // Collapse touching ranges.
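            //
            // For example, [0..4, 4..12, 16..20] collapses to [0..12, 16..20]:
            // the first two ranges touch, the third stands alone.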
            ranges.sort_by_key(|r| r.start);
            for i in (1..ranges.len()).rev() {
                // The memory init tracker made sure of this!
                assert!(ranges[i - 1].end <= ranges[i].start);
                if ranges[i].start == ranges[i - 1].end {
                    ranges[i - 1].end = ranges[i].end;
                    ranges.swap_remove(i); // Ordering not important at this point.
                }
            }

            // Don't do use_replace since the buffer may no longer have a
            // ref_count.
            //
            // However, we *know* that it is currently in use, so the tracker
            // must already know about it.
            let transition = device_tracker
                .buffers
                .set_single(&buffer, wgt::BufferUses::COPY_DST);

            let raw_buf = buffer.try_raw(snatch_guard)?;

            unsafe {
                self.encoder.raw.transition_buffers(
                    transition
                        .map(|pending| pending.into_hal(&buffer, snatch_guard))
                        .as_slice(),
                );
            }

            for range in ranges.iter() {
                assert!(
                    range.start % wgt::COPY_BUFFER_ALIGNMENT == 0,
                    "Buffer {:?} has an uninitialized range with a start \
                     not aligned to 4 (start was {})",
                    raw_buf,
                    range.start
                );
                assert!(
                    range.end % wgt::COPY_BUFFER_ALIGNMENT == 0,
                    "Buffer {:?} has an uninitialized range with an end \
                     not aligned to 4 (end was {})",
                    raw_buf,
                    range.end
                );

                unsafe {
                    self.encoder.raw.clear_buffer(raw_buf, range.clone());
                }
            }
        }
        Ok(())
    }

    // Inserts all texture initializations that are going to be needed for
    // executing the commands, and updates resource init states accordingly.
    // Any textures that are left discarded by this command buffer will be
    // marked as uninitialized.
    pub(crate) fn initialize_texture_memory(
        &mut self,
        device_tracker: &mut DeviceTracker,
        device: &Device,
        snatch_guard: &SnatchGuard<'_>,
    ) -> Result<(), DestroyedResourceError> {
        profiling::scope!("initialize_texture_memory");

        let mut ranges: Vec<TextureInitRange> = Vec::new();
        for texture_use in self.texture_memory_actions.drain_init_actions() {
            let mut initialization_status = texture_use.texture.initialization_status.write();
            let use_range = texture_use.range;
            let affected_mip_trackers = initialization_status
                .mips
                .iter_mut()
                .enumerate()
                .skip(use_range.mip_range.start as usize)
                .take((use_range.mip_range.end - use_range.mip_range.start) as usize);
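            // E.g. a mip_range of 1..3 visits mip levels 1 and 2: skip(1)
            // drops level 0 and take(3 - 1 == 2) yields the next two trackers.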

            match texture_use.kind {
                MemoryInitKind::ImplicitlyInitialized => {
                    for (_, mip_tracker) in affected_mip_trackers {
                        mip_tracker.drain(use_range.layer_range.clone());
                    }
                }
                MemoryInitKind::NeedsInitializedMemory => {
                    for (mip_level, mip_tracker) in affected_mip_trackers {
                        for layer_range in mip_tracker.drain(use_range.layer_range.clone()) {
                            ranges.push(TextureInitRange {
                                mip_range: (mip_level as u32)..(mip_level as u32 + 1),
                                layer_range,
                            });
                        }
                    }
                }
            }

            // TODO: Could we attempt some range collapsing here?
            for range in ranges.drain(..) {
                let clear_result = clear_texture(
                    &texture_use.texture,
                    range,
                    self.encoder.raw.as_mut(),
                    &mut device_tracker.textures,
                    &device.alignments,
                    device.zero_buffer.as_ref(),
                    snatch_guard,
                    device.instance_flags,
                );

                // A Texture can be destroyed between command recording and
                // now; this is out of our control, so we have to handle it
                // gracefully.
                if let Err(ClearError::DestroyedResource(e)) = clear_result {
                    return Err(e);
                }

                // Other errors are unexpected.
                if let Err(error) = clear_result {
                    panic!("{error}");
                }
            }
        }

        // Now that all buffers/textures have the proper init state as of
        // before the command buffer's start, we discard the init state of
        // any textures it left discarded after its execution.
        for surface_discard in self.texture_memory_actions.discards.iter() {
            surface_discard
                .texture
                .initialization_status
                .write()
                .discard(surface_discard.mip_level, surface_discard.layer);
        }

        Ok(())
    }
}