pub fn pipeline_cache_key(adapter_info: &AdapterInfo) -> Option<String>
Expand description
A recommended key for storing `PipelineCache`s for the adapter associated with the given `AdapterInfo`.
This key will define a class of adapters for which the same cache
might be valid.
If this returns `None`, the adapter doesn't support `PipelineCache`.
This may be because the API doesn't support application-managed caches (such as browser WebGPU), or because `wgpu` hasn't implemented it for that API yet.
This key could be used as a filename, as seen in the example below.
§Examples
// Example: persist a wgpu PipelineCache to disk, keyed by the adapter.
// NOTE(review): `device` and `adapter_info` are assumed to come from the
// surrounding example context (an already-created wgpu Device and the
// AdapterInfo of its Adapter) — confirm against the full docs page.
use wgpu::PipelineCacheDescriptor;
let cache_dir: PathBuf = unimplemented!("Some reasonable platform-specific cache directory for your app.");
// `pipeline_cache_key` returns Some(key) only when this adapter/API supports
// application-managed pipeline caches; the key doubles as a filename here.
let filename = wgpu::util::pipeline_cache_key(&adapter_info);
let (pipeline_cache, cache_file) = if let Some(filename) = filename {
let cache_path = cache_dir.join(&filename);
// If we failed to read the cache, for whatever reason, treat the data as lost.
// In a real app, we'd probably avoid caching entirely unless the error was "file not found".
let cache_data = std::fs::read(&cache_path).ok();
// `create_pipeline_cache` is an unsafe API; `fallback: true` requests that
// wgpu build a fresh cache if the loaded bytes are invalid or mismatched —
// NOTE(review): confirm the exact safety contract against the wgpu docs.
let pipeline_cache = unsafe {
device.create_pipeline_cache(&PipelineCacheDescriptor {
data: cache_data.as_deref(),
label: None,
fallback: true
})
};
(Some(pipeline_cache), Some(cache_path))
} else {
// Adapter/API doesn't support pipeline caches: run without one.
(None, None)
};
// Run pipeline initialisation, making sure to set the `cache`
// fields of your `*PipelineDescriptor` to `pipeline_cache`
// And then save the resulting cache (probably off the main thread).
if let (Some(pipeline_cache), Some(cache_file)) = (pipeline_cache, cache_file) {
// `get_data` may return None if there is nothing to persist.
let data = pipeline_cache.get_data();
if let Some(data) = data {
// Write to a sibling temp file, then rename over the real cache file, so a
// crash mid-write can't leave a truncated cache behind (rename is intended
// as an atomic replace — platform-dependent; see std::fs::rename docs).
let temp_file = cache_file.with_extension("temp");
// NOTE(review): the `?`s assume the enclosing (elided) function returns a
// Result whose error type converts from std::io::Error.
std::fs::write(&temp_file, &data)?;
std::fs::rename(&temp_file, &cache_file)?;
}
}