wgpu_examples/skybox/mod.rs

use bytemuck::{Pod, Zeroable};
use std::f32::consts;
use wgpu::{util::DeviceExt, AstcBlock, AstcChannel};

const IMAGE_SIZE: u32 = 256;

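/// Per-vertex data for the entity pipeline: position and normal.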
#[derive(Clone, Copy, Pod, Zeroable)]
#[repr(C)]
struct Vertex {
    pos: [f32; 3],
    normal: [f32; 3],
}

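/// A mesh extracted from the OBJ file: its vertex buffer and draw count.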
struct Entity {
    vertex_count: u32,
    vertex_buf: wgpu::Buffer,
}

// Note: we use the Y=up coordinate space in this example.
struct Camera {
    screen_size: (u32, u32),
    angle_y: f32,
    angle_xz: f32,
    dist: f32,
}

const MODEL_CENTER_Y: f32 = 2.0;

impl Camera {
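    /// Pack the camera state for the shader: the projection, inverse
    /// projection, and view matrices (column-major), followed by the camera
    /// position padded out to a vec4.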
    fn to_uniform_data(&self) -> [f32; 16 * 3 + 4] {
        let aspect = self.screen_size.0 as f32 / self.screen_size.1 as f32;
        let proj = glam::Mat4::perspective_rh(consts::FRAC_PI_4, aspect, 1.0, 50.0);
        let cam_pos = glam::Vec3::new(
            self.angle_xz.cos() * self.angle_y.sin() * self.dist,
            self.angle_xz.sin() * self.dist + MODEL_CENTER_Y,
            self.angle_xz.cos() * self.angle_y.cos() * self.dist,
        );
        let view = glam::Mat4::look_at_rh(
            cam_pos,
            glam::Vec3::new(0f32, MODEL_CENTER_Y, 0.0),
            glam::Vec3::Y,
        );
        let proj_inv = proj.inverse();

        let mut raw = [0f32; 16 * 3 + 4];
        raw[..16].copy_from_slice(&AsRef::<[f32; 16]>::as_ref(&proj)[..]);
        raw[16..32].copy_from_slice(&AsRef::<[f32; 16]>::as_ref(&proj_inv)[..]);
        raw[32..48].copy_from_slice(&AsRef::<[f32; 16]>::as_ref(&view)[..]);
        raw[48..51].copy_from_slice(AsRef::<[f32; 3]>::as_ref(&cam_pos));
        raw[51] = 1.0;
        raw
    }
}

pub struct Example {
    camera: Camera,
    sky_pipeline: wgpu::RenderPipeline,
    entity_pipeline: wgpu::RenderPipeline,
    bind_group: wgpu::BindGroup,
    uniform_buf: wgpu::Buffer,
    entities: Vec<Entity>,
    depth_view: wgpu::TextureView,
    staging_belt: wgpu::util::StagingBelt,
}

impl Example {
    const DEPTH_FORMAT: wgpu::TextureFormat = wgpu::TextureFormat::Depth24Plus;

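    /// Create a depth buffer matching the current surface size.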
    fn create_depth_texture(
        config: &wgpu::SurfaceConfiguration,
        device: &wgpu::Device,
    ) -> wgpu::TextureView {
        let depth_texture = device.create_texture(&wgpu::TextureDescriptor {
            size: wgpu::Extent3d {
                width: config.width,
                height: config.height,
                depth_or_array_layers: 1,
            },
            mip_level_count: 1,
            sample_count: 1,
            dimension: wgpu::TextureDimension::D2,
            format: Self::DEPTH_FORMAT,
            usage: wgpu::TextureUsages::RENDER_ATTACHMENT,
            label: None,
            view_formats: &[],
        });

        depth_texture.create_view(&wgpu::TextureViewDescriptor::default())
    }
}

impl crate::framework::Example for Example {
    fn optional_features() -> wgpu::Features {
        wgpu::Features::TEXTURE_COMPRESSION_ASTC
            | wgpu::Features::TEXTURE_COMPRESSION_ETC2
            | wgpu::Features::TEXTURE_COMPRESSION_BC
    }

    fn init(
        config: &wgpu::SurfaceConfiguration,
        _adapter: &wgpu::Adapter,
        device: &wgpu::Device,
        queue: &wgpu::Queue,
    ) -> Self {
        let mut entities = Vec::new();
        {
            let source = include_bytes!("models/rustacean-3d.obj");
            let data = obj::ObjData::load_buf(&source[..]).unwrap();
            let mut vertices = Vec::new();
            for object in data.objects {
                for group in object.groups {
                    vertices.clear();
                    for poly in group.polys {
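                        // Triangulate each polygon as a fan around its first
                        // vertex: (0, 1, 2), (0, 2, 3), ...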
                        for end_index in 2..poly.0.len() {
                            for &index in &[0, end_index - 1, end_index] {
                                let obj::IndexTuple(position_id, _texture_id, normal_id) =
                                    poly.0[index];
                                let [x, y, z] = data.position[position_id];
                                vertices.push(Vertex {
                                    pos: [y, z, x], // the model is oriented face-down, so swizzle the axes to stand it upright
                                    normal: data.normal[normal_id.unwrap()],
                                })
                            }
                        }
                    }
                    let vertex_buf = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
                        label: Some("Vertex"),
                        contents: bytemuck::cast_slice(&vertices),
                        usage: wgpu::BufferUsages::VERTEX,
                    });
                    entities.push(Entity {
                        vertex_count: vertices.len() as u32,
                        vertex_buf,
                    });
                }
            }
        }

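        // One bind group serves both pipelines: camera uniforms (binding 0),
        // the cube-map texture (binding 1), and its sampler (binding 2).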
        let bind_group_layout = device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {
            label: None,
            entries: &[
                wgpu::BindGroupLayoutEntry {
                    binding: 0,
                    visibility: wgpu::ShaderStages::VERTEX | wgpu::ShaderStages::FRAGMENT,
                    ty: wgpu::BindingType::Buffer {
                        ty: wgpu::BufferBindingType::Uniform,
                        has_dynamic_offset: false,
                        min_binding_size: None,
                    },
                    count: None,
                },
                wgpu::BindGroupLayoutEntry {
                    binding: 1,
                    visibility: wgpu::ShaderStages::FRAGMENT,
                    ty: wgpu::BindingType::Texture {
                        sample_type: wgpu::TextureSampleType::Float { filterable: true },
                        multisampled: false,
                        view_dimension: wgpu::TextureViewDimension::Cube,
                    },
                    count: None,
                },
                wgpu::BindGroupLayoutEntry {
                    binding: 2,
                    visibility: wgpu::ShaderStages::FRAGMENT,
                    ty: wgpu::BindingType::Sampler(wgpu::SamplerBindingType::Filtering),
                    count: None,
                },
            ],
        });

        // Load the shader module shared by both pipelines
        let shader = device.create_shader_module(wgpu::include_wgsl!("shader.wgsl"));

        let camera = Camera {
            screen_size: (config.width, config.height),
            angle_xz: 0.2,
            angle_y: 0.2,
            dist: 20.0,
        };
        let raw_uniforms = camera.to_uniform_data();
        let uniform_buf = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
            label: Some("Buffer"),
            contents: bytemuck::cast_slice(&raw_uniforms),
            usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST,
        });

        let pipeline_layout = device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {
            label: None,
            bind_group_layouts: &[&bind_group_layout],
            push_constant_ranges: &[],
        });

        // Create the render pipelines
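        // The sky pipeline binds no vertex buffers; the sky is drawn as a
        // single full-screen triangle (see the draw(0..3, ..) call in render).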
        let sky_pipeline = device.create_render_pipeline(&wgpu::RenderPipelineDescriptor {
            label: Some("Sky"),
            layout: Some(&pipeline_layout),
            vertex: wgpu::VertexState {
                module: &shader,
                entry_point: Some("vs_sky"),
                compilation_options: Default::default(),
                buffers: &[],
            },
            fragment: Some(wgpu::FragmentState {
                module: &shader,
                entry_point: Some("fs_sky"),
                compilation_options: Default::default(),
                targets: &[Some(config.view_formats[0].into())],
            }),
            primitive: wgpu::PrimitiveState {
                front_face: wgpu::FrontFace::Cw,
                ..Default::default()
            },
            depth_stencil: Some(wgpu::DepthStencilState {
                format: Self::DEPTH_FORMAT,
                depth_write_enabled: false,
                depth_compare: wgpu::CompareFunction::LessEqual,
                stencil: wgpu::StencilState::default(),
                bias: wgpu::DepthBiasState::default(),
            }),
            multisample: wgpu::MultisampleState::default(),
            multiview: None,
            cache: None,
        });
        let entity_pipeline = device.create_render_pipeline(&wgpu::RenderPipelineDescriptor {
            label: Some("Entity"),
            layout: Some(&pipeline_layout),
            vertex: wgpu::VertexState {
                module: &shader,
                entry_point: Some("vs_entity"),
                compilation_options: Default::default(),
                buffers: &[wgpu::VertexBufferLayout {
                    array_stride: size_of::<Vertex>() as wgpu::BufferAddress,
                    step_mode: wgpu::VertexStepMode::Vertex,
                    attributes: &wgpu::vertex_attr_array![0 => Float32x3, 1 => Float32x3],
                }],
            },
            fragment: Some(wgpu::FragmentState {
                module: &shader,
                entry_point: Some("fs_entity"),
                compilation_options: Default::default(),
                targets: &[Some(config.view_formats[0].into())],
            }),
            primitive: wgpu::PrimitiveState {
                front_face: wgpu::FrontFace::Cw,
                ..Default::default()
            },
            depth_stencil: Some(wgpu::DepthStencilState {
                format: Self::DEPTH_FORMAT,
                depth_write_enabled: true,
                depth_compare: wgpu::CompareFunction::LessEqual,
                stencil: wgpu::StencilState::default(),
                bias: wgpu::DepthBiasState::default(),
            }),
            multisample: wgpu::MultisampleState::default(),
            multiview: None,
            cache: None,
        });

        let sampler = device.create_sampler(&wgpu::SamplerDescriptor {
            label: None,
            address_mode_u: wgpu::AddressMode::ClampToEdge,
            address_mode_v: wgpu::AddressMode::ClampToEdge,
            address_mode_w: wgpu::AddressMode::ClampToEdge,
            mag_filter: wgpu::FilterMode::Linear,
            min_filter: wgpu::FilterMode::Linear,
            mipmap_filter: wgpu::FilterMode::Linear,
            ..Default::default()
        });

        let device_features = device.features();

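        // Prefer a compressed texture format when the device supports one,
        // falling back to plain RGBA8; each format has a matching KTX2 file.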
        let skybox_format = if device_features.contains(wgpu::Features::TEXTURE_COMPRESSION_ASTC) {
            log::info!("Using astc");
            wgpu::TextureFormat::Astc {
                block: AstcBlock::B4x4,
                channel: AstcChannel::UnormSrgb,
            }
        } else if device_features.contains(wgpu::Features::TEXTURE_COMPRESSION_ETC2) {
            log::info!("Using etc2");
            wgpu::TextureFormat::Etc2Rgb8A1UnormSrgb
        } else if device_features.contains(wgpu::Features::TEXTURE_COMPRESSION_BC) {
            log::info!("Using bc7");
            wgpu::TextureFormat::Bc7RgbaUnormSrgb
        } else {
            log::info!("Using rgba8");
            wgpu::TextureFormat::Rgba8UnormSrgb
        };

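        // A cube map is a 2D texture with six array layers, one per face.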
        let size = wgpu::Extent3d {
            width: IMAGE_SIZE,
            height: IMAGE_SIZE,
            depth_or_array_layers: 6,
        };

        let layer_size = wgpu::Extent3d {
            depth_or_array_layers: 1,
            ..size
        };
        let max_mips = layer_size.max_mips(wgpu::TextureDimension::D2);

        log::debug!(
            "Copying {skybox_format:?} skybox images of size {IMAGE_SIZE}x{IMAGE_SIZE}x6 with {max_mips} mips to the GPU",
        );

        let bytes = match skybox_format {
            wgpu::TextureFormat::Astc {
                block: AstcBlock::B4x4,
                channel: AstcChannel::UnormSrgb,
            } => &include_bytes!("images/astc.ktx2")[..],
            wgpu::TextureFormat::Etc2Rgb8A1UnormSrgb => &include_bytes!("images/etc2.ktx2")[..],
            wgpu::TextureFormat::Bc7RgbaUnormSrgb => &include_bytes!("images/bc7.ktx2")[..],
            wgpu::TextureFormat::Rgba8UnormSrgb => &include_bytes!("images/rgba8.ktx2")[..],
            _ => unreachable!(),
        };

        let reader = ktx2::Reader::new(bytes).unwrap();
        let header = reader.header();

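        // Concatenate every mip level into one contiguous upload buffer.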
        let mut image = Vec::with_capacity(reader.data().len());
        for level in reader.levels() {
            image.extend_from_slice(level.data);
        }

        let texture = device.create_texture_with_data(
            queue,
            &wgpu::TextureDescriptor {
                size,
                mip_level_count: header.level_count,
                sample_count: 1,
                dimension: wgpu::TextureDimension::D2,
                format: skybox_format,
                usage: wgpu::TextureUsages::TEXTURE_BINDING | wgpu::TextureUsages::COPY_DST,
                label: None,
                view_formats: &[],
            },
            // KTX2 stores mip levels in mip major order.
            wgpu::util::TextureDataOrder::MipMajor,
            &image,
        );

        let texture_view = texture.create_view(&wgpu::TextureViewDescriptor {
            label: None,
            dimension: Some(wgpu::TextureViewDimension::Cube),
            ..wgpu::TextureViewDescriptor::default()
        });
        let bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
            layout: &bind_group_layout,
            entries: &[
                wgpu::BindGroupEntry {
                    binding: 0,
                    resource: uniform_buf.as_entire_binding(),
                },
                wgpu::BindGroupEntry {
                    binding: 1,
                    resource: wgpu::BindingResource::TextureView(&texture_view),
                },
                wgpu::BindGroupEntry {
                    binding: 2,
                    resource: wgpu::BindingResource::Sampler(&sampler),
                },
            ],
            label: None,
        });

        let depth_view = Self::create_depth_texture(config, device);

        Example {
            camera,
            sky_pipeline,
            entity_pipeline,
            bind_group,
            uniform_buf,
            entities,
            depth_view,
            staging_belt: wgpu::util::StagingBelt::new(0x100),
        }
    }

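    // Map the cursor to camera angles: horizontal position sweeps the yaw,
    // vertical position tilts the camera.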
    #[expect(clippy::single_match)]
    fn update(&mut self, event: winit::event::WindowEvent) {
        match event {
            winit::event::WindowEvent::CursorMoved { position, .. } => {
                let norm_x = position.x as f32 / self.camera.screen_size.0 as f32 - 0.5;
                let norm_y = position.y as f32 / self.camera.screen_size.1 as f32 - 0.5;
                self.camera.angle_y = norm_x * 5.0;
                self.camera.angle_xz = norm_y;
            }
            _ => {}
        }
    }

    fn resize(
        &mut self,
        config: &wgpu::SurfaceConfiguration,
        device: &wgpu::Device,
        _queue: &wgpu::Queue,
    ) {
        self.depth_view = Self::create_depth_texture(config, device);
        self.camera.screen_size = (config.width, config.height);
    }

    fn render(&mut self, view: &wgpu::TextureView, device: &wgpu::Device, queue: &wgpu::Queue) {
        let mut encoder =
            device.create_command_encoder(&wgpu::CommandEncoderDescriptor { label: None });

        // Re-upload the camera uniforms; the camera follows the cursor.
        let raw_uniforms = self.camera.to_uniform_data();
        self.staging_belt
            .write_buffer(
                &mut encoder,
                &self.uniform_buf,
                0,
                wgpu::BufferSize::new((raw_uniforms.len() * size_of::<f32>()) as wgpu::BufferAddress)
                    .unwrap(),
                device,
            )
            .copy_from_slice(bytemuck::cast_slice(&raw_uniforms));

        self.staging_belt.finish();

        {
            let mut rpass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
                label: None,
                color_attachments: &[Some(wgpu::RenderPassColorAttachment {
                    view,
                    depth_slice: None,
                    resolve_target: None,
                    ops: wgpu::Operations {
                        load: wgpu::LoadOp::Clear(wgpu::Color {
                            r: 0.1,
                            g: 0.2,
                            b: 0.3,
                            a: 1.0,
                        }),
                        store: wgpu::StoreOp::Store,
                    },
                })],
                depth_stencil_attachment: Some(wgpu::RenderPassDepthStencilAttachment {
                    view: &self.depth_view,
                    depth_ops: Some(wgpu::Operations {
                        load: wgpu::LoadOp::Clear(1.0),
                        store: wgpu::StoreOp::Discard,
                    }),
                    stencil_ops: None,
                }),
                timestamp_writes: None,
                occlusion_query_set: None,
            });

            rpass.set_bind_group(0, &self.bind_group, &[]);
            rpass.set_pipeline(&self.entity_pipeline);

            for entity in self.entities.iter() {
                rpass.set_vertex_buffer(0, entity.vertex_buf.slice(..));
                rpass.draw(0..entity.vertex_count, 0..1);
            }

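            // Draw the sky last: depth writes are off and the compare is
            // LessEqual, so it only covers pixels still at the cleared depth.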
            rpass.set_pipeline(&self.sky_pipeline);
            rpass.draw(0..3, 0..1);
        }

        queue.submit(std::iter::once(encoder.finish()));

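        // Reclaim staging memory now that the copy has been submitted.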
        self.staging_belt.recall();
    }
}

pub fn main() {
    crate::framework::run::<Example>("skybox");
}

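// Screenshot tests: one per texture-compression path the example exercises.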
#[cfg(test)]
#[wgpu_test::gpu_test]
static TEST: crate::framework::ExampleTestParams = crate::framework::ExampleTestParams {
    name: "skybox",
    image_path: "/examples/features/src/skybox/screenshot.png",
    width: 1024,
    height: 768,
    optional_features: wgpu::Features::default(),
    base_test_parameters: wgpu_test::TestParameters::default().expect_fail(
        wgpu_test::FailureCase::backend_adapter(wgpu::Backends::GL, "ANGLE"),
    ),
    comparisons: &[wgpu_test::ComparisonType::Mean(0.02)],
    _phantom: std::marker::PhantomData::<Example>,
};

#[cfg(test)]
#[wgpu_test::gpu_test]
static TEST_BCN: crate::framework::ExampleTestParams = crate::framework::ExampleTestParams {
    name: "skybox-bc7",
    image_path: "/examples/features/src/skybox/screenshot_bc7.png",
    width: 1024,
    height: 768,
    optional_features: wgpu::Features::TEXTURE_COMPRESSION_BC,
    base_test_parameters: wgpu_test::TestParameters::default(),
    comparisons: &[wgpu_test::ComparisonType::Mean(0.02)],
    _phantom: std::marker::PhantomData::<Example>,
};

#[cfg(test)]
#[wgpu_test::gpu_test]
static TEST_ETC2: crate::framework::ExampleTestParams = crate::framework::ExampleTestParams {
    name: "skybox-etc2",
    image_path: "/examples/features/src/skybox/screenshot_etc2.png",
    width: 1024,
    height: 768,
    optional_features: wgpu::Features::TEXTURE_COMPRESSION_ETC2,
    base_test_parameters: wgpu_test::TestParameters::default(),
    comparisons: &[wgpu_test::ComparisonType::Mean(0.015)],
    _phantom: std::marker::PhantomData::<Example>,
};

#[cfg(test)]
#[wgpu_test::gpu_test]
static TEST_ASTC: crate::framework::ExampleTestParams = crate::framework::ExampleTestParams {
    name: "skybox-astc",
    image_path: "/examples/features/src/skybox/screenshot_astc.png",
    width: 1024,
    height: 768,
    optional_features: wgpu::Features::TEXTURE_COMPRESSION_ASTC,
    base_test_parameters: wgpu_test::TestParameters::default(),
    comparisons: &[wgpu_test::ComparisonType::Mean(0.016)],
    _phantom: std::marker::PhantomData::<Example>,
};