naga/front/wgsl/parse/mod.rs

use alloc::{boxed::Box, vec::Vec};
use directive::enable_extension::ImplementedEnableExtension;

use crate::diagnostic_filter::{
    self, DiagnosticFilter, DiagnosticFilterMap, DiagnosticFilterNode, FilterableTriggeringRule,
    ShouldConflictOnFullDuplicate, StandardFilterableTriggeringRule,
};
use crate::front::wgsl::error::{DiagnosticAttributeNotSupportedPosition, Error, ExpectedToken};
use crate::front::wgsl::parse::directive::enable_extension::{EnableExtension, EnableExtensions};
use crate::front::wgsl::parse::directive::language_extension::LanguageExtension;
use crate::front::wgsl::parse::directive::DirectiveKind;
use crate::front::wgsl::parse::lexer::{Lexer, Token};
use crate::front::wgsl::parse::number::Number;
use crate::front::wgsl::{Result, Scalar};
use crate::front::SymbolTable;
use crate::{Arena, FastHashSet, FastIndexSet, Handle, ShaderStage, Span};

pub mod ast;
pub mod conv;
pub mod directive;
pub mod lexer;
pub mod number;

/// State for constructing an AST expression.
///
/// Not to be confused with [`lower::ExpressionContext`], which is for producing
/// Naga IR from the AST we produce here.
///
/// [`lower::ExpressionContext`]: super::lower::ExpressionContext
struct ExpressionContext<'input, 'temp, 'out> {
    /// The [`TranslationUnit::expressions`] arena to which we should contribute
    /// expressions.
    ///
    /// [`TranslationUnit::expressions`]: ast::TranslationUnit::expressions
    expressions: &'out mut Arena<ast::Expression<'input>>,

    /// The [`TranslationUnit::types`] arena to which we should contribute new
    /// types.
    ///
    /// [`TranslationUnit::types`]: ast::TranslationUnit::types
    types: &'out mut Arena<ast::Type<'input>>,

    /// A map from identifiers in scope to the locals/arguments they represent.
    ///
    /// The handles refer to the [`locals`] arena; see that field's
    /// documentation for details.
    ///
    /// [`locals`]: ExpressionContext::locals
    local_table: &'temp mut SymbolTable<&'input str, Handle<ast::Local>>,

    /// Local variable and function argument arena for the function we're building.
    ///
    /// Note that the [`ast::Local`] here is actually a zero-sized type. This
    /// `Arena`'s only role is to assign a unique `Handle` to each local
    /// identifier, and track its definition's span for use in diagnostics. All
    /// the detailed information about locals - names, types, etc. - is kept in
    /// the [`LocalDecl`] statements we parsed from their declarations. For
    /// arguments, that information is kept in [`arguments`].
    ///
    /// In the AST, when an [`Ident`] expression refers to a local variable or
    /// argument, its [`IdentExpr`] holds the referent's `Handle<Local>` in this
    /// arena.
    ///
    /// During lowering, [`LocalDecl`] statements add entries to a per-function
    /// table that maps `Handle<Local>` values to their Naga representations,
    /// accessed via [`StatementContext::local_table`] and
    /// [`LocalExpressionContext::local_table`]. This table is then consulted when
    /// lowering subsequent [`Ident`] expressions.
    ///
    /// [`LocalDecl`]: ast::StatementKind::LocalDecl
    /// [`arguments`]: ast::Function::arguments
    /// [`Ident`]: ast::Expression::Ident
    /// [`IdentExpr`]: ast::IdentExpr
    /// [`StatementContext::local_table`]: super::lower::StatementContext::local_table
    /// [`LocalExpressionContext::local_table`]: super::lower::LocalExpressionContext::local_table
    locals: &'out mut Arena<ast::Local>,

    /// Identifiers used by the current global declaration that have no local definition.
    ///
    /// This becomes the [`GlobalDecl`]'s [`dependencies`] set.
    ///
    /// Note that we don't know at parse time what kind of [`GlobalDecl`] the
    /// name refers to. We can't look up names until we've seen the entire
    /// translation unit.
    ///
    /// [`GlobalDecl`]: ast::GlobalDecl
    /// [`dependencies`]: ast::GlobalDecl::dependencies
    unresolved: &'out mut FastIndexSet<ast::Dependency<'input>>,
}
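
// A sketch of how these fields get populated (names here are illustrative, not
// from the crate): parsing
//
//     fn scale(factor: f32) -> f32 {
//         let doubled = factor * uniform_scale;
//         return doubled;
//     }
//
// gives `factor` and `doubled` handles in `locals` plus entries in `local_table`,
// appends the `*` expression to `expressions`, and records `uniform_scale` in
// `unresolved`, since it has no local definition and can only be resolved against
// the global declarations during lowering.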

impl<'a> ExpressionContext<'a, '_, '_> {
    fn parse_binary_op(
        &mut self,
        lexer: &mut Lexer<'a>,
        classifier: impl Fn(Token<'a>) -> Option<crate::BinaryOperator>,
        mut parser: impl FnMut(&mut Lexer<'a>, &mut Self) -> Result<'a, Handle<ast::Expression<'a>>>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        let start = lexer.start_byte_offset();
        let mut accumulator = parser(lexer, self)?;
        while let Some(op) = classifier(lexer.peek().0) {
            let _ = lexer.next();
            let left = accumulator;
            let right = parser(lexer, self)?;
            accumulator = self.expressions.append(
                ast::Expression::Binary { op, left, right },
                lexer.span_from(start),
            );
        }
        Ok(accumulator)
    }
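
    // `parse_binary_op` above keeps folding new operands into `accumulator`, so
    // operators at the same precedence level associate to the left. For example
    // (a sketch of the resulting AST shape), `a - b - c` becomes
    //
    //     Binary { op: Subtract, left: Binary { op: Subtract, left: a, right: b }, right: c }
    //
    // that is, `(a - b) - c`, matching the WGSL grammar.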

    fn declare_local(&mut self, name: ast::Ident<'a>) -> Result<'a, Handle<ast::Local>> {
        let handle = self.locals.append(ast::Local, name.span);
        if let Some(old) = self.local_table.add(name.name, handle) {
            Err(Box::new(Error::Redefinition {
                previous: self.locals.get_span(old),
                current: name.span,
            }))
        } else {
            Ok(handle)
        }
    }

    fn new_scalar(&mut self, scalar: Scalar) -> Handle<ast::Type<'a>> {
        self.types
            .append(ast::Type::Scalar(scalar), Span::UNDEFINED)
    }
}

/// Which grammar rule we are in the midst of parsing.
///
/// This is used for error checking. `Parser` maintains a stack of
/// these and (occasionally) checks that it is being pushed and popped
/// as expected.
#[derive(Copy, Clone, Debug, PartialEq)]
enum Rule {
    Attribute,
    VariableDecl,
    TypeDecl,
    FunctionDecl,
    Block,
    Statement,
    PrimaryExpr,
    SingularExpr,
    UnaryExpr,
    GeneralExpr,
    Directive,
    GenericExpr,
    EnclosedExpr,
    LhsExpr,
}

struct ParsedAttribute<T> {
    value: Option<T>,
}

impl<T> Default for ParsedAttribute<T> {
    fn default() -> Self {
        Self { value: None }
    }
}

impl<T> ParsedAttribute<T> {
    fn set(&mut self, value: T, name_span: Span) -> Result<'static, ()> {
        if self.value.is_some() {
            return Err(Box::new(Error::RepeatedAttribute(name_span)));
        }
        self.value = Some(value);
        Ok(())
    }
}

#[derive(Default)]
struct BindingParser<'a> {
    location: ParsedAttribute<Handle<ast::Expression<'a>>>,
    built_in: ParsedAttribute<crate::BuiltIn>,
    interpolation: ParsedAttribute<crate::Interpolation>,
    sampling: ParsedAttribute<crate::Sampling>,
    invariant: ParsedAttribute<bool>,
    blend_src: ParsedAttribute<Handle<ast::Expression<'a>>>,
}

impl<'a> BindingParser<'a> {
    fn parse(
        &mut self,
        parser: &mut Parser,
        lexer: &mut Lexer<'a>,
        name: &'a str,
        name_span: Span,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, ()> {
        match name {
            "location" => {
                lexer.expect(Token::Paren('('))?;
                self.location
                    .set(parser.general_expression(lexer, ctx)?, name_span)?;
                lexer.expect(Token::Paren(')'))?;
            }
            "builtin" => {
                lexer.expect(Token::Paren('('))?;
                let (raw, span) = lexer.next_ident_with_span()?;
                self.built_in.set(
                    conv::map_built_in(&lexer.enable_extensions, raw, span)?,
                    name_span,
                )?;
                lexer.expect(Token::Paren(')'))?;
            }
            "interpolate" => {
                lexer.expect(Token::Paren('('))?;
                let (raw, span) = lexer.next_ident_with_span()?;
                self.interpolation
                    .set(conv::map_interpolation(raw, span)?, name_span)?;
                if lexer.skip(Token::Separator(',')) {
                    let (raw, span) = lexer.next_ident_with_span()?;
                    self.sampling
                        .set(conv::map_sampling(raw, span)?, name_span)?;
                }
                lexer.expect(Token::Paren(')'))?;
            }

            "invariant" => {
                self.invariant.set(true, name_span)?;
            }
            "blend_src" => {
                if !lexer
                    .enable_extensions
                    .contains(ImplementedEnableExtension::DualSourceBlending)
                {
                    return Err(Box::new(Error::EnableExtensionNotEnabled {
                        span: name_span,
                        kind: ImplementedEnableExtension::DualSourceBlending.into(),
                    }));
                }

                lexer.expect(Token::Paren('('))?;
                self.blend_src
                    .set(parser.general_expression(lexer, ctx)?, name_span)?;
                lexer.skip(Token::Separator(','));
                lexer.expect(Token::Paren(')'))?;
            }
            _ => return Err(Box::new(Error::UnknownAttribute(name_span))),
        }
        Ok(())
    }

    fn finish(self, span: Span) -> Result<'a, Option<ast::Binding<'a>>> {
        match (
            self.location.value,
            self.built_in.value,
            self.interpolation.value,
            self.sampling.value,
            self.invariant.value.unwrap_or_default(),
            self.blend_src.value,
        ) {
            (None, None, None, None, false, None) => Ok(None),
            (Some(location), None, interpolation, sampling, false, blend_src) => {
                // Before handing over the completed `Module`, we call
                // `apply_default_interpolation` to ensure that the interpolation and
                // sampling have been explicitly specified on all vertex shader output and fragment
                // shader input user bindings, so leaving them potentially `None` here is fine.
                Ok(Some(ast::Binding::Location {
                    location,
                    interpolation,
                    sampling,
                    blend_src,
                }))
            }
            (None, Some(crate::BuiltIn::Position { .. }), None, None, invariant, None) => {
                Ok(Some(ast::Binding::BuiltIn(crate::BuiltIn::Position {
                    invariant,
                })))
            }
            (None, Some(built_in), None, None, false, None) => {
                Ok(Some(ast::Binding::BuiltIn(built_in)))
            }
            (_, _, _, _, _, _) => Err(Box::new(Error::InconsistentBinding(span))),
        }
    }
}
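
// A few examples of how `BindingParser::finish` interprets the collected
// attributes (illustrative only):
//
//     @location(0) @interpolate(flat)    -> Binding::Location { .. }
//     @location(0) @blend_src(1)         -> Binding::Location { .. } (needs `dual_source_blending`)
//     @builtin(position) @invariant      -> Binding::BuiltIn(BuiltIn::Position { invariant: true })
//     @builtin(position) @location(0)    -> Error::InconsistentBinding
//
// Anything that mixes `@location` with `@builtin`, or puts `@invariant` on a
// non-`position` binding, falls through to the final arm and is rejected.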

/// Configuration for the whole parser run.
pub struct Options {
    /// Controls whether the parser should parse doc comments.
    pub parse_doc_comments: bool,
}

impl Options {
    /// Creates a new [`Options`] with doc comment parsing disabled.
    pub const fn new() -> Self {
        Options {
            parse_doc_comments: false,
        }
    }
}

pub struct Parser {
    rules: Vec<(Rule, usize)>,
    recursion_depth: u32,
}

impl Parser {
    pub const fn new() -> Self {
        Parser {
            rules: Vec::new(),
            recursion_depth: 0,
        }
    }

    fn reset(&mut self) {
        self.rules.clear();
        self.recursion_depth = 0;
    }

    fn push_rule_span(&mut self, rule: Rule, lexer: &mut Lexer<'_>) {
        self.rules.push((rule, lexer.start_byte_offset()));
    }

    fn pop_rule_span(&mut self, lexer: &Lexer<'_>) -> Span {
        let (_, initial) = self.rules.pop().unwrap();
        lexer.span_from(initial)
    }

    fn peek_rule_span(&mut self, lexer: &Lexer<'_>) -> Span {
        let &(_, initial) = self.rules.last().unwrap();
        lexer.span_from(initial)
    }

    fn race_rules(&self, rule0: Rule, rule1: Rule) -> Option<Rule> {
        Some(
            self.rules
                .iter()
                .rev()
                .find(|&x| x.0 == rule0 || x.0 == rule1)?
                .0,
        )
    }

    fn track_recursion<'a, F, R>(&mut self, f: F) -> Result<'a, R>
    where
        F: FnOnce(&mut Self) -> Result<'a, R>,
    {
        self.recursion_depth += 1;
        if self.recursion_depth >= 256 {
            return Err(Box::new(Error::Internal("Parser recursion limit exceeded")));
        }
        let ret = f(self);
        self.recursion_depth -= 1;
        ret
    }

    fn switch_value<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, ast::SwitchValue<'a>> {
        if let Token::Word("default") = lexer.peek().0 {
            let _ = lexer.next();
            return Ok(ast::SwitchValue::Default);
        }

        let expr = self.general_expression(lexer, ctx)?;
        Ok(ast::SwitchValue::Expr(expr))
    }

    /// Decide if we're looking at a construction expression, and return its
    /// type if so.
    ///
    /// If the identifier `word` is a [type-defining keyword], then return a
    /// [`ConstructorType`] value describing the type to build. Return an error
    /// if the type is not constructible (like `sampler`).
    ///
    /// If `word` isn't a type name, then return `None`.
    ///
    /// [type-defining keyword]: https://gpuweb.github.io/gpuweb/wgsl/#type-defining-keywords
    /// [`ConstructorType`]: ast::ConstructorType
    fn constructor_type<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        word: &'a str,
        span: Span,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Option<ast::ConstructorType<'a>>> {
        if let Some(scalar) = conv::get_scalar_type(&lexer.enable_extensions, span, word)? {
            return Ok(Some(ast::ConstructorType::Scalar(scalar)));
        }

        let partial = match word {
            "vec2" => ast::ConstructorType::PartialVector {
                size: crate::VectorSize::Bi,
            },
            "vec2i" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::I32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec2u" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::U32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec2f" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec2h" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec3" => ast::ConstructorType::PartialVector {
                size: crate::VectorSize::Tri,
            },
            "vec3i" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::I32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec3u" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::U32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec3f" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec3h" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec4" => ast::ConstructorType::PartialVector {
                size: crate::VectorSize::Quad,
            },
            "vec4i" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::I32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec4u" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::U32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec4f" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec4h" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat2x2" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Bi,
                rows: crate::VectorSize::Bi,
            },
            "mat2x2f" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Bi,
                    rows: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat2x2h" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Bi,
                    rows: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat2x3" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Bi,
                rows: crate::VectorSize::Tri,
            },
            "mat2x3f" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Bi,
                    rows: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat2x3h" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Bi,
                    rows: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat2x4" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Bi,
                rows: crate::VectorSize::Quad,
            },
            "mat2x4f" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Bi,
                    rows: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat2x4h" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Bi,
                    rows: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat3x2" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Tri,
                rows: crate::VectorSize::Bi,
            },
            "mat3x2f" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Tri,
                    rows: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat3x2h" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Tri,
                    rows: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat3x3" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Tri,
                rows: crate::VectorSize::Tri,
            },
            "mat3x3f" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Tri,
                    rows: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat3x3h" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Tri,
                    rows: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat3x4" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Tri,
                rows: crate::VectorSize::Quad,
            },
            "mat3x4f" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Tri,
                    rows: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat3x4h" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Tri,
                    rows: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat4x2" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Quad,
                rows: crate::VectorSize::Bi,
            },
            "mat4x2f" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Quad,
                    rows: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat4x2h" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Quad,
                    rows: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat4x3" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Quad,
                rows: crate::VectorSize::Tri,
            },
            "mat4x3f" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Quad,
                    rows: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat4x3h" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Quad,
                    rows: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat4x4" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Quad,
                rows: crate::VectorSize::Quad,
            },
            "mat4x4f" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Quad,
                    rows: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat4x4h" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Quad,
                    rows: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "array" => ast::ConstructorType::PartialArray,
            "atomic"
            | "binding_array"
            | "sampler"
            | "sampler_comparison"
            | "texture_1d"
            | "texture_1d_array"
            | "texture_2d"
            | "texture_2d_array"
            | "texture_3d"
            | "texture_cube"
            | "texture_cube_array"
            | "texture_multisampled_2d"
            | "texture_multisampled_2d_array"
            | "texture_depth_2d"
            | "texture_depth_2d_array"
            | "texture_depth_cube"
            | "texture_depth_cube_array"
            | "texture_depth_multisampled_2d"
            | "texture_external"
            | "texture_storage_1d"
            | "texture_storage_1d_array"
            | "texture_storage_2d"
            | "texture_storage_2d_array"
            | "texture_storage_3d" => return Err(Box::new(Error::TypeNotConstructible(span))),
            _ => return Ok(None),
        };

        // parse component type if present
        match (lexer.peek().0, partial) {
            (Token::Paren('<'), ast::ConstructorType::PartialVector { size }) => {
                let (ty, ty_span) = self.singular_generic(lexer, ctx)?;
                Ok(Some(ast::ConstructorType::Vector { size, ty, ty_span }))
            }
            (Token::Paren('<'), ast::ConstructorType::PartialMatrix { columns, rows }) => {
                let (ty, ty_span) = self.singular_generic(lexer, ctx)?;
                Ok(Some(ast::ConstructorType::Matrix {
                    columns,
                    rows,
                    ty,
                    ty_span,
                }))
            }
            (Token::Paren('<'), ast::ConstructorType::PartialArray) => {
                lexer.expect_generic_paren('<')?;
                let base = self.type_decl(lexer, ctx)?;
                let size = if lexer.end_of_generic_arguments() {
                    let expr = self.const_generic_expression(lexer, ctx)?;
                    lexer.skip(Token::Separator(','));
                    ast::ArraySize::Constant(expr)
                } else {
                    ast::ArraySize::Dynamic
                };
                lexer.expect_generic_paren('>')?;

                Ok(Some(ast::ConstructorType::Array { base, size }))
            }
            (_, partial) => Ok(Some(partial)),
        }
    }
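
    // How `constructor_type` above handles a few spellings (illustrative only):
    //
    //     vec3<f32>(...)     // "vec3" yields PartialVector, then `<f32>` upgrades it to Vector
    //     vec3f(...)         // predeclared alias: returns Vector with an `f32` scalar type directly
    //     vec3(...)          // no `<`, so it stays PartialVector; the component type is inferred later
    //     array<u32, 4>(...) // PartialArray, then element type and length come from the generics
    //     sampler(...)       // rejected with Error::TypeNotConstructible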

    /// Expects `name` to be consumed (not in lexer).
    fn arguments<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Vec<Handle<ast::Expression<'a>>>> {
        self.push_rule_span(Rule::EnclosedExpr, lexer);
        lexer.open_arguments()?;
        let mut arguments = Vec::new();
        loop {
            if !arguments.is_empty() {
                if !lexer.next_argument()? {
                    break;
                }
            } else if lexer.skip(Token::Paren(')')) {
                break;
            }
            let arg = self.general_expression(lexer, ctx)?;
            arguments.push(arg);
        }

        self.pop_rule_span(lexer);
        Ok(arguments)
    }

    fn enclosed_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.push_rule_span(Rule::EnclosedExpr, lexer);
        let expr = self.general_expression(lexer, ctx)?;
        self.pop_rule_span(lexer);
        Ok(expr)
    }

    /// Expects [`Rule::PrimaryExpr`] or [`Rule::SingularExpr`] on top; does not pop it.
    /// Expects `name` to be consumed (not in lexer).
    fn function_call<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        name: &'a str,
        name_span: Span,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        assert!(self.rules.last().is_some());

        let expr = match name {
            // bitcast looks like a function call, but it's an operator and must be handled differently.
            "bitcast" => {
                let (to, span) = self.singular_generic(lexer, ctx)?;

                lexer.open_arguments()?;
                let expr = self.general_expression(lexer, ctx)?;
                lexer.close_arguments()?;

                ast::Expression::Bitcast {
                    expr,
                    to,
                    ty_span: span,
                }
            }
            // everything else must be handled later, since they can be hidden by user-defined functions.
            _ => {
                let arguments = self.arguments(lexer, ctx)?;
                ctx.unresolved.insert(ast::Dependency {
                    ident: name,
                    usage: name_span,
                });
                ast::Expression::Call {
                    function: ast::Ident {
                        name,
                        span: name_span,
                    },
                    arguments,
                }
            }
        };

        let span = self.peek_rule_span(lexer);
        let expr = ctx.expressions.append(expr, span);
        Ok(expr)
    }

    fn ident_expr<'a>(
        &mut self,
        name: &'a str,
        name_span: Span,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> ast::IdentExpr<'a> {
        match ctx.local_table.lookup(name) {
            Some(&local) => ast::IdentExpr::Local(local),
            None => {
                ctx.unresolved.insert(ast::Dependency {
                    ident: name,
                    usage: name_span,
                });
                ast::IdentExpr::Unresolved(name)
            }
        }
    }

    fn primary_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.push_rule_span(Rule::PrimaryExpr, lexer);
        const fn literal_ray_flag<'b>(flag: crate::RayFlag) -> ast::Expression<'b> {
            ast::Expression::Literal(ast::Literal::Number(Number::U32(flag.bits())))
        }
        const fn literal_ray_intersection<'b>(
            intersection: crate::RayQueryIntersection,
        ) -> ast::Expression<'b> {
            ast::Expression::Literal(ast::Literal::Number(Number::U32(intersection as u32)))
        }

        let expr = match lexer.peek() {
            (Token::Paren('('), _) => {
                let _ = lexer.next();
                let expr = self.enclosed_expression(lexer, ctx)?;
                lexer.expect(Token::Paren(')'))?;
                self.pop_rule_span(lexer);
                return Ok(expr);
            }
            (Token::Word("true"), _) => {
                let _ = lexer.next();
                ast::Expression::Literal(ast::Literal::Bool(true))
            }
            (Token::Word("false"), _) => {
                let _ = lexer.next();
                ast::Expression::Literal(ast::Literal::Bool(false))
            }
            (Token::Number(res), span) => {
                let _ = lexer.next();
                let num = res.map_err(|err| Error::BadNumber(span, err))?;

                if let Some(enable_extension) = num.requires_enable_extension() {
                    if !lexer.enable_extensions.contains(enable_extension) {
                        return Err(Box::new(Error::EnableExtensionNotEnabled {
                            kind: enable_extension.into(),
                            span,
                        }));
                    }
                }

                ast::Expression::Literal(ast::Literal::Number(num))
            }
            (Token::Word("RAY_FLAG_NONE"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::empty())
            }
            (Token::Word("RAY_FLAG_FORCE_OPAQUE"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::FORCE_OPAQUE)
            }
            (Token::Word("RAY_FLAG_FORCE_NO_OPAQUE"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::FORCE_NO_OPAQUE)
            }
            (Token::Word("RAY_FLAG_TERMINATE_ON_FIRST_HIT"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::TERMINATE_ON_FIRST_HIT)
            }
            (Token::Word("RAY_FLAG_SKIP_CLOSEST_HIT_SHADER"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::SKIP_CLOSEST_HIT_SHADER)
            }
            (Token::Word("RAY_FLAG_CULL_BACK_FACING"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::CULL_BACK_FACING)
            }
            (Token::Word("RAY_FLAG_CULL_FRONT_FACING"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::CULL_FRONT_FACING)
            }
            (Token::Word("RAY_FLAG_CULL_OPAQUE"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::CULL_OPAQUE)
            }
            (Token::Word("RAY_FLAG_CULL_NO_OPAQUE"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::CULL_NO_OPAQUE)
            }
            (Token::Word("RAY_FLAG_SKIP_TRIANGLES"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::SKIP_TRIANGLES)
            }
            (Token::Word("RAY_FLAG_SKIP_AABBS"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::SKIP_AABBS)
            }
            (Token::Word("RAY_QUERY_INTERSECTION_NONE"), _) => {
                let _ = lexer.next();
                literal_ray_intersection(crate::RayQueryIntersection::None)
            }
            (Token::Word("RAY_QUERY_INTERSECTION_TRIANGLE"), _) => {
                let _ = lexer.next();
                literal_ray_intersection(crate::RayQueryIntersection::Triangle)
            }
            (Token::Word("RAY_QUERY_INTERSECTION_GENERATED"), _) => {
                let _ = lexer.next();
                literal_ray_intersection(crate::RayQueryIntersection::Generated)
            }
            (Token::Word("RAY_QUERY_INTERSECTION_AABB"), _) => {
                let _ = lexer.next();
                literal_ray_intersection(crate::RayQueryIntersection::Aabb)
            }
            (Token::Word(word), span) => {
                let start = lexer.start_byte_offset();
                let _ = lexer.next();

                if let Some(ty) = self.constructor_type(lexer, word, span, ctx)? {
                    let ty_span = lexer.span_from(start);
                    let components = self.arguments(lexer, ctx)?;
                    ast::Expression::Construct {
                        ty,
                        ty_span,
                        components,
                    }
                } else if let Token::Paren('(') = lexer.peek().0 {
                    self.pop_rule_span(lexer);
                    return self.function_call(lexer, word, span, ctx);
                } else if word == "bitcast" {
                    self.pop_rule_span(lexer);
                    return self.function_call(lexer, word, span, ctx);
                } else {
                    let ident = self.ident_expr(word, span, ctx);
                    ast::Expression::Ident(ident)
                }
            }
            other => {
                return Err(Box::new(Error::Unexpected(
                    other.1,
                    ExpectedToken::PrimaryExpression,
                )))
            }
        };

        let span = self.pop_rule_span(lexer);
        let expr = ctx.expressions.append(expr, span);
        Ok(expr)
    }

    fn postfix<'a>(
        &mut self,
        span_start: usize,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
        expr: Handle<ast::Expression<'a>>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        let mut expr = expr;

        loop {
            let expression = match lexer.peek().0 {
                Token::Separator('.') => {
                    let _ = lexer.next();
                    let field = lexer.next_ident()?;

                    ast::Expression::Member { base: expr, field }
                }
                Token::Paren('[') => {
                    let _ = lexer.next();
                    let index = self.enclosed_expression(lexer, ctx)?;
                    lexer.expect(Token::Paren(']'))?;

                    ast::Expression::Index { base: expr, index }
                }
                _ => break,
            };

            let span = lexer.span_from(span_start);
            expr = ctx.expressions.append(expression, span);
        }

        Ok(expr)
    }
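
    // For example, with an already-parsed `a`, an input like `a.b[i].c` makes the
    // loop above append `Member { base: a, field: b }`, then `Index` on that
    // result, then another `Member`, each with a span running from `span_start`
    // to the most recently consumed token.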

    fn const_generic_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.push_rule_span(Rule::GenericExpr, lexer);
        let expr = self.general_expression(lexer, ctx)?;
        self.pop_rule_span(lexer);
        Ok(expr)
    }

    /// Parse a `unary_expression`.
    fn unary_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.track_recursion(|this| {
            this.push_rule_span(Rule::UnaryExpr, lexer);
            //TODO: refactor this to avoid backing up
            let expr = match lexer.peek().0 {
                Token::Operation('-') => {
                    let _ = lexer.next();
                    let expr = this.unary_expression(lexer, ctx)?;
                    let expr = ast::Expression::Unary {
                        op: crate::UnaryOperator::Negate,
                        expr,
                    };
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                Token::Operation('!') => {
                    let _ = lexer.next();
                    let expr = this.unary_expression(lexer, ctx)?;
                    let expr = ast::Expression::Unary {
                        op: crate::UnaryOperator::LogicalNot,
                        expr,
                    };
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                Token::Operation('~') => {
                    let _ = lexer.next();
                    let expr = this.unary_expression(lexer, ctx)?;
                    let expr = ast::Expression::Unary {
                        op: crate::UnaryOperator::BitwiseNot,
                        expr,
                    };
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                Token::Operation('*') => {
                    let _ = lexer.next();
                    let expr = this.unary_expression(lexer, ctx)?;
                    let expr = ast::Expression::Deref(expr);
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                Token::Operation('&') => {
                    let _ = lexer.next();
                    let expr = this.unary_expression(lexer, ctx)?;
                    let expr = ast::Expression::AddrOf(expr);
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                _ => this.singular_expression(lexer, ctx)?,
            };

            this.pop_rule_span(lexer);
            Ok(expr)
        })
    }

    /// Parse a `lhs_expression`.
    ///
    /// LHS expressions only support the `&` and `*` operators and
    /// the `[]` and `.` postfix selectors.
    fn lhs_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.track_recursion(|this| {
            this.push_rule_span(Rule::LhsExpr, lexer);
            let start = lexer.start_byte_offset();
            let expr = match lexer.peek() {
                (Token::Operation('*'), _) => {
                    let _ = lexer.next();
                    let expr = this.lhs_expression(lexer, ctx)?;
                    let expr = ast::Expression::Deref(expr);
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                (Token::Operation('&'), _) => {
                    let _ = lexer.next();
                    let expr = this.lhs_expression(lexer, ctx)?;
                    let expr = ast::Expression::AddrOf(expr);
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                (Token::Paren('('), _) => {
                    let _ = lexer.next();
                    let primary_expr = this.lhs_expression(lexer, ctx)?;
                    lexer.expect(Token::Paren(')'))?;
                    this.postfix(start, lexer, ctx, primary_expr)?
                }
                (Token::Word(word), span) => {
                    let _ = lexer.next();
                    let ident = this.ident_expr(word, span, ctx);
                    let primary_expr = ctx.expressions.append(ast::Expression::Ident(ident), span);
                    this.postfix(start, lexer, ctx, primary_expr)?
                }
                _ => this.singular_expression(lexer, ctx)?,
            };

            this.pop_rule_span(lexer);
            Ok(expr)
        })
    }
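
    // Typical left-hand sides accepted above (illustrative): `x`, `x.field`,
    // `x[i]`, `*ptr`, and parenthesized forms such as `(*ptr)[i]`. Binary
    // operators are deliberately absent: the grammar limits LHS expressions to
    // identifiers, the `*`/`&` prefixes, and the `[]`/`.` postfix selectors.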

    /// Parse a `singular_expression`.
    fn singular_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        let start = lexer.start_byte_offset();
        self.push_rule_span(Rule::SingularExpr, lexer);
        let primary_expr = self.primary_expression(lexer, ctx)?;
        let singular_expr = self.postfix(start, lexer, ctx, primary_expr)?;
        self.pop_rule_span(lexer);

        Ok(singular_expr)
    }

    fn equality_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        context: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        // equality_expression
        context.parse_binary_op(
            lexer,
            |token| match token {
                Token::LogicalOperation('=') => Some(crate::BinaryOperator::Equal),
                Token::LogicalOperation('!') => Some(crate::BinaryOperator::NotEqual),
                _ => None,
            },
            // relational_expression
            |lexer, context| {
                let enclosing = self.race_rules(Rule::GenericExpr, Rule::EnclosedExpr);
                context.parse_binary_op(
                    lexer,
                    match enclosing {
                        Some(Rule::GenericExpr) => |token| match token {
                            Token::LogicalOperation('<') => Some(crate::BinaryOperator::LessEqual),
                            _ => None,
                        },
                        _ => |token| match token {
                            Token::Paren('<') => Some(crate::BinaryOperator::Less),
                            Token::Paren('>') => Some(crate::BinaryOperator::Greater),
                            Token::LogicalOperation('<') => Some(crate::BinaryOperator::LessEqual),
                            Token::LogicalOperation('>') => {
                                Some(crate::BinaryOperator::GreaterEqual)
                            }
                            _ => None,
                        },
                    },
                    // shift_expression
                    |lexer, context| {
                        context.parse_binary_op(
                            lexer,
                            match enclosing {
                                Some(Rule::GenericExpr) => |token| match token {
                                    Token::ShiftOperation('<') => {
                                        Some(crate::BinaryOperator::ShiftLeft)
                                    }
                                    _ => None,
                                },
                                _ => |token| match token {
                                    Token::ShiftOperation('<') => {
                                        Some(crate::BinaryOperator::ShiftLeft)
                                    }
                                    Token::ShiftOperation('>') => {
                                        Some(crate::BinaryOperator::ShiftRight)
                                    }
                                    _ => None,
                                },
                            },
                            // additive_expression
                            |lexer, context| {
                                context.parse_binary_op(
                                    lexer,
                                    |token| match token {
                                        Token::Operation('+') => Some(crate::BinaryOperator::Add),
                                        Token::Operation('-') => {
                                            Some(crate::BinaryOperator::Subtract)
                                        }
                                        _ => None,
                                    },
                                    // multiplicative_expression
                                    |lexer, context| {
                                        context.parse_binary_op(
                                            lexer,
                                            |token| match token {
                                                Token::Operation('*') => {
                                                    Some(crate::BinaryOperator::Multiply)
                                                }
                                                Token::Operation('/') => {
                                                    Some(crate::BinaryOperator::Divide)
                                                }
                                                Token::Operation('%') => {
                                                    Some(crate::BinaryOperator::Modulo)
                                                }
                                                _ => None,
                                            },
                                            |lexer, context| self.unary_expression(lexer, context),
                                        )
                                    },
                                )
                            },
                        )
                    },
                )
            },
        )
    }
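
    // Together with `general_expression_with_span` below, this builds the WGSL
    // precedence chain out of nested `parse_binary_op` calls, from loosest to
    // tightest: logical or/and, bitwise or/xor/and, equality, relational, shift,
    // additive, multiplicative, and finally `unary_expression`. The `race_rules`
    // check above implements template-list disambiguation: when the nearest
    // enclosing `GenericExpr`/`EnclosedExpr` rule is a `GenericExpr`, a bare `>`
    // (or `>>`) is left unconsumed so that, for example, the `>` in
    // `array<u32, SIZE>` closes the template instead of parsing as a comparison
    // or right shift.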

    fn general_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.general_expression_with_span(lexer, ctx)
            .map(|(expr, _)| expr)
    }

    fn general_expression_with_span<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        context: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, (Handle<ast::Expression<'a>>, Span)> {
        self.push_rule_span(Rule::GeneralExpr, lexer);
        // logical_or_expression
        let handle = context.parse_binary_op(
            lexer,
            |token| match token {
                Token::LogicalOperation('|') => Some(crate::BinaryOperator::LogicalOr),
                _ => None,
            },
            // logical_and_expression
            |lexer, context| {
                context.parse_binary_op(
                    lexer,
                    |token| match token {
                        Token::LogicalOperation('&') => Some(crate::BinaryOperator::LogicalAnd),
                        _ => None,
                    },
                    // inclusive_or_expression
                    |lexer, context| {
                        context.parse_binary_op(
                            lexer,
                            |token| match token {
                                Token::Operation('|') => Some(crate::BinaryOperator::InclusiveOr),
                                _ => None,
                            },
                            // exclusive_or_expression
                            |lexer, context| {
                                context.parse_binary_op(
                                    lexer,
                                    |token| match token {
                                        Token::Operation('^') => {
                                            Some(crate::BinaryOperator::ExclusiveOr)
                                        }
                                        _ => None,
                                    },
                                    // and_expression
                                    |lexer, context| {
                                        context.parse_binary_op(
                                            lexer,
                                            |token| match token {
                                                Token::Operation('&') => {
                                                    Some(crate::BinaryOperator::And)
                                                }
                                                _ => None,
                                            },
                                            |lexer, context| {
                                                self.equality_expression(lexer, context)
                                            },
                                        )
                                    },
                                )
                            },
                        )
                    },
                )
            },
        )?;
        Ok((handle, self.pop_rule_span(lexer)))
    }

    fn variable_decl<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, ast::GlobalVariable<'a>> {
        self.push_rule_span(Rule::VariableDecl, lexer);
        let mut space = crate::AddressSpace::Handle;

        if lexer.skip(Token::Paren('<')) {
            let (class_str, span) = lexer.next_ident_with_span()?;
            space = match class_str {
                "storage" => {
                    let access = if lexer.skip(Token::Separator(',')) {
                        lexer.next_storage_access()?
                    } else {
                        // defaulting to `read`
                        crate::StorageAccess::LOAD
                    };
                    crate::AddressSpace::Storage { access }
                }
                _ => conv::map_address_space(class_str, span)?,
            };
            lexer.expect(Token::Paren('>'))?;
        }
        let name = lexer.next_ident()?;

        let ty = if lexer.skip(Token::Separator(':')) {
            Some(self.type_decl(lexer, ctx)?)
        } else {
            None
        };

        let init = if lexer.skip(Token::Operation('=')) {
            let handle = self.general_expression(lexer, ctx)?;
            Some(handle)
        } else {
            None
        };
        lexer.expect(Token::Separator(';'))?;
        self.pop_rule_span(lexer);

        Ok(ast::GlobalVariable {
            name,
            space,
            binding: None,
            ty,
            init,
            doc_comments: Vec::new(),
        })
    }
1351
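    /// Parse the member list of a struct body, starting at the opening `{`.
    ///
    /// Each member may carry `@size(...)`, `@align(...)`, or binding
    /// attributes, followed by `name: type` and an optional trailing comma,
    /// e.g. roughly `@align(16) position: vec3<f32>,`. Duplicate member names
    /// are rejected with a redefinition error.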
1352    fn struct_body<'a>(
1353        &mut self,
1354        lexer: &mut Lexer<'a>,
1355        ctx: &mut ExpressionContext<'a, '_, '_>,
1356    ) -> Result<'a, Vec<ast::StructMember<'a>>> {
1357        let mut members = Vec::new();
1358        let mut member_names = FastHashSet::default();
1359
1360        lexer.expect(Token::Paren('{'))?;
1361        let mut ready = true;
1362        while !lexer.skip(Token::Paren('}')) {
1363            if !ready {
1364                return Err(Box::new(Error::Unexpected(
1365                    lexer.next().1,
1366                    ExpectedToken::Token(Token::Separator(',')),
1367                )));
1368            }
1369
1370            let doc_comments = lexer.accumulate_doc_comments();
1371
1372            let (mut size, mut align) = (ParsedAttribute::default(), ParsedAttribute::default());
1373            self.push_rule_span(Rule::Attribute, lexer);
1374            let mut bind_parser = BindingParser::default();
1375            while lexer.skip(Token::Attribute) {
1376                match lexer.next_ident_with_span()? {
1377                    ("size", name_span) => {
1378                        lexer.expect(Token::Paren('('))?;
1379                        let expr = self.general_expression(lexer, ctx)?;
1380                        lexer.expect(Token::Paren(')'))?;
1381                        size.set(expr, name_span)?;
1382                    }
1383                    ("align", name_span) => {
1384                        lexer.expect(Token::Paren('('))?;
1385                        let expr = self.general_expression(lexer, ctx)?;
1386                        lexer.expect(Token::Paren(')'))?;
1387                        align.set(expr, name_span)?;
1388                    }
1389                    (word, word_span) => bind_parser.parse(self, lexer, word, word_span, ctx)?,
1390                }
1391            }
1392
1393            let bind_span = self.pop_rule_span(lexer);
1394            let binding = bind_parser.finish(bind_span)?;
1395
1396            let name = lexer.next_ident()?;
1397            lexer.expect(Token::Separator(':'))?;
1398            let ty = self.type_decl(lexer, ctx)?;
1399            ready = lexer.skip(Token::Separator(','));
1400
1401            members.push(ast::StructMember {
1402                name,
1403                ty,
1404                binding,
1405                size: size.value,
1406                align: align.value,
1407                doc_comments,
1408            });
1409
1410            if !member_names.insert(name.name) {
1411                return Err(Box::new(Error::Redefinition {
1412                    previous: members
1413                        .iter()
1414                        .find(|x| x.name.name == name.name)
1415                        .map(|x| x.name.span)
1416                        .unwrap(),
1417                    current: name.span,
1418                }));
1419            }
1420        }
1421
1422        Ok(members)
1423    }
1424
1425    /// Parses `<T>`, returning `T` and the span of `T`.
1426    fn singular_generic<'a>(
1427        &mut self,
1428        lexer: &mut Lexer<'a>,
1429        ctx: &mut ExpressionContext<'a, '_, '_>,
1430    ) -> Result<'a, (Handle<ast::Type<'a>>, Span)> {
1431        lexer.expect_generic_paren('<')?;
1432        let start = lexer.start_byte_offset();
1433        let ty = self.type_decl(lexer, ctx)?;
1434        let span = lexer.span_from(start);
1435        lexer.skip(Token::Separator(','));
1436        lexer.expect_generic_paren('>')?;
1437        Ok((ty, span))
1438    }
1439
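    /// Parse the `<T>` element type of a matrix such as `mat4x3<f32>`,
    /// producing an [`ast::Type::Matrix`] with the given column and row counts.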
1440    fn matrix_with_type<'a>(
1441        &mut self,
1442        lexer: &mut Lexer<'a>,
1443        ctx: &mut ExpressionContext<'a, '_, '_>,
1444        columns: crate::VectorSize,
1445        rows: crate::VectorSize,
1446    ) -> Result<'a, ast::Type<'a>> {
1447        let (ty, ty_span) = self.singular_generic(lexer, ctx)?;
1448        Ok(ast::Type::Matrix {
1449            columns,
1450            rows,
1451            ty,
1452            ty_span,
1453        })
1454    }
1455
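    /// Try to interpret `word` as a predeclared WGSL type, reading any generic
    /// arguments (e.g. the `<f32, 4>` of `array<f32, 4>`) from `lexer`.
    ///
    /// Returns `Ok(None)` if `word` is not a predeclared type, in which case
    /// the caller treats it as a user-defined type name.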
1456    fn type_decl_impl<'a>(
1457        &mut self,
1458        lexer: &mut Lexer<'a>,
1459        word: &'a str,
1460        span: Span,
1461        ctx: &mut ExpressionContext<'a, '_, '_>,
1462    ) -> Result<'a, Option<ast::Type<'a>>> {
1463        if let Some(scalar) = conv::get_scalar_type(&lexer.enable_extensions, span, word)? {
1464            return Ok(Some(ast::Type::Scalar(scalar)));
1465        }
1466
1467        Ok(Some(match word {
1468            "vec2" => {
1469                let (ty, ty_span) = self.singular_generic(lexer, ctx)?;
1470                ast::Type::Vector {
1471                    size: crate::VectorSize::Bi,
1472                    ty,
1473                    ty_span,
1474                }
1475            }
1476            "vec2i" => ast::Type::Vector {
1477                size: crate::VectorSize::Bi,
1478                ty: ctx.new_scalar(Scalar::I32),
1479                ty_span: Span::UNDEFINED,
1480            },
1481            "vec2u" => ast::Type::Vector {
1482                size: crate::VectorSize::Bi,
1483                ty: ctx.new_scalar(Scalar::U32),
1484                ty_span: Span::UNDEFINED,
1485            },
1486            "vec2f" => ast::Type::Vector {
1487                size: crate::VectorSize::Bi,
1488                ty: ctx.new_scalar(Scalar::F32),
1489                ty_span: Span::UNDEFINED,
1490            },
1491            "vec2h" => ast::Type::Vector {
1492                size: crate::VectorSize::Bi,
1493                ty: ctx.new_scalar(Scalar::F16),
1494                ty_span: Span::UNDEFINED,
1495            },
1496            "vec3" => {
1497                let (ty, ty_span) = self.singular_generic(lexer, ctx)?;
1498                ast::Type::Vector {
1499                    size: crate::VectorSize::Tri,
1500                    ty,
1501                    ty_span,
1502                }
1503            }
1504            "vec3i" => ast::Type::Vector {
1505                size: crate::VectorSize::Tri,
1506                ty: ctx.new_scalar(Scalar::I32),
1507                ty_span: Span::UNDEFINED,
1508            },
1509            "vec3u" => ast::Type::Vector {
1510                size: crate::VectorSize::Tri,
1511                ty: ctx.new_scalar(Scalar::U32),
1512                ty_span: Span::UNDEFINED,
1513            },
1514            "vec3f" => ast::Type::Vector {
1515                size: crate::VectorSize::Tri,
1516                ty: ctx.new_scalar(Scalar::F32),
1517                ty_span: Span::UNDEFINED,
1518            },
1519            "vec3h" => ast::Type::Vector {
1520                size: crate::VectorSize::Tri,
1521                ty: ctx.new_scalar(Scalar::F16),
1522                ty_span: Span::UNDEFINED,
1523            },
1524            "vec4" => {
1525                let (ty, ty_span) = self.singular_generic(lexer, ctx)?;
1526                ast::Type::Vector {
1527                    size: crate::VectorSize::Quad,
1528                    ty,
1529                    ty_span,
1530                }
1531            }
1532            "vec4i" => ast::Type::Vector {
1533                size: crate::VectorSize::Quad,
1534                ty: ctx.new_scalar(Scalar::I32),
1535                ty_span: Span::UNDEFINED,
1536            },
1537            "vec4u" => ast::Type::Vector {
1538                size: crate::VectorSize::Quad,
1539                ty: ctx.new_scalar(Scalar::U32),
1540                ty_span: Span::UNDEFINED,
1541            },
1542            "vec4f" => ast::Type::Vector {
1543                size: crate::VectorSize::Quad,
1544                ty: ctx.new_scalar(Scalar::F32),
1545                ty_span: Span::UNDEFINED,
1546            },
1547            "vec4h" => ast::Type::Vector {
1548                size: crate::VectorSize::Quad,
1549                ty: ctx.new_scalar(Scalar::F16),
1550                ty_span: Span::UNDEFINED,
1551            },
1552            "mat2x2" => {
1553                self.matrix_with_type(lexer, ctx, crate::VectorSize::Bi, crate::VectorSize::Bi)?
1554            }
1555            "mat2x2f" => ast::Type::Matrix {
1556                columns: crate::VectorSize::Bi,
1557                rows: crate::VectorSize::Bi,
1558                ty: ctx.new_scalar(Scalar::F32),
1559                ty_span: Span::UNDEFINED,
1560            },
1561            "mat2x2h" => ast::Type::Matrix {
1562                columns: crate::VectorSize::Bi,
1563                rows: crate::VectorSize::Bi,
1564                ty: ctx.new_scalar(Scalar::F16),
1565                ty_span: Span::UNDEFINED,
1566            },
1567            "mat2x3" => {
1568                self.matrix_with_type(lexer, ctx, crate::VectorSize::Bi, crate::VectorSize::Tri)?
1569            }
1570            "mat2x3f" => ast::Type::Matrix {
1571                columns: crate::VectorSize::Bi,
1572                rows: crate::VectorSize::Tri,
1573                ty: ctx.new_scalar(Scalar::F32),
1574                ty_span: Span::UNDEFINED,
1575            },
1576            "mat2x3h" => ast::Type::Matrix {
1577                columns: crate::VectorSize::Bi,
1578                rows: crate::VectorSize::Tri,
1579                ty: ctx.new_scalar(Scalar::F16),
1580                ty_span: Span::UNDEFINED,
1581            },
1582            "mat2x4" => {
1583                self.matrix_with_type(lexer, ctx, crate::VectorSize::Bi, crate::VectorSize::Quad)?
1584            }
1585            "mat2x4f" => ast::Type::Matrix {
1586                columns: crate::VectorSize::Bi,
1587                rows: crate::VectorSize::Quad,
1588                ty: ctx.new_scalar(Scalar::F32),
1589                ty_span: Span::UNDEFINED,
1590            },
1591            "mat2x4h" => ast::Type::Matrix {
1592                columns: crate::VectorSize::Bi,
1593                rows: crate::VectorSize::Quad,
1594                ty: ctx.new_scalar(Scalar::F16),
1595                ty_span: Span::UNDEFINED,
1596            },
1597            "mat3x2" => {
1598                self.matrix_with_type(lexer, ctx, crate::VectorSize::Tri, crate::VectorSize::Bi)?
1599            }
1600            "mat3x2f" => ast::Type::Matrix {
1601                columns: crate::VectorSize::Tri,
1602                rows: crate::VectorSize::Bi,
1603                ty: ctx.new_scalar(Scalar::F32),
1604                ty_span: Span::UNDEFINED,
1605            },
1606            "mat3x2h" => ast::Type::Matrix {
1607                columns: crate::VectorSize::Tri,
1608                rows: crate::VectorSize::Bi,
1609                ty: ctx.new_scalar(Scalar::F16),
1610                ty_span: Span::UNDEFINED,
1611            },
1612            "mat3x3" => {
1613                self.matrix_with_type(lexer, ctx, crate::VectorSize::Tri, crate::VectorSize::Tri)?
1614            }
1615            "mat3x3f" => ast::Type::Matrix {
1616                columns: crate::VectorSize::Tri,
1617                rows: crate::VectorSize::Tri,
1618                ty: ctx.new_scalar(Scalar::F32),
1619                ty_span: Span::UNDEFINED,
1620            },
1621            "mat3x3h" => ast::Type::Matrix {
1622                columns: crate::VectorSize::Tri,
1623                rows: crate::VectorSize::Tri,
1624                ty: ctx.new_scalar(Scalar::F16),
1625                ty_span: Span::UNDEFINED,
1626            },
1627            "mat3x4" => {
1628                self.matrix_with_type(lexer, ctx, crate::VectorSize::Tri, crate::VectorSize::Quad)?
1629            }
1630            "mat3x4f" => ast::Type::Matrix {
1631                columns: crate::VectorSize::Tri,
1632                rows: crate::VectorSize::Quad,
1633                ty: ctx.new_scalar(Scalar::F32),
1634                ty_span: Span::UNDEFINED,
1635            },
1636            "mat3x4h" => ast::Type::Matrix {
1637                columns: crate::VectorSize::Tri,
1638                rows: crate::VectorSize::Quad,
1639                ty: ctx.new_scalar(Scalar::F16),
1640                ty_span: Span::UNDEFINED,
1641            },
1642            "mat4x2" => {
1643                self.matrix_with_type(lexer, ctx, crate::VectorSize::Quad, crate::VectorSize::Bi)?
1644            }
1645            "mat4x2f" => ast::Type::Matrix {
1646                columns: crate::VectorSize::Quad,
1647                rows: crate::VectorSize::Bi,
1648                ty: ctx.new_scalar(Scalar::F32),
1649                ty_span: Span::UNDEFINED,
1650            },
1651            "mat4x2h" => ast::Type::Matrix {
1652                columns: crate::VectorSize::Quad,
1653                rows: crate::VectorSize::Bi,
1654                ty: ctx.new_scalar(Scalar::F16),
1655                ty_span: Span::UNDEFINED,
1656            },
1657            "mat4x3" => {
1658                self.matrix_with_type(lexer, ctx, crate::VectorSize::Quad, crate::VectorSize::Tri)?
1659            }
1660            "mat4x3f" => ast::Type::Matrix {
1661                columns: crate::VectorSize::Quad,
1662                rows: crate::VectorSize::Tri,
1663                ty: ctx.new_scalar(Scalar::F32),
1664                ty_span: Span::UNDEFINED,
1665            },
1666            "mat4x3h" => ast::Type::Matrix {
1667                columns: crate::VectorSize::Quad,
1668                rows: crate::VectorSize::Tri,
1669                ty: ctx.new_scalar(Scalar::F16),
1670                ty_span: Span::UNDEFINED,
1671            },
1672            "mat4x4" => {
1673                self.matrix_with_type(lexer, ctx, crate::VectorSize::Quad, crate::VectorSize::Quad)?
1674            }
1675            "mat4x4f" => ast::Type::Matrix {
1676                columns: crate::VectorSize::Quad,
1677                rows: crate::VectorSize::Quad,
1678                ty: ctx.new_scalar(Scalar::F32),
1679                ty_span: Span::UNDEFINED,
1680            },
1681            "mat4x4h" => ast::Type::Matrix {
1682                columns: crate::VectorSize::Quad,
1683                rows: crate::VectorSize::Quad,
1684                ty: ctx.new_scalar(Scalar::F16),
1685                ty_span: Span::UNDEFINED,
1686            },
1687            "atomic" => {
1688                let scalar = lexer.next_scalar_generic()?;
1689                ast::Type::Atomic(scalar)
1690            }
1691            "ptr" => {
1692                lexer.expect_generic_paren('<')?;
1693                let (ident, span) = lexer.next_ident_with_span()?;
1694                let mut space = conv::map_address_space(ident, span)?;
1695                lexer.expect(Token::Separator(','))?;
1696                let base = self.type_decl(lexer, ctx)?;
1697                if let crate::AddressSpace::Storage { ref mut access } = space {
1698                    *access = if lexer.end_of_generic_arguments() {
1699                        let result = lexer.next_storage_access()?;
1700                        lexer.skip(Token::Separator(','));
1701                        result
1702                    } else {
1703                        crate::StorageAccess::LOAD
1704                    };
1705                }
1706                lexer.expect_generic_paren('>')?;
1707                ast::Type::Pointer { base, space }
1708            }
1709            "array" => {
1710                lexer.expect_generic_paren('<')?;
1711                let base = self.type_decl(lexer, ctx)?;
1712                let size = if lexer.end_of_generic_arguments() {
1713                    let size = self.const_generic_expression(lexer, ctx)?;
1714                    lexer.skip(Token::Separator(','));
1715                    ast::ArraySize::Constant(size)
1716                } else {
1717                    ast::ArraySize::Dynamic
1718                };
1719                lexer.expect_generic_paren('>')?;
1720
1721                ast::Type::Array { base, size }
1722            }
1723            "binding_array" => {
1724                lexer.expect_generic_paren('<')?;
1725                let base = self.type_decl(lexer, ctx)?;
1726                let size = if lexer.end_of_generic_arguments() {
1727                    let size = self.unary_expression(lexer, ctx)?;
1728                    lexer.skip(Token::Separator(','));
1729                    ast::ArraySize::Constant(size)
1730                } else {
1731                    ast::ArraySize::Dynamic
1732                };
1733                lexer.expect_generic_paren('>')?;
1734
1735                ast::Type::BindingArray { base, size }
1736            }
1737            "sampler" => ast::Type::Sampler { comparison: false },
1738            "sampler_comparison" => ast::Type::Sampler { comparison: true },
1739            "texture_1d" => {
1740                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1741                Self::check_texture_sample_type(scalar, span)?;
1742                ast::Type::Image {
1743                    dim: crate::ImageDimension::D1,
1744                    arrayed: false,
1745                    class: crate::ImageClass::Sampled {
1746                        kind: scalar.kind,
1747                        multi: false,
1748                    },
1749                }
1750            }
1751            "texture_1d_array" => {
1752                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1753                Self::check_texture_sample_type(scalar, span)?;
1754                ast::Type::Image {
1755                    dim: crate::ImageDimension::D1,
1756                    arrayed: true,
1757                    class: crate::ImageClass::Sampled {
1758                        kind: scalar.kind,
1759                        multi: false,
1760                    },
1761                }
1762            }
1763            "texture_2d" => {
1764                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1765                Self::check_texture_sample_type(scalar, span)?;
1766                ast::Type::Image {
1767                    dim: crate::ImageDimension::D2,
1768                    arrayed: false,
1769                    class: crate::ImageClass::Sampled {
1770                        kind: scalar.kind,
1771                        multi: false,
1772                    },
1773                }
1774            }
1775            "texture_2d_array" => {
1776                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1777                Self::check_texture_sample_type(scalar, span)?;
1778                ast::Type::Image {
1779                    dim: crate::ImageDimension::D2,
1780                    arrayed: true,
1781                    class: crate::ImageClass::Sampled {
1782                        kind: scalar.kind,
1783                        multi: false,
1784                    },
1785                }
1786            }
1787            "texture_3d" => {
1788                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1789                Self::check_texture_sample_type(scalar, span)?;
1790                ast::Type::Image {
1791                    dim: crate::ImageDimension::D3,
1792                    arrayed: false,
1793                    class: crate::ImageClass::Sampled {
1794                        kind: scalar.kind,
1795                        multi: false,
1796                    },
1797                }
1798            }
1799            "texture_cube" => {
1800                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1801                Self::check_texture_sample_type(scalar, span)?;
1802                ast::Type::Image {
1803                    dim: crate::ImageDimension::Cube,
1804                    arrayed: false,
1805                    class: crate::ImageClass::Sampled {
1806                        kind: scalar.kind,
1807                        multi: false,
1808                    },
1809                }
1810            }
1811            "texture_cube_array" => {
1812                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1813                Self::check_texture_sample_type(scalar, span)?;
1814                ast::Type::Image {
1815                    dim: crate::ImageDimension::Cube,
1816                    arrayed: true,
1817                    class: crate::ImageClass::Sampled {
1818                        kind: scalar.kind,
1819                        multi: false,
1820                    },
1821                }
1822            }
1823            "texture_multisampled_2d" => {
1824                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1825                Self::check_texture_sample_type(scalar, span)?;
1826                ast::Type::Image {
1827                    dim: crate::ImageDimension::D2,
1828                    arrayed: false,
1829                    class: crate::ImageClass::Sampled {
1830                        kind: scalar.kind,
1831                        multi: true,
1832                    },
1833                }
1834            }
1835            "texture_multisampled_2d_array" => {
1836                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1837                Self::check_texture_sample_type(scalar, span)?;
1838                ast::Type::Image {
1839                    dim: crate::ImageDimension::D2,
1840                    arrayed: true,
1841                    class: crate::ImageClass::Sampled {
1842                        kind: scalar.kind,
1843                        multi: true,
1844                    },
1845                }
1846            }
1847            "texture_depth_2d" => ast::Type::Image {
1848                dim: crate::ImageDimension::D2,
1849                arrayed: false,
1850                class: crate::ImageClass::Depth { multi: false },
1851            },
1852            "texture_depth_2d_array" => ast::Type::Image {
1853                dim: crate::ImageDimension::D2,
1854                arrayed: true,
1855                class: crate::ImageClass::Depth { multi: false },
1856            },
1857            "texture_depth_cube" => ast::Type::Image {
1858                dim: crate::ImageDimension::Cube,
1859                arrayed: false,
1860                class: crate::ImageClass::Depth { multi: false },
1861            },
1862            "texture_depth_cube_array" => ast::Type::Image {
1863                dim: crate::ImageDimension::Cube,
1864                arrayed: true,
1865                class: crate::ImageClass::Depth { multi: false },
1866            },
1867            "texture_depth_multisampled_2d" => ast::Type::Image {
1868                dim: crate::ImageDimension::D2,
1869                arrayed: false,
1870                class: crate::ImageClass::Depth { multi: true },
1871            },
1872            "texture_external" => ast::Type::Image {
1873                dim: crate::ImageDimension::D2,
1874                arrayed: false,
1875                class: crate::ImageClass::External,
1876            },
1877            "texture_storage_1d" => {
1878                let (format, access) = lexer.next_format_generic()?;
1879                ast::Type::Image {
1880                    dim: crate::ImageDimension::D1,
1881                    arrayed: false,
1882                    class: crate::ImageClass::Storage { format, access },
1883                }
1884            }
1885            "texture_storage_1d_array" => {
1886                let (format, access) = lexer.next_format_generic()?;
1887                ast::Type::Image {
1888                    dim: crate::ImageDimension::D1,
1889                    arrayed: true,
1890                    class: crate::ImageClass::Storage { format, access },
1891                }
1892            }
1893            "texture_storage_2d" => {
1894                let (format, access) = lexer.next_format_generic()?;
1895                ast::Type::Image {
1896                    dim: crate::ImageDimension::D2,
1897                    arrayed: false,
1898                    class: crate::ImageClass::Storage { format, access },
1899                }
1900            }
1901            "texture_storage_2d_array" => {
1902                let (format, access) = lexer.next_format_generic()?;
1903                ast::Type::Image {
1904                    dim: crate::ImageDimension::D2,
1905                    arrayed: true,
1906                    class: crate::ImageClass::Storage { format, access },
1907                }
1908            }
1909            "texture_storage_3d" => {
1910                let (format, access) = lexer.next_format_generic()?;
1911                ast::Type::Image {
1912                    dim: crate::ImageDimension::D3,
1913                    arrayed: false,
1914                    class: crate::ImageClass::Storage { format, access },
1915                }
1916            }
1917            "acceleration_structure" => {
1918                let vertex_return = lexer.next_acceleration_structure_flags()?;
1919                ast::Type::AccelerationStructure { vertex_return }
1920            }
1921            "ray_query" => {
1922                let vertex_return = lexer.next_acceleration_structure_flags()?;
1923                ast::Type::RayQuery { vertex_return }
1924            }
1925            "RayDesc" => ast::Type::RayDesc,
1926            "RayIntersection" => ast::Type::RayIntersection,
1927            _ => return Ok(None),
1928        }))
1929    }
1930
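    /// Check that `scalar` is a valid sampled-texture component type.
    ///
    /// The WGSL spec allows the 32-bit `f32`/`i32`/`u32` scalars; 64-bit
    /// unsigned integers are also accepted here, presumably to support an
    /// extension.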
1931    fn check_texture_sample_type(scalar: Scalar, span: Span) -> Result<'static, ()> {
1932        use crate::ScalarKind::*;
1933        // Validate according to https://gpuweb.github.io/gpuweb/wgsl/#sampled-texture-type
1934        match scalar {
1935            Scalar {
1936                kind: Float | Sint | Uint,
1937                width: 4,
1938            } => Ok(()),
1939            Scalar {
1940                kind: Uint,
1941                width: 8,
1942            } => Ok(()),
1943            _ => Err(Box::new(Error::BadTextureSampleType { span, scalar })),
1944        }
1945    }
1946
1947    /// Parse a type declaration: either a predeclared type or a reference to a user-defined type.
1948    fn type_decl<'a>(
1949        &mut self,
1950        lexer: &mut Lexer<'a>,
1951        ctx: &mut ExpressionContext<'a, '_, '_>,
1952    ) -> Result<'a, Handle<ast::Type<'a>>> {
1953        self.track_recursion(|this| {
1954            this.push_rule_span(Rule::TypeDecl, lexer);
1955
1956            let (name, span) = lexer.next_ident_with_span()?;
1957
1958            let ty = match this.type_decl_impl(lexer, name, span, ctx)? {
1959                Some(ty) => ty,
1960                None => {
1961                    ctx.unresolved.insert(ast::Dependency {
1962                        ident: name,
1963                        usage: span,
1964                    });
1965                    ast::Type::User(ast::Ident { name, span })
1966                }
1967            };
1968
1969            this.pop_rule_span(lexer);
1970
1971            let handle = ctx.types.append(ty, Span::UNDEFINED);
1972            Ok(handle)
1973        })
1974    }
1975
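    /// Parse the operator and right-hand side of an assignment whose left-hand
    /// side `target` has already been parsed: plain `=`, compound assignments
    /// such as `+=` or `<<=`, or the `++`/`--` increment and decrement forms.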
1976    fn assignment_op_and_rhs<'a>(
1977        &mut self,
1978        lexer: &mut Lexer<'a>,
1979        ctx: &mut ExpressionContext<'a, '_, '_>,
1980        block: &mut ast::Block<'a>,
1981        target: Handle<ast::Expression<'a>>,
1982        span_start: usize,
1983    ) -> Result<'a, ()> {
1984        use crate::BinaryOperator as Bo;
1985
1986        let op = lexer.next();
1987        let (op, value) = match op {
1988            (Token::Operation('='), _) => {
1989                let value = self.general_expression(lexer, ctx)?;
1990                (None, value)
1991            }
1992            (Token::AssignmentOperation(c), _) => {
1993                let op = match c {
1994                    '<' => Bo::ShiftLeft,
1995                    '>' => Bo::ShiftRight,
1996                    '+' => Bo::Add,
1997                    '-' => Bo::Subtract,
1998                    '*' => Bo::Multiply,
1999                    '/' => Bo::Divide,
2000                    '%' => Bo::Modulo,
2001                    '&' => Bo::And,
2002                    '|' => Bo::InclusiveOr,
2003                    '^' => Bo::ExclusiveOr,
2004                    // Note: `consume_token` shouldn't produce any other assignment ops
2005                    _ => unreachable!(),
2006                };
2007
2008                let value = self.general_expression(lexer, ctx)?;
2009                (Some(op), value)
2010            }
2011            token @ (Token::IncrementOperation | Token::DecrementOperation, _) => {
2012                let op = match token.0 {
2013                    Token::IncrementOperation => ast::StatementKind::Increment,
2014                    Token::DecrementOperation => ast::StatementKind::Decrement,
2015                    _ => unreachable!(),
2016                };
2017
2018                let span = lexer.span_from(span_start);
2019                block.stmts.push(ast::Statement {
2020                    kind: op(target),
2021                    span,
2022                });
2023                return Ok(());
2024            }
2025            _ => return Err(Box::new(Error::Unexpected(op.1, ExpectedToken::Assignment))),
2026        };
2027
2028        let span = lexer.span_from(span_start);
2029        block.stmts.push(ast::Statement {
2030            kind: ast::StatementKind::Assign { target, op, value },
2031            span,
2032        });
2033        Ok(())
2034    }
2035
2036    /// Parse an assignment statement (also handles increment and decrement statements)
2037    fn assignment_statement<'a>(
2038        &mut self,
2039        lexer: &mut Lexer<'a>,
2040        ctx: &mut ExpressionContext<'a, '_, '_>,
2041        block: &mut ast::Block<'a>,
2042    ) -> Result<'a, ()> {
2043        let span_start = lexer.start_byte_offset();
2044        let target = self.lhs_expression(lexer, ctx)?;
2045        self.assignment_op_and_rhs(lexer, ctx, block, target, span_start)
2046    }
2047
2048    /// Parse a function call statement.
2049    /// Expects `ident` to have already been consumed, so it is no longer in the lexer.
2050    fn function_statement<'a>(
2051        &mut self,
2052        lexer: &mut Lexer<'a>,
2053        ident: &'a str,
2054        ident_span: Span,
2055        span_start: usize,
2056        context: &mut ExpressionContext<'a, '_, '_>,
2057        block: &mut ast::Block<'a>,
2058    ) -> Result<'a, ()> {
2059        self.push_rule_span(Rule::SingularExpr, lexer);
2060
2061        context.unresolved.insert(ast::Dependency {
2062            ident,
2063            usage: ident_span,
2064        });
2065        let arguments = self.arguments(lexer, context)?;
2066        let span = lexer.span_from(span_start);
2067
2068        block.stmts.push(ast::Statement {
2069            kind: ast::StatementKind::Call {
2070                function: ast::Ident {
2071                    name: ident,
2072                    span: ident_span,
2073                },
2074                arguments,
2075            },
2076            span,
2077        });
2078
2079        self.pop_rule_span(lexer);
2080
2081        Ok(())
2082    }
2083
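    /// Parse a statement that begins with an identifier: either a function
    /// call such as `foo(x)` or an assignment such as `foo.bar = x`. A cloned
    /// lexer provides the two-token lookahead needed to tell them apart.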
2084    fn function_call_or_assignment_statement<'a>(
2085        &mut self,
2086        lexer: &mut Lexer<'a>,
2087        context: &mut ExpressionContext<'a, '_, '_>,
2088        block: &mut ast::Block<'a>,
2089    ) -> Result<'a, ()> {
2090        let span_start = lexer.start_byte_offset();
2091        match lexer.peek() {
2092            (Token::Word(name), span) => {
2093                // A little hack for two-token lookahead.
2094                let cloned = lexer.clone();
2095                let _ = lexer.next();
2096                match lexer.peek() {
2097                    (Token::Paren('('), _) => {
2098                        self.function_statement(lexer, name, span, span_start, context, block)
2099                    }
2100                    _ => {
2101                        *lexer = cloned;
2102                        self.assignment_statement(lexer, context, block)
2103                    }
2104                }
2105            }
2106            _ => self.assignment_statement(lexer, context, block),
2107        }
2108    }
2109
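    /// Parse a single statement into `block`, dispatching on the leading
    /// token: empty statements, nested blocks, declarations (`let`, `const`,
    /// `var`), control flow (`if`, `switch`, `loop`, `while`, `for`, `break`,
    /// `continue`, `return`, `discard`), `const_assert`, phony assignments
    /// (`_ = expr;`), and plain assignments or function calls.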
2110    fn statement<'a>(
2111        &mut self,
2112        lexer: &mut Lexer<'a>,
2113        ctx: &mut ExpressionContext<'a, '_, '_>,
2114        block: &mut ast::Block<'a>,
2115        brace_nesting_level: u8,
2116    ) -> Result<'a, ()> {
2117        self.track_recursion(|this| {
2118            this.push_rule_span(Rule::Statement, lexer);
2119            match lexer.peek() {
2120                (Token::Separator(';'), _) => {
2121                    let _ = lexer.next();
2122                    this.pop_rule_span(lexer);
2123                }
2124                (Token::Paren('{') | Token::Attribute, _) => {
2125                    let (inner, span) = this.block(lexer, ctx, brace_nesting_level)?;
2126                    block.stmts.push(ast::Statement {
2127                        kind: ast::StatementKind::Block(inner),
2128                        span,
2129                    });
2130                    this.pop_rule_span(lexer);
2131                }
2132                (Token::Word(word), _) => {
2133                    let kind = match word {
2134                        "_" => {
2135                            let _ = lexer.next();
2136                            lexer.expect(Token::Operation('='))?;
2137                            let expr = this.general_expression(lexer, ctx)?;
2138                            lexer.expect(Token::Separator(';'))?;
2139
2140                            ast::StatementKind::Phony(expr)
2141                        }
2142                        "let" => {
2143                            let _ = lexer.next();
2144                            let name = lexer.next_ident()?;
2145
2146                            let given_ty = if lexer.skip(Token::Separator(':')) {
2147                                let ty = this.type_decl(lexer, ctx)?;
2148                                Some(ty)
2149                            } else {
2150                                None
2151                            };
2152                            lexer.expect(Token::Operation('='))?;
2153                            let expr_id = this.general_expression(lexer, ctx)?;
2154                            lexer.expect(Token::Separator(';'))?;
2155
2156                            let handle = ctx.declare_local(name)?;
2157                            ast::StatementKind::LocalDecl(ast::LocalDecl::Let(ast::Let {
2158                                name,
2159                                ty: given_ty,
2160                                init: expr_id,
2161                                handle,
2162                            }))
2163                        }
2164                        "const" => {
2165                            let _ = lexer.next();
2166                            let name = lexer.next_ident()?;
2167
2168                            let given_ty = if lexer.skip(Token::Separator(':')) {
2169                                let ty = this.type_decl(lexer, ctx)?;
2170                                Some(ty)
2171                            } else {
2172                                None
2173                            };
2174                            lexer.expect(Token::Operation('='))?;
2175                            let expr_id = this.general_expression(lexer, ctx)?;
2176                            lexer.expect(Token::Separator(';'))?;
2177
2178                            let handle = ctx.declare_local(name)?;
2179                            ast::StatementKind::LocalDecl(ast::LocalDecl::Const(ast::LocalConst {
2180                                name,
2181                                ty: given_ty,
2182                                init: expr_id,
2183                                handle,
2184                            }))
2185                        }
2186                        "var" => {
2187                            let _ = lexer.next();
2188
2189                            let name = lexer.next_ident()?;
2190                            let ty = if lexer.skip(Token::Separator(':')) {
2191                                let ty = this.type_decl(lexer, ctx)?;
2192                                Some(ty)
2193                            } else {
2194                                None
2195                            };
2196
2197                            let init = if lexer.skip(Token::Operation('=')) {
2198                                let init = this.general_expression(lexer, ctx)?;
2199                                Some(init)
2200                            } else {
2201                                None
2202                            };
2203
2204                            lexer.expect(Token::Separator(';'))?;
2205
2206                            let handle = ctx.declare_local(name)?;
2207                            ast::StatementKind::LocalDecl(ast::LocalDecl::Var(ast::LocalVariable {
2208                                name,
2209                                ty,
2210                                init,
2211                                handle,
2212                            }))
2213                        }
2214                        "return" => {
2215                            let _ = lexer.next();
2216                            let value = if lexer.peek().0 != Token::Separator(';') {
2217                                let handle = this.general_expression(lexer, ctx)?;
2218                                Some(handle)
2219                            } else {
2220                                None
2221                            };
2222                            lexer.expect(Token::Separator(';'))?;
2223                            ast::StatementKind::Return { value }
2224                        }
2225                        "if" => {
2226                            let _ = lexer.next();
2227                            let condition = this.general_expression(lexer, ctx)?;
2228
2229                            let accept = this.block(lexer, ctx, brace_nesting_level)?.0;
2230
2231                            let mut elsif_stack = Vec::new();
2232                            let mut elseif_span_start = lexer.start_byte_offset();
2233                            let mut reject = loop {
2234                                if !lexer.skip(Token::Word("else")) {
2235                                    break ast::Block::default();
2236                                }
2237
2238                                if !lexer.skip(Token::Word("if")) {
2239                                    // ... else { ... }
2240                                    break this.block(lexer, ctx, brace_nesting_level)?.0;
2241                                }
2242
2243                                // ... else if (...) { ... }
2244                                let other_condition = this.general_expression(lexer, ctx)?;
2245                                let other_block = this.block(lexer, ctx, brace_nesting_level)?;
2246                                elsif_stack.push((elseif_span_start, other_condition, other_block));
2247                                elseif_span_start = lexer.start_byte_offset();
2248                            };
2249
2250                            // Reverse-fold the `else if` blocks into nested `If` statements.
2251                            // Note: we may consider uplifting this to the IR.
2252                            for (other_span_start, other_cond, other_block) in
2253                                elsif_stack.into_iter().rev()
2254                            {
2255                                let sub_stmt = ast::StatementKind::If {
2256                                    condition: other_cond,
2257                                    accept: other_block.0,
2258                                    reject,
2259                                };
2260                                reject = ast::Block::default();
2261                                let span = lexer.span_from(other_span_start);
2262                                reject.stmts.push(ast::Statement {
2263                                    kind: sub_stmt,
2264                                    span,
2265                                })
2266                            }
2267
2268                            ast::StatementKind::If {
2269                                condition,
2270                                accept,
2271                                reject,
2272                            }
2273                        }
2274                        "switch" => {
2275                            let _ = lexer.next();
2276                            let selector = this.general_expression(lexer, ctx)?;
2277                            let brace_span = lexer.expect_span(Token::Paren('{'))?;
2278                            let brace_nesting_level =
2279                                Self::increase_brace_nesting(brace_nesting_level, brace_span)?;
2280                            let mut cases = Vec::new();
2281
2282                            loop {
2283                                // cases + default
2284                                match lexer.next() {
2285                                    (Token::Word("case"), _) => {
2286                                        // parse a list of values
2287                                        let value = loop {
2288                                            let value = this.switch_value(lexer, ctx)?;
2289                                            if lexer.skip(Token::Separator(',')) {
2290                                                if lexer.skip(Token::Separator(':')) {
2291                                                    break value;
2292                                                }
2293                                            } else {
2294                                                lexer.skip(Token::Separator(':'));
2295                                                break value;
2296                                            }
2297                                            cases.push(ast::SwitchCase {
2298                                                value,
2299                                                body: ast::Block::default(),
2300                                                fall_through: true,
2301                                            });
2302                                        };
2303
2304                                        let body = this.block(lexer, ctx, brace_nesting_level)?.0;
2305
2306                                        cases.push(ast::SwitchCase {
2307                                            value,
2308                                            body,
2309                                            fall_through: false,
2310                                        });
2311                                    }
2312                                    (Token::Word("default"), _) => {
2313                                        lexer.skip(Token::Separator(':'));
2314                                        let body = this.block(lexer, ctx, brace_nesting_level)?.0;
2315                                        cases.push(ast::SwitchCase {
2316                                            value: ast::SwitchValue::Default,
2317                                            body,
2318                                            fall_through: false,
2319                                        });
2320                                    }
2321                                    (Token::Paren('}'), _) => break,
2322                                    (_, span) => {
2323                                        return Err(Box::new(Error::Unexpected(
2324                                            span,
2325                                            ExpectedToken::SwitchItem,
2326                                        )))
2327                                    }
2328                                }
2329                            }
2330
2331                            ast::StatementKind::Switch { selector, cases }
2332                        }
2333                        "loop" => this.r#loop(lexer, ctx, brace_nesting_level)?,
2334                        "while" => {
2335                            let _ = lexer.next();
2336                            let mut body = ast::Block::default();
2337
2338                            let (condition, span) =
2339                                lexer.capture_span(|lexer| this.general_expression(lexer, ctx))?;
2340                            let mut reject = ast::Block::default();
2341                            reject.stmts.push(ast::Statement {
2342                                kind: ast::StatementKind::Break,
2343                                span,
2344                            });
2345
2346                            body.stmts.push(ast::Statement {
2347                                kind: ast::StatementKind::If {
2348                                    condition,
2349                                    accept: ast::Block::default(),
2350                                    reject,
2351                                },
2352                                span,
2353                            });
2354
2355                            let (block, span) = this.block(lexer, ctx, brace_nesting_level)?;
2356                            body.stmts.push(ast::Statement {
2357                                kind: ast::StatementKind::Block(block),
2358                                span,
2359                            });
2360
2361                            ast::StatementKind::Loop {
2362                                body,
2363                                continuing: ast::Block::default(),
2364                                break_if: None,
2365                            }
2366                        }
2367                        "for" => {
2368                            let _ = lexer.next();
2369                            lexer.expect(Token::Paren('('))?;
2370
2371                            ctx.local_table.push_scope();
2372
2373                            if !lexer.skip(Token::Separator(';')) {
2374                                let num_statements = block.stmts.len();
2375                                let (_, span) = {
2376                                    let ctx = &mut *ctx;
2377                                    let block = &mut *block;
2378                                    lexer.capture_span(|lexer| {
2379                                        this.statement(lexer, ctx, block, brace_nesting_level)
2380                                    })?
2381                                };
2382
2383                                if block.stmts.len() != num_statements {
2384                                    match block.stmts.last().unwrap().kind {
2385                                        ast::StatementKind::Call { .. }
2386                                        | ast::StatementKind::Assign { .. }
2387                                        | ast::StatementKind::LocalDecl(_) => {}
2388                                        _ => {
2389                                            return Err(Box::new(Error::InvalidForInitializer(
2390                                                span,
2391                                            )))
2392                                        }
2393                                    }
2394                                }
2395                            };
2396
2397                            let mut body = ast::Block::default();
2398                            if !lexer.skip(Token::Separator(';')) {
2399                                let (condition, span) =
2400                                    lexer.capture_span(|lexer| -> Result<'_, _> {
2401                                        let condition = this.general_expression(lexer, ctx)?;
2402                                        lexer.expect(Token::Separator(';'))?;
2403                                        Ok(condition)
2404                                    })?;
2405                                let mut reject = ast::Block::default();
2406                                reject.stmts.push(ast::Statement {
2407                                    kind: ast::StatementKind::Break,
2408                                    span,
2409                                });
2410                                body.stmts.push(ast::Statement {
2411                                    kind: ast::StatementKind::If {
2412                                        condition,
2413                                        accept: ast::Block::default(),
2414                                        reject,
2415                                    },
2416                                    span,
2417                                });
2418                            };
2419
2420                            let mut continuing = ast::Block::default();
2421                            if !lexer.skip(Token::Paren(')')) {
2422                                this.function_call_or_assignment_statement(
2423                                    lexer,
2424                                    ctx,
2425                                    &mut continuing,
2426                                )?;
2427                                lexer.expect(Token::Paren(')'))?;
2428                            }
2429
2430                            let (block, span) = this.block(lexer, ctx, brace_nesting_level)?;
2431                            body.stmts.push(ast::Statement {
2432                                kind: ast::StatementKind::Block(block),
2433                                span,
2434                            });
2435
2436                            ctx.local_table.pop_scope();
2437
2438                            ast::StatementKind::Loop {
2439                                body,
2440                                continuing,
2441                                break_if: None,
2442                            }
2443                        }
2444                        "break" => {
2445                            let (_, span) = lexer.next();
2446                            // Check whether the next token is `if`; this indicates
2447                            // that the user tried to write a `break if`, which
2448                            // is illegal in this position.
2449                            let (peeked_token, peeked_span) = lexer.peek();
2450                            if let Token::Word("if") = peeked_token {
2451                                let span = span.until(&peeked_span);
2452                                return Err(Box::new(Error::InvalidBreakIf(span)));
2453                            }
2454                            lexer.expect(Token::Separator(';'))?;
2455                            ast::StatementKind::Break
2456                        }
2457                        "continue" => {
2458                            let _ = lexer.next();
2459                            lexer.expect(Token::Separator(';'))?;
2460                            ast::StatementKind::Continue
2461                        }
2462                        "discard" => {
2463                            let _ = lexer.next();
2464                            lexer.expect(Token::Separator(';'))?;
2465                            ast::StatementKind::Kill
2466                        }
2467                        // https://www.w3.org/TR/WGSL/#const-assert-statement
2468                        "const_assert" => {
2469                            let _ = lexer.next();
2470                            // parentheses are optional
2471                            let paren = lexer.skip(Token::Paren('('));
2472
2473                            let condition = this.general_expression(lexer, ctx)?;
2474
2475                            if paren {
2476                                lexer.expect(Token::Paren(')'))?;
2477                            }
2478                            lexer.expect(Token::Separator(';'))?;
2479                            ast::StatementKind::ConstAssert(condition)
2480                        }
2481                        // assignment or a function call
2482                        _ => {
2483                            this.function_call_or_assignment_statement(lexer, ctx, block)?;
2484                            lexer.expect(Token::Separator(';'))?;
2485                            this.pop_rule_span(lexer);
2486                            return Ok(());
2487                        }
2488                    };
2489
2490                    let span = this.pop_rule_span(lexer);
2491                    block.stmts.push(ast::Statement { kind, span });
2492                }
2493                _ => {
2494                    this.assignment_statement(lexer, ctx, block)?;
2495                    lexer.expect(Token::Separator(';'))?;
2496                    this.pop_rule_span(lexer);
2497                }
2498            }
2499            Ok(())
2500        })
2501    }
2502
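    /// Parse a `loop` statement, e.g. roughly
    /// `loop { body; continuing { step; break if cond; } }`.
    ///
    /// The optional `continuing` block must be the last thing in the loop
    /// body, and an optional `break if` must be the last statement of the
    /// `continuing` block.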
2503    fn r#loop<'a>(
2504        &mut self,
2505        lexer: &mut Lexer<'a>,
2506        ctx: &mut ExpressionContext<'a, '_, '_>,
2507        brace_nesting_level: u8,
2508    ) -> Result<'a, ast::StatementKind<'a>> {
2509        let _ = lexer.next();
2510        let mut body = ast::Block::default();
2511        let mut continuing = ast::Block::default();
2512        let mut break_if = None;
2513
2514        let brace_span = lexer.expect_span(Token::Paren('{'))?;
2515        let brace_nesting_level = Self::increase_brace_nesting(brace_nesting_level, brace_span)?;
2516
2517        ctx.local_table.push_scope();
2518
2519        loop {
2520            if lexer.skip(Token::Word("continuing")) {
2521                // Branch for the `continuing` block, which must be
2522                // the last thing in the loop body.
2523
2524                // Expect an opening brace to start the continuing block.
2525                let brace_span = lexer.expect_span(Token::Paren('{'))?;
2526                let brace_nesting_level =
2527                    Self::increase_brace_nesting(brace_nesting_level, brace_span)?;
2528                loop {
2529                    if lexer.skip(Token::Word("break")) {
2530                        // Branch for the `break if` statement, which
2531                        // has the form `break if <expr>;` and must be the last
2532                        // statement in a continuing block.
2533
2534                        // The `break` must be followed by an `if` to form
2535                        // the `break if`.
2536                        lexer.expect(Token::Word("if"))?;
2537
2538                        let condition = self.general_expression(lexer, ctx)?;
2539                        // Set the condition of the break if to the newly parsed
2540                        // expression
2541                        break_if = Some(condition);
2542
2543                        // Expect a semicolon to close the statement
2544                        lexer.expect(Token::Separator(';'))?;
2545                        // Expect a closing brace to close the continuing block,
2546                        // since the break if must be the last statement
2547                        lexer.expect(Token::Paren('}'))?;
2548                        // Stop parsing the continuing block
2549                        break;
2550                    } else if lexer.skip(Token::Paren('}')) {
2551                        // If we encounter a closing brace, it means we have reached
2552                        // the end of the continuing block and should stop processing.
2553                        break;
2554                    } else {
2555                        // Otherwise try to parse a statement
2556                        self.statement(lexer, ctx, &mut continuing, brace_nesting_level)?;
2557                    }
2558                }
2559                // Since the continuing block must be the last part of the loop body,
2560                // we expect to see a closing brace to end the loop body
2561                lexer.expect(Token::Paren('}'))?;
2562                break;
2563            }
2564            if lexer.skip(Token::Paren('}')) {
2565                // If we encounter a closing brace, it means we have reached
2566                // the end of the loop body and should stop processing.
2567                break;
2568            }
2569            // Otherwise try to parse a statement
2570            self.statement(lexer, ctx, &mut body, brace_nesting_level)?;
2571        }
2572
2573        ctx.local_table.pop_scope();
2574
2575        Ok(ast::StatementKind::Loop {
2576            body,
2577            continuing,
2578            break_if,
2579        })
2580    }
2581
2582    /// Parse a compound statement (`compound_statement` in the WGSL grammar): optional attributes followed by a brace-delimited block of statements.
2583    fn block<'a>(
2584        &mut self,
2585        lexer: &mut Lexer<'a>,
2586        ctx: &mut ExpressionContext<'a, '_, '_>,
2587        brace_nesting_level: u8,
2588    ) -> Result<'a, (ast::Block<'a>, Span)> {
2589        self.push_rule_span(Rule::Block, lexer);
2590
2591        ctx.local_table.push_scope();
2592
2593        let mut diagnostic_filters = DiagnosticFilterMap::new();
2594
2595        self.push_rule_span(Rule::Attribute, lexer);
2596        while lexer.skip(Token::Attribute) {
2597            let (name, name_span) = lexer.next_ident_with_span()?;
2598            if let Some(DirectiveKind::Diagnostic) = DirectiveKind::from_ident(name) {
2599                let filter = self.diagnostic_filter(lexer)?;
2600                let span = self.peek_rule_span(lexer);
2601                diagnostic_filters
2602                    .add(filter, span, ShouldConflictOnFullDuplicate::Yes)
2603                    .map_err(|e| Box::new(e.into()))?;
2604            } else {
2605                return Err(Box::new(Error::Unexpected(
2606                    name_span,
2607                    ExpectedToken::DiagnosticAttribute,
2608                )));
2609            }
2610        }
2611        self.pop_rule_span(lexer);
2612
2613        if !diagnostic_filters.is_empty() {
2614            return Err(Box::new(
2615                Error::DiagnosticAttributeNotYetImplementedAtParseSite {
2616                    site_name_plural: "compound statements",
2617                    spans: diagnostic_filters.spans().collect(),
2618                },
2619            ));
2620        }
2621
2622        let brace_span = lexer.expect_span(Token::Paren('{'))?;
2623        let brace_nesting_level = Self::increase_brace_nesting(brace_nesting_level, brace_span)?;
2624        let mut block = ast::Block::default();
2625        while !lexer.skip(Token::Paren('}')) {
2626            self.statement(lexer, ctx, &mut block, brace_nesting_level)?;
2627        }
2628
2629        ctx.local_table.pop_scope();
2630
2631        let span = self.pop_rule_span(lexer);
2632        Ok((block, span))
2633    }
2634
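    /// Parses any IO attributes in front of a function argument or return
    /// type, e.g. `@location(0)`, `@builtin(position)`, or
    /// `@interpolate(flat)`, returning the collected [`ast::Binding`], if any.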
2635    fn varying_binding<'a>(
2636        &mut self,
2637        lexer: &mut Lexer<'a>,
2638        ctx: &mut ExpressionContext<'a, '_, '_>,
2639    ) -> Result<'a, Option<ast::Binding<'a>>> {
2640        let mut bind_parser = BindingParser::default();
2641        self.push_rule_span(Rule::Attribute, lexer);
2642
2643        while lexer.skip(Token::Attribute) {
2644            let (word, span) = lexer.next_ident_with_span()?;
2645            bind_parser.parse(self, lexer, word, span, ctx)?;
2646        }
2647
2648        let span = self.pop_rule_span(lexer);
2649        bind_parser.finish(span)
2650    }
2651
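    /// Parses a function declaration such as
    /// `fn scale(v: vec3<f32>, factor: f32) -> vec3<f32> { return v * factor; }`.
    ///
    /// The leading `fn` keyword has already been consumed by `global_decl`;
    /// parsing here begins with the function name.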
2652    fn function_decl<'a>(
2653        &mut self,
2654        lexer: &mut Lexer<'a>,
2655        diagnostic_filter_leaf: Option<Handle<DiagnosticFilterNode>>,
2656        must_use: Option<Span>,
2657        out: &mut ast::TranslationUnit<'a>,
2658        dependencies: &mut FastIndexSet<ast::Dependency<'a>>,
2659    ) -> Result<'a, ast::Function<'a>> {
2660        self.push_rule_span(Rule::FunctionDecl, lexer);
2661        // read function name
2662        let fun_name = lexer.next_ident()?;
2663
2664        let mut locals = Arena::new();
2665
2666        let mut ctx = ExpressionContext {
2667            expressions: &mut out.expressions,
2668            local_table: &mut SymbolTable::default(),
2669            locals: &mut locals,
2670            types: &mut out.types,
2671            unresolved: dependencies,
2672        };
2673
2674        // start a scope that contains arguments as well as the function body
2675        ctx.local_table.push_scope();
2676
2677        // read parameter list
2678        let mut arguments = Vec::new();
2679        lexer.expect(Token::Paren('('))?;
2680        let mut ready = true;
2681        while !lexer.skip(Token::Paren(')')) {
2682            if !ready {
2683                return Err(Box::new(Error::Unexpected(
2684                    lexer.next().1,
2685                    ExpectedToken::Token(Token::Separator(',')),
2686                )));
2687            }
2688            let binding = self.varying_binding(lexer, &mut ctx)?;
2689
2690            let param_name = lexer.next_ident()?;
2691
2692            lexer.expect(Token::Separator(':'))?;
2693            let param_type = self.type_decl(lexer, &mut ctx)?;
2694
2695            let handle = ctx.declare_local(param_name)?;
2696            arguments.push(ast::FunctionArgument {
2697                name: param_name,
2698                ty: param_type,
2699                binding,
2700                handle,
2701            });
2702            ready = lexer.skip(Token::Separator(','));
2703        }
2704        // read return type
2705        let result = if lexer.skip(Token::Arrow) {
2706            let binding = self.varying_binding(lexer, &mut ctx)?;
2707            let ty = self.type_decl(lexer, &mut ctx)?;
2708            let must_use = must_use.is_some();
2709            Some(ast::FunctionResult {
2710                ty,
2711                binding,
2712                must_use,
2713            })
2714        } else if let Some(must_use) = must_use {
2715            return Err(Box::new(Error::FunctionMustUseReturnsVoid(
2716                must_use,
2717                self.peek_rule_span(lexer),
2718            )));
2719        } else {
2720            None
2721        };
2722
2723        // do not use `self.block` here, since we must not push a new scope
2724        lexer.expect(Token::Paren('{'))?;
2725        let brace_nesting_level = 1;
2726        let mut body = ast::Block::default();
2727        while !lexer.skip(Token::Paren('}')) {
2728            self.statement(lexer, &mut ctx, &mut body, brace_nesting_level)?;
2729        }
2730
2731        ctx.local_table.pop_scope();
2732
2733        let fun = ast::Function {
2734            entry_point: None,
2735            name: fun_name,
2736            arguments,
2737            result,
2738            body,
2739            diagnostic_filter_leaf,
2740            doc_comments: Vec::new(),
2741        };
2742
2743        // done
2744        self.pop_rule_span(lexer);
2745
2746        Ok(fun)
2747    }
2748
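    /// Parses the identifier list that follows an `enable` or `requires`
    /// directive, e.g. the `f16, clip_distances` in `enable f16, clip_distances;`,
    /// calling `handler` on each identifier and consuming the terminating `;`.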
2749    fn directive_ident_list<'a>(
2750        &self,
2751        lexer: &mut Lexer<'a>,
2752        handler: impl FnMut(&'a str, Span) -> Result<'a, ()>,
2753    ) -> Result<'a, ()> {
2754        let mut handler = handler;
2755        'next_arg: loop {
2756            let (ident, span) = lexer.next_ident_with_span()?;
2757            handler(ident, span)?;
2758
2759            let expected_token = match lexer.peek().0 {
2760                Token::Separator(',') => {
2761                    let _ = lexer.next();
2762                    if matches!(lexer.peek().0, Token::Word(..)) {
2763                        continue 'next_arg;
2764                    }
2765                    ExpectedToken::AfterIdentListComma
2766                }
2767                _ => ExpectedToken::AfterIdentListArg,
2768            };
2769
2770            if !matches!(lexer.next().0, Token::Separator(';')) {
2771                return Err(Box::new(Error::Unexpected(span, expected_token)));
2772            }
2773
2774            break Ok(());
2775        }
2776    }
2777
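    /// Parses a single module-scope declaration (a `struct`, `alias`, `const`,
    /// `override`, `var`, `fn`, or `const_assert`, or a bare `;`), together with
    /// any attributes preceding it, and appends it to `out.decls`.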
2778    fn global_decl<'a>(
2779        &mut self,
2780        lexer: &mut Lexer<'a>,
2781        out: &mut ast::TranslationUnit<'a>,
2782    ) -> Result<'a, ()> {
2783        let doc_comments = lexer.accumulate_doc_comments();
2784
2785        // read attributes
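        // e.g. `@group(0) @binding(1)` on a resource `var`, or
        // `@compute @workgroup_size(64)` on an entry point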
2786        let mut binding = None;
2787        let mut stage = ParsedAttribute::default();
2788        let mut compute_span = Span::new(0, 0);
2789        let mut workgroup_size = ParsedAttribute::default();
2790        let mut early_depth_test = ParsedAttribute::default();
2791        let (mut bind_index, mut bind_group) =
2792            (ParsedAttribute::default(), ParsedAttribute::default());
2793        let mut id = ParsedAttribute::default();
2794
2795        let mut must_use: ParsedAttribute<Span> = ParsedAttribute::default();
2796
2797        let mut dependencies = FastIndexSet::default();
2798        let mut ctx = ExpressionContext {
2799            expressions: &mut out.expressions,
2800            local_table: &mut SymbolTable::default(),
2801            locals: &mut Arena::new(),
2802            types: &mut out.types,
2803            unresolved: &mut dependencies,
2804        };
2805        let mut diagnostic_filters = DiagnosticFilterMap::new();
2806        let ensure_no_diag_attrs = |on_what, filters: DiagnosticFilterMap| -> Result<()> {
2807            if filters.is_empty() {
2808                Ok(())
2809            } else {
2810                Err(Box::new(Error::DiagnosticAttributeNotSupported {
2811                    on_what,
2812                    spans: filters.spans().collect(),
2813                }))
2814            }
2815        };
2816
2817        self.push_rule_span(Rule::Attribute, lexer);
2818        while lexer.skip(Token::Attribute) {
2819            let (name, name_span) = lexer.next_ident_with_span()?;
2820            if let Some(DirectiveKind::Diagnostic) = DirectiveKind::from_ident(name) {
2821                let filter = self.diagnostic_filter(lexer)?;
2822                let span = self.peek_rule_span(lexer);
2823                diagnostic_filters
2824                    .add(filter, span, ShouldConflictOnFullDuplicate::Yes)
2825                    .map_err(|e| Box::new(e.into()))?;
2826                continue;
2827            }
2828            match name {
2829                "binding" => {
2830                    lexer.expect(Token::Paren('('))?;
2831                    bind_index.set(self.general_expression(lexer, &mut ctx)?, name_span)?;
2832                    lexer.expect(Token::Paren(')'))?;
2833                }
2834                "group" => {
2835                    lexer.expect(Token::Paren('('))?;
2836                    bind_group.set(self.general_expression(lexer, &mut ctx)?, name_span)?;
2837                    lexer.expect(Token::Paren(')'))?;
2838                }
2839                "id" => {
2840                    lexer.expect(Token::Paren('('))?;
2841                    id.set(self.general_expression(lexer, &mut ctx)?, name_span)?;
2842                    lexer.expect(Token::Paren(')'))?;
2843                }
2844                "vertex" => {
2845                    stage.set(ShaderStage::Vertex, name_span)?;
2846                }
2847                "fragment" => {
2848                    stage.set(ShaderStage::Fragment, name_span)?;
2849                }
2850                "compute" => {
2851                    stage.set(ShaderStage::Compute, name_span)?;
2852                    compute_span = name_span;
2853                }
2854                "workgroup_size" => {
2855                    lexer.expect(Token::Paren('('))?;
2856                    let mut new_workgroup_size = [None; 3];
2857                    for (i, size) in new_workgroup_size.iter_mut().enumerate() {
2858                        *size = Some(self.general_expression(lexer, &mut ctx)?);
2859                        match lexer.next() {
2860                            (Token::Paren(')'), _) => break,
2861                            (Token::Separator(','), _) if i != 2 => (),
2862                            other => {
2863                                return Err(Box::new(Error::Unexpected(
2864                                    other.1,
2865                                    ExpectedToken::WorkgroupSizeSeparator,
2866                                )))
2867                            }
2868                        }
2869                    }
2870                    workgroup_size.set(new_workgroup_size, name_span)?;
2871                }
2872                "early_depth_test" => {
2873                    lexer.expect(Token::Paren('('))?;
2874                    let (ident, ident_span) = lexer.next_ident_with_span()?;
2875                    let value = if ident == "force" {
2876                        crate::EarlyDepthTest::Force
2877                    } else {
2878                        crate::EarlyDepthTest::Allow {
2879                            conservative: conv::map_conservative_depth(ident, ident_span)?,
2880                        }
2881                    };
2882                    lexer.expect(Token::Paren(')'))?;
2883                    early_depth_test.set(value, name_span)?;
2884                }
2885                "must_use" => {
2886                    must_use.set(name_span, name_span)?;
2887                }
2888                _ => return Err(Box::new(Error::UnknownAttribute(name_span))),
2889            }
2890        }
2891
2892        let attrib_span = self.pop_rule_span(lexer);
2893        match (bind_group.value, bind_index.value) {
2894            (Some(group), Some(index)) => {
2895                binding = Some(ast::ResourceBinding {
2896                    group,
2897                    binding: index,
2898                });
2899            }
2900            (Some(_), None) => {
2901                return Err(Box::new(Error::MissingAttribute("binding", attrib_span)))
2902            }
2903            (None, Some(_)) => return Err(Box::new(Error::MissingAttribute("group", attrib_span))),
2904            (None, None) => {}
2905        }
2906
2907        // read item
2908        let start = lexer.start_byte_offset();
2909        let kind = match lexer.next() {
2910            (Token::Separator(';'), _) => {
2911                ensure_no_diag_attrs(
2912                    DiagnosticAttributeNotSupportedPosition::SemicolonInModulePosition,
2913                    diagnostic_filters,
2914                )?;
2915                None
2916            }
2917            (Token::Word(word), directive_span) if DirectiveKind::from_ident(word).is_some() => {
2918                return Err(Box::new(Error::DirectiveAfterFirstGlobalDecl {
2919                    directive_span,
2920                }));
2921            }
2922            (Token::Word("struct"), _) => {
2923                ensure_no_diag_attrs("`struct`s".into(), diagnostic_filters)?;
2924
2925                let name = lexer.next_ident()?;
2926
2927                let members = self.struct_body(lexer, &mut ctx)?;
2928
2929                Some(ast::GlobalDeclKind::Struct(ast::Struct {
2930                    name,
2931                    members,
2932                    doc_comments,
2933                }))
2934            }
2935            (Token::Word("alias"), _) => {
2936                ensure_no_diag_attrs("`alias`es".into(), diagnostic_filters)?;
2937
2938                let name = lexer.next_ident()?;
2939
2940                lexer.expect(Token::Operation('='))?;
2941                let ty = self.type_decl(lexer, &mut ctx)?;
2942                lexer.expect(Token::Separator(';'))?;
2943                Some(ast::GlobalDeclKind::Type(ast::TypeAlias { name, ty }))
2944            }
2945            (Token::Word("const"), _) => {
2946                ensure_no_diag_attrs("`const`s".into(), diagnostic_filters)?;
2947
2948                let name = lexer.next_ident()?;
2949
2950                let ty = if lexer.skip(Token::Separator(':')) {
2951                    let ty = self.type_decl(lexer, &mut ctx)?;
2952                    Some(ty)
2953                } else {
2954                    None
2955                };
2956
2957                lexer.expect(Token::Operation('='))?;
2958                let init = self.general_expression(lexer, &mut ctx)?;
2959                lexer.expect(Token::Separator(';'))?;
2960
2961                Some(ast::GlobalDeclKind::Const(ast::Const {
2962                    name,
2963                    ty,
2964                    init,
2965                    doc_comments,
2966                }))
2967            }
2968            (Token::Word("override"), _) => {
2969                ensure_no_diag_attrs("`override`s".into(), diagnostic_filters)?;
2970
2971                let name = lexer.next_ident()?;
2972
2973                let ty = if lexer.skip(Token::Separator(':')) {
2974                    Some(self.type_decl(lexer, &mut ctx)?)
2975                } else {
2976                    None
2977                };
2978
2979                let init = if lexer.skip(Token::Operation('=')) {
2980                    Some(self.general_expression(lexer, &mut ctx)?)
2981                } else {
2982                    None
2983                };
2984
2985                lexer.expect(Token::Separator(';'))?;
2986
2987                Some(ast::GlobalDeclKind::Override(ast::Override {
2988                    name,
2989                    id: id.value,
2990                    ty,
2991                    init,
2992                }))
2993            }
2994            (Token::Word("var"), _) => {
2995                ensure_no_diag_attrs("`var`s".into(), diagnostic_filters)?;
2996
2997                let mut var = self.variable_decl(lexer, &mut ctx)?;
2998                var.binding = binding.take();
2999                var.doc_comments = doc_comments;
3000                Some(ast::GlobalDeclKind::Var(var))
3001            }
3002            (Token::Word("fn"), _) => {
3003                let diagnostic_filter_leaf = Self::write_diagnostic_filters(
3004                    &mut out.diagnostic_filters,
3005                    diagnostic_filters,
3006                    out.diagnostic_filter_leaf,
3007                );
3008
3009                let function = self.function_decl(
3010                    lexer,
3011                    diagnostic_filter_leaf,
3012                    must_use.value,
3013                    out,
3014                    &mut dependencies,
3015                )?;
3016                Some(ast::GlobalDeclKind::Fn(ast::Function {
3017                    entry_point: if let Some(stage) = stage.value {
3018                        if stage == ShaderStage::Compute && workgroup_size.value.is_none() {
3019                            return Err(Box::new(Error::MissingWorkgroupSize(compute_span)));
3020                        }
3021                        Some(ast::EntryPoint {
3022                            stage,
3023                            early_depth_test: early_depth_test.value,
3024                            workgroup_size: workgroup_size.value,
3025                        })
3026                    } else {
3027                        None
3028                    },
3029                    doc_comments,
3030                    ..function
3031                }))
3032            }
3033            (Token::Word("const_assert"), _) => {
3034                ensure_no_diag_attrs("`const_assert`s".into(), diagnostic_filters)?;
3035
3036                // parentheses are optional
3037                let paren = lexer.skip(Token::Paren('('));
3038
3039                let condition = self.general_expression(lexer, &mut ctx)?;
3040
3041                if paren {
3042                    lexer.expect(Token::Paren(')'))?;
3043                }
3044                lexer.expect(Token::Separator(';'))?;
3045                Some(ast::GlobalDeclKind::ConstAssert(condition))
3046            }
3047            (Token::End, _) => return Ok(()),
3048            other => {
3049                return Err(Box::new(Error::Unexpected(
3050                    other.1,
3051                    ExpectedToken::GlobalItem,
3052                )))
3053            }
3054        };
3055
3056        if let Some(kind) = kind {
3057            out.decls.append(
3058                ast::GlobalDecl { kind, dependencies },
3059                lexer.span_from(start),
3060            );
3061        }
3062
3063        if !self.rules.is_empty() {
3064            log::error!("Reached the end of global decl, but rule stack is not empty");
3065            log::error!("Rules: {:?}", self.rules);
3066            return Err(Box::new(Error::Internal("rule stack is not empty")));
3067        };
3068
3069        match binding {
3070            None => Ok(()),
3071            Some(_) => Err(Box::new(Error::Internal(
3072                "we had the attribute but no var?",
3073            ))),
3074        }
3075    }
3076
3077    pub fn parse<'a>(
3078        &mut self,
3079        source: &'a str,
3080        options: &Options,
3081    ) -> Result<'a, ast::TranslationUnit<'a>> {
3082        self.reset();
3083
3084        let mut lexer = Lexer::new(source, !options.parse_doc_comments);
3085        let mut tu = ast::TranslationUnit::default();
3086        let mut enable_extensions = EnableExtensions::empty();
3087        let mut diagnostic_filters = DiagnosticFilterMap::new();
3088
3089        // Parse module doc comments.
3090        tu.doc_comments = lexer.accumulate_module_doc_comments();
3091
3092        // Parse directives.
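        // e.g. `enable f16;`, `requires pointer_composite_access;`, or
        // `diagnostic(off, derivative_uniformity);`.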
3093        while let Ok((ident, _directive_ident_span)) = lexer.peek_ident_with_span() {
3094            if let Some(kind) = DirectiveKind::from_ident(ident) {
3095                self.push_rule_span(Rule::Directive, &mut lexer);
3096                let _ = lexer.next_ident_with_span().unwrap();
3097                match kind {
3098                    DirectiveKind::Diagnostic => {
3099                        let diagnostic_filter = self.diagnostic_filter(&mut lexer)?;
3100                        let span = self.peek_rule_span(&lexer);
3101                        diagnostic_filters
3102                            .add(diagnostic_filter, span, ShouldConflictOnFullDuplicate::No)
3103                            .map_err(|e| Box::new(e.into()))?;
3104                        lexer.expect(Token::Separator(';'))?;
3105                    }
3106                    DirectiveKind::Enable => {
3107                        self.directive_ident_list(&mut lexer, |ident, span| {
3108                            let kind = EnableExtension::from_ident(ident, span)?;
3109                            let extension = match kind {
3110                                EnableExtension::Implemented(kind) => kind,
3111                                EnableExtension::Unimplemented(kind) => {
3112                                    return Err(Box::new(Error::EnableExtensionNotYetImplemented {
3113                                        kind,
3114                                        span,
3115                                    }))
3116                                }
3117                            };
3118                            enable_extensions.add(extension);
3119                            Ok(())
3120                        })?;
3121                    }
3122                    DirectiveKind::Requires => {
3123                        self.directive_ident_list(&mut lexer, |ident, span| {
3124                            match LanguageExtension::from_ident(ident) {
3125                                Some(LanguageExtension::Implemented(_kind)) => {
3126                                    // NOTE: An implemented language extension needs no further
3127                                    // validation, so we just discard what we parsed. If we ever
3128                                    // want this to feed diagnostics, we may need to track which
3129                                    // extensions were required here.
3130                                    Ok(())
3131                                }
3132                                Some(LanguageExtension::Unimplemented(kind)) => {
3133                                    Err(Box::new(Error::LanguageExtensionNotYetImplemented {
3134                                        kind,
3135                                        span,
3136                                    }))
3137                                }
3138                                None => Err(Box::new(Error::UnknownLanguageExtension(span, ident))),
3139                            }
3140                        })?;
3141                    }
3142                }
3143                self.pop_rule_span(&lexer);
3144            } else {
3145                break;
3146            }
3147        }
3148
3149        lexer.enable_extensions = enable_extensions.clone();
3150        tu.enable_extensions = enable_extensions;
3151        tu.diagnostic_filter_leaf =
3152            Self::write_diagnostic_filters(&mut tu.diagnostic_filters, diagnostic_filters, None);
3153
3154        loop {
3155            match self.global_decl(&mut lexer, &mut tu) {
3156                Err(error) => return Err(error),
3157                Ok(()) => {
3158                    if lexer.peek().0 == Token::End {
3159                        break;
3160                    }
3161                }
3162            }
3163        }
3164
3165        Ok(tu)
3166    }
3167
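    /// Returns `brace_nesting_level + 1` after checking it against the WGSL
    /// brace-nesting limit of 127, reporting an error at `brace_span` if the
    /// limit would be exceeded.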
3168    fn increase_brace_nesting(brace_nesting_level: u8, brace_span: Span) -> Result<'static, u8> {
3169        // From [spec.](https://gpuweb.github.io/gpuweb/wgsl/#limits):
3170        //
3171        // > § 2.4. Limits
3172        // >
3173        // > …
3174        // >
3175        // > Maximum nesting depth of brace-enclosed statements in a function[:] 127
3176        const BRACE_NESTING_MAXIMUM: u8 = 127;
3177        if brace_nesting_level + 1 > BRACE_NESTING_MAXIMUM {
3178            return Err(Box::new(Error::ExceededLimitForNestedBraces {
3179                span: brace_span,
3180                limit: BRACE_NESTING_MAXIMUM,
3181            }));
3182        }
3183        Ok(brace_nesting_level + 1)
3184    }
3185
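    /// Parses the parenthesized argument list of a `diagnostic(…)` directive or
    /// attribute, e.g. `(off, derivative_uniformity)`, producing a
    /// [`DiagnosticFilter`] from the severity and triggering-rule names.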
3186    fn diagnostic_filter<'a>(&self, lexer: &mut Lexer<'a>) -> Result<'a, DiagnosticFilter> {
3187        lexer.expect(Token::Paren('('))?;
3188
3189        let (severity_control_name, severity_control_name_span) = lexer.next_ident_with_span()?;
3190        let new_severity = diagnostic_filter::Severity::from_wgsl_ident(severity_control_name)
3191            .ok_or(Error::DiagnosticInvalidSeverity {
3192                severity_control_name_span,
3193            })?;
3194
3195        lexer.expect(Token::Separator(','))?;
3196
3197        let (diagnostic_name_token, diagnostic_name_token_span) = lexer.next_ident_with_span()?;
3198        let triggering_rule = if lexer.skip(Token::Separator('.')) {
3199            let (ident, _span) = lexer.next_ident_with_span()?;
3200            FilterableTriggeringRule::User(Box::new([diagnostic_name_token.into(), ident.into()]))
3201        } else {
3202            let diagnostic_rule_name = diagnostic_name_token;
3203            let diagnostic_rule_name_span = diagnostic_name_token_span;
3204            if let Some(triggering_rule) =
3205                StandardFilterableTriggeringRule::from_wgsl_ident(diagnostic_rule_name)
3206            {
3207                FilterableTriggeringRule::Standard(triggering_rule)
3208            } else {
3209                diagnostic_filter::Severity::Warning.report_wgsl_parse_diag(
3210                    Box::new(Error::UnknownDiagnosticRuleName(diagnostic_rule_name_span)),
3211                    lexer.source,
3212                )?;
3213                FilterableTriggeringRule::Unknown(diagnostic_rule_name.into())
3214            }
3215        };
3216        let filter = DiagnosticFilter {
3217            triggering_rule,
3218            new_severity,
3219        };
3220        lexer.skip(Token::Separator(','));
3221        lexer.expect(Token::Paren(')'))?;
3222
3223        Ok(filter)
3224    }
3225
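    /// Appends `filters` to `arena` as a chain of [`DiagnosticFilterNode`]s,
    /// each node's parent being the previously appended node (or `parent` for
    /// the first), and returns the handle of the last node written, i.e. the
    /// new leaf of the chain, or `parent` if `filters` is empty.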
3226    pub(crate) fn write_diagnostic_filters(
3227        arena: &mut Arena<DiagnosticFilterNode>,
3228        filters: DiagnosticFilterMap,
3229        parent: Option<Handle<DiagnosticFilterNode>>,
3230    ) -> Option<Handle<DiagnosticFilterNode>> {
3231        filters
3232            .into_iter()
3233            .fold(parent, |parent, (triggering_rule, (new_severity, span))| {
3234                Some(arena.append(
3235                    DiagnosticFilterNode {
3236                        inner: DiagnosticFilter {
3237                            new_severity,
3238                            triggering_rule,
3239                        },
3240                        parent,
3241                    },
3242                    span,
3243                ))
3244            })
3245    }
3246}