naga/front/wgsl/parse/mod.rs

use alloc::{boxed::Box, vec::Vec};
use directive::enable_extension::ImplementedEnableExtension;

use crate::diagnostic_filter::{
    self, DiagnosticFilter, DiagnosticFilterMap, DiagnosticFilterNode, FilterableTriggeringRule,
    ShouldConflictOnFullDuplicate, StandardFilterableTriggeringRule,
};
use crate::front::wgsl::error::{DiagnosticAttributeNotSupportedPosition, Error, ExpectedToken};
use crate::front::wgsl::parse::directive::enable_extension::{EnableExtension, EnableExtensions};
use crate::front::wgsl::parse::directive::language_extension::LanguageExtension;
use crate::front::wgsl::parse::directive::DirectiveKind;
use crate::front::wgsl::parse::lexer::{Lexer, Token};
use crate::front::wgsl::parse::number::Number;
use crate::front::wgsl::{Result, Scalar};
use crate::front::SymbolTable;
use crate::{Arena, FastHashSet, FastIndexSet, Handle, ShaderStage, Span};

pub mod ast;
pub mod conv;
pub mod directive;
pub mod lexer;
pub mod number;

/// State for constructing an AST expression.
///
/// Not to be confused with [`lower::ExpressionContext`], which is for producing
/// Naga IR from the AST we produce here.
///
/// [`lower::ExpressionContext`]: super::lower::ExpressionContext
struct ExpressionContext<'input, 'temp, 'out> {
    /// The [`TranslationUnit::expressions`] arena to which we should contribute
    /// expressions.
    ///
    /// [`TranslationUnit::expressions`]: ast::TranslationUnit::expressions
    expressions: &'out mut Arena<ast::Expression<'input>>,

    /// The [`TranslationUnit::types`] arena to which we should contribute new
    /// types.
    ///
    /// [`TranslationUnit::types`]: ast::TranslationUnit::types
    types: &'out mut Arena<ast::Type<'input>>,

    /// A map from identifiers in scope to the locals/arguments they represent.
    ///
    /// The handles refer to the [`locals`] arena; see that field's
    /// documentation for details.
    ///
    /// [`locals`]: ExpressionContext::locals
    local_table: &'temp mut SymbolTable<&'input str, Handle<ast::Local>>,

    /// Local variable and function argument arena for the function we're building.
    ///
    /// Note that the [`ast::Local`] here is actually a zero-sized type. This
    /// `Arena`'s only role is to assign a unique `Handle` to each local
    /// identifier, and track its definition's span for use in diagnostics. All
    /// the detailed information about locals - names, types, etc. - is kept in
    /// the [`LocalDecl`] statements we parsed from their declarations. For
    /// arguments, that information is kept in [`arguments`].
    ///
    /// In the AST, when an [`Ident`] expression refers to a local variable or
    /// argument, its [`IdentExpr`] holds the referent's `Handle<Local>` in this
    /// arena.
    ///
    /// During lowering, [`LocalDecl`] statements add entries to a per-function
    /// table that maps `Handle<Local>` values to their Naga representations,
    /// accessed via [`StatementContext::local_table`] and
    /// [`LocalExpressionContext::local_table`]. This table is then consulted when
    /// lowering subsequent [`Ident`] expressions.
    ///
    /// [`LocalDecl`]: ast::StatementKind::LocalDecl
    /// [`arguments`]: ast::Function::arguments
    /// [`Ident`]: ast::Expression::Ident
    /// [`IdentExpr`]: ast::IdentExpr
    /// [`StatementContext::local_table`]: super::lower::StatementContext::local_table
    /// [`LocalExpressionContext::local_table`]: super::lower::LocalExpressionContext::local_table
    locals: &'out mut Arena<ast::Local>,

    /// Identifiers used by the current global declaration that have no local definition.
    ///
    /// This becomes the [`GlobalDecl`]'s [`dependencies`] set.
    ///
    /// Note that we don't know at parse time what kind of [`GlobalDecl`] the
    /// name refers to. We can't look up names until we've seen the entire
    /// translation unit.
    ///
    /// [`GlobalDecl`]: ast::GlobalDecl
    /// [`dependencies`]: ast::GlobalDecl::dependencies
    unresolved: &'out mut FastIndexSet<ast::Dependency<'input>>,
}

impl<'a> ExpressionContext<'a, '_, '_> {
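    /// Parse a left-associative chain of binary operators.
    ///
    /// `parser` parses a single operand; `classifier` maps the next token to a
    /// [`crate::BinaryOperator`], or returns `None` to end the chain. Each
    /// accepted operator folds the accumulated expression and the next operand
    /// into an [`ast::Expression::Binary`] node spanning from the start of the
    /// chain.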
    fn parse_binary_op(
        &mut self,
        lexer: &mut Lexer<'a>,
        classifier: impl Fn(Token<'a>) -> Option<crate::BinaryOperator>,
        mut parser: impl FnMut(&mut Lexer<'a>, &mut Self) -> Result<'a, Handle<ast::Expression<'a>>>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        let start = lexer.start_byte_offset();
        let mut accumulator = parser(lexer, self)?;
        while let Some(op) = classifier(lexer.peek().0) {
            let _ = lexer.next();
            let left = accumulator;
            let right = parser(lexer, self)?;
            accumulator = self.expressions.append(
                ast::Expression::Binary { op, left, right },
                lexer.span_from(start),
            );
        }
        Ok(accumulator)
    }

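    /// Add `name` to the current local scope, returning its new [`ast::Local`] handle.
    ///
    /// Returns [`Error::Redefinition`], carrying both definition spans, if the
    /// name is already bound in the innermost scope.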
    fn declare_local(&mut self, name: ast::Ident<'a>) -> Result<'a, Handle<ast::Local>> {
        let handle = self.locals.append(ast::Local, name.span);
        if let Some(old) = self.local_table.add(name.name, handle) {
            Err(Box::new(Error::Redefinition {
                previous: self.locals.get_span(old),
                current: name.span,
            }))
        } else {
            Ok(handle)
        }
    }

    fn new_scalar(&mut self, scalar: Scalar) -> Handle<ast::Type<'a>> {
        self.types
            .append(ast::Type::Scalar(scalar), Span::UNDEFINED)
    }
}

/// Which grammar rule we are in the midst of parsing.
///
/// This is used for error checking. `Parser` maintains a stack of
/// these and (occasionally) checks that it is being pushed and popped
/// as expected.
#[derive(Copy, Clone, Debug, PartialEq)]
enum Rule {
    Attribute,
    VariableDecl,
    TypeDecl,
    FunctionDecl,
    Block,
    Statement,
    PrimaryExpr,
    SingularExpr,
    UnaryExpr,
    GeneralExpr,
    Directive,
    GenericExpr,
    EnclosedExpr,
    LhsExpr,
}

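/// An attribute value that may be set at most once.
///
/// [`set`] stores the value on first use and reports
/// [`Error::RepeatedAttribute`] on any later attempt.
///
/// [`set`]: ParsedAttribute::set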
struct ParsedAttribute<T> {
    value: Option<T>,
}

impl<T> Default for ParsedAttribute<T> {
    fn default() -> Self {
        Self { value: None }
    }
}

impl<T> ParsedAttribute<T> {
    fn set(&mut self, value: T, name_span: Span) -> Result<'static, ()> {
        if self.value.is_some() {
            return Err(Box::new(Error::RepeatedAttribute(name_span)));
        }
        self.value = Some(value);
        Ok(())
    }
}

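/// Accumulates the IO binding attributes (`@location`, `@builtin`,
/// `@interpolate`, `@invariant`, `@blend_src`) seen on one declaration, to be
/// checked and combined into an [`ast::Binding`] by [`BindingParser::finish`].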
#[derive(Default)]
struct BindingParser<'a> {
    location: ParsedAttribute<Handle<ast::Expression<'a>>>,
    built_in: ParsedAttribute<crate::BuiltIn>,
    interpolation: ParsedAttribute<crate::Interpolation>,
    sampling: ParsedAttribute<crate::Sampling>,
    invariant: ParsedAttribute<bool>,
    blend_src: ParsedAttribute<Handle<ast::Expression<'a>>>,
}

impl<'a> BindingParser<'a> {
    fn parse(
        &mut self,
        parser: &mut Parser,
        lexer: &mut Lexer<'a>,
        name: &'a str,
        name_span: Span,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, ()> {
        match name {
            "location" => {
                lexer.expect(Token::Paren('('))?;
                self.location
                    .set(parser.general_expression(lexer, ctx)?, name_span)?;
                lexer.expect(Token::Paren(')'))?;
            }
            "builtin" => {
                lexer.expect(Token::Paren('('))?;
                let (raw, span) = lexer.next_ident_with_span()?;
                self.built_in.set(
                    conv::map_built_in(&lexer.enable_extensions, raw, span)?,
                    name_span,
                )?;
                lexer.expect(Token::Paren(')'))?;
            }
            "interpolate" => {
                lexer.expect(Token::Paren('('))?;
                let (raw, span) = lexer.next_ident_with_span()?;
                self.interpolation
                    .set(conv::map_interpolation(raw, span)?, name_span)?;
                if lexer.skip(Token::Separator(',')) {
                    let (raw, span) = lexer.next_ident_with_span()?;
                    self.sampling
                        .set(conv::map_sampling(raw, span)?, name_span)?;
                }
                lexer.expect(Token::Paren(')'))?;
            }

            "invariant" => {
                self.invariant.set(true, name_span)?;
            }
            "blend_src" => {
                if !lexer
                    .enable_extensions
                    .contains(ImplementedEnableExtension::DualSourceBlending)
                {
                    return Err(Box::new(Error::EnableExtensionNotEnabled {
                        span: name_span,
                        kind: ImplementedEnableExtension::DualSourceBlending.into(),
                    }));
                }

                lexer.expect(Token::Paren('('))?;
                self.blend_src
                    .set(parser.general_expression(lexer, ctx)?, name_span)?;
                lexer.expect(Token::Paren(')'))?;
            }
            _ => return Err(Box::new(Error::UnknownAttribute(name_span))),
        }
        Ok(())
    }

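    /// Check the collected attributes for consistency and build the final binding.
    ///
    /// Only three shapes are accepted: no binding attributes at all, a
    /// `@location` binding (optionally with interpolation, sampling, and
    /// `@blend_src`), or a lone `@builtin` (where only `position` may also carry
    /// `@invariant`). Any other combination is [`Error::InconsistentBinding`].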
    fn finish(self, span: Span) -> Result<'a, Option<ast::Binding<'a>>> {
        match (
            self.location.value,
            self.built_in.value,
            self.interpolation.value,
            self.sampling.value,
            self.invariant.value.unwrap_or_default(),
            self.blend_src.value,
        ) {
            (None, None, None, None, false, None) => Ok(None),
            (Some(location), None, interpolation, sampling, false, blend_src) => {
                // Before handing over the completed `Module`, we call
                // `apply_default_interpolation` to ensure that the interpolation and
                // sampling have been explicitly specified on all vertex shader output and fragment
                // shader input user bindings, so leaving them potentially `None` here is fine.
                Ok(Some(ast::Binding::Location {
                    location,
                    interpolation,
                    sampling,
                    blend_src,
                }))
            }
            (None, Some(crate::BuiltIn::Position { .. }), None, None, invariant, None) => {
                Ok(Some(ast::Binding::BuiltIn(crate::BuiltIn::Position {
                    invariant,
                })))
            }
            (None, Some(built_in), None, None, false, None) => {
                Ok(Some(ast::Binding::BuiltIn(built_in)))
            }
            (_, _, _, _, _, _) => Err(Box::new(Error::InconsistentBinding(span))),
        }
    }
}

/// Configuration for the whole parser run.
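///
/// A minimal construction sketch; only the field defined below is shown, and
/// how the options reach the parser is outside this file:
///
/// ```ignore
/// let options = Options { parse_doc_comments: true };
/// ```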
pub struct Options {
    /// Controls whether the parser should parse doc comments.
    pub parse_doc_comments: bool,
}

impl Options {
    /// Creates a new [`Options`] with doc comment parsing disabled.
    pub const fn new() -> Self {
        Options {
            parse_doc_comments: false,
        }
    }
}

pub struct Parser {
    rules: Vec<(Rule, usize)>,
    recursion_depth: u32,
}

impl Parser {
    pub const fn new() -> Self {
        Parser {
            rules: Vec::new(),
            recursion_depth: 0,
        }
    }

    fn reset(&mut self) {
        self.rules.clear();
        self.recursion_depth = 0;
    }

    fn push_rule_span(&mut self, rule: Rule, lexer: &mut Lexer<'_>) {
        self.rules.push((rule, lexer.start_byte_offset()));
    }

    fn pop_rule_span(&mut self, lexer: &Lexer<'_>) -> Span {
        let (_, initial) = self.rules.pop().unwrap();
        lexer.span_from(initial)
    }

    fn peek_rule_span(&mut self, lexer: &Lexer<'_>) -> Span {
        let &(_, initial) = self.rules.last().unwrap();
        lexer.span_from(initial)
    }

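    /// Return whichever of `rule0` or `rule1` is innermost on the rule stack,
    /// or `None` if neither is currently being parsed.
    ///
    /// The expression parser uses this to decide whether `<` / `>` belong to an
    /// enclosing generic argument list or are comparison operators; see
    /// [`Self::equality_expression`].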
    fn race_rules(&self, rule0: Rule, rule1: Rule) -> Option<Rule> {
        Some(
            self.rules
                .iter()
                .rev()
                .find(|&x| x.0 == rule0 || x.0 == rule1)?
                .0,
        )
    }

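    /// Run `f`, failing if parser recursion nests too deeply.
    ///
    /// Deeply nested expressions or statements would otherwise overflow the
    /// stack; the hard limit of 256 recursive calls turns that into an
    /// [`Error::Internal`] diagnostic instead.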
    fn track_recursion<'a, F, R>(&mut self, f: F) -> Result<'a, R>
    where
        F: FnOnce(&mut Self) -> Result<'a, R>,
    {
        self.recursion_depth += 1;
        if self.recursion_depth >= 256 {
            return Err(Box::new(Error::Internal("Parser recursion limit exceeded")));
        }
        let ret = f(self);
        self.recursion_depth -= 1;
        ret
    }

    fn switch_value<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, ast::SwitchValue<'a>> {
        if let Token::Word("default") = lexer.peek().0 {
            let _ = lexer.next();
            return Ok(ast::SwitchValue::Default);
        }

        let expr = self.general_expression(lexer, ctx)?;
        Ok(ast::SwitchValue::Expr(expr))
    }

    /// Decide if we're looking at a construction expression, and return its
    /// type if so.
    ///
    /// If the identifier `word` is a [type-defining keyword], then return a
    /// [`ConstructorType`] value describing the type to build. Return an error
    /// if the type is not constructible (like `sampler`).
    ///
    /// If `word` isn't a type name, then return `None`.
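    ///
    /// For example, `vec3` alone yields `ConstructorType::PartialVector` with
    /// `size: Tri`, leaving the component type to be supplied by a trailing
    /// `<f32>` or inferred from the arguments, while the predeclared alias
    /// `vec3f` immediately yields a fully determined vector type.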
    ///
    /// [type-defining keyword]: https://gpuweb.github.io/gpuweb/wgsl/#type-defining-keywords
    /// [`ConstructorType`]: ast::ConstructorType
    fn constructor_type<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        word: &'a str,
        span: Span,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Option<ast::ConstructorType<'a>>> {
        if let Some(scalar) = conv::get_scalar_type(&lexer.enable_extensions, span, word)? {
            return Ok(Some(ast::ConstructorType::Scalar(scalar)));
        }

        let partial = match word {
            "vec2" => ast::ConstructorType::PartialVector {
                size: crate::VectorSize::Bi,
            },
            "vec2i" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::I32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec2u" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::U32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec2f" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec2h" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec3" => ast::ConstructorType::PartialVector {
                size: crate::VectorSize::Tri,
            },
            "vec3i" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::I32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec3u" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::U32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec3f" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec3h" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec4" => ast::ConstructorType::PartialVector {
                size: crate::VectorSize::Quad,
            },
            "vec4i" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::I32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec4u" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::U32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec4f" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec4h" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat2x2" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Bi,
                rows: crate::VectorSize::Bi,
            },
            "mat2x2f" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Bi,
                    rows: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat2x2h" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Bi,
                    rows: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat2x3" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Bi,
                rows: crate::VectorSize::Tri,
            },
            "mat2x3f" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Bi,
                    rows: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat2x3h" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Bi,
                    rows: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat2x4" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Bi,
                rows: crate::VectorSize::Quad,
            },
            "mat2x4f" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Bi,
                    rows: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat2x4h" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Bi,
                    rows: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat3x2" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Tri,
                rows: crate::VectorSize::Bi,
            },
            "mat3x2f" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Tri,
                    rows: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat3x2h" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Tri,
                    rows: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat3x3" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Tri,
                rows: crate::VectorSize::Tri,
            },
            "mat3x3f" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Tri,
                    rows: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat3x3h" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Tri,
                    rows: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat3x4" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Tri,
                rows: crate::VectorSize::Quad,
            },
            "mat3x4f" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Tri,
                    rows: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat3x4h" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Tri,
                    rows: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat4x2" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Quad,
                rows: crate::VectorSize::Bi,
            },
            "mat4x2f" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Quad,
                    rows: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat4x2h" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Quad,
                    rows: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat4x3" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Quad,
                rows: crate::VectorSize::Tri,
            },
            "mat4x3f" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Quad,
                    rows: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat4x3h" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Quad,
                    rows: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat4x4" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Quad,
                rows: crate::VectorSize::Quad,
            },
            "mat4x4f" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Quad,
                    rows: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat4x4h" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Quad,
                    rows: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "array" => ast::ConstructorType::PartialArray,
661            "atomic"
662            | "binding_array"
663            | "sampler"
664            | "sampler_comparison"
665            | "texture_1d"
666            | "texture_1d_array"
667            | "texture_2d"
668            | "texture_2d_array"
669            | "texture_3d"
670            | "texture_cube"
671            | "texture_cube_array"
672            | "texture_multisampled_2d"
673            | "texture_multisampled_2d_array"
674            | "texture_depth_2d"
675            | "texture_depth_2d_array"
676            | "texture_depth_cube"
677            | "texture_depth_cube_array"
678            | "texture_depth_multisampled_2d"
679            | "texture_storage_1d"
680            | "texture_storage_1d_array"
681            | "texture_storage_2d"
682            | "texture_storage_2d_array"
683            | "texture_storage_3d" => return Err(Box::new(Error::TypeNotConstructible(span))),
684            _ => return Ok(None),
685        };
686
687        // parse component type if present
688        match (lexer.peek().0, partial) {
689            (Token::Paren('<'), ast::ConstructorType::PartialVector { size }) => {
690                let (ty, ty_span) = self.singular_generic(lexer, ctx)?;
691                Ok(Some(ast::ConstructorType::Vector { size, ty, ty_span }))
692            }
693            (Token::Paren('<'), ast::ConstructorType::PartialMatrix { columns, rows }) => {
694                let (ty, ty_span) = self.singular_generic(lexer, ctx)?;
695                Ok(Some(ast::ConstructorType::Matrix {
696                    columns,
697                    rows,
698                    ty,
699                    ty_span,
700                }))
701            }
702            (Token::Paren('<'), ast::ConstructorType::PartialArray) => {
703                lexer.expect_generic_paren('<')?;
704                let base = self.type_decl(lexer, ctx)?;
705                let size = if lexer.end_of_generic_arguments() {
706                    let expr = self.const_generic_expression(lexer, ctx)?;
707                    lexer.skip(Token::Separator(','));
708                    ast::ArraySize::Constant(expr)
709                } else {
710                    ast::ArraySize::Dynamic
711                };
712                lexer.expect_generic_paren('>')?;
713
714                Ok(Some(ast::ConstructorType::Array { base, size }))
715            }
716            (_, partial) => Ok(Some(partial)),
717        }
718    }

    /// Expects `name` to be consumed (not in lexer).
    fn arguments<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Vec<Handle<ast::Expression<'a>>>> {
        self.push_rule_span(Rule::EnclosedExpr, lexer);
        lexer.open_arguments()?;
        let mut arguments = Vec::new();
        loop {
            if !arguments.is_empty() {
                if !lexer.next_argument()? {
                    break;
                }
            } else if lexer.skip(Token::Paren(')')) {
                break;
            }
            let arg = self.general_expression(lexer, ctx)?;
            arguments.push(arg);
        }

        self.pop_rule_span(lexer);
        Ok(arguments)
    }

    fn enclosed_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.push_rule_span(Rule::EnclosedExpr, lexer);
        let expr = self.general_expression(lexer, ctx)?;
        self.pop_rule_span(lexer);
        Ok(expr)
    }

    /// Expects [`Rule::PrimaryExpr`] or [`Rule::SingularExpr`] on top; does not pop it.
    /// Expects `name` to be consumed (not in lexer).
    fn function_call<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        name: &'a str,
        name_span: Span,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        assert!(self.rules.last().is_some());

        let expr = match name {
            // bitcast looks like a function call, but it's an operator and must be handled differently.
            "bitcast" => {
                let (to, span) = self.singular_generic(lexer, ctx)?;

                lexer.open_arguments()?;
                let expr = self.general_expression(lexer, ctx)?;
                lexer.close_arguments()?;

                ast::Expression::Bitcast {
                    expr,
                    to,
                    ty_span: span,
                }
            }
            // everything else must be handled later, since they can be hidden by user-defined functions.
            _ => {
                let arguments = self.arguments(lexer, ctx)?;
                ctx.unresolved.insert(ast::Dependency {
                    ident: name,
                    usage: name_span,
                });
                ast::Expression::Call {
                    function: ast::Ident {
                        name,
                        span: name_span,
                    },
                    arguments,
                }
            }
        };

        let span = self.peek_rule_span(lexer);
        let expr = ctx.expressions.append(expr, span);
        Ok(expr)
    }

    fn ident_expr<'a>(
        &mut self,
        name: &'a str,
        name_span: Span,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> ast::IdentExpr<'a> {
        match ctx.local_table.lookup(name) {
            Some(&local) => ast::IdentExpr::Local(local),
            None => {
                ctx.unresolved.insert(ast::Dependency {
                    ident: name,
                    usage: name_span,
                });
                ast::IdentExpr::Unresolved(name)
            }
        }
    }

    fn primary_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.push_rule_span(Rule::PrimaryExpr, lexer);
        const fn literal_ray_flag<'b>(flag: crate::RayFlag) -> ast::Expression<'b> {
            ast::Expression::Literal(ast::Literal::Number(Number::U32(flag.bits())))
        }
        const fn literal_ray_intersection<'b>(
            intersection: crate::RayQueryIntersection,
        ) -> ast::Expression<'b> {
            ast::Expression::Literal(ast::Literal::Number(Number::U32(intersection as u32)))
        }

        let expr = match lexer.peek() {
            (Token::Paren('('), _) => {
                let _ = lexer.next();
                let expr = self.enclosed_expression(lexer, ctx)?;
                lexer.expect(Token::Paren(')'))?;
                self.pop_rule_span(lexer);
                return Ok(expr);
            }
            (Token::Word("true"), _) => {
                let _ = lexer.next();
                ast::Expression::Literal(ast::Literal::Bool(true))
            }
            (Token::Word("false"), _) => {
                let _ = lexer.next();
                ast::Expression::Literal(ast::Literal::Bool(false))
            }
            (Token::Number(res), span) => {
                let _ = lexer.next();
                let num = res.map_err(|err| Error::BadNumber(span, err))?;

                if let Some(enable_extension) = num.requires_enable_extension() {
                    if !lexer.enable_extensions.contains(enable_extension) {
                        return Err(Box::new(Error::EnableExtensionNotEnabled {
                            kind: enable_extension.into(),
                            span,
                        }));
                    }
                }

                ast::Expression::Literal(ast::Literal::Number(num))
            }
            (Token::Word("RAY_FLAG_NONE"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::empty())
            }
            (Token::Word("RAY_FLAG_FORCE_OPAQUE"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::FORCE_OPAQUE)
            }
            (Token::Word("RAY_FLAG_FORCE_NO_OPAQUE"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::FORCE_NO_OPAQUE)
            }
            (Token::Word("RAY_FLAG_TERMINATE_ON_FIRST_HIT"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::TERMINATE_ON_FIRST_HIT)
            }
            (Token::Word("RAY_FLAG_SKIP_CLOSEST_HIT_SHADER"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::SKIP_CLOSEST_HIT_SHADER)
            }
            (Token::Word("RAY_FLAG_CULL_BACK_FACING"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::CULL_BACK_FACING)
            }
            (Token::Word("RAY_FLAG_CULL_FRONT_FACING"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::CULL_FRONT_FACING)
            }
            (Token::Word("RAY_FLAG_CULL_OPAQUE"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::CULL_OPAQUE)
            }
            (Token::Word("RAY_FLAG_CULL_NO_OPAQUE"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::CULL_NO_OPAQUE)
            }
            (Token::Word("RAY_FLAG_SKIP_TRIANGLES"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::SKIP_TRIANGLES)
            }
            (Token::Word("RAY_FLAG_SKIP_AABBS"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::SKIP_AABBS)
            }
            (Token::Word("RAY_QUERY_INTERSECTION_NONE"), _) => {
                let _ = lexer.next();
                literal_ray_intersection(crate::RayQueryIntersection::None)
            }
            (Token::Word("RAY_QUERY_INTERSECTION_TRIANGLE"), _) => {
                let _ = lexer.next();
                literal_ray_intersection(crate::RayQueryIntersection::Triangle)
            }
            (Token::Word("RAY_QUERY_INTERSECTION_GENERATED"), _) => {
                let _ = lexer.next();
                literal_ray_intersection(crate::RayQueryIntersection::Generated)
            }
            (Token::Word("RAY_QUERY_INTERSECTION_AABB"), _) => {
                let _ = lexer.next();
                literal_ray_intersection(crate::RayQueryIntersection::Aabb)
            }
            (Token::Word(word), span) => {
                let start = lexer.start_byte_offset();
                let _ = lexer.next();

                if let Some(ty) = self.constructor_type(lexer, word, span, ctx)? {
                    let ty_span = lexer.span_from(start);
                    let components = self.arguments(lexer, ctx)?;
                    ast::Expression::Construct {
                        ty,
                        ty_span,
                        components,
                    }
                } else if let Token::Paren('(') = lexer.peek().0 {
                    self.pop_rule_span(lexer);
                    return self.function_call(lexer, word, span, ctx);
                } else if word == "bitcast" {
                    self.pop_rule_span(lexer);
                    return self.function_call(lexer, word, span, ctx);
                } else {
                    let ident = self.ident_expr(word, span, ctx);
                    ast::Expression::Ident(ident)
                }
            }
            other => {
                return Err(Box::new(Error::Unexpected(
                    other.1,
                    ExpectedToken::PrimaryExpression,
                )))
            }
        };

        let span = self.pop_rule_span(lexer);
        let expr = ctx.expressions.append(expr, span);
        Ok(expr)
    }

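    /// Parse the postfix part of an expression: member access and indexing.
    ///
    /// Repeatedly applies `.field` and `[index]` selectors to `expr`, wrapping
    /// each step in an [`ast::Expression::Member`] or [`ast::Expression::Index`]
    /// node whose span starts at `span_start`.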
    fn postfix<'a>(
        &mut self,
        span_start: usize,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
        expr: Handle<ast::Expression<'a>>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        let mut expr = expr;

        loop {
            let expression = match lexer.peek().0 {
                Token::Separator('.') => {
                    let _ = lexer.next();
                    let field = lexer.next_ident()?;

                    ast::Expression::Member { base: expr, field }
                }
                Token::Paren('[') => {
                    let _ = lexer.next();
                    let index = self.enclosed_expression(lexer, ctx)?;
                    lexer.expect(Token::Paren(']'))?;

                    ast::Expression::Index { base: expr, index }
                }
                _ => break,
            };

            let span = lexer.span_from(span_start);
            expr = ctx.expressions.append(expression, span);
        }

        Ok(expr)
    }

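    /// Parse an expression used as a generic argument, such as an array length.
    ///
    /// Runs under [`Rule::GenericExpr`] so that a following `>` is left free to
    /// close the generic argument list rather than being taken as a comparison.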
    fn const_generic_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.push_rule_span(Rule::GenericExpr, lexer);
        let expr = self.general_expression(lexer, ctx)?;
        self.pop_rule_span(lexer);
        Ok(expr)
    }

    /// Parse a `unary_expression`.
    fn unary_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.track_recursion(|this| {
            this.push_rule_span(Rule::UnaryExpr, lexer);
            //TODO: refactor this to avoid backing up
            let expr = match lexer.peek().0 {
                Token::Operation('-') => {
                    let _ = lexer.next();
                    let expr = this.unary_expression(lexer, ctx)?;
                    let expr = ast::Expression::Unary {
                        op: crate::UnaryOperator::Negate,
                        expr,
                    };
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                Token::Operation('!') => {
                    let _ = lexer.next();
                    let expr = this.unary_expression(lexer, ctx)?;
                    let expr = ast::Expression::Unary {
                        op: crate::UnaryOperator::LogicalNot,
                        expr,
                    };
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                Token::Operation('~') => {
                    let _ = lexer.next();
                    let expr = this.unary_expression(lexer, ctx)?;
                    let expr = ast::Expression::Unary {
                        op: crate::UnaryOperator::BitwiseNot,
                        expr,
                    };
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                Token::Operation('*') => {
                    let _ = lexer.next();
                    let expr = this.unary_expression(lexer, ctx)?;
                    let expr = ast::Expression::Deref(expr);
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                Token::Operation('&') => {
                    let _ = lexer.next();
                    let expr = this.unary_expression(lexer, ctx)?;
                    let expr = ast::Expression::AddrOf(expr);
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                _ => this.singular_expression(lexer, ctx)?,
            };

            this.pop_rule_span(lexer);
            Ok(expr)
        })
    }

    /// Parse a `lhs_expression`.
    ///
    /// LHS expressions only support the `&` and `*` operators and
    /// the `[]` and `.` postfix selectors.
    fn lhs_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.track_recursion(|this| {
            this.push_rule_span(Rule::LhsExpr, lexer);
            let start = lexer.start_byte_offset();
            let expr = match lexer.peek() {
                (Token::Operation('*'), _) => {
                    let _ = lexer.next();
                    let expr = this.lhs_expression(lexer, ctx)?;
                    let expr = ast::Expression::Deref(expr);
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                (Token::Operation('&'), _) => {
                    let _ = lexer.next();
                    let expr = this.lhs_expression(lexer, ctx)?;
                    let expr = ast::Expression::AddrOf(expr);
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                (Token::Paren('('), _) => {
                    let _ = lexer.next();
                    let primary_expr = this.lhs_expression(lexer, ctx)?;
                    lexer.expect(Token::Paren(')'))?;
                    this.postfix(start, lexer, ctx, primary_expr)?
                }
                (Token::Word(word), span) => {
                    let _ = lexer.next();
                    let ident = this.ident_expr(word, span, ctx);
                    let primary_expr = ctx.expressions.append(ast::Expression::Ident(ident), span);
                    this.postfix(start, lexer, ctx, primary_expr)?
                }
                _ => this.singular_expression(lexer, ctx)?,
            };

            this.pop_rule_span(lexer);
            Ok(expr)
        })
    }

    /// Parse a `singular_expression`.
    fn singular_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        let start = lexer.start_byte_offset();
        self.push_rule_span(Rule::SingularExpr, lexer);
        let primary_expr = self.primary_expression(lexer, ctx)?;
        let singular_expr = self.postfix(start, lexer, ctx, primary_expr)?;
        self.pop_rule_span(lexer);

        Ok(singular_expr)
    }

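    /// Parse an `equality_expression` and everything below it in precedence.
    ///
    /// The nested [`ExpressionContext::parse_binary_op`] calls mirror the WGSL
    /// grammar: equality, then relational, shift, additive, and multiplicative
    /// expressions, with unary expressions at the leaves. When the innermost
    /// enclosing rule is [`Rule::GenericExpr`], the relational and shift
    /// classifiers accept only `<=` and `<<`, so `>`-shaped tokens remain
    /// available to close the surrounding generic argument list.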
    fn equality_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        context: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        // equality_expression
        context.parse_binary_op(
            lexer,
            |token| match token {
                Token::LogicalOperation('=') => Some(crate::BinaryOperator::Equal),
                Token::LogicalOperation('!') => Some(crate::BinaryOperator::NotEqual),
                _ => None,
            },
            // relational_expression
            |lexer, context| {
                let enclosing = self.race_rules(Rule::GenericExpr, Rule::EnclosedExpr);
                context.parse_binary_op(
                    lexer,
                    match enclosing {
                        Some(Rule::GenericExpr) => |token| match token {
                            Token::LogicalOperation('<') => Some(crate::BinaryOperator::LessEqual),
                            _ => None,
                        },
                        _ => |token| match token {
                            Token::Paren('<') => Some(crate::BinaryOperator::Less),
                            Token::Paren('>') => Some(crate::BinaryOperator::Greater),
                            Token::LogicalOperation('<') => Some(crate::BinaryOperator::LessEqual),
                            Token::LogicalOperation('>') => {
                                Some(crate::BinaryOperator::GreaterEqual)
                            }
                            _ => None,
                        },
                    },
                    // shift_expression
                    |lexer, context| {
                        context.parse_binary_op(
                            lexer,
                            match enclosing {
                                Some(Rule::GenericExpr) => |token| match token {
                                    Token::ShiftOperation('<') => {
                                        Some(crate::BinaryOperator::ShiftLeft)
                                    }
                                    _ => None,
                                },
                                _ => |token| match token {
                                    Token::ShiftOperation('<') => {
                                        Some(crate::BinaryOperator::ShiftLeft)
                                    }
                                    Token::ShiftOperation('>') => {
                                        Some(crate::BinaryOperator::ShiftRight)
                                    }
                                    _ => None,
                                },
                            },
                            // additive_expression
                            |lexer, context| {
                                context.parse_binary_op(
                                    lexer,
                                    |token| match token {
                                        Token::Operation('+') => Some(crate::BinaryOperator::Add),
                                        Token::Operation('-') => {
                                            Some(crate::BinaryOperator::Subtract)
                                        }
                                        _ => None,
                                    },
                                    // multiplicative_expression
                                    |lexer, context| {
                                        context.parse_binary_op(
                                            lexer,
                                            |token| match token {
                                                Token::Operation('*') => {
                                                    Some(crate::BinaryOperator::Multiply)
                                                }
                                                Token::Operation('/') => {
                                                    Some(crate::BinaryOperator::Divide)
                                                }
                                                Token::Operation('%') => {
                                                    Some(crate::BinaryOperator::Modulo)
                                                }
                                                _ => None,
                                            },
                                            |lexer, context| self.unary_expression(lexer, context),
                                        )
                                    },
                                )
                            },
                        )
                    },
                )
            },
        )
    }

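    /// Parse a `general_expression`: the `||` and `&&` chains and the bitwise
    /// `|`, `^`, and `&` operators, delegating the rest of the grammar to
    /// [`Self::equality_expression`].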
    fn general_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.general_expression_with_span(lexer, ctx)
            .map(|(expr, _)| expr)
    }

    fn general_expression_with_span<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        context: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, (Handle<ast::Expression<'a>>, Span)> {
        self.push_rule_span(Rule::GeneralExpr, lexer);
        // logical_or_expression
        let handle = context.parse_binary_op(
            lexer,
            |token| match token {
                Token::LogicalOperation('|') => Some(crate::BinaryOperator::LogicalOr),
                _ => None,
            },
            // logical_and_expression
            |lexer, context| {
                context.parse_binary_op(
                    lexer,
                    |token| match token {
                        Token::LogicalOperation('&') => Some(crate::BinaryOperator::LogicalAnd),
                        _ => None,
                    },
                    // inclusive_or_expression
                    |lexer, context| {
                        context.parse_binary_op(
                            lexer,
                            |token| match token {
                                Token::Operation('|') => Some(crate::BinaryOperator::InclusiveOr),
                                _ => None,
                            },
                            // exclusive_or_expression
                            |lexer, context| {
                                context.parse_binary_op(
                                    lexer,
                                    |token| match token {
                                        Token::Operation('^') => {
                                            Some(crate::BinaryOperator::ExclusiveOr)
                                        }
                                        _ => None,
                                    },
                                    // and_expression
                                    |lexer, context| {
                                        context.parse_binary_op(
                                            lexer,
                                            |token| match token {
                                                Token::Operation('&') => {
                                                    Some(crate::BinaryOperator::And)
                                                }
                                                _ => None,
                                            },
                                            |lexer, context| {
                                                self.equality_expression(lexer, context)
                                            },
                                        )
                                    },
                                )
                            },
                        )
                    },
                )
            },
        )?;
        Ok((handle, self.pop_rule_span(lexer)))
    }

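    /// Parse the rest of a `var` declaration, the `var` keyword having already
    /// been consumed: an optional `<address_space>` (with an optional access
    /// mode for `storage`, defaulting to `read`), the name, an optional
    /// `: type`, an optional `= initializer`, and the terminating `;`.
    ///
    /// The address space defaults to `handle` when no `<...>` is given.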
    fn variable_decl<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, ast::GlobalVariable<'a>> {
        self.push_rule_span(Rule::VariableDecl, lexer);
        let mut space = crate::AddressSpace::Handle;

        if lexer.skip(Token::Paren('<')) {
            let (class_str, span) = lexer.next_ident_with_span()?;
            space = match class_str {
                "storage" => {
                    let access = if lexer.skip(Token::Separator(',')) {
                        lexer.next_storage_access()?
                    } else {
                        // defaulting to `read`
                        crate::StorageAccess::LOAD
                    };
                    crate::AddressSpace::Storage { access }
                }
                _ => conv::map_address_space(class_str, span)?,
            };
            lexer.expect(Token::Paren('>'))?;
        }
        let name = lexer.next_ident()?;

        let ty = if lexer.skip(Token::Separator(':')) {
            Some(self.type_decl(lexer, ctx)?)
        } else {
            None
        };

        let init = if lexer.skip(Token::Operation('=')) {
            let handle = self.general_expression(lexer, ctx)?;
            Some(handle)
        } else {
            None
        };
        lexer.expect(Token::Separator(';'))?;
        self.pop_rule_span(lexer);

        Ok(ast::GlobalVariable {
            name,
            space,
            binding: None,
            ty,
            init,
            doc_comments: Vec::new(),
        })
    }

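    /// Parse the `{ ... }` member list of a `struct` declaration.
    ///
    /// Collects each member's doc comments, `@size` / `@align` attributes, and
    /// IO binding, and rejects duplicate member names with [`Error::Redefinition`].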
1350    fn struct_body<'a>(
1351        &mut self,
1352        lexer: &mut Lexer<'a>,
1353        ctx: &mut ExpressionContext<'a, '_, '_>,
1354    ) -> Result<'a, Vec<ast::StructMember<'a>>> {
1355        let mut members = Vec::new();
1356        let mut member_names = FastHashSet::default();
1357
1358        lexer.expect(Token::Paren('{'))?;
1359        let mut ready = true;
1360        while !lexer.skip(Token::Paren('}')) {
1361            if !ready {
1362                return Err(Box::new(Error::Unexpected(
1363                    lexer.next().1,
1364                    ExpectedToken::Token(Token::Separator(',')),
1365                )));
1366            }
1367
1368            let doc_comments = lexer.accumulate_doc_comments();
1369
1370            let (mut size, mut align) = (ParsedAttribute::default(), ParsedAttribute::default());
1371            self.push_rule_span(Rule::Attribute, lexer);
1372            let mut bind_parser = BindingParser::default();
1373            while lexer.skip(Token::Attribute) {
1374                match lexer.next_ident_with_span()? {
1375                    ("size", name_span) => {
1376                        lexer.expect(Token::Paren('('))?;
1377                        let expr = self.general_expression(lexer, ctx)?;
1378                        lexer.expect(Token::Paren(')'))?;
1379                        size.set(expr, name_span)?;
1380                    }
1381                    ("align", name_span) => {
1382                        lexer.expect(Token::Paren('('))?;
1383                        let expr = self.general_expression(lexer, ctx)?;
1384                        lexer.expect(Token::Paren(')'))?;
1385                        align.set(expr, name_span)?;
1386                    }
1387                    (word, word_span) => bind_parser.parse(self, lexer, word, word_span, ctx)?,
1388                }
1389            }
1390
1391            let bind_span = self.pop_rule_span(lexer);
1392            let binding = bind_parser.finish(bind_span)?;
1393
1394            let name = lexer.next_ident()?;
1395            lexer.expect(Token::Separator(':'))?;
1396            let ty = self.type_decl(lexer, ctx)?;
1397            ready = lexer.skip(Token::Separator(','));
1398
1399            members.push(ast::StructMember {
1400                name,
1401                ty,
1402                binding,
1403                size: size.value,
1404                align: align.value,
1405                doc_comments,
1406            });
1407
1408            if !member_names.insert(name.name) {
1409                return Err(Box::new(Error::Redefinition {
1410                    previous: members
1411                        .iter()
1412                        .find(|x| x.name.name == name.name)
1413                        .map(|x| x.name.span)
1414                        .unwrap(),
1415                    current: name.span,
1416                }));
1417            }
1418        }
1419
1420        Ok(members)
1421    }
1422
1423    /// Parses `<T>`, returning `T` and the span of `T`.
1424    fn singular_generic<'a>(
1425        &mut self,
1426        lexer: &mut Lexer<'a>,
1427        ctx: &mut ExpressionContext<'a, '_, '_>,
1428    ) -> Result<'a, (Handle<ast::Type<'a>>, Span)> {
1429        lexer.expect_generic_paren('<')?;
1430        let start = lexer.start_byte_offset();
1431        let ty = self.type_decl(lexer, ctx)?;
1432        let span = lexer.span_from(start);
1433        lexer.skip(Token::Separator(','));
1434        lexer.expect_generic_paren('>')?;
1435        Ok((ty, span))
1436    }
1437
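    /// Parse the `<T>` of a matrix type whose column and row counts are already
    /// known from its name, e.g. the `<f32>` in `mat2x3<f32>`.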
1438    fn matrix_with_type<'a>(
1439        &mut self,
1440        lexer: &mut Lexer<'a>,
1441        ctx: &mut ExpressionContext<'a, '_, '_>,
1442        columns: crate::VectorSize,
1443        rows: crate::VectorSize,
1444    ) -> Result<'a, ast::Type<'a>> {
1445        let (ty, ty_span) = self.singular_generic(lexer, ctx)?;
1446        Ok(ast::Type::Matrix {
1447            columns,
1448            rows,
1449            ty,
1450            ty_span,
1451        })
1452    }
1453
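    /// Parse a type whose leading identifier `word` (spanning `span`) has already
    /// been consumed from the lexer.
    ///
    /// Returns `Ok(None)` if `word` is not a predeclared type name, in which case
    /// the caller treats it as a user-defined type.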
1454    fn type_decl_impl<'a>(
1455        &mut self,
1456        lexer: &mut Lexer<'a>,
1457        word: &'a str,
1458        span: Span,
1459        ctx: &mut ExpressionContext<'a, '_, '_>,
1460    ) -> Result<'a, Option<ast::Type<'a>>> {
1461        if let Some(scalar) = conv::get_scalar_type(&lexer.enable_extensions, span, word)? {
1462            return Ok(Some(ast::Type::Scalar(scalar)));
1463        }
1464
1465        Ok(Some(match word {
1466            "vec2" => {
1467                let (ty, ty_span) = self.singular_generic(lexer, ctx)?;
1468                ast::Type::Vector {
1469                    size: crate::VectorSize::Bi,
1470                    ty,
1471                    ty_span,
1472                }
1473            }
1474            "vec2i" => ast::Type::Vector {
1475                size: crate::VectorSize::Bi,
1476                ty: ctx.new_scalar(Scalar::I32),
1477                ty_span: Span::UNDEFINED,
1478            },
1479            "vec2u" => ast::Type::Vector {
1480                size: crate::VectorSize::Bi,
1481                ty: ctx.new_scalar(Scalar::U32),
1482                ty_span: Span::UNDEFINED,
1483            },
1484            "vec2f" => ast::Type::Vector {
1485                size: crate::VectorSize::Bi,
1486                ty: ctx.new_scalar(Scalar::F32),
1487                ty_span: Span::UNDEFINED,
1488            },
1489            "vec2h" => ast::Type::Vector {
1490                size: crate::VectorSize::Bi,
1491                ty: ctx.new_scalar(Scalar::F16),
1492                ty_span: Span::UNDEFINED,
1493            },
1494            "vec3" => {
1495                let (ty, ty_span) = self.singular_generic(lexer, ctx)?;
1496                ast::Type::Vector {
1497                    size: crate::VectorSize::Tri,
1498                    ty,
1499                    ty_span,
1500                }
1501            }
1502            "vec3i" => ast::Type::Vector {
1503                size: crate::VectorSize::Tri,
1504                ty: ctx.new_scalar(Scalar::I32),
1505                ty_span: Span::UNDEFINED,
1506            },
1507            "vec3u" => ast::Type::Vector {
1508                size: crate::VectorSize::Tri,
1509                ty: ctx.new_scalar(Scalar::U32),
1510                ty_span: Span::UNDEFINED,
1511            },
1512            "vec3f" => ast::Type::Vector {
1513                size: crate::VectorSize::Tri,
1514                ty: ctx.new_scalar(Scalar::F32),
1515                ty_span: Span::UNDEFINED,
1516            },
1517            "vec3h" => ast::Type::Vector {
1518                size: crate::VectorSize::Tri,
1519                ty: ctx.new_scalar(Scalar::F16),
1520                ty_span: Span::UNDEFINED,
1521            },
1522            "vec4" => {
1523                let (ty, ty_span) = self.singular_generic(lexer, ctx)?;
1524                ast::Type::Vector {
1525                    size: crate::VectorSize::Quad,
1526                    ty,
1527                    ty_span,
1528                }
1529            }
1530            "vec4i" => ast::Type::Vector {
1531                size: crate::VectorSize::Quad,
1532                ty: ctx.new_scalar(Scalar::I32),
1533                ty_span: Span::UNDEFINED,
1534            },
1535            "vec4u" => ast::Type::Vector {
1536                size: crate::VectorSize::Quad,
1537                ty: ctx.new_scalar(Scalar::U32),
1538                ty_span: Span::UNDEFINED,
1539            },
1540            "vec4f" => ast::Type::Vector {
1541                size: crate::VectorSize::Quad,
1542                ty: ctx.new_scalar(Scalar::F32),
1543                ty_span: Span::UNDEFINED,
1544            },
1545            "vec4h" => ast::Type::Vector {
1546                size: crate::VectorSize::Quad,
1547                ty: ctx.new_scalar(Scalar::F16),
1548                ty_span: Span::UNDEFINED,
1549            },
1550            "mat2x2" => {
1551                self.matrix_with_type(lexer, ctx, crate::VectorSize::Bi, crate::VectorSize::Bi)?
1552            }
1553            "mat2x2f" => ast::Type::Matrix {
1554                columns: crate::VectorSize::Bi,
1555                rows: crate::VectorSize::Bi,
1556                ty: ctx.new_scalar(Scalar::F32),
1557                ty_span: Span::UNDEFINED,
1558            },
1559            "mat2x2h" => ast::Type::Matrix {
1560                columns: crate::VectorSize::Bi,
1561                rows: crate::VectorSize::Bi,
1562                ty: ctx.new_scalar(Scalar::F16),
1563                ty_span: Span::UNDEFINED,
1564            },
1565            "mat2x3" => {
1566                self.matrix_with_type(lexer, ctx, crate::VectorSize::Bi, crate::VectorSize::Tri)?
1567            }
1568            "mat2x3f" => ast::Type::Matrix {
1569                columns: crate::VectorSize::Bi,
1570                rows: crate::VectorSize::Tri,
1571                ty: ctx.new_scalar(Scalar::F32),
1572                ty_span: Span::UNDEFINED,
1573            },
1574            "mat2x3h" => ast::Type::Matrix {
1575                columns: crate::VectorSize::Bi,
1576                rows: crate::VectorSize::Tri,
1577                ty: ctx.new_scalar(Scalar::F16),
1578                ty_span: Span::UNDEFINED,
1579            },
1580            "mat2x4" => {
1581                self.matrix_with_type(lexer, ctx, crate::VectorSize::Bi, crate::VectorSize::Quad)?
1582            }
1583            "mat2x4f" => ast::Type::Matrix {
1584                columns: crate::VectorSize::Bi,
1585                rows: crate::VectorSize::Quad,
1586                ty: ctx.new_scalar(Scalar::F32),
1587                ty_span: Span::UNDEFINED,
1588            },
1589            "mat2x4h" => ast::Type::Matrix {
1590                columns: crate::VectorSize::Bi,
1591                rows: crate::VectorSize::Quad,
1592                ty: ctx.new_scalar(Scalar::F16),
1593                ty_span: Span::UNDEFINED,
1594            },
1595            "mat3x2" => {
1596                self.matrix_with_type(lexer, ctx, crate::VectorSize::Tri, crate::VectorSize::Bi)?
1597            }
1598            "mat3x2f" => ast::Type::Matrix {
1599                columns: crate::VectorSize::Tri,
1600                rows: crate::VectorSize::Bi,
1601                ty: ctx.new_scalar(Scalar::F32),
1602                ty_span: Span::UNDEFINED,
1603            },
1604            "mat3x2h" => ast::Type::Matrix {
1605                columns: crate::VectorSize::Tri,
1606                rows: crate::VectorSize::Bi,
1607                ty: ctx.new_scalar(Scalar::F16),
1608                ty_span: Span::UNDEFINED,
1609            },
1610            "mat3x3" => {
1611                self.matrix_with_type(lexer, ctx, crate::VectorSize::Tri, crate::VectorSize::Tri)?
1612            }
1613            "mat3x3f" => ast::Type::Matrix {
1614                columns: crate::VectorSize::Tri,
1615                rows: crate::VectorSize::Tri,
1616                ty: ctx.new_scalar(Scalar::F32),
1617                ty_span: Span::UNDEFINED,
1618            },
1619            "mat3x3h" => ast::Type::Matrix {
1620                columns: crate::VectorSize::Tri,
1621                rows: crate::VectorSize::Tri,
1622                ty: ctx.new_scalar(Scalar::F16),
1623                ty_span: Span::UNDEFINED,
1624            },
1625            "mat3x4" => {
1626                self.matrix_with_type(lexer, ctx, crate::VectorSize::Tri, crate::VectorSize::Quad)?
1627            }
1628            "mat3x4f" => ast::Type::Matrix {
1629                columns: crate::VectorSize::Tri,
1630                rows: crate::VectorSize::Quad,
1631                ty: ctx.new_scalar(Scalar::F32),
1632                ty_span: Span::UNDEFINED,
1633            },
1634            "mat3x4h" => ast::Type::Matrix {
1635                columns: crate::VectorSize::Tri,
1636                rows: crate::VectorSize::Quad,
1637                ty: ctx.new_scalar(Scalar::F16),
1638                ty_span: Span::UNDEFINED,
1639            },
1640            "mat4x2" => {
1641                self.matrix_with_type(lexer, ctx, crate::VectorSize::Quad, crate::VectorSize::Bi)?
1642            }
1643            "mat4x2f" => ast::Type::Matrix {
1644                columns: crate::VectorSize::Quad,
1645                rows: crate::VectorSize::Bi,
1646                ty: ctx.new_scalar(Scalar::F32),
1647                ty_span: Span::UNDEFINED,
1648            },
1649            "mat4x2h" => ast::Type::Matrix {
1650                columns: crate::VectorSize::Quad,
1651                rows: crate::VectorSize::Bi,
1652                ty: ctx.new_scalar(Scalar::F16),
1653                ty_span: Span::UNDEFINED,
1654            },
1655            "mat4x3" => {
1656                self.matrix_with_type(lexer, ctx, crate::VectorSize::Quad, crate::VectorSize::Tri)?
1657            }
1658            "mat4x3f" => ast::Type::Matrix {
1659                columns: crate::VectorSize::Quad,
1660                rows: crate::VectorSize::Tri,
1661                ty: ctx.new_scalar(Scalar::F32),
1662                ty_span: Span::UNDEFINED,
1663            },
1664            "mat4x3h" => ast::Type::Matrix {
1665                columns: crate::VectorSize::Quad,
1666                rows: crate::VectorSize::Tri,
1667                ty: ctx.new_scalar(Scalar::F16),
1668                ty_span: Span::UNDEFINED,
1669            },
1670            "mat4x4" => {
1671                self.matrix_with_type(lexer, ctx, crate::VectorSize::Quad, crate::VectorSize::Quad)?
1672            }
1673            "mat4x4f" => ast::Type::Matrix {
1674                columns: crate::VectorSize::Quad,
1675                rows: crate::VectorSize::Quad,
1676                ty: ctx.new_scalar(Scalar::F32),
1677                ty_span: Span::UNDEFINED,
1678            },
1679            "mat4x4h" => ast::Type::Matrix {
1680                columns: crate::VectorSize::Quad,
1681                rows: crate::VectorSize::Quad,
1682                ty: ctx.new_scalar(Scalar::F16),
1683                ty_span: Span::UNDEFINED,
1684            },
1685            "atomic" => {
1686                let scalar = lexer.next_scalar_generic()?;
1687                ast::Type::Atomic(scalar)
1688            }
1689            "ptr" => {
1690                lexer.expect_generic_paren('<')?;
1691                let (ident, span) = lexer.next_ident_with_span()?;
1692                let mut space = conv::map_address_space(ident, span)?;
1693                lexer.expect(Token::Separator(','))?;
1694                let base = self.type_decl(lexer, ctx)?;
1695                if let crate::AddressSpace::Storage { ref mut access } = space {
1696                    *access = if lexer.end_of_generic_arguments() {
1697                        let result = lexer.next_storage_access()?;
1698                        lexer.skip(Token::Separator(','));
1699                        result
1700                    } else {
1701                        crate::StorageAccess::LOAD
1702                    };
1703                }
1704                lexer.expect_generic_paren('>')?;
1705                ast::Type::Pointer { base, space }
1706            }
1707            "array" => {
1708                lexer.expect_generic_paren('<')?;
1709                let base = self.type_decl(lexer, ctx)?;
1710                let size = if lexer.end_of_generic_arguments() {
1711                    let size = self.const_generic_expression(lexer, ctx)?;
1712                    lexer.skip(Token::Separator(','));
1713                    ast::ArraySize::Constant(size)
1714                } else {
1715                    ast::ArraySize::Dynamic
1716                };
1717                lexer.expect_generic_paren('>')?;
1718
1719                ast::Type::Array { base, size }
1720            }
1721            "binding_array" => {
1722                lexer.expect_generic_paren('<')?;
1723                let base = self.type_decl(lexer, ctx)?;
1724                let size = if lexer.end_of_generic_arguments() {
1725                    let size = self.unary_expression(lexer, ctx)?;
1726                    lexer.skip(Token::Separator(','));
1727                    ast::ArraySize::Constant(size)
1728                } else {
1729                    ast::ArraySize::Dynamic
1730                };
1731                lexer.expect_generic_paren('>')?;
1732
1733                ast::Type::BindingArray { base, size }
1734            }
1735            "sampler" => ast::Type::Sampler { comparison: false },
1736            "sampler_comparison" => ast::Type::Sampler { comparison: true },
1737            "texture_1d" => {
1738                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1739                Self::check_texture_sample_type(scalar, span)?;
1740                ast::Type::Image {
1741                    dim: crate::ImageDimension::D1,
1742                    arrayed: false,
1743                    class: crate::ImageClass::Sampled {
1744                        kind: scalar.kind,
1745                        multi: false,
1746                    },
1747                }
1748            }
1749            "texture_1d_array" => {
1750                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1751                Self::check_texture_sample_type(scalar, span)?;
1752                ast::Type::Image {
1753                    dim: crate::ImageDimension::D1,
1754                    arrayed: true,
1755                    class: crate::ImageClass::Sampled {
1756                        kind: scalar.kind,
1757                        multi: false,
1758                    },
1759                }
1760            }
1761            "texture_2d" => {
1762                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1763                Self::check_texture_sample_type(scalar, span)?;
1764                ast::Type::Image {
1765                    dim: crate::ImageDimension::D2,
1766                    arrayed: false,
1767                    class: crate::ImageClass::Sampled {
1768                        kind: scalar.kind,
1769                        multi: false,
1770                    },
1771                }
1772            }
1773            "texture_2d_array" => {
1774                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1775                Self::check_texture_sample_type(scalar, span)?;
1776                ast::Type::Image {
1777                    dim: crate::ImageDimension::D2,
1778                    arrayed: true,
1779                    class: crate::ImageClass::Sampled {
1780                        kind: scalar.kind,
1781                        multi: false,
1782                    },
1783                }
1784            }
1785            "texture_3d" => {
1786                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1787                Self::check_texture_sample_type(scalar, span)?;
1788                ast::Type::Image {
1789                    dim: crate::ImageDimension::D3,
1790                    arrayed: false,
1791                    class: crate::ImageClass::Sampled {
1792                        kind: scalar.kind,
1793                        multi: false,
1794                    },
1795                }
1796            }
1797            "texture_cube" => {
1798                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1799                Self::check_texture_sample_type(scalar, span)?;
1800                ast::Type::Image {
1801                    dim: crate::ImageDimension::Cube,
1802                    arrayed: false,
1803                    class: crate::ImageClass::Sampled {
1804                        kind: scalar.kind,
1805                        multi: false,
1806                    },
1807                }
1808            }
1809            "texture_cube_array" => {
1810                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1811                Self::check_texture_sample_type(scalar, span)?;
1812                ast::Type::Image {
1813                    dim: crate::ImageDimension::Cube,
1814                    arrayed: true,
1815                    class: crate::ImageClass::Sampled {
1816                        kind: scalar.kind,
1817                        multi: false,
1818                    },
1819                }
1820            }
1821            "texture_multisampled_2d" => {
1822                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1823                Self::check_texture_sample_type(scalar, span)?;
1824                ast::Type::Image {
1825                    dim: crate::ImageDimension::D2,
1826                    arrayed: false,
1827                    class: crate::ImageClass::Sampled {
1828                        kind: scalar.kind,
1829                        multi: true,
1830                    },
1831                }
1832            }
1833            "texture_multisampled_2d_array" => {
1834                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1835                Self::check_texture_sample_type(scalar, span)?;
1836                ast::Type::Image {
1837                    dim: crate::ImageDimension::D2,
1838                    arrayed: true,
1839                    class: crate::ImageClass::Sampled {
1840                        kind: scalar.kind,
1841                        multi: true,
1842                    },
1843                }
1844            }
1845            "texture_depth_2d" => ast::Type::Image {
1846                dim: crate::ImageDimension::D2,
1847                arrayed: false,
1848                class: crate::ImageClass::Depth { multi: false },
1849            },
1850            "texture_depth_2d_array" => ast::Type::Image {
1851                dim: crate::ImageDimension::D2,
1852                arrayed: true,
1853                class: crate::ImageClass::Depth { multi: false },
1854            },
1855            "texture_depth_cube" => ast::Type::Image {
1856                dim: crate::ImageDimension::Cube,
1857                arrayed: false,
1858                class: crate::ImageClass::Depth { multi: false },
1859            },
1860            "texture_depth_cube_array" => ast::Type::Image {
1861                dim: crate::ImageDimension::Cube,
1862                arrayed: true,
1863                class: crate::ImageClass::Depth { multi: false },
1864            },
1865            "texture_depth_multisampled_2d" => ast::Type::Image {
1866                dim: crate::ImageDimension::D2,
1867                arrayed: false,
1868                class: crate::ImageClass::Depth { multi: true },
1869            },
1870            "texture_storage_1d" => {
1871                let (format, access) = lexer.next_format_generic()?;
1872                ast::Type::Image {
1873                    dim: crate::ImageDimension::D1,
1874                    arrayed: false,
1875                    class: crate::ImageClass::Storage { format, access },
1876                }
1877            }
1878            "texture_storage_1d_array" => {
1879                let (format, access) = lexer.next_format_generic()?;
1880                ast::Type::Image {
1881                    dim: crate::ImageDimension::D1,
1882                    arrayed: true,
1883                    class: crate::ImageClass::Storage { format, access },
1884                }
1885            }
1886            "texture_storage_2d" => {
1887                let (format, access) = lexer.next_format_generic()?;
1888                ast::Type::Image {
1889                    dim: crate::ImageDimension::D2,
1890                    arrayed: false,
1891                    class: crate::ImageClass::Storage { format, access },
1892                }
1893            }
1894            "texture_storage_2d_array" => {
1895                let (format, access) = lexer.next_format_generic()?;
1896                ast::Type::Image {
1897                    dim: crate::ImageDimension::D2,
1898                    arrayed: true,
1899                    class: crate::ImageClass::Storage { format, access },
1900                }
1901            }
1902            "texture_storage_3d" => {
1903                let (format, access) = lexer.next_format_generic()?;
1904                ast::Type::Image {
1905                    dim: crate::ImageDimension::D3,
1906                    arrayed: false,
1907                    class: crate::ImageClass::Storage { format, access },
1908                }
1909            }
1910            "acceleration_structure" => {
1911                let vertex_return = lexer.next_acceleration_structure_flags()?;
1912                ast::Type::AccelerationStructure { vertex_return }
1913            }
1914            "ray_query" => {
1915                let vertex_return = lexer.next_acceleration_structure_flags()?;
1916                ast::Type::RayQuery { vertex_return }
1917            }
1918            "RayDesc" => ast::Type::RayDesc,
1919            "RayIntersection" => ast::Type::RayIntersection,
1920            _ => return Ok(None),
1921        }))
1922    }
1923
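    /// Check that `scalar` is a permitted sample type for a sampled texture,
    /// reporting an error against `span` otherwise.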
1924    fn check_texture_sample_type(scalar: Scalar, span: Span) -> Result<'static, ()> {
1925        use crate::ScalarKind::*;
1926        // Validate according to https://gpuweb.github.io/gpuweb/wgsl/#sampled-texture-type
1927        match scalar {
1928            Scalar {
1929                kind: Float | Sint | Uint,
1930                width: 4,
1931            } => Ok(()),
1932            Scalar {
1933                kind: Uint,
1934                width: 8,
1935            } => Ok(()),
1936            _ => Err(Box::new(Error::BadTextureSampleType { span, scalar })),
1937        }
1938    }
1939
1940    /// Parse a type declaration, reading the type's name from the lexer.
1941    fn type_decl<'a>(
1942        &mut self,
1943        lexer: &mut Lexer<'a>,
1944        ctx: &mut ExpressionContext<'a, '_, '_>,
1945    ) -> Result<'a, Handle<ast::Type<'a>>> {
1946        self.track_recursion(|this| {
1947            this.push_rule_span(Rule::TypeDecl, lexer);
1948
1949            let (name, span) = lexer.next_ident_with_span()?;
1950
1951            let ty = match this.type_decl_impl(lexer, name, span, ctx)? {
1952                Some(ty) => ty,
1953                None => {
1954                    ctx.unresolved.insert(ast::Dependency {
1955                        ident: name,
1956                        usage: span,
1957                    });
1958                    ast::Type::User(ast::Ident { name, span })
1959                }
1960            };
1961
1962            this.pop_rule_span(lexer);
1963
1964            let handle = ctx.types.append(ty, Span::UNDEFINED);
1965            Ok(handle)
1966        })
1967    }
1968
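    /// Parse everything after the left-hand side of an assignment statement and
    /// push the result onto `block`: either an assignment operator and right-hand
    /// side (`= expr`, `+= expr`, ...) or an increment/decrement (`++`, `--`).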
1969    fn assignment_op_and_rhs<'a>(
1970        &mut self,
1971        lexer: &mut Lexer<'a>,
1972        ctx: &mut ExpressionContext<'a, '_, '_>,
1973        block: &mut ast::Block<'a>,
1974        target: Handle<ast::Expression<'a>>,
1975        span_start: usize,
1976    ) -> Result<'a, ()> {
1977        use crate::BinaryOperator as Bo;
1978
1979        let op = lexer.next();
1980        let (op, value) = match op {
1981            (Token::Operation('='), _) => {
1982                let value = self.general_expression(lexer, ctx)?;
1983                (None, value)
1984            }
1985            (Token::AssignmentOperation(c), _) => {
1986                let op = match c {
1987                    '<' => Bo::ShiftLeft,
1988                    '>' => Bo::ShiftRight,
1989                    '+' => Bo::Add,
1990                    '-' => Bo::Subtract,
1991                    '*' => Bo::Multiply,
1992                    '/' => Bo::Divide,
1993                    '%' => Bo::Modulo,
1994                    '&' => Bo::And,
1995                    '|' => Bo::InclusiveOr,
1996                    '^' => Bo::ExclusiveOr,
1997                    // Note: `consume_token` shouldn't produce any other assignment ops
1998                    _ => unreachable!(),
1999                };
2000
2001                let value = self.general_expression(lexer, ctx)?;
2002                (Some(op), value)
2003            }
2004            token @ (Token::IncrementOperation | Token::DecrementOperation, _) => {
2005                let op = match token.0 {
2006                    Token::IncrementOperation => ast::StatementKind::Increment,
2007                    Token::DecrementOperation => ast::StatementKind::Decrement,
2008                    _ => unreachable!(),
2009                };
2010
2011                let span = lexer.span_from(span_start);
2012                block.stmts.push(ast::Statement {
2013                    kind: op(target),
2014                    span,
2015                });
2016                return Ok(());
2017            }
2018            _ => return Err(Box::new(Error::Unexpected(op.1, ExpectedToken::Assignment))),
2019        };
2020
2021        let span = lexer.span_from(span_start);
2022        block.stmts.push(ast::Statement {
2023            kind: ast::StatementKind::Assign { target, op, value },
2024            span,
2025        });
2026        Ok(())
2027    }
2028
2029    /// Parse an assignment statement (also parses increment and decrement statements).
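    ///
    /// For example: `a = 1;`, `a += 1;`, or `a++;` (the trailing `;` is
    /// consumed by the caller).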
2030    fn assignment_statement<'a>(
2031        &mut self,
2032        lexer: &mut Lexer<'a>,
2033        ctx: &mut ExpressionContext<'a, '_, '_>,
2034        block: &mut ast::Block<'a>,
2035    ) -> Result<'a, ()> {
2036        let span_start = lexer.start_byte_offset();
2037        let target = self.lhs_expression(lexer, ctx)?;
2038        self.assignment_op_and_rhs(lexer, ctx, block, target, span_start)
2039    }
2040
2041    /// Parse a function call statement.
2042    /// Expects `ident` to have already been consumed (it is no longer in the lexer).
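    ///
    /// For example, the `foo(1, 2)` in `foo(1, 2);`; the terminating `;` is
    /// consumed by the caller.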
2043    fn function_statement<'a>(
2044        &mut self,
2045        lexer: &mut Lexer<'a>,
2046        ident: &'a str,
2047        ident_span: Span,
2048        span_start: usize,
2049        context: &mut ExpressionContext<'a, '_, '_>,
2050        block: &mut ast::Block<'a>,
2051    ) -> Result<'a, ()> {
2052        self.push_rule_span(Rule::SingularExpr, lexer);
2053
2054        context.unresolved.insert(ast::Dependency {
2055            ident,
2056            usage: ident_span,
2057        });
2058        let arguments = self.arguments(lexer, context)?;
2059        let span = lexer.span_from(span_start);
2060
2061        block.stmts.push(ast::Statement {
2062            kind: ast::StatementKind::Call {
2063                function: ast::Ident {
2064                    name: ident,
2065                    span: ident_span,
2066                },
2067                arguments,
2068            },
2069            span,
2070        });
2071
2072        self.pop_rule_span(lexer);
2073
2074        Ok(())
2075    }
2076
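    /// Parse a statement that begins with an identifier: a function call such as
    /// `foo(x)` if the identifier is immediately followed by `(`, otherwise an
    /// assignment such as `a.b = x`.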
2077    fn function_call_or_assignment_statement<'a>(
2078        &mut self,
2079        lexer: &mut Lexer<'a>,
2080        context: &mut ExpressionContext<'a, '_, '_>,
2081        block: &mut ast::Block<'a>,
2082    ) -> Result<'a, ()> {
2083        let span_start = lexer.start_byte_offset();
2084        match lexer.peek() {
2085            (Token::Word(name), span) => {
2086                // A little hack for two-token lookahead.
2087                let cloned = lexer.clone();
2088                let _ = lexer.next();
2089                match lexer.peek() {
2090                    (Token::Paren('('), _) => {
2091                        self.function_statement(lexer, name, span, span_start, context, block)
2092                    }
2093                    _ => {
2094                        *lexer = cloned;
2095                        self.assignment_statement(lexer, context, block)
2096                    }
2097                }
2098            }
2099            _ => self.assignment_statement(lexer, context, block),
2100        }
2101    }
2102
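    /// Parse a single statement, appending the result (if any) to `block`.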
2103    fn statement<'a>(
2104        &mut self,
2105        lexer: &mut Lexer<'a>,
2106        ctx: &mut ExpressionContext<'a, '_, '_>,
2107        block: &mut ast::Block<'a>,
2108        brace_nesting_level: u8,
2109    ) -> Result<'a, ()> {
2110        self.track_recursion(|this| {
2111            this.push_rule_span(Rule::Statement, lexer);
2112            match lexer.peek() {
2113                (Token::Separator(';'), _) => {
2114                    let _ = lexer.next();
2115                    this.pop_rule_span(lexer);
2116                }
2117                (Token::Paren('{') | Token::Attribute, _) => {
2118                    let (inner, span) = this.block(lexer, ctx, brace_nesting_level)?;
2119                    block.stmts.push(ast::Statement {
2120                        kind: ast::StatementKind::Block(inner),
2121                        span,
2122                    });
2123                    this.pop_rule_span(lexer);
2124                }
2125                (Token::Word(word), _) => {
2126                    let kind = match word {
2127                        "_" => {
2128                            let _ = lexer.next();
2129                            lexer.expect(Token::Operation('='))?;
2130                            let expr = this.general_expression(lexer, ctx)?;
2131                            lexer.expect(Token::Separator(';'))?;
2132
2133                            ast::StatementKind::Phony(expr)
2134                        }
2135                        "let" => {
2136                            let _ = lexer.next();
2137                            let name = lexer.next_ident()?;
2138
2139                            let given_ty = if lexer.skip(Token::Separator(':')) {
2140                                let ty = this.type_decl(lexer, ctx)?;
2141                                Some(ty)
2142                            } else {
2143                                None
2144                            };
2145                            lexer.expect(Token::Operation('='))?;
2146                            let expr_id = this.general_expression(lexer, ctx)?;
2147                            lexer.expect(Token::Separator(';'))?;
2148
2149                            let handle = ctx.declare_local(name)?;
2150                            ast::StatementKind::LocalDecl(ast::LocalDecl::Let(ast::Let {
2151                                name,
2152                                ty: given_ty,
2153                                init: expr_id,
2154                                handle,
2155                            }))
2156                        }
2157                        "const" => {
2158                            let _ = lexer.next();
2159                            let name = lexer.next_ident()?;
2160
2161                            let given_ty = if lexer.skip(Token::Separator(':')) {
2162                                let ty = this.type_decl(lexer, ctx)?;
2163                                Some(ty)
2164                            } else {
2165                                None
2166                            };
2167                            lexer.expect(Token::Operation('='))?;
2168                            let expr_id = this.general_expression(lexer, ctx)?;
2169                            lexer.expect(Token::Separator(';'))?;
2170
2171                            let handle = ctx.declare_local(name)?;
2172                            ast::StatementKind::LocalDecl(ast::LocalDecl::Const(ast::LocalConst {
2173                                name,
2174                                ty: given_ty,
2175                                init: expr_id,
2176                                handle,
2177                            }))
2178                        }
2179                        "var" => {
2180                            let _ = lexer.next();
2181
2182                            let name = lexer.next_ident()?;
2183                            let ty = if lexer.skip(Token::Separator(':')) {
2184                                let ty = this.type_decl(lexer, ctx)?;
2185                                Some(ty)
2186                            } else {
2187                                None
2188                            };
2189
2190                            let init = if lexer.skip(Token::Operation('=')) {
2191                                let init = this.general_expression(lexer, ctx)?;
2192                                Some(init)
2193                            } else {
2194                                None
2195                            };
2196
2197                            lexer.expect(Token::Separator(';'))?;
2198
2199                            let handle = ctx.declare_local(name)?;
2200                            ast::StatementKind::LocalDecl(ast::LocalDecl::Var(ast::LocalVariable {
2201                                name,
2202                                ty,
2203                                init,
2204                                handle,
2205                            }))
2206                        }
2207                        "return" => {
2208                            let _ = lexer.next();
2209                            let value = if lexer.peek().0 != Token::Separator(';') {
2210                                let handle = this.general_expression(lexer, ctx)?;
2211                                Some(handle)
2212                            } else {
2213                                None
2214                            };
2215                            lexer.expect(Token::Separator(';'))?;
2216                            ast::StatementKind::Return { value }
2217                        }
2218                        "if" => {
2219                            let _ = lexer.next();
2220                            let condition = this.general_expression(lexer, ctx)?;
2221
2222                            let accept = this.block(lexer, ctx, brace_nesting_level)?.0;
2223
2224                            let mut elsif_stack = Vec::new();
2225                            let mut elseif_span_start = lexer.start_byte_offset();
2226                            let mut reject = loop {
2227                                if !lexer.skip(Token::Word("else")) {
2228                                    break ast::Block::default();
2229                                }
2230
2231                                if !lexer.skip(Token::Word("if")) {
2232                                    // ... else { ... }
2233                                    break this.block(lexer, ctx, brace_nesting_level)?.0;
2234                                }
2235
2236                                // ... else if (...) { ... }
2237                                let other_condition = this.general_expression(lexer, ctx)?;
2238                                let other_block = this.block(lexer, ctx, brace_nesting_level)?;
2239                                elsif_stack.push((elseif_span_start, other_condition, other_block));
2240                                elseif_span_start = lexer.start_byte_offset();
2241                            };
2242
2243                            // reverse-fold the else-if blocks
2244                            // Note: we may consider uplifting this to the IR
2245                            for (other_span_start, other_cond, other_block) in
2246                                elsif_stack.into_iter().rev()
2247                            {
2248                                let sub_stmt = ast::StatementKind::If {
2249                                    condition: other_cond,
2250                                    accept: other_block.0,
2251                                    reject,
2252                                };
2253                                reject = ast::Block::default();
2254                                let span = lexer.span_from(other_span_start);
2255                                reject.stmts.push(ast::Statement {
2256                                    kind: sub_stmt,
2257                                    span,
2258                                })
2259                            }
2260
2261                            ast::StatementKind::If {
2262                                condition,
2263                                accept,
2264                                reject,
2265                            }
2266                        }
2267                        "switch" => {
2268                            let _ = lexer.next();
2269                            let selector = this.general_expression(lexer, ctx)?;
2270                            let brace_span = lexer.expect_span(Token::Paren('{'))?;
2271                            let brace_nesting_level =
2272                                Self::increase_brace_nesting(brace_nesting_level, brace_span)?;
2273                            let mut cases = Vec::new();
2274
2275                            loop {
2276                                // cases + default
2277                                match lexer.next() {
2278                                    (Token::Word("case"), _) => {
2279                                        // parse a list of values
2280                                        let value = loop {
2281                                            let value = this.switch_value(lexer, ctx)?;
2282                                            if lexer.skip(Token::Separator(',')) {
2283                                                if lexer.skip(Token::Separator(':')) {
2284                                                    break value;
2285                                                }
2286                                            } else {
2287                                                lexer.skip(Token::Separator(':'));
2288                                                break value;
2289                                            }
2290                                            cases.push(ast::SwitchCase {
2291                                                value,
2292                                                body: ast::Block::default(),
2293                                                fall_through: true,
2294                                            });
2295                                        };
2296
2297                                        let body = this.block(lexer, ctx, brace_nesting_level)?.0;
2298
2299                                        cases.push(ast::SwitchCase {
2300                                            value,
2301                                            body,
2302                                            fall_through: false,
2303                                        });
2304                                    }
2305                                    (Token::Word("default"), _) => {
2306                                        lexer.skip(Token::Separator(':'));
2307                                        let body = this.block(lexer, ctx, brace_nesting_level)?.0;
2308                                        cases.push(ast::SwitchCase {
2309                                            value: ast::SwitchValue::Default,
2310                                            body,
2311                                            fall_through: false,
2312                                        });
2313                                    }
2314                                    (Token::Paren('}'), _) => break,
2315                                    (_, span) => {
2316                                        return Err(Box::new(Error::Unexpected(
2317                                            span,
2318                                            ExpectedToken::SwitchItem,
2319                                        )))
2320                                    }
2321                                }
2322                            }
2323
2324                            ast::StatementKind::Switch { selector, cases }
2325                        }
2326                        "loop" => this.r#loop(lexer, ctx, brace_nesting_level)?,
2327                        "while" => {
2328                            let _ = lexer.next();
2329                            let mut body = ast::Block::default();
2330
2331                            let (condition, span) =
2332                                lexer.capture_span(|lexer| this.general_expression(lexer, ctx))?;
2333                            let mut reject = ast::Block::default();
2334                            reject.stmts.push(ast::Statement {
2335                                kind: ast::StatementKind::Break,
2336                                span,
2337                            });
2338
2339                            body.stmts.push(ast::Statement {
2340                                kind: ast::StatementKind::If {
2341                                    condition,
2342                                    accept: ast::Block::default(),
2343                                    reject,
2344                                },
2345                                span,
2346                            });
2347
2348                            let (block, span) = this.block(lexer, ctx, brace_nesting_level)?;
2349                            body.stmts.push(ast::Statement {
2350                                kind: ast::StatementKind::Block(block),
2351                                span,
2352                            });
2353
2354                            ast::StatementKind::Loop {
2355                                body,
2356                                continuing: ast::Block::default(),
2357                                break_if: None,
2358                            }
2359                        }
2360                        "for" => {
2361                            let _ = lexer.next();
2362                            lexer.expect(Token::Paren('('))?;
2363
2364                            ctx.local_table.push_scope();
2365
2366                            if !lexer.skip(Token::Separator(';')) {
2367                                let num_statements = block.stmts.len();
2368                                let (_, span) = {
2369                                    let ctx = &mut *ctx;
2370                                    let block = &mut *block;
2371                                    lexer.capture_span(|lexer| {
2372                                        this.statement(lexer, ctx, block, brace_nesting_level)
2373                                    })?
2374                                };
2375
2376                                if block.stmts.len() != num_statements {
2377                                    match block.stmts.last().unwrap().kind {
2378                                        ast::StatementKind::Call { .. }
2379                                        | ast::StatementKind::Assign { .. }
2380                                        | ast::StatementKind::LocalDecl(_) => {}
2381                                        _ => {
2382                                            return Err(Box::new(Error::InvalidForInitializer(
2383                                                span,
2384                                            )))
2385                                        }
2386                                    }
2387                                }
2388                            };
2389
2390                            let mut body = ast::Block::default();
2391                            if !lexer.skip(Token::Separator(';')) {
2392                                let (condition, span) =
2393                                    lexer.capture_span(|lexer| -> Result<'_, _> {
2394                                        let condition = this.general_expression(lexer, ctx)?;
2395                                        lexer.expect(Token::Separator(';'))?;
2396                                        Ok(condition)
2397                                    })?;
2398                                let mut reject = ast::Block::default();
2399                                reject.stmts.push(ast::Statement {
2400                                    kind: ast::StatementKind::Break,
2401                                    span,
2402                                });
2403                                body.stmts.push(ast::Statement {
2404                                    kind: ast::StatementKind::If {
2405                                        condition,
2406                                        accept: ast::Block::default(),
2407                                        reject,
2408                                    },
2409                                    span,
2410                                });
2411                            };
2412
2413                            let mut continuing = ast::Block::default();
2414                            if !lexer.skip(Token::Paren(')')) {
2415                                this.function_call_or_assignment_statement(
2416                                    lexer,
2417                                    ctx,
2418                                    &mut continuing,
2419                                )?;
2420                                lexer.expect(Token::Paren(')'))?;
2421                            }
2422
2423                            let (block, span) = this.block(lexer, ctx, brace_nesting_level)?;
2424                            body.stmts.push(ast::Statement {
2425                                kind: ast::StatementKind::Block(block),
2426                                span,
2427                            });
2428
2429                            ctx.local_table.pop_scope();
2430
2431                            ast::StatementKind::Loop {
2432                                body,
2433                                continuing,
2434                                break_if: None,
2435                            }
2436                        }
2437                        "break" => {
2438                            let (_, span) = lexer.next();
2439                            // Check whether the next token is an `if`; this indicates
2440                            // that the user tried to write a `break if`, which
2441                            // is illegal in this position.
2442                            let (peeked_token, peeked_span) = lexer.peek();
2443                            if let Token::Word("if") = peeked_token {
2444                                let span = span.until(&peeked_span);
2445                                return Err(Box::new(Error::InvalidBreakIf(span)));
2446                            }
2447                            lexer.expect(Token::Separator(';'))?;
2448                            ast::StatementKind::Break
2449                        }
2450                        "continue" => {
2451                            let _ = lexer.next();
2452                            lexer.expect(Token::Separator(';'))?;
2453                            ast::StatementKind::Continue
2454                        }
2455                        "discard" => {
2456                            let _ = lexer.next();
2457                            lexer.expect(Token::Separator(';'))?;
2458                            ast::StatementKind::Kill
2459                        }
2460                        // https://www.w3.org/TR/WGSL/#const-assert-statement
2461                        "const_assert" => {
2462                            let _ = lexer.next();
2463                            // parentheses are optional
2464                            let paren = lexer.skip(Token::Paren('('));
2465
2466                            let condition = this.general_expression(lexer, ctx)?;
2467
2468                            if paren {
2469                                lexer.expect(Token::Paren(')'))?;
2470                            }
2471                            lexer.expect(Token::Separator(';'))?;
2472                            ast::StatementKind::ConstAssert(condition)
2473                        }
2474                        // assignment or a function call
2475                        _ => {
2476                            this.function_call_or_assignment_statement(lexer, ctx, block)?;
2477                            lexer.expect(Token::Separator(';'))?;
2478                            this.pop_rule_span(lexer);
2479                            return Ok(());
2480                        }
2481                    };
2482
2483                    let span = this.pop_rule_span(lexer);
2484                    block.stmts.push(ast::Statement { kind, span });
2485                }
2486                _ => {
2487                    this.assignment_statement(lexer, ctx, block)?;
2488                    lexer.expect(Token::Separator(';'))?;
2489                    this.pop_rule_span(lexer);
2490                }
2491            }
2492            Ok(())
2493        })
2494    }
2495
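    /// Parse a `loop` statement. The body may end with a `continuing` block,
    /// which may itself end with a `break if <expr>;` statement. The `loop`
    /// keyword is consumed here.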
2496    fn r#loop<'a>(
2497        &mut self,
2498        lexer: &mut Lexer<'a>,
2499        ctx: &mut ExpressionContext<'a, '_, '_>,
2500        brace_nesting_level: u8,
2501    ) -> Result<'a, ast::StatementKind<'a>> {
2502        let _ = lexer.next();
2503        let mut body = ast::Block::default();
2504        let mut continuing = ast::Block::default();
2505        let mut break_if = None;
2506
2507        let brace_span = lexer.expect_span(Token::Paren('{'))?;
2508        let brace_nesting_level = Self::increase_brace_nesting(brace_nesting_level, brace_span)?;
2509
2510        ctx.local_table.push_scope();
2511
2512        loop {
2513            if lexer.skip(Token::Word("continuing")) {
2514                // Branch for the `continuing` block; this must be
2515                // the last thing in the loop body.
2516
2517                // Expect an opening brace to start the continuing block
2518                let brace_span = lexer.expect_span(Token::Paren('{'))?;
2519                let brace_nesting_level =
2520                    Self::increase_brace_nesting(brace_nesting_level, brace_span)?;
2521                loop {
2522                    if lexer.skip(Token::Word("break")) {
2523                        // Branch for the `break if` statement; it has the
2524                        // form `break if <expr>;` and must be the last
2525                        // statement in a continuing block.
2526
2527                        // The `break` must be followed by an `if` to form
2528                        // the `break if`.
2529                        lexer.expect(Token::Word("if"))?;
2530
2531                        let condition = self.general_expression(lexer, ctx)?;
2532                        // Set the condition of the `break if` to the newly
2533                        // parsed expression.
2534                        break_if = Some(condition);
2535
2536                        // Expect a semicolon to close the statement
2537                        lexer.expect(Token::Separator(';'))?;
2538                        // Expect a closing brace to close the continuing block,
2539                        // since the break if must be the last statement
2540                        lexer.expect(Token::Paren('}'))?;
2541                        // Stop parsing the continuing block
2542                        break;
2543                    } else if lexer.skip(Token::Paren('}')) {
2544                        // If we encounter a closing brace, it means we have reached
2545                        // the end of the continuing block and should stop processing
2546                        break;
2547                    } else {
2548                        // Otherwise try to parse a statement
2549                        self.statement(lexer, ctx, &mut continuing, brace_nesting_level)?;
2550                    }
2551                }
2552                // Since the continuing block must be the last part of the loop body,
2553                // we expect to see a closing brace to end the loop body
2554                lexer.expect(Token::Paren('}'))?;
2555                break;
2556            }
2557            if lexer.skip(Token::Paren('}')) {
2558            // If we encounter a closing brace, it means we have reached
2559            // the end of the loop body and should stop processing
2560                break;
2561            }
2562            // Otherwise try to parse a statement
2563            self.statement(lexer, ctx, &mut body, brace_nesting_level)?;
2564        }
2565
2566        ctx.local_table.pop_scope();
2567
2568        Ok(ast::StatementKind::Loop {
2569            body,
2570            continuing,
2571            break_if,
2572        })
2573    }
2574
2575    /// Parse a `compound_statement`: an optional attribute list followed by a brace-delimited statement list.
    fn block<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
        brace_nesting_level: u8,
    ) -> Result<'a, (ast::Block<'a>, Span)> {
        self.push_rule_span(Rule::Block, lexer);

        ctx.local_table.push_scope();

        let mut diagnostic_filters = DiagnosticFilterMap::new();

        self.push_rule_span(Rule::Attribute, lexer);
        while lexer.skip(Token::Attribute) {
            let (name, name_span) = lexer.next_ident_with_span()?;
            if let Some(DirectiveKind::Diagnostic) = DirectiveKind::from_ident(name) {
                let filter = self.diagnostic_filter(lexer)?;
                let span = self.peek_rule_span(lexer);
                diagnostic_filters
                    .add(filter, span, ShouldConflictOnFullDuplicate::Yes)
                    .map_err(|e| Box::new(e.into()))?;
            } else {
                return Err(Box::new(Error::Unexpected(
                    name_span,
                    ExpectedToken::DiagnosticAttribute,
                )));
            }
        }
        self.pop_rule_span(lexer);

        if !diagnostic_filters.is_empty() {
            return Err(Box::new(
                Error::DiagnosticAttributeNotYetImplementedAtParseSite {
                    site_name_plural: "compound statements",
                    spans: diagnostic_filters.spans().collect(),
                },
            ));
        }

        let brace_span = lexer.expect_span(Token::Paren('{'))?;
        let brace_nesting_level = Self::increase_brace_nesting(brace_nesting_level, brace_span)?;
        let mut block = ast::Block::default();
        while !lexer.skip(Token::Paren('}')) {
            self.statement(lexer, ctx, &mut block, brace_nesting_level)?;
        }

        ctx.local_table.pop_scope();

        let span = self.pop_rule_span(lexer);
        Ok((block, span))
    }

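    /// Parse the attribute list that may precede a function parameter or
    /// return type; illustrative WGSL (not from any particular shader):
    /// `@location(0)`, `@builtin(position)`, or `@interpolate(flat)`.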
    fn varying_binding<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Option<ast::Binding<'a>>> {
        let mut bind_parser = BindingParser::default();
        self.push_rule_span(Rule::Attribute, lexer);

        while lexer.skip(Token::Attribute) {
            let (word, span) = lexer.next_ident_with_span()?;
            bind_parser.parse(self, lexer, word, span, ctx)?;
        }

        let span = self.pop_rule_span(lexer);
        bind_parser.finish(span)
    }

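    /// Parse a function declaration, e.g. (illustrative WGSL, assuming nothing
    /// about the surrounding module):
    ///
    /// ```wgsl
    /// fn scale(v: vec2<f32>, factor: f32) -> vec2<f32> {
    ///     return v * factor;
    /// }
    /// ```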
    fn function_decl<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        diagnostic_filter_leaf: Option<Handle<DiagnosticFilterNode>>,
        must_use: Option<Span>,
        out: &mut ast::TranslationUnit<'a>,
        dependencies: &mut FastIndexSet<ast::Dependency<'a>>,
    ) -> Result<'a, ast::Function<'a>> {
        self.push_rule_span(Rule::FunctionDecl, lexer);
        // read function name
        let fun_name = lexer.next_ident()?;

        let mut locals = Arena::new();

        let mut ctx = ExpressionContext {
            expressions: &mut out.expressions,
            local_table: &mut SymbolTable::default(),
            locals: &mut locals,
            types: &mut out.types,
            unresolved: dependencies,
        };

        // start a scope that contains arguments as well as the function body
        ctx.local_table.push_scope();

        // read parameter list
        let mut arguments = Vec::new();
        lexer.expect(Token::Paren('('))?;
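        // `ready` is true whenever the next parameter may legally begin: at
        // the start of the list and immediately after a comma.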
        let mut ready = true;
        while !lexer.skip(Token::Paren(')')) {
            if !ready {
                return Err(Box::new(Error::Unexpected(
                    lexer.next().1,
                    ExpectedToken::Token(Token::Separator(',')),
                )));
            }
            let binding = self.varying_binding(lexer, &mut ctx)?;

            let param_name = lexer.next_ident()?;

            lexer.expect(Token::Separator(':'))?;
            let param_type = self.type_decl(lexer, &mut ctx)?;

            let handle = ctx.declare_local(param_name)?;
            arguments.push(ast::FunctionArgument {
                name: param_name,
                ty: param_type,
                binding,
                handle,
            });
            ready = lexer.skip(Token::Separator(','));
        }
        // read return type
        let result = if lexer.skip(Token::Arrow) {
            let binding = self.varying_binding(lexer, &mut ctx)?;
            let ty = self.type_decl(lexer, &mut ctx)?;
            let must_use = must_use.is_some();
            Some(ast::FunctionResult {
                ty,
                binding,
                must_use,
            })
        } else if let Some(must_use) = must_use {
            return Err(Box::new(Error::FunctionMustUseReturnsVoid(
                must_use,
                self.peek_rule_span(lexer),
            )));
        } else {
            None
        };

        // do not use `self.block` here, since we must not push a new scope
        lexer.expect(Token::Paren('{'))?;
        let brace_nesting_level = 1;
        let mut body = ast::Block::default();
        while !lexer.skip(Token::Paren('}')) {
            self.statement(lexer, &mut ctx, &mut body, brace_nesting_level)?;
        }

        ctx.local_table.pop_scope();

        let fun = ast::Function {
            entry_point: None,
            name: fun_name,
            arguments,
            result,
            body,
            diagnostic_filter_leaf,
            doc_comments: Vec::new(),
        };

        // done
        self.pop_rule_span(lexer);

        Ok(fun)
    }

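    /// Parse the comma-separated identifier list of an `enable` or `requires`
    /// directive, terminated by a semicolon; e.g. the identifiers in
    /// `enable f16, clip_distances;` (illustrative).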
    fn directive_ident_list<'a>(
        &self,
        lexer: &mut Lexer<'a>,
        handler: impl FnMut(&'a str, Span) -> Result<'a, ()>,
    ) -> Result<'a, ()> {
        let mut handler = handler;
        'next_arg: loop {
            let (ident, span) = lexer.next_ident_with_span()?;
            handler(ident, span)?;

            let expected_token = match lexer.peek().0 {
                Token::Separator(',') => {
                    let _ = lexer.next();
                    if matches!(lexer.peek().0, Token::Word(..)) {
                        continue 'next_arg;
                    }
                    ExpectedToken::AfterIdentListComma
                }
                _ => ExpectedToken::AfterIdentListArg,
            };

            if !matches!(lexer.next().0, Token::Separator(';')) {
                return Err(Box::new(Error::Unexpected(span, expected_token)));
            }

            break Ok(());
        }
    }

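    /// Parse a single module-scope declaration and append it to `out`.
    ///
    /// Illustrative WGSL (assumed, not taken from any particular test):
    ///
    /// ```wgsl
    /// @group(0) @binding(0) var<uniform> scale: f32;
    ///
    /// @compute @workgroup_size(64)
    /// fn main(@builtin(global_invocation_id) id: vec3<u32>) { }
    /// ```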
    fn global_decl<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        out: &mut ast::TranslationUnit<'a>,
    ) -> Result<'a, ()> {
        let doc_comments = lexer.accumulate_doc_comments();

        // read attributes
        let mut binding = None;
        let mut stage = ParsedAttribute::default();
        let mut compute_span = Span::new(0, 0);
        let mut workgroup_size = ParsedAttribute::default();
        let mut early_depth_test = ParsedAttribute::default();
        let (mut bind_index, mut bind_group) =
            (ParsedAttribute::default(), ParsedAttribute::default());
        let mut id = ParsedAttribute::default();

        let mut must_use: ParsedAttribute<Span> = ParsedAttribute::default();

        let mut dependencies = FastIndexSet::default();
        let mut ctx = ExpressionContext {
            expressions: &mut out.expressions,
            local_table: &mut SymbolTable::default(),
            locals: &mut Arena::new(),
            types: &mut out.types,
            unresolved: &mut dependencies,
        };
        let mut diagnostic_filters = DiagnosticFilterMap::new();
        let ensure_no_diag_attrs = |on_what, filters: DiagnosticFilterMap| -> Result<()> {
            if filters.is_empty() {
                Ok(())
            } else {
                Err(Box::new(Error::DiagnosticAttributeNotSupported {
                    on_what,
                    spans: filters.spans().collect(),
                }))
            }
        };

        self.push_rule_span(Rule::Attribute, lexer);
        while lexer.skip(Token::Attribute) {
            let (name, name_span) = lexer.next_ident_with_span()?;
            if let Some(DirectiveKind::Diagnostic) = DirectiveKind::from_ident(name) {
                let filter = self.diagnostic_filter(lexer)?;
                let span = self.peek_rule_span(lexer);
                diagnostic_filters
                    .add(filter, span, ShouldConflictOnFullDuplicate::Yes)
                    .map_err(|e| Box::new(e.into()))?;
                continue;
            }
            match name {
                "binding" => {
                    lexer.expect(Token::Paren('('))?;
                    bind_index.set(self.general_expression(lexer, &mut ctx)?, name_span)?;
                    lexer.expect(Token::Paren(')'))?;
                }
                "group" => {
                    lexer.expect(Token::Paren('('))?;
                    bind_group.set(self.general_expression(lexer, &mut ctx)?, name_span)?;
                    lexer.expect(Token::Paren(')'))?;
                }
                "id" => {
                    lexer.expect(Token::Paren('('))?;
                    id.set(self.general_expression(lexer, &mut ctx)?, name_span)?;
                    lexer.expect(Token::Paren(')'))?;
                }
                "vertex" => {
                    stage.set(ShaderStage::Vertex, name_span)?;
                }
                "fragment" => {
                    stage.set(ShaderStage::Fragment, name_span)?;
                }
                "compute" => {
                    stage.set(ShaderStage::Compute, name_span)?;
                    compute_span = name_span;
                }
                "workgroup_size" => {
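                    // `@workgroup_size` takes one to three comma-separated
                    // expressions, e.g. `@workgroup_size(8, 8)` (illustrative).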
                    lexer.expect(Token::Paren('('))?;
                    let mut new_workgroup_size = [None; 3];
                    for (i, size) in new_workgroup_size.iter_mut().enumerate() {
                        *size = Some(self.general_expression(lexer, &mut ctx)?);
                        match lexer.next() {
                            (Token::Paren(')'), _) => break,
                            (Token::Separator(','), _) if i != 2 => (),
                            other => {
                                return Err(Box::new(Error::Unexpected(
                                    other.1,
                                    ExpectedToken::WorkgroupSizeSeparator,
                                )))
                            }
                        }
                    }
                    workgroup_size.set(new_workgroup_size, name_span)?;
                }
                "early_depth_test" => {
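                    // Accepts `force`, or a conservative-depth identifier
                    // handled by `conv::map_conservative_depth`; e.g.
                    // `@early_depth_test(force)` (illustrative).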
                    lexer.expect(Token::Paren('('))?;
                    let (ident, ident_span) = lexer.next_ident_with_span()?;
                    let value = if ident == "force" {
                        crate::EarlyDepthTest::Force
                    } else {
                        crate::EarlyDepthTest::Allow {
                            conservative: conv::map_conservative_depth(ident, ident_span)?,
                        }
                    };
                    lexer.expect(Token::Paren(')'))?;
                    early_depth_test.set(value, name_span)?;
                }
                "must_use" => {
                    must_use.set(name_span, name_span)?;
                }
                _ => return Err(Box::new(Error::UnknownAttribute(name_span))),
            }
        }

        let attrib_span = self.pop_rule_span(lexer);
        match (bind_group.value, bind_index.value) {
            (Some(group), Some(index)) => {
                binding = Some(ast::ResourceBinding {
                    group,
                    binding: index,
                });
            }
            (Some(_), None) => {
                return Err(Box::new(Error::MissingAttribute("binding", attrib_span)))
            }
            (None, Some(_)) => return Err(Box::new(Error::MissingAttribute("group", attrib_span))),
            (None, None) => {}
        }

        // read item
        let start = lexer.start_byte_offset();
        let kind = match lexer.next() {
            (Token::Separator(';'), _) => {
                ensure_no_diag_attrs(
                    DiagnosticAttributeNotSupportedPosition::SemicolonInModulePosition,
                    diagnostic_filters,
                )?;
                None
            }
            (Token::Word(word), directive_span) if DirectiveKind::from_ident(word).is_some() => {
                return Err(Box::new(Error::DirectiveAfterFirstGlobalDecl {
                    directive_span,
                }));
            }
            (Token::Word("struct"), _) => {
                ensure_no_diag_attrs("`struct`s".into(), diagnostic_filters)?;

                let name = lexer.next_ident()?;

                let members = self.struct_body(lexer, &mut ctx)?;

                Some(ast::GlobalDeclKind::Struct(ast::Struct {
                    name,
                    members,
                    doc_comments,
                }))
            }
            (Token::Word("alias"), _) => {
                ensure_no_diag_attrs("`alias`es".into(), diagnostic_filters)?;

                let name = lexer.next_ident()?;

                lexer.expect(Token::Operation('='))?;
                let ty = self.type_decl(lexer, &mut ctx)?;
                lexer.expect(Token::Separator(';'))?;
                Some(ast::GlobalDeclKind::Type(ast::TypeAlias { name, ty }))
            }
            (Token::Word("const"), _) => {
                ensure_no_diag_attrs("`const`s".into(), diagnostic_filters)?;

                let name = lexer.next_ident()?;

                let ty = if lexer.skip(Token::Separator(':')) {
                    let ty = self.type_decl(lexer, &mut ctx)?;
                    Some(ty)
                } else {
                    None
                };

                lexer.expect(Token::Operation('='))?;
                let init = self.general_expression(lexer, &mut ctx)?;
                lexer.expect(Token::Separator(';'))?;

                Some(ast::GlobalDeclKind::Const(ast::Const {
                    name,
                    ty,
                    init,
                    doc_comments,
                }))
            }
            (Token::Word("override"), _) => {
                ensure_no_diag_attrs("`override`s".into(), diagnostic_filters)?;

                let name = lexer.next_ident()?;

                let ty = if lexer.skip(Token::Separator(':')) {
                    Some(self.type_decl(lexer, &mut ctx)?)
                } else {
                    None
                };

                let init = if lexer.skip(Token::Operation('=')) {
                    Some(self.general_expression(lexer, &mut ctx)?)
                } else {
                    None
                };

                lexer.expect(Token::Separator(';'))?;

                Some(ast::GlobalDeclKind::Override(ast::Override {
                    name,
                    id: id.value,
                    ty,
                    init,
                }))
            }
            (Token::Word("var"), _) => {
                ensure_no_diag_attrs("`var`s".into(), diagnostic_filters)?;

                let mut var = self.variable_decl(lexer, &mut ctx)?;
                var.binding = binding.take();
                var.doc_comments = doc_comments;
                Some(ast::GlobalDeclKind::Var(var))
            }
            (Token::Word("fn"), _) => {
                let diagnostic_filter_leaf = Self::write_diagnostic_filters(
                    &mut out.diagnostic_filters,
                    diagnostic_filters,
                    out.diagnostic_filter_leaf,
                );

                let function = self.function_decl(
                    lexer,
                    diagnostic_filter_leaf,
                    must_use.value,
                    out,
                    &mut dependencies,
                )?;
                Some(ast::GlobalDeclKind::Fn(ast::Function {
                    entry_point: if let Some(stage) = stage.value {
                        if stage == ShaderStage::Compute && workgroup_size.value.is_none() {
                            return Err(Box::new(Error::MissingWorkgroupSize(compute_span)));
                        }
                        Some(ast::EntryPoint {
                            stage,
                            early_depth_test: early_depth_test.value,
                            workgroup_size: workgroup_size.value,
                        })
                    } else {
                        None
                    },
                    doc_comments,
                    ..function
                }))
            }
            (Token::Word("const_assert"), _) => {
                ensure_no_diag_attrs("`const_assert`s".into(), diagnostic_filters)?;

                // parentheses are optional
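                // e.g. both `const_assert x < 4;` and `const_assert(x < 4);`
                // are accepted here (illustrative).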
                let paren = lexer.skip(Token::Paren('('));

                let condition = self.general_expression(lexer, &mut ctx)?;

                if paren {
                    lexer.expect(Token::Paren(')'))?;
                }
                lexer.expect(Token::Separator(';'))?;
                Some(ast::GlobalDeclKind::ConstAssert(condition))
            }
            (Token::End, _) => return Ok(()),
            other => {
                return Err(Box::new(Error::Unexpected(
                    other.1,
                    ExpectedToken::GlobalItem,
                )))
            }
        };

        if let Some(kind) = kind {
            out.decls.append(
                ast::GlobalDecl { kind, dependencies },
                lexer.span_from(start),
            );
        }

        if !self.rules.is_empty() {
            log::error!("Reached the end of global decl, but rule stack is not empty");
            log::error!("Rules: {:?}", self.rules);
            return Err(Box::new(Error::Internal("rule stack is not empty")));
        };

        match binding {
            None => Ok(()),
            Some(_) => Err(Box::new(Error::Internal(
                "we had the attribute but no var?",
            ))),
        }
    }

    pub fn parse<'a>(
        &mut self,
        source: &'a str,
        options: &Options,
    ) -> Result<'a, ast::TranslationUnit<'a>> {
        self.reset();

        let mut lexer = Lexer::new(source, !options.parse_doc_comments);
        let mut tu = ast::TranslationUnit::default();
        let mut enable_extensions = EnableExtensions::empty();
        let mut diagnostic_filters = DiagnosticFilterMap::new();

        // Parse module doc comments.
        tu.doc_comments = lexer.accumulate_module_doc_comments();

        // Parse directives.
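        //
        // Directives must appear before any global declaration; for example
        // (illustrative WGSL):
        //
        //     diagnostic(off, derivative_uniformity);
        //     enable f16;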
        while let Ok((ident, _directive_ident_span)) = lexer.peek_ident_with_span() {
            if let Some(kind) = DirectiveKind::from_ident(ident) {
                self.push_rule_span(Rule::Directive, &mut lexer);
                let _ = lexer.next_ident_with_span().unwrap();
                match kind {
                    DirectiveKind::Diagnostic => {
                        let diagnostic_filter = self.diagnostic_filter(&mut lexer)?;
                        let span = self.peek_rule_span(&lexer);
                        diagnostic_filters
                            .add(diagnostic_filter, span, ShouldConflictOnFullDuplicate::No)
                            .map_err(|e| Box::new(e.into()))?;
                        lexer.expect(Token::Separator(';'))?;
                    }
                    DirectiveKind::Enable => {
                        self.directive_ident_list(&mut lexer, |ident, span| {
                            let kind = EnableExtension::from_ident(ident, span)?;
                            let extension = match kind {
                                EnableExtension::Implemented(kind) => kind,
                                EnableExtension::Unimplemented(kind) => {
                                    return Err(Box::new(Error::EnableExtensionNotYetImplemented {
                                        kind,
                                        span,
                                    }))
                                }
                            };
                            enable_extensions.add(extension);
                            Ok(())
                        })?;
                    }
                    DirectiveKind::Requires => {
                        self.directive_ident_list(&mut lexer, |ident, span| {
                            match LanguageExtension::from_ident(ident) {
                                Some(LanguageExtension::Implemented(_kind)) => {
                                    // NOTE: No further validation is needed for an
                                    // implemented extension, so just throw the parsed
                                    // information away. If we ever want to apply what
                                    // we've parsed to diagnostics, we may want to refer
                                    // back to the enabled extensions here.
                                    Ok(())
                                }
                                Some(LanguageExtension::Unimplemented(kind)) => {
                                    Err(Box::new(Error::LanguageExtensionNotYetImplemented {
                                        kind,
                                        span,
                                    }))
                                }
                                None => Err(Box::new(Error::UnknownLanguageExtension(span, ident))),
                            }
                        })?;
                    }
                }
                self.pop_rule_span(&lexer);
            } else {
                break;
            }
        }

        lexer.enable_extensions = enable_extensions.clone();
        tu.enable_extensions = enable_extensions;
        tu.diagnostic_filter_leaf =
            Self::write_diagnostic_filters(&mut tu.diagnostic_filters, diagnostic_filters, None);

        loop {
            match self.global_decl(&mut lexer, &mut tu) {
                Err(error) => return Err(error),
                Ok(()) => {
                    if lexer.peek().0 == Token::End {
                        break;
                    }
                }
            }
        }

        Ok(tu)
    }

    fn increase_brace_nesting(brace_nesting_level: u8, brace_span: Span) -> Result<'static, u8> {
        // From [spec.](https://gpuweb.github.io/gpuweb/wgsl/#limits):
        //
        // > § 2.4. Limits
        // >
        // > …
        // >
        // > Maximum nesting depth of brace-enclosed statements in a function[:] 127
        const BRACE_NESTING_MAXIMUM: u8 = 127;
        if brace_nesting_level + 1 > BRACE_NESTING_MAXIMUM {
            return Err(Box::new(Error::ExceededLimitForNestedBraces {
                span: brace_span,
                limit: BRACE_NESTING_MAXIMUM,
            }));
        }
        Ok(brace_nesting_level + 1)
    }

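    /// Parse the parenthesized argument list shared by `diagnostic` directives
    /// and `@diagnostic` attributes, e.g. `(off, derivative_uniformity)` or
    /// `(warning, my_namespace.my_rule)` (illustrative); a trailing comma is
    /// allowed.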
    fn diagnostic_filter<'a>(&self, lexer: &mut Lexer<'a>) -> Result<'a, DiagnosticFilter> {
        lexer.expect(Token::Paren('('))?;

        let (severity_control_name, severity_control_name_span) = lexer.next_ident_with_span()?;
        let new_severity = diagnostic_filter::Severity::from_wgsl_ident(severity_control_name)
            .ok_or(Error::DiagnosticInvalidSeverity {
                severity_control_name_span,
            })?;

        lexer.expect(Token::Separator(','))?;

        let (diagnostic_name_token, diagnostic_name_token_span) = lexer.next_ident_with_span()?;
        let triggering_rule = if lexer.skip(Token::Separator('.')) {
            let (ident, _span) = lexer.next_ident_with_span()?;
            FilterableTriggeringRule::User(Box::new([diagnostic_name_token.into(), ident.into()]))
        } else {
            let diagnostic_rule_name = diagnostic_name_token;
            let diagnostic_rule_name_span = diagnostic_name_token_span;
            if let Some(triggering_rule) =
                StandardFilterableTriggeringRule::from_wgsl_ident(diagnostic_rule_name)
            {
                FilterableTriggeringRule::Standard(triggering_rule)
            } else {
                diagnostic_filter::Severity::Warning.report_wgsl_parse_diag(
                    Box::new(Error::UnknownDiagnosticRuleName(diagnostic_rule_name_span)),
                    lexer.source,
                )?;
                FilterableTriggeringRule::Unknown(diagnostic_rule_name.into())
            }
        };
        let filter = DiagnosticFilter {
            triggering_rule,
            new_severity,
        };
        lexer.skip(Token::Separator(','));
        lexer.expect(Token::Paren(')'))?;

        Ok(filter)
    }

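    /// Append the filters in `filters` to `arena` as a chain of
    /// [`DiagnosticFilterNode`]s whose `parent` links lead back to `parent`,
    /// returning the handle of the innermost node (or `parent` itself when
    /// `filters` is empty).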
    pub(crate) fn write_diagnostic_filters(
        arena: &mut Arena<DiagnosticFilterNode>,
        filters: DiagnosticFilterMap,
        parent: Option<Handle<DiagnosticFilterNode>>,
    ) -> Option<Handle<DiagnosticFilterNode>> {
        filters
            .into_iter()
            .fold(parent, |parent, (triggering_rule, (new_severity, span))| {
                Some(arena.append(
                    DiagnosticFilterNode {
                        inner: DiagnosticFilter {
                            new_severity,
                            triggering_rule,
                        },
                        parent,
                    },
                    span,
                ))
            })
    }
}