naga/front/wgsl/parse/mod.rs

use alloc::{boxed::Box, vec::Vec};
use directive::enable_extension::ImplementedEnableExtension;

use crate::diagnostic_filter::{
    self, DiagnosticFilter, DiagnosticFilterMap, DiagnosticFilterNode, FilterableTriggeringRule,
    ShouldConflictOnFullDuplicate, StandardFilterableTriggeringRule,
};
use crate::front::wgsl::error::{DiagnosticAttributeNotSupportedPosition, Error, ExpectedToken};
use crate::front::wgsl::parse::directive::enable_extension::{EnableExtension, EnableExtensions};
use crate::front::wgsl::parse::directive::language_extension::LanguageExtension;
use crate::front::wgsl::parse::directive::DirectiveKind;
use crate::front::wgsl::parse::lexer::{Lexer, Token};
use crate::front::wgsl::parse::number::Number;
use crate::front::wgsl::{Result, Scalar};
use crate::front::SymbolTable;
use crate::{Arena, FastHashSet, FastIndexSet, Handle, ShaderStage, Span};

pub mod ast;
pub mod conv;
pub mod directive;
pub mod lexer;
pub mod number;

/// State for constructing an AST expression.
///
/// Not to be confused with [`lower::ExpressionContext`], which is for producing
/// Naga IR from the AST we produce here.
///
/// [`lower::ExpressionContext`]: super::lower::ExpressionContext
struct ExpressionContext<'input, 'temp, 'out> {
    /// The [`TranslationUnit::expressions`] arena to which we should contribute
    /// expressions.
    ///
    /// [`TranslationUnit::expressions`]: ast::TranslationUnit::expressions
    expressions: &'out mut Arena<ast::Expression<'input>>,

    /// The [`TranslationUnit::types`] arena to which we should contribute new
    /// types.
    ///
    /// [`TranslationUnit::types`]: ast::TranslationUnit::types
    types: &'out mut Arena<ast::Type<'input>>,

    /// A map from identifiers in scope to the locals/arguments they represent.
    ///
    /// The handles refer to the [`locals`] arena; see that field's
    /// documentation for details.
    ///
    /// [`locals`]: ExpressionContext::locals
    local_table: &'temp mut SymbolTable<&'input str, Handle<ast::Local>>,

    /// Local variable and function argument arena for the function we're building.
    ///
    /// Note that the [`ast::Local`] here is actually a zero-sized type. This
    /// `Arena`'s only role is to assign a unique `Handle` to each local
    /// identifier, and track its definition's span for use in diagnostics. All
    /// the detailed information about locals - names, types, etc. - is kept in
    /// the [`LocalDecl`] statements we parsed from their declarations. For
    /// arguments, that information is kept in [`arguments`].
    ///
    /// In the AST, when an [`Ident`] expression refers to a local variable or
    /// argument, its [`IdentExpr`] holds the referent's `Handle<Local>` in this
    /// arena.
    ///
    /// During lowering, [`LocalDecl`] statements add entries to a per-function
    /// table that maps `Handle<Local>` values to their Naga representations,
    /// accessed via [`StatementContext::local_table`] and
    /// [`LocalExpressionContext::local_table`]. This table is then consulted when
    /// lowering subsequent [`Ident`] expressions.
    ///
    /// [`LocalDecl`]: ast::StatementKind::LocalDecl
    /// [`arguments`]: ast::Function::arguments
    /// [`Ident`]: ast::Expression::Ident
    /// [`IdentExpr`]: ast::IdentExpr
    /// [`StatementContext::local_table`]: super::lower::StatementContext::local_table
    /// [`LocalExpressionContext::local_table`]: super::lower::LocalExpressionContext::local_table
    locals: &'out mut Arena<ast::Local>,

    /// Identifiers used by the current global declaration that have no local definition.
    ///
    /// This becomes the [`GlobalDecl`]'s [`dependencies`] set.
    ///
    /// Note that we don't know at parse time what kind of [`GlobalDecl`] the
    /// name refers to. We can't look up names until we've seen the entire
    /// translation unit.
    ///
    /// [`GlobalDecl`]: ast::GlobalDecl
    /// [`dependencies`]: ast::GlobalDecl::dependencies
    unresolved: &'out mut FastIndexSet<ast::Dependency<'input>>,
}

impl<'a> ExpressionContext<'a, '_, '_> {
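    /// Parse a left-associative run of binary operators.
    ///
    /// This is the helper behind the expression-precedence ladder below:
    /// `classifier` maps the next token to a [`crate::BinaryOperator`] if it
    /// continues the run, and `parser` parses each operand, so an input such
    /// as `a - b - c` is folded as `(a - b) - c`.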
    fn parse_binary_op(
        &mut self,
        lexer: &mut Lexer<'a>,
        classifier: impl Fn(Token<'a>) -> Option<crate::BinaryOperator>,
        mut parser: impl FnMut(&mut Lexer<'a>, &mut Self) -> Result<'a, Handle<ast::Expression<'a>>>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        let start = lexer.start_byte_offset();
        let mut accumulator = parser(lexer, self)?;
        while let Some(op) = classifier(lexer.peek().0) {
            let _ = lexer.next();
            let left = accumulator;
            let right = parser(lexer, self)?;
            accumulator = self.expressions.append(
                ast::Expression::Binary { op, left, right },
                lexer.span_from(start),
            );
        }
        Ok(accumulator)
    }

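    /// Add a local to [`locals`] and make it visible in [`local_table`],
    /// reporting a redefinition error if the name is already defined in the
    /// current scope.
    ///
    /// [`locals`]: ExpressionContext::locals
    /// [`local_table`]: ExpressionContext::local_table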
    fn declare_local(&mut self, name: ast::Ident<'a>) -> Result<'a, Handle<ast::Local>> {
        let handle = self.locals.append(ast::Local, name.span);
        if let Some(old) = self.local_table.add(name.name, handle) {
            Err(Box::new(Error::Redefinition {
                previous: self.locals.get_span(old),
                current: name.span,
            }))
        } else {
            Ok(handle)
        }
    }

    fn new_scalar(&mut self, scalar: Scalar) -> Handle<ast::Type<'a>> {
        self.types
            .append(ast::Type::Scalar(scalar), Span::UNDEFINED)
    }
}

/// Which grammar rule we are in the midst of parsing.
///
/// This is used for error checking. `Parser` maintains a stack of
/// these and (occasionally) checks that it is being pushed and popped
/// as expected.
#[derive(Copy, Clone, Debug, PartialEq)]
enum Rule {
    Attribute,
    VariableDecl,
    TypeDecl,
    FunctionDecl,
    Block,
    Statement,
    PrimaryExpr,
    SingularExpr,
    UnaryExpr,
    GeneralExpr,
    Directive,
    GenericExpr,
    EnclosedExpr,
    LhsExpr,
}

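/// A single attribute value that may be set at most once.
///
/// [`set`] records the value and reports [`Error::RepeatedAttribute`] if the
/// same attribute appears more than once on one declaration.
///
/// [`set`]: ParsedAttribute::set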
struct ParsedAttribute<T> {
    value: Option<T>,
}

impl<T> Default for ParsedAttribute<T> {
    fn default() -> Self {
        Self { value: None }
    }
}

impl<T> ParsedAttribute<T> {
    fn set(&mut self, value: T, name_span: Span) -> Result<'static, ()> {
        if self.value.is_some() {
            return Err(Box::new(Error::RepeatedAttribute(name_span)));
        }
        self.value = Some(value);
        Ok(())
    }
}

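/// Accumulates the binding attributes that may appear on an entry point
/// argument, return value, or struct member, for example (WGSL):
///
/// ```text
/// @location(0) @interpolate(flat) index: u32,
/// ```
///
/// [`finish`] then checks that the collected attributes form a consistent
/// [`ast::Binding`].
///
/// [`finish`]: BindingParser::finish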
#[derive(Default)]
struct BindingParser<'a> {
    location: ParsedAttribute<Handle<ast::Expression<'a>>>,
    built_in: ParsedAttribute<crate::BuiltIn>,
    interpolation: ParsedAttribute<crate::Interpolation>,
    sampling: ParsedAttribute<crate::Sampling>,
    invariant: ParsedAttribute<bool>,
    blend_src: ParsedAttribute<Handle<ast::Expression<'a>>>,
    per_primitive: ParsedAttribute<()>,
}

impl<'a> BindingParser<'a> {
    fn parse(
        &mut self,
        parser: &mut Parser,
        lexer: &mut Lexer<'a>,
        name: &'a str,
        name_span: Span,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, ()> {
        match name {
            "location" => {
                lexer.expect(Token::Paren('('))?;
                self.location
                    .set(parser.general_expression(lexer, ctx)?, name_span)?;
                lexer.expect(Token::Paren(')'))?;
            }
            "builtin" => {
                lexer.expect(Token::Paren('('))?;
                let (raw, span) = lexer.next_ident_with_span()?;
                self.built_in.set(
                    conv::map_built_in(&lexer.enable_extensions, raw, span)?,
                    name_span,
                )?;
                lexer.expect(Token::Paren(')'))?;
            }
            "interpolate" => {
                lexer.expect(Token::Paren('('))?;
                let (raw, span) = lexer.next_ident_with_span()?;
                self.interpolation
                    .set(conv::map_interpolation(raw, span)?, name_span)?;
                if lexer.skip(Token::Separator(',')) {
                    let (raw, span) = lexer.next_ident_with_span()?;
                    self.sampling
                        .set(conv::map_sampling(raw, span)?, name_span)?;
                }
                lexer.expect(Token::Paren(')'))?;
            }

            "invariant" => {
                self.invariant.set(true, name_span)?;
            }
            "blend_src" => {
                if !lexer
                    .enable_extensions
                    .contains(ImplementedEnableExtension::DualSourceBlending)
                {
                    return Err(Box::new(Error::EnableExtensionNotEnabled {
                        span: name_span,
                        kind: ImplementedEnableExtension::DualSourceBlending.into(),
                    }));
                }

                lexer.expect(Token::Paren('('))?;
                self.blend_src
                    .set(parser.general_expression(lexer, ctx)?, name_span)?;
                lexer.skip(Token::Separator(','));
                lexer.expect(Token::Paren(')'))?;
            }
            "per_primitive" => {
                if !lexer
                    .enable_extensions
                    .contains(ImplementedEnableExtension::WgpuMeshShader)
                {
                    return Err(Box::new(Error::EnableExtensionNotEnabled {
                        span: name_span,
                        kind: ImplementedEnableExtension::WgpuMeshShader.into(),
                    }));
                }
                self.per_primitive.set((), name_span)?;
            }
            _ => return Err(Box::new(Error::UnknownAttribute(name_span))),
        }
        Ok(())
    }

    fn finish(self, span: Span) -> Result<'a, Option<ast::Binding<'a>>> {
        match (
            self.location.value,
            self.built_in.value,
            self.interpolation.value,
            self.sampling.value,
            self.invariant.value.unwrap_or_default(),
            self.blend_src.value,
            self.per_primitive.value,
        ) {
            (None, None, None, None, false, None, None) => Ok(None),
            (Some(location), None, interpolation, sampling, false, blend_src, per_primitive) => {
                // Before handing over the completed `Module`, we call
                // `apply_default_interpolation` to ensure that the interpolation and
                // sampling have been explicitly specified on all vertex shader output and fragment
                // shader input user bindings, so leaving them potentially `None` here is fine.
                Ok(Some(ast::Binding::Location {
                    location,
                    interpolation,
                    sampling,
                    blend_src,
                    per_primitive: per_primitive.is_some(),
                }))
            }
            (None, Some(crate::BuiltIn::Position { .. }), None, None, invariant, None, None) => {
                Ok(Some(ast::Binding::BuiltIn(crate::BuiltIn::Position {
                    invariant,
                })))
            }
            (None, Some(built_in), None, None, false, None, None) => {
                Ok(Some(ast::Binding::BuiltIn(built_in)))
            }
            (_, _, _, _, _, _, _) => Err(Box::new(Error::InconsistentBinding(span))),
        }
    }
}

/// Configuration for the whole parser run.
pub struct Options {
    /// Controls whether the parser should parse doc comments.
    pub parse_doc_comments: bool,
}

impl Options {
    /// Creates a new [`Options`] with doc comment parsing disabled.
    pub const fn new() -> Self {
        Options {
            parse_doc_comments: false,
        }
    }
}

pub struct Parser {
    rules: Vec<(Rule, usize)>,
    recursion_depth: u32,
}

impl Parser {
    pub const fn new() -> Self {
        Parser {
            rules: Vec::new(),
            recursion_depth: 0,
        }
    }

    fn reset(&mut self) {
        self.rules.clear();
        self.recursion_depth = 0;
    }

    fn push_rule_span(&mut self, rule: Rule, lexer: &mut Lexer<'_>) {
        self.rules.push((rule, lexer.start_byte_offset()));
    }

    fn pop_rule_span(&mut self, lexer: &Lexer<'_>) -> Span {
        let (_, initial) = self.rules.pop().unwrap();
        lexer.span_from(initial)
    }

    fn peek_rule_span(&mut self, lexer: &Lexer<'_>) -> Span {
        let &(_, initial) = self.rules.last().unwrap();
        lexer.span_from(initial)
    }

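    /// Return whichever of `rule0` or `rule1` is innermost on the rule stack,
    /// or `None` if neither is present. This is used below to decide whether
    /// a `<` or `>` belongs to an enclosing template list or is an ordinary
    /// comparison operator.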
    fn race_rules(&self, rule0: Rule, rule1: Rule) -> Option<Rule> {
        Some(
            self.rules
                .iter()
                .rev()
                .find(|&x| x.0 == rule0 || x.0 == rule1)?
                .0,
        )
    }

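    /// Run `f`, failing with an internal error if the parser's recursion
    /// depth reaches 256. This keeps deeply nested expressions from
    /// overflowing the stack.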
    fn track_recursion<'a, F, R>(&mut self, f: F) -> Result<'a, R>
    where
        F: FnOnce(&mut Self) -> Result<'a, R>,
    {
        self.recursion_depth += 1;
        if self.recursion_depth >= 256 {
            return Err(Box::new(Error::Internal("Parser recursion limit exceeded")));
        }
        let ret = f(self);
        self.recursion_depth -= 1;
        ret
    }

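    /// Parse a switch case selector: either the `default` keyword or a case
    /// expression.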
    fn switch_value<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, ast::SwitchValue<'a>> {
        if let Token::Word("default") = lexer.peek().0 {
            let _ = lexer.next();
            return Ok(ast::SwitchValue::Default);
        }

        let expr = self.general_expression(lexer, ctx)?;
        Ok(ast::SwitchValue::Expr(expr))
    }

    /// Decide if we're looking at a construction expression, and return its
    /// type if so.
    ///
    /// If the identifier `word` is a [type-defining keyword], then return a
    /// [`ConstructorType`] value describing the type to build. Return an error
    /// if the type is not constructible (like `sampler`).
    ///
    /// If `word` isn't a type name, then return `None`.
    ///
    /// [type-defining keyword]: https://gpuweb.github.io/gpuweb/wgsl/#type-defining-keywords
    /// [`ConstructorType`]: ast::ConstructorType
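    ///
    /// For example, `vec3` alone produces a partial constructor
    /// (`ConstructorType::PartialVector`) whose component type will be
    /// inferred from the arguments, while `vec3f` and `vec3<f32>` both
    /// produce a fully determined `ConstructorType::Vector`.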
    fn constructor_type<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        word: &'a str,
        span: Span,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Option<ast::ConstructorType<'a>>> {
        if let Some(scalar) = conv::get_scalar_type(&lexer.enable_extensions, span, word)? {
            return Ok(Some(ast::ConstructorType::Scalar(scalar)));
        }

        let partial = match word {
            "vec2" => ast::ConstructorType::PartialVector {
                size: crate::VectorSize::Bi,
            },
            "vec2i" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::I32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec2u" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::U32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec2f" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec2h" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec3" => ast::ConstructorType::PartialVector {
                size: crate::VectorSize::Tri,
            },
            "vec3i" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::I32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec3u" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::U32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec3f" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec3h" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec4" => ast::ConstructorType::PartialVector {
                size: crate::VectorSize::Quad,
            },
            "vec4i" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::I32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec4u" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::U32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec4f" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec4h" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat2x2" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Bi,
                rows: crate::VectorSize::Bi,
            },
            "mat2x2f" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Bi,
                    rows: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat2x2h" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Bi,
                    rows: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat2x3" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Bi,
                rows: crate::VectorSize::Tri,
            },
            "mat2x3f" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Bi,
                    rows: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat2x3h" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Bi,
                    rows: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat2x4" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Bi,
                rows: crate::VectorSize::Quad,
            },
            "mat2x4f" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Bi,
                    rows: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat2x4h" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Bi,
                    rows: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat3x2" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Tri,
                rows: crate::VectorSize::Bi,
            },
            "mat3x2f" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Tri,
                    rows: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat3x2h" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Tri,
                    rows: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat3x3" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Tri,
                rows: crate::VectorSize::Tri,
            },
            "mat3x3f" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Tri,
                    rows: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat3x3h" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Tri,
                    rows: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat3x4" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Tri,
                rows: crate::VectorSize::Quad,
            },
            "mat3x4f" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Tri,
                    rows: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat3x4h" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Tri,
                    rows: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat4x2" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Quad,
                rows: crate::VectorSize::Bi,
            },
            "mat4x2f" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Quad,
                    rows: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat4x2h" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Quad,
                    rows: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat4x3" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Quad,
                rows: crate::VectorSize::Tri,
            },
            "mat4x3f" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Quad,
                    rows: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat4x3h" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Quad,
                    rows: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat4x4" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Quad,
                rows: crate::VectorSize::Quad,
            },
            "mat4x4f" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Quad,
                    rows: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat4x4h" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Quad,
                    rows: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "array" => ast::ConstructorType::PartialArray,
            "atomic"
            | "binding_array"
            | "sampler"
            | "sampler_comparison"
            | "texture_1d"
            | "texture_1d_array"
            | "texture_2d"
            | "texture_2d_array"
            | "texture_3d"
            | "texture_cube"
            | "texture_cube_array"
            | "texture_multisampled_2d"
            | "texture_multisampled_2d_array"
            | "texture_depth_2d"
            | "texture_depth_2d_array"
            | "texture_depth_cube"
            | "texture_depth_cube_array"
            | "texture_depth_multisampled_2d"
            | "texture_external"
            | "texture_storage_1d"
            | "texture_storage_1d_array"
            | "texture_storage_2d"
            | "texture_storage_2d_array"
            | "texture_storage_3d" => return Err(Box::new(Error::TypeNotConstructible(span))),
            _ => return Ok(None),
        };

        // parse component type if present
        match (lexer.peek().0, partial) {
            (Token::Paren('<'), ast::ConstructorType::PartialVector { size }) => {
                let (ty, ty_span) = self.singular_generic(lexer, ctx)?;
                Ok(Some(ast::ConstructorType::Vector { size, ty, ty_span }))
            }
            (Token::Paren('<'), ast::ConstructorType::PartialMatrix { columns, rows }) => {
                let (ty, ty_span) = self.singular_generic(lexer, ctx)?;
                Ok(Some(ast::ConstructorType::Matrix {
                    columns,
                    rows,
                    ty,
                    ty_span,
                }))
            }
            (Token::Paren('<'), ast::ConstructorType::PartialArray) => {
                lexer.expect_generic_paren('<')?;
                let base = self.type_decl(lexer, ctx)?;
                let size = if lexer.end_of_generic_arguments() {
                    let expr = self.const_generic_expression(lexer, ctx)?;
                    lexer.skip(Token::Separator(','));
                    ast::ArraySize::Constant(expr)
                } else {
                    ast::ArraySize::Dynamic
                };
                lexer.expect_generic_paren('>')?;

                Ok(Some(ast::ConstructorType::Array { base, size }))
            }
            (_, partial) => Ok(Some(partial)),
        }
    }

    /// Expects `name` to be consumed (not in lexer).
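    /// Parses a parenthesized, comma-separated list of argument expressions,
    /// leaving the lexer just past the closing `)`.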
    fn arguments<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Vec<Handle<ast::Expression<'a>>>> {
        self.push_rule_span(Rule::EnclosedExpr, lexer);
        lexer.open_arguments()?;
        let mut arguments = Vec::new();
        loop {
            if !arguments.is_empty() {
                if !lexer.next_argument()? {
                    break;
                }
            } else if lexer.skip(Token::Paren(')')) {
                break;
            }
            let arg = self.general_expression(lexer, ctx)?;
            arguments.push(arg);
        }

        self.pop_rule_span(lexer);
        Ok(arguments)
    }

    fn enclosed_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.push_rule_span(Rule::EnclosedExpr, lexer);
        let expr = self.general_expression(lexer, ctx)?;
        self.pop_rule_span(lexer);
        Ok(expr)
    }

    /// Expects [`Rule::PrimaryExpr`] or [`Rule::SingularExpr`] on top; does not pop it.
    /// Expects `name` to be consumed (not in lexer).
    fn function_call<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        name: &'a str,
        name_span: Span,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        assert!(self.rules.last().is_some());

        let expr = match name {
            // bitcast looks like a function call, but it's an operator and must be handled differently.
            "bitcast" => {
                let (to, span) = self.singular_generic(lexer, ctx)?;

                lexer.open_arguments()?;
                let expr = self.general_expression(lexer, ctx)?;
                lexer.close_arguments()?;

                ast::Expression::Bitcast {
                    expr,
                    to,
                    ty_span: span,
                }
            }
            // everything else must be handled later, since they can be hidden by user-defined functions.
            _ => {
                let arguments = self.arguments(lexer, ctx)?;
                ctx.unresolved.insert(ast::Dependency {
                    ident: name,
                    usage: name_span,
                });
                ast::Expression::Call {
                    function: ast::Ident {
                        name,
                        span: name_span,
                    },
                    arguments,
                }
            }
        };

        let span = self.peek_rule_span(lexer);
        let expr = ctx.expressions.append(expr, span);
        Ok(expr)
    }

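    /// Resolve `name` either to a local that is currently in scope or, failing
    /// that, record it as an unresolved dependency of the current global
    /// declaration.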
    fn ident_expr<'a>(
        &mut self,
        name: &'a str,
        name_span: Span,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> ast::IdentExpr<'a> {
        match ctx.local_table.lookup(name) {
            Some(&local) => ast::IdentExpr::Local(local),
            None => {
                ctx.unresolved.insert(ast::Dependency {
                    ident: name,
                    usage: name_span,
                });
                ast::IdentExpr::Unresolved(name)
            }
        }
    }

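    /// Parse a `primary_expression`: a parenthesized expression, a literal, a
    /// ray-tracing constant, a construction expression, a function call, or a
    /// plain identifier.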
    fn primary_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.push_rule_span(Rule::PrimaryExpr, lexer);
        const fn literal_ray_flag<'b>(flag: crate::RayFlag) -> ast::Expression<'b> {
            ast::Expression::Literal(ast::Literal::Number(Number::U32(flag.bits())))
        }
        const fn literal_ray_intersection<'b>(
            intersection: crate::RayQueryIntersection,
        ) -> ast::Expression<'b> {
            ast::Expression::Literal(ast::Literal::Number(Number::U32(intersection as u32)))
        }

        let expr = match lexer.peek() {
            (Token::Paren('('), _) => {
                let _ = lexer.next();
                let expr = self.enclosed_expression(lexer, ctx)?;
                lexer.expect(Token::Paren(')'))?;
                self.pop_rule_span(lexer);
                return Ok(expr);
            }
            (Token::Word("true"), _) => {
                let _ = lexer.next();
                ast::Expression::Literal(ast::Literal::Bool(true))
            }
            (Token::Word("false"), _) => {
                let _ = lexer.next();
                ast::Expression::Literal(ast::Literal::Bool(false))
            }
            (Token::Number(res), span) => {
                let _ = lexer.next();
                let num = res.map_err(|err| Error::BadNumber(span, err))?;

                if let Some(enable_extension) = num.requires_enable_extension() {
                    if !lexer.enable_extensions.contains(enable_extension) {
                        return Err(Box::new(Error::EnableExtensionNotEnabled {
                            kind: enable_extension.into(),
                            span,
                        }));
                    }
                }

                ast::Expression::Literal(ast::Literal::Number(num))
            }
            (Token::Word("RAY_FLAG_NONE"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::empty())
            }
            (Token::Word("RAY_FLAG_FORCE_OPAQUE"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::FORCE_OPAQUE)
            }
            (Token::Word("RAY_FLAG_FORCE_NO_OPAQUE"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::FORCE_NO_OPAQUE)
            }
            (Token::Word("RAY_FLAG_TERMINATE_ON_FIRST_HIT"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::TERMINATE_ON_FIRST_HIT)
            }
            (Token::Word("RAY_FLAG_SKIP_CLOSEST_HIT_SHADER"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::SKIP_CLOSEST_HIT_SHADER)
            }
            (Token::Word("RAY_FLAG_CULL_BACK_FACING"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::CULL_BACK_FACING)
            }
            (Token::Word("RAY_FLAG_CULL_FRONT_FACING"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::CULL_FRONT_FACING)
            }
            (Token::Word("RAY_FLAG_CULL_OPAQUE"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::CULL_OPAQUE)
            }
            (Token::Word("RAY_FLAG_CULL_NO_OPAQUE"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::CULL_NO_OPAQUE)
            }
            (Token::Word("RAY_FLAG_SKIP_TRIANGLES"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::SKIP_TRIANGLES)
            }
            (Token::Word("RAY_FLAG_SKIP_AABBS"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::SKIP_AABBS)
            }
            (Token::Word("RAY_QUERY_INTERSECTION_NONE"), _) => {
                let _ = lexer.next();
                literal_ray_intersection(crate::RayQueryIntersection::None)
            }
            (Token::Word("RAY_QUERY_INTERSECTION_TRIANGLE"), _) => {
                let _ = lexer.next();
                literal_ray_intersection(crate::RayQueryIntersection::Triangle)
            }
            (Token::Word("RAY_QUERY_INTERSECTION_GENERATED"), _) => {
                let _ = lexer.next();
                literal_ray_intersection(crate::RayQueryIntersection::Generated)
            }
            (Token::Word("RAY_QUERY_INTERSECTION_AABB"), _) => {
                let _ = lexer.next();
                literal_ray_intersection(crate::RayQueryIntersection::Aabb)
            }
            (Token::Word(word), span) => {
                let start = lexer.start_byte_offset();
                let _ = lexer.next();

                if let Some(ty) = self.constructor_type(lexer, word, span, ctx)? {
                    let ty_span = lexer.span_from(start);
                    let components = self.arguments(lexer, ctx)?;
                    ast::Expression::Construct {
                        ty,
                        ty_span,
                        components,
                    }
                } else if let Token::Paren('(') = lexer.peek().0 {
                    self.pop_rule_span(lexer);
                    return self.function_call(lexer, word, span, ctx);
                } else if word == "bitcast" {
                    self.pop_rule_span(lexer);
                    return self.function_call(lexer, word, span, ctx);
                } else {
                    let ident = self.ident_expr(word, span, ctx);
                    ast::Expression::Ident(ident)
                }
            }
            other => {
                return Err(Box::new(Error::Unexpected(
                    other.1,
                    ExpectedToken::PrimaryExpression,
                )))
            }
        };

        let span = self.pop_rule_span(lexer);
        let expr = ctx.expressions.append(expr, span);
        Ok(expr)
    }

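    /// Parse the postfix `.member` and `[index]` accessors that may follow
    /// `expr`, whose source text begins at `span_start`.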
    fn postfix<'a>(
        &mut self,
        span_start: usize,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
        expr: Handle<ast::Expression<'a>>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        let mut expr = expr;

        loop {
            let expression = match lexer.peek().0 {
                Token::Separator('.') => {
                    let _ = lexer.next();
                    let field = lexer.next_ident()?;

                    ast::Expression::Member { base: expr, field }
                }
                Token::Paren('[') => {
                    let _ = lexer.next();
                    let index = self.enclosed_expression(lexer, ctx)?;
                    lexer.expect(Token::Paren(']'))?;

                    ast::Expression::Index { base: expr, index }
                }
                _ => break,
            };

            let span = lexer.span_from(span_start);
            expr = ctx.expressions.append(expression, span);
        }

        Ok(expr)
    }

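    /// Parse an expression used as a generic argument (for example an array
    /// length), tracking [`Rule::GenericExpr`] so that the relational and
    /// shift parsers below know they are inside a template list.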
    fn const_generic_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.push_rule_span(Rule::GenericExpr, lexer);
        let expr = self.general_expression(lexer, ctx)?;
        self.pop_rule_span(lexer);
        Ok(expr)
    }

    /// Parse a `unary_expression`.
    fn unary_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.track_recursion(|this| {
            this.push_rule_span(Rule::UnaryExpr, lexer);
            //TODO: refactor this to avoid backing up
            let expr = match lexer.peek().0 {
                Token::Operation('-') => {
                    let _ = lexer.next();
                    let expr = this.unary_expression(lexer, ctx)?;
                    let expr = ast::Expression::Unary {
                        op: crate::UnaryOperator::Negate,
                        expr,
                    };
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                Token::Operation('!') => {
                    let _ = lexer.next();
                    let expr = this.unary_expression(lexer, ctx)?;
                    let expr = ast::Expression::Unary {
                        op: crate::UnaryOperator::LogicalNot,
                        expr,
                    };
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                Token::Operation('~') => {
                    let _ = lexer.next();
                    let expr = this.unary_expression(lexer, ctx)?;
                    let expr = ast::Expression::Unary {
                        op: crate::UnaryOperator::BitwiseNot,
                        expr,
                    };
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                Token::Operation('*') => {
                    let _ = lexer.next();
                    let expr = this.unary_expression(lexer, ctx)?;
                    let expr = ast::Expression::Deref(expr);
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                Token::Operation('&') => {
                    let _ = lexer.next();
                    let expr = this.unary_expression(lexer, ctx)?;
                    let expr = ast::Expression::AddrOf(expr);
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                _ => this.singular_expression(lexer, ctx)?,
            };

            this.pop_rule_span(lexer);
            Ok(expr)
        })
    }

    /// Parse a `lhs_expression`.
    ///
    /// LHS expressions only support the `&` and `*` operators and
    /// the `[]` and `.` postfix selectors.
    fn lhs_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.track_recursion(|this| {
            this.push_rule_span(Rule::LhsExpr, lexer);
            let start = lexer.start_byte_offset();
            let expr = match lexer.peek() {
                (Token::Operation('*'), _) => {
                    let _ = lexer.next();
                    let expr = this.lhs_expression(lexer, ctx)?;
                    let expr = ast::Expression::Deref(expr);
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                (Token::Operation('&'), _) => {
                    let _ = lexer.next();
                    let expr = this.lhs_expression(lexer, ctx)?;
                    let expr = ast::Expression::AddrOf(expr);
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                (Token::Paren('('), _) => {
                    let _ = lexer.next();
                    let primary_expr = this.lhs_expression(lexer, ctx)?;
                    lexer.expect(Token::Paren(')'))?;
                    this.postfix(start, lexer, ctx, primary_expr)?
                }
                (Token::Word(word), span) => {
                    let _ = lexer.next();
                    let ident = this.ident_expr(word, span, ctx);
                    let primary_expr = ctx.expressions.append(ast::Expression::Ident(ident), span);
                    this.postfix(start, lexer, ctx, primary_expr)?
                }
                _ => this.singular_expression(lexer, ctx)?,
            };

            this.pop_rule_span(lexer);
            Ok(expr)
        })
    }

    /// Parse a `singular_expression`.
    fn singular_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        let start = lexer.start_byte_offset();
        self.push_rule_span(Rule::SingularExpr, lexer);
        let primary_expr = self.primary_expression(lexer, ctx)?;
        let singular_expr = self.postfix(start, lexer, ctx, primary_expr)?;
        self.pop_rule_span(lexer);

        Ok(singular_expr)
    }

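    /// Parse an `equality_expression`, along with the relational, shift,
    /// additive, and multiplicative levels beneath it.
    ///
    /// When the innermost enclosing rule is [`Rule::GenericExpr`], only `<=`
    /// and `<<` are recognized at the relational and shift levels; `<`, `>`,
    /// `>=`, and `>>` are left for the template-list machinery.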
    fn equality_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        context: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        // equality_expression
        context.parse_binary_op(
            lexer,
            |token| match token {
                Token::LogicalOperation('=') => Some(crate::BinaryOperator::Equal),
                Token::LogicalOperation('!') => Some(crate::BinaryOperator::NotEqual),
                _ => None,
            },
            // relational_expression
            |lexer, context| {
                let enclosing = self.race_rules(Rule::GenericExpr, Rule::EnclosedExpr);
                context.parse_binary_op(
                    lexer,
                    match enclosing {
                        Some(Rule::GenericExpr) => |token| match token {
                            Token::LogicalOperation('<') => Some(crate::BinaryOperator::LessEqual),
                            _ => None,
                        },
                        _ => |token| match token {
                            Token::Paren('<') => Some(crate::BinaryOperator::Less),
                            Token::Paren('>') => Some(crate::BinaryOperator::Greater),
                            Token::LogicalOperation('<') => Some(crate::BinaryOperator::LessEqual),
                            Token::LogicalOperation('>') => {
                                Some(crate::BinaryOperator::GreaterEqual)
                            }
                            _ => None,
                        },
                    },
                    // shift_expression
                    |lexer, context| {
                        context.parse_binary_op(
                            lexer,
                            match enclosing {
                                Some(Rule::GenericExpr) => |token| match token {
                                    Token::ShiftOperation('<') => {
                                        Some(crate::BinaryOperator::ShiftLeft)
                                    }
                                    _ => None,
                                },
                                _ => |token| match token {
                                    Token::ShiftOperation('<') => {
                                        Some(crate::BinaryOperator::ShiftLeft)
                                    }
                                    Token::ShiftOperation('>') => {
                                        Some(crate::BinaryOperator::ShiftRight)
                                    }
                                    _ => None,
                                },
                            },
                            // additive_expression
                            |lexer, context| {
                                context.parse_binary_op(
                                    lexer,
                                    |token| match token {
                                        Token::Operation('+') => Some(crate::BinaryOperator::Add),
                                        Token::Operation('-') => {
                                            Some(crate::BinaryOperator::Subtract)
                                        }
                                        _ => None,
                                    },
                                    // multiplicative_expression
                                    |lexer, context| {
                                        context.parse_binary_op(
                                            lexer,
                                            |token| match token {
                                                Token::Operation('*') => {
                                                    Some(crate::BinaryOperator::Multiply)
                                                }
                                                Token::Operation('/') => {
                                                    Some(crate::BinaryOperator::Divide)
                                                }
                                                Token::Operation('%') => {
                                                    Some(crate::BinaryOperator::Modulo)
                                                }
                                                _ => None,
                                            },
                                            |lexer, context| self.unary_expression(lexer, context),
                                        )
                                    },
                                )
                            },
                        )
                    },
                )
            },
        )
    }

    fn general_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.general_expression_with_span(lexer, ctx)
            .map(|(expr, _)| expr)
    }

    fn general_expression_with_span<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        context: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, (Handle<ast::Expression<'a>>, Span)> {
        self.push_rule_span(Rule::GeneralExpr, lexer);
        // logical_or_expression
        let handle = context.parse_binary_op(
            lexer,
            |token| match token {
                Token::LogicalOperation('|') => Some(crate::BinaryOperator::LogicalOr),
                _ => None,
            },
            // logical_and_expression
            |lexer, context| {
                context.parse_binary_op(
                    lexer,
                    |token| match token {
                        Token::LogicalOperation('&') => Some(crate::BinaryOperator::LogicalAnd),
                        _ => None,
                    },
                    // inclusive_or_expression
                    |lexer, context| {
                        context.parse_binary_op(
                            lexer,
                            |token| match token {
                                Token::Operation('|') => Some(crate::BinaryOperator::InclusiveOr),
                                _ => None,
                            },
                            // exclusive_or_expression
                            |lexer, context| {
                                context.parse_binary_op(
                                    lexer,
                                    |token| match token {
                                        Token::Operation('^') => {
                                            Some(crate::BinaryOperator::ExclusiveOr)
                                        }
                                        _ => None,
                                    },
                                    // and_expression
                                    |lexer, context| {
                                        context.parse_binary_op(
                                            lexer,
                                            |token| match token {
                                                Token::Operation('&') => {
                                                    Some(crate::BinaryOperator::And)
                                                }
                                                _ => None,
                                            },
                                            |lexer, context| {
                                                self.equality_expression(lexer, context)
                                            },
                                        )
                                    },
                                )
                            },
                        )
                    },
                )
            },
        )?;
        Ok((handle, self.pop_rule_span(lexer)))
    }

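    /// Parse the rest of a `var` declaration, after the `var` keyword itself
    /// has already been consumed, e.g. (WGSL):
    ///
    /// ```text
    /// var<storage, read_write> data: array<u32>;
    /// ```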
1316    fn variable_decl<'a>(
1317        &mut self,
1318        lexer: &mut Lexer<'a>,
1319        ctx: &mut ExpressionContext<'a, '_, '_>,
1320    ) -> Result<'a, ast::GlobalVariable<'a>> {
1321        self.push_rule_span(Rule::VariableDecl, lexer);
1322        let mut space = crate::AddressSpace::Handle;
1323
1324        if lexer.skip(Token::Paren('<')) {
1325            let (class_str, span) = lexer.next_ident_with_span()?;
1326            space = match class_str {
1327                "storage" => {
1328                    let access = if lexer.skip(Token::Separator(',')) {
1329                        lexer.next_storage_access()?
1330                    } else {
1331                        // defaulting to `read`
1332                        crate::StorageAccess::LOAD
1333                    };
1334                    crate::AddressSpace::Storage { access }
1335                }
1336                _ => conv::map_address_space(class_str, span, &lexer.enable_extensions)?,
1337            };
1338            lexer.expect(Token::Paren('>'))?;
1339        }
1340        let name = lexer.next_ident()?;
1341
1342        let ty = if lexer.skip(Token::Separator(':')) {
1343            Some(self.type_decl(lexer, ctx)?)
1344        } else {
1345            None
1346        };
1347
1348        let init = if lexer.skip(Token::Operation('=')) {
1349            let handle = self.general_expression(lexer, ctx)?;
1350            Some(handle)
1351        } else {
1352            None
1353        };
1354        lexer.expect(Token::Separator(';'))?;
1355        self.pop_rule_span(lexer);
1356
1357        Ok(ast::GlobalVariable {
1358            name,
1359            space,
1360            binding: None,
1361            ty,
1362            init,
1363            doc_comments: Vec::new(),
1364        })
1365    }
1366
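    // Illustrative sketch of a struct body in the form this method accepts: a
    // brace-delimited, comma-separated member list, where each member may carry
    // attributes such as `@size`, `@align`, or a binding attribute:
    //
    //     {
    //         @location(0) position: vec3<f32>,
    //         @size(16) color: vec3<f32>,
    //         @align(16) intensity: f32,
    //     }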
1367    fn struct_body<'a>(
1368        &mut self,
1369        lexer: &mut Lexer<'a>,
1370        ctx: &mut ExpressionContext<'a, '_, '_>,
1371    ) -> Result<'a, Vec<ast::StructMember<'a>>> {
1372        let mut members = Vec::new();
1373        let mut member_names = FastHashSet::default();
1374
1375        lexer.expect(Token::Paren('{'))?;
1376        let mut ready = true;
1377        while !lexer.skip(Token::Paren('}')) {
1378            if !ready {
1379                return Err(Box::new(Error::Unexpected(
1380                    lexer.next().1,
1381                    ExpectedToken::Token(Token::Separator(',')),
1382                )));
1383            }
1384
1385            let doc_comments = lexer.accumulate_doc_comments();
1386
1387            let (mut size, mut align) = (ParsedAttribute::default(), ParsedAttribute::default());
1388            self.push_rule_span(Rule::Attribute, lexer);
1389            let mut bind_parser = BindingParser::default();
1390            while lexer.skip(Token::Attribute) {
1391                match lexer.next_ident_with_span()? {
1392                    ("size", name_span) => {
1393                        lexer.expect(Token::Paren('('))?;
1394                        let expr = self.general_expression(lexer, ctx)?;
1395                        lexer.expect(Token::Paren(')'))?;
1396                        size.set(expr, name_span)?;
1397                    }
1398                    ("align", name_span) => {
1399                        lexer.expect(Token::Paren('('))?;
1400                        let expr = self.general_expression(lexer, ctx)?;
1401                        lexer.expect(Token::Paren(')'))?;
1402                        align.set(expr, name_span)?;
1403                    }
1404                    (word, word_span) => bind_parser.parse(self, lexer, word, word_span, ctx)?,
1405                }
1406            }
1407
1408            let bind_span = self.pop_rule_span(lexer);
1409            let binding = bind_parser.finish(bind_span)?;
1410
1411            let name = lexer.next_ident()?;
1412            lexer.expect(Token::Separator(':'))?;
1413            let ty = self.type_decl(lexer, ctx)?;
1414            ready = lexer.skip(Token::Separator(','));
1415
1416            members.push(ast::StructMember {
1417                name,
1418                ty,
1419                binding,
1420                size: size.value,
1421                align: align.value,
1422                doc_comments,
1423            });
1424
1425            if !member_names.insert(name.name) {
1426                return Err(Box::new(Error::Redefinition {
1427                    previous: members
1428                        .iter()
1429                        .find(|x| x.name.name == name.name)
1430                        .map(|x| x.name.span)
1431                        .unwrap(),
1432                    current: name.span,
1433                }));
1434            }
1435        }
1436
1437        Ok(members)
1438    }
1439
1440    /// Parses `<T>`, returning `T` and the span of `T`
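    ///
    /// For example, given the `<f32>` in `vec3<f32>`, this yields the handle for
    /// `f32` together with its span. A trailing comma, as in `<f32,>`, is accepted.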
1441    fn singular_generic<'a>(
1442        &mut self,
1443        lexer: &mut Lexer<'a>,
1444        ctx: &mut ExpressionContext<'a, '_, '_>,
1445    ) -> Result<'a, (Handle<ast::Type<'a>>, Span)> {
1446        lexer.expect_generic_paren('<')?;
1447        let start = lexer.start_byte_offset();
1448        let ty = self.type_decl(lexer, ctx)?;
1449        let span = lexer.span_from(start);
1450        lexer.skip(Token::Separator(','));
1451        lexer.expect_generic_paren('>')?;
1452        Ok((ty, span))
1453    }
1454
1455    fn matrix_with_type<'a>(
1456        &mut self,
1457        lexer: &mut Lexer<'a>,
1458        ctx: &mut ExpressionContext<'a, '_, '_>,
1459        columns: crate::VectorSize,
1460        rows: crate::VectorSize,
1461    ) -> Result<'a, ast::Type<'a>> {
1462        let (ty, ty_span) = self.singular_generic(lexer, ctx)?;
1463        Ok(ast::Type::Matrix {
1464            columns,
1465            rows,
1466            ty,
1467            ty_span,
1468        })
1469    }
1470
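    // A sketch of the predeclared type names recognized below (illustrative, not
    // exhaustive):
    //
    //     vec3<f32>   vec4u   mat4x4<f32>   mat2x2h   atomic<u32>
    //     ptr<function, i32>   array<f32, 4>   binding_array<texture_2d<f32>, 8>
    //     sampler   texture_2d<f32>   texture_storage_2d<rgba8unorm, write>
    //     texture_depth_cube   acceleration_structure   ray_query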
1471    fn type_decl_impl<'a>(
1472        &mut self,
1473        lexer: &mut Lexer<'a>,
1474        word: &'a str,
1475        span: Span,
1476        ctx: &mut ExpressionContext<'a, '_, '_>,
1477    ) -> Result<'a, Option<ast::Type<'a>>> {
1478        if let Some(scalar) = conv::get_scalar_type(&lexer.enable_extensions, span, word)? {
1479            return Ok(Some(ast::Type::Scalar(scalar)));
1480        }
1481
1482        Ok(Some(match word {
1483            "vec2" => {
1484                let (ty, ty_span) = self.singular_generic(lexer, ctx)?;
1485                ast::Type::Vector {
1486                    size: crate::VectorSize::Bi,
1487                    ty,
1488                    ty_span,
1489                }
1490            }
1491            "vec2i" => ast::Type::Vector {
1492                size: crate::VectorSize::Bi,
1493                ty: ctx.new_scalar(Scalar::I32),
1494                ty_span: Span::UNDEFINED,
1495            },
1496            "vec2u" => ast::Type::Vector {
1497                size: crate::VectorSize::Bi,
1498                ty: ctx.new_scalar(Scalar::U32),
1499                ty_span: Span::UNDEFINED,
1500            },
1501            "vec2f" => ast::Type::Vector {
1502                size: crate::VectorSize::Bi,
1503                ty: ctx.new_scalar(Scalar::F32),
1504                ty_span: Span::UNDEFINED,
1505            },
1506            "vec2h" => ast::Type::Vector {
1507                size: crate::VectorSize::Bi,
1508                ty: ctx.new_scalar(Scalar::F16),
1509                ty_span: Span::UNDEFINED,
1510            },
1511            "vec3" => {
1512                let (ty, ty_span) = self.singular_generic(lexer, ctx)?;
1513                ast::Type::Vector {
1514                    size: crate::VectorSize::Tri,
1515                    ty,
1516                    ty_span,
1517                }
1518            }
1519            "vec3i" => ast::Type::Vector {
1520                size: crate::VectorSize::Tri,
1521                ty: ctx.new_scalar(Scalar::I32),
1522                ty_span: Span::UNDEFINED,
1523            },
1524            "vec3u" => ast::Type::Vector {
1525                size: crate::VectorSize::Tri,
1526                ty: ctx.new_scalar(Scalar::U32),
1527                ty_span: Span::UNDEFINED,
1528            },
1529            "vec3f" => ast::Type::Vector {
1530                size: crate::VectorSize::Tri,
1531                ty: ctx.new_scalar(Scalar::F32),
1532                ty_span: Span::UNDEFINED,
1533            },
1534            "vec3h" => ast::Type::Vector {
1535                size: crate::VectorSize::Tri,
1536                ty: ctx.new_scalar(Scalar::F16),
1537                ty_span: Span::UNDEFINED,
1538            },
1539            "vec4" => {
1540                let (ty, ty_span) = self.singular_generic(lexer, ctx)?;
1541                ast::Type::Vector {
1542                    size: crate::VectorSize::Quad,
1543                    ty,
1544                    ty_span,
1545                }
1546            }
1547            "vec4i" => ast::Type::Vector {
1548                size: crate::VectorSize::Quad,
1549                ty: ctx.new_scalar(Scalar::I32),
1550                ty_span: Span::UNDEFINED,
1551            },
1552            "vec4u" => ast::Type::Vector {
1553                size: crate::VectorSize::Quad,
1554                ty: ctx.new_scalar(Scalar::U32),
1555                ty_span: Span::UNDEFINED,
1556            },
1557            "vec4f" => ast::Type::Vector {
1558                size: crate::VectorSize::Quad,
1559                ty: ctx.new_scalar(Scalar::F32),
1560                ty_span: Span::UNDEFINED,
1561            },
1562            "vec4h" => ast::Type::Vector {
1563                size: crate::VectorSize::Quad,
1564                ty: ctx.new_scalar(Scalar::F16),
1565                ty_span: Span::UNDEFINED,
1566            },
1567            "mat2x2" => {
1568                self.matrix_with_type(lexer, ctx, crate::VectorSize::Bi, crate::VectorSize::Bi)?
1569            }
1570            "mat2x2f" => ast::Type::Matrix {
1571                columns: crate::VectorSize::Bi,
1572                rows: crate::VectorSize::Bi,
1573                ty: ctx.new_scalar(Scalar::F32),
1574                ty_span: Span::UNDEFINED,
1575            },
1576            "mat2x2h" => ast::Type::Matrix {
1577                columns: crate::VectorSize::Bi,
1578                rows: crate::VectorSize::Bi,
1579                ty: ctx.new_scalar(Scalar::F16),
1580                ty_span: Span::UNDEFINED,
1581            },
1582            "mat2x3" => {
1583                self.matrix_with_type(lexer, ctx, crate::VectorSize::Bi, crate::VectorSize::Tri)?
1584            }
1585            "mat2x3f" => ast::Type::Matrix {
1586                columns: crate::VectorSize::Bi,
1587                rows: crate::VectorSize::Tri,
1588                ty: ctx.new_scalar(Scalar::F32),
1589                ty_span: Span::UNDEFINED,
1590            },
1591            "mat2x3h" => ast::Type::Matrix {
1592                columns: crate::VectorSize::Bi,
1593                rows: crate::VectorSize::Tri,
1594                ty: ctx.new_scalar(Scalar::F16),
1595                ty_span: Span::UNDEFINED,
1596            },
1597            "mat2x4" => {
1598                self.matrix_with_type(lexer, ctx, crate::VectorSize::Bi, crate::VectorSize::Quad)?
1599            }
1600            "mat2x4f" => ast::Type::Matrix {
1601                columns: crate::VectorSize::Bi,
1602                rows: crate::VectorSize::Quad,
1603                ty: ctx.new_scalar(Scalar::F32),
1604                ty_span: Span::UNDEFINED,
1605            },
1606            "mat2x4h" => ast::Type::Matrix {
1607                columns: crate::VectorSize::Bi,
1608                rows: crate::VectorSize::Quad,
1609                ty: ctx.new_scalar(Scalar::F16),
1610                ty_span: Span::UNDEFINED,
1611            },
1612            "mat3x2" => {
1613                self.matrix_with_type(lexer, ctx, crate::VectorSize::Tri, crate::VectorSize::Bi)?
1614            }
1615            "mat3x2f" => ast::Type::Matrix {
1616                columns: crate::VectorSize::Tri,
1617                rows: crate::VectorSize::Bi,
1618                ty: ctx.new_scalar(Scalar::F32),
1619                ty_span: Span::UNDEFINED,
1620            },
1621            "mat3x2h" => ast::Type::Matrix {
1622                columns: crate::VectorSize::Tri,
1623                rows: crate::VectorSize::Bi,
1624                ty: ctx.new_scalar(Scalar::F16),
1625                ty_span: Span::UNDEFINED,
1626            },
1627            "mat3x3" => {
1628                self.matrix_with_type(lexer, ctx, crate::VectorSize::Tri, crate::VectorSize::Tri)?
1629            }
1630            "mat3x3f" => ast::Type::Matrix {
1631                columns: crate::VectorSize::Tri,
1632                rows: crate::VectorSize::Tri,
1633                ty: ctx.new_scalar(Scalar::F32),
1634                ty_span: Span::UNDEFINED,
1635            },
1636            "mat3x3h" => ast::Type::Matrix {
1637                columns: crate::VectorSize::Tri,
1638                rows: crate::VectorSize::Tri,
1639                ty: ctx.new_scalar(Scalar::F16),
1640                ty_span: Span::UNDEFINED,
1641            },
1642            "mat3x4" => {
1643                self.matrix_with_type(lexer, ctx, crate::VectorSize::Tri, crate::VectorSize::Quad)?
1644            }
1645            "mat3x4f" => ast::Type::Matrix {
1646                columns: crate::VectorSize::Tri,
1647                rows: crate::VectorSize::Quad,
1648                ty: ctx.new_scalar(Scalar::F32),
1649                ty_span: Span::UNDEFINED,
1650            },
1651            "mat3x4h" => ast::Type::Matrix {
1652                columns: crate::VectorSize::Tri,
1653                rows: crate::VectorSize::Quad,
1654                ty: ctx.new_scalar(Scalar::F16),
1655                ty_span: Span::UNDEFINED,
1656            },
1657            "mat4x2" => {
1658                self.matrix_with_type(lexer, ctx, crate::VectorSize::Quad, crate::VectorSize::Bi)?
1659            }
1660            "mat4x2f" => ast::Type::Matrix {
1661                columns: crate::VectorSize::Quad,
1662                rows: crate::VectorSize::Bi,
1663                ty: ctx.new_scalar(Scalar::F32),
1664                ty_span: Span::UNDEFINED,
1665            },
1666            "mat4x2h" => ast::Type::Matrix {
1667                columns: crate::VectorSize::Quad,
1668                rows: crate::VectorSize::Bi,
1669                ty: ctx.new_scalar(Scalar::F16),
1670                ty_span: Span::UNDEFINED,
1671            },
1672            "mat4x3" => {
1673                self.matrix_with_type(lexer, ctx, crate::VectorSize::Quad, crate::VectorSize::Tri)?
1674            }
1675            "mat4x3f" => ast::Type::Matrix {
1676                columns: crate::VectorSize::Quad,
1677                rows: crate::VectorSize::Tri,
1678                ty: ctx.new_scalar(Scalar::F32),
1679                ty_span: Span::UNDEFINED,
1680            },
1681            "mat4x3h" => ast::Type::Matrix {
1682                columns: crate::VectorSize::Quad,
1683                rows: crate::VectorSize::Tri,
1684                ty: ctx.new_scalar(Scalar::F16),
1685                ty_span: Span::UNDEFINED,
1686            },
1687            "mat4x4" => {
1688                self.matrix_with_type(lexer, ctx, crate::VectorSize::Quad, crate::VectorSize::Quad)?
1689            }
1690            "mat4x4f" => ast::Type::Matrix {
1691                columns: crate::VectorSize::Quad,
1692                rows: crate::VectorSize::Quad,
1693                ty: ctx.new_scalar(Scalar::F32),
1694                ty_span: Span::UNDEFINED,
1695            },
1696            "mat4x4h" => ast::Type::Matrix {
1697                columns: crate::VectorSize::Quad,
1698                rows: crate::VectorSize::Quad,
1699                ty: ctx.new_scalar(Scalar::F16),
1700                ty_span: Span::UNDEFINED,
1701            },
1702            "atomic" => {
1703                let scalar = lexer.next_scalar_generic()?;
1704                ast::Type::Atomic(scalar)
1705            }
1706            "ptr" => {
1707                lexer.expect_generic_paren('<')?;
1708                let (ident, span) = lexer.next_ident_with_span()?;
1709                let mut space = conv::map_address_space(ident, span, &lexer.enable_extensions)?;
1710                lexer.expect(Token::Separator(','))?;
1711                let base = self.type_decl(lexer, ctx)?;
1712                if let crate::AddressSpace::Storage { ref mut access } = space {
1713                    *access = if lexer.end_of_generic_arguments() {
1714                        let result = lexer.next_storage_access()?;
1715                        lexer.skip(Token::Separator(','));
1716                        result
1717                    } else {
1718                        crate::StorageAccess::LOAD
1719                    };
1720                }
1721                lexer.expect_generic_paren('>')?;
1722                ast::Type::Pointer { base, space }
1723            }
1724            "array" => {
1725                lexer.expect_generic_paren('<')?;
1726                let base = self.type_decl(lexer, ctx)?;
1727                let size = if lexer.end_of_generic_arguments() {
1728                    let size = self.const_generic_expression(lexer, ctx)?;
1729                    lexer.skip(Token::Separator(','));
1730                    ast::ArraySize::Constant(size)
1731                } else {
1732                    ast::ArraySize::Dynamic
1733                };
1734                lexer.expect_generic_paren('>')?;
1735
1736                ast::Type::Array { base, size }
1737            }
1738            "binding_array" => {
1739                lexer.expect_generic_paren('<')?;
1740                let base = self.type_decl(lexer, ctx)?;
1741                let size = if lexer.end_of_generic_arguments() {
1742                    let size = self.unary_expression(lexer, ctx)?;
1743                    lexer.skip(Token::Separator(','));
1744                    ast::ArraySize::Constant(size)
1745                } else {
1746                    ast::ArraySize::Dynamic
1747                };
1748                lexer.expect_generic_paren('>')?;
1749
1750                ast::Type::BindingArray { base, size }
1751            }
1752            "sampler" => ast::Type::Sampler { comparison: false },
1753            "sampler_comparison" => ast::Type::Sampler { comparison: true },
1754            "texture_1d" => {
1755                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1756                Self::check_texture_sample_type(scalar, span)?;
1757                ast::Type::Image {
1758                    dim: crate::ImageDimension::D1,
1759                    arrayed: false,
1760                    class: crate::ImageClass::Sampled {
1761                        kind: scalar.kind,
1762                        multi: false,
1763                    },
1764                }
1765            }
1766            "texture_1d_array" => {
1767                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1768                Self::check_texture_sample_type(scalar, span)?;
1769                ast::Type::Image {
1770                    dim: crate::ImageDimension::D1,
1771                    arrayed: true,
1772                    class: crate::ImageClass::Sampled {
1773                        kind: scalar.kind,
1774                        multi: false,
1775                    },
1776                }
1777            }
1778            "texture_2d" => {
1779                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1780                Self::check_texture_sample_type(scalar, span)?;
1781                ast::Type::Image {
1782                    dim: crate::ImageDimension::D2,
1783                    arrayed: false,
1784                    class: crate::ImageClass::Sampled {
1785                        kind: scalar.kind,
1786                        multi: false,
1787                    },
1788                }
1789            }
1790            "texture_2d_array" => {
1791                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1792                Self::check_texture_sample_type(scalar, span)?;
1793                ast::Type::Image {
1794                    dim: crate::ImageDimension::D2,
1795                    arrayed: true,
1796                    class: crate::ImageClass::Sampled {
1797                        kind: scalar.kind,
1798                        multi: false,
1799                    },
1800                }
1801            }
1802            "texture_3d" => {
1803                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1804                Self::check_texture_sample_type(scalar, span)?;
1805                ast::Type::Image {
1806                    dim: crate::ImageDimension::D3,
1807                    arrayed: false,
1808                    class: crate::ImageClass::Sampled {
1809                        kind: scalar.kind,
1810                        multi: false,
1811                    },
1812                }
1813            }
1814            "texture_cube" => {
1815                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1816                Self::check_texture_sample_type(scalar, span)?;
1817                ast::Type::Image {
1818                    dim: crate::ImageDimension::Cube,
1819                    arrayed: false,
1820                    class: crate::ImageClass::Sampled {
1821                        kind: scalar.kind,
1822                        multi: false,
1823                    },
1824                }
1825            }
1826            "texture_cube_array" => {
1827                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1828                Self::check_texture_sample_type(scalar, span)?;
1829                ast::Type::Image {
1830                    dim: crate::ImageDimension::Cube,
1831                    arrayed: true,
1832                    class: crate::ImageClass::Sampled {
1833                        kind: scalar.kind,
1834                        multi: false,
1835                    },
1836                }
1837            }
1838            "texture_multisampled_2d" => {
1839                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1840                Self::check_texture_sample_type(scalar, span)?;
1841                ast::Type::Image {
1842                    dim: crate::ImageDimension::D2,
1843                    arrayed: false,
1844                    class: crate::ImageClass::Sampled {
1845                        kind: scalar.kind,
1846                        multi: true,
1847                    },
1848                }
1849            }
1850            "texture_multisampled_2d_array" => {
1851                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1852                Self::check_texture_sample_type(scalar, span)?;
1853                ast::Type::Image {
1854                    dim: crate::ImageDimension::D2,
1855                    arrayed: true,
1856                    class: crate::ImageClass::Sampled {
1857                        kind: scalar.kind,
1858                        multi: true,
1859                    },
1860                }
1861            }
1862            "texture_depth_2d" => ast::Type::Image {
1863                dim: crate::ImageDimension::D2,
1864                arrayed: false,
1865                class: crate::ImageClass::Depth { multi: false },
1866            },
1867            "texture_depth_2d_array" => ast::Type::Image {
1868                dim: crate::ImageDimension::D2,
1869                arrayed: true,
1870                class: crate::ImageClass::Depth { multi: false },
1871            },
1872            "texture_depth_cube" => ast::Type::Image {
1873                dim: crate::ImageDimension::Cube,
1874                arrayed: false,
1875                class: crate::ImageClass::Depth { multi: false },
1876            },
1877            "texture_depth_cube_array" => ast::Type::Image {
1878                dim: crate::ImageDimension::Cube,
1879                arrayed: true,
1880                class: crate::ImageClass::Depth { multi: false },
1881            },
1882            "texture_depth_multisampled_2d" => ast::Type::Image {
1883                dim: crate::ImageDimension::D2,
1884                arrayed: false,
1885                class: crate::ImageClass::Depth { multi: true },
1886            },
1887            "texture_external" => ast::Type::Image {
1888                dim: crate::ImageDimension::D2,
1889                arrayed: false,
1890                class: crate::ImageClass::External,
1891            },
1892            "texture_storage_1d" => {
1893                let (format, access) = lexer.next_format_generic()?;
1894                ast::Type::Image {
1895                    dim: crate::ImageDimension::D1,
1896                    arrayed: false,
1897                    class: crate::ImageClass::Storage { format, access },
1898                }
1899            }
1900            "texture_storage_1d_array" => {
1901                let (format, access) = lexer.next_format_generic()?;
1902                ast::Type::Image {
1903                    dim: crate::ImageDimension::D1,
1904                    arrayed: true,
1905                    class: crate::ImageClass::Storage { format, access },
1906                }
1907            }
1908            "texture_storage_2d" => {
1909                let (format, access) = lexer.next_format_generic()?;
1910                ast::Type::Image {
1911                    dim: crate::ImageDimension::D2,
1912                    arrayed: false,
1913                    class: crate::ImageClass::Storage { format, access },
1914                }
1915            }
1916            "texture_storage_2d_array" => {
1917                let (format, access) = lexer.next_format_generic()?;
1918                ast::Type::Image {
1919                    dim: crate::ImageDimension::D2,
1920                    arrayed: true,
1921                    class: crate::ImageClass::Storage { format, access },
1922                }
1923            }
1924            "texture_storage_3d" => {
1925                let (format, access) = lexer.next_format_generic()?;
1926                ast::Type::Image {
1927                    dim: crate::ImageDimension::D3,
1928                    arrayed: false,
1929                    class: crate::ImageClass::Storage { format, access },
1930                }
1931            }
1932            "acceleration_structure" => {
1933                if !lexer
1934                    .enable_extensions
1935                    .contains(ImplementedEnableExtension::WgpuRayQuery)
1936                {
1937                    return Err(Box::new(Error::EnableExtensionNotEnabled {
1938                        kind: EnableExtension::Implemented(
1939                            ImplementedEnableExtension::WgpuRayQuery,
1940                        ),
1941                        span,
1942                    }));
1943                }
1944                let vertex_return = lexer.next_acceleration_structure_flags()?;
1945                if !lexer
1946                    .enable_extensions
1947                    .contains(ImplementedEnableExtension::WgpuRayQueryVertexReturn)
1948                    && vertex_return
1949                {
1950                    return Err(Box::new(Error::EnableExtensionNotEnabled {
1951                        kind: EnableExtension::Implemented(
1952                            ImplementedEnableExtension::WgpuRayQueryVertexReturn,
1953                        ),
1954                        span,
1955                    }));
1956                }
1957                ast::Type::AccelerationStructure { vertex_return }
1958            }
1959            "ray_query" => {
1960                if !lexer
1961                    .enable_extensions
1962                    .contains(ImplementedEnableExtension::WgpuRayQuery)
1963                {
1964                    return Err(Box::new(Error::EnableExtensionNotEnabled {
1965                        kind: EnableExtension::Implemented(
1966                            ImplementedEnableExtension::WgpuRayQuery,
1967                        ),
1968                        span,
1969                    }));
1970                }
1971                let vertex_return = lexer.next_acceleration_structure_flags()?;
1972                if !lexer
1973                    .enable_extensions
1974                    .contains(ImplementedEnableExtension::WgpuRayQueryVertexReturn)
1975                    && vertex_return
1976                {
1977                    return Err(Box::new(Error::EnableExtensionNotEnabled {
1978                        kind: EnableExtension::Implemented(
1979                            ImplementedEnableExtension::WgpuRayQueryVertexReturn,
1980                        ),
1981                        span,
1982                    }));
1983                }
1984                ast::Type::RayQuery { vertex_return }
1985            }
1986            "RayDesc" => {
1987                if !lexer
1988                    .enable_extensions
1989                    .contains(ImplementedEnableExtension::WgpuRayQuery)
1990                {
1991                    return Err(Box::new(Error::EnableExtensionNotEnabled {
1992                        kind: EnableExtension::Implemented(
1993                            ImplementedEnableExtension::WgpuRayQuery,
1994                        ),
1995                        span,
1996                    }));
1997                }
1998                ast::Type::RayDesc
1999            }
2000            "RayIntersection" => {
2001                if !lexer
2002                    .enable_extensions
2003                    .contains(ImplementedEnableExtension::WgpuRayQuery)
2004                {
2005                    return Err(Box::new(Error::EnableExtensionNotEnabled {
2006                        kind: EnableExtension::Implemented(
2007                            ImplementedEnableExtension::WgpuRayQuery,
2008                        ),
2009                        span,
2010                    }));
2011                }
2012                ast::Type::RayIntersection
2013            }
2014            _ => return Ok(None),
2015        }))
2016    }
2017
2018    fn check_texture_sample_type(scalar: Scalar, span: Span) -> Result<'static, ()> {
2019        use crate::ScalarKind::*;
2020        // Validate according to https://gpuweb.github.io/gpuweb/wgsl/#sampled-texture-type
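        // In practice this accepts the 32-bit `f32`/`i32`/`u32` sample types,
        // plus 64-bit unsigned integers, which naga permits beyond core WGSL
        // (presumably for its 64-bit image support).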
2021        match scalar {
2022            Scalar {
2023                kind: Float | Sint | Uint,
2024                width: 4,
2025            } => Ok(()),
2026            Scalar {
2027                kind: Uint,
2028                width: 8,
2029            } => Ok(()),
2030            _ => Err(Box::new(Error::BadTextureSampleType { span, scalar })),
2031        }
2032    }
2033
2034    /// Parse a type declaration, starting from its type name.
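    ///
    /// Predeclared names (e.g. `vec4f` or `array<u32, 4>`) are resolved directly;
    /// any other name is recorded as an unresolved dependency and parsed as a
    /// user-defined type.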
2035    fn type_decl<'a>(
2036        &mut self,
2037        lexer: &mut Lexer<'a>,
2038        ctx: &mut ExpressionContext<'a, '_, '_>,
2039    ) -> Result<'a, Handle<ast::Type<'a>>> {
2040        self.track_recursion(|this| {
2041            this.push_rule_span(Rule::TypeDecl, lexer);
2042
2043            let (name, span) = lexer.next_ident_with_span()?;
2044
2045            let ty = match this.type_decl_impl(lexer, name, span, ctx)? {
2046                Some(ty) => ty,
2047                None => {
2048                    ctx.unresolved.insert(ast::Dependency {
2049                        ident: name,
2050                        usage: span,
2051                    });
2052                    ast::Type::User(ast::Ident { name, span })
2053                }
2054            };
2055
2056            this.pop_rule_span(lexer);
2057
2058            let handle = ctx.types.append(ty, Span::UNDEFINED);
2059            Ok(handle)
2060        })
2061    }
2062
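    // Illustrative forms handled here, with the left-hand side already parsed by
    // the caller:
    //
    //     x = 1;    x += 2;    x <<= 3u;    x &= mask;
    //     x++;      x--;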
2063    fn assignment_op_and_rhs<'a>(
2064        &mut self,
2065        lexer: &mut Lexer<'a>,
2066        ctx: &mut ExpressionContext<'a, '_, '_>,
2067        block: &mut ast::Block<'a>,
2068        target: Handle<ast::Expression<'a>>,
2069        span_start: usize,
2070    ) -> Result<'a, ()> {
2071        use crate::BinaryOperator as Bo;
2072
2073        let op = lexer.next();
2074        let (op, value) = match op {
2075            (Token::Operation('='), _) => {
2076                let value = self.general_expression(lexer, ctx)?;
2077                (None, value)
2078            }
2079            (Token::AssignmentOperation(c), _) => {
2080                let op = match c {
2081                    '<' => Bo::ShiftLeft,
2082                    '>' => Bo::ShiftRight,
2083                    '+' => Bo::Add,
2084                    '-' => Bo::Subtract,
2085                    '*' => Bo::Multiply,
2086                    '/' => Bo::Divide,
2087                    '%' => Bo::Modulo,
2088                    '&' => Bo::And,
2089                    '|' => Bo::InclusiveOr,
2090                    '^' => Bo::ExclusiveOr,
2091                    // Note: `consume_token` shouldn't produce any other assignment ops
2092                    _ => unreachable!(),
2093                };
2094
2095                let value = self.general_expression(lexer, ctx)?;
2096                (Some(op), value)
2097            }
2098            token @ (Token::IncrementOperation | Token::DecrementOperation, _) => {
2099                let op = match token.0 {
2100                    Token::IncrementOperation => ast::StatementKind::Increment,
2101                    Token::DecrementOperation => ast::StatementKind::Decrement,
2102                    _ => unreachable!(),
2103                };
2104
2105                let span = lexer.span_from(span_start);
2106                block.stmts.push(ast::Statement {
2107                    kind: op(target),
2108                    span,
2109                });
2110                return Ok(());
2111            }
2112            _ => return Err(Box::new(Error::Unexpected(op.1, ExpectedToken::Assignment))),
2113        };
2114
2115        let span = lexer.span_from(span_start);
2116        block.stmts.push(ast::Statement {
2117            kind: ast::StatementKind::Assign { target, op, value },
2118            span,
2119        });
2120        Ok(())
2121    }
2122
2123    /// Parse an assignment statement (will also parse increment and decrement statements)
2124    fn assignment_statement<'a>(
2125        &mut self,
2126        lexer: &mut Lexer<'a>,
2127        ctx: &mut ExpressionContext<'a, '_, '_>,
2128        block: &mut ast::Block<'a>,
2129    ) -> Result<'a, ()> {
2130        let span_start = lexer.start_byte_offset();
2131        let target = self.lhs_expression(lexer, ctx)?;
2132        self.assignment_op_and_rhs(lexer, ctx, block, target, span_start)
2133    }
2134
2135    /// Parse a function call statement.
2136    /// Expects `ident` to have already been consumed (it is no longer in the lexer).
2137    fn function_statement<'a>(
2138        &mut self,
2139        lexer: &mut Lexer<'a>,
2140        ident: &'a str,
2141        ident_span: Span,
2142        span_start: usize,
2143        context: &mut ExpressionContext<'a, '_, '_>,
2144        block: &mut ast::Block<'a>,
2145    ) -> Result<'a, ()> {
2146        self.push_rule_span(Rule::SingularExpr, lexer);
2147
2148        context.unresolved.insert(ast::Dependency {
2149            ident,
2150            usage: ident_span,
2151        });
2152        let arguments = self.arguments(lexer, context)?;
2153        let span = lexer.span_from(span_start);
2154
2155        block.stmts.push(ast::Statement {
2156            kind: ast::StatementKind::Call {
2157                function: ast::Ident {
2158                    name: ident,
2159                    span: ident_span,
2160                },
2161                arguments,
2162            },
2163            span,
2164        });
2165
2166        self.pop_rule_span(lexer);
2167
2168        Ok(())
2169    }
2170
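    // Distinguishes a call statement such as `foo(x);` from assignments such as
    // `foo = x;`, `foo[0] += 1;`, or `foo.bar--;` by peeking one token past the
    // leading identifier (illustrative names).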
2171    fn function_call_or_assignment_statement<'a>(
2172        &mut self,
2173        lexer: &mut Lexer<'a>,
2174        context: &mut ExpressionContext<'a, '_, '_>,
2175        block: &mut ast::Block<'a>,
2176    ) -> Result<'a, ()> {
2177        let span_start = lexer.start_byte_offset();
2178        match lexer.peek() {
2179            (Token::Word(name), span) => {
2180                // A little hack for 2-token lookahead.
2181                let cloned = lexer.clone();
2182                let _ = lexer.next();
2183                match lexer.peek() {
2184                    (Token::Paren('('), _) => {
2185                        self.function_statement(lexer, name, span, span_start, context, block)
2186                    }
2187                    _ => {
2188                        *lexer = cloned;
2189                        self.assignment_statement(lexer, context, block)
2190                    }
2191                }
2192            }
2193            _ => self.assignment_statement(lexer, context, block),
2194        }
2195    }
2196
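    // A sketch of the statement forms dispatched on below (illustrative WGSL):
    //
    //     _ = f();                        // phony assignment
    //     let a = 1;    const b = 2;    var c: i32;
    //     return a;     if cond { }    switch s { default { } }
    //     loop { }      while cond { }    for (var i = 0; i < 4; i++) { }
    //     break;        continue;    discard;    const_assert b > 1;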
2197    fn statement<'a>(
2198        &mut self,
2199        lexer: &mut Lexer<'a>,
2200        ctx: &mut ExpressionContext<'a, '_, '_>,
2201        block: &mut ast::Block<'a>,
2202        brace_nesting_level: u8,
2203    ) -> Result<'a, ()> {
2204        self.track_recursion(|this| {
2205            this.push_rule_span(Rule::Statement, lexer);
2206            match lexer.peek() {
2207                (Token::Separator(';'), _) => {
2208                    let _ = lexer.next();
2209                    this.pop_rule_span(lexer);
2210                }
2211                (token, _) if is_start_of_compound_statement(token) => {
2212                    let (inner, span) = this.block(lexer, ctx, brace_nesting_level)?;
2213                    block.stmts.push(ast::Statement {
2214                        kind: ast::StatementKind::Block(inner),
2215                        span,
2216                    });
2217                    this.pop_rule_span(lexer);
2218                }
2219                (Token::Word(word), _) => {
2220                    let kind = match word {
2221                        "_" => {
2222                            let _ = lexer.next();
2223                            lexer.expect(Token::Operation('='))?;
2224                            let expr = this.general_expression(lexer, ctx)?;
2225                            lexer.expect(Token::Separator(';'))?;
2226
2227                            ast::StatementKind::Phony(expr)
2228                        }
2229                        "let" => {
2230                            let _ = lexer.next();
2231                            let name = lexer.next_ident()?;
2232
2233                            let given_ty = if lexer.skip(Token::Separator(':')) {
2234                                let ty = this.type_decl(lexer, ctx)?;
2235                                Some(ty)
2236                            } else {
2237                                None
2238                            };
2239                            lexer.expect(Token::Operation('='))?;
2240                            let expr_id = this.general_expression(lexer, ctx)?;
2241                            lexer.expect(Token::Separator(';'))?;
2242
2243                            let handle = ctx.declare_local(name)?;
2244                            ast::StatementKind::LocalDecl(ast::LocalDecl::Let(ast::Let {
2245                                name,
2246                                ty: given_ty,
2247                                init: expr_id,
2248                                handle,
2249                            }))
2250                        }
2251                        "const" => {
2252                            let _ = lexer.next();
2253                            let name = lexer.next_ident()?;
2254
2255                            let given_ty = if lexer.skip(Token::Separator(':')) {
2256                                let ty = this.type_decl(lexer, ctx)?;
2257                                Some(ty)
2258                            } else {
2259                                None
2260                            };
2261                            lexer.expect(Token::Operation('='))?;
2262                            let expr_id = this.general_expression(lexer, ctx)?;
2263                            lexer.expect(Token::Separator(';'))?;
2264
2265                            let handle = ctx.declare_local(name)?;
2266                            ast::StatementKind::LocalDecl(ast::LocalDecl::Const(ast::LocalConst {
2267                                name,
2268                                ty: given_ty,
2269                                init: expr_id,
2270                                handle,
2271                            }))
2272                        }
2273                        "var" => {
2274                            let _ = lexer.next();
2275
2276                            if lexer.skip(Token::Paren('<')) {
2277                                let (class_str, span) = lexer.next_ident_with_span()?;
2278                                if class_str != "function" {
2279                                    return Err(Box::new(Error::InvalidLocalVariableAddressSpace(
2280                                        span,
2281                                    )));
2282                                }
2283                                lexer.expect(Token::Paren('>'))?;
2284                            }
2285
2286                            let name = lexer.next_ident()?;
2287                            let ty = if lexer.skip(Token::Separator(':')) {
2288                                let ty = this.type_decl(lexer, ctx)?;
2289                                Some(ty)
2290                            } else {
2291                                None
2292                            };
2293
2294                            let init = if lexer.skip(Token::Operation('=')) {
2295                                let init = this.general_expression(lexer, ctx)?;
2296                                Some(init)
2297                            } else {
2298                                None
2299                            };
2300
2301                            lexer.expect(Token::Separator(';'))?;
2302
2303                            let handle = ctx.declare_local(name)?;
2304                            ast::StatementKind::LocalDecl(ast::LocalDecl::Var(ast::LocalVariable {
2305                                name,
2306                                ty,
2307                                init,
2308                                handle,
2309                            }))
2310                        }
2311                        "return" => {
2312                            let _ = lexer.next();
2313                            let value = if lexer.peek().0 != Token::Separator(';') {
2314                                let handle = this.general_expression(lexer, ctx)?;
2315                                Some(handle)
2316                            } else {
2317                                None
2318                            };
2319                            lexer.expect(Token::Separator(';'))?;
2320                            ast::StatementKind::Return { value }
2321                        }
2322                        "if" => {
2323                            let _ = lexer.next();
2324                            let condition = this.general_expression(lexer, ctx)?;
2325
2326                            let accept = this.block(lexer, ctx, brace_nesting_level)?.0;
2327
2328                            let mut elsif_stack = Vec::new();
2329                            let mut elseif_span_start = lexer.start_byte_offset();
2330                            let mut reject = loop {
2331                                if !lexer.skip(Token::Word("else")) {
2332                                    break ast::Block::default();
2333                                }
2334
2335                                if !lexer.skip(Token::Word("if")) {
2336                                    // ... else { ... }
2337                                    break this.block(lexer, ctx, brace_nesting_level)?.0;
2338                                }
2339
2340                                // ... else if (...) { ... }
2341                                let other_condition = this.general_expression(lexer, ctx)?;
2342                                let other_block = this.block(lexer, ctx, brace_nesting_level)?;
2343                                elsif_stack.push((elseif_span_start, other_condition, other_block));
2344                                elseif_span_start = lexer.start_byte_offset();
2345                            };
2346
2347                            // reverse-fold the else-if blocks
2348                            // Note: we may consider uplifting this to the IR
2349                            for (other_span_start, other_cond, other_block) in
2350                                elsif_stack.into_iter().rev()
2351                            {
2352                                let sub_stmt = ast::StatementKind::If {
2353                                    condition: other_cond,
2354                                    accept: other_block.0,
2355                                    reject,
2356                                };
2357                                reject = ast::Block::default();
2358                                let span = lexer.span_from(other_span_start);
2359                                reject.stmts.push(ast::Statement {
2360                                    kind: sub_stmt,
2361                                    span,
2362                                })
2363                            }
2364
2365                            ast::StatementKind::If {
2366                                condition,
2367                                accept,
2368                                reject,
2369                            }
2370                        }
2371                        "switch" => {
2372                            let _ = lexer.next();
2373                            let selector = this.general_expression(lexer, ctx)?;
2374                            let brace_span = lexer.expect_span(Token::Paren('{'))?;
2375                            let brace_nesting_level =
2376                                Self::increase_brace_nesting(brace_nesting_level, brace_span)?;
2377                            let mut cases = Vec::new();
2378
2379                            loop {
2380                                // cases + default
2381                                match lexer.next() {
2382                                    (Token::Word("case"), _) => {
2383                                        // parse a list of values
2384                                        let value = loop {
2385                                            let value = this.switch_value(lexer, ctx)?;
2386                                            if lexer.skip(Token::Separator(',')) {
2387                                                // list of values ends with ':' or a compound statement
2388                                                let next_token = lexer.peek().0;
2389                                                if next_token == Token::Separator(':')
2390                                                    || is_start_of_compound_statement(next_token)
2391                                                {
2392                                                    break value;
2393                                                }
2394                                            } else {
2395                                                break value;
2396                                            }
2397                                            cases.push(ast::SwitchCase {
2398                                                value,
2399                                                body: ast::Block::default(),
2400                                                fall_through: true,
2401                                            });
2402                                        };
2403
2404                                        lexer.skip(Token::Separator(':'));
2405
2406                                        let body = this.block(lexer, ctx, brace_nesting_level)?.0;
2407
2408                                        cases.push(ast::SwitchCase {
2409                                            value,
2410                                            body,
2411                                            fall_through: false,
2412                                        });
2413                                    }
2414                                    (Token::Word("default"), _) => {
2415                                        lexer.skip(Token::Separator(':'));
2416                                        let body = this.block(lexer, ctx, brace_nesting_level)?.0;
2417                                        cases.push(ast::SwitchCase {
2418                                            value: ast::SwitchValue::Default,
2419                                            body,
2420                                            fall_through: false,
2421                                        });
2422                                    }
2423                                    (Token::Paren('}'), _) => break,
2424                                    (_, span) => {
2425                                        return Err(Box::new(Error::Unexpected(
2426                                            span,
2427                                            ExpectedToken::SwitchItem,
2428                                        )))
2429                                    }
2430                                }
2431                            }
2432
2433                            ast::StatementKind::Switch { selector, cases }
2434                        }
2435                        "loop" => this.r#loop(lexer, ctx, brace_nesting_level)?,
2436                        "while" => {
2437                            let _ = lexer.next();
2438                            let mut body = ast::Block::default();
2439
2440                            let (condition, span) =
2441                                lexer.capture_span(|lexer| this.general_expression(lexer, ctx))?;
2442                            let mut reject = ast::Block::default();
2443                            reject.stmts.push(ast::Statement {
2444                                kind: ast::StatementKind::Break,
2445                                span,
2446                            });
2447
2448                            body.stmts.push(ast::Statement {
2449                                kind: ast::StatementKind::If {
2450                                    condition,
2451                                    accept: ast::Block::default(),
2452                                    reject,
2453                                },
2454                                span,
2455                            });
2456
2457                            let (block, span) = this.block(lexer, ctx, brace_nesting_level)?;
2458                            body.stmts.push(ast::Statement {
2459                                kind: ast::StatementKind::Block(block),
2460                                span,
2461                            });
2462
2463                            ast::StatementKind::Loop {
2464                                body,
2465                                continuing: ast::Block::default(),
2466                                break_if: None,
2467                            }
2468                        }
2469                        "for" => {
2470                            let _ = lexer.next();
2471                            lexer.expect(Token::Paren('('))?;
2472
2473                            ctx.local_table.push_scope();
2474
2475                            if !lexer.skip(Token::Separator(';')) {
2476                                let num_statements = block.stmts.len();
2477                                let (_, span) = {
2478                                    let ctx = &mut *ctx;
2479                                    let block = &mut *block;
2480                                    lexer.capture_span(|lexer| {
2481                                        this.statement(lexer, ctx, block, brace_nesting_level)
2482                                    })?
2483                                };
2484
2485                                if block.stmts.len() != num_statements {
2486                                    match block.stmts.last().unwrap().kind {
2487                                        ast::StatementKind::Call { .. }
2488                                        | ast::StatementKind::Assign { .. }
2489                                        | ast::StatementKind::LocalDecl(_) => {}
2490                                        _ => {
2491                                            return Err(Box::new(Error::InvalidForInitializer(
2492                                                span,
2493                                            )))
2494                                        }
2495                                    }
2496                                }
2497                            };
2498
2499                            let mut body = ast::Block::default();
2500                            if !lexer.skip(Token::Separator(';')) {
2501                                let (condition, span) =
2502                                    lexer.capture_span(|lexer| -> Result<'_, _> {
2503                                        let condition = this.general_expression(lexer, ctx)?;
2504                                        lexer.expect(Token::Separator(';'))?;
2505                                        Ok(condition)
2506                                    })?;
2507                                let mut reject = ast::Block::default();
2508                                reject.stmts.push(ast::Statement {
2509                                    kind: ast::StatementKind::Break,
2510                                    span,
2511                                });
2512                                body.stmts.push(ast::Statement {
2513                                    kind: ast::StatementKind::If {
2514                                        condition,
2515                                        accept: ast::Block::default(),
2516                                        reject,
2517                                    },
2518                                    span,
2519                                });
2520                            };
2521
2522                            let mut continuing = ast::Block::default();
2523                            if !lexer.skip(Token::Paren(')')) {
2524                                this.function_call_or_assignment_statement(
2525                                    lexer,
2526                                    ctx,
2527                                    &mut continuing,
2528                                )?;
2529                                lexer.expect(Token::Paren(')'))?;
2530                            }
2531
2532                            let (block, span) = this.block(lexer, ctx, brace_nesting_level)?;
2533                            body.stmts.push(ast::Statement {
2534                                kind: ast::StatementKind::Block(block),
2535                                span,
2536                            });
2537
2538                            ctx.local_table.pop_scope();
2539
2540                            ast::StatementKind::Loop {
2541                                body,
2542                                continuing,
2543                                break_if: None,
2544                            }
2545                        }
2546                        "break" => {
2547                            let (_, span) = lexer.next();
2548                            // Check if the next token is an `if`; this indicates
2549                            // that the user tried to write a `break if`, which
2550                            // is illegal in this position.
2551                            let (peeked_token, peeked_span) = lexer.peek();
2552                            if let Token::Word("if") = peeked_token {
2553                                let span = span.until(&peeked_span);
2554                                return Err(Box::new(Error::InvalidBreakIf(span)));
2555                            }
2556                            lexer.expect(Token::Separator(';'))?;
2557                            ast::StatementKind::Break
2558                        }
2559                        "continue" => {
2560                            let _ = lexer.next();
2561                            lexer.expect(Token::Separator(';'))?;
2562                            ast::StatementKind::Continue
2563                        }
2564                        "discard" => {
2565                            let _ = lexer.next();
2566                            lexer.expect(Token::Separator(';'))?;
2567                            ast::StatementKind::Kill
2568                        }
2569                        // https://www.w3.org/TR/WGSL/#const-assert-statement
2570                        "const_assert" => {
2571                            let _ = lexer.next();
2572                            // parentheses are optional
2573                            let paren = lexer.skip(Token::Paren('('));
2574
2575                            let condition = this.general_expression(lexer, ctx)?;
2576
2577                            if paren {
2578                                lexer.expect(Token::Paren(')'))?;
2579                            }
2580                            lexer.expect(Token::Separator(';'))?;
2581                            ast::StatementKind::ConstAssert(condition)
2582                        }
2583                        // assignment or a function call
2584                        _ => {
2585                            this.function_call_or_assignment_statement(lexer, ctx, block)?;
2586                            lexer.expect(Token::Separator(';'))?;
2587                            this.pop_rule_span(lexer);
2588                            return Ok(());
2589                        }
2590                    };
2591
2592                    let span = this.pop_rule_span(lexer);
2593                    block.stmts.push(ast::Statement { kind, span });
2594                }
2595                _ => {
2596                    this.assignment_statement(lexer, ctx, block)?;
2597                    lexer.expect(Token::Separator(';'))?;
2598                    this.pop_rule_span(lexer);
2599                }
2600            }
2601            Ok(())
2602        })
2603    }
2604
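    /// Parse a `loop` statement.
    ///
    /// As a sketch of the accepted syntax (per the WGSL `loop_statement`
    /// grammar), with the `continuing` block and its trailing `break if`
    /// both optional (the `i >= 4` condition is only illustrative):
    ///
    /// ```wgsl
    /// loop {
    ///     // body statements
    ///     continuing {
    ///         // continuing statements
    ///         break if i >= 4;
    ///     }
    /// }
    /// ```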
2605    fn r#loop<'a>(
2606        &mut self,
2607        lexer: &mut Lexer<'a>,
2608        ctx: &mut ExpressionContext<'a, '_, '_>,
2609        brace_nesting_level: u8,
2610    ) -> Result<'a, ast::StatementKind<'a>> {
2611        let _ = lexer.next();
2612        let mut body = ast::Block::default();
2613        let mut continuing = ast::Block::default();
2614        let mut break_if = None;
2615
2616        let brace_span = lexer.expect_span(Token::Paren('{'))?;
2617        let brace_nesting_level = Self::increase_brace_nesting(brace_nesting_level, brace_span)?;
2618
2619        ctx.local_table.push_scope();
2620
2621        loop {
2622            if lexer.skip(Token::Word("continuing")) {
2623                // Branch for the `continuing` block; it must be
2624                // the last thing in the loop body.
2625
2626                // Expect an opening brace to start the continuing block
2627                let brace_span = lexer.expect_span(Token::Paren('{'))?;
2628                let brace_nesting_level =
2629                    Self::increase_brace_nesting(brace_nesting_level, brace_span)?;
2630                loop {
2631                    if lexer.skip(Token::Word("break")) {
2632                        // Branch for the `break if` statement; it has the form
2633                        // `break if <expr>;` and must be the last statement in
2634                        // a continuing block.
2635
2636                        // The `break` must be followed by an `if` to form
2637                        // a `break if` statement.
2638                        lexer.expect(Token::Word("if"))?;
2639
2640                        let condition = self.general_expression(lexer, ctx)?;
2641                        // Set the condition of the break if to the newly parsed
2642                        // expression
2643                        break_if = Some(condition);
2644
2645                        // Expect a semicolon to close the statement
2646                        lexer.expect(Token::Separator(';'))?;
2647                        // Expect a closing brace to close the continuing block,
2648                        // since the break if must be the last statement
2649                        lexer.expect(Token::Paren('}'))?;
2650                        // Stop parsing the continuing block
2651                        break;
2652                    } else if lexer.skip(Token::Paren('}')) {
2653                        // If we encounter a closing brace, we have reached the
2654                        // end of the continuing block and should stop processing.
2655                        break;
2656                    } else {
2657                        // Otherwise try to parse a statement
2658                        self.statement(lexer, ctx, &mut continuing, brace_nesting_level)?;
2659                    }
2660                }
2661                // Since the continuing block must be the last part of the loop body,
2662                // we expect to see a closing brace to end the loop body
2663                lexer.expect(Token::Paren('}'))?;
2664                break;
2665            }
2666            if lexer.skip(Token::Paren('}')) {
2667            // If we encounter a closing brace, we have reached
2668            // the end of the loop body and should stop processing.
2669                break;
2670            }
2671            // Otherwise try to parse a statement
2672            self.statement(lexer, ctx, &mut body, brace_nesting_level)?;
2673        }
2674
2675        ctx.local_table.pop_scope();
2676
2677        Ok(ast::StatementKind::Loop {
2678            body,
2679            continuing,
2680            break_if,
2681        })
2682    }
2683
2684    /// compound_statement
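    ///
    /// Parse a brace-delimited block of statements in its own scope. Leading
    /// `@diagnostic(...)` attributes are parsed but are currently rejected
    /// with [`Error::DiagnosticAttributeNotYetImplementedAtParseSite`].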
2685    fn block<'a>(
2686        &mut self,
2687        lexer: &mut Lexer<'a>,
2688        ctx: &mut ExpressionContext<'a, '_, '_>,
2689        brace_nesting_level: u8,
2690    ) -> Result<'a, (ast::Block<'a>, Span)> {
2691        self.push_rule_span(Rule::Block, lexer);
2692
2693        ctx.local_table.push_scope();
2694
2695        let mut diagnostic_filters = DiagnosticFilterMap::new();
2696
2697        self.push_rule_span(Rule::Attribute, lexer);
2698        while lexer.skip(Token::Attribute) {
2699            let (name, name_span) = lexer.next_ident_with_span()?;
2700            if let Some(DirectiveKind::Diagnostic) = DirectiveKind::from_ident(name) {
2701                let filter = self.diagnostic_filter(lexer)?;
2702                let span = self.peek_rule_span(lexer);
2703                diagnostic_filters
2704                    .add(filter, span, ShouldConflictOnFullDuplicate::Yes)
2705                    .map_err(|e| Box::new(e.into()))?;
2706            } else {
2707                return Err(Box::new(Error::Unexpected(
2708                    name_span,
2709                    ExpectedToken::DiagnosticAttribute,
2710                )));
2711            }
2712        }
2713        self.pop_rule_span(lexer);
2714
2715        if !diagnostic_filters.is_empty() {
2716            return Err(Box::new(
2717                Error::DiagnosticAttributeNotYetImplementedAtParseSite {
2718                    site_name_plural: "compound statements",
2719                    spans: diagnostic_filters.spans().collect(),
2720                },
2721            ));
2722        }
2723
2724        let brace_span = lexer.expect_span(Token::Paren('{'))?;
2725        let brace_nesting_level = Self::increase_brace_nesting(brace_nesting_level, brace_span)?;
2726        let mut block = ast::Block::default();
2727        while !lexer.skip(Token::Paren('}')) {
2728            self.statement(lexer, ctx, &mut block, brace_nesting_level)?;
2729        }
2730
2731        ctx.local_table.pop_scope();
2732
2733        let span = self.pop_rule_span(lexer);
2734        Ok((block, span))
2735    }
2736
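    /// Parse the attributes that may precede a function parameter or return
    /// type, such as `@location(0)`, `@builtin(position)`, or
    /// `@interpolate(flat)`; these examples are only illustrative, and
    /// [`BindingParser`] decides which attributes are actually accepted.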
2737    fn varying_binding<'a>(
2738        &mut self,
2739        lexer: &mut Lexer<'a>,
2740        ctx: &mut ExpressionContext<'a, '_, '_>,
2741    ) -> Result<'a, Option<ast::Binding<'a>>> {
2742        let mut bind_parser = BindingParser::default();
2743        self.push_rule_span(Rule::Attribute, lexer);
2744
2745        while lexer.skip(Token::Attribute) {
2746            let (word, span) = lexer.next_ident_with_span()?;
2747            bind_parser.parse(self, lexer, word, span, ctx)?;
2748        }
2749
2750        let span = self.pop_rule_span(lexer);
2751        bind_parser.finish(span)
2752    }
2753
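    /// Parse a function declaration, starting just after the `fn` keyword.
    ///
    /// A sketch of the input handled here, with an illustrative function name
    /// and signature (attributes such as `@vertex` or `@must_use` are
    /// consumed by the caller, [`Self::global_decl`]):
    ///
    /// ```wgsl
    /// fn scale(v: vec3<f32>, by: f32) -> vec3<f32> {
    ///     return v * by;
    /// }
    /// ```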
2754    fn function_decl<'a>(
2755        &mut self,
2756        lexer: &mut Lexer<'a>,
2757        diagnostic_filter_leaf: Option<Handle<DiagnosticFilterNode>>,
2758        must_use: Option<Span>,
2759        out: &mut ast::TranslationUnit<'a>,
2760        dependencies: &mut FastIndexSet<ast::Dependency<'a>>,
2761    ) -> Result<'a, ast::Function<'a>> {
2762        self.push_rule_span(Rule::FunctionDecl, lexer);
2763        // read function name
2764        let fun_name = lexer.next_ident()?;
2765
2766        let mut locals = Arena::new();
2767
2768        let mut ctx = ExpressionContext {
2769            expressions: &mut out.expressions,
2770            local_table: &mut SymbolTable::default(),
2771            locals: &mut locals,
2772            types: &mut out.types,
2773            unresolved: dependencies,
2774        };
2775
2776        // start a scope that contains arguments as well as the function body
2777        ctx.local_table.push_scope();
2778
2779        // read parameter list
2780        let mut arguments = Vec::new();
2781        lexer.expect(Token::Paren('('))?;
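        // `ready` is true whenever the next token may begin another
        // parameter: at the start of the list and right after a comma.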
2782        let mut ready = true;
2783        while !lexer.skip(Token::Paren(')')) {
2784            if !ready {
2785                return Err(Box::new(Error::Unexpected(
2786                    lexer.next().1,
2787                    ExpectedToken::Token(Token::Separator(',')),
2788                )));
2789            }
2790            let binding = self.varying_binding(lexer, &mut ctx)?;
2791
2792            let param_name = lexer.next_ident()?;
2793
2794            lexer.expect(Token::Separator(':'))?;
2795            let param_type = self.type_decl(lexer, &mut ctx)?;
2796
2797            let handle = ctx.declare_local(param_name)?;
2798            arguments.push(ast::FunctionArgument {
2799                name: param_name,
2800                ty: param_type,
2801                binding,
2802                handle,
2803            });
2804            ready = lexer.skip(Token::Separator(','));
2805        }
2806        // read return type
2807        let result = if lexer.skip(Token::Arrow) {
2808            let binding = self.varying_binding(lexer, &mut ctx)?;
2809            let ty = self.type_decl(lexer, &mut ctx)?;
2810            let must_use = must_use.is_some();
2811            Some(ast::FunctionResult {
2812                ty,
2813                binding,
2814                must_use,
2815            })
2816        } else if let Some(must_use) = must_use {
2817            return Err(Box::new(Error::FunctionMustUseReturnsVoid(
2818                must_use,
2819                self.peek_rule_span(lexer),
2820            )));
2821        } else {
2822            None
2823        };
2824
2825        // do not use `self.block` here, since we must not push a new scope
2826        lexer.expect(Token::Paren('{'))?;
2827        let brace_nesting_level = 1;
2828        let mut body = ast::Block::default();
2829        while !lexer.skip(Token::Paren('}')) {
2830            self.statement(lexer, &mut ctx, &mut body, brace_nesting_level)?;
2831        }
2832
2833        ctx.local_table.pop_scope();
2834
2835        let fun = ast::Function {
2836            entry_point: None,
2837            name: fun_name,
2838            arguments,
2839            result,
2840            body,
2841            diagnostic_filter_leaf,
2842            doc_comments: Vec::new(),
2843        };
2844
2845        // done
2846        self.pop_rule_span(lexer);
2847
2848        Ok(fun)
2849    }
2850
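    /// Parse the identifier list of an `enable` or `requires` directive,
    /// passing each identifier to `handler`. The directive keyword itself has
    /// already been consumed; this reads up to and including the terminating
    /// `;`. An illustrative example of the input form:
    ///
    /// ```wgsl
    /// enable f16, dual_source_blending;
    /// ```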
2851    fn directive_ident_list<'a>(
2852        &self,
2853        lexer: &mut Lexer<'a>,
2854        handler: impl FnMut(&'a str, Span) -> Result<'a, ()>,
2855    ) -> Result<'a, ()> {
2856        let mut handler = handler;
2857        'next_arg: loop {
2858            let (ident, span) = lexer.next_ident_with_span()?;
2859            handler(ident, span)?;
2860
2861            let expected_token = match lexer.peek().0 {
2862                Token::Separator(',') => {
2863                    let _ = lexer.next();
2864                    if matches!(lexer.peek().0, Token::Word(..)) {
2865                        continue 'next_arg;
2866                    }
2867                    ExpectedToken::AfterIdentListComma
2868                }
2869                _ => ExpectedToken::AfterIdentListArg,
2870            };
2871
2872            if !matches!(lexer.next().0, Token::Separator(';')) {
2873                return Err(Box::new(Error::Unexpected(span, expected_token)));
2874            }
2875
2876            break Ok(());
2877        }
2878    }
2879
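    /// Parse a single module-scope declaration, together with the attributes
    /// that precede it: a `struct`, `alias`, `const`, `override`, `var`, `fn`,
    /// `const_assert`, or a bare `;`.
    ///
    /// A sketch of the kind of input this accepts (the names here are
    /// illustrative):
    ///
    /// ```wgsl
    /// @group(0) @binding(1)
    /// var<storage, read> lights: array<Light>;
    /// ```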
2880    fn global_decl<'a>(
2881        &mut self,
2882        lexer: &mut Lexer<'a>,
2883        out: &mut ast::TranslationUnit<'a>,
2884    ) -> Result<'a, ()> {
2885        let doc_comments = lexer.accumulate_doc_comments();
2886
2887        // read attributes
2888        let mut binding = None;
2889        let mut stage = ParsedAttribute::default();
2890        let mut compute_like_span = Span::new(0, 0);
2891        let mut workgroup_size = ParsedAttribute::default();
2892        let mut early_depth_test = ParsedAttribute::default();
2893        let (mut bind_index, mut bind_group) =
2894            (ParsedAttribute::default(), ParsedAttribute::default());
2895        let mut id = ParsedAttribute::default();
2896        let mut payload = ParsedAttribute::default();
2897        let mut mesh_output = ParsedAttribute::default();
2898
2899        let mut must_use: ParsedAttribute<Span> = ParsedAttribute::default();
2900
2901        let mut dependencies = FastIndexSet::default();
2902        let mut ctx = ExpressionContext {
2903            expressions: &mut out.expressions,
2904            local_table: &mut SymbolTable::default(),
2905            locals: &mut Arena::new(),
2906            types: &mut out.types,
2907            unresolved: &mut dependencies,
2908        };
2909        let mut diagnostic_filters = DiagnosticFilterMap::new();
2910        let ensure_no_diag_attrs = |on_what, filters: DiagnosticFilterMap| -> Result<()> {
2911            if filters.is_empty() {
2912                Ok(())
2913            } else {
2914                Err(Box::new(Error::DiagnosticAttributeNotSupported {
2915                    on_what,
2916                    spans: filters.spans().collect(),
2917                }))
2918            }
2919        };
2920
2921        self.push_rule_span(Rule::Attribute, lexer);
2922        while lexer.skip(Token::Attribute) {
2923            let (name, name_span) = lexer.next_ident_with_span()?;
2924            if let Some(DirectiveKind::Diagnostic) = DirectiveKind::from_ident(name) {
2925                let filter = self.diagnostic_filter(lexer)?;
2926                let span = self.peek_rule_span(lexer);
2927                diagnostic_filters
2928                    .add(filter, span, ShouldConflictOnFullDuplicate::Yes)
2929                    .map_err(|e| Box::new(e.into()))?;
2930                continue;
2931            }
2932            match name {
2933                "binding" => {
2934                    lexer.expect(Token::Paren('('))?;
2935                    bind_index.set(self.general_expression(lexer, &mut ctx)?, name_span)?;
2936                    lexer.expect(Token::Paren(')'))?;
2937                }
2938                "group" => {
2939                    lexer.expect(Token::Paren('('))?;
2940                    bind_group.set(self.general_expression(lexer, &mut ctx)?, name_span)?;
2941                    lexer.expect(Token::Paren(')'))?;
2942                }
2943                "id" => {
2944                    lexer.expect(Token::Paren('('))?;
2945                    id.set(self.general_expression(lexer, &mut ctx)?, name_span)?;
2946                    lexer.expect(Token::Paren(')'))?;
2947                }
2948                "vertex" => {
2949                    stage.set(ShaderStage::Vertex, name_span)?;
2950                }
2951                "fragment" => {
2952                    stage.set(ShaderStage::Fragment, name_span)?;
2953                }
2954                "compute" => {
2955                    stage.set(ShaderStage::Compute, name_span)?;
2956                    compute_like_span = name_span;
2957                }
2958                "task" => {
2959                    if !lexer
2960                        .enable_extensions
2961                        .contains(ImplementedEnableExtension::WgpuMeshShader)
2962                    {
2963                        return Err(Box::new(Error::EnableExtensionNotEnabled {
2964                            span: name_span,
2965                            kind: ImplementedEnableExtension::WgpuMeshShader.into(),
2966                        }));
2967                    }
2968                    stage.set(ShaderStage::Task, name_span)?;
2969                    compute_like_span = name_span;
2970                }
2971                "mesh" => {
2972                    if !lexer
2973                        .enable_extensions
2974                        .contains(ImplementedEnableExtension::WgpuMeshShader)
2975                    {
2976                        return Err(Box::new(Error::EnableExtensionNotEnabled {
2977                            span: name_span,
2978                            kind: ImplementedEnableExtension::WgpuMeshShader.into(),
2979                        }));
2980                    }
2981                    stage.set(ShaderStage::Mesh, name_span)?;
2982                    compute_like_span = name_span;
2983
2984                    lexer.expect(Token::Paren('('))?;
2985                    mesh_output.set(lexer.next_ident_with_span()?, name_span)?;
2986                    lexer.expect(Token::Paren(')'))?;
2987                }
2988                "payload" => {
2989                    if !lexer
2990                        .enable_extensions
2991                        .contains(ImplementedEnableExtension::WgpuMeshShader)
2992                    {
2993                        return Err(Box::new(Error::EnableExtensionNotEnabled {
2994                            span: name_span,
2995                            kind: ImplementedEnableExtension::WgpuMeshShader.into(),
2996                        }));
2997                    }
2998                    lexer.expect(Token::Paren('('))?;
2999                    payload.set(lexer.next_ident_with_span()?, name_span)?;
3000                    lexer.expect(Token::Paren(')'))?;
3001                }
3002                "workgroup_size" => {
3003                    lexer.expect(Token::Paren('('))?;
3004                    let mut new_workgroup_size = [None; 3];
3005                    for (i, size) in new_workgroup_size.iter_mut().enumerate() {
3006                        *size = Some(self.general_expression(lexer, &mut ctx)?);
3007                        match lexer.next() {
3008                            (Token::Paren(')'), _) => break,
3009                            (Token::Separator(','), _) if i != 2 => (),
3010                            other => {
3011                                return Err(Box::new(Error::Unexpected(
3012                                    other.1,
3013                                    ExpectedToken::WorkgroupSizeSeparator,
3014                                )))
3015                            }
3016                        }
3017                    }
3018                    workgroup_size.set(new_workgroup_size, name_span)?;
3019                }
3020                "early_depth_test" => {
3021                    lexer.expect(Token::Paren('('))?;
3022                    let (ident, ident_span) = lexer.next_ident_with_span()?;
3023                    let value = if ident == "force" {
3024                        crate::EarlyDepthTest::Force
3025                    } else {
3026                        crate::EarlyDepthTest::Allow {
3027                            conservative: conv::map_conservative_depth(ident, ident_span)?,
3028                        }
3029                    };
3030                    lexer.expect(Token::Paren(')'))?;
3031                    early_depth_test.set(value, name_span)?;
3032                }
3033                "must_use" => {
3034                    must_use.set(name_span, name_span)?;
3035                }
3036                _ => return Err(Box::new(Error::UnknownAttribute(name_span))),
3037            }
3038        }
3039
3040        let attrib_span = self.pop_rule_span(lexer);
3041        match (bind_group.value, bind_index.value) {
3042            (Some(group), Some(index)) => {
3043                binding = Some(ast::ResourceBinding {
3044                    group,
3045                    binding: index,
3046                });
3047            }
3048            (Some(_), None) => {
3049                return Err(Box::new(Error::MissingAttribute("binding", attrib_span)))
3050            }
3051            (None, Some(_)) => return Err(Box::new(Error::MissingAttribute("group", attrib_span))),
3052            (None, None) => {}
3053        }
3054
3055        // read item
3056        let start = lexer.start_byte_offset();
3057        let kind = match lexer.next() {
3058            (Token::Separator(';'), _) => {
3059                ensure_no_diag_attrs(
3060                    DiagnosticAttributeNotSupportedPosition::SemicolonInModulePosition,
3061                    diagnostic_filters,
3062                )?;
3063                None
3064            }
3065            (Token::Word(word), directive_span) if DirectiveKind::from_ident(word).is_some() => {
3066                return Err(Box::new(Error::DirectiveAfterFirstGlobalDecl {
3067                    directive_span,
3068                }));
3069            }
3070            (Token::Word("struct"), _) => {
3071                ensure_no_diag_attrs("`struct`s".into(), diagnostic_filters)?;
3072
3073                let name = lexer.next_ident()?;
3074
3075                let members = self.struct_body(lexer, &mut ctx)?;
3076
3077                Some(ast::GlobalDeclKind::Struct(ast::Struct {
3078                    name,
3079                    members,
3080                    doc_comments,
3081                }))
3082            }
3083            (Token::Word("alias"), _) => {
3084                ensure_no_diag_attrs("`alias`es".into(), diagnostic_filters)?;
3085
3086                let name = lexer.next_ident()?;
3087
3088                lexer.expect(Token::Operation('='))?;
3089                let ty = self.type_decl(lexer, &mut ctx)?;
3090                lexer.expect(Token::Separator(';'))?;
3091                Some(ast::GlobalDeclKind::Type(ast::TypeAlias { name, ty }))
3092            }
3093            (Token::Word("const"), _) => {
3094                ensure_no_diag_attrs("`const`s".into(), diagnostic_filters)?;
3095
3096                let name = lexer.next_ident()?;
3097
3098                let ty = if lexer.skip(Token::Separator(':')) {
3099                    let ty = self.type_decl(lexer, &mut ctx)?;
3100                    Some(ty)
3101                } else {
3102                    None
3103                };
3104
3105                lexer.expect(Token::Operation('='))?;
3106                let init = self.general_expression(lexer, &mut ctx)?;
3107                lexer.expect(Token::Separator(';'))?;
3108
3109                Some(ast::GlobalDeclKind::Const(ast::Const {
3110                    name,
3111                    ty,
3112                    init,
3113                    doc_comments,
3114                }))
3115            }
3116            (Token::Word("override"), _) => {
3117                ensure_no_diag_attrs("`override`s".into(), diagnostic_filters)?;
3118
3119                let name = lexer.next_ident()?;
3120
3121                let ty = if lexer.skip(Token::Separator(':')) {
3122                    Some(self.type_decl(lexer, &mut ctx)?)
3123                } else {
3124                    None
3125                };
3126
3127                let init = if lexer.skip(Token::Operation('=')) {
3128                    Some(self.general_expression(lexer, &mut ctx)?)
3129                } else {
3130                    None
3131                };
3132
3133                lexer.expect(Token::Separator(';'))?;
3134
3135                Some(ast::GlobalDeclKind::Override(ast::Override {
3136                    name,
3137                    id: id.value,
3138                    ty,
3139                    init,
3140                }))
3141            }
3142            (Token::Word("var"), _) => {
3143                ensure_no_diag_attrs("`var`s".into(), diagnostic_filters)?;
3144
3145                let mut var = self.variable_decl(lexer, &mut ctx)?;
3146                var.binding = binding.take();
3147                var.doc_comments = doc_comments;
3148                Some(ast::GlobalDeclKind::Var(var))
3149            }
3150            (Token::Word("fn"), _) => {
3151                let diagnostic_filter_leaf = Self::write_diagnostic_filters(
3152                    &mut out.diagnostic_filters,
3153                    diagnostic_filters,
3154                    out.diagnostic_filter_leaf,
3155                );
3156
3157                let function = self.function_decl(
3158                    lexer,
3159                    diagnostic_filter_leaf,
3160                    must_use.value,
3161                    out,
3162                    &mut dependencies,
3163                )?;
3164                Some(ast::GlobalDeclKind::Fn(ast::Function {
3165                    entry_point: if let Some(stage) = stage.value {
3166                        if stage.compute_like() && workgroup_size.value.is_none() {
3167                            return Err(Box::new(Error::MissingWorkgroupSize(compute_like_span)));
3168                        }
3169
3170                        Some(ast::EntryPoint {
3171                            stage,
3172                            early_depth_test: early_depth_test.value,
3173                            workgroup_size: workgroup_size.value,
3174                            mesh_output_variable: mesh_output.value,
3175                            task_payload: payload.value,
3176                        })
3177                    } else {
3178                        None
3179                    },
3180                    doc_comments,
3181                    ..function
3182                }))
3183            }
3184            (Token::Word("const_assert"), _) => {
3185                ensure_no_diag_attrs("`const_assert`s".into(), diagnostic_filters)?;
3186
3187                // parentheses are optional
3188                let paren = lexer.skip(Token::Paren('('));
3189
3190                let condition = self.general_expression(lexer, &mut ctx)?;
3191
3192                if paren {
3193                    lexer.expect(Token::Paren(')'))?;
3194                }
3195                lexer.expect(Token::Separator(';'))?;
3196                Some(ast::GlobalDeclKind::ConstAssert(condition))
3197            }
3198            (Token::End, _) => return Ok(()),
3199            other => {
3200                return Err(Box::new(Error::Unexpected(
3201                    other.1,
3202                    ExpectedToken::GlobalItem,
3203                )))
3204            }
3205        };
3206
3207        if let Some(kind) = kind {
3208            out.decls.append(
3209                ast::GlobalDecl { kind, dependencies },
3210                lexer.span_from(start),
3211            );
3212        }
3213
3214        if !self.rules.is_empty() {
3215            log::error!("Reached the end of global decl, but rule stack is not empty");
3216            log::error!("Rules: {:?}", self.rules);
3217            return Err(Box::new(Error::Internal("rule stack is not empty")));
3218        };
3219
3220        match binding {
3221            None => Ok(()),
3222            Some(_) => Err(Box::new(Error::Internal(
3223                "we had the attribute but no var?",
3224            ))),
3225        }
3226    }
3227
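    /// Parse `source` into an [`ast::TranslationUnit`].
    ///
    /// Parsing proceeds in three phases: module-level doc comments, then
    /// global directives (`diagnostic`, `enable`, and `requires`), and then
    /// global declarations until the end of the input.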
3228    pub fn parse<'a>(
3229        &mut self,
3230        source: &'a str,
3231        options: &Options,
3232    ) -> Result<'a, ast::TranslationUnit<'a>> {
3233        self.reset();
3234
3235        let mut lexer = Lexer::new(source, !options.parse_doc_comments);
3236        let mut tu = ast::TranslationUnit::default();
3237        let mut enable_extensions = EnableExtensions::empty();
3238        let mut diagnostic_filters = DiagnosticFilterMap::new();
3239
3240        // Parse module doc comments.
3241        tu.doc_comments = lexer.accumulate_module_doc_comments();
3242
3243        // Parse directives.
3244        while let Ok((ident, _directive_ident_span)) = lexer.peek_ident_with_span() {
3245            if let Some(kind) = DirectiveKind::from_ident(ident) {
3246                self.push_rule_span(Rule::Directive, &mut lexer);
3247                let _ = lexer.next_ident_with_span().unwrap();
3248                match kind {
3249                    DirectiveKind::Diagnostic => {
3250                        let diagnostic_filter = self.diagnostic_filter(&mut lexer)?;
3251                        let span = self.peek_rule_span(&lexer);
3252                        diagnostic_filters
3253                            .add(diagnostic_filter, span, ShouldConflictOnFullDuplicate::No)
3254                            .map_err(|e| Box::new(e.into()))?;
3255                        lexer.expect(Token::Separator(';'))?;
3256                    }
3257                    DirectiveKind::Enable => {
3258                        self.directive_ident_list(&mut lexer, |ident, span| {
3259                            let kind = EnableExtension::from_ident(ident, span)?;
3260                            let extension = match kind {
3261                                EnableExtension::Implemented(kind) => kind,
3262                                EnableExtension::Unimplemented(kind) => {
3263                                    return Err(Box::new(Error::EnableExtensionNotYetImplemented {
3264                                        kind,
3265                                        span,
3266                                    }))
3267                                }
3268                            };
3269                            enable_extensions.add(extension);
3270                            Ok(())
3271                        })?;
3272                    }
3273                    DirectiveKind::Requires => {
3274                        self.directive_ident_list(&mut lexer, |ident, span| {
3275                            match LanguageExtension::from_ident(ident) {
3276                                Some(LanguageExtension::Implemented(_kind)) => {
3277                                    // NOTE: No further validation is needed for an implemented
3278                                    // language extension, so we discard what we parsed here. If
3279                                    // we ever want to use this information for diagnostics, we
3280                                    // may need to record the requested extensions at this point.
3281                                    Ok(())
3282                                }
3283                                Some(LanguageExtension::Unimplemented(kind)) => {
3284                                    Err(Box::new(Error::LanguageExtensionNotYetImplemented {
3285                                        kind,
3286                                        span,
3287                                    }))
3288                                }
3289                                None => Err(Box::new(Error::UnknownLanguageExtension(span, ident))),
3290                            }
3291                        })?;
3292                    }
3293                }
3294                self.pop_rule_span(&lexer);
3295            } else {
3296                break;
3297            }
3298        }
3299
3300        lexer.enable_extensions = enable_extensions.clone();
3301        tu.enable_extensions = enable_extensions;
3302        tu.diagnostic_filter_leaf =
3303            Self::write_diagnostic_filters(&mut tu.diagnostic_filters, diagnostic_filters, None);
3304
3305        loop {
3306            match self.global_decl(&mut lexer, &mut tu) {
3307                Err(error) => return Err(error),
3308                Ok(()) => {
3309                    if lexer.peek().0 == Token::End {
3310                        break;
3311                    }
3312                }
3313            }
3314        }
3315
3316        Ok(tu)
3317    }
3318
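    /// Return `brace_nesting_level + 1`, or an error if that would exceed the
    /// WGSL limit on the nesting depth of brace-enclosed statements.
    /// `brace_span` is used only to point the diagnostic at the offending
    /// brace.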
3319    fn increase_brace_nesting(brace_nesting_level: u8, brace_span: Span) -> Result<'static, u8> {
3320        // From [spec.](https://gpuweb.github.io/gpuweb/wgsl/#limits):
3321        //
3322        // > § 2.4. Limits
3323        // >
3324        // > …
3325        // >
3326        // > Maximum nesting depth of brace-enclosed statements in a function[:] 127
3327        const BRACE_NESTING_MAXIMUM: u8 = 127;
3328        if brace_nesting_level + 1 > BRACE_NESTING_MAXIMUM {
3329            return Err(Box::new(Error::ExceededLimitForNestedBraces {
3330                span: brace_span,
3331                limit: BRACE_NESTING_MAXIMUM,
3332            }));
3333        }
3334        Ok(brace_nesting_level + 1)
3335    }
3336
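    /// Parse the parenthesized arguments of a `diagnostic` attribute or
    /// directive, e.g. `(off, derivative_uniformity)`; a prefixed rule name
    /// such as `vendor.rule` is also accepted (both examples are
    /// illustrative). A trailing comma is allowed, and an unknown standard
    /// rule name is reported as a warning rather than a hard error.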
3337    fn diagnostic_filter<'a>(&self, lexer: &mut Lexer<'a>) -> Result<'a, DiagnosticFilter> {
3338        lexer.expect(Token::Paren('('))?;
3339
3340        let (severity_control_name, severity_control_name_span) = lexer.next_ident_with_span()?;
3341        let new_severity = diagnostic_filter::Severity::from_wgsl_ident(severity_control_name)
3342            .ok_or(Error::DiagnosticInvalidSeverity {
3343                severity_control_name_span,
3344            })?;
3345
3346        lexer.expect(Token::Separator(','))?;
3347
3348        let (diagnostic_name_token, diagnostic_name_token_span) = lexer.next_ident_with_span()?;
3349        let triggering_rule = if lexer.skip(Token::Separator('.')) {
3350            let (ident, _span) = lexer.next_ident_with_span()?;
3351            FilterableTriggeringRule::User(Box::new([diagnostic_name_token.into(), ident.into()]))
3352        } else {
3353            let diagnostic_rule_name = diagnostic_name_token;
3354            let diagnostic_rule_name_span = diagnostic_name_token_span;
3355            if let Some(triggering_rule) =
3356                StandardFilterableTriggeringRule::from_wgsl_ident(diagnostic_rule_name)
3357            {
3358                FilterableTriggeringRule::Standard(triggering_rule)
3359            } else {
3360                diagnostic_filter::Severity::Warning.report_wgsl_parse_diag(
3361                    Box::new(Error::UnknownDiagnosticRuleName(diagnostic_rule_name_span)),
3362                    lexer.source,
3363                )?;
3364                FilterableTriggeringRule::Unknown(diagnostic_rule_name.into())
3365            }
3366        };
3367        let filter = DiagnosticFilter {
3368            triggering_rule,
3369            new_severity,
3370        };
3371        lexer.skip(Token::Separator(','));
3372        lexer.expect(Token::Paren(')'))?;
3373
3374        Ok(filter)
3375    }
3376
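    /// Append each filter in `filters` to `arena` as a
    /// [`DiagnosticFilterNode`] whose parent is the previously appended node
    /// (or `parent`, for the first one), and return the handle of the last
    /// node appended. If `filters` is empty, `parent` is returned unchanged.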
3377    pub(crate) fn write_diagnostic_filters(
3378        arena: &mut Arena<DiagnosticFilterNode>,
3379        filters: DiagnosticFilterMap,
3380        parent: Option<Handle<DiagnosticFilterNode>>,
3381    ) -> Option<Handle<DiagnosticFilterNode>> {
3382        filters
3383            .into_iter()
3384            .fold(parent, |parent, (triggering_rule, (new_severity, span))| {
3385                Some(arena.append(
3386                    DiagnosticFilterNode {
3387                        inner: DiagnosticFilter {
3388                            new_severity,
3389                            triggering_rule,
3390                        },
3391                        parent,
3392                    },
3393                    span,
3394                ))
3395            })
3396    }
3397}
3398
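/// Returns whether `token` can begin a compound statement, i.e. whether it is
/// an attribute marker or an opening brace.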
3399const fn is_start_of_compound_statement<'a>(token: Token<'a>) -> bool {
3400    matches!(token, Token::Attribute | Token::Paren('{'))
3401}