naga/front/wgsl/parse/mod.rs

use alloc::{boxed::Box, vec::Vec};
use directive::enable_extension::ImplementedEnableExtension;

use crate::diagnostic_filter::{
    self, DiagnosticFilter, DiagnosticFilterMap, DiagnosticFilterNode, FilterableTriggeringRule,
    ShouldConflictOnFullDuplicate, StandardFilterableTriggeringRule,
};
use crate::front::wgsl::error::{DiagnosticAttributeNotSupportedPosition, Error, ExpectedToken};
use crate::front::wgsl::parse::directive::enable_extension::{EnableExtension, EnableExtensions};
use crate::front::wgsl::parse::directive::language_extension::LanguageExtension;
use crate::front::wgsl::parse::directive::DirectiveKind;
use crate::front::wgsl::parse::lexer::{Lexer, Token};
use crate::front::wgsl::parse::number::Number;
use crate::front::wgsl::{Result, Scalar};
use crate::front::SymbolTable;
use crate::{Arena, FastHashSet, FastIndexSet, Handle, ShaderStage, Span};

pub mod ast;
pub mod conv;
pub mod directive;
pub mod lexer;
pub mod number;

/// State for constructing an AST expression.
///
/// Not to be confused with [`lower::ExpressionContext`], which is for producing
/// Naga IR from the AST we produce here.
///
/// [`lower::ExpressionContext`]: super::lower::ExpressionContext
struct ExpressionContext<'input, 'temp, 'out> {
    /// The [`TranslationUnit::expressions`] arena to which we should contribute
    /// expressions.
    ///
    /// [`TranslationUnit::expressions`]: ast::TranslationUnit::expressions
    expressions: &'out mut Arena<ast::Expression<'input>>,

    /// The [`TranslationUnit::types`] arena to which we should contribute new
    /// types.
    ///
    /// [`TranslationUnit::types`]: ast::TranslationUnit::types
    types: &'out mut Arena<ast::Type<'input>>,

    /// A map from identifiers in scope to the locals/arguments they represent.
    ///
    /// The handles refer to the [`locals`] arena; see that field's
    /// documentation for details.
    ///
    /// [`locals`]: ExpressionContext::locals
    local_table: &'temp mut SymbolTable<&'input str, Handle<ast::Local>>,

    /// Local variable and function argument arena for the function we're building.
    ///
    /// Note that the [`ast::Local`] here is actually a zero-sized type. This
    /// `Arena`'s only role is to assign a unique `Handle` to each local
    /// identifier, and track its definition's span for use in diagnostics. All
    /// the detailed information about locals - names, types, etc. - is kept in
    /// the [`LocalDecl`] statements we parsed from their declarations. For
    /// arguments, that information is kept in [`arguments`].
    ///
    /// In the AST, when an [`Ident`] expression refers to a local variable or
    /// argument, its [`IdentExpr`] holds the referent's `Handle<Local>` in this
    /// arena.
    ///
    /// During lowering, [`LocalDecl`] statements add entries to a per-function
    /// table that maps `Handle<Local>` values to their Naga representations,
    /// accessed via [`StatementContext::local_table`] and
    /// [`LocalExpressionContext::local_table`]. This table is then consulted when
    /// lowering subsequent [`Ident`] expressions.
    ///
    /// [`LocalDecl`]: ast::StatementKind::LocalDecl
    /// [`arguments`]: ast::Function::arguments
    /// [`Ident`]: ast::Expression::Ident
    /// [`IdentExpr`]: ast::IdentExpr
    /// [`StatementContext::local_table`]: super::lower::StatementContext::local_table
    /// [`LocalExpressionContext::local_table`]: super::lower::LocalExpressionContext::local_table
    locals: &'out mut Arena<ast::Local>,

    /// Identifiers used by the current global declaration that have no local definition.
    ///
    /// This becomes the [`GlobalDecl`]'s [`dependencies`] set.
    ///
    /// Note that we don't know at parse time what kind of [`GlobalDecl`] the
    /// name refers to. We can't look up names until we've seen the entire
    /// translation unit.
    ///
    /// [`GlobalDecl`]: ast::GlobalDecl
    /// [`dependencies`]: ast::GlobalDecl::dependencies
    unresolved: &'out mut FastIndexSet<ast::Dependency<'input>>,
}
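// A hedged illustration (not taken from the original source): given a WGSL
// function such as
//
//     fn scale(v: vec2f, factor: f32) -> vec2f {
//         let doubled = factor * 2.0;
//         return v * doubled * GLOBAL_SCALE;
//     }
//
// the parameters `v` and `factor` and the local `doubled` each receive a
// `Handle<ast::Local>` in `locals` and an entry in `local_table`, so later
// mentions of them become `IdentExpr::Local(..)`, while `GLOBAL_SCALE` has no
// local definition and is recorded in `unresolved` as a dependency of the
// enclosing global declaration.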

impl<'a> ExpressionContext<'a, '_, '_> {
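    /// Parse a left-associative run of a single binary-operator precedence
    /// level.
    ///
    /// `classifier` maps a peeked token to the operator it denotes at this
    /// level (returning `None` ends the run), and `parser` parses the
    /// operands, i.e. the next-tighter precedence level. For input like
    /// `a + b - c`, a classifier that accepts `+` and `-` produces the
    /// left-associative nesting `(a + b) - c`.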
    fn parse_binary_op(
        &mut self,
        lexer: &mut Lexer<'a>,
        classifier: impl Fn(Token<'a>) -> Option<crate::BinaryOperator>,
        mut parser: impl FnMut(&mut Lexer<'a>, &mut Self) -> Result<'a, Handle<ast::Expression<'a>>>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        let start = lexer.start_byte_offset();
        let mut accumulator = parser(lexer, self)?;
        while let Some(op) = classifier(lexer.peek().0) {
            let _ = lexer.next();
            let left = accumulator;
            let right = parser(lexer, self)?;
            accumulator = self.expressions.append(
                ast::Expression::Binary { op, left, right },
                lexer.span_from(start),
            );
        }
        Ok(accumulator)
    }

    fn declare_local(&mut self, name: ast::Ident<'a>) -> Result<'a, Handle<ast::Local>> {
        let handle = self.locals.append(ast::Local, name.span);
        if let Some(old) = self.local_table.add(name.name, handle) {
            Err(Box::new(Error::Redefinition {
                previous: self.locals.get_span(old),
                current: name.span,
            }))
        } else {
            Ok(handle)
        }
    }

    fn new_scalar(&mut self, scalar: Scalar) -> Handle<ast::Type<'a>> {
        self.types
            .append(ast::Type::Scalar(scalar), Span::UNDEFINED)
    }
}

/// Which grammar rule we are in the midst of parsing.
///
/// This is used for error checking. `Parser` maintains a stack of
/// these and (occasionally) checks that it is being pushed and popped
/// as expected.
#[derive(Copy, Clone, Debug, PartialEq)]
enum Rule {
    Attribute,
    VariableDecl,
    TypeDecl,
    FunctionDecl,
    Block,
    Statement,
    PrimaryExpr,
    SingularExpr,
    UnaryExpr,
    GeneralExpr,
    Directive,
    GenericExpr,
    EnclosedExpr,
    LhsExpr,
}

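/// A single attribute value that may be set at most once.
///
/// `set` records the value and reports `Error::RepeatedAttribute` if the same
/// attribute appears twice, e.g. `@location(0) @location(1)`.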
struct ParsedAttribute<T> {
    value: Option<T>,
}

impl<T> Default for ParsedAttribute<T> {
    fn default() -> Self {
        Self { value: None }
    }
}

impl<T> ParsedAttribute<T> {
    fn set(&mut self, value: T, name_span: Span) -> Result<'static, ()> {
        if self.value.is_some() {
            return Err(Box::new(Error::RepeatedAttribute(name_span)));
        }
        self.value = Some(value);
        Ok(())
    }
}

#[derive(Default)]
struct BindingParser<'a> {
    location: ParsedAttribute<Handle<ast::Expression<'a>>>,
    built_in: ParsedAttribute<crate::BuiltIn>,
    interpolation: ParsedAttribute<crate::Interpolation>,
    sampling: ParsedAttribute<crate::Sampling>,
    invariant: ParsedAttribute<bool>,
    blend_src: ParsedAttribute<Handle<ast::Expression<'a>>>,
    per_primitive: ParsedAttribute<()>,
}

impl<'a> BindingParser<'a> {
    fn parse(
        &mut self,
        parser: &mut Parser,
        lexer: &mut Lexer<'a>,
        name: &'a str,
        name_span: Span,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, ()> {
        match name {
            "location" => {
                lexer.expect(Token::Paren('('))?;
                self.location
                    .set(parser.general_expression(lexer, ctx)?, name_span)?;
                lexer.expect(Token::Paren(')'))?;
            }
            "builtin" => {
                lexer.expect(Token::Paren('('))?;
                let (raw, span) = lexer.next_ident_with_span()?;
                self.built_in.set(
                    conv::map_built_in(&lexer.enable_extensions, raw, span)?,
                    name_span,
                )?;
                lexer.expect(Token::Paren(')'))?;
            }
            "interpolate" => {
                lexer.expect(Token::Paren('('))?;
                let (raw, span) = lexer.next_ident_with_span()?;
                self.interpolation
                    .set(conv::map_interpolation(raw, span)?, name_span)?;
                if lexer.skip(Token::Separator(',')) {
                    let (raw, span) = lexer.next_ident_with_span()?;
                    self.sampling
                        .set(conv::map_sampling(raw, span)?, name_span)?;
                }
                lexer.expect(Token::Paren(')'))?;
            }

            "invariant" => {
                self.invariant.set(true, name_span)?;
            }
            "blend_src" => {
                lexer.require_enable_extension(
                    ImplementedEnableExtension::DualSourceBlending,
                    name_span,
                )?;

                lexer.expect(Token::Paren('('))?;
                self.blend_src
                    .set(parser.general_expression(lexer, ctx)?, name_span)?;
                lexer.skip(Token::Separator(','));
                lexer.expect(Token::Paren(')'))?;
            }
            "per_primitive" => {
                lexer.require_enable_extension(
                    ImplementedEnableExtension::WgpuMeshShader,
                    name_span,
                )?;
                self.per_primitive.set((), name_span)?;
            }
            _ => return Err(Box::new(Error::UnknownAttribute(name_span))),
        }
        Ok(())
    }

    fn finish(self, span: Span) -> Result<'a, Option<ast::Binding<'a>>> {
        match (
            self.location.value,
            self.built_in.value,
            self.interpolation.value,
            self.sampling.value,
            self.invariant.value.unwrap_or_default(),
            self.blend_src.value,
            self.per_primitive.value,
        ) {
            (None, None, None, None, false, None, None) => Ok(None),
            (Some(location), None, interpolation, sampling, false, blend_src, per_primitive) => {
                // Before handing over the completed `Module`, we call
                // `apply_default_interpolation` to ensure that the interpolation and
                // sampling have been explicitly specified on all vertex shader output and fragment
                // shader input user bindings, so leaving them potentially `None` here is fine.
                Ok(Some(ast::Binding::Location {
                    location,
                    interpolation,
                    sampling,
                    blend_src,
                    per_primitive: per_primitive.is_some(),
                }))
            }
            (None, Some(crate::BuiltIn::Position { .. }), None, None, invariant, None, None) => {
                Ok(Some(ast::Binding::BuiltIn(crate::BuiltIn::Position {
                    invariant,
                })))
            }
            (None, Some(built_in), None, None, false, None, None) => {
                Ok(Some(ast::Binding::BuiltIn(built_in)))
            }
            (_, _, _, _, _, _, _) => Err(Box::new(Error::InconsistentBinding(span))),
        }
    }
}
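// A hedged example of the attribute groups `BindingParser` accepts (WGSL shown
// in a comment only, not taken from the original source):
//
//     @location(0) @interpolate(perspective, centroid) color: vec4f,  // Binding::Location
//     @builtin(position) @invariant position: vec4f,                  // Binding::BuiltIn
//
// Mixing the two groups, e.g. `@location(0) @builtin(position)`, falls into
// the final arm of `finish` and is reported as `Error::InconsistentBinding`.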

/// Configuration for the whole parser run.
pub struct Options {
    /// Controls whether the parser should parse doc comments.
    pub parse_doc_comments: bool,
}

impl Options {
    /// Creates a new [`Options`] with doc comment parsing disabled.
    pub const fn new() -> Self {
        Options {
            parse_doc_comments: false,
        }
    }
}
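// A minimal usage sketch (assumed caller-side code, not part of this module):
//
//     let options = Options {
//         parse_doc_comments: true,
//     };
//
// `Options::new()` produces the same struct with doc comment parsing disabled.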

pub struct Parser {
    rules: Vec<(Rule, usize)>,
    recursion_depth: u32,
}

impl Parser {
    pub const fn new() -> Self {
        Parser {
            rules: Vec::new(),
            recursion_depth: 0,
        }
    }

    fn reset(&mut self) {
        self.rules.clear();
        self.recursion_depth = 0;
    }

    fn push_rule_span(&mut self, rule: Rule, lexer: &mut Lexer<'_>) {
        self.rules.push((rule, lexer.start_byte_offset()));
    }

    fn pop_rule_span(&mut self, lexer: &Lexer<'_>) -> Span {
        let (_, initial) = self.rules.pop().unwrap();
        lexer.span_from(initial)
    }

    fn peek_rule_span(&mut self, lexer: &Lexer<'_>) -> Span {
        let &(_, initial) = self.rules.last().unwrap();
        lexer.span_from(initial)
    }

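    /// Return whichever of `rule0` and `rule1` sits closest to the top of the
    /// rule stack, or `None` if neither is currently being parsed.
    ///
    /// This is how the expression parser asks "am I more immediately inside a
    /// template list or inside parentheses?" when deciding how to treat `>`
    /// (see `equality_expression` below).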
    fn race_rules(&self, rule0: Rule, rule1: Rule) -> Option<Rule> {
        Some(
            self.rules
                .iter()
                .rev()
                .find(|&x| x.0 == rule0 || x.0 == rule1)?
                .0,
        )
    }

    fn track_recursion<'a, F, R>(&mut self, f: F) -> Result<'a, R>
    where
        F: FnOnce(&mut Self) -> Result<'a, R>,
    {
        self.recursion_depth += 1;
        if self.recursion_depth >= 256 {
            return Err(Box::new(Error::Internal("Parser recursion limit exceeded")));
        }
        let ret = f(self);
        self.recursion_depth -= 1;
        ret
    }

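    /// Parse a single switch case selector: either the keyword `default` or a
    /// case expression such as `1` or `2 + FOO`.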
    fn switch_value<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, ast::SwitchValue<'a>> {
        if let Token::Word("default") = lexer.peek().0 {
            let _ = lexer.next();
            return Ok(ast::SwitchValue::Default);
        }

        let expr = self.general_expression(lexer, ctx)?;
        Ok(ast::SwitchValue::Expr(expr))
    }

    /// Decide if we're looking at a construction expression, and return its
    /// type if so.
    ///
    /// If the identifier `word` is a [type-defining keyword], then return a
    /// [`ConstructorType`] value describing the type to build. Return an error
    /// if the type is not constructible (like `sampler`).
    ///
    /// If `word` isn't a type name, then return `None`.
    ///
    /// [type-defining keyword]: https://gpuweb.github.io/gpuweb/wgsl/#type-defining-keywords
    /// [`ConstructorType`]: ast::ConstructorType
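    ///
    /// A few illustrative inputs (a summary of the match below, not an
    /// exhaustive list):
    ///
    /// * `vec3f(...)` yields a fully specified `ConstructorType::Vector`.
    /// * `vec3<f32>(...)` starts as a `PartialVector` and picks up its
    ///   component type from the `<f32>` template list.
    /// * `vec3(1.0, 2.0, 3.0)` stays partial; the component type is inferred
    ///   later from the arguments.
    /// * `sampler(...)` is rejected with `Error::TypeNotConstructible`.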
    fn constructor_type<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        word: &'a str,
        span: Span,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Option<ast::ConstructorType<'a>>> {
        if let Some(scalar) = conv::get_scalar_type(&lexer.enable_extensions, span, word)? {
            return Ok(Some(ast::ConstructorType::Scalar(scalar)));
        }

        let partial = match word {
            "vec2" => ast::ConstructorType::PartialVector {
                size: crate::VectorSize::Bi,
            },
            "vec2i" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::I32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec2u" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::U32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec2f" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec2h" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec3" => ast::ConstructorType::PartialVector {
                size: crate::VectorSize::Tri,
            },
            "vec3i" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::I32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec3u" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::U32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec3f" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec3h" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec4" => ast::ConstructorType::PartialVector {
                size: crate::VectorSize::Quad,
            },
            "vec4i" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::I32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec4u" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::U32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec4f" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec4h" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat2x2" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Bi,
                rows: crate::VectorSize::Bi,
            },
            "mat2x2f" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Bi,
                    rows: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat2x2h" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Bi,
                    rows: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat2x3" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Bi,
                rows: crate::VectorSize::Tri,
            },
            "mat2x3f" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Bi,
                    rows: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat2x3h" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Bi,
                    rows: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat2x4" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Bi,
                rows: crate::VectorSize::Quad,
            },
            "mat2x4f" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Bi,
                    rows: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat2x4h" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Bi,
                    rows: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat3x2" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Tri,
                rows: crate::VectorSize::Bi,
            },
            "mat3x2f" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Tri,
                    rows: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat3x2h" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Tri,
                    rows: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat3x3" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Tri,
                rows: crate::VectorSize::Tri,
            },
            "mat3x3f" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Tri,
                    rows: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat3x3h" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Tri,
                    rows: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat3x4" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Tri,
                rows: crate::VectorSize::Quad,
            },
            "mat3x4f" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Tri,
                    rows: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat3x4h" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Tri,
                    rows: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat4x2" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Quad,
                rows: crate::VectorSize::Bi,
            },
            "mat4x2f" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Quad,
                    rows: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat4x2h" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Quad,
                    rows: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat4x3" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Quad,
                rows: crate::VectorSize::Tri,
            },
            "mat4x3f" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Quad,
                    rows: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat4x3h" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Quad,
                    rows: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat4x4" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Quad,
                rows: crate::VectorSize::Quad,
            },
            "mat4x4f" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Quad,
                    rows: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat4x4h" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Quad,
                    rows: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "coop_mat8x8" => {
                lexer.require_enable_extension(
                    ImplementedEnableExtension::WgpuCooperativeMatrix,
                    span,
                )?;
                ast::ConstructorType::PartialCooperativeMatrix {
                    columns: crate::CooperativeSize::Eight,
                    rows: crate::CooperativeSize::Eight,
                }
            }
            "coop_mat16x16" => {
                lexer.require_enable_extension(
                    ImplementedEnableExtension::WgpuCooperativeMatrix,
                    span,
                )?;
                ast::ConstructorType::PartialCooperativeMatrix {
                    columns: crate::CooperativeSize::Sixteen,
                    rows: crate::CooperativeSize::Sixteen,
                }
            }
            "array" => ast::ConstructorType::PartialArray,
            "atomic"
            | "binding_array"
            | "sampler"
            | "sampler_comparison"
            | "texture_1d"
            | "texture_1d_array"
            | "texture_2d"
            | "texture_2d_array"
            | "texture_3d"
            | "texture_cube"
            | "texture_cube_array"
            | "texture_multisampled_2d"
            | "texture_multisampled_2d_array"
            | "texture_depth_2d"
            | "texture_depth_2d_array"
            | "texture_depth_cube"
            | "texture_depth_cube_array"
            | "texture_depth_multisampled_2d"
            | "texture_external"
            | "texture_storage_1d"
            | "texture_storage_1d_array"
            | "texture_storage_2d"
            | "texture_storage_2d_array"
            | "texture_storage_3d" => return Err(Box::new(Error::TypeNotConstructible(span))),
            _ => return Ok(None),
        };

        // parse component type if present
        match (lexer.peek().0, partial) {
            (Token::Paren('<'), ast::ConstructorType::PartialVector { size }) => {
                let (ty, ty_span) = self.singular_generic(lexer, ctx)?;
                Ok(Some(ast::ConstructorType::Vector { size, ty, ty_span }))
            }
            (Token::Paren('<'), ast::ConstructorType::PartialMatrix { columns, rows }) => {
                let (ty, ty_span) = self.singular_generic(lexer, ctx)?;
                Ok(Some(ast::ConstructorType::Matrix {
                    columns,
                    rows,
                    ty,
                    ty_span,
                }))
            }
            (
                Token::Paren('<'),
                ast::ConstructorType::PartialCooperativeMatrix { columns, rows },
            ) => {
                let (ty, ty_span, role) = self.cooperative_scalar_and_role(lexer, ctx)?;
                Ok(Some(ast::ConstructorType::CooperativeMatrix {
                    columns,
                    rows,
                    ty,
                    ty_span,
                    role,
                }))
            }
            (Token::Paren('<'), ast::ConstructorType::PartialArray) => {
                lexer.expect_generic_paren('<')?;
                let base = self.type_decl(lexer, ctx)?;
                let size = if lexer.end_of_generic_arguments() {
                    let expr = self.const_generic_expression(lexer, ctx)?;
                    lexer.skip(Token::Separator(','));
                    ast::ArraySize::Constant(expr)
                } else {
                    ast::ArraySize::Dynamic
                };
                lexer.expect_generic_paren('>')?;

                Ok(Some(ast::ConstructorType::Array { base, size }))
            }
            (_, partial) => Ok(Some(partial)),
        }
    }

    /// Expects `name` to be consumed (not in lexer).
    fn arguments<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Vec<Handle<ast::Expression<'a>>>> {
        self.push_rule_span(Rule::EnclosedExpr, lexer);
        lexer.open_arguments()?;
        let mut arguments = Vec::new();
        loop {
            if !arguments.is_empty() {
                if !lexer.next_argument()? {
                    break;
                }
            } else if lexer.skip(Token::Paren(')')) {
                break;
            }
            let arg = self.general_expression(lexer, ctx)?;
            arguments.push(arg);
        }

        self.pop_rule_span(lexer);
        Ok(arguments)
    }

    fn enclosed_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.push_rule_span(Rule::EnclosedExpr, lexer);
        let expr = self.general_expression(lexer, ctx)?;
        self.pop_rule_span(lexer);
        Ok(expr)
    }

    /// Expects [`Rule::PrimaryExpr`] or [`Rule::SingularExpr`] on top; does not pop it.
    /// Expects `name` to be consumed (not in lexer).
    fn function_call<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        name: &'a str,
        name_span: Span,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        assert!(self.rules.last().is_some());

        let expr = match name {
            // bitcast looks like a function call, but it's an operator and must be handled differently.
            "bitcast" => {
                let (to, span) = self.singular_generic(lexer, ctx)?;

                lexer.open_arguments()?;
                let expr = self.general_expression(lexer, ctx)?;
                lexer.close_arguments()?;

                ast::Expression::Bitcast {
                    expr,
                    to,
                    ty_span: span,
                }
            }
            // everything else must be handled later, since they can be hidden by user-defined functions.
            _ => {
                let result_ty = if lexer.peek().0 == Token::Paren('<') {
                    Some(self.singular_generic(lexer, ctx)?)
                } else {
                    None
                };
                let arguments = self.arguments(lexer, ctx)?;
                ctx.unresolved.insert(ast::Dependency {
                    ident: name,
                    usage: name_span,
                });
                ast::Expression::Call {
                    function: ast::Ident {
                        name,
                        span: name_span,
                    },
                    arguments,
                    result_ty,
                }
            }
        };

        let span = self.peek_rule_span(lexer);
        let expr = ctx.expressions.append(expr, span);
        Ok(expr)
    }

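    /// Resolve the identifier `name` against the locals currently in scope.
    ///
    /// If `name` is a local or argument, return its `Handle<ast::Local>`;
    /// otherwise record it in `ExpressionContext::unresolved` and leave it for
    /// the lowering pass to resolve against global declarations.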
    fn ident_expr<'a>(
        &mut self,
        name: &'a str,
        name_span: Span,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> ast::IdentExpr<'a> {
        match ctx.local_table.lookup(name) {
            Some(&local) => ast::IdentExpr::Local(local),
            None => {
                ctx.unresolved.insert(ast::Dependency {
                    ident: name,
                    usage: name_span,
                });
                ast::IdentExpr::Unresolved(name)
            }
        }
    }

    fn primary_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.push_rule_span(Rule::PrimaryExpr, lexer);
        const fn literal_ray_flag<'b>(flag: crate::RayFlag) -> ast::Expression<'b> {
            ast::Expression::Literal(ast::Literal::Number(Number::U32(flag.bits())))
        }
        const fn literal_ray_intersection<'b>(
            intersection: crate::RayQueryIntersection,
        ) -> ast::Expression<'b> {
            ast::Expression::Literal(ast::Literal::Number(Number::U32(intersection as u32)))
        }

        let expr = match lexer.peek() {
            (Token::Paren('('), _) => {
                let _ = lexer.next();
                let expr = self.enclosed_expression(lexer, ctx)?;
                lexer.expect(Token::Paren(')'))?;
                self.pop_rule_span(lexer);
                return Ok(expr);
            }
            (Token::Word("true"), _) => {
                let _ = lexer.next();
                ast::Expression::Literal(ast::Literal::Bool(true))
            }
            (Token::Word("false"), _) => {
                let _ = lexer.next();
                ast::Expression::Literal(ast::Literal::Bool(false))
            }
            (Token::Number(res), span) => {
                let _ = lexer.next();
                let num = res.map_err(|err| Error::BadNumber(span, err))?;

                if let Some(enable_extension) = num.requires_enable_extension() {
                    lexer.require_enable_extension(enable_extension, span)?;
                }

                ast::Expression::Literal(ast::Literal::Number(num))
            }
            (Token::Word("RAY_FLAG_NONE"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::empty())
            }
            (Token::Word("RAY_FLAG_FORCE_OPAQUE"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::FORCE_OPAQUE)
            }
            (Token::Word("RAY_FLAG_FORCE_NO_OPAQUE"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::FORCE_NO_OPAQUE)
            }
            (Token::Word("RAY_FLAG_TERMINATE_ON_FIRST_HIT"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::TERMINATE_ON_FIRST_HIT)
            }
            (Token::Word("RAY_FLAG_SKIP_CLOSEST_HIT_SHADER"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::SKIP_CLOSEST_HIT_SHADER)
            }
            (Token::Word("RAY_FLAG_CULL_BACK_FACING"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::CULL_BACK_FACING)
            }
            (Token::Word("RAY_FLAG_CULL_FRONT_FACING"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::CULL_FRONT_FACING)
            }
            (Token::Word("RAY_FLAG_CULL_OPAQUE"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::CULL_OPAQUE)
            }
            (Token::Word("RAY_FLAG_CULL_NO_OPAQUE"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::CULL_NO_OPAQUE)
            }
            (Token::Word("RAY_FLAG_SKIP_TRIANGLES"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::SKIP_TRIANGLES)
            }
            (Token::Word("RAY_FLAG_SKIP_AABBS"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::SKIP_AABBS)
            }
            (Token::Word("RAY_QUERY_INTERSECTION_NONE"), _) => {
                let _ = lexer.next();
                literal_ray_intersection(crate::RayQueryIntersection::None)
            }
            (Token::Word("RAY_QUERY_INTERSECTION_TRIANGLE"), _) => {
                let _ = lexer.next();
                literal_ray_intersection(crate::RayQueryIntersection::Triangle)
            }
            (Token::Word("RAY_QUERY_INTERSECTION_GENERATED"), _) => {
                let _ = lexer.next();
                literal_ray_intersection(crate::RayQueryIntersection::Generated)
            }
            (Token::Word("RAY_QUERY_INTERSECTION_AABB"), _) => {
                let _ = lexer.next();
                literal_ray_intersection(crate::RayQueryIntersection::Aabb)
            }
            (Token::Word(word), span) => {
                let start = lexer.start_byte_offset();
                let _ = lexer.next();

                if let Some(ty) = self.constructor_type(lexer, word, span, ctx)? {
                    let ty_span = lexer.span_from(start);
                    let components = self.arguments(lexer, ctx)?;
                    ast::Expression::Construct {
                        ty,
                        ty_span,
                        components,
                    }
                } else if let Token::Paren('(') = lexer.peek().0 {
                    self.pop_rule_span(lexer);
                    return self.function_call(lexer, word, span, ctx);
                } else if ["bitcast", "coopLoad"].contains(&word) {
                    self.pop_rule_span(lexer);
                    return self.function_call(lexer, word, span, ctx);
                } else {
                    let ident = self.ident_expr(word, span, ctx);
                    ast::Expression::Ident(ident)
                }
            }
            other => {
                return Err(Box::new(Error::Unexpected(
                    other.1,
                    ExpectedToken::PrimaryExpression,
                )))
            }
        };

        let span = self.pop_rule_span(lexer);
        let expr = ctx.expressions.append(expr, span);
        Ok(expr)
    }

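    /// Parse the postfix suffix of an expression, e.g. the `.xyz` in `pos.xyz`
    /// or the `[i]` in `lights[i].color`.
    ///
    /// Wraps `expr`, whose source starts at `span_start`, in `Member` and
    /// `Index` expressions for as long as the input continues with `.` or `[`.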
    fn postfix<'a>(
        &mut self,
        span_start: usize,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
        expr: Handle<ast::Expression<'a>>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        let mut expr = expr;

        loop {
            let expression = match lexer.peek().0 {
                Token::Separator('.') => {
                    let _ = lexer.next();
                    let field = lexer.next_ident()?;

                    ast::Expression::Member { base: expr, field }
                }
                Token::Paren('[') => {
                    let _ = lexer.next();
                    let index = self.enclosed_expression(lexer, ctx)?;
                    lexer.expect(Token::Paren(']'))?;

                    ast::Expression::Index { base: expr, index }
                }
                _ => break,
            };

            let span = lexer.span_from(span_start);
            expr = ctx.expressions.append(expression, span);
        }

        Ok(expr)
    }

    fn const_generic_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.push_rule_span(Rule::GenericExpr, lexer);
        let expr = self.general_expression(lexer, ctx)?;
        self.pop_rule_span(lexer);
        Ok(expr)
    }

    /// Parse a `unary_expression`.
    fn unary_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.track_recursion(|this| {
            this.push_rule_span(Rule::UnaryExpr, lexer);
            //TODO: refactor this to avoid backing up
            let expr = match lexer.peek().0 {
                Token::Operation('-') => {
                    let _ = lexer.next();
                    let expr = this.unary_expression(lexer, ctx)?;
                    let expr = ast::Expression::Unary {
                        op: crate::UnaryOperator::Negate,
                        expr,
                    };
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                Token::Operation('!') => {
                    let _ = lexer.next();
                    let expr = this.unary_expression(lexer, ctx)?;
                    let expr = ast::Expression::Unary {
                        op: crate::UnaryOperator::LogicalNot,
                        expr,
                    };
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                Token::Operation('~') => {
                    let _ = lexer.next();
                    let expr = this.unary_expression(lexer, ctx)?;
                    let expr = ast::Expression::Unary {
                        op: crate::UnaryOperator::BitwiseNot,
                        expr,
                    };
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                Token::Operation('*') => {
                    let _ = lexer.next();
                    let expr = this.unary_expression(lexer, ctx)?;
                    let expr = ast::Expression::Deref(expr);
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                Token::Operation('&') => {
                    let _ = lexer.next();
                    let expr = this.unary_expression(lexer, ctx)?;
                    let expr = ast::Expression::AddrOf(expr);
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                _ => this.singular_expression(lexer, ctx)?,
            };

            this.pop_rule_span(lexer);
            Ok(expr)
        })
    }

    /// Parse a `lhs_expression`.
    ///
    /// LHS expressions only support the `&` and `*` operators and
    /// the `[]` and `.` postfix selectors.
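    ///
    /// For example, `*ptr`, `v.x`, and `data[i].w` can all appear on the left
    /// of an assignment, while an expression such as `a + b` cannot.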
    fn lhs_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.track_recursion(|this| {
            this.push_rule_span(Rule::LhsExpr, lexer);
            let start = lexer.start_byte_offset();
            let expr = match lexer.peek() {
                (Token::Operation('*'), _) => {
                    let _ = lexer.next();
                    let expr = this.lhs_expression(lexer, ctx)?;
                    let expr = ast::Expression::Deref(expr);
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                (Token::Operation('&'), _) => {
                    let _ = lexer.next();
                    let expr = this.lhs_expression(lexer, ctx)?;
                    let expr = ast::Expression::AddrOf(expr);
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                (Token::Paren('('), _) => {
                    let _ = lexer.next();
                    let primary_expr = this.lhs_expression(lexer, ctx)?;
                    lexer.expect(Token::Paren(')'))?;
                    this.postfix(start, lexer, ctx, primary_expr)?
                }
                (Token::Word(word), span) => {
                    let _ = lexer.next();
                    let ident = this.ident_expr(word, span, ctx);
                    let primary_expr = ctx.expressions.append(ast::Expression::Ident(ident), span);
                    this.postfix(start, lexer, ctx, primary_expr)?
                }
                _ => this.singular_expression(lexer, ctx)?,
            };

            this.pop_rule_span(lexer);
            Ok(expr)
        })
    }

    /// Parse a `singular_expression`.
    fn singular_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        let start = lexer.start_byte_offset();
        self.push_rule_span(Rule::SingularExpr, lexer);
        let primary_expr = self.primary_expression(lexer, ctx)?;
        let singular_expr = self.postfix(start, lexer, ctx, primary_expr)?;
        self.pop_rule_span(lexer);

        Ok(singular_expr)
    }

    fn equality_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        context: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        // equality_expression
        context.parse_binary_op(
            lexer,
            |token| match token {
                Token::LogicalOperation('=') => Some(crate::BinaryOperator::Equal),
                Token::LogicalOperation('!') => Some(crate::BinaryOperator::NotEqual),
                _ => None,
            },
            // relational_expression
            |lexer, context| {
                let enclosing = self.race_rules(Rule::GenericExpr, Rule::EnclosedExpr);
                context.parse_binary_op(
                    lexer,
                    match enclosing {
                        Some(Rule::GenericExpr) => |token| match token {
                            Token::LogicalOperation('<') => Some(crate::BinaryOperator::LessEqual),
                            _ => None,
                        },
                        _ => |token| match token {
                            Token::Paren('<') => Some(crate::BinaryOperator::Less),
                            Token::Paren('>') => Some(crate::BinaryOperator::Greater),
                            Token::LogicalOperation('<') => Some(crate::BinaryOperator::LessEqual),
                            Token::LogicalOperation('>') => {
                                Some(crate::BinaryOperator::GreaterEqual)
                            }
                            _ => None,
                        },
                    },
                    // shift_expression
                    |lexer, context| {
                        context.parse_binary_op(
                            lexer,
                            match enclosing {
                                Some(Rule::GenericExpr) => |token| match token {
                                    Token::ShiftOperation('<') => {
                                        Some(crate::BinaryOperator::ShiftLeft)
                                    }
                                    _ => None,
                                },
                                _ => |token| match token {
                                    Token::ShiftOperation('<') => {
                                        Some(crate::BinaryOperator::ShiftLeft)
                                    }
                                    Token::ShiftOperation('>') => {
                                        Some(crate::BinaryOperator::ShiftRight)
                                    }
                                    _ => None,
                                },
                            },
                            // additive_expression
                            |lexer, context| {
                                context.parse_binary_op(
                                    lexer,
                                    |token| match token {
                                        Token::Operation('+') => Some(crate::BinaryOperator::Add),
                                        Token::Operation('-') => {
                                            Some(crate::BinaryOperator::Subtract)
                                        }
                                        _ => None,
                                    },
                                    // multiplicative_expression
                                    |lexer, context| {
                                        context.parse_binary_op(
                                            lexer,
                                            |token| match token {
                                                Token::Operation('*') => {
                                                    Some(crate::BinaryOperator::Multiply)
                                                }
                                                Token::Operation('/') => {
                                                    Some(crate::BinaryOperator::Divide)
                                                }
                                                Token::Operation('%') => {
                                                    Some(crate::BinaryOperator::Modulo)
                                                }
                                                _ => None,
                                            },
                                            |lexer, context| self.unary_expression(lexer, context),
                                        )
                                    },
                                )
                            },
                        )
                    },
                )
            },
        )
    }
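    // Note on the `enclosing` check above (a descriptive comment mirroring the
    // code, not adding behavior): while parsing a template list such as
    // `array<f32, SIZE>` (Rule::GenericExpr), `>` and `>>` must be left alone
    // so they can close the template, which is why the GenericExpr classifiers
    // accept only `<=` at the relational level and `<<` at the shift level.
    // Inside parentheses (Rule::EnclosedExpr), the full relational and shift
    // operator set applies again.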
1266
1267    fn general_expression<'a>(
1268        &mut self,
1269        lexer: &mut Lexer<'a>,
1270        ctx: &mut ExpressionContext<'a, '_, '_>,
1271    ) -> Result<'a, Handle<ast::Expression<'a>>> {
1272        self.general_expression_with_span(lexer, ctx)
1273            .map(|(expr, _)| expr)
1274    }
1275
1276    fn general_expression_with_span<'a>(
1277        &mut self,
1278        lexer: &mut Lexer<'a>,
1279        context: &mut ExpressionContext<'a, '_, '_>,
1280    ) -> Result<'a, (Handle<ast::Expression<'a>>, Span)> {
1281        self.push_rule_span(Rule::GeneralExpr, lexer);
1282        // logical_or_expression
1283        let handle = context.parse_binary_op(
1284            lexer,
1285            |token| match token {
1286                Token::LogicalOperation('|') => Some(crate::BinaryOperator::LogicalOr),
1287                _ => None,
1288            },
1289            // logical_and_expression
1290            |lexer, context| {
1291                context.parse_binary_op(
1292                    lexer,
1293                    |token| match token {
1294                        Token::LogicalOperation('&') => Some(crate::BinaryOperator::LogicalAnd),
1295                        _ => None,
1296                    },
1297                    // inclusive_or_expression
1298                    |lexer, context| {
1299                        context.parse_binary_op(
1300                            lexer,
1301                            |token| match token {
1302                                Token::Operation('|') => Some(crate::BinaryOperator::InclusiveOr),
1303                                _ => None,
1304                            },
1305                            // exclusive_or_expression
1306                            |lexer, context| {
1307                                context.parse_binary_op(
1308                                    lexer,
1309                                    |token| match token {
1310                                        Token::Operation('^') => {
1311                                            Some(crate::BinaryOperator::ExclusiveOr)
1312                                        }
1313                                        _ => None,
1314                                    },
1315                                    // and_expression
1316                                    |lexer, context| {
1317                                        context.parse_binary_op(
1318                                            lexer,
1319                                            |token| match token {
1320                                                Token::Operation('&') => {
1321                                                    Some(crate::BinaryOperator::And)
1322                                                }
1323                                                _ => None,
1324                                            },
1325                                            |lexer, context| {
1326                                                self.equality_expression(lexer, context)
1327                                            },
1328                                        )
1329                                    },
1330                                )
1331                            },
1332                        )
1333                    },
1334                )
1335            },
1336        )?;
1337        Ok((handle, self.pop_rule_span(lexer)))
1338    }
1339
1340    fn variable_decl<'a>(
1341        &mut self,
1342        lexer: &mut Lexer<'a>,
1343        ctx: &mut ExpressionContext<'a, '_, '_>,
1344    ) -> Result<'a, ast::GlobalVariable<'a>> {
1345        self.push_rule_span(Rule::VariableDecl, lexer);
1346        let mut space = crate::AddressSpace::Handle;
1347
1348        if lexer.skip(Token::Paren('<')) {
1349            let (class_str, span) = lexer.next_ident_with_span()?;
1350            space = match class_str {
1351                "storage" => {
1352                    let access = if lexer.skip(Token::Separator(',')) {
1353                        lexer.next_storage_access()?
1354                    } else {
1355                        // defaulting to `read`
1356                        crate::StorageAccess::LOAD
1357                    };
1358                    crate::AddressSpace::Storage { access }
1359                }
1360                _ => conv::map_address_space(class_str, span, &lexer.enable_extensions)?,
1361            };
1362            lexer.expect(Token::Paren('>'))?;
1363        }
1364        let name = lexer.next_ident()?;
1365
1366        let ty = if lexer.skip(Token::Separator(':')) {
1367            Some(self.type_decl(lexer, ctx)?)
1368        } else {
1369            None
1370        };
1371
1372        let init = if lexer.skip(Token::Operation('=')) {
1373            let handle = self.general_expression(lexer, ctx)?;
1374            Some(handle)
1375        } else {
1376            None
1377        };
1378        lexer.expect(Token::Separator(';'))?;
1379        self.pop_rule_span(lexer);
1380
1381        Ok(ast::GlobalVariable {
1382            name,
1383            space,
1384            binding: None,
1385            ty,
1386            init,
1387            doc_comments: Vec::new(),
1388        })
1389    }
1390
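    /// Parses the member list of a struct declaration, starting at the
    /// opening `{`, e.g. (illustrative WGSL)
    /// `{ @size(16) weight: f32, @align(16) color: vec4f, }`.
    /// Duplicate member names are rejected with [`Error::Redefinition`].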
1391    fn struct_body<'a>(
1392        &mut self,
1393        lexer: &mut Lexer<'a>,
1394        ctx: &mut ExpressionContext<'a, '_, '_>,
1395    ) -> Result<'a, Vec<ast::StructMember<'a>>> {
1396        let mut members = Vec::new();
1397        let mut member_names = FastHashSet::default();
1398
1399        lexer.expect(Token::Paren('{'))?;
1400        let mut ready = true;
1401        while !lexer.skip(Token::Paren('}')) {
1402            if !ready {
1403                return Err(Box::new(Error::Unexpected(
1404                    lexer.next().1,
1405                    ExpectedToken::Token(Token::Separator(',')),
1406                )));
1407            }
1408
1409            let doc_comments = lexer.accumulate_doc_comments();
1410
1411            let (mut size, mut align) = (ParsedAttribute::default(), ParsedAttribute::default());
1412            self.push_rule_span(Rule::Attribute, lexer);
1413            let mut bind_parser = BindingParser::default();
1414            while lexer.skip(Token::Attribute) {
1415                match lexer.next_ident_with_span()? {
1416                    ("size", name_span) => {
1417                        lexer.expect(Token::Paren('('))?;
1418                        let expr = self.general_expression(lexer, ctx)?;
1419                        lexer.expect(Token::Paren(')'))?;
1420                        size.set(expr, name_span)?;
1421                    }
1422                    ("align", name_span) => {
1423                        lexer.expect(Token::Paren('('))?;
1424                        let expr = self.general_expression(lexer, ctx)?;
1425                        lexer.expect(Token::Paren(')'))?;
1426                        align.set(expr, name_span)?;
1427                    }
1428                    (word, word_span) => bind_parser.parse(self, lexer, word, word_span, ctx)?,
1429                }
1430            }
1431
1432            let bind_span = self.pop_rule_span(lexer);
1433            let binding = bind_parser.finish(bind_span)?;
1434
1435            let name = lexer.next_ident()?;
1436            lexer.expect(Token::Separator(':'))?;
1437            let ty = self.type_decl(lexer, ctx)?;
1438            ready = lexer.skip(Token::Separator(','));
1439
1440            members.push(ast::StructMember {
1441                name,
1442                ty,
1443                binding,
1444                size: size.value,
1445                align: align.value,
1446                doc_comments,
1447            });
1448
1449            if !member_names.insert(name.name) {
1450                return Err(Box::new(Error::Redefinition {
1451                    previous: members
1452                        .iter()
1453                        .find(|x| x.name.name == name.name)
1454                        .map(|x| x.name.span)
1455                        .unwrap(),
1456                    current: name.span,
1457                }));
1458            }
1459        }
1460
1461        Ok(members)
1462    }
1463
1464    /// Parses `<T>`, returning `T` and the span of `T`.
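    ///
    /// For example, given the source `vec3<f32>`, the caller consumes `vec3`
    /// and this parses `<f32>`; a trailing comma, as in `<f32,>`, is also
    /// accepted.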
1465    fn singular_generic<'a>(
1466        &mut self,
1467        lexer: &mut Lexer<'a>,
1468        ctx: &mut ExpressionContext<'a, '_, '_>,
1469    ) -> Result<'a, (Handle<ast::Type<'a>>, Span)> {
1470        lexer.expect_generic_paren('<')?;
1471        let start = lexer.start_byte_offset();
1472        let ty = self.type_decl(lexer, ctx)?;
1473        let span = lexer.span_from(start);
1474        lexer.skip(Token::Separator(','));
1475        lexer.expect_generic_paren('>')?;
1476        Ok((ty, span))
1477    }
1478
1479    /// Parses `<T, R>`, returning `(T, span of T, R)`.
1480    fn cooperative_scalar_and_role<'a>(
1481        &mut self,
1482        lexer: &mut Lexer<'a>,
1483        ctx: &mut ExpressionContext<'a, '_, '_>,
1484    ) -> Result<'a, (Handle<ast::Type<'a>>, Span, crate::CooperativeRole)> {
1485        lexer.expect_generic_paren('<')?;
1486        let start = lexer.start_byte_offset();
1487        let ty = self.type_decl(lexer, ctx)?;
1488        let ty_span = lexer.span_from(start);
1489        lexer.expect(Token::Separator(','))?;
1490        let role = lexer.next_cooperative_role()?;
1491        lexer.expect_generic_paren('>')?;
1492        Ok((ty, ty_span, role))
1493    }
1494
1495    fn matrix_with_type<'a>(
1496        &mut self,
1497        lexer: &mut Lexer<'a>,
1498        ctx: &mut ExpressionContext<'a, '_, '_>,
1499        columns: crate::VectorSize,
1500        rows: crate::VectorSize,
1501    ) -> Result<'a, ast::Type<'a>> {
1502        let (ty, ty_span) = self.singular_generic(lexer, ctx)?;
1503        Ok(ast::Type::Matrix {
1504            columns,
1505            rows,
1506            ty,
1507            ty_span,
1508        })
1509    }
1510
1511    fn cooperative_matrix_with_type<'a>(
1512        &mut self,
1513        lexer: &mut Lexer<'a>,
1514        ctx: &mut ExpressionContext<'a, '_, '_>,
1515        columns: crate::CooperativeSize,
1516        rows: crate::CooperativeSize,
1517    ) -> Result<'a, ast::Type<'a>> {
1518        let (ty, ty_span, role) = self.cooperative_scalar_and_role(lexer, ctx)?;
1519        Ok(ast::Type::CooperativeMatrix {
1520            columns,
1521            rows,
1522            ty,
1523            ty_span,
1524            role,
1525        })
1526    }
1527
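    /// Attempts to parse `word` (already consumed, with `span`) as a
    /// predeclared type, reading any generic arguments from `lexer`, and
    /// returns `Ok(None)` if `word` is not a predeclared type.
    ///
    /// Predeclared aliases are handled alongside their generic forms, e.g.
    /// (WGSL) `vec3f` for `vec3<f32>` and `mat2x2h` for `mat2x2<f16>`.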
1528    fn type_decl_impl<'a>(
1529        &mut self,
1530        lexer: &mut Lexer<'a>,
1531        word: &'a str,
1532        span: Span,
1533        ctx: &mut ExpressionContext<'a, '_, '_>,
1534    ) -> Result<'a, Option<ast::Type<'a>>> {
1535        if let Some(scalar) = conv::get_scalar_type(&lexer.enable_extensions, span, word)? {
1536            return Ok(Some(ast::Type::Scalar(scalar)));
1537        }
1538
1539        Ok(Some(match word {
1540            "vec2" => {
1541                let (ty, ty_span) = self.singular_generic(lexer, ctx)?;
1542                ast::Type::Vector {
1543                    size: crate::VectorSize::Bi,
1544                    ty,
1545                    ty_span,
1546                }
1547            }
1548            "vec2i" => ast::Type::Vector {
1549                size: crate::VectorSize::Bi,
1550                ty: ctx.new_scalar(Scalar::I32),
1551                ty_span: Span::UNDEFINED,
1552            },
1553            "vec2u" => ast::Type::Vector {
1554                size: crate::VectorSize::Bi,
1555                ty: ctx.new_scalar(Scalar::U32),
1556                ty_span: Span::UNDEFINED,
1557            },
1558            "vec2f" => ast::Type::Vector {
1559                size: crate::VectorSize::Bi,
1560                ty: ctx.new_scalar(Scalar::F32),
1561                ty_span: Span::UNDEFINED,
1562            },
1563            "vec2h" => ast::Type::Vector {
1564                size: crate::VectorSize::Bi,
1565                ty: ctx.new_scalar(Scalar::F16),
1566                ty_span: Span::UNDEFINED,
1567            },
1568            "vec3" => {
1569                let (ty, ty_span) = self.singular_generic(lexer, ctx)?;
1570                ast::Type::Vector {
1571                    size: crate::VectorSize::Tri,
1572                    ty,
1573                    ty_span,
1574                }
1575            }
1576            "vec3i" => ast::Type::Vector {
1577                size: crate::VectorSize::Tri,
1578                ty: ctx.new_scalar(Scalar::I32),
1579                ty_span: Span::UNDEFINED,
1580            },
1581            "vec3u" => ast::Type::Vector {
1582                size: crate::VectorSize::Tri,
1583                ty: ctx.new_scalar(Scalar::U32),
1584                ty_span: Span::UNDEFINED,
1585            },
1586            "vec3f" => ast::Type::Vector {
1587                size: crate::VectorSize::Tri,
1588                ty: ctx.new_scalar(Scalar::F32),
1589                ty_span: Span::UNDEFINED,
1590            },
1591            "vec3h" => ast::Type::Vector {
1592                size: crate::VectorSize::Tri,
1593                ty: ctx.new_scalar(Scalar::F16),
1594                ty_span: Span::UNDEFINED,
1595            },
1596            "vec4" => {
1597                let (ty, ty_span) = self.singular_generic(lexer, ctx)?;
1598                ast::Type::Vector {
1599                    size: crate::VectorSize::Quad,
1600                    ty,
1601                    ty_span,
1602                }
1603            }
1604            "vec4i" => ast::Type::Vector {
1605                size: crate::VectorSize::Quad,
1606                ty: ctx.new_scalar(Scalar::I32),
1607                ty_span: Span::UNDEFINED,
1608            },
1609            "vec4u" => ast::Type::Vector {
1610                size: crate::VectorSize::Quad,
1611                ty: ctx.new_scalar(Scalar::U32),
1612                ty_span: Span::UNDEFINED,
1613            },
1614            "vec4f" => ast::Type::Vector {
1615                size: crate::VectorSize::Quad,
1616                ty: ctx.new_scalar(Scalar::F32),
1617                ty_span: Span::UNDEFINED,
1618            },
1619            "vec4h" => ast::Type::Vector {
1620                size: crate::VectorSize::Quad,
1621                ty: ctx.new_scalar(Scalar::F16),
1622                ty_span: Span::UNDEFINED,
1623            },
1624            "mat2x2" => {
1625                self.matrix_with_type(lexer, ctx, crate::VectorSize::Bi, crate::VectorSize::Bi)?
1626            }
1627            "mat2x2f" => ast::Type::Matrix {
1628                columns: crate::VectorSize::Bi,
1629                rows: crate::VectorSize::Bi,
1630                ty: ctx.new_scalar(Scalar::F32),
1631                ty_span: Span::UNDEFINED,
1632            },
1633            "mat2x2h" => ast::Type::Matrix {
1634                columns: crate::VectorSize::Bi,
1635                rows: crate::VectorSize::Bi,
1636                ty: ctx.new_scalar(Scalar::F16),
1637                ty_span: Span::UNDEFINED,
1638            },
1639            "mat2x3" => {
1640                self.matrix_with_type(lexer, ctx, crate::VectorSize::Bi, crate::VectorSize::Tri)?
1641            }
1642            "mat2x3f" => ast::Type::Matrix {
1643                columns: crate::VectorSize::Bi,
1644                rows: crate::VectorSize::Tri,
1645                ty: ctx.new_scalar(Scalar::F32),
1646                ty_span: Span::UNDEFINED,
1647            },
1648            "mat2x3h" => ast::Type::Matrix {
1649                columns: crate::VectorSize::Bi,
1650                rows: crate::VectorSize::Tri,
1651                ty: ctx.new_scalar(Scalar::F16),
1652                ty_span: Span::UNDEFINED,
1653            },
1654            "mat2x4" => {
1655                self.matrix_with_type(lexer, ctx, crate::VectorSize::Bi, crate::VectorSize::Quad)?
1656            }
1657            "mat2x4f" => ast::Type::Matrix {
1658                columns: crate::VectorSize::Bi,
1659                rows: crate::VectorSize::Quad,
1660                ty: ctx.new_scalar(Scalar::F32),
1661                ty_span: Span::UNDEFINED,
1662            },
1663            "mat2x4h" => ast::Type::Matrix {
1664                columns: crate::VectorSize::Bi,
1665                rows: crate::VectorSize::Quad,
1666                ty: ctx.new_scalar(Scalar::F16),
1667                ty_span: Span::UNDEFINED,
1668            },
1669            "mat3x2" => {
1670                self.matrix_with_type(lexer, ctx, crate::VectorSize::Tri, crate::VectorSize::Bi)?
1671            }
1672            "mat3x2f" => ast::Type::Matrix {
1673                columns: crate::VectorSize::Tri,
1674                rows: crate::VectorSize::Bi,
1675                ty: ctx.new_scalar(Scalar::F32),
1676                ty_span: Span::UNDEFINED,
1677            },
1678            "mat3x2h" => ast::Type::Matrix {
1679                columns: crate::VectorSize::Tri,
1680                rows: crate::VectorSize::Bi,
1681                ty: ctx.new_scalar(Scalar::F16),
1682                ty_span: Span::UNDEFINED,
1683            },
1684            "mat3x3" => {
1685                self.matrix_with_type(lexer, ctx, crate::VectorSize::Tri, crate::VectorSize::Tri)?
1686            }
1687            "mat3x3f" => ast::Type::Matrix {
1688                columns: crate::VectorSize::Tri,
1689                rows: crate::VectorSize::Tri,
1690                ty: ctx.new_scalar(Scalar::F32),
1691                ty_span: Span::UNDEFINED,
1692            },
1693            "mat3x3h" => ast::Type::Matrix {
1694                columns: crate::VectorSize::Tri,
1695                rows: crate::VectorSize::Tri,
1696                ty: ctx.new_scalar(Scalar::F16),
1697                ty_span: Span::UNDEFINED,
1698            },
1699            "mat3x4" => {
1700                self.matrix_with_type(lexer, ctx, crate::VectorSize::Tri, crate::VectorSize::Quad)?
1701            }
1702            "mat3x4f" => ast::Type::Matrix {
1703                columns: crate::VectorSize::Tri,
1704                rows: crate::VectorSize::Quad,
1705                ty: ctx.new_scalar(Scalar::F32),
1706                ty_span: Span::UNDEFINED,
1707            },
1708            "mat3x4h" => ast::Type::Matrix {
1709                columns: crate::VectorSize::Tri,
1710                rows: crate::VectorSize::Quad,
1711                ty: ctx.new_scalar(Scalar::F16),
1712                ty_span: Span::UNDEFINED,
1713            },
1714            "mat4x2" => {
1715                self.matrix_with_type(lexer, ctx, crate::VectorSize::Quad, crate::VectorSize::Bi)?
1716            }
1717            "mat4x2f" => ast::Type::Matrix {
1718                columns: crate::VectorSize::Quad,
1719                rows: crate::VectorSize::Bi,
1720                ty: ctx.new_scalar(Scalar::F32),
1721                ty_span: Span::UNDEFINED,
1722            },
1723            "mat4x2h" => ast::Type::Matrix {
1724                columns: crate::VectorSize::Quad,
1725                rows: crate::VectorSize::Bi,
1726                ty: ctx.new_scalar(Scalar::F16),
1727                ty_span: Span::UNDEFINED,
1728            },
1729            "mat4x3" => {
1730                self.matrix_with_type(lexer, ctx, crate::VectorSize::Quad, crate::VectorSize::Tri)?
1731            }
1732            "mat4x3f" => ast::Type::Matrix {
1733                columns: crate::VectorSize::Quad,
1734                rows: crate::VectorSize::Tri,
1735                ty: ctx.new_scalar(Scalar::F32),
1736                ty_span: Span::UNDEFINED,
1737            },
1738            "mat4x3h" => ast::Type::Matrix {
1739                columns: crate::VectorSize::Quad,
1740                rows: crate::VectorSize::Tri,
1741                ty: ctx.new_scalar(Scalar::F16),
1742                ty_span: Span::UNDEFINED,
1743            },
1744            "mat4x4" => {
1745                self.matrix_with_type(lexer, ctx, crate::VectorSize::Quad, crate::VectorSize::Quad)?
1746            }
1747            "mat4x4f" => ast::Type::Matrix {
1748                columns: crate::VectorSize::Quad,
1749                rows: crate::VectorSize::Quad,
1750                ty: ctx.new_scalar(Scalar::F32),
1751                ty_span: Span::UNDEFINED,
1752            },
1753            "mat4x4h" => ast::Type::Matrix {
1754                columns: crate::VectorSize::Quad,
1755                rows: crate::VectorSize::Quad,
1756                ty: ctx.new_scalar(Scalar::F16),
1757                ty_span: Span::UNDEFINED,
1758            },
1759            "coop_mat8x8" => {
1760                lexer.require_enable_extension(
1761                    ImplementedEnableExtension::WgpuCooperativeMatrix,
1762                    span,
1763                )?;
1764                self.cooperative_matrix_with_type(
1765                    lexer,
1766                    ctx,
1767                    crate::CooperativeSize::Eight,
1768                    crate::CooperativeSize::Eight,
1769                )?
1770            }
1771            "coop_mat16x16" => {
1772                lexer.require_enable_extension(
1773                    ImplementedEnableExtension::WgpuCooperativeMatrix,
1774                    span,
1775                )?;
1776                self.cooperative_matrix_with_type(
1777                    lexer,
1778                    ctx,
1779                    crate::CooperativeSize::Sixteen,
1780                    crate::CooperativeSize::Sixteen,
1781                )?
1782            }
1783            "atomic" => {
1784                let scalar = lexer.next_scalar_generic()?;
1785                ast::Type::Atomic(scalar)
1786            }
1787            "ptr" => {
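                // e.g. (illustrative WGSL) `ptr<function, f32>` or
                // `ptr<storage, array<u32>, read_write>`; the trailing access
                // mode only applies to the `storage` address space and
                // defaults to `read`.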
1788                lexer.expect_generic_paren('<')?;
1789                let (ident, span) = lexer.next_ident_with_span()?;
1790                let mut space = conv::map_address_space(ident, span, &lexer.enable_extensions)?;
1791                lexer.expect(Token::Separator(','))?;
1792                let base = self.type_decl(lexer, ctx)?;
1793                if let crate::AddressSpace::Storage { ref mut access } = space {
1794                    *access = if lexer.end_of_generic_arguments() {
1795                        let result = lexer.next_storage_access()?;
1796                        lexer.skip(Token::Separator(','));
1797                        result
1798                    } else {
1799                        crate::StorageAccess::LOAD
1800                    };
1801                }
1802                lexer.expect_generic_paren('>')?;
1803                ast::Type::Pointer { base, space }
1804            }
1805            "array" => {
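                // e.g. (illustrative WGSL) `array<f32, 4>` for a fixed-size
                // array or `array<f32>` for a runtime-sized one.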
1806                lexer.expect_generic_paren('<')?;
1807                let base = self.type_decl(lexer, ctx)?;
1808                let size = if lexer.end_of_generic_arguments() {
1809                    let size = self.const_generic_expression(lexer, ctx)?;
1810                    lexer.skip(Token::Separator(','));
1811                    ast::ArraySize::Constant(size)
1812                } else {
1813                    ast::ArraySize::Dynamic
1814                };
1815                lexer.expect_generic_paren('>')?;
1816
1817                ast::Type::Array { base, size }
1818            }
1819            "binding_array" => {
1820                lexer.expect_generic_paren('<')?;
1821                let base = self.type_decl(lexer, ctx)?;
1822                let size = if lexer.end_of_generic_arguments() {
1823                    let size = self.unary_expression(lexer, ctx)?;
1824                    lexer.skip(Token::Separator(','));
1825                    ast::ArraySize::Constant(size)
1826                } else {
1827                    ast::ArraySize::Dynamic
1828                };
1829                lexer.expect_generic_paren('>')?;
1830
1831                ast::Type::BindingArray { base, size }
1832            }
1833            "sampler" => ast::Type::Sampler { comparison: false },
1834            "sampler_comparison" => ast::Type::Sampler { comparison: true },
1835            "texture_1d" => {
1836                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1837                Self::check_texture_sample_type(scalar, span)?;
1838                ast::Type::Image {
1839                    dim: crate::ImageDimension::D1,
1840                    arrayed: false,
1841                    class: crate::ImageClass::Sampled {
1842                        kind: scalar.kind,
1843                        multi: false,
1844                    },
1845                }
1846            }
1847            "texture_1d_array" => {
1848                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1849                Self::check_texture_sample_type(scalar, span)?;
1850                ast::Type::Image {
1851                    dim: crate::ImageDimension::D1,
1852                    arrayed: true,
1853                    class: crate::ImageClass::Sampled {
1854                        kind: scalar.kind,
1855                        multi: false,
1856                    },
1857                }
1858            }
1859            "texture_2d" => {
1860                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1861                Self::check_texture_sample_type(scalar, span)?;
1862                ast::Type::Image {
1863                    dim: crate::ImageDimension::D2,
1864                    arrayed: false,
1865                    class: crate::ImageClass::Sampled {
1866                        kind: scalar.kind,
1867                        multi: false,
1868                    },
1869                }
1870            }
1871            "texture_2d_array" => {
1872                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1873                Self::check_texture_sample_type(scalar, span)?;
1874                ast::Type::Image {
1875                    dim: crate::ImageDimension::D2,
1876                    arrayed: true,
1877                    class: crate::ImageClass::Sampled {
1878                        kind: scalar.kind,
1879                        multi: false,
1880                    },
1881                }
1882            }
1883            "texture_3d" => {
1884                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1885                Self::check_texture_sample_type(scalar, span)?;
1886                ast::Type::Image {
1887                    dim: crate::ImageDimension::D3,
1888                    arrayed: false,
1889                    class: crate::ImageClass::Sampled {
1890                        kind: scalar.kind,
1891                        multi: false,
1892                    },
1893                }
1894            }
1895            "texture_cube" => {
1896                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1897                Self::check_texture_sample_type(scalar, span)?;
1898                ast::Type::Image {
1899                    dim: crate::ImageDimension::Cube,
1900                    arrayed: false,
1901                    class: crate::ImageClass::Sampled {
1902                        kind: scalar.kind,
1903                        multi: false,
1904                    },
1905                }
1906            }
1907            "texture_cube_array" => {
1908                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1909                Self::check_texture_sample_type(scalar, span)?;
1910                ast::Type::Image {
1911                    dim: crate::ImageDimension::Cube,
1912                    arrayed: true,
1913                    class: crate::ImageClass::Sampled {
1914                        kind: scalar.kind,
1915                        multi: false,
1916                    },
1917                }
1918            }
1919            "texture_multisampled_2d" => {
1920                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1921                Self::check_texture_sample_type(scalar, span)?;
1922                ast::Type::Image {
1923                    dim: crate::ImageDimension::D2,
1924                    arrayed: false,
1925                    class: crate::ImageClass::Sampled {
1926                        kind: scalar.kind,
1927                        multi: true,
1928                    },
1929                }
1930            }
1931            "texture_multisampled_2d_array" => {
1932                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1933                Self::check_texture_sample_type(scalar, span)?;
1934                ast::Type::Image {
1935                    dim: crate::ImageDimension::D2,
1936                    arrayed: true,
1937                    class: crate::ImageClass::Sampled {
1938                        kind: scalar.kind,
1939                        multi: true,
1940                    },
1941                }
1942            }
1943            "texture_depth_2d" => ast::Type::Image {
1944                dim: crate::ImageDimension::D2,
1945                arrayed: false,
1946                class: crate::ImageClass::Depth { multi: false },
1947            },
1948            "texture_depth_2d_array" => ast::Type::Image {
1949                dim: crate::ImageDimension::D2,
1950                arrayed: true,
1951                class: crate::ImageClass::Depth { multi: false },
1952            },
1953            "texture_depth_cube" => ast::Type::Image {
1954                dim: crate::ImageDimension::Cube,
1955                arrayed: false,
1956                class: crate::ImageClass::Depth { multi: false },
1957            },
1958            "texture_depth_cube_array" => ast::Type::Image {
1959                dim: crate::ImageDimension::Cube,
1960                arrayed: true,
1961                class: crate::ImageClass::Depth { multi: false },
1962            },
1963            "texture_depth_multisampled_2d" => ast::Type::Image {
1964                dim: crate::ImageDimension::D2,
1965                arrayed: false,
1966                class: crate::ImageClass::Depth { multi: true },
1967            },
1968            "texture_external" => ast::Type::Image {
1969                dim: crate::ImageDimension::D2,
1970                arrayed: false,
1971                class: crate::ImageClass::External,
1972            },
1973            "texture_storage_1d" => {
1974                let (format, access) = lexer.next_format_generic()?;
1975                ast::Type::Image {
1976                    dim: crate::ImageDimension::D1,
1977                    arrayed: false,
1978                    class: crate::ImageClass::Storage { format, access },
1979                }
1980            }
1981            "texture_storage_1d_array" => {
1982                let (format, access) = lexer.next_format_generic()?;
1983                ast::Type::Image {
1984                    dim: crate::ImageDimension::D1,
1985                    arrayed: true,
1986                    class: crate::ImageClass::Storage { format, access },
1987                }
1988            }
1989            "texture_storage_2d" => {
1990                let (format, access) = lexer.next_format_generic()?;
1991                ast::Type::Image {
1992                    dim: crate::ImageDimension::D2,
1993                    arrayed: false,
1994                    class: crate::ImageClass::Storage { format, access },
1995                }
1996            }
1997            "texture_storage_2d_array" => {
1998                let (format, access) = lexer.next_format_generic()?;
1999                ast::Type::Image {
2000                    dim: crate::ImageDimension::D2,
2001                    arrayed: true,
2002                    class: crate::ImageClass::Storage { format, access },
2003                }
2004            }
2005            "texture_storage_3d" => {
2006                let (format, access) = lexer.next_format_generic()?;
2007                ast::Type::Image {
2008                    dim: crate::ImageDimension::D3,
2009                    arrayed: false,
2010                    class: crate::ImageClass::Storage { format, access },
2011                }
2012            }
2013            "acceleration_structure" => {
2014                lexer.require_enable_extension(ImplementedEnableExtension::WgpuRayQuery, span)?;
2015                let vertex_return = lexer.next_acceleration_structure_flags()?;
2016                if vertex_return {
2017                    lexer.require_enable_extension(
2018                        ImplementedEnableExtension::WgpuRayQueryVertexReturn,
2019                        span,
2020                    )?;
2021                }
2022                ast::Type::AccelerationStructure { vertex_return }
2023            }
2024            "ray_query" => {
2025                lexer.require_enable_extension(ImplementedEnableExtension::WgpuRayQuery, span)?;
2026                let vertex_return = lexer.next_acceleration_structure_flags()?;
2027                if vertex_return {
2028                    lexer.require_enable_extension(
2029                        ImplementedEnableExtension::WgpuRayQueryVertexReturn,
2030                        span,
2031                    )?;
2032                }
2033                ast::Type::RayQuery { vertex_return }
2034            }
2035            "RayDesc" => {
2036                lexer.require_enable_extension(ImplementedEnableExtension::WgpuRayQuery, span)?;
2037                ast::Type::RayDesc
2038            }
2039            "RayIntersection" => {
2040                lexer.require_enable_extension(ImplementedEnableExtension::WgpuRayQuery, span)?;
2041                ast::Type::RayIntersection
2042            }
2043            _ => return Ok(None),
2044        }))
2045    }
2046
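    /// Checks that `scalar` is a permitted sampled-texture component type
    /// (32-bit float, signed, or unsigned, plus the 64-bit unsigned case
    /// accepted below), returning [`Error::BadTextureSampleType`] otherwise.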
2047    fn check_texture_sample_type(scalar: Scalar, span: Span) -> Result<'static, ()> {
2048        use crate::ScalarKind::*;
2049        // Validate according to https://gpuweb.github.io/gpuweb/wgsl/#sampled-texture-type
2050        match scalar {
2051            Scalar {
2052                kind: Float | Sint | Uint,
2053                width: 4,
2054            } => Ok(()),
2055            Scalar {
2056                kind: Uint,
2057                width: 8,
2058            } => Ok(()),
2059            _ => Err(Box::new(Error::BadTextureSampleType { span, scalar })),
2060        }
2061    }
2062
2063    /// Parse a type declaration, starting from the type's name.
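    ///
    /// Names that are not predeclared types (for example a user-declared
    /// struct) are recorded in `ctx.unresolved` as dependencies and parsed as
    /// [`ast::Type::User`].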
2064    fn type_decl<'a>(
2065        &mut self,
2066        lexer: &mut Lexer<'a>,
2067        ctx: &mut ExpressionContext<'a, '_, '_>,
2068    ) -> Result<'a, Handle<ast::Type<'a>>> {
2069        self.track_recursion(|this| {
2070            this.push_rule_span(Rule::TypeDecl, lexer);
2071
2072            let (name, span) = lexer.next_ident_with_span()?;
2073
2074            let ty = match this.type_decl_impl(lexer, name, span, ctx)? {
2075                Some(ty) => ty,
2076                None => {
2077                    ctx.unresolved.insert(ast::Dependency {
2078                        ident: name,
2079                        usage: span,
2080                    });
2081                    ast::Type::User(ast::Ident { name, span })
2082                }
2083            };
2084
2085            this.pop_rule_span(lexer);
2086
2087            let handle = ctx.types.append(ty, Span::UNDEFINED);
2088            Ok(handle)
2089        })
2090    }
2091
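    /// Parses the operator and right-hand side of an assignment once the
    /// left-hand side `target` has been parsed, covering plain `=`, compound
    /// assignments such as `+=` and `<<=`, and the `++` / `--` statements
    /// (illustrative WGSL forms).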
2092    fn assignment_op_and_rhs<'a>(
2093        &mut self,
2094        lexer: &mut Lexer<'a>,
2095        ctx: &mut ExpressionContext<'a, '_, '_>,
2096        block: &mut ast::Block<'a>,
2097        target: Handle<ast::Expression<'a>>,
2098        span_start: usize,
2099    ) -> Result<'a, ()> {
2100        use crate::BinaryOperator as Bo;
2101
2102        let op = lexer.next();
2103        let (op, value) = match op {
2104            (Token::Operation('='), _) => {
2105                let value = self.general_expression(lexer, ctx)?;
2106                (None, value)
2107            }
2108            (Token::AssignmentOperation(c), _) => {
2109                let op = match c {
2110                    '<' => Bo::ShiftLeft,
2111                    '>' => Bo::ShiftRight,
2112                    '+' => Bo::Add,
2113                    '-' => Bo::Subtract,
2114                    '*' => Bo::Multiply,
2115                    '/' => Bo::Divide,
2116                    '%' => Bo::Modulo,
2117                    '&' => Bo::And,
2118                    '|' => Bo::InclusiveOr,
2119                    '^' => Bo::ExclusiveOr,
2120                    // Note: `consume_token` shouldn't produce any other assignment ops
2121                    _ => unreachable!(),
2122                };
2123
2124                let value = self.general_expression(lexer, ctx)?;
2125                (Some(op), value)
2126            }
2127            token @ (Token::IncrementOperation | Token::DecrementOperation, _) => {
2128                let op = match token.0 {
2129                    Token::IncrementOperation => ast::StatementKind::Increment,
2130                    Token::DecrementOperation => ast::StatementKind::Decrement,
2131                    _ => unreachable!(),
2132                };
2133
2134                let span = lexer.span_from(span_start);
2135                block.stmts.push(ast::Statement {
2136                    kind: op(target),
2137                    span,
2138                });
2139                return Ok(());
2140            }
2141            _ => return Err(Box::new(Error::Unexpected(op.1, ExpectedToken::Assignment))),
2142        };
2143
2144        let span = lexer.span_from(span_start);
2145        block.stmts.push(ast::Statement {
2146            kind: ast::StatementKind::Assign { target, op, value },
2147            span,
2148        });
2149        Ok(())
2150    }
2151
2152    /// Parse an assignment statement (also covers increment and decrement statements).
2153    fn assignment_statement<'a>(
2154        &mut self,
2155        lexer: &mut Lexer<'a>,
2156        ctx: &mut ExpressionContext<'a, '_, '_>,
2157        block: &mut ast::Block<'a>,
2158    ) -> Result<'a, ()> {
2159        let span_start = lexer.start_byte_offset();
2160        let target = self.lhs_expression(lexer, ctx)?;
2161        self.assignment_op_and_rhs(lexer, ctx, block, target, span_start)
2162    }
2163
2164    /// Parse a function call statement.
2165    /// Expects `ident` to have already been consumed, so it is no longer in the lexer.
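    ///
    /// For example, for a statement like `foo(1, 2);` (with `foo` a
    /// user-declared function, name illustrative), the caller consumes `foo`
    /// and this parses the argument list, records `foo` as an unresolved
    /// dependency, and pushes a `Call` statement.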
2166    fn function_statement<'a>(
2167        &mut self,
2168        lexer: &mut Lexer<'a>,
2169        ident: &'a str,
2170        ident_span: Span,
2171        span_start: usize,
2172        context: &mut ExpressionContext<'a, '_, '_>,
2173        block: &mut ast::Block<'a>,
2174    ) -> Result<'a, ()> {
2175        self.push_rule_span(Rule::SingularExpr, lexer);
2176
2177        context.unresolved.insert(ast::Dependency {
2178            ident,
2179            usage: ident_span,
2180        });
2181        let arguments = self.arguments(lexer, context)?;
2182        let span = lexer.span_from(span_start);
2183
2184        block.stmts.push(ast::Statement {
2185            kind: ast::StatementKind::Call {
2186                function: ast::Ident {
2187                    name: ident,
2188                    span: ident_span,
2189                },
2190                arguments,
2191            },
2192            span,
2193        });
2194
2195        self.pop_rule_span(lexer);
2196
2197        Ok(())
2198    }
2199
2200    fn function_call_or_assignment_statement<'a>(
2201        &mut self,
2202        lexer: &mut Lexer<'a>,
2203        context: &mut ExpressionContext<'a, '_, '_>,
2204        block: &mut ast::Block<'a>,
2205    ) -> Result<'a, ()> {
2206        let span_start = lexer.start_byte_offset();
2207        match lexer.peek() {
2208            (Token::Word(name), span) => {
2209                // A little hack for two-token lookahead.
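                // If the identifier is directly followed by `(`, treat it as
                // a call statement such as `foo(x);`; otherwise restore the
                // lexer and parse an assignment such as `foo.bar = x;`
                // (identifiers illustrative).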
2210                let cloned = lexer.clone();
2211                let _ = lexer.next();
2212                match lexer.peek() {
2213                    (Token::Paren('('), _) => {
2214                        self.function_statement(lexer, name, span, span_start, context, block)
2215                    }
2216                    _ => {
2217                        *lexer = cloned;
2218                        self.assignment_statement(lexer, context, block)
2219                    }
2220                }
2221            }
2222            _ => self.assignment_statement(lexer, context, block),
2223        }
2224    }
2225
2226    fn statement<'a>(
2227        &mut self,
2228        lexer: &mut Lexer<'a>,
2229        ctx: &mut ExpressionContext<'a, '_, '_>,
2230        block: &mut ast::Block<'a>,
2231        brace_nesting_level: u8,
2232    ) -> Result<'a, ()> {
2233        self.track_recursion(|this| {
2234            this.push_rule_span(Rule::Statement, lexer);
2235            match lexer.peek() {
2236                (Token::Separator(';'), _) => {
2237                    let _ = lexer.next();
2238                    this.pop_rule_span(lexer);
2239                }
2240                (token, _) if is_start_of_compound_statement(token) => {
2241                    let (inner, span) = this.block(lexer, ctx, brace_nesting_level)?;
2242                    block.stmts.push(ast::Statement {
2243                        kind: ast::StatementKind::Block(inner),
2244                        span,
2245                    });
2246                    this.pop_rule_span(lexer);
2247                }
2248                (Token::Word(word), _) => {
2249                    let kind = match word {
2250                        "_" => {
2251                            let _ = lexer.next();
2252                            lexer.expect(Token::Operation('='))?;
2253                            let expr = this.general_expression(lexer, ctx)?;
2254                            lexer.expect(Token::Separator(';'))?;
2255
2256                            ast::StatementKind::Phony(expr)
2257                        }
2258                        "let" => {
2259                            let _ = lexer.next();
2260                            let name = lexer.next_ident()?;
2261
2262                            let given_ty = if lexer.skip(Token::Separator(':')) {
2263                                let ty = this.type_decl(lexer, ctx)?;
2264                                Some(ty)
2265                            } else {
2266                                None
2267                            };
2268                            lexer.expect(Token::Operation('='))?;
2269                            let expr_id = this.general_expression(lexer, ctx)?;
2270                            lexer.expect(Token::Separator(';'))?;
2271
2272                            let handle = ctx.declare_local(name)?;
2273                            ast::StatementKind::LocalDecl(ast::LocalDecl::Let(ast::Let {
2274                                name,
2275                                ty: given_ty,
2276                                init: expr_id,
2277                                handle,
2278                            }))
2279                        }
2280                        "const" => {
2281                            let _ = lexer.next();
2282                            let name = lexer.next_ident()?;
2283
2284                            let given_ty = if lexer.skip(Token::Separator(':')) {
2285                                let ty = this.type_decl(lexer, ctx)?;
2286                                Some(ty)
2287                            } else {
2288                                None
2289                            };
2290                            lexer.expect(Token::Operation('='))?;
2291                            let expr_id = this.general_expression(lexer, ctx)?;
2292                            lexer.expect(Token::Separator(';'))?;
2293
2294                            let handle = ctx.declare_local(name)?;
2295                            ast::StatementKind::LocalDecl(ast::LocalDecl::Const(ast::LocalConst {
2296                                name,
2297                                ty: given_ty,
2298                                init: expr_id,
2299                                handle,
2300                            }))
2301                        }
2302                        "var" => {
2303                            let _ = lexer.next();
2304
2305                            if lexer.skip(Token::Paren('<')) {
2306                                let (class_str, span) = lexer.next_ident_with_span()?;
2307                                if class_str != "function" {
2308                                    return Err(Box::new(Error::InvalidLocalVariableAddressSpace(
2309                                        span,
2310                                    )));
2311                                }
2312                                lexer.expect(Token::Paren('>'))?;
2313                            }
2314
2315                            let name = lexer.next_ident()?;
2316                            let ty = if lexer.skip(Token::Separator(':')) {
2317                                let ty = this.type_decl(lexer, ctx)?;
2318                                Some(ty)
2319                            } else {
2320                                None
2321                            };
2322
2323                            let init = if lexer.skip(Token::Operation('=')) {
2324                                let init = this.general_expression(lexer, ctx)?;
2325                                Some(init)
2326                            } else {
2327                                None
2328                            };
2329
2330                            lexer.expect(Token::Separator(';'))?;
2331
2332                            let handle = ctx.declare_local(name)?;
2333                            ast::StatementKind::LocalDecl(ast::LocalDecl::Var(ast::LocalVariable {
2334                                name,
2335                                ty,
2336                                init,
2337                                handle,
2338                            }))
2339                        }
2340                        "return" => {
2341                            let _ = lexer.next();
2342                            let value = if lexer.peek().0 != Token::Separator(';') {
2343                                let handle = this.general_expression(lexer, ctx)?;
2344                                Some(handle)
2345                            } else {
2346                                None
2347                            };
2348                            lexer.expect(Token::Separator(';'))?;
2349                            ast::StatementKind::Return { value }
2350                        }
2351                        "if" => {
2352                            let _ = lexer.next();
2353                            let condition = this.general_expression(lexer, ctx)?;
2354
2355                            let accept = this.block(lexer, ctx, brace_nesting_level)?.0;
2356
2357                            let mut elsif_stack = Vec::new();
2358                            let mut elseif_span_start = lexer.start_byte_offset();
2359                            let mut reject = loop {
2360                                if !lexer.skip(Token::Word("else")) {
2361                                    break ast::Block::default();
2362                                }
2363
2364                                if !lexer.skip(Token::Word("if")) {
2365                                    // ... else { ... }
2366                                    break this.block(lexer, ctx, brace_nesting_level)?.0;
2367                                }
2368
2369                                // ... else if (...) { ... }
2370                                let other_condition = this.general_expression(lexer, ctx)?;
2371                                let other_block = this.block(lexer, ctx, brace_nesting_level)?;
2372                                elsif_stack.push((elseif_span_start, other_condition, other_block));
2373                                elseif_span_start = lexer.start_byte_offset();
2374                            };
2375
2376                            // Reverse-fold the else-if blocks into nested `If` statements.
2377                            // Note: we may consider uplifting this to the IR.
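                            // e.g. `if a { } else if b { } else { }`
                            // (illustrative) becomes an `If(a)` whose reject
                            // block holds `If(b)`, whose reject block in turn
                            // holds the final `else` body.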
2378                            for (other_span_start, other_cond, other_block) in
2379                                elsif_stack.into_iter().rev()
2380                            {
2381                                let sub_stmt = ast::StatementKind::If {
2382                                    condition: other_cond,
2383                                    accept: other_block.0,
2384                                    reject,
2385                                };
2386                                reject = ast::Block::default();
2387                                let span = lexer.span_from(other_span_start);
2388                                reject.stmts.push(ast::Statement {
2389                                    kind: sub_stmt,
2390                                    span,
2391                                })
2392                            }
2393
2394                            ast::StatementKind::If {
2395                                condition,
2396                                accept,
2397                                reject,
2398                            }
2399                        }
2400                        "switch" => {
2401                            let _ = lexer.next();
2402                            let selector = this.general_expression(lexer, ctx)?;
2403                            let brace_span = lexer.expect_span(Token::Paren('{'))?;
2404                            let brace_nesting_level =
2405                                Self::increase_brace_nesting(brace_nesting_level, brace_span)?;
2406                            let mut cases = Vec::new();
2407
2408                            loop {
2409                                // cases + default
2410                                match lexer.next() {
2411                                    (Token::Word("case"), _) => {
2412                                        // parse a list of values
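                                        // e.g. `case 1, 2, 3:` yields two
                                        // fall-through cases for `1` and `2`
                                        // and a final case for `3` that owns
                                        // the body.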
2413                                        let value = loop {
2414                                            let value = this.switch_value(lexer, ctx)?;
2415                                            if lexer.skip(Token::Separator(',')) {
2416                                                // the list of values ends with `:` or the start of a compound statement
2417                                                let next_token = lexer.peek().0;
2418                                                if next_token == Token::Separator(':')
2419                                                    || is_start_of_compound_statement(next_token)
2420                                                {
2421                                                    break value;
2422                                                }
2423                                            } else {
2424                                                break value;
2425                                            }
2426                                            cases.push(ast::SwitchCase {
2427                                                value,
2428                                                body: ast::Block::default(),
2429                                                fall_through: true,
2430                                            });
2431                                        };
2432
2433                                        lexer.skip(Token::Separator(':'));
2434
2435                                        let body = this.block(lexer, ctx, brace_nesting_level)?.0;
2436
2437                                        cases.push(ast::SwitchCase {
2438                                            value,
2439                                            body,
2440                                            fall_through: false,
2441                                        });
2442                                    }
2443                                    (Token::Word("default"), _) => {
2444                                        lexer.skip(Token::Separator(':'));
2445                                        let body = this.block(lexer, ctx, brace_nesting_level)?.0;
2446                                        cases.push(ast::SwitchCase {
2447                                            value: ast::SwitchValue::Default,
2448                                            body,
2449                                            fall_through: false,
2450                                        });
2451                                    }
2452                                    (Token::Paren('}'), _) => break,
2453                                    (_, span) => {
2454                                        return Err(Box::new(Error::Unexpected(
2455                                            span,
2456                                            ExpectedToken::SwitchItem,
2457                                        )))
2458                                    }
2459                                }
2460                            }
2461
2462                            ast::StatementKind::Switch { selector, cases }
2463                        }
2464                        "loop" => this.r#loop(lexer, ctx, brace_nesting_level)?,
2465                        "while" => {
2466                            let _ = lexer.next();
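                            // Desugar `while cond { body }` (illustrative)
                            // into `loop { if cond { } else { break } body }`
                            // with an empty continuing block.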
2467                            let mut body = ast::Block::default();
2468
2469                            let (condition, span) =
2470                                lexer.capture_span(|lexer| this.general_expression(lexer, ctx))?;
2471                            let mut reject = ast::Block::default();
2472                            reject.stmts.push(ast::Statement {
2473                                kind: ast::StatementKind::Break,
2474                                span,
2475                            });
2476
2477                            body.stmts.push(ast::Statement {
2478                                kind: ast::StatementKind::If {
2479                                    condition,
2480                                    accept: ast::Block::default(),
2481                                    reject,
2482                                },
2483                                span,
2484                            });
2485
2486                            let (block, span) = this.block(lexer, ctx, brace_nesting_level)?;
2487                            body.stmts.push(ast::Statement {
2488                                kind: ast::StatementKind::Block(block),
2489                                span,
2490                            });
2491
2492                            ast::StatementKind::Loop {
2493                                body,
2494                                continuing: ast::Block::default(),
2495                                break_if: None,
2496                            }
2497                        }
2498                        "for" => {
2499                            let _ = lexer.next();
2500                            lexer.expect(Token::Paren('('))?;
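                            // Desugar `for (init; cond; update) body`
                            // (illustrative): `init` is emitted into the
                            // enclosing block, then the rest becomes
                            // `loop { if cond { } else { break } body
                            // continuing { update } }`.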
2501
2502                            ctx.local_table.push_scope();
2503
2504                            if !lexer.skip(Token::Separator(';')) {
2505                                let num_statements = block.stmts.len();
2506                                let (_, span) = {
2507                                    let ctx = &mut *ctx;
2508                                    let block = &mut *block;
2509                                    lexer.capture_span(|lexer| {
2510                                        this.statement(lexer, ctx, block, brace_nesting_level)
2511                                    })?
2512                                };
2513
2514                                if block.stmts.len() != num_statements {
2515                                    match block.stmts.last().unwrap().kind {
2516                                        ast::StatementKind::Call { .. }
2517                                        | ast::StatementKind::Assign { .. }
2518                                        | ast::StatementKind::LocalDecl(_) => {}
2519                                        _ => {
2520                                            return Err(Box::new(Error::InvalidForInitializer(
2521                                                span,
2522                                            )))
2523                                        }
2524                                    }
2525                                }
2526                            };
2527
2528                            let mut body = ast::Block::default();
2529                            if !lexer.skip(Token::Separator(';')) {
2530                                let (condition, span) =
2531                                    lexer.capture_span(|lexer| -> Result<'_, _> {
2532                                        let condition = this.general_expression(lexer, ctx)?;
2533                                        lexer.expect(Token::Separator(';'))?;
2534                                        Ok(condition)
2535                                    })?;
2536                                let mut reject = ast::Block::default();
2537                                reject.stmts.push(ast::Statement {
2538                                    kind: ast::StatementKind::Break,
2539                                    span,
2540                                });
2541                                body.stmts.push(ast::Statement {
2542                                    kind: ast::StatementKind::If {
2543                                        condition,
2544                                        accept: ast::Block::default(),
2545                                        reject,
2546                                    },
2547                                    span,
2548                                });
2549                            };
2550
2551                            let mut continuing = ast::Block::default();
2552                            if !lexer.skip(Token::Paren(')')) {
2553                                this.function_call_or_assignment_statement(
2554                                    lexer,
2555                                    ctx,
2556                                    &mut continuing,
2557                                )?;
2558                                lexer.expect(Token::Paren(')'))?;
2559                            }
2560
2561                            let (block, span) = this.block(lexer, ctx, brace_nesting_level)?;
2562                            body.stmts.push(ast::Statement {
2563                                kind: ast::StatementKind::Block(block),
2564                                span,
2565                            });
2566
2567                            ctx.local_table.pop_scope();
2568
2569                            ast::StatementKind::Loop {
2570                                body,
2571                                continuing,
2572                                break_if: None,
2573                            }
2574                        }
2575                        "break" => {
2576                            let (_, span) = lexer.next();
2577                            // Check whether the next token is `if`; this indicates that
2578                            // the user tried to write a `break if`, which is illegal in
2579                            // this position.
2580                            let (peeked_token, peeked_span) = lexer.peek();
2581                            if let Token::Word("if") = peeked_token {
2582                                let span = span.until(&peeked_span);
2583                                return Err(Box::new(Error::InvalidBreakIf(span)));
2584                            }
2585                            lexer.expect(Token::Separator(';'))?;
2586                            ast::StatementKind::Break
2587                        }
2588                        "continue" => {
2589                            let _ = lexer.next();
2590                            lexer.expect(Token::Separator(';'))?;
2591                            ast::StatementKind::Continue
2592                        }
2593                        "discard" => {
2594                            let _ = lexer.next();
2595                            lexer.expect(Token::Separator(';'))?;
2596                            ast::StatementKind::Kill
2597                        }
2598                        // https://www.w3.org/TR/WGSL/#const-assert-statement
2599                        "const_assert" => {
2600                            let _ = lexer.next();
2601                            // parentheses are optional
2602                            let paren = lexer.skip(Token::Paren('('));
2603
2604                            let condition = this.general_expression(lexer, ctx)?;
2605
2606                            if paren {
2607                                lexer.expect(Token::Paren(')'))?;
2608                            }
2609                            lexer.expect(Token::Separator(';'))?;
2610                            ast::StatementKind::ConstAssert(condition)
2611                        }
2612                        // assignment or a function call
2613                        _ => {
2614                            this.function_call_or_assignment_statement(lexer, ctx, block)?;
2615                            lexer.expect(Token::Separator(';'))?;
2616                            this.pop_rule_span(lexer);
2617                            return Ok(());
2618                        }
2619                    };
2620
2621                    let span = this.pop_rule_span(lexer);
2622                    block.stmts.push(ast::Statement { kind, span });
2623                }
2624                _ => {
2625                    this.assignment_statement(lexer, ctx, block)?;
2626                    lexer.expect(Token::Separator(';'))?;
2627                    this.pop_rule_span(lexer);
2628                }
2629            }
2630            Ok(())
2631        })
2632    }
2633
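    /// Parse a `loop` statement: the leading `loop` keyword is consumed by the
    /// initial `lexer.next()`, followed by the loop body, an optional
    /// `continuing` block, and an optional trailing `break if`.
    ///
    /// Illustrative WGSL input (assuming a mutable `i32` variable `i` is in
    /// scope):
    ///
    /// ```wgsl
    /// loop {
    ///     i = i + 1;
    ///     continuing {
    ///         break if i >= 4;
    ///     }
    /// }
    /// ```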
2634    fn r#loop<'a>(
2635        &mut self,
2636        lexer: &mut Lexer<'a>,
2637        ctx: &mut ExpressionContext<'a, '_, '_>,
2638        brace_nesting_level: u8,
2639    ) -> Result<'a, ast::StatementKind<'a>> {
2640        let _ = lexer.next();
2641        let mut body = ast::Block::default();
2642        let mut continuing = ast::Block::default();
2643        let mut break_if = None;
2644
2645        let brace_span = lexer.expect_span(Token::Paren('{'))?;
2646        let brace_nesting_level = Self::increase_brace_nesting(brace_nesting_level, brace_span)?;
2647
2648        ctx.local_table.push_scope();
2649
2650        loop {
2651            if lexer.skip(Token::Word("continuing")) {
2652                // Branch for the `continuing` block; it must be
2653                // the last thing in the loop body.
2654
2655                // Expect an opening brace to start the continuing block
2656                let brace_span = lexer.expect_span(Token::Paren('{'))?;
2657                let brace_nesting_level =
2658                    Self::increase_brace_nesting(brace_nesting_level, brace_span)?;
2659                loop {
2660                    if lexer.skip(Token::Word("break")) {
2661                        // Branch for the `break if` statement; it has the
2662                        // form `break if <expr>;` and must be the last
2663                        // statement in a continuing block.
2664
2665                        // The break must be followed by an `if` to form
2666                        // the break if
2667                        lexer.expect(Token::Word("if"))?;
2668
2669                        let condition = self.general_expression(lexer, ctx)?;
2670                        // Set the condition of the break if to the newly parsed
2671                        // expression
2672                        break_if = Some(condition);
2673
2674                        // Expect a semicolon to close the statement
2675                        lexer.expect(Token::Separator(';'))?;
2676                        // Expect a closing brace to close the continuing block,
2677                        // since the break if must be the last statement
2678                        lexer.expect(Token::Paren('}'))?;
2679                        // Stop parsing the continuing block
2680                        break;
2681                    } else if lexer.skip(Token::Paren('}')) {
2682                        // A closing brace means we have reached the end of
2683                        // the continuing block and should stop processing.
2684                        break;
2685                    } else {
2686                        // Otherwise try to parse a statement
2687                        self.statement(lexer, ctx, &mut continuing, brace_nesting_level)?;
2688                    }
2689                }
2690                // Since the continuing block must be the last part of the loop body,
2691                // we expect to see a closing brace to end the loop body
2692                lexer.expect(Token::Paren('}'))?;
2693                break;
2694            }
2695            if lexer.skip(Token::Paren('}')) {
2696                // A closing brace means we have reached the end of the
2697                // loop body and should stop processing.
2698                break;
2699            }
2700            // Otherwise try to parse a statement
2701            self.statement(lexer, ctx, &mut body, brace_nesting_level)?;
2702        }
2703
2704        ctx.local_table.pop_scope();
2705
2706        Ok(ast::StatementKind::Loop {
2707            body,
2708            continuing,
2709            break_if,
2710        })
2711    }
2712
2713    /// compound_statement
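    ///
    /// Parse a brace-delimited statement block such as `{ let x = 1.0; f(x); }`
    /// (illustrative). A leading `@diagnostic(...)` attribute is parsed here
    /// but currently rejected with a not-yet-implemented error.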
2714    fn block<'a>(
2715        &mut self,
2716        lexer: &mut Lexer<'a>,
2717        ctx: &mut ExpressionContext<'a, '_, '_>,
2718        brace_nesting_level: u8,
2719    ) -> Result<'a, (ast::Block<'a>, Span)> {
2720        self.push_rule_span(Rule::Block, lexer);
2721
2722        ctx.local_table.push_scope();
2723
2724        let mut diagnostic_filters = DiagnosticFilterMap::new();
2725
2726        self.push_rule_span(Rule::Attribute, lexer);
2727        while lexer.skip(Token::Attribute) {
2728            let (name, name_span) = lexer.next_ident_with_span()?;
2729            if let Some(DirectiveKind::Diagnostic) = DirectiveKind::from_ident(name) {
2730                let filter = self.diagnostic_filter(lexer)?;
2731                let span = self.peek_rule_span(lexer);
2732                diagnostic_filters
2733                    .add(filter, span, ShouldConflictOnFullDuplicate::Yes)
2734                    .map_err(|e| Box::new(e.into()))?;
2735            } else {
2736                return Err(Box::new(Error::Unexpected(
2737                    name_span,
2738                    ExpectedToken::DiagnosticAttribute,
2739                )));
2740            }
2741        }
2742        self.pop_rule_span(lexer);
2743
2744        if !diagnostic_filters.is_empty() {
2745            return Err(Box::new(
2746                Error::DiagnosticAttributeNotYetImplementedAtParseSite {
2747                    site_name_plural: "compound statements",
2748                    spans: diagnostic_filters.spans().collect(),
2749                },
2750            ));
2751        }
2752
2753        let brace_span = lexer.expect_span(Token::Paren('{'))?;
2754        let brace_nesting_level = Self::increase_brace_nesting(brace_nesting_level, brace_span)?;
2755        let mut block = ast::Block::default();
2756        while !lexer.skip(Token::Paren('}')) {
2757            self.statement(lexer, ctx, &mut block, brace_nesting_level)?;
2758        }
2759
2760        ctx.local_table.pop_scope();
2761
2762        let span = self.pop_rule_span(lexer);
2763        Ok((block, span))
2764    }
2765
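    /// Parse zero or more IO-binding attributes, e.g. `@location(0)` or
    /// `@builtin(position)` (illustrative), and combine them into an
    /// [`ast::Binding`] if any were present.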
2766    fn varying_binding<'a>(
2767        &mut self,
2768        lexer: &mut Lexer<'a>,
2769        ctx: &mut ExpressionContext<'a, '_, '_>,
2770    ) -> Result<'a, Option<ast::Binding<'a>>> {
2771        let mut bind_parser = BindingParser::default();
2772        self.push_rule_span(Rule::Attribute, lexer);
2773
2774        while lexer.skip(Token::Attribute) {
2775            let (word, span) = lexer.next_ident_with_span()?;
2776            bind_parser.parse(self, lexer, word, span, ctx)?;
2777        }
2778
2779        let span = self.pop_rule_span(lexer);
2780        bind_parser.finish(span)
2781    }
2782
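    /// Parse a function declaration; the caller has already consumed the `fn`
    /// keyword and any preceding attributes. For example (illustrative):
    ///
    /// ```wgsl
    /// fn add(a: i32, b: i32) -> i32 {
    ///     return a + b;
    /// }
    /// ```
    ///
    /// The arguments and the body share a single scope, which is why the body
    /// is parsed inline here instead of through `self.block`.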
2783    fn function_decl<'a>(
2784        &mut self,
2785        lexer: &mut Lexer<'a>,
2786        diagnostic_filter_leaf: Option<Handle<DiagnosticFilterNode>>,
2787        must_use: Option<Span>,
2788        out: &mut ast::TranslationUnit<'a>,
2789        dependencies: &mut FastIndexSet<ast::Dependency<'a>>,
2790    ) -> Result<'a, ast::Function<'a>> {
2791        self.push_rule_span(Rule::FunctionDecl, lexer);
2792        // read function name
2793        let fun_name = lexer.next_ident()?;
2794
2795        let mut locals = Arena::new();
2796
2797        let mut ctx = ExpressionContext {
2798            expressions: &mut out.expressions,
2799            local_table: &mut SymbolTable::default(),
2800            locals: &mut locals,
2801            types: &mut out.types,
2802            unresolved: dependencies,
2803        };
2804
2805        // start a scope that contains arguments as well as the function body
2806        ctx.local_table.push_scope();
2807
2808        // read parameter list
2809        let mut arguments = Vec::new();
2810        lexer.expect(Token::Paren('('))?;
2811        let mut ready = true;
2812        while !lexer.skip(Token::Paren(')')) {
2813            if !ready {
2814                return Err(Box::new(Error::Unexpected(
2815                    lexer.next().1,
2816                    ExpectedToken::Token(Token::Separator(',')),
2817                )));
2818            }
2819            let binding = self.varying_binding(lexer, &mut ctx)?;
2820
2821            let param_name = lexer.next_ident()?;
2822
2823            lexer.expect(Token::Separator(':'))?;
2824            let param_type = self.type_decl(lexer, &mut ctx)?;
2825
2826            let handle = ctx.declare_local(param_name)?;
2827            arguments.push(ast::FunctionArgument {
2828                name: param_name,
2829                ty: param_type,
2830                binding,
2831                handle,
2832            });
2833            ready = lexer.skip(Token::Separator(','));
2834        }
2835        // read return type
2836        let result = if lexer.skip(Token::Arrow) {
2837            let binding = self.varying_binding(lexer, &mut ctx)?;
2838            let ty = self.type_decl(lexer, &mut ctx)?;
2839            let must_use = must_use.is_some();
2840            Some(ast::FunctionResult {
2841                ty,
2842                binding,
2843                must_use,
2844            })
2845        } else if let Some(must_use) = must_use {
2846            return Err(Box::new(Error::FunctionMustUseReturnsVoid(
2847                must_use,
2848                self.peek_rule_span(lexer),
2849            )));
2850        } else {
2851            None
2852        };
2853
2854        // do not use `self.block` here, since we must not push a new scope
2855        lexer.expect(Token::Paren('{'))?;
2856        let brace_nesting_level = 1;
2857        let mut body = ast::Block::default();
2858        while !lexer.skip(Token::Paren('}')) {
2859            self.statement(lexer, &mut ctx, &mut body, brace_nesting_level)?;
2860        }
2861
2862        ctx.local_table.pop_scope();
2863
2864        let fun = ast::Function {
2865            entry_point: None,
2866            name: fun_name,
2867            arguments,
2868            result,
2869            body,
2870            diagnostic_filter_leaf,
2871            doc_comments: Vec::new(),
2872        };
2873
2874        // done
2875        self.pop_rule_span(lexer);
2876
2877        Ok(fun)
2878    }
2879
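    /// Parse the identifier list of an `enable` or `requires` directive, e.g.
    /// `enable f16, dual_source_blending;` (illustrative), calling `handler`
    /// on each identifier and stopping at the terminating `;`.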
2880    fn directive_ident_list<'a>(
2881        &self,
2882        lexer: &mut Lexer<'a>,
2883        handler: impl FnMut(&'a str, Span) -> Result<'a, ()>,
2884    ) -> Result<'a, ()> {
2885        let mut handler = handler;
2886        'next_arg: loop {
2887            let (ident, span) = lexer.next_ident_with_span()?;
2888            handler(ident, span)?;
2889
2890            let expected_token = match lexer.peek().0 {
2891                Token::Separator(',') => {
2892                    let _ = lexer.next();
2893                    if matches!(lexer.peek().0, Token::Word(..)) {
2894                        continue 'next_arg;
2895                    }
2896                    ExpectedToken::AfterIdentListComma
2897                }
2898                _ => ExpectedToken::AfterIdentListArg,
2899            };
2900
2901            if !matches!(lexer.next().0, Token::Separator(';')) {
2902                return Err(Box::new(Error::Unexpected(span, expected_token)));
2903            }
2904
2905            break Ok(());
2906        }
2907    }
2908
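    /// Parse one module-scope declaration together with its attributes and doc
    /// comments, and append it to `out.decls`: a bare `;`, a `struct`, `alias`,
    /// `const`, `override`, or `var` declaration, a `fn`, or a `const_assert`.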
2909    fn global_decl<'a>(
2910        &mut self,
2911        lexer: &mut Lexer<'a>,
2912        out: &mut ast::TranslationUnit<'a>,
2913    ) -> Result<'a, ()> {
2914        let doc_comments = lexer.accumulate_doc_comments();
2915
2916        // read attributes
2917        let mut binding = None;
2918        let mut stage = ParsedAttribute::default();
2919        let mut compute_like_span = Span::new(0, 0);
2920        let mut workgroup_size = ParsedAttribute::default();
2921        let mut early_depth_test = ParsedAttribute::default();
2922        let (mut bind_index, mut bind_group) =
2923            (ParsedAttribute::default(), ParsedAttribute::default());
2924        let mut id = ParsedAttribute::default();
2925        let mut payload = ParsedAttribute::default();
2926        let mut mesh_output = ParsedAttribute::default();
2927
2928        let mut must_use: ParsedAttribute<Span> = ParsedAttribute::default();
2929
2930        let mut dependencies = FastIndexSet::default();
2931        let mut ctx = ExpressionContext {
2932            expressions: &mut out.expressions,
2933            local_table: &mut SymbolTable::default(),
2934            locals: &mut Arena::new(),
2935            types: &mut out.types,
2936            unresolved: &mut dependencies,
2937        };
2938        let mut diagnostic_filters = DiagnosticFilterMap::new();
2939        let ensure_no_diag_attrs = |on_what, filters: DiagnosticFilterMap| -> Result<()> {
2940            if filters.is_empty() {
2941                Ok(())
2942            } else {
2943                Err(Box::new(Error::DiagnosticAttributeNotSupported {
2944                    on_what,
2945                    spans: filters.spans().collect(),
2946                }))
2947            }
2948        };
2949
2950        self.push_rule_span(Rule::Attribute, lexer);
2951        while lexer.skip(Token::Attribute) {
2952            let (name, name_span) = lexer.next_ident_with_span()?;
2953            if let Some(DirectiveKind::Diagnostic) = DirectiveKind::from_ident(name) {
2954                let filter = self.diagnostic_filter(lexer)?;
2955                let span = self.peek_rule_span(lexer);
2956                diagnostic_filters
2957                    .add(filter, span, ShouldConflictOnFullDuplicate::Yes)
2958                    .map_err(|e| Box::new(e.into()))?;
2959                continue;
2960            }
2961            match name {
2962                "binding" => {
2963                    lexer.expect(Token::Paren('('))?;
2964                    bind_index.set(self.general_expression(lexer, &mut ctx)?, name_span)?;
2965                    lexer.expect(Token::Paren(')'))?;
2966                }
2967                "group" => {
2968                    lexer.expect(Token::Paren('('))?;
2969                    bind_group.set(self.general_expression(lexer, &mut ctx)?, name_span)?;
2970                    lexer.expect(Token::Paren(')'))?;
2971                }
2972                "id" => {
2973                    lexer.expect(Token::Paren('('))?;
2974                    id.set(self.general_expression(lexer, &mut ctx)?, name_span)?;
2975                    lexer.expect(Token::Paren(')'))?;
2976                }
2977                "vertex" => {
2978                    stage.set(ShaderStage::Vertex, name_span)?;
2979                }
2980                "fragment" => {
2981                    stage.set(ShaderStage::Fragment, name_span)?;
2982                }
2983                "compute" => {
2984                    stage.set(ShaderStage::Compute, name_span)?;
2985                    compute_like_span = name_span;
2986                }
2987                "task" => {
2988                    lexer.require_enable_extension(
2989                        ImplementedEnableExtension::WgpuMeshShader,
2990                        name_span,
2991                    )?;
2992                    stage.set(ShaderStage::Task, name_span)?;
2993                    compute_like_span = name_span;
2994                }
2995                "mesh" => {
2996                    lexer.require_enable_extension(
2997                        ImplementedEnableExtension::WgpuMeshShader,
2998                        name_span,
2999                    )?;
3000                    stage.set(ShaderStage::Mesh, name_span)?;
3001                    compute_like_span = name_span;
3002
3003                    lexer.expect(Token::Paren('('))?;
3004                    mesh_output.set(lexer.next_ident_with_span()?, name_span)?;
3005                    lexer.expect(Token::Paren(')'))?;
3006                }
3007                "payload" => {
3008                    lexer.require_enable_extension(
3009                        ImplementedEnableExtension::WgpuMeshShader,
3010                        name_span,
3011                    )?;
3012                    lexer.expect(Token::Paren('('))?;
3013                    payload.set(lexer.next_ident_with_span()?, name_span)?;
3014                    lexer.expect(Token::Paren(')'))?;
3015                }
3016                "workgroup_size" => {
3017                    lexer.expect(Token::Paren('('))?;
3018                    let mut new_workgroup_size = [None; 3];
3019                    for (i, size) in new_workgroup_size.iter_mut().enumerate() {
3020                        *size = Some(self.general_expression(lexer, &mut ctx)?);
3021                        match lexer.next() {
3022                            (Token::Paren(')'), _) => break,
3023                            (Token::Separator(','), _) if i != 2 => (),
3024                            other => {
3025                                return Err(Box::new(Error::Unexpected(
3026                                    other.1,
3027                                    ExpectedToken::WorkgroupSizeSeparator,
3028                                )))
3029                            }
3030                        }
3031                    }
3032                    workgroup_size.set(new_workgroup_size, name_span)?;
3033                }
3034                "early_depth_test" => {
3035                    lexer.expect(Token::Paren('('))?;
3036                    let (ident, ident_span) = lexer.next_ident_with_span()?;
3037                    let value = if ident == "force" {
3038                        crate::EarlyDepthTest::Force
3039                    } else {
3040                        crate::EarlyDepthTest::Allow {
3041                            conservative: conv::map_conservative_depth(ident, ident_span)?,
3042                        }
3043                    };
3044                    lexer.expect(Token::Paren(')'))?;
3045                    early_depth_test.set(value, name_span)?;
3046                }
3047                "must_use" => {
3048                    must_use.set(name_span, name_span)?;
3049                }
3050                _ => return Err(Box::new(Error::UnknownAttribute(name_span))),
3051            }
3052        }
3053
3054        let attrib_span = self.pop_rule_span(lexer);
3055        match (bind_group.value, bind_index.value) {
3056            (Some(group), Some(index)) => {
3057                binding = Some(ast::ResourceBinding {
3058                    group,
3059                    binding: index,
3060                });
3061            }
3062            (Some(_), None) => {
3063                return Err(Box::new(Error::MissingAttribute("binding", attrib_span)))
3064            }
3065            (None, Some(_)) => return Err(Box::new(Error::MissingAttribute("group", attrib_span))),
3066            (None, None) => {}
3067        }
3068
3069        // read item
3070        let start = lexer.start_byte_offset();
3071        let kind = match lexer.next() {
3072            (Token::Separator(';'), _) => {
3073                ensure_no_diag_attrs(
3074                    DiagnosticAttributeNotSupportedPosition::SemicolonInModulePosition,
3075                    diagnostic_filters,
3076                )?;
3077                None
3078            }
3079            (Token::Word(word), directive_span) if DirectiveKind::from_ident(word).is_some() => {
3080                return Err(Box::new(Error::DirectiveAfterFirstGlobalDecl {
3081                    directive_span,
3082                }));
3083            }
3084            (Token::Word("struct"), _) => {
3085                ensure_no_diag_attrs("`struct`s".into(), diagnostic_filters)?;
3086
3087                let name = lexer.next_ident()?;
3088
3089                let members = self.struct_body(lexer, &mut ctx)?;
3090
3091                Some(ast::GlobalDeclKind::Struct(ast::Struct {
3092                    name,
3093                    members,
3094                    doc_comments,
3095                }))
3096            }
3097            (Token::Word("alias"), _) => {
3098                ensure_no_diag_attrs("`alias`es".into(), diagnostic_filters)?;
3099
3100                let name = lexer.next_ident()?;
3101
3102                lexer.expect(Token::Operation('='))?;
3103                let ty = self.type_decl(lexer, &mut ctx)?;
3104                lexer.expect(Token::Separator(';'))?;
3105                Some(ast::GlobalDeclKind::Type(ast::TypeAlias { name, ty }))
3106            }
3107            (Token::Word("const"), _) => {
3108                ensure_no_diag_attrs("`const`s".into(), diagnostic_filters)?;
3109
3110                let name = lexer.next_ident()?;
3111
3112                let ty = if lexer.skip(Token::Separator(':')) {
3113                    let ty = self.type_decl(lexer, &mut ctx)?;
3114                    Some(ty)
3115                } else {
3116                    None
3117                };
3118
3119                lexer.expect(Token::Operation('='))?;
3120                let init = self.general_expression(lexer, &mut ctx)?;
3121                lexer.expect(Token::Separator(';'))?;
3122
3123                Some(ast::GlobalDeclKind::Const(ast::Const {
3124                    name,
3125                    ty,
3126                    init,
3127                    doc_comments,
3128                }))
3129            }
3130            (Token::Word("override"), _) => {
3131                ensure_no_diag_attrs("`override`s".into(), diagnostic_filters)?;
3132
3133                let name = lexer.next_ident()?;
3134
3135                let ty = if lexer.skip(Token::Separator(':')) {
3136                    Some(self.type_decl(lexer, &mut ctx)?)
3137                } else {
3138                    None
3139                };
3140
3141                let init = if lexer.skip(Token::Operation('=')) {
3142                    Some(self.general_expression(lexer, &mut ctx)?)
3143                } else {
3144                    None
3145                };
3146
3147                lexer.expect(Token::Separator(';'))?;
3148
3149                Some(ast::GlobalDeclKind::Override(ast::Override {
3150                    name,
3151                    id: id.value,
3152                    ty,
3153                    init,
3154                }))
3155            }
3156            (Token::Word("var"), _) => {
3157                ensure_no_diag_attrs("`var`s".into(), diagnostic_filters)?;
3158
3159                let mut var = self.variable_decl(lexer, &mut ctx)?;
3160                var.binding = binding.take();
3161                var.doc_comments = doc_comments;
3162                Some(ast::GlobalDeclKind::Var(var))
3163            }
3164            (Token::Word("fn"), _) => {
3165                let diagnostic_filter_leaf = Self::write_diagnostic_filters(
3166                    &mut out.diagnostic_filters,
3167                    diagnostic_filters,
3168                    out.diagnostic_filter_leaf,
3169                );
3170
3171                let function = self.function_decl(
3172                    lexer,
3173                    diagnostic_filter_leaf,
3174                    must_use.value,
3175                    out,
3176                    &mut dependencies,
3177                )?;
3178                Some(ast::GlobalDeclKind::Fn(ast::Function {
3179                    entry_point: if let Some(stage) = stage.value {
3180                        if stage.compute_like() && workgroup_size.value.is_none() {
3181                            return Err(Box::new(Error::MissingWorkgroupSize(compute_like_span)));
3182                        }
3183
3184                        Some(ast::EntryPoint {
3185                            stage,
3186                            early_depth_test: early_depth_test.value,
3187                            workgroup_size: workgroup_size.value,
3188                            mesh_output_variable: mesh_output.value,
3189                            task_payload: payload.value,
3190                        })
3191                    } else {
3192                        None
3193                    },
3194                    doc_comments,
3195                    ..function
3196                }))
3197            }
3198            (Token::Word("const_assert"), _) => {
3199                ensure_no_diag_attrs("`const_assert`s".into(), diagnostic_filters)?;
3200
3201                // parentheses are optional
3202                let paren = lexer.skip(Token::Paren('('));
3203
3204                let condition = self.general_expression(lexer, &mut ctx)?;
3205
3206                if paren {
3207                    lexer.expect(Token::Paren(')'))?;
3208                }
3209                lexer.expect(Token::Separator(';'))?;
3210                Some(ast::GlobalDeclKind::ConstAssert(condition))
3211            }
3212            (Token::End, _) => return Ok(()),
3213            other => {
3214                return Err(Box::new(Error::Unexpected(
3215                    other.1,
3216                    ExpectedToken::GlobalItem,
3217                )))
3218            }
3219        };
3220
3221        if let Some(kind) = kind {
3222            out.decls.append(
3223                ast::GlobalDecl { kind, dependencies },
3224                lexer.span_from(start),
3225            );
3226        }
3227
3228        if !self.rules.is_empty() {
3229            log::error!("Reached the end of global decl, but rule stack is not empty");
3230            log::error!("Rules: {:?}", self.rules);
3231            return Err(Box::new(Error::Internal("rule stack is not empty")));
3232        };
3233
3234        match binding {
3235            None => Ok(()),
3236            Some(_) => Err(Box::new(Error::Internal(
3237                "we had the attribute but no var?",
3238            ))),
3239        }
3240    }
3241
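    /// Parse `source` into an [`ast::TranslationUnit`]: module doc comments
    /// first, then any `diagnostic`, `enable`, and `requires` directives, then
    /// global declarations until the end of the input.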
3242    pub fn parse<'a>(
3243        &mut self,
3244        source: &'a str,
3245        options: &Options,
3246    ) -> Result<'a, ast::TranslationUnit<'a>> {
3247        self.reset();
3248
3249        let mut lexer = Lexer::new(source, !options.parse_doc_comments);
3250        let mut tu = ast::TranslationUnit::default();
3251        let mut enable_extensions = EnableExtensions::empty();
3252        let mut diagnostic_filters = DiagnosticFilterMap::new();
3253
3254        // Parse module doc comments.
3255        tu.doc_comments = lexer.accumulate_module_doc_comments();
3256
3257        // Parse directives.
3258        while let Ok((ident, _directive_ident_span)) = lexer.peek_ident_with_span() {
3259            if let Some(kind) = DirectiveKind::from_ident(ident) {
3260                self.push_rule_span(Rule::Directive, &mut lexer);
3261                let _ = lexer.next_ident_with_span().unwrap();
3262                match kind {
3263                    DirectiveKind::Diagnostic => {
3264                        let diagnostic_filter = self.diagnostic_filter(&mut lexer)?;
3265                        let span = self.peek_rule_span(&lexer);
3266                        diagnostic_filters
3267                            .add(diagnostic_filter, span, ShouldConflictOnFullDuplicate::No)
3268                            .map_err(|e| Box::new(e.into()))?;
3269                        lexer.expect(Token::Separator(';'))?;
3270                    }
3271                    DirectiveKind::Enable => {
3272                        self.directive_ident_list(&mut lexer, |ident, span| {
3273                            let kind = EnableExtension::from_ident(ident, span)?;
3274                            let extension = match kind {
3275                                EnableExtension::Implemented(kind) => kind,
3276                                EnableExtension::Unimplemented(kind) => {
3277                                    return Err(Box::new(Error::EnableExtensionNotYetImplemented {
3278                                        kind,
3279                                        span,
3280                                    }))
3281                                }
3282                            };
3283                            enable_extensions.add(extension);
3284                            Ok(())
3285                        })?;
3286                    }
3287                    DirectiveKind::Requires => {
3288                        self.directive_ident_list(&mut lexer, |ident, span| {
3289                            match LanguageExtension::from_ident(ident) {
3290                                Some(LanguageExtension::Implemented(_kind)) => {
3291                                    // NOTE: No further validation is needed for an implemented
3292                                    // extension, so just discard the parsed information. If we
3293                                    // ever want to surface this in diagnostics, we may need to
3294                                    // record which language extensions were required here.
3295                                    Ok(())
3296                                }
3297                                Some(LanguageExtension::Unimplemented(kind)) => {
3298                                    Err(Box::new(Error::LanguageExtensionNotYetImplemented {
3299                                        kind,
3300                                        span,
3301                                    }))
3302                                }
3303                                None => Err(Box::new(Error::UnknownLanguageExtension(span, ident))),
3304                            }
3305                        })?;
3306                    }
3307                }
3308                self.pop_rule_span(&lexer);
3309            } else {
3310                break;
3311            }
3312        }
3313
3314        lexer.enable_extensions = enable_extensions.clone();
3315        tu.enable_extensions = enable_extensions;
3316        tu.diagnostic_filter_leaf =
3317            Self::write_diagnostic_filters(&mut tu.diagnostic_filters, diagnostic_filters, None);
3318
3319        loop {
3320            match self.global_decl(&mut lexer, &mut tu) {
3321                Err(error) => return Err(error),
3322                Ok(()) => {
3323                    if lexer.peek().0 == Token::End {
3324                        break;
3325                    }
3326                }
3327            }
3328        }
3329
3330        Ok(tu)
3331    }
3332
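    /// Increment `brace_nesting_level`, returning an error if doing so would
    /// exceed the WGSL limit on nested brace-enclosed statements cited below.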
3333    fn increase_brace_nesting(brace_nesting_level: u8, brace_span: Span) -> Result<'static, u8> {
3334        // From [spec.](https://gpuweb.github.io/gpuweb/wgsl/#limits):
3335        //
3336        // > § 2.4. Limits
3337        // >
3338        // > …
3339        // >
3340        // > Maximum nesting depth of brace-enclosed statements in a function[:] 127
3341        const BRACE_NESTING_MAXIMUM: u8 = 127;
3342        if brace_nesting_level + 1 > BRACE_NESTING_MAXIMUM {
3343            return Err(Box::new(Error::ExceededLimitForNestedBraces {
3344                span: brace_span,
3345                limit: BRACE_NESTING_MAXIMUM,
3346            }));
3347        }
3348        Ok(brace_nesting_level + 1)
3349    }
3350
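    /// Parse the parenthesized arguments of a `diagnostic` directive or
    /// attribute, e.g. `(off, derivative_uniformity)` (illustrative): a
    /// severity, a comma, a possibly dotted rule name, and an optional
    /// trailing comma.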
3351    fn diagnostic_filter<'a>(&self, lexer: &mut Lexer<'a>) -> Result<'a, DiagnosticFilter> {
3352        lexer.expect(Token::Paren('('))?;
3353
3354        let (severity_control_name, severity_control_name_span) = lexer.next_ident_with_span()?;
3355        let new_severity = diagnostic_filter::Severity::from_wgsl_ident(severity_control_name)
3356            .ok_or(Error::DiagnosticInvalidSeverity {
3357                severity_control_name_span,
3358            })?;
3359
3360        lexer.expect(Token::Separator(','))?;
3361
3362        let (diagnostic_name_token, diagnostic_name_token_span) = lexer.next_ident_with_span()?;
3363        let triggering_rule = if lexer.skip(Token::Separator('.')) {
3364            let (ident, _span) = lexer.next_ident_with_span()?;
3365            FilterableTriggeringRule::User(Box::new([diagnostic_name_token.into(), ident.into()]))
3366        } else {
3367            let diagnostic_rule_name = diagnostic_name_token;
3368            let diagnostic_rule_name_span = diagnostic_name_token_span;
3369            if let Some(triggering_rule) =
3370                StandardFilterableTriggeringRule::from_wgsl_ident(diagnostic_rule_name)
3371            {
3372                FilterableTriggeringRule::Standard(triggering_rule)
3373            } else {
3374                diagnostic_filter::Severity::Warning.report_wgsl_parse_diag(
3375                    Box::new(Error::UnknownDiagnosticRuleName(diagnostic_rule_name_span)),
3376                    lexer.source,
3377                )?;
3378                FilterableTriggeringRule::Unknown(diagnostic_rule_name.into())
3379            }
3380        };
3381        let filter = DiagnosticFilter {
3382            triggering_rule,
3383            new_severity,
3384        };
3385        lexer.skip(Token::Separator(','));
3386        lexer.expect(Token::Paren(')'))?;
3387
3388        Ok(filter)
3389    }
3390
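    /// Append `filters` to `arena` as a chain of [`DiagnosticFilterNode`]s
    /// whose outermost parent is `parent`, returning the handle of the new
    /// leaf, or `parent` unchanged if `filters` is empty.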
3391    pub(crate) fn write_diagnostic_filters(
3392        arena: &mut Arena<DiagnosticFilterNode>,
3393        filters: DiagnosticFilterMap,
3394        parent: Option<Handle<DiagnosticFilterNode>>,
3395    ) -> Option<Handle<DiagnosticFilterNode>> {
3396        filters
3397            .into_iter()
3398            .fold(parent, |parent, (triggering_rule, (new_severity, span))| {
3399                Some(arena.append(
3400                    DiagnosticFilterNode {
3401                        inner: DiagnosticFilter {
3402                            new_severity,
3403                            triggering_rule,
3404                        },
3405                        parent,
3406                    },
3407                    span,
3408                ))
3409            })
3410    }
3411}
3412
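/// Returns true if `token` can begin a compound statement: an attribute marker
/// (for a leading `@diagnostic(...)`) or an opening `{`.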
3413const fn is_start_of_compound_statement<'a>(token: Token<'a>) -> bool {
3414    matches!(token, Token::Attribute | Token::Paren('{'))
3415}