naga/front/wgsl/parse/mod.rs

use alloc::{boxed::Box, vec::Vec};
use directive::enable_extension::ImplementedEnableExtension;

use crate::diagnostic_filter::{
    self, DiagnosticFilter, DiagnosticFilterMap, DiagnosticFilterNode, FilterableTriggeringRule,
    ShouldConflictOnFullDuplicate, StandardFilterableTriggeringRule,
};
use crate::front::wgsl::error::{DiagnosticAttributeNotSupportedPosition, Error, ExpectedToken};
use crate::front::wgsl::parse::directive::enable_extension::{EnableExtension, EnableExtensions};
use crate::front::wgsl::parse::directive::language_extension::LanguageExtension;
use crate::front::wgsl::parse::directive::DirectiveKind;
use crate::front::wgsl::parse::lexer::{Lexer, Token};
use crate::front::wgsl::parse::number::Number;
use crate::front::wgsl::{Result, Scalar};
use crate::front::SymbolTable;
use crate::{Arena, FastHashSet, FastIndexSet, Handle, ShaderStage, Span};

pub mod ast;
pub mod conv;
pub mod directive;
pub mod lexer;
pub mod number;

/// State for constructing an AST expression.
///
/// Not to be confused with [`lower::ExpressionContext`], which is for producing
/// Naga IR from the AST we produce here.
///
/// [`lower::ExpressionContext`]: super::lower::ExpressionContext
struct ExpressionContext<'input, 'temp, 'out> {
    /// The [`TranslationUnit::expressions`] arena to which we should contribute
    /// expressions.
    ///
    /// [`TranslationUnit::expressions`]: ast::TranslationUnit::expressions
    expressions: &'out mut Arena<ast::Expression<'input>>,

    /// The [`TranslationUnit::types`] arena to which we should contribute new
    /// types.
    ///
    /// [`TranslationUnit::types`]: ast::TranslationUnit::types
    types: &'out mut Arena<ast::Type<'input>>,

    /// A map from identifiers in scope to the locals/arguments they represent.
    ///
    /// The handles refer to the [`locals`] arena; see that field's
    /// documentation for details.
    ///
    /// [`locals`]: ExpressionContext::locals
    local_table: &'temp mut SymbolTable<&'input str, Handle<ast::Local>>,

    /// Local variable and function argument arena for the function we're building.
    ///
    /// Note that the [`ast::Local`] here is actually a zero-sized type. This
    /// `Arena`'s only role is to assign a unique `Handle` to each local
    /// identifier, and track its definition's span for use in diagnostics. All
    /// the detailed information about locals - names, types, etc. - is kept in
    /// the [`LocalDecl`] statements we parsed from their declarations. For
    /// arguments, that information is kept in [`arguments`].
    ///
    /// In the AST, when an [`Ident`] expression refers to a local variable or
    /// argument, its [`IdentExpr`] holds the referent's `Handle<Local>` in this
    /// arena.
    ///
    /// During lowering, [`LocalDecl`] statements add entries to a per-function
    /// table that maps `Handle<Local>` values to their Naga representations,
    /// accessed via [`StatementContext::local_table`] and
    /// [`LocalExpressionContext::local_table`]. This table is then consulted when
    /// lowering subsequent [`Ident`] expressions.
    ///
    /// [`LocalDecl`]: ast::StatementKind::LocalDecl
    /// [`arguments`]: ast::Function::arguments
    /// [`Ident`]: ast::Expression::Ident
    /// [`IdentExpr`]: ast::IdentExpr
    /// [`StatementContext::local_table`]: super::lower::StatementContext::local_table
    /// [`LocalExpressionContext::local_table`]: super::lower::LocalExpressionContext::local_table
    locals: &'out mut Arena<ast::Local>,

    /// Identifiers used by the current global declaration that have no local definition.
    ///
    /// This becomes the [`GlobalDecl`]'s [`dependencies`] set.
    ///
    /// Note that we don't know at parse time what kind of [`GlobalDecl`] the
    /// name refers to. We can't look up names until we've seen the entire
    /// translation unit.
    ///
    /// [`GlobalDecl`]: ast::GlobalDecl
    /// [`dependencies`]: ast::GlobalDecl::dependencies
    unresolved: &'out mut FastIndexSet<ast::Dependency<'input>>,
}

impl<'a> ExpressionContext<'a, '_, '_> {
    fn parse_binary_op(
        &mut self,
        lexer: &mut Lexer<'a>,
        classifier: impl Fn(Token<'a>) -> Option<crate::BinaryOperator>,
        mut parser: impl FnMut(&mut Lexer<'a>, &mut Self) -> Result<'a, Handle<ast::Expression<'a>>>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        let start = lexer.start_byte_offset();
        let mut accumulator = parser(lexer, self)?;
        while let Some(op) = classifier(lexer.peek().0) {
            let _ = lexer.next();
            let left = accumulator;
            let right = parser(lexer, self)?;
            accumulator = self.expressions.append(
                ast::Expression::Binary { op, left, right },
                lexer.span_from(start),
            );
        }
        Ok(accumulator)
    }

    fn declare_local(&mut self, name: ast::Ident<'a>) -> Result<'a, Handle<ast::Local>> {
        let handle = self.locals.append(ast::Local, name.span);
        if let Some(old) = self.local_table.add(name.name, handle) {
            Err(Box::new(Error::Redefinition {
                previous: self.locals.get_span(old),
                current: name.span,
            }))
        } else {
            Ok(handle)
        }
    }

    fn new_scalar(&mut self, scalar: Scalar) -> Handle<ast::Type<'a>> {
        self.types
            .append(ast::Type::Scalar(scalar), Span::UNDEFINED)
    }
}

/// Which grammar rule we are in the midst of parsing.
///
/// This is used for error checking. `Parser` maintains a stack of
/// these and (occasionally) checks that it is being pushed and popped
/// as expected.
#[derive(Copy, Clone, Debug, PartialEq)]
enum Rule {
    Attribute,
    VariableDecl,
    TypeDecl,
    FunctionDecl,
    Block,
    Statement,
    PrimaryExpr,
    SingularExpr,
    UnaryExpr,
    GeneralExpr,
    Directive,
    GenericExpr,
    EnclosedExpr,
    LhsExpr,
}

struct ParsedAttribute<T> {
    value: Option<T>,
}

impl<T> Default for ParsedAttribute<T> {
    fn default() -> Self {
        Self { value: None }
    }
}

impl<T> ParsedAttribute<T> {
    fn set(&mut self, value: T, name_span: Span) -> Result<'static, ()> {
        if self.value.is_some() {
            return Err(Box::new(Error::RepeatedAttribute(name_span)));
        }
        self.value = Some(value);
        Ok(())
    }
}
#[derive(Default)]
struct BindingParser<'a> {
    location: ParsedAttribute<Handle<ast::Expression<'a>>>,
    built_in: ParsedAttribute<crate::BuiltIn>,
    interpolation: ParsedAttribute<crate::Interpolation>,
    sampling: ParsedAttribute<crate::Sampling>,
    invariant: ParsedAttribute<bool>,
    blend_src: ParsedAttribute<Handle<ast::Expression<'a>>>,
}

impl<'a> BindingParser<'a> {
    fn parse(
        &mut self,
        parser: &mut Parser,
        lexer: &mut Lexer<'a>,
        name: &'a str,
        name_span: Span,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, ()> {
        match name {
            "location" => {
                lexer.expect(Token::Paren('('))?;
                self.location
                    .set(parser.general_expression(lexer, ctx)?, name_span)?;
                lexer.expect(Token::Paren(')'))?;
            }
            "builtin" => {
                lexer.expect(Token::Paren('('))?;
                let (raw, span) = lexer.next_ident_with_span()?;
                self.built_in.set(
                    conv::map_built_in(&lexer.enable_extensions, raw, span)?,
                    name_span,
                )?;
                lexer.expect(Token::Paren(')'))?;
            }
            "interpolate" => {
                lexer.expect(Token::Paren('('))?;
                let (raw, span) = lexer.next_ident_with_span()?;
                self.interpolation
                    .set(conv::map_interpolation(raw, span)?, name_span)?;
                if lexer.skip(Token::Separator(',')) {
                    let (raw, span) = lexer.next_ident_with_span()?;
                    self.sampling
                        .set(conv::map_sampling(raw, span)?, name_span)?;
                }
                lexer.expect(Token::Paren(')'))?;
            }

            "invariant" => {
                self.invariant.set(true, name_span)?;
            }
            "blend_src" => {
                if !lexer
                    .enable_extensions
                    .contains(ImplementedEnableExtension::DualSourceBlending)
                {
                    return Err(Box::new(Error::EnableExtensionNotEnabled {
                        span: name_span,
                        kind: ImplementedEnableExtension::DualSourceBlending.into(),
                    }));
                }

                lexer.expect(Token::Paren('('))?;
                self.blend_src
                    .set(parser.general_expression(lexer, ctx)?, name_span)?;
                lexer.skip(Token::Separator(','));
                lexer.expect(Token::Paren(')'))?;
            }
            _ => return Err(Box::new(Error::UnknownAttribute(name_span))),
        }
        Ok(())
    }

    fn finish(self, span: Span) -> Result<'a, Option<ast::Binding<'a>>> {
        match (
            self.location.value,
            self.built_in.value,
            self.interpolation.value,
            self.sampling.value,
            self.invariant.value.unwrap_or_default(),
            self.blend_src.value,
        ) {
            (None, None, None, None, false, None) => Ok(None),
            (Some(location), None, interpolation, sampling, false, blend_src) => {
                // Before handing over the completed `Module`, we call
                // `apply_default_interpolation` to ensure that the interpolation and
                // sampling have been explicitly specified on all vertex shader output and fragment
                // shader input user bindings, so leaving them potentially `None` here is fine.
                Ok(Some(ast::Binding::Location {
                    location,
                    interpolation,
                    sampling,
                    blend_src,
                }))
            }
            (None, Some(crate::BuiltIn::Position { .. }), None, None, invariant, None) => {
                Ok(Some(ast::Binding::BuiltIn(crate::BuiltIn::Position {
                    invariant,
                })))
            }
            (None, Some(built_in), None, None, false, None) => {
                Ok(Some(ast::Binding::BuiltIn(built_in)))
            }
            (_, _, _, _, _, _) => Err(Box::new(Error::InconsistentBinding(span))),
        }
    }
}

/// Configuration for the whole parser run.
pub struct Options {
    /// Controls whether the parser should parse doc comments.
    pub parse_doc_comments: bool,
}

impl Options {
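    /// Parse a left-associative chain of binary operators at one precedence
    /// level.
    ///
    /// `classifier` maps a token to the operator it denotes (returning `None`
    /// to stop), and `parser` parses the operands at the next tighter
    /// precedence level. Matched operators are folded into nested
    /// [`ast::Expression::Binary`] nodes as they are encountered.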
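/// An attribute value that may be specified at most once.
///
/// [`set`](ParsedAttribute::set) records the value, or reports
/// [`Error::RepeatedAttribute`] if one was already present, so callers don't
/// need their own duplicate checks.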
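    /// Combine the attributes gathered so far into a single
    /// [`ast::Binding`], or report [`Error::InconsistentBinding`] for
    /// combinations such as `@location` together with `@builtin`.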
    /// Creates a new [`Options`] with doc comment parsing disabled.
    pub const fn new() -> Self {
        Options {
            parse_doc_comments: false,
        }
    }
}

pub struct Parser {
    rules: Vec<(Rule, usize)>,
    recursion_depth: u32,
}

impl Parser {
    pub const fn new() -> Self {
        Parser {
            rules: Vec::new(),
            recursion_depth: 0,
        }
    }

    fn reset(&mut self) {
        self.rules.clear();
        self.recursion_depth = 0;
    }

    fn push_rule_span(&mut self, rule: Rule, lexer: &mut Lexer<'_>) {
        self.rules.push((rule, lexer.start_byte_offset()));
    }

    fn pop_rule_span(&mut self, lexer: &Lexer<'_>) -> Span {
        let (_, initial) = self.rules.pop().unwrap();
        lexer.span_from(initial)
    }

    fn peek_rule_span(&mut self, lexer: &Lexer<'_>) -> Span {
        let &(_, initial) = self.rules.last().unwrap();
        lexer.span_from(initial)
    }

    fn race_rules(&self, rule0: Rule, rule1: Rule) -> Option<Rule> {
        Some(
            self.rules
                .iter()
                .rev()
                .find(|&x| x.0 == rule0 || x.0 == rule1)?
                .0,
        )
    }

    fn track_recursion<'a, F, R>(&mut self, f: F) -> Result<'a, R>
    where
        F: FnOnce(&mut Self) -> Result<'a, R>,
    {
        self.recursion_depth += 1;
        if self.recursion_depth >= 256 {
            return Err(Box::new(Error::Internal("Parser recursion limit exceeded")));
        }
        let ret = f(self);
        self.recursion_depth -= 1;
        ret
    }

    fn switch_value<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, ast::SwitchValue<'a>> {
        if let Token::Word("default") = lexer.peek().0 {
            let _ = lexer.next();
            return Ok(ast::SwitchValue::Default);
        }

        let expr = self.general_expression(lexer, ctx)?;
        Ok(ast::SwitchValue::Expr(expr))
    }

    /// Decide if we're looking at a construction expression, and return its
    /// type if so.
    ///
    /// If the identifier `word` is a [type-defining keyword], then return a
    /// [`ConstructorType`] value describing the type to build. Return an error
    /// if the type is not constructible (like `sampler`).
    ///
    /// If `word` isn't a type name, then return `None`.
    ///
    /// [type-defining keyword]: https://gpuweb.github.io/gpuweb/wgsl/#type-defining-keywords
    /// [`ConstructorType`]: ast::ConstructorType
    fn constructor_type<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        word: &'a str,
        span: Span,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Option<ast::ConstructorType<'a>>> {
        if let Some(scalar) = conv::get_scalar_type(&lexer.enable_extensions, span, word)? {
            return Ok(Some(ast::ConstructorType::Scalar(scalar)));
        }

        let partial = match word {
            "vec2" => ast::ConstructorType::PartialVector {
                size: crate::VectorSize::Bi,
            },
            "vec2i" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::I32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec2u" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::U32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec2f" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec2h" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec3" => ast::ConstructorType::PartialVector {
                size: crate::VectorSize::Tri,
            },
            "vec3i" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::I32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec3u" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::U32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec3f" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec3h" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec4" => ast::ConstructorType::PartialVector {
                size: crate::VectorSize::Quad,
            },
            "vec4i" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::I32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec4u" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::U32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec4f" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "vec4h" => {
                return Ok(Some(ast::ConstructorType::Vector {
                    size: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat2x2" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Bi,
                rows: crate::VectorSize::Bi,
            },
            "mat2x2f" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Bi,
                    rows: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat2x2h" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Bi,
                    rows: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat2x3" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Bi,
                rows: crate::VectorSize::Tri,
            },
            "mat2x3f" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Bi,
                    rows: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat2x3h" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Bi,
                    rows: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat2x4" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Bi,
                rows: crate::VectorSize::Quad,
            },
            "mat2x4f" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Bi,
                    rows: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat2x4h" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Bi,
                    rows: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat3x2" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Tri,
                rows: crate::VectorSize::Bi,
            },
            "mat3x2f" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Tri,
                    rows: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat3x2h" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Tri,
                    rows: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat3x3" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Tri,
                rows: crate::VectorSize::Tri,
            },
            "mat3x3f" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Tri,
                    rows: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat3x3h" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Tri,
                    rows: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat3x4" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Tri,
                rows: crate::VectorSize::Quad,
            },
            "mat3x4f" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Tri,
                    rows: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat3x4h" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Tri,
                    rows: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat4x2" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Quad,
                rows: crate::VectorSize::Bi,
            },
            "mat4x2f" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Quad,
                    rows: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat4x2h" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Quad,
                    rows: crate::VectorSize::Bi,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat4x3" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Quad,
                rows: crate::VectorSize::Tri,
            },
            "mat4x3f" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Quad,
                    rows: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat4x3h" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Quad,
                    rows: crate::VectorSize::Tri,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat4x4" => ast::ConstructorType::PartialMatrix {
                columns: crate::VectorSize::Quad,
                rows: crate::VectorSize::Quad,
            },
            "mat4x4f" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Quad,
                    rows: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::F32),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "mat4x4h" => {
                return Ok(Some(ast::ConstructorType::Matrix {
                    columns: crate::VectorSize::Quad,
                    rows: crate::VectorSize::Quad,
                    ty: ctx.new_scalar(Scalar::F16),
                    ty_span: Span::UNDEFINED,
                }))
            }
            "array" => ast::ConstructorType::PartialArray,
            "atomic"
            | "binding_array"
            | "sampler"
            | "sampler_comparison"
            | "texture_1d"
            | "texture_1d_array"
            | "texture_2d"
            | "texture_2d_array"
            | "texture_3d"
            | "texture_cube"
            | "texture_cube_array"
            | "texture_multisampled_2d"
            | "texture_multisampled_2d_array"
            | "texture_depth_2d"
            | "texture_depth_2d_array"
            | "texture_depth_cube"
            | "texture_depth_cube_array"
            | "texture_depth_multisampled_2d"
            | "texture_external"
            | "texture_storage_1d"
            | "texture_storage_1d_array"
            | "texture_storage_2d"
            | "texture_storage_2d_array"
            | "texture_storage_3d" => return Err(Box::new(Error::TypeNotConstructible(span))),
            _ => return Ok(None),
        };

        // parse component type if present
        match (lexer.peek().0, partial) {
            (Token::Paren('<'), ast::ConstructorType::PartialVector { size }) => {
                let (ty, ty_span) = self.singular_generic(lexer, ctx)?;
                Ok(Some(ast::ConstructorType::Vector { size, ty, ty_span }))
            }
            (Token::Paren('<'), ast::ConstructorType::PartialMatrix { columns, rows }) => {
                let (ty, ty_span) = self.singular_generic(lexer, ctx)?;
                Ok(Some(ast::ConstructorType::Matrix {
                    columns,
                    rows,
                    ty,
                    ty_span,
                }))
            }
            (Token::Paren('<'), ast::ConstructorType::PartialArray) => {
                lexer.expect_generic_paren('<')?;
                let base = self.type_decl(lexer, ctx)?;
                let size = if lexer.end_of_generic_arguments() {
                    let expr = self.const_generic_expression(lexer, ctx)?;
                    lexer.skip(Token::Separator(','));
                    ast::ArraySize::Constant(expr)
                } else {
                    ast::ArraySize::Dynamic
                };
                lexer.expect_generic_paren('>')?;

                Ok(Some(ast::ConstructorType::Array { base, size }))
            }
            (_, partial) => Ok(Some(partial)),
        }
    }

    /// Expects `name` to be consumed (not in lexer).
    fn arguments<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Vec<Handle<ast::Expression<'a>>>> {
        self.push_rule_span(Rule::EnclosedExpr, lexer);
        lexer.open_arguments()?;
        let mut arguments = Vec::new();
        loop {
            if !arguments.is_empty() {
                if !lexer.next_argument()? {
                    break;
                }
            } else if lexer.skip(Token::Paren(')')) {
                break;
            }
            let arg = self.general_expression(lexer, ctx)?;
            arguments.push(arg);
        }

        self.pop_rule_span(lexer);
        Ok(arguments)
    }

    fn enclosed_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.push_rule_span(Rule::EnclosedExpr, lexer);
        let expr = self.general_expression(lexer, ctx)?;
        self.pop_rule_span(lexer);
        Ok(expr)
    }

    /// Expects [`Rule::PrimaryExpr`] or [`Rule::SingularExpr`] on top; does not pop it.
    /// Expects `name` to be consumed (not in lexer).
    fn function_call<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        name: &'a str,
        name_span: Span,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        assert!(self.rules.last().is_some());

        let expr = match name {
            // bitcast looks like a function call, but it's an operator and must be handled differently.
            "bitcast" => {
                let (to, span) = self.singular_generic(lexer, ctx)?;

                lexer.open_arguments()?;
                let expr = self.general_expression(lexer, ctx)?;
                lexer.close_arguments()?;

                ast::Expression::Bitcast {
                    expr,
                    to,
                    ty_span: span,
                }
            }
            // everything else must be handled later, since they can be hidden by user-defined functions.
            _ => {
                let arguments = self.arguments(lexer, ctx)?;
                ctx.unresolved.insert(ast::Dependency {
                    ident: name,
                    usage: name_span,
                });
                ast::Expression::Call {
                    function: ast::Ident {
                        name,
                        span: name_span,
                    },
                    arguments,
                }
            }
        };

        let span = self.peek_rule_span(lexer);
        let expr = ctx.expressions.append(expr, span);
        Ok(expr)
    }

    fn ident_expr<'a>(
        &mut self,
        name: &'a str,
        name_span: Span,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> ast::IdentExpr<'a> {
        match ctx.local_table.lookup(name) {
            Some(&local) => ast::IdentExpr::Local(local),
            None => {
                ctx.unresolved.insert(ast::Dependency {
                    ident: name,
                    usage: name_span,
                });
                ast::IdentExpr::Unresolved(name)
            }
        }
    }

    fn primary_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.push_rule_span(Rule::PrimaryExpr, lexer);
        const fn literal_ray_flag<'b>(flag: crate::RayFlag) -> ast::Expression<'b> {
            ast::Expression::Literal(ast::Literal::Number(Number::U32(flag.bits())))
        }
        const fn literal_ray_intersection<'b>(
            intersection: crate::RayQueryIntersection,
        ) -> ast::Expression<'b> {
            ast::Expression::Literal(ast::Literal::Number(Number::U32(intersection as u32)))
        }

        let expr = match lexer.peek() {
            (Token::Paren('('), _) => {
                let _ = lexer.next();
                let expr = self.enclosed_expression(lexer, ctx)?;
                lexer.expect(Token::Paren(')'))?;
                self.pop_rule_span(lexer);
                return Ok(expr);
            }
            (Token::Word("true"), _) => {
                let _ = lexer.next();
                ast::Expression::Literal(ast::Literal::Bool(true))
            }
            (Token::Word("false"), _) => {
                let _ = lexer.next();
                ast::Expression::Literal(ast::Literal::Bool(false))
            }
            (Token::Number(res), span) => {
                let _ = lexer.next();
                let num = res.map_err(|err| Error::BadNumber(span, err))?;

                if let Some(enable_extension) = num.requires_enable_extension() {
                    if !lexer.enable_extensions.contains(enable_extension) {
                        return Err(Box::new(Error::EnableExtensionNotEnabled {
                            kind: enable_extension.into(),
                            span,
                        }));
                    }
                }

                ast::Expression::Literal(ast::Literal::Number(num))
            }
            (Token::Word("RAY_FLAG_NONE"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::empty())
            }
            (Token::Word("RAY_FLAG_FORCE_OPAQUE"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::FORCE_OPAQUE)
            }
            (Token::Word("RAY_FLAG_FORCE_NO_OPAQUE"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::FORCE_NO_OPAQUE)
            }
            (Token::Word("RAY_FLAG_TERMINATE_ON_FIRST_HIT"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::TERMINATE_ON_FIRST_HIT)
            }
            (Token::Word("RAY_FLAG_SKIP_CLOSEST_HIT_SHADER"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::SKIP_CLOSEST_HIT_SHADER)
            }
            (Token::Word("RAY_FLAG_CULL_BACK_FACING"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::CULL_BACK_FACING)
            }
            (Token::Word("RAY_FLAG_CULL_FRONT_FACING"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::CULL_FRONT_FACING)
            }
            (Token::Word("RAY_FLAG_CULL_OPAQUE"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::CULL_OPAQUE)
            }
            (Token::Word("RAY_FLAG_CULL_NO_OPAQUE"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::CULL_NO_OPAQUE)
            }
            (Token::Word("RAY_FLAG_SKIP_TRIANGLES"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::SKIP_TRIANGLES)
            }
            (Token::Word("RAY_FLAG_SKIP_AABBS"), _) => {
                let _ = lexer.next();
                literal_ray_flag(crate::RayFlag::SKIP_AABBS)
            }
            (Token::Word("RAY_QUERY_INTERSECTION_NONE"), _) => {
                let _ = lexer.next();
                literal_ray_intersection(crate::RayQueryIntersection::None)
            }
            (Token::Word("RAY_QUERY_INTERSECTION_TRIANGLE"), _) => {
                let _ = lexer.next();
                literal_ray_intersection(crate::RayQueryIntersection::Triangle)
            }
            (Token::Word("RAY_QUERY_INTERSECTION_GENERATED"), _) => {
                let _ = lexer.next();
                literal_ray_intersection(crate::RayQueryIntersection::Generated)
            }
            (Token::Word("RAY_QUERY_INTERSECTION_AABB"), _) => {
                let _ = lexer.next();
                literal_ray_intersection(crate::RayQueryIntersection::Aabb)
            }
            (Token::Word(word), span) => {
                let start = lexer.start_byte_offset();
                let _ = lexer.next();

                if let Some(ty) = self.constructor_type(lexer, word, span, ctx)? {
                    let ty_span = lexer.span_from(start);
                    let components = self.arguments(lexer, ctx)?;
                    ast::Expression::Construct {
                        ty,
                        ty_span,
                        components,
                    }
                } else if let Token::Paren('(') = lexer.peek().0 {
                    self.pop_rule_span(lexer);
                    return self.function_call(lexer, word, span, ctx);
                } else if word == "bitcast" {
                    self.pop_rule_span(lexer);
                    return self.function_call(lexer, word, span, ctx);
                } else {
                    let ident = self.ident_expr(word, span, ctx);
                    ast::Expression::Ident(ident)
                }
            }
            other => {
                return Err(Box::new(Error::Unexpected(
                    other.1,
                    ExpectedToken::PrimaryExpression,
                )))
            }
        };

        let span = self.pop_rule_span(lexer);
        let expr = ctx.expressions.append(expr, span);
        Ok(expr)
    }

    fn postfix<'a>(
        &mut self,
        span_start: usize,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
        expr: Handle<ast::Expression<'a>>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        let mut expr = expr;

        loop {
            let expression = match lexer.peek().0 {
                Token::Separator('.') => {
                    let _ = lexer.next();
                    let field = lexer.next_ident()?;

                    ast::Expression::Member { base: expr, field }
                }
                Token::Paren('[') => {
                    let _ = lexer.next();
                    let index = self.enclosed_expression(lexer, ctx)?;
                    lexer.expect(Token::Paren(']'))?;

                    ast::Expression::Index { base: expr, index }
                }
                _ => break,
            };

            let span = lexer.span_from(span_start);
            expr = ctx.expressions.append(expression, span);
        }

        Ok(expr)
    }

    fn const_generic_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.push_rule_span(Rule::GenericExpr, lexer);
        let expr = self.general_expression(lexer, ctx)?;
        self.pop_rule_span(lexer);
        Ok(expr)
    }

    /// Parse a `unary_expression`.
    fn unary_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.track_recursion(|this| {
            this.push_rule_span(Rule::UnaryExpr, lexer);
            //TODO: refactor this to avoid backing up
            let expr = match lexer.peek().0 {
                Token::Operation('-') => {
                    let _ = lexer.next();
                    let expr = this.unary_expression(lexer, ctx)?;
                    let expr = ast::Expression::Unary {
                        op: crate::UnaryOperator::Negate,
                        expr,
                    };
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                Token::Operation('!') => {
                    let _ = lexer.next();
                    let expr = this.unary_expression(lexer, ctx)?;
                    let expr = ast::Expression::Unary {
                        op: crate::UnaryOperator::LogicalNot,
                        expr,
                    };
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                Token::Operation('~') => {
                    let _ = lexer.next();
                    let expr = this.unary_expression(lexer, ctx)?;
                    let expr = ast::Expression::Unary {
                        op: crate::UnaryOperator::BitwiseNot,
                        expr,
                    };
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                Token::Operation('*') => {
                    let _ = lexer.next();
                    let expr = this.unary_expression(lexer, ctx)?;
                    let expr = ast::Expression::Deref(expr);
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                Token::Operation('&') => {
                    let _ = lexer.next();
                    let expr = this.unary_expression(lexer, ctx)?;
                    let expr = ast::Expression::AddrOf(expr);
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                _ => this.singular_expression(lexer, ctx)?,
            };

            this.pop_rule_span(lexer);
            Ok(expr)
        })
    }

    /// Parse a `lhs_expression`.
    ///
    /// LHS expressions only support the `&` and `*` operators and
    /// the `[]` and `.` postfix selectors.
    fn lhs_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.track_recursion(|this| {
            this.push_rule_span(Rule::LhsExpr, lexer);
            let start = lexer.start_byte_offset();
            let expr = match lexer.peek() {
                (Token::Operation('*'), _) => {
                    let _ = lexer.next();
                    let expr = this.lhs_expression(lexer, ctx)?;
                    let expr = ast::Expression::Deref(expr);
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                (Token::Operation('&'), _) => {
                    let _ = lexer.next();
                    let expr = this.lhs_expression(lexer, ctx)?;
                    let expr = ast::Expression::AddrOf(expr);
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
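    /// Return whichever of `rule0` or `rule1` sits closest to the top of the
    /// rule stack, or `None` if neither is currently being parsed.
    ///
    /// This lets the expression parser resolve ambiguities that depend on the
    /// enclosing rule, such as whether `>` closes a generic argument list or
    /// is a comparison operator.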
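    /// Run `f`, returning an internal error if the parser's recursion depth
    /// exceeds its fixed limit. This keeps pathologically nested expressions
    /// from overflowing the stack.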
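    /// Parse an expression surrounded by parentheses or brackets.
    ///
    /// Tracked as [`Rule::EnclosedExpr`] so that, within it, the full set of
    /// relational and shift operators is available again even when the
    /// enclosing context is a generic argument list.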
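    /// Resolve `name` to a local in scope, or else record it as an
    /// unresolved dependency of the current global declaration.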
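    /// Parse a chain of postfix accessors (`.member` and `[index]`) applied
    /// to `expr`, starting at byte offset `span_start`.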
                (Token::Paren('('), _) => {
1101                    let _ = lexer.next();
1102                    let primary_expr = this.lhs_expression(lexer, ctx)?;
1103                    lexer.expect(Token::Paren(')'))?;
1104                    this.postfix(start, lexer, ctx, primary_expr)?
1105                }
1106                (Token::Word(word), span) => {
1107                    let _ = lexer.next();
1108                    let ident = this.ident_expr(word, span, ctx);
1109                    let primary_expr = ctx.expressions.append(ast::Expression::Ident(ident), span);
1110                    this.postfix(start, lexer, ctx, primary_expr)?
1111                }
1112                _ => this.singular_expression(lexer, ctx)?,
1113            };
1114
1115            this.pop_rule_span(lexer);
1116            Ok(expr)
1117        })
1118    }
1119
1120    /// Parse a `singular_expression`.
1121    fn singular_expression<'a>(
1122        &mut self,
1123        lexer: &mut Lexer<'a>,
1124        ctx: &mut ExpressionContext<'a, '_, '_>,
1125    ) -> Result<'a, Handle<ast::Expression<'a>>> {
1126        let start = lexer.start_byte_offset();
1127        self.push_rule_span(Rule::SingularExpr, lexer);
1128        let primary_expr = self.primary_expression(lexer, ctx)?;
1129        let singular_expr = self.postfix(start, lexer, ctx, primary_expr)?;
1130        self.pop_rule_span(lexer);
1131
1132        Ok(singular_expr)
1133    }
1134
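    /// Parse an `equality_expression`, together with the relational, shift,
    /// additive and multiplicative levels nested within it.
    ///
    /// When parsing inside generic arguments ([`Rule::GenericExpr`]), the
    /// relational and shift classifiers accept only `<=` and `<<`, so that
    /// `>` and `>>` are left free to close the surrounding argument list.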
1135    fn equality_expression<'a>(
1136        &mut self,
1137        lexer: &mut Lexer<'a>,
1138        context: &mut ExpressionContext<'a, '_, '_>,
1139    ) -> Result<'a, Handle<ast::Expression<'a>>> {
1140        // equality_expression
1141        context.parse_binary_op(
1142            lexer,
1143            |token| match token {
1144                Token::LogicalOperation('=') => Some(crate::BinaryOperator::Equal),
1145                Token::LogicalOperation('!') => Some(crate::BinaryOperator::NotEqual),
1146                _ => None,
1147            },
1148            // relational_expression
1149            |lexer, context| {
1150                let enclosing = self.race_rules(Rule::GenericExpr, Rule::EnclosedExpr);
1151                context.parse_binary_op(
1152                    lexer,
1153                    match enclosing {
1154                        Some(Rule::GenericExpr) => |token| match token {
1155                            Token::LogicalOperation('<') => Some(crate::BinaryOperator::LessEqual),
1156                            _ => None,
1157                        },
1158                        _ => |token| match token {
1159                            Token::Paren('<') => Some(crate::BinaryOperator::Less),
1160                            Token::Paren('>') => Some(crate::BinaryOperator::Greater),
1161                            Token::LogicalOperation('<') => Some(crate::BinaryOperator::LessEqual),
1162                            Token::LogicalOperation('>') => {
1163                                Some(crate::BinaryOperator::GreaterEqual)
1164                            }
1165                            _ => None,
1166                        },
1167                    },
1168                    // shift_expression
1169                    |lexer, context| {
1170                        context.parse_binary_op(
1171                            lexer,
1172                            match enclosing {
1173                                Some(Rule::GenericExpr) => |token| match token {
1174                                    Token::ShiftOperation('<') => {
1175                                        Some(crate::BinaryOperator::ShiftLeft)
1176                                    }
1177                                    _ => None,
1178                                },
1179                                _ => |token| match token {
1180                                    Token::ShiftOperation('<') => {
1181                                        Some(crate::BinaryOperator::ShiftLeft)
1182                                    }
1183                                    Token::ShiftOperation('>') => {
1184                                        Some(crate::BinaryOperator::ShiftRight)
1185                                    }
1186                                    _ => None,
1187                                },
1188                            },
1189                            // additive_expression
1190                            |lexer, context| {
1191                                context.parse_binary_op(
1192                                    lexer,
1193                                    |token| match token {
1194                                        Token::Operation('+') => Some(crate::BinaryOperator::Add),
1195                                        Token::Operation('-') => {
1196                                            Some(crate::BinaryOperator::Subtract)
1197                                        }
1198                                        _ => None,
1199                                    },
1200                                    // multiplicative_expression
1201                                    |lexer, context| {
1202                                        context.parse_binary_op(
1203                                            lexer,
1204                                            |token| match token {
1205                                                Token::Operation('*') => {
1206                                                    Some(crate::BinaryOperator::Multiply)
1207                                                }
1208                                                Token::Operation('/') => {
1209                                                    Some(crate::BinaryOperator::Divide)
1210                                                }
1211                                                Token::Operation('%') => {
1212                                                    Some(crate::BinaryOperator::Modulo)
1213                                                }
1214                                                _ => None,
1215                                            },
1216                                            |lexer, context| self.unary_expression(lexer, context),
1217                                        )
1218                                    },
1219                                )
1220                            },
1221                        )
1222                    },
1223                )
1224            },
1225        )
1226    }
1227
1228    fn general_expression<'a>(
1229        &mut self,
1230        lexer: &mut Lexer<'a>,
1231        ctx: &mut ExpressionContext<'a, '_, '_>,
1232    ) -> Result<'a, Handle<ast::Expression<'a>>> {
1233        self.general_expression_with_span(lexer, ctx)
1234            .map(|(expr, _)| expr)
1235    }
1236
1237    fn general_expression_with_span<'a>(
1238        &mut self,
1239        lexer: &mut Lexer<'a>,
1240        context: &mut ExpressionContext<'a, '_, '_>,
1241    ) -> Result<'a, (Handle<ast::Expression<'a>>, Span)> {
1242        self.push_rule_span(Rule::GeneralExpr, lexer);
1243        // logical_or_expression
1244        let handle = context.parse_binary_op(
1245            lexer,
1246            |token| match token {
1247                Token::LogicalOperation('|') => Some(crate::BinaryOperator::LogicalOr),
1248                _ => None,
1249            },
1250            // logical_and_expression
1251            |lexer, context| {
1252                context.parse_binary_op(
1253                    lexer,
1254                    |token| match token {
1255                        Token::LogicalOperation('&') => Some(crate::BinaryOperator::LogicalAnd),
1256                        _ => None,
1257                    },
1258                    // inclusive_or_expression
1259                    |lexer, context| {
1260                        context.parse_binary_op(
1261                            lexer,
1262                            |token| match token {
1263                                Token::Operation('|') => Some(crate::BinaryOperator::InclusiveOr),
1264                                _ => None,
1265                            },
1266                            // exclusive_or_expression
1267                            |lexer, context| {
1268                                context.parse_binary_op(
1269                                    lexer,
1270                                    |token| match token {
1271                                        Token::Operation('^') => {
1272                                            Some(crate::BinaryOperator::ExclusiveOr)
1273                                        }
1274                                        _ => None,
1275                                    },
1276                                    // and_expression
1277                                    |lexer, context| {
1278                                        context.parse_binary_op(
1279                                            lexer,
1280                                            |token| match token {
1281                                                Token::Operation('&') => {
1282                                                    Some(crate::BinaryOperator::And)
1283                                                }
1284                                                _ => None,
1285                                            },
1286                                            |lexer, context| {
1287                                                self.equality_expression(lexer, context)
1288                                            },
1289                                        )
1290                                    },
1291                                )
1292                            },
1293                        )
1294                    },
1295                )
1296            },
1297        )?;
1298        Ok((handle, self.pop_rule_span(lexer)))
1299    }
1300
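    /// Parse a `var` declaration after the `var` keyword: an optional
    /// `<address_space[, access]>`, the name, an optional `: type`, an
    /// optional `= initializer`, and the terminating `;`.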
1301    fn variable_decl<'a>(
1302        &mut self,
1303        lexer: &mut Lexer<'a>,
1304        ctx: &mut ExpressionContext<'a, '_, '_>,
1305    ) -> Result<'a, ast::GlobalVariable<'a>> {
1306        self.push_rule_span(Rule::VariableDecl, lexer);
1307        let mut space = crate::AddressSpace::Handle;
1308
1309        if lexer.skip(Token::Paren('<')) {
1310            let (class_str, span) = lexer.next_ident_with_span()?;
1311            space = match class_str {
1312                "storage" => {
1313                    let access = if lexer.skip(Token::Separator(',')) {
1314                        lexer.next_storage_access()?
1315                    } else {
1316                        // defaulting to `read`
1317                        crate::StorageAccess::LOAD
1318                    };
1319                    crate::AddressSpace::Storage { access }
1320                }
1321                _ => conv::map_address_space(class_str, span)?,
1322            };
1323            lexer.expect(Token::Paren('>'))?;
1324        }
1325        let name = lexer.next_ident()?;
1326
1327        let ty = if lexer.skip(Token::Separator(':')) {
1328            Some(self.type_decl(lexer, ctx)?)
1329        } else {
1330            None
1331        };
1332
1333        let init = if lexer.skip(Token::Operation('=')) {
1334            let handle = self.general_expression(lexer, ctx)?;
1335            Some(handle)
1336        } else {
1337            None
1338        };
1339        lexer.expect(Token::Separator(';'))?;
1340        self.pop_rule_span(lexer);
1341
1342        Ok(ast::GlobalVariable {
1343            name,
1344            space,
1345            binding: None,
1346            ty,
1347            init,
1348            doc_comments: Vec::new(),
1349        })
1350    }
1351
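    /// Parse the body of a `struct` declaration: a brace-delimited list of
    /// members, each with optional attributes (`@size(...)`, `@align(...)`, or
    /// a binding attribute), a name, and a type. Duplicate member names are
    /// reported as redefinitions.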
1352    fn struct_body<'a>(
1353        &mut self,
1354        lexer: &mut Lexer<'a>,
1355        ctx: &mut ExpressionContext<'a, '_, '_>,
1356    ) -> Result<'a, Vec<ast::StructMember<'a>>> {
1357        let mut members = Vec::new();
1358        let mut member_names = FastHashSet::default();
1359
1360        lexer.expect(Token::Paren('{'))?;
1361        let mut ready = true;
1362        while !lexer.skip(Token::Paren('}')) {
1363            if !ready {
1364                return Err(Box::new(Error::Unexpected(
1365                    lexer.next().1,
1366                    ExpectedToken::Token(Token::Separator(',')),
1367                )));
1368            }
1369
1370            let doc_comments = lexer.accumulate_doc_comments();
1371
1372            let (mut size, mut align) = (ParsedAttribute::default(), ParsedAttribute::default());
1373            self.push_rule_span(Rule::Attribute, lexer);
1374            let mut bind_parser = BindingParser::default();
1375            while lexer.skip(Token::Attribute) {
1376                match lexer.next_ident_with_span()? {
1377                    ("size", name_span) => {
1378                        lexer.expect(Token::Paren('('))?;
1379                        let expr = self.general_expression(lexer, ctx)?;
1380                        lexer.expect(Token::Paren(')'))?;
1381                        size.set(expr, name_span)?;
1382                    }
1383                    ("align", name_span) => {
1384                        lexer.expect(Token::Paren('('))?;
1385                        let expr = self.general_expression(lexer, ctx)?;
1386                        lexer.expect(Token::Paren(')'))?;
1387                        align.set(expr, name_span)?;
1388                    }
1389                    (word, word_span) => bind_parser.parse(self, lexer, word, word_span, ctx)?,
1390                }
1391            }
1392
1393            let bind_span = self.pop_rule_span(lexer);
1394            let binding = bind_parser.finish(bind_span)?;
1395
1396            let name = lexer.next_ident()?;
1397            lexer.expect(Token::Separator(':'))?;
1398            let ty = self.type_decl(lexer, ctx)?;
1399            ready = lexer.skip(Token::Separator(','));
1400
1401            members.push(ast::StructMember {
1402                name,
1403                ty,
1404                binding,
1405                size: size.value,
1406                align: align.value,
1407                doc_comments,
1408            });
1409
1410            if !member_names.insert(name.name) {
1411                return Err(Box::new(Error::Redefinition {
1412                    previous: members
1413                        .iter()
1414                        .find(|x| x.name.name == name.name)
1415                        .map(|x| x.name.span)
1416                        .unwrap(),
1417                    current: name.span,
1418                }));
1419            }
1420        }
1421
1422        Ok(members)
1423    }
1424
1425    /// Parses `<T>`, returning the parsed `T` and its span.
1426    fn singular_generic<'a>(
1427        &mut self,
1428        lexer: &mut Lexer<'a>,
1429        ctx: &mut ExpressionContext<'a, '_, '_>,
1430    ) -> Result<'a, (Handle<ast::Type<'a>>, Span)> {
1431        lexer.expect_generic_paren('<')?;
1432        let start = lexer.start_byte_offset();
1433        let ty = self.type_decl(lexer, ctx)?;
1434        let span = lexer.span_from(start);
1435        lexer.skip(Token::Separator(','));
1436        lexer.expect_generic_paren('>')?;
1437        Ok((ty, span))
1438    }
1439
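    /// Parse the `<T>` generic argument of a matrix type and build an
    /// [`ast::Type::Matrix`] with the given `columns` and `rows`.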
1440    fn matrix_with_type<'a>(
1441        &mut self,
1442        lexer: &mut Lexer<'a>,
1443        ctx: &mut ExpressionContext<'a, '_, '_>,
1444        columns: crate::VectorSize,
1445        rows: crate::VectorSize,
1446    ) -> Result<'a, ast::Type<'a>> {
1447        let (ty, ty_span) = self.singular_generic(lexer, ctx)?;
1448        Ok(ast::Type::Matrix {
1449            columns,
1450            rows,
1451            ty,
1452            ty_span,
1453        })
1454    }
1455
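    /// Try to parse `word` as a predeclared type (scalar, vector, matrix,
    /// `atomic`, `ptr`, `array`, sampler, texture, and so on), consuming any
    /// generic arguments from `lexer`.
    ///
    /// Returns `Ok(None)` if `word` is not a predeclared type, so that the
    /// caller can treat it as a user-defined type name.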
1456    fn type_decl_impl<'a>(
1457        &mut self,
1458        lexer: &mut Lexer<'a>,
1459        word: &'a str,
1460        span: Span,
1461        ctx: &mut ExpressionContext<'a, '_, '_>,
1462    ) -> Result<'a, Option<ast::Type<'a>>> {
1463        if let Some(scalar) = conv::get_scalar_type(&lexer.enable_extensions, span, word)? {
1464            return Ok(Some(ast::Type::Scalar(scalar)));
1465        }
1466
1467        Ok(Some(match word {
1468            "vec2" => {
1469                let (ty, ty_span) = self.singular_generic(lexer, ctx)?;
1470                ast::Type::Vector {
1471                    size: crate::VectorSize::Bi,
1472                    ty,
1473                    ty_span,
1474                }
1475            }
1476            "vec2i" => ast::Type::Vector {
1477                size: crate::VectorSize::Bi,
1478                ty: ctx.new_scalar(Scalar::I32),
1479                ty_span: Span::UNDEFINED,
1480            },
1481            "vec2u" => ast::Type::Vector {
1482                size: crate::VectorSize::Bi,
1483                ty: ctx.new_scalar(Scalar::U32),
1484                ty_span: Span::UNDEFINED,
1485            },
1486            "vec2f" => ast::Type::Vector {
1487                size: crate::VectorSize::Bi,
1488                ty: ctx.new_scalar(Scalar::F32),
1489                ty_span: Span::UNDEFINED,
1490            },
1491            "vec2h" => ast::Type::Vector {
1492                size: crate::VectorSize::Bi,
1493                ty: ctx.new_scalar(Scalar::F16),
1494                ty_span: Span::UNDEFINED,
1495            },
1496            "vec3" => {
1497                let (ty, ty_span) = self.singular_generic(lexer, ctx)?;
1498                ast::Type::Vector {
1499                    size: crate::VectorSize::Tri,
1500                    ty,
1501                    ty_span,
1502                }
1503            }
1504            "vec3i" => ast::Type::Vector {
1505                size: crate::VectorSize::Tri,
1506                ty: ctx.new_scalar(Scalar::I32),
1507                ty_span: Span::UNDEFINED,
1508            },
1509            "vec3u" => ast::Type::Vector {
1510                size: crate::VectorSize::Tri,
1511                ty: ctx.new_scalar(Scalar::U32),
1512                ty_span: Span::UNDEFINED,
1513            },
1514            "vec3f" => ast::Type::Vector {
1515                size: crate::VectorSize::Tri,
1516                ty: ctx.new_scalar(Scalar::F32),
1517                ty_span: Span::UNDEFINED,
1518            },
1519            "vec3h" => ast::Type::Vector {
1520                size: crate::VectorSize::Tri,
1521                ty: ctx.new_scalar(Scalar::F16),
1522                ty_span: Span::UNDEFINED,
1523            },
1524            "vec4" => {
1525                let (ty, ty_span) = self.singular_generic(lexer, ctx)?;
1526                ast::Type::Vector {
1527                    size: crate::VectorSize::Quad,
1528                    ty,
1529                    ty_span,
1530                }
1531            }
1532            "vec4i" => ast::Type::Vector {
1533                size: crate::VectorSize::Quad,
1534                ty: ctx.new_scalar(Scalar::I32),
1535                ty_span: Span::UNDEFINED,
1536            },
1537            "vec4u" => ast::Type::Vector {
1538                size: crate::VectorSize::Quad,
1539                ty: ctx.new_scalar(Scalar::U32),
1540                ty_span: Span::UNDEFINED,
1541            },
1542            "vec4f" => ast::Type::Vector {
1543                size: crate::VectorSize::Quad,
1544                ty: ctx.new_scalar(Scalar::F32),
1545                ty_span: Span::UNDEFINED,
1546            },
1547            "vec4h" => ast::Type::Vector {
1548                size: crate::VectorSize::Quad,
1549                ty: ctx.new_scalar(Scalar::F16),
1550                ty_span: Span::UNDEFINED,
1551            },
1552            "mat2x2" => {
1553                self.matrix_with_type(lexer, ctx, crate::VectorSize::Bi, crate::VectorSize::Bi)?
1554            }
1555            "mat2x2f" => ast::Type::Matrix {
1556                columns: crate::VectorSize::Bi,
1557                rows: crate::VectorSize::Bi,
1558                ty: ctx.new_scalar(Scalar::F32),
1559                ty_span: Span::UNDEFINED,
1560            },
1561            "mat2x2h" => ast::Type::Matrix {
1562                columns: crate::VectorSize::Bi,
1563                rows: crate::VectorSize::Bi,
1564                ty: ctx.new_scalar(Scalar::F16),
1565                ty_span: Span::UNDEFINED,
1566            },
1567            "mat2x3" => {
1568                self.matrix_with_type(lexer, ctx, crate::VectorSize::Bi, crate::VectorSize::Tri)?
1569            }
1570            "mat2x3f" => ast::Type::Matrix {
1571                columns: crate::VectorSize::Bi,
1572                rows: crate::VectorSize::Tri,
1573                ty: ctx.new_scalar(Scalar::F32),
1574                ty_span: Span::UNDEFINED,
1575            },
1576            "mat2x3h" => ast::Type::Matrix {
1577                columns: crate::VectorSize::Bi,
1578                rows: crate::VectorSize::Tri,
1579                ty: ctx.new_scalar(Scalar::F16),
1580                ty_span: Span::UNDEFINED,
1581            },
1582            "mat2x4" => {
1583                self.matrix_with_type(lexer, ctx, crate::VectorSize::Bi, crate::VectorSize::Quad)?
1584            }
1585            "mat2x4f" => ast::Type::Matrix {
1586                columns: crate::VectorSize::Bi,
1587                rows: crate::VectorSize::Quad,
1588                ty: ctx.new_scalar(Scalar::F32),
1589                ty_span: Span::UNDEFINED,
1590            },
1591            "mat2x4h" => ast::Type::Matrix {
1592                columns: crate::VectorSize::Bi,
1593                rows: crate::VectorSize::Quad,
1594                ty: ctx.new_scalar(Scalar::F16),
1595                ty_span: Span::UNDEFINED,
1596            },
1597            "mat3x2" => {
1598                self.matrix_with_type(lexer, ctx, crate::VectorSize::Tri, crate::VectorSize::Bi)?
1599            }
1600            "mat3x2f" => ast::Type::Matrix {
1601                columns: crate::VectorSize::Tri,
1602                rows: crate::VectorSize::Bi,
1603                ty: ctx.new_scalar(Scalar::F32),
1604                ty_span: Span::UNDEFINED,
1605            },
1606            "mat3x2h" => ast::Type::Matrix {
1607                columns: crate::VectorSize::Tri,
1608                rows: crate::VectorSize::Bi,
1609                ty: ctx.new_scalar(Scalar::F16),
1610                ty_span: Span::UNDEFINED,
1611            },
1612            "mat3x3" => {
1613                self.matrix_with_type(lexer, ctx, crate::VectorSize::Tri, crate::VectorSize::Tri)?
1614            }
1615            "mat3x3f" => ast::Type::Matrix {
1616                columns: crate::VectorSize::Tri,
1617                rows: crate::VectorSize::Tri,
1618                ty: ctx.new_scalar(Scalar::F32),
1619                ty_span: Span::UNDEFINED,
1620            },
1621            "mat3x3h" => ast::Type::Matrix {
1622                columns: crate::VectorSize::Tri,
1623                rows: crate::VectorSize::Tri,
1624                ty: ctx.new_scalar(Scalar::F16),
1625                ty_span: Span::UNDEFINED,
1626            },
1627            "mat3x4" => {
1628                self.matrix_with_type(lexer, ctx, crate::VectorSize::Tri, crate::VectorSize::Quad)?
1629            }
1630            "mat3x4f" => ast::Type::Matrix {
1631                columns: crate::VectorSize::Tri,
1632                rows: crate::VectorSize::Quad,
1633                ty: ctx.new_scalar(Scalar::F32),
1634                ty_span: Span::UNDEFINED,
1635            },
1636            "mat3x4h" => ast::Type::Matrix {
1637                columns: crate::VectorSize::Tri,
1638                rows: crate::VectorSize::Quad,
1639                ty: ctx.new_scalar(Scalar::F16),
1640                ty_span: Span::UNDEFINED,
1641            },
1642            "mat4x2" => {
1643                self.matrix_with_type(lexer, ctx, crate::VectorSize::Quad, crate::VectorSize::Bi)?
1644            }
1645            "mat4x2f" => ast::Type::Matrix {
1646                columns: crate::VectorSize::Quad,
1647                rows: crate::VectorSize::Bi,
1648                ty: ctx.new_scalar(Scalar::F32),
1649                ty_span: Span::UNDEFINED,
1650            },
1651            "mat4x2h" => ast::Type::Matrix {
1652                columns: crate::VectorSize::Quad,
1653                rows: crate::VectorSize::Bi,
1654                ty: ctx.new_scalar(Scalar::F16),
1655                ty_span: Span::UNDEFINED,
1656            },
1657            "mat4x3" => {
1658                self.matrix_with_type(lexer, ctx, crate::VectorSize::Quad, crate::VectorSize::Tri)?
1659            }
1660            "mat4x3f" => ast::Type::Matrix {
1661                columns: crate::VectorSize::Quad,
1662                rows: crate::VectorSize::Tri,
1663                ty: ctx.new_scalar(Scalar::F32),
1664                ty_span: Span::UNDEFINED,
1665            },
1666            "mat4x3h" => ast::Type::Matrix {
1667                columns: crate::VectorSize::Quad,
1668                rows: crate::VectorSize::Tri,
1669                ty: ctx.new_scalar(Scalar::F16),
1670                ty_span: Span::UNDEFINED,
1671            },
1672            "mat4x4" => {
1673                self.matrix_with_type(lexer, ctx, crate::VectorSize::Quad, crate::VectorSize::Quad)?
1674            }
1675            "mat4x4f" => ast::Type::Matrix {
1676                columns: crate::VectorSize::Quad,
1677                rows: crate::VectorSize::Quad,
1678                ty: ctx.new_scalar(Scalar::F32),
1679                ty_span: Span::UNDEFINED,
1680            },
1681            "mat4x4h" => ast::Type::Matrix {
1682                columns: crate::VectorSize::Quad,
1683                rows: crate::VectorSize::Quad,
1684                ty: ctx.new_scalar(Scalar::F16),
1685                ty_span: Span::UNDEFINED,
1686            },
1687            "atomic" => {
1688                let scalar = lexer.next_scalar_generic()?;
1689                ast::Type::Atomic(scalar)
1690            }
1691            "ptr" => {
1692                lexer.expect_generic_paren('<')?;
1693                let (ident, span) = lexer.next_ident_with_span()?;
1694                let mut space = conv::map_address_space(ident, span)?;
1695                lexer.expect(Token::Separator(','))?;
1696                let base = self.type_decl(lexer, ctx)?;
1697                if let crate::AddressSpace::Storage { ref mut access } = space {
1698                    *access = if lexer.end_of_generic_arguments() {
1699                        let result = lexer.next_storage_access()?;
1700                        lexer.skip(Token::Separator(','));
1701                        result
1702                    } else {
1703                        crate::StorageAccess::LOAD
1704                    };
1705                }
1706                lexer.expect_generic_paren('>')?;
1707                ast::Type::Pointer { base, space }
1708            }
1709            "array" => {
1710                lexer.expect_generic_paren('<')?;
1711                let base = self.type_decl(lexer, ctx)?;
1712                let size = if lexer.end_of_generic_arguments() {
1713                    let size = self.const_generic_expression(lexer, ctx)?;
1714                    lexer.skip(Token::Separator(','));
1715                    ast::ArraySize::Constant(size)
1716                } else {
1717                    ast::ArraySize::Dynamic
1718                };
1719                lexer.expect_generic_paren('>')?;
1720
1721                ast::Type::Array { base, size }
1722            }
1723            "binding_array" => {
1724                lexer.expect_generic_paren('<')?;
1725                let base = self.type_decl(lexer, ctx)?;
1726                let size = if lexer.end_of_generic_arguments() {
1727                    let size = self.unary_expression(lexer, ctx)?;
1728                    lexer.skip(Token::Separator(','));
1729                    ast::ArraySize::Constant(size)
1730                } else {
1731                    ast::ArraySize::Dynamic
1732                };
1733                lexer.expect_generic_paren('>')?;
1734
1735                ast::Type::BindingArray { base, size }
1736            }
1737            "sampler" => ast::Type::Sampler { comparison: false },
1738            "sampler_comparison" => ast::Type::Sampler { comparison: true },
1739            "texture_1d" => {
1740                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1741                Self::check_texture_sample_type(scalar, span)?;
1742                ast::Type::Image {
1743                    dim: crate::ImageDimension::D1,
1744                    arrayed: false,
1745                    class: crate::ImageClass::Sampled {
1746                        kind: scalar.kind,
1747                        multi: false,
1748                    },
1749                }
1750            }
1751            "texture_1d_array" => {
1752                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1753                Self::check_texture_sample_type(scalar, span)?;
1754                ast::Type::Image {
1755                    dim: crate::ImageDimension::D1,
1756                    arrayed: true,
1757                    class: crate::ImageClass::Sampled {
1758                        kind: scalar.kind,
1759                        multi: false,
1760                    },
1761                }
1762            }
1763            "texture_2d" => {
1764                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1765                Self::check_texture_sample_type(scalar, span)?;
1766                ast::Type::Image {
1767                    dim: crate::ImageDimension::D2,
1768                    arrayed: false,
1769                    class: crate::ImageClass::Sampled {
1770                        kind: scalar.kind,
1771                        multi: false,
1772                    },
1773                }
1774            }
1775            "texture_2d_array" => {
1776                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1777                Self::check_texture_sample_type(scalar, span)?;
1778                ast::Type::Image {
1779                    dim: crate::ImageDimension::D2,
1780                    arrayed: true,
1781                    class: crate::ImageClass::Sampled {
1782                        kind: scalar.kind,
1783                        multi: false,
1784                    },
1785                }
1786            }
1787            "texture_3d" => {
1788                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1789                Self::check_texture_sample_type(scalar, span)?;
1790                ast::Type::Image {
1791                    dim: crate::ImageDimension::D3,
1792                    arrayed: false,
1793                    class: crate::ImageClass::Sampled {
1794                        kind: scalar.kind,
1795                        multi: false,
1796                    },
1797                }
1798            }
1799            "texture_cube" => {
1800                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1801                Self::check_texture_sample_type(scalar, span)?;
1802                ast::Type::Image {
1803                    dim: crate::ImageDimension::Cube,
1804                    arrayed: false,
1805                    class: crate::ImageClass::Sampled {
1806                        kind: scalar.kind,
1807                        multi: false,
1808                    },
1809                }
1810            }
1811            "texture_cube_array" => {
1812                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1813                Self::check_texture_sample_type(scalar, span)?;
1814                ast::Type::Image {
1815                    dim: crate::ImageDimension::Cube,
1816                    arrayed: true,
1817                    class: crate::ImageClass::Sampled {
1818                        kind: scalar.kind,
1819                        multi: false,
1820                    },
1821                }
1822            }
1823            "texture_multisampled_2d" => {
1824                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1825                Self::check_texture_sample_type(scalar, span)?;
1826                ast::Type::Image {
1827                    dim: crate::ImageDimension::D2,
1828                    arrayed: false,
1829                    class: crate::ImageClass::Sampled {
1830                        kind: scalar.kind,
1831                        multi: true,
1832                    },
1833                }
1834            }
1835            "texture_multisampled_2d_array" => {
1836                let (scalar, span) = lexer.next_scalar_generic_with_span()?;
1837                Self::check_texture_sample_type(scalar, span)?;
1838                ast::Type::Image {
1839                    dim: crate::ImageDimension::D2,
1840                    arrayed: true,
1841                    class: crate::ImageClass::Sampled {
1842                        kind: scalar.kind,
1843                        multi: true,
1844                    },
1845                }
1846            }
1847            "texture_depth_2d" => ast::Type::Image {
1848                dim: crate::ImageDimension::D2,
1849                arrayed: false,
1850                class: crate::ImageClass::Depth { multi: false },
1851            },
1852            "texture_depth_2d_array" => ast::Type::Image {
1853                dim: crate::ImageDimension::D2,
1854                arrayed: true,
1855                class: crate::ImageClass::Depth { multi: false },
1856            },
1857            "texture_depth_cube" => ast::Type::Image {
1858                dim: crate::ImageDimension::Cube,
1859                arrayed: false,
1860                class: crate::ImageClass::Depth { multi: false },
1861            },
1862            "texture_depth_cube_array" => ast::Type::Image {
1863                dim: crate::ImageDimension::Cube,
1864                arrayed: true,
1865                class: crate::ImageClass::Depth { multi: false },
1866            },
1867            "texture_depth_multisampled_2d" => ast::Type::Image {
1868                dim: crate::ImageDimension::D2,
1869                arrayed: false,
1870                class: crate::ImageClass::Depth { multi: true },
1871            },
1872            "texture_external" => ast::Type::Image {
1873                dim: crate::ImageDimension::D2,
1874                arrayed: false,
1875                class: crate::ImageClass::External,
1876            },
1877            "texture_storage_1d" => {
1878                let (format, access) = lexer.next_format_generic()?;
1879                ast::Type::Image {
1880                    dim: crate::ImageDimension::D1,
1881                    arrayed: false,
1882                    class: crate::ImageClass::Storage { format, access },
1883                }
1884            }
1885            "texture_storage_1d_array" => {
1886                let (format, access) = lexer.next_format_generic()?;
1887                ast::Type::Image {
1888                    dim: crate::ImageDimension::D1,
1889                    arrayed: true,
1890                    class: crate::ImageClass::Storage { format, access },
1891                }
1892            }
1893            "texture_storage_2d" => {
1894                let (format, access) = lexer.next_format_generic()?;
1895                ast::Type::Image {
1896                    dim: crate::ImageDimension::D2,
1897                    arrayed: false,
1898                    class: crate::ImageClass::Storage { format, access },
1899                }
1900            }
1901            "texture_storage_2d_array" => {
1902                let (format, access) = lexer.next_format_generic()?;
1903                ast::Type::Image {
1904                    dim: crate::ImageDimension::D2,
1905                    arrayed: true,
1906                    class: crate::ImageClass::Storage { format, access },
1907                }
1908            }
1909            "texture_storage_3d" => {
1910                let (format, access) = lexer.next_format_generic()?;
1911                ast::Type::Image {
1912                    dim: crate::ImageDimension::D3,
1913                    arrayed: false,
1914                    class: crate::ImageClass::Storage { format, access },
1915                }
1916            }
1917            "acceleration_structure" => {
1918                let vertex_return = lexer.next_acceleration_structure_flags()?;
1919                ast::Type::AccelerationStructure { vertex_return }
1920            }
1921            "ray_query" => {
1922                let vertex_return = lexer.next_acceleration_structure_flags()?;
1923                ast::Type::RayQuery { vertex_return }
1924            }
1925            "RayDesc" => ast::Type::RayDesc,
1926            "RayIntersection" => ast::Type::RayIntersection,
1927            _ => return Ok(None),
1928        }))
1929    }
1930
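    /// Check that `scalar` is a permitted sample type for a sampled texture,
    /// reporting [`Error::BadTextureSampleType`] at `span` otherwise.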
1931    fn check_texture_sample_type(scalar: Scalar, span: Span) -> Result<'static, ()> {
1932        use crate::ScalarKind::*;
1933        // Validate according to https://gpuweb.github.io/gpuweb/wgsl/#sampled-texture-type
1934        match scalar {
1935            Scalar {
1936                kind: Float | Sint | Uint,
1937                width: 4,
1938            } => Ok(()),
1939            Scalar {
1940                kind: Uint,
1941                width: 8,
1942            } => Ok(()),
1943            _ => Err(Box::new(Error::BadTextureSampleType { span, scalar })),
1944        }
1945    }
1946
1947    /// Parse a type declaration, either predeclared or user-defined.
1948    fn type_decl<'a>(
1949        &mut self,
1950        lexer: &mut Lexer<'a>,
1951        ctx: &mut ExpressionContext<'a, '_, '_>,
1952    ) -> Result<'a, Handle<ast::Type<'a>>> {
1953        self.track_recursion(|this| {
1954            this.push_rule_span(Rule::TypeDecl, lexer);
1955
1956            let (name, span) = lexer.next_ident_with_span()?;
1957
1958            let ty = match this.type_decl_impl(lexer, name, span, ctx)? {
1959                Some(ty) => ty,
1960                None => {
1961                    ctx.unresolved.insert(ast::Dependency {
1962                        ident: name,
1963                        usage: span,
1964                    });
1965                    ast::Type::User(ast::Ident { name, span })
1966                }
1967            };
1968
1969            this.pop_rule_span(lexer);
1970
1971            let handle = ctx.types.append(ty, Span::UNDEFINED);
1972            Ok(handle)
1973        })
1974    }
1975
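    /// Parse the operator and right-hand side of an assignment, compound
    /// assignment (`+=`, `<<=`, and so on), increment, or decrement statement
    /// whose left-hand side `target` has already been parsed, and append the
    /// resulting statement to `block`.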
1976    fn assignment_op_and_rhs<'a>(
1977        &mut self,
1978        lexer: &mut Lexer<'a>,
1979        ctx: &mut ExpressionContext<'a, '_, '_>,
1980        block: &mut ast::Block<'a>,
1981        target: Handle<ast::Expression<'a>>,
1982        span_start: usize,
1983    ) -> Result<'a, ()> {
1984        use crate::BinaryOperator as Bo;
1985
1986        let op = lexer.next();
1987        let (op, value) = match op {
1988            (Token::Operation('='), _) => {
1989                let value = self.general_expression(lexer, ctx)?;
1990                (None, value)
1991            }
1992            (Token::AssignmentOperation(c), _) => {
1993                let op = match c {
1994                    '<' => Bo::ShiftLeft,
1995                    '>' => Bo::ShiftRight,
1996                    '+' => Bo::Add,
1997                    '-' => Bo::Subtract,
1998                    '*' => Bo::Multiply,
1999                    '/' => Bo::Divide,
2000                    '%' => Bo::Modulo,
2001                    '&' => Bo::And,
2002                    '|' => Bo::InclusiveOr,
2003                    '^' => Bo::ExclusiveOr,
2004                    // Note: `consume_token` shouldn't produce any other assignment ops
2005                    _ => unreachable!(),
2006                };
2007
2008                let value = self.general_expression(lexer, ctx)?;
2009                (Some(op), value)
2010            }
2011            token @ (Token::IncrementOperation | Token::DecrementOperation, _) => {
2012                let op = match token.0 {
2013                    Token::IncrementOperation => ast::StatementKind::Increment,
2014                    Token::DecrementOperation => ast::StatementKind::Decrement,
2015                    _ => unreachable!(),
2016                };
2017
2018                let span = lexer.span_from(span_start);
2019                block.stmts.push(ast::Statement {
2020                    kind: op(target),
2021                    span,
2022                });
2023                return Ok(());
2024            }
2025            _ => return Err(Box::new(Error::Unexpected(op.1, ExpectedToken::Assignment))),
2026        };
2027
2028        let span = lexer.span_from(span_start);
2029        block.stmts.push(ast::Statement {
2030            kind: ast::StatementKind::Assign { target, op, value },
2031            span,
2032        });
2033        Ok(())
2034    }
2035
2036    /// Parse an assignment statement; also handles increment and decrement statements.
2037    fn assignment_statement<'a>(
2038        &mut self,
2039        lexer: &mut Lexer<'a>,
2040        ctx: &mut ExpressionContext<'a, '_, '_>,
2041        block: &mut ast::Block<'a>,
2042    ) -> Result<'a, ()> {
2043        let span_start = lexer.start_byte_offset();
2044        let target = self.lhs_expression(lexer, ctx)?;
2045        self.assignment_op_and_rhs(lexer, ctx, block, target, span_start)
2046    }
2047
2048    /// Parse a function call statement.
2049    /// Expects `ident` to have already been consumed (it is no longer in the lexer).
2050    fn function_statement<'a>(
2051        &mut self,
2052        lexer: &mut Lexer<'a>,
2053        ident: &'a str,
2054        ident_span: Span,
2055        span_start: usize,
2056        context: &mut ExpressionContext<'a, '_, '_>,
2057        block: &mut ast::Block<'a>,
2058    ) -> Result<'a, ()> {
2059        self.push_rule_span(Rule::SingularExpr, lexer);
2060
2061        context.unresolved.insert(ast::Dependency {
2062            ident,
2063            usage: ident_span,
2064        });
2065        let arguments = self.arguments(lexer, context)?;
2066        let span = lexer.span_from(span_start);
2067
2068        block.stmts.push(ast::Statement {
2069            kind: ast::StatementKind::Call {
2070                function: ast::Ident {
2071                    name: ident,
2072                    span: ident_span,
2073                },
2074                arguments,
2075            },
2076            span,
2077        });
2078
2079        self.pop_rule_span(lexer);
2080
2081        Ok(())
2082    }
2083
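    /// Parse a statement that is either a function call (`ident(...)`) or an
    /// assignment, distinguishing the two with a second token of lookahead
    /// obtained by cloning the lexer.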
2084    fn function_call_or_assignment_statement<'a>(
2085        &mut self,
2086        lexer: &mut Lexer<'a>,
2087        context: &mut ExpressionContext<'a, '_, '_>,
2088        block: &mut ast::Block<'a>,
2089    ) -> Result<'a, ()> {
2090        let span_start = lexer.start_byte_offset();
2091        match lexer.peek() {
2092            (Token::Word(name), span) => {
2093                // A little hack for two-token lookahead: clone the lexer so we can back up.
2094                let cloned = lexer.clone();
2095                let _ = lexer.next();
2096                match lexer.peek() {
2097                    (Token::Paren('('), _) => {
2098                        self.function_statement(lexer, name, span, span_start, context, block)
2099                    }
2100                    _ => {
2101                        *lexer = cloned;
2102                        self.assignment_statement(lexer, context, block)
2103                    }
2104                }
2105            }
2106            _ => self.assignment_statement(lexer, context, block),
2107        }
2108    }
2109
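    /// Parse a single statement and append it to `block`.
    ///
    /// `brace_nesting_level` is passed along to nested blocks; see
    /// [`Self::increase_brace_nesting`].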
2110    fn statement<'a>(
2111        &mut self,
2112        lexer: &mut Lexer<'a>,
2113        ctx: &mut ExpressionContext<'a, '_, '_>,
2114        block: &mut ast::Block<'a>,
2115        brace_nesting_level: u8,
2116    ) -> Result<'a, ()> {
2117        self.track_recursion(|this| {
2118            this.push_rule_span(Rule::Statement, lexer);
2119            match lexer.peek() {
2120                (Token::Separator(';'), _) => {
2121                    let _ = lexer.next();
2122                    this.pop_rule_span(lexer);
2123                }
2124                (token, _) if is_start_of_compound_statement(token) => {
2125                    let (inner, span) = this.block(lexer, ctx, brace_nesting_level)?;
2126                    block.stmts.push(ast::Statement {
2127                        kind: ast::StatementKind::Block(inner),
2128                        span,
2129                    });
2130                    this.pop_rule_span(lexer);
2131                }
2132                (Token::Word(word), _) => {
2133                    let kind = match word {
2134                        "_" => {
2135                            let _ = lexer.next();
2136                            lexer.expect(Token::Operation('='))?;
2137                            let expr = this.general_expression(lexer, ctx)?;
2138                            lexer.expect(Token::Separator(';'))?;
2139
2140                            ast::StatementKind::Phony(expr)
2141                        }
2142                        "let" => {
2143                            let _ = lexer.next();
2144                            let name = lexer.next_ident()?;
2145
2146                            let given_ty = if lexer.skip(Token::Separator(':')) {
2147                                let ty = this.type_decl(lexer, ctx)?;
2148                                Some(ty)
2149                            } else {
2150                                None
2151                            };
2152                            lexer.expect(Token::Operation('='))?;
2153                            let expr_id = this.general_expression(lexer, ctx)?;
2154                            lexer.expect(Token::Separator(';'))?;
2155
2156                            let handle = ctx.declare_local(name)?;
2157                            ast::StatementKind::LocalDecl(ast::LocalDecl::Let(ast::Let {
2158                                name,
2159                                ty: given_ty,
2160                                init: expr_id,
2161                                handle,
2162                            }))
2163                        }
2164                        "const" => {
2165                            let _ = lexer.next();
2166                            let name = lexer.next_ident()?;
2167
2168                            let given_ty = if lexer.skip(Token::Separator(':')) {
2169                                let ty = this.type_decl(lexer, ctx)?;
2170                                Some(ty)
2171                            } else {
2172                                None
2173                            };
2174                            lexer.expect(Token::Operation('='))?;
2175                            let expr_id = this.general_expression(lexer, ctx)?;
2176                            lexer.expect(Token::Separator(';'))?;
2177
2178                            let handle = ctx.declare_local(name)?;
2179                            ast::StatementKind::LocalDecl(ast::LocalDecl::Const(ast::LocalConst {
2180                                name,
2181                                ty: given_ty,
2182                                init: expr_id,
2183                                handle,
2184                            }))
2185                        }
2186                        "var" => {
2187                            let _ = lexer.next();
2188
2189                            let name = lexer.next_ident()?;
2190                            let ty = if lexer.skip(Token::Separator(':')) {
2191                                let ty = this.type_decl(lexer, ctx)?;
2192                                Some(ty)
2193                            } else {
2194                                None
2195                            };
2196
2197                            let init = if lexer.skip(Token::Operation('=')) {
2198                                let init = this.general_expression(lexer, ctx)?;
2199                                Some(init)
2200                            } else {
2201                                None
2202                            };
2203
2204                            lexer.expect(Token::Separator(';'))?;
2205
2206                            let handle = ctx.declare_local(name)?;
2207                            ast::StatementKind::LocalDecl(ast::LocalDecl::Var(ast::LocalVariable {
2208                                name,
2209                                ty,
2210                                init,
2211                                handle,
2212                            }))
2213                        }
2214                        "return" => {
2215                            let _ = lexer.next();
2216                            let value = if lexer.peek().0 != Token::Separator(';') {
2217                                let handle = this.general_expression(lexer, ctx)?;
2218                                Some(handle)
2219                            } else {
2220                                None
2221                            };
2222                            lexer.expect(Token::Separator(';'))?;
2223                            ast::StatementKind::Return { value }
2224                        }
2225                        "if" => {
2226                            let _ = lexer.next();
2227                            let condition = this.general_expression(lexer, ctx)?;
2228
2229                            let accept = this.block(lexer, ctx, brace_nesting_level)?.0;
2230
2231                            let mut elsif_stack = Vec::new();
2232                            let mut elseif_span_start = lexer.start_byte_offset();
2233                            let mut reject = loop {
2234                                if !lexer.skip(Token::Word("else")) {
2235                                    break ast::Block::default();
2236                                }
2237
2238                                if !lexer.skip(Token::Word("if")) {
2239                                    // ... else { ... }
2240                                    break this.block(lexer, ctx, brace_nesting_level)?.0;
2241                                }
2242
2243                                // ... else if (...) { ... }
2244                                let other_condition = this.general_expression(lexer, ctx)?;
2245                                let other_block = this.block(lexer, ctx, brace_nesting_level)?;
2246                                elsif_stack.push((elseif_span_start, other_condition, other_block));
2247                                elseif_span_start = lexer.start_byte_offset();
2248                            };
2249
2250                            // reverse-fold the else-if blocks
2251                            // Note: we may consider uplifting this to the IR
2252                            for (other_span_start, other_cond, other_block) in
2253                                elsif_stack.into_iter().rev()
2254                            {
2255                                let sub_stmt = ast::StatementKind::If {
2256                                    condition: other_cond,
2257                                    accept: other_block.0,
2258                                    reject,
2259                                };
2260                                reject = ast::Block::default();
2261                                let span = lexer.span_from(other_span_start);
2262                                reject.stmts.push(ast::Statement {
2263                                    kind: sub_stmt,
2264                                    span,
2265                                })
2266                            }
2267
2268                            ast::StatementKind::If {
2269                                condition,
2270                                accept,
2271                                reject,
2272                            }
2273                        }
2274                        "switch" => {
2275                            let _ = lexer.next();
2276                            let selector = this.general_expression(lexer, ctx)?;
2277                            let brace_span = lexer.expect_span(Token::Paren('{'))?;
2278                            let brace_nesting_level =
2279                                Self::increase_brace_nesting(brace_nesting_level, brace_span)?;
2280                            let mut cases = Vec::new();
2281
2282                            loop {
2283                                // cases + default
2284                                match lexer.next() {
2285                                    (Token::Word("case"), _) => {
2286                                        // parse a list of values
2287                                        let value = loop {
2288                                            let value = this.switch_value(lexer, ctx)?;
2289                                            if lexer.skip(Token::Separator(',')) {
2290                                                // list of values ends with ':' or a compound statement
2291                                                let next_token = lexer.peek().0;
2292                                                if next_token == Token::Separator(':')
2293                                                    || is_start_of_compound_statement(next_token)
2294                                                {
2295                                                    break value;
2296                                                }
2297                                            } else {
2298                                                break value;
2299                                            }
2300                                            cases.push(ast::SwitchCase {
2301                                                value,
2302                                                body: ast::Block::default(),
2303                                                fall_through: true,
2304                                            });
2305                                        };
2306
2307                                        lexer.skip(Token::Separator(':'));
2308
2309                                        let body = this.block(lexer, ctx, brace_nesting_level)?.0;
2310
2311                                        cases.push(ast::SwitchCase {
2312                                            value,
2313                                            body,
2314                                            fall_through: false,
2315                                        });
2316                                    }
2317                                    (Token::Word("default"), _) => {
2318                                        lexer.skip(Token::Separator(':'));
2319                                        let body = this.block(lexer, ctx, brace_nesting_level)?.0;
2320                                        cases.push(ast::SwitchCase {
2321                                            value: ast::SwitchValue::Default,
2322                                            body,
2323                                            fall_through: false,
2324                                        });
2325                                    }
2326                                    (Token::Paren('}'), _) => break,
2327                                    (_, span) => {
2328                                        return Err(Box::new(Error::Unexpected(
2329                                            span,
2330                                            ExpectedToken::SwitchItem,
2331                                        )))
2332                                    }
2333                                }
2334                            }
2335
2336                            ast::StatementKind::Switch { selector, cases }
2337                        }
2338                        "loop" => this.r#loop(lexer, ctx, brace_nesting_level)?,
2339                        "while" => {
2340                            let _ = lexer.next();
2341                            let mut body = ast::Block::default();
2342
2343                            let (condition, span) =
2344                                lexer.capture_span(|lexer| this.general_expression(lexer, ctx))?;
2345                            let mut reject = ast::Block::default();
2346                            reject.stmts.push(ast::Statement {
2347                                kind: ast::StatementKind::Break,
2348                                span,
2349                            });
2350
2351                            body.stmts.push(ast::Statement {
2352                                kind: ast::StatementKind::If {
2353                                    condition,
2354                                    accept: ast::Block::default(),
2355                                    reject,
2356                                },
2357                                span,
2358                            });
2359
2360                            let (block, span) = this.block(lexer, ctx, brace_nesting_level)?;
2361                            body.stmts.push(ast::Statement {
2362                                kind: ast::StatementKind::Block(block),
2363                                span,
2364                            });
2365
2366                            ast::StatementKind::Loop {
2367                                body,
2368                                continuing: ast::Block::default(),
2369                                break_if: None,
2370                            }
2371                        }
2372                        "for" => {
2373                            let _ = lexer.next();
2374                            lexer.expect(Token::Paren('('))?;
2375
2376                            ctx.local_table.push_scope();
2377
2378                            if !lexer.skip(Token::Separator(';')) {
2379                                let num_statements = block.stmts.len();
2380                                let (_, span) = {
2381                                    let ctx = &mut *ctx;
2382                                    let block = &mut *block;
2383                                    lexer.capture_span(|lexer| {
2384                                        this.statement(lexer, ctx, block, brace_nesting_level)
2385                                    })?
2386                                };
2387
2388                                if block.stmts.len() != num_statements {
2389                                    match block.stmts.last().unwrap().kind {
2390                                        ast::StatementKind::Call { .. }
2391                                        | ast::StatementKind::Assign { .. }
2392                                        | ast::StatementKind::LocalDecl(_) => {}
2393                                        _ => {
2394                                            return Err(Box::new(Error::InvalidForInitializer(
2395                                                span,
2396                                            )))
2397                                        }
2398                                    }
2399                                }
2400                            };
2401
2402                            let mut body = ast::Block::default();
2403                            if !lexer.skip(Token::Separator(';')) {
2404                                let (condition, span) =
2405                                    lexer.capture_span(|lexer| -> Result<'_, _> {
2406                                        let condition = this.general_expression(lexer, ctx)?;
2407                                        lexer.expect(Token::Separator(';'))?;
2408                                        Ok(condition)
2409                                    })?;
2410                                let mut reject = ast::Block::default();
2411                                reject.stmts.push(ast::Statement {
2412                                    kind: ast::StatementKind::Break,
2413                                    span,
2414                                });
2415                                body.stmts.push(ast::Statement {
2416                                    kind: ast::StatementKind::If {
2417                                        condition,
2418                                        accept: ast::Block::default(),
2419                                        reject,
2420                                    },
2421                                    span,
2422                                });
2423                            };
2424
2425                            let mut continuing = ast::Block::default();
2426                            if !lexer.skip(Token::Paren(')')) {
2427                                this.function_call_or_assignment_statement(
2428                                    lexer,
2429                                    ctx,
2430                                    &mut continuing,
2431                                )?;
2432                                lexer.expect(Token::Paren(')'))?;
2433                            }
2434
2435                            let (block, span) = this.block(lexer, ctx, brace_nesting_level)?;
2436                            body.stmts.push(ast::Statement {
2437                                kind: ast::StatementKind::Block(block),
2438                                span,
2439                            });
2440
2441                            ctx.local_table.pop_scope();
2442
2443                            ast::StatementKind::Loop {
2444                                body,
2445                                continuing,
2446                                break_if: None,
2447                            }
2448                        }
2449                        "break" => {
2450                            let (_, span) = lexer.next();
2451                            // Check whether the next token is `if`; this indicates
2452                            // that the user tried to write a `break if`, which
2453                            // is illegal in this position.
2454                            let (peeked_token, peeked_span) = lexer.peek();
2455                            if let Token::Word("if") = peeked_token {
2456                                let span = span.until(&peeked_span);
2457                                return Err(Box::new(Error::InvalidBreakIf(span)));
2458                            }
2459                            lexer.expect(Token::Separator(';'))?;
2460                            ast::StatementKind::Break
2461                        }
2462                        "continue" => {
2463                            let _ = lexer.next();
2464                            lexer.expect(Token::Separator(';'))?;
2465                            ast::StatementKind::Continue
2466                        }
2467                        "discard" => {
2468                            let _ = lexer.next();
2469                            lexer.expect(Token::Separator(';'))?;
2470                            ast::StatementKind::Kill
2471                        }
2472                        // https://www.w3.org/TR/WGSL/#const-assert-statement
2473                        "const_assert" => {
2474                            let _ = lexer.next();
2475                            // parentheses are optional
2476                            let paren = lexer.skip(Token::Paren('('));
2477
2478                            let condition = this.general_expression(lexer, ctx)?;
2479
2480                            if paren {
2481                                lexer.expect(Token::Paren(')'))?;
2482                            }
2483                            lexer.expect(Token::Separator(';'))?;
2484                            ast::StatementKind::ConstAssert(condition)
2485                        }
2486                        // assignment or a function call
2487                        _ => {
2488                            this.function_call_or_assignment_statement(lexer, ctx, block)?;
2489                            lexer.expect(Token::Separator(';'))?;
2490                            this.pop_rule_span(lexer);
2491                            return Ok(());
2492                        }
2493                    };
2494
2495                    let span = this.pop_rule_span(lexer);
2496                    block.stmts.push(ast::Statement { kind, span });
2497                }
2498                _ => {
2499                    this.assignment_statement(lexer, ctx, block)?;
2500                    lexer.expect(Token::Separator(';'))?;
2501                    this.pop_rule_span(lexer);
2502                }
2503            }
2504            Ok(())
2505        })
2506    }
2507
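    /// Parse a `loop` statement: a block of statements optionally terminated
    /// by a `continuing` block, which may itself end with a
    /// `break if <expr>;` statement.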
2508    fn r#loop<'a>(
2509        &mut self,
2510        lexer: &mut Lexer<'a>,
2511        ctx: &mut ExpressionContext<'a, '_, '_>,
2512        brace_nesting_level: u8,
2513    ) -> Result<'a, ast::StatementKind<'a>> {
2514        let _ = lexer.next();
2515        let mut body = ast::Block::default();
2516        let mut continuing = ast::Block::default();
2517        let mut break_if = None;
2518
2519        let brace_span = lexer.expect_span(Token::Paren('{'))?;
2520        let brace_nesting_level = Self::increase_brace_nesting(brace_nesting_level, brace_span)?;
2521
2522        ctx.local_table.push_scope();
2523
2524        loop {
2525            if lexer.skip(Token::Word("continuing")) {
2526                // Branch for the `continuing` block; this must be
2527                // the last thing in the loop body.
2528
2529                // Expect an opening brace to start the continuing block.
2530                let brace_span = lexer.expect_span(Token::Paren('{'))?;
2531                let brace_nesting_level =
2532                    Self::increase_brace_nesting(brace_nesting_level, brace_span)?;
2533                loop {
2534                    if lexer.skip(Token::Word("break")) {
2535                        // Branch for the `break if` statement; it has the form
2536                        // `break if <expr>;` and must be the last statement in
2537                        // a continuing block.
2538
2539                        // The `break` must be followed by an `if` to form
2540                        // the `break if`.
2541                        lexer.expect(Token::Word("if"))?;
2542
2543                        let condition = self.general_expression(lexer, ctx)?;
2544                        // Set the condition of the break if to the newly parsed
2545                        // expression
2546                        break_if = Some(condition);
2547
2548                        // Expect a semicolon to close the statement
2549                        lexer.expect(Token::Separator(';'))?;
2550                        // Expect a closing brace to close the continuing block,
2551                        // since the break if must be the last statement
2552                        lexer.expect(Token::Paren('}'))?;
2553                        // Stop parsing the continuing block
2554                        break;
2555                    } else if lexer.skip(Token::Paren('}')) {
2556                        // If we encounter a closing brace it means we have reached
2557                        // the end of the continuing block and should stop processing
2558                        break;
2559                    } else {
2560                        // Otherwise try to parse a statement
2561                        self.statement(lexer, ctx, &mut continuing, brace_nesting_level)?;
2562                    }
2563                }
2564                // Since the continuing block must be the last part of the loop body,
2565                // we expect to see a closing brace to end the loop body
2566                lexer.expect(Token::Paren('}'))?;
2567                break;
2568            }
2569            if lexer.skip(Token::Paren('}')) {
2570                // If we encounter a closing brace it means we have reached
2571                // the end of the loop body and should stop processing
2572                break;
2573            }
2574            // Otherwise try to parse a statement
2575            self.statement(lexer, ctx, &mut body, brace_nesting_level)?;
2576        }
2577
2578        ctx.local_table.pop_scope();
2579
2580        Ok(ast::StatementKind::Loop {
2581            body,
2582            continuing,
2583            break_if,
2584        })
2585    }
2586
2587    /// Parse a `compound_statement`: a brace-delimited list of statements.
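    ///
    /// Leading `@diagnostic` attributes are parsed but currently rejected with
    /// `Error::DiagnosticAttributeNotYetImplementedAtParseSite`.
    ///
    /// An illustrative example of the form this accepts (identifiers are
    /// placeholders):
    ///
    /// ```wgsl
    /// {
    ///     let x = 1;
    ///     let y = x + 1;
    /// }
    /// ```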
2588    fn block<'a>(
2589        &mut self,
2590        lexer: &mut Lexer<'a>,
2591        ctx: &mut ExpressionContext<'a, '_, '_>,
2592        brace_nesting_level: u8,
2593    ) -> Result<'a, (ast::Block<'a>, Span)> {
2594        self.push_rule_span(Rule::Block, lexer);
2595
2596        ctx.local_table.push_scope();
2597
2598        let mut diagnostic_filters = DiagnosticFilterMap::new();
2599
2600        self.push_rule_span(Rule::Attribute, lexer);
2601        while lexer.skip(Token::Attribute) {
2602            let (name, name_span) = lexer.next_ident_with_span()?;
2603            if let Some(DirectiveKind::Diagnostic) = DirectiveKind::from_ident(name) {
2604                let filter = self.diagnostic_filter(lexer)?;
2605                let span = self.peek_rule_span(lexer);
2606                diagnostic_filters
2607                    .add(filter, span, ShouldConflictOnFullDuplicate::Yes)
2608                    .map_err(|e| Box::new(e.into()))?;
2609            } else {
2610                return Err(Box::new(Error::Unexpected(
2611                    name_span,
2612                    ExpectedToken::DiagnosticAttribute,
2613                )));
2614            }
2615        }
2616        self.pop_rule_span(lexer);
2617
2618        if !diagnostic_filters.is_empty() {
2619            return Err(Box::new(
2620                Error::DiagnosticAttributeNotYetImplementedAtParseSite {
2621                    site_name_plural: "compound statements",
2622                    spans: diagnostic_filters.spans().collect(),
2623                },
2624            ));
2625        }
2626
2627        let brace_span = lexer.expect_span(Token::Paren('{'))?;
2628        let brace_nesting_level = Self::increase_brace_nesting(brace_nesting_level, brace_span)?;
2629        let mut block = ast::Block::default();
2630        while !lexer.skip(Token::Paren('}')) {
2631            self.statement(lexer, ctx, &mut block, brace_nesting_level)?;
2632        }
2633
2634        ctx.local_table.pop_scope();
2635
2636        let span = self.pop_rule_span(lexer);
2637        Ok((block, span))
2638    }
2639
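    /// Parse the attributes that may precede a function parameter or return
    /// type, e.g. `@location(0)`, `@builtin(position)`, or `@interpolate(flat)`,
    /// into an optional [`ast::Binding`].
    ///
    /// Returns `Ok(None)` if no binding attributes are present.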
2640    fn varying_binding<'a>(
2641        &mut self,
2642        lexer: &mut Lexer<'a>,
2643        ctx: &mut ExpressionContext<'a, '_, '_>,
2644    ) -> Result<'a, Option<ast::Binding<'a>>> {
2645        let mut bind_parser = BindingParser::default();
2646        self.push_rule_span(Rule::Attribute, lexer);
2647
2648        while lexer.skip(Token::Attribute) {
2649            let (word, span) = lexer.next_ident_with_span()?;
2650            bind_parser.parse(self, lexer, word, span, ctx)?;
2651        }
2652
2653        let span = self.pop_rule_span(lexer);
2654        bind_parser.finish(span)
2655    }
2656
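    /// Parse a function declaration, starting just after the `fn` keyword.
    ///
    /// An illustrative example of the form this accepts (names are
    /// placeholders):
    ///
    /// ```wgsl
    /// fn scale(x: f32) -> f32 {
    ///     return x * 2.0;
    /// }
    /// ```
    ///
    /// The `must_use` span and the enclosing diagnostic filter chain are
    /// supplied by the caller, `Self::global_decl`, which also attaches any
    /// entry point information to the returned [`ast::Function`].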
2657    fn function_decl<'a>(
2658        &mut self,
2659        lexer: &mut Lexer<'a>,
2660        diagnostic_filter_leaf: Option<Handle<DiagnosticFilterNode>>,
2661        must_use: Option<Span>,
2662        out: &mut ast::TranslationUnit<'a>,
2663        dependencies: &mut FastIndexSet<ast::Dependency<'a>>,
2664    ) -> Result<'a, ast::Function<'a>> {
2665        self.push_rule_span(Rule::FunctionDecl, lexer);
2666        // read function name
2667        let fun_name = lexer.next_ident()?;
2668
2669        let mut locals = Arena::new();
2670
2671        let mut ctx = ExpressionContext {
2672            expressions: &mut out.expressions,
2673            local_table: &mut SymbolTable::default(),
2674            locals: &mut locals,
2675            types: &mut out.types,
2676            unresolved: dependencies,
2677        };
2678
2679        // start a scope that contains arguments as well as the function body
2680        ctx.local_table.push_scope();
2681
2682        // read parameter list
2683        let mut arguments = Vec::new();
2684        lexer.expect(Token::Paren('('))?;
2685        let mut ready = true;
2686        while !lexer.skip(Token::Paren(')')) {
2687            if !ready {
2688                return Err(Box::new(Error::Unexpected(
2689                    lexer.next().1,
2690                    ExpectedToken::Token(Token::Separator(',')),
2691                )));
2692            }
2693            let binding = self.varying_binding(lexer, &mut ctx)?;
2694
2695            let param_name = lexer.next_ident()?;
2696
2697            lexer.expect(Token::Separator(':'))?;
2698            let param_type = self.type_decl(lexer, &mut ctx)?;
2699
2700            let handle = ctx.declare_local(param_name)?;
2701            arguments.push(ast::FunctionArgument {
2702                name: param_name,
2703                ty: param_type,
2704                binding,
2705                handle,
2706            });
2707            ready = lexer.skip(Token::Separator(','));
2708        }
2709        // read return type
2710        let result = if lexer.skip(Token::Arrow) {
2711            let binding = self.varying_binding(lexer, &mut ctx)?;
2712            let ty = self.type_decl(lexer, &mut ctx)?;
2713            let must_use = must_use.is_some();
2714            Some(ast::FunctionResult {
2715                ty,
2716                binding,
2717                must_use,
2718            })
2719        } else if let Some(must_use) = must_use {
2720            return Err(Box::new(Error::FunctionMustUseReturnsVoid(
2721                must_use,
2722                self.peek_rule_span(lexer),
2723            )));
2724        } else {
2725            None
2726        };
2727
2728        // do not use `self.block` here, since we must not push a new scope
2729        lexer.expect(Token::Paren('{'))?;
2730        let brace_nesting_level = 1;
2731        let mut body = ast::Block::default();
2732        while !lexer.skip(Token::Paren('}')) {
2733            self.statement(lexer, &mut ctx, &mut body, brace_nesting_level)?;
2734        }
2735
2736        ctx.local_table.pop_scope();
2737
2738        let fun = ast::Function {
2739            entry_point: None,
2740            name: fun_name,
2741            arguments,
2742            result,
2743            body,
2744            diagnostic_filter_leaf,
2745            doc_comments: Vec::new(),
2746        };
2747
2748        // done
2749        self.pop_rule_span(lexer);
2750
2751        Ok(fun)
2752    }
2753
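    /// Parse the identifier list of an `enable` or `requires` directive: one or
    /// more comma-separated identifiers terminated by a `;` (a trailing comma
    /// is accepted), calling `handler` on each identifier and its span.
    ///
    /// For example, `enable f16;` reaches this with `f16` as the single
    /// identifier.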
2754    fn directive_ident_list<'a>(
2755        &self,
2756        lexer: &mut Lexer<'a>,
2757        handler: impl FnMut(&'a str, Span) -> Result<'a, ()>,
2758    ) -> Result<'a, ()> {
2759        let mut handler = handler;
2760        'next_arg: loop {
2761            let (ident, span) = lexer.next_ident_with_span()?;
2762            handler(ident, span)?;
2763
2764            let expected_token = match lexer.peek().0 {
2765                Token::Separator(',') => {
2766                    let _ = lexer.next();
2767                    if matches!(lexer.peek().0, Token::Word(..)) {
2768                        continue 'next_arg;
2769                    }
2770                    ExpectedToken::AfterIdentListComma
2771                }
2772                _ => ExpectedToken::AfterIdentListArg,
2773            };
2774
2775            if !matches!(lexer.next().0, Token::Separator(';')) {
2776                return Err(Box::new(Error::Unexpected(span, expected_token)));
2777            }
2778
2779            break Ok(());
2780        }
2781    }
2782
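    /// Parse one module-scope declaration: a `struct`, `alias`, `const`,
    /// `override`, `var`, `fn`, or `const_assert`, together with any attributes
    /// that precede it (`@group`/`@binding`, `@id`, shader stage attributes,
    /// `@workgroup_size`, `@early_depth_test`, `@must_use`, and `@diagnostic`).
    ///
    /// A stray `;` is also accepted and produces no declaration, and reaching
    /// [`Token::End`] simply returns `Ok(())`.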
2783    fn global_decl<'a>(
2784        &mut self,
2785        lexer: &mut Lexer<'a>,
2786        out: &mut ast::TranslationUnit<'a>,
2787    ) -> Result<'a, ()> {
2788        let doc_comments = lexer.accumulate_doc_comments();
2789
2790        // read attributes
2791        let mut binding = None;
2792        let mut stage = ParsedAttribute::default();
2793        let mut compute_span = Span::new(0, 0);
2794        let mut workgroup_size = ParsedAttribute::default();
2795        let mut early_depth_test = ParsedAttribute::default();
2796        let (mut bind_index, mut bind_group) =
2797            (ParsedAttribute::default(), ParsedAttribute::default());
2798        let mut id = ParsedAttribute::default();
2799
2800        let mut must_use: ParsedAttribute<Span> = ParsedAttribute::default();
2801
2802        let mut dependencies = FastIndexSet::default();
2803        let mut ctx = ExpressionContext {
2804            expressions: &mut out.expressions,
2805            local_table: &mut SymbolTable::default(),
2806            locals: &mut Arena::new(),
2807            types: &mut out.types,
2808            unresolved: &mut dependencies,
2809        };
2810        let mut diagnostic_filters = DiagnosticFilterMap::new();
2811        let ensure_no_diag_attrs = |on_what, filters: DiagnosticFilterMap| -> Result<()> {
2812            if filters.is_empty() {
2813                Ok(())
2814            } else {
2815                Err(Box::new(Error::DiagnosticAttributeNotSupported {
2816                    on_what,
2817                    spans: filters.spans().collect(),
2818                }))
2819            }
2820        };
2821
2822        self.push_rule_span(Rule::Attribute, lexer);
2823        while lexer.skip(Token::Attribute) {
2824            let (name, name_span) = lexer.next_ident_with_span()?;
2825            if let Some(DirectiveKind::Diagnostic) = DirectiveKind::from_ident(name) {
2826                let filter = self.diagnostic_filter(lexer)?;
2827                let span = self.peek_rule_span(lexer);
2828                diagnostic_filters
2829                    .add(filter, span, ShouldConflictOnFullDuplicate::Yes)
2830                    .map_err(|e| Box::new(e.into()))?;
2831                continue;
2832            }
2833            match name {
2834                "binding" => {
2835                    lexer.expect(Token::Paren('('))?;
2836                    bind_index.set(self.general_expression(lexer, &mut ctx)?, name_span)?;
2837                    lexer.expect(Token::Paren(')'))?;
2838                }
2839                "group" => {
2840                    lexer.expect(Token::Paren('('))?;
2841                    bind_group.set(self.general_expression(lexer, &mut ctx)?, name_span)?;
2842                    lexer.expect(Token::Paren(')'))?;
2843                }
2844                "id" => {
2845                    lexer.expect(Token::Paren('('))?;
2846                    id.set(self.general_expression(lexer, &mut ctx)?, name_span)?;
2847                    lexer.expect(Token::Paren(')'))?;
2848                }
2849                "vertex" => {
2850                    stage.set(ShaderStage::Vertex, name_span)?;
2851                }
2852                "fragment" => {
2853                    stage.set(ShaderStage::Fragment, name_span)?;
2854                }
2855                "compute" => {
2856                    stage.set(ShaderStage::Compute, name_span)?;
2857                    compute_span = name_span;
2858                }
2859                "workgroup_size" => {
2860                    lexer.expect(Token::Paren('('))?;
2861                    let mut new_workgroup_size = [None; 3];
2862                    for (i, size) in new_workgroup_size.iter_mut().enumerate() {
2863                        *size = Some(self.general_expression(lexer, &mut ctx)?);
2864                        match lexer.next() {
2865                            (Token::Paren(')'), _) => break,
2866                            (Token::Separator(','), _) if i != 2 => (),
2867                            other => {
2868                                return Err(Box::new(Error::Unexpected(
2869                                    other.1,
2870                                    ExpectedToken::WorkgroupSizeSeparator,
2871                                )))
2872                            }
2873                        }
2874                    }
2875                    workgroup_size.set(new_workgroup_size, name_span)?;
2876                }
2877                "early_depth_test" => {
2878                    lexer.expect(Token::Paren('('))?;
2879                    let (ident, ident_span) = lexer.next_ident_with_span()?;
2880                    let value = if ident == "force" {
2881                        crate::EarlyDepthTest::Force
2882                    } else {
2883                        crate::EarlyDepthTest::Allow {
2884                            conservative: conv::map_conservative_depth(ident, ident_span)?,
2885                        }
2886                    };
2887                    lexer.expect(Token::Paren(')'))?;
2888                    early_depth_test.set(value, name_span)?;
2889                }
2890                "must_use" => {
2891                    must_use.set(name_span, name_span)?;
2892                }
2893                _ => return Err(Box::new(Error::UnknownAttribute(name_span))),
2894            }
2895        }
2896
2897        let attrib_span = self.pop_rule_span(lexer);
2898        match (bind_group.value, bind_index.value) {
2899            (Some(group), Some(index)) => {
2900                binding = Some(ast::ResourceBinding {
2901                    group,
2902                    binding: index,
2903                });
2904            }
2905            (Some(_), None) => {
2906                return Err(Box::new(Error::MissingAttribute("binding", attrib_span)))
2907            }
2908            (None, Some(_)) => return Err(Box::new(Error::MissingAttribute("group", attrib_span))),
2909            (None, None) => {}
2910        }
2911
2912        // read item
2913        let start = lexer.start_byte_offset();
2914        let kind = match lexer.next() {
2915            (Token::Separator(';'), _) => {
2916                ensure_no_diag_attrs(
2917                    DiagnosticAttributeNotSupportedPosition::SemicolonInModulePosition,
2918                    diagnostic_filters,
2919                )?;
2920                None
2921            }
2922            (Token::Word(word), directive_span) if DirectiveKind::from_ident(word).is_some() => {
2923                return Err(Box::new(Error::DirectiveAfterFirstGlobalDecl {
2924                    directive_span,
2925                }));
2926            }
2927            (Token::Word("struct"), _) => {
2928                ensure_no_diag_attrs("`struct`s".into(), diagnostic_filters)?;
2929
2930                let name = lexer.next_ident()?;
2931
2932                let members = self.struct_body(lexer, &mut ctx)?;
2933
2934                Some(ast::GlobalDeclKind::Struct(ast::Struct {
2935                    name,
2936                    members,
2937                    doc_comments,
2938                }))
2939            }
2940            (Token::Word("alias"), _) => {
2941                ensure_no_diag_attrs("`alias`es".into(), diagnostic_filters)?;
2942
2943                let name = lexer.next_ident()?;
2944
2945                lexer.expect(Token::Operation('='))?;
2946                let ty = self.type_decl(lexer, &mut ctx)?;
2947                lexer.expect(Token::Separator(';'))?;
2948                Some(ast::GlobalDeclKind::Type(ast::TypeAlias { name, ty }))
2949            }
2950            (Token::Word("const"), _) => {
2951                ensure_no_diag_attrs("`const`s".into(), diagnostic_filters)?;
2952
2953                let name = lexer.next_ident()?;
2954
2955                let ty = if lexer.skip(Token::Separator(':')) {
2956                    let ty = self.type_decl(lexer, &mut ctx)?;
2957                    Some(ty)
2958                } else {
2959                    None
2960                };
2961
2962                lexer.expect(Token::Operation('='))?;
2963                let init = self.general_expression(lexer, &mut ctx)?;
2964                lexer.expect(Token::Separator(';'))?;
2965
2966                Some(ast::GlobalDeclKind::Const(ast::Const {
2967                    name,
2968                    ty,
2969                    init,
2970                    doc_comments,
2971                }))
2972            }
2973            (Token::Word("override"), _) => {
2974                ensure_no_diag_attrs("`override`s".into(), diagnostic_filters)?;
2975
2976                let name = lexer.next_ident()?;
2977
2978                let ty = if lexer.skip(Token::Separator(':')) {
2979                    Some(self.type_decl(lexer, &mut ctx)?)
2980                } else {
2981                    None
2982                };
2983
2984                let init = if lexer.skip(Token::Operation('=')) {
2985                    Some(self.general_expression(lexer, &mut ctx)?)
2986                } else {
2987                    None
2988                };
2989
2990                lexer.expect(Token::Separator(';'))?;
2991
2992                Some(ast::GlobalDeclKind::Override(ast::Override {
2993                    name,
2994                    id: id.value,
2995                    ty,
2996                    init,
2997                }))
2998            }
2999            (Token::Word("var"), _) => {
3000                ensure_no_diag_attrs("`var`s".into(), diagnostic_filters)?;
3001
3002                let mut var = self.variable_decl(lexer, &mut ctx)?;
3003                var.binding = binding.take();
3004                var.doc_comments = doc_comments;
3005                Some(ast::GlobalDeclKind::Var(var))
3006            }
3007            (Token::Word("fn"), _) => {
3008                let diagnostic_filter_leaf = Self::write_diagnostic_filters(
3009                    &mut out.diagnostic_filters,
3010                    diagnostic_filters,
3011                    out.diagnostic_filter_leaf,
3012                );
3013
3014                let function = self.function_decl(
3015                    lexer,
3016                    diagnostic_filter_leaf,
3017                    must_use.value,
3018                    out,
3019                    &mut dependencies,
3020                )?;
3021                Some(ast::GlobalDeclKind::Fn(ast::Function {
3022                    entry_point: if let Some(stage) = stage.value {
3023                        if stage == ShaderStage::Compute && workgroup_size.value.is_none() {
3024                            return Err(Box::new(Error::MissingWorkgroupSize(compute_span)));
3025                        }
3026                        Some(ast::EntryPoint {
3027                            stage,
3028                            early_depth_test: early_depth_test.value,
3029                            workgroup_size: workgroup_size.value,
3030                        })
3031                    } else {
3032                        None
3033                    },
3034                    doc_comments,
3035                    ..function
3036                }))
3037            }
3038            (Token::Word("const_assert"), _) => {
3039                ensure_no_diag_attrs("`const_assert`s".into(), diagnostic_filters)?;
3040
3041                // parentheses are optional
3042                let paren = lexer.skip(Token::Paren('('));
3043
3044                let condition = self.general_expression(lexer, &mut ctx)?;
3045
3046                if paren {
3047                    lexer.expect(Token::Paren(')'))?;
3048                }
3049                lexer.expect(Token::Separator(';'))?;
3050                Some(ast::GlobalDeclKind::ConstAssert(condition))
3051            }
3052            (Token::End, _) => return Ok(()),
3053            other => {
3054                return Err(Box::new(Error::Unexpected(
3055                    other.1,
3056                    ExpectedToken::GlobalItem,
3057                )))
3058            }
3059        };
3060
3061        if let Some(kind) = kind {
3062            out.decls.append(
3063                ast::GlobalDecl { kind, dependencies },
3064                lexer.span_from(start),
3065            );
3066        }
3067
3068        if !self.rules.is_empty() {
3069            log::error!("Reached the end of global decl, but rule stack is not empty");
3070            log::error!("Rules: {:?}", self.rules);
3071            return Err(Box::new(Error::Internal("rule stack is not empty")));
3072        };
3073
3074        match binding {
3075            None => Ok(()),
3076            Some(_) => Err(Box::new(Error::Internal(
3077                "we had the attribute but no var?",
3078            ))),
3079        }
3080    }
3081
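    /// Parse `source` into an [`ast::TranslationUnit`].
    ///
    /// Module doc comments and directives (`diagnostic`, `enable`, `requires`)
    /// are read first; the rest of the input is consumed as a sequence of
    /// global declarations via `Self::global_decl`.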
3082    pub fn parse<'a>(
3083        &mut self,
3084        source: &'a str,
3085        options: &Options,
3086    ) -> Result<'a, ast::TranslationUnit<'a>> {
3087        self.reset();
3088
3089        let mut lexer = Lexer::new(source, !options.parse_doc_comments);
3090        let mut tu = ast::TranslationUnit::default();
3091        let mut enable_extensions = EnableExtensions::empty();
3092        let mut diagnostic_filters = DiagnosticFilterMap::new();
3093
3094        // Parse module doc comments.
3095        tu.doc_comments = lexer.accumulate_module_doc_comments();
3096
3097        // Parse directives.
3098        while let Ok((ident, _directive_ident_span)) = lexer.peek_ident_with_span() {
3099            if let Some(kind) = DirectiveKind::from_ident(ident) {
3100                self.push_rule_span(Rule::Directive, &mut lexer);
3101                let _ = lexer.next_ident_with_span().unwrap();
3102                match kind {
3103                    DirectiveKind::Diagnostic => {
3104                        let diagnostic_filter = self.diagnostic_filter(&mut lexer)?;
3105                        let span = self.peek_rule_span(&lexer);
3106                        diagnostic_filters
3107                            .add(diagnostic_filter, span, ShouldConflictOnFullDuplicate::No)
3108                            .map_err(|e| Box::new(e.into()))?;
3109                        lexer.expect(Token::Separator(';'))?;
3110                    }
3111                    DirectiveKind::Enable => {
3112                        self.directive_ident_list(&mut lexer, |ident, span| {
3113                            let kind = EnableExtension::from_ident(ident, span)?;
3114                            let extension = match kind {
3115                                EnableExtension::Implemented(kind) => kind,
3116                                EnableExtension::Unimplemented(kind) => {
3117                                    return Err(Box::new(Error::EnableExtensionNotYetImplemented {
3118                                        kind,
3119                                        span,
3120                                    }))
3121                                }
3122                            };
3123                            enable_extensions.add(extension);
3124                            Ok(())
3125                        })?;
3126                    }
3127                    DirectiveKind::Requires => {
3128                        self.directive_ident_list(&mut lexer, |ident, span| {
3129                            match LanguageExtension::from_ident(ident) {
3130                                Some(LanguageExtension::Implemented(_kind)) => {
3131                                    // NOTE: No further validation is needed for an extension, so
3132                                    // just throw parsed information away. If we ever want to apply
3133                                    // what we've parsed to diagnostics, maybe we'll want to refer
3134                                    // to enabled extensions later?
3135                                    Ok(())
3136                                }
3137                                Some(LanguageExtension::Unimplemented(kind)) => {
3138                                    Err(Box::new(Error::LanguageExtensionNotYetImplemented {
3139                                        kind,
3140                                        span,
3141                                    }))
3142                                }
3143                                None => Err(Box::new(Error::UnknownLanguageExtension(span, ident))),
3144                            }
3145                        })?;
3146                    }
3147                }
3148                self.pop_rule_span(&lexer);
3149            } else {
3150                break;
3151            }
3152        }
3153
3154        lexer.enable_extensions = enable_extensions.clone();
3155        tu.enable_extensions = enable_extensions;
3156        tu.diagnostic_filter_leaf =
3157            Self::write_diagnostic_filters(&mut tu.diagnostic_filters, diagnostic_filters, None);
3158
3159        loop {
3160            match self.global_decl(&mut lexer, &mut tu) {
3161                Err(error) => return Err(error),
3162                Ok(()) => {
3163                    if lexer.peek().0 == Token::End {
3164                        break;
3165                    }
3166                }
3167            }
3168        }
3169
3170        Ok(tu)
3171    }
3172
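    /// Return `brace_nesting_level + 1`, or an error if that would exceed the
    /// WGSL limit on the nesting depth of brace-enclosed statements.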
3173    fn increase_brace_nesting(brace_nesting_level: u8, brace_span: Span) -> Result<'static, u8> {
3174        // From [spec.](https://gpuweb.github.io/gpuweb/wgsl/#limits):
3175        //
3176        // > § 2.4. Limits
3177        // >
3178        // > …
3179        // >
3180        // > Maximum nesting depth of brace-enclosed statements in a function[:] 127
3181        const BRACE_NESTING_MAXIMUM: u8 = 127;
3182        if brace_nesting_level + 1 > BRACE_NESTING_MAXIMUM {
3183            return Err(Box::new(Error::ExceededLimitForNestedBraces {
3184                span: brace_span,
3185                limit: BRACE_NESTING_MAXIMUM,
3186            }));
3187        }
3188        Ok(brace_nesting_level + 1)
3189    }
3190
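    /// Parse the parenthesized arguments of a `diagnostic(…)` attribute or
    /// directive into a [`DiagnosticFilter`].
    ///
    /// Illustrative examples (rule names other than the standard ones are
    /// placeholders): `(off, derivative_uniformity)` for a standard rule, or
    /// `(warning, vendor.rule)` for a user-defined `name.name` rule. Unknown
    /// single-token rule names are only reported as a warning.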
3191    fn diagnostic_filter<'a>(&self, lexer: &mut Lexer<'a>) -> Result<'a, DiagnosticFilter> {
3192        lexer.expect(Token::Paren('('))?;
3193
3194        let (severity_control_name, severity_control_name_span) = lexer.next_ident_with_span()?;
3195        let new_severity = diagnostic_filter::Severity::from_wgsl_ident(severity_control_name)
3196            .ok_or(Error::DiagnosticInvalidSeverity {
3197                severity_control_name_span,
3198            })?;
3199
3200        lexer.expect(Token::Separator(','))?;
3201
3202        let (diagnostic_name_token, diagnostic_name_token_span) = lexer.next_ident_with_span()?;
3203        let triggering_rule = if lexer.skip(Token::Separator('.')) {
3204            let (ident, _span) = lexer.next_ident_with_span()?;
3205            FilterableTriggeringRule::User(Box::new([diagnostic_name_token.into(), ident.into()]))
3206        } else {
3207            let diagnostic_rule_name = diagnostic_name_token;
3208            let diagnostic_rule_name_span = diagnostic_name_token_span;
3209            if let Some(triggering_rule) =
3210                StandardFilterableTriggeringRule::from_wgsl_ident(diagnostic_rule_name)
3211            {
3212                FilterableTriggeringRule::Standard(triggering_rule)
3213            } else {
3214                diagnostic_filter::Severity::Warning.report_wgsl_parse_diag(
3215                    Box::new(Error::UnknownDiagnosticRuleName(diagnostic_rule_name_span)),
3216                    lexer.source,
3217                )?;
3218                FilterableTriggeringRule::Unknown(diagnostic_rule_name.into())
3219            }
3220        };
3221        let filter = DiagnosticFilter {
3222            triggering_rule,
3223            new_severity,
3224        };
3225        lexer.skip(Token::Separator(','));
3226        lexer.expect(Token::Paren(')'))?;
3227
3228        Ok(filter)
3229    }
3230
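    /// Append `filters` to `arena` as a chain of [`DiagnosticFilterNode`]s
    /// whose `parent` links lead back to the given `parent`.
    ///
    /// Returns the handle of the innermost node written, or `parent` unchanged
    /// if `filters` is empty.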
3231    pub(crate) fn write_diagnostic_filters(
3232        arena: &mut Arena<DiagnosticFilterNode>,
3233        filters: DiagnosticFilterMap,
3234        parent: Option<Handle<DiagnosticFilterNode>>,
3235    ) -> Option<Handle<DiagnosticFilterNode>> {
3236        filters
3237            .into_iter()
3238            .fold(parent, |parent, (triggering_rule, (new_severity, span))| {
3239                Some(arena.append(
3240                    DiagnosticFilterNode {
3241                        inner: DiagnosticFilter {
3242                            new_severity,
3243                            triggering_rule,
3244                        },
3245                        parent,
3246                    },
3247                    span,
3248                ))
3249            })
3250    }
3251}
3252
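/// Returns `true` if `token` can begin a compound statement, i.e. it is either
/// an attribute marker or an opening brace.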
3253const fn is_start_of_compound_statement<'a>(token: Token<'a>) -> bool {
3254    matches!(token, Token::Attribute | Token::Paren('{'))
3255}