1use alloc::{boxed::Box, vec::Vec};
2use directive::enable_extension::ImplementedEnableExtension;
3
4use crate::diagnostic_filter::{
5 self, DiagnosticFilter, DiagnosticFilterMap, DiagnosticFilterNode, FilterableTriggeringRule,
6 ShouldConflictOnFullDuplicate, StandardFilterableTriggeringRule,
7};
8use crate::front::wgsl::error::{DiagnosticAttributeNotSupportedPosition, Error, ExpectedToken};
9use crate::front::wgsl::parse::directive::enable_extension::{EnableExtension, EnableExtensions};
10use crate::front::wgsl::parse::directive::language_extension::LanguageExtension;
11use crate::front::wgsl::parse::directive::DirectiveKind;
12use crate::front::wgsl::parse::lexer::{Lexer, Token, TokenSpan};
13use crate::front::wgsl::parse::number::Number;
14use crate::front::wgsl::Result;
15use crate::front::SymbolTable;
16use crate::{Arena, FastHashSet, FastIndexSet, Handle, ShaderStage, Span};
17
18pub mod ast;
19pub mod conv;
20pub mod directive;
21pub mod lexer;
22pub mod number;
23
/// Mutable state threaded through expression parsing: the arenas and lookup
/// tables that freshly parsed expressions and locals are appended to.
struct ExpressionContext<'input, 'temp, 'out> {
    /// Arena that every parsed expression node is appended to.
    expressions: &'out mut Arena<ast::Expression<'input>>,

    /// Maps names of locals currently in scope to their handles in `locals`.
    local_table: &'temp mut SymbolTable<&'input str, Handle<ast::Local>>,

    /// Arena of local declarations; each entry carries the declaration span.
    locals: &'out mut Arena<ast::Local>,

    /// Identifiers that did not resolve to a local; recorded as dependencies
    /// (with the span of their use) for later resolution.
    unresolved: &'out mut FastIndexSet<ast::Dependency<'input>>,
}
84
impl<'a> ExpressionContext<'a, '_, '_> {
    /// Parses a left-associative chain of binary operators at one precedence
    /// level.
    ///
    /// `classifier` maps a peeked token to its binary operator (`None` stops
    /// the chain); `parser` parses each operand. Operands are folded
    /// left-to-right into `Expression::Binary` nodes, each spanning from the
    /// start of the whole chain to the current lexer position.
    fn parse_binary_op(
        &mut self,
        lexer: &mut Lexer<'a>,
        classifier: impl Fn(Token<'a>) -> Option<crate::BinaryOperator>,
        mut parser: impl FnMut(&mut Lexer<'a>, &mut Self) -> Result<'a, Handle<ast::Expression<'a>>>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        let start = lexer.start_byte_offset();
        let mut accumulator = parser(lexer, self)?;
        while let Some(op) = classifier(lexer.peek().0) {
            // Consume the operator token the classifier just matched on.
            let _ = lexer.next();
            let left = accumulator;
            let right = parser(lexer, self)?;
            accumulator = self.expressions.append(
                ast::Expression::Binary { op, left, right },
                lexer.span_from(start),
            );
        }
        Ok(accumulator)
    }

    /// Declares a new local named `name`, appending it to `locals` and
    /// binding it in `local_table`.
    ///
    /// Returns `Error::Redefinition` if the name is already bound in the
    /// current scope. Note the local is appended to the arena *before* the
    /// duplicate check, so the arena entry exists even on the error path.
    fn declare_local(&mut self, name: ast::Ident<'a>) -> Result<'a, Handle<ast::Local>> {
        let handle = self.locals.append(ast::Local, name.span);
        if let Some(old) = self.local_table.add(name.name, handle) {
            Err(Box::new(Error::Redefinition {
                previous: self.locals.get_span(old),
                current: name.span,
            }))
        } else {
            Ok(handle)
        }
    }
}
118
/// Grammar productions tracked on [`Parser::rules`].
///
/// A rule is pushed when the parser enters the corresponding construct and
/// popped when it leaves, so spans can be computed for whole constructs and
/// the parser can query its enclosing context (see `Parser::race_rules`).
#[derive(Copy, Clone, Debug, PartialEq)]
enum Rule {
    Attribute,
    VariableDecl,
    FunctionDecl,
    Block,
    Statement,
    PrimaryExpr,
    SingularExpr,
    UnaryExpr,
    GeneralExpr,
    Directive,
    GenericExpr,
    EnclosedExpr,
    LhsExpr,
}
140
/// An attribute value that may be set at most once.
struct ParsedAttribute<T> {
    // `Some` once the attribute has been parsed; a second `set` is an error.
    value: Option<T>,
}
144
/// Written by hand (rather than `#[derive(Default)]`) so that
/// `ParsedAttribute<T>: Default` holds for *every* `T`; the derive would
/// needlessly require `T: Default`.
impl<T> Default for ParsedAttribute<T> {
    fn default() -> Self {
        Self { value: None }
    }
}
150
151impl<T> ParsedAttribute<T> {
152 fn set(&mut self, value: T, name_span: Span) -> Result<'static, ()> {
153 if self.value.is_some() {
154 return Err(Box::new(Error::RepeatedAttribute(name_span)));
155 }
156 self.value = Some(value);
157 Ok(())
158 }
159}
160
/// Accumulates the binding-related attributes of a declaration
/// (`@location`, `@builtin`, `@interpolate`, `@invariant`, `@blend_src`,
/// `@per_primitive`) one at a time, to be merged into a single
/// `ast::Binding` by [`Self::finish`].
#[derive(Default)]
struct BindingParser<'a> {
    location: ParsedAttribute<Handle<ast::Expression<'a>>>,
    built_in: ParsedAttribute<crate::BuiltIn>,
    interpolation: ParsedAttribute<crate::Interpolation>,
    sampling: ParsedAttribute<crate::Sampling>,
    invariant: ParsedAttribute<bool>,
    blend_src: ParsedAttribute<Handle<ast::Expression<'a>>>,
    // Unit payload: only presence/absence of the attribute matters.
    per_primitive: ParsedAttribute<()>,
}
171
impl<'a> BindingParser<'a> {
    /// Parses one binding attribute named `name` (the `@` and the name have
    /// already been consumed) and records it in the matching field.
    ///
    /// Repeated attributes and unknown attribute names are errors; the
    /// `blend_src` and `per_primitive` attributes additionally require their
    /// respective enable extensions to be active.
    fn parse(
        &mut self,
        parser: &mut Parser,
        lexer: &mut Lexer<'a>,
        name: &'a str,
        name_span: Span,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, ()> {
        match name {
            "location" => {
                lexer.expect(Token::Paren('('))?;
                self.location
                    .set(parser.expression(lexer, ctx)?, name_span)?;
                // Optional trailing comma before the closing paren.
                lexer.next_if(Token::Separator(','));
                lexer.expect(Token::Paren(')'))?;
            }
            "builtin" => {
                lexer.expect(Token::Paren('('))?;
                let (raw, span) = lexer.next_ident_with_span()?;
                self.built_in.set(
                    conv::map_built_in(&lexer.enable_extensions, raw, span)?,
                    name_span,
                )?;
                lexer.next_if(Token::Separator(','));
                lexer.expect(Token::Paren(')'))?;
            }
            "interpolate" => {
                lexer.expect(Token::Paren('('))?;
                let (raw, span) = lexer.next_ident_with_span()?;
                self.interpolation
                    .set(conv::map_interpolation(raw, span)?, name_span)?;
                // A second argument, if present, is the sampling mode.
                if lexer.next_if(Token::Separator(',')) {
                    let (raw, span) = lexer.next_ident_with_span()?;
                    self.sampling
                        .set(conv::map_sampling(raw, span)?, name_span)?;
                }
                lexer.next_if(Token::Separator(','));
                lexer.expect(Token::Paren(')'))?;
            }

            // `@invariant` takes no arguments.
            "invariant" => {
                self.invariant.set(true, name_span)?;
            }
            "blend_src" => {
                lexer.require_enable_extension(
                    ImplementedEnableExtension::DualSourceBlending,
                    name_span,
                )?;

                lexer.expect(Token::Paren('('))?;
                self.blend_src
                    .set(parser.expression(lexer, ctx)?, name_span)?;
                lexer.next_if(Token::Separator(','));
                lexer.expect(Token::Paren(')'))?;
            }
            "per_primitive" => {
                lexer.require_enable_extension(
                    ImplementedEnableExtension::WgpuMeshShader,
                    name_span,
                )?;
                self.per_primitive.set((), name_span)?;
            }
            _ => return Err(Box::new(Error::UnknownAttribute(name_span))),
        }
        Ok(())
    }

    /// Combines the accumulated attributes into an `ast::Binding`.
    ///
    /// Only three shapes are valid: no binding attributes at all (`None`),
    /// a `@location` binding (optionally with interpolation, sampling,
    /// blend_src and per_primitive), or a `@builtin` binding (where only
    /// `position` may carry `@invariant`). Anything else is
    /// `Error::InconsistentBinding`.
    fn finish(self, span: Span) -> Result<'a, Option<ast::Binding<'a>>> {
        match (
            self.location.value,
            self.built_in.value,
            self.interpolation.value,
            self.sampling.value,
            self.invariant.value.unwrap_or_default(),
            self.blend_src.value,
            self.per_primitive.value,
        ) {
            (None, None, None, None, false, None, None) => Ok(None),
            (Some(location), None, interpolation, sampling, false, blend_src, per_primitive) => {
                Ok(Some(ast::Binding::Location {
                    location,
                    interpolation,
                    sampling,
                    blend_src,
                    per_primitive: per_primitive.is_some(),
                }))
            }
            // `@invariant` is only meaningful on `@builtin(position)`.
            (None, Some(crate::BuiltIn::Position { .. }), None, None, invariant, None, None) => {
                Ok(Some(ast::Binding::BuiltIn(crate::BuiltIn::Position {
                    invariant,
                })))
            }
            (None, Some(built_in), None, None, false, None, None) => {
                Ok(Some(ast::Binding::BuiltIn(built_in)))
            }
            (_, _, _, _, _, _, _) => Err(Box::new(Error::InconsistentBinding(span))),
        }
    }
}
276
/// Configuration options for the WGSL front end.
pub struct Options {
    /// Whether doc comments are collected while parsing.
    pub parse_doc_comments: bool,
    /// NOTE(review): presumably restricts which capability-gated features the
    /// parsed module may use — confirm against the use sites of this field.
    pub capabilities: crate::valid::Capabilities,
}
284
285impl Options {
286 pub const fn new() -> Self {
288 Options {
289 parse_doc_comments: false,
290 capabilities: crate::valid::Capabilities::all(),
291 }
292 }
293}
294
/// Recursive-descent parser for WGSL.
///
/// Holds only transient parsing state: the stack of grammar rules currently
/// being parsed (paired with their start byte offsets, for span
/// computation), and a depth counter guarding against runaway recursion.
pub struct Parser {
    rules: Vec<(Rule, usize)>,
    recursion_depth: u32,
}
299
300impl Parser {
301 pub const fn new() -> Self {
302 Parser {
303 rules: Vec::new(),
304 recursion_depth: 0,
305 }
306 }
307
308 fn reset(&mut self) {
309 self.rules.clear();
310 self.recursion_depth = 0;
311 }
312
313 fn push_rule_span(&mut self, rule: Rule, lexer: &mut Lexer<'_>) {
314 self.rules.push((rule, lexer.start_byte_offset()));
315 }
316
317 fn pop_rule_span(&mut self, lexer: &Lexer<'_>) -> Span {
318 let (_, initial) = self.rules.pop().unwrap();
319 lexer.span_from(initial)
320 }
321
322 fn peek_rule_span(&mut self, lexer: &Lexer<'_>) -> Span {
323 let &(_, initial) = self.rules.last().unwrap();
324 lexer.span_from(initial)
325 }
326
327 fn race_rules(&self, rule0: Rule, rule1: Rule) -> Option<Rule> {
328 Some(
329 self.rules
330 .iter()
331 .rev()
332 .find(|&x| x.0 == rule0 || x.0 == rule1)?
333 .0,
334 )
335 }
336
    /// Runs `f`, guarding against unbounded recursive descent.
    ///
    /// The depth counter is incremented before calling `f` and decremented
    /// afterwards; once 256 nested calls are reached the parse is aborted
    /// with an internal error. On that early-return path the counter is left
    /// elevated, which is acceptable because the parse is abandoned and
    /// `reset` clears the counter.
    fn track_recursion<'a, F, R>(&mut self, f: F) -> Result<'a, R>
    where
        F: FnOnce(&mut Self) -> Result<'a, R>,
    {
        self.recursion_depth += 1;
        if self.recursion_depth >= 256 {
            return Err(Box::new(Error::Internal("Parser recursion limit exceeded")));
        }
        let ret = f(self);
        self.recursion_depth -= 1;
        ret
    }
349
350 fn switch_value<'a>(
351 &mut self,
352 lexer: &mut Lexer<'a>,
353 ctx: &mut ExpressionContext<'a, '_, '_>,
354 ) -> Result<'a, ast::SwitchValue<'a>> {
355 if lexer.next_if(Token::Word("default")) {
356 return Ok(ast::SwitchValue::Default);
357 }
358
359 let expr = self.expression(lexer, ctx)?;
360 Ok(ast::SwitchValue::Expr(expr))
361 }
362
    /// Parses a parenthesized, comma-separated argument list, including the
    /// surrounding parentheses.
    ///
    /// Runs under `Rule::EnclosedExpr` so operator classification inside the
    /// arguments is unrestricted. The first iteration handles the empty-list
    /// case (`()`); subsequent iterations let `lexer.next_argument` consume
    /// the separator and detect the closing paren (which also permits a
    /// trailing comma).
    fn arguments<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Vec<Handle<ast::Expression<'a>>>> {
        self.push_rule_span(Rule::EnclosedExpr, lexer);
        lexer.open_arguments()?;
        let mut arguments = Vec::new();
        loop {
            if !arguments.is_empty() {
                // After the first argument, `next_argument` expects either
                // `,` (continue) or `)` (done).
                if !lexer.next_argument()? {
                    break;
                }
            } else if lexer.next_if(Token::Paren(')')) {
                // Empty argument list.
                break;
            }
            let arg = self.expression(lexer, ctx)?;
            arguments.push(arg);
        }

        self.pop_rule_span(lexer);
        Ok(arguments)
    }
387
388 fn enclosed_expression<'a>(
389 &mut self,
390 lexer: &mut Lexer<'a>,
391 ctx: &mut ExpressionContext<'a, '_, '_>,
392 ) -> Result<'a, Handle<ast::Expression<'a>>> {
393 self.push_rule_span(Rule::EnclosedExpr, lexer);
394 let expr = self.expression(lexer, ctx)?;
395 self.pop_rule_span(lexer);
396 Ok(expr)
397 }
398
399 fn ident_expr<'a>(
400 &mut self,
401 name: &'a str,
402 name_span: Span,
403 ctx: &mut ExpressionContext<'a, '_, '_>,
404 ) -> ast::IdentExpr<'a> {
405 match ctx.local_table.lookup(name) {
406 Some(&local) => ast::IdentExpr::Local(local),
407 None => {
408 ctx.unresolved.insert(ast::Dependency {
409 ident: name,
410 usage: name_span,
411 });
412 ast::IdentExpr::Unresolved(name)
413 }
414 }
415 }
416
    /// Parses a primary expression whose first token is `token` (already
    /// consumed by the caller).
    ///
    /// Handles parenthesized expressions, `true`/`false`, numeric literals,
    /// the predeclared ray-tracing constants, and identifiers — the last
    /// becoming either a call (when followed by `(`) or a plain ident
    /// expression. The appended node's span runs from `token`'s span to the
    /// current lexer position.
    fn primary_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
        token: TokenSpan<'a>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.push_rule_span(Rule::PrimaryExpr, lexer);

        // Helpers lowering the predeclared ray-tracing constants to plain
        // `u32` literals.
        const fn literal_ray_flag<'b>(flag: crate::RayFlag) -> ast::Expression<'b> {
            ast::Expression::Literal(ast::Literal::Number(Number::U32(flag.bits())))
        }
        const fn literal_ray_intersection<'b>(
            intersection: crate::RayQueryIntersection,
        ) -> ast::Expression<'b> {
            ast::Expression::Literal(ast::Literal::Number(Number::U32(intersection as u32)))
        }

        let expr = match token {
            (Token::Paren('('), _) => {
                // Parenthesized expression: return the inner handle directly,
                // with no extra wrapper node appended.
                let expr = self.enclosed_expression(lexer, ctx)?;
                lexer.expect(Token::Paren(')'))?;
                self.pop_rule_span(lexer);
                return Ok(expr);
            }
            (Token::Word("true"), _) => ast::Expression::Literal(ast::Literal::Bool(true)),
            (Token::Word("false"), _) => ast::Expression::Literal(ast::Literal::Bool(false)),
            (Token::Number(res), span) => {
                let num = res.map_err(|err| Error::BadNumber(span, err))?;

                // Some literal forms are gated behind an enable extension.
                if let Some(enable_extension) = num.requires_enable_extension() {
                    lexer.require_enable_extension(enable_extension, span)?;
                }

                ast::Expression::Literal(ast::Literal::Number(num))
            }
            (Token::Word("RAY_FLAG_NONE"), _) => literal_ray_flag(crate::RayFlag::empty()),
            (Token::Word("RAY_FLAG_FORCE_OPAQUE"), _) => {
                literal_ray_flag(crate::RayFlag::FORCE_OPAQUE)
            }
            (Token::Word("RAY_FLAG_FORCE_NO_OPAQUE"), _) => {
                literal_ray_flag(crate::RayFlag::FORCE_NO_OPAQUE)
            }
            (Token::Word("RAY_FLAG_TERMINATE_ON_FIRST_HIT"), _) => {
                literal_ray_flag(crate::RayFlag::TERMINATE_ON_FIRST_HIT)
            }
            (Token::Word("RAY_FLAG_SKIP_CLOSEST_HIT_SHADER"), _) => {
                literal_ray_flag(crate::RayFlag::SKIP_CLOSEST_HIT_SHADER)
            }
            (Token::Word("RAY_FLAG_CULL_BACK_FACING"), _) => {
                literal_ray_flag(crate::RayFlag::CULL_BACK_FACING)
            }
            (Token::Word("RAY_FLAG_CULL_FRONT_FACING"), _) => {
                literal_ray_flag(crate::RayFlag::CULL_FRONT_FACING)
            }
            (Token::Word("RAY_FLAG_CULL_OPAQUE"), _) => {
                literal_ray_flag(crate::RayFlag::CULL_OPAQUE)
            }
            (Token::Word("RAY_FLAG_CULL_NO_OPAQUE"), _) => {
                literal_ray_flag(crate::RayFlag::CULL_NO_OPAQUE)
            }
            (Token::Word("RAY_FLAG_SKIP_TRIANGLES"), _) => {
                literal_ray_flag(crate::RayFlag::SKIP_TRIANGLES)
            }
            (Token::Word("RAY_FLAG_SKIP_AABBS"), _) => literal_ray_flag(crate::RayFlag::SKIP_AABBS),
            (Token::Word("RAY_QUERY_INTERSECTION_NONE"), _) => {
                literal_ray_intersection(crate::RayQueryIntersection::None)
            }
            (Token::Word("RAY_QUERY_INTERSECTION_TRIANGLE"), _) => {
                literal_ray_intersection(crate::RayQueryIntersection::Triangle)
            }
            (Token::Word("RAY_QUERY_INTERSECTION_GENERATED"), _) => {
                literal_ray_intersection(crate::RayQueryIntersection::Generated)
            }
            (Token::Word("RAY_QUERY_INTERSECTION_AABB"), _) => {
                literal_ray_intersection(crate::RayQueryIntersection::Aabb)
            }
            (Token::Word(word), span) => {
                let ident = self.template_elaborated_ident(word, span, lexer, ctx)?;

                // An identifier followed by `(` is a function call.
                if let Token::Paren('(') = lexer.peek().0 {
                    let arguments = self.arguments(lexer, ctx)?;
                    ast::Expression::Call(ast::CallPhrase {
                        function: ident,
                        arguments,
                    })
                } else {
                    ast::Expression::Ident(ident)
                }
            }
            other => {
                return Err(Box::new(Error::Unexpected(
                    other.1,
                    ExpectedToken::PrimaryExpression,
                )))
            }
        };

        self.pop_rule_span(lexer);
        let span = lexer.span_with_start(token.1);
        let expr = ctx.expressions.append(expr, span);
        Ok(expr)
    }
519
    /// Parses any chain of postfix accessors (`.member` and `[index]`)
    /// following `expr`, returning the final expression handle.
    ///
    /// Each accessor wraps the previous expression in a `Member` or `Index`
    /// node whose span starts at `expr_start`. Stops (without consuming) at
    /// the first token that is neither `.` nor `[`.
    fn component_or_swizzle_specifier<'a>(
        &mut self,
        expr_start: Span,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
        expr: Handle<ast::Expression<'a>>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        let mut expr = expr;

        loop {
            let expression = match lexer.peek().0 {
                Token::Separator('.') => {
                    let _ = lexer.next();
                    let field = lexer.next_ident()?;

                    ast::Expression::Member { base: expr, field }
                }
                Token::Paren('[') => {
                    let _ = lexer.next();
                    // Index expressions run under `EnclosedExpr`, so `>` et
                    // al. are valid operators inside the brackets.
                    let index = self.enclosed_expression(lexer, ctx)?;
                    lexer.expect(Token::Paren(']'))?;

                    ast::Expression::Index { base: expr, index }
                }
                _ => break,
            };

            let span = lexer.span_with_start(expr_start);
            expr = ctx.expressions.append(expression, span);
        }

        Ok(expr)
    }
553
    /// Parses a unary expression: any run of prefix operators
    /// (`-`, `!`, `~`, `*`, `&`) followed by a singular expression.
    ///
    /// The prefix operators are collected iteratively (rather than by
    /// recursion), then applied innermost-first by replaying the stack in
    /// reverse, so `- ! x` becomes `Negate(LogicalNot(x))`.
    fn unary_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.push_rule_span(Rule::UnaryExpr, lexer);

        enum UnaryOp {
            Negate,
            LogicalNot,
            BitwiseNot,
            Deref,
            AddrOf,
        }

        let mut ops = Vec::new();
        let mut expr;

        loop {
            match lexer.next() {
                (Token::Operation('-'), span) => {
                    ops.push((UnaryOp::Negate, span));
                }
                (Token::Operation('!'), span) => {
                    ops.push((UnaryOp::LogicalNot, span));
                }
                (Token::Operation('~'), span) => {
                    ops.push((UnaryOp::BitwiseNot, span));
                }
                (Token::Operation('*'), span) => {
                    ops.push((UnaryOp::Deref, span));
                }
                (Token::Operation('&'), span) => {
                    ops.push((UnaryOp::AddrOf, span));
                }
                // First non-operator token starts the operand.
                token => {
                    expr = self.singular_expression(lexer, ctx, token)?;
                    break;
                }
            };
        }

        // Apply the collected operators from innermost to outermost; each
        // node's span starts at its own operator token.
        for (op, span) in ops.into_iter().rev() {
            let e = match op {
                UnaryOp::Negate => ast::Expression::Unary {
                    op: crate::UnaryOperator::Negate,
                    expr,
                },
                UnaryOp::LogicalNot => ast::Expression::Unary {
                    op: crate::UnaryOperator::LogicalNot,
                    expr,
                },
                UnaryOp::BitwiseNot => ast::Expression::Unary {
                    op: crate::UnaryOperator::BitwiseNot,
                    expr,
                },
                UnaryOp::Deref => ast::Expression::Deref(expr),
                UnaryOp::AddrOf => ast::Expression::AddrOf(expr),
            };
            let span = lexer.span_with_start(span);
            expr = ctx.expressions.append(e, span);
        }

        self.pop_rule_span(lexer);
        Ok(expr)
    }
621
    /// Parses the left-hand side of an assignment: `*`/`&` prefixes,
    /// parenthesized LHS expressions, or an identifier with postfix
    /// accessors.
    ///
    /// `token` is the first token if the caller already consumed it, else
    /// the next token is taken from the lexer. `expected_token` customizes
    /// the error when the token cannot start an LHS. Recursion is bounded by
    /// `track_recursion`.
    fn lhs_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
        token: Option<TokenSpan<'a>>,
        expected_token: ExpectedToken<'a>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.track_recursion(|this| {
            this.push_rule_span(Rule::LhsExpr, lexer);
            let token = token.unwrap_or_else(|| lexer.next());
            let expr = match token {
                (Token::Operation('*'), _) => {
                    let expr =
                        this.lhs_expression(lexer, ctx, None, ExpectedToken::LhsExpression)?;
                    let expr = ast::Expression::Deref(expr);
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                (Token::Operation('&'), _) => {
                    let expr =
                        this.lhs_expression(lexer, ctx, None, ExpectedToken::LhsExpression)?;
                    let expr = ast::Expression::AddrOf(expr);
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                (Token::Paren('('), span) => {
                    let expr =
                        this.lhs_expression(lexer, ctx, None, ExpectedToken::LhsExpression)?;
                    lexer.expect(Token::Paren(')'))?;
                    // A parenthesized LHS may itself be followed by `.x`/`[i]`.
                    this.component_or_swizzle_specifier(span, lexer, ctx, expr)?
                }
                (Token::Word(word), span) => {
                    let ident = this.ident_expr(word, span, ctx);
                    // LHS identifiers never carry a template list.
                    let ident = ast::TemplateElaboratedIdent {
                        ident,
                        ident_span: span,
                        template_list: Vec::new(),
                        template_list_span: Span::UNDEFINED,
                    };
                    let ident = ctx.expressions.append(ast::Expression::Ident(ident), span);
                    this.component_or_swizzle_specifier(span, lexer, ctx, ident)?
                }
                (_, span) => {
                    return Err(Box::new(Error::Unexpected(span, expected_token)));
                }
            };

            this.pop_rule_span(lexer);
            Ok(expr)
        })
    }
677
678 fn singular_expression<'a>(
680 &mut self,
681 lexer: &mut Lexer<'a>,
682 ctx: &mut ExpressionContext<'a, '_, '_>,
683 token: TokenSpan<'a>,
684 ) -> Result<'a, Handle<ast::Expression<'a>>> {
685 self.push_rule_span(Rule::SingularExpr, lexer);
686 let primary_expr = self.primary_expression(lexer, ctx, token)?;
687 let singular_expr =
688 self.component_or_swizzle_specifier(token.1, lexer, ctx, primary_expr)?;
689 self.pop_rule_span(lexer);
690
691 Ok(singular_expr)
692 }
693
    /// Parses the equality / relational / shift / additive / multiplicative
    /// precedence levels as nested left-associative operator chains, with
    /// `unary_expression` at the bottom.
    fn equality_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        context: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        // equality: == !=
        context.parse_binary_op(
            lexer,
            |token| match token {
                Token::LogicalOperation('=') => Some(crate::BinaryOperator::Equal),
                Token::LogicalOperation('!') => Some(crate::BinaryOperator::NotEqual),
                _ => None,
            },
            |lexer, context| {
                // Which relational/shift classifier applies depends on
                // whether the innermost enclosing context is a template list
                // (`GenericExpr`) or a parenthesized expression
                // (`EnclosedExpr`) — whichever was entered most recently.
                let enclosing = self.race_rules(Rule::GenericExpr, Rule::EnclosedExpr);
                // relational: < > <= >= (restricted inside template lists)
                context.parse_binary_op(
                    lexer,
                    match enclosing {
                        // NOTE(review): inside a template list only `<=` is
                        // accepted — presumably so `>`-shaped tokens remain
                        // free to close the template list; confirm against
                        // the WGSL template-disambiguation rules.
                        Some(Rule::GenericExpr) => |token| match token {
                            Token::LogicalOperation('<') => Some(crate::BinaryOperator::LessEqual),
                            _ => None,
                        },
                        _ => |token| match token {
                            Token::Paren('<') => Some(crate::BinaryOperator::Less),
                            Token::Paren('>') => Some(crate::BinaryOperator::Greater),
                            Token::LogicalOperation('<') => Some(crate::BinaryOperator::LessEqual),
                            Token::LogicalOperation('>') => {
                                Some(crate::BinaryOperator::GreaterEqual)
                            }
                            _ => None,
                        },
                    },
                    |lexer, context| {
                        // shift: << >> (>> excluded inside template lists)
                        context.parse_binary_op(
                            lexer,
                            match enclosing {
                                Some(Rule::GenericExpr) => |token| match token {
                                    Token::ShiftOperation('<') => {
                                        Some(crate::BinaryOperator::ShiftLeft)
                                    }
                                    _ => None,
                                },
                                _ => |token| match token {
                                    Token::ShiftOperation('<') => {
                                        Some(crate::BinaryOperator::ShiftLeft)
                                    }
                                    Token::ShiftOperation('>') => {
                                        Some(crate::BinaryOperator::ShiftRight)
                                    }
                                    _ => None,
                                },
                            },
                            |lexer, context| {
                                // additive: + -
                                context.parse_binary_op(
                                    lexer,
                                    |token| match token {
                                        Token::Operation('+') => Some(crate::BinaryOperator::Add),
                                        Token::Operation('-') => {
                                            Some(crate::BinaryOperator::Subtract)
                                        }
                                        _ => None,
                                    },
                                    |lexer, context| {
                                        // multiplicative: * / %
                                        context.parse_binary_op(
                                            lexer,
                                            |token| match token {
                                                Token::Operation('*') => {
                                                    Some(crate::BinaryOperator::Multiply)
                                                }
                                                Token::Operation('/') => {
                                                    Some(crate::BinaryOperator::Divide)
                                                }
                                                Token::Operation('%') => {
                                                    Some(crate::BinaryOperator::Modulo)
                                                }
                                                _ => None,
                                            },
                                            |lexer, context| self.unary_expression(lexer, context),
                                        )
                                    },
                                )
                            },
                        )
                    },
                )
            },
        )
    }
786
    /// Parses a full expression, layering the logical and bitwise operator
    /// precedence levels (`||`, `&&`, `|`, `^`, `&`, lowest to highest) on
    /// top of [`Self::equality_expression`].
    fn expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        context: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.push_rule_span(Rule::GeneralExpr, lexer);
        // logical or: ||
        let handle = context.parse_binary_op(
            lexer,
            |token| match token {
                Token::LogicalOperation('|') => Some(crate::BinaryOperator::LogicalOr),
                _ => None,
            },
            |lexer, context| {
                // logical and: &&
                context.parse_binary_op(
                    lexer,
                    |token| match token {
                        Token::LogicalOperation('&') => Some(crate::BinaryOperator::LogicalAnd),
                        _ => None,
                    },
                    |lexer, context| {
                        // bitwise or: |
                        context.parse_binary_op(
                            lexer,
                            |token| match token {
                                Token::Operation('|') => Some(crate::BinaryOperator::InclusiveOr),
                                _ => None,
                            },
                            |lexer, context| {
                                // bitwise xor: ^
                                context.parse_binary_op(
                                    lexer,
                                    |token| match token {
                                        Token::Operation('^') => {
                                            Some(crate::BinaryOperator::ExclusiveOr)
                                        }
                                        _ => None,
                                    },
                                    |lexer, context| {
                                        // bitwise and: &
                                        context.parse_binary_op(
                                            lexer,
                                            |token| match token {
                                                Token::Operation('&') => {
                                                    Some(crate::BinaryOperator::And)
                                                }
                                                _ => None,
                                            },
                                            |lexer, context| {
                                                self.equality_expression(lexer, context)
                                            },
                                        )
                                    },
                                )
                            },
                        )
                    },
                )
            },
        )?;
        self.pop_rule_span(lexer);
        Ok(handle)
    }
851
852 fn optionally_typed_ident<'a>(
853 &mut self,
854 lexer: &mut Lexer<'a>,
855 ctx: &mut ExpressionContext<'a, '_, '_>,
856 ) -> Result<'a, (ast::Ident<'a>, Option<ast::TemplateElaboratedIdent<'a>>)> {
857 let name = lexer.next_ident()?;
858
859 let ty = if lexer.next_if(Token::Separator(':')) {
860 Some(self.type_specifier(lexer, ctx)?)
861 } else {
862 None
863 };
864
865 Ok((name, ty))
866 }
867
    /// Parses a global `var` declaration (the `var` keyword itself has
    /// already been consumed): optional template list, name, optional type,
    /// optional `= init`, terminated by `;`.
    ///
    /// The returned `GlobalVariable` has no binding, doc comments, or memory
    /// decorations — callers fill those in.
    fn variable_decl<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, ast::GlobalVariable<'a>> {
        self.push_rule_span(Rule::VariableDecl, lexer);
        // e.g. the address space / access mode arguments of `var<...>`.
        let (template_list, _) = self.maybe_template_list(lexer, ctx)?;
        let (name, ty) = self.optionally_typed_ident(lexer, ctx)?;

        let init = if lexer.next_if(Token::Operation('=')) {
            let handle = self.expression(lexer, ctx)?;
            Some(handle)
        } else {
            None
        };
        lexer.expect(Token::Separator(';'))?;
        self.pop_rule_span(lexer);

        Ok(ast::GlobalVariable {
            name,
            template_list,
            binding: None,
            ty,
            init,
            doc_comments: Vec::new(),
            memory_decorations: crate::MemoryDecorations::empty(),
        })
    }
897
    /// Parses the `{ ... }` member list of a struct declaration.
    ///
    /// Each member may be preceded by doc comments and attributes (`size`,
    /// `align`, plus the binding attributes handled by `BindingParser`).
    /// Members must be comma-separated (`ready` tracks whether a comma was
    /// seen since the previous member), and duplicate member names are a
    /// redefinition error.
    fn struct_body<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Vec<ast::StructMember<'a>>> {
        let mut members = Vec::new();
        // Names seen so far, for duplicate detection.
        let mut member_names = FastHashSet::default();

        lexer.expect(Token::Paren('{'))?;
        let mut ready = true;
        while !lexer.next_if(Token::Paren('}')) {
            if !ready {
                // Previous member was not followed by a comma.
                return Err(Box::new(Error::Unexpected(
                    lexer.next().1,
                    ExpectedToken::Token(Token::Separator(',')),
                )));
            }

            let doc_comments = lexer.accumulate_doc_comments();

            let (mut size, mut align) = (ParsedAttribute::default(), ParsedAttribute::default());
            self.push_rule_span(Rule::Attribute, lexer);
            let mut bind_parser = BindingParser::default();
            while lexer.next_if(Token::Attribute) {
                match lexer.next_ident_with_span()? {
                    ("size", name_span) => {
                        lexer.expect(Token::Paren('('))?;
                        let expr = self.expression(lexer, ctx)?;
                        lexer.next_if(Token::Separator(','));
                        lexer.expect(Token::Paren(')'))?;
                        size.set(expr, name_span)?;
                    }
                    ("align", name_span) => {
                        lexer.expect(Token::Paren('('))?;
                        let expr = self.expression(lexer, ctx)?;
                        lexer.next_if(Token::Separator(','));
                        lexer.expect(Token::Paren(')'))?;
                        align.set(expr, name_span)?;
                    }
                    // All other attributes are binding attributes.
                    (word, word_span) => bind_parser.parse(self, lexer, word, word_span, ctx)?,
                }
            }

            let bind_span = self.pop_rule_span(lexer);
            let binding = bind_parser.finish(bind_span)?;

            let name = lexer.next_ident()?;
            lexer.expect(Token::Separator(':'))?;
            let ty = self.type_specifier(lexer, ctx)?;
            ready = lexer.next_if(Token::Separator(','));

            members.push(ast::StructMember {
                name,
                ty,
                binding,
                size: size.value,
                align: align.value,
                doc_comments,
            });

            // The new member was already pushed, so on a duplicate `find`
            // locates the *earlier* member with this name for the error.
            if !member_names.insert(name.name) {
                return Err(Box::new(Error::Redefinition {
                    previous: members
                        .iter()
                        .find(|x| x.name.name == name.name)
                        .map(|x| x.name.span)
                        .unwrap(),
                    current: name.span,
                }));
            }
        }

        Ok(members)
    }
972
973 fn maybe_template_list<'a>(
974 &mut self,
975 lexer: &mut Lexer<'a>,
976 ctx: &mut ExpressionContext<'a, '_, '_>,
977 ) -> Result<'a, (Vec<Handle<ast::Expression<'a>>>, Span)> {
978 let start = lexer.start_byte_offset();
979 if lexer.next_if(Token::TemplateArgsStart) {
980 let mut args = Vec::new();
981 args.push(self.expression(lexer, ctx)?);
982 while lexer.next_if(Token::Separator(',')) && lexer.peek().0 != Token::TemplateArgsEnd {
983 args.push(self.expression(lexer, ctx)?);
984 }
985 lexer.expect(Token::TemplateArgsEnd)?;
986 let span = lexer.span_from(start);
987 Ok((args, span))
988 } else {
989 Ok((Vec::new(), Span::UNDEFINED))
990 }
991 }
992
993 fn template_elaborated_ident<'a>(
994 &mut self,
995 word: &'a str,
996 span: Span,
997 lexer: &mut Lexer<'a>,
998 ctx: &mut ExpressionContext<'a, '_, '_>,
999 ) -> Result<'a, ast::TemplateElaboratedIdent<'a>> {
1000 let ident = self.ident_expr(word, span, ctx);
1001 let (template_list, template_list_span) = self.maybe_template_list(lexer, ctx)?;
1002 Ok(ast::TemplateElaboratedIdent {
1003 ident,
1004 ident_span: span,
1005 template_list,
1006 template_list_span,
1007 })
1008 }
1009
1010 fn type_specifier<'a>(
1011 &mut self,
1012 lexer: &mut Lexer<'a>,
1013 ctx: &mut ExpressionContext<'a, '_, '_>,
1014 ) -> Result<'a, ast::TemplateElaboratedIdent<'a>> {
1015 let (name, span) = lexer.next_ident_with_span()?;
1016 self.template_elaborated_ident(name, span, lexer, ctx)
1017 }
1018
    /// Parses an assignment-like statement whose first token is `token`:
    /// a phony assignment (`_ = expr`), a plain or compound assignment
    /// (`lhs = expr`, `lhs += expr`, ...), or increment/decrement
    /// (`lhs++` / `lhs--`). The parsed statement is pushed onto `block`.
    fn variable_updating_statement<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
        block: &mut ast::Block<'a>,
        token: TokenSpan<'a>,
        expected_token: ExpectedToken<'a>,
    ) -> Result<'a, ()> {
        match token {
            // Phony assignment: evaluate the RHS, discard the result.
            (Token::Word("_"), span) => {
                lexer.expect(Token::Operation('='))?;
                let expr = self.expression(lexer, ctx)?;
                let span = lexer.span_with_start(span);
                block.stmts.push(ast::Statement {
                    kind: ast::StatementKind::Phony(expr),
                    span,
                });
                return Ok(());
            }
            _ => {}
        }
        let target = self.lhs_expression(lexer, ctx, Some(token), expected_token)?;

        let (op, value) = match lexer.next() {
            (Token::Operation('='), _) => {
                let value = self.expression(lexer, ctx)?;
                (None, value)
            }
            // Compound assignment: `c` is the operator character preceding
            // the `=` (with `<`/`>` standing for `<<=`/`>>=`).
            (Token::AssignmentOperation(c), _) => {
                use crate::BinaryOperator as Bo;
                let op = match c {
                    '<' => Bo::ShiftLeft,
                    '>' => Bo::ShiftRight,
                    '+' => Bo::Add,
                    '-' => Bo::Subtract,
                    '*' => Bo::Multiply,
                    '/' => Bo::Divide,
                    '%' => Bo::Modulo,
                    '&' => Bo::And,
                    '|' => Bo::InclusiveOr,
                    '^' => Bo::ExclusiveOr,
                    // The lexer only produces the characters above.
                    _ => unreachable!(),
                };

                let value = self.expression(lexer, ctx)?;
                (Some(op), value)
            }
            op_token @ (Token::IncrementOperation | Token::DecrementOperation, _) => {
                // `op` is the matching StatementKind variant constructor.
                let op = match op_token.0 {
                    Token::IncrementOperation => ast::StatementKind::Increment,
                    Token::DecrementOperation => ast::StatementKind::Decrement,
                    _ => unreachable!(),
                };

                let span = lexer.span_with_start(token.1);
                block.stmts.push(ast::Statement {
                    kind: op(target),
                    span,
                });
                return Ok(());
            }
            (_, span) => return Err(Box::new(Error::Unexpected(span, ExpectedToken::Assignment))),
        };

        let span = lexer.span_with_start(token.1);
        block.stmts.push(ast::Statement {
            kind: ast::StatementKind::Assign { target, op, value },
            span,
        });
        Ok(())
    }
1095
    /// Attempts to parse `token` and what follows as a function-call
    /// statement, pushing it onto `block` and returning `true` on success.
    ///
    /// Returns `Ok(false)` if `token` is not a word, or if the ident has no
    /// template list and is not followed by `(` — in which case the caller
    /// should try parsing an assignment instead. Note that once a template
    /// list is parsed, the statement is committed to being a call.
    fn maybe_func_call_statement<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        context: &mut ExpressionContext<'a, '_, '_>,
        block: &mut ast::Block<'a>,
        token: TokenSpan<'a>,
    ) -> Result<'a, bool> {
        let (name, name_span) = match token {
            (Token::Word(name), span) => (name, span),
            _ => return Ok(false),
        };
        let ident = self.template_elaborated_ident(name, name_span, lexer, context)?;
        if ident.template_list.is_empty() && !matches!(lexer.peek(), (Token::Paren('('), _)) {
            return Ok(false);
        }

        self.push_rule_span(Rule::SingularExpr, lexer);

        let arguments = self.arguments(lexer, context)?;
        let span = lexer.span_with_start(name_span);

        block.stmts.push(ast::Statement {
            kind: ast::StatementKind::Call(ast::CallPhrase {
                function: ident,
                arguments,
            }),
            span,
        });

        self.pop_rule_span(lexer);

        Ok(true)
    }
1135
1136 fn func_call_or_variable_updating_statement<'a>(
1141 &mut self,
1142 lexer: &mut Lexer<'a>,
1143 context: &mut ExpressionContext<'a, '_, '_>,
1144 block: &mut ast::Block<'a>,
1145 token: TokenSpan<'a>,
1146 expected_token: ExpectedToken<'a>,
1147 ) -> Result<'a, ()> {
1148 if !self.maybe_func_call_statement(lexer, context, block, token)? {
1149 self.variable_updating_statement(lexer, context, block, token, expected_token)?;
1150 }
1151 Ok(())
1152 }
1153
    /// Parses a statement that begins with `token` and is either a local
    /// declaration (`let`, `const`, `var`) or — failing that — a function
    /// call / assignment statement. The parsed statement is pushed onto
    /// `block`; the terminating `;` is NOT consumed here.
    fn variable_or_value_or_func_call_or_variable_updating_statement<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
        block: &mut ast::Block<'a>,
        token: TokenSpan<'a>,
        expected_token: ExpectedToken<'a>,
    ) -> Result<'a, ()> {
        let local_decl = match token {
            (Token::Word("let"), _) => {
                let (name, given_ty) = self.optionally_typed_ident(lexer, ctx)?;

                // `let` requires an initializer.
                lexer.expect(Token::Operation('='))?;
                let expr_id = self.expression(lexer, ctx)?;

                // Declared after the initializer is parsed, so the
                // initializer cannot refer to the new name.
                let handle = ctx.declare_local(name)?;
                ast::LocalDecl::Let(ast::Let {
                    name,
                    ty: given_ty,
                    init: expr_id,
                    handle,
                })
            }
            (Token::Word("const"), _) => {
                let (name, given_ty) = self.optionally_typed_ident(lexer, ctx)?;

                lexer.expect(Token::Operation('='))?;
                let expr_id = self.expression(lexer, ctx)?;

                let handle = ctx.declare_local(name)?;
                ast::LocalDecl::Const(ast::LocalConst {
                    name,
                    ty: given_ty,
                    init: expr_id,
                    handle,
                })
            }
            (Token::Word("var"), _) => {
                // A local `var` may only name the `function` address space.
                if lexer.next_if(Token::TemplateArgsStart) {
                    let (class_str, span) = lexer.next_ident_with_span()?;
                    if class_str != "function" {
                        return Err(Box::new(Error::InvalidLocalVariableAddressSpace(span)));
                    }
                    lexer.expect(Token::TemplateArgsEnd)?;
                }

                let (name, ty) = self.optionally_typed_ident(lexer, ctx)?;

                // Unlike `let`/`const`, the initializer is optional.
                let init = if lexer.next_if(Token::Operation('=')) {
                    let init = self.expression(lexer, ctx)?;
                    Some(init)
                } else {
                    None
                };

                let handle = ctx.declare_local(name)?;
                ast::LocalDecl::Var(ast::LocalVariable {
                    name,
                    ty,
                    init,
                    handle,
                })
            }
            // Not a declaration keyword: delegate to call / assignment.
            token => {
                return self.func_call_or_variable_updating_statement(
                    lexer,
                    ctx,
                    block,
                    token,
                    expected_token,
                );
            }
        };

        let span = lexer.span_with_start(token.1);
        block.stmts.push(ast::Statement {
            kind: ast::StatementKind::LocalDecl(local_decl),
            span,
        });

        Ok(())
    }
1243
    /// Parses a single statement and appends it to `block`.
    ///
    /// Handles the statement grammar visible here: compound statements,
    /// `return`, `if`/`else if`/`else`, `switch`, `loop`, `while`, `for`,
    /// `break`, `continue`, `discard`, `const_assert`, and (via the fallback
    /// arm) local declarations, function calls, and assignments.
    ///
    /// `brace_nesting_level` is threaded through so nested braces can be
    /// rejected past the limit enforced by `increase_brace_nesting`;
    /// parser recursion itself is bounded by `track_recursion`.
    fn statement<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
        block: &mut ast::Block<'a>,
        brace_nesting_level: u8,
    ) -> Result<'a, ()> {
        self.track_recursion(|this| {
            this.push_rule_span(Rule::Statement, lexer);

            // Compound statement (`{ … }`, possibly preceded by attributes):
            // parse it as a block and return early, bypassing the keyword
            // match below.
            match lexer.peek() {
                (token, _) if is_start_of_compound_statement(token) => {
                    let (inner, span) = this.block(lexer, ctx, brace_nesting_level)?;
                    block.stmts.push(ast::Statement {
                        kind: ast::StatementKind::Block(inner),
                        span,
                    });
                    this.pop_rule_span(lexer);
                    return Ok(());
                }
                _ => {}
            }

            let kind = match lexer.next() {
                // Empty statement: a lone `;` produces no AST node.
                (Token::Separator(';'), _) => {
                    this.pop_rule_span(lexer);
                    return Ok(());
                }
                (Token::Word("return"), _) => {
                    // `return;` carries no value; anything before the `;`
                    // is the returned expression.
                    let value = if lexer.peek().0 != Token::Separator(';') {
                        let handle = this.expression(lexer, ctx)?;
                        Some(handle)
                    } else {
                        None
                    };
                    lexer.expect(Token::Separator(';'))?;
                    ast::StatementKind::Return { value }
                }
                (Token::Word("if"), _) => {
                    let condition = this.expression(lexer, ctx)?;

                    let accept = this.block(lexer, ctx, brace_nesting_level)?.0;

                    // Collect `else if` arms iteratively, then fold them (in
                    // reverse) into nested `If` nodes below, so a long chain
                    // doesn't consume parser stack.
                    let mut elsif_stack = Vec::new();
                    let mut elseif_span_start = lexer.start_byte_offset();
                    let mut reject = loop {
                        if !lexer.next_if(Token::Word("else")) {
                            // No `else` at all: empty reject block.
                            break ast::Block::default();
                        }

                        if !lexer.next_if(Token::Word("if")) {
                            // Plain `else { … }` terminates the chain.
                            break this.block(lexer, ctx, brace_nesting_level)?.0;
                        }

                        let other_condition = this.expression(lexer, ctx)?;
                        let other_block = this.block(lexer, ctx, brace_nesting_level)?;
                        elsif_stack.push((elseif_span_start, other_condition, other_block));
                        elseif_span_start = lexer.start_byte_offset();
                    };

                    // Innermost arm first: wrap the current `reject` into a
                    // one-statement block holding the nested `If`.
                    for (other_span_start, other_cond, other_block) in elsif_stack.into_iter().rev()
                    {
                        let sub_stmt = ast::StatementKind::If {
                            condition: other_cond,
                            accept: other_block.0,
                            reject,
                        };
                        reject = ast::Block::default();
                        let span = lexer.span_from(other_span_start);
                        reject.stmts.push(ast::Statement {
                            kind: sub_stmt,
                            span,
                        })
                    }

                    ast::StatementKind::If {
                        condition,
                        accept,
                        reject,
                    }
                }
                (Token::Word("switch"), _) => {
                    let selector = this.expression(lexer, ctx)?;
                    let brace_span = lexer.expect_span(Token::Paren('{'))?;
                    let brace_nesting_level =
                        Self::increase_brace_nesting(brace_nesting_level, brace_span)?;
                    let mut cases = Vec::new();

                    loop {
                        match lexer.next() {
                            (Token::Word("case"), _) => {
                                // A `case` may list several comma-separated
                                // selectors. Each selector except the last
                                // becomes its own fall-through case with an
                                // empty body; the loop breaks with the final
                                // selector, which gets the real body.
                                let value = loop {
                                    let value = this.switch_value(lexer, ctx)?;
                                    if lexer.next_if(Token::Separator(',')) {
                                        let next_token = lexer.peek().0;
                                        // Trailing comma before `:` or the
                                        // case body ends the selector list.
                                        if next_token == Token::Separator(':')
                                            || is_start_of_compound_statement(next_token)
                                        {
                                            break value;
                                        }
                                    } else {
                                        break value;
                                    }
                                    cases.push(ast::SwitchCase {
                                        value,
                                        body: ast::Block::default(),
                                        fall_through: true,
                                    });
                                };

                                // The `:` after the selector list is optional.
                                lexer.next_if(Token::Separator(':'));

                                let body = this.block(lexer, ctx, brace_nesting_level)?.0;

                                cases.push(ast::SwitchCase {
                                    value,
                                    body,
                                    fall_through: false,
                                });
                            }
                            (Token::Word("default"), _) => {
                                // Optional `:` here too.
                                lexer.next_if(Token::Separator(':'));
                                let body = this.block(lexer, ctx, brace_nesting_level)?.0;
                                cases.push(ast::SwitchCase {
                                    value: ast::SwitchValue::Default,
                                    body,
                                    fall_through: false,
                                });
                            }
                            (Token::Paren('}'), _) => break,
                            (_, span) => {
                                return Err(Box::new(Error::Unexpected(
                                    span,
                                    ExpectedToken::SwitchItem,
                                )))
                            }
                        }
                    }

                    ast::StatementKind::Switch { selector, cases }
                }
                (Token::Word("loop"), _) => this.r#loop(lexer, ctx, brace_nesting_level)?,
                (Token::Word("while"), _) => {
                    // Desugar `while cond { … }` into
                    // `loop { if cond {} else { break } { … } }`.
                    let mut body = ast::Block::default();

                    let (condition, span) =
                        lexer.capture_span(|lexer| this.expression(lexer, ctx))?;
                    let mut reject = ast::Block::default();
                    reject.stmts.push(ast::Statement {
                        kind: ast::StatementKind::Break,
                        span,
                    });

                    body.stmts.push(ast::Statement {
                        kind: ast::StatementKind::If {
                            condition,
                            accept: ast::Block::default(),
                            reject,
                        },
                        span,
                    });

                    let (block, span) = this.block(lexer, ctx, brace_nesting_level)?;
                    body.stmts.push(ast::Statement {
                        kind: ast::StatementKind::Block(block),
                        span,
                    });

                    ast::StatementKind::Loop {
                        body,
                        continuing: ast::Block::default(),
                        break_if: None,
                    }
                }
                (Token::Word("for"), _) => {
                    // Desugar `for (init; cond; update) { … }` into a `Loop`:
                    // the init statement lands in the *enclosing* block just
                    // before the loop, the condition becomes a leading
                    // `if !cond { break }`, and the update becomes the
                    // `continuing` block.
                    lexer.expect(Token::Paren('('))?;

                    // The init declaration (if any) is scoped to the loop.
                    ctx.local_table.push_scope();

                    if !lexer.next_if(Token::Separator(';')) {
                        let token = lexer.next();
                        this.variable_or_value_or_func_call_or_variable_updating_statement(
                            lexer,
                            ctx,
                            block,
                            token,
                            ExpectedToken::ForInit,
                        )?;
                        lexer.expect(Token::Separator(';'))?;
                    };

                    let mut body = ast::Block::default();
                    if !lexer.next_if(Token::Separator(';')) {
                        let (condition, span) = lexer.capture_span(|lexer| -> Result<'_, _> {
                            let condition = this.expression(lexer, ctx)?;
                            lexer.expect(Token::Separator(';'))?;
                            Ok(condition)
                        })?;
                        let mut reject = ast::Block::default();
                        reject.stmts.push(ast::Statement {
                            kind: ast::StatementKind::Break,
                            span,
                        });
                        body.stmts.push(ast::Statement {
                            kind: ast::StatementKind::If {
                                condition,
                                accept: ast::Block::default(),
                                reject,
                            },
                            span,
                        });
                    };

                    let mut continuing = ast::Block::default();
                    if !lexer.next_if(Token::Paren(')')) {
                        let token = lexer.next();
                        this.func_call_or_variable_updating_statement(
                            lexer,
                            ctx,
                            &mut continuing,
                            token,
                            ExpectedToken::ForUpdate,
                        )?;
                        lexer.expect(Token::Paren(')'))?;
                    }

                    let (block, span) = this.block(lexer, ctx, brace_nesting_level)?;
                    body.stmts.push(ast::Statement {
                        kind: ast::StatementKind::Block(block),
                        span,
                    });

                    ctx.local_table.pop_scope();

                    ast::StatementKind::Loop {
                        body,
                        continuing,
                        break_if: None,
                    }
                }
                (Token::Word("break"), span) => {
                    let (peeked_token, peeked_span) = lexer.peek();
                    // `break if` is only legal inside a `continuing` block
                    // (handled in `r#loop`); here it is an error.
                    if let Token::Word("if") = peeked_token {
                        let span = span.until(&peeked_span);
                        return Err(Box::new(Error::InvalidBreakIf(span)));
                    }
                    lexer.expect(Token::Separator(';'))?;
                    ast::StatementKind::Break
                }
                (Token::Word("continue"), _) => {
                    lexer.expect(Token::Separator(';'))?;
                    ast::StatementKind::Continue
                }
                (Token::Word("discard"), _) => {
                    lexer.expect(Token::Separator(';'))?;
                    ast::StatementKind::Kill
                }
                (Token::Word("const_assert"), _) => {
                    // Parentheses around the condition are optional; only
                    // require the closing `)` if we saw the opening one.
                    let paren = lexer.next_if(Token::Paren('('));

                    let condition = this.expression(lexer, ctx)?;

                    if paren {
                        lexer.expect(Token::Paren(')'))?;
                    }
                    lexer.expect(Token::Separator(';'))?;
                    ast::StatementKind::ConstAssert(condition)
                }
                // Anything else: a local declaration, function call, or
                // assignment. The helper pushes its own statement, so
                // return early instead of falling through to the push below.
                token => {
                    this.variable_or_value_or_func_call_or_variable_updating_statement(
                        lexer,
                        ctx,
                        block,
                        token,
                        ExpectedToken::Statement,
                    )?;
                    lexer.expect(Token::Separator(';'))?;
                    this.pop_rule_span(lexer);
                    return Ok(());
                }
            };

            let span = this.pop_rule_span(lexer);
            block.stmts.push(ast::Statement { kind, span });

            Ok(())
        })
    }
1549
    /// Parses a `loop` statement, starting just after the `loop` keyword.
    ///
    /// Grammar handled here:
    /// `loop { stmt* [ continuing { stmt* [ break if expr ; ] } ] }`
    ///
    /// The optional `continuing` block must be the last item in the loop
    /// body, and a `break if` (stored in `break_if`) must be the last item
    /// inside `continuing`.
    fn r#loop<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
        brace_nesting_level: u8,
    ) -> Result<'a, ast::StatementKind<'a>> {
        let mut body = ast::Block::default();
        let mut continuing = ast::Block::default();
        let mut break_if = None;

        let brace_span = lexer.expect_span(Token::Paren('{'))?;
        let brace_nesting_level = Self::increase_brace_nesting(brace_nesting_level, brace_span)?;

        // Locals declared in the loop body are visible in `continuing` too,
        // since both share this scope.
        ctx.local_table.push_scope();

        loop {
            if lexer.next_if(Token::Word("continuing")) {
                let brace_span = lexer.expect_span(Token::Paren('{'))?;
                let brace_nesting_level =
                    Self::increase_brace_nesting(brace_nesting_level, brace_span)?;
                loop {
                    if lexer.next_if(Token::Word("break")) {
                        // Inside `continuing`, `break` must be `break if`,
                        // and it terminates the continuing block.
                        lexer.expect(Token::Word("if"))?;

                        let condition = self.expression(lexer, ctx)?;
                        break_if = Some(condition);

                        lexer.expect(Token::Separator(';'))?;
                        // Consume the `}` closing the continuing block.
                        lexer.expect(Token::Paren('}'))?;
                        break;
                    } else if lexer.next_if(Token::Paren('}')) {
                        // Continuing block ended without a `break if`.
                        break;
                    } else {
                        self.statement(lexer, ctx, &mut continuing, brace_nesting_level)?;
                    }
                }
                // After `continuing`, only the loop's own closing `}` may
                // follow.
                lexer.expect(Token::Paren('}'))?;
                break;
            }
            if lexer.next_if(Token::Paren('}')) {
                // Loop ended without a `continuing` block.
                break;
            }
            self.statement(lexer, ctx, &mut body, brace_nesting_level)?;
        }

        ctx.local_table.pop_scope();

        Ok(ast::StatementKind::Loop {
            body,
            continuing,
            break_if,
        })
    }
1627
    /// Parses a compound statement: optional `@diagnostic(...)` attributes
    /// followed by `{ stmt* }`.
    ///
    /// Returns the parsed block together with its span. Diagnostic
    /// attributes are parsed and validated here, but per-block filters are
    /// not supported further yet, so their presence is rejected with a
    /// dedicated error below.
    fn block<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
        brace_nesting_level: u8,
    ) -> Result<'a, (ast::Block<'a>, Span)> {
        self.push_rule_span(Rule::Block, lexer);

        // Locals declared inside the block go out of scope at its end.
        ctx.local_table.push_scope();

        let mut diagnostic_filters = DiagnosticFilterMap::new();

        // Leading attributes: only `@diagnostic(...)` is legal on a
        // compound statement.
        self.push_rule_span(Rule::Attribute, lexer);
        while lexer.next_if(Token::Attribute) {
            let (name, name_span) = lexer.next_ident_with_span()?;
            if let Some(DirectiveKind::Diagnostic) = DirectiveKind::from_ident(name) {
                let filter = self.diagnostic_filter(lexer)?;
                let span = self.peek_rule_span(lexer);
                diagnostic_filters
                    .add(filter, span, ShouldConflictOnFullDuplicate::Yes)
                    .map_err(|e| Box::new(e.into()))?;
            } else {
                return Err(Box::new(Error::Unexpected(
                    name_span,
                    ExpectedToken::DiagnosticAttribute,
                )));
            }
        }
        self.pop_rule_span(lexer);

        // Filters parsed above are not yet implemented at this site.
        if !diagnostic_filters.is_empty() {
            return Err(Box::new(
                Error::DiagnosticAttributeNotYetImplementedAtParseSite {
                    site_name_plural: "compound statements",
                    spans: diagnostic_filters.spans().collect(),
                },
            ));
        }

        let brace_span = lexer.expect_span(Token::Paren('{'))?;
        let brace_nesting_level = Self::increase_brace_nesting(brace_nesting_level, brace_span)?;
        let mut block = ast::Block::default();
        while !lexer.next_if(Token::Paren('}')) {
            self.statement(lexer, ctx, &mut block, brace_nesting_level)?;
        }

        ctx.local_table.pop_scope();

        let span = self.pop_rule_span(lexer);
        Ok((block, span))
    }
1680
1681 fn varying_binding<'a>(
1682 &mut self,
1683 lexer: &mut Lexer<'a>,
1684 ctx: &mut ExpressionContext<'a, '_, '_>,
1685 ) -> Result<'a, Option<ast::Binding<'a>>> {
1686 let mut bind_parser = BindingParser::default();
1687 self.push_rule_span(Rule::Attribute, lexer);
1688
1689 while lexer.next_if(Token::Attribute) {
1690 let (word, span) = lexer.next_ident_with_span()?;
1691 bind_parser.parse(self, lexer, word, span, ctx)?;
1692 }
1693
1694 let span = self.pop_rule_span(lexer);
1695 bind_parser.finish(span)
1696 }
1697
    /// Parses a function declaration, starting just after the `fn` keyword.
    ///
    /// `diagnostic_filter_leaf` is the innermost diagnostic-filter node in
    /// effect for this function. `must_use` carries the span of a
    /// `@must_use` attribute if one was present; it is an error unless the
    /// function declares a return type. Dependencies on module-scope names
    /// are accumulated into `dependencies`.
    fn function_decl<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        diagnostic_filter_leaf: Option<Handle<DiagnosticFilterNode>>,
        must_use: Option<Span>,
        out: &mut ast::TranslationUnit<'a>,
        dependencies: &mut FastIndexSet<ast::Dependency<'a>>,
    ) -> Result<'a, ast::Function<'a>> {
        self.push_rule_span(Rule::FunctionDecl, lexer);
        let fun_name = lexer.next_ident()?;

        let mut locals = Arena::new();

        let mut ctx = ExpressionContext {
            expressions: &mut out.expressions,
            local_table: &mut SymbolTable::default(),
            locals: &mut locals,
            unresolved: dependencies,
        };

        ctx.local_table.push_scope();
        // NOTE(review): presumably limits lookups so expressions in the
        // parameter/return-type position don't resolve to the parameters
        // being declared — confirm against `SymbolTable::reduce_lookup_scope`.
        ctx.local_table.reduce_lookup_scope();

        // Parameter list. `ready` is true whenever a `,` (or the opening
        // `(`) permits another parameter; a parameter without a preceding
        // comma is an error.
        let mut arguments = Vec::new();
        lexer.expect(Token::Paren('('))?;
        let mut ready = true;
        while !lexer.next_if(Token::Paren(')')) {
            if !ready {
                return Err(Box::new(Error::Unexpected(
                    lexer.next().1,
                    ExpectedToken::Token(Token::Separator(',')),
                )));
            }
            let binding = self.varying_binding(lexer, &mut ctx)?;

            let param_name = lexer.next_ident()?;

            lexer.expect(Token::Separator(':'))?;
            let param_type = self.type_specifier(lexer, &mut ctx)?;

            let handle = ctx.declare_local(param_name)?;
            arguments.push(ast::FunctionArgument {
                name: param_name,
                ty: param_type,
                binding,
                handle,
            });
            ready = lexer.next_if(Token::Separator(','));
        }
        // Optional `-> [attributes] type` result; `@must_use` without a
        // result is rejected here.
        let result = if lexer.next_if(Token::Arrow) {
            let binding = self.varying_binding(lexer, &mut ctx)?;
            let ty = self.type_specifier(lexer, &mut ctx)?;
            let must_use = must_use.is_some();
            Some(ast::FunctionResult {
                ty,
                binding,
                must_use,
            })
        } else if let Some(must_use) = must_use {
            return Err(Box::new(Error::FunctionMustUseReturnsVoid(
                must_use,
                self.peek_rule_span(lexer),
            )));
        } else {
            None
        };

        // Restore full name lookup for the body, where parameters are
        // legitimately visible.
        ctx.local_table.reset_lookup_scope();

        // Function body: the `{` here is nesting level 1.
        lexer.expect(Token::Paren('{'))?;
        let brace_nesting_level = 1;
        let mut body = ast::Block::default();
        while !lexer.next_if(Token::Paren('}')) {
            self.statement(lexer, &mut ctx, &mut body, brace_nesting_level)?;
        }

        ctx.local_table.pop_scope();

        // `entry_point` and `doc_comments` are filled in by the caller
        // (`global_decl`) via struct update.
        let fun = ast::Function {
            entry_point: None,
            name: fun_name,
            arguments,
            result,
            body,
            diagnostic_filter_leaf,
            doc_comments: Vec::new(),
        };

        self.pop_rule_span(lexer);

        Ok(fun)
    }
1798
1799 fn directive_ident_list<'a>(
1800 &self,
1801 lexer: &mut Lexer<'a>,
1802 handler: impl FnMut(&'a str, Span) -> Result<'a, ()>,
1803 ) -> Result<'a, ()> {
1804 let mut handler = handler;
1805 'next_arg: loop {
1806 let (ident, span) = lexer.next_ident_with_span()?;
1807 handler(ident, span)?;
1808
1809 let expected_token = match lexer.peek().0 {
1810 Token::Separator(',') => {
1811 let _ = lexer.next();
1812 if matches!(lexer.peek().0, Token::Word(..)) {
1813 continue 'next_arg;
1814 }
1815 ExpectedToken::AfterIdentListComma
1816 }
1817 _ => ExpectedToken::AfterIdentListArg,
1818 };
1819
1820 if !matches!(lexer.next().0, Token::Separator(';')) {
1821 return Err(Box::new(Error::Unexpected(span, expected_token)));
1822 }
1823
1824 break Ok(());
1825 }
1826 }
1827
    /// Parses one global declaration — `struct`, `alias`, `const`,
    /// `override`, `var`, `fn`, `const_assert`, or a stray `;` — into `out`.
    ///
    /// Returns `Ok(())` on `Token::End` as well, so the caller detects EOF
    /// by peeking after each call. A directive keyword at this point is an
    /// error (directives must precede all global declarations).
    fn global_decl<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        out: &mut ast::TranslationUnit<'a>,
    ) -> Result<'a, ()> {
        // Doc comments immediately preceding the declaration.
        let doc_comments = lexer.accumulate_doc_comments();

        // Attribute state collected up front; most attributes only apply to
        // some declaration kinds, and misuse is diagnosed after parsing.
        let mut binding = None;
        let mut stage = ParsedAttribute::default();
        // Span reported by errors about stage-specific requirements
        // (missing workgroup size / incoming payload).
        let mut shader_stage_error_span = Span::new(0, 0);
        let mut workgroup_size = ParsedAttribute::default();
        let mut early_depth_test = ParsedAttribute::default();
        let (mut bind_index, mut bind_group) =
            (ParsedAttribute::default(), ParsedAttribute::default());
        let mut id = ParsedAttribute::default();
        let mut payload = ParsedAttribute::default();
        let mut incoming_payload = ParsedAttribute::default();
        let mut mesh_output = ParsedAttribute::default();

        let mut must_use: ParsedAttribute<Span> = ParsedAttribute::default();
        let mut memory_decorations = crate::MemoryDecorations::empty();

        // Module-scope names this declaration refers to, collected so decls
        // can later be processed in dependency order.
        let mut dependencies = FastIndexSet::default();
        let mut ctx = ExpressionContext {
            expressions: &mut out.expressions,
            local_table: &mut SymbolTable::default(),
            locals: &mut Arena::new(),
            unresolved: &mut dependencies,
        };
        let mut diagnostic_filters = DiagnosticFilterMap::new();
        // Only the `fn` arm consumes `@diagnostic(...)`; every other decl
        // kind calls this to reject any collected filters.
        let ensure_no_diag_attrs = |on_what, filters: DiagnosticFilterMap| -> Result<()> {
            if filters.is_empty() {
                Ok(())
            } else {
                Err(Box::new(Error::DiagnosticAttributeNotSupported {
                    on_what,
                    spans: filters.spans().collect(),
                }))
            }
        };

        // Attribute list preceding the declaration keyword.
        self.push_rule_span(Rule::Attribute, lexer);
        while lexer.next_if(Token::Attribute) {
            let (name, name_span) = lexer.next_ident_with_span()?;
            if let Some(DirectiveKind::Diagnostic) = DirectiveKind::from_ident(name) {
                let filter = self.diagnostic_filter(lexer)?;
                let span = self.peek_rule_span(lexer);
                diagnostic_filters
                    .add(filter, span, ShouldConflictOnFullDuplicate::Yes)
                    .map_err(|e| Box::new(e.into()))?;
                continue;
            }
            match name {
                // `@binding(e)`, `@group(e)`, `@id(e)`: one expression,
                // trailing comma tolerated.
                "binding" => {
                    lexer.expect(Token::Paren('('))?;
                    bind_index.set(self.expression(lexer, &mut ctx)?, name_span)?;
                    lexer.next_if(Token::Separator(','));
                    lexer.expect(Token::Paren(')'))?;
                }
                "group" => {
                    lexer.expect(Token::Paren('('))?;
                    bind_group.set(self.expression(lexer, &mut ctx)?, name_span)?;
                    lexer.next_if(Token::Separator(','));
                    lexer.expect(Token::Paren(')'))?;
                }
                "id" => {
                    lexer.expect(Token::Paren('('))?;
                    id.set(self.expression(lexer, &mut ctx)?, name_span)?;
                    lexer.next_if(Token::Separator(','));
                    lexer.expect(Token::Paren(')'))?;
                }
                // Shader-stage attributes; the non-core stages require their
                // corresponding enable extension.
                "vertex" => {
                    stage.set(ShaderStage::Vertex, name_span)?;
                }
                "fragment" => {
                    stage.set(ShaderStage::Fragment, name_span)?;
                }
                "compute" => {
                    stage.set(ShaderStage::Compute, name_span)?;
                    shader_stage_error_span = name_span;
                }
                "task" => {
                    lexer.require_enable_extension(
                        ImplementedEnableExtension::WgpuMeshShader,
                        name_span,
                    )?;
                    stage.set(ShaderStage::Task, name_span)?;
                    shader_stage_error_span = name_span;
                }
                "mesh" => {
                    lexer.require_enable_extension(
                        ImplementedEnableExtension::WgpuMeshShader,
                        name_span,
                    )?;
                    stage.set(ShaderStage::Mesh, name_span)?;
                    shader_stage_error_span = name_span;

                    // `@mesh(ident)` names the mesh output variable.
                    lexer.expect(Token::Paren('('))?;
                    mesh_output.set(lexer.next_ident_with_span()?, name_span)?;
                    lexer.expect(Token::Paren(')'))?;
                }
                "ray_generation" => {
                    lexer.require_enable_extension(
                        ImplementedEnableExtension::WgpuRayTracingPipeline,
                        name_span,
                    )?;
                    stage.set(ShaderStage::RayGeneration, name_span)?;
                    shader_stage_error_span = name_span;
                }
                "any_hit" => {
                    lexer.require_enable_extension(
                        ImplementedEnableExtension::WgpuRayTracingPipeline,
                        name_span,
                    )?;
                    stage.set(ShaderStage::AnyHit, name_span)?;
                    shader_stage_error_span = name_span;
                }
                "closest_hit" => {
                    lexer.require_enable_extension(
                        ImplementedEnableExtension::WgpuRayTracingPipeline,
                        name_span,
                    )?;
                    stage.set(ShaderStage::ClosestHit, name_span)?;
                    shader_stage_error_span = name_span;
                }
                "miss" => {
                    lexer.require_enable_extension(
                        ImplementedEnableExtension::WgpuRayTracingPipeline,
                        name_span,
                    )?;
                    stage.set(ShaderStage::Miss, name_span)?;
                    shader_stage_error_span = name_span;
                }
                // `@payload(ident)` / `@incoming_payload(ident)` name
                // extension-specific payload variables.
                "payload" => {
                    lexer.require_enable_extension(
                        ImplementedEnableExtension::WgpuMeshShader,
                        name_span,
                    )?;
                    lexer.expect(Token::Paren('('))?;
                    payload.set(lexer.next_ident_with_span()?, name_span)?;
                    lexer.expect(Token::Paren(')'))?;
                }
                "incoming_payload" => {
                    lexer.require_enable_extension(
                        ImplementedEnableExtension::WgpuRayTracingPipeline,
                        name_span,
                    )?;
                    lexer.expect(Token::Paren('('))?;
                    incoming_payload.set(lexer.next_ident_with_span()?, name_span)?;
                    lexer.expect(Token::Paren(')'))?;
                }
                "workgroup_size" => {
                    // One to three comma-separated expressions; unspecified
                    // trailing dimensions stay `None`. A trailing comma
                    // before `)` is accepted.
                    lexer.expect(Token::Paren('('))?;
                    let mut new_workgroup_size = [None; 3];
                    for size in new_workgroup_size.iter_mut() {
                        *size = Some(self.expression(lexer, &mut ctx)?);
                        match lexer.next() {
                            (Token::Paren(')'), _) => break,
                            (Token::Separator(','), _) => {
                                if lexer.next_if(Token::Paren(')')) {
                                    break;
                                }
                            }
                            other => {
                                return Err(Box::new(Error::Unexpected(
                                    other.1,
                                    ExpectedToken::WorkgroupSizeSeparator,
                                )))
                            }
                        }
                    }
                    workgroup_size.set(new_workgroup_size, name_span)?;
                }
                "early_depth_test" => {
                    // `force` maps to `Force`; any other identifier is a
                    // conservative-depth mode under `Allow`.
                    lexer.expect(Token::Paren('('))?;
                    let (ident, ident_span) = lexer.next_ident_with_span()?;
                    let value = if ident == "force" {
                        crate::EarlyDepthTest::Force
                    } else {
                        crate::EarlyDepthTest::Allow {
                            conservative: conv::map_conservative_depth(ident, ident_span)?,
                        }
                    };
                    lexer.expect(Token::Paren(')'))?;
                    early_depth_test.set(value, name_span)?;
                }
                "must_use" => {
                    must_use.set(name_span, name_span)?;
                }
                // Memory decorations accumulate as flags.
                "coherent" => {
                    memory_decorations |= crate::MemoryDecorations::COHERENT;
                }
                "volatile" => {
                    memory_decorations |= crate::MemoryDecorations::VOLATILE;
                }
                _ => return Err(Box::new(Error::UnknownAttribute(name_span))),
            }
        }

        // `@group` and `@binding` must appear together; combine them into a
        // resource binding for the `var` arm to take.
        let attrib_span = self.pop_rule_span(lexer);
        match (bind_group.value, bind_index.value) {
            (Some(group), Some(index)) => {
                binding = Some(ast::ResourceBinding {
                    group,
                    binding: index,
                });
            }
            (Some(_), None) => {
                return Err(Box::new(Error::MissingAttribute("binding", attrib_span)))
            }
            (None, Some(_)) => return Err(Box::new(Error::MissingAttribute("group", attrib_span))),
            (None, None) => {}
        }

        // Dispatch on the declaration keyword. `None` means "nothing to
        // append" (a bare `;`).
        let start = lexer.start_byte_offset();
        let kind = match lexer.next() {
            (Token::Separator(';'), _) => {
                ensure_no_diag_attrs(
                    DiagnosticAttributeNotSupportedPosition::SemicolonInModulePosition,
                    diagnostic_filters,
                )?;
                None
            }
            // Directives are only legal before the first global decl.
            (Token::Word(word), directive_span) if DirectiveKind::from_ident(word).is_some() => {
                return Err(Box::new(Error::DirectiveAfterFirstGlobalDecl {
                    directive_span,
                }));
            }
            (Token::Word("struct"), _) => {
                ensure_no_diag_attrs("`struct`s".into(), diagnostic_filters)?;

                let name = lexer.next_ident()?;

                let members = self.struct_body(lexer, &mut ctx)?;

                Some(ast::GlobalDeclKind::Struct(ast::Struct {
                    name,
                    members,
                    doc_comments,
                }))
            }
            (Token::Word("alias"), _) => {
                ensure_no_diag_attrs("`alias`es".into(), diagnostic_filters)?;

                let name = lexer.next_ident()?;

                lexer.expect(Token::Operation('='))?;
                let ty = self.type_specifier(lexer, &mut ctx)?;
                lexer.expect(Token::Separator(';'))?;
                Some(ast::GlobalDeclKind::Type(ast::TypeAlias { name, ty }))
            }
            (Token::Word("const"), _) => {
                ensure_no_diag_attrs("`const`s".into(), diagnostic_filters)?;

                let (name, ty) = self.optionally_typed_ident(lexer, &mut ctx)?;

                lexer.expect(Token::Operation('='))?;
                let init = self.expression(lexer, &mut ctx)?;
                lexer.expect(Token::Separator(';'))?;

                Some(ast::GlobalDeclKind::Const(ast::Const {
                    name,
                    ty,
                    init,
                    doc_comments,
                }))
            }
            (Token::Word("override"), _) => {
                ensure_no_diag_attrs("`override`s".into(), diagnostic_filters)?;

                let (name, ty) = self.optionally_typed_ident(lexer, &mut ctx)?;

                // Overrides may omit the initializer (value supplied at
                // pipeline creation).
                let init = if lexer.next_if(Token::Operation('=')) {
                    Some(self.expression(lexer, &mut ctx)?)
                } else {
                    None
                };

                lexer.expect(Token::Separator(';'))?;

                Some(ast::GlobalDeclKind::Override(ast::Override {
                    name,
                    id: id.value,
                    ty,
                    init,
                }))
            }
            (Token::Word("var"), _) => {
                ensure_no_diag_attrs("`var`s".into(), diagnostic_filters)?;

                // `var` is the only decl that consumes the resource binding
                // assembled above (`binding.take()` leaves `None` behind).
                let mut var = self.variable_decl(lexer, &mut ctx)?;
                var.binding = binding.take();
                var.doc_comments = doc_comments;
                var.memory_decorations = memory_decorations;
                Some(ast::GlobalDeclKind::Var(var))
            }
            (Token::Word("fn"), _) => {
                // `fn` is the one decl that accepts diagnostic attributes:
                // thread them into the module's filter tree.
                let diagnostic_filter_leaf = Self::write_diagnostic_filters(
                    &mut out.diagnostic_filters,
                    diagnostic_filters,
                    out.diagnostic_filter_leaf,
                );

                let function = self.function_decl(
                    lexer,
                    diagnostic_filter_leaf,
                    must_use.value,
                    out,
                    &mut dependencies,
                )?;
                Some(ast::GlobalDeclKind::Fn(ast::Function {
                    entry_point: if let Some(stage) = stage.value {
                        // Compute-like stages must declare a workgroup size.
                        if stage.compute_like() && workgroup_size.value.is_none() {
                            return Err(Box::new(Error::MissingWorkgroupSize(
                                shader_stage_error_span,
                            )));
                        }

                        // Hit/miss ray-tracing stages must name their
                        // incoming payload.
                        match stage {
                            ShaderStage::AnyHit | ShaderStage::ClosestHit | ShaderStage::Miss => {
                                if incoming_payload.value.is_none() {
                                    return Err(Box::new(Error::MissingIncomingPayload(
                                        shader_stage_error_span,
                                    )));
                                }
                            }
                            _ => {}
                        }

                        Some(ast::EntryPoint {
                            stage,
                            early_depth_test: early_depth_test.value,
                            workgroup_size: workgroup_size.value,
                            mesh_output_variable: mesh_output.value,
                            task_payload: payload.value,
                            ray_incoming_payload: incoming_payload.value,
                        })
                    } else {
                        None
                    },
                    doc_comments,
                    ..function
                }))
            }
            (Token::Word("const_assert"), _) => {
                ensure_no_diag_attrs("`const_assert`s".into(), diagnostic_filters)?;

                // Parentheses around the condition are optional.
                let paren = lexer.next_if(Token::Paren('('));

                let condition = self.expression(lexer, &mut ctx)?;

                if paren {
                    lexer.expect(Token::Paren(')'))?;
                }
                lexer.expect(Token::Separator(';'))?;
                Some(ast::GlobalDeclKind::ConstAssert(condition))
            }
            (Token::End, _) => return Ok(()),
            other => {
                return Err(Box::new(Error::Unexpected(
                    other.1,
                    ExpectedToken::GlobalItem,
                )))
            }
        };

        if let Some(kind) = kind {
            out.decls.append(
                ast::GlobalDecl { kind, dependencies },
                lexer.span_from(start),
            );
        }

        // Every rule span opened while parsing this decl should have been
        // popped; a leftover indicates a parser bug.
        if !self.rules.is_empty() {
            log::error!("Reached the end of global decl, but rule stack is not empty");
            log::error!("Rules: {:?}", self.rules);
            return Err(Box::new(Error::Internal("rule stack is not empty")));
        };

        // Only the `var` arm takes the resource binding; anything left over
        // means `@group`/`@binding` decorated a non-`var` declaration.
        match binding {
            None => Ok(()),
            Some(_) => Err(Box::new(Error::Internal(
                "we had the attribute but no var?",
            ))),
        }
    }
2221
2222 pub fn parse<'a>(
2223 &mut self,
2224 source: &'a str,
2225 options: &Options,
2226 ) -> Result<'a, ast::TranslationUnit<'a>> {
2227 self.reset();
2228
2229 let mut lexer = Lexer::new(source, !options.parse_doc_comments);
2230 let mut tu = ast::TranslationUnit::default();
2231 let mut enable_extensions = EnableExtensions::empty();
2232 let mut diagnostic_filters = DiagnosticFilterMap::new();
2233
2234 tu.doc_comments = lexer.accumulate_module_doc_comments();
2236
2237 while let (Token::Word(word), _) = lexer.peek() {
2239 if let Some(kind) = DirectiveKind::from_ident(word) {
2240 self.push_rule_span(Rule::Directive, &mut lexer);
2241 let _ = lexer.next_ident_with_span().unwrap();
2242 match kind {
2243 DirectiveKind::Diagnostic => {
2244 let diagnostic_filter = self.diagnostic_filter(&mut lexer)?;
2245 let span = self.peek_rule_span(&lexer);
2246 diagnostic_filters
2247 .add(diagnostic_filter, span, ShouldConflictOnFullDuplicate::No)
2248 .map_err(|e| Box::new(e.into()))?;
2249 lexer.expect(Token::Separator(';'))?;
2250 }
2251 DirectiveKind::Enable => {
2252 self.directive_ident_list(&mut lexer, |ident, span| {
2253 let kind = EnableExtension::from_ident(ident, span)?;
2254 let extension = match kind {
2255 EnableExtension::Implemented(kind) => kind,
2256 EnableExtension::Unimplemented(kind) => {
2257 return Err(Box::new(Error::EnableExtensionNotYetImplemented {
2258 kind,
2259 span,
2260 }))
2261 }
2262 };
2263 let required_capability = extension.capability();
2265 if !options.capabilities.contains(required_capability) {
2266 return Err(Box::new(Error::EnableExtensionNotSupported {
2267 kind,
2268 span,
2269 }));
2270 }
2271 enable_extensions.add(extension);
2272 Ok(())
2273 })?;
2274 }
2275 DirectiveKind::Requires => {
2276 self.directive_ident_list(&mut lexer, |ident, span| {
2277 match LanguageExtension::from_ident(ident) {
2278 Some(LanguageExtension::Implemented(_kind)) => {
2279 Ok(())
2284 }
2285 Some(LanguageExtension::Unimplemented(kind)) => {
2286 Err(Box::new(Error::LanguageExtensionNotYetImplemented {
2287 kind,
2288 span,
2289 }))
2290 }
2291 None => Err(Box::new(Error::UnknownLanguageExtension(span, ident))),
2292 }
2293 })?;
2294 }
2295 }
2296 self.pop_rule_span(&lexer);
2297 } else {
2298 break;
2299 }
2300 }
2301
2302 lexer.enable_extensions = enable_extensions;
2303 tu.enable_extensions = enable_extensions;
2304 tu.diagnostic_filter_leaf =
2305 Self::write_diagnostic_filters(&mut tu.diagnostic_filters, diagnostic_filters, None);
2306
2307 loop {
2308 match self.global_decl(&mut lexer, &mut tu) {
2309 Err(error) => return Err(error),
2310 Ok(()) => {
2311 if lexer.peek().0 == Token::End {
2312 break;
2313 }
2314 }
2315 }
2316 }
2317
2318 Ok(tu)
2319 }
2320
2321 fn increase_brace_nesting(brace_nesting_level: u8, brace_span: Span) -> Result<'static, u8> {
2322 const BRACE_NESTING_MAXIMUM: u8 = 127;
2330 if brace_nesting_level + 1 > BRACE_NESTING_MAXIMUM {
2331 return Err(Box::new(Error::ExceededLimitForNestedBraces {
2332 span: brace_span,
2333 limit: BRACE_NESTING_MAXIMUM,
2334 }));
2335 }
2336 Ok(brace_nesting_level + 1)
2337 }
2338
    /// Parses `( severity , rule [,] )` — the argument list of a
    /// `diagnostic(...)` attribute or directive.
    ///
    /// The rule may be a bare name (a standard rule, or an unknown one) or a
    /// dotted `prefix.name` user rule. Unknown bare names are not fatal:
    /// they are reported as a warning-severity parse diagnostic and kept as
    /// `FilterableTriggeringRule::Unknown`.
    fn diagnostic_filter<'a>(&self, lexer: &mut Lexer<'a>) -> Result<'a, DiagnosticFilter> {
        lexer.expect(Token::Paren('('))?;

        let (severity_control_name, severity_control_name_span) = lexer.next_ident_with_span()?;
        let new_severity = diagnostic_filter::Severity::from_wgsl_ident(severity_control_name)
            .ok_or(Error::DiagnosticInvalidSeverity {
                severity_control_name_span,
            })?;

        lexer.expect(Token::Separator(','))?;

        let (diagnostic_name_token, diagnostic_name_token_span) = lexer.next_ident_with_span()?;
        let triggering_rule = if lexer.next_if(Token::Separator('.')) {
            // Dotted form: `prefix.name` is an opaque user rule.
            let (ident, _span) = lexer.next_ident_with_span()?;
            FilterableTriggeringRule::User(Box::new([diagnostic_name_token.into(), ident.into()]))
        } else {
            let diagnostic_rule_name = diagnostic_name_token;
            let diagnostic_rule_name_span = diagnostic_name_token_span;
            if let Some(triggering_rule) =
                StandardFilterableTriggeringRule::from_wgsl_ident(diagnostic_rule_name)
            {
                FilterableTriggeringRule::Standard(triggering_rule)
            } else {
                // Unrecognized rule: emit a warning (which may itself be
                // promoted to an error elsewhere) but keep parsing.
                diagnostic_filter::Severity::Warning.report_wgsl_parse_diag(
                    Box::new(Error::UnknownDiagnosticRuleName(diagnostic_rule_name_span)),
                    lexer.source,
                )?;
                FilterableTriggeringRule::Unknown(diagnostic_rule_name.into())
            }
        };
        let filter = DiagnosticFilter {
            triggering_rule,
            new_severity,
        };
        // Trailing comma before `)` is permitted.
        lexer.next_if(Token::Separator(','));
        lexer.expect(Token::Paren(')'))?;

        Ok(filter)
    }
2378
2379 pub(crate) fn write_diagnostic_filters(
2380 arena: &mut Arena<DiagnosticFilterNode>,
2381 filters: DiagnosticFilterMap,
2382 parent: Option<Handle<DiagnosticFilterNode>>,
2383 ) -> Option<Handle<DiagnosticFilterNode>> {
2384 filters
2385 .into_iter()
2386 .fold(parent, |parent, (triggering_rule, (new_severity, span))| {
2387 Some(arena.append(
2388 DiagnosticFilterNode {
2389 inner: DiagnosticFilter {
2390 new_severity,
2391 triggering_rule,
2392 },
2393 parent,
2394 },
2395 span,
2396 ))
2397 })
2398 }
2399}
2400
2401const fn is_start_of_compound_statement<'a>(token: Token<'a>) -> bool {
2402 matches!(token, Token::Attribute | Token::Paren('{'))
2403}