1use alloc::{boxed::Box, vec::Vec};
2use directive::enable_extension::ImplementedEnableExtension;
3
4use crate::diagnostic_filter::{
5 self, DiagnosticFilter, DiagnosticFilterMap, DiagnosticFilterNode, FilterableTriggeringRule,
6 ShouldConflictOnFullDuplicate, StandardFilterableTriggeringRule,
7};
8use crate::front::wgsl::error::{DiagnosticAttributeNotSupportedPosition, Error, ExpectedToken};
9use crate::front::wgsl::parse::directive::enable_extension::{EnableExtension, EnableExtensions};
10use crate::front::wgsl::parse::directive::language_extension::LanguageExtension;
11use crate::front::wgsl::parse::directive::DirectiveKind;
12use crate::front::wgsl::parse::lexer::{Lexer, Token, TokenSpan};
13use crate::front::wgsl::parse::number::Number;
14use crate::front::wgsl::Result;
15use crate::front::SymbolTable;
16use crate::{Arena, FastHashSet, FastIndexSet, Handle, ShaderStage, Span};
17
18pub mod ast;
19pub mod conv;
20pub mod directive;
21pub mod lexer;
22pub mod number;
23
/// Borrowed state needed while parsing expressions: the arenas that parsed
/// sub-expressions are appended to, plus name-resolution tables.
struct ExpressionContext<'input, 'temp, 'out> {
    /// Arena every parsed [`ast::Expression`] node is appended to.
    expressions: &'out mut Arena<ast::Expression<'input>>,

    /// Maps the local names currently in scope to their handles in `locals`.
    local_table: &'temp mut SymbolTable<&'input str, Handle<ast::Local>>,

    /// Arena of declared locals; each entry's span is its declaration site,
    /// used when reporting redefinitions.
    locals: &'out mut Arena<ast::Local>,

    /// Identifiers that did not resolve to a local; recorded here as
    /// dependencies (presumably on global declarations — resolved later by
    /// the caller).
    unresolved: &'out mut FastIndexSet<ast::Dependency<'input>>,
}
84
85impl<'a> ExpressionContext<'a, '_, '_> {
86 fn parse_binary_op(
87 &mut self,
88 lexer: &mut Lexer<'a>,
89 classifier: impl Fn(Token<'a>) -> Option<crate::BinaryOperator>,
90 mut parser: impl FnMut(&mut Lexer<'a>, &mut Self) -> Result<'a, Handle<ast::Expression<'a>>>,
91 ) -> Result<'a, Handle<ast::Expression<'a>>> {
92 let start = lexer.start_byte_offset();
93 let mut accumulator = parser(lexer, self)?;
94 while let Some(op) = classifier(lexer.peek().0) {
95 let _ = lexer.next();
96 let left = accumulator;
97 let right = parser(lexer, self)?;
98 accumulator = self.expressions.append(
99 ast::Expression::Binary { op, left, right },
100 lexer.span_from(start),
101 );
102 }
103 Ok(accumulator)
104 }
105
106 fn declare_local(&mut self, name: ast::Ident<'a>) -> Result<'a, Handle<ast::Local>> {
107 let handle = self.locals.append(ast::Local, name.span);
108 if let Some(old) = self.local_table.add(name.name, handle) {
109 Err(Box::new(Error::Redefinition {
110 previous: self.locals.get_span(old),
111 current: name.span,
112 }))
113 } else {
114 Ok(handle)
115 }
116 }
117}
118
/// Grammar productions tracked on the parser's rule stack.
///
/// Each stack entry pairs a `Rule` with the byte offset where it began, which
/// is used both to compute spans (`pop_rule_span`/`peek_rule_span`) and to
/// let nested parsers discover their enclosing construct (`race_rules`).
#[derive(Copy, Clone, Debug, PartialEq)]
enum Rule {
    Attribute,
    VariableDecl,
    FunctionDecl,
    Block,
    Statement,
    PrimaryExpr,
    SingularExpr,
    UnaryExpr,
    GeneralExpr,
    Directive,
    // Inside a template list: restricts which `<`/`>` tokens may be
    // treated as operators (see `equality_expression`).
    GenericExpr,
    // Inside parentheses/brackets: restores the full operator set.
    EnclosedExpr,
    LhsExpr,
}
140
/// Holder for an attribute's value that remembers whether it has been set,
/// so a repeated occurrence of the same attribute can be rejected.
struct ParsedAttribute<T> {
    value: Option<T>,
}
144
impl<T> Default for ParsedAttribute<T> {
    /// An attribute that has not been seen yet.
    fn default() -> Self {
        Self { value: None }
    }
}
150
151impl<T> ParsedAttribute<T> {
152 fn set(&mut self, value: T, name_span: Span) -> Result<'static, ()> {
153 if self.value.is_some() {
154 return Err(Box::new(Error::RepeatedAttribute(name_span)));
155 }
156 self.value = Some(value);
157 Ok(())
158 }
159}
160
/// Accumulates the binding-related attributes seen on a declaration
/// (`@location`, `@builtin`, `@interpolate`, `@invariant`, `@blend_src`,
/// `@per_primitive`), to be validated and combined by
/// [`BindingParser::finish`].
#[derive(Default)]
struct BindingParser<'a> {
    location: ParsedAttribute<Handle<ast::Expression<'a>>>,
    built_in: ParsedAttribute<crate::BuiltIn>,
    interpolation: ParsedAttribute<crate::Interpolation>,
    sampling: ParsedAttribute<crate::Sampling>,
    invariant: ParsedAttribute<bool>,
    blend_src: ParsedAttribute<Handle<ast::Expression<'a>>>,
    per_primitive: ParsedAttribute<()>,
}
171
impl<'a> BindingParser<'a> {
    /// Parse one binding attribute whose `@` and name (`name`, at
    /// `name_span`) have already been consumed, and record it in `self`.
    ///
    /// Errors on unknown attribute names, repeated attributes, and
    /// attributes whose required `enable` extension is not active.
    fn parse(
        &mut self,
        parser: &mut Parser,
        lexer: &mut Lexer<'a>,
        name: &'a str,
        name_span: Span,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, ()> {
        match name {
            "location" => {
                lexer.expect(Token::Paren('('))?;
                self.location
                    .set(parser.expression(lexer, ctx)?, name_span)?;
                // Optional trailing comma before the closing paren.
                lexer.next_if(Token::Separator(','));
                lexer.expect(Token::Paren(')'))?;
            }
            "builtin" => {
                lexer.expect(Token::Paren('('))?;
                let (raw, span) = lexer.next_ident_with_span()?;
                self.built_in.set(
                    conv::map_built_in(&lexer.enable_extensions, raw, span)?,
                    name_span,
                )?;
                lexer.next_if(Token::Separator(','));
                lexer.expect(Token::Paren(')'))?;
            }
            "interpolate" => {
                lexer.expect(Token::Paren('('))?;
                let (raw, span) = lexer.next_ident_with_span()?;
                self.interpolation.set(
                    conv::map_interpolation(&lexer.enable_extensions, raw, span)?,
                    name_span,
                )?;
                // A second argument, if present, is the sampling mode.
                if lexer.next_if(Token::Separator(',')) {
                    let (raw, span) = lexer.next_ident_with_span()?;
                    self.sampling
                        .set(conv::map_sampling(raw, span)?, name_span)?;
                }
                lexer.next_if(Token::Separator(','));
                lexer.expect(Token::Paren(')'))?;
            }

            // `invariant` takes no parenthesized arguments.
            "invariant" => {
                self.invariant.set(true, name_span)?;
            }
            "blend_src" => {
                // Gated behind the dual-source-blending enable extension.
                lexer.require_enable_extension(
                    ImplementedEnableExtension::DualSourceBlending,
                    name_span,
                )?;

                lexer.expect(Token::Paren('('))?;
                self.blend_src
                    .set(parser.expression(lexer, ctx)?, name_span)?;
                lexer.next_if(Token::Separator(','));
                lexer.expect(Token::Paren(')'))?;
            }
            "per_primitive" => {
                // Gated behind the wgpu mesh-shader enable extension.
                lexer.require_enable_extension(
                    ImplementedEnableExtension::WgpuMeshShader,
                    name_span,
                )?;
                self.per_primitive.set((), name_span)?;
            }
            _ => return Err(Box::new(Error::UnknownAttribute(name_span))),
        }
        Ok(())
    }

    /// Combine the accumulated attributes into a single binding.
    ///
    /// Valid combinations are: no binding attributes at all (`Ok(None)`);
    /// `@location` with optional interpolation/sampling/blend_src/
    /// per_primitive; `@builtin(position)` with optional `@invariant`; or
    /// any other builtin alone. Anything else is `InconsistentBinding`.
    fn finish(self, span: Span) -> Result<'a, Option<ast::Binding<'a>>> {
        match (
            self.location.value,
            self.built_in.value,
            self.interpolation.value,
            self.sampling.value,
            self.invariant.value.unwrap_or_default(),
            self.blend_src.value,
            self.per_primitive.value,
        ) {
            // No binding attributes were given at all.
            (None, None, None, None, false, None, None) => Ok(None),
            (Some(location), None, interpolation, sampling, false, blend_src, per_primitive) => {
                Ok(Some(ast::Binding::Location {
                    location,
                    interpolation,
                    sampling,
                    blend_src,
                    per_primitive: per_primitive.is_some(),
                }))
            }
            // `@invariant` is only accepted together with the `position`
            // builtin.
            (None, Some(crate::BuiltIn::Position { .. }), None, None, invariant, None, None) => {
                Ok(Some(ast::Binding::BuiltIn(crate::BuiltIn::Position {
                    invariant,
                })))
            }
            (None, Some(built_in), None, None, false, None, None) => {
                Ok(Some(ast::Binding::BuiltIn(built_in)))
            }
            (_, _, _, _, _, _, _) => Err(Box::new(Error::InconsistentBinding(span))),
        }
    }
}
278
/// Configuration options for the WGSL front end.
pub struct Options {
    /// Whether doc comments are parsed and attached to declarations.
    pub parse_doc_comments: bool,
    /// Capabilities the parser may assume are available
    /// (`Options::new` enables all of them).
    pub capabilities: crate::valid::Capabilities,
}
286
impl Options {
    /// Default options: doc comments are skipped and every capability is
    /// assumed available.
    pub const fn new() -> Self {
        Options {
            parse_doc_comments: false,
            capabilities: crate::valid::Capabilities::all(),
        }
    }
}
296
/// Recursive-descent parser for WGSL; holds only per-parse bookkeeping.
pub struct Parser {
    /// Stack of in-progress grammar rules, each paired with the byte offset
    /// where it started (used to compute spans).
    rules: Vec<(Rule, usize)>,
    /// Current nesting depth of `track_recursion`, bounded to protect the
    /// native stack from pathological input.
    recursion_depth: u32,
}
301
302impl Parser {
    /// Construct a parser with empty state.
    pub const fn new() -> Self {
        Parser {
            rules: Vec::new(),
            recursion_depth: 0,
        }
    }
309
    /// Clear all per-parse state so the parser can be reused for another
    /// translation unit (also discards leftovers after an error).
    fn reset(&mut self) {
        self.rules.clear();
        self.recursion_depth = 0;
    }
314
    /// Begin a grammar rule, recording the current byte offset so the
    /// matching `pop_rule_span`/`peek_rule_span` can compute the rule's span.
    fn push_rule_span(&mut self, rule: Rule, lexer: &mut Lexer<'_>) {
        self.rules.push((rule, lexer.start_byte_offset()));
    }
318
    /// End the innermost rule and return its span, from the offset recorded
    /// by the matching `push_rule_span` to the lexer's current position.
    ///
    /// Panics if the rule stack is empty; push/pop calls must be balanced.
    fn pop_rule_span(&mut self, lexer: &Lexer<'_>) -> Span {
        let (_, initial) = self.rules.pop().unwrap();
        lexer.span_from(initial)
    }
323
324 fn peek_rule_span(&mut self, lexer: &Lexer<'_>) -> Span {
325 let &(_, initial) = self.rules.last().unwrap();
326 lexer.span_from(initial)
327 }
328
329 fn race_rules(&self, rule0: Rule, rule1: Rule) -> Option<Rule> {
330 Some(
331 self.rules
332 .iter()
333 .rev()
334 .find(|&x| x.0 == rule0 || x.0 == rule1)?
335 .0,
336 )
337 }
338
    /// Run `f`, enforcing a limit on the nesting depth of recursive
    /// productions so that deeply nested source cannot overflow the native
    /// stack.
    ///
    /// Returns an internal error once 200 nested calls are reached. Note
    /// that on the limit-exceeded path the increment is not wound back;
    /// `reset` clears the counter before the next parse.
    fn track_recursion<'a, F, R>(&mut self, f: F) -> Result<'a, R>
    where
        F: FnOnce(&mut Self) -> Result<'a, R>,
    {
        self.recursion_depth += 1;
        if self.recursion_depth >= 200 {
            return Err(Box::new(Error::Internal("Parser recursion limit exceeded")));
        }
        let ret = f(self);
        self.recursion_depth -= 1;
        ret
    }
351
352 fn switch_value<'a>(
353 &mut self,
354 lexer: &mut Lexer<'a>,
355 ctx: &mut ExpressionContext<'a, '_, '_>,
356 ) -> Result<'a, ast::SwitchValue<'a>> {
357 if lexer.next_if(Token::Word("default")) {
358 return Ok(ast::SwitchValue::Default);
359 }
360
361 let expr = self.expression(lexer, ctx)?;
362 Ok(ast::SwitchValue::Expr(expr))
363 }
364
    /// Parse a parenthesized, comma-separated argument list, which may be
    /// empty.
    ///
    /// Pushes `Rule::EnclosedExpr` so `<`/`>` inside the arguments are
    /// parsed with the full relational operator set.
    fn arguments<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Vec<Handle<ast::Expression<'a>>>> {
        self.push_rule_span(Rule::EnclosedExpr, lexer);
        lexer.open_arguments()?;
        let mut arguments = Vec::new();
        loop {
            if !arguments.is_empty() {
                // After the first argument, `next_argument` is expected to
                // consume the separator and report `false` at the closing
                // paren — TODO confirm against the lexer.
                if !lexer.next_argument()? {
                    break;
                }
            } else if lexer.next_if(Token::Paren(')')) {
                // Empty argument list: `()`.
                break;
            }
            let arg = self.expression(lexer, ctx)?;
            arguments.push(arg);
        }

        self.pop_rule_span(lexer);
        Ok(arguments)
    }
389
    /// Parse an expression with `Rule::EnclosedExpr` on the rule stack,
    /// which lets `equality_expression` use the full `<`/`>` operator set
    /// even when a template list is further out (see `race_rules`).
    fn enclosed_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.push_rule_span(Rule::EnclosedExpr, lexer);
        let expr = self.expression(lexer, ctx)?;
        self.pop_rule_span(lexer);
        Ok(expr)
    }
400
401 fn ident_expr<'a>(
402 &mut self,
403 name: &'a str,
404 name_span: Span,
405 ctx: &mut ExpressionContext<'a, '_, '_>,
406 ) -> ast::IdentExpr<'a> {
407 match ctx.local_table.lookup(name) {
408 Some(&local) => ast::IdentExpr::Local(local),
409 None => {
410 ctx.unresolved.insert(ast::Dependency {
411 ident: name,
412 usage: name_span,
413 });
414 ast::IdentExpr::Unresolved(name)
415 }
416 }
417 }
418
    /// Parse a primary expression: a parenthesized expression, a literal,
    /// one of the ray-tracing named constants, or a (possibly templated)
    /// identifier / call expression.
    ///
    /// `token` is the already-consumed first token of the expression.
    fn primary_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
        token: TokenSpan<'a>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.push_rule_span(Rule::PrimaryExpr, lexer);

        // Helpers that turn ray-tracing constants into `u32` literals.
        const fn literal_ray_flag<'b>(flag: crate::RayFlag) -> ast::Expression<'b> {
            ast::Expression::Literal(ast::Literal::Number(Number::U32(flag.bits())))
        }
        const fn literal_ray_intersection<'b>(
            intersection: crate::RayQueryIntersection,
        ) -> ast::Expression<'b> {
            ast::Expression::Literal(ast::Literal::Number(Number::U32(intersection as u32)))
        }

        let expr = match token {
            (Token::Paren('('), _) => {
                // Parenthesized expression. Return early: the inner
                // expression was already appended to the arena.
                let expr = self.enclosed_expression(lexer, ctx)?;
                lexer.expect(Token::Paren(')'))?;
                self.pop_rule_span(lexer);
                return Ok(expr);
            }
            (Token::Word("true"), _) => ast::Expression::Literal(ast::Literal::Bool(true)),
            (Token::Word("false"), _) => ast::Expression::Literal(ast::Literal::Bool(false)),
            (Token::Number(res), span) => {
                let num = res.map_err(|err| Error::BadNumber(span, err))?;

                // Some numeric literal forms are gated behind an `enable`
                // extension.
                if let Some(enable_extension) = num.requires_enable_extension() {
                    lexer.require_enable_extension(enable_extension, span)?;
                }

                ast::Expression::Literal(ast::Literal::Number(num))
            }
            // Ray-flag named constants lower to `u32` bit values.
            (Token::Word("RAY_FLAG_NONE"), _) => literal_ray_flag(crate::RayFlag::empty()),
            (Token::Word("RAY_FLAG_FORCE_OPAQUE"), _) => {
                literal_ray_flag(crate::RayFlag::FORCE_OPAQUE)
            }
            (Token::Word("RAY_FLAG_FORCE_NO_OPAQUE"), _) => {
                literal_ray_flag(crate::RayFlag::FORCE_NO_OPAQUE)
            }
            (Token::Word("RAY_FLAG_TERMINATE_ON_FIRST_HIT"), _) => {
                literal_ray_flag(crate::RayFlag::TERMINATE_ON_FIRST_HIT)
            }
            (Token::Word("RAY_FLAG_SKIP_CLOSEST_HIT_SHADER"), _) => {
                literal_ray_flag(crate::RayFlag::SKIP_CLOSEST_HIT_SHADER)
            }
            (Token::Word("RAY_FLAG_CULL_BACK_FACING"), _) => {
                literal_ray_flag(crate::RayFlag::CULL_BACK_FACING)
            }
            (Token::Word("RAY_FLAG_CULL_FRONT_FACING"), _) => {
                literal_ray_flag(crate::RayFlag::CULL_FRONT_FACING)
            }
            (Token::Word("RAY_FLAG_CULL_OPAQUE"), _) => {
                literal_ray_flag(crate::RayFlag::CULL_OPAQUE)
            }
            (Token::Word("RAY_FLAG_CULL_NO_OPAQUE"), _) => {
                literal_ray_flag(crate::RayFlag::CULL_NO_OPAQUE)
            }
            (Token::Word("RAY_FLAG_SKIP_TRIANGLES"), _) => {
                literal_ray_flag(crate::RayFlag::SKIP_TRIANGLES)
            }
            (Token::Word("RAY_FLAG_SKIP_AABBS"), _) => literal_ray_flag(crate::RayFlag::SKIP_AABBS),
            // Ray-query intersection kinds lower to their `u32` discriminant.
            (Token::Word("RAY_QUERY_INTERSECTION_NONE"), _) => {
                literal_ray_intersection(crate::RayQueryIntersection::None)
            }
            (Token::Word("RAY_QUERY_INTERSECTION_TRIANGLE"), _) => {
                literal_ray_intersection(crate::RayQueryIntersection::Triangle)
            }
            (Token::Word("RAY_QUERY_INTERSECTION_GENERATED"), _) => {
                literal_ray_intersection(crate::RayQueryIntersection::Generated)
            }
            (Token::Word("RAY_QUERY_INTERSECTION_AABB"), _) => {
                literal_ray_intersection(crate::RayQueryIntersection::Aabb)
            }
            (Token::Word(word), span) => {
                let ident = self.template_elaborated_ident(word, span, lexer, ctx)?;

                // An identifier immediately followed by `(` is a call.
                if let Token::Paren('(') = lexer.peek().0 {
                    let arguments = self.arguments(lexer, ctx)?;
                    ast::Expression::Call(ast::CallPhrase {
                        function: ident,
                        arguments,
                    })
                } else {
                    ast::Expression::Ident(ident)
                }
            }
            other => {
                return Err(Box::new(Error::Unexpected(
                    other.1,
                    ExpectedToken::PrimaryExpression,
                )))
            }
        };

        self.pop_rule_span(lexer);
        // Span runs from the start of `token` to the current lexer position.
        let span = lexer.span_with_start(token.1);
        let expr = ctx.expressions.append(expr, span);
        Ok(expr)
    }
521
    /// Parse any postfix accessors that follow an expression: member or
    /// swizzle access (`.field`) and indexing (`[expr]`), repeated in any
    /// order.
    ///
    /// `expr_start` is the start of the base expression, so each appended
    /// accessor node gets a span covering the base as well.
    fn component_or_swizzle_specifier<'a>(
        &mut self,
        expr_start: Span,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
        expr: Handle<ast::Expression<'a>>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        let mut expr = expr;

        loop {
            let expression = match lexer.peek().0 {
                Token::Separator('.') => {
                    let _ = lexer.next();
                    let field = lexer.next_ident()?;

                    ast::Expression::Member { base: expr, field }
                }
                Token::Paren('[') => {
                    let _ = lexer.next();
                    let index = self.enclosed_expression(lexer, ctx)?;
                    lexer.expect(Token::Paren(']'))?;

                    ast::Expression::Index { base: expr, index }
                }
                // No further accessor: the expression is complete.
                _ => break,
            };

            let span = lexer.span_with_start(expr_start);
            expr = ctx.expressions.append(expression, span);
        }

        Ok(expr)
    }
555
    /// Parse a unary expression: any number of prefix operators (`-`, `!`,
    /// `~`, `*`, `&`) followed by a singular expression.
    ///
    /// The operators are first collected in source order, then applied to
    /// the operand in reverse, so the operator nearest the operand binds
    /// innermost.
    fn unary_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.push_rule_span(Rule::UnaryExpr, lexer);

        // The prefix operators this parser recognizes.
        enum UnaryOp {
            Negate,
            LogicalNot,
            BitwiseNot,
            Deref,
            AddrOf,
        }

        let mut ops = Vec::new();
        let mut expr;

        // Collect prefix operators until a token that must start the
        // operand itself.
        loop {
            match lexer.next() {
                (Token::Operation('-'), span) => {
                    ops.push((UnaryOp::Negate, span));
                }
                (Token::Operation('!'), span) => {
                    ops.push((UnaryOp::LogicalNot, span));
                }
                (Token::Operation('~'), span) => {
                    ops.push((UnaryOp::BitwiseNot, span));
                }
                (Token::Operation('*'), span) => {
                    ops.push((UnaryOp::Deref, span));
                }
                (Token::Operation('&'), span) => {
                    ops.push((UnaryOp::AddrOf, span));
                }
                token => {
                    expr = self.singular_expression(lexer, ctx, token)?;
                    break;
                }
            };
        }

        // Wrap the operand, innermost operator first; each node's span
        // starts at its own operator.
        for (op, span) in ops.into_iter().rev() {
            let e = match op {
                UnaryOp::Negate => ast::Expression::Unary {
                    op: crate::UnaryOperator::Negate,
                    expr,
                },
                UnaryOp::LogicalNot => ast::Expression::Unary {
                    op: crate::UnaryOperator::LogicalNot,
                    expr,
                },
                UnaryOp::BitwiseNot => ast::Expression::Unary {
                    op: crate::UnaryOperator::BitwiseNot,
                    expr,
                },
                UnaryOp::Deref => ast::Expression::Deref(expr),
                UnaryOp::AddrOf => ast::Expression::AddrOf(expr),
            };
            let span = lexer.span_with_start(span);
            expr = ctx.expressions.append(e, span);
        }

        self.pop_rule_span(lexer);
        Ok(expr)
    }
623
    /// Parse an expression that may appear on the left of an assignment:
    /// `*expr`, `&expr`, a parenthesized LHS, or an identifier followed by
    /// component/index accessors. Template lists are not parsed here.
    ///
    /// `token`, if provided, is the already-consumed first token; otherwise
    /// one is taken from the lexer. `expected_token` customizes the error
    /// reported when the token cannot start an LHS expression.
    fn lhs_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
        token: Option<TokenSpan<'a>>,
        expected_token: ExpectedToken<'a>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.track_recursion(|this| {
            this.push_rule_span(Rule::LhsExpr, lexer);
            let token = token.unwrap_or_else(|| lexer.next());
            let expr = match token {
                (Token::Operation('*'), _) => {
                    let expr =
                        this.lhs_expression(lexer, ctx, None, ExpectedToken::LhsExpression)?;
                    let expr = ast::Expression::Deref(expr);
                    // Span of the whole LhsExpr rule parsed so far.
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                (Token::Operation('&'), _) => {
                    let expr =
                        this.lhs_expression(lexer, ctx, None, ExpectedToken::LhsExpression)?;
                    let expr = ast::Expression::AddrOf(expr);
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                (Token::Paren('('), span) => {
                    let expr =
                        this.lhs_expression(lexer, ctx, None, ExpectedToken::LhsExpression)?;
                    lexer.expect(Token::Paren(')'))?;
                    this.component_or_swizzle_specifier(span, lexer, ctx, expr)?
                }
                (Token::Word(word), span) => {
                    let ident = this.ident_expr(word, span, ctx);
                    // LHS identifiers never carry a template list, so an
                    // empty one is synthesized.
                    let ident = ast::TemplateElaboratedIdent {
                        ident,
                        ident_span: span,
                        template_list: Vec::new(),
                        template_list_span: Span::UNDEFINED,
                    };
                    let ident = ctx.expressions.append(ast::Expression::Ident(ident), span);
                    this.component_or_swizzle_specifier(span, lexer, ctx, ident)?
                }
                (_, span) => {
                    return Err(Box::new(Error::Unexpected(span, expected_token)));
                }
            };

            this.pop_rule_span(lexer);
            Ok(expr)
        })
    }
679
    /// Parse a singular expression: a primary expression plus any following
    /// component/swizzle/index accessors.
    ///
    /// `token` is the already-consumed first token of the expression.
    fn singular_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
        token: TokenSpan<'a>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.push_rule_span(Rule::SingularExpr, lexer);
        let primary_expr = self.primary_expression(lexer, ctx, token)?;
        let singular_expr =
            self.component_or_swizzle_specifier(token.1, lexer, ctx, primary_expr)?;
        self.pop_rule_span(lexer);

        Ok(singular_expr)
    }
695
    /// Parse the middle precedence tiers as one nested chain of
    /// `parse_binary_op` calls: equality over relational over shift over
    /// additive over multiplicative, each left-associative.
    ///
    /// When the nearest enclosing rule of `GenericExpr`/`EnclosedExpr` on
    /// the stack is a template list (`GenericExpr`), only the unambiguous
    /// `<=` and `<<` forms are treated as operators — bare `<`/`>`, `>=`,
    /// and `>>` tokens are left alone since they may delimit the template
    /// list. A parenthesized subexpression pushes `EnclosedExpr` and
    /// restores the full operator set.
    fn equality_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        context: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        context.parse_binary_op(
            lexer,
            // Equality tier: `==`, `!=`.
            |token| match token {
                Token::LogicalOperation('=') => Some(crate::BinaryOperator::Equal),
                Token::LogicalOperation('!') => Some(crate::BinaryOperator::NotEqual),
                _ => None,
            },
            |lexer, context| {
                // Decide which `<`/`>` tokens may act as operators here.
                let enclosing = self.race_rules(Rule::GenericExpr, Rule::EnclosedExpr);
                context.parse_binary_op(
                    lexer,
                    match enclosing {
                        // Relational tier inside a template list: only `<=`.
                        Some(Rule::GenericExpr) => |token| match token {
                            Token::LogicalOperation('<') => Some(crate::BinaryOperator::LessEqual),
                            _ => None,
                        },
                        // Relational tier, full operator set.
                        _ => |token| match token {
                            Token::Paren('<') => Some(crate::BinaryOperator::Less),
                            Token::Paren('>') => Some(crate::BinaryOperator::Greater),
                            Token::LogicalOperation('<') => Some(crate::BinaryOperator::LessEqual),
                            Token::LogicalOperation('>') => {
                                Some(crate::BinaryOperator::GreaterEqual)
                            }
                            _ => None,
                        },
                    },
                    |lexer, context| {
                        context.parse_binary_op(
                            lexer,
                            match enclosing {
                                // Shift tier inside a template list: only `<<`.
                                Some(Rule::GenericExpr) => |token| match token {
                                    Token::ShiftOperation('<') => {
                                        Some(crate::BinaryOperator::ShiftLeft)
                                    }
                                    _ => None,
                                },
                                // Shift tier, full operator set.
                                _ => |token| match token {
                                    Token::ShiftOperation('<') => {
                                        Some(crate::BinaryOperator::ShiftLeft)
                                    }
                                    Token::ShiftOperation('>') => {
                                        Some(crate::BinaryOperator::ShiftRight)
                                    }
                                    _ => None,
                                },
                            },
                            |lexer, context| {
                                context.parse_binary_op(
                                    lexer,
                                    // Additive tier: `+`, `-`.
                                    |token| match token {
                                        Token::Operation('+') => Some(crate::BinaryOperator::Add),
                                        Token::Operation('-') => {
                                            Some(crate::BinaryOperator::Subtract)
                                        }
                                        _ => None,
                                    },
                                    |lexer, context| {
                                        context.parse_binary_op(
                                            lexer,
                                            // Multiplicative tier: `*`, `/`, `%`.
                                            |token| match token {
                                                Token::Operation('*') => {
                                                    Some(crate::BinaryOperator::Multiply)
                                                }
                                                Token::Operation('/') => {
                                                    Some(crate::BinaryOperator::Divide)
                                                }
                                                Token::Operation('%') => {
                                                    Some(crate::BinaryOperator::Modulo)
                                                }
                                                _ => None,
                                            },
                                            |lexer, context| self.unary_expression(lexer, context),
                                        )
                                    },
                                )
                            },
                        )
                    },
                )
            },
        )
    }
788
    /// Parse a complete expression, handling the lowest precedence tiers —
    /// `||`, then `&&`, then bitwise `|`, `^`, `&` — and delegating the
    /// rest to `equality_expression`. All tiers are left-associative.
    ///
    /// Runs under `track_recursion` since expressions nest recursively.
    fn expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        context: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.track_recursion(|this| {
            this.push_rule_span(Rule::GeneralExpr, lexer);
            let handle = context.parse_binary_op(
                lexer,
                // Short-circuit OR tier: `||`.
                |token| match token {
                    Token::LogicalOperation('|') => Some(crate::BinaryOperator::LogicalOr),
                    _ => None,
                },
                |lexer, context| {
                    context.parse_binary_op(
                        lexer,
                        // Short-circuit AND tier: `&&`.
                        |token| match token {
                            Token::LogicalOperation('&') => Some(crate::BinaryOperator::LogicalAnd),
                            _ => None,
                        },
                        |lexer, context| {
                            context.parse_binary_op(
                                lexer,
                                // Bitwise OR tier: `|`.
                                |token| match token {
                                    Token::Operation('|') => {
                                        Some(crate::BinaryOperator::InclusiveOr)
                                    }
                                    _ => None,
                                },
                                |lexer, context| {
                                    context.parse_binary_op(
                                        lexer,
                                        // Bitwise XOR tier: `^`.
                                        |token| match token {
                                            Token::Operation('^') => {
                                                Some(crate::BinaryOperator::ExclusiveOr)
                                            }
                                            _ => None,
                                        },
                                        |lexer, context| {
                                            context.parse_binary_op(
                                                lexer,
                                                // Bitwise AND tier: `&`.
                                                |token| match token {
                                                    Token::Operation('&') => {
                                                        Some(crate::BinaryOperator::And)
                                                    }
                                                    _ => None,
                                                },
                                                |lexer, context| {
                                                    this.equality_expression(lexer, context)
                                                },
                                            )
                                        },
                                    )
                                },
                            )
                        },
                    )
                },
            )?;
            this.pop_rule_span(lexer);
            Ok(handle)
        })
    }
857
858 fn optionally_typed_ident<'a>(
859 &mut self,
860 lexer: &mut Lexer<'a>,
861 ctx: &mut ExpressionContext<'a, '_, '_>,
862 ) -> Result<'a, (ast::Ident<'a>, Option<ast::TemplateElaboratedIdent<'a>>)> {
863 let name = lexer.next_ident()?;
864
865 let ty = if lexer.next_if(Token::Separator(':')) {
866 Some(self.type_specifier(lexer, ctx)?)
867 } else {
868 None
869 };
870
871 Ok((name, ty))
872 }
873
    /// Parse the body of a `var` global declaration, after the `var`
    /// keyword: an optional template list (e.g. the address space), the
    /// name, an optional `: type`, an optional `= init`, and the
    /// terminating `;`.
    ///
    /// The returned variable has no binding attributes or doc comments and
    /// empty memory decorations; the caller is expected to fill those in.
    fn variable_decl<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, ast::GlobalVariable<'a>> {
        self.push_rule_span(Rule::VariableDecl, lexer);
        let (template_list, _) = self.maybe_template_list(lexer, ctx)?;
        let (name, ty) = self.optionally_typed_ident(lexer, ctx)?;

        let init = if lexer.next_if(Token::Operation('=')) {
            let handle = self.expression(lexer, ctx)?;
            Some(handle)
        } else {
            None
        };
        lexer.expect(Token::Separator(';'))?;
        self.pop_rule_span(lexer);

        Ok(ast::GlobalVariable {
            name,
            template_list,
            binding: None,
            ty,
            init,
            doc_comments: Vec::new(),
            memory_decorations: crate::MemoryDecorations::empty(),
        })
    }
903
    /// Parse the brace-delimited member list of a `struct` declaration.
    ///
    /// Each member may carry doc comments, `@size`/`@align` attributes, and
    /// binding attributes (dispatched to `BindingParser`). Members must be
    /// comma-separated; duplicate member names are rejected with a
    /// `Redefinition` error pointing at the first occurrence.
    fn struct_body<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Vec<ast::StructMember<'a>>> {
        let mut members = Vec::new();
        // Member names seen so far, for duplicate detection.
        let mut member_names = FastHashSet::default();

        lexer.expect(Token::Paren('{'))?;
        // True when another member may begin: at the start, or right after
        // a comma.
        let mut ready = true;
        while !lexer.next_if(Token::Paren('}')) {
            if !ready {
                return Err(Box::new(Error::Unexpected(
                    lexer.next().1,
                    ExpectedToken::Token(Token::Separator(',')),
                )));
            }

            let doc_comments = lexer.accumulate_doc_comments();

            // Gather this member's attributes.
            let (mut size, mut align) = (ParsedAttribute::default(), ParsedAttribute::default());
            self.push_rule_span(Rule::Attribute, lexer);
            let mut bind_parser = BindingParser::default();
            while lexer.next_if(Token::Attribute) {
                match lexer.next_ident_with_span()? {
                    ("size", name_span) => {
                        lexer.expect(Token::Paren('('))?;
                        let expr = self.expression(lexer, ctx)?;
                        lexer.next_if(Token::Separator(','));
                        lexer.expect(Token::Paren(')'))?;
                        size.set(expr, name_span)?;
                    }
                    ("align", name_span) => {
                        lexer.expect(Token::Paren('('))?;
                        let expr = self.expression(lexer, ctx)?;
                        lexer.next_if(Token::Separator(','));
                        lexer.expect(Token::Paren(')'))?;
                        align.set(expr, name_span)?;
                    }
                    // Any other attribute name is a binding attribute.
                    (word, word_span) => bind_parser.parse(self, lexer, word, word_span, ctx)?,
                }
            }

            let bind_span = self.pop_rule_span(lexer);
            let binding = bind_parser.finish(bind_span)?;

            let name = lexer.next_ident()?;
            lexer.expect(Token::Separator(':'))?;
            let ty = self.type_specifier(lexer, ctx)?;
            ready = lexer.next_if(Token::Separator(','));

            members.push(ast::StructMember {
                name,
                ty,
                binding,
                size: size.value,
                align: align.value,
                doc_comments,
            });

            // The new member was already pushed, but `find` scans from the
            // front, so the reported span belongs to the first occurrence.
            if !member_names.insert(name.name) {
                return Err(Box::new(Error::Redefinition {
                    previous: members
                        .iter()
                        .find(|x| x.name.name == name.name)
                        .map(|x| x.name.span)
                        .unwrap(),
                    current: name.span,
                }));
            }
        }

        Ok(members)
    }
978
    /// Parse a template list (`<expr, …>`) if one begins at the current
    /// position; a trailing comma before the closing delimiter is accepted.
    ///
    /// Returns the argument handles and the list's span, or an empty list
    /// with `Span::UNDEFINED` when no template list is present.
    fn maybe_template_list<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, (Vec<Handle<ast::Expression<'a>>>, Span)> {
        let start = lexer.start_byte_offset();
        if lexer.next_if(Token::TemplateArgsStart) {
            let mut args = Vec::new();
            args.push(self.expression(lexer, ctx)?);
            // Keep parsing after `,` unless it is a trailing comma right
            // before the closing delimiter.
            while lexer.next_if(Token::Separator(',')) && lexer.peek().0 != Token::TemplateArgsEnd {
                args.push(self.expression(lexer, ctx)?);
            }
            lexer.expect(Token::TemplateArgsEnd)?;
            let span = lexer.span_from(start);
            Ok((args, span))
        } else {
            Ok((Vec::new(), Span::UNDEFINED))
        }
    }
998
    /// Build a template-elaborated identifier from an already-consumed
    /// identifier `word` (at `span`), parsing a trailing template list if
    /// one is present.
    fn template_elaborated_ident<'a>(
        &mut self,
        word: &'a str,
        span: Span,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, ast::TemplateElaboratedIdent<'a>> {
        let ident = self.ident_expr(word, span, ctx);
        let (template_list, template_list_span) = self.maybe_template_list(lexer, ctx)?;
        Ok(ast::TemplateElaboratedIdent {
            ident,
            ident_span: span,
            template_list,
            template_list_span,
        })
    }
1015
    /// Parse a type specifier: an identifier optionally followed by a
    /// template list (e.g. `array<f32, 4>`).
    fn type_specifier<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, ast::TemplateElaboratedIdent<'a>> {
        let (name, span) = lexer.next_ident_with_span()?;
        self.template_elaborated_ident(name, span, lexer, ctx)
    }
1024
1025 fn variable_updating_statement<'a>(
1030 &mut self,
1031 lexer: &mut Lexer<'a>,
1032 ctx: &mut ExpressionContext<'a, '_, '_>,
1033 block: &mut ast::Block<'a>,
1034 token: TokenSpan<'a>,
1035 expected_token: ExpectedToken<'a>,
1036 ) -> Result<'a, ()> {
1037 match token {
1038 (Token::Word("_"), span) => {
1039 lexer.expect(Token::Operation('='))?;
1040 let expr = self.expression(lexer, ctx)?;
1041 let span = lexer.span_with_start(span);
1042 block.stmts.push(ast::Statement {
1043 kind: ast::StatementKind::Phony(expr),
1044 span,
1045 });
1046 return Ok(());
1047 }
1048 _ => {}
1049 }
1050 let target = self.lhs_expression(lexer, ctx, Some(token), expected_token)?;
1051
1052 let (op, value) = match lexer.next() {
1053 (Token::Operation('='), _) => {
1054 let value = self.expression(lexer, ctx)?;
1055 (None, value)
1056 }
1057 (Token::AssignmentOperation(c), _) => {
1058 use crate::BinaryOperator as Bo;
1059 let op = match c {
1060 '<' => Bo::ShiftLeft,
1061 '>' => Bo::ShiftRight,
1062 '+' => Bo::Add,
1063 '-' => Bo::Subtract,
1064 '*' => Bo::Multiply,
1065 '/' => Bo::Divide,
1066 '%' => Bo::Modulo,
1067 '&' => Bo::And,
1068 '|' => Bo::InclusiveOr,
1069 '^' => Bo::ExclusiveOr,
1070 _ => unreachable!(),
1072 };
1073
1074 let value = self.expression(lexer, ctx)?;
1075 (Some(op), value)
1076 }
1077 op_token @ (Token::IncrementOperation | Token::DecrementOperation, _) => {
1078 let op = match op_token.0 {
1079 Token::IncrementOperation => ast::StatementKind::Increment,
1080 Token::DecrementOperation => ast::StatementKind::Decrement,
1081 _ => unreachable!(),
1082 };
1083
1084 let span = lexer.span_with_start(token.1);
1085 block.stmts.push(ast::Statement {
1086 kind: op(target),
1087 span,
1088 });
1089 return Ok(());
1090 }
1091 (_, span) => return Err(Box::new(Error::Unexpected(span, ExpectedToken::Assignment))),
1092 };
1093
1094 let span = lexer.span_with_start(token.1);
1095 block.stmts.push(ast::Statement {
1096 kind: ast::StatementKind::Assign { target, op, value },
1097 span,
1098 });
1099 Ok(())
1100 }
1101
    /// Try to parse a function-call statement starting at `token`.
    ///
    /// Returns `Ok(false)` — and pushes nothing — when `token` is not an
    /// identifier, or when the identifier has no template list and is not
    /// followed by `(`; the caller should then try an assignment-like
    /// statement instead. Note that `template_elaborated_ident` has already
    /// run in that case, so its side effects (such as recording an
    /// unresolved dependency) persist.
    fn maybe_func_call_statement<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        context: &mut ExpressionContext<'a, '_, '_>,
        block: &mut ast::Block<'a>,
        token: TokenSpan<'a>,
    ) -> Result<'a, bool> {
        let (name, name_span) = match token {
            (Token::Word(name), span) => (name, span),
            _ => return Ok(false),
        };
        let ident = self.template_elaborated_ident(name, name_span, lexer, context)?;
        if ident.template_list.is_empty() && !matches!(lexer.peek(), (Token::Paren('('), _)) {
            return Ok(false);
        }

        self.push_rule_span(Rule::SingularExpr, lexer);

        let arguments = self.arguments(lexer, context)?;
        let span = lexer.span_with_start(name_span);

        block.stmts.push(ast::Statement {
            kind: ast::StatementKind::Call(ast::CallPhrase {
                function: ident,
                arguments,
            }),
            span,
        });

        self.pop_rule_span(lexer);

        Ok(true)
    }
1141
1142 fn func_call_or_variable_updating_statement<'a>(
1147 &mut self,
1148 lexer: &mut Lexer<'a>,
1149 context: &mut ExpressionContext<'a, '_, '_>,
1150 block: &mut ast::Block<'a>,
1151 token: TokenSpan<'a>,
1152 expected_token: ExpectedToken<'a>,
1153 ) -> Result<'a, ()> {
1154 if !self.maybe_func_call_statement(lexer, context, block, token)? {
1155 self.variable_updating_statement(lexer, context, block, token, expected_token)?;
1156 }
1157 Ok(())
1158 }
1159
    /// Parse a statement that begins a local declaration (`let`, `const`,
    /// or `var`) or, if `token` is none of those keywords, a function call
    /// or assignment statement. The terminating `;` is left to the caller.
    ///
    /// `token` is the already-consumed first token of the statement.
    fn variable_or_value_or_func_call_or_variable_updating_statement<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
        block: &mut ast::Block<'a>,
        token: TokenSpan<'a>,
        expected_token: ExpectedToken<'a>,
    ) -> Result<'a, ()> {
        let local_decl = match token {
            // `let name [: ty] = init`
            (Token::Word("let"), _) => {
                let (name, given_ty) = self.optionally_typed_ident(lexer, ctx)?;

                lexer.expect(Token::Operation('='))?;
                let expr_id = self.expression(lexer, ctx)?;

                // Declared only after the initializer has been parsed, so
                // the initializer cannot refer to the new name.
                let handle = ctx.declare_local(name)?;
                ast::LocalDecl::Let(ast::Let {
                    name,
                    ty: given_ty,
                    init: expr_id,
                    handle,
                })
            }
            // `const name [: ty] = init`
            (Token::Word("const"), _) => {
                let (name, given_ty) = self.optionally_typed_ident(lexer, ctx)?;

                lexer.expect(Token::Operation('='))?;
                let expr_id = self.expression(lexer, ctx)?;

                let handle = ctx.declare_local(name)?;
                ast::LocalDecl::Const(ast::LocalConst {
                    name,
                    ty: given_ty,
                    init: expr_id,
                    handle,
                })
            }
            // `var[<function>] name [: ty] [= init]`
            (Token::Word("var"), _) => {
                // The only address space accepted on a local `var` is
                // `function`.
                if lexer.next_if(Token::TemplateArgsStart) {
                    let (class_str, span) = lexer.next_ident_with_span()?;
                    if class_str != "function" {
                        return Err(Box::new(Error::InvalidLocalVariableAddressSpace(span)));
                    }
                    lexer.expect(Token::TemplateArgsEnd)?;
                }

                let (name, ty) = self.optionally_typed_ident(lexer, ctx)?;

                let init = if lexer.next_if(Token::Operation('=')) {
                    let init = self.expression(lexer, ctx)?;
                    Some(init)
                } else {
                    None
                };

                let handle = ctx.declare_local(name)?;
                ast::LocalDecl::Var(ast::LocalVariable {
                    name,
                    ty,
                    init,
                    handle,
                })
            }
            // Not a declaration keyword: delegate, and return early since
            // there is no LocalDecl statement to push.
            token => {
                return self.func_call_or_variable_updating_statement(
                    lexer,
                    ctx,
                    block,
                    token,
                    expected_token,
                );
            }
        };

        let span = lexer.span_with_start(token.1);
        block.stmts.push(ast::Statement {
            kind: ast::StatementKind::LocalDecl(local_decl),
            span,
        });

        Ok(())
    }
1249
    /// Parses one statement and appends it to `block`.
    ///
    /// Compound statements (`{ ... }`, possibly preceded by attributes) are
    /// handled first; otherwise the statement kind is dispatched on its
    /// leading token. `while` and `for` are desugared into `loop`s here.
    /// Recursion depth is bounded by `track_recursion` and by
    /// `brace_nesting_level`.
    fn statement<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
        block: &mut ast::Block<'a>,
        brace_nesting_level: u8,
    ) -> Result<'a, ()> {
        self.track_recursion(|this| {
            this.push_rule_span(Rule::Statement, lexer);

            // A nested compound statement is parsed as a whole block.
            match lexer.peek() {
                (token, _) if is_start_of_compound_statement(token) => {
                    let (inner, span) = this.block(lexer, ctx, brace_nesting_level)?;
                    block.stmts.push(ast::Statement {
                        kind: ast::StatementKind::Block(inner),
                        span,
                    });
                    this.pop_rule_span(lexer);
                    return Ok(());
                }
                _ => {}
            }

            let kind = match lexer.next() {
                // An empty statement; produces no AST node.
                (Token::Separator(';'), _) => {
                    this.pop_rule_span(lexer);
                    return Ok(());
                }
                (Token::Word("return"), _) => {
                    // `return;` has no value expression.
                    let value = if lexer.peek().0 != Token::Separator(';') {
                        let handle = this.expression(lexer, ctx)?;
                        Some(handle)
                    } else {
                        None
                    };
                    lexer.expect(Token::Separator(';'))?;
                    ast::StatementKind::Return { value }
                }
                (Token::Word("if"), _) => {
                    let condition = this.expression(lexer, ctx)?;

                    let accept = this.block(lexer, ctx, brace_nesting_level)?.0;

                    // Collect `else if` arms iteratively (rather than
                    // recursing once per arm); the loop breaks with the
                    // final `else` block, or an empty block if none.
                    let mut elsif_stack = Vec::new();
                    let mut elseif_span_start = lexer.start_byte_offset();
                    let mut reject = loop {
                        if !lexer.next_if(Token::Word("else")) {
                            break ast::Block::default();
                        }

                        if !lexer.next_if(Token::Word("if")) {
                            break this.block(lexer, ctx, brace_nesting_level)?.0;
                        }

                        let other_condition = this.expression(lexer, ctx)?;
                        let other_block = this.block(lexer, ctx, brace_nesting_level)?;
                        elsif_stack.push((elseif_span_start, other_condition, other_block));
                        elseif_span_start = lexer.start_byte_offset();
                    };

                    // Rebuild the `else if` chain from the innermost arm
                    // outward, nesting each arm inside the previous arm's
                    // reject block.
                    for (other_span_start, other_cond, other_block) in elsif_stack.into_iter().rev()
                    {
                        let sub_stmt = ast::StatementKind::If {
                            condition: other_cond,
                            accept: other_block.0,
                            reject,
                        };
                        reject = ast::Block::default();
                        let span = lexer.span_from(other_span_start);
                        reject.stmts.push(ast::Statement {
                            kind: sub_stmt,
                            span,
                        })
                    }

                    ast::StatementKind::If {
                        condition,
                        accept,
                        reject,
                    }
                }
                (Token::Word("switch"), _) => {
                    let selector = this.expression(lexer, ctx)?;
                    let brace_span = lexer.expect_span(Token::Paren('{'))?;
                    let brace_nesting_level =
                        Self::increase_brace_nesting(brace_nesting_level, brace_span)?;
                    let mut cases = Vec::new();

                    loop {
                        match lexer.next() {
                            (Token::Word("case"), _) => {
                                // A `case` with several comma-separated
                                // selectors is recorded as a run of
                                // fall-through cases; the body is attached
                                // to the last selector only.
                                let value = loop {
                                    let value = this.switch_value(lexer, ctx)?;
                                    if lexer.next_if(Token::Separator(',')) {
                                        let next_token = lexer.peek().0;
                                        // A trailing comma before `:` or the
                                        // case body is allowed.
                                        if next_token == Token::Separator(':')
                                            || is_start_of_compound_statement(next_token)
                                        {
                                            break value;
                                        }
                                    } else {
                                        break value;
                                    }
                                    cases.push(ast::SwitchCase {
                                        value,
                                        body: ast::Block::default(),
                                        fall_through: true,
                                    });
                                };

                                // The colon after the selectors is optional.
                                lexer.next_if(Token::Separator(':'));

                                let body = this.block(lexer, ctx, brace_nesting_level)?.0;

                                cases.push(ast::SwitchCase {
                                    value,
                                    body,
                                    fall_through: false,
                                });
                            }
                            (Token::Word("default"), _) => {
                                lexer.next_if(Token::Separator(':'));
                                let body = this.block(lexer, ctx, brace_nesting_level)?.0;
                                cases.push(ast::SwitchCase {
                                    value: ast::SwitchValue::Default,
                                    body,
                                    fall_through: false,
                                });
                            }
                            (Token::Paren('}'), _) => break,
                            (_, span) => {
                                return Err(Box::new(Error::Unexpected(
                                    span,
                                    ExpectedToken::SwitchItem,
                                )))
                            }
                        }
                    }

                    ast::StatementKind::Switch { selector, cases }
                }
                (Token::Word("loop"), _) => this.r#loop(lexer, ctx, brace_nesting_level)?,
                (Token::Word("while"), _) => {
                    // Desugar `while cond { body }` into
                    // `loop { if cond {} else { break } body }`.
                    let mut body = ast::Block::default();

                    let (condition, span) =
                        lexer.capture_span(|lexer| this.expression(lexer, ctx))?;
                    let mut reject = ast::Block::default();
                    reject.stmts.push(ast::Statement {
                        kind: ast::StatementKind::Break,
                        span,
                    });

                    body.stmts.push(ast::Statement {
                        kind: ast::StatementKind::If {
                            condition,
                            accept: ast::Block::default(),
                            reject,
                        },
                        span,
                    });

                    let (block, span) = this.block(lexer, ctx, brace_nesting_level)?;
                    body.stmts.push(ast::Statement {
                        kind: ast::StatementKind::Block(block),
                        span,
                    });

                    ast::StatementKind::Loop {
                        body,
                        continuing: ast::Block::default(),
                        break_if: None,
                    }
                }
                (Token::Word("for"), _) => {
                    // Desugar `for (init; cond; update) { body }` into a
                    // `loop`: the body starts with a break-unless-cond
                    // check, and the update goes in the continuing block.
                    lexer.expect(Token::Paren('('))?;

                    // Locals declared in the init clause are scoped to the
                    // whole `for` statement.
                    ctx.local_table.push_scope();

                    if !lexer.next_if(Token::Separator(';')) {
                        let token = lexer.next();
                        this.variable_or_value_or_func_call_or_variable_updating_statement(
                            lexer,
                            ctx,
                            block,
                            token,
                            ExpectedToken::ForInit,
                        )?;
                        lexer.expect(Token::Separator(';'))?;
                    };

                    let mut body = ast::Block::default();
                    if !lexer.next_if(Token::Separator(';')) {
                        let (condition, span) = lexer.capture_span(|lexer| -> Result<'_, _> {
                            let condition = this.expression(lexer, ctx)?;
                            lexer.expect(Token::Separator(';'))?;
                            Ok(condition)
                        })?;
                        let mut reject = ast::Block::default();
                        reject.stmts.push(ast::Statement {
                            kind: ast::StatementKind::Break,
                            span,
                        });
                        body.stmts.push(ast::Statement {
                            kind: ast::StatementKind::If {
                                condition,
                                accept: ast::Block::default(),
                                reject,
                            },
                            span,
                        });
                    };

                    let mut continuing = ast::Block::default();
                    if !lexer.next_if(Token::Paren(')')) {
                        let token = lexer.next();
                        this.func_call_or_variable_updating_statement(
                            lexer,
                            ctx,
                            &mut continuing,
                            token,
                            ExpectedToken::ForUpdate,
                        )?;
                        lexer.expect(Token::Paren(')'))?;
                    }

                    let (block, span) = this.block(lexer, ctx, brace_nesting_level)?;
                    body.stmts.push(ast::Statement {
                        kind: ast::StatementKind::Block(block),
                        span,
                    });

                    ctx.local_table.pop_scope();

                    ast::StatementKind::Loop {
                        body,
                        continuing,
                        break_if: None,
                    }
                }
                (Token::Word("break"), span) => {
                    let (peeked_token, peeked_span) = lexer.peek();
                    // `break if` is only legal at the end of a `continuing`
                    // block (handled in `r#loop`); reject it here with a
                    // targeted error.
                    if let Token::Word("if") = peeked_token {
                        let span = span.until(&peeked_span);
                        return Err(Box::new(Error::InvalidBreakIf(span)));
                    }
                    lexer.expect(Token::Separator(';'))?;
                    ast::StatementKind::Break
                }
                (Token::Word("continue"), _) => {
                    lexer.expect(Token::Separator(';'))?;
                    ast::StatementKind::Continue
                }
                (Token::Word("discard"), _) => {
                    lexer.expect(Token::Separator(';'))?;
                    ast::StatementKind::Kill
                }
                (Token::Word("const_assert"), _) => {
                    // Parentheses around the condition are optional.
                    let paren = lexer.next_if(Token::Paren('('));

                    let condition = this.expression(lexer, ctx)?;

                    if paren {
                        lexer.expect(Token::Paren(')'))?;
                    }
                    lexer.expect(Token::Separator(';'))?;
                    ast::StatementKind::ConstAssert(condition)
                }
                // Anything else is a declaration, assignment, or function
                // call; that helper pushes the statement itself, so return
                // early instead of falling through to the push below.
                token => {
                    this.variable_or_value_or_func_call_or_variable_updating_statement(
                        lexer,
                        ctx,
                        block,
                        token,
                        ExpectedToken::Statement,
                    )?;
                    lexer.expect(Token::Separator(';'))?;
                    this.pop_rule_span(lexer);
                    return Ok(());
                }
            };

            let span = this.pop_rule_span(lexer);
            block.stmts.push(ast::Statement { kind, span });

            Ok(())
        })
    }
1555
    /// Parses a `loop { ... continuing { ... break if <expr>; } }` statement
    /// (the `loop` keyword has already been consumed).
    ///
    /// The loop body gets its own local scope. The optional `continuing`
    /// block must be the last item in the body, and an optional `break if`
    /// must be the last statement of the `continuing` block.
    fn r#loop<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
        brace_nesting_level: u8,
    ) -> Result<'a, ast::StatementKind<'a>> {
        let mut body = ast::Block::default();
        let mut continuing = ast::Block::default();
        let mut break_if = None;

        let brace_span = lexer.expect_span(Token::Paren('{'))?;
        let brace_nesting_level = Self::increase_brace_nesting(brace_nesting_level, brace_span)?;

        ctx.local_table.push_scope();

        loop {
            if lexer.next_if(Token::Word("continuing")) {
                // Parse the `continuing` block.
                let brace_span = lexer.expect_span(Token::Paren('{'))?;
                let brace_nesting_level =
                    Self::increase_brace_nesting(brace_nesting_level, brace_span)?;
                loop {
                    if lexer.next_if(Token::Word("break")) {
                        // A bare `break` is not valid here, so this must be
                        // `break if <expr>;`, which also ends the
                        // `continuing` block.
                        lexer.expect(Token::Word("if"))?;

                        let condition = self.expression(lexer, ctx)?;
                        break_if = Some(condition);

                        lexer.expect(Token::Separator(';'))?;
                        // `break if` must be followed directly by the close
                        // of the `continuing` block.
                        lexer.expect(Token::Paren('}'))?;
                        break;
                    } else if lexer.next_if(Token::Paren('}')) {
                        // End of the `continuing` block without `break if`.
                        break;
                    } else {
                        self.statement(lexer, ctx, &mut continuing, brace_nesting_level)?;
                    }
                }
                // The `continuing` block must be immediately followed by the
                // close of the enclosing loop.
                lexer.expect(Token::Paren('}'))?;
                break;
            }
            if lexer.next_if(Token::Paren('}')) {
                // End of the loop body without a `continuing` block.
                break;
            }
            self.statement(lexer, ctx, &mut body, brace_nesting_level)?;
        }

        ctx.local_table.pop_scope();

        Ok(ast::StatementKind::Loop {
            body,
            continuing,
            break_if,
        })
    }
1633
    /// Parses a brace-delimited compound statement into a new `ast::Block`,
    /// returning it together with its source span.
    ///
    /// A fresh local scope covers the block's duration. Leading
    /// `@diagnostic(...)` attributes are parsed and checked for conflicts,
    /// but diagnostic filters on compound statements are not yet supported
    /// and are rejected with a dedicated error.
    fn block<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
        brace_nesting_level: u8,
    ) -> Result<'a, (ast::Block<'a>, Span)> {
        self.push_rule_span(Rule::Block, lexer);

        ctx.local_table.push_scope();

        let mut diagnostic_filters = DiagnosticFilterMap::new();

        // Only `@diagnostic(...)` attributes may precede a block.
        self.push_rule_span(Rule::Attribute, lexer);
        while lexer.next_if(Token::Attribute) {
            let (name, name_span) = lexer.next_ident_with_span()?;
            if let Some(DirectiveKind::Diagnostic) = DirectiveKind::from_ident(name) {
                let filter = self.diagnostic_filter(lexer)?;
                let span = self.peek_rule_span(lexer);
                diagnostic_filters
                    .add(filter, span, ShouldConflictOnFullDuplicate::Yes)
                    .map_err(|e| Box::new(e.into()))?;
            } else {
                return Err(Box::new(Error::Unexpected(
                    name_span,
                    ExpectedToken::DiagnosticAttribute,
                )));
            }
        }
        self.pop_rule_span(lexer);

        // Parsed and validated above, but not yet implemented downstream.
        if !diagnostic_filters.is_empty() {
            return Err(Box::new(
                Error::DiagnosticAttributeNotYetImplementedAtParseSite {
                    site_name_plural: "compound statements",
                    spans: diagnostic_filters.spans().collect(),
                },
            ));
        }

        let brace_span = lexer.expect_span(Token::Paren('{'))?;
        let brace_nesting_level = Self::increase_brace_nesting(brace_nesting_level, brace_span)?;
        let mut block = ast::Block::default();
        while !lexer.next_if(Token::Paren('}')) {
            self.statement(lexer, ctx, &mut block, brace_nesting_level)?;
        }

        ctx.local_table.pop_scope();

        let span = self.pop_rule_span(lexer);
        Ok((block, span))
    }
1686
1687 fn varying_binding<'a>(
1688 &mut self,
1689 lexer: &mut Lexer<'a>,
1690 ctx: &mut ExpressionContext<'a, '_, '_>,
1691 ) -> Result<'a, Option<ast::Binding<'a>>> {
1692 let mut bind_parser = BindingParser::default();
1693 self.push_rule_span(Rule::Attribute, lexer);
1694
1695 while lexer.next_if(Token::Attribute) {
1696 let (word, span) = lexer.next_ident_with_span()?;
1697 bind_parser.parse(self, lexer, word, span, ctx)?;
1698 }
1699
1700 let span = self.pop_rule_span(lexer);
1701 bind_parser.finish(span)
1702 }
1703
    /// Parses a function declaration after the `fn` keyword: name, parameter
    /// list, optional `-> [attrs] type` result, and brace-delimited body.
    ///
    /// `must_use` carries the span of a preceding `@must_use` attribute, if
    /// any; it is an error on a function with no return type. Names this
    /// function references from module scope are recorded in `dependencies`.
    fn function_decl<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        diagnostic_filter_leaf: Option<Handle<DiagnosticFilterNode>>,
        must_use: Option<Span>,
        out: &mut ast::TranslationUnit<'a>,
        dependencies: &mut FastIndexSet<ast::Dependency<'a>>,
    ) -> Result<'a, ast::Function<'a>> {
        self.push_rule_span(Rule::FunctionDecl, lexer);
        let fun_name = lexer.next_ident()?;

        // NOTE(review): `locals` is populated via `ctx.declare_local` but
        // dropped at the end of this function — confirm that locals are
        // tracked elsewhere (e.g. on the AST) and this arena is only for
        // handle allocation.
        let mut locals = Arena::new();

        let mut ctx = ExpressionContext {
            expressions: &mut out.expressions,
            local_table: &mut SymbolTable::default(),
            locals: &mut locals,
            unresolved: dependencies,
        };

        // Parameters get their own scope.
        ctx.local_table.push_scope();
        // NOTE(review): presumably narrows name lookup while the signature
        // is parsed; undone by `reset_lookup_scope` below — confirm against
        // `SymbolTable`'s documentation.
        ctx.local_table.reduce_lookup_scope();

        let mut arguments = Vec::new();
        lexer.expect(Token::Paren('('))?;
        // `ready` tracks whether a comma separated us from the previous
        // parameter; a trailing comma before `)` is accepted.
        let mut ready = true;
        while !lexer.next_if(Token::Paren(')')) {
            if !ready {
                return Err(Box::new(Error::Unexpected(
                    lexer.next().1,
                    ExpectedToken::Token(Token::Separator(',')),
                )));
            }
            let binding = self.varying_binding(lexer, &mut ctx)?;

            let param_name = lexer.next_ident()?;

            lexer.expect(Token::Separator(':'))?;
            let param_type = self.type_specifier(lexer, &mut ctx)?;

            let handle = ctx.declare_local(param_name)?;
            arguments.push(ast::FunctionArgument {
                name: param_name,
                ty: param_type,
                binding,
                handle,
            });
            ready = lexer.next_if(Token::Separator(','));
        }
        // Optional return type; `@must_use` requires one.
        let result = if lexer.next_if(Token::Arrow) {
            let binding = self.varying_binding(lexer, &mut ctx)?;
            let ty = self.type_specifier(lexer, &mut ctx)?;
            let must_use = must_use.is_some();
            Some(ast::FunctionResult {
                ty,
                binding,
                must_use,
            })
        } else if let Some(must_use) = must_use {
            return Err(Box::new(Error::FunctionMustUseReturnsVoid(
                must_use,
                self.peek_rule_span(lexer),
            )));
        } else {
            None
        };

        ctx.local_table.reset_lookup_scope();

        // Function body; parameters remain in scope inside it.
        lexer.expect(Token::Paren('{'))?;
        let brace_nesting_level = 1;
        let mut body = ast::Block::default();
        while !lexer.next_if(Token::Paren('}')) {
            self.statement(lexer, &mut ctx, &mut body, brace_nesting_level)?;
        }

        ctx.local_table.pop_scope();

        let fun = ast::Function {
            entry_point: None,
            name: fun_name,
            arguments,
            result,
            body,
            diagnostic_filter_leaf,
            doc_comments: Vec::new(),
        };

        self.pop_rule_span(lexer);

        Ok(fun)
    }
1804
1805 fn directive_ident_list<'a>(
1806 &self,
1807 lexer: &mut Lexer<'a>,
1808 handler: impl FnMut(&'a str, Span) -> Result<'a, ()>,
1809 ) -> Result<'a, ()> {
1810 let mut handler = handler;
1811 'next_arg: loop {
1812 let (ident, span) = lexer.next_ident_with_span()?;
1813 handler(ident, span)?;
1814
1815 let expected_token = match lexer.peek().0 {
1816 Token::Separator(',') => {
1817 let _ = lexer.next();
1818 if matches!(lexer.peek().0, Token::Word(..)) {
1819 continue 'next_arg;
1820 }
1821 ExpectedToken::AfterIdentListComma
1822 }
1823 _ => ExpectedToken::AfterIdentListArg,
1824 };
1825
1826 if !matches!(lexer.next().0, Token::Separator(';')) {
1827 return Err(Box::new(Error::Unexpected(span, expected_token)));
1828 }
1829
1830 break Ok(());
1831 }
1832 }
1833
    /// Parses one module-scope declaration and appends it to `out`.
    ///
    /// First collects any leading attributes (entry-point stage, resource
    /// bindings, workgroup size, diagnostics, ...), then dispatches on the
    /// declaration keyword. Attributes that are only valid on some
    /// declaration kinds are checked after the kind is known.
    fn global_decl<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        out: &mut ast::TranslationUnit<'a>,
    ) -> Result<'a, ()> {
        let doc_comments = lexer.accumulate_doc_comments();

        let mut binding = None;
        let mut stage = ParsedAttribute::default();
        // Span used by errors that point at the stage attribute
        // (missing workgroup size / missing incoming payload).
        let mut shader_stage_error_span = Span::new(0, 0);
        let mut workgroup_size = ParsedAttribute::default();
        let mut early_depth_test = ParsedAttribute::default();
        let (mut bind_index, mut bind_group) =
            (ParsedAttribute::default(), ParsedAttribute::default());
        let mut id = ParsedAttribute::default();
        let mut payload = ParsedAttribute::default();
        let mut incoming_payload = ParsedAttribute::default();
        let mut mesh_output = ParsedAttribute::default();

        let mut must_use: ParsedAttribute<Span> = ParsedAttribute::default();
        let mut memory_decorations = crate::MemoryDecorations::empty();

        let mut dependencies = FastIndexSet::default();
        let mut ctx = ExpressionContext {
            expressions: &mut out.expressions,
            local_table: &mut SymbolTable::default(),
            locals: &mut Arena::new(),
            unresolved: &mut dependencies,
        };
        let mut diagnostic_filters = DiagnosticFilterMap::new();
        // Most declaration kinds do not accept `@diagnostic(...)`; each arm
        // below calls this to reject any filters that were collected.
        let ensure_no_diag_attrs = |on_what, filters: DiagnosticFilterMap| -> Result<()> {
            if filters.is_empty() {
                Ok(())
            } else {
                Err(Box::new(Error::DiagnosticAttributeNotSupported {
                    on_what,
                    spans: filters.spans().collect(),
                }))
            }
        };

        self.push_rule_span(Rule::Attribute, lexer);
        while lexer.next_if(Token::Attribute) {
            let (name, name_span) = lexer.next_ident_with_span()?;
            // `@diagnostic(...)` is handled separately from named attributes.
            if let Some(DirectiveKind::Diagnostic) = DirectiveKind::from_ident(name) {
                let filter = self.diagnostic_filter(lexer)?;
                let span = self.peek_rule_span(lexer);
                diagnostic_filters
                    .add(filter, span, ShouldConflictOnFullDuplicate::Yes)
                    .map_err(|e| Box::new(e.into()))?;
                continue;
            }
            match name {
                // `@binding(n)` / `@group(n)` / `@id(n)` allow a trailing
                // comma before the closing paren.
                "binding" => {
                    lexer.expect(Token::Paren('('))?;
                    bind_index.set(self.expression(lexer, &mut ctx)?, name_span)?;
                    lexer.next_if(Token::Separator(','));
                    lexer.expect(Token::Paren(')'))?;
                }
                "group" => {
                    lexer.expect(Token::Paren('('))?;
                    bind_group.set(self.expression(lexer, &mut ctx)?, name_span)?;
                    lexer.next_if(Token::Separator(','));
                    lexer.expect(Token::Paren(')'))?;
                }
                "id" => {
                    lexer.expect(Token::Paren('('))?;
                    id.set(self.expression(lexer, &mut ctx)?, name_span)?;
                    lexer.next_if(Token::Separator(','));
                    lexer.expect(Token::Paren(')'))?;
                }
                "vertex" => {
                    stage.set(ShaderStage::Vertex, name_span)?;
                }
                "fragment" => {
                    stage.set(ShaderStage::Fragment, name_span)?;
                }
                "compute" => {
                    stage.set(ShaderStage::Compute, name_span)?;
                    shader_stage_error_span = name_span;
                }
                // Mesh- and ray-tracing stages are gated behind their
                // respective `enable` extensions.
                "task" => {
                    lexer.require_enable_extension(
                        ImplementedEnableExtension::WgpuMeshShader,
                        name_span,
                    )?;
                    stage.set(ShaderStage::Task, name_span)?;
                    shader_stage_error_span = name_span;
                }
                "mesh" => {
                    lexer.require_enable_extension(
                        ImplementedEnableExtension::WgpuMeshShader,
                        name_span,
                    )?;
                    stage.set(ShaderStage::Mesh, name_span)?;
                    shader_stage_error_span = name_span;

                    // `@mesh(ident)` names the mesh output variable.
                    lexer.expect(Token::Paren('('))?;
                    mesh_output.set(lexer.next_ident_with_span()?, name_span)?;
                    lexer.expect(Token::Paren(')'))?;
                }
                "ray_generation" => {
                    lexer.require_enable_extension(
                        ImplementedEnableExtension::WgpuRayTracingPipeline,
                        name_span,
                    )?;
                    stage.set(ShaderStage::RayGeneration, name_span)?;
                    shader_stage_error_span = name_span;
                }
                "any_hit" => {
                    lexer.require_enable_extension(
                        ImplementedEnableExtension::WgpuRayTracingPipeline,
                        name_span,
                    )?;
                    stage.set(ShaderStage::AnyHit, name_span)?;
                    shader_stage_error_span = name_span;
                }
                "closest_hit" => {
                    lexer.require_enable_extension(
                        ImplementedEnableExtension::WgpuRayTracingPipeline,
                        name_span,
                    )?;
                    stage.set(ShaderStage::ClosestHit, name_span)?;
                    shader_stage_error_span = name_span;
                }
                "miss" => {
                    lexer.require_enable_extension(
                        ImplementedEnableExtension::WgpuRayTracingPipeline,
                        name_span,
                    )?;
                    stage.set(ShaderStage::Miss, name_span)?;
                    shader_stage_error_span = name_span;
                }
                "payload" => {
                    lexer.require_enable_extension(
                        ImplementedEnableExtension::WgpuMeshShader,
                        name_span,
                    )?;
                    lexer.expect(Token::Paren('('))?;
                    payload.set(lexer.next_ident_with_span()?, name_span)?;
                    lexer.expect(Token::Paren(')'))?;
                }
                "incoming_payload" => {
                    lexer.require_enable_extension(
                        ImplementedEnableExtension::WgpuRayTracingPipeline,
                        name_span,
                    )?;
                    lexer.expect(Token::Paren('('))?;
                    incoming_payload.set(lexer.next_ident_with_span()?, name_span)?;
                    lexer.expect(Token::Paren(')'))?;
                }
                "workgroup_size" => {
                    lexer.expect(Token::Paren('('))?;
                    // One to three dimensions; unspecified dimensions stay
                    // `None`. A trailing comma before `)` is allowed.
                    let mut new_workgroup_size = [None; 3];
                    for size in new_workgroup_size.iter_mut() {
                        *size = Some(self.expression(lexer, &mut ctx)?);
                        match lexer.next() {
                            (Token::Paren(')'), _) => break,
                            (Token::Separator(','), _) => {
                                if lexer.next_if(Token::Paren(')')) {
                                    break;
                                }
                            }
                            other => {
                                return Err(Box::new(Error::Unexpected(
                                    other.1,
                                    ExpectedToken::WorkgroupSizeSeparator,
                                )))
                            }
                        }
                    }
                    workgroup_size.set(new_workgroup_size, name_span)?;
                }
                "early_depth_test" => {
                    lexer.expect(Token::Paren('('))?;
                    let (ident, ident_span) = lexer.next_ident_with_span()?;
                    // `force`, or a conservative-depth mode under `Allow`.
                    let value = if ident == "force" {
                        crate::EarlyDepthTest::Force
                    } else {
                        crate::EarlyDepthTest::Allow {
                            conservative: conv::map_conservative_depth(ident, ident_span)?,
                        }
                    };
                    lexer.expect(Token::Paren(')'))?;
                    early_depth_test.set(value, name_span)?;
                }
                "must_use" => {
                    must_use.set(name_span, name_span)?;
                }
                "coherent" => {
                    memory_decorations |= crate::MemoryDecorations::COHERENT;
                }
                "volatile" => {
                    memory_decorations |= crate::MemoryDecorations::VOLATILE;
                }
                _ => return Err(Box::new(Error::UnknownAttribute(name_span))),
            }
        }

        let attrib_span = self.pop_rule_span(lexer);
        // `@group` and `@binding` must appear together or not at all.
        match (bind_group.value, bind_index.value) {
            (Some(group), Some(index)) => {
                binding = Some(ast::ResourceBinding {
                    group,
                    binding: index,
                });
            }
            (Some(_), None) => {
                return Err(Box::new(Error::MissingAttribute("binding", attrib_span)))
            }
            (None, Some(_)) => return Err(Box::new(Error::MissingAttribute("group", attrib_span))),
            (None, None) => {}
        }

        let start = lexer.start_byte_offset();
        // `None` means "no declaration produced" (e.g. a bare `;`).
        let kind = match lexer.next() {
            (Token::Separator(';'), _) => {
                ensure_no_diag_attrs(
                    DiagnosticAttributeNotSupportedPosition::SemicolonInModulePosition,
                    diagnostic_filters,
                )?;
                None
            }
            // Directives must precede all global declarations.
            (Token::Word(word), directive_span) if DirectiveKind::from_ident(word).is_some() => {
                return Err(Box::new(Error::DirectiveAfterFirstGlobalDecl {
                    directive_span,
                }));
            }
            (Token::Word("struct"), _) => {
                ensure_no_diag_attrs("`struct`s".into(), diagnostic_filters)?;

                let name = lexer.next_ident()?;

                let members = self.struct_body(lexer, &mut ctx)?;

                Some(ast::GlobalDeclKind::Struct(ast::Struct {
                    name,
                    members,
                    doc_comments,
                }))
            }
            (Token::Word("alias"), _) => {
                ensure_no_diag_attrs("`alias`es".into(), diagnostic_filters)?;

                let name = lexer.next_ident()?;

                lexer.expect(Token::Operation('='))?;
                let ty = self.type_specifier(lexer, &mut ctx)?;
                lexer.expect(Token::Separator(';'))?;
                Some(ast::GlobalDeclKind::Type(ast::TypeAlias { name, ty }))
            }
            (Token::Word("const"), _) => {
                ensure_no_diag_attrs("`const`s".into(), diagnostic_filters)?;

                let (name, ty) = self.optionally_typed_ident(lexer, &mut ctx)?;

                lexer.expect(Token::Operation('='))?;
                let init = self.expression(lexer, &mut ctx)?;
                lexer.expect(Token::Separator(';'))?;

                Some(ast::GlobalDeclKind::Const(ast::Const {
                    name,
                    ty,
                    init,
                    doc_comments,
                }))
            }
            (Token::Word("override"), _) => {
                ensure_no_diag_attrs("`override`s".into(), diagnostic_filters)?;

                let (name, ty) = self.optionally_typed_ident(lexer, &mut ctx)?;

                // The initializer is optional; overrides may be set at
                // pipeline-creation time.
                let init = if lexer.next_if(Token::Operation('=')) {
                    Some(self.expression(lexer, &mut ctx)?)
                } else {
                    None
                };

                lexer.expect(Token::Separator(';'))?;

                Some(ast::GlobalDeclKind::Override(ast::Override {
                    name,
                    id: id.value,
                    ty,
                    init,
                }))
            }
            (Token::Word("var"), _) => {
                ensure_no_diag_attrs("`var`s".into(), diagnostic_filters)?;

                // Attach the attributes gathered above to the variable.
                let mut var = self.variable_decl(lexer, &mut ctx)?;
                var.binding = binding.take();
                var.doc_comments = doc_comments;
                var.memory_decorations = memory_decorations;
                Some(ast::GlobalDeclKind::Var(var))
            }
            (Token::Word("fn"), _) => {
                // Functions are the only declarations that accept
                // `@diagnostic(...)`: chain the filters into the arena.
                let diagnostic_filter_leaf = Self::write_diagnostic_filters(
                    &mut out.diagnostic_filters,
                    diagnostic_filters,
                    out.diagnostic_filter_leaf,
                );

                let function = self.function_decl(
                    lexer,
                    diagnostic_filter_leaf,
                    must_use.value,
                    out,
                    &mut dependencies,
                )?;
                Some(ast::GlobalDeclKind::Fn(ast::Function {
                    // A stage attribute turns the function into an entry
                    // point; validate the stage-specific requirements here.
                    entry_point: if let Some(stage) = stage.value {
                        if stage.compute_like() && workgroup_size.value.is_none() {
                            return Err(Box::new(Error::MissingWorkgroupSize(
                                shader_stage_error_span,
                            )));
                        }

                        // Hit/miss stages require `@incoming_payload`.
                        match stage {
                            ShaderStage::AnyHit | ShaderStage::ClosestHit | ShaderStage::Miss => {
                                if incoming_payload.value.is_none() {
                                    return Err(Box::new(Error::MissingIncomingPayload(
                                        shader_stage_error_span,
                                    )));
                                }
                            }
                            _ => {}
                        }

                        Some(ast::EntryPoint {
                            stage,
                            early_depth_test: early_depth_test.value,
                            workgroup_size: workgroup_size.value,
                            mesh_output_variable: mesh_output.value,
                            task_payload: payload.value,
                            ray_incoming_payload: incoming_payload.value,
                        })
                    } else {
                        None
                    },
                    doc_comments,
                    ..function
                }))
            }
            (Token::Word("const_assert"), _) => {
                ensure_no_diag_attrs("`const_assert`s".into(), diagnostic_filters)?;

                // Parentheses around the condition are optional.
                let paren = lexer.next_if(Token::Paren('('));

                let condition = self.expression(lexer, &mut ctx)?;

                if paren {
                    lexer.expect(Token::Paren(')'))?;
                }
                lexer.expect(Token::Separator(';'))?;
                Some(ast::GlobalDeclKind::ConstAssert(condition))
            }
            (Token::End, _) => return Ok(()),
            (Token::UnterminatedBlockComment(_), span) => {
                return Err(Box::new(Error::UnterminatedBlockComment(span)))
            }
            other => {
                return Err(Box::new(Error::Unexpected(
                    other.1,
                    ExpectedToken::GlobalItem,
                )))
            }
        };

        // `@must_use` is only meaningful on functions.
        if let Some(must_use_span) = must_use.value {
            if !matches!(kind.as_ref(), Some(ast::GlobalDeclKind::Fn(_))) {
                return Err(Box::new(Error::FunctionMustUseOnNonFunction(must_use_span)));
            }
        }

        if let Some(kind) = kind {
            out.decls.append(
                ast::GlobalDecl { kind, dependencies },
                lexer.span_from(start),
            );
        }

        // Sanity check: every pushed rule span must have been popped.
        if !self.rules.is_empty() {
            log::error!("Reached the end of global decl, but rule stack is not empty");
            log::error!("Rules: {:?}", self.rules);
            return Err(Box::new(Error::Internal("rule stack is not empty")));
        };

        // `binding` is `take()`n by the `var` arm; anything left over means
        // `@group`/`@binding` were attached to a non-`var` declaration.
        match binding {
            None => Ok(()),
            Some(_) => Err(Box::new(Error::Internal(
                "we had the attribute but no var?",
            ))),
        }
    }
2236
2237 pub fn parse<'a>(
2238 &mut self,
2239 source: &'a str,
2240 options: &Options,
2241 ) -> Result<'a, ast::TranslationUnit<'a>> {
2242 self.reset();
2243
2244 let mut lexer = Lexer::new(source, !options.parse_doc_comments);
2245 let mut tu = ast::TranslationUnit::default();
2246 let mut enable_extensions = EnableExtensions::empty();
2247 let mut diagnostic_filters = DiagnosticFilterMap::new();
2248
2249 tu.doc_comments = lexer.accumulate_module_doc_comments();
2251
2252 while let (Token::Word(word), _) = lexer.peek() {
2254 if let Some(kind) = DirectiveKind::from_ident(word) {
2255 self.push_rule_span(Rule::Directive, &mut lexer);
2256 let _ = lexer.next_ident_with_span().unwrap();
2257 match kind {
2258 DirectiveKind::Diagnostic => {
2259 let diagnostic_filter = self.diagnostic_filter(&mut lexer)?;
2260 let span = self.peek_rule_span(&lexer);
2261 diagnostic_filters
2262 .add(diagnostic_filter, span, ShouldConflictOnFullDuplicate::No)
2263 .map_err(|e| Box::new(e.into()))?;
2264 lexer.expect(Token::Separator(';'))?;
2265 }
2266 DirectiveKind::Enable => {
2267 self.directive_ident_list(&mut lexer, |ident, span| {
2268 let kind = EnableExtension::from_ident(ident, span)?;
2269 let extension = match kind {
2270 EnableExtension::Implemented(kind) => kind,
2271 EnableExtension::Unimplemented(kind) => {
2272 return Err(Box::new(Error::EnableExtensionNotYetImplemented {
2273 kind,
2274 span,
2275 }))
2276 }
2277 };
2278 let required_capability = extension.capability();
2280 if !options.capabilities.intersects(required_capability) {
2281 return Err(Box::new(Error::EnableExtensionNotSupported {
2282 kind,
2283 span,
2284 }));
2285 }
2286 enable_extensions.add(extension);
2287 Ok(())
2288 })?;
2289 }
2290 DirectiveKind::Requires => {
2291 self.directive_ident_list(&mut lexer, |ident, span| {
2292 match LanguageExtension::from_ident(ident) {
2293 Some(LanguageExtension::Implemented(_kind)) => {
2294 Ok(())
2299 }
2300 Some(LanguageExtension::Unimplemented(kind)) => {
2301 Err(Box::new(Error::LanguageExtensionNotYetImplemented {
2302 kind,
2303 span,
2304 }))
2305 }
2306 None => Err(Box::new(Error::UnknownLanguageExtension(span, ident))),
2307 }
2308 })?;
2309 }
2310 }
2311 self.pop_rule_span(&lexer);
2312 } else {
2313 break;
2314 }
2315 }
2316
2317 lexer.enable_extensions = enable_extensions;
2318 tu.enable_extensions = enable_extensions;
2319 tu.diagnostic_filter_leaf =
2320 Self::write_diagnostic_filters(&mut tu.diagnostic_filters, diagnostic_filters, None);
2321
2322 loop {
2323 match self.global_decl(&mut lexer, &mut tu) {
2324 Err(error) => return Err(error),
2325 Ok(()) => {
2326 if lexer.peek().0 == Token::End {
2327 break;
2328 }
2329 }
2330 }
2331 }
2332
2333 Ok(tu)
2334 }
2335
2336 fn increase_brace_nesting(brace_nesting_level: u8, brace_span: Span) -> Result<'static, u8> {
2337 const BRACE_NESTING_MAXIMUM: u8 = 127;
2345 if brace_nesting_level + 1 > BRACE_NESTING_MAXIMUM {
2346 return Err(Box::new(Error::ExceededLimitForNestedBraces {
2347 span: brace_span,
2348 limit: BRACE_NESTING_MAXIMUM,
2349 }));
2350 }
2351 Ok(brace_nesting_level + 1)
2352 }
2353
2354 fn diagnostic_filter<'a>(&self, lexer: &mut Lexer<'a>) -> Result<'a, DiagnosticFilter> {
2355 lexer.expect(Token::Paren('('))?;
2356
2357 let (severity_control_name, severity_control_name_span) = lexer.next_ident_with_span()?;
2358 let new_severity = diagnostic_filter::Severity::from_wgsl_ident(severity_control_name)
2359 .ok_or(Error::DiagnosticInvalidSeverity {
2360 severity_control_name_span,
2361 })?;
2362
2363 lexer.expect(Token::Separator(','))?;
2364
2365 let (diagnostic_name_token, diagnostic_name_token_span) = lexer.next_ident_with_span()?;
2366 let triggering_rule = if lexer.next_if(Token::Separator('.')) {
2367 let (ident, _span) = lexer.next_ident_with_span()?;
2368 FilterableTriggeringRule::User(Box::new([diagnostic_name_token.into(), ident.into()]))
2369 } else {
2370 let diagnostic_rule_name = diagnostic_name_token;
2371 let diagnostic_rule_name_span = diagnostic_name_token_span;
2372 if let Some(triggering_rule) =
2373 StandardFilterableTriggeringRule::from_wgsl_ident(diagnostic_rule_name)
2374 {
2375 FilterableTriggeringRule::Standard(triggering_rule)
2376 } else {
2377 diagnostic_filter::Severity::Warning.report_wgsl_parse_diag(
2378 Box::new(Error::UnknownDiagnosticRuleName(diagnostic_rule_name_span)),
2379 lexer.source,
2380 )?;
2381 FilterableTriggeringRule::Unknown(diagnostic_rule_name.into())
2382 }
2383 };
2384 let filter = DiagnosticFilter {
2385 triggering_rule,
2386 new_severity,
2387 };
2388 lexer.next_if(Token::Separator(','));
2389 lexer.expect(Token::Paren(')'))?;
2390
2391 Ok(filter)
2392 }
2393
2394 pub(crate) fn write_diagnostic_filters(
2395 arena: &mut Arena<DiagnosticFilterNode>,
2396 filters: DiagnosticFilterMap,
2397 parent: Option<Handle<DiagnosticFilterNode>>,
2398 ) -> Option<Handle<DiagnosticFilterNode>> {
2399 filters
2400 .into_iter()
2401 .fold(parent, |parent, (triggering_rule, (new_severity, span))| {
2402 Some(arena.append(
2403 DiagnosticFilterNode {
2404 inner: DiagnosticFilter {
2405 new_severity,
2406 triggering_rule,
2407 },
2408 parent,
2409 },
2410 span,
2411 ))
2412 })
2413 }
2414}
2415
2416const fn is_start_of_compound_statement<'a>(token: Token<'a>) -> bool {
2417 matches!(token, Token::Attribute | Token::Paren('{'))
2418}