1use alloc::{boxed::Box, vec::Vec};
2use directive::enable_extension::ImplementedEnableExtension;
3
4use crate::diagnostic_filter::{
5 self, DiagnosticFilter, DiagnosticFilterMap, DiagnosticFilterNode, FilterableTriggeringRule,
6 ShouldConflictOnFullDuplicate, StandardFilterableTriggeringRule,
7};
8use crate::front::wgsl::error::{DiagnosticAttributeNotSupportedPosition, Error, ExpectedToken};
9use crate::front::wgsl::parse::directive::enable_extension::{EnableExtension, EnableExtensions};
10use crate::front::wgsl::parse::directive::language_extension::LanguageExtension;
11use crate::front::wgsl::parse::directive::DirectiveKind;
12use crate::front::wgsl::parse::lexer::{Lexer, Token, TokenSpan};
13use crate::front::wgsl::parse::number::Number;
14use crate::front::wgsl::Result;
15use crate::front::SymbolTable;
16use crate::{Arena, FastHashSet, FastIndexSet, Handle, ShaderStage, Span};
17
18pub mod ast;
19pub mod conv;
20pub mod directive;
21pub mod lexer;
22pub mod number;
23
/// State threaded through the expression-parsing routines.
///
/// Bundles the arenas and lookup tables that expression parsing reads and
/// writes while building the AST for a single function body.
struct ExpressionContext<'input, 'temp, 'out> {
    /// Arena receiving every parsed [`ast::Expression`] node.
    expressions: &'out mut Arena<ast::Expression<'input>>,

    /// Maps in-scope local names to their handles, for identifier resolution.
    local_table: &'temp mut SymbolTable<&'input str, Handle<ast::Local>>,

    /// Arena receiving declarations of function-local bindings.
    locals: &'out mut Arena<ast::Local>,

    /// Identifiers not found in `local_table`; recorded here so they can be
    /// resolved later against module-scope declarations.
    unresolved: &'out mut FastIndexSet<ast::Dependency<'input>>,
}
84
impl<'a> ExpressionContext<'a, '_, '_> {
    /// Parse a left-associative run of binary operators at one precedence level.
    ///
    /// `classifier` maps a peeked token to its operator, returning `None` when
    /// the token does not belong to this level; `parser` parses the operands
    /// (the next-tighter level). Operands are folded left-to-right into
    /// [`ast::Expression::Binary`] nodes spanning from the first operand.
    fn parse_binary_op(
        &mut self,
        lexer: &mut Lexer<'a>,
        classifier: impl Fn(Token<'a>) -> Option<crate::BinaryOperator>,
        mut parser: impl FnMut(&mut Lexer<'a>, &mut Self) -> Result<'a, Handle<ast::Expression<'a>>>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        let start = lexer.start_byte_offset();
        let mut accumulator = parser(lexer, self)?;
        while let Some(op) = classifier(lexer.peek().0) {
            // Consume the operator token the classifier just recognized.
            let _ = lexer.next();
            let left = accumulator;
            let right = parser(lexer, self)?;
            accumulator = self.expressions.append(
                ast::Expression::Binary { op, left, right },
                lexer.span_from(start),
            );
        }
        Ok(accumulator)
    }

    /// Declare a new function-local binding named `name`.
    ///
    /// Returns an error if the name is already bound in the current scope;
    /// otherwise returns the handle of the freshly appended local.
    fn declare_local(&mut self, name: ast::Ident<'a>) -> Result<'a, Handle<ast::Local>> {
        let handle = self.locals.append(ast::Local, name.span);
        if let Some(old) = self.local_table.add(name.name, handle) {
            Err(Box::new(Error::Redefinition {
                previous: self.locals.get_span(old),
                current: name.span,
            }))
        } else {
            Ok(handle)
        }
    }
}
118
/// Grammar productions the parser can currently be inside of.
///
/// A stack of `(Rule, start_offset)` pairs is kept in `Parser::rules` so
/// spans can be computed for diagnostics, and so inner parsers can query
/// their enclosing context (see `Parser::race_rules`).
#[derive(Copy, Clone, Debug, PartialEq)]
enum Rule {
    Attribute,
    VariableDecl,
    FunctionDecl,
    Block,
    Statement,
    PrimaryExpr,
    SingularExpr,
    UnaryExpr,
    GeneralExpr,
    Directive,
    GenericExpr,
    EnclosedExpr,
    LhsExpr,
}
140
/// Holds a single attribute's value, so a second occurrence of the same
/// attribute can be rejected (see [`ParsedAttribute::set`]).
struct ParsedAttribute<T> {
    // `None` until the attribute is seen for the first time.
    value: Option<T>,
}
144
145impl<T> Default for ParsedAttribute<T> {
146 fn default() -> Self {
147 Self { value: None }
148 }
149}
150
151impl<T> ParsedAttribute<T> {
152 fn set(&mut self, value: T, name_span: Span) -> Result<'static, ()> {
153 if self.value.is_some() {
154 return Err(Box::new(Error::RepeatedAttribute(name_span)));
155 }
156 self.value = Some(value);
157 Ok(())
158 }
159}
160
/// Accumulates the IO-binding attributes (`@location`, `@builtin`,
/// `@interpolate`, …) seen on one declaration; `finish` validates the
/// combination and turns it into an [`ast::Binding`].
#[derive(Default)]
struct BindingParser<'a> {
    location: ParsedAttribute<Handle<ast::Expression<'a>>>,
    built_in: ParsedAttribute<crate::BuiltIn>,
    interpolation: ParsedAttribute<crate::Interpolation>,
    sampling: ParsedAttribute<crate::Sampling>,
    invariant: ParsedAttribute<bool>,
    blend_src: ParsedAttribute<Handle<ast::Expression<'a>>>,
    per_primitive: ParsedAttribute<()>,
}
171
impl<'a> BindingParser<'a> {
    /// Parse one binding attribute whose name (`name`) has already been
    /// consumed, recording it in `self`.
    ///
    /// Repeated attributes are rejected by [`ParsedAttribute::set`];
    /// attributes gated on an enable extension (`blend_src`,
    /// `per_primitive`) check that the extension is enabled first.
    fn parse(
        &mut self,
        parser: &mut Parser,
        lexer: &mut Lexer<'a>,
        name: &'a str,
        name_span: Span,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, ()> {
        match name {
            "location" => {
                lexer.expect(Token::Paren('('))?;
                self.location
                    .set(parser.expression(lexer, ctx)?, name_span)?;
                // Optional trailing comma before the closing paren.
                lexer.next_if(Token::Separator(','));
                lexer.expect(Token::Paren(')'))?;
            }
            "builtin" => {
                lexer.expect(Token::Paren('('))?;
                let (raw, span) = lexer.next_ident_with_span()?;
                self.built_in.set(
                    conv::map_built_in(&lexer.enable_extensions, raw, span)?,
                    name_span,
                )?;
                lexer.next_if(Token::Separator(','));
                lexer.expect(Token::Paren(')'))?;
            }
            "interpolate" => {
                lexer.expect(Token::Paren('('))?;
                let (raw, span) = lexer.next_ident_with_span()?;
                self.interpolation.set(
                    conv::map_interpolation(&lexer.enable_extensions, raw, span)?,
                    name_span,
                )?;
                // A second argument (the sampling mode) is optional; a comma
                // directly followed by ')' is just a trailing comma.
                if lexer.next_if(Token::Separator(','))
                    && !matches!(lexer.peek().0, Token::Paren(')'))
                {
                    let (raw, span) = lexer.next_ident_with_span()?;
                    self.sampling
                        .set(conv::map_sampling(raw, span)?, name_span)?;
                    lexer.next_if(Token::Separator(','));
                }
                lexer.expect(Token::Paren(')'))?;
            }

            // `invariant` takes no arguments.
            "invariant" => {
                self.invariant.set(true, name_span)?;
            }
            "blend_src" => {
                lexer.require_enable_extension(
                    ImplementedEnableExtension::DualSourceBlending,
                    name_span,
                )?;

                lexer.expect(Token::Paren('('))?;
                self.blend_src
                    .set(parser.expression(lexer, ctx)?, name_span)?;
                lexer.next_if(Token::Separator(','));
                lexer.expect(Token::Paren(')'))?;
            }
            "per_primitive" => {
                lexer.require_enable_extension(
                    ImplementedEnableExtension::WgpuMeshShader,
                    name_span,
                )?;
                self.per_primitive.set((), name_span)?;
            }
            _ => return Err(Box::new(Error::UnknownAttribute(name_span))),
        }
        Ok(())
    }

    /// Validate the accumulated attributes and build the binding.
    ///
    /// Returns `Ok(None)` when no binding attribute was present at all, a
    /// `Location` or `BuiltIn` binding for the legal combinations, and
    /// `Error::InconsistentBinding` for any other mix (e.g. `invariant`
    /// on a non-position builtin, or `location` together with `builtin`).
    fn finish(self, span: Span) -> Result<'a, Option<ast::Binding<'a>>> {
        match (
            self.location.value,
            self.built_in.value,
            self.interpolation.value,
            self.sampling.value,
            self.invariant.value.unwrap_or_default(),
            self.blend_src.value,
            self.per_primitive.value,
        ) {
            (None, None, None, None, false, None, None) => Ok(None),
            (Some(location), None, interpolation, sampling, false, blend_src, per_primitive) => {
                Ok(Some(ast::Binding::Location {
                    location,
                    interpolation,
                    sampling,
                    blend_src,
                    per_primitive: per_primitive.is_some(),
                }))
            }
            // `invariant` is only meaningful on the position builtin.
            (None, Some(crate::BuiltIn::Position { .. }), None, None, invariant, None, None) => {
                Ok(Some(ast::Binding::BuiltIn(crate::BuiltIn::Position {
                    invariant,
                })))
            }
            (None, Some(built_in), None, None, false, None, None) => {
                Ok(Some(ast::Binding::BuiltIn(built_in)))
            }
            (_, _, _, _, _, _, _) => Err(Box::new(Error::InconsistentBinding(span))),
        }
    }
}
280
/// Configuration for the WGSL front end.
pub struct Options {
    /// Whether `///` doc comments should be collected into the AST.
    pub parse_doc_comments: bool,
    /// Capabilities assumed available while parsing.
    pub capabilities: crate::valid::Capabilities,
}
288
289impl Options {
290 pub const fn new() -> Self {
292 Options {
293 parse_doc_comments: false,
294 capabilities: crate::valid::Capabilities::all(),
295 }
296 }
297}
298
/// Recursive-descent parser state for WGSL.
pub struct Parser {
    // Stack of open grammar productions with their start byte offsets;
    // used for span computation and enclosing-rule queries.
    rules: Vec<(Rule, usize)>,
    // Current nesting depth of `track_recursion`, bounded to avoid
    // overflowing the native stack on pathological input.
    recursion_depth: u32,
}
303
304impl Parser {
305 pub const fn new() -> Self {
306 Parser {
307 rules: Vec::new(),
308 recursion_depth: 0,
309 }
310 }
311
    /// Restore the parser to its initial state so it can be reused for
    /// another parse.
    fn reset(&mut self) {
        self.rules.clear();
        self.recursion_depth = 0;
    }
316
    /// Open a grammar production, remembering where it starts in the source.
    fn push_rule_span(&mut self, rule: Rule, lexer: &mut Lexer<'_>) {
        self.rules.push((rule, lexer.start_byte_offset()));
    }
320
321 fn pop_rule_span(&mut self, lexer: &Lexer<'_>) -> Span {
322 let (_, initial) = self.rules.pop().unwrap();
323 lexer.span_from(initial)
324 }
325
326 fn peek_rule_span(&mut self, lexer: &Lexer<'_>) -> Span {
327 let &(_, initial) = self.rules.last().unwrap();
328 lexer.span_from(initial)
329 }
330
331 fn race_rules(&self, rule0: Rule, rule1: Rule) -> Option<Rule> {
332 Some(
333 self.rules
334 .iter()
335 .rev()
336 .find(|&x| x.0 == rule0 || x.0 == rule1)?
337 .0,
338 )
339 }
340
341 fn track_recursion<'a, F, R>(&mut self, f: F) -> Result<'a, R>
342 where
343 F: FnOnce(&mut Self) -> Result<'a, R>,
344 {
345 self.recursion_depth += 1;
346 if self.recursion_depth >= 200 {
347 return Err(Box::new(Error::Internal("Parser recursion limit exceeded")));
348 }
349 let ret = f(self);
350 self.recursion_depth -= 1;
351 ret
352 }
353
354 fn switch_value<'a>(
355 &mut self,
356 lexer: &mut Lexer<'a>,
357 ctx: &mut ExpressionContext<'a, '_, '_>,
358 ) -> Result<'a, ast::SwitchValue<'a>> {
359 if lexer.next_if(Token::Word("default")) {
360 return Ok(ast::SwitchValue::Default);
361 }
362
363 let expr = self.expression(lexer, ctx)?;
364 Ok(ast::SwitchValue::Expr(expr))
365 }
366
    /// Parse a parenthesized, comma-separated argument list: `( expr, … )`.
    ///
    /// The opening paren is consumed via `lexer.open_arguments()`. An empty
    /// list `()` is allowed; `next_argument` handles the separator/closing
    /// paren between subsequent arguments.
    fn arguments<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Vec<Handle<ast::Expression<'a>>>> {
        // Argument expressions are enclosed in parens, so `<`/`>` inside
        // them parse as comparisons, not template delimiters.
        self.push_rule_span(Rule::EnclosedExpr, lexer);
        lexer.open_arguments()?;
        let mut arguments = Vec::new();
        loop {
            if !arguments.is_empty() {
                // After the first argument, require a separator or the
                // closing paren (which ends the list).
                if !lexer.next_argument()? {
                    break;
                }
            } else if lexer.next_if(Token::Paren(')')) {
                // Empty argument list.
                break;
            }
            let arg = self.expression(lexer, ctx)?;
            arguments.push(arg);
        }

        self.pop_rule_span(lexer);
        Ok(arguments)
    }
391
    /// Parse an expression while the `EnclosedExpr` rule is open, so that
    /// `<`/`>` tokens inside it are treated as comparison operators rather
    /// than template-list delimiters.
    fn enclosed_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.push_rule_span(Rule::EnclosedExpr, lexer);
        let expr = self.expression(lexer, ctx)?;
        self.pop_rule_span(lexer);
        Ok(expr)
    }
402
403 fn ident_expr<'a>(
404 &mut self,
405 name: &'a str,
406 name_span: Span,
407 ctx: &mut ExpressionContext<'a, '_, '_>,
408 ) -> ast::IdentExpr<'a> {
409 match ctx.local_table.lookup(name) {
410 Some(&local) => ast::IdentExpr::Local(local),
411 None => {
412 ctx.unresolved.insert(ast::Dependency {
413 ident: name,
414 usage: name_span,
415 });
416 ast::IdentExpr::Unresolved(name)
417 }
418 }
419 }
420
    /// Parse a primary expression whose first token (`token`) has already
    /// been consumed: a parenthesized expression, a literal (including the
    /// predeclared ray-tracing constants), or a possibly-templated
    /// identifier that may start a call.
    fn primary_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
        token: TokenSpan<'a>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.push_rule_span(Rule::PrimaryExpr, lexer);

        // Helpers for the predeclared ray-tracing constants, which lower
        // to plain u32 literals.
        const fn literal_ray_flag<'b>(flag: crate::RayFlag) -> ast::Expression<'b> {
            ast::Expression::Literal(ast::Literal::Number(Number::U32(flag.bits())))
        }
        const fn literal_ray_intersection<'b>(
            intersection: crate::RayQueryIntersection,
        ) -> ast::Expression<'b> {
            ast::Expression::Literal(ast::Literal::Number(Number::U32(intersection as u32)))
        }

        let expr = match token {
            (Token::Paren('('), _) => {
                // Parenthesized expression: the handle is appended inside
                // `enclosed_expression`, so return it directly.
                let expr = self.enclosed_expression(lexer, ctx)?;
                lexer.expect(Token::Paren(')'))?;
                self.pop_rule_span(lexer);
                return Ok(expr);
            }
            (Token::Word("true"), _) => ast::Expression::Literal(ast::Literal::Bool(true)),
            (Token::Word("false"), _) => ast::Expression::Literal(ast::Literal::Bool(false)),
            (Token::Number(res), span) => {
                let num = res.map_err(|err| Error::BadNumber(span, err))?;

                // Some numeric literal forms (e.g. f16) are gated on an
                // enable extension.
                if let Some(enable_extension) = num.requires_enable_extension() {
                    lexer.require_enable_extension(enable_extension, span)?;
                }

                ast::Expression::Literal(ast::Literal::Number(num))
            }
            (Token::Word("RAY_FLAG_NONE"), _) => literal_ray_flag(crate::RayFlag::empty()),
            (Token::Word("RAY_FLAG_FORCE_OPAQUE"), _) => {
                literal_ray_flag(crate::RayFlag::FORCE_OPAQUE)
            }
            (Token::Word("RAY_FLAG_FORCE_NO_OPAQUE"), _) => {
                literal_ray_flag(crate::RayFlag::FORCE_NO_OPAQUE)
            }
            (Token::Word("RAY_FLAG_TERMINATE_ON_FIRST_HIT"), _) => {
                literal_ray_flag(crate::RayFlag::TERMINATE_ON_FIRST_HIT)
            }
            (Token::Word("RAY_FLAG_SKIP_CLOSEST_HIT_SHADER"), _) => {
                literal_ray_flag(crate::RayFlag::SKIP_CLOSEST_HIT_SHADER)
            }
            (Token::Word("RAY_FLAG_CULL_BACK_FACING"), _) => {
                literal_ray_flag(crate::RayFlag::CULL_BACK_FACING)
            }
            (Token::Word("RAY_FLAG_CULL_FRONT_FACING"), _) => {
                literal_ray_flag(crate::RayFlag::CULL_FRONT_FACING)
            }
            (Token::Word("RAY_FLAG_CULL_OPAQUE"), _) => {
                literal_ray_flag(crate::RayFlag::CULL_OPAQUE)
            }
            (Token::Word("RAY_FLAG_CULL_NO_OPAQUE"), _) => {
                literal_ray_flag(crate::RayFlag::CULL_NO_OPAQUE)
            }
            (Token::Word("RAY_FLAG_SKIP_TRIANGLES"), _) => {
                literal_ray_flag(crate::RayFlag::SKIP_TRIANGLES)
            }
            (Token::Word("RAY_FLAG_SKIP_AABBS"), _) => literal_ray_flag(crate::RayFlag::SKIP_AABBS),
            (Token::Word("RAY_QUERY_INTERSECTION_NONE"), _) => {
                literal_ray_intersection(crate::RayQueryIntersection::None)
            }
            (Token::Word("RAY_QUERY_INTERSECTION_TRIANGLE"), _) => {
                literal_ray_intersection(crate::RayQueryIntersection::Triangle)
            }
            (Token::Word("RAY_QUERY_INTERSECTION_GENERATED"), _) => {
                literal_ray_intersection(crate::RayQueryIntersection::Generated)
            }
            (Token::Word("RAY_QUERY_INTERSECTION_AABB"), _) => {
                literal_ray_intersection(crate::RayQueryIntersection::Aabb)
            }
            (Token::Word(word), span) => {
                // Identifier with optional template list; a following '('
                // makes it a call expression.
                let ident = self.template_elaborated_ident(word, span, lexer, ctx)?;

                if let Token::Paren('(') = lexer.peek().0 {
                    let arguments = self.arguments(lexer, ctx)?;
                    ast::Expression::Call(ast::CallPhrase {
                        function: ident,
                        arguments,
                    })
                } else {
                    ast::Expression::Ident(ident)
                }
            }
            other => {
                return Err(Box::new(Error::Unexpected(
                    other.1,
                    ExpectedToken::PrimaryExpression,
                )))
            }
        };

        self.pop_rule_span(lexer);
        let span = lexer.span_with_start(token.1);
        let expr = ctx.expressions.append(expr, span);
        Ok(expr)
    }
523
    /// Parse any number of postfix accesses on `expr`: `.member` and
    /// `[index]`, left-associatively.
    ///
    /// `expr_start` is the start of the base expression, so each appended
    /// node's span covers the whole chain so far.
    fn component_or_swizzle_specifier<'a>(
        &mut self,
        expr_start: Span,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
        expr: Handle<ast::Expression<'a>>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        let mut expr = expr;

        loop {
            let expression = match lexer.peek().0 {
                Token::Separator('.') => {
                    let _ = lexer.next();
                    let field = lexer.next_ident()?;

                    ast::Expression::Member { base: expr, field }
                }
                Token::Paren('[') => {
                    let _ = lexer.next();
                    // The index is enclosed in brackets, so template-list
                    // ambiguity does not apply inside it.
                    let index = self.enclosed_expression(lexer, ctx)?;
                    lexer.expect(Token::Paren(']'))?;

                    ast::Expression::Index { base: expr, index }
                }
                // No further postfix operator: the chain ends here.
                _ => break,
            };

            let span = lexer.span_with_start(expr_start);
            expr = ctx.expressions.append(expression, span);
        }

        Ok(expr)
    }
557
    /// Parse a unary expression: a run of prefix operators (`-`, `!`, `~`,
    /// `*`, `&`) followed by a singular expression.
    ///
    /// Prefix operators are collected first and then applied innermost-first
    /// (in reverse collection order) around the operand.
    fn unary_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.push_rule_span(Rule::UnaryExpr, lexer);

        enum UnaryOp {
            Negate,
            LogicalNot,
            BitwiseNot,
            Deref,
            AddrOf,
        }

        let mut ops = Vec::new();
        let mut expr;

        loop {
            match lexer.next() {
                (Token::Operation('-'), span) => {
                    ops.push((UnaryOp::Negate, span));
                }
                (Token::Operation('!'), span) => {
                    ops.push((UnaryOp::LogicalNot, span));
                }
                (Token::Operation('~'), span) => {
                    ops.push((UnaryOp::BitwiseNot, span));
                }
                (Token::Operation('*'), span) => {
                    ops.push((UnaryOp::Deref, span));
                }
                (Token::Operation('&'), span) => {
                    ops.push((UnaryOp::AddrOf, span));
                }
                // First non-operator token starts the operand.
                token => {
                    expr = self.singular_expression(lexer, ctx, token)?;
                    break;
                }
            };
        }

        // Apply the collected prefixes from innermost (last seen) outward.
        for (op, span) in ops.into_iter().rev() {
            let e = match op {
                UnaryOp::Negate => ast::Expression::Unary {
                    op: crate::UnaryOperator::Negate,
                    expr,
                },
                UnaryOp::LogicalNot => ast::Expression::Unary {
                    op: crate::UnaryOperator::LogicalNot,
                    expr,
                },
                UnaryOp::BitwiseNot => ast::Expression::Unary {
                    op: crate::UnaryOperator::BitwiseNot,
                    expr,
                },
                UnaryOp::Deref => ast::Expression::Deref(expr),
                UnaryOp::AddrOf => ast::Expression::AddrOf(expr),
            };
            let span = lexer.span_with_start(span);
            expr = ctx.expressions.append(e, span);
        }

        self.pop_rule_span(lexer);
        Ok(expr)
    }
625
    /// Parse the left-hand side of an assignment: `*`/`&` prefixes,
    /// parenthesized LHS, or an identifier with postfix accesses.
    ///
    /// `token` is the already-consumed first token when the caller has one,
    /// otherwise the next token is taken from the lexer. LHS identifiers
    /// never carry template lists, so the ident is built directly rather
    /// than via `template_elaborated_ident`.
    fn lhs_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
        token: Option<TokenSpan<'a>>,
        expected_token: ExpectedToken<'a>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        // Recursion is bounded: `*(*(*(...)))` nests through this method.
        self.track_recursion(|this| {
            this.push_rule_span(Rule::LhsExpr, lexer);
            let token = token.unwrap_or_else(|| lexer.next());
            let expr = match token {
                (Token::Operation('*'), _) => {
                    let expr =
                        this.lhs_expression(lexer, ctx, None, ExpectedToken::LhsExpression)?;
                    let expr = ast::Expression::Deref(expr);
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                (Token::Operation('&'), _) => {
                    let expr =
                        this.lhs_expression(lexer, ctx, None, ExpectedToken::LhsExpression)?;
                    let expr = ast::Expression::AddrOf(expr);
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                (Token::Paren('('), span) => {
                    let expr =
                        this.lhs_expression(lexer, ctx, None, ExpectedToken::LhsExpression)?;
                    lexer.expect(Token::Paren(')'))?;
                    this.component_or_swizzle_specifier(span, lexer, ctx, expr)?
                }
                (Token::Word(word), span) => {
                    let ident = this.ident_expr(word, span, ctx);
                    // No template list is possible on an LHS identifier.
                    let ident = ast::TemplateElaboratedIdent {
                        ident,
                        ident_span: span,
                        template_list: Vec::new(),
                        template_list_span: Span::UNDEFINED,
                    };
                    let ident = ctx.expressions.append(ast::Expression::Ident(ident), span);
                    this.component_or_swizzle_specifier(span, lexer, ctx, ident)?
                }
                (_, span) => {
                    return Err(Box::new(Error::Unexpected(span, expected_token)));
                }
            };

            this.pop_rule_span(lexer);
            Ok(expr)
        })
    }
681
682 fn singular_expression<'a>(
684 &mut self,
685 lexer: &mut Lexer<'a>,
686 ctx: &mut ExpressionContext<'a, '_, '_>,
687 token: TokenSpan<'a>,
688 ) -> Result<'a, Handle<ast::Expression<'a>>> {
689 self.push_rule_span(Rule::SingularExpr, lexer);
690 let primary_expr = self.primary_expression(lexer, ctx, token)?;
691 let singular_expr =
692 self.component_or_swizzle_specifier(token.1, lexer, ctx, primary_expr)?;
693 self.pop_rule_span(lexer);
694
695 Ok(singular_expr)
696 }
697
    /// Parse the precedence chain from equality down to unary:
    /// `== !=` → `< > <= >=` → `<< >>` → `+ -` → `* / %` → unary.
    ///
    /// When the innermost enclosing rule is `GenericExpr` (inside a
    /// template list, rather than parens), `>`-shaped tokens are NOT
    /// treated as operators, so the template list's closing `>` is left
    /// for the template parser; only `<`-shaped comparisons/shifts remain.
    fn equality_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        context: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        context.parse_binary_op(
            lexer,
            |token| match token {
                Token::LogicalOperation('=') => Some(crate::BinaryOperator::Equal),
                Token::LogicalOperation('!') => Some(crate::BinaryOperator::NotEqual),
                _ => None,
            },
            |lexer, context| {
                // Which disambiguation context is closest: template list or
                // parenthesized expression?
                let enclosing = self.race_rules(Rule::GenericExpr, Rule::EnclosedExpr);
                context.parse_binary_op(
                    lexer,
                    match enclosing {
                        Some(Rule::GenericExpr) => |token| match token {
                            Token::LogicalOperation('<') => Some(crate::BinaryOperator::LessEqual),
                            _ => None,
                        },
                        _ => |token| match token {
                            Token::Paren('<') => Some(crate::BinaryOperator::Less),
                            Token::Paren('>') => Some(crate::BinaryOperator::Greater),
                            Token::LogicalOperation('<') => Some(crate::BinaryOperator::LessEqual),
                            Token::LogicalOperation('>') => {
                                Some(crate::BinaryOperator::GreaterEqual)
                            }
                            _ => None,
                        },
                    },
                    |lexer, context| {
                        context.parse_binary_op(
                            lexer,
                            match enclosing {
                                Some(Rule::GenericExpr) => |token| match token {
                                    Token::ShiftOperation('<') => {
                                        Some(crate::BinaryOperator::ShiftLeft)
                                    }
                                    _ => None,
                                },
                                _ => |token| match token {
                                    Token::ShiftOperation('<') => {
                                        Some(crate::BinaryOperator::ShiftLeft)
                                    }
                                    Token::ShiftOperation('>') => {
                                        Some(crate::BinaryOperator::ShiftRight)
                                    }
                                    _ => None,
                                },
                            },
                            |lexer, context| {
                                context.parse_binary_op(
                                    lexer,
                                    |token| match token {
                                        Token::Operation('+') => Some(crate::BinaryOperator::Add),
                                        Token::Operation('-') => {
                                            Some(crate::BinaryOperator::Subtract)
                                        }
                                        _ => None,
                                    },
                                    |lexer, context| {
                                        context.parse_binary_op(
                                            lexer,
                                            |token| match token {
                                                Token::Operation('*') => {
                                                    Some(crate::BinaryOperator::Multiply)
                                                }
                                                Token::Operation('/') => {
                                                    Some(crate::BinaryOperator::Divide)
                                                }
                                                Token::Operation('%') => {
                                                    Some(crate::BinaryOperator::Modulo)
                                                }
                                                _ => None,
                                            },
                                            |lexer, context| self.unary_expression(lexer, context),
                                        )
                                    },
                                )
                            },
                        )
                    },
                )
            },
        )
    }
790
    /// Parse a full expression: the precedence chain
    /// `||` → `&&` → `|` → `^` → `&` → equality-and-below.
    ///
    /// Wrapped in `track_recursion` since expressions nest through calls,
    /// parens and template lists.
    fn expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        context: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.track_recursion(|this| {
            this.push_rule_span(Rule::GeneralExpr, lexer);
            let handle = context.parse_binary_op(
                lexer,
                |token| match token {
                    Token::LogicalOperation('|') => Some(crate::BinaryOperator::LogicalOr),
                    _ => None,
                },
                |lexer, context| {
                    context.parse_binary_op(
                        lexer,
                        |token| match token {
                            Token::LogicalOperation('&') => Some(crate::BinaryOperator::LogicalAnd),
                            _ => None,
                        },
                        |lexer, context| {
                            context.parse_binary_op(
                                lexer,
                                |token| match token {
                                    Token::Operation('|') => {
                                        Some(crate::BinaryOperator::InclusiveOr)
                                    }
                                    _ => None,
                                },
                                |lexer, context| {
                                    context.parse_binary_op(
                                        lexer,
                                        |token| match token {
                                            Token::Operation('^') => {
                                                Some(crate::BinaryOperator::ExclusiveOr)
                                            }
                                            _ => None,
                                        },
                                        |lexer, context| {
                                            context.parse_binary_op(
                                                lexer,
                                                |token| match token {
                                                    Token::Operation('&') => {
                                                        Some(crate::BinaryOperator::And)
                                                    }
                                                    _ => None,
                                                },
                                                |lexer, context| {
                                                    this.equality_expression(lexer, context)
                                                },
                                            )
                                        },
                                    )
                                },
                            )
                        },
                    )
                },
            )?;
            this.pop_rule_span(lexer);
            Ok(handle)
        })
    }
859
860 fn optionally_typed_ident<'a>(
861 &mut self,
862 lexer: &mut Lexer<'a>,
863 ctx: &mut ExpressionContext<'a, '_, '_>,
864 ) -> Result<'a, (ast::Ident<'a>, Option<ast::TemplateElaboratedIdent<'a>>)> {
865 let name = lexer.next_ident()?;
866
867 let ty = if lexer.next_if(Token::Separator(':')) {
868 Some(self.type_specifier(lexer, ctx)?)
869 } else {
870 None
871 };
872
873 Ok((name, ty))
874 }
875
    /// Parse the remainder of a `var` declaration after the `var` keyword:
    /// optional template list (address space etc.), name, optional type,
    /// optional initializer, terminating `;`.
    ///
    /// The binding attributes are filled in by the caller (hence
    /// `binding: None` here).
    fn variable_decl<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, ast::GlobalVariable<'a>> {
        self.push_rule_span(Rule::VariableDecl, lexer);
        let (template_list, _) = self.maybe_template_list(lexer, ctx)?;
        let (name, ty) = self.optionally_typed_ident(lexer, ctx)?;

        let init = if lexer.next_if(Token::Operation('=')) {
            let handle = self.expression(lexer, ctx)?;
            Some(handle)
        } else {
            None
        };
        lexer.expect(Token::Separator(';'))?;
        self.pop_rule_span(lexer);

        Ok(ast::GlobalVariable {
            name,
            template_list,
            binding: None,
            ty,
            init,
            doc_comments: Vec::new(),
            memory_decorations: crate::MemoryDecorations::empty(),
        })
    }
905
    /// Parse a struct body: `{ member, member, … }` with optional trailing
    /// comma, where each member may carry `@size`, `@align` and binding
    /// attributes plus doc comments.
    ///
    /// Duplicate member names are rejected with a `Redefinition` error
    /// pointing at the earlier declaration.
    fn struct_body<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Vec<ast::StructMember<'a>>> {
        let mut members = Vec::new();
        let mut member_names = FastHashSet::default();

        lexer.expect(Token::Paren('{'))?;
        // `ready` is true when another member may legally start (at the
        // beginning, and after each comma).
        let mut ready = true;
        while !lexer.next_if(Token::Paren('}')) {
            if !ready {
                return Err(Box::new(Error::Unexpected(
                    lexer.next().1,
                    ExpectedToken::Token(Token::Separator(',')),
                )));
            }

            let doc_comments = lexer.accumulate_doc_comments();

            let (mut size, mut align) = (ParsedAttribute::default(), ParsedAttribute::default());
            self.push_rule_span(Rule::Attribute, lexer);
            let mut bind_parser = BindingParser::default();
            while lexer.next_if(Token::Attribute) {
                match lexer.next_ident_with_span()? {
                    ("size", name_span) => {
                        lexer.expect(Token::Paren('('))?;
                        let expr = self.expression(lexer, ctx)?;
                        lexer.next_if(Token::Separator(','));
                        lexer.expect(Token::Paren(')'))?;
                        size.set(expr, name_span)?;
                    }
                    ("align", name_span) => {
                        lexer.expect(Token::Paren('('))?;
                        let expr = self.expression(lexer, ctx)?;
                        lexer.next_if(Token::Separator(','));
                        lexer.expect(Token::Paren(')'))?;
                        align.set(expr, name_span)?;
                    }
                    // Any other attribute name is a binding attribute.
                    (word, word_span) => bind_parser.parse(self, lexer, word, word_span, ctx)?,
                }
            }

            let bind_span = self.pop_rule_span(lexer);
            let binding = bind_parser.finish(bind_span)?;

            let name = lexer.next_ident()?;
            lexer.expect(Token::Separator(':'))?;
            let ty = self.type_specifier(lexer, ctx)?;
            ready = lexer.next_if(Token::Separator(','));

            members.push(ast::StructMember {
                name,
                ty,
                binding,
                size: size.value,
                align: align.value,
                doc_comments,
            });

            // Duplicate check after pushing: `find` scans from the front,
            // so it reports the span of the earlier occurrence.
            if !member_names.insert(name.name) {
                return Err(Box::new(Error::Redefinition {
                    previous: members
                        .iter()
                        .find(|x| x.name.name == name.name)
                        .map(|x| x.name.span)
                        .unwrap(),
                    current: name.span,
                }));
            }
        }

        Ok(members)
    }
980
    /// Parse an optional template list `<expr, expr, …>` (trailing comma
    /// allowed).
    ///
    /// Returns an empty vector and `Span::UNDEFINED` when no template list
    /// is present. Requires at least one argument when the opening
    /// delimiter is present.
    fn maybe_template_list<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, (Vec<Handle<ast::Expression<'a>>>, Span)> {
        let start = lexer.start_byte_offset();
        if lexer.next_if(Token::TemplateArgsStart) {
            let mut args = Vec::new();
            args.push(self.expression(lexer, ctx)?);
            // A comma directly followed by the closing delimiter is just a
            // trailing comma, not the start of another argument.
            while lexer.next_if(Token::Separator(',')) && lexer.peek().0 != Token::TemplateArgsEnd {
                args.push(self.expression(lexer, ctx)?);
            }
            lexer.expect(Token::TemplateArgsEnd)?;
            let span = lexer.span_from(start);
            Ok((args, span))
        } else {
            Ok((Vec::new(), Span::UNDEFINED))
        }
    }
1000
    /// Build a template-elaborated identifier from an already-consumed word
    /// (`word`, at `span`) plus any template list that follows it.
    fn template_elaborated_ident<'a>(
        &mut self,
        word: &'a str,
        span: Span,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, ast::TemplateElaboratedIdent<'a>> {
        let ident = self.ident_expr(word, span, ctx);
        let (template_list, template_list_span) = self.maybe_template_list(lexer, ctx)?;
        Ok(ast::TemplateElaboratedIdent {
            ident,
            ident_span: span,
            template_list,
            template_list_span,
        })
    }
1017
    /// Parse a type specifier: an identifier with an optional template
    /// list (e.g. `f32` or `array<u32, 4>`).
    fn type_specifier<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, ast::TemplateElaboratedIdent<'a>> {
        let (name, span) = lexer.next_ident_with_span()?;
        self.template_elaborated_ident(name, span, lexer, ctx)
    }
1026
    /// Parse a variable-updating statement whose first token (`token`) has
    /// already been consumed: phony assignment (`_ = expr`), plain or
    /// compound assignment, or increment/decrement. The statement is
    /// appended to `block`.
    fn variable_updating_statement<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
        block: &mut ast::Block<'a>,
        token: TokenSpan<'a>,
        expected_token: ExpectedToken<'a>,
    ) -> Result<'a, ()> {
        // Phony assignment: `_ = expr;` evaluates and discards the value.
        match token {
            (Token::Word("_"), span) => {
                lexer.expect(Token::Operation('='))?;
                let expr = self.expression(lexer, ctx)?;
                let span = lexer.span_with_start(span);
                block.stmts.push(ast::Statement {
                    kind: ast::StatementKind::Phony(expr),
                    span,
                });
                return Ok(());
            }
            _ => {}
        }
        let target = self.lhs_expression(lexer, ctx, Some(token), expected_token)?;

        let (op, value) = match lexer.next() {
            (Token::Operation('='), _) => {
                let value = self.expression(lexer, ctx)?;
                (None, value)
            }
            (Token::AssignmentOperation(c), _) => {
                use crate::BinaryOperator as Bo;
                let op = match c {
                    '<' => Bo::ShiftLeft,
                    '>' => Bo::ShiftRight,
                    '+' => Bo::Add,
                    '-' => Bo::Subtract,
                    '*' => Bo::Multiply,
                    '/' => Bo::Divide,
                    '%' => Bo::Modulo,
                    '&' => Bo::And,
                    '|' => Bo::InclusiveOr,
                    '^' => Bo::ExclusiveOr,
                    // The lexer only emits the characters handled above
                    // for AssignmentOperation tokens.
                    _ => unreachable!(),
                };

                let value = self.expression(lexer, ctx)?;
                (Some(op), value)
            }
            op_token @ (Token::IncrementOperation | Token::DecrementOperation, _) => {
                let op = match op_token.0 {
                    Token::IncrementOperation => ast::StatementKind::Increment,
                    Token::DecrementOperation => ast::StatementKind::Decrement,
                    _ => unreachable!(),
                };

                let span = lexer.span_with_start(token.1);
                block.stmts.push(ast::Statement {
                    kind: op(target),
                    span,
                });
                return Ok(());
            }
            (_, span) => return Err(Box::new(Error::Unexpected(span, ExpectedToken::Assignment))),
        };

        let span = lexer.span_with_start(token.1);
        block.stmts.push(ast::Statement {
            kind: ast::StatementKind::Assign { target, op, value },
            span,
        });
        Ok(())
    }
1103
    /// Try to parse a function-call statement starting at `token`.
    ///
    /// Returns `Ok(false)` (without consuming a call) when `token` is not a
    /// word, or when the identifier has no template list and is not
    /// followed by `(` — the caller then retries it as an assignment. An
    /// identifier that did carry a template list must be a call, so the
    /// argument list is required in that case.
    fn maybe_func_call_statement<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        context: &mut ExpressionContext<'a, '_, '_>,
        block: &mut ast::Block<'a>,
        token: TokenSpan<'a>,
    ) -> Result<'a, bool> {
        let (name, name_span) = match token {
            (Token::Word(name), span) => (name, span),
            _ => return Ok(false),
        };
        let ident = self.template_elaborated_ident(name, name_span, lexer, context)?;
        if ident.template_list.is_empty() && !matches!(lexer.peek(), (Token::Paren('('), _)) {
            return Ok(false);
        }

        self.push_rule_span(Rule::SingularExpr, lexer);

        let arguments = self.arguments(lexer, context)?;
        let span = lexer.span_with_start(name_span);

        block.stmts.push(ast::Statement {
            kind: ast::StatementKind::Call(ast::CallPhrase {
                function: ident,
                arguments,
            }),
            span,
        });

        self.pop_rule_span(lexer);

        Ok(true)
    }
1143
1144 fn func_call_or_variable_updating_statement<'a>(
1149 &mut self,
1150 lexer: &mut Lexer<'a>,
1151 context: &mut ExpressionContext<'a, '_, '_>,
1152 block: &mut ast::Block<'a>,
1153 token: TokenSpan<'a>,
1154 expected_token: ExpectedToken<'a>,
1155 ) -> Result<'a, ()> {
1156 if !self.maybe_func_call_statement(lexer, context, block, token)? {
1157 self.variable_updating_statement(lexer, context, block, token, expected_token)?;
1158 }
1159 Ok(())
1160 }
1161
    /// Parse a statement that begins with `let`, `const`, or `var` (a local
    /// declaration), or fall back to a function-call / variable-updating
    /// statement for any other first token. The statement is appended to
    /// `block`.
    ///
    /// Note: locals are declared AFTER their initializer is parsed, so an
    /// initializer referring to the same name resolves to the outer binding.
    fn variable_or_value_or_func_call_or_variable_updating_statement<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
        block: &mut ast::Block<'a>,
        token: TokenSpan<'a>,
        expected_token: ExpectedToken<'a>,
    ) -> Result<'a, ()> {
        let local_decl = match token {
            (Token::Word("let"), _) => {
                let (name, given_ty) = self.optionally_typed_ident(lexer, ctx)?;

                // `let` requires an initializer.
                lexer.expect(Token::Operation('='))?;
                let expr_id = self.expression(lexer, ctx)?;

                let handle = ctx.declare_local(name)?;
                ast::LocalDecl::Let(ast::Let {
                    name,
                    ty: given_ty,
                    init: expr_id,
                    handle,
                })
            }
            (Token::Word("const"), _) => {
                let (name, given_ty) = self.optionally_typed_ident(lexer, ctx)?;

                // `const` requires an initializer.
                lexer.expect(Token::Operation('='))?;
                let expr_id = self.expression(lexer, ctx)?;

                let handle = ctx.declare_local(name)?;
                ast::LocalDecl::Const(ast::LocalConst {
                    name,
                    ty: given_ty,
                    init: expr_id,
                    handle,
                })
            }
            (Token::Word("var"), _) => {
                // A function-scope `var` may only name the `function`
                // address space in its template list.
                if lexer.next_if(Token::TemplateArgsStart) {
                    let (class_str, span) = lexer.next_ident_with_span()?;
                    if class_str != "function" {
                        return Err(Box::new(Error::InvalidLocalVariableAddressSpace(span)));
                    }
                    lexer.expect(Token::TemplateArgsEnd)?;
                }

                let (name, ty) = self.optionally_typed_ident(lexer, ctx)?;

                // `var` initializer is optional.
                let init = if lexer.next_if(Token::Operation('=')) {
                    let init = self.expression(lexer, ctx)?;
                    Some(init)
                } else {
                    None
                };

                let handle = ctx.declare_local(name)?;
                ast::LocalDecl::Var(ast::LocalVariable {
                    name,
                    ty,
                    init,
                    handle,
                })
            }
            token => {
                return self.func_call_or_variable_updating_statement(
                    lexer,
                    ctx,
                    block,
                    token,
                    expected_token,
                );
            }
        };

        let span = lexer.span_with_start(token.1);
        block.stmts.push(ast::Statement {
            kind: ast::StatementKind::LocalDecl(local_decl),
            span,
        });

        Ok(())
    }
1251
    /// Parses a single statement and appends it to `block`.
    ///
    /// Handles compound statements, `;` (empty statement), `return`, `if`
    /// (with `else if` chains), `switch`, `loop`, `while`, `for`, `break`,
    /// `continue`, `discard`, and `const_assert`, falling back to local
    /// declarations and call/assignment statements for anything else.
    /// Recursion depth is bounded via `track_recursion`, and brace depth
    /// via `brace_nesting_level`.
    fn statement<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
        block: &mut ast::Block<'a>,
        brace_nesting_level: u8,
    ) -> Result<'a, ()> {
        self.track_recursion(|this| {
            this.push_rule_span(Rule::Statement, lexer);

            // A compound statement (attribute-or-`{` start) is parsed as a
            // nested block without consuming any token here.
            match lexer.peek() {
                (token, _) if is_start_of_compound_statement(token) => {
                    let (inner, span) = this.block(lexer, ctx, brace_nesting_level)?;
                    block.stmts.push(ast::Statement {
                        kind: ast::StatementKind::Block(inner),
                        span,
                    });
                    this.pop_rule_span(lexer);
                    return Ok(());
                }
                _ => {}
            }

            let kind = match lexer.next() {
                // Empty statement: nothing to record.
                (Token::Separator(';'), _) => {
                    this.pop_rule_span(lexer);
                    return Ok(());
                }
                (Token::Word("return"), _) => {
                    // `return;` has no value expression.
                    let value = if lexer.peek().0 != Token::Separator(';') {
                        let handle = this.expression(lexer, ctx)?;
                        Some(handle)
                    } else {
                        None
                    };
                    lexer.expect(Token::Separator(';'))?;
                    ast::StatementKind::Return { value }
                }
                (Token::Word("if"), _) => {
                    let condition = this.expression(lexer, ctx)?;

                    let accept = this.block(lexer, ctx, brace_nesting_level)?.0;

                    // Collect `else if` clauses iteratively; the loop's
                    // break value is the final `else` block (or empty).
                    let mut elsif_stack = Vec::new();
                    let mut elseif_span_start = lexer.start_byte_offset();
                    let mut reject = loop {
                        if !lexer.next_if(Token::Word("else")) {
                            break ast::Block::default();
                        }

                        if !lexer.next_if(Token::Word("if")) {
                            // Plain `else { ... }` terminates the chain.
                            break this.block(lexer, ctx, brace_nesting_level)?.0;
                        }

                        let other_condition = this.expression(lexer, ctx)?;
                        let other_block = this.block(lexer, ctx, brace_nesting_level)?;
                        elsif_stack.push((elseif_span_start, other_condition, other_block));
                        elseif_span_start = lexer.start_byte_offset();
                    };

                    // Rebuild the `else if` chain from the innermost clause
                    // outward, nesting each one inside the previous reject.
                    for (other_span_start, other_cond, other_block) in elsif_stack.into_iter().rev()
                    {
                        let sub_stmt = ast::StatementKind::If {
                            condition: other_cond,
                            accept: other_block.0,
                            reject,
                        };
                        reject = ast::Block::default();
                        let span = lexer.span_from(other_span_start);
                        reject.stmts.push(ast::Statement {
                            kind: sub_stmt,
                            span,
                        })
                    }

                    ast::StatementKind::If {
                        condition,
                        accept,
                        reject,
                    }
                }
                (Token::Word("switch"), _) => {
                    let selector = this.expression(lexer, ctx)?;
                    let brace_span = lexer.expect_span(Token::Paren('{'))?;
                    let brace_nesting_level =
                        Self::increase_brace_nesting(brace_nesting_level, brace_span)?;
                    let mut cases = Vec::new();

                    loop {
                        match lexer.next() {
                            (Token::Word("case"), _) => {
                                // `case a, b, c:` — all but the last selector
                                // become empty fall-through cases.
                                let value = loop {
                                    let value = this.switch_value(lexer, ctx)?;
                                    if lexer.next_if(Token::Separator(',')) {
                                        let next_token = lexer.peek().0;
                                        // Trailing comma before `:` or the
                                        // case body ends the selector list.
                                        if next_token == Token::Separator(':')
                                            || is_start_of_compound_statement(next_token)
                                        {
                                            break value;
                                        }
                                    } else {
                                        break value;
                                    }
                                    cases.push(ast::SwitchCase {
                                        value,
                                        body: ast::Block::default(),
                                        fall_through: true,
                                    });
                                };

                                // The `:` after the selector list is optional.
                                lexer.next_if(Token::Separator(':'));

                                let body = this.block(lexer, ctx, brace_nesting_level)?.0;

                                cases.push(ast::SwitchCase {
                                    value,
                                    body,
                                    fall_through: false,
                                });
                            }
                            (Token::Word("default"), _) => {
                                lexer.next_if(Token::Separator(':'));
                                let body = this.block(lexer, ctx, brace_nesting_level)?.0;
                                cases.push(ast::SwitchCase {
                                    value: ast::SwitchValue::Default,
                                    body,
                                    fall_through: false,
                                });
                            }
                            (Token::Paren('}'), _) => break,
                            (_, span) => {
                                return Err(Box::new(Error::Unexpected(
                                    span,
                                    ExpectedToken::SwitchItem,
                                )))
                            }
                        }
                    }

                    ast::StatementKind::Switch { selector, cases }
                }
                (Token::Word("loop"), _) => this.r#loop(lexer, ctx, brace_nesting_level)?,
                (Token::Word("while"), _) => {
                    // Desugar `while cond { ... }` into
                    // `loop { if cond {} else { break } { ... } }`.
                    let mut body = ast::Block::default();

                    let (condition, span) =
                        lexer.capture_span(|lexer| this.expression(lexer, ctx))?;
                    let mut reject = ast::Block::default();
                    reject.stmts.push(ast::Statement {
                        kind: ast::StatementKind::Break,
                        span,
                    });

                    body.stmts.push(ast::Statement {
                        kind: ast::StatementKind::If {
                            condition,
                            accept: ast::Block::default(),
                            reject,
                        },
                        span,
                    });

                    let (block, span) = this.block(lexer, ctx, brace_nesting_level)?;
                    body.stmts.push(ast::Statement {
                        kind: ast::StatementKind::Block(block),
                        span,
                    });

                    ast::StatementKind::Loop {
                        body,
                        continuing: ast::Block::default(),
                        break_if: None,
                    }
                }
                (Token::Word("for"), _) => {
                    // Desugar `for (init; cond; update) { ... }` into a
                    // `loop` whose continuing block holds `update`.
                    lexer.expect(Token::Paren('('))?;

                    // The init declaration's scope covers the whole loop.
                    ctx.local_table.push_scope();

                    if !lexer.next_if(Token::Separator(';')) {
                        let token = lexer.next();
                        this.variable_or_value_or_func_call_or_variable_updating_statement(
                            lexer,
                            ctx,
                            block,
                            token,
                            ExpectedToken::ForInit,
                        )?;
                        lexer.expect(Token::Separator(';'))?;
                    };

                    let mut body = ast::Block::default();
                    // Absent condition means an infinite loop.
                    if !lexer.next_if(Token::Separator(';')) {
                        let (condition, span) = lexer.capture_span(|lexer| -> Result<'_, _> {
                            let condition = this.expression(lexer, ctx)?;
                            lexer.expect(Token::Separator(';'))?;
                            Ok(condition)
                        })?;
                        let mut reject = ast::Block::default();
                        reject.stmts.push(ast::Statement {
                            kind: ast::StatementKind::Break,
                            span,
                        });
                        body.stmts.push(ast::Statement {
                            kind: ast::StatementKind::If {
                                condition,
                                accept: ast::Block::default(),
                                reject,
                            },
                            span,
                        });
                    };

                    let mut continuing = ast::Block::default();
                    if !lexer.next_if(Token::Paren(')')) {
                        let token = lexer.next();
                        this.func_call_or_variable_updating_statement(
                            lexer,
                            ctx,
                            &mut continuing,
                            token,
                            ExpectedToken::ForUpdate,
                        )?;
                        lexer.expect(Token::Paren(')'))?;
                    }

                    let (block, span) = this.block(lexer, ctx, brace_nesting_level)?;
                    body.stmts.push(ast::Statement {
                        kind: ast::StatementKind::Block(block),
                        span,
                    });

                    ctx.local_table.pop_scope();

                    ast::StatementKind::Loop {
                        body,
                        continuing,
                        break_if: None,
                    }
                }
                (Token::Word("break"), span) => {
                    let (peeked_token, peeked_span) = lexer.peek();
                    // `break if` is only legal inside a `continuing` block,
                    // which `r#loop` handles; here it is an error.
                    if let Token::Word("if") = peeked_token {
                        let span = span.until(&peeked_span);
                        return Err(Box::new(Error::InvalidBreakIf(span)));
                    }
                    lexer.expect(Token::Separator(';'))?;
                    ast::StatementKind::Break
                }
                (Token::Word("continue"), _) => {
                    lexer.expect(Token::Separator(';'))?;
                    ast::StatementKind::Continue
                }
                (Token::Word("discard"), _) => {
                    lexer.expect(Token::Separator(';'))?;
                    ast::StatementKind::Kill
                }
                (Token::Word("const_assert"), _) => {
                    // Parentheses around the condition are optional.
                    let paren = lexer.next_if(Token::Paren('('));

                    let condition = this.expression(lexer, ctx)?;

                    if paren {
                        lexer.expect(Token::Paren(')'))?;
                    }
                    lexer.expect(Token::Separator(';'))?;
                    ast::StatementKind::ConstAssert(condition)
                }
                // Anything else: declaration / call / assignment, which
                // pushes its own statement; consume the trailing `;` here.
                token => {
                    this.variable_or_value_or_func_call_or_variable_updating_statement(
                        lexer,
                        ctx,
                        block,
                        token,
                        ExpectedToken::Statement,
                    )?;
                    lexer.expect(Token::Separator(';'))?;
                    this.pop_rule_span(lexer);
                    return Ok(());
                }
            };

            let span = this.pop_rule_span(lexer);
            block.stmts.push(ast::Statement { kind, span });

            Ok(())
        })
    }
1557
    /// Parses the body of a `loop` statement (the `loop` keyword has
    /// already been consumed), including an optional trailing `continuing`
    /// block and its optional final `break if` statement.
    fn r#loop<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
        brace_nesting_level: u8,
    ) -> Result<'a, ast::StatementKind<'a>> {
        let mut body = ast::Block::default();
        let mut continuing = ast::Block::default();
        let mut break_if = None;

        let brace_span = lexer.expect_span(Token::Paren('{'))?;
        let brace_nesting_level = Self::increase_brace_nesting(brace_nesting_level, brace_span)?;

        // Declarations in the loop body are also visible in `continuing`.
        ctx.local_table.push_scope();

        loop {
            if lexer.next_if(Token::Word("continuing")) {
                // `continuing` must be the last element of the loop body.
                let brace_span = lexer.expect_span(Token::Paren('{'))?;
                let brace_nesting_level =
                    Self::increase_brace_nesting(brace_nesting_level, brace_span)?;
                loop {
                    if lexer.next_if(Token::Word("break")) {
                        // Inside `continuing`, `break` must be `break if`,
                        // and it must be the final statement.
                        lexer.expect(Token::Word("if"))?;

                        let condition = self.expression(lexer, ctx)?;
                        break_if = Some(condition);

                        lexer.expect(Token::Separator(';'))?;
                        // Closes the `continuing` block.
                        lexer.expect(Token::Paren('}'))?;
                        break;
                    } else if lexer.next_if(Token::Paren('}')) {
                        // `continuing` block ended without a `break if`.
                        break;
                    } else {
                        self.statement(lexer, ctx, &mut continuing, brace_nesting_level)?;
                    }
                }
                // Closes the `loop` body itself.
                lexer.expect(Token::Paren('}'))?;
                break;
            }
            if lexer.next_if(Token::Paren('}')) {
                break;
            }
            self.statement(lexer, ctx, &mut body, brace_nesting_level)?;
        }

        ctx.local_table.pop_scope();

        Ok(ast::StatementKind::Loop {
            body,
            continuing,
            break_if,
        })
    }
1635
    /// Parses a brace-delimited compound statement, optionally preceded by
    /// `@diagnostic(...)` attributes, returning the block and its span.
    ///
    /// Diagnostic attributes on compound statements are parsed but
    /// currently rejected with a "not yet implemented" error.
    fn block<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
        brace_nesting_level: u8,
    ) -> Result<'a, (ast::Block<'a>, Span)> {
        self.push_rule_span(Rule::Block, lexer);

        // Locals declared inside the block go out of scope at its end.
        ctx.local_table.push_scope();

        let mut diagnostic_filters = DiagnosticFilterMap::new();

        self.push_rule_span(Rule::Attribute, lexer);
        while lexer.next_if(Token::Attribute) {
            let (name, name_span) = lexer.next_ident_with_span()?;
            // Only `diagnostic(...)` attributes are legal here.
            if let Some(DirectiveKind::Diagnostic) = DirectiveKind::from_ident(name) {
                let filter = self.diagnostic_filter(lexer)?;
                let span = self.peek_rule_span(lexer);
                diagnostic_filters
                    .add(filter, span, ShouldConflictOnFullDuplicate::Yes)
                    .map_err(|e| Box::new(e.into()))?;
            } else {
                return Err(Box::new(Error::Unexpected(
                    name_span,
                    ExpectedToken::DiagnosticAttribute,
                )));
            }
        }
        self.pop_rule_span(lexer);

        // Valid syntax, but not supported yet: report rather than ignore.
        if !diagnostic_filters.is_empty() {
            return Err(Box::new(
                Error::DiagnosticAttributeNotYetImplementedAtParseSite {
                    site_name_plural: "compound statements",
                    spans: diagnostic_filters.spans().collect(),
                },
            ));
        }

        let brace_span = lexer.expect_span(Token::Paren('{'))?;
        let brace_nesting_level = Self::increase_brace_nesting(brace_nesting_level, brace_span)?;
        let mut block = ast::Block::default();
        while !lexer.next_if(Token::Paren('}')) {
            self.statement(lexer, ctx, &mut block, brace_nesting_level)?;
        }

        ctx.local_table.pop_scope();

        let span = self.pop_rule_span(lexer);
        Ok((block, span))
    }
1688
1689 fn varying_binding<'a>(
1690 &mut self,
1691 lexer: &mut Lexer<'a>,
1692 ctx: &mut ExpressionContext<'a, '_, '_>,
1693 ) -> Result<'a, Option<ast::Binding<'a>>> {
1694 let mut bind_parser = BindingParser::default();
1695 self.push_rule_span(Rule::Attribute, lexer);
1696
1697 while lexer.next_if(Token::Attribute) {
1698 let (word, span) = lexer.next_ident_with_span()?;
1699 bind_parser.parse(self, lexer, word, span, ctx)?;
1700 }
1701
1702 let span = self.pop_rule_span(lexer);
1703 bind_parser.finish(span)
1704 }
1705
    /// Parses a function declaration after the `fn` keyword: name,
    /// parameter list, optional `-> type` result, and body.
    ///
    /// `must_use` is the span of a preceding `@must_use` attribute, if
    /// any; it is an error for a function without a return type.
    /// Identifiers this function references from module scope are recorded
    /// into `dependencies`.
    fn function_decl<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        diagnostic_filter_leaf: Option<Handle<DiagnosticFilterNode>>,
        must_use: Option<Span>,
        out: &mut ast::TranslationUnit<'a>,
        dependencies: &mut FastIndexSet<ast::Dependency<'a>>,
    ) -> Result<'a, ast::Function<'a>> {
        self.push_rule_span(Rule::FunctionDecl, lexer);
        let fun_name = lexer.next_ident()?;

        let mut locals = Arena::new();

        let mut ctx = ExpressionContext {
            expressions: &mut out.expressions,
            local_table: &mut SymbolTable::default(),
            locals: &mut locals,
            unresolved: dependencies,
        };

        ctx.local_table.push_scope();
        // Narrow the symbol lookup scope while parsing the signature;
        // restored via `reset_lookup_scope` below.
        // NOTE(review): presumably keeps signature identifiers from
        // resolving against each other — confirm against SymbolTable docs.
        ctx.local_table.reduce_lookup_scope();

        let mut arguments = Vec::new();
        lexer.expect(Token::Paren('('))?;
        // `ready` tracks whether a separating comma was seen before the
        // next parameter.
        let mut ready = true;
        while !lexer.next_if(Token::Paren(')')) {
            if !ready {
                return Err(Box::new(Error::Unexpected(
                    lexer.next().1,
                    ExpectedToken::Token(Token::Separator(',')),
                )));
            }
            let binding = self.varying_binding(lexer, &mut ctx)?;

            let param_name = lexer.next_ident()?;

            lexer.expect(Token::Separator(':'))?;
            let param_type = self.type_specifier(lexer, &mut ctx)?;

            // Parameters become locals visible inside the function body.
            let handle = ctx.declare_local(param_name)?;
            arguments.push(ast::FunctionArgument {
                name: param_name,
                ty: param_type,
                binding,
                handle,
            });
            ready = lexer.next_if(Token::Separator(','));
        }
        let result = if lexer.next_if(Token::Arrow) {
            let binding = self.varying_binding(lexer, &mut ctx)?;
            let ty = self.type_specifier(lexer, &mut ctx)?;
            let must_use = must_use.is_some();
            Some(ast::FunctionResult {
                ty,
                binding,
                must_use,
            })
        } else if let Some(must_use) = must_use {
            // `@must_use` on a function returning nothing is an error.
            return Err(Box::new(Error::FunctionMustUseReturnsVoid(
                must_use,
                self.peek_rule_span(lexer),
            )));
        } else {
            None
        };

        ctx.local_table.reset_lookup_scope();

        // The function body starts at nesting level 1 (its own braces).
        lexer.expect(Token::Paren('{'))?;
        let brace_nesting_level = 1;
        let mut body = ast::Block::default();
        while !lexer.next_if(Token::Paren('}')) {
            self.statement(lexer, &mut ctx, &mut body, brace_nesting_level)?;
        }

        ctx.local_table.pop_scope();

        let fun = ast::Function {
            entry_point: None,
            name: fun_name,
            arguments,
            result,
            body,
            diagnostic_filter_leaf,
            doc_comments: Vec::new(),
        };

        self.pop_rule_span(lexer);

        Ok(fun)
    }
1806
1807 fn directive_ident_list<'a>(
1808 &self,
1809 lexer: &mut Lexer<'a>,
1810 handler: impl FnMut(&'a str, Span) -> Result<'a, ()>,
1811 ) -> Result<'a, ()> {
1812 let mut handler = handler;
1813 'next_arg: loop {
1814 let (ident, span) = lexer.next_ident_with_span()?;
1815 handler(ident, span)?;
1816
1817 let expected_token = match lexer.peek().0 {
1818 Token::Separator(',') => {
1819 let _ = lexer.next();
1820 if matches!(lexer.peek().0, Token::Word(..)) {
1821 continue 'next_arg;
1822 }
1823 ExpectedToken::AfterIdentListComma
1824 }
1825 _ => ExpectedToken::AfterIdentListArg,
1826 };
1827
1828 if !matches!(lexer.next().0, Token::Separator(';')) {
1829 return Err(Box::new(Error::Unexpected(span, expected_token)));
1830 }
1831
1832 break Ok(());
1833 }
1834 }
1835
    /// Parses one module-scope declaration (struct, alias, const, override,
    /// var, fn, const_assert, or a bare `;`) together with any attributes
    /// preceding it, and appends it to `out.decls`.
    ///
    /// Returns `Ok(())` having consumed exactly one declaration; reaching
    /// `Token::End` before a declaration also returns `Ok(())` without
    /// appending anything.
    fn global_decl<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        out: &mut ast::TranslationUnit<'a>,
    ) -> Result<'a, ()> {
        let doc_comments = lexer.accumulate_doc_comments();

        // Attribute values are accumulated first and then validated
        // against the declaration kind that follows.
        let mut binding = None;
        let mut stage = ParsedAttribute::default();
        // Span used for stage-related errors (e.g. missing workgroup size).
        let mut shader_stage_error_span = Span::new(0, 0);
        let mut workgroup_size = ParsedAttribute::default();
        let mut early_depth_test = ParsedAttribute::default();
        let (mut bind_index, mut bind_group) =
            (ParsedAttribute::default(), ParsedAttribute::default());
        let mut id = ParsedAttribute::default();
        let mut payload = ParsedAttribute::default();
        let mut incoming_payload = ParsedAttribute::default();
        let mut mesh_output = ParsedAttribute::default();

        let mut must_use: ParsedAttribute<Span> = ParsedAttribute::default();
        let mut memory_decorations = crate::MemoryDecorations::empty();

        let mut dependencies = FastIndexSet::default();
        let mut ctx = ExpressionContext {
            expressions: &mut out.expressions,
            local_table: &mut SymbolTable::default(),
            locals: &mut Arena::new(),
            unresolved: &mut dependencies,
        };
        let mut diagnostic_filters = DiagnosticFilterMap::new();
        // Most declaration kinds do not accept `@diagnostic(...)`; this
        // helper produces the appropriate error when any were collected.
        let ensure_no_diag_attrs = |on_what, filters: DiagnosticFilterMap| -> Result<()> {
            if filters.is_empty() {
                Ok(())
            } else {
                Err(Box::new(Error::DiagnosticAttributeNotSupported {
                    on_what,
                    spans: filters.spans().collect(),
                }))
            }
        };

        self.push_rule_span(Rule::Attribute, lexer);
        while lexer.next_if(Token::Attribute) {
            let (name, name_span) = lexer.next_ident_with_span()?;
            if let Some(DirectiveKind::Diagnostic) = DirectiveKind::from_ident(name) {
                let filter = self.diagnostic_filter(lexer)?;
                let span = self.peek_rule_span(lexer);
                diagnostic_filters
                    .add(filter, span, ShouldConflictOnFullDuplicate::Yes)
                    .map_err(|e| Box::new(e.into()))?;
                continue;
            }
            match name {
                // `next_if(',')` below tolerates an optional trailing comma
                // inside the attribute's parentheses.
                "binding" => {
                    lexer.expect(Token::Paren('('))?;
                    bind_index.set(self.expression(lexer, &mut ctx)?, name_span)?;
                    lexer.next_if(Token::Separator(','));
                    lexer.expect(Token::Paren(')'))?;
                }
                "group" => {
                    lexer.expect(Token::Paren('('))?;
                    bind_group.set(self.expression(lexer, &mut ctx)?, name_span)?;
                    lexer.next_if(Token::Separator(','));
                    lexer.expect(Token::Paren(')'))?;
                }
                "id" => {
                    lexer.expect(Token::Paren('('))?;
                    id.set(self.expression(lexer, &mut ctx)?, name_span)?;
                    lexer.next_if(Token::Separator(','));
                    lexer.expect(Token::Paren(')'))?;
                }
                "vertex" => {
                    stage.set(ShaderStage::Vertex, name_span)?;
                }
                "fragment" => {
                    stage.set(ShaderStage::Fragment, name_span)?;
                }
                "compute" => {
                    stage.set(ShaderStage::Compute, name_span)?;
                    shader_stage_error_span = name_span;
                }
                // Mesh/task stages require the mesh-shader extension.
                "task" => {
                    lexer.require_enable_extension(
                        ImplementedEnableExtension::WgpuMeshShader,
                        name_span,
                    )?;
                    stage.set(ShaderStage::Task, name_span)?;
                    shader_stage_error_span = name_span;
                }
                "mesh" => {
                    lexer.require_enable_extension(
                        ImplementedEnableExtension::WgpuMeshShader,
                        name_span,
                    )?;
                    stage.set(ShaderStage::Mesh, name_span)?;
                    shader_stage_error_span = name_span;

                    // `@mesh(...)` names the mesh output variable.
                    lexer.expect(Token::Paren('('))?;
                    mesh_output.set(lexer.next_ident_with_span()?, name_span)?;
                    lexer.expect(Token::Paren(')'))?;
                }
                // Ray-tracing stages require the ray-tracing extension.
                "ray_generation" => {
                    lexer.require_enable_extension(
                        ImplementedEnableExtension::WgpuRayTracingPipeline,
                        name_span,
                    )?;
                    stage.set(ShaderStage::RayGeneration, name_span)?;
                    shader_stage_error_span = name_span;
                }
                "any_hit" => {
                    lexer.require_enable_extension(
                        ImplementedEnableExtension::WgpuRayTracingPipeline,
                        name_span,
                    )?;
                    stage.set(ShaderStage::AnyHit, name_span)?;
                    shader_stage_error_span = name_span;
                }
                "closest_hit" => {
                    lexer.require_enable_extension(
                        ImplementedEnableExtension::WgpuRayTracingPipeline,
                        name_span,
                    )?;
                    stage.set(ShaderStage::ClosestHit, name_span)?;
                    shader_stage_error_span = name_span;
                }
                "miss" => {
                    lexer.require_enable_extension(
                        ImplementedEnableExtension::WgpuRayTracingPipeline,
                        name_span,
                    )?;
                    stage.set(ShaderStage::Miss, name_span)?;
                    shader_stage_error_span = name_span;
                }
                "payload" => {
                    lexer.require_enable_extension(
                        ImplementedEnableExtension::WgpuMeshShader,
                        name_span,
                    )?;
                    lexer.expect(Token::Paren('('))?;
                    payload.set(lexer.next_ident_with_span()?, name_span)?;
                    lexer.expect(Token::Paren(')'))?;
                }
                "incoming_payload" => {
                    lexer.require_enable_extension(
                        ImplementedEnableExtension::WgpuRayTracingPipeline,
                        name_span,
                    )?;
                    lexer.expect(Token::Paren('('))?;
                    incoming_payload.set(lexer.next_ident_with_span()?, name_span)?;
                    lexer.expect(Token::Paren(')'))?;
                }
                "workgroup_size" => {
                    lexer.expect(Token::Paren('('))?;
                    // Up to three dimensions; missing ones stay `None`.
                    let mut new_workgroup_size = [None; 3];
                    for size in new_workgroup_size.iter_mut() {
                        *size = Some(self.expression(lexer, &mut ctx)?);
                        match lexer.next() {
                            (Token::Paren(')'), _) => break,
                            (Token::Separator(','), _) => {
                                // Allow a trailing comma before `)`.
                                if lexer.next_if(Token::Paren(')')) {
                                    break;
                                }
                            }
                            other => {
                                return Err(Box::new(Error::Unexpected(
                                    other.1,
                                    ExpectedToken::WorkgroupSizeSeparator,
                                )))
                            }
                        }
                    }
                    workgroup_size.set(new_workgroup_size, name_span)?;
                }
                "early_depth_test" => {
                    lexer.expect(Token::Paren('('))?;
                    let (ident, ident_span) = lexer.next_ident_with_span()?;
                    // `force` is special-cased; anything else must be a
                    // conservative-depth mode.
                    let value = if ident == "force" {
                        crate::EarlyDepthTest::Force
                    } else {
                        crate::EarlyDepthTest::Allow {
                            conservative: conv::map_conservative_depth(ident, ident_span)?,
                        }
                    };
                    lexer.expect(Token::Paren(')'))?;
                    early_depth_test.set(value, name_span)?;
                }
                "must_use" => {
                    // The attribute's own span doubles as its value, for
                    // error reporting later.
                    must_use.set(name_span, name_span)?;
                }
                "coherent" => {
                    memory_decorations |= crate::MemoryDecorations::COHERENT;
                }
                "volatile" => {
                    memory_decorations |= crate::MemoryDecorations::VOLATILE;
                }
                _ => return Err(Box::new(Error::UnknownAttribute(name_span))),
            }
        }

        let attrib_span = self.pop_rule_span(lexer);
        // `@group` and `@binding` must appear together.
        match (bind_group.value, bind_index.value) {
            (Some(group), Some(index)) => {
                binding = Some(ast::ResourceBinding {
                    group,
                    binding: index,
                });
            }
            (Some(_), None) => {
                return Err(Box::new(Error::MissingAttribute("binding", attrib_span)))
            }
            (None, Some(_)) => return Err(Box::new(Error::MissingAttribute("group", attrib_span))),
            (None, None) => {}
        }

        let start = lexer.start_byte_offset();
        let kind = match lexer.next() {
            // A stray `;` is allowed at module scope; it declares nothing.
            (Token::Separator(';'), _) => {
                ensure_no_diag_attrs(
                    DiagnosticAttributeNotSupportedPosition::SemicolonInModulePosition,
                    diagnostic_filters,
                )?;
                None
            }
            // Directives must precede all global declarations.
            (Token::Word(word), directive_span) if DirectiveKind::from_ident(word).is_some() => {
                return Err(Box::new(Error::DirectiveAfterFirstGlobalDecl {
                    directive_span,
                }));
            }
            (Token::Word("struct"), _) => {
                ensure_no_diag_attrs("`struct`s".into(), diagnostic_filters)?;

                let name = lexer.next_ident()?;

                let members = self.struct_body(lexer, &mut ctx)?;

                Some(ast::GlobalDeclKind::Struct(ast::Struct {
                    name,
                    members,
                    doc_comments,
                }))
            }
            (Token::Word("alias"), _) => {
                ensure_no_diag_attrs("`alias`es".into(), diagnostic_filters)?;

                let name = lexer.next_ident()?;

                lexer.expect(Token::Operation('='))?;
                let ty = self.type_specifier(lexer, &mut ctx)?;
                lexer.expect(Token::Separator(';'))?;
                Some(ast::GlobalDeclKind::Type(ast::TypeAlias { name, ty }))
            }
            (Token::Word("const"), _) => {
                ensure_no_diag_attrs("`const`s".into(), diagnostic_filters)?;

                let (name, ty) = self.optionally_typed_ident(lexer, &mut ctx)?;

                lexer.expect(Token::Operation('='))?;
                let init = self.expression(lexer, &mut ctx)?;
                lexer.expect(Token::Separator(';'))?;

                Some(ast::GlobalDeclKind::Const(ast::Const {
                    name,
                    ty,
                    init,
                    doc_comments,
                }))
            }
            (Token::Word("override"), _) => {
                ensure_no_diag_attrs("`override`s".into(), diagnostic_filters)?;

                let (name, ty) = self.optionally_typed_ident(lexer, &mut ctx)?;

                // Unlike `const`, an `override` initializer is optional.
                let init = if lexer.next_if(Token::Operation('=')) {
                    Some(self.expression(lexer, &mut ctx)?)
                } else {
                    None
                };

                lexer.expect(Token::Separator(';'))?;

                Some(ast::GlobalDeclKind::Override(ast::Override {
                    name,
                    id: id.value,
                    ty,
                    init,
                }))
            }
            (Token::Word("var"), _) => {
                ensure_no_diag_attrs("`var`s".into(), diagnostic_filters)?;

                let mut var = self.variable_decl(lexer, &mut ctx)?;
                var.binding = binding.take();
                var.doc_comments = doc_comments;
                var.memory_decorations = memory_decorations;
                Some(ast::GlobalDeclKind::Var(var))
            }
            (Token::Word("fn"), _) => {
                // Functions are the one declaration kind that accepts
                // diagnostic attributes; chain them onto the module's
                // filter list.
                let diagnostic_filter_leaf = Self::write_diagnostic_filters(
                    &mut out.diagnostic_filters,
                    diagnostic_filters,
                    out.diagnostic_filter_leaf,
                );

                let function = self.function_decl(
                    lexer,
                    diagnostic_filter_leaf,
                    must_use.value,
                    out,
                    &mut dependencies,
                )?;
                Some(ast::GlobalDeclKind::Fn(ast::Function {
                    entry_point: if let Some(stage) = stage.value {
                        // Compute-like stages require `@workgroup_size`.
                        if stage.compute_like() && workgroup_size.value.is_none() {
                            return Err(Box::new(Error::MissingWorkgroupSize(
                                shader_stage_error_span,
                            )));
                        }

                        // Hit/miss stages require `@incoming_payload`.
                        match stage {
                            ShaderStage::AnyHit | ShaderStage::ClosestHit | ShaderStage::Miss => {
                                if incoming_payload.value.is_none() {
                                    return Err(Box::new(Error::MissingIncomingPayload(
                                        shader_stage_error_span,
                                    )));
                                }
                            }
                            _ => {}
                        }

                        Some(ast::EntryPoint {
                            stage,
                            early_depth_test: early_depth_test.value,
                            workgroup_size: workgroup_size.value,
                            mesh_output_variable: mesh_output.value,
                            task_payload: payload.value,
                            ray_incoming_payload: incoming_payload.value,
                        })
                    } else {
                        None
                    },
                    doc_comments,
                    ..function
                }))
            }
            (Token::Word("const_assert"), _) => {
                ensure_no_diag_attrs("`const_assert`s".into(), diagnostic_filters)?;

                // Parentheses around the condition are optional.
                let paren = lexer.next_if(Token::Paren('('));

                let condition = self.expression(lexer, &mut ctx)?;

                if paren {
                    lexer.expect(Token::Paren(')'))?;
                }
                lexer.expect(Token::Separator(';'))?;
                Some(ast::GlobalDeclKind::ConstAssert(condition))
            }
            (Token::End, _) => return Ok(()),
            (Token::UnterminatedBlockComment(_), span) => {
                return Err(Box::new(Error::UnterminatedBlockComment(span)))
            }
            other => {
                return Err(Box::new(Error::Unexpected(
                    other.1,
                    ExpectedToken::GlobalItem,
                )))
            }
        };

        // `@must_use` is only meaningful on functions.
        if let Some(must_use_span) = must_use.value {
            if !matches!(kind.as_ref(), Some(ast::GlobalDeclKind::Fn(_))) {
                return Err(Box::new(Error::FunctionMustUseOnNonFunction(must_use_span)));
            }
        }

        if let Some(kind) = kind {
            out.decls.append(
                ast::GlobalDecl { kind, dependencies },
                lexer.span_from(start),
            );
        }

        // Every rule pushed during this declaration must have been popped;
        // a leftover indicates a parser bug, not a user error.
        if !self.rules.is_empty() {
            log::error!("Reached the end of global decl, but rule stack is not empty");
            log::error!("Rules: {:?}", self.rules);
            return Err(Box::new(Error::Internal("rule stack is not empty")));
        };

        // `binding` is `take()`n by the `var` arm; anything left over means
        // `@group`/`@binding` was attached to a non-`var` declaration.
        match binding {
            None => Ok(()),
            Some(_) => Err(Box::new(Error::Internal(
                "we had the attribute but no var?",
            ))),
        }
    }
2238
    /// Parses a complete WGSL module from `source` into an AST translation
    /// unit.
    ///
    /// Directives (`diagnostic`, `enable`, `requires`) are processed first
    /// — they must precede all global declarations — then global
    /// declarations are consumed until end of input.
    pub fn parse<'a>(
        &mut self,
        source: &'a str,
        options: &Options,
    ) -> Result<'a, ast::TranslationUnit<'a>> {
        self.reset();

        let mut lexer = Lexer::new(source, !options.parse_doc_comments);
        let mut tu = ast::TranslationUnit::default();
        let mut enable_extensions = EnableExtensions::empty();
        let mut diagnostic_filters = DiagnosticFilterMap::new();

        tu.doc_comments = lexer.accumulate_module_doc_comments();

        // Consume leading directives; the first non-directive word ends
        // this phase.
        while let (Token::Word(word), _) = lexer.peek() {
            if let Some(kind) = DirectiveKind::from_ident(word) {
                self.push_rule_span(Rule::Directive, &mut lexer);
                // Safe to unwrap: we just peeked a word token.
                let _ = lexer.next_ident_with_span().unwrap();
                match kind {
                    DirectiveKind::Diagnostic => {
                        let diagnostic_filter = self.diagnostic_filter(&mut lexer)?;
                        let span = self.peek_rule_span(&lexer);
                        // Duplicate directives are tolerated here
                        // (`ShouldConflictOnFullDuplicate::No`), unlike
                        // `@diagnostic` attributes.
                        diagnostic_filters
                            .add(diagnostic_filter, span, ShouldConflictOnFullDuplicate::No)
                            .map_err(|e| Box::new(e.into()))?;
                        lexer.expect(Token::Separator(';'))?;
                    }
                    DirectiveKind::Enable => {
                        self.directive_ident_list(&mut lexer, |ident, span| {
                            let kind = EnableExtension::from_ident(ident, span)?;
                            let extension = match kind {
                                EnableExtension::Implemented(kind) => kind,
                                EnableExtension::Unimplemented(kind) => {
                                    return Err(Box::new(Error::EnableExtensionNotYetImplemented {
                                        kind,
                                        span,
                                    }))
                                }
                            };
                            // The extension must also be backed by a
                            // capability the caller opted into.
                            let required_capability = extension.capability();
                            if !options.capabilities.intersects(required_capability) {
                                return Err(Box::new(Error::EnableExtensionNotSupported {
                                    kind,
                                    span,
                                }));
                            }
                            enable_extensions.add(extension);
                            Ok(())
                        })?;
                    }
                    DirectiveKind::Requires => {
                        self.directive_ident_list(&mut lexer, |ident, span| {
                            match LanguageExtension::from_ident(ident) {
                                // Implemented language extensions need no
                                // further action at parse time.
                                Some(LanguageExtension::Implemented(_kind)) => {
                                    Ok(())
                                }
                                Some(LanguageExtension::Unimplemented(kind)) => {
                                    Err(Box::new(Error::LanguageExtensionNotYetImplemented {
                                        kind,
                                        span,
                                    }))
                                }
                                None => Err(Box::new(Error::UnknownLanguageExtension(span, ident))),
                            }
                        })?;
                    }
                }
                self.pop_rule_span(&lexer);
            } else {
                break;
            }
        }

        // Make enabled extensions visible to the lexer (for extension-gated
        // tokens) and record them on the translation unit.
        lexer.enable_extensions = enable_extensions;
        tu.enable_extensions = enable_extensions;
        tu.diagnostic_filter_leaf =
            Self::write_diagnostic_filters(&mut tu.diagnostic_filters, diagnostic_filters, None);

        // Parse global declarations until the lexer reports end of input.
        loop {
            match self.global_decl(&mut lexer, &mut tu) {
                Err(error) => return Err(error),
                Ok(()) => {
                    if lexer.peek().0 == Token::End {
                        break;
                    }
                }
            }
        }

        Ok(tu)
    }
2337
2338 fn increase_brace_nesting(brace_nesting_level: u8, brace_span: Span) -> Result<'static, u8> {
2339 const BRACE_NESTING_MAXIMUM: u8 = 127;
2347 if brace_nesting_level + 1 > BRACE_NESTING_MAXIMUM {
2348 return Err(Box::new(Error::ExceededLimitForNestedBraces {
2349 span: brace_span,
2350 limit: BRACE_NESTING_MAXIMUM,
2351 }));
2352 }
2353 Ok(brace_nesting_level + 1)
2354 }
2355
    /// Parses the parenthesized argument list of a `diagnostic` directive or
    /// `@diagnostic` attribute: `(severity, rule_name)`, where the rule name
    /// may be a dotted `namespace.name` pair and a trailing comma is allowed.
    fn diagnostic_filter<'a>(&self, lexer: &mut Lexer<'a>) -> Result<'a, DiagnosticFilter> {
        lexer.expect(Token::Paren('('))?;

        let (severity_control_name, severity_control_name_span) = lexer.next_ident_with_span()?;
        let new_severity = diagnostic_filter::Severity::from_wgsl_ident(severity_control_name)
            .ok_or(Error::DiagnosticInvalidSeverity {
                severity_control_name_span,
            })?;

        lexer.expect(Token::Separator(','))?;

        let (diagnostic_name_token, diagnostic_name_token_span) = lexer.next_ident_with_span()?;
        let triggering_rule = if lexer.next_if(Token::Separator('.')) {
            // Dotted form, e.g. `vendor.rule`: always a user-defined rule.
            let (ident, _span) = lexer.next_ident_with_span()?;
            FilterableTriggeringRule::User(Box::new([diagnostic_name_token.into(), ident.into()]))
        } else {
            let diagnostic_rule_name = diagnostic_name_token;
            let diagnostic_rule_name_span = diagnostic_name_token_span;
            if let Some(triggering_rule) =
                StandardFilterableTriggeringRule::from_wgsl_ident(diagnostic_rule_name)
            {
                FilterableTriggeringRule::Standard(triggering_rule)
            } else {
                // Unknown single-ident rule names are not an error: emit a
                // warning diagnostic (which may itself be filtered) and
                // keep the rule as `Unknown`.
                diagnostic_filter::Severity::Warning.report_wgsl_parse_diag(
                    Box::new(Error::UnknownDiagnosticRuleName(diagnostic_rule_name_span)),
                    lexer.source,
                )?;
                FilterableTriggeringRule::Unknown(diagnostic_rule_name.into())
            }
        };
        let filter = DiagnosticFilter {
            triggering_rule,
            new_severity,
        };
        // Allow a trailing comma before the closing paren.
        lexer.next_if(Token::Separator(','));
        lexer.expect(Token::Paren(')'))?;

        Ok(filter)
    }
2395
2396 pub(crate) fn write_diagnostic_filters(
2397 arena: &mut Arena<DiagnosticFilterNode>,
2398 filters: DiagnosticFilterMap,
2399 parent: Option<Handle<DiagnosticFilterNode>>,
2400 ) -> Option<Handle<DiagnosticFilterNode>> {
2401 filters
2402 .into_iter()
2403 .fold(parent, |parent, (triggering_rule, (new_severity, span))| {
2404 Some(arena.append(
2405 DiagnosticFilterNode {
2406 inner: DiagnosticFilter {
2407 new_severity,
2408 triggering_rule,
2409 },
2410 parent,
2411 },
2412 span,
2413 ))
2414 })
2415 }
2416}
2417
2418const fn is_start_of_compound_statement<'a>(token: Token<'a>) -> bool {
2419 matches!(token, Token::Attribute | Token::Paren('{'))
2420}