1use alloc::{boxed::Box, vec::Vec};
2use directive::enable_extension::ImplementedEnableExtension;
3
4use crate::diagnostic_filter::{
5 self, DiagnosticFilter, DiagnosticFilterMap, DiagnosticFilterNode, FilterableTriggeringRule,
6 ShouldConflictOnFullDuplicate, StandardFilterableTriggeringRule,
7};
8use crate::front::wgsl::error::{DiagnosticAttributeNotSupportedPosition, Error, ExpectedToken};
9use crate::front::wgsl::parse::directive::enable_extension::{EnableExtension, EnableExtensions};
10use crate::front::wgsl::parse::directive::language_extension::LanguageExtension;
11use crate::front::wgsl::parse::directive::DirectiveKind;
12use crate::front::wgsl::parse::lexer::{Lexer, Token, TokenSpan};
13use crate::front::wgsl::parse::number::Number;
14use crate::front::wgsl::Result;
15use crate::front::SymbolTable;
16use crate::{Arena, FastHashSet, FastIndexSet, Handle, ShaderStage, Span};
17
18pub mod ast;
19pub mod conv;
20pub mod directive;
21pub mod lexer;
22pub mod number;
23
/// Mutable state threaded through expression parsing for one declaration.
///
/// Lifetimes: `'input` is the WGSL source text, `'temp` covers
/// per-declaration temporaries (the local symbol table), and `'out` covers
/// the output AST arenas.
struct ExpressionContext<'input, 'temp, 'out> {
    /// Arena receiving every parsed [`ast::Expression`] node.
    expressions: &'out mut Arena<ast::Expression<'input>>,

    /// Maps in-scope local names to their declarations.
    local_table: &'temp mut SymbolTable<&'input str, Handle<ast::Local>>,

    /// Arena receiving locals declared by `let`/`const`/`var` statements.
    locals: &'out mut Arena<ast::Local>,

    /// Identifiers that did not resolve to a local; presumably matched
    /// against global declarations in a later pass — TODO confirm with the
    /// code that consumes `ast::Dependency`.
    unresolved: &'out mut FastIndexSet<ast::Dependency<'input>>,
}
84
impl<'a> ExpressionContext<'a, '_, '_> {
    /// Parses a left-associative run of binary operators at one precedence
    /// level.
    ///
    /// `classifier` maps a peeked token to its binary operator, or `None` to
    /// stop; `parser` parses one operand at the next-higher precedence
    /// level. Operands are folded into nested `Binary` nodes whose spans run
    /// from the start of the first operand to the lexer's current position.
    fn parse_binary_op(
        &mut self,
        lexer: &mut Lexer<'a>,
        classifier: impl Fn(Token<'a>) -> Option<crate::BinaryOperator>,
        mut parser: impl FnMut(&mut Lexer<'a>, &mut Self) -> Result<'a, Handle<ast::Expression<'a>>>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        let start = lexer.start_byte_offset();
        let mut accumulator = parser(lexer, self)?;
        while let Some(op) = classifier(lexer.peek().0) {
            // Consume the operator token that `classifier` just accepted.
            let _ = lexer.next();
            let left = accumulator;
            let right = parser(lexer, self)?;
            accumulator = self.expressions.append(
                ast::Expression::Binary { op, left, right },
                lexer.span_from(start),
            );
        }
        Ok(accumulator)
    }

    /// Declares a new local named `name`, reporting a redefinition error if
    /// the name is already bound in the current scope.
    ///
    /// Note the local is appended to the arena before the name check, so a
    /// rejected redefinition still occupies an arena slot (harmless, since
    /// the error aborts the parse).
    fn declare_local(&mut self, name: ast::Ident<'a>) -> Result<'a, Handle<ast::Local>> {
        let handle = self.locals.append(ast::Local, name.span);
        if let Some(old) = self.local_table.add(name.name, handle) {
            Err(Box::new(Error::Redefinition {
                previous: self.locals.get_span(old),
                current: name.span,
            }))
        } else {
            Ok(handle)
        }
    }
}
118
/// Grammar rules tracked on [`Parser::rules`].
///
/// Each active rule is pushed with its starting byte offset, so spans can be
/// computed when the rule is popped. Some rules (e.g. `GenericExpr` vs.
/// `EnclosedExpr`) are also consulted to disambiguate context-sensitive
/// tokens — see `Parser::race_rules`.
#[derive(Copy, Clone, Debug, PartialEq)]
enum Rule {
    Attribute,
    VariableDecl,
    FunctionDecl,
    Block,
    Statement,
    PrimaryExpr,
    SingularExpr,
    UnaryExpr,
    GeneralExpr,
    Directive,
    GenericExpr,
    EnclosedExpr,
    LhsExpr,
}
140
/// Holder for an attribute value that may be specified at most once.
struct ParsedAttribute<T> {
    // `None` until `set` records a value.
    value: Option<T>,
}
144
// Written by hand: `#[derive(Default)]` would add an unnecessary
// `T: Default` bound, but an unset attribute is `None` for any `T`.
impl<T> Default for ParsedAttribute<T> {
    fn default() -> Self {
        Self { value: None }
    }
}
150
151impl<T> ParsedAttribute<T> {
152 fn set(&mut self, value: T, name_span: Span) -> Result<'static, ()> {
153 if self.value.is_some() {
154 return Err(Box::new(Error::RepeatedAttribute(name_span)));
155 }
156 self.value = Some(value);
157 Ok(())
158 }
159}
160
/// Accumulates the IO-binding attributes (`@location`, `@builtin`,
/// `@interpolate`, …) seen on a single entity, for later validation by
/// [`BindingParser::finish`].
#[derive(Default)]
struct BindingParser<'a> {
    location: ParsedAttribute<Handle<ast::Expression<'a>>>,
    built_in: ParsedAttribute<crate::BuiltIn>,
    interpolation: ParsedAttribute<crate::Interpolation>,
    sampling: ParsedAttribute<crate::Sampling>,
    invariant: ParsedAttribute<bool>,
    blend_src: ParsedAttribute<Handle<ast::Expression<'a>>>,
    per_primitive: ParsedAttribute<()>,
}
171
impl<'a> BindingParser<'a> {
    /// Parses one binding attribute whose name (already consumed, spanning
    /// `name_span`) is `name`, including any parenthesized arguments, and
    /// records it in `self`.
    ///
    /// Repeated attributes are rejected via [`ParsedAttribute::set`];
    /// unknown names produce `Error::UnknownAttribute`. `blend_src` and
    /// `per_primitive` additionally require their `enable` extensions.
    fn parse(
        &mut self,
        parser: &mut Parser,
        lexer: &mut Lexer<'a>,
        name: &'a str,
        name_span: Span,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, ()> {
        match name {
            "location" => {
                lexer.expect(Token::Paren('('))?;
                self.location
                    .set(parser.expression(lexer, ctx)?, name_span)?;
                lexer.expect(Token::Paren(')'))?;
            }
            "builtin" => {
                lexer.expect(Token::Paren('('))?;
                let (raw, span) = lexer.next_ident_with_span()?;
                self.built_in.set(
                    conv::map_built_in(&lexer.enable_extensions, raw, span)?,
                    name_span,
                )?;
                lexer.expect(Token::Paren(')'))?;
            }
            "interpolate" => {
                lexer.expect(Token::Paren('('))?;
                let (raw, span) = lexer.next_ident_with_span()?;
                self.interpolation
                    .set(conv::map_interpolation(raw, span)?, name_span)?;
                // Optional second argument: the sampling mode.
                if lexer.next_if(Token::Separator(',')) {
                    let (raw, span) = lexer.next_ident_with_span()?;
                    self.sampling
                        .set(conv::map_sampling(raw, span)?, name_span)?;
                }
                lexer.expect(Token::Paren(')'))?;
            }

            // `invariant` takes no arguments.
            "invariant" => {
                self.invariant.set(true, name_span)?;
            }
            "blend_src" => {
                lexer.require_enable_extension(
                    ImplementedEnableExtension::DualSourceBlending,
                    name_span,
                )?;

                lexer.expect(Token::Paren('('))?;
                self.blend_src
                    .set(parser.expression(lexer, ctx)?, name_span)?;
                // Tolerate a trailing comma before the closing paren.
                lexer.next_if(Token::Separator(','));
                lexer.expect(Token::Paren(')'))?;
            }
            "per_primitive" => {
                lexer.require_enable_extension(
                    ImplementedEnableExtension::WgpuMeshShader,
                    name_span,
                )?;
                self.per_primitive.set((), name_span)?;
            }
            _ => return Err(Box::new(Error::UnknownAttribute(name_span))),
        }
        Ok(())
    }

    /// Validates the accumulated attribute combination and builds the final
    /// binding (spanning `span` for error reporting).
    ///
    /// Accepted combinations:
    /// * nothing set → `Ok(None)`;
    /// * `location` with optional interpolation/sampling/blend_src/
    ///   per_primitive (but not `invariant` or `builtin`);
    /// * `builtin(position)` with optional `invariant`;
    /// * any other single `builtin` alone.
    /// Everything else is `Error::InconsistentBinding`.
    fn finish(self, span: Span) -> Result<'a, Option<ast::Binding<'a>>> {
        match (
            self.location.value,
            self.built_in.value,
            self.interpolation.value,
            self.sampling.value,
            self.invariant.value.unwrap_or_default(),
            self.blend_src.value,
            self.per_primitive.value,
        ) {
            (None, None, None, None, false, None, None) => Ok(None),
            (Some(location), None, interpolation, sampling, false, blend_src, per_primitive) => {
                Ok(Some(ast::Binding::Location {
                    location,
                    interpolation,
                    sampling,
                    blend_src,
                    per_primitive: per_primitive.is_some(),
                }))
            }
            // `invariant` is only meaningful on the `position` builtin.
            (None, Some(crate::BuiltIn::Position { .. }), None, None, invariant, None, None) => {
                Ok(Some(ast::Binding::BuiltIn(crate::BuiltIn::Position {
                    invariant,
                })))
            }
            (None, Some(built_in), None, None, false, None, None) => {
                Ok(Some(ast::Binding::BuiltIn(built_in)))
            }
            (_, _, _, _, _, _, _) => Err(Box::new(Error::InconsistentBinding(span))),
        }
    }
}
273
/// Configuration options for the WGSL front end.
pub struct Options {
    /// Whether doc comments are collected while parsing. Disabled by
    /// [`Options::new`].
    pub parse_doc_comments: bool,
    /// Capabilities the module is allowed to use. [`Options::new`] permits
    /// all of them.
    pub capabilities: crate::valid::Capabilities,
}
281
impl Options {
    /// Returns the default options: doc-comment parsing disabled and all
    /// capabilities enabled.
    pub const fn new() -> Self {
        Options {
            parse_doc_comments: false,
            capabilities: crate::valid::Capabilities::all(),
        }
    }
}
291
/// Recursive-descent parser for WGSL source.
///
/// Holds only cross-call bookkeeping: the stack of active grammar rules
/// (each with its starting byte offset, for span construction) and the
/// recursion depth guarded by [`Parser::track_recursion`].
pub struct Parser {
    rules: Vec<(Rule, usize)>,
    recursion_depth: u32,
}
296
297impl Parser {
298 pub const fn new() -> Self {
299 Parser {
300 rules: Vec::new(),
301 recursion_depth: 0,
302 }
303 }
304
305 fn reset(&mut self) {
306 self.rules.clear();
307 self.recursion_depth = 0;
308 }
309
    /// Pushes `rule` onto the rule stack, recording the lexer's current byte
    /// offset as the start of the rule's span.
    fn push_rule_span(&mut self, rule: Rule, lexer: &mut Lexer<'_>) {
        self.rules.push((rule, lexer.start_byte_offset()));
    }
313
    /// Pops the innermost rule and returns its span, from where it was
    /// pushed to the lexer's current position.
    ///
    /// Panics if the rule stack is empty — push/pop calls must be balanced.
    fn pop_rule_span(&mut self, lexer: &Lexer<'_>) -> Span {
        let (_, initial) = self.rules.pop().unwrap();
        lexer.span_from(initial)
    }
318
319 fn peek_rule_span(&mut self, lexer: &Lexer<'_>) -> Span {
320 let &(_, initial) = self.rules.last().unwrap();
321 lexer.span_from(initial)
322 }
323
324 fn race_rules(&self, rule0: Rule, rule1: Rule) -> Option<Rule> {
325 Some(
326 self.rules
327 .iter()
328 .rev()
329 .find(|&x| x.0 == rule0 || x.0 == rule1)?
330 .0,
331 )
332 }
333
    /// Runs `f` while enforcing a bound on total parser recursion depth.
    ///
    /// Deeply nested source could otherwise overflow the call stack; a depth
    /// of 256 or more is rejected with an internal error. NOTE(review): the
    /// depth counter is not decremented on the limit-error path — presumably
    /// fine because the error aborts the parse and `reset()` clears it, but
    /// worth confirming against the entry point.
    fn track_recursion<'a, F, R>(&mut self, f: F) -> Result<'a, R>
    where
        F: FnOnce(&mut Self) -> Result<'a, R>,
    {
        self.recursion_depth += 1;
        if self.recursion_depth >= 256 {
            return Err(Box::new(Error::Internal("Parser recursion limit exceeded")));
        }
        let ret = f(self);
        self.recursion_depth -= 1;
        ret
    }
346
347 fn switch_value<'a>(
348 &mut self,
349 lexer: &mut Lexer<'a>,
350 ctx: &mut ExpressionContext<'a, '_, '_>,
351 ) -> Result<'a, ast::SwitchValue<'a>> {
352 if lexer.next_if(Token::Word("default")) {
353 return Ok(ast::SwitchValue::Default);
354 }
355
356 let expr = self.expression(lexer, ctx)?;
357 Ok(ast::SwitchValue::Expr(expr))
358 }
359
    /// Parses a parenthesized, comma-separated argument list, consuming both
    /// the opening and closing parentheses. Handles the empty list.
    fn arguments<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Vec<Handle<ast::Expression<'a>>>> {
        self.push_rule_span(Rule::EnclosedExpr, lexer);
        lexer.open_arguments()?;
        let mut arguments = Vec::new();
        loop {
            if !arguments.is_empty() {
                // After the first argument, `next_argument` consumes the
                // separator/terminator and reports whether more follow.
                if !lexer.next_argument()? {
                    break;
                }
            } else if lexer.next_if(Token::Paren(')')) {
                // Empty argument list: `()`.
                break;
            }
            let arg = self.expression(lexer, ctx)?;
            arguments.push(arg);
        }

        self.pop_rule_span(lexer);
        Ok(arguments)
    }
384
385 fn enclosed_expression<'a>(
386 &mut self,
387 lexer: &mut Lexer<'a>,
388 ctx: &mut ExpressionContext<'a, '_, '_>,
389 ) -> Result<'a, Handle<ast::Expression<'a>>> {
390 self.push_rule_span(Rule::EnclosedExpr, lexer);
391 let expr = self.expression(lexer, ctx)?;
392 self.pop_rule_span(lexer);
393 Ok(expr)
394 }
395
396 fn ident_expr<'a>(
397 &mut self,
398 name: &'a str,
399 name_span: Span,
400 ctx: &mut ExpressionContext<'a, '_, '_>,
401 ) -> ast::IdentExpr<'a> {
402 match ctx.local_table.lookup(name) {
403 Some(&local) => ast::IdentExpr::Local(local),
404 None => {
405 ctx.unresolved.insert(ast::Dependency {
406 ident: name,
407 usage: name_span,
408 });
409 ast::IdentExpr::Unresolved(name)
410 }
411 }
412 }
413
    /// Parses a primary expression whose first token, `token`, the caller
    /// has already consumed: a parenthesized expression, a boolean/number
    /// literal, one of the predeclared ray-tracing constants, or a
    /// (possibly templated) identifier that may begin a call.
    fn primary_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
        token: TokenSpan<'a>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.push_rule_span(Rule::PrimaryExpr, lexer);

        // Helpers that turn the predeclared ray-tracing constants into
        // `u32` literal expressions.
        const fn literal_ray_flag<'b>(flag: crate::RayFlag) -> ast::Expression<'b> {
            ast::Expression::Literal(ast::Literal::Number(Number::U32(flag.bits())))
        }
        const fn literal_ray_intersection<'b>(
            intersection: crate::RayQueryIntersection,
        ) -> ast::Expression<'b> {
            ast::Expression::Literal(ast::Literal::Number(Number::U32(intersection as u32)))
        }

        let expr = match token {
            (Token::Paren('('), _) => {
                // Parenthesized subexpression: return its handle directly
                // instead of wrapping it in a new node.
                let expr = self.enclosed_expression(lexer, ctx)?;
                lexer.expect(Token::Paren(')'))?;
                self.pop_rule_span(lexer);
                return Ok(expr);
            }
            (Token::Word("true"), _) => ast::Expression::Literal(ast::Literal::Bool(true)),
            (Token::Word("false"), _) => ast::Expression::Literal(ast::Literal::Bool(false)),
            (Token::Number(res), span) => {
                let num = res.map_err(|err| Error::BadNumber(span, err))?;

                // Some literal forms are gated behind an `enable` extension.
                if let Some(enable_extension) = num.requires_enable_extension() {
                    lexer.require_enable_extension(enable_extension, span)?;
                }

                ast::Expression::Literal(ast::Literal::Number(num))
            }
            (Token::Word("RAY_FLAG_NONE"), _) => literal_ray_flag(crate::RayFlag::empty()),
            (Token::Word("RAY_FLAG_FORCE_OPAQUE"), _) => {
                literal_ray_flag(crate::RayFlag::FORCE_OPAQUE)
            }
            (Token::Word("RAY_FLAG_FORCE_NO_OPAQUE"), _) => {
                literal_ray_flag(crate::RayFlag::FORCE_NO_OPAQUE)
            }
            (Token::Word("RAY_FLAG_TERMINATE_ON_FIRST_HIT"), _) => {
                literal_ray_flag(crate::RayFlag::TERMINATE_ON_FIRST_HIT)
            }
            (Token::Word("RAY_FLAG_SKIP_CLOSEST_HIT_SHADER"), _) => {
                literal_ray_flag(crate::RayFlag::SKIP_CLOSEST_HIT_SHADER)
            }
            (Token::Word("RAY_FLAG_CULL_BACK_FACING"), _) => {
                literal_ray_flag(crate::RayFlag::CULL_BACK_FACING)
            }
            (Token::Word("RAY_FLAG_CULL_FRONT_FACING"), _) => {
                literal_ray_flag(crate::RayFlag::CULL_FRONT_FACING)
            }
            (Token::Word("RAY_FLAG_CULL_OPAQUE"), _) => {
                literal_ray_flag(crate::RayFlag::CULL_OPAQUE)
            }
            (Token::Word("RAY_FLAG_CULL_NO_OPAQUE"), _) => {
                literal_ray_flag(crate::RayFlag::CULL_NO_OPAQUE)
            }
            (Token::Word("RAY_FLAG_SKIP_TRIANGLES"), _) => {
                literal_ray_flag(crate::RayFlag::SKIP_TRIANGLES)
            }
            (Token::Word("RAY_FLAG_SKIP_AABBS"), _) => literal_ray_flag(crate::RayFlag::SKIP_AABBS),
            (Token::Word("RAY_QUERY_INTERSECTION_NONE"), _) => {
                literal_ray_intersection(crate::RayQueryIntersection::None)
            }
            (Token::Word("RAY_QUERY_INTERSECTION_TRIANGLE"), _) => {
                literal_ray_intersection(crate::RayQueryIntersection::Triangle)
            }
            (Token::Word("RAY_QUERY_INTERSECTION_GENERATED"), _) => {
                literal_ray_intersection(crate::RayQueryIntersection::Generated)
            }
            (Token::Word("RAY_QUERY_INTERSECTION_AABB"), _) => {
                literal_ray_intersection(crate::RayQueryIntersection::Aabb)
            }
            (Token::Word(word), span) => {
                let ident = self.template_elaborated_ident(word, span, lexer, ctx)?;

                // An identifier followed by `(` is a call phrase; otherwise
                // it is a plain identifier expression.
                if let Token::Paren('(') = lexer.peek().0 {
                    let arguments = self.arguments(lexer, ctx)?;
                    ast::Expression::Call(ast::CallPhrase {
                        function: ident,
                        arguments,
                    })
                } else {
                    ast::Expression::Ident(ident)
                }
            }
            other => {
                return Err(Box::new(Error::Unexpected(
                    other.1,
                    ExpectedToken::PrimaryExpression,
                )))
            }
        };

        self.pop_rule_span(lexer);
        let span = lexer.span_with_start(token.1);
        let expr = ctx.expressions.append(expr, span);
        Ok(expr)
    }
516
    /// Parses any postfix accessors following an already-parsed expression:
    /// member access (`.field`) and indexing (`[expr]`), repeated any number
    /// of times. Each accessor wraps `expr` in a `Member`/`Index` node whose
    /// span runs from `expr_start` to the lexer's current position.
    fn component_or_swizzle_specifier<'a>(
        &mut self,
        expr_start: Span,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
        expr: Handle<ast::Expression<'a>>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        let mut expr = expr;

        loop {
            let expression = match lexer.peek().0 {
                Token::Separator('.') => {
                    let _ = lexer.next();
                    let field = lexer.next_ident()?;

                    ast::Expression::Member { base: expr, field }
                }
                Token::Paren('[') => {
                    let _ = lexer.next();
                    let index = self.enclosed_expression(lexer, ctx)?;
                    lexer.expect(Token::Paren(']'))?;

                    ast::Expression::Index { base: expr, index }
                }
                // No further accessor: the chain is complete.
                _ => break,
            };

            let span = lexer.span_with_start(expr_start);
            expr = ctx.expressions.append(expression, span);
        }

        Ok(expr)
    }
550
    /// Parses a unary expression: a run of prefix operators (`-`, `!`, `~`,
    /// `*`, `&`) followed by a singular expression.
    ///
    /// Prefix operators are first collected into a stack, then applied in
    /// reverse (innermost first) so that each resulting node spans from its
    /// own operator token to the end of the operand.
    fn unary_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.push_rule_span(Rule::UnaryExpr, lexer);

        enum UnaryOp {
            Negate,
            LogicalNot,
            BitwiseNot,
            Deref,
            AddrOf,
        }

        let mut ops = Vec::new();
        let mut expr;

        loop {
            match lexer.next() {
                (Token::Operation('-'), span) => {
                    ops.push((UnaryOp::Negate, span));
                }
                (Token::Operation('!'), span) => {
                    ops.push((UnaryOp::LogicalNot, span));
                }
                (Token::Operation('~'), span) => {
                    ops.push((UnaryOp::BitwiseNot, span));
                }
                (Token::Operation('*'), span) => {
                    ops.push((UnaryOp::Deref, span));
                }
                (Token::Operation('&'), span) => {
                    ops.push((UnaryOp::AddrOf, span));
                }
                // First non-operator token begins the operand.
                token => {
                    expr = self.singular_expression(lexer, ctx, token)?;
                    break;
                }
            };
        }

        // Apply collected operators innermost-first.
        for (op, span) in ops.into_iter().rev() {
            let e = match op {
                UnaryOp::Negate => ast::Expression::Unary {
                    op: crate::UnaryOperator::Negate,
                    expr,
                },
                UnaryOp::LogicalNot => ast::Expression::Unary {
                    op: crate::UnaryOperator::LogicalNot,
                    expr,
                },
                UnaryOp::BitwiseNot => ast::Expression::Unary {
                    op: crate::UnaryOperator::BitwiseNot,
                    expr,
                },
                UnaryOp::Deref => ast::Expression::Deref(expr),
                UnaryOp::AddrOf => ast::Expression::AddrOf(expr),
            };
            let span = lexer.span_with_start(span);
            expr = ctx.expressions.append(e, span);
        }

        self.pop_rule_span(lexer);
        Ok(expr)
    }
618
    /// Parses the left-hand side of an assignment: `*`/`&` prefixes, a
    /// parenthesized LHS expression, or an identifier — each optionally
    /// followed by component/index accessors.
    ///
    /// `token` is the first token if the caller already consumed it;
    /// otherwise the next token is taken from the lexer. `expected_token`
    /// customizes the error reported when the token cannot start an LHS
    /// expression. Recursion through nested `*`/`&`/`(` is depth-limited via
    /// `track_recursion`.
    fn lhs_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
        token: Option<TokenSpan<'a>>,
        expected_token: ExpectedToken<'a>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.track_recursion(|this| {
            this.push_rule_span(Rule::LhsExpr, lexer);
            let token = token.unwrap_or_else(|| lexer.next());
            let expr = match token {
                (Token::Operation('*'), _) => {
                    let expr =
                        this.lhs_expression(lexer, ctx, None, ExpectedToken::LhsExpression)?;
                    let expr = ast::Expression::Deref(expr);
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                (Token::Operation('&'), _) => {
                    let expr =
                        this.lhs_expression(lexer, ctx, None, ExpectedToken::LhsExpression)?;
                    let expr = ast::Expression::AddrOf(expr);
                    let span = this.peek_rule_span(lexer);
                    ctx.expressions.append(expr, span)
                }
                (Token::Paren('('), span) => {
                    let expr =
                        this.lhs_expression(lexer, ctx, None, ExpectedToken::LhsExpression)?;
                    lexer.expect(Token::Paren(')'))?;
                    this.component_or_swizzle_specifier(span, lexer, ctx, expr)?
                }
                (Token::Word(word), span) => {
                    // LHS identifiers never take template lists, so the
                    // ident is built directly with an empty one.
                    let ident = this.ident_expr(word, span, ctx);
                    let ident = ast::TemplateElaboratedIdent {
                        ident,
                        ident_span: span,
                        template_list: Vec::new(),
                        template_list_span: Span::UNDEFINED,
                    };
                    let ident = ctx.expressions.append(ast::Expression::Ident(ident), span);
                    this.component_or_swizzle_specifier(span, lexer, ctx, ident)?
                }
                (_, span) => {
                    return Err(Box::new(Error::Unexpected(span, expected_token)));
                }
            };

            this.pop_rule_span(lexer);
            Ok(expr)
        })
    }
674
675 fn singular_expression<'a>(
677 &mut self,
678 lexer: &mut Lexer<'a>,
679 ctx: &mut ExpressionContext<'a, '_, '_>,
680 token: TokenSpan<'a>,
681 ) -> Result<'a, Handle<ast::Expression<'a>>> {
682 self.push_rule_span(Rule::SingularExpr, lexer);
683 let primary_expr = self.primary_expression(lexer, ctx, token)?;
684 let singular_expr =
685 self.component_or_swizzle_specifier(token.1, lexer, ctx, primary_expr)?;
686 self.pop_rule_span(lexer);
687
688 Ok(singular_expr)
689 }
690
    /// Parses the middle precedence levels: equality (`==`/`!=`), then
    /// relational, shift, additive, and multiplicative operators, bottoming
    /// out at unary expressions.
    ///
    /// When the innermost enclosing rule is a template list
    /// (`Rule::GenericExpr`) rather than an explicitly parenthesized
    /// expression, the accepted relational and shift tokens are restricted
    /// so that `<`/`>` can instead delimit the template list.
    fn equality_expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        context: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        context.parse_binary_op(
            lexer,
            |token| match token {
                Token::LogicalOperation('=') => Some(crate::BinaryOperator::Equal),
                Token::LogicalOperation('!') => Some(crate::BinaryOperator::NotEqual),
                _ => None,
            },
            |lexer, context| {
                // Relational level; which tokens count as operators depends
                // on whether we are inside a template list.
                let enclosing = self.race_rules(Rule::GenericExpr, Rule::EnclosedExpr);
                context.parse_binary_op(
                    lexer,
                    match enclosing {
                        // Inside a template list only `<=` is accepted here;
                        // bare `<`/`>` would be ambiguous with the brackets.
                        Some(Rule::GenericExpr) => |token| match token {
                            Token::LogicalOperation('<') => Some(crate::BinaryOperator::LessEqual),
                            _ => None,
                        },
                        _ => |token| match token {
                            Token::Paren('<') => Some(crate::BinaryOperator::Less),
                            Token::Paren('>') => Some(crate::BinaryOperator::Greater),
                            Token::LogicalOperation('<') => Some(crate::BinaryOperator::LessEqual),
                            Token::LogicalOperation('>') => {
                                Some(crate::BinaryOperator::GreaterEqual)
                            }
                            _ => None,
                        },
                    },
                    |lexer, context| {
                        // Shift level; `>>` is likewise excluded inside a
                        // template list.
                        context.parse_binary_op(
                            lexer,
                            match enclosing {
                                Some(Rule::GenericExpr) => |token| match token {
                                    Token::ShiftOperation('<') => {
                                        Some(crate::BinaryOperator::ShiftLeft)
                                    }
                                    _ => None,
                                },
                                _ => |token| match token {
                                    Token::ShiftOperation('<') => {
                                        Some(crate::BinaryOperator::ShiftLeft)
                                    }
                                    Token::ShiftOperation('>') => {
                                        Some(crate::BinaryOperator::ShiftRight)
                                    }
                                    _ => None,
                                },
                            },
                            |lexer, context| {
                                // Additive level.
                                context.parse_binary_op(
                                    lexer,
                                    |token| match token {
                                        Token::Operation('+') => Some(crate::BinaryOperator::Add),
                                        Token::Operation('-') => {
                                            Some(crate::BinaryOperator::Subtract)
                                        }
                                        _ => None,
                                    },
                                    |lexer, context| {
                                        // Multiplicative level.
                                        context.parse_binary_op(
                                            lexer,
                                            |token| match token {
                                                Token::Operation('*') => {
                                                    Some(crate::BinaryOperator::Multiply)
                                                }
                                                Token::Operation('/') => {
                                                    Some(crate::BinaryOperator::Divide)
                                                }
                                                Token::Operation('%') => {
                                                    Some(crate::BinaryOperator::Modulo)
                                                }
                                                _ => None,
                                            },
                                            |lexer, context| self.unary_expression(lexer, context),
                                        )
                                    },
                                )
                            },
                        )
                    },
                )
            },
        )
    }
783
    /// Parses a complete expression, handling the low-precedence binary
    /// levels in order — `||`, `&&`, `|`, `^`, `&` — before delegating to
    /// [`Self::equality_expression`] for the remaining levels.
    fn expression<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        context: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Handle<ast::Expression<'a>>> {
        self.push_rule_span(Rule::GeneralExpr, lexer);
        // Logical-or level.
        let handle = context.parse_binary_op(
            lexer,
            |token| match token {
                Token::LogicalOperation('|') => Some(crate::BinaryOperator::LogicalOr),
                _ => None,
            },
            |lexer, context| {
                // Logical-and level.
                context.parse_binary_op(
                    lexer,
                    |token| match token {
                        Token::LogicalOperation('&') => Some(crate::BinaryOperator::LogicalAnd),
                        _ => None,
                    },
                    |lexer, context| {
                        // Bitwise-or level.
                        context.parse_binary_op(
                            lexer,
                            |token| match token {
                                Token::Operation('|') => Some(crate::BinaryOperator::InclusiveOr),
                                _ => None,
                            },
                            |lexer, context| {
                                // Bitwise-xor level.
                                context.parse_binary_op(
                                    lexer,
                                    |token| match token {
                                        Token::Operation('^') => {
                                            Some(crate::BinaryOperator::ExclusiveOr)
                                        }
                                        _ => None,
                                    },
                                    |lexer, context| {
                                        // Bitwise-and level.
                                        context.parse_binary_op(
                                            lexer,
                                            |token| match token {
                                                Token::Operation('&') => {
                                                    Some(crate::BinaryOperator::And)
                                                }
                                                _ => None,
                                            },
                                            |lexer, context| {
                                                self.equality_expression(lexer, context)
                                            },
                                        )
                                    },
                                )
                            },
                        )
                    },
                )
            },
        )?;
        self.pop_rule_span(lexer);
        Ok(handle)
    }
848
849 fn optionally_typed_ident<'a>(
850 &mut self,
851 lexer: &mut Lexer<'a>,
852 ctx: &mut ExpressionContext<'a, '_, '_>,
853 ) -> Result<'a, (ast::Ident<'a>, Option<ast::TemplateElaboratedIdent<'a>>)> {
854 let name = lexer.next_ident()?;
855
856 let ty = if lexer.next_if(Token::Separator(':')) {
857 Some(self.type_specifier(lexer, ctx)?)
858 } else {
859 None
860 };
861
862 Ok((name, ty))
863 }
864
    /// Parses the remainder of a global `var` declaration: an optional
    /// template argument list, the optionally-typed name, an optional `=`
    /// initializer, and the terminating `;`.
    ///
    /// The resource binding is left as `None` and doc comments empty; both
    /// are filled in by the caller if present.
    fn variable_decl<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, ast::GlobalVariable<'a>> {
        self.push_rule_span(Rule::VariableDecl, lexer);
        let (template_list, _) = self.maybe_template_list(lexer, ctx)?;
        let (name, ty) = self.optionally_typed_ident(lexer, ctx)?;

        let init = if lexer.next_if(Token::Operation('=')) {
            let handle = self.expression(lexer, ctx)?;
            Some(handle)
        } else {
            None
        };
        lexer.expect(Token::Separator(';'))?;
        self.pop_rule_span(lexer);

        Ok(ast::GlobalVariable {
            name,
            template_list,
            binding: None,
            ty,
            init,
            doc_comments: Vec::new(),
        })
    }
893
    /// Parses a struct body `{ member, … }` starting at the opening brace:
    /// per-member doc comments, `@size`/`@align`/binding attributes, name,
    /// and type. Rejects members not separated by commas and duplicate
    /// member names.
    fn struct_body<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, Vec<ast::StructMember<'a>>> {
        let mut members = Vec::new();
        // Names seen so far, for duplicate detection.
        let mut member_names = FastHashSet::default();

        lexer.expect(Token::Paren('{'))?;
        // `ready` is true when the next member may legally start (at the
        // beginning, or after a comma).
        let mut ready = true;
        while !lexer.next_if(Token::Paren('}')) {
            if !ready {
                return Err(Box::new(Error::Unexpected(
                    lexer.next().1,
                    ExpectedToken::Token(Token::Separator(',')),
                )));
            }

            let doc_comments = lexer.accumulate_doc_comments();

            // `@size`/`@align` are handled here; all other attributes go
            // through the shared binding parser.
            let (mut size, mut align) = (ParsedAttribute::default(), ParsedAttribute::default());
            self.push_rule_span(Rule::Attribute, lexer);
            let mut bind_parser = BindingParser::default();
            while lexer.next_if(Token::Attribute) {
                match lexer.next_ident_with_span()? {
                    ("size", name_span) => {
                        lexer.expect(Token::Paren('('))?;
                        let expr = self.expression(lexer, ctx)?;
                        lexer.expect(Token::Paren(')'))?;
                        size.set(expr, name_span)?;
                    }
                    ("align", name_span) => {
                        lexer.expect(Token::Paren('('))?;
                        let expr = self.expression(lexer, ctx)?;
                        lexer.expect(Token::Paren(')'))?;
                        align.set(expr, name_span)?;
                    }
                    (word, word_span) => bind_parser.parse(self, lexer, word, word_span, ctx)?,
                }
            }

            let bind_span = self.pop_rule_span(lexer);
            let binding = bind_parser.finish(bind_span)?;

            let name = lexer.next_ident()?;
            lexer.expect(Token::Separator(':'))?;
            let ty = self.type_specifier(lexer, ctx)?;
            ready = lexer.next_if(Token::Separator(','));

            members.push(ast::StructMember {
                name,
                ty,
                binding,
                size: size.value,
                align: align.value,
                doc_comments,
            });

            // Duplicate check runs after the push; `find` still reports the
            // earlier occurrence's span because it scans front-to-back.
            if !member_names.insert(name.name) {
                return Err(Box::new(Error::Redefinition {
                    previous: members
                        .iter()
                        .find(|x| x.name.name == name.name)
                        .map(|x| x.name.span)
                        .unwrap(),
                    current: name.span,
                }));
            }
        }

        Ok(members)
    }
966
    /// Parses a template argument list `<expr, …>` if one begins here,
    /// returning the arguments and their span; otherwise returns an empty
    /// list with `Span::UNDEFINED` and consumes nothing.
    ///
    /// A trailing comma before the closing bracket is permitted (the loop
    /// stops early when the closing token follows a comma).
    fn maybe_template_list<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
    ) -> Result<'a, (Vec<Handle<ast::Expression<'a>>>, Span)> {
        let start = lexer.start_byte_offset();
        if lexer.next_if(Token::TemplateArgsStart) {
            let mut args = Vec::new();
            args.push(self.expression(lexer, ctx)?);
            while lexer.next_if(Token::Separator(',')) && lexer.peek().0 != Token::TemplateArgsEnd {
                args.push(self.expression(lexer, ctx)?);
            }
            lexer.expect(Token::TemplateArgsEnd)?;
            let span = lexer.span_from(start);
            Ok((args, span))
        } else {
            Ok((Vec::new(), Span::UNDEFINED))
        }
    }
986
987 fn template_elaborated_ident<'a>(
988 &mut self,
989 word: &'a str,
990 span: Span,
991 lexer: &mut Lexer<'a>,
992 ctx: &mut ExpressionContext<'a, '_, '_>,
993 ) -> Result<'a, ast::TemplateElaboratedIdent<'a>> {
994 let ident = self.ident_expr(word, span, ctx);
995 let (template_list, template_list_span) = self.maybe_template_list(lexer, ctx)?;
996 Ok(ast::TemplateElaboratedIdent {
997 ident,
998 ident_span: span,
999 template_list,
1000 template_list_span,
1001 })
1002 }
1003
1004 fn type_specifier<'a>(
1005 &mut self,
1006 lexer: &mut Lexer<'a>,
1007 ctx: &mut ExpressionContext<'a, '_, '_>,
1008 ) -> Result<'a, ast::TemplateElaboratedIdent<'a>> {
1009 let (name, span) = lexer.next_ident_with_span()?;
1010 self.template_elaborated_ident(name, span, lexer, ctx)
1011 }
1012
1013 fn variable_updating_statement<'a>(
1018 &mut self,
1019 lexer: &mut Lexer<'a>,
1020 ctx: &mut ExpressionContext<'a, '_, '_>,
1021 block: &mut ast::Block<'a>,
1022 token: TokenSpan<'a>,
1023 expected_token: ExpectedToken<'a>,
1024 ) -> Result<'a, ()> {
1025 match token {
1026 (Token::Word("_"), span) => {
1027 lexer.expect(Token::Operation('='))?;
1028 let expr = self.expression(lexer, ctx)?;
1029 let span = lexer.span_with_start(span);
1030 block.stmts.push(ast::Statement {
1031 kind: ast::StatementKind::Phony(expr),
1032 span,
1033 });
1034 return Ok(());
1035 }
1036 _ => {}
1037 }
1038 let target = self.lhs_expression(lexer, ctx, Some(token), expected_token)?;
1039
1040 let (op, value) = match lexer.next() {
1041 (Token::Operation('='), _) => {
1042 let value = self.expression(lexer, ctx)?;
1043 (None, value)
1044 }
1045 (Token::AssignmentOperation(c), _) => {
1046 use crate::BinaryOperator as Bo;
1047 let op = match c {
1048 '<' => Bo::ShiftLeft,
1049 '>' => Bo::ShiftRight,
1050 '+' => Bo::Add,
1051 '-' => Bo::Subtract,
1052 '*' => Bo::Multiply,
1053 '/' => Bo::Divide,
1054 '%' => Bo::Modulo,
1055 '&' => Bo::And,
1056 '|' => Bo::InclusiveOr,
1057 '^' => Bo::ExclusiveOr,
1058 _ => unreachable!(),
1060 };
1061
1062 let value = self.expression(lexer, ctx)?;
1063 (Some(op), value)
1064 }
1065 op_token @ (Token::IncrementOperation | Token::DecrementOperation, _) => {
1066 let op = match op_token.0 {
1067 Token::IncrementOperation => ast::StatementKind::Increment,
1068 Token::DecrementOperation => ast::StatementKind::Decrement,
1069 _ => unreachable!(),
1070 };
1071
1072 let span = lexer.span_with_start(token.1);
1073 block.stmts.push(ast::Statement {
1074 kind: op(target),
1075 span,
1076 });
1077 return Ok(());
1078 }
1079 (_, span) => return Err(Box::new(Error::Unexpected(span, ExpectedToken::Assignment))),
1080 };
1081
1082 let span = lexer.span_with_start(token.1);
1083 block.stmts.push(ast::Statement {
1084 kind: ast::StatementKind::Assign { target, op, value },
1085 span,
1086 });
1087 Ok(())
1088 }
1089
    /// Attempts to parse a function-call statement beginning at the
    /// already-consumed `token`, returning `Ok(false)` (with nothing pushed
    /// to `block`) when this is not a call.
    ///
    /// An identifier counts as a call when it carries a template list or is
    /// followed by `(`. NOTE(review): on the `false` path the identifier has
    /// already been passed through `template_elaborated_ident`, which
    /// records it as a dependency — presumably harmless because the caller
    /// re-parses it as an LHS; confirm against the caller.
    fn maybe_func_call_statement<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        context: &mut ExpressionContext<'a, '_, '_>,
        block: &mut ast::Block<'a>,
        token: TokenSpan<'a>,
    ) -> Result<'a, bool> {
        let (name, name_span) = match token {
            (Token::Word(name), span) => (name, span),
            _ => return Ok(false),
        };
        let ident = self.template_elaborated_ident(name, name_span, lexer, context)?;
        if ident.template_list.is_empty() && !matches!(lexer.peek(), (Token::Paren('('), _)) {
            return Ok(false);
        }

        self.push_rule_span(Rule::SingularExpr, lexer);

        let arguments = self.arguments(lexer, context)?;
        let span = lexer.span_with_start(name_span);

        block.stmts.push(ast::Statement {
            kind: ast::StatementKind::Call(ast::CallPhrase {
                function: ident,
                arguments,
            }),
            span,
        });

        self.pop_rule_span(lexer);

        Ok(true)
    }
1129
1130 fn func_call_or_variable_updating_statement<'a>(
1135 &mut self,
1136 lexer: &mut Lexer<'a>,
1137 context: &mut ExpressionContext<'a, '_, '_>,
1138 block: &mut ast::Block<'a>,
1139 token: TokenSpan<'a>,
1140 expected_token: ExpectedToken<'a>,
1141 ) -> Result<'a, ()> {
1142 if !self.maybe_func_call_statement(lexer, context, block, token)? {
1143 self.variable_updating_statement(lexer, context, block, token, expected_token)?;
1144 }
1145 Ok(())
1146 }
1147
    /// Parses a statement beginning at the already-consumed `token` that is
    /// either a local declaration (`let`, `const`, `var`) or — when `token`
    /// is none of those keywords — a function call or variable-updating
    /// statement (delegated).
    ///
    /// Local declarations call `declare_local` only after parsing the
    /// initializer, so a declaration's name is not in scope within its own
    /// initializer.
    fn variable_or_value_or_func_call_or_variable_updating_statement<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
        block: &mut ast::Block<'a>,
        token: TokenSpan<'a>,
        expected_token: ExpectedToken<'a>,
    ) -> Result<'a, ()> {
        let local_decl = match token {
            (Token::Word("let"), _) => {
                let (name, given_ty) = self.optionally_typed_ident(lexer, ctx)?;

                // `let` requires an initializer.
                lexer.expect(Token::Operation('='))?;
                let expr_id = self.expression(lexer, ctx)?;

                let handle = ctx.declare_local(name)?;
                ast::LocalDecl::Let(ast::Let {
                    name,
                    ty: given_ty,
                    init: expr_id,
                    handle,
                })
            }
            (Token::Word("const"), _) => {
                let (name, given_ty) = self.optionally_typed_ident(lexer, ctx)?;

                // `const` likewise requires an initializer.
                lexer.expect(Token::Operation('='))?;
                let expr_id = self.expression(lexer, ctx)?;

                let handle = ctx.declare_local(name)?;
                ast::LocalDecl::Const(ast::LocalConst {
                    name,
                    ty: given_ty,
                    init: expr_id,
                    handle,
                })
            }
            (Token::Word("var"), _) => {
                // A local `var` may only name the `function` address space
                // in its template list.
                if lexer.next_if(Token::TemplateArgsStart) {
                    let (class_str, span) = lexer.next_ident_with_span()?;
                    if class_str != "function" {
                        return Err(Box::new(Error::InvalidLocalVariableAddressSpace(span)));
                    }
                    lexer.expect(Token::TemplateArgsEnd)?;
                }

                let (name, ty) = self.optionally_typed_ident(lexer, ctx)?;

                // Unlike `let`/`const`, the initializer is optional.
                let init = if lexer.next_if(Token::Operation('=')) {
                    let init = self.expression(lexer, ctx)?;
                    Some(init)
                } else {
                    None
                };

                let handle = ctx.declare_local(name)?;
                ast::LocalDecl::Var(ast::LocalVariable {
                    name,
                    ty,
                    init,
                    handle,
                })
            }
            // Not a declaration keyword: hand off to the call/assignment
            // statement parser, which pushes its own statement.
            token => {
                return self.func_call_or_variable_updating_statement(
                    lexer,
                    ctx,
                    block,
                    token,
                    expected_token,
                );
            }
        };

        let span = lexer.span_with_start(token.1);
        block.stmts.push(ast::Statement {
            kind: ast::StatementKind::LocalDecl(local_decl),
            span,
        });

        Ok(())
    }
1237
    /// Parses a single WGSL statement and appends it to `block`.
    ///
    /// Dispatches on the upcoming token: a compound statement (`{` or a
    /// leading attribute), the empty statement `;`, one of the statement
    /// keywords (`return`, `if`, `switch`, `loop`, `while`, `for`, `break`,
    /// `continue`, `discard`, `const_assert`), or — as a fallback — a
    /// variable/value declaration or a function-call/assignment statement.
    ///
    /// Runs under `track_recursion` because statements nest arbitrarily
    /// deep through blocks and control-flow bodies.
    fn statement<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
        block: &mut ast::Block<'a>,
        brace_nesting_level: u8,
    ) -> Result<'a, ()> {
        self.track_recursion(|this| {
            this.push_rule_span(Rule::Statement, lexer);

            // Compound statement: `{ ... }`, possibly preceded by
            // attributes, which `block` itself validates.
            match lexer.peek() {
                (token, _) if is_start_of_compound_statement(token) => {
                    let (inner, span) = this.block(lexer, ctx, brace_nesting_level)?;
                    block.stmts.push(ast::Statement {
                        kind: ast::StatementKind::Block(inner),
                        span,
                    });
                    this.pop_rule_span(lexer);
                    return Ok(());
                }
                _ => {}
            }

            let kind = match lexer.next() {
                // Empty statement: consumes the `;` and produces no AST node.
                (Token::Separator(';'), _) => {
                    this.pop_rule_span(lexer);
                    return Ok(());
                }
                (Token::Word("return"), _) => {
                    // A return value is present unless `;` follows directly.
                    let value = if lexer.peek().0 != Token::Separator(';') {
                        let handle = this.expression(lexer, ctx)?;
                        Some(handle)
                    } else {
                        None
                    };
                    lexer.expect(Token::Separator(';'))?;
                    ast::StatementKind::Return { value }
                }
                (Token::Word("if"), _) => {
                    let condition = this.expression(lexer, ctx)?;

                    let accept = this.block(lexer, ctx, brace_nesting_level)?.0;

                    // `else if` chains are parsed iteratively (bounding
                    // recursion depth) and collected on a stack; the loop
                    // breaks with the final `else` block (or an empty one).
                    let mut elsif_stack = Vec::new();
                    let mut elseif_span_start = lexer.start_byte_offset();
                    let mut reject = loop {
                        if !lexer.next_if(Token::Word("else")) {
                            break ast::Block::default();
                        }

                        if !lexer.next_if(Token::Word("if")) {
                            break this.block(lexer, ctx, brace_nesting_level)?.0;
                        }

                        let other_condition = this.expression(lexer, ctx)?;
                        let other_block = this.block(lexer, ctx, brace_nesting_level)?;
                        elsif_stack.push((elseif_span_start, other_condition, other_block));
                        elseif_span_start = lexer.start_byte_offset();
                    };

                    // Fold the collected `else if`s right-to-left, nesting
                    // each one as the `reject` branch of the previous `If`.
                    for (other_span_start, other_cond, other_block) in elsif_stack.into_iter().rev()
                    {
                        let sub_stmt = ast::StatementKind::If {
                            condition: other_cond,
                            accept: other_block.0,
                            reject,
                        };
                        reject = ast::Block::default();
                        let span = lexer.span_from(other_span_start);
                        reject.stmts.push(ast::Statement {
                            kind: sub_stmt,
                            span,
                        })
                    }

                    ast::StatementKind::If {
                        condition,
                        accept,
                        reject,
                    }
                }
                (Token::Word("switch"), _) => {
                    let selector = this.expression(lexer, ctx)?;
                    let brace_span = lexer.expect_span(Token::Paren('{'))?;
                    let brace_nesting_level =
                        Self::increase_brace_nesting(brace_nesting_level, brace_span)?;
                    let mut cases = Vec::new();

                    loop {
                        match lexer.next() {
                            (Token::Word("case"), _) => {
                                // A case may list several comma-separated
                                // selectors; all but the last are recorded as
                                // empty fall-through cases sharing the body.
                                let value = loop {
                                    let value = this.switch_value(lexer, ctx)?;
                                    if lexer.next_if(Token::Separator(',')) {
                                        let next_token = lexer.peek().0;
                                        // A trailing comma is allowed before
                                        // `:` or the case body.
                                        if next_token == Token::Separator(':')
                                            || is_start_of_compound_statement(next_token)
                                        {
                                            break value;
                                        }
                                    } else {
                                        break value;
                                    }
                                    cases.push(ast::SwitchCase {
                                        value,
                                        body: ast::Block::default(),
                                        fall_through: true,
                                    });
                                };

                                // The `:` after the selector list is optional.
                                lexer.next_if(Token::Separator(':'));

                                let body = this.block(lexer, ctx, brace_nesting_level)?.0;

                                cases.push(ast::SwitchCase {
                                    value,
                                    body,
                                    fall_through: false,
                                });
                            }
                            (Token::Word("default"), _) => {
                                lexer.next_if(Token::Separator(':'));
                                let body = this.block(lexer, ctx, brace_nesting_level)?.0;
                                cases.push(ast::SwitchCase {
                                    value: ast::SwitchValue::Default,
                                    body,
                                    fall_through: false,
                                });
                            }
                            (Token::Paren('}'), _) => break,
                            (_, span) => {
                                return Err(Box::new(Error::Unexpected(
                                    span,
                                    ExpectedToken::SwitchItem,
                                )))
                            }
                        }
                    }

                    ast::StatementKind::Switch { selector, cases }
                }
                (Token::Word("loop"), _) => this.r#loop(lexer, ctx, brace_nesting_level)?,
                (Token::Word("while"), _) => {
                    // `while cond { ... }` is desugared to
                    // `loop { if cond {} else { break; } { ... } }`.
                    let mut body = ast::Block::default();

                    let (condition, span) =
                        lexer.capture_span(|lexer| this.expression(lexer, ctx))?;
                    let mut reject = ast::Block::default();
                    reject.stmts.push(ast::Statement {
                        kind: ast::StatementKind::Break,
                        span,
                    });

                    body.stmts.push(ast::Statement {
                        kind: ast::StatementKind::If {
                            condition,
                            accept: ast::Block::default(),
                            reject,
                        },
                        span,
                    });

                    let (block, span) = this.block(lexer, ctx, brace_nesting_level)?;
                    body.stmts.push(ast::Statement {
                        kind: ast::StatementKind::Block(block),
                        span,
                    });

                    ast::StatementKind::Loop {
                        body,
                        continuing: ast::Block::default(),
                        break_if: None,
                    }
                }
                (Token::Word("for"), _) => {
                    // `for (init; cond; update) { ... }` is desugared to a
                    // `loop`: the init statement goes into the *enclosing*
                    // `block`, the condition becomes a leading break guard,
                    // and the update goes into the `continuing` block.
                    lexer.expect(Token::Paren('('))?;

                    // The init's declarations are scoped to the `for`.
                    ctx.local_table.push_scope();

                    if !lexer.next_if(Token::Separator(';')) {
                        let token = lexer.next();
                        this.variable_or_value_or_func_call_or_variable_updating_statement(
                            lexer,
                            ctx,
                            block,
                            token,
                            ExpectedToken::ForInit,
                        )?;
                        lexer.expect(Token::Separator(';'))?;
                    };

                    let mut body = ast::Block::default();
                    if !lexer.next_if(Token::Separator(';')) {
                        let (condition, span) = lexer.capture_span(|lexer| -> Result<'_, _> {
                            let condition = this.expression(lexer, ctx)?;
                            lexer.expect(Token::Separator(';'))?;
                            Ok(condition)
                        })?;
                        let mut reject = ast::Block::default();
                        reject.stmts.push(ast::Statement {
                            kind: ast::StatementKind::Break,
                            span,
                        });
                        body.stmts.push(ast::Statement {
                            kind: ast::StatementKind::If {
                                condition,
                                accept: ast::Block::default(),
                                reject,
                            },
                            span,
                        });
                    };

                    let mut continuing = ast::Block::default();
                    if !lexer.next_if(Token::Paren(')')) {
                        let token = lexer.next();
                        this.func_call_or_variable_updating_statement(
                            lexer,
                            ctx,
                            &mut continuing,
                            token,
                            ExpectedToken::ForUpdate,
                        )?;
                        lexer.expect(Token::Paren(')'))?;
                    }

                    let (block, span) = this.block(lexer, ctx, brace_nesting_level)?;
                    body.stmts.push(ast::Statement {
                        kind: ast::StatementKind::Block(block),
                        span,
                    });

                    ctx.local_table.pop_scope();

                    ast::StatementKind::Loop {
                        body,
                        continuing,
                        break_if: None,
                    }
                }
                (Token::Word("break"), span) => {
                    let (peeked_token, peeked_span) = lexer.peek();
                    // `break if` is only valid inside a `continuing` block,
                    // which `r#loop` handles; here it is a hard error.
                    if let Token::Word("if") = peeked_token {
                        let span = span.until(&peeked_span);
                        return Err(Box::new(Error::InvalidBreakIf(span)));
                    }
                    lexer.expect(Token::Separator(';'))?;
                    ast::StatementKind::Break
                }
                (Token::Word("continue"), _) => {
                    lexer.expect(Token::Separator(';'))?;
                    ast::StatementKind::Continue
                }
                (Token::Word("discard"), _) => {
                    lexer.expect(Token::Separator(';'))?;
                    ast::StatementKind::Kill
                }
                (Token::Word("const_assert"), _) => {
                    // Parentheses around the condition are optional.
                    let paren = lexer.next_if(Token::Paren('('));

                    let condition = this.expression(lexer, ctx)?;

                    if paren {
                        lexer.expect(Token::Paren(')'))?;
                    }
                    lexer.expect(Token::Separator(';'))?;
                    ast::StatementKind::ConstAssert(condition)
                }
                // Anything else: declarations, assignments, or call
                // statements; these push into `block` themselves.
                token => {
                    this.variable_or_value_or_func_call_or_variable_updating_statement(
                        lexer,
                        ctx,
                        block,
                        token,
                        ExpectedToken::Statement,
                    )?;
                    lexer.expect(Token::Separator(';'))?;
                    this.pop_rule_span(lexer);
                    return Ok(());
                }
            };

            let span = this.pop_rule_span(lexer);
            block.stmts.push(ast::Statement { kind, span });

            Ok(())
        })
    }
1543
    /// Parses the body of a `loop` statement (the `loop` keyword has
    /// already been consumed by `statement`).
    ///
    /// Handles the optional trailing `continuing { ... }` block, which may
    /// itself end with a `break if <expr>;` clause; that condition is
    /// recorded in `break_if` rather than as a statement.
    fn r#loop<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
        brace_nesting_level: u8,
    ) -> Result<'a, ast::StatementKind<'a>> {
        let mut body = ast::Block::default();
        let mut continuing = ast::Block::default();
        let mut break_if = None;

        let brace_span = lexer.expect_span(Token::Paren('{'))?;
        let brace_nesting_level = Self::increase_brace_nesting(brace_nesting_level, brace_span)?;

        // Locals declared in the loop body are visible in `continuing`,
        // so one scope covers both.
        ctx.local_table.push_scope();

        loop {
            if lexer.next_if(Token::Word("continuing")) {
                let brace_span = lexer.expect_span(Token::Paren('{'))?;
                let brace_nesting_level =
                    Self::increase_brace_nesting(brace_nesting_level, brace_span)?;
                loop {
                    if lexer.next_if(Token::Word("break")) {
                        // Inside `continuing`, `break` must be `break if`,
                        // and it must be the last item of the block.
                        lexer.expect(Token::Word("if"))?;

                        let condition = self.expression(lexer, ctx)?;
                        break_if = Some(condition);

                        lexer.expect(Token::Separator(';'))?;
                        // `break if ...;` closes the `continuing` block.
                        lexer.expect(Token::Paren('}'))?;
                        break;
                    } else if lexer.next_if(Token::Paren('}')) {
                        break;
                    } else {
                        self.statement(lexer, ctx, &mut continuing, brace_nesting_level)?;
                    }
                }
                // A `continuing` block must be the last item of the loop,
                // so the loop's own `}` must follow immediately.
                lexer.expect(Token::Paren('}'))?;
                break;
            }
            if lexer.next_if(Token::Paren('}')) {
                break;
            }
            self.statement(lexer, ctx, &mut body, brace_nesting_level)?;
        }

        ctx.local_table.pop_scope();

        Ok(ast::StatementKind::Loop {
            body,
            continuing,
            break_if,
        })
    }
1621
    /// Parses a compound statement `{ stmt* }`, optionally preceded by
    /// `@diagnostic(...)` attributes, and returns the block plus its span.
    ///
    /// Pushes a new local scope for the block's duration. Diagnostic
    /// attributes are parsed and validated but currently rejected with
    /// `DiagnosticAttributeNotYetImplementedAtParseSite`.
    fn block<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        ctx: &mut ExpressionContext<'a, '_, '_>,
        brace_nesting_level: u8,
    ) -> Result<'a, (ast::Block<'a>, Span)> {
        self.push_rule_span(Rule::Block, lexer);

        ctx.local_table.push_scope();

        let mut diagnostic_filters = DiagnosticFilterMap::new();

        // Collect leading attributes; only `@diagnostic(...)` is valid on
        // a compound statement.
        self.push_rule_span(Rule::Attribute, lexer);
        while lexer.next_if(Token::Attribute) {
            let (name, name_span) = lexer.next_ident_with_span()?;
            if let Some(DirectiveKind::Diagnostic) = DirectiveKind::from_ident(name) {
                let filter = self.diagnostic_filter(lexer)?;
                let span = self.peek_rule_span(lexer);
                diagnostic_filters
                    .add(filter, span, ShouldConflictOnFullDuplicate::Yes)
                    .map_err(|e| Box::new(e.into()))?;
            } else {
                return Err(Box::new(Error::Unexpected(
                    name_span,
                    ExpectedToken::DiagnosticAttribute,
                )));
            }
        }
        self.pop_rule_span(lexer);

        // Diagnostic filters on compound statements parse but are not yet
        // supported further down the pipeline; report them as such.
        if !diagnostic_filters.is_empty() {
            return Err(Box::new(
                Error::DiagnosticAttributeNotYetImplementedAtParseSite {
                    site_name_plural: "compound statements",
                    spans: diagnostic_filters.spans().collect(),
                },
            ));
        }

        let brace_span = lexer.expect_span(Token::Paren('{'))?;
        let brace_nesting_level = Self::increase_brace_nesting(brace_nesting_level, brace_span)?;
        let mut block = ast::Block::default();
        while !lexer.next_if(Token::Paren('}')) {
            self.statement(lexer, ctx, &mut block, brace_nesting_level)?;
        }

        ctx.local_table.pop_scope();

        let span = self.pop_rule_span(lexer);
        Ok((block, span))
    }
1674
1675 fn varying_binding<'a>(
1676 &mut self,
1677 lexer: &mut Lexer<'a>,
1678 ctx: &mut ExpressionContext<'a, '_, '_>,
1679 ) -> Result<'a, Option<ast::Binding<'a>>> {
1680 let mut bind_parser = BindingParser::default();
1681 self.push_rule_span(Rule::Attribute, lexer);
1682
1683 while lexer.next_if(Token::Attribute) {
1684 let (word, span) = lexer.next_ident_with_span()?;
1685 bind_parser.parse(self, lexer, word, span, ctx)?;
1686 }
1687
1688 let span = self.pop_rule_span(lexer);
1689 bind_parser.finish(span)
1690 }
1691
    /// Parses a function declaration after the `fn` keyword: name,
    /// parameter list, optional `-> [attrs] type` result, and body.
    ///
    /// `must_use` carries the span of an `@must_use` attribute if one was
    /// parsed by the caller; it is an error on a function with no return
    /// type. `dependencies` collects identifiers the function references
    /// that are not locals (resolved later against module-scope decls).
    fn function_decl<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        diagnostic_filter_leaf: Option<Handle<DiagnosticFilterNode>>,
        must_use: Option<Span>,
        out: &mut ast::TranslationUnit<'a>,
        dependencies: &mut FastIndexSet<ast::Dependency<'a>>,
    ) -> Result<'a, ast::Function<'a>> {
        self.push_rule_span(Rule::FunctionDecl, lexer);
        let fun_name = lexer.next_ident()?;

        let mut locals = Arena::new();

        let mut ctx = ExpressionContext {
            expressions: &mut out.expressions,
            local_table: &mut SymbolTable::default(),
            locals: &mut locals,
            unresolved: dependencies,
        };

        ctx.local_table.push_scope();
        // NOTE(review): presumably restricts name lookup while parsing the
        // signature so parameters can't shadow in the wrong scope — confirm
        // against `SymbolTable::reduce_lookup_scope`'s docs.
        ctx.local_table.reduce_lookup_scope();

        // Parameter list: `(` [binding] name `:` type (`,` ...)* `)`.
        // `ready` tracks whether a separating comma was seen, so a missing
        // comma between parameters is reported precisely.
        let mut arguments = Vec::new();
        lexer.expect(Token::Paren('('))?;
        let mut ready = true;
        while !lexer.next_if(Token::Paren(')')) {
            if !ready {
                return Err(Box::new(Error::Unexpected(
                    lexer.next().1,
                    ExpectedToken::Token(Token::Separator(',')),
                )));
            }
            let binding = self.varying_binding(lexer, &mut ctx)?;

            let param_name = lexer.next_ident()?;

            lexer.expect(Token::Separator(':'))?;
            let param_type = self.type_specifier(lexer, &mut ctx)?;

            let handle = ctx.declare_local(param_name)?;
            arguments.push(ast::FunctionArgument {
                name: param_name,
                ty: param_type,
                binding,
                handle,
            });
            ready = lexer.next_if(Token::Separator(','));
        }
        // Optional result type: `-> [attrs] type`.
        let result = if lexer.next_if(Token::Arrow) {
            let binding = self.varying_binding(lexer, &mut ctx)?;
            let ty = self.type_specifier(lexer, &mut ctx)?;
            let must_use = must_use.is_some();
            Some(ast::FunctionResult {
                ty,
                binding,
                must_use,
            })
        } else if let Some(must_use) = must_use {
            // `@must_use` on a function that returns nothing is an error.
            return Err(Box::new(Error::FunctionMustUseReturnsVoid(
                must_use,
                self.peek_rule_span(lexer),
            )));
        } else {
            None
        };

        ctx.local_table.reset_lookup_scope();

        // Function body. The body's braces are consumed here rather than
        // via `block` so parameters share the body's scope.
        lexer.expect(Token::Paren('{'))?;
        let brace_nesting_level = 1;
        let mut body = ast::Block::default();
        while !lexer.next_if(Token::Paren('}')) {
            self.statement(lexer, &mut ctx, &mut body, brace_nesting_level)?;
        }

        ctx.local_table.pop_scope();

        // `entry_point` and `doc_comments` are filled in by the caller
        // (`global_decl`) when applicable.
        let fun = ast::Function {
            entry_point: None,
            name: fun_name,
            arguments,
            result,
            body,
            diagnostic_filter_leaf,
            doc_comments: Vec::new(),
        };

        self.pop_rule_span(lexer);

        Ok(fun)
    }
1792
1793 fn directive_ident_list<'a>(
1794 &self,
1795 lexer: &mut Lexer<'a>,
1796 handler: impl FnMut(&'a str, Span) -> Result<'a, ()>,
1797 ) -> Result<'a, ()> {
1798 let mut handler = handler;
1799 'next_arg: loop {
1800 let (ident, span) = lexer.next_ident_with_span()?;
1801 handler(ident, span)?;
1802
1803 let expected_token = match lexer.peek().0 {
1804 Token::Separator(',') => {
1805 let _ = lexer.next();
1806 if matches!(lexer.peek().0, Token::Word(..)) {
1807 continue 'next_arg;
1808 }
1809 ExpectedToken::AfterIdentListComma
1810 }
1811 _ => ExpectedToken::AfterIdentListArg,
1812 };
1813
1814 if !matches!(lexer.next().0, Token::Separator(';')) {
1815 return Err(Box::new(Error::Unexpected(span, expected_token)));
1816 }
1817
1818 break Ok(());
1819 }
1820 }
1821
    /// Parses one module-scope declaration (or directive error) and appends
    /// it to `out.decls`.
    ///
    /// First collects any leading attributes (`@group`, `@binding`, shader
    /// stage markers, `@workgroup_size`, `@must_use`, `@diagnostic`, ...),
    /// then dispatches on the declaration keyword (`struct`, `alias`,
    /// `const`, `override`, `var`, `fn`, `const_assert`). Attributes that
    /// only make sense on a particular declaration kind are checked after
    /// the declaration has been identified.
    fn global_decl<'a>(
        &mut self,
        lexer: &mut Lexer<'a>,
        out: &mut ast::TranslationUnit<'a>,
    ) -> Result<'a, ()> {
        let doc_comments = lexer.accumulate_doc_comments();

        // Accumulators for the attributes that may precede a declaration.
        // `ParsedAttribute::set` rejects duplicates of the same attribute.
        let mut binding = None;
        let mut stage = ParsedAttribute::default();
        // Span used for "missing workgroup_size / incoming payload" errors;
        // points at the stage attribute that imposed the requirement.
        let mut shader_stage_error_span = Span::new(0, 0);
        let mut workgroup_size = ParsedAttribute::default();
        let mut early_depth_test = ParsedAttribute::default();
        let (mut bind_index, mut bind_group) =
            (ParsedAttribute::default(), ParsedAttribute::default());
        let mut id = ParsedAttribute::default();
        let mut payload = ParsedAttribute::default();
        let mut incoming_payload = ParsedAttribute::default();
        let mut mesh_output = ParsedAttribute::default();

        let mut must_use: ParsedAttribute<Span> = ParsedAttribute::default();

        let mut dependencies = FastIndexSet::default();
        let mut ctx = ExpressionContext {
            expressions: &mut out.expressions,
            local_table: &mut SymbolTable::default(),
            locals: &mut Arena::new(),
            unresolved: &mut dependencies,
        };
        let mut diagnostic_filters = DiagnosticFilterMap::new();
        // Rejects `@diagnostic(...)` on declarations that don't accept it.
        let ensure_no_diag_attrs = |on_what, filters: DiagnosticFilterMap| -> Result<()> {
            if filters.is_empty() {
                Ok(())
            } else {
                Err(Box::new(Error::DiagnosticAttributeNotSupported {
                    on_what,
                    spans: filters.spans().collect(),
                }))
            }
        };

        self.push_rule_span(Rule::Attribute, lexer);
        while lexer.next_if(Token::Attribute) {
            let (name, name_span) = lexer.next_ident_with_span()?;
            if let Some(DirectiveKind::Diagnostic) = DirectiveKind::from_ident(name) {
                let filter = self.diagnostic_filter(lexer)?;
                let span = self.peek_rule_span(lexer);
                diagnostic_filters
                    .add(filter, span, ShouldConflictOnFullDuplicate::Yes)
                    .map_err(|e| Box::new(e.into()))?;
                continue;
            }
            match name {
                "binding" => {
                    lexer.expect(Token::Paren('('))?;
                    bind_index.set(self.expression(lexer, &mut ctx)?, name_span)?;
                    lexer.expect(Token::Paren(')'))?;
                }
                "group" => {
                    lexer.expect(Token::Paren('('))?;
                    bind_group.set(self.expression(lexer, &mut ctx)?, name_span)?;
                    lexer.expect(Token::Paren(')'))?;
                }
                "id" => {
                    lexer.expect(Token::Paren('('))?;
                    id.set(self.expression(lexer, &mut ctx)?, name_span)?;
                    lexer.expect(Token::Paren(')'))?;
                }
                "vertex" => {
                    stage.set(ShaderStage::Vertex, name_span)?;
                }
                "fragment" => {
                    stage.set(ShaderStage::Fragment, name_span)?;
                }
                "compute" => {
                    stage.set(ShaderStage::Compute, name_span)?;
                    shader_stage_error_span = name_span;
                }
                // Task/mesh stages require the mesh-shader extension to be
                // enabled via an `enable` directive.
                "task" => {
                    lexer.require_enable_extension(
                        ImplementedEnableExtension::WgpuMeshShader,
                        name_span,
                    )?;
                    stage.set(ShaderStage::Task, name_span)?;
                    shader_stage_error_span = name_span;
                }
                "mesh" => {
                    lexer.require_enable_extension(
                        ImplementedEnableExtension::WgpuMeshShader,
                        name_span,
                    )?;
                    stage.set(ShaderStage::Mesh, name_span)?;
                    shader_stage_error_span = name_span;

                    // `@mesh(output)` names the mesh output variable.
                    lexer.expect(Token::Paren('('))?;
                    mesh_output.set(lexer.next_ident_with_span()?, name_span)?;
                    lexer.expect(Token::Paren(')'))?;
                }
                // Ray-tracing stages require the ray-tracing-pipeline
                // extension.
                "ray_generation" => {
                    lexer.require_enable_extension(
                        ImplementedEnableExtension::WgpuRayTracingPipeline,
                        name_span,
                    )?;
                    stage.set(ShaderStage::RayGeneration, name_span)?;
                    shader_stage_error_span = name_span;
                }
                "any_hit" => {
                    lexer.require_enable_extension(
                        ImplementedEnableExtension::WgpuRayTracingPipeline,
                        name_span,
                    )?;
                    stage.set(ShaderStage::AnyHit, name_span)?;
                    shader_stage_error_span = name_span;
                }
                "closest_hit" => {
                    lexer.require_enable_extension(
                        ImplementedEnableExtension::WgpuRayTracingPipeline,
                        name_span,
                    )?;
                    stage.set(ShaderStage::ClosestHit, name_span)?;
                    shader_stage_error_span = name_span;
                }
                "miss" => {
                    lexer.require_enable_extension(
                        ImplementedEnableExtension::WgpuRayTracingPipeline,
                        name_span,
                    )?;
                    stage.set(ShaderStage::Miss, name_span)?;
                    shader_stage_error_span = name_span;
                }
                "payload" => {
                    lexer.require_enable_extension(
                        ImplementedEnableExtension::WgpuMeshShader,
                        name_span,
                    )?;
                    lexer.expect(Token::Paren('('))?;
                    payload.set(lexer.next_ident_with_span()?, name_span)?;
                    lexer.expect(Token::Paren(')'))?;
                }
                "incoming_payload" => {
                    lexer.require_enable_extension(
                        ImplementedEnableExtension::WgpuRayTracingPipeline,
                        name_span,
                    )?;
                    lexer.expect(Token::Paren('('))?;
                    incoming_payload.set(lexer.next_ident_with_span()?, name_span)?;
                    lexer.expect(Token::Paren(')'))?;
                }
                "workgroup_size" => {
                    lexer.expect(Token::Paren('('))?;
                    // One to three dimension expressions; unspecified
                    // dimensions stay `None`.
                    let mut new_workgroup_size = [None; 3];
                    for (i, size) in new_workgroup_size.iter_mut().enumerate() {
                        *size = Some(self.expression(lexer, &mut ctx)?);
                        match lexer.next() {
                            (Token::Paren(')'), _) => break,
                            (Token::Separator(','), _) if i != 2 => (),
                            other => {
                                return Err(Box::new(Error::Unexpected(
                                    other.1,
                                    ExpectedToken::WorkgroupSizeSeparator,
                                )))
                            }
                        }
                    }
                    workgroup_size.set(new_workgroup_size, name_span)?;
                }
                "early_depth_test" => {
                    lexer.expect(Token::Paren('('))?;
                    let (ident, ident_span) = lexer.next_ident_with_span()?;
                    // `force`, or a conservative-depth mode mapped by `conv`.
                    let value = if ident == "force" {
                        crate::EarlyDepthTest::Force
                    } else {
                        crate::EarlyDepthTest::Allow {
                            conservative: conv::map_conservative_depth(ident, ident_span)?,
                        }
                    };
                    lexer.expect(Token::Paren(')'))?;
                    early_depth_test.set(value, name_span)?;
                }
                "must_use" => {
                    must_use.set(name_span, name_span)?;
                }
                _ => return Err(Box::new(Error::UnknownAttribute(name_span))),
            }
        }

        // `@group` and `@binding` must appear together.
        let attrib_span = self.pop_rule_span(lexer);
        match (bind_group.value, bind_index.value) {
            (Some(group), Some(index)) => {
                binding = Some(ast::ResourceBinding {
                    group,
                    binding: index,
                });
            }
            (Some(_), None) => {
                return Err(Box::new(Error::MissingAttribute("binding", attrib_span)))
            }
            (None, Some(_)) => return Err(Box::new(Error::MissingAttribute("group", attrib_span))),
            (None, None) => {}
        }

        let start = lexer.start_byte_offset();
        let kind = match lexer.next() {
            // A bare `;` with only diagnostic attributes is rejected; an
            // unattributed `;` yields no declaration.
            (Token::Separator(';'), _) => {
                ensure_no_diag_attrs(
                    DiagnosticAttributeNotSupportedPosition::SemicolonInModulePosition,
                    diagnostic_filters,
                )?;
                None
            }
            // Directives (`enable`, `requires`, `diagnostic`) must precede
            // all global declarations.
            (Token::Word(word), directive_span) if DirectiveKind::from_ident(word).is_some() => {
                return Err(Box::new(Error::DirectiveAfterFirstGlobalDecl {
                    directive_span,
                }));
            }
            (Token::Word("struct"), _) => {
                ensure_no_diag_attrs("`struct`s".into(), diagnostic_filters)?;

                let name = lexer.next_ident()?;

                let members = self.struct_body(lexer, &mut ctx)?;

                Some(ast::GlobalDeclKind::Struct(ast::Struct {
                    name,
                    members,
                    doc_comments,
                }))
            }
            (Token::Word("alias"), _) => {
                ensure_no_diag_attrs("`alias`es".into(), diagnostic_filters)?;

                let name = lexer.next_ident()?;

                lexer.expect(Token::Operation('='))?;
                let ty = self.type_specifier(lexer, &mut ctx)?;
                lexer.expect(Token::Separator(';'))?;
                Some(ast::GlobalDeclKind::Type(ast::TypeAlias { name, ty }))
            }
            (Token::Word("const"), _) => {
                ensure_no_diag_attrs("`const`s".into(), diagnostic_filters)?;

                let (name, ty) = self.optionally_typed_ident(lexer, &mut ctx)?;

                lexer.expect(Token::Operation('='))?;
                let init = self.expression(lexer, &mut ctx)?;
                lexer.expect(Token::Separator(';'))?;

                Some(ast::GlobalDeclKind::Const(ast::Const {
                    name,
                    ty,
                    init,
                    doc_comments,
                }))
            }
            (Token::Word("override"), _) => {
                ensure_no_diag_attrs("`override`s".into(), diagnostic_filters)?;

                let (name, ty) = self.optionally_typed_ident(lexer, &mut ctx)?;

                // The initializer is optional for overrides.
                let init = if lexer.next_if(Token::Operation('=')) {
                    Some(self.expression(lexer, &mut ctx)?)
                } else {
                    None
                };

                lexer.expect(Token::Separator(';'))?;

                Some(ast::GlobalDeclKind::Override(ast::Override {
                    name,
                    id: id.value,
                    ty,
                    init,
                }))
            }
            (Token::Word("var"), _) => {
                ensure_no_diag_attrs("`var`s".into(), diagnostic_filters)?;

                let mut var = self.variable_decl(lexer, &mut ctx)?;
                // `take` marks the @group/@binding pair as consumed; the
                // check at the end of this function relies on that.
                var.binding = binding.take();
                var.doc_comments = doc_comments;
                Some(ast::GlobalDeclKind::Var(var))
            }
            (Token::Word("fn"), _) => {
                // `fn` is the one declaration that accepts diagnostic
                // filters; chain them onto the module's filter list.
                let diagnostic_filter_leaf = Self::write_diagnostic_filters(
                    &mut out.diagnostic_filters,
                    diagnostic_filters,
                    out.diagnostic_filter_leaf,
                );

                let function = self.function_decl(
                    lexer,
                    diagnostic_filter_leaf,
                    must_use.value,
                    out,
                    &mut dependencies,
                )?;
                Some(ast::GlobalDeclKind::Fn(ast::Function {
                    entry_point: if let Some(stage) = stage.value {
                        // Stages that dispatch in workgroups must carry an
                        // explicit @workgroup_size.
                        if stage.compute_like() && workgroup_size.value.is_none() {
                            return Err(Box::new(Error::MissingWorkgroupSize(
                                shader_stage_error_span,
                            )));
                        }

                        // Hit/miss stages must declare an incoming payload.
                        match stage {
                            ShaderStage::AnyHit | ShaderStage::ClosestHit | ShaderStage::Miss => {
                                if incoming_payload.value.is_none() {
                                    return Err(Box::new(Error::MissingIncomingPayload(
                                        shader_stage_error_span,
                                    )));
                                }
                            }
                            _ => {}
                        }

                        Some(ast::EntryPoint {
                            stage,
                            early_depth_test: early_depth_test.value,
                            workgroup_size: workgroup_size.value,
                            mesh_output_variable: mesh_output.value,
                            task_payload: payload.value,
                            ray_incoming_payload: incoming_payload.value,
                        })
                    } else {
                        None
                    },
                    doc_comments,
                    ..function
                }))
            }
            (Token::Word("const_assert"), _) => {
                ensure_no_diag_attrs("`const_assert`s".into(), diagnostic_filters)?;

                // Parentheses around the condition are optional.
                let paren = lexer.next_if(Token::Paren('('));

                let condition = self.expression(lexer, &mut ctx)?;

                if paren {
                    lexer.expect(Token::Paren(')'))?;
                }
                lexer.expect(Token::Separator(';'))?;
                Some(ast::GlobalDeclKind::ConstAssert(condition))
            }
            (Token::End, _) => return Ok(()),
            other => {
                return Err(Box::new(Error::Unexpected(
                    other.1,
                    ExpectedToken::GlobalItem,
                )))
            }
        };

        if let Some(kind) = kind {
            out.decls.append(
                ast::GlobalDecl { kind, dependencies },
                lexer.span_from(start),
            );
        }

        // Internal sanity check: every pushed rule span must have been
        // popped by the time a global declaration completes.
        if !self.rules.is_empty() {
            log::error!("Reached the end of global decl, but rule stack is not empty");
            log::error!("Rules: {:?}", self.rules);
            return Err(Box::new(Error::Internal("rule stack is not empty")));
        };

        // A surviving `binding` means @group/@binding were parsed but no
        // `var` consumed them.
        match binding {
            None => Ok(()),
            Some(_) => Err(Box::new(Error::Internal(
                "we had the attribute but no var?",
            ))),
        }
    }
2200
    /// Parses a complete WGSL source string into a translation unit.
    ///
    /// Processes the directive preamble first (`diagnostic`, `enable`,
    /// `requires` — all of which must precede the first global
    /// declaration), then repeatedly parses global declarations until the
    /// end of input.
    pub fn parse<'a>(
        &mut self,
        source: &'a str,
        options: &Options,
    ) -> Result<'a, ast::TranslationUnit<'a>> {
        self.reset();

        let mut lexer = Lexer::new(source, !options.parse_doc_comments);
        let mut tu = ast::TranslationUnit::default();
        let mut enable_extensions = EnableExtensions::empty();
        let mut diagnostic_filters = DiagnosticFilterMap::new();

        tu.doc_comments = lexer.accumulate_module_doc_comments();

        // Directive preamble: consume leading directive words until the
        // first non-directive token.
        while let (Token::Word(word), _) = lexer.peek() {
            if let Some(kind) = DirectiveKind::from_ident(word) {
                self.push_rule_span(Rule::Directive, &mut lexer);
                let _ = lexer.next_ident_with_span().unwrap();
                match kind {
                    DirectiveKind::Diagnostic => {
                        let diagnostic_filter = self.diagnostic_filter(&mut lexer)?;
                        let span = self.peek_rule_span(&lexer);
                        // Module-level filters tolerate exact duplicates
                        // (`ShouldConflictOnFullDuplicate::No`).
                        diagnostic_filters
                            .add(diagnostic_filter, span, ShouldConflictOnFullDuplicate::No)
                            .map_err(|e| Box::new(e.into()))?;
                        lexer.expect(Token::Separator(';'))?;
                    }
                    DirectiveKind::Enable => {
                        self.directive_ident_list(&mut lexer, |ident, span| {
                            let kind = EnableExtension::from_ident(ident, span)?;
                            let extension = match kind {
                                EnableExtension::Implemented(kind) => kind,
                                EnableExtension::Unimplemented(kind) => {
                                    return Err(Box::new(Error::EnableExtensionNotYetImplemented {
                                        kind,
                                        span,
                                    }))
                                }
                            };
                            // The extension must also be backed by a
                            // capability the caller opted into.
                            let required_capability = extension.capability();
                            if !options.capabilities.contains(required_capability) {
                                return Err(Box::new(Error::EnableExtensionNotSupported {
                                    kind,
                                    span,
                                }));
                            }
                            enable_extensions.add(extension);
                            Ok(())
                        })?;
                    }
                    DirectiveKind::Requires => {
                        // Implemented language extensions are accepted and
                        // otherwise ignored; unimplemented/unknown ones error.
                        self.directive_ident_list(&mut lexer, |ident, span| {
                            match LanguageExtension::from_ident(ident) {
                                Some(LanguageExtension::Implemented(_kind)) => {
                                    Ok(())
                                }
                                Some(LanguageExtension::Unimplemented(kind)) => {
                                    Err(Box::new(Error::LanguageExtensionNotYetImplemented {
                                        kind,
                                        span,
                                    }))
                                }
                                None => Err(Box::new(Error::UnknownLanguageExtension(span, ident))),
                            }
                        })?;
                    }
                }
                self.pop_rule_span(&lexer);
            } else {
                break;
            }
        }

        // Make the gathered extensions visible to the lexer (which gates
        // extension-dependent tokens/attributes) and record them in the TU.
        lexer.enable_extensions = enable_extensions;
        tu.enable_extensions = enable_extensions;
        tu.diagnostic_filter_leaf =
            Self::write_diagnostic_filters(&mut tu.diagnostic_filters, diagnostic_filters, None);

        loop {
            match self.global_decl(&mut lexer, &mut tu) {
                Err(error) => return Err(error),
                Ok(()) => {
                    if lexer.peek().0 == Token::End {
                        break;
                    }
                }
            }
        }

        Ok(tu)
    }
2299
2300 fn increase_brace_nesting(brace_nesting_level: u8, brace_span: Span) -> Result<'static, u8> {
2301 const BRACE_NESTING_MAXIMUM: u8 = 127;
2309 if brace_nesting_level + 1 > BRACE_NESTING_MAXIMUM {
2310 return Err(Box::new(Error::ExceededLimitForNestedBraces {
2311 span: brace_span,
2312 limit: BRACE_NESTING_MAXIMUM,
2313 }));
2314 }
2315 Ok(brace_nesting_level + 1)
2316 }
2317
    /// Parses the argument list of a `diagnostic(...)` attribute or
    /// directive: `(severity, rule_name)` where `rule_name` is either a
    /// standard rule, a dotted user rule (`prefix.name`), or an unknown
    /// name (accepted with a warning). A trailing comma is permitted.
    fn diagnostic_filter<'a>(&self, lexer: &mut Lexer<'a>) -> Result<'a, DiagnosticFilter> {
        lexer.expect(Token::Paren('('))?;

        let (severity_control_name, severity_control_name_span) = lexer.next_ident_with_span()?;
        let new_severity = diagnostic_filter::Severity::from_wgsl_ident(severity_control_name)
            .ok_or(Error::DiagnosticInvalidSeverity {
                severity_control_name_span,
            })?;

        lexer.expect(Token::Separator(','))?;

        let (diagnostic_name_token, diagnostic_name_token_span) = lexer.next_ident_with_span()?;
        let triggering_rule = if lexer.next_if(Token::Separator('.')) {
            // Dotted form: `prefix.name` names a user-defined rule.
            let (ident, _span) = lexer.next_ident_with_span()?;
            FilterableTriggeringRule::User(Box::new([diagnostic_name_token.into(), ident.into()]))
        } else {
            let diagnostic_rule_name = diagnostic_name_token;
            let diagnostic_rule_name_span = diagnostic_name_token_span;
            if let Some(triggering_rule) =
                StandardFilterableTriggeringRule::from_wgsl_ident(diagnostic_rule_name)
            {
                FilterableTriggeringRule::Standard(triggering_rule)
            } else {
                // Unknown rule names are not an error: emit a warning-level
                // diagnostic (which may itself escalate to an error
                // depending on reporting) and keep the name as `Unknown`.
                diagnostic_filter::Severity::Warning.report_wgsl_parse_diag(
                    Box::new(Error::UnknownDiagnosticRuleName(diagnostic_rule_name_span)),
                    lexer.source,
                )?;
                FilterableTriggeringRule::Unknown(diagnostic_rule_name.into())
            }
        };
        let filter = DiagnosticFilter {
            triggering_rule,
            new_severity,
        };
        // Optional trailing comma before the closing paren.
        lexer.next_if(Token::Separator(','));
        lexer.expect(Token::Paren(')'))?;

        Ok(filter)
    }
2357
2358 pub(crate) fn write_diagnostic_filters(
2359 arena: &mut Arena<DiagnosticFilterNode>,
2360 filters: DiagnosticFilterMap,
2361 parent: Option<Handle<DiagnosticFilterNode>>,
2362 ) -> Option<Handle<DiagnosticFilterNode>> {
2363 filters
2364 .into_iter()
2365 .fold(parent, |parent, (triggering_rule, (new_severity, span))| {
2366 Some(arena.append(
2367 DiagnosticFilterNode {
2368 inner: DiagnosticFilter {
2369 new_severity,
2370 triggering_rule,
2371 },
2372 parent,
2373 },
2374 span,
2375 ))
2376 })
2377 }
2378}
2379
2380const fn is_start_of_compound_statement<'a>(token: Token<'a>) -> bool {
2381 matches!(token, Token::Attribute | Token::Paren('{'))
2382}