pub(in front::wgsl) struct Lexer<'a> {
input: &'a str,
pub(in front::wgsl) source: &'a str,
last_end_offset: usize,
tokens: Vec<((Token<'a>, Span), &'a str)>,
ignore_doc_comments: bool,
pub(in front::wgsl) enable_extensions: EnableExtensions,
}

Fields
input: &'a str — The remaining unconsumed input.
source: &'a str — The full original source code.
We compare input against this to compute the lexer’s current offset in
the source.
last_end_offset: usize — The byte offset of the end of the most recently returned non-trivia token.
This is consulted by the span_from function to find the end of the
span for larger structures like expressions or statements.
tokens: Vec<((Token<'a>, Span), &'a str)> — A stack of unconsumed tokens to which template list discovery has been applied.
This is a stack: the next token is at the end of the vector, not the start. So tokens appear here in the reverse of the order they appear in the source.
This doesn’t contain the whole source, only those tokens produced by
discover_template_lists’s look-ahead, or that have been produced by
other look-ahead functions like peek and next_if. When this is empty,
we call discover_template_lists to get more.
ignore_doc_comments: bool — Whether or not to ignore doc comments.
If true, doc comments are treated as Token::Trivia.
enable_extensions: EnableExtensions — The set of enable-extensions present in the module, determined in a pre-pass.
Implementations

impl<'a> Lexer<'a>
pub(in front::wgsl) const fn new( input: &'a str, ignore_doc_comments: bool, ) -> Self
pub(in front::wgsl) fn require_enable_extension(
&self,
extension: ImplementedEnableExtension,
span: Span,
) -> Result<(), Box<Error<'static>>>
pub(in front::wgsl) fn require_enable_extension( &self, extension: ImplementedEnableExtension, span: Span, ) -> Result<(), Box<Error<'static>>>
Checks that `extension` is enabled in `self`.
pub fn capture_span<T, E>(
&mut self,
inner: impl FnOnce(&mut Self) -> Result<T, E>,
) -> Result<(T, Span), E>
pub fn capture_span<T, E>( &mut self, inner: impl FnOnce(&mut Self) -> Result<T, E>, ) -> Result<(T, Span), E>
pub(in front::wgsl) fn start_byte_offset( &mut self, ) -> usize
pub(in front::wgsl) fn accumulate_module_doc_comments(
&mut self,
) -> Vec<&'a str>
pub(in front::wgsl) fn accumulate_module_doc_comments( &mut self, ) -> Vec<&'a str>
Collect all module doc comments until a non-doc token is found.
pub(in front::wgsl) fn accumulate_doc_comments(
&mut self,
) -> Vec<&'a str>
pub(in front::wgsl) fn accumulate_doc_comments( &mut self, ) -> Vec<&'a str>
Collect all doc comments until a non-doc token is found.
const fn current_byte_offset(&self) -> usize
pub(in front::wgsl) fn span_from( &self, offset: usize, ) -> Span
pub(in front::wgsl) fn span_with_start( &self, span: Span, ) -> Span
pub(in front::wgsl) fn next(
&mut self,
) -> (Token<'a>, Span)
pub(in front::wgsl) fn next( &mut self, ) -> (Token<'a>, Span)
Return the next non-whitespace token from self.
Assumes we are in a parse state where bit shift operators may occur, but not angle brackets.
fn next_impl(&mut self, ignore_doc_comments: bool) -> (Token<'a>, Span)
fn next_impl(&mut self, ignore_doc_comments: bool) -> (Token<'a>, Span)
Return the next non-whitespace token from self, with a span.
pub(in front::wgsl) fn peek( &mut self, ) -> (Token<'a>, Span)
pub(in front::wgsl) fn next_if(
&mut self,
what: Token<'_>,
) -> bool
pub(in front::wgsl) fn next_if( &mut self, what: Token<'_>, ) -> bool
If the next token matches, it is consumed and `true` is returned.