// glsl-lexer/src/lex/lexer.rs

use crate::tokens::Token;
use std::sync::Arc;

/// A character-level lexer over GLSL source code.
pub struct Lexer {
    /// GLSL source as a sequence of characters
    pub input: Vec<char>,
    /// Position in source
    pub position: usize,
    /// [`char`] at the current position, `None` once the input is exhausted
    pub current_char: Option<char>,
}
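
/// Lexes a GLSL source string into a token stream in a single call.
///
/// A minimal illustrative doctest; it assumes `Lexer` is re-exported at the
/// crate root, as the `get_tokens` doc example suggests:
/// ```
/// let tokens = glsl_lexer::lex!("void main() {}");
/// assert!(!tokens.is_empty());
/// ```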
#[macro_export]
macro_rules! lex {
    ($source:expr) => {{
        $crate::Lexer::new($source).get_tokens()
    }};
}

impl Lexer {
    /// Creates a new [`Lexer`] over `input`, with `current_char` set to the
    /// first character (or `None` for empty input).
    pub fn new(input: &str) -> Self {
        let mut lexer = Lexer {
            input: input.chars().collect(),
            position: 0,
            current_char: None,
        };
        lexer.current_char = lexer.input.get(lexer.position).copied();
        lexer
    }

    /// Advances one character, updating `current_char` (`None` at end of input).
    pub fn advance(&mut self) {
        self.position += 1;
        self.current_char = self.input.get(self.position).copied();
    }

    /// Peeks at the next char in the source without incrementing `self.position`.
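    ///
    /// A minimal illustrative doctest, again assuming `Lexer` is re-exported
    /// at the crate root:
    /// ```
    /// let mut lexer = glsl_lexer::Lexer::new("ab");
    /// assert_eq!(lexer.current_char, Some('a'));
    /// assert_eq!(lexer.peek(), Some('b'));
    /// lexer.advance();
    /// assert_eq!(lexer.current_char, Some('b'));
    /// assert_eq!(lexer.peek(), None);
    /// ```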
    pub fn peek(&self) -> Option<char> {
        self.input.get(self.position + 1).copied()
    }

    /// Lexes the source the [`Lexer`] was initialized with and returns the tokens.
    ///
    /// # Example:
    /// ```
    /// use glsl_lexer::*;
    /// let source = r#"
    ///     #version 440
    ///     uniform float time;
    ///     void main() {
    ///         gl_FragColor = vec4(1.0, 0.5, 0.2, 1.0);
    ///     }
    /// "#;
    /// let mut lexer = glsl_lexer::Lexer::new(source);
    /// let tokens = lexer.get_tokens();
    /// dbg!(tokens);
    /// ```
    // We return `Arc<[Token]>` so callers can clone the token list cheaply:
    // cloning an `Arc` only bumps a reference count.
    pub fn get_tokens(&mut self) -> Arc<[Token]> {
        let mut tokens: Vec<Token> = Vec::new();
        while let Some(c) = self.current_char {
            if c.is_whitespace() {
                // Collapse a whole run of whitespace into a single token.
                self.consume_whitespace();
                tokens.push(Token::Whitespace);
            } else if c.is_alphabetic() || c == '_' {
                tokens.push(self.consume_identifier_or_keyword());
            } else if c.is_ascii_digit() {
                tokens.push(self.consume_number());
            } else if c == '/' && self.peek() == Some('/') {
                tokens.push(self.consume_comment());
            } else {
                match c {
                    // TODO Implement compound operators like `+=`
                    '+' | '-' | '*' | '/' | '%' | '&' | '|' | '^' | '!' | '=' | '<' | '>'
                    | '?' => {
                        tokens.push(self.consume_operator());
                    }
                    '{' | '}' | '(' | ')' | '#' | ',' | ';' => {
                        tokens.push(self.consume_symbol());
                    }
                    // A leading `.` can still start a number, e.g. `.5`.
                    '.' => {
                        tokens.push(self.consume_number());
                    }
                    _ => {
                        tokens.push(self.consume_unknown());
                    }
                }
            }
        }
        tokens.into()
    }
}
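
// The `consume_*` helpers called above are not defined in this file; presumably
// they live elsewhere in the `lex` module. As a hedged sketch of their expected
// shape, here is how `consume_whitespace` and `consume_comment` might look. The
// `Token::Comment(String)` variant is an assumption, not confirmed by this file.
impl Lexer {
    /// Advances past a run of whitespace characters.
    pub fn consume_whitespace(&mut self) {
        while let Some(c) = self.current_char {
            if !c.is_whitespace() {
                break;
            }
            self.advance();
        }
    }

    /// Consumes a `//` line comment up to (not including) the newline.
    pub fn consume_comment(&mut self) -> Token {
        let mut text = String::new();
        while let Some(c) = self.current_char {
            if c == '\n' {
                break;
            }
            text.push(c);
            self.advance();
        }
        // Hypothetical variant; adjust to the actual `Token` enum.
        Token::Comment(text)
    }
}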