Handlers for symbols and operators
All checks were successful
/ build (push) Successful in 1m14s
/ clippy (push) Successful in 1m13s

This commit is contained in:
xqtc 2024-07-25 15:23:20 +02:00
parent 483eb1885a
commit 9fcc8abe4c
5 changed files with 76 additions and 38 deletions

View file

@ -9,8 +9,9 @@ impl Lexer {
}
}
pub fn consume_unknown(&mut self) -> crate::tokens::Token {
todo!()
pub fn consume_unknown(&mut self, c: char) -> crate::tokens::Token {
self.advance();
crate::tokens::Token::Unknown(c.to_string())
}
pub fn consume_identifier_or_keyword(&mut self) -> crate::tokens::Token {
@ -45,31 +46,31 @@ impl Lexer {
crate::tokens::Token::Comment(comment)
}
pub fn consume_symbol(&mut self) -> crate::tokens::Token {
todo!()
pub fn consume_symbol(&mut self, c: char) -> crate::tokens::Token {
self.advance();
let symbol = self.symbols.get(&c.clone());
if let Some(token) = symbol {
token.clone()
} else {
crate::tokens::Token::IDENTIFIER(c.to_string())
}
}
/// Consumes a run of operator characters (`+ - * / % = ! < >`) and returns
/// them as a single IDENTIFIER token.
///
/// Greedily extends the operator while the next character is also an
/// operator character, so multi-character operators such as `==`, `<=`
/// or `!=` are lexed as one token.
pub fn consume_operator(&mut self) -> crate::tokens::Token {
    let mut operator = String::new();
    while let Some(c) = self.current_char {
        operator.push(c);
        match self.peek() {
            // The next char continues the operator: keep accumulating.
            Some('+' | '-' | '*' | '/' | '%' | '=' | '!' | '<' | '>') => {
                self.advance();
            }
            // Anything else — including end of input — terminates the
            // operator: consume the current char and stop. Handling `None`
            // here is essential; ignoring it would leave `current_char`
            // set and spin this loop forever when the operator is the
            // last thing in the input.
            _ => {
                self.advance();
                break;
            }
        }
    }
    crate::tokens::Token::IDENTIFIER(operator)
}

View file

@ -1,3 +1,4 @@
use crate::lex::util::populate_symbols;
use crate::tokens::Token;
use std::collections::HashMap;
use std::sync::Arc;
@ -12,6 +13,7 @@ pub struct Lexer {
/// [`char`] under position
pub current_char: Option<char>,
pub keywords: HashMap<&'static str, Token>,
pub symbols: HashMap<char, Token>,
}
#[macro_export]
@ -28,9 +30,11 @@ impl Lexer {
position: 0,
current_char: None,
keywords: HashMap::new(),
symbols: HashMap::new(),
};
// Populate keywords HashMap
populate_keywords(&mut lexer);
populate_symbols(&mut lexer);
dbg!("{}", &lexer.keywords);
lexer.current_char = if lexer.position < lexer.input.len() {
Some(lexer.input[lexer.position])
@ -93,8 +97,8 @@ impl Lexer {
tokens.push(self.consume_operator());
}
'{' | '}' | '(' | ')' | '#' | ',' | ';' => {
tokens.push(self.consume_symbol());
'{' | '}' | '(' | ')' | '[' | ']' | ',' | ';' | ':' => {
tokens.push(self.consume_symbol(c.clone()));
}
'.' => {
@ -102,7 +106,7 @@ impl Lexer {
}
_ => {
tokens.push(self.consume_unknown());
tokens.push(self.consume_unknown(c.clone()));
}
}
}

View file

@ -1,5 +1,18 @@
use crate::tokens::{Image, Material, Sampler, Token, Vector};
/// Fills the lexer's symbol table with every recognised single-character
/// symbol and its corresponding token.
pub fn populate_symbols(lexer: &mut crate::lex::lexer::Lexer) {
    lexer.symbols.extend([
        ('{', Token::LEFT_BRACE),
        ('}', Token::RIGHT_BRACE),
        ('(', Token::LEFT_PAREN),
        (')', Token::RIGHT_PAREN),
        ('[', Token::LEFT_BRACKET),
        (']', Token::RIGHT_BRACKET),
        ('.', Token::DOT),
        (',', Token::COMMA),
        (':', Token::COLON),
        (';', Token::SEMICOLON),
    ]);
}
pub fn populate_keywords(lexer: &mut crate::lex::lexer::Lexer) {
lexer.keywords.insert("const", Token::CONST);
lexer.keywords.insert("uniform", Token::UNIFORM);

View file

@ -6,29 +6,26 @@
//! ```
//! use glsl_lexer::*;
//!
//! let source = r#"
//!     #version 440
//!     uniform float time;
//!     void main() {
//!         gl_FragColor = vec4(1.0, 0.5, 0.2, 1.0);
//!     }
//! "#;
//! ```
//! # Status: work in progress — the lexer API and token set are still unstable.
#![allow(dead_code)]
#![allow(non_camel_case_types)]
pub mod lex;
mod tokens;
#[cfg(test)]
mod tests {
use super::*;
use crate::lex::lexer::Lexer;
use crate::tokens::{Image, Token};
use super::lex::lexer::Lexer;
use super::tokens::{Image, Token};
#[test]
fn keyword() {
@ -72,6 +69,28 @@ mod tests {
.into()
);
}
#[test]
fn test_symbol() {
    // One source string containing every single-character symbol, in order.
    let source = "{}()[],;:";
    let mut lexer = Lexer::new(source);
    let expected = vec![
        Token::LEFT_BRACE,
        Token::RIGHT_BRACE,
        Token::LEFT_PAREN,
        Token::RIGHT_PAREN,
        Token::LEFT_BRACKET,
        Token::RIGHT_BRACKET,
        Token::COMMA,
        Token::SEMICOLON,
        Token::COLON,
        Token::EOF,
    ];
    assert_eq!(lexer.get_tokens(), expected.into());
}
}
// #[cfg(test)]
// mod tests {

View file

@ -4,6 +4,7 @@
pub enum Token {
EOF,
Whitespace,
Unknown(String),
Comment(String),
CONST,
BOOL,