Small changes to keyword handling; Comment consumption
All checks were successful
build (push): Successful in 1m11s
clippy (push): Successful in 1m13s

xqtc 2024-07-24 21:05:17 +02:00
parent f40f6be70d
commit c2966ce973
4 changed files with 32 additions and 24 deletions


@@ -23,7 +23,10 @@ impl Lexer {
                 break;
             }
         }
-        self.is_keyword(&word).unwrap()
+        match self.is_keyword(&word) {
+            Some(token) => token,
+            None => crate::tokens::Token::IDENTIFIER(word),
+        }
     }
 
     pub fn consume_number(&mut self) -> crate::tokens::Token {
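
The match on is_keyword replaces the earlier unwrap, so a word that is not in the keyword table now becomes an IDENTIFIER token instead of panicking. Below is a minimal sketch of that fallback, using a hypothetical stand-in Token enum and a plain HashMap rather than the crate's actual Lexer:

use std::collections::HashMap;

#[derive(Clone, Debug, PartialEq)]
enum Token {
    CONST,
    IDENTIFIER(String),
}

// Look the word up in the keyword table; unknown words fall back to
// IDENTIFIER instead of panicking.
fn classify(keywords: &HashMap<&str, Token>, word: &str) -> Token {
    match keywords.get(word) {
        Some(token) => token.clone(),
        None => Token::IDENTIFIER(word.to_string()),
    }
}

fn main() {
    let keywords = HashMap::from([("const", Token::CONST)]);
    assert_eq!(classify(&keywords, "const"), Token::CONST);
    assert_eq!(
        classify(&keywords, "my_var"),
        Token::IDENTIFIER("my_var".to_string())
    );
}
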
@@ -31,7 +34,15 @@ impl Lexer {
     }
 
     pub fn consume_comment(&mut self) -> crate::tokens::Token {
-        todo!()
+        let mut comment = String::new();
+        while let Some(c) = self.current_char {
+            if c == '\n' {
+                break;
+            }
+            comment.push(c);
+            self.advance();
+        }
+        crate::tokens::Token::Comment(comment)
     }
 
     pub fn consume_symbol(&mut self) -> crate::tokens::Token {
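
consume_comment now collects characters up to, but not including, the newline and wraps them in Token::Comment; the '\n' itself is left in the stream, which matches the new test below expecting a Whitespace token after the comment. A simplified stand-in for that loop, using a Peekable character iterator instead of the lexer's current_char/advance cursor (names here are illustrative, not the crate's API):

use std::iter::Peekable;
use std::str::Chars;

// Read a line comment up to, but not including, the newline.
fn consume_line_comment(chars: &mut Peekable<Chars<'_>>) -> String {
    let mut comment = String::new();
    while let Some(&c) = chars.peek() {
        if c == '\n' {
            // Leave the newline for whitespace handling.
            break;
        }
        comment.push(c);
        chars.next();
    }
    comment
}

fn main() {
    let mut chars = "// hello\nrest".chars().peekable();
    assert_eq!(consume_line_comment(&mut chars), "// hello");
    // The newline is still pending in the stream.
    assert_eq!(chars.next(), Some('\n'));
}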


@@ -1,4 +1,3 @@
 use crate::tokens::{Image, Material, Sampler, Token, Vector};
 
 pub fn populate_tokens(lexer: &mut crate::lex::lexer::Lexer) {
@@ -373,10 +372,8 @@ impl crate::lex::lexer::Lexer {
     pub fn is_keyword(&mut self, word: &str) -> Option<Token> {
         let token = self.keywords.get(word);
         if let Some(token) = token {
-            Some(token.clone())
-        } else {
-            // TODO: Check if word is an identifier
-            Some(Token::IDENTIFIER(word.to_string()))
+            return Some(token.clone());
         }
+        None
     }
 }
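
The rewritten is_keyword returns early on a hit and otherwise falls through to None, dropping the old identifier fallback that the caller now handles. As a possible follow-up, not part of this commit, the early-return/None pair collapses to Option::cloned; sketched here with a free function and a hypothetical keyword map:

use std::collections::HashMap;

#[derive(Clone, Debug, PartialEq)]
enum Token {
    UNIFORM,
}

// HashMap::get already yields an Option, so cloning the hit is enough.
fn is_keyword(keywords: &HashMap<&str, Token>, word: &str) -> Option<Token> {
    keywords.get(word).cloned()
}

fn main() {
    let keywords = HashMap::from([("uniform", Token::UNIFORM)]);
    assert_eq!(is_keyword(&keywords, "uniform"), Some(Token::UNIFORM));
    assert_eq!(is_keyword(&keywords, "nope"), None);
}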


@@ -56,6 +56,22 @@ mod tests {
         let tokens = lexer.get_tokens();
         assert_eq!(tokens, vec![Token::UNIFORM, Token::EOF].into());
     }
+
+    #[test]
+    fn test_single_line_comment() {
+        let source = "// This is a comment\n";
+        let mut lexer = Lexer::new(source);
+        let tokens = lexer.get_tokens();
+        assert_eq!(
+            tokens,
+            vec![
+                Token::Comment("// This is a comment".to_string()),
+                Token::Whitespace,
+                Token::EOF,
+            ]
+            .into()
+        );
+    }
 }
 
 // #[cfg(test)]
 // mod tests {
@@ -164,23 +180,6 @@ mod tests {
 //         );
 //     }
 //
-//     #[test]
-//     fn test_single_line_comment() {
-//         init();
-//         let source = "// This is a comment\n";
-//         let mut lexer = Lexer::new(source);
-//         let tokens = lexer.get_tokens();
-//         info!("[Comment] Tokens: {:#?}", tokens);
-//         assert_eq!(
-//             tokens,
-//             vec![
-//                 Token::Comment("// This is a comment".to_string()),
-//                 Token::Whitespace,
-//                 Token::EOF,
-//             ]
-//             .into()
-//         );
-//     }
 //
 //     // I hope that does it. Writing this test was pain.
 //     #[test]


@@ -4,6 +4,7 @@
 pub enum Token {
     EOF,
     Whitespace,
+    Comment(String),
     CONST,
     BOOL,
     FLOAT,
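
The new Comment(String) variant carries the comment text, so any downstream match on Token will need an arm for it. A hypothetical consumer, not taken from this repository, that strips comments and whitespace before parsing:

#[derive(Clone, Debug, PartialEq)]
enum Token {
    EOF,
    Whitespace,
    Comment(String),
    CONST,
}

// Drop trivia tokens (comments and whitespace) and keep everything else.
fn strip_trivia(tokens: Vec<Token>) -> Vec<Token> {
    tokens
        .into_iter()
        .filter(|t| !matches!(t, Token::Comment(_) | Token::Whitespace))
        .collect()
}

fn main() {
    let tokens = vec![
        Token::Comment("// note".to_string()),
        Token::Whitespace,
        Token::CONST,
        Token::EOF,
    ];
    assert_eq!(strip_trivia(tokens), vec![Token::CONST, Token::EOF]);
}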