Make returned tokens Arc instead of Vec
Some checks failed
/ build (push) Failing after 1m38s
/ clippy (push) Successful in 2m4s

xqtc161 2024-07-12 10:19:00 +02:00
parent 6aa73f802f
commit 6ab52a5f66
2 changed files with 27 additions and 13 deletions
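For context on the change: cloning a `Vec<Token>` deep-copies every token (including any heap-allocated `String`s inside), while cloning an `Arc<[Token]>` only bumps an atomic reference count. A minimal, self-contained sketch of the conversion this commit introduces — hypothetical code, with a two-variant `Token` standing in for the crate's full enum:

```rust
use std::sync::Arc;

// Stand-in for the crate's Token enum (hypothetical, trimmed down).
#[derive(Debug, PartialEq, Clone)]
enum Token {
    Identifier(String),
    EOF,
}

fn main() {
    let tokens: Vec<Token> = vec![Token::Identifier("float".to_string()), Token::EOF];

    // Cloning the Vec deep-copies every element, including the heap-allocated Strings.
    let deep_copy = tokens.clone();

    // From<Vec<T>> for Arc<[T]> moves the elements into one shared allocation;
    // after that, every clone is just an atomic reference-count increment.
    let shared: Arc<[Token]> = tokens.into();
    let cheap_copy = Arc::clone(&shared);

    assert_eq!(deep_copy.as_slice(), &*shared);
    assert_eq!(shared, cheap_copy);
}
```

The trade-off is immutability: an `Arc<[Token]>` can be shared cheaply (including across threads), but callers that want to modify the token stream have to copy it out first.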


@@ -81,11 +81,11 @@
     "locked": {
       "lastModified": 1,
       "narHash": "sha256-8wkkYGr1dPSnX9oVMX8D6dTOROXKOYpBTKfriA0sEBI=",
-      "path": "/nix/store/wdwf2qq1sqhs0nig8rffrq5dbpwr27v6-source/flake.systems.nix",
+      "path": "/nix/store/hprrr24yh5w0pbbbz2hlwblriaqb99kx-source/flake.systems.nix",
       "type": "path"
     },
     "original": {
-      "path": "/nix/store/wdwf2qq1sqhs0nig8rffrq5dbpwr27v6-source/flake.systems.nix",
+      "path": "/nix/store/hprrr24yh5w0pbbbz2hlwblriaqb99kx-source/flake.systems.nix",
       "type": "path"
     }
   },


@@ -22,9 +22,11 @@
 //! # WIP THAT SHIT STILL WONKY AF
 
+use std::sync::Arc;
+
 mod handlers;
 
-#[derive(Debug, PartialEq)]
+#[derive(Debug, PartialEq, Clone)]
 pub enum Token {
     /// Something like `float`
     Identifier(String),
@@ -96,8 +98,9 @@ impl Lexer {
     /// let mut lexer = glsl_lexer::Lexer::new(&source);
     /// let tokens = lexer.get_tokens();
     /// dbg!("{}", tokens);
-    /// ```
-    pub fn get_tokens(&mut self) -> Vec<Token> {
+    ///```
+    // We are using Arc<[Token]> as return type for cheaper cloning of the returned value
+    pub fn get_tokens(&mut self) -> Arc<[Token]> {
         let mut tokens = Vec::new();
         while let Some(c) = self.current_char {
             if c.is_whitespace() {
@@ -131,7 +134,8 @@ impl Lexer {
             }
         }
         tokens.push(Token::EOF);
-        tokens
+        let ret: Arc<[Token]> = tokens.into();
+        ret
     }
 
     fn peek(&self) -> Option<char> {
@@ -182,7 +186,7 @@ mod tests {
         let mut lexer = Lexer::new(source_code);
         let tokens = lexer.get_tokens();
         info!("[Whitespace] Tokens: {:#?}", tokens);
-        assert_eq!(tokens, vec![Token::Whitespace, Token::EOF]);
+        assert_eq!(tokens, vec![Token::Whitespace, Token::EOF].into());
     }
 
     #[test]
@@ -194,7 +198,7 @@ mod tests {
         info!("[Identifier] Tokens: {:#?}", tokens);
         assert_eq!(
             tokens,
-            vec![Token::Identifier("variableName".to_string()), Token::EOF]
+            vec![Token::Identifier("variableName".to_string()), Token::EOF].into()
         );
     }
@@ -207,7 +211,7 @@ mod tests {
         info!("[Keyword] Tokens: {:#?}", tokens);
         assert_eq!(
             tokens,
-            vec![Token::Keyword("uniform".to_string()), Token::EOF]
+            vec![Token::Keyword("uniform".to_string()), Token::EOF].into()
         );
     }
@@ -218,7 +222,10 @@ mod tests {
         let mut lexer = Lexer::new(source_code);
         let tokens = lexer.get_tokens();
         info!("[IntegerLiteral] Tokens: {:#?}", tokens);
-        assert_eq!(tokens, vec![Token::IntegerLiteral(12345), Token::EOF]);
+        assert_eq!(
+            tokens,
+            vec![Token::IntegerLiteral(12345), Token::EOF].into()
+        );
     }
 
     #[test]
@@ -228,7 +235,10 @@ mod tests {
         let mut lexer = Lexer::new(source_code);
         let tokens = lexer.get_tokens();
         info!("[FloatLiteral] Tokens: {:#?}", tokens);
-        assert_eq!(tokens, vec![Token::FloatLiteral(123.4504), Token::EOF]);
+        assert_eq!(
+            tokens,
+            vec![Token::FloatLiteral(123.4504), Token::EOF].into()
+        );
     }
 
     #[test]
@@ -238,11 +248,11 @@ mod tests {
         let mut lexer = Lexer::new(source_code);
         let tokens = lexer.get_tokens();
         info!("[FloatLiteral Shorthand] Tokens: {:#?}", tokens);
-        assert_eq!(tokens, vec![Token::FloatLiteral(0.4504), Token::EOF]);
+        assert_eq!(tokens, vec![Token::FloatLiteral(0.4504), Token::EOF].into());
     }
 
     #[test]
-    fn test_swizzling() {
+    fn swizzling() {
         init();
         let source_code = "abcd.xyz";
         let mut lexer = Lexer::new(source_code);
@@ -256,6 +266,7 @@ mod tests {
                 Token::Identifier("xyz".to_string()),
                 Token::EOF
             ]
+            .into()
         );
     }
@@ -284,6 +295,7 @@ mod tests {
                 Token::Operator("?".to_string()),
                 Token::EOF,
             ]
+            .into()
         );
     }
@@ -301,6 +313,7 @@ mod tests {
                 Token::Whitespace,
                 Token::EOF,
             ]
+            .into()
         );
     }
@@ -359,6 +372,7 @@ mod tests {
                 Token::Whitespace,
                 Token::EOF,
             ]
+            .into()
         );
     }
 }
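The updated tests compare the returned `Arc<[Token]>` against `vec![…].into()`, which works because the standard library provides `From<Vec<T>> for Arc<[T]>`, and `Arc<[T]>` is `PartialEq` and `Debug` whenever `T` is. A small sketch of that pattern, plus an allocation-free alternative — again with a hypothetical stand-in `Token`:

```rust
use std::sync::Arc;

// Stand-in for the crate's Token enum (hypothetical, trimmed down).
#[derive(Debug, PartialEq, Clone)]
enum Token {
    Whitespace,
    EOF,
}

fn main() {
    // Pretend this came from lexer.get_tokens().
    let tokens: Arc<[Token]> = vec![Token::Whitespace, Token::EOF].into();

    // The pattern the updated tests use: build the expectation as a Vec
    // and convert it into an Arc<[Token]> for the comparison.
    assert_eq!(tokens, vec![Token::Whitespace, Token::EOF].into());

    // An allocation-free alternative: deref the Arc to a slice and compare.
    assert_eq!(&*tokens, &[Token::Whitespace, Token::EOF][..]);
}
```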