diff --git a/flake.lock b/flake.lock index d7afaa7..a6c0e3e 100644 --- a/flake.lock +++ b/flake.lock @@ -81,11 +81,11 @@ "locked": { "lastModified": 1, "narHash": "sha256-8wkkYGr1dPSnX9oVMX8D6dTOROXKOYpBTKfriA0sEBI=", - "path": "/nix/store/wdwf2qq1sqhs0nig8rffrq5dbpwr27v6-source/flake.systems.nix", + "path": "/nix/store/hprrr24yh5w0pbbbz2hlwblriaqb99kx-source/flake.systems.nix", "type": "path" }, "original": { - "path": "/nix/store/wdwf2qq1sqhs0nig8rffrq5dbpwr27v6-source/flake.systems.nix", + "path": "/nix/store/hprrr24yh5w0pbbbz2hlwblriaqb99kx-source/flake.systems.nix", "type": "path" } }, diff --git a/src/lib.rs b/src/lib.rs index a3181d3..831ec45 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -22,9 +22,11 @@ //! # WIP THAT SHIT STILL WONKY AF +use std::sync::Arc; + mod handlers; -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub enum Token { /// Something like `float` Identifier(String), @@ -96,8 +98,9 @@ impl Lexer { /// let mut lexer = glsl_lexer::Lexer::new(&source); /// let tokens = lexer.get_tokens(); /// dbg!("{}", tokens); - /// ``` - pub fn get_tokens(&mut self) -> Vec<Token> { + ///``` + // We are using Arc<[Token]> as return type for cheaper cloning of the returned value + pub fn get_tokens(&mut self) -> Arc<[Token]> { let mut tokens = Vec::new(); while let Some(c) = self.current_char { if c.is_whitespace() { @@ -131,7 +134,8 @@ impl Lexer { } } tokens.push(Token::EOF); - tokens + let ret: Arc<[Token]> = tokens.into(); + ret } fn peek(&self) -> Option<char> { @@ -182,7 +186,7 @@ mod tests { let mut lexer = Lexer::new(source_code); let tokens = lexer.get_tokens(); info!("[Whitespace] Tokens: {:#?}", tokens); - assert_eq!(tokens, vec![Token::Whitespace, Token::EOF]); + assert_eq!(tokens, vec![Token::Whitespace, Token::EOF].into()); } #[test] @@ -194,7 +198,7 @@ mod tests { info!("[Identifier] Tokens: {:#?}", tokens); assert_eq!( tokens, - vec![Token::Identifier("variableName".to_string()), Token::EOF] + 
vec![Token::Identifier("variableName".to_string()), Token::EOF].into() ); } @@ -207,7 +211,7 @@ mod tests { info!("[Keyword] Tokens: {:#?}", tokens); assert_eq!( tokens, - vec![Token::Keyword("uniform".to_string()), Token::EOF] + vec![Token::Keyword("uniform".to_string()), Token::EOF].into() ); } @@ -218,7 +222,10 @@ mod tests { let mut lexer = Lexer::new(source_code); let tokens = lexer.get_tokens(); info!("[IntegerLiteral] Tokens: {:#?}", tokens); - assert_eq!(tokens, vec![Token::IntegerLiteral(12345), Token::EOF]); + assert_eq!( + tokens, + vec![Token::IntegerLiteral(12345), Token::EOF].into() + ); } #[test] @@ -228,7 +235,10 @@ mod tests { let mut lexer = Lexer::new(source_code); let tokens = lexer.get_tokens(); info!("[FloatLiteral] Tokens: {:#?}", tokens); - assert_eq!(tokens, vec![Token::FloatLiteral(123.4504), Token::EOF]); + assert_eq!( + tokens, + vec![Token::FloatLiteral(123.4504), Token::EOF].into() + ); } #[test] @@ -238,11 +248,11 @@ mod tests { let mut lexer = Lexer::new(source_code); let tokens = lexer.get_tokens(); info!("[FloatLiteral Shorthand] Tokens: {:#?}", tokens); - assert_eq!(tokens, vec![Token::FloatLiteral(0.4504), Token::EOF]); + assert_eq!(tokens, vec![Token::FloatLiteral(0.4504), Token::EOF].into()); } #[test] - fn test_swizzling() { + fn swizzling() { init(); let source_code = "abcd.xyz"; let mut lexer = Lexer::new(source_code); @@ -256,6 +266,7 @@ mod tests { Token::Identifier("xyz".to_string()), Token::EOF ] + .into() ); } @@ -284,6 +295,7 @@ mod tests { Token::Operator("?".to_string()), Token::EOF, ] + .into() ); } @@ -301,6 +313,7 @@ mod tests { Token::Whitespace, Token::EOF, ] + .into() ); } @@ -359,6 +372,7 @@ mod tests { Token::Whitespace, Token::EOF, ] + .into() ); } }