diff --git a/README.md b/README.md
new file mode 100644
index 0000000..6d5b18b
--- /dev/null
+++ b/README.md
@@ -0,0 +1,12 @@
+# GLSL-LEXER
+A library providing lexing capabilities for GLSL 460 and its subsets.
+
+---
+
+## 👷🏻‍♀️ UNDER CONSTRUCTION 🚧
+
+---
+## Features
+- [ ] Building Syntax Tree
+- [ ] Exporting Syntax Tree as graphviz
+- [ ] Integration with [epoqe demo toolchain](https://www.pouet.net/groups.php?which=15323)
diff --git a/src/lex/handlers.rs b/src/lex/handlers.rs
index 03bfb64..8214abb 100644
--- a/src/lex/handlers.rs
+++ b/src/lex/handlers.rs
@@ -49,6 +49,28 @@ impl Lexer {
         todo!()
     }
     pub fn consume_operator(&mut self) -> crate::tokens::Token {
-        todo!()
+        let mut operator = String::new();
+        while let Some(c) = self.current_char {
+            operator.push(c);
+            match self.peek() {
+                Some('+') | Some('-') | Some('*') | Some('/') | Some('%') | Some('=')
+                | Some('!') | Some('<') | Some('>') => {
+                    self.advance();
+                    operator.push(
+                        self.current_char
+                            .expect("Critical error while lexing operators! Aborting..."),
+                    );
+                }
+                None => {
+                    self.advance();
+                    break;
+                }
+                _ => {
+                    self.advance();
+                    break;
+                }
+            };
+        }
+        crate::tokens::Token::IDENTIFIER(operator)
     }
 }
diff --git a/src/lex/lexer.rs b/src/lex/lexer.rs
index 2c3cd3e..2bf32b1 100644
--- a/src/lex/lexer.rs
+++ b/src/lex/lexer.rs
@@ -2,7 +2,7 @@ use crate::tokens::Token;
 use std::collections::HashMap;
 use std::sync::Arc;
 
-use super::util::populate_tokens;
+use super::util::populate_keywords;
 
 pub struct Lexer {
     /// GLSL source
@@ -29,7 +29,8 @@ impl Lexer {
             current_char: None,
             keywords: HashMap::new(),
         };
-        populate_tokens(&mut lexer);
+        // Populate keywords HashMap
+        populate_keywords(&mut lexer);
         dbg!("{}", &lexer.keywords);
         lexer.current_char = if lexer.position < lexer.input.len() {
             Some(lexer.input[lexer.position])
diff --git a/src/lex/util.rs b/src/lex/util.rs
index ec57bba..30be674 100644
--- a/src/lex/util.rs
+++ b/src/lex/util.rs
@@ -1,6 +1,6 @@
 use crate::tokens::{Image, Material, Sampler, Token, Vector};
 
-pub fn populate_tokens(lexer: &mut crate::lex::lexer::Lexer) {
+pub fn populate_keywords(lexer: &mut crate::lex::lexer::Lexer) {
     lexer.keywords.insert("const", Token::CONST);
     lexer.keywords.insert("uniform", Token::UNIFORM);
     lexer.keywords.insert("buffer", Token::BUFFER);