diff --git a/flake.lock b/flake.lock
index 1d8ff6b..a8f49e7 100644
--- a/flake.lock
+++ b/flake.lock
@@ -72,23 +72,9 @@
       "inputs": {
         "flake-parts": "flake-parts",
         "nixpkgs": "nixpkgs",
-        "systems": "systems",
         "treefmt-nix": "treefmt-nix"
       }
     },
-    "systems": {
-      "flake": false,
-      "locked": {
-        "lastModified": 1,
-        "narHash": "sha256-8wkkYGr1dPSnX9oVMX8D6dTOROXKOYpBTKfriA0sEBI=",
-        "path": "/nix/store/3447wnbwfkgd91v8x0d3hj48hvb2h010-source/flake.systems.nix",
-        "type": "path"
-      },
-      "original": {
-        "path": "/nix/store/3447wnbwfkgd91v8x0d3hj48hvb2h010-source/flake.systems.nix",
-        "type": "path"
-      }
-    },
     "treefmt-nix": {
       "inputs": {
         "nixpkgs": "nixpkgs_2"
diff --git a/src/handlers.rs b/src/lex/handlers.rs
similarity index 100%
rename from src/handlers.rs
rename to src/lex/handlers.rs
diff --git a/src/lex/lex.rs b/src/lex/lex.rs
new file mode 100644
index 0000000..009dc74
--- /dev/null
+++ b/src/lex/lex.rs
@@ -0,0 +1,67 @@
+use crate::tokens::Token;
+use std::sync::Arc;
+
+pub struct Lexer {
+    /// GLSL source
+    pub input: Vec<char>,
+    /// Position in source
+    pub position: usize,
+    /// [`char`] under position
+    pub current_char: Option<char>,
+}
+
+impl Lexer {
+    pub fn new(input: &str) -> Self {
+        let mut lexer = Lexer {
+            input: input.chars().collect(),
+            position: 0,
+            current_char: None,
+        };
+        lexer.current_char = if lexer.position < lexer.input.len() {
+            Some(lexer.input[lexer.position])
+        } else {
+            None
+        };
+        lexer
+    }
+    fn advance(&mut self) {
+        self.position += 1;
+        self.current_char = if self.position < self.input.len() {
+            Some(self.input[self.position])
+        } else {
+            None
+        };
+    }
+
+    fn peek(&self) -> Option<char> {
+        if self.position + 1 < self.input.len() {
+            Some(self.input[self.position + 1])
+        } else {
+            None
+        }
+    }
+
+    /// Lexes the source given to the [`Lexer`] on initialization and returns the [`Token`]s as an `Arc<[Token]>`.
+    /// # Example:
+    /// ```
+    /// use glsl_lexer::*;
+    /// let source = r#"
+    /// #version 440
+    /// uniform float time;
+    /// void main() {
+    /// gl_FragColor = vec4(1.0, 0.5, 0.2, 1.0);
+    /// }
+    /// "#;
+    /// let mut lexer = glsl_lexer::Lexer::new(&source);
+    /// let tokens = lexer.get_tokens();
+    /// dbg!("{}", tokens);
+    /// ```
+    // We are using Arc<[Token]> as return type for cheaper cloning of the returned value
+    fn get_tokens(&mut self) -> Arc<[Token]> {
+        let mut tokens: Vec<Token> = Vec::new();
+
+        tokens.into()
+    }
+
+    fn match_token(&self) -> Token { todo!() }
+}
diff --git a/src/lex/mod.rs b/src/lex/mod.rs
new file mode 100644
index 0000000..44c1c82
--- /dev/null
+++ b/src/lex/mod.rs
@@ -0,0 +1,2 @@
+mod handlers;
+mod lex;
diff --git a/src/lib.rs b/src/lib.rs
index 5c4e59f..9064b1d 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -24,7 +24,9 @@
 
 use std::sync::Arc;
 
-mod handlers;
+mod lex;
+mod tokens;
+mod util;
 
 #[derive(Debug, PartialEq, Clone)]
 pub enum Token {
@@ -83,115 +85,62 @@ macro_rules! lex {
     }};
 }
 
-impl Lexer {
-    /// Instantiates the [`Lexer`]
-    pub fn new(input: &str) -> Self {
-        let mut lexer = Lexer {
-            input: input.chars().collect(),
-            position: 0,
-            current_char: None,
-        };
-        lexer.current_char = if lexer.position < lexer.input.len() {
-            Some(lexer.input[lexer.position])
-        } else {
-            None
-        };
-        lexer
-    }
-    fn advance(&mut self) {
-        self.position += 1;
-        self.current_char = if self.position < self.input.len() {
-            Some(self.input[self.position])
-        } else {
-            None
-        };
-    }
-
-    /// Parses the source given the [`Lexer`] upon initialization and returns a vector of [`Token`].
-    /// # Example:
-    /// ```
-    /// use glsl_lexer::*;
-    /// let source = r#"
-    /// #version 440
-    /// uniform float time;
-    /// void main() {
-    /// gl_FragColor = vec4(1.0, 0.5, 0.2, 1.0);
-    /// }
-    /// "#;
-    /// let mut lexer = glsl_lexer::Lexer::new(&source);
-    /// let tokens = lexer.get_tokens();
-    /// dbg!("{}", tokens);
-    ///```
-    // We are using Arc<[Token]> as return type for cheaper cloning of the returned value
-    pub fn get_tokens(&mut self) -> Arc<[Token]> {
-        let mut tokens = Vec::new();
-        while let Some(c) = self.current_char {
-            if c.is_whitespace() {
-                self.consume_whitespace();
-                tokens.push(Token::Whitespace);
-            } else if c.is_alphabetic() || c == '_' {
-                tokens.push(self.consume_identifier_or_keyword());
-            } else if c.is_ascii_digit() {
-                tokens.push(self.consume_number());
-            } else if c == '/' && self.peek() == Some('/') {
-                tokens.push(self.consume_comment());
-            } else {
-                match c {
-                    // TODO Implement operands like +=
-                    '+' | '-' | '*' | '/' | '%' | '&' | '|' | '^' | '!' | '=' | '<' | '>' | '?' => {
-                        tokens.push(self.consume_operator());
-                    }
-
-                    '{' | '}' | '(' | ')' | '#' | ',' | ';' => {
-                        tokens.push(self.consume_symbol());
-                    }
-
-                    '.' => {
-                        tokens.push(self.consume_number());
-                    }
-
-                    _ => {
-                        tokens.push(self.consume_unknown());
-                    }
-                }
-            }
-        }
-        tokens.push(Token::EOF);
-        let ret: Arc<[Token]> = tokens.into();
-        ret
-    }
-
-    fn peek(&self) -> Option<char> {
-        if self.position + 1 < self.input.len() {
-            Some(self.input[self.position + 1])
-        } else {
-            None
-        }
-    }
-
-    // fn error(&self, message: &str) -> ! {
-    //     panic!("Lexer error at position {}: {}", self.position, message);
+    // /// Parses the source given the [`Lexer`] upon initialization and returns a vector of [`Token`].
+    // /// # Example:
+    // /// ```
+    // /// use glsl_lexer::*;
+    // /// let source = r#"
+    // /// #version 440
+    // /// uniform float time;
+    // /// void main() {
+    // /// gl_FragColor = vec4(1.0, 0.5, 0.2, 1.0);
+    // /// }
+    // /// "#;
+    // /// let mut lexer = glsl_lexer::Lexer::new(&source);
+    // /// let tokens = lexer.get_tokens();
+    // /// dbg!("{}", tokens);
+    // ///```
+    // // We are using Arc<[Token]> as return type for cheaper cloning of the returned value
+    // pub fn get_tokens(&mut self) -> Arc<[Token]> {
+    //     let mut tokens = Vec::new();
+    //     while let Some(c) = self.current_char {
+    //         if c.is_whitespace() {
+    //             self.consume_whitespace();
+    //             tokens.push(Token::Whitespace);
+    //         } else if c.is_alphabetic() || c == '_' {
+    //             tokens.push(self.consume_identifier_or_keyword());
+    //         } else if c.is_ascii_digit() {
+    //             tokens.push(self.consume_number());
+    //         } else if c == '/' && self.peek() == Some('/') {
+    //             tokens.push(self.consume_comment());
+    //         } else {
+    //             match c {
+    //                 // TODO Implement operands like +=
+    //                 '+' | '-' | '*' | '/' | '%' | '&' | '|' | '^' | '!' | '=' | '<' | '>' | '?' => {
+    //                     tokens.push(self.consume_operator());
+    //                 }
+    //
+    //                 '{' | '}' | '(' | ')' | '#' | ',' | ';' => {
+    //                     tokens.push(self.consume_symbol());
+    //                 }
+    //
+    //                 '.' => {
+    //                     tokens.push(self.consume_number());
+    //                 }
+    //
+    //                 _ => {
+    //                     tokens.push(self.consume_unknown());
+    //                 }
+    //             }
+    //         }
+    //     }
+    //     tokens.push(Token::EOF);
+    //     let ret: Arc<[Token]> = tokens.into();
+    //     ret
     // }
-}
-fn is_keyword(word: &str) -> bool {
-    matches!(
-        word,
-        "void"
-            | "int"
-            | "float"
-            | "bool"
-            | "if"
-            | "else"
-            | "for"
-            | "while"
-            | "return"
-            | "struct"
-            | "uniform"
-            | "varying"
-    )
-}
+
 #[cfg(test)]
 mod tests {
diff --git a/src/tokens.rs b/src/tokens.rs
new file mode 100644
index 0000000..2d8b1d4
--- /dev/null
+++ b/src/tokens.rs
@@ -0,0 +1,246 @@
+#![allow(non_camel_case_types)]
+#![allow(non_snake_case)]
+#![allow(nonstandard_style)]
+
+#[derive(Debug, Clone, PartialEq)]
+pub enum Token {
+    CONST,
+    BOOL,
+    FLOAT,
+    INT,
+    UINT,
+    DOUBLE,
+    MAT2,
+    MAT3,
+    MAT4,
+    MAT2X2,
+    MAT2X3,
+    MAT2X4,
+    MAT3X2,
+    MAT3X3,
+    MAT3X4,
+    MAT4X2,
+    MAT4X3,
+    MAT4X4,
+    DVEC2,
+    DVEC3,
+    DVEC4,
+    DMAT2,
+    DMAT3,
+    DMAT4,
+    DMAT2X2,
+    DMAT2X3,
+    DMAT2X4,
+    DMAT3X2,
+    DMAT3X3,
+    DMAT3X4,
+    DMAT4X2,
+    DMAT4X3,
+    DMAT4X4,
+    CENTROID,
+    IN,
+    OUT,
+    INOUT,
+    UNIFORM,
+    PATCH,
+    SAMPLE,
+    BUFFER,
+    SHARED,
+    COHERENT,
+    VOLATILE,
+    RESTRICT,
+    READONLY,
+    WRITEONLY,
+    NOPERSPECTIVE,
+    FLAT,
+    SMOOTH,
+    LAYOUT,
+    ATOMIC_UINT,
+    SAMPLER(Sampler),
+    IMAGE2D,
+    IIMAGE2D,
+    UIMAGE2D,
+    IMAGE3D,
+    IIMAGE3D,
+    UIMAGE3D,
+    IMAGECUBE,
+    IIMAGECUBE,
+    UIMAGECUBE,
+    IMAGEBUFFER,
+    IIMAGEBUFFER,
+    UIMAGEBUFFER,
+    IMAGE2DARRAY,
+    IIMAGE2DARRAY,
+    UIMAGE2DARRAY,
+    IMAGECUBEARRAY,
+    IIMAGECUBEARRAY,
+    UIMAGECUBEARRAY,
+    IMAGE1D,
+    IIMAGE1D,
+    UIMAGE1D,
+    IMAGE1DARRAY,
+    IIMAGE1DARRAY,
+    UIMAGE1DARRAY,
+    IMAGE2DRECT,
+    IIMAGE2DRECT,
+    UIMAGE2DRECT,
+    IMAGE2DMS,
+    IIMAGE2DMS,
+    UIMAGE2DMS,
+    IMAGE2DMSARRAY,
+    IIMAGE2DMSARRAY,
+    UIMAGE2DMSARRAY,
+    STRUCT,
+    VOID,
+    WHILE,
+    BREAK,
+    CONTINUE,
+    DO,
+    ELSE,
+    FOR,
+    IF,
+    DISCARD,
+    RETURN,
+    SWITCH,
+    CASE,
+    DEFAULT,
+    SUBROUTINE,
+    IDENTIFIER,
+    TYPE_NAME,
+    FLOATCONSTANT(f32),
+    INTCONSTANT(i32),
+    UINTCONSTANT(u32),
+    BOOLCONSTANT(bool),
+    DOUBLECONSTANT(f64),
+    FIELD_SELECTION(String),
+    LEFT_OP,
+    RIGHT_OP,
+    INC_OP,
+    DEC_OP,
+    LE_OP,
+    GE_OP,
+    EQ_OP,
+    NE_OP,
+    AND_OP,
+    OR_OP,
+    XOR_OP,
+    MUL_ASSIGN,
+    DIV_ASSIGN,
+    ADD_ASSIGN,
+    MOD_ASSIGN,
+    LEFT_ASSIGN,
+    RIGHT_ASSIGN,
+    AND_ASSIGN,
+    XOR_ASSIGN,
+    OR_ASSIGN,
+    SUB_ASSIGN,
+    LEFT_PAREN,
+    RIGHT_PAREN,
+    LEFT_BRACKET,
+    RIGHT_BRACKET,
+    LEFT_BRACE,
+    RIGHT_BRACE,
+    DOT,
+    COMMA,
+    COLON,
+    EQUAL,
+    SEMICOLON,
+    BANG,
+    DASH,
+    TILDE,
+    PLUS,
+    STAR,
+    SLASH,
+    PERCENT,
+    LEFT_ANGLE,
+    RIGHT_ANGLE,
+    VERTICAL_BAR,
+    CARET,
+    AMPERSAND,
+    QUESTION,
+    INVARIANT,
+    PRECISE,
+    HIGH_PRECISION,
+    MEDIUM_PRECISION,
+    LOW_PRECISION,
+    PRECISION,
+}
+
+#[derive(Debug, Clone, PartialEq)]
+enum Sampler {
+    SAMPLER2D,
+    SAMPLER3D,
+    SAMPLERCUBE,
+    SAMPLER2DSHADOW,
+    SAMPLERCUBESHADOW,
+    SAMPLER2DARRAY,
+    SAMPLER2DARRAYSHADOW,
+    ISAMPLER2D,
+    ISAMPLER3D,
+    ISAMPLERCUBE,
+    ISAMPLER2DARRAY,
+    USAMPLER2D,
+    USAMPLER3D,
+    USAMPLERCUBE,
+    USAMPLER2DARRAY,
+    SAMPLER1D,
+    SAMPLER1DSHADOW,
+    SAMPLER1DARRAY,
+    SAMPLER1DARRAYSHADOW,
+    ISAMPLER1D,
+    ISAMPLER1DARRAY,
+    USAMPLER1D,
+    USAMPLER1DARRAY,
+    SAMPLER2DRECT,
+    SAMPLER2DRECTSHADOW,
+    ISAMPLER2DRECT,
+    USAMPLER2DRECT,
+    SAMPLERBUFFER,
+    ISAMPLERBUFFER,
+    USAMPLERBUFFER,
+    SAMPLERCUBEARRAY,
+    SAMPLERCUBEARRAYSHADOW,
+    ISAMPLERCUBEARRAY,
+    USAMPLERCUBEARRAY,
+    SAMPLER2DMS,
+    ISAMPLER2DMS,
+    USAMPLER2DMS,
+    SAMPLER2DMSARRAY,
+    ISAMPLER2DMSARRAY,
+    USAMPLER2DMSARRAY,
+}
+
+#[derive(Debug, Clone, PartialEq)]
+enum Primitive {}
+
+#[derive(Debug, Clone, PartialEq)]
+enum Material {
+    MAT2,
+    MAT3,
+    MAT4,
+    MAT2X2,
+    MAT2X3,
+    MAT2X4,
+    MAT3X2,
+    MAT3X3,
+    MAT3X4,
+    MAT4X2,
+    MAT4X3,
+    MAT4X4,
+}
+
+#[derive(Debug, Clone, PartialEq)]
+enum Vector {
+    BVEC2,
+    BVEC3,
+    BVEC4,
+    IVEC2,
+    IVEC3,
+    IVEC4,
+    UVEC2,
+    UVEC3,
+    UVEC4,
+    VEC2,
+    VEC3,
+    VEC4,
+}
diff --git a/src/util.rs b/src/util.rs
new file mode 100644
index 0000000..c1e8f4c
--- /dev/null
+++ b/src/util.rs
@@ -0,0 +1,18 @@
+
+fn is_keyword(word: &str) -> bool {
+    matches!(
+        word,
+        "void"
+            | "int"
+            | "float"
+            | "bool"
+            | "if"
+            | "else"
+            | "for"
+            | "while"
+            | "return"
+            | "struct"
+            | "uniform"
+            | "varying"
+    )
+}
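Note on how the new pieces relate: in this diff `get_tokens` and `match_token` in `src/lex/lex.rs` are still stubs, the keyword list has moved to `is_keyword` in `src/util.rs`, and the keyword variants now live in the `tokens::Token` enum. The sketch below shows one way an already-scanned identifier could be mapped onto those variants. The `keyword_token` helper is hypothetical (it is not part of this diff), and it assumes `is_keyword` is made `pub(crate)` so the `lex` module can reach it.

```rust
// Hypothetical glue between src/util.rs and src/tokens.rs (not in this diff):
// classify an identifier string as a keyword variant of the new Token enum,
// falling back to Token::IDENTIFIER for anything that is not a keyword.
use crate::tokens::Token;
use crate::util::is_keyword; // assumes `pub(crate) fn is_keyword` in src/util.rs

fn keyword_token(word: &str) -> Token {
    if !is_keyword(word) {
        return Token::IDENTIFIER;
    }
    match word {
        "void" => Token::VOID,
        "int" => Token::INT,
        "float" => Token::FLOAT,
        "bool" => Token::BOOL,
        "if" => Token::IF,
        "else" => Token::ELSE,
        "for" => Token::FOR,
        "while" => Token::WHILE,
        "return" => Token::RETURN,
        "struct" => Token::STRUCT,
        "uniform" => Token::UNIFORM,
        // "varying" is accepted by is_keyword but has no dedicated variant yet.
        _ => Token::IDENTIFIER,
    }
}
```

A mapping of this shape could eventually back `Lexer::match_token` (or the identifier handler in `src/lex/handlers.rs`) once the scanned lexeme is passed in; until then it is only meant to illustrate how `util::is_keyword` and the new `tokens::Token` variants fit together.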