Refactoring WIP; just need to switch computers :3
Some checks failed
build (push): failing after 1m36s
clippy (push): failing after 1m27s

This commit is contained in:
xqtc 2024-07-21 17:42:15 +02:00
parent 75ea815ba5
commit b3075d7d84
7 changed files with 389 additions and 121 deletions


@@ -72,23 +72,9 @@
"inputs": {
"flake-parts": "flake-parts",
"nixpkgs": "nixpkgs",
"systems": "systems",
"treefmt-nix": "treefmt-nix"
}
},
"systems": {
"flake": false,
"locked": {
"lastModified": 1,
"narHash": "sha256-8wkkYGr1dPSnX9oVMX8D6dTOROXKOYpBTKfriA0sEBI=",
"path": "/nix/store/3447wnbwfkgd91v8x0d3hj48hvb2h010-source/flake.systems.nix",
"type": "path"
},
"original": {
"path": "/nix/store/3447wnbwfkgd91v8x0d3hj48hvb2h010-source/flake.systems.nix",
"type": "path"
}
},
"treefmt-nix": {
"inputs": {
"nixpkgs": "nixpkgs_2"

67
src/lex/lex.rs Normal file

@@ -0,0 +1,67 @@
use crate::tokens::Token;
use std::sync::Arc;
pub struct Lexer {
/// GLSL source
pub input: Vec<char>,
/// Position in source
pub position: usize,
/// The [`char`] at the current position
pub current_char: Option<char>,
}
impl Lexer {
pub fn new(input: &str) -> Self {
let mut lexer = Lexer {
input: input.chars().collect(),
position: 0,
current_char: None,
};
lexer.current_char = if lexer.position < lexer.input.len() {
Some(lexer.input[lexer.position])
} else {
None
};
lexer
}
fn advance(&mut self) {
self.position += 1;
self.current_char = if self.position < self.input.len() {
Some(self.input[self.position])
} else {
None
};
}
fn peek(&self) -> Option<char> {
if self.position + 1 < self.input.len() {
Some(self.input[self.position + 1])
} else {
None
}
}
/// Lexes the source the [`Lexer`] was initialized with and returns the resulting [`Token`]s as an `Arc<[Token]>`.
/// # Example:
/// ```
/// use glsl_lexer::*;
/// let source = r#"
/// #version 440
/// uniform float time;
/// void main() {
/// gl_FragColor = vec4(1.0, 0.5, 0.2, 1.0);
/// }
/// "#;
/// let mut lexer = glsl_lexer::Lexer::new(&source);
/// let tokens = lexer.get_tokens();
/// dbg!(&tokens);
/// ```
// We use Arc<[Token]> as the return type so callers can clone the token list cheaply.
pub fn get_tokens(&mut self) -> Arc<[Token]> {
// TODO: port the lexing loop from lib.rs into this module.
let tokens: Vec<Token> = Vec::new();
tokens.into()
}
fn match_token(&self) -> Token {
// TODO: dispatch on `self.current_char` once the handlers are ported.
todo!()
}
}
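The new `mod handlers;` points at a src/lex/handlers.rs that this diff does not show. For orientation, here is a minimal sketch of one handler the `get_tokens` loop in lib.rs calls; only the name and call shape appear in this commit, the body and module path are assumptions:

use super::lex::Lexer;

impl Lexer {
    /// Consumes a run of whitespace; the caller pushes a single
    /// `Token::Whitespace` afterwards, so nothing is returned here.
    fn consume_whitespace(&mut self) {
        while let Some(c) = self.current_char {
            if !c.is_whitespace() {
                break;
            }
            // Inline advance logic using the pub fields; alternatively
            // call `advance` if it is made `pub(super)`.
            self.position += 1;
            self.current_char = self.input.get(self.position).copied();
        }
    }
}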

2
src/lex/mod.rs Normal file

@@ -0,0 +1,2 @@
mod handlers;
mod lex;


@@ -24,7 +24,9 @@
use std::sync::Arc;
mod handlers;
mod lex;
mod tokens;
mod util;
#[derive(Debug, PartialEq, Clone)]
pub enum Token {
@@ -83,115 +85,62 @@ macro_rules! lex {
}};
}
impl Lexer {
/// Instantiates the [`Lexer`]
pub fn new(input: &str) -> Self {
let mut lexer = Lexer {
input: input.chars().collect(),
position: 0,
current_char: None,
};
lexer.current_char = if lexer.position < lexer.input.len() {
Some(lexer.input[lexer.position])
} else {
None
};
lexer
}
fn advance(&mut self) {
self.position += 1;
self.current_char = if self.position < self.input.len() {
Some(self.input[self.position])
} else {
None
};
}
/// Lexes the source the [`Lexer`] was initialized with and returns the resulting [`Token`]s as an `Arc<[Token]>`.
/// # Example:
/// ```
/// use glsl_lexer::*;
/// let source = r#"
/// #version 440
/// uniform float time;
/// void main() {
/// gl_FragColor = vec4(1.0, 0.5, 0.2, 1.0);
/// }
/// "#;
/// let mut lexer = glsl_lexer::Lexer::new(&source);
/// let tokens = lexer.get_tokens();
/// dbg!(&tokens);
/// ```
// We use Arc<[Token]> as the return type so callers can clone the token list cheaply.
pub fn get_tokens(&mut self) -> Arc<[Token]> {
let mut tokens = Vec::new();
while let Some(c) = self.current_char {
if c.is_whitespace() {
self.consume_whitespace();
tokens.push(Token::Whitespace);
} else if c.is_alphabetic() || c == '_' {
tokens.push(self.consume_identifier_or_keyword());
} else if c.is_ascii_digit() {
tokens.push(self.consume_number());
} else if c == '/' && self.peek() == Some('/') {
tokens.push(self.consume_comment());
} else {
match c {
// TODO Implement operands like +=
'+' | '-' | '*' | '/' | '%' | '&' | '|' | '^' | '!' | '=' | '<' | '>' | '?' => {
tokens.push(self.consume_operator());
}
'{' | '}' | '(' | ')' | '#' | ',' | ';' => {
tokens.push(self.consume_symbol());
}
'.' => {
tokens.push(self.consume_number());
}
_ => {
tokens.push(self.consume_unknown());
}
}
}
}
tokens.push(Token::EOF);
let ret: Arc<[Token]> = tokens.into();
ret
}
fn peek(&self) -> Option<char> {
if self.position + 1 < self.input.len() {
Some(self.input[self.position + 1])
} else {
None
}
}
// fn error(&self, message: &str) -> ! {
//     panic!("Lexer error at position {}: {}", self.position, message);
// }
}
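On the `Arc<[Token]>` return type: cloning an `Arc` copies a pointer and bumps a reference count, while cloning a `Vec<Token>` would copy every element. A standalone sketch of the difference, not part of this commit:

use std::sync::Arc;

fn main() {
    let v: Vec<u32> = (0..1_000_000).collect();
    // Hand the buffer over to shared ownership once.
    let shared: Arc<[u32]> = v.into();

    // O(1): increments the refcount instead of copying a million elements.
    let cheap = Arc::clone(&shared);
    assert_eq!(cheap.len(), shared.len());
}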
fn is_keyword(word: &str) -> bool {
matches!(
word,
"void"
| "int"
| "float"
| "bool"
| "if"
| "else"
| "for"
| "while"
| "return"
| "struct"
| "uniform"
| "varying"
)
}
#[cfg(test)]
mod tests {

246
src/tokens.rs Normal file

@@ -0,0 +1,246 @@
// `nonstandard_style` is the lint group covering `non_camel_case_types` and `non_snake_case`, so one allow suffices.
#![allow(nonstandard_style)]
#[derive(Debug, Clone, PartialEq)]
pub enum Token {
CONST,
BOOL,
FLOAT,
INT,
UINT,
DOUBLE,
MAT2,
MAT3,
MAT4,
MAT2X2,
MAT2X3,
MAT2X4,
MAT3X2,
MAT3X3,
MAT3X4,
MAT4X2,
MAT4X3,
MAT4X4,
DVEC2,
DVEC3,
DVEC4,
DMAT2,
DMAT3,
DMAT4,
DMAT2X2,
DMAT2X3,
DMAT2X4,
DMAT3X2,
DMAT3X3,
DMAT3X4,
DMAT4X2,
DMAT4X3,
DMAT4X4,
CENTROID,
IN,
OUT,
INOUT,
UNIFORM,
PATCH,
SAMPLE,
BUFFER,
SHARED,
COHERENT,
VOLATILE,
RESTRICT,
READONLY,
WRITEONLY,
NOPERSPECTIVE,
FLAT,
SMOOTH,
LAYOUT,
ATOMIC_UINT,
SAMPLER(Sampler),
IMAGE2D,
IIMAGE2D,
UIMAGE2D,
IMAGE3D,
IIMAGE3D,
UIMAGE3D,
IMAGECUBE,
IIMAGECUBE,
UIMAGECUBE,
IMAGEBUFFER,
IIMAGEBUFFER,
UIMAGEBUFFER,
IMAGE2DARRAY,
IIMAGE2DARRAY,
UIMAGE2DARRAY,
IMAGECUBEARRAY,
IIMAGECUBEARRAY,
UIMAGECUBEARRAY,
IMAGE1D,
IIMAGE1D,
UIMAGE1D,
IMAGE1DARRAY,
IIMAGE1DARRAY,
UIMAGE1DARRAY,
IMAGE2DRECT,
IIMAGE2DRECT,
UIMAGE2DRECT,
IMAGE2DMS,
IIMAGE2DMS,
UIMAGE2DMS,
IMAGE2DMSARRAY,
IIMAGE2DMSARRAY,
UIMAGE2DMSARRAY,
STRUCT,
VOID,
WHILE,
BREAK,
CONTINUE,
DO,
ELSE,
FOR,
IF,
DISCARD,
RETURN,
SWITCH,
CASE,
DEFAULT,
SUBROUTINE,
IDENTIFIER,
TYPE_NAME,
FLOATCONSTANT(f32),
INTCONSTANT(i32),
UINTCONSTANT(u32),
BOOLCONSTANT(bool),
DOUBLECONSTANT(f64),
FIELD_SELECTION(String),
LEFT_OP,
RIGHT_OP,
INC_OP,
DEC_OP,
LE_OP,
GE_OP,
EQ_OP,
NE_OP,
AND_OP,
OR_OP,
XOR_OP,
MUL_ASSIGN,
DIV_ASSIGN,
ADD_ASSIGN,
MOD_ASSIGN,
LEFT_ASSIGN,
RIGHT_ASSIGN,
AND_ASSIGN,
XOR_ASSIGN,
OR_ASSIGN,
SUB_ASSIGN,
LEFT_PAREN,
RIGHT_PAREN,
LEFT_BRACKET,
RIGHT_BRACKET,
LEFT_BRACE,
RIGHT_BRACE,
DOT,
COMMA,
COLON,
EQUAL,
SEMICOLON,
BANG,
DASH,
TILDE,
PLUS,
STAR,
SLASH,
PERCENT,
LEFT_ANGLE,
RIGHT_ANGLE,
VERTICAL_BAR,
CARET,
AMPERSAND,
QUESTION,
INVARIANT,
PRECISE,
HIGH_PRECISION,
MEDIUM_PRECISION,
LOW_PRECISION,
PRECISION,
}
#[derive(Debug, Clone, PartialEq)]
pub enum Sampler {
SAMPLER2D,
SAMPLER3D,
SAMPLERCUBE,
SAMPLER2DSHADOW,
SAMPLERCUBESHADOW,
SAMPLER2DARRAY,
SAMPLER2DARRAYSHADOW,
ISAMPLER2D,
ISAMPLER3D,
ISAMPLERCUBE,
ISAMPLER2DARRAY,
USAMPLER2D,
USAMPLER3D,
USAMPLERCUBE,
USAMPLER2DARRAY,
SAMPLER1D,
SAMPLER1DSHADOW,
SAMPLER1DARRAY,
SAMPLER1DARRAYSHADOW,
ISAMPLER1D,
ISAMPLER1DARRAY,
USAMPLER1D,
USAMPLER1DARRAY,
SAMPLER2DRECT,
SAMPLER2DRECTSHADOW,
ISAMPLER2DRECT,
USAMPLER2DRECT,
SAMPLERBUFFER,
ISAMPLERBUFFER,
USAMPLERBUFFER,
SAMPLERCUBEARRAY,
SAMPLERCUBEARRAYSHADOW,
ISAMPLERCUBEARRAY,
USAMPLERCUBEARRAY,
SAMPLER2DMS,
ISAMPLER2DMS,
USAMPLER2DMS,
SAMPLER2DMSARRAY,
ISAMPLER2DMSARRAY,
USAMPLER2DMSARRAY,
}
#[derive(Debug, Clone, PartialEq)]
enum Primitive {} // TODO: fill in primitive variants.
#[derive(Debug, Clone, PartialEq)]
enum Matrix {
MAT2,
MAT3,
MAT4,
MAT2X2,
MAT2X3,
MAT2X4,
MAT3X2,
MAT3X3,
MAT3X4,
MAT4X2,
MAT4X3,
MAT4X4,
}
#[derive(Debug, Clone, PartialEq)]
enum Vector {
BVEC2,
BVEC3,
BVEC4,
IVEC2,
IVEC3,
IVEC4,
UVEC2,
UVEC3,
UVEC4,
VEC2,
VEC3,
VEC4,
}
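Given this variant set, keyword lookup can return a `Token` directly instead of a `bool`. A hypothetical helper, not in this commit, whose arms mirror `is_keyword` in src/util.rs where a matching variant exists (`varying` has none):

// Hypothetical sketch: map a lexed word straight onto Token variants.
fn keyword_token(word: &str) -> Option<Token> {
    match word {
        "void" => Some(Token::VOID),
        "int" => Some(Token::INT),
        "float" => Some(Token::FLOAT),
        "bool" => Some(Token::BOOL),
        "if" => Some(Token::IF),
        "else" => Some(Token::ELSE),
        "for" => Some(Token::FOR),
        "while" => Some(Token::WHILE),
        "return" => Some(Token::RETURN),
        "struct" => Some(Token::STRUCT),
        "uniform" => Some(Token::UNIFORM),
        _ => None,
    }
}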

18
src/util.rs Normal file

@@ -0,0 +1,18 @@
// TODO: extend to the full GLSL keyword set.
pub(crate) fn is_keyword(word: &str) -> bool {
matches!(
word,
"void"
| "int"
| "float"
| "bool"
| "if"
| "else"
| "for"
| "while"
| "return"
| "struct"
| "uniform"
| "varying"
)
}
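A quick sanity check for `is_keyword`, e.g. as a unit test in the same file (assumed placement, not in this commit):

#[cfg(test)]
mod tests {
    use super::is_keyword;

    #[test]
    fn recognizes_keywords() {
        assert!(is_keyword("uniform"));
        assert!(is_keyword("while"));
        assert!(!is_keyword("gl_FragColor"));
        assert!(!is_keyword("time"));
    }
}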