Support swizzling; Some stuff for float handling
All checks were successful
/ build (push) Successful in 1m23s
/ clippy (push) Successful in 1m17s

This commit is contained in:
xqtc 2024-08-01 01:28:14 +02:00
parent 513f2f86f2
commit 5a0656fbe7
10 changed files with 129 additions and 9 deletions

7
Cargo.lock generated
View file

@ -94,6 +94,7 @@ name = "glsl-lexer"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"env_logger", "env_logger",
"lazy_static",
"log", "log",
] ]
@ -109,6 +110,12 @@ version = "1.70.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f8478577c03552c21db0e2724ffb8986a5ce7af88107e6be5d2ee6e158c12800" checksum = "f8478577c03552c21db0e2724ffb8986a5ce7af88107e6be5d2ee6e158c12800"
[[package]]
name = "lazy_static"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
[[package]] [[package]]
name = "log" name = "log"
version = "0.4.22" version = "0.4.22"

View file

@ -5,4 +5,5 @@ edition = "2021"
[dependencies] [dependencies]
env_logger = "0.11.3" env_logger = "0.11.3"
lazy_static = "1.5.0"
log = "0.4.22" log = "0.4.22"

View file

@ -31,7 +31,7 @@
]; ];
rust-toolchain = pkgs.symlinkJoin { rust-toolchain = pkgs.symlinkJoin {
name = "rust-toolchain"; name = "rust-toolchain";
paths = [ pkgs.rustc pkgs.cargo pkgs.cargo-watch pkgs.clippy pkgs.rust-analyzer pkgs.rustPlatform.rustcSrc ]; paths = [ pkgs.rustc pkgs.cargo pkgs.cargo-watch pkgs.cargo-nextest pkgs.clippy pkgs.rust-analyzer pkgs.rustPlatform.rustcSrc ];
}; };
in in
{ {

34
src/ast/ast.rs Normal file
View file

@ -0,0 +1,34 @@
use crate::{lex::lexer::Lexer, tokens::Token};
/// Abstract syntax tree: a flat list of root-level nodes built from lexer tokens.
struct AST {
    // Root nodes in insertion order; each node may carry its own children.
    nodes: Vec<Node>,
}
/// A single tree node: one lexical token plus any child nodes attached to it.
struct Node {
    // The token this node represents.
    token: Token,
    // Children in the order they were added via `add_child`.
    children: Vec<Node>,
}
impl Node {
pub fn new(token: Token) -> Self {
Node {
token,
children: Vec::new(),
}
}
pub fn add_child(&mut self, node: Node) {
self.children.push(node);
}
}
impl AST {
    /// Creates an empty tree with no nodes.
    pub fn new() -> Self {
        Self { nodes: Vec::new() }
    }

    /// Wraps `token` in a fresh childless `Node` and appends it at root level.
    pub fn add_node(&mut self, token: Token) {
        self.nodes.push(Node::new(token));
    }
}

1
src/ast/mod.rs Normal file
View file

@ -0,0 +1 @@
mod ast;

View file

@ -1,3 +1,5 @@
use core::panic;
use crate::lex::lexer::Lexer; use crate::lex::lexer::Lexer;
impl Lexer { impl Lexer {
pub fn consume_whitespace(&mut self) { pub fn consume_whitespace(&mut self) {
@ -33,19 +35,59 @@ impl Lexer {
pub fn consume_number(&mut self) -> crate::tokens::Token { pub fn consume_number(&mut self) -> crate::tokens::Token {
let mut number = String::new(); let mut number = String::new();
let mut is_float = false; let mut is_float = false;
let mut is_swizzle = false;
while let Some(c) = self.current_char { while let Some(c) = self.current_char {
if c.is_numeric() { if c.is_digit(10)
/* && self.peek().unwrap_or_else(|| ' ') == 'f' */
{
number.push(c); number.push(c);
self.advance(); self.advance();
} else if c == '.' || c == 'e' { } else if c == '.' || c == 'e' || c == 'f' {
////////////////////////////////////
//ALLES HIER DRIN IST NICHT SCHÖN//
////////////////////////////////////
match self.peek().unwrap_or_else(|| ' ') {
'x' | 'y' | 'z' | 'w' | 'r' | 'g' | 'b' | 'a' => {
is_swizzle = true;
number.push(c);
self.advance();
}
'0'..='9' | 'e' => {
is_float = true; is_float = true;
number.push(c); number.push(c);
self.advance() self.advance();
}
_ => {}
}
if c == 'f' {
is_float = true;
number.push(c);
self.advance();
dbg!(&number);
break;
}
} else if c.is_alphabetic() {
if c == '.' && self.peek().unwrap_or_else(|| ' ') == 'e' {
is_float = true;
self.advance();
dbg!("break in alphabetic");
break;
}
is_swizzle = true;
number.push(c);
self.advance();
} else {
} }
} }
if is_float { if is_float {
return crate::tokens::Token::FLOATCONSTANT(number); return crate::tokens::Token::FLOATCONSTANT(number);
} }
if is_swizzle {
let split: Vec<&str> = number.split('.').collect();
let ident2 = split[1];
return crate::tokens::Token::Swizzle(vec![".".to_string(), ident2.to_string()]);
}
crate::tokens::Token::INTCONSTANT(number) crate::tokens::Token::INTCONSTANT(number)
} }

View file

@ -29,10 +29,10 @@ impl Lexer {
input: input.chars().collect(), input: input.chars().collect(),
position: 0, position: 0,
current_char: None, current_char: None,
keywords: make_keywords(), keywords: crate::lex::util::KEYWORDS.clone(),
symbols: make_symbols(), symbols: crate::lex::util::SYMBOLS.clone(),
}; };
dbg!("{}", &lexer.keywords); // dbg!("{}", &lexer.keywords);
lexer.current_char = if lexer.position < lexer.input.len() { lexer.current_char = if lexer.position < lexer.input.len() {
Some(lexer.input[lexer.position]) Some(lexer.input[lexer.position])
} else { } else {
@ -85,7 +85,7 @@ impl Lexer {
tokens.push(Token::Whitespace); tokens.push(Token::Whitespace);
} else if c.is_alphabetic() || c == '_' { } else if c.is_alphabetic() || c == '_' {
tokens.push(self.consume_identifier_or_keyword()); tokens.push(self.consume_identifier_or_keyword());
} else if c.is_ascii_digit() { } else if c.is_digit(10) {
tokens.push(self.consume_number()); tokens.push(self.consume_number());
} else if c == '/' && self.peek() == Some('/') { } else if c == '/' && self.peek() == Some('/') {
tokens.push(self.consume_comment()); tokens.push(self.consume_comment());

View file

@ -1,7 +1,13 @@
use lazy_static::lazy_static;
use std::collections::HashMap; use std::collections::HashMap;
use crate::tokens::{Image, Material, Sampler, Token, Vector}; use crate::tokens::{Image, Material, Sampler, Token, Vector};
// Shared, lazily-initialized lookup tables: built once on first access so each
// `Lexer` can clone them instead of rebuilding the maps per construction.
// NOTE(review): `std::sync::LazyLock` (stable in recent Rust) could replace the
// `lazy_static` dependency entirely — confirm the project's MSRV before switching.
lazy_static! {
    pub static ref SYMBOLS: HashMap<char, Token> = make_symbols();
    pub static ref KEYWORDS: HashMap<&'static str, Token> = make_keywords();
}
pub fn make_symbols() -> HashMap<char, Token> { pub fn make_symbols() -> HashMap<char, Token> {
let mut symbols = HashMap::new(); let mut symbols = HashMap::new();
symbols.insert('{', Token::LEFT_BRACE); symbols.insert('{', Token::LEFT_BRACE);

View file

@ -19,6 +19,7 @@
#![allow(dead_code)] #![allow(dead_code)]
#![allow(non_camel_case_types)] #![allow(non_camel_case_types)]
mod ast;
pub mod lex; pub mod lex;
mod tokens; mod tokens;
@ -27,6 +28,17 @@ mod tests {
use super::lex::lexer::Lexer; use super::lex::lexer::Lexer;
use super::tokens::{Image, Token}; use super::tokens::{Image, Token};
#[test]
// Renamed from an offensive identifier; verifies that an `f`-suffixed integer
// literal ("5f") is lexed as a single FLOATCONSTANT rather than INTCONSTANT + ident.
fn float_with_f_suffix() {
    let source = "5f";
    let mut lexer = Lexer::new(source);
    // Method-call syntax, consistent with the other tests in this module.
    let tokens = lexer.get_tokens();
    assert_eq!(
        tokens,
        vec![Token::FLOATCONSTANT("5f".to_string()), Token::EOF].into()
    )
}
#[test] #[test]
fn keyword() { fn keyword() {
let source = "image1D"; let source = "image1D";
@ -134,6 +146,22 @@ mod tests {
vec![Token::INTCONSTANT("12345".to_string()), Token::EOF].into() vec![Token::INTCONSTANT("12345".to_string()), Token::EOF].into()
); );
} }
#[test]
// An identifier followed by `.xyz` must lex as IDENTIFIER + Swizzle, where the
// swizzle token carries the dot and the component string separately.
fn swizzling() {
    let src = "abcd.xyz";
    let mut lexer = Lexer::new(src);
    let tokens = lexer.get_tokens();
    let expected = vec![
        Token::IDENTIFIER("abcd".to_string()),
        Token::Swizzle(vec![".".to_string(), "xyz".to_string()]),
        Token::EOF,
    ];
    assert_eq!(tokens, expected.into());
}
} }
// #[cfg(test)] // #[cfg(test)]
// mod tests { // mod tests {

View file

@ -4,6 +4,7 @@
pub enum Token { pub enum Token {
EOF, EOF,
Whitespace, Whitespace,
Swizzle(Vec<String>),
Unknown(String), Unknown(String),
Comment(String), Comment(String),
CONST, CONST,