Support swizzling; add initial handling for float literals
All checks were successful
/ build (push) Successful in 1m23s
/ clippy (push) Successful in 1m17s

This commit is contained in:
xqtc 2024-08-01 01:28:14 +02:00
parent 513f2f86f2
commit 5a0656fbe7
10 changed files with 129 additions and 9 deletions

7
Cargo.lock generated
View file

@ -94,6 +94,7 @@ name = "glsl-lexer"
version = "0.1.0"
dependencies = [
"env_logger",
"lazy_static",
"log",
]
@ -109,6 +110,12 @@ version = "1.70.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f8478577c03552c21db0e2724ffb8986a5ce7af88107e6be5d2ee6e158c12800"
[[package]]
name = "lazy_static"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
[[package]]
name = "log"
version = "0.4.22"

View file

@ -5,4 +5,5 @@ edition = "2021"
[dependencies]
env_logger = "0.11.3"
lazy_static = "1.5.0"
log = "0.4.22"

View file

@ -31,7 +31,7 @@
];
rust-toolchain = pkgs.symlinkJoin {
name = "rust-toolchain";
paths = [ pkgs.rustc pkgs.cargo pkgs.cargo-watch pkgs.clippy pkgs.rust-analyzer pkgs.rustPlatform.rustcSrc ];
paths = [ pkgs.rustc pkgs.cargo pkgs.cargo-watch pkgs.cargo-nextest pkgs.clippy pkgs.rust-analyzer pkgs.rustPlatform.rustcSrc ];
};
in
{

34
src/ast/ast.rs Normal file
View file

@ -0,0 +1,34 @@
use crate::{lex::lexer::Lexer, tokens::Token};
/// Abstract syntax tree produced from the lexer's token stream.
struct AST {
    /// Root-level nodes; each may carry its own subtree via `Node::children`.
    nodes: Vec<Node>,
}
/// A single tree node: one token plus the nodes nested beneath it.
struct Node {
    /// The token this node represents.
    token: Token,
    /// Child nodes in insertion order.
    children: Vec<Node>,
}
impl Node {
pub fn new(token: Token) -> Self {
Node {
token,
children: Vec::new(),
}
}
pub fn add_child(&mut self, node: Node) {
self.children.push(node);
}
}
impl AST {
    /// Creates an empty tree with no nodes.
    pub fn new() -> Self {
        AST { nodes: Vec::new() }
    }

    /// Wraps `token` in a fresh leaf `Node` and appends it at the root level.
    pub fn add_node(&mut self, token: Token) {
        let node = Node::new(token);
        self.nodes.push(node);
    }
}

/// Mirrors `AST::new`; satisfies clippy's `new_without_default` lint and lets
/// `AST` be used with APIs that require `Default`.
impl Default for AST {
    fn default() -> Self {
        Self::new()
    }
}

1
src/ast/mod.rs Normal file
View file

@ -0,0 +1 @@
mod ast;

View file

@ -1,3 +1,5 @@
use core::panic;
use crate::lex::lexer::Lexer;
impl Lexer {
pub fn consume_whitespace(&mut self) {
@ -33,19 +35,59 @@ impl Lexer {
pub fn consume_number(&mut self) -> crate::tokens::Token {
let mut number = String::new();
let mut is_float = false;
let mut is_swizzle = false;
while let Some(c) = self.current_char {
if c.is_numeric() {
if c.is_digit(10)
/* && self.peek().unwrap_or_else(|| ' ') == 'f' */
{
number.push(c);
self.advance();
} else if c == '.' || c == 'e' {
is_float = true;
} else if c == '.' || c == 'e' || c == 'f' {
////////////////////////////////////
//ALLES HIER DRIN IST NICHT SCHÖN//
////////////////////////////////////
match self.peek().unwrap_or_else(|| ' ') {
'x' | 'y' | 'z' | 'w' | 'r' | 'g' | 'b' | 'a' => {
is_swizzle = true;
number.push(c);
self.advance();
}
'0'..='9' | 'e' => {
is_float = true;
number.push(c);
self.advance();
}
_ => {}
}
if c == 'f' {
is_float = true;
number.push(c);
self.advance();
dbg!(&number);
break;
}
} else if c.is_alphabetic() {
if c == '.' && self.peek().unwrap_or_else(|| ' ') == 'e' {
is_float = true;
self.advance();
dbg!("break in alphabetic");
break;
}
is_swizzle = true;
number.push(c);
self.advance()
self.advance();
} else {
}
}
if is_float {
return crate::tokens::Token::FLOATCONSTANT(number);
}
if is_swizzle {
let split: Vec<&str> = number.split('.').collect();
let ident2 = split[1];
return crate::tokens::Token::Swizzle(vec![".".to_string(), ident2.to_string()]);
}
crate::tokens::Token::INTCONSTANT(number)
}

View file

@ -29,10 +29,10 @@ impl Lexer {
input: input.chars().collect(),
position: 0,
current_char: None,
keywords: make_keywords(),
symbols: make_symbols(),
keywords: crate::lex::util::KEYWORDS.clone(),
symbols: crate::lex::util::SYMBOLS.clone(),
};
dbg!("{}", &lexer.keywords);
// dbg!("{}", &lexer.keywords);
lexer.current_char = if lexer.position < lexer.input.len() {
Some(lexer.input[lexer.position])
} else {
@ -85,7 +85,7 @@ impl Lexer {
tokens.push(Token::Whitespace);
} else if c.is_alphabetic() || c == '_' {
tokens.push(self.consume_identifier_or_keyword());
} else if c.is_ascii_digit() {
} else if c.is_digit(10) {
tokens.push(self.consume_number());
} else if c == '/' && self.peek() == Some('/') {
tokens.push(self.consume_comment());

View file

@ -1,7 +1,13 @@
use lazy_static::lazy_static;
use std::collections::HashMap;
use crate::tokens::{Image, Material, Sampler, Token, Vector};
lazy_static! {
pub static ref SYMBOLS: HashMap<char, Token> = make_symbols();
pub static ref KEYWORDS: HashMap<&'static str, Token> = make_keywords();
}
pub fn make_symbols() -> HashMap<char, Token> {
let mut symbols = HashMap::new();
symbols.insert('{', Token::LEFT_BRACE);

View file

@ -19,6 +19,7 @@
#![allow(dead_code)]
#![allow(non_camel_case_types)]
mod ast;
pub mod lex;
mod tokens;
@ -27,6 +28,17 @@ mod tests {
use super::lex::lexer::Lexer;
use super::tokens::{Image, Token};
#[test]
fn float_with_f_suffix() {
    // A bare integer with an `f` suffix (e.g. `5f`) must lex as a float
    // constant, with the suffix kept in the token text.
    let source = "5f";
    let mut lexer = Lexer::new(source);
    // Method-call syntax, consistent with the `swizzling` test below.
    let tokens = lexer.get_tokens();
    assert_eq!(
        tokens,
        vec![Token::FLOATCONSTANT("5f".to_string()), Token::EOF].into()
    )
}
#[test]
fn keyword() {
let source = "image1D";
@ -134,6 +146,22 @@ mod tests {
vec![Token::INTCONSTANT("12345".to_string()), Token::EOF].into()
);
}
#[test]
fn swizzling() {
    // `abcd.xyz` must lex as an identifier followed by a swizzle token
    // holding the `.` separator and the component string.
    let source = "abcd.xyz";
    let mut lexer = Lexer::new(source);
    let tokens = lexer.get_tokens();
    let expected = vec![
        Token::IDENTIFIER("abcd".to_string()),
        Token::Swizzle(vec![".".to_string(), "xyz".to_string()]),
        Token::EOF,
    ];
    assert_eq!(tokens, expected.into());
}
}
// #[cfg(test)]
// mod tests {

View file

@ -4,6 +4,7 @@
pub enum Token {
EOF,
Whitespace,
Swizzle(Vec<String>),
Unknown(String),
Comment(String),
CONST,