Add handler for operators; README
This commit is contained in:
parent
c2966ce973
commit
483eb1885a
12
README.md
Normal file
12
README.md
Normal file
|
@ -0,0 +1,12 @@
|
||||||
|
# GLSL-LEXER
|
||||||
|
A library providing lexing capabilities for GLSL 460 and its subsets.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 👷🏻‍♀️ UNDER CONSTRUCTION 🚧
|
||||||
|
|
||||||
|
---
|
||||||
|
## Features
|
||||||
|
- [ ] Building Syntax Tree
|
||||||
|
- [ ] Exporting Syntax Tree as graphviz
|
||||||
|
- [ ] Integration with [epoqe demo toolchain](https://www.pouet.net/groups.php?which=15323)
|
|
@ -49,6 +49,28 @@ impl Lexer {
|
||||||
todo!()
|
todo!()
|
||||||
}
|
}
|
||||||
pub fn consume_operator(&mut self) -> crate::tokens::Token {
|
pub fn consume_operator(&mut self) -> crate::tokens::Token {
|
||||||
todo!()
|
let mut operator = String::new();
|
||||||
|
while let Some(c) = self.current_char {
|
||||||
|
operator.push(c);
|
||||||
|
match self.peek() {
|
||||||
|
Some('+') | Some('-') | Some('*') | Some('/') | Some('%') | Some('=')
|
||||||
|
| Some('!') | Some('<') | Some('>') => {
|
||||||
|
self.advance();
|
||||||
|
operator.push(
|
||||||
|
self.current_char
|
||||||
|
.expect("Critical error while lexing operators! Aborting..."),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
None => {
|
||||||
|
self.advance();
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
self.advance();
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
crate::tokens::Token::IDENTIFIER(operator)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -2,7 +2,7 @@ use crate::tokens::Token;
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
|
||||||
use super::util::populate_tokens;
|
use super::util::populate_keywords;
|
||||||
|
|
||||||
pub struct Lexer {
|
pub struct Lexer {
|
||||||
/// GLSL source
|
/// GLSL source
|
||||||
|
@ -29,7 +29,8 @@ impl Lexer {
|
||||||
current_char: None,
|
current_char: None,
|
||||||
keywords: HashMap::new(),
|
keywords: HashMap::new(),
|
||||||
};
|
};
|
||||||
populate_tokens(&mut lexer);
|
// Populate keywords HashMap
|
||||||
|
populate_keywords(&mut lexer);
|
||||||
dbg!("{}", &lexer.keywords);
|
dbg!("{}", &lexer.keywords);
|
||||||
lexer.current_char = if lexer.position < lexer.input.len() {
|
lexer.current_char = if lexer.position < lexer.input.len() {
|
||||||
Some(lexer.input[lexer.position])
|
Some(lexer.input[lexer.position])
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
use crate::tokens::{Image, Material, Sampler, Token, Vector};
|
use crate::tokens::{Image, Material, Sampler, Token, Vector};
|
||||||
|
|
||||||
pub fn populate_tokens(lexer: &mut crate::lex::lexer::Lexer) {
|
pub fn populate_keywords(lexer: &mut crate::lex::lexer::Lexer) {
|
||||||
lexer.keywords.insert("const", Token::CONST);
|
lexer.keywords.insert("const", Token::CONST);
|
||||||
lexer.keywords.insert("uniform", Token::UNIFORM);
|
lexer.keywords.insert("uniform", Token::UNIFORM);
|
||||||
lexer.keywords.insert("buffer", Token::BUFFER);
|
lexer.keywords.insert("buffer", Token::BUFFER);
|
||||||
|
|
Loading…
Reference in a new issue