Number handling (w/o swizzling); change HashMap population

parent 9be5a5e4c8
commit d3da252586
@@ -31,7 +31,23 @@ impl Lexer {
     }
 
     pub fn consume_number(&mut self) -> crate::tokens::Token {
-        todo!()
+        let mut number = String::new();
+        let mut is_float = false;
+        while let Some(c) = self.current_char {
+            if c.is_numeric() {
+                number.push(c);
+                self.advance();
+            } else if c == '.' {
+                is_float = true;
+                number.push(c);
+                self.advance()
+            }
+        }
+        if is_float {
+            return crate::tokens::Token::FLOATCONSTANT(number);
+        }
+
+        crate::tokens::Token::INTCONSTANT(number)
     }
 
     pub fn consume_comment(&mut self) -> crate::tokens::Token {
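Review note on the hunk above: as committed, the `while let` has no branch for a character that is neither a digit nor `'.'`, so `consume_number` never breaks out of the loop when the literal is followed by anything else (e.g. `123;`), and any number of dots is consumed. A minimal guarded variant — a sketch, not part of this commit — could read:

    pub fn consume_number(&mut self) -> crate::tokens::Token {
        let mut number = String::new();
        let mut is_float = false;
        while let Some(c) = self.current_char {
            if c.is_ascii_digit() {
                number.push(c);
                self.advance();
            } else if c == '.' && !is_float {
                // Accept at most one decimal point; "1.2" then stops.
                is_float = true;
                number.push(c);
                self.advance();
            } else {
                // Anything else ends the literal instead of looping forever.
                break;
            }
        }
        if is_float {
            return crate::tokens::Token::FLOATCONSTANT(number);
        }
        crate::tokens::Token::INTCONSTANT(number)
    }

`char::is_numeric` also matches non-ASCII digit characters; `is_ascii_digit` is the stricter choice for a GLSL-style grammar.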
@@ -29,12 +29,9 @@ impl Lexer {
             input: input.chars().collect(),
             position: 0,
             current_char: None,
-            keywords: HashMap::new(),
-            symbols: HashMap::new(),
+            keywords: populate_keywords(),
+            symbols: populate_symbols(),
         };
-        // Populate keywords HashMap
-        populate_keywords(&mut lexer);
-        populate_symbols(&mut lexer);
         dbg!("{}", &lexer.keywords);
         lexer.current_char = if lexer.position < lexer.input.len() {
             Some(lexer.input[lexer.position])
@@ -43,6 +40,8 @@ impl Lexer {
         };
         lexer
     }
+
+    /// Increments the lexer's position
     pub fn advance(&mut self) {
         self.position += 1;
         self.current_char = if self.position < self.input.len() {
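Review note: returning ready-built maps from `populate_keywords()` / `populate_symbols()` removes the old two-phase construction, so the struct literal is complete the moment it is built and each table becomes testable without a `Lexer` (the surviving `dbg!("{}", &lexer.keywords)` looks like leftover debug output that could go in a follow-up). A table test might look like this — hypothetical, assuming `Token` keeps the `PartialEq`/`Debug` impls the existing `assert_eq!` tests already rely on:

    #[test]
    fn keyword_table_maps_return() {
        // Build the table directly; no Lexer needed any more.
        let keywords = populate_keywords();
        assert_eq!(keywords.get("return"), Some(&Token::RETURN));
    }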
@@ -1,3 +1,5 @@
 pub mod handlers;
 pub mod lexer;
 pub mod util;
+
+use lexer::Lexer;
src/lex/util.rs (522 lines changed)
@@ -1,384 +1,218 @@
+use std::collections::HashMap;
+
 use crate::tokens::{Image, Material, Sampler, Token, Vector};
 
-pub fn populate_symbols(lexer: &mut crate::lex::lexer::Lexer) {
-    lexer.symbols.insert('{', Token::LEFT_BRACE);
-    lexer.symbols.insert('}', Token::RIGHT_BRACE);
-    lexer.symbols.insert('(', Token::LEFT_PAREN);
-    lexer.symbols.insert(')', Token::RIGHT_PAREN);
-    lexer.symbols.insert('[', Token::LEFT_BRACKET);
-    lexer.symbols.insert(']', Token::RIGHT_BRACKET);
-    lexer.symbols.insert('.', Token::DOT);
-    lexer.symbols.insert(',', Token::COMMA);
-    lexer.symbols.insert(':', Token::COLON);
-    lexer.symbols.insert(';', Token::SEMICOLON);
-}
+pub fn populate_symbols() -> HashMap<char, Token> {
+    let mut symbols = HashMap::new();
+    symbols.insert('{', Token::LEFT_BRACE);
+    symbols.insert('}', Token::RIGHT_BRACE);
+    symbols.insert('(', Token::LEFT_PAREN);
+    symbols.insert(')', Token::RIGHT_PAREN);
+    symbols.insert('[', Token::LEFT_BRACKET);
+    symbols.insert(']', Token::RIGHT_BRACKET);
+    symbols.insert('.', Token::DOT);
+    symbols.insert(',', Token::COMMA);
+    symbols.insert(':', Token::COLON);
+    symbols.insert(';', Token::SEMICOLON);
+    symbols
+}
 
-pub fn populate_keywords(lexer: &mut crate::lex::lexer::Lexer) {
-    lexer.keywords.insert("const", Token::CONST);
-    lexer.keywords.insert("uniform", Token::UNIFORM);
-    lexer.keywords.insert("buffer", Token::BUFFER);
-    lexer.keywords.insert("shared", Token::SHARED);
-    lexer.keywords.insert("coherent", Token::COHERENT);
-    lexer.keywords.insert("volatile", Token::VOLATILE);
-    lexer.keywords.insert("restrict", Token::RESTRICT);
-    lexer.keywords.insert("readonly", Token::READONLY);
-    lexer.keywords.insert("writeonly", Token::WRITEONLY);
-    lexer.keywords.insert("atomic_uint", Token::ATOMIC_UINT);
-    lexer.keywords.insert("layout", Token::LAYOUT);
-    lexer.keywords.insert("centroid", Token::CENTROID);
-    lexer.keywords.insert("flat", Token::FLAT);
-    lexer.keywords.insert("smooth", Token::SMOOTH);
-    lexer.keywords.insert("noperspective", Token::NOPERSPECTIVE);
-    lexer.keywords.insert("patch", Token::PATCH);
-    lexer.keywords.insert("sample", Token::SAMPLE);
-    lexer.keywords.insert("break", Token::BREAK);
-    lexer.keywords.insert("continue", Token::CONTINUE);
-    lexer.keywords.insert("do", Token::DO);
-    lexer.keywords.insert("for", Token::FOR);
-    lexer.keywords.insert("while", Token::WHILE);
-    lexer.keywords.insert("switch", Token::SWITCH);
-    lexer.keywords.insert("case", Token::CASE);
-    lexer.keywords.insert("default", Token::DEFAULT);
-    lexer.keywords.insert("if", Token::IF);
-    lexer.keywords.insert("else", Token::ELSE);
-    lexer.keywords.insert("subroutine", Token::SUBROUTINE);
-    lexer.keywords.insert("in", Token::IN);
-    lexer.keywords.insert("out", Token::OUT);
-    lexer.keywords.insert("inout", Token::INOUT);
-    lexer.keywords.insert("float", Token::FLOAT);
-    lexer.keywords.insert("double", Token::DOUBLE);
-    lexer.keywords.insert("int", Token::INT);
-    lexer.keywords.insert("void", Token::VOID);
-    lexer.keywords.insert("bool", Token::BOOL);
-    lexer.keywords.insert("true", Token::BOOLCONSTANT(true));
-    lexer.keywords.insert("false", Token::BOOLCONSTANT(false));
-    lexer.keywords.insert("invariant", Token::INVARIANT);
-    lexer.keywords.insert("precise", Token::PRECISE);
-    lexer.keywords.insert("discard", Token::DISCARD);
-    lexer.keywords.insert("return", Token::RETURN);
-    lexer
-        .keywords
-        .insert("mat2", Token::Material(Material::MAT2));
-    lexer
-        .keywords
-        .insert("mat3", Token::Material(Material::MAT3));
-    lexer
-        .keywords
-        .insert("mat4", Token::Material(Material::MAT4));
-    lexer
-        .keywords
-        .insert("dmat2", Token::Material(Material::DMAT2));
-    lexer
-        .keywords
-        .insert("dmat3", Token::Material(Material::DMAT3));
-    lexer
-        .keywords
-        .insert("dmat4", Token::Material(Material::DMAT4));
-    lexer
-        .keywords
-        .insert("mat2x2", Token::Material(Material::MAT2X2));
-    lexer
-        .keywords
-        .insert("mat2x3", Token::Material(Material::MAT2X3));
-    lexer
-        .keywords
-        .insert("mat2x4", Token::Material(Material::MAT2X4));
-    lexer
-        .keywords
-        .insert("dmat2x2", Token::Material(Material::DMAT2X2));
-    lexer
-        .keywords
-        .insert("dmat2x3", Token::Material(Material::DMAT2X3));
-    lexer
-        .keywords
-        .insert("dmat2x4", Token::Material(Material::DMAT2X4));
-    lexer
-        .keywords
-        .insert("mat3x2", Token::Material(Material::MAT3X2));
-    lexer
-        .keywords
-        .insert("mat3x3", Token::Material(Material::MAT3X3));
-    lexer
-        .keywords
-        .insert("mat3x4", Token::Material(Material::MAT3X4));
-    lexer
-        .keywords
-        .insert("dmat3x2", Token::Material(Material::DMAT3X2));
-    lexer
-        .keywords
-        .insert("dmat3x3", Token::Material(Material::DMAT3X3));
-    lexer
-        .keywords
-        .insert("dmat3x4", Token::Material(Material::DMAT3X4));
-    lexer
-        .keywords
-        .insert("mat4x2", Token::Material(Material::MAT4X2));
-    lexer
-        .keywords
-        .insert("mat4x3", Token::Material(Material::MAT4X3));
-    lexer
-        .keywords
-        .insert("mat4x4", Token::Material(Material::MAT4X4));
-    lexer
-        .keywords
-        .insert("dmat4x2", Token::Material(Material::DMAT4X2));
-    lexer
-        .keywords
-        .insert("dmat4x3", Token::Material(Material::DMAT4X3));
-    lexer
-        .keywords
-        .insert("dmat4x4", Token::Material(Material::DMAT4X4));
-    lexer.keywords.insert("vec2", Token::Vector(Vector::VEC2));
-    lexer.keywords.insert("vec3", Token::Vector(Vector::VEC3));
-    lexer.keywords.insert("vec4", Token::Vector(Vector::VEC4));
-    lexer.keywords.insert("ivec2", Token::Vector(Vector::IVEC2));
-    lexer.keywords.insert("ivec3", Token::Vector(Vector::IVEC3));
-    lexer.keywords.insert("ivec4", Token::Vector(Vector::IVEC4));
-    lexer.keywords.insert("bvec2", Token::Vector(Vector::BVEC2));
-    lexer.keywords.insert("bvec3", Token::Vector(Vector::BVEC3));
-    lexer.keywords.insert("bvec4", Token::Vector(Vector::BVEC4));
-    lexer.keywords.insert("dvec2", Token::Vector(Vector::DVEC2));
-    lexer.keywords.insert("dvec3", Token::Vector(Vector::DVEC3));
-    lexer.keywords.insert("dvec4", Token::Vector(Vector::DVEC4));
-    lexer.keywords.insert("uint", Token::UINT);
-    lexer.keywords.insert("uvec2", Token::Vector(Vector::UVEC2));
-    lexer.keywords.insert("uvec3", Token::Vector(Vector::UVEC3));
-    lexer.keywords.insert("uvec4", Token::Vector(Vector::UVEC4));
-    lexer.keywords.insert("lowp", Token::LOW_PRECISION);
-    lexer.keywords.insert("mediump", Token::MEDIUM_PRECISION);
-    lexer.keywords.insert("highp", Token::HIGH_PRECISION);
-    lexer.keywords.insert("precision", Token::PRECISION);
-    lexer
-        .keywords
-        .insert("sampler1D", Token::Sampler(Sampler::SAMPLER1D));
-    lexer
-        .keywords
-        .insert("sampler2D", Token::Sampler(Sampler::SAMPLER2D));
-    lexer
-        .keywords
-        .insert("sampler3D", Token::Sampler(Sampler::SAMPLER3D));
-    lexer
-        .keywords
-        .insert("samplerCube", Token::Sampler(Sampler::SAMPLERCUBE));
-    lexer
-        .keywords
-        .insert("sampler1DShadow", Token::Sampler(Sampler::SAMPLER1DSHADOW));
-    lexer
-        .keywords
-        .insert("sampler2DShadow", Token::Sampler(Sampler::SAMPLER2DSHADOW));
-    lexer.keywords.insert(
-        "samplerCubeShadow",
-        Token::Sampler(Sampler::SAMPLERCUBESHADOW),
-    );
-    lexer
-        .keywords
-        .insert("sampler1DArray", Token::Sampler(Sampler::SAMPLER1DARRAY));
-    lexer
-        .keywords
-        .insert("sampler2DArray", Token::Sampler(Sampler::SAMPLER2DARRAY));
-    lexer.keywords.insert(
-        "sampler1DArrayShadow",
-        Token::Sampler(Sampler::SAMPLER1DARRAYSHADOW),
-    );
-    lexer.keywords.insert(
-        "sampler2DArrayShadow",
-        Token::Sampler(Sampler::SAMPLER2DARRAYSHADOW),
-    );
-    lexer
-        .keywords
-        .insert("isampler1D", Token::Sampler(Sampler::ISAMPLER1D));
-    lexer
-        .keywords
-        .insert("isampler2D", Token::Sampler(Sampler::ISAMPLER2D));
-    lexer
-        .keywords
-        .insert("isampler3D", Token::Sampler(Sampler::ISAMPLER3D));
-    lexer
-        .keywords
-        .insert("isamplerCube", Token::Sampler(Sampler::ISAMPLERCUBE));
-    lexer
-        .keywords
-        .insert("isampler1DArray", Token::Sampler(Sampler::ISAMPLER1DARRAY));
-    lexer
-        .keywords
-        .insert("isampler2DArray", Token::Sampler(Sampler::ISAMPLER2DARRAY));
-    lexer
-        .keywords
-        .insert("usampler1D", Token::Sampler(Sampler::USAMPLER1D));
-    lexer
-        .keywords
-        .insert("usampler2D", Token::Sampler(Sampler::USAMPLER2D));
-    lexer
-        .keywords
-        .insert("usampler3D", Token::Sampler(Sampler::USAMPLER3D));
-    lexer
-        .keywords
-        .insert("usamplerCube", Token::Sampler(Sampler::USAMPLERCUBE));
-    lexer
-        .keywords
-        .insert("usampler1DArray", Token::Sampler(Sampler::USAMPLER1DARRAY));
-    lexer
-        .keywords
-        .insert("usampler2DArray", Token::Sampler(Sampler::USAMPLER2DARRAY));
-    lexer
-        .keywords
-        .insert("sampler2DRect", Token::Sampler(Sampler::SAMPLER2DRECT));
-    lexer.keywords.insert(
-        "sampler2DRectShadow",
-        Token::Sampler(Sampler::SAMPLER2DRECTSHADOW),
-    );
-    lexer
-        .keywords
-        .insert("isampler2DRect", Token::Sampler(Sampler::ISAMPLER2DRECT));
-    lexer
-        .keywords
-        .insert("usampler2DRect", Token::Sampler(Sampler::USAMPLER2DRECT));
-    lexer
-        .keywords
-        .insert("samplerBuffer", Token::Sampler(Sampler::SAMPLERBUFFER));
-    lexer
-        .keywords
-        .insert("isamplerBuffer", Token::Sampler(Sampler::ISAMPLERBUFFER));
-    lexer
-        .keywords
-        .insert("usamplerBuffer", Token::Sampler(Sampler::USAMPLERBUFFER));
-    lexer
-        .keywords
-        .insert("sampler2DMS", Token::Sampler(Sampler::SAMPLER2DMS));
-    lexer
-        .keywords
-        .insert("isampler2DMS", Token::Sampler(Sampler::ISAMPLER2DMS));
-    lexer
-        .keywords
-        .insert("usampler2DMS", Token::Sampler(Sampler::USAMPLER2DMS));
-    lexer.keywords.insert(
-        "sampler2DMSArray",
-        Token::Sampler(Sampler::SAMPLER2DMSARRAY),
-    );
-    lexer.keywords.insert(
-        "isampler2DMSArray",
-        Token::Sampler(Sampler::ISAMPLER2DMSARRAY),
-    );
-    lexer.keywords.insert(
-        "usampler2DMSArray",
-        Token::Sampler(Sampler::USAMPLER2DMSARRAY),
-    );
-    lexer.keywords.insert(
-        "samplerCubeArray",
-        Token::Sampler(Sampler::SAMPLERCUBEARRAY),
-    );
-    lexer.keywords.insert(
-        "samplerCubeArrayShadow",
-        Token::Sampler(Sampler::SAMPLERCUBEARRAYSHADOW),
-    );
-    lexer.keywords.insert(
-        "isamplerCubeArray",
-        Token::Sampler(Sampler::ISAMPLERCUBEARRAY),
-    );
-    lexer.keywords.insert(
-        "usamplerCubeArray",
-        Token::Sampler(Sampler::USAMPLERCUBEARRAY),
-    );
-    lexer
-        .keywords
-        .insert("image1D", Token::Image(Image::IMAGE1D));
-    lexer
-        .keywords
-        .insert("iimage1D", Token::Image(Image::IIMAGE1D));
-    lexer
-        .keywords
-        .insert("uimage1D", Token::Image(Image::UIMAGE1D));
-    lexer
-        .keywords
-        .insert("image2D", Token::Image(Image::IMAGE2D));
-    lexer
-        .keywords
-        .insert("iimage2D", Token::Image(Image::IIMAGE2D));
-    lexer
-        .keywords
-        .insert("uimage2D", Token::Image(Image::UIMAGE2D));
-    lexer
-        .keywords
-        .insert("image3D", Token::Image(Image::IMAGE3D));
-    lexer
-        .keywords
-        .insert("iimage3D", Token::Image(Image::IIMAGE3D));
-    lexer
-        .keywords
-        .insert("uimage3D", Token::Image(Image::UIMAGE3D));
-    lexer
-        .keywords
-        .insert("image2DRect", Token::Image(Image::IMAGE2DRECT));
-    lexer
-        .keywords
-        .insert("iimage2DRect", Token::Image(Image::IIMAGE2DRECT));
-    lexer
-        .keywords
-        .insert("uimage2DRect", Token::Image(Image::UIMAGE2DRECT));
-    lexer
-        .keywords
-        .insert("imageCube", Token::Image(Image::IMAGECUBE));
-    lexer
-        .keywords
-        .insert("iimageCube", Token::Image(Image::IIMAGECUBE));
-    lexer
-        .keywords
-        .insert("uimageCube", Token::Image(Image::UIMAGECUBE));
-    lexer
-        .keywords
-        .insert("imageBuffer", Token::Image(Image::IMAGEBUFFER));
-    lexer
-        .keywords
-        .insert("iimageBuffer", Token::Image(Image::IIMAGEBUFFER));
-    lexer
-        .keywords
-        .insert("uimageBuffer", Token::Image(Image::UIMAGEBUFFER));
-    lexer
-        .keywords
-        .insert("image1DArray", Token::Image(Image::IMAGE1DARRAY));
-    lexer
-        .keywords
-        .insert("iimage1DArray", Token::Image(Image::IIMAGE1DARRAY));
-    lexer
-        .keywords
-        .insert("uimage1DArray", Token::Image(Image::UIMAGE1DARRAY));
-    lexer
-        .keywords
-        .insert("image2DArray", Token::Image(Image::IMAGE2DARRAY));
-    lexer
-        .keywords
-        .insert("iimage2DArray", Token::Image(Image::IIMAGE2DARRAY));
-    lexer
-        .keywords
-        .insert("uimage2DArray", Token::Image(Image::UIMAGE2DARRAY));
-    lexer
-        .keywords
-        .insert("imageCubeArray", Token::Image(Image::IMAGECUBEARRAY));
-    lexer
-        .keywords
-        .insert("iimageCubeArray", Token::Image(Image::IIMAGECUBEARRAY));
-    lexer
-        .keywords
-        .insert("uimageCubeArray", Token::Image(Image::UIMAGECUBEARRAY));
-    lexer
-        .keywords
-        .insert("image2DMS", Token::Image(Image::IMAGE2DMS));
-    lexer
-        .keywords
-        .insert("iimage2DMS", Token::Image(Image::IIMAGE2DMS));
-    lexer
-        .keywords
-        .insert("uimage2DMS", Token::Image(Image::UIMAGE2DMS));
-    lexer
-        .keywords
-        .insert("image2DMSArray", Token::Image(Image::IMAGE2DMSARRAY));
-    lexer
-        .keywords
-        .insert("iimage2DMSArray", Token::Image(Image::IIMAGE2DMSARRAY));
-    lexer
-        .keywords
-        .insert("uimage2DMSArray", Token::Image(Image::UIMAGE2DMSARRAY));
-    lexer.keywords.insert("struct", Token::STRUCT);
-}
+pub fn populate_keywords() -> HashMap<&'static str, Token> {
+    let mut keywords = HashMap::new();
+    keywords.insert("const", Token::CONST);
+    keywords.insert("uniform", Token::UNIFORM);
+    keywords.insert("buffer", Token::BUFFER);
+    keywords.insert("shared", Token::SHARED);
+    keywords.insert("coherent", Token::COHERENT);
+    keywords.insert("volatile", Token::VOLATILE);
+    keywords.insert("restrict", Token::RESTRICT);
+    keywords.insert("readonly", Token::READONLY);
+    keywords.insert("writeonly", Token::WRITEONLY);
+    keywords.insert("atomic_uint", Token::ATOMIC_UINT);
+    keywords.insert("layout", Token::LAYOUT);
+    keywords.insert("centroid", Token::CENTROID);
+    keywords.insert("flat", Token::FLAT);
+    keywords.insert("smooth", Token::SMOOTH);
+    keywords.insert("noperspective", Token::NOPERSPECTIVE);
+    keywords.insert("patch", Token::PATCH);
+    keywords.insert("sample", Token::SAMPLE);
+    keywords.insert("break", Token::BREAK);
+    keywords.insert("continue", Token::CONTINUE);
+    keywords.insert("do", Token::DO);
+    keywords.insert("for", Token::FOR);
+    keywords.insert("while", Token::WHILE);
+    keywords.insert("switch", Token::SWITCH);
+    keywords.insert("case", Token::CASE);
+    keywords.insert("default", Token::DEFAULT);
+    keywords.insert("if", Token::IF);
+    keywords.insert("else", Token::ELSE);
+    keywords.insert("subroutine", Token::SUBROUTINE);
+    keywords.insert("in", Token::IN);
+    keywords.insert("out", Token::OUT);
+    keywords.insert("inout", Token::INOUT);
+    keywords.insert("float", Token::FLOAT);
+    keywords.insert("double", Token::DOUBLE);
+    keywords.insert("int", Token::INT);
+    keywords.insert("void", Token::VOID);
+    keywords.insert("bool", Token::BOOL);
+    keywords.insert("true", Token::BOOLCONSTANT(true));
+    keywords.insert("false", Token::BOOLCONSTANT(false));
+    keywords.insert("invariant", Token::INVARIANT);
+    keywords.insert("precise", Token::PRECISE);
+    keywords.insert("discard", Token::DISCARD);
+    keywords.insert("return", Token::RETURN);
+    keywords.insert("mat2", Token::Material(Material::MAT2));
+    keywords.insert("mat3", Token::Material(Material::MAT3));
+    keywords.insert("mat4", Token::Material(Material::MAT4));
+    keywords.insert("dmat2", Token::Material(Material::DMAT2));
+    keywords.insert("dmat3", Token::Material(Material::DMAT3));
+    keywords.insert("dmat4", Token::Material(Material::DMAT4));
+    keywords.insert("mat2x2", Token::Material(Material::MAT2X2));
+    keywords.insert("mat2x3", Token::Material(Material::MAT2X3));
+    keywords.insert("mat2x4", Token::Material(Material::MAT2X4));
+    keywords.insert("dmat2x2", Token::Material(Material::DMAT2X2));
+    keywords.insert("dmat2x3", Token::Material(Material::DMAT2X3));
+    keywords.insert("dmat2x4", Token::Material(Material::DMAT2X4));
+    keywords.insert("mat3x2", Token::Material(Material::MAT3X2));
+    keywords.insert("mat3x3", Token::Material(Material::MAT3X3));
+    keywords.insert("mat3x4", Token::Material(Material::MAT3X4));
+    keywords.insert("dmat3x2", Token::Material(Material::DMAT3X2));
+    keywords.insert("dmat3x3", Token::Material(Material::DMAT3X3));
+    keywords.insert("dmat3x4", Token::Material(Material::DMAT3X4));
+    keywords.insert("mat4x2", Token::Material(Material::MAT4X2));
+    keywords.insert("mat4x3", Token::Material(Material::MAT4X3));
+    keywords.insert("mat4x4", Token::Material(Material::MAT4X4));
+    keywords.insert("dmat4x2", Token::Material(Material::DMAT4X2));
+    keywords.insert("dmat4x3", Token::Material(Material::DMAT4X3));
+    keywords.insert("dmat4x4", Token::Material(Material::DMAT4X4));
+    keywords.insert("vec2", Token::Vector(Vector::VEC2));
+    keywords.insert("vec3", Token::Vector(Vector::VEC3));
+    keywords.insert("vec4", Token::Vector(Vector::VEC4));
+    keywords.insert("ivec2", Token::Vector(Vector::IVEC2));
+    keywords.insert("ivec3", Token::Vector(Vector::IVEC3));
+    keywords.insert("ivec4", Token::Vector(Vector::IVEC4));
+    keywords.insert("bvec2", Token::Vector(Vector::BVEC2));
+    keywords.insert("bvec3", Token::Vector(Vector::BVEC3));
+    keywords.insert("bvec4", Token::Vector(Vector::BVEC4));
+    keywords.insert("dvec2", Token::Vector(Vector::DVEC2));
+    keywords.insert("dvec3", Token::Vector(Vector::DVEC3));
+    keywords.insert("dvec4", Token::Vector(Vector::DVEC4));
+    keywords.insert("uint", Token::UINT);
+    keywords.insert("uvec2", Token::Vector(Vector::UVEC2));
+    keywords.insert("uvec3", Token::Vector(Vector::UVEC3));
+    keywords.insert("uvec4", Token::Vector(Vector::UVEC4));
+    keywords.insert("lowp", Token::LOW_PRECISION);
+    keywords.insert("mediump", Token::MEDIUM_PRECISION);
+    keywords.insert("highp", Token::HIGH_PRECISION);
+    keywords.insert("precision", Token::PRECISION);
+    keywords.insert("sampler1D", Token::Sampler(Sampler::SAMPLER1D));
+    keywords.insert("sampler2D", Token::Sampler(Sampler::SAMPLER2D));
+    keywords.insert("sampler3D", Token::Sampler(Sampler::SAMPLER3D));
+    keywords.insert("samplerCube", Token::Sampler(Sampler::SAMPLERCUBE));
+    keywords.insert("sampler1DShadow", Token::Sampler(Sampler::SAMPLER1DSHADOW));
+    keywords.insert("sampler2DShadow", Token::Sampler(Sampler::SAMPLER2DSHADOW));
+    keywords.insert(
+        "samplerCubeShadow",
+        Token::Sampler(Sampler::SAMPLERCUBESHADOW),
+    );
+    keywords.insert("sampler1DArray", Token::Sampler(Sampler::SAMPLER1DARRAY));
+    keywords.insert("sampler2DArray", Token::Sampler(Sampler::SAMPLER2DARRAY));
+    keywords.insert(
+        "sampler1DArrayShadow",
+        Token::Sampler(Sampler::SAMPLER1DARRAYSHADOW),
+    );
+    keywords.insert(
+        "sampler2DArrayShadow",
+        Token::Sampler(Sampler::SAMPLER2DARRAYSHADOW),
+    );
+    keywords.insert("isampler1D", Token::Sampler(Sampler::ISAMPLER1D));
+    keywords.insert("isampler2D", Token::Sampler(Sampler::ISAMPLER2D));
+    keywords.insert("isampler3D", Token::Sampler(Sampler::ISAMPLER3D));
+    keywords.insert("isamplerCube", Token::Sampler(Sampler::ISAMPLERCUBE));
+    keywords.insert("isampler1DArray", Token::Sampler(Sampler::ISAMPLER1DARRAY));
+    keywords.insert("isampler2DArray", Token::Sampler(Sampler::ISAMPLER2DARRAY));
+    keywords.insert("usampler1D", Token::Sampler(Sampler::USAMPLER1D));
+    keywords.insert("usampler2D", Token::Sampler(Sampler::USAMPLER2D));
+    keywords.insert("usampler3D", Token::Sampler(Sampler::USAMPLER3D));
+    keywords.insert("usamplerCube", Token::Sampler(Sampler::USAMPLERCUBE));
+    keywords.insert("usampler1DArray", Token::Sampler(Sampler::USAMPLER1DARRAY));
+    keywords.insert("usampler2DArray", Token::Sampler(Sampler::USAMPLER2DARRAY));
+    keywords.insert("sampler2DRect", Token::Sampler(Sampler::SAMPLER2DRECT));
+    keywords.insert(
+        "sampler2DRectShadow",
+        Token::Sampler(Sampler::SAMPLER2DRECTSHADOW),
+    );
+    keywords.insert("isampler2DRect", Token::Sampler(Sampler::ISAMPLER2DRECT));
+    keywords.insert("usampler2DRect", Token::Sampler(Sampler::USAMPLER2DRECT));
+    keywords.insert("samplerBuffer", Token::Sampler(Sampler::SAMPLERBUFFER));
+    keywords.insert("isamplerBuffer", Token::Sampler(Sampler::ISAMPLERBUFFER));
+    keywords.insert("usamplerBuffer", Token::Sampler(Sampler::USAMPLERBUFFER));
+    keywords.insert("sampler2DMS", Token::Sampler(Sampler::SAMPLER2DMS));
+    keywords.insert("isampler2DMS", Token::Sampler(Sampler::ISAMPLER2DMS));
+    keywords.insert("usampler2DMS", Token::Sampler(Sampler::USAMPLER2DMS));
+    keywords.insert(
+        "sampler2DMSArray",
+        Token::Sampler(Sampler::SAMPLER2DMSARRAY),
+    );
+    keywords.insert(
+        "isampler2DMSArray",
+        Token::Sampler(Sampler::ISAMPLER2DMSARRAY),
+    );
+    keywords.insert(
+        "usampler2DMSArray",
+        Token::Sampler(Sampler::USAMPLER2DMSARRAY),
+    );
+    keywords.insert(
+        "samplerCubeArray",
+        Token::Sampler(Sampler::SAMPLERCUBEARRAY),
+    );
+    keywords.insert(
+        "samplerCubeArrayShadow",
+        Token::Sampler(Sampler::SAMPLERCUBEARRAYSHADOW),
+    );
+    keywords.insert(
+        "isamplerCubeArray",
+        Token::Sampler(Sampler::ISAMPLERCUBEARRAY),
+    );
+    keywords.insert(
+        "usamplerCubeArray",
+        Token::Sampler(Sampler::USAMPLERCUBEARRAY),
+    );
+    keywords.insert("image1D", Token::Image(Image::IMAGE1D));
+    keywords.insert("iimage1D", Token::Image(Image::IIMAGE1D));
+    keywords.insert("uimage1D", Token::Image(Image::UIMAGE1D));
+    keywords.insert("image2D", Token::Image(Image::IMAGE2D));
+    keywords.insert("iimage2D", Token::Image(Image::IIMAGE2D));
+    keywords.insert("uimage2D", Token::Image(Image::UIMAGE2D));
+    keywords.insert("image3D", Token::Image(Image::IMAGE3D));
+    keywords.insert("iimage3D", Token::Image(Image::IIMAGE3D));
+    keywords.insert("uimage3D", Token::Image(Image::UIMAGE3D));
+    keywords.insert("image2DRect", Token::Image(Image::IMAGE2DRECT));
+    keywords.insert("iimage2DRect", Token::Image(Image::IIMAGE2DRECT));
+    keywords.insert("uimage2DRect", Token::Image(Image::UIMAGE2DRECT));
+    keywords.insert("imageCube", Token::Image(Image::IMAGECUBE));
+    keywords.insert("iimageCube", Token::Image(Image::IIMAGECUBE));
+    keywords.insert("uimageCube", Token::Image(Image::UIMAGECUBE));
+    keywords.insert("imageBuffer", Token::Image(Image::IMAGEBUFFER));
+    keywords.insert("iimageBuffer", Token::Image(Image::IIMAGEBUFFER));
+    keywords.insert("uimageBuffer", Token::Image(Image::UIMAGEBUFFER));
+    keywords.insert("image1DArray", Token::Image(Image::IMAGE1DARRAY));
+    keywords.insert("iimage1DArray", Token::Image(Image::IIMAGE1DARRAY));
+    keywords.insert("uimage1DArray", Token::Image(Image::UIMAGE1DARRAY));
+    keywords.insert("image2DArray", Token::Image(Image::IMAGE2DARRAY));
+    keywords.insert("iimage2DArray", Token::Image(Image::IIMAGE2DARRAY));
+    keywords.insert("uimage2DArray", Token::Image(Image::UIMAGE2DARRAY));
+    keywords.insert("imageCubeArray", Token::Image(Image::IMAGECUBEARRAY));
+    keywords.insert("iimageCubeArray", Token::Image(Image::IIMAGECUBEARRAY));
+    keywords.insert("uimageCubeArray", Token::Image(Image::UIMAGECUBEARRAY));
+    keywords.insert("image2DMS", Token::Image(Image::IMAGE2DMS));
+    keywords.insert("iimage2DMS", Token::Image(Image::IIMAGE2DMS));
+    keywords.insert("uimage2DMS", Token::Image(Image::UIMAGE2DMS));
+    keywords.insert("image2DMSArray", Token::Image(Image::IMAGE2DMSARRAY));
+    keywords.insert("iimage2DMSArray", Token::Image(Image::IIMAGE2DMSARRAY));
+    keywords.insert("uimage2DMSArray", Token::Image(Image::UIMAGE2DMSARRAY));
+    keywords.insert("struct", Token::STRUCT);
+    keywords
+}
 
 impl crate::lex::lexer::Lexer {
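Review note: with this change every `Lexer::new` still rebuilds both tables from scratch. If construction cost ever matters, the same functions could feed a process-wide table instead; a sketch using `std::sync::OnceLock` (Rust 1.70+, assumes `Token: Send + Sync`; not part of this commit):

    use std::collections::HashMap;
    use std::sync::OnceLock;

    use crate::tokens::Token;

    // Built on first use, shared by every Lexer afterwards.
    static KEYWORDS: OnceLock<HashMap<&'static str, Token>> = OnceLock::new();

    pub fn keywords() -> &'static HashMap<&'static str, Token> {
        KEYWORDS.get_or_init(populate_keywords)
    }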
src/lib.rs (31 lines changed)
@@ -91,6 +91,37 @@ mod tests {
             .into()
         );
     }
+
+    #[test]
+    fn float_literal() {
+        let source_code = "123.4504";
+        let mut lexer = Lexer::new(source_code);
+        let tokens = lexer.get_tokens();
+        assert_eq!(
+            tokens,
+            vec![Token::FLOATCONSTANT("123.4504".to_string()), Token::EOF].into()
+        );
+    }
+    #[test]
+    fn float_shorthand() {
+        let source_code = ".4504";
+        let mut lexer = Lexer::new(source_code);
+        let tokens = lexer.get_tokens();
+        assert_eq!(
+            tokens,
+            vec![Token::FLOATCONSTANT(".4504".to_string()), Token::EOF].into()
+        );
+    }
+    #[test]
+    fn integer_literal() {
+        let source_code = "12345";
+        let mut lexer = Lexer::new(source_code);
+        let tokens = lexer.get_tokens();
+        assert_eq!(
+            tokens,
+            vec![Token::INTCONSTANT("12345".to_string()), Token::EOF].into()
+        );
+    }
 }
 // #[cfg(test)]
 // mod tests {
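Review note: the new tests cover the happy paths, but inputs like `1.2.3` are left undefined — with the loop as committed, the whole run is consumed into one `FLOATCONSTANT("1.2.3")`. If the single-dot rule sketched earlier were adopted, a test could pin the contract down; the expectation below is hypothetical, not behavior this commit establishes:

    #[test]
    fn float_with_two_dots() {
        let mut lexer = Lexer::new("1.2.3");
        let tokens = lexer.get_tokens();
        // One plausible contract: stop at the second dot, then re-lex ".3"
        // via the same shorthand path exercised by float_shorthand above.
        assert_eq!(
            tokens,
            vec![
                Token::FLOATCONSTANT("1.2".to_string()),
                Token::FLOATCONSTANT(".3".to_string()),
                Token::EOF
            ]
            .into()
        );
    }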
@@ -52,8 +52,8 @@ pub enum Token {
     SUBROUTINE,
     IDENTIFIER(String),
     TYPE_NAME,
-    FLOATCONSTANT(f32),
-    INTCONSTANT(i32),
+    FLOATCONSTANT(String),
+    INTCONSTANT(String),
     UINTCONSTANT(u32),
     BOOLCONSTANT(bool),
     DOUBLECONSTANT(f64),
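Review note on the `f32`/`i32` → `String` switch: carrying the raw lexeme keeps parsing and precision decisions out of the lexer (useful once GLSL suffixes such as `f`/`lf` need distinguishing), and a later stage can convert on demand, roughly like this (a hypothetical helper, assuming the consumer wants an `f32`):

    // Not part of this commit: parse the stored lexeme when a value is needed.
    fn float_value(token: &Token) -> Option<f32> {
        match token {
            Token::FLOATCONSTANT(text) => text.parse::<f32>().ok(),
            _ => None,
        }
    }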