Number handling (w/o swizzling); change HashMap population
parent 9be5a5e4c8
commit d3da252586
@@ -31,7 +31,23 @@ impl Lexer {
     }
 
     pub fn consume_number(&mut self) -> crate::tokens::Token {
-        todo!()
+        let mut number = String::new();
+        let mut is_float = false;
+        while let Some(c) = self.current_char {
+            if c.is_numeric() {
+                number.push(c);
+                self.advance();
+            } else if c == '.' {
+                is_float = true;
+                number.push(c);
+                self.advance()
+            }
+        }
+        if is_float {
+            return crate::tokens::Token::FLOATCONSTANT(number);
+        }
+
+        crate::tokens::Token::INTCONSTANT(number)
     }
 
     pub fn consume_comment(&mut self) -> crate::tokens::Token {
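Note: as committed, the `while let` loop in `consume_number` only advances past digits and dots, so any trailing character (the `;` in `123;`, say) is never consumed and the loop spins forever; it also accepts a second `.`, so `1.2.3` would lex as a single float. A terminating sketch of the method (a hypothetical fix, not what this commit contains):

    pub fn consume_number(&mut self) -> crate::tokens::Token {
        let mut number = String::new();
        let mut is_float = false;
        while let Some(c) = self.current_char {
            if c.is_numeric() {
                number.push(c);
                self.advance();
            } else if c == '.' && !is_float {
                // Accept only the first dot of the literal.
                is_float = true;
                number.push(c);
                self.advance();
            } else {
                // Anything else ends the literal; leave it for the caller.
                break;
            }
        }
        if is_float {
            crate::tokens::Token::FLOATCONSTANT(number)
        } else {
            crate::tokens::Token::INTCONSTANT(number)
        }
    }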
@@ -29,12 +29,9 @@ impl Lexer {
             input: input.chars().collect(),
             position: 0,
             current_char: None,
-            keywords: HashMap::new(),
-            symbols: HashMap::new(),
+            keywords: populate_keywords(),
+            symbols: populate_symbols(),
         };
-        // Populate keywords HashMap
-        populate_keywords(&mut lexer);
-        populate_symbols(&mut lexer);
-        dbg!("{}", &lexer.keywords);
         lexer.current_char = if lexer.position < lexer.input.len() {
             Some(lexer.input[lexer.position])
@@ -43,6 +40,8 @@ impl Lexer {
         };
         lexer
     }
+
+    /// Increments the lexer's position
     pub fn advance(&mut self) {
         self.position += 1;
         self.current_char = if self.position < self.input.len() {
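Note: initializing the struct fields directly from `populate_keywords()` / `populate_symbols()` removes the construct-then-mutate dance and the leftover `dbg!` call. Since the keyword table is identical for every `Lexer`, a process-wide table would be a possible follow-up; a minimal sketch using `std::sync::OnceLock` (hypothetical, not part of this commit, and assuming `Token` is `Sync`, which holds for the payloads shown here):

    use std::collections::HashMap;
    use std::sync::OnceLock;

    // Build the keyword table once per process instead of once per Lexer.
    fn shared_keywords() -> &'static HashMap<&'static str, Token> {
        static KEYWORDS: OnceLock<HashMap<&'static str, Token>> = OnceLock::new();
        KEYWORDS.get_or_init(populate_keywords)
    }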
@@ -1,3 +1,5 @@
 pub mod handlers;
 pub mod lexer;
 pub mod util;
+
+use lexer::Lexer;
src/lex/util.rs
@@ -1,384 +1,218 @@
 use std::collections::HashMap;
 
 use crate::tokens::{Image, Material, Sampler, Token, Vector};
 
-pub fn populate_symbols(lexer: &mut crate::lex::lexer::Lexer) {
-    lexer.symbols.insert('{', Token::LEFT_BRACE);
-    lexer.symbols.insert('}', Token::RIGHT_BRACE);
-    lexer.symbols.insert('(', Token::LEFT_PAREN);
-    lexer.symbols.insert(')', Token::RIGHT_PAREN);
-    lexer.symbols.insert('[', Token::LEFT_BRACKET);
-    lexer.symbols.insert(']', Token::RIGHT_BRACKET);
-    lexer.symbols.insert('.', Token::DOT);
-    lexer.symbols.insert(',', Token::COMMA);
-    lexer.symbols.insert(':', Token::COLON);
-    lexer.symbols.insert(';', Token::SEMICOLON);
+pub fn populate_symbols() -> HashMap<char, Token> {
+    let mut symbols = HashMap::new();
+    symbols.insert('{', Token::LEFT_BRACE);
+    symbols.insert('}', Token::RIGHT_BRACE);
+    symbols.insert('(', Token::LEFT_PAREN);
+    symbols.insert(')', Token::RIGHT_PAREN);
+    symbols.insert('[', Token::LEFT_BRACKET);
+    symbols.insert(']', Token::RIGHT_BRACKET);
+    symbols.insert('.', Token::DOT);
+    symbols.insert(',', Token::COMMA);
+    symbols.insert(':', Token::COLON);
+    symbols.insert(';', Token::SEMICOLON);
+    symbols
 }
 
-pub fn populate_keywords(lexer: &mut crate::lex::lexer::Lexer) {
-    lexer.keywords.insert("const", Token::CONST);
-    lexer.keywords.insert("uniform", Token::UNIFORM);
-    lexer.keywords.insert("buffer", Token::BUFFER);
-    lexer.keywords.insert("shared", Token::SHARED);
-    lexer.keywords.insert("coherent", Token::COHERENT);
-    lexer.keywords.insert("volatile", Token::VOLATILE);
-    lexer.keywords.insert("restrict", Token::RESTRICT);
-    lexer.keywords.insert("readonly", Token::READONLY);
-    lexer.keywords.insert("writeonly", Token::WRITEONLY);
-    lexer.keywords.insert("atomic_uint", Token::ATOMIC_UINT);
-    lexer.keywords.insert("layout", Token::LAYOUT);
-    lexer.keywords.insert("centroid", Token::CENTROID);
-    lexer.keywords.insert("flat", Token::FLAT);
-    lexer.keywords.insert("smooth", Token::SMOOTH);
-    lexer.keywords.insert("noperspective", Token::NOPERSPECTIVE);
-    lexer.keywords.insert("patch", Token::PATCH);
-    lexer.keywords.insert("sample", Token::SAMPLE);
-    lexer.keywords.insert("break", Token::BREAK);
-    lexer.keywords.insert("continue", Token::CONTINUE);
-    lexer.keywords.insert("do", Token::DO);
-    lexer.keywords.insert("for", Token::FOR);
-    lexer.keywords.insert("while", Token::WHILE);
-    lexer.keywords.insert("switch", Token::SWITCH);
-    lexer.keywords.insert("case", Token::CASE);
-    lexer.keywords.insert("default", Token::DEFAULT);
-    lexer.keywords.insert("if", Token::IF);
-    lexer.keywords.insert("else", Token::ELSE);
-    lexer.keywords.insert("subroutine", Token::SUBROUTINE);
-    lexer.keywords.insert("in", Token::IN);
-    lexer.keywords.insert("out", Token::OUT);
-    lexer.keywords.insert("inout", Token::INOUT);
-    lexer.keywords.insert("float", Token::FLOAT);
-    lexer.keywords.insert("double", Token::DOUBLE);
-    lexer.keywords.insert("int", Token::INT);
-    lexer.keywords.insert("void", Token::VOID);
-    lexer.keywords.insert("bool", Token::BOOL);
-    lexer.keywords.insert("true", Token::BOOLCONSTANT(true));
-    lexer.keywords.insert("false", Token::BOOLCONSTANT(false));
-    lexer.keywords.insert("invariant", Token::INVARIANT);
-    lexer.keywords.insert("precise", Token::PRECISE);
-    lexer.keywords.insert("discard", Token::DISCARD);
-    lexer.keywords.insert("return", Token::RETURN);
-    lexer
-        .keywords
-        .insert("mat2", Token::Material(Material::MAT2));
-    lexer
-        .keywords
-        .insert("mat3", Token::Material(Material::MAT3));
-    lexer
-        .keywords
-        .insert("mat4", Token::Material(Material::MAT4));
-    lexer
-        .keywords
-        .insert("dmat2", Token::Material(Material::DMAT2));
-    lexer
-        .keywords
-        .insert("dmat3", Token::Material(Material::DMAT3));
-    lexer
-        .keywords
-        .insert("dmat4", Token::Material(Material::DMAT4));
-    lexer
-        .keywords
-        .insert("mat2x2", Token::Material(Material::MAT2X2));
-    lexer
-        .keywords
-        .insert("mat2x3", Token::Material(Material::MAT2X3));
-    lexer
-        .keywords
-        .insert("mat2x4", Token::Material(Material::MAT2X4));
-    lexer
-        .keywords
-        .insert("dmat2x2", Token::Material(Material::DMAT2X2));
-    lexer
-        .keywords
-        .insert("dmat2x3", Token::Material(Material::DMAT2X3));
-    lexer
-        .keywords
-        .insert("dmat2x4", Token::Material(Material::DMAT2X4));
-    lexer
-        .keywords
-        .insert("mat3x2", Token::Material(Material::MAT3X2));
-    lexer
-        .keywords
-        .insert("mat3x3", Token::Material(Material::MAT3X3));
-    lexer
-        .keywords
-        .insert("mat3x4", Token::Material(Material::MAT3X4));
-    lexer
-        .keywords
-        .insert("dmat3x2", Token::Material(Material::DMAT3X2));
-    lexer
-        .keywords
-        .insert("dmat3x3", Token::Material(Material::DMAT3X3));
-    lexer
-        .keywords
-        .insert("dmat3x4", Token::Material(Material::DMAT3X4));
-    lexer
-        .keywords
-        .insert("mat4x2", Token::Material(Material::MAT4X2));
-    lexer
-        .keywords
-        .insert("mat4x3", Token::Material(Material::MAT4X3));
-    lexer
-        .keywords
-        .insert("mat4x4", Token::Material(Material::MAT4X4));
-    lexer
-        .keywords
-        .insert("dmat4x2", Token::Material(Material::DMAT4X2));
-    lexer
-        .keywords
-        .insert("dmat4x3", Token::Material(Material::DMAT4X3));
-    lexer
-        .keywords
-        .insert("dmat4x4", Token::Material(Material::DMAT4X4));
-    lexer.keywords.insert("vec2", Token::Vector(Vector::VEC2));
-    lexer.keywords.insert("vec3", Token::Vector(Vector::VEC3));
-    lexer.keywords.insert("vec4", Token::Vector(Vector::VEC4));
-    lexer.keywords.insert("ivec2", Token::Vector(Vector::IVEC2));
-    lexer.keywords.insert("ivec3", Token::Vector(Vector::IVEC3));
-    lexer.keywords.insert("ivec4", Token::Vector(Vector::IVEC4));
-    lexer.keywords.insert("bvec2", Token::Vector(Vector::BVEC2));
-    lexer.keywords.insert("bvec3", Token::Vector(Vector::BVEC3));
-    lexer.keywords.insert("bvec4", Token::Vector(Vector::BVEC4));
-    lexer.keywords.insert("dvec2", Token::Vector(Vector::DVEC2));
-    lexer.keywords.insert("dvec3", Token::Vector(Vector::DVEC3));
-    lexer.keywords.insert("dvec4", Token::Vector(Vector::DVEC4));
-    lexer.keywords.insert("uint", Token::UINT);
-    lexer.keywords.insert("uvec2", Token::Vector(Vector::UVEC2));
-    lexer.keywords.insert("uvec3", Token::Vector(Vector::UVEC3));
-    lexer.keywords.insert("uvec4", Token::Vector(Vector::UVEC4));
-    lexer.keywords.insert("lowp", Token::LOW_PRECISION);
-    lexer.keywords.insert("mediump", Token::MEDIUM_PRECISION);
-    lexer.keywords.insert("highp", Token::HIGH_PRECISION);
-    lexer.keywords.insert("precision", Token::PRECISION);
-    lexer
-        .keywords
-        .insert("sampler1D", Token::Sampler(Sampler::SAMPLER1D));
-    lexer
-        .keywords
-        .insert("sampler2D", Token::Sampler(Sampler::SAMPLER2D));
-    lexer
-        .keywords
-        .insert("sampler3D", Token::Sampler(Sampler::SAMPLER3D));
-    lexer
-        .keywords
-        .insert("samplerCube", Token::Sampler(Sampler::SAMPLERCUBE));
-    lexer
-        .keywords
-        .insert("sampler1DShadow", Token::Sampler(Sampler::SAMPLER1DSHADOW));
-    lexer
-        .keywords
-        .insert("sampler2DShadow", Token::Sampler(Sampler::SAMPLER2DSHADOW));
-    lexer.keywords.insert(
+pub fn populate_keywords() -> HashMap<&'static str, Token> {
+    let mut keywords = HashMap::new();
+    keywords.insert("const", Token::CONST);
+    keywords.insert("uniform", Token::UNIFORM);
+    keywords.insert("buffer", Token::BUFFER);
+    keywords.insert("shared", Token::SHARED);
+    keywords.insert("coherent", Token::COHERENT);
+    keywords.insert("volatile", Token::VOLATILE);
+    keywords.insert("restrict", Token::RESTRICT);
+    keywords.insert("readonly", Token::READONLY);
+    keywords.insert("writeonly", Token::WRITEONLY);
+    keywords.insert("atomic_uint", Token::ATOMIC_UINT);
+    keywords.insert("layout", Token::LAYOUT);
+    keywords.insert("centroid", Token::CENTROID);
+    keywords.insert("flat", Token::FLAT);
+    keywords.insert("smooth", Token::SMOOTH);
+    keywords.insert("noperspective", Token::NOPERSPECTIVE);
+    keywords.insert("patch", Token::PATCH);
+    keywords.insert("sample", Token::SAMPLE);
+    keywords.insert("break", Token::BREAK);
+    keywords.insert("continue", Token::CONTINUE);
+    keywords.insert("do", Token::DO);
+    keywords.insert("for", Token::FOR);
+    keywords.insert("while", Token::WHILE);
+    keywords.insert("switch", Token::SWITCH);
+    keywords.insert("case", Token::CASE);
+    keywords.insert("default", Token::DEFAULT);
+    keywords.insert("if", Token::IF);
+    keywords.insert("else", Token::ELSE);
+    keywords.insert("subroutine", Token::SUBROUTINE);
+    keywords.insert("in", Token::IN);
+    keywords.insert("out", Token::OUT);
+    keywords.insert("inout", Token::INOUT);
+    keywords.insert("float", Token::FLOAT);
+    keywords.insert("double", Token::DOUBLE);
+    keywords.insert("int", Token::INT);
+    keywords.insert("void", Token::VOID);
+    keywords.insert("bool", Token::BOOL);
+    keywords.insert("true", Token::BOOLCONSTANT(true));
+    keywords.insert("false", Token::BOOLCONSTANT(false));
+    keywords.insert("invariant", Token::INVARIANT);
+    keywords.insert("precise", Token::PRECISE);
+    keywords.insert("discard", Token::DISCARD);
+    keywords.insert("return", Token::RETURN);
+    keywords.insert("mat2", Token::Material(Material::MAT2));
+    keywords.insert("mat3", Token::Material(Material::MAT3));
+    keywords.insert("mat4", Token::Material(Material::MAT4));
+    keywords.insert("dmat2", Token::Material(Material::DMAT2));
+    keywords.insert("dmat3", Token::Material(Material::DMAT3));
+    keywords.insert("dmat4", Token::Material(Material::DMAT4));
+    keywords.insert("mat2x2", Token::Material(Material::MAT2X2));
+    keywords.insert("mat2x3", Token::Material(Material::MAT2X3));
+    keywords.insert("mat2x4", Token::Material(Material::MAT2X4));
+    keywords.insert("dmat2x2", Token::Material(Material::DMAT2X2));
+    keywords.insert("dmat2x3", Token::Material(Material::DMAT2X3));
+    keywords.insert("dmat2x4", Token::Material(Material::DMAT2X4));
+    keywords.insert("mat3x2", Token::Material(Material::MAT3X2));
+    keywords.insert("mat3x3", Token::Material(Material::MAT3X3));
+    keywords.insert("mat3x4", Token::Material(Material::MAT3X4));
+    keywords.insert("dmat3x2", Token::Material(Material::DMAT3X2));
+    keywords.insert("dmat3x3", Token::Material(Material::DMAT3X3));
+    keywords.insert("dmat3x4", Token::Material(Material::DMAT3X4));
+    keywords.insert("mat4x2", Token::Material(Material::MAT4X2));
+    keywords.insert("mat4x3", Token::Material(Material::MAT4X3));
+    keywords.insert("mat4x4", Token::Material(Material::MAT4X4));
+    keywords.insert("dmat4x2", Token::Material(Material::DMAT4X2));
+    keywords.insert("dmat4x3", Token::Material(Material::DMAT4X3));
+    keywords.insert("dmat4x4", Token::Material(Material::DMAT4X4));
+    keywords.insert("vec2", Token::Vector(Vector::VEC2));
+    keywords.insert("vec3", Token::Vector(Vector::VEC3));
+    keywords.insert("vec4", Token::Vector(Vector::VEC4));
+    keywords.insert("ivec2", Token::Vector(Vector::IVEC2));
+    keywords.insert("ivec3", Token::Vector(Vector::IVEC3));
+    keywords.insert("ivec4", Token::Vector(Vector::IVEC4));
+    keywords.insert("bvec2", Token::Vector(Vector::BVEC2));
+    keywords.insert("bvec3", Token::Vector(Vector::BVEC3));
+    keywords.insert("bvec4", Token::Vector(Vector::BVEC4));
+    keywords.insert("dvec2", Token::Vector(Vector::DVEC2));
+    keywords.insert("dvec3", Token::Vector(Vector::DVEC3));
+    keywords.insert("dvec4", Token::Vector(Vector::DVEC4));
+    keywords.insert("uint", Token::UINT);
+    keywords.insert("uvec2", Token::Vector(Vector::UVEC2));
+    keywords.insert("uvec3", Token::Vector(Vector::UVEC3));
+    keywords.insert("uvec4", Token::Vector(Vector::UVEC4));
+    keywords.insert("lowp", Token::LOW_PRECISION);
+    keywords.insert("mediump", Token::MEDIUM_PRECISION);
+    keywords.insert("highp", Token::HIGH_PRECISION);
+    keywords.insert("precision", Token::PRECISION);
+    keywords.insert("sampler1D", Token::Sampler(Sampler::SAMPLER1D));
+    keywords.insert("sampler2D", Token::Sampler(Sampler::SAMPLER2D));
+    keywords.insert("sampler3D", Token::Sampler(Sampler::SAMPLER3D));
+    keywords.insert("samplerCube", Token::Sampler(Sampler::SAMPLERCUBE));
+    keywords.insert("sampler1DShadow", Token::Sampler(Sampler::SAMPLER1DSHADOW));
+    keywords.insert("sampler2DShadow", Token::Sampler(Sampler::SAMPLER2DSHADOW));
+    keywords.insert(
         "samplerCubeShadow",
         Token::Sampler(Sampler::SAMPLERCUBESHADOW),
     );
-    lexer
-        .keywords
-        .insert("sampler1DArray", Token::Sampler(Sampler::SAMPLER1DARRAY));
-    lexer
-        .keywords
-        .insert("sampler2DArray", Token::Sampler(Sampler::SAMPLER2DARRAY));
-    lexer.keywords.insert(
+    keywords.insert("sampler1DArray", Token::Sampler(Sampler::SAMPLER1DARRAY));
+    keywords.insert("sampler2DArray", Token::Sampler(Sampler::SAMPLER2DARRAY));
+    keywords.insert(
         "sampler1DArrayShadow",
         Token::Sampler(Sampler::SAMPLER1DARRAYSHADOW),
     );
-    lexer.keywords.insert(
+    keywords.insert(
         "sampler2DArrayShadow",
         Token::Sampler(Sampler::SAMPLER2DARRAYSHADOW),
     );
-    lexer
-        .keywords
-        .insert("isampler1D", Token::Sampler(Sampler::ISAMPLER1D));
-    lexer
-        .keywords
-        .insert("isampler2D", Token::Sampler(Sampler::ISAMPLER2D));
-    lexer
-        .keywords
-        .insert("isampler3D", Token::Sampler(Sampler::ISAMPLER3D));
-    lexer
-        .keywords
-        .insert("isamplerCube", Token::Sampler(Sampler::ISAMPLERCUBE));
-    lexer
-        .keywords
-        .insert("isampler1DArray", Token::Sampler(Sampler::ISAMPLER1DARRAY));
-    lexer
-        .keywords
-        .insert("isampler2DArray", Token::Sampler(Sampler::ISAMPLER2DARRAY));
-    lexer
-        .keywords
-        .insert("usampler1D", Token::Sampler(Sampler::USAMPLER1D));
-    lexer
-        .keywords
-        .insert("usampler2D", Token::Sampler(Sampler::USAMPLER2D));
-    lexer
-        .keywords
-        .insert("usampler3D", Token::Sampler(Sampler::USAMPLER3D));
-    lexer
-        .keywords
-        .insert("usamplerCube", Token::Sampler(Sampler::USAMPLERCUBE));
-    lexer
-        .keywords
-        .insert("usampler1DArray", Token::Sampler(Sampler::USAMPLER1DARRAY));
-    lexer
-        .keywords
-        .insert("usampler2DArray", Token::Sampler(Sampler::USAMPLER2DARRAY));
-    lexer
-        .keywords
-        .insert("sampler2DRect", Token::Sampler(Sampler::SAMPLER2DRECT));
-    lexer.keywords.insert(
+    keywords.insert("isampler1D", Token::Sampler(Sampler::ISAMPLER1D));
+    keywords.insert("isampler2D", Token::Sampler(Sampler::ISAMPLER2D));
+    keywords.insert("isampler3D", Token::Sampler(Sampler::ISAMPLER3D));
+    keywords.insert("isamplerCube", Token::Sampler(Sampler::ISAMPLERCUBE));
+    keywords.insert("isampler1DArray", Token::Sampler(Sampler::ISAMPLER1DARRAY));
+    keywords.insert("isampler2DArray", Token::Sampler(Sampler::ISAMPLER2DARRAY));
+    keywords.insert("usampler1D", Token::Sampler(Sampler::USAMPLER1D));
+    keywords.insert("usampler2D", Token::Sampler(Sampler::USAMPLER2D));
+    keywords.insert("usampler3D", Token::Sampler(Sampler::USAMPLER3D));
+    keywords.insert("usamplerCube", Token::Sampler(Sampler::USAMPLERCUBE));
+    keywords.insert("usampler1DArray", Token::Sampler(Sampler::USAMPLER1DARRAY));
+    keywords.insert("usampler2DArray", Token::Sampler(Sampler::USAMPLER2DARRAY));
+    keywords.insert("sampler2DRect", Token::Sampler(Sampler::SAMPLER2DRECT));
+    keywords.insert(
         "sampler2DRectShadow",
         Token::Sampler(Sampler::SAMPLER2DRECTSHADOW),
     );
-    lexer
-        .keywords
-        .insert("isampler2DRect", Token::Sampler(Sampler::ISAMPLER2DRECT));
-    lexer
-        .keywords
-        .insert("usampler2DRect", Token::Sampler(Sampler::USAMPLER2DRECT));
-    lexer
-        .keywords
-        .insert("samplerBuffer", Token::Sampler(Sampler::SAMPLERBUFFER));
-    lexer
-        .keywords
-        .insert("isamplerBuffer", Token::Sampler(Sampler::ISAMPLERBUFFER));
-    lexer
-        .keywords
-        .insert("usamplerBuffer", Token::Sampler(Sampler::USAMPLERBUFFER));
-    lexer
-        .keywords
-        .insert("sampler2DMS", Token::Sampler(Sampler::SAMPLER2DMS));
-    lexer
-        .keywords
-        .insert("isampler2DMS", Token::Sampler(Sampler::ISAMPLER2DMS));
-    lexer
-        .keywords
-        .insert("usampler2DMS", Token::Sampler(Sampler::USAMPLER2DMS));
-    lexer.keywords.insert(
+    keywords.insert("isampler2DRect", Token::Sampler(Sampler::ISAMPLER2DRECT));
+    keywords.insert("usampler2DRect", Token::Sampler(Sampler::USAMPLER2DRECT));
+    keywords.insert("samplerBuffer", Token::Sampler(Sampler::SAMPLERBUFFER));
+    keywords.insert("isamplerBuffer", Token::Sampler(Sampler::ISAMPLERBUFFER));
+    keywords.insert("usamplerBuffer", Token::Sampler(Sampler::USAMPLERBUFFER));
+    keywords.insert("sampler2DMS", Token::Sampler(Sampler::SAMPLER2DMS));
+    keywords.insert("isampler2DMS", Token::Sampler(Sampler::ISAMPLER2DMS));
+    keywords.insert("usampler2DMS", Token::Sampler(Sampler::USAMPLER2DMS));
+    keywords.insert(
         "sampler2DMSArray",
         Token::Sampler(Sampler::SAMPLER2DMSARRAY),
     );
-    lexer.keywords.insert(
+    keywords.insert(
         "isampler2DMSArray",
         Token::Sampler(Sampler::ISAMPLER2DMSARRAY),
     );
-    lexer.keywords.insert(
+    keywords.insert(
         "usampler2DMSArray",
         Token::Sampler(Sampler::USAMPLER2DMSARRAY),
     );
-    lexer.keywords.insert(
+    keywords.insert(
        "samplerCubeArray",
        Token::Sampler(Sampler::SAMPLERCUBEARRAY),
     );
-    lexer.keywords.insert(
+    keywords.insert(
        "samplerCubeArrayShadow",
        Token::Sampler(Sampler::SAMPLERCUBEARRAYSHADOW),
     );
-    lexer.keywords.insert(
+    keywords.insert(
        "isamplerCubeArray",
        Token::Sampler(Sampler::ISAMPLERCUBEARRAY),
     );
-    lexer.keywords.insert(
+    keywords.insert(
        "usamplerCubeArray",
        Token::Sampler(Sampler::USAMPLERCUBEARRAY),
     );
-    lexer
-        .keywords
-        .insert("image1D", Token::Image(Image::IMAGE1D));
-    lexer
-        .keywords
-        .insert("iimage1D", Token::Image(Image::IIMAGE1D));
-    lexer
-        .keywords
-        .insert("uimage1D", Token::Image(Image::UIMAGE1D));
-    lexer
-        .keywords
-        .insert("image2D", Token::Image(Image::IMAGE2D));
-    lexer
-        .keywords
-        .insert("iimage2D", Token::Image(Image::IIMAGE2D));
-    lexer
-        .keywords
-        .insert("uimage2D", Token::Image(Image::UIMAGE2D));
-    lexer
-        .keywords
-        .insert("image3D", Token::Image(Image::IMAGE3D));
-    lexer
-        .keywords
-        .insert("iimage3D", Token::Image(Image::IIMAGE3D));
-    lexer
-        .keywords
-        .insert("uimage3D", Token::Image(Image::UIMAGE3D));
-    lexer
-        .keywords
-        .insert("image2DRect", Token::Image(Image::IMAGE2DRECT));
-    lexer
-        .keywords
-        .insert("iimage2DRect", Token::Image(Image::IIMAGE2DRECT));
-    lexer
-        .keywords
-        .insert("uimage2DRect", Token::Image(Image::UIMAGE2DRECT));
-    lexer
-        .keywords
-        .insert("imageCube", Token::Image(Image::IMAGECUBE));
-    lexer
-        .keywords
-        .insert("iimageCube", Token::Image(Image::IIMAGECUBE));
-    lexer
-        .keywords
-        .insert("uimageCube", Token::Image(Image::UIMAGECUBE));
-    lexer
-        .keywords
-        .insert("imageBuffer", Token::Image(Image::IMAGEBUFFER));
-    lexer
-        .keywords
-        .insert("iimageBuffer", Token::Image(Image::IIMAGEBUFFER));
-    lexer
-        .keywords
-        .insert("uimageBuffer", Token::Image(Image::UIMAGEBUFFER));
-    lexer
-        .keywords
-        .insert("image1DArray", Token::Image(Image::IMAGE1DARRAY));
-    lexer
-        .keywords
-        .insert("iimage1DArray", Token::Image(Image::IIMAGE1DARRAY));
-    lexer
-        .keywords
-        .insert("uimage1DArray", Token::Image(Image::UIMAGE1DARRAY));
-    lexer
-        .keywords
-        .insert("image2DArray", Token::Image(Image::IMAGE2DARRAY));
-    lexer
-        .keywords
-        .insert("iimage2DArray", Token::Image(Image::IIMAGE2DARRAY));
-    lexer
-        .keywords
-        .insert("uimage2DArray", Token::Image(Image::UIMAGE2DARRAY));
-    lexer
-        .keywords
-        .insert("imageCubeArray", Token::Image(Image::IMAGECUBEARRAY));
-    lexer
-        .keywords
-        .insert("iimageCubeArray", Token::Image(Image::IIMAGECUBEARRAY));
-    lexer
-        .keywords
-        .insert("uimageCubeArray", Token::Image(Image::UIMAGECUBEARRAY));
-    lexer
-        .keywords
-        .insert("image2DMS", Token::Image(Image::IMAGE2DMS));
-    lexer
-        .keywords
-        .insert("iimage2DMS", Token::Image(Image::IIMAGE2DMS));
-    lexer
-        .keywords
-        .insert("uimage2DMS", Token::Image(Image::UIMAGE2DMS));
-    lexer
-        .keywords
-        .insert("image2DMSArray", Token::Image(Image::IMAGE2DMSARRAY));
-    lexer
-        .keywords
-        .insert("iimage2DMSArray", Token::Image(Image::IIMAGE2DMSARRAY));
-    lexer
-        .keywords
-        .insert("uimage2DMSArray", Token::Image(Image::UIMAGE2DMSARRAY));
-    lexer.keywords.insert("struct", Token::STRUCT);
+    keywords.insert("image1D", Token::Image(Image::IMAGE1D));
+    keywords.insert("iimage1D", Token::Image(Image::IIMAGE1D));
+    keywords.insert("uimage1D", Token::Image(Image::UIMAGE1D));
+    keywords.insert("image2D", Token::Image(Image::IMAGE2D));
+    keywords.insert("iimage2D", Token::Image(Image::IIMAGE2D));
+    keywords.insert("uimage2D", Token::Image(Image::UIMAGE2D));
+    keywords.insert("image3D", Token::Image(Image::IMAGE3D));
+    keywords.insert("iimage3D", Token::Image(Image::IIMAGE3D));
+    keywords.insert("uimage3D", Token::Image(Image::UIMAGE3D));
+    keywords.insert("image2DRect", Token::Image(Image::IMAGE2DRECT));
+    keywords.insert("iimage2DRect", Token::Image(Image::IIMAGE2DRECT));
+    keywords.insert("uimage2DRect", Token::Image(Image::UIMAGE2DRECT));
+    keywords.insert("imageCube", Token::Image(Image::IMAGECUBE));
+    keywords.insert("iimageCube", Token::Image(Image::IIMAGECUBE));
+    keywords.insert("uimageCube", Token::Image(Image::UIMAGECUBE));
+    keywords.insert("imageBuffer", Token::Image(Image::IMAGEBUFFER));
+    keywords.insert("iimageBuffer", Token::Image(Image::IIMAGEBUFFER));
+    keywords.insert("uimageBuffer", Token::Image(Image::UIMAGEBUFFER));
+    keywords.insert("image1DArray", Token::Image(Image::IMAGE1DARRAY));
+    keywords.insert("iimage1DArray", Token::Image(Image::IIMAGE1DARRAY));
+    keywords.insert("uimage1DArray", Token::Image(Image::UIMAGE1DARRAY));
+    keywords.insert("image2DArray", Token::Image(Image::IMAGE2DARRAY));
+    keywords.insert("iimage2DArray", Token::Image(Image::IIMAGE2DARRAY));
+    keywords.insert("uimage2DArray", Token::Image(Image::UIMAGE2DARRAY));
+    keywords.insert("imageCubeArray", Token::Image(Image::IMAGECUBEARRAY));
+    keywords.insert("iimageCubeArray", Token::Image(Image::IIMAGECUBEARRAY));
+    keywords.insert("uimageCubeArray", Token::Image(Image::UIMAGECUBEARRAY));
+    keywords.insert("image2DMS", Token::Image(Image::IMAGE2DMS));
+    keywords.insert("iimage2DMS", Token::Image(Image::IIMAGE2DMS));
+    keywords.insert("uimage2DMS", Token::Image(Image::UIMAGE2DMS));
+    keywords.insert("image2DMSArray", Token::Image(Image::IMAGE2DMSARRAY));
+    keywords.insert("iimage2DMSArray", Token::Image(Image::IIMAGE2DMSARRAY));
+    keywords.insert("uimage2DMSArray", Token::Image(Image::UIMAGE2DMSARRAY));
+    keywords.insert("struct", Token::STRUCT);
+    keywords
 }
 
 impl crate::lex::lexer::Lexer {
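Note: with the maps returned by value, keyword lookup in the lexer reduces to a `get`. A sketch of the intended use (hypothetical; `classify_word` is an assumed name and a `Clone` derive on `Token` is assumed — the commit does not show the identifier handler):

    impl crate::lex::lexer::Lexer {
        /// Return the keyword token for `word`, or fall back to IDENTIFIER.
        fn classify_word(&self, word: &str) -> Token {
            self.keywords
                .get(word)
                .cloned()
                .unwrap_or_else(|| Token::IDENTIFIER(word.to_string()))
        }
    }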
src/lib.rs
@@ -91,6 +91,37 @@ mod tests {
                 .into()
         );
     }
+    #[test]
+    fn float_literal() {
+        let source_code = "123.4504";
+        let mut lexer = Lexer::new(source_code);
+        let tokens = lexer.get_tokens();
+        assert_eq!(
+            tokens,
+            vec![Token::FLOATCONSTANT("123.4504".to_string()), Token::EOF].into()
+        );
+    }
+
+    #[test]
+    fn float_shorthand() {
+        let source_code = ".4504";
+        let mut lexer = Lexer::new(source_code);
+        let tokens = lexer.get_tokens();
+        assert_eq!(
+            tokens,
+            vec![Token::FLOATCONSTANT(".4504".to_string()), Token::EOF].into()
+        );
+    }
+    #[test]
+    fn integer_literal() {
+        let source_code = "12345";
+        let mut lexer = Lexer::new(source_code);
+        let tokens = lexer.get_tokens();
+        assert_eq!(
+            tokens,
+            vec![Token::INTCONSTANT("12345".to_string()), Token::EOF].into()
+        );
+    }
 }
 // #[cfg(test)]
 // mod tests {
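Note: for `float_shorthand` to pass, the dispatcher has to treat a leading `.` as the start of a number whenever a digit follows, since `.` is also registered as `Token::DOT` in the symbol map. A sketch of that one-character lookahead, built only on the `input`/`position` fields the diff shows (a hypothetical helper; the commit does not include the dispatch site):

    impl crate::lex::lexer::Lexer {
        /// True when the character after the current '.' is a digit,
        /// i.e. the dot begins a float literal rather than a DOT symbol.
        fn dot_starts_float(&self) -> bool {
            self.input
                .get(self.position + 1)
                .map_or(false, |c| c.is_ascii_digit())
        }
    }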
@@ -52,8 +52,8 @@ pub enum Token {
     SUBROUTINE,
     IDENTIFIER(String),
     TYPE_NAME,
-    FLOATCONSTANT(f32),
-    INTCONSTANT(i32),
+    FLOATCONSTANT(String),
+    INTCONSTANT(String),
     UINTCONSTANT(u32),
     BOOLCONSTANT(bool),
     DOUBLECONSTANT(f64),
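Note: storing the raw lexeme as a `String` instead of an eagerly parsed `f32`/`i32` keeps the token faithful to the source text and defers numeric conversion, and its error handling, to a later stage. A sketch of that downstream conversion (a hypothetical helper, not part of this commit):

    use crate::tokens::Token;

    /// Parse the stored lexeme when the numeric value is actually needed.
    fn float_value(tok: &Token) -> Option<f32> {
        match tok {
            Token::FLOATCONSTANT(s) => s.parse::<f32>().ok(),
            _ => None,
        }
    }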