Working Keyword matching with HashMap
This commit is contained in:
parent f46751dccd
commit 1ab6a76259
src/lex/lexer.rs

@@ -14,7 +14,16 @@ impl Lexer {
     }
 
     pub fn consume_identifier_or_keyword(&mut self) -> crate::tokens::Token {
-        todo!()
+        let mut word = String::new();
+        while let Some(c) = self.current_char {
+            if c.is_alphanumeric() || c == '_' {
+                word.push(c);
+                self.advance();
+            } else {
+                break;
+            }
+        }
+        self.is_keyword(&word).unwrap()
     }
 
     pub fn consume_number(&mut self) -> crate::tokens::Token {
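The new consume_identifier_or_keyword greedily collects alphanumeric and underscore characters, then hands the word to is_keyword (rewritten in src/lex/util.rs below). Because that lookup falls back to an IDENTIFIER token instead of returning None, the trailing unwrap cannot panic. A self-contained sketch of the same scan, with illustrative names rather than this crate's API:

// Collect [A-Za-z0-9_]+ starting at `pos`; return the word and the
// index of the first unconsumed character.
fn scan_word(input: &[char], mut pos: usize) -> (String, usize) {
    let mut word = String::new();
    while let Some(&c) = input.get(pos) {
        if c.is_alphanumeric() || c == '_' {
            word.push(c);
            pos += 1;
        } else {
            break;
        }
    }
    (word, pos)
}

fn main() {
    let src: Vec<char> = "uniform vec3 color;".chars().collect();
    assert_eq!(scan_word(&src, 0), ("uniform".to_string(), 7));
}

Note that char::is_alphanumeric accepts any Unicode alphanumeric character, which is broader than GLSL's identifier grammar; is_ascii_alphanumeric would be the stricter choice.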
@@ -1,6 +1,9 @@
 use crate::tokens::Token;
+use std::collections::HashMap;
 use std::sync::Arc;
+
+use super::util::populate_tokens;
 
 pub struct Lexer {
     /// GLSL source
     pub input: Vec<char>,
@@ -8,6 +11,7 @@ pub struct Lexer {
     pub position: usize,
     /// [`char`] under position
     pub current_char: Option<char>,
+    pub keywords: HashMap<&'static str, Token>,
 }
 
 #[macro_export]
@@ -23,7 +27,10 @@ impl Lexer {
             input: input.chars().collect(),
             position: 0,
             current_char: None,
+            keywords: HashMap::new(),
         };
+        populate_tokens(&mut lexer);
+        dbg!(&lexer.keywords);
         lexer.current_char = if lexer.position < lexer.input.len() {
            Some(lexer.input[lexer.position])
         } else {
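Since populate_tokens runs inside Lexer::new, every lexer instance rebuilds the same keyword map, and the dbg! dumps it on each construction. If the table is meant to be immutable, one possible follow-up (a sketch under that assumption, not what this commit does) is to build it once behind std::sync::OnceLock and share it across instances:

use std::collections::HashMap;
use std::sync::OnceLock;

// Stand-in for crate::tokens::Token with just two variants.
#[derive(Clone, Debug, PartialEq)]
enum Token {
    CONST,
    UNIFORM,
}

// Built on first access, then shared by every lexer thereafter.
fn keywords() -> &'static HashMap<&'static str, Token> {
    static KEYWORDS: OnceLock<HashMap<&'static str, Token>> = OnceLock::new();
    KEYWORDS.get_or_init(|| {
        HashMap::from([("const", Token::CONST), ("uniform", Token::UNIFORM)])
    })
}

fn main() {
    assert_eq!(keywords().get("const"), Some(&Token::CONST));
}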
@@ -99,7 +106,7 @@ impl Lexer {
                 }
             }
         }
 
         tokens.push(Token::EOF);
         tokens.into()
     }
 }
src/lex/util.rs (540 changed lines)
@@ -1,175 +1,383 @@
+use std::collections::HashMap;
+
 use crate::tokens::{Image, Material, Sampler, Token, Vector};
 
+pub fn populate_tokens(lexer: &mut crate::lex::lexer::Lexer) {
+    lexer.keywords.insert("const", Token::CONST);
+    lexer.keywords.insert("uniform", Token::UNIFORM);
+    lexer.keywords.insert("buffer", Token::BUFFER);
+    lexer.keywords.insert("shared", Token::SHARED);
+    lexer.keywords.insert("coherent", Token::COHERENT);
+    lexer.keywords.insert("volatile", Token::VOLATILE);
+    lexer.keywords.insert("restrict", Token::RESTRICT);
+    lexer.keywords.insert("readonly", Token::READONLY);
+    lexer.keywords.insert("writeonly", Token::WRITEONLY);
+    lexer.keywords.insert("atomic_uint", Token::ATOMIC_UINT);
+    lexer.keywords.insert("layout", Token::LAYOUT);
+    lexer.keywords.insert("centroid", Token::CENTROID);
+    lexer.keywords.insert("flat", Token::FLAT);
+    lexer.keywords.insert("smooth", Token::SMOOTH);
+    lexer.keywords.insert("noperspective", Token::NOPERSPECTIVE);
+    lexer.keywords.insert("patch", Token::PATCH);
+    lexer.keywords.insert("sample", Token::SAMPLE);
+    lexer.keywords.insert("break", Token::BREAK);
+    lexer.keywords.insert("continue", Token::CONTINUE);
+    lexer.keywords.insert("do", Token::DO);
+    lexer.keywords.insert("for", Token::FOR);
+    lexer.keywords.insert("while", Token::WHILE);
+    lexer.keywords.insert("switch", Token::SWITCH);
+    lexer.keywords.insert("case", Token::CASE);
+    lexer.keywords.insert("default", Token::DEFAULT);
+    lexer.keywords.insert("if", Token::IF);
+    lexer.keywords.insert("else", Token::ELSE);
+    lexer.keywords.insert("subroutine", Token::SUBROUTINE);
+    lexer.keywords.insert("in", Token::IN);
+    lexer.keywords.insert("out", Token::OUT);
+    lexer.keywords.insert("inout", Token::INOUT);
+    lexer.keywords.insert("float", Token::FLOAT);
+    lexer.keywords.insert("double", Token::DOUBLE);
+    lexer.keywords.insert("int", Token::INT);
+    lexer.keywords.insert("void", Token::VOID);
+    lexer.keywords.insert("bool", Token::BOOL);
+    lexer.keywords.insert("true", Token::BOOLCONSTANT(true));
+    lexer.keywords.insert("false", Token::BOOLCONSTANT(false));
+    lexer.keywords.insert("invariant", Token::INVARIANT);
+    lexer.keywords.insert("precise", Token::PRECISE);
+    lexer.keywords.insert("discard", Token::DISCARD);
+    lexer.keywords.insert("return", Token::RETURN);
+    lexer.keywords.insert("mat2", Token::Material(Material::MAT2));
+    lexer.keywords.insert("mat3", Token::Material(Material::MAT3));
+    lexer.keywords.insert("mat4", Token::Material(Material::MAT4));
+    lexer.keywords.insert("dmat2", Token::Material(Material::DMAT2));
+    lexer.keywords.insert("dmat3", Token::Material(Material::DMAT3));
+    lexer.keywords.insert("dmat4", Token::Material(Material::DMAT4));
+    lexer.keywords.insert("mat2x2", Token::Material(Material::MAT2X2));
+    lexer.keywords.insert("mat2x3", Token::Material(Material::MAT2X3));
+    lexer.keywords.insert("mat2x4", Token::Material(Material::MAT2X4));
+    lexer.keywords.insert("dmat2x2", Token::Material(Material::DMAT2X2));
+    lexer.keywords.insert("dmat2x3", Token::Material(Material::DMAT2X3));
+    lexer.keywords.insert("dmat2x4", Token::Material(Material::DMAT2X4));
+    lexer.keywords.insert("mat3x2", Token::Material(Material::MAT3X2));
+    lexer.keywords.insert("mat3x3", Token::Material(Material::MAT3X3));
+    lexer.keywords.insert("mat3x4", Token::Material(Material::MAT3X4));
+    lexer.keywords.insert("dmat3x2", Token::Material(Material::DMAT3X2));
+    lexer.keywords.insert("dmat3x3", Token::Material(Material::DMAT3X3));
+    lexer.keywords.insert("dmat3x4", Token::Material(Material::DMAT3X4));
+    lexer.keywords.insert("mat4x2", Token::Material(Material::MAT4X2));
+    lexer.keywords.insert("mat4x3", Token::Material(Material::MAT4X3));
+    lexer.keywords.insert("mat4x4", Token::Material(Material::MAT4X4));
+    lexer.keywords.insert("dmat4x2", Token::Material(Material::DMAT4X2));
+    lexer.keywords.insert("dmat4x3", Token::Material(Material::DMAT4X3));
+    lexer.keywords.insert("dmat4x4", Token::Material(Material::DMAT4X4));
+    lexer.keywords.insert("vec2", Token::Vector(Vector::VEC2));
+    lexer.keywords.insert("vec3", Token::Vector(Vector::VEC3));
+    lexer.keywords.insert("vec4", Token::Vector(Vector::VEC4));
+    lexer.keywords.insert("ivec2", Token::Vector(Vector::IVEC2));
+    lexer.keywords.insert("ivec3", Token::Vector(Vector::IVEC3));
+    lexer.keywords.insert("ivec4", Token::Vector(Vector::IVEC4));
+    lexer.keywords.insert("bvec2", Token::Vector(Vector::BVEC2));
+    lexer.keywords.insert("bvec3", Token::Vector(Vector::BVEC3));
+    lexer.keywords.insert("bvec4", Token::Vector(Vector::BVEC4));
+    lexer.keywords.insert("dvec2", Token::Vector(Vector::DVEC2));
+    lexer.keywords.insert("dvec3", Token::Vector(Vector::DVEC3));
+    lexer.keywords.insert("dvec4", Token::Vector(Vector::DVEC4));
+    lexer.keywords.insert("uint", Token::UINT);
+    lexer.keywords.insert("uvec2", Token::Vector(Vector::UVEC2));
+    lexer.keywords.insert("uvec3", Token::Vector(Vector::UVEC3));
+    lexer.keywords.insert("uvec4", Token::Vector(Vector::UVEC4));
+    lexer.keywords.insert("lowp", Token::LOW_PRECISION);
+    lexer.keywords.insert("mediump", Token::MEDIUM_PRECISION);
+    lexer.keywords.insert("highp", Token::HIGH_PRECISION);
+    lexer.keywords.insert("precision", Token::PRECISION);
+    lexer.keywords.insert("sampler1D", Token::Sampler(Sampler::SAMPLER1D));
+    lexer.keywords.insert("sampler2D", Token::Sampler(Sampler::SAMPLER2D));
+    lexer.keywords.insert("sampler3D", Token::Sampler(Sampler::SAMPLER3D));
+    lexer.keywords.insert("samplerCube", Token::Sampler(Sampler::SAMPLERCUBE));
+    lexer.keywords.insert("sampler1DShadow", Token::Sampler(Sampler::SAMPLER1DSHADOW));
+    lexer.keywords.insert("sampler2DShadow", Token::Sampler(Sampler::SAMPLER2DSHADOW));
+    lexer.keywords.insert("samplerCubeShadow", Token::Sampler(Sampler::SAMPLERCUBESHADOW));
+    lexer.keywords.insert("sampler1DArray", Token::Sampler(Sampler::SAMPLER1DARRAY));
+    lexer.keywords.insert("sampler2DArray", Token::Sampler(Sampler::SAMPLER2DARRAY));
+    lexer.keywords.insert("sampler1DArrayShadow", Token::Sampler(Sampler::SAMPLER1DARRAYSHADOW));
+    lexer.keywords.insert("sampler2DArrayShadow", Token::Sampler(Sampler::SAMPLER2DARRAYSHADOW));
+    lexer.keywords.insert("isampler1D", Token::Sampler(Sampler::ISAMPLER1D));
+    lexer.keywords.insert("isampler2D", Token::Sampler(Sampler::ISAMPLER2D));
+    lexer.keywords.insert("isampler3D", Token::Sampler(Sampler::ISAMPLER3D));
+    lexer.keywords.insert("isamplerCube", Token::Sampler(Sampler::ISAMPLERCUBE));
+    lexer.keywords.insert("isampler1DArray", Token::Sampler(Sampler::ISAMPLER1DARRAY));
+    lexer.keywords.insert("isampler2DArray", Token::Sampler(Sampler::ISAMPLER2DARRAY));
+    lexer.keywords.insert("usampler1D", Token::Sampler(Sampler::USAMPLER1D));
+    lexer.keywords.insert("usampler2D", Token::Sampler(Sampler::USAMPLER2D));
+    lexer.keywords.insert("usampler3D", Token::Sampler(Sampler::USAMPLER3D));
+    lexer.keywords.insert("usamplerCube", Token::Sampler(Sampler::USAMPLERCUBE));
+    lexer.keywords.insert("usampler1DArray", Token::Sampler(Sampler::USAMPLER1DARRAY));
+    lexer.keywords.insert("usampler2DArray", Token::Sampler(Sampler::USAMPLER2DARRAY));
+    lexer.keywords.insert("sampler2DRect", Token::Sampler(Sampler::SAMPLER2DRECT));
+    lexer.keywords.insert("sampler2DRectShadow", Token::Sampler(Sampler::SAMPLER2DRECTSHADOW));
+    lexer.keywords.insert("isampler2DRect", Token::Sampler(Sampler::ISAMPLER2DRECT));
+    lexer.keywords.insert("usampler2DRect", Token::Sampler(Sampler::USAMPLER2DRECT));
+    lexer.keywords.insert("samplerBuffer", Token::Sampler(Sampler::SAMPLERBUFFER));
+    lexer.keywords.insert("isamplerBuffer", Token::Sampler(Sampler::ISAMPLERBUFFER));
+    lexer.keywords.insert("usamplerBuffer", Token::Sampler(Sampler::USAMPLERBUFFER));
+    lexer.keywords.insert("sampler2DMS", Token::Sampler(Sampler::SAMPLER2DMS));
+    lexer.keywords.insert("isampler2DMS", Token::Sampler(Sampler::ISAMPLER2DMS));
+    lexer.keywords.insert("usampler2DMS", Token::Sampler(Sampler::USAMPLER2DMS));
+    lexer.keywords.insert("sampler2DMSArray", Token::Sampler(Sampler::SAMPLER2DMSARRAY));
+    lexer.keywords.insert("isampler2DMSArray", Token::Sampler(Sampler::ISAMPLER2DMSARRAY));
+    lexer.keywords.insert("usampler2DMSArray", Token::Sampler(Sampler::USAMPLER2DMSARRAY));
+    lexer.keywords.insert("samplerCubeArray", Token::Sampler(Sampler::SAMPLERCUBEARRAY));
+    lexer.keywords.insert("samplerCubeArrayShadow", Token::Sampler(Sampler::SAMPLERCUBEARRAYSHADOW));
+    lexer.keywords.insert("isamplerCubeArray", Token::Sampler(Sampler::ISAMPLERCUBEARRAY));
+    lexer.keywords.insert("usamplerCubeArray", Token::Sampler(Sampler::USAMPLERCUBEARRAY));
+    lexer.keywords.insert("image1D", Token::Image(Image::IMAGE1D));
+    lexer.keywords.insert("iimage1D", Token::Image(Image::IIMAGE1D));
+    lexer.keywords.insert("uimage1D", Token::Image(Image::UIMAGE1D));
+    lexer.keywords.insert("image2D", Token::Image(Image::IMAGE2D));
+    lexer.keywords.insert("iimage2D", Token::Image(Image::IIMAGE2D));
+    lexer.keywords.insert("uimage2D", Token::Image(Image::UIMAGE2D));
+    lexer.keywords.insert("image3D", Token::Image(Image::IMAGE3D));
+    lexer.keywords.insert("iimage3D", Token::Image(Image::IIMAGE3D));
+    lexer.keywords.insert("uimage3D", Token::Image(Image::UIMAGE3D));
+    lexer.keywords.insert("image2DRect", Token::Image(Image::IMAGE2DRECT));
+    lexer.keywords.insert("iimage2DRect", Token::Image(Image::IIMAGE2DRECT));
+    lexer.keywords.insert("uimage2DRect", Token::Image(Image::UIMAGE2DRECT));
+    lexer.keywords.insert("imageCube", Token::Image(Image::IMAGECUBE));
+    lexer.keywords.insert("iimageCube", Token::Image(Image::IIMAGECUBE));
+    lexer.keywords.insert("uimageCube", Token::Image(Image::UIMAGECUBE));
+    lexer.keywords.insert("imageBuffer", Token::Image(Image::IMAGEBUFFER));
+    lexer.keywords.insert("iimageBuffer", Token::Image(Image::IIMAGEBUFFER));
+    lexer.keywords.insert("uimageBuffer", Token::Image(Image::UIMAGEBUFFER));
+    lexer.keywords.insert("image1DArray", Token::Image(Image::IMAGE1DARRAY));
+    lexer.keywords.insert("iimage1DArray", Token::Image(Image::IIMAGE1DARRAY));
+    lexer.keywords.insert("uimage1DArray", Token::Image(Image::UIMAGE1DARRAY));
+    lexer.keywords.insert("image2DArray", Token::Image(Image::IMAGE2DARRAY));
+    lexer.keywords.insert("iimage2DArray", Token::Image(Image::IIMAGE2DARRAY));
+    lexer.keywords.insert("uimage2DArray", Token::Image(Image::UIMAGE2DARRAY));
+    lexer.keywords.insert("imageCubeArray", Token::Image(Image::IMAGECUBEARRAY));
+    lexer.keywords.insert("iimageCubeArray", Token::Image(Image::IIMAGECUBEARRAY));
+    lexer.keywords.insert("uimageCubeArray", Token::Image(Image::UIMAGECUBEARRAY));
+    lexer.keywords.insert("image2DMS", Token::Image(Image::IMAGE2DMS));
+    lexer.keywords.insert("iimage2DMS", Token::Image(Image::IIMAGE2DMS));
+    lexer.keywords.insert("uimage2DMS", Token::Image(Image::UIMAGE2DMS));
+    lexer.keywords.insert("image2DMSArray", Token::Image(Image::IMAGE2DMSARRAY));
+    lexer.keywords.insert("iimage2DMSArray", Token::Image(Image::IIMAGE2DMSARRAY));
+    lexer.keywords.insert("uimage2DMSArray", Token::Image(Image::UIMAGE2DMSARRAY));
+    lexer.keywords.insert("struct", Token::STRUCT);
+}
+
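populate_tokens is one insert call per GLSL keyword. A table-driven alternative (a sketch with stand-in types; it relies on Token: Clone, which the new is_keyword already requires) keeps the same list but removes the repeated method calls:

use std::collections::HashMap;

// Stand-in for crate::tokens::Token.
#[derive(Clone, Debug, PartialEq)]
enum Token {
    CONST,
    UNIFORM,
}

// Stand-in for crate::lex::lexer::Lexer.
struct Lexer {
    keywords: HashMap<&'static str, Token>,
}

fn populate_tokens(lexer: &mut Lexer) {
    const PAIRS: &[(&str, Token)] = &[
        ("const", Token::CONST),
        ("uniform", Token::UNIFORM),
        // ...the remaining keywords would follow the same pattern
    ];
    lexer.keywords.extend(PAIRS.iter().map(|(k, v)| (*k, v.clone())));
}

fn main() {
    let mut lexer = Lexer { keywords: HashMap::new() };
    populate_tokens(&mut lexer);
    assert_eq!(lexer.keywords.get("uniform"), Some(&Token::UNIFORM));
}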
 impl crate::lex::lexer::Lexer {
-    pub fn is_keyword(word: &str) -> Option<Token> {
-        let token: Option<Token> = match word {
-            "const" => Some(Token::CONST),
-            "uniform" => Some(Token::UNIFORM),
-            "buffer" => Some(Token::BUFFER),
-            "shared" => Some(Token::SHARED),
-            "coherent" => Some(Token::COHERENT),
-            "volatile" => Some(Token::VOLATILE),
-            "restrict" => Some(Token::RESTRICT),
-            "readonly" => Some(Token::READONLY),
-            "writeonly" => Some(Token::WRITEONLY),
-            "atomic_uint" => Some(Token::ATOMIC_UINT),
-            "layout" => Some(Token::LAYOUT),
-            "centroid" => Some(Token::CENTROID),
-            "flat" => Some(Token::FLAT),
-            "smooth" => Some(Token::SMOOTH),
-            "noperspective" => Some(Token::NOPERSPECTIVE),
-            "patch" => Some(Token::PATCH),
-            "sample" => Some(Token::SAMPLE),
-            "break" => Some(Token::BREAK),
-            "continue" => Some(Token::CONTINUE),
-            "do" => Some(Token::DO),
-            "for" => Some(Token::FOR),
-            "while" => Some(Token::WHILE),
-            "switch" => Some(Token::SWITCH),
-            "case" => Some(Token::CASE),
-            "default" => Some(Token::DEFAULT),
-            "if" => Some(Token::IF),
-            "else" => Some(Token::ELSE),
-            "subroutine" => Some(Token::SUBROUTINE),
-            "in" => Some(Token::IN),
-            "out" => Some(Token::OUT),
-            "inout" => Some(Token::INOUT),
-            "float" => Some(Token::FLOAT),
-            "double" => Some(Token::DOUBLE),
-            "int" => Some(Token::INT),
-            "void" => Some(Token::VOID),
-            "bool" => Some(Token::BOOL),
-            "true" => Some(Token::BOOLCONSTANT(true)),
-            "false" => Some(Token::BOOLCONSTANT(false)),
-            "invariant" => Some(Token::INVARIANT),
-            "precise" => Some(Token::PRECISE),
-            "discard" => Some(Token::DISCARD),
-            "return" => Some(Token::RETURN),
-            "mat2" => Some(Token::Material(Material::MAT2)),
-            "mat3" => Some(Token::Material(Material::MAT3)),
-            "mat4" => Some(Token::Material(Material::MAT4)),
-            "dmat2" => Some(Token::Material(Material::DMAT2)),
-            "dmat3" => Some(Token::Material(Material::DMAT3)),
-            "dmat4" => Some(Token::Material(Material::DMAT4)),
-            "mat2x2" => Some(Token::Material(Material::MAT2X2)),
-            "mat2x3" => Some(Token::Material(Material::MAT2X3)),
-            "mat2x4" => Some(Token::Material(Material::MAT2X4)),
-            "dmat2x2" => Some(Token::Material(Material::DMAT2X2)),
-            "dmat2x3" => Some(Token::Material(Material::DMAT2X3)),
-            "dmat2x4" => Some(Token::Material(Material::DMAT2X4)),
-            "mat3x2" => Some(Token::Material(Material::MAT3X2)),
-            "mat3x3" => Some(Token::Material(Material::MAT3X3)),
-            "mat3x4" => Some(Token::Material(Material::MAT3X4)),
-            "dmat3x2" => Some(Token::Material(Material::DMAT3X2)),
-            "dmat3x3" => Some(Token::Material(Material::DMAT3X3)),
-            "dmat3x4" => Some(Token::Material(Material::DMAT3X4)),
-            "mat4x2" => Some(Token::Material(Material::MAT4X2)),
-            "mat4x3" => Some(Token::Material(Material::MAT4X3)),
-            "mat4x4" => Some(Token::Material(Material::MAT4X4)),
-            "dmat4x2" => Some(Token::Material(Material::DMAT4X2)),
-            "dmat4x3" => Some(Token::Material(Material::DMAT4X3)),
-            "dmat4x4" => Some(Token::Material(Material::DMAT4X4)),
-            "vec2" => Some(Token::Vector(Vector::VEC2)),
-            "vec3" => Some(Token::Vector(Vector::VEC3)),
-            "vec4" => Some(Token::Vector(Vector::VEC4)),
-            "ivec2" => Some(Token::Vector(Vector::IVEC2)),
-            "ivec3" => Some(Token::Vector(Vector::IVEC3)),
-            "ivec4" => Some(Token::Vector(Vector::IVEC4)),
-            "bvec2" => Some(Token::Vector(Vector::BVEC2)),
-            "bvec3" => Some(Token::Vector(Vector::BVEC3)),
-            "bvec4" => Some(Token::Vector(Vector::BVEC4)),
-            "dvec2" => Some(Token::Vector(Vector::DVEC2)),
-            "dvec3" => Some(Token::Vector(Vector::DVEC3)),
-            "dvec4" => Some(Token::Vector(Vector::DVEC4)),
-            "uint" => Some(Token::UINT),
-            "uvec2" => Some(Token::Vector(Vector::UVEC2)),
-            "uvec3" => Some(Token::Vector(Vector::UVEC3)),
-            "uvec4" => Some(Token::Vector(Vector::UVEC4)),
-            "lowp" => Some(Token::LOW_PRECISION),
-            "mediump" => Some(Token::MEDIUM_PRECISION),
-            "highp" => Some(Token::HIGH_PRECISION),
-            "precision" => Some(Token::PRECISION),
-            "sampler1D" => Some(Token::Sampler(Sampler::SAMPLER1D)),
-            "sampler2D" => Some(Token::Sampler(Sampler::SAMPLER2D)),
-            "sampler3D" => Some(Token::Sampler(Sampler::SAMPLER3D)),
-            "samplerCube" => Some(Token::Sampler(Sampler::SAMPLERCUBE)),
-            "sampler1DShadow" => Some(Token::Sampler(Sampler::SAMPLER1DSHADOW)),
-            "sampler2DShadow" => Some(Token::Sampler(Sampler::SAMPLER2DSHADOW)),
-            "samplerCubeShadow" => Some(Token::Sampler(Sampler::SAMPLERCUBESHADOW)),
-            "sampler1DArray" => Some(Token::Sampler(Sampler::SAMPLER1DARRAY)),
-            "sampler2DArray" => Some(Token::Sampler(Sampler::SAMPLER2DARRAY)),
-            "sampler1DArrayShadow" => Some(Token::Sampler(Sampler::SAMPLER1DARRAYSHADOW)),
-            "sampler2DArrayShadow" => Some(Token::Sampler(Sampler::SAMPLER2DARRAYSHADOW)),
-            "isampler1D" => Some(Token::Sampler(Sampler::ISAMPLER1D)),
-            "isampler2D" => Some(Token::Sampler(Sampler::ISAMPLER2D)),
-            "isampler3D" => Some(Token::Sampler(Sampler::ISAMPLER3D)),
-            "isamplerCube" => Some(Token::Sampler(Sampler::ISAMPLERCUBE)),
-            "isampler1DArray" => Some(Token::Sampler(Sampler::ISAMPLER1DARRAY)),
-            "isampler2DArray" => Some(Token::Sampler(Sampler::ISAMPLER2DARRAY)),
-            "usampler1D" => Some(Token::Sampler(Sampler::USAMPLER1D)),
-            "usampler2D" => Some(Token::Sampler(Sampler::USAMPLER2D)),
-            "usampler3D" => Some(Token::Sampler(Sampler::USAMPLER3D)),
-            "usamplerCube" => Some(Token::Sampler(Sampler::USAMPLERCUBE)),
-            "usampler1DArray" => Some(Token::Sampler(Sampler::USAMPLER1DARRAY)),
-            "usampler2DArray" => Some(Token::Sampler(Sampler::USAMPLER2DARRAY)),
-            "sampler2DRect" => Some(Token::Sampler(Sampler::SAMPLER2DRECT)),
-            "sampler2DRectShadow" => Some(Token::Sampler(Sampler::SAMPLER2DRECTSHADOW)),
-            "isampler2DRect" => Some(Token::Sampler(Sampler::ISAMPLER2DRECT)),
-            "usampler2DRect" => Some(Token::Sampler(Sampler::USAMPLER2DRECT)),
-            "samplerBuffer" => Some(Token::Sampler(Sampler::SAMPLERBUFFER)),
-            "isamplerBuffer" => Some(Token::Sampler(Sampler::ISAMPLERBUFFER)),
-            "usamplerBuffer" => Some(Token::Sampler(Sampler::USAMPLERBUFFER)),
-            "sampler2DMS" => Some(Token::Sampler(Sampler::SAMPLER2DMS)),
-            "isampler2DMS" => Some(Token::Sampler(Sampler::ISAMPLER2DMS)),
-            "usampler2DMS" => Some(Token::Sampler(Sampler::USAMPLER2DMS)),
-            "sampler2DMSArray" => Some(Token::Sampler(Sampler::SAMPLER2DMSARRAY)),
-            "isampler2DMSArray" => Some(Token::Sampler(Sampler::ISAMPLER2DMSARRAY)),
-            "usampler2DMSArray" => Some(Token::Sampler(Sampler::USAMPLER2DMSARRAY)),
-            "samplerCubeArray" => Some(Token::Sampler(Sampler::SAMPLERCUBEARRAY)),
-            "samplerCubeArrayShadow" => Some(Token::Sampler(Sampler::SAMPLERCUBEARRAYSHADOW)),
-            "isamplerCubeArray" => Some(Token::Sampler(Sampler::ISAMPLERCUBEARRAY)),
-            "usamplerCubeArray" => Some(Token::Sampler(Sampler::USAMPLERCUBEARRAY)),
-            "image1D" => Some(Token::Image(Image::IMAGE1D)),
-            "iimage1D" => Some(Token::Image(Image::IIMAGE1D)),
-            "uimage1D" => Some(Token::Image(Image::UIMAGE1D)),
-            "image2D" => Some(Token::Image(Image::IMAGE2D)),
-            "iimage2D" => Some(Token::Image(Image::IIMAGE2D)),
-            "uimage2D" => Some(Token::Image(Image::UIMAGE2D)),
-            "image3D" => Some(Token::Image(Image::IMAGE3D)),
-            "iimage3D" => Some(Token::Image(Image::IIMAGE3D)),
-            "uimage3D" => Some(Token::Image(Image::UIMAGE3D)),
-            "image2DRect" => Some(Token::Image(Image::IMAGE2DRECT)),
-            "iimage2DRect" => Some(Token::Image(Image::IIMAGE2DRECT)),
-            "uimage2DRect" => Some(Token::Image(Image::UIMAGE2DRECT)),
-            "imageCube" => Some(Token::Image(Image::IMAGECUBE)),
-            "iimageCube" => Some(Token::Image(Image::IIMAGECUBE)),
-            "uimageCube" => Some(Token::Image(Image::UIMAGECUBE)),
-            "imageBuffer" => Some(Token::Image(Image::IMAGEBUFFER)),
-            "iimageBuffer" => Some(Token::Image(Image::IIMAGEBUFFER)),
-            "uimageBuffer" => Some(Token::Image(Image::UIMAGEBUFFER)),
-            "image1DArray" => Some(Token::Image(Image::IMAGE1DARRAY)),
-            "iimage1DArray" => Some(Token::Image(Image::IIMAGE1DARRAY)),
-            "uimage1DArray" => Some(Token::Image(Image::UIMAGE1DARRAY)),
-            "image2DArray" => Some(Token::Image(Image::IMAGE2DARRAY)),
-            "iimage2DArray" => Some(Token::Image(Image::IIMAGE2DARRAY)),
-            "uimage2DArray" => Some(Token::Image(Image::UIMAGE2DARRAY)),
-            "imageCubeArray" => Some(Token::Image(Image::IMAGECUBEARRAY)),
-            "iimageCubeArray" => Some(Token::Image(Image::IIMAGECUBEARRAY)),
-            "uimageCubeArray" => Some(Token::Image(Image::UIMAGECUBEARRAY)),
-            "image2DMS" => Some(Token::Image(Image::IMAGE2DMS)),
-            "iimage2DMS" => Some(Token::Image(Image::IIMAGE2DMS)),
-            "uimage2DMS" => Some(Token::Image(Image::UIMAGE2DMS)),
-            "image2DMSArray" => Some(Token::Image(Image::IMAGE2DMSARRAY)),
-            "iimage2DMSArray" => Some(Token::Image(Image::IIMAGE2DMSARRAY)),
-            "uimage2DMSArray" => Some(Token::Image(Image::UIMAGE2DMSARRAY)),
-            "struct" => Some(Token::STRUCT),
-            _ => None,
-        };
+    pub fn is_keyword(&mut self, word: &str) -> Option<Token> {
+        let token = self.keywords.get(word);
         if let Some(token) = token {
-            Some(token)
+            Some(token.clone())
         } else {
-            Some(Token::IDENTIFIER)
+            // TODO: Check if word is an identifier
+            Some(Token::IDENTIFIER(word.to_string()))
         }
     }
 }
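As rewritten, is_keyword never actually returns None: a hit clones the mapped token and a miss produces IDENTIFIER carrying the lexeme, which is what makes the caller's unwrap safe. The Option wrapper, and the &mut self receiver (the lookup never mutates), could both be dropped in a later pass. A stand-in sketch of that shape, not the crate's current API:

use std::collections::HashMap;

// Stand-in for crate::tokens::Token.
#[derive(Clone, Debug, PartialEq)]
enum Token {
    UNIFORM,
    IDENTIFIER(String),
}

struct Lexer {
    keywords: HashMap<&'static str, Token>,
}

impl Lexer {
    // Returning Token directly removes the unwrap at the call site.
    fn keyword_or_identifier(&self, word: &str) -> Token {
        self.keywords
            .get(word)
            .cloned()
            .unwrap_or_else(|| Token::IDENTIFIER(word.to_string()))
    }
}

fn main() {
    let mut lexer = Lexer { keywords: HashMap::new() };
    lexer.keywords.insert("uniform", Token::UNIFORM);
    assert_eq!(lexer.keyword_or_identifier("uniform"), Token::UNIFORM);
    assert_eq!(
        lexer.keyword_or_identifier("color"),
        Token::IDENTIFIER("color".to_string())
    );
}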
src/lib.rs (47 changed lines)
@@ -33,9 +33,29 @@ mod tests {
     #[test]
     fn keyword() {
         let source = "image1D";
-        let res = Lexer::is_keyword(source);
+        let mut lexer = Lexer::new(source);
+        let res = Lexer::is_keyword(&mut lexer, source);
         assert_eq!(Some(Token::Image(Image::IMAGE1D)), res)
     }
 
+    #[test]
+    fn identifier() {
+        let source_code = "variableName";
+        let mut lexer = Lexer::new(source_code);
+        let tokens = lexer.get_tokens();
+        assert_eq!(
+            tokens,
+            vec![Token::IDENTIFIER("variableName".to_string()), Token::EOF].into()
+        );
+    }
+
+    #[test]
+    fn test_keyword() {
+        let source_code = "uniform";
+        let mut lexer = Lexer::new(source_code);
+        let tokens = lexer.get_tokens();
+        assert_eq!(tokens, vec![Token::UNIFORM, Token::EOF].into());
+    }
 }
 // #[cfg(test)]
 // mod tests {
@@ -59,31 +79,6 @@ mod tests {
     // assert_eq!(tokens, vec![Token::Whitespace, Token::EOF].into());
     // }
     //
-    // #[test]
-    // fn identifier() {
-    //     init();
-    //     let source_code = "variableName";
-    //     let mut lexer = Lexer::new(source_code);
-    //     let tokens = lexer.get_tokens();
-    //     info!("[Identifier] Tokens: {:#?}", tokens);
-    //     assert_eq!(
-    //         tokens,
-    //         vec![Token::Identifier("variableName".to_string()), Token::EOF].into()
-    //     );
-    // }
-    //
-    // #[test]
-    // fn keyword() {
-    //     init();
-    //     let source_code = "uniform";
-    //     let mut lexer = Lexer::new(source_code);
-    //     let tokens = lexer.get_tokens();
-    //     info!("[Keyword] Tokens: {:#?}", tokens);
-    //     assert_eq!(
-    //         tokens,
-    //         vec![Token::Keyword("uniform".to_string()), Token::EOF].into()
-    //     );
-    // }
-    //
     // #[test]
     // fn integer_literal() {
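Both the new tests and get_tokens compare via .into(); given the use std::sync::Arc import in src/lex/lexer.rs, a plausible reading (an assumption, since the return type is not shown in this diff) is that the token stream is an Arc<[Token]>:

use std::sync::Arc;

// Stand-in for crate::tokens::Token.
#[derive(Debug, PartialEq)]
enum Token {
    UNIFORM,
    EOF,
}

fn main() {
    // Vec<Token> -> Arc<[Token]> via the standard From impl; this is what
    // `vec![...].into()` in the asserts would resolve to under that assumption.
    let expected: Arc<[Token]> = vec![Token::UNIFORM, Token::EOF].into();
    assert_eq!(&*expected, &[Token::UNIFORM, Token::EOF]);
}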
src/tokens.rs

@@ -48,7 +48,7 @@ pub enum Token {
     CASE,
     DEFAULT,
     SUBROUTINE,
-    IDENTIFIER,
+    IDENTIFIER(String),
     TYPE_NAME,
     FLOATCONSTANT(f32),
     INTCONSTANT(i32),
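IDENTIFIER now carries its lexeme, so later stages can read the name straight off the token. A minimal sketch of consuming the new variant (stand-in enum, not the full Token):

#[derive(Debug, PartialEq)]
enum Token {
    UNIFORM,
    IDENTIFIER(String),
} // stand-in for the variants above

fn describe(token: &Token) -> String {
    match token {
        Token::UNIFORM => "keyword `uniform`".to_string(),
        Token::IDENTIFIER(name) => format!("identifier `{name}`"),
    }
}

fn main() {
    let tok = Token::IDENTIFIER("color".to_string());
    assert_eq!(describe(&tok), "identifier `color`");
}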