Initial commit

commit 85ea81a41d
Author: xqtc161
Date:   2024-07-10 19:06:08 +02:00

8 changed files with 446 additions and 0 deletions

.gitignore (vendored, new file)

@@ -0,0 +1 @@
/target

Cargo.lock (generated, new file)

@@ -0,0 +1,7 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3

[[package]]
name = "glsl-lexer"
version = "0.1.0"

Cargo.toml (new file)

@@ -0,0 +1,6 @@
[package]
name = "glsl-lexer"
version = "0.1.0"
edition = "2021"

[dependencies]

flake.lock (new file)

@@ -0,0 +1,113 @@
{
  "nodes": {
    "flake-parts": {
      "inputs": {
        "nixpkgs-lib": "nixpkgs-lib"
      },
      "locked": {
        "lastModified": 1709336216,
        "narHash": "sha256-Dt/wOWeW6Sqm11Yh+2+t0dfEWxoMxGBvv3JpIocFl9E=",
        "owner": "hercules-ci",
        "repo": "flake-parts",
        "rev": "f7b3c975cf067e56e7cda6cb098ebe3fb4d74ca2",
        "type": "github"
      },
      "original": {
        "owner": "hercules-ci",
        "repo": "flake-parts",
        "type": "github"
      }
    },
    "nixpkgs": {
      "locked": {
        "lastModified": 1710806803,
        "narHash": "sha256-qrxvLS888pNJFwJdK+hf1wpRCSQcqA6W5+Ox202NDa0=",
        "owner": "nixos",
        "repo": "nixpkgs",
        "rev": "b06025f1533a1e07b6db3e75151caa155d1c7eb3",
        "type": "github"
      },
      "original": {
        "owner": "nixos",
        "ref": "nixos-unstable",
        "repo": "nixpkgs",
        "type": "github"
      }
    },
    "nixpkgs-lib": {
      "locked": {
        "dir": "lib",
        "lastModified": 1709237383,
        "narHash": "sha256-cy6ArO4k5qTx+l5o+0mL9f5fa86tYUX3ozE1S+Txlds=",
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "1536926ef5621b09bba54035ae2bb6d806d72ac8",
        "type": "github"
      },
      "original": {
        "dir": "lib",
        "owner": "NixOS",
        "ref": "nixos-unstable",
        "repo": "nixpkgs",
        "type": "github"
      }
    },
    "nixpkgs_2": {
      "locked": {
        "lastModified": 1708475490,
        "narHash": "sha256-g1v0TsWBQPX97ziznfJdWhgMyMGtoBFs102xSYO4syU=",
        "owner": "nixos",
        "repo": "nixpkgs",
        "rev": "0e74ca98a74bc7270d28838369593635a5db3260",
        "type": "github"
      },
      "original": {
        "owner": "nixos",
        "ref": "nixos-unstable",
        "repo": "nixpkgs",
        "type": "github"
      }
    },
    "root": {
      "inputs": {
        "flake-parts": "flake-parts",
        "nixpkgs": "nixpkgs",
        "systems": "systems",
        "treefmt-nix": "treefmt-nix"
      }
    },
    "systems": {
      "flake": false,
      "locked": {
        "lastModified": 1,
        "narHash": "sha256-8wkkYGr1dPSnX9oVMX8D6dTOROXKOYpBTKfriA0sEBI=",
        "path": "/nix/store/z4by2bx56bm456s77isfwvr59mqg9brg-source/flake.systems.nix",
        "type": "path"
      },
      "original": {
        "path": "/nix/store/z4by2bx56bm456s77isfwvr59mqg9brg-source/flake.systems.nix",
        "type": "path"
      }
    },
    "treefmt-nix": {
      "inputs": {
        "nixpkgs": "nixpkgs_2"
      },
      "locked": {
        "lastModified": 1710781103,
        "narHash": "sha256-nehQK/XTFxfa6rYKtbi8M1w+IU1v5twYhiyA4dg1vpg=",
        "owner": "numtide",
        "repo": "treefmt-nix",
        "rev": "7ee5aaac63c30d3c97a8c56efe89f3b2aa9ae564",
        "type": "github"
      },
      "original": {
        "owner": "numtide",
        "repo": "treefmt-nix",
        "type": "github"
      }
    }
  },
  "root": "root",
  "version": 7
}

flake.nix (new file)

@@ -0,0 +1,79 @@
# Nix flake for reproducible builds & development environments.
# TL;DR:
#   either `curl --proto '=https' --tlsv1.2 -sSf -L https://install.determinate.systems/nix | sh -s -- install`
#   or https://github.com/DeterminateSystems/nix-installer
# and then `nix build` or `nix develop`
{
  inputs = {
    nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable";
    flake-parts.url = "github:hercules-ci/flake-parts";
    #systems.url = "github:nix-systems/default";

    # Dev tools
    treefmt-nix.url = "github:numtide/treefmt-nix";
  };

  inputs.systems.url = "./flake.systems.nix";
  inputs.systems.flake = false;

  outputs = inputs:
    inputs.flake-parts.lib.mkFlake { inherit inputs; } {
      systems = import inputs.systems;
      imports = [
        inputs.treefmt-nix.flakeModule
      ];
      perSystem = { config, self', pkgs, lib, system, ... }:
        let
          cargoToml = builtins.fromTOML (builtins.readFile ./Cargo.toml);
          nonRustDeps = [
            pkgs.pkg-config
          ];
          rust-toolchain = pkgs.symlinkJoin {
            name = "rust-toolchain";
            paths = [ pkgs.rustc pkgs.cargo pkgs.cargo-watch pkgs.rust-analyzer pkgs.rustPlatform.rustcSrc ];
          };
        in
        {
          # Rust package
          packages.default = pkgs.rustPlatform.buildRustPackage {
            inherit (cargoToml.package) name version;
            nativeBuildInputs = nonRustDeps;
            buildInputs = nonRustDeps;
            src = ./.;
            cargoLock.lockFile = ./Cargo.lock;
          };

          # Rust dev environment
          devShells.default = pkgs.mkShell {
            inputsFrom = [
              config.treefmt.build.devShell
            ];
            shellHook = ''
              # For rust-analyzer 'hover' tooltips to work.
              export RUST_SRC_PATH=${pkgs.rustPlatform.rustLibSrc}
              echo $RUST_SRC_PATH
              echo
              echo "Run 'just <recipe>' to get started"
              just
            '';
            buildInputs = nonRustDeps;
            nativeBuildInputs = with pkgs; [
              just
              rust-toolchain
            ];
            RUST_BACKTRACE = 1;
          };

          # Add your auto-formatters here.
          # cf. https://numtide.github.io/treefmt/
          treefmt.config = {
            projectRootFile = "flake.nix";
            programs = {
              nixpkgs-fmt.enable = true;
              rustfmt.enable = true;
            };
          };
        };
    };
}

flake.systems.nix (new file)

@@ -0,0 +1,4 @@
[
"x86_64-linux"
"aarch64-linux"
]

justfile (new file)

@@ -0,0 +1,14 @@
default:
    @just --list

# Auto-format the source tree
fmt:
    treefmt

# Run 'cargo run' on the project
run *ARGS:
    cargo run {{ARGS}}

# Run 'cargo watch' to run the project (auto-recompiles)
watch *ARGS:
    cargo watch -x "run -- {{ARGS}}"

src/lib.rs (new file)

@@ -0,0 +1,222 @@
// Work in progress: this lexer is incomplete and still rough around the edges.
#[derive(Debug, PartialEq)]
pub enum Token {
    Identifier(String),
    Keyword(String),
    IntegerLiteral(i64),
    FloatLiteral(f64),
    Operator(String),
    Symbol(char),
    Whitespace,
    Comment(String),
    Unknown(char),
    EndOfFile,
}

pub struct Lexer {
    input: Vec<char>,
    position: usize,
    current_char: Option<char>,
}

impl Lexer {
    pub fn new(input: &str) -> Self {
        let mut lexer = Lexer {
            input: input.chars().collect(),
            position: 0,
            current_char: None,
        };
        lexer.current_char = if lexer.position < lexer.input.len() {
            Some(lexer.input[lexer.position])
        } else {
            None
        };
        lexer
    }

    fn advance(&mut self) {
        self.position += 1;
        self.current_char = if self.position < self.input.len() {
            Some(self.input[self.position])
        } else {
            None
        };
    }

    pub fn get_tokens(&mut self) -> Vec<Token> {
        let mut tokens = Vec::new();
        while let Some(c) = self.current_char {
            if c.is_whitespace() {
                self.consume_whitespace();
                tokens.push(Token::Whitespace);
            } else if c.is_alphabetic() || c == '_' {
                tokens.push(self.consume_identifier_or_keyword());
            } else if c.is_digit(10) {
                tokens.push(self.consume_number());
            } else if c == '/' && self.peek() == Some('/') {
                tokens.push(self.consume_comment());
            } else {
                tokens.push(self.consume_symbol());
            }
        }
        tokens.push(Token::EndOfFile);
        tokens
    }

    fn peek(&self) -> Option<char> {
        if self.position + 1 < self.input.len() {
            Some(self.input[self.position + 1])
        } else {
            None
        }
    }
}

impl Lexer {
    fn consume_whitespace(&mut self) {
        while let Some(c) = self.current_char {
            if !c.is_whitespace() {
                break;
            }
            self.advance();
        }
    }

    fn consume_identifier_or_keyword(&mut self) -> Token {
        let mut identifier = String::new();
        while let Some(c) = self.current_char {
            if c.is_alphanumeric() || c == '_' {
                identifier.push(c);
                self.advance();
            } else {
                break;
            }
        }
        if is_keyword(&identifier) {
            Token::Keyword(identifier)
        } else {
            Token::Identifier(identifier)
        }
    }
    fn consume_number(&mut self) -> Token {
        let mut number = String::new();
        let mut is_float = false;
        while let Some(c) = self.current_char {
            if c.is_digit(10) {
                number.push(c);
                self.advance();
            } else if c == '.' && !is_float {
                // Only accept the first '.'; a second one ends the literal,
                // so the parse() below cannot panic on input like "1.2.3".
                number.push(c);
                is_float = true;
                self.advance();
            } else {
                break;
            }
        }
        if is_float {
            Token::FloatLiteral(number.parse().unwrap())
        } else {
            Token::IntegerLiteral(number.parse().unwrap())
        }
    }
    fn consume_comment(&mut self) -> Token {
        let mut comment = String::new();
        while let Some(c) = self.current_char {
            if c == '\n' {
                break;
            }
            comment.push(c);
            self.advance();
        }
        Token::Comment(comment)
    }

    fn consume_symbol(&mut self) -> Token {
        let symbol = self.current_char.unwrap();
        self.advance();
        Token::Symbol(symbol)
    }
}

fn is_keyword(word: &str) -> bool {
    matches!(
        word,
        "void"
            | "int"
            | "float"
            | "bool"
            | "if"
            | "else"
            | "for"
            | "while"
            | "return"
            | "struct"
            | "uniform"
            | "varying"
    )
}

impl Lexer {
    fn error(&self, message: &str) -> ! {
        panic!("Lexer error at position {}: {}", self.position, message);
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_whitespace() {
        let source_code = " \t\n";
        let mut lexer = Lexer::new(source_code);
        let tokens = lexer.get_tokens();
        assert_eq!(tokens, vec![Token::Whitespace, Token::EndOfFile]);
    }

    #[test]
    fn test_identifier() {
        let source_code = "variableName";
        let mut lexer = Lexer::new(source_code);
        let tokens = lexer.get_tokens();
        assert_eq!(
            tokens,
            vec![
                Token::Identifier("variableName".to_string()),
                Token::EndOfFile
            ]
        );
    }

    #[test]
    fn test_keyword() {
        let source_code = "uniform";
        let mut lexer = Lexer::new(source_code);
        let tokens = lexer.get_tokens();
        assert_eq!(
            tokens,
            vec![Token::Keyword("uniform".to_string()), Token::EndOfFile]
        );
    }

    #[test]
    fn test_integer_literal() {
        let source_code = "12345";
        let mut lexer = Lexer::new(source_code);
        let tokens = lexer.get_tokens();
        assert_eq!(tokens, vec![Token::IntegerLiteral(12345), Token::EndOfFile]);
    }

    #[test]
    fn test_float_literal() {
        let source_code = "123.45";
        let mut lexer = Lexer::new(source_code);
        let tokens = lexer.get_tokens();
        assert_eq!(tokens, vec![Token::FloatLiteral(123.45), Token::EndOfFile]);
    }
}
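
As a quick orientation, here is a minimal usage sketch that is not part of this commit: it assumes the crate is consumed as the library glsl_lexer (the package name declared in Cargo.toml) from a hypothetical binary or examples/ target; the main function, the GLSL snippet, and the whitespace filtering are illustrative only.

// Hypothetical usage sketch (not included in the commit): tokenize a small
// GLSL snippet with the lexer above and print every non-whitespace token.
use glsl_lexer::{Lexer, Token};

fn main() {
    let source = "uniform float brightness; // scales the final color";
    let mut lexer = Lexer::new(source);
    for token in lexer.get_tokens() {
        // get_tokens() also emits Whitespace tokens, so filter them out here.
        if token != Token::Whitespace {
            println!("{:?}", token);
        }
    }
}

Under those assumptions the printed sequence would be Keyword("uniform"), Keyword("float"), Identifier("brightness"), Symbol(';'), the trailing Comment, and EndOfFile.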