MCLang, now with testing!

This commit is contained in:
Gvidas Juknevičius 2024-12-21 05:10:03 +02:00
parent 54b6df5862
commit debcf6ad6c
Signed by: MCorange
GPG Key ID: 12B1346D720B7FBB
26 changed files with 1210 additions and 41 deletions

7
Cargo.lock generated
View File

@ -63,6 +63,12 @@ version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26"
[[package]]
name = "camino"
version = "1.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b96ec4966b5813e2c0507c1f86115c8c5abaadc3980879c3424042a02fd1ad3"
[[package]] [[package]]
name = "clap" name = "clap"
version = "4.5.23" version = "4.5.23"
@ -132,6 +138,7 @@ name = "mclangc"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"camino",
"clap", "clap",
"lazy_static", "lazy_static",
"parse_int", "parse_int",

View File

@ -7,6 +7,7 @@ edition = "2021"
[dependencies] [dependencies]
anyhow = "1.0.94" anyhow = "1.0.94"
camino = "1.1.9"
clap = { version = "4.5.23", features = ["derive"] } clap = { version = "4.5.23", features = ["derive"] }
lazy_static = "1.5.0" lazy_static = "1.5.0"
parse_int = "0.6.0" parse_int = "0.6.0"

63
src/bin/test/logger.rs Normal file
View File

@ -0,0 +1,63 @@
/// Log verbosity level for the test runner's logger.
/// Variants are ordered from silent (`Off`) to most verbose (`Debug`);
/// the `u8` repr gives them ascending numeric discriminants.
#[repr(u8)]
#[derive(Debug, Default)]
pub enum Level {
    /// Suppresses all output.
    Off = 0,
    Error,
    Warn,
    /// The default level.
    #[default]
    Info,
    Help,
    Debug
}
// ANSI escape sequences used to colour the log-level prefixes.
// A `&str` in a `const` is implicitly `'static`, so the explicit
// lifetime is redundant (clippy: `redundant_static_lifetimes`).
const C_RESET: &str = "\x1B[0m";
const C_ERROR: &str = "\x1B[1;31m"; // bold red
const C_WARN: &str = "\x1B[1;33m"; // bold yellow
const C_INFO: &str = "\x1B[1;32m"; // bold green
const C_DEBUG: &str = "\x1B[1;35m"; // bold magenta
const C_HELP: &str = "\x1B[1;36m"; // bold cyan
pub fn _log(level: Level, str: &str) {
match level {
Level::Off => return,
Level::Error => println!("{C_ERROR}error{C_RESET}: {str}"),
Level::Warn => println!("{C_WARN}warn{C_RESET}: {str}"),
Level::Info => println!("{C_INFO}info{C_RESET}: {str}"),
Level::Help => println!("{C_HELP}help{C_RESET}: {str}"),
Level::Debug => println!("{C_DEBUG}debug{C_RESET}: {str}"),
}
}
/// Logging convenience macros. Each forwards `format!`-style
/// arguments to [`crate::logger::_log`] with the matching
/// [`crate::logger::Level`], so call sites read like the
/// `println!` family.
#[macro_use]
pub mod log {
    // Logs at `Level::Error`.
    #[macro_export]
    macro_rules! error {
        ($($arg:tt)*) => {
            crate::logger::_log(crate::logger::Level::Error, &format!($($arg)*))
        };
    }
    // Logs at `Level::Warn`.
    #[macro_export]
    macro_rules! warn {
        ($($arg:tt)*) => {
            crate::logger::_log(crate::logger::Level::Warn, &format!($($arg)*))
        };
    }
    // Logs at `Level::Info`.
    #[macro_export]
    macro_rules! info {
        ($($arg:tt)*) => {
            crate::logger::_log(crate::logger::Level::Info, &format!($($arg)*))
        };
    }
    // Logs at `Level::Help`.
    #[macro_export]
    macro_rules! help {
        ($($arg:tt)*) => {
            crate::logger::_log(crate::logger::Level::Help, &format!($($arg)*))
        };
    }
    // Logs at `Level::Debug`.
    #[macro_export]
    macro_rules! debug {
        ($($arg:tt)*) => {
            crate::logger::_log(crate::logger::Level::Debug, &format!($($arg)*))
        };
    }
}

View File

@ -1,8 +1,19 @@
use std::{collections::HashMap, ffi::OsStr, io::Write, os::unix::ffi::OsStrExt, path::{Path, PathBuf}, process::ExitCode};
use anyhow::bail;
use camino::Utf8PathBuf;
use clap::Parser;
use mclangc;
#[macro_use]
mod logger;
/// Testing program for mclangc, taken inspiration from porth, which was made by tsoding :3 /// Testing program for mclangc, taken inspiration from porth, which was made by tsoding :3
#[derive(Debug, clap::Parser)] #[derive(Debug, clap::Parser)]
#[command(version, about, long_about = None)] #[command(version, about, long_about = None)]
struct CliArgs { struct CliArgs {
/// Path to the test folder
#[arg(long, short, default_value="./tests")]
path: Utf8PathBuf,
#[clap(subcommand)] #[clap(subcommand)]
cmd: CliCmd cmd: CliCmd
} }
@ -15,9 +26,196 @@ pub enum CliCmd {
Compile Compile
} }
struct CollectedFiles {
tokeniser: HashMap<String, (String, ExpTyp)>,
fn main() -> anyhow::Result<()> { parser: HashMap<String, (String, ExpTyp)>,
}
Ok(())
/// Expected-output (`*.exp`) data for one test case: either the file's
/// path together with its contents (when it already exists on disk),
/// or just the path it should be written to.
enum ExpTyp {
    Text((PathBuf, String)),
    Path(PathBuf),
}
impl ExpTyp {
    /// The `*.exp` file path, regardless of whether its contents
    /// have been loaded.
    pub fn path(&self) -> &Path {
        // Both variants carry the path in the same position, so a
        // single or-pattern binding covers them.
        let (Self::Text((p, _)) | Self::Path(p)) = self;
        p
    }
}
/// Scans `path` (non-recursively) for `*.mcl` test sources and pairs
/// each with its expected-output `*.exp` file — contents included when
/// the file exists, otherwise just the path to create later.
///
/// Returns a map keyed by the file stem (name without extension).
/// Errors propagate from directory iteration and file reads.
fn collect_files_for_single_type(path: &Path) -> anyhow::Result<HashMap<String, (String, ExpTyp)>> {
    let mut files = HashMap::new();
    for file in path.read_dir()? {
        let file = file?;
        if !file.file_type()?.is_file() {
            continue;
        }
        let p = file.path();
        // Portable extension comparison; the previous version used the
        // unix-only `OsStrExt::from_bytes(b"mcl")` for the same check.
        if p.extension() != Some(OsStr::new("mcl")) {
            continue;
        }
        let src = std::fs::read_to_string(&p)?;
        let exp_p = p.with_extension("exp");
        // `file_stem` is the file name minus its final extension —
        // equivalent to the old `with_extension("").file_name()`.
        let name = p.file_stem().unwrap().to_string_lossy().to_string();
        if exp_p.exists() {
            let exp = std::fs::read_to_string(&exp_p)?;
            files.insert(name, (src, ExpTyp::Text((exp_p, exp))));
        } else {
            files.insert(name, (src, ExpTyp::Path(exp_p)));
        }
    }
    Ok(files)
}
/// Gathers the test files from the `tokeniser` and `parser`
/// subdirectories of `path`.
fn collect_all_files(path: &Path) -> anyhow::Result<CollectedFiles> {
    // `Path::join` builds each subdirectory path directly, avoiding
    // the clone-then-push dance.
    let tokeniser = collect_files_for_single_type(&path.join("tokeniser"))?;
    let parser = collect_files_for_single_type(&path.join("parser"))?;
    Ok(CollectedFiles { tokeniser, parser })
}
/// Runs the tokeniser test suite. With `compile == true`, (re)writes
/// each test's `*.exp` file from the current tokeniser output instead
/// of comparing against it.
///
/// Returns the number of failing tests; `Err` only for I/O failures
/// while writing expectation files.
fn test_tokeniser(cf: &CollectedFiles, compile: bool) -> anyhow::Result<usize> {
    let mut err_count = 0;
    for (name, (src, expected)) in &cf.tokeniser {
        let tokens = match mclangc::tokeniser::tokenise(src, &format!("tokeniser/{name}.mcl")) {
            Ok(v) => v,
            Err(e) => {
                crate::error!("Test tokeniser/{name} had an error: {e}");
                err_count += 1;
                continue;
            }
        };
        if compile {
            let path = expected.path();
            if path.exists() {
                crate::info!("Test tokeniser/{name} already has a *.exp file, overwriting");
            } else {
                // Message fixed: was the garbled "doesnt a *.exp file".
                crate::info!("Test tokeniser/{name} doesn't have a *.exp file, creating");
            }
            // `fs::write` creates-or-truncates, matching the previous
            // `File::options().write(true).truncate(true).create(true)` chain.
            std::fs::write(path, format!("{tokens:#?}"))?;
        } else {
            let ExpTyp::Text((_, exp)) = expected else {
                crate::warn!("Test tokeniser/{name} doesn't have a *.exp file, please make it by running 'test compile'");
                continue;
            };
            // Expectations are the `Debug` pretty-print of the token stream.
            if format!("{tokens:#?}") == *exp {
                crate::info!("Test tokeniser/{name}: OK");
            } else {
                crate::error!("Test tokeniser/{name}: FAIL");
                crate::debug!("Expected: {exp}");
                crate::debug!("Got: {tokens:#?}");
                err_count += 1;
            }
        }
    }
    Ok(err_count)
}
/// Runs the parser test suite (tokenise then parse each source). With
/// `compile == true`, (re)writes each test's `*.exp` file from the
/// current AST instead of comparing against it.
///
/// Returns the number of failing tests; `Err` only for I/O failures
/// while writing expectation files.
fn test_parser(cf: &CollectedFiles, compile: bool) -> anyhow::Result<usize> {
    let mut err_count = 0;
    for (name, (src, expected)) in &cf.parser {
        let tokens = match mclangc::tokeniser::tokenise(src, &format!("parser/{name}.mcl")) {
            Ok(v) => v,
            Err(e) => {
                crate::error!("Test parser/{name} had an error: {e}");
                err_count += 1;
                continue;
            }
        };
        let ast = match mclangc::parser::parse_program(tokens) {
            Ok(v) => v,
            Err(e) => {
                crate::error!("Test parser/{name} had an error: {e}");
                err_count += 1;
                continue;
            }
        };
        if compile {
            let path = expected.path();
            if path.exists() {
                crate::info!("Test parser/{name} already has a *.exp file, overwriting");
            } else {
                // Message fixed: was the garbled "doesnt a *.exp file".
                crate::info!("Test parser/{name} doesn't have a *.exp file, creating");
            }
            // `fs::write` creates-or-truncates, matching the previous
            // `File::options().write(true).truncate(true).create(true)` chain.
            std::fs::write(path, format!("{ast:#?}"))?;
        } else {
            let ExpTyp::Text((_, exp)) = expected else {
                crate::warn!("Test parser/{name} doesn't have a *.exp file, please make it by running 'test compile'");
                continue;
            };
            // Expectations are the `Debug` pretty-print of the AST.
            if format!("{ast:#?}") == *exp {
                crate::info!("Test parser/{name}: OK");
            } else {
                crate::error!("Test parser/{name}: FAIL");
                crate::debug!("Expected: {exp}");
                crate::debug!("Got: {ast:#?}");
                err_count += 1;
            }
        }
    }
    Ok(err_count)
}
/// Runs both test suites and returns the combined failure count.
fn test(cf: &CollectedFiles, compile: bool) -> anyhow::Result<usize> {
    // `cf` is already a `&CollectedFiles`; re-borrowing it (`&cf`)
    // produced a needless `&&` (clippy: `needless_borrow`).
    Ok(test_tokeniser(cf, compile)? + test_parser(cf, compile)?)
}
fn main() -> ExitCode {
let cli = CliArgs::parse();
let cf = match collect_all_files(cli.path.as_std_path()) {
Ok(v) => v,
Err(e) => {
crate::error!("Failed to read directory '{}', do you have permission to read it?: {e}", cli.path);
return ExitCode::FAILURE;
}
};
let ec = match cli.cmd {
CliCmd::Run => {
match test(&cf, false) {
Ok(v) => v,
Err(e) => {
crate::error!("Had an error: {e}");
return ExitCode::FAILURE;
}
}
}
CliCmd::Compile => {
match test(&cf, true) {
Ok(v) => v,
Err(e) => {
crate::error!("Had an error: {e}");
return ExitCode::FAILURE;
}
}
}
};
if ec > 0 {
crate::error!("Testing FAILED, had {ec} errors");
return ExitCode::FAILURE;
} else {
crate::info!("Testing SUCCEEDED, had 0 errors");
}
ExitCode::SUCCESS
} }

View File

@ -4,8 +4,8 @@
fn main() -> anyhow::Result<()> { fn main() -> anyhow::Result<()> {
let data = std::fs::read_to_string("test.mcl").unwrap(); let data = std::fs::read_to_string("test.mcl").unwrap();
let tokens = mclangc::tokeniser::tokenise(&data)?; let tokens = mclangc::tokeniser::tokenise(&data, "test.mcl")?;
let prog = parser::parse_program(tokens)?; let prog = mclangc::parser::parse_program(tokens)?;
validator::validate_code(&prog); mclangc::validator::validate_code(&prog);
Ok(()) Ok(())
} }

View File

@ -33,8 +33,8 @@ impl Token {
} }
pub fn tokenise(s: &str) -> anyhow::Result<Vec<Token>> { pub fn tokenise(s: &str, file_p: &str) -> anyhow::Result<Vec<Token>> {
let mut loc = Loc::default(); let mut loc = Loc::new(file_p, 1, 1);
let mut tokens = Vec::new(); let mut tokens = Vec::new();
let chars: Vec<_> = s.chars().collect(); let chars: Vec<_> = s.chars().collect();
let mut chars = chars.iter().peekable(); let mut chars = chars.iter().peekable();
@ -70,44 +70,52 @@ pub fn tokenise(s: &str) -> anyhow::Result<Vec<Token>> {
// tokens.push(Token::new(TokenType::Comment(Comment::Line(buf.clone())), &loc)); // tokens.push(Token::new(TokenType::Comment(Comment::Line(buf.clone())), &loc));
} }
'\n' => loc.inc_line(), '\n' => loc.inc_line(),
'"' | '\'' | '"' => {
'c' if *c != 'c' || chars.peek() == Some(&&'"') => {
let str_typ = *c;
let mut sc = *c;
if *c == 'c' {
sc = '"';
chars.peek();
}
let mut last = '\0'; let mut last = '\0';
let mut buf = String::new(); let mut buf = String::new();
while let Some(c) = chars.next_if(|v| **v != '\n') { while let Some(c) = chars.next_if(|v| **v != '\n') {
loc.inc_col(); loc.inc_col();
if *c == sc && last != '\\' { if *c == '"' && last != '\\' {
break; break;
} }
buf.push(*c); buf.push(*c);
last = *c; last = *c;
} }
tokens.push(Token::new(TokenType::string(&buf, false), &loc));
match str_typ { }
'"' => { '\'' => {
tokens.push(Token::new(TokenType::string(&buf, false), &loc)); let mut last = '\0';
let mut buf = String::new();
while let Some(c) = chars.next_if(|v| **v != '\n') {
loc.inc_col();
if *c == '\'' && last != '\\' {
break;
} }
'c' => { buf.push(*c);
tokens.push(Token::new(TokenType::string(&buf, true), &loc)); last = *c;
}
'\'' => {
let buf = buf
.replace("\\n", "\n")
.replace("\\r", "\r");
if buf.len() > 1 {
lerror!(&loc, "Chars can only have 1 byte");
bail!("")
}
tokens.push(Token::new(TokenType::char(buf.chars().nth(0).unwrap()), &loc));
}
_ => unreachable!()
} }
let buf = buf
.replace("\\n", "\n")
.replace("\\r", "\r");
if buf.len() > 1 {
lerror!(&loc, "Chars can only have 1 byte");
bail!("")
}
tokens.push(Token::new(TokenType::char(buf.chars().nth(0).unwrap()), &loc));
}
'c' if chars.peek() == Some(&&'"') => {
chars.next();
let mut last = '\0';
let mut buf = String::new();
while let Some(c) = chars.next_if(|v| **v != '\n') {
loc.inc_col();
if *c == '"' && last != '\\' {
break;
}
buf.push(*c);
last = *c;
}
tokens.push(Token::new(TokenType::string(&buf, true), &loc));
} }
'a'..='z' | 'A'..='Z' | '_' => { 'a'..='z' | 'A'..='Z' | '_' => {
let mut buf = String::new(); let mut buf = String::new();
@ -139,12 +147,13 @@ pub fn tokenise(s: &str) -> anyhow::Result<Vec<Token>> {
'o' => radix = 8, 'o' => radix = 8,
_ => (), _ => (),
} }
}, },
None => { None => {
tokens.push(Token::new(TokenType::number(parse(&buf).unwrap(), radix, signed), &loc)); tokens.push(Token::new(TokenType::number(parse(&buf).unwrap(), radix, signed), &loc));
} }
} }
while let Some(c) = chars.next_if(|v| matches!(**v, '0'..='9' | '.' | 'a'..='f' | 'A'..='F')) { while let Some(c) = chars.next_if(|v| matches!(**v, '0'..='9' | '.' | 'a'..='f' | 'A'..='F' | 'x' | 'o')) {
loc.inc_col(); loc.inc_col();
buf.push(*c); buf.push(*c);
} }
@ -268,8 +277,8 @@ lazy_static::lazy_static!(
("|", TokenType::Punct(Punctuation::Or)), ("|", TokenType::Punct(Punctuation::Or)),
(">", TokenType::Punct(Punctuation::Gt)), (">", TokenType::Punct(Punctuation::Gt)),
("<", TokenType::Punct(Punctuation::Lt)), ("<", TokenType::Punct(Punctuation::Lt)),
(">=", TokenType::Punct(Punctuation::Ge)), (">=", TokenType::Punct(Punctuation::Ge)),
("<=", TokenType::Punct(Punctuation::Le)), ("<=", TokenType::Punct(Punctuation::Le)),
("^", TokenType::Punct(Punctuation::Xor)), ("^", TokenType::Punct(Punctuation::Xor)),
("+=", TokenType::Punct(Punctuation::AddEq)), ("+=", TokenType::Punct(Punctuation::AddEq)),
("-=", TokenType::Punct(Punctuation::SubEq)), ("-=", TokenType::Punct(Punctuation::SubEq)),

View File

@ -0,0 +1,10 @@
Program {
ast: Block(
[],
),
structs: {},
enums: {},
types: {},
functions: {},
member_functions: {},
}

View File

@ -0,0 +1,10 @@
Program {
ast: Block(
[],
),
structs: {},
enums: {},
types: {},
functions: {},
member_functions: {},
}

View File

@ -0,0 +1,10 @@
Program {
ast: Block(
[],
),
structs: {},
enums: {},
types: {},
functions: {},
member_functions: {},
}

View File

@ -0,0 +1,10 @@
Program {
ast: Block(
[],
),
structs: {},
enums: {},
types: {},
functions: {},
member_functions: {},
}

10
tests/parser/loops.exp Normal file
View File

@ -0,0 +1,10 @@
Program {
ast: Block(
[],
),
structs: {},
enums: {},
types: {},
functions: {},
member_functions: {},
}

10
tests/parser/structs.exp Normal file
View File

@ -0,0 +1,10 @@
Program {
ast: Block(
[],
),
structs: {},
enums: {},
types: {},
functions: {},
member_functions: {},
}

View File

@ -0,0 +1 @@
[]

View File

@ -0,0 +1,7 @@
// Hello, this is a single line comment
/*
And this is a multiline comment, which is
useful for longer documentation
*/

View File

@ -0,0 +1,62 @@
[
Token {
loc: Loc {
file: "tokeniser/delimiters.mcl",
line: 3,
col: 4,
},
tt: Delim(
ParenR,
),
},
Token {
loc: Loc {
file: "tokeniser/delimiters.mcl",
line: 3,
col: 2,
},
tt: Delim(
ParenL,
),
},
Token {
loc: Loc {
file: "tokeniser/delimiters.mcl",
line: 2,
col: 4,
},
tt: Delim(
CurlyR,
),
},
Token {
loc: Loc {
file: "tokeniser/delimiters.mcl",
line: 2,
col: 2,
},
tt: Delim(
CurlyL,
),
},
Token {
loc: Loc {
file: "tokeniser/delimiters.mcl",
line: 1,
col: 4,
},
tt: Delim(
SquareR,
),
},
Token {
loc: Loc {
file: "tokeniser/delimiters.mcl",
line: 1,
col: 2,
},
tt: Delim(
SquareL,
),
},
]

View File

@ -0,0 +1,3 @@
[ ]
{ }
( )

View File

@ -0,0 +1,212 @@
[
Token {
loc: Loc {
file: "tokeniser/keywords.mcl",
line: 21,
col: 5,
},
tt: Keyword(
Loop,
),
},
Token {
loc: Loc {
file: "tokeniser/keywords.mcl",
line: 20,
col: 3,
},
tt: Keyword(
As,
),
},
Token {
loc: Loc {
file: "tokeniser/keywords.mcl",
line: 19,
col: 7,
},
tt: Keyword(
Return,
),
},
Token {
loc: Loc {
file: "tokeniser/keywords.mcl",
line: 18,
col: 7,
},
tt: Keyword(
Extern,
),
},
Token {
loc: Loc {
file: "tokeniser/keywords.mcl",
line: 17,
col: 8,
},
tt: Keyword(
Include,
),
},
Token {
loc: Loc {
file: "tokeniser/keywords.mcl",
line: 16,
col: 6,
},
tt: Keyword(
False,
),
},
Token {
loc: Loc {
file: "tokeniser/keywords.mcl",
line: 15,
col: 5,
},
tt: Keyword(
True,
),
},
Token {
loc: Loc {
file: "tokeniser/keywords.mcl",
line: 14,
col: 7,
},
tt: Keyword(
Static,
),
},
Token {
loc: Loc {
file: "tokeniser/keywords.mcl",
line: 13,
col: 4,
},
tt: Keyword(
Mut,
),
},
Token {
loc: Loc {
file: "tokeniser/keywords.mcl",
line: 12,
col: 6,
},
tt: Keyword(
Const,
),
},
Token {
loc: Loc {
file: "tokeniser/keywords.mcl",
line: 11,
col: 4,
},
tt: Keyword(
Let,
),
},
Token {
loc: Loc {
file: "tokeniser/keywords.mcl",
line: 10,
col: 9,
},
tt: Keyword(
Continue,
),
},
Token {
loc: Loc {
file: "tokeniser/keywords.mcl",
line: 9,
col: 6,
},
tt: Keyword(
Break,
),
},
Token {
loc: Loc {
file: "tokeniser/keywords.mcl",
line: 8,
col: 4,
},
tt: Keyword(
For,
),
},
Token {
loc: Loc {
file: "tokeniser/keywords.mcl",
line: 7,
col: 6,
},
tt: Keyword(
While,
),
},
Token {
loc: Loc {
file: "tokeniser/keywords.mcl",
line: 6,
col: 5,
},
tt: Keyword(
Type,
),
},
Token {
loc: Loc {
file: "tokeniser/keywords.mcl",
line: 5,
col: 5,
},
tt: Keyword(
Enum,
),
},
Token {
loc: Loc {
file: "tokeniser/keywords.mcl",
line: 4,
col: 7,
},
tt: Keyword(
Struct,
),
},
Token {
loc: Loc {
file: "tokeniser/keywords.mcl",
line: 3,
col: 5,
},
tt: Keyword(
Else,
),
},
Token {
loc: Loc {
file: "tokeniser/keywords.mcl",
line: 2,
col: 3,
},
tt: Keyword(
If,
),
},
Token {
loc: Loc {
file: "tokeniser/keywords.mcl",
line: 1,
col: 3,
},
tt: Keyword(
Fn,
),
},
]

View File

@ -0,0 +1,21 @@
fn
if
else
struct
enum
type
while
for
break
continue
let
const
mut
static
true
false
include
extern
return
as
loop

View File

@ -0,0 +1,96 @@
[
Token {
loc: Loc {
file: "tokeniser/literals.mcl",
line: 7,
col: 11,
},
tt: Number(
Number {
val: 173,
base: 2,
signed: false,
},
),
},
Token {
loc: Loc {
file: "tokeniser/literals.mcl",
line: 6,
col: 8,
},
tt: Number(
Number {
val: 13633,
base: 8,
signed: false,
},
),
},
Token {
loc: Loc {
file: "tokeniser/literals.mcl",
line: 5,
col: 9,
},
tt: Number(
Number {
val: 16759299,
base: 16,
signed: false,
},
),
},
Token {
loc: Loc {
file: "tokeniser/literals.mcl",
line: 4,
col: 3,
},
tt: Number(
Number {
val: 21,
base: 10,
signed: false,
},
),
},
Token {
loc: Loc {
file: "tokeniser/literals.mcl",
line: 3,
col: 22,
},
tt: String(
TString {
val: "this is a c string!",
cstr: true,
},
),
},
Token {
loc: Loc {
file: "tokeniser/literals.mcl",
line: 2,
col: 27,
},
tt: String(
TString {
val: "this is a normal string!",
cstr: false,
},
),
},
Token {
loc: Loc {
file: "tokeniser/literals.mcl",
line: 1,
col: 4,
},
tt: Char(
Char(
'c',
),
),
},
]

View File

@ -0,0 +1,8 @@
'c'
"this is a normal string!"
c"this is a c string!"
21
0xFfbA03
0o32501
0b10101101

View File

@ -0,0 +1,382 @@
[
Token {
loc: Loc {
file: "tokeniser/punctuation.mcl",
line: 38,
col: 2,
},
tt: Punct(
Pathaccess,
),
},
Token {
loc: Loc {
file: "tokeniser/punctuation.mcl",
line: 37,
col: 2,
},
tt: Punct(
Fieldaccess,
),
},
Token {
loc: Loc {
file: "tokeniser/punctuation.mcl",
line: 36,
col: 2,
},
tt: Punct(
Neq,
),
},
Token {
loc: Loc {
file: "tokeniser/punctuation.mcl",
line: 35,
col: 2,
},
tt: Punct(
EqEq,
),
},
Token {
loc: Loc {
file: "tokeniser/punctuation.mcl",
line: 34,
col: 2,
},
tt: Punct(
Eq,
),
},
Token {
loc: Loc {
file: "tokeniser/punctuation.mcl",
line: 33,
col: 2,
},
tt: Punct(
XorEq,
),
},
Token {
loc: Loc {
file: "tokeniser/punctuation.mcl",
line: 32,
col: 2,
},
tt: Punct(
OrEq,
),
},
Token {
loc: Loc {
file: "tokeniser/punctuation.mcl",
line: 31,
col: 2,
},
tt: Punct(
AndEq,
),
},
Token {
loc: Loc {
file: "tokeniser/punctuation.mcl",
line: 30,
col: 2,
},
tt: Punct(
ShrEq,
),
},
Token {
loc: Loc {
file: "tokeniser/punctuation.mcl",
line: 29,
col: 2,
},
tt: Punct(
ShlEq,
),
},
Token {
loc: Loc {
file: "tokeniser/punctuation.mcl",
line: 28,
col: 2,
},
tt: Punct(
ModEq,
),
},
Token {
loc: Loc {
file: "tokeniser/punctuation.mcl",
line: 27,
col: 2,
},
tt: Punct(
MulEq,
),
},
Token {
loc: Loc {
file: "tokeniser/punctuation.mcl",
line: 26,
col: 2,
},
tt: Punct(
DivEq,
),
},
Token {
loc: Loc {
file: "tokeniser/punctuation.mcl",
line: 25,
col: 2,
},
tt: Punct(
SubEq,
),
},
Token {
loc: Loc {
file: "tokeniser/punctuation.mcl",
line: 24,
col: 2,
},
tt: Punct(
AddEq,
),
},
Token {
loc: Loc {
file: "tokeniser/punctuation.mcl",
line: 23,
col: 2,
},
tt: Punct(
Xor,
),
},
Token {
loc: Loc {
file: "tokeniser/punctuation.mcl",
line: 22,
col: 2,
},
tt: Punct(
Le,
),
},
Token {
loc: Loc {
file: "tokeniser/punctuation.mcl",
line: 21,
col: 2,
},
tt: Punct(
Ge,
),
},
Token {
loc: Loc {
file: "tokeniser/punctuation.mcl",
line: 20,
col: 2,
},
tt: Punct(
Lt,
),
},
Token {
loc: Loc {
file: "tokeniser/punctuation.mcl",
line: 19,
col: 2,
},
tt: Punct(
Gt,
),
},
Token {
loc: Loc {
file: "tokeniser/punctuation.mcl",
line: 18,
col: 2,
},
tt: Punct(
Or,
),
},
Token {
loc: Loc {
file: "tokeniser/punctuation.mcl",
line: 17,
col: 2,
},
tt: Punct(
OrOr,
),
},
Token {
loc: Loc {
file: "tokeniser/punctuation.mcl",
line: 16,
col: 2,
},
tt: Punct(
AndAnd,
),
},
Token {
loc: Loc {
file: "tokeniser/punctuation.mcl",
line: 15,
col: 2,
},
tt: Punct(
Shr,
),
},
Token {
loc: Loc {
file: "tokeniser/punctuation.mcl",
line: 14,
col: 2,
},
tt: Punct(
Shl,
),
},
Token {
loc: Loc {
file: "tokeniser/punctuation.mcl",
line: 13,
col: 2,
},
tt: Punct(
Mod,
),
},
Token {
loc: Loc {
file: "tokeniser/punctuation.mcl",
line: 12,
col: 2,
},
tt: Punct(
Div,
),
},
Token {
loc: Loc {
file: "tokeniser/punctuation.mcl",
line: 11,
col: 2,
},
tt: Punct(
Not,
),
},
Token {
loc: Loc {
file: "tokeniser/punctuation.mcl",
line: 10,
col: 2,
},
tt: Punct(
Star,
),
},
Token {
loc: Loc {
file: "tokeniser/punctuation.mcl",
line: 9,
col: 2,
},
tt: Punct(
Ampersand,
),
},
Token {
loc: Loc {
file: "tokeniser/punctuation.mcl",
line: 8,
col: 2,
},
tt: Punct(
Comma,
),
},
Token {
loc: Loc {
file: "tokeniser/punctuation.mcl",
line: 7,
col: 2,
},
tt: Punct(
Minus,
),
},
Token {
loc: Loc {
file: "tokeniser/punctuation.mcl",
line: 6,
col: 2,
},
tt: Punct(
Plus,
),
},
Token {
loc: Loc {
file: "tokeniser/punctuation.mcl",
line: 5,
col: 2,
},
tt: Punct(
FatArrow,
),
},
Token {
loc: Loc {
file: "tokeniser/punctuation.mcl",
line: 4,
col: 2,
},
tt: Punct(
Arrow,
),
},
Token {
loc: Loc {
file: "tokeniser/punctuation.mcl",
line: 3,
col: 2,
},
tt: Punct(
Pathaccess,
),
},
Token {
loc: Loc {
file: "tokeniser/punctuation.mcl",
line: 2,
col: 2,
},
tt: Punct(
Colon,
),
},
Token {
loc: Loc {
file: "tokeniser/punctuation.mcl",
line: 1,
col: 2,
},
tt: Punct(
Semi,
),
},
]

View File

@ -0,0 +1,39 @@
;
:
::
->
=>
+
-
,
&
*
!
/
%
<<
>>
&&
||
|
>
<
>=
<=
^
+=
-=
/=
*=
%=
<<=
>>=
&=
|=
^=
=
==
!=
.
::