MCLang, now with testing!

This commit is contained in:
2024-12-21 05:10:03 +02:00
parent 54b6df5862
commit debcf6ad6c
26 changed files with 1210 additions and 41 deletions

63
src/bin/test/logger.rs Normal file
View File

@@ -0,0 +1,63 @@
/// Log verbosity level, ordered from least (`Off`) to most (`Debug`) verbose.
///
/// `repr(u8)` pins the discriminants (`Off == 0`, increasing from there), and
/// the ordering derives allow levels to be compared for threshold filtering.
#[repr(u8)]
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum Level {
    /// Suppresses the message entirely.
    Off = 0,
    Error,
    Warn,
    /// The default verbosity.
    #[default]
    Info,
    Help,
    Debug,
}
// ANSI SGR escape sequences used to colour the log level tags.
// `'static` is implied on const string slices, so plain `&str` suffices
// (clippy: redundant_static_lifetimes).
const C_RESET: &str = "\x1B[0m";
const C_ERROR: &str = "\x1B[1;31m"; // bold red
const C_WARN: &str = "\x1B[1;33m"; // bold yellow
const C_INFO: &str = "\x1B[1;32m"; // bold green
const C_DEBUG: &str = "\x1B[1;35m"; // bold magenta
const C_HELP: &str = "\x1B[1;36m"; // bold cyan
pub fn _log(level: Level, str: &str) {
match level {
Level::Off => return,
Level::Error => println!("{C_ERROR}error{C_RESET}: {str}"),
Level::Warn => println!("{C_WARN}warn{C_RESET}: {str}"),
Level::Info => println!("{C_INFO}info{C_RESET}: {str}"),
Level::Help => println!("{C_HELP}help{C_RESET}: {str}"),
Level::Debug => println!("{C_DEBUG}debug{C_RESET}: {str}"),
}
}
// Convenience logging macros. `#[macro_use]` on the module plus
// `#[macro_export]` on each macro makes `error!`, `warn!`, `info!`, `help!`
// and `debug!` available crate-wide. Every macro simply forwards its
// format-string arguments to `crate::logger::_log` at the matching `Level`.
#[macro_use]
pub mod log {
// Logs at `Level::Error` (coloured "error:" prefix).
#[macro_export]
macro_rules! error {
($($arg:tt)*) => {
crate::logger::_log(crate::logger::Level::Error, &format!($($arg)*))
};
}
// Logs at `Level::Warn` (coloured "warn:" prefix).
// NOTE(review): `warn!` shares its name with the built-in `warn` lint
// attribute; presumably this resolves fine at call sites, but confirm no
// ambiguity errors appear on newer editions.
#[macro_export]
macro_rules! warn {
($($arg:tt)*) => {
crate::logger::_log(crate::logger::Level::Warn, &format!($($arg)*))
};
}
// Logs at `Level::Info` (coloured "info:" prefix).
#[macro_export]
macro_rules! info {
($($arg:tt)*) => {
crate::logger::_log(crate::logger::Level::Info, &format!($($arg)*))
};
}
// Logs at `Level::Help` (coloured "help:" prefix).
#[macro_export]
macro_rules! help {
($($arg:tt)*) => {
crate::logger::_log(crate::logger::Level::Help, &format!($($arg)*))
};
}
// Logs at `Level::Debug` (coloured "debug:" prefix).
#[macro_export]
macro_rules! debug {
($($arg:tt)*) => {
crate::logger::_log(crate::logger::Level::Debug, &format!($($arg)*))
};
}
}

View File

@@ -1,8 +1,19 @@
use std::{collections::HashMap, ffi::OsStr, io::Write, os::unix::ffi::OsStrExt, path::{Path, PathBuf}, process::ExitCode};
use anyhow::bail;
use camino::Utf8PathBuf;
use clap::Parser;
use mclangc;
#[macro_use]
mod logger;
/// Testing program for mclangc, taken inspiration from porth, which was made by tsoding :3
#[derive(Debug, clap::Parser)]
#[command(version, about, long_about = None)]
struct CliArgs {
/// Path to the test folder
#[arg(long, short, default_value="./tests")]
path: Utf8PathBuf,
// Subcommand selecting what to do. Kept as a plain `//` comment on
// purpose: a `///` doc comment here would become clap help text and
// change the program's --help output.
#[clap(subcommand)]
cmd: CliCmd
}
@@ -15,9 +26,196 @@ pub enum CliCmd {
Compile
}
fn main() -> anyhow::Result<()> {
Ok(())
// Test sources discovered on disk, split by test suite. Each map is keyed
// by the test name (the *.mcl file stem) and holds the source text together
// with its expected output (see `ExpTyp`).
struct CollectedFiles {
tokeniser: HashMap<String, (String, ExpTyp)>,
parser: HashMap<String, (String, ExpTyp)>,
}
/// Expected output (*.exp) of a test case.
enum ExpTyp {
    /// The *.exp file already exists: its path and its contents.
    Text((PathBuf, String)),
    /// The *.exp file does not exist yet: the path where it should be written.
    Path(PathBuf),
}

impl ExpTyp {
    /// Location of the expectation file, whether or not it exists yet.
    pub fn path(&self) -> &Path {
        // Both variants carry the path in the same position, so a single
        // or-pattern arm covers them.
        match self {
            Self::Text((p, _)) | Self::Path(p) => p,
        }
    }
}
/// Scans `path` for `*.mcl` test sources.
///
/// Returns a map from test name (the file stem) to the source text and the
/// expectation: `ExpTyp::Text` when a sibling `*.exp` file already exists,
/// `ExpTyp::Path` (where the file should be created) otherwise.
///
/// # Errors
/// Fails if the directory cannot be read or any file cannot be opened.
fn collect_files_for_single_type(path: &Path) -> anyhow::Result<HashMap<String, (String, ExpTyp)>> {
    let mut files = HashMap::new();
    for file in path.read_dir()? {
        let file = file?;
        if !file.file_type()?.is_file() {
            continue;
        }
        // `DirEntry::path` allocates a fresh PathBuf; call it once.
        let file_p = file.path();
        // `OsStr::new` is portable, unlike the unix-only `OsStr::from_bytes`.
        if file_p.extension() != Some(OsStr::new("mcl")) {
            continue;
        }
        let src = std::fs::read_to_string(&file_p)?;
        let exp_p = file_p.with_extension("exp");
        // `file_stem` replaces the old `with_extension("")` + `file_name` dance;
        // unwrap is safe: entries with an `mcl` extension always have a stem.
        let name = file_p.file_stem().unwrap().to_string_lossy().to_string();
        let expected = if exp_p.exists() {
            let exp = std::fs::read_to_string(&exp_p)?;
            ExpTyp::Text((exp_p, exp))
        } else {
            ExpTyp::Path(exp_p)
        };
        files.insert(name, (src, expected));
    }
    Ok(files)
}
/// Gathers tokeniser and parser test files from the `tokeniser/` and
/// `parser/` subdirectories of `path`.
///
/// # Errors
/// Fails if either subdirectory cannot be read.
fn collect_all_files(path: &Path) -> anyhow::Result<CollectedFiles> {
    // `Path::join` replaces the clone-then-push dance of the original.
    Ok(CollectedFiles {
        tokeniser: collect_files_for_single_type(&path.join("tokeniser"))?,
        parser: collect_files_for_single_type(&path.join("parser"))?,
    })
}
/// Runs every tokeniser test in `cf`.
///
/// With `compile == false` the tokeniser output is compared against the
/// stored `*.exp` contents; with `compile == true` the `*.exp` file is
/// (re)generated from the current output instead.
///
/// Returns the number of failed tests.
///
/// # Errors
/// Only I/O errors while writing expectation files are propagated; test
/// failures are counted, not returned as errors.
fn test_tokeniser(cf: &CollectedFiles, compile: bool) -> anyhow::Result<usize> {
    let mut err_count = 0;
    for (name, (src, expected)) in &cf.tokeniser {
        let tokens = match mclangc::tokeniser::tokenise(src, &format!("tokeniser/{name}.mcl")) {
            Ok(v) => v,
            Err(e) => {
                crate::error!("Test tokeniser/{name} had an error: {e}");
                err_count += 1;
                continue;
            }
        };
        if compile {
            let path = expected.path();
            if path.exists() {
                crate::info!("Test tokeniser/{name} already has a *.exp file, overwriting");
            } else {
                // Fixed message: was the broken "doesnt a *.exp file, creating".
                crate::info!("Test tokeniser/{name} doesn't have a *.exp file, creating it");
            }
            let mut fp = std::fs::File::options()
                .write(true)
                .truncate(true)
                .create(true)
                .open(path)?;
            write!(fp, "{tokens:#?}")?;
        } else {
            let ExpTyp::Text((_, exp)) = expected else {
                crate::warn!("Test tokeniser/{name} doesn't have a *.exp file, please make it by running 'test compile'");
                continue;
            };
            // Expected output is the Debug pretty-print of the token stream.
            if format!("{tokens:#?}") == *exp {
                crate::info!("Test tokeniser/{name}: OK");
            } else {
                crate::error!("Test tokeniser/{name}: FAIL");
                crate::debug!("Expected: {exp}");
                crate::debug!("Got: {tokens:#?}");
                err_count += 1;
            }
        }
    }
    Ok(err_count)
}
/// Runs every parser test in `cf` (tokenise, then parse).
///
/// With `compile == false` the AST's Debug pretty-print is compared against
/// the stored `*.exp` contents; with `compile == true` the `*.exp` file is
/// (re)generated from the current output instead.
///
/// Returns the number of failed tests.
///
/// # Errors
/// Only I/O errors while writing expectation files are propagated; test
/// failures are counted, not returned as errors.
fn test_parser(cf: &CollectedFiles, compile: bool) -> anyhow::Result<usize> {
    let mut err_count = 0;
    for (name, (src, expected)) in &cf.parser {
        let tokens = match mclangc::tokeniser::tokenise(src, &format!("parser/{name}.mcl")) {
            Ok(v) => v,
            Err(e) => {
                crate::error!("Test parser/{name} had an error: {e}");
                err_count += 1;
                continue;
            }
        };
        let ast = match mclangc::parser::parse_program(tokens) {
            Ok(v) => v,
            Err(e) => {
                crate::error!("Test parser/{name} had an error: {e}");
                err_count += 1;
                continue;
            }
        };
        if compile {
            let path = expected.path();
            if path.exists() {
                crate::info!("Test parser/{name} already has a *.exp file, overwriting");
            } else {
                // Fixed message: was the broken "doesnt a *.exp file, creating".
                crate::info!("Test parser/{name} doesn't have a *.exp file, creating it");
            }
            let mut fp = std::fs::File::options()
                .write(true)
                .truncate(true)
                .create(true)
                .open(path)?;
            write!(fp, "{ast:#?}")?;
        } else {
            let ExpTyp::Text((_, exp)) = expected else {
                crate::warn!("Test parser/{name} doesn't have a *.exp file, please make it by running 'test compile'");
                continue;
            };
            if format!("{ast:#?}") == *exp {
                crate::info!("Test parser/{name}: OK");
            } else {
                crate::error!("Test parser/{name}: FAIL");
                crate::debug!("Expected: {exp}");
                crate::debug!("Got: {ast:#?}");
                err_count += 1;
            }
        }
    }
    Ok(err_count)
}
/// Runs every test suite (tokeniser, then parser) and returns the combined
/// failure count.
///
/// # Errors
/// Propagates I/O errors from the individual suites.
fn test(cf: &CollectedFiles, compile: bool) -> anyhow::Result<usize> {
    // `cf` is already a reference; `&cf` was a needless double borrow.
    let mut err_count = test_tokeniser(cf, compile)?;
    err_count += test_parser(cf, compile)?;
    Ok(err_count)
}
/// Entry point: collects test files and either runs them (`Run`) or
/// regenerates their `*.exp` expectation files (`Compile`).
fn main() -> ExitCode {
    let cli = CliArgs::parse();
    let cf = match collect_all_files(cli.path.as_std_path()) {
        Ok(v) => v,
        Err(e) => {
            crate::error!("Failed to read directory '{}', do you have permission to read it?: {e}", cli.path);
            return ExitCode::FAILURE;
        }
    };
    // Both subcommands run the same pipeline; only the `compile` flag differs,
    // so the two previously-duplicated match arms collapse into one call.
    let compile = matches!(cli.cmd, CliCmd::Compile);
    let ec = match test(&cf, compile) {
        Ok(v) => v,
        Err(e) => {
            crate::error!("Had an error: {e}");
            return ExitCode::FAILURE;
        }
    };
    if ec > 0 {
        crate::error!("Testing FAILED, had {ec} errors");
        return ExitCode::FAILURE;
    }
    crate::info!("Testing SUCCEEDED, had 0 errors");
    ExitCode::SUCCESS
}

View File

@@ -4,8 +4,8 @@
fn main() -> anyhow::Result<()> {
let data = std::fs::read_to_string("test.mcl").unwrap();
let tokens = mclangc::tokeniser::tokenise(&data)?;
let prog = parser::parse_program(tokens)?;
validator::validate_code(&prog);
let tokens = mclangc::tokeniser::tokenise(&data, "test.mcl")?;
let prog = mclangc::parser::parse_program(tokens)?;
mclangc::validator::validate_code(&prog);
Ok(())
}

View File

@@ -33,8 +33,8 @@ impl Token {
}
pub fn tokenise(s: &str) -> anyhow::Result<Vec<Token>> {
let mut loc = Loc::default();
pub fn tokenise(s: &str, file_p: &str) -> anyhow::Result<Vec<Token>> {
let mut loc = Loc::new(file_p, 1, 1);
let mut tokens = Vec::new();
let chars: Vec<_> = s.chars().collect();
let mut chars = chars.iter().peekable();
@@ -70,44 +70,52 @@ pub fn tokenise(s: &str) -> anyhow::Result<Vec<Token>> {
// tokens.push(Token::new(TokenType::Comment(Comment::Line(buf.clone())), &loc));
}
'\n' => loc.inc_line(),
'"' | '\'' |
'c' if *c != 'c' || chars.peek() == Some(&&'"') => {
let str_typ = *c;
let mut sc = *c;
if *c == 'c' {
sc = '"';
chars.peek();
}
'"' => {
let mut last = '\0';
let mut buf = String::new();
while let Some(c) = chars.next_if(|v| **v != '\n') {
loc.inc_col();
if *c == sc && last != '\\' {
if *c == '"' && last != '\\' {
break;
}
buf.push(*c);
last = *c;
}
match str_typ {
'"' => {
tokens.push(Token::new(TokenType::string(&buf, false), &loc));
tokens.push(Token::new(TokenType::string(&buf, false), &loc));
}
'\'' => {
let mut last = '\0';
let mut buf = String::new();
while let Some(c) = chars.next_if(|v| **v != '\n') {
loc.inc_col();
if *c == '\'' && last != '\\' {
break;
}
'c' => {
tokens.push(Token::new(TokenType::string(&buf, true), &loc));
}
'\'' => {
let buf = buf
.replace("\\n", "\n")
.replace("\\r", "\r");
if buf.len() > 1 {
lerror!(&loc, "Chars can only have 1 byte");
bail!("")
}
tokens.push(Token::new(TokenType::char(buf.chars().nth(0).unwrap()), &loc));
}
_ => unreachable!()
buf.push(*c);
last = *c;
}
let buf = buf
.replace("\\n", "\n")
.replace("\\r", "\r");
if buf.len() > 1 {
lerror!(&loc, "Chars can only have 1 byte");
bail!("")
}
tokens.push(Token::new(TokenType::char(buf.chars().nth(0).unwrap()), &loc));
}
'c' if chars.peek() == Some(&&'"') => {
chars.next();
let mut last = '\0';
let mut buf = String::new();
while let Some(c) = chars.next_if(|v| **v != '\n') {
loc.inc_col();
if *c == '"' && last != '\\' {
break;
}
buf.push(*c);
last = *c;
}
tokens.push(Token::new(TokenType::string(&buf, true), &loc));
}
'a'..='z' | 'A'..='Z' | '_' => {
let mut buf = String::new();
@@ -139,12 +147,13 @@ pub fn tokenise(s: &str) -> anyhow::Result<Vec<Token>> {
'o' => radix = 8,
_ => (),
}
},
None => {
tokens.push(Token::new(TokenType::number(parse(&buf).unwrap(), radix, signed), &loc));
}
}
while let Some(c) = chars.next_if(|v| matches!(**v, '0'..='9' | '.' | 'a'..='f' | 'A'..='F')) {
while let Some(c) = chars.next_if(|v| matches!(**v, '0'..='9' | '.' | 'a'..='f' | 'A'..='F' | 'x' | 'o')) {
loc.inc_col();
buf.push(*c);
}
@@ -178,7 +187,7 @@ pub fn tokenise(s: &str) -> anyhow::Result<Vec<Token>> {
}
tokens.push(Token::new(TokenType::number(parse(&buf).unwrap(), radix, signed), &loc));
}
16 => {
16 => {
if buf.strip_prefix("0x").expect("Unreachable")
.chars().filter(|v| !matches!(v, '0'..='9' | 'a'..='f' | 'A'..='F')).collect::<Vec<_>>().len() > 0 {
lerror!(&loc, "Invalid character in hex number");
@@ -268,8 +277,8 @@ lazy_static::lazy_static!(
("|", TokenType::Punct(Punctuation::Or)),
(">", TokenType::Punct(Punctuation::Gt)),
("<", TokenType::Punct(Punctuation::Lt)),
(">=", TokenType::Punct(Punctuation::Ge)),
("<=", TokenType::Punct(Punctuation::Le)),
(">=", TokenType::Punct(Punctuation::Ge)),
("<=", TokenType::Punct(Punctuation::Le)),
("^", TokenType::Punct(Punctuation::Xor)),
("+=", TokenType::Punct(Punctuation::AddEq)),
("-=", TokenType::Punct(Punctuation::SubEq)),