added inclusion of files (new "include" directive)

parent 30214808e5
commit 2d5e94608c
@@ -312,10 +312,6 @@ pub fn compile(tokens: Vec<Operator>, args: Args) -> Result<()>{
                 }
                 ti += 1;
             },
 
-            OpType::Macro => {
-                panic!();
-            }
             OpType::Syscall0 => {
                 writeln!(writer, " ;; -- syscall0")?;
                 writeln!(writer, " pop rax")?;
@@ -387,7 +383,9 @@ pub fn compile(tokens: Vec<Operator>, args: Args) -> Result<()>{
                 writeln!(writer, " push rax")?;
                 ti += 1;
             },
-            OpType::None => unreachable!()
+            OpType::None => unreachable!(),
+            OpType::Macro => unreachable!(),
+            OpType::Include => unreachable!()
         }
     }
     writeln!(writer, "addr_{}:", ti)?;
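The unreachable!() arms added above (and mirrored in the interpreter further down) rely on the preprocessor having already expanded macros and spliced in included files before code generation runs, so OpType::Macro and OpType::Include should never reach the backend. A minimal, self-contained sketch of that pattern; the Op enum, expand() and codegen() are illustrative stand-ins, not the project's actual types:

enum Op { Push(u64), Include, Macro }

// Stand-in for the preprocessor: Include and Macro never survive this pass.
fn expand(ops: Vec<Op>) -> Vec<Op> {
    ops.into_iter().filter(|o| matches!(o, Op::Push(_))).collect()
}

fn codegen(ops: &[Op]) {
    for op in ops {
        match op {
            Op::Push(n) => println!("    push {}", n),
            // Reaching these would mean an earlier pass is wired wrong; fail loudly.
            Op::Include | Op::Macro => unreachable!(),
        }
    }
}

fn main() {
    codegen(&expand(vec![Op::Push(1), Op::Include, Op::Push(2)]));
}

Panicking here treats a surviving macro or include op as a compiler bug rather than a user error, which matches how the interpreter arms below are written as well.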
@@ -1,4 +1,7 @@
 
+pub const ALLOW_MACRO_REDEFINITION: bool = true;
+
+
 #[derive(Debug, Clone, PartialEq)]
 pub enum OpType {
 
@@ -39,6 +42,7 @@ pub enum OpType {
     While,
     Do,
     Macro,
+    Include,
 
     // syscalls
     Syscall0,
@@ -49,7 +53,7 @@ pub enum OpType {
     Syscall5,
     Syscall6,
 
-    None
+    None // Used for macros and any other non built in word definitions
 
 }
 
@@ -106,6 +110,7 @@ impl OpType {
             &OpType::While => "while",
             &OpType::Do => "do",
             &OpType::Macro => "macro",
+            &OpType::Include => "include",
             &OpType::Mem => "mem",
             &OpType::Load8 => "!8",
             &OpType::Store8 => "@8",
@@ -224,10 +224,6 @@ pub fn run(tokens: Vec<crate::constants::Operator>) -> Result<()>{
                     ti += 1;
                 }
             }
-            OpType::Macro => {
-                panic!();
-            }
-
             OpType::Syscall0 => {
                 todo!();
                 // ti += 1;
@@ -269,7 +265,9 @@ pub fn run(tokens: Vec<crate::constants::Operator>) -> Result<()>{
                 todo!();
                 // ti += 1;
             },
-            OpType::None => unreachable!()
+            OpType::None => unreachable!(),
+            OpType::Macro => unreachable!(),
+            OpType::Include => unreachable!()
         }
     }
 
src/lexer.rs (23 lines changed)
@@ -1,5 +1,5 @@
 
-use crate::{constants::{Token, TokenType}, preprocessor::preprocess};
+use crate::{constants::{Token, TokenType}, preprocessor::preprocess, Args};
 use color_eyre::Result;
 
 fn lex_word(s: String, tok_type: TokenType) -> (TokenType, String) {
@@ -68,22 +68,7 @@ fn lex_line(text: String) -> Result<Vec<(u32, String, TokenType)>> {
     Ok(tokens)
 }
 
-// fn lex_text(text: String) -> Result<Vec<Token>>{
-// let tokens: Vec<Token> = Vec::new();
-
-// let mut row = 0;
-// let mut col = 0;
-// let mut index = find_col(text.clone(), 0, |x| x.is_whitespace())?;
-
-// while index < text.len() as u32 {
-
-// }
-
-// Ok(tokens)
-// }
-
-
-pub fn lex(code: String, file: &String) -> Result<Vec<Token>> {
+pub fn lex(code: String, file: &String, args: Args, preprocessing: bool) -> Result<Vec<Token>> {
     let lines: Vec<(usize, &str)> = code
         .split(['\n', '\r'])
         .enumerate()
@@ -112,6 +97,8 @@ pub fn lex(code: String, file: &String) -> Result<Vec<Token>> {
     // for token in tokens.clone() {
     // println!("tok: {:?}", token.text);
     // }
-    tokens = preprocess(tokens)?;
+    if preprocessing {
+        tokens = preprocess(tokens, args)?;
+    }
     Ok(tokens)
 }
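The new preprocessing flag lets the preprocessor lex included files without preprocessing them again: their raw tokens are spliced into the token stream that the outer preprocess() pass is already walking (see the Include handling in the preprocessor hunk at the end of this commit). A small self-contained sketch of that switch, using a stand-in Token type and a no-op preprocess() rather than the crate's real ones; the file name std.mcl is made up:

#[derive(Debug, Clone)]
struct Token(String);

// Stand-in for the real preprocessor (macro expansion, include splicing).
fn preprocess(tokens: Vec<Token>) -> Vec<Token> {
    tokens
}

fn lex(code: &str, preprocessing: bool) -> Vec<Token> {
    let tokens: Vec<Token> = code
        .split_whitespace()
        .map(|w| Token(w.to_string()))
        .collect();
    if preprocessing {
        // Entry file: run the preprocessor over the fresh tokens.
        preprocess(tokens)
    } else {
        // Included file: hand back raw tokens; the caller splices them
        // into a stream that is already being preprocessed.
        tokens
    }
}

fn main() {
    let entry = lex("include \"std.mcl\" 34 35 +", true);
    let included = lex("34 35 +", false);
    println!("{} entry tokens, {} included tokens", entry.len(), included.len());
}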
src/main.rs (53 lines changed)
@@ -12,6 +12,10 @@ use color_eyre::Result;
 use clap::Parser;
 
 pub const DEFAULT_OUT_FILE: &str = "a.out";
+pub const DEFAULT_INCLUDES: [&str;2] = [
+    "./include",
+    "~/.mclang/include",
+];
 
 #[derive(Parser, Debug, Clone)]
 #[command(author, version, about, long_about = None)]
@@ -40,27 +44,64 @@ pub struct Args {
     #[arg(long, short)]
     quiet: bool,
 
+    /// Add an include directory [default: ["./include", "~/.mclang/include"]]
+    #[arg(long, short='I')]
+    include: Vec<String>,
+
+    //#[arg(long, short='F')]
+    //features: Vec<String>,
+
 }
 
 fn main() -> Result<()> {
     let args = Args::parse();
 
-    let code = fs::read_to_string(&args.in_file)?;
-    let tokens = lexer::lex(code, &args.in_file)?;
+    let code = match fs::read_to_string(&args.in_file) {
+        Ok(t) => t,
+        Err(_) => {
+            error!("Failed to read file {}, exiting!", &args.in_file);
+            return Ok(());
+        }
+    };
+    let tokens = match lexer::lex(code, &args.in_file, args.clone(), true) {
+        Ok(t) => t,
+        Err(_) => {
+            error!("Lexing failed, exiting!");
+            return Ok(());
+        }
+    };
 
     // for token in &tokens {
     // println!("(f: {}, l: {}, c: {}, t: {})", token.file, token.line, token.col, token.text);
     // }
 
     let mut parser = parser::Parser::new(tokens);
-    let tokens = parser.parse()?;
+    let tokens = match parser.parse() {
+        Ok(t) => t,
+        Err(_) => {
+            error!("Parsing failed, exiting!");
+            return Ok(());
+        }
+    };
     if args.compile && args.interpret {
         error!("Cannot compile and interpret at the same time");
     } else if args.interpret {
-        interpret::linux_x86_64::run(tokens)?;
+        match interpret::linux_x86_64::run(tokens) {
+            Ok(_) => (),
+            Err(_) => {
+                error!("Interpretation failed, exiting!");
+                return Ok(());
+            }
+        };
    } else if args.compile {
-        compile::linux_x86_64::compile(tokens, args)?;
+        match compile::linux_x86_64::compile(tokens, args) {
+            Ok(_) => (),
+            Err(_) => {
+                error!("Compilation failed, exiting!");
+                return Ok(());
+            }
+        };
     } else {
         error!("Did not choose to compile or to interpret, exiting");
     }
 
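main() now reports each failed stage through the crate's error! macro and returns early instead of propagating the error with ?, and the repeatable -I/--include flag feeds extra search directories to the preprocessor. If the repeated match blocks ever feel heavy, the same control flow could be factored into a small helper; a self-contained sketch of that idea (ok_or_report and example.mcl are made-up names, and eprintln! stands in for the crate's error! macro):

use std::fs;

// Hypothetical helper, not part of the commit: report an error and yield None
// so the caller can bail out of main() early, mirroring the new match blocks.
fn ok_or_report<T, E: std::fmt::Debug>(res: Result<T, E>, msg: &str) -> Option<T> {
    match res {
        Ok(t) => Some(t),
        Err(e) => {
            eprintln!("{}: {:?}", msg, e);
            None
        }
    }
}

fn main() {
    // Same control flow as the commit: stop at the first failed stage.
    let Some(code) = ok_or_report(fs::read_to_string("example.mcl"), "Failed to read file") else {
        return;
    };
    println!("read {} bytes", code.len());
}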
@@ -134,6 +134,7 @@ pub fn lookup_word<P: Deref<Target = (String, u32, u32)>>(s: String, _pos: P) ->
         ("while", OpType::While),
         ("do", OpType::Do),
         ("macro", OpType::Macro),
+        ("include", OpType::Include), // technically not but it fits next to macros
 
         // mem
         ("mem", OpType::Mem),
@@ -1,10 +1,12 @@
 use std::collections::HashMap;
+use std::path::PathBuf;
 
 use color_eyre::Result;
 use eyre::eyre;
 
 use crate::constants::{Token, Loc, OpType, TokenType};
-use crate::{lerror, lnote};
+use crate::lexer::lex;
+use crate::{lerror, lnote, Args};
 use crate::parser::lookup_word;
 
 #[derive(Debug)]
@@ -13,7 +15,7 @@ pub struct Macro {
     pub tokens: Vec<Token>
 }
 
-pub fn preprocess(tokens: Vec<Token>) -> Result<Vec<Token>>{
+pub fn preprocess(tokens: Vec<Token>, args: Args) -> Result<Vec<Token>>{
     let mut program: Vec<Token> = Vec::new();
     let mut macros: HashMap<String, Macro> = HashMap::new();
 
@@ -41,12 +43,13 @@ pub fn preprocess(tokens: Vec<Token>) -> Result<Vec<Token>>{
                     return Err(eyre!(""));
                 }
 
-                if macros.get(&macro_name.text.clone()).is_some() { //? Maybe allow?
-                    lerror!(&macro_name.loc(), "Macro redefinition is not allowed");
-                    lnote!(&macros.get(&macro_name.text.clone()).unwrap().loc, "First definition here");
-                    return Err(eyre!(""));
-                }
-
+                if crate::constants::ALLOW_MACRO_REDEFINITION {
+                    if macros.get(&macro_name.text.clone()).is_some() {
+                        lerror!(&macro_name.loc(), "Macro redefinition is not allowed");
+                        lnote!(&macros.get(&macro_name.text.clone()).unwrap().loc, "First definition here");
+                        return Err(eyre!(""));
+                    }
+                }
 
                 let mut macr = Macro{ loc: macro_name.loc(), tokens: Vec::new() };
 
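A note while reading this hunk: ALLOW_MACRO_REDEFINITION is set to true in constants, and the new guard raises the redefinition error precisely when the constant is true, so its effect reads inverted relative to its name (redefinition is rejected while "allow" is on). That may be intentional, but a self-contained sketch of the polarity the name suggests would look like this; check_redefinition and the toy macro table are illustrative only:

use std::collections::HashMap;

const ALLOW_MACRO_REDEFINITION: bool = true;

// Error only when a name is being redefined and redefinition is NOT allowed.
fn check_redefinition(macros: &HashMap<String, Vec<String>>, name: &str) -> Result<(), String> {
    if !ALLOW_MACRO_REDEFINITION && macros.contains_key(name) {
        return Err(format!("Macro redefinition is not allowed: {}", name));
    }
    Ok(())
}

fn main() {
    let mut macros: HashMap<String, Vec<String>> = HashMap::new();
    macros.insert("write".to_string(), vec!["1".to_string()]);
    println!("{:?}", check_redefinition(&macros, "write"));
}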
@@ -75,6 +78,45 @@ pub fn preprocess(tokens: Vec<Token>) -> Result<Vec<Token>>{
 
             }
 
+            _ if op_type == OpType::Include => {
+                if rtokens.len() == 0 {
+                    lerror!(&token.loc(), "Include path not found, expected {} but found nothing", TokenType::String.human());
+                    return Err(eyre!(""));
+                }
+
+                let include_path = rtokens.pop().unwrap();
+
+                if include_path.typ != TokenType::String {
+                    lerror!(&include_path.loc(), "Bad include path, expected {} but found {}", TokenType::String.human(), include_path.typ.human());
+                    return Err(eyre!(""));
+                }
+
+                let mut in_paths = args.include.clone();
+                in_paths.append(&mut crate::DEFAULT_INCLUDES.to_vec().clone().iter().map(|f| f.to_string()).collect::<Vec<String>>());
+
+                let mut include_code = String::new();
+
+                for path in in_paths {
+                    let p = PathBuf::from(path);
+                    let p = p.join(include_path.text.clone());
+
+                    if p.exists() {
+                        include_code = std::fs::read_to_string(p)?;
+                    }
+
+                }
+
+                if include_code.is_empty() {
+                    lerror!(&include_path.loc(), "Include file in path '{}' was not found", include_path.text);
+                    return Err(eyre!(""));
+                }
+
+                let mut code = lex(include_code, &include_path.text, args.clone(), false)?;
+                code.reverse();
+                rtokens.append(&mut code);
+
+
+            }
             _ => {
                 program.push(token);
             }
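For orientation, the Include handling above searches the -I directories given on the command line first and then the DEFAULT_INCLUDES from src/main.rs, joining each directory with the quoted include path. A self-contained sketch of that lookup; resolve_include and std.mcl are illustrative names, not the crate's:

use std::path::PathBuf;

// Mirrors the search order of the new Include handling: user-supplied -I
// directories first, then the built-in defaults; returns the first existing
// <dir>/<name> candidate.
fn resolve_include(name: &str, user_dirs: &[String]) -> Option<PathBuf> {
    let defaults = ["./include", "~/.mclang/include"];
    user_dirs
        .iter()
        .map(String::as_str)
        .chain(defaults)
        .map(|dir| PathBuf::from(dir).join(name))
        .find(|p| p.exists())
}

fn main() {
    let found = resolve_include("std.mcl", &["./vendor".to_string()]);
    println!("{:?}", found);
}

Two small differences from the committed loop are worth flagging: the loop above has no break, so when a file exists in more than one search directory the last existing candidate wins rather than the first, letting the appended defaults shadow a -I directory; and PathBuf does no tilde expansion, so the "~/.mclang/include" default only matches a path starting with a directory literally named "~".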