// TODO: Normalize module names. Right now canonicalization can produce an ugly
// `\\?\`-style extended-length path (on Windows); it would be better to
// normalize relative to the base directory instead.
use std::{collections::HashMap, fs, path::PathBuf, rc::Rc};
|
|
|
|
use compiler::compile;
|
|
use parser::parse;
|
|
use semantics::{check, Error, ImportRecord, Semantics};
|
|
use vm::{eval, Context};
|
|
|
|
pub mod compiler;
|
|
pub mod parser;
|
|
pub mod semantics;
|
|
pub mod tokens;
|
|
pub mod vm;
|
|
|
|
/// The contents of a module as produced by a [`ModuleLoader`].
// `Debug` derived for parity with `ModuleLoadError`, so load results are
// printable in diagnostics.
#[derive(Debug)]
pub enum ModuleSource {
    /// Raw source text of the module, to be parsed by the runtime.
    SourceText(String),
}
|
|
|
|
/// Errors produced while loading a module's source.
#[derive(Debug)]
pub enum ModuleLoadError {
    /// An I/O failure reading the module; carries the module name that
    /// failed and the underlying error.
    IO(String, std::io::Error),
}

// Implement `Display` + `Error` so this type composes with standard error
// handling (`Box<dyn Error>`, `?` conversions, caller-side reporting).
impl std::fmt::Display for ModuleLoadError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            ModuleLoadError::IO(name, e) => {
                write!(f, "I/O error loading module `{name}`: {e}")
            }
        }
    }
}

impl std::error::Error for ModuleLoadError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match self {
            ModuleLoadError::IO(_, e) => Some(e),
        }
    }
}
|
|
|
|
/// Strategy for resolving module names and fetching their source text.
///
/// `Runtime` holds one of these (boxed) and uses it both to turn import
/// strings into canonical module keys and to read module contents.
pub trait ModuleLoader {
    /// Canonicalize `name` as referenced from module `source`, producing the
    /// stable key the runtime uses to deduplicate modules. The runtime passes
    /// `""` as `source` for the entry module.
    // NOTE(review): `name: String` (and `&String` below) are unidiomatic —
    // `&str` would do — but changing them alters the trait contract for all
    // implementors, so they are left as-is here.
    fn normalize_module_name(&self, source: &str, name: String) -> String;

    /// Fetch the module's source, keyed by a (normalized) name.
    fn load_module(&self, name: &String) -> Result<ModuleSource, ModuleLoadError>;
}
|
|
|
|
/// A `ModuleLoader` that reads module source files from the local filesystem.
pub struct StandardModuleLoader {
    // Directory that relative module names are resolved against.
    base_path: PathBuf,
}
|
|
|
|
impl StandardModuleLoader {
|
|
pub fn new(base_path: PathBuf) -> Self {
|
|
StandardModuleLoader { base_path }
|
|
}
|
|
}
|
|
|
|
impl ModuleLoader for StandardModuleLoader {
|
|
fn normalize_module_name(&self, source: &str, name: String) -> String {
|
|
let p = self.base_path.join(source).join(name.clone());
|
|
let result = match std::fs::canonicalize(&p) {
|
|
Ok(p) => match p.into_os_string().into_string() {
|
|
Ok(s) => s,
|
|
Err(_e) => name.clone(),
|
|
},
|
|
Err(_e) => name.clone(),
|
|
};
|
|
result
|
|
}
|
|
|
|
fn load_module(&self, name: &String) -> Result<ModuleSource, ModuleLoadError> {
|
|
match fs::read_to_string(name) {
|
|
Ok(c) => Ok(ModuleSource::SourceText(c)),
|
|
Err(e) => Err(ModuleLoadError::IO(name.clone(), e)),
|
|
}
|
|
}
|
|
}
|
|
|
|
/// A loaded module: a runtime-assigned id plus its semantic analysis results.
pub struct Module {
    // Unique id assigned by `Runtime` when the module is first loaded.
    id: u64,
    // Shared semantic model for this module; other modules hold weak
    // references to it via their import tables.
    semantics: Rc<Semantics>,
}
|
|
|
|
impl Module {
|
|
pub fn id(&self) -> u64 {
|
|
self.id
|
|
}
|
|
|
|
pub fn semantics(&self) -> Rc<Semantics> {
|
|
self.semantics.clone()
|
|
}
|
|
}
|
|
|
|
/// Owns the set of loaded modules and drives loading and checking.
pub struct Runtime {
    // Next id to hand out to a newly loaded module.
    next_module_id: u64,
    // Loaded modules, keyed by their normalized module name.
    modules: HashMap<String, Rc<Module>>,
    // Pluggable source of module text and name normalization.
    loader: Box<dyn ModuleLoader>,
}
|
|
|
|
impl Runtime {
    /// Create a runtime with no modules loaded; module ids start at 0.
    pub fn new(loader: Box<dyn ModuleLoader>) -> Self {
        Runtime {
            next_module_id: 0,
            modules: HashMap::new(),
            loader,
        }
    }

    /// Load `name` and, transitively, everything it imports; run semantic
    /// checks on each newly loaded module; return the accumulated semantic
    /// errors together with the module for `name`.
    ///
    /// Only load (I/O) failures are returned as `Err`; semantic errors come
    /// back in the `Vec<Error>` with an `Ok` result.
    pub fn load_module(&mut self, name: &str) -> Result<(Vec<Error>, Rc<Module>), ModuleLoadError> {
        // Modules discovered this call but not yet committed to `self.modules`:
        // normalized name -> (assigned id, (import-as-written, normalized) pairs, semantics).
        let mut init_pending = HashMap::new();
        // Work stack of normalized names still to visit.
        let mut names = Vec::new();
        // The entry module is normalized with an empty source, i.e. relative
        // to the loader's base.
        let name = self.loader.normalize_module_name("", name.to_string());
        names.push(name.clone());

        // Ids are assigned locally and only committed to `self.next_module_id`
        // once the whole closure has loaded successfully.
        let mut id_assign = self.next_module_id;

        // Phase 1: parse the transitive import closure.
        while let Some(name) = names.pop() {
            if self.modules.contains_key(&name) || init_pending.contains_key(&name) {
                // Either already loaded or pending load.
                continue;
            }

            // TODO: Errors here are bad! Remember, run everything!
            // NOTE(review): a `?` failure here abandons everything parsed so
            // far this call — `init_pending` is dropped and the ids are never
            // committed. That appears intentional-but-unloved per the TODO.
            match self.loader.load_module(&name)? {
                ModuleSource::SourceText(source) => {
                    let source: Rc<str> = source.into();
                    let (tree, lines) = parse(&source);
                    let semantics =
                        Rc::new(Semantics::new(name.clone().into(), source, tree, lines));

                    // Normalize each import relative to this module and queue
                    // it for loading; remember both spellings for phase 2.
                    let mut normalized_imports = Vec::new();
                    for import in semantics.imports() {
                        let normalized = self.loader.normalize_module_name(&name, import.clone());

                        names.push(normalized.clone());
                        normalized_imports.push((import, normalized));
                    }

                    init_pending.insert(name, (id_assign, normalized_imports, semantics));
                    id_assign += 1;
                }
            }
        }

        // Phase 2: wire up each pending module's import table. Every import
        // target is either already loaded (`self.modules`) or was just parsed
        // (`init_pending`), so the lookups below cannot miss.
        for (_, (_, imports, semantics)) in init_pending.iter() {
            let mut import_table = HashMap::new();
            for (import, normalized) in imports.iter() {
                // NOTE: We look up the load(ed|ing) module here by normalized name, because that's how
                // we track it...
                let target = if let Some(module) = self.modules.get(&*normalized) {
                    ImportRecord {
                        name: normalized.clone(),
                        module_id: module.id(),
                        // Weak reference: import tables must not keep modules
                        // alive in a cycle.
                        semantics: Rc::downgrade(&module.semantics),
                    }
                } else {
                    let (module_id, _, semantics) = init_pending.get(&*normalized).unwrap();
                    ImportRecord {
                        name: normalized.clone(),
                        module_id: *module_id,
                        semantics: Rc::downgrade(semantics),
                    }
                };

                // ...but we set it into the import table here with the name
                // that the source code used, for more better binding.
                import_table.insert(import.clone(), target);
            }
            semantics.set_imports(import_table);
        }

        // Phase 3: check each newly loaded module, collect its errors, and
        // commit it to the runtime's module map.
        let mut errors = Vec::new();
        for (name, (id, _, semantics)) in init_pending.into_iter() {
            check(&semantics);
            errors.append(&mut semantics.snapshot_errors());
            self.modules.insert(name, Rc::new(Module { id, semantics }));
        }
        // Only now do the locally assigned ids become permanent.
        self.next_module_id = id_assign;

        // The entry module was either already loaded or committed above, so
        // this lookup cannot fail.
        let result = self.modules.get(&name).unwrap().clone();
        Ok((errors, result))
    }
}
|
|
|
|
pub fn process_file(file: &str) {
|
|
let mut runtime = Runtime::new(Box::new(StandardModuleLoader::new(PathBuf::from("."))));
|
|
|
|
let (errors, module) = match runtime.load_module(file) {
|
|
Ok(r) => r,
|
|
Err(_) => {
|
|
eprintln!("Error loading module");
|
|
return;
|
|
}
|
|
};
|
|
|
|
// OK now there might be errors.
|
|
if errors.len() > 0 {
|
|
for e in errors {
|
|
eprintln!("{file}: {}:{}: {}", e.start.0, e.start.1, e.message);
|
|
}
|
|
return;
|
|
}
|
|
|
|
// shrug
|
|
let semantics = module.semantics();
|
|
let module = compile(&semantics);
|
|
let main_function = module.functions[module.init].clone();
|
|
|
|
let mut context = Context::new(module.clone());
|
|
match eval(&mut context, main_function, vec![]) {
|
|
Ok(v) => {
|
|
println!("{:?}", v);
|
|
}
|
|
Err(e) => {
|
|
eprintln!("{:?}", e);
|
|
}
|
|
}
|
|
}
|