Compare commits

..

No commits in common. "main" and "source-maps" have entirely different histories.

19304 changed files with 970 additions and 10654083 deletions

View file

@ -4,14 +4,3 @@
# something. But the reason for that error is beyond my ability to care at
# the moment.)
rustflags = ["-C","link-self-contained=on"]
# Vendor configuration, output of `cargo vendor`.
[source.crates-io]
replace-with = "vendored-sources"
[source."git+https://github.com/salsa-rs/salsa.git"]
git = "https://github.com/salsa-rs/salsa.git"
replace-with = "vendored-sources"
[source.vendored-sources]
directory = "third-party/vendor"

1
.gitattributes vendored
View file

@ -1 +0,0 @@
* -text

4
.gitignore vendored
View file

@ -1,7 +1,3 @@
/target
/oden-js/target
/oden-js-sys/target
/fine/target
.venv/
__pycache__/

1039
Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@ -1,9 +1,26 @@
[workspace]
resolver = "2"
[package]
name = "oden"
version = "0.1.0"
edition = "2021"
members = [
"fine",
"oden",
"oden-js",
"oden-js-sys",
]
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[features]
default = []
tracing = ["tracy-client/enable"]
[dependencies]
anyhow = "1.0"
bytemuck = { version = "1.13", features = ["derive"] }
deno_ast = { version = "0.29.3", features = ["transpiling", "typescript"] }
env_logger = "0.10"
fontdue = "0.7.3"
image = { version = "0.24", default-features = false, features = ["png"] }
log = "0.4"
lru = "0.11.0"
notify = "6"
oden-js = { path = "oden-js" }
pollster = "0.3"
sourcemap = "7.0.0"
tracy-client = { version = "0.15.2", default-features = false }
wgpu = "0.17"
winit = "0.28"

109
fine/Cargo.lock generated
View file

@ -1,109 +0,0 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "diff"
version = "0.1.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8"
[[package]]
name = "fine"
version = "0.1.0"
dependencies = [
"glob",
"pretty_assertions",
"prettyplease",
"proc-macro2",
"quote",
"syn",
"thiserror",
]
[[package]]
name = "glob"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b"
[[package]]
name = "pretty_assertions"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "af7cee1a6c8a5b9208b3cb1061f10c0cb689087b3d8ce85fb9d2dd7a29b6ba66"
dependencies = [
"diff",
"yansi",
]
[[package]]
name = "prettyplease"
version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a41cf62165e97c7f814d2221421dbb9afcbcdb0a88068e5ea206e19951c2cbb5"
dependencies = [
"proc-macro2",
"syn",
]
[[package]]
name = "proc-macro2"
version = "1.0.76"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "95fc56cda0b5c3325f5fbbd7ff9fda9e02bb00bb3dac51252d2f1bfa1cb8cc8c"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.35"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef"
dependencies = [
"proc-macro2",
]
[[package]]
name = "syn"
version = "2.0.47"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1726efe18f42ae774cc644f330953a5e7b3c3003d3edcecf18850fe9d4dd9afb"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "thiserror"
version = "1.0.56"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d54378c645627613241d077a3a79db965db602882668f9136ac42af9ecb730ad"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
version = "1.0.56"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fa0faa943b50f3db30a20aa7e265dbc66076993efed8463e8de414e5d06d3471"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "unicode-ident"
version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
[[package]]
name = "yansi"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec"

View file

@ -1,18 +0,0 @@
[package]
name = "fine"
version = "0.1.0"
edition = "2021"
[dev-dependencies]
pretty_assertions = "1.4.0"
[build-dependencies]
glob = "0.3.1"
prettyplease = "0.2.16"
proc-macro2 = "1.0.76"
quote = "1.0.35"
syn = "2.0.47"
[dependencies]
thiserror = "1.0.56"
unicode-width = "=0.1.11"

View file

@ -1,5 +0,0 @@
- The Export enum is stupid I think, for runtime modules. Why do we even have them? We should just put all the names in, like Declaration {} but for runtime.
- When adding PANIC instructions, push a real diagnostic when one can be found, instead of a hard-coded string.
- runtime should have `new` with 0 args and `with_loader<T : ModuleLoader>` that does the boxing, and `new` should just make the standard one

View file

@ -1,194 +0,0 @@
use proc_macro2::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream};
use quote::{format_ident, quote, TokenStreamExt};
use std::env;
use std::fs;
use std::path::{Path, PathBuf};
struct ExpectedErrors(Vec<String>);
impl quote::ToTokens for ExpectedErrors {
fn to_tokens(&self, tokens: &mut TokenStream) {
let mut inner = TokenStream::new();
for err in self.0.iter() {
inner.append(Literal::string(err));
inner.append(Punct::new(',', Spacing::Alone));
}
tokens.append(Ident::new("vec", Span::call_site()));
tokens.append(Punct::new('!', Spacing::Joint));
tokens.append(Group::new(Delimiter::Parenthesis, inner))
}
}
/// Reads the `// | `-prefixed body lines of a block directive, consuming
/// (and discarding) the first line that does not carry the prefix.
///
/// NOTE(review): because the terminating line is consumed, a directive
/// placed on the line immediately after a block body is silently skipped —
/// confirm that test files always separate a block from the next directive.
fn collect_block_lines<'a>(lines: &mut std::str::Lines<'a>) -> Vec<&'a str> {
    let mut body = Vec::new();
    for line in lines {
        match line.strip_prefix("// | ") {
            Some(rest) => body.push(rest),
            None => break,
        }
    }
    body
}

/// Joins a block directive's body into one string with each line
/// newline-terminated (matching the original inline accumulation).
fn collect_block_text(lines: &mut std::str::Lines) -> String {
    collect_block_lines(lines)
        .into_iter()
        .map(|line| format!("{line}\n"))
        .collect()
}

/// Parses one `*.fine` test file, turning its `// @...` comment directives
/// into assertions, and renders a complete `#[test]` function body
/// (pretty-printed via `prettyplease`) for the generated test module.
fn generate_test_for_file(path: PathBuf) -> String {
    // Normalize line endings so directive matching behaves the same on
    // Windows checkouts.
    let contents = fs::read_to_string(&path)
        .expect("Unable to read input")
        .replace("\r\n", "\n");
    let display_path = path.display().to_string();
    // Start iterating over lines and processing directives....
    let mut disabled = quote! {};
    let mut assertions = Vec::new();
    let mut lines = contents.lines();
    while let Some(line) = lines.next() {
        // Only comment lines can carry directives.
        let line = match line.strip_prefix("//") {
            Some(line) => line,
            None => continue,
        };
        let line = line.trim();
        if let Some(line) = line.strip_prefix("@ignore") {
            // `@ignore <reason>` marks the whole generated test `#[ignore]`.
            let reason = line.trim();
            assert_ne!(
                reason, "",
                "You need to provide at least some description for ignoring in {display_path}"
            );
            disabled = quote! { #[ignore = #reason] };
        } else if line == "@concrete:" {
            let concrete = collect_block_text(&mut lines);
            assertions.push(quote! {
                crate::assert_concrete(source.clone(), #concrete, #display_path);
            });
        } else if line == "@compiles-to:" {
            let compiled = collect_block_text(&mut lines);
            assertions.push(quote! {
                crate::assert_compiles_to(_module.clone(), #compiled, #display_path);
            });
        } else if let Some(line) = line.strip_prefix("@type:") {
            // `@type: <pos> <type>` — assert the inferred type at a byte
            // position.
            let (pos, expected) = line
                .trim()
                .split_once(' ')
                .expect("Mal-formed type expectation");
            let pos: usize = pos
                .trim()
                .parse()
                .unwrap_or_else(|_| panic!("Unable to parse position '{pos}'"));
            let expected = expected.trim();
            assertions.push(quote! {
                crate::assert_type_at(_module.clone(), #pos, #expected, #display_path);
            });
        } else if let Some(line) = line.strip_prefix("@type-error:") {
            let (pos, expected) = line
                .trim()
                .split_once(' ')
                .expect("Mal-formed type-error expectation");
            let pos: usize = pos
                .trim()
                .parse()
                .unwrap_or_else(|_| panic!("Unable to parse position '{pos}'"));
            let expected = expected.trim();
            assertions.push(quote! {
                crate::assert_type_error_at(_module.clone(), &_errors, #pos, #expected, #display_path);
            });
        } else if line == "@no-errors" {
            assertions.push(quote! {
                crate::assert_no_errors(_module.clone(), &_errors);
            });
        } else if let Some(line) = line.strip_prefix("@eval:") {
            let expected = line.trim();
            assertions.push(quote! {
                crate::assert_eval_ok(&program, _module.clone(), #expected);
            });
        } else if let Some(line) = line.strip_prefix("@check-error:") {
            let expected = line.trim();
            assertions.push(quote! {
                crate::assert_check_error(_module.clone(), &_errors, #expected);
            });
        } else if line == "@expect-errors:" {
            let errors = ExpectedErrors(
                collect_block_lines(&mut lines)
                    .into_iter()
                    .map(String::from)
                    .collect(),
            );
            assertions.push(quote! {
                crate::assert_errors(_module.clone(), &_errors, #errors);
            });
        } else if line.starts_with("@") {
            panic!("Test file {display_path} has unknown directive: {line}");
        }
    }
    // The test function is named after the file stem; the whole source is
    // embedded as a literal so the test is self-contained.
    let name = format_ident!("{}", path.file_stem().unwrap().to_string_lossy());
    let test_method = quote! {
        #disabled
        fn #name() {
            let source : std::rc::Rc<str> = #contents.into();
            let mut program = crate::test_runtime(#display_path, source.clone());
            let (_errors, _module) = program.load_module("__test__").unwrap();
            #(#assertions)*
        }
    };
    let syntax_tree = syn::parse2(test_method).unwrap();
    prettyplease::unparse(&syntax_tree)
}
/// Recursively walks `path`, appending generated Rust test source to
/// `output`: one `mod <dir> { ... }` per subdirectory and one `#[test]`
/// function (via `generate_test_for_file`) per `*.fine` file.
fn process_directory<T>(output: &mut String, path: T)
where
    T: AsRef<Path>,
{
    let fine_ext: std::ffi::OsString = "fine".into();
    let path = path.as_ref();
    for entry in std::fs::read_dir(path).expect("Unable to read directory") {
        match entry {
            Ok(dirent) => {
                let file_type = dirent.file_type().unwrap();
                if file_type.is_dir() {
                    // Tests in a subdirectory get nested in a module named
                    // after it. (Assumes directory names are valid Rust
                    // identifiers — TODO confirm.)
                    let file_name = dirent.file_name();
                    let file_name = file_name.to_string_lossy();
                    output.push_str(&format!("mod {file_name} {{\n"));
                    process_directory(output, dirent.path());
                    output.push_str("}\n\n");
                } else if file_type.is_file() {
                    if dirent.path().extension() == Some(fine_ext.as_os_str()) {
                        output.push_str(&format!("// {}\n", dirent.path().display()));
                        output.push_str("#[test]\n");
                        output.push_str(&generate_test_for_file(dirent.path()));
                        output.push_str("\n\n");
                    }
                } else {
                    // Bug fix: report the symlink's own path; the old code
                    // printed the directory being scanned instead.
                    eprintln!("Skipping symlink: {}", dirent.path().display());
                }
            }
            Err(e) => eprintln!("Unable to read directory entry: {:?}", e),
        }
    }
}
/// Build-script entry point: regenerates `generated_tests.rs` in OUT_DIR
/// from the `./tests` directory tree.
fn main() {
    // Rebuild whenever any test input changes.
    println!("cargo:rerun-if-changed=./tests");
    let mut generated = String::new();
    process_directory(&mut generated, "./tests");
    let out_dir = env::var_os("OUT_DIR").unwrap();
    fs::write(Path::new(&out_dir).join("generated_tests.rs"), generated).unwrap();
}

View file

@ -1,138 +0,0 @@
# Design Notes for the Fine Language
This language is being designed as I go, because the main thing I'm
interested in is building something that's fun and productive for me
personally. That means, rather than being super careful, I'm just
building the thing that pleases me at any given moment.
Here are some notes. The notes are for me in the future, in case I'm
wondering why the language is one way instead of another way.
## The `new` keyword
I really like rust's "just use a type name with curly braces to
construct new values". It's really clean! Unfortunately it leads to an
ambiguity in the syntax that I don't like:
``` rust
if something { ...
```
In the code above, after I have parsed `something` and I see `{`, am I:
- Parsing an object construction expression for the type `something`?
- Parsing `something` as a boolean value reference and `{` as the
start of the block?
Naively you would expect the latter, but if I scan ahead a little more:
``` rust
if something { foo: true }.foo { }
```
Rust does not allow `struct` literals in the condition of the `if`,
which is correct, but that's more work than I want to do here. There's
just a lot of context flowing around about whether or not I can parse
a structure literal in any particular situation.
The `new` keyword is a compromise: we know that the context
immediately following the `new` keyword is always a type expression,
so we know that e.g. `<` or whatever means "generic type parameter"
and not "less than".
## Patterns and Alternate Types
Instead of `enums` or inheritance, we're using an alternate type like
in Typescript, with `or` between types. See the `alternates.fine` test
for a work-up of the types and syntax and whatnot. I think it works
pretty well.
Actually using alternate types involves pattern matching, either one
at a time, with the `is` operator, or in bulk, with the `match`
expression. `match` can check for completeness, but `if/is` cannot.
Patterns are VERY simple, and are explicitly *not* destructuring right
now. (Destructuring brings up a whole lot of complexity that I don't
want to deal with.)
Patterns are basically:
```
(identifier ":")? type_expression ("and" <expression>)?
```
The identifier at the front represents a binding of the value being
considered as if it were of the same type as the type expression; the
identifier is in scope for the optional predicate after the "and" and
so you can use it as if it were the type because, well, that part of
the pattern already matched.
As a special case, the identifier is *also* in scope for the body of
an `if` expression when an `is` expression is used as the condition.
```
if b is c:Foo {
result = result + c.a; // c should still be in scope!
}
```
`match` is the multi-value pattern matching expression, like this:
```
match b {
c:Foo -> c.a,
_ -> 0,
}
```
The special pattern `_` always matches.
Note that unlike rust we do not allow variable binding, e.g., in rust you can write:
```
match b {
d -> ...,
}
```
but in fine you need to write:
```
match b {
d:_ -> ...,
}
```
The reason is that the rust version is ambiguous: if `d` matches some
value already in scope (e.g., an `enum` arm) then the arm is matching
if b == d, but if `d` is unbound then `d` becomes a variable
declaration. This is a spooky action-at-a-distance and I don't approve
of it.
# Complete Garbage
## Lambdas/Closures/Anonymous Functions
Looking for a syntax here; I want to keep `fun` as a declaration like
`let` and not let it enter the expression space. I don't like
fat-arrow syntax because it makes expression parsing very ambiguous,
potentially requiring a lot of lookahead. (TODO: Is that true?)
Maybe a leading character like ` \x => x+1 ` or ` \(x,y) => x+y `?
## Interfaces/Traits/Whatever
These are incomplete structural types. Methods are easier to make
compatible than members, but members should also work so long as they
are strict prefixes of the thing.
What about sound casting with narrowing? That's union types baby, do
we really want those? It could be neat if we're doing otherwise
structural-compatibility.
## On Objects and Classes
Sometimes I think it should all be structural types.
Maybe later there can be anonymous types that match shapes.

View file

@ -1,388 +0,0 @@
from parser import Assoc, Grammar, Nothing, Token, rule, seq
# Terminal tokens of the Fine grammar. The string passed to Token() names
# the token kind the lexer produces.
# NOTE(review): "Selff" (not "Self") is presumably spelled that way to avoid
# clashing with a reserved name somewhere in the generator — confirm.
ARROW = Token("Arrow")
AS = Token("As")
BAR = Token("Bar")
CLASS = Token("Class")
COLON = Token("Colon")
ELSE = Token("Else")
FOR = Token("For")
FUN = Token("Fun")
IDENTIFIER = Token("Identifier")
IF = Token("If")
IMPORT = Token("Import")
IN = Token("In")
LCURLY = Token("LeftBrace")
LET = Token("Let")
RCURLY = Token("RightBrace")
RETURN = Token("Return")
SEMICOLON = Token("Semicolon")
STRING = Token("String")
WHILE = Token("While")
EQUAL = Token("Equal")
LPAREN = Token("LeftParen")
RPAREN = Token("RightParen")
COMMA = Token("Comma")
SELF = Token("Selff")
OR = Token("Or")
IS = Token("Is")
AND = Token("And")
EQUALEQUAL = Token("EqualEqual")
BANGEQUAL = Token("BangEqual")
LESS = Token("Less")
GREATER = Token("Greater")
LESSEQUAL = Token("LessEqual")
GREATEREQUAL = Token("GreaterEqual")
PLUS = Token("Plus")
MINUS = Token("Minus")
STAR = Token("Star")
SLASH = Token("Slash")
NUMBER = Token("Number")
TRUE = Token("True")
FALSE = Token("False")
BANG = Token("Bang")
DOT = Token("Dot")
MATCH = Token("Match")
EXPORT = Token("Export")
UNDERSCORE = Token("Underscore")
NEW = Token("New")
LSQUARE = Token("LeftBracket")
RSQUARE = Token("RightBracket")
class FineGrammar(Grammar):
    """Grammar of the Fine language, expressed with the in-repo parser
    generator DSL: each @rule method returns alternatives built from
    tokens, other rules, `seq(...)`, and `|`.
    """

    def __init__(self):
        # Precedence runs from loosest (assignment) to tightest (member
        # access); the trailing NONE entry resolves the IF
        # statement-vs-expression ambiguity in favor of the statement.
        super().__init__(
            precedence=[
                (Assoc.RIGHT, [EQUAL]),
                (Assoc.LEFT, [OR]),
                (Assoc.LEFT, [IS]),
                (Assoc.LEFT, [AND]),
                (Assoc.LEFT, [EQUALEQUAL, BANGEQUAL]),
                (Assoc.LEFT, [LESS, GREATER, GREATEREQUAL, LESSEQUAL]),
                (Assoc.LEFT, [PLUS, MINUS]),
                (Assoc.LEFT, [STAR, SLASH]),
                (Assoc.LEFT, [self.primary_expression]),
                (Assoc.LEFT, [LPAREN]),
                (Assoc.LEFT, [DOT]),
                #
                # If there's a confusion about whether to make an IF
                # statement or an expression, prefer the statement.
                #
                (Assoc.NONE, [self.if_statement]),
            ]
        )

    # File structure

    @rule
    def file(self):
        return self.file_statement_list

    @rule
    def file_statement_list(self):
        return self.file_statement | (self.file_statement_list + self.file_statement)

    @rule
    def file_statement(self):
        return (
            self.import_statement | self.class_declaration | self.export_statement | self.statement
        )

    @rule
    def import_statement(self):
        return seq(IMPORT, STRING, AS, IDENTIFIER, SEMICOLON)

    # Classes

    @rule
    def class_declaration(self):
        return seq(CLASS, IDENTIFIER, self.class_body)

    @rule
    def class_body(self):
        return seq(LCURLY, RCURLY) | seq(LCURLY, self.class_members, RCURLY)

    @rule
    def class_members(self):
        return self.class_member | seq(self.class_members, self.class_member)

    @rule
    def class_member(self):
        return self.field_declaration | self.function_declaration

    @rule
    def field_declaration(self):
        return seq(IDENTIFIER, COLON, self.type_expression, SEMICOLON)

    # Types

    @rule
    def type_expression(self):
        return self.alternate_type | self.type_identifier

    @rule
    def alternate_type(self):
        # TypeScript-style alternates: `A | B | C`.
        return seq(self.type_expression, BAR, self.type_identifier)

    @rule
    def type_identifier(self):
        return IDENTIFIER

    # Exports

    @rule
    def export_statement(self):
        return (
            seq(EXPORT, self.class_declaration)
            | seq(EXPORT, self.function_declaration)
            | seq(EXPORT, self.let_statement)
            | seq(EXPORT, self.export_list, SEMICOLON)
        )

    @rule
    def export_list(self):
        return Nothing | IDENTIFIER | seq(IDENTIFIER, COMMA, self.export_list)

    # Functions

    @rule
    def function_declaration(self):
        return seq(FUN, IDENTIFIER, self.function_parameters, self.block) | seq(
            FUN, IDENTIFIER, self.function_parameters, ARROW, self.type_expression, self.block
        )

    @rule
    def function_parameters(self):
        return (
            seq(LPAREN, RPAREN)
            | seq(LPAREN, self.first_parameter, RPAREN)
            | seq(LPAREN, self.first_parameter, COMMA, self.parameter_list, RPAREN)
        )

    @rule
    def first_parameter(self):
        # `self` is only legal in the first position.
        return SELF | self.parameter

    @rule
    def parameter_list(self):
        return Nothing | self.parameter | seq(self.parameter, COMMA, self.parameter_list)

    @rule
    def parameter(self):
        return seq(IDENTIFIER, COLON, self.type_expression)

    # Block

    @rule
    def block(self):
        # A block may end with a bare expression (its value).
        return (
            seq(LCURLY, RCURLY)
            | seq(LCURLY, self.statement_list, RCURLY)
            | seq(LCURLY, self.statement_list, self.expression, RCURLY)
        )

    @rule
    def statement_list(self):
        return self.statement | seq(self.statement_list, self.statement)

    @rule
    def statement(self):
        return (
            self.function_declaration
            | self.let_statement
            | self.return_statement
            | self.for_statement
            | self.if_statement
            | self.while_statement
            | self.expression_statement
        )

    @rule
    def let_statement(self):
        return seq(LET, IDENTIFIER, EQUAL, self.expression, SEMICOLON)

    @rule
    def return_statement(self):
        return seq(RETURN, self.expression, SEMICOLON)

    @rule
    def for_statement(self):
        return seq(FOR, self.iterator_variable, IN, self.expression, self.block)

    @rule
    def iterator_variable(self):
        return IDENTIFIER

    @rule
    def if_statement(self):
        return self.conditional_expression

    @rule
    def while_statement(self):
        return seq(WHILE, self.expression, self.block)

    @rule
    def expression_statement(self):
        return seq(self.expression, SEMICOLON)

    # Expressions

    @rule
    def expression(self):
        return self.assignment_expression

    @rule
    def assignment_expression(self):
        return seq(self.or_expression, EQUAL, self.assignment_expression) | self.or_expression

    @rule
    def or_expression(self):
        return seq(self.or_expression, OR, self.is_expression) | self.is_expression

    @rule
    def is_expression(self):
        return seq(self.is_expression, IS, self.pattern) | self.and_expression

    @rule
    def and_expression(self):
        return seq(self.and_expression, AND, self.equality_expression) | self.equality_expression

    @rule
    def equality_expression(self):
        return (
            seq(self.equality_expression, EQUALEQUAL, self.relation_expression)
            | seq(self.equality_expression, BANGEQUAL, self.relation_expression)
            | self.relation_expression
        )

    @rule
    def relation_expression(self):
        # BUG FIX: this level previously had no plain additive alternative,
        # so an expression containing no relational operator could never be
        # derived through the precedence chain. Restore the passthrough,
        # matching every other level.
        return (
            seq(self.relation_expression, LESS, self.additive_expression)
            | seq(self.relation_expression, LESSEQUAL, self.additive_expression)
            | seq(self.relation_expression, GREATER, self.additive_expression)
            | seq(self.relation_expression, GREATEREQUAL, self.additive_expression)
            | self.additive_expression
        )

    @rule
    def additive_expression(self):
        return (
            seq(self.additive_expression, PLUS, self.multiplication_expression)
            | seq(self.additive_expression, MINUS, self.multiplication_expression)
            | self.multiplication_expression
        )

    @rule
    def multiplication_expression(self):
        return (
            seq(self.multiplication_expression, STAR, self.primary_expression)
            | seq(self.multiplication_expression, SLASH, self.primary_expression)
            | self.primary_expression
        )

    @rule
    def primary_expression(self):
        return (
            IDENTIFIER
            | SELF
            | NUMBER
            | STRING
            | TRUE
            | FALSE
            | seq(BANG, self.primary_expression)
            | seq(MINUS, self.primary_expression)
            | self.block
            | self.conditional_expression
            | self.list_constructor_expression
            | self.object_constructor_expression
            | self.match_expression
            | seq(self.primary_expression, LPAREN, self.expression_list, RPAREN)
            | seq(self.primary_expression, DOT, IDENTIFIER)
            | seq(LPAREN, self.expression, RPAREN)
        )

    @rule
    def conditional_expression(self):
        return (
            seq(IF, self.expression, self.block)
            | seq(IF, self.expression, self.block, ELSE, self.conditional_expression)
            | seq(IF, self.expression, self.block, ELSE, self.block)
        )

    @rule
    def list_constructor_expression(self):
        return seq(LSQUARE, RSQUARE) | seq(LSQUARE, self.expression_list, RSQUARE)

    @rule
    def expression_list(self):
        # Trailing commas are allowed.
        return (
            self.expression
            | seq(self.expression, COMMA)
            | seq(self.expression, COMMA, self.expression_list)
        )

    # Pattern matching

    @rule
    def match_expression(self):
        return seq(MATCH, self.match_body)

    @rule
    def match_body(self):
        return seq(LCURLY, RCURLY) | seq(LCURLY, self.match_arms, RCURLY)

    @rule
    def match_arms(self):
        return (
            self.match_arm
            | seq(self.match_arm, COMMA)
            | seq(self.match_arm, COMMA, self.match_arms)
        )

    @rule
    def match_arm(self):
        return seq(self.pattern, ARROW, self.expression)

    @rule
    def pattern(self):
        # (identifier ":")? pattern_core ("and" <expression>)?
        return (
            seq(self.variable_binding, self.pattern_core, AND, self.and_expression)
            | seq(self.variable_binding, self.pattern_core)
            | seq(self.pattern_core, AND, self.and_expression)
            | self.pattern_core
        )

    @rule
    def pattern_core(self):
        return self.type_expression | self.wildcard_pattern

    @rule
    def wildcard_pattern(self):
        return UNDERSCORE

    @rule
    def variable_binding(self):
        return seq(IDENTIFIER, COLON)

    # Object construction

    @rule
    def object_constructor_expression(self):
        # `new` disambiguates type context from expression context.
        return seq(NEW, self.type_identifier, self.field_list)

    @rule
    def field_list(self):
        return seq(LCURLY, RCURLY) | seq(LCURLY, self.field_values, RCURLY)

    @rule
    def field_values(self):
        return (
            self.field_value
            | seq(self.field_value, COMMA)
            | seq(self.field_value, COMMA, self.field_values)
        )

    @rule
    def field_value(self):
        return IDENTIFIER | seq(IDENTIFIER, COLON, self.expression)
# Build the LR table for the grammar and print rough size statistics
# (state count, average/max entries per state row).
grammar = FineGrammar()
table = grammar.build_table(start="file")
print(f"{len(table)} states")
average_entries = sum(len(row) for row in table) / len(table)
max_entries = max(len(row) for row in table)
print(f"{average_entries} average, {max_entries} max")
# print(parser_faster.format_table(gen, table))
# print()
# tree = parse(table, ["id", "+", "(", "id", "[", "id", "]", ")"])

View file

@ -1,422 +0,0 @@
// NOTE: Utterly Broken Ideas about Parse Tables.
//
// Committing this here so I can back it up.
use std::collections::HashSet;
/// Names of the grammar productions a reduction can correspond to. After
/// popping a rule's children, the parser looks up the matching goto entry
/// for the exposed state by comparing against this value.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub enum ReduceRule {
    // Generated
    AlternateType,
    Argument,
    ArgumentList,
    BinaryExpression,
    Block,
    CallExpression,
    ClassDecl,
    ConditionalExpression,
    ExpressionStatement,
    FieldDecl,
    FieldList,
    FieldValue,
    File,
    ForStatement,
    FunctionDecl,
    GroupingExpression,
    Identifier,
    IfStatement,
    IsExpression,
    IteratorVariable,
    LetStatement,
    ListConstructor,
    ListConstructorElement,
    LiteralExpression,
    MatchArm,
    MatchBody,
    MatchExpression,
    MemberAccess,
    NewObjectExpression,
    ParamList,
    Parameter,
    Pattern,
    ReturnStatement,
    ReturnType,
    SelfParameter,
    SelfReference,
    TypeExpression,
    TypeIdentifier,
    TypeParameter,
    TypeParameterList,
    UnaryExpression,
    VariableBinding,
    WhileStatement,
    WildcardPattern,
    Import,
    Export,
    ExportList,
}
/// One entry in the flattened per-token action table. (`TreeKind` is
/// declared elsewhere in the crate, not visible in this file.)
#[derive(Eq, PartialEq, Hash, Copy, Clone)]
pub enum TokenAction {
    /// No action for this token in this state; triggers error recovery.
    Error,
    /// Pop N stack entries, build a named tree of the given kind, goto.
    Reduce(ReduceRule, TreeKind, u16),
    /// Like `Reduce`, but the popped children are spliced into the next
    /// enclosing reduction instead of forming a named tree.
    ReduceAnonymous(ReduceRule, u16),
    /// The parse is complete.
    Accept,
    /// Consume the current token and move to the given state.
    Shift(u16),
}
/// Per-state index ranges into the flat parallel arrays of [`ParseTable`]:
/// actions live in `token_kind`/`token_action[action_start..action_end]`,
/// gotos in `tree_rules`/`tree_goto[goto_start..goto_end]`.
pub struct ParseState {
    action_start: usize,
    action_end: usize,
    goto_start: usize,
    goto_end: usize,
}
/// An LR parse table flattened into borrowed parallel arrays; each
/// [`ParseState`] carries the index ranges that slice into them.
/// (`TokenKind` is declared elsewhere in the crate.)
pub struct ParseTable<'a> {
    state: &'a [ParseState],
    start_state: usize,
    token_action: &'a [TokenAction],
    token_kind: &'a [TokenKind],
    tree_goto: &'a [u16],
    tree_rules: &'a [ReduceRule],
}
/// What a parse-stack slot holds alongside its state index.
#[derive(Clone)]
enum StackEntry {
    /// Placeholder under the start state; popping it during a reduce is a
    /// bug (panics).
    Nothing,
    /// A completed named subtree.
    Tree(TreeRef),
    /// Children of an anonymous reduction, spliced into the next parent.
    AnonTree(Vec<Child>),
    /// A shifted token.
    Token(TokenRef),
    /// A synthesized error token; not counted toward reduce arity.
    Error(TokenRef),
}
/// One speculative parse in the GLR-style error-recovering parser: a stack
/// of (state index, entry) pairs plus recovery/scoring counters.
#[derive(Clone)]
struct ParseThread {
    stack: Vec<(usize, StackEntry)>,
    // Successful shifts remaining before new error messages are reported
    // again (panic-mode suppression).
    panic_count: u8,
    // Errors seen so far; the thread stops expanding at THREAD_ERROR_LIMIT.
    error_count: u8,
    // Higher is better; used at the end to pick the winning accepted thread.
    score: u32,
}
impl ParseThread {
    /// A fresh thread sitting in `start_state` with nothing parsed yet.
    fn initial(start_state: usize) -> ParseThread {
        ParseThread {
            stack: vec![(start_state, StackEntry::Nothing)],
            error_count: 0,
            panic_count: 0,
        score: 0,
        }
    }
    /// Pops `count` non-error entries off the stack, wraps them in a tree
    /// (named when `kind` is `Some`, anonymous otherwise), and follows the
    /// goto entry for `rule` from the newly exposed state.
    ///
    /// NOTE(review): children are collected in pop order (reverse source
    /// order) and no reversal is visible here — confirm downstream
    /// consumers expect that ordering.
    fn reduce(
        &mut self,
        table: &ParseTable,
        syntax: &mut SyntaxTree,
        count: u16,
        rule: ReduceRule,
        kind: Option<TreeKind>,
    ) {
        let mut children = Vec::new();
        let count: usize = count.into();
        let mut consumed = 0;
        while consumed < count {
            // Running out of stack here trips the assert below.
            let Some((_, value)) = self.stack.pop() else {
                break;
            };
            match value {
                StackEntry::Nothing => panic!("Popped nothing!"),
                StackEntry::Tree(t) => {
                    consumed += 1;
                    children.push(Child::Tree(t));
                }
                StackEntry::AnonTree(mut cs) => {
                    // Anonymous trees splice their children directly into
                    // this reduction.
                    consumed += 1;
                    children.append(&mut cs);
                }
                StackEntry::Token(t) => {
                    consumed += 1;
                    children.push(Child::Token(t));
                }
                StackEntry::Error(t) => {
                    // Do *not* increment consumed; these don't count!
                    children.push(Child::Token(t));
                }
            }
        }
        assert_eq!(consumed, count, "Stack underflow on reduce");
        // Build the replacement stack entry. Positions/parent are filled
        // with placeholders here — presumably fixed up later; confirm.
        let value = if let Some(kind) = kind {
            let tr = syntax.add_tree(Tree {
                kind,
                self_ref: TreeRef::from_index(0),
                parent: None,
                start_pos: 0,
                end_pos: 0,
                children,
            });
            StackEntry::Tree(tr)
        } else {
            StackEntry::AnonTree(children)
        };
        // Linear scan of the exposed state's goto range for this rule.
        let (goto_index, _) = self.stack.last().unwrap();
        let goto_state = &table.state[*goto_index];
        let index: usize = (goto_state.goto_start..goto_state.goto_end)
            .find(|i| table.tree_rules[*i] == rule)
            .expect("Unable to goto target after reduction")
            .into();
        let target_state: usize = table.tree_goto[index].into();
        self.stack.push((target_state, value));
    }
    /// Pushes the shifted token and enters `state`.
    fn shift(&mut self, state: u16, tr: TokenRef) {
        let target_state: usize = state.into();
        self.stack.push((target_state, StackEntry::Token(tr)));
    }
}
/// This is what we set the panic level to when we get an error; we require
/// this many successful token shifts in a row to decide we're not lost and
/// may report errors again.
const PANIC_THRESHOLD: u8 = 3;
/// This is the maximum number of failed states that we're going to go through
/// before we just try to reduce all the way out of the tree (the thread
/// stops expanding the search space).
const THREAD_ERROR_LIMIT: u8 = 20;
pub fn table_parse(source: &str, table: &ParseTable) -> (Rc<SyntaxTree>, Rc<Lines>) {
let mut tokens = Tokens::new(source);
let mut syntax = SyntaxTree::new();
let mut threads = vec![ParseThread::initial(table.start_state)];
let mut next_threads = vec![];
let mut accepted_threads: Vec<ParseThread> = vec![];
let mut maybe_pushed_garbage = false;
// While we still have threads to run....
while threads.len() > 0 {
// We've still got live threads running, which means we've still got
// tokens to consume! Any thread that has accepted "early" should be
// penalized here.
for thread in accepted_threads.iter_mut() {
if thread.score > 0 {
thread.score -= 1;
}
}
// Grab us the next token from the stream.
// TODO: Collect ephemera before setting on the token.
let token = tokens.next();
let current_token = token.kind;
let current_token_ref = syntax.add_token(token, vec![]);
// Go over every thread in the list of threads to run. If a thread
// needs to keep running on this token it can push itself back onto
// the stack, and we'll re-consider it next time. (This is necessary
// for both reduce and for error handling.)
while let Some(mut thread) = threads.pop() {
let (state, _) = thread.stack.last().unwrap();
let state = &table.state[*state];
let action = (state.action_start..state.action_end)
.find(|i| table.token_kind[*i] == current_token)
.map(|i| &table.token_action[i])
.unwrap_or(&TokenAction::Error);
match action {
TokenAction::Reduce(rule, kind, count) => {
thread.reduce(table, &mut syntax, *count, *rule, Some(*kind));
thread.score += 1;
threads.push(thread); // Run me again, I can still work with this token.
}
TokenAction::ReduceAnonymous(rule, count) => {
thread.reduce(table, &mut syntax, *count, *rule, None);
thread.score += 1;
threads.push(thread); // Run me again, I can still work with this token.
}
TokenAction::Shift(state) => {
thread.shift(*state, current_token_ref);
thread.score += 1;
if thread.panic_count > 0 {
thread.panic_count -= 1;
} else if thread.error_count > 0 {
// TODO: We shifted a good number of tokens in a row,
// maybe we should consider reducing the error count
// here too, so that this thread might live for
// longer.
}
next_threads.push(thread);
}
TokenAction::Accept => {
thread.score += 1;
accepted_threads.push(thread);
}
// Error handling, the bane of LR parsers!
//
// In this parser, we borrow a trick from Tree-Sitter and
// treat the parse error as if it were an ambiguity: we see a
// token but don't know what to do with it, so we'll just try
// to do *everything* with it and see what sticks.
//
// The tricky part here is not causing an enormous explosion
// of threads, so we have certain conditions where we just
// give up and refuse to consider any more tokens for a given
// error thread.
//
TokenAction::Error => {
// First, report the error. (We use a pretty standard
// "panic" error recovery mode here to decide when to
// start showing new error messages, otherwise we would
// just generate *way* too many cascading errors.)
//
if thread.panic_count == 0 {
// TODO: Get a description for this state from the table somehow.
// TODO: Describe the error in an error message somehow.
let token = &syntax[current_token_ref];
let error_token = syntax.add_token(
Token::error(token.start(), token.end(), format!("PARSE ERROR")),
vec![],
);
// NOTE: `Error` stack entries are not counted when
// reducing, so we know this push here won't mess up
// the state machine.
thread.stack.push((0, StackEntry::Error(error_token)));
}
// Now mark the thread as panicing so that we don't
// produce too many random errors...
thread.panic_count = PANIC_THRESHOLD;
// Count the error.
// TODO: Check to see if this really does help thread explosion or not.
if thread.error_count < THREAD_ERROR_LIMIT {
thread.error_count += 1;
}
// Penalize this thread; this is not a great parse, we can tell.
if thread.score > 0 {
thread.score -= 1;
}
let mut executed = HashSet::new();
for index in state.action_start..state.action_end {
// Make absolutely sure we don't do the same thing
// twice! It can happen, and it is hugely wasteful
// because it spawns duplicate threads.
let action = &table.token_action[index];
if executed.contains(action) {
continue;
}
executed.insert(action.clone());
match action {
TokenAction::Error => {
panic!("Literal error in the table; table is corrupt")
}
TokenAction::Reduce(rule, kind, count) => {
// Let's pretend that we're done with the
// current rule and see what happens.
let mut new_thread = thread.clone();
new_thread.reduce(&table, &mut syntax, *count, *rule, Some(*kind));
threads.push(new_thread);
// Mark that we might have to trim the syntax
// tree because we might not use this
// reduction.
maybe_pushed_garbage = true;
}
TokenAction::ReduceAnonymous(rule, count) => {
// Let's pretend that we're done with the
// current rule and see what happens.
let mut new_thread = thread.clone();
new_thread.reduce(&table, &mut syntax, *count, *rule, None);
threads.push(new_thread);
}
TokenAction::Shift(state) => {
// Let's just pretend the current token
// matched this thing that we were looking
// for, and shift it anyway, and see what
// happens.
//
// This represents an expansion of the search
// space and so we only want to do it if we
// haven't reached our error limit yet.
if thread.error_count < THREAD_ERROR_LIMIT {
let mut new_thread = thread.clone();
new_thread.shift(*state, current_token_ref);
next_threads.push(new_thread);
}
}
TokenAction::Accept => accepted_threads.push(thread.clone()),
}
}
// Let's try to process the *next* token and see what
// happens with this same thread, unless we're giving up
// on the thread.
if thread.error_count < THREAD_ERROR_LIMIT {
next_threads.push(thread);
}
}
}
}
// Drain all the next_threads into the current stack and start again
// on the next token!
threads.append(&mut next_threads);
}
// OK no more threads, we're done. In theory at this point we should
// penalize all accepted threads for remaining tokens but if we've got no
// more threads and there are remaining tokens then they all hit their
// error limit and are basically equivalent. (Why penalize all threads by
// the same amount?)
//
// Let's just go through all the threads that "accepted" and pick the one
// with the highest score that also wound up with a named tree at the top.
let mut best_score = 0;
for thread in accepted_threads {
if thread.score >= best_score {
if let Some((_, StackEntry::Tree(tr))) = thread.stack.last() {
syntax.root = Some(*tr);
best_score = thread.score + 1;
}
}
}
// Now, our syntax tree might have errors in it, and if it does we might
// have pushed trees that we have no interest in ever seeing ever again.
// That means that we need to rewrite the tree starting from the root, to
// make sure that the trees in the syntax tree are for real for real.
if maybe_pushed_garbage {
let mut valid = HashSet::new();
let mut stack = Vec::new();
if let Some(tr) = &syntax.root {
stack.push(*tr);
}
while let Some(tr) = stack.pop() {
valid.insert(tr);
for x in syntax[tr].child_trees() {
stack.push(x);
}
}
for tr in syntax.trees.iter_mut() {
if !valid.contains(&tr.self_ref) {
tr.kind = TreeKind::Ignore;
}
}
}
(Rc::new(syntax), Rc::new(tokens.lines()))
}

File diff suppressed because it is too large Load diff

View file

@ -1,2 +0,0 @@
[tool.black]
line-length=100

View file

@ -1,40 +0,0 @@
import io
import parser
def generate_rust_parser(output: io.TextIOBase, table: list[dict[str, parser.Action]]):
    """Generate Rust source for a parser from an action table.

    Args:
        output: destination stream for the generated Rust code.
            NOTE(review): `output` is currently never written to — the
            generated lines are only accumulated; this function looks
            unfinished (see the trailing `pass`).
        table: list of parser states; each state maps a token name to a
            parser.Action tuple. Assumes action[0] is the action kind
            (e.g. "reduce") and action[1] is the tree-kind name — TODO
            confirm against parser.Action's definition.
    """
    lines = []
    # Collect the distinct named tree kinds produced by reduce actions;
    # kinds starting with "_" are anonymous and excluded from the enum.
    tree_kinds = list(
        sorted(
            {
                action[1]
                for state in table
                for action in state.values()
                if action[0] == "reduce" and action[1][0] != "_"
            }
        )
    )
    # First, generate the treekind enumeration
    lines.extend(
        [
            "#[derive(Debug, Eq, PartialEq)]",
            "pub enum TreeKind {",
            "    Error,",
            "",
        ]
    )
    lines.extend(f"    {kind}," for kind in tree_kinds)
    lines.extend(
        [
            "}",
            "",
        ]
    )
    # Next generate the parse table
    # NOTE(review): unimplemented — nothing is appended here and `lines`
    # is never flushed to `output`.
    lines.extend([])
    pass

File diff suppressed because it is too large Load diff

View file

@ -1,5 +0,0 @@
//! Pretty-printing support: a small Wadler-style layout algebra
//! (`notation`) plus the printer that renders it (`print`).
mod notation;
mod print;
pub use notation::{flat, indent, nl, txt, Notation};
pub use print::pretty_print;

View file

@ -1,65 +0,0 @@
use std::ops::{BitAnd, BitOr};
use std::rc::Rc;
/// A layout document. Cheap to clone: this is a shared handle
/// (`Rc`) to the underlying `NotationInner` node.
#[derive(Debug, Clone)]
pub struct Notation(pub(crate) Rc<NotationInner>);
/// The node forms of a layout document. Constructed via the helper
/// functions (`nl`, `txt`, `flat`, `indent`) and the `&`/`|` operators.
#[derive(Debug, Clone)]
pub enum NotationInner {
    /// A line break (printer emits '\n' plus current indentation).
    Newline,
    /// Literal text along with its precomputed display width.
    Text(String, u32),
    /// Render the child with every `Choice` taking its left option.
    Flat(Notation),
    /// Render the child with indentation increased by the given amount.
    Indent(u32, Notation),
    /// Render the left child, then the right child.
    Concat(Notation, Notation),
    /// Render the left child if it fits (or we are flat), else the right.
    Choice(Notation, Notation),
}
/// Display a newline
/// (followed by the current indentation, which the printer emits).
pub fn nl() -> Notation {
    Notation(Rc::new(NotationInner::Newline))
}
/// Display text exactly as-is. The text should not contain a newline!
pub fn txt(s: impl ToString) -> Notation {
    // Precompute the display width once so the printer never has to
    // re-measure the text while fitting lines.
    let text = s.to_string();
    let display_width = unicode_width::UnicodeWidthStr::width(text.as_str()) as u32;
    Notation(Rc::new(NotationInner::Text(text, display_width)))
}
/// Use the leftmost option of every choice in the contained Notation.
/// If the contained Notation follows the recommendation of not
/// putting newlines in the left-most options of choices, then this
/// `flat` will be displayed all on one line.
pub fn flat(notation: Notation) -> Notation {
    Notation(Rc::new(NotationInner::Flat(notation)))
}
/// Increase the indentation level of the contained notation by the
/// given width. The indentation level determines the number of spaces
/// put after `Newline`s. (It therefore doesn't affect the first line
/// of a notation.)
pub fn indent(indent: u32, notation: Notation) -> Notation {
    Notation(Rc::new(NotationInner::Indent(indent, notation)))
}
// `a & b` concatenates two notations.
impl BitAnd<Notation> for Notation {
    type Output = Notation;
    /// Display both notations. The first character of the right
    /// notation immediately follows the last character of the
    /// left notation.
    fn bitand(self, other: Notation) -> Notation {
        Notation(Rc::new(NotationInner::Concat(self, other)))
    }
}
// `a | b` chooses between two layouts of the same content.
impl BitOr<Notation> for Notation {
    type Output = Notation;
    /// If inside a `flat`, _or_ the first line of the left notation
    /// fits within the required width, then display the left
    /// notation. Otherwise, display the right notation.
    fn bitor(self, other: Notation) -> Notation {
        Notation(Rc::new(NotationInner::Choice(self, other)))
    }
}

View file

@ -1,152 +0,0 @@
use super::notation::{Notation, NotationInner};
/// Render a notation to a string, trying to keep each line within
/// `printing_width` columns (lines may still exceed it when no
/// choice fits).
pub fn pretty_print(notation: &Notation, printing_width: u32) -> String {
    let mut printer = PrettyPrinter::new(notation, printing_width);
    printer.print()
}
/// A pending piece of work for the printer: a notation plus the
/// rendering context (indentation, flatness) it should be printed in.
#[derive(Debug, Clone, Copy)]
struct Chunk<'a> {
    // The notation to render.
    notation: &'a Notation,
    // Spaces to emit after each newline inside this chunk.
    indent: u32,
    // When true, every Choice inside takes its left (one-line) option.
    flat: bool,
}
impl<'a> Chunk<'a> {
    /// A copy of this chunk pointing at a different notation.
    fn with_notation(self, notation: &'a Notation) -> Chunk<'a> {
        Chunk { notation, ..self }
    }

    /// A copy of this chunk with its indentation increased by `indent`.
    fn indented(self, indent: u32) -> Chunk<'a> {
        Chunk {
            indent: self.indent + indent,
            ..self
        }
    }

    /// A copy of this chunk forced into flat (single-line) mode.
    fn flat(self) -> Chunk<'a> {
        Chunk { flat: true, ..self }
    }
}
/// The printer's working state while rendering a notation.
struct PrettyPrinter<'a> {
    /// Maximum line width that we'll try to stay within
    width: u32,
    /// Current column position
    col: u32,
    /// A stack of chunks to print. The _top_ of the stack is the
    /// _end_ of the vector, which represents the _earliest_ part
    /// of the document to print.
    chunks: Vec<Chunk<'a>>,
}
impl<'a> PrettyPrinter<'a> {
    /// Start a printer for `notation`, targeting `width` columns.
    fn new(notation: &'a Notation, width: u32) -> PrettyPrinter<'a> {
        let chunk = Chunk {
            notation,
            indent: 0,
            flat: false,
        };
        PrettyPrinter {
            width,
            col: 0,
            chunks: vec![chunk],
        }
    }
    /// Render all queued chunks to a string. Iterative (explicit stack)
    /// rather than recursive, so deep documents cannot overflow the
    /// call stack.
    fn print(&mut self) -> String {
        use NotationInner::*;
        let mut output = String::new();
        while let Some(chunk) = self.chunks.pop() {
            match chunk.notation.0.as_ref() {
                Text(text, width) => {
                    output.push_str(text);
                    self.col += width;
                }
                Newline => {
                    // A newline also emits the chunk's indentation.
                    output.push('\n');
                    for _ in 0..chunk.indent {
                        output.push(' ');
                    }
                    self.col = chunk.indent;
                }
                Flat(x) => self.chunks.push(chunk.with_notation(x).flat()),
                Indent(i, x) => self.chunks.push(chunk.with_notation(x).indented(*i)),
                Concat(x, y) => {
                    // Push right first so left is popped (printed) first.
                    self.chunks.push(chunk.with_notation(y));
                    self.chunks.push(chunk.with_notation(x));
                }
                Choice(x, y) => {
                    // Take the left option when flattened or when its
                    // first line fits in the remaining width.
                    if chunk.flat || self.fits(chunk.with_notation(x)) {
                        self.chunks.push(chunk.with_notation(x));
                    } else {
                        self.chunks.push(chunk.with_notation(y));
                    }
                }
            }
        }
        output
    }
    /// Would `chunk` (followed by the rest of the pending chunks) fit
    /// on the current line? Only looks ahead to the next newline.
    fn fits(&self, chunk: Chunk<'a>) -> bool {
        use NotationInner::*;
        let mut remaining = if self.col <= self.width {
            self.width - self.col
        } else {
            return false;
        };
        // `stack` holds speculative work; when it empties we continue
        // scanning the printer's real pending chunks (right to left).
        let mut stack = vec![chunk];
        let mut chunks = &self.chunks as &[Chunk];
        loop {
            let chunk = match stack.pop() {
                Some(chunk) => chunk,
                None => match chunks.split_last() {
                    None => return true,
                    Some((chunk, more_chunks)) => {
                        chunks = more_chunks;
                        *chunk
                    }
                },
            };
            match chunk.notation.0.as_ref() {
                // Reached the end of the line: everything fit.
                Newline => return true,
                Text(_text, text_width) => {
                    if *text_width <= remaining {
                        remaining -= *text_width;
                    } else {
                        return false;
                    }
                }
                Flat(x) => stack.push(chunk.with_notation(x).flat()),
                Indent(i, x) => stack.push(chunk.with_notation(x).indented(*i)),
                Concat(x, y) => {
                    stack.push(chunk.with_notation(y));
                    stack.push(chunk.with_notation(x));
                }
                Choice(x, y) => {
                    if chunk.flat {
                        stack.push(chunk.with_notation(x));
                    } else {
                        // Relies on the rule that for every choice
                        // `x | y`, the first line of `y` is no longer
                        // than the first line of `x`.
                        stack.push(chunk.with_notation(y));
                    }
                }
            }
        }
    }
}

View file

@ -1,73 +0,0 @@
use std::{path::PathBuf, rc::Rc};
use compiler::compile_module;
use program::{Module, Program, StandardModuleLoader};
use vm::{eval, Context};
pub mod compiler;
pub mod format;
pub mod parser;
pub mod program;
pub mod semantics;
pub mod tokens;
pub mod vm;
/// Compile `module` and all of its dependencies into `context`
/// (depth-first), running each module's init function once.
fn load_module(
    program: &Program,
    module: &Rc<Module>,
    context: &mut Context,
) -> Result<(), vm::VMError> {
    // Already installed? Nothing to do (also terminates cycles).
    if context.loaded(module.id()) {
        return Ok(());
    }
    let semantics = module.semantics();
    let compiled = compile_module(&semantics);
    context.set_module(compiled.clone());
    // Bring every dependency in before running this module's init.
    for dep in compiled.deps.iter() {
        if let Some(dep_module) = program.get_module(dep) {
            load_module(program, dep_module, context)?;
        }
    }
    eval(context, compiled.id, compiled.init, &[])?;
    Ok(())
}
/// Compile and initialize every module in `program`, stopping at the
/// first VM error.
pub fn compile_program(program: &Program, context: &mut Context) -> Result<(), vm::VMError> {
    program
        .modules()
        .try_for_each(|module| load_module(program, module, context))
}
/// Load, semantically check, and execute the program rooted at `file`.
///
/// All load/check/runtime errors are reported on stderr; nothing is
/// executed if loading or checking failed.
pub fn process_file(file: &str) {
    let mut program = Program::new(Box::new(StandardModuleLoader::new(PathBuf::from("."))));
    // Load the program text
    let (errors, _) = match program.load_module(file) {
        Ok(r) => r,
        Err(_) => {
            eprintln!("Error loading module");
            return;
        }
    };
    // OK now there might be semantic errors or whatnot.
    // (Idiom fix: `is_empty()` instead of `len() > 0`.)
    if !errors.is_empty() {
        for e in errors {
            eprintln!("{file}: {}:{}: {}", e.start.0, e.start.1, e.message);
        }
        return;
    }
    // This is weird, why run the init function as main? Maybe just run main?
    let mut context = Context::new();
    if let Err(e) = compile_program(&program, &mut context) {
        eprintln!("{:?}", e);
    }
}

View file

@ -1,8 +0,0 @@
use std::env;
/// Entry point: treat every command-line argument (after the program
/// name) as a fine source file and process it.
pub fn main() {
    // Iterate the args directly instead of collecting into a Vec —
    // avoids the allocation and the `&args[1..]` slice.
    for arg in env::args().skip(1) {
        fine::process_file(&arg);
    }
}

File diff suppressed because it is too large Load diff

View file

@ -1,241 +0,0 @@
use std::{collections::HashMap, fs, path::PathBuf, rc::Rc};
use crate::parser::parse;
use crate::semantics::{check, Error, ModuleId, ModuleTable, Semantics};
/// The "source" of a module. The idea is that eventually different module
/// loaders could, like, provide us "external" modules or something.
///
/// For now we're only dealing with source code-based modules, though.
pub enum ModuleSource {
    /// This module works based on source text which needs to be parsed and
    /// analyzed and whatnot.
    SourceText(String),
}
/// Errors produced while resolving or loading a module.
#[derive(Debug)]
pub enum ModuleLoadError {
    /// Some IO error occurred while loading the module.
    /// Carries the module name and the underlying error.
    IO(String, std::io::Error),
}
/// `ModuleLoader` is the trait to implement if you can load modules. Loading
/// modules has two parts: first, to resolve an import into a full, canonical
/// module name, and second, to load a module based on its full, canonical
/// module name.
///
/// A full, canonical module name can be whatever you want it to be. By
/// default it's the canonical path of a file on disk, and
/// `normalize_module_name` resolves relative module names into canonical
/// paths based on the name of the module doing the importing.
pub trait ModuleLoader {
    /// Convert a module name as seen in a fine program to a full, canonical
    /// module name, whatever that means to you. The full, canonical name of
    /// the module that contains the import is provided to you, so that you
    /// can (for example) use it to resolve relative paths into absolute
    /// ones. (The source name is `None` if this is some kind of root
    /// module.)
    fn normalize_module_name(&self, source: Option<&str>, name: String) -> String;
    /// Load a module based on the full, canonical name of a module. (You
    /// provided us with this name in a previous call to
    /// normalize_module_name, so don't pretend you don't understand it.)
    fn load_module(&self, name: &String) -> Result<ModuleSource, ModuleLoadError>;
}
/// The standard implementation of a module loader, which loads files from
/// the file system.
pub struct StandardModuleLoader {
    // All relative module names are resolved against this directory.
    base_path: PathBuf,
}
impl StandardModuleLoader {
    /// Construct a new standard module loader that loads files relative to
    /// the provided path.
    pub fn new(base_path: PathBuf) -> Self {
        StandardModuleLoader { base_path }
    }
}
impl ModuleLoader for StandardModuleLoader {
    /// Resolve `name` to a canonical filesystem path where possible;
    /// if canonicalization fails (or the path isn't valid UTF-8), fall
    /// back to the raw name unchanged.
    fn normalize_module_name(&self, source: Option<&str>, name: String) -> String {
        // NOTE(review): `source` is joined as-is; if it is a file path
        // (not a directory) the join yields a non-existent path and we
        // fall back to `name` — confirm this is the intended behavior.
        let candidate = self.base_path.join(source.unwrap_or("")).join(&name);
        std::fs::canonicalize(&candidate)
            .ok()
            .and_then(|p| p.into_os_string().into_string().ok())
            .unwrap_or(name)
    }
    /// Read the module's source text from disk.
    fn load_module(&self, name: &String) -> Result<ModuleSource, ModuleLoadError> {
        fs::read_to_string(name)
            .map(ModuleSource::SourceText)
            .map_err(|e| ModuleLoadError::IO(name.clone(), e))
    }
}
/// A loaded module: its identity plus its analyzed semantics.
pub struct Module {
    id: ModuleId,
    semantics: Rc<Semantics>,
}
impl Module {
    /// This module's unique id within the program.
    pub fn id(&self) -> ModuleId {
        self.id
    }
    /// Shared handle to this module's semantic analysis.
    pub fn semantics(&self) -> Rc<Semantics> {
        self.semantics.clone()
    }
}
/// A whole program: the set of loaded modules plus the loader used to
/// bring in new ones.
pub struct Program {
    // Next id handed out by `load_module`.
    next_module_id: u64,
    // Modules keyed by canonical name.
    modules: HashMap<String, Rc<Module>>,
    // The same modules keyed by id, for id-based lookup.
    modules_by_id: HashMap<ModuleId, Rc<Module>>,
    loader: Box<dyn ModuleLoader>,
}
impl Program {
    /// Create an empty program that resolves modules via `loader`.
    pub fn new(loader: Box<dyn ModuleLoader>) -> Self {
        Program {
            next_module_id: 0,
            modules: HashMap::new(),
            modules_by_id: HashMap::new(),
            loader,
        }
    }
    /// Iterate over all loaded modules (unordered).
    pub fn modules(&self) -> impl Iterator<Item = &Rc<Module>> {
        self.modules.values()
    }
    /// Look up a loaded module by id.
    pub fn get_module(&self, id: &ModuleId) -> Option<&Rc<Module>> {
        self.modules_by_id.get(id)
    }
    /// Load the module `name` and (transitively) everything it imports,
    /// then re-check the whole program. Returns the accumulated semantic
    /// errors of the newly checked modules and the requested module.
    pub fn load_module(
        &mut self,
        name: &str,
    ) -> Result<(Vec<Rc<Error>>, Rc<Module>), ModuleLoadError> {
        // A module that has been parsed but not yet wired into the
        // program's tables.
        struct PendingModule {
            mid: ModuleId,
            imports: Vec<(String, String)>, // (raw, normalized)
            semantics: Rc<Semantics>,
        }
        let mut init_pending = HashMap::new();
        // Worklist of normalized module names still to load.
        let mut names = Vec::new();
        let name = self.loader.normalize_module_name(None, name.to_string());
        names.push(name.clone());
        let mut id_assign = self.next_module_id;
        // Phase 1: load and parse every reachable module.
        while let Some(name) = names.pop() {
            if self.modules.contains_key(&name) || init_pending.contains_key(&name) {
                // Either already loaded or pending load.
                continue;
            }
            // TODO: Errors here are bad! Remember, run everything!
            match self.loader.load_module(&name)? {
                ModuleSource::SourceText(source) => {
                    let mid = ModuleId::from(id_assign);
                    id_assign += 1;
                    let source: Rc<str> = source.into();
                    let (tree, lines) = parse(&source);
                    let semantics = Rc::new(Semantics::new(
                        mid,
                        name.clone().into(),
                        source,
                        tree,
                        lines,
                    ));
                    // Queue this module's imports for loading.
                    let mut imports = Vec::new();
                    for import in semantics.imports() {
                        let normalized = self
                            .loader
                            .normalize_module_name(Some(&name), import.clone());
                        names.push(normalized.clone());
                        imports.push((import, normalized));
                    }
                    init_pending.insert(
                        name,
                        PendingModule {
                            semantics,
                            mid,
                            imports,
                        },
                    );
                }
            }
        }
        // Phase 2: resolve each pending module's imports to module ids.
        for (_, pending) in init_pending.iter() {
            let mut import_table = HashMap::new();
            for (import, normalized) in pending.imports.iter() {
                // NOTE: We look up the load(ed|ing) module here by
                // normalized name, because that's how we track it...
                let target = if let Some(module) = self.modules.get(&*normalized) {
                    module.id
                } else {
                    let other = init_pending.get(&*normalized).unwrap();
                    other.mid
                };
                // ...but we set it into the import table here with the name
                // that the source code used, because that's how the
                // semantics needs to find it.
                import_table.insert(import.clone(), target);
            }
            // Now tell the semantics object about its import table.
            pending.semantics.set_imports(import_table);
        }
        // Phase 3: commit the pending modules into the program tables.
        let mut to_check = Vec::new();
        for (name, pending) in init_pending.into_iter() {
            to_check.push(pending.semantics.clone());
            let module = Rc::new(Module {
                id: pending.mid,
                semantics: pending.semantics,
            });
            self.modules.insert(name, module.clone());
            self.modules_by_id.insert(pending.mid, module);
        }
        self.next_module_id = id_assign;
        // Rebuild the module map for everybody.
        let mut module_table = ModuleTable::new();
        for m in self.modules.values() {
            // NOTE: self.modules keeps all the semantics alive; but to avoid
            // cycles the module table itself contains weak pointers.
            module_table.set_module(m.id, Rc::downgrade(&m.semantics));
        }
        let module_table = Rc::new(module_table);
        for m in self.modules.values() {
            m.semantics.set_module_table(module_table.clone());
        }
        // Check and report errors. (After the module map is set, so imports
        // can be resolved correctly!)
        let mut errors = Vec::new();
        for semantics in to_check {
            check(&semantics);
            errors.append(&mut semantics.snapshot_errors());
        }
        let result = self.modules.get(&name).unwrap().clone();
        Ok((errors, result))
    }
}

File diff suppressed because it is too large Load diff

View file

@ -1,690 +0,0 @@
/// All token kinds the lexer can produce, including trivia
/// (`Whitespace`, `Comment`) and the synthetic `EOF`/`Error` kinds.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum TokenKind {
    EOF,
    Error,
    Whitespace,
    Comment,
    // Punctuation and operators.
    LeftBrace,    // TODO: LeftCurly
    RightBrace,   // TODO: RightCurly
    LeftBracket,  // TODO: LeftSquare
    RightBracket, // TODO: RightSquare
    LeftParen,
    RightParen,
    Comma,
    Dot,
    Minus,
    Plus,
    Semicolon,
    Slash,
    Star,
    Arrow,
    Colon,
    Bang,
    BangEqual,
    Equal,
    EqualEqual,
    Greater,
    GreaterEqual,
    Less,
    LessEqual,
    // Literals and names.
    Identifier,
    String,
    Number,
    // Keywords.
    And,
    As,
    Async,
    Await,
    Class,
    Else,
    Export,
    False,
    For,
    From,
    Fun,
    If,
    Import,
    In,
    Is,
    Let,
    Match,
    New,
    Or,
    Return,
    Select,
    Selff, // the `self` keyword (`Self`/`self` are reserved in Rust)
    True,
    Underscore,
    While,
    Yield,
}
// NOTE: Tokens are kinda big (like 40 bytes?) and AFAICT the only way to go
// smaller would be to find some other way to represent the error in an
// error token, but I'm kinda unwilling to do that.
//
/// A single lexed token: kind plus its byte range in the source.
/// Error tokens carry their message inline instead of source text.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Token {
    pub kind: TokenKind,
    // Byte offset of the first character.
    start: usize,
    // Byte offset one past the last character.
    end: usize,
    // Present only for `TokenKind::Error`.
    error: Option<Box<str>>,
}
impl Token {
pub fn new(kind: TokenKind, start: usize, end: usize) -> Self {
Token {
kind,
start,
end,
error: None,
}
}
pub fn error(start: usize, end: usize, message: String) -> Self {
Token {
kind: TokenKind::Error,
start,
end,
error: Some(message.into()),
}
}
pub fn start(&self) -> usize {
self.start
}
pub fn end(&self) -> usize {
self.end
}
pub fn len(&self) -> usize {
self.end() - self.start()
}
pub fn as_str<'a, 'b>(&'a self, source: &'b str) -> &'a str
where
'b: 'a,
{
if let Some(error) = &self.error {
&error
} else {
&source[self.start()..self.end()]
}
}
}
/// Maps byte offsets to (line, column) positions using the recorded
/// byte positions of every newline, kept in ascending order.
pub struct Lines {
    newlines: Vec<usize>,
}
impl Lines {
    /// An empty line map (no newlines recorded yet).
    fn new() -> Self {
        Lines {
            newlines: Vec::new(),
        }
    }
    /// Record the position of a newline in the source. Positions must
    /// be added in ascending order for `position` to work.
    pub fn add_line(&mut self, pos: usize) {
        self.newlines.push(pos)
    }
    /// Return the position of the given token as a (line, column) pair. By
    /// convention, lines are 1-based and columns are 0-based.
    pub fn token_position(&self, token: &Token) -> (usize, usize) {
        self.position(token.start)
    }
    /// Return the position of the given character offset as a (line,column)
    /// pair. By convention, lines are 1-based and columns are 0-based.
    pub fn position(&self, offset: usize) -> (usize, usize) {
        // Index of the first newline at or after `offset`; both the
        // exact-hit and not-found cases want the same index.
        let line_index = self.newlines.binary_search(&offset).unwrap_or_else(|i| i);
        // The current line starts one past the previous newline (or at 0
        // when we're on the first line).
        let line_start = if line_index == 0 {
            0
        } else {
            self.newlines[line_index - 1] + 1
        };
        (line_index + 1, offset - line_start)
    }
}
/// The lexer: a one-character-lookahead scanner over `source` that
/// also records line starts as it goes.
pub struct Tokens<'a> {
    source: &'a str,
    // Iterator over the characters *after* `next_char`.
    chars: std::str::CharIndices<'a>,
    // One character of lookahead; `None` at end of input.
    next_char: Option<(usize, char)>,
    // Newline positions observed so far.
    lines: Lines,
}
impl<'a> Tokens<'a> {
pub fn new(source: &'a str) -> Self {
let mut result = Tokens {
source,
chars: source.char_indices(),
next_char: None,
lines: Lines::new(),
};
result.advance(); // Prime the pump
result
}
pub fn source(&self) -> &'a str {
self.source
}
pub fn lines(self) -> Lines {
self.lines
}
/// Return the position of the given token as a (line, column) pair. See
/// `Lines::token_position` for more information about the range, etc.
pub fn token_position(&self, token: &Token) -> (usize, usize) {
self.lines.token_position(token)
}
fn token(&self, start: usize, kind: TokenKind) -> Token {
Token::new(kind, start, self.pos())
}
fn number(&mut self, start: usize) -> Token {
// First, the main part.
loop {
if !self.matches_digit() {
break;
}
}
// Now the fraction part.
// The thing that is bad here is that this is speculative...
let backup = self.chars.clone();
if self.matches('.') {
let mut saw_digit = false;
loop {
if self.matches('_') {
} else if self.matches_next(|c| c.is_ascii_digit()) {
saw_digit = true;
} else {
break;
}
}
if saw_digit {
// OK we're good to here! Check the scientific notation.
if self.matches('e') || self.matches('E') {
if self.matches('+') || self.matches('-') {}
let mut saw_digit = false;
loop {
if self.matches('_') {
} else if self.matches_next(|c| c.is_ascii_digit()) {
saw_digit = true;
} else {
break;
}
}
if !saw_digit {
// This is just a broken number.
let slice = &self.source[start..self.pos()];
return Token::error(
start,
self.pos(),
format!("Invalid floating-point literal: {slice}"),
);
}
}
} else {
// Might be accessing a member on an integer.
self.chars = backup;
}
}
self.token(start, TokenKind::Number)
}
fn string(&mut self, start: usize, delimiter: char) -> Token {
while !self.matches(delimiter) {
if self.eof() {
return Token::error(
start,
self.pos(),
"Unterminated string constant".to_string(),
);
}
if self.matches('\\') {
self.advance();
} else {
self.advance();
}
}
self.token(start, TokenKind::String)
}
fn identifier_token_kind(ident: &str) -> TokenKind {
match ident.chars().nth(0).unwrap() {
'a' => {
if ident == "and" {
return TokenKind::And;
}
if ident == "as" {
return TokenKind::As;
}
if ident == "async" {
return TokenKind::Async;
}
if ident == "await" {
return TokenKind::Await;
}
}
'c' => {
if ident == "class" {
return TokenKind::Class;
}
}
'e' => {
if ident == "else" {
return TokenKind::Else;
}
if ident == "export" {
return TokenKind::Export;
}
}
'f' => {
if ident == "false" {
return TokenKind::False;
}
if ident == "for" {
return TokenKind::For;
}
if ident == "from" {
return TokenKind::From;
}
if ident == "fun" {
return TokenKind::Fun;
}
}
'i' => {
if ident == "if" {
return TokenKind::If;
}
if ident == "import" {
return TokenKind::Import;
}
if ident == "in" {
return TokenKind::In;
}
if ident == "is" {
return TokenKind::Is;
}
}
'l' => {
if ident == "let" {
return TokenKind::Let;
}
}
'm' => {
if ident == "match" {
return TokenKind::Match;
}
}
'n' => {
if ident == "new" {
return TokenKind::New;
}
}
'o' => {
if ident == "or" {
return TokenKind::Or;
}
}
'r' => {
if ident == "return" {
return TokenKind::Return;
}
}
's' => {
if ident == "select" {
return TokenKind::Select;
}
if ident == "self" {
return TokenKind::Selff;
}
}
't' => {
if ident == "true" {
return TokenKind::True;
}
}
'w' => {
if ident == "while" {
return TokenKind::While;
}
}
'y' => {
if ident == "yield" {
return TokenKind::Yield;
}
}
'_' => {
if ident == "_" {
return TokenKind::Underscore;
}
}
_ => (),
}
TokenKind::Identifier
}
fn identifier(&mut self, start: usize) -> Token {
loop {
// TODO: Use unicode identifier classes instead
if !self.matches_next(|c| c.is_ascii_alphanumeric() || c == '_') {
break;
}
}
let ident = &self.source[start..self.pos()];
let kind = Self::identifier_token_kind(ident);
Token::new(kind, start, self.pos())
}
fn matches(&mut self, ch: char) -> bool {
if let Some((_, next_ch)) = self.next_char {
if next_ch == ch {
self.advance();
return true;
}
}
false
}
fn matches_next<F>(&mut self, f: F) -> bool
where
F: FnOnce(char) -> bool,
{
if let Some((_, next_ch)) = self.next_char {
if f(next_ch) {
self.advance();
return true;
}
}
false
}
fn matches_digit(&mut self) -> bool {
self.matches('_') || self.matches_next(|c| c.is_ascii_digit())
}
fn advance(&mut self) -> Option<(usize, char)> {
let result = self.next_char;
self.next_char = self.chars.next();
result
}
fn pos(&self) -> usize {
match self.next_char {
Some((p, _)) => p,
None => self.source.len(),
}
}
fn eof(&self) -> bool {
self.next_char.is_none()
}
fn whitespace(&mut self, pos: usize) -> Token {
while let Some((pos, ch)) = self.next_char {
if ch == '\n' {
self.lines.add_line(pos);
} else if !ch.is_whitespace() {
break;
}
self.advance();
}
self.token(pos, TokenKind::Whitespace)
}
fn comment(&mut self, pos: usize) -> Token {
while let Some((_, ch)) = self.next_char {
if ch == '\n' {
break;
}
self.advance();
}
self.token(pos, TokenKind::Comment)
}
pub fn next(&mut self) -> Token {
let (pos, c) = match self.advance() {
Some((p, c)) => (p, c),
None => return self.token(self.source.len(), TokenKind::EOF),
};
match c {
' ' | '\t' | '\r' => self.whitespace(pos),
'\n' => {
self.lines.add_line(pos);
self.whitespace(pos)
}
'{' => self.token(pos, TokenKind::LeftBrace),
'}' => self.token(pos, TokenKind::RightBrace),
'[' => self.token(pos, TokenKind::LeftBracket),
']' => self.token(pos, TokenKind::RightBracket),
'(' => self.token(pos, TokenKind::LeftParen),
')' => self.token(pos, TokenKind::RightParen),
',' => self.token(pos, TokenKind::Comma),
'.' => self.token(pos, TokenKind::Dot),
'-' => {
if self.matches('>') {
self.token(pos, TokenKind::Arrow)
} else {
self.token(pos, TokenKind::Minus)
}
}
'+' => self.token(pos, TokenKind::Plus),
':' => self.token(pos, TokenKind::Colon),
';' => self.token(pos, TokenKind::Semicolon),
'/' => {
if self.matches('/') {
self.comment(pos)
} else {
self.token(pos, TokenKind::Slash)
}
}
'*' => self.token(pos, TokenKind::Star),
'!' => {
if self.matches('=') {
self.token(pos, TokenKind::BangEqual)
} else {
self.token(pos, TokenKind::Bang)
}
}
'=' => {
if self.matches('=') {
self.token(pos, TokenKind::EqualEqual)
} else {
self.token(pos, TokenKind::Equal)
}
}
'>' => {
if self.matches('=') {
self.token(pos, TokenKind::GreaterEqual)
} else {
self.token(pos, TokenKind::Greater)
}
}
'<' => {
if self.matches('=') {
self.token(pos, TokenKind::LessEqual)
} else {
self.token(pos, TokenKind::Less)
}
}
'\'' => self.string(pos, '\''),
'"' => self.string(pos, '"'),
_ => {
if c.is_ascii_digit() {
self.number(pos)
} else if c.is_ascii_alphabetic() || c == '_' {
self.identifier(pos)
} else {
Token::error(pos, self.pos(), format!("Unexpected character '{c}'"))
}
}
}
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use pretty_assertions::assert_eq;
    // Lex `input` to EOF (dropping whitespace) and compare both the
    // token stream and the tokens' text against the expectations.
    fn test_tokens_impl(input: &str, expected: Vec<Token>, expected_text: Vec<String>) {
        let mut result = Vec::new();
        let mut result_text = Vec::new();
        let mut tokens = Tokens::new(input);
        let mut is_eof = false;
        while !is_eof {
            let token = tokens.next();
            is_eof = token.kind == TokenKind::EOF;
            if token.kind == TokenKind::Whitespace {
                continue;
            }
            result_text.push(token.as_str(input).to_string());
            result.push(token);
        }
        assert_eq!(expected, result);
        assert_eq!(expected_text, result_text);
    }
    // Declares a #[test] from (offset, kind, text) triples; an EOF
    // token is appended automatically.
    macro_rules! test_tokens {
        ($name:ident, $input:expr, $($s:expr),+) => {
            #[test]
            fn $name() {
                use TokenKind::*;
                let mut expected: Vec<Token> = (vec![$($s),*])
                    .into_iter()
                    .map(|t| Token::new(t.1, t.0, t.0 + t.2.len()))
                    .collect();
                expected.push(Token::new(TokenKind::EOF, $input.len(), $input.len()));
                let mut expected_text: Vec<_> = (vec![$($s),*])
                    .into_iter()
                    .map(|t| t.2.to_string())
                    .collect();
                expected_text.push("".to_string());
                test_tokens_impl($input, expected, expected_text);
            }
        }
    }
    test_tokens!(
        numbers,
        "1 1.0 1.2e7 2.3e+7 3.3E-06 7_6 8.0e_8",
        (0, Number, "1"),
        (2, Number, "1.0"),
        (6, Number, "1.2e7"),
        (12, Number, "2.3e+7"),
        (19, Number, "3.3E-06"),
        (27, Number, "7_6"),
        (31, Number, "8.0e_8")
    );
    test_tokens!(
        identifiers,
        "asdf x _123 a_23 x3a and or yield async await class else false for from",
        (0, Identifier, "asdf"),
        (5, Identifier, "x"),
        (7, Identifier, "_123"),
        (12, Identifier, "a_23"),
        (17, Identifier, "x3a"),
        (21, And, "and"),
        (25, Or, "or"),
        (28, Yield, "yield"),
        (34, Async, "async"),
        (40, Await, "await"),
        (46, Class, "class"),
        (52, Else, "else"),
        (57, False, "false"),
        (63, For, "for"),
        (67, From, "from")
    );
    test_tokens!(
        more_keywords,
        "fun if import let return select self true while truewhile new",
        (0, Fun, "fun"),
        (4, If, "if"),
        (7, Import, "import"),
        (14, Let, "let"),
        (18, Return, "return"),
        (25, Select, "select"),
        (32, Selff, "self"),
        (37, True, "true"),
        (42, While, "while"),
        (48, Identifier, "truewhile"),
        (58, New, "new")
    );
    test_tokens!(
        more_more_keywords,
        "in is match _ as export",
        (0, In, "in"),
        (3, Is, "is"),
        (6, Match, "match"),
        (12, Underscore, "_"),
        (14, As, "as"),
        (17, Export, "export")
    );
    test_tokens!(
        strings,
        r#"'this is a string that\'s great!\r\n' "foo's" 'bar"s' "#,
        (0, String, r#"'this is a string that\'s great!\r\n'"#),
        (38, String, r#""foo's""#),
        (46, String, "'bar\"s'")
    );
    test_tokens!(
        symbols,
        "{ } ( ) [ ] . ! != < <= > >= = == , - -> + * / ; :",
        (0, LeftBrace, "{"),
        (2, RightBrace, "}"),
        (4, LeftParen, "("),
        (6, RightParen, ")"),
        (8, LeftBracket, "["),
        (10, RightBracket, "]"),
        (12, Dot, "."),
        (14, Bang, "!"),
        (16, BangEqual, "!="),
        (19, Less, "<"),
        (21, LessEqual, "<="),
        (24, Greater, ">"),
        (26, GreaterEqual, ">="),
        (29, Equal, "="),
        (31, EqualEqual, "=="),
        (34, Comma, ","),
        (36, Minus, "-"),
        (38, Arrow, "->"),
        (41, Plus, "+"),
        (43, Star, "*"),
        (45, Slash, "/"),
        (47, Semicolon, ";"),
        (49, Colon, ":")
    );
}

View file

@ -1,825 +0,0 @@
use core::fmt;
use std::cell::{Cell, RefCell};
use std::collections::HashMap;
use std::rc::Rc;
use crate::compiler::{
CompiledModule, Export, Function, Instruction, EXTERN_BUILTIN_LIST_GET_ITERATOR,
EXTERN_BUILTIN_LIST_ITERATOR_NEXT, EXTERN_BUILTIN_NOOP,
};
use crate::semantics::{ModuleId, Type};
use thiserror::Error;
/// Every failure the VM can report. Most variants are internal
/// invariant violations; a few (e.g. `DivideByZero`) are user errors.
#[derive(Error, Debug)]
pub enum VMErrorCode {
    #[error("code panic (syntax or semantic error): {0}")]
    Panic(Rc<str>),
    #[error("internal error: stack underflow")]
    StackUnderflow,
    #[error("internal error: stack type mismatch: {0:?} is not {1:?}")]
    StackTypeMismatch(StackValue, Type),
    // TODO: This one is *not* like the others! Distinguish between internal
    // errors and user errors?
    #[error("divide by zero")]
    DivideByZero,
    #[error("internal error: argument {0} out of range")]
    ArgumentOutOfRange(usize),
    #[error("internal error: global {0} out of range")]
    GlobalOutOfRange(usize),
    #[error("internal error: local {0} out of range")]
    LocalOutOfRange(usize),
    #[error("internal error: string {0} out of range")]
    StringOutOfRange(usize),
    #[error("internal error: function {0} out of range")]
    FunctionOutOfRange(usize),
    #[error("internal error: stack type mismatch ({0:?} is not function)")]
    StackExpectedFunction(StackValue),
    #[error("internal error: stack type mismatch ({0:?} is not object)")]
    StackExpectedObject(StackValue),
    #[error("internal error: stack type mismatch ({0:?} is not list)")]
    StackExpectedList(StackValue),
    #[error("internal error: stack type mismatch ({0:?} is not list iterator)")]
    StackExpectedListIterator(StackValue),
    #[error("internal error: slot {0} was out of range for object (type {1} with {2} slots)")]
    SlotOutOfRange(usize, Rc<str>, usize),
    #[error("internal error: the extern function with ID {0} was not registered")]
    UnregisteredExternFunction(usize),
    #[error("the requested export was not found: {0}")]
    ExportNotFound(String),
    #[error("the requested export is not a function: {0}")]
    ExportNotFunction(String),
    #[error("a module with id {0} has not been loaded")]
    ModuleNotFound(ModuleId),
}
/// A VM failure: the error code plus the call stack captured at the
/// point of failure.
#[derive(Debug)]
pub struct VMError {
    pub code: VMErrorCode,
    pub stack: Box<[Frame]>,
}
// Internal shorthand; error codes are wrapped into `VMError` (with a
// stack trace) at the VM's outer boundary.
type Result<T> = std::result::Result<T, VMErrorCode>;
/// A runtime object instance: its class identity plus mutable slots.
#[derive(Clone, Debug)]
pub struct Object {
    // Class name, kept for error messages.
    name: Rc<str>,
    // Identifies the class this object was instantiated from.
    class_id: i64,
    // Slot storage; RefCell gives interior mutability behind Rc.
    values: RefCell<Box<[StackValue]>>,
}
impl Object {
    /// Read slot `index`, or report `SlotOutOfRange` (with the class
    /// name and slot count) when the index is invalid.
    pub fn get_slot(&self, index: usize) -> Result<StackValue> {
        let values = self.values.borrow();
        values
            .get(index)
            .cloned()
            .ok_or_else(|| VMErrorCode::SlotOutOfRange(index, self.name.clone(), values.len()))
    }
}
/// Iteration state over a runtime list; the cursor lives in a `Cell`
/// so it can advance behind a shared `Rc`.
#[derive(Clone, Debug)]
pub struct ListIterator {
    list: Rc<Vec<StackValue>>,
    index: Cell<usize>,
}
/// A callable value: either bytecode (a function within a loaded
/// module) or a host function registered by id.
#[derive(Clone)]
pub enum FuncValue {
    Function(Rc<RuntimeModule>, Rc<Function>),
    ExternFunction(usize),
}
impl fmt::Debug for FuncValue {
    /// Debug-format the callable: module/function for bytecode, id for
    /// externs.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            // Fixed typo in the output: was "fuction".
            FuncValue::Function(m, func) => write!(f, "function #{:?}:{:?}", m, func),
            FuncValue::ExternFunction(i) => write!(f, "external {i}"),
        }
    }
}
/// A value on the VM operand stack (and in locals, args, and globals).
#[derive(Clone, Debug)]
pub enum StackValue {
    Nothing,
    Bool(bool),
    Float(f64),
    Int(i64),
    String(Rc<str>),
    // Boxed to keep the enum small: FuncValue is two Rc pointers wide.
    Function(Box<FuncValue>),
    Object(Rc<Object>),
    List(Rc<Vec<StackValue>>),
    ListIterator(Rc<ListIterator>),
}
impl StackValue {
    /// True when this value is an object whose class id equals `id`.
    pub fn is_object(&self, id: i64) -> bool {
        matches!(self, StackValue::Object(o) if o.class_id == id)
    }
    /// True when this value is a float.
    pub fn is_float(&self) -> bool {
        matches!(self, StackValue::Float(_))
    }
    /// True when this value is `Nothing`.
    pub fn is_nothing(&self) -> bool {
        matches!(self, StackValue::Nothing)
    }
    /// True when this value is a bool.
    pub fn is_bool(&self) -> bool {
        matches!(self, StackValue::Bool(_))
    }
    /// True when this value is a string.
    pub fn is_string(&self) -> bool {
        matches!(self, StackValue::String(_))
    }
}
/// One activation record of the interpreter.
#[derive(Debug)]
pub struct Frame {
    // Module the executing function belongs to (resolves globals/functions).
    module: Rc<RuntimeModule>,
    // The function being executed.
    func: Rc<Function>,
    // Call arguments, indexed by LoadArgument/StoreArgument.
    args: Vec<StackValue>,
    // Local slots, indexed by LoadLocal/StoreLocal.
    locals: Vec<StackValue>,
    // The operand stack.
    stack: Vec<StackValue>,
    // Saved program counter: the resume point while a callee is running.
    pc: usize,
}
impl Frame {
    /// Build a frame for a call to `func`: arguments bound, all locals
    /// initialised to `Nothing`, empty operand stack, pc at 0.
    fn from_function(module: Rc<RuntimeModule>, func: Rc<Function>, args: Vec<StackValue>) -> Self {
        let locals = vec![StackValue::Nothing; func.locals()];
        Frame {
            module,
            func,
            args,
            locals,
            stack: Vec::new(),
            pc: 0,
        }
    }
    pub fn module(&self) -> &Rc<RuntimeModule> {
        &self.module
    }
    pub fn func(&self) -> &Rc<Function> {
        &self.func
    }
    pub fn args(&self) -> &[StackValue] {
        &self.args
    }
    pub fn pc(&self) -> usize {
        self.pc
    }
    /// Pop the top of the operand stack.
    fn pop_value(&mut self) -> Result<StackValue> {
        self.stack.pop().ok_or(VMErrorCode::StackUnderflow)
    }
    // The typed pop_* helpers fail with a type-mismatch error when the top
    // of the stack holds the wrong variant.
    fn pop_bool(&mut self) -> Result<bool> {
        match self.pop_value()? {
            StackValue::Bool(v) => Ok(v),
            v => Err(VMErrorCode::StackTypeMismatch(v, Type::Bool)),
        }
    }
    fn pop_float(&mut self) -> Result<f64> {
        match self.pop_value()? {
            StackValue::Float(v) => Ok(v),
            v => Err(VMErrorCode::StackTypeMismatch(v, Type::F64)),
        }
    }
    fn pop_string(&mut self) -> Result<Rc<str>> {
        match self.pop_value()? {
            StackValue::String(v) => Ok(v),
            v => Err(VMErrorCode::StackTypeMismatch(v, Type::String)),
        }
    }
    fn pop_object(&mut self) -> Result<Rc<Object>> {
        match self.pop_value()? {
            StackValue::Object(v) => Ok(v),
            v => Err(VMErrorCode::StackExpectedObject(v)),
        }
    }
    fn pop_int(&mut self) -> Result<i64> {
        match self.pop_value()? {
            StackValue::Int(v) => Ok(v),
            v => Err(VMErrorCode::StackTypeMismatch(v, Type::I64)),
        }
    }
    fn push_value(&mut self, v: StackValue) {
        self.stack.push(v)
    }
    fn push_bool(&mut self, value: bool) {
        self.push_value(StackValue::Bool(value));
    }
    fn push_float(&mut self, value: f64) {
        self.push_value(StackValue::Float(value));
    }
    fn push_nothing(&mut self) {
        self.push_value(StackValue::Nothing);
    }
    fn push_string(&mut self, v: Rc<str>) {
        self.push_value(StackValue::String(v))
    }
    fn push_function(&mut self, m: Rc<RuntimeModule>, v: Rc<Function>) {
        self.push_value(StackValue::Function(Box::new(FuncValue::Function(m, v))));
    }
    fn push_extern_function(&mut self, v: usize) {
        self.push_value(StackValue::Function(Box::new(FuncValue::ExternFunction(v))));
    }
    fn push_object(&mut self, v: Rc<Object>) {
        self.push_value(StackValue::Object(v));
    }
    fn push_int(&mut self, v: i64) {
        self.push_value(StackValue::Int(v));
    }
    fn push_list(&mut self, value: Rc<Vec<StackValue>>) {
        self.push_value(StackValue::List(value));
    }
    /// Read argument `i` (cloned), or `ArgumentOutOfRange`.
    fn get_argument(&self, i: usize) -> Result<StackValue> {
        self.args
            .get(i)
            .cloned()
            .ok_or(VMErrorCode::ArgumentOutOfRange(i))
    }
    /// Read local `i` (cloned), or `LocalOutOfRange`.
    fn get_local(&self, i: usize) -> Result<StackValue> {
        self.locals
            .get(i)
            .cloned()
            .ok_or(VMErrorCode::LocalOutOfRange(i))
    }
    /// Read string-table entry `i` of the current function.
    fn get_string(&self, i: usize) -> Result<Rc<str>> {
        self.func
            .strings()
            .get(i)
            .cloned()
            .ok_or(VMErrorCode::StringOutOfRange(i))
    }
    /// Overwrite local `i`, or `LocalOutOfRange`.
    fn store_local(&mut self, i: usize, v: StackValue) -> Result<()> {
        match self.locals.get_mut(i) {
            Some(slot) => {
                *slot = v;
                Ok(())
            }
            None => Err(VMErrorCode::LocalOutOfRange(i)),
        }
    }
    /// Overwrite argument `i`, or `ArgumentOutOfRange`.
    ///
    /// BUG FIX: this used to bounds-check against `locals` while writing
    /// into `args`, which could panic when there are fewer arguments than
    /// locals (or silently allow/deny the wrong indices) and reported the
    /// wrong error variant. It now checks `args` itself.
    fn store_argument(&mut self, i: usize, v: StackValue) -> Result<()> {
        match self.args.get_mut(i) {
            Some(slot) => {
                *slot = v;
                Ok(())
            }
            None => Err(VMErrorCode::ArgumentOutOfRange(i)),
        }
    }
    /// Pop a callable, or `StackExpectedFunction`.
    fn pop_function(&mut self) -> Result<FuncValue> {
        match self.pop_value()? {
            StackValue::Function(f) => Ok(*f),
            v => Err(VMErrorCode::StackExpectedFunction(v)),
        }
    }
}
/// A loaded module: its compiled code plus the runtime global table.
pub struct RuntimeModule {
    code: Rc<CompiledModule>,
    // Module globals, sized by `code.globals` and initialised to Nothing.
    globals: RefCell<Vec<StackValue>>,
}
impl fmt::Debug for RuntimeModule {
    /// Modules print as `#<id>` of their compiled code.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let id = self.code.id;
        write!(f, "#{id}")
    }
}
impl RuntimeModule {
    /// Wrap compiled code with a `Nothing`-initialised global table.
    fn new(code: Rc<CompiledModule>) -> RuntimeModule {
        let globals = vec![StackValue::Nothing; code.globals];
        RuntimeModule {
            code,
            globals: RefCell::new(globals),
        }
    }
    /// Read global `i` (cloned), or `GlobalOutOfRange`.
    fn get_global(&self, i: usize) -> Result<StackValue> {
        self.globals
            .borrow()
            .get(i)
            .cloned()
            .ok_or(VMErrorCode::GlobalOutOfRange(i)) // TODO: Test
    }
    /// Overwrite global `i`, or `GlobalOutOfRange`.
    fn set_global(&self, i: usize, v: StackValue) -> Result<()> {
        match self.globals.borrow_mut().get_mut(i) {
            Some(slot) => {
                *slot = v;
                Ok(())
            }
            None => Err(VMErrorCode::GlobalOutOfRange(i)), // TODO: Test
        }
    }
    /// Fetch function `i` from the compiled module.
    fn get_function(&self, i: usize) -> Result<Rc<Function>> {
        self.code
            .functions()
            .get(i)
            .cloned()
            .ok_or(VMErrorCode::FunctionOutOfRange(i)) // TODO: Test
    }
}
/// The whole VM execution context: every loaded module keyed by id.
pub struct Context {
    modules: HashMap<ModuleId, Rc<RuntimeModule>>,
    // Set by the ModulePrefix instruction; consumed (take()n) by the very
    // next instruction to redirect Load/Store Module/Function ops.
    module_prefix: Option<ModuleId>,
}
impl Context {
    /// Create an empty context with no modules loaded.
    pub fn new() -> Context {
        Context {
            modules: HashMap::new(),
            module_prefix: None,
        }
    }
    /// True when a module with this id has been registered.
    pub fn loaded(&self, module: ModuleId) -> bool {
        self.modules.contains_key(&module)
    }
    /// Fetch the compiled code for a registered module, if present.
    pub fn get_module(&self, module: ModuleId) -> Option<&Rc<CompiledModule>> {
        match self.modules.get(&module) {
            Some(runtime) => Some(&runtime.code),
            None => None,
        }
    }
    /// Register (or replace) a module, giving it a fresh global table.
    pub fn set_module(&mut self, module: Rc<CompiledModule>) {
        let id = module.id;
        self.modules.insert(id, Rc::new(RuntimeModule::new(module)));
    }
    /// Dispatch one of the built-in extern functions by registration index.
    fn call_extern_function(&self, index: usize, args: &[StackValue]) -> Result<StackValue> {
        match index {
            EXTERN_BUILTIN_NOOP => Ok(StackValue::Nothing),
            // list -> fresh iterator positioned at element 0
            EXTERN_BUILTIN_LIST_GET_ITERATOR => match args.first() {
                None => Err(VMErrorCode::ArgumentOutOfRange(0)),
                Some(StackValue::List(list)) => {
                    Ok(StackValue::ListIterator(Rc::new(ListIterator {
                        list: list.clone(),
                        index: Cell::new(0),
                    })))
                }
                Some(other) => Err(VMErrorCode::StackExpectedList(other.clone())),
            },
            // iterator -> next element, or Nothing once exhausted
            EXTERN_BUILTIN_LIST_ITERATOR_NEXT => match args.first() {
                None => Err(VMErrorCode::ArgumentOutOfRange(0)),
                Some(StackValue::ListIterator(li)) => {
                    let position = li.index.get();
                    match li.list.get(position) {
                        None => Ok(StackValue::Nothing),
                        Some(value) => {
                            li.index.set(position + 1);
                            Ok(value.clone())
                        }
                    }
                }
                Some(other) => Err(VMErrorCode::StackExpectedListIterator(other.clone())),
            },
            _ => Err(VMErrorCode::UnregisteredExternFunction(index)),
        }
    }
}
/// Outcome of executing one instruction: stop the interpreter loop
/// (outermost return) or keep going.
enum Flow {
    Break,
    Continue,
}
/// Execute a single VM instruction against the current frame `f`.
///
/// `index` is the program counter, already advanced past `instruction`;
/// jump instructions simply overwrite it. `Call` and `Return` swap `f`
/// with frames on `stack`. Returns `Flow::Break` when the outermost
/// function returns, `Flow::Continue` otherwise.
#[inline(always)]
fn eval_one(
    instruction: Instruction,
    index: &mut usize,
    c: &mut Context,
    f: &mut Frame,
    stack: &mut Vec<Frame>,
) -> Result<Flow> {
    // A ModulePrefix instruction only applies to the instruction right
    // after it; take() clears it so it cannot leak any further.
    let module_prefix = c.module_prefix.take();
    match instruction {
        // NOTE: this `index` is the string-table index (shadows the pc).
        Instruction::Panic(index) => {
            let v = f
                .get_string(index)
                .unwrap_or_else(|_| "!!panic string out of range!!".into());
            return Err(VMErrorCode::Panic(v));
        }
        Instruction::BoolNot => {
            let value = f.pop_bool()?;
            f.push_bool(!value);
        }
        Instruction::Discard => {
            f.pop_value()?;
        }
        Instruction::Dup => {
            let v = f.pop_value()?;
            f.push_value(v.clone());
            f.push_value(v);
        }
        // Binary float ops: the right operand (x) is on top of the stack,
        // the left operand (y) beneath it.
        Instruction::FloatAdd => {
            let x = f.pop_float()?;
            let y = f.pop_float()?;
            f.push_float(x + y);
        }
        Instruction::FloatDivide => {
            let x = f.pop_float()?;
            let y = f.pop_float()?;
            // x is the divisor; a zero divisor is a VM error rather than
            // IEEE infinity.
            if x == 0. {
                return Err(VMErrorCode::DivideByZero);
            }
            f.push_float(y / x);
        }
        Instruction::FloatMultiply => {
            let x = f.pop_float()?;
            let y = f.pop_float()?;
            f.push_float(x * y);
        }
        Instruction::FloatSubtract => {
            let x = f.pop_float()?;
            let y = f.pop_float()?;
            f.push_float(y - x);
        }
        Instruction::Jump(i) => {
            *index = i;
        }
        Instruction::JumpFalse(i) => {
            if !(f.pop_bool()?) {
                *index = i;
            }
        }
        Instruction::JumpTrue(i) => {
            if f.pop_bool()? {
                *index = i;
            }
        }
        Instruction::LoadArgument(i) => {
            let v = f.get_argument(i)?;
            f.push_value(v);
        }
        Instruction::LoadLocal(i) => {
            let v = f.get_local(i)?;
            f.push_value(v);
        }
        // Module-level loads/stores resolve against the prefixed module if
        // a ModulePrefix immediately preceded, else the current frame's.
        Instruction::LoadModule(i) => {
            let module = module_prefix
                .map(|id| {
                    c.modules
                        .get(&id)
                        .ok_or_else(|| VMErrorCode::ModuleNotFound(id))
                })
                .unwrap_or(Ok(&f.module))?;
            let v = module.get_global(i)?;
            f.push_value(v);
        }
        Instruction::PushFalse => {
            f.push_bool(false);
        }
        Instruction::PushFloat(v) => {
            f.push_float(v);
        }
        Instruction::PushNothing => {
            f.push_nothing();
        }
        Instruction::PushString(s) => {
            let v = f.get_string(s)?;
            f.push_string(v);
        }
        Instruction::PushTrue => {
            f.push_bool(true);
        }
        Instruction::StoreArgument(i) => {
            let v = f.pop_value()?;
            f.store_argument(i, v)?;
        }
        Instruction::StoreLocal(i) => {
            let v = f.pop_value()?;
            f.store_local(i, v)?;
        }
        Instruction::StoreModule(i) => {
            let v = f.pop_value()?;
            let module = module_prefix
                .map(|id| {
                    c.modules
                        .get(&id)
                        .ok_or_else(|| VMErrorCode::ModuleNotFound(id))
                })
                .unwrap_or(Ok(&f.module))?;
            module.set_global(i, v)?;
        }
        Instruction::StoreSlot(i) => {
            let o = f.pop_object()?;
            let v = f.pop_value()?;
            // NOTE(review): unchecked indexing — an out-of-range slot
            // panics the host instead of raising SlotOutOfRange like
            // LoadSlot does; confirm whether that is intended.
            o.values.borrow_mut()[i] = v;
        }
        Instruction::LoadFunction(i) => {
            let module = module_prefix
                .map(|id| {
                    c.modules
                        .get(&id)
                        .ok_or_else(|| VMErrorCode::ModuleNotFound(id))
                })
                .unwrap_or(Ok(&f.module))?;
            let v = module.get_function(i)?;
            f.push_function(module.clone(), v);
        }
        Instruction::LoadExternFunction(i) => {
            f.push_extern_function(i);
        }
        Instruction::Call(arg_count) => {
            let function = f.pop_function()?;
            // Arguments are popped in stack order (last pushed first).
            let mut args = Vec::new();
            for _ in 0..arg_count {
                args.push(f.pop_value()?);
            }
            match function {
                FuncValue::Function(module, func) => {
                    // Swap in the callee's frame; the caller's frame
                    // (with its resume pc) is parked on the call stack.
                    let mut frame = Frame::from_function(module, func, args);
                    std::mem::swap(&mut frame, f);
                    frame.pc = *index;
                    stack.push(frame);
                    *index = 0;
                }
                FuncValue::ExternFunction(i) => {
                    // Extern calls run synchronously on the host side.
                    f.push_value(c.call_extern_function(i, &args)?);
                }
            }
        }
        Instruction::Return => match stack.pop() {
            Some(mut frame) => {
                // The return value is at the top of the stack already.
                let retval = f.pop_value()?;
                std::mem::swap(&mut frame, f);
                *index = f.pc;
                f.push_value(retval);
            }
            // No caller left: the outermost function has returned.
            None => return Ok(Flow::Break),
        },
        Instruction::StringAdd => {
            let x = f.pop_string()?;
            let y = f.pop_string()?;
            let mut new_string = y.to_string();
            new_string.push_str(&x);
            f.push_string(new_string.into());
        }
        Instruction::EqBool => {
            let x = f.pop_bool()?;
            let y = f.pop_bool()?;
            f.push_bool(x == y);
        }
        Instruction::EqFloat => {
            let x = f.pop_float()?;
            let y = f.pop_float()?;
            f.push_bool(x == y);
        }
        Instruction::EqString => {
            let x = f.pop_string()?;
            let y = f.pop_string()?;
            f.push_bool(x == y);
        }
        // Comparisons: y is the left operand, x the right (see FloatAdd).
        Instruction::GreaterFloat => {
            let x = f.pop_float()?;
            let y = f.pop_float()?;
            f.push_bool(y > x);
        }
        Instruction::GreaterString => {
            let x = f.pop_string()?;
            let y = f.pop_string()?;
            f.push_bool(y > x);
        }
        Instruction::LessFloat => {
            let x = f.pop_float()?;
            let y = f.pop_float()?;
            f.push_bool(y < x);
        }
        Instruction::LessString => {
            let x = f.pop_string()?;
            let y = f.pop_string()?;
            f.push_bool(y < x);
        }
        // Stack layout for NewObject: class id, name, then `slots` values.
        Instruction::NewObject(slots) => {
            let class_id = f.pop_int()?;
            let name = f.pop_string()?;
            let mut values = Vec::with_capacity(slots);
            for _ in 0..slots {
                values.push(f.pop_value()?);
            }
            let object = Object {
                name,
                class_id,
                values: RefCell::new(values.into()),
            };
            f.push_object(object.into());
        }
        Instruction::LoadSlot(slot) => {
            let obj = f.pop_object()?;
            f.push_value(obj.get_slot(slot)?);
        }
        Instruction::IsClass(id) => {
            let value = f.pop_value()?;
            f.push_bool(value.is_object(id));
        }
        Instruction::PushInt(v) => {
            f.push_int(v);
        }
        Instruction::IsBool => {
            let v = f.pop_value()?;
            f.push_bool(v.is_bool());
        }
        Instruction::IsFloat => {
            let v = f.pop_value()?;
            f.push_bool(v.is_float());
        }
        Instruction::IsString => {
            let v = f.pop_value()?;
            f.push_bool(v.is_string());
        }
        Instruction::IsNothing => {
            let v = f.pop_value()?;
            f.push_bool(v.is_nothing());
        }
        // NOTE: here `c` is the element count, shadowing the Context param.
        Instruction::NewList(c) => {
            let mut v = Vec::with_capacity(c);
            for _ in 0..c {
                v.push(f.pop_value()?);
            }
            f.push_list(Rc::new(v));
        }
        // Arm the one-shot module prefix for the next instruction.
        Instruction::ModulePrefix(mid) => {
            c.module_prefix = Some(mid);
        }
    }
    Ok(Flow::Continue)
}
/// Drive the interpreter loop for `function` until the outermost frame
/// returns, attaching the frame backtrace to any error that escapes.
fn eval_core(
    c: &mut Context,
    module: Rc<RuntimeModule>,
    function: Rc<Function>,
    args: Vec<StackValue>,
) -> std::result::Result<StackValue, VMError> {
    let mut call_stack = Vec::new();
    let mut frame = Frame::from_function(module, function, args);
    let mut pc = 0;
    loop {
        // Fetch, then advance the pc before dispatch so jump targets can
        // simply overwrite it.
        let instruction = frame.func.instructions()[pc];
        pc += 1;
        let code = match eval_one(instruction, &mut pc, c, &mut frame, &mut call_stack) {
            Ok(Flow::Continue) => continue,
            // The outermost Return leaves the result on the operand stack.
            Ok(Flow::Break) => match frame.pop_value() {
                Ok(v) => return Ok(v),
                Err(e) => e,
            },
            Err(e) => e,
        };
        // Error path: record where we were and hand back the whole stack.
        frame.pc = pc;
        call_stack.push(frame);
        return Err(VMError {
            code,
            stack: call_stack.into(),
        });
    }
}
/// Run function number `function` of module `module` with the given
/// arguments. Fails with an empty backtrace when the module or function
/// cannot be found.
pub fn eval(
    c: &mut Context,
    module: ModuleId,
    function: usize,
    args: &[StackValue],
) -> std::result::Result<StackValue, VMError> {
    let runtime = match c.modules.get(&module) {
        Some(m) => m.clone(),
        None => {
            return Err(VMError {
                code: VMErrorCode::ModuleNotFound(module),
                stack: Box::new([]),
            })
        }
    };
    let func = match runtime.code.functions.get(function) {
        Some(f) => f.clone(),
        None => {
            return Err(VMError {
                code: VMErrorCode::FunctionOutOfRange(function),
                stack: Box::new([]),
            })
        }
    };
    eval_core(c, runtime, func, args.to_vec())
}
pub fn eval_export_fn(
c: &mut Context,
module: ModuleId,
name: &str,
args: &[StackValue],
) -> std::result::Result<StackValue, VMError> {
let Some(module) = c.modules.get(&module) else {
return Err(VMError {
code: VMErrorCode::ModuleNotFound(module),
stack: Box::new([]),
});
};
let export = match module.code.exports.get(name) {
Some(Export::Function(id)) => id,
Some(_) => {
return Err(VMError {
code: VMErrorCode::ExportNotFunction(name.to_string()),
stack: Box::new([]),
})
}
None => {
return Err(VMError {
code: VMErrorCode::ExportNotFound(name.to_string()),
stack: Box::new([]),
})
}
};
let function = module.code.functions[*export].clone();
let args = args.iter().map(|a| a.clone()).collect();
eval_core(c, module.clone(), function, args)
}

View file

@ -1,81 +0,0 @@
# Snapshot Tests
The `.fine` files in this directory and its descendants are processed
by `build.rs` into a series of snapshot-tests. The various test
assertions are specified by `@` directives in comments in the file.
e.g., a test might look like this:
```fine
// @concrete:
// | File
// | ExpressionStatement
// | BinaryExpression
// | BinaryExpression
// | LiteralExpression
// | Number:'"1"'
// | Star:'"*"'
// | LiteralExpression
// | Number:'"2"'
// | Plus:'"+"'
// | BinaryExpression
// | UnaryExpression
// | Minus:'"-"'
// | LiteralExpression
// | Number:'"3"'
// | Star:'"*"'
// | LiteralExpression
// | Number:'"4"'
// | Semicolon:'";"'
//
1 * 2 + -3 * 4;
// @type: 532 f64
```
## Assertions
The various assertions are as follows:
- The `// @ignore` directive marks the test as ignored.
- The `// @concrete:` assertion says that the following lines
(prefixed with `// | `, as above) describe the concrete syntax tree
of the file after parsing.
e.g.:
```fine
// @concrete:
// | File
// | ExpressionStatement
// | LiteralExpression
// | String:'"\"Hello world\""'
// | Semicolon:'";"'
//
"Hello world";
```
- The `// @type:` assertion says that the type of the tree at the
given point will match the given type. `@type` assertions usually go
after the contents of the file to make the probe points more stable
in the face of new assertions and whatnot.
e.g.:
```fine
"Hello world!"
// @type: 2 string
```
- The `// @type-error:` assertion says that the type of the tree at
the given point should be an error, and that the error message
provided should be among the generated errors. (Like `@type`, these
usually go after the code, for stability.)
e.g.:
```fine
- "twenty five";
// @type-error: 0 cannot apply unary operator '-' to value of type string
```

View file

@ -1,4 +0,0 @@
These tests all produce errors in their parse; the point is that the
recovered trees are as close to correct as we can get.
See e.g. https://matklad.github.io/2023/05/21/resilient-ll-parsing-tutorial.html

View file

@ -1,56 +0,0 @@
fun f1(x: f64,
fun f2(x: f64,, z: f64) {}
fun f3() {}
// @concrete:
// | File
// | FunctionDecl
// | Fun:'"fun"'
// | Identifier:'"f1"'
// | ParamList
// | LeftParen:'"("'
// | Parameter
// | Identifier:'"x"'
// | Colon:'":"'
// | TypeExpression
// | TypeIdentifier
// | Identifier:'"f64"'
// | Comma:'","'
// | Error:'"Error at 'fun': expect ')' to end a parameter list"'
// | FunctionDecl
// | Fun:'"fun"'
// | Identifier:'"f2"'
// | ParamList
// | LeftParen:'"("'
// | Parameter
// | Identifier:'"x"'
// | Colon:'":"'
// | TypeExpression
// | TypeIdentifier
// | Identifier:'"f64"'
// | Comma:'","'
// | Error
// | Error:'"Error at ',': expected parameter"'
// | Comma:'","'
// | Parameter
// | Identifier:'"z"'
// | Colon:'":"'
// | TypeExpression
// | TypeIdentifier
// | Identifier:'"f64"'
// | RightParen:'")"'
// | Block
// | LeftBrace:'"{"'
// | RightBrace:'"}"'
// | FunctionDecl
// | Fun:'"fun"'
// | Identifier:'"f3"'
// | ParamList
// | LeftParen:'"("'
// | RightParen:'")"'
// | Block
// | LeftBrace:'"{"'
// | RightBrace:'"}"'
//

View file

@ -1,60 +0,0 @@
fun fib_rec(f1: f64,
fun fib(n: f64) -> f64 {
fib_rec(1, 1, n)
}
// @concrete:
// | File
// | FunctionDecl
// | Fun:'"fun"'
// | Identifier:'"fib_rec"'
// | ParamList
// | LeftParen:'"("'
// | Parameter
// | Identifier:'"f1"'
// | Colon:'":"'
// | TypeExpression
// | TypeIdentifier
// | Identifier:'"f64"'
// | Comma:'","'
// | Error:'"Error at 'fun': expect ')' to end a parameter list"'
// | FunctionDecl
// | Fun:'"fun"'
// | Identifier:'"fib"'
// | ParamList
// | LeftParen:'"("'
// | Parameter
// | Identifier:'"n"'
// | Colon:'":"'
// | TypeExpression
// | TypeIdentifier
// | Identifier:'"f64"'
// | RightParen:'")"'
// | ReturnType
// | Arrow:'"->"'
// | TypeExpression
// | TypeIdentifier
// | Identifier:'"f64"'
// | Block
// | LeftBrace:'"{"'
// | ExpressionStatement
// | CallExpression
// | Identifier
// | Identifier:'"fib_rec"'
// | ArgumentList
// | LeftParen:'"("'
// | Argument
// | LiteralExpression
// | Number:'"1"'
// | Comma:'","'
// | Argument
// | LiteralExpression
// | Number:'"1"'
// | Comma:'","'
// | Argument
// | Identifier
// | Identifier:'"n"'
// | RightParen:'")"'
// | RightBrace:'"}"'
//

View file

@ -1,20 +0,0 @@
fun foo() { }
} // <- Whoopsie!
// @concrete:
// | File
// | FunctionDecl
// | Fun:'"fun"'
// | Identifier:'"foo"'
// | ParamList
// | LeftParen:'"("'
// | RightParen:'")"'
// | Block
// | LeftBrace:'"{"'
// | RightBrace:'"}"'
// | Error
// | Error:'"Error at '}': unbalanced '}'"'
// | RightBrace:'"}"'
// |
//

View file

@ -1,3 +0,0 @@
return "skidoo!";
// @check-error: a return statement can only be used inside a function

View file

@ -1,380 +0,0 @@
use fine::compile_program;
use fine::compiler::{compile_module, CompiledModule, Function};
use fine::program::{
Module, ModuleLoadError, ModuleLoader, ModuleSource, Program, StandardModuleLoader,
};
use fine::semantics::{Error, Type};
use fine::vm::{eval_export_fn, Context, VMError};
use pretty_assertions::assert_eq;
use std::fmt::Write as _;
use std::path::PathBuf;
use std::rc::Rc;
/// Rewrite the `// @<section>:` block in the test file at `source_path`,
/// replacing its `// | ` payload lines with `value` (one `// | `-prefixed
/// line per line of `value`). All other lines are preserved verbatim.
///
/// Panics if the file cannot be read/written, the section marker is
/// missing, or no insertion point is found after the marker.
fn rebase_section(source_path: &str, section: &str, value: &str) {
    let contents = std::fs::read_to_string(source_path)
        .unwrap_or_else(|e| panic!("unable to read input file {}: {}", source_path, e));
    // The rewritten file is roughly the same size; preallocate.
    let mut result = String::with_capacity(contents.len());
    let mut lines = contents.lines();
    // Copy everything up to and including the section marker line.
    let marker = format!("// @{section}:");
    let mut found_section = false;
    for line in lines.by_ref() {
        result.push_str(line);
        result.push('\n');
        if line == marker {
            found_section = true;
            break;
        }
    }
    if !found_section {
        panic!(
            "unable to locate the {section} section in {source_path}. Is there a line that starts with '// @{section}:'?"
        );
    }
    // Skip the old `// | ` payload lines, then splice the new payload in
    // just before the first non-payload line (which is kept).
    let mut replaced_output = false;
    for line in lines.by_ref() {
        if line.starts_with("// | ") {
            // Old payload line: drop it.
            continue;
        }
        for expected_line in value.lines() {
            result.push_str("// | ");
            result.push_str(expected_line);
            result.push('\n');
        }
        // (Make sure not to drop this line.)
        result.push_str(line);
        result.push('\n');
        replaced_output = true;
        break;
    }
    if !replaced_output {
        panic!(
            "didn't actually replace the output section in {}",
            source_path
        );
    }
    // Copy the remainder of the file unchanged.
    for line in lines {
        result.push_str(line);
        result.push('\n');
    }
    // ... and re-write the file.
    std::fs::write(source_path, result).expect("unable to write the new file!");
}
/// True when the FINE_TEST_REBASE environment variable is set to a truthy
/// value ("1", "true", "yes", or "y", case-insensitively).
fn should_rebase() -> bool {
    let rebase = std::env::var("FINE_TEST_REBASE")
        .unwrap_or_default()
        .to_lowercase();
    matches!(rebase.as_str(), "1" | "true" | "yes" | "y")
}
/// Parse `source` and compare its concrete-syntax-tree dump against
/// `expected`; when FINE_TEST_REBASE is set, rewrite the snapshot in the
/// test file instead of failing.
fn assert_concrete(source: Rc<str>, expected: &str, source_path: &str) {
    let (tree, _) = fine::parser::parse(&source);
    let dump = tree.dump(&source, false);
    if dump == expected {
        return;
    }
    if should_rebase() {
        rebase_section(source_path, "concrete", &dump);
    } else {
        assert_eq!(expected, dump, "concrete syntax trees did not match (set FINE_TEST_REBASE=1 to auto-rebase if the diff is expected)");
    }
}
// The semantic_* macros fail a test while first dumping the semantic
// compiler state (optionally focused on a tree ref), so snapshot failures
// come with full diagnostic context.

/// Unconditionally fail, dumping compiler state for tree-ref `$tr` first.
macro_rules! semantic_panic {
    ($semantics:expr, $tr:expr, $($t:tt)*) => {{
        let message = format!($($t)*);
        eprintln!("{message}!");
        $semantics.dump_compiler_state($tr);
        panic!("{message}");
    }};
}
/// Like `assert!`, but dumps compiler state before panicking.
macro_rules! semantic_assert {
    ($semantics:expr, $tr:expr, $pred:expr, $($t:tt)*) => {{
        if !$pred {
            let message = format!($($t)*);
            eprintln!("{message}!");
            $semantics.dump_compiler_state($tr);
            panic!("{message}");
        }
    }};
}
/// Like `assert_eq!`, but dumps compiler state before reporting the diff.
macro_rules! semantic_assert_eq {
    ($semantics:expr, $tr:expr, $left:expr, $right:expr, $($t:tt)*) => {{
        let ll = $left;
        let rr = $right;
        if ll != rr {
            let message = format!($($t)*);
            eprintln!("{message}!");
            $semantics.dump_compiler_state($tr);
            assert_eq!(ll, rr, "{}", message);
        }
    }};
}
/// A module loader for tests: serves the in-memory test source as the
/// synthetic "__test__" module and defers everything else to the
/// standard filesystem loader.
struct TestLoader {
    source: Rc<str>,
    base: StandardModuleLoader,
}
impl TestLoader {
    /// Build a loader rooted at the parent directory of `base_path`
    /// (falling back to `base_path` itself when it has no parent),
    /// serving `source` for the synthetic "__test__" module.
    fn new(base_path: PathBuf, source: Rc<str>) -> Box<Self> {
        let root = match base_path.parent() {
            Some(parent) => parent.to_owned(),
            None => base_path,
        };
        Box::new(TestLoader {
            source,
            base: StandardModuleLoader::new(root),
        })
    }
}
impl ModuleLoader for TestLoader {
    /// "__test__" passes through untouched; everything else defers to the
    /// standard loader, with "__test__" treated as having no base path.
    fn normalize_module_name(&self, base: Option<&str>, name: String) -> String {
        if name == "__test__" {
            return name;
        }
        let base = if base == Some("__test__") { None } else { base };
        self.base.normalize_module_name(base, name)
    }
    /// Serve the in-memory source for "__test__"; delegate otherwise.
    fn load_module(&self, name: &String) -> Result<ModuleSource, ModuleLoadError> {
        match name.as_str() {
            "__test__" => Ok(ModuleSource::SourceText(self.source.to_string())),
            _ => self.base.load_module(name),
        }
    }
}
/// Build a `Program` whose loader serves `source` as the synthetic
/// "__test__" module, rooted next to the test's source path.
fn test_runtime(_source_path: &str, source: Rc<str>) -> Program {
    Program::new(TestLoader::new(_source_path.into(), source))
}
/// Assert that the type computed for the tree node at byte offset `pos`
/// formats (via Display) exactly as `expected`.
fn assert_type_at(module: Rc<Module>, pos: usize, expected: &str, _source_path: &str) {
    let semantics = module.semantics();
    let tree = semantics.tree();
    let tree_ref = match tree.find_tree_at(pos) {
        Some(t) => t,
        None => semantic_panic!(
            &semantics,
            None,
            // BUG FIX: message previously read "subtee".
            "Unable to find the subtree at position {pos}"
        ),
    };
    let tree_type = semantics.type_of(tree_ref);
    let actual = format!("{}", tree_type);
    semantic_assert_eq!(
        &semantics,
        Some(tree_ref),
        expected,
        actual,
        "The type of the tree at position {pos} was incorrect"
    );
}
/// Assert that the tree node at byte offset `pos` has an error type, and
/// that `expected` appears among the reported error messages.
fn assert_type_error_at(
    module: Rc<Module>,
    errors: &[Rc<Error>],
    pos: usize,
    expected: &str,
    _source_path: &str,
) {
    let semantics = module.semantics();
    let tree = semantics.tree();
    let tree_ref = match tree.find_tree_at(pos) {
        Some(t) => t,
        None => semantic_panic!(
            &semantics,
            None,
            // BUG FIX: message previously read "subtee".
            "Unable to find the subtree at position {pos}"
        ),
    };
    let tree_type = semantics.type_of(tree_ref);
    semantic_assert!(
        &semantics,
        Some(tree_ref),
        matches!(tree_type, Type::Error(_)),
        "The type of the {:?} tree at position {pos} was '{tree_type:?}', not an error",
        tree[tree_ref].kind
    );
    semantic_assert!(
        &semantics,
        Some(tree_ref),
        errors.iter().any(|e| e.message == expected),
        "Unable to find the expected error message '{expected}'"
    );
}
/// Pretty-print one compiled function: header line, string table, and
/// numbered bytecode listing.
fn dump_function(out: &mut String, function: &Function) -> std::fmt::Result {
    writeln!(
        out,
        "function {} ({} args, {} locals):",
        function.name(),
        function.args(),
        function.locals()
    )?;
    let strings = function.strings();
    writeln!(out, " strings ({}):", strings.len())?;
    for (index, string) in strings.iter().enumerate() {
        writeln!(out, " {}: \"{}\"", index, string)?; // TODO: ESCAPE
    }
    let instructions = function.instructions();
    writeln!(out, " code ({}):", instructions.len())?;
    for (index, instruction) in instructions.iter().enumerate() {
        writeln!(out, " {}: {:?}", index, instruction)?;
    }
    Ok(())
}
/// Pretty-print every function of a compiled module, in order.
fn dump_module(out: &mut String, module: &CompiledModule) -> std::fmt::Result {
    module
        .functions()
        .iter()
        .try_for_each(|function| dump_function(out, function))
}
/// Compile `module` and compare its disassembly against `expected`; when
/// FINE_TEST_REBASE is set, rewrite the snapshot in the test file instead
/// of failing.
fn assert_compiles_to(module: Rc<Module>, expected: &str, source_path: &str) {
    let semantics = module.semantics();
    let compiled = compile_module(&semantics);
    let mut actual = String::new();
    dump_module(&mut actual, &compiled).expect("no dumping?");
    if expected == actual {
        return;
    }
    if should_rebase() {
        rebase_section(source_path, "compiles-to", &actual);
    } else {
        semantic_assert_eq!(
            &semantics,
            None,
            expected,
            actual,
            "did not compile as expected (set FINE_TEST_REBASE=1 to auto-rebase if the diff is expected)"
        )
    }
}
fn assert_no_errors(module: Rc<Module>, errors: &[Rc<Error>]) {
let semantics = module.semantics();
let expected_errors: &[Rc<Error>] = &[];
semantic_assert_eq!(
&semantics,
None,
expected_errors,
errors,
"expected no errors"
);
}
/// Print everything useful about a VM failure — compiler state, module
/// disassembly (if loaded), and the VM backtrace — then panic. Never
/// returns.
fn dump_runtime_error(module: &Rc<Module>, context: &Context, e: VMError) -> ! {
    let semantics = module.semantics();
    semantics.dump_compiler_state(None);
    // Disassemble the module if it actually made it into the VM context.
    if let Some(module) = context.get_module(module.id()) {
        let mut actual = String::new();
        let _ = dump_module(&mut actual, &module);
        eprintln!("{actual}");
    }
    eprintln!("Backtrace:");
    // One line per frame: function name, args, and the saved pc.
    for frame in e.stack.iter() {
        let func = frame.func();
        eprint!(" {} (", func.name());
        for arg in frame.args().iter() {
            eprint!("{:?},", arg);
        }
        eprintln!(") @ {}", frame.pc());
    }
    eprintln!();
    panic!("error occurred while running: {:?}", e.code);
}
/// Compile the whole program, run the module's exported `test` function
/// with no arguments, and assert that its Debug rendering (e.g.
/// "Float(8.0)") equals `expected`. Any compile or runtime error is
/// reported via `dump_runtime_error`.
fn assert_eval_ok(program: &Program, module: Rc<Module>, expected: &str) {
    let semantics = module.semantics();
    let mut context = Context::new();
    if let Err(e) = compile_program(&program, &mut context) {
        dump_runtime_error(&module, &context, e);
    };
    match eval_export_fn(&mut context, module.id(), "test", &[]) {
        Ok(v) => {
            // Results are compared via their Debug formatting.
            let actual = format!("{:?}", v);
            semantic_assert_eq!(
                &semantics,
                None,
                expected,
                &actual,
                "wrong return from test function"
            );
        }
        Err(e) => dump_runtime_error(&module, &context, e),
    }
}
/// Assert that the rendered error messages exactly match `expected_errors`,
/// in order.
fn assert_errors(module: Rc<Module>, errors: &[Rc<Error>], expected_errors: Vec<&str>) {
    let semantics = module.semantics();
    let rendered: Vec<String> = errors.iter().map(|e| e.to_string()).collect();
    semantic_assert_eq!(
        &semantics,
        None,
        expected_errors,
        rendered,
        "expected error messages to match"
    );
}
/// Assert that at least one reported error has exactly the message
/// `expected`.
fn assert_check_error(module: Rc<Module>, errors: &[Rc<Error>], expected: &str) {
    let semantics = module.semantics();
    let found = errors.iter().any(|e| e.message == expected);
    semantic_assert!(
        &semantics,
        None,
        found,
        "Unable to find the expected error message '{expected}'"
    );
}
// Pull in the snapshot tests that build.rs generates from the .fine files.
include!(concat!(env!("OUT_DIR"), "/generated_tests.rs"));

View file

@ -1,155 +0,0 @@
// Examples of alternate types/union types/heterogeneous types.
class Foo {
x: f64;
}
class Bar {
y: f64;
}
fun extract_value(v: Foo or Bar) -> f64 {
match v {
v:Foo -> v.x,
v:Bar -> v.y,
} // No error; exhaustivity analysis should work.
}
// Some type compatibility nonsense
fun bottom() -> string or nothing {
"woooo"
}
fun same_but_wrong_way_around_should_be_compatible() -> nothing or string {
bottom()
}
fun same_but_larger_should_be_compatible() -> nothing or f64 or string {
bottom()
}
// This is Bob Nystrom's example from
// https://journal.stuffwithstuff.com/2023/08/04/representing-heterogeneous-data/
//
class MeleeWeapon {
damage: f64;
}
class RangedWeapon {
minRange: f64;
maxRange: f64;
}
class Monster {
health: f64;
}
fun print(x:string) {}
fun in_range(weapon: MeleeWeapon or RangedWeapon, distance: f64) -> bool {
match weapon {
w:RangedWeapon -> distance >= w.minRange and distance <= w.maxRange,
_ -> distance == 1
}
}
fun roll_dice(x:f64) -> f64 {
0
}
fun attack(weapon: MeleeWeapon or RangedWeapon, monster: Monster, distance: f64) {
// This is worse than Bob's final version but it works. `is` operator
// should be the same precedence as `and` and left-associative, so the
// `and` that follows the declaration in the `is` still has the variables
// from the `is` binding in scope.
if weapon is MeleeWeapon and distance > 1 or
weapon is w:RangedWeapon and (distance < w.minRange or distance > w.maxRange) {
print("You are out of range");
return
}
// Bob says he doesn't want to do flow analysis and we're not, we're using
// scoping rules and associativity to make the re-bindings work.
//
// NOTE: special syntax here: `identifier` is `TypeExpression` ALMOST means
// `identifier is identifier : TypeExpression` as the shorthand for checking
// local variables. The *almost* part is that the effective type of the
// variable changes but not the binding. (Is this what we want?)
//
let damage = match weapon {
w:MeleeWeapon -> roll_dice(w.damage),
w:RangedWeapon -> w.maxRange - w.minRange,
};
if monster.health <= damage {
print("You kill the monster!");
monster.health = 0;
} else {
print("You wound the monster.");
monster.health = monster.health - damage;
}
}
fun more_examples(weapon: MeleeWeapon or RangedWeapon) -> f64 or nothing {
if weapon is w: RangedWeapon and w.maxRange > 10 {
// w is still in scope here; the `and` is bound into a predicate expression
// and breaks exhaustivity
w.minRange
}
}
// Some fun with iterators
class Finished {}
let FINISHED = new Finished {};
class Iterator {
current: f64;
fun next(self) -> f64 or Finished {
if self.current < 10 {
let result = self.current;
self.current = self.current + 1;
return result;
}
FINISHED
}
}
fun test() -> f64 {
let sum = 0;
// A single step of an iterator...
let it = new Iterator { current: 0 };
if it.next() is v: f64 {
sum = sum + v;
}
// Looping by hand over an iterator is pretty clean.
let it = new Iterator { current: 0 };
while it.next() is v: f64 {
sum = sum + v;
}
let it = new Iterator { current: 0 };
sum = sum + match it.next() {
v:f64 -> 100,
_ -> 1000,
};
// Unroll by hand...
let it = new Iterator { current: 0 };
while true {
if it.next() is v: f64 {
sum = sum + v;
} else {
return sum;
}
}
// Not in this test but `for` over an object should turn into something
// like the above.
}
// @no-errors
// @eval: Float(190.0)

View file

@ -1,86 +0,0 @@
fun foo(x: f64) -> f64 {
x + 7
}
fun test() -> f64 {
foo(1)
}
// @no-errors
// @eval: Float(8.0)
// @type: 20 f64
// @concrete:
// | File
// | FunctionDecl
// | Fun:'"fun"'
// | Identifier:'"foo"'
// | ParamList
// | LeftParen:'"("'
// | Parameter
// | Identifier:'"x"'
// | Colon:'":"'
// | TypeExpression
// | TypeIdentifier
// | Identifier:'"f64"'
// | RightParen:'")"'
// | ReturnType
// | Arrow:'"->"'
// | TypeExpression
// | TypeIdentifier
// | Identifier:'"f64"'
// | Block
// | LeftBrace:'"{"'
// | ExpressionStatement
// | BinaryExpression
// | Identifier
// | Identifier:'"x"'
// | Plus:'"+"'
// | LiteralExpression
// | Number:'"7"'
// | RightBrace:'"}"'
// | FunctionDecl
// | Fun:'"fun"'
// | Identifier:'"test"'
// | ParamList
// | LeftParen:'"("'
// | RightParen:'")"'
// | ReturnType
// | Arrow:'"->"'
// | TypeExpression
// | TypeIdentifier
// | Identifier:'"f64"'
// | Block
// | LeftBrace:'"{"'
// | ExpressionStatement
// | CallExpression
// | Identifier
// | Identifier:'"foo"'
// | ArgumentList
// | LeftParen:'"("'
// | Argument
// | LiteralExpression
// | Number:'"1"'
// | RightParen:'")"'
// | RightBrace:'"}"'
// |
// @compiles-to:
// | function foo (1 args, 0 locals):
// | strings (0):
// | code (4):
// | 0: LoadArgument(0)
// | 1: PushFloat(7.0)
// | 2: FloatAdd
// | 3: Return
// | function test (0 args, 0 locals):
// | strings (0):
// | code (4):
// | 0: PushFloat(1.0)
// | 1: LoadFunction(0)
// | 2: Call(1)
// | 3: Return
// | function << module >> (0 args, 0 locals):
// | strings (0):
// | code (2):
// | 0: PushNothing
// | 1: Return
// |

View file

@ -1,61 +0,0 @@
fun test() -> f64 {
1 * 2 + -3 * 4
}
// @no-errors
// @eval: Float(-10.0)
// @type: 23 f64
// @concrete:
// | File
// | FunctionDecl
// | Fun:'"fun"'
// | Identifier:'"test"'
// | ParamList
// | LeftParen:'"("'
// | RightParen:'")"'
// | ReturnType
// | Arrow:'"->"'
// | TypeExpression
// | TypeIdentifier
// | Identifier:'"f64"'
// | Block
// | LeftBrace:'"{"'
// | ExpressionStatement
// | BinaryExpression
// | BinaryExpression
// | LiteralExpression
// | Number:'"1"'
// | Star:'"*"'
// | LiteralExpression
// | Number:'"2"'
// | Plus:'"+"'
// | BinaryExpression
// | UnaryExpression
// | Minus:'"-"'
// | LiteralExpression
// | Number:'"3"'
// | Star:'"*"'
// | LiteralExpression
// | Number:'"4"'
// | RightBrace:'"}"'
// |
// @compiles-to:
// | function test (0 args, 0 locals):
// | strings (0):
// | code (10):
// | 0: PushFloat(1.0)
// | 1: PushFloat(2.0)
// | 2: FloatMultiply
// | 3: PushFloat(3.0)
// | 4: PushFloat(-1.0)
// | 5: FloatMultiply
// | 6: PushFloat(4.0)
// | 7: FloatMultiply
// | 8: FloatAdd
// | 9: Return
// | function << module >> (0 args, 0 locals):
// | strings (0):
// | code (2):
// | 0: PushNothing
// | 1: Return
// |

View file

@ -1,34 +0,0 @@
fun test() -> f64 {
let x = 12;
let y = 13;
let z = 2;
x = y = z;
x
}
// @no-errors
// @compiles-to:
// | function test (0 args, 3 locals):
// | strings (0):
// | code (14):
// | 0: PushFloat(12.0)
// | 1: StoreLocal(0)
// | 2: PushFloat(13.0)
// | 3: StoreLocal(1)
// | 4: PushFloat(2.0)
// | 5: StoreLocal(2)
// | 6: LoadLocal(2)
// | 7: Dup
// | 8: StoreLocal(1)
// | 9: Dup
// | 10: StoreLocal(0)
// | 11: Discard
// | 12: LoadLocal(0)
// | 13: Return
// | function << module >> (0 args, 0 locals):
// | strings (0):
// | code (2):
// | 0: PushNothing
// | 1: Return
// |
// @eval: Float(2.0)

View file

@ -1,34 +0,0 @@
fun test() {
{}
}
// @no-errors
// @compiles-to:
// | function test (0 args, 0 locals):
// | strings (0):
// | code (2):
// | 0: PushNothing
// | 1: Return
// | function << module >> (0 args, 0 locals):
// | strings (0):
// | code (2):
// | 0: PushNothing
// | 1: Return
// |
// @eval: Nothing
// @type: 15 nothing
// @concrete:
// | File
// | FunctionDecl
// | Fun:'"fun"'
// | Identifier:'"test"'
// | ParamList
// | LeftParen:'"("'
// | RightParen:'")"'
// | Block
// | LeftBrace:'"{"'
// | Block
// | LeftBrace:'"{"'
// | RightBrace:'"}"'
// | RightBrace:'"}"'
// |

View file

@ -1,66 +0,0 @@
fun test() -> bool {
true and false or false and !true
}
// @no-errors
// @compiles-to:
// | function test (0 args, 0 locals):
// | strings (0):
// | code (15):
// | 0: PushTrue
// | 1: JumpTrue(4)
// | 2: PushFalse
// | 3: Jump(5)
// | 4: PushFalse
// | 5: JumpFalse(8)
// | 6: PushTrue
// | 7: Jump(14)
// | 8: PushFalse
// | 9: JumpTrue(12)
// | 10: PushFalse
// | 11: Jump(14)
// | 12: PushTrue
// | 13: BoolNot
// | 14: Return
// | function << module >> (0 args, 0 locals):
// | strings (0):
// | code (2):
// | 0: PushNothing
// | 1: Return
// |
// @eval: Bool(false)
// @type: 15 bool
// @concrete:
// | File
// | FunctionDecl
// | Fun:'"fun"'
// | Identifier:'"test"'
// | ParamList
// | LeftParen:'"("'
// | RightParen:'")"'
// | ReturnType
// | Arrow:'"->"'
// | TypeExpression
// | TypeIdentifier
// | Identifier:'"bool"'
// | Block
// | LeftBrace:'"{"'
// | ExpressionStatement
// | BinaryExpression
// | BinaryExpression
// | LiteralExpression
// | True:'"true"'
// | And:'"and"'
// | LiteralExpression
// | False:'"false"'
// | Or:'"or"'
// | BinaryExpression
// | LiteralExpression
// | False:'"false"'
// | And:'"and"'
// | UnaryExpression
// | Bang:'"!"'
// | LiteralExpression
// | True:'"true"'
// | RightBrace:'"}"'
//

View file

@ -1,108 +0,0 @@
class Point {
x: f64;
y: f64;
fun something_static() -> f64 {
12
}
fun square_length(self) -> f64 {
self.x * self.x + self.y * self.y
}
}
class Line {
start: Point;
end: Point;
}
fun test() -> f64 {
let line = new Line {
start: new Point { y: 23, x: 7 },
end: new Point { x: 999, y: 99 },
};
let pt = line.start;
let z = line.start.x + pt.square_length() + Point.something_static();
z
}
// @no-errors
// @eval: Float(597.0)
// @compiles-to:
// | function something_static (0 args, 0 locals):
// | strings (0):
// | code (2):
// | 0: PushFloat(12.0)
// | 1: Return
// | function square_length (1 args, 0 locals):
// | strings (0):
// | code (12):
// | 0: LoadArgument(0)
// | 1: LoadSlot(0)
// | 2: LoadArgument(0)
// | 3: LoadSlot(0)
// | 4: FloatMultiply
// | 5: LoadArgument(0)
// | 6: LoadSlot(1)
// | 7: LoadArgument(0)
// | 8: LoadSlot(1)
// | 9: FloatMultiply
// | 10: FloatAdd
// | 11: Return
// | function Point (6 args, 0 locals):
// | strings (1):
// | 0: "Point"
// | code (6):
// | 0: LoadArgument(1)
// | 1: LoadArgument(0)
// | 2: PushString(0)
// | 3: PushInt(33)
// | 4: NewObject(2)
// | 5: Return
// | function Line (4 args, 0 locals):
// | strings (1):
// | 0: "Line"
// | code (6):
// | 0: LoadArgument(1)
// | 1: LoadArgument(0)
// | 2: PushString(0)
// | 3: PushInt(40)
// | 4: NewObject(2)
// | 5: Return
// | function test (0 args, 3 locals):
// | strings (0):
// | code (27):
// | 0: PushFloat(99.0)
// | 1: PushFloat(999.0)
// | 2: LoadFunction(2)
// | 3: Call(2)
// | 4: PushFloat(23.0)
// | 5: PushFloat(7.0)
// | 6: LoadFunction(2)
// | 7: Call(2)
// | 8: LoadFunction(3)
// | 9: Call(2)
// | 10: StoreLocal(0)
// | 11: LoadLocal(0)
// | 12: LoadSlot(0)
// | 13: StoreLocal(1)
// | 14: LoadLocal(0)
// | 15: LoadSlot(0)
// | 16: LoadSlot(0)
// | 17: LoadLocal(1)
// | 18: LoadFunction(1)
// | 19: Call(1)
// | 20: FloatAdd
// | 21: LoadFunction(0)
// | 22: Call(0)
// | 23: FloatAdd
// | 24: StoreLocal(2)
// | 25: LoadLocal(2)
// | 26: Return
// | function << module >> (0 args, 0 locals):
// | strings (0):
// | code (2):
// | 0: PushNothing
// | 1: Return
// |

View file

@ -1,18 +0,0 @@
fun is_hello(x:string) -> bool {
x == "hello"
}
fun is_23(x:f64) -> bool {
x == 23.0
}
fun is_false(x:bool) -> bool {
x == false
}
fun test() -> bool {
is_hello("hello") and is_23(23) and is_false(false)
}
// @no-errors
// @eval: Bool(true)

View file

@ -1,79 +0,0 @@
fun test() -> f64 {
if true { "discarded"; 23 } else { 45 }
}
// @no-errors
// Here come some type probes!
// (type of the condition)
// @type: 27 bool
//
// (the discarded expression)
// @type: 34 string
//
// (the "then" clause)
// @type: 47 f64
// @type: 50 f64
//
// (the "else" clause)
// @type: 59 f64
// @type: 62 f64
//
// @concrete:
// | File
// | FunctionDecl
// | Fun:'"fun"'
// | Identifier:'"test"'
// | ParamList
// | LeftParen:'"("'
// | RightParen:'")"'
// | ReturnType
// | Arrow:'"->"'
// | TypeExpression
// | TypeIdentifier
// | Identifier:'"f64"'
// | Block
// | LeftBrace:'"{"'
// | IfStatement
// | ConditionalExpression
// | If:'"if"'
// | LiteralExpression
// | True:'"true"'
// | Block
// | LeftBrace:'"{"'
// | ExpressionStatement
// | LiteralExpression
// | String:'"\"discarded\""'
// | Semicolon:'";"'
// | ExpressionStatement
// | LiteralExpression
// | Number:'"23"'
// | RightBrace:'"}"'
// | Else:'"else"'
// | Block
// | LeftBrace:'"{"'
// | ExpressionStatement
// | LiteralExpression
// | Number:'"45"'
// | RightBrace:'"}"'
// | RightBrace:'"}"'
//
// @compiles-to:
// | function test (0 args, 0 locals):
// | strings (1):
// | 0: "discarded"
// | code (8):
// | 0: PushTrue
// | 1: JumpFalse(6)
// | 2: PushString(0)
// | 3: Discard
// | 4: PushFloat(23.0)
// | 5: Jump(7)
// | 6: PushFloat(45.0)
// | 7: Return
// | function << module >> (0 args, 0 locals):
// | strings (0):
// | code (2):
// | 0: PushNothing
// | 1: Return
// |
// @eval: Float(23.0)

View file

@ -1,15 +0,0 @@
;
// @no-errors
// @concrete:
// | File
// | ExpressionStatement
// | Semicolon:'";"'
// |
// @compiles-to:
// | function << module >> (0 args, 0 locals):
// | strings (0):
// | code (2):
// | 0: PushNothing
// | 1: Return
// |

View file

@ -1,20 +0,0 @@
fun something() { }
fun something_else() { }
fun wrong() {
let x = 12;
let y = "hello";
x = y; // should be an error
y = x; // should be an error
let z = 23;
y = x = z; // should be an error
something_else = something; // should be an error
}
// @expect-errors:
// | __test__:7:4: cannot assign a value of type 'string' to type 'f64'
// | __test__:8:4: cannot assign a value of type 'f64' to type 'string'
// | __test__:11:4: cannot assign a value of type 'f64' to type 'string'
// | __test__:13:2: cannot assign a new value to a function declaration

View file

@ -1,4 +0,0 @@
112 - "twenty five";
"twenty five" - 112;
// @type-error: 4 cannot apply binary operator '-' to expressions of type 'f64' (on the left) and 'string' (on the right)
// @type-error: 35 cannot apply binary operator '-' to expressions of type 'string' (on the left) and 'f64' (on the right)

View file

@ -1,7 +0,0 @@
fun test() {
if true true { }
}
// NOTE: These errors should be better
// @expect-errors:
// | __test__:2:10: Error at 'true': expected a block after `if`

View file

@ -1,8 +0,0 @@
class Foo {}
fun test() -> f64 {
Foo + 23
}
// @expect-errors:
// | __test__:4:6: cannot apply binary operator '+' to expressions of type 'class Foo' (on the left) and 'f64' (on the right)

View file

@ -1,7 +0,0 @@
class Foo {
x: f64;
x: f64;
}
// @expect-errors:
// | __test__:3:2: duplicate definition of field 'x'

View file

@ -1,24 +0,0 @@
class Point {
x: f64;
y: f64;
}
fun test() {
let one = new Point { x: 23 };
let two = new Point { y: 23 };
let three = new Point { y: 23, x: 123, z: "hello" };
let four = new Point { y: 23, x: "what" };
let five = new Point { y: 23, x };
let x = "now";
let six = new Point { y: 23, x };
}
// @expect-errors:
// | __test__:7:12: missing an initializer for field y
// | __test__:8:12: missing an initializer for field x
// | __test__:9:41: Point instance does not have a field named z
// | __test__:10:32: field x is of type f64, but this expression generates a string
// | __test__:12:32: cannot find value x here
// | __test__:15:31: field x is of type f64, but this expression generates a string

View file

@ -1,4 +0,0 @@
fun something(x: f64, x: f64) {}
// @expect-errors:
// | __test__:1:22: duplicate definition of parameter 'x'

View file

@ -1,16 +0,0 @@
fun nested() {
fun foo() {}
fun foo() {}
;
}
fun nested() {}
class Bar {}
class Bar {}
// @expect-errors:
// | __test__:3:2: duplicate definition of function 'foo'
// | __test__:8:0: duplicate definition of function 'nested'
// | __test__:11:0: duplicate definition of class 'Bar'

View file

@ -1,4 +0,0 @@
if 23 { "what" } else { "the" }
// @expect-errors:
// | __test__:1:3: this condition produces 'f64', but must produce bool

View file

@ -1,4 +0,0 @@
if (if false { true }) { 32 } else { 23 }
// @expect-errors:
// | __test__:1:3: this condition produces 'nothing or bool', but must produce bool

View file

@ -1,12 +0,0 @@
{
// This is a block-local declaration; it should *not* appear in the global
// environment.
let y = 23;
}
fun foo() -> f64 {
y + 3
}
// @expect-errors:
// | __test__:8:2: cannot find value y here

View file

@ -1,5 +0,0 @@
fun this_is_missing_a_type(x) -> f64 {
23
}
// @check-error: the parameter is missing a type

View file

@ -1,6 +0,0 @@
fun foo(x: f64) {}
x;
// @type-error: 19 cannot find value x here
// Used to have a bug where statements after a function declaration would
// bind to the declaration's arguments, whoops.

View file

@ -1,11 +0,0 @@
class Foo {
fun bar(self) {}
}
fun test() {
let obj = new Foo {};
let f = obj.bar;
}
// @expect-errors:
// | __test__:7:6: methods cannot be assigned to variables

View file

@ -1,14 +0,0 @@
fun something() -> f64 {
return
}
fun test() -> f64 {
if false {
return "no way!";
}
23.0
}
// @expect-errors:
// | __test__:2:2: callers of this function expect a value of type 'f64' but this statement returns a value of type 'nothing'
// | __test__:7:4: callers of this function expect a value of type 'f64' but this statement returns a value of type 'string'

View file

@ -1,5 +0,0 @@
fun test() -> bool {
32
}
// @check-error: the body of this function yields a value of type 'f64', but callers expect this function to produce a 'bool'

View file

@ -1,2 +0,0 @@
- "twenty five";
// @type-error: 0 cannot apply unary operator '-' to value of type string

View file

@ -1,4 +0,0 @@
let x = y;
x;
// @type-error: 11 cannot find value y here

View file

@ -1,6 +0,0 @@
fun test() {
while 12 {}
}
// @expect-errors:
// | __test__:2:8: this condition produces 'f64', but must produce bool

View file

@ -1,12 +0,0 @@
class Foo {
foo: f64;
}
fun test() {
let f = new Foo { foo: 12 };
let z = f.{let y = 222; foo };
}
// @expect-errors:
// | __test__:7:12: Error at '{': expected an identifier after a '.' in member access
// | __test__:7:26: cannot find value foo here

View file

@ -1,11 +0,0 @@
fun foo(x: f64) {}
let x = foo("hello");
x;
let y = foo();
y;
let a = foo(2, 3);
a;
// @type-error: 41 parameter 0 has an incompatible type: expected f64 but got string
// @type-error: 59 expected 1 parameters
// @type-error: 81 expected 1 parameters

View file

@ -1,39 +0,0 @@
class MyIterator {
list: MyList<$0>;
index: i64;
fun next(self) -> $0 or nothing {
if self.index < list.len() {
let result = self.list[self.index];
self.index += 1;
result
} else {
nothing
}
}
}
class MyList {
list: list<$0>;
fun get_iterator(self) -> MyIterator {
new MyIterator {
list: self.list,
index: 0,
}
}
}
fun test() -> f64 {
// Type needs to be inferred as MyList<f64>
let x = MyList { list: [1, 2, 3] };
let sum = 0;
for v in x { // Pick up the iterator methods
sum = sum + v;
}
sum
}
// @ignore undesigned garbage, like all generics
// @no-errors

View file

@ -1,11 +0,0 @@
fun generic_add(x, y) {
x + y
}
fun test() {
generic_add(10, 10)
}
// @ignore undesigned garbage, like all generics
// @no-errors
// @eval: 20

View file

@ -1,3 +0,0 @@
if true { "blarg" } else { 23 }
// @type: 0 f64 or string

View file

@ -1,25 +0,0 @@
class Foo {
a: f64;
}
fun test() -> f64 {
let b = new Foo { a : 1000 };
let result = 0;
if b is c:Foo and c.a == 1000 {
result = result + 1;
}
if b is c:Foo and c.a == 24 {
result = result + 10;
}
if b is c:_ {
result = result + 100; // underscore should always match!
}
if b is c:Foo {
result = result + c.a; // c should still be in scope!
}
result
}
// @no-errors
// @eval: Float(1101.0)

View file

@ -1,16 +0,0 @@
fun sum(x: list<f64>) -> f64 {
let result = 0;
for v in x {
result = result + v;
}
result
}
fun test() -> f64 {
let val = [1, 2, 3];
sum(val)
}
// @no-errors
// @type: 155 list<f64>
// @eval: Float(6.0)

View file

@ -1,37 +0,0 @@
class Finished {}
let FINISHED = new Finished {};
class Iterator {
current: f64;
fun next(self) -> f64 or Finished {
if self.current < 10 {
let result = self.current;
self.current = self.current + 1;
return result;
}
FINISHED
}
}
fun test() -> f64 {
let result = 1;
let i = 0;
while i < 10 {
result = result * 2;
i = i + 1;
}
let sum = 0;
let it = new Iterator { current: 0 };
while it.next() is v: f64 {
sum = sum + v;
}
result + sum
}
// @no-errors
// @eval: Float(1069.0)

View file

@ -1,10 +0,0 @@
42;
// @no-errors
// @type: 0 f64
// @concrete:
// | File
// | ExpressionStatement
// | LiteralExpression
// | Number:'"42"'
// | Semicolon:'";"'

View file

@ -1,6 +0,0 @@
fun explicit_return() -> f64 {
return 10.0;
// No error: after this point code is unreachable.
}
// @no-errors

View file

@ -1,14 +0,0 @@
"Hello " + 'world!';
// @no-errors
// @type: 9 string
// @concrete:
// | File
// | ExpressionStatement
// | BinaryExpression
// | LiteralExpression
// | String:'"\"Hello \""'
// | Plus:'"+"'
// | LiteralExpression
// | String:'"'world!'"'
// | Semicolon:'";"'

View file

@ -1,89 +0,0 @@
let x = 23;
let y = x * 2;
let z = y;
z;
fun test() -> f64 {
x + y
}
// @no-errors
// @type: 38 f64
// @eval: Float(69.0)
// @concrete:
// | File
// | LetStatement
// | Let:'"let"'
// | Identifier:'"x"'
// | Equal:'"="'
// | LiteralExpression
// | Number:'"23"'
// | Semicolon:'";"'
// | LetStatement
// | Let:'"let"'
// | Identifier:'"y"'
// | Equal:'"="'
// | BinaryExpression
// | Identifier
// | Identifier:'"x"'
// | Star:'"*"'
// | LiteralExpression
// | Number:'"2"'
// | Semicolon:'";"'
// | LetStatement
// | Let:'"let"'
// | Identifier:'"z"'
// | Equal:'"="'
// | Identifier
// | Identifier:'"y"'
// | Semicolon:'";"'
// | ExpressionStatement
// | Identifier
// | Identifier:'"z"'
// | Semicolon:'";"'
// | FunctionDecl
// | Fun:'"fun"'
// | Identifier:'"test"'
// | ParamList
// | LeftParen:'"("'
// | RightParen:'")"'
// | ReturnType
// | Arrow:'"->"'
// | TypeExpression
// | TypeIdentifier
// | Identifier:'"f64"'
// | Block
// | LeftBrace:'"{"'
// | ExpressionStatement
// | BinaryExpression
// | Identifier
// | Identifier:'"x"'
// | Plus:'"+"'
// | Identifier
// | Identifier:'"y"'
// | RightBrace:'"}"'
// |
// @compiles-to:
// | function test (0 args, 0 locals):
// | strings (0):
// | code (4):
// | 0: LoadModule(0)
// | 1: LoadModule(1)
// | 2: FloatAdd
// | 3: Return
// | function << module >> (0 args, 0 locals):
// | strings (0):
// | code (12):
// | 0: PushFloat(23.0)
// | 1: StoreModule(0)
// | 2: LoadModule(0)
// | 3: PushFloat(2.0)
// | 4: FloatMultiply
// | 5: StoreModule(1)
// | 6: LoadModule(1)
// | 7: StoreModule(2)
// | 8: LoadModule(2)
// | 9: Discard
// | 10: PushNothing
// | 11: Return
// |

View file

@ -1,22 +0,0 @@
fun worst_fib(n: f64) -> f64 {
if n == 0 {
0
} else if n == 1 {
1
} else {
worst_fib(n-2) + delegate_worst_fib(n-1)
}
}
// NOTE: This nonsense exists to make sure mutual recursion works, in
// addition to direct recursion.
fun delegate_worst_fib(n: f64) -> f64 {
worst_fib(n)
}
fun test() -> f64 {
worst_fib(10)
}
// @no-errors
// @eval: Float(55.0)

View file

@ -1,3 +0,0 @@
export fun hello() -> string {
"hello"
}

View file

@ -1,14 +0,0 @@
import "./foo.fine" as foo;
// NOTE: This is right here because a known miscompilation will cause us to
// call this function instead of the actual target.
fun wrong_function() -> string {
"VERY WRONG"
}
fun test() -> string {
foo.hello() + " world"
}
// @no-errors
// @eval: String("hello world")

View file

@ -3,26 +3,27 @@
version = 3
[[package]]
name = "autocfg"
version = "1.0.1"
name = "bindgen"
version = "0.63.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a"
[[package]]
name = "bit-set"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e11e16035ea35e4e5997b393eacbf6f63983188f7a2ad25bfb13465f5ad59de"
checksum = "36d860121800b2a9a94f9b5604b332d5cffb234ce17609ea479d723dbc9d3885"
dependencies = [
"bit-vec",
"bitflags",
"cexpr",
"clang-sys",
"lazy_static",
"lazycell",
"log",
"peeking_take_while",
"proc-macro2",
"quote",
"regex",
"rustc-hash",
"shlex",
"syn",
"which",
]
[[package]]
name = "bit-vec"
version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb"
[[package]]
name = "bitflags"
version = "1.3.2"
@ -30,10 +31,19 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "byteorder"
version = "1.4.3"
name = "cc"
version = "1.0.79"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610"
checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f"
[[package]]
name = "cexpr"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766"
dependencies = [
"nom",
]
[[package]]
name = "cfg-if"
@ -42,28 +52,28 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "doc-comment"
version = "0.3.3"
name = "clang-sys"
version = "1.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10"
[[package]]
name = "fnv"
version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
[[package]]
name = "getrandom"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7fcd999463524c52659517fe2cea98493cfe485d10565e7b0fb07dbba7ad2753"
checksum = "c688fc74432808e3eb684cae8830a86be1d66a2bd58e1f248ed0960a590baf6f"
dependencies = [
"cfg-if",
"glob",
"libc",
"wasi",
"libloading",
]
[[package]]
name = "either"
version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91"
[[package]]
name = "glob"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b"
[[package]]
name = "lazy_static"
version = "1.4.0"
@ -71,16 +81,38 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
version = "0.2.106"
name = "lazycell"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a60553f9a9e039a333b4e9b20573b9e9b9c0bb3a11e201ccc48ef4283456d673"
checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55"
[[package]]
name = "libc"
version = "0.2.146"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f92be4933c13fd498862a9e02a3055f8a8d9c039ce33db97306fd5a6caa7f29b"
[[package]]
name = "libloading"
version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b67380fd3b2fbe7527a606e18729d21c6f3951633d0500574c4dc22d2d638b9f"
dependencies = [
"cfg-if",
"winapi",
]
[[package]]
name = "log"
version = "0.4.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4"
[[package]]
name = "memchr"
version = "2.4.1"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a"
checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
[[package]]
name = "minimal-lexical"
@ -91,174 +123,126 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
[[package]]
name = "nom"
version = "7.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a"
dependencies = [
"doc-comment",
"memchr",
"minimal-lexical",
"proptest",
]
[[package]]
name = "num-traits"
version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290"
name = "oden-js-sys"
version = "0.1.0"
dependencies = [
"autocfg",
"bindgen",
"cc",
"walkdir",
]
[[package]]
name = "ppv-lite86"
version = "0.2.15"
name = "once_cell"
version = "1.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed0cfbc8191465bed66e1718596ee0b0b35d5ee1f41c5df2189d0fe8bde535ba"
checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d"
[[package]]
name = "proptest"
version = "1.0.0"
name = "peeking_take_while"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e0d9cc07f18492d879586c92b485def06bc850da3118075cd45d50e9c95b0e5"
checksum = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099"
[[package]]
name = "proc-macro2"
version = "1.0.60"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dec2b086b7a862cf4de201096214fa870344cf922b2b30c167badb3af3195406"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b9ab9c7eadfd8df19006f1cf1a4aed13540ed5cbc047010ece5826e10825488"
dependencies = [
"proc-macro2",
]
[[package]]
name = "regex"
version = "1.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d0ab3ca65655bb1e41f2a8c8cd662eb4fb035e67c3f78da1d61dffe89d07300f"
dependencies = [
"bit-set",
"bitflags",
"byteorder",
"lazy_static",
"num-traits",
"quick-error 2.0.1",
"rand",
"rand_chacha",
"rand_xorshift",
"regex-syntax",
"rusty-fork",
"tempfile",
]
[[package]]
name = "quick-error"
version = "1.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0"
[[package]]
name = "quick-error"
version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3"
[[package]]
name = "rand"
version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2e7573632e6454cf6b99d7aac4ccca54be06da05aca2ef7423d22d27d4d4bcd8"
dependencies = [
"libc",
"rand_chacha",
"rand_core",
"rand_hc",
]
[[package]]
name = "rand_chacha"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
dependencies = [
"ppv-lite86",
"rand_core",
]
[[package]]
name = "rand_core"
version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7"
dependencies = [
"getrandom",
]
[[package]]
name = "rand_hc"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d51e9f596de227fda2ea6c84607f5558e196eeaf43c986b724ba4fb8fdf497e7"
dependencies = [
"rand_core",
]
[[package]]
name = "rand_xorshift"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d25bf25ec5ae4a3f1b92f929810509a2f53d7dca2f50b794ff57e3face536c8f"
dependencies = [
"rand_core",
]
[[package]]
name = "redox_syscall"
version = "0.2.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8383f39639269cde97d255a32bdb68c047337295414940c68bdd30c2e13203ff"
dependencies = [
"bitflags",
]
[[package]]
name = "regex-syntax"
version = "0.6.25"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b"
checksum = "436b050e76ed2903236f032a59761c1eb99e1b0aead2c257922771dab1fc8c78"
[[package]]
name = "remove_dir_all"
version = "0.5.3"
name = "rustc-hash"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7"
checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
[[package]]
name = "same-file"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
dependencies = [
"winapi",
"winapi-util",
]
[[package]]
name = "rusty-fork"
version = "0.3.0"
name = "shlex"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cb3dcc6e454c328bb824492db107ab7c0ae8fcffe4ad210136ef014458c1bc4f"
checksum = "43b2853a4d09f215c24cc5489c992ce46052d359b5109343cbafbf26bc62f8a3"
[[package]]
name = "syn"
version = "1.0.109"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
dependencies = [
"fnv",
"quick-error 1.2.3",
"tempfile",
"wait-timeout",
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "tempfile"
version = "3.2.0"
name = "unicode-ident"
version = "1.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dac1c663cfc93810f88aed9b8941d48cabf856a1b111c29a40439018d870eb22"
checksum = "b15811caf2415fb889178633e7724bad2509101cde276048e013b9def5e51fa0"
[[package]]
name = "walkdir"
version = "2.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "36df944cda56c7d8d8b7496af378e6b16de9284591917d307c9b4d313c44e698"
dependencies = [
"cfg-if",
"same-file",
"winapi-util",
]
[[package]]
name = "which"
version = "4.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2441c784c52b289a054b7201fc93253e288f094e2f4be9058343127c4226a269"
dependencies = [
"either",
"libc",
"rand",
"redox_syscall",
"remove_dir_all",
"winapi",
"once_cell",
]
[[package]]
name = "wait-timeout"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6"
dependencies = [
"libc",
]
[[package]]
name = "wasi"
version = "0.10.2+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6"
[[package]]
name = "winapi"
version = "0.3.9"
@ -275,6 +259,15 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
[[package]]
name = "winapi-util"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
dependencies = [
"winapi",
]
[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"

329
oden-js/Cargo.lock generated Normal file
View file

@ -0,0 +1,329 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "anyhow"
version = "1.0.71"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c7d0618f0e0b7e8ff11427422b64564d5fb0be1940354bfe2e0529b18a9d9b8"
[[package]]
name = "assert_matches"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b34d609dfbaf33d6889b2b7106d3ca345eacad44200913df5ba02bfd31d2ba9"
[[package]]
name = "bindgen"
version = "0.63.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "36d860121800b2a9a94f9b5604b332d5cffb234ce17609ea479d723dbc9d3885"
dependencies = [
"bitflags",
"cexpr",
"clang-sys",
"lazy_static",
"lazycell",
"log",
"peeking_take_while",
"proc-macro2",
"quote",
"regex",
"rustc-hash",
"shlex",
"syn 1.0.109",
"which",
]
[[package]]
name = "bitflags"
version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "cc"
version = "1.0.79"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f"
[[package]]
name = "cexpr"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766"
dependencies = [
"nom",
]
[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "clang-sys"
version = "1.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c688fc74432808e3eb684cae8830a86be1d66a2bd58e1f248ed0960a590baf6f"
dependencies = [
"glob",
"libc",
"libloading",
]
[[package]]
name = "either"
version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91"
[[package]]
name = "glob"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b"
[[package]]
name = "lazy_static"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "lazycell"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55"
[[package]]
name = "libc"
version = "0.2.146"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f92be4933c13fd498862a9e02a3055f8a8d9c039ce33db97306fd5a6caa7f29b"
[[package]]
name = "libloading"
version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b67380fd3b2fbe7527a606e18729d21c6f3951633d0500574c4dc22d2d638b9f"
dependencies = [
"cfg-if",
"winapi",
]
[[package]]
name = "log"
version = "0.4.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4"
[[package]]
name = "memchr"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
[[package]]
name = "minimal-lexical"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
[[package]]
name = "nom"
version = "7.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a"
dependencies = [
"memchr",
"minimal-lexical",
]
[[package]]
name = "oden-js"
version = "0.1.0"
dependencies = [
"anyhow",
"assert_matches",
"bitflags",
"oden-js-sys",
"thiserror",
]
[[package]]
name = "oden-js-sys"
version = "0.1.0"
dependencies = [
"bindgen",
"cc",
"walkdir",
]
[[package]]
name = "once_cell"
version = "1.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d"
[[package]]
name = "peeking_take_while"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099"
[[package]]
name = "proc-macro2"
version = "1.0.60"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dec2b086b7a862cf4de201096214fa870344cf922b2b30c167badb3af3195406"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b9ab9c7eadfd8df19006f1cf1a4aed13540ed5cbc047010ece5826e10825488"
dependencies = [
"proc-macro2",
]
[[package]]
name = "regex"
version = "1.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d0ab3ca65655bb1e41f2a8c8cd662eb4fb035e67c3f78da1d61dffe89d07300f"
dependencies = [
"regex-syntax",
]
[[package]]
name = "regex-syntax"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "436b050e76ed2903236f032a59761c1eb99e1b0aead2c257922771dab1fc8c78"
[[package]]
name = "rustc-hash"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
[[package]]
name = "same-file"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
dependencies = [
"winapi-util",
]
[[package]]
name = "shlex"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "43b2853a4d09f215c24cc5489c992ce46052d359b5109343cbafbf26bc62f8a3"
[[package]]
name = "syn"
version = "1.0.109"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "syn"
version = "2.0.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32d41677bcbe24c20c52e7c70b0d8db04134c5d1066bf98662e2871ad200ea3e"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "thiserror"
version = "1.0.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "978c9a314bd8dc99be594bc3c175faaa9794be04a5a5e153caba6915336cebac"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
version = "1.0.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.18",
]
[[package]]
name = "unicode-ident"
version = "1.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b15811caf2415fb889178633e7724bad2509101cde276048e013b9def5e51fa0"
[[package]]
name = "walkdir"
version = "2.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "36df944cda56c7d8d8b7496af378e6b16de9284591917d307c9b4d313c44e698"
dependencies = [
"same-file",
"winapi-util",
]
[[package]]
name = "which"
version = "4.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2441c784c52b289a054b7201fc93253e288f094e2f4be9058343127c4226a269"
dependencies = [
"either",
"libc",
"once_cell",
]
[[package]]
name = "winapi"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
dependencies = [
"winapi-i686-pc-windows-gnu",
"winapi-x86_64-pc-windows-gnu",
]
[[package]]
name = "winapi-i686-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
[[package]]
name = "winapi-util"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
dependencies = [
"winapi",
]
[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"

View file

@ -1,26 +0,0 @@
[package]
name = "oden"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[features]
default = []
tracing = ["tracy-client/enable"]
[dependencies]
anyhow = "1.0"
bytemuck = { version = "1.13", features = ["derive"] }
deno_ast = { version = "0.29.3", features = ["transpiling", "typescript"] }
env_logger = "0.10"
fontdue = "0.7.3"
image = { version = "0.24", default-features = false, features = ["png"] }
log = "0.4"
lru = "0.11.0"
notify = "6"
oden-js = { path = "../oden-js" }
pollster = "0.3"
sourcemap = "7.0.0"
tracy-client = { version = "0.15.2", default-features = false }
wgpu = "0.17"
winit = "0.28"

Some files were not shown because too many files have changed in this diff Show more