🤔 Add a type inference engine, along with typed literals. (#4)
The typed literal formatting mirrors that of Rust. If no type can be inferred for an untagged literal, the type inference engine will warn the user and then assume that they meant an unsigned 64-bit number. (This is slightly inconvenient, because there can be cases in which our Arbitrary instance may generate a unary negation, in which case we should assume that it's a signed 64-bit number; we may want to revisit this later.) The type inference engine is a standard two-phase one, in which we first generate a series of type constraints, and then we solve those constraints. In this particular implementation, we actually use a third phase to generate a final AST. Finally, to increase the amount of testing performed, I've removed the overflow checking in the evaluator. The only thing we now check for is division by zero. This does make things a trace slower in testing, but hopefully we get more coverage this way.
This commit was merged in pull request #4.
This commit is contained in:
@@ -27,7 +27,7 @@ use codespan_reporting::{diagnostic::Diagnostic, files::SimpleFiles};
|
||||
use lalrpop_util::lalrpop_mod;
|
||||
use logos::Logos;
|
||||
|
||||
mod arbitrary;
|
||||
pub mod arbitrary;
|
||||
mod ast;
|
||||
mod eval;
|
||||
mod location;
|
||||
@@ -40,6 +40,8 @@ lalrpop_mod!(
|
||||
mod pretty;
|
||||
mod validate;
|
||||
|
||||
#[cfg(test)]
|
||||
use crate::syntax::arbitrary::GenerationEnvironment;
|
||||
pub use crate::syntax::ast::*;
|
||||
pub use crate::syntax::location::Location;
|
||||
pub use crate::syntax::parser::{ProgramParser, StatementParser};
|
||||
@@ -48,7 +50,7 @@ pub use crate::syntax::tokens::{LexerError, Token};
|
||||
use ::pretty::{Arena, Pretty};
|
||||
use lalrpop_util::ParseError;
|
||||
#[cfg(test)]
|
||||
use proptest::{prop_assert, prop_assert_eq};
|
||||
use proptest::{arbitrary::Arbitrary, prop_assert, prop_assert_eq};
|
||||
#[cfg(test)]
|
||||
use std::str::FromStr;
|
||||
use thiserror::Error;
|
||||
@@ -73,12 +75,12 @@ pub enum ParserError {
|
||||
/// Raised when we're parsing the file, and run into a token in a
|
||||
/// place we weren't expecting it.
|
||||
#[error("Unrecognized token")]
|
||||
UnrecognizedToken(Location, Location, Token, Vec<String>),
|
||||
UnrecognizedToken(Location, Token, Vec<String>),
|
||||
|
||||
/// Raised when we were expecting the end of the file, but instead
|
||||
/// got another token.
|
||||
#[error("Extra token")]
|
||||
ExtraToken(Location, Token, Location),
|
||||
ExtraToken(Location, Token),
|
||||
|
||||
/// Raised when the lexer just had some sort of internal problem
|
||||
/// and just gave up.
|
||||
@@ -106,30 +108,24 @@ impl ParserError {
|
||||
fn convert(file_idx: usize, err: ParseError<usize, Token, LexerError>) -> Self {
|
||||
match err {
|
||||
ParseError::InvalidToken { location } => {
|
||||
ParserError::InvalidToken(Location::new(file_idx, location))
|
||||
}
|
||||
ParseError::UnrecognizedEof { location, expected } => {
|
||||
ParserError::UnrecognizedEOF(Location::new(file_idx, location), expected)
|
||||
ParserError::InvalidToken(Location::new(file_idx, location..location + 1))
|
||||
}
|
||||
ParseError::UnrecognizedEof { location, expected } => ParserError::UnrecognizedEOF(
|
||||
Location::new(file_idx, location..location + 1),
|
||||
expected,
|
||||
),
|
||||
ParseError::UnrecognizedToken {
|
||||
token: (start, token, end),
|
||||
expected,
|
||||
} => ParserError::UnrecognizedToken(
|
||||
Location::new(file_idx, start),
|
||||
Location::new(file_idx, end),
|
||||
token,
|
||||
expected,
|
||||
),
|
||||
} => {
|
||||
ParserError::UnrecognizedToken(Location::new(file_idx, start..end), token, expected)
|
||||
}
|
||||
ParseError::ExtraToken {
|
||||
token: (start, token, end),
|
||||
} => ParserError::ExtraToken(
|
||||
Location::new(file_idx, start),
|
||||
token,
|
||||
Location::new(file_idx, end),
|
||||
),
|
||||
} => ParserError::ExtraToken(Location::new(file_idx, start..end), token),
|
||||
ParseError::User { error } => match error {
|
||||
LexerError::LexFailure(offset) => {
|
||||
ParserError::LexFailure(Location::new(file_idx, offset))
|
||||
ParserError::LexFailure(Location::new(file_idx, offset..offset + 1))
|
||||
}
|
||||
},
|
||||
}
|
||||
@@ -180,37 +176,25 @@ impl<'a> From<&'a ParserError> for Diagnostic<usize> {
|
||||
),
|
||||
|
||||
// encountered a token where it shouldn't be
|
||||
ParserError::UnrecognizedToken(start, end, token, expected) => {
|
||||
ParserError::UnrecognizedToken(loc, token, expected) => {
|
||||
let expected_str =
|
||||
format!("unexpected token {}{}", token, display_expected(expected));
|
||||
let unexpected_str = format!("unexpected token {}", token);
|
||||
let labels = start.range_label(end);
|
||||
|
||||
Diagnostic::error()
|
||||
.with_labels(
|
||||
labels
|
||||
.into_iter()
|
||||
.map(|l| l.with_message(unexpected_str.clone()))
|
||||
.collect(),
|
||||
)
|
||||
.with_message(expected_str)
|
||||
.with_labels(vec![loc.primary_label().with_message(unexpected_str)])
|
||||
}
|
||||
|
||||
// I think we get this when we get a token, but were expected EOF
|
||||
ParserError::ExtraToken(start, token, end) => {
|
||||
ParserError::ExtraToken(loc, token) => {
|
||||
let expected_str =
|
||||
format!("unexpected token {} after the expected end of file", token);
|
||||
let unexpected_str = format!("unexpected token {}", token);
|
||||
let labels = start.range_label(end);
|
||||
|
||||
Diagnostic::error()
|
||||
.with_labels(
|
||||
labels
|
||||
.into_iter()
|
||||
.map(|l| l.with_message(unexpected_str.clone()))
|
||||
.collect(),
|
||||
)
|
||||
.with_message(expected_str)
|
||||
.with_labels(vec![loc.primary_label().with_message(unexpected_str)])
|
||||
}
|
||||
|
||||
// simple lexer errors
|
||||
@@ -293,24 +277,27 @@ fn order_of_operations() {
|
||||
Program::from_str(muladd1).unwrap(),
|
||||
Program {
|
||||
statements: vec![Statement::Binding(
|
||||
Location::new(testfile, 0),
|
||||
"x".to_string(),
|
||||
Location::new(testfile, 0..1),
|
||||
Name::manufactured("x"),
|
||||
Expression::Primitive(
|
||||
Location::new(testfile, 6),
|
||||
Location::new(testfile, 6..7),
|
||||
"+".to_string(),
|
||||
vec![
|
||||
Expression::Value(Location::new(testfile, 4), Value::Number(None, 1)),
|
||||
Expression::Value(
|
||||
Location::new(testfile, 4..5),
|
||||
Value::Number(None, None, 1),
|
||||
),
|
||||
Expression::Primitive(
|
||||
Location::new(testfile, 10),
|
||||
Location::new(testfile, 10..11),
|
||||
"*".to_string(),
|
||||
vec![
|
||||
Expression::Value(
|
||||
Location::new(testfile, 8),
|
||||
Value::Number(None, 2),
|
||||
Location::new(testfile, 8..9),
|
||||
Value::Number(None, None, 2),
|
||||
),
|
||||
Expression::Value(
|
||||
Location::new(testfile, 12),
|
||||
Value::Number(None, 3),
|
||||
Location::new(testfile, 12..13),
|
||||
Value::Number(None, None, 3),
|
||||
),
|
||||
]
|
||||
)
|
||||
@@ -350,8 +337,8 @@ proptest::proptest! {
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn generated_run_or_overflow(program: Program) {
|
||||
fn generated_run_or_overflow(program in Program::arbitrary_with(GenerationEnvironment::new(false))) {
|
||||
use crate::eval::{EvalError, PrimOpError};
|
||||
assert!(matches!(program.eval(), Ok(_) | Err(EvalError::PrimOp(PrimOpError::MathFailure(_)))))
|
||||
prop_assert!(matches!(program.eval(), Ok(_) | Err(EvalError::PrimOp(PrimOpError::MathFailure(_)))));
|
||||
}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user