Fix test code and corner case of new -> mainline conversion
yannham committed Nov 20, 2024
1 parent ea85c46 commit eb338a1
Showing 7 changed files with 61 additions and 41 deletions.
35 changes: 30 additions & 5 deletions core/src/bytecode/ast/compat.rs
@@ -1082,17 +1082,34 @@ impl<'ast> FromAst<Node<'ast>> for term::Term {
                 body,
                 rec,
             } => {
+                // Mainline term bindings can't have any metadata associated with them. We need to
+                // rewrite let metadata to be free-standing type and contract annotations instead,
+                // which is achieved by this helper.
+                fn with_metadata(metadata: &LetMetadata<'_>, value: &Ast<'_>) -> term::RichTerm {
+                    let value: term::RichTerm = value.to_mainline();
+                    let pos = value.pos;
+
+                    if metadata.annotation.is_empty() {
+                        return value;
+                    }
+
+                    term::RichTerm::new(
+                        term::Term::Annotated(metadata.annotation.to_mainline(), value),
+                        pos,
+                    )
+                }
+
                 // We try to collect all patterns as single identifiers. If this works, we can emit
                 // a simpler / more compact `Let`.
                 let try_bindings = bindings
                     .iter()
                     .map(
                         |LetBinding {
                              pattern,
-                             metadata: _,
+                             metadata,
                              value,
                          }| match pattern.data {
-                            PatternData::Any(id) => Some((id, value.to_mainline())),
+                            PatternData::Any(id) => Some((id, with_metadata(metadata, value))),
                             _ => None,
                         },
                     )
@@ -1113,9 +1130,9 @@ impl<'ast> FromAst<Node<'ast>> for term::Term {
                         |LetBinding {
                              pattern,
                              value,
-                             metadata: _,
+                             metadata,
                          }| {
-                            (pattern.to_mainline(), value.to_mainline())
+                            (pattern.to_mainline(), with_metadata(metadata, value))
                         },
                     )
                     .collect();
@@ -1159,7 +1176,15 @@ impl<'ast> FromAst<Node<'ast>> for term::Term {
             }) => {
                 let fields = stat_fields
                     .iter()
-                    .map(|(id, field)| (*id, field.to_mainline()))
+                    .map(|(id, field)| {
+                        let mut field: term::record::Field = field.to_mainline();
+
+                        for labeled_ty in field.metadata.annotation.iter_mut() {
+                            labeled_ty.label.field_name = Some(*id);
+                        }
+
+                        (*id, field)
+                    })
                     .collect();
 
                 let dyn_fields = dyn_fields
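
Aside (not part of the commit): the corner case fixed above is that mainline `Let` bindings carry no per-binding metadata, so a non-empty annotation has to be re-attached to the bound value as a free-standing annotation node; the same file change also stamps each record field's name into its annotation labels. Below is a minimal, self-contained sketch of the let-metadata desugaring, using hypothetical toy types rather than Nickel's real `Ast`, `term::Term`, or `LetMetadata`:

// Toy sketch of the desugaring: a binding's non-empty annotation is moved onto
// the bound value as a wrapper node, while an empty annotation leaves the value
// untouched. Types here are illustrative stand-ins, not Nickel's AST.

#[derive(Debug, PartialEq)]
enum Term {
    Num(i64),
    // Free-standing annotation node wrapping a value, standing in for the
    // `term::Term::Annotated` constructor used in the real conversion.
    Annotated(Vec<String>, Box<Term>),
}

#[derive(Default)]
struct LetMetadata {
    annotations: Vec<String>,
}

// Mirror of the `with_metadata` helper: only wrap when there is something to carry.
fn with_metadata(metadata: &LetMetadata, value: Term) -> Term {
    if metadata.annotations.is_empty() {
        value
    } else {
        Term::Annotated(metadata.annotations.clone(), Box::new(value))
    }
}

fn main() {
    let plain = LetMetadata::default();
    let annotated = LetMetadata {
        annotations: vec!["Number".to_owned()],
    };

    // A binding without metadata stays as-is.
    assert_eq!(with_metadata(&plain, Term::Num(1)), Term::Num(1));

    // A binding with an annotation gets the annotation pushed onto its value.
    assert_eq!(
        with_metadata(&annotated, Term::Num(1)),
        Term::Annotated(vec!["Number".to_owned()], Box::new(Term::Num(1)))
    );
}

In the real conversion, the wrapper is `term::Term::Annotated` and the annotation itself is converted with `to_mainline`, as the diff shows.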
4 changes: 2 additions & 2 deletions core/src/eval/tests.rs
@@ -29,8 +29,8 @@ fn eval_full_no_import(t: RichTerm) -> Result<Term, EvalError> {
 fn parse(s: &str) -> Option<RichTerm> {
     let id = Files::new().add("<test>", String::from(s));
 
-    grammar::ExprParser::new()
-        .parse_strict(id, lexer::Lexer::new(s))
+    grammar::TermParser::new()
+        .parse_strict_compat(id, lexer::Lexer::new(s))
         .map(RichTerm::without_pos)
         .map_err(|err| println!("{err:?}"))
         .ok()
40 changes: 15 additions & 25 deletions core/src/parser/tests.rs
@@ -15,8 +15,8 @@ use assert_matches::assert_matches;
 fn parse(s: &str) -> Result<RichTerm, ParseError> {
     let id = Files::new().add("<test>", String::from(s));
 
-    super::grammar::ExprParser::new()
-        .parse_strict(id, Lexer::new(s))
+    super::grammar::TermParser::new()
+        .parse_strict_compat(id, Lexer::new(s))
         .map_err(|errs| errs.errors.first().unwrap().clone())
 }
 
@@ -38,29 +38,19 @@ fn mk_single_chunk(s: &str) -> RichTerm {
 }
 
 fn mk_symbolic_single_chunk(prefix: &str, s: &str) -> RichTerm {
-    use crate::term::record::Field;
-
-    build_record(
-        [
-            (
-                FieldPathElem::Ident("tag".into()),
-                Field::from(RichTerm::from(Term::Enum("SymbolicString".into()))),
-            ),
-            (
-                FieldPathElem::Ident("prefix".into()),
-                Field::from(RichTerm::from(Term::Enum(prefix.into()))),
-            ),
-            (
-                FieldPathElem::Ident("fragments".into()),
-                Field::from(RichTerm::from(Array(
-                    std::iter::once(mk_single_chunk(s)).collect(),
-                    Default::default(),
-                ))),
-            ),
-        ],
-        Default::default(),
-    )
-    .into()
+    use crate::term::{make::builder, record::Field};
+
+    builder::Record::new()
+        .field("tag")
+        .value(Term::Enum("SymbolicString".into()))
+        .field("prefix")
+        .value(Term::Enum(prefix.into()))
+        .field("fragments")
+        .value(Array(
+            std::iter::once(mk_single_chunk(s)).collect(),
+            Default::default(),
+        ))
+        .into()
 }
 
 #[test]

[GitHub Actions annotation — build-and-test (windows-latest) reports a check failure on line 41 in core/src/parser/tests.rs: unused import: `record::Field`]
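
Aside (not part of the commit): the test above replaces a hand-rolled `build_record` call with Nickel's fluent record builder. As a rough illustration of the builder shape only, here is a toy, self-contained version (the real `make::builder::Record` is richer and produces actual `RichTerm` fields; the names and string values below are purely illustrative):

// Toy sketch of a fluent record builder: `field(name)` opens a pending field and
// `value(v)` closes it, handing the record back so calls can be chained.
#[derive(Default, Debug)]
struct Record {
    fields: Vec<(String, String)>,
}

struct Incomplete {
    record: Record,
    name: String,
}

impl Record {
    fn new() -> Self {
        Record::default()
    }

    // Start a field; its value is supplied by the returned builder.
    fn field(self, name: &str) -> Incomplete {
        Incomplete {
            record: self,
            name: name.to_owned(),
        }
    }
}

impl Incomplete {
    // Attach a value to the pending field and return to the record.
    fn value(mut self, value: &str) -> Record {
        self.record.fields.push((self.name, value.to_owned()));
        self.record
    }
}

fn main() {
    // Mirrors the chaining style used in `mk_symbolic_single_chunk` above.
    let record = Record::new()
        .field("tag")
        .value("SymbolicString")
        .field("prefix")
        .value("nix");
    println!("{record:?}");
}

The appeal of the pattern is that each `field(...).value(...)` pair reads linearly, instead of assembling nested tuples of `FieldPathElem` and `Field` by hand.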
9 changes: 6 additions & 3 deletions core/src/parser/utils.rs
@@ -947,9 +947,12 @@ pub fn strip_indent<'ast>(chunks: &mut Vec<StringChunk<Ast<'ast>>>) {
 
 #[cfg(test)]
 mod tests {
-    use crate::typ::TypeF;
-
     use super::*;
+    use crate::{
+        combine::Combine,
+        label::Label,
+        term::{LabeledType, TypeAnnotation},
+        typ::{Type, TypeF},
+    };
 
     #[test]
     fn contract_annotation_order() {
6 changes: 4 additions & 2 deletions core/src/pretty.rs
@@ -1383,7 +1383,7 @@ mod tests {
     use crate::files::Files;
     use crate::parser::lexer::Lexer;
     use crate::parser::{
-        grammar::{ExprParser, FixedTypeParser},
+        grammar::{FixedTypeParser, TermParser},
         ErrorTolerantParserCompat,
     };
     use pretty::Doc;

@@ -1404,7 +1404,9 @@
     fn parse_term(s: &str) -> RichTerm {
         let id = Files::new().add("<test>", s);
 
-        ExprParser::new().parse_strict(id, Lexer::new(s)).unwrap()
+        TermParser::new()
+            .parse_strict_compat(id, Lexer::new(s))
+            .unwrap()
     }
 
     /// Parse a string representation `long` of a type, and assert that
4 changes: 2 additions & 2 deletions lsp/nls/src/analysis.rs
@@ -479,8 +479,8 @@ mod tests {
         let s = "{ field. }";
         let file = Files::new().add("<test>", s.to_owned());
 
-        let (rt, _errors) = grammar::ExprParser::new()
-            .parse_tolerant(file, lexer::Lexer::new(s))
+        let (rt, _errors) = grammar::TermParser::new()
+            .parse_tolerant_compat(file, lexer::Lexer::new(s))
             .unwrap();
 
         let parent = ParentLookup::new(&rt);
4 changes: 2 additions & 2 deletions lsp/nls/src/position.rs
@@ -216,8 +216,8 @@ pub(crate) mod tests {
     pub fn parse(s: &str) -> (FileId, RichTerm) {
         let id = Files::new().add("<test>", String::from(s));
 
-        let term = grammar::ExprParser::new()
-            .parse_strict(id, lexer::Lexer::new(s))
+        let term = grammar::TermParser::new()
+            .parse_strict_compat(id, lexer::Lexer::new(s))
             .unwrap();
         (id, term)
     }
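
Aside (not part of the commit): every test file touched here switches from `ExprParser` with `parse_strict`/`parse_tolerant` to `TermParser` with the `_compat` variants. Judging from the diff alone — this is an assumption, not Nickel's actual signatures — the `_compat` entry points parse into the new AST and then convert the result to the legacy `RichTerm` via the new → mainline conversion that `compat.rs` implements. A toy model of that wrapper pattern:

// Toy model of a "compat" parsing entry point (illustrative names only, not
// Nickel's real API): the new parser produces `Ast`, and the `_compat` variant
// converts it to the legacy `RichTerm` so existing tests keep working.

#[derive(Debug)]
struct Ast(String);

#[derive(Debug)]
struct RichTerm(String);

trait ToMainline {
    fn to_mainline(self) -> RichTerm;
}

impl ToMainline for Ast {
    fn to_mainline(self) -> RichTerm {
        RichTerm(self.0)
    }
}

// Stand-in for the strict parser returning the new AST representation.
fn parse_strict(source: &str) -> Result<Ast, String> {
    Ok(Ast(source.to_owned()))
}

// The `_compat` variant: same parse, followed by conversion to the mainline term.
fn parse_strict_compat(source: &str) -> Result<RichTerm, String> {
    parse_strict(source).map(|ast| ast.to_mainline())
}

fn main() {
    println!("{:?}", parse_strict_compat("1 + 1"));
}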
