diff --git a/src/grammar.lalrpop b/src/grammar.lalrpop
index 174ec6326e..18c0390dcf 100644
--- a/src/grammar.lalrpop
+++ b/src/grammar.lalrpop
@@ -181,6 +181,7 @@ UOp: UnaryOp = {
     "head" => UnaryOp::ListHead(),
     "tail" => UnaryOp::ListTail(),
     "length" => UnaryOp::ListLength(),
+    "fieldsOf" => UnaryOp::FieldsOf(),
 };
 
 switch_case: (Ident, RichTerm) = {
@@ -354,6 +355,7 @@ extern {
        "head" => Token::Head,
        "tail" => Token::Tail,
        "length" => Token::Length,
+       "fieldsOf" => Token::FieldsOf,
 
        "hasField" => Token::HasField,
        "map" => Token::Map,
diff --git a/src/operation.rs b/src/operation.rs
index f5d84f3fdb..1887cb7d7d 100644
--- a/src/operation.rs
+++ b/src/operation.rs
@@ -328,6 +328,21 @@ fn process_unary_operation(
                ))
            }
        }
+        UnaryOp::FieldsOf() => {
+            if let Term::Record(map) = *t {
+                let mut fields: Vec<String> = map.keys().map(|Ident(id)| id.clone()).collect();
+                fields.sort();
+                let terms = fields.into_iter().map(|id| Term::Str(id).into()).collect();
+                Ok(Closure::atomic_closure(Term::List(terms).into()))
+            } else {
+                Err(EvalError::TypeError(
+                    String::from("Record"),
+                    String::from("fieldsOf"),
+                    arg_pos,
+                    RichTerm { term: t, pos },
+                ))
+            }
+        }
        UnaryOp::MapRec(f) => {
            if let Term::Record(rec) = *t {
                let f_as_var = f.body.closurize(&mut env, f.env);
diff --git a/src/parser/lexer.rs b/src/parser/lexer.rs
index 7c454f5605..e139c5ae9b 100644
--- a/src/parser/lexer.rs
+++ b/src/parser/lexer.rs
@@ -107,6 +107,7 @@ pub enum Token<'input> {
     Head,
     Tail,
     Length,
+    FieldsOf,
     Unwrap,
 
     HasField,
@@ -219,6 +220,7 @@ impl<'input> fmt::Display for Token<'input> {
            Token::Head => "head",
            Token::Tail => "tail",
            Token::Length => "length",
+           Token::FieldsOf => "fieldsOf",
 
            Token::HasField => "hasField",
            Token::Map => "map",
@@ -559,6 +561,7 @@ impl<'input> Lexer<'input> {
            "map" => Token::Map,
            "elemAt" => Token::ElemAt,
            "merge" => Token::Merge,
+           "fieldsOf" => Token::FieldsOf,
            ty @ "Dyn" | ty @ "Num" | ty @ "Bool" | ty @ "Str" | ty @ "List" => Token::Type(ty),
            id => Token::Identifier(id),
        };
diff --git a/src/program.rs b/src/program.rs
index 5c7b91c185..8e981c103c 100644
--- a/src/program.rs
+++ b/src/program.rs
@@ -1253,4 +1253,15 @@ Assume(#alwaysTrue -> #alwaysFalse, not ) true
         assert_npeq!("{ a = \"a\"; b = true }", "{ a = true; b = \"a\"}");
         assert_npeq!("{ a = { a = true } }", "{a = { a = { a = true } } }");
     }
+
+    #[test]
+    fn fields_of() {
+        assert_peq!("fieldsOf {}", "[]");
+        assert_peq!("fieldsOf {a = 1; b = 2; c = 3}", "[\"a\", \"b\", \"c\"]");
+        assert_peq!("fieldsOf {aAa = 1; Zzz = 2;}", "[\"Zzz\", \"aAa\"]");
+        assert_peq!(
+            "fieldsOf {foo = {bar = 0}; baz = Default(true)}",
+            "[\"baz\", \"foo\"]"
+        );
+    }
 }
diff --git a/src/term.rs b/src/term.rs
index 37b7f46978..e684d3d8e1 100644
--- a/src/term.rs
+++ b/src/term.rs
@@ -500,6 +500,9 @@ pub enum UnaryOp {
     /// string accumulator, the remaining chunks to be evaluated, and is applied to the current
     /// chunk being evaluated.
     ChunksConcat(String, Vec<StrChunk<RichTerm>>),
+
+    /// Return the names of the fields of a record as a string list.
+    FieldsOf(),
 }
 
 impl UnaryOp {
@@ -559,6 +562,8 @@ impl UnaryOp {
                    })
                    .collect(),
            ),
+
+            FieldsOf() => FieldsOf(),
        }
    }
 }
diff --git a/src/typecheck.rs b/src/typecheck.rs
index 325dca19dc..4071cae5cd 100644
--- a/src/typecheck.rs
+++ b/src/typecheck.rs
@@ -1379,6 +1379,13 @@ pub fn get_uop_type(
        )),
        // This should not happen, as ChunksConcat() is only produced during evaluation.
        UnaryOp::ChunksConcat(_, _) => panic!("cannot type ChunksConcat()"),
+        // forall rows. { rows } -> List
+        UnaryOp::FieldsOf() => TypeWrapper::Concrete(AbsType::arrow(
+            Box::new(TypeWrapper::Concrete(AbsType::StaticRecord(Box::new(
+                TypeWrapper::Ptr(new_var(state.table)),
+            )))),
+            Box::new(TypeWrapper::Concrete(AbsType::List())),
+        )),
    })
 }
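
Usage sketch (semantics as exercised by the new fields_of test and the FieldsOf arm in operation.rs; output notation as asserted by assert_peq!): fieldsOf takes a record and returns its field names as a list of strings, sorted with Vec::sort, i.e. bytewise, so uppercase names come before lowercase ones; applying it to a non-record raises EvalError::TypeError with expected type "Record".

    fieldsOf {}                        evaluates to []
    fieldsOf { a = 1; b = 2; c = 3 }   evaluates to ["a", "b", "c"]
    fieldsOf { aAa = 1; Zzz = 2; }     evaluates to ["Zzz", "aAa"]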