Recognize Directive Prologues correctly
raskad committed Jan 14, 2023
1 parent 989edd4 commit 390d7c0
Showing 7 changed files with 104 additions and 92 deletions.
18 changes: 12 additions & 6 deletions boa_ast/src/statement_list.rs
@@ -121,6 +121,18 @@ pub struct StatementList {
}

impl StatementList {
/// Creates a new `StatementList` AST node.
#[must_use]
pub fn new<S>(statements: S, strict: bool) -> Self
where
S: Into<Box<[StatementListItem]>>,
{
Self {
statements: statements.into(),
strict,
}
}

/// Gets the list of statements.
#[inline]
#[must_use]
@@ -134,12 +146,6 @@ impl StatementList {
pub const fn strict(&self) -> bool {
self.strict
}

/// Set the strict mode.
#[inline]
pub fn set_strict(&mut self, strict: bool) {
self.strict = strict;
}
}

impl From<Box<[StatementListItem]>> for StatementList {
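The new `StatementList::new` constructor replaces the removed `set_strict` setter, so strictness is now fixed at construction time. A minimal usage sketch, assuming `boa_ast` is available as a dependency and using an empty list for brevity:

```rust
use boa_ast::{StatementList, StatementListItem};

// `new` accepts anything convertible into `Box<[StatementListItem]>`,
// so a `Vec` of items works directly.
fn empty_strict_list() -> StatementList {
    StatementList::new(Vec::<StatementListItem>::new(), true)
}
```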
36 changes: 4 additions & 32 deletions boa_parser/src/parser/function/mod.rs
@@ -26,7 +26,6 @@ use boa_ast::{
Punctuator,
};
use boa_interner::{Interner, Sym};
use boa_macros::utf16;
use boa_profiler::Profiler;
use std::io::Read;

@@ -449,41 +448,14 @@
fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
let _timer = Profiler::global().start_event("FunctionStatementList", "Parsing");

let global_strict_mode = cursor.strict_mode();
let mut strict = false;

if let Some(tk) = cursor.peek(0, interner)? {
match tk.kind() {
TokenKind::Punctuator(Punctuator::CloseBlock) => {
return Ok(Vec::new().into());
}
TokenKind::StringLiteral(string)
if interner.resolve_expect(*string).join(
|s| s == "use strict",
|g| g == utf16!("use strict"),
true,
) =>
{
cursor.set_strict_mode(true);
strict = true;
}
_ => {}
}
}

let statement_list = StatementList::new(
StatementList::new(
self.allow_yield,
self.allow_await,
true,
&FUNCTION_BREAK_TOKENS,
true,
false,
)
.parse(cursor, interner);

// Reset strict mode back to the global scope.
cursor.set_strict_mode(global_strict_mode);

let mut statement_list = statement_list?;
statement_list.set_strict(strict);
Ok(statement_list)
.parse(cursor, interner)
}
}
90 changes: 40 additions & 50 deletions boa_parser/src/parser/mod.rs
@@ -11,7 +11,6 @@ mod tests;

use crate::{
error::ParseResult,
lexer::TokenKind,
parser::{
cursor::Cursor,
function::{FormalParameters, FunctionStatementList},
@@ -27,7 +26,6 @@ use boa_ast::{
Position, StatementList,
};
use boa_interner::Interner;
use boa_macros::utf16;
use rustc_hash::FxHashSet;
use std::io::Read;

@@ -239,52 +237,31 @@
type Output = StatementList;

fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
let mut strict = cursor.strict_mode();
match cursor.peek(0, interner)? {
Some(tok) => {
match tok.kind() {
// Set the strict mode
TokenKind::StringLiteral(string)
if interner.resolve_expect(*string).join(
|s| s == "use strict",
|g| g == utf16!("use strict"),
true,
) =>
{
cursor.set_strict_mode(true);
strict = true;
}
_ => {}
}
let mut statement_list =
ScriptBody::new(self.direct_eval).parse(cursor, interner)?;
statement_list.set_strict(strict);

// It is a Syntax Error if the LexicallyDeclaredNames of ScriptBody contains any duplicate entries.
// It is a Syntax Error if any element of the LexicallyDeclaredNames of ScriptBody also occurs in the VarDeclaredNames of ScriptBody.
let mut lexical_names = FxHashSet::default();
for name in top_level_lexically_declared_names(&statement_list) {
if !lexical_names.insert(name) {
return Err(Error::general(
"lexical name declared multiple times",
Position::new(1, 1),
));
}
}

for name in top_level_var_declared_names(&statement_list) {
if lexical_names.contains(&name) {
return Err(Error::general(
"lexical name declared multiple times",
Position::new(1, 1),
));
}
}

Ok(statement_list)
let statement_list = ScriptBody::new(true, cursor.strict_mode(), self.direct_eval)
.parse(cursor, interner)?;

// It is a Syntax Error if the LexicallyDeclaredNames of ScriptBody contains any duplicate entries.
// It is a Syntax Error if any element of the LexicallyDeclaredNames of ScriptBody also occurs in the VarDeclaredNames of ScriptBody.
let mut lexical_names = FxHashSet::default();
for name in top_level_lexically_declared_names(&statement_list) {
if !lexical_names.insert(name) {
return Err(Error::general(
"lexical name declared multiple times",
Position::new(1, 1),
));
}
None => Ok(StatementList::from(Vec::new())),
}

for name in top_level_var_declared_names(&statement_list) {
if lexical_names.contains(&name) {
return Err(Error::general(
"lexical name declared multiple times",
Position::new(1, 1),
));
}
}

Ok(statement_list)
}
}

@@ -296,14 +273,20 @@
/// [spec]: https://tc39.es/ecma262/#prod-ScriptBody
#[derive(Debug, Clone, Copy)]
pub struct ScriptBody {
directive_prologues: bool,
strict: bool,
direct_eval: bool,
}

impl ScriptBody {
/// Create a new `ScriptBody` parser.
#[inline]
const fn new(direct_eval: bool) -> Self {
Self { direct_eval }
const fn new(directive_prologues: bool, strict: bool, direct_eval: bool) -> Self {
Self {
directive_prologues,
strict,
direct_eval,
}
}
}

@@ -314,8 +297,15 @@
type Output = StatementList;

fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
let body = self::statement::StatementList::new(false, false, false, &[])
.parse(cursor, interner)?;
let body = self::statement::StatementList::new(
false,
false,
false,
&[],
self.directive_prologues,
self.strict,
)
.parse(cursor, interner)?;

if !self.direct_eval {
// It is a Syntax Error if StatementList Contains super unless the source text containing super is eval
2 changes: 2 additions & 0 deletions boa_parser/src/parser/statement/block/mod.rs
@@ -90,6 +90,8 @@ where
self.allow_await,
self.allow_return,
&BLOCK_BREAK_TOKENS,
false,
false,
)
.parse(cursor, interner)
.map(statement::Block::from)?;
@@ -609,9 +609,15 @@ where
let strict = cursor.strict_mode();
cursor.set_strict_mode(true);
let position = cursor.peek(0, interner).or_abrupt()?.span().start();
let statement_list =
StatementList::new(false, true, false, &FUNCTION_BREAK_TOKENS)
.parse(cursor, interner)?;
let statement_list = StatementList::new(
false,
true,
false,
&FUNCTION_BREAK_TOKENS,
false,
false,
)
.parse(cursor, interner)?;

let mut lexical_names = FxHashSet::default();
for name in &lexically_declared_names(&statement_list) {
34 changes: 33 additions & 1 deletion boa_parser/src/parser/statement/mod.rs
@@ -50,6 +50,7 @@ use boa_ast::{
Keyword, Punctuator,
};
use boa_interner::Interner;
use boa_macros::utf16;
use boa_profiler::Profiler;
use std::io::Read;

@@ -229,6 +230,8 @@ pub(super) struct StatementList {
allow_await: AllowAwait,
allow_return: AllowReturn,
break_nodes: &'static [TokenKind],
directive_prologues: bool,
strict: bool,
}

impl StatementList {
@@ -238,6 +241,8 @@
allow_await: A,
allow_return: R,
break_nodes: &'static [TokenKind],
directive_prologues: bool,
strict: bool,
) -> Self
where
Y: Into<AllowYield>,
Expand All @@ -249,6 +254,8 @@ impl StatementList {
allow_await: allow_await.into(),
allow_return: allow_return.into(),
break_nodes,
directive_prologues,
strict,
}
}
}
Expand All @@ -273,6 +280,10 @@ where
let _timer = Profiler::global().start_event("StatementList", "Parsing");
let mut items = Vec::new();

let global_strict = cursor.strict_mode();
let mut directive_prologues = self.directive_prologues;
let mut strict = self.strict;

loop {
match cursor.peek(0, interner)? {
Some(token) if self.break_nodes.contains(token.kind()) => break,
@@ -283,6 +294,25 @@
let item =
StatementListItem::new(self.allow_yield, self.allow_await, self.allow_return)
.parse(cursor, interner)?;

if directive_prologues {
if let ast::StatementListItem::Statement(ast::Statement::Expression(
ast::Expression::Literal(ast::expression::literal::Literal::String(string)),
)) = &item
{
if interner.resolve_expect(*string).join(
|s| s == "use strict",
|g| g == utf16!("use strict"),
true,
) {
cursor.set_strict_mode(true);
strict = true;
}
} else {
directive_prologues = false;
}
}

items.push(item);

// move the cursor forward for any consecutive semicolon.
@@ -291,7 +321,9 @@

items.sort_by(ast::StatementListItem::hoistable_order);

Ok(items.into())
cursor.set_strict_mode(global_strict);

Ok(ast::StatementList::new(items, strict))
}
}

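The loop above is the heart of the fix: rather than checking only the very first token of a body, the parser now walks the entire initial run of string-literal expression statements (the Directive Prologue) and enables strict mode if any of those directives is "use strict". A simplified, self-contained sketch of that scan, using a hypothetical `Stmt` type rather than the real AST:

```rust
// Hypothetical, simplified model of the directive-prologue scan.
enum Stmt {
    /// An expression statement whose expression is a string literal.
    StringLiteral(&'static str),
    /// Any other kind of statement.
    Other,
}

/// Returns `true` if the Directive Prologue of `statements` contains "use strict".
fn is_strict(statements: &[Stmt]) -> bool {
    let mut strict = false;
    for stmt in statements {
        match stmt {
            // Still inside the Directive Prologue; inspect each directive.
            Stmt::StringLiteral(s) => {
                if *s == "use strict" {
                    strict = true;
                }
            }
            // The first non-string statement ends the prologue.
            Stmt::Other => break,
        }
    }
    strict
}

fn main() {
    // "use strict" after another directive still counts ...
    assert!(is_strict(&[Stmt::StringLiteral("use asm"), Stmt::StringLiteral("use strict")]));
    // ... but not after a non-directive statement.
    assert!(!is_strict(&[Stmt::Other, Stmt::StringLiteral("use strict")]));
}
```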
4 changes: 4 additions & 0 deletions boa_parser/src/parser/statement/switch/mod.rs
@@ -173,6 +173,8 @@ where
self.allow_await,
self.allow_return,
&CASE_BREAK_TOKENS,
false,
false,
)
.parse(cursor, interner)?;

@@ -195,6 +197,8 @@
self.allow_await,
self.allow_return,
&CASE_BREAK_TOKENS,
false,
false,
)
.parse(cursor, interner)?;

