Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Shell: A couple tiny POSIX fixes #20279

Merged
merged 2 commits into from
Aug 4, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
54 changes: 49 additions & 5 deletions Userland/Shell/PosixLexer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -256,7 +256,7 @@ ErrorOr<Lexer::ReductionResult> Lexer::reduce_operator()

if (expect_heredoc_entry && tokens.size() > 1) {
auto [key, interpolation] = process_heredoc_key(tokens[1]);
m_state.heredoc_entries.enqueue(HeredocEntry {
m_state.heredoc_entries.append(HeredocEntry {
.key = key,
.allow_interpolation = interpolation,
.dedent = tokens[0].type == Token::Type::DoubleLessDash,
Expand Down Expand Up @@ -544,7 +544,7 @@ ErrorOr<Lexer::ReductionResult> Lexer::reduce_start()
}

if (was_on_new_line && !m_state.heredoc_entries.is_empty()) {
auto const& entry = m_state.heredoc_entries.head();
auto const& entry = m_state.heredoc_entries.first();

auto start_index = m_lexer.tell();
Optional<size_t> end_index;
Expand Down Expand Up @@ -576,7 +576,7 @@ ErrorOr<Lexer::ReductionResult> Lexer::reduce_start()
token.relevant_heredoc_key = entry.key;
token.type = Token::Type::HeredocContents;

m_state.heredoc_entries.dequeue();
m_state.heredoc_entries.take_first();

m_state.on_new_line = true;

Expand Down Expand Up @@ -705,6 +705,14 @@ ErrorOr<Lexer::ReductionResult> Lexer::reduce_start()
};
}

if (!m_state.escaping && is_any_of("})"sv)(m_lexer.peek())) {
// That's an eof for us.
return ReductionResult {
.tokens = {},
.next_reduction = Reduction::None,
};
}

m_state.escaping = false;
m_state.buffer.append(consume());
return ReductionResult {
Expand Down Expand Up @@ -793,19 +801,55 @@ ErrorOr<Lexer::ReductionResult> Lexer::reduce_parameter_expansion()

ErrorOr<Lexer::ReductionResult> Lexer::reduce_command_or_arithmetic_substitution_expansion()
{
auto ch = m_lexer.peek();
if (ch == '(' && m_state.buffer.string_view().ends_with("$("sv)) {
m_state.buffer.append(consume());
m_state.expansions.last() = ArithmeticExpansion {
.expression = {},
.value = StringBuilder {},
.range = range(-2)
};
return ReductionResult {
.tokens = {},
.next_reduction = Reduction::ArithmeticExpansion,
};
}

auto saved_position = m_state.position;
{
auto skip_mode = switch_to_skip_mode();

auto next_reduction = Reduction::Start;
do {
auto result = TRY(reduce(next_reduction));
next_reduction = result.next_reduction;
} while (next_reduction != Reduction::None);
saved_position = m_state.position;
}

auto const skipped_text = m_lexer.input().substring_view(m_state.position.end_offset, saved_position.end_offset - m_state.position.end_offset);
m_state.position.end_offset = saved_position.end_offset;
m_state.position.end_line = saved_position.end_line;

m_state.buffer.append(skipped_text);
m_state.expansions.last().get<CommandExpansion>().command.append(skipped_text);
m_state.expansions.last().visit([&](auto& expansion) {
expansion.range.length = m_state.position.end_offset - expansion.range.start - m_state.position.start_offset;
});

if (m_lexer.is_eof()) {
return ReductionResult {
.tokens = { Token::continuation("$("_short_string) },
.next_reduction = m_state.previous_reduction,
};
}

auto ch = m_lexer.peek();
ch = m_lexer.peek();
if (ch == '(' && m_state.buffer.string_view().ends_with("$("sv)) {
m_state.buffer.append(consume());
m_state.expansions.last() = ArithmeticExpansion {
.expression = {},
.value = StringBuilder {},
.value = m_state.expansions.last().get<CommandExpansion>().command,
.range = range(-2)
};
return ReductionResult {
Expand Down
18 changes: 13 additions & 5 deletions Userland/Shell/PosixLexer.h
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
#include <AK/GenericLexer.h>
#include <AK/Queue.h>
#include <AK/String.h>
#include <AK/TemporaryChange.h>
#include <AK/Variant.h>
#include <AK/Vector.h>
#include <Shell/AST.h>
Expand Down Expand Up @@ -209,7 +210,7 @@ struct State {
},
};
Vector<Expansion> expansions {};
Queue<HeredocEntry> heredoc_entries {};
Vector<HeredocEntry> heredoc_entries {};
bool on_new_line { true };
};

Expand Down Expand Up @@ -313,10 +314,6 @@ struct Token {
return Token::Type::And;
if (name == "|"sv)
return Token::Type::Pipe;
if (name == "("sv)
return Token::Type::OpenParen;
if (name == ")"sv)
return Token::Type::CloseParen;
if (name == ">"sv)
return Token::Type::Great;
if (name == "<"sv)
Expand Down Expand Up @@ -430,6 +427,17 @@ class Lexer {
ErrorOr<ReductionResult> reduce_extended_parameter_expansion();
ErrorOr<ReductionResult> reduce_heredoc_contents();

// RAII guard for speculative ("skip mode") lexing: snapshots the lexer's
// entire State on construction and restores that snapshot when the guard
// goes out of scope, so tokens consumed and positions advanced while
// skipping are rolled back automatically.
// Non-obvious trick: TemporaryChange(lexer.m_state, lexer.m_state) passes
// the *current* state as the "new" value, so nothing changes immediately —
// the copy only serves as the value reverted to on destruction.
struct SkipTokens {
explicit SkipTokens(Lexer& lexer)
: m_state_change(lexer.m_state, lexer.m_state)
{
}

TemporaryChange<State> m_state_change;
};

// Enters speculative lexing for the caller's scope: while the returned
// SkipTokens guard is alive the lexer may be advanced freely; all State
// mutations are undone when the guard is destroyed.
SkipTokens switch_to_skip_mode() { return SkipTokens { *this }; }

char consume();
bool consume_specific(char);
void reconsume(StringView);
Expand Down
27 changes: 25 additions & 2 deletions Userland/Shell/PosixParser.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -769,15 +769,38 @@ ErrorOr<RefPtr<AST::Node>> Parser::parse_and_or()

ErrorOr<RefPtr<AST::Node>> Parser::parse_pipeline()
{
return parse_pipe_sequence();
while (peek().type == Token::Type::Newline)
skip();

auto is_negated = false;
if (peek().type == Token::Type::Bang) {
is_negated = true;
skip();
}

return parse_pipe_sequence(is_negated);
}

ErrorOr<RefPtr<AST::Node>> Parser::parse_pipe_sequence()
ErrorOr<RefPtr<AST::Node>> Parser::parse_pipe_sequence(bool is_negated)
{
auto node = TRY(parse_command());
if (!node)
return RefPtr<AST::Node> {};

if (is_negated) {
if (is<AST::CastToCommand>(node.ptr())) {
node = make_ref_counted<AST::CastToCommand>(
node->position(),
make_ref_counted<AST::ListConcatenate>(
node->position(),
Vector<NonnullRefPtr<AST::Node>> {
make_ref_counted<AST::BarewordLiteral>(
node->position(),
"not"_short_string),
*static_cast<AST::CastToCommand&>(*node).inner() }));
}
}

for (;;) {
if (peek().type != Token::Type::Pipe)
break;
Expand Down
2 changes: 1 addition & 1 deletion Userland/Shell/PosixParser.h
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@ class Parser {
ErrorOr<RefPtr<AST::Node>> parse_list();
ErrorOr<RefPtr<AST::Node>> parse_and_or();
ErrorOr<RefPtr<AST::Node>> parse_pipeline();
ErrorOr<RefPtr<AST::Node>> parse_pipe_sequence();
ErrorOr<RefPtr<AST::Node>> parse_pipe_sequence(bool is_negated);
ErrorOr<RefPtr<AST::Node>> parse_command();
ErrorOr<RefPtr<AST::Node>> parse_compound_command();
ErrorOr<RefPtr<AST::Node>> parse_subshell();
Expand Down
Loading