Shell: Treat '(' and '{' as operators in POSIX mode

Ali Mohammad Pur authored on 2023-09-21 02:35:01 +03:30; committed by Jelle Raaijmakers
parent 9e978c6cd1
commit 764ea6104e
3 changed files with 28 additions and 17 deletions
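
As a rough illustration of what the change is about (plain POSIX shell syntax, not taken from this commit), '(' opens a subshell and '{' opens a brace group, so the POSIX-mode lexer has to hand them to the parser as operator tokens instead of folding them into a neighbouring word:

    ( echo "runs in a subshell" )
    { echo "runs in a brace group"; }

In the hunks below, '(', '{', ')' and '}' get their own token types (OpenParen, OpenBrace, CloseParen, CloseBrace), and the new in_skip_mode flag, set while the lexer skips ahead inside SkipTokens, is what now gates the old behaviour of treating a leading ')' or '}' as end-of-input.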

@@ -642,20 +642,6 @@ ErrorOr<Lexer::ReductionResult> Lexer::reduce_start()
         };
     }
-    if (!m_state.escaping && is_part_of_operator(""sv, m_lexer.peek())) {
-        auto tokens = TRY(Token::maybe_from_state(m_state));
-        m_state.buffer.clear();
-        m_state.buffer.append(consume());
-        m_state.expansions.clear();
-        m_state.position.start_offset = m_state.position.end_offset;
-        m_state.position.start_line = m_state.position.end_line;
-        return ReductionResult {
-            .tokens = move(tokens),
-            .next_reduction = Reduction::Operator,
-        };
-    }
     if (!m_state.escaping && consume_specific('\'')) {
         m_state.buffer.append('\'');
         return ReductionResult {
@@ -708,7 +694,7 @@ ErrorOr<Lexer::ReductionResult> Lexer::reduce_start()
         };
     }
-    if (!m_state.escaping && is_any_of("})"sv)(m_lexer.peek())) {
+    if (!m_state.escaping && m_state.in_skip_mode && is_any_of("})"sv)(m_lexer.peek())) {
         // That's an eof for us.
         return ReductionResult {
             .tokens = {},
@@ -716,6 +702,20 @@ ErrorOr<Lexer::ReductionResult> Lexer::reduce_start()
         };
     }
+    if (!m_state.escaping && is_part_of_operator(""sv, m_lexer.peek())) {
+        auto tokens = TRY(Token::maybe_from_state(m_state));
+        m_state.buffer.clear();
+        m_state.buffer.append(consume());
+        m_state.expansions.clear();
+        m_state.position.start_offset = m_state.position.end_offset;
+        m_state.position.start_line = m_state.position.end_line;
+        return ReductionResult {
+            .tokens = move(tokens),
+            .next_reduction = Reduction::Operator,
+        };
+    }
     m_state.escaping = false;
     m_state.buffer.append(consume());
     return ReductionResult {

@@ -197,6 +197,7 @@ struct State {
     StringBuilder buffer {};
     Reduction previous_reduction { Reduction::Start };
     bool escaping { false };
+    bool in_skip_mode { false };
     AST::Position position {
         .start_offset = 0,
         .end_offset = 0,
@@ -320,6 +321,14 @@ struct Token {
            return Token::Type::Less;
        if (name == "\n"sv)
            return Token::Type::Newline;
+       if (name == "("sv)
+           return Token::Type::OpenParen;
+       if (name == "{"sv)
+           return Token::Type::OpenBrace;
+       if (name == ")"sv)
+           return Token::Type::CloseParen;
+       if (name == "}"sv)
+           return Token::Type::CloseBrace;
        return {};
    }
@@ -431,6 +440,7 @@ private:
        explicit SkipTokens(Lexer& lexer)
            : m_state_change(lexer.m_state, lexer.m_state)
        {
+           lexer.m_state.in_skip_mode = true;
        }
        TemporaryChange<State> m_state_change;

@@ -188,7 +188,7 @@ void Parser::handle_heredoc_contents()
 ErrorOr<Optional<Token>> Parser::next_expanded_token(Optional<Reduction> starting_reduction)
 {
-    while (m_token_buffer.find_if([](auto& token) { return token.type == Token::Type::Eof; }).is_end()) {
+    while (m_token_buffer.is_empty() || m_token_buffer.last().type != Token::Type::Eof) {
         auto tokens = TRY(m_lexer.batch_next(starting_reduction));
         auto expanded = perform_expansions(move(tokens));
         m_token_buffer.extend(expanded);
@@ -1749,6 +1749,7 @@ ErrorOr<RefPtr<AST::Node>> Parser::parse_word()
        case '\'':
            if (in_quote == Quote::Single) {
                in_quote = Quote::None;
-               TRY(append_string_literal(string.substring_view(*run_start, i - *run_start)));
+               if (run_start.has_value())
+                   TRY(append_string_literal(string.substring_view(*run_start, i - *run_start)));
                run_start = i + 1;
                continue;