Mirror of https://github.com/RGBCube/serenity, synced 2025-07-25 14:17:36 +00:00
Shell: Treat '(' and '{' as operators in POSIX mode

parent 9e978c6cd1
commit 764ea6104e

3 changed files with 28 additions and 17 deletions
@@ -642,20 +642,6 @@ ErrorOr<Lexer::ReductionResult> Lexer::reduce_start()
         };
     }

-    if (!m_state.escaping && is_part_of_operator(""sv, m_lexer.peek())) {
-        auto tokens = TRY(Token::maybe_from_state(m_state));
-        m_state.buffer.clear();
-        m_state.buffer.append(consume());
-        m_state.expansions.clear();
-        m_state.position.start_offset = m_state.position.end_offset;
-        m_state.position.start_line = m_state.position.end_line;
-
-        return ReductionResult {
-            .tokens = move(tokens),
-            .next_reduction = Reduction::Operator,
-        };
-    }
-
     if (!m_state.escaping && consume_specific('\'')) {
         m_state.buffer.append('\'');
         return ReductionResult {
@@ -708,7 +694,7 @@ ErrorOr<Lexer::ReductionResult> Lexer::reduce_start()
         };
     }

-    if (!m_state.escaping && is_any_of("})"sv)(m_lexer.peek())) {
+    if (!m_state.escaping && m_state.in_skip_mode && is_any_of("})"sv)(m_lexer.peek())) {
         // That's an eof for us.
         return ReductionResult {
             .tokens = {},
@@ -716,6 +702,20 @@ ErrorOr<Lexer::ReductionResult> Lexer::reduce_start()
         };
     }

+    if (!m_state.escaping && is_part_of_operator(""sv, m_lexer.peek())) {
+        auto tokens = TRY(Token::maybe_from_state(m_state));
+        m_state.buffer.clear();
+        m_state.buffer.append(consume());
+        m_state.expansions.clear();
+        m_state.position.start_offset = m_state.position.end_offset;
+        m_state.position.start_line = m_state.position.end_line;
+
+        return ReductionResult {
+            .tokens = move(tokens),
+            .next_reduction = Reduction::Operator,
+        };
+    }
+
     m_state.escaping = false;
     m_state.buffer.append(consume());
     return ReductionResult {
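Taken together, the hunks above reorder reduce_start(): the operator check is moved below the early return that treats ')' and '}' as end-of-input, and that early return now only fires while the lexer is in skip mode. A minimal standalone model of that ordering follows; the next_step() helper and its operator character set are illustrative assumptions, not the Shell lexer's API.

// Minimal standalone model (not the Shell lexer API) of why the ordering matters:
// once '(' '{' ')' '}' can start operator tokens, a ')' seen while skip-lexing a
// nested construct must be treated as end-of-input *before* the generic operator
// path gets a chance to consume it.
#include <cassert>
#include <string_view>

enum class Step { End, Operator, Character };

// in_skip_mode stands in for State::in_skip_mode from the hunks above; the
// operator character set here is for illustration only.
static Step next_step(char peek, bool in_skip_mode, bool escaping)
{
    bool is_operator_start = std::string_view("(){}&|;<>").find(peek) != std::string_view::npos;

    if (!escaping && in_skip_mode && (peek == ')' || peek == '}'))
        return Step::End; // the enclosing lexer owns this character

    if (!escaping && is_operator_start)
        return Step::Operator; // start an operator token (OpenParen, CloseBrace, ...)

    return Step::Character;
}

int main()
{
    // Outside skip mode, ')' now takes the operator path.
    assert(next_step(')', false, false) == Step::Operator);
    // Inside skip mode, the same character still ends the sub-lex.
    assert(next_step(')', true, false) == Step::End);
    return 0;
}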
@@ -197,6 +197,7 @@ struct State {
     StringBuilder buffer {};
     Reduction previous_reduction { Reduction::Start };
     bool escaping { false };
+    bool in_skip_mode { false };
     AST::Position position {
         .start_offset = 0,
         .end_offset = 0,
@@ -320,6 +321,14 @@ struct Token {
             return Token::Type::Less;
         if (name == "\n"sv)
             return Token::Type::Newline;
+        if (name == "("sv)
+            return Token::Type::OpenParen;
+        if (name == "{"sv)
+            return Token::Type::OpenBrace;
+        if (name == ")"sv)
+            return Token::Type::CloseParen;
+        if (name == "}"sv)
+            return Token::Type::CloseBrace;

         return {};
     }
@@ -431,6 +440,7 @@ private:
         explicit SkipTokens(Lexer& lexer)
             : m_state_change(lexer.m_state, lexer.m_state)
         {
+            lexer.m_state.in_skip_mode = true;
         }

         TemporaryChange<State> m_state_change;
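The in_skip_mode flag added to State above is only ever set here, inside SkipTokens: the guard captures the current lexer state through TemporaryChange, flips the flag for the duration of the scope, and restores the captured state (flag still false) when the guard is destroyed. Below is a minimal sketch of that save/restore pattern under assumed types; StateRestorer stands in for AK's TemporaryChange and LexerState for the lexer's State.

// A minimal sketch of the RAII save/restore pattern SkipTokens builds on.
#include <utility>

template<typename T>
class StateRestorer {
public:
    StateRestorer(T& variable, T value_for_now)
        : m_variable(variable)
        , m_old_value(variable) // copy the state as it is right now
    {
        m_variable = std::move(value_for_now);
    }
    ~StateRestorer() { m_variable = std::move(m_old_value); } // undo on scope exit

private:
    T& m_variable;
    T m_old_value;
};

struct LexerState {
    bool in_skip_mode { false };
    // ... buffer, expansions, position, etc. in the real State
};

struct SkipTokensSketch {
    explicit SkipTokensSketch(LexerState& state)
        : m_state_change(state, state) // "put exactly this state back later"
    {
        state.in_skip_mode = true; // everything lexed in this scope is throwaway
    }

    StateRestorer<LexerState> m_state_change;
};

int main()
{
    LexerState state;
    {
        SkipTokensSketch skip { state };
        // state.in_skip_mode is true here, so ')' / '}' read as end-of-input.
    }
    // The original state, with in_skip_mode == false, is back.
    return 0;
}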
@@ -188,7 +188,7 @@ void Parser::handle_heredoc_contents()

 ErrorOr<Optional<Token>> Parser::next_expanded_token(Optional<Reduction> starting_reduction)
 {
-    while (m_token_buffer.find_if([](auto& token) { return token.type == Token::Type::Eof; }).is_end()) {
+    while (m_token_buffer.is_empty() || m_token_buffer.last().type != Token::Type::Eof) {
         auto tokens = TRY(m_lexer.batch_next(starting_reduction));
         auto expanded = perform_expansions(move(tokens));
         m_token_buffer.extend(expanded);
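The loop condition above changes from scanning the whole token buffer for an Eof token on every iteration to checking only the last token appended. A small self-contained illustration of the two predicates, using a hypothetical token list rather than the parser's real types:

#include <algorithm>
#include <cassert>
#include <vector>

enum class TokenType { Word, Newline, Eof };

// Old shape: keep looping while no Eof exists anywhere in the buffer.
static bool old_condition(std::vector<TokenType> const& buffer)
{
    return std::find(buffer.begin(), buffer.end(), TokenType::Eof) == buffer.end();
}

// New shape: keep looping while the buffer is empty or does not end in Eof.
static bool new_condition(std::vector<TokenType> const& buffer)
{
    return buffer.empty() || buffer.back() != TokenType::Eof;
}

int main()
{
    std::vector<TokenType> buffer { TokenType::Word, TokenType::Eof, TokenType::Word };
    // If an Eof ever lands in the middle of the buffer, only the new condition
    // keeps lexing until a trailing Eof arrives.
    assert(!old_condition(buffer));
    assert(new_condition(buffer));
    return 0;
}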
@@ -1749,7 +1749,8 @@ ErrorOr<RefPtr<AST::Node>> Parser::parse_word()
         case '\'':
             if (in_quote == Quote::Single) {
                 in_quote = Quote::None;
-                TRY(append_string_literal(string.substring_view(*run_start, i - *run_start)));
+                if (run_start.has_value())
+                    TRY(append_string_literal(string.substring_view(*run_start, i - *run_start)));
                 run_start = i + 1;
                 continue;
             }