
LibCpp: Add library for working with C++ code

Moved the C++ lexer we have from LibGUI to here, so that other
components can use it without linking with LibGUI.
Itamar 2020-09-28 16:21:25 +03:00 committed by Andreas Kling
parent fec4152220
commit bf53d7ff64
8 changed files with 132 additions and 126 deletions
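
Before the per-file diffs below, it may help to spell out what the move means for callers: the header moves from <LibGUI/CppLexer.h> to <LibCpp/Lexer.h>, and the GUI::CppLexer / GUI::CppToken / GUI::CppPosition names become Cpp::Lexer / Cpp::Token / Cpp::Position. A minimal usage sketch of the relocated API, based only on calls that appear in this diff (the helper function name and the identifier-dumping loop are illustrative, not part of the commit):

#include <AK/LogStream.h>
#include <AK/String.h>
#include <AK/Vector.h>
#include <LibCpp/Lexer.h> // previously <LibGUI/CppLexer.h>

// Hypothetical helper: lex a source buffer and print every identifier in it.
static void dump_identifiers(const String& code)
{
    Cpp::Lexer lexer(code);    // previously GUI::CppLexer
    auto tokens = lexer.lex(); // yields a Vector<Cpp::Token>

    auto lines = code.split('\n', true);
    for (auto& token : tokens) {
        if (token.m_type != Cpp::Token::Type::Identifier)
            continue;
        // Tokens carry line/column start and end positions, exactly as
        // CppAutoComplete::text_of_token() uses them below.
        dbg() << lines[token.m_start.line].substring(token.m_start.column, token.m_end.column - token.m_start.column + 1);
    }
}
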


@@ -26,7 +26,7 @@
 #include "CppAutoComplete.h"
 #include <AK/HashTable.h>
-#include <LibGUI/CppLexer.h>
+#include <LibCpp/Lexer.h>
 // #define DEBUG_AUTOCOMPLETE
@@ -34,7 +34,7 @@ namespace HackStudio {
 Vector<String> CppAutoComplete::get_suggestions(const String& code, GUI::TextPosition autocomplete_position)
 {
     auto lines = code.split('\n', true);
-    GUI::CppLexer lexer(code);
+    Cpp::Lexer lexer(code);
     auto tokens = lexer.lex();
     auto index_of_target_token = token_in_position(tokens, autocomplete_position);
@@ -52,12 +52,12 @@ Vector<String> CppAutoComplete::get_suggestions(const String& code, GUI::TextPos
     return suggestions;
 }
-String CppAutoComplete::text_of_token(const Vector<String> lines, const GUI::CppToken& token)
+String CppAutoComplete::text_of_token(const Vector<String> lines, const Cpp::Token& token)
 {
     return lines[token.m_start.line].substring(token.m_start.column, token.m_end.column - token.m_start.column + 1);
 }
-Optional<size_t> CppAutoComplete::token_in_position(const Vector<GUI::CppToken>& tokens, GUI::TextPosition position)
+Optional<size_t> CppAutoComplete::token_in_position(const Vector<Cpp::Token>& tokens, GUI::TextPosition position)
 {
     for (size_t token_index = 0; token_index < tokens.size(); ++token_index) {
         auto& token = tokens[token_index];
@@ -70,7 +70,7 @@ Optional<size_t> CppAutoComplete::token_in_position(const Vector<GUI::CppToken>&
     return {};
 }
-Vector<String> CppAutoComplete::identifier_prefixes(const Vector<String> lines, const Vector<GUI::CppToken>& tokens, size_t target_token_index)
+Vector<String> CppAutoComplete::identifier_prefixes(const Vector<String> lines, const Vector<Cpp::Token>& tokens, size_t target_token_index)
 {
     auto partial_input = text_of_token(lines, tokens[target_token_index]);
     Vector<String> suggestions;
@@ -79,7 +79,7 @@ Vector<String> CppAutoComplete::identifier_prefixes(const Vector<String> lines,
     for (size_t i = 0; i < target_token_index; ++i) {
         auto& token = tokens[i];
-        if (token.m_type != GUI::CppToken::Type::Identifier)
+        if (token.m_type != Cpp::Token::Type::Identifier)
             continue;
         auto text = text_of_token(lines, token);
         if (text.starts_with(partial_input) && !suggestions_lookup.contains(text)) {


@@ -28,7 +28,7 @@
 #include <AK/String.h>
 #include <AK/Vector.h>
-#include <LibGUI/CppLexer.h>
+#include <LibCpp/Lexer.h>
 #include <LibGUI/TextPosition.h>
 namespace HackStudio {
@@ -39,8 +39,8 @@ public:
     static Vector<String> get_suggestions(const String& code, GUI::TextPosition autocomplete_position);
 private:
-    static Optional<size_t> token_in_position(const Vector<GUI::CppToken>&, GUI::TextPosition);
-    static String text_of_token(const Vector<String> lines, const GUI::CppToken&);
-    static Vector<String> identifier_prefixes(const Vector<String> lines, const Vector<GUI::CppToken>&, size_t target_token_index);
+    static Optional<size_t> token_in_position(const Vector<Cpp::Token>&, GUI::TextPosition);
+    static String text_of_token(const Vector<String> lines, const Cpp::Token&);
+    static Vector<String> identifier_prefixes(const Vector<String> lines, const Vector<Cpp::Token>&, size_t target_token_index);
 };
 };


@@ -28,3 +28,4 @@ add_subdirectory(LibVT)
 add_subdirectory(LibWeb)
 add_subdirectory(LibX86)
 add_subdirectory(LibDiff)
+add_subdirectory(LibCpp)


@@ -0,0 +1,6 @@
+set(SOURCES
+    Lexer.cpp
+)
+
+serenity_lib(LibCpp cpp)
+target_link_libraries(LibCpp LibC)


@@ -24,27 +24,27 @@
  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  */
-#include "CppLexer.h"
+#include "Lexer.h"
 #include <AK/HashTable.h>
 #include <AK/StdLibExtras.h>
 #include <AK/String.h>
 #include <ctype.h>
-namespace GUI {
+namespace Cpp {
-CppLexer::CppLexer(const StringView& input)
+Lexer::Lexer(const StringView& input)
     : m_input(input)
 {
 }
-char CppLexer::peek(size_t offset) const
+char Lexer::peek(size_t offset) const
 {
     if ((m_index + offset) >= m_input.length())
         return 0;
     return m_input[m_index + offset];
 }
-char CppLexer::consume()
+char Lexer::consume()
 {
     ASSERT(m_index < m_input.length());
     char ch = m_input[m_index++];
@@ -224,15 +224,15 @@ static bool is_known_type(const StringView& string)
     return types.contains(string);
 }
-Vector<CppToken> CppLexer::lex()
+Vector<Token> Lexer::lex()
 {
-    Vector<CppToken> tokens;
+    Vector<Token> tokens;
     size_t token_start_index = 0;
-    CppPosition token_start_position;
+    Position token_start_position;
     auto emit_token = [&](auto type) {
-        CppToken token;
+        Token token;
         token.m_type = type;
         token.m_start = m_position;
         token.m_end = m_position;
@@ -245,7 +245,7 @@ Vector<CppToken> CppLexer::lex()
         token_start_position = m_position;
     };
     auto commit_token = [&](auto type) {
-        CppToken token;
+        Token token;
         token.m_type = type;
         token.m_start = token_start_position;
         token.m_end = m_previous_position;
@@ -339,31 +339,31 @@ Vector<CppToken> CppLexer::lex()
             begin_token();
             while (isspace(peek()))
                 consume();
-            commit_token(CppToken::Type::Whitespace);
+            commit_token(Token::Type::Whitespace);
             continue;
         }
         if (ch == '(') {
-            emit_token(CppToken::Type::LeftParen);
+            emit_token(Token::Type::LeftParen);
             continue;
         }
         if (ch == ')') {
-            emit_token(CppToken::Type::RightParen);
+            emit_token(Token::Type::RightParen);
             continue;
         }
         if (ch == '{') {
-            emit_token(CppToken::Type::LeftCurly);
+            emit_token(Token::Type::LeftCurly);
             continue;
         }
         if (ch == '}') {
-            emit_token(CppToken::Type::RightCurly);
+            emit_token(Token::Type::RightCurly);
             continue;
         }
         if (ch == '[') {
-            emit_token(CppToken::Type::LeftBracket);
+            emit_token(Token::Type::LeftBracket);
             continue;
         }
         if (ch == ']') {
-            emit_token(CppToken::Type::RightBracket);
+            emit_token(Token::Type::RightBracket);
             continue;
         }
         if (ch == '<') {
@@ -373,23 +373,23 @@ Vector<CppToken> CppLexer::lex()
                 consume();
                 if (peek() == '=') {
                     consume();
-                    commit_token(CppToken::Type::LessLessEquals);
+                    commit_token(Token::Type::LessLessEquals);
                     continue;
                 }
-                commit_token(CppToken::Type::LessLess);
+                commit_token(Token::Type::LessLess);
                 continue;
             }
             if (peek() == '=') {
                 consume();
-                commit_token(CppToken::Type::LessEquals);
+                commit_token(Token::Type::LessEquals);
                 continue;
             }
             if (peek() == '>') {
                 consume();
-                commit_token(CppToken::Type::LessGreater);
+                commit_token(Token::Type::LessGreater);
                 continue;
             }
-            commit_token(CppToken::Type::Less);
+            commit_token(Token::Type::Less);
             continue;
         }
         if (ch == '>') {
@@ -399,22 +399,22 @@ Vector<CppToken> CppLexer::lex()
                 consume();
                 if (peek() == '=') {
                     consume();
-                    commit_token(CppToken::Type::GreaterGreaterEquals);
+                    commit_token(Token::Type::GreaterGreaterEquals);
                     continue;
                 }
-                commit_token(CppToken::Type::GreaterGreater);
+                commit_token(Token::Type::GreaterGreater);
                 continue;
             }
             if (peek() == '=') {
                 consume();
-                commit_token(CppToken::Type::GreaterEquals);
+                commit_token(Token::Type::GreaterEquals);
                 continue;
             }
-            commit_token(CppToken::Type::Greater);
+            commit_token(Token::Type::Greater);
             continue;
         }
         if (ch == ',') {
-            emit_token(CppToken::Type::Comma);
+            emit_token(Token::Type::Comma);
             continue;
         }
         if (ch == '+') {
@@ -422,15 +422,15 @@ Vector<CppToken> CppLexer::lex()
             consume();
             if (peek() == '+') {
                 consume();
-                commit_token(CppToken::Type::PlusPlus);
+                commit_token(Token::Type::PlusPlus);
                 continue;
             }
             if (peek() == '=') {
                 consume();
-                commit_token(CppToken::Type::PlusEquals);
+                commit_token(Token::Type::PlusEquals);
                 continue;
             }
-            commit_token(CppToken::Type::Plus);
+            commit_token(Token::Type::Plus);
             continue;
         }
         if (ch == '-') {
@@ -438,45 +438,45 @@ Vector<CppToken> CppLexer::lex()
             consume();
             if (peek() == '-') {
                 consume();
-                commit_token(CppToken::Type::MinusMinus);
+                commit_token(Token::Type::MinusMinus);
                 continue;
             }
             if (peek() == '=') {
                 consume();
-                commit_token(CppToken::Type::MinusEquals);
+                commit_token(Token::Type::MinusEquals);
                 continue;
             }
             if (peek() == '>') {
                 consume();
                 if (peek() == '*') {
                     consume();
-                    commit_token(CppToken::Type::ArrowAsterisk);
+                    commit_token(Token::Type::ArrowAsterisk);
                     continue;
                 }
-                commit_token(CppToken::Type::Arrow);
+                commit_token(Token::Type::Arrow);
                 continue;
             }
-            commit_token(CppToken::Type::Minus);
+            commit_token(Token::Type::Minus);
             continue;
         }
         if (ch == '*') {
-            emit_token_equals(CppToken::Type::Asterisk, CppToken::Type::AsteriskEquals);
+            emit_token_equals(Token::Type::Asterisk, Token::Type::AsteriskEquals);
             continue;
         }
         if (ch == '%') {
-            emit_token_equals(CppToken::Type::Percent, CppToken::Type::PercentEquals);
+            emit_token_equals(Token::Type::Percent, Token::Type::PercentEquals);
             continue;
         }
         if (ch == '^') {
-            emit_token_equals(CppToken::Type::Caret, CppToken::Type::CaretEquals);
+            emit_token_equals(Token::Type::Caret, Token::Type::CaretEquals);
             continue;
         }
         if (ch == '!') {
-            emit_token_equals(CppToken::Type::ExclamationMark, CppToken::Type::ExclamationMarkEquals);
+            emit_token_equals(Token::Type::ExclamationMark, Token::Type::ExclamationMarkEquals);
             continue;
         }
         if (ch == '=') {
-            emit_token_equals(CppToken::Type::Equals, CppToken::Type::EqualsEquals);
+            emit_token_equals(Token::Type::Equals, Token::Type::EqualsEquals);
             continue;
         }
         if (ch == '&') {
@@ -484,15 +484,15 @@ Vector<CppToken> CppLexer::lex()
             consume();
             if (peek() == '&') {
                 consume();
-                commit_token(CppToken::Type::AndAnd);
+                commit_token(Token::Type::AndAnd);
                 continue;
             }
             if (peek() == '=') {
                 consume();
-                commit_token(CppToken::Type::AndEquals);
+                commit_token(Token::Type::AndEquals);
                 continue;
             }
-            commit_token(CppToken::Type::And);
+            commit_token(Token::Type::And);
             continue;
         }
         if (ch == '|') {
@@ -500,23 +500,23 @@ Vector<CppToken> CppLexer::lex()
             consume();
             if (peek() == '|') {
                 consume();
-                commit_token(CppToken::Type::PipePipe);
+                commit_token(Token::Type::PipePipe);
                 continue;
             }
             if (peek() == '=') {
                 consume();
-                commit_token(CppToken::Type::PipeEquals);
+                commit_token(Token::Type::PipeEquals);
                 continue;
             }
-            commit_token(CppToken::Type::Pipe);
+            commit_token(Token::Type::Pipe);
             continue;
         }
         if (ch == '~') {
-            emit_token(CppToken::Type::Tilde);
+            emit_token(Token::Type::Tilde);
             continue;
         }
         if (ch == '?') {
-            emit_token(CppToken::Type::QuestionMark);
+            emit_token(Token::Type::QuestionMark);
             continue;
         }
         if (ch == ':') {
@@ -526,17 +526,17 @@ Vector<CppToken> CppLexer::lex()
                 consume();
                 if (peek() == '*') {
                     consume();
-                    commit_token(CppToken::Type::ColonColonAsterisk);
+                    commit_token(Token::Type::ColonColonAsterisk);
                     continue;
                 }
-                commit_token(CppToken::Type::ColonColon);
+                commit_token(Token::Type::ColonColon);
                 continue;
             }
-            commit_token(CppToken::Type::Colon);
+            commit_token(Token::Type::Colon);
             continue;
         }
         if (ch == ';') {
-            emit_token(CppToken::Type::Semicolon);
+            emit_token(Token::Type::Semicolon);
             continue;
         }
         if (ch == '.') {
@@ -544,10 +544,10 @@ Vector<CppToken> CppLexer::lex()
             consume();
             if (peek() == '*') {
                 consume();
-                commit_token(CppToken::Type::DotAsterisk);
+                commit_token(Token::Type::DotAsterisk);
                 continue;
             }
-            commit_token(CppToken::Type::Dot);
+            commit_token(Token::Type::Dot);
             continue;
         }
         if (ch == '#') {
@@ -560,12 +560,12 @@ Vector<CppToken> CppLexer::lex()
             auto directive = StringView(m_input.characters_without_null_termination() + token_start_index, m_index - token_start_index);
             if (directive == "#include") {
-                commit_token(CppToken::Type::IncludeStatement);
+                commit_token(Token::Type::IncludeStatement);
                 begin_token();
                 while (isspace(peek()))
                     consume();
-                commit_token(CppToken::Type::Whitespace);
+                commit_token(Token::Type::Whitespace);
                 begin_token();
                 if (peek() == '<' || peek() == '"') {
@@ -574,11 +574,11 @@ Vector<CppToken> CppLexer::lex()
                     consume();
                     if (peek() && consume() == '\n') {
-                        commit_token(CppToken::Type::IncludePath);
+                        commit_token(Token::Type::IncludePath);
                         continue;
                     }
-                    commit_token(CppToken::Type::IncludePath);
+                    commit_token(Token::Type::IncludePath);
                     begin_token();
                 }
             }
@@ -586,14 +586,14 @@ Vector<CppToken> CppLexer::lex()
             while (peek() && peek() != '\n')
                 consume();
-            commit_token(CppToken::Type::PreprocessorStatement);
+            commit_token(Token::Type::PreprocessorStatement);
             continue;
         }
         if (ch == '/' && peek(1) == '/') {
             begin_token();
             while (peek() && peek() != '\n')
                 consume();
-            commit_token(CppToken::Type::Comment);
+            commit_token(Token::Type::Comment);
             continue;
         }
         if (ch == '/' && peek(1) == '*') {
@@ -615,11 +615,11 @@ Vector<CppToken> CppLexer::lex()
                 consume();
             }
-            commit_token(CppToken::Type::Comment);
+            commit_token(Token::Type::Comment);
             continue;
         }
         if (ch == '/') {
-            emit_token_equals(CppToken::Type::Slash, CppToken::Type::SlashEquals);
+            emit_token_equals(Token::Type::Slash, Token::Type::SlashEquals);
             continue;
         }
         if (size_t prefix = match_string_prefix('"'); prefix > 0) {
@@ -629,11 +629,11 @@ Vector<CppToken> CppLexer::lex()
             while (peek()) {
                 if (peek() == '\\') {
                     if (size_t escape = match_escape_sequence(); escape > 0) {
-                        commit_token(CppToken::Type::DoubleQuotedString);
+                        commit_token(Token::Type::DoubleQuotedString);
                         begin_token();
                         for (size_t i = 0; i < escape; ++i)
                             consume();
-                        commit_token(CppToken::Type::EscapeSequence);
+                        commit_token(Token::Type::EscapeSequence);
                         begin_token();
                         continue;
                     }
@@ -642,7 +642,7 @@ Vector<CppToken> CppLexer::lex()
                 if (consume() == '"')
                     break;
             }
-            commit_token(CppToken::Type::DoubleQuotedString);
+            commit_token(Token::Type::DoubleQuotedString);
             continue;
         }
         if (size_t prefix = match_string_prefix('R'); prefix > 0 && peek(prefix) == '"') {
@@ -664,7 +664,7 @@ Vector<CppToken> CppLexer::lex()
                     }
                 }
             }
-            commit_token(CppToken::Type::RawString);
+            commit_token(Token::Type::RawString);
             continue;
         }
         if (size_t prefix = match_string_prefix('\''); prefix > 0) {
@@ -674,11 +674,11 @@ Vector<CppToken> CppLexer::lex()
             while (peek()) {
                 if (peek() == '\\') {
                     if (size_t escape = match_escape_sequence(); escape > 0) {
-                        commit_token(CppToken::Type::SingleQuotedString);
+                        commit_token(Token::Type::SingleQuotedString);
                         begin_token();
                         for (size_t i = 0; i < escape; ++i)
                             consume();
-                        commit_token(CppToken::Type::EscapeSequence);
+                        commit_token(Token::Type::EscapeSequence);
                         begin_token();
                         continue;
                     }
@@ -687,14 +687,14 @@ Vector<CppToken> CppLexer::lex()
                 if (consume() == '\'')
                     break;
             }
-            commit_token(CppToken::Type::SingleQuotedString);
+            commit_token(Token::Type::SingleQuotedString);
             continue;
         }
         if (isdigit(ch) || (ch == '.' && isdigit(peek(1)))) {
             begin_token();
             consume();
-            auto type = ch == '.' ? CppToken::Type::Float : CppToken::Type::Integer;
+            auto type = ch == '.' ? Token::Type::Float : Token::Type::Integer;
             bool is_hex = false;
             bool is_binary = false;
@@ -703,7 +703,7 @@ Vector<CppToken> CppLexer::lex()
                 if (ch != 'e' && ch != 'E' && ch != 'p' && ch != 'P')
                     return 0;
-                type = CppToken::Type::Float;
+                type = Token::Type::Float;
                 size_t length = 1;
                 ch = peek(length);
                 if (ch == '+' || ch == '-') {
@@ -719,10 +719,10 @@ Vector<CppToken> CppLexer::lex()
                 size_t length = 0;
                 for (;;) {
                     char ch = peek(length);
-                    if ((ch == 'u' || ch == 'U') && type == CppToken::Type::Integer) {
+                    if ((ch == 'u' || ch == 'U') && type == Token::Type::Integer) {
                         ++length;
                     } else if ((ch == 'f' || ch == 'F') && !is_binary) {
-                        type = CppToken::Type::Float;
+                        type = Token::Type::Float;
                         ++length;
                     } else if (ch == 'l' || ch == 'L') {
                         ++length;
@@ -745,8 +745,8 @@ Vector<CppToken> CppLexer::lex()
             for (char ch = peek(); (is_hex ? isxdigit(ch) : isdigit(ch)) || (ch == '\'' && peek(1) != '\'') || ch == '.'; ch = peek()) {
                 if (ch == '.') {
-                    if (type == CppToken::Type::Integer) {
-                        type = CppToken::Type::Float;
+                    if (type == Token::Type::Integer) {
+                        type = Token::Type::Float;
                     } else
                         break;
                 };
@@ -773,15 +773,15 @@ Vector<CppToken> CppLexer::lex()
                 consume();
             auto token_view = StringView(m_input.characters_without_null_termination() + token_start_index, m_index - token_start_index);
             if (is_keyword(token_view))
-                commit_token(CppToken::Type::Keyword);
+                commit_token(Token::Type::Keyword);
             else if (is_known_type(token_view))
-                commit_token(CppToken::Type::KnownType);
+                commit_token(Token::Type::KnownType);
             else
-                commit_token(CppToken::Type::Identifier);
+                commit_token(Token::Type::Identifier);
             continue;
         }
         dbg() << "Unimplemented token character: " << ch;
-        emit_token(CppToken::Type::Unknown);
+        emit_token(Token::Type::Unknown);
     }
     return tokens;
 }


@@ -29,7 +29,7 @@
 #include <AK/StringView.h>
 #include <AK/Vector.h>
-namespace GUI {
+namespace Cpp {
 #define FOR_EACH_TOKEN_TYPE \
     __TOKEN(Unknown) \
@@ -98,12 +98,12 @@ namespace GUI {
     __TOKEN(KnownType) \
     __TOKEN(Identifier)
-struct CppPosition {
+struct Position {
     size_t line;
     size_t column;
 };
-struct CppToken {
+struct Token {
     enum class Type {
 #define __TOKEN(x) x,
         FOR_EACH_TOKEN_TYPE
@@ -123,15 +123,15 @@ struct CppToken {
     }
     Type m_type { Type::Unknown };
-    CppPosition m_start;
-    CppPosition m_end;
+    Position m_start;
+    Position m_end;
 };
-class CppLexer {
+class Lexer {
 public:
-    CppLexer(const StringView&);
-    Vector<CppToken> lex();
+    Lexer(const StringView&);
+    Vector<Token> lex();
 private:
     char peek(size_t offset = 0) const;
@@ -139,8 +139,8 @@ private:
     StringView m_input;
     size_t m_index { 0 };
-    CppPosition m_previous_position { 0, 0 };
-    CppPosition m_position { 0, 0 };
+    Position m_previous_position { 0, 0 };
+    Position m_position { 0, 0 };
 };
 }


@@ -17,7 +17,6 @@ set(SOURCES
     ComboBox.cpp
     Command.cpp
     ControlBoxButton.cpp
-    CppLexer.cpp
     CppSyntaxHighlighter.cpp
     Desktop.cpp
     Dialog.cpp
@@ -96,4 +95,4 @@ set(GENERATED_SOURCES
 )
 serenity_lib(LibGUI gui)
-target_link_libraries(LibGUI LibCore LibGfx LibIPC LibThread)
+target_link_libraries(LibGUI LibCore LibGfx LibIPC LibThread LibCpp)


@@ -24,7 +24,7 @@
  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  */
-#include <LibGUI/CppLexer.h>
+#include <LibCpp/Lexer.h>
 #include <LibGUI/CppSyntaxHighlighter.h>
 #include <LibGUI/TextEditor.h>
 #include <LibGfx/Font.h>
@@ -32,30 +32,30 @@
 namespace GUI {
-static TextStyle style_for_token_type(Gfx::Palette palette, CppToken::Type type)
+static TextStyle style_for_token_type(Gfx::Palette palette, Cpp::Token::Type type)
 {
     switch (type) {
-    case CppToken::Type::Keyword:
+    case Cpp::Token::Type::Keyword:
         return { palette.syntax_keyword(), &Gfx::Font::default_bold_fixed_width_font() };
-    case CppToken::Type::KnownType:
+    case Cpp::Token::Type::KnownType:
         return { palette.syntax_type(), &Gfx::Font::default_bold_fixed_width_font() };
-    case CppToken::Type::Identifier:
+    case Cpp::Token::Type::Identifier:
         return { palette.syntax_identifier() };
-    case CppToken::Type::DoubleQuotedString:
-    case CppToken::Type::SingleQuotedString:
-    case CppToken::Type::RawString:
+    case Cpp::Token::Type::DoubleQuotedString:
+    case Cpp::Token::Type::SingleQuotedString:
+    case Cpp::Token::Type::RawString:
         return { palette.syntax_string() };
-    case CppToken::Type::Integer:
-    case CppToken::Type::Float:
+    case Cpp::Token::Type::Integer:
+    case Cpp::Token::Type::Float:
         return { palette.syntax_number() };
-    case CppToken::Type::IncludePath:
+    case Cpp::Token::Type::IncludePath:
         return { palette.syntax_preprocessor_value() };
-    case CppToken::Type::EscapeSequence:
+    case Cpp::Token::Type::EscapeSequence:
         return { palette.syntax_keyword(), &Gfx::Font::default_bold_fixed_width_font() };
-    case CppToken::Type::PreprocessorStatement:
-    case CppToken::Type::IncludeStatement:
+    case Cpp::Token::Type::PreprocessorStatement:
+    case Cpp::Token::Type::IncludeStatement:
         return { palette.syntax_preprocessor_statement() };
-    case CppToken::Type::Comment:
+    case Cpp::Token::Type::Comment:
         return { palette.syntax_comment() };
     default:
         return { palette.base_text() };
@@ -64,21 +64,21 @@ static TextStyle style_for_token_type(Gfx::Palette palette, CppToken::Type type)
 bool CppSyntaxHighlighter::is_identifier(void* token) const
 {
-    auto cpp_token = static_cast<GUI::CppToken::Type>(reinterpret_cast<size_t>(token));
-    return cpp_token == GUI::CppToken::Type::Identifier;
+    auto cpp_token = static_cast<Cpp::Token::Type>(reinterpret_cast<size_t>(token));
+    return cpp_token == Cpp::Token::Type::Identifier;
 }
 bool CppSyntaxHighlighter::is_navigatable(void* token) const
 {
-    auto cpp_token = static_cast<GUI::CppToken::Type>(reinterpret_cast<size_t>(token));
-    return cpp_token == GUI::CppToken::Type::IncludePath;
+    auto cpp_token = static_cast<Cpp::Token::Type>(reinterpret_cast<size_t>(token));
+    return cpp_token == Cpp::Token::Type::IncludePath;
 }
 void CppSyntaxHighlighter::rehighlight(Gfx::Palette palette)
 {
     ASSERT(m_editor);
     auto text = m_editor->text();
-    CppLexer lexer(text);
+    Cpp::Lexer lexer(text);
     auto tokens = lexer.lex();
     Vector<GUI::TextDocumentSpan> spans;
@@ -92,7 +92,7 @@ void CppSyntaxHighlighter::rehighlight(Gfx::Palette palette)
         auto style = style_for_token_type(palette, token.m_type);
         span.color = style.color;
         span.font = style.font;
-        span.is_skippable = token.m_type == CppToken::Type::Whitespace;
+        span.is_skippable = token.m_type == Cpp::Token::Type::Whitespace;
         span.data = reinterpret_cast<void*>(token.m_type);
         spans.append(span);
     }
@@ -108,16 +108,16 @@ Vector<SyntaxHighlighter::MatchingTokenPair> CppSyntaxHighlighter::matching_toke
 {
     static Vector<SyntaxHighlighter::MatchingTokenPair> pairs;
     if (pairs.is_empty()) {
-        pairs.append({ reinterpret_cast<void*>(CppToken::Type::LeftCurly), reinterpret_cast<void*>(CppToken::Type::RightCurly) });
-        pairs.append({ reinterpret_cast<void*>(CppToken::Type::LeftParen), reinterpret_cast<void*>(CppToken::Type::RightParen) });
-        pairs.append({ reinterpret_cast<void*>(CppToken::Type::LeftBracket), reinterpret_cast<void*>(CppToken::Type::RightBracket) });
+        pairs.append({ reinterpret_cast<void*>(Cpp::Token::Type::LeftCurly), reinterpret_cast<void*>(Cpp::Token::Type::RightCurly) });
+        pairs.append({ reinterpret_cast<void*>(Cpp::Token::Type::LeftParen), reinterpret_cast<void*>(Cpp::Token::Type::RightParen) });
+        pairs.append({ reinterpret_cast<void*>(Cpp::Token::Type::LeftBracket), reinterpret_cast<void*>(Cpp::Token::Type::RightBracket) });
     }
     return pairs;
 }
 bool CppSyntaxHighlighter::token_types_equal(void* token1, void* token2) const
 {
-    return static_cast<GUI::CppToken::Type>(reinterpret_cast<size_t>(token1)) == static_cast<GUI::CppToken::Type>(reinterpret_cast<size_t>(token2));
+    return static_cast<Cpp::Token::Type>(reinterpret_cast<size_t>(token1)) == static_cast<Cpp::Token::Type>(reinterpret_cast<size_t>(token2));
 }
 CppSyntaxHighlighter::~CppSyntaxHighlighter()