LibCpp: Access Cpp::Token members via getter functions

Author: Itamar
Commit: 5cd1c69b96 (parent: d0b4f9cc0e)
Mirror of https://github.com/RGBCube/serenity

5 changed files with 56 additions and 53 deletions
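In short: Cpp::Token's m_type, m_start, and m_end members become private; call sites read them through the type(), start(), and end() getters, mutate positions through the new set_start()/set_end() setters, and build tokens with a new three-argument constructor (in place via Vector::empend). A minimal sketch of the resulting call-site pattern; the function, includes, and values here are hypothetical, only the Token API itself comes from this commit:

    #include <AK/Format.h>
    #include <AK/Vector.h>
    #include <LibCpp/Token.h> // assumed include path

    void example(Vector<Cpp::Token>& tokens, const Cpp::Position& position)
    {
        // Construct in place; arguments are forwarded to the new
        // Token(Type, const Position&, const Position&) constructor.
        tokens.empend(Cpp::Token::Type::Identifier, position, position);

        // Read token state through the getters instead of poking members.
        auto& token = tokens.last();
        if (token.type() == Cpp::Token::Type::Identifier)
            dbgln("identifier at {}:{}", token.start().line, token.start().column);
    }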
LexerAutoComplete.cpp

@@ -65,20 +65,20 @@ Vector<GUI::AutocompleteProvider::Entry> LexerAutoComplete::get_suggestions(cons

 StringView LexerAutoComplete::text_of_token(const Vector<String>& lines, const Cpp::Token& token)
 {
-    VERIFY(token.m_start.line == token.m_end.line);
-    VERIFY(token.m_start.column <= token.m_end.column);
-    return lines[token.m_start.line].substring_view(token.m_start.column, token.m_end.column - token.m_start.column + 1);
+    VERIFY(token.start().line == token.end().line);
+    VERIFY(token.start().column <= token.end().column);
+    return lines[token.start().line].substring_view(token.start().column, token.end().column - token.start().column + 1);
 }

 Optional<size_t> LexerAutoComplete::token_in_position(const Vector<Cpp::Token>& tokens, const GUI::TextPosition& position)
 {
     for (size_t token_index = 0; token_index < tokens.size(); ++token_index) {
         auto& token = tokens[token_index];
-        if (token.m_start.line != token.m_end.line)
+        if (token.start().line != token.end().line)
             continue;
-        if (token.m_start.line != position.line())
+        if (token.start().line != position.line())
             continue;
-        if (token.m_start.column + 1 > position.column() || token.m_end.column + 1 < position.column())
+        if (token.start().column + 1 > position.column() || token.end().column + 1 < position.column())
             continue;
         return token_index;
     }
@@ -94,7 +94,7 @@ Vector<GUI::AutocompleteProvider::Entry> LexerAutoComplete::identifier_prefixes(

     for (size_t i = 0; i < target_token_index; ++i) {
         auto& token = tokens[i];
-        if (token.m_type != Cpp::Token::Type::Identifier)
+        if (token.type() != Cpp::Token::Type::Identifier)
             continue;
         auto text = text_of_token(lines, token);
         if (text.starts_with(partial_input) && suggestions_lookup.set(text) == AK::HashSetResult::InsertedNewEntry) {
Lexer.cpp

@@ -232,11 +232,7 @@ Vector<Token> Lexer::lex()
     Position token_start_position;

     auto emit_token = [&](auto type) {
-        Token token;
-        token.m_type = type;
-        token.m_start = m_position;
-        token.m_end = m_position;
-        tokens.append(token);
+        tokens.empend(type, m_position, m_position);
         consume();
     };

@@ -245,11 +241,7 @@ Vector<Token> Lexer::lex()
         token_start_position = m_position;
     };
     auto commit_token = [&](auto type) {
-        Token token;
-        token.m_type = type;
-        token.m_start = token_start_position;
-        token.m_end = m_previous_position;
-        tokens.append(token);
+        tokens.empend(type, token_start_position, m_previous_position);
     };

     auto emit_token_equals = [&](auto type, auto equals_type) {
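Both lambdas now call AK's Vector::empend, which forwards its arguments to the element's constructor and constructs the Token directly in the vector's storage. Roughly, the commit_token call above amounts to this sketch, minus the named temporary:

    // tokens.empend(type, token_start_position, m_previous_position) is
    // equivalent to constructing and then moving the token in:
    Token token(type, token_start_position, m_previous_position);
    tokens.append(move(token));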
Parser.cpp

@@ -50,7 +50,7 @@ Parser::Parser(const StringView& program, const String& filename, Preprocessor::
     dbgln("Tokens:");
     for (auto& token : m_tokens) {
         StringView text;
-        if (token.m_start.line != token.m_end.line || token.m_start.column > token.m_end.column)
+        if (token.start().line != token.end().line || token.start().column > token.end().column)
             text = {};
         else
             text = text_of_token(token);
@@ -62,9 +62,9 @@ void Parser::initialize_program_tokens()
 {
     Lexer lexer(m_program);
     for (auto& token : lexer.lex()) {
-        if (token.m_type == Token::Type::Whitespace)
+        if (token.type() == Token::Type::Whitespace)
             continue;
-        if (token.m_type == Token::Type::Identifier) {
+        if (token.type() == Token::Type::Identifier) {
             if (auto defined_value = m_definitions.find(text_of_token(token)); defined_value != m_definitions.end()) {
                 add_tokens_for_preprocessor(token, defined_value->value);
                 continue;
@@ -77,7 +77,7 @@ void Parser::initialize_program_tokens()
 NonnullRefPtr<TranslationUnit> Parser::parse()
 {
     SCOPE_LOGGER();
-    auto unit = create_root_ast_node(m_tokens.first().m_start, m_tokens.last().m_end);
+    auto unit = create_root_ast_node(m_tokens.first().start(), m_tokens.last().end());
     while (!done()) {
         if (match_comment()) {
             consume(Token::Type::Comment);
@@ -154,7 +154,7 @@ NonnullRefPtr<FunctionDeclaration> Parser::parse_function_declaration(ASTNode& p
     }

     func->m_name = text_of_token(function_name);
-    func->m_return_type = create_ast_node<Type>(*func, return_type_token.m_start, return_type_token.m_end, text_of_token(return_type_token));
+    func->m_return_type = create_ast_node<Type>(*func, return_type_token.start(), return_type_token.end(), text_of_token(return_type_token));
     if (parameters.has_value())
         func->m_parameters = move(parameters.value());
     func->m_definition = move(body);
@@ -167,7 +167,7 @@ NonnullRefPtr<FunctionDefinition> Parser::parse_function_definition(ASTNode& par
     SCOPE_LOGGER();
     auto func = create_ast_node<FunctionDefinition>(parent, position(), {});
     consume(Token::Type::LeftCurly);
-    while (!eof() && peek().m_type != Token::Type::RightCurly) {
+    while (!eof() && peek().type() != Token::Type::RightCurly) {
         func->statements().append(parse_statement(func));
     }
     func->set_end(position());
@@ -372,12 +372,12 @@ NonnullRefPtr<Expression> Parser::parse_primary_expression(ASTNode& parent)
         if (match_function_call())
             return parse_function_call(parent);
         auto token = consume();
-        return create_ast_node<Identifier>(parent, token.m_start, token.m_end, text_of_token(token));
+        return create_ast_node<Identifier>(parent, token.start(), token.end(), text_of_token(token));
     }
     default: {
         error("could not parse primary expression");
         auto token = consume();
-        return create_ast_node<InvalidExpression>(parent, token.m_start, token.m_end);
+        return create_ast_node<InvalidExpression>(parent, token.start(), token.end());
     }
     }
 }
@@ -444,7 +444,7 @@ NonnullRefPtr<Expression> Parser::parse_literal(ASTNode& parent)
     switch (peek().type()) {
     case Token::Type::Integer: {
         auto token = consume();
-        return create_ast_node<NumericLiteral>(parent, token.m_start, token.m_end, text_of_token(token));
+        return create_ast_node<NumericLiteral>(parent, token.start(), token.end(), text_of_token(token));
     }
     case Token::Type::DoubleQuotedString: {
         return parse_string_literal(parent);
@@ -457,7 +457,7 @@ NonnullRefPtr<Expression> Parser::parse_literal(ASTNode& parent)
     default: {
         error("could not parse literal");
         auto token = consume();
-        return create_ast_node<InvalidExpression>(parent, token.m_start, token.m_end);
+        return create_ast_node<InvalidExpression>(parent, token.start(), token.end());
     }
     }
 }
@@ -465,7 +465,7 @@ NonnullRefPtr<Expression> Parser::parse_literal(ASTNode& parent)
 NonnullRefPtr<Expression> Parser::parse_secondary_expression(ASTNode& parent, NonnullRefPtr<Expression> lhs)
 {
     SCOPE_LOGGER();
-    switch (peek().m_type) {
+    switch (peek().type()) {
     case Token::Type::Plus:
         return parse_binary_expression(parent, lhs, BinaryOp::Addition);
     case Token::Type::Less:
@@ -554,7 +554,7 @@ bool Parser::match_function_declaration()
         return false;
     consume();

-    while (consume().m_type != Token::Type::RightParen && !eof()) { };
+    while (consume().type() != Token::Type::RightParen && !eof()) { };

     if (peek(Token::Type::Semicolon).has_value() || peek(Token::Type::LeftCurly).has_value())
         return true;
@@ -571,7 +571,7 @@ Optional<NonnullRefPtrVector<Parameter>> Parser::parse_parameter_list(ASTNode& p
 {
     SCOPE_LOGGER();
     NonnullRefPtrVector<Parameter> parameters;
-    while (peek().m_type != Token::Type::RightParen && !eof()) {
+    while (peek().type() != Token::Type::RightParen && !eof()) {
         if (match_ellipsis()) {
             auto last_dot = consume();
             while (peek().type() == Token::Type::Dot)
@@ -590,7 +590,7 @@ Optional<NonnullRefPtrVector<Parameter>> Parser::parse_parameter_list(ASTNode& p
             if (name_identifier.has_value())
                 name = text_of_token(name_identifier.value());

-            auto param = create_ast_node<Parameter>(parent, type->start(), name_identifier.has_value() ? name_identifier.value().m_end : type->end(), name);
+            auto param = create_ast_node<Parameter>(parent, type->start(), name_identifier.has_value() ? name_identifier.value().end() : type->end(), name);

             param->m_type = move(type);
             parameters.append(move(param));
@@ -650,7 +650,7 @@ Token Parser::consume(Token::Type type)

 bool Parser::match(Token::Type type)
 {
-    return peek().m_type == type;
+    return peek().type() == type;
 }

 Token Parser::consume()
@@ -672,7 +672,7 @@ Token Parser::peek(size_t offset) const
 Optional<Token> Parser::peek(Token::Type type) const
 {
     auto token = peek();
-    if (token.m_type == type)
+    if (token.type() == type)
         return token;
     return {};
 }
@@ -699,9 +699,9 @@ bool Parser::done()

 StringView Parser::text_of_token(const Cpp::Token& token) const
 {
-    VERIFY(token.m_start.line == token.m_end.line);
-    VERIFY(token.m_start.column <= token.m_end.column);
-    return m_lines[token.m_start.line].substring_view(token.m_start.column, token.m_end.column - token.m_start.column + 1);
+    VERIFY(token.start().line == token.end().line);
+    VERIFY(token.start().column <= token.end().column);
+    return m_lines[token.start().line].substring_view(token.start().column, token.end().column - token.start().column + 1);
 }

 StringView Parser::text_of_node(const ASTNode& node) const
@@ -742,8 +742,8 @@ void Parser::error(StringView message)
         formatted_message = String::formatted("C++ Parser error: {}. token: {} ({}:{})",
             message,
             m_state.token_index < m_tokens.size() ? text_of_token(m_tokens[m_state.token_index]) : "EOF",
-            m_tokens[m_state.token_index].m_start.line,
-            m_tokens[m_state.token_index].m_start.column);
+            m_tokens[m_state.token_index].start().line,
+            m_tokens[m_state.token_index].start().column);
     }
     m_errors.append(formatted_message);
     dbgln_if(CPP_DEBUG, "{}", formatted_message);
@@ -751,7 +751,7 @@ void Parser::error(StringView message)

 bool Parser::match_expression()
 {
-    auto token_type = peek().m_type;
+    auto token_type = peek().type();
     return token_type == Token::Type::Integer
         || token_type == Token::Type::Float
         || token_type == Token::Type::Identifier
@@ -767,14 +767,14 @@ bool Parser::eof() const
 Position Parser::position() const
 {
     if (eof())
-        return m_tokens.last().m_end;
-    return peek().m_start;
+        return m_tokens.last().end();
+    return peek().start();
 }

 RefPtr<ASTNode> Parser::eof_node() const
 {
     VERIFY(m_tokens.size());
-    return node_at(m_tokens.last().m_end);
+    return node_at(m_tokens.last().end());
 }

 RefPtr<ASTNode> Parser::node_at(Position pos) const
@@ -984,7 +984,7 @@ NonnullRefPtr<MemberDeclaration> Parser::parse_member_declaration(ASTNode& paren
         consume(Token::Type::RightCurly);
     }

-    member_decl->m_type = create_ast_node<Type>(*member_decl, type_token.m_start, type_token.m_end, text_of_token(type_token));
+    member_decl->m_type = create_ast_node<Type>(*member_decl, type_token.start(), type_token.end(), text_of_token(type_token));
     member_decl->m_name = text_of_token(identifier_token);
     member_decl->m_initial_value = move(initial_value);
     consume(Token::Type::Semicolon);
@@ -1121,8 +1121,8 @@ void Parser::add_tokens_for_preprocessor(Token& replaced_token, Preprocessor::De
     for (auto token : lexer.lex()) {
         if (token.type() == Token::Type::Whitespace)
             continue;
-        token.m_start = replaced_token.start();
-        token.m_end = replaced_token.end();
+        token.set_start(replaced_token.start());
+        token.set_end(replaced_token.end());
         m_tokens.append(move(token));
     }
 }
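The hunk above is the one call site that still mutates token positions: every token lexed from a preprocessor definition's value is re-based onto the span of the identifier it replaced, so reported locations point at the original source rather than into the definition text. That mutation now goes through set_start()/set_end(), which keeps compiling once the members turn private in Token.h below.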
SyntaxHighlighter.cpp

@@ -83,15 +83,15 @@ void SyntaxHighlighter::rehighlight(const Palette& palette)

     Vector<GUI::TextDocumentSpan> spans;
     for (auto& token : tokens) {
-        dbgln_if(SYNTAX_HIGHLIGHTING_DEBUG, "{} @ {}:{} - {}:{}", token.to_string(), token.m_start.line, token.m_start.column, token.m_end.line, token.m_end.column);
+        dbgln_if(SYNTAX_HIGHLIGHTING_DEBUG, "{} @ {}:{} - {}:{}", token.to_string(), token.start().line, token.start().column, token.end().line, token.end().column);
         GUI::TextDocumentSpan span;
-        span.range.set_start({ token.m_start.line, token.m_start.column });
-        span.range.set_end({ token.m_end.line, token.m_end.column });
-        auto style = style_for_token_type(palette, token.m_type);
+        span.range.set_start({ token.start().line, token.start().column });
+        span.range.set_end({ token.end().line, token.end().column });
+        auto style = style_for_token_type(palette, token.type());
         span.attributes.color = style.color;
         span.attributes.bold = style.bold;
-        span.is_skippable = token.m_type == Cpp::Token::Type::Whitespace;
-        span.data = reinterpret_cast<void*>(token.m_type);
+        span.is_skippable = token.type() == Cpp::Token::Type::Whitespace;
+        span.data = reinterpret_cast<void*>(token.type());
         spans.append(span);
     }
     m_client->do_set_spans(move(spans));
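One subtlety in this hunk: span.data smuggles the token type through GUI::TextDocumentSpan's untyped void* field via reinterpret_cast. A consumer would reverse the cast to recover the type; a sketch, assuming AK's pointer-sized integer type FlatPtr (the enum value fits in a pointer):

    // Hypothetical consumer recovering the token type stashed in span.data:
    auto type = static_cast<Cpp::Token::Type>(reinterpret_cast<FlatPtr>(span.data));
    bool is_comment = type == Cpp::Token::Type::Comment;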
Token.h

@@ -114,6 +114,13 @@ struct Token {
 #undef __TOKEN
     };

+    Token(Type type, const Position& start, const Position& end)
+        : m_type(type)
+        , m_start(start)
+        , m_end(end)
+    {
+    }
+
     static const char* type_to_string(Type t)
     {
         switch (t) {
@@ -130,10 +137,14 @@ struct Token {
     {
         return type_to_string(m_type);
     }
-    Position start() const { return m_start; }
-    Position end() const { return m_end; }
+    const Position& start() const { return m_start; }
+    const Position& end() const { return m_end; }
+
+    void set_start(const Position& other) { m_start = other; }
+    void set_end(const Position& other) { m_end = other; }
     Type type() const { return m_type; }

+private:
     Type m_type { Type::Unknown };
     Position m_start;
     Position m_end;
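With the members private, the constructor and the setters are now the only way to build or adjust a token from outside the struct. A minimal construction sketch; the line/column values are hypothetical, and Position is assumed to be an aggregate with line and column fields, as its uses above suggest:

    Cpp::Position start { 1, 0 };
    Cpp::Position end { 1, 3 };
    Cpp::Token token(Cpp::Token::Type::Identifier, start, end);
    token.set_end({ 1, 8 }); // widen the span via the new setter
    VERIFY(token.type() == Cpp::Token::Type::Identifier);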