feat(frontend): 重构词法分析器

- 添加 .gitignore 文件,忽略编译器生成的二进制文件
- 重构 lexer.c 文件,改进了关键字处理和字符串处理
- 更新前端的解析器和 AST 相关文件,以适应新的词法分析器
- 优化了 token 相关的定义和函数,引入了新的 token 类型
This commit is contained in:
ZZY
2025-03-23 12:13:16 +08:00
parent 05c637e594
commit 2b4857001c
33 changed files with 532 additions and 624 deletions

View File

@@ -19,7 +19,7 @@ ast_node_t* parse_block(parser_t* parser) {
symtab_enter_scope(parser->symtab);
tok_stream_t *tokbuf = &parser->tokbuf;
flush_peek_tok(tokbuf);
tok_type_t ttype;
cc_tktype_t ttype;
ast_node_t* node = new_ast_node_block();
expect_pop_tok(tokbuf, TOKEN_L_BRACE);

View File

@@ -37,7 +37,7 @@ int peek_decl(tok_stream_t* tokbuf) {
ast_node_t* parse_decl_val(parser_t* parser) {
tok_stream_t* tokbuf = &parser->tokbuf;
tok_type_t ttype;
cc_tktype_t ttype;
flush_peek_tok(tokbuf);
ast_node_t* node;
@@ -69,7 +69,7 @@ ast_node_t* parse_decl_val(parser_t* parser) {
ast_node_t* parse_decl(parser_t* parser) {
tok_stream_t* tokbuf = &parser->tokbuf;
flush_peek_tok(tokbuf);
tok_type_t ttype;
cc_tktype_t ttype;
ast_node_t* node;
if (peek_decl(tokbuf) == 0) {

View File

@@ -82,7 +82,7 @@ static ast_node_t* parse_comma(tok_stream_t* tokbuf, symtab_t *symtab, ast_node_
static ast_node_t* parse_assign(tok_stream_t* tokbuf, symtab_t *symtab, ast_node_t* left) {
flush_peek_tok(tokbuf);
tok_type_t ttype = peek_tok_type(tokbuf);
cc_tktype_t ttype = peek_tok_type(tokbuf);
pop_tok(tokbuf);
ast_node_t* node = new_ast_node();
node->type = NT_ASSIGN;
@@ -133,7 +133,7 @@ static ast_node_t* parse_assign(tok_stream_t* tokbuf, symtab_t *symtab, ast_node
static ast_node_t* parse_cmp(tok_stream_t* tokbuf, symtab_t *symtab, ast_node_t* left) {
flush_peek_tok(tokbuf);
tok_type_t ttype = peek_tok_type(tokbuf);
cc_tktype_t ttype = peek_tok_type(tokbuf);
pop_tok(tokbuf);
ast_node_t* node = new_ast_node();
// saved left
@@ -171,7 +171,7 @@ static ast_node_t* parse_cmp(tok_stream_t* tokbuf, symtab_t *symtab, ast_node_t*
static ast_node_t* parse_cal(tok_stream_t* tokbuf, symtab_t *symtab, ast_node_t* left) {
flush_peek_tok(tokbuf);
tok_type_t ttype = peek_tok_type(tokbuf);
cc_tktype_t ttype = peek_tok_type(tokbuf);
pop_tok(tokbuf);
ast_node_t* node = new_ast_node();
node->expr.left = left;
@@ -238,7 +238,7 @@ static ast_node_t* parse_call(tok_stream_t* tokbuf, symtab_t *symtab, ast_node_t
vector_init(node->call.params->params.params);
pop_tok(tokbuf); // 跳过 '('
tok_type_t ttype;
cc_tktype_t ttype;
while (1) {
flush_peek_tok(tokbuf);
ttype = peek_tok_type(tokbuf);
@@ -330,7 +330,7 @@ static ast_node_t *parse_primary_expression(tok_stream_t* tokbuf, symtab_t *symt
node->type = NT_TERM_VAL;
node->syms.tok = *tok;
switch (tok->type) {
switch (tok->sub_type) {
case TOKEN_INT_LITERAL:
// node->data.data_type = TYPE_INT;
break;
@ -344,7 +344,7 @@ static ast_node_t *parse_primary_expression(tok_stream_t* tokbuf, symtab_t *symt
// node->data.data_type = TYPE_POINTER;
case TOKEN_IDENT:
node = expect_pop_ident(tokbuf);
tok_type_t ttype = peek_tok_type(tokbuf);
cc_tktype_t ttype = peek_tok_type(tokbuf);
if (ttype == TOKEN_L_PAREN) {
node = parse_call(tokbuf, symtab, node);
} else {
@@ -365,7 +365,7 @@ END:
}
static ast_node_t *parse_subexpression(tok_stream_t* tokbuf, symtab_t *symtab, enum Precedence prec) {
tok_type_t ttype;
cc_tktype_t ttype;
struct expr_prec_table_t* work;
ast_node_t* left;
@@ -400,7 +400,7 @@ ast_node_t* parse_expr(parser_t* parser) {
tok_stream_t* tokbuf = &(parser->tokbuf);
symtab_t *symtab = parser->symtab;
flush_peek_tok(tokbuf);
tok_type_t ttype = peek_tok_type(tokbuf);
cc_tktype_t ttype = peek_tok_type(tokbuf);
switch (ttype) {
case TOKEN_NOT:
case TOKEN_AND:

View File

@@ -9,7 +9,7 @@
// TODO 语义分析压入符号表
static void parse_params(parser_t* parser, tok_stream_t* cache, ast_node_t* node) {
flush_peek_tok(cache);
tok_type_t ttype;
cc_tktype_t ttype;
ast_node_t *params = new_ast_node();
node->decl_func.params = params;
vector_init(params->params.params);
@@ -79,7 +79,7 @@ ast_type_t check_is_func_decl(tok_stream_t* tokbuf, tok_stream_t* cache) {
LOG_ERROR("function parameter list too long");
}
cache->buf[cache->size++] = *tok;
switch (tok->type) {
switch (tok->sub_type) {
case TOKEN_L_PAREN:
depth++;
break;

View File

@@ -4,7 +4,7 @@
ast_node_t* parse_stmt(parser_t* parser) {
tok_stream_t* tokbuf = &parser->tokbuf;
flush_peek_tok(tokbuf);
tok_type_t ttype = peek_tok_type(tokbuf);
cc_tktype_t ttype = peek_tok_type(tokbuf);
ast_node_t* node = new_ast_node();
switch (ttype) {
case TOKEN_IF: {

View File

@@ -3,8 +3,8 @@
#include "../type.h"
ast_node_t* new_ast_ident_node(tok_t* tok) {
if (tok->type != TOKEN_IDENT) {
LOG_ERROR("syntax error: want identifier but got %d", tok->type);
if (tok->sub_type != TOKEN_IDENT) {
LOG_ERROR("syntax error: want identifier but got %d", tok->sub_type);
}
ast_node_t* node = new_ast_node();
node->type = NT_TERM_IDENT;
@@ -24,7 +24,7 @@ ast_node_t* expect_pop_ident(tok_stream_t* tokbuf) {
ast_node_t* parse_type(parser_t* parser) {
tok_stream_t* tokbuf = &parser->tokbuf;
flush_peek_tok(tokbuf);
tok_type_t ttype = peek_tok_type(tokbuf);
cc_tktype_t ttype = peek_tok_type(tokbuf);
data_type_t dtype;
switch(ttype) {
case TOKEN_VOID: dtype = TYPE_VOID; break;