feat(backend/riscv32): 实现基础的编译器功能

- 完成 RV32IMA 指令集的代码生成
- 添加整数运算、分支、调用等基本指令支持
- 实现从 IR 到机器码的转换
- 添加简单的测试用例和测试框架
This commit is contained in:
ZZY
2025-03-08 16:50:21 +08:00
parent 95bf44eb3f
commit 172d72b0a0
32 changed files with 980 additions and 469 deletions

View File

@@ -163,13 +163,14 @@ static void parse_char_literal(lexer_t* lexer, tok_t* token) {
if (*peek == '\\') {
peek++;
val = got_slash(peek);
peek++;
} else {
val = *peek;
val = *peek++;
}
if (*peek != '\'') error("Unclosed character literal");
if (*peek++ != '\'') error("Unclosed character literal");
token->val.ch = val;
lexer->cur_ptr = peek + 1;
lexer->cur_ptr = peek;
token->val.have = 1;
token->type = TOKEN_CHAR_LITERAL;
}

View File

@@ -117,29 +117,23 @@ void test_literals() {
// test_lexer_string("4294967295", TOKEN_INT_LITERAL); // UINT_MAX
}
// TEST_CASE("Character literals"); {
// test_lexer_string("'a'", TOKEN_CHAR_LITERAL);
// test_lexer_string("'\\n'", TOKEN_CHAR_LITERAL);
// test_lexer_string("'\\t'", TOKEN_CHAR_LITERAL);
// test_lexer_string("'\\\\'", TOKEN_CHAR_LITERAL);
// test_lexer_string("'\\0'", TOKEN_CHAR_LITERAL);
// }
TEST_CASE("Character literals"); {
test_lexer_string("'a'", TOKEN_CHAR_LITERAL);
test_lexer_string("'\\n'", TOKEN_CHAR_LITERAL);
test_lexer_string("'\\t'", TOKEN_CHAR_LITERAL);
test_lexer_string("'\\\\'", TOKEN_CHAR_LITERAL);
test_lexer_string("'\\0'", TOKEN_CHAR_LITERAL);
}
TEST_CASE("String literals"); {
test_lexer_string("\"hello\"", TOKEN_STRING_LITERAL);
test_lexer_string("\"multi-line\\nstring\"", TOKEN_STRING_LITERAL);
test_lexer_string("\"escape\\\"quote\"", TOKEN_STRING_LITERAL);
}
// TEST_CASE("Integer literals");
// test_lexer_string("123", TOKEN_INT_LITERAL);
// test_lexer_string("0x1F", TOKEN_INT_LITERAL);
// TEST_CASE("Floating literals");
// test_lexer_string("3.14e-5", TOKEN_FLOAT_LITERAL);
// TEST_CASE("Character literals");
// test_lexer_string("'\\n'", TOKEN_CHAR_LITERAL);
}
// 边界测试

View File

@@ -30,14 +30,13 @@ void init_tokbuf(tok_buf_t *tokbuf, void *stream, get_tokbuf_func gettok) {
tokbuf->cap = 0;
}
tok_t *peek_tok(tok_buf_t *tokbuf)
{
tok_t *peek_tok(tok_buf_t *tokbuf) {
int idx = tokbuf->peek;
idx = ROUND_IDX(idx + 1);
tokbuf->peek = ROUND_IDX(idx + 1);
if (tokbuf->size >= tokbuf->cap) {
error("peek too deep, outof array size");
}
if (tokbuf->peek == tokbuf->end) {
if (idx == tokbuf->end) {
if (tokbuf->size == tokbuf->cap) {
error("peek_tok buffer overflow");
}
@@ -46,10 +45,9 @@ tok_t *peek_tok(tok_buf_t *tokbuf)
}
tokbuf->gettok(tokbuf->stream, &(tokbuf->buf[idx]));
tokbuf->size++;
tokbuf->end = idx;
tokbuf->end = tokbuf->peek;
}
tokbuf->peek = idx;
return &(tokbuf->buf[idx]);
}

View File

@@ -173,6 +173,8 @@ typedef struct ASTNode* (*parse_func_t) (parser_t*);
void parse_prog(parser_t* parser);
ast_node_t* parse_decl(parser_t* parser);
ast_node_t* parse_decl_val(parser_t* parser);
ast_node_t* parse_block(parser_t* parser);
ast_node_t* parse_stmt(parser_t* parser);
ast_node_t* parse_expr(parser_t* parser);

View File

@@ -8,6 +8,7 @@
// TODO 语义分析压入符号表
static void parse_params(parser_t* parser, tok_buf_t* cache, ast_node_t* node) {
flush_peek_tok(cache);
tok_type_t ttype;
ast_node_t *params = new_ast_node();
node->decl_func.params = params;

View File

@@ -89,8 +89,12 @@ ast_node_t* parse_stmt(parser_t* parser) {
// init expr or init decl_var
// TODO need add this feature
node->for_stmt.init = parse_expr(parser);
expect_pop_tok(tokbuf, TOKEN_SEMICOLON);
if (peek_decl(tokbuf)) {
node->for_stmt.init = parse_decl_val(parser);
} else {
node->for_stmt.init = parse_expr(parser);
expect_pop_tok(tokbuf, TOKEN_SEMICOLON);
}
// cond expr or null
ttype = peek_tok_type(tokbuf);