ZZY
2025-04-01 00:13:21 +08:00
parent 2b4857001c
commit 74f43a1ab7
79 changed files with 2271 additions and 2861 deletions


@@ -74,7 +74,7 @@ static inline int keyword_cmp(const char* name, int len) {
return -1; // Not a keyword.
}
-void init_lexer(lexer_t* lexer, const char* file_name, void* stream, lexer_sread_fn sread, strpool_t* strpool) {
+void init_lexer(cc_lexer_t* lexer, const char* file_name, void* stream, lexer_sread_fn sread, strpool_t* strpool) {
lexer->strpool = strpool;
lexer->cur_ptr = lexer->end_ptr = (char*)&(lexer->buffer);
lexer->loc.fname = strpool_intern(lexer->strpool, file_name);
@@ -87,7 +87,7 @@ void init_lexer(lexer_t* lexer, const char* file_name, void* stream, lexer_sread
rt_memset(lexer->buffer, 0, sizeof(lexer->buffer));
}
-static void flush_buffer(lexer_t* lexer) {
+static void flush_buffer(cc_lexer_t* lexer) {
int num = lexer->end_ptr - lexer->cur_ptr;
for (int i = 0; i < num; i++) {
lexer->buffer[i] = lexer->cur_ptr[i];
@@ -96,7 +96,7 @@ static void flush_buffer(lexer_t* lexer) {
int read_size = LEXER_BUFFER_SIZE - num;
// TODO: converting rt_size_t to int may lose precision
-int got_size = lexer->sread(lexer->buffer + num, read_size, 1, read_size, lexer->stream);
+int got_size = lexer->sread(lexer->buffer + num, 1, read_size, lexer->stream);
if (got_size < 0) {
LEX_ERROR("lexer read error");
} else if (got_size < read_size) {
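The call-site change in this hunk drops the redundant size argument, so the read callback now appears to follow the standard fread(ptr, size, nmemb, stream) argument order. Below is a minimal sketch of a matching adapter, assuming the callback returns the number of bytes read and a negative value on error (to satisfy the got_size < 0 check); the typedef shape and the file_sread name are illustrative guesses, not taken from this commit.

#include <stdio.h>

/* Assumed shape of the updated callback: fread-style (buffer, size, nmemb, stream). */
typedef int (*lexer_sread_fn)(void* buf, size_t size, size_t nmemb, void* stream);

/* Hypothetical FILE*-backed adapter: returns bytes read, or -1 on a read error,
   which is what the got_size < 0 branch in flush_buffer expects. */
static int file_sread(void* buf, size_t size, size_t nmemb, void* stream) {
    size_t n = fread(buf, size, nmemb, (FILE*)stream);
    if (n < nmemb && ferror((FILE*)stream)) {
        return -1;
    }
    return (int)(n * size);
}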
@@ -110,7 +110,7 @@ static void flush_buffer(lexer_t* lexer) {
}
}
-static void goto_newline(lexer_t* lexer) {
+static void goto_newline(cc_lexer_t* lexer) {
do {
if (lexer->cur_ptr == lexer->end_ptr) {
flush_buffer(lexer);
@@ -120,7 +120,7 @@ static void goto_newline(lexer_t* lexer) {
} while (*lexer->cur_ptr != '\n' && *lexer->cur_ptr != '\0');
}
-static void goto_block_comment(lexer_t* lexer) {
+static void goto_block_comment(cc_lexer_t* lexer) {
while (1) {
if (lexer->end_ptr - lexer->cur_ptr < 2) {
flush_buffer(lexer);
@@ -159,7 +159,7 @@ static char got_slash(char* peek) {
return -1;
}
-static void parse_char_literal(lexer_t* lexer, tok_t* token) {
+static void parse_char_literal(cc_lexer_t* lexer, tok_t* token) {
char val = 0;
char* peek = lexer->cur_ptr + 1;
if (*peek == '\\') {
@@ -175,7 +175,7 @@ static void parse_char_literal(lexer_t* lexer, tok_t* token) {
token->val.ch = val;
}
-static void parse_string_literal(lexer_t* lexer, tok_t* token) {
+static void parse_string_literal(cc_lexer_t* lexer, tok_t* token) {
char* peek = lexer->cur_ptr + 1;
// TODO string literal size check
static char dest[LEXER_MAX_TOKEN_SIZE + 1];
@@ -200,7 +200,7 @@ static void parse_string_literal(lexer_t* lexer, tok_t* token) {
}
// FIXME: written with AI assistance, may contain errors
-static void parse_number(lexer_t* lexer, tok_t* token) {
+static void parse_number(cc_lexer_t* lexer, tok_t* token) {
char* peek = lexer->cur_ptr;
int base = 10;
int is_float = 0;
@@ -290,7 +290,7 @@ static void parse_number(lexer_t* lexer, tok_t* token) {
#define GOT_ONE_TOKEN_BUF_SIZE 64
// /zh/c/language/operator_arithmetic.html
-void get_token(lexer_t* lexer, tok_t* token) {
+void get_token(cc_lexer_t* lexer, tok_t* token) {
// The buffer must always remain readable
if (lexer->end_ptr - lexer->cur_ptr < GOT_ONE_TOKEN_BUF_SIZE) {
flush_buffer(lexer);
@@ -515,7 +515,7 @@ static const tok_basic_type_t tok_type_map[] = {
}
// get_token may return tokens the parser considers invalid; skip them here
-void get_valid_token(lexer_t* lexer, tok_t* token) {
+void get_valid_token(cc_lexer_t* lexer, tok_t* token) {
tok_basic_type_t type;
do {
get_token(lexer, token);
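For completeness, a hedged end-to-end sketch of driving the renamed API: only cc_lexer_t, tok_t, strpool_t, init_lexer and get_valid_token appear in this diff; the file_sread adapter is the guess sketched earlier, the "lexer.h" header name is assumed, and the end-of-input check is left as a comment because the terminating token type is not visible here.

#include <stdio.h>
#include "lexer.h"   /* assumed header providing cc_lexer_t, tok_t, strpool_t and the API */

void dump_tokens(const char* path, strpool_t* strpool) {
    FILE* fp = fopen(path, "rb");
    if (fp == NULL) {
        return;
    }
    cc_lexer_t lexer;
    tok_t token;
    /* Initialize the renamed lexer with the hypothetical fread-backed callback. */
    init_lexer(&lexer, path, fp, file_sread, strpool);
    get_valid_token(&lexer, &token);
    /* A real driver would loop here until the lexer reports end of input;
       the terminating token's name is not shown in this diff, so the loop
       condition is omitted. */
    fclose(fp);
}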