8 changes: 5 additions & 3 deletions src/arm-codegen.c
@@ -136,9 +136,9 @@ void update_elf_offset(ph2_ir_t *ph2_ir)
 void cfg_flatten(void)
 {
     func_t *func = find_func("__syscall");
-    func->bbs->elf_offset = 44; /* offset of start + exit in codegen */
+    func->bbs->elf_offset = 48; /* offset of start + branch + exit in codegen */
 
-    elf_offset = 80; /* offset of start + exit + syscall in codegen */
+    elf_offset = 84; /* offset of start + branch + exit + syscall in codegen */
     GLOBAL_FUNC->bbs->elf_offset = elf_offset;
 
     for (ph2_ir_t *ph2_ir = GLOBAL_FUNC->bbs->ph2_ir_list.head; ph2_ir;
@@ -457,6 +457,8 @@ void code_generate(void)
     emit(__sub_r(__AL, __sp, __sp, __r8));
     emit(__mov_r(__AL, __r12, __sp));
     emit(__bl(__AL, GLOBAL_FUNC->bbs->elf_offset - elf_code->size));
+    /* After global init, jump to main preparation */
+    emit(__b(__AL, 56)); /* PC+8: skip exit (24) + syscall (36) + ret (4) - 8 */
 
     /* exit */
     emit(__movw(__AL, __r8, GLOBAL_FUNC->stack_size));
@@ -490,7 +492,7 @@ void code_generate(void)
     emit(__add_i(__AL, __r1, __r8, 4));
     emit(__bl(__AL, MAIN_BB->elf_offset - elf_code->size));
 
-    /* exit with main's return value */
+    /* exit with main's return value - r0 already has the return value */
     emit(__mov_i(__AL, __r7, 1));
     emit(__svc());
 
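A quick sanity check on the numbers above may help reviewers. Each ARM instruction is 4 bytes, so inserting one extra branch into the start stub pushes the __syscall offset from 44 to 48 and the global-initializer entry from 80 to 84. The branch immediate of 56 follows from the sizes quoted in the diff comment (exit stub 24 bytes, syscall stub 36 bytes, trailing ret 4 bytes) minus the 8 bytes the ARM pipeline adds to PC. The standalone sketch below only reproduces that arithmetic; the stub sizes are taken from the comments in this diff, not measured from a generated binary.

#include <stdio.h>

int main(void)
{
    /* One extra ARM instruction (the new branch) is 4 bytes. */
    int insn = 4;
    printf("__syscall offset: %d -> %d\n", 44, 44 + insn);   /* 48 */
    printf("global init offset: %d -> %d\n", 80, 80 + insn); /* 84 */

    /* Displacement of the inserted branch, per the diff comment:
     * skip the exit stub (24) + syscall stub (36) + ret (4), then
     * subtract 8 because ARM's PC reads two instructions ahead.
     */
    int exit_stub = 24, syscall_stub = 36, ret_insn = 4, pc_ahead = 8;
    printf("branch displacement: %d\n",
           exit_stub + syscall_stub + ret_insn - pc_ahead); /* 56 */
    return 0;
}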
37 changes: 37 additions & 0 deletions src/elf.c
@@ -21,11 +21,15 @@ void elf_write_str(strbuf_t *elf_array, char *vals)
      * If necessary, use elf_write_byte() to append the null character
      * after calling elf_write_str().
      */
+    if (!elf_array || !vals)
+        return;
     strbuf_puts(elf_array, vals);
 }
 
 void elf_write_byte(strbuf_t *elf_array, int val)
 {
+    if (!elf_array)
+        return;
     strbuf_putc(elf_array, val);
 }
 
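As the comment above notes, strbuf_puts() copies only the characters of the string, so callers that need a NUL terminator append it explicitly. A typical fragment writing a name into shecc's string-table buffer might look like the following; this is not a standalone program, and it assumes the global elf_strtab buffer that the later hunks also use.

/* Write a symbol name followed by its terminating NUL byte. */
elf_write_str(elf_strtab, "main");
elf_write_byte(elf_strtab, 0);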
@@ -36,19 +40,29 @@ char e_extract_byte(int v, int b)
 
 void elf_write_int(strbuf_t *elf_array, int val)
 {
+    if (!elf_array)
+        return;
     for (int i = 0; i < 4; i++)
         strbuf_putc(elf_array, e_extract_byte(val, i));
 }
 
 void elf_write_blk(strbuf_t *elf_array, void *blk, int sz)
 {
+    if (!elf_array || !blk || sz <= 0)
+        return;
     char *ptr = blk;
     for (int i = 0; i < sz; i++)
         strbuf_putc(elf_array, ptr[i]);
 }
 
 void elf_generate_header(void)
 {
+    /* Check for null pointers to prevent crashes */
+    if (!elf_code || !elf_data || !elf_symtab || !elf_strtab || !elf_header) {
+        error("ELF buffers not initialized");
+        return;
+    }
+
     elf32_hdr_t hdr;
     /*
      * The following table explains the meaning of each field in the
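The elf_write_int() loop above emits byte 0 through byte 3 of the value, i.e. least-significant byte first, which is the little-endian encoding the ELF32/ARM target expects. The standalone sketch below mimics that behavior; the body of extract_byte() is an assumption about what e_extract_byte() does (shift and mask), not a copy of shecc's implementation.

#include <stdio.h>

/* Presumed behavior of e_extract_byte(v, b): byte b of v,
 * where byte 0 is the least significant one. */
static unsigned char extract_byte(int v, int b)
{
    return (v >> (b * 8)) & 0xFF;
}

int main(void)
{
    int val = 0x11223344;
    /* Emitting bytes 0..3 in order yields the little-endian
     * sequence 44 33 22 11, mirroring elf_write_int(). */
    for (int i = 0; i < 4; i++)
        printf("%02x ", (unsigned) extract_byte(val, i));
    printf("\n");
    return 0;
}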
@@ -175,6 +189,12 @@ void elf_generate_header(void)
 
 void elf_generate_sections(void)
 {
+    /* Check for null pointers to prevent crashes */
+    if (!elf_symtab || !elf_strtab || !elf_section) {
+        error("ELF section buffers not initialized");
+        return;
+    }
+
     /* symtab section */
     for (int b = 0; b < elf_symtab->size; b++)
         elf_write_byte(elf_section, elf_symtab->elements[b]);
@@ -312,6 +332,12 @@ void elf_generate_sections(void)
 
 void elf_align(void)
 {
+    /* Check for null pointers to prevent crashes */
+    if (!elf_data || !elf_symtab || !elf_strtab) {
+        error("ELF buffers not initialized for alignment");
+        return;
+    }
+
     while (elf_data->size & 3)
         elf_write_byte(elf_data, 0);
 
@@ -324,6 +350,12 @@
 
 void elf_add_symbol(char *symbol, int pc)
 {
+    /* Check for null pointers to prevent crashes */
+    if (!symbol || !elf_symtab || !elf_strtab) {
+        error("Invalid parameters for elf_add_symbol");
+        return;
+    }
+
     elf_write_int(elf_symtab, elf_strtab->size);
     elf_write_int(elf_symtab, pc);
     elf_write_int(elf_symtab, 0);
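For context, the three elf_write_int() calls at the top of elf_add_symbol() fill in the first three fields of a 32-bit ELF symbol-table entry: the name's offset into .strtab, the recorded address (pc), and a size of 0; the remaining one- and two-byte fields follow in lines this hunk does not show. A reference sketch of the entry layout, taken from the ELF specification rather than from shecc's sources:

#include <stdint.h>
#include <stdio.h>

/* 32-bit ELF symbol-table entry (16 bytes total). */
typedef struct {
    uint32_t st_name;  /* offset of the symbol name in .strtab */
    uint32_t st_value; /* address recorded for the symbol (pc) */
    uint32_t st_size;  /* object size; shecc writes 0 here */
    uint8_t st_info;   /* binding and type */
    uint8_t st_other;  /* visibility */
    uint16_t st_shndx; /* index of the related section */
} elf32_sym_t;

int main(void)
{
    /* Each symtab entry emitted this way should occupy 16 bytes. */
    printf("%zu\n", sizeof(elf32_sym_t));
    return 0;
}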
@@ -344,6 +376,11 @@ void elf_generate(char *outfile)
         outfile = "a.out";
 
     FILE *fp = fopen(outfile, "wb");
+    if (!fp) {
+        error("Unable to open output file for writing");
+        return;
+    }
+
     for (int i = 0; i < elf_header->size; i++)
         fputc(elf_header->elements[i], fp);
     for (int i = 0; i < elf_code->size; i++)
109 changes: 41 additions & 68 deletions src/lexer.c
@@ -14,6 +14,12 @@
 #define NUM_DIRECTIVES 11
 #define NUM_KEYWORDS 16
 
+/* Token mapping structure for elegant initialization */
+typedef struct {
+    char *name;
+    token_t token;
+} token_mapping_t;
+
 /* Preprocessor directive hash table using existing shecc hashmap */
 hashmap_t *DIRECTIVE_MAP = NULL;
 /* C keywords hash table */
@@ -29,41 +35,25 @@ void lex_init_directives()
 
     DIRECTIVE_MAP = hashmap_create(16); /* Small capacity for directives */
 
-    /* Initialization using indexed for-loop */
+    /* Initialization using struct compound literals for elegance */
     directive_tokens_storage =
         arena_alloc(GENERAL_ARENA, NUM_DIRECTIVES * sizeof(token_t));
 
-    char *names[NUM_DIRECTIVES];
-    token_t token_values[NUM_DIRECTIVES];
-
-    /* Populate arrays using index-based assignments for compatibility */
-    names[0] = "#define";
-    token_values[0] = T_cppd_define;
-    names[1] = "#elif";
-    token_values[1] = T_cppd_elif;
-    names[2] = "#else";
-    token_values[2] = T_cppd_else;
-    names[3] = "#endif";
-    token_values[3] = T_cppd_endif;
-    names[4] = "#error";
-    token_values[4] = T_cppd_error;
-    names[5] = "#if";
-    token_values[5] = T_cppd_if;
-    names[6] = "#ifdef";
-    token_values[6] = T_cppd_ifdef;
-    names[7] = "#ifndef";
-    token_values[7] = T_cppd_ifndef;
-    names[8] = "#include";
-    token_values[8] = T_cppd_include;
-    names[9] = "#pragma";
-    token_values[9] = T_cppd_pragma;
-    names[10] = "#undef";
-    token_values[10] = T_cppd_undef;
+    /* Use array compound literal for directive mappings */
+    token_mapping_t directives[] = {
+        {"#define", T_cppd_define}, {"#elif", T_cppd_elif},
+        {"#else", T_cppd_else}, {"#endif", T_cppd_endif},
+        {"#error", T_cppd_error}, {"#if", T_cppd_if},
+        {"#ifdef", T_cppd_ifdef}, {"#ifndef", T_cppd_ifndef},
+        {"#include", T_cppd_include}, {"#pragma", T_cppd_pragma},
+        {"#undef", T_cppd_undef},
+    };
 
     /* hashmap insertion */
     for (int i = 0; i < NUM_DIRECTIVES; i++) {
-        directive_tokens_storage[i] = token_values[i];
-        hashmap_put(DIRECTIVE_MAP, names[i], &directive_tokens_storage[i]);
+        directive_tokens_storage[i] = directives[i].token;
+        hashmap_put(DIRECTIVE_MAP, directives[i].name,
                    &directive_tokens_storage[i]);
     }
 }
 
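The compound-literal table plus a single insertion loop replaces twenty-odd indexed assignments while keeping each name next to its token. The standalone demo below illustrates the same table-driven pattern; the enum values are made up for the demo, and the linear search merely stands in for the hash-table lookup (presumably a hashmap_get() on DIRECTIVE_MAP) that shecc's lexer performs at scan time.

#include <stdio.h>
#include <string.h>

/* Demo token values; shecc's real T_cppd_* tokens live in its headers. */
typedef enum { T_demo_define, T_demo_include, T_demo_undef } token_t;

typedef struct {
    char *name;
    token_t token;
} token_mapping_t;

int main(void)
{
    token_mapping_t directives[] = {
        {"#define", T_demo_define},
        {"#include", T_demo_include},
        {"#undef", T_demo_undef},
    };
    int n = sizeof(directives) / sizeof(directives[0]);

    /* Linear search stands in for the hashmap used in shecc. */
    const char *query = "#include";
    for (int i = 0; i < n; i++) {
        if (!strcmp(directives[i].name, query)) {
            printf("%s -> token %d\n", query, directives[i].token);
            break;
        }
    }
    return 0;
}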
@@ -74,51 +64,34 @@ void lex_init_keywords()
 
     KEYWORD_MAP = hashmap_create(32); /* Capacity for keywords */
 
-    /* Initialization using indexed for-loop */
+    /* Initialization using struct compound literals for elegance */
     keyword_tokens_storage =
         arena_alloc(GENERAL_ARENA, NUM_KEYWORDS * sizeof(token_t));
 
-    char *names[NUM_KEYWORDS];
-    token_t token_values[NUM_KEYWORDS];
-
-    /* Populate arrays using index-based assignments for compatibility */
-    names[0] = "if";
-    token_values[0] = T_if;
-    names[1] = "while";
-    token_values[1] = T_while;
-    names[2] = "for";
-    token_values[2] = T_for;
-    names[3] = "do";
-    token_values[3] = T_do;
-    names[4] = "else";
-    token_values[4] = T_else;
-    names[5] = "return";
-    token_values[5] = T_return;
-    names[6] = "typedef";
-    token_values[6] = T_typedef;
-    names[7] = "enum";
-    token_values[7] = T_enum;
-    names[8] = "struct";
-    token_values[8] = T_struct;
-    names[9] = "sizeof";
-    token_values[9] = T_sizeof;
-    names[10] = "switch";
-    token_values[10] = T_switch;
-    names[11] = "case";
-    token_values[11] = T_case;
-    names[12] = "break";
-    token_values[12] = T_break;
-    names[13] = "default";
-    token_values[13] = T_default;
-    names[14] = "continue";
-    token_values[14] = T_continue;
-    names[15] = "union";
-    token_values[15] = T_union;
+    /* Use array compound literal for keyword mappings */
+    token_mapping_t keywords[] = {
+        {"if", T_if},
+        {"while", T_while},
+        {"for", T_for},
+        {"do", T_do},
+        {"else", T_else},
+        {"return", T_return},
+        {"typedef", T_typedef},
+        {"enum", T_enum},
+        {"struct", T_struct},
+        {"sizeof", T_sizeof},
+        {"switch", T_switch},
+        {"case", T_case},
+        {"break", T_break},
+        {"default", T_default},
+        {"continue", T_continue},
+        {"union", T_union},
+    };
 
     /* hashmap insertion */
     for (int i = 0; i < NUM_KEYWORDS; i++) {
-        keyword_tokens_storage[i] = token_values[i];
-        hashmap_put(KEYWORD_MAP, names[i], &keyword_tokens_storage[i]);
+        keyword_tokens_storage[i] = keywords[i].token;
+        hashmap_put(KEYWORD_MAP, keywords[i].name, &keyword_tokens_storage[i]);
     }
 }
 
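One detail worth noting in both init functions: the directives[] and keywords[] arrays are automatic variables, so their storage disappears when the function returns. That is presumably why the token values are first copied into the arena-backed directive_tokens_storage / keyword_tokens_storage buffers and the hashmap is handed pointers into that arena rather than into the stack arrays; the name strings are literals with static storage, so they can be passed to hashmap_put() directly.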