Commit e7df7ad4 authored by William Bundy

rituals build 129

parent 33eb74e2
git add --all
git commit -m "rituals build 128"
git commit -m "rituals build 129"
git push
git push github

typedef uint64 Hash;

// Simple multiplicative string hash (x65599); used to compare identifiers and
// keywords with a single integer comparison.
Hash hash_string(char* c, int len)
{
    Hash hash = 0;
    for(isize i = 0; i < len; ++i) {
        hash = c[i] + hash * 65599;
    }
    return hash;
}

// Parses an unsigned decimal string of known length into an int32.
static inline int32 dec_str_to_int(char* str, isize len)
{
    int32 result = 0;
    for(isize i = 0; i < len; ++i) {
        result = result * 10 + str[i] - '0';
    }
    return result;
}

// Hashes a string literal at its compile-time-known length (excluding the '\0').
#define hash_literal(c) hash_string(c, sizeof(c) - 1)
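
/* A minimal usage sketch (hash_example is a hypothetical helper, not part of
   this lexer): because hash_literal runs hash_string over the literal's bytes,
   a precomputed hash can be matched against a keyword with one integer compare,
   which is how "include", "exclude", and "end" are matched further down. */
static void hash_example(void)
{
    char word[] = "include";
    Hash h = hash_string(word, sizeof(word) - 1);
    if(h == hash_literal("include")) {
        printf("matched keyword: %s\n", word);
    }
}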

enum Token_Kind
{
    Token_Unknown,
    Token_Semicolon,
    Token_Backslash,
    Token_OpenBrace,
    Token_CloseBrace,
    Token_OpenParen,
    Token_CloseParen,
    Token_OpenBracket,
    Token_CloseBracket,
    Token_Tilde,
    Token_ExclamationMark,
    Token_DollarSign,
    Token_Modulo,
    Token_Caret,
    Token_Ampersand,
    Token_Asterisk,
    Token_Pipe,
    Token_Dot,
    Token_Comma,
    Token_QuestionMark,
    Token_Colon,
    Token_Plus,
    Token_Minus,
    Token_Divide,
    Token_Equals,
    Token_GreaterThan,
    Token_LessThan,

    Token_CompilerDirective,
    Token_Char,
    Token_String,
    Token_Number,
    Token_Identifier,
    Token_Integer,
    Token_Float,

    Operator_LogicalAnd,
    Operator_LogicalOr,
    Operator_BooleanEquals,
    Operator_BooleanNotEquals,
    Operator_BooleanGreaterEquals,
    Operator_BooleanLessEquals,
    Operator_PtrMemberAccess,
    Operator_Decrement,
    Operator_Increment,

    Token_EndOfFile,
    Token_Kind_Count,
};
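
// Note: Token_Integer, Token_Float, and the Operator_* kinds are never emitted
// by lexer_get_token below; they are produced afterwards by parse_number_tokens
// and parse_tokens, which merge adjacent tokens in place.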

struct Lexer_Location
{
    isize file;
    isize line;
    isize offset;
};

struct Token
{
    Token_Kind kind;
    char* start;
    isize len;
    Hash hash;
    Lexer_Location location;
    Token *prev, *next;
};

// Debug helper: prints a token as prev_index<-[index kind text]->next_index.
void print_token(Token* t, Token* start)
{
    isize sp = (isize)start;
    printf("%d<-[%d %d %.*s]->%d",
        (int)(((isize)t->prev - sp) / (isize)sizeof(Token)),
        (int)(((isize)t - sp) / (isize)sizeof(Token)),
        t->kind, (int)t->len, t->start,
        (int)(((isize)t->next - sp) / (isize)sizeof(Token)));
}

struct Lexer_File
{
    char* filename;
    isize pathlen;
    isize index;
    Lexer_Location location;
    char* start;
    char* head;
};

// Joins `filename` onto the including file's directory (prev_path, prev_path_len),
// records the new file's directory prefix length, and loads its contents only
// when the extension is .c, .cpp, or .h.
void init_lexer_file(Lexer_File* file, char* filename, char* prev_path, isize prev_path_len, Memory_Arena* arena)
{
    isize len = strlen(filename);
    char* filename_copy = arena_push_array(arena, char, len + prev_path_len + 1);
    memcpy(filename_copy, prev_path, prev_path_len);
    memcpy(filename_copy + prev_path_len, filename, len);
    filename_copy[len + prev_path_len] = '\0';
    //printf(">>>>%s \n", filename_copy);
    len = strlen(filename_copy);

    isize pathlen = 0;
    isize extlen = 0;
    for(isize i = len - 1; i >= 0; --i) {
        if(filename_copy[i] == '.') {
            extlen = i;
        }
        if(filename_copy[i] == '/' || filename_copy[i] == '\\') {
            pathlen = i + 1;
            break;
        }
    }

    Hash exthash = hash_string(filename_copy + extlen, len - extlen);
    if(exthash != hash_literal(".c") && exthash != hash_literal(".cpp") && exthash != hash_literal(".h")) {
        //printf("Hit invalid file suffix: %d %d %d %.*s", len, extlen, len - extlen, len - extlen, filename_copy + extlen);
        file->pathlen = pathlen;
        file->filename = filename_copy;
        file->start = NULL;
        file->head = NULL;
        return;
    }

    file->pathlen = pathlen;
    file->filename = filename_copy;
    file->start = load_file(filename_copy, NULL, arena);
    file->head = file->start;
}
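
// Illustrative example (file names hypothetical): if "src/main.cpp" is the
// including file (pathlen 4, covering "src/") and it includes "gfx/draw.cpp",
// the joined path becomes "src/gfx/draw.cpp", i.e. includes are resolved
// relative to the including file's directory.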

struct Lexer
{
    Lexer_File* main_file;
    Lexer_File* files;
    isize files_count, files_capacity;
    isize structs_count;
    isize procedures_count;
};

void init_lexer(Lexer* lex, isize file_capacity, Memory_Arena* arena)
{
    lex->files = arena_push_array(arena, Lexer_File, file_capacity);
    lex->files_count = 0;
    lex->files_capacity = file_capacity;
    lex->main_file = lex->files;
    lex->main_file->index = 0;
    lex->structs_count = 0;
    lex->procedures_count = 0;
}

// NOTE: does not check files_count against files_capacity.
Lexer_File* get_next_file(Lexer* lex)
{
    Lexer_File* file = lex->files + lex->files_count++;
    file->index = lex->files_count - 1;
    return file;
}

bool is_space(char c)
{
    return c == ' ' || c == '\t' || c == '\r' || c == '\n' || c == '\f';
}

bool is_number(char c)
{
    return c >= '0' && c <= '9';
}

bool is_valid_identifier(char c)
{
    return is_number(c) || ((c >= 'A') && (c <= 'Z')) || ((c >= 'a') && (c <= 'z')) || c == '_';
}

// Advances the file head one character while tracking line/offset for token
// locations and error reporting.
#define nextchar do { \
    if(f->head[0] == '\n') { \
        f->location.line++; \
        f->location.offset = 0; \
    } else { \
        f->location.offset++; \
    } \
    f->head++; \
} while(0)

#define valid ((f->head[0] != '\0') && (f->head[0] != EOF))

bool lexer_get_token(Lexer* lexer, Lexer_File* f, Token* t)
{
    // Skip whitespace and /* */ and // comments.
    while(valid) {
        if(is_space(f->head[0])) {
            nextchar;
        } else if(f->head[0] == '/') {
            if(f->head[1] == '*') {
                nextchar;
                nextchar;
                while(valid && !(f->head[0] == '*' && f->head[1] == '/')) {
                    nextchar;
                }
                nextchar;
                nextchar;
            } else if(f->head[1] == '/') {
                nextchar;
                nextchar;
                while(valid && (f->head[0] != '\n')) {
                    nextchar;
                }
                nextchar;
            } else {
                break;
            }
        } else {
            break;
        }
    }

    t->start = f->head;
    t->len = 1;
    t->kind = Token_Unknown;
    f->location.file = f->index;
    t->location = f->location;
    t->next = NULL;
    //printf("%d<%c> ", f->head[0], f->head[0]);

    // Single-character tokens first.
    bool hit = true;
    switch(f->head[0]) {
        case ';':
            t->kind = Token_Semicolon;
            break;
        case '\\':
            t->kind = Token_Backslash;
            break;
        case '{':
            t->kind = Token_OpenBrace;
            break;
        case '}':
            t->kind = Token_CloseBrace;
            break;
        case '(':
            t->kind = Token_OpenParen;
            break;
        case ')':
            t->kind = Token_CloseParen;
            break;
        case '[':
            t->kind = Token_OpenBracket;
            break;
        case ']':
            t->kind = Token_CloseBracket;
            break;
        case '~':
            t->kind = Token_Tilde;
            break;
        case '!':
            t->kind = Token_ExclamationMark;
            break;
        case '#':
            // '#' directives run to the end of the line; the token text
            // excludes the '#' itself.
            t->kind = Token_CompilerDirective;
            t->location.offset++;
            t->start++;
            nextchar;
            while(valid && (f->head[0] != '\n')) {
                nextchar;
            }
            t->len = f->head - t->start;
            break;
        case '$':
            t->kind = Token_DollarSign;
            break;
        case '%':
            t->kind = Token_Modulo;
            break;
        case '^':
            t->kind = Token_Caret;
            break;
        case '&':
            t->kind = Token_Ampersand;
            break;
        case '*':
            t->kind = Token_Asterisk;
            break;
        case '|':
            t->kind = Token_Pipe;
            break;
        case '.':
            t->kind = Token_Dot;
            break;
        case ',':
            t->kind = Token_Comma;
            break;
        case '?':
            t->kind = Token_QuestionMark;
            break;
        case ':':
            t->kind = Token_Colon;
            break;
        case '+':
            t->kind = Token_Plus;
            break;
        case '-':
            t->kind = Token_Minus;
            break;
        case '/':
            t->kind = Token_Divide;
            break;
        case '=':
            t->kind = Token_Equals;
            break;
        case '>':
            t->kind = Token_GreaterThan;
            break;
        case '<':
            t->kind = Token_LessThan;
            break;
        default:
            hit = false;
            break;
    }

    if(hit) {
        nextchar;
    } else switch(f->head[0]) {
        case '\'':
            // Character literal; handles \' and \\ escapes.
            t->kind = Token_Char;
            t->location.offset++;
            t->start++;
            nextchar;
            while(valid) {
                nextchar;
                if(f->head[0] == '\'') {
                    if(f->head[-1] != '\\') {
                        break;
                    } else {
                        if(f->head[-2] == '\\') {
                            break;
                        }
                    }
                }
            }
            t->len = f->head - t->start;
            nextchar;
            break;
        case '"':
            // String literal; may not span lines.
            t->kind = Token_String;
            t->location.offset++;
            t->start++;
            nextchar;
            while(valid) {
                if(f->head[0] == '"') {
                    char* last = f->head - 1;
                    if(last[0] != '\\') {
                        break;
                    }
                }
                nextchar;
                if(f->head[0] == '\n') {
                    printf(">>> Error, encountered newline in string literal\n");
                    printf(">>> line %d, char %d\n", (int)f->location.line, (int)f->location.offset);
                    break;
                }
            }
            t->len = f->head - t->start;
            nextchar;
            break;
        case '0':
        case '1':
        case '2':
        case '3':
        case '4':
        case '5':
        case '6':
        case '7':
        case '8':
        case '9':
            t->kind = Token_Number;
            while(valid && is_number(f->head[0])) {
                nextchar;
            }
            t->len = f->head - t->start;
            break;
        case '\0':
        case EOF:
            t->kind = Token_EndOfFile;
            return false;
        default:
            if(is_valid_identifier(f->head[0])) {
                t->kind = Token_Identifier;
                while(is_valid_identifier(f->head[0])) {
                    nextchar;
                }
                t->len = f->head - t->start;
            } else {
                t->kind = Token_Unknown;
            }
            break;
    }

    t->hash = hash_string(t->start, t->len);
    return true;
}
#undef nextchar
#undef valid
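
/* A minimal driver sketch (tokenize_file_example is hypothetical; it mirrors
   the loops used in parse_include_directive below): lexer_get_token is called
   until it returns false at end of input, and each token is copied into an
   arena-backed linked list. As in the code below, the last allocated node is
   left as an unused placeholder. */
static Token* tokenize_file_example(Lexer* lex, Lexer_File* f, Memory_Arena* arena)
{
    Token* first = arena_push_struct(arena, Token);
    Token* head = first;
    Token t;
    while(lexer_get_token(lex, f, &t)) {
        *head = t;
        head->next = arena_push_struct(arena, Token);
        head = head->next;
    }
    head->next = NULL;
    return first;
}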

void parse_number_tokens(Token* head)
{
    Token* next = head->next;
    if(head->kind == Token_Number) {
        if(next && next->kind == Token_Dot) {
            head->kind = Token_Float;
            head->len++;
            head->next = next->next;
            parse_number_tokens(head);
        } else {
            head->kind = Token_Integer;
        }
    } else if(head->kind == Token_Float) {
        if(next && next->kind == Token_Number) {
            head->len += next->len;
            head->next = next->next;
        }
    }
}
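
/* Worked example (illustrative): the literal 3.14 arrives from the lexer as
   three tokens, Token_Number "3" -> Token_Dot "." -> Token_Number "14".
   The first call turns the head into a Token_Float covering "3.", unlinks the
   dot, and recurses; the recursive call then absorbs the trailing "14",
   leaving one Token_Float of length 4 spanning the original "3.14" text. */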

void parse_include_directive(Lexer* lex, Token* directive)
{
    start_temp_arena(Temp_Arena);
    // Copy the directive text and re-lex it so its pieces can be inspected.
    char* buf = arena_push_array(Temp_Arena, char, directive->len + 1);
    memcpy(buf, directive->start, directive->len);
    buf[directive->len] = '\0';

    Token* head = arena_push_struct(Temp_Arena, Token);
    Token* start = head;
    {
        Token t;
        Lexer_File f = {0}; // zero-init so the index/location reads in lexer_get_token are defined
        f.head = buf;
        f.start = buf;
        while(lexer_get_token(NULL, &f, &t)) {
            *head = t;
            head->next = arena_push_struct(Temp_Arena, Token);
            head = head->next;
        }
        head->next = NULL;
    }

    if(start->hash != hash_literal("include")) {
        end_temp_arena(Temp_Arena);
        return;
    }

    head = start;
    do {
        if(head->kind == Token_String) {
            char* filename = arena_push_array(Temp_Arena, char, head->len + 1);
            memcpy(filename, head->start, head->len);
            filename[head->len] = '\0';

            Lexer_File* file = get_next_file(lex);
            Lexer_File* including = lex->files + directive->location.file;
            init_lexer_file(file, filename, including->filename, including->pathlen, Work_Arena);
            if(file->start != NULL) {
                // Tokenize the included file and splice its tokens into the
                // list right after the #include directive.
                Token* new_file_head = arena_push_struct(Work_Arena, Token);
                Token* new_file_start = new_file_head;
                Token* last = directive;
                Token t;
                while(lexer_get_token(lex, file, &t)) {
                    *new_file_head = t;
                    new_file_head->next = arena_push_struct(Work_Arena, Token);
                    new_file_head->prev = last;
                    last = new_file_head;
                    new_file_head = new_file_head->next;
                }
                Token* oldnext = directive->next;
                directive->next = new_file_start;
                new_file_start->prev = directive;
                new_file_head->next = oldnext;
                if(oldnext != NULL) {
                    oldnext->prev = new_file_head;
                }
            }
            break;
        }
    } while((head = head->next) != NULL);

    end_temp_arena(Temp_Arena);
}
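
/* Sketch of the splice performed above (file name hypothetical): given
       ... -> [#include "foo.h" directive] -> [oldnext] -> ...
   the included file's tokens end up linked in right after the directive:
       [directive] -> [foo.h token 1] -> ... -> [foo.h token N] -> [trailing arena Token] -> [oldnext]
   The loop allocates one Token past the last one it fills in, so that trailing
   placeholder node remains in the chain as written. */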

Token* parse_dollarsign_instructions(Token* t)
{
    if(t->kind != Token_DollarSign) return t;
    Token* next = t->next->next;
    Token* head = t;
    if(next->hash == hash_literal("exclude")) {
        do {
            if(next->kind == Token_DollarSign) {
                Token* tk = next->next->next;
                if(tk->kind == Token_Identifier) {
                    if(tk->hash == hash_literal("end")) {
                        head = tk->next->next;
                        break;
                    }
                }
            }
        } while((next = next->next) != NULL);
    }
    return head;
}
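
/* Note on the marker shape: the two ->next hops between a Token_DollarSign and
   the identifier it names imply exactly one token sits between them, e.g. a
   spelling like $(exclude) ... $(end) with the open paren skipped. That exact
   spelling is an inference from the pointer hops, not something this commit
   shows directly. */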

void parse_tokens(Lexer* lex, Token* start)
{
    // First pass: expand #include directives, skipping $-excluded regions.
    Token* head = start;
    do {
        head = parse_dollarsign_instructions(head);
        if(head->kind == Token_CompilerDirective && head->start[0] == 'i') {
            parse_include_directive(lex, head);
        }
    } while((head = head->next) != NULL);

    // Second pass: merge two-character operators and number literals in place.
    head = start;
    do {
        Token* next;
        switch(head->kind) {
            case Token_DollarSign:
                head = parse_dollarsign_instructions(head);
                break;
            case Token_Ampersand:
                next = head->next;
                if(next && next->kind == Token_Ampersand) {
                    head->kind = Operator_LogicalAnd;
                    head->len++;
                    head->next = next->next;
                }
                break;
            case Token_Pipe:
                next = head->next;
                if(next && next->kind == Token_Pipe) {
                    head->kind = Operator_LogicalOr;
                    head->len++;
                    head->next = next->next;
                }
                break;
            case Token_Equals:
                next = head->next;
                if(next && next->kind == Token_Equals) {
                    head->kind = Operator_BooleanEquals;
                    head->len++;
                    head->next = next->next;
                }
                break;
            case Token_ExclamationMark:
                next = head->next;
                if(next && next->kind == Token_Equals) {
                    head->kind = Operator_BooleanNotEquals;
                    head->len++;
                    head->next = next->next;
                }
                break;
            case Token_GreaterThan:
                next = head->next;
                if(next && next->kind == Token_Equals) {
                    head->kind = Operator_BooleanGreaterEquals;
                    head->len++;
                    head->next = next->next;
                }
                break;
            case Token_LessThan:
                next = head->next;
                if(next && next->kind == Token_Equals) {
                    head->kind = Operator_BooleanLessEquals;
                    head->len++;
                    head->next = next->next;
                }
                break;
            case Token_Number:
                parse_number_tokens(head);
                break;
            case Token_Minus:
                next = head->next;
                if(next && next->kind == Token_GreaterThan) {
                    head->kind = Operator_PtrMemberAccess;
                    head->len++;
                    head->next = next->next;
                } else if(next && next->kind == Token_Minus) {
                    head->kind = Operator_Decrement;
                    head->len++;
                    head->next = next->next;
                } else if(next && next->kind == Token_Number) {
                    // Fold a leading minus into the number only when the
                    // previous token is not itself a value (number, integer,
                    // float, or identifier).
                    Token_Kind prevkind = Token_Unknown;
                    if(head->prev != NULL) {
                        prevkind = head->prev->kind;
                    }
                    if(prevkind != Token_Number &&
                        prevkind != Token_Integer &&
                        prevkind != Token_Float &&
                        prevkind != Token_Identifier) {
                        head->kind = Token_Number;
                        head->len += next->len;
                        head->next = next->next;
                        next = head->next;
                        parse_number_tokens(head);
                    }
                }
                break;
            case Token_Plus:
                next = head->next;
                if(next && next->kind == Token_Plus) {
                    head->kind = Operator_Increment;
                    head->len++;
                    head->next = next->next;
                }
                break;
            default:
                break;
        }
    } while((head = head->next) != NULL);
}
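
/* Worked example (illustrative): in "x = -5;", the '-' is preceded by '=' so it
   is folded into the number, yielding a single integer token spanning "-5"; in
   "x - 5", the '-' is preceded by the identifier "x", so it stays a Token_Minus
   and "5" becomes its own Token_Integer. */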

void parse_sing(Token* t, int32 brace_level)
{
    if(t->hash == hash_literal("sing")) {
        fprintf(stderr, "[%d]", brace_level);
    }
}

struct Proc_Arg
{
    char** terms;
    isize count;
    char* defaults;
};

void init_proc_arg(Proc_Arg* arg, isize count, Memory_Arena* arena)
{
    arg->terms = arena_push_array(arena, char*, count);
    arg->defaults = NULL;
    arg->count = 0;
}

struct Proc_Prototype
{
    Token *start, *end;
    char** decorators;
    isize decorators_count;
    char* name;
    Proc_Arg* args;
    isize args_count;
    Proc_Prototype* next;
};

void print_proc_prototype(Proc_Prototype* p)
{
    if(p->name == NULL) return;
    for(isize i = 0; i < p->decorators_count; ++i) {
        printf("%s ", p->decorators[i]);
    }
    printf("%s(", p->name);