cinera: Lock database and config files

This commit helps Cinera instances coordinate with one another by
throwing an error when an instance is told to use a database or config
file that is already in use by another instance.
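
The lock acquisition itself is not visible in the hunks below; the
config loader only reacts to the new RC_ERROR_FILE_LOCKED result from
ReadFileIntoBuffer(). As a rough sketch of the general technique, and
not the code in this commit, an instance could take a non-blocking
advisory lock on the file it is about to use and treat failure as
"another instance owns this file". Everything here (TryLockFile, the
lock_result enum) is hypothetical and assumes POSIX flock(2):

// Hypothetical sketch only, not Cinera's implementation: try to take a
// non-blocking advisory lock on a file so that a second instance pointed
// at the same database/config can be refused up front.
#include <errno.h>
#include <fcntl.h>
#include <stdio.h>
#include <sys/file.h>
#include <unistd.h>

typedef enum { LOCK_ACQUIRED, LOCK_HELD_ELSEWHERE, LOCK_FAILED } lock_result;

static lock_result
TryLockFile(const char *Path, int *FileDescriptorOut)
{
    int FileDescriptor = open(Path, O_RDWR);
    if(FileDescriptor == -1) { return LOCK_FAILED; }

    // LOCK_NB makes flock() return immediately instead of waiting, so a file
    // already locked by another instance is reported rather than blocked on.
    if(flock(FileDescriptor, LOCK_EX | LOCK_NB) == -1)
    {
        int Error = errno;
        close(FileDescriptor);
        return (Error == EWOULDBLOCK) ? LOCK_HELD_ELSEWHERE : LOCK_FAILED;
    }

    *FileDescriptorOut = FileDescriptor; // keep it open: closing releases the lock
    return LOCK_ACQUIRED;
}

int
main(int ArgCount, char **Args)
{
    if(ArgCount < 2) { fprintf(stderr, "usage: %s <file>\n", Args[0]); return 1; }
    int FileDescriptor = -1;
    switch(TryLockFile(Args[1], &FileDescriptor))
    {
        case LOCK_ACQUIRED:       printf("locked %s\n", Args[1]); close(FileDescriptor); return 0;
        case LOCK_HELD_ELSEWHERE: fprintf(stderr, "%s is in use by another instance\n", Args[1]); return 1;
        default:                  perror(Args[1]); return 1;
    }
}

Keeping the descriptor open for the lifetime of the instance is what
holds such a lock, which is the same idea as the note in the hunk below
about not closing the file handle until RemoveAndFreeWatchHandles().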
Matt Mascarenhas 2022-09-16 16:10:19 +01:00
parent 1cf703e346
commit f60cf3087f
2 changed files with 570 additions and 472 deletions

File diff suppressed because it is too large


@@ -8,12 +8,6 @@ exit
 // config
 //
-#include <stdint.h>
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <stdarg.h>
 #define ArgCountPointer(...) (sizeof((void *[]){__VA_ARGS__})/sizeof(void *))
 #define DigitsInInt(I) DigitsInInt_(I, sizeof(*I))
@@ -348,6 +342,15 @@ PushTokens(memory_book *TokensList)
     return This;
 }
 
+void
+PopTokens(memory_book *TokensList)
+{
+    --TokensList->ItemCount;
+    tokens *This = GetPlaceInBook(TokensList, TokensList->ItemCount);
+    FreeBook(&This->Token);
+    This->CurrentLine = 0;
+}
+
 void
 FreeTokensList(memory_book *TokensList)
 {
@@ -357,7 +360,8 @@ FreeTokensList(memory_book *TokensList)
         This->CurrentIndex = 0;
         This->CurrentLine = 0;
         FreeBook(&This->Token);
-        FreeFile(&This->File);
+        FreeFileBufferAndPath(&This->File); // NOTE(matt): Not closing file because the next time we touch this handle is in
+                                            //             RemoveAndFreeWatchHandles() where we will close it
     }
     TokensList->ItemCount = 0;
 }
@@ -588,194 +592,204 @@ typedef struct
 } config;
 char *ExpandPath(string Path, string *RelativeToFile); // NOTE(matt): Forward declared. Consider reorganising the code?
-void PushWatchHandle(string Path, extension_id Extension, watch_type Type, project *Project, asset *Asset); // NOTE(matt): Forward declared. Consider reorganising the code?
+watch_file *PushWatchHandle(string Path, extension_id Extension, watch_type Type, project *Project, asset *Asset); // NOTE(matt): Forward declared. Consider reorganising the code?
 tokens *
-Tokenise(memory_book *TokensList, string Path)
+Tokenise(memory_book *TokensList, string Path, string *Filename, uint64_t LineNumber)
 {
     tokens *Result = 0;
-    char *Path0 = MakeString0("l", &Path);
-    FILE *Handle = 0;
-    PushWatchHandle(Path, EXT_NULL, WT_CONFIG, 0, 0);
-    if((Handle = fopen(Path0, "r")))
+    watch_file *WatchFile = PushWatchHandle(Path, EXT_NULL, WT_CONFIG, 0, 0);
+    Result = PushTokens(TokensList);
+    Result->File = InitFile(0, &Path, EXT_NULL, TRUE);
+    switch(ReadFileIntoBuffer(&Result->File))
     {
-        fclose(Handle);
-        Result = PushTokens(TokensList);
-        Result->File = InitFile(0, &Path, EXT_NULL);
-        ReadFileIntoBuffer(&Result->File);
-        buffer *B = &Result->File.Buffer;
-        SkipWhitespace(Result, B);
-        while(B->Ptr - B->Location < B->Size)
-        {
+        case RC_ERROR_FILE_LOCKED:
+        {
+            ConfigErrorLockedConfigLocation(Filename, LineNumber, &Path);
+            FreeFile(&Result->File, TRUE);
+            PopTokens(TokensList);
+            Result = 0;
+        } break;
+        case RC_ERROR_FILE:
+        {
+            ConfigErrorUnopenableConfigLocation(Filename, LineNumber, &Path);
+            FreeFile(&Result->File, TRUE);
+            PopTokens(TokensList);
+            Result = 0;
+        } break;
+        default:
+        {
+            WatchFile->Handle = Result->File.Handle;
+            buffer *B = &Result->File.Buffer;
+            SkipWhitespace(Result, B);
+            while(B->Ptr - B->Location < B->Size)
+            {
                 token T = {};
                 uint64_t Advancement = 0;
                 if(!StringsDifferS(TokenStrings[TOKEN_COMMENT_SINGLE], B))
                 {
                     T.Type = TOKEN_COMMENT;
                     B->Ptr += StringLength(TokenStrings[TOKEN_COMMENT_SINGLE]);
                     SkipWhitespace(Result, B);
                     T.Content.Base = B->Ptr;
                     while(B->Ptr && *B->Ptr != '\n')
                     {
                         ++T.Content.Length;
                         ++B->Ptr;
                     }
                     if(*B->Ptr == '\n')
                     {
                         ++Result->CurrentLine;
                     }
                     Advancement = 1;
                 }
                 else if(!StringsDifferS(TokenStrings[TOKEN_COMMENT_MULTI_OPEN], B))
                 {
                     uint64_t CommentDepth = 1;
                     T.Type = TOKEN_COMMENT;
                     B->Ptr += StringLength(TokenStrings[TOKEN_COMMENT_MULTI_OPEN]);
                     SkipWhitespace(Result, B);
                     T.Content.Base = B->Ptr;
                     while(B->Ptr - B->Location < B->Size && CommentDepth)
                     {
                         if(!StringsDifferS(TokenStrings[TOKEN_COMMENT_MULTI_CLOSE], B))
                         {
                             --CommentDepth;
                             B->Ptr += StringLength(TokenStrings[TOKEN_COMMENT_MULTI_CLOSE]);
                         }
                         else if(B->Ptr - B->Location < B->Size && *B->Ptr == '\n')
                         {
                             ++Result->CurrentLine;
                             ++B->Ptr;
                         }
                         else if(!StringsDifferS(TokenStrings[TOKEN_COMMENT_MULTI_OPEN], B))
                         {
                             ++CommentDepth;
                             B->Ptr += StringLength(TokenStrings[TOKEN_COMMENT_MULTI_OPEN]);
                         }
                         else
                         {
                             ++B->Ptr;
                         }
                     }
                     T.Content.Length = B->Ptr - T.Content.Base;
                     Advancement = 0;//StringLength(TokenStrings[TOKEN_COMMENT_MULTI_CLOSE]);
                 }
                 else if(!StringsDifferS(TokenStrings[TOKEN_COMMENT_MULTI_CLOSE], B))
                 {
                     Advancement = 2;
                     T.Type = TOKEN_NULL;
                     string Char = { .Base = B->Ptr, .Length = 2 };
                     string Filepath = Wrap0(Result->File.Path);
                     ConfigError(&Filepath, Result->CurrentLine, S_WARNING, "Mismatched closing multiline comment marker: ", &Char);
                 }
                 else if(!StringsDifferS(TokenStrings[TOKEN_DOUBLEQUOTE], B))
                 {
                     T.Type = TOKEN_STRING;
                     ++B->Ptr;
                     T.Content.Base = B->Ptr;
                     while(B->Ptr - B->Location < B->Size && *B->Ptr != '"')
                     {
                         if(*B->Ptr == '\\')
                         {
                             ++T.Content.Length;
                             ++B->Ptr;
                         }
                         ++T.Content.Length;
                         ++B->Ptr;
                     }
                     Advancement = 1;
                 }
                 else if(!StringsDifferS(TokenStrings[TOKEN_MINUS], B))
                 {
                     T.Type = TOKEN_MINUS;
                     T.Content.Base = B->Ptr;
                     ++T.Content.Length;
                     Advancement = 1;
                 }
                 else if(!StringsDifferS(TokenStrings[TOKEN_ASSIGN], B))
                 {
                     T.Type = TOKEN_ASSIGN;
                     T.Content.Base = B->Ptr;
                     ++T.Content.Length;
                     Advancement = 1;
                 }
                 else if(!StringsDifferS(TokenStrings[TOKEN_OPEN_BRACE], B))
                 {
                     T.Type = TOKEN_OPEN_BRACE;
                     T.Content.Base = B->Ptr;
                     ++T.Content.Length;
                     Advancement = 1;
                 }
                 else if(!StringsDifferS(TokenStrings[TOKEN_CLOSE_BRACE], B))
                 {
                     T.Type = TOKEN_CLOSE_BRACE;
                     T.Content.Base = B->Ptr;
                     ++T.Content.Length;
                     Advancement = 1;
                 }
                 else if(!StringsDifferS(TokenStrings[TOKEN_SEMICOLON], B))
                 {
                     T.Type = TOKEN_SEMICOLON;
                     T.Content.Base = B->Ptr;
                     ++T.Content.Length;
                     Advancement = 1;
                 }
                 else if(IsValidIdentifierCharacter(*B->Ptr))
                 {
                     T.Type = TOKEN_IDENTIFIER;
                     T.Content.Base = B->Ptr;
                     while(IsValidIdentifierCharacter(*B->Ptr))
                     {
                         ++T.Content.Length;
                         ++B->Ptr;
                     }
                     Advancement = 0;
                 }
                 else if(IsNumber(*B->Ptr))
                 {
                     T.Type = TOKEN_NUMBER;
                     T.Content.Base = B->Ptr;
                     while(IsNumber(*B->Ptr))
                     {
                         ++T.Content.Length;
                         ++B->Ptr;
                     }
                     Advancement = 0;
                 }
                 else if(*B->Ptr == '\n')
                 {
                     T.Type = TOKEN_NEWLINE;
                     T.Content.Base = B->Ptr;
                     T.Content.Length = 1;
                     ++Result->CurrentLine;
                     Advancement = 1;
                 }
                 else
                 {
                     T.Type = TOKEN_NULL;
                     string Char = GetUTF8Character(B->Ptr, B->Size - (B->Ptr - B->Location));
                     Advancement = Char.Length;
                     string Filepath = Wrap0(Result->File.Path);
                     if(Char.Base)
                     {
                         ConfigError(&Filepath, Result->CurrentLine, S_WARNING, "Unhandled character (ignored): ", &Char);
                     }
                     else
                     {
                         ConfigErrorInt(&Filepath, Result->CurrentLine, S_WARNING, "Malformed UTF-8 bytes encountered (skipped): ", Char.Length);
                     }
                 }
                 PushToken(Result, &T);
                 Advance(B, Advancement);
                 SkipWhitespace(Result, B);
             }
-        }
-    }
-    else
-    {
-        ConfigError(0, 0, S_WARNING, "Unable to open config file: ", &Path);
-    }
-    Free(Path0);
+        } break;
+    }
     return Result;
 }
@@ -2341,7 +2355,8 @@ ScopeTokens(scope_tree *Tree, memory_book *TokensList, tokens *T, memory_book *T
             }
             if(!I)
             {
-                I = Tokenise(TokensList, IncludePathL);
+                token *This = GetPlaceInBook(&T->Token, IncludePathTokenIndex);
+                I = Tokenise(TokensList, IncludePathL, &Filepath, This->LineNumber);
             }
             if(I)
             {
@@ -2352,11 +2367,6 @@ ScopeTokens(scope_tree *Tree, memory_book *TokensList, tokens *T, memory_book *T
                     return 0;
                 }
             }
-            else
-            {
-                token *This = GetPlaceInBook(&T->Token, IncludePathTokenIndex);
-                ConfigFileIncludeError(&Filepath, This->LineNumber, Wrap0(IncludePath));
-            }
             Free(IncludePath);
         }
         else
@@ -4977,7 +4987,7 @@ ParseConfig(string Path, memory_book *TokensList)
     MEM_LOOP_POST("InitTypeSpecs")
 #endif
     config *Result = 0;
-    tokens *T = Tokenise(TokensList, Path);
+    tokens *T = Tokenise(TokensList, Path, NA, NA);
 #if 0
     MEM_LOOP_PRE_FREE("Tokenise")
@@ -4993,7 +5003,7 @@ ParseConfig(string Path, memory_book *TokensList)
     scope_tree *ScopeTree = InitRootScopeTree();
     SetTypeSpec(ScopeTree, &TypeSpecs);
     SetDefaults(ScopeTree, &TypeSpecs);
-    ScopeTree = ScopeTokens(ScopeTree, TokensList, T, &TypeSpecs, 0);
+    ScopeTree = ScopeTokens(ScopeTree, TokensList, T, &TypeSpecs, NA);
     // TODO(matt): Mem testing
     //