fixed initialization bugs

anon 2023-08-24 21:07:46 +02:00
parent b1c912689f
commit 570f15f635


@@ -76,21 +76,12 @@ int append_token(token_t * token) {
 token_t * new_symbol_token(const char * const c,
                            hl_group_t * const g) {
-	char * new_word;
-	if (is_magic(*c)) {
-		new_word = (char *)malloc(sizeof(char)*3);
-		new_word[0] = '\\';
-		new_word[1] = *c;
-		new_word[2] = '\00';
-	} else {
-		new_word = strdup(c);
-	}
 	token_t * mt = (token_t*)malloc(sizeof(token_t));
 	mt->hl = g;
 	mt->t = KEYSYMBOL;
-	mt->syntax = regex_compile(new_word);
+	mt->syntax = regex_compile(c);
 	append_token(mt);
@@ -113,14 +104,15 @@ int new_symbol_tokens(const char * const * symbols,
 int new_char_tokens(const char * characters,
                     hl_group_t * const g) {
-	int i = 0;
-	char buffer[2] = "";
-	buffer[1] = '\00';
+	int i = 0;
+	char buffer[3];
+	buffer[0] = '\\';
+	buffer[2] = '\0';
 	for(const char * s = characters; *s != '\0'; s++) {
-		buffer[0] = *s;
-		if(new_symbol_token(buffer, g)) {
+		buffer[1] = *s;
+		if(new_symbol_token(is_magic(*s) ? buffer : buffer + 1, g)) {
 			++i;
 		}
 	}
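
Taken together, the two hunks above move the escaping out of new_symbol_token() and into new_char_tokens(): one reusable three-byte buffer holds '\\', the character itself, and a terminating '\0', and either the whole buffer (magic character) or buffer + 1 (ordinary character) is handed to new_symbol_token(). A minimal standalone sketch of that buffer scheme, using a stand-in is_magic() and puts() in place of the repository's new_symbol_token():

	#include <stdio.h>
	#include <string.h>

	/* stand-in for the repo's is_magic(): treat a few regex metacharacters as magic */
	static int is_magic(char c) {
		return strchr(".*[]^$", c) != NULL;
	}

	int main(void) {
		char buffer[3];
		buffer[0] = '\\';   /* escape prefix, only visible when the whole buffer is passed */
		buffer[2] = '\0';
		for (const char * s = "a.b*"; *s != '\0'; s++) {
			buffer[1] = *s; /* the character itself always sits at index 1 */
			/* magic characters come out as "\x", ordinary ones as just "x" */
			puts(is_magic(*s) ? buffer : buffer + 1);
		}
		return 0;
	}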
@@ -130,7 +122,7 @@ int new_char_tokens(const char * characters,
 token_t * new_keyword_token(const char * const word,
                             hl_group_t * const g) {
-	char * new_word = strdup(word);
+	//char * new_word = strdup(word);
 	//size_t word_length = strlen(word);
 	//char * new_word = (char*)malloc(word_length + 4 + 1);
@@ -142,7 +134,8 @@ token_t * new_keyword_token(const char * const word,
 	mt->hl = g;
 	mt->t = KEYWORD;
-	mt->syntax = regex_compile(new_word);
+	//mt->syntax = regex_compile(new_word);
+	mt->syntax = regex_compile(word);
 	append_token(mt);
@@ -198,8 +191,8 @@ int token_fits(const token_t * const token,
 void render_string(const char * const string,
                    const char * const mode) {
 	for (const char * s = string; *s != '\00';) {
-		int f;
-		int token_index = 0;
+		int f = 0;
+		size_t token_index = 0;
 		int offset;
 		for (; token_index < token_table.element_count; token_index++) {
@@ -254,7 +247,7 @@ int hl_init(void) {
 }
 int hl_deinit(void) {
-	for (int i = 0; i < token_table.element_count; i++) {
+	for (size_t i = 0; i < token_table.element_count; i++) {
 		free_token(vector_get(&token_table, i));
 	}
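
The counter changes in the last two hunks fit the commit message: f is now initialized up front (its later use is outside the shown context), and token_index and i switch from int to size_t so the loop indices match the unsignedness of token_table.element_count, avoiding signed/unsigned comparison warnings. A small illustrative sketch, assuming element_count is a size_t (the vector type itself is not part of this diff):

	#include <stddef.h>
	#include <stdio.h>

	/* hypothetical stand-in for the repo's vector type */
	typedef struct { size_t element_count; } vector_t;

	int main(void) {
		vector_t token_table = { 3 };
		/* a size_t index matches the unsigned element_count, so no -Wsign-compare */
		for (size_t i = 0; i < token_table.element_count; i++) {
			printf("element %zu\n", i);
		}
		return 0;
	}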