304 lines
9.6 KiB
C
Executable File
304 lines
9.6 KiB
C
Executable File
/// __ ___ __ ___ ___ ___ ___ ___ ___ _ __
|
|
/// \ \/ / '__/ _ \ / __/ _ \/ __/ __|/ _ \| '__|
|
|
/// > <| | | (_) | (_| __/\__ \__ \ (_) | |
|
|
/// /_/\_\_| \___/ \___\___||___/___/\___/|_|
|
|
///
|
|
/// Copyright (c) 1997 - Ognjen 'xolatile' Milan Robovic
|
|
///
|
|
/// xolatile@chud.cyou - xrocessor - Probably the most minimalistic general preprocessor aimed at Ada, Pascal and EAXHLA language syntax.
|
|
///
|
|
/// This program is free software, free as in freedom and as in free beer, you can redistribute it and/or modify it under the terms of the GNU
|
|
/// General Public License as published by the Free Software Foundation, either version 3 of the License, or any later version if you wish...
|
|
///
|
|
/// This program is distributed in the hope that it will be useful, but it is probably not, and without any warranty, without even the implied
|
|
/// warranty of merchantability or fitness for a particular purpose, because it is pointless. Please see the GNU (Geenoo) General Public License
|
|
/// for more details, if you dare, it is a lot of text that nobody wants to read...
|
|
|
|
//~#define token_limit (32)
|
|
|
|
//~#include <xolatile/xtandard.h>
|
|
//~#include <xolatile/xrena.h>
|
|
|
|
/// Forward declarations: one handler per token kind (see parse_key below).
/// Each handler consumes text at 'buffer' and returns the number of
/// characters it processed, so parse_buffer can advance its cursor.
static int parse_comment (char * buffer);
static int parse_include (char * buffer);
static int parse_use (char * buffer);
static int parse_alias (char * buffer);
static int parse_macro (char * buffer);
static int parse_scope (char * buffer);
static int parse_unscope (char * buffer);
static int parse_default (char * buffer);

/// Core driver; 'limit' < 0 means "process until the '\0' terminator".
static void parse_buffer (char * buffer, int limit);

/// Token kinds. token_default (7) doubles as the number of keyword-keyed
/// tokens and as the fall-through handler index in parse_key.
enum {
	token_comment, token_include, token_use, token_alias, token_macro, token_scope, token_unscope, token_default
};

/// Keyword prefix for each keyed token kind, indexed by the enum above.
/// parse_default has no keyword, hence only token_default entries here.
static char * token_key [token_default] = {
	"---", "include ", "use ", "alias ", "macro ", "scope ", "unscope "
};

/// Handler table, parallel to the enum; the extra (eighth) slot is
/// parse_default, invoked when no keyword matched.
static int (* parse_key [token_default + 1]) (char * buffer) = {
	parse_comment, parse_include, parse_use, parse_alias, parse_macro, parse_scope, parse_unscope, parse_default
};

/// Number of tokens currently registered in token_array / token_value.
static int token_count = 0;

/// Registered tokens: token_array holds names, token_value holds the
/// replacement text for aliases and macros (parallel arrays; entries in
/// token_value are only populated for alias and macro tokens).
/// 'data' points into a parsed buffer and is NOT NUL-terminated; 'size'
/// is its length; 'type' is one of the token_* enum values.
/// NOTE(review): token_limit appears to come from a header that is
/// commented out above (//~#define token_limit (32)) -- confirm where it
/// is actually defined in the build.
static struct {
	char * data;
	int size;
	int type;
} * token_array [token_limit],
* token_value [token_limit];
|
|
|
|
static int parse_default (char * buffer) {
|
|
int select;
|
|
|
|
for (select = 0; select < token_count; ++select) {
|
|
if (string_compare_limit (buffer, token_array [select]->data, token_array [select]->size) == true) {
|
|
if (token_array [select]->type == token_alias) {
|
|
if (character_is_separator (buffer [-1]) == false) continue;
|
|
if (character_is_separator (buffer [token_array [select]->size]) == false) continue;
|
|
|
|
output (token_value [select]->data, token_value [select]->size);
|
|
|
|
return (token_array [select]->size);
|
|
}
|
|
|
|
if (token_array [select]->type == token_macro) {
|
|
if (character_is_separator (buffer [-1]) == false) continue;
|
|
if (character_is_separator (buffer [token_array [select]->size]) == false) continue;
|
|
|
|
parse_buffer (token_value [select]->data, token_value [select]->size);
|
|
|
|
return (token_array [select]->size);
|
|
}
|
|
}
|
|
}
|
|
|
|
output (buffer, 1);
|
|
|
|
return (1);
|
|
}
|
|
|
|
/// Skips a "---" line comment: consumes everything up to and including
/// the next newline. Returns the number of consumed input characters.
static int parse_comment (char * buffer) {
	int offset;

	/// The comment marker itself is 3 characters; scan from there.
	for (offset = 3; buffer [offset] != '\0'; ++offset) {
		if (buffer [offset] == '\n') break;
	}

	/// Consume the newline too, but never step past the '\0' terminator
	/// (the original returned offset + 1 even when the scan stopped at
	/// the end of the buffer, making the caller read out of bounds).
	return (offset + ((buffer [offset] != '\0') ? 1 : 0));
}
|
|
|
|
static int parse_use (char * buffer) {
|
|
int offset;
|
|
int length;
|
|
|
|
for (offset = 4; (buffer [offset] != '\0') && (character_is_blank (buffer [offset]) == true); ++offset);
|
|
|
|
++token_count;
|
|
|
|
token_array [token_count - 1] = arena_add (sizeof (* * token_array));
|
|
|
|
token_array [token_count - 1]->data = & buffer [offset];
|
|
token_array [token_count - 1]->type = token_use;
|
|
|
|
for (length = 0; (buffer [offset + length] != '\0') && (character_is_blank (buffer [offset + length]) == false); ++length);
|
|
|
|
token_array [token_count - 1]->size = length;
|
|
|
|
return (offset + length + 1);
|
|
}
|
|
|
|
static int parse_include (char * buffer) {
|
|
int length;
|
|
int offset;
|
|
char * data;
|
|
|
|
for (offset = 8; (buffer [offset] != '\0') && (character_is_blank (buffer [offset]) == true); ++offset);
|
|
|
|
fatal_failure (buffer [offset] != '"', "parse_include: Not a string.");
|
|
|
|
++token_count;
|
|
|
|
token_array [token_count - 1] = arena_add (sizeof (* * token_array));
|
|
|
|
token_array [token_count - 1]->data = & buffer [offset + 1];
|
|
token_array [token_count - 1]->type = token_include;
|
|
|
|
for (length = 1; (buffer [offset + length] != '\0') && (buffer [offset + length] != '"'); ++length);
|
|
|
|
token_array [token_count - 1]->size = length - 1;
|
|
|
|
data = arena_add_file (token_array [token_count - 1]->data, token_array [token_count - 1]->size);
|
|
|
|
parse_buffer (data, -1);
|
|
|
|
return (offset + length + 1);
|
|
}
|
|
|
|
static int parse_alias (char * buffer) {
|
|
int offset;
|
|
int length;
|
|
|
|
for (offset = 6; (buffer [offset] != '\0') && (character_is_blank (buffer [offset]) == true); ++offset);
|
|
|
|
++token_count;
|
|
|
|
token_array [token_count - 1] = arena_add (sizeof (* * token_array));
|
|
token_value [token_count - 1] = arena_add (sizeof (* * token_value));
|
|
|
|
token_array [token_count - 1]->data = & buffer [offset];
|
|
token_array [token_count - 1]->type = token_alias;
|
|
|
|
for (length = 0; (buffer [offset + length] != '\0') && (character_is_blank (buffer [offset + length]) == false); ++length);
|
|
|
|
token_array [token_count - 1]->size = length;
|
|
|
|
for (offset += length; (buffer [offset] != '\0') && (character_is_blank (buffer [offset]) == true); ++offset);
|
|
|
|
token_value [token_count - 1]->data = & buffer [offset];
|
|
token_value [token_count - 1]->type = token_alias;
|
|
|
|
for (length = 0; (buffer [offset + length] != '\0') && (character_is_blank (buffer [offset + length]) == false); ++length);
|
|
|
|
token_value [token_count - 1]->size = length;
|
|
|
|
return (offset + length + 1);
|
|
}
|
|
|
|
static int parse_macro (char * buffer) {
|
|
int offset;
|
|
int length;
|
|
|
|
for (offset = 6; (buffer [offset] != '\0') && (character_is_blank (buffer [offset]) == true); ++offset);
|
|
|
|
++token_count;
|
|
|
|
token_array [token_count - 1] = arena_add (sizeof (* * token_array));
|
|
token_value [token_count - 1] = arena_add (sizeof (* * token_value));
|
|
|
|
token_array [token_count - 1]->data = & buffer [offset];
|
|
token_array [token_count - 1]->type = token_macro;
|
|
|
|
for (length = 0; (buffer [offset + length] != '\0') && (character_is_blank (buffer [offset + length]) == false); ++length);
|
|
|
|
token_array [token_count - 1]->size = length;
|
|
|
|
for (offset += length; (buffer [offset] != '\0') && (character_is_blank (buffer [offset]) == true); ++offset);
|
|
|
|
token_value [token_count - 1]->data = & buffer [offset];
|
|
token_value [token_count - 1]->type = token_macro;
|
|
|
|
for (length = 0; buffer [offset + length] != '\0'; ++length) {
|
|
if (string_compare_limit (& buffer [offset + length], "end macro", 9) == true) {
|
|
break;
|
|
}
|
|
}
|
|
|
|
token_value [token_count - 1]->size = length;
|
|
|
|
return (offset + length + 9 + 1);
|
|
}
|
|
|
|
static int parse_scope (char * buffer) {
|
|
int offset;
|
|
int length;
|
|
int select;
|
|
|
|
for (offset = 6; (buffer [offset] != '\0') && (character_is_blank (buffer [offset]) == true); ++offset);
|
|
|
|
for (length = 0; (buffer [offset + length] != '\0') && (character_is_blank (buffer [offset + length]) == false); ++length);
|
|
|
|
for (select = 0; select < token_count; ++select) {
|
|
if ((string_compare_limit (token_array [select]->data, & buffer [offset], length) == true) && (token_array [select]->type == token_use)) {
|
|
for (; buffer [offset + length] != '\0'; ++length) {
|
|
if (string_compare_limit (& buffer [offset + length], "end scope", 9) == true) {
|
|
return (offset + length + 9 + 1);
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
return (offset + length + 1);
|
|
}
|
|
|
|
static int parse_unscope (char * buffer) {
|
|
int offset;
|
|
int length;
|
|
int select;
|
|
|
|
for (offset = 8; (buffer [offset] != '\0') && (character_is_blank (buffer [offset]) == true); ++offset);
|
|
|
|
for (length = 0; (buffer [offset + length] != '\0') && (character_is_blank (buffer [offset + length]) == false); ++length);
|
|
|
|
for (select = 0; select < token_count; ++select) {
|
|
if ((string_compare_limit (token_array [select]->data, & buffer [offset], length) == true) && (token_array [select]->type == token_use)) {
|
|
return (offset + length + 1);
|
|
}
|
|
}
|
|
|
|
for (; buffer [offset + length] != '\0'; ++length) {
|
|
if (string_compare_limit (& buffer [offset + length], "end unscope", 11) == true) {
|
|
return (offset + length + 11 + 1);
|
|
}
|
|
}
|
|
|
|
return (offset + length + 1);
|
|
}
|
|
|
|
static void parse_buffer (char * buffer, int limit) {
|
|
int offset;
|
|
int length;
|
|
int select;
|
|
|
|
for (length = offset = 0; (buffer [offset] != '\0') && (offset != limit); offset += length) {
|
|
for (select = 0; select < token_default; ++select) {
|
|
length = string_length (token_key [select]);
|
|
if (string_compare_limit (& buffer [offset], token_key [select], length) == true) {
|
|
break;
|
|
}
|
|
}
|
|
|
|
if (string_compare_limit (& buffer [offset], "end macro", 9) == true) {
|
|
length = 9;
|
|
continue;
|
|
}
|
|
|
|
if (string_compare_limit (& buffer [offset], "end scope", 9) == true) {
|
|
length = 9;
|
|
continue;
|
|
}
|
|
|
|
if (string_compare_limit (& buffer [offset], "end unscope", 11) == true) {
|
|
length = 11;
|
|
continue;
|
|
}
|
|
|
|
length = parse_key [select] (& buffer [offset]);
|
|
}
|
|
}
|
|
|
|
//~int main (int argc, char * * argv) {
|
|
//~int offset;
|
|
//~char * buffer;
|
|
|
|
//~if (argc != 2) return (2);
|
|
|
|
//~buffer = arena_add_file (argv [1], string_length (argv [1]));
|
|
|
|
//~parse_buffer (buffer, -1);
|
|
|
|
//~for (offset = 0; offset < token_count; ++offset) {
|
|
//~print ("/6entity %s:: %i == ", token_key [token_array [offset]->type], token_array [offset]->size);
|
|
//~output (token_array [offset]->data, token_array [offset]->size);
|
|
//~if ((token_array [offset]->type == token_alias) || (token_array [offset]->type == token_macro)) {
|
|
//~print (" -> ");
|
|
//~output (token_value [offset]->data, token_value [offset]->size);
|
|
//~}
|
|
//~print ("/-\n");
|
|
//~}
|
|
|
|
//~return (log_success);
|
|
//~}
|