#include <stdio.h>
#include <string.h>
#include <errno.h>

#include "common.h"
#include "lexer.h"
// #include "ast.h"
// #include "eval.h"

#define READ_BUF_CAP 512
#define DEFAULT_TOKENS_CAP 8192 // make it a command line arg

lexer_t lexer = NULL;
// ast_t ast = NULL;
// eval_t evaluator = NULL;

int main(void)
{
    int ret = 1; // failure by default; set to 0 only on full success
    char *filename = "files/test1.lisp";

    lexer = lexer_create(DEFAULT_TOKENS_CAP);
    if(!lexer) {
        err("lexer_create: failed");
        goto fail;
    }

    // tokenize input
    FILE *fp = fopen(filename, "r");
    if(!fp) {
        err("fopen: %s: %s", filename, strerror(errno));
        goto fail;
    }

    // read the file in READ_BUF_CAP-sized chunks and feed each chunk to the lexer;
    // a short read means we hit end of file
    char buf[READ_BUF_CAP];
    size_t bytes = 0;
    while((bytes = fread(buf, sizeof(char), READ_BUF_CAP, fp))) {
        if(lexer_tokenize(lexer, buf, bytes)) {
            fclose(fp);
            goto fail;
        }
        if(bytes < READ_BUF_CAP)
            break;
    }
    fclose(fp);

    lexer_print_tokens(lexer);

    // --------------

    // ast = ast_create();
    // if(!ast) {
    //     err("ast_create: failed");
    //     goto fail;
    // }
    // if(ast_parse_lexer(ast, lexer)) {
    //     err("ast_parse_lexer: failed");
    //     goto fail;
    // }

    // evaluator = evaluator_create();
    // if(!evaluator) {
    //     err("evaluator_create: failed");
    //     goto fail;
    // }
    // if(evaluator_eval_ast(evaluator, ast)) {
    //     err("evaluator_eval_ast: failed");
    //     goto fail;
    // }

    ret = 0;
fail:
    // evaluator_destroy(evaluator);
    // ast_destroy(ast);
    lexer_destroy(lexer);

    return ret;
}