#include <stdio.h>
#include <string.h>
#include <errno.h>

#include "common.h"
#include "lexer.h"
#include "parser.h"
#include "eval.h"

// TODO: the lexer, parser, and eval functions should return -1 on a fatal
// error and 1 on a non-fatal error

#define READ_BUF_CAP 512

lexer_t lexer = NULL;
parser_t parser = NULL;
// eval_t eval = NULL;

int main(void) {
    int ret = 1;
    toklist_t tokens = {0};
    ast_t ast_root = {0};

    char *filename = "files/test1.lisp";
    FILE *fp = fopen(filename, "r");
    if(!fp) {
        err("fopen: %s: %s", filename, strerror(errno));
        goto fail;
    }

    toklist_reset(&tokens);

    lexer = lexer_create();
    if(!lexer) {
        err("lexer_create: failed");
        fclose(fp);
        goto fail;
    }

    // Tokenize the input file in READ_BUF_CAP-sized chunks.
    char buf[READ_BUF_CAP];
    size_t bytes = 0;
    while((bytes = fread(buf, sizeof(char), READ_BUF_CAP, fp))) {
        if(lexer_tokenize(lexer, &tokens, buf, bytes)) {
            toklist_reset(&tokens);
            fclose(fp);
            goto fail;
        }
        if(bytes < READ_BUF_CAP)
            break;
    }
    if(!lexer_has_finished(lexer)) {
        err("tokenization is not complete");
    }
    fclose(fp);

    ast_reset(&ast_root);

    parser = parser_create();
    if(!parser) {
        err("parser_create: failed");
        goto fail;
    }
    if(parser_parse_toklist(parser, &tokens, &ast_root)) {
        err("parser_parse_toklist: failed");
        goto fail;
    }

    toklist_print(&tokens);
    toklist_reset(&tokens);

    ast_print(&ast_root);
    ast_reset(&ast_root);

    // eval = eval_create();
    // if(!eval) {
    //     err("eval_create: failed");
    //     goto fail;
    // }
    // if(eval_ast(eval, &ast_root)) {
    //     err("eval_ast: failed");
    //     goto fail;
    // }

    ret = 0;

fail:
    // eval_destroy(eval);
    parser_destroy(parser);
    lexer_destroy(lexer);
    return ret;
}
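
/*
 * A minimal sketch of how the TODO above could be handled at a call site,
 * assuming lexer_tokenize is changed to return -1 on a fatal error and 1 on
 * a non-fatal one (not implemented yet, so this stays in a comment):
 *
 *     int rc = lexer_tokenize(lexer, &tokens, buf, bytes);
 *     if(rc < 0) {
 *         // fatal: abandon this file
 *         toklist_reset(&tokens);
 *         fclose(fp);
 *         goto fail;
 *     } else if(rc > 0) {
 *         // non-fatal: report it and keep consuming input
 *         err("lexer_tokenize: recoverable error");
 *     }
 */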