#include <stdio.h>
#include <string.h>
#include <errno.h>

#include "common.h"
#include "lexer.h"
// #include "ast.h"
// #include "eval.h"

#define READ_BUF_CAP       512
#define DEFAULT_TOKENS_CAP 8192 // TODO: make this a command-line argument

lexer_t lexer = NULL;
// ast_t ast = NULL;
// eval_t evaluator = NULL;
int main(void)
{
    int ret = 1;
    const char *filename = "files/test1.lisp";

    lexer = lexer_create(DEFAULT_TOKENS_CAP);
    if(!lexer) {
        err("lexer_create: failed");
        goto fail;
    }

    // tokenize the input file in READ_BUF_CAP-sized chunks
    FILE *fp = fopen(filename, "r");
    if(!fp) {
        err("fopen: %s: %s", filename, strerror(errno));
        goto fail;
    }

    char buf[READ_BUF_CAP];
    size_t bytes = 0;
    while((bytes = fread(buf, sizeof(char), READ_BUF_CAP, fp))) {
        if(lexer_tokenize(lexer, buf, bytes)) {
            fclose(fp);
            goto fail;
        }
        if(bytes < READ_BUF_CAP) break; // short read: EOF (or read error) reached
    }
    fclose(fp);

    lexer_print_tokens(lexer);

    // --------------
    // ast = ast_create();
    // if(!ast) {
    //     err("ast_create: failed");
    //     goto fail;
    // }
    // if(ast_parse_lexer(ast, lexer)) {
    //     err("ast_parse_lexer: failed");
    //     goto fail;
    // }
    // evaluator = evaluator_create();
    // if(!evaluator) {
    //     err("evaluator_create: failed");
    //     goto fail;
    // }
    // if(evaluator_eval_ast(evaluator, ast)) {
    //     err("evaluator_eval_ast: failed");
    //     goto fail;
    // }

    ret = 0;
fail:
    // evaluator_destroy(evaluator);
    // ast_destroy(ast);
    lexer_destroy(lexer);
    return ret;
}