author     kartofen <mladenovnasko0@gmail.com>    2024-08-25 12:30:48 +0300
committer  kartofen <mladenovnasko0@gmail.com>    2024-08-25 12:30:48 +0300
commit     54f071ac7d47ef515a3f6a4db9e83f2f9aca3c8c (patch)
tree       63e9958c5961f32d191c580f6960b6f9eba02f20 /src/main.c
parent     df95e5bfca1c5e723b39f25f32401db8f9ebf6fe (diff)
lambda nearly done
Diffstat (limited to 'src/main.c')
-rw-r--r--  src/main.c  195
1 file changed, 146 insertions, 49 deletions
diff --git a/src/main.c b/src/main.c
index 195e213..8a07041 100644
--- a/src/main.c
+++ b/src/main.c
@@ -33,28 +33,27 @@
ERR_NZ(lexer_token_next(lexer, &token), r, \
die("Can't get next token")); \
print_token(&token); \
- } while(0);
+ } while(0)
#endif
-#define TOKEN_SKIP(ttype, fail) do { \
- if(token.type == ttype) { \
- TOKEN_NEXT(); \
- break; \
- } \
+#define TOKEN_ASSERT(ttype, fail) \
+ if(token.type != ttype) { \
err("Expected token '%s', not '%s'", \
token_type_string[ttype], \
token_type_string[token.type]); \
fail; \
+ }
+
+#define TOKEN_SKIP(ttype, fail) do { \
+ TOKEN_ASSERT(ttype, fail) \
+ else { \
+ TOKEN_NEXT(); \
+ } \
} while(0)
-#define TOKEN_MATCH(ttype, fail) do { \
- TOKEN_NEXT(); \
- if(token.type == ttype) break; \
- \
- err("Expected token '%s', not '%s'", \
- token_type_string[ttype], \
- token_type_string[token.type]); \
- fail; \
+#define TOKEN_MATCH(ttype, fail) do { \
+ TOKEN_NEXT(); \
+ TOKEN_ASSERT(ttype, fail); \
} while(0)
#define HAS_ENOUGH_ARGS(proc, type, argc, fail) \
@@ -88,14 +87,18 @@ env_t env = ENV_EMPTY;
struct token token;
value_t apply(value_t proc, size_t argc, value_t *argv);
-value_t evaluate(void);
+
+value_t evaluate_expr(void);
value_t evaluate_sexp(void);
value_t evaluate_id(void);
value_t evaluate_lambda(void);
value_t evaluate_define(void);
-value_t evaluate_quote(void);
+
+value_t quote_expr(void);
value_t quote_sexp(void);
+size_t toklist_expr(struct token **toklist);
+
static void destroy_env(char *key, value_t value)
{
(void)key;
@@ -121,6 +124,7 @@ int main(void)
(void *)builtin_proc_name_list[i],
(void *)proc_value, NULL, NULL);
}
+
char *filename = "files/test.l";
FILE *fp = fopen(filename, "r");
@@ -131,10 +135,10 @@ int main(void)
lexer = lexer_create(fp);
while(lexer_token_next(lexer, &token) == 0) {
- value_t val = evaluate();
+ value_t val = evaluate_expr();
- #ifdef FDEBUG
- print_value();
+ #ifdef DEBUG
+ print_value(val);
#else
char buf[256] = {0};
value_string(val, LEN(buf), buf);
@@ -148,7 +152,7 @@ int main(void)
fclose(fp);
env_destroy(env);
- env_destroy(user_env);
+ // env_destroy(user_env);
return 0;
}
@@ -160,6 +164,11 @@ value_t apply(value_t proc, size_t argc, value_t *argv)
switch(proc->type) {
case VALUE_PROC:
HAS_ENOUGH_ARGS(proc, proc, argc, return VALUE_EMPTY);
+ // create new env
+ // evaluate body
+ // destroy new env
+ // switch back to the previous env
+
NOT_IMPLEMENTED();
return VALUE_EMPTY;
case VALUE_PROC_BUILTIN:
@@ -179,12 +188,9 @@ value_t apply(value_t proc, size_t argc, value_t *argv)
#define SPECIAL_FORM(ret, argc, fn, fail) do { \
IS_NOT_FIRST(argc, fail); \
ERR_Z(ret = fn, fail); \
- TOKEN_MATCH(TOKEN_RP, \
- ret = VALUE_EMPTY; \
- fail); \
} while(0)
-value_t evaluate(void)
+value_t evaluate_expr(void)
{
value_t ret = VALUE_EMPTY;
@@ -201,7 +207,7 @@ value_t evaluate(void)
break;
case TOKEN_QUOTE:
TOKEN_NEXT();
- ERR_Z(ret = evaluate_quote(), goto exit);
+ ERR_Z(ret = quote_expr(), goto exit);
break;
default:
err("Did not exptect token '%s'", token_type_string[token.type]);
@@ -236,10 +242,14 @@ value_t evaluate_sexp(void)
goto exit;
case TOKEN_QUOTE_FORM:
TOKEN_NEXT();
- SPECIAL_FORM(ret, argc, evaluate_quote(), goto exit);
+ SPECIAL_FORM(ret, argc, quote_expr(), goto exit);
+ TOKEN_MATCH(TOKEN_RP,
+ value_destroy(ret);
+ ret = VALUE_EMPTY;
+ goto exit);
goto exit;
default:
- ERR_Z(body[argc] = evaluate(), goto exit);
+ ERR_Z(body[argc] = evaluate_expr(), goto exit);
break;
}
@@ -283,7 +293,56 @@ value_t evaluate_id(void)
value_t evaluate_lambda(void)
{
- NOT_IMPLEMENTED();
+ value_t ret = VALUE_EMPTY;
+
+ value_t args[32] = {VALUE_EMPTY};
+ size_t argc = 0;
+ value_t *arg_keys = NULL;
+
+ struct token *body = NULL;
+ size_t body_len = 0;
+
+ TOKEN_SKIP(TOKEN_LAMBDA, goto fail);
+ TOKEN_SKIP(TOKEN_LP, goto fail);
+
+ while(token.type != TOKEN_RP)
+ {
+ if(argc >= LEN(args)) {
+ err("Too many arguments");
+ goto fail;
+ }
+
+ if(token.type != TOKEN_ID) {
+ err("Token '%s' not expected in lambda args", token_type_string[token.type]);
+ goto fail;
+ }
+
+ ERR_Z(args[argc++] = value_from_token(&token), goto fail);
+
+ TOKEN_NEXT();
+ }
+
+ ERR_Z(body_len = toklist_expr(&body), goto fail);
+
+ TOKEN_MATCH(TOKEN_RP, goto fail);
+
+ arg_keys = calloc(argc, sizeof(*arg_keys));
+ memcpy(arg_keys, args, argc * sizeof(*arg_keys));
+
+ struct proc proc = {env_copy(env), arg_keys, argc, body, body_len};
+ ERR_Z(ret = value_create(VALUE_PROC, &proc), goto fail);
+
+ return ret;
+
+fail:
+ err("Procedure creation failed");
+ for(size_t i = 0; i < argc; i++) value_destroy(args[i]);
+ if(body) {
+ for(size_t i = 0; i < body_len; i++) token_dealloc(&body[i]);
+ free(body);
+ }
+ if(arg_keys) free(arg_keys);
+ return VALUE_EMPTY;
}
#define STR_ALLOC_COPY(dest, str) do { \
@@ -295,14 +354,15 @@ value_t evaluate_lambda(void)
value_t evaluate_define(void)
{
// TODO: don't alloc when the key is the same
-
- value_t ret = VALUE_EMPTY;
- char *key = NULL;
-
- TOKEN_SKIP(TOKEN_DEFINE, goto exit);
+ char *key = NULL;
// only in the outside environment
- if(env->parent != user_env) goto exit;
+ if(env->parent != user_env) {
+ err("define can only be called in the outermost environement");
+ goto fail;
+ }
+
+ TOKEN_SKIP(TOKEN_DEFINE, goto fail);
switch(token.type)
{
@@ -311,34 +371,40 @@ value_t evaluate_define(void)
break;
default:
err("Did not exptect token '%s'", token_type_string[token.type]);
- goto exit;
+ goto fail;
}
TOKEN_NEXT();
- ERR_Z(ret = evaluate(), goto exit);
- TOKEN_MATCH(TOKEN_RP, goto exit);
+ value_t val = VALUE_EMPTY;
+ ERR_Z(val = evaluate_expr(), goto fail);
+
+ TOKEN_MATCH(TOKEN_RP,
+ value_destroy(val);
+ goto fail);
value_t prevval = VALUE_EMPTY;
char *prevkey = NULL;
-
+
ERR_NZ(
- hashtable_insert(user_env->table, (void *)key, (void *)ret,
+ hashtable_insert(user_env->table, (void *)key, (void *)val,
(void**)&prevkey, (void **)&prevval),
r, {
err("Couldn't insert symbol into the hashtable due to %s", strerror(r));
- value_destroy(ret);
- free(key);
- return VALUE_EMPTY;
+ value_destroy(val); // the copy
+ goto fail;
});
if(prevkey) free(prevkey);
value_destroy(prevval);
-exit:
+ return VALUE_EMPTY;
+
+fail:
+ if(key) free(key);
return VALUE_EMPTY;
}
-value_t evaluate_quote(void)
+value_t quote_expr(void)
{
value_t ret = VALUE_EMPTY;
@@ -362,7 +428,7 @@ value_t evaluate_quote(void)
break;
case TOKEN_UNQUOTE:
TOKEN_SKIP(TOKEN_UNQUOTE, goto exit);
- ERR_Z(ret = evaluate(), goto exit);
+ ERR_Z(ret = evaluate_expr(), goto exit);
break;
default:
err("Did not exptect token '%s'", token_type_string[token.type]);
@@ -377,7 +443,7 @@ value_t quote_sexp(void)
value_t ret = VALUE_EMPTY;
value_t left = VALUE_EMPTY;
value_t right = VALUE_EMPTY;
- value_t nil = VALUE_EMPTY;
+ value_t nil = VALUE_EMPTY;
// TODO: make global nil and copy it
ERR_Z(nil = value_create(VALUE_NIL, NULL), goto exit);
@@ -390,14 +456,14 @@ value_t quote_sexp(void)
goto exit;
}
- ERR_Z(left = evaluate_quote(), goto exit);
+ ERR_Z(left = quote_expr(), goto exit);
TOKEN_NEXT();
if(token.type == TOKEN_DOT)
{
// Parse cons
TOKEN_NEXT();
- ERR_Z(right = evaluate_quote(), return VALUE_EMPTY);
+ ERR_Z(right = quote_expr(), return VALUE_EMPTY);
TOKEN_MATCH(TOKEN_RP, return VALUE_EMPTY);
struct cons cons = {value_copy(left), value_copy(right)};
@@ -411,7 +477,7 @@ value_t quote_sexp(void)
while(token.type != TOKEN_RP)
{
value_t new = VALUE_EMPTY;
- ERR_Z(new = evaluate_quote(),
+ ERR_Z(new = quote_expr(),
goto exit);
value_t new_cons = VALUE_EMPTY;
@@ -434,3 +500,34 @@ exit:
value_destroy(nil);
return ret;
}
+
+size_t toklist_expr(struct token **toklist)
+{
+ struct token tokens[256];
+ size_t tokens_len = 0;
+
+ size_t depth = 0;
+ do {
+ TOKEN_NEXT();
+
+ if(tokens_len >= LEN(tokens)) {
+ err("Too many tokens in expr");
+ goto fail;
+ }
+
+ if(token.type == TOKEN_LP) depth++;
+ else if(token.type == TOKEN_RP) depth--;
+
+ token_clone(&tokens[tokens_len++], &token);
+
+ } while(depth > 0);
+
+ *toklist = calloc(tokens_len, sizeof(*tokens));
+ memcpy(*toklist, tokens, tokens_len * sizeof(*tokens));
+
+ return tokens_len;
+
+fail:
+ for(size_t i = 0; i < tokens_len; i++) token_dealloc(&tokens[i]);
+ return 0;
+}
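
Below is a standalone sketch, not part of this commit, of the environment handling outlined by the comments added in apply() above (create new env, evaluate body, destroy new env, switch back to the previous env). It deliberately uses its own minimal types and names (env, binding, env_push, eval_body, apply_sketch) rather than the repository's env_t/value_t API, so it only illustrates the save/bind/evaluate/restore pattern, not the actual implementation.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

struct binding { char *key; int value; struct binding *next; };
struct env { struct binding *bindings; struct env *parent; };

static struct env *current_env; /* analogue of the interpreter's global env */

static struct env *env_push(struct env *parent)
{
    struct env *e = calloc(1, sizeof(*e));
    e->parent = parent;
    return e;
}

static void env_bind(struct env *e, const char *key, int value)
{
    struct binding *b = calloc(1, sizeof(*b));
    b->key = malloc(strlen(key) + 1);
    strcpy(b->key, key);
    b->value = value;
    b->next = e->bindings;
    e->bindings = b;
}

static int env_lookup(struct env *e, const char *key, int *out)
{
    for(; e; e = e->parent)
        for(struct binding *b = e->bindings; b; b = b->next)
            if(strcmp(b->key, key) == 0) { *out = b->value; return 0; }
    return -1;
}

static void env_pop(struct env *e)
{
    for(struct binding *b = e->bindings; b;) {
        struct binding *next = b->next;
        free(b->key);
        free(b);
        b = next;
    }
    free(e);
}

/* Stand-in for re-running a stored body token list: just look up one name. */
static int eval_body(const char *name)
{
    int v = 0;
    if(env_lookup(current_env, name, &v) != 0)
        fprintf(stderr, "unbound: %s\n", name);
    return v;
}

/* The pattern from the apply() comments: new env, evaluate, restore. */
static int apply_sketch(const char **params, const int *args, size_t argc,
                        const char *body_name)
{
    struct env *saved = current_env;         /* remember the previous env */
    struct env *call_env = env_push(saved);  /* create new env            */

    for(size_t i = 0; i < argc; i++)         /* bind params to arguments  */
        env_bind(call_env, params[i], args[i]);

    current_env = call_env;                  /* evaluate body in new env  */
    int result = eval_body(body_name);

    env_pop(call_env);                       /* destroy new env           */
    current_env = saved;                     /* switch back               */
    return result;
}

int main(void)
{
    current_env = env_push(NULL);
    env_bind(current_env, "x", 1);

    const char *params[] = { "x" };
    int args[] = { 42 };
    printf("%d\n", apply_sketch(params, args, 1, "x")); /* prints 42 */

    int outer = 0;
    env_lookup(current_env, "x", &outer);
    printf("%d\n", outer); /* outer x still bound to 1 */
    return 0;
}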