author     kartofen <mladenovnasko0@gmail.com>  2024-08-25 15:46:56 +0300
committer  kartofen <mladenovnasko0@gmail.com>  2024-08-25 15:46:56 +0300
commit     c740ece288c3fb6f858a911222fd63caf95c4eea (patch)
tree       860b3e15260b9f1cf6abc3c1f26d586c4ffdcd85 /src
parent     54f071ac7d47ef515a3f6a4db9e83f2f9aca3c8c (diff)
lambdas work, closures work, everything works
Diffstat (limited to 'src')
-rw-r--r--  src/env.c     7
-rw-r--r--  src/env.h     1
-rw-r--r--  src/lexer.c  14
-rw-r--r--  src/main.c  400
-rw-r--r--  src/value.c  19
5 files changed, 278 insertions, 163 deletions
diff --git a/src/env.c b/src/env.c
index fcf8b49..a530a36 100644
--- a/src/env.c
+++ b/src/env.c
@@ -64,11 +64,10 @@ void env_destroy(env_t env)
env_t env_copy(env_t env)
{
- env->refs++;
+ if(env == ENV_EMPTY) return ENV_EMPTY;
- if(env->parent) {
- env_copy(env->parent);
- }
+ env->refs++;
+ env_copy(env->parent);
return env;
}
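
Note: env_copy now bumps the refcount on every link of the chain and bottoms out
on ENV_EMPTY, so holding a copy of a child environment keeps its parents alive.
A minimal usage sketch mirroring the pattern apply_lambda uses further down in
this commit (closure_env is a stand-in for whatever the caller captured; the
exact release behaviour of env_destroy is assumed, not shown here):

    /* Sketch: take a reference on the captured chain, layer a call
     * environment on top of it, then release it when done. */
    env_t call_env = env_create(env_copy(closure_env), destroy_env);
    /* ... bind arguments into call_env->table, evaluate the body ... */
    env_destroy(call_env);  /* assumed to drop the references taken above */
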
diff --git a/src/env.h b/src/env.h
index d6e2ad3..6f3e169 100644
--- a/src/env.h
+++ b/src/env.h
@@ -24,5 +24,4 @@ env_t env_create(env_t parent, env_destroy_func destroy_func);
env_t env_copy(env_t env);
void env_destroy(env_t env);
-
#endif
diff --git a/src/lexer.c b/src/lexer.c
index b8897da..77407a9 100644
--- a/src/lexer.c
+++ b/src/lexer.c
@@ -6,10 +6,12 @@
#include "lexer.h"
#define CH(lexer) (lexer)->str[(lexer)->str_idx]
-#define TOKEN_SEPARATOR_TABLE(X, l) \
- X(('(' == CH(l)), on_separator(l, TOKEN_LP)) \
- X((')' == CH(l)), on_separator(l, TOKEN_RP)) \
- X(isspace(CH(l)), on_separator(l, TOKEN_NONE)) \
+#define TOKEN_SEPARATOR_TABLE(X, l) \
+ X(('(' == CH(l)), on_separator(l, TOKEN_LP)) \
+ X((')' == CH(l)), on_separator(l, TOKEN_RP)) \
+ X(('\''== CH(l)), on_separator(l, TOKEN_QUOTE)) \
+ X((',' == CH(l)), on_separator(l, TOKEN_UNQUOTE)) \
+ X(isspace(CH(l)), on_separator(l, TOKEN_NONE)) \
X(TABLE_ELSE, acc_add_char(l, CH(l)))
#define SET_TYPE(lexer, ttype) (lexer)->token.type = (ttype)
@@ -25,8 +27,6 @@
#define TOKEN_SPECIALS_TABLE(X) \
X(TOKEN_DOT, ".") \
- X(TOKEN_QUOTE, "'") \
- X(TOKEN_UNQUOTE, ",") \
X(TOKEN_LAMBDA, "lambda") \
X(TOKEN_DEFINE, "define") \
X(TOKEN_QUOTE_FORM, "quote")
@@ -34,6 +34,8 @@
#define TOKEN_VALUE_STRING_TABLE(X, tvalue) \
X(TOKEN_LP, "(") \
X(TOKEN_RP, ")") \
+ X(TOKEN_QUOTE, "'") \
+ X(TOKEN_UNQUOTE, ",") \
X(TOKEN_ID, "%s", tvalue.id) \
X(TOKEN_STR, "%s", tvalue.str) \
X(TOKEN_INT, "%d", tvalue.num) \
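
Note: with ' and , moved into TOKEN_SEPARATOR_TABLE they terminate the current
token on their own, so quote and unquote no longer need surrounding whitespace,
while TOKEN_VALUE_STRING_TABLE keeps their printable forms. A rough driver
sketch using only lexer calls that already appear in main.c (the FILE *fp and
its contents are hypothetical):

    /* For an input such as '(a ,b) this should emit the tokens
     * TOKEN_QUOTE, TOKEN_LP, TOKEN_ID "a", TOKEN_UNQUOTE, TOKEN_ID "b",
     * TOKEN_RP, one per loop iteration. */
    lexer_t lexer = lexer_create(fp);
    struct token token;
    while(lexer_token_next(lexer, &token) == 0)
        print_token(&token);
    lexer_destroy(lexer);
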
diff --git a/src/main.c b/src/main.c
index 8a07041..f4d1c29 100644
--- a/src/main.c
+++ b/src/main.c
@@ -24,44 +24,31 @@
// - Check for functions not wrapped in ERR_* macro
// - Add more error messages
-#ifndef DEBUG
-#define TOKEN_NEXT() \
- ERR_NZ(lexer_token_next(lexer, &token), r, \
- die("Can't get next token"))
-#else
-#define TOKEN_NEXT() do { \
- ERR_NZ(lexer_token_next(lexer, &token), r, \
- die("Can't get next token")); \
- print_token(&token); \
- } while(0)
-#endif
+#define TOKEN(tctx) (tctx)->token
-#define TOKEN_ASSERT(ttype, fail) \
- if(token.type != ttype) { \
+#define TOKEN_NEXT(tctx) \
+ ERR_Z(next_token(tctx), die("Can't get next token"))
+
+#define TOKEN_ASSERT(token, ttype, fail) \
+ if(token->type != ttype) { \
err("Expected token '%s', not '%s'", \
token_type_string[ttype], \
- token_type_string[token.type]); \
+ token_type_string[token->type]); \
fail; \
}
-#define TOKEN_SKIP(ttype, fail) do { \
- TOKEN_ASSERT(ttype, fail) \
- else { \
- TOKEN_NEXT(); \
- } \
+#define TOKEN_SKIP(tctx, ttype, fail) do { \
+ TOKEN_ASSERT(TOKEN(tctx), ttype, fail) \
+ else { \
+ TOKEN_NEXT(tctx); \
+ } \
} while(0)
-#define TOKEN_MATCH(ttype, fail) do { \
- TOKEN_NEXT(); \
- TOKEN_ASSERT(ttype, fail); \
+#define TOKEN_MATCH(tctx, ttype, fail) do { \
+ TOKEN_NEXT(tctx); \
+ TOKEN_ASSERT(TOKEN(tctx), ttype, fail); \
} while(0)
-#define HAS_ENOUGH_ARGS(proc, type, argc, fail) \
- if(argc != proc->value.type.argc) { \
- err("Wrong number of arguemnts, expected %zu, but got %zu", \
- proc->value.type.argc, argc); \
- fail; \
- }
#define NOT_IMPLEMENTED() die("Not Implemented. ABORTING")
@@ -80,24 +67,86 @@ static void print_value(value_t value)
value_type_string[value->type] : "VALUE", buf);
}
-static lexer_t lexer = LEXER_EMPTY;
-static env_t user_env = ENV_EMPTY;
+// token context
+struct tctx {
+ enum tctx_type {
+ TCTX_LEXER,
+ TCTX_TOKLIST
+ } type;
+
+ struct token *token;
+
+ union {
+ struct lex_ctx {
+ lexer_t lexer;
+ struct token token;
+ } lex_ctx;
+
+ struct tok_ctx {
+ struct token *list;
+ size_t len;
+ size_t idx;
+ } tok_ctx;
+ } value;
+};
+
+static void tctx_init_lexer(struct tctx *tctx, lexer_t lexer)
+{
+ tctx->type = TCTX_LEXER;
+ tctx->value.lex_ctx.lexer = lexer;
+ tctx->token = &tctx->value.lex_ctx.token;
+}
+
+static void tctx_init_toklist(struct tctx *tctx, struct token *toklist, size_t len)
+{
+ tctx->type = TCTX_TOKLIST;
+ tctx->value.tok_ctx.list = toklist;
+ tctx->value.tok_ctx.len = len;
+ tctx->value.tok_ctx.idx = 0;
+ tctx->token = NULL;
+}
+
+static struct token *next_token(struct tctx *tctx)
+{
+ switch(tctx->type) {
+ case TCTX_LEXER: ;
+ struct lex_ctx *l = &tctx->value.lex_ctx;
+ ERR_NZ(lexer_token_next(l->lexer, &l->token), _r, goto fail);
+ // tctx->token should already point to l->token
+ break;
+ case TCTX_TOKLIST: ;
+ struct tok_ctx *t = &tctx->value.tok_ctx;
+ if(t->idx == t->len) goto fail;
+ tctx->token = &t->list[t->idx++];
+ break;
+ }
+
+ #ifdef DEBUG
+ print_token(tctx->token);
+ #endif
+
+ return tctx->token;
+fail:
+ tctx->token = NULL;
+ return NULL;
+}
-env_t env = ENV_EMPTY;
-struct token token;
value_t apply(value_t proc, size_t argc, value_t *argv);
-value_t evaluate_expr(void);
-value_t evaluate_sexp(void);
-value_t evaluate_id(void);
-value_t evaluate_lambda(void);
-value_t evaluate_define(void);
+value_t evaluate_expr(env_t env, struct tctx *tctx);
+value_t evaluate_sexp(env_t env, struct tctx *tctx);
+
+value_t evaluate_id (env_t env, struct tctx *tctx);
+value_t evaluate_lambda(env_t env, struct tctx *tctx);
+value_t evaluate_define(env_t env, struct tctx *tctx);
-value_t quote_expr(void);
-value_t quote_sexp(void);
+value_t quote_expr(env_t env, struct tctx *tctx);
+value_t quote_sexp(env_t env, struct tctx *tctx);
-size_t toklist_expr(struct token **toklist);
+size_t toklist_expr(struct tctx *tctx, struct token **toklist);
+
+static env_t global_env = ENV_EMPTY;
static void destroy_env(char *key, value_t value)
{
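
Note: struct tctx lets the same evaluator pull tokens either from the live lexer
or from a saved token list, which is what makes replaying a stored lambda body
possible. A small sketch of the token-list mode, using only names introduced in
this file (toklist and toklist_len are placeholders for a cloned token array):

    /* Sketch: walk a previously cloned token array to the end. */
    struct tctx tctx = {0};
    tctx_init_toklist(&tctx, toklist, toklist_len);
    while(next_token(&tctx) != NULL)   /* returns NULL once the list is exhausted */
        print_token(TOKEN(&tctx));
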
@@ -105,7 +154,7 @@ static void destroy_env(char *key, value_t value)
value_destroy(value);
}
-static void destroy_user_env(char *key, value_t value)
+static void destroy_global_env(char *key, value_t value)
{
free(key);
value_destroy(value);
@@ -113,14 +162,14 @@ static void destroy_user_env(char *key, value_t value)
int main(void)
{
- user_env = env_create(ENV_EMPTY, destroy_user_env);
- env = env_create(user_env, destroy_env);
+ env_t builtin_env = env_create(ENV_EMPTY, destroy_env);
+ global_env = env_create(builtin_env, destroy_global_env);
for(size_t i = 0; i < BUILTIN_PROCEDURES; i++) {
value_t proc_value = value_create(
VALUE_PROC_BUILTIN,
(void *)&builtin_proc_descriptions[i]);
- hashtable_insert(env->table,
+ hashtable_insert(builtin_env->table,
(void *)builtin_proc_name_list[i],
(void *)proc_value, NULL, NULL);
}
@@ -132,12 +181,17 @@ int main(void)
die("fopen: %s", strerror(errno));
}
- lexer = lexer_create(fp);
+ lexer_t lexer = lexer_create(fp);
- while(lexer_token_next(lexer, &token) == 0) {
- value_t val = evaluate_expr();
+ struct tctx tctx = {0};
+ tctx_init_lexer(&tctx, lexer);
+
+ while(next_token(&tctx))
+ {
+ value_t val = evaluate_expr(ENV_EMPTY, &tctx);
#ifdef DEBUG
+ info("Line %zu evaluates to:", lexer->line);
print_value(val);
#else
char buf[256] = {0};
@@ -150,12 +204,42 @@ int main(void)
lexer_destroy(lexer);
fclose(fp);
-
- env_destroy(env);
- // env_destroy(user_env);
+
+
+ env_destroy(global_env);
return 0;
}
+#define HAS_ENOUGH_ARGS(proc, type, argc, fail) \
+ if(argc != proc->value.type.argc) { \
+ err("Wrong number of arguemnts, expected %zu, but got %zu", \
+ proc->value.type.argc, argc); \
+ fail; \
+ }
+
+static value_t apply_lambda(struct proc *proc, value_t *args)
+{
+ value_t ret = VALUE_EMPTY;
+ env_t env = ENV_EMPTY;
+ struct tctx tctx = {0};
+
+ ERR_Z(env = env_create(env_copy(proc->parent_env), destroy_env), goto exit);
+ tctx_init_toklist(&tctx, proc->body, proc->body_len);
+
+ for(size_t i = 0; i < proc->argc; i++) {
+ ERR_NZ(hashtable_insert(env->table,
+ (void*)proc->arg_keys[i]->value.atom,
+ (void*)value_copy(args[i]), NULL, NULL), _r, goto exit);
+ }
+
+ TOKEN_NEXT(&tctx);
+ ret = evaluate_expr(env, &tctx);
+
+exit:
+ env_destroy(env);
+ return ret;
+}
+
value_t apply(value_t proc, size_t argc, value_t *argv)
{
// TODO: make a global nil and copy it
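
Note: apply_lambda is where the "closures work" claim is cashed out: the call
environment is layered on a copy of the environment captured at (lambda ...)
time, so free variables resolve against the definition site. A descriptive
sketch of the intended behaviour (it assumes an arithmetic builtin such as +
is registered in builtin_env, which this diff does not show):

    /* For a program like
     *
     *   (define make-adder (lambda (n) (lambda (x) (+ x n))))
     *   ((make-adder 2) 40)
     *
     * evaluate_lambda stores env_copy(env) in proc.parent_env together with
     * the body token list; apply_lambda for (make-adder 2) binds n = 2 in a
     * fresh env on top of that copy; the inner lambda then captures this env,
     * so the final application still sees n = 2 and should evaluate to 42. */
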
@@ -164,13 +248,7 @@ value_t apply(value_t proc, size_t argc, value_t *argv)
switch(proc->type) {
case VALUE_PROC:
HAS_ENOUGH_ARGS(proc, proc, argc, return VALUE_EMPTY);
- // create new env
- // evaluate body
- // destroy new env
- // switch back to the previous env
-
- NOT_IMPLEMENTED();
- return VALUE_EMPTY;
+ return apply_lambda(&proc->value.proc, argv);
case VALUE_PROC_BUILTIN:
HAS_ENOUGH_ARGS(proc, proc_builtin, argc, return VALUE_EMPTY);
return proc->value.proc_builtin.proc(argv);
@@ -180,37 +258,27 @@ value_t apply(value_t proc, size_t argc, value_t *argv)
}
}
-#define IS_NOT_FIRST(arg, fail) \
- if(arg != 0) { \
- err("special forms can only be the first argument"); \
- fail; \
- }
-#define SPECIAL_FORM(ret, argc, fn, fail) do { \
- IS_NOT_FIRST(argc, fail); \
- ERR_Z(ret = fn, fail); \
- } while(0)
-
-value_t evaluate_expr(void)
+value_t evaluate_expr(env_t env, struct tctx *tctx)
{
value_t ret = VALUE_EMPTY;
- switch(token.type) {
+ switch(TOKEN(tctx)->type) {
case TOKEN_LP:
- ERR_Z(ret = evaluate_sexp(), goto exit);
+ ERR_Z(ret = evaluate_sexp(env, tctx), goto exit);
break;
case TOKEN_ID:
- ERR_Z(ret = evaluate_id(), goto exit);
+ ERR_Z(ret = evaluate_id(env, tctx), goto exit);
break;
case TOKEN_STR:
case TOKEN_INT:
- ERR_Z(ret = value_from_token(&token), goto exit);
+ ERR_Z(ret = value_from_token(TOKEN(tctx)), goto exit);
break;
case TOKEN_QUOTE:
- TOKEN_NEXT();
- ERR_Z(ret = quote_expr(), goto exit);
+ TOKEN_NEXT(tctx);
+ ERR_Z(ret = quote_expr(env, tctx), goto exit);
break;
default:
- err("Did not exptect token '%s'", token_type_string[token.type]);
+ err("Did not exptect token '%s'", token_type_string[TOKEN(tctx)->type]);
break;
}
@@ -218,57 +286,70 @@ exit:
return ret;
}
-value_t evaluate_sexp(void)
+#define IS_NOT_FIRST(arg, fail) \
+ if(arg != 0) { \
+ err("special forms can only be the first argument"); \
+ fail; \
+ }
+#define SPECIAL_FORM(ret, argc, fn, fail) do { \
+ IS_NOT_FIRST(argc, fail); \
+ ERR_Z(ret = fn, fail); \
+ } while(0)
+
+value_t evaluate_sexp(env_t env, struct tctx *tctx)
{
value_t ret = VALUE_EMPTY;
value_t body[256] = {VALUE_EMPTY};
size_t argc = 0;
- TOKEN_SKIP(TOKEN_LP, goto exit);
+ TOKEN_SKIP(tctx, TOKEN_LP, goto exit);
- for(argc = 0; token.type != TOKEN_RP; argc++)
+ for(argc = 0; TOKEN(tctx)->type != TOKEN_RP; argc++)
{
if(argc >= LEN(body)) {
err("Too many arguments");
goto exit;
}
- switch(token.type) {
+ switch(TOKEN(tctx)->type) {
case TOKEN_LAMBDA:
- SPECIAL_FORM(ret, argc, evaluate_lambda(), goto exit);
+ SPECIAL_FORM(ret, argc, evaluate_lambda(env, tctx), goto exit);
goto exit;
case TOKEN_DEFINE:
- SPECIAL_FORM(ret, argc, evaluate_define(), goto exit);
+ SPECIAL_FORM(ret, argc, evaluate_define(env, tctx), goto exit);
goto exit;
case TOKEN_QUOTE_FORM:
- TOKEN_NEXT();
- SPECIAL_FORM(ret, argc, quote_expr(), goto exit);
- TOKEN_MATCH(TOKEN_RP,
+ TOKEN_NEXT(tctx);
+ SPECIAL_FORM(ret, argc, quote_expr(env, tctx), goto exit);
+ TOKEN_MATCH(tctx, TOKEN_RP,
value_destroy(ret);
ret = VALUE_EMPTY;
goto exit);
goto exit;
default:
- ERR_Z(body[argc] = evaluate_expr(), goto exit);
+ ERR_Z(body[argc] = evaluate_expr(env, tctx), goto exit);
break;
}
- TOKEN_NEXT();
+ TOKEN_NEXT(tctx);
}
-
- ret = apply(body[0], argc-1, &body[1]);
- #ifdef DEBUG
+#ifdef DEBUG
info("------------------");
info("Applying procedure");
print_value(body[0]);
info("With Arguemnts");
if(argc > 0)
for(size_t i = 0; i < argc-1; i++) print_value(body[i+1]);
+#endif
+
+ ret = apply(body[0], argc-1, &body[1]);
+
+#ifdef DEBUG
info("Returns");
print_value(ret);
info("-----------------");
- #endif
+#endif
exit:
for(size_t i = 0; i < argc; i++)
@@ -276,22 +357,26 @@ exit:
return ret;
}
-static value_t evaluate_id_env(env_t env)
+value_t evaluate_id(env_t env, struct tctx *tctx)
{
- if(env == ENV_EMPTY) return VALUE_EMPTY;
+ if(env == ENV_EMPTY) {
+ return evaluate_id(global_env, tctx);
+ }
value_t ret = VALUE_EMPTY;
- ERR_NZ(hashtable_query(env->table, (void *)token.value.id, (void **)&ret), _r, return evaluate_id_env(env->parent));
-
+ ERR_NZ(hashtable_query(env->table, (void *)TOKEN(tctx)->value.id, (void **)&ret), _r, goto fail);
+
return value_copy(ret);
-}
-value_t evaluate_id(void)
-{
- return evaluate_id_env(env);
+fail:
+ if(env == global_env->parent) {
+ err("Symbol %s is unbound", TOKEN(tctx)->value.id);
+ return VALUE_EMPTY;
+ }
+ return evaluate_id(env->parent, tctx);
}
-value_t evaluate_lambda(void)
+value_t evaluate_lambda(env_t env, struct tctx *tctx)
{
value_t ret = VALUE_EMPTY;
@@ -302,35 +387,37 @@ value_t evaluate_lambda(void)
struct token *body = NULL;
size_t body_len = 0;
- TOKEN_SKIP(TOKEN_LAMBDA, goto fail);
- TOKEN_SKIP(TOKEN_LP, goto fail);
+ TOKEN_SKIP(tctx, TOKEN_LAMBDA, goto fail);
+ TOKEN_SKIP(tctx, TOKEN_LP, goto fail);
- while(token.type != TOKEN_RP)
+ while(TOKEN(tctx)->type != TOKEN_RP)
{
if(argc >= LEN(args)) {
err("Too many arguments");
goto fail;
}
- if(token.type != TOKEN_ID) {
- err("Token '%s' not expected in lambda args", token_type_string[token.type]);
+ if(TOKEN(tctx)->type != TOKEN_ID) {
+ err("Token '%s' not expected in lambda args", token_type_string[TOKEN(tctx)->type]);
goto fail;
}
- ERR_Z(args[argc++] = value_from_token(&token), goto fail);
-
- TOKEN_NEXT();
+ ERR_Z(args[argc++] = value_from_token(TOKEN(tctx)), goto fail);
+
+ TOKEN_NEXT(tctx);
}
- ERR_Z(body_len = toklist_expr(&body), goto fail);
-
- TOKEN_MATCH(TOKEN_RP, goto fail);
+ ERR_Z(body_len = toklist_expr(tctx, &body), goto fail);
+
+ TOKEN_MATCH(tctx, TOKEN_RP, goto fail);
arg_keys = calloc(argc, sizeof(*arg_keys));
memcpy(arg_keys, args, argc * sizeof(*arg_keys));
struct proc proc = {env_copy(env), arg_keys, argc, body, body_len};
- ERR_Z(ret = value_create(VALUE_PROC, &proc), goto fail);
+ ERR_Z(ret = value_create(VALUE_PROC, &proc),
+ env_destroy(env); // remove the copy
+ goto fail);
return ret;
@@ -351,35 +438,35 @@ fail:
memcpy((dest), (str), len); \
} while(0)
-value_t evaluate_define(void)
+value_t evaluate_define(env_t env, struct tctx *tctx)
{
// TODO: don't alloc when the key is the same
char *key = NULL;
// only in the outside environement
- if(env->parent != user_env) {
+ if(env != ENV_EMPTY) {
err("define can only be called in the outermost environement");
goto fail;
}
- TOKEN_SKIP(TOKEN_DEFINE, goto fail);
+ TOKEN_SKIP(tctx, TOKEN_DEFINE, goto fail);
- switch(token.type)
+ switch(TOKEN(tctx)->type)
{
case TOKEN_ID:
- STR_ALLOC_COPY(key, token.value.id);
+ STR_ALLOC_COPY(key, TOKEN(tctx)->value.id);
break;
default:
- err("Did not exptect token '%s'", token_type_string[token.type]);
+ err("Did not exptect token '%s'", token_type_string[TOKEN(tctx)->type]);
goto fail;
}
- TOKEN_NEXT();
+ TOKEN_NEXT(tctx);
value_t val = VALUE_EMPTY;
- ERR_Z(val = evaluate_expr(), goto fail);
+ ERR_Z(val = evaluate_expr(env, tctx), goto fail);
- TOKEN_MATCH(TOKEN_RP,
+ TOKEN_MATCH(tctx, TOKEN_RP,
value_destroy(val);
goto fail);
@@ -387,7 +474,7 @@ value_t evaluate_define(void)
char *prevkey = NULL;
ERR_NZ(
- hashtable_insert(user_env->table, (void *)key, (void *)val,
+ hashtable_insert(global_env->table, (void *)key, (void *)val,
(void**)&prevkey, (void **)&prevval),
r, {
err("Couldn't insert symbol into the hashtable due to %s", strerror(r));
@@ -404,41 +491,41 @@ fail:
return VALUE_EMPTY;
}
-value_t quote_expr(void)
+value_t quote_expr(env_t env, struct tctx *tctx)
{
value_t ret = VALUE_EMPTY;
- switch(token.type) {
+ switch(TOKEN(tctx)->type) {
case TOKEN_ID:
case TOKEN_STR:
case TOKEN_INT:
- ERR_Z(ret = value_from_token(&token), goto exit);
+ ERR_Z(ret = value_from_token(TOKEN(tctx)), goto exit);
break;
case TOKEN_QUOTE:
case TOKEN_LAMBDA:
case TOKEN_DEFINE:
case TOKEN_QUOTE_FORM: ;
char name[64] = {0};
- ERR_Z(token_value_string(&token, LEN(name), name), goto exit);
+ ERR_Z(token_value_string(TOKEN(tctx), LEN(name), name), goto exit);
struct token temp = {0}; temp.type = TOKEN_ID; temp.value.id = name;
ERR_Z(ret = value_from_token(&temp), goto exit);
break;
case TOKEN_LP:
- ERR_Z(ret = quote_sexp(), goto exit);
+ ERR_Z(ret = quote_sexp(env, tctx), goto exit);
break;
case TOKEN_UNQUOTE:
- TOKEN_SKIP(TOKEN_UNQUOTE, goto exit);
- ERR_Z(ret = evaluate_expr(), goto exit);
+ TOKEN_SKIP(tctx, TOKEN_UNQUOTE, goto exit);
+ ERR_Z(ret = evaluate_expr(env, tctx), goto exit);
break;
default:
- err("Did not exptect token '%s'", token_type_string[token.type]);
+ err("Did not exptect token '%s'", token_type_string[TOKEN(tctx)->type]);
break;
}
exit:
return ret;
}
-value_t quote_sexp(void)
+value_t quote_sexp(env_t env, struct tctx *tctx)
{
value_t ret = VALUE_EMPTY;
value_t left = VALUE_EMPTY;
@@ -448,23 +535,23 @@ value_t quote_sexp(void)
// TODO: make global nil and copy it
ERR_Z(nil = value_create(VALUE_NIL, NULL), goto exit);
- TOKEN_SKIP(TOKEN_LP, goto exit);
+ TOKEN_SKIP(tctx, TOKEN_LP, goto exit);
// Parse NIL
- if(token.type == TOKEN_RP) {
+ if(TOKEN(tctx)->type == TOKEN_RP) {
ret = value_copy(nil);
goto exit;
}
- ERR_Z(left = quote_expr(), goto exit);
+ ERR_Z(left = quote_expr(env, tctx), goto exit);
- TOKEN_NEXT();
- if(token.type == TOKEN_DOT)
+ TOKEN_NEXT(tctx);
+ if(TOKEN(tctx)->type == TOKEN_DOT)
{
// Parse cons
- TOKEN_NEXT();
- ERR_Z(right = quote_expr(), return VALUE_EMPTY);
- TOKEN_MATCH(TOKEN_RP, return VALUE_EMPTY);
+ TOKEN_NEXT(tctx);
+ ERR_Z(right = quote_expr(env, tctx), return VALUE_EMPTY);
+ TOKEN_MATCH(tctx, TOKEN_RP, return VALUE_EMPTY);
struct cons cons = {value_copy(left), value_copy(right)};
ret = value_create(VALUE_CONS, &cons);
@@ -474,10 +561,25 @@ value_t quote_sexp(void)
// Parse list
right = value_copy(nil);
value_t *rightmost = &right; // the final nil
- while(token.type != TOKEN_RP)
+ while(TOKEN(tctx)->type != TOKEN_RP)
{
value_t new = VALUE_EMPTY;
- ERR_Z(new = quote_expr(),
+
+ if(TOKEN(tctx)->type == TOKEN_DOT) {
+ TOKEN_NEXT(tctx);
+
+ ERR_Z(new = quote_expr(env, tctx), goto exit);
+
+ TOKEN_MATCH(tctx, TOKEN_RP,
+ value_destroy(new);
+ goto exit);
+
+ value_destroy(*rightmost);
+ *rightmost = new;
+ break;
+ }
+
+ ERR_Z(new = quote_expr(env, tctx),
goto exit);
value_t new_cons = VALUE_EMPTY;
@@ -489,7 +591,7 @@ value_t quote_sexp(void)
*rightmost = new_cons;
rightmost = &new_cons->value.cons.right;
- TOKEN_NEXT();
+ TOKEN_NEXT(tctx);
}
struct cons cons = {value_copy(left), value_copy(right)};
@@ -501,26 +603,28 @@ exit:
return ret;
}
-size_t toklist_expr(struct token **toklist)
+#define IS_OP(token) (token->type == TOKEN_QUOTE)
+
+size_t toklist_expr(struct tctx *tctx, struct token **toklist)
{
struct token tokens[256];
size_t tokens_len = 0;
size_t depth = 0;
do {
- TOKEN_NEXT();
+ TOKEN_NEXT(tctx);
if(tokens_len >= LEN(tokens)) {
err("Too many tokens in expr");
goto fail;
- }
+ }
- if(token.type == TOKEN_LP) depth++;
- else if(token.type == TOKEN_RP) depth--;
+ if(TOKEN(tctx)->type == TOKEN_LP) depth++;
+ else if(TOKEN(tctx)->type == TOKEN_RP) depth--;
- token_clone(&tokens[tokens_len++], &token);
+ token_clone(&tokens[tokens_len++], TOKEN(tctx));
- } while(depth > 0);
+ } while(depth > 0 || IS_OP(TOKEN(tctx)));
*toklist = calloc(tokens_len, sizeof(*tokens));
memcpy(*toklist, tokens, tokens_len * sizeof(*tokens));
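
Note: symbol lookup now starts in the innermost call environment and falls back
through the parent chain, with ENV_EMPTY redirecting to global_env and the
builtin environment acting as the last stop. A condensed, iterative sketch of
the same resolution order (lookup_id and its name parameter are hypothetical;
the committed code does this recursively in evaluate_id):

    static value_t lookup_id(env_t env, const char *name)
    {
        for(;;) {
            if(env == ENV_EMPTY)
                env = global_env;          /* empty link: fall back to user definitions */

            value_t found = VALUE_EMPTY;
            if(hashtable_query(env->table, (void *)name, (void **)&found) == 0)
                return value_copy(found);  /* caller owns the copy */

            if(env == global_env->parent)  /* builtins missed too */
                break;
            env = env->parent;
        }
        err("Symbol %s is unbound", name);
        return VALUE_EMPTY;
    }
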
diff --git a/src/value.c b/src/value.c
index 3c06fa3..d7df858 100644
--- a/src/value.c
+++ b/src/value.c
@@ -136,10 +136,21 @@ static int cons_print(char *buf, size_t buf_sz, struct cons *cons)
int offset = 0;
buf[offset++] = '(';
offset += value_string(cons->left, buf_sz-offset, buf+offset);
- buf[offset++] = ' ';
- buf[offset++] = '.';
- buf[offset++] = ' ';
- offset += value_string(cons->right, buf_sz-offset, buf+offset);
+
+ value_t right = cons->right;
+ while(right->type == VALUE_CONS) {
+ buf[offset++] = ' ';
+ offset += value_string(right->value.cons.left,
+ buf_sz-offset, buf+offset);
+ right = right->value.cons.right;
+ }
+
+ if(right->type != VALUE_NIL) {
+ buf[offset++] = ' ';
+ buf[offset++] = '.';
+ buf[offset++] = ' ';
+ offset += value_string(right, buf_sz-offset, buf+offset);
+ }
buf[offset++] = ')';
return offset;
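
Note: cons_print now walks the right spine of a cons chain, so proper lists
render as (1 2 3) and only an improper tail falls back to dotted notation,
e.g. (1 2 . 3). A quick check sketch built on value_string, assuming lst and
pair already hold the values quoted from '(1 2 3) and '(1 . 2) respectively
(the fragment also assumes <stdio.h> and <string.h> are included):

    char buf[256] = {0};
    value_string(lst, sizeof(buf), buf);
    puts(buf);                      /* prints: (1 2 3) */

    memset(buf, 0, sizeof(buf));
    value_string(pair, sizeof(buf), buf);
    puts(buf);                      /* prints: (1 . 2) */
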