diff options
Diffstat (limited to 'src/tests/lexer.c')
-rw-r--r-- | src/tests/lexer.c | 126 |
1 file changed, 126 insertions, 0 deletions
diff --git a/src/tests/lexer.c b/src/tests/lexer.c new file mode 100644 index 0000000..3700ae6 --- /dev/null +++ b/src/tests/lexer.c @@ -0,0 +1,126 @@ +#include "tests/tests.h" +#include "lexer.h" + +#include <string.h> + +#include "slice.h" +#include "token.h" + +static void +test_next_token(void) +{ + char *input = "let five = 5;\n" + "let ten = 10;\n" + "\n" + "let add = fn(x, y) {\n" + "\tx + y;\n" + "};\n" + "\n" + "let result = add(five, ten);" + "!-/*5;\n" + "5 < 10 > 5;\n" + "\n" + "if (5 < 10) {\n" + "\treturn true;\n" + "} else {\n" + "\treturn false;\n" + "}\n" + "\n" + "10 == 10;\n" + "10 != 9;\n"; + + struct lexer *lexer = lexer_new(input); + struct token expected[] = { + { TOKEN_LET, slice_fullstr("let") }, + { TOKEN_IDENT, slice_fullstr("five") }, + { TOKEN_ASSIGN, slice_fullstr("=") }, + { TOKEN_INT, slice_fullstr("5") }, + { TOKEN_SEMICOLON, slice_fullstr(";") }, + { TOKEN_LET, slice_fullstr("let") }, + { TOKEN_IDENT, slice_fullstr("ten") }, + { TOKEN_ASSIGN, slice_fullstr("=") }, + { TOKEN_INT, slice_fullstr("10") }, + { TOKEN_SEMICOLON, slice_fullstr(";") }, + { TOKEN_LET, slice_fullstr("let") }, + { TOKEN_IDENT, slice_fullstr("add") }, + { TOKEN_ASSIGN, slice_fullstr("=") }, + { TOKEN_FUNC, slice_fullstr("fn") }, + { TOKEN_LPAREN, slice_fullstr("(") }, + { TOKEN_IDENT, slice_fullstr("x") }, + { TOKEN_COMMA, slice_fullstr(",") }, + { TOKEN_IDENT, slice_fullstr("y") }, + { TOKEN_RPAREN, slice_fullstr(")") }, + { TOKEN_LBRACE, slice_fullstr("{") }, + { TOKEN_IDENT, slice_fullstr("x") }, + { TOKEN_PLUS, slice_fullstr("+") }, + { TOKEN_IDENT, slice_fullstr("y") }, + { TOKEN_SEMICOLON, slice_fullstr(";") }, + { TOKEN_RBRACE, slice_fullstr("}") }, + { TOKEN_SEMICOLON, slice_fullstr(";") }, + { TOKEN_LET, slice_fullstr("let") }, + { TOKEN_IDENT, slice_fullstr("result") }, + { TOKEN_ASSIGN, slice_fullstr("=") }, + { TOKEN_IDENT, slice_fullstr("add") }, + { TOKEN_LPAREN, slice_fullstr("(") }, + { TOKEN_IDENT, slice_fullstr("five") }, + { TOKEN_COMMA, 
slice_fullstr(",") }, + { TOKEN_IDENT, slice_fullstr("ten") }, + { TOKEN_RPAREN, slice_fullstr(")") }, + { TOKEN_SEMICOLON, slice_fullstr(";") }, + { TOKEN_BANG, slice_fullstr("!") }, + { TOKEN_MINUS, slice_fullstr("-") }, + { TOKEN_SLASH, slice_fullstr("/") }, + { TOKEN_ASTERISK, slice_fullstr("*") }, + { TOKEN_INT, slice_fullstr("5") }, + { TOKEN_SEMICOLON, slice_fullstr(";") }, + { TOKEN_INT, slice_fullstr("5") }, + { TOKEN_LT, slice_fullstr("<") }, + { TOKEN_INT, slice_fullstr("10") }, + { TOKEN_GT, slice_fullstr(">") }, + { TOKEN_INT, slice_fullstr("5") }, + { TOKEN_SEMICOLON, slice_fullstr(";") }, + { TOKEN_IF, slice_fullstr("if") }, + { TOKEN_LPAREN, slice_fullstr("(") }, + { TOKEN_INT, slice_fullstr("5") }, + { TOKEN_LT, slice_fullstr("<") }, + { TOKEN_INT, slice_fullstr("10") }, + { TOKEN_RPAREN, slice_fullstr(")") }, + { TOKEN_LBRACE, slice_fullstr("{") }, + { TOKEN_RETURN, slice_fullstr("return") }, + { TOKEN_TRUE, slice_fullstr("true") }, + { TOKEN_SEMICOLON, slice_fullstr(";") }, + { TOKEN_RBRACE, slice_fullstr("}") }, + { TOKEN_ELSE, slice_fullstr("else") }, + { TOKEN_LBRACE, slice_fullstr("{") }, + { TOKEN_RETURN, slice_fullstr("return") }, + { TOKEN_FALSE, slice_fullstr("false") }, + { TOKEN_SEMICOLON, slice_fullstr(";") }, + { TOKEN_RBRACE, slice_fullstr("}") }, + { TOKEN_INT, slice_fullstr("10") }, + { TOKEN_EQ, slice_fullstr("==") }, + { TOKEN_INT, slice_fullstr("10") }, + { TOKEN_SEMICOLON, slice_fullstr(";") }, + { TOKEN_INT, slice_fullstr("10") }, + { TOKEN_NOTEQ, slice_fullstr("!=") }, + { TOKEN_INT, slice_fullstr("9") }, + { TOKEN_SEMICOLON, slice_fullstr(";") }, + { TOKEN_EOF, slice_fullstr("") }, + }; + size_t i = 0; + + do { + struct token token = lexer_next_token(lexer); + asserteq(token.type, expected[i].type); + asserteq(slice_cmp(&token.literal, &expected[i].literal), 0); + i++; + } while (expected[i].type != TOKEN_EOF); + + lexer_destroy(lexer); +} + +int +main(void) +{ + INIT_TESTS(); + RUN_TEST(test_next_token); +} |