aboutsummaryrefslogtreecommitdiff
path: root/src/tests/lexer.c
blob: 3700ae6ec90d195fcdd5804cbb2b51e750b9b8ec (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
#include "tests/tests.h"
#include "lexer.h"

#include <string.h>

#include "slice.h"
#include "token.h"

/*
 * Feed a small program covering every token class (keywords,
 * identifiers, integers, single-char operators/delimiters, and the
 * two-char operators == and !=) through the lexer and check that the
 * emitted token stream matches the expected table exactly, including
 * the final TOKEN_EOF.
 */
static void
test_next_token(void)
{
	char *input = "let five = 5;\n"
				  "let ten = 10;\n"
				  "\n"
				  "let add = fn(x, y) {\n"
				  "\tx + y;\n"
				  "};\n"
				  "\n"
				  "let result = add(five, ten);"
				  "!-/*5;\n"
				  "5 < 10 > 5;\n"
				  "\n"
				  "if (5 < 10) {\n"
				  "\treturn true;\n"
				  "} else {\n"
				  "\treturn false;\n"
				  "}\n"
				  "\n"
				  "10 == 10;\n"
				  "10 != 9;\n";

	struct lexer *lexer = lexer_new(input);
	struct token expected[] = {
		{ TOKEN_LET, slice_fullstr("let") },
		{ TOKEN_IDENT, slice_fullstr("five") },
		{ TOKEN_ASSIGN, slice_fullstr("=") },
		{ TOKEN_INT, slice_fullstr("5") },
		{ TOKEN_SEMICOLON, slice_fullstr(";") },
		{ TOKEN_LET, slice_fullstr("let") },
		{ TOKEN_IDENT, slice_fullstr("ten") },
		{ TOKEN_ASSIGN, slice_fullstr("=") },
		{ TOKEN_INT, slice_fullstr("10") },
		{ TOKEN_SEMICOLON, slice_fullstr(";") },
		{ TOKEN_LET, slice_fullstr("let") },
		{ TOKEN_IDENT, slice_fullstr("add") },
		{ TOKEN_ASSIGN, slice_fullstr("=") },
		{ TOKEN_FUNC, slice_fullstr("fn") },
		{ TOKEN_LPAREN, slice_fullstr("(") },
		{ TOKEN_IDENT, slice_fullstr("x") },
		{ TOKEN_COMMA, slice_fullstr(",") },
		{ TOKEN_IDENT, slice_fullstr("y") },
		{ TOKEN_RPAREN, slice_fullstr(")") },
		{ TOKEN_LBRACE, slice_fullstr("{") },
		{ TOKEN_IDENT, slice_fullstr("x") },
		{ TOKEN_PLUS, slice_fullstr("+") },
		{ TOKEN_IDENT, slice_fullstr("y") },
		{ TOKEN_SEMICOLON, slice_fullstr(";") },
		{ TOKEN_RBRACE, slice_fullstr("}") },
		{ TOKEN_SEMICOLON, slice_fullstr(";") },
		{ TOKEN_LET, slice_fullstr("let") },
		{ TOKEN_IDENT, slice_fullstr("result") },
		{ TOKEN_ASSIGN, slice_fullstr("=") },
		{ TOKEN_IDENT, slice_fullstr("add") },
		{ TOKEN_LPAREN, slice_fullstr("(") },
		{ TOKEN_IDENT, slice_fullstr("five") },
		{ TOKEN_COMMA, slice_fullstr(",") },
		{ TOKEN_IDENT, slice_fullstr("ten") },
		{ TOKEN_RPAREN, slice_fullstr(")") },
		{ TOKEN_SEMICOLON, slice_fullstr(";") },
		{ TOKEN_BANG, slice_fullstr("!") },
		{ TOKEN_MINUS, slice_fullstr("-") },
		{ TOKEN_SLASH, slice_fullstr("/") },
		{ TOKEN_ASTERISK, slice_fullstr("*") },
		{ TOKEN_INT, slice_fullstr("5") },
		{ TOKEN_SEMICOLON, slice_fullstr(";") },
		{ TOKEN_INT, slice_fullstr("5") },
		{ TOKEN_LT, slice_fullstr("<") },
		{ TOKEN_INT, slice_fullstr("10") },
		{ TOKEN_GT, slice_fullstr(">") },
		{ TOKEN_INT, slice_fullstr("5") },
		{ TOKEN_SEMICOLON, slice_fullstr(";") },
		{ TOKEN_IF, slice_fullstr("if") },
		{ TOKEN_LPAREN, slice_fullstr("(") },
		{ TOKEN_INT, slice_fullstr("5") },
		{ TOKEN_LT, slice_fullstr("<") },
		{ TOKEN_INT, slice_fullstr("10") },
		{ TOKEN_RPAREN, slice_fullstr(")") },
		{ TOKEN_LBRACE, slice_fullstr("{") },
		{ TOKEN_RETURN, slice_fullstr("return") },
		{ TOKEN_TRUE, slice_fullstr("true") },
		{ TOKEN_SEMICOLON, slice_fullstr(";") },
		{ TOKEN_RBRACE, slice_fullstr("}") },
		{ TOKEN_ELSE, slice_fullstr("else") },
		{ TOKEN_LBRACE, slice_fullstr("{") },
		{ TOKEN_RETURN, slice_fullstr("return") },
		{ TOKEN_FALSE, slice_fullstr("false") },
		{ TOKEN_SEMICOLON, slice_fullstr(";") },
		{ TOKEN_RBRACE, slice_fullstr("}") },
		{ TOKEN_INT, slice_fullstr("10") },
		{ TOKEN_EQ, slice_fullstr("==") },
		{ TOKEN_INT, slice_fullstr("10") },
		{ TOKEN_SEMICOLON, slice_fullstr(";") },
		{ TOKEN_INT, slice_fullstr("10") },
		{ TOKEN_NOTEQ, slice_fullstr("!=") },
		{ TOKEN_INT, slice_fullstr("9") },
		{ TOKEN_SEMICOLON, slice_fullstr(";") },
		{ TOKEN_EOF, slice_fullstr("") },
	};

	/*
	 * Walk the entire expected table by count rather than stopping on
	 * an EOF sentinel.  The previous do/while loop tested
	 * expected[i].type != TOKEN_EOF AFTER incrementing i, so it exited
	 * before ever comparing the lexer's final EOF token against the
	 * last table entry -- the EOF case went untested.
	 */
	for (size_t i = 0; i < sizeof expected / sizeof expected[0]; i++) {
		struct token token = lexer_next_token(lexer);
		asserteq(token.type, expected[i].type);
		asserteq(slice_cmp(&token.literal, &expected[i].literal), 0);
	}

	lexer_destroy(lexer);
}

/* Test entry point: register the harness and run the lexer suite. */
int
main(void)
{
	INIT_TESTS();
	RUN_TEST(test_next_token);

	return 0;
}