1 #ifndef TOKENIZER_SUITE_H_
2 #define TOKENIZER_SUITE_H_
9 #include "ebisp/tokenizer.h"
// Tokenizes "(1 2 3)" and checks the emitted stream is exactly
// "(", "1", "2", "3", ")".  The pattern visible here establishes the
// tokenizer contract: each next_token(token.end) call resumes scanning
// where the previous token stopped, and a token's text is the half-open
// byte range [token.begin, token.end).
// NOTE(review): the TEST body's braces / return statement are not visible
// in this chunk — structure elided by extraction; confirm against the file.
11 TEST(tokenizer_number_list_test)
13 struct Token token = next_token("(1 2 3)");
14 ASSERT_STREQN("(", token.begin, (size_t) (token.end - token.begin));
16 token = next_token(token.end);
17 ASSERT_STREQN("1", token.begin, (size_t) (token.end - token.begin));
19 token = next_token(token.end);
20 ASSERT_STREQN("2", token.begin, (size_t) (token.end - token.begin));
22 token = next_token(token.end);
23 ASSERT_STREQN("3", token.begin, (size_t) (token.end - token.begin));
25 token = next_token(token.end);
26 ASSERT_STREQN(")", token.begin, (size_t) (token.end - token.begin));
// Tokenizes "(\"foo\" \"bar\" \"baz\")" and checks the emitted stream is
// "(", "\"foo\"", "\"bar\"", "\"baz\"", ")" — i.e. the surrounding double
// quotes are part of a string token's [begin, end) range, not stripped.
// As above, each next_token(token.end) call continues from the previous
// token's end pointer.
// NOTE(review): the TEST body's braces / return statement are not visible
// in this chunk — structure elided by extraction; confirm against the file.
31 TEST(tokenizer_string_list_test)
33 struct Token token = next_token("(\"foo\" \"bar\" \"baz\")");
34 ASSERT_STREQN("(", token.begin, (size_t) (token.end - token.begin));
36 token = next_token(token.end);
37 ASSERT_STREQN("\"foo\"", token.begin, (size_t) (token.end - token.begin));
39 token = next_token(token.end);
40 ASSERT_STREQN("\"bar\"", token.begin, (size_t) (token.end - token.begin));
42 token = next_token(token.end);
43 ASSERT_STREQN("\"baz\"", token.begin, (size_t) (token.end - token.begin));
45 token = next_token(token.end);
46 ASSERT_STREQN(")", token.begin, (size_t) (token.end - token.begin));
// Suite entry point: registers and runs both tokenizer tests above.
// NOTE(review): the suite body's braces are not visible in this chunk —
// structure elided by extraction; confirm against the file.
51 TEST_SUITE(tokenizer_suite)
53 TEST_RUN(tokenizer_number_list_test);
54 TEST_RUN(tokenizer_string_list_test);
58 #endif // TOKENIZER_SUITE_H_