diff --git a/include/pocketpy/compiler/lexer.hpp b/include/pocketpy/compiler/lexer.hpp
index 92f02c2b..f3141c94 100644
--- a/include/pocketpy/compiler/lexer.hpp
+++ b/include/pocketpy/compiler/lexer.hpp
@@ -25,9 +25,11 @@ constexpr const char* kTokens[] = {
 
     "**", "=", ">", "<", "..", "...", "->", "@", "==", "!=", ">=", "<=", "++", "--", "~",
     /** KW_BEGIN **/
-    "class", "import", "as", "def", "lambda", "pass", "del", "from", "with", "yield",
-    "None", "in", "is", "and", "or", "not", "True", "False", "global", "try", "except", "finally",
-    "while", "for", "if", "elif", "else", "break", "continue", "return", "assert", "raise"
+    // NOTE: These keywords should be sorted in ascending order!!
+    "False", "None", "True", "and", "as", "assert", "break", "class", "continue",
+    "def", "del", "elif", "else", "except", "finally", "for", "from", "global",
+    "if", "import", "in", "is", "lambda", "not", "or", "pass", "raise", "return",
+    "try", "while", "with", "yield",
 };
 // clang-format on
 
@@ -50,12 +52,6 @@ constexpr TokenIndex TK(const char token[]) {
 constexpr inline bool is_raw_string_used(TokenIndex t) { return t == TK("@id") || t == TK("@long"); }
 
 #define TK_STR(t) kTokens[t]
-const small_map kTokenKwMap = []() {
-    small_map map;
-    for(int k = TK("class"); k < kTokenCount; k++)
-        map.insert(kTokens[k], k);
-    return map;
-}();
 
 struct Token {
     TokenIndex type;
diff --git a/src/compiler/lexer.cpp b/src/compiler/lexer.cpp
index e14d1eed..66c18bbe 100644
--- a/src/compiler/lexer.cpp
+++ b/src/compiler/lexer.cpp
@@ -153,9 +153,13 @@ int Lexer::eat_name() {
         return 0;
     }
 
-    auto it = kTokenKwMap.try_get(name);
-    if(it != nullptr) {
-        add_token(*it);
+    const auto KW_BEGIN = kTokens + TK("False");
+    const auto KW_END = kTokens + kTokenCount;
+
+    auto it = std::lower_bound(KW_BEGIN, KW_END, name);
+    // lower_bound may land on a nearby keyword when `name` is not a keyword
+    if(it != KW_END && *it == name) {
+        add_token(it - kTokens);
     } else {
         add_token(TK("@id"));
     }