// lexer.h

  1. #pragma once
  2. #include "common.h"
  3. #include "error.h"
  4. #include "str.h"
  5. namespace pkpy{
  6. typedef uint8_t TokenIndex;
  7. constexpr const char* kTokens[] = {
  8. "is not", "not in",
  9. "@eof", "@eol", "@sof",
  10. "@id", "@num", "@str", "@fstr",
  11. "@indent", "@dedent",
  12. /*****************************************/
  13. "+", "+=", "-", "-=", // (INPLACE_OP - 1) can get '=' removed
  14. "*", "*=", "/", "/=", "//", "//=", "%", "%=",
  15. "&", "&=", "|", "|=", "^", "^=",
  16. "<<", "<<=", ">>", ">>=",
  17. /*****************************************/
  18. ".", ",", ":", ";", "#", "(", ")", "[", "]", "{", "}", "::",
  19. "**", "=", ">", "<", "...", "->", "?", "@", "==", "!=", ">=", "<=",
  20. /** KW_BEGIN **/
  21. "class", "import", "as", "def", "lambda", "pass", "del", "from", "with", "yield",
  22. "None", "in", "is", "and", "or", "not", "True", "False", "global", "try", "except", "finally",
  23. "goto", "label", // extended keywords, not available in cpython
  24. "while", "for", "if", "elif", "else", "break", "continue", "return", "assert", "raise"
  25. };
  26. using TokenValue = std::variant<std::monostate, i64, f64, Str>;
  27. const TokenIndex kTokenCount = sizeof(kTokens) / sizeof(kTokens[0]);
  28. constexpr TokenIndex TK(const char token[]) {
  29. for(int k=0; k<kTokenCount; k++){
  30. const char* i = kTokens[k];
  31. const char* j = token;
  32. while(*i && *j && *i == *j) { i++; j++;}
  33. if(*i == *j) return k;
  34. }
  35. FATAL_ERROR();
  36. }
// Spelling of token index `t` (for diagnostics).
#define TK_STR(t) kTokens[t]
// Keyword spelling -> token index. Keywords occupy the tail of kTokens,
// starting at "class" (see the KW_BEGIN marker in the table).
const std::map<std::string_view, TokenIndex> kTokenKwMap = [](){
std::map<std::string_view, TokenIndex> map;
for(int k=TK("class"); k<kTokenCount; k++) map[kTokens[k]] = k;
return map;
}();
  43. struct Token{
  44. TokenIndex type;
  45. const char* start;
  46. int length;
  47. int line;
  48. TokenValue value;
  49. Str str() const { return Str(start, length);}
  50. std::string_view sv() const { return std::string_view(start, length);}
  51. std::string info() const {
  52. std::stringstream ss;
  53. ss << line << ": " << TK_STR(type) << " '" << (
  54. sv()=="\n" ? "\\n" : sv()
  55. ) << "'";
  56. return ss.str();
  57. }
  58. };
// https://docs.python.org/3/reference/expressions.html#operator-precedence
// Binding strength for Pratt parsing; listed loosest-first, so a larger value binds tighter.
// The enumerator order is semantic — do not reorder.
enum Precedence {
PREC_NONE,
PREC_TUPLE, // ,
PREC_LAMBDA, // lambda
PREC_TERNARY, // ?:
PREC_LOGICAL_OR, // or
PREC_LOGICAL_AND, // and
PREC_LOGICAL_NOT, // not
PREC_EQUALITY, // == !=
PREC_TEST, // in / is / is not / not in
PREC_COMPARISION, // < > <= >=
PREC_BITWISE_OR, // |
PREC_BITWISE_XOR, // ^
PREC_BITWISE_AND, // &
PREC_BITWISE_SHIFT, // << >>
PREC_TERM, // + -
PREC_FACTOR, // * / % //
PREC_UNARY, // - not
PREC_EXPONENT, // **
PREC_CALL, // ()
PREC_SUBSCRIPT, // []
PREC_ATTRIB, // .index
PREC_PRIMARY,
};
// How a quoted literal is scanned: normal (escapes processed), raw (escapes kept), or f-string.
enum StringType { NORMAL_STRING, RAW_STRING, F_STRING };
  85. struct Lexer {
  86. shared_ptr<SourceData> src;
  87. const char* token_start;
  88. const char* curr_char;
  89. int current_line = 1;
  90. std::vector<Token> nexts;
  91. stack<int> indents;
  92. int brackets_level = 0;
  93. bool used = false;
  94. char peekchar() const{ return *curr_char; }
  95. bool match_n_chars(int n, char c0){
  96. const char* c = curr_char;
  97. for(int i=0; i<n; i++){
  98. if(*c == '\0') return false;
  99. if(*c != c0) return false;
  100. c++;
  101. }
  102. for(int i=0; i<n; i++) eatchar_include_newline();
  103. return true;
  104. }
  105. int eat_spaces(){
  106. int count = 0;
  107. while (true) {
  108. switch (peekchar()) {
  109. case ' ' : count+=1; break;
  110. case '\t': count+=4; break;
  111. default: return count;
  112. }
  113. eatchar();
  114. }
  115. }
  116. bool eat_indentation(){
  117. if(brackets_level > 0) return true;
  118. int spaces = eat_spaces();
  119. if(peekchar() == '#') skip_line_comment();
  120. if(peekchar() == '\0' || peekchar() == '\n') return true;
  121. // https://docs.python.org/3/reference/lexical_analysis.html#indentation
  122. if(spaces > indents.top()){
  123. indents.push(spaces);
  124. nexts.push_back(Token{TK("@indent"), token_start, 0, current_line});
  125. } else if(spaces < indents.top()){
  126. while(spaces < indents.top()){
  127. indents.pop();
  128. nexts.push_back(Token{TK("@dedent"), token_start, 0, current_line});
  129. }
  130. if(spaces != indents.top()){
  131. return false;
  132. }
  133. }
  134. return true;
  135. }
  136. char eatchar() {
  137. char c = peekchar();
  138. if(c == '\n') throw std::runtime_error("eatchar() cannot consume a newline");
  139. curr_char++;
  140. return c;
  141. }
  142. char eatchar_include_newline() {
  143. char c = peekchar();
  144. curr_char++;
  145. if (c == '\n'){
  146. current_line++;
  147. src->line_starts.push_back(curr_char);
  148. }
  149. return c;
  150. }
  151. int eat_name() {
  152. curr_char--;
  153. while(true){
  154. unsigned char c = peekchar();
  155. int u8bytes = utf8len(c, true);
  156. if(u8bytes == 0) return 1;
  157. if(u8bytes == 1){
  158. if(isalpha(c) || c=='_' || isdigit(c)) {
  159. curr_char++;
  160. continue;
  161. }else{
  162. break;
  163. }
  164. }
  165. // handle multibyte char
  166. std::string u8str(curr_char, u8bytes);
  167. if(u8str.size() != u8bytes) return 2;
  168. uint32_t value = 0;
  169. for(int k=0; k < u8bytes; k++){
  170. uint8_t b = u8str[k];
  171. if(k==0){
  172. if(u8bytes == 2) value = (b & 0b00011111) << 6;
  173. else if(u8bytes == 3) value = (b & 0b00001111) << 12;
  174. else if(u8bytes == 4) value = (b & 0b00000111) << 18;
  175. }else{
  176. value |= (b & 0b00111111) << (6*(u8bytes-k-1));
  177. }
  178. }
  179. if(is_unicode_Lo_char(value)) curr_char += u8bytes;
  180. else break;
  181. }
  182. int length = (int)(curr_char - token_start);
  183. if(length == 0) return 3;
  184. std::string_view name(token_start, length);
  185. if(src->mode == JSON_MODE){
  186. if(name == "true"){
  187. add_token(TK("True"));
  188. } else if(name == "false"){
  189. add_token(TK("False"));
  190. } else if(name == "null"){
  191. add_token(TK("None"));
  192. } else {
  193. return 4;
  194. }
  195. return 0;
  196. }
  197. if(kTokenKwMap.count(name)){
  198. if(name == "not"){
  199. if(strncmp(curr_char, " in", 3) == 0){
  200. curr_char += 3;
  201. add_token(TK("not in"));
  202. return 0;
  203. }
  204. }else if(name == "is"){
  205. if(strncmp(curr_char, " not", 4) == 0){
  206. curr_char += 4;
  207. add_token(TK("is not"));
  208. return 0;
  209. }
  210. }
  211. add_token(kTokenKwMap.at(name));
  212. } else {
  213. add_token(TK("@id"));
  214. }
  215. return 0;
  216. }
  217. void skip_line_comment() {
  218. char c;
  219. while ((c = peekchar()) != '\0') {
  220. if (c == '\n') return;
  221. eatchar();
  222. }
  223. }
  224. bool matchchar(char c) {
  225. if (peekchar() != c) return false;
  226. eatchar_include_newline();
  227. return true;
  228. }
  229. void add_token(TokenIndex type, TokenValue value={}) {
  230. switch(type){
  231. case TK("{"): case TK("["): case TK("("): brackets_level++; break;
  232. case TK(")"): case TK("]"): case TK("}"): brackets_level--; break;
  233. }
  234. nexts.push_back( Token{
  235. type,
  236. token_start,
  237. (int)(curr_char - token_start),
  238. current_line - ((type == TK("@eol")) ? 1 : 0),
  239. value
  240. });
  241. }
  242. void add_token_2(char c, TokenIndex one, TokenIndex two) {
  243. if (matchchar(c)) add_token(two);
  244. else add_token(one);
  245. }
  246. Str eat_string_until(char quote, bool raw) {
  247. bool quote3 = match_n_chars(2, quote);
  248. std::vector<char> buff;
  249. while (true) {
  250. char c = eatchar_include_newline();
  251. if (c == quote){
  252. if(quote3 && !match_n_chars(2, quote)){
  253. buff.push_back(c);
  254. continue;
  255. }
  256. break;
  257. }
  258. if (c == '\0'){
  259. if(quote3 && src->mode == REPL_MODE){
  260. throw NeedMoreLines(false);
  261. }
  262. SyntaxError("EOL while scanning string literal");
  263. }
  264. if (c == '\n'){
  265. if(!quote3) SyntaxError("EOL while scanning string literal");
  266. else{
  267. buff.push_back(c);
  268. continue;
  269. }
  270. }
  271. if (!raw && c == '\\') {
  272. switch (eatchar_include_newline()) {
  273. case '"': buff.push_back('"'); break;
  274. case '\'': buff.push_back('\''); break;
  275. case '\\': buff.push_back('\\'); break;
  276. case 'n': buff.push_back('\n'); break;
  277. case 'r': buff.push_back('\r'); break;
  278. case 't': buff.push_back('\t'); break;
  279. default: SyntaxError("invalid escape char");
  280. }
  281. } else {
  282. buff.push_back(c);
  283. }
  284. }
  285. return Str(buff.data(), buff.size());
  286. }
  287. void eat_string(char quote, StringType type) {
  288. Str s = eat_string_until(quote, type == RAW_STRING);
  289. if(type == F_STRING){
  290. add_token(TK("@fstr"), s);
  291. }else{
  292. add_token(TK("@str"), s);
  293. }
  294. }
  295. void eat_number() {
  296. static const std::regex pattern("^(0x)?[0-9a-fA-F]+(\\.[0-9]+)?");
  297. std::smatch m;
  298. const char* i = token_start;
  299. while(*i != '\n' && *i != '\0') i++;
  300. std::string s = std::string(token_start, i);
  301. try{
  302. if (std::regex_search(s, m, pattern)) {
  303. // here is m.length()-1, since the first char was eaten by lex_token()
  304. for(int j=0; j<m.length()-1; j++) eatchar();
  305. int base = 10;
  306. size_t size;
  307. if (m[1].matched) base = 16;
  308. if (m[2].matched) {
  309. if(base == 16) SyntaxError("hex literal should not contain a dot");
  310. add_token(TK("@num"), S_TO_FLOAT(m[0], &size));
  311. } else {
  312. add_token(TK("@num"), S_TO_INT(m[0], &size, base));
  313. }
  314. if (size != m.length()) FATAL_ERROR();
  315. }
  316. }catch(std::exception& _){
  317. SyntaxError("invalid number literal");
  318. }
  319. }
  320. bool lex_one_token() {
  321. while (peekchar() != '\0') {
  322. token_start = curr_char;
  323. char c = eatchar_include_newline();
  324. switch (c) {
  325. case '\'': case '"': eat_string(c, NORMAL_STRING); return true;
  326. case '#': skip_line_comment(); break;
  327. case '{': add_token(TK("{")); return true;
  328. case '}': add_token(TK("}")); return true;
  329. case ',': add_token(TK(",")); return true;
  330. case ':': add_token_2(':', TK(":"), TK("::")); return true;
  331. case ';': add_token(TK(";")); return true;
  332. case '(': add_token(TK("(")); return true;
  333. case ')': add_token(TK(")")); return true;
  334. case '[': add_token(TK("[")); return true;
  335. case ']': add_token(TK("]")); return true;
  336. case '@': add_token(TK("@")); return true;
  337. case '%': add_token_2('=', TK("%"), TK("%=")); return true;
  338. case '&': add_token_2('=', TK("&"), TK("&=")); return true;
  339. case '|': add_token_2('=', TK("|"), TK("|=")); return true;
  340. case '^': add_token_2('=', TK("^"), TK("^=")); return true;
  341. case '?': add_token(TK("?")); return true;
  342. case '.': {
  343. if(matchchar('.')) {
  344. if(matchchar('.')) {
  345. add_token(TK("..."));
  346. } else {
  347. SyntaxError("invalid token '..'");
  348. }
  349. } else {
  350. add_token(TK("."));
  351. }
  352. return true;
  353. }
  354. case '=': add_token_2('=', TK("="), TK("==")); return true;
  355. case '+': add_token_2('=', TK("+"), TK("+=")); return true;
  356. case '>': {
  357. if(matchchar('=')) add_token(TK(">="));
  358. else if(matchchar('>')) add_token_2('=', TK(">>"), TK(">>="));
  359. else add_token(TK(">"));
  360. return true;
  361. }
  362. case '<': {
  363. if(matchchar('=')) add_token(TK("<="));
  364. else if(matchchar('<')) add_token_2('=', TK("<<"), TK("<<="));
  365. else add_token(TK("<"));
  366. return true;
  367. }
  368. case '-': {
  369. if(matchchar('=')) add_token(TK("-="));
  370. else if(matchchar('>')) add_token(TK("->"));
  371. else add_token(TK("-"));
  372. return true;
  373. }
  374. case '!':
  375. if(matchchar('=')) add_token(TK("!="));
  376. else SyntaxError("expected '=' after '!'");
  377. break;
  378. case '*':
  379. if (matchchar('*')) {
  380. add_token(TK("**")); // '**'
  381. } else {
  382. add_token_2('=', TK("*"), TK("*="));
  383. }
  384. return true;
  385. case '/':
  386. if(matchchar('/')) {
  387. add_token_2('=', TK("//"), TK("//="));
  388. } else {
  389. add_token_2('=', TK("/"), TK("/="));
  390. }
  391. return true;
  392. case ' ': case '\t': eat_spaces(); break;
  393. case '\n': {
  394. add_token(TK("@eol"));
  395. if(!eat_indentation()) IndentationError("unindent does not match any outer indentation level");
  396. return true;
  397. }
  398. default: {
  399. if(c == 'f'){
  400. if(matchchar('\'')) {eat_string('\'', F_STRING); return true;}
  401. if(matchchar('"')) {eat_string('"', F_STRING); return true;}
  402. }else if(c == 'r'){
  403. if(matchchar('\'')) {eat_string('\'', RAW_STRING); return true;}
  404. if(matchchar('"')) {eat_string('"', RAW_STRING); return true;}
  405. }
  406. if (c >= '0' && c <= '9') {
  407. eat_number();
  408. return true;
  409. }
  410. switch (eat_name())
  411. {
  412. case 0: break;
  413. case 1: SyntaxError("invalid char: " + std::string(1, c));
  414. case 2: SyntaxError("invalid utf8 sequence: " + std::string(1, c));
  415. case 3: SyntaxError("@id contains invalid char"); break;
  416. case 4: SyntaxError("invalid JSON token"); break;
  417. default: FATAL_ERROR();
  418. }
  419. return true;
  420. }
  421. }
  422. }
  423. token_start = curr_char;
  424. while(indents.size() > 1){
  425. indents.pop();
  426. add_token(TK("@dedent"));
  427. return true;
  428. }
  429. add_token(TK("@eof"));
  430. return false;
  431. }
  432. /***** Error Reporter *****/
  433. void throw_err(Str type, Str msg){
  434. int lineno = current_line;
  435. const char* cursor = curr_char;
  436. if(peekchar() == '\n'){
  437. lineno--;
  438. cursor--;
  439. }
  440. throw_err(type, msg, lineno, cursor);
  441. }
  442. void throw_err(Str type, Str msg, int lineno, const char* cursor){
  443. auto e = Exception("SyntaxError", msg);
  444. e.st_push(src->snapshot(lineno, cursor));
  445. throw e;
  446. }
  447. void SyntaxError(Str msg){ throw_err("SyntaxError", msg); }
  448. void SyntaxError(){ throw_err("SyntaxError", "invalid syntax"); }
  449. void IndentationError(Str msg){ throw_err("IndentationError", msg); }
  450. Lexer(shared_ptr<SourceData> src) {
  451. this->src = src;
  452. this->token_start = src->source.c_str();
  453. this->curr_char = src->source.c_str();
  454. this->nexts.push_back(Token{TK("@sof"), token_start, 0, current_line});
  455. this->indents.push(0);
  456. }
  457. std::vector<Token> run() {
  458. if(used) FATAL_ERROR();
  459. used = true;
  460. while (lex_one_token());
  461. return std::move(nexts);
  462. }
  463. };
  464. } // namespace pkpy