/* compiler.c — pocketpy expression compiler (expr.h / context.h / expr.c sections) */
  1. #include "pocketpy/compiler/compiler.h"
  2. #include "pocketpy/compiler/lexer.h"
  3. #include "pocketpy/objects/codeobject.h"
  4. #include "pocketpy/objects/sourcedata.h"
  5. #include "pocketpy/objects/object.h"
  6. #include "pocketpy/common/sstream.h"
  7. #include "pocketpy/common/memorypool.h"
  8. #include <assert.h>
  9. #include <stdbool.h>
/* expr.h */
typedef struct Expr Expr;
typedef struct Ctx Ctx;

// Virtual table shared by all expression AST node types. Each concrete *Expr
// type provides one static instance; a NULL slot means "operation unsupported"
// and the vt* macros below degrade gracefully.
typedef struct ExprVt {
    /* emit */
    void (*emit_)(Expr*, Ctx*);        // emit bytecode that pushes the expression's value
    bool (*emit_del)(Expr*, Ctx*);     // emit `del <expr>`; returns false for an invalid target
    bool (*emit_store)(Expr*, Ctx*);   // emit store of TOS into this target; false if invalid
    void (*emit_inplace)(Expr*, Ctx*); // emit value for augmented assignment (falls back to emit_)
    bool (*emit_istore)(Expr*, Ctx*);  // emit inplace store (falls back to emit_store)
    /* reflections */
    bool is_literal;
    bool is_name; // NameExpr
    bool is_tuple; // TupleExpr
    bool is_attrib; // AttribExpr
    bool is_subscr; // SubscrExpr
    bool is_starred; // StarredExpr
    bool is_binary; // BinaryExpr
    void (*dtor)(Expr*); // optional destructor for owned children/resources
} ExprVt;

// Every node must fit in one fixed-size pool block (see PoolExpr_alloc).
#define static_assert_expr_size(T) static_assert(sizeof(T) <= kPoolExprBlockSize, "")
// Invoke vtable slot `f` on `self` (slot must be non-NULL).
#define vtcall(f, self, ctx) ((self)->vt->f((self), (ctx)))
#define vtemit_(self, ctx) vtcall(emit_, (self), (ctx))
// NULL-slot fallbacks: del/store report failure; inplace variants fall back to
// the plain emit/store forms.
#define vtemit_del(self, ctx) ((self)->vt->emit_del ? vtcall(emit_del, self, ctx) : false)
#define vtemit_store(self, ctx) ((self)->vt->emit_store ? vtcall(emit_store, self, ctx) : false)
#define vtemit_inplace(self, ctx) \
    ((self)->vt->emit_inplace ? vtcall(emit_inplace, self, ctx) : vtemit_(self, ctx))
#define vtemit_istore(self, ctx) \
    ((self)->vt->emit_istore ? vtcall(emit_istore, self, ctx) : vtemit_store(self, ctx))
// Destroy a node: run its dtor (if any) and return the block to the pool.
// NULL-safe; safe to call on partially-built trees.
#define vtdelete(self) \
    do { \
        if(self) { \
            if((self)->vt->dtor) (self)->vt->dtor(self); \
            PoolExpr_dealloc(self); \
        } \
    } while(0)

// Common leading fields of every expression node: vtable + source line number.
#define EXPR_COMMON_HEADER \
    const ExprVt* vt; \
    int line;

// Base "class": a bare node carrying only the common header.
typedef struct Expr {
    EXPR_COMMON_HEADER
} Expr;
/* context.h */
// Per-CodeObject emission context: one Ctx per CodeObject being compiled.
typedef struct Ctx {
    CodeObject* co; // 1 CodeEmitContext <=> 1 CodeObject*
    FuncDecl* func; // optional, weakref
    int level; // nesting level; <= 1 selects OP_LOAD_GLOBAL in NameExpr__emit_
    int curr_iblock; // presumably index of the current code block — see Ctx__enter_block
    bool is_compiling_class; // true inside a class body; changes name load/store opcodes
    c11_vector /*T=Expr* */ s_expr; // expression stack (owned; see Ctx__s_* helpers)
    c11_smallmap_n2i global_names;
    c11_smallmap_s2n co_consts_string_dedup_map; // dedup map for string constants
} Ctx;
typedef struct Expr Expr;
// --- lifecycle ---
static void Ctx__ctor(Ctx* self, CodeObject* co, FuncDecl* func, int level);
static void Ctx__dtor(Ctx* self);
// --- block / jump management ---
static int Ctx__prepare_loop_divert(Ctx* self, int line, bool is_break);
static int Ctx__enter_block(Ctx* self, CodeBlockType type);
static void Ctx__exit_block(Ctx* self);
// --- bytecode emission (return value: index of the emitted instruction) ---
static int Ctx__emit_(Ctx* self, Opcode opcode, uint16_t arg, int line);
static int Ctx__emit_virtual(Ctx* self, Opcode opcode, uint16_t arg, int line, bool virtual);
static void Ctx__revert_last_emit_(Ctx* self);
static int Ctx__emit_int(Ctx* self, int64_t value, int line);
static void Ctx__patch_jump(Ctx* self, int index);
static void Ctx__emit_jump(Ctx* self, int target, int line);
// --- constants / names ---
static int Ctx__add_varname(Ctx* self, py_Name name);
static int Ctx__add_const(Ctx* self, py_Ref);
static int Ctx__add_const_string(Ctx* self, c11_sv);
static void Ctx__emit_store_name(Ctx* self, NameScope scope, py_Name name, int line);
// --- expression stack ---
static void Ctx__s_emit_top(Ctx*); // emit top -> pop -> delete
static void Ctx__s_push(Ctx*, Expr*); // push
static Expr* Ctx__s_top(Ctx*); // top
static int Ctx__s_size(Ctx*); // size
static void Ctx__s_pop(Ctx*); // pop -> delete
static Expr* Ctx__s_popx(Ctx*); // pop move
static void Ctx__s_emit_decorators(Ctx*, int count);
  86. /* expr.c */
/* expr.c */
// A bare identifier reference (load/store/del depends on scope and context).
typedef struct NameExpr {
    EXPR_COMMON_HEADER
    py_Name name; // interned identifier
    NameScope scope; // NAME_LOCAL / NAME_GLOBAL / NAME_GLOBAL_UNKNOWN
} NameExpr;
// Emit a load of the name, choosing the opcode by scope and context.
void NameExpr__emit_(Expr* self_, Ctx* ctx) {
    NameExpr* self = (NameExpr*)self_;
    // Fast path: a known local that already has a varname slot.
    int index = c11_smallmap_n2i__get(&ctx->co->varnames_inv, self->name, -1);
    if(self->scope == NAME_LOCAL && index >= 0) {
        Ctx__emit_(ctx, OP_LOAD_FAST, index, self->line);
    } else {
        Opcode op = ctx->level <= 1 ? OP_LOAD_GLOBAL : OP_LOAD_NONLOCAL;
        if(ctx->is_compiling_class && self->scope == NAME_GLOBAL) {
            // if we are compiling a class, we should use OP_LOAD_CLASS_GLOBAL instead of
            // OP_LOAD_GLOBAL; this supports @property.setter
            op = OP_LOAD_CLASS_GLOBAL;
            // exec()/eval() won't work with OP_LOAD_CLASS_GLOBAL in class body
        } else {
            // we cannot determine the scope when calling exec()/eval()
            if(self->scope == NAME_GLOBAL_UNKNOWN) op = OP_LOAD_NAME;
        }
        Ctx__emit_(ctx, op, self->name, self->line);
    }
}
// Emit `del name`. Always succeeds for the three known scopes.
bool NameExpr__emit_del(Expr* self_, Ctx* ctx) {
    NameExpr* self = (NameExpr*)self_;
    switch(self->scope) {
        case NAME_LOCAL:
            Ctx__emit_(ctx, OP_DELETE_FAST, Ctx__add_varname(ctx, self->name), self->line);
            break;
        case NAME_GLOBAL: Ctx__emit_(ctx, OP_DELETE_GLOBAL, self->name, self->line); break;
        case NAME_GLOBAL_UNKNOWN: Ctx__emit_(ctx, OP_DELETE_NAME, self->name, self->line); break;
        default: c11__unreachable();
    }
    return true;
}
// Emit a store of TOS into the name. In a class body, names become class attrs.
bool NameExpr__emit_store(Expr* self_, Ctx* ctx) {
    NameExpr* self = (NameExpr*)self_;
    if(ctx->is_compiling_class) {
        Ctx__emit_(ctx, OP_STORE_CLASS_ATTR, self->name, self->line);
        return true;
    }
    Ctx__emit_store_name(ctx, self->scope, self->name, self->line);
    return true;
}
NameExpr* NameExpr__new(int line, py_Name name, NameScope scope) {
    const static ExprVt Vt = {.emit_ = NameExpr__emit_,
                              .emit_del = NameExpr__emit_del,
                              .emit_store = NameExpr__emit_store,
                              .is_name = true};
    static_assert_expr_size(NameExpr);
    NameExpr* self = PoolExpr_alloc();
    self->vt = &Vt;
    self->line = line;
    self->name = name;
    self->scope = scope;
    return self;
}
  145. typedef struct StarredExpr {
  146. EXPR_COMMON_HEADER
  147. Expr* child;
  148. int level;
  149. } StarredExpr;
  150. void StarredExpr__emit_(Expr* self_, Ctx* ctx) {
  151. StarredExpr* self = (StarredExpr*)self_;
  152. vtemit_(self->child, ctx);
  153. Ctx__emit_(ctx, OP_UNARY_STAR, self->level, self->line);
  154. }
  155. bool StarredExpr__emit_store(Expr* self_, Ctx* ctx) {
  156. StarredExpr* self = (StarredExpr*)self_;
  157. if(self->level != 1) return false;
  158. // simply proxy to child
  159. return vtemit_store(self->child, ctx);
  160. }
  161. void StarredExpr__dtor(Expr* self_) {
  162. StarredExpr* self = (StarredExpr*)self_;
  163. vtdelete(self->child);
  164. }
  165. StarredExpr* StarredExpr__new(int line, Expr* child, int level) {
  166. const static ExprVt Vt = {.emit_ = StarredExpr__emit_,
  167. .emit_store = StarredExpr__emit_store,
  168. .is_starred = true,
  169. .dtor = StarredExpr__dtor};
  170. static_assert_expr_size(StarredExpr);
  171. StarredExpr* self = PoolExpr_alloc();
  172. self->vt = &Vt;
  173. self->line = line;
  174. self->child = child;
  175. self->level = level;
  176. return self;
  177. }
  178. // InvertExpr, NotExpr, NegatedExpr
  179. // NOTE: NegatedExpr always contains a non-const child. Should not generate -1 or -0.1
  180. typedef struct UnaryExpr {
  181. EXPR_COMMON_HEADER
  182. Expr* child;
  183. Opcode opcode;
  184. } UnaryExpr;
  185. void UnaryExpr__dtor(Expr* self_) {
  186. UnaryExpr* self = (UnaryExpr*)self_;
  187. vtdelete(self->child);
  188. }
  189. static void UnaryExpr__emit_(Expr* self_, Ctx* ctx) {
  190. UnaryExpr* self = (UnaryExpr*)self_;
  191. vtemit_(self->child, ctx);
  192. Ctx__emit_(ctx, self->opcode, BC_NOARG, self->line);
  193. }
  194. UnaryExpr* UnaryExpr__new(int line, Expr* child, Opcode opcode) {
  195. const static ExprVt Vt = {.emit_ = UnaryExpr__emit_, .dtor = UnaryExpr__dtor};
  196. static_assert_expr_size(UnaryExpr);
  197. UnaryExpr* self = PoolExpr_alloc();
  198. self->vt = &Vt;
  199. self->line = line;
  200. self->child = child;
  201. self->opcode = opcode;
  202. return self;
  203. }
  204. typedef struct FStringSpecExpr {
  205. EXPR_COMMON_HEADER
  206. Expr* child;
  207. c11_sv spec;
  208. } FStringSpecExpr;
  209. void FStringSpecExpr__emit_(Expr* self_, Ctx* ctx) {
  210. FStringSpecExpr* self = (FStringSpecExpr*)self_;
  211. vtemit_(self->child, ctx);
  212. int index = Ctx__add_const_string(ctx, self->spec);
  213. Ctx__emit_(ctx, OP_FORMAT_STRING, index, self->line);
  214. }
  215. FStringSpecExpr* FStringSpecExpr__new(int line, Expr* child, c11_sv spec) {
  216. const static ExprVt Vt = {.emit_ = FStringSpecExpr__emit_, .dtor = UnaryExpr__dtor};
  217. static_assert_expr_size(FStringSpecExpr);
  218. FStringSpecExpr* self = PoolExpr_alloc();
  219. self->vt = &Vt;
  220. self->line = line;
  221. self->child = child;
  222. self->spec = spec;
  223. return self;
  224. }
  225. typedef struct RawStringExpr {
  226. EXPR_COMMON_HEADER
  227. c11_sv value;
  228. Opcode opcode;
  229. } RawStringExpr;
  230. void RawStringExpr__emit_(Expr* self_, Ctx* ctx) {
  231. RawStringExpr* self = (RawStringExpr*)self_;
  232. int index = Ctx__add_const_string(ctx, self->value);
  233. Ctx__emit_(ctx, self->opcode, index, self->line);
  234. }
  235. RawStringExpr* RawStringExpr__new(int line, c11_sv value, Opcode opcode) {
  236. const static ExprVt Vt = {.emit_ = RawStringExpr__emit_};
  237. static_assert_expr_size(RawStringExpr);
  238. RawStringExpr* self = PoolExpr_alloc();
  239. self->vt = &Vt;
  240. self->line = line;
  241. self->value = value;
  242. self->opcode = opcode;
  243. return self;
  244. }
  245. typedef struct ImagExpr {
  246. EXPR_COMMON_HEADER
  247. double value;
  248. } ImagExpr;
  249. void ImagExpr__emit_(Expr* self_, Ctx* ctx) {
  250. ImagExpr* self = (ImagExpr*)self_;
  251. py_TValue value;
  252. py_newfloat(&value, self->value);
  253. int index = Ctx__add_const(ctx, &value);
  254. Ctx__emit_(ctx, OP_LOAD_CONST, index, self->line);
  255. Ctx__emit_(ctx, OP_BUILD_IMAG, BC_NOARG, self->line);
  256. }
  257. ImagExpr* ImagExpr__new(int line, double value) {
  258. const static ExprVt Vt = {.emit_ = ImagExpr__emit_};
  259. static_assert_expr_size(ImagExpr);
  260. ImagExpr* self = PoolExpr_alloc();
  261. self->vt = &Vt;
  262. self->line = line;
  263. self->value = value;
  264. return self;
  265. }
  266. typedef struct LiteralExpr {
  267. EXPR_COMMON_HEADER
  268. const TokenValue* value;
  269. bool negated;
  270. } LiteralExpr;
  271. void LiteralExpr__emit_(Expr* self_, Ctx* ctx) {
  272. LiteralExpr* self = (LiteralExpr*)self_;
  273. switch(self->value->index) {
  274. case TokenValue_I64: {
  275. py_i64 val = self->value->_i64;
  276. if(self->negated) val = -val;
  277. Ctx__emit_int(ctx, val, self->line);
  278. break;
  279. }
  280. case TokenValue_F64: {
  281. py_TValue value;
  282. py_f64 val = self->value->_f64;
  283. if(self->negated) val = -val;
  284. py_newfloat(&value, val);
  285. int index = Ctx__add_const(ctx, &value);
  286. Ctx__emit_(ctx, OP_LOAD_CONST, index, self->line);
  287. break;
  288. }
  289. case TokenValue_STR: {
  290. assert(!self->negated);
  291. c11_sv sv = c11_string__sv(self->value->_str);
  292. int index = Ctx__add_const_string(ctx, sv);
  293. Ctx__emit_(ctx, OP_LOAD_CONST, index, self->line);
  294. break;
  295. }
  296. default: c11__unreachable();
  297. }
  298. }
  299. LiteralExpr* LiteralExpr__new(int line, const TokenValue* value) {
  300. const static ExprVt Vt = {.emit_ = LiteralExpr__emit_, .is_literal = true};
  301. static_assert_expr_size(LiteralExpr);
  302. LiteralExpr* self = PoolExpr_alloc();
  303. self->vt = &Vt;
  304. self->line = line;
  305. self->value = value;
  306. self->negated = false;
  307. return self;
  308. }
  309. typedef struct Literal0Expr {
  310. EXPR_COMMON_HEADER
  311. TokenIndex token;
  312. } Literal0Expr;
  313. void Literal0Expr__emit_(Expr* self_, Ctx* ctx) {
  314. Literal0Expr* self = (Literal0Expr*)self_;
  315. Opcode opcode;
  316. switch(self->token) {
  317. case TK_NONE: opcode = OP_LOAD_NONE; break;
  318. case TK_TRUE: opcode = OP_LOAD_TRUE; break;
  319. case TK_FALSE: opcode = OP_LOAD_FALSE; break;
  320. case TK_DOTDOTDOT: opcode = OP_LOAD_ELLIPSIS; break;
  321. default: c11__unreachable();
  322. }
  323. Ctx__emit_(ctx, opcode, BC_NOARG, self->line);
  324. }
  325. Literal0Expr* Literal0Expr__new(int line, TokenIndex token) {
  326. const static ExprVt Vt = {.emit_ = Literal0Expr__emit_};
  327. static_assert_expr_size(Literal0Expr);
  328. Literal0Expr* self = PoolExpr_alloc();
  329. self->vt = &Vt;
  330. self->line = line;
  331. self->token = token;
  332. return self;
  333. }
  334. typedef struct SliceExpr {
  335. EXPR_COMMON_HEADER
  336. Expr* start;
  337. Expr* stop;
  338. Expr* step;
  339. } SliceExpr;
  340. void SliceExpr__dtor(Expr* self_) {
  341. SliceExpr* self = (SliceExpr*)self_;
  342. vtdelete(self->start);
  343. vtdelete(self->stop);
  344. vtdelete(self->step);
  345. }
  346. void SliceExpr__emit_(Expr* self_, Ctx* ctx) {
  347. SliceExpr* self = (SliceExpr*)self_;
  348. if(self->start)
  349. vtemit_(self->start, ctx);
  350. else
  351. Ctx__emit_(ctx, OP_LOAD_NONE, BC_NOARG, self->line);
  352. if(self->stop)
  353. vtemit_(self->stop, ctx);
  354. else
  355. Ctx__emit_(ctx, OP_LOAD_NONE, BC_NOARG, self->line);
  356. if(self->step)
  357. vtemit_(self->step, ctx);
  358. else
  359. Ctx__emit_(ctx, OP_LOAD_NONE, BC_NOARG, self->line);
  360. Ctx__emit_(ctx, OP_BUILD_SLICE, BC_NOARG, self->line);
  361. }
  362. SliceExpr* SliceExpr__new(int line) {
  363. const static ExprVt Vt = {.dtor = SliceExpr__dtor, .emit_ = SliceExpr__emit_};
  364. static_assert_expr_size(SliceExpr);
  365. SliceExpr* self = PoolExpr_alloc();
  366. self->vt = &Vt;
  367. self->line = line;
  368. self->start = NULL;
  369. self->stop = NULL;
  370. self->step = NULL;
  371. return self;
  372. }
  373. typedef struct DictItemExpr {
  374. EXPR_COMMON_HEADER
  375. Expr* key;
  376. Expr* value;
  377. } DictItemExpr;
  378. static void DictItemExpr__dtor(Expr* self_) {
  379. DictItemExpr* self = (DictItemExpr*)self_;
  380. vtdelete(self->key);
  381. vtdelete(self->value);
  382. }
  383. static void DictItemExpr__emit_(Expr* self_, Ctx* ctx) {
  384. DictItemExpr* self = (DictItemExpr*)self_;
  385. vtemit_(self->key, ctx);
  386. vtemit_(self->value, ctx);
  387. }
  388. static DictItemExpr* DictItemExpr__new(int line) {
  389. const static ExprVt Vt = {.dtor = DictItemExpr__dtor, .emit_ = DictItemExpr__emit_};
  390. static_assert_expr_size(DictItemExpr);
  391. DictItemExpr* self = PoolExpr_alloc();
  392. self->vt = &Vt;
  393. self->line = line;
  394. self->key = NULL;
  395. self->value = NULL;
  396. return self;
  397. }
  398. // ListExpr, DictExpr, SetExpr, TupleExpr
  399. typedef struct SequenceExpr {
  400. EXPR_COMMON_HEADER
  401. Expr** items;
  402. int itemCount;
  403. Opcode opcode;
  404. } SequenceExpr;
  405. static void SequenceExpr__emit_(Expr* self_, Ctx* ctx) {
  406. SequenceExpr* self = (SequenceExpr*)self_;
  407. for(int i = 0; i < self->itemCount; i++) {
  408. Expr* item = self->items[i];
  409. vtemit_(item, ctx);
  410. }
  411. Ctx__emit_(ctx, self->opcode, self->itemCount, self->line);
  412. }
  413. void SequenceExpr__dtor(Expr* self_) {
  414. SequenceExpr* self = (SequenceExpr*)self_;
  415. for(int i = 0; i < self->itemCount; i++) {
  416. vtdelete(self->items[i]);
  417. }
  418. free(self->items);
  419. }
// Store TOS (an iterable) into a tuple target, e.g. `a, b = ...` or `a, *b = ...`.
// Returns false for an invalid assignment target (caller reports the error).
bool TupleExpr__emit_store(Expr* self_, Ctx* ctx) {
    SequenceExpr* self = (SequenceExpr*)self_;
    // TOS is an iterable
    // items may contain StarredExpr, we should check it
    int starred_i = -1; // index of the single starred target, or -1 if none
    for(int i = 0; i < self->itemCount; i++) {
        Expr* e = self->items[i];
        if(e->vt->is_starred) {
            if(((StarredExpr*)e)->level > 0) {
                if(starred_i == -1)
                    starred_i = i;
                else
                    return false; // multiple StarredExpr not allowed
            }
        }
    }
    if(starred_i == -1) {
        // Peephole: if the value on TOS was just built by OP_BUILD_TUPLE of the
        // same arity, revert it instead of building and immediately unpacking.
        Bytecode* prev = c11__at(Bytecode, &ctx->co->codes, ctx->co->codes.length - 1);
        if(prev->op == OP_BUILD_TUPLE && prev->arg == self->itemCount) {
            // build tuple and unpack it is meaningless
            Ctx__revert_last_emit_(ctx);
        } else {
            Ctx__emit_(ctx, OP_UNPACK_SEQUENCE, self->itemCount, self->line);
        }
    } else {
        // starred assignment target must be in a tuple
        if(self->itemCount == 1) return false;
        // starred assignment target must be the last one (differ from cpython)
        if(starred_i != self->itemCount - 1) return false;
        // a,*b = [1,2,3]
        // stack is [1,2,3] -> [1,[2,3]]
        Ctx__emit_(ctx, OP_UNPACK_EX, self->itemCount - 1, self->line);
    }
    // do reverse emit: unpacked values sit on the stack with the last item on
    // top, so store targets right-to-left.
    for(int i = self->itemCount - 1; i >= 0; i--) {
        Expr* e = self->items[i];
        bool ok = vtemit_store(e, ctx);
        if(!ok) return false;
    }
    return true;
}
  461. bool TupleExpr__emit_del(Expr* self_, Ctx* ctx) {
  462. SequenceExpr* self = (SequenceExpr*)self_;
  463. for(int i = 0; i < self->itemCount; i++) {
  464. Expr* e = self->items[i];
  465. bool ok = vtemit_del(e, ctx);
  466. if(!ok) return false;
  467. }
  468. return true;
  469. }
// Shared constructor for all sequence displays. The caller supplies the vtable
// and BUILD_* opcode; `items` slots are left uninitialized and must be filled
// by the caller before emit/dtor.
static SequenceExpr* SequenceExpr__new(int line, const ExprVt* vt, int count, Opcode opcode) {
    static_assert_expr_size(SequenceExpr);
    SequenceExpr* self = PoolExpr_alloc();
    self->vt = vt;
    self->line = line;
    self->opcode = opcode;
    // NOTE(review): malloc result is unchecked — presumably OOM aborts elsewhere; confirm
    self->items = malloc(sizeof(Expr*) * count);
    self->itemCount = count;
    return self;
}
  480. SequenceExpr* FStringExpr__new(int line, int count) {
  481. const static ExprVt ListExprVt = {.dtor = SequenceExpr__dtor, .emit_ = SequenceExpr__emit_};
  482. return SequenceExpr__new(line, &ListExprVt, count, OP_BUILD_STRING);
  483. }
  484. SequenceExpr* ListExpr__new(int line, int count) {
  485. const static ExprVt ListExprVt = {.dtor = SequenceExpr__dtor, .emit_ = SequenceExpr__emit_};
  486. return SequenceExpr__new(line, &ListExprVt, count, OP_BUILD_LIST);
  487. }
  488. SequenceExpr* DictExpr__new(int line, int count) {
  489. const static ExprVt DictExprVt = {.dtor = SequenceExpr__dtor, .emit_ = SequenceExpr__emit_};
  490. return SequenceExpr__new(line, &DictExprVt, count, OP_BUILD_DICT);
  491. }
  492. SequenceExpr* SetExpr__new(int line, int count) {
  493. const static ExprVt SetExprVt = {
  494. .dtor = SequenceExpr__dtor,
  495. .emit_ = SequenceExpr__emit_,
  496. };
  497. return SequenceExpr__new(line, &SetExprVt, count, OP_BUILD_SET);
  498. }
  499. SequenceExpr* TupleExpr__new(int line, int count) {
  500. const static ExprVt TupleExprVt = {.dtor = SequenceExpr__dtor,
  501. .emit_ = SequenceExpr__emit_,
  502. .is_tuple = true,
  503. .emit_store = TupleExpr__emit_store,
  504. .emit_del = TupleExpr__emit_del};
  505. return SequenceExpr__new(line, &TupleExprVt, count, OP_BUILD_TUPLE);
  506. }
// A comprehension (list/set/dict/generator form chosen by op0/op1).
typedef struct CompExpr {
    EXPR_COMMON_HEADER
    Expr* expr; // loop expr
    Expr* vars; // loop vars
    Expr* iter; // loop iter
    Expr* cond; // optional if condition
    Opcode op0; // emitted before the loop — presumably builds the empty container
    Opcode op1; // emitted per iteration — presumably appends/adds the element
} CompExpr;
void CompExpr__dtor(Expr* self_) {
    CompExpr* self = (CompExpr*)self_;
    vtdelete(self->expr);
    vtdelete(self->vars);
    vtdelete(self->iter);
    vtdelete(self->cond);
}
// Emit: container init, GET_ITER, then a FOR_LOOP block that stores the loop
// vars, optionally filters on `cond`, and accumulates `expr` via op1.
void CompExpr__emit_(Expr* self_, Ctx* ctx) {
    CompExpr* self = (CompExpr*)self_;
    Ctx__emit_(ctx, self->op0, 0, self->line);
    vtemit_(self->iter, ctx);
    Ctx__emit_(ctx, OP_GET_ITER, BC_NOARG, BC_KEEPLINE);
    int block = Ctx__enter_block(ctx, CodeBlockType_FOR_LOOP);
    int block_start = Ctx__emit_(ctx, OP_FOR_ITER, block, BC_KEEPLINE);
    bool ok = vtemit_store(self->vars, ctx);
    // this error occurs in `vars` instead of this line, but...nevermind
    assert(ok); // this should raise a SyntaxError, but we just assert it
    if(self->cond) {
        // skip accumulation when the condition is falsy
        vtemit_(self->cond, ctx);
        int patch = Ctx__emit_(ctx, OP_POP_JUMP_IF_FALSE, BC_NOARG, BC_KEEPLINE);
        vtemit_(self->expr, ctx);
        Ctx__emit_(ctx, self->op1, BC_NOARG, BC_KEEPLINE);
        Ctx__patch_jump(ctx, patch);
    } else {
        vtemit_(self->expr, ctx);
        Ctx__emit_(ctx, self->op1, BC_NOARG, BC_KEEPLINE);
    }
    // jump back to FOR_ITER for the next element
    Ctx__emit_jump(ctx, block_start, BC_KEEPLINE);
    Ctx__exit_block(ctx);
}
// Children are filled in by the parser after construction.
CompExpr* CompExpr__new(int line, Opcode op0, Opcode op1) {
    const static ExprVt Vt = {.dtor = CompExpr__dtor, .emit_ = CompExpr__emit_};
    static_assert_expr_size(CompExpr);
    CompExpr* self = PoolExpr_alloc();
    self->vt = &Vt;
    self->line = line;
    self->op0 = op0;
    self->op1 = op1;
    self->expr = NULL;
    self->vars = NULL;
    self->iter = NULL;
    self->cond = NULL;
    return self;
}
// A lambda expression. `index` refers to a function declaration compiled
// earlier (see exprLambda); at runtime OP_LOAD_FUNCTION loads it by index.
typedef struct LambdaExpr {
    EXPR_COMMON_HEADER
    int index;  // index of the compiled FuncDecl for this lambda
} LambdaExpr;

static void LambdaExpr__emit_(Expr* self_, Ctx* ctx) {
    LambdaExpr* self = (LambdaExpr*)self_;
    Ctx__emit_(ctx, OP_LOAD_FUNCTION, self->index, self->line);
}

// Create a LambdaExpr. No dtor in the vtable: it owns no child expressions.
LambdaExpr* LambdaExpr__new(int line, int index) {
    const static ExprVt Vt = {.emit_ = LambdaExpr__emit_};
    static_assert_expr_size(LambdaExpr);
    LambdaExpr* self = PoolExpr_alloc();
    self->vt = &Vt;
    self->line = line;
    self->index = index;
    return self;
}
// AndExpr, OrExpr
// Short-circuit logical operator. `opcode` is a conditional jump used to skip
// the rhs depending on the truthiness of the lhs.
typedef struct LogicBinaryExpr {
    EXPR_COMMON_HEADER
    Expr* lhs;
    Expr* rhs;
    Opcode opcode;  // the short-circuit jump opcode for `and`/`or`
} LogicBinaryExpr;

void LogicBinaryExpr__dtor(Expr* self_) {
    LogicBinaryExpr* self = (LogicBinaryExpr*)self_;
    vtdelete(self->lhs);
    vtdelete(self->rhs);
}

void LogicBinaryExpr__emit_(Expr* self_, Ctx* ctx) {
    LogicBinaryExpr* self = (LogicBinaryExpr*)self_;
    vtemit_(self->lhs, ctx);
    // jump over the rhs when the lhs already decides the result
    int patch = Ctx__emit_(ctx, self->opcode, BC_NOARG, self->line);
    vtemit_(self->rhs, ctx);
    Ctx__patch_jump(ctx, patch);
}

LogicBinaryExpr* LogicBinaryExpr__new(int line, Opcode opcode) {
    const static ExprVt Vt = {.emit_ = LogicBinaryExpr__emit_, .dtor = LogicBinaryExpr__dtor};
    static_assert_expr_size(LogicBinaryExpr);
    LogicBinaryExpr* self = PoolExpr_alloc();
    self->vt = &Vt;
    self->line = line;
    self->lhs = NULL;
    self->rhs = NULL;
    self->opcode = opcode;
    return self;
}
// A parenthesized expression `(expr)`; every operation delegates to the child.
typedef struct GroupedExpr {
    EXPR_COMMON_HEADER
    Expr* child;
} GroupedExpr;

void GroupedExpr__dtor(Expr* self_) {
    GroupedExpr* self = (GroupedExpr*)self_;
    vtdelete(self->child);
}

void GroupedExpr__emit_(Expr* self_, Ctx* ctx) {
    GroupedExpr* self = (GroupedExpr*)self_;
    vtemit_(self->child, ctx);
}

// `del (x)` — forwarded; returns false if the child is not deletable
bool GroupedExpr__emit_del(Expr* self_, Ctx* ctx) {
    GroupedExpr* self = (GroupedExpr*)self_;
    return vtemit_del(self->child, ctx);
}

// `(x) = val` — forwarded; returns false if the child is not assignable
bool GroupedExpr__emit_store(Expr* self_, Ctx* ctx) {
    GroupedExpr* self = (GroupedExpr*)self_;
    return vtemit_store(self->child, ctx);
}

GroupedExpr* GroupedExpr__new(int line, Expr* child) {
    const static ExprVt Vt = {.dtor = GroupedExpr__dtor,
                              .emit_ = GroupedExpr__emit_,
                              .emit_del = GroupedExpr__emit_del,
                              .emit_store = GroupedExpr__emit_store};
    static_assert_expr_size(GroupedExpr);
    GroupedExpr* self = PoolExpr_alloc();
    self->vt = &Vt;
    self->line = line;
    self->child = child;
    return self;
}
// A binary operation `lhs op rhs`. `inplace` marks augmented assignment
// (e.g. `a += b`), in which case the lhs is emitted via its inplace hook.
typedef struct BinaryExpr {
    EXPR_COMMON_HEADER
    Expr* lhs;
    Expr* rhs;
    TokenIndex op;  // operator token
    bool inplace;   // true for augmented assignment forms
} BinaryExpr;

static void BinaryExpr__dtor(Expr* self_) {
    BinaryExpr* self = (BinaryExpr*)self_;
    vtdelete(self->lhs);
    vtdelete(self->rhs);
}

// Map a comparison token to its dunder name; returns 0 for any other token.
static py_Name cmp_token2name(TokenIndex token) {
    switch(token) {
        case TK_LT: return __lt__;
        case TK_LE: return __le__;
        case TK_EQ: return __eq__;
        case TK_NE: return __ne__;
        case TK_GT: return __gt__;
        case TK_GE: return __ge__;
        default: return 0;
    }
}

// True when `e` is a BinaryExpr whose operator is a comparison; used to
// detect chained comparisons such as `a < b < c`.
#define is_compare_expr(e) ((e)->vt->is_binary && cmp_token2name(((BinaryExpr*)(e))->op))
// Emit one link of a chained comparison, recursing into the lhs chain first.
// Each link leaves [operand, RES] on the stack and records the index of its
// short-circuit jump (taken when RES is falsy) into `jmps` for later patching.
static void _emit_compare(BinaryExpr* self, Ctx* ctx, c11_vector* jmps) {
    if(is_compare_expr(self->lhs)) {
        _emit_compare((BinaryExpr*)self->lhs, ctx, jmps);
    } else {
        vtemit_(self->lhs, ctx);  // [a]
    }
    vtemit_(self->rhs, ctx);                              // [a, b]
    Ctx__emit_(ctx, OP_DUP_TOP, BC_NOARG, self->line);    // [a, b, b]
    Ctx__emit_(ctx, OP_ROT_THREE, BC_NOARG, self->line);  // [b, a, b]
    Ctx__emit_(ctx, OP_BINARY_OP, cmp_token2name(self->op), self->line);
    // [b, RES]
    int index = Ctx__emit_(ctx, OP_SHORTCUT_IF_FALSE_OR_POP, BC_NOARG, self->line);
    c11_vector__push(int, jmps, index);
}
  677. static void BinaryExpr__emit_(Expr* self_, Ctx* ctx) {
  678. BinaryExpr* self = (BinaryExpr*)self_;
  679. c11_vector /*T=int*/ jmps;
  680. c11_vector__ctor(&jmps, sizeof(int));
  681. if(cmp_token2name(self->op) && is_compare_expr(self->lhs)) {
  682. // (a < b) < c
  683. BinaryExpr* e = (BinaryExpr*)self->lhs;
  684. _emit_compare(e, ctx, &jmps);
  685. // [b, RES]
  686. } else {
  687. // (1 + 2) < c
  688. if(self->inplace) {
  689. vtemit_inplace(self->lhs, ctx);
  690. } else {
  691. vtemit_(self->lhs, ctx);
  692. }
  693. }
  694. vtemit_(self->rhs, ctx);
  695. Opcode opcode = OP_BINARY_OP;
  696. uint16_t arg = BC_NOARG;
  697. switch(self->op) {
  698. case TK_ADD: arg = __add__ | (__radd__ << 8); break;
  699. case TK_SUB: arg = __sub__ | (__rsub__ << 8); break;
  700. case TK_MUL: arg = __mul__ | (__rmul__ << 8); break;
  701. case TK_DIV: arg = __truediv__ | (__rtruediv__ << 8); break;
  702. case TK_FLOORDIV: arg = __floordiv__ | (__rfloordiv__ << 8); break;
  703. case TK_MOD: arg = __mod__ | (__rmod__ << 8); break;
  704. case TK_POW: arg = __pow__ | (__rpow__ << 8); break;
  705. case TK_LT: arg = __lt__ | (__gt__ << 8); break;
  706. case TK_LE: arg = __le__ | (__ge__ << 8); break;
  707. case TK_EQ: arg = __eq__ | (__eq__ << 8); break;
  708. case TK_NE: arg = __ne__ | (__ne__ << 8); break;
  709. case TK_GT: arg = __gt__ | (__lt__ << 8); break;
  710. case TK_GE: arg = __ge__ | (__le__ << 8); break;
  711. case TK_IN:
  712. opcode = OP_CONTAINS_OP;
  713. arg = 0;
  714. break;
  715. case TK_NOT_IN:
  716. opcode = OP_CONTAINS_OP;
  717. arg = 1;
  718. break;
  719. case TK_IS:
  720. opcode = OP_IS_OP;
  721. arg = 0;
  722. break;
  723. case TK_IS_NOT:
  724. opcode = OP_IS_OP;
  725. arg = 1;
  726. break;
  727. case TK_LSHIFT: arg = __lshift__; break;
  728. case TK_RSHIFT: arg = __rshift__; break;
  729. case TK_AND: arg = __and__; break;
  730. case TK_OR: arg = __or__; break;
  731. case TK_XOR: arg = __xor__; break;
  732. case TK_DECORATOR: arg = __matmul__; break;
  733. default: assert(false);
  734. }
  735. Ctx__emit_(ctx, opcode, arg, self->line);
  736. for(int i = 0; i < jmps.length; i++) {
  737. Ctx__patch_jump(ctx, c11__getitem(int, &jmps, i));
  738. }
  739. c11_vector__dtor(&jmps);
  740. }
// Allocate a BinaryExpr; `is_binary` in the vtable enables is_compare_expr()
// chained-comparison detection. Operand slots are filled by the parser.
BinaryExpr* BinaryExpr__new(int line, TokenIndex op, bool inplace) {
    const static ExprVt Vt = {.emit_ = BinaryExpr__emit_,
                              .dtor = BinaryExpr__dtor,
                              .is_binary = true};
    static_assert_expr_size(BinaryExpr);
    BinaryExpr* self = PoolExpr_alloc();
    self->vt = &Vt;
    self->line = line;
    self->lhs = NULL;
    self->rhs = NULL;
    self->op = op;
    self->inplace = inplace;
    return self;
}
// A conditional expression `true_expr if cond else false_expr`.
typedef struct TernaryExpr {
    EXPR_COMMON_HEADER
    Expr* cond;
    Expr* true_expr;
    Expr* false_expr;
} TernaryExpr;

void TernaryExpr__dtor(Expr* self_) {
    TernaryExpr* self = (TernaryExpr*)self_;
    vtdelete(self->cond);
    vtdelete(self->true_expr);
    vtdelete(self->false_expr);
}

// cond; jump-if-false over true branch; true branch; jump over false branch
void TernaryExpr__emit_(Expr* self_, Ctx* ctx) {
    TernaryExpr* self = (TernaryExpr*)self_;
    vtemit_(self->cond, ctx);
    int patch = Ctx__emit_(ctx, OP_POP_JUMP_IF_FALSE, BC_NOARG, self->cond->line);
    vtemit_(self->true_expr, ctx);
    int patch_2 = Ctx__emit_(ctx, OP_JUMP_FORWARD, BC_NOARG, self->true_expr->line);
    Ctx__patch_jump(ctx, patch);
    vtemit_(self->false_expr, ctx);
    Ctx__patch_jump(ctx, patch_2);
}

TernaryExpr* TernaryExpr__new(int line) {
    const static ExprVt Vt = {.dtor = TernaryExpr__dtor, .emit_ = TernaryExpr__emit_};
    static_assert_expr_size(TernaryExpr);
    TernaryExpr* self = PoolExpr_alloc();
    self->vt = &Vt;
    self->line = line;
    self->cond = NULL;
    self->true_expr = NULL;
    self->false_expr = NULL;
    return self;
}
// A subscript expression `lhs[rhs]`, supporting load/store/delete and the
// inplace protocol used by augmented assignment (`a[b] += v`).
typedef struct SubscrExpr {
    EXPR_COMMON_HEADER
    Expr* lhs;  // container
    Expr* rhs;  // index/key
} SubscrExpr;

void SubscrExpr__dtor(Expr* self_) {
    SubscrExpr* self = (SubscrExpr*)self_;
    vtdelete(self->lhs);
    vtdelete(self->rhs);
}

// load: push a, b; OP_LOAD_SUBSCR replaces them with a[b]
void SubscrExpr__emit_(Expr* self_, Ctx* ctx) {
    SubscrExpr* self = (SubscrExpr*)self_;
    vtemit_(self->lhs, ctx);
    vtemit_(self->rhs, ctx);
    Ctx__emit_(ctx, OP_LOAD_SUBSCR, BC_NOARG, self->line);
}

// store: the assigned value is already on the stack below [a, b]
bool SubscrExpr__emit_store(Expr* self_, Ctx* ctx) {
    SubscrExpr* self = (SubscrExpr*)self_;
    vtemit_(self->lhs, ctx);
    vtemit_(self->rhs, ctx);
    Ctx__emit_(ctx, OP_STORE_SUBSCR, BC_NOARG, self->line);
    return true;
}

// inplace prelude: leave [a, b, a[b]] so the operator can consume a[b]
// while keeping a and b for the subsequent istore
void SubscrExpr__emit_inplace(Expr* self_, Ctx* ctx) {
    SubscrExpr* self = (SubscrExpr*)self_;
    vtemit_(self->lhs, ctx);
    vtemit_(self->rhs, ctx);
    Ctx__emit_(ctx, OP_DUP_TOP_TWO, BC_NOARG, self->line);
    Ctx__emit_(ctx, OP_LOAD_SUBSCR, BC_NOARG, self->line);
}

bool SubscrExpr__emit_istore(Expr* self_, Ctx* ctx) {
    SubscrExpr* self = (SubscrExpr*)self_;
    // [a, b, val] -> [val, a, b]
    Ctx__emit_(ctx, OP_ROT_THREE, BC_NOARG, self->line);
    Ctx__emit_(ctx, OP_STORE_SUBSCR, BC_NOARG, self->line);
    return true;
}

// `del a[b]`
bool SubscrExpr__emit_del(Expr* self_, Ctx* ctx) {
    SubscrExpr* self = (SubscrExpr*)self_;
    vtemit_(self->lhs, ctx);
    vtemit_(self->rhs, ctx);
    Ctx__emit_(ctx, OP_DELETE_SUBSCR, BC_NOARG, self->line);
    return true;
}

SubscrExpr* SubscrExpr__new(int line) {
    const static ExprVt Vt = {
        .dtor = SubscrExpr__dtor,
        .emit_ = SubscrExpr__emit_,
        .emit_store = SubscrExpr__emit_store,
        .emit_inplace = SubscrExpr__emit_inplace,
        .emit_istore = SubscrExpr__emit_istore,
        .emit_del = SubscrExpr__emit_del,
        .is_subscr = true,
    };
    static_assert_expr_size(SubscrExpr);
    SubscrExpr* self = PoolExpr_alloc();
    self->vt = &Vt;
    self->line = line;
    self->lhs = NULL;
    self->rhs = NULL;
    return self;
}
// An attribute access `child.name`, supporting load/store/delete and the
// inplace protocol used by augmented assignment (`a.x += v`).
typedef struct AttribExpr {
    EXPR_COMMON_HEADER
    Expr* child;   // object whose attribute is accessed
    py_Name name;  // interned attribute name
} AttribExpr;

void AttribExpr__dtor(Expr* self_) {
    AttribExpr* self = (AttribExpr*)self_;
    vtdelete(self->child);
}

void AttribExpr__emit_(Expr* self_, Ctx* ctx) {
    AttribExpr* self = (AttribExpr*)self_;
    vtemit_(self->child, ctx);
    Ctx__emit_(ctx, OP_LOAD_ATTR, self->name, self->line);
}

// `del a.x`
bool AttribExpr__emit_del(Expr* self_, Ctx* ctx) {
    AttribExpr* self = (AttribExpr*)self_;
    vtemit_(self->child, ctx);
    Ctx__emit_(ctx, OP_DELETE_ATTR, self->name, self->line);
    return true;
}

// store: the assigned value is already on the stack below the object
bool AttribExpr__emit_store(Expr* self_, Ctx* ctx) {
    AttribExpr* self = (AttribExpr*)self_;
    vtemit_(self->child, ctx);
    Ctx__emit_(ctx, OP_STORE_ATTR, self->name, self->line);
    return true;
}

// inplace prelude: leave [a, a.x] so the operator can consume a.x while
// keeping a for the subsequent istore
void AttribExpr__emit_inplace(Expr* self_, Ctx* ctx) {
    AttribExpr* self = (AttribExpr*)self_;
    vtemit_(self->child, ctx);
    Ctx__emit_(ctx, OP_DUP_TOP, BC_NOARG, self->line);
    Ctx__emit_(ctx, OP_LOAD_ATTR, self->name, self->line);
}

bool AttribExpr__emit_istore(Expr* self_, Ctx* ctx) {
    // [a, val] -> [val, a]
    AttribExpr* self = (AttribExpr*)self_;
    Ctx__emit_(ctx, OP_ROT_TWO, BC_NOARG, self->line);
    Ctx__emit_(ctx, OP_STORE_ATTR, self->name, self->line);
    return true;
}

// Allocate an AttribExpr; `is_attrib` lets CallExpr use the fast-call path.
AttribExpr* AttribExpr__new(int line, Expr* child, py_Name name) {
    const static ExprVt Vt = {.emit_ = AttribExpr__emit_,
                              .emit_del = AttribExpr__emit_del,
                              .emit_store = AttribExpr__emit_store,
                              .emit_inplace = AttribExpr__emit_inplace,
                              .emit_istore = AttribExpr__emit_istore,
                              .dtor = AttribExpr__dtor,
                              .is_attrib = true};
    static_assert_expr_size(AttribExpr);
    AttribExpr* self = PoolExpr_alloc();
    self->vt = &Vt;
    self->line = line;
    self->child = child;
    self->name = name;
    return self;
}
// One keyword argument `key=val` of a call expression.
typedef struct CallExprKwArg {
    py_Name key;
    Expr* val;
} CallExprKwArg;

// A call expression `callable(args..., kwargs...)`.
typedef struct CallExpr {
    EXPR_COMMON_HEADER
    Expr* callable;
    c11_vector /*T=Expr* */ args;
    // **a will be interpreted as a special keyword argument: {{0}: a}
    c11_vector /*T=CallExprKwArg */ kwargs;
} CallExpr;

void CallExpr__dtor(Expr* self_) {
    CallExpr* self = (CallExpr*)self_;
    vtdelete(self->callable);
    c11__foreach(Expr*, &self->args, e) vtdelete(*e);
    c11__foreach(CallExprKwArg, &self->kwargs, e) vtdelete(e->val);
    c11_vector__dtor(&self->args);
    c11_vector__dtor(&self->kwargs);
}

// Emit a call: load callable (method fast path when possible), push
// positional args, push (key, value) pairs for kwargs, then OP_CALL with
// (KWARGC << 8) | ARGC — or OP_CALL_VARGS when any argument is starred.
void CallExpr__emit_(Expr* self_, Ctx* ctx) {
    CallExpr* self = (CallExpr*)self_;
    bool vargs = false;    // whether there is *args as input
    bool vkwargs = false;  // whether there is **kwargs as input
    c11__foreach(Expr*, &self->args, e) {
        if((*e)->vt->is_starred) vargs = true;
    }
    c11__foreach(CallExprKwArg, &self->kwargs, e) {
        if(e->val->vt->is_starred) vkwargs = true;
    }
    // if callable is a AttrExpr, we should try to use `fast_call` instead of use `boundmethod`
    if(self->callable->vt->is_attrib) {
        AttribExpr* p = (AttribExpr*)self->callable;
        vtemit_(p->child, ctx);
        Ctx__emit_(ctx, OP_LOAD_METHOD, p->name, p->line);
    } else {
        vtemit_(self->callable, ctx);
        Ctx__emit_(ctx, OP_LOAD_NULL, BC_NOARG, BC_KEEPLINE);
    }
    Opcode opcode = OP_CALL;
    if(vargs || vkwargs) {
        // in this case, there is at least one *args or **kwargs as StarredExpr
        // OP_CALL_VARGS needs to unpack them via __vectorcall_buffer
        opcode = OP_CALL_VARGS;
    }
    c11__foreach(Expr*, &self->args, e) { vtemit_(*e, ctx); }
    // each kwarg is pushed as an integer key followed by its value
    c11__foreach(CallExprKwArg, &self->kwargs, e) {
        Ctx__emit_int(ctx, e->key, self->line);
        vtemit_(e->val, ctx);
    }
    int KWARGC = self->kwargs.length;
    int ARGC = self->args.length;
    assert(KWARGC < 256 && ARGC < 256);  // both counts must fit in one byte
    Ctx__emit_(ctx, opcode, (KWARGC << 8) | ARGC, self->line);
}

CallExpr* CallExpr__new(int line, Expr* callable) {
    const static ExprVt Vt = {.dtor = CallExpr__dtor, .emit_ = CallExpr__emit_};
    static_assert_expr_size(CallExpr);
    CallExpr* self = PoolExpr_alloc();
    self->vt = &Vt;
    self->line = line;
    self->callable = callable;
    c11_vector__ctor(&self->args, sizeof(Expr*));
    c11_vector__ctor(&self->kwargs, sizeof(CallExprKwArg));
    return self;
}
/* context.c */
// Initialize a code-emit context for one CodeObject (module body, function
// body, or class body). `func` is NULL for non-function scopes; `level` is
// the nesting depth of this context in the compiler's context stack.
static void Ctx__ctor(Ctx* self, CodeObject* co, FuncDecl* func, int level) {
    self->co = co;
    self->func = func;
    self->level = level;
    self->curr_iblock = 0;
    self->is_compiling_class = false;
    c11_vector__ctor(&self->s_expr, sizeof(Expr*));
    c11_smallmap_n2i__ctor(&self->global_names);
    c11_smallmap_s2n__ctor(&self->co_consts_string_dedup_map);
}

// Release a context, destroying any expressions still on the expr stack.
static void Ctx__dtor(Ctx* self) {
    // clean the expr stack
    for(int i = 0; i < self->s_expr.length; i++) {
        vtdelete(c11__getitem(Expr*, &self->s_expr, i));
    }
    c11_vector__dtor(&self->s_expr);
    c11_smallmap_n2i__dtor(&self->global_names);
    c11_smallmap_s2n__dtor(&self->co_consts_string_dedup_map);
}
// Emit the cleanup needed before a break/continue leaves its enclosing
// blocks, walking from the current block outward to the nearest loop:
// WITH blocks pop their context object, EXCEPT/FINALLY blocks emit their
// end-handling opcodes, and a FOR loop pops its iterator (on break only).
// Returns the index of the enclosing loop block, or -1 if there is none.
static int Ctx__prepare_loop_divert(Ctx* self, int line, bool is_break) {
    int index = self->curr_iblock;
    while(index >= 0) {
        CodeBlock* block = c11__at(CodeBlock, &self->co->blocks, index);
        switch(block->type) {
            case CodeBlockType_WHILE_LOOP: return index;
            case CodeBlockType_FOR_LOOP: {
                // break discards the loop iterator; continue keeps it
                if(is_break) Ctx__emit_(self, OP_POP_TOP, BC_NOARG, line);
                return index;
            }
            case CodeBlockType_WITH: {
                Ctx__emit_(self, OP_POP_TOP, BC_NOARG, line);
                break;
            }
            case CodeBlockType_EXCEPT: {
                Ctx__emit_(self, OP_END_EXC_HANDLING, 1, line);
                break;
            }
            case CodeBlockType_FINALLY: {
                Ctx__emit_(self, OP_END_FINALLY, 1, line);
                break;
            }
            default: break;
        }
        index = block->parent;
    }
    return index;
}
// Open a new code block of the given type at the current bytecode offset;
// returns its index. The two trailing -1 fields (end/end2) are patched when
// the block is exited or in pop_context().
static int Ctx__enter_block(Ctx* self, CodeBlockType type) {
    CodeBlock block = {type, self->curr_iblock, self->co->codes.length, -1, -1};
    c11_vector__push(CodeBlock, &self->co->blocks, block);
    self->curr_iblock = self->co->blocks.length - 1;
    return self->curr_iblock;
}

// Close the current block, recording its end offset, and pop back to its parent.
static void Ctx__exit_block(Ctx* self) {
    CodeBlock* block = c11__at(CodeBlock, &self->co->blocks, self->curr_iblock);
    block->end = self->co->codes.length;
    self->curr_iblock = block->parent;
    assert(self->curr_iblock >= 0);  // the root block is never exited
}
// Apply `count` decorators from the expression stack to the object on top of
// the runtime stack: each decorator is called with the object as its single
// argument and the result replaces the object (innermost decorator first).
static void Ctx__s_emit_decorators(Ctx* self, int count) {
    if(count == 0) return;
    assert(Ctx__s_size(self) >= count);
    // [obj]
    for(int i = 0; i < count; i++) {
        Expr* deco = Ctx__s_popx(self);
        vtemit_(deco, self);                                    // [obj, f]
        Ctx__emit_(self, OP_ROT_TWO, BC_NOARG, deco->line);     // [f, obj]
        Ctx__emit_(self, OP_LOAD_NULL, BC_NOARG, BC_KEEPLINE);  // [f, obj, NULL]
        Ctx__emit_(self, OP_ROT_TWO, BC_NOARG, BC_KEEPLINE);    // [obj, NULL, f]
        Ctx__emit_(self, OP_CALL, 1, deco->line);               // [obj]
        vtdelete(deco);
    }
}
// Append one instruction to the code object; returns its index.
// `is_virtual` marks compiler-generated instructions (e.g. the guard return
// added in pop_context()) in the per-instruction metadata.
static int Ctx__emit_virtual(Ctx* self, Opcode opcode, uint16_t arg, int line, bool is_virtual) {
    Bytecode bc = {(uint8_t)opcode, arg};
    BytecodeEx bcx = {line, is_virtual, self->curr_iblock};
    c11_vector__push(Bytecode, &self->co->codes, bc);
    c11_vector__push(BytecodeEx, &self->co->codes_ex, bcx);
    int i = self->co->codes.length - 1;
    BytecodeEx* codes_ex = (BytecodeEx*)self->co->codes_ex.data;
    // BC_KEEPLINE: inherit the previous instruction's line number (1 at start)
    if(line == BC_KEEPLINE) { codes_ex[i].lineno = i >= 1 ? codes_ex[i - 1].lineno : 1; }
    return i;
}

// Append one (non-virtual) instruction; returns its index.
static int Ctx__emit_(Ctx* self, Opcode opcode, uint16_t arg, int line) {
    return Ctx__emit_virtual(self, opcode, arg, line, false);
}

// Remove the most recently emitted instruction and its metadata.
static void Ctx__revert_last_emit_(Ctx* self) {
    c11_vector__pop(&self->co->codes);
    c11_vector__pop(&self->co->codes_ex);
}

// Emit a load of an integer constant, using OP_LOAD_SMALL_INT when the value
// fits in 16 bits and a pooled constant otherwise; returns the instruction index.
static int Ctx__emit_int(Ctx* self, int64_t value, int line) {
    if((int16_t)value == value) {
        return Ctx__emit_(self, OP_LOAD_SMALL_INT, (uint16_t)value, line);
    } else {
        py_TValue tmp;
        py_newint(&tmp, value);
        return Ctx__emit_(self, OP_LOAD_CONST, Ctx__add_const(self, &tmp), line);
    }
}

// Point the jump at `index` to the current end of the bytecode stream.
static void Ctx__patch_jump(Ctx* self, int index) {
    Bytecode* co_codes = (Bytecode*)self->co->codes.data;
    int target = self->co->codes.length;
    Bytecode__set_signed_arg(&co_codes[index], target - index);
}

// Emit an unconditional jump to an already-known target (a relative offset).
static void Ctx__emit_jump(Ctx* self, int target, int line) {
    int index = Ctx__emit_(self, OP_JUMP_FORWARD, BC_NOARG, line);
    // should place after Ctx__emit_ because of realloc
    Bytecode* co_codes = (Bytecode*)self->co->codes.data;
    Bytecode__set_signed_arg(&co_codes[index], target - index);
}
// Register `name` as a local variable of the code object; returns its slot.
static int Ctx__add_varname(Ctx* self, py_Name name) {
    // PK_MAX_CO_VARNAMES will be checked when pop_context(), not here
    return CodeObject__add_varname(self->co, name);
}

// Intern a string constant in co->consts, deduplicating equal strings via
// the per-context map; returns the constant's index.
static int Ctx__add_const_string(Ctx* self, c11_sv key) {
    uint16_t* val = c11_smallmap_s2n__try_get(&self->co_consts_string_dedup_map, key);
    if(val) {
        return *val;
    } else {
        py_TValue tmp;
        py_newstrv(&tmp, key);
        c11_vector__push(py_TValue, &self->co->consts, tmp);
        int index = self->co->consts.length - 1;
        // key the map by the pooled string's own storage so it outlives `key`
        c11_smallmap_s2n__set(&self->co_consts_string_dedup_map,
                              c11_string__sv(PyObject__userdata(tmp._obj)),
                              index);
        return index;
    }
}

// Append a non-string constant; strings must go through Ctx__add_const_string
// so they are deduplicated.
static int Ctx__add_const(Ctx* self, py_Ref v) {
    assert(v->type != tp_str);
    c11_vector__push(py_TValue, &self->co->consts, *v);
    return self->co->consts.length - 1;
}
// Emit a name store appropriate for the resolved scope: fast local slot,
// known global, or late-bound name lookup (dynamic sources).
static void Ctx__emit_store_name(Ctx* self, NameScope scope, py_Name name, int line) {
    switch(scope) {
        case NAME_LOCAL: Ctx__emit_(self, OP_STORE_FAST, Ctx__add_varname(self, name), line); break;
        case NAME_GLOBAL: Ctx__emit_(self, OP_STORE_GLOBAL, name, line); break;
        case NAME_GLOBAL_UNKNOWN: Ctx__emit_(self, OP_STORE_NAME, name, line); break;
        default: c11__unreachable();
    }
}
  1113. // emit top -> pop -> delete
  1114. static void Ctx__s_emit_top(Ctx* self) {
  1115. assert(self->s_expr.length);
  1116. Expr* top = c11_vector__back(Expr*, &self->s_expr);
  1117. vtemit_(top, self);
  1118. vtdelete(top);
  1119. c11_vector__pop(&self->s_expr);
  1120. }
  1121. // push
  1122. static void Ctx__s_push(Ctx* self, Expr* expr) { c11_vector__push(Expr*, &self->s_expr, expr); }
  1123. // top
  1124. static Expr* Ctx__s_top(Ctx* self) {
  1125. assert(self->s_expr.length);
  1126. return c11_vector__back(Expr*, &self->s_expr);
  1127. }
  1128. // size
  1129. static int Ctx__s_size(Ctx* self) { return self->s_expr.length; }
  1130. // pop -> delete
  1131. static void Ctx__s_pop(Ctx* self) {
  1132. assert(self->s_expr.length);
  1133. Expr* top = c11_vector__back(Expr*, &self->s_expr);
  1134. vtdelete(top);
  1135. c11_vector__pop(&self->s_expr);
  1136. }
  1137. // pop move
  1138. static Expr* Ctx__s_popx(Ctx* self) {
  1139. assert(self->s_expr.length);
  1140. Expr* top = c11_vector__back(Expr*, &self->s_expr);
  1141. c11_vector__pop(&self->s_expr);
  1142. return top;
  1143. }
/* compiler.c */
typedef struct Compiler Compiler;
// A Pratt-parser callback; returns an Error* on failure, NULL on success.
typedef Error* (*PrattCallback)(Compiler* self);
// One parse rule per token kind for Pratt (precedence-climbing) parsing.
typedef struct PrattRule {
    PrattCallback prefix;        // handler when the token starts an expression
    PrattCallback infix;         // handler when the token follows an expression
    enum Precedence precedence;  // binding power of the infix form
} PrattRule;
// rule table indexed by token kind; populated later in this file
const static PrattRule rules[TK__COUNT__];
typedef struct Compiler {
    SourceData_ src;  // weakref
    Token* tokens;           // owned token array (freed in Compiler__dtor)
    int tokens_length;
    int i;  // current token index
    c11_vector /*T=CodeEmitContext*/ contexts;  // stack of code-emit contexts
} Compiler;
// Initialize a compiler over a pre-lexed token array; takes ownership of `tokens`.
static void Compiler__ctor(Compiler* self, SourceData_ src, Token* tokens, int tokens_length) {
    self->src = src;
    self->tokens = tokens;
    self->tokens_length = tokens_length;
    self->i = 0;
    c11_vector__ctor(&self->contexts, sizeof(Ctx));
}

// Release the compiler: frees tokens (including their owned string payloads)
// and destroys any contexts left on the stack (e.g. after an error).
static void Compiler__dtor(Compiler* self) {
    // free tokens
    for(int i = 0; i < self->tokens_length; i++) {
        if(self->tokens[i].value.index == TokenValue_STR) {
            // free internal string
            c11_string__delete(self->tokens[i].value._str);
        }
    }
    free(self->tokens);
    // free contexts
    c11__foreach(Ctx, &self->contexts, ctx) Ctx__dtor(ctx);
    c11_vector__dtor(&self->contexts);
}
/**************************************/
/* Parsing helper macros. All of them assume a `Compiler* self` in scope. */
// token at absolute index i
#define tk(i) (&self->tokens[i])
// the most recently consumed token
#define prev() (&self->tokens[self->i - 1])
// the current (not yet consumed) token
#define curr() (&self->tokens[self->i])
// one token of lookahead
#define next() (&self->tokens[self->i + 1])
// consume the current token unconditionally
#define advance() self->i++
// compile mode of the source being compiled
#define mode() self->src->mode
// the innermost code-emit context
#define ctx() (&c11_vector__back(Ctx, &self->contexts))
#define match_newlines() match_newlines_impl(self)
// consume a token of the expected kind or return a SyntaxError
#define consume(expected) \
    if(!match(expected)) \
        return SyntaxError(self, \
                           "expected '%s', got '%s'", \
                           TokenSymbols[expected], \
                           TokenSymbols[curr()->type]);
// consume an end-of-statement or return a SyntaxError
#define consume_end_stmt() \
    if(!match_end_stmt(self)) return SyntaxError(self, "expected statement end")
// propagate an Error* from a sub-parse; requires `Error* err` in scope
#define check(B) \
    if((err = B)) return err
  1199. static NameScope name_scope(Compiler* self) {
  1200. NameScope s = self->contexts.length > 1 ? NAME_LOCAL : NAME_GLOBAL;
  1201. if(self->src->is_dynamic && s == NAME_GLOBAL) s = NAME_GLOBAL_UNKNOWN;
  1202. return s;
  1203. }
// Build a heap-allocated SyntaxError with a printf-style message, attributed
// to the current token (or the previous one when the token stream is exhausted).
// The error keeps a reference to the source data for later reporting.
Error* SyntaxError(Compiler* self, const char* fmt, ...) {
    Error* err = malloc(sizeof(Error));
    err->src = self->src;
    PK_INCREF(self->src);
    Token* t = self->i == self->tokens_length ? prev() : curr();
    err->lineno = t->line;
    va_list args;
    va_start(args, fmt);
    vsnprintf(err->msg, sizeof(err->msg), fmt, args);
    va_end(args);
    return err;
}
/* Matchers */
// True if the current token can begin an expression (it has a prefix rule);
// a leading ':' only counts when slice syntax is allowed.
static bool is_expression(Compiler* self, bool allow_slice) {
    PrattCallback prefix = rules[curr()->type].prefix;
    return prefix && (allow_slice || curr()->type != TK_COLON);
}

// consume the current token iff it is of the expected kind; truthy on match
#define match(expected) (curr()->type == expected ? (++self->i) : 0)

// Consume any run of newline tokens; returns true if at least one was consumed.
static bool match_newlines_impl(Compiler* self) {
    bool consumed = false;
    if(curr()->type == TK_EOL) {
        while(curr()->type == TK_EOL)
            advance();
        consumed = true;
    }
    return consumed;
}

// Consume a statement terminator: `;` (plus trailing newlines), a newline
// run, or an implicit end at EOF/DEDENT. Returns false if none is present.
static bool match_end_stmt(Compiler* self) {
    if(match(TK_SEMICOLON)) {
        match_newlines();
        return true;
    }
    if(match_newlines() || curr()->type == TK_EOF) return true;
    if(curr()->type == TK_DEDENT) return true;
    return false;
}
/* Expression */
/// Parse an expression and push it onto the stack.
// Standard Pratt parsing: dispatch the prefix rule for the leading token,
// then repeatedly apply infix rules while the next operator binds at least
// as tightly as `precedence`. `allow_slice` permits a bare ':' (slices).
static Error* parse_expression(Compiler* self, int precedence, bool allow_slice) {
    PrattCallback prefix = rules[curr()->type].prefix;
    if(!prefix || (curr()->type == TK_COLON && !allow_slice)) {
        return SyntaxError(self, "expected an expression, got %s", TokenSymbols[curr()->type]);
    }
    advance();
    Error* err;
    check(prefix(self));
    while(rules[curr()->type].precedence >= precedence &&
          (allow_slice || curr()->type != TK_COLON)) {
        TokenIndex op = curr()->type;
        advance();
        PrattCallback infix = rules[op].infix;
        if(infix == NULL) {
            return SyntaxError(self, "expected an infix operator, got %s", TokenSymbols[op]);
        }
        check(infix(self));
    }
    return NULL;
}
// Parse one expression, or a comma-separated list folded into a TupleExpr.
// A trailing comma is permitted (e.g. `(a,)`); newlines are skipped inside
// brackets. The result is left on the expression stack.
static Error* EXPR_TUPLE_ALLOW_SLICE(Compiler* self, bool allow_slice) {
    Error* err;
    check(parse_expression(self, PREC_LOWEST + 1, allow_slice));
    if(!match(TK_COMMA)) return NULL;
    // tuple expression // (a, )
    int count = 1;
    do {
        if(curr()->brackets_level) match_newlines();
        if(!is_expression(self, allow_slice)) break;  // trailing comma
        check(parse_expression(self, PREC_LOWEST + 1, allow_slice));
        count += 1;
        if(curr()->brackets_level) match_newlines();
    } while(match(TK_COMMA));
    // pop `count` expressions from the stack and merge them into a TupleExpr
    SequenceExpr* e = TupleExpr__new(prev()->line, count);
    for(int i = count - 1; i >= 0; i--) {
        e->items[i] = Ctx__s_popx(ctx());
    }
    Ctx__s_push(ctx(), (Expr*)e);
    return NULL;
}
/// Parse a simple expression.
static Error* EXPR(Compiler* self) { return parse_expression(self, PREC_LOWEST + 1, false); }
/// Parse a simple expression or a tuple of expressions.
static Error* EXPR_TUPLE(Compiler* self) { return EXPR_TUPLE_ALLOW_SLICE(self, false); }
// special case for `for loop` and `comp`
// Parse a comma-separated list of bare identifiers as assignment targets;
// multiple names are folded into a TupleExpr on the expression stack.
static Error* EXPR_VARS(Compiler* self) {
    int count = 0;
    do {
        consume(TK_ID);
        py_Name name = py_namev(Token__sv(prev()));
        NameExpr* e = NameExpr__new(prev()->line, name, name_scope(self));
        Ctx__s_push(ctx(), (Expr*)e);
        count += 1;
    } while(match(TK_COMMA));
    if(count > 1) {
        SequenceExpr* e = TupleExpr__new(prev()->line, count);
        for(int i = count - 1; i >= 0; i--) {
            e->items[i] = Ctx__s_popx(ctx());
        }
        Ctx__s_push(ctx(), (Expr*)e);
    }
    return NULL;
}
/* Misc */
// Push a fresh code-emit context for a code object with no enclosing
// function declaration (module/eval scope).
static void push_global_context(Compiler* self, CodeObject* co) {
    // before any token is consumed, the code object starts at line 1
    co->start_line = self->i == 0 ? 1 : prev()->line;
    Ctx* ctx = c11_vector__emplace(&self->contexts);
    Ctx__ctor(ctx, co, NULL, self->contexts.length);
}
// Finalize and pop the innermost code-emit context: append a guard return,
// record the end line, validate limits, resolve loop-related jump arguments,
// and classify the function type (generator/simple/normal).
static Error* pop_context(Compiler* self) {
    // add a `return None` in the end as a guard
    // previously, we only do this if the last opcode is not a return
    // however, this is buggy...since there may be a jump to the end (out of bound) even if the last
    // opcode is a return
    Ctx__emit_virtual(ctx(), OP_RETURN_VALUE, 1, BC_KEEPLINE, true);
    CodeObject* co = ctx()->co;
    // find the last valid token
    int j = self->i - 1;
    while(tk(j)->type == TK_EOL || tk(j)->type == TK_DEDENT || tk(j)->type == TK_EOF)
        j--;
    co->end_line = tk(j)->line;
    // some check here
    c11_vector* codes = &co->codes;
    if(co->nlocals > PK_MAX_CO_VARNAMES) {
        return SyntaxError(self, "maximum number of local variables exceeded");
    }
    if(co->consts.length > 65530) {
        return SyntaxError(self, "maximum number of constants exceeded");
    }
    // pre-compute block.end or block.end2
    // loop-control opcodes were emitted with their block index as `arg`;
    // rewrite each into a signed relative jump now that block ends are known
    for(int i = 0; i < codes->length; i++) {
        Bytecode* bc = c11__at(Bytecode, codes, i);
        if(bc->op == OP_LOOP_CONTINUE) {
            CodeBlock* block = c11__at(CodeBlock, &ctx()->co->blocks, bc->arg);
            Bytecode__set_signed_arg(bc, block->start - i);
        } else if(bc->op == OP_LOOP_BREAK) {
            CodeBlock* block = c11__at(CodeBlock, &ctx()->co->blocks, bc->arg);
            Bytecode__set_signed_arg(bc, (block->end2 != -1 ? block->end2 : block->end) - i);
        } else if(bc->op == OP_FOR_ITER || bc->op == OP_FOR_ITER_YIELD_VALUE) {
            CodeBlock* block = c11__at(CodeBlock, &ctx()->co->blocks, bc->arg);
            Bytecode__set_signed_arg(bc, block->end - i);
        }
    }
    // pre-compute func->is_simple
    FuncDecl* func = ctx()->func;
    if(func) {
        // check generator
        // NOTE(review): this `codes` shadows the outer `c11_vector* codes`
        // above — consider renaming for clarity
        Bytecode* codes = func->code.codes.data;
        int codes_length = func->code.codes.length;
        for(int i = 0; i < codes_length; i++) {
            if(codes[i].op == OP_YIELD_VALUE || codes[i].op == OP_FOR_ITER_YIELD_VALUE) {
                func->type = FuncType_GENERATOR;
                break;
            }
        }
        if(func->type == FuncType_UNSET) {
            // simple functions take only plain positional parameters
            bool is_simple = true;
            if(func->kwargs.length > 0) is_simple = false;
            if(func->starred_arg >= 0) is_simple = false;
            if(func->starred_kwarg >= 0) is_simple = false;
            if(is_simple) {
                func->type = FuncType_SIMPLE;
            } else {
                func->type = FuncType_NORMAL;
            }
        }
        assert(func->type != FuncType_UNSET);
    }
    Ctx__dtor(ctx());
    c11_vector__pop(&self->contexts);
    return NULL;
}
/* Expression Callbacks */
// Prefix rule: a literal token (number/string) becomes a LiteralExpr.
static Error* exprLiteral(Compiler* self) {
    LiteralExpr* e = LiteralExpr__new(prev()->line, &prev()->value);
    Ctx__s_push(ctx(), (Expr*)e);
    return NULL;
}

// Prefix rule: a bytes literal; built at runtime via OP_BUILD_BYTES.
static Error* exprBytes(Compiler* self) {
    c11_sv sv = c11_string__sv(prev()->value._str);
    Ctx__s_push(ctx(), (Expr*)RawStringExpr__new(prev()->line, sv, OP_BUILD_BYTES));
    return NULL;
}
// Prefix rule: an f-string. Collects literal components and interpolated
// expressions (each optionally wrapped with a format spec) until the end
// token, then folds all `count` parts into a single FStringExpr.
static Error* exprFString(Compiler* self) {
    // @fstr-begin, [@fstr-cpnt | <expr>]*, @fstr-end
    int count = 0;
    int line = prev()->line;
    while(true) {
        if(match(TK_FSTR_END)) {
            SequenceExpr* e = FStringExpr__new(line, count);
            for(int i = count - 1; i >= 0; i--) {
                e->items[i] = Ctx__s_popx(ctx());
            }
            Ctx__s_push(ctx(), (Expr*)e);
            return NULL;
        } else if(match(TK_FSTR_CPNT)) {
            // OP_LOAD_CONST
            LiteralExpr* e = LiteralExpr__new(prev()->line, &prev()->value);
            Ctx__s_push(ctx(), (Expr*)e);
            count++;
        } else {
            // {a!r:.2f}
            Error* err = EXPR(self);
            if(err) return err;
            count++;
            if(match(TK_FSTR_SPEC)) {
                c11_sv spec = Token__sv(prev());
                // ':.2f}' -> ':.2f'
                spec.size--;
                // wrap the just-parsed expression with its format spec
                Expr* child = Ctx__s_popx(ctx());
                FStringSpecExpr* e = FStringSpecExpr__new(prev()->line, child, spec);
                Ctx__s_push(ctx(), (Expr*)e);
            }
        }
    }
}
  1419. static Error* exprImag(Compiler* self) {
  1420. Ctx__s_push(ctx(), (Expr*)ImagExpr__new(prev()->line, prev()->value._f64));
  1421. return NULL;
  1422. }
  1423. static FuncDecl_ push_f_context(Compiler* self, c11_sv name, int* out_index);
  1424. static Error* _compile_f_args(Compiler* self, FuncDecl* decl, bool is_lambda);
// lambda [<args>]: <expr>
// Compiles the body into its own function context (named "<lambda>") and
// pushes a LambdaExpr referring to the resulting FuncDecl by index.
static Error* exprLambda(Compiler* self) {
    Error* err;
    int line = prev()->line;
    int decl_index;
    FuncDecl_ decl = push_f_context(self, (c11_sv){"<lambda>", 8}, &decl_index);
    if(!match(TK_COLON)) {
        // parameter list is present, e.g. `lambda x, y=1: ...`
        check(_compile_f_args(self, decl, true));
        consume(TK_COLON);
    }
    // parse the body with precedence above PREC_LAMBDA
    // https://github.com/pocketpy/pocketpy/issues/37
    check(parse_expression(self, PREC_LAMBDA + 1, false));
    Ctx__s_emit_top(ctx());
    Ctx__emit_(ctx(), OP_RETURN_VALUE, BC_NOARG, BC_KEEPLINE);
    check(pop_context(self));
    LambdaExpr* e = LambdaExpr__new(line, decl_index);
    Ctx__s_push(ctx(), (Expr*)e);
    return NULL;
}
  1443. static Error* exprOr(Compiler* self) {
  1444. Error* err;
  1445. int line = prev()->line;
  1446. check(parse_expression(self, PREC_LOGICAL_OR + 1, false));
  1447. LogicBinaryExpr* e = LogicBinaryExpr__new(line, OP_JUMP_IF_TRUE_OR_POP);
  1448. e->rhs = Ctx__s_popx(ctx());
  1449. e->lhs = Ctx__s_popx(ctx());
  1450. Ctx__s_push(ctx(), (Expr*)e);
  1451. return NULL;
  1452. }
  1453. static Error* exprAnd(Compiler* self) {
  1454. Error* err;
  1455. int line = prev()->line;
  1456. check(parse_expression(self, PREC_LOGICAL_AND + 1, false));
  1457. LogicBinaryExpr* e = LogicBinaryExpr__new(line, OP_JUMP_IF_FALSE_OR_POP);
  1458. e->rhs = Ctx__s_popx(ctx());
  1459. e->lhs = Ctx__s_popx(ctx());
  1460. Ctx__s_push(ctx(), (Expr*)e);
  1461. return NULL;
  1462. }
  1463. static Error* exprTernary(Compiler* self) {
  1464. // [true_expr]
  1465. Error* err;
  1466. int line = prev()->line;
  1467. check(parse_expression(self, PREC_TERNARY + 1, false)); // [true_expr, cond]
  1468. consume(TK_ELSE);
  1469. check(parse_expression(self, PREC_TERNARY + 1, false)); // [true_expr, cond, false_expr]
  1470. TernaryExpr* e = TernaryExpr__new(line);
  1471. e->false_expr = Ctx__s_popx(ctx());
  1472. e->cond = Ctx__s_popx(ctx());
  1473. e->true_expr = Ctx__s_popx(ctx());
  1474. Ctx__s_push(ctx(), (Expr*)e);
  1475. return NULL;
  1476. }
// Pratt callback for an infix binary operator. The lhs is already on the
// expression stack; parses the rhs and pushes a BinaryExpr.
static Error* exprBinaryOp(Compiler* self) {
    Error* err;
    int line = prev()->line;
    TokenIndex op = prev()->type;
    int precedence = rules[op].precedence;
    if(op != TK_POW) {
        // if not right associative, increase precedence
        precedence += 1;
    }
    check(parse_expression(self, precedence, false));
    BinaryExpr* e = BinaryExpr__new(line, op, false);
    if(op == TK_IN || op == TK_NOT_IN) {
        // for `in` / `not in` the operands are stored swapped: the stack
        // top (the operand parsed last) becomes lhs
        e->lhs = Ctx__s_popx(ctx());
        e->rhs = Ctx__s_popx(ctx());
    } else {
        e->rhs = Ctx__s_popx(ctx());
        e->lhs = Ctx__s_popx(ctx());
    }
    Ctx__s_push(ctx(), (Expr*)e);
    return NULL;
}
  1498. static Error* exprNot(Compiler* self) {
  1499. Error* err;
  1500. int line = prev()->line;
  1501. check(parse_expression(self, PREC_LOGICAL_NOT + 1, false));
  1502. UnaryExpr* e = UnaryExpr__new(line, Ctx__s_popx(ctx()), OP_UNARY_NOT);
  1503. Ctx__s_push(ctx(), (Expr*)e);
  1504. return NULL;
  1505. }
// Pratt callback for prefix unary operators: -, ~, * (starred), ** (double-starred).
static Error* exprUnaryOp(Compiler* self) {
    Error* err;
    int line = prev()->line;
    TokenIndex op = prev()->type;
    check(parse_expression(self, PREC_UNARY + 1, false));
    Expr* e = Ctx__s_popx(ctx());
    switch(op) {
        case TK_SUB: {
            // constant fold: negate int/float literals in place instead of
            // emitting OP_UNARY_NEGATIVE at runtime
            if(e->vt->is_literal) {
                LiteralExpr* le = (LiteralExpr*)e;
                if(le->value->index == TokenValue_I64 || le->value->index == TokenValue_F64) {
                    le->negated = true;
                }
                // NOTE(review): a non-numeric literal (e.g. -"s") is pushed
                // back unnegated here — confirm this is rejected elsewhere
                Ctx__s_push(ctx(), e);
            } else {
                Ctx__s_push(ctx(), (Expr*)UnaryExpr__new(line, e, OP_UNARY_NEGATIVE));
            }
            break;
        }
        case TK_INVERT: Ctx__s_push(ctx(), (Expr*)UnaryExpr__new(line, e, OP_UNARY_INVERT)); break;
        // `*expr` / `**expr` become StarredExpr with level 1 / 2
        case TK_MUL: Ctx__s_push(ctx(), (Expr*)StarredExpr__new(line, e, 1)); break;
        case TK_POW: Ctx__s_push(ctx(), (Expr*)StarredExpr__new(line, e, 2)); break;
        default: assert(false);
    }
    return NULL;
}
// Parenthesized expression: '(' <expr-tuple> ')'.
// Tuples keep their own node; any other expression is wrapped in a
// GroupedExpr.
static Error* exprGroup(Compiler* self) {
    Error* err;
    int line = prev()->line;
    match_newlines();
    check(EXPR_TUPLE(self)); // () is just for change precedence
    match_newlines();
    consume(TK_RPAREN);
    if(Ctx__s_top(ctx())->vt->is_tuple) return NULL;
    GroupedExpr* g = GroupedExpr__new(line, Ctx__s_popx(ctx()));
    Ctx__s_push(ctx(), (Expr*)g);
    return NULL;
}
// Pratt callback for a bare identifier: resolves its scope and pushes a
// NameExpr.
static Error* exprName(Compiler* self) {
    py_Name name = py_namev(Token__sv(prev()));
    NameScope scope = name_scope(self);
    // promote this name to global scope if needed (it appears in the
    // context's global_names map)
    if(c11_smallmap_n2i__contains(&ctx()->global_names, name)) {
        if(scope == NAME_GLOBAL_UNKNOWN) return SyntaxError(self, "cannot use global keyword here");
        scope = NAME_GLOBAL;
    }
    NameExpr* e = NameExpr__new(prev()->line, name, scope);
    Ctx__s_push(ctx(), (Expr*)e);
    return NULL;
}
  1557. static Error* exprAttrib(Compiler* self) {
  1558. consume(TK_ID);
  1559. py_Name name = py_namev(Token__sv(prev()));
  1560. AttribExpr* e = AttribExpr__new(prev()->line, Ctx__s_popx(ctx()), name);
  1561. Ctx__s_push(ctx(), (Expr*)e);
  1562. return NULL;
  1563. }
  1564. static Error* exprLiteral0(Compiler* self) {
  1565. Literal0Expr* e = Literal0Expr__new(prev()->line, prev()->type);
  1566. Ctx__s_push(ctx(), (Expr*)e);
  1567. return NULL;
  1568. }
// Comprehension tail: `for <vars> in <iter> [if <cond>]`.
// The element expression is already on the stack. op0 builds the empty
// container, op1 appends/adds each produced element.
static Error* consume_comp(Compiler* self, Opcode op0, Opcode op1) {
    // [expr]
    Error* err;
    int line = prev()->line;
    bool has_cond = false;
    check(EXPR_VARS(self)); // [expr, vars]
    consume(TK_IN);
    check(parse_expression(self, PREC_TERNARY + 1, false)); // [expr, vars, iter]
    match_newlines();
    if(match(TK_IF)) {
        check(parse_expression(self, PREC_TERNARY + 1, false)); // [expr, vars, iter, cond]
        has_cond = true;
    }
    CompExpr* ce = CompExpr__new(line, op0, op1);
    // pop in reverse of push order
    if(has_cond) ce->cond = Ctx__s_popx(ctx());
    ce->iter = Ctx__s_popx(ctx());
    ce->vars = Ctx__s_popx(ctx());
    ce->expr = Ctx__s_popx(ctx());
    Ctx__s_push(ctx(), (Expr*)ce);
    match_newlines();
    return NULL;
}
// '[...]' literal: list or list comprehension.
static Error* exprList(Compiler* self) {
    Error* err;
    int line = prev()->line;
    int count = 0;
    do {
        match_newlines();
        if(curr()->type == TK_RBRACKET) break; // empty list or trailing comma
        check(EXPR(self));
        count += 1;
        match_newlines();
        // `for` right after the first element switches to a comprehension
        if(count == 1 && match(TK_FOR)) {
            check(consume_comp(self, OP_BUILD_LIST, OP_LIST_APPEND));
            consume(TK_RBRACKET);
            return NULL;
        }
        match_newlines();
    } while(match(TK_COMMA));
    consume(TK_RBRACKET);
    SequenceExpr* e = ListExpr__new(line, count);
    // pop elements in reverse so items[] is in source order
    for(int i = count - 1; i >= 0; i--) {
        e->items[i] = Ctx__s_popx(ctx());
    }
    Ctx__s_push(ctx(), (Expr*)e);
    return NULL;
}
// '{...}' literal: dict, set, dict-comprehension, or set-comprehension.
static Error* exprMap(Compiler* self) {
    Error* err;
    int line = prev()->line;
    bool parsing_dict = false; // {...} may be dict or set
    int count = 0;
    do {
        match_newlines();
        if(curr()->type == TK_RBRACE) break;
        check(EXPR(self)); // [key]
        // a ':' after the first key decides dict vs set for the whole literal
        if(curr()->type == TK_COLON) { parsing_dict = true; }
        if(parsing_dict) {
            consume(TK_COLON);
            check(EXPR(self)); // [key, value] -> [item]
            DictItemExpr* item = DictItemExpr__new(prev()->line);
            item->value = Ctx__s_popx(ctx());
            item->key = Ctx__s_popx(ctx());
            Ctx__s_push(ctx(), (Expr*)item);
        }
        count += 1; // key-value pair count
        match_newlines();
        // `for` after the first item switches to a comprehension
        if(count == 1 && match(TK_FOR)) {
            if(parsing_dict) {
                check(consume_comp(self, OP_BUILD_DICT, OP_DICT_ADD));
            } else {
                check(consume_comp(self, OP_BUILD_SET, OP_SET_ADD));
            }
            consume(TK_RBRACE);
            return NULL;
        }
        match_newlines();
    } while(match(TK_COMMA));
    consume(TK_RBRACE);
    SequenceExpr* se;
    // `{}` (count == 0) is an empty dict, not an empty set
    if(count == 0 || parsing_dict) {
        se = DictExpr__new(line, count);
    } else {
        se = SetExpr__new(line, count);
    }
    // pop items in reverse so items[] is in source order
    for(int i = count - 1; i >= 0; i--) {
        se->items[i] = Ctx__s_popx(ctx());
    }
    Ctx__s_push(ctx(), (Expr*)se);
    return NULL;
}
// Call argument list: '(' [expr | name=value | *args | **kwargs]* ')'.
// The callee is already on the stack; it is replaced by a CallExpr that
// owns the callee and all parsed arguments.
static Error* exprCall(Compiler* self) {
    Error* err;
    CallExpr* e = CallExpr__new(prev()->line, Ctx__s_popx(ctx()));
    Ctx__s_push(ctx(), (Expr*)e); // push onto the stack in advance
    do {
        match_newlines();
        if(curr()->type == TK_RPAREN) break;
        if(curr()->type == TK_ID && next()->type == TK_ASSIGN) {
            // keyword argument: name=value
            consume(TK_ID);
            py_Name key = py_namev(Token__sv(prev()));
            consume(TK_ASSIGN);
            check(EXPR(self));
            CallExprKwArg kw = {key, Ctx__s_popx(ctx())};
            c11_vector__push(CallExprKwArg, &e->kwargs, kw);
        } else {
            check(EXPR(self));
            int star_level = 0;
            Expr* top = Ctx__s_top(ctx());
            if(top->vt->is_starred) star_level = ((StarredExpr*)top)->level;
            if(star_level == 2) {
                // **kwargs, stored as a kwarg entry with key 0
                CallExprKwArg kw = {0, Ctx__s_popx(ctx())};
                c11_vector__push(CallExprKwArg, &e->kwargs, kw);
            } else {
                // positional argument (including *args at star_level 1)
                if(e->kwargs.length > 0) {
                    return SyntaxError(self, "positional argument follows keyword argument");
                }
                c11_vector__push(Expr*, &e->args, Ctx__s_popx(ctx()));
            }
        }
        match_newlines();
    } while(match(TK_COMMA));
    consume(TK_RPAREN);
    return NULL;
}
// Slice beginning with ':' (no start expression): handles a[:], a[:stop],
// a[:stop:step] and a[::step]. The SliceExpr is pushed first and its
// fields are filled in as they are parsed.
static Error* exprSlice0(Compiler* self) {
    Error* err;
    SliceExpr* slice = SliceExpr__new(prev()->line);
    Ctx__s_push(ctx(), (Expr*)slice); // push onto the stack in advance
    if(is_expression(self, false)) { // :<stop>
        check(EXPR(self));
        slice->stop = Ctx__s_popx(ctx());
        // try optional step
        if(match(TK_COLON)) { // :<stop>:<step>
            check(EXPR(self));
            slice->step = Ctx__s_popx(ctx());
        }
    } else if(match(TK_COLON)) {
        if(is_expression(self, false)) { // ::<step>
            check(EXPR(self));
            slice->step = Ctx__s_popx(ctx());
        } // else ::
    } // else :
    return NULL;
}
// Slice with a start expression (already on the stack): handles
// a[start:], a[start:stop], a[start:stop:step] and a[start::step].
static Error* exprSlice1(Compiler* self) {
    Error* err;
    SliceExpr* slice = SliceExpr__new(prev()->line);
    slice->start = Ctx__s_popx(ctx());
    Ctx__s_push(ctx(), (Expr*)slice); // push onto the stack in advance
    if(is_expression(self, false)) { // <start>:<stop>
        check(EXPR(self));
        slice->stop = Ctx__s_popx(ctx());
        // try optional step
        if(match(TK_COLON)) { // <start>:<stop>:<step>
            check(EXPR(self));
            slice->step = Ctx__s_popx(ctx());
        }
    } else if(match(TK_COLON)) { // <start>::<step>
        check(EXPR(self));
        slice->step = Ctx__s_popx(ctx());
    } // else <start>:
    return NULL;
}
  1735. static Error* exprSubscr(Compiler* self) {
  1736. Error* err;
  1737. int line = prev()->line;
  1738. match_newlines();
  1739. check(EXPR_TUPLE_ALLOW_SLICE(self, true));
  1740. match_newlines();
  1741. consume(TK_RBRACKET); // [lhs, rhs]
  1742. SubscrExpr* e = SubscrExpr__new(line);
  1743. e->rhs = Ctx__s_popx(ctx()); // [lhs]
  1744. e->lhs = Ctx__s_popx(ctx()); // []
  1745. Ctx__s_push(ctx(), (Expr*)e);
  1746. return NULL;
  1747. }
  1748. ////////////////
// Parses a type-hint expression and discards it (hints are not compiled).
static Error* consume_type_hints(Compiler* self) {
    Error* err;
    check(EXPR(self));
    Ctx__s_pop(ctx());
    return NULL;
}
// Parses a type-hint expression, discards the Expr, but returns the raw
// source text of the hint via `out`.
static Error* consume_type_hints_sv(Compiler* self, c11_sv* out) {
    Error* err;
    const char* start = curr()->start;
    check(EXPR(self));
    // prev() is the last token of the hint; the span covers [start, end)
    const char* end = prev()->start + prev()->length;
    *out = (c11_sv){start, end - start};
    Ctx__s_pop(ctx());
    return NULL;
}
  1764. static Error* compile_stmt(Compiler* self);
// Compiles a block body after ':': either inline statements on the same
// line (separated by ';') or an indented suite, where each statement is
// compiled by `callback`.
static Error* compile_block_body(Compiler* self, PrattCallback callback) {
    Error* err;
    assert(callback != NULL);
    consume(TK_COLON);
    if(curr()->type != TK_EOL && curr()->type != TK_EOF) {
        // inline form: `if x: stmt1; stmt2`
        // NOTE(review): this path calls compile_stmt directly, not
        // `callback` — confirm that is intended for all callers
        while(true) {
            check(compile_stmt(self));
            bool possible = curr()->type != TK_EOL && curr()->type != TK_EOF;
            if(prev()->type != TK_SEMICOLON || !possible) break;
        }
        return NULL;
    }
    bool consumed = match_newlines();
    if(!consumed) return SyntaxError(self, "expected a new line after ':'");
    consume(TK_INDENT);
    while(curr()->type != TK_DEDENT) {
        match_newlines();
        check(callback(self));
        match_newlines();
    }
    consume(TK_DEDENT);
    return NULL;
}
// if <cond>: <body> [elif ...]* [else: <body>]
// Emits POP_JUMP_IF_FALSE over the body; `elif` is handled by recursing
// into this function.
static Error* compile_if_stmt(Compiler* self) {
    Error* err;
    check(EXPR(self)); // condition
    Ctx__s_emit_top(ctx());
    int patch = Ctx__emit_(ctx(), OP_POP_JUMP_IF_FALSE, BC_NOARG, prev()->line);
    err = compile_block_body(self, compile_stmt);
    if(err) return err;
    if(match(TK_ELIF)) {
        // skip the elif chain once this branch's body has run
        int exit_patch = Ctx__emit_(ctx(), OP_JUMP_FORWARD, BC_NOARG, prev()->line);
        Ctx__patch_jump(ctx(), patch); // false condition lands here
        check(compile_if_stmt(self));
        Ctx__patch_jump(ctx(), exit_patch);
    } else if(match(TK_ELSE)) {
        int exit_patch = Ctx__emit_(ctx(), OP_JUMP_FORWARD, BC_NOARG, prev()->line);
        Ctx__patch_jump(ctx(), patch);
        check(compile_block_body(self, compile_stmt));
        Ctx__patch_jump(ctx(), exit_patch);
    } else {
        Ctx__patch_jump(ctx(), patch);
    }
    return NULL;
}
// while <cond>: <body> [else: <body>]
static Error* compile_while_loop(Compiler* self) {
    Error* err;
    int block = Ctx__enter_block(ctx(), CodeBlockType_WHILE_LOOP);
    int block_start = c11__at(CodeBlock, &ctx()->co->blocks, block)->start;
    check(EXPR(self)); // condition
    Ctx__s_emit_top(ctx());
    int patch = Ctx__emit_(ctx(), OP_POP_JUMP_IF_FALSE, BC_NOARG, prev()->line);
    check(compile_block_body(self, compile_stmt));
    Ctx__emit_jump(ctx(), block_start, BC_KEEPLINE); // loop back to re-test the condition
    Ctx__patch_jump(ctx(), patch);
    Ctx__exit_block(ctx());
    // optional else clause
    if(match(TK_ELSE)) {
        check(compile_block_body(self, compile_stmt));
        // end2 is where OP_LOOP_BREAK jumps, so `break` skips the else body
        CodeBlock* p_block = c11__at(CodeBlock, &ctx()->co->blocks, block);
        p_block->end2 = ctx()->co->codes.length;
    }
    return NULL;
}
// for <vars> in <iter>: <body> [else: <body>]
static Error* compile_for_loop(Compiler* self) {
    Error* err;
    check(EXPR_VARS(self)); // [vars]
    consume(TK_IN);
    check(EXPR_TUPLE(self)); // [vars, iter]
    Ctx__s_emit_top(ctx()); // [vars]
    Ctx__emit_(ctx(), OP_GET_ITER, BC_NOARG, BC_KEEPLINE);
    int block = Ctx__enter_block(ctx(), CodeBlockType_FOR_LOOP);
    // OP_FOR_ITER's arg is the block index; its jump offset is resolved later
    int block_start = Ctx__emit_(ctx(), OP_FOR_ITER, block, BC_KEEPLINE);
    Expr* vars = Ctx__s_popx(ctx());
    bool ok = vtemit_store(vars, ctx()); // store/unpack the loop variables
    vtdelete(vars);
    if(!ok) {
        // this error occurs in `vars` instead of this line, but...nevermind
        return SyntaxError(self, "invalid syntax");
    }
    check(compile_block_body(self, compile_stmt));
    Ctx__emit_jump(ctx(), block_start, BC_KEEPLINE); // back to FOR_ITER
    Ctx__exit_block(ctx());
    // optional else clause
    if(match(TK_ELSE)) {
        check(compile_block_body(self, compile_stmt));
        // end2 is where OP_LOOP_BREAK jumps, so `break` skips the else body
        CodeBlock* p_block = c11__at(CodeBlock, &ctx()->co->blocks, block);
        p_block->end2 = ctx()->co->codes.length;
    }
    return NULL;
}
// yield from <expr>
// Desugared to OP_GET_ITER plus a FOR_ITER_YIELD_VALUE loop that yields
// each item from the sub-iterator.
static Error* compile_yield_from(Compiler* self, int kw_line) {
    Error* err;
    // only valid inside a function context (not at module level)
    if(self->contexts.length <= 1) return SyntaxError(self, "'yield from' outside function");
    check(EXPR_TUPLE(self));
    Ctx__s_emit_top(ctx());
    Ctx__emit_(ctx(), OP_GET_ITER, BC_NOARG, kw_line);
    int block = Ctx__enter_block(ctx(), CodeBlockType_FOR_LOOP);
    int block_start = Ctx__emit_(ctx(), OP_FOR_ITER_YIELD_VALUE, block, kw_line);
    Ctx__emit_jump(ctx(), block_start, BC_KEEPLINE);
    Ctx__exit_block(ctx());
    // StopIteration.value will be pushed onto the stack
    return NULL;
}
// Tries to parse an assignment tail after an already-parsed expression
// (which sits on the expr stack). Handles augmented assignment
// (`+=` .. `^=`) and (possibly chained) plain `=` assignment.
// Sets *is_assign to tell the caller whether anything was consumed.
Error* try_compile_assignment(Compiler* self, bool* is_assign) {
    Error* err;
    switch(curr()->type) {
        case TK_IADD:
        case TK_ISUB:
        case TK_IMUL:
        case TK_IDIV:
        case TK_IFLOORDIV:
        case TK_IMOD:
        case TK_ILSHIFT:
        case TK_IRSHIFT:
        case TK_IAND:
        case TK_IOR:
        case TK_IXOR: {
            if(Ctx__s_top(ctx())->vt->is_starred)
                return SyntaxError(self, "can't use inplace operator with starred expression");
            if(ctx()->is_compiling_class)
                return SyntaxError(self, "can't use inplace operator in class definition");
            advance();
            // a[x] += 1; a and x should be evaluated only once
            // a.x += 1; a should be evaluated only once
            // -1 to remove =; inplace=true
            int line = prev()->line;
            TokenIndex op = (TokenIndex)(prev()->type - 1);
            // [lhs]
            check(EXPR_TUPLE(self)); // [lhs, rhs]
            if(Ctx__s_top(ctx())->vt->is_starred)
                return SyntaxError(self, "can't use starred expression here");
            BinaryExpr* e = BinaryExpr__new(line, op, true);
            e->rhs = Ctx__s_popx(ctx()); // [lhs]
            e->lhs = Ctx__s_popx(ctx()); // []
            vtemit_((Expr*)e, ctx());
            // istore writes the result back through the lhs target
            bool ok = vtemit_istore(e->lhs, ctx());
            vtdelete((Expr*)e);
            if(!ok) return SyntaxError(self, "invalid syntax");
            *is_assign = true;
            return NULL;
        }
        case TK_ASSIGN: {
            consume(TK_ASSIGN);
            int n = 0; // number of assignment targets
            if(match(TK_YIELD_FROM)) {
                // x = yield from <expr>
                check(compile_yield_from(self, prev()->line));
                n = 1;
            } else {
                // chained assignment: a = b = ... = <rhs>
                do {
                    check(EXPR_TUPLE(self));
                    n += 1;
                } while(match(TK_ASSIGN));
                // stack size is n+1
                Ctx__s_emit_top(ctx());
                // duplicate the rhs once per extra target
                for(int j = 1; j < n; j++)
                    Ctx__emit_(ctx(), OP_DUP_TOP, BC_NOARG, BC_KEEPLINE);
            }
            // store into each target, last-parsed first
            for(int j = 0; j < n; j++) {
                if(Ctx__s_top(ctx())->vt->is_starred)
                    return SyntaxError(self, "can't use starred expression here");
                Expr* e = Ctx__s_top(ctx());
                bool ok = vtemit_store(e, ctx());
                Ctx__s_pop(ctx());
                if(!ok) return SyntaxError(self, "invalid syntax");
            }
            *is_assign = true;
            return NULL;
        }
        default: *is_assign = false; // not an assignment; leave tokens untouched
    }
    return NULL;
}
// Creates a new FuncDecl named `name`, registers it in the current code
// object's func_decls (index returned via out_index), and pushes a fresh
// compile context for the function body.
static FuncDecl_ push_f_context(Compiler* self, c11_sv name, int* out_index) {
    FuncDecl_ decl = FuncDecl__rcnew(self->src, name);
    // at token index 0 there is no previous token yet, so start at line 1
    decl->code.start_line = self->i == 0 ? 1 : prev()->line;
    decl->nested = name_scope(self) == NAME_LOCAL;
    // add_func_decl
    Ctx* top_ctx = ctx();
    c11_vector__push(FuncDecl_, &top_ctx->co->func_decls, decl);
    *out_index = top_ctx->co->func_decls.length - 1;
    // push new context (emplace may reallocate, so top_ctx is reassigned)
    top_ctx = c11_vector__emplace(&self->contexts);
    Ctx__ctor(top_ctx, &decl->code, decl, self->contexts.length);
    return decl;
}
// Reads a compile-time literal (used for default argument values):
// a number (optionally negated), string, bool, None, `...`, or a tuple of
// up to 4 literals. Writes nil to `out` when the token is not a literal;
// the caller decides whether that is an error.
static Error* read_literal(Compiler* self, py_Ref out) {
    Error* err;
    advance();
    const TokenValue* value = &prev()->value;
    bool negated = false;
    switch(prev()->type) {
        case TK_SUB:
            consume(TK_NUM);
            value = &prev()->value;
            negated = true;
            /* fallthrough */
        case TK_NUM: {
            if(value->index == TokenValue_I64) {
                py_newint(out, negated ? -value->_i64 : value->_i64);
            } else if(value->index == TokenValue_F64) {
                py_newfloat(out, negated ? -value->_f64 : value->_f64);
            } else {
                c11__unreachable();
            }
            return NULL;
        }
        case TK_STR: py_newstr(out, value->_str->data); return NULL;
        case TK_TRUE: py_newbool(out, true); return NULL;
        case TK_FALSE: py_newbool(out, false); return NULL;
        case TK_NONE: py_newnone(out); return NULL;
        case TK_DOTDOTDOT: py_newellipsis(out); return NULL;
        case TK_LPAREN: {
            // literal tuple, e.g. (1, 2); element count is capped at 4
            py_TValue cpnts[4];
            int count = 0;
            while(true) {
                if(count == 4)
                    return SyntaxError(self, "default argument tuple exceeds 4 elements");
                check(read_literal(self, &cpnts[count]));
                count += 1;
                if(curr()->type == TK_RPAREN) break;
                consume(TK_COMMA);
                if(curr()->type == TK_RPAREN) break; // allow a trailing comma
            }
            consume(TK_RPAREN);
            py_newtuple(out, count);
            for(int i = 0; i < count; i++) {
                py_tuple_setitem(out, i, &cpnts[i]);
            }
            return NULL;
        }
        // not a literal: signal with nil
        default: py_newnil(out); return NULL;
    }
}
// Parses a function parameter list into `decl`.
// State machine: 0 = positional args, 1 = *args, 2 = k=v defaults,
// 3 = **kwargs (must be the last parameter).
static Error* _compile_f_args(Compiler* self, FuncDecl* decl, bool is_lambda) {
    int state = 0; // 0 for args, 1 for *args, 2 for k=v, 3 for **kwargs
    Error* err;
    do {
        if(!is_lambda) match_newlines();
        if(state >= 3) return SyntaxError(self, "**kwargs should be the last argument");
        if(match(TK_MUL)) {
            if(state < 1)
                state = 1;
            else
                return SyntaxError(self, "*args should be placed before **kwargs");
        } else if(match(TK_POW)) {
            state = 3;
        }
        consume(TK_ID);
        py_Name name = py_namev(Token__sv(prev()));
        // check duplicate argument name
        if(FuncDecl__is_duplicated_arg(decl, name)) {
            return SyntaxError(self, "duplicate argument name");
        }
        // eat type hints (parsed and discarded)
        if(!is_lambda && match(TK_COLON)) check(consume_type_hints(self));
        // a '=' after a plain arg switches to default-value mode
        if(state == 0 && curr()->type == TK_ASSIGN) state = 2;
        switch(state) {
            case 0: FuncDecl__add_arg(decl, name); break;
            case 1:
                FuncDecl__add_starred_arg(decl, name);
                state += 1; // only one *args; subsequent args need defaults
                break;
            case 2: {
                consume(TK_ASSIGN);
                py_TValue value;
                check(read_literal(self, &value));
                // nil from read_literal means the token was not a literal
                if(py_isnil(&value)) return SyntaxError(self, "default argument must be a literal");
                FuncDecl__add_kwarg(decl, name, &value);
            } break;
            case 3:
                FuncDecl__add_starred_kwarg(decl, name);
                state += 1; // any further parameter triggers the error above
                break;
        }
    } while(match(TK_COMMA));
    if(!is_lambda) match_newlines();
    return NULL;
}
// Parses and ignores a PEP 695 type-parameter list, e.g. `def f[T](...)`.
// https://peps.python.org/pep-0695/
// NOTE(review): only a single type parameter is accepted here; `[T, U]`
// would not parse — confirm that is intended.
static Error* consume_pep695_py312(Compiler* self) {
    // https://peps.python.org/pep-0695/
    Error* err;
    if(match(TK_LBRACKET)) {
        consume(TK_ID);
        // optional bound, e.g. [T: int]
        if(match(TK_COLON)) { check(consume_type_hints(self)); }
        consume(TK_RBRACKET);
    }
    return NULL;
}
// def <name>[TPs](<args>) [-> hint]: <body>
// Compiles the function body in its own context, extracts an optional
// docstring, emits OP_LOAD_FUNCTION, applies `decorators`, then binds the
// name (as a class attribute when inside a class body).
static Error* compile_function(Compiler* self, int decorators) {
    Error* err;
    consume(TK_ID);
    c11_sv decl_name_sv = Token__sv(prev());
    int decl_index;
    FuncDecl_ decl = push_f_context(self, decl_name_sv, &decl_index);
    consume_pep695_py312(self);
    consume(TK_LPAREN);
    if(!match(TK_RPAREN)) {
        check(_compile_f_args(self, decl, false));
        consume(TK_RPAREN);
    }
    // return-type hint: parsed and discarded
    if(match(TK_ARROW)) check(consume_type_hints(self));
    check(compile_block_body(self, compile_stmt));
    check(pop_context(self));
    if(decl->code.codes.length >= 2) {
        Bytecode* codes = (Bytecode*)decl->code.codes.data;
        if(codes[0].op == OP_LOAD_CONST && codes[1].op == OP_POP_TOP) {
            // handle optional docstring: a leading string constant that is
            // immediately popped; keep its text and NO_OP both instructions
            py_TValue* consts = decl->code.consts.data;
            py_TValue* c = &consts[codes[0].arg];
            if(py_isstr(c)) {
                decl->docstring = py_tostr(c);
                codes[0].op = OP_NO_OP;
                codes[1].op = OP_NO_OP;
            }
        }
    }
    Ctx__emit_(ctx(), OP_LOAD_FUNCTION, decl_index, prev()->line);
    Ctx__s_emit_decorators(ctx(), decorators);
    py_Name decl_name = py_namev(decl_name_sv);
    if(ctx()->is_compiling_class) {
        // __new__/__init__ must accept at least one positional argument
        if(decl_name == __new__ || decl_name == __init__) {
            if(decl->args.length == 0) {
                return SyntaxError(self,
                                   "%s() should have at least one positional argument",
                                   py_name2str(decl_name));
            }
        }
        Ctx__emit_(ctx(), OP_STORE_CLASS_ATTR, decl_name, prev()->line);
    } else {
        // bind the function object to its name in the enclosing scope
        NameExpr* e = NameExpr__new(prev()->line, decl_name, name_scope(self));
        vtemit_store((Expr*)e, ctx());
        vtdelete((Expr*)e);
    }
    return NULL;
}
// class <name>[TPs] [(<base>)]: <body>
// Emits BEGIN_CLASS/END_CLASS around the class body; nested class
// definitions are rejected.
static Error* compile_class(Compiler* self, int decorators) {
    Error* err;
    consume(TK_ID);
    py_Name name = py_namev(Token__sv(prev()));
    bool has_base = false;
    consume_pep695_py312(self);
    if(match(TK_LPAREN)) {
        // empty parens `class A():` leave has_base false
        if(is_expression(self, false)) {
            check(EXPR(self));
            has_base = true; // [base]
        }
        consume(TK_RPAREN);
    }
    if(!has_base) {
        // no explicit base: push None as the base marker
        Ctx__emit_(ctx(), OP_LOAD_NONE, BC_NOARG, prev()->line);
    } else {
        Ctx__s_emit_top(ctx()); // []
    }
    Ctx__emit_(ctx(), OP_BEGIN_CLASS, name, BC_KEEPLINE);
    // reject class definitions nested inside another class body
    c11__foreach(Ctx, &self->contexts, it) {
        if(it->is_compiling_class) return SyntaxError(self, "nested class is not allowed");
    }
    ctx()->is_compiling_class = true;
    check(compile_block_body(self, compile_stmt));
    ctx()->is_compiling_class = false;
    Ctx__s_emit_decorators(ctx(), decorators);
    Ctx__emit_(ctx(), OP_END_CLASS, name, BC_KEEPLINE);
    return NULL;
}
// Called after '@': parses one decorator expression per line (each stays
// on the expr stack), then the decorated `class` or `def`, which applies
// the `count` decorators.
static Error* compile_decorated(Compiler* self) {
    Error* err;
    int count = 0;
    do {
        check(EXPR(self));
        count += 1;
        if(!match_newlines()) return SyntaxError(self, "expected a newline after '@'");
    } while(match(TK_DECORATOR));
    if(match(TK_CLASS)) {
        check(compile_class(self, count));
    } else {
        consume(TK_DEF);
        check(compile_function(self, count));
    }
    return NULL;
}
// import a [as b]
// import a [as b], c [as d]
// For each module: emit OP_IMPORT_PATH with the module path stored as a
// string constant, then bind the result under its name or alias.
static Error* compile_normal_import(Compiler* self) {
    do {
        consume(TK_ID);
        c11_sv name = Token__sv(prev());
        int index = Ctx__add_const_string(ctx(), name);
        Ctx__emit_(ctx(), OP_IMPORT_PATH, index, prev()->line);
        if(match(TK_AS)) {
            // bind under the alias instead of the module name
            consume(TK_ID);
            name = Token__sv(prev());
        }
        Ctx__emit_store_name(ctx(), name_scope(self), py_namev(name), prev()->line);
    } while(match(TK_COMMA));
    consume_end_stmt();
    return NULL;
}
// from a import b [as c], d [as e]
// from a.b import c [as d]
// from . import a [as b]
// from .a import b [as c]
// from ..a import b [as c]
// from .a.b import c [as d]
// from xxx import *
// Builds the dotted module path into `buf`, imports it, then binds each
// requested attribute (or everything for `import *`).
static Error* compile_from_import(c11_sbuf* buf, Compiler* self) {
    // count leading dots for relative imports (., .. and ... tokens)
    int dots = 0;
    while(true) {
        switch(curr()->type) {
            case TK_DOT: dots += 1; break;
            case TK_DOTDOT: dots += 2; break;
            case TK_DOTDOTDOT: dots += 3; break;
            default: goto __EAT_DOTS_END;
        }
        advance();
    }
__EAT_DOTS_END:
    for(int i = 0; i < dots; i++) {
        c11_sbuf__write_char(buf, '.');
    }
    if(dots > 0) {
        // @id is optional if dots > 0
        if(match(TK_ID)) {
            c11_sbuf__write_sv(buf, Token__sv(prev()));
            while(match(TK_DOT)) {
                consume(TK_ID);
                c11_sbuf__write_char(buf, '.');
                c11_sbuf__write_sv(buf, Token__sv(prev()));
            }
        }
    } else {
        // @id is required if dots == 0
        consume(TK_ID);
        c11_sbuf__write_sv(buf, Token__sv(prev()));
        while(match(TK_DOT)) {
            consume(TK_ID);
            c11_sbuf__write_char(buf, '.');
            c11_sbuf__write_sv(buf, Token__sv(prev()));
        }
    }
    // import the module; it stays on the stack while names are extracted
    c11_string* path = c11_sbuf__submit(buf);
    Ctx__emit_(ctx(),
               OP_IMPORT_PATH,
               Ctx__add_const_string(ctx(), c11_string__sv(path)),
               prev()->line);
    c11_string__delete(path);
    consume(TK_IMPORT);
    if(match(TK_MUL)) {
        if(name_scope(self) != NAME_GLOBAL)
            return SyntaxError(self, "from <module> import * can only be used in global scope");
        // pop the module and import __all__
        Ctx__emit_(ctx(), OP_POP_IMPORT_STAR, BC_NOARG, prev()->line);
        consume_end_stmt();
        return NULL;
    }
    // optional parenthesized name list permits newlines inside
    bool has_bracket = match(TK_LPAREN);
    do {
        if(has_bracket) match_newlines();
        // DUP_TOP keeps the module on the stack for the next name
        Ctx__emit_(ctx(), OP_DUP_TOP, BC_NOARG, BC_KEEPLINE);
        consume(TK_ID);
        c11_sv name = Token__sv(prev());
        Ctx__emit_(ctx(), OP_LOAD_ATTR, py_namev(name), prev()->line);
        if(match(TK_AS)) {
            // bind under the alias instead of the attribute name
            consume(TK_ID);
            name = Token__sv(prev());
        }
        Ctx__emit_store_name(ctx(), name_scope(self), py_namev(name), prev()->line);
    } while(match(TK_COMMA));
    if(has_bracket) {
        match_newlines();
        consume(TK_RPAREN);
    }
    // drop the module object
    Ctx__emit_(ctx(), OP_POP_TOP, BC_NOARG, BC_KEEPLINE);
    consume_end_stmt();
    return NULL;
}
/* Compiles a `try` statement: `try:` body, then either a bare `finally:`
 * clause, or one or more `except` clauses optionally followed by `finally:`.
 *
 * `patches[]` collects forward-jump indices (OP_JUMP_FORWARD) that must all be
 * patched to land just past the except chain; at most 8 such jumps (the jump
 * emitted after the try body when there is no finally, plus one per handled
 * except clause).
 */
static Error* compile_try_except(Compiler* self) {
    Error* err;
    int patches[8];          // forward jumps to patch to the end of the except chain
    int patches_length = 0;
    Ctx__enter_block(ctx(), CodeBlockType_TRY);
    Ctx__emit_(ctx(), OP_TRY_ENTER, BC_NOARG, prev()->line);
    check(compile_block_body(self, compile_stmt));
    // https://docs.python.org/3/reference/compound_stmts.html#finally-clause
    /* If finally is present, it specifies a ‘cleanup’ handler. The try clause is executed,
     * including any except and else clauses. If an exception occurs in any of the clauses and is
     * not handled, the exception is temporarily saved. The finally clause is executed. If there is
     * a saved exception it is re-raised at the end of the finally clause. If the finally clause
     * raises another exception, the saved exception is set as the context of the new exception. If
     * the finally clause executes a return, break or continue statement, the saved exception is
     * discarded.
     */
    // known issue:
    // A return, break, continue in try/except block will make the finally block not executed
    bool has_finally = curr()->type == TK_FINALLY;
    if(!has_finally) {
        // no cleanup clause: after a successful try body, skip over the except chain
        patches[patches_length++] = Ctx__emit_(ctx(), OP_JUMP_FORWARD, BC_NOARG, BC_KEEPLINE);
    }
    Ctx__exit_block(ctx());
    if(has_finally) {
        // `try ... finally` with no except clauses: compile the cleanup body and return early
        consume(TK_FINALLY);
        Ctx__emit_(ctx(), OP_BEGIN_FINALLY, BC_NOARG, prev()->line);
        // finally only, no except block
        Ctx__enter_block(ctx(), CodeBlockType_FINALLY);
        check(compile_block_body(self, compile_stmt));
        Ctx__exit_block(ctx());
        Ctx__emit_(ctx(), OP_END_FINALLY, BC_NOARG, BC_KEEPLINE);
        // re-raise if needed
        Ctx__emit_(ctx(), OP_RE_RAISE, BC_NOARG, BC_KEEPLINE);
        return NULL;
    }
    do {
        // each iteration compiles one `except [<expr> [as <name>]]:` clause
        if(patches_length == 8) {
            return SyntaxError(self, "maximum number of except clauses reached");
        }
        py_Name as_name = 0;  // 0 means no `as <name>` binding
        consume(TK_EXCEPT);
        if(is_expression(self, false)) {
            // except <expr>:
            check(EXPR(self));
            Ctx__s_emit_top(ctx());
            Ctx__emit_(ctx(), OP_EXCEPTION_MATCH, BC_NOARG, prev()->line);
            if(match(TK_AS)) {
                // except <expr> as <name>:
                consume(TK_ID);
                as_name = py_namev(Token__sv(prev()));
            }
        } else {
            // except:  — bare clause matches everything (push a constant true)
            Ctx__emit_(ctx(), OP_LOAD_TRUE, BC_NOARG, BC_KEEPLINE);
        }
        // if the match test failed, fall through to the next except clause
        int patch = Ctx__emit_(ctx(), OP_POP_JUMP_IF_FALSE, BC_NOARG, BC_KEEPLINE);
        // on match
        Ctx__emit_(ctx(), OP_BEGIN_EXC_HANDLING, BC_NOARG, BC_KEEPLINE);
        if(as_name) {
            // bind the live exception object to <name>
            Ctx__emit_(ctx(), OP_PUSH_EXCEPTION, BC_NOARG, BC_KEEPLINE);
            Ctx__emit_store_name(ctx(), name_scope(self), as_name, BC_KEEPLINE);
        }
        Ctx__enter_block(ctx(), CodeBlockType_EXCEPT);
        check(compile_block_body(self, compile_stmt));
        Ctx__exit_block(ctx());
        Ctx__emit_(ctx(), OP_END_EXC_HANDLING, BC_NOARG, BC_KEEPLINE);
        // handled: jump past the remaining clauses
        patches[patches_length++] = Ctx__emit_(ctx(), OP_JUMP_FORWARD, BC_NOARG, BC_KEEPLINE);
        Ctx__patch_jump(ctx(), patch);
    } while(curr()->type == TK_EXCEPT);
    // no match, re-raise
    // ...
    // match one & handled, jump to the end
    for(int i = 0; i < patches_length; i++)
        Ctx__patch_jump(ctx(), patches[i]);
    if(match(TK_FINALLY)) {
        // optional cleanup clause after the except chain
        Ctx__emit_(ctx(), OP_BEGIN_FINALLY, BC_NOARG, prev()->line);
        Ctx__enter_block(ctx(), CodeBlockType_FINALLY);
        check(compile_block_body(self, compile_stmt));
        Ctx__exit_block(ctx());
        Ctx__emit_(ctx(), OP_END_FINALLY, BC_NOARG, BC_KEEPLINE);
    }
    // re-raise if needed
    Ctx__emit_(ctx(), OP_RE_RAISE, BC_NOARG, BC_KEEPLINE);
    return NULL;
}
/* Compiles a single statement. The first token has already been consumed via
 * advance() (except for `class`, which is matched up front), so each `case`
 * dispatches on prev()->type; the default case rewinds one token and treats
 * the statement as an expression / assignment. Returns NULL on success or a
 * SyntaxError.
 */
static Error* compile_stmt(Compiler* self) {
    Error* err;
    if(match(TK_CLASS)) {
        check(compile_class(self, 0));
        return NULL;
    }
    advance();
    int kw_line = prev()->line; // backup line number
    switch(prev()->type) {
        case TK_BREAK: {
            // prepare_loop_divert returns the innermost loop block, or <0 outside a loop
            int curr_loop_block = Ctx__prepare_loop_divert(ctx(), kw_line, true);
            if(curr_loop_block < 0) return SyntaxError(self, "'break' outside loop");
            Ctx__emit_(ctx(), OP_LOOP_BREAK, curr_loop_block, kw_line);
            consume_end_stmt();
            break;
        }
        case TK_CONTINUE: {
            int curr_loop_block = Ctx__prepare_loop_divert(ctx(), kw_line, false);
            if(curr_loop_block < 0) return SyntaxError(self, "'continue' not properly in loop");
            Ctx__emit_(ctx(), OP_LOOP_CONTINUE, curr_loop_block, kw_line);
            consume_end_stmt();
            break;
        }
        case TK_YIELD:
            // contexts.length <= 1 means we are at module level, not inside a function
            if(self->contexts.length <= 1) return SyntaxError(self, "'yield' outside function");
            if(match_end_stmt(self)) {
                // bare `yield` — arg 1 appears to signal "no explicit value"; same
                // convention as bare `return` below
                Ctx__emit_(ctx(), OP_YIELD_VALUE, 1, kw_line);
            } else {
                check(EXPR_TUPLE(self));
                Ctx__s_emit_top(ctx());
                Ctx__emit_(ctx(), OP_YIELD_VALUE, BC_NOARG, kw_line);
                consume_end_stmt();
            }
            break;
        case TK_YIELD_FROM:
            check(compile_yield_from(self, kw_line));
            // discard the generator's final value
            Ctx__emit_(ctx(), OP_POP_TOP, BC_NOARG, kw_line);
            consume_end_stmt();
            break;
        case TK_RETURN:
            if(self->contexts.length <= 1) return SyntaxError(self, "'return' outside function");
            if(match_end_stmt(self)) {
                // bare `return`
                Ctx__emit_(ctx(), OP_RETURN_VALUE, 1, kw_line);
            } else {
                check(EXPR_TUPLE(self));
                Ctx__s_emit_top(ctx());
                consume_end_stmt();
                Ctx__emit_(ctx(), OP_RETURN_VALUE, BC_NOARG, kw_line);
            }
            break;
        /*************************************************/
        case TK_IF: check(compile_if_stmt(self)); break;
        case TK_WHILE: check(compile_while_loop(self)); break;
        case TK_FOR: check(compile_for_loop(self)); break;
        case TK_IMPORT: check(compile_normal_import(self)); break;
        case TK_FROM: {
            // the buffer accumulates the dotted module path; always destroyed here
            c11_sbuf buf;
            c11_sbuf__ctor(&buf);
            err = compile_from_import(&buf, self);
            c11_sbuf__dtor(&buf);
            if(err) return err;
            break;
        }
        case TK_DEF: check(compile_function(self, 0)); break;
        case TK_DECORATOR: check(compile_decorated(self)); break;
        case TK_TRY: check(compile_try_except(self)); break;
        case TK_PASS: consume_end_stmt(); break;
        /*************************************************/
        case TK_ASSERT: {
            check(EXPR(self)); // condition
            Ctx__s_emit_top(ctx());
            // if the condition is true, skip over the raise
            int index = Ctx__emit_(ctx(), OP_POP_JUMP_IF_TRUE, BC_NOARG, kw_line);
            int has_msg = 0;
            if(match(TK_COMMA)) {
                check(EXPR(self)); // message
                Ctx__s_emit_top(ctx());
                has_msg = 1;
            }
            Ctx__emit_(ctx(), OP_RAISE_ASSERT, has_msg, kw_line);
            Ctx__patch_jump(ctx(), index);
            consume_end_stmt();
            break;
        }
        case TK_GLOBAL:
            // `global a, b, c` — record each name so later stores go to globals
            do {
                consume(TK_ID);
                py_Name name = py_namev(Token__sv(prev()));
                c11_smallmap_n2i__set(&ctx()->global_names, name, 0);
            } while(match(TK_COMMA));
            consume_end_stmt();
            break;
        case TK_RAISE: {
            check(EXPR(self));
            Ctx__s_emit_top(ctx());
            Ctx__emit_(ctx(), OP_RAISE, BC_NOARG, kw_line);
            consume_end_stmt();
        } break;
        case TK_DEL: {
            check(EXPR_TUPLE(self));
            // vtemit_del fails for targets that are not deletable l-values
            Expr* e = Ctx__s_top(ctx());
            if(!vtemit_del(e, ctx())) return SyntaxError(self, "invalid syntax");
            Ctx__s_pop(ctx());
            consume_end_stmt();
        } break;
        case TK_WITH: {
            check(EXPR(self)); // [ <expr> ]
            Ctx__s_emit_top(ctx());
            Ctx__enter_block(ctx(), CodeBlockType_WITH);
            NameExpr* as_name = NULL;
            if(match(TK_AS)) {
                consume(TK_ID);
                py_Name name = py_namev(Token__sv(prev()));
                as_name = NameExpr__new(prev()->line, name, name_scope(self));
            }
            Ctx__emit_(ctx(), OP_WITH_ENTER, BC_NOARG, prev()->line);
            // [ <expr> <expr>.__enter__() ]
            if(as_name) {
                // `with <expr> as <name>` — store __enter__()'s result into <name>
                bool ok = vtemit_store((Expr*)as_name, ctx());
                vtdelete((Expr*)as_name);
                if(!ok) return SyntaxError(self, "invalid syntax");
            } else {
                // discard `__enter__()`'s return value
                Ctx__emit_(ctx(), OP_POP_TOP, BC_NOARG, BC_KEEPLINE);
            }
            check(compile_block_body(self, compile_stmt));
            Ctx__emit_(ctx(), OP_WITH_EXIT, BC_NOARG, prev()->line);
            Ctx__exit_block(ctx());
        } break;
        /*************************************************/
        // handle dangling expression or assignment
        default: {
            // do revert since we have pre-called advance() at the beginning
            --self->i;
            check(EXPR_TUPLE(self));
            bool is_typed_name = false; // e.g. x: int
            // eat variable's type hint if it is a single name
            if(Ctx__s_top(ctx())->vt->is_name) {
                if(match(TK_COLON)) {
                    c11_sv type_hint;
                    check(consume_type_hints_sv(self, &type_hint));
                    is_typed_name = true;
                    if(ctx()->is_compiling_class) {
                        // inside a class body, record the annotation string on the class
                        NameExpr* ne = (NameExpr*)Ctx__s_top(ctx());
                        int index = Ctx__add_const_string(ctx(), type_hint);
                        Ctx__emit_(ctx(), OP_LOAD_CONST, index, BC_KEEPLINE);
                        Ctx__emit_(ctx(), OP_ADD_CLASS_ANNOTATION, ne->name, BC_KEEPLINE);
                    }
                }
            }
            bool is_assign = false;
            check(try_compile_assignment(self, &is_assign));
            if(!is_assign) {
                if(Ctx__s_size(ctx()) > 0 && Ctx__s_top(ctx())->vt->is_starred) {
                    return SyntaxError(self, "can't use starred expression here");
                }
                if(!is_typed_name) {
                    Ctx__s_emit_top(ctx());
                    // in the REPL (SINGLE_MODE) echo top-level expression results
                    if((mode() == SINGLE_MODE) && name_scope(self) == NAME_GLOBAL) {
                        Ctx__emit_(ctx(), OP_PRINT_EXPR, BC_NOARG, BC_KEEPLINE);
                    } else {
                        Ctx__emit_(ctx(), OP_POP_TOP, BC_NOARG, BC_KEEPLINE);
                    }
                } else {
                    // a bare annotation (`x: int`) emits nothing; drop the name expr
                    Ctx__s_pop(ctx());
                }
            }
            consume_end_stmt();
            break;
        }
    }
    return NULL;
}
  2497. /////////////////////////////////////////////////////////////////
/* Top-level compile driver: compiles the compiler's token stream into `out`.
 * In EVAL_MODE a single expression is compiled and its value returned;
 * otherwise statements are compiled until EOF. Must be called exactly once
 * per Compiler instance. Returns NULL on success, an Error otherwise.
 */
Error* Compiler__compile(Compiler* self, CodeObject* out) {
    // make sure it is the first time to compile
    assert(self->i == 0);
    // make sure the first token is @sof
    assert(tk(0)->type == TK_SOF);
    push_global_context(self, out);
    advance();        // skip @sof, so prev() is always valid
    match_newlines(); // skip possible leading '\n'
    Error* err;
    if(mode() == EVAL_MODE) {
        // eval mode: exactly one expression, returned as the result
        check(EXPR_TUPLE(self));
        Ctx__s_emit_top(ctx());
        consume(TK_EOF);
        Ctx__emit_(ctx(), OP_RETURN_VALUE, BC_NOARG, BC_KEEPLINE);
        check(pop_context(self));
        return NULL;
    }
    // exec mode: a sequence of statements until EOF
    while(!match(TK_EOF)) {
        check(compile_stmt(self));
        match_newlines();
    }
    check(pop_context(self));
    return NULL;
}
  2522. Error* pk_compile(SourceData_ src, CodeObject* out) {
  2523. Token* tokens;
  2524. int tokens_length;
  2525. Error* err = Lexer__process(src, &tokens, &tokens_length);
  2526. if(err) return err;
  2527. #if 0
  2528. Token* data = (Token*)tokens.data;
  2529. printf("%s\n", src->filename->data);
  2530. for(int i = 0; i < tokens.length; i++) {
  2531. Token* t = data + i;
  2532. c11_string* tmp = c11_string__new2(t->start, t->length);
  2533. if(t->value.index == TokenValue_STR) {
  2534. const char* value_str = t->value._str->data;
  2535. printf("[%d] %s: %s (value._str=%s)\n",
  2536. t->line,
  2537. TokenSymbols[t->type],
  2538. tmp->data,
  2539. value_str);
  2540. } else {
  2541. printf("[%d] %s: %s\n", t->line, TokenSymbols[t->type], tmp->data);
  2542. }
  2543. c11_string__delete(tmp);
  2544. }
  2545. #endif
  2546. Compiler compiler;
  2547. Compiler__ctor(&compiler, src, tokens, tokens_length);
  2548. CodeObject__ctor(out, src, c11_string__sv(src->filename));
  2549. err = Compiler__compile(&compiler, out);
  2550. if(err) {
  2551. // dispose the code object if error occurs
  2552. CodeObject__dtor(out);
  2553. }
  2554. Compiler__dtor(&compiler);
  2555. return err;
  2556. }
// clang-format off
/* Pratt-parser dispatch table, indexed by token type.
 * Each entry is { prefix-parselet, infix-parselet, infix-precedence }.
 * A NULL prefix means the token cannot start an expression; a NULL infix
 * means it cannot appear between operands. Missing entries are zeroed by
 * the designated-initializer syntax. */
const static PrattRule rules[TK__COUNT__] = {
// http://journal.stuffwithstuff.com/2011/03/19/pratt-parsers-expression-parsing-made-easy/
    // primary: attribute access, call/grouping, subscript/list literal
    [TK_DOT]        =   { NULL,          exprAttrib,         PREC_PRIMARY    },
    [TK_LPAREN]     =   { exprGroup,     exprCall,           PREC_PRIMARY    },
    [TK_LBRACKET]   =   { exprList,      exprSubscr,         PREC_PRIMARY    },
    // arithmetic (`-`, `*`, `~`, `**` also act as prefix operators)
    [TK_MOD]        =   { NULL,          exprBinaryOp,       PREC_FACTOR     },
    [TK_ADD]        =   { NULL,          exprBinaryOp,       PREC_TERM       },
    [TK_SUB]        =   { exprUnaryOp,   exprBinaryOp,       PREC_TERM       },
    [TK_MUL]        =   { exprUnaryOp,   exprBinaryOp,       PREC_FACTOR     },
    [TK_INVERT]     =   { exprUnaryOp,   NULL,               PREC_UNARY      },
    [TK_DIV]        =   { NULL,          exprBinaryOp,       PREC_FACTOR     },
    [TK_FLOORDIV]   =   { NULL,          exprBinaryOp,       PREC_FACTOR     },
    [TK_POW]        =   { exprUnaryOp,   exprBinaryOp,       PREC_EXPONENT   },
    // comparisons
    [TK_GT]         =   { NULL,          exprBinaryOp,       PREC_COMPARISION },
    [TK_LT]         =   { NULL,          exprBinaryOp,       PREC_COMPARISION },
    [TK_EQ]         =   { NULL,          exprBinaryOp,       PREC_COMPARISION },
    [TK_NE]         =   { NULL,          exprBinaryOp,       PREC_COMPARISION },
    [TK_GE]         =   { NULL,          exprBinaryOp,       PREC_COMPARISION },
    [TK_LE]         =   { NULL,          exprBinaryOp,       PREC_COMPARISION },
    [TK_IN]         =   { NULL,          exprBinaryOp,       PREC_COMPARISION },
    [TK_IS]         =   { NULL,          exprBinaryOp,       PREC_COMPARISION },
    // bitwise
    [TK_LSHIFT]     =   { NULL,          exprBinaryOp,       PREC_BITWISE_SHIFT },
    [TK_RSHIFT]     =   { NULL,          exprBinaryOp,       PREC_BITWISE_SHIFT },
    [TK_AND]        =   { NULL,          exprBinaryOp,       PREC_BITWISE_AND },
    [TK_OR]         =   { NULL,          exprBinaryOp,       PREC_BITWISE_OR },
    [TK_XOR]        =   { NULL,          exprBinaryOp,       PREC_BITWISE_XOR },
    // `@` as an infix operator (matrix-multiply style), ternary, keyword operators
    [TK_DECORATOR]  =   { NULL,          exprBinaryOp,       PREC_FACTOR     },
    [TK_IF]         =   { NULL,          exprTernary,        PREC_TERNARY    },
    [TK_NOT_IN]     =   { NULL,          exprBinaryOp,       PREC_COMPARISION },
    [TK_IS_NOT]     =   { NULL,          exprBinaryOp,       PREC_COMPARISION },
    [TK_AND_KW ]    =   { NULL,          exprAnd,            PREC_LOGICAL_AND },
    [TK_OR_KW]      =   { NULL,          exprOr,             PREC_LOGICAL_OR },
    [TK_NOT_KW]     =   { exprNot,       NULL,               PREC_LOGICAL_NOT },
    // literals and atoms (prefix-only)
    [TK_TRUE]       =   { exprLiteral0 },
    [TK_FALSE]      =   { exprLiteral0 },
    [TK_NONE]       =   { exprLiteral0 },
    [TK_DOTDOTDOT]  =   { exprLiteral0 },
    [TK_LAMBDA]     =   { exprLambda,  },
    [TK_ID]         =   { exprName,    },
    [TK_NUM]        =   { exprLiteral, },
    [TK_STR]        =   { exprLiteral, },
    [TK_FSTR_BEGIN] =   { exprFString, },
    [TK_IMAG]       =   { exprImag,    },
    [TK_BYTES]      =   { exprBytes,   },
    [TK_LBRACE]     =   { exprMap      },
    // `:` — slice with omitted start (prefix) or slice separator (infix)
    [TK_COLON]      =   { exprSlice0,    exprSlice1,         PREC_PRIMARY    }
};
// clang-format on
/* End-of-file cleanup: undefine the file-local convenience macros so they
 * cannot leak into any translation unit that textually includes this file. */
#undef static_assert_expr_size
#undef vtcall
#undef vtemit_
#undef vtemit_del
#undef vtemit_store
#undef vtemit_inplace
#undef vtemit_istore
#undef vtdelete
#undef EXPR_COMMON_HEADER
#undef is_compare_expr
#undef tk
#undef prev
#undef curr
#undef next
#undef advance
#undef mode
#undef ctx
#undef match_newlines
#undef consume
#undef consume_end_stmt
#undef check
#undef match