vm.c 26 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745
  1. #include "pocketpy/interpreter/vm.h"
  2. #include "pocketpy/common/memorypool.h"
  3. #include "pocketpy/common/utils.h"
  4. #include "pocketpy/interpreter/generator.h"
  5. #include "pocketpy/interpreter/modules.h"
  6. #include "pocketpy/interpreter/typeinfo.h"
  7. #include "pocketpy/objects/base.h"
  8. #include "pocketpy/interpreter/types.h"
  9. #include "pocketpy/common/_generated.h"
  10. #include "pocketpy/objects/exception.h"
  11. #include "pocketpy/pocketpy.h"
  12. #include <stdbool.h>
  13. #include <assert.h>
  14. static char* pk_default_importfile(const char* path) {
  15. #if PK_ENABLE_OS
  16. FILE* f = fopen(path, "rb");
  17. if(f == NULL) return NULL;
  18. fseek(f, 0, SEEK_END);
  19. long size = ftell(f);
  20. fseek(f, 0, SEEK_SET);
  21. char* buffer = PK_MALLOC(size + 1);
  22. size = fread(buffer, 1, size, f);
  23. buffer[size] = 0;
  24. fclose(f);
  25. return buffer;
  26. #else
  27. return NULL;
  28. #endif
  29. }
  30. static void pk_default_print(const char* data) { printf("%s", data); }
  31. static void pk_default_flush() { fflush(stdout); }
  32. static int pk_default_getchr() { return getchar(); }
  33. void py_profiler_begin() {
  34. LineProfiler* lp = &pk_current_vm->line_profiler;
  35. TraceInfo* trace_info = &pk_current_vm->trace_info;
  36. if(trace_info->func == NULL) py_sys_settrace(LineProfiler_tracefunc, true);
  37. c11__rtassert(trace_info->func == LineProfiler_tracefunc);
  38. LineProfiler__begin(lp);
  39. }
  40. void py_profiler_end() {
  41. LineProfiler* lp = &pk_current_vm->line_profiler;
  42. LineProfiler__end(lp);
  43. }
  44. void py_profiler_reset() {
  45. LineProfiler* lp = &pk_current_vm->line_profiler;
  46. LineProfiler__reset(lp);
  47. }
  48. char* py_profiler_report() {
  49. LineProfiler* lp = &pk_current_vm->line_profiler;
  50. if(lp->enabled) LineProfiler__end(lp);
  51. c11_string* s = LineProfiler__get_report(lp);
  52. char* s_dup = c11_strdup(s->data);
  53. c11_string__delete(s);
  54. return s_dup;
  55. }
  56. void LineProfiler_tracefunc(py_Frame* frame, enum py_TraceEvent event) {
  57. LineProfiler* lp = &pk_current_vm->line_profiler;
  58. if(lp->enabled) LineProfiler__tracefunc_internal(lp, frame, event);
  59. }
  60. static int BinTree__cmp_cstr(void* lhs, void* rhs) {
  61. const char* l = (const char*)lhs;
  62. const char* r = (const char*)rhs;
  63. return strcmp(l, r);
  64. }
/* Construct the interpreter state in `self`.
 * Registers every builtin type at its fixed tp_* index, injects the builtin
 * exception classes, registers the native modules, executes the python-level
 * builtins source, and finally creates the `__main__` module.
 * NOTE(review): the registration helpers appear to operate on the current VM,
 * so `pk_current_vm` presumably must already point at `self` — confirm at
 * the call site. */
void VM__ctor(VM* self) {
    self->top_frame = NULL;
    // module registry keyed by C string; keys are borrowed, never freed here
    const static BinTreeConfig modules_config = {
        .f_cmp = BinTree__cmp_cstr,
        .need_free_key = false,
    };
    BinTree__ctor(&self->modules, "", py_NIL(), &modules_config);
    c11_vector__ctor(&self->types, sizeof(TypePointer));
    self->builtins = NULL;
    self->main = NULL;
    // host-overridable callbacks: default to stdio-based implementations
    self->callbacks.importfile = pk_default_importfile;
    self->callbacks.lazyimport = NULL;
    self->callbacks.print = pk_default_print;
    self->callbacks.flush = pk_default_flush;
    self->callbacks.getchr = pk_default_getchr;
    self->last_retval = *py_NIL();
    self->curr_exception = *py_NIL();
    self->recursion_depth = 0;
    self->max_recursion_depth = 1000;
    self->is_curr_exc_handled = false;
    self->ctx = NULL;
    self->curr_class = NULL;
    self->curr_decl_based_function = NULL;
    memset(&self->trace_info, 0, sizeof(TraceInfo));
    memset(&self->watchdog_info, 0, sizeof(WatchdogInfo));
    LineProfiler__ctor(&self->line_profiler);
    // frame objects come from a fixed-size pool; 32 frames per chunk
    FixedMemoryPool__ctor(&self->pool_frame, sizeof(py_Frame), 32);
    ManagedHeap__ctor(&self->heap);
    ValueStack__ctor(&self->stack);
    CachedNames__ctor(&self->cached_names);
    NameDict__ctor(&self->compile_time_funcs, PK_TYPE_ATTR_LOAD_FACTOR);
    /* Init Builtin Types */
    // 0: unused
    TypePointer* placeholder = c11_vector__emplace(&self->types);
    placeholder->ti = NULL;
    placeholder->dtor = NULL;
    // Each registration below must land on its predefined tp_* index.
    // A mismatch means the registration order diverged from the type-id
    // table, which would corrupt all type lookups -> abort immediately.
#define validate(t, expr) \
    if(t != (expr)) abort()
    validate(tp_object, pk_newtype("object", tp_nil, NULL, NULL, true, false));
    validate(tp_type, pk_newtype("type", tp_object, NULL, NULL, false, true));
    pk_object__register();
    validate(tp_int, pk_newtype("int", tp_object, NULL, NULL, false, true));
    validate(tp_float, pk_newtype("float", tp_object, NULL, NULL, false, true));
    validate(tp_bool, pk_newtype("bool", tp_object, NULL, NULL, false, true));
    pk_number__register();
    validate(tp_str, pk_str__register());
    validate(tp_str_iterator, pk_str_iterator__register());
    validate(tp_list, pk_list__register());
    validate(tp_tuple, pk_tuple__register());
    validate(tp_list_iterator, pk_list_iterator__register());
    validate(tp_tuple_iterator, pk_tuple_iterator__register());
    validate(tp_slice, pk_slice__register());
    validate(tp_range, pk_range__register());
    validate(tp_range_iterator, pk_range_iterator__register());
    validate(tp_module, pk_module__register());
    validate(tp_function, pk_function__register());
    validate(tp_nativefunc, pk_nativefunc__register());
    validate(tp_boundmethod, pk_boundmethod__register());
    validate(tp_super, pk_super__register());
    validate(tp_BaseException, pk_BaseException__register());
    validate(tp_Exception, pk_Exception__register());
    validate(tp_bytes, pk_bytes__register());
    validate(tp_namedict, pk_namedict__register());
    validate(tp_locals, pk_newtype("locals", tp_object, NULL, NULL, false, true));
    validate(tp_code, pk_code__register());
    validate(tp_dict, pk_dict__register());
    validate(tp_dict_iterator, pk_dict_items__register());
    validate(tp_property, pk_property__register());
    validate(tp_star_wrapper, pk_newtype("star_wrapper", tp_object, NULL, NULL, false, true));
    validate(tp_staticmethod, pk_staticmethod__register());
    validate(tp_classmethod, pk_classmethod__register());
    validate(tp_NoneType, pk_newtype("NoneType", tp_object, NULL, NULL, false, true));
    validate(tp_NotImplementedType,
             pk_newtype("NotImplementedType", tp_object, NULL, NULL, false, true));
    validate(tp_ellipsis, pk_newtype("ellipsis", tp_object, NULL, NULL, false, true));
    validate(tp_generator, pk_generator__register());
    self->builtins = pk_builtins__register();
    // inject some builtin exceptions
    // (registers the exception type and publishes it in the builtins module)
#define INJECT_BUILTIN_EXC(name, TBase) \
    do { \
        py_Type type = pk_newtype(#name, TBase, self->builtins, NULL, false, true); \
        py_setdict(self->builtins, py_name(#name), py_tpobject(type)); \
        validate(tp_##name, type); \
    } while(0)
    INJECT_BUILTIN_EXC(SystemExit, tp_BaseException);
    INJECT_BUILTIN_EXC(KeyboardInterrupt, tp_BaseException);
    // StopIteration has a dedicated registration function instead of a plain
    // pk_newtype, so it is wired up manually here
    validate(tp_StopIteration, pk_StopIteration__register());
    py_setdict(self->builtins, py_name("StopIteration"), py_tpobject(tp_StopIteration));
    INJECT_BUILTIN_EXC(SyntaxError, tp_Exception);
    INJECT_BUILTIN_EXC(RecursionError, tp_Exception);
    INJECT_BUILTIN_EXC(OSError, tp_Exception);
    INJECT_BUILTIN_EXC(NotImplementedError, tp_Exception);
    INJECT_BUILTIN_EXC(TypeError, tp_Exception);
    INJECT_BUILTIN_EXC(IndexError, tp_Exception);
    INJECT_BUILTIN_EXC(ValueError, tp_Exception);
    INJECT_BUILTIN_EXC(RuntimeError, tp_Exception);
    INJECT_BUILTIN_EXC(TimeoutError, tp_Exception);
    INJECT_BUILTIN_EXC(ZeroDivisionError, tp_Exception);
    INJECT_BUILTIN_EXC(NameError, tp_Exception);
    INJECT_BUILTIN_EXC(UnboundLocalError, tp_Exception);
    INJECT_BUILTIN_EXC(AttributeError, tp_Exception);
    INJECT_BUILTIN_EXC(ImportError, tp_Exception);
    INJECT_BUILTIN_EXC(AssertionError, tp_Exception);
    INJECT_BUILTIN_EXC(KeyError, tp_Exception);
#undef INJECT_BUILTIN_EXC
#undef validate
    /* Setup Public Builtin Types */
    // expose these type objects by name in the builtins module
    py_Type public_types[] = {
        tp_object,
        tp_type,
        tp_int,
        tp_float,
        tp_bool,
        tp_str,
        tp_list,
        tp_tuple,
        tp_slice,
        tp_range,
        tp_bytes,
        tp_dict,
        tp_property,
        tp_staticmethod,
        tp_classmethod,
        tp_super,
        tp_BaseException,
        tp_Exception,
    };
    for(int i = 0; i < c11__count_array(public_types); i++) {
        py_TypeInfo* ti = pk_typeinfo(public_types[i]);
        py_setdict(self->builtins, ti->name, &ti->self);
    }
    py_newnotimplemented(py_emplacedict(self->builtins, py_name("NotImplemented")));
    pk__add_module_vmath();
    pk__add_module_array2d();
    pk__add_module_colorcvt();
    // add modules
    pk__add_module_os();
    pk__add_module_sys();
    pk__add_module_io();
    pk__add_module_math();
    pk__add_module_dis();
    pk__add_module_random();
    pk__add_module_json();
    pk__add_module_gc();
    pk__add_module_time();
    pk__add_module_easing();
    pk__add_module_traceback();
    pk__add_module_enum();
    pk__add_module_inspect();
    pk__add_module_pickle();
    pk__add_module_base64();
    pk__add_module_importlib();
    pk__add_module_unicodedata();
    pk__add_module_conio();
    pk__add_module_lz4();       // optional
    pk__add_module_libhv();     // optional
    pk__add_module_cute_png();  // optional
    pk__add_module_pkpy();
    // add python builtins
    // (executes the embedded python source; a failure here is unrecoverable)
    do {
        bool ok;
        ok = py_exec(kPythonLibs_builtins, "<builtins>", EXEC_MODE, self->builtins);
        if(!ok) goto __ABORT;
        break;
    __ABORT:
        py_printexc();
        c11__abort("failed to load python builtins!");
    } while(0);
    self->main = py_newmodule("__main__");
}
/* Destroy the VM. Teardown order matters: the trace function is cleared and
 * the heap (all python objects) is destroyed before the remaining frames are
 * popped and the auxiliary containers are released. */
void VM__dtor(VM* self) {
    // reset traceinfo
    py_sys_settrace(NULL, true);
    LineProfiler__dtor(&self->line_profiler);
    // destroy all objects
    ManagedHeap__dtor(&self->heap);
    // clear frames
    while(self->top_frame)
        VM__pop_frame(self);
    BinTree__dtor(&self->modules);
    FixedMemoryPool__dtor(&self->pool_frame);
    ValueStack__dtor(&self->stack);
    CachedNames__dtor(&self->cached_names);
    NameDict__dtor(&self->compile_time_funcs);
    c11_vector__dtor(&self->types);
}
  251. void VM__push_frame(VM* self, py_Frame* frame) {
  252. frame->f_back = self->top_frame;
  253. self->top_frame = frame;
  254. self->recursion_depth++;
  255. if(self->trace_info.func) self->trace_info.func(frame, TRACE_EVENT_PUSH);
  256. }
  257. void VM__pop_frame(VM* self) {
  258. assert(self->top_frame);
  259. py_Frame* frame = self->top_frame;
  260. if(self->trace_info.func) self->trace_info.func(frame, TRACE_EVENT_POP);
  261. // reset stack pointer
  262. self->stack.sp = frame->p0;
  263. // pop frame and delete
  264. self->top_frame = frame->f_back;
  265. Frame__delete(frame);
  266. self->recursion_depth--;
  267. }
  268. static void _clip_int(int* value, int min, int max) {
  269. if(*value < min) *value = min;
  270. if(*value > max) *value = max;
  271. }
/* Resolve `slice` (an int or a slice object) against a sequence of `length`
 * elements into C-level (start, stop, step) iteration bounds.
 *
 * - int: normalized via pk__normalize_index; yields [index, index + 1), step 1.
 * - slice: None fields get Python's defaults, and start/stop are clamped so
 *   the caller can iterate without further bounds checks — into [0, length]
 *   for a positive step, into [-1, length - 1] for a negative step (where
 *   stop == -1 means "one before the first element").
 *
 * Returns false with a python exception set when a field is not an int or
 * the step is zero; aborts if `slice` is neither an int nor a slice. */
bool pk__parse_int_slice(py_Ref slice,
                         int length,
                         int* restrict start,
                         int* restrict stop,
                         int* restrict step) {
    if(py_isint(slice)) {
        int index = py_toint(slice);
        bool ok = pk__normalize_index(&index, length);
        if(!ok) return false;
        *start = index;
        *stop = index + 1;
        *step = 1;
        return true;
    }
    if(!py_istype(slice, tp_slice)) c11__abort("pk__parse_int_slice(): not a slice object");
    // slice object layout: slot 0 = start, slot 1 = stop, slot 2 = step
    py_Ref s_start = py_getslot(slice, 0);
    py_Ref s_stop = py_getslot(slice, 1);
    py_Ref s_step = py_getslot(slice, 2);
    if(py_isnone(s_step))
        *step = 1;
    else {
        if(!py_checkint(s_step)) return false;
        *step = py_toint(s_step);
    }
    if(*step == 0) return ValueError("slice step cannot be zero");
    if(*step > 0) {
        if(py_isnone(s_start))
            *start = 0;
        else {
            if(!py_checkint(s_start)) return false;
            *start = py_toint(s_start);
            if(*start < 0) *start += length;  // negative index counts from the end
            _clip_int(start, 0, length);
        }
        if(py_isnone(s_stop))
            *stop = length;
        else {
            if(!py_checkint(s_stop)) return false;
            *stop = py_toint(s_stop);
            if(*stop < 0) *stop += length;
            _clip_int(stop, 0, length);
        }
    } else {
        // negative step: iteration runs backwards; default start is the last
        // element, default stop is one position before index 0
        if(py_isnone(s_start))
            *start = length - 1;
        else {
            if(!py_checkint(s_start)) return false;
            *start = py_toint(s_start);
            if(*start < 0) *start += length;
            _clip_int(start, -1, length - 1);
        }
        if(py_isnone(s_stop))
            *stop = -1;
        else {
            if(!py_checkint(s_stop)) return false;
            *stop = py_toint(s_stop);
            if(*stop < 0) *stop += length;
            _clip_int(stop, -1, length - 1);
        }
    }
    return true;
}
  334. bool pk__normalize_index(int* index, int length) {
  335. if(*index < 0) *index += length;
  336. if(*index < 0 || *index >= length) return IndexError("%d not in [0, %d)", *index, length);
  337. return true;
  338. }
/* Lay out the arguments of a python-level call into `buffer`, which must
 * have room for co->nlocals slots (the frame's local-variable area).
 *
 * Input layout: argv..p1 holds the positional arguments; p1 onwards holds
 * `kwargc` (name, value) pairs where the name is stored as an int TValue.
 *
 * Fills declared positional parameters, applies keyword defaults, collects
 * surplus positionals into *args and unknown keywords into **kwargs when
 * those are declared. Returns false with TypeError set on any arity or
 * keyword mismatch. */
static bool
prepare_py_call(py_TValue* buffer, py_Ref argv, py_Ref p1, int kwargc, const FuncDecl* decl) {
    const CodeObject* co = &decl->code;
    int decl_argc = decl->args.length;
    if(p1 - argv < decl_argc) {
        return TypeError("%s() takes %d positional arguments but %d were given",
                         co->name->data,
                         decl_argc,
                         (int)(p1 - argv));
    }
    // `t` walks the supplied positional arguments as they are consumed
    py_TValue* t = argv;
    // prepare args
    memset(buffer, 0, co->nlocals * sizeof(py_TValue));
    c11__foreach(int, &decl->args, index) buffer[*index] = *t++;
    // prepare kwdefaults
    c11__foreach(FuncDeclKwArg, &decl->kwargs, kv) buffer[kv->index] = kv->value;
    // handle *args
    if(decl->starred_arg != -1) {
        // surplus positionals go into a fresh tuple bound to the *args slot
        int exceed_argc = p1 - t;
        py_Ref vargs = &buffer[decl->starred_arg];
        py_Ref data = py_newtuple(vargs, exceed_argc);
        for(int j = 0; j < exceed_argc; j++) {
            data[j] = *t++;
        }
    } else {
        // kwdefaults override
        // def f(a, b, c=None)
        // f(1, 2, 3) -> c=3
        c11__foreach(FuncDeclKwArg, &decl->kwargs, kv) {
            if(t >= p1) break;
            buffer[kv->index] = *t++;
        }
        // not able to consume all args
        if(t < p1) return TypeError("too many arguments (%s)", co->name->data);
    }
    if(decl->starred_kwarg != -1) py_newdict(&buffer[decl->starred_kwarg]);
    for(int j = 0; j < kwargc; j++) {
        // keyword names arrive on the stack encoded as int TValues
        py_Name key = (py_Name)py_toint(&p1[2 * j]);
        int index = c11_smallmap_n2d__get(&decl->kw_to_index, key, -1);
        // if key is an explicit key, set as local variable
        if(index >= 0) {
            buffer[index] = p1[2 * j + 1];
        } else {
            // otherwise, set as **kwargs if possible
            if(decl->starred_kwarg == -1) {
                return TypeError("'%n' is an invalid keyword argument for %s()",
                                 key,
                                 co->name->data);
            } else {
                // add to **kwargs
                bool ok =
                    py_dict_setitem(&buffer[decl->starred_kwarg], py_name2ref(key), &p1[2 * j + 1]);
                if(!ok) return false;
            }
        }
    }
    return true;
}
/* Invoke the callable using the stack-based calling convention.
 *
 * Expected stack layout on entry (sp = self->stack.sp):
 *     [callable, <self>, args..., kwargs...]
 *      ^p0       ^p0+1            ^p1       ^sp
 * with `argc` positional values and `kwargc` (name, value) pairs.
 *
 * Dispatches on the callable's type: bound methods are unpacked in place,
 * python functions get a new frame (returning RES_CALL when `opcall` is true
 * so the dispatch loop continues in that frame), native functions run
 * immediately, and type objects route through __new__ / __init__. Objects
 * with a __call__ slot recurse with the patched stack. On RES_RETURN the
 * result is in py_retval() and the stack is restored to p0. */
FrameResult VM__vectorcall(VM* self, uint16_t argc, uint16_t kwargc, bool opcall) {
#ifndef NDEBUG
    pk_print_stack(self, self->top_frame, (Bytecode){0});
#endif
    py_Ref p1 = self->stack.sp - kwargc * 2;
    py_Ref p0 = p1 - argc - 2;
    // [callable, <self>, args..., kwargs...]
    //      ^p0                    ^p1      ^_sp
    // handle boundmethod, do a patch
    if(p0->type == tp_boundmethod) {
        assert(py_isnil(p0 + 1));  // self must be NULL
        py_TValue* slots = PyObject__slots(p0->_obj);
        p0[0] = slots[1];  // callable
        p0[1] = slots[0];  // self
        // [unbound, self, args..., kwargs...]
    }
    // argv includes the <self> slot only when it is occupied; a nil <self>
    // slot is skipped
    py_Ref argv = p0 + 1 + (int)py_isnil(p0 + 1);
    if(p0->type == tp_function) {
        Function* fn = py_touserdata(p0);
        const CodeObject* co = &fn->decl->code;
        switch(fn->decl->type) {
            case FuncType_NORMAL: {
                // full argument preparation (*args/**kwargs/defaults)
                bool ok = prepare_py_call(self->vectorcall_buffer, argv, p1, kwargc, fn->decl);
                if(!ok) return RES_ERROR;
                // copy buffer back to stack
                self->stack.sp = argv + co->nlocals;
                memcpy(argv, self->vectorcall_buffer, co->nlocals * sizeof(py_TValue));
                // submit the call
                if(!fn->cfunc) {
                    // python function
                    VM__push_frame(self, Frame__new(co, p0, fn->module, fn->globals, argv, false));
                    return opcall ? RES_CALL : VM__run_top_frame(self);
                } else {
                    // decl-based binding
                    self->curr_decl_based_function = p0;
                    bool ok = py_callcfunc(fn->cfunc, co->nlocals, argv);
                    self->stack.sp = p0;
                    self->curr_decl_based_function = NULL;
                    return ok ? RES_RETURN : RES_ERROR;
                }
            }
            case FuncType_SIMPLE:
                // fast path: exact positional arity, no keyword arguments
                if(p1 - argv != fn->decl->args.length) {
                    const char* fmt = "%s() takes %d positional arguments but %d were given";
                    TypeError(fmt, co->name->data, fn->decl->args.length, (int)(p1 - argv));
                    return RES_ERROR;
                }
                if(kwargc) {
                    TypeError("%s() takes no keyword arguments", co->name->data);
                    return RES_ERROR;
                }
                // [callable, <self>, args..., local_vars...]
                //      ^p0                    ^p1         ^_sp
                self->stack.sp = argv + co->nlocals;
                // initialize local variables to py_NIL
                memset(p1, 0, (char*)self->stack.sp - (char*)p1);
                // submit the call
                if(!fn->cfunc) {
                    // python function
                    VM__push_frame(self, Frame__new(co, p0, fn->module, fn->globals, argv, false));
                    return opcall ? RES_CALL : VM__run_top_frame(self);
                } else {
                    // decl-based binding
                    self->curr_decl_based_function = p0;
                    bool ok = py_callcfunc(fn->cfunc, co->nlocals, argv);
                    self->stack.sp = p0;
                    self->curr_decl_based_function = NULL;
                    return ok ? RES_RETURN : RES_ERROR;
                }
            case FuncType_GENERATOR: {
                // generators do not run now: capture the prepared frame into
                // a generator object and return it
                bool ok = prepare_py_call(self->vectorcall_buffer, argv, p1, kwargc, fn->decl);
                if(!ok) return RES_ERROR;
                // copy buffer back to stack
                self->stack.sp = argv + co->nlocals;
                memcpy(argv, self->vectorcall_buffer, co->nlocals * sizeof(py_TValue));
                py_Frame* frame = Frame__new(co, p0, fn->module, fn->globals, argv, false);
                pk_newgenerator(py_retval(), frame, p0, self->stack.sp);
                self->stack.sp = p0;  // reset the stack
                return RES_RETURN;
            }
            default: c11__unreachable();
        };
        c11__unreachable();
        /*****************_py_call*****************/
    }
    if(p0->type == tp_nativefunc) {
        // pk__object_new is the sole nativefunc allowed to receive kwargs
        if(kwargc && p0->_cfunc != pk__object_new) {
            TypeError("nativefunc does not accept keyword arguments");
            return RES_ERROR;
        }
        bool ok = py_callcfunc(p0->_cfunc, p1 - argv, argv);
        self->stack.sp = p0;
        return ok ? RES_RETURN : RES_ERROR;
    }
    if(p0->type == tp_type) {
        // [cls, NULL, args..., kwargs...]
        py_Ref new_f = py_tpfindmagic(py_totype(p0), __new__);
        assert(new_f && py_isnil(p0 + 1));
        // prepare a copy of args and kwargs
        int span = self->stack.sp - argv;
        *self->stack.sp++ = *new_f;  // push __new__
        *self->stack.sp++ = *p0;     // push cls
        memcpy(self->stack.sp, argv, span * sizeof(py_TValue));
        self->stack.sp += span;
        // [new_f, cls, args..., kwargs...]
        if(VM__vectorcall(self, argc, kwargc, false) == RES_ERROR) return RES_ERROR;
        // by recursively using vectorcall, args and kwargs are consumed
        // try __init__
        // NOTE: previously we use `get_unbound_method` but here we just use `tpfindmagic`
        // >> [cls, NULL, args..., kwargs...]
        // >> py_retval() is the new instance
        py_Ref init_f = py_tpfindmagic(py_totype(p0), __init__);
        if(init_f) {
            // do an inplace patch
            *p0 = *init_f;              // __init__
            p0[1] = self->last_retval;  // self
            // [__init__, self, args..., kwargs...]
            if(VM__vectorcall(self, argc, kwargc, false) == RES_ERROR) return RES_ERROR;
            *py_retval() = p0[1];  // restore the new instance
        }
        // reset the stack
        self->stack.sp = p0;
        return RES_RETURN;
    }
    // handle `__call__` overload
    if(pk_loadmethod(p0, __call__)) {
        // [__call__, self, args..., kwargs...]
        return VM__vectorcall(self, argc, kwargc, opcall);
    }
    TypeError("'%t' object is not callable", p0->type);
    return RES_ERROR;
}
  529. /****************************************/
  530. void FuncDecl__gc_mark(const FuncDecl* self, c11_vector* p_stack) {
  531. CodeObject__gc_mark(&self->code, p_stack);
  532. for(int j = 0; j < self->kwargs.length; j++) {
  533. FuncDeclKwArg* kw = c11__at(FuncDeclKwArg, &self->kwargs, j);
  534. pk__mark_value(&kw->value);
  535. }
  536. }
  537. void CodeObject__gc_mark(const CodeObject* self, c11_vector* p_stack) {
  538. for(int i = 0; i < self->consts.length; i++) {
  539. py_TValue* p = c11__at(py_TValue, &self->consts, i);
  540. pk__mark_value(p);
  541. }
  542. for(int i = 0; i < self->func_decls.length; i++) {
  543. FuncDecl_ decl = c11__getitem(FuncDecl_, &self->func_decls, i);
  544. FuncDecl__gc_mark(decl, p_stack);
  545. }
  546. }
  547. static void pk__mark_value_func(py_Ref val, void* ctx) {
  548. c11_vector* p_stack = ctx;
  549. pk__mark_value(val);
  550. }
/* Mark phase of the garbage collector.
 * First marks every root set reachable from the VM (value stack, modules,
 * cached names, compile-time functions, type infos, frames, registers, and
 * user-provided roots), pushing discovered objects onto self->gc_roots.
 * Then drains that worklist, marking each object's slots / namedict and the
 * type-specific interior values of builtin containers. */
void ManagedHeap__mark(ManagedHeap* self) {
    VM* vm = pk_current_vm;
    c11_vector* p_stack = &self->gc_roots;
    assert(p_stack->length == 0);
    // mark value stack
    for(py_TValue* p = vm->stack.begin; p < vm->stack.sp; p++) {
        // assert(p->type != tp_nil);
        pk__mark_value(p);
    }
    // mark modules
    BinTree__apply_mark(&vm->modules, p_stack);
    // mark cached names
    for(int i = 0; i < vm->cached_names.entries.length; i++) {
        CachedNames_KV* kv = c11_chunkedvector__at(&vm->cached_names.entries, i);
        pk__mark_value(&kv->val);
    }
    // mark compile time functions
    // (open-addressed dict: skip empty buckets)
    for(int i = 0; i < vm->compile_time_funcs.capacity; i++) {
        NameDict_KV* kv = &vm->compile_time_funcs.items[i];
        if(kv->key == NULL) continue;
        pk__mark_value(&kv->value);
    }
    // mark types
    int types_length = vm->types.length;
    // 0-th type is placeholder
    for(py_Type i = 1; i < types_length; i++) {
        py_TypeInfo* ti = c11__getitem(TypePointer, &vm->types, i).ti;
        pk__mark_value(&ti->self);
        pk__mark_value(&ti->annotations);
    }
    // mark frame
    for(py_Frame* frame = vm->top_frame; frame; frame = frame->f_back) {
        Frame__gc_mark(frame, p_stack);
    }
    // mark vm's registers
    pk__mark_value(&vm->last_retval);
    pk__mark_value(&vm->curr_exception);
    for(int i = 0; i < c11__count_array(vm->reg); i++) {
        pk__mark_value(&vm->reg[i]);
    }
    // mark user func
    if(vm->callbacks.gc_mark) vm->callbacks.gc_mark(pk__mark_value_func, p_stack);
    /*****************************/
    // drain the worklist: each popped object is already marked; visit what
    // it references so the mark spreads transitively
    while(p_stack->length > 0) {
        PyObject* obj = c11_vector__back(PyObject*, p_stack);
        c11_vector__pop(p_stack);
        assert(obj->gc_marked);
        if(obj->slots > 0) {
            // fixed slot array
            py_TValue* p = PyObject__slots(obj);
            for(int i = 0; i < obj->slots; i++)
                pk__mark_value(p + i);
        } else if(obj->slots == -1) {
            // slots == -1 marks an attribute dict instead of a slot array
            NameDict* dict = PyObject__dict(obj);
            for(int i = 0; i < dict->capacity; i++) {
                NameDict_KV* kv = &dict->items[i];
                if(kv->key == NULL) continue;
                pk__mark_value(&kv->value);
            }
        }
        // type-specific interior references stored in userdata
        void* ud = PyObject__userdata(obj);
        switch(obj->type) {
            case tp_list: {
                List* self = ud;
                for(int i = 0; i < self->length; i++) {
                    py_TValue* val = c11__at(py_TValue, self, i);
                    pk__mark_value(val);
                }
                break;
            }
            case tp_dict: {
                Dict* self = ud;
                for(int i = 0; i < self->entries.length; i++) {
                    DictEntry* entry = c11__at(DictEntry, &self->entries, i);
                    if(py_isnil(&entry->key)) continue;  // deleted entry
                    pk__mark_value(&entry->key);
                    pk__mark_value(&entry->val);
                }
                break;
            }
            case tp_generator: {
                Generator* self = ud;
                if(self->frame) Frame__gc_mark(self->frame, p_stack);
                break;
            }
            case tp_function: {
                function__gc_mark(ud, p_stack);
                break;
            }
            case tp_BaseException: {
                BaseException* self = ud;
                pk__mark_value(&self->args);
                pk__mark_value(&self->inner_exc);
                c11__foreach(BaseExceptionFrame, &self->stacktrace, frame) {
                    pk__mark_value(&frame->locals);
                    pk__mark_value(&frame->globals);
                }
                break;
            }
            case tp_code: {
                CodeObject* self = ud;
                CodeObject__gc_mark(self, p_stack);
                break;
            }
            case tp_chunked_array2d: {
                c11_chunked_array2d__mark(ud, p_stack);
                break;
            }
        }
    }
}