vm.c 30 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887
  1. #include "pocketpy/interpreter/vm.h"
  2. #include "pocketpy/common/memorypool.h"
  3. #include "pocketpy/common/sstream.h"
  4. #include "pocketpy/common/utils.h"
  5. #include "pocketpy/interpreter/generator.h"
  6. #include "pocketpy/interpreter/modules.h"
  7. #include "pocketpy/interpreter/typeinfo.h"
  8. #include "pocketpy/objects/base.h"
  9. #include "pocketpy/interpreter/types.h"
  10. #include "pocketpy/common/_generated.h"
  11. #include "pocketpy/pocketpy.h"
  12. #include <stdbool.h>
  13. static char* pk_default_importfile(const char* path) {
  14. #if PK_ENABLE_OS
  15. FILE* f = fopen(path, "rb");
  16. if(f == NULL) return NULL;
  17. fseek(f, 0, SEEK_END);
  18. long size = ftell(f);
  19. fseek(f, 0, SEEK_SET);
  20. char* buffer = PK_MALLOC(size + 1);
  21. size = fread(buffer, 1, size, f);
  22. buffer[size] = 0;
  23. fclose(f);
  24. return buffer;
  25. #else
  26. return NULL;
  27. #endif
  28. }
  29. static void pk_default_print(const char* data) { printf("%s", data); }
  30. static void pk_default_flush() { fflush(stdout); }
  31. static int pk_default_getchr() { return getchar(); }
  32. void LineProfiler__tracefunc(py_Frame* frame, enum py_TraceEvent event) {
  33. LineProfiler* self = &pk_current_vm->line_profiler;
  34. if(self->enabled && event == TRACE_EVENT_LINE) { LineProfiler__tracefunc_line(self, frame); }
  35. }
  36. static void py_TypeInfo__ctor(py_TypeInfo* self,
  37. py_Name name,
  38. py_Type index,
  39. py_Type base,
  40. py_TypeInfo* base_ti,
  41. py_TValue module) {
  42. memset(self, 0, sizeof(py_TypeInfo));
  43. self->name = name;
  44. self->base = base;
  45. self->base_ti = base_ti;
  46. // create type object with __dict__
  47. ManagedHeap* heap = &pk_current_vm->heap;
  48. PyObject* typeobj = ManagedHeap__new(heap, tp_type, -1, sizeof(py_Type));
  49. *(py_Type*)PyObject__userdata(typeobj) = index;
  50. self->self = (py_TValue){
  51. .type = typeobj->type,
  52. .is_ptr = true,
  53. ._obj = typeobj,
  54. };
  55. self->module = module;
  56. self->annotations = *py_NIL();
  57. }
  58. static int BinTree__cmp_cstr(void* lhs, void* rhs) {
  59. const char* l = (const char*)lhs;
  60. const char* r = (const char*)rhs;
  61. return strcmp(l, r);
  62. }
  63. static int BinTree__cmp_voidp(void* lhs, void* rhs) { return lhs < rhs ? -1 : (lhs > rhs ? 1 : 0); }
// Construct a VM: initialize runtime state and containers, register every
// builtin type in its fixed tp_* slot order, inject builtin exceptions,
// load native modules, execute the python-level builtins source, and
// finally create the __main__ module.
// NOTE(review): this runs against `pk_current_vm`, which presumably is set
// to `self` by the caller before invocation — confirm at call sites.
void VM__ctor(VM* self) {
    self->top_frame = NULL;
    const static BinTreeConfig modules_config = {
        .f_cmp = BinTree__cmp_cstr,
        .need_free_key = true,
    };
    // module registry keyed by strdup'd name; "" is the sentinel root key
    BinTree__ctor(&self->modules, c11_strdup(""), py_NIL(), &modules_config);
    TypeList__ctor(&self->types);
    self->builtins = *py_NIL();
    self->main = *py_NIL();
    // default host callbacks (file import + stdio); embedders may override
    self->callbacks.importfile = pk_default_importfile;
    self->callbacks.print = pk_default_print;
    self->callbacks.flush = pk_default_flush;
    self->callbacks.getchr = pk_default_getchr;
    self->last_retval = *py_NIL();
    self->curr_exception = *py_NIL();
    self->recursion_depth = 0;
    self->max_recursion_depth = 1000;
    self->is_curr_exc_handled = false;
    self->ctx = NULL;
    self->curr_class = NULL;
    self->curr_decl_based_function = NULL;
    memset(&self->trace_info, 0, sizeof(TraceInfo));
    memset(&self->watchdog_info, 0, sizeof(WatchdogInfo));
    LineProfiler__ctor(&self->line_profiler);
    FixedMemoryPool__ctor(&self->pool_frame, sizeof(py_Frame), 32);
    ManagedHeap__ctor(&self->heap);
    ValueStack__ctor(&self->stack);
    CachedNames__ctor(&self->cached_names);
    NameDict__ctor(&self->compile_time_funcs, PK_TYPE_ATTR_LOAD_FACTOR);
    /* Init Builtin Types */
    // 0: unused (type id 0 is reserved as a placeholder)
    void* placeholder = TypeList__emplace(&self->types);
    memset(placeholder, 0, sizeof(py_TypeInfo));
// Registration order below is load-bearing: each call must land on its
// fixed tp_* index; `validate` aborts on any mismatch.
#define validate(t, expr) \
    if(t != (expr)) abort()
    validate(tp_object, pk_newtype("object", 0, NULL, NULL, true, false));
    validate(tp_type, pk_newtype("type", 1, NULL, NULL, false, true));
    pk_object__register();
    validate(tp_int, pk_newtype("int", tp_object, NULL, NULL, false, true));
    validate(tp_float, pk_newtype("float", tp_object, NULL, NULL, false, true));
    validate(tp_bool, pk_newtype("bool", tp_object, NULL, NULL, false, true));
    pk_number__register();
    validate(tp_str, pk_str__register());
    validate(tp_str_iterator, pk_str_iterator__register());
    validate(tp_list, pk_list__register());
    validate(tp_tuple, pk_tuple__register());
    validate(tp_array_iterator, pk_array_iterator__register());
    validate(tp_slice, pk_slice__register());
    validate(tp_range, pk_range__register());
    validate(tp_range_iterator, pk_range_iterator__register());
    validate(tp_module, pk_newtype("module", tp_object, NULL, NULL, false, true));
    validate(tp_function, pk_function__register());
    validate(tp_nativefunc, pk_nativefunc__register());
    validate(tp_boundmethod, pk_boundmethod__register());
    validate(tp_super, pk_super__register());
    validate(tp_BaseException, pk_BaseException__register());
    validate(tp_Exception, pk_Exception__register());
    validate(tp_bytes, pk_bytes__register());
    validate(tp_namedict, pk_namedict__register());
    validate(tp_locals, pk_newtype("locals", tp_object, NULL, NULL, false, true));
    validate(tp_code, pk_code__register());
    validate(tp_dict, pk_dict__register());
    validate(tp_dict_iterator, pk_dict_items__register());
    validate(tp_property, pk_property__register());
    validate(tp_star_wrapper, pk_newtype("star_wrapper", tp_object, NULL, NULL, false, true));
    validate(tp_staticmethod, pk_staticmethod__register());
    validate(tp_classmethod, pk_classmethod__register());
    validate(tp_NoneType, pk_newtype("NoneType", tp_object, NULL, NULL, false, true));
    validate(tp_NotImplementedType,
             pk_newtype("NotImplementedType", tp_object, NULL, NULL, false, true));
    validate(tp_ellipsis, pk_newtype("ellipsis", tp_object, NULL, NULL, false, true));
    validate(tp_generator, pk_generator__register());
    self->builtins = pk_builtins__register();
// inject some builtin exceptions
// Each exception type is created, published into builtins, then validated
// against its fixed tp_* index.
#define INJECT_BUILTIN_EXC(name, TBase) \
    do { \
        py_Type type = pk_newtype(#name, TBase, &self->builtins, NULL, false, true); \
        py_setdict(&self->builtins, py_name(#name), py_tpobject(type)); \
        validate(tp_##name, type); \
    } while(0)
    INJECT_BUILTIN_EXC(SystemExit, tp_BaseException);
    INJECT_BUILTIN_EXC(KeyboardInterrupt, tp_BaseException);
    // StopIteration has a custom register function, so it is injected by hand
    validate(tp_StopIteration, pk_StopIteration__register());
    py_setdict(&self->builtins, py_name("StopIteration"), py_tpobject(tp_StopIteration));
    INJECT_BUILTIN_EXC(SyntaxError, tp_Exception);
    INJECT_BUILTIN_EXC(RecursionError, tp_Exception);
    INJECT_BUILTIN_EXC(OSError, tp_Exception);
    INJECT_BUILTIN_EXC(NotImplementedError, tp_Exception);
    INJECT_BUILTIN_EXC(TypeError, tp_Exception);
    INJECT_BUILTIN_EXC(IndexError, tp_Exception);
    INJECT_BUILTIN_EXC(ValueError, tp_Exception);
    INJECT_BUILTIN_EXC(RuntimeError, tp_Exception);
    INJECT_BUILTIN_EXC(TimeoutError, tp_Exception);
    INJECT_BUILTIN_EXC(ZeroDivisionError, tp_Exception);
    INJECT_BUILTIN_EXC(NameError, tp_Exception);
    INJECT_BUILTIN_EXC(UnboundLocalError, tp_Exception);
    INJECT_BUILTIN_EXC(AttributeError, tp_Exception);
    INJECT_BUILTIN_EXC(ImportError, tp_Exception);
    INJECT_BUILTIN_EXC(AssertionError, tp_Exception);
    INJECT_BUILTIN_EXC(KeyError, tp_Exception);
#undef INJECT_BUILTIN_EXC
#undef validate
    /* Setup Public Builtin Types */
    // expose these type objects as names inside the builtins module
    py_Type public_types[] = {
        tp_object,
        tp_type,
        tp_int,
        tp_float,
        tp_bool,
        tp_str,
        tp_list,
        tp_tuple,
        tp_slice,
        tp_range,
        tp_bytes,
        tp_dict,
        tp_property,
        tp_staticmethod,
        tp_classmethod,
        tp_super,
        tp_BaseException,
        tp_Exception,
    };
    for(int i = 0; i < c11__count_array(public_types); i++) {
        py_TypeInfo* ti = pk__type_info(public_types[i]);
        py_setdict(&self->builtins, ti->name, &ti->self);
    }
    py_newnotimplemented(py_emplacedict(&self->builtins, py_name("NotImplemented")));
    pk__add_module_vmath();
    pk__add_module_array2d();
    pk__add_module_colorcvt();
    // add modules
    pk__add_module_os();
    pk__add_module_sys();
    pk__add_module_io();
    pk__add_module_math();
    pk__add_module_dis();
    pk__add_module_random();
    pk__add_module_json();
    pk__add_module_gc();
    pk__add_module_time();
    pk__add_module_easing();
    pk__add_module_traceback();
    pk__add_module_enum();
    pk__add_module_inspect();
    pk__add_module_pickle();
    pk__add_module_base64();
    pk__add_module_importlib();
    pk__add_module_unicodedata();
    pk__add_module_conio();
    pk__add_module_lz4();    // optional
    pk__add_module_libhv();  // optional
    pk__add_module_pkpy();
    // add python builtins (abort hard: the VM is unusable without them)
    do {
        bool ok;
        ok = py_exec(kPythonLibs_builtins, "<builtins>", EXEC_MODE, &self->builtins);
        if(!ok) goto __ABORT;
        break;
    __ABORT:
        py_printexc();
        c11__abort("failed to load python builtins!");
    } while(0);
    self->main = *py_newmodule("__main__");
}
// Tear down a VM. Order matters: tracing is detached first, then all
// managed objects are destroyed, then any frames still on the call stack
// are unwound, and finally the VM's own containers are released.
void VM__dtor(VM* self) {
    // reset traceinfo
    py_sys_settrace(NULL, true);
    LineProfiler__dtor(&self->line_profiler);
    // destroy all objects
    ManagedHeap__dtor(&self->heap);
    // clear frames
    while(self->top_frame)
        VM__pop_frame(self);
    BinTree__dtor(&self->modules);
    TypeList__dtor(&self->types);
    FixedMemoryPool__dtor(&self->pool_frame);
    ValueStack__dtor(&self->stack);
    CachedNames__dtor(&self->cached_names);
    NameDict__dtor(&self->compile_time_funcs);
}
  246. void VM__push_frame(VM* self, py_Frame* frame) {
  247. frame->f_back = self->top_frame;
  248. self->top_frame = frame;
  249. self->recursion_depth++;
  250. if(self->trace_info.func) self->trace_info.func(frame, TRACE_EVENT_PUSH);
  251. }
  252. void VM__pop_frame(VM* self) {
  253. assert(self->top_frame);
  254. py_Frame* frame = self->top_frame;
  255. if(self->trace_info.func) self->trace_info.func(frame, TRACE_EVENT_POP);
  256. // reset stack pointer
  257. self->stack.sp = frame->p0;
  258. // pop frame and delete
  259. self->top_frame = frame->f_back;
  260. Frame__delete(frame);
  261. self->recursion_depth--;
  262. }
  263. static void _clip_int(int* value, int min, int max) {
  264. if(*value < min) *value = min;
  265. if(*value > max) *value = max;
  266. }
// Resolve a slice (or a plain int index) against a sequence of `length`
// elements into concrete (start, stop, step) values, clipping out-of-range
// bounds like CPython's slice.indices(). An int index is normalized and
// expanded to a one-element range. Returns false with an exception set on
// bad input (non-int slice components, zero step, or out-of-range index).
bool pk__parse_int_slice(py_Ref slice,
                         int length,
                         int* restrict start,
                         int* restrict stop,
                         int* restrict step) {
    // plain integer index: normalize and treat as [index, index+1) step 1
    if(py_isint(slice)) {
        int index = py_toint(slice);
        bool ok = pk__normalize_index(&index, length);
        if(!ok) return false;
        *start = index;
        *stop = index + 1;
        *step = 1;
        return true;
    }
    if(!py_istype(slice, tp_slice)) c11__abort("pk__parse_int_slice(): not a slice object");
    // slice slots: 0 = start, 1 = stop, 2 = step (any may be None)
    py_Ref s_start = py_getslot(slice, 0);
    py_Ref s_stop = py_getslot(slice, 1);
    py_Ref s_step = py_getslot(slice, 2);
    if(py_isnone(s_step))
        *step = 1;
    else {
        if(!py_checkint(s_step)) return false;
        *step = py_toint(s_step);
    }
    if(*step == 0) return ValueError("slice step cannot be zero");
    if(*step > 0) {
        // forward slice: defaults are [0, length); bounds clip to [0, length]
        if(py_isnone(s_start))
            *start = 0;
        else {
            if(!py_checkint(s_start)) return false;
            *start = py_toint(s_start);
            if(*start < 0) *start += length;  // negative index counts from the end
            _clip_int(start, 0, length);
        }
        if(py_isnone(s_stop))
            *stop = length;
        else {
            if(!py_checkint(s_stop)) return false;
            *stop = py_toint(s_stop);
            if(*stop < 0) *stop += length;
            _clip_int(stop, 0, length);
        }
    } else {
        // backward slice: defaults are [length-1, -1); bounds clip to
        // [-1, length-1] so the exclusive stop can sit before element 0
        if(py_isnone(s_start))
            *start = length - 1;
        else {
            if(!py_checkint(s_start)) return false;
            *start = py_toint(s_start);
            if(*start < 0) *start += length;
            _clip_int(start, -1, length - 1);
        }
        if(py_isnone(s_stop))
            *stop = -1;
        else {
            if(!py_checkint(s_stop)) return false;
            *stop = py_toint(s_stop);
            if(*stop < 0) *stop += length;
            _clip_int(stop, -1, length - 1);
        }
    }
    return true;
}
  329. bool pk__normalize_index(int* index, int length) {
  330. if(*index < 0) *index += length;
  331. if(*index < 0 || *index >= length) return IndexError("%d not in [0, %d)", *index, length);
  332. return true;
  333. }
  334. py_Type pk_newtype(const char* name,
  335. py_Type base,
  336. const py_GlobalRef module,
  337. void (*dtor)(void*),
  338. bool is_python,
  339. bool is_sealed) {
  340. py_Type index = pk_current_vm->types.length;
  341. py_TypeInfo* ti = TypeList__emplace(&pk_current_vm->types);
  342. py_TypeInfo* base_ti = base ? pk__type_info(base) : NULL;
  343. if(base_ti && base_ti->is_sealed) {
  344. c11__abort("type '%s' is not an acceptable base type", py_name2str(base_ti->name));
  345. }
  346. py_TypeInfo__ctor(ti, py_name(name), index, base, base_ti, module ? *module : *py_NIL());
  347. if(!dtor && base) dtor = base_ti->dtor;
  348. ti->dtor = dtor;
  349. ti->is_python = is_python;
  350. ti->is_sealed = is_sealed;
  351. return index;
  352. }
  353. py_Type py_newtype(const char* name, py_Type base, const py_GlobalRef module, void (*dtor)(void*)) {
  354. if(strlen(name) == 0) c11__abort("type name cannot be empty");
  355. py_Type type = pk_newtype(name, base, module, dtor, false, false);
  356. if(module) py_setdict(module, py_name(name), py_tpobject(type));
  357. return type;
  358. }
// Lay out call arguments into `buffer` (the callee's local-variable slots,
// `co->nlocals` wide). `argv..p1` holds the positional args already on the
// stack; after `p1` come `kwargc` (name, value) pairs where the name is an
// int-encoded py_Name. Handles *args, **kwargs and keyword defaults.
// Returns false with a TypeError set on arity or keyword mismatch.
static bool
prepare_py_call(py_TValue* buffer, py_Ref argv, py_Ref p1, int kwargc, const FuncDecl* decl) {
    const CodeObject* co = &decl->code;
    int decl_argc = decl->args.length;
    if(p1 - argv < decl_argc) {
        return TypeError("%s() takes %d positional arguments but %d were given",
                         co->name->data,
                         decl_argc,
                         (int)(p1 - argv));
    }
    py_TValue* t = argv;  // cursor over the positional args
    // prepare args: zero all local slots, then fill declared positionals
    memset(buffer, 0, co->nlocals * sizeof(py_TValue));
    c11__foreach(int, &decl->args, index) buffer[*index] = *t++;
    // prepare kwdefaults
    c11__foreach(FuncDeclKwArg, &decl->kwargs, kv) buffer[kv->index] = kv->value;
    // handle *args
    if(decl->starred_arg != -1) {
        // surplus positionals are collected into a tuple in the *args slot
        int exceed_argc = p1 - t;
        py_Ref vargs = &buffer[decl->starred_arg];
        py_Ref data = py_newtuple(vargs, exceed_argc);
        for(int j = 0; j < exceed_argc; j++) {
            data[j] = *t++;
        }
    } else {
        // kwdefaults override
        // def f(a, b, c=None)
        // f(1, 2, 3) -> c=3
        c11__foreach(FuncDeclKwArg, &decl->kwargs, kv) {
            if(t >= p1) break;
            buffer[kv->index] = *t++;
        }
        // not able to consume all args
        if(t < p1) return TypeError("too many arguments (%s)", co->name->data);
    }
    // create an empty dict for **kwargs if the function declares one
    if(decl->starred_kwarg != -1) py_newdict(&buffer[decl->starred_kwarg]);
    for(int j = 0; j < kwargc; j++) {
        py_Name key = (py_Name)py_toint(&p1[2 * j]);
        int index = c11_smallmap_n2d__get(&decl->kw_to_index, key, -1);
        // if key is an explicit key, set as local variable
        if(index >= 0) {
            buffer[index] = p1[2 * j + 1];
        } else {
            // otherwise, set as **kwargs if possible
            if(decl->starred_kwarg == -1) {
                return TypeError("'%n' is an invalid keyword argument for %s()",
                                 key,
                                 co->name->data);
            } else {
                // add to **kwargs
                bool ok =
                    py_dict_setitem(&buffer[decl->starred_kwarg], py_name2ref(key), &p1[2 * j + 1]);
                if(!ok) return false;
            }
        }
    }
    return true;
}
// Core call dispatcher. Expects the stack to hold:
//   [callable, <self>, args..., kwargs...]
//   ^p0                ^p1(after args)   ^sp
// where kwargs are `kwargc` (int-encoded name, value) pairs. Dispatches on
// the callable's type: python/decl-based functions, native functions,
// class construction (__new__/__init__), bound methods (patched in place)
// and objects with a __call__ overload. When `opcall` is true, a
// python-level call returns RES_CALL so the interpreter loop executes the
// new frame instead of running it recursively.
FrameResult VM__vectorcall(VM* self, uint16_t argc, uint16_t kwargc, bool opcall) {
#ifndef NDEBUG
    pk_print_stack(self, self->top_frame, (Bytecode){0});
#endif
    py_Ref p1 = self->stack.sp - kwargc * 2;
    py_Ref p0 = p1 - argc - 2;
    // [callable, <self>, args..., kwargs...]
    //  ^p0                        ^p1       ^_sp
    // handle boundmethod, do a patch
    if(p0->type == tp_boundmethod) {
        assert(py_isnil(p0 + 1));  // self must be NULL
        py_TValue* slots = PyObject__slots(p0->_obj);
        p0[0] = slots[1];  // callable
        p0[1] = slots[0];  // self
        // [unbound, self, args..., kwargs...]
    }
    // argv skips the <self> slot when it is nil (no receiver)
    py_Ref argv = p0 + 1 + (int)py_isnil(p0 + 1);
    if(p0->type == tp_function) {
        Function* fn = py_touserdata(p0);
        const CodeObject* co = &fn->decl->code;
        switch(fn->decl->type) {
            case FuncType_NORMAL: {
                // full argument layout (defaults, *args, **kwargs) via buffer
                bool ok = prepare_py_call(self->vectorcall_buffer, argv, p1, kwargc, fn->decl);
                if(!ok) return RES_ERROR;
                // copy buffer back to stack
                self->stack.sp = argv + co->nlocals;
                memcpy(argv, self->vectorcall_buffer, co->nlocals * sizeof(py_TValue));
                // submit the call
                if(!fn->cfunc) {
                    // python function
                    VM__push_frame(self, Frame__new(co, p0, fn->module, fn->globals, argv, false));
                    return opcall ? RES_CALL : VM__run_top_frame(self);
                } else {
                    // decl-based binding
                    self->curr_decl_based_function = p0;
                    bool ok = py_callcfunc(fn->cfunc, co->nlocals, argv);
                    self->stack.sp = p0;
                    self->curr_decl_based_function = NULL;
                    return ok ? RES_RETURN : RES_ERROR;
                }
            }
            case FuncType_SIMPLE:
                // fast path: exact positional arity, no keywords allowed
                if(p1 - argv != fn->decl->args.length) {
                    const char* fmt = "%s() takes %d positional arguments but %d were given";
                    TypeError(fmt, co->name->data, fn->decl->args.length, (int)(p1 - argv));
                    return RES_ERROR;
                }
                if(kwargc) {
                    TypeError("%s() takes no keyword arguments", co->name->data);
                    return RES_ERROR;
                }
                // [callable, <self>, args..., local_vars...]
                //  ^p0                ^p1                   ^_sp
                self->stack.sp = argv + co->nlocals;
                // initialize local variables to py_NIL
                memset(p1, 0, (char*)self->stack.sp - (char*)p1);
                // submit the call
                if(!fn->cfunc) {
                    // python function
                    VM__push_frame(self, Frame__new(co, p0, fn->module, fn->globals, argv, false));
                    return opcall ? RES_CALL : VM__run_top_frame(self);
                } else {
                    // decl-based binding
                    self->curr_decl_based_function = p0;
                    bool ok = py_callcfunc(fn->cfunc, co->nlocals, argv);
                    self->stack.sp = p0;
                    self->curr_decl_based_function = NULL;
                    return ok ? RES_RETURN : RES_ERROR;
                }
            case FuncType_GENERATOR: {
                // arguments are prepared as usual, but the frame is captured
                // into a generator object instead of being executed now
                bool ok = prepare_py_call(self->vectorcall_buffer, argv, p1, kwargc, fn->decl);
                if(!ok) return RES_ERROR;
                // copy buffer back to stack
                self->stack.sp = argv + co->nlocals;
                memcpy(argv, self->vectorcall_buffer, co->nlocals * sizeof(py_TValue));
                py_Frame* frame = Frame__new(co, p0, fn->module, fn->globals, argv, false);
                pk_newgenerator(py_retval(), frame, p0, self->stack.sp);
                self->stack.sp = p0;  // reset the stack
                return RES_RETURN;
            }
            default: c11__unreachable();
        };
        c11__unreachable();
        /*****************_py_call*****************/
    }
    if(p0->type == tp_nativefunc) {
        // pk__object_new is the only nativefunc allowed to receive kwargs
        if(kwargc && p0->_cfunc != pk__object_new) {
            TypeError("nativefunc does not accept keyword arguments");
            return RES_ERROR;
        }
        bool ok = py_callcfunc(p0->_cfunc, p1 - argv, argv);
        self->stack.sp = p0;
        return ok ? RES_RETURN : RES_ERROR;
    }
    if(p0->type == tp_type) {
        // calling a class: __new__ then (optionally) __init__
        // [cls, NULL, args..., kwargs...]
        py_Ref new_f = py_tpfindmagic(py_totype(p0), __new__);
        assert(new_f && py_isnil(p0 + 1));
        // prepare a copy of args and kwargs
        int span = self->stack.sp - argv;
        *self->stack.sp++ = *new_f;  // push __new__
        *self->stack.sp++ = *p0;     // push cls
        memcpy(self->stack.sp, argv, span * sizeof(py_TValue));
        self->stack.sp += span;
        // [new_f, cls, args..., kwargs...]
        if(VM__vectorcall(self, argc, kwargc, false) == RES_ERROR) return RES_ERROR;
        // by recursively using vectorcall, args and kwargs are consumed
        // try __init__
        // NOTE: previously we use `get_unbound_method` but here we just use `tpfindmagic`
        // >> [cls, NULL, args..., kwargs...]
        // >> py_retval() is the new instance
        py_Ref init_f = py_tpfindmagic(py_totype(p0), __init__);
        if(init_f) {
            // do an inplace patch
            *p0 = *init_f;              // __init__
            p0[1] = self->last_retval;  // self
            // [__init__, self, args..., kwargs...]
            if(VM__vectorcall(self, argc, kwargc, false) == RES_ERROR) return RES_ERROR;
            *py_retval() = p0[1];  // restore the new instance
        }
        // reset the stack
        self->stack.sp = p0;
        return RES_RETURN;
    }
    // handle `__call__` overload
    if(pk_loadmethod(p0, __call__)) {
        // [__call__, self, args..., kwargs...]
        return VM__vectorcall(self, argc, kwargc, opcall);
    }
    TypeError("'%t' object is not callable", p0->type);
    return RES_ERROR;
}
  549. /****************************************/
  550. void PyObject__dtor(PyObject* self) {
  551. py_TypeInfo* ti = pk__type_info(self->type);
  552. if(ti->dtor) ti->dtor(PyObject__userdata(self));
  553. if(self->slots == -1) NameDict__dtor(PyObject__dict(self));
  554. }
  555. void FuncDecl__gc_mark(const FuncDecl* self, c11_vector* p_stack) {
  556. CodeObject__gc_mark(&self->code, p_stack);
  557. for(int j = 0; j < self->kwargs.length; j++) {
  558. FuncDeclKwArg* kw = c11__at(FuncDeclKwArg, &self->kwargs, j);
  559. pk__mark_value(&kw->value);
  560. }
  561. }
  562. void CodeObject__gc_mark(const CodeObject* self, c11_vector* p_stack) {
  563. for(int i = 0; i < self->consts.length; i++) {
  564. py_TValue* p = c11__at(py_TValue, &self->consts, i);
  565. pk__mark_value(p);
  566. }
  567. for(int i = 0; i < self->func_decls.length; i++) {
  568. FuncDecl_ decl = c11__getitem(FuncDecl_, &self->func_decls, i);
  569. FuncDecl__gc_mark(decl, p_stack);
  570. }
  571. }
  572. static void pk__mark_value_func(py_Ref val, void* ctx) {
  573. c11_vector* p_stack = ctx;
  574. pk__mark_value(val);
  575. }
// GC mark phase. First marks every root set the VM owns (value stack,
// modules, cached names, compile-time functions, type objects, frames,
// registers, user roots), pushing reachable heap objects onto
// self->gc_roots; then drains that worklist, marking each object's slots,
// __dict__, and type-specific interior values.
void ManagedHeap__mark(ManagedHeap* self) {
    VM* vm = pk_current_vm;
    c11_vector* p_stack = &self->gc_roots;
    assert(p_stack->length == 0);
    // mark value stack
    for(py_TValue* p = vm->stack.begin; p != vm->stack.end; p++) {
        pk__mark_value(p);
    }
    // mark modules
    BinTree__apply_mark(&vm->modules, p_stack);
    // mark cached names
    for(int i = 0; i < vm->cached_names.entries.length; i++) {
        CachedNames_KV* kv = c11_chunkedvector__at(&vm->cached_names.entries, i);
        pk__mark_value(&kv->val);
    }
    // mark compile time functions
    for(int i = 0; i < vm->compile_time_funcs.capacity; i++) {
        NameDict_KV* kv = &vm->compile_time_funcs.items[i];
        if(kv->key == NULL) continue;  // empty hash slot
        pk__mark_value(&kv->value);
    }
    // mark types
    int types_length = vm->types.length;
    // 0-th type is placeholder
    for(py_Type i = 1; i < types_length; i++) {
        py_TypeInfo* ti = TypeList__get(&vm->types, i);
        // mark type object
        pk__mark_value(&ti->self);
        // mark type annotations
        pk__mark_value(&ti->annotations);
    }
    // mark frame
    for(py_Frame* frame = vm->top_frame; frame; frame = frame->f_back) {
        Frame__gc_mark(frame, p_stack);
    }
    // mark vm's registers
    pk__mark_value(&vm->last_retval);
    pk__mark_value(&vm->curr_exception);
    for(int i = 0; i < c11__count_array(vm->reg); i++) {
        pk__mark_value(&vm->reg[i]);
    }
    // mark user func
    if(vm->callbacks.gc_mark) vm->callbacks.gc_mark(pk__mark_value_func, p_stack);
    /*****************************/
    // drain the worklist: every popped object is already marked; push its
    // children by marking them (pk__mark_value appends unmarked objects)
    while(p_stack->length > 0) {
        PyObject* obj = c11_vector__back(PyObject*, p_stack);
        c11_vector__pop(p_stack);
        assert(obj->gc_marked);
        if(obj->slots > 0) {
            // fixed slot array
            py_TValue* p = PyObject__slots(obj);
            for(int i = 0; i < obj->slots; i++)
                pk__mark_value(p + i);
        } else if(obj->slots == -1) {
            // dict-backed object: mark every occupied entry
            NameDict* dict = PyObject__dict(obj);
            for(int i = 0; i < dict->capacity; i++) {
                NameDict_KV* kv = &dict->items[i];
                if(kv->key == NULL) continue;
                pk__mark_value(&kv->value);
            }
        }
        // type-specific interior references stored in userdata
        void* ud = PyObject__userdata(obj);
        switch(obj->type) {
            case tp_list: {
                List* self = ud;
                for(int i = 0; i < self->length; i++) {
                    py_TValue* val = c11__at(py_TValue, self, i);
                    pk__mark_value(val);
                }
                break;
            }
            case tp_dict: {
                Dict* self = ud;
                for(int i = 0; i < self->entries.length; i++) {
                    DictEntry* entry = c11__at(DictEntry, &self->entries, i);
                    if(py_isnil(&entry->key)) continue;  // tombstone
                    pk__mark_value(&entry->key);
                    pk__mark_value(&entry->val);
                }
                break;
            }
            case tp_generator: {
                Generator* self = ud;
                if(self->frame) Frame__gc_mark(self->frame, p_stack);
                break;
            }
            case tp_function: {
                function__gc_mark(ud, p_stack);
                break;
            }
            case tp_code: {
                CodeObject* self = ud;
                CodeObject__gc_mark(self, p_stack);
                break;
            }
            case tp_chunked_array2d: {
                c11_chunked_array2d__mark(ud, p_stack);
                break;
            }
        }
    }
}
// Debug helper: dump the current value stack plus the executing opcode to
// stdout. NOTE(review): the unconditional `return;` below disables the
// whole function (debug-only code left compiled out at runtime); remove it
// to re-enable the dump.
void pk_print_stack(VM* self, py_Frame* frame, Bytecode byte) {
    return;
    if(frame == NULL || py_isnil(&self->main)) return;
    py_TValue* sp = self->stack.sp;
    c11_sbuf buf;
    c11_sbuf__ctor(&buf);
    // render each stack value as a short, type-tagged summary
    for(py_Ref p = self->stack.begin; p != sp; p++) {
        switch(p->type) {
            case tp_nil: c11_sbuf__write_cstr(&buf, "nil"); break;
            case tp_int: c11_sbuf__write_i64(&buf, p->_i64); break;
            case tp_float: c11_sbuf__write_f64(&buf, p->_f64, -1); break;
            case tp_bool: c11_sbuf__write_cstr(&buf, p->_bool ? "True" : "False"); break;
            case tp_NoneType: c11_sbuf__write_cstr(&buf, "None"); break;
            case tp_list: {
                pk_sprintf(&buf, "list(%d)", py_list_len(p));
                break;
            }
            case tp_tuple: {
                pk_sprintf(&buf, "tuple(%d)", py_tuple_len(p));
                break;
            }
            case tp_function: {
                Function* ud = py_touserdata(p);
                c11_sbuf__write_cstr(&buf, ud->decl->code.name->data);
                c11_sbuf__write_cstr(&buf, "()");
                break;
            }
            case tp_type: {
                pk_sprintf(&buf, "<class '%t'>", py_totype(p));
                break;
            }
            case tp_str: {
                pk_sprintf(&buf, "%q", py_tosv(p));
                break;
            }
            case tp_module: {
                py_Ref path = py_getdict(p, __path__);
                pk_sprintf(&buf, "<module '%v'>", py_tosv(path));
                break;
            }
            default: {
                pk_sprintf(&buf, "(%t)", p->type);
                break;
            }
        }
        // comma-separate all but the last entry
        if(p != &sp[-1]) c11_sbuf__write_cstr(&buf, ", ");
    }
    c11_string* stack_str = c11_sbuf__submit(&buf);
    // "<file>:<line>: <opname> <arg> [<stack>]"
    printf("%s:%-3d: %-25s %-6d [%s]\n",
           frame->co->src->filename->data,
           Frame__lineno(frame),
           pk_opname(byte.op),
           byte.arg,
           stack_str->data);
    c11_string__delete(stack_str);
}
// Builtin wrapper: identity method that returns its single argument
// (the receiver) unchanged.
bool pk_wrapper__self(int argc, py_Ref argv) {
    PY_CHECK_ARGC(1);
    py_assign(py_retval(), argv);
    return true;
}
  738. py_TypeInfo* pk__type_info(py_Type type) { return TypeList__get(&pk_current_vm->types, type); }
// Read one REPL input unit into `buf` (capacity `max_size`, always
// NUL-terminated) using the VM's getchr callback.
// Single-line mode ends at the first newline unless the line looks like the
// start of a block (trailing ':', '(', '[', '{', or a leading '@'), which
// switches to multiline mode; multiline mode ends on two consecutive
// newlines. Returns the number of bytes stored, or -1 on EOF.
int py_replinput(char* buf, int max_size) {
    buf[0] = '\0';  // reset first char because we check '@' at the beginning
    int size = 0;
    bool multiline = false;
    printf(">>> ");
    while(true) {
        int c = pk_current_vm->callbacks.getchr();
        if(c == EOF) return -1;
        if(c == '\n') {
            // `last` is the previous character typed (before this newline)
            char last = '\0';
            if(size > 0) last = buf[size - 1];
            if(multiline) {
                if(last == '\n') {
                    break;  // 2 consecutive newlines to end multiline input
                } else {
                    printf("... ");
                }
            } else {
                // an open bracket / colon / decorator line starts a block
                if(last == ':' || last == '(' || last == '[' || last == '{' || buf[0] == '@') {
                    printf("... ");
                    multiline = true;
                } else {
                    break;
                }
            }
        }
        // buffer full: terminate and return what we have (char is dropped)
        if(size == max_size - 1) {
            buf[size] = '\0';
            return size;
        }
        buf[size++] = c;
    }
    buf[size] = '\0';
    return size;
}
  774. py_Ref py_name2ref(py_Name name) {
  775. assert(name != NULL);
  776. CachedNames* d = &pk_current_vm->cached_names;
  777. py_Ref res = CachedNames__try_get(d, name);
  778. if(res != NULL) return res;
  779. // not found, create a new one
  780. py_StackRef tmp = py_pushtmp();
  781. py_newstrv(tmp, py_name2sv(name));
  782. CachedNames__set(d, name, tmp);
  783. py_pop();
  784. return CachedNames__try_get(d, name);
  785. }