vm.c 27 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798
  1. #include "pocketpy/interpreter/vm.h"
  2. #include "pocketpy/common/memorypool.h"
  3. #include "pocketpy/common/sstream.h"
  4. #include "pocketpy/common/utils.h"
  5. #include "pocketpy/interpreter/generator.h"
  6. #include "pocketpy/interpreter/modules.h"
  7. #include "pocketpy/interpreter/typeinfo.h"
  8. #include "pocketpy/objects/base.h"
  9. #include "pocketpy/common/_generated.h"
  10. #include "pocketpy/pocketpy.h"
  11. #include <stdbool.h>
  12. static char* pk_default_importfile(const char* path) {
  13. #if PK_ENABLE_OS
  14. FILE* f = fopen(path, "rb");
  15. if(f == NULL) return NULL;
  16. fseek(f, 0, SEEK_END);
  17. long size = ftell(f);
  18. fseek(f, 0, SEEK_SET);
  19. char* buffer = PK_MALLOC(size + 1);
  20. size = fread(buffer, 1, size, f);
  21. buffer[size] = 0;
  22. fclose(f);
  23. return buffer;
  24. #else
  25. return NULL;
  26. #endif
  27. }
  28. static void pk_default_print(const char* data) { printf("%s", data); }
// Initializes a py_TypeInfo record in place and allocates its backing
// type object on the managed heap.
//   name:    interned name of the type
//   index:   the py_Type slot this entry occupies in the VM's type list
//   base:    py_Type of the base class (0 for none)
//   base_ti: cached pointer to the base's type info (NULL for none)
//   module:  module the type belongs to (py_NIL for builtins)
static void py_TypeInfo__ctor(py_TypeInfo* self,
                              py_Name name,
                              py_Type index,
                              py_Type base,
                              py_TypeInfo* base_ti,
                              py_TValue module) {
    memset(self, 0, sizeof(py_TypeInfo));
    self->name = name;
    self->base = base;
    self->base_ti = base_ti;
    // create type object with __dict__
    // (slots argument -1 presumably means "dict-backed"; PyObject__dtor
    // treats slots == -1 the same way)
    ManagedHeap* heap = &pk_current_vm->heap;
    PyObject* typeobj = ManagedHeap__new(heap, tp_type, -1, sizeof(py_Type));
    // the userdata of a type object stores its own py_Type index
    *(py_Type*)PyObject__userdata(typeobj) = index;
    self->self = (py_TValue){
        .type = typeobj->type,
        .is_ptr = true,
        ._obj = typeobj,
    };
    self->module = module;
    self->annotations = *py_NIL();
}
// Constructs and fully initializes a VM: zeroes its registers and
// callbacks, builds the frame pool / heap / value stack, registers every
// builtin type (each registration must land on its expected tp_* index),
// injects builtin exceptions, loads the native modules, executes the
// python half of builtins, and finally creates the __main__ module.
// NOTE(review): allocation helpers (py_newstrn, pk_newtype, ...) go
// through pk_current_vm, which presumably already points at `self` —
// confirm against the caller.
void VM__ctor(VM* self) {
    self->top_frame = NULL;
    ModuleDict__ctor(&self->modules, NULL, *py_NIL());
    TypeList__ctor(&self->types);
    self->builtins = *py_NIL();
    self->main = *py_NIL();
    // default host callbacks; embedders may replace them later
    self->callbacks.importfile = pk_default_importfile;
    self->callbacks.print = pk_default_print;
    self->callbacks.getchar = getchar;
    self->last_retval = *py_NIL();
    self->curr_exception = *py_NIL();
    self->is_signal_interrupted = false;
    self->is_curr_exc_handled = false;
    self->ctx = NULL;
    self->__curr_class = NULL;
    self->__curr_function = NULL;
    FixedMemoryPool__ctor(&self->pool_frame, sizeof(Frame), 32);
    ManagedHeap__ctor(&self->heap);
    ValueStack__ctor(&self->stack);
    /* Init Builtin Types */
    // cache 1-char string objects for every ASCII code point,
    // plus the empty string at index 128
    for(int i = 0; i < 128; i++) {
        char* p = py_newstrn(&self->ascii_literals[i], 1);
        *p = i;
    }
    py_newstrn(&self->ascii_literals[128], 0);
    // 0: unused — type index 0 is a reserved placeholder
    void* placeholder = TypeList__emplace(&self->types);
    memset(placeholder, 0, sizeof(py_TypeInfo));
// abort if a registration does not land on its expected tp_* constant
#define validate(t, expr) \
    if(t != (expr)) abort()
    validate(tp_object, pk_newtype("object", 0, NULL, NULL, true, false));
    validate(tp_type, pk_newtype("type", 1, NULL, NULL, false, true));
    pk_object__register();
    validate(tp_int, pk_newtype("int", tp_object, NULL, NULL, false, true));
    validate(tp_float, pk_newtype("float", tp_object, NULL, NULL, false, true));
    validate(tp_bool, pk_newtype("bool", tp_object, NULL, NULL, false, true));
    pk_number__register();
    validate(tp_str, pk_str__register());
    validate(tp_str_iterator, pk_str_iterator__register());
    validate(tp_list, pk_list__register());
    validate(tp_tuple, pk_tuple__register());
    validate(tp_array_iterator, pk_array_iterator__register());
    validate(tp_slice, pk_slice__register());
    validate(tp_range, pk_range__register());
    validate(tp_range_iterator, pk_range_iterator__register());
    validate(tp_module, pk_newtype("module", tp_object, NULL, NULL, false, true));
    validate(tp_function, pk_function__register());
    validate(tp_nativefunc, pk_nativefunc__register());
    validate(tp_boundmethod, pk_boundmethod__register());
    validate(tp_super, pk_super__register());
    validate(tp_BaseException, pk_BaseException__register());
    validate(tp_Exception, pk_Exception__register());
    validate(tp_bytes, pk_bytes__register());
    validate(tp_namedict, pk_namedict__register());
    validate(tp_locals, pk_locals__register());
    validate(tp_code, pk_code__register());
    validate(tp_dict, pk_dict__register());
    validate(tp_dict_items, pk_dict_items__register());
    validate(tp_property, pk_property__register());
    validate(tp_star_wrapper, pk_newtype("star_wrapper", tp_object, NULL, NULL, false, true));
    validate(tp_staticmethod, pk_staticmethod__register());
    validate(tp_classmethod, pk_classmethod__register());
    validate(tp_NoneType, pk_newtype("NoneType", tp_object, NULL, NULL, false, true));
    validate(tp_NotImplementedType,
             pk_newtype("NotImplementedType", tp_object, NULL, NULL, false, true));
    validate(tp_ellipsis, pk_newtype("ellipsis", tp_object, NULL, NULL, false, true));
    validate(tp_generator, pk_generator__register());
    self->builtins = pk_builtins__register();
// inject some builtin exceptions: create the type and publish it into
// the builtins module in one step
#define INJECT_BUILTIN_EXC(name, TBase) \
    do { \
        py_Type type = pk_newtype(#name, TBase, &self->builtins, NULL, false, true); \
        py_setdict(&self->builtins, py_name(#name), py_tpobject(type)); \
        validate(tp_##name, type); \
    } while(0)
    INJECT_BUILTIN_EXC(SystemExit, tp_BaseException);
    INJECT_BUILTIN_EXC(KeyboardInterrupt, tp_BaseException);
    // StopIteration has a custom register function, so it is published
    // manually instead of via the macro
    validate(tp_StopIteration, pk_StopIteration__register());
    py_setdict(&self->builtins, py_name("StopIteration"), py_tpobject(tp_StopIteration));
    INJECT_BUILTIN_EXC(SyntaxError, tp_Exception);
    INJECT_BUILTIN_EXC(StackOverflowError, tp_Exception);
    INJECT_BUILTIN_EXC(OSError, tp_Exception);
    INJECT_BUILTIN_EXC(NotImplementedError, tp_Exception);
    INJECT_BUILTIN_EXC(TypeError, tp_Exception);
    INJECT_BUILTIN_EXC(IndexError, tp_Exception);
    INJECT_BUILTIN_EXC(ValueError, tp_Exception);
    INJECT_BUILTIN_EXC(RuntimeError, tp_Exception);
    INJECT_BUILTIN_EXC(ZeroDivisionError, tp_Exception);
    INJECT_BUILTIN_EXC(NameError, tp_Exception);
    INJECT_BUILTIN_EXC(UnboundLocalError, tp_Exception);
    INJECT_BUILTIN_EXC(AttributeError, tp_Exception);
    INJECT_BUILTIN_EXC(ImportError, tp_Exception);
    INJECT_BUILTIN_EXC(AssertionError, tp_Exception);
    INJECT_BUILTIN_EXC(KeyError, tp_Exception);
#undef INJECT_BUILTIN_EXC
#undef validate
    /* Setup Public Builtin Types */
    // expose these type objects by name in the builtins module
    py_Type public_types[] = {
        tp_object,
        tp_type,
        tp_int,
        tp_float,
        tp_bool,
        tp_str,
        tp_list,
        tp_tuple,
        tp_slice,
        tp_range,
        tp_bytes,
        tp_dict,
        tp_property,
        tp_staticmethod,
        tp_classmethod,
        tp_super,
        tp_BaseException,
        tp_Exception,
    };
    for(int i = 0; i < c11__count_array(public_types); i++) {
        py_TypeInfo* ti = pk__type_info(public_types[i]);
        py_setdict(&self->builtins, ti->name, &ti->self);
    }
    py_newnotimplemented(py_emplacedict(&self->builtins, py_name("NotImplemented")));
    pk__add_module_linalg();
    pk__add_module_array2d();
    pk__add_module_colorcvt();
    // add modules
    pk__add_module_os();
    pk__add_module_sys();
    pk__add_module_io();
    pk__add_module_math();
    pk__add_module_dis();
    pk__add_module_random();
    pk__add_module_json();
    pk__add_module_gc();
    pk__add_module_time();
    pk__add_module_easing();
    pk__add_module_traceback();
    pk__add_module_enum();
    pk__add_module_inspect();
    pk__add_module_pickle();
    pk__add_module_importlib();
    pk__add_module_conio();
    pk__add_module_lz4();   // optional
    pk__add_module_libhv(); // optional
    pk__add_module_pkpy();
    // add python builtins: run the bundled python source into the
    // builtins module; a failure here is unrecoverable
    do {
        bool ok;
        ok = py_exec(kPythonLibs_builtins, "<builtins>", EXEC_MODE, &self->builtins);
        if(!ok) goto __ABORT;
        break;
    __ABORT:
        py_printexc();
        c11__abort("failed to load python builtins!");
    } while(0);
    self->main = *py_newmodule("__main__");
}
// Destroys the VM. Order matters: all heap objects are destroyed first,
// then any frames still on the call stack are unwound, then the module
// dict, type list, frame pool and value stack are released.
void VM__dtor(VM* self) {
    // destroy all objects
    ManagedHeap__dtor(&self->heap);
    // clear frames
    while(self->top_frame)
        VM__pop_frame(self);
    ModuleDict__dtor(&self->modules);
    TypeList__dtor(&self->types);
    FixedMemoryPool__dtor(&self->pool_frame);
    ValueStack__clear(&self->stack);
}
  219. void VM__push_frame(VM* self, Frame* frame) {
  220. frame->f_back = self->top_frame;
  221. self->top_frame = frame;
  222. }
  223. void VM__pop_frame(VM* self) {
  224. assert(self->top_frame);
  225. Frame* frame = self->top_frame;
  226. // reset stack pointer
  227. self->stack.sp = frame->p0;
  228. // pop frame and delete
  229. self->top_frame = frame->f_back;
  230. Frame__delete(frame);
  231. }
  232. static void _clip_int(int* value, int min, int max) {
  233. if(*value < min) *value = min;
  234. if(*value > max) *value = max;
  235. }
// Resolves a slice object (slots: start, stop, step) against a sequence
// of `length` items into concrete ints.
// For step > 0, start/stop are clipped into [0, length];
// for step < 0, into [-1, length - 1] (so stop == -1 means "before 0").
// Negative indices are first offset by `length`.
// Returns false (exception already set) when a slot is not an int or
// when step == 0.
bool pk__parse_int_slice(py_Ref slice, int length, int* start, int* stop, int* step) {
    py_Ref s_start = py_getslot(slice, 0);
    py_Ref s_stop = py_getslot(slice, 1);
    py_Ref s_step = py_getslot(slice, 2);
    // step defaults to 1 when omitted
    if(py_isnone(s_step))
        *step = 1;
    else {
        if(!py_checkint(s_step)) return false;
        *step = py_toint(s_step);
    }
    if(*step == 0) return ValueError("slice step cannot be zero");
    if(*step > 0) {
        // forward slice: defaults are [0, length)
        if(py_isnone(s_start))
            *start = 0;
        else {
            if(!py_checkint(s_start)) return false;
            *start = py_toint(s_start);
            if(*start < 0) *start += length;
            _clip_int(start, 0, length);
        }
        if(py_isnone(s_stop))
            *stop = length;
        else {
            if(!py_checkint(s_stop)) return false;
            *stop = py_toint(s_stop);
            if(*stop < 0) *stop += length;
            _clip_int(stop, 0, length);
        }
    } else {
        // backward slice: defaults are [length - 1, -1) exclusive
        if(py_isnone(s_start))
            *start = length - 1;
        else {
            if(!py_checkint(s_start)) return false;
            *start = py_toint(s_start);
            if(*start < 0) *start += length;
            _clip_int(start, -1, length - 1);
        }
        if(py_isnone(s_stop))
            *stop = -1;
        else {
            if(!py_checkint(s_stop)) return false;
            *stop = py_toint(s_stop);
            if(*stop < 0) *stop += length;
            _clip_int(stop, -1, length - 1);
        }
    }
    return true;
}
  284. bool pk__normalize_index(int* index, int length) {
  285. if(*index < 0) *index += length;
  286. if(*index < 0 || *index >= length) { return IndexError("%d not in [0, %d)", *index, length); }
  287. return true;
  288. }
// Core type-creation routine: appends a new py_TypeInfo to the current
// VM's type list and returns its index as the new py_Type.
//   module:    owning module ref, or NULL (stored as py_NIL)
//   dtor:      userdata destructor; inherited from the base when NULL
//   is_python / is_sealed: flags recorded on the type info; a sealed
//              type cannot be used as a base (checked here for `base`)
// Aborts the process when `base` is sealed.
py_Type pk_newtype(const char* name,
                   py_Type base,
                   const py_GlobalRef module,
                   void (*dtor)(void*),
                   bool is_python,
                   bool is_sealed) {
    // the new type's index is the current list length
    py_Type index = pk_current_vm->types.length;
    py_TypeInfo* ti = TypeList__emplace(&pk_current_vm->types);
    py_TypeInfo* base_ti = base ? pk__type_info(base) : NULL;
    if(base_ti && base_ti->is_sealed) {
        c11__abort("type '%s' is not an acceptable base type", py_name2str(base_ti->name));
    }
    py_TypeInfo__ctor(ti, py_name(name), index, base, base_ti, module ? *module : *py_NIL());
    // inherit the base's destructor unless an explicit one was given
    if(!dtor && base) dtor = base_ti->dtor;
    ti->dtor = dtor;
    ti->is_python = is_python;
    ti->is_sealed = is_sealed;
    return index;
}
  308. py_Type py_newtype(const char* name, py_Type base, const py_GlobalRef module, void (*dtor)(void*)) {
  309. py_Type type = pk_newtype(name, base, module, dtor, false, false);
  310. if(module) py_setdict(module, py_name(name), py_tpobject(type));
  311. return type;
  312. }
// Lays out the arguments of a python-function call into `buffer`
// (co->nlocals slots) according to the declaration `decl`:
// positional args, then kw-defaults, *args collection, positional
// overrides of kw-defaults, and finally keyword args / **kwargs.
// [argv, p1) holds the positional args; at p1 follow `kwargc`
// (key, value) pairs where each key is a py_Name stored as an int.
// Returns false (TypeError set) on any arity/keyword mismatch.
static bool
prepare_py_call(py_TValue* buffer, py_Ref argv, py_Ref p1, int kwargc, const FuncDecl* decl) {
    const CodeObject* co = &decl->code;
    int decl_argc = decl->args.length;
    if(p1 - argv < decl_argc) {
        return TypeError("%s() takes %d positional arguments but %d were given",
                         co->name->data,
                         decl_argc,
                         (int)(p1 - argv));
    }
    py_TValue* t = argv;
    // prepare args: zero all locals, then copy positionals into their slots
    memset(buffer, 0, co->nlocals * sizeof(py_TValue));
    c11__foreach(int, &decl->args, index) buffer[*index] = *t++;
    // prepare kwdefaults
    c11__foreach(FuncDeclKwArg, &decl->kwargs, kv) buffer[kv->index] = kv->value;
    // handle *args: collect surplus positionals into a tuple
    if(decl->starred_arg != -1) {
        int exceed_argc = p1 - t;
        py_Ref vargs = &buffer[decl->starred_arg];
        py_newtuple(vargs, exceed_argc);
        for(int j = 0; j < exceed_argc; j++) {
            py_tuple_setitem(vargs, j, t++);
        }
    } else {
        // kwdefaults override
        // def f(a, b, c=None)
        // f(1, 2, 3) -> c=3
        c11__foreach(FuncDeclKwArg, &decl->kwargs, kv) {
            if(t >= p1) break;
            buffer[kv->index] = *t++;
        }
        // not able to consume all args
        if(t < p1) return TypeError("too many arguments (%s)", co->name->data);
    }
    if(decl->starred_kwarg != -1) py_newdict(&buffer[decl->starred_kwarg]);
    for(int j = 0; j < kwargc; j++) {
        // keyword keys arrive on the stack as int-encoded py_Name values
        py_Name key = py_toint(&p1[2 * j]);
        int index = c11_smallmap_n2i__get(&decl->kw_to_index, key, -1);
        // if key is an explicit key, set as local variable
        if(index >= 0) {
            buffer[index] = p1[2 * j + 1];
        } else {
            // otherwise, set as **kwargs if possible
            if(decl->starred_kwarg == -1) {
                return TypeError("'%n' is an invalid keyword argument for %s()",
                                 key,
                                 co->name->data);
            } else {
                // add to **kwargs
                bool ok = py_dict_setitem_by_str(&buffer[decl->starred_kwarg],
                                                py_name2str(key),
                                                &p1[2 * j + 1]);
                if(!ok) return false;
            }
        }
    }
    return true;
}
// Core call dispatcher. On entry the value stack holds:
//   [callable, <self>, args..., kwargs...]
//    ^p0                                  ^sp
// with `argc` positional values and `kwargc` (name, value) pairs on top.
// Dispatches on the callable's type: boundmethod (patched in place into
// unbound + self), python function (by FuncType), nativefunc, type
// (construction via __new__ then optional __init__), or a __call__
// overload. Returns RES_CALL when `opcall` is true and a python frame
// was pushed (the caller's dispatch loop resumes it), RES_RETURN with
// the result in last_retval, or RES_ERROR with an exception set.
FrameResult VM__vectorcall(VM* self, uint16_t argc, uint16_t kwargc, bool opcall) {
    pk_print_stack(self, self->top_frame, (Bytecode){0});
    py_Ref p1 = self->stack.sp - kwargc * 2;
    py_Ref p0 = p1 - argc - 2;
    // [callable, <self>, args..., kwargs...]
    //  ^p0                ^p1               ^_sp
    // handle boundmethod, do a patch
    if(p0->type == tp_boundmethod) {
        assert(py_isnil(p0 + 1)); // self must be NULL
        py_TValue* slots = PyObject__slots(p0->_obj);
        p0[0] = slots[1]; // callable
        p0[1] = slots[0]; // self
        // [unbound, self, args..., kwargs...]
    }
    // argv skips the <self> slot when it is nil (no receiver)
    py_Ref argv = p0 + 1 + (int)py_isnil(p0 + 1);
    if(p0->type == tp_function) {
        // check stack overflow
        if(self->stack.sp > self->stack.end) {
            py_exception(tp_StackOverflowError, "");
            return RES_ERROR;
        }
        Function* fn = py_touserdata(p0);
        const CodeObject* co = &fn->decl->code;
        switch(fn->decl->type) {
            case FuncType_NORMAL: {
                // full argument layout (kwargs/defaults/star-args) is
                // prepared in a side buffer, then copied onto the stack
                bool ok = prepare_py_call(self->__vectorcall_buffer, argv, p1, kwargc, fn->decl);
                if(!ok) return RES_ERROR;
                // copy buffer back to stack
                self->stack.sp = argv + co->nlocals;
                memcpy(argv, self->__vectorcall_buffer, co->nlocals * sizeof(py_TValue));
                // submit the call
                if(!fn->cfunc) {
                    // python function
                    VM__push_frame(self, Frame__new(co, &fn->module, p0, argv, true));
                    return opcall ? RES_CALL : VM__run_top_frame(self);
                } else {
                    // decl-based binding
                    self->__curr_function = p0;
                    bool ok = py_callcfunc(fn->cfunc, co->nlocals, argv);
                    self->stack.sp = p0;
                    self->__curr_function = NULL;
                    return ok ? RES_RETURN : RES_ERROR;
                }
            }
            case FuncType_SIMPLE:
                // fast path: exact positional arity, no keyword support
                if(p1 - argv != fn->decl->args.length) {
                    const char* fmt = "%s() takes %d positional arguments but %d were given";
                    TypeError(fmt, co->name->data, fn->decl->args.length, (int)(p1 - argv));
                    return RES_ERROR;
                }
                if(kwargc) {
                    TypeError("%s() takes no keyword arguments", co->name->data);
                    return RES_ERROR;
                }
                // [callable, <self>, args..., local_vars...]
                //  ^p0                ^p1                   ^_sp
                self->stack.sp = argv + co->nlocals;
                // initialize local variables to py_NIL
                memset(p1, 0, (char*)self->stack.sp - (char*)p1);
                // submit the call
                if(!fn->cfunc) {
                    // python function
                    VM__push_frame(self, Frame__new(co, &fn->module, p0, argv, true));
                    return opcall ? RES_CALL : VM__run_top_frame(self);
                } else {
                    // decl-based binding
                    self->__curr_function = p0;
                    bool ok = py_callcfunc(fn->cfunc, co->nlocals, argv);
                    self->stack.sp = p0;
                    self->__curr_function = NULL;
                    return ok ? RES_RETURN : RES_ERROR;
                }
            case FuncType_GENERATOR: {
                bool ok = prepare_py_call(self->__vectorcall_buffer, argv, p1, kwargc, fn->decl);
                if(!ok) return RES_ERROR;
                // copy buffer back to stack
                self->stack.sp = argv + co->nlocals;
                memcpy(argv, self->__vectorcall_buffer, co->nlocals * sizeof(py_TValue));
                // the generator object captures the fresh frame; nothing runs yet
                Frame* frame = Frame__new(co, &fn->module, p0, argv, true);
                pk_newgenerator(py_retval(), frame, p0, self->stack.sp);
                self->stack.sp = p0; // reset the stack
                return RES_RETURN;
            }
            default: c11__unreachable();
        };
        c11__unreachable();
        /*****************_py_call*****************/
    }
    if(p0->type == tp_nativefunc) {
        // nativefuncs are positional-only, except object.__new__ which
        // forwards its kwargs
        if(kwargc && p0->_cfunc != pk__object_new) {
            TypeError("nativefunc does not accept keyword arguments");
            return RES_ERROR;
        }
        bool ok = py_callcfunc(p0->_cfunc, p1 - argv, argv);
        self->stack.sp = p0;
        return ok ? RES_RETURN : RES_ERROR;
    }
    if(p0->type == tp_type) {
        // [cls, NULL, args..., kwargs...]
        py_Ref new_f = py_tpfindmagic(py_totype(p0), __new__);
        assert(new_f && py_isnil(p0 + 1));
        // prepare a copy of args and kwargs
        int span = self->stack.sp - argv;
        *self->stack.sp++ = *new_f; // push __new__
        *self->stack.sp++ = *p0;    // push cls
        memcpy(self->stack.sp, argv, span * sizeof(py_TValue));
        self->stack.sp += span;
        // [new_f, cls, args..., kwargs...]
        if(VM__vectorcall(self, argc, kwargc, false) == RES_ERROR) return RES_ERROR;
        // by recursively using vectorcall, args and kwargs are consumed
        // try __init__
        // NOTE: previously we use `get_unbound_method` but here we just use `tpfindmagic`
        // >> [cls, NULL, args..., kwargs...]
        // >> py_retval() is the new instance
        py_Ref init_f = py_tpfindmagic(py_totype(p0), __init__);
        if(init_f) {
            // do an inplace patch
            *p0 = *init_f;             // __init__
            p0[1] = self->last_retval; // self
            // [__init__, self, args..., kwargs...]
            if(VM__vectorcall(self, argc, kwargc, false) == RES_ERROR) return RES_ERROR;
            *py_retval() = p0[1]; // restore the new instance
        }
        // reset the stack
        self->stack.sp = p0;
        return RES_RETURN;
    }
    // handle `__call__` overload
    if(pk_loadmethod(p0, __call__)) {
        // [__call__, self, args..., kwargs...]
        return VM__vectorcall(self, argc, kwargc, opcall);
    }
    TypeError("'%t' object is not callable", p0->type);
    return RES_ERROR;
}
  507. /****************************************/
// Finalizes a PyObject: runs the type's user destructor on its userdata
// (if any), then frees the dynamic __dict__ when slots == -1
// (dict-backed object).
void PyObject__dtor(PyObject* self) {
    py_TypeInfo* ti = pk__type_info(self->type);
    if(ti->dtor) ti->dtor(PyObject__userdata(self));
    if(self->slots == -1) NameDict__dtor(PyObject__dict(self));
}
  513. static void mark_object(PyObject* obj);
  514. void pk__mark_value(py_TValue* val) {
  515. if(val->is_ptr) mark_object(val->_obj);
  516. }
  517. void pk__mark_namedict(NameDict* dict) {
  518. for(int i = 0; i < dict->length; i++) {
  519. NameDict_KV* kv = c11__at(NameDict_KV, dict, i);
  520. pk__mark_value(&kv->value);
  521. }
  522. }
  523. void pk__tp_set_marker(py_Type type, void (*gc_mark)(void*)) {
  524. py_TypeInfo* ti = pk__type_info(type);
  525. assert(ti->gc_mark == NULL);
  526. ti->gc_mark = gc_mark;
  527. }
// GC: recursively mark `obj` and everything reachable from it.
// Already-marked objects are skipped (the gc_marked flag doubles as the
// cycle guard).
static void mark_object(PyObject* obj) {
    if(obj->gc_marked) return;
    obj->gc_marked = true;
    if(obj->slots > 0) {
        // fixed-slot object: mark each slot value
        py_TValue* p = PyObject__slots(obj);
        for(int i = 0; i < obj->slots; i++)
            pk__mark_value(p + i);
    } else if(obj->slots == -1) {
        // slots == -1: object carries a dynamic __dict__
        NameDict* dict = PyObject__dict(obj);
        pk__mark_namedict(dict);
    }
    // let the type's custom marker walk userdata-held references
    py_TypeInfo* ti = pk__type_info(obj->type);
    if(ti->gc_mark) ti->gc_mark(PyObject__userdata(obj));
}
  542. void FuncDecl__gc_mark(const FuncDecl* self) {
  543. CodeObject__gc_mark(&self->code);
  544. for(int j = 0; j < self->kwargs.length; j++) {
  545. FuncDeclKwArg* kw = c11__at(FuncDeclKwArg, &self->kwargs, j);
  546. pk__mark_value(&kw->value);
  547. }
  548. }
  549. void CodeObject__gc_mark(const CodeObject* self) {
  550. for(int i = 0; i < self->consts.length; i++) {
  551. py_TValue* p = c11__at(py_TValue, &self->consts, i);
  552. pk__mark_value(p);
  553. }
  554. for(int i = 0; i < self->func_decls.length; i++) {
  555. FuncDecl_ decl = c11__getitem(FuncDecl_, &self->func_decls, i);
  556. FuncDecl__gc_mark(decl);
  557. }
  558. }
// GC root-marking phase: marks everything reachable from the current VM
// — value stack, cached ascii literals, modules, every type (type
// object, magic slots, annotations), live frames, and the VM registers.
void ManagedHeap__mark(ManagedHeap* self) {
    VM* vm = pk_current_vm;
    // mark value stack
    // NOTE(review): this walks the whole reserved region [begin, end),
    // not just the live part [begin, sp) — assumes slots above sp never
    // hold dangling pointers; confirm against stack push/pop discipline
    for(py_TValue* p = vm->stack.begin; p != vm->stack.end; p++) {
        pk__mark_value(p);
    }
    // mark ascii literals
    for(int i = 0; i < c11__count_array(vm->ascii_literals); i++) {
        pk__mark_value(&vm->ascii_literals[i]);
    }
    // mark modules
    ModuleDict__apply_mark(&vm->modules, mark_object);
    // mark types
    int types_length = vm->types.length;
    // 0-th type is placeholder
    for(py_Type i = 1; i < types_length; i++) {
        py_TypeInfo* ti = TypeList__get(&vm->types, i);
        // mark type object
        pk__mark_value(&ti->self);
        // mark common magic slots
        for(int j = 0; j < PK_MAGIC_SLOTS_COMMON_LENGTH; j++) {
            py_TValue* slot = ti->magic_0 + j;
            if(py_isnil(slot)) continue;
            pk__mark_value(slot);
        }
        // mark uncommon magic slots (lazily allocated; may be absent)
        if(ti->magic_1) {
            for(int j = 0; j < PK_MAGIC_SLOTS_UNCOMMON_LENGTH; j++) {
                py_TValue* slot = ti->magic_1 + j;
                if(py_isnil(slot)) continue;
                pk__mark_value(slot);
            }
        }
        // mark type annotations
        pk__mark_value(&ti->annotations);
    }
    // mark frames (walk the call chain via f_back)
    for(Frame* frame = vm->top_frame; frame; frame = frame->f_back) {
        Frame__gc_mark(frame);
    }
    // mark vm's registers
    pk__mark_value(&vm->last_retval);
    pk__mark_value(&vm->curr_exception);
    for(int i = 0; i < c11__count_array(vm->reg); i++) {
        pk__mark_value(&vm->reg[i]);
    }
}
// Debugging aid: prints the current opcode and a rendering of the value
// stack. NOTE: intentionally disabled — the `return;` on the first line
// short-circuits the whole body; remove it to enable tracing.
void pk_print_stack(VM* self, Frame* frame, Bytecode byte) {
    return;
    if(frame == NULL || py_isnil(&self->main)) return;
    py_TValue* sp = self->stack.sp;
    c11_sbuf buf;
    c11_sbuf__ctor(&buf);
    // render each live stack slot, comma-separated
    for(py_Ref p = self->stack.begin; p != sp; p++) {
        switch(p->type) {
            case tp_nil: c11_sbuf__write_cstr(&buf, "nil"); break;
            case tp_int: c11_sbuf__write_i64(&buf, p->_i64); break;
            case tp_float: c11_sbuf__write_f64(&buf, p->_f64, -1); break;
            case tp_bool: c11_sbuf__write_cstr(&buf, p->_bool ? "True" : "False"); break;
            case tp_NoneType: c11_sbuf__write_cstr(&buf, "None"); break;
            case tp_list: {
                pk_sprintf(&buf, "list(%d)", py_list_len(p));
                break;
            }
            case tp_tuple: {
                pk_sprintf(&buf, "tuple(%d)", py_tuple_len(p));
                break;
            }
            case tp_function: {
                Function* ud = py_touserdata(p);
                c11_sbuf__write_cstr(&buf, ud->decl->code.name->data);
                c11_sbuf__write_cstr(&buf, "()");
                break;
            }
            case tp_type: {
                pk_sprintf(&buf, "<class '%t'>", py_totype(p));
                break;
            }
            case tp_str: {
                pk_sprintf(&buf, "%q", py_tosv(p));
                break;
            }
            case tp_module: {
                py_Ref path = py_getdict(p, __path__);
                pk_sprintf(&buf, "<module '%v'>", py_tosv(path));
                break;
            }
            default: {
                // fallback: just show the type name
                pk_sprintf(&buf, "(%t)", p->type);
                break;
            }
        }
        if(p != &sp[-1]) c11_sbuf__write_cstr(&buf, ", ");
    }
    c11_string* stack_str = c11_sbuf__submit(&buf);
    // location, opcode, argument, then the rendered stack
    printf("%s:%-3d: %-25s %-6d [%s]\n",
           frame->co->src->filename->data,
           Frame__lineno(frame),
           pk_opname(byte.op),
           byte.arg,
           stack_str->data);
    c11_string__delete(stack_str);
}
// Builtin wrapper: identity method — returns its single argument
// (the receiver) as the call result.
bool pk_wrapper__self(int argc, py_Ref argv) {
    PY_CHECK_ARGC(1);
    py_assign(py_retval(), argv);
    return true;
}
  667. py_TypeInfo* pk__type_info(py_Type type) { return TypeList__get(&pk_current_vm->types, type); }
// Reads one REPL input unit into `buf` (capacity `max_size`), using the
// VM's getchar callback. Prints ">>> " / "... " prompts itself.
// Enters multiline mode when a line ends with ':', '(', '[', '{' or the
// input starts with '@' (decorator); multiline ends on two consecutive
// newlines. Returns the number of bytes stored (NUL-terminated), or -1
// on EOF. Truncates silently when the buffer fills up.
int py_replinput(char* buf, int max_size) {
    buf[0] = '\0'; // reset first char because we check '@' at the beginning
    int size = 0;
    bool multiline = false;
    printf(">>> ");
    while(true) {
        int c = pk_current_vm->callbacks.getchar();
        if(c == EOF) return -1;
        if(c == '\n') {
            // inspect the previous character to decide how to proceed
            char last = '\0';
            if(size > 0) last = buf[size - 1];
            if(multiline) {
                if(last == '\n') {
                    break; // 2 consecutive newlines to end multiline input
                } else {
                    printf("... ");
                }
            } else {
                // openers / decorator start a multiline block;
                // anything else completes the input
                if(last == ':' || last == '(' || last == '[' || last == '{' || buf[0] == '@') {
                    printf("... ");
                    multiline = true;
                } else {
                    break;
                }
            }
        }
        // buffer full: terminate and return what we have (drops `c`)
        if(size == max_size - 1) {
            buf[size] = '\0';
            return size;
        }
        buf[size++] = c;
    }
    buf[size] = '\0';
    return size;
}