vm.h 55 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590
  1. #pragma once
  2. #include "codeobject.h"
  3. #include "common.h"
  4. #include "frame.h"
  5. #include "error.h"
  6. #include "gc.h"
  7. #include "memory.h"
  8. #include "obj.h"
  9. #include "str.h"
  10. #include "tuplelist.h"
  11. #include "dict.h"
  12. namespace pkpy{
/* Stack manipulation macros */
// https://github.com/python/cpython/blob/3.9/Python/ceval.c#L1123
// These expand against a member named `s_data` (the VM's value stack), so
// they are only usable inside VM methods or scopes with an `s_data` in view.
#define TOP() (s_data.top())                // top-of-stack value
#define SECOND() (s_data.second())          // value just below TOS
#define THIRD() (s_data.third())            // third value from the top
#define PEEK(n) (s_data.peek(n))            // n-th value from the top (see ValueStack::peek for indexing convention)
#define STACK_SHRINK(n) (s_data.shrink(n))  // drop n values
#define PUSH(v) (s_data.push(v))            // push one value
#define POP() (s_data.pop())                // discard TOS
#define POPX() (s_data.popx())              // pop TOS and return it (presumably; confirm against ValueStack)
#define STACK_VIEW(n) (s_data.view(n))      // non-owning view of the top n values
// DEF_NATIVE_2(ctype, ptype): generates the glue between a C++ value type
// `ctype` and its VM type tag `vm->ptype`:
//   * py_cast<ctype>/<ctype&>  -- checked casts (check_non_tagged_type raises on mismatch)
//   * _py_cast<ctype>/<ctype&> -- unchecked casts (caller guarantees the type)
//   * py_var(vm, value)        -- boxes a ctype into a GC-managed PyObject
#define DEF_NATIVE_2(ctype, ptype) \
template<> inline ctype py_cast<ctype>(VM* vm, PyObject* obj) { \
    vm->check_non_tagged_type(obj, vm->ptype); \
    return PK_OBJ_GET(ctype, obj); \
} \
template<> inline ctype _py_cast<ctype>(VM* vm, PyObject* obj) { \
    return PK_OBJ_GET(ctype, obj); \
} \
template<> inline ctype& py_cast<ctype&>(VM* vm, PyObject* obj) { \
    vm->check_non_tagged_type(obj, vm->ptype); \
    return PK_OBJ_GET(ctype, obj); \
} \
template<> inline ctype& _py_cast<ctype&>(VM* vm, PyObject* obj) { \
    return PK_OBJ_GET(ctype, obj); \
} \
inline PyObject* py_var(VM* vm, const ctype& value) { return vm->heap.gcnew(vm->ptype, value);} \
inline PyObject* py_var(VM* vm, ctype&& value) { return vm->heap.gcnew(vm->ptype, std::move(value));}
// Signature of a cached binary special method (e.g. __add__): (vm, lhs, rhs) -> result.
typedef PyObject* (*BinaryFuncC)(VM*, PyObject*, PyObject*);
// Per-type record stored in VM::_all_types (a Type is an index into that
// vector). Besides identity (object, base, name), it caches native special
// methods so the interpreter can dispatch them without attribute lookup;
// a nullptr slot means "fall back to the normal method protocol".
struct PyTypeInfo{
    PyObject* obj;              // the type object itself
    Type base;                  // single base type (index -1 marks the root)
    Str name;                   // qualified name, e.g. "module.Class"
    bool subclass_enabled;      // whether user classes may derive from this type
    // cached special methods
    // unary operators
    PyObject* (*m__repr__)(VM* vm, PyObject*) = nullptr;
    PyObject* (*m__str__)(VM* vm, PyObject*) = nullptr;
    i64 (*m__hash__)(VM* vm, PyObject*) = nullptr;
    i64 (*m__len__)(VM* vm, PyObject*) = nullptr;
    PyObject* (*m__iter__)(VM* vm, PyObject*) = nullptr;
    PyObject* (*m__next__)(VM* vm, PyObject*) = nullptr;
    PyObject* (*m__json__)(VM* vm, PyObject*) = nullptr;
    PyObject* (*m__neg__)(VM* vm, PyObject*) = nullptr;
    PyObject* (*m__bool__)(VM* vm, PyObject*) = nullptr;
    // comparison / membership
    BinaryFuncC m__eq__ = nullptr;
    BinaryFuncC m__lt__ = nullptr;
    BinaryFuncC m__le__ = nullptr;
    BinaryFuncC m__gt__ = nullptr;
    BinaryFuncC m__ge__ = nullptr;
    BinaryFuncC m__contains__ = nullptr;
    // binary operators
    BinaryFuncC m__add__ = nullptr;
    BinaryFuncC m__sub__ = nullptr;
    BinaryFuncC m__mul__ = nullptr;
    BinaryFuncC m__truediv__ = nullptr;
    BinaryFuncC m__floordiv__ = nullptr;
    BinaryFuncC m__mod__ = nullptr;
    BinaryFuncC m__pow__ = nullptr;
    BinaryFuncC m__matmul__ = nullptr;
    BinaryFuncC m__lshift__ = nullptr;
    BinaryFuncC m__rshift__ = nullptr;
    BinaryFuncC m__and__ = nullptr;
    BinaryFuncC m__or__ = nullptr;
    BinaryFuncC m__xor__ = nullptr;
    // indexer
    PyObject* (*m__getitem__)(VM* vm, PyObject*, PyObject*) = nullptr;
    void (*m__setitem__)(VM* vm, PyObject*, PyObject*, PyObject*) = nullptr;
    void (*m__delitem__)(VM* vm, PyObject*, PyObject*) = nullptr;
};
  83. struct FrameId{
  84. std::vector<pkpy::Frame>* data;
  85. int index;
  86. FrameId(std::vector<pkpy::Frame>* data, int index) : data(data), index(index) {}
  87. Frame* operator->() const { return &data->operator[](index); }
  88. Frame* get() const { return &data->operator[](index); }
  89. };
  90. typedef void(*PrintFunc)(VM*, const Str&);
// The PocketPy virtual machine.
// Owns the managed heap, the value stack shared by all frames, the call
// stack and the global type registry, and exposes the embedding API:
// exec/compile, call/call_method, the bind_* family, error reporting and
// the module import machinery.
class VM {
    VM* vm; // self reference for simplify code
public:
    ManagedHeap heap;                       // GC-managed object heap
    ValueStack s_data;                      // value stack shared by all frames
    stack< Frame > callstack;               // active call frames
    std::vector<PyTypeInfo> _all_types;     // type registry; a Type indexes into this
    NameDict _modules; // loaded modules
    std::map<StrName, Str> _lazy_modules; // lazy loaded modules (name -> source, compiled on first import)
    // Well-known singletons, created by init_builtin_types().
    PyObject* None;
    PyObject* True;
    PyObject* False;
    PyObject* NotImplemented; // unused
    PyObject* Ellipsis;
    PyObject* builtins; // builtins module
    PyObject* StopIteration;
    PyObject* _main; // __main__ module
    PyObject* _last_exception;
#if PK_ENABLE_CEVAL_CALLBACK
    // Optional hook invoked per bytecode step (tracing/debugging).
    void (*_ceval_on_step)(VM*, Frame*, Bytecode bc) = nullptr;
#endif
    PrintFunc _stdout;   // sink for normal output (defaults to std::cout)
    PrintFunc _stderr;   // sink for error output (defaults to std::cerr)
    // Maps a module filename to its source; an empty Bytes means "not found".
    Bytes (*_import_handler)(const Str& name);
    // for quick access
    Type tp_object, tp_type, tp_int, tp_float, tp_bool, tp_str;
    Type tp_list, tp_tuple;
    Type tp_function, tp_native_func, tp_bound_method;
    Type tp_slice, tp_range, tp_module;
    Type tp_super, tp_exception, tp_bytes, tp_mappingproxy;
    Type tp_dict, tp_property, tp_star_wrapper;
    PyObject* cached_object__new__;   // cached `object.__new__` for fast constructor dispatch
    const bool enable_os;             // whether OS-level facilities are permitted

    VM(bool enable_os=true) : heap(this), enable_os(enable_os) {
        this->vm = this;
        _stdout = [](VM* vm, const Str& s) { std::cout << s; };
        _stderr = [](VM* vm, const Str& s) { std::cerr << s; };
        callstack.reserve(8);
        _main = nullptr;
        _last_exception = nullptr;
        // Default import handler finds nothing; hosts override it.
        _import_handler = [](const Str& name) { return Bytes(); };
        init_builtin_types();
    }

    // Handle to the currently executing (topmost) frame. The FrameId stores
    // an index, so it remains valid if the callstack vector reallocates —
    // but only while that frame has not been popped.
    FrameId top_frame() {
#if PK_DEBUG_EXTRA_CHECK
        if(callstack.empty()) FATAL_ERROR();
#endif
        return FrameId(&callstack.data(), callstack.size()-1);
    }

    // str(obj): cached __str__ slot first, then an unbound __str__ lookup,
    // finally fall back to repr().
    PyObject* py_str(PyObject* obj){
        const PyTypeInfo* ti = _inst_type_info(obj);
        if(ti->m__str__) return ti->m__str__(this, obj);
        PyObject* self;
        PyObject* f = get_unbound_method(obj, __str__, &self, false);
        if(self != PY_NULL) return call_method(self, f);
        return py_repr(obj);
    }

    // repr(obj): cached slot first, else the regular method protocol.
    PyObject* py_repr(PyObject* obj){
        const PyTypeInfo* ti = _inst_type_info(obj);
        if(ti->m__repr__) return ti->m__repr__(this, obj);
        return call_method(obj, __repr__);
    }

    // JSON serialization via the __json__ protocol.
    PyObject* py_json(PyObject* obj){
        const PyTypeInfo* ti = _inst_type_info(obj);
        if(ti->m__json__) return ti->m__json__(this, obj);
        return call_method(obj, __json__);
    }

    // iter(obj): raises TypeError if the object has no __iter__.
    PyObject* py_iter(PyObject* obj){
        const PyTypeInfo* ti = _inst_type_info(obj);
        if(ti->m__iter__) return ti->m__iter__(this, obj);
        PyObject* self;
        PyObject* iter_f = get_unbound_method(obj, __iter__, &self, false);
        if(self != PY_NULL) return call_method(self, iter_f);
        TypeError(OBJ_NAME(_t(obj)).escape() + " object is not iterable");
        return nullptr;
    }

    // Walk the single-inheritance base chain of `cls` looking for `name`.
    // Returns nullptr when not found; base.index == -1 marks the root type.
    PyObject* find_name_in_mro(PyObject* cls, StrName name){
        PyObject* val;
        do{
            val = cls->attr().try_get(name);
            if(val != nullptr) return val;
            Type base = _all_types[PK_OBJ_GET(Type, cls)].base;
            if(base.index == -1) break;
            cls = _all_types[base].obj;
        }while(true);
        return nullptr;
    }

    // True if obj's type is cls_t or a subclass of it.
    bool isinstance(PyObject* obj, Type cls_t){
        Type obj_t = PK_OBJ_GET(Type, _t(obj));
        do{
            if(obj_t == cls_t) return true;
            Type base = _all_types[obj_t].base;
            if(base.index == -1) break;
            obj_t = base;
        }while(true);
        return false;
    }

    // Compile and run `source` in `_module` (defaults to __main__).
    // On an uncaught exception, prints a summary to _stderr, resets both
    // stacks, and returns nullptr.
    PyObject* exec(Str source, Str filename, CompileMode mode, PyObject* _module=nullptr){
        if(_module == nullptr) _module = _main;
        try {
            CodeObject_ code = compile(source, filename, mode);
#if PK_DEBUG_DIS_EXEC
            if(_module == _main) std::cout << disassemble(code) << '\n';
#endif
            return _exec(code, _module);
        }catch (const Exception& e){
            _stderr(this, e.summary() + "\n");
        }
#if !PK_DEBUG_FULL_EXCEPTION
        // In release builds also absorb std::exception so host code keeps running.
        catch (const std::exception& e) {
            Str msg = "An std::exception occurred! It could be a bug.\n";
            msg = msg + e.what();
            _stderr(this, msg + "\n");
        }
#endif
        callstack.clear();
        s_data.clear();
        return nullptr;
    }

    // Push a new frame (constructed from args) and run it to completion.
    template<typename ...Args>
    PyObject* _exec(Args&&... args){
        callstack.emplace(&s_data, s_data._sp, std::forward<Args>(args)...);
        return _run_top_frame();
    }

    // Pop the top frame and discard its slice of the value stack.
    void _pop_frame(){
        Frame* frame = &callstack.top();
        s_data.reset(frame->_sp_base);
        callstack.pop();
    }

    // Manually unrolled push of 0..4 arguments (call/call_method support).
    void _push_varargs(){ }
    void _push_varargs(PyObject* _0){ PUSH(_0); }
    void _push_varargs(PyObject* _0, PyObject* _1){ PUSH(_0); PUSH(_1); }
    void _push_varargs(PyObject* _0, PyObject* _1, PyObject* _2){ PUSH(_0); PUSH(_1); PUSH(_2); }
    void _push_varargs(PyObject* _0, PyObject* _1, PyObject* _2, PyObject* _3){ PUSH(_0); PUSH(_1); PUSH(_2); PUSH(_3); }

    // callable(args...). Stack layout: callable, PY_NULL (no self), args.
    template<typename... Args>
    PyObject* call(PyObject* callable, Args&&... args){
        PUSH(callable);
        PUSH(PY_NULL);
        _push_varargs(args...);
        return vectorcall(sizeof...(args));
    }

    // Bound call with an explicit self. Stack layout: callable, self, args.
    template<typename... Args>
    PyObject* call_method(PyObject* self, PyObject* callable, Args&&... args){
        PUSH(callable);
        PUSH(self);
        _push_varargs(args...);
        return vectorcall(sizeof...(args));
    }

    // Look up `name` on self (may rebind self, e.g. for bound methods) and call it.
    template<typename... Args>
    PyObject* call_method(PyObject* self, StrName name, Args&&... args){
        PyObject* callable = get_unbound_method(self, name, &self);
        return call_method(self, callable, args...);
    }

    // Build a property object from a native getter and optional setter.
    PyObject* property(NativeFuncC fget, NativeFuncC fset=nullptr){
        PyObject* _0 = heap.gcnew(tp_native_func, NativeFunc(fget, 1, false));
        PyObject* _1 = vm->None;
        if(fset != nullptr) _1 = heap.gcnew(tp_native_func, NativeFunc(fset, 2, false));
        return call(_t(tp_property), _0, _1);
    }

    // Register a new type deriving from `base`, optionally publishing it as
    // an attribute of `mod`. Raises TypeError if the base forbids subclassing.
    PyObject* new_type_object(PyObject* mod, StrName name, Type base, bool subclass_enabled=true){
        PyObject* obj = heap._new<Type>(tp_type, _all_types.size());
        const PyTypeInfo& base_info = _all_types[base];
        if(!base_info.subclass_enabled){
            TypeError(fmt("type ", base_info.name.escape(), " is not `subclass_enabled`"));
        }
        PyTypeInfo info{
            obj,
            base,
            // Qualify the name with its module unless it lives in builtins.
            (mod!=nullptr && mod!=builtins) ? Str(OBJ_NAME(mod)+"."+name.sv()): name.sv(),
            subclass_enabled,
        };
        if(mod != nullptr) mod->attr().set(name, obj);
        _all_types.push_back(info);
        return obj;
    }

    // Internal helper: register an unexposed, non-subclassable type.
    Type _new_type_object(StrName name, Type base=0) {
        PyObject* obj = new_type_object(nullptr, name, base, false);
        return PK_OBJ_GET(Type, obj);
    }

    // Resolve a type object by name: builtins first, then a linear scan of
    // the registry; throws std::runtime_error if absent entirely.
    PyObject* _find_type_object(const Str& type){
        PyObject* obj = builtins->attr().try_get(type);
        if(obj == nullptr){
            for(auto& t: _all_types) if(t.name == type) return t.obj;
            throw std::runtime_error(fmt("type not found: ", type));
        }
        check_non_tagged_type(obj, tp_type);
        return obj;
    }

    // Name -> Type index.
    Type _type(const Str& type){
        PyObject* obj = _find_type_object(type);
        return PK_OBJ_GET(Type, obj);
    }

    // Name -> PyTypeInfo*; FATAL_ERROR on a name that exists nowhere.
    PyTypeInfo* _type_info(const Str& type){
        PyObject* obj = builtins->attr().try_get(type);
        if(obj == nullptr){
            for(auto& t: _all_types) if(t.name == type) return &t;
            FATAL_ERROR();
        }
        return &_all_types[PK_OBJ_GET(Type, obj)];
    }

    PyTypeInfo* _type_info(Type type){
        return &_all_types[type];
    }

    // Type info of an instance; small ints/floats are pointer-tagged and
    // carry no object header, so they are special-cased.
    const PyTypeInfo* _inst_type_info(PyObject* obj){
        if(is_int(obj)) return &_all_types[tp_int];
        if(is_float(obj)) return &_all_types[tp_float];
        return &_all_types[obj->type];
    }

// BIND_UNARY_SPECIAL(name): defines bind__name(type, f) which stores f in the
// type's cached slot AND registers a NativeFunc wrapper so Python-level
// lookups of the method still work. The raw pointer rides along as userdata.
#define BIND_UNARY_SPECIAL(name) \
    void bind##name(Type type, PyObject* (*f)(VM*, PyObject*)){ \
        _all_types[type].m##name = f; \
        PyObject* nf = bind_method<0>(_t(type), #name, [](VM* vm, ArgsView args){ \
            return lambda_get_userdata<PyObject*(*)(VM*, PyObject*)>(args.begin())(vm, args[0]);\
        }); \
        PK_OBJ_GET(NativeFunc, nf).set_userdata(f); \
    }
    BIND_UNARY_SPECIAL(__repr__)
    BIND_UNARY_SPECIAL(__str__)
    BIND_UNARY_SPECIAL(__iter__)
    BIND_UNARY_SPECIAL(__next__)
    BIND_UNARY_SPECIAL(__json__)
    BIND_UNARY_SPECIAL(__neg__)
    BIND_UNARY_SPECIAL(__bool__)
    // __hash__/__len__ return i64, so they get hand-written binders (defined elsewhere).
    void bind__hash__(Type type, i64 (*f)(VM* vm, PyObject*));
    void bind__len__(Type type, i64 (*f)(VM* vm, PyObject*));
#undef BIND_UNARY_SPECIAL

// Same scheme for binary special methods (cached slot + NativeFunc wrapper).
#define BIND_BINARY_SPECIAL(name) \
    void bind##name(Type type, BinaryFuncC f){ \
        PyObject* obj = _t(type); \
        _all_types[type].m##name = f; \
        PyObject* nf = bind_method<1>(obj, #name, [](VM* vm, ArgsView args){ \
            return lambda_get_userdata<BinaryFuncC>(args.begin())(vm, args[0], args[1]); \
        }); \
        PK_OBJ_GET(NativeFunc, nf).set_userdata(f); \
    }
    BIND_BINARY_SPECIAL(__eq__)
    BIND_BINARY_SPECIAL(__lt__)
    BIND_BINARY_SPECIAL(__le__)
    BIND_BINARY_SPECIAL(__gt__)
    BIND_BINARY_SPECIAL(__ge__)
    BIND_BINARY_SPECIAL(__contains__)
    BIND_BINARY_SPECIAL(__add__)
    BIND_BINARY_SPECIAL(__sub__)
    BIND_BINARY_SPECIAL(__mul__)
    BIND_BINARY_SPECIAL(__truediv__)
    BIND_BINARY_SPECIAL(__floordiv__)
    BIND_BINARY_SPECIAL(__mod__)
    BIND_BINARY_SPECIAL(__pow__)
    BIND_BINARY_SPECIAL(__matmul__)
    BIND_BINARY_SPECIAL(__lshift__)
    BIND_BINARY_SPECIAL(__rshift__)
    BIND_BINARY_SPECIAL(__and__)
    BIND_BINARY_SPECIAL(__or__)
    BIND_BINARY_SPECIAL(__xor__)
#undef BIND_BINARY_SPECIAL

    // Indexer binders: same slot+wrapper pattern, different arities/returns.
    void bind__getitem__(Type type, PyObject* (*f)(VM*, PyObject*, PyObject*)){
        PyObject* obj = _t(type);
        _all_types[type].m__getitem__ = f;
        PyObject* nf = bind_method<1>(obj, "__getitem__", [](VM* vm, ArgsView args){
            return lambda_get_userdata<PyObject*(*)(VM*, PyObject*, PyObject*)>(args.begin())(vm, args[0], args[1]);
        });
        PK_OBJ_GET(NativeFunc, nf).set_userdata(f);
    }

    void bind__setitem__(Type type, void (*f)(VM*, PyObject*, PyObject*, PyObject*)){
        PyObject* obj = _t(type);
        _all_types[type].m__setitem__ = f;
        PyObject* nf = bind_method<2>(obj, "__setitem__", [](VM* vm, ArgsView args){
            lambda_get_userdata<void(*)(VM* vm, PyObject*, PyObject*, PyObject*)>(args.begin())(vm, args[0], args[1], args[2]);
            return vm->None;
        });
        PK_OBJ_GET(NativeFunc, nf).set_userdata(f);
    }

    void bind__delitem__(Type type, void (*f)(VM*, PyObject*, PyObject*)){
        PyObject* obj = _t(type);
        _all_types[type].m__delitem__ = f;
        PyObject* nf = bind_method<1>(obj, "__delitem__", [](VM* vm, ArgsView args){
            lambda_get_userdata<void(*)(VM*, PyObject*, PyObject*)>(args.begin())(vm, args[0], args[1]);
            return vm->None;
        });
        PK_OBJ_GET(NativeFunc, nf).set_userdata(f);
    }

    // Structural equality with symmetric fallback: try lhs's cached __eq__,
    // then its Python-level __eq__, then the same two steps with the
    // operands swapped. NotImplemented defers to the next step.
    bool py_equals(PyObject* lhs, PyObject* rhs){
        if(lhs == rhs) return true;   // identity short-circuit
        const PyTypeInfo* ti = _inst_type_info(lhs);
        PyObject* res;
        if(ti->m__eq__){
            res = ti->m__eq__(this, lhs, rhs);
            if(res != vm->NotImplemented) return res == vm->True;
        }
        res = call_method(lhs, __eq__, rhs);
        if(res != vm->NotImplemented) return res == vm->True;
        ti = _inst_type_info(rhs);
        if(ti->m__eq__){
            res = ti->m__eq__(this, rhs, lhs);
            if(res != vm->NotImplemented) return res == vm->True;
        }
        res = call_method(rhs, __eq__, lhs);
        if(res != vm->NotImplemented) return res == vm->True;
        return false;
    }

    // Convenience overloads: resolve the type by name first.
    template<int ARGC>
    PyObject* bind_func(Str type, Str name, NativeFuncC fn) {
        return bind_func<ARGC>(_find_type_object(type), name, fn);
    }

    template<int ARGC>
    PyObject* bind_method(Str type, Str name, NativeFuncC fn) {
        return bind_method<ARGC>(_find_type_object(type), name, fn);
    }

    // Bind __new__; ARGC counts the implicit cls argument, hence >= 1 (or -1 for variadic).
    template<int ARGC, typename __T>
    PyObject* bind_constructor(__T&& type, NativeFuncC fn) {
        static_assert(ARGC==-1 || ARGC>=1);
        return bind_func<ARGC>(std::forward<__T>(type), "__new__", fn);
    }

    // __new__ that default-constructs the boxed C++ type T.
    template<typename T, typename __T>
    PyObject* bind_default_constructor(__T&& type) {
        return bind_constructor<1>(std::forward<__T>(type), [](VM* vm, ArgsView args){
            Type t = PK_OBJ_GET(Type, args[0]);
            return vm->heap.gcnew<T>(t, T());
        });
    }

    // __new__ that always raises NotImplementedError (non-instantiable types).
    template<typename T, typename __T>
    PyObject* bind_notimplemented_constructor(__T&& type) {
        return bind_constructor<-1>(std::forward<__T>(type), [](VM* vm, ArgsView args){
            vm->NotImplementedError();
            return vm->None;
        });
    }

    template<int ARGC>
    PyObject* bind_builtin_func(Str name, NativeFuncC fn) {
        return bind_func<ARGC>(builtins, name, fn);
    }

    // Normalize a possibly-negative index into [0, size); raises IndexError
    // when out of range.
    int normalized_index(int index, int size){
        if(index < 0) index += size;
        if(index < 0 || index >= size){
            IndexError(std::to_string(index) + " not in [0, " + std::to_string(size) + ")");
        }
        return index;
    }

    // next(obj) via the cached slot or the regular method protocol.
    PyObject* py_next(PyObject* obj){
        const PyTypeInfo* ti = _inst_type_info(obj);
        if(ti->m__next__) return ti->m__next__(this, obj);
        return call_method(obj, __next__);
    }

    /***** Error Reporter *****/
    // All error helpers funnel into _error(Exception) (declared below),
    // which raises into the interpreter loop rather than returning.
    void _error(StrName name, const Str& msg){
        _error(Exception(name, msg));
    }

    // Route the pending exception to the nearest handler; the C++ exception
    // type tells _run_top_frame whether it was caught at the Python level.
    void _raise(){
        bool ok = top_frame()->jump_to_exception_handler();
        if(ok) throw HandledException();
        else throw UnhandledException();
    }

    void StackOverflowError() { _error("StackOverflowError", ""); }
    void IOError(const Str& msg) { _error("IOError", msg); }
    void NotImplementedError(){ _error("NotImplementedError", ""); }
    void TypeError(const Str& msg){ _error("TypeError", msg); }
    void IndexError(const Str& msg){ _error("IndexError", msg); }
    void ValueError(const Str& msg){ _error("ValueError", msg); }
    void NameError(StrName name){ _error("NameError", fmt("name ", name.escape() + " is not defined")); }
    void KeyError(PyObject* obj){ _error("KeyError", PK_OBJ_GET(Str, py_repr(obj))); }
    void BinaryOptError(const char* op) { TypeError(fmt("unsupported operand type(s) for ", op)); }

    void AttributeError(PyObject* obj, StrName name){
        // OBJ_NAME calls getattr, which may lead to a infinite recursion
        _error("AttributeError", fmt("type ", OBJ_NAME(_t(obj)).escape(), " has no attribute ", name.escape()));
    }

    void AttributeError(Str msg){ _error("AttributeError", msg); }

    // is_type also accepts tagged ints/floats; check_non_tagged_type does not.
    void check_type(PyObject* obj, Type type){
        if(is_type(obj, type)) return;
        TypeError("expected " + OBJ_NAME(_t(type)).escape() + ", got " + OBJ_NAME(_t(obj)).escape());
    }

    void check_non_tagged_type(PyObject* obj, Type type){
        if(is_non_tagged_type(obj, type)) return;
        TypeError("expected " + OBJ_NAME(_t(type)).escape() + ", got " + OBJ_NAME(_t(obj)).escape());
    }

    void check_int(PyObject* obj){
        if(is_int(obj)) return;
        check_type(obj, tp_int); // if failed, redirect to check_type to raise TypeError
    }

    void check_float(PyObject* obj){
        if(is_float(obj)) return;
        check_type(obj, tp_float); // if failed, redirect to check_type to raise TypeError
    }

    // Type index -> type object.
    PyObject* _t(Type t){
        return _all_types[t.index].obj;
    }

    // Instance -> its type object (tagged ints/floats special-cased).
    PyObject* _t(PyObject* obj){
        if(is_int(obj)) return _t(tp_int);
        if(is_float(obj)) return _t(tp_float);
        return _all_types[obj->type].obj;
    }

    // Tracks in-progress imports for circular-import detection and for
    // resolving relative imports against the importing package.
    struct ImportContext{
        // 0: normal; 1: __init__.py; 2: relative
        std::vector<std::pair<StrName, int>> pending;
        // RAII guard: pushes the (name, type) pair for the duration of one import.
        struct Temp{
            VM* vm;
            StrName name;
            Temp(VM* vm, StrName name, int type): vm(vm), name(name){
                ImportContext* ctx = &vm->_import_context;
                ctx->pending.emplace_back(name, type);
            }
            ~Temp(){
                ImportContext* ctx = &vm->_import_context;
                ctx->pending.pop_back();
            }
        };
        Temp temp(VM* vm, StrName name, int type){
            return Temp(vm, name, type);
        }
    };

    ImportContext _import_context;

    // Import `name`, returning the module object. Resolution order:
    // relative-to-package (if `relative`), already-loaded modules, lazy
    // sources, then the host's _import_handler (trying `<name>.py` and
    // `<name>/__init__.py`). Raises ImportError on failure or circularity.
    PyObject* py_import(StrName name, bool relative=false){
        Str filename;
        int type;
        if(relative){
            ImportContext* ctx = &_import_context;
            type = 2;
            // Find the innermost enclosing package (__init__.py entry) to anchor the import.
            for(auto it=ctx->pending.rbegin(); it!=ctx->pending.rend(); ++it){
                if(it->second == 2) continue;
                if(it->second == 1){
                    filename = fmt(it->first, kPlatformSep, name, ".py");
                    name = fmt(it->first, '.', name).c_str();
                    break;
                }
            }
            if(filename.length() == 0) _error("ImportError", "relative import outside of package");
        }else{
            type = 0;
            filename = fmt(name, ".py");
        }
        // Circular import check against everything currently being imported.
        for(auto& [k, v]: _import_context.pending){
            if(k == name){
                vm->_error("ImportError", fmt("circular import ", name.escape()));
            }
        }
        PyObject* ext_mod = _modules.try_get(name);
        if(ext_mod == nullptr){
            Str source;
            auto it = _lazy_modules.find(name);
            if(it == _lazy_modules.end()){
                Bytes b = _import_handler(filename);
                if(!relative && !b){
                    // Fall back to package form: <name>/__init__.py
                    filename = fmt(name, kPlatformSep, "__init__.py");
                    b = _import_handler(filename);
                    if(b) type = 1;
                }
                if(!b) _error("ImportError", fmt("module ", name.escape(), " not found"));
                source = Str(b.str());
            }else{
                // Lazy modules are compiled once, then removed from the table.
                source = it->second;
                _lazy_modules.erase(it);
            }
            auto _ = _import_context.temp(this, name, type);
            CodeObject_ code = compile(source, filename, EXEC_MODE);
            PyObject* new_mod = new_module(name);
            _exec(code, new_mod);
            new_mod->attr()._try_perfect_rehash();
            return new_mod;
        }else{
            return ext_mod;
        }
    }

    ~VM() {
        callstack.clear();
        s_data.clear();
        _all_types.clear();
        _modules.clear();
        _lazy_modules.clear();
    }

#if PK_DEBUG_CEVAL_STEP
    void _log_s_data(const char* title = nullptr);
#endif
    // Out-of-line members (implemented in the corresponding .cpp / below).
    void _unpack_as_list(ArgsView args, List& list);
    void _unpack_as_dict(ArgsView args, Dict& dict);
    PyObject* vectorcall(int ARGC, int KWARGC=0, bool op_call=false);
    CodeObject_ compile(Str source, Str filename, CompileMode mode, bool unknown_global_scope=false);
    PyObject* py_negate(PyObject* obj);
    f64 num_to_float(PyObject* obj);
    bool py_bool(PyObject* obj);
    i64 py_hash(PyObject* obj);
    PyObject* py_list(PyObject*);
    PyObject* new_module(StrName name);
    Str disassemble(CodeObject_ co);
    void init_builtin_types();
    PyObject* getattr(PyObject* obj, StrName name, bool throw_err=true);
    PyObject* get_unbound_method(PyObject* obj, StrName name, PyObject** self, bool throw_err=true, bool fallback=false);
    void parse_int_slice(const Slice& s, int length, int& start, int& stop, int& step);
    PyObject* format(Str, PyObject*);
    void setattr(PyObject* obj, StrName name, PyObject* value);
    template<int ARGC>
    PyObject* bind_method(PyObject*, Str, NativeFuncC);
    template<int ARGC>
    PyObject* bind_func(PyObject*, Str, NativeFuncC);
    void _error(Exception);
    PyObject* _run_top_frame();
    void post_init();
    PyObject* _py_generator(Frame&& frame, ArgsView buffer);
};
  588. inline PyObject* NativeFunc::operator()(VM* vm, ArgsView args) const{
  589. if(args.size() != argc && argc != -1) {
  590. vm->TypeError(fmt("expected ", argc, " arguments, got ", args.size()));
  591. }
  592. #if PK_DEBUG_EXTRA_CHECK
  593. if(f == nullptr) FATAL_ERROR();
  594. #endif
  595. return f(vm, args);
  596. }
// Instantiate the cast/boxing glue for every built-in boxed value type.
DEF_NATIVE_2(Str, tp_str)
DEF_NATIVE_2(List, tp_list)
DEF_NATIVE_2(Tuple, tp_tuple)
DEF_NATIVE_2(Function, tp_function)
DEF_NATIVE_2(NativeFunc, tp_native_func)
DEF_NATIVE_2(BoundMethod, tp_bound_method)
DEF_NATIVE_2(Range, tp_range)
DEF_NATIVE_2(Slice, tp_slice)
DEF_NATIVE_2(Exception, tp_exception)
DEF_NATIVE_2(Bytes, tp_bytes)
DEF_NATIVE_2(MappingProxy, tp_mappingproxy)
DEF_NATIVE_2(Dict, tp_dict)
DEF_NATIVE_2(Property, tp_property)
DEF_NATIVE_2(StarWrapper, tp_star_wrapper)
#undef DEF_NATIVE_2
// PY_CAST_INT(T): unboxing for tagged small integers. Ints are stored in the
// pointer bits themselves behind a 2-bit tag, hence the arithmetic `>> 2`.
// py_cast checks the tag (raising TypeError); _py_cast trusts the caller.
#define PY_CAST_INT(T) \
template<> inline T py_cast<T>(VM* vm, PyObject* obj){ \
    vm->check_int(obj); \
    return (T)(PK_BITS(obj) >> 2); \
} \
template<> inline T _py_cast<T>(VM* vm, PyObject* obj){ \
    return (T)(PK_BITS(obj) >> 2); \
}
PY_CAST_INT(char)
PY_CAST_INT(short)
PY_CAST_INT(int)
PY_CAST_INT(long)
PY_CAST_INT(long long)
PY_CAST_INT(unsigned char)
PY_CAST_INT(unsigned short)
PY_CAST_INT(unsigned int)
PY_CAST_INT(unsigned long)
PY_CAST_INT(unsigned long long)
  630. template<> inline float py_cast<float>(VM* vm, PyObject* obj){
  631. vm->check_float(obj);
  632. i64 bits = PK_BITS(obj) & Number::c1;
  633. return BitsCvt(bits)._float;
  634. }
  635. template<> inline float _py_cast<float>(VM* vm, PyObject* obj){
  636. i64 bits = PK_BITS(obj) & Number::c1;
  637. return BitsCvt(bits)._float;
  638. }
  639. template<> inline double py_cast<double>(VM* vm, PyObject* obj){
  640. vm->check_float(obj);
  641. i64 bits = PK_BITS(obj) & Number::c1;
  642. return BitsCvt(bits)._float;
  643. }
  644. template<> inline double _py_cast<double>(VM* vm, PyObject* obj){
  645. i64 bits = PK_BITS(obj) & Number::c1;
  646. return BitsCvt(bits)._float;
  647. }
  648. #define PY_VAR_INT(T) \
  649. inline PyObject* py_var(VM* vm, T _val){ \
  650. i64 val = static_cast<i64>(_val); \
  651. if(((val << 2) >> 2) != val){ \
  652. vm->_error("OverflowError", std::to_string(val) + " is out of range"); \
  653. } \
  654. val = (val << 2) | 0b01; \
  655. return reinterpret_cast<PyObject*>(val); \
  656. }
  657. PY_VAR_INT(char)
  658. PY_VAR_INT(short)
  659. PY_VAR_INT(int)
  660. PY_VAR_INT(long)
  661. PY_VAR_INT(long long)
  662. PY_VAR_INT(unsigned char)
  663. PY_VAR_INT(unsigned short)
  664. PY_VAR_INT(unsigned int)
  665. PY_VAR_INT(unsigned long)
  666. PY_VAR_INT(unsigned long long)
// PY_VAR_FLOAT defines py_var() for a floating type T. The f64 bit pattern
// is truncated to its upper 62 bits (mask Number::c1) so the low two bits
// can hold the float tag 0b10. The two mantissa bits that are dropped
// (mask Number::c2) are used to round the retained value: tail 0b11 always
// rounds up, and the halfway tail 0b10 rounds up only when the lowest kept
// bit (0b100) is set — i.e. round-half-to-even on the kept mantissa.
// (Comments stay outside the macro; see PY_VAR_INT above.)
#define PY_VAR_FLOAT(T) \
inline PyObject* py_var(VM* vm, T _val){ \
    BitsCvt val(static_cast<f64>(_val)); \
    i64 bits = val._int & Number::c1; \
    i64 tail = val._int & Number::c2; \
    if(tail == 0b10){ \
        if(bits&0b100) bits += 0b100; \
    }else if(tail == 0b11){ \
        bits += 0b100; \
    } \
    bits |= 0b10; \
    return reinterpret_cast<PyObject*>(bits); \
}
// Instantiate py_var() for the floating types, then retire both helper
// macros — they are not needed past this point.
PY_VAR_FLOAT(float)
PY_VAR_FLOAT(double)
#undef PY_VAR_INT
#undef PY_VAR_FLOAT
  684. inline PyObject* py_var(VM* vm, bool val){
  685. return val ? vm->True : vm->False;
  686. }
  687. template<> inline bool py_cast<bool>(VM* vm, PyObject* obj){
  688. if(obj == vm->True) return true;
  689. if(obj == vm->False) return false;
  690. vm->check_non_tagged_type(obj, vm->tp_bool);
  691. return false;
  692. }
  693. template<> inline bool _py_cast<bool>(VM* vm, PyObject* obj){
  694. return obj == vm->True;
  695. }
// py_var overloads for string-like values; all funnel into the Str overload.
inline PyObject* py_var(VM* vm, const char val[]){
    return VAR(Str(val));
}
inline PyObject* py_var(VM* vm, std::string val){
    // taken by value so the caller's string can be moved into the Str
    return VAR(Str(std::move(val)));
}
inline PyObject* py_var(VM* vm, std::string_view val){
    return VAR(Str(val));
}
// NoReturn maps to None (used by native functions that return nothing).
inline PyObject* py_var(VM* vm, NoReturn val){
    return vm->None;
}
// Identity overload so generic code can call py_var on an existing object.
inline PyObject* py_var(VM* vm, PyObject* val){
    return val;
}
  711. inline PyObject* VM::py_negate(PyObject* obj){
  712. const PyTypeInfo* ti = _inst_type_info(obj);
  713. if(ti->m__neg__) return ti->m__neg__(this, obj);
  714. return call_method(obj, __neg__);
  715. }
  716. inline f64 VM::num_to_float(PyObject* obj){
  717. if(is_float(obj)){
  718. return _CAST(f64, obj);
  719. } else if (is_int(obj)){
  720. return (f64)_CAST(i64, obj);
  721. }
  722. TypeError("expected 'int' or 'float', got " + OBJ_NAME(_t(obj)).escape());
  723. return 0;
  724. }
  725. inline bool VM::py_bool(PyObject* obj){
  726. if(is_non_tagged_type(obj, tp_bool)) return obj == True;
  727. if(obj == None) return false;
  728. if(is_int(obj)) return _CAST(i64, obj) != 0;
  729. if(is_float(obj)) return _CAST(f64, obj) != 0.0;
  730. PyObject* self;
  731. PyObject* len_f = get_unbound_method(obj, __len__, &self, false);
  732. if(self != PY_NULL){
  733. PyObject* ret = call_method(self, len_f);
  734. return CAST(i64, ret) > 0;
  735. }
  736. return true;
  737. }
  738. inline PyObject* VM::py_list(PyObject* it){
  739. auto _lock = heap.gc_scope_lock();
  740. it = py_iter(it);
  741. List list;
  742. PyObject* obj = py_next(it);
  743. while(obj != StopIteration){
  744. list.push_back(obj);
  745. obj = py_next(it);
  746. }
  747. return VAR(std::move(list));
  748. }
// Normalize a Python slice against a sequence of `length` elements into
// concrete (start, stop, step) ints, following CPython's clamping rules.
// For step > 0 iteration covers [start, stop); for step < 0 it runs
// downward with stop as an exclusive lower bound (default -1).
inline void VM::parse_int_slice(const Slice& s, int length, int& start, int& stop, int& step){
    auto clip = [](int value, int min, int max){
        if(value < min) return min;
        if(value > max) return max;
        return value;
    };
    if(s.step == None) step = 1;
    else step = CAST(int, s.step);
    if(step == 0) ValueError("slice step cannot be zero");
    if(step > 0){
        if(s.start == None){
            start = 0;
        }else{
            start = CAST(int, s.start);
            if(start < 0) start += length;  // negative index counts from the end
            start = clip(start, 0, length);
        }
        if(s.stop == None){
            stop = length;
        }else{
            stop = CAST(int, s.stop);
            if(stop < 0) stop += length;
            stop = clip(stop, 0, length);
        }
    }else{
        // negative step: defaults/clamps mirror the forward case shifted by one
        if(s.start == None){
            start = length - 1;
        }else{
            start = CAST(int, s.start);
            if(start < 0) start += length;
            start = clip(start, -1, length - 1);
        }
        if(s.stop == None){
            stop = -1;
        }else{
            stop = CAST(int, s.stop);
            if(stop < 0) stop += length;
            stop = clip(stop, -1, length - 1);
        }
    }
}
  790. inline i64 VM::py_hash(PyObject* obj){
  791. const PyTypeInfo* ti = _inst_type_info(obj);
  792. if(ti->m__hash__) return ti->m__hash__(this, obj);
  793. PyObject* ret = call_method(obj, __hash__);
  794. return CAST(i64, ret);
  795. }
  796. inline PyObject* VM::format(Str spec, PyObject* obj){
  797. if(spec.empty()) return py_str(obj);
  798. char type;
  799. switch(spec.end()[-1]){
  800. case 'f': case 'd': case 's':
  801. type = spec.end()[-1];
  802. spec = spec.substr(0, spec.length() - 1);
  803. break;
  804. default: type = ' '; break;
  805. }
  806. char pad_c = ' ';
  807. if(spec[0] == '0'){
  808. pad_c = '0';
  809. spec = spec.substr(1);
  810. }
  811. char align;
  812. if(spec[0] == '>'){
  813. align = '>';
  814. spec = spec.substr(1);
  815. }else if(spec[0] == '<'){
  816. align = '<';
  817. spec = spec.substr(1);
  818. }else{
  819. if(is_int(obj) || is_float(obj)) align = '>';
  820. else align = '<';
  821. }
  822. int dot = spec.index(".");
  823. int width, precision;
  824. try{
  825. if(dot >= 0){
  826. width = Number::stoi(spec.substr(0, dot).str());
  827. precision = Number::stoi(spec.substr(dot+1).str());
  828. }else{
  829. width = Number::stoi(spec.str());
  830. precision = -1;
  831. }
  832. }catch(...){
  833. ValueError("invalid format specifer");
  834. }
  835. if(type != 'f' && dot >= 0) ValueError("precision not allowed in the format specifier");
  836. Str ret;
  837. if(type == 'f'){
  838. f64 val = num_to_float(obj);
  839. if(precision < 0) precision = 6;
  840. std::stringstream ss;
  841. ss << std::fixed << std::setprecision(precision) << val;
  842. ret = ss.str();
  843. }else if(type == 'd'){
  844. ret = std::to_string(CAST(i64, obj));
  845. }else if(type == 's'){
  846. ret = CAST(Str&, obj);
  847. }else{
  848. ret = CAST(Str&, py_str(obj));
  849. }
  850. if(width > ret.length()){
  851. int pad = width - ret.length();
  852. std::string padding(pad, pad_c);
  853. if(align == '>') ret = padding.c_str() + ret;
  854. else ret = ret + padding.c_str();
  855. }
  856. return VAR(ret);
  857. }
  858. inline PyObject* VM::new_module(StrName name) {
  859. PyObject* obj = heap._new<DummyModule>(tp_module, DummyModule());
  860. obj->attr().set("__name__", VAR(name.sv()));
  861. // we do not allow override in order to avoid memory leak
  862. // it is because Module objects are not garbage collected
  863. if(_modules.contains(name)) throw std::runtime_error("module already exists");
  864. _modules.set(name, obj);
  865. return obj;
  866. }
// Render a bytecode instruction's argument as a human-readable string for
// disassembly/tracing. `vm` may be nullptr, in which case constants are not
// repr()'d (used by the ceval step logger, where no VM call is safe).
inline std::string _opcode_argstr(VM* vm, Bytecode byte, const CodeObject* co){
    std::string argStr = byte.arg == -1 ? "" : std::to_string(byte.arg);
    switch(byte.op){
        case OP_LOAD_CONST:
            if(vm != nullptr){
                argStr += fmt(" (", CAST(Str, vm->py_repr(co->consts[byte.arg])), ")");
            }
            break;
        // ops whose arg is an interned-name index
        case OP_LOAD_NAME: case OP_LOAD_GLOBAL: case OP_LOAD_NONLOCAL: case OP_STORE_GLOBAL:
        case OP_LOAD_ATTR: case OP_LOAD_METHOD: case OP_STORE_ATTR: case OP_DELETE_ATTR:
        case OP_IMPORT_NAME: case OP_BEGIN_CLASS: case OP_RAISE:
        case OP_DELETE_GLOBAL: case OP_INC_GLOBAL: case OP_DEC_GLOBAL:
            argStr += fmt(" (", StrName(byte.arg).sv(), ")");
            break;
        // ops whose arg indexes the code object's local variable names
        case OP_LOAD_FAST: case OP_STORE_FAST: case OP_DELETE_FAST: case OP_INC_FAST: case OP_DEC_FAST:
            argStr += fmt(" (", co->varnames[byte.arg].sv(), ")");
            break;
        case OP_LOAD_FUNCTION:
            argStr += fmt(" (", co->func_decls[byte.arg]->code->name, ")");
            break;
    }
    return argStr;
}
// Produce a CPython-dis-like listing of a code object: source line, a "->"
// marker on jump targets, instruction index, opcode name and rendered arg.
// Nested function declarations are disassembled recursively at the end.
inline Str VM::disassemble(CodeObject_ co){
    // pad/truncate a string to exactly n columns
    auto pad = [](const Str& s, const int n){
        if(s.length() >= n) return s.substr(0, n);
        return s + std::string(n - s.length(), ' ');
    };
    // collect targets of (absolute) jump instructions so they can be marked
    std::vector<int> jumpTargets;
    for(auto byte : co->codes){
        if(byte.op == OP_JUMP_ABSOLUTE || byte.op == OP_POP_JUMP_IF_FALSE || byte.op == OP_SHORTCUT_IF_FALSE_OR_POP){
            jumpTargets.push_back(byte.arg);
        }
    }
    std::stringstream ss;
    int prev_line = -1;
    for(int i=0; i<co->codes.size(); i++){
        const Bytecode& byte = co->codes[i];
        // print the source line number only when it changes, with a blank
        // separator line between source lines
        Str line = std::to_string(co->lines[i]);
        if(co->lines[i] == prev_line) line = "";
        else{
            if(prev_line != -1) ss << "\n";
            prev_line = co->lines[i];
        }
        std::string pointer;
        if(std::find(jumpTargets.begin(), jumpTargets.end(), i) != jumpTargets.end()){
            pointer = "-> ";
        }else{
            pointer = "   ";
        }
        ss << pad(line, 8) << pointer << pad(std::to_string(i), 3);
        ss << " " << pad(OP_NAMES[byte.op], 25) << " ";
        // ss << pad(byte.arg == -1 ? "" : std::to_string(byte.arg), 5);
        std::string argStr = _opcode_argstr(this, byte, co.get());
        ss << argStr;
        // ss << pad(argStr, 40);      // may overflow
        // ss << co->blocks[byte.block].type;
        if(i != co->codes.size() - 1) ss << '\n';
    }
    // recurse into nested function declarations
    for(auto& decl: co->func_decls){
        ss << "\n\n" << "Disassembly of " << decl->code->name << ":\n";
        ss << disassemble(decl->code);
    }
    ss << "\n";
    return Str(ss.str());
}
#if PK_DEBUG_CEVAL_STEP
// Debug-only: dump the value stack before each ceval step. Frame base
// pointers are drawn as '|' separators so frame boundaries are visible,
// followed by a short repr of each stack slot and the next opcode.
inline void VM::_log_s_data(const char* title) {
    if(_main == nullptr) return;
    if(callstack.empty()) return;
    std::stringstream ss;
    if(title) ss << title << " | ";
    // count how many frames start at each stack slot
    std::map<PyObject**, int> sp_bases;
    for(Frame& f: callstack.data()){
        if(f._sp_base == nullptr) FATAL_ERROR();
        sp_bases[f._sp_base] += 1;
    }
    FrameId frame = top_frame();
    int line = frame->co->lines[frame->_ip];
    ss << frame->co->name << ":" << line << " [";
    for(PyObject** p=s_data.begin(); p!=s_data.end(); p++){
        ss << std::string(sp_bases[p], '|');
        if(sp_bases[p] > 0) ss << " ";
        PyObject* obj = *p;
        // cheap, allocation-light reprs only — this runs on every step
        if(obj == nullptr) ss << "(nil)";
        else if(obj == PY_NULL) ss << "NULL";
        else if(is_int(obj)) ss << CAST(i64, obj);
        else if(is_float(obj)) ss << CAST(f64, obj);
        else if(is_type(obj, tp_str)) ss << CAST(Str, obj).escape();
        else if(obj == None) ss << "None";
        else if(obj == True) ss << "True";
        else if(obj == False) ss << "False";
        else if(is_type(obj, tp_function)){
            auto& f = CAST(Function&, obj);
            ss << f.decl->code->name << "(...)";
        } else if(is_type(obj, tp_type)){
            Type t = PK_OBJ_GET(Type, obj);
            ss << "<class " + _all_types[t].name.escape() + ">";
        } else if(is_type(obj, tp_list)){
            auto& t = CAST(List&, obj);
            ss << "list(size=" << t.size() << ")";
        } else if(is_type(obj, tp_tuple)){
            auto& t = CAST(Tuple&, obj);
            ss << "tuple(size=" << t.size() << ")";
        } else ss << "(" << obj_type_name(this, obj->type) << ")";
        ss << ", ";
    }
    std::string output = ss.str();
    if(!s_data.empty()) {
        // strip the trailing ", "
        output.pop_back(); output.pop_back();
    }
    output.push_back(']');
    Bytecode byte = frame->co->codes[frame->_ip];
    // vm=nullptr: _opcode_argstr must not re-enter the VM from here
    std::cout << output << " " << OP_NAMES[byte.op] << " " << _opcode_argstr(nullptr, byte, frame->co) << std::endl;
}
#endif
// Bootstrap the type system and the builtins module. Creation order matters:
// object/type are hand-wired first, then int/float must land exactly at
// kTpIntIndex/kTpFloatIndex (the tagged-value fast paths depend on it).
inline void VM::init_builtin_types(){
    // object (base of everything) and type, mutually referential
    _all_types.push_back({heap._new<Type>(Type(1), Type(0)), -1, "object", true});
    _all_types.push_back({heap._new<Type>(Type(1), Type(1)), 0, "type", false});
    tp_object = 0; tp_type = 1;
    tp_int = _new_type_object("int");
    tp_float = _new_type_object("float");
    if(tp_int.index != kTpIntIndex || tp_float.index != kTpFloatIndex) FATAL_ERROR();
    tp_bool = _new_type_object("bool");
    tp_str = _new_type_object("str");
    tp_list = _new_type_object("list");
    tp_tuple = _new_type_object("tuple");
    tp_slice = _new_type_object("slice");
    tp_range = _new_type_object("range");
    tp_module = _new_type_object("module");
    tp_function = _new_type_object("function");
    tp_native_func = _new_type_object("native_func");
    tp_bound_method = _new_type_object("bound_method");
    tp_super = _new_type_object("super");
    tp_exception = _new_type_object("Exception");
    tp_bytes = _new_type_object("bytes");
    tp_mappingproxy = _new_type_object("mappingproxy");
    tp_dict = _new_type_object("dict");
    tp_property = _new_type_object("property");
    tp_star_wrapper = _new_type_object("_star_wrapper");
    // singleton objects, each with its own private type where needed
    this->None = heap._new<Dummy>(_new_type_object("NoneType"), {});
    this->NotImplemented = heap._new<Dummy>(_new_type_object("NotImplementedType"), {});
    this->Ellipsis = heap._new<Dummy>(_new_type_object("ellipsis"), {});
    this->True = heap._new<Dummy>(tp_bool, {});
    this->False = heap._new<Dummy>(tp_bool, {});
    this->StopIteration = heap._new<Dummy>(_new_type_object("StopIterationType"), {});
    this->builtins = new_module("builtins");
    // setup public types
    builtins->attr().set("type", _t(tp_type));
    builtins->attr().set("object", _t(tp_object));
    builtins->attr().set("bool", _t(tp_bool));
    builtins->attr().set("int", _t(tp_int));
    builtins->attr().set("float", _t(tp_float));
    builtins->attr().set("str", _t(tp_str));
    builtins->attr().set("list", _t(tp_list));
    builtins->attr().set("tuple", _t(tp_tuple));
    builtins->attr().set("range", _t(tp_range));
    builtins->attr().set("bytes", _t(tp_bytes));
    builtins->attr().set("dict", _t(tp_dict));
    builtins->attr().set("property", _t(tp_property));
    builtins->attr().set("StopIteration", StopIteration);
    builtins->attr().set("NotImplemented", NotImplemented);
    builtins->attr().set("slice", _t(tp_slice));
    post_init();
    // attribute tables are now final: switch them to perfect hashing
    for(int i=0; i<_all_types.size(); i++){
        _all_types[i].obj->attr()._try_perfect_rehash();
    }
    for(auto [k, v]: _modules.items()) v->attr()._try_perfect_rehash();
    this->_main = new_module("__main__");
}
// `heap.gc_scope_lock();` needed before calling this function
// Flatten an argument view into `list`, expanding any level-1 star
// wrappers (i.e. `*iterable` arguments) by iterating them.
inline void VM::_unpack_as_list(ArgsView args, List& list){
    for(PyObject* obj: args){
        if(is_non_tagged_type(obj, tp_star_wrapper)){
            const StarWrapper& w = _CAST(StarWrapper&, obj);
            // maybe this check should be done in the compile time
            if(w.level != 1) TypeError("expected level 1 star wrapper");
            PyObject* _0 = py_iter(w.obj);
            PyObject* _1 = py_next(_0);
            while(_1 != StopIteration){
                list.push_back(_1);
                _1 = py_next(_0);
            }
        }else{
            list.push_back(obj);
        }
    }
}
// `heap.gc_scope_lock();` needed before calling this function
// Build `dict` from an argument view of (key, value) tuples, expanding any
// level-2 star wrappers (i.e. `**mapping` arguments) by merging them in.
inline void VM::_unpack_as_dict(ArgsView args, Dict& dict){
    for(PyObject* obj: args){
        if(is_non_tagged_type(obj, tp_star_wrapper)){
            const StarWrapper& w = _CAST(StarWrapper&, obj);
            // maybe this check should be done in the compile time
            if(w.level != 2) TypeError("expected level 2 star wrapper");
            const Dict& other = CAST(Dict&, w.obj);
            dict.update(other);
        }else{
            // plain entries arrive as 2-tuples of (key, value)
            const Tuple& t = CAST(Tuple&, obj);
            if(t.size() != 2) TypeError("expected tuple of length 2");
            dict.set(t[0], t[1]);
        }
    }
}
// Core call protocol. On entry the value stack holds, from p0 upward:
//   [callable, <self or PY_NULL>, ARGC positional args, KWARGC (key,value) pairs]
// Dispatches on the callable's kind: native_func, function (bytecode),
// type (construction via __new__/__init__), or a __call__ overload.
// If `op_call` is true and a new frame was pushed, returns PY_OP_CALL so
// the interpreter loop continues in the new frame instead of recursing.
inline PyObject* VM::vectorcall(int ARGC, int KWARGC, bool op_call){
    PyObject** p1 = s_data._sp - KWARGC*2;
    PyObject** p0 = p1 - ARGC - 2;
    // [callable, <self>, args..., kwargs...]
    //      ^p0                    ^p1      ^_sp
    PyObject* callable = p1[-(ARGC + 2)];
    bool method_call = p1[-(ARGC + 1)] != PY_NULL;
    // handle boundmethod, do a patch
    if(is_non_tagged_type(callable, tp_bound_method)){
        if(method_call) FATAL_ERROR();
        auto& bm = CAST(BoundMethod&, callable);
        callable = bm.func;         // get unbound method
        p1[-(ARGC + 2)] = bm.func;
        p1[-(ARGC + 1)] = bm.self;
        method_call = true;
        // [unbound, self, args..., kwargs...]
    }
    // for a method call, self becomes an extra leading positional argument
    ArgsView args(p1 - ARGC - int(method_call), p1);
    if(is_non_tagged_type(callable, tp_native_func)){
        const auto& f = PK_OBJ_GET(NativeFunc, callable);
        if(KWARGC != 0) TypeError("native_func does not accept keyword arguments");
        PyObject* ret = f(this, args);
        s_data.reset(p0);           // pop the entire call window
        return ret;
    }
    ArgsView kwargs(p1, s_data._sp);
    if(is_non_tagged_type(callable, tp_function)){
        /*****************_py_call*****************/
        // callable must be a `function` object
        if(s_data.is_overflow()) StackOverflowError();
        const Function& fn = CAST(Function&, callable);
        const CodeObject* co = fn.decl->code.get();
        int co_nlocals = co->varnames.size();
        if(args.size() < fn.argc){
            vm->TypeError(fmt(
                "expected ", fn.argc, " positional arguments, got ", args.size(),
                " (", fn.decl->code->name, ')'
            ));
        }
        // if this function is simple, a.k.a, no kwargs and no *args and not a generator
        // we can use a fast path to avoid using buffer copy
        if(fn.is_simple){
            if(args.size() > fn.argc) TypeError("too many positional arguments");
            // locals live directly on the stack; pad the non-arg slots
            int spaces = co_nlocals - fn.argc;
            for(int j=0; j<spaces; j++) PUSH(PY_NULL);
            callstack.emplace(&s_data, p0, co, fn._module, callable, FastLocals(co, args.begin()));
            if(op_call) return PY_OP_CALL;
            return _run_top_frame();
        }
        // general path: lay the locals out in a temporary buffer first
        int i = 0;
        static THREAD_LOCAL PyObject* buffer[PK_MAX_CO_VARNAMES];
        // prepare args
        for(int index: fn.decl->args) buffer[index] = args[i++];
        // set extra varnames to nullptr
        for(int j=i; j<co_nlocals; j++) buffer[j] = PY_NULL;
        // prepare kwdefaults
        for(auto& kv: fn.decl->kwargs) buffer[kv.key] = kv.value;
        // handle *args
        if(fn.decl->starred_arg != -1){
            ArgsView vargs(args.begin() + i, args.end());
            buffer[fn.decl->starred_arg] = VAR(vargs.to_tuple());
            i += vargs.size();
        }else{
            // kwdefaults override
            for(auto& kv: fn.decl->kwargs){
                if(i >= args.size()) break;
                buffer[kv.key] = args[i++];
            }
            if(i < args.size()) TypeError(fmt("too many arguments", " (", fn.decl->code->name, ')'));
        }
        // **kwargs bucket, when declared
        PyObject* vkwargs;
        if(fn.decl->starred_kwarg != -1){
            vkwargs = VAR(Dict(this));
            buffer[fn.decl->starred_kwarg] = vkwargs;
        }else{
            vkwargs = nullptr;
        }
        // route each keyword argument to its named slot or into **kwargs
        for(int i=0; i<kwargs.size(); i+=2){
            StrName key(CAST(int, kwargs[i]));
            int index = co->varnames_inv.try_get(key);
            if(index < 0){
                if(vkwargs == nullptr){
                    TypeError(fmt(key.escape(), " is an invalid keyword argument for ", co->name, "()"));
                }else{
                    Dict& dict = _CAST(Dict&, vkwargs);
                    dict.set(VAR(key.sv()), kwargs[i+1]);
                }
            }else{
                buffer[index] = kwargs[i+1];
            }
        }
        if(co->is_generator){
            s_data.reset(p0);
            return _py_generator(
                Frame(&s_data, nullptr, co, fn._module, callable),
                ArgsView(buffer, buffer + co_nlocals)
            );
        }
        // copy buffer back to stack
        s_data.reset(args.begin());
        for(int i=0; i<co_nlocals; i++) PUSH(buffer[i]);
        callstack.emplace(&s_data, p0, co, fn._module, callable, FastLocals(co, args.begin()));
        if(op_call) return PY_OP_CALL;
        return _run_top_frame();
        /*****************_py_call*****************/
    }
    if(is_non_tagged_type(callable, tp_type)){
        if(method_call) FATAL_ERROR();
        // [type, NULL, args..., kwargs...]
        DEF_SNAME(__new__);
        PyObject* new_f = find_name_in_mro(callable, __new__);
        PyObject* obj;
#if PK_DEBUG_EXTRA_CHECK
        PK_ASSERT(new_f != nullptr);
#endif
        if(new_f == cached_object__new__) {
            // fast path for object.__new__
            Type t = PK_OBJ_GET(Type, callable);
            obj= vm->heap.gcnew<DummyInstance>(t, {});
        }else{
            // re-run vectorcall with __new__ as the callable and the class
            // prepended as its first positional argument
            PUSH(new_f);
            PUSH(PY_NULL);
            PUSH(callable);     // cls
            for(PyObject* obj: args) PUSH(obj);
            for(PyObject* obj: kwargs) PUSH(obj);
            // if obj is not an instance of callable, the behavior is undefined
            obj = vectorcall(ARGC+1, KWARGC);
        }
        // __init__
        PyObject* self;
        DEF_SNAME(__init__);
        callable = get_unbound_method(obj, __init__, &self, false);
        if (self != PY_NULL) {
            // replace `NULL` with `self`
            p1[-(ARGC + 2)] = callable;
            p1[-(ARGC + 1)] = self;
            // [init_f, self, args..., kwargs...]
            vectorcall(ARGC, KWARGC);
            // We just discard the return value of `__init__`
            // in cpython it raises a TypeError if the return value is not None
        }else{
            // manually reset the stack
            s_data.reset(p0);
        }
        return obj;
    }
    // handle `__call__` overload
    PyObject* self;
    DEF_SNAME(__call__);
    PyObject* call_f = get_unbound_method(callable, __call__, &self, false);
    if(self != PY_NULL){
        p1[-(ARGC + 2)] = call_f;
        p1[-(ARGC + 1)] = self;
        // [call_f, self, args..., kwargs...]
        return vectorcall(ARGC, KWARGC, false);
    }
    TypeError(OBJ_NAME(_t(callable)).escape() + " object is not callable");
    return nullptr;
}
// https://docs.python.org/3/howto/descriptor.html#invocation-from-an-instance
// Attribute lookup. Precedence: data descriptor (property) on the type,
// then the instance __dict__, then other class attributes (functions bind
// into BoundMethod here). Returns nullptr (or raises, if throw_err) when
// the attribute does not exist.
inline PyObject* VM::getattr(PyObject* obj, StrName name, bool throw_err){
    PyObject* objtype;
    // handle super() proxy
    if(is_non_tagged_type(obj, tp_super)){
        const Super& super = PK_OBJ_GET(Super, obj);
        obj = super.first;
        objtype = _t(super.second);     // lookup starts above the current class
    }else{
        objtype = _t(obj);
    }
    PyObject* cls_var = find_name_in_mro(objtype, name);
    if(cls_var != nullptr){
        // handle descriptor
        if(is_non_tagged_type(cls_var, tp_property)){
            const Property& prop = _CAST(Property&, cls_var);
            return call(prop.getter, obj);
        }
    }
    // handle instance __dict__
    if(!is_tagged(obj) && obj->is_attr_valid()){
        PyObject* val = obj->attr().try_get(name);
        if(val != nullptr) return val;
    }
    if(cls_var != nullptr){
        // bound method is non-data descriptor
        if(is_non_tagged_type(cls_var, tp_function) || is_non_tagged_type(cls_var, tp_native_func)){
            return VAR(BoundMethod(obj, cls_var));
        }
        return cls_var;
    }
    if(throw_err) AttributeError(obj, name);
    return nullptr;
}
// used by OP_LOAD_METHOD
// try to load a unbound method (fallback to `getattr` if not found)
// On success *self is set to the receiver and the raw function is returned
// (avoiding a BoundMethod allocation); otherwise *self stays PY_NULL and
// the attribute value (or nullptr / AttributeError) is produced as getattr
// would. `fallback=false` skips properties and the instance __dict__.
inline PyObject* VM::get_unbound_method(PyObject* obj, StrName name, PyObject** self, bool throw_err, bool fallback){
    *self = PY_NULL;
    PyObject* objtype;
    // handle super() proxy
    if(is_non_tagged_type(obj, tp_super)){
        const Super& super = PK_OBJ_GET(Super, obj);
        obj = super.first;
        objtype = _t(super.second);
    }else{
        objtype = _t(obj);
    }
    PyObject* cls_var = find_name_in_mro(objtype, name);
    if(fallback){
        if(cls_var != nullptr){
            // handle descriptor
            if(is_non_tagged_type(cls_var, tp_property)){
                const Property& prop = _CAST(Property&, cls_var);
                return call(prop.getter, obj);
            }
        }
        // handle instance __dict__
        if(!is_tagged(obj) && obj->is_attr_valid()){
            PyObject* val = obj->attr().try_get(name);
            if(val != nullptr) return val;
        }
    }
    if(cls_var != nullptr){
        // functions bind lazily: report the receiver via *self instead of
        // allocating a BoundMethod
        if(is_non_tagged_type(cls_var, tp_function) || is_non_tagged_type(cls_var, tp_native_func)){
            *self = obj;
        }
        return cls_var;
    }
    if(throw_err) AttributeError(obj, name);
    return nullptr;
}
// Attribute assignment. A property on the type wins (its setter is called,
// or TypeError for a read-only property); otherwise the value goes into the
// instance __dict__ when the object has one.
inline void VM::setattr(PyObject* obj, StrName name, PyObject* value){
    PyObject* objtype;
    // handle super() proxy
    if(is_non_tagged_type(obj, tp_super)){
        Super& super = PK_OBJ_GET(Super, obj);
        obj = super.first;
        objtype = _t(super.second);
    }else{
        objtype = _t(obj);
    }
    PyObject* cls_var = find_name_in_mro(objtype, name);
    if(cls_var != nullptr){
        // handle descriptor
        if(is_non_tagged_type(cls_var, tp_property)){
            const Property& prop = _CAST(Property&, cls_var);
            if(prop.setter != vm->None){
                call(prop.setter, obj, value);
            }else{
                TypeError(fmt("readonly attribute: ", name.escape()));
            }
            return;
        }
    }
    // handle instance __dict__
    if(is_tagged(obj) || !obj->is_attr_valid()) TypeError("cannot set attribute");
    obj->attr().set(name, value);
}
  1328. template<int ARGC>
  1329. PyObject* VM::bind_method(PyObject* obj, Str name, NativeFuncC fn) {
  1330. check_non_tagged_type(obj, tp_type);
  1331. PyObject* nf = VAR(NativeFunc(fn, ARGC, true));
  1332. obj->attr().set(name, nf);
  1333. return nf;
  1334. }
  1335. template<int ARGC>
  1336. PyObject* VM::bind_func(PyObject* obj, Str name, NativeFuncC fn) {
  1337. PyObject* nf = VAR(NativeFunc(fn, ARGC, false));
  1338. obj->attr().set(name, nf);
  1339. return nf;
  1340. }
  1341. inline void VM::_error(Exception e){
  1342. if(callstack.empty()){
  1343. e.is_re = false;
  1344. throw e;
  1345. }
  1346. PUSH(VAR(e));
  1347. _raise();
  1348. }
// GC root-marking phase: permanent (no-gc) objects, everything reachable
// from the call stack and the value stack, host-registered extra roots,
// and the last pending exception.
inline void ManagedHeap::mark() {
    for(PyObject* obj: _no_gc) OBJ_MARK(obj);
    for(auto& frame : vm->callstack.data()) frame._gc_mark();
    for(PyObject* obj: vm->s_data) OBJ_MARK(obj);
    if(_gc_marker_ex) _gc_marker_ex(vm);    // extra roots registered by the embedder
    if(vm->_last_exception) OBJ_MARK(vm->_last_exception);
}
// Human-readable name of a type, as registered in the VM's type table.
inline Str obj_type_name(VM *vm, Type type){
    return vm->_all_types[type].name;
}
  1359. #undef PY_VAR_INT
  1360. #undef PY_VAR_FLOAT
  1361. /***************************************************/
// Shared __getitem__ implementation for list and tuple.
// Supports slice indexing (building a new container of the same type T)
// and integer indexing with negative-index normalization.
template<typename T>
PyObject* PyArrayGetItem(VM* vm, PyObject* obj, PyObject* index){
    static_assert(std::is_same_v<T, List> || std::is_same_v<T, Tuple>);
    const T& self = _CAST(T&, obj);
    if(is_non_tagged_type(index, vm->tp_slice)){
        const Slice& s = _CAST(Slice&, index);
        int start, stop, step;
        vm->parse_int_slice(s, self.size(), start, stop, step);
        List new_list;
        // direction of the stop test depends on the sign of step
        for(int i=start; step>0?i<stop:i>stop; i+=step) new_list.push_back(self[i]);
        return VAR(T(std::move(new_list)));
    }
    int i = CAST(int, index);
    i = vm->normalized_index(i, self.size());   // handles negatives, raises on OOB
    return self[i];
}
// Install a C-level __hash__ slot on `type` and mirror it as a callable
// __hash__ method. The raw function pointer is smuggled through the native
// function's userdata so the lambda can stay capture-free.
inline void VM::bind__hash__(Type type, i64 (*f)(VM*, PyObject*)){
    PyObject* obj = _t(type);
    _all_types[type].m__hash__ = f;
    PyObject* nf = bind_method<0>(obj, "__hash__", [](VM* vm, ArgsView args){
        i64 ret = lambda_get_userdata<i64(*)(VM*, PyObject*)>(args.begin())(vm, args[0]);
        return VAR(ret);
    });
    PK_OBJ_GET(NativeFunc, nf).set_userdata(f);
}
// Same pattern as bind__hash__, for the __len__ slot.
inline void VM::bind__len__(Type type, i64 (*f)(VM*, PyObject*)){
    PyObject* obj = _t(type);
    _all_types[type].m__len__ = f;
    PyObject* nf = bind_method<0>(obj, "__len__", [](VM* vm, ArgsView args){
        i64 ret = lambda_get_userdata<i64(*)(VM*, PyObject*)>(args.begin())(vm, args[0]);
        return VAR(ret);
    });
    PK_OBJ_GET(NativeFunc, nf).set_userdata(f);
}
  1396. inline void Dict::_probe(PyObject *key, bool &ok, int &i) const{
  1397. ok = false;
  1398. i = vm->py_hash(key) & _mask;
  1399. while(_items[i].first != nullptr) {
  1400. if(vm->py_equals(_items[i].first, key)) { ok = true; break; }
  1401. i = (i + 1) & _mask;
  1402. }
  1403. }
// Serialize a constant object for code-object caching. Only immutable
// builtin constants are supported; any other type is a hard (C++) error.
inline void CodeObjectSerializer::write_object(VM *vm, PyObject *obj){
    if(is_int(obj)) write_int(_CAST(i64, obj));
    else if(is_float(obj)) write_float(_CAST(f64, obj));
    else if(is_type(obj, vm->tp_str)) write_str(_CAST(Str&, obj));
    else if(is_type(obj, vm->tp_bool)) write_bool(_CAST(bool, obj));
    else if(obj == vm->None) write_none();
    else if(obj == vm->Ellipsis) write_ellipsis();
    else{
        throw std::runtime_error(fmt(OBJ_NAME(vm->_t(obj)).escape(), " is not serializable"));
    }
}
  1415. } // namespace pkpy