// vm.h — pocketpy virtual machine definition (header-only).
  1. #pragma once
  2. #include "codeobject.h"
  3. #include "common.h"
  4. #include "frame.h"
  5. #include "error.h"
  6. #include "gc.h"
  7. #include "memory.h"
  8. #include "obj.h"
  9. #include "str.h"
  10. #include "tuplelist.h"
  11. #include "dict.h"
  12. namespace pkpy{
/* Stack manipulation macros */
// https://github.com/python/cpython/blob/3.9/Python/ceval.c#L1123
// All of these expand against a variable named `s_data` (the VM's value
// stack), so they are only usable where `s_data` is in scope (i.e. inside
// VM member functions or code given access to it).
#define TOP() (s_data.top())                // top of stack, not popped
#define SECOND() (s_data.second())          // one below the top
#define THIRD() (s_data.third())            // two below the top
#define PEEK(n) (s_data.peek(n))            // n-th value from the top
#define STACK_SHRINK(n) (s_data.shrink(n))  // drop n values
#define PUSH(v) (s_data.push(v))
#define POP() (s_data.pop())                // pop, discarding the value
#define POPX() (s_data.popx())              // pop; presumably yields the popped value (see ValueStack::popx)
#define STACK_VIEW(n) (s_data.view(n))      // view over the top n values
// DEF_NATIVE_2(ctype, ptype): generates the boxing/unboxing glue for a C++
// value type `ctype` backed by the VM's Python type slot `ptype`:
//   - py_cast<ctype> / py_cast<ctype&>   : checked unbox; raises TypeError
//                                          when obj is not of vm->ptype
//   - _py_cast<ctype> / _py_cast<ctype&> : unchecked unbox; caller must
//                                          guarantee the type
//   - py_var(vm, value)                  : box a copy/move of value into a
//                                          fresh GC object of vm->ptype
// NOTE: no comments inside the macro body — line continuations forbid them.
#define DEF_NATIVE_2(ctype, ptype) \
template<> inline ctype py_cast<ctype>(VM* vm, PyObject* obj) { \
vm->check_non_tagged_type(obj, vm->ptype); \
return PK_OBJ_GET(ctype, obj); \
} \
template<> inline ctype _py_cast<ctype>(VM* vm, PyObject* obj) { \
PK_UNUSED(vm); \
return PK_OBJ_GET(ctype, obj); \
} \
template<> inline ctype& py_cast<ctype&>(VM* vm, PyObject* obj) { \
vm->check_non_tagged_type(obj, vm->ptype); \
return PK_OBJ_GET(ctype, obj); \
} \
template<> inline ctype& _py_cast<ctype&>(VM* vm, PyObject* obj) { \
PK_UNUSED(vm); \
return PK_OBJ_GET(ctype, obj); \
} \
inline PyObject* py_var(VM* vm, const ctype& value) { return vm->heap.gcnew(vm->ptype, value);} \
inline PyObject* py_var(VM* vm, ctype&& value) { return vm->heap.gcnew(vm->ptype, std::move(value));}
// Signature shared by all cached binary special methods (__eq__, __add__, ...).
typedef PyObject* (*BinaryFuncC)(VM*, PyObject*, PyObject*);

// Per-type metadata, stored in VM::_all_types and indexed by Type.
// The m__*__ members cache special methods as raw C function pointers so the
// interpreter can skip a name lookup; nullptr means the fast path is not
// bound (a Python-level definition may still exist on the type object).
struct PyTypeInfo{
    PyObject* obj;          // the type object itself
    Type base;              // base type index; base.index == -1 marks the root (see VM::find_name_in_mro)
    Str name;               // display name; qualified as "mod.Name" for non-builtin modules (see VM::new_type_object)
    bool subclass_enabled;  // whether new types may derive from this one
    // cached special methods
    // unary operators
    PyObject* (*m__repr__)(VM* vm, PyObject*) = nullptr;
    PyObject* (*m__str__)(VM* vm, PyObject*) = nullptr;
    i64 (*m__hash__)(VM* vm, PyObject*) = nullptr;      // returns i64 directly, not a PyObject*
    i64 (*m__len__)(VM* vm, PyObject*) = nullptr;       // returns i64 directly, not a PyObject*
    PyObject* (*m__iter__)(VM* vm, PyObject*) = nullptr;
    PyObject* (*m__next__)(VM* vm, PyObject*) = nullptr;
    PyObject* (*m__json__)(VM* vm, PyObject*) = nullptr;
    PyObject* (*m__neg__)(VM* vm, PyObject*) = nullptr;
    PyObject* (*m__bool__)(VM* vm, PyObject*) = nullptr;
    // comparison operators (binary signature)
    BinaryFuncC m__eq__ = nullptr;
    BinaryFuncC m__lt__ = nullptr;
    BinaryFuncC m__le__ = nullptr;
    BinaryFuncC m__gt__ = nullptr;
    BinaryFuncC m__ge__ = nullptr;
    BinaryFuncC m__contains__ = nullptr;
    // binary operators
    BinaryFuncC m__add__ = nullptr;
    BinaryFuncC m__sub__ = nullptr;
    BinaryFuncC m__mul__ = nullptr;
    BinaryFuncC m__truediv__ = nullptr;
    BinaryFuncC m__floordiv__ = nullptr;
    BinaryFuncC m__mod__ = nullptr;
    BinaryFuncC m__pow__ = nullptr;
    BinaryFuncC m__matmul__ = nullptr;
    BinaryFuncC m__lshift__ = nullptr;
    BinaryFuncC m__rshift__ = nullptr;
    BinaryFuncC m__and__ = nullptr;
    BinaryFuncC m__or__ = nullptr;
    BinaryFuncC m__xor__ = nullptr;
    // indexer
    PyObject* (*m__getitem__)(VM* vm, PyObject*, PyObject*) = nullptr;
    void (*m__setitem__)(VM* vm, PyObject*, PyObject*, PyObject*) = nullptr;
    void (*m__delitem__)(VM* vm, PyObject*, PyObject*) = nullptr;
};
  85. struct FrameId{
  86. std::vector<pkpy::Frame>* data;
  87. int index;
  88. FrameId(std::vector<pkpy::Frame>* data, int index) : data(data), index(index) {}
  89. Frame* operator->() const { return &data->operator[](index); }
  90. Frame* get() const { return &data->operator[](index); }
  91. };
typedef void(*PrintFunc)(VM*, const Str&);

// The pocketpy virtual machine. Owns the GC heap, the shared value stack,
// the call stack, the type registry (_all_types) and all loaded modules.
// The *Error() helpers below raise by throwing; they do not return normally.
class VM {
    VM* vm; // self reference for simplify code
public:
    ManagedHeap heap;                       // GC heap; allocates every boxed PyObject
    ValueStack s_data;                      // value stack shared by all frames
    stack< Frame > callstack;
    std::vector<PyTypeInfo> _all_types;     // indexed by Type
    NameDict _modules;                      // loaded modules
    std::map<StrName, Str> _lazy_modules;   // lazy loaded modules (name -> source; compiled on first import)
    PyObject* None;
    PyObject* True;
    PyObject* False;
    PyObject* NotImplemented;               // unused (but compared against in py_equals)
    PyObject* Ellipsis;
    PyObject* builtins;                     // builtins module
    PyObject* StopIteration;
    PyObject* _main;                        // __main__ module
    PyObject* _last_exception;
#if PK_ENABLE_CEVAL_CALLBACK
    // Optional hook invoked from the bytecode loop (debug/tracing).
    void (*_ceval_on_step)(VM*, Frame*, Bytecode bc) = nullptr;
#endif
    PrintFunc _stdout;                      // output sink; defaults to std::cout
    PrintFunc _stderr;                      // error sink; defaults to std::cerr
    Bytes (*_import_handler)(const Str& name);  // maps a filename to source bytes; empty Bytes means "not found"
    // for quick access
    Type tp_object, tp_type, tp_int, tp_float, tp_bool, tp_str;
    Type tp_list, tp_tuple;
    Type tp_function, tp_native_func, tp_bound_method;
    Type tp_slice, tp_range, tp_module;
    Type tp_super, tp_exception, tp_bytes, tp_mappingproxy;
    Type tp_dict, tp_property, tp_star_wrapper;
    PyObject* cached_object__new__;
    const bool enable_os;                   // fixed at construction

    // Sets up default stdout/stderr sinks, a no-op import handler, and the
    // builtin type registry.
    VM(bool enable_os=true) : heap(this), enable_os(enable_os) {
        this->vm = this;
        _stdout = [](VM* vm, const Str& s) {
            PK_UNUSED(vm);
            std::cout << s;
        };
        _stderr = [](VM* vm, const Str& s) {
            PK_UNUSED(vm);
            std::cerr << s;
        };
        callstack.reserve(8);
        _main = nullptr;
        _last_exception = nullptr;
        // Default handler finds nothing; host applications override it to
        // load module files from disk or elsewhere.
        _import_handler = [](const Str& name) {
            PK_UNUSED(name);
            return Bytes();
        };
        init_builtin_types();
    }

    // Handle to the currently executing frame, index-based (see FrameId).
    FrameId top_frame() {
#if PK_DEBUG_EXTRA_CHECK
        if(callstack.empty()) FATAL_ERROR();
#endif
        return FrameId(&callstack.data(), callstack.size()-1);
    }

    // str(obj): cached __str__ slot, then a Python-level __str__ method,
    // then fall back to repr().
    PyObject* py_str(PyObject* obj){
        const PyTypeInfo* ti = _inst_type_info(obj);
        if(ti->m__str__) return ti->m__str__(this, obj);
        PyObject* self;
        PyObject* f = get_unbound_method(obj, __str__, &self, false);
        if(self != PY_NULL) return call_method(self, f);
        return py_repr(obj);
    }
    // repr(obj): cached slot or regular method call.
    PyObject* py_repr(PyObject* obj){
        const PyTypeInfo* ti = _inst_type_info(obj);
        if(ti->m__repr__) return ti->m__repr__(this, obj);
        return call_method(obj, __repr__);
    }
    // JSON serialization via the __json__ protocol.
    PyObject* py_json(PyObject* obj){
        const PyTypeInfo* ti = _inst_type_info(obj);
        if(ti->m__json__) return ti->m__json__(this, obj);
        return call_method(obj, __json__);
    }
    // iter(obj); raises TypeError when no __iter__ is available.
    PyObject* py_iter(PyObject* obj){
        const PyTypeInfo* ti = _inst_type_info(obj);
        if(ti->m__iter__) return ti->m__iter__(this, obj);
        PyObject* self;
        PyObject* iter_f = get_unbound_method(obj, __iter__, &self, false);
        if(self != PY_NULL) return call_method(self, iter_f);
        TypeError(OBJ_NAME(_t(obj)).escape() + " object is not iterable");
        return nullptr; // unreachable: TypeError throws
    }
    // Walk cls and its (single-inheritance) base chain looking for `name`;
    // returns nullptr when not found. base.index == -1 marks the root type.
    PyObject* find_name_in_mro(PyObject* cls, StrName name){
        PyObject* val;
        do{
            val = cls->attr().try_get(name);
            if(val != nullptr) return val;
            Type base = _all_types[PK_OBJ_GET(Type, cls)].base;
            if(base.index == -1) break;
            cls = _all_types[base].obj;
        }while(true);
        return nullptr;
    }
    // True when obj's type is cls_t or derives from it.
    bool isinstance(PyObject* obj, Type cls_t){
        Type obj_t = PK_OBJ_GET(Type, _t(obj));
        do{
            if(obj_t == cls_t) return true;
            Type base = _all_types[obj_t].base;
            if(base.index == -1) break;
            obj_t = base;
        }while(true);
        return false;
    }
    // Compile and run `source` against `_module` (default: __main__).
    // Uncaught errors are printed to _stderr and swallowed; both stacks are
    // cleared and nullptr is returned in that case.
    PyObject* exec(Str source, Str filename, CompileMode mode, PyObject* _module=nullptr){
        if(_module == nullptr) _module = _main;
        try {
            CodeObject_ code = compile(source, filename, mode);
#if PK_DEBUG_DIS_EXEC
            if(_module == _main) std::cout << disassemble(code) << '\n';
#endif
            return _exec(code, _module);
        }catch (const Exception& e){
            _stderr(this, e.summary() + "\n");
        }
#if !PK_DEBUG_FULL_EXCEPTION
        // Also trap std::exception so embedding hosts keep running; disabled
        // in full-exception debug builds so the debugger sees the throw site.
        catch (const std::exception& e) {
            Str msg = "An std::exception occurred! It could be a bug.\n";
            msg = msg + e.what();
            _stderr(this, msg + "\n");
        }
#endif
        callstack.clear();
        s_data.clear();
        return nullptr;
    }
    // Push a new frame (args are forwarded to Frame's ctor) and run it to
    // completion.
    template<typename ...Args>
    PyObject* _exec(Args&&... args){
        callstack.emplace(&s_data, s_data._sp, std::forward<Args>(args)...);
        return _run_top_frame();
    }
    // Pop the top frame, discarding anything it left on the value stack.
    void _pop_frame(){
        Frame* frame = &callstack.top();
        s_data.reset(frame->_sp_base);
        callstack.pop();
    }
    // Manually unrolled push of 0..4 call arguments.
    void _push_varargs(){ }
    void _push_varargs(PyObject* _0){ PUSH(_0); }
    void _push_varargs(PyObject* _0, PyObject* _1){ PUSH(_0); PUSH(_1); }
    void _push_varargs(PyObject* _0, PyObject* _1, PyObject* _2){ PUSH(_0); PUSH(_1); PUSH(_2); }
    void _push_varargs(PyObject* _0, PyObject* _1, PyObject* _2, PyObject* _3){ PUSH(_0); PUSH(_1); PUSH(_2); PUSH(_3); }
    // callable(*args). Stack layout is [callable, PY_NULL, args...];
    // PY_NULL in the self slot marks an unbound call.
    template<typename... Args>
    PyObject* call(PyObject* callable, Args&&... args){
        PUSH(callable);
        PUSH(PY_NULL);
        _push_varargs(args...);
        return vectorcall(sizeof...(args));
    }
    // callable(self, *args) with an explicit receiver in the self slot.
    template<typename... Args>
    PyObject* call_method(PyObject* self, PyObject* callable, Args&&... args){
        PUSH(callable);
        PUSH(self);
        _push_varargs(args...);
        return vectorcall(sizeof...(args));
    }
    // obj.name(*args): resolve the method by name, then dispatch.
    template<typename... Args>
    PyObject* call_method(PyObject* self, StrName name, Args&&... args){
        PyObject* callable = get_unbound_method(self, name, &self);
        return call_method(self, callable, args...);
    }
    // Build a property object from a native getter (arity 1: self) and an
    // optional setter (arity 2: self, value).
    PyObject* property(NativeFuncC fget, NativeFuncC fset=nullptr){
        PyObject* _0 = heap.gcnew(tp_native_func, NativeFunc(fget, 1, false));
        PyObject* _1 = vm->None;
        if(fset != nullptr) _1 = heap.gcnew(tp_native_func, NativeFunc(fset, 2, false));
        return call(_t(tp_property), _0, _1);
    }
    // Register a new type derived from `base` and (when mod != nullptr)
    // publish it as an attribute of that module. Raises TypeError when the
    // base forbids subclassing. The new Type index is _all_types.size().
    PyObject* new_type_object(PyObject* mod, StrName name, Type base, bool subclass_enabled=true){
        PyObject* obj = heap._new<Type>(tp_type, _all_types.size());
        const PyTypeInfo& base_info = _all_types[base];
        if(!base_info.subclass_enabled){
            TypeError(fmt("type ", base_info.name.escape(), " is not `subclass_enabled`"));
        }
        PyTypeInfo info{
            obj,
            base,
            // Qualify the name with the module unless it lives in builtins.
            (mod!=nullptr && mod!=builtins) ? Str(OBJ_NAME(mod)+"."+name.sv()): name.sv(),
            subclass_enabled,
        };
        if(mod != nullptr) mod->attr().set(name, obj);
        _all_types.push_back(info);
        return obj;
    }
    // Internal variant: unpublished and non-subclassable; returns the index.
    Type _new_type_object(StrName name, Type base=0) {
        PyObject* obj = new_type_object(nullptr, name, base, false);
        return PK_OBJ_GET(Type, obj);
    }
    // Look up a type object by name: first in builtins, then by scanning the
    // registry. Throws std::runtime_error (a C++ error, not a Python one)
    // when the type does not exist.
    PyObject* _find_type_object(const Str& type){
        PyObject* obj = builtins->attr().try_get(type);
        if(obj == nullptr){
            for(auto& t: _all_types) if(t.name == type) return t.obj;
            throw std::runtime_error(fmt("type not found: ", type));
        }
        check_non_tagged_type(obj, tp_type);
        return obj;
    }
    Type _type(const Str& type){
        PyObject* obj = _find_type_object(type);
        return PK_OBJ_GET(Type, obj);
    }
    // Like _find_type_object but yields the mutable registry entry; fatal
    // (FATAL_ERROR) when missing.
    PyTypeInfo* _type_info(const Str& type){
        PyObject* obj = builtins->attr().try_get(type);
        if(obj == nullptr){
            for(auto& t: _all_types) if(t.name == type) return &t;
            FATAL_ERROR();
        }
        return &_all_types[PK_OBJ_GET(Type, obj)];
    }
    PyTypeInfo* _type_info(Type type){
        return &_all_types[type];
    }
    // Type info for an instance. Small ints/floats are tagged pointers with
    // no object header, so they are special-cased before reading obj->type.
    const PyTypeInfo* _inst_type_info(PyObject* obj){
        if(is_int(obj)) return &_all_types[tp_int];
        if(is_float(obj)) return &_all_types[tp_float];
        return &_all_types[obj->type];
    }
// BIND_UNARY_SPECIAL(name): generates a bind__X__ setter that (1) stores f
// in the type's cached slot and (2) exposes it as a Python method whose
// NativeFunc userdata carries the raw pointer for later retrieval.
#define BIND_UNARY_SPECIAL(name) \
void bind##name(Type type, PyObject* (*f)(VM*, PyObject*)){ \
_all_types[type].m##name = f; \
PyObject* nf = bind_method<0>(_t(type), #name, [](VM* vm, ArgsView args){ \
return lambda_get_userdata<PyObject*(*)(VM*, PyObject*)>(args.begin())(vm, args[0]);\
}); \
PK_OBJ_GET(NativeFunc, nf).set_userdata(f); \
}
    BIND_UNARY_SPECIAL(__repr__)
    BIND_UNARY_SPECIAL(__str__)
    BIND_UNARY_SPECIAL(__iter__)
    BIND_UNARY_SPECIAL(__next__)
    BIND_UNARY_SPECIAL(__json__)
    BIND_UNARY_SPECIAL(__neg__)
    BIND_UNARY_SPECIAL(__bool__)
    // __hash__/__len__ return i64 rather than PyObject*, so they cannot use
    // the macro above and are defined out of line.
    void bind__hash__(Type type, i64 (*f)(VM* vm, PyObject*));
    void bind__len__(Type type, i64 (*f)(VM* vm, PyObject*));
#undef BIND_UNARY_SPECIAL
// BIND_BINARY_SPECIAL(name): same caching pattern for two-operand methods.
#define BIND_BINARY_SPECIAL(name) \
void bind##name(Type type, BinaryFuncC f){ \
PyObject* obj = _t(type); \
_all_types[type].m##name = f; \
PyObject* nf = bind_method<1>(obj, #name, [](VM* vm, ArgsView args){ \
return lambda_get_userdata<BinaryFuncC>(args.begin())(vm, args[0], args[1]); \
}); \
PK_OBJ_GET(NativeFunc, nf).set_userdata(f); \
}
    BIND_BINARY_SPECIAL(__eq__)
    BIND_BINARY_SPECIAL(__lt__)
    BIND_BINARY_SPECIAL(__le__)
    BIND_BINARY_SPECIAL(__gt__)
    BIND_BINARY_SPECIAL(__ge__)
    BIND_BINARY_SPECIAL(__contains__)
    BIND_BINARY_SPECIAL(__add__)
    BIND_BINARY_SPECIAL(__sub__)
    BIND_BINARY_SPECIAL(__mul__)
    BIND_BINARY_SPECIAL(__truediv__)
    BIND_BINARY_SPECIAL(__floordiv__)
    BIND_BINARY_SPECIAL(__mod__)
    BIND_BINARY_SPECIAL(__pow__)
    BIND_BINARY_SPECIAL(__matmul__)
    BIND_BINARY_SPECIAL(__lshift__)
    BIND_BINARY_SPECIAL(__rshift__)
    BIND_BINARY_SPECIAL(__and__)
    BIND_BINARY_SPECIAL(__or__)
    BIND_BINARY_SPECIAL(__xor__)
#undef BIND_BINARY_SPECIAL
    // Indexing protocol binders: same slot-cache + userdata pattern.
    void bind__getitem__(Type type, PyObject* (*f)(VM*, PyObject*, PyObject*)){
        PyObject* obj = _t(type);
        _all_types[type].m__getitem__ = f;
        PyObject* nf = bind_method<1>(obj, "__getitem__", [](VM* vm, ArgsView args){
            return lambda_get_userdata<PyObject*(*)(VM*, PyObject*, PyObject*)>(args.begin())(vm, args[0], args[1]);
        });
        PK_OBJ_GET(NativeFunc, nf).set_userdata(f);
    }
    void bind__setitem__(Type type, void (*f)(VM*, PyObject*, PyObject*, PyObject*)){
        PyObject* obj = _t(type);
        _all_types[type].m__setitem__ = f;
        PyObject* nf = bind_method<2>(obj, "__setitem__", [](VM* vm, ArgsView args){
            lambda_get_userdata<void(*)(VM* vm, PyObject*, PyObject*, PyObject*)>(args.begin())(vm, args[0], args[1], args[2]);
            return vm->None;
        });
        PK_OBJ_GET(NativeFunc, nf).set_userdata(f);
    }
    void bind__delitem__(Type type, void (*f)(VM*, PyObject*, PyObject*)){
        PyObject* obj = _t(type);
        _all_types[type].m__delitem__ = f;
        PyObject* nf = bind_method<1>(obj, "__delitem__", [](VM* vm, ArgsView args){
            lambda_get_userdata<void(*)(VM*, PyObject*, PyObject*)>(args.begin())(vm, args[0], args[1]);
            return vm->None;
        });
        PK_OBJ_GET(NativeFunc, nf).set_userdata(f);
    }
    // Equality with reflected fallback: identity first, then lhs.__eq__(rhs)
    // (cached slot, then method), then rhs.__eq__(lhs). Each attempt may
    // answer NotImplemented to defer to the next.
    bool py_equals(PyObject* lhs, PyObject* rhs){
        if(lhs == rhs) return true;
        const PyTypeInfo* ti = _inst_type_info(lhs);
        PyObject* res;
        if(ti->m__eq__){
            res = ti->m__eq__(this, lhs, rhs);
            if(res != vm->NotImplemented) return res == vm->True;
        }
        res = call_method(lhs, __eq__, rhs);
        if(res != vm->NotImplemented) return res == vm->True;
        // reflected operand order
        ti = _inst_type_info(rhs);
        if(ti->m__eq__){
            res = ti->m__eq__(this, rhs, lhs);
            if(res != vm->NotImplemented) return res == vm->True;
        }
        res = call_method(rhs, __eq__, lhs);
        if(res != vm->NotImplemented) return res == vm->True;
        return false;
    }
    // Convenience overloads that resolve the type object by name first.
    template<int ARGC>
    PyObject* bind_func(Str type, Str name, NativeFuncC fn) {
        return bind_func<ARGC>(_find_type_object(type), name, fn);
    }
    template<int ARGC>
    PyObject* bind_method(Str type, Str name, NativeFuncC fn) {
        return bind_method<ARGC>(_find_type_object(type), name, fn);
    }
    // __new__ receives at least the type object itself, so ARGC >= 1
    // (or -1 for variadic).
    template<int ARGC, typename __T>
    PyObject* bind_constructor(__T&& type, NativeFuncC fn) {
        static_assert(ARGC==-1 || ARGC>=1);
        return bind_func<ARGC>(std::forward<__T>(type), "__new__", fn);
    }
    // __new__ that default-constructs the C++ payload T.
    template<typename T, typename __T>
    PyObject* bind_default_constructor(__T&& type) {
        return bind_constructor<1>(std::forward<__T>(type), [](VM* vm, ArgsView args){
            Type t = PK_OBJ_GET(Type, args[0]);
            return vm->heap.gcnew<T>(t, T());
        });
    }
    // __new__ that always raises (type cannot be instantiated from Python).
    template<typename T, typename __T>
    PyObject* bind_notimplemented_constructor(__T&& type) {
        return bind_constructor<-1>(std::forward<__T>(type), [](VM* vm, ArgsView args){
            PK_UNUSED(args);
            vm->NotImplementedError();
            return vm->None; // unreachable: NotImplementedError throws
        });
    }
    template<int ARGC>
    PyObject* bind_builtin_func(Str name, NativeFuncC fn) {
        return bind_func<ARGC>(builtins, name, fn);
    }
    // Map a possibly-negative index into [0, size); raises IndexError when
    // out of range.
    int normalized_index(int index, int size){
        if(index < 0) index += size;
        if(index < 0 || index >= size){
            IndexError(std::to_string(index) + " not in [0, " + std::to_string(size) + ")");
        }
        return index;
    }
    // next(obj) via the cached slot or a regular method call.
    PyObject* py_next(PyObject* obj){
        const PyTypeInfo* ti = _inst_type_info(obj);
        if(ti->m__next__) return ti->m__next__(this, obj);
        return call_method(obj, __next__);
    }
    /***** Error Reporter *****/
    // All *Error helpers funnel into _error(Exception), which throws.
    void _error(StrName name, const Str& msg){
        _error(Exception(name, msg));
    }
    // Jump to the innermost Python-level handler if one exists, otherwise
    // unwind the C++ stack entirely (caught at the exec() boundary).
    void _raise(){
        bool ok = top_frame()->jump_to_exception_handler();
        if(ok) throw HandledException();
        else throw UnhandledException();
    }
    void StackOverflowError() { _error("StackOverflowError", ""); }
    void IOError(const Str& msg) { _error("IOError", msg); }
    void NotImplementedError(){ _error("NotImplementedError", ""); }
    void TypeError(const Str& msg){ _error("TypeError", msg); }
    void IndexError(const Str& msg){ _error("IndexError", msg); }
    void ValueError(const Str& msg){ _error("ValueError", msg); }
    void NameError(StrName name){ _error("NameError", fmt("name ", name.escape() + " is not defined")); }
    void KeyError(PyObject* obj){ _error("KeyError", PK_OBJ_GET(Str, py_repr(obj))); }
    void BinaryOptError(const char* op) { TypeError(fmt("unsupported operand type(s) for ", op)); }
    void AttributeError(PyObject* obj, StrName name){
        // OBJ_NAME calls getattr, which may lead to an infinite recursion
        _error("AttributeError", fmt("type ", OBJ_NAME(_t(obj)).escape(), " has no attribute ", name.escape()));
    }
    void AttributeError(Str msg){ _error("AttributeError", msg); }
    // Runtime type checks; each raises TypeError on failure.
    void check_type(PyObject* obj, Type type){
        if(is_type(obj, type)) return;
        TypeError("expected " + OBJ_NAME(_t(type)).escape() + ", got " + OBJ_NAME(_t(obj)).escape());
    }
    void check_args_size(int size, int min_size, int max_size){
        if(size >= min_size && size <= max_size) return;
        TypeError(fmt("expected ", min_size, "-", max_size, " arguments, got ", size));
    }
    void check_non_tagged_type(PyObject* obj, Type type){
        if(is_non_tagged_type(obj, type)) return;
        TypeError("expected " + OBJ_NAME(_t(type)).escape() + ", got " + OBJ_NAME(_t(obj)).escape());
    }
    void check_int(PyObject* obj){
        if(is_int(obj)) return;
        check_type(obj, tp_int); // if failed, redirect to check_type to raise TypeError
    }
    void check_float(PyObject* obj){
        if(is_float(obj)) return;
        check_type(obj, tp_float); // if failed, redirect to check_type to raise TypeError
    }
    // Type object for a Type index / an instance. Tagged ints and floats
    // carry no object header, so they map to tp_int/tp_float directly.
    PyObject* _t(Type t){
        return _all_types[t.index].obj;
    }
    PyObject* _t(PyObject* obj){
        if(is_int(obj)) return _t(tp_int);
        if(is_float(obj)) return _t(tp_float);
        return _all_types[obj->type].obj;
    }
    // Tracks the chain of imports currently in progress, for circular-import
    // detection and relative-import resolution.
    struct ImportContext{
        // 0: normal; 1: __init__.py; 2: relative
        std::vector<std::pair<StrName, int>> pending;
        // RAII guard: pushes (name, type) on construction, pops on scope exit.
        struct Temp{
            VM* vm;
            StrName name;
            Temp(VM* vm, StrName name, int type): vm(vm), name(name){
                ImportContext* ctx = &vm->_import_context;
                ctx->pending.emplace_back(name, type);
            }
            ~Temp(){
                ImportContext* ctx = &vm->_import_context;
                ctx->pending.pop_back();
            }
        };
        Temp temp(VM* vm, StrName name, int type){
            return Temp(vm, name, type);
        }
    };
    ImportContext _import_context;
    // Import a module by name. Resolution order: already-loaded (_modules),
    // lazy source (_lazy_modules), then _import_handler for "<name>.py" or
    // the package form "<name>/__init__.py". Raises ImportError on circular
    // or unresolved imports.
    PyObject* py_import(StrName name, bool relative=false){
        Str filename;
        int type;
        if(relative){
            ImportContext* ctx = &_import_context;
            type = 2;
            // Walk the pending-import stack outward; only a package entry
            // (type 1, an __init__.py) anchors the relative name.
            for(auto it=ctx->pending.rbegin(); it!=ctx->pending.rend(); ++it){
                if(it->second == 2) continue;
                if(it->second == 1){
                    filename = fmt(it->first, kPlatformSep, name, ".py");
                    name = fmt(it->first, '.', name).c_str();
                    break;
                }
            }
            if(filename.length() == 0) _error("ImportError", "relative import outside of package");
        }else{
            type = 0;
            filename = fmt(name, ".py");
        }
        // Circular-import guard on the (possibly qualified) name.
        for(auto& [k, v]: _import_context.pending){
            if(k == name){
                vm->_error("ImportError", fmt("circular import ", name.escape()));
            }
        }
        PyObject* ext_mod = _modules.try_get(name);
        if(ext_mod == nullptr){
            Str source;
            auto it = _lazy_modules.find(name);
            if(it == _lazy_modules.end()){
                Bytes b = _import_handler(filename);
                if(!relative && !b){
                    // Fall back to the package form: <name>/__init__.py.
                    filename = fmt(name, kPlatformSep, "__init__.py");
                    b = _import_handler(filename);
                    if(b) type = 1;
                }
                if(!b) _error("ImportError", fmt("module ", name.escape(), " not found"));
                source = Str(b.str());
            }else{
                // Lazy modules are single-shot: consume the stored source.
                source = it->second;
                _lazy_modules.erase(it);
            }
            auto _ = _import_context.temp(this, name, type); // pending entry for the duration of _exec
            CodeObject_ code = compile(source, filename, EXEC_MODE);
            PyObject* new_mod = new_module(name);
            _exec(code, new_mod);
            new_mod->attr()._try_perfect_rehash();
            return new_mod;
        }else{
            return ext_mod;
        }
    }
    // Clear all interpreter state.
    ~VM() {
        callstack.clear();
        s_data.clear();
        _all_types.clear();
        _modules.clear();
        _lazy_modules.clear();
    }
#if PK_DEBUG_CEVAL_STEP
    void _log_s_data(const char* title = nullptr);
#endif
    // --- declared here, defined out of line elsewhere in the project ---
    void _unpack_as_list(ArgsView args, List& list);
    void _unpack_as_dict(ArgsView args, Dict& dict);
    PyObject* vectorcall(int ARGC, int KWARGC=0, bool op_call=false);
    CodeObject_ compile(Str source, Str filename, CompileMode mode, bool unknown_global_scope=false);
    PyObject* py_negate(PyObject* obj);
    f64 num_to_float(PyObject* obj);
    bool py_bool(PyObject* obj);
    i64 py_hash(PyObject* obj);
    PyObject* py_list(PyObject*);
    PyObject* new_module(StrName name);
    Str disassemble(CodeObject_ co);
    void init_builtin_types();
    PyObject* getattr(PyObject* obj, StrName name, bool throw_err=true);
    PyObject* get_unbound_method(PyObject* obj, StrName name, PyObject** self, bool throw_err=true, bool fallback=false);
    void parse_int_slice(const Slice& s, int length, int& start, int& stop, int& step);
    PyObject* format(Str, PyObject*);
    void setattr(PyObject* obj, StrName name, PyObject* value);
    template<int ARGC>
    PyObject* bind_method(PyObject*, Str, NativeFuncC);
    template<int ARGC>
    PyObject* bind_func(PyObject*, Str, NativeFuncC);
    void _error(Exception);
    PyObject* _run_top_frame();
    void post_init();
    PyObject* _py_generator(Frame&& frame, ArgsView buffer);
    // new style binding api
    PyObject* bind(PyObject*, const char*, const char*, NativeFuncC);
    PyObject* bind(PyObject*, const char*, NativeFuncC);
    void _prepare_py_call(PyObject**, ArgsView, ArgsView, const FuncDecl_&);
};
  608. inline void NativeFunc::check_size(VM* vm, ArgsView args) const{
  609. if(args.size() != argc && argc != -1) {
  610. vm->TypeError(fmt("expected ", argc, " arguments, got ", args.size()));
  611. }
  612. }
// Invoke the wrapped C function. Arity is NOT checked here (see check_size
// above).
inline PyObject* NativeFunc::call(VM *vm, ArgsView args) const {
    return f(vm, args);
}
// Generate the boxing/unboxing glue (py_cast / _py_cast / py_var) for every
// builtin value type stored as a GC object. Ints, floats and bools are NOT
// listed here: they are tagged pointers and handled separately below.
DEF_NATIVE_2(Str, tp_str)
DEF_NATIVE_2(List, tp_list)
DEF_NATIVE_2(Tuple, tp_tuple)
DEF_NATIVE_2(Function, tp_function)
DEF_NATIVE_2(NativeFunc, tp_native_func)
DEF_NATIVE_2(BoundMethod, tp_bound_method)
DEF_NATIVE_2(Range, tp_range)
DEF_NATIVE_2(Slice, tp_slice)
DEF_NATIVE_2(Exception, tp_exception)
DEF_NATIVE_2(Bytes, tp_bytes)
DEF_NATIVE_2(MappingProxy, tp_mappingproxy)
DEF_NATIVE_2(Dict, tp_dict)
DEF_NATIVE_2(Property, tp_property)
DEF_NATIVE_2(StarWrapper, tp_star_wrapper)
#undef DEF_NATIVE_2
// PY_CAST_INT(T): unbox a tagged small-int PyObject* into integral type T.
// Ints are stored inline in the pointer as (value << 2) | 0b01 (see
// PY_VAR_INT below), so `PK_BITS(obj) >> 2` recovers the signed value.
// py_cast checks the tag first; _py_cast trusts the caller.
#define PY_CAST_INT(T) \
template<> inline T py_cast<T>(VM* vm, PyObject* obj){ \
vm->check_int(obj); \
return (T)(PK_BITS(obj) >> 2); \
} \
template<> inline T _py_cast<T>(VM* vm, PyObject* obj){ \
PK_UNUSED(vm); \
return (T)(PK_BITS(obj) >> 2); \
}
PY_CAST_INT(char)
PY_CAST_INT(short)
PY_CAST_INT(int)
PY_CAST_INT(long)
PY_CAST_INT(long long)
PY_CAST_INT(unsigned char)
PY_CAST_INT(unsigned short)
PY_CAST_INT(unsigned int)
PY_CAST_INT(unsigned long)
PY_CAST_INT(unsigned long long)
// Tagged-pointer float decode: the payload lives in the bits selected by
// Number::c1; BitsCvt reinterprets those bits as an f64. The checked
// py_cast variants validate the float tag, the _py_cast variants assume it.
template<> inline float py_cast<float>(VM* vm, PyObject* obj){
    vm->check_float(obj);
    i64 bits = PK_BITS(obj) & Number::c1;
    return BitsCvt(bits)._float;
}
template<> inline float _py_cast<float>(VM* vm, PyObject* obj){
    PK_UNUSED(vm);
    i64 bits = PK_BITS(obj) & Number::c1;
    return BitsCvt(bits)._float;
}
template<> inline double py_cast<double>(VM* vm, PyObject* obj){
    vm->check_float(obj);
    i64 bits = PK_BITS(obj) & Number::c1;
    return BitsCvt(bits)._float;
}
template<> inline double _py_cast<double>(VM* vm, PyObject* obj){
    PK_UNUSED(vm);
    i64 bits = PK_BITS(obj) & Number::c1;
    return BitsCvt(bits)._float;
}
// Tagged-pointer int encode: the value is shifted left by 2 and tagged 0b01.
// The round-trip test `((val << 2) >> 2) != val` detects values that do not
// survive the 2-bit shift (i.e. need more than 62 payload bits) and raises
// OverflowError instead of silently truncating.
#define PY_VAR_INT(T) \
inline PyObject* py_var(VM* vm, T _val){ \
i64 val = static_cast<i64>(_val); \
if(((val << 2) >> 2) != val){ \
vm->_error("OverflowError", std::to_string(val) + " is out of range"); \
} \
val = (val << 2) | 0b01; \
return reinterpret_cast<PyObject*>(val); \
}
// Cover every builtin integral width, signed and unsigned.
PY_VAR_INT(char)
PY_VAR_INT(short)
PY_VAR_INT(int)
PY_VAR_INT(long)
PY_VAR_INT(long long)
PY_VAR_INT(unsigned char)
PY_VAR_INT(unsigned short)
PY_VAR_INT(unsigned int)
PY_VAR_INT(unsigned long)
PY_VAR_INT(unsigned long long)
// Tagged-pointer float encode: keep the high bits of the f64 (Number::c1
// mask) and tag the result 0b10. The `tail` test inspects the two low bits
// being dropped and rounds the kept mantissa: round-up when tail == 0b11,
// and when tail == 0b10 (exactly half) round up only if the lowest kept bit
// is set -- i.e. round-half-to-even, so the precision loss is unbiased.
#define PY_VAR_FLOAT(T) \
inline PyObject* py_var(VM* vm, T _val){ \
PK_UNUSED(vm); \
BitsCvt val(static_cast<f64>(_val)); \
i64 bits = val._int & Number::c1; \
i64 tail = val._int & Number::c2; \
if(tail == 0b10){ \
if(bits&0b100) bits += 0b100; \
}else if(tail == 0b11){ \
bits += 0b100; \
} \
bits |= 0b10; \
return reinterpret_cast<PyObject*>(bits); \
}
PY_VAR_FLOAT(float)
PY_VAR_FLOAT(double)
#undef PY_VAR_INT
#undef PY_VAR_FLOAT
  707. inline PyObject* py_var(VM* vm, bool val){
  708. return val ? vm->True : vm->False;
  709. }
// Checked bool cast: only the True/False singletons are accepted. For any
// other object check_non_tagged_type raises, so the final `return false`
// is only reached if that call returns (i.e. never on the error path).
template<> inline bool py_cast<bool>(VM* vm, PyObject* obj){
    if(obj == vm->True) return true;
    if(obj == vm->False) return false;
    vm->check_non_tagged_type(obj, vm->tp_bool);
    return false;
}
// Unchecked variant: anything other than the True singleton reads as false.
template<> inline bool _py_cast<bool>(VM* vm, PyObject* obj){
    return obj == vm->True;
}
// Borrow the C string out of a str object. The returned pointer aliases the
// Str's internal buffer, so it is only valid while the object stays alive.
template<> inline CString py_cast<CString>(VM* vm, PyObject* obj){
    vm->check_non_tagged_type(obj, vm->tp_str);
    return PK_OBJ_GET(Str, obj).c_str();
}
// Unchecked variant: caller guarantees `obj` is a str.
template<> inline CString _py_cast<CString>(VM* vm, PyObject* obj){
    return PK_OBJ_GET(Str, obj).c_str();
}
// String conversions: each overload boxes its input into a Str object.
inline PyObject* py_var(VM* vm, const char val[]){
    return VAR(Str(val));
}
// Takes the std::string by value so the buffer can be moved into the Str.
inline PyObject* py_var(VM* vm, std::string val){
    return VAR(Str(std::move(val)));
}
inline PyObject* py_var(VM* vm, std::string_view val){
    return VAR(Str(val));
}
// NoReturn is the C++-side marker for "no value"; it maps to Python None.
inline PyObject* py_var(VM* vm, NoReturn val){
    PK_UNUSED(val);
    return vm->None;
}
// Identity overload: an object is already a VM value.
inline PyObject* py_var(VM* vm, PyObject* val){
    PK_UNUSED(vm);
    return val;
}
// Unary minus: prefer the type's cached __neg__ slot, otherwise fall back
// to a dynamic __neg__ method call.
inline PyObject* VM::py_negate(PyObject* obj){
    const PyTypeInfo* ti = _inst_type_info(obj);
    if(ti->m__neg__) return ti->m__neg__(this, obj);
    return call_method(obj, __neg__);
}
  748. inline f64 VM::num_to_float(PyObject* obj){
  749. if(is_float(obj)){
  750. return _CAST(f64, obj);
  751. } else if (is_int(obj)){
  752. return (f64)_CAST(i64, obj);
  753. }
  754. TypeError("expected 'int' or 'float', got " + OBJ_NAME(_t(obj)).escape());
  755. return 0;
  756. }
// Truthiness protocol: bool singletons first, then None, then numeric
// zero checks, then a __len__() > 0 fallback. Objects without __len__
// are truthy by default. (No __bool__ dunder is consulted here.)
inline bool VM::py_bool(PyObject* obj){
    if(is_non_tagged_type(obj, tp_bool)) return obj == True;
    if(obj == None) return false;
    if(is_int(obj)) return _CAST(i64, obj) != 0;
    if(is_float(obj)) return _CAST(f64, obj) != 0.0;
    PyObject* self;
    // non-throwing lookup: self stays PY_NULL when __len__ is absent
    PyObject* len_f = get_unbound_method(obj, __len__, &self, false);
    if(self != PY_NULL){
        PyObject* ret = call_method(self, len_f);
        return CAST(i64, ret) > 0;
    }
    return true;
}
  770. inline PyObject* VM::py_list(PyObject* it){
  771. auto _lock = heap.gc_scope_lock();
  772. it = py_iter(it);
  773. List list;
  774. PyObject* obj = py_next(it);
  775. while(obj != StopIteration){
  776. list.push_back(obj);
  777. obj = py_next(it);
  778. }
  779. return VAR(std::move(list));
  780. }
// Resolve a Python slice against a sequence of `length` items into concrete
// start/stop/step values, following CPython's rules: missing fields default
// according to the step direction, negative indices count from the end, and
// results are clipped to [0, length] for forward slices and [-1, length-1]
// for backward ones (stop == -1 is the "walked past index 0" sentinel).
inline void VM::parse_int_slice(const Slice& s, int length, int& start, int& stop, int& step){
    auto clip = [](int value, int min, int max){
        if(value < min) return min;
        if(value > max) return max;
        return value;
    };
    if(s.step == None) step = 1;
    else step = CAST(int, s.step);
    if(step == 0) ValueError("slice step cannot be zero");
    if(step > 0){
        if(s.start == None){
            start = 0;
        }else{
            start = CAST(int, s.start);
            if(start < 0) start += length;
            start = clip(start, 0, length);
        }
        if(s.stop == None){
            stop = length;
        }else{
            stop = CAST(int, s.stop);
            if(stop < 0) stop += length;
            stop = clip(stop, 0, length);
        }
    }else{
        if(s.start == None){
            start = length - 1;
        }else{
            start = CAST(int, s.start);
            if(start < 0) start += length;
            start = clip(start, -1, length - 1);
        }
        if(s.stop == None){
            stop = -1;
        }else{
            stop = CAST(int, s.stop);
            if(stop < 0) stop += length;
            stop = clip(stop, -1, length - 1);
        }
    }
}
// Hash an object: prefer the type's cached __hash__ slot, otherwise call
// the __hash__ method dynamically and cast its result to i64.
inline i64 VM::py_hash(PyObject* obj){
    const PyTypeInfo* ti = _inst_type_info(obj);
    if(ti->m__hash__) return ti->m__hash__(this, obj);
    PyObject* ret = call_method(obj, __hash__);
    return CAST(i64, ret);
}
  828. inline PyObject* VM::format(Str spec, PyObject* obj){
  829. if(spec.empty()) return py_str(obj);
  830. char type;
  831. switch(spec.end()[-1]){
  832. case 'f': case 'd': case 's':
  833. type = spec.end()[-1];
  834. spec = spec.substr(0, spec.length() - 1);
  835. break;
  836. default: type = ' '; break;
  837. }
  838. char pad_c = ' ';
  839. if(spec[0] == '0'){
  840. pad_c = '0';
  841. spec = spec.substr(1);
  842. }
  843. char align;
  844. if(spec[0] == '>'){
  845. align = '>';
  846. spec = spec.substr(1);
  847. }else if(spec[0] == '<'){
  848. align = '<';
  849. spec = spec.substr(1);
  850. }else{
  851. if(is_int(obj) || is_float(obj)) align = '>';
  852. else align = '<';
  853. }
  854. int dot = spec.index(".");
  855. int width, precision;
  856. try{
  857. if(dot >= 0){
  858. width = Number::stoi(spec.substr(0, dot).str());
  859. precision = Number::stoi(spec.substr(dot+1).str());
  860. }else{
  861. width = Number::stoi(spec.str());
  862. precision = -1;
  863. }
  864. }catch(...){
  865. ValueError("invalid format specifer");
  866. UNREACHABLE();
  867. }
  868. if(type != 'f' && dot >= 0) ValueError("precision not allowed in the format specifier");
  869. Str ret;
  870. if(type == 'f'){
  871. f64 val = num_to_float(obj);
  872. if(precision < 0) precision = 6;
  873. std::stringstream ss;
  874. ss << std::fixed << std::setprecision(precision) << val;
  875. ret = ss.str();
  876. }else if(type == 'd'){
  877. ret = std::to_string(CAST(i64, obj));
  878. }else if(type == 's'){
  879. ret = CAST(Str&, obj);
  880. }else{
  881. ret = CAST(Str&, py_str(obj));
  882. }
  883. if(width > ret.length()){
  884. int pad = width - ret.length();
  885. std::string padding(pad, pad_c);
  886. if(align == '>') ret = padding.c_str() + ret;
  887. else ret = ret + padding.c_str();
  888. }
  889. return VAR(ret);
  890. }
// Create a module object and register it in the module table.
inline PyObject* VM::new_module(StrName name) {
    PyObject* obj = heap._new<DummyModule>(tp_module, DummyModule());
    obj->attr().set("__name__", VAR(name.sv()));
    // we do not allow override in order to avoid memory leak
    // it is because Module objects are not garbage collected
    if(_modules.contains(name)) throw std::runtime_error("module already exists");
    _modules.set(name, obj);
    return obj;
}
// Render a bytecode argument for disassembly. The raw integer arg is
// decorated per opcode family: constant index -> repr of the constant,
// name index -> interned name, local index -> varname, function index ->
// declared function name. `vm` may be nullptr (used by the debug logger),
// in which case constants cannot be repr'd and stay numeric.
inline std::string _opcode_argstr(VM* vm, Bytecode byte, const CodeObject* co){
    std::string argStr = byte.arg == -1 ? "" : std::to_string(byte.arg);
    switch(byte.op){
        case OP_LOAD_CONST:
            if(vm != nullptr){
                argStr += fmt(" (", CAST(Str, vm->py_repr(co->consts[byte.arg])), ")");
            }
            break;
        // opcodes whose arg is an interned StrName index
        case OP_LOAD_NAME: case OP_LOAD_GLOBAL: case OP_LOAD_NONLOCAL: case OP_STORE_GLOBAL:
        case OP_LOAD_ATTR: case OP_LOAD_METHOD: case OP_STORE_ATTR: case OP_DELETE_ATTR:
        case OP_IMPORT_NAME: case OP_BEGIN_CLASS: case OP_RAISE:
        case OP_DELETE_GLOBAL: case OP_INC_GLOBAL: case OP_DEC_GLOBAL: case OP_STORE_CLASS_ATTR:
            argStr += fmt(" (", StrName(byte.arg).sv(), ")");
            break;
        // opcodes whose arg indexes the code object's local variable names
        case OP_LOAD_FAST: case OP_STORE_FAST: case OP_DELETE_FAST: case OP_INC_FAST: case OP_DEC_FAST:
            argStr += fmt(" (", co->varnames[byte.arg].sv(), ")");
            break;
        case OP_LOAD_FUNCTION:
            argStr += fmt(" (", co->func_decls[byte.arg]->code->name, ")");
            break;
    }
    return argStr;
}
// Produce a human-readable, dis-style listing of a code object and,
// recursively, of every function it declares. Jump targets are marked
// with "->", and the source line is printed only when it changes.
inline Str VM::disassemble(CodeObject_ co){
    // left-pad/truncate a string to exactly n columns
    auto pad = [](const Str& s, const int n){
        if(s.length() >= n) return s.substr(0, n);
        return s + std::string(n - s.length(), ' ');
    };
    // collect the destinations of jump instructions so they can be marked
    std::vector<int> jumpTargets;
    for(auto byte : co->codes){
        if(byte.op == OP_JUMP_ABSOLUTE || byte.op == OP_POP_JUMP_IF_FALSE || byte.op == OP_SHORTCUT_IF_FALSE_OR_POP){
            jumpTargets.push_back(byte.arg);
        }
    }
    std::stringstream ss;
    int prev_line = -1;
    for(int i=0; i<co->codes.size(); i++){
        const Bytecode& byte = co->codes[i];
        Str line = std::to_string(co->lines[i]);
        if(co->lines[i] == prev_line) line = "";
        else{
            if(prev_line != -1) ss << "\n";  // blank line between source lines
            prev_line = co->lines[i];
        }
        std::string pointer;
        if(std::find(jumpTargets.begin(), jumpTargets.end(), i) != jumpTargets.end()){
            pointer = "-> ";
        }else{
            pointer = "   ";
        }
        ss << pad(line, 8) << pointer << pad(std::to_string(i), 3);
        ss << " " << pad(OP_NAMES[byte.op], 25) << " ";
        // ss << pad(byte.arg == -1 ? "" : std::to_string(byte.arg), 5);
        std::string argStr = _opcode_argstr(this, byte, co.get());
        ss << argStr;
        // ss << pad(argStr, 40);      // may overflow
        // ss << co->blocks[byte.block].type;
        if(i != co->codes.size() - 1) ss << '\n';
    }
    // recurse into nested function declarations
    for(auto& decl: co->func_decls){
        ss << "\n\n" << "Disassembly of " << decl->code->name << ":\n";
        ss << disassemble(decl->code);
    }
    ss << "\n";
    return Str(ss.str());
}
#if PK_DEBUG_CEVAL_STEP
// Debug helper: dump the whole value stack to stdout, one line per ceval
// step. Frame base pointers are drawn as '|' markers so the per-frame
// stack segments are visible. Compiled only with PK_DEBUG_CEVAL_STEP.
inline void VM::_log_s_data(const char* title) {
    if(_main == nullptr) return;       // VM not fully initialized yet
    if(callstack.empty()) return;
    std::stringstream ss;
    if(title) ss << title << " | ";
    // count how many frames start at each stack slot
    std::map<PyObject**, int> sp_bases;
    for(Frame& f: callstack.data()){
        if(f._sp_base == nullptr) FATAL_ERROR();
        sp_bases[f._sp_base] += 1;
    }
    FrameId frame = top_frame();
    int line = frame->co->lines[frame->_ip];
    ss << frame->co->name << ":" << line << " [";
    for(PyObject** p=s_data.begin(); p!=s_data.end(); p++){
        ss << std::string(sp_bases[p], '|');
        if(sp_bases[p] > 0) ss << " ";
        PyObject* obj = *p;
        // render each slot in a compact, type-aware way
        if(obj == nullptr) ss << "(nil)";
        else if(obj == PY_NULL) ss << "NULL";
        else if(is_int(obj)) ss << CAST(i64, obj);
        else if(is_float(obj)) ss << CAST(f64, obj);
        else if(is_type(obj, tp_str)) ss << CAST(Str, obj).escape();
        else if(obj == None) ss << "None";
        else if(obj == True) ss << "True";
        else if(obj == False) ss << "False";
        else if(is_type(obj, tp_function)){
            auto& f = CAST(Function&, obj);
            ss << f.decl->code->name << "(...)";
        } else if(is_type(obj, tp_type)){
            Type t = PK_OBJ_GET(Type, obj);
            ss << "<class " + _all_types[t].name.escape() + ">";
        } else if(is_type(obj, tp_list)){
            auto& t = CAST(List&, obj);
            ss << "list(size=" << t.size() << ")";
        } else if(is_type(obj, tp_tuple)){
            auto& t = CAST(Tuple&, obj);
            ss << "tuple(size=" << t.size() << ")";
        } else ss << "(" << obj_type_name(this, obj->type) << ")";
        ss << ", ";
    }
    std::string output = ss.str();
    if(!s_data.empty()) {
        // drop the trailing ", "
        output.pop_back(); output.pop_back();
    }
    output.push_back(']');
    Bytecode byte = frame->co->codes[frame->_ip];
    std::cout << output << " " << OP_NAMES[byte.op] << " " << _opcode_argstr(nullptr, byte, frame->co) << std::endl;
}
#endif
// Bootstrap the type system and the builtins module. Registration order
// matters: `object` and `type` are self-referential and installed by hand
// first, and int/float must land on the fixed indices the tagged-pointer
// code assumes (kTpIntIndex/kTpFloatIndex) -- enforced by the FATAL_ERROR
// check below.
inline void VM::init_builtin_types(){
    _all_types.push_back({heap._new<Type>(Type(1), Type(0)), -1, "object", true});
    _all_types.push_back({heap._new<Type>(Type(1), Type(1)), 0, "type", false});
    tp_object = 0; tp_type = 1;
    tp_int = _new_type_object("int");
    tp_float = _new_type_object("float");
    if(tp_int.index != kTpIntIndex || tp_float.index != kTpFloatIndex) FATAL_ERROR();
    tp_bool = _new_type_object("bool");
    tp_str = _new_type_object("str");
    tp_list = _new_type_object("list");
    tp_tuple = _new_type_object("tuple");
    tp_slice = _new_type_object("slice");
    tp_range = _new_type_object("range");
    tp_module = _new_type_object("module");
    tp_function = _new_type_object("function");
    tp_native_func = _new_type_object("native_func");
    tp_bound_method = _new_type_object("bound_method");
    tp_super = _new_type_object("super");
    tp_exception = _new_type_object("Exception");
    tp_bytes = _new_type_object("bytes");
    tp_mappingproxy = _new_type_object("mappingproxy");
    tp_dict = _new_type_object("dict");
    tp_property = _new_type_object("property");
    tp_star_wrapper = _new_type_object("_star_wrapper");
    // singleton instances (each gets its own hidden type)
    this->None = heap._new<Dummy>(_new_type_object("NoneType"), {});
    this->NotImplemented = heap._new<Dummy>(_new_type_object("NotImplementedType"), {});
    this->Ellipsis = heap._new<Dummy>(_new_type_object("ellipsis"), {});
    this->True = heap._new<Dummy>(tp_bool, {});
    this->False = heap._new<Dummy>(tp_bool, {});
    this->StopIteration = heap._new<Dummy>(_new_type_object("StopIterationType"), {});
    this->builtins = new_module("builtins");
    // setup public types
    builtins->attr().set("type", _t(tp_type));
    builtins->attr().set("object", _t(tp_object));
    builtins->attr().set("bool", _t(tp_bool));
    builtins->attr().set("int", _t(tp_int));
    builtins->attr().set("float", _t(tp_float));
    builtins->attr().set("str", _t(tp_str));
    builtins->attr().set("list", _t(tp_list));
    builtins->attr().set("tuple", _t(tp_tuple));
    builtins->attr().set("range", _t(tp_range));
    builtins->attr().set("bytes", _t(tp_bytes));
    builtins->attr().set("dict", _t(tp_dict));
    builtins->attr().set("property", _t(tp_property));
    builtins->attr().set("StopIteration", StopIteration);
    builtins->attr().set("NotImplemented", NotImplemented);
    builtins->attr().set("slice", _t(tp_slice));
    // let subclasses/embedders register extra builtins before tables freeze
    post_init();
    // attribute tables are stable now; switch them to perfect hashing
    for(int i=0; i<_all_types.size(); i++){
        _all_types[i].obj->attr()._try_perfect_rehash();
    }
    for(auto [k, v]: _modules.items()) v->attr()._try_perfect_rehash();
    this->_main = new_module("__main__");
}
  1070. // `heap.gc_scope_lock();` needed before calling this function
  1071. inline void VM::_unpack_as_list(ArgsView args, List& list){
  1072. for(PyObject* obj: args){
  1073. if(is_non_tagged_type(obj, tp_star_wrapper)){
  1074. const StarWrapper& w = _CAST(StarWrapper&, obj);
  1075. // maybe this check should be done in the compile time
  1076. if(w.level != 1) TypeError("expected level 1 star wrapper");
  1077. PyObject* _0 = py_iter(w.obj);
  1078. PyObject* _1 = py_next(_0);
  1079. while(_1 != StopIteration){
  1080. list.push_back(_1);
  1081. _1 = py_next(_0);
  1082. }
  1083. }else{
  1084. list.push_back(obj);
  1085. }
  1086. }
  1087. }
  1088. // `heap.gc_scope_lock();` needed before calling this function
  1089. inline void VM::_unpack_as_dict(ArgsView args, Dict& dict){
  1090. for(PyObject* obj: args){
  1091. if(is_non_tagged_type(obj, tp_star_wrapper)){
  1092. const StarWrapper& w = _CAST(StarWrapper&, obj);
  1093. // maybe this check should be done in the compile time
  1094. if(w.level != 2) TypeError("expected level 2 star wrapper");
  1095. const Dict& other = CAST(Dict&, w.obj);
  1096. dict.update(other);
  1097. }else{
  1098. const Tuple& t = CAST(Tuple&, obj);
  1099. if(t.size() != 2) TypeError("expected tuple of length 2");
  1100. dict.set(t[0], t[1]);
  1101. }
  1102. }
  1103. }
// Fill `buffer` (the callee's local-variable frame, co_nlocals slots) from
// positional args plus flattened (name, value) kwargs, applying the
// declaration's defaults, *args and **kwargs. Raises TypeError on arity
// mismatch or an unknown keyword when the callee has no **kwargs.
inline void VM::_prepare_py_call(PyObject** buffer, ArgsView args, ArgsView kwargs, const FuncDecl_& decl){
    const CodeObject* co = decl->code.get();
    int co_nlocals = co->varnames.size();
    int decl_argc = decl->args.size();
    if(args.size() < decl_argc){
        vm->TypeError(fmt(
            "expected ", decl_argc, " positional arguments, got ", args.size(),
            " (", co->name, ')'
        ));
    }
    int i = 0;
    // bind declared positional parameters
    for(int index: decl->args) buffer[index] = args[i++];
    // clear the remaining local slots (PY_NULL = unbound)
    for(int j=i; j<co_nlocals; j++) buffer[j] = PY_NULL;
    // install keyword defaults
    for(auto& kv: decl->kwargs) buffer[kv.key] = kv.value;
    // handle *args
    if(decl->starred_arg != -1){
        ArgsView vargs(args.begin() + i, args.end());
        buffer[decl->starred_arg] = VAR(vargs.to_tuple());
        i += vargs.size();
    }else{
        // no *args: surplus positionals fill keyword parameters in order
        for(auto& kv: decl->kwargs){
            if(i >= args.size()) break;
            buffer[kv.key] = args[i++];
        }
        if(i < args.size()) TypeError(fmt("too many arguments", " (", decl->code->name, ')'));
    }
    PyObject* vkwargs;
    if(decl->starred_kwarg != -1){
        vkwargs = VAR(Dict(this));
        buffer[decl->starred_kwarg] = vkwargs;
    }else{
        vkwargs = nullptr;
    }
    // kwargs arrive flattened as (StrName-index, value) pairs
    for(int j=0; j<kwargs.size(); j+=2){
        StrName key(CAST(int, kwargs[j]));
        int index = co->varnames_inv.try_get(key);
        if(index < 0){
            // not a declared parameter: goes into **kwargs or is an error
            if(vkwargs == nullptr){
                TypeError(fmt(key.escape(), " is an invalid keyword argument for ", co->name, "()"));
            }else{
                Dict& dict = _CAST(Dict&, vkwargs);
                dict.set(VAR(key.sv()), kwargs[j+1]);
            }
        }else{
            buffer[index] = kwargs[j+1];
        }
    }
}
// The core call dispatcher. On entry the value stack holds
//   [callable, <self>, args..., kwargs...]
// where <self> is PY_NULL for a plain call or the receiver for a method
// call, and kwargs are KWARGC flattened (name, value) pairs. Dispatch
// order: bound-method unwrap, native_func, bytecode function, type
// construction, then a __call__ fallback. On return the stack has been
// reset to p0 (the arguments are consumed).
inline PyObject* VM::vectorcall(int ARGC, int KWARGC, bool op_call){
    PyObject** p1 = s_data._sp - KWARGC*2;   // start of the kwargs pairs
    PyObject** p0 = p1 - ARGC - 2;           // slot of the callable itself
    // [callable, <self>, args..., kwargs...]
    //  ^p0                ^p1              ^_sp
    PyObject* callable = p1[-(ARGC + 2)];
    bool method_call = p1[-(ARGC + 1)] != PY_NULL;
    // handle boundmethod, do a patch
    if(is_non_tagged_type(callable, tp_bound_method)){
        if(method_call) FATAL_ERROR();    // a bound method cannot also carry a receiver
        auto& bm = CAST(BoundMethod&, callable);
        callable = bm.func;               // get unbound method
        p1[-(ARGC + 2)] = bm.func;
        p1[-(ARGC + 1)] = bm.self;
        method_call = true;
        // [unbound, self, args..., kwargs...]
    }
    // for a method call, self becomes args[0]
    ArgsView args(p1 - ARGC - int(method_call), p1);
    ArgsView kwargs(p1, s_data._sp);
    // scratch frame for argument binding; one per thread, reused across calls
    static THREAD_LOCAL PyObject* buffer[PK_MAX_CO_VARNAMES];
    if(is_non_tagged_type(callable, tp_native_func)){
        const auto& f = PK_OBJ_GET(NativeFunc, callable);
        PyObject* ret;
        if(f.decl != nullptr){
            // decl-style native func: full python calling convention
            int co_nlocals = f.decl->code->varnames.size();
            _prepare_py_call(buffer, args, kwargs, f.decl);
            // copy buffer back to stack
            s_data.reset(args.begin());
            for(int j=0; j<co_nlocals; j++) PUSH(buffer[j]);
            ret = f.call(vm, ArgsView(s_data._sp - co_nlocals, s_data._sp));
        }else{
            // legacy fixed-arity native func: positional args only
            if(KWARGC != 0) TypeError("old-style native_func does not accept keyword arguments");
            f.check_size(this, args);
            ret = f.call(this, args);
        }
        s_data.reset(p0);
        return ret;
    }
    if(is_non_tagged_type(callable, tp_function)){
        /*****************_py_call*****************/
        // callable must be a `function` object
        if(s_data.is_overflow()) StackOverflowError();
        const Function& fn = PK_OBJ_GET(Function, callable);
        const FuncDecl_& decl = fn.decl;
        const CodeObject* co = decl->code.get();
        int co_nlocals = co->varnames.size();
        _prepare_py_call(buffer, args, kwargs, decl);
        if(co->is_generator){
            // generators don't run now; capture the bound locals instead
            s_data.reset(p0);
            return _py_generator(
                Frame(&s_data, nullptr, co, fn._module, callable),
                ArgsView(buffer, buffer + co_nlocals)
            );
        }
        // copy buffer back to stack
        s_data.reset(args.begin());
        for(int j=0; j<co_nlocals; j++) PUSH(buffer[j]);
        callstack.emplace(&s_data, p0, co, fn._module, callable, FastLocals(co, args.begin()));
        // op_call: let the interpreter loop continue into the new frame
        if(op_call) return PY_OP_CALL;
        return _run_top_frame();
        /*****************_py_call*****************/
    }
    if(is_non_tagged_type(callable, tp_type)){
        if(method_call) FATAL_ERROR();
        // [type, NULL, args..., kwargs...]
        DEF_SNAME(__new__);
        PyObject* new_f = find_name_in_mro(callable, __new__);
        PyObject* obj;
#if PK_DEBUG_EXTRA_CHECK
        PK_ASSERT(new_f != nullptr);
#endif
        if(new_f == cached_object__new__) {
            // fast path for object.__new__
            Type t = PK_OBJ_GET(Type, callable);
            obj = vm->heap.gcnew<DummyInstance>(t, {});
        }else{
            // re-dispatch to __new__(cls, *args, **kwargs)
            PUSH(new_f);
            PUSH(PY_NULL);
            PUSH(callable);    // cls
            for(PyObject* o: args) PUSH(o);
            for(PyObject* o: kwargs) PUSH(o);
            // if obj is not an instance of callable, the behavior is undefined
            obj = vectorcall(ARGC+1, KWARGC);
        }
        // __init__
        PyObject* self;
        DEF_SNAME(__init__);
        callable = get_unbound_method(obj, __init__, &self, false);
        if (self != PY_NULL) {
            // replace `NULL` with `self`
            p1[-(ARGC + 2)] = callable;
            p1[-(ARGC + 1)] = self;
            // [init_f, self, args..., kwargs...]
            vectorcall(ARGC, KWARGC);
            // We just discard the return value of `__init__`
            // in cpython it raises a TypeError if the return value is not None
        }else{
            // manually reset the stack
            s_data.reset(p0);
        }
        return obj;
    }
    // handle `__call__` overload
    PyObject* self;
    DEF_SNAME(__call__);
    PyObject* call_f = get_unbound_method(callable, __call__, &self, false);
    if(self != PY_NULL){
        p1[-(ARGC + 2)] = call_f;
        p1[-(ARGC + 1)] = self;
        // [call_f, self, args..., kwargs...]
        return vectorcall(ARGC, KWARGC, false);
    }
    TypeError(OBJ_NAME(_t(callable)).escape() + " object is not callable");
    return nullptr;
}
// https://docs.python.org/3/howto/descriptor.html#invocation-from-an-instance
// Attribute lookup. Precedence: property (data descriptor) on the class,
// then the instance __dict__, then other class attributes -- with functions
// acting as non-data descriptors that bind into a BoundMethod. Returns
// nullptr (or raises, when throw_err) if nothing is found.
inline PyObject* VM::getattr(PyObject* obj, StrName name, bool throw_err){
    PyObject* objtype;
    // handle super() proxy: look up on the parent type, but bind to `first`
    if(is_non_tagged_type(obj, tp_super)){
        const Super& super = PK_OBJ_GET(Super, obj);
        obj = super.first;
        objtype = _t(super.second);
    }else{
        objtype = _t(obj);
    }
    PyObject* cls_var = find_name_in_mro(objtype, name);
    if(cls_var != nullptr){
        // handle descriptor
        if(is_non_tagged_type(cls_var, tp_property)){
            const Property& prop = _CAST(Property&, cls_var);
            return call(prop.getter, obj);
        }
    }
    // handle instance __dict__
    if(!is_tagged(obj) && obj->is_attr_valid()){
        PyObject* val = obj->attr().try_get(name);
        if(val != nullptr) return val;
    }
    if(cls_var != nullptr){
        // bound method is non-data descriptor
        if(is_non_tagged_type(cls_var, tp_function) || is_non_tagged_type(cls_var, tp_native_func)){
            return VAR(BoundMethod(obj, cls_var));
        }
        return cls_var;
    }
    if(throw_err) AttributeError(obj, name);
    return nullptr;
}
// used by OP_LOAD_METHOD
// try to load a unbound method (fallback to `getattr` if not found)
// Like getattr, but avoids allocating a BoundMethod: when the attribute is
// a function, the raw function is returned and the receiver is written to
// *self; otherwise *self stays PY_NULL and the attribute value is returned.
// With fallback=false only class-level functions are considered.
inline PyObject* VM::get_unbound_method(PyObject* obj, StrName name, PyObject** self, bool throw_err, bool fallback){
    *self = PY_NULL;
    PyObject* objtype;
    // handle super() proxy
    if(is_non_tagged_type(obj, tp_super)){
        const Super& super = PK_OBJ_GET(Super, obj);
        obj = super.first;
        objtype = _t(super.second);
    }else{
        objtype = _t(obj);
    }
    PyObject* cls_var = find_name_in_mro(objtype, name);
    if(fallback){
        if(cls_var != nullptr){
            // handle descriptor
            if(is_non_tagged_type(cls_var, tp_property)){
                const Property& prop = _CAST(Property&, cls_var);
                return call(prop.getter, obj);
            }
        }
        // handle instance __dict__
        if(!is_tagged(obj) && obj->is_attr_valid()){
            PyObject* val = obj->attr().try_get(name);
            if(val != nullptr) return val;
        }
    }
    if(cls_var != nullptr){
        // functions bind: report the receiver through *self
        if(is_non_tagged_type(cls_var, tp_function) || is_non_tagged_type(cls_var, tp_native_func)){
            *self = obj;
        }
        return cls_var;
    }
    if(throw_err) AttributeError(obj, name);
    return nullptr;
}
// Attribute assignment. A class-level property intercepts the write (its
// setter is called, or TypeError for a read-only property); otherwise the
// value goes into the instance __dict__. Tagged values and objects without
// an attribute table cannot take attributes.
inline void VM::setattr(PyObject* obj, StrName name, PyObject* value){
    PyObject* objtype;
    // handle super() proxy
    if(is_non_tagged_type(obj, tp_super)){
        Super& super = PK_OBJ_GET(Super, obj);
        obj = super.first;
        objtype = _t(super.second);
    }else{
        objtype = _t(obj);
    }
    PyObject* cls_var = find_name_in_mro(objtype, name);
    if(cls_var != nullptr){
        // handle descriptor
        if(is_non_tagged_type(cls_var, tp_property)){
            const Property& prop = _CAST(Property&, cls_var);
            if(prop.setter != vm->None){
                call(prop.setter, obj, value);
            }else{
                TypeError(fmt("readonly attribute: ", name.escape()));
            }
            return;
        }
    }
    // handle instance __dict__
    if(is_tagged(obj) || !obj->is_attr_valid()) TypeError("cannot set attribute");
    obj->attr().set(name, value);
}
// Register a fixed-arity C function as a method on a type object.
// NOTE(review): the boolean NativeFunc ctor flag (`true` here, `false` in
// bind_func) presumably marks "is a method"; its ctor is outside this
// chunk -- confirm at the declaration.
template<int ARGC>
PyObject* VM::bind_method(PyObject* obj, Str name, NativeFuncC fn) {
    check_non_tagged_type(obj, tp_type);   // methods may only live on types
    PyObject* nf = VAR(NativeFunc(fn, ARGC, true));
    obj->attr().set(name, nf);
    return nf;
}
// Register a fixed-arity C function as a plain attribute (e.g. on a
// module); no type check, since any attr-capable object may hold it.
template<int ARGC>
PyObject* VM::bind_func(PyObject* obj, Str name, NativeFuncC fn) {
    PyObject* nf = VAR(NativeFunc(fn, ARGC, false));
    obj->attr().set(name, nf);
    return nf;
}
// Convenience overload without a docstring.
inline PyObject* VM::bind(PyObject* obj, const char* sig, NativeFuncC fn){
    return bind(obj, sig, nullptr, fn);
}
// Bind a C function under a full Python signature. The signature string is
// compiled as `def <sig> : pass` and the resulting FuncDecl supplies the
// name, parameters, defaults and */** handling for the native call.
inline PyObject* VM::bind(PyObject* obj, const char* sig, const char* docstring, NativeFuncC fn){
    CodeObject_ co;
    try{
        // fn(a, b, *c, d=1) -> None
        co = compile("def " + Str(sig) + " : pass", "<bind>", EXEC_MODE);
    }catch(Exception& e){
        throw std::runtime_error("invalid signature: " + std::string(sig));
    }
    if(co->func_decls.size() != 1){
        throw std::runtime_error("expected 1 function declaration");
    }
    FuncDecl_ decl = co->func_decls[0];
    decl->signature = Str(sig);
    if(docstring != nullptr){
        decl->docstring = Str(docstring).strip();
    }
    PyObject* f_obj = VAR(NativeFunc(fn, decl));
    // the attribute name comes from the compiled declaration
    obj->attr().set(decl->code->name, f_obj);
    return f_obj;
}
// Route an exception into the VM. With no Python frame on the stack there
// is no bytecode handler to unwind to, so the exception is thrown straight
// to the C++ caller; otherwise it is pushed and _raise() starts the normal
// unwinding. NOTE(review): `is_re = false` presumably marks it as not a
// re-raise/runtime error -- confirm at the Exception declaration.
inline void VM::_error(Exception e){
    if(callstack.empty()){
        e.is_re = false;
        throw e;
    }
    PUSH(VAR(e));
    _raise();
}
// GC mark phase: traverse every root set -- pinned (_no_gc) objects, all
// call frames, the value stack, an embedder-provided extra marker, and the
// pending last exception.
inline void ManagedHeap::mark() {
    for(PyObject* obj: _no_gc) PK_OBJ_MARK(obj);
    for(auto& frame : vm->callstack.data()) frame._gc_mark();
    for(PyObject* obj: vm->s_data) PK_OBJ_MARK(obj);
    if(_gc_marker_ex) _gc_marker_ex(vm);
    if(vm->_last_exception) PK_OBJ_MARK(vm->_last_exception);
}
// Map a type id to its registered display name.
inline Str obj_type_name(VM *vm, Type type){
    return vm->_all_types[type].name;
}
// NOTE(review): both macros were already #undef'd right after their last
// use above; #undef of an undefined macro is a well-defined no-op, so
// these two lines are redundant and could be removed.
#undef PY_VAR_INT
#undef PY_VAR_FLOAT
/***************************************************/
// Shared __getitem__ for list and tuple: a slice index produces a new
// sequence of the same type, an int index (negative allowed, bounds
// checked by normalized_index) returns the element.
template<typename T>
PyObject* PyArrayGetItem(VM* vm, PyObject* obj, PyObject* index){
    static_assert(std::is_same_v<T, List> || std::is_same_v<T, Tuple>);
    const T& self = _CAST(T&, obj);
    if(is_non_tagged_type(index, vm->tp_slice)){
        const Slice& s = _CAST(Slice&, index);
        int start, stop, step;
        vm->parse_int_slice(s, self.size(), start, stop, step);
        List new_list;
        // loop direction follows the sign of step
        for(int i=start; step>0?i<stop:i>stop; i+=step) new_list.push_back(self[i]);
        return VAR(T(std::move(new_list)));
    }
    int i = CAST(int, index);
    i = vm->normalized_index(i, self.size());
    return self[i];
}
// Install a fast __hash__ slot: the raw function pointer is stored both in
// the type's m__hash__ slot (C fast path) and as userdata of a Python-
// visible __hash__ wrapper that retrieves and calls it.
inline void VM::bind__hash__(Type type, i64 (*f)(VM*, PyObject*)){
    PyObject* obj = _t(type);
    _all_types[type].m__hash__ = f;
    PyObject* nf = bind_method<0>(obj, "__hash__", [](VM* vm, ArgsView args){
        i64 ret = lambda_get_userdata<i64(*)(VM*, PyObject*)>(args.begin())(vm, args[0]);
        return VAR(ret);
    });
    PK_OBJ_GET(NativeFunc, nf).set_userdata(f);
}
// Same scheme for __len__.
inline void VM::bind__len__(Type type, i64 (*f)(VM*, PyObject*)){
    PyObject* obj = _t(type);
    _all_types[type].m__len__ = f;
    PyObject* nf = bind_method<0>(obj, "__len__", [](VM* vm, ArgsView args){
        i64 ret = lambda_get_userdata<i64(*)(VM*, PyObject*)>(args.begin())(vm, args[0]);
        return VAR(ret);
    });
    PK_OBJ_GET(NativeFunc, nf).set_userdata(f);
}
// Open-addressing probe: starting from hash & mask, follow CPython's
// `i = 5*i + 1` recurrence until the key is found (ok=true) or an empty
// slot marks where it would be inserted (ok=false). Assumes the table is
// never full, so the loop terminates.
inline void Dict::_probe(PyObject *key, bool &ok, int &i) const{
    ok = false;
    i = vm->py_hash(key) & _mask;
    while(_items[i].first != nullptr) {
        if(vm->py_equals(_items[i].first, key)) { ok = true; break; }
        // https://github.com/python/cpython/blob/3.8/Objects/dictobject.c#L166
        i = ((5*i) + 1) & _mask;
    }
}
  1469. inline void CodeObjectSerializer::write_object(VM *vm, PyObject *obj){
  1470. if(is_int(obj)) write_int(_CAST(i64, obj));
  1471. else if(is_float(obj)) write_float(_CAST(f64, obj));
  1472. else if(is_type(obj, vm->tp_str)) write_str(_CAST(Str&, obj));
  1473. else if(is_type(obj, vm->tp_bool)) write_bool(_CAST(bool, obj));
  1474. else if(obj == vm->None) write_none();
  1475. else if(obj == vm->Ellipsis) write_ellipsis();
  1476. else{
  1477. throw std::runtime_error(fmt(OBJ_NAME(vm->_t(obj)).escape(), " is not serializable"));
  1478. }
  1479. }
  1480. } // namespace pkpy