// vm.h — interpreter core: VM class, boxing/unboxing helpers, call machinery.
  1. #pragma once
  2. #include "codeobject.h"
  3. #include "common.h"
  4. #include "frame.h"
  5. #include "error.h"
  6. #include "gc.h"
  7. #include "memory.h"
  8. #include "obj.h"
  9. #include "str.h"
  10. namespace pkpy{
  11. Str _read_file_cwd(const Str& name, bool* ok);
// Generates the boxing/unboxing glue for a heap-boxed value type `ctype`
// whose type tag is stored in the VM member `ptype`:
//   - py_cast<ctype> / py_cast<ctype&>  : checked casts (check_type raises on mismatch)
//   - _py_cast<ctype> / _py_cast<ctype&>: unchecked casts (caller guarantees the type)
//   - py_var (copy / move overloads)    : box a C++ value into a gc-managed PyObject
// NOTE: no '//' comments inside the macro body — a trailing '\' after a
// comment would splice the next line into the comment.
#define DEF_NATIVE_2(ctype, ptype) \
    template<> inline ctype py_cast<ctype>(VM* vm, PyObject* obj) { \
        vm->check_type(obj, vm->ptype); \
        return OBJ_GET(ctype, obj); \
    } \
    template<> inline ctype _py_cast<ctype>(VM* vm, PyObject* obj) { \
        return OBJ_GET(ctype, obj); \
    } \
    template<> inline ctype& py_cast<ctype&>(VM* vm, PyObject* obj) { \
        vm->check_type(obj, vm->ptype); \
        return OBJ_GET(ctype, obj); \
    } \
    template<> inline ctype& _py_cast<ctype&>(VM* vm, PyObject* obj) { \
        return OBJ_GET(ctype, obj); \
    } \
    inline PyObject* py_var(VM* vm, const ctype& value) { return vm->heap.gcnew(vm->ptype, value);} \
    inline PyObject* py_var(VM* vm, ctype&& value) { return vm->heap.gcnew(vm->ptype, std::move(value));}
  29. class Generator final: public BaseIter {
  30. Frame frame;
  31. int state; // 0,1,2
  32. public:
  33. template<typename... Args>
  34. Generator(VM* vm, Frame&& frame)
  35. : BaseIter(vm), frame(std::move(frame)), state(0) {}
  36. PyObject* next() override;
  37. void _gc_mark() const override;
  38. };
// Per-type bookkeeping stored in VM::_all_types, indexed by Type.
struct PyTypeInfo{
    PyObject* obj;  // the type object itself
    Type base;      // base type index; base.index == -1 marks the root ("object")
    Str name;       // display name; qualified as "module.name" for non-builtin module types
};
// A stable handle to a frame on VM::callstack. The frames live in a
// std::vector that may reallocate when new frames are pushed, so we keep
// (vector*, index) instead of a raw Frame* that could dangle.
struct FrameId{
    std::vector<pkpy::Frame>* data;
    int index;
    FrameId(std::vector<pkpy::Frame>* data, int index) : data(data), index(index) {}
    Frame* operator->() const { return &data->operator[](index); }
};
// The interpreter core. Owns the GC heap, the frame stack, the type registry
// and the loaded modules; implements attribute lookup, the calling
// convention and error raising. One VM instance is one isolated interpreter.
class VM {
    VM* vm; // self reference for simplify code
public:
    ManagedHeap heap;
    stack< Frame > callstack;
    std::vector<PyTypeInfo> _all_types;   // type registry, indexed by Type (grows in new_type_object)
    NameDict _modules; // loaded modules
    std::map<StrName, Str> _lazy_modules; // lazy loaded modules (name -> source, not yet imported)
    PyObject* _py_op_call;   // sentinel: returned by call() when a frame was pushed (opCall path)
    PyObject* _py_op_yield;  // sentinel object; presumably the generator-yield marker — confirm in _run_top_frame
    PyObject* _py_null;      // internal "absent" marker (e.g. no bound self); never exposed to user code
    PyObject* None;
    PyObject* True;
    PyObject* False;
    PyObject* Ellipsis;
    PyObject* builtins; // builtins module
    PyObject* _main; // __main__ module
    std::stringstream _stdout_buffer;  // capture buffers, used when constructed with use_stdio == false
    std::stringstream _stderr_buffer;
    std::ostream* _stdout;
    std::ostream* _stderr;
    int recursionlimit = 1000;
    // for quick access
    Type tp_object, tp_type, tp_int, tp_float, tp_bool, tp_str;
    Type tp_list, tp_tuple;
    Type tp_function, tp_native_function, tp_iterator, tp_bound_method;
    Type tp_slice, tp_range, tp_module;
    Type tp_super, tp_exception, tp_star_wrapper;

    // use_stdio: route output to std::cout/std::cerr; otherwise buffer it so
    // the embedder can read it back.
    VM(bool use_stdio) : heap(this){
        this->vm = this;
        this->_stdout = use_stdio ? &std::cout : &_stdout_buffer;
        this->_stderr = use_stdio ? &std::cerr : &_stderr_buffer;
        callstack.data().reserve(8);
        init_builtin_types();
    }

    bool is_stdio_used() const { return _stdout == &std::cout; }

    // Handle to the innermost frame; see FrameId for why this is not a Frame*.
    FrameId top_frame() {
#if DEBUG_EXTRA_CHECK
        if(callstack.empty()) FATAL_ERROR();
#endif
        return FrameId(&callstack.data(), callstack.size()-1);
    }

    // str(obj): use the type's __str__ if present, otherwise fall back to repr.
    PyObject* asStr(PyObject* obj){
        PyObject* self;
        PyObject* f = get_unbound_method(obj, __str__, &self, false);
        if(self != _py_null) return call(f, Args{self});
        return asRepr(obj);
    }

    // iter(obj): iterators pass through; otherwise call __iter__ or raise TypeError.
    PyObject* asIter(PyObject* obj){
        if(is_type(obj, tp_iterator)) return obj;
        PyObject* self;
        PyObject* iter_f = get_unbound_method(obj, __iter__, &self, false);
        if(self != _py_null) return call(iter_f, Args{self});
        TypeError(OBJ_NAME(_t(obj)).escape() + " object is not iterable");
        return nullptr;
    }

    // list(iterable); returns the argument unchanged if it is already a list.
    PyObject* asList(PyObject* iterable){
        if(is_type(iterable, tp_list)) return iterable;
        return call(_t(tp_list), Args{iterable});
    }

    // Walk the (single-inheritance) base chain of `cls` looking for `name`.
    // Returns nullptr if no type in the chain defines it.
    PyObject* find_name_in_mro(PyObject* cls, StrName name){
        PyObject* val;
        do{
            val = cls->attr().try_get(name);
            if(val != nullptr) return val;
            Type cls_t = OBJ_GET(Type, cls);
            Type base = _all_types[cls_t].base;
            if(base.index == -1) break;     // reached "object"
            cls = _all_types[base].obj;
        }while(true);
        return nullptr;
    }

    // True if obj's type is cls_t or a subclass of it.
    bool isinstance(PyObject* obj, Type cls_t){
        Type obj_t = OBJ_GET(Type, _t(obj));
        do{
            if(obj_t == cls_t) return true;
            Type base = _all_types[obj_t].base;
            if(base.index == -1) break;
            obj_t = base;
        }while(true);
        return false;
    }

    // Look `name` up on the type of args[0] and call it with `args`.
    // NOTE: bypasses instance __dict__ and descriptors (see asRepr's TODO).
    PyObject* fast_call(StrName name, Args&& args){
        PyObject* val = find_name_in_mro(_t(args[0]), name);
        if(val != nullptr) return call(val, std::move(args));
        AttributeError(args[0], name);
        return nullptr;
    }

    // Convenience overload: positional args only, no kwargs, synchronous call.
    template<typename ArgT>
    std::enable_if_t<std::is_same_v<std::decay_t<ArgT>, Args>, PyObject*>
    call(PyObject* callable, ArgT&& args){
        return call(callable, std::forward<ArgT>(args), no_arg(), false);
    }

    // Compile and run `source` in `_module` (defaults to __main__).
    // On any Exception the summary is printed to _stderr, the call stack is
    // cleared, and nullptr is returned.
    PyObject* exec(Str source, Str filename, CompileMode mode, PyObject* _module=nullptr){
        if(_module == nullptr) _module = _main;
        try {
            CodeObject_ code = compile(source, filename, mode);
#if DEBUG_DIS_EXEC
            if(_module == _main) std::cout << disassemble(code) << '\n';
#endif
            return _exec(code, _module);
        }catch (const Exception& e){
            *_stderr << e.summary() << '\n';
        }
#if !DEBUG_FULL_EXCEPTION
        catch (const std::exception& e) {
            *_stderr << "An std::exception occurred! It could be a bug.\n";
            *_stderr << e.what() << '\n';
        }
#endif
        callstack.clear();
        return nullptr;
    }

    // Push a frame, enforcing the recursion limit (the _error call throws).
    // NOTE(review): callstack.size() is unsigned, recursionlimit is int —
    // fine while recursionlimit stays non-negative.
    template<typename ...Args>
    void _push_new_frame(Args&&... args){
        if(callstack.size() > recursionlimit){
            _error("RecursionError", "maximum recursion depth exceeded");
        }
        callstack.emplace(std::forward<Args>(args)...);
    }

    void _push_new_frame(Frame&& frame){
        if(callstack.size() > recursionlimit){
            _error("RecursionError", "maximum recursion depth exceeded");
        }
        callstack.emplace(std::move(frame));
    }

    // Push a frame built from `args` and run it to completion.
    template<typename ...Args>
    PyObject* _exec(Args&&... args){
        _push_new_frame(std::forward<Args>(args)...);
        return _run_top_frame();
    }

    // Wrap a native getter into a builtins `property` (read-only: fget only).
    PyObject* property(NativeFuncRaw fget){
        PyObject* p = builtins->attr("property");
        PyObject* method = heap.gcnew(tp_native_function, NativeFunc(fget, 1, false));
        return call(p, Args{method});
    }

    // Create and register a new type object. If `mod` is given the type is
    // also published as an attribute of that module, and non-builtin types
    // get a "module.name" qualified display name.
    PyObject* new_type_object(PyObject* mod, StrName name, Type base){
        PyObject* obj = heap._new<Type>(tp_type, _all_types.size());
        PyTypeInfo info{
            obj,
            base,
            (mod!=nullptr && mod!=builtins) ? Str(OBJ_NAME(mod)+"."+name.sv()): name.sv()
        };
        if(mod != nullptr) mod->attr().set(name, obj);
        _all_types.push_back(info);
        return obj;
    }

    // Same as above but unpublished; returns the new Type index.
    Type _new_type_object(StrName name, Type base=0) {
        PyObject* obj = new_type_object(nullptr, name, base);
        return OBJ_GET(Type, obj);
    }

    // Resolve a type by name: builtins first, then the full registry.
    // Throws std::runtime_error (not a Python error) if not found.
    PyObject* _find_type(const Str& type){
        PyObject* obj = builtins->attr().try_get(type);
        if(obj == nullptr){
            for(auto& t: _all_types) if(t.name == type) return t.obj;
            throw std::runtime_error(fmt("type not found: ", type));
        }
        return obj;
    }

    // Name-based binding helpers (resolve the type via _find_type).
    template<int ARGC>
    void bind_func(Str type, Str name, NativeFuncRaw fn) {
        bind_func<ARGC>(_find_type(type), name, fn);
    }

    template<int ARGC>
    void bind_method(Str type, Str name, NativeFuncRaw fn) {
        bind_method<ARGC>(_find_type(type), name, fn);
    }

    // A static method is just a plain function bound on the type (no self).
    template<int ARGC, typename... Args>
    void bind_static_method(Args&&... args) {
        bind_func<ARGC>(std::forward<Args>(args)...);
    }

    // Bind the same native method on several types at once.
    template<int ARGC>
    void _bind_methods(std::vector<Str> types, Str name, NativeFuncRaw fn) {
        for(auto& type: types) bind_method<ARGC>(type, name, fn);
    }

    template<int ARGC>
    void bind_builtin_func(Str name, NativeFuncRaw fn) {
        bind_func<ARGC>(builtins, name, fn);
    }

    // Map a possibly-negative Python index into [0, size); raises IndexError
    // (which throws) when out of range, so the returned index is always valid.
    int normalized_index(int index, int size){
        if(index < 0) index += size;
        if(index < 0 || index >= size){
            IndexError(std::to_string(index) + " not in [0, " + std::to_string(size) + ")");
        }
        return index;
    }

    // Box any BaseIter-derived value as an iterator object.
    template<typename P>
    PyObject* PyIter(P&& value) {
        static_assert(std::is_base_of_v<BaseIter, std::decay_t<P>>);
        return heap.gcnew<P>(tp_iterator, std::forward<P>(value));
    }

    // Checked unboxing of an iterator object back to its BaseIter.
    BaseIter* PyIter_AS_C(PyObject* obj)
    {
        check_type(obj, tp_iterator);
        return static_cast<BaseIter*>(obj->value());
    }

    /***** Error Reporter *****/
    // All of these funnel into _error(Exception), which throws; control never
    // returns to the caller normally.
    void _error(StrName name, const Str& msg){
        _error(Exception(name, msg));
    }

    // Jump to the innermost exception handler if one exists, otherwise
    // propagate as an unhandled exception.
    void _raise(){
        bool ok = top_frame()->jump_to_exception_handler();
        if(ok) throw HandledException();
        else throw UnhandledException();
    }

    void IOError(const Str& msg) { _error("IOError", msg); }
    void NotImplementedError(){ _error("NotImplementedError", ""); }
    void TypeError(const Str& msg){ _error("TypeError", msg); }
    void ZeroDivisionError(){ _error("ZeroDivisionError", "division by zero"); }
    void IndexError(const Str& msg){ _error("IndexError", msg); }
    void ValueError(const Str& msg){ _error("ValueError", msg); }
    void NameError(StrName name){ _error("NameError", fmt("name ", name.escape() + " is not defined")); }

    void AttributeError(PyObject* obj, StrName name){
        // OBJ_NAME calls getattr, which may lead to a infinite recursion
        _error("AttributeError", fmt("type ", OBJ_NAME(_t(obj)).escape(), " has no attribute ", name.escape()));
    }

    void AttributeError(Str msg){ _error("AttributeError", msg); }

    // Raise TypeError unless obj is exactly of `type` (per is_type).
    void check_type(PyObject* obj, Type type){
        if(is_type(obj, type)) return;
        TypeError("expected " + OBJ_NAME(_t(type)).escape() + ", but got " + OBJ_NAME(_t(obj)).escape());
    }

    // Type index -> type object.
    PyObject* _t(Type t){
        return _all_types[t.index].obj;
    }

    // Object -> its type object. Tagged ints/floats short-circuit since they
    // carry no object header.
    PyObject* _t(PyObject* obj){
        if(is_int(obj)) return _t(tp_int);
        if(is_float(obj)) return _t(tp_float);
        return _all_types[OBJ_GET(Type, _t(obj->type)).index].obj;
    }

    ~VM() {
        // explicit clears before the heap tears down the remaining objects
        callstack.clear();
        _all_types.clear();
        _modules.clear();
        _lazy_modules.clear();
    }

    /***** Declarations (implemented below or in other translation units) *****/
    CodeObject_ compile(Str source, Str filename, CompileMode mode, bool unknown_global_scope=false);
    PyObject* num_negated(PyObject* obj);
    f64 num_to_float(PyObject* obj);
    bool asBool(PyObject* obj);
    i64 hash(PyObject* obj);
    PyObject* asRepr(PyObject* obj);
    PyObject* new_module(StrName name);
    Str disassemble(CodeObject_ co);
    void init_builtin_types();
    PyObject* call(PyObject* callable, Args args, const Args& kwargs, bool opCall);
    PyObject* _py_call(PyObject* callable, ArgsView args, ArgsView kwargs);
    void unpack_args(Args& args);
    PyObject* getattr(PyObject* obj, StrName name, bool throw_err=true);
    PyObject* get_unbound_method(PyObject* obj, StrName name, PyObject** self, bool throw_err=true, bool fallback=false);
    template<typename T>
    void setattr(PyObject* obj, StrName name, T&& value);
    template<int ARGC>
    void bind_method(PyObject*, Str, NativeFuncRaw);
    template<int ARGC>
    void bind_func(PyObject*, Str, NativeFuncRaw);
    void _error(Exception);
    PyObject* _run_top_frame();
    void post_init();
};
// Invoke a native function, enforcing its declared arity.
// Bound methods carry an implicit self in `args`, which is excluded from the
// count; argc == -1 means variadic (no arity check). TypeError() throws, so
// reaching f(vm, args) implies the check passed.
inline PyObject* NativeFunc::operator()(VM* vm, Args& args) const{
    int args_size = args.size() - (int)method; // remove self
    if(argc != -1 && args_size != argc) {
        vm->TypeError(fmt("expected ", argc, " arguments, but got ", args_size));
    }
    return f(vm, args);
}
// Currently a no-op. The commented-out code sketches a perfect-hash
// pre-sizing scheme for the locals NameDict; kept for reference.
inline void CodeObject::optimize(VM* vm){
    // uint32_t base_n = (uint32_t)(names.size() / kLocalsLoadFactor + 0.5);
    // perfect_locals_capacity = std::max(find_next_capacity(base_n), NameDict::__Capacity);
    // perfect_hash_seed = find_perfect_hash_seed(perfect_locals_capacity, names);
}
// Instantiate the cast/boxing glue for every heap-boxed builtin value type.
// (int/float/bool are tagged pointers and are handled separately below.)
DEF_NATIVE_2(Str, tp_str)
DEF_NATIVE_2(List, tp_list)
DEF_NATIVE_2(Tuple, tp_tuple)
DEF_NATIVE_2(Function, tp_function)
DEF_NATIVE_2(NativeFunc, tp_native_function)
DEF_NATIVE_2(BoundMethod, tp_bound_method)
DEF_NATIVE_2(Range, tp_range)
DEF_NATIVE_2(Slice, tp_slice)
DEF_NATIVE_2(Exception, tp_exception)
DEF_NATIVE_2(StarWrapper, tp_star_wrapper)
// Small ints are tagged pointers: the value lives in the upper 62 bits and
// the low 2 bits are the tag. An arithmetic right shift by 2 recovers the
// signed value. py_cast checks the tag; _py_cast trusts the caller.
#define PY_CAST_INT(T) \
    template<> inline T py_cast<T>(VM* vm, PyObject* obj){ \
        vm->check_type(obj, vm->tp_int); \
        return (T)(BITS(obj) >> 2); \
    } \
    template<> inline T _py_cast<T>(VM* vm, PyObject* obj){ \
        return (T)(BITS(obj) >> 2); \
    }

PY_CAST_INT(char)
PY_CAST_INT(short)
PY_CAST_INT(int)
PY_CAST_INT(long)
PY_CAST_INT(long long)
PY_CAST_INT(unsigned char)
PY_CAST_INT(unsigned short)
PY_CAST_INT(unsigned int)
PY_CAST_INT(unsigned long)
PY_CAST_INT(unsigned long long)
// Tagged floats store the double's bit pattern with the low 2 bits replaced
// by the tag (see PY_VAR_FLOAT below). Clearing those 2 bits recovers the
// stored pattern, which BitsCvt reinterprets as f64.
template<> inline float py_cast<float>(VM* vm, PyObject* obj){
    vm->check_type(obj, vm->tp_float);
    i64 bits = BITS(obj);
    bits = (bits >> 2) << 2;    // clear the 2 tag bits
    return BitsCvt(bits)._float;
}

template<> inline float _py_cast<float>(VM* vm, PyObject* obj){
    i64 bits = BITS(obj);
    bits = (bits >> 2) << 2;
    return BitsCvt(bits)._float;
}

template<> inline double py_cast<double>(VM* vm, PyObject* obj){
    vm->check_type(obj, vm->tp_float);
    i64 bits = BITS(obj);
    bits = (bits >> 2) << 2;
    return BitsCvt(bits)._float;
}

template<> inline double _py_cast<double>(VM* vm, PyObject* obj){
    i64 bits = BITS(obj);
    bits = (bits >> 2) << 2;
    return BitsCvt(bits)._float;
}
  371. #define PY_VAR_INT(T) \
  372. inline PyObject* py_var(VM* vm, T _val){ \
  373. i64 val = static_cast<i64>(_val); \
  374. if(((val << 2) >> 2) != val){ \
  375. vm->_error("OverflowError", std::to_string(val) + " is out of range"); \
  376. } \
  377. val = (val << 2) | 0b01; \
  378. return reinterpret_cast<PyObject*>(val); \
  379. }
  380. PY_VAR_INT(char)
  381. PY_VAR_INT(short)
  382. PY_VAR_INT(int)
  383. PY_VAR_INT(long)
  384. PY_VAR_INT(long long)
  385. PY_VAR_INT(unsigned char)
  386. PY_VAR_INT(unsigned short)
  387. PY_VAR_INT(unsigned int)
  388. PY_VAR_INT(unsigned long)
  389. PY_VAR_INT(unsigned long long)
// Box a C++ float/double as a tagged pointer: the double's bit pattern with
// its low 2 mantissa bits deliberately discarded and replaced by tag 0b10.
// This trades 2 bits of precision for pointer-free float values.
#define PY_VAR_FLOAT(T) \
    inline PyObject* py_var(VM* vm, T _val){ \
        f64 val = static_cast<f64>(_val); \
        i64 bits = BitsCvt(val)._int; \
        bits = (bits >> 2) << 2; \
        bits |= 0b10; \
        return reinterpret_cast<PyObject*>(bits); \
    }

PY_VAR_FLOAT(float)
PY_VAR_FLOAT(double)
  400. inline PyObject* py_var(VM* vm, bool val){
  401. return val ? vm->True : vm->False;
  402. }
  403. template<> inline bool py_cast<bool>(VM* vm, PyObject* obj){
  404. vm->check_type(obj, vm->tp_bool);
  405. return obj == vm->True;
  406. }
  407. template<> inline bool _py_cast<bool>(VM* vm, PyObject* obj){
  408. return obj == vm->True;
  409. }
// String boxing convenience overloads: all three funnel into the Str
// py_var generated by DEF_NATIVE_2 above.
inline PyObject* py_var(VM* vm, const char val[]){
    return VAR(Str(val));
}

inline PyObject* py_var(VM* vm, std::string val){
    return VAR(Str(std::move(val)));    // sink parameter: moved into Str
}

inline PyObject* py_var(VM* vm, std::string_view val){
    return VAR(Str(val));
}
// Raise TypeError unless obj is an instance of the user-registered class T
// (T must expose a static _type(vm) returning its Type).
template<typename T>
void _check_py_class(VM* vm, PyObject* obj){
    vm->check_type(obj, T::_type(vm));
}
  423. inline PyObject* VM::num_negated(PyObject* obj){
  424. if (is_int(obj)){
  425. return VAR(-CAST(i64, obj));
  426. }else if(is_float(obj)){
  427. return VAR(-CAST(f64, obj));
  428. }
  429. TypeError("expected 'int' or 'float', got " + OBJ_NAME(_t(obj)).escape());
  430. return nullptr;
  431. }
  432. inline f64 VM::num_to_float(PyObject* obj){
  433. if(is_float(obj)){
  434. return CAST(f64, obj);
  435. } else if (is_int(obj)){
  436. return (f64)CAST(i64, obj);
  437. }
  438. TypeError("expected 'int' or 'float', got " + OBJ_NAME(_t(obj)).escape());
  439. return 0;
  440. }
// Python truthiness. Order matters: exact bool first, then None, then
// numeric zero checks, then the __len__ protocol; everything else is truthy.
inline bool VM::asBool(PyObject* obj){
    if(is_type(obj, tp_bool)) return obj == True;
    if(obj == None) return false;
    if(is_type(obj, tp_int)) return CAST(i64, obj) != 0;
    if(is_type(obj, tp_float)) return CAST(f64, obj) != 0.0;
    PyObject* self;
    PyObject* len_f = get_unbound_method(obj, __len__, &self, false);
    if(self != _py_null){
        // containers: truthy iff non-empty
        PyObject* ret = call(len_f, Args{self});
        return CAST(i64, ret) > 0;
    }
    return true;
}
// hash(obj) for the hashable builtins; raises TypeError otherwise.
// NOTE(review): unlike CPython, an int and the equal float do not hash
// equal here — verify callers don't mix them as dict keys.
inline i64 VM::hash(PyObject* obj){
    if (is_type(obj, tp_str)) return CAST(Str&, obj).hash();
    if (is_int(obj)) return CAST(i64, obj);
    if (is_type(obj, tp_tuple)) {
        // combine element hashes; 0x9e3779b9 is the golden-ratio constant
        i64 x = 1000003;
        const Tuple& items = CAST(Tuple&, obj);
        for (int i=0; i<items.size(); i++) {
            i64 y = hash(items[i]);
            // recommended by Github Copilot
            x = x ^ (y + 0x9e3779b9 + (x << 6) + (x >> 2));
        }
        return x;
    }
    if (is_type(obj, tp_type)) return BITS(obj);    // identity hash for types
    if (is_type(obj, tp_bool)) return _CAST(bool, obj) ? 1 : 0;
    if (is_float(obj)){
        f64 val = CAST(f64, obj);
        return (i64)std::hash<f64>()(val);
    }
    TypeError("unhashable type: " + OBJ_NAME(_t(obj)).escape());
    return 0;
}
// repr(obj): dispatch __repr__ via the type (no instance-dict lookup).
inline PyObject* VM::asRepr(PyObject* obj){
    // TODO: fastcall does not take care of super() proxy!
    return fast_call(__repr__, Args{obj});
}
// Create and register a fresh, empty module object under `name`.
// Re-registering an existing name is a fatal error (see comment below).
inline PyObject* VM::new_module(StrName name) {
    PyObject* obj = heap._new<DummyModule>(tp_module, DummyModule());
    obj->attr().set(__name__, VAR(name.sv()));
    // we do not allow override in order to avoid memory leak
    // it is because Module objects are not garbage collected
    if(_modules.contains(name)) FATAL_ERROR();
    _modules.set(name, obj);
    return obj;
}
// Render a human-readable bytecode listing of `co`, then recursively of all
// nested function declarations. Layout per instruction:
//   <source line> <"-> " if jump target> <index> <opcode> <arg (+decoded)> <block type>
inline Str VM::disassemble(CodeObject_ co){
    // pad/truncate a string to exactly n columns
    auto pad = [](const Str& s, const int n){
        if(s.length() >= n) return s.substr(0, n);
        return s + std::string(n - s.length(), ' ');
    };

    // collect jump destinations so they can be marked with "-> "
    std::vector<int> jumpTargets;
    for(auto byte : co->codes){
        if(byte.op == OP_JUMP_ABSOLUTE || byte.op == OP_POP_JUMP_IF_FALSE){
            jumpTargets.push_back(byte.arg);
        }
    }

    std::stringstream ss;
    int prev_line = -1;
    for(int i=0; i<co->codes.size(); i++){
        const Bytecode& byte = co->codes[i];
        // print the source line number only when it changes
        Str line = std::to_string(co->lines[i]);
        if(co->lines[i] == prev_line) line = "";
        else{
            if(prev_line != -1) ss << "\n";
            prev_line = co->lines[i];
        }
        std::string pointer;
        if(std::find(jumpTargets.begin(), jumpTargets.end(), i) != jumpTargets.end()){
            pointer = "-> ";
        }else{
            pointer = " ";
        }
        ss << pad(line, 8) << pointer << pad(std::to_string(i), 3);
        ss << " " << pad(OP_NAMES[byte.op], 20) << " ";
        // ss << pad(byte.arg == -1 ? "" : std::to_string(byte.arg), 5);
        std::string argStr = byte.arg == -1 ? "" : std::to_string(byte.arg);
        // decode the argument for opcodes whose arg is an index into a table
        switch(byte.op){
            case OP_LOAD_CONST:
                argStr += fmt(" (", CAST(Str, asRepr(co->consts[byte.arg])), ")");
                break;
            case OP_LOAD_NAME: case OP_LOAD_GLOBAL: case OP_LOAD_NONLOCAL: case OP_STORE_GLOBAL:
            case OP_LOAD_ATTR: case OP_LOAD_METHOD: case OP_STORE_ATTR: case OP_DELETE_ATTR:
            case OP_IMPORT_NAME: case OP_BEGIN_CLASS:
            case OP_DELETE_GLOBAL:
                argStr += fmt(" (", StrName(byte.arg).sv(), ")");
                break;
            case OP_LOAD_FAST: case OP_STORE_FAST: case OP_DELETE_FAST:
                argStr += fmt(" (", co->varnames[byte.arg].sv(), ")");
                break;
            case OP_BINARY_OP:
                argStr += fmt(" (", BINARY_SPECIAL_METHODS[byte.arg], ")");
                break;
            case OP_LOAD_FUNCTION:
                argStr += fmt(" (", co->func_decls[byte.arg]->code->name, ")");
                break;
        }
        ss << pad(argStr, 40); // may overflow
        ss << co->blocks[byte.block].type;
        if(i != co->codes.size() - 1) ss << '\n';
    }

    // append listings for nested functions
    for(auto& decl: co->func_decls){
        ss << "\n\n" << "Disassembly of " << decl->code->name << ":\n";
        ss << disassemble(decl->code);
    }
    ss << "\n";
    return Str(ss.str());
}
// Bootstrap the type system. The order of _new_type_object calls is
// significant: it fixes each Type's numeric index (tp_object must be 0,
// tp_type 1, and int/float must land on kTpIntIndex/kTpFloatIndex because
// the tagged-pointer fast paths rely on those indices).
inline void VM::init_builtin_types(){
    // "object" and "type" are created by hand since new_type_object needs them
    _all_types.push_back({heap._new<Type>(Type(1), Type(0)), -1, "object"});
    _all_types.push_back({heap._new<Type>(Type(1), Type(1)), 0, "type"});
    tp_object = 0; tp_type = 1;

    tp_int = _new_type_object("int");
    tp_float = _new_type_object("float");
    if(tp_int.index != kTpIntIndex || tp_float.index != kTpFloatIndex) FATAL_ERROR();

    tp_bool = _new_type_object("bool");
    tp_str = _new_type_object("str");
    tp_list = _new_type_object("list");
    tp_tuple = _new_type_object("tuple");
    tp_slice = _new_type_object("slice");
    tp_range = _new_type_object("range");
    tp_module = _new_type_object("module");
    tp_star_wrapper = _new_type_object("_star_wrapper");
    tp_function = _new_type_object("function");
    tp_native_function = _new_type_object("native_function");
    tp_iterator = _new_type_object("iterator");
    tp_bound_method = _new_type_object("bound_method");
    tp_super = _new_type_object("super");
    tp_exception = _new_type_object("Exception");

    // singletons (None/Ellipsis get private one-off types)
    this->None = heap._new<Dummy>(_new_type_object("NoneType"), {});
    this->Ellipsis = heap._new<Dummy>(_new_type_object("ellipsis"), {});
    this->True = heap._new<Dummy>(tp_bool, {});
    this->False = heap._new<Dummy>(tp_bool, {});
    this->_py_null = heap._new<Dummy>(_new_type_object("_py_null"), {});
    this->_py_op_call = heap._new<Dummy>(_new_type_object("_py_op_call"), {});
    this->_py_op_yield = heap._new<Dummy>(_new_type_object("_py_op_yield"), {});

    this->builtins = new_module("builtins");
    this->_main = new_module("__main__");

    // setup public types
    builtins->attr().set("type", _t(tp_type));
    builtins->attr().set("object", _t(tp_object));
    builtins->attr().set("bool", _t(tp_bool));
    builtins->attr().set("int", _t(tp_int));
    builtins->attr().set("float", _t(tp_float));
    builtins->attr().set("str", _t(tp_str));
    builtins->attr().set("list", _t(tp_list));
    builtins->attr().set("tuple", _t(tp_tuple));
    builtins->attr().set("range", _t(tp_range));

    post_init();

    // attribute tables are stable from here on; switch them to perfect hashing
    for(int i=0; i<_all_types.size(); i++){
        _all_types[i].obj->attr()._try_perfect_rehash();
    }
    for(auto [k, v]: _modules.items()) v->attr()._try_perfect_rehash();
}
// Bind arguments to a Python `function` object's locals and either push a
// frame (returns nullptr) or, for generators, return the iterator directly.
// `kwargs` is a flat [key, value, key, value, ...] sequence (see the i += 2
// loop); keys are StrName indices boxed as ints.
inline PyObject* VM::_py_call(PyObject* callable, ArgsView args, ArgsView kwargs){
    // callable is a `function` object
    const Function& fn = CAST(Function&, callable);
    const CodeObject* co = fn.decl->code.get();
    FastLocals locals(co);

    int i = 0;
    if(args.size() < fn.decl->args.size()){
        vm->TypeError(fmt(
            "expected ",
            fn.decl->args.size(),
            " positional arguments, but got ",
            args.size(),
            " (", fn.decl->code->name, ')'
        ));
    }

    // prepare args
    for(int index: fn.decl->args) locals[index] = args[i++];
    // prepare kwdefaults
    for(auto& kv: fn.decl->kwargs) locals[kv.key] = kv.value;

    // handle *args
    if(fn.decl->starred_arg != -1){
        List vargs; // handle *args
        while(i < args.size()) vargs.push_back(args[i++]);
        locals[fn.decl->starred_arg] = VAR(Tuple(std::move(vargs)));
    }else{
        // kwdefaults override: surplus positionals fill keyword params in order
        for(auto& kv: fn.decl->kwargs){
            if(i < args.size()){
                locals[kv.key] = args[i++];
            }else{
                break;
            }
        }
        if(i < args.size()) TypeError(fmt("too many arguments", " (", fn.decl->code->name, ')'));
    }

    // apply explicit keyword arguments
    for(int i=0; i<kwargs.size(); i+=2){
        StrName key = CAST(int, kwargs[i]);
        // try_set has nullptr check
        // TODO: optimize this
        bool ok = locals.try_set(key, kwargs[i+1]);
        if(!ok){
            TypeError(fmt(key.escape(), " is an invalid keyword argument for ", co->name, "()"));
        }
    }

    // functions defined at top level carry their module; closures inherit the caller's
    PyObject* _module = fn._module != nullptr ? fn._module : top_frame()->_module;
    if(co->is_generator){
        return PyIter(Generator(this, Frame(co, _module, std::move(locals), fn._closure)));
    }
    _push_new_frame(co, _module, std::move(locals), fn._closure);
    return nullptr;
}
// TODO: callable/args here may be garbage collected accidentally
// The universal call path. Handles, in order: bound methods (unwrap and
// prepend self), native functions, Python functions (frame push or inline
// run depending on opCall), type objects (__new__/__init__ construction),
// and finally arbitrary objects with __call__.
inline PyObject* VM::call(PyObject* callable, Args args, const Args& kwargs, bool opCall){
    if(is_type(callable, tp_bound_method)){
        auto& bm = CAST(BoundMethod&, callable);
        callable = bm.method; // get unbound method
        args.extend_self(bm.obj);
    }

    if(is_type(callable, tp_native_function)){
        const auto& f = OBJ_GET(NativeFunc, callable);
        if(kwargs.size() != 0) TypeError("native_function does not accept keyword arguments");
        return f(this, args);
    } else if(is_type(callable, tp_function)){
        // ret is nullptr or a generator
        PyObject* ret = _py_call(callable, args, kwargs);
        if(ret != nullptr) return ret;
        // opCall: let the dispatch loop run the new frame instead of recursing
        if(opCall) return _py_op_call;
        return _run_top_frame();
    }

    if(is_type(callable, tp_type)){
        // TODO: derived __new__ ?
        PyObject* new_f = callable->attr().try_get(__new__);
        PyObject* obj;
        if(new_f != nullptr){
            // should not use std::move here, since we will reuse args in possible __init__
            obj = call(new_f, args, kwargs, false);
            // __new__ may return an unrelated object; skip __init__ in that case
            if(!isinstance(obj, OBJ_GET(Type, callable))) return obj;
        }else{
            obj = heap.gcnew<DummyInstance>(OBJ_GET(Type, callable), {});
        }
        PyObject* self;
        PyObject* init_f = get_unbound_method(obj, __init__, &self, false);
        if (self != _py_null) {
            args.extend_self(self);
            call(init_f, std::move(args), kwargs, false);
        }
        return obj;
    }

    // fall back to the __call__ protocol
    PyObject* self;
    PyObject* call_f = get_unbound_method(callable, __call__, &self, false);
    if(self != _py_null){
        args.extend_self(self);
        return call(call_f, std::move(args), kwargs, false);
    }
    TypeError(OBJ_NAME(_t(callable)).escape() + " object is not callable");
    return None;
}
  694. inline void VM::unpack_args(Args& args){
  695. List unpacked;
  696. for(int i=0; i<args.size(); i++){
  697. if(is_type(args[i], tp_star_wrapper)){
  698. auto& star = _CAST(StarWrapper&, args[i]);
  699. List& list = CAST(List&, asList(star.obj));
  700. unpacked.extend(list);
  701. }else{
  702. unpacked.push_back(args[i]);
  703. }
  704. }
  705. args = Args(std::move(unpacked));
  706. }
// https://docs.python.org/3/howto/descriptor.html#invocation-from-an-instance
// Attribute lookup, in precedence order: descriptor __get__ on the class
// variable, then the instance __dict__, then the plain class variable
// (functions become bound methods). Returns nullptr (or raises) if missing.
inline PyObject* VM::getattr(PyObject* obj, StrName name, bool throw_err){
    PyObject* objtype = _t(obj);
    // handle super() proxy
    if(is_type(obj, tp_super)){
        const Super& super = OBJ_GET(Super, obj);
        obj = super.first;          // lookup starts at the base type,
        objtype = _t(super.second); // but binds against the real instance
    }
    PyObject* cls_var = find_name_in_mro(objtype, name);
    if(cls_var != nullptr){
        // handle descriptor
        PyObject* descr_get = _t(cls_var)->attr().try_get(__get__);
        if(descr_get != nullptr) return call(descr_get, Args{cls_var, obj});
    }
    // handle instance __dict__
    if(!is_tagged(obj) && obj->is_attr_valid()){
        PyObject* val = obj->attr().try_get(name);
        if(val != nullptr) return val;
    }
    if(cls_var != nullptr){
        // bound method is non-data descriptor
        if(is_type(cls_var, tp_function) || is_type(cls_var, tp_native_function)){
            return VAR(BoundMethod(obj, cls_var));
        }
        return cls_var;
    }
    if(throw_err) AttributeError(obj, name);
    return nullptr;
}
// used by OP_LOAD_METHOD
// try to load a unbound method (fallback to `getattr` if not found)
// On success for a function-valued class attribute, *self receives the
// receiver and the unbound function is returned — avoiding a BoundMethod
// allocation. *self stays _py_null when no implicit receiver applies.
inline PyObject* VM::get_unbound_method(PyObject* obj, StrName name, PyObject** self, bool throw_err, bool fallback){
    *self = _py_null;
    PyObject* objtype = _t(obj);
    // handle super() proxy
    if(is_type(obj, tp_super)){
        const Super& super = OBJ_GET(Super, obj);
        obj = super.first;
        objtype = _t(super.second);
    }
    PyObject* cls_var = find_name_in_mro(objtype, name);

    // fallback == true replicates full getattr precedence (descriptors and
    // instance __dict__) before the unbound-method fast path
    if(fallback){
        if(cls_var != nullptr){
            // handle descriptor
            PyObject* descr_get = _t(cls_var)->attr().try_get(__get__);
            if(descr_get != nullptr) return call(descr_get, Args{cls_var, obj});
        }
        // handle instance __dict__
        if(!is_tagged(obj) && obj->is_attr_valid()){
            PyObject* val = obj->attr().try_get(name);
            if(val != nullptr) return val;
        }
    }

    if(cls_var != nullptr){
        if(is_type(cls_var, tp_function) || is_type(cls_var, tp_native_function)){
            *self = obj;    // caller passes obj explicitly instead of binding
        }
        return cls_var;
    }
    if(throw_err) AttributeError(obj, name);
    return nullptr;
}
// Attribute assignment. A class variable whose type defines __get__ is a
// descriptor: delegate to its __set__, or raise if it has none (read-only).
// Otherwise write into the instance __dict__ (raises on tagged values and
// objects without an attribute table).
template<typename T>
inline void VM::setattr(PyObject* obj, StrName name, T&& value){
    static_assert(std::is_same_v<std::decay_t<T>, PyObject*>);
    PyObject* objtype = _t(obj);
    // handle super() proxy
    if(is_type(obj, tp_super)){
        Super& super = OBJ_GET(Super, obj);
        obj = super.first;
        objtype = _t(super.second);
    }
    PyObject* cls_var = find_name_in_mro(objtype, name);
    if(cls_var != nullptr){
        // handle descriptor
        PyObject* cls_var_t = _t(cls_var);
        if(cls_var_t->attr().contains(__get__)){
            PyObject* descr_set = cls_var_t->attr().try_get(__set__);
            if(descr_set != nullptr){
                call(descr_set, Args{cls_var, obj, std::forward<T>(value)});
            }else{
                TypeError(fmt("readonly attribute: ", name.escape()));
            }
            return;
        }
    }
    // handle instance __dict__
    if(is_tagged(obj) || !obj->is_attr_valid()) TypeError("cannot set attribute");
    obj->attr().set(name, std::forward<T>(value));
}
// Attach a native function as a method on a type object. `true` marks it as
// a method, so its arity excludes the implicit self (see NativeFunc::operator()).
template<int ARGC>
void VM::bind_method(PyObject* obj, Str name, NativeFuncRaw fn) {
    check_type(obj, tp_type);
    obj->attr().set(name, VAR(NativeFunc(fn, ARGC, true)));
}

// Attach a plain native function (no implicit self) to any object that has
// an attribute table, e.g. a module or a type (as a static method).
template<int ARGC>
void VM::bind_func(PyObject* obj, Str name, NativeFuncRaw fn) {
    obj->attr().set(name, VAR(NativeFunc(fn, ARGC, false)));
}
// Raise a Python exception. With no frame on the stack there is no handler
// to unwind to, so the Exception escapes as a C++ exception (is_re = false
// marks it as non-runtime). Otherwise push it onto the frame's value stack
// and let _raise() transfer control to the nearest handler.
inline void VM::_error(Exception e){
    if(callstack.empty()){
        e.is_re = false;
        throw e;
    }
    Frame* frame = &callstack.top();
    frame->_s.push(VAR(e));
    _raise();
}
// GC root marking: pinned (_no_gc) objects plus every live call frame.
inline void ManagedHeap::mark() {
    for(PyObject* obj: _no_gc) OBJ_MARK(obj);
    for(auto& frame : vm->callstack.data()) frame._gc_mark();
}
  820. inline Str obj_type_name(VM *vm, Type type){
  821. return vm->_all_types[type].name;
  822. }
  823. } // namespace pkpy