vm.h 31 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929
#pragma once
#include "common.h"
#include "frame.h"
#include "error.h"
#include "gc.h"
#include "memory.h"
#include "obj.h"
#include "str.h"
#include <cstdint>
#include <memory>
  10. namespace pkpy{
  11. Str _read_file_cwd(const Str& name, bool* ok);
// DEF_NATIVE_2(ctype, ptype): instantiates the full cast/box surface for a
// heap-boxed payload type `ctype` whose VM type tag lives in `vm->ptype`:
//   - py_cast<ctype>/<ctype&>  : checked extraction (raises TypeError on mismatch)
//   - _py_cast<ctype>/<ctype&> : unchecked extraction (caller guarantees the type)
//   - py_var(vm, const ctype&) and py_var(vm, ctype&&) : box a value into a
//     new GC-managed object via heap.gcnew.
// Comments cannot appear inside the macro body (backslash continuations).
#define DEF_NATIVE_2(ctype, ptype) \
template<> inline ctype py_cast<ctype>(VM* vm, PyObject* obj) { \
    vm->check_type(obj, vm->ptype); \
    return OBJ_GET(ctype, obj); \
} \
template<> inline ctype _py_cast<ctype>(VM* vm, PyObject* obj) { \
    return OBJ_GET(ctype, obj); \
} \
template<> inline ctype& py_cast<ctype&>(VM* vm, PyObject* obj) { \
    vm->check_type(obj, vm->ptype); \
    return OBJ_GET(ctype, obj); \
} \
template<> inline ctype& _py_cast<ctype&>(VM* vm, PyObject* obj) { \
    return OBJ_GET(ctype, obj); \
} \
inline PyObject* py_var(VM* vm, const ctype& value) { return vm->heap.gcnew(vm->ptype, value);} \
inline PyObject* py_var(VM* vm, ctype&& value) { return vm->heap.gcnew(vm->ptype, std::move(value));}
  29. class Generator: public BaseIter {
  30. Frame frame;
  31. int state; // 0,1,2
  32. public:
  33. template<typename... Args>
  34. Generator(VM* vm, Frame&& frame)
  35. : BaseIter(vm), frame(std::move(frame)), state(0) {}
  36. PyObject* next() override;
  37. void _gc_mark() const override;
  38. };
// Per-type record stored in VM::_all_types, indexed by Type.
// Aggregate-initialized (see init_builtin_types), so member order matters.
struct PyTypeInfo{
    PyObject* obj;  // the type object itself
    Type base;      // base type index; base.index == -1 marks the root (see find_name_in_mro)
    Str name;       // display name, qualified as "module.name" for non-builtin modules
};
  44. struct FrameId{
  45. std::vector<pkpy::Frame>* data;
  46. int index;
  47. FrameId(std::vector<pkpy::Frame>* data, int index) : data(data), index(index) {}
  48. Frame* operator->() const { return &data->operator[](index); }
  49. };
// The interpreter core.  Owns the GC heap, the Python call stack, the type
// table and the module registry; implements attribute lookup, type checks,
// error raising and the calling convention.
class VM {
    VM* vm; // self reference for simplify code (member code can use `vm->` uniformly, matching free functions)
public:
    ManagedHeap heap;                       // garbage-collected object heap
    stack< Frame > callstack;               // Python call stack
    std::vector<PyTypeInfo> _all_types;     // type table, indexed by Type
    NameDict _modules;                      // loaded modules
    std::map<StrName, Str> _lazy_modules;   // lazy loaded modules (compiled on first import)
    PyObject* _py_op_call;  // sentinel returned by call() when opCall=true and a new frame was pushed
    PyObject* _py_op_yield; // sentinel object; presumably marks generator yield — confirm in _run_top_frame
    PyObject* _py_null;     // internal "absent" sentinel, distinct from Python-visible None
    PyObject* None;
    PyObject* True;
    PyObject* False;
    PyObject* Ellipsis;
    PyObject* builtins; // builtins module
    PyObject* _main;    // __main__ module
    std::stringstream _stdout_buffer;   // capture buffers, used when stdio is not wired through
    std::stringstream _stderr_buffer;
    std::ostream* _stdout;
    std::ostream* _stderr;
    int recursionlimit = 1000;
    // for quick access
    Type tp_object, tp_type, tp_int, tp_float, tp_bool, tp_str;
    Type tp_list, tp_tuple;
    Type tp_function, tp_native_function, tp_iterator, tp_bound_method;
    Type tp_slice, tp_range, tp_module;
    Type tp_super, tp_exception, tp_star_wrapper;

    // use_stdio=true writes to std::cout/std::cerr directly; otherwise output
    // accumulates in the internal stringstream buffers.
    VM(bool use_stdio) : heap(this){
        this->vm = this;
        this->_stdout = use_stdio ? &std::cout : &_stdout_buffer;
        this->_stderr = use_stdio ? &std::cerr : &_stderr_buffer;
        callstack.data().reserve(8);
        init_builtin_types();
    }

    bool is_stdio_used() const { return _stdout == &std::cout; }

    // Handle to the innermost frame.  Returns an index-based FrameId because
    // the callstack's backing vector may reallocate while the frame is used.
    FrameId top_frame() {
#if DEBUG_EXTRA_CHECK
        if(callstack.empty()) FATAL_ERROR();
#endif
        return FrameId(&callstack.data(), callstack.size()-1);
    }

    // str(obj): dispatch to __str__ if present, else fall back to repr.
    PyObject* asStr(PyObject* obj){
        PyObject* self;
        PyObject* f = get_unbound_method(obj, __str__, &self, false);
        if(self != _py_null) return call(f, Args{self});
        return asRepr(obj);
    }

    // iter(obj): identity for iterators, else dispatch __iter__; TypeError otherwise.
    PyObject* asIter(PyObject* obj){
        if(is_type(obj, tp_iterator)) return obj;
        PyObject* self;
        PyObject* iter_f = get_unbound_method(obj, __iter__, &self, false);
        if(self != _py_null) return call(iter_f, Args{self});
        TypeError(OBJ_NAME(_t(obj)).escape() + " object is not iterable");
        return nullptr;
    }

    // list(iterable): identity for lists, else construct via the list type.
    PyObject* asList(PyObject* iterable){
        if(is_type(iterable, tp_list)) return iterable;
        return call(_t(tp_list), Args{iterable});
    }

    // Walk the (single-inheritance) base chain of class object `cls` looking
    // for `name`; returns nullptr if not found.  base.index == -1 is the root.
    PyObject* find_name_in_mro(PyObject* cls, StrName name){
        PyObject* val;
        do{
            val = cls->attr().try_get(name);
            if(val != nullptr) return val;
            Type cls_t = OBJ_GET(Type, cls);
            Type base = _all_types[cls_t].base;
            if(base.index == -1) break;
            cls = _all_types[base].obj;
        }while(true);
        return nullptr;
    }

    // True if obj's type equals cls_t or derives from it.
    bool isinstance(PyObject* obj, Type cls_t){
        Type obj_t = OBJ_GET(Type, _t(obj));
        do{
            if(obj_t == cls_t) return true;
            Type base = _all_types[obj_t].base;
            if(base.index == -1) break;
            obj_t = base;
        }while(true);
        return false;
    }

    // Look `name` up on args[0]'s type (MRO only, no instance dict) and call it.
    PyObject* fast_call(StrName name, Args&& args){
        PyObject* val = find_name_in_mro(_t(args[0]), name);
        if(val != nullptr) return call(val, std::move(args));
        AttributeError(args[0], name);
        return nullptr;
    }

    // Convenience overload: call with positional args only, no kwargs, opCall=false.
    template<typename ArgT>
    std::enable_if_t<std::is_same_v<std::decay_t<ArgT>, Args>, PyObject*>
    call(PyObject* callable, ArgT&& args){
        return call(callable, std::forward<ArgT>(args), no_arg(), false);
    }

    // Compile and run `source` in `_module` (defaults to __main__).  Uncaught
    // exceptions are printed to _stderr and the call stack is cleared; returns
    // nullptr on failure.
    PyObject* exec(Str source, Str filename, CompileMode mode, PyObject* _module=nullptr){
        if(_module == nullptr) _module = _main;
        try {
            CodeObject_ code = compile(source, filename, mode);
#if DEBUG_DIS_EXEC
            if(_module == _main) std::cout << disassemble(code) << '\n';
#endif
            return _exec(code, _module);
        }catch (const Exception& e){
            *_stderr << e.summary() << '\n';
        }
#if !DEBUG_FULL_EXCEPTION
        catch (const std::exception& e) {
            *_stderr << "An std::exception occurred! It could be a bug.\n";
            *_stderr << e.what() << '\n';
        }
#endif
        callstack.clear();
        return nullptr;
    }

    // Push a frame constructed in place.  NOTE: this pack parameter `Args`
    // shadows the pkpy `Args` type inside the template.  _error throws, so
    // emplace is not reached on overflow.  NOTE(review): size() is unsigned
    // vs int recursionlimit — fine while recursionlimit stays non-negative.
    template<typename ...Args>
    void _push_new_frame(Args&&... args){
        if(callstack.size() > recursionlimit){
            _error("RecursionError", "maximum recursion depth exceeded");
        }
        callstack.emplace(std::forward<Args>(args)...);
    }

    // Push an already-built frame (same recursion guard as above).
    void _push_new_frame(Frame&& frame){
        if(callstack.size() > recursionlimit){
            _error("RecursionError", "maximum recursion depth exceeded");
        }
        callstack.emplace(std::move(frame));
    }

    // Push a frame and run the dispatch loop until it returns.
    template<typename ...Args>
    PyObject* _exec(Args&&... args){
        _push_new_frame(std::forward<Args>(args)...);
        return _run_top_frame();
    }

    // Wrap a 1-arg native getter in builtins.property(...).
    PyObject* property(NativeFuncRaw fget){
        PyObject* p = builtins->attr("property");
        PyObject* method = heap.gcnew(tp_native_function, NativeFunc(fget, 1, false));
        return call(p, Args{method});
    }

    // Create a new type object, register it in _all_types, and (if mod is
    // given) publish it as an attribute of that module.  The stored name is
    // qualified with the module name unless the module is builtins.
    PyObject* new_type_object(PyObject* mod, StrName name, Type base){
        PyObject* obj = heap._new<Type>(tp_type, _all_types.size());
        PyTypeInfo info{
            obj,
            base,
            (mod!=nullptr && mod!=builtins) ? Str(OBJ_NAME(mod)+"."+name.sv()): name.sv()
        };
        if(mod != nullptr) mod->attr().set(name, obj);
        _all_types.push_back(info);
        return obj;
    }

    // Same as above without a module; returns the raw Type index.
    Type _new_type_object(StrName name, Type base=0) {
        PyObject* obj = new_type_object(nullptr, name, base);
        return OBJ_GET(Type, obj);
    }

    // Resolve a type object by name: builtins first, then a linear scan of
    // _all_types; throws std::runtime_error (not a Python error) if missing.
    PyObject* _find_type(const Str& type){
        PyObject* obj = builtins->attr().try_get(type);
        if(obj == nullptr){
            for(auto& t: _all_types) if(t.name == type) return t.obj;
            throw std::runtime_error(fmt("type not found: ", type));
        }
        return obj;
    }

    // Bind a free function on a type found by name.
    template<int ARGC>
    void bind_func(Str type, Str name, NativeFuncRaw fn) {
        bind_func<ARGC>(_find_type(type), name, fn);
    }

    // Bind an instance method on a type found by name.
    template<int ARGC>
    void bind_method(Str type, Str name, NativeFuncRaw fn) {
        bind_method<ARGC>(_find_type(type), name, fn);
    }

    // Static methods are just plain functions bound on the type.
    template<int ARGC, typename... Args>
    void bind_static_method(Args&&... args) {
        bind_func<ARGC>(std::forward<Args>(args)...);
    }

    // Bind the same method on several types (e.g. shared operator impls).
    template<int ARGC>
    void _bind_methods(std::vector<Str> types, Str name, NativeFuncRaw fn) {
        for(auto& type: types) bind_method<ARGC>(type, name, fn);
    }

    template<int ARGC>
    void bind_builtin_func(Str name, NativeFuncRaw fn) {
        bind_func<ARGC>(builtins, name, fn);
    }

    // Map a possibly-negative Python index into [0, size); raises IndexError
    // (which throws) when out of range.
    int normalized_index(int index, int size){
        if(index < 0) index += size;
        if(index < 0 || index >= size){
            IndexError(std::to_string(index) + " not in [0, " + std::to_string(size) + ")");
        }
        return index;
    }

    // Box any BaseIter-derived value as an iterator object.
    template<typename P>
    PyObject* PyIter(P&& value) {
        static_assert(std::is_base_of_v<BaseIter, std::decay_t<P>>);
        return heap.gcnew<P>(tp_iterator, std::forward<P>(value));
    }

    // Checked unboxing of an iterator object back to its BaseIter payload.
    BaseIter* PyIter_AS_C(PyObject* obj)
    {
        check_type(obj, tp_iterator);
        return static_cast<BaseIter*>(obj->value());
    }

    /***** Error Reporter *****/
    // All of the *Error helpers below construct an Exception and throw via
    // _error/_raise; they do not return normally.
    void _error(StrName name, const Str& msg){
        _error(Exception(name, msg));
    }

    // Transfer control to the innermost exception handler if one exists,
    // otherwise propagate out of the dispatch loop.
    void _raise(){
        bool ok = top_frame()->jump_to_exception_handler();
        if(ok) throw HandledException();
        else throw UnhandledException();
    }

    void IOError(const Str& msg) { _error("IOError", msg); }
    void NotImplementedError(){ _error("NotImplementedError", ""); }
    void TypeError(const Str& msg){ _error("TypeError", msg); }
    void ZeroDivisionError(){ _error("ZeroDivisionError", "division by zero"); }
    void IndexError(const Str& msg){ _error("IndexError", msg); }
    void ValueError(const Str& msg){ _error("ValueError", msg); }
    void NameError(StrName name){ _error("NameError", fmt("name ", name.escape() + " is not defined")); }

    void AttributeError(PyObject* obj, StrName name){
        // OBJ_NAME calls getattr, which may lead to a infinite recursion
        _error("AttributeError", fmt("type ", OBJ_NAME(_t(obj)).escape(), " has no attribute ", name.escape()));
    }

    void AttributeError(Str msg){ _error("AttributeError", msg); }

    // Exact-type check (no subclass walk); raises TypeError on mismatch.
    void check_type(PyObject* obj, Type type){
        if(is_type(obj, type)) return;
        TypeError("expected " + OBJ_NAME(_t(type)).escape() + ", but got " + OBJ_NAME(_t(obj)).escape());
    }

    // Type index -> type object.
    PyObject* _t(Type t){
        return _all_types[t.index].obj;
    }

    // Object -> its type object.  Tagged ints/floats have no header, so they
    // are special-cased.  The last line round-trips through the type object's
    // payload; it appears equivalent to _all_types[obj->type].obj — confirm.
    PyObject* _t(PyObject* obj){
        if(is_int(obj)) return _t(tp_int);
        if(is_float(obj)) return _t(tp_float);
        return _all_types[OBJ_GET(Type, _t(obj->type)).index].obj;
    }

    ~VM() { heap.collect(); }

    // Out-of-line / compiler-facing API (defined elsewhere or below).
    CodeObject_ compile(Str source, Str filename, CompileMode mode);
    PyObject* num_negated(PyObject* obj);
    f64 num_to_float(PyObject* obj);
    bool asBool(PyObject* obj);
    i64 hash(PyObject* obj);
    PyObject* asRepr(PyObject* obj);
    PyObject* new_module(StrName name);
    Str disassemble(CodeObject_ co);
    void init_builtin_types();
    PyObject* call(PyObject* callable, Args args, const Args& kwargs, bool opCall);
    PyObject* _py_call(PyObject* callable, ArgsView args, ArgsView kwargs);
    void unpack_args(Args& args);
    PyObject* getattr(PyObject* obj, StrName name, bool throw_err=true);
    PyObject* get_unbound_method(PyObject* obj, StrName name, PyObject** self, bool throw_err=true, bool fallback=false);
    template<typename T>
    void setattr(PyObject* obj, StrName name, T&& value);
    template<int ARGC>
    void bind_method(PyObject*, Str, NativeFuncRaw);
    template<int ARGC>
    void bind_func(PyObject*, Str, NativeFuncRaw);
    void _error(Exception);
    PyObject* _run_top_frame(bool force_no_pop=false);
    void post_init();
};
  304. inline PyObject* NativeFunc::operator()(VM* vm, Args& args) const{
  305. int args_size = args.size() - (int)method; // remove self
  306. if(argc != -1 && args_size != argc) {
  307. vm->TypeError("expected " + std::to_string(argc) + " arguments, but got " + std::to_string(args_size));
  308. }
  309. return f(vm, args);
  310. }
// Post-compilation optimization hook.  Currently a no-op; the disabled code
// below is a previous experiment sizing a perfect-hash table for locals.
inline void CodeObject::optimize(VM* vm){
    // uint32_t base_n = (uint32_t)(names.size() / kLocalsLoadFactor + 0.5);
    // perfect_locals_capacity = std::max(find_next_capacity(base_n), NameDict::__Capacity);
    // perfect_hash_seed = find_perfect_hash_seed(perfect_locals_capacity, names);
}
// Instantiate the cast/box helpers for every heap-boxed builtin payload type.
// (int/float/bool are tagged pointers and are handled separately below.)
DEF_NATIVE_2(Str, tp_str)
DEF_NATIVE_2(List, tp_list)
DEF_NATIVE_2(Tuple, tp_tuple)
DEF_NATIVE_2(Function, tp_function)
DEF_NATIVE_2(NativeFunc, tp_native_function)
DEF_NATIVE_2(BoundMethod, tp_bound_method)
DEF_NATIVE_2(Range, tp_range)
DEF_NATIVE_2(Slice, tp_slice)
DEF_NATIVE_2(Exception, tp_exception)
DEF_NATIVE_2(StarWrapper, tp_star_wrapper)
// PY_CAST_INT(T): checked and unchecked extraction of a tagged small integer.
// Integers are stored inline in the pointer with a 2-bit tag, so the payload
// is recovered by an arithmetic right shift of the raw pointer bits.
#define PY_CAST_INT(T) \
template<> inline T py_cast<T>(VM* vm, PyObject* obj){ \
    vm->check_type(obj, vm->tp_int); \
    return (T)(BITS(obj) >> 2); \
} \
template<> inline T _py_cast<T>(VM* vm, PyObject* obj){ \
    return (T)(BITS(obj) >> 2); \
}
PY_CAST_INT(char)
PY_CAST_INT(short)
PY_CAST_INT(int)
PY_CAST_INT(long)
PY_CAST_INT(long long)
PY_CAST_INT(unsigned char)
PY_CAST_INT(unsigned short)
PY_CAST_INT(unsigned int)
PY_CAST_INT(unsigned long)
PY_CAST_INT(unsigned long long)
  344. template<> inline float py_cast<float>(VM* vm, PyObject* obj){
  345. vm->check_type(obj, vm->tp_float);
  346. i64 bits = BITS(obj);
  347. bits = (bits >> 2) << 2;
  348. return BitsCvt(bits)._float;
  349. }
  350. template<> inline float _py_cast<float>(VM* vm, PyObject* obj){
  351. i64 bits = BITS(obj);
  352. bits = (bits >> 2) << 2;
  353. return BitsCvt(bits)._float;
  354. }
  355. template<> inline double py_cast<double>(VM* vm, PyObject* obj){
  356. vm->check_type(obj, vm->tp_float);
  357. i64 bits = BITS(obj);
  358. bits = (bits >> 2) << 2;
  359. return BitsCvt(bits)._float;
  360. }
  361. template<> inline double _py_cast<double>(VM* vm, PyObject* obj){
  362. i64 bits = BITS(obj);
  363. bits = (bits >> 2) << 2;
  364. return BitsCvt(bits)._float;
  365. }
  366. #define PY_VAR_INT(T) \
  367. inline PyObject* py_var(VM* vm, T _val){ \
  368. i64 val = static_cast<i64>(_val); \
  369. if(((val << 2) >> 2) != val){ \
  370. vm->_error("OverflowError", std::to_string(val) + " is out of range"); \
  371. } \
  372. val = (val << 2) | 0b01; \
  373. return reinterpret_cast<PyObject*>(val); \
  374. }
  375. PY_VAR_INT(char)
  376. PY_VAR_INT(short)
  377. PY_VAR_INT(int)
  378. PY_VAR_INT(long)
  379. PY_VAR_INT(long long)
  380. PY_VAR_INT(unsigned char)
  381. PY_VAR_INT(unsigned short)
  382. PY_VAR_INT(unsigned int)
  383. PY_VAR_INT(unsigned long)
  384. PY_VAR_INT(unsigned long long)
  385. #define PY_VAR_FLOAT(T) \
  386. inline PyObject* py_var(VM* vm, T _val){ \
  387. f64 val = static_cast<f64>(_val); \
  388. i64 bits = BitsCvt(val)._int; \
  389. bits = (bits >> 2) << 2; \
  390. bits |= 0b10; \
  391. return reinterpret_cast<PyObject*>(bits); \
  392. }
  393. PY_VAR_FLOAT(float)
  394. PY_VAR_FLOAT(double)
  395. inline PyObject* py_var(VM* vm, bool val){
  396. return val ? vm->True : vm->False;
  397. }
// Checked bool extraction: identity comparison against the True singleton.
template<> inline bool py_cast<bool>(VM* vm, PyObject* obj){
    vm->check_type(obj, vm->tp_bool);
    return obj == vm->True;
}
// Unchecked variant: any non-True object (including non-bools) yields false.
template<> inline bool _py_cast<bool>(VM* vm, PyObject* obj){
    return obj == vm->True;
}
// Box a C string literal.
inline PyObject* py_var(VM* vm, const char val[]){
    return VAR(Str(val));
}
// Box a std::string; taken by value so the buffer can be moved into Str.
inline PyObject* py_var(VM* vm, std::string val){
    return VAR(Str(std::move(val)));
}
// Box a string_view (copies the viewed bytes into a new Str).
inline PyObject* py_var(VM* vm, std::string_view val){
    return VAR(Str(val));
}
// Assert obj's type matches the VM-registered type of the bound C++ class T
// (T::_type(vm) is expected to return the registered Type — confirm at the
// binding site); raises TypeError on mismatch.
template<typename T>
void _check_py_class(VM* vm, PyObject* obj){
    vm->check_type(obj, T::_type(vm));
}
  418. inline PyObject* VM::num_negated(PyObject* obj){
  419. if (is_int(obj)){
  420. return VAR(-CAST(i64, obj));
  421. }else if(is_float(obj)){
  422. return VAR(-CAST(f64, obj));
  423. }
  424. TypeError("expected 'int' or 'float', got " + OBJ_NAME(_t(obj)).escape());
  425. return nullptr;
  426. }
  427. inline f64 VM::num_to_float(PyObject* obj){
  428. if(is_float(obj)){
  429. return CAST(f64, obj);
  430. } else if (is_int(obj)){
  431. return (f64)CAST(i64, obj);
  432. }
  433. TypeError("expected 'int' or 'float', got " + OBJ_NAME(_t(obj)).escape());
  434. return 0;
  435. }
// Python truthiness.  Fast paths for bool/None/int/float, then __len__ > 0
// if the object defines it; every other object is truthy.
inline bool VM::asBool(PyObject* obj){
    if(is_type(obj, tp_bool)) return obj == True;
    if(obj == None) return false;
    if(is_type(obj, tp_int)) return CAST(i64, obj) != 0;
    if(is_type(obj, tp_float)) return CAST(f64, obj) != 0.0;
    PyObject* self;
    PyObject* len_f = get_unbound_method(obj, __len__, &self, false);
    if(self != _py_null){
        PyObject* ret = call(len_f, Args{self});
        return CAST(i64, ret) > 0;
    }
    return true;    // default: any other object is truthy
}
// Hash for dict keys / set members.  Covers str, int, tuple (combined
// elementwise), type objects (identity bits), bool and float; everything
// else raises TypeError (which throws, so the final return is unreachable).
inline i64 VM::hash(PyObject* obj){
    if (is_type(obj, tp_str)) return CAST(Str&, obj).hash();
    if (is_int(obj)) return CAST(i64, obj);
    if (is_type(obj, tp_tuple)) {
        i64 x = 1000003;
        const Tuple& items = CAST(Tuple&, obj);
        for (int i=0; i<items.size(); i++) {
            i64 y = hash(items[i]);     // recursive elementwise hash
            // recommended by Github Copilot
            x = x ^ (y + 0x9e3779b9 + (x << 6) + (x >> 2));
        }
        return x;
    }
    if (is_type(obj, tp_type)) return BITS(obj);    // identity hash for types
    if (is_type(obj, tp_bool)) return _CAST(bool, obj) ? 1 : 0;
    if (is_float(obj)){
        f64 val = CAST(f64, obj);
        return (i64)std::hash<f64>()(val);
    }
    TypeError("unhashable type: " + OBJ_NAME(_t(obj)).escape());
    return 0;
}
// repr(obj): dispatch __repr__ via the type's MRO (no instance dict lookup).
inline PyObject* VM::asRepr(PyObject* obj){
    // TODO: fastcall does not take care of super() proxy!
    return fast_call(__repr__, Args{obj});
}
// Create and register a new module object.  Registering the same name twice
// is a fatal error: modules are GC roots and would otherwise leak.
inline PyObject* VM::new_module(StrName name) {
    PyObject* obj = heap._new<DummyModule>(tp_module, DummyModule());
    obj->attr().set(__name__, VAR(name.sv()));
    // we do not allow override in order to avoid memory leak
    // it is because Module objects are not garbage collected
    if(_modules.contains(name)) FATAL_ERROR();
    _modules.set(name, obj);
    return obj;
}
// Human-readable bytecode listing: one row per instruction with source line,
// a "->" marker on jump targets, opcode name, decoded argument and block
// type; nested function declarations are disassembled recursively.
inline Str VM::disassemble(CodeObject_ co){
    // Pad/truncate a string to exactly n columns.
    auto pad = [](const Str& s, const int n){
        if(s.length() >= n) return s.substr(0, n);
        return s + std::string(n - s.length(), ' ');
    };
    // Collect jump targets so they can be marked with "->".
    std::vector<int> jumpTargets;
    for(auto byte : co->codes){
        if(byte.op == OP_JUMP_ABSOLUTE || byte.op == OP_POP_JUMP_IF_FALSE){
            jumpTargets.push_back(byte.arg);
        }
    }
    std::stringstream ss;
    int prev_line = -1;
    for(int i=0; i<co->codes.size(); i++){
        const Bytecode& byte = co->codes[i];
        // Print the source line only when it changes; blank lines separate runs.
        Str line = std::to_string(co->lines[i]);
        if(co->lines[i] == prev_line) line = "";
        else{
            if(prev_line != -1) ss << "\n";
            prev_line = co->lines[i];
        }
        std::string pointer;
        if(std::find(jumpTargets.begin(), jumpTargets.end(), i) != jumpTargets.end()){
            pointer = "-> ";
        }else{
            pointer = "   ";
        }
        ss << pad(line, 8) << pointer << pad(std::to_string(i), 3);
        ss << " " << pad(OP_NAMES[byte.op], 20) << " ";
        // ss << pad(byte.arg == -1 ? "" : std::to_string(byte.arg), 5);
        // Decode the argument for opcodes whose operand indexes a table.
        std::string argStr = byte.arg == -1 ? "" : std::to_string(byte.arg);
        switch(byte.op){
            case OP_LOAD_CONST:
                argStr += fmt(" (", CAST(Str, asRepr(co->consts[byte.arg])), ")");
                break;
            case OP_LOAD_NAME: case OP_LOAD_GLOBAL: case OP_LOAD_NONLOCAL: case OP_STORE_GLOBAL:
            case OP_LOAD_ATTR: case OP_LOAD_METHOD: case OP_STORE_ATTR: case OP_DELETE_ATTR:
            case OP_IMPORT_NAME: case OP_BEGIN_CLASS:
            case OP_DELETE_GLOBAL:
                argStr += fmt(" (", co->names[byte.arg].sv(), ")");
                break;
            case OP_LOAD_FAST: case OP_STORE_FAST: case OP_DELETE_FAST:
                argStr += fmt(" (", co->varnames[byte.arg].sv(), ")");
                break;
            case OP_BINARY_OP:
                argStr += fmt(" (", BINARY_SPECIAL_METHODS[byte.arg], ")");
                break;
            case OP_LOAD_FUNCTION:
                argStr += fmt(" (", co->func_decls[byte.arg]->code->name, ")");
                break;
        }
        ss << pad(argStr, 40);  // may overflow
        ss << co->blocks[byte.block].type;
        if(i != co->codes.size() - 1) ss << '\n';
    }
    // std::stringstream consts;
    // consts << "co_consts: ";
    // consts << CAST(Str&, asRepr(VAR(co->consts)));
    // std::stringstream names;
    // names << "co_names: ";
    // List list;
    // for(int i=0; i<co->names.size(); i++){
    //     list.push_back(VAR(co->names[i].sv()));
    // }
    // names << CAST(Str, asRepr(VAR(list)));
    // ss << '\n' << consts.str() << '\n' << names.str();
    for(auto& decl: co->func_decls){
        ss << "\n\n" << "Disassembly of " << decl->code->name << ":\n";
        ss << disassemble(decl->code);
    }
    return Str(ss.str());
}
// Bootstrap the type system.  Order matters: the table index of each
// _new_type_object call becomes its Type id, and int/float must land on
// their hard-coded tag indices (kTpIntIndex/kTpFloatIndex).
inline void VM::init_builtin_types(){
    // "object" and "type" are created manually: both are instances of type
    // (index 1); "object" has no base (-1).
    _all_types.push_back({heap._new<Type>(Type(1), Type(0)), -1, "object"});
    _all_types.push_back({heap._new<Type>(Type(1), Type(1)), 0, "type"});
    tp_object = 0; tp_type = 1;
    tp_int = _new_type_object("int");
    tp_float = _new_type_object("float");
    if(tp_int.index != kTpIntIndex || tp_float.index != kTpFloatIndex) FATAL_ERROR();
    tp_bool = _new_type_object("bool");
    tp_str = _new_type_object("str");
    tp_list = _new_type_object("list");
    tp_tuple = _new_type_object("tuple");
    tp_slice = _new_type_object("slice");
    tp_range = _new_type_object("range");
    tp_module = _new_type_object("module");
    tp_star_wrapper = _new_type_object("_star_wrapper");
    tp_function = _new_type_object("function");
    tp_native_function = _new_type_object("native_function");
    tp_iterator = _new_type_object("iterator");
    tp_bound_method = _new_type_object("bound_method");
    tp_super = _new_type_object("super");
    tp_exception = _new_type_object("Exception");
    // Singletons; None/Ellipsis and the internal sentinels get their own
    // single-purpose types.
    this->None = heap._new<Dummy>(_new_type_object("NoneType"), {});
    this->Ellipsis = heap._new<Dummy>(_new_type_object("ellipsis"), {});
    this->True = heap._new<Dummy>(tp_bool, {});
    this->False = heap._new<Dummy>(tp_bool, {});
    this->_py_null = heap._new<Dummy>(_new_type_object("_py_null"), {});
    this->_py_op_call = heap._new<Dummy>(_new_type_object("_py_op_call"), {});
    this->_py_op_yield = heap._new<Dummy>(_new_type_object("_py_op_yield"), {});
    this->builtins = new_module("builtins");
    this->_main = new_module("__main__");
    // setup public types
    builtins->attr().set("type", _t(tp_type));
    builtins->attr().set("object", _t(tp_object));
    builtins->attr().set("bool", _t(tp_bool));
    builtins->attr().set("int", _t(tp_int));
    builtins->attr().set("float", _t(tp_float));
    builtins->attr().set("str", _t(tp_str));
    builtins->attr().set("list", _t(tp_list));
    builtins->attr().set("tuple", _t(tp_tuple));
    builtins->attr().set("range", _t(tp_range));
    post_init();
    // Attribute tables are frozen into perfect-hash layout after all
    // builtin bindings are installed.
    for(int i=0; i<_all_types.size(); i++){
        _all_types[i].obj->attr()._try_perfect_rehash();
    }
    for(auto [k, v]: _modules.items()) v->attr()._try_perfect_rehash();
}
// Bind arguments to a Python `function` object and either push a new frame
// (returns nullptr) or, for generator code, return a boxed Generator.
// kwargs is a flat [key0, value0, key1, value1, ...] sequence.
inline PyObject* VM::_py_call(PyObject* callable, ArgsView args, ArgsView kwargs){
    // callable is a `function` object
    const Function& fn = CAST(Function&, callable);
    const CodeObject* co = fn.decl->code.get();
    FastLocals locals(co);
    // Fill declared positional parameters from `args`.
    int i = 0;
    for(int index: fn.decl->args){
        if(i < args.size()){
            locals[index] = args[i++];
        }else{
            StrName name = co->varnames[index];
            TypeError(fmt("missing positional argument ", name.escape()));
        }
    }
    // prepare kwdefaults
    for(auto& kv: fn.decl->kwargs) locals[kv.key] = kv.value;
    // handle *args
    if(fn.decl->starred_arg != -1){
        List vargs;     // handle *args
        while(i < args.size()) vargs.push_back(args[i++]);
        locals[fn.decl->starred_arg] = VAR(Tuple(std::move(vargs)));
    }else{
        // kwdefaults override: surplus positionals fill keyword parameters in order
        for(auto& kv: fn.decl->kwargs){
            if(i < args.size()){
                locals[kv.key] = args[i++];
            }else{
                break;
            }
        }
        if(i < args.size()) TypeError("too many arguments");
    }
    // Apply explicit keyword arguments; keys arrive int-encoded and are
    // rebuilt into StrName indices — confirm encoding at the call site.
    for(int i=0; i<kwargs.size(); i+=2){
        StrName key = CAST(int, kwargs[i]);
        // try_set has nullptr check
        // TODO: optimize this
        bool ok = locals.try_set(key, kwargs[i+1]);
        if(!ok){
            TypeError(fmt(key.escape(), " is an invalid keyword argument for ", co->name, "()"));
        }
    }
    // Globals come from the function's defining module, or the caller's.
    PyObject* _module = fn._module != nullptr ? fn._module : top_frame()->_module;
    if(co->is_generator){
        return PyIter(Generator(this, Frame(co, _module, std::move(locals), fn._closure)));
    }
    _push_new_frame(co, _module, std::move(locals), fn._closure);
    return nullptr;
}
  650. // TODO: callable/args here may be garbage collected accidentally
  651. inline PyObject* VM::call(PyObject* callable, Args args, const Args& kwargs, bool opCall){
  652. if(is_type(callable, tp_bound_method)){
  653. auto& bm = CAST(BoundMethod&, callable);
  654. callable = bm.method; // get unbound method
  655. args.extend_self(bm.obj);
  656. }
  657. if(is_type(callable, tp_native_function)){
  658. const auto& f = OBJ_GET(NativeFunc, callable);
  659. if(kwargs.size() != 0) TypeError("native_function does not accept keyword arguments");
  660. return f(this, args);
  661. } else if(is_type(callable, tp_function)){
  662. // ret is nullptr or a generator
  663. PyObject* ret = _py_call(callable, args, kwargs);
  664. if(ret != nullptr) return ret;
  665. if(opCall) return _py_op_call;
  666. return _run_top_frame();
  667. }
  668. if(is_type(callable, tp_type)){
  669. // TODO: use get_unbound_method here
  670. PyObject* new_f = callable->attr().try_get(__new__);
  671. PyObject* obj;
  672. if(new_f != nullptr){
  673. obj = call(new_f, std::move(args), kwargs, false);
  674. }else{
  675. obj = heap.gcnew<DummyInstance>(OBJ_GET(Type, callable), {});
  676. PyObject* self;
  677. PyObject* init_f = get_unbound_method(obj, __init__, &self, false);
  678. args.extend_self(self);
  679. if (self != _py_null) call(init_f, std::move(args), kwargs, false);
  680. }
  681. return obj;
  682. }
  683. PyObject* self;
  684. PyObject* call_f = get_unbound_method(callable, __call__, &self, false);
  685. if(self != _py_null){
  686. args.extend_self(self);
  687. return call(call_f, std::move(args), kwargs, false);
  688. }
  689. TypeError(OBJ_NAME(_t(callable)).escape() + " object is not callable");
  690. return None;
  691. }
  692. inline void VM::unpack_args(Args& args){
  693. List unpacked;
  694. for(int i=0; i<args.size(); i++){
  695. if(is_type(args[i], tp_star_wrapper)){
  696. auto& star = _CAST(StarWrapper&, args[i]);
  697. List& list = CAST(List&, asList(star.obj));
  698. unpacked.extend(list);
  699. }else{
  700. unpacked.push_back(args[i]);
  701. }
  702. }
  703. args = Args(std::move(unpacked));
  704. }
  705. // https://docs.python.org/3/howto/descriptor.html#invocation-from-an-instance
  706. inline PyObject* VM::getattr(PyObject* obj, StrName name, bool throw_err){
  707. PyObject* objtype = _t(obj);
  708. // handle super() proxy
  709. if(is_type(obj, tp_super)){
  710. const Super& super = OBJ_GET(Super, obj);
  711. obj = super.first;
  712. objtype = _t(super.second);
  713. }
  714. PyObject* cls_var = find_name_in_mro(objtype, name);
  715. if(cls_var != nullptr){
  716. // handle descriptor
  717. PyObject* descr_get = _t(cls_var)->attr().try_get(__get__);
  718. if(descr_get != nullptr) return call(descr_get, Args{cls_var, obj});
  719. }
  720. // handle instance __dict__
  721. if(!is_tagged(obj) && obj->is_attr_valid()){
  722. PyObject* val = obj->attr().try_get(name);
  723. if(val != nullptr) return val;
  724. }
  725. if(cls_var != nullptr){
  726. // bound method is non-data descriptor
  727. if(is_type(cls_var, tp_function) || is_type(cls_var, tp_native_function)){
  728. return VAR(BoundMethod(obj, cls_var));
  729. }
  730. return cls_var;
  731. }
  732. if(throw_err) AttributeError(obj, name);
  733. return nullptr;
  734. }
  735. // used by OP_LOAD_METHOD
  736. // try to load a unbound method (fallback to `getattr` if not found)
  737. inline PyObject* VM::get_unbound_method(PyObject* obj, StrName name, PyObject** self, bool throw_err, bool fallback){
  738. *self = _py_null;
  739. PyObject* objtype = _t(obj);
  740. // handle super() proxy
  741. if(is_type(obj, tp_super)){
  742. const Super& super = OBJ_GET(Super, obj);
  743. obj = super.first;
  744. objtype = _t(super.second);
  745. }
  746. PyObject* cls_var = find_name_in_mro(objtype, name);
  747. if(fallback){
  748. if(cls_var != nullptr){
  749. // handle descriptor
  750. PyObject* descr_get = _t(cls_var)->attr().try_get(__get__);
  751. if(descr_get != nullptr) return call(descr_get, Args{cls_var, obj});
  752. }
  753. // handle instance __dict__
  754. if(!is_tagged(obj) && obj->is_attr_valid()){
  755. PyObject* val = obj->attr().try_get(name);
  756. if(val != nullptr) return val;
  757. }
  758. }
  759. if(cls_var != nullptr){
  760. if(is_type(cls_var, tp_function) || is_type(cls_var, tp_native_function)){
  761. *self = obj;
  762. }
  763. return cls_var;
  764. }
  765. if(throw_err) AttributeError(obj, name);
  766. return nullptr;
  767. }
// Attribute assignment.  Honors data descriptors (__set__) found on the
// type; a descriptor exposing __get__ but no __set__ is read-only.  T must
// decay to PyObject* (enforced below); the forwarding template merely
// preserves value category.
template<typename T>
inline void VM::setattr(PyObject* obj, StrName name, T&& value){
    static_assert(std::is_same_v<std::decay_t<T>, PyObject*>);
    PyObject* objtype = _t(obj);
    // handle super() proxy
    if(is_type(obj, tp_super)){
        Super& super = OBJ_GET(Super, obj);
        obj = super.first;
        objtype = _t(super.second);
    }
    PyObject* cls_var = find_name_in_mro(objtype, name);
    if(cls_var != nullptr){
        // handle descriptor
        PyObject* cls_var_t = _t(cls_var);
        if(cls_var_t->attr().contains(__get__)){
            PyObject* descr_set = cls_var_t->attr().try_get(__set__);
            if(descr_set != nullptr){
                call(descr_set, Args{cls_var, obj, std::forward<T>(value)});
            }else{
                TypeError(fmt("readonly attribute: ", name.escape()));
            }
            return;
        }
    }
    // handle instance __dict__
    if(is_tagged(obj) || !obj->is_attr_valid()) TypeError("cannot set attribute");
    obj->attr().set(name, std::forward<T>(value));
}
// Bind a native function on a type object as an instance method
// (method=true: args[0] is the implicit self and is excluded from ARGC).
template<int ARGC>
void VM::bind_method(PyObject* obj, Str name, NativeFuncRaw fn) {
    check_type(obj, tp_type);
    obj->attr().set(name, VAR(NativeFunc(fn, ARGC, true)));
}
// Bind a native function on any object (module, type, ...) as a plain
// function (method=false: no implicit self).
template<int ARGC>
void VM::bind_func(PyObject* obj, Str name, NativeFuncRaw fn) {
    obj->attr().set(name, VAR(NativeFunc(fn, ARGC, false)));
}
// Raise a Python exception.  With no Python frame on the stack, the C++
// exception escapes to the host (is_re=false marks it non-re-raisable);
// otherwise the exception object is pushed on the frame's stack and _raise
// transfers control to the nearest handler.
inline void VM::_error(Exception e){
    if(callstack.empty()){
        e.is_re = false;
        throw e;
    }
    top_frame()->push(VAR(e));
    _raise();
}
// GC mark phase roots: permanently-alive objects in _no_gc, plus every
// frame on the VM call stack.
inline void ManagedHeap::mark() {
    for(PyObject* obj: _no_gc) OBJ_MARK(obj);
    for(auto& frame : vm->callstack.data()) frame._gc_mark();
}
  817. inline Str obj_type_name(VM *vm, Type type){
  818. return vm->_all_types[type].name;
  819. }
  820. } // namespace pkpy