// vm.h — pocketpy virtual machine core (header-only implementation).
  1. #pragma once
  2. #include "common.h"
  3. #include "frame.h"
  4. #include "error.h"
  5. #include "gc.h"
  6. namespace pkpy{
// Read a file by name (presumably relative to the current working directory —
// defined elsewhere); *ok reports whether the read succeeded.
Str _read_file_cwd(const Str& name, bool* ok);
// Generates the boxing/unboxing glue for a heap-backed C++ value type `ctype`
// whose VM type id lives in member `ptype`:
//   - py_cast<T> / py_cast<T&>:   checked casts (check_type raises TypeError)
//   - _py_cast<T> / _py_cast<T&>: unchecked casts (caller guarantees the type)
//   - py_var (copy / move):       box a value into a new GC-managed PyObject
// NOTE: no comments inside the macro body — '//' would swallow the
// line-continuation backslashes.
#define DEF_NATIVE_2(ctype, ptype) \
template<> inline ctype py_cast<ctype>(VM* vm, PyObject* obj) { \
vm->check_type(obj, vm->ptype); \
return OBJ_GET(ctype, obj); \
} \
template<> inline ctype _py_cast<ctype>(VM* vm, PyObject* obj) { \
return OBJ_GET(ctype, obj); \
} \
template<> inline ctype& py_cast<ctype&>(VM* vm, PyObject* obj) { \
vm->check_type(obj, vm->ptype); \
return OBJ_GET(ctype, obj); \
} \
template<> inline ctype& _py_cast<ctype&>(VM* vm, PyObject* obj) { \
return OBJ_GET(ctype, obj); \
} \
inline PyObject* py_var(VM* vm, const ctype& value) { return vm->heap.gcnew(vm->ptype, value);} \
inline PyObject* py_var(VM* vm, ctype&& value) { return vm->heap.gcnew(vm->ptype, std::move(value));}
// Iterator backing a Python generator function: owns the suspended Frame and
// resumes it on each next() call (next() is defined out of line).
class Generator: public BaseIter {
    std::unique_ptr<Frame> frame;   // the suspended call frame
    int state; // 0,1,2 — lifecycle state; transitions handled in next()
public:
    Generator(VM* vm, std::unique_ptr<Frame>&& frame)
        : BaseIter(vm, nullptr), frame(std::move(frame)), state(0) {}
    PyObject* next() override;
    void _mark() override;  // GC: mark objects reachable from the frame
};
// One entry of VM::_all_types, the registry indexed by Type id.
struct PyTypeInfo{
    PyObject* obj;  // the type object itself
    Type base;      // base type id; index == -1 marks the root ("object")
    Str name;       // display name, qualified as "module.name" for non-builtins
};
// The interpreter core: owns the GC heap, the call stack, the module table and
// the type registry, and provides the runtime services (call, attribute
// access, error raising) used by the bytecode loop.
class VM {
    VM* vm; // self reference for simplify code
public:
    ManagedHeap heap;                          // GC-managed object heap
    stack< std::unique_ptr<Frame> > callstack; // active call frames
    std::vector<PyTypeInfo> _all_types;        // type registry, indexed by Type id
    PyObject* run_frame(Frame* frame);         // execute one frame (defined elsewhere)
    NameDict _modules; // loaded modules
    std::map<StrName, Str> _lazy_modules; // lazy loaded modules
    PyObject* _py_op_call;   // sentinel return: a new frame was pushed
    PyObject* _py_op_yield;  // sentinel return: frame suspended at a yield
    PyObject* None;
    PyObject* True;
    PyObject* False;
    PyObject* Ellipsis;
    PyObject* builtins; // builtins module
    PyObject* _main; // __main__ module
    bool use_stdio;         // true: write to std::cout/std::cerr directly
    std::ostream* _stdout;  // owned StrStream when !use_stdio (freed in ~VM)
    std::ostream* _stderr;
    int recursionlimit = 1000;
    // for quick access
    Type tp_object, tp_type, tp_int, tp_float, tp_bool, tp_str;
    Type tp_list, tp_tuple;
    Type tp_function, tp_native_function, tp_iterator, tp_bound_method;
    Type tp_slice, tp_range, tp_module, tp_ref;
    Type tp_super, tp_exception, tp_star_wrapper;

    VM(bool use_stdio){
        this->vm = this;
        this->use_stdio = use_stdio;
        if(use_stdio){
            this->_stdout = &std::cout;
            this->_stderr = &std::cerr;
        }else{
            // capture output into string buffers; released in the destructor
            this->_stdout = new StrStream();
            this->_stderr = new StrStream();
        }
        init_builtin_types();
    }

    // Innermost frame. Precondition: the call stack is not empty.
    Frame* top_frame() const {
#if DEBUG_EXTRA_CHECK
        if(callstack.empty()) UNREACHABLE();
#endif
        return callstack.top().get();
    }

    // str(obj): use __str__ when the class defines one, else fall back to repr.
    PyObject* asStr(PyObject* obj){
        PyObject* f = getattr(obj, __str__, false, true);
        if(f != nullptr) return call(f, no_arg());
        return asRepr(obj);
    }

    // iter(obj): iterators pass through; otherwise call __iter__ or raise.
    PyObject* asIter(PyObject* obj){
        if(is_type(obj, tp_iterator)) return obj;
        PyObject* iter_f = getattr(obj, __iter__, false, true);
        if(iter_f != nullptr) return call(iter_f, no_arg());
        TypeError(OBJ_NAME(_t(obj)).escape(true) + " object is not iterable");
        return nullptr;
    }

    // list(iterable): identity for lists, else construct via the list type.
    PyObject* asList(PyObject* iterable){
        if(is_type(iterable, tp_list)) return iterable;
        return call(_t(tp_list), Args{iterable});
    }

    // Walk the (single-inheritance) base chain looking for `name`.
    // Returns nullptr when no class in the chain defines it.
    PyObject* find_name_in_mro(PyObject* cls, StrName name){
        PyObject* val;
        do{
            val = cls->attr().try_get(name);
            if(val != nullptr) return val;
            Type cls_t = OBJ_GET(Type, cls);
            Type base = _all_types[cls_t].base;
            if(base.index == -1) break;     // reached "object"
            cls = _all_types[base].obj;
        }while(true);
        return nullptr;
    }

    // True when obj's type equals cls_t or derives from it.
    bool isinstance(PyObject* obj, Type cls_t){
        Type obj_t = OBJ_GET(Type, _t(obj));
        do{
            if(obj_t == cls_t) return true;
            Type base = _all_types[obj_t].base;
            if(base.index == -1) break;
            obj_t = base;
        }while(true);
        return false;
    }

    // Look up `name` on args[0]'s class (skipping the instance dict) and call it.
    PyObject* fast_call(StrName name, Args&& args){
        PyObject* val = find_name_in_mro(_t(args[0]), name);
        if(val != nullptr) return call(val, std::move(args));
        AttributeError(args[0], name);
        return nullptr;
    }

    // call(callable, args): positional-only convenience overload.
    template<typename ArgT>
    std::enable_if_t<std::is_same_v<std::decay_t<ArgT>, Args>, PyObject*>
    call(PyObject* callable, ArgT&& args){
        return call(callable, std::forward<ArgT>(args), no_arg(), false);
    }

    // call(obj, name, args): method-call convenience overload.
    template<typename ArgT>
    std::enable_if_t<std::is_same_v<std::decay_t<ArgT>, Args>, PyObject*>
    call(PyObject* obj, const StrName name, ArgT&& args){
        PyObject* callable = getattr(obj, name, true, true);
        return call(callable, std::forward<ArgT>(args), no_arg(), false);
    }

    // Compile and run `source` in `_module` (default: __main__).
    // Exceptions are reported to _stderr; returns nullptr on failure.
    PyObject* exec(Str source, Str filename, CompileMode mode, PyObject* _module=nullptr){
        if(_module == nullptr) _module = _main;
        try {
            CodeObject_ code = compile(source, filename, mode);
            // if(_module == _main) std::cout << disassemble(code) << '\n';
            return _exec(code, _module);
        }catch (const Exception& e){
            *_stderr << e.summary() << '\n';
        }catch (const std::exception& e) {
            *_stderr << "An std::exception occurred! It could be a bug.\n";
            *_stderr << e.what() << '\n';
        }
        callstack = {};   // discard any partially-unwound frames
        return nullptr;
    }

    // Allocate a frame, enforcing the recursion limit.
    template<typename ...Args>
    std::unique_ptr<Frame> _new_frame(Args&&... args){
        if(callstack.size() > recursionlimit){
            _error("RecursionError", "maximum recursion depth exceeded");
        }
        return std::make_unique<Frame>(std::forward<Args>(args)...);
    }

    // Push a new frame and run the trampoline to completion.
    template<typename ...Args>
    PyObject* _exec(Args&&... args){
        callstack.push(_new_frame(std::forward<Args>(args)...));
        return _exec();
    }

    // Wrap a native getter in builtins.property (1-arg, non-method).
    PyObject* property(NativeFuncRaw fget){
        PyObject* p = builtins->attr("property");
        PyObject* method = heap.gcnew(tp_native_function, NativeFunc(fget, 1, false));
        return call(p, Args{method});
    }

    // Register a new type; its id is its slot in _all_types. When `mod` is
    // given the type is also published as an attribute of that module.
    PyObject* new_type_object(PyObject* mod, StrName name, Type base){
        PyObject* obj = heap._new<Type>(tp_type, _all_types.size());
        PyTypeInfo info{
            .obj = obj,
            .base = base,
            // qualify names of non-builtin types with their module
            .name = (mod!=nullptr && mod!=builtins) ? Str(OBJ_NAME(mod)+"."+name.str()): name.str()
        };
        if(mod != nullptr) mod->attr().set(name, obj);
        _all_types.push_back(info);
        return obj;
    }

    // Register an internal (module-less) type and return its id.
    Type _new_type_object(StrName name, Type base=0) {
        PyObject* obj = new_type_object(nullptr, name, base);
        return OBJ_GET(Type, obj);
    }

    // Resolve a type object by display name: builtins first, then the registry.
    PyObject* _find_type(const Str& type){
        PyObject* obj = builtins->attr().try_get(type);
        if(obj == nullptr){
            for(auto& t: _all_types) if(t.name == type) return t.obj;
            throw std::runtime_error("type not found: " + type);
        }
        return obj;
    }

    // bind_* overloads taking a type name instead of a type object.
    template<int ARGC>
    void bind_func(Str type, Str name, NativeFuncRaw fn) {
        bind_func<ARGC>(_find_type(type), name, fn);
    }
    template<int ARGC>
    void bind_method(Str type, Str name, NativeFuncRaw fn) {
        bind_method<ARGC>(_find_type(type), name, fn);
    }
    // A "static method" is simply a non-method function bound on the type.
    template<int ARGC, typename... Args>
    void bind_static_method(Args&&... args) {
        bind_func<ARGC>(std::forward<Args>(args)...);
    }
    // Bind the same native method on several types at once.
    template<int ARGC>
    void _bind_methods(std::vector<Str> types, Str name, NativeFuncRaw fn) {
        for(auto& type: types) bind_method<ARGC>(type, name, fn);
    }
    template<int ARGC>
    void bind_builtin_func(Str name, NativeFuncRaw fn) {
        bind_func<ARGC>(builtins, name, fn);
    }

    // Map a possibly-negative Python index into [0, size); raises IndexError.
    int normalized_index(int index, int size){
        if(index < 0) index += size;
        if(index < 0 || index >= size){
            IndexError(std::to_string(index) + " not in [0, " + std::to_string(size) + ")");
        }
        return index;
    }

    // Box a BaseIter-derived value as an iterator object.
    template<typename P>
    PyObject* PyIter(P&& value) {
        static_assert(std::is_base_of_v<BaseIter, std::decay_t<P>>);
        return heap.gcnew<P>(tp_iterator, std::forward<P>(value));
    }

    // Checked unboxing of an iterator object back to its BaseIter.
    BaseIter* PyIter_AS_C(PyObject* obj)
    {
        check_type(obj, tp_iterator);
        return static_cast<BaseIter*>(obj->value());
    }

    /***** Error Reporter *****/
    void _error(StrName name, const Str& msg){
        _error(Exception(name, msg));
    }

    // Jump to the innermost exception handler, or propagate as unhandled.
    void _raise(){
        bool ok = top_frame()->jump_to_exception_handler();
        if(ok) throw HandledException();
        else throw UnhandledException();
    }

    // Named raisers — each forwards to _error and therefore does not return
    // normally (a C++ exception unwinds out of _raise).
    void IOError(const Str& msg) { _error("IOError", msg); }
    void NotImplementedError(){ _error("NotImplementedError", ""); }
    void TypeError(const Str& msg){ _error("TypeError", msg); }
    void ZeroDivisionError(){ _error("ZeroDivisionError", "division by zero"); }
    void IndexError(const Str& msg){ _error("IndexError", msg); }
    void ValueError(const Str& msg){ _error("ValueError", msg); }
    void NameError(StrName name){ _error("NameError", "name " + name.str().escape(true) + " is not defined"); }
    void AttributeError(PyObject* obj, StrName name){
        _error("AttributeError", "type " + OBJ_NAME(_t(obj)).escape(true) + " has no attribute " + name.str().escape(true));
    }
    void AttributeError(Str msg){ _error("AttributeError", msg); }

    // Exact-type check (no subclass tolerance); raises TypeError on mismatch.
    void check_type(PyObject* obj, Type type){
        if(is_type(obj, type)) return;
        TypeError("expected " + OBJ_NAME(_t(type)).escape(true) + ", but got " + OBJ_NAME(_t(obj)).escape(true));
    }

    // Type id -> type object.
    PyObject* _t(Type t){
        return _all_types[t.index].obj;
    }

    // Object -> its type object; tagged ints/floats have no header, so their
    // types are resolved from the tag bits.
    PyObject* _t(PyObject* obj){
        if(is_int(obj)) return _t(tp_int);
        if(is_float(obj)) return _t(tp_float);
        return _all_types[OBJ_GET(Type, _t(obj->type)).index].obj;
    }

    ~VM() {
        heap.collect(this);
        if(!use_stdio){
            delete _stdout;
            delete _stderr;
        }
    }

    // --- defined out of line (below or in other translation units) ---
    CodeObject_ compile(Str source, Str filename, CompileMode mode);
    PyObject* num_negated(PyObject* obj);
    f64 num_to_float(PyObject* obj);
    bool asBool(PyObject* obj);
    i64 hash(PyObject* obj);
    PyObject* asRepr(PyObject* obj);
    PyObject* new_module(StrName name);
    Str disassemble(CodeObject_ co);
    void init_builtin_types();
    PyObject* call(PyObject* callable, Args args, const Args& kwargs, bool opCall);
    void unpack_args(Args& args);
    PyObject* getattr(PyObject* obj, StrName name, bool throw_err=true, bool class_only=false);
    template<typename T>
    void setattr(PyObject* obj, StrName name, T&& value);
    template<int ARGC>
    void bind_method(PyObject*, Str, NativeFuncRaw);
    template<int ARGC>
    void bind_func(PyObject*, Str, NativeFuncRaw);
    void _error(Exception);
    PyObject* _exec();
    template<typename P> PyObject* PyRef(P&&);
    const BaseRef* PyRef_AS_C(PyObject* obj);
    void post_init();
};
// Invoke the wrapped C function after arity checking. `method` means args[0]
// is `self` and does not count toward argc; argc == -1 disables the check.
// TypeError does not return normally (it unwinds via a C++ exception).
inline PyObject* NativeFunc::operator()(VM* vm, Args& args) const{
    int args_size = args.size() - (int)method; // remove self
    if(argc != -1 && args_size != argc) {
        vm->TypeError("expected " + std::to_string(argc) + " arguments, but got " + std::to_string(args_size));
    }
    return f(vm, args);
}
  301. inline void CodeObject::optimize(VM* vm){
  302. std::vector<StrName> keys;
  303. for(auto& p: names) if(p.second == NAME_LOCAL) keys.push_back(p.first);
  304. uint32_t base_n = (uint32_t)(keys.size() / kLocalsLoadFactor + 0.5);
  305. perfect_locals_capacity = find_next_capacity(base_n);
  306. perfect_hash_seed = find_perfect_hash_seed(perfect_locals_capacity, keys);
  307. for(int i=1; i<codes.size(); i++){
  308. if(codes[i].op == OP_UNARY_NEGATIVE && codes[i-1].op == OP_LOAD_CONST){
  309. codes[i].op = OP_NO_OP;
  310. int pos = codes[i-1].arg;
  311. consts[pos] = vm->num_negated(consts[pos]);
  312. }
  313. }
  314. // pre-compute sn in co_consts
  315. for(int i=0; i<consts.size(); i++){
  316. if(is_type(consts[i], vm->tp_str)){
  317. Str& s = OBJ_GET(Str, consts[i]);
  318. s._cached_sn_index = StrName::get(s.c_str()).index;
  319. }
  320. }
  321. }
// Instantiate the boxing/unboxing glue for every heap-backed builtin value
// type. (int/float/bool are tagged pointers, handled separately below.)
DEF_NATIVE_2(Str, tp_str)
DEF_NATIVE_2(List, tp_list)
DEF_NATIVE_2(Tuple, tp_tuple)
DEF_NATIVE_2(Function, tp_function)
DEF_NATIVE_2(NativeFunc, tp_native_function)
DEF_NATIVE_2(BoundMethod, tp_bound_method)
DEF_NATIVE_2(Range, tp_range)
DEF_NATIVE_2(Slice, tp_slice)
DEF_NATIVE_2(Exception, tp_exception)
DEF_NATIVE_2(StarWrapper, tp_star_wrapper)
// Small ints are stored inline in the pointer as (value << 2) | 0b01
// (see PY_VAR_INT below); these casts undo the tag by shifting right.
#define PY_CAST_INT(T) \
template<> inline T py_cast<T>(VM* vm, PyObject* obj){ \
vm->check_type(obj, vm->tp_int); \
return (T)(BITS(obj) >> 2); \
} \
template<> inline T _py_cast<T>(VM* vm, PyObject* obj){ \
return (T)(BITS(obj) >> 2); \
}
PY_CAST_INT(char)
PY_CAST_INT(short)
PY_CAST_INT(int)
PY_CAST_INT(long)
PY_CAST_INT(long long)
PY_CAST_INT(unsigned char)
PY_CAST_INT(unsigned short)
PY_CAST_INT(unsigned int)
PY_CAST_INT(unsigned long)
PY_CAST_INT(unsigned long long)
// Floats are stored inline in the pointer with the 2 low bits used as a tag
// (see PY_VAR_FLOAT below). Clearing those bits recovers the f64 bit pattern.
template<> inline float py_cast<float>(VM* vm, PyObject* obj){
    vm->check_type(obj, vm->tp_float);
    i64 bits = BITS(obj);
    bits = (bits >> 2) << 2;  // clear the 2 tag bits
    return BitsCvt(bits)._float;
}
template<> inline float _py_cast<float>(VM* vm, PyObject* obj){
    i64 bits = BITS(obj);
    bits = (bits >> 2) << 2;
    return BitsCvt(bits)._float;
}
template<> inline double py_cast<double>(VM* vm, PyObject* obj){
    vm->check_type(obj, vm->tp_float);
    i64 bits = BITS(obj);
    bits = (bits >> 2) << 2;
    return BitsCvt(bits)._float;
}
template<> inline double _py_cast<double>(VM* vm, PyObject* obj){
    i64 bits = BITS(obj);
    bits = (bits >> 2) << 2;
    return BitsCvt(bits)._float;
}
// Box a C++ integer as a tagged pointer: (value << 2) | 0b01. A value that
// does not survive the <<2 round-trip exceeds the 62-bit payload and raises
// OverflowError.
#define PY_VAR_INT(T) \
inline PyObject* py_var(VM* vm, T _val){ \
i64 val = static_cast<i64>(_val); \
if(((val << 2) >> 2) != val){ \
vm->_error("OverflowError", std::to_string(val) + " is out of range"); \
} \
val = (val << 2) | 0b01; \
return reinterpret_cast<PyObject*>(val); \
}
PY_VAR_INT(char)
PY_VAR_INT(short)
PY_VAR_INT(int)
PY_VAR_INT(long)
PY_VAR_INT(long long)
PY_VAR_INT(unsigned char)
PY_VAR_INT(unsigned short)
PY_VAR_INT(unsigned int)
PY_VAR_INT(unsigned long)
PY_VAR_INT(unsigned long long)
// Box a float as a tagged pointer: reuse the f64 bit pattern, sacrificing the
// 2 lowest mantissa bits for the 0b10 tag (a deliberate precision trade-off).
#define PY_VAR_FLOAT(T) \
inline PyObject* py_var(VM* vm, T _val){ \
f64 val = static_cast<f64>(_val); \
i64 bits = BitsCvt(val)._int; \
bits = (bits >> 2) << 2; \
bits |= 0b10; \
return reinterpret_cast<PyObject*>(bits); \
}
PY_VAR_FLOAT(float)
PY_VAR_FLOAT(double)
// bool maps onto the True/False singletons; unboxing is a pointer compare.
inline PyObject* py_var(VM* vm, bool val){
    return val ? vm->True : vm->False;
}
template<> inline bool py_cast<bool>(VM* vm, PyObject* obj){
    vm->check_type(obj, vm->tp_bool);
    return obj == vm->True;
}
template<> inline bool _py_cast<bool>(VM* vm, PyObject* obj){
    return obj == vm->True;
}
// Convenience boxers for C strings and std::string — both delegate to Str.
inline PyObject* py_var(VM* vm, const char val[]){
    return VAR(Str(val));
}
inline PyObject* py_var(VM* vm, std::string val){
    return VAR(Str(std::move(val)));
}
// Verify obj is an instance of the Python-side type registered for C++ class T.
template<typename T>
void _check_py_class(VM* vm, PyObject* obj){
    vm->check_type(obj, T::_type(vm));
}
  421. inline PyObject* VM::num_negated(PyObject* obj){
  422. if (is_int(obj)){
  423. return VAR(-CAST(i64, obj));
  424. }else if(is_float(obj)){
  425. return VAR(-CAST(f64, obj));
  426. }
  427. TypeError("expected 'int' or 'float', got " + OBJ_NAME(_t(obj)).escape(true));
  428. return nullptr;
  429. }
  430. inline f64 VM::num_to_float(PyObject* obj){
  431. if(is_float(obj)){
  432. return CAST(f64, obj);
  433. } else if (is_int(obj)){
  434. return (f64)CAST(i64, obj);
  435. }
  436. TypeError("expected 'int' or 'float', got " + OBJ_NAME(_t(obj)).escape(true));
  437. return 0;
  438. }
// Python truthiness: bool/None/int/float have intrinsic truth values; other
// objects consult __len__() when present; objects without __len__ are truthy.
inline bool VM::asBool(PyObject* obj){
    if(is_type(obj, tp_bool)) return obj == True;
    if(obj == None) return false;
    if(is_type(obj, tp_int)) return CAST(i64, obj) != 0;
    if(is_type(obj, tp_float)) return CAST(f64, obj) != 0.0;
    PyObject* len_f = getattr(obj, __len__, false, true);
    if(len_f != nullptr){
        PyObject* ret = call(len_f, no_arg());
        return CAST(i64, ret) > 0;
    }
    return true;
}
// Hash used for dict keys. Tuples fold element hashes; types hash by
// identity (their pointer bits); unhashable types raise TypeError.
// NOTE(review): no dispatch to a user-defined __hash__ here.
inline i64 VM::hash(PyObject* obj){
    if (is_type(obj, tp_str)) return CAST(Str&, obj).hash();
    if (is_int(obj)) return CAST(i64, obj);
    if (is_type(obj, tp_tuple)) {
        i64 x = 1000003;
        const Tuple& items = CAST(Tuple&, obj);
        for (int i=0; i<items.size(); i++) {
            i64 y = hash(items[i]);   // recursive element hash
            // recommended by Github Copilot
            x = x ^ (y + 0x9e3779b9 + (x << 6) + (x >> 2));
        }
        return x;
    }
    if (is_type(obj, tp_type)) return BITS(obj);
    if (is_type(obj, tp_bool)) return _CAST(bool, obj) ? 1 : 0;
    if (is_float(obj)){
        f64 val = CAST(f64, obj);
        return (i64)std::hash<f64>()(val);
    }
    TypeError("unhashable type: " + OBJ_NAME(_t(obj)).escape(true));
    return 0;
}
// repr(obj): invoke the object's __repr__ method.
inline PyObject* VM::asRepr(PyObject* obj){
    return call(obj, __repr__, no_arg());
}
// Create and register a module object. Registering the same name twice is a
// hard error (modules are GC roots and are never collected).
inline PyObject* VM::new_module(StrName name) {
    PyObject* obj = heap._new<DummyModule>(tp_module, DummyModule());
    obj->attr().set(__name__, VAR(name.str()));
    // we do not allow override in order to avoid memory leak
    // it is because Module objects are not garbage collected
    if(_modules.contains(name)) UNREACHABLE();
    _modules.set(name, obj);
    return obj;
}
// Render a human-readable listing of a code object: one line per bytecode
// (line number, jump-target marker, index, opcode, argument annotation),
// followed by co_consts / co_names dumps and recursive listings of any
// nested function constants.
inline Str VM::disassemble(CodeObject_ co){
    // right-pad (or truncate) a column to exactly n characters
    auto pad = [](const Str& s, const int n){
        if(s.size() >= n) return s.substr(0, n);
        return s + std::string(n - s.size(), ' ');
    };
    // collect jump destinations so they can be marked with "->"
    std::vector<int> jumpTargets;
    for(auto byte : co->codes){
        if(byte.op == OP_JUMP_ABSOLUTE || byte.op == OP_SAFE_JUMP_ABSOLUTE || byte.op == OP_POP_JUMP_IF_FALSE){
            jumpTargets.push_back(byte.arg);
        }
    }
    StrStream ss;
    ss << std::string(54, '-') << '\n';
    ss << co->name << ":\n";
    int prev_line = -1;
    for(int i=0; i<co->codes.size(); i++){
        const Bytecode& byte = co->codes[i];
        if(byte.op == OP_NO_OP) continue;   // skip optimized-out slots
        // print the source line only when it changes; blank line between runs
        Str line = std::to_string(byte.line);
        if(byte.line == prev_line) line = "";
        else{
            if(prev_line != -1) ss << "\n";
            prev_line = byte.line;
        }
        std::string pointer;
        if(std::find(jumpTargets.begin(), jumpTargets.end(), i) != jumpTargets.end()){
            pointer = "-> ";
        }else{
            pointer = "   ";
        }
        ss << pad(line, 8) << pointer << pad(std::to_string(i), 3);
        ss << " " << pad(OP_NAMES[byte.op], 20) << " ";
        // ss << pad(byte.arg == -1 ? "" : std::to_string(byte.arg), 5);
        std::string argStr = byte.arg == -1 ? "" : std::to_string(byte.arg);
        // annotate const/name arguments with their repr / identifier
        if(byte.op == OP_LOAD_CONST){
            argStr += " (" + CAST(Str, asRepr(co->consts[byte.arg])) + ")";
        }
        if(byte.op == OP_LOAD_NAME_REF || byte.op == OP_LOAD_NAME || byte.op == OP_RAISE || byte.op == OP_STORE_NAME){
            argStr += " (" + co->names[byte.arg].first.str().escape(true) + ")";
        }
        ss << argStr;
        // ss << pad(argStr, 20);      // may overflow
        // ss << co->blocks[byte.block].to_string();
        if(i != co->codes.size() - 1) ss << '\n';
    }
    StrStream consts;
    consts << "co_consts: ";
    consts << CAST(Str, asRepr(VAR(co->consts)));
    StrStream names;
    names << "co_names: ";
    List list;
    for(int i=0; i<co->names.size(); i++){
        list.push_back(VAR(co->names[i].first.str()));
    }
    names << CAST(Str, asRepr(VAR(list)));
    ss << '\n' << consts.str() << '\n' << names.str() << '\n';
    // recurse into nested function code objects
    for(int i=0; i<co->consts.size(); i++){
        PyObject* obj = co->consts[i];
        if(is_type(obj, tp_function)){
            const auto& f = CAST(Function&, obj);
            ss << disassemble(f.code);
        }
    }
    return Str(ss.str());
}
// Bootstrap the type system. Registration ORDER IS SIGNIFICANT: "object" and
// "type" must land in slots 0/1, and int/float must land in the fixed
// kTpIntIndex/kTpFloatIndex slots that tagged-pointer code assumes.
inline void VM::init_builtin_types(){
    // bootstrap by hand: type objects for "object" and "type" themselves
    _all_types.push_back({.obj = heap._new<Type>(Type(1), Type(0)), .base = -1, .name = "object"});
    _all_types.push_back({.obj = heap._new<Type>(Type(1), Type(1)), .base = 0, .name = "type"});
    tp_object = 0; tp_type = 1;
    tp_int = _new_type_object("int");
    tp_float = _new_type_object("float");
    if(tp_int.index != kTpIntIndex || tp_float.index != kTpFloatIndex) UNREACHABLE();
    tp_bool = _new_type_object("bool");
    tp_str = _new_type_object("str");
    tp_list = _new_type_object("list");
    tp_tuple = _new_type_object("tuple");
    tp_slice = _new_type_object("slice");
    tp_range = _new_type_object("range");
    tp_module = _new_type_object("module");
    tp_ref = _new_type_object("_ref");
    tp_star_wrapper = _new_type_object("_star_wrapper");
    tp_function = _new_type_object("function");
    tp_native_function = _new_type_object("native_function");
    tp_iterator = _new_type_object("iterator");
    tp_bound_method = _new_type_object("bound_method");
    tp_super = _new_type_object("super");
    tp_exception = _new_type_object("Exception");
    // singletons (each None-like singleton gets its own private type)
    this->None = heap._new<Dummy>(_new_type_object("NoneType"), {});
    this->Ellipsis = heap._new<Dummy>(_new_type_object("ellipsis"), {});
    this->True = heap._new<Dummy>(tp_bool, {});
    this->False = heap._new<Dummy>(tp_bool, {});
    this->_py_op_call = heap._new<Dummy>(_new_type_object("_py_op_call"), {});
    this->_py_op_yield = heap._new<Dummy>(_new_type_object("_py_op_yield"), {});
    this->builtins = new_module("builtins");
    this->_main = new_module("__main__");
    // setup public types
    builtins->attr().set("type", _t(tp_type));
    builtins->attr().set("object", _t(tp_object));
    builtins->attr().set("bool", _t(tp_bool));
    builtins->attr().set("int", _t(tp_int));
    builtins->attr().set("float", _t(tp_float));
    builtins->attr().set("str", _t(tp_str));
    builtins->attr().set("list", _t(tp_list));
    builtins->attr().set("tuple", _t(tp_tuple));
    builtins->attr().set("range", _t(tp_range));
    post_init();
    // attribute tables are now stable: switch them to perfect hashing
    for(int i=0; i<_all_types.size(); i++){
        // std::cout << i << ": " << _all_types[i].name << std::endl;
        _all_types[i].obj->attr()._try_perfect_rehash();
    }
    for(auto [k, v]: _modules.items()) v->attr()._try_perfect_rehash();
}
// Central call dispatcher. Handles, in order: class instantiation
// (__new__/__init__), bound-method unwrapping, native functions, Python
// functions (argument binding + frame push), and finally the __call__
// protocol. `opCall` makes a Python-function call return the _py_op_call
// sentinel instead of executing the new frame (the bytecode loop resumes it).
inline PyObject* VM::call(PyObject* callable, Args args, const Args& kwargs, bool opCall){
    if(is_type(callable, tp_type)){
        // instantiation: prefer __new__; else allocate and run __init__
        PyObject* new_f = callable->attr().try_get(__new__);
        PyObject* obj;
        if(new_f != nullptr){
            obj = call(new_f, std::move(args), kwargs, false);
        }else{
            obj = heap.gcnew<DummyInstance>(OBJ_GET(Type, callable), {});
            PyObject* init_f = getattr(obj, __init__, false, true);
            if (init_f != nullptr) call(init_f, std::move(args), kwargs, false);
        }
        return obj;
    }
    if(is_type(callable, tp_bound_method)){
        auto& bm = CAST(BoundMethod&, callable);
        callable = bm.method; // get unbound method
        args.extend_self(bm.obj);   // prepend self
    }
    if(is_type(callable, tp_native_function)){
        const auto& f = OBJ_GET(NativeFunc, callable);
        if(kwargs.size() != 0) TypeError("native_function does not accept keyword arguments");
        return f(this, args);
    } else if(is_type(callable, tp_function)){
        const Function& fn = CAST(Function&, callable);
        // locals dict pre-sized with the code object's perfect-hash parameters
        NameDict_ locals = make_sp<NameDict>(
            fn.code->perfect_locals_capacity,
            kLocalsLoadFactor,
            fn.code->perfect_hash_seed
        );
        int i = 0;
        // bind declared positional parameters
        for(StrName name : fn.args){
            if(i < args.size()){
                locals->set(name, args[i++]);
                continue;
            }
            TypeError("missing positional argument " + name.str().escape(true));
        }
        locals->update(fn.kwargs);   // keyword defaults; may be overwritten below
        if(!fn.starred_arg.empty()){
            List vargs; // handle *args
            while(i < args.size()) vargs.push_back(args[i++]);
            locals->set(fn.starred_arg, VAR(Tuple(std::move(vargs))));
        }else{
            // without *args, surplus positionals fill keyword params in order
            for(StrName key : fn.kwargs_order){
                if(i < args.size()){
                    locals->set(key, args[i++]);
                }else{
                    break;
                }
            }
            if(i < args.size()) TypeError("too many arguments");
        }
        // kwargs arrive flattened as [key0, val0, key1, val1, ...]
        for(int i=0; i<kwargs.size(); i+=2){
            const Str& key = CAST(Str&, kwargs[i]);
            if(!fn.kwargs.contains(key)){
                TypeError(key.escape(true) + " is an invalid keyword argument for " + fn.name.str() + "()");
            }
            locals->set(key, kwargs[i+1]);
        }
        PyObject* _module = fn._module != nullptr ? fn._module : top_frame()->_module;
        auto _frame = _new_frame(fn.code, _module, locals, fn._closure);
        // generator functions return a suspended iterator instead of running
        if(fn.code->is_generator) return PyIter(Generator(this, std::move(_frame)));
        callstack.push(std::move(_frame));
        if(opCall) return _py_op_call;   // bytecode loop resumes the new frame
        return _exec();
    }
    // fall back to the __call__ protocol
    PyObject* call_f = getattr(callable, __call__, false, true);
    if(call_f != nullptr){
        return call(call_f, std::move(args), kwargs, false);
    }
    TypeError(OBJ_NAME(_t(callable)).escape(true) + " object is not callable");
    return None;
}
  670. inline void VM::unpack_args(Args& args){
  671. List unpacked;
  672. for(int i=0; i<args.size(); i++){
  673. if(is_type(args[i], tp_star_wrapper)){
  674. auto& star = _CAST(StarWrapper&, args[i]);
  675. if(!star.rvalue) UNREACHABLE();
  676. List& list = CAST(List&, asList(star.obj));
  677. unpacked.insert(unpacked.end(), list.begin(), list.end());
  678. }else{
  679. unpacked.push_back(args[i]);
  680. }
  681. }
  682. args = Args(std::move(unpacked));
  683. }
// Attribute lookup following CPython's descriptor protocol, simplified:
// class __get__ descriptors win, then the instance __dict__, then plain
// class attributes (functions become bound methods).
// https://docs.python.org/3/howto/descriptor.html#invocation-from-an-instance
inline PyObject* VM::getattr(PyObject* obj, StrName name, bool throw_err, bool class_only){
    PyObject* objtype = _t(obj);
    // handle super() proxy: look on the parent type, but bind to the instance
    if(is_type(obj, tp_super)){
        const Super& super = OBJ_GET(Super, obj);
        obj = super.first;
        objtype = _t(super.second);
    }
    PyObject* cls_var = find_name_in_mro(objtype, name);
    if(cls_var != nullptr){
        // handle descriptor
        PyObject* descr_get = _t(cls_var)->attr().try_get(__get__);
        if(descr_get != nullptr) return call(descr_get, Args{cls_var, obj});
    }
    // handle instance __dict__ (tagged ints/floats have none)
    if(!class_only && !is_tagged(obj) && obj->is_attr_valid()){
        PyObject* val = obj->attr().try_get(name);
        if(val != nullptr) return val;
    }
    if(cls_var != nullptr){
        // bound method is non-data descriptor
        if(is_type(cls_var, tp_function) || is_type(cls_var, tp_native_function)){
            return VAR(BoundMethod(obj, cls_var));
        }
        return cls_var;
    }
    if(throw_err) AttributeError(obj, name);
    return nullptr;
}
// Attribute assignment with data-descriptor support: a class attribute that
// defines __get__ intercepts the store via __set__ (or is read-only without
// one); otherwise the value goes into the instance __dict__.
template<typename T>
inline void VM::setattr(PyObject* obj, StrName name, T&& value){
    static_assert(std::is_same_v<std::decay_t<T>, PyObject*>);
    PyObject* objtype = _t(obj);
    // handle super() proxy
    if(is_type(obj, tp_super)){
        Super& super = OBJ_GET(Super, obj);
        obj = super.first;
        objtype = _t(super.second);
    }
    PyObject* cls_var = find_name_in_mro(objtype, name);
    if(cls_var != nullptr){
        // handle descriptor: __get__ marks it a descriptor, __set__ makes it writable
        PyObject* cls_var_t = _t(cls_var);
        if(cls_var_t->attr().contains(__get__)){
            PyObject* descr_set = cls_var_t->attr().try_get(__set__);
            if(descr_set != nullptr){
                call(descr_set, Args{cls_var, obj, std::forward<T>(value)});
            }else{
                TypeError("readonly attribute: " + name.str().escape(true));
            }
            return;
        }
    }
    // handle instance __dict__ (tagged values and dict-less objects reject stores)
    if(is_tagged(obj) || !obj->is_attr_valid()) TypeError("cannot set attribute");
    obj->attr().set(name, std::forward<T>(value));
}
// Attach a native method (args[0] is self) to a type object.
template<int ARGC>
void VM::bind_method(PyObject* obj, Str name, NativeFuncRaw fn) {
    check_type(obj, tp_type);
    obj->attr().set(name, VAR(NativeFunc(fn, ARGC, true)));
}
// Attach a plain native function (no implicit self) to any object's attrs.
template<int ARGC>
void VM::bind_func(PyObject* obj, Str name, NativeFuncRaw fn) {
    obj->attr().set(name, VAR(NativeFunc(fn, ARGC, false)));
}
  751. inline void VM::_error(Exception e){
  752. if(callstack.empty()){
  753. e.is_re = false;
  754. throw e;
  755. }
  756. top_frame()->push(VAR(e));
  757. _raise();
  758. }
// Bytecode trampoline: repeatedly runs the top frame, handling the frames
// pushed by Python-level calls above this trampoline's base frame, and
// translating the VM's internal C++ exceptions into control flow.
inline PyObject* VM::_exec(){
    Frame* frame = top_frame();
    const i64 base_id = frame->id;
    bool need_raise = false;
    while(true){
        if(frame->id < base_id) UNREACHABLE();
        try{
            if(need_raise){ need_raise = false; _raise(); }
            PyObject* ret = run_frame(frame);
            if(ret == _py_op_yield) return _py_op_yield;  // generator suspension
            if(ret != _py_op_call){
                // frame returned a real value
                if(frame->id == base_id){ // [ frameBase<- ]
                    callstack.pop();
                    return ret;
                }else{
                    // pop back to the caller and hand it the return value
                    callstack.pop();
                    frame = callstack.top().get();
                    frame->push(ret);
                }
            }else{
                frame = callstack.top().get(); // [ frameBase, newFrame<- ]
            }
        }catch(HandledException& e){
            continue;   // a frame-local handler caught it; resume execution
        }catch(UnhandledException& e){
            // unwind one frame, carrying the exception object along
            PyObject* obj = frame->pop();
            Exception& _e = CAST(Exception&, obj);
            _e.st_push(frame->snapshot());   // extend the traceback
            callstack.pop();
            if(callstack.empty()) throw _e;  // escaped the whole interpreter
            frame = callstack.top().get();
            frame->push(obj);
            // escaped below this trampoline's base: defer to the outer trampoline
            if(frame->id < base_id) throw ToBeRaisedException();
            need_raise = true;
        }catch(ToBeRaisedException& e){
            need_raise = true;
        }
    }
}
// GC root scan: permanent (no-gc) objects plus everything reachable from the
// live call frames.
inline void ManagedHeap::mark(VM *vm) {
    for(PyObject* obj: _no_gc) OBJ_MARK(obj);
    for(auto& frame : vm->callstack.data()){
        frame->_mark();
    }
}
// Debug hook invoked on object deletion. NOTE(review): writes to std::cout
// directly, bypassing vm->_stdout redirection — debug-build tracing only.
inline void ManagedHeap::_delete_hook(VM *vm, PyObject *obj){
    Type t = OBJ_GET(Type, vm->_t(obj));
    std::cout << "delete " << vm->_all_types[t].name << " at " << obj << std::endl;
}
  808. } // namespace pkpy