vm.cpp 37 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983984985986987988989990991992993994995996997998999100010011002100310041005100610071008100910101011101210131014101510161017101810191020102110221023102410251026102710281029103010311032103310341035103610371038103910401041104210431044104510461047104810491050105110521053105410551056105710581059106010611062106310641065106610671068106910701071107210731074107510761077107810791080108110821083108410851086108710881089109010911092109310941095109610971098109911001101
  1. #include "pocketpy/vm.h"
  2. namespace pkpy{
  3. VM::VM(bool enable_os) : heap(this), enable_os(enable_os) {
  4. this->vm = this;
  5. this->_c.error = nullptr;
  6. _stdout = [](VM* vm, const char* buf, int size) {
  7. PK_UNUSED(vm);
  8. std::cout.write(buf, size);
  9. };
  10. _stderr = [](VM* vm, const char* buf, int size) {
  11. PK_UNUSED(vm);
  12. std::cerr.write(buf, size);
  13. };
  14. callstack.reserve(8);
  15. _main = nullptr;
  16. _last_exception = nullptr;
  17. _import_handler = [](const Str& name) {
  18. PK_UNUSED(name);
  19. return Bytes();
  20. };
  21. init_builtin_types();
  22. }
  23. PyObject* VM::py_str(PyObject* obj){
  24. const PyTypeInfo* ti = _inst_type_info(obj);
  25. if(ti->m__str__) return ti->m__str__(this, obj);
  26. PyObject* self;
  27. PyObject* f = get_unbound_method(obj, __str__, &self, false);
  28. if(self != PY_NULL) return call_method(self, f);
  29. return py_repr(obj);
  30. }
  31. PyObject* VM::py_repr(PyObject* obj){
  32. const PyTypeInfo* ti = _inst_type_info(obj);
  33. if(ti->m__repr__) return ti->m__repr__(this, obj);
  34. return call_method(obj, __repr__);
  35. }
  36. PyObject* VM::py_json(PyObject* obj){
  37. const PyTypeInfo* ti = _inst_type_info(obj);
  38. if(ti->m__json__) return ti->m__json__(this, obj);
  39. return call_method(obj, __json__);
  40. }
  41. PyObject* VM::py_iter(PyObject* obj){
  42. const PyTypeInfo* ti = _inst_type_info(obj);
  43. if(ti->m__iter__) return ti->m__iter__(this, obj);
  44. PyObject* self;
  45. PyObject* iter_f = get_unbound_method(obj, __iter__, &self, false);
  46. if(self != PY_NULL) return call_method(self, iter_f);
  47. TypeError(OBJ_NAME(_t(obj)).escape() + " object is not iterable");
  48. return nullptr;
  49. }
// Return a handle to the innermost call frame.
// Precondition: the callstack is non-empty (checked only in debug builds).
FrameId VM::top_frame(){
#if PK_DEBUG_EXTRA_CHECK
if(callstack.empty()) FATAL_ERROR();
#endif
return FrameId(&callstack.data(), callstack.size()-1);
}
  56. void VM::_pop_frame(){
  57. Frame* frame = &callstack.top();
  58. s_data.reset(frame->_sp_base);
  59. callstack.pop();
  60. }
  61. PyObject* VM::find_name_in_mro(PyObject* cls, StrName name){
  62. PyObject* val;
  63. do{
  64. val = cls->attr().try_get(name);
  65. if(val != nullptr) return val;
  66. Type base = _all_types[PK_OBJ_GET(Type, cls)].base;
  67. if(base.index == -1) break;
  68. cls = _all_types[base].obj;
  69. }while(true);
  70. return nullptr;
  71. }
  72. bool VM::isinstance(PyObject* obj, Type cls_t){
  73. Type obj_t = PK_OBJ_GET(Type, _t(obj));
  74. do{
  75. if(obj_t == cls_t) return true;
  76. Type base = _all_types[obj_t].base;
  77. if(base.index == -1) break;
  78. obj_t = base;
  79. }while(true);
  80. return false;
  81. }
// Compile and execute `source` in `_module` (defaults to __main__).
// On success returns the result of _exec; on any caught error prints a
// summary to _stderr, clears all interpreter state, and returns nullptr.
PyObject* VM::exec(Str source, Str filename, CompileMode mode, PyObject* _module){
if(_module == nullptr) _module = _main;
try {
CodeObject_ code = compile(source, filename, mode);
#if PK_DEBUG_DIS_EXEC
if(_module == _main) std::cout << disassemble(code) << '\n';
#endif
return _exec(code, _module);
}catch (const Exception& e){
// python-level exception: print its traceback summary
Str sum = e.summary() + "\n";
_stderr(this, sum.data, sum.size);
}
#if !PK_DEBUG_FULL_EXCEPTION
catch (const std::exception& e) {
// C++-level exception leaking out of the interpreter — likely a bug
Str msg = "An std::exception occurred! It could be a bug.\n";
msg = msg + e.what() + "\n";
_stderr(this, msg.data, msg.size);
}
#endif
// error path: drop all frames and stack values before returning
callstack.clear();
s_data.clear();
return nullptr;
}
// Create and register a new type object deriving from `base`.
// If `mod` is non-null the type is also bound as an attribute of that module.
PyObject* VM::new_type_object(PyObject* mod, StrName name, Type base, bool subclass_enabled){
// the new type's index is the next slot of the global type table
PyObject* obj = heap._new<Type>(tp_type, _all_types.size());
const PyTypeInfo& base_info = _all_types[base];
if(!base_info.subclass_enabled){
// NOTE(review): `obj` is already allocated when this throws; it remains
// on the GC heap without a table entry — confirm this is intended
TypeError(fmt("type ", base_info.name.escape(), " is not `subclass_enabled`"));
}
PyTypeInfo info{
obj,
base,
// qualify the name with its module unless it lives in builtins
(mod!=nullptr && mod!=builtins) ? Str(OBJ_NAME(mod)+"."+name.sv()): name.sv(),
subclass_enabled,
};
if(mod != nullptr) mod->attr().set(name, obj);
_all_types.push_back(info);
return obj;
}
  121. Type VM::_new_type_object(StrName name, Type base) {
  122. PyObject* obj = new_type_object(nullptr, name, base, false);
  123. return PK_OBJ_GET(Type, obj);
  124. }
  125. PyObject* VM::_find_type_object(const Str& type){
  126. PyObject* obj = builtins->attr().try_get(type);
  127. if(obj == nullptr){
  128. for(auto& t: _all_types) if(t.name == type) return t.obj;
  129. throw std::runtime_error(fmt("type not found: ", type));
  130. }
  131. check_non_tagged_type(obj, tp_type);
  132. return obj;
  133. }
  134. Type VM::_type(const Str& type){
  135. PyObject* obj = _find_type_object(type);
  136. return PK_OBJ_GET(Type, obj);
  137. }
// Resolve a type name to its PyTypeInfo record.
// Unknown names are a fatal internal error (unlike _find_type_object, which throws).
PyTypeInfo* VM::_type_info(const Str& type){
PyObject* obj = builtins->attr().try_get(type);
if(obj == nullptr){
// not a builtin: linear scan over every registered type
for(auto& t: _all_types) if(t.name == type) return &t;
FATAL_ERROR();
}
return &_all_types[PK_OBJ_GET(Type, obj)];
}
// Direct index into the global type table.
PyTypeInfo* VM::_type_info(Type type){
return &_all_types[type];
}
// Type info for any object, including tagged pointers.
// Tagged ints/floats carry no type field, so they are mapped explicitly.
const PyTypeInfo* VM::_inst_type_info(PyObject* obj){
if(is_int(obj)) return &_all_types[tp_int];
if(is_float(obj)) return &_all_types[tp_float];
return &_all_types[obj->type];
}
// Equivalent of python's `lhs == rhs`.
// Order: identity, lhs C slot, lhs python __eq__, then the reflected
// rhs C slot and rhs python __eq__. NotImplemented falls through to the
// next candidate; if everything returns NotImplemented the result is false.
bool VM::py_equals(PyObject* lhs, PyObject* rhs){
if(lhs == rhs) return true;
const PyTypeInfo* ti = _inst_type_info(lhs);
PyObject* res;
if(ti->m__eq__){
res = ti->m__eq__(this, lhs, rhs);
if(res != vm->NotImplemented) return res == vm->True;
}
res = call_method(lhs, __eq__, rhs);
if(res != vm->NotImplemented) return res == vm->True;
// reflected comparison: rhs.__eq__(lhs)
ti = _inst_type_info(rhs);
if(ti->m__eq__){
res = ti->m__eq__(this, rhs, lhs);
if(res != vm->NotImplemented) return res == vm->True;
}
res = call_method(rhs, __eq__, lhs);
if(res != vm->NotImplemented) return res == vm->True;
return false;
}
  173. int VM::normalized_index(int index, int size){
  174. if(index < 0) index += size;
  175. if(index < 0 || index >= size){
  176. IndexError(std::to_string(index) + " not in [0, " + std::to_string(size) + ")");
  177. }
  178. return index;
  179. }
  180. PyObject* VM::py_next(PyObject* obj){
  181. const PyTypeInfo* ti = _inst_type_info(obj);
  182. if(ti->m__next__) return ti->m__next__(this, obj);
  183. return call_method(obj, __next__);
  184. }
// Import a module by name, returning the module object.
// Resolution order: cached module -> lazy-registered source -> _import_handler,
// trying `<name>.py` first and `<name>/__init__.py` as a package fallback.
PyObject* VM::py_import(StrName name, bool relative){
Str filename;
// `type` tags the pending import: 0 = plain .py, 1 = package __init__,
// 2 = relative (inferred from how it is set/tested below)
int type;
if(relative){
ImportContext* ctx = &_import_context;
type = 2;
// resolve against the innermost pending import that is a package
for(auto it=ctx->pending.rbegin(); it!=ctx->pending.rend(); ++it){
if(it->second == 2) continue;
if(it->second == 1){
filename = fmt(it->first, kPlatformSep, name, ".py");
name = fmt(it->first, '.', name).c_str();
break;
}
}
if(filename.length() == 0) _error("ImportError", "relative import outside of package");
}else{
type = 0;
filename = fmt(name, ".py");
}
// reject circular imports among modules currently mid-import
for(auto& [k, v]: _import_context.pending){
if(k == name){
vm->_error("ImportError", fmt("circular import ", name.escape()));
}
}
PyObject* ext_mod = _modules.try_get(name);
if(ext_mod == nullptr){
Str source;
auto it = _lazy_modules.find(name);
if(it == _lazy_modules.end()){
// not lazy-registered: ask the host's import handler for the bytes
Bytes b = _import_handler(filename);
if(!relative && !b){
// fall back to the package form: <name>/__init__.py
filename = fmt(name, kPlatformSep, "__init__.py");
b = _import_handler(filename);
if(b) type = 1;
}
if(!b) _error("ImportError", fmt("module ", name.escape(), " not found"));
source = Str(b.str());
}else{
// lazy module: source was registered in advance; consume the entry
source = it->second;
_lazy_modules.erase(it);
}
// mark this import as pending for the duration of the exec (RAII)
auto _ = _import_context.temp(this, name, type);
CodeObject_ code = compile(source, filename, EXEC_MODE);
PyObject* new_mod = new_module(name);
_exec(code, new_mod);
new_mod->attr()._try_perfect_rehash();
return new_mod;
}else{
// already imported: return the cached module object
return ext_mod;
}
}
// Tear down interpreter state. Frames and the value stack are dropped
// first so no frame outlives the objects it references.
VM::~VM() {
callstack.clear();
s_data.clear();
_all_types.clear();
_modules.clear();
_lazy_modules.clear();
}
  243. PyObject* VM::py_negate(PyObject* obj){
  244. const PyTypeInfo* ti = _inst_type_info(obj);
  245. if(ti->m__neg__) return ti->m__neg__(this, obj);
  246. return call_method(obj, __neg__);
  247. }
  248. void VM::check_int_or_float(PyObject *obj){
  249. if(!is_tagged(obj)){
  250. TypeError("expected 'int' or 'float', got " + OBJ_NAME(_t(obj)).escape());
  251. }
  252. }
// Truthiness test, like python's bool(obj).
// Fast paths for bool/None/int/float; then __len__; everything else is true.
// NOTE(review): a python-level __bool__ is not consulted — only __len__.
bool VM::py_bool(PyObject* obj){
if(is_non_tagged_type(obj, tp_bool)) return obj == True;
if(obj == None) return false;
if(is_int(obj)) return _CAST(i64, obj) != 0;
if(is_float(obj)) return _CAST(f64, obj) != 0.0;
PyObject* self;
PyObject* len_f = get_unbound_method(obj, __len__, &self, false);
if(self != PY_NULL){
// container-like object: truthy iff non-empty
PyObject* ret = call_method(self, len_f);
return CAST(i64, ret) > 0;
}
return true;
}
  266. PyObject* VM::py_list(PyObject* it){
  267. auto _lock = heap.gc_scope_lock();
  268. it = py_iter(it);
  269. List list;
  270. PyObject* obj = py_next(it);
  271. while(obj != StopIteration){
  272. list.push_back(obj);
  273. obj = py_next(it);
  274. }
  275. return VAR(std::move(list));
  276. }
// Resolve a slice object against a sequence of `length` into concrete
// (start, stop, step) ints, clamping like CPython's slice.indices().
void VM::parse_int_slice(const Slice& s, int length, int& start, int& stop, int& step){
auto clip = [](int value, int min, int max){
if(value < min) return min;
if(value > max) return max;
return value;
};
if(s.step == None) step = 1;
else step = CAST(int, s.step);
if(step == 0) ValueError("slice step cannot be zero");
if(step > 0){
// forward slice: defaults 0/length, indices clamped into [0, length]
if(s.start == None){
start = 0;
}else{
start = CAST(int, s.start);
if(start < 0) start += length;   // negative index counts from the end
start = clip(start, 0, length);
}
if(s.stop == None){
stop = length;
}else{
stop = CAST(int, s.stop);
if(stop < 0) stop += length;
stop = clip(stop, 0, length);
}
}else{
// backward slice: defaults length-1/-1, indices clamped into [-1, length-1]
if(s.start == None){
start = length - 1;
}else{
start = CAST(int, s.start);
if(start < 0) start += length;
start = clip(start, -1, length - 1);
}
if(s.stop == None){
stop = -1;
}else{
stop = CAST(int, s.stop);
if(stop < 0) stop += length;
stop = clip(stop, -1, length - 1);
}
}
}
// Hash an object, mirroring CPython's rules: a type with a custom __eq__
// but no __hash__ is unhashable; trivial objects hash by identity.
i64 VM::py_hash(PyObject* obj){
// https://docs.python.org/3.10/reference/datamodel.html#object.__hash__
const PyTypeInfo* ti = _inst_type_info(obj);
if(ti->m__hash__) return ti->m__hash__(this, obj);
PyObject* self;
PyObject* f = get_unbound_method(obj, __hash__, &self, false);
// NOTE(review): this tests `f` where sibling methods test `self != PY_NULL`;
// presumably equivalent here — confirm against get_unbound_method's contract
if(f != nullptr){
PyObject* ret = call_method(self, f);
return CAST(i64, ret);
}
// if it is trivial `object`, return PK_BITS
if(ti == &_all_types[tp_object]) return PK_BITS(obj);
// otherwise, we check if it has a custom __eq__ other than object.__eq__
bool has_custom_eq = false;
if(ti->m__eq__) has_custom_eq = true;
else{
f = get_unbound_method(obj, __eq__, &self, false);
has_custom_eq = f != _t(tp_object)->attr(__eq__);
}
if(has_custom_eq){
// custom equality without __hash__ -> unhashable
TypeError(fmt("unhashable type: ", ti->name.escape()));
return 0;
}else{
// identity hash: the object's pointer bits
return PK_BITS(obj);
}
}
  344. PyObject* VM::format(Str spec, PyObject* obj){
  345. if(spec.empty()) return py_str(obj);
  346. char type;
  347. switch(spec.end()[-1]){
  348. case 'f': case 'd': case 's':
  349. type = spec.end()[-1];
  350. spec = spec.substr(0, spec.length() - 1);
  351. break;
  352. default: type = ' '; break;
  353. }
  354. char pad_c = ' ';
  355. if(spec[0] == '0'){
  356. pad_c = '0';
  357. spec = spec.substr(1);
  358. }
  359. char align;
  360. if(spec[0] == '>'){
  361. align = '>';
  362. spec = spec.substr(1);
  363. }else if(spec[0] == '<'){
  364. align = '<';
  365. spec = spec.substr(1);
  366. }else{
  367. if(is_int(obj) || is_float(obj)) align = '>';
  368. else align = '<';
  369. }
  370. int dot = spec.index(".");
  371. int width, precision;
  372. try{
  373. if(dot >= 0){
  374. if(dot == 0){
  375. width = -1;
  376. }else{
  377. width = Number::stoi(spec.substr(0, dot).str());
  378. }
  379. precision = Number::stoi(spec.substr(dot+1).str());
  380. }else{
  381. width = Number::stoi(spec.str());
  382. precision = -1;
  383. }
  384. }catch(...){
  385. ValueError("invalid format specifer");
  386. UNREACHABLE();
  387. }
  388. if(type != 'f' && dot >= 0) ValueError("precision not allowed in the format specifier");
  389. Str ret;
  390. if(type == 'f'){
  391. f64 val = CAST(f64, obj);
  392. if(precision < 0) precision = 6;
  393. std::stringstream ss;
  394. ss << std::fixed << std::setprecision(precision) << val;
  395. ret = ss.str();
  396. }else if(type == 'd'){
  397. ret = std::to_string(CAST(i64, obj));
  398. }else if(type == 's'){
  399. ret = CAST(Str&, obj);
  400. }else{
  401. ret = CAST(Str&, py_str(obj));
  402. }
  403. if(width != -1 && width > ret.length()){
  404. int pad = width - ret.length();
  405. std::string padding(pad, pad_c);
  406. if(align == '>') ret = padding.c_str() + ret;
  407. else ret = ret + padding.c_str();
  408. }
  409. return VAR(ret);
  410. }
  411. PyObject* VM::new_module(StrName name) {
  412. PyObject* obj = heap._new<DummyModule>(tp_module);
  413. obj->attr().set("__name__", VAR(name.sv()));
  414. // we do not allow override in order to avoid memory leak
  415. // it is because Module objects are not garbage collected
  416. if(_modules.contains(name)) throw std::runtime_error("module already exists");
  417. _modules.set(name, obj);
  418. return obj;
  419. }
// Render a bytecode's argument as human-readable text for disassembly.
// `vm` may be nullptr, in which case constants are shown without repr().
static std::string _opcode_argstr(VM* vm, Bytecode byte, const CodeObject* co){
std::string argStr = byte.arg == -1 ? "" : std::to_string(byte.arg);
switch(byte.op){
case OP_LOAD_CONST: case OP_FORMAT_STRING:
// arg indexes the constant pool; append its repr if a vm is available
if(vm != nullptr){
argStr += fmt(" (", CAST(Str, vm->py_repr(co->consts[byte.arg])), ")");
}
break;
case OP_LOAD_NAME: case OP_LOAD_GLOBAL: case OP_LOAD_NONLOCAL: case OP_STORE_GLOBAL:
case OP_LOAD_ATTR: case OP_LOAD_METHOD: case OP_STORE_ATTR: case OP_DELETE_ATTR:
case OP_IMPORT_NAME: case OP_BEGIN_CLASS: case OP_RAISE:
case OP_DELETE_GLOBAL: case OP_INC_GLOBAL: case OP_DEC_GLOBAL: case OP_STORE_CLASS_ATTR:
// arg is an interned name (StrName index)
argStr += fmt(" (", StrName(byte.arg).sv(), ")");
break;
case OP_LOAD_FAST: case OP_STORE_FAST: case OP_DELETE_FAST: case OP_INC_FAST: case OP_DEC_FAST:
// arg indexes the code object's local variable names
argStr += fmt(" (", co->varnames[byte.arg].sv(), ")");
break;
case OP_LOAD_FUNCTION:
// arg indexes the nested function declarations
argStr += fmt(" (", co->func_decls[byte.arg]->code->name, ")");
break;
}
return argStr;
}
// Produce a human-readable disassembly of a code object, recursing into
// nested function declarations. Jump targets are marked with "-> ".
Str VM::disassemble(CodeObject_ co){
// pad/truncate a string to exactly n columns
auto pad = [](const Str& s, const int n){
if(s.length() >= n) return s.substr(0, n);
return s + std::string(n - s.length(), ' ');
};
// collect instruction indices that are targets of jumps
std::vector<int> jumpTargets;
for(auto byte : co->codes){
if(byte.op == OP_JUMP_ABSOLUTE || byte.op == OP_POP_JUMP_IF_FALSE || byte.op == OP_SHORTCUT_IF_FALSE_OR_POP){
jumpTargets.push_back(byte.arg);
}
}
std::stringstream ss;
int prev_line = -1;
for(int i=0; i<co->codes.size(); i++){
const Bytecode& byte = co->codes[i];
// print the source line number only when it changes
Str line = std::to_string(co->lines[i]);
if(co->lines[i] == prev_line) line = "";
else{
if(prev_line != -1) ss << "\n";
prev_line = co->lines[i];
}
std::string pointer;
if(std::find(jumpTargets.begin(), jumpTargets.end(), i) != jumpTargets.end()){
pointer = "-> ";
}else{
pointer = " ";
}
ss << pad(line, 8) << pointer << pad(std::to_string(i), 3);
ss << " " << pad(OP_NAMES[byte.op], 25) << " ";
// ss << pad(byte.arg == -1 ? "" : std::to_string(byte.arg), 5);
std::string argStr = _opcode_argstr(this, byte, co.get());
ss << argStr;
// ss << pad(argStr, 40); // may overflow
// ss << co->blocks[byte.block].type;
if(i != co->codes.size() - 1) ss << '\n';
}
// recurse into nested functions
for(auto& decl: co->func_decls){
ss << "\n\n" << "Disassembly of " << decl->code->name << ":\n";
ss << disassemble(decl->code);
}
ss << "\n";
return Str(ss.str());
}
#if PK_DEBUG_CEVAL_STEP
// Debug-only: dump the current frame, value stack contents, and the
// instruction about to execute to stdout. '|' marks each frame's stack base.
void VM::_log_s_data(const char* title) {
if(_main == nullptr) return;
if(callstack.empty()) return;
std::stringstream ss;
if(title) ss << title << " | ";
// count how many frames base at each stack slot
std::map<PyObject**, int> sp_bases;
for(Frame& f: callstack.data()){
if(f._sp_base == nullptr) FATAL_ERROR();
sp_bases[f._sp_base] += 1;
}
FrameId frame = top_frame();
int line = frame->co->lines[frame->_ip];
ss << frame->co->name << ":" << line << " [";
for(PyObject** p=s_data.begin(); p!=s_data.end(); p++){
ss << std::string(sp_bases[p], '|');
if(sp_bases[p] > 0) ss << " ";
PyObject* obj = *p;
// render each stack slot in a compact, type-dependent form
if(obj == nullptr) ss << "(nil)";
else if(obj == PY_NULL) ss << "NULL";
else if(is_int(obj)) ss << CAST(i64, obj);
else if(is_float(obj)) ss << CAST(f64, obj);
else if(is_type(obj, tp_str)) ss << CAST(Str, obj).escape();
else if(obj == None) ss << "None";
else if(obj == True) ss << "True";
else if(obj == False) ss << "False";
else if(is_type(obj, tp_function)){
auto& f = CAST(Function&, obj);
ss << f.decl->code->name << "(...)";
} else if(is_type(obj, tp_type)){
Type t = PK_OBJ_GET(Type, obj);
ss << "<class " + _all_types[t].name.escape() + ">";
} else if(is_type(obj, tp_list)){
auto& t = CAST(List&, obj);
ss << "list(size=" << t.size() << ")";
} else if(is_type(obj, tp_tuple)){
auto& t = CAST(Tuple&, obj);
ss << "tuple(size=" << t.size() << ")";
} else ss << "(" << obj_type_name(this, obj->type) << ")";
ss << ", ";
}
std::string output = ss.str();
// strip the trailing ", " before closing the bracket
if(!s_data.empty()) {
output.pop_back(); output.pop_back();
}
output.push_back(']');
Bytecode byte = frame->co->codes[frame->_ip];
std::cout << output << " " << OP_NAMES[byte.op] << " " << _opcode_argstr(nullptr, byte, frame->co) << std::endl;
}
#endif
// Bootstrap the type system and the builtins/__main__ modules.
// Registration ORDER matters: each _new_type_object call assigns the next
// index in _all_types, and tp_int/tp_float must land on their fixed slots.
void VM::init_builtin_types(){
// `object` and `type` are created by hand since _new_type_object needs them
_all_types.push_back({heap._new<Type>(Type(1), Type(0)), -1, "object", true});
_all_types.push_back({heap._new<Type>(Type(1), Type(1)), 0, "type", false});
tp_object = 0; tp_type = 1;
tp_int = _new_type_object("int");
tp_float = _new_type_object("float");
// tagged int/float encoding relies on these exact indices
if(tp_int.index != kTpIntIndex || tp_float.index != kTpFloatIndex) FATAL_ERROR();
tp_bool = _new_type_object("bool");
tp_str = _new_type_object("str");
tp_list = _new_type_object("list");
tp_tuple = _new_type_object("tuple");
tp_slice = _new_type_object("slice");
tp_range = _new_type_object("range");
tp_module = _new_type_object("module");
tp_function = _new_type_object("function");
tp_native_func = _new_type_object("native_func");
tp_bound_method = _new_type_object("bound_method");
tp_super = _new_type_object("super");
tp_exception = _new_type_object("Exception");
tp_bytes = _new_type_object("bytes");
tp_mappingproxy = _new_type_object("mappingproxy");
tp_dict = _new_type_object("dict");
tp_property = _new_type_object("property");
tp_star_wrapper = _new_type_object("_star_wrapper");
// singletons, each with its own hidden type
this->None = heap._new<Dummy>(_new_type_object("NoneType"));
this->NotImplemented = heap._new<Dummy>(_new_type_object("NotImplementedType"));
this->Ellipsis = heap._new<Dummy>(_new_type_object("ellipsis"));
this->True = heap._new<Dummy>(tp_bool);
this->False = heap._new<Dummy>(tp_bool);
this->StopIteration = heap._new<Dummy>(_new_type_object("StopIterationType"));
this->builtins = new_module("builtins");
// setup public types
builtins->attr().set("type", _t(tp_type));
builtins->attr().set("object", _t(tp_object));
builtins->attr().set("bool", _t(tp_bool));
builtins->attr().set("int", _t(tp_int));
builtins->attr().set("float", _t(tp_float));
builtins->attr().set("str", _t(tp_str));
builtins->attr().set("list", _t(tp_list));
builtins->attr().set("tuple", _t(tp_tuple));
builtins->attr().set("range", _t(tp_range));
builtins->attr().set("bytes", _t(tp_bytes));
builtins->attr().set("dict", _t(tp_dict));
builtins->attr().set("property", _t(tp_property));
builtins->attr().set("StopIteration", StopIteration);
builtins->attr().set("NotImplemented", NotImplemented);
builtins->attr().set("slice", _t(tp_slice));
post_init();
// attribute tables are now stable; compact them for faster lookup
for(int i=0; i<_all_types.size(); i++){
_all_types[i].obj->attr()._try_perfect_rehash();
}
for(auto [k, v]: _modules.items()) v->attr()._try_perfect_rehash();
this->_main = new_module("__main__");
}
  590. // `heap.gc_scope_lock();` needed before calling this function
  591. void VM::_unpack_as_list(ArgsView args, List& list){
  592. for(PyObject* obj: args){
  593. if(is_non_tagged_type(obj, tp_star_wrapper)){
  594. const StarWrapper& w = _CAST(StarWrapper&, obj);
  595. // maybe this check should be done in the compile time
  596. if(w.level != 1) TypeError("expected level 1 star wrapper");
  597. PyObject* _0 = py_iter(w.obj);
  598. PyObject* _1 = py_next(_0);
  599. while(_1 != StopIteration){
  600. list.push_back(_1);
  601. _1 = py_next(_0);
  602. }
  603. }else{
  604. list.push_back(obj);
  605. }
  606. }
  607. }
  608. // `heap.gc_scope_lock();` needed before calling this function
  609. void VM::_unpack_as_dict(ArgsView args, Dict& dict){
  610. for(PyObject* obj: args){
  611. if(is_non_tagged_type(obj, tp_star_wrapper)){
  612. const StarWrapper& w = _CAST(StarWrapper&, obj);
  613. // maybe this check should be done in the compile time
  614. if(w.level != 2) TypeError("expected level 2 star wrapper");
  615. const Dict& other = CAST(Dict&, w.obj);
  616. dict.update(other);
  617. }else{
  618. const Tuple& t = CAST(Tuple&, obj);
  619. if(t.size() != 2) TypeError("expected tuple of length 2");
  620. dict.set(t[0], t[1]);
  621. }
  622. }
  623. }
// Lay out a python-level call's locals into `buffer` (indexed by varname
// slot): positionals, *args tuple, kwdefaults, keyword args, and **kwargs.
// `kwargs` is a flat view of (key-int, value) pairs.
void VM::_prepare_py_call(PyObject** buffer, ArgsView args, ArgsView kwargs, const FuncDecl_& decl){
const CodeObject* co = decl->code.get();
int co_nlocals = co->varnames.size();
int decl_argc = decl->args.size();
if(args.size() < decl_argc){
vm->TypeError(fmt(
"expected ", decl_argc, " positional arguments, got ", args.size(),
" (", co->name, ')'
));
}
int i = 0;
// prepare args
for(int index: decl->args) buffer[index] = args[i++];
// set extra varnames to nullptr
for(int j=i; j<co_nlocals; j++) buffer[j] = PY_NULL;
// prepare kwdefaults
for(auto& kv: decl->kwargs) buffer[kv.key] = kv.value;
// handle *args
if(decl->starred_arg != -1){
// collect all remaining positionals into the *args tuple
ArgsView vargs(args.begin() + i, args.end());
buffer[decl->starred_arg] = VAR(vargs.to_tuple());
i += vargs.size();
}else{
// kwdefaults override
// extra positionals fill keyword parameters in declaration order
for(auto& kv: decl->kwargs){
if(i >= args.size()) break;
buffer[kv.key] = args[i++];
}
if(i < args.size()) TypeError(fmt("too many arguments", " (", decl->code->name, ')'));
}
PyObject* vkwargs;
if(decl->starred_kwarg != -1){
vkwargs = VAR(Dict(this));
buffer[decl->starred_kwarg] = vkwargs;
}else{
vkwargs = nullptr;
}
for(int j=0; j<kwargs.size(); j+=2){
StrName key(CAST(int, kwargs[j]));
int index = co->varnames_inv.try_get(key);
if(index < 0){
// not a declared parameter: goes into **kwargs or is an error
if(vkwargs == nullptr){
TypeError(fmt(key.escape(), " is an invalid keyword argument for ", co->name, "()"));
}else{
Dict& dict = _CAST(Dict&, vkwargs);
dict.set(VAR(key.sv()), kwargs[j+1]);
}
}else{
buffer[index] = kwargs[j+1];
}
}
}
// Central call dispatcher. Expects the stack to hold
// [callable, <self-or-PY_NULL>, ARGC args..., KWARGC (key,value) pairs...]
// and handles native funcs, python functions/generators, type construction
// (__new__/__init__), and objects with a __call__ overload.
// If `op_call` is true, a python-level call pushes a frame and returns
// PY_OP_CALL so the interpreter loop continues it instead of recursing.
PyObject* VM::vectorcall(int ARGC, int KWARGC, bool op_call){
PyObject** p1 = s_data._sp - KWARGC*2;
PyObject** p0 = p1 - ARGC - 2;
// [callable, <self>, args..., kwargs...]
// ^p0 ^p1 ^_sp
PyObject* callable = p1[-(ARGC + 2)];
bool method_call = p1[-(ARGC + 1)] != PY_NULL;
// handle boundmethod, do a patch
if(is_non_tagged_type(callable, tp_bound_method)){
if(method_call) FATAL_ERROR();
auto& bm = CAST(BoundMethod&, callable);
callable = bm.func; // get unbound method
p1[-(ARGC + 2)] = bm.func;
p1[-(ARGC + 1)] = bm.self;
method_call = true;
// [unbound, self, args..., kwargs...]
}
// args includes the self slot when this is a method call
ArgsView args(p1 - ARGC - int(method_call), p1);
ArgsView kwargs(p1, s_data._sp);
PyObject* buffer[PK_MAX_CO_VARNAMES];
if(is_non_tagged_type(callable, tp_native_func)){
const auto& f = PK_OBJ_GET(NativeFunc, callable);
PyObject* ret;
if(f.decl != nullptr){
// new-style native func with a python signature: bind like a py call
int co_nlocals = f.decl->code->varnames.size();
_prepare_py_call(buffer, args, kwargs, f.decl);
// copy buffer back to stack
s_data.reset(args.begin());
for(int j=0; j<co_nlocals; j++) PUSH(buffer[j]);
ret = f.call(vm, ArgsView(s_data._sp - co_nlocals, s_data._sp));
}else{
if(KWARGC != 0) TypeError("old-style native_func does not accept keyword arguments");
f.check_size(this, args);
ret = f.call(this, args);
}
s_data.reset(p0);
return ret;
}
if(is_non_tagged_type(callable, tp_function)){
/*****************_py_call*****************/
// callable must be a `function` object
if(s_data.is_overflow()) StackOverflowError();
const Function& fn = PK_OBJ_GET(Function, callable);
const FuncDecl_& decl = fn.decl;
const CodeObject* co = decl->code.get();
int co_nlocals = co->varnames.size();
_prepare_py_call(buffer, args, kwargs, decl);
if(co->is_generator){
// generators: do not run; capture the frame and bound locals
s_data.reset(p0);
return _py_generator(
Frame(&s_data, nullptr, co, fn._module, callable),
ArgsView(buffer, buffer + co_nlocals)
);
}
// copy buffer back to stack
s_data.reset(args.begin());
for(int j=0; j<co_nlocals; j++) PUSH(buffer[j]);
callstack.emplace(&s_data, p0, co, fn._module, callable, FastLocals(co, args.begin()));
if(op_call) return PY_OP_CALL;
return _run_top_frame();
/*****************_py_call*****************/
}
if(is_non_tagged_type(callable, tp_type)){
if(method_call) FATAL_ERROR();
// [type, NULL, args..., kwargs...]
PyObject* new_f = find_name_in_mro(callable, __new__);
PyObject* obj;
#if PK_DEBUG_EXTRA_CHECK
PK_ASSERT(new_f != nullptr);
#endif
if(new_f == cached_object__new__) {
// fast path for object.__new__
Type t = PK_OBJ_GET(Type, callable);
obj= vm->heap.gcnew<DummyInstance>(t);
}else{
// call the custom __new__(cls, ...) recursively
PUSH(new_f);
PUSH(PY_NULL);
PUSH(callable); // cls
for(PyObject* o: args) PUSH(o);
for(PyObject* o: kwargs) PUSH(o);
// if obj is not an instance of callable, the behavior is undefined
obj = vectorcall(ARGC+1, KWARGC);
}
// __init__
PyObject* self;
callable = get_unbound_method(obj, __init__, &self, false);
if (self != PY_NULL) {
// replace `NULL` with `self`
p1[-(ARGC + 2)] = callable;
p1[-(ARGC + 1)] = self;
// [init_f, self, args..., kwargs...]
vectorcall(ARGC, KWARGC);
// We just discard the return value of `__init__`
// in cpython it raises a TypeError if the return value is not None
}else{
// manually reset the stack
s_data.reset(p0);
}
return obj;
}
// handle `__call__` overload
PyObject* self;
PyObject* call_f = get_unbound_method(callable, __call__, &self, false);
if(self != PY_NULL){
p1[-(ARGC + 2)] = call_f;
p1[-(ARGC + 1)] = self;
// [call_f, self, args..., kwargs...]
return vectorcall(ARGC, KWARGC, false);
}
TypeError(OBJ_NAME(_t(callable)).escape() + " object is not callable");
return nullptr;
}
  788. // https://docs.python.org/3/howto/descriptor.html#invocation-from-an-instance
  789. PyObject* VM::getattr(PyObject* obj, StrName name, bool throw_err){
  790. PyObject* objtype;
  791. // handle super() proxy
  792. if(is_non_tagged_type(obj, tp_super)){
  793. const Super& super = PK_OBJ_GET(Super, obj);
  794. obj = super.first;
  795. objtype = _t(super.second);
  796. }else{
  797. objtype = _t(obj);
  798. }
  799. PyObject* cls_var = find_name_in_mro(objtype, name);
  800. if(cls_var != nullptr){
  801. // handle descriptor
  802. if(is_non_tagged_type(cls_var, tp_property)){
  803. const Property& prop = _CAST(Property&, cls_var);
  804. return call(prop.getter, obj);
  805. }
  806. }
  807. // handle instance __dict__
  808. if(!is_tagged(obj) && obj->is_attr_valid()){
  809. PyObject* val = obj->attr().try_get(name);
  810. if(val != nullptr) return val;
  811. }
  812. if(cls_var != nullptr){
  813. // bound method is non-data descriptor
  814. if(is_non_tagged_type(cls_var, tp_function) || is_non_tagged_type(cls_var, tp_native_func)){
  815. return VAR(BoundMethod(obj, cls_var));
  816. }
  817. return cls_var;
  818. }
  819. if(throw_err) AttributeError(obj, name);
  820. return nullptr;
  821. }
  822. // used by OP_LOAD_METHOD
  823. // try to load a unbound method (fallback to `getattr` if not found)
  824. PyObject* VM::get_unbound_method(PyObject* obj, StrName name, PyObject** self, bool throw_err, bool fallback){
  825. *self = PY_NULL;
  826. PyObject* objtype;
  827. // handle super() proxy
  828. if(is_non_tagged_type(obj, tp_super)){
  829. const Super& super = PK_OBJ_GET(Super, obj);
  830. obj = super.first;
  831. objtype = _t(super.second);
  832. }else{
  833. objtype = _t(obj);
  834. }
  835. PyObject* cls_var = find_name_in_mro(objtype, name);
  836. if(fallback){
  837. if(cls_var != nullptr){
  838. // handle descriptor
  839. if(is_non_tagged_type(cls_var, tp_property)){
  840. const Property& prop = _CAST(Property&, cls_var);
  841. return call(prop.getter, obj);
  842. }
  843. }
  844. // handle instance __dict__
  845. if(!is_tagged(obj) && obj->is_attr_valid()){
  846. PyObject* val = obj->attr().try_get(name);
  847. if(val != nullptr) return val;
  848. }
  849. }
  850. if(cls_var != nullptr){
  851. if(is_non_tagged_type(cls_var, tp_function) || is_non_tagged_type(cls_var, tp_native_func)){
  852. *self = obj;
  853. }
  854. return cls_var;
  855. }
  856. if(throw_err) AttributeError(obj, name);
  857. return nullptr;
  858. }
  859. void VM::setattr(PyObject* obj, StrName name, PyObject* value){
  860. PyObject* objtype;
  861. // handle super() proxy
  862. if(is_non_tagged_type(obj, tp_super)){
  863. Super& super = PK_OBJ_GET(Super, obj);
  864. obj = super.first;
  865. objtype = _t(super.second);
  866. }else{
  867. objtype = _t(obj);
  868. }
  869. PyObject* cls_var = find_name_in_mro(objtype, name);
  870. if(cls_var != nullptr){
  871. // handle descriptor
  872. if(is_non_tagged_type(cls_var, tp_property)){
  873. const Property& prop = _CAST(Property&, cls_var);
  874. if(prop.setter != vm->None){
  875. call(prop.setter, obj, value);
  876. }else{
  877. TypeError(fmt("readonly attribute: ", name.escape()));
  878. }
  879. return;
  880. }
  881. }
  882. // handle instance __dict__
  883. if(is_tagged(obj) || !obj->is_attr_valid()) TypeError("cannot set attribute");
  884. obj->attr().set(name, value);
  885. }
// Convenience overload of bind() with no docstring.
PyObject* VM::bind(PyObject* obj, const char* sig, NativeFuncC fn, UserData userdata){
    return bind(obj, sig, nullptr, fn, userdata);
}
  889. PyObject* VM::bind(PyObject* obj, const char* sig, const char* docstring, NativeFuncC fn, UserData userdata){
  890. CodeObject_ co;
  891. try{
  892. // fn(a, b, *c, d=1) -> None
  893. co = compile("def " + Str(sig) + " : pass", "<bind>", EXEC_MODE);
  894. }catch(Exception&){
  895. throw std::runtime_error("invalid signature: " + std::string(sig));
  896. }
  897. if(co->func_decls.size() != 1){
  898. throw std::runtime_error("expected 1 function declaration");
  899. }
  900. FuncDecl_ decl = co->func_decls[0];
  901. decl->signature = Str(sig);
  902. if(docstring != nullptr){
  903. decl->docstring = Str(docstring).strip();
  904. }
  905. PyObject* f_obj = VAR(NativeFunc(fn, decl));
  906. PK_OBJ_GET(NativeFunc, f_obj).set_userdata(userdata);
  907. if(obj != nullptr) obj->attr().set(decl->code->name, f_obj);
  908. return f_obj;
  909. }
  910. PyObject* VM::bind_property(PyObject* obj, Str name, NativeFuncC fget, NativeFuncC fset){
  911. PyObject* _0 = heap.gcnew<NativeFunc>(tp_native_func, fget, 1, false);
  912. PyObject* _1 = vm->None;
  913. if(fset != nullptr) _1 = heap.gcnew<NativeFunc>(tp_native_func, fset, 2, false);
  914. Str signature = name;
  915. int pos = name.index(":");
  916. if(pos > 0) name = name.substr(0, pos).strip();
  917. PyObject* prop = VAR(Property(_0, _1, signature));
  918. obj->attr().set(name, prop);
  919. return prop;
  920. }
  921. void VM::_error(Exception e){
  922. if(callstack.empty()){
  923. e.is_re = false;
  924. throw e;
  925. }
  926. PUSH(VAR(e));
  927. _raise();
  928. }
// GC mark phase: visit every root so that all reachable objects survive
// the subsequent sweep.
void ManagedHeap::mark() {
    // objects pinned outside of collection
    for(PyObject* obj: _no_gc) PK_OBJ_MARK(obj);
    // everything reachable from the active call frames
    for(auto& frame : vm->callstack.data()) frame._gc_mark();
    // the value stack
    for(PyObject* obj: vm->s_data) PK_OBJ_MARK(obj);
    // extension hook for additional user-registered roots
    if(_gc_marker_ex) _gc_marker_ex(vm);
    // in-flight exception objects
    if(vm->_last_exception) PK_OBJ_MARK(vm->_last_exception);
    if(vm->_c.error != nullptr) PK_OBJ_MARK(vm->_c.error);
}
// Return the registered name of `type` from the VM's type table.
Str obj_type_name(VM *vm, Type type){
    return vm->_all_types[type].name;
}
  940. void VM::bind__hash__(Type type, i64 (*f)(VM*, PyObject*)){
  941. PyObject* obj = _t(type);
  942. _all_types[type].m__hash__ = f;
  943. PyObject* nf = bind_method<0>(obj, "__hash__", [](VM* vm, ArgsView args){
  944. i64 ret = lambda_get_userdata<i64(*)(VM*, PyObject*)>(args.begin())(vm, args[0]);
  945. return VAR(ret);
  946. });
  947. PK_OBJ_GET(NativeFunc, nf).set_userdata(f);
  948. }
  949. void VM::bind__len__(Type type, i64 (*f)(VM*, PyObject*)){
  950. PyObject* obj = _t(type);
  951. _all_types[type].m__len__ = f;
  952. PyObject* nf = bind_method<0>(obj, "__len__", [](VM* vm, ArgsView args){
  953. i64 ret = lambda_get_userdata<i64(*)(VM*, PyObject*)>(args.begin())(vm, args[0]);
  954. return VAR(ret);
  955. });
  956. PK_OBJ_GET(NativeFunc, nf).set_userdata(f);
  957. }
// Probe for `key`, honoring deleted slots. On return, `ok` says whether the
// key was found and `i` is either its slot or the first usable insertion slot.
void Dict::_probe_0(PyObject *key, bool &ok, int &i) const{
    ok = false;
    i64 hash = vm->py_hash(key);
    i = hash & _mask;   // capacity is a power of two, so mask == modulo
    // bounded by _capacity so a (pathologically) full table cannot spin forever
    for(int j=0; j<_capacity; j++) {
        if(_items[i].first != nullptr){
            if(vm->py_equals(_items[i].first, key)) { ok = true; break; }
        }else{
            // NOTE(review): first==nullptr with second!=nullptr appears to mark
            // a deleted slot (tombstone) — keep probing past it; a fully empty
            // slot terminates the chain. Confirm against Dict's erase logic.
            if(_items[i].second == nullptr) break;
        }
        // same perturbation sequence as cpython's dict:
        // https://github.com/python/cpython/blob/3.8/Objects/dictobject.c#L166
        i = ((5*i) + 1) & _mask;
    }
}
// Probe for `key` assuming a chain with no deleted slots: the walk stops at
// the first empty slot. NOTE(review): unlike _probe_0 this loop has no
// iteration bound — it relies on the table never being completely full and
// containing no tombstones; confirm callers uphold that invariant.
void Dict::_probe_1(PyObject *key, bool &ok, int &i) const{
    ok = false;
    i = vm->py_hash(key) & _mask;
    while(_items[i].first != nullptr) {
        if(vm->py_equals(_items[i].first, key)) { ok = true; break; }
        // same perturbation sequence as cpython's dict:
        // https://github.com/python/cpython/blob/3.8/Objects/dictobject.c#L166
        i = ((5*i) + 1) & _mask;
    }
}
  983. void CodeObjectSerializer::write_object(VM *vm, PyObject *obj){
  984. if(is_int(obj)) write_int(_CAST(i64, obj));
  985. else if(is_float(obj)) write_float(_CAST(f64, obj));
  986. else if(is_type(obj, vm->tp_str)) write_str(_CAST(Str&, obj));
  987. else if(is_type(obj, vm->tp_bool)) write_bool(_CAST(bool, obj));
  988. else if(obj == vm->None) write_none();
  989. else if(obj == vm->Ellipsis) write_ellipsis();
  990. else{
  991. throw std::runtime_error(fmt(OBJ_NAME(vm->_t(obj)).escape(), " is not serializable"));
  992. }
  993. }
  994. void NativeFunc::check_size(VM* vm, ArgsView args) const{
  995. if(args.size() != argc && argc != -1) {
  996. vm->TypeError(fmt("expected ", argc, " arguments, got ", args.size()));
  997. }
  998. }
// Invoke the wrapped C function pointer with the given arguments.
PyObject* NativeFunc::call(VM *vm, ArgsView args) const {
    return f(vm, args);
}
  1002. } // namespace pkpy