// vm.cpp — pocketpy virtual machine implementation
  1. #include "pocketpy/vm.h"
  2. namespace pkpy{
// Construct the VM: install default stdout/stderr hooks (writing to the
// C++ standard streams), a default import handler that always reports
// "not found" (empty Bytes), and bootstrap the builtin type system.
VM::VM(bool enable_os) : heap(this), enable_os(enable_os) {
    this->vm = this;
    this->_c.error = nullptr;
    _stdout = [](VM* vm, const char* buf, int size) {
        PK_UNUSED(vm);
        std::cout.write(buf, size);
    };
    _stderr = [](VM* vm, const char* buf, int size) {
        PK_UNUSED(vm);
        std::cerr.write(buf, size);
    };
    callstack.reserve(8);
    _main = nullptr;
    _last_exception = nullptr;
    _import_handler = [](const Str& name) {
        PK_UNUSED(name);
        return Bytes();     // empty Bytes => module source not found
    };
    init_builtin_types();
}
  23. PyObject* VM::py_str(PyObject* obj){
  24. const PyTypeInfo* ti = _inst_type_info(obj);
  25. if(ti->m__str__) return ti->m__str__(this, obj);
  26. PyObject* self;
  27. PyObject* f = get_unbound_method(obj, __str__, &self, false);
  28. if(self != PY_NULL) return call_method(self, f);
  29. return py_repr(obj);
  30. }
  31. PyObject* VM::py_repr(PyObject* obj){
  32. const PyTypeInfo* ti = _inst_type_info(obj);
  33. if(ti->m__repr__) return ti->m__repr__(this, obj);
  34. return call_method(obj, __repr__);
  35. }
  36. PyObject* VM::py_json(PyObject* obj){
  37. const PyTypeInfo* ti = _inst_type_info(obj);
  38. if(ti->m__json__) return ti->m__json__(this, obj);
  39. return call_method(obj, __json__);
  40. }
  41. PyObject* VM::py_iter(PyObject* obj){
  42. const PyTypeInfo* ti = _inst_type_info(obj);
  43. if(ti->m__iter__) return ti->m__iter__(this, obj);
  44. PyObject* self;
  45. PyObject* iter_f = get_unbound_method(obj, __iter__, &self, false);
  46. if(self != PY_NULL) return call_method(self, iter_f);
  47. TypeError(OBJ_NAME(_t(obj)).escape() + " object is not iterable");
  48. return nullptr;
  49. }
// Return a handle to the innermost call frame.
// Precondition: the callstack is non-empty (checked only in debug builds).
FrameId VM::top_frame(){
#if PK_DEBUG_EXTRA_CHECK
    if(callstack.empty()) FATAL_ERROR();
#endif
    return FrameId(&callstack.data(), callstack.size()-1);
}
  56. void VM::_pop_frame(){
  57. Frame* frame = &callstack.top();
  58. s_data.reset(frame->_sp_base);
  59. callstack.pop();
  60. }
  61. PyObject* VM::find_name_in_mro(PyObject* cls, StrName name){
  62. PyObject* val;
  63. do{
  64. val = cls->attr().try_get(name);
  65. if(val != nullptr) return val;
  66. Type base = _all_types[PK_OBJ_GET(Type, cls)].base;
  67. if(base.index == -1) break;
  68. cls = _all_types[base].obj;
  69. }while(true);
  70. return nullptr;
  71. }
  72. bool VM::isinstance(PyObject* obj, Type cls_t){
  73. Type obj_t = PK_OBJ_GET(Type, _t(obj));
  74. do{
  75. if(obj_t == cls_t) return true;
  76. Type base = _all_types[obj_t].base;
  77. if(base.index == -1) break;
  78. obj_t = base;
  79. }while(true);
  80. return false;
  81. }
// Compile and run `source` inside `_module` (nullptr means __main__).
// Returns the execution result, or nullptr if an exception escaped;
// on error the frame and value stacks are cleared so the VM stays usable.
PyObject* VM::exec(Str source, Str filename, CompileMode mode, PyObject* _module){
    if(_module == nullptr) _module = _main;
    try {
        CodeObject_ code = compile(source, filename, mode);
#if PK_DEBUG_DIS_EXEC
        if(_module == _main) std::cout << disassemble(code) << '\n';
#endif
        return _exec(code, _module);
    }catch (const Exception& e){
        // python-level exception: print its traceback summary to stderr
        Str sum = e.summary() + "\n";
        _stderr(this, sum.data, sum.size);
    }
#if !PK_DEBUG_FULL_EXCEPTION
    // disabled in "full exception" debug builds so the C++ exception
    // propagates to the host and can be inspected in a debugger
    catch (const std::exception& e) {
        Str msg = "An std::exception occurred! It could be a bug.\n";
        msg = msg + e.what() + "\n";
        _stderr(this, msg.data, msg.size);
    }
#endif
    callstack.clear();
    s_data.clear();
    return nullptr;
}
  105. PyObject* VM::new_type_object(PyObject* mod, StrName name, Type base, bool subclass_enabled){
  106. PyObject* obj = heap._new<Type>(tp_type, _all_types.size());
  107. const PyTypeInfo& base_info = _all_types[base];
  108. if(!base_info.subclass_enabled){
  109. TypeError(fmt("type ", base_info.name.escape(), " is not `subclass_enabled`"));
  110. }
  111. PyTypeInfo info{
  112. obj,
  113. base,
  114. (mod!=nullptr && mod!=builtins) ? Str(OBJ_NAME(mod)+"."+name.sv()): name.sv(),
  115. subclass_enabled,
  116. };
  117. if(mod != nullptr) mod->attr().set(name, obj);
  118. _all_types.push_back(info);
  119. return obj;
  120. }
  121. Type VM::_new_type_object(StrName name, Type base) {
  122. PyObject* obj = new_type_object(nullptr, name, base, false);
  123. return PK_OBJ_GET(Type, obj);
  124. }
  125. PyObject* VM::_find_type_object(const Str& type){
  126. PyObject* obj = builtins->attr().try_get(type);
  127. if(obj == nullptr){
  128. for(auto& t: _all_types) if(t.name == type) return t.obj;
  129. throw std::runtime_error(fmt("type not found: ", type));
  130. }
  131. check_non_tagged_type(obj, tp_type);
  132. return obj;
  133. }
  134. Type VM::_type(const Str& type){
  135. PyObject* obj = _find_type_object(type);
  136. return PK_OBJ_GET(Type, obj);
  137. }
  138. PyTypeInfo* VM::_type_info(const Str& type){
  139. PyObject* obj = builtins->attr().try_get(type);
  140. if(obj == nullptr){
  141. for(auto& t: _all_types) if(t.name == type) return &t;
  142. FATAL_ERROR();
  143. }
  144. return &_all_types[PK_OBJ_GET(Type, obj)];
  145. }
// Direct index into the global type table.
PyTypeInfo* VM::_type_info(Type type){
    return &_all_types[type];
}
// Type info for any object, including tagged (pointer-encoded) ints and
// floats which carry no object header and must be mapped explicitly.
const PyTypeInfo* VM::_inst_type_info(PyObject* obj){
    if(is_int(obj)) return &_all_types[tp_int];
    if(is_float(obj)) return &_all_types[tp_float];
    return &_all_types[obj->type];
}
  154. bool VM::py_equals(PyObject* lhs, PyObject* rhs){
  155. if(lhs == rhs) return true;
  156. const PyTypeInfo* ti = _inst_type_info(lhs);
  157. PyObject* res;
  158. if(ti->m__eq__){
  159. res = ti->m__eq__(this, lhs, rhs);
  160. if(res != vm->NotImplemented) return res == vm->True;
  161. }
  162. res = call_method(lhs, __eq__, rhs);
  163. if(res != vm->NotImplemented) return res == vm->True;
  164. ti = _inst_type_info(rhs);
  165. if(ti->m__eq__){
  166. res = ti->m__eq__(this, rhs, lhs);
  167. if(res != vm->NotImplemented) return res == vm->True;
  168. }
  169. res = call_method(rhs, __eq__, lhs);
  170. if(res != vm->NotImplemented) return res == vm->True;
  171. return false;
  172. }
  173. int VM::normalized_index(int index, int size){
  174. if(index < 0) index += size;
  175. if(index < 0 || index >= size){
  176. IndexError(std::to_string(index) + " not in [0, " + std::to_string(size) + ")");
  177. }
  178. return index;
  179. }
  180. PyObject* VM::py_next(PyObject* obj){
  181. const PyTypeInfo* ti = _inst_type_info(obj);
  182. if(ti->m__next__) return ti->m__next__(this, obj);
  183. return call_method(obj, __next__);
  184. }
// Import module `name`. `relative` marks a relative import resolved
// against the innermost pending package import.
// `type` of a pending entry: 0 = plain module, 1 = package (__init__.py),
// 2 = relative module.
PyObject* VM::py_import(StrName name, bool relative){
    Str filename;
    int type;
    if(relative){
        ImportContext* ctx = &_import_context;
        type = 2;
        // walk pending imports innermost-first to find the enclosing package
        for(auto it=ctx->pending.rbegin(); it!=ctx->pending.rend(); ++it){
            if(it->second == 2) continue;       // skip other relative imports
            if(it->second == 1){                // found the package scope
                filename = fmt(it->first, kPlatformSep, name, ".py");
                name = fmt(it->first, '.', name).c_str();   // fully-qualified name
                break;
            }
        }
        if(filename.length() == 0) _error("ImportError", "relative import outside of package");
    }else{
        type = 0;
        filename = fmt(name, ".py");
    }
    // a module that is itself mid-import means a circular import
    for(auto& [k, v]: _import_context.pending){
        if(k == name){
            vm->_error("ImportError", fmt("circular import ", name.escape()));
        }
    }
    PyObject* ext_mod = _modules.try_get(name);
    if(ext_mod == nullptr){
        Str source;
        auto it = _lazy_modules.find(name);
        if(it == _lazy_modules.end()){
            Bytes b = _import_handler(filename);
            if(!relative && !b){
                // not found as <name>.py; retry as a package directory
                filename = fmt(name, kPlatformSep, "__init__.py");
                b = _import_handler(filename);
                if(b) type = 1;
            }
            if(!b) _error("ImportError", fmt("module ", name.escape(), " not found"));
            source = Str(b.str());
        }else{
            // lazy module: consume its registered source exactly once
            source = it->second;
            _lazy_modules.erase(it);
        }
        // mark as pending for the duration of the exec (RAII guard)
        auto _ = _import_context.temp(this, name, type);
        CodeObject_ code = compile(source, filename, EXEC_MODE);
        PyObject* new_mod = new_module(name);
        _exec(code, new_mod);
        new_mod->attr()._try_perfect_rehash();
        return new_mod;
    }else{
        return ext_mod;     // already present in the module registry
    }
}
// Tear down the VM: drop frames and stack values first, then the type,
// module and lazy-module tables.
VM::~VM() {
    callstack.clear();
    s_data.clear();
    _all_types.clear();
    _modules.clear();
    _lazy_modules.clear();
}
  243. PyObject* VM::py_negate(PyObject* obj){
  244. const PyTypeInfo* ti = _inst_type_info(obj);
  245. if(ti->m__neg__) return ti->m__neg__(this, obj);
  246. return call_method(obj, __neg__);
  247. }
  248. void VM::check_int_or_float(PyObject *obj){
  249. if(!is_tagged(obj)){
  250. TypeError("expected 'int' or 'float', got " + OBJ_NAME(_t(obj)).escape());
  251. }
  252. }
  253. bool VM::py_bool(PyObject* obj){
  254. if(is_non_tagged_type(obj, tp_bool)) return obj == True;
  255. if(obj == None) return false;
  256. if(is_int(obj)) return _CAST(i64, obj) != 0;
  257. if(is_float(obj)) return _CAST(f64, obj) != 0.0;
  258. PyObject* self;
  259. PyObject* len_f = get_unbound_method(obj, __len__, &self, false);
  260. if(self != PY_NULL){
  261. PyObject* ret = call_method(self, len_f);
  262. return CAST(i64, ret) > 0;
  263. }
  264. return true;
  265. }
  266. PyObject* VM::py_list(PyObject* it){
  267. auto _lock = heap.gc_scope_lock();
  268. it = py_iter(it);
  269. List list;
  270. PyObject* obj = py_next(it);
  271. while(obj != StopIteration){
  272. list.push_back(obj);
  273. obj = py_next(it);
  274. }
  275. return VAR(std::move(list));
  276. }
// Resolve a Slice object against a sequence of `length` elements into
// concrete (start, stop, step), mirroring CPython's slice.indices():
// negative endpoints count from the end, and results are clamped to
// [0, length] for positive steps and [-1, length-1] for negative steps
// (stop = -1 acting as the exclusive sentinel when stepping backwards).
void VM::parse_int_slice(const Slice& s, int length, int& start, int& stop, int& step){
    auto clip = [](int value, int min, int max){
        if(value < min) return min;
        if(value > max) return max;
        return value;
    };
    if(s.step == None) step = 1;
    else step = CAST(int, s.step);
    if(step == 0) ValueError("slice step cannot be zero");
    if(step > 0){
        // forward slice: defaults are [0, length)
        if(s.start == None){
            start = 0;
        }else{
            start = CAST(int, s.start);
            if(start < 0) start += length;
            start = clip(start, 0, length);
        }
        if(s.stop == None){
            stop = length;
        }else{
            stop = CAST(int, s.stop);
            if(stop < 0) stop += length;
            stop = clip(stop, 0, length);
        }
    }else{
        // backward slice: defaults are [length-1, -1)
        if(s.start == None){
            start = length - 1;
        }else{
            start = CAST(int, s.start);
            if(start < 0) start += length;
            start = clip(start, -1, length - 1);
        }
        if(s.stop == None){
            stop = -1;
        }else{
            stop = CAST(int, s.stop);
            if(stop < 0) stop += length;
            stop = clip(stop, -1, length - 1);
        }
    }
}
  318. i64 VM::py_hash(PyObject* obj){
  319. const PyTypeInfo* ti = _inst_type_info(obj);
  320. if(ti->m__hash__) return ti->m__hash__(this, obj);
  321. PyObject* ret = call_method(obj, __hash__);
  322. return CAST(i64, ret);
  323. }
  324. PyObject* VM::format(Str spec, PyObject* obj){
  325. if(spec.empty()) return py_str(obj);
  326. char type;
  327. switch(spec.end()[-1]){
  328. case 'f': case 'd': case 's':
  329. type = spec.end()[-1];
  330. spec = spec.substr(0, spec.length() - 1);
  331. break;
  332. default: type = ' '; break;
  333. }
  334. char pad_c = ' ';
  335. if(spec[0] == '0'){
  336. pad_c = '0';
  337. spec = spec.substr(1);
  338. }
  339. char align;
  340. if(spec[0] == '>'){
  341. align = '>';
  342. spec = spec.substr(1);
  343. }else if(spec[0] == '<'){
  344. align = '<';
  345. spec = spec.substr(1);
  346. }else{
  347. if(is_int(obj) || is_float(obj)) align = '>';
  348. else align = '<';
  349. }
  350. int dot = spec.index(".");
  351. int width, precision;
  352. try{
  353. if(dot >= 0){
  354. if(dot == 0){
  355. width = -1;
  356. }else{
  357. width = Number::stoi(spec.substr(0, dot).str());
  358. }
  359. precision = Number::stoi(spec.substr(dot+1).str());
  360. }else{
  361. width = Number::stoi(spec.str());
  362. precision = -1;
  363. }
  364. }catch(...){
  365. ValueError("invalid format specifer");
  366. UNREACHABLE();
  367. }
  368. if(type != 'f' && dot >= 0) ValueError("precision not allowed in the format specifier");
  369. Str ret;
  370. if(type == 'f'){
  371. f64 val = CAST(f64, obj);
  372. if(precision < 0) precision = 6;
  373. std::stringstream ss;
  374. ss << std::fixed << std::setprecision(precision) << val;
  375. ret = ss.str();
  376. }else if(type == 'd'){
  377. ret = std::to_string(CAST(i64, obj));
  378. }else if(type == 's'){
  379. ret = CAST(Str&, obj);
  380. }else{
  381. ret = CAST(Str&, py_str(obj));
  382. }
  383. if(width != -1 && width > ret.length()){
  384. int pad = width - ret.length();
  385. std::string padding(pad, pad_c);
  386. if(align == '>') ret = padding.c_str() + ret;
  387. else ret = ret + padding.c_str();
  388. }
  389. return VAR(ret);
  390. }
  391. PyObject* VM::new_module(StrName name) {
  392. PyObject* obj = heap._new<DummyModule>(tp_module);
  393. obj->attr().set("__name__", VAR(name.sv()));
  394. // we do not allow override in order to avoid memory leak
  395. // it is because Module objects are not garbage collected
  396. if(_modules.contains(name)) throw std::runtime_error("module already exists");
  397. _modules.set(name, obj);
  398. return obj;
  399. }
// Render a bytecode's argument for disassembly: the raw integer plus a
// human-readable annotation that depends on the opcode family.
// `vm` may be nullptr, in which case const-repr annotations are skipped.
static std::string _opcode_argstr(VM* vm, Bytecode byte, const CodeObject* co){
    std::string argStr = byte.arg == -1 ? "" : std::to_string(byte.arg);
    switch(byte.op){
        // arg indexes the co_consts table
        case OP_LOAD_CONST: case OP_FORMAT_STRING:
            if(vm != nullptr){
                argStr += fmt(" (", CAST(Str, vm->py_repr(co->consts[byte.arg])), ")");
            }
            break;
        // arg is an interned StrName
        case OP_LOAD_NAME: case OP_LOAD_GLOBAL: case OP_LOAD_NONLOCAL: case OP_STORE_GLOBAL:
        case OP_LOAD_ATTR: case OP_LOAD_METHOD: case OP_STORE_ATTR: case OP_DELETE_ATTR:
        case OP_IMPORT_NAME: case OP_BEGIN_CLASS: case OP_RAISE:
        case OP_DELETE_GLOBAL: case OP_INC_GLOBAL: case OP_DEC_GLOBAL: case OP_STORE_CLASS_ATTR:
            argStr += fmt(" (", StrName(byte.arg).sv(), ")");
            break;
        // arg indexes the local variable table
        case OP_LOAD_FAST: case OP_STORE_FAST: case OP_DELETE_FAST: case OP_INC_FAST: case OP_DEC_FAST:
            argStr += fmt(" (", co->varnames[byte.arg].sv(), ")");
            break;
        // arg indexes the nested function declarations
        case OP_LOAD_FUNCTION:
            argStr += fmt(" (", co->func_decls[byte.arg]->code->name, ")");
            break;
    }
    return argStr;
}
// Produce a human-readable disassembly of a code object:
// one row per bytecode (source line, jump-target marker, index, opcode
// name, decoded argument), followed by recursive disassembly of every
// nested function declaration.
Str VM::disassemble(CodeObject_ co){
    // pad/truncate a string to exactly n columns
    auto pad = [](const Str& s, const int n){
        if(s.length() >= n) return s.substr(0, n);
        return s + std::string(n - s.length(), ' ');
    };
    // collect jump targets so they can be marked with "->"
    std::vector<int> jumpTargets;
    for(auto byte : co->codes){
        if(byte.op == OP_JUMP_ABSOLUTE || byte.op == OP_POP_JUMP_IF_FALSE || byte.op == OP_SHORTCUT_IF_FALSE_OR_POP){
            jumpTargets.push_back(byte.arg);
        }
    }
    std::stringstream ss;
    int prev_line = -1;
    for(int i=0; i<co->codes.size(); i++){
        const Bytecode& byte = co->codes[i];
        // print the source line only when it changes, with a blank row between line groups
        Str line = std::to_string(co->lines[i]);
        if(co->lines[i] == prev_line) line = "";
        else{
            if(prev_line != -1) ss << "\n";
            prev_line = co->lines[i];
        }
        std::string pointer;
        if(std::find(jumpTargets.begin(), jumpTargets.end(), i) != jumpTargets.end()){
            pointer = "-> ";
        }else{
            pointer = "   ";
        }
        ss << pad(line, 8) << pointer << pad(std::to_string(i), 3);
        ss << " " << pad(OP_NAMES[byte.op], 25) << " ";
        // ss << pad(byte.arg == -1 ? "" : std::to_string(byte.arg), 5);
        std::string argStr = _opcode_argstr(this, byte, co.get());
        ss << argStr;
        // ss << pad(argStr, 40);      // may overflow
        // ss << co->blocks[byte.block].type;
        if(i != co->codes.size() - 1) ss << '\n';
    }
    // recurse into nested function declarations
    for(auto& decl: co->func_decls){
        ss << "\n\n" << "Disassembly of " << decl->code->name << ":\n";
        ss << disassemble(decl->code);
    }
    ss << "\n";
    return Str(ss.str());
}
#if PK_DEBUG_CEVAL_STEP
// Debug-only tracer: dumps the current value stack to stdout, one entry
// per slot, with '|' markers at every frame's stack base, followed by
// the opcode about to execute. No-op before __main__ exists or when the
// callstack is empty.
void VM::_log_s_data(const char* title) {
    if(_main == nullptr) return;
    if(callstack.empty()) return;
    std::stringstream ss;
    if(title) ss << title << " | ";
    // count how many frames base at each stack slot
    std::map<PyObject**, int> sp_bases;
    for(Frame& f: callstack.data()){
        if(f._sp_base == nullptr) FATAL_ERROR();
        sp_bases[f._sp_base] += 1;
    }
    FrameId frame = top_frame();
    int line = frame->co->lines[frame->_ip];
    ss << frame->co->name << ":" << line << " [";
    for(PyObject** p=s_data.begin(); p!=s_data.end(); p++){
        ss << std::string(sp_bases[p], '|');
        if(sp_bases[p] > 0) ss << " ";
        PyObject* obj = *p;
        // render each slot with a type-appropriate short form
        if(obj == nullptr) ss << "(nil)";
        else if(obj == PY_NULL) ss << "NULL";
        else if(is_int(obj)) ss << CAST(i64, obj);
        else if(is_float(obj)) ss << CAST(f64, obj);
        else if(is_type(obj, tp_str)) ss << CAST(Str, obj).escape();
        else if(obj == None) ss << "None";
        else if(obj == True) ss << "True";
        else if(obj == False) ss << "False";
        else if(is_type(obj, tp_function)){
            auto& f = CAST(Function&, obj);
            ss << f.decl->code->name << "(...)";
        } else if(is_type(obj, tp_type)){
            Type t = PK_OBJ_GET(Type, obj);
            ss << "<class " + _all_types[t].name.escape() + ">";
        } else if(is_type(obj, tp_list)){
            auto& t = CAST(List&, obj);
            ss << "list(size=" << t.size() << ")";
        } else if(is_type(obj, tp_tuple)){
            auto& t = CAST(Tuple&, obj);
            ss << "tuple(size=" << t.size() << ")";
        } else ss << "(" << obj_type_name(this, obj->type) << ")";
        ss << ", ";
    }
    std::string output = ss.str();
    if(!s_data.empty()) {
        // strip the trailing ", "
        output.pop_back(); output.pop_back();
    }
    output.push_back(']');
    Bytecode byte = frame->co->codes[frame->_ip];
    std::cout << output << " " << OP_NAMES[byte.op] << " " << _opcode_argstr(nullptr, byte, frame->co) << std::endl;
}
#endif
// Bootstrap the type system and the builtins module. Order matters:
// `object` and `type` are installed by hand at indices 0 and 1, and
// int/float MUST land at kTpIntIndex/kTpFloatIndex because tagged-value
// decoding relies on those fixed indices.
void VM::init_builtin_types(){
    _all_types.push_back({heap._new<Type>(Type(1), Type(0)), -1, "object", true});
    _all_types.push_back({heap._new<Type>(Type(1), Type(1)), 0, "type", false});
    tp_object = 0; tp_type = 1;
    tp_int = _new_type_object("int");
    tp_float = _new_type_object("float");
    if(tp_int.index != kTpIntIndex || tp_float.index != kTpFloatIndex) FATAL_ERROR();
    tp_bool = _new_type_object("bool");
    tp_str = _new_type_object("str");
    tp_list = _new_type_object("list");
    tp_tuple = _new_type_object("tuple");
    tp_slice = _new_type_object("slice");
    tp_range = _new_type_object("range");
    tp_module = _new_type_object("module");
    tp_function = _new_type_object("function");
    tp_native_func = _new_type_object("native_func");
    tp_bound_method = _new_type_object("bound_method");
    tp_super = _new_type_object("super");
    tp_exception = _new_type_object("Exception");
    tp_bytes = _new_type_object("bytes");
    tp_mappingproxy = _new_type_object("mappingproxy");
    tp_dict = _new_type_object("dict");
    tp_property = _new_type_object("property");
    tp_star_wrapper = _new_type_object("_star_wrapper");
    // singletons, each with its own private type
    this->None = heap._new<Dummy>(_new_type_object("NoneType"));
    this->NotImplemented = heap._new<Dummy>(_new_type_object("NotImplementedType"));
    this->Ellipsis = heap._new<Dummy>(_new_type_object("ellipsis"));
    this->True = heap._new<Dummy>(tp_bool);
    this->False = heap._new<Dummy>(tp_bool);
    this->StopIteration = heap._new<Dummy>(_new_type_object("StopIterationType"));
    this->builtins = new_module("builtins");
    // setup public types
    builtins->attr().set("type", _t(tp_type));
    builtins->attr().set("object", _t(tp_object));
    builtins->attr().set("bool", _t(tp_bool));
    builtins->attr().set("int", _t(tp_int));
    builtins->attr().set("float", _t(tp_float));
    builtins->attr().set("str", _t(tp_str));
    builtins->attr().set("list", _t(tp_list));
    builtins->attr().set("tuple", _t(tp_tuple));
    builtins->attr().set("range", _t(tp_range));
    builtins->attr().set("bytes", _t(tp_bytes));
    builtins->attr().set("dict", _t(tp_dict));
    builtins->attr().set("property", _t(tp_property));
    builtins->attr().set("StopIteration", StopIteration);
    builtins->attr().set("NotImplemented", NotImplemented);
    builtins->attr().set("slice", _t(tp_slice));
    post_init();
    // attribute tables are now stable; switch them to perfect hashing
    for(int i=0; i<_all_types.size(); i++){
        _all_types[i].obj->attr()._try_perfect_rehash();
    }
    for(auto [k, v]: _modules.items()) v->attr()._try_perfect_rehash();
    this->_main = new_module("__main__");
}
  570. // `heap.gc_scope_lock();` needed before calling this function
  571. void VM::_unpack_as_list(ArgsView args, List& list){
  572. for(PyObject* obj: args){
  573. if(is_non_tagged_type(obj, tp_star_wrapper)){
  574. const StarWrapper& w = _CAST(StarWrapper&, obj);
  575. // maybe this check should be done in the compile time
  576. if(w.level != 1) TypeError("expected level 1 star wrapper");
  577. PyObject* _0 = py_iter(w.obj);
  578. PyObject* _1 = py_next(_0);
  579. while(_1 != StopIteration){
  580. list.push_back(_1);
  581. _1 = py_next(_0);
  582. }
  583. }else{
  584. list.push_back(obj);
  585. }
  586. }
  587. }
  588. // `heap.gc_scope_lock();` needed before calling this function
  589. void VM::_unpack_as_dict(ArgsView args, Dict& dict){
  590. for(PyObject* obj: args){
  591. if(is_non_tagged_type(obj, tp_star_wrapper)){
  592. const StarWrapper& w = _CAST(StarWrapper&, obj);
  593. // maybe this check should be done in the compile time
  594. if(w.level != 2) TypeError("expected level 2 star wrapper");
  595. const Dict& other = CAST(Dict&, w.obj);
  596. dict.update(other);
  597. }else{
  598. const Tuple& t = CAST(Tuple&, obj);
  599. if(t.size() != 2) TypeError("expected tuple of length 2");
  600. dict.set(t[0], t[1]);
  601. }
  602. }
  603. }
// Lay out the local-variable frame for a Python-level call into `buffer`
// (sized co_nlocals): positional args first, unset slots as PY_NULL,
// keyword defaults, then *args / **kwargs collection, and finally the
// explicit keyword arguments (kwargs is a flat [name-int, value, ...] view).
void VM::_prepare_py_call(PyObject** buffer, ArgsView args, ArgsView kwargs, const FuncDecl_& decl){
    const CodeObject* co = decl->code.get();
    int co_nlocals = co->varnames.size();
    int decl_argc = decl->args.size();
    if(args.size() < decl_argc){
        vm->TypeError(fmt(
            "expected ", decl_argc, " positional arguments, got ", args.size(),
            " (", co->name, ')'
        ));
    }
    int i = 0;
    // prepare args
    for(int index: decl->args) buffer[index] = args[i++];
    // set extra varnames to nullptr
    for(int j=i; j<co_nlocals; j++) buffer[j] = PY_NULL;
    // prepare kwdefaults
    for(auto& kv: decl->kwargs) buffer[kv.key] = kv.value;
    // handle *args
    if(decl->starred_arg != -1){
        ArgsView vargs(args.begin() + i, args.end());
        buffer[decl->starred_arg] = VAR(vargs.to_tuple());
        i += vargs.size();
    }else{
        // kwdefaults override: surplus positionals fill keyword slots in order
        for(auto& kv: decl->kwargs){
            if(i >= args.size()) break;
            buffer[kv.key] = args[i++];
        }
        if(i < args.size()) TypeError(fmt("too many arguments", " (", decl->code->name, ')'));
    }
    PyObject* vkwargs;
    if(decl->starred_kwarg != -1){
        vkwargs = VAR(Dict(this));      // **kwargs catch-all dict
        buffer[decl->starred_kwarg] = vkwargs;
    }else{
        vkwargs = nullptr;
    }
    // kwargs view alternates (interned name as int, value)
    for(int j=0; j<kwargs.size(); j+=2){
        StrName key(CAST(int, kwargs[j]));
        int index = co->varnames_inv.try_get(key);
        if(index < 0){
            // not a named parameter: route into **kwargs or fail
            if(vkwargs == nullptr){
                TypeError(fmt(key.escape(), " is an invalid keyword argument for ", co->name, "()"));
            }else{
                Dict& dict = _CAST(Dict&, vkwargs);
                dict.set(VAR(key.sv()), kwargs[j+1]);
            }
        }else{
            buffer[index] = kwargs[j+1];
        }
    }
}
// Core call dispatcher. On entry the value stack holds
// [callable, <self or PY_NULL>, args..., kwargs...]; ARGC/KWARGC give
// the arg / keyword-pair counts. Handles bound methods, native
// functions, Python functions (optionally returning PY_OP_CALL so the
// interpreter loop continues inline), type construction (__new__ +
// __init__), and objects with a __call__ attribute.
PyObject* VM::vectorcall(int ARGC, int KWARGC, bool op_call){
    PyObject** p1 = s_data._sp - KWARGC*2;
    PyObject** p0 = p1 - ARGC - 2;
    // [callable, <self>, args..., kwargs...]
    //      ^p0                    ^p1      ^_sp
    PyObject* callable = p1[-(ARGC + 2)];
    bool method_call = p1[-(ARGC + 1)] != PY_NULL;
    // handle boundmethod, do a patch
    if(is_non_tagged_type(callable, tp_bound_method)){
        if(method_call) FATAL_ERROR();
        auto& bm = CAST(BoundMethod&, callable);
        callable = bm.func;      // get unbound method
        p1[-(ARGC + 2)] = bm.func;
        p1[-(ARGC + 1)] = bm.self;
        method_call = true;
        // [unbound, self, args..., kwargs...]
    }
    // for a method call, `self` joins the positional args view
    ArgsView args(p1 - ARGC - int(method_call), p1);
    ArgsView kwargs(p1, s_data._sp);
    PyObject* buffer[PK_MAX_CO_VARNAMES];
    if(is_non_tagged_type(callable, tp_native_func)){
        const auto& f = PK_OBJ_GET(NativeFunc, callable);
        PyObject* ret;
        if(f.decl != nullptr){
            // new-style native func with a python-visible signature
            int co_nlocals = f.decl->code->varnames.size();
            _prepare_py_call(buffer, args, kwargs, f.decl);
            // copy buffer back to stack
            s_data.reset(args.begin());
            for(int j=0; j<co_nlocals; j++) PUSH(buffer[j]);
            ret = f.call(vm, ArgsView(s_data._sp - co_nlocals, s_data._sp));
        }else{
            if(KWARGC != 0) TypeError("old-style native_func does not accept keyword arguments");
            f.check_size(this, args);
            ret = f.call(this, args);
        }
        s_data.reset(p0);
        return ret;
    }
    if(is_non_tagged_type(callable, tp_function)){
        /*****************_py_call*****************/
        // callable must be a `function` object
        if(s_data.is_overflow()) StackOverflowError();
        const Function& fn = PK_OBJ_GET(Function, callable);
        const FuncDecl_& decl = fn.decl;
        const CodeObject* co = decl->code.get();
        int co_nlocals = co->varnames.size();
        _prepare_py_call(buffer, args, kwargs, decl);
        if(co->is_generator){
            // generators capture their locals; nothing stays on this stack
            s_data.reset(p0);
            return _py_generator(
                Frame(&s_data, nullptr, co, fn._module, callable),
                ArgsView(buffer, buffer + co_nlocals)
            );
        }
        // copy buffer back to stack
        s_data.reset(args.begin());
        for(int j=0; j<co_nlocals; j++) PUSH(buffer[j]);
        callstack.emplace(&s_data, p0, co, fn._module, callable, FastLocals(co, args.begin()));
        if(op_call) return PY_OP_CALL;      // let the ceval loop run the new frame
        return _run_top_frame();
        /*****************_py_call*****************/
    }
    if(is_non_tagged_type(callable, tp_type)){
        if(method_call) FATAL_ERROR();
        // [type, NULL, args..., kwargs...]
        PyObject* new_f = find_name_in_mro(callable, __new__);
        PyObject* obj;
#if PK_DEBUG_EXTRA_CHECK
        PK_ASSERT(new_f != nullptr);
#endif
        if(new_f == cached_object__new__) {
            // fast path for object.__new__
            Type t = PK_OBJ_GET(Type, callable);
            obj= vm->heap.gcnew<DummyInstance>(t);
        }else{
            PUSH(new_f);
            PUSH(PY_NULL);
            PUSH(callable);     // cls
            for(PyObject* o: args) PUSH(o);
            for(PyObject* o: kwargs) PUSH(o);
            // if obj is not an instance of callable, the behavior is undefined
            obj = vectorcall(ARGC+1, KWARGC);
        }
        // __init__
        PyObject* self;
        callable = get_unbound_method(obj, __init__, &self, false);
        if (self != PY_NULL) {
            // replace `NULL` with `self`
            p1[-(ARGC + 2)] = callable;
            p1[-(ARGC + 1)] = self;
            // [init_f, self, args..., kwargs...]
            vectorcall(ARGC, KWARGC);
            // We just discard the return value of `__init__`
            // in cpython it raises a TypeError if the return value is not None
        }else{
            // manually reset the stack
            s_data.reset(p0);
        }
        return obj;
    }
    // handle `__call__` overload
    PyObject* self;
    PyObject* call_f = get_unbound_method(callable, __call__, &self, false);
    if(self != PY_NULL){
        p1[-(ARGC + 2)] = call_f;
        p1[-(ARGC + 1)] = self;
        // [call_f, self, args..., kwargs...]
        return vectorcall(ARGC, KWARGC, false);
    }
    TypeError(OBJ_NAME(_t(callable)).escape() + " object is not callable");
    return nullptr;
}
// https://docs.python.org/3/howto/descriptor.html#invocation-from-an-instance
// Attribute lookup following the descriptor protocol: property getters
// (data descriptors) win over the instance __dict__, which wins over
// plain class attributes; functions found on the class are wrapped as
// bound methods. Returns nullptr (or raises AttributeError when
// `throw_err`) if nothing matches.
PyObject* VM::getattr(PyObject* obj, StrName name, bool throw_err){
    PyObject* objtype;
    // handle super() proxy: look up on the parent type but bind to the child
    if(is_non_tagged_type(obj, tp_super)){
        const Super& super = PK_OBJ_GET(Super, obj);
        obj = super.first;
        objtype = _t(super.second);
    }else{
        objtype = _t(obj);
    }
    PyObject* cls_var = find_name_in_mro(objtype, name);
    if(cls_var != nullptr){
        // handle descriptor: a property getter pre-empts the instance dict
        if(is_non_tagged_type(cls_var, tp_property)){
            const Property& prop = _CAST(Property&, cls_var);
            return call(prop.getter, obj);
        }
    }
    // handle instance __dict__
    if(!is_tagged(obj) && obj->is_attr_valid()){
        PyObject* val = obj->attr().try_get(name);
        if(val != nullptr) return val;
    }
    if(cls_var != nullptr){
        // bound method is non-data descriptor
        if(is_non_tagged_type(cls_var, tp_function) || is_non_tagged_type(cls_var, tp_native_func)){
            return VAR(BoundMethod(obj, cls_var));
        }
        return cls_var;
    }
    if(throw_err) AttributeError(obj, name);
    return nullptr;
}
  802. // used by OP_LOAD_METHOD
  803. // try to load a unbound method (fallback to `getattr` if not found)
  804. PyObject* VM::get_unbound_method(PyObject* obj, StrName name, PyObject** self, bool throw_err, bool fallback){
  805. *self = PY_NULL;
  806. PyObject* objtype;
  807. // handle super() proxy
  808. if(is_non_tagged_type(obj, tp_super)){
  809. const Super& super = PK_OBJ_GET(Super, obj);
  810. obj = super.first;
  811. objtype = _t(super.second);
  812. }else{
  813. objtype = _t(obj);
  814. }
  815. PyObject* cls_var = find_name_in_mro(objtype, name);
  816. if(fallback){
  817. if(cls_var != nullptr){
  818. // handle descriptor
  819. if(is_non_tagged_type(cls_var, tp_property)){
  820. const Property& prop = _CAST(Property&, cls_var);
  821. return call(prop.getter, obj);
  822. }
  823. }
  824. // handle instance __dict__
  825. if(!is_tagged(obj) && obj->is_attr_valid()){
  826. PyObject* val = obj->attr().try_get(name);
  827. if(val != nullptr) return val;
  828. }
  829. }
  830. if(cls_var != nullptr){
  831. if(is_non_tagged_type(cls_var, tp_function) || is_non_tagged_type(cls_var, tp_native_func)){
  832. *self = obj;
  833. }
  834. return cls_var;
  835. }
  836. if(throw_err) AttributeError(obj, name);
  837. return nullptr;
  838. }
  839. void VM::setattr(PyObject* obj, StrName name, PyObject* value){
  840. PyObject* objtype;
  841. // handle super() proxy
  842. if(is_non_tagged_type(obj, tp_super)){
  843. Super& super = PK_OBJ_GET(Super, obj);
  844. obj = super.first;
  845. objtype = _t(super.second);
  846. }else{
  847. objtype = _t(obj);
  848. }
  849. PyObject* cls_var = find_name_in_mro(objtype, name);
  850. if(cls_var != nullptr){
  851. // handle descriptor
  852. if(is_non_tagged_type(cls_var, tp_property)){
  853. const Property& prop = _CAST(Property&, cls_var);
  854. if(prop.setter != vm->None){
  855. call(prop.setter, obj, value);
  856. }else{
  857. TypeError(fmt("readonly attribute: ", name.escape()));
  858. }
  859. return;
  860. }
  861. }
  862. // handle instance __dict__
  863. if(is_tagged(obj) || !obj->is_attr_valid()) TypeError("cannot set attribute");
  864. obj->attr().set(name, value);
  865. }
  866. PyObject* VM::bind(PyObject* obj, const char* sig, NativeFuncC fn, UserData userdata){
  867. return bind(obj, sig, nullptr, fn, userdata);
  868. }
  869. PyObject* VM::bind(PyObject* obj, const char* sig, const char* docstring, NativeFuncC fn, UserData userdata){
  870. CodeObject_ co;
  871. try{
  872. // fn(a, b, *c, d=1) -> None
  873. co = compile("def " + Str(sig) + " : pass", "<bind>", EXEC_MODE);
  874. }catch(Exception&){
  875. throw std::runtime_error("invalid signature: " + std::string(sig));
  876. }
  877. if(co->func_decls.size() != 1){
  878. throw std::runtime_error("expected 1 function declaration");
  879. }
  880. FuncDecl_ decl = co->func_decls[0];
  881. decl->signature = Str(sig);
  882. if(docstring != nullptr){
  883. decl->docstring = Str(docstring).strip();
  884. }
  885. PyObject* f_obj = VAR(NativeFunc(fn, decl));
  886. PK_OBJ_GET(NativeFunc, f_obj).set_userdata(userdata);
  887. if(obj != nullptr) obj->attr().set(decl->code->name, f_obj);
  888. return f_obj;
  889. }
  890. PyObject* VM::bind_property(PyObject* obj, Str name, NativeFuncC fget, NativeFuncC fset){
  891. PyObject* _0 = heap.gcnew<NativeFunc>(tp_native_func, fget, 1, false);
  892. PyObject* _1 = vm->None;
  893. if(fset != nullptr) _1 = heap.gcnew<NativeFunc>(tp_native_func, fset, 2, false);
  894. Str type_hint;
  895. int pos = name.index(":");
  896. if(pos > 0){
  897. type_hint = name.substr(pos + 1).strip();
  898. name = name.substr(0, pos).strip();
  899. }
  900. PyObject* prop = VAR(Property(_0, _1, type_hint));
  901. obj->attr().set(name, prop);
  902. return prop;
  903. }
  904. void VM::_error(Exception e){
  905. if(callstack.empty()){
  906. e.is_re = false;
  907. throw e;
  908. }
  909. PUSH(VAR(e));
  910. _raise();
  911. }
  912. void ManagedHeap::mark() {
  913. for(PyObject* obj: _no_gc) PK_OBJ_MARK(obj);
  914. for(auto& frame : vm->callstack.data()) frame._gc_mark();
  915. for(PyObject* obj: vm->s_data) PK_OBJ_MARK(obj);
  916. if(_gc_marker_ex) _gc_marker_ex(vm);
  917. if(vm->_last_exception) PK_OBJ_MARK(vm->_last_exception);
  918. if(vm->_c.error != nullptr) PK_OBJ_MARK(vm->_c.error);
  919. }
  920. Str obj_type_name(VM *vm, Type type){
  921. return vm->_all_types[type].name;
  922. }
  923. void VM::bind__hash__(Type type, i64 (*f)(VM*, PyObject*)){
  924. PyObject* obj = _t(type);
  925. _all_types[type].m__hash__ = f;
  926. PyObject* nf = bind_method<0>(obj, "__hash__", [](VM* vm, ArgsView args){
  927. i64 ret = lambda_get_userdata<i64(*)(VM*, PyObject*)>(args.begin())(vm, args[0]);
  928. return VAR(ret);
  929. });
  930. PK_OBJ_GET(NativeFunc, nf).set_userdata(f);
  931. }
  932. void VM::bind__len__(Type type, i64 (*f)(VM*, PyObject*)){
  933. PyObject* obj = _t(type);
  934. _all_types[type].m__len__ = f;
  935. PyObject* nf = bind_method<0>(obj, "__len__", [](VM* vm, ArgsView args){
  936. i64 ret = lambda_get_userdata<i64(*)(VM*, PyObject*)>(args.begin())(vm, args[0]);
  937. return VAR(ret);
  938. });
  939. PK_OBJ_GET(NativeFunc, nf).set_userdata(f);
  940. }
  941. void Dict::_probe(PyObject *key, bool &ok, int &i) const{
  942. ok = false;
  943. i = vm->py_hash(key) & _mask;
  944. while(_items[i].first != nullptr) {
  945. if(vm->py_equals(_items[i].first, key)) { ok = true; break; }
  946. // https://github.com/python/cpython/blob/3.8/Objects/dictobject.c#L166
  947. i = ((5*i) + 1) & _mask;
  948. }
  949. }
  950. void CodeObjectSerializer::write_object(VM *vm, PyObject *obj){
  951. if(is_int(obj)) write_int(_CAST(i64, obj));
  952. else if(is_float(obj)) write_float(_CAST(f64, obj));
  953. else if(is_type(obj, vm->tp_str)) write_str(_CAST(Str&, obj));
  954. else if(is_type(obj, vm->tp_bool)) write_bool(_CAST(bool, obj));
  955. else if(obj == vm->None) write_none();
  956. else if(obj == vm->Ellipsis) write_ellipsis();
  957. else{
  958. throw std::runtime_error(fmt(OBJ_NAME(vm->_t(obj)).escape(), " is not serializable"));
  959. }
  960. }
  961. void NativeFunc::check_size(VM* vm, ArgsView args) const{
  962. if(args.size() != argc && argc != -1) {
  963. vm->TypeError(fmt("expected ", argc, " arguments, got ", args.size()));
  964. }
  965. }
  966. PyObject* NativeFunc::call(VM *vm, ArgsView args) const {
  967. return f(vm, args);
  968. }
  969. } // namespace pkpy