// vm.cpp — pocketpy virtual machine implementation
  1. #include "pocketpy/vm.h"
  2. namespace pkpy{
// Construct a VM. `enable_os` controls whether OS-level features are enabled.
// Installs default stdout/stderr hooks (console), a no-op import handler
// (embedders override `_import_handler`), and bootstraps all builtin types.
VM::VM(bool enable_os) : heap(this), enable_os(enable_os) {
    this->vm = this;
    _stdout = [](VM* vm, const Str& s) {
        PK_UNUSED(vm);
        std::cout << s;
    };
    _stderr = [](VM* vm, const Str& s) {
        PK_UNUSED(vm);
        std::cerr << s;
    };
    callstack.reserve(8);
    _main = nullptr;
    _last_exception = nullptr;
    // default import handler resolves nothing: every import fails until
    // the host application provides a real one
    _import_handler = [](const Str& name) {
        PK_UNUSED(name);
        return Bytes();
    };
    init_builtin_types();
}
  22. PyObject* VM::py_str(PyObject* obj){
  23. const PyTypeInfo* ti = _inst_type_info(obj);
  24. if(ti->m__str__) return ti->m__str__(this, obj);
  25. PyObject* self;
  26. PyObject* f = get_unbound_method(obj, __str__, &self, false);
  27. if(self != PY_NULL) return call_method(self, f);
  28. return py_repr(obj);
  29. }
  30. PyObject* VM::py_repr(PyObject* obj){
  31. const PyTypeInfo* ti = _inst_type_info(obj);
  32. if(ti->m__repr__) return ti->m__repr__(this, obj);
  33. return call_method(obj, __repr__);
  34. }
  35. PyObject* VM::py_json(PyObject* obj){
  36. const PyTypeInfo* ti = _inst_type_info(obj);
  37. if(ti->m__json__) return ti->m__json__(this, obj);
  38. return call_method(obj, __json__);
  39. }
  40. PyObject* VM::py_iter(PyObject* obj){
  41. const PyTypeInfo* ti = _inst_type_info(obj);
  42. if(ti->m__iter__) return ti->m__iter__(this, obj);
  43. PyObject* self;
  44. PyObject* iter_f = get_unbound_method(obj, __iter__, &self, false);
  45. if(self != PY_NULL) return call_method(self, iter_f);
  46. TypeError(OBJ_NAME(_t(obj)).escape() + " object is not iterable");
  47. return nullptr;
  48. }
// Return a handle to the innermost call frame. FrameId stores the container
// and an index rather than a raw Frame* because `callstack` may reallocate
// while the handle is held.
FrameId VM::top_frame(){
#if PK_DEBUG_EXTRA_CHECK
    if(callstack.empty()) FATAL_ERROR();
#endif
    return FrameId(&callstack.data(), callstack.size()-1);
}
  55. void VM::_pop_frame(){
  56. Frame* frame = &callstack.top();
  57. s_data.reset(frame->_sp_base);
  58. callstack.pop();
  59. }
  60. PyObject* VM::find_name_in_mro(PyObject* cls, StrName name){
  61. PyObject* val;
  62. do{
  63. val = cls->attr().try_get(name);
  64. if(val != nullptr) return val;
  65. Type base = _all_types[PK_OBJ_GET(Type, cls)].base;
  66. if(base.index == -1) break;
  67. cls = _all_types[base].obj;
  68. }while(true);
  69. return nullptr;
  70. }
  71. bool VM::isinstance(PyObject* obj, Type cls_t){
  72. Type obj_t = PK_OBJ_GET(Type, _t(obj));
  73. do{
  74. if(obj_t == cls_t) return true;
  75. Type base = _all_types[obj_t].base;
  76. if(base.index == -1) break;
  77. obj_t = base;
  78. }while(true);
  79. return false;
  80. }
// Compile and run `source` inside `_module` (defaults to __main__).
// Any error is reported through _stderr; on failure the call stack and
// value stack are reset so the VM stays usable, and nullptr is returned.
PyObject* VM::exec(Str source, Str filename, CompileMode mode, PyObject* _module){
    if(_module == nullptr) _module = _main;
    try {
        CodeObject_ code = compile(source, filename, mode);
#if PK_DEBUG_DIS_EXEC
        if(_module == _main) std::cout << disassemble(code) << '\n';
#endif
        return _exec(code, _module);
    }catch (const Exception& e){
        // Python-level exception that escaped to the top: print its summary
        _stderr(this, e.summary() + "\n");
    }
#if !PK_DEBUG_FULL_EXCEPTION
    // disabled in "full exception" debug builds so a host debugger
    // can observe the raw C++ throw
    catch (const std::exception& e) {
        Str msg = "An std::exception occurred! It could be a bug.\n";
        msg = msg + e.what();
        _stderr(this, msg + "\n");
    }
#endif
    // unwind any partially-executed frames before returning control
    callstack.clear();
    s_data.clear();
    return nullptr;
}
  103. PyObject* VM::property(NativeFuncC fget, NativeFuncC fset){
  104. PyObject* _0 = heap.gcnew(tp_native_func, NativeFunc(fget, 1, false));
  105. PyObject* _1 = vm->None;
  106. if(fset != nullptr) _1 = heap.gcnew(tp_native_func, NativeFunc(fset, 2, false));
  107. return call(_t(tp_property), _0, _1);
  108. }
// Create a new type deriving from `base` and append it to the registry.
// The new type's index is its position in `_all_types`. When `mod` is given
// (and is not builtins) the registered name is qualified "<module>.<name>"
// and the type object is published as an attribute of `mod`.
PyObject* VM::new_type_object(PyObject* mod, StrName name, Type base, bool subclass_enabled){
    // the Type payload is the index this entry will occupy after push_back
    PyObject* obj = heap._new<Type>(tp_type, _all_types.size());
    const PyTypeInfo& base_info = _all_types[base];
    if(!base_info.subclass_enabled){
        TypeError(fmt("type ", base_info.name.escape(), " is not `subclass_enabled`"));
    }
    PyTypeInfo info{
        obj,
        base,
        (mod!=nullptr && mod!=builtins) ? Str(OBJ_NAME(mod)+"."+name.sv()): name.sv(),
        subclass_enabled,
    };
    if(mod != nullptr) mod->attr().set(name, obj);
    _all_types.push_back(info);
    return obj;
}
  125. Type VM::_new_type_object(StrName name, Type base) {
  126. PyObject* obj = new_type_object(nullptr, name, base, false);
  127. return PK_OBJ_GET(Type, obj);
  128. }
  129. PyObject* VM::_find_type_object(const Str& type){
  130. PyObject* obj = builtins->attr().try_get(type);
  131. if(obj == nullptr){
  132. for(auto& t: _all_types) if(t.name == type) return t.obj;
  133. throw std::runtime_error(fmt("type not found: ", type));
  134. }
  135. check_non_tagged_type(obj, tp_type);
  136. return obj;
  137. }
  138. Type VM::_type(const Str& type){
  139. PyObject* obj = _find_type_object(type);
  140. return PK_OBJ_GET(Type, obj);
  141. }
// Look up a type's metadata record by name. Checks builtins first; falls
// back to scanning the whole registry (covers internal, unexported types).
// Aborts the process when the name does not exist at all.
PyTypeInfo* VM::_type_info(const Str& type){
    PyObject* obj = builtins->attr().try_get(type);
    if(obj == nullptr){
        for(auto& t: _all_types) if(t.name == type) return &t;
        FATAL_ERROR();
    }
    return &_all_types[PK_OBJ_GET(Type, obj)];
}
// Direct metadata access by Type index (no validation; index must be live).
PyTypeInfo* VM::_type_info(Type type){
    return &_all_types[type];
}
// Type info for any value. Tagged (pointer-packed) ints and floats carry
// no object header, so they must be mapped to their types explicitly
// before `obj->type` can be dereferenced.
const PyTypeInfo* VM::_inst_type_info(PyObject* obj){
    if(is_int(obj)) return &_all_types[tp_int];
    if(is_float(obj)) return &_all_types[tp_float];
    return &_all_types[obj->type];
}
// Equality following Python's protocol: identity short-circuit, then
// lhs.__eq__(rhs), then the reflected rhs.__eq__(lhs). Each side tries
// the fast C-level slot before the Python-level method; a result of
// NotImplemented means "try the next candidate".
bool VM::py_equals(PyObject* lhs, PyObject* rhs){
    if(lhs == rhs) return true;
    const PyTypeInfo* ti = _inst_type_info(lhs);
    PyObject* res;
    if(ti->m__eq__){
        res = ti->m__eq__(this, lhs, rhs);
        if(res != vm->NotImplemented) return res == vm->True;
    }
    res = call_method(lhs, __eq__, rhs);
    if(res != vm->NotImplemented) return res == vm->True;
    // reflected comparison: give rhs a chance
    ti = _inst_type_info(rhs);
    if(ti->m__eq__){
        res = ti->m__eq__(this, rhs, lhs);
        if(res != vm->NotImplemented) return res == vm->True;
    }
    res = call_method(rhs, __eq__, lhs);
    if(res != vm->NotImplemented) return res == vm->True;
    return false;
}
  177. int VM::normalized_index(int index, int size){
  178. if(index < 0) index += size;
  179. if(index < 0 || index >= size){
  180. IndexError(std::to_string(index) + " not in [0, " + std::to_string(size) + ")");
  181. }
  182. return index;
  183. }
  184. PyObject* VM::py_next(PyObject* obj){
  185. const PyTypeInfo* ti = _inst_type_info(obj);
  186. if(ti->m__next__) return ti->m__next__(this, obj);
  187. return call_method(obj, __next__);
  188. }
// Import module `name`. A cached module in `_modules` is returned directly.
// Otherwise the source comes from `_lazy_modules` (consumed on first use)
// or from `_import_handler`, then is compiled and executed into a new
// module object. When `relative` is true, the name is resolved against the
// innermost pending package import.
PyObject* VM::py_import(StrName name, bool relative){
    Str filename;
    int type;   // 0: plain module, 1: package (__init__.py), 2: relative import
    if(relative){
        ImportContext* ctx = &_import_context;
        type = 2;
        // walk pending imports innermost-first to find the enclosing package
        for(auto it=ctx->pending.rbegin(); it!=ctx->pending.rend(); ++it){
            if(it->second == 2) continue;   // skip other relative imports
            if(it->second == 1){
                // anchor the relative name inside the package directory
                filename = fmt(it->first, kPlatformSep, name, ".py");
                name = fmt(it->first, '.', name).c_str();
                break;
            }
        }
        if(filename.length() == 0) _error("ImportError", "relative import outside of package");
    }else{
        type = 0;
        filename = fmt(name, ".py");
    }
    // an import that is already in progress means a cycle
    for(auto& [k, v]: _import_context.pending){
        if(k == name){
            vm->_error("ImportError", fmt("circular import ", name.escape()));
        }
    }
    PyObject* ext_mod = _modules.try_get(name);
    if(ext_mod == nullptr){
        Str source;
        auto it = _lazy_modules.find(name);
        if(it == _lazy_modules.end()){
            Bytes b = _import_handler(filename);
            if(!relative && !b){
                // fall back to package layout: <name>/__init__.py
                filename = fmt(name, kPlatformSep, "__init__.py");
                b = _import_handler(filename);
                if(b) type = 1;
            }
            if(!b) _error("ImportError", fmt("module ", name.escape(), " not found"));
            source = Str(b.str());
        }else{
            // lazy module sources are single-use
            source = it->second;
            _lazy_modules.erase(it);
        }
        // mark this import pending for the duration of module execution
        auto _ = _import_context.temp(this, name, type);
        CodeObject_ code = compile(source, filename, EXEC_MODE);
        PyObject* new_mod = new_module(name);
        _exec(code, new_mod);
        new_mod->attr()._try_perfect_rehash();
        return new_mod;
    }else{
        return ext_mod;
    }
}
// Tear down the VM. Containers are cleared explicitly here,
// presumably so that objects referencing the VM are released before
// member destructors (e.g. the heap's) run — verify against ManagedHeap.
VM::~VM() {
    callstack.clear();
    s_data.clear();
    _all_types.clear();
    _modules.clear();
    _lazy_modules.clear();
}
  247. PyObject* VM::py_negate(PyObject* obj){
  248. const PyTypeInfo* ti = _inst_type_info(obj);
  249. if(ti->m__neg__) return ti->m__neg__(this, obj);
  250. return call_method(obj, __neg__);
  251. }
  252. void VM::check_int_or_float(PyObject *obj){
  253. if(!is_tagged(obj)){
  254. TypeError("expected 'int' or 'float', got " + OBJ_NAME(_t(obj)).escape());
  255. }
  256. }
// Truthiness test. Fast paths for bool, None, int, float; otherwise falls
// back to __len__ (non-zero length is truthy). Objects with no __len__
// are always truthy. NOTE: __bool__ is not consulted here.
bool VM::py_bool(PyObject* obj){
    if(is_non_tagged_type(obj, tp_bool)) return obj == True;
    if(obj == None) return false;
    if(is_int(obj)) return _CAST(i64, obj) != 0;
    if(is_float(obj)) return _CAST(f64, obj) != 0.0;
    PyObject* self;
    PyObject* len_f = get_unbound_method(obj, __len__, &self, false);
    if(self != PY_NULL){
        PyObject* ret = call_method(self, len_f);
        return CAST(i64, ret) > 0;
    }
    return true;
}
  270. PyObject* VM::py_list(PyObject* it){
  271. auto _lock = heap.gc_scope_lock();
  272. it = py_iter(it);
  273. List list;
  274. PyObject* obj = py_next(it);
  275. while(obj != StopIteration){
  276. list.push_back(obj);
  277. obj = py_next(it);
  278. }
  279. return VAR(std::move(list));
  280. }
// Resolve a Slice against a sequence of `length` items, producing clamped
// start/stop/step with Python slice semantics. For a negative step the
// bounds shift down by one and the exclusive stop may legitimately be -1.
// Raises ValueError when step is zero.
void VM::parse_int_slice(const Slice& s, int length, int& start, int& stop, int& step){
    // clamp `value` into [min, max]
    auto clip = [](int value, int min, int max){
        if(value < min) return min;
        if(value > max) return max;
        return value;
    };
    if(s.step == None) step = 1;
    else step = CAST(int, s.step);
    if(step == 0) ValueError("slice step cannot be zero");
    if(step > 0){
        if(s.start == None){
            start = 0;
        }else{
            start = CAST(int, s.start);
            if(start < 0) start += length;   // negative index counts from the end
            start = clip(start, 0, length);
        }
        if(s.stop == None){
            stop = length;
        }else{
            stop = CAST(int, s.stop);
            if(stop < 0) stop += length;
            stop = clip(stop, 0, length);
        }
    }else{
        // backward iteration: defaults and clamps use the [-1, length-1] window
        if(s.start == None){
            start = length - 1;
        }else{
            start = CAST(int, s.start);
            if(start < 0) start += length;
            start = clip(start, -1, length - 1);
        }
        if(s.stop == None){
            stop = -1;
        }else{
            stop = CAST(int, s.stop);
            if(stop < 0) stop += length;
            stop = clip(stop, -1, length - 1);
        }
    }
}
  322. i64 VM::py_hash(PyObject* obj){
  323. const PyTypeInfo* ti = _inst_type_info(obj);
  324. if(ti->m__hash__) return ti->m__hash__(this, obj);
  325. PyObject* ret = call_method(obj, __hash__);
  326. return CAST(i64, ret);
  327. }
// Format-spec mini-language: [0][<|>][width][.precision][f|d|s].
// An empty spec falls back to str(obj). Returns a Python str.
PyObject* VM::format(Str spec, PyObject* obj){
    if(spec.empty()) return py_str(obj);
    // trailing conversion character, if any
    char type;
    switch(spec.end()[-1]){
        case 'f': case 'd': case 's':
            type = spec.end()[-1];
            spec = spec.substr(0, spec.length() - 1);
            break;
        default: type = ' '; break;
    }
    char pad_c = ' ';
    // NOTE(review): spec may have become empty after trimming above;
    // spec[0] would then read past the end — confirm Str::operator[] is safe here
    if(spec[0] == '0'){
        pad_c = '0';
        spec = spec.substr(1);
    }
    char align;
    if(spec[0] == '>'){
        align = '>';
        spec = spec.substr(1);
    }else if(spec[0] == '<'){
        align = '<';
        spec = spec.substr(1);
    }else{
        // default alignment: numbers to the right, everything else to the left
        if(is_int(obj) || is_float(obj)) align = '>';
        else align = '<';
    }
    int dot = spec.index(".");
    int width, precision;
    try{
        if(dot >= 0){
            if(dot == 0){
                width = -1;   // ".N" form: precision only, no width
            }else{
                width = Number::stoi(spec.substr(0, dot).str());
            }
            precision = Number::stoi(spec.substr(dot+1).str());
        }else{
            width = Number::stoi(spec.str());
            precision = -1;
        }
    }catch(...){
        ValueError("invalid format specifer");
        UNREACHABLE();
    }
    if(type != 'f' && dot >= 0) ValueError("precision not allowed in the format specifier");
    Str ret;
    if(type == 'f'){
        f64 val = CAST(f64, obj);
        if(precision < 0) precision = 6;   // match Python's default float precision
        std::stringstream ss;
        ss << std::fixed << std::setprecision(precision) << val;
        ret = ss.str();
    }else if(type == 'd'){
        ret = std::to_string(CAST(i64, obj));
    }else if(type == 's'){
        ret = CAST(Str&, obj);
    }else{
        ret = CAST(Str&, py_str(obj));
    }
    // apply width padding on the chosen side
    if(width != -1 && width > ret.length()){
        int pad = width - ret.length();
        std::string padding(pad, pad_c);
        if(align == '>') ret = padding.c_str() + ret;
        else ret = ret + padding.c_str();
    }
    return VAR(ret);
}
  395. PyObject* VM::new_module(StrName name) {
  396. PyObject* obj = heap._new<DummyModule>(tp_module, DummyModule());
  397. obj->attr().set("__name__", VAR(name.sv()));
  398. // we do not allow override in order to avoid memory leak
  399. // it is because Module objects are not garbage collected
  400. if(_modules.contains(name)) throw std::runtime_error("module already exists");
  401. _modules.set(name, obj);
  402. return obj;
  403. }
// Render a bytecode argument as human-readable text for disassembly.
// `vm` may be nullptr, in which case constant reprs are omitted (used by
// the stack logger, where calling back into the VM is not safe).
static std::string _opcode_argstr(VM* vm, Bytecode byte, const CodeObject* co){
    std::string argStr = byte.arg == -1 ? "" : std::to_string(byte.arg);
    switch(byte.op){
        case OP_LOAD_CONST: case OP_FORMAT_STRING:
            if(vm != nullptr){
                argStr += fmt(" (", CAST(Str, vm->py_repr(co->consts[byte.arg])), ")");
            }
            break;
        // arg is an interned name index
        case OP_LOAD_NAME: case OP_LOAD_GLOBAL: case OP_LOAD_NONLOCAL: case OP_STORE_GLOBAL:
        case OP_LOAD_ATTR: case OP_LOAD_METHOD: case OP_STORE_ATTR: case OP_DELETE_ATTR:
        case OP_IMPORT_NAME: case OP_BEGIN_CLASS: case OP_RAISE:
        case OP_DELETE_GLOBAL: case OP_INC_GLOBAL: case OP_DEC_GLOBAL: case OP_STORE_CLASS_ATTR:
            argStr += fmt(" (", StrName(byte.arg).sv(), ")");
            break;
        // arg indexes a local variable slot
        case OP_LOAD_FAST: case OP_STORE_FAST: case OP_DELETE_FAST: case OP_INC_FAST: case OP_DEC_FAST:
            argStr += fmt(" (", co->varnames[byte.arg].sv(), ")");
            break;
        case OP_LOAD_FUNCTION:
            argStr += fmt(" (", co->func_decls[byte.arg]->code->name, ")");
            break;
    }
    return argStr;
}
// Produce a human-readable disassembly of `co`, then recurse into every
// nested function declaration it contains.
Str VM::disassemble(CodeObject_ co){
    // pad (or truncate) a string to exactly n columns
    auto pad = [](const Str& s, const int n){
        if(s.length() >= n) return s.substr(0, n);
        return s + std::string(n - s.length(), ' ');
    };
    // collect jump destinations so they can be marked with "->"
    std::vector<int> jumpTargets;
    for(auto byte : co->codes){
        if(byte.op == OP_JUMP_ABSOLUTE || byte.op == OP_POP_JUMP_IF_FALSE || byte.op == OP_SHORTCUT_IF_FALSE_OR_POP){
            jumpTargets.push_back(byte.arg);
        }
    }
    std::stringstream ss;
    int prev_line = -1;
    for(int i=0; i<co->codes.size(); i++){
        const Bytecode& byte = co->codes[i];
        Str line = std::to_string(co->lines[i]);
        // print the source line number only when it changes
        if(co->lines[i] == prev_line) line = "";
        else{
            if(prev_line != -1) ss << "\n";
            prev_line = co->lines[i];
        }
        std::string pointer;
        if(std::find(jumpTargets.begin(), jumpTargets.end(), i) != jumpTargets.end()){
            pointer = "-> ";
        }else{
            pointer = " ";
        }
        ss << pad(line, 8) << pointer << pad(std::to_string(i), 3);
        ss << " " << pad(OP_NAMES[byte.op], 25) << " ";
        // ss << pad(byte.arg == -1 ? "" : std::to_string(byte.arg), 5);
        std::string argStr = _opcode_argstr(this, byte, co.get());
        ss << argStr;
        // ss << pad(argStr, 40);      // may overflow
        // ss << co->blocks[byte.block].type;
        if(i != co->codes.size() - 1) ss << '\n';
    }
    // recurse into nested functions
    for(auto& decl: co->func_decls){
        ss << "\n\n" << "Disassembly of " << decl->code->name << ":\n";
        ss << disassemble(decl->code);
    }
    ss << "\n";
    return Str(ss.str());
}
#if PK_DEBUG_CEVAL_STEP
// Debug-only helper: print the current value stack (frame bases marked '|')
// followed by the instruction about to execute. Compiled only in
// CEVAL_STEP builds.
void VM::_log_s_data(const char* title) {
    if(_main == nullptr) return;      // VM not fully initialized yet
    if(callstack.empty()) return;
    std::stringstream ss;
    if(title) ss << title << " | ";
    // count how many frames base at each stack slot
    std::map<PyObject**, int> sp_bases;
    for(Frame& f: callstack.data()){
        if(f._sp_base == nullptr) FATAL_ERROR();
        sp_bases[f._sp_base] += 1;
    }
    FrameId frame = top_frame();
    int line = frame->co->lines[frame->_ip];
    ss << frame->co->name << ":" << line << " [";
    for(PyObject** p=s_data.begin(); p!=s_data.end(); p++){
        ss << std::string(sp_bases[p], '|');
        if(sp_bases[p] > 0) ss << " ";
        PyObject* obj = *p;
        // short preview per object kind
        if(obj == nullptr) ss << "(nil)";
        else if(obj == PY_NULL) ss << "NULL";
        else if(is_int(obj)) ss << CAST(i64, obj);
        else if(is_float(obj)) ss << CAST(f64, obj);
        else if(is_type(obj, tp_str)) ss << CAST(Str, obj).escape();
        else if(obj == None) ss << "None";
        else if(obj == True) ss << "True";
        else if(obj == False) ss << "False";
        else if(is_type(obj, tp_function)){
            auto& f = CAST(Function&, obj);
            ss << f.decl->code->name << "(...)";
        } else if(is_type(obj, tp_type)){
            Type t = PK_OBJ_GET(Type, obj);
            ss << "<class " + _all_types[t].name.escape() + ">";
        } else if(is_type(obj, tp_list)){
            auto& t = CAST(List&, obj);
            ss << "list(size=" << t.size() << ")";
        } else if(is_type(obj, tp_tuple)){
            auto& t = CAST(Tuple&, obj);
            ss << "tuple(size=" << t.size() << ")";
        } else ss << "(" << obj_type_name(this, obj->type) << ")";
        ss << ", ";
    }
    std::string output = ss.str();
    if(!s_data.empty()) {
        // strip the trailing ", "
        output.pop_back(); output.pop_back();
    }
    output.push_back(']');
    Bytecode byte = frame->co->codes[frame->_ip];
    std::cout << output << " " << OP_NAMES[byte.op] << " " << _opcode_argstr(nullptr, byte, frame->co) << std::endl;
}
#endif
// Bootstrap the type registry and the `builtins` module. `object` and
// `type` are installed by hand because new_type_object itself needs them
// to exist. Registration order is significant: int and float must land on
// their reserved indices for the tagged-value fast paths.
void VM::init_builtin_types(){
    _all_types.push_back({heap._new<Type>(Type(1), Type(0)), -1, "object", true});
    _all_types.push_back({heap._new<Type>(Type(1), Type(1)), 0, "type", false});
    tp_object = 0; tp_type = 1;
    tp_int = _new_type_object("int");
    tp_float = _new_type_object("float");
    if(tp_int.index != kTpIntIndex || tp_float.index != kTpFloatIndex) FATAL_ERROR();
    tp_bool = _new_type_object("bool");
    tp_str = _new_type_object("str");
    tp_list = _new_type_object("list");
    tp_tuple = _new_type_object("tuple");
    tp_slice = _new_type_object("slice");
    tp_range = _new_type_object("range");
    tp_module = _new_type_object("module");
    tp_function = _new_type_object("function");
    tp_native_func = _new_type_object("native_func");
    tp_bound_method = _new_type_object("bound_method");
    tp_super = _new_type_object("super");
    tp_exception = _new_type_object("Exception");
    tp_bytes = _new_type_object("bytes");
    tp_mappingproxy = _new_type_object("mappingproxy");
    tp_dict = _new_type_object("dict");
    tp_property = _new_type_object("property");
    tp_star_wrapper = _new_type_object("_star_wrapper");
    // singletons (each gets its own private type)
    this->None = heap._new<Dummy>(_new_type_object("NoneType"), {});
    this->NotImplemented = heap._new<Dummy>(_new_type_object("NotImplementedType"), {});
    this->Ellipsis = heap._new<Dummy>(_new_type_object("ellipsis"), {});
    this->True = heap._new<Dummy>(tp_bool, {});
    this->False = heap._new<Dummy>(tp_bool, {});
    this->StopIteration = heap._new<Dummy>(_new_type_object("StopIterationType"), {});
    this->builtins = new_module("builtins");
    // setup public types
    builtins->attr().set("type", _t(tp_type));
    builtins->attr().set("object", _t(tp_object));
    builtins->attr().set("bool", _t(tp_bool));
    builtins->attr().set("int", _t(tp_int));
    builtins->attr().set("float", _t(tp_float));
    builtins->attr().set("str", _t(tp_str));
    builtins->attr().set("list", _t(tp_list));
    builtins->attr().set("tuple", _t(tp_tuple));
    builtins->attr().set("range", _t(tp_range));
    builtins->attr().set("bytes", _t(tp_bytes));
    builtins->attr().set("dict", _t(tp_dict));
    builtins->attr().set("property", _t(tp_property));
    builtins->attr().set("StopIteration", StopIteration);
    builtins->attr().set("NotImplemented", NotImplemented);
    builtins->attr().set("slice", _t(tp_slice));
    post_init();
    // presumably freezes attribute tables into a faster lookup layout
    for(int i=0; i<_all_types.size(); i++){
        _all_types[i].obj->attr()._try_perfect_rehash();
    }
    for(auto [k, v]: _modules.items()) v->attr()._try_perfect_rehash();
    this->_main = new_module("__main__");
}
  574. // `heap.gc_scope_lock();` needed before calling this function
  575. void VM::_unpack_as_list(ArgsView args, List& list){
  576. for(PyObject* obj: args){
  577. if(is_non_tagged_type(obj, tp_star_wrapper)){
  578. const StarWrapper& w = _CAST(StarWrapper&, obj);
  579. // maybe this check should be done in the compile time
  580. if(w.level != 1) TypeError("expected level 1 star wrapper");
  581. PyObject* _0 = py_iter(w.obj);
  582. PyObject* _1 = py_next(_0);
  583. while(_1 != StopIteration){
  584. list.push_back(_1);
  585. _1 = py_next(_0);
  586. }
  587. }else{
  588. list.push_back(obj);
  589. }
  590. }
  591. }
  592. // `heap.gc_scope_lock();` needed before calling this function
  593. void VM::_unpack_as_dict(ArgsView args, Dict& dict){
  594. for(PyObject* obj: args){
  595. if(is_non_tagged_type(obj, tp_star_wrapper)){
  596. const StarWrapper& w = _CAST(StarWrapper&, obj);
  597. // maybe this check should be done in the compile time
  598. if(w.level != 2) TypeError("expected level 2 star wrapper");
  599. const Dict& other = CAST(Dict&, w.obj);
  600. dict.update(other);
  601. }else{
  602. const Tuple& t = CAST(Tuple&, obj);
  603. if(t.size() != 2) TypeError("expected tuple of length 2");
  604. dict.set(t[0], t[1]);
  605. }
  606. }
  607. }
// Bind a call's arguments into `buffer` — the callee's local-variable
// slots — according to `decl`: positionals, keyword defaults, *args and
// **kwargs. `buffer` must provide at least co->varnames.size() slots.
// `kwargs` is a flat sequence of [name_index, value, ...] pairs.
void VM::_prepare_py_call(PyObject** buffer, ArgsView args, ArgsView kwargs, const FuncDecl_& decl){
    const CodeObject* co = decl->code.get();
    int co_nlocals = co->varnames.size();
    int decl_argc = decl->args.size();
    if(args.size() < decl_argc){
        vm->TypeError(fmt(
            "expected ", decl_argc, " positional arguments, got ", args.size(),
            " (", co->name, ')'
        ));
    }
    int i = 0;
    // prepare args
    for(int index: decl->args) buffer[index] = args[i++];
    // set extra varnames to nullptr
    for(int j=i; j<co_nlocals; j++) buffer[j] = PY_NULL;
    // prepare kwdefaults
    for(auto& kv: decl->kwargs) buffer[kv.key] = kv.value;
    // handle *args
    if(decl->starred_arg != -1){
        // surplus positionals are packed into a tuple
        ArgsView vargs(args.begin() + i, args.end());
        buffer[decl->starred_arg] = VAR(vargs.to_tuple());
        i += vargs.size();
    }else{
        // kwdefaults override: without *args, surplus positionals fill
        // keyword slots in declaration order
        for(auto& kv: decl->kwargs){
            if(i >= args.size()) break;
            buffer[kv.key] = args[i++];
        }
        if(i < args.size()) TypeError(fmt("too many arguments", " (", decl->code->name, ')'));
    }
    PyObject* vkwargs;
    if(decl->starred_kwarg != -1){
        vkwargs = VAR(Dict(this));
        buffer[decl->starred_kwarg] = vkwargs;
    }else{
        vkwargs = nullptr;
    }
    for(int j=0; j<kwargs.size(); j+=2){
        StrName key(CAST(int, kwargs[j]));
        int index = co->varnames_inv.try_get(key);
        if(index < 0){
            // unknown keyword: collected by **kwargs if declared, else an error
            if(vkwargs == nullptr){
                TypeError(fmt(key.escape(), " is an invalid keyword argument for ", co->name, "()"));
            }else{
                Dict& dict = _CAST(Dict&, vkwargs);
                dict.set(VAR(key.sv()), kwargs[j+1]);
            }
        }else{
            buffer[index] = kwargs[j+1];
        }
    }
}
// Core call dispatcher. On entry the value stack holds
//   [callable, <self>, args..., kwargs...]
// where <self> is PY_NULL for plain (non-method) calls. Dispatches to
// native functions, Python functions, type construction (__new__/__init__)
// and __call__ overloads. When `op_call` is true a Python-function call
// pushes its frame and returns PY_OP_CALL for the interpreter loop to run.
PyObject* VM::vectorcall(int ARGC, int KWARGC, bool op_call){
    PyObject** p1 = s_data._sp - KWARGC*2;
    PyObject** p0 = p1 - ARGC - 2;
    // [callable, <self>, args..., kwargs...]
    //      ^p0                    ^p1      ^_sp
    PyObject* callable = p1[-(ARGC + 2)];
    bool method_call = p1[-(ARGC + 1)] != PY_NULL;
    // handle boundmethod, do a patch
    if(is_non_tagged_type(callable, tp_bound_method)){
        if(method_call) FATAL_ERROR();
        auto& bm = CAST(BoundMethod&, callable);
        callable = bm.func;      // get unbound method
        p1[-(ARGC + 2)] = bm.func;
        p1[-(ARGC + 1)] = bm.self;
        method_call = true;
        // [unbound, self, args..., kwargs...]
    }
    // for method calls, self is prepended to the positional arguments
    ArgsView args(p1 - ARGC - int(method_call), p1);
    ArgsView kwargs(p1, s_data._sp);
    // scratch area used for argument binding
    static PK_THREAD_LOCAL PyObject* buffer[PK_MAX_CO_VARNAMES];
    if(is_non_tagged_type(callable, tp_native_func)){
        const auto& f = PK_OBJ_GET(NativeFunc, callable);
        PyObject* ret;
        if(f.decl != nullptr){
            // native func with a Python-style declaration: full keyword binding
            int co_nlocals = f.decl->code->varnames.size();
            _prepare_py_call(buffer, args, kwargs, f.decl);
            // copy buffer back to stack
            s_data.reset(args.begin());
            for(int j=0; j<co_nlocals; j++) PUSH(buffer[j]);
            ret = f.call(vm, ArgsView(s_data._sp - co_nlocals, s_data._sp));
        }else{
            if(KWARGC != 0) TypeError("old-style native_func does not accept keyword arguments");
            f.check_size(this, args);
            ret = f.call(this, args);
        }
        s_data.reset(p0);
        return ret;
    }
    if(is_non_tagged_type(callable, tp_function)){
        /*****************_py_call*****************/
        // callable must be a `function` object
        if(s_data.is_overflow()) StackOverflowError();
        const Function& fn = PK_OBJ_GET(Function, callable);
        const FuncDecl_& decl = fn.decl;
        const CodeObject* co = decl->code.get();
        int co_nlocals = co->varnames.size();
        _prepare_py_call(buffer, args, kwargs, decl);
        if(co->is_generator){
            // generators capture their bound locals instead of running now
            s_data.reset(p0);
            return _py_generator(
                Frame(&s_data, nullptr, co, fn._module, callable),
                ArgsView(buffer, buffer + co_nlocals)
            );
        }
        // copy buffer back to stack
        s_data.reset(args.begin());
        for(int j=0; j<co_nlocals; j++) PUSH(buffer[j]);
        callstack.emplace(&s_data, p0, co, fn._module, callable, FastLocals(co, args.begin()));
        if(op_call) return PY_OP_CALL;
        return _run_top_frame();
        /*****************_py_call*****************/
    }
    if(is_non_tagged_type(callable, tp_type)){
        if(method_call) FATAL_ERROR();
        // [type, NULL, args..., kwargs...]
        DEF_SNAME(__new__);
        PyObject* new_f = find_name_in_mro(callable, __new__);
        PyObject* obj;
#if PK_DEBUG_EXTRA_CHECK
        PK_ASSERT(new_f != nullptr);
#endif
        if(new_f == cached_object__new__) {
            // fast path for object.__new__
            Type t = PK_OBJ_GET(Type, callable);
            obj= vm->heap.gcnew<DummyInstance>(t, {});
        }else{
            // push a recursive call: new_f(cls, args..., kwargs...)
            PUSH(new_f);
            PUSH(PY_NULL);
            PUSH(callable);    // cls
            for(PyObject* o: args) PUSH(o);
            for(PyObject* o: kwargs) PUSH(o);
            // if obj is not an instance of callable, the behavior is undefined
            obj = vectorcall(ARGC+1, KWARGC);
        }
        // __init__
        PyObject* self;
        DEF_SNAME(__init__);
        callable = get_unbound_method(obj, __init__, &self, false);
        if (self != PY_NULL) {
            // replace `NULL` with `self`
            p1[-(ARGC + 2)] = callable;
            p1[-(ARGC + 1)] = self;
            // [init_f, self, args..., kwargs...]
            vectorcall(ARGC, KWARGC);
            // We just discard the return value of `__init__`
            // in cpython it raises a TypeError if the return value is not None
        }else{
            // manually reset the stack
            s_data.reset(p0);
        }
        return obj;
    }
    // handle `__call__` overload
    PyObject* self;
    DEF_SNAME(__call__);
    PyObject* call_f = get_unbound_method(callable, __call__, &self, false);
    if(self != PY_NULL){
        p1[-(ARGC + 2)] = call_f;
        p1[-(ARGC + 1)] = self;
        // [call_f, self, args..., kwargs...]
        return vectorcall(ARGC, KWARGC, false);
    }
    TypeError(OBJ_NAME(_t(callable)).escape() + " object is not callable");
    return nullptr;
}
// https://docs.python.org/3/howto/descriptor.html#invocation-from-an-instance
// Attribute lookup with (simplified) descriptor semantics. Precedence:
//   1. class-level property descriptor (its getter is invoked),
//   2. the instance __dict__,
//   3. other class variables — functions become bound methods.
// Returns nullptr when not found, unless `throw_err` raises AttributeError.
PyObject* VM::getattr(PyObject* obj, StrName name, bool throw_err){
    PyObject* objtype;
    // handle super() proxy
    if(is_non_tagged_type(obj, tp_super)){
        const Super& super = PK_OBJ_GET(Super, obj);
        obj = super.first;
        objtype = _t(super.second);   // lookup starts at the proxied base type
    }else{
        objtype = _t(obj);
    }
    PyObject* cls_var = find_name_in_mro(objtype, name);
    if(cls_var != nullptr){
        // handle descriptor
        if(is_non_tagged_type(cls_var, tp_property)){
            const Property& prop = _CAST(Property&, cls_var);
            return call(prop.getter, obj);
        }
    }
    // handle instance __dict__
    if(!is_tagged(obj) && obj->is_attr_valid()){
        PyObject* val = obj->attr().try_get(name);
        if(val != nullptr) return val;
    }
    if(cls_var != nullptr){
        // bound method is non-data descriptor
        if(is_non_tagged_type(cls_var, tp_function) || is_non_tagged_type(cls_var, tp_native_func)){
            return VAR(BoundMethod(obj, cls_var));
        }
        return cls_var;
    }
    if(throw_err) AttributeError(obj, name);
    return nullptr;
}
// used by OP_LOAD_METHOD
// try to load an unbound method (fallback to `getattr` if not found)
// When the class attribute is a plain (native) function, it is returned
// unbound and `*self` receives the receiver, letting the caller build
// [func, self, args...] on the stack without allocating a BoundMethod.
// Otherwise `*self` stays PY_NULL and the attribute value itself is
// returned. With `fallback` set, property getters and the instance
// __dict__ are consulted first, mirroring `getattr`'s precedence.
PyObject* VM::get_unbound_method(PyObject* obj, StrName name, PyObject** self, bool throw_err, bool fallback){
    *self = PY_NULL;
    PyObject* objtype;
    // handle super() proxy: search the parent type, keep the original receiver
    if(is_non_tagged_type(obj, tp_super)){
        const Super& super = PK_OBJ_GET(Super, obj);
        obj = super.first;
        objtype = _t(super.second);
    }else{
        objtype = _t(obj);
    }
    PyObject* cls_var = find_name_in_mro(objtype, name);
    if(fallback){
        if(cls_var != nullptr){
            // handle descriptor: data descriptor beats instance __dict__
            if(is_non_tagged_type(cls_var, tp_property)){
                const Property& prop = _CAST(Property&, cls_var);
                return call(prop.getter, obj);
            }
        }
        // handle instance __dict__
        if(!is_tagged(obj) && obj->is_attr_valid()){
            PyObject* val = obj->attr().try_get(name);
            if(val != nullptr) return val;
        }
    }
    if(cls_var != nullptr){
        // only function-typed attributes are "methods"; signal via *self
        if(is_non_tagged_type(cls_var, tp_function) || is_non_tagged_type(cls_var, tp_native_func)){
            *self = obj;
        }
        return cls_var;
    }
    if(throw_err) AttributeError(obj, name);
    return nullptr;
}
  846. void VM::setattr(PyObject* obj, StrName name, PyObject* value){
  847. PyObject* objtype;
  848. // handle super() proxy
  849. if(is_non_tagged_type(obj, tp_super)){
  850. Super& super = PK_OBJ_GET(Super, obj);
  851. obj = super.first;
  852. objtype = _t(super.second);
  853. }else{
  854. objtype = _t(obj);
  855. }
  856. PyObject* cls_var = find_name_in_mro(objtype, name);
  857. if(cls_var != nullptr){
  858. // handle descriptor
  859. if(is_non_tagged_type(cls_var, tp_property)){
  860. const Property& prop = _CAST(Property&, cls_var);
  861. if(prop.setter != vm->None){
  862. call(prop.setter, obj, value);
  863. }else{
  864. TypeError(fmt("readonly attribute: ", name.escape()));
  865. }
  866. return;
  867. }
  868. }
  869. // handle instance __dict__
  870. if(is_tagged(obj) || !obj->is_attr_valid()) TypeError("cannot set attribute");
  871. obj->attr().set(name, value);
  872. }
// Convenience overload: bind a native function without a docstring.
PyObject* VM::bind(PyObject* obj, const char* sig, NativeFuncC fn, UserData userdata){
    return bind(obj, sig, nullptr, fn, userdata);
}
  876. PyObject* VM::bind(PyObject* obj, const char* sig, const char* docstring, NativeFuncC fn, UserData userdata){
  877. CodeObject_ co;
  878. try{
  879. // fn(a, b, *c, d=1) -> None
  880. co = compile("def " + Str(sig) + " : pass", "<bind>", EXEC_MODE);
  881. }catch(Exception&){
  882. throw std::runtime_error("invalid signature: " + std::string(sig));
  883. }
  884. if(co->func_decls.size() != 1){
  885. throw std::runtime_error("expected 1 function declaration");
  886. }
  887. FuncDecl_ decl = co->func_decls[0];
  888. decl->signature = Str(sig);
  889. if(docstring != nullptr){
  890. decl->docstring = Str(docstring).strip();
  891. }
  892. PyObject* f_obj = VAR(NativeFunc(fn, decl));
  893. PK_OBJ_GET(NativeFunc, f_obj).set_userdata(userdata);
  894. if(obj != nullptr) obj->attr().set(decl->code->name, f_obj);
  895. return f_obj;
  896. }
  897. void VM::_error(Exception e){
  898. if(callstack.empty()){
  899. e.is_re = false;
  900. throw e;
  901. }
  902. PUSH(VAR(e));
  903. _raise();
  904. }
// GC mark phase: visit every root set so all reachable objects survive the
// subsequent sweep.
void ManagedHeap::mark() {
    for(PyObject* obj: _no_gc) PK_OBJ_MARK(obj);               // pinned (never-collected) objects
    for(auto& frame : vm->callstack.data()) frame._gc_mark();  // each frame marks its own roots
    for(PyObject* obj: vm->s_data) PK_OBJ_MARK(obj);           // the VM value stack
    if(_gc_marker_ex) _gc_marker_ex(vm);                       // user-registered extra roots
    if(vm->_last_exception) PK_OBJ_MARK(vm->_last_exception);
    if(vm->_c.error != nullptr) PK_OBJ_MARK(vm->_c.error);
}
// Human-readable name of a type, looked up in the VM's type table.
Str obj_type_name(VM *vm, Type type){
    return vm->_all_types[type].name;
}
// Install a C-level __hash__ for `type`. The function pointer is stored in
// the type's slot (for fast direct dispatch) AND exposed as a Python-visible
// `__hash__` method. The pointer rides along as NativeFunc userdata so the
// lambda can stay capture-free (required to convert it to a function pointer).
void VM::bind__hash__(Type type, i64 (*f)(VM*, PyObject*)){
    PyObject* obj = _t(type);
    _all_types[type].m__hash__ = f;
    PyObject* nf = bind_method<0>(obj, "__hash__", [](VM* vm, ArgsView args){
        // recover the raw function pointer from userdata; args[0] is `self`
        i64 ret = lambda_get_userdata<i64(*)(VM*, PyObject*)>(args.begin())(vm, args[0]);
        return VAR(ret);
    });
    PK_OBJ_GET(NativeFunc, nf).set_userdata(f);
}
// Install a C-level __len__ for `type`; same double-registration scheme as
// bind__hash__: a type slot for direct dispatch plus a Python-visible method
// whose raw function pointer is carried in the NativeFunc's userdata.
void VM::bind__len__(Type type, i64 (*f)(VM*, PyObject*)){
    PyObject* obj = _t(type);
    _all_types[type].m__len__ = f;
    PyObject* nf = bind_method<0>(obj, "__len__", [](VM* vm, ArgsView args){
        // recover the raw function pointer from userdata; args[0] is `self`
        i64 ret = lambda_get_userdata<i64(*)(VM*, PyObject*)>(args.begin())(vm, args[0]);
        return VAR(ret);
    });
    PK_OBJ_GET(NativeFunc, nf).set_userdata(f);
}
// Open-addressing slot lookup for `key`.
// On return `i` is either the slot holding an equal key (ok == true) or the
// first empty slot where the key could be inserted (ok == false).
// Assumes the table is never full, otherwise the loop would not terminate.
void Dict::_probe(PyObject *key, bool &ok, int &i) const{
    ok = false;
    i = vm->py_hash(key) & _mask;   // _mask == capacity-1 (power-of-two capacity)
    while(_items[i].first != nullptr) {
        if(vm->py_equals(_items[i].first, key)) { ok = true; break; }
        // https://github.com/python/cpython/blob/3.8/Objects/dictobject.c#L166
        // same recurrence as CPython; full-period mod 2^k, so every slot is visited
        i = ((5*i) + 1) & _mask;
    }
}
  943. void CodeObjectSerializer::write_object(VM *vm, PyObject *obj){
  944. if(is_int(obj)) write_int(_CAST(i64, obj));
  945. else if(is_float(obj)) write_float(_CAST(f64, obj));
  946. else if(is_type(obj, vm->tp_str)) write_str(_CAST(Str&, obj));
  947. else if(is_type(obj, vm->tp_bool)) write_bool(_CAST(bool, obj));
  948. else if(obj == vm->None) write_none();
  949. else if(obj == vm->Ellipsis) write_ellipsis();
  950. else{
  951. throw std::runtime_error(fmt(OBJ_NAME(vm->_t(obj)).escape(), " is not serializable"));
  952. }
  953. }
  954. void NativeFunc::check_size(VM* vm, ArgsView args) const{
  955. if(args.size() != argc && argc != -1) {
  956. vm->TypeError(fmt("expected ", argc, " arguments, got ", args.size()));
  957. }
  958. }
// Invoke the wrapped C function pointer with the prepared argument view.
PyObject* NativeFunc::call(VM *vm, ArgsView args) const {
    return f(vm, args);
}
  962. } // namespace pkpy