vm.h 41 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037103810391040104110421043104410451046104710481049105010511052105310541055105610571058105910601061106210631064106510661067106810691070107110721073107410751076107710781079108010811082108310841085108610871088108910901091109210931094109510961097109810991100110111021103110411051106110711081109111011111112111311141115111611171118111911201121112211231124112511261127112811291130113111321133113411351136113711381139114011411142114311441145114611471148114911501151115211531154115511561157115811591160116111621163116411651166116711681169117011711172117311741175117611771178117911801181118211831184118511861187118811891190119111921193119411951196119711981199120012011202120312041205120612071208120912101211121212131214
  1. #pragma once
  2. #include "codeobject.h"
  3. #include "common.h"
  4. #include "frame.h"
  5. #include "error.h"
  6. #include "gc.h"
  7. #include "memory.h"
  8. #include "obj.h"
  9. #include "str.h"
  10. #include "tuplelist.h"
namespace pkpy{

/* Stack manipulation macros.
 * These operate on the VM's shared value stack `s_data` and mirror the
 * conventions used by CPython's ceval loop:
 * https://github.com/python/cpython/blob/3.9/Python/ceval.c#L1123
 */
#define TOP()             (s_data.top())        // topmost value (no pop)
#define SECOND()          (s_data.second())     // value below TOP
#define THIRD()           (s_data.third())      // value below SECOND
#define PEEK(n)           (s_data.peek(n))      // n-th value from the top (1-based)
#define STACK_SHRINK(n)   (s_data.shrink(n))    // drop n values
#define PUSH(v)           (s_data.push(v))
#define POP()             (s_data.pop())        // pop, discarding the value
#define POPX()            (s_data.popx())       // pop and return the value
#define STACK_VIEW(n)     (s_data.view(n))      // ArgsView over the top n values
// Hook used by the import machinery to read a module file relative to the
// current working directory. The default implementation returns empty Bytes
// (i.e. "file not found"); embedders install a real reader via
// set_read_file_cwd().
typedef Bytes (*ReadFileCwdFunc)(const Str& name);
inline ReadFileCwdFunc _read_file_cwd = [](const Str& name) { return Bytes(); };
// Returns 0 so it can be used in a static initializer expression.
inline int set_read_file_cwd(ReadFileCwdFunc func) { _read_file_cwd = func; return 0; }
// Generates the cast/boxing glue for a native C++ type `ctype` whose VM type
// slot is the member `vm->ptype`:
//   * py_cast<ctype>/py_cast<ctype&>  — checked casts (TypeError on mismatch)
//   * _py_cast<ctype>/_py_cast<ctype&> — unchecked casts, for use where the
//     caller has already verified the type
//   * py_var(vm, value) — boxes a `ctype` (copy or move) into a new GC object
// NOTE: no comments inside the macro body — `//` before a trailing backslash
// would splice the next line into the comment.
#define DEF_NATIVE_2(ctype, ptype) \
    template<> inline ctype py_cast<ctype>(VM* vm, PyObject* obj) { \
        vm->check_non_tagged_type(obj, vm->ptype); \
        return OBJ_GET(ctype, obj); \
    } \
    template<> inline ctype _py_cast<ctype>(VM* vm, PyObject* obj) { \
        return OBJ_GET(ctype, obj); \
    } \
    template<> inline ctype& py_cast<ctype&>(VM* vm, PyObject* obj) { \
        vm->check_non_tagged_type(obj, vm->ptype); \
        return OBJ_GET(ctype, obj); \
    } \
    template<> inline ctype& _py_cast<ctype&>(VM* vm, PyObject* obj) { \
        return OBJ_GET(ctype, obj); \
    } \
    inline PyObject* py_var(VM* vm, const ctype& value) { return vm->heap.gcnew(vm->ptype, value);} \
    inline PyObject* py_var(VM* vm, ctype&& value) { return vm->heap.gcnew(vm->ptype, std::move(value));}
// A suspended function call: owns the paused Frame plus a snapshot of the
// operand-stack values that belong to it (`s_backup`), restored on resume.
class Generator final: public BaseIter {
    Frame frame;     // the paused call frame
    int state;       // 0 = not started, 1 = running/suspended, 2 = exhausted
    List s_backup;   // saved stack values belonging to this frame
public:
    Generator(VM* vm, Frame&& frame, ArgsView buffer): BaseIter(vm), frame(std::move(frame)), state(0) {
        // copy the frame's live stack slice out of the shared value stack
        for(PyObject* obj: buffer) s_backup.push_back(obj);
    }
    // Resumes the frame; returns the yielded value (implementation elsewhere).
    PyObject* next() override;
    // GC marking for the owned frame and backed-up stack values.
    void _gc_mark() const;
};
// One entry in VM::_all_types, indexed by Type.
struct PyTypeInfo{
    PyObject* obj;   // the type object itself
    Type base;       // base type index; -1 means no base (only `object`)
    Str name;        // qualified name, e.g. "mod.ClassName" for module-level types
};
  59. struct FrameId{
  60. std::vector<pkpy::Frame>* data;
  61. int index;
  62. FrameId(std::vector<pkpy::Frame>* data, int index) : data(data), index(index) {}
  63. Frame* operator->() const { return &data->operator[](index); }
  64. };
// The virtual machine: owns the GC heap, the shared value stack, the frame
// stack, the type registry and all loaded modules.
class VM {
    VM* vm;     // self reference so helper macros can uniformly write vm->...
public:
    ManagedHeap heap;                       // GC-managed object heap
    ValueStack s_data;                      // operand stack shared by all frames
    stack< Frame > callstack;               // call frames
    std::vector<PyTypeInfo> _all_types;     // type registry, indexed by Type
    void (*_gc_marker_ex)(VM*) = nullptr;   // optional extra GC root-marking hook
    NameDict _modules;                      // loaded modules
    std::map<StrName, Str> _lazy_modules;   // lazy loaded modules (source only, compiled on first import)
    PyObject* None;
    PyObject* True;
    PyObject* False;
    PyObject* Ellipsis;
    PyObject* builtins;         // builtins module
    PyObject* StopIteration;
    PyObject* _main;            // __main__ module
    std::stringstream _stdout_buffer;   // capture buffers used when stdio is disabled
    std::stringstream _stderr_buffer;
    std::ostream* _stdout;      // points at std::cout or _stdout_buffer
    std::ostream* _stderr;      // points at std::cerr or _stderr_buffer
    bool _initialized;          // false only while init_builtin_types() runs
    // for quick access
    Type tp_object, tp_type, tp_int, tp_float, tp_bool, tp_str;
    Type tp_list, tp_tuple;
    Type tp_function, tp_native_func, tp_iterator, tp_bound_method;
    Type tp_slice, tp_range, tp_module;
    Type tp_super, tp_exception, tp_bytes, tp_mappingproxy;
    const bool enable_os;       // whether OS-level builtins (files etc.) are allowed

    // use_stdio=false routes stdout/stderr into internal buffers readable
    // via read_output(); enable_os gates OS access for sandboxed embedding.
    VM(bool use_stdio=true, bool enable_os=true) : heap(this), enable_os(enable_os) {
        this->vm = this;
        this->_stdout = use_stdio ? &std::cout : &_stdout_buffer;
        this->_stderr = use_stdio ? &std::cerr : &_stderr_buffer;
        callstack.reserve(8);
        _initialized = false;
        init_builtin_types();
        _initialized = true;
    }

    bool is_stdio_used() const { return _stdout == &std::cout; }

    // Drains the capture buffers into a JSON string {"stdout": ..., "stderr": ...}.
    // Only valid when the VM was constructed with use_stdio=false.
    std::string read_output(){
        if(is_stdio_used()) UNREACHABLE();
        std::stringstream* s_out = (std::stringstream*)(vm->_stdout);
        std::stringstream* s_err = (std::stringstream*)(vm->_stderr);
        pkpy::Str _stdout = s_out->str();
        pkpy::Str _stderr = s_err->str();
        std::stringstream ss;
        ss << '{' << "\"stdout\": " << _stdout.escape(false);
        ss << ", " << "\"stderr\": " << _stderr.escape(false) << '}';
        s_out->str(""); s_err->str("");     // clear both buffers after reading
        return ss.str();
    }

    // Handle to the currently executing frame (index-based; see FrameId).
    FrameId top_frame() {
#if DEBUG_EXTRA_CHECK
        if(callstack.empty()) FATAL_ERROR();
#endif
        return FrameId(&callstack.data(), callstack.size()-1);
    }

    // str(obj): uses __str__ if present, otherwise falls back to repr().
    PyObject* asStr(PyObject* obj){
        PyObject* self;
        PyObject* f = get_unbound_method(obj, __str__, &self, false);
        if(self != PY_NULL) return call_method(self, f);
        return asRepr(obj);
    }

    // iter(obj): iterators pass through; otherwise __iter__ is called.
    PyObject* asIter(PyObject* obj){
        if(is_type(obj, tp_iterator)) return obj;
        PyObject* self;
        PyObject* iter_f = get_unbound_method(obj, __iter__, &self, false);
        if(self != PY_NULL) return call_method(self, iter_f);
        TypeError(OBJ_NAME(_t(obj)).escape() + " object is not iterable");
        return nullptr;
    }

    // list(it): lists pass through; otherwise the list constructor is called.
    PyObject* asList(PyObject* it){
        if(is_non_tagged_type(it, tp_list)) return it;
        return call(_t(tp_list), it);
    }

    // Walks the single-inheritance chain (method resolution order) looking
    // for `name`; returns nullptr if no class in the chain defines it.
    PyObject* find_name_in_mro(PyObject* cls, StrName name){
        PyObject* val;
        do{
            val = cls->attr().try_get(name);
            if(val != nullptr) return val;
            Type cls_t = OBJ_GET(Type, cls);
            Type base = _all_types[cls_t].base;
            if(base.index == -1) break;     // reached `object`
            cls = _all_types[base].obj;
        }while(true);
        return nullptr;
    }

    // True if obj's type equals cls_t or derives from it.
    bool isinstance(PyObject* obj, Type cls_t){
        Type obj_t = OBJ_GET(Type, _t(obj));
        do{
            if(obj_t == cls_t) return true;
            Type base = _all_types[obj_t].base;
            if(base.index == -1) break;
            obj_t = base;
        }while(true);
        return false;
    }

    // Compile and run `source` in `_module` (defaults to __main__).
    // Returns the result, or nullptr after printing an uncaught exception;
    // in that case the call/value stacks are reset.
    PyObject* exec(Str source, Str filename, CompileMode mode, PyObject* _module=nullptr){
        if(_module == nullptr) _module = _main;
        try {
            CodeObject_ code = compile(source, filename, mode);
#if DEBUG_DIS_EXEC
            if(_module == _main) std::cout << disassemble(code) << '\n';
#endif
            return _exec(code, _module);
        }catch (const Exception& e){
            *_stderr << e.summary() << '\n';
        }
#if !DEBUG_FULL_EXCEPTION
        // in release mode, also trap stray C++ exceptions so the host survives
        catch (const std::exception& e) {
            *_stderr << "An std::exception occurred! It could be a bug.\n";
            *_stderr << e.what() << '\n';
        }
#endif
        callstack.clear();
        s_data.clear();
        return nullptr;
    }

    // Pushes a new frame (args forwarded to the Frame ctor) and runs it.
    template<typename ...Args>
    PyObject* _exec(Args&&... args){
        callstack.emplace(&s_data, s_data._sp, std::forward<Args>(args)...);
        return _run_top_frame();
    }

    // Pops the top frame and discards its slice of the value stack.
    void _pop_frame(){
        Frame* frame = &callstack.top();
        s_data.reset(frame->_sp_base);
        callstack.pop();
    }

    // Manually unrolled push of 0..4 positional arguments (used by call/call_method).
    void _push_varargs(){ }
    void _push_varargs(PyObject* _0){ PUSH(_0); }
    void _push_varargs(PyObject* _0, PyObject* _1){ PUSH(_0); PUSH(_1); }
    void _push_varargs(PyObject* _0, PyObject* _1, PyObject* _2){ PUSH(_0); PUSH(_1); PUSH(_2); }
    void _push_varargs(PyObject* _0, PyObject* _1, PyObject* _2, PyObject* _3){ PUSH(_0); PUSH(_1); PUSH(_2); PUSH(_3); }

    // callable(*args). Stack layout for vectorcall: [callable, self|PY_NULL, args...];
    // PY_NULL marks "no bound self".
    template<typename... Args>
    PyObject* call(PyObject* callable, Args&&... args){
        PUSH(callable);
        PUSH(PY_NULL);
        _push_varargs(args...);
        return vectorcall(sizeof...(args));
    }

    // callable(self, *args) with an explicit bound receiver.
    template<typename... Args>
    PyObject* call_method(PyObject* self, PyObject* callable, Args&&... args){
        PUSH(callable);
        PUSH(self);
        _push_varargs(args...);
        return vectorcall(sizeof...(args));
    }

    // self.name(*args): resolves the method by name first (raises if missing).
    template<typename... Args>
    PyObject* call_method(PyObject* self, StrName name, Args&&... args){
        PyObject* callable = get_unbound_method(self, name, &self);
        return call_method(self, callable, args...);
    }

    // Builds a property object from native getter/setter functions.
    // fget takes (self); fset, when provided, takes (self, value).
    PyObject* property(NativeFuncC fget, NativeFuncC fset=nullptr){
        PyObject* p = builtins->attr("property");
        PyObject* _0 = heap.gcnew(tp_native_func, NativeFunc(fget, 1, false));
        PyObject* _1 = vm->None;
        if(fset != nullptr) _1 = heap.gcnew(tp_native_func, NativeFunc(fset, 2, false));
        return call(p, _0, _1);
    }

    // Registers a new type. The type object stores its own registry index as
    // its Type payload. If `mod` is given the type is also bound as mod.name.
    PyObject* new_type_object(PyObject* mod, StrName name, Type base){
        PyObject* obj = heap._new<Type>(tp_type, _all_types.size());
        PyTypeInfo info{
            obj,
            base,
            (mod!=nullptr && mod!=builtins) ? Str(OBJ_NAME(mod)+"."+name.sv()): name.sv()
        };
        if(mod != nullptr) mod->attr().set(name, obj);
        _all_types.push_back(info);
        return obj;
    }

    // Convenience: register an anonymous (module-less) type, returning its index.
    Type _new_type_object(StrName name, Type base=0) {
        PyObject* obj = new_type_object(nullptr, name, base);
        return OBJ_GET(Type, obj);
    }

    // Looks up a type object by name: first in builtins, then by scanning the
    // registry. Throws std::runtime_error (not a Python error) if absent.
    PyObject* _find_type(const Str& type){
        PyObject* obj = builtins->attr().try_get(type);
        if(obj == nullptr){
            for(auto& t: _all_types) if(t.name == type) return t.obj;
            throw std::runtime_error(fmt("type not found: ", type));
        }
        return obj;
    }

    // Name-based overloads of the binding helpers (resolve the type first).
    template<int ARGC>
    void bind_func(Str type, Str name, NativeFuncC fn) {
        bind_func<ARGC>(_find_type(type), name, fn);
    }

    template<int ARGC>
    void bind_method(Str type, Str name, NativeFuncC fn) {
        bind_method<ARGC>(_find_type(type), name, fn);
    }

    // A static method is just a plain function bound on the type object.
    template<int ARGC, typename... Args>
    void bind_static_method(Args&&... args) {
        bind_func<ARGC>(std::forward<Args>(args)...);
    }

    template<int ARGC>
    void bind_builtin_func(Str name, NativeFuncC fn) {
        bind_func<ARGC>(builtins, name, fn);
    }

    // Converts a possibly-negative index to [0, size); raises IndexError
    // when out of range.
    int normalized_index(int index, int size){
        if(index < 0) index += size;
        if(index < 0 || index >= size){
            IndexError(std::to_string(index) + " not in [0, " + std::to_string(size) + ")");
        }
        return index;
    }

    // Boxes a BaseIter subclass into an iterator object.
    // NOTE(review): P may deduce as an lvalue reference here; gcnew<P> appears
    // to rely on callers passing rvalues — confirm against gcnew's signature.
    template<typename P>
    PyObject* PyIter(P&& value) {
        static_assert(std::is_base_of_v<BaseIter, std::decay_t<P>>);
        return heap.gcnew<P>(tp_iterator, std::forward<P>(value));
    }

    // next(obj): native iterators are advanced directly, others via __next__.
    PyObject* PyIterNext(PyObject* obj){
        if(is_non_tagged_type(obj, tp_iterator)){
            BaseIter* iter = static_cast<BaseIter*>(obj->value());
            return iter->next();
        }
        return call_method(obj, __next__);
    }

    /***** Error Reporter *****/
    // All *Error helpers funnel through _error(Exception), which raises.
    void _error(StrName name, const Str& msg){
        _error(Exception(name, msg));
    }

    // Transfers control to the innermost exception handler, or aborts the
    // frame loop entirely when no handler exists.
    void _raise(){
        bool ok = top_frame()->jump_to_exception_handler();
        if(ok) throw HandledException();
        else throw UnhandledException();
    }

    void StackOverflowError() { _error("StackOverflowError", ""); }
    void IOError(const Str& msg) { _error("IOError", msg); }
    void NotImplementedError(){ _error("NotImplementedError", ""); }
    void TypeError(const Str& msg){ _error("TypeError", msg); }
    void ZeroDivisionError(){ _error("ZeroDivisionError", "division by zero"); }
    void IndexError(const Str& msg){ _error("IndexError", msg); }
    void ValueError(const Str& msg){ _error("ValueError", msg); }
    void NameError(StrName name){ _error("NameError", fmt("name ", name.escape() + " is not defined")); }

    void AttributeError(PyObject* obj, StrName name){
        // OBJ_NAME calls getattr, which may lead to a infinite recursion
        _error("AttributeError", fmt("type ", OBJ_NAME(_t(obj)).escape(), " has no attribute ", name.escape()));
    }

    void AttributeError(Str msg){ _error("AttributeError", msg); }

    // Type assertions: raise TypeError on mismatch, otherwise no-op.
    void check_type(PyObject* obj, Type type){
        if(is_type(obj, type)) return;
        TypeError("expected " + OBJ_NAME(_t(type)).escape() + ", but got " + OBJ_NAME(_t(obj)).escape());
    }

    void check_non_tagged_type(PyObject* obj, Type type){
        if(is_non_tagged_type(obj, type)) return;
        TypeError("expected " + OBJ_NAME(_t(type)).escape() + ", but got " + OBJ_NAME(_t(obj)).escape());
    }

    void check_int(PyObject* obj){
        if(is_int(obj)) return;
        check_type(obj, tp_int);    // not an int: raise via the generic path
    }

    void check_float(PyObject* obj){
        if(is_float(obj)) return;
        check_type(obj, tp_float);  // not a float: raise via the generic path
    }

    // Type index -> type object.
    PyObject* _t(Type t){
        return _all_types[t.index].obj;
    }

    // Object -> its type object. Tagged ints/floats have no header, so their
    // types are hardcoded.
    PyObject* _t(PyObject* obj){
        if(is_int(obj)) return _t(tp_int);
        if(is_float(obj)) return _t(tp_float);
        // NOTE(review): this resolves obj->type's type object and re-reads its
        // stored index — it appears equivalent to _all_types[obj->type.index].obj;
        // confirm before simplifying.
        return _all_types[OBJ_GET(Type, _t(obj->type)).index].obj;
    }

    ~VM() {
        callstack.clear();
        s_data.clear();
        _all_types.clear();
        _modules.clear();
        _lazy_modules.clear();
    }

    // --- implemented out of line or in vm.cpp ---
    void _log_s_data(const char* title = nullptr);
    PyObject* vectorcall(int ARGC, int KWARGC=0, bool op_call=false);
    CodeObject_ compile(Str source, Str filename, CompileMode mode, bool unknown_global_scope=false);
    PyObject* num_negated(PyObject* obj);
    f64 num_to_float(PyObject* obj);
    bool asBool(PyObject* obj);
    i64 hash(PyObject* obj);
    PyObject* asRepr(PyObject* obj);
    PyObject* new_module(StrName name);
    Str disassemble(CodeObject_ co);
    void init_builtin_types();
    PyObject* _py_call(PyObject** sp_base, PyObject* callable, ArgsView args, ArgsView kwargs);
    PyObject* getattr(PyObject* obj, StrName name, bool throw_err=true);
    PyObject* get_unbound_method(PyObject* obj, StrName name, PyObject** self, bool throw_err=true, bool fallback=false);
    void parse_int_slice(const Slice& s, int length, int& start, int& stop, int& step);
    PyObject* format(Str, PyObject*);
    void setattr(PyObject* obj, StrName name, PyObject* value);
    template<int ARGC>
    void bind_method(PyObject*, Str, NativeFuncC);
    template<int ARGC>
    void bind_func(PyObject*, Str, NativeFuncC);
    void _error(Exception);
    PyObject* _run_top_frame();
    void post_init();
};
  360. inline PyObject* NativeFunc::operator()(VM* vm, ArgsView args) const{
  361. int args_size = args.size() - (int)method; // remove self
  362. if(argc != -1 && args_size != argc) {
  363. vm->TypeError(fmt("expected ", argc, " arguments, but got ", args_size));
  364. }
  365. return f(vm, args);
  366. }
// Placeholder: a perfect-hash locals optimization was prototyped here and is
// kept as commented-out reference code; currently this is a no-op.
inline void CodeObject::optimize(VM* vm){
    // uint32_t base_n = (uint32_t)(names.size() / kLocalsLoadFactor + 0.5);
    // perfect_locals_capacity = std::max(find_next_capacity(base_n), NameDict::__Capacity);
    // perfect_hash_seed = find_perfect_hash_seed(perfect_locals_capacity, names);
}
// Cast/boxing glue for every boxed native type (see DEF_NATIVE_2 above).
DEF_NATIVE_2(Str, tp_str)
DEF_NATIVE_2(List, tp_list)
DEF_NATIVE_2(Tuple, tp_tuple)
DEF_NATIVE_2(Function, tp_function)
DEF_NATIVE_2(NativeFunc, tp_native_func)
DEF_NATIVE_2(BoundMethod, tp_bound_method)
DEF_NATIVE_2(Range, tp_range)
DEF_NATIVE_2(Slice, tp_slice)
DEF_NATIVE_2(Exception, tp_exception)
DEF_NATIVE_2(Bytes, tp_bytes)
DEF_NATIVE_2(MappingProxy, tp_mappingproxy)
// Small ints are tagged pointers: the value lives in the upper bits and the
// low 2 bits hold the tag. An arithmetic right shift by 2 recovers the value.
// py_cast checks the tag first; _py_cast is the unchecked fast path.
#define PY_CAST_INT(T) \
    template<> inline T py_cast<T>(VM* vm, PyObject* obj){ \
        vm->check_int(obj); \
        return (T)(BITS(obj) >> 2); \
    } \
    template<> inline T _py_cast<T>(VM* vm, PyObject* obj){ \
        return (T)(BITS(obj) >> 2); \
    }

PY_CAST_INT(char)
PY_CAST_INT(short)
PY_CAST_INT(int)
PY_CAST_INT(long)
PY_CAST_INT(long long)
PY_CAST_INT(unsigned char)
PY_CAST_INT(unsigned short)
PY_CAST_INT(unsigned int)
PY_CAST_INT(unsigned long)
PY_CAST_INT(unsigned long long)
// Tagged floats store the f64 bit pattern with the low 2 bits replaced by the
// tag; `(bits >> 2) << 2` zeroes the tag bits before reinterpreting as f64,
// losing only the 2 lowest mantissa bits.
template<> inline float py_cast<float>(VM* vm, PyObject* obj){
    vm->check_float(obj);
    i64 bits = BITS(obj);
    bits = (bits >> 2) << 2;    // clear tag bits
    return BitsCvt(bits)._float;
}
// Unchecked variant: assumes obj is already known to be a float.
template<> inline float _py_cast<float>(VM* vm, PyObject* obj){
    i64 bits = BITS(obj);
    bits = (bits >> 2) << 2;
    return BitsCvt(bits)._float;
}
template<> inline double py_cast<double>(VM* vm, PyObject* obj){
    vm->check_float(obj);
    i64 bits = BITS(obj);
    bits = (bits >> 2) << 2;
    return BitsCvt(bits)._float;
}
// Unchecked variant: assumes obj is already known to be a float.
template<> inline double _py_cast<double>(VM* vm, PyObject* obj){
    i64 bits = BITS(obj);
    bits = (bits >> 2) << 2;
    return BitsCvt(bits)._float;
}
// Boxes an integer as a tagged pointer: value << 2 | 0b01. The round-trip
// check ((val << 2) >> 2 != val) detects values whose top 2 bits would be
// lost and raises OverflowError instead of silently truncating.
#define PY_VAR_INT(T) \
    inline PyObject* py_var(VM* vm, T _val){ \
        i64 val = static_cast<i64>(_val); \
        if(((val << 2) >> 2) != val){ \
            vm->_error("OverflowError", std::to_string(val) + " is out of range"); \
        } \
        val = (val << 2) | 0b01; \
        return reinterpret_cast<PyObject*>(val); \
    }

PY_VAR_INT(char)
PY_VAR_INT(short)
PY_VAR_INT(int)
PY_VAR_INT(long)
PY_VAR_INT(long long)
PY_VAR_INT(unsigned char)
PY_VAR_INT(unsigned short)
PY_VAR_INT(unsigned int)
PY_VAR_INT(unsigned long)
PY_VAR_INT(unsigned long long)
// Boxes a float as a tagged pointer: the f64 bit pattern with its low 2 bits
// replaced by the 0b10 tag (sacrificing the 2 lowest mantissa bits).
#define PY_VAR_FLOAT(T) \
    inline PyObject* py_var(VM* vm, T _val){ \
        f64 val = static_cast<f64>(_val); \
        i64 bits = BitsCvt(val)._int; \
        bits = (bits >> 2) << 2; \
        bits |= 0b10; \
        return reinterpret_cast<PyObject*>(bits); \
    }

PY_VAR_FLOAT(float)
PY_VAR_FLOAT(double)
// bool is interned: exactly two instances exist, vm->True and vm->False.
inline PyObject* py_var(VM* vm, bool val){
    return val ? vm->True : vm->False;
}
template<> inline bool py_cast<bool>(VM* vm, PyObject* obj){
    vm->check_non_tagged_type(obj, vm->tp_bool);
    return obj == vm->True;
}
// Unchecked variant: assumes obj is already known to be a bool.
template<> inline bool _py_cast<bool>(VM* vm, PyObject* obj){
    return obj == vm->True;
}
// String boxing: all three overloads funnel through Str.
inline PyObject* py_var(VM* vm, const char val[]){
    return VAR(Str(val));
}
inline PyObject* py_var(VM* vm, std::string val){
    return VAR(Str(std::move(val)));
}
inline PyObject* py_var(VM* vm, std::string_view val){
    return VAR(Str(val));
}
// Asserts that obj is an instance of the user-registered class T;
// T::_type(vm) yields the Type slot the binding layer registered for T.
template<typename T>
void _check_py_class(VM* vm, PyObject* obj){
    vm->check_non_tagged_type(obj, T::_type(vm));
}
  475. inline PyObject* VM::num_negated(PyObject* obj){
  476. if (is_int(obj)){
  477. return VAR(-CAST(i64, obj));
  478. }else if(is_float(obj)){
  479. return VAR(-CAST(f64, obj));
  480. }
  481. TypeError("expected 'int' or 'float', got " + OBJ_NAME(_t(obj)).escape());
  482. return nullptr;
  483. }
  484. inline f64 VM::num_to_float(PyObject* obj){
  485. if(is_float(obj)){
  486. return CAST(f64, obj);
  487. } else if (is_int(obj)){
  488. return (f64)CAST(i64, obj);
  489. }
  490. TypeError("expected 'int' or 'float', got " + OBJ_NAME(_t(obj)).escape());
  491. return 0;
  492. }
// Python truthiness: bool -> itself, None -> False, numbers -> nonzero,
// objects defining __len__ -> len > 0, everything else -> True.
inline bool VM::asBool(PyObject* obj){
    if(is_non_tagged_type(obj, tp_bool)) return obj == True;
    if(obj == None) return false;
    if(is_int(obj)) return CAST(i64, obj) != 0;
    if(is_float(obj)) return CAST(f64, obj) != 0.0;
    PyObject* self;
    PyObject* len_f = get_unbound_method(obj, __len__, &self, false);
    if(self != PY_NULL){
        PyObject* ret = call_method(self, len_f);
        return CAST(i64, ret) > 0;
    }
    // NOTE: __bool__ is not consulted; objects without __len__ are truthy.
    return true;
}
// Normalizes a slice against a sequence of `length` items, producing concrete
// start/stop/step following CPython semantics: None fields get direction-
// dependent defaults, negative indices count from the end, and the results
// are clamped to [0, length] for forward slices or [-1, length-1] for
// backward ones (so the iteration `for(i=start; i!=stop; i+=step)` is safe).
inline void VM::parse_int_slice(const Slice& s, int length, int& start, int& stop, int& step){
    auto clip = [](int value, int min, int max){
        if(value < min) return min;
        if(value > max) return max;
        return value;
    };
    if(s.step == None) step = 1;
    else step = CAST(int, s.step);
    if(step == 0) ValueError("slice step cannot be zero");
    if(step > 0){
        // forward slice: defaults are [0, length)
        if(s.start == None){
            start = 0;
        }else{
            start = CAST(int, s.start);
            if(start < 0) start += length;      // negative index counts from the end
            start = clip(start, 0, length);
        }
        if(s.stop == None){
            stop = length;
        }else{
            stop = CAST(int, s.stop);
            if(stop < 0) stop += length;
            stop = clip(stop, 0, length);
        }
    }else{
        // backward slice: defaults are [length-1, -1); -1 is the exclusive
        // sentinel one step past index 0
        if(s.start == None){
            start = length - 1;
        }else{
            start = CAST(int, s.start);
            if(start < 0) start += length;
            start = clip(start, -1, length - 1);
        }
        if(s.stop == None){
            stop = -1;
        }else{
            stop = CAST(int, s.stop);
            if(stop < 0) stop += length;
            stop = clip(stop, -1, length - 1);
        }
    }
}
// hash(obj) for the built-in hashable types; raises TypeError for the rest.
// Identity-based types (type objects, iterators) hash by pointer bits.
inline i64 VM::hash(PyObject* obj){
    if (is_non_tagged_type(obj, tp_str)) return CAST(Str&, obj).hash();
    if (is_int(obj)) return CAST(i64, obj);
    if (is_non_tagged_type(obj, tp_tuple)) {
        // combine element hashes (boost-style hash_combine with 0x9e3779b9)
        i64 x = 1000003;
        const Tuple& items = CAST(Tuple&, obj);
        for (int i=0; i<items.size(); i++) {
            i64 y = hash(items[i]);
            // recommended by Github Copilot
            x = x ^ (y + 0x9e3779b9 + (x << 6) + (x >> 2));
        }
        return x;
    }
    if (is_non_tagged_type(obj, tp_type)) return BITS(obj);
    if (is_non_tagged_type(obj, tp_iterator)) return BITS(obj);
    if (is_non_tagged_type(obj, tp_bool)) return _CAST(bool, obj) ? 1 : 0;
    if (is_float(obj)){
        f64 val = CAST(f64, obj);
        return (i64)std::hash<f64>()(val);
    }
    TypeError("unhashable type: " + OBJ_NAME(_t(obj)).escape());
    return 0;   // unreachable: TypeError raises
}

// repr(obj): always dispatches to __repr__ (every object has one).
inline PyObject* VM::asRepr(PyObject* obj){
    return call_method(obj, __repr__);
}
  573. inline PyObject* VM::format(Str spec, PyObject* obj){
  574. if(spec.empty()) return asStr(obj);
  575. char type;
  576. switch(spec.end()[-1]){
  577. case 'f': case 'd': case 's':
  578. type = spec.end()[-1];
  579. spec = spec.substr(0, spec.length() - 1);
  580. break;
  581. default: type = ' '; break;
  582. }
  583. char pad_c = ' ';
  584. if(spec[0] == '0'){
  585. pad_c = '0';
  586. spec = spec.substr(1);
  587. }
  588. char align;
  589. if(spec[0] == '>'){
  590. align = '>';
  591. spec = spec.substr(1);
  592. }else if(spec[0] == '<'){
  593. align = '<';
  594. spec = spec.substr(1);
  595. }else{
  596. if(is_int(obj) || is_float(obj)) align = '>';
  597. else align = '<';
  598. }
  599. int dot = spec.index(".");
  600. int width, precision;
  601. try{
  602. if(dot >= 0){
  603. width = Number::stoi(spec.substr(0, dot).str());
  604. precision = Number::stoi(spec.substr(dot+1).str());
  605. }else{
  606. width = Number::stoi(spec.str());
  607. precision = -1;
  608. }
  609. }catch(...){
  610. ValueError("invalid format specifer");
  611. }
  612. if(type != 'f' && dot >= 0) ValueError("precision not allowed in the format specifier");
  613. Str ret;
  614. if(type == 'f'){
  615. f64 val = num_to_float(obj);
  616. if(precision < 0) precision = 6;
  617. std::stringstream ss;
  618. ss << std::fixed << std::setprecision(precision) << val;
  619. ret = ss.str();
  620. }else if(type == 'd'){
  621. ret = std::to_string(CAST(i64, obj));
  622. }else if(type == 's'){
  623. ret = CAST(Str&, obj);
  624. }else{
  625. ret = CAST(Str&, asStr(obj));
  626. }
  627. if(width > ret.length()){
  628. int pad = width - ret.length();
  629. std::string padding(pad, pad_c);
  630. if(align == '>') ret = padding.c_str() + ret;
  631. else ret = ret + padding.c_str();
  632. }
  633. return VAR(ret);
  634. }
// Creates and registers a new module object under `name`.
// Modules are GC roots for their lifetime, so re-registering a name is a
// fatal error rather than a silent leak.
inline PyObject* VM::new_module(StrName name) {
    PyObject* obj = heap._new<DummyModule>(tp_module, DummyModule());
    obj->attr().set(__name__, VAR(name.sv()));
    // we do not allow override in order to avoid memory leak
    // it is because Module objects are not garbage collected
    if(_modules.contains(name)) FATAL_ERROR();
    _modules.set(name, obj);
    return obj;
}
// Renders a bytecode argument for disassembly: the raw integer plus, for
// opcodes whose argument is an index into a table (constants, names, locals,
// binary ops, function decls), a human-readable annotation in parentheses.
// `vm` may be nullptr (e.g. from _log_s_data), in which case constants are
// not repr'd.
inline std::string _opcode_argstr(VM* vm, Bytecode byte, const CodeObject* co){
    std::string argStr = byte.arg == -1 ? "" : std::to_string(byte.arg);
    switch(byte.op){
        case OP_LOAD_CONST:
            if(vm != nullptr){
                argStr += fmt(" (", CAST(Str, vm->asRepr(co->consts[byte.arg])), ")");
            }
            break;
        // arg is a StrName index
        case OP_LOAD_NAME: case OP_LOAD_GLOBAL: case OP_LOAD_NONLOCAL: case OP_STORE_GLOBAL:
        case OP_LOAD_ATTR: case OP_LOAD_METHOD: case OP_STORE_ATTR: case OP_DELETE_ATTR:
        case OP_IMPORT_NAME: case OP_BEGIN_CLASS:
        case OP_DELETE_GLOBAL:
            argStr += fmt(" (", StrName(byte.arg).sv(), ")");
            break;
        // arg is a local variable slot
        case OP_LOAD_FAST: case OP_STORE_FAST: case OP_DELETE_FAST:
            argStr += fmt(" (", co->varnames[byte.arg].sv(), ")");
            break;
        case OP_BINARY_OP:
            argStr += fmt(" (", BINARY_SPECIAL_METHODS[byte.arg], ")");
            break;
        case OP_LOAD_FUNCTION:
            argStr += fmt(" (", co->func_decls[byte.arg]->code->name, ")");
            break;
    }
    return argStr;
}
// Produces a human-readable listing of a code object: one line per bytecode
// with source line, jump-target arrow, opcode name, annotated argument and
// block type, followed by recursive listings of nested function declarations.
inline Str VM::disassemble(CodeObject_ co){
    // pad/truncate a string to exactly n columns
    auto pad = [](const Str& s, const int n){
        if(s.length() >= n) return s.substr(0, n);
        return s + std::string(n - s.length(), ' ');
    };
    // collect jump destinations so they can be marked with "->"
    std::vector<int> jumpTargets;
    for(auto byte : co->codes){
        if(byte.op == OP_JUMP_ABSOLUTE || byte.op == OP_POP_JUMP_IF_FALSE){
            jumpTargets.push_back(byte.arg);
        }
    }
    std::stringstream ss;
    int prev_line = -1;
    for(int i=0; i<co->codes.size(); i++){
        const Bytecode& byte = co->codes[i];
        Str line = std::to_string(co->lines[i]);
        if(co->lines[i] == prev_line) line = "";    // only print a source line once
        else{
            if(prev_line != -1) ss << "\n";         // blank line between source lines
            prev_line = co->lines[i];
        }
        std::string pointer;
        if(std::find(jumpTargets.begin(), jumpTargets.end(), i) != jumpTargets.end()){
            pointer = "-> ";
        }else{
            pointer = "   ";
        }
        ss << pad(line, 8) << pointer << pad(std::to_string(i), 3);
        ss << " " << pad(OP_NAMES[byte.op], 20) << " ";
        // ss << pad(byte.arg == -1 ? "" : std::to_string(byte.arg), 5);
        std::string argStr = _opcode_argstr(this, byte, co.get());
        ss << pad(argStr, 40);      // may overflow
        ss << co->blocks[byte.block].type;
        if(i != co->codes.size() - 1) ss << '\n';
    }
    // recurse into nested function declarations
    for(auto& decl: co->func_decls){
        ss << "\n\n" << "Disassembly of " << decl->code->name << ":\n";
        ss << disassemble(decl->code);
    }
    ss << "\n";
    return Str(ss.str());
}
// Debug helper: dumps the current value stack to stdout, one summarized entry
// per slot, with '|' markers where each frame's stack base begins, followed by
// the instruction about to execute. No-op before init completes or when no
// frame is active.
inline void VM::_log_s_data(const char* title) {
    if(!_initialized) return;
    if(callstack.empty()) return;
    std::stringstream ss;
    if(title) ss << title << " | ";
    // count how many frames start at each stack position
    std::map<PyObject**, int> sp_bases;
    for(Frame& f: callstack.data()){
        if(f._sp_base == nullptr) FATAL_ERROR();
        sp_bases[f._sp_base] += 1;
    }
    FrameId frame = top_frame();
    int line = frame->co->lines[frame->_ip];
    ss << frame->co->name << ":" << line << " [";
    for(PyObject** p=s_data.begin(); p!=s_data.end(); p++){
        ss << std::string(sp_bases[p], '|');    // frame-base markers
        if(sp_bases[p] > 0) ss << " ";
        PyObject* obj = *p;
        // summarize each stack slot without invoking Python-level repr
        if(obj == nullptr) ss << "(nil)";
        else if(obj == PY_BEGIN_CALL) ss << "BEGIN_CALL";
        else if(obj == PY_NULL) ss << "NULL";
        else if(is_int(obj)) ss << CAST(i64, obj);
        else if(is_float(obj)) ss << CAST(f64, obj);
        else if(is_type(obj, tp_str)) ss << CAST(Str, obj).escape();
        else if(obj == None) ss << "None";
        else if(obj == True) ss << "True";
        else if(obj == False) ss << "False";
        else if(is_type(obj, tp_function)){
            auto& f = CAST(Function&, obj);
            ss << f.decl->code->name << "(...)";
        } else if(is_type(obj, tp_type)){
            Type t = OBJ_GET(Type, obj);
            ss << "<class " + _all_types[t].name.escape() + ">";
        } else if(is_type(obj, tp_list)){
            auto& t = CAST(List&, obj);
            ss << "list(size=" << t.size() << ")";
        } else if(is_type(obj, tp_tuple)){
            auto& t = CAST(Tuple&, obj);
            ss << "tuple(size=" << t.size() << ")";
        } else ss << "(" << obj_type_name(this, obj->type) << ")";
        ss << ", ";
    }
    std::string output = ss.str();
    if(!s_data.empty()) {
        output.pop_back(); output.pop_back();   // drop trailing ", "
    }
    output.push_back(']');
    Bytecode byte = frame->co->codes[frame->_ip];
    // vm=nullptr: avoid re-entrant repr of constants while logging
    std::cout << output << " " << OP_NAMES[byte.op] << " " << _opcode_argstr(nullptr, byte, frame->co) << std::endl;
}
// Bootstrap the builtin type table, create the core singletons, set up the
// `builtins` and `__main__` modules, and expose the public types.
// NOTE(order matters): each `_new_type_object` call takes the next index in
// `_all_types`; tagged-value dispatch assumes `int`/`float` land exactly on
// kTpIntIndex/kTpFloatIndex, which is asserted below. Do not reorder.
inline void VM::init_builtin_types(){
// `object` (index 0) and `type` (index 1) are registered by hand since
// `_new_type_object` itself needs them to exist.
_all_types.push_back({heap._new<Type>(Type(1), Type(0)), -1, "object"});
_all_types.push_back({heap._new<Type>(Type(1), Type(1)), 0, "type"});
tp_object = 0; tp_type = 1;
tp_int = _new_type_object("int");
tp_float = _new_type_object("float");
if(tp_int.index != kTpIntIndex || tp_float.index != kTpFloatIndex) FATAL_ERROR();
tp_bool = _new_type_object("bool");
tp_str = _new_type_object("str");
tp_list = _new_type_object("list");
tp_tuple = _new_type_object("tuple");
tp_slice = _new_type_object("slice");
tp_range = _new_type_object("range");
tp_module = _new_type_object("module");
tp_function = _new_type_object("function");
tp_native_func = _new_type_object("native_func");
tp_iterator = _new_type_object("iterator");
tp_bound_method = _new_type_object("bound_method");
tp_super = _new_type_object("super");
tp_exception = _new_type_object("Exception");
tp_bytes = _new_type_object("bytes");
tp_mappingproxy = _new_type_object("mappingproxy");
// singletons (each gets its own hidden type where needed)
this->None = heap._new<Dummy>(_new_type_object("NoneType"), {});
this->Ellipsis = heap._new<Dummy>(_new_type_object("ellipsis"), {});
this->True = heap._new<Dummy>(tp_bool, {});
this->False = heap._new<Dummy>(tp_bool, {});
this->StopIteration = heap._new<Dummy>(_new_type_object("StopIterationType"), {});
this->builtins = new_module("builtins");
this->_main = new_module("__main__");
// setup public types
builtins->attr().set("type", _t(tp_type));
builtins->attr().set("object", _t(tp_object));
builtins->attr().set("bool", _t(tp_bool));
builtins->attr().set("int", _t(tp_int));
builtins->attr().set("float", _t(tp_float));
builtins->attr().set("str", _t(tp_str));
builtins->attr().set("list", _t(tp_list));
builtins->attr().set("tuple", _t(tp_tuple));
builtins->attr().set("range", _t(tp_range));
builtins->attr().set("bytes", _t(tp_bytes));
builtins->attr().set("StopIteration", StopIteration);
builtins->attr().set("slice", _t(tp_slice));
post_init();
// Freeze attribute tables into their perfect-hash layout now that all
// builtin attributes have been inserted.
for(int i=0; i<_all_types.size(); i++){
_all_types[i].obj->attr()._try_perfect_rehash();
}
for(auto [k, v]: _modules.items()) v->attr()._try_perfect_rehash();
}
// Invoke the callable currently laid out on the value stack.
// Expected layout: [callable, <self or PY_NULL>, args..., kwargs...], where
// kwargs are KWARGC (key, value) pairs. ARGC == 0xFFFF marks a varargs call
// whose start is found by scanning down for the PY_BEGIN_CALL sentinel.
// When `op_call` is true and the callee is a python `function`, the sentinel
// PY_OP_CALL is returned instead of running the new frame to completion.
// The stack is reset to `p0` (the base of this call) before returning.
inline PyObject* VM::vectorcall(int ARGC, int KWARGC, bool op_call){
bool is_varargs = ARGC == 0xFFFF;
PyObject** p0;
PyObject** p1 = s_data._sp - KWARGC*2;   // p1: end of positional args / start of kwargs
if(is_varargs){
p0 = p1 - 1;
while(*p0 != PY_BEGIN_CALL) p0--;   // scan down for the sentinel
// [BEGIN_CALL, callable, <self>, args..., kwargs...]
//      ^p0                                ^p1      ^_sp
ARGC = p1 - (p0 + 3);   // recover the true positional count
}else{
p0 = p1 - ARGC - 2 - (int)is_varargs;
// [callable, <self>, args..., kwargs...]
//      ^p0                    ^p1      ^_sp
}
PyObject* callable = p1[-(ARGC + 2)];
bool method_call = p1[-(ARGC + 1)] != PY_NULL;
// handle boundmethod, do a patch
if(is_non_tagged_type(callable, tp_bound_method)){
if(method_call) FATAL_ERROR();  // bound method cannot also be a method call
auto& bm = CAST(BoundMethod&, callable);
callable = bm.func; // get unbound method
p1[-(ARGC + 2)] = bm.func;
p1[-(ARGC + 1)] = bm.self;
method_call = true;
// [unbound, self, args..., kwargs...]
}
// when method_call, `self` is prepended as an extra positional arg
ArgsView args(p1 - ARGC - int(method_call), p1);
if(is_non_tagged_type(callable, tp_native_func)){
const auto& f = OBJ_GET(NativeFunc, callable);
if(KWARGC != 0) TypeError("native_func does not accept keyword arguments");
PyObject* ret = f(this, args);
s_data.reset(p0);
return ret;
}
ArgsView kwargs(p1, s_data._sp);
if(is_non_tagged_type(callable, tp_function)){
// ret is nullptr or a generator
PyObject* ret = _py_call(p0, callable, args, kwargs);
// stack resetting is handled by _py_call
if(ret != nullptr) return ret;
if(op_call) return PY_OP_CALL;
return _run_top_frame();
}
// calling a type performs construction: __new__ (if any) then __init__
if(is_non_tagged_type(callable, tp_type)){
if(method_call) FATAL_ERROR();
// [type, NULL, args..., kwargs...]
// TODO: derived __new__ ?
PyObject* new_f = callable->attr().try_get(__new__);
PyObject* obj;
if(new_f != nullptr){
// re-push the arguments and call __new__ recursively
PUSH(new_f);
PUSH(PY_NULL);
for(PyObject* obj: args) PUSH(obj);
for(PyObject* obj: kwargs) PUSH(obj);
obj = vectorcall(ARGC, KWARGC);
// __new__ may return a foreign instance; skip __init__ in that case
if(!isinstance(obj, OBJ_GET(Type, callable))) return obj;
}else{
obj = heap.gcnew<DummyInstance>(OBJ_GET(Type, callable), {});
}
PyObject* self;
callable = get_unbound_method(obj, __init__, &self, false);
if (self != PY_NULL) {
// replace `NULL` with `self`
p1[-(ARGC + 2)] = callable;
p1[-(ARGC + 1)] = self;
// [init_f, self, args..., kwargs...]
vectorcall(ARGC, KWARGC);
// We just discard the return value of `__init__`
// in cpython it raises a TypeError if the return value is not None
}else{
// manually reset the stack
s_data.reset(p0);
}
return obj;
}
// handle `__call__` overload
PyObject* self;
PyObject* call_f = get_unbound_method(callable, __call__, &self, false);
if(self != PY_NULL){
p1[-(ARGC + 2)] = call_f;
p1[-(ARGC + 1)] = self;
// [call_f, self, args..., kwargs...]
return vectorcall(ARGC, KWARGC, false);
}
TypeError(OBJ_NAME(_t(callable)).escape() + " object is not callable");
return nullptr;
}
// Bind `args`/`kwargs` to a python `function` and start (or create) its frame.
// `p0` is the stack base of this call; the stack is reset to it here.
// Returns nullptr after pushing a new frame onto `callstack`, or a generator
// object when `co->is_generator` (the caller treats non-null as a generator).
// Raises TypeError on arity/keyword mismatches, StackOverflowError on overflow.
inline PyObject* VM::_py_call(PyObject** p0, PyObject* callable, ArgsView args, ArgsView kwargs){
// callable must be a `function` object
if(s_data.is_overflow()) StackOverflowError();
const Function& fn = CAST(Function&, callable);
const CodeObject* co = fn.decl->code.get();
if(args.size() < fn.argc){
vm->TypeError(fmt(
"expected ",
fn.argc,
" positional arguments, but got ",
args.size(),
" (", fn.decl->code->name, ')'
));
}
// if this function is simple, a.k.a, no kwargs and no *args and not a generator
// we can use a fast path to avoid using buffer copy
if(fn.is_simple){
if(args.size() > fn.argc) TypeError("too many positional arguments");
// reserve slots for the remaining (non-argument) varnames
int spaces = co->varnames.size() - fn.argc;
for(int j=0; j<spaces; j++) PUSH(nullptr);
callstack.emplace(&s_data, p0, co, fn._module, callable, FastLocals(co, args.begin()));
return nullptr;
}
// Slow path: assemble the locals in a scratch buffer, then materialize.
int i = 0;
static THREAD_LOCAL PyObject* buffer[PK_MAX_CO_VARNAMES];
// prepare args
for(int index: fn.decl->args) buffer[index] = args[i++];
// set extra varnames to nullptr
for(int j=i; j<co->varnames.size(); j++) buffer[j] = nullptr;
// prepare kwdefaults
for(auto& kv: fn.decl->kwargs) buffer[kv.key] = kv.value;
// handle *args
if(fn.decl->starred_arg != -1){
List vargs; // handle *args
while(i < args.size()) vargs.push_back(args[i++]);
buffer[fn.decl->starred_arg] = VAR(Tuple(std::move(vargs)));
}else{
// kwdefaults override: leftover positionals fill keyword params in order
for(auto& kv: fn.decl->kwargs){
if(i < args.size()){
buffer[kv.key] = args[i++];
}else{
break;
}
}
if(i < args.size()) TypeError(fmt("too many arguments", " (", fn.decl->code->name, ')'));
}
// explicit keyword arguments (stored as name/value pairs) override everything
for(int i=0; i<kwargs.size(); i+=2){
StrName key = CAST(int, kwargs[i]);
int index = co->varnames_inv.try_get(key);
if(index<0) TypeError(fmt(key.escape(), " is an invalid keyword argument for ", co->name, "()"));
buffer[index] = kwargs[i+1];
}
s_data.reset(p0);
if(co->is_generator){
// generators keep their locals in the Generator object, not on the stack
PyObject* ret = PyIter(Generator(
this,
Frame(&s_data, nullptr, co, fn._module, callable),
ArgsView(buffer, buffer + co->varnames.size())
));
return ret;
}
// copy buffer to stack
for(int i=0; i<co->varnames.size(); i++) PUSH(buffer[i]);
callstack.emplace(&s_data, p0, co, fn._module, callable);
return nullptr;
}
  964. // https://docs.python.org/3/howto/descriptor.html#invocation-from-an-instance
  965. inline PyObject* VM::getattr(PyObject* obj, StrName name, bool throw_err){
  966. PyObject* objtype = _t(obj);
  967. // handle super() proxy
  968. if(is_non_tagged_type(obj, tp_super)){
  969. const Super& super = OBJ_GET(Super, obj);
  970. obj = super.first;
  971. objtype = _t(super.second);
  972. }
  973. PyObject* cls_var = find_name_in_mro(objtype, name);
  974. if(cls_var != nullptr){
  975. // handle descriptor
  976. PyObject* descr_get = _t(cls_var)->attr().try_get(__get__);
  977. if(descr_get != nullptr) return call_method(cls_var, descr_get, obj);
  978. }
  979. // handle instance __dict__
  980. if(!is_tagged(obj) && obj->is_attr_valid()){
  981. PyObject* val = obj->attr().try_get(name);
  982. if(val != nullptr) return val;
  983. }
  984. if(cls_var != nullptr){
  985. // bound method is non-data descriptor
  986. if(is_non_tagged_type(cls_var, tp_function) || is_non_tagged_type(cls_var, tp_native_func)){
  987. return VAR(BoundMethod(obj, cls_var));
  988. }
  989. return cls_var;
  990. }
  991. if(throw_err) AttributeError(obj, name);
  992. return nullptr;
  993. }
  994. // used by OP_LOAD_METHOD
// try to load an unbound method (falls back to `getattr` if not found)
  996. inline PyObject* VM::get_unbound_method(PyObject* obj, StrName name, PyObject** self, bool throw_err, bool fallback){
  997. *self = PY_NULL;
  998. PyObject* objtype = _t(obj);
  999. // handle super() proxy
  1000. if(is_non_tagged_type(obj, tp_super)){
  1001. const Super& super = OBJ_GET(Super, obj);
  1002. obj = super.first;
  1003. objtype = _t(super.second);
  1004. }
  1005. PyObject* cls_var = find_name_in_mro(objtype, name);
  1006. if(fallback){
  1007. if(cls_var != nullptr){
  1008. // handle descriptor
  1009. PyObject* descr_get = _t(cls_var)->attr().try_get(__get__);
  1010. if(descr_get != nullptr) return call_method(cls_var, descr_get, obj);
  1011. }
  1012. // handle instance __dict__
  1013. if(!is_tagged(obj) && obj->is_attr_valid()){
  1014. PyObject* val = obj->attr().try_get(name);
  1015. if(val != nullptr) return val;
  1016. }
  1017. }
  1018. if(cls_var != nullptr){
  1019. if(is_non_tagged_type(cls_var, tp_function) || is_non_tagged_type(cls_var, tp_native_func)){
  1020. *self = obj;
  1021. }
  1022. return cls_var;
  1023. }
  1024. if(throw_err) AttributeError(obj, name);
  1025. return nullptr;
  1026. }
  1027. inline void VM::setattr(PyObject* obj, StrName name, PyObject* value){
  1028. PyObject* objtype = _t(obj);
  1029. // handle super() proxy
  1030. if(is_non_tagged_type(obj, tp_super)){
  1031. Super& super = OBJ_GET(Super, obj);
  1032. obj = super.first;
  1033. objtype = _t(super.second);
  1034. }
  1035. PyObject* cls_var = find_name_in_mro(objtype, name);
  1036. if(cls_var != nullptr){
  1037. // handle descriptor
  1038. PyObject* cls_var_t = _t(cls_var);
  1039. if(cls_var_t->attr().contains(__get__)){
  1040. PyObject* descr_set = cls_var_t->attr().try_get(__set__);
  1041. if(descr_set != nullptr){
  1042. call_method(cls_var, descr_set, obj, value);
  1043. }else{
  1044. TypeError(fmt("readonly attribute: ", name.escape()));
  1045. }
  1046. return;
  1047. }
  1048. }
  1049. // handle instance __dict__
  1050. if(is_tagged(obj) || !obj->is_attr_valid()) TypeError("cannot set attribute");
  1051. obj->attr().set(name, value);
  1052. }
  1053. template<int ARGC>
  1054. void VM::bind_method(PyObject* obj, Str name, NativeFuncC fn) {
  1055. check_non_tagged_type(obj, tp_type);
  1056. obj->attr().set(name, VAR(NativeFunc(fn, ARGC, true)));
  1057. }
  1058. template<int ARGC>
  1059. void VM::bind_func(PyObject* obj, Str name, NativeFuncC fn) {
  1060. obj->attr().set(name, VAR(NativeFunc(fn, ARGC, false)));
  1061. }
  1062. inline void VM::_error(Exception e){
  1063. if(callstack.empty()){
  1064. e.is_re = false;
  1065. throw e;
  1066. }
  1067. PUSH(VAR(e));
  1068. _raise();
  1069. }
  1070. inline void ManagedHeap::mark() {
  1071. for(PyObject* obj: _no_gc) OBJ_MARK(obj);
  1072. for(auto& frame : vm->callstack.data()) frame._gc_mark();
  1073. for(PyObject* obj: vm->s_data) if(obj!=nullptr) OBJ_MARK(obj);
  1074. if(vm->_gc_marker_ex != nullptr) vm->_gc_marker_ex(vm);
  1075. }
  1076. inline Str obj_type_name(VM *vm, Type type){
  1077. return vm->_all_types[type].name;
  1078. }
  1079. #undef PY_VAR_INT
  1080. #undef PY_VAR_FLOAT
  1081. } // namespace pkpy