// vm.h
  1. #pragma once
  2. #include "codeobject.h"
  3. #include "common.h"
  4. #include "frame.h"
  5. #include "error.h"
  6. #include "gc.h"
  7. #include "memory.h"
  8. #include "obj.h"
  9. #include "str.h"
  10. #include "tuplelist.h"
  11. namespace pkpy{
/* Stack manipulation macros */
// https://github.com/python/cpython/blob/3.9/Python/ceval.c#L1123
// These operate on the VM's value stack `s_data`; they are only meaningful
// in a scope where `s_data` is visible (i.e. inside VM methods).
#define TOP() (s_data.top())
#define SECOND() (s_data.second())
#define THIRD() (s_data.third())
#define PEEK(n) (s_data.peek(n))
#define STACK_SHRINK(n) (s_data.shrink(n))
#define PUSH(v) (s_data.push(v))
#define POP() (s_data.pop())
#define POPX() (s_data.popx())
#define STACK_VIEW(n) (s_data.view(n))

// Hook for reading a file relative to the current working directory.
// Presumably consumed by the import machinery — TODO confirm against callers.
// The default stub returns empty Bytes.
typedef Bytes (*ReadFileCwdFunc)(const Str& name);
inline ReadFileCwdFunc _read_file_cwd = [](const Str& name) { return Bytes(); };
// Installs a custom reader; returns 0 so it can be used in a static initializer.
inline int set_read_file_cwd(ReadFileCwdFunc func) { _read_file_cwd = func; return 0; }
// Declares the cast/box helpers for a heap-allocated payload type `ctype`
// whose runtime type handle is the VM member `ptype`:
//   py_cast<ctype>  / py_cast<ctype&>  — type-checked extraction
//   _py_cast<ctype> / _py_cast<ctype&> — unchecked fast-path extraction
//   py_var(vm, ctype)                  — boxes a value into a new gc object
#define DEF_NATIVE_2(ctype, ptype) \
template<> inline ctype py_cast<ctype>(VM* vm, PyObject* obj) { \
vm->check_non_tagged_type(obj, vm->ptype); \
return OBJ_GET(ctype, obj); \
} \
template<> inline ctype _py_cast<ctype>(VM* vm, PyObject* obj) { \
return OBJ_GET(ctype, obj); \
} \
template<> inline ctype& py_cast<ctype&>(VM* vm, PyObject* obj) { \
vm->check_non_tagged_type(obj, vm->ptype); \
return OBJ_GET(ctype, obj); \
} \
template<> inline ctype& _py_cast<ctype&>(VM* vm, PyObject* obj) { \
return OBJ_GET(ctype, obj); \
} \
inline PyObject* py_var(VM* vm, const ctype& value) { return vm->heap.gcnew(vm->ptype, value);} \
inline PyObject* py_var(VM* vm, ctype&& value) { return vm->heap.gcnew(vm->ptype, std::move(value));}
// Iterator backing a python generator: owns the suspended Frame and a copy
// of the operand-stack values it was created with.
class Generator final: public BaseIter {
    Frame frame;        // the captured (suspended) frame
    int state;          // 0,1,2 — lifecycle flag; transitions happen in next() (defined elsewhere)
    List s_backup;      // stack values saved from `buffer` at construction
public:
    Generator(VM* vm, Frame&& frame, ArgsView buffer): BaseIter(vm), frame(std::move(frame)), state(0) {
        for(PyObject* obj: buffer) s_backup.push_back(obj);
    }
    PyObject* next() override;
    void _gc_mark() const;      // marks frame/backup contents for the GC
};
// Registry entry for one python type; VM::_all_types is indexed by Type.
struct PyTypeInfo{
    PyObject* obj;      // the type object itself
    Type base;          // base type index; index == -1 marks the root of the chain
    Str name;           // display name, possibly qualified as "module.name"
};
// Index-based handle to a Frame inside the callstack's vector. Using an
// index rather than a raw Frame* keeps the handle valid if the vector
// reallocates while the frame is alive.
struct FrameId{
    std::vector<pkpy::Frame>* data;
    int index;
    FrameId(std::vector<pkpy::Frame>* data, int index) : data(data), index(index) {}
    Frame* operator->() const { return &data->operator[](index); }
};

// Signature of the redirectable stdout/stderr sinks on VM.
typedef void(*PrintFunc)(VM*, const Str&);
// The interpreter core: owns the heap, the value stack, the call stack,
// the type registry and the module table.
class VM {
    VM* vm;     // self reference, so code inside methods can uniformly write vm->
public:
    ManagedHeap heap;                       // owns all garbage-collected PyObjects
    ValueStack s_data;                      // operand stack shared by all frames
    stack< Frame > callstack;               // python call stack
    std::vector<PyTypeInfo> _all_types;     // type registry; a Type is an index into this vector
    void (*_gc_marker_ex)(VM*) = nullptr;   // optional extra mark hook for embedders
    NameDict _modules; // loaded modules
    std::map<StrName, Str> _lazy_modules; // lazy loaded modules
    PyObject* None;
    PyObject* True;
    PyObject* False;
    PyObject* Ellipsis;
    PyObject* builtins; // builtins module
    PyObject* StopIteration;
    PyObject* _main; // __main__ module
    PrintFunc _stdout;      // redirectable stdout sink
    PrintFunc _stderr;      // redirectable stderr sink
    bool _initialized;      // set true once init_builtin_types() completes
    // for quick access
    Type tp_object, tp_type, tp_int, tp_float, tp_bool, tp_str;
    Type tp_list, tp_tuple;
    Type tp_function, tp_native_func, tp_iterator, tp_bound_method;
    Type tp_slice, tp_range, tp_module;
    Type tp_super, tp_exception, tp_bytes, tp_mappingproxy;
    const bool enable_os;

    VM(bool enable_os=true) : heap(this), enable_os(enable_os) {
        this->vm = this;
        _stdout = [](VM* vm, const Str& s) { std::cout << s; };
        _stderr = [](VM* vm, const Str& s) { std::cerr << s; };
        callstack.reserve(8);
        _initialized = false;
        init_builtin_types();
        _initialized = true;
    }

    // Handle to the innermost frame (index-based, see FrameId).
    FrameId top_frame() {
#if DEBUG_EXTRA_CHECK
        if(callstack.empty()) FATAL_ERROR();
#endif
        return FrameId(&callstack.data(), callstack.size()-1);
    }

    // str(obj): use __str__ when available, otherwise fall back to repr().
    PyObject* asStr(PyObject* obj){
        PyObject* self;
        PyObject* f = get_unbound_method(obj, __str__, &self, false);
        if(self != PY_NULL) return call_method(self, f);
        return asRepr(obj);
    }

    // iter(obj): identity for iterators, else dispatch __iter__;
    // raises TypeError when the object has neither.
    PyObject* asIter(PyObject* obj){
        if(is_type(obj, tp_iterator)) return obj;
        PyObject* self;
        PyObject* iter_f = get_unbound_method(obj, __iter__, &self, false);
        if(self != PY_NULL) return call_method(self, iter_f);
        TypeError(OBJ_NAME(_t(obj)).escape() + " object is not iterable");
        return nullptr;
    }

    // list(it): identity for lists, else build a list by calling the list type.
    PyObject* asList(PyObject* it){
        if(is_non_tagged_type(it, tp_list)) return it;
        return call(_t(tp_list), it);
    }

    // Walks cls and its (single-inheritance) base chain looking for `name`.
    // Returns nullptr when not found anywhere in the chain.
    PyObject* find_name_in_mro(PyObject* cls, StrName name){
        PyObject* val;
        do{
            val = cls->attr().try_get(name);
            if(val != nullptr) return val;
            Type cls_t = OBJ_GET(Type, cls);
            Type base = _all_types[cls_t].base;
            if(base.index == -1) break;     // reached the root
            cls = _all_types[base].obj;
        }while(true);
        return nullptr;
    }

    // isinstance(obj, cls): walks the base chain of obj's type.
    bool isinstance(PyObject* obj, Type cls_t){
        Type obj_t = OBJ_GET(Type, _t(obj));
        do{
            if(obj_t == cls_t) return true;
            Type base = _all_types[obj_t].base;
            if(base.index == -1) break;
            obj_t = base;
        }while(true);
        return false;
    }

    // Compile and run `source` in `_module` (default __main__). On an uncaught
    // error the summary goes to _stderr, both stacks are reset, and nullptr is
    // returned; on success the result of the top frame is returned.
    PyObject* exec(Str source, Str filename, CompileMode mode, PyObject* _module=nullptr){
        if(_module == nullptr) _module = _main;
        try {
            CodeObject_ code = compile(source, filename, mode);
#if DEBUG_DIS_EXEC
            if(_module == _main) std::cout << disassemble(code) << '\n';
#endif
            return _exec(code, _module);
        }catch (const Exception& e){
            _stderr(this, e.summary() + "\n");
        }
#if !DEBUG_FULL_EXCEPTION
        // also swallow C++ exceptions unless full-exception debugging is on
        catch (const std::exception& e) {
            _stderr(this, "An std::exception occurred! It could be a bug.\n");
            _stderr(this, e.what());
            _stderr(this, "\n");
        }
#endif
        callstack.clear();
        s_data.clear();
        return nullptr;
    }

    // Pushes a new frame (args forwarded to Frame's ctor) and runs it to completion.
    template<typename ...Args>
    PyObject* _exec(Args&&... args){
        callstack.emplace(&s_data, s_data._sp, std::forward<Args>(args)...);
        return _run_top_frame();
    }

    // Pops the top frame and discards everything it left on the value stack.
    void _pop_frame(){
        Frame* frame = &callstack.top();
        s_data.reset(frame->_sp_base);
        callstack.pop();
    }

    // Manually unrolled push of 0..4 positional arguments.
    void _push_varargs(){ }
    void _push_varargs(PyObject* _0){ PUSH(_0); }
    void _push_varargs(PyObject* _0, PyObject* _1){ PUSH(_0); PUSH(_1); }
    void _push_varargs(PyObject* _0, PyObject* _1, PyObject* _2){ PUSH(_0); PUSH(_1); PUSH(_2); }
    void _push_varargs(PyObject* _0, PyObject* _1, PyObject* _2, PyObject* _3){ PUSH(_0); PUSH(_1); PUSH(_2); PUSH(_3); }

    // callable(*args). Stack layout: callable, PY_NULL (no receiver), args...
    template<typename... Args>
    PyObject* call(PyObject* callable, Args&&... args){
        PUSH(callable);
        PUSH(PY_NULL);
        _push_varargs(args...);
        return vectorcall(sizeof...(args));
    }

    // callable(self, *args) with an explicit receiver already resolved.
    template<typename... Args>
    PyObject* call_method(PyObject* self, PyObject* callable, Args&&... args){
        PUSH(callable);
        PUSH(self);
        _push_varargs(args...);
        return vectorcall(sizeof...(args));
    }

    // obj.name(*args): resolves the method by name first (may rebind self).
    template<typename... Args>
    PyObject* call_method(PyObject* self, StrName name, Args&&... args){
        PyObject* callable = get_unbound_method(self, name, &self);
        return call_method(self, callable, args...);
    }

    // Builds a `property` object from native getter/setter; fset is optional
    // (None is passed in its place).
    PyObject* property(NativeFuncC fget, NativeFuncC fset=nullptr){
        PyObject* p = builtins->attr("property");
        PyObject* _0 = heap.gcnew(tp_native_func, NativeFunc(fget, 1, false));
        PyObject* _1 = vm->None;
        if(fset != nullptr) _1 = heap.gcnew(tp_native_func, NativeFunc(fset, 2, false));
        return call(p, _0, _1);
    }

    // Registers a new type. Types outside builtins get a qualified
    // "module.name" display name; mod == nullptr skips attribute binding.
    PyObject* new_type_object(PyObject* mod, StrName name, Type base){
        PyObject* obj = heap._new<Type>(tp_type, _all_types.size());
        PyTypeInfo info{
            obj,
            base,
            (mod!=nullptr && mod!=builtins) ? Str(OBJ_NAME(mod)+"."+name.sv()): name.sv()
        };
        if(mod != nullptr) mod->attr().set(name, obj);
        _all_types.push_back(info);
        return obj;
    }

    // Same as above but returns the Type index (used while bootstrapping).
    Type _new_type_object(StrName name, Type base=0) {
        PyObject* obj = new_type_object(nullptr, name, base);
        return OBJ_GET(Type, obj);
    }

    // Looks a type up by name: first in builtins, then the full registry.
    PyObject* _find_type(const Str& type){
        PyObject* obj = builtins->attr().try_get(type);
        if(obj == nullptr){
            for(auto& t: _all_types) if(t.name == type) return t.obj;
            throw std::runtime_error(fmt("type not found: ", type));
        }
        return obj;
    }

    // Name-based binding conveniences; forward to the PyObject* overloads.
    template<int ARGC>
    void bind_func(Str type, Str name, NativeFuncC fn) {
        bind_func<ARGC>(_find_type(type), name, fn);
    }
    template<int ARGC>
    void bind_method(Str type, Str name, NativeFuncC fn) {
        bind_method<ARGC>(_find_type(type), name, fn);
    }
    // Static methods are stored as plain functions on the type.
    template<int ARGC, typename... Args>
    void bind_static_method(Args&&... args) {
        bind_func<ARGC>(std::forward<Args>(args)...);
    }
    template<int ARGC>
    void bind_builtin_func(Str name, NativeFuncC fn) {
        bind_func<ARGC>(builtins, name, fn);
    }

    // Maps a possibly-negative index into [0, size); raises IndexError otherwise.
    int normalized_index(int index, int size){
        if(index < 0) index += size;
        if(index < 0 || index >= size){
            IndexError(std::to_string(index) + " not in [0, " + std::to_string(size) + ")");
        }
        return index;
    }

    // Boxes a BaseIter subclass value into an iterator object.
    template<typename P>
    PyObject* PyIter(P&& value) {
        static_assert(std::is_base_of_v<BaseIter, std::decay_t<P>>);
        return heap.gcnew<P>(tp_iterator, std::forward<P>(value));
    }

    // next(obj): fast path for native iterators, else dispatch __next__.
    PyObject* PyIterNext(PyObject* obj){
        if(is_non_tagged_type(obj, tp_iterator)){
            BaseIter* iter = static_cast<BaseIter*>(obj->value());
            return iter->next();
        }
        return call_method(obj, __next__);
    }

    /***** Error Reporter *****/
    void _error(StrName name, const Str& msg){
        _error(Exception(name, msg));
    }
    // Transfers control to the innermost exception handler, or aborts the
    // interpreter loop via UnhandledException when there is none.
    void _raise(){
        bool ok = top_frame()->jump_to_exception_handler();
        if(ok) throw HandledException();
        else throw UnhandledException();
    }
    void StackOverflowError() { _error("StackOverflowError", ""); }
    void IOError(const Str& msg) { _error("IOError", msg); }
    void NotImplementedError(){ _error("NotImplementedError", ""); }
    void TypeError(const Str& msg){ _error("TypeError", msg); }
    void ZeroDivisionError(){ _error("ZeroDivisionError", "division by zero"); }
    void IndexError(const Str& msg){ _error("IndexError", msg); }
    void ValueError(const Str& msg){ _error("ValueError", msg); }
    void NameError(StrName name){ _error("NameError", fmt("name ", name.escape() + " is not defined")); }
    void AttributeError(PyObject* obj, StrName name){
        // OBJ_NAME calls getattr, which may lead to a infinite recursion
        _error("AttributeError", fmt("type ", OBJ_NAME(_t(obj)).escape(), " has no attribute ", name.escape()));
    }
    void AttributeError(Str msg){ _error("AttributeError", msg); }

    // check_type accepts tagged ints/floats; the non-tagged variant does not.
    void check_type(PyObject* obj, Type type){
        if(is_type(obj, type)) return;
        TypeError("expected " + OBJ_NAME(_t(type)).escape() + ", but got " + OBJ_NAME(_t(obj)).escape());
    }
    void check_non_tagged_type(PyObject* obj, Type type){
        if(is_non_tagged_type(obj, type)) return;
        TypeError("expected " + OBJ_NAME(_t(type)).escape() + ", but got " + OBJ_NAME(_t(obj)).escape());
    }
    void check_int(PyObject* obj){
        if(is_int(obj)) return;
        check_type(obj, tp_int);    // not an int: reuse check_type for the error message
    }
    void check_float(PyObject* obj){
        if(is_float(obj)) return;
        check_type(obj, tp_float);
    }

    // Type index -> type object.
    PyObject* _t(Type t){
        return _all_types[t.index].obj;
    }
    // obj -> its type object; tagged ints/floats carry no header, so they
    // are special-cased before reading obj->type.
    PyObject* _t(PyObject* obj){
        if(is_int(obj)) return _t(tp_int);
        if(is_float(obj)) return _t(tp_float);
        return _all_types[OBJ_GET(Type, _t(obj->type)).index].obj;
    }

    ~VM() {
        callstack.clear();
        s_data.clear();
        _all_types.clear();
        _modules.clear();
        _lazy_modules.clear();
    }

    /***** members implemented out of line, further down or in other files *****/
    void _log_s_data(const char* title = nullptr);
    PyObject* vectorcall(int ARGC, int KWARGC=0, bool op_call=false);
    CodeObject_ compile(Str source, Str filename, CompileMode mode, bool unknown_global_scope=false);
    PyObject* num_negated(PyObject* obj);
    f64 num_to_float(PyObject* obj);
    bool asBool(PyObject* obj);
    i64 hash(PyObject* obj);
    PyObject* asRepr(PyObject* obj);
    PyObject* new_module(StrName name);
    Str disassemble(CodeObject_ co);
    void init_builtin_types();
    PyObject* _py_call(PyObject** sp_base, PyObject* callable, ArgsView args, ArgsView kwargs);
    PyObject* getattr(PyObject* obj, StrName name, bool throw_err=true);
    PyObject* get_unbound_method(PyObject* obj, StrName name, PyObject** self, bool throw_err=true, bool fallback=false);
    void parse_int_slice(const Slice& s, int length, int& start, int& stop, int& step);
    PyObject* format(Str, PyObject*);
    void setattr(PyObject* obj, StrName name, PyObject* value);
    template<int ARGC>
    void bind_method(PyObject*, Str, NativeFuncC);
    template<int ARGC>
    void bind_func(PyObject*, Str, NativeFuncC);
    void _error(Exception);
    PyObject* _run_top_frame();
    void post_init();
};
  347. inline PyObject* NativeFunc::operator()(VM* vm, ArgsView args) const{
  348. int args_size = args.size() - (int)method; // remove self
  349. if(argc != -1 && args_size != argc) {
  350. vm->TypeError(fmt("expected ", argc, " arguments, but got ", args_size));
  351. }
  352. return f(vm, args);
  353. }
// Post-compile optimization hook. The perfect-hash locals experiment below
// is currently disabled, so this is intentionally a no-op.
inline void CodeObject::optimize(VM* vm){
    // uint32_t base_n = (uint32_t)(names.size() / kLocalsLoadFactor + 0.5);
    // perfect_locals_capacity = std::max(find_next_capacity(base_n), NameDict::__Capacity);
    // perfect_hash_seed = find_perfect_hash_seed(perfect_locals_capacity, names);
}
// Instantiate the cast/box helpers for every boxed builtin payload type.
DEF_NATIVE_2(Str, tp_str)
DEF_NATIVE_2(List, tp_list)
DEF_NATIVE_2(Tuple, tp_tuple)
DEF_NATIVE_2(Function, tp_function)
DEF_NATIVE_2(NativeFunc, tp_native_func)
DEF_NATIVE_2(BoundMethod, tp_bound_method)
DEF_NATIVE_2(Range, tp_range)
DEF_NATIVE_2(Slice, tp_slice)
DEF_NATIVE_2(Exception, tp_exception)
DEF_NATIVE_2(Bytes, tp_bytes)
DEF_NATIVE_2(MappingProxy, tp_mappingproxy)
// Integral casts. Small ints are tagged pointers: the payload lives in the
// upper bits above a 2-bit tag, hence the `>> 2` to decode. The checked
// variant verifies the tag first; _py_cast skips the check.
#define PY_CAST_INT(T) \
template<> inline T py_cast<T>(VM* vm, PyObject* obj){ \
vm->check_int(obj); \
return (T)(BITS(obj) >> 2); \
} \
template<> inline T _py_cast<T>(VM* vm, PyObject* obj){ \
return (T)(BITS(obj) >> 2); \
}

PY_CAST_INT(char)
PY_CAST_INT(short)
PY_CAST_INT(int)
PY_CAST_INT(long)
PY_CAST_INT(long long)
PY_CAST_INT(unsigned char)
PY_CAST_INT(unsigned short)
PY_CAST_INT(unsigned int)
PY_CAST_INT(unsigned long)
PY_CAST_INT(unsigned long long)
// Float casts. Floats are tagged pointers whose payload is the double's bit
// pattern with the low 2 bits used as the tag; decoding zeroes those bits
// ((bits >> 2) << 2) and reinterprets via BitsCvt.
template<> inline float py_cast<float>(VM* vm, PyObject* obj){
    vm->check_float(obj);
    i64 bits = BITS(obj);
    bits = (bits >> 2) << 2;    // clear the 2 tag bits
    return BitsCvt(bits)._float;
}
// Unchecked variant: caller guarantees obj is a tagged float.
template<> inline float _py_cast<float>(VM* vm, PyObject* obj){
    i64 bits = BITS(obj);
    bits = (bits >> 2) << 2;
    return BitsCvt(bits)._float;
}
template<> inline double py_cast<double>(VM* vm, PyObject* obj){
    vm->check_float(obj);
    i64 bits = BITS(obj);
    bits = (bits >> 2) << 2;
    return BitsCvt(bits)._float;
}
template<> inline double _py_cast<double>(VM* vm, PyObject* obj){
    i64 bits = BITS(obj);
    bits = (bits >> 2) << 2;
    return BitsCvt(bits)._float;
}
// Boxes a C++ integer as a tagged pointer: (val << 2) | 0b01. Values that do
// not survive the shift round-trip (i.e. don't fit in 62 bits) raise
// OverflowError rather than being silently truncated.
#define PY_VAR_INT(T) \
inline PyObject* py_var(VM* vm, T _val){ \
i64 val = static_cast<i64>(_val); \
if(((val << 2) >> 2) != val){ \
vm->_error("OverflowError", std::to_string(val) + " is out of range"); \
} \
val = (val << 2) | 0b01; \
return reinterpret_cast<PyObject*>(val); \
}

PY_VAR_INT(char)
PY_VAR_INT(short)
PY_VAR_INT(int)
PY_VAR_INT(long)
PY_VAR_INT(long long)
PY_VAR_INT(unsigned char)
PY_VAR_INT(unsigned short)
PY_VAR_INT(unsigned int)
PY_VAR_INT(unsigned long)
PY_VAR_INT(unsigned long long)
// Boxes a C++ float as a tagged pointer: the double's low 2 mantissa bits
// are sacrificed for the 0b10 tag (a tiny, deliberate precision loss).
#define PY_VAR_FLOAT(T) \
inline PyObject* py_var(VM* vm, T _val){ \
f64 val = static_cast<f64>(_val); \
i64 bits = BitsCvt(val)._int; \
bits = (bits >> 2) << 2; \
bits |= 0b10; \
return reinterpret_cast<PyObject*>(bits); \
}

PY_VAR_FLOAT(float)
PY_VAR_FLOAT(double)
// bool is a real (non-tagged) type with exactly two singleton instances.
inline PyObject* py_var(VM* vm, bool val){
    return val ? vm->True : vm->False;
}
template<> inline bool py_cast<bool>(VM* vm, PyObject* obj){
    vm->check_non_tagged_type(obj, vm->tp_bool);
    return obj == vm->True;
}
// Unchecked variant: identity comparison against the True singleton.
template<> inline bool _py_cast<bool>(VM* vm, PyObject* obj){
    return obj == vm->True;
}

// String conversions all funnel through the Str payload type.
inline PyObject* py_var(VM* vm, const char val[]){
    return VAR(Str(val));
}
inline PyObject* py_var(VM* vm, std::string val){
    return VAR(Str(std::move(val)));
}
inline PyObject* py_var(VM* vm, std::string_view val){
    return VAR(Str(val));
}

// Verifies obj is an instance of the user-registered class T
// (T::_type(vm) yields the registered Type handle).
template<typename T>
void _check_py_class(VM* vm, PyObject* obj){
    vm->check_non_tagged_type(obj, T::_type(vm));
}
  462. inline PyObject* VM::num_negated(PyObject* obj){
  463. if (is_int(obj)){
  464. return VAR(-CAST(i64, obj));
  465. }else if(is_float(obj)){
  466. return VAR(-CAST(f64, obj));
  467. }
  468. TypeError("expected 'int' or 'float', got " + OBJ_NAME(_t(obj)).escape());
  469. return nullptr;
  470. }
  471. inline f64 VM::num_to_float(PyObject* obj){
  472. if(is_float(obj)){
  473. return CAST(f64, obj);
  474. } else if (is_int(obj)){
  475. return (f64)CAST(i64, obj);
  476. }
  477. TypeError("expected 'int' or 'float', got " + OBJ_NAME(_t(obj)).escape());
  478. return 0;
  479. }
  480. inline bool VM::asBool(PyObject* obj){
  481. if(is_non_tagged_type(obj, tp_bool)) return obj == True;
  482. if(obj == None) return false;
  483. if(is_int(obj)) return CAST(i64, obj) != 0;
  484. if(is_float(obj)) return CAST(f64, obj) != 0.0;
  485. PyObject* self;
  486. PyObject* len_f = get_unbound_method(obj, __len__, &self, false);
  487. if(self != PY_NULL){
  488. PyObject* ret = call_method(self, len_f);
  489. return CAST(i64, ret) > 0;
  490. }
  491. return true;
  492. }
// Resolves a python Slice into concrete (start, stop, step) for a sequence of
// `length` items, following CPython's rules. Note the asymmetric clamp
// ranges: [0, length] for a forward slice, [-1, length-1] for a backward one
// (stop = -1 means "one before index 0").
inline void VM::parse_int_slice(const Slice& s, int length, int& start, int& stop, int& step){
    auto clip = [](int value, int min, int max){
        if(value < min) return min;
        if(value > max) return max;
        return value;
    };
    if(s.step == None) step = 1;
    else step = CAST(int, s.step);
    if(step == 0) ValueError("slice step cannot be zero");
    if(step > 0){
        if(s.start == None){
            start = 0;
        }else{
            start = CAST(int, s.start);
            if(start < 0) start += length;  // negative index counts from the end
            start = clip(start, 0, length);
        }
        if(s.stop == None){
            stop = length;
        }else{
            stop = CAST(int, s.stop);
            if(stop < 0) stop += length;
            stop = clip(stop, 0, length);
        }
    }else{
        // backward slice: defaults and clamps mirror the forward case
        if(s.start == None){
            start = length - 1;
        }else{
            start = CAST(int, s.start);
            if(start < 0) start += length;
            start = clip(start, -1, length - 1);
        }
        if(s.stop == None){
            stop = -1;
        }else{
            stop = CAST(int, s.stop);
            if(stop < 0) stop += length;
            stop = clip(stop, -1, length - 1);
        }
    }
}
// hash(obj) for the hashable builtins; raises TypeError for anything else.
// The bool check must stay before the generic paths: True/False are
// non-tagged objects, not tagged ints.
inline i64 VM::hash(PyObject* obj){
    if (is_non_tagged_type(obj, tp_str)) return CAST(Str&, obj).hash();
    if (is_int(obj)) return CAST(i64, obj);
    if (is_non_tagged_type(obj, tp_tuple)) {
        // combine element hashes (boost::hash_combine-style mixing)
        i64 x = 1000003;
        const Tuple& items = CAST(Tuple&, obj);
        for (int i=0; i<items.size(); i++) {
            i64 y = hash(items[i]);
            // recommended by Github Copilot
            x = x ^ (y + 0x9e3779b9 + (x << 6) + (x >> 2));
        }
        return x;
    }
    // types and iterators hash by identity (their raw pointer bits)
    if (is_non_tagged_type(obj, tp_type)) return BITS(obj);
    if (is_non_tagged_type(obj, tp_iterator)) return BITS(obj);
    if (is_non_tagged_type(obj, tp_bool)) return _CAST(bool, obj) ? 1 : 0;
    if (is_float(obj)){
        f64 val = CAST(f64, obj);
        return (i64)std::hash<f64>()(val);
    }
    TypeError("unhashable type: " + OBJ_NAME(_t(obj)).escape());
    return 0;
}
// repr(obj): every object is expected to answer __repr__.
inline PyObject* VM::asRepr(PyObject* obj){
    return call_method(obj, __repr__);
}
  560. inline PyObject* VM::format(Str spec, PyObject* obj){
  561. if(spec.empty()) return asStr(obj);
  562. char type;
  563. switch(spec.end()[-1]){
  564. case 'f': case 'd': case 's':
  565. type = spec.end()[-1];
  566. spec = spec.substr(0, spec.length() - 1);
  567. break;
  568. default: type = ' '; break;
  569. }
  570. char pad_c = ' ';
  571. if(spec[0] == '0'){
  572. pad_c = '0';
  573. spec = spec.substr(1);
  574. }
  575. char align;
  576. if(spec[0] == '>'){
  577. align = '>';
  578. spec = spec.substr(1);
  579. }else if(spec[0] == '<'){
  580. align = '<';
  581. spec = spec.substr(1);
  582. }else{
  583. if(is_int(obj) || is_float(obj)) align = '>';
  584. else align = '<';
  585. }
  586. int dot = spec.index(".");
  587. int width, precision;
  588. try{
  589. if(dot >= 0){
  590. width = Number::stoi(spec.substr(0, dot).str());
  591. precision = Number::stoi(spec.substr(dot+1).str());
  592. }else{
  593. width = Number::stoi(spec.str());
  594. precision = -1;
  595. }
  596. }catch(...){
  597. ValueError("invalid format specifer");
  598. }
  599. if(type != 'f' && dot >= 0) ValueError("precision not allowed in the format specifier");
  600. Str ret;
  601. if(type == 'f'){
  602. f64 val = num_to_float(obj);
  603. if(precision < 0) precision = 6;
  604. std::stringstream ss;
  605. ss << std::fixed << std::setprecision(precision) << val;
  606. ret = ss.str();
  607. }else if(type == 'd'){
  608. ret = std::to_string(CAST(i64, obj));
  609. }else if(type == 's'){
  610. ret = CAST(Str&, obj);
  611. }else{
  612. ret = CAST(Str&, asStr(obj));
  613. }
  614. if(width > ret.length()){
  615. int pad = width - ret.length();
  616. std::string padding(pad, pad_c);
  617. if(align == '>') ret = padding.c_str() + ret;
  618. else ret = ret + padding.c_str();
  619. }
  620. return VAR(ret);
  621. }
  622. inline PyObject* VM::new_module(StrName name) {
  623. PyObject* obj = heap._new<DummyModule>(tp_module, DummyModule());
  624. obj->attr().set(__name__, VAR(name.sv()));
  625. // we do not allow override in order to avoid memory leak
  626. // it is because Module objects are not garbage collected
  627. if(_modules.contains(name)) FATAL_ERROR();
  628. _modules.set(name, obj);
  629. return obj;
  630. }
// Renders a bytecode argument for disassembly/logging. `vm` may be nullptr,
// in which case const operands cannot be repr()'d and only the raw arg shows.
inline std::string _opcode_argstr(VM* vm, Bytecode byte, const CodeObject* co){
    std::string argStr = byte.arg == -1 ? "" : std::to_string(byte.arg);
    switch(byte.op){
        case OP_LOAD_CONST:
            if(vm != nullptr){
                argStr += fmt(" (", CAST(Str, vm->asRepr(co->consts[byte.arg])), ")");
            }
            break;
        // name-indexed ops: arg is a StrName index
        case OP_LOAD_NAME: case OP_LOAD_GLOBAL: case OP_LOAD_NONLOCAL: case OP_STORE_GLOBAL:
        case OP_LOAD_ATTR: case OP_LOAD_METHOD: case OP_STORE_ATTR: case OP_DELETE_ATTR:
        case OP_IMPORT_NAME: case OP_BEGIN_CLASS:
        case OP_DELETE_GLOBAL:
            argStr += fmt(" (", StrName(byte.arg).sv(), ")");
            break;
        // local-variable ops: arg indexes co->varnames
        case OP_LOAD_FAST: case OP_STORE_FAST: case OP_DELETE_FAST:
            argStr += fmt(" (", co->varnames[byte.arg].sv(), ")");
            break;
        case OP_BINARY_OP:
            argStr += fmt(" (", BINARY_SPECIAL_METHODS[byte.arg], ")");
            break;
        case OP_LOAD_FUNCTION:
            argStr += fmt(" (", co->func_decls[byte.arg]->code->name, ")");
            break;
    }
    return argStr;
}
// Produces a human-readable disassembly of `co` and, recursively, of every
// function declared inside it.
inline Str VM::disassemble(CodeObject_ co){
    // pad/truncate a string to exactly n columns
    auto pad = [](const Str& s, const int n){
        if(s.length() >= n) return s.substr(0, n);
        return s + std::string(n - s.length(), ' ');
    };
    // collect jump targets so they can be marked with "->"
    std::vector<int> jumpTargets;
    for(auto byte : co->codes){
        if(byte.op == OP_JUMP_ABSOLUTE || byte.op == OP_POP_JUMP_IF_FALSE){
            jumpTargets.push_back(byte.arg);
        }
    }
    std::stringstream ss;
    int prev_line = -1;
    for(int i=0; i<co->codes.size(); i++){
        const Bytecode& byte = co->codes[i];
        Str line = std::to_string(co->lines[i]);
        if(co->lines[i] == prev_line) line = "";    // print each source line number once
        else{
            if(prev_line != -1) ss << "\n";         // blank line between source lines
            prev_line = co->lines[i];
        }
        std::string pointer;
        if(std::find(jumpTargets.begin(), jumpTargets.end(), i) != jumpTargets.end()){
            pointer = "-> ";
        }else{
            // NOTE(review): likely intended as "   " (3 spaces) to align with
            // "-> " — whitespace may have been lost in transit; confirm upstream.
            pointer = " ";
        }
        ss << pad(line, 8) << pointer << pad(std::to_string(i), 3);
        ss << " " << pad(OP_NAMES[byte.op], 20) << " ";
        // ss << pad(byte.arg == -1 ? "" : std::to_string(byte.arg), 5);
        std::string argStr = _opcode_argstr(this, byte, co.get());
        ss << pad(argStr, 40);  // may overflow
        ss << co->blocks[byte.block].type;
        if(i != co->codes.size() - 1) ss << '\n';
    }
    for(auto& decl: co->func_decls){
        ss << "\n\n" << "Disassembly of " << decl->code->name << ":\n";
        ss << disassemble(decl->code);
    }
    ss << "\n";
    return Str(ss.str());
}
// Debug helper: prints the entire value stack on one line together with the
// current frame location and the opcode about to execute.
inline void VM::_log_s_data(const char* title) {
    if(!_initialized) return;
    if(callstack.empty()) return;
    std::stringstream ss;
    if(title) ss << title << " | ";
    // count how many frames base their stack at each slot, so frame
    // boundaries can be drawn with '|' markers
    std::map<PyObject**, int> sp_bases;
    for(Frame& f: callstack.data()){
        if(f._sp_base == nullptr) FATAL_ERROR();
        sp_bases[f._sp_base] += 1;
    }
    FrameId frame = top_frame();
    int line = frame->co->lines[frame->_ip];
    ss << frame->co->name << ":" << line << " [";
    for(PyObject** p=s_data.begin(); p!=s_data.end(); p++){
        ss << std::string(sp_bases[p], '|');
        if(sp_bases[p] > 0) ss << " ";
        PyObject* obj = *p;
        // render each slot: sentinels, tagged numbers, well-known singletons,
        // then common boxed types, falling back to "(typename)"
        if(obj == nullptr) ss << "(nil)";
        else if(obj == PY_BEGIN_CALL) ss << "BEGIN_CALL";
        else if(obj == PY_NULL) ss << "NULL";
        else if(is_int(obj)) ss << CAST(i64, obj);
        else if(is_float(obj)) ss << CAST(f64, obj);
        else if(is_type(obj, tp_str)) ss << CAST(Str, obj).escape();
        else if(obj == None) ss << "None";
        else if(obj == True) ss << "True";
        else if(obj == False) ss << "False";
        else if(is_type(obj, tp_function)){
            auto& f = CAST(Function&, obj);
            ss << f.decl->code->name << "(...)";
        } else if(is_type(obj, tp_type)){
            Type t = OBJ_GET(Type, obj);
            ss << "<class " + _all_types[t].name.escape() + ">";
        } else if(is_type(obj, tp_list)){
            auto& t = CAST(List&, obj);
            ss << "list(size=" << t.size() << ")";
        } else if(is_type(obj, tp_tuple)){
            auto& t = CAST(Tuple&, obj);
            ss << "tuple(size=" << t.size() << ")";
        } else ss << "(" << obj_type_name(this, obj->type) << ")";
        ss << ", ";
    }
    std::string output = ss.str();
    if(!s_data.empty()) {
        // drop the trailing ", "
        output.pop_back(); output.pop_back();
    }
    output.push_back(']');
    Bytecode byte = frame->co->codes[frame->_ip];
    std::cout << output << " " << OP_NAMES[byte.op] << " " << _opcode_argstr(nullptr, byte, frame->co) << std::endl;
}
// One-time bootstrap of the core type system, the singleton objects, and the
// `builtins` / `__main__` modules.
// NOTE: registration order below is significant — `object`/`type` must occupy
// type slots 0/1, and `int`/`float` must land exactly on kTpIntIndex /
// kTpFloatIndex (enforced by the FATAL_ERROR check below).
inline void VM::init_builtin_types(){
    // slot 0: object, slot 1: type.
    // NOTE(review): the middle field appears to be the base-type index
    // (-1 = no base) — confirm against the TypeInfo declaration.
    _all_types.push_back({heap._new<Type>(Type(1), Type(0)), -1, "object"});
    _all_types.push_back({heap._new<Type>(Type(1), Type(1)), 0, "type"});
    tp_object = 0; tp_type = 1;
    tp_int = _new_type_object("int");
    tp_float = _new_type_object("float");
    // tagged-value handling relies on int/float living at these fixed indices
    if(tp_int.index != kTpIntIndex || tp_float.index != kTpFloatIndex) FATAL_ERROR();
    tp_bool = _new_type_object("bool");
    tp_str = _new_type_object("str");
    tp_list = _new_type_object("list");
    tp_tuple = _new_type_object("tuple");
    tp_slice = _new_type_object("slice");
    tp_range = _new_type_object("range");
    tp_module = _new_type_object("module");
    tp_function = _new_type_object("function");
    tp_native_func = _new_type_object("native_func");
    tp_iterator = _new_type_object("iterator");
    tp_bound_method = _new_type_object("bound_method");
    tp_super = _new_type_object("super");
    tp_exception = _new_type_object("Exception");
    tp_bytes = _new_type_object("bytes");
    tp_mappingproxy = _new_type_object("mappingproxy");
    // singletons: plain Dummy payloads, identified elsewhere by pointer
    // equality (e.g. `obj == None` / `obj == True`)
    this->None = heap._new<Dummy>(_new_type_object("NoneType"), {});
    this->Ellipsis = heap._new<Dummy>(_new_type_object("ellipsis"), {});
    this->True = heap._new<Dummy>(tp_bool, {});
    this->False = heap._new<Dummy>(tp_bool, {});
    this->StopIteration = heap._new<Dummy>(_new_type_object("StopIterationType"), {});
    this->builtins = new_module("builtins");
    this->_main = new_module("__main__");
    // setup public types
    builtins->attr().set("type", _t(tp_type));
    builtins->attr().set("object", _t(tp_object));
    builtins->attr().set("bool", _t(tp_bool));
    builtins->attr().set("int", _t(tp_int));
    builtins->attr().set("float", _t(tp_float));
    builtins->attr().set("str", _t(tp_str));
    builtins->attr().set("list", _t(tp_list));
    builtins->attr().set("tuple", _t(tp_tuple));
    builtins->attr().set("range", _t(tp_range));
    builtins->attr().set("bytes", _t(tp_bytes));
    builtins->attr().set("StopIteration", StopIteration);
    builtins->attr().set("slice", _t(tp_slice));
    post_init();
    // compact attribute tables once all builtin names are registered
    // NOTE(review): _try_perfect_rehash looks like a lookup optimization — confirm
    for(int i=0; i<_all_types.size(); i++){
        _all_types[i].obj->attr()._try_perfect_rehash();
    }
    for(auto [k, v]: _modules.items()) v->attr()._try_perfect_rehash();
}
  796. inline PyObject* VM::vectorcall(int ARGC, int KWARGC, bool op_call){
  797. bool is_varargs = ARGC == 0xFFFF;
  798. PyObject** p0;
  799. PyObject** p1 = s_data._sp - KWARGC*2;
  800. if(is_varargs){
  801. p0 = p1 - 1;
  802. while(*p0 != PY_BEGIN_CALL) p0--;
  803. // [BEGIN_CALL, callable, <self>, args..., kwargs...]
  804. // ^p0 ^p1 ^_sp
  805. ARGC = p1 - (p0 + 3);
  806. }else{
  807. p0 = p1 - ARGC - 2 - (int)is_varargs;
  808. // [callable, <self>, args..., kwargs...]
  809. // ^p0 ^p1 ^_sp
  810. }
  811. PyObject* callable = p1[-(ARGC + 2)];
  812. bool method_call = p1[-(ARGC + 1)] != PY_NULL;
  813. // handle boundmethod, do a patch
  814. if(is_non_tagged_type(callable, tp_bound_method)){
  815. if(method_call) FATAL_ERROR();
  816. auto& bm = CAST(BoundMethod&, callable);
  817. callable = bm.func; // get unbound method
  818. p1[-(ARGC + 2)] = bm.func;
  819. p1[-(ARGC + 1)] = bm.self;
  820. method_call = true;
  821. // [unbound, self, args..., kwargs...]
  822. }
  823. ArgsView args(p1 - ARGC - int(method_call), p1);
  824. if(is_non_tagged_type(callable, tp_native_func)){
  825. const auto& f = OBJ_GET(NativeFunc, callable);
  826. if(KWARGC != 0) TypeError("native_func does not accept keyword arguments");
  827. PyObject* ret = f(this, args);
  828. s_data.reset(p0);
  829. return ret;
  830. }
  831. ArgsView kwargs(p1, s_data._sp);
  832. if(is_non_tagged_type(callable, tp_function)){
  833. // ret is nullptr or a generator
  834. PyObject* ret = _py_call(p0, callable, args, kwargs);
  835. // stack resetting is handled by _py_call
  836. if(ret != nullptr) return ret;
  837. if(op_call) return PY_OP_CALL;
  838. return _run_top_frame();
  839. }
  840. if(is_non_tagged_type(callable, tp_type)){
  841. if(method_call) FATAL_ERROR();
  842. // [type, NULL, args..., kwargs...]
  843. // TODO: derived __new__ ?
  844. PyObject* new_f = callable->attr().try_get(__new__);
  845. PyObject* obj;
  846. if(new_f != nullptr){
  847. PUSH(new_f);
  848. PUSH(PY_NULL);
  849. for(PyObject* obj: args) PUSH(obj);
  850. for(PyObject* obj: kwargs) PUSH(obj);
  851. obj = vectorcall(ARGC, KWARGC);
  852. if(!isinstance(obj, OBJ_GET(Type, callable))) return obj;
  853. }else{
  854. obj = heap.gcnew<DummyInstance>(OBJ_GET(Type, callable), {});
  855. }
  856. PyObject* self;
  857. callable = get_unbound_method(obj, __init__, &self, false);
  858. if (self != PY_NULL) {
  859. // replace `NULL` with `self`
  860. p1[-(ARGC + 2)] = callable;
  861. p1[-(ARGC + 1)] = self;
  862. // [init_f, self, args..., kwargs...]
  863. vectorcall(ARGC, KWARGC);
  864. // We just discard the return value of `__init__`
  865. // in cpython it raises a TypeError if the return value is not None
  866. }else{
  867. // manually reset the stack
  868. s_data.reset(p0);
  869. }
  870. return obj;
  871. }
  872. // handle `__call__` overload
  873. PyObject* self;
  874. PyObject* call_f = get_unbound_method(callable, __call__, &self, false);
  875. if(self != PY_NULL){
  876. p1[-(ARGC + 2)] = call_f;
  877. p1[-(ARGC + 1)] = self;
  878. // [call_f, self, args..., kwargs...]
  879. return vectorcall(ARGC, KWARGC, false);
  880. }
  881. TypeError(OBJ_NAME(_t(callable)).escape() + " object is not callable");
  882. return nullptr;
  883. }
// Invoke a python-level `function` object whose call region starts at p0.
// Binds args/kwargs into the function's local variable slots, then either
// pushes a new Frame and returns nullptr (the caller executes it), or — for
// a generator function — returns a freshly built generator iterator.
// The value stack is reset to p0 before returning in both cases.
inline PyObject* VM::_py_call(PyObject** p0, PyObject* callable, ArgsView args, ArgsView kwargs){
    // callable must be a `function` object
    if(s_data.is_overflow()) StackOverflowError();
    const Function& fn = CAST(Function&, callable);
    const CodeObject* co = fn.decl->code.get();
    if(args.size() < fn.argc){
        vm->TypeError(fmt(
            "expected ",
            fn.argc,
            " positional arguments, but got ",
            args.size(),
            " (", fn.decl->code->name, ')'
        ));
    }
    // if this function is simple, a.k.a, no kwargs and no *args and not a generator
    // we can use a fast path to avoid using buffer copy
    if(fn.is_simple){
        if(args.size() > fn.argc) TypeError("too many positional arguments");
        // pad the remaining varname slots with nullptr so the frame's locals
        // live directly in the stack slice that already holds the arguments
        int spaces = co->varnames.size() - fn.argc;
        for(int j=0; j<spaces; j++) PUSH(nullptr);
        callstack.emplace(&s_data, p0, co, fn._module, callable, FastLocals(co, args.begin()));
        return nullptr;
    }
    // slow path: assemble the full locals layout in a scratch buffer first.
    // NOTE(review): the single thread-local buffer appears safe because its
    // contents are fully consumed (copied to the stack or into the Generator)
    // before any nested _py_call can run — confirm.
    int i = 0;
    static THREAD_LOCAL PyObject* buffer[PK_MAX_CO_VARNAMES];
    // prepare args
    for(int index: fn.decl->args) buffer[index] = args[i++];
    // set extra varnames to nullptr
    for(int j=i; j<co->varnames.size(); j++) buffer[j] = nullptr;
    // prepare kwdefaults
    for(auto& kv: fn.decl->kwargs) buffer[kv.key] = kv.value;
    // handle *args
    if(fn.decl->starred_arg != -1){
        List vargs; // handle *args
        while(i < args.size()) vargs.push_back(args[i++]);
        buffer[fn.decl->starred_arg] = VAR(Tuple(std::move(vargs)));
    }else{
        // no *args: surplus positionals fill keyword-default slots in
        // declaration order
        // kwdefaults override
        for(auto& kv: fn.decl->kwargs){
            if(i < args.size()){
                buffer[kv.key] = args[i++];
            }else{
                break;
            }
        }
        if(i < args.size()) TypeError(fmt("too many arguments", " (", fn.decl->code->name, ')'));
    }
    // bind keyword arguments: kwargs is a flat (key, value) sequence where
    // each key is a StrName encoded as a tagged int
    for(int i=0; i<kwargs.size(); i+=2){
        StrName key = CAST(int, kwargs[i]);
        int index = co->varnames_inv.try_get(key);
        if(index<0) TypeError(fmt(key.escape(), " is an invalid keyword argument for ", co->name, "()"));
        buffer[index] = kwargs[i+1];
    }
    s_data.reset(p0);   // pop the call region; locals now live in `buffer`
    if(co->is_generator){
        // generators capture their locals into the Generator object instead
        // of pushing a frame now
        PyObject* ret = PyIter(Generator(
            this,
            Frame(&s_data, nullptr, co, fn._module, callable),
            ArgsView(buffer, buffer + co->varnames.size())
        ));
        return ret;
    }
    // copy buffer to stack
    for(int i=0; i<co->varnames.size(); i++) PUSH(buffer[i]);
    callstack.emplace(&s_data, p0, co, fn._module, callable);
    return nullptr;
}
  951. // https://docs.python.org/3/howto/descriptor.html#invocation-from-an-instance
  952. inline PyObject* VM::getattr(PyObject* obj, StrName name, bool throw_err){
  953. PyObject* objtype = _t(obj);
  954. // handle super() proxy
  955. if(is_non_tagged_type(obj, tp_super)){
  956. const Super& super = OBJ_GET(Super, obj);
  957. obj = super.first;
  958. objtype = _t(super.second);
  959. }
  960. PyObject* cls_var = find_name_in_mro(objtype, name);
  961. if(cls_var != nullptr){
  962. // handle descriptor
  963. PyObject* descr_get = _t(cls_var)->attr().try_get(__get__);
  964. if(descr_get != nullptr) return call_method(cls_var, descr_get, obj);
  965. }
  966. // handle instance __dict__
  967. if(!is_tagged(obj) && obj->is_attr_valid()){
  968. PyObject* val = obj->attr().try_get(name);
  969. if(val != nullptr) return val;
  970. }
  971. if(cls_var != nullptr){
  972. // bound method is non-data descriptor
  973. if(is_non_tagged_type(cls_var, tp_function) || is_non_tagged_type(cls_var, tp_native_func)){
  974. return VAR(BoundMethod(obj, cls_var));
  975. }
  976. return cls_var;
  977. }
  978. if(throw_err) AttributeError(obj, name);
  979. return nullptr;
  980. }
  981. // used by OP_LOAD_METHOD
  982. // try to load a unbound method (fallback to `getattr` if not found)
  983. inline PyObject* VM::get_unbound_method(PyObject* obj, StrName name, PyObject** self, bool throw_err, bool fallback){
  984. *self = PY_NULL;
  985. PyObject* objtype = _t(obj);
  986. // handle super() proxy
  987. if(is_non_tagged_type(obj, tp_super)){
  988. const Super& super = OBJ_GET(Super, obj);
  989. obj = super.first;
  990. objtype = _t(super.second);
  991. }
  992. PyObject* cls_var = find_name_in_mro(objtype, name);
  993. if(fallback){
  994. if(cls_var != nullptr){
  995. // handle descriptor
  996. PyObject* descr_get = _t(cls_var)->attr().try_get(__get__);
  997. if(descr_get != nullptr) return call_method(cls_var, descr_get, obj);
  998. }
  999. // handle instance __dict__
  1000. if(!is_tagged(obj) && obj->is_attr_valid()){
  1001. PyObject* val = obj->attr().try_get(name);
  1002. if(val != nullptr) return val;
  1003. }
  1004. }
  1005. if(cls_var != nullptr){
  1006. if(is_non_tagged_type(cls_var, tp_function) || is_non_tagged_type(cls_var, tp_native_func)){
  1007. *self = obj;
  1008. }
  1009. return cls_var;
  1010. }
  1011. if(throw_err) AttributeError(obj, name);
  1012. return nullptr;
  1013. }
  1014. inline void VM::setattr(PyObject* obj, StrName name, PyObject* value){
  1015. PyObject* objtype = _t(obj);
  1016. // handle super() proxy
  1017. if(is_non_tagged_type(obj, tp_super)){
  1018. Super& super = OBJ_GET(Super, obj);
  1019. obj = super.first;
  1020. objtype = _t(super.second);
  1021. }
  1022. PyObject* cls_var = find_name_in_mro(objtype, name);
  1023. if(cls_var != nullptr){
  1024. // handle descriptor
  1025. PyObject* cls_var_t = _t(cls_var);
  1026. if(cls_var_t->attr().contains(__get__)){
  1027. PyObject* descr_set = cls_var_t->attr().try_get(__set__);
  1028. if(descr_set != nullptr){
  1029. call_method(cls_var, descr_set, obj, value);
  1030. }else{
  1031. TypeError(fmt("readonly attribute: ", name.escape()));
  1032. }
  1033. return;
  1034. }
  1035. }
  1036. // handle instance __dict__
  1037. if(is_tagged(obj) || !obj->is_attr_valid()) TypeError("cannot set attribute");
  1038. obj->attr().set(name, value);
  1039. }
// Register a native function of fixed arity ARGC as a method on a type
// object (checked via check_non_tagged_type).
// NOTE(review): the `true` flag on NativeFunc is presumably the "is method"
// marker, mirroring the `false` in bind_func — confirm against NativeFunc.
template<int ARGC>
void VM::bind_method(PyObject* obj, Str name, NativeFuncC fn) {
    check_non_tagged_type(obj, tp_type);
    obj->attr().set(name, VAR(NativeFunc(fn, ARGC, true)));
}
// Register a free native function of fixed arity ARGC in `obj`'s attribute
// table; unlike bind_method, no type check is performed and the NativeFunc
// flag is false.
template<int ARGC>
void VM::bind_func(PyObject* obj, Str name, NativeFuncC fn) {
    obj->attr().set(name, VAR(NativeFunc(fn, ARGC, false)));
}
  1049. inline void VM::_error(Exception e){
  1050. if(callstack.empty()){
  1051. e.is_re = false;
  1052. throw e;
  1053. }
  1054. PUSH(VAR(e));
  1055. _raise();
  1056. }
  1057. inline void ManagedHeap::mark() {
  1058. for(PyObject* obj: _no_gc) OBJ_MARK(obj);
  1059. for(auto& frame : vm->callstack.data()) frame._gc_mark();
  1060. for(PyObject* obj: vm->s_data) if(obj!=nullptr) OBJ_MARK(obj);
  1061. if(vm->_gc_marker_ex != nullptr) vm->_gc_marker_ex(vm);
  1062. }
  1063. inline Str obj_type_name(VM *vm, Type type){
  1064. return vm->_all_types[type].name;
  1065. }
  1066. #undef PY_VAR_INT
  1067. #undef PY_VAR_FLOAT
  1068. } // namespace pkpy