// frame.h
  1. #pragma once
  2. #include "codeobject.h"
  3. #include "common.h"
  4. #include "memory.h"
  5. #include "obj.h"
  6. #include "vector.h"
  7. namespace pkpy{
  8. // weak reference fast locals
// Non-owning view over a frame's "fast" local variable slots.
// Both `co` and the slot array `a` are weak references: they are kept
// alive by the owning frame / VM, never by this struct.
struct FastLocals{
    // this is a weak reference
    const CodeObject* co;   // code object whose varnames define slot count/order
    PyObject** a;           // first local slot; one slot per entry in co->varnames
    int size() const{ return co->varnames.size();}
    PyObject*& operator[](int i){ return a[i]; }
    PyObject* operator[](int i) const { return a[i]; }
    FastLocals(const CodeObject* co, PyObject** a): co(co), a(a) {}
    // Defined out of line. Presumably returns the slot address for `name`,
    // or nullptr if `name` is not a fast local — TODO confirm at definition.
    PyObject** try_get_name(StrName name);
    // Defined out of line. Presumably materializes the locals into a
    // NameDict (e.g. for exec/eval scopes) — TODO confirm at definition.
    NameDict_ to_namedict();
    // Iteration covers exactly the co->varnames slots.
    PyObject** begin() const { return a; }
    PyObject** end() const { return a + size(); }
};
  22. struct ValueStack {
  23. // We allocate extra PK_VM_STACK_SIZE/128 places to keep `_sp` valid when `is_overflow() == true`.
  24. PyObject* _begin[PK_VM_STACK_SIZE + PK_VM_STACK_SIZE/128];
  25. PyObject** _sp;
  26. PyObject** _max_end;
  27. static constexpr size_t max_size() { return PK_VM_STACK_SIZE; }
  28. ValueStack(): _sp(_begin), _max_end(_begin + PK_VM_STACK_SIZE) {}
  29. PyObject*& top(){ return _sp[-1]; }
  30. PyObject* top() const { return _sp[-1]; }
  31. PyObject*& second(){ return _sp[-2]; }
  32. PyObject* second() const { return _sp[-2]; }
  33. PyObject*& third(){ return _sp[-3]; }
  34. PyObject* third() const { return _sp[-3]; }
  35. PyObject*& peek(int n){ return _sp[-n]; }
  36. PyObject* peek(int n) const { return _sp[-n]; }
  37. void push(PyObject* v){ *_sp++ = v; }
  38. void pop(){ --_sp; }
  39. PyObject* popx(){ return *--_sp; }
  40. ArgsView view(int n){ return ArgsView(_sp-n, _sp); }
  41. void shrink(int n){ _sp -= n; }
  42. int size() const { return _sp - _begin; }
  43. bool empty() const { return _sp == _begin; }
  44. PyObject** begin() { return _begin; }
  45. PyObject** end() { return _sp; }
  46. void reset(PyObject** sp) {
  47. #if PK_DEBUG_EXTRA_CHECK
  48. if(sp < _begin || sp > _begin + MAX_SIZE) PK_FATAL_ERROR();
  49. #endif
  50. _sp = sp;
  51. }
  52. void clear() { _sp = _begin; }
  53. bool is_overflow() const { return _sp >= _max_end; }
  54. PyObject* operator[](int i) const { return _begin[i]; }
  55. PyObject*& operator[](int i) { return _begin[i]; }
  56. ValueStack(const ValueStack&) = delete;
  57. ValueStack(ValueStack&&) = delete;
  58. ValueStack& operator=(const ValueStack&) = delete;
  59. ValueStack& operator=(ValueStack&&) = delete;
  60. };
// One activation record of the VM: instruction pointers, the code being
// executed, the module/callable context, and a view of the fast locals.
struct Frame {
    int _ip;        // index of the bytecode currently executing (-1 before start)
    int _next_ip;   // index of the bytecode to execute next
    // This is for unwinding only, use `actual_sp_base()` for value stack access
    PyObject** _sp_base;
    const CodeObject* co;   // weak reference; owner keeps it alive
    PyObject* _module;      // module providing f_globals()
    PyObject* _callable;    // a function object or nullptr (global scope)
    FastLocals _locals;     // fast-local slots (weak view)
    NameDict& f_globals() noexcept { return _module->attr(); }
    // Defined out of line; presumably resolves `name` in the callable's
    // closure, returning nullptr on miss — TODO confirm at definition.
    PyObject* f_closure_try_get(StrName name);
    // function scope
    Frame(PyObject** p0, const CodeObject* co, PyObject* _module, PyObject* _callable, PyObject** _locals_base)
            : _ip(-1), _next_ip(0), _sp_base(p0), co(co), _module(_module), _callable(_callable), _locals(co, _locals_base) { }
    // exec/eval
    Frame(PyObject** p0, const CodeObject* co, PyObject* _module, PyObject* _callable, FastLocals _locals)
            : _ip(-1), _next_ip(0), _sp_base(p0), co(co), _module(_module), _callable(_callable), _locals(_locals) { }
    // global scope
    Frame(PyObject** p0, const CodeObject_& co, PyObject* _module)
            : _ip(-1), _next_ip(0), _sp_base(p0), co(co.get()), _module(_module), _callable(nullptr), _locals(co.get(), p0) {}
    // Advance to the next bytecode and return its index.
    int next_bytecode() {
        _ip = _next_ip++;
#if PK_DEBUG_EXTRA_CHECK
        if(_ip >= co->codes.size()) PK_FATAL_ERROR();
#endif
        return _ip;
    }
    // Base of this frame's region of the value stack (the locals base).
    PyObject** actual_sp_base() const { return _locals.a; }
    // Number of values this frame has on the given value stack.
    int stack_size(ValueStack* _s) const { return _s->_sp - actual_sp_base(); }
    // View over this frame's portion of the value stack.
    ArgsView stack_view(ValueStack* _s) const { return ArgsView(actual_sp_base(), _s->_sp); }
    void jump_abs(int i){ _next_ip = i; }
    // Defined out of line: exception-handler dispatch / block unwinding.
    bool jump_to_exception_handler(ValueStack*);
    int _exit_block(ValueStack*, int);
    void jump_abs_break(ValueStack*, int);
    // GC root marking for the objects this frame keeps alive.
    void _gc_mark() const {
        PK_OBJ_MARK(_module);
        co->_gc_mark();
        // Frame could be stored in a generator, so mark _callable for safety
        if(_callable != nullptr) PK_OBJ_MARK(_callable);
    }
};
// Intrusive singly-linked node pairing a Frame with its caller; the VM's
// call stack is a linked list of these (see CallStack below in this file).
struct LinkedFrame{
    LinkedFrame* f_back;    // previous (caller) node, or nullptr at the bottom
    Frame frame;
    // Forwards `args` straight to one of Frame's constructors.
    template<typename... Args>
    LinkedFrame(LinkedFrame* f_back, Args&&... args) : f_back(f_back), frame(std::forward<Args>(args)...) {}
};
// The VM call stack: a linked list of LinkedFrame nodes allocated from the
// 64-byte pool. Not copyable by design intent; nodes are freed via pop().
struct CallStack{
    // Nodes must fit a pool64 cell, and pop() never runs destructors, so
    // LinkedFrame must be trivially destructible.
    static_assert(sizeof(LinkedFrame) <= 64 && std::is_trivially_destructible_v<LinkedFrame>);
    LinkedFrame* _tail;     // most recently pushed frame (top of the call stack)
    int _size;              // number of frames currently on the stack
    CallStack(): _tail(nullptr), _size(0) {}
    int size() const { return _size; }
    bool empty() const { return _size == 0; }
    void clear(){ while(!empty()) pop(); }
    // Push a new frame, forwarding `args` to the Frame constructor.
    template<typename... Args>
    void emplace(Args&&... args){
        // Placement-new into a pool64 cell; the new node links back to the
        // previous tail.
        _tail = new(pool64_alloc<LinkedFrame>()) LinkedFrame(_tail, std::forward<Args>(args)...);
        ++_size;
    }
    // Pop the top frame and return its memory to the pool. No destructor is
    // called (guaranteed safe by the static_assert above).
    void pop(){
#if PK_DEBUG_EXTRA_CHECK
        if(empty()) PK_FATAL_ERROR();
#endif
        LinkedFrame* p = _tail;
        _tail = p->f_back;
        pool64_dealloc(p);
        --_size;
    }
    // Precondition: !empty().
    Frame& top() const { return _tail->frame; }
    // Invoke `f` on every frame, from the top (innermost) to the bottom.
    template<typename Func>
    void apply(Func&& f){
        for(LinkedFrame* p = _tail; p != nullptr; p = p->f_back) f(p->frame);
    }
};
  136. }; // namespace pkpy