mirror of https://github.com/pocketpy/pocketpy (synced 2025-10-22 04:20:17 +00:00)
macro renames
This commit is contained in:
parent 9fdac89b4b
commit b82e52df00
@@ -56,14 +56,14 @@ struct PyVec2: Vec2 {
 return VAR(Vec2(x, y));
 });

-vm->bind__repr__(OBJ_GET(Type, type), [](VM* vm, PyObject* obj){
+vm->bind__repr__(PK_OBJ_GET(Type, type), [](VM* vm, PyObject* obj){
 PyVec2& self = _CAST(PyVec2&, obj);
 std::stringstream ss;
 ss << "vec2(" << self.x << ", " << self.y << ")";
 return VAR(ss.str());
 });

-vm->bind__add__(OBJ_GET(Type, type), [](VM* vm, PyObject* obj, PyObject* other){
+vm->bind__add__(PK_OBJ_GET(Type, type), [](VM* vm, PyObject* obj, PyObject* other){
 PyVec2& self = _CAST(PyVec2&, obj);
 PyVec2& other_ = CAST(PyVec2&, other);
 return VAR_T(PyVec2, self + other_);
src/ceval.h (24 changes)

@@ -23,7 +23,7 @@ inline PyObject* VM::_run_top_frame(){
 StrName _name;

 while(true){
-#if DEBUG_EXTRA_CHECK
+#if PK_DEBUG_EXTRA_CHECK
 if(frame.index < base_id) FATAL_ERROR();
 #endif
 try{
@@ -34,9 +34,16 @@ inline PyObject* VM::_run_top_frame(){
 * DO NOT leave any strong reference of PyObject* in the C stack
 */
 {
+
+#if PK_ENABLE_CEVAL_CALLBACK
+#define CEVAL_STEP() byte = frame->next_bytecode(); if(_ceval_on_step) _ceval_on_step(this, frame, byte)
+#else
+#define CEVAL_STEP() byte = frame->next_bytecode()
+#endif
+
 #define DISPATCH_OP_CALL() { frame = top_frame(); goto __NEXT_FRAME; }
 __NEXT_FRAME:
-Bytecode byte = frame->next_bytecode();
+Bytecode CEVAL_STEP();
 // cache
 const CodeObject* co = frame->co;
 const auto& co_consts = co->consts;
@@ -49,16 +56,16 @@ static void* OP_LABELS[] = {
 #undef OPCODE
 };

-#define DISPATCH() { byte = frame->next_bytecode(); goto *OP_LABELS[byte.op];}
+#define DISPATCH() { CEVAL_STEP(); goto *OP_LABELS[byte.op];}
 #define TARGET(op) CASE_OP_##op:
 goto *OP_LABELS[byte.op];

 #else
 #define TARGET(op) case OP_##op:
-#define DISPATCH() { byte = frame->next_bytecode(); goto __NEXT_STEP;}
+#define DISPATCH() { CEVAL_STEP(); goto __NEXT_STEP;}

 __NEXT_STEP:;
-#if DEBUG_CEVAL_STEP
+#if PK_DEBUG_CEVAL_STEP
 _log_s_data();
 #endif
 switch (byte.op)
@@ -624,7 +631,7 @@ __NEXT_STEP:;
 _0 = POPX(); // super
 if(_0 == None) _0 = _t(tp_object);
 check_non_tagged_type(_0, tp_type);
-_1 = new_type_object(frame->_module, _name, OBJ_GET(Type, _0));
+_1 = new_type_object(frame->_module, _name, PK_OBJ_GET(Type, _0));
 PUSH(_1);
 DISPATCH();
 TARGET(END_CLASS)
@@ -703,7 +710,7 @@ __NEXT_STEP:;
 } DISPATCH();

 #if !PK_ENABLE_COMPUTED_GOTO
-#if DEBUG_EXTRA_CHECK
+#if PK_DEBUG_EXTRA_CHECK
 default: throw std::runtime_error(fmt(OP_NAMES[byte.op], " is not implemented"));
 #else
 default: UNREACHABLE();
@@ -715,6 +722,7 @@ __NEXT_STEP:;
 #undef DISPATCH
 #undef TARGET
 #undef DISPATCH_OP_CALL
+#undef CEVAL_STEP
 /**********************************************************************/
 UNREACHABLE();
 }catch(HandledException& e){
@@ -725,7 +733,7 @@ __NEXT_STEP:;
 _e.st_push(frame->snapshot());
 _pop_frame();
 if(callstack.empty()){
-#if DEBUG_FULL_EXCEPTION
+#if PK_DEBUG_FULL_EXCEPTION
 std::cerr << _e.summary() << std::endl;
 #endif
 throw _e;
src/cffi.h (16 changes)

@@ -9,7 +9,7 @@ namespace pkpy {
 static Type _type(VM* vm) { \
 static const StrName __x0(#mod); \
 static const StrName __x1(#name); \
-return OBJ_GET(Type, vm->_modules[__x0]->attr(__x1)); \
+return PK_OBJ_GET(Type, vm->_modules[__x0]->attr(__x1)); \
 } \
 static void _check_type(VM* vm, PyObject* val){ \
 if(!vm->isinstance(val, T::_type(vm))){ \
@@ -72,7 +72,7 @@ struct VoidP{
 return VAR(self.hex());
 });

-vm->bind__repr__(OBJ_GET(Type, type), [](VM* vm, PyObject* obj){
+vm->bind__repr__(PK_OBJ_GET(Type, type), [](VM* vm, PyObject* obj){
 VoidP& self = _CAST(VoidP&, obj);
 std::stringstream ss;
 ss << "<void* at " << self.hex();
@@ -82,7 +82,7 @@ struct VoidP{
 });

 #define BIND_CMP(name, op) \
-vm->bind##name(OBJ_GET(Type, type), [](VM* vm, PyObject* lhs, PyObject* rhs){ \
+vm->bind##name(PK_OBJ_GET(Type, type), [](VM* vm, PyObject* lhs, PyObject* rhs){ \
 if(!is_non_tagged_type(rhs, VoidP::_type(vm))) return vm->NotImplemented; \
 return VAR(_CAST(VoidP&, lhs) op _CAST(VoidP&, rhs)); \
 });
@@ -95,7 +95,7 @@ struct VoidP{

 #undef BIND_CMP

-vm->bind__hash__(OBJ_GET(Type, type), [](VM* vm, PyObject* obj){
+vm->bind__hash__(PK_OBJ_GET(Type, type), [](VM* vm, PyObject* obj){
 VoidP& self = _CAST(VoidP&, obj);
 return reinterpret_cast<i64>(self.ptr);
 });
@@ -122,13 +122,13 @@ struct VoidP{
 return VAR_T(VoidP, (char*)self.ptr + offset * self.base_offset);
 });

-vm->bind__add__(OBJ_GET(Type, type), [](VM* vm, PyObject* lhs, PyObject* rhs){
+vm->bind__add__(PK_OBJ_GET(Type, type), [](VM* vm, PyObject* lhs, PyObject* rhs){
 VoidP& self = _CAST(VoidP&, lhs);
 i64 offset = CAST(i64, rhs);
 return VAR_T(VoidP, (char*)self.ptr + offset);
 });

-vm->bind__sub__(OBJ_GET(Type, type), [](VM* vm, PyObject* lhs, PyObject* rhs){
+vm->bind__sub__(PK_OBJ_GET(Type, type), [](VM* vm, PyObject* lhs, PyObject* rhs){
 VoidP& self = _CAST(VoidP&, lhs);
 i64 offset = CAST(i64, rhs);
 return VAR_T(VoidP, (char*)self.ptr - offset);
@@ -243,7 +243,7 @@ struct C99Struct{
 return VAR_T(C99Struct, self);
 });

-vm->bind__eq__(OBJ_GET(Type, type), [](VM* vm, PyObject* lhs, PyObject* rhs){
+vm->bind__eq__(PK_OBJ_GET(Type, type), [](VM* vm, PyObject* lhs, PyObject* rhs){
 C99Struct& self = _CAST(C99Struct&, lhs);
 if(!is_non_tagged_type(rhs, C99Struct::_type(vm))) return vm->NotImplemented;
 C99Struct& other = _CAST(C99Struct&, rhs);
@@ -328,7 +328,7 @@ struct C99ReflType final: ReflType{
 return VAR(self.size);
 });

-vm->bind__getitem__(OBJ_GET(Type, type), [](VM* vm, PyObject* obj, PyObject* key){
+vm->bind__getitem__(PK_OBJ_GET(Type, type), [](VM* vm, PyObject* obj, PyObject* key){
 C99ReflType& self = _CAST(C99ReflType&, obj);
 const Str& name = CAST(Str&, key);
 auto it = std::lower_bound(self.fields.begin(), self.fields.end(), name.sv());
src/common.h (14 changes)

@@ -39,11 +39,11 @@ struct GIL {
 explicit GIL() { _mutex.lock(); }
 ~GIL() { _mutex.unlock(); }
 };
-#define GLOBAL_SCOPE_LOCK() auto _lock = GIL();
+#define PK_GLOBAL_SCOPE_LOCK() auto _lock = GIL();

 #else
 #define THREAD_LOCAL
-#define GLOBAL_SCOPE_LOCK()
+#define PK_GLOBAL_SCOPE_LOCK()
 #endif

 /*******************************************************************************/
@@ -119,11 +119,11 @@ struct Type {
 #define PK_ASSERT(x) if(!(x)) FATAL_ERROR();

 struct PyObject;
-#define BITS(p) (reinterpret_cast<i64>(p))
+#define PK_BITS(p) (reinterpret_cast<i64>(p))
-inline bool is_tagged(PyObject* p) noexcept { return (BITS(p) & 0b11) != 0b00; }
+inline bool is_tagged(PyObject* p) noexcept { return (PK_BITS(p) & 0b11) != 0b00; }
-inline bool is_int(PyObject* p) noexcept { return (BITS(p) & 0b11) == 0b01; }
+inline bool is_int(PyObject* p) noexcept { return (PK_BITS(p) & 0b11) == 0b01; }
-inline bool is_float(PyObject* p) noexcept { return (BITS(p) & 0b11) == 0b10; }
+inline bool is_float(PyObject* p) noexcept { return (PK_BITS(p) & 0b11) == 0b10; }
-inline bool is_special(PyObject* p) noexcept { return (BITS(p) & 0b11) == 0b11; }
+inline bool is_special(PyObject* p) noexcept { return (PK_BITS(p) & 0b11) == 0b11; }

 inline bool is_both_int_or_float(PyObject* a, PyObject* b) noexcept {
 return is_tagged(a) && is_tagged(b);

@@ -1101,4 +1101,7 @@ public:
 }
 };

+#undef BC_NOARG
+#undef BC_KEEPLINE
+
 } // namespace pkpy
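Aside (editor's sketch, not part of the diff): the is_* helpers above encode pocketpy's pointer tagging, where the two low bits of a PyObject* distinguish small ints (0b01), small floats (0b10), special values (0b11) and ordinary heap objects (0b00). A standalone illustration of that dispatch, with hypothetical names and assuming 64-bit pointers:

#include <cstdint>

enum class PtrKind { HeapObject, SmallInt, SmallFloat, Special };

// Mirrors the checks performed by is_int / is_float / is_special above.
inline PtrKind classify(void* p){
    switch(reinterpret_cast<std::int64_t>(p) & 0b11){
        case 0b01: return PtrKind::SmallInt;
        case 0b10: return PtrKind::SmallFloat;
        case 0b11: return PtrKind::Special;
        default:   return PtrKind::HeapObject;
    }
}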
src/config.h (23 changes)

@@ -14,6 +14,9 @@
 // This triggers necessary locks to make the VM thread-safe
 #define PK_ENABLE_THREAD 0

+// Enable this for `vm->_ceval_on_step`
+#define PK_ENABLE_CEVAL_CALLBACK 0
+
 // Whether to use `std::function` to do bindings or not
 // By default, functions to be binded must be a C function pointer without capture
 // However, someone thinks it's not convenient.
@@ -24,17 +27,17 @@
 /*************** debug settings ***************/

 // Enable this may help you find bugs
-#define DEBUG_EXTRA_CHECK 0
+#define PK_DEBUG_EXTRA_CHECK 0

 // Do not edit the following settings unless you know what you are doing
-#define DEBUG_NO_BUILTIN_MODULES 0
+#define PK_DEBUG_NO_BUILTINS 0
-#define DEBUG_DIS_EXEC 0
+#define PK_DEBUG_DIS_EXEC 0
-#define DEBUG_CEVAL_STEP 0
+#define PK_DEBUG_CEVAL_STEP 0
-#define DEBUG_FULL_EXCEPTION 0
+#define PK_DEBUG_FULL_EXCEPTION 0
-#define DEBUG_MEMORY_POOL 0
+#define PK_DEBUG_MEMORY_POOL 0
-#define DEBUG_NO_MEMORY_POOL 0
+#define PK_DEBUG_NO_MEMORY_POOL 0
-#define DEBUG_NO_AUTO_GC 0
+#define PK_DEBUG_NO_AUTO_GC 0
-#define DEBUG_GC_STATS 0
+#define PK_DEBUG_GC_STATS 0

 /*************** internal settings ***************/

@@ -75,7 +78,7 @@ inline const float kTypeAttrLoadFactor = 0.5f;
 #endif


-#if DEBUG_CEVAL_STEP && defined(PK_ENABLE_COMPUTED_GOTO)
+#if PK_DEBUG_CEVAL_STEP && defined(PK_ENABLE_COMPUTED_GOTO)
 #undef PK_ENABLE_COMPUTED_GOTO
 #endif
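Aside (editor's sketch, not part of the diff): the new PK_ENABLE_CEVAL_CALLBACK option pairs with the CEVAL_STEP() macro in src/ceval.h above and the vm->_ceval_on_step pointer added in src/vm.h below. Assuming the option is set to 1 at build time, a host could install a per-bytecode hook roughly like this (install_step_counter and g_steps are hypothetical names):

#include "pocketpy.h"

static long long g_steps = 0;

// Registers a capture-less lambda as the per-step callback; the signature matches
// void (*_ceval_on_step)(VM*, Frame*, Bytecode bc) as declared in src/vm.h.
void install_step_counter(pkpy::VM* vm){
    vm->_ceval_on_step = [](pkpy::VM*, pkpy::Frame*, pkpy::Bytecode){
        g_steps++;   // e.g. count every bytecode dispatched by _run_top_frame()
    };
}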
@@ -86,7 +86,7 @@ struct ValueStackImpl {
 PyObject** begin() { return _begin; }
 PyObject** end() { return _sp; }
 void reset(PyObject** sp) {
-#if DEBUG_EXTRA_CHECK
+#if PK_DEBUG_EXTRA_CHECK
 if(sp < _begin || sp > _begin + MAX_SIZE) FATAL_ERROR();
 #endif
 _sp = sp;
@@ -118,7 +118,7 @@ struct Frame {

 PyObject* f_closure_try_get(StrName name){
 if(_callable == nullptr) return nullptr;
-Function& fn = OBJ_GET(Function, _callable);
+Function& fn = PK_OBJ_GET(Function, _callable);
 if(fn._closure == nullptr) return nullptr;
 return fn._closure->try_get(name);
 }
@@ -134,7 +134,7 @@ struct Frame {

 Bytecode next_bytecode() {
 _ip = _next_ip++;
-#if DEBUG_EXTRA_CHECK
+#if PK_DEBUG_EXTRA_CHECK
 if(_ip >= co->codes.size()) FATAL_ERROR();
 #endif
 return co->codes[_ip];
src/gc.h (8 changes)

@@ -65,7 +65,7 @@ struct ManagedHeap{
 return obj;
 }

-#if DEBUG_GC_STATS
+#if PK_DEBUG_GC_STATS
 inline static std::map<Type, int> deleted;
 #endif

@@ -76,7 +76,7 @@ struct ManagedHeap{
 obj->gc.marked = false;
 alive.push_back(obj);
 }else{
-#if DEBUG_GC_STATS
+#if PK_DEBUG_GC_STATS
 deleted[obj->type] += 1;
 #endif
 if(_gc_on_delete) _gc_on_delete(vm, obj);
@@ -96,7 +96,7 @@ struct ManagedHeap{
 }

 void _auto_collect(){
-#if !DEBUG_NO_AUTO_GC
+#if !PK_DEBUG_NO_AUTO_GC
 if(_gc_lock_counter > 0) return;
 if(gc_counter < gc_threshold) return;
 gc_counter = 0;
@@ -118,7 +118,7 @@ struct ManagedHeap{
 ~ManagedHeap(){
 for(PyObject* obj: _no_gc) { obj->~PyObject(); pool64.dealloc(obj); }
 for(PyObject* obj: gen) { obj->~PyObject(); pool64.dealloc(obj); }
-#if DEBUG_GC_STATS
+#if PK_DEBUG_GC_STATS
 for(auto& [type, count]: deleted){
 std::cout << "GC: " << obj_type_name(vm, type) << "=" << count << std::endl;
 }
src/iter.h (26 changes)

@@ -13,10 +13,10 @@ struct RangeIter{
 RangeIter(Range r) : r(r), current(r.start) {}

 static void _register(VM* vm, PyObject* mod, PyObject* type){
-vm->_all_types[OBJ_GET(Type, type)].subclass_enabled = false;
+vm->_all_types[PK_OBJ_GET(Type, type)].subclass_enabled = false;
 vm->bind_notimplemented_constructor<RangeIter>(type);
-vm->bind__iter__(OBJ_GET(Type, type), [](VM* vm, PyObject* obj){ return obj; });
+vm->bind__iter__(PK_OBJ_GET(Type, type), [](VM* vm, PyObject* obj){ return obj; });
-vm->bind__next__(OBJ_GET(Type, type), [](VM* vm, PyObject* obj){
+vm->bind__next__(PK_OBJ_GET(Type, type), [](VM* vm, PyObject* obj){
 RangeIter& self = _CAST(RangeIter&, obj);
 bool has_next = self.r.step > 0 ? self.current < self.r.stop : self.current > self.r.stop;
 if(!has_next) return vm->StopIteration;
@@ -39,10 +39,10 @@ struct ArrayIter{
 void _gc_mark() const{ OBJ_MARK(ref); }

 static void _register(VM* vm, PyObject* mod, PyObject* type){
-vm->_all_types[OBJ_GET(Type, type)].subclass_enabled = false;
+vm->_all_types[PK_OBJ_GET(Type, type)].subclass_enabled = false;
 vm->bind_notimplemented_constructor<ArrayIter>(type);
-vm->bind__iter__(OBJ_GET(Type, type), [](VM* vm, PyObject* obj){ return obj; });
+vm->bind__iter__(PK_OBJ_GET(Type, type), [](VM* vm, PyObject* obj){ return obj; });
-vm->bind__next__(OBJ_GET(Type, type), [](VM* vm, PyObject* obj){
+vm->bind__next__(PK_OBJ_GET(Type, type), [](VM* vm, PyObject* obj){
 ArrayIter& self = _CAST(ArrayIter&, obj);
 if(self.current == self.end) return vm->StopIteration;
 return *self.current++;
@@ -56,15 +56,15 @@ struct StringIter{
 Str* str;
 int index;

-StringIter(PyObject* ref) : ref(ref), str(&OBJ_GET(Str, ref)), index(0) {}
+StringIter(PyObject* ref) : ref(ref), str(&PK_OBJ_GET(Str, ref)), index(0) {}

 void _gc_mark() const{ OBJ_MARK(ref); }

 static void _register(VM* vm, PyObject* mod, PyObject* type){
-vm->_all_types[OBJ_GET(Type, type)].subclass_enabled = false;
+vm->_all_types[PK_OBJ_GET(Type, type)].subclass_enabled = false;
 vm->bind_notimplemented_constructor<StringIter>(type);
-vm->bind__iter__(OBJ_GET(Type, type), [](VM* vm, PyObject* obj){ return obj; });
+vm->bind__iter__(PK_OBJ_GET(Type, type), [](VM* vm, PyObject* obj){ return obj; });
-vm->bind__next__(OBJ_GET(Type, type), [](VM* vm, PyObject* obj){
+vm->bind__next__(PK_OBJ_GET(Type, type), [](VM* vm, PyObject* obj){
 StringIter& self = _CAST(StringIter&, obj);
 // TODO: optimize this... operator[] is of O(n) complexity
 if(self.index == self.str->u8_length()) return vm->StopIteration;
@@ -114,10 +114,10 @@ struct Generator{
 }

 static void _register(VM* vm, PyObject* mod, PyObject* type){
-vm->_all_types[OBJ_GET(Type, type)].subclass_enabled = false;
+vm->_all_types[PK_OBJ_GET(Type, type)].subclass_enabled = false;
 vm->bind_notimplemented_constructor<Generator>(type);
-vm->bind__iter__(OBJ_GET(Type, type), [](VM* vm, PyObject* obj){ return obj; });
+vm->bind__iter__(PK_OBJ_GET(Type, type), [](VM* vm, PyObject* obj){ return obj; });
-vm->bind__next__(OBJ_GET(Type, type), [](VM* vm, PyObject* obj){
+vm->bind__next__(PK_OBJ_GET(Type, type), [](VM* vm, PyObject* obj){
 Generator& self = _CAST(Generator&, obj);
 return self.next(vm);
 });

@@ -335,7 +335,7 @@ struct PyVec2: Vec2 {
 return VAR(Tuple({ VAR(self.x), VAR(self.y) }));
 });

-vm->bind__repr__(OBJ_GET(Type, type), [](VM* vm, PyObject* obj){
+vm->bind__repr__(PK_OBJ_GET(Type, type), [](VM* vm, PyObject* obj){
 PyVec2& self = _CAST(PyVec2&, obj);
 std::stringstream ss;
 ss << "vec2(" << self.x << ", " << self.y << ")";
@@ -395,7 +395,7 @@ struct PyVec3: Vec3 {
 return VAR(Tuple({ VAR(self.x), VAR(self.y), VAR(self.z) }));
 });

-vm->bind__repr__(OBJ_GET(Type, type), [](VM* vm, PyObject* obj){
+vm->bind__repr__(PK_OBJ_GET(Type, type), [](VM* vm, PyObject* obj){
 PyVec3& self = _CAST(PyVec3&, obj);
 std::stringstream ss;
 ss << "vec3(" << self.x << ", " << self.y << ", " << self.z << ")";
@@ -476,7 +476,7 @@ struct PyMat3x3: Mat3x3{

 #undef METHOD_PROXY_NONE

-vm->bind__repr__(OBJ_GET(Type, type), [](VM* vm, PyObject* obj){
+vm->bind__repr__(PK_OBJ_GET(Type, type), [](VM* vm, PyObject* obj){
 PyMat3x3& self = _CAST(PyMat3x3&, obj);
 std::stringstream ss;
 ss << std::fixed << std::setprecision(4);
@@ -491,7 +491,7 @@ struct PyMat3x3: Mat3x3{
 return VAR_T(PyMat3x3, self);
 });

-vm->bind__getitem__(OBJ_GET(Type, type), [](VM* vm, PyObject* obj, PyObject* index){
+vm->bind__getitem__(PK_OBJ_GET(Type, type), [](VM* vm, PyObject* obj, PyObject* index){
 PyMat3x3& self = _CAST(PyMat3x3&, obj);
 Tuple& t = CAST(Tuple&, index);
 if(t.size() != 2){
src/memory.h (26 changes)

@@ -40,7 +40,7 @@ struct DoubleLinkedList{
 }

 void pop_back(){
-#if DEBUG_MEMORY_POOL
+#if PK_DEBUG_MEMORY_POOL
 if(empty()) throw std::runtime_error("DoubleLinkedList::pop_back() called on empty list");
 #endif
 tail.prev->prev->next = &tail;
@@ -49,7 +49,7 @@ struct DoubleLinkedList{
 }

 void pop_front(){
-#if DEBUG_MEMORY_POOL
+#if PK_DEBUG_MEMORY_POOL
 if(empty()) throw std::runtime_error("DoubleLinkedList::pop_front() called on empty list");
 #endif
 head.next->next->prev = &head;
@@ -58,21 +58,21 @@ struct DoubleLinkedList{
 }

 T* back() const {
-#if DEBUG_MEMORY_POOL
+#if PK_DEBUG_MEMORY_POOL
 if(empty()) throw std::runtime_error("DoubleLinkedList::back() called on empty list");
 #endif
 return static_cast<T*>(tail.prev);
 }

 T* front() const {
-#if DEBUG_MEMORY_POOL
+#if PK_DEBUG_MEMORY_POOL
 if(empty()) throw std::runtime_error("DoubleLinkedList::front() called on empty list");
 #endif
 return static_cast<T*>(head.next);
 }

 void erase(T* node){
-#if DEBUG_MEMORY_POOL
+#if PK_DEBUG_MEMORY_POOL
 if(empty()) throw std::runtime_error("DoubleLinkedList::erase() called on empty list");
 LinkedListNode* n = head.next;
 while(n != &tail){
@@ -99,7 +99,7 @@ struct DoubleLinkedList{
 }

 bool empty() const {
-#if DEBUG_MEMORY_POOL
+#if PK_DEBUG_MEMORY_POOL
 if(size() == 0){
 if(head.next != &tail || tail.prev != &head){
 throw std::runtime_error("DoubleLinkedList::size() returned 0 but the list is not empty");
@@ -152,7 +152,7 @@ struct MemoryPool{
 }

 Block* alloc(){
-#if DEBUG_MEMORY_POOL
+#if PK_DEBUG_MEMORY_POOL
 if(empty()) throw std::runtime_error("Arena::alloc() called on empty arena");
 #endif
 _free_list_size--;
@@ -160,7 +160,7 @@ struct MemoryPool{
 }

 void dealloc(Block* block){
-#if DEBUG_MEMORY_POOL
+#if PK_DEBUG_MEMORY_POOL
 if(full()) throw std::runtime_error("Arena::dealloc() called on full arena");
 #endif
 _free_list[_free_list_size] = block;
@@ -175,8 +175,8 @@ struct MemoryPool{
 void* alloc() { return alloc(sizeof(__T)); }

 void* alloc(size_t size){
-GLOBAL_SCOPE_LOCK();
+PK_GLOBAL_SCOPE_LOCK();
-#if DEBUG_NO_MEMORY_POOL
+#if PK_DEBUG_NO_MEMORY_POOL
 return malloc(size);
 #endif
 if(size > __BlockSize){
@@ -200,12 +200,12 @@ struct MemoryPool{
 }

 void dealloc(void* p){
-GLOBAL_SCOPE_LOCK();
+PK_GLOBAL_SCOPE_LOCK();
-#if DEBUG_NO_MEMORY_POOL
+#if PK_DEBUG_NO_MEMORY_POOL
 free(p);
 return;
 #endif
-#if DEBUG_MEMORY_POOL
+#if PK_DEBUG_MEMORY_POOL
 if(p == nullptr) throw std::runtime_error("MemoryPool::dealloc() called on nullptr");
 #endif
 Block* block = (Block*)((char*)p - sizeof(void*));
src/obj.h (20 changes)

@@ -43,7 +43,7 @@ struct NativeFunc {
 T get_userdata() const {
 static_assert(std::is_trivially_copyable_v<T>);
 static_assert(sizeof(T) <= sizeof(UserData));
-#if DEBUG_EXTRA_CHECK
+#if PK_DEBUG_EXTRA_CHECK
 if(!_has_userdata) throw std::runtime_error("userdata not set");
 #endif
 return reinterpret_cast<const T&>(_userdata);
@@ -199,7 +199,7 @@ struct MappingProxy{
 NameDict& attr() noexcept { return obj->attr(); }
 };

-#define OBJ_GET(T, obj) (((Py_<T>*)(obj))->_value)
+#define PK_OBJ_GET(T, obj) (((Py_<T>*)(obj))->_value)

 #define OBJ_MARK(obj) \
 if(!is_tagged(obj) && !(obj)->gc.marked) { \
@@ -218,18 +218,18 @@ inline void gc_mark_namedict(NameDict& t){

 Str obj_type_name(VM* vm, Type type);

-#if DEBUG_NO_BUILTIN_MODULES
+#if PK_DEBUG_NO_BUILTINS
 #define OBJ_NAME(obj) Str("<?>")
 #else
 DEF_SNAME(__name__);
-#define OBJ_NAME(obj) OBJ_GET(Str, vm->getattr(obj, __name__))
+#define OBJ_NAME(obj) PK_OBJ_GET(Str, vm->getattr(obj, __name__))
 #endif

 const int kTpIntIndex = 2;
 const int kTpFloatIndex = 3;

 inline bool is_type(PyObject* obj, Type type) {
-#if DEBUG_EXTRA_CHECK
+#if PK_DEBUG_EXTRA_CHECK
 if(obj == nullptr) throw std::runtime_error("is_type() called with nullptr");
 if(is_special(obj)) throw std::runtime_error("is_type() called with special object");
 #endif
@@ -241,7 +241,7 @@ inline bool is_type(PyObject* obj, Type type) {
 }

 inline bool is_non_tagged_type(PyObject* obj, Type type) {
-#if DEBUG_EXTRA_CHECK
+#if PK_DEBUG_EXTRA_CHECK
 if(obj == nullptr) throw std::runtime_error("is_non_tagged_type() called with nullptr");
 if(is_special(obj)) throw std::runtime_error("is_non_tagged_type() called with special object");
 #endif
@@ -270,7 +270,7 @@ __T py_cast(VM* vm, PyObject* obj) {
 return to_void_p<T>(vm, obj);
 }else if constexpr(is_py_class<T>::value){
 T::_check_type(vm, obj);
-return OBJ_GET(T, obj);
+return PK_OBJ_GET(T, obj);
 }else if constexpr(std::is_pod_v<T>){
 return to_c99_struct<T>(vm, obj);
 }else {
@@ -286,7 +286,7 @@ __T _py_cast(VM* vm, PyObject* obj) {
 }else if constexpr(std::is_pointer_v<__T>){
 return to_void_p<__T>(vm, obj);
 }else if constexpr(is_py_class<T>::value){
-return OBJ_GET(T, obj);
+return PK_OBJ_GET(T, obj);
 }else if constexpr(std::is_pod_v<T>){
 return to_c99_struct<T>(vm, obj);
 }else {
@@ -430,8 +430,8 @@ struct Py_<DummyModule> final: PyObject {

 template<typename T>
 inline T lambda_get_userdata(PyObject** p){
-if(p[-1] != PY_NULL) return OBJ_GET(NativeFunc, p[-1]).get_userdata<T>();
+if(p[-1] != PY_NULL) return PK_OBJ_GET(NativeFunc, p[-1]).get_userdata<T>();
-else return OBJ_GET(NativeFunc, p[-2]).get_userdata<T>();
+else return PK_OBJ_GET(NativeFunc, p[-2]).get_userdata<T>();
 }
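Aside (editor's sketch, not part of the diff): for embedders, the rename is purely mechanical; PK_OBJ_GET expands exactly as OBJ_GET did, so call sites only change spelling. A hypothetical helper, assuming obj is known to hold a Str:

#include "pocketpy.h"
using namespace pkpy;

// pre-commit:  return OBJ_GET(Str, obj);
Str& get_str_payload(PyObject* obj){
    return PK_OBJ_GET(Str, obj);   // expands to (((Py_<Str>*)(obj))->_value)
}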
@@ -24,7 +24,7 @@ inline CodeObject_ VM::compile(Str source, Str filename, CompileMode mode, bool
 try{
 return compiler.compile();
 }catch(Exception& e){
-#if DEBUG_FULL_EXCEPTION
+#if PK_DEBUG_FULL_EXCEPTION
 std::cerr << e.summary() << std::endl;
 #endif
 _error(e);
@@ -75,9 +75,9 @@ inline void init_builtins(VM* _vm) {

 _vm->bind_builtin_func<2>("super", [](VM* vm, ArgsView args) {
 vm->check_non_tagged_type(args[0], vm->tp_type);
-Type type = OBJ_GET(Type, args[0]);
+Type type = PK_OBJ_GET(Type, args[0]);
 if(!vm->isinstance(args[1], type)){
-Str _0 = obj_type_name(vm, OBJ_GET(Type, vm->_t(args[1])));
+Str _0 = obj_type_name(vm, PK_OBJ_GET(Type, vm->_t(args[1])));
 Str _1 = obj_type_name(vm, type);
 vm->TypeError("super(): " + _0.escape() + " is not an instance of " + _1.escape());
 }
@@ -87,7 +87,7 @@ inline void init_builtins(VM* _vm) {

 _vm->bind_builtin_func<2>("isinstance", [](VM* vm, ArgsView args) {
 vm->check_non_tagged_type(args[1], vm->tp_type);
-Type type = OBJ_GET(Type, args[1]);
+Type type = PK_OBJ_GET(Type, args[1]);
 return VAR(vm->isinstance(args[0], type));
 });

@@ -260,11 +260,11 @@ inline void init_builtins(VM* _vm) {
 });

 _vm->bind__eq__(_vm->tp_object, [](VM* vm, PyObject* lhs, PyObject* rhs) { return VAR(lhs == rhs); });
-_vm->bind__hash__(_vm->tp_object, [](VM* vm, PyObject* obj) { return BITS(obj); });
+_vm->bind__hash__(_vm->tp_object, [](VM* vm, PyObject* obj) { return PK_BITS(obj); });

 _vm->cached_object__new__ = _vm->bind_constructor<1>("object", [](VM* vm, ArgsView args) {
 vm->check_non_tagged_type(args[0], vm->tp_type);
-Type t = OBJ_GET(Type, args[0]);
+Type t = PK_OBJ_GET(Type, args[0]);
 return vm->heap.gcnew<DummyInstance>(t, {});
 });

@@ -282,7 +282,7 @@ inline void init_builtins(VM* _vm) {
 return VAR(r);
 });

-_vm->bind__iter__(_vm->tp_range, [](VM* vm, PyObject* obj) { return VAR_T(RangeIter, OBJ_GET(Range, obj)); });
+_vm->bind__iter__(_vm->tp_range, [](VM* vm, PyObject* obj) { return VAR_T(RangeIter, PK_OBJ_GET(Range, obj)); });
 _vm->bind__repr__(_vm->_type("NoneType"), [](VM* vm, PyObject* obj) { return VAR("None"); });
 _vm->bind__json__(_vm->_type("NoneType"), [](VM* vm, PyObject* obj) { return VAR("null"); });

@@ -1362,11 +1362,11 @@ inline void VM::post_init(){

 _t(tp_object)->attr().set("__class__", property(CPP_LAMBDA(vm->_t(args[0]))));
 _t(tp_type)->attr().set("__base__", property([](VM* vm, ArgsView args){
-const PyTypeInfo& info = vm->_all_types[OBJ_GET(Type, args[0])];
+const PyTypeInfo& info = vm->_all_types[PK_OBJ_GET(Type, args[0])];
 return info.base.index == -1 ? vm->None : vm->_all_types[info.base].obj;
 }));
 _t(tp_type)->attr().set("__name__", property([](VM* vm, ArgsView args){
-const PyTypeInfo& info = vm->_all_types[OBJ_GET(Type, args[0])];
+const PyTypeInfo& info = vm->_all_types[PK_OBJ_GET(Type, args[0])];
 return VAR(info.name);
 }));

@@ -1396,7 +1396,7 @@ inline void VM::post_init(){
 return VAR(MappingProxy(args[0]));
 }));

-#if !DEBUG_NO_BUILTIN_MODULES
+#if !PK_DEBUG_NO_BUILTINS
 add_module_sys(this);
 add_module_traceback(this);
 add_module_time(this);
src/vm.h (78 changes)

@@ -28,17 +28,17 @@ namespace pkpy{
 #define DEF_NATIVE_2(ctype, ptype) \
 template<> inline ctype py_cast<ctype>(VM* vm, PyObject* obj) { \
 vm->check_non_tagged_type(obj, vm->ptype); \
-return OBJ_GET(ctype, obj); \
+return PK_OBJ_GET(ctype, obj); \
 } \
 template<> inline ctype _py_cast<ctype>(VM* vm, PyObject* obj) { \
-return OBJ_GET(ctype, obj); \
+return PK_OBJ_GET(ctype, obj); \
 } \
 template<> inline ctype& py_cast<ctype&>(VM* vm, PyObject* obj) { \
 vm->check_non_tagged_type(obj, vm->ptype); \
-return OBJ_GET(ctype, obj); \
+return PK_OBJ_GET(ctype, obj); \
 } \
 template<> inline ctype& _py_cast<ctype&>(VM* vm, PyObject* obj) { \
-return OBJ_GET(ctype, obj); \
+return PK_OBJ_GET(ctype, obj); \
 } \
 inline PyObject* py_var(VM* vm, const ctype& value) { return vm->heap.gcnew(vm->ptype, value);} \
 inline PyObject* py_var(VM* vm, ctype&& value) { return vm->heap.gcnew(vm->ptype, std::move(value));}
@@ -124,6 +124,10 @@ public:

 PyObject* _last_exception;

+#if PK_ENABLE_CEVAL_CALLBACK
+void (*_ceval_on_step)(VM*, Frame*, Bytecode bc) = nullptr;
+#endif
+
 PrintFunc _stdout;
 PrintFunc _stderr;
 Bytes (*_import_handler)(const Str& name);
@@ -152,7 +156,7 @@ public:
 }

 FrameId top_frame() {
-#if DEBUG_EXTRA_CHECK
+#if PK_DEBUG_EXTRA_CHECK
 if(callstack.empty()) FATAL_ERROR();
 #endif
 return FrameId(&callstack.data(), callstack.size()-1);
@@ -194,7 +198,7 @@ public:
 do{
 val = cls->attr().try_get(name);
 if(val != nullptr) return val;
-Type base = _all_types[OBJ_GET(Type, cls)].base;
+Type base = _all_types[PK_OBJ_GET(Type, cls)].base;
 if(base.index == -1) break;
 cls = _all_types[base].obj;
 }while(true);
@@ -202,7 +206,7 @@ public:
 }

 bool isinstance(PyObject* obj, Type cls_t){
-Type obj_t = OBJ_GET(Type, _t(obj));
+Type obj_t = PK_OBJ_GET(Type, _t(obj));
 do{
 if(obj_t == cls_t) return true;
 Type base = _all_types[obj_t].base;
@@ -216,14 +220,14 @@ public:
 if(_module == nullptr) _module = _main;
 try {
 CodeObject_ code = compile(source, filename, mode);
-#if DEBUG_DIS_EXEC
+#if PK_DEBUG_DIS_EXEC
 if(_module == _main) std::cout << disassemble(code) << '\n';
 #endif
 return _exec(code, _module);
 }catch (const Exception& e){
 _stderr(this, e.summary() + "\n");
 }
-#if !DEBUG_FULL_EXCEPTION
+#if !PK_DEBUG_FULL_EXCEPTION
 catch (const std::exception& e) {
 Str msg = "An std::exception occurred! It could be a bug.\n";
 msg = msg + e.what();
@@ -301,7 +305,7 @@ public:

 Type _new_type_object(StrName name, Type base=0) {
 PyObject* obj = new_type_object(nullptr, name, base, false);
-return OBJ_GET(Type, obj);
+return PK_OBJ_GET(Type, obj);
 }

 PyObject* _find_type_object(const Str& type){
@@ -316,7 +320,7 @@ public:

 Type _type(const Str& type){
 PyObject* obj = _find_type_object(type);
-return OBJ_GET(Type, obj);
+return PK_OBJ_GET(Type, obj);
 }

 PyTypeInfo* _type_info(const Str& type){
@@ -325,7 +329,7 @@ public:
 for(auto& t: _all_types) if(t.name == type) return &t;
 FATAL_ERROR();
 }
-return &_all_types[OBJ_GET(Type, obj)];
+return &_all_types[PK_OBJ_GET(Type, obj)];
 }

 PyTypeInfo* _type_info(Type type){
@@ -344,7 +348,7 @@ public:
 PyObject* nf = bind_method<0>(_t(type), #name, [](VM* vm, ArgsView args){ \
 return lambda_get_userdata<PyObject*(*)(VM*, PyObject*)>(args.begin())(vm, args[0]);\
 }); \
-OBJ_GET(NativeFunc, nf).set_userdata(f); \
+PK_OBJ_GET(NativeFunc, nf).set_userdata(f); \
 }

 BIND_UNARY_SPECIAL(__repr__)
@@ -367,7 +371,7 @@ public:
 PyObject* nf = bind_method<1>(obj, #name, [](VM* vm, ArgsView args){ \
 return lambda_get_userdata<BinaryFuncC>(args.begin())(vm, args[0], args[1]); \
 }); \
-OBJ_GET(NativeFunc, nf).set_userdata(f); \
+PK_OBJ_GET(NativeFunc, nf).set_userdata(f); \
 }

 BIND_BINARY_SPECIAL(__eq__)
@@ -400,7 +404,7 @@ public:
 PyObject* nf = bind_method<1>(obj, "__getitem__", [](VM* vm, ArgsView args){
 return lambda_get_userdata<PyObject*(*)(VM*, PyObject*, PyObject*)>(args.begin())(vm, args[0], args[1]);
 });
-OBJ_GET(NativeFunc, nf).set_userdata(f);
+PK_OBJ_GET(NativeFunc, nf).set_userdata(f);
 }

 void bind__setitem__(Type type, void (*f)(VM*, PyObject*, PyObject*, PyObject*)){
@@ -410,7 +414,7 @@ public:
 lambda_get_userdata<void(*)(VM* vm, PyObject*, PyObject*, PyObject*)>(args.begin())(vm, args[0], args[1], args[2]);
 return vm->None;
 });
-OBJ_GET(NativeFunc, nf).set_userdata(f);
+PK_OBJ_GET(NativeFunc, nf).set_userdata(f);
 }

 void bind__delitem__(Type type, void (*f)(VM*, PyObject*, PyObject*)){
@@ -420,7 +424,7 @@ public:
 lambda_get_userdata<void(*)(VM*, PyObject*, PyObject*)>(args.begin())(vm, args[0], args[1]);
 return vm->None;
 });
-OBJ_GET(NativeFunc, nf).set_userdata(f);
+PK_OBJ_GET(NativeFunc, nf).set_userdata(f);
 }

 bool py_equals(PyObject* lhs, PyObject* rhs){
@@ -463,7 +467,7 @@ public:
 template<typename T, typename __T>
 PyObject* bind_default_constructor(__T&& type) {
 return bind_constructor<1>(std::forward<__T>(type), [](VM* vm, ArgsView args){
-Type t = OBJ_GET(Type, args[0]);
+Type t = PK_OBJ_GET(Type, args[0]);
 return vm->heap.gcnew<T>(t, T());
 });
 }
@@ -513,7 +517,7 @@ public:
 void IndexError(const Str& msg){ _error("IndexError", msg); }
 void ValueError(const Str& msg){ _error("ValueError", msg); }
 void NameError(StrName name){ _error("NameError", fmt("name ", name.escape() + " is not defined")); }
-void KeyError(PyObject* obj){ _error("KeyError", OBJ_GET(Str, py_repr(obj))); }
+void KeyError(PyObject* obj){ _error("KeyError", PK_OBJ_GET(Str, py_repr(obj))); }
 void BinaryOptError(const char* op) { TypeError(fmt("unsupported operand type(s) for ", op)); }

 void AttributeError(PyObject* obj, StrName name){
@@ -638,7 +642,7 @@ public:
 _modules.clear();
 _lazy_modules.clear();
 }
-#if DEBUG_CEVAL_STEP
+#if PK_DEBUG_CEVAL_STEP
 void _log_s_data(const char* title = nullptr);
 #endif
 void _unpack_as_list(ArgsView args, List& list);
@@ -672,7 +676,7 @@ inline PyObject* NativeFunc::operator()(VM* vm, ArgsView args) const{
 if(args.size() != argc && argc != -1) {
 vm->TypeError(fmt("expected ", argc, " arguments, got ", args.size()));
 }
-#if DEBUG_EXTRA_CHECK
+#if PK_DEBUG_EXTRA_CHECK
 if(f == nullptr) FATAL_ERROR();
 #endif
 return f(vm, args);
@@ -698,10 +702,10 @@ DEF_NATIVE_2(StarWrapper, tp_star_wrapper)
 #define PY_CAST_INT(T) \
 template<> inline T py_cast<T>(VM* vm, PyObject* obj){ \
 vm->check_int(obj); \
-return (T)(BITS(obj) >> 2); \
+return (T)(PK_BITS(obj) >> 2); \
 } \
 template<> inline T _py_cast<T>(VM* vm, PyObject* obj){ \
-return (T)(BITS(obj) >> 2); \
+return (T)(PK_BITS(obj) >> 2); \
 }

 PY_CAST_INT(char)
@@ -718,20 +722,20 @@ PY_CAST_INT(unsigned long long)

 template<> inline float py_cast<float>(VM* vm, PyObject* obj){
 vm->check_float(obj);
-i64 bits = BITS(obj) & Number::c1;
+i64 bits = PK_BITS(obj) & Number::c1;
 return BitsCvt(bits)._float;
 }
 template<> inline float _py_cast<float>(VM* vm, PyObject* obj){
-i64 bits = BITS(obj) & Number::c1;
+i64 bits = PK_BITS(obj) & Number::c1;
 return BitsCvt(bits)._float;
 }
 template<> inline double py_cast<double>(VM* vm, PyObject* obj){
 vm->check_float(obj);
-i64 bits = BITS(obj) & Number::c1;
+i64 bits = PK_BITS(obj) & Number::c1;
 return BitsCvt(bits)._float;
 }
 template<> inline double _py_cast<double>(VM* vm, PyObject* obj){
-i64 bits = BITS(obj) & Number::c1;
+i64 bits = PK_BITS(obj) & Number::c1;
 return BitsCvt(bits)._float;
 }

@@ -1050,7 +1054,7 @@ inline Str VM::disassemble(CodeObject_ co){
 return Str(ss.str());
 }

-#if DEBUG_CEVAL_STEP
+#if PK_DEBUG_CEVAL_STEP
 inline void VM::_log_s_data(const char* title) {
 if(_main == nullptr) return;
 if(callstack.empty()) return;
@@ -1080,7 +1084,7 @@ inline void VM::_log_s_data(const char* title) {
 auto& f = CAST(Function&, obj);
 ss << f.decl->code->name << "(...)";
 } else if(is_type(obj, tp_type)){
-Type t = OBJ_GET(Type, obj);
+Type t = PK_OBJ_GET(Type, obj);
 ss << "<class " + _all_types[t].name.escape() + ">";
 } else if(is_type(obj, tp_list)){
 auto& t = CAST(List&, obj);
@@ -1220,7 +1224,7 @@ inline PyObject* VM::vectorcall(int ARGC, int KWARGC, bool op_call){
 ArgsView args(p1 - ARGC - int(method_call), p1);

 if(is_non_tagged_type(callable, tp_native_func)){
-const auto& f = OBJ_GET(NativeFunc, callable);
+const auto& f = PK_OBJ_GET(NativeFunc, callable);
 if(KWARGC != 0) TypeError("native_func does not accept keyword arguments");
 PyObject* ret = f(this, args);
 s_data.reset(p0);
@@ -1327,12 +1331,12 @@ inline PyObject* VM::vectorcall(int ARGC, int KWARGC, bool op_call){
 DEF_SNAME(__new__);
 PyObject* new_f = find_name_in_mro(callable, __new__);
 PyObject* obj;
-#if DEBUG_EXTRA_CHECK
+#if PK_DEBUG_EXTRA_CHECK
 PK_ASSERT(new_f != nullptr);
 #endif
 if(new_f == cached_object__new__) {
 // fast path for object.__new__
-Type t = OBJ_GET(Type, callable);
+Type t = PK_OBJ_GET(Type, callable);
 obj= vm->heap.gcnew<DummyInstance>(t, {});
 }else{
 PUSH(new_f);
@@ -1382,7 +1386,7 @@ inline PyObject* VM::getattr(PyObject* obj, StrName name, bool throw_err){
 PyObject* objtype;
 // handle super() proxy
 if(is_non_tagged_type(obj, tp_super)){
-const Super& super = OBJ_GET(Super, obj);
+const Super& super = PK_OBJ_GET(Super, obj);
 obj = super.first;
 objtype = _t(super.second);
 }else{
@@ -1419,7 +1423,7 @@ inline PyObject* VM::get_unbound_method(PyObject* obj, StrName name, PyObject**
 PyObject* objtype;
 // handle super() proxy
 if(is_non_tagged_type(obj, tp_super)){
-const Super& super = OBJ_GET(Super, obj);
+const Super& super = PK_OBJ_GET(Super, obj);
 obj = super.first;
 objtype = _t(super.second);
 }else{
@@ -1456,7 +1460,7 @@ inline void VM::setattr(PyObject* obj, StrName name, PyObject* value){
 PyObject* objtype;
 // handle super() proxy
 if(is_non_tagged_type(obj, tp_super)){
-Super& super = OBJ_GET(Super, obj);
+Super& super = PK_OBJ_GET(Super, obj);
 obj = super.first;
 objtype = _t(super.second);
 }else{
@@ -1547,7 +1551,7 @@ inline void VM::bind__hash__(Type type, i64 (*f)(VM*, PyObject*)){
 i64 ret = lambda_get_userdata<i64(*)(VM*, PyObject*)>(args.begin())(vm, args[0]);
 return VAR(ret);
 });
-OBJ_GET(NativeFunc, nf).set_userdata(f);
+PK_OBJ_GET(NativeFunc, nf).set_userdata(f);
 }

 inline void VM::bind__len__(Type type, i64 (*f)(VM*, PyObject*)){
@@ -1557,7 +1561,7 @@ inline void VM::bind__len__(Type type, i64 (*f)(VM*, PyObject*)){
 i64 ret = lambda_get_userdata<i64(*)(VM*, PyObject*)>(args.begin())(vm, args[0]);
 return VAR(ret);
 });
-OBJ_GET(NativeFunc, nf).set_userdata(f);
+PK_OBJ_GET(NativeFunc, nf).set_userdata(f);
 }