mirror of https://github.com/pocketpy/pocketpy (synced 2025-10-21 12:00:18 +00:00)
add precompile
This commit is contained in:
parent 790e87f688, commit e02292c03d
@@ -121,6 +121,7 @@ class Compiler {
 
 public:
     Compiler(VM* vm, std::string_view source, const Str& filename, CompileMode mode, bool unknown_global_scope=false);
+    void precompile();
     CodeObject_ compile();
 };
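The new entry point sits alongside compile() in the Compiler declaration. A minimal usage sketch under the constructor signature above; the vm pointer, the source snippet, and EXEC_MODE are illustrative assumptions, not values taken from this commit:

    // Hedged sketch: assumes a constructed VM and that EXEC_MODE is a valid
    // CompileMode value; both are placeholders, not part of the diff.
    VM* vm = new VM();
    Compiler compiler(vm, "a = 1 + 2", "<precompile>", EXEC_MODE);
    compiler.precompile();   // dumps the token stream (see the implementation hunk below) to stdout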
@@ -59,14 +59,6 @@ struct Token{
 
     Str str() const { return Str(start, length);}
     std::string_view sv() const { return std::string_view(start, length);}
-
-    // Str info() const {
-    //     SStream ss;
-    //     ss << line << ": " << TK_STR(type) << " '" << (
-    //         sv()=="\n" ? "\\n" : sv()
-    //     ) << "'";
-    //     return ss.str();
-    // }
 };
 
 // https://docs.python.org/3/reference/expressions.html#operator-precedence
@@ -1225,6 +1225,35 @@ __EAT_DOTS_END:
     init_pratt_rules();
 }
 
+void Compiler::precompile(){
+    SStream ss;
+    ss << PK_VERSION << '\n';                   // L1: version string
+    ss << lexer.src->filename << '\n';          // L2: filename
+    ss << mode() << '\n';                       // L3: compile mode
+    ss << (int)unknown_global_scope << '\n';    // L4: unknown global scope
+    auto tokens = lexer.run();
+    ss << '=' << (int)tokens.size() << '\n';    // L5: token count
+    for(auto& token: tokens){   // iterate the tokens from the single lexer pass above
+        ss << (int)token.type << '\n';
+        int offset = token.start - lexer.src->source.c_str();
+        ss << offset << '\n';
+        ss << token.length << '\n';
+        ss << token.line << '\n';
+        ss << token.brackets_level << '\n';
+        // visit token value
+        std::visit([&ss](auto&& arg){
+            using T = std::decay_t<decltype(arg)>;
+            if constexpr(std::is_same_v<T, i64>){
+                ss << 'i' << arg << '\n';
+            }else if constexpr(std::is_same_v<T, f64>){
+                ss << 'f' << arg << '\n';
+            }else if constexpr(std::is_same_v<T, Str>){
+                ss << 's' << arg.escape() << '\n';
+            }
+        }, token.value);
+    }
+    std::cout << ss.str() << std::endl;
+}
+
 CodeObject_ Compiler::compile(){
     PK_ASSERT(i == 0)  // make sure it is the first time to compile
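precompile() writes a line-oriented text dump: four header lines (version, filename, compile mode, unknown_global_scope flag), a token-count line prefixed with '=', then five integer lines per token (type, source offset, length, line, brackets_level) followed by an optional tagged value line starting with 'i', 'f', or 's'. The sketch below reads that layout back using plain standard-library types; TokenRecord and read_dump are illustrative names, not pocketpy APIs, and the commit itself does not include a loader.

    // Standalone sketch of a reader for the dump layout above (assumption:
    // tokens whose value is neither i64, f64 nor Str emit no value line).
    #include <istream>
    #include <string>
    #include <vector>

    struct TokenRecord {                 // illustrative, not a pocketpy type
        int type, offset, length, line, brackets_level;
        char value_tag = 0;              // 'i', 'f' or 's' when a value follows
        std::string value;               // raw text of the value line
    };

    std::vector<TokenRecord> read_dump(std::istream& in) {
        std::string version, filename, mode, unknown_global_scope, count_line;
        std::getline(in, version);               // L1: version string
        std::getline(in, filename);              // L2: filename
        std::getline(in, mode);                  // L3: compile mode
        std::getline(in, unknown_global_scope);  // L4: unknown global scope
        std::getline(in, count_line);            // L5: '=' + token count
        std::vector<TokenRecord> tokens(std::stoi(count_line.substr(1)));
        for (auto& t : tokens) {
            in >> t.type >> t.offset >> t.length >> t.line >> t.brackets_level;
            in.ignore();                         // skip the trailing newline
            int c = in.peek();                   // value lines carry a tag char
            if (c == 'i' || c == 'f' || c == 's') {
                t.value_tag = static_cast<char>(in.get());
                std::getline(in, t.value);
            }
        }
        return tokens;
    }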
@@ -486,6 +486,7 @@ static bool is_unicode_Lo_char(uint32_t c) {
 }
 
 std::vector<Token> Lexer::run() {
+    PK_ASSERT(curr_char == src->source.c_str());
     while (lex_one_token());
     return std::move(nexts);
 }
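The added assertion documents that Lexer::run() is single-shot: curr_char only points at the start of the source before any token has been consumed, and return std::move(nexts) moves the buffer out, so a second call can never reproduce the first pass. A stripped-down stand-in showing that contract; SimpleLexer and its members are illustrative, not pocketpy code:

    #include <cassert>
    #include <string>
    #include <utility>
    #include <vector>

    // Minimal stand-in for the single-pass contract enforced above.
    struct SimpleLexer {
        std::string source;
        const char* curr_char;
        std::vector<char> nexts;

        explicit SimpleLexer(std::string src)
            : source(std::move(src)), curr_char(source.c_str()) {}

        std::vector<char> run() {
            assert(curr_char == source.c_str());  // mirrors PK_ASSERT in the diff
            while (*curr_char) nexts.push_back(*curr_char++);
            return std::move(nexts);              // moves the buffer out; it cannot be replayed
        }
    };

    int main() {
        SimpleLexer lx("abc");
        auto tokens = lx.run();     // first call succeeds
        // lx.run();                // a second call would fail the assertion
        return tokens.size() == 3 ? 0 : 1;
    }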