some rename

This commit is contained in:
blueloveTH 2024-06-20 00:27:07 +08:00
parent 1431cf8cde
commit dbd75615d9
3 changed files with 37 additions and 37 deletions

View File

@@ -9,12 +9,12 @@ extern "C" {
extern const char* pk_TokenSymbols[];
typedef struct pkpy_TokenDeserializer {
typedef struct pk_TokenDeserializer {
const char* curr;
const char* source;
} pkpy_TokenDeserializer;
} pk_TokenDeserializer;
enum TokenIndex{
typedef enum TokenIndex{
TK_EOF, TK_EOL, TK_SOF,
TK_ID, TK_NUM, TK_STR, TK_FSTR, TK_LONG, TK_BYTES, TK_IMAG,
TK_INDENT, TK_DEDENT,
@@ -37,15 +37,15 @@ enum TokenIndex{
TK_TRY, TK_WHILE, TK_WITH, TK_YIELD,
/***************/
TK__COUNT__
};
} TokenIndex;
void pkpy_TokenDeserializer__ctor(pkpy_TokenDeserializer* self, const char* source);
bool pkpy_TokenDeserializer__match_char(pkpy_TokenDeserializer* self, char c);
c11_string pkpy_TokenDeserializer__read_string(pkpy_TokenDeserializer* self, char c);
pkpy_Str pkpy_TokenDeserializer__read_string_from_hex(pkpy_TokenDeserializer* self, char c);
int pkpy_TokenDeserializer__read_count(pkpy_TokenDeserializer* self);
int64_t pkpy_TokenDeserializer__read_uint(pkpy_TokenDeserializer* self, char c);
double pkpy_TokenDeserializer__read_float(pkpy_TokenDeserializer* self, char c);
void pk_TokenDeserializer__ctor(pk_TokenDeserializer* self, const char* source);
bool pk_TokenDeserializer__match_char(pk_TokenDeserializer* self, char c);
c11_string pk_TokenDeserializer__read_string(pk_TokenDeserializer* self, char c);
pkpy_Str pk_TokenDeserializer__read_string_from_hex(pk_TokenDeserializer* self, char c);
int pk_TokenDeserializer__read_count(pk_TokenDeserializer* self);
int64_t pk_TokenDeserializer__read_uint(pk_TokenDeserializer* self, char c);
double pk_TokenDeserializer__read_float(pk_TokenDeserializer* self, char c);
#ifdef __cplusplus
}

View File

@@ -27,12 +27,12 @@ const char* pk_TokenSymbols[] = {
"try", "while", "with", "yield",
};
void pkpy_TokenDeserializer__ctor(pkpy_TokenDeserializer* self, const char* source){
void pk_TokenDeserializer__ctor(pk_TokenDeserializer* self, const char* source){
self->curr = source;
self->source = source;
}
bool pkpy_TokenDeserializer__match_char(pkpy_TokenDeserializer* self, char c){
bool pk_TokenDeserializer__match_char(pk_TokenDeserializer* self, char c){
if(*self->curr == c) {
self->curr++;
return true;
@@ -40,7 +40,7 @@ bool pkpy_TokenDeserializer__match_char(pkpy_TokenDeserializer* self, char c){
return false;
}
c11_string pkpy_TokenDeserializer__read_string(pkpy_TokenDeserializer* self, char c){
c11_string pk_TokenDeserializer__read_string(pk_TokenDeserializer* self, char c){
const char* start = self->curr;
while(*self->curr != c)
self->curr++;
@@ -49,8 +49,8 @@ c11_string pkpy_TokenDeserializer__read_string(pkpy_TokenDeserializer* self, cha
return retval;
}
pkpy_Str pkpy_TokenDeserializer__read_string_from_hex(pkpy_TokenDeserializer* self, char c){
c11_string sv = pkpy_TokenDeserializer__read_string(self, c);
pkpy_Str pk_TokenDeserializer__read_string_from_hex(pk_TokenDeserializer* self, char c){
c11_string sv = pk_TokenDeserializer__read_string(self, c);
const char* s = sv.data;
char* buffer = (char*)malloc(sv.size / 2 + 1);
for(int i = 0; i < sv.size; i += 2) {
@@ -79,13 +79,13 @@ pkpy_Str pkpy_TokenDeserializer__read_string_from_hex(pkpy_TokenDeserializer* se
};
}
int pkpy_TokenDeserializer__read_count(pkpy_TokenDeserializer* self){
int pk_TokenDeserializer__read_count(pk_TokenDeserializer* self){
assert(*self->curr == '=');
self->curr++;
return pkpy_TokenDeserializer__read_uint(self, '\n');
return pk_TokenDeserializer__read_uint(self, '\n');
}
int64_t pkpy_TokenDeserializer__read_uint(pkpy_TokenDeserializer* self, char c){
int64_t pk_TokenDeserializer__read_uint(pk_TokenDeserializer* self, char c){
int64_t out = 0;
while(*self->curr != c) {
out = out * 10 + (*self->curr - '0');
@@ -95,8 +95,8 @@ int64_t pkpy_TokenDeserializer__read_uint(pkpy_TokenDeserializer* self, char c){
return out;
}
double pkpy_TokenDeserializer__read_float(pkpy_TokenDeserializer* self, char c){
c11_string sv = pkpy_TokenDeserializer__read_string(self, c);
double pk_TokenDeserializer__read_float(pk_TokenDeserializer* self, char c){
c11_string sv = pk_TokenDeserializer__read_string(self, c);
pkpy_Str nullterm;
pkpy_Str__ctor2(&nullterm, sv.data, sv.size);
char* end;

View File

@@ -533,34 +533,34 @@ Error* Lexer::run() noexcept{
}
Error* Lexer::from_precompiled() noexcept{
pkpy_TokenDeserializer deserializer;
pkpy_TokenDeserializer__ctor(&deserializer, pkpy_Str__data(&src->source));
pk_TokenDeserializer deserializer;
pk_TokenDeserializer__ctor(&deserializer, pkpy_Str__data(&src->source));
deserializer.curr += 5; // skip "pkpy:"
c11_string version = pkpy_TokenDeserializer__read_string(&deserializer, '\n');
c11_string version = pk_TokenDeserializer__read_string(&deserializer, '\n');
if(c11_string__cmp3(version, PK_VERSION) != 0) {
return SyntaxError("precompiled version mismatch");
}
if(pkpy_TokenDeserializer__read_uint(&deserializer, '\n') != (i64)src->mode){
if(pk_TokenDeserializer__read_uint(&deserializer, '\n') != (i64)src->mode){
return SyntaxError("precompiled mode mismatch");
}
int count = pkpy_TokenDeserializer__read_count(&deserializer);
int count = pk_TokenDeserializer__read_count(&deserializer);
c11_vector* precompiled_tokens = &src->_precompiled_tokens;
for(int i = 0; i < count; i++) {
c11_string item = pkpy_TokenDeserializer__read_string(&deserializer, '\n');
c11_string item = pk_TokenDeserializer__read_string(&deserializer, '\n');
pkpy_Str copied_item;
pkpy_Str__ctor2(&copied_item, item.data, item.size);
c11_vector__push(pkpy_Str, precompiled_tokens, copied_item);
}
count = pkpy_TokenDeserializer__read_count(&deserializer);
count = pk_TokenDeserializer__read_count(&deserializer);
for(int i = 0; i < count; i++) {
Token t;
t.type = (TokenIndex)pkpy_TokenDeserializer__read_uint(&deserializer, ',');
t.type = (TokenIndex)pk_TokenDeserializer__read_uint(&deserializer, ',');
if(is_raw_string_used(t.type)) {
i64 index = pkpy_TokenDeserializer__read_uint(&deserializer, ',');
i64 index = pk_TokenDeserializer__read_uint(&deserializer, ',');
pkpy_Str* p = c11__at(pkpy_Str, precompiled_tokens, index);
t.start = pkpy_Str__data(p);
t.length = c11__getitem(pkpy_Str, precompiled_tokens, index).size;
@@ -569,28 +569,28 @@ Error* Lexer::from_precompiled() noexcept{
t.length = 0;
}
if(pkpy_TokenDeserializer__match_char(&deserializer, ',')) {
if(pk_TokenDeserializer__match_char(&deserializer, ',')) {
t.line = nexts.back().line;
} else {
t.line = (int)pkpy_TokenDeserializer__read_uint(&deserializer, ',');
t.line = (int)pk_TokenDeserializer__read_uint(&deserializer, ',');
}
if(pkpy_TokenDeserializer__match_char(&deserializer, ',')) {
if(pk_TokenDeserializer__match_char(&deserializer, ',')) {
t.brackets_level = nexts.back().brackets_level;
} else {
t.brackets_level = (int)pkpy_TokenDeserializer__read_uint(&deserializer, ',');
t.brackets_level = (int)pk_TokenDeserializer__read_uint(&deserializer, ',');
}
char type = (*deserializer.curr++); // read_char
switch(type) {
case 'I':
t.value = pkpy_TokenDeserializer__read_uint(&deserializer, '\n');
t.value = pk_TokenDeserializer__read_uint(&deserializer, '\n');
break;
case 'F':
t.value = pkpy_TokenDeserializer__read_float(&deserializer, '\n');
t.value = pk_TokenDeserializer__read_float(&deserializer, '\n');
break;
case 'S': {
pkpy_Str res = pkpy_TokenDeserializer__read_string_from_hex(&deserializer, '\n');
pkpy_Str res = pk_TokenDeserializer__read_string_from_hex(&deserializer, '\n');
t.value = Str(std::move(res));
} break;
default: