From 2f96054a4ece5f5a39a10d16c54b0d34b412eb07 Mon Sep 17 00:00:00 2001
From: blueloveTH
Date: Wed, 13 Nov 2024 10:51:54 +0800
Subject: [PATCH] remove unused code

---
 include/pocketpy/common/vector.h  |  18 ++----
 include/pocketpy/compiler/lexer.h |   5 +-
 include/pocketpy/config.h         |   4 +-
 src/common/sstream.c              |   6 +-
 src/common/vector.c               |  27 +-------
 src/compiler/compiler.c           |  91 +++++++++++++++------------
 src/compiler/lexer.c              | 100 +-----------------------------
 7 files changed, 69 insertions(+), 182 deletions(-)

diff --git a/include/pocketpy/common/vector.h b/include/pocketpy/common/vector.h
index c40a8b44..c5502300 100644
--- a/include/pocketpy/common/vector.h
+++ b/include/pocketpy/common/vector.h
@@ -1,21 +1,11 @@
 #pragma once
-#include "pocketpy/common/algorithm.h"
-
-#include
 #include
-#include
 #include
+#include
+#include
 
-typedef struct c11_array {
-    void* data;
-    int length;
-    int elem_size;
-} c11_array;
-
-void c11_array__ctor(c11_array* self, int elem_size, int length);
-void c11_array__dtor(c11_array* self);
-c11_array c11_array__copy(const c11_array* self);
+#include "algorithm.h"
 
 typedef struct c11_vector {
     void* data;
     int length;
@@ -31,7 +21,7 @@ void c11_vector__reserve(c11_vector* self, int capacity);
 void c11_vector__clear(c11_vector* self);
 void* c11_vector__emplace(c11_vector* self);
 bool c11_vector__contains(const c11_vector* self, void* elem);
-c11_array c11_vector__submit(c11_vector* self);
+void* c11_vector__submit(c11_vector* self, int* length);
 
 #define c11__getitem(T, self, index) (((T*)(self)->data)[index])
 #define c11__setitem(T, self, index, value) ((T*)(self)->data)[index] = value;
diff --git a/include/pocketpy/compiler/lexer.h b/include/pocketpy/compiler/lexer.h
index a54f3140..1cb00e2d 100644
--- a/include/pocketpy/compiler/lexer.h
+++ b/include/pocketpy/compiler/lexer.h
@@ -84,9 +84,6 @@ enum Precedence {
     PREC_HIGHEST,
 };
 
-typedef c11_array TokenArray;
-
-Error* Lexer__process(SourceData_ src, TokenArray* out_tokens);
-void TokenArray__dtor(TokenArray* self);
+Error* Lexer__process(SourceData_ src, Token** out_tokens, int* out_length);
 
 #define Token__sv(self) (c11_sv){(self)->start, (self)->length}
diff --git a/include/pocketpy/config.h b/include/pocketpy/config.h
index e138f6bb..dff29d0f 100644
--- a/include/pocketpy/config.h
+++ b/include/pocketpy/config.h
@@ -1,10 +1,10 @@
 #pragma once
 
 // clang-format off
-#define PK_VERSION "2.0.1"
+#define PK_VERSION "2.0.2"
 #define PK_VERSION_MAJOR 2
 #define PK_VERSION_MINOR 0
-#define PK_VERSION_PATCH 1
+#define PK_VERSION_PATCH 2
 
 
 /*************** feature settings ***************/
diff --git a/src/common/sstream.c b/src/common/sstream.c
index 5b12dde8..371e1d89 100644
--- a/src/common/sstream.c
+++ b/src/common/sstream.c
@@ -140,9 +140,9 @@ void c11_sbuf__write_ptr(c11_sbuf* self, void* p) {
 
 c11_string* c11_sbuf__submit(c11_sbuf* self) {
     c11_vector__push(char, &self->data, '\0');
-    c11_array arr = c11_vector__submit(&self->data);
-    c11_string* retval = arr.data;
-    retval->size = arr.length - sizeof(c11_string) - 1;
+    int arr_length;
+    c11_string* retval = c11_vector__submit(&self->data, &arr_length);
+    retval->size = arr_length - sizeof(c11_string) - 1;
     return retval;
 }
 
diff --git a/src/common/vector.c b/src/common/vector.c
index ca890792..87d9370f 100644
--- a/src/common/vector.c
+++ b/src/common/vector.c
@@ -3,24 +3,6 @@
 #include
 #include
 
-void c11_array__ctor(c11_array* self, int elem_size, int length){
-    self->data = malloc(elem_size * length);
-    self->length = length;
-    self->elem_size = elem_size;
-}
-
-void c11_array__dtor(c11_array* self){
-    free(self->data);
-    self->data = NULL;
-    self->length = 0;
-}
-
-c11_array c11_array__copy(const c11_array* self){
-    c11_array retval;
-    c11_array__ctor(&retval, self->elem_size, self->length);
-    memcpy(retval.data, self->data, self->elem_size * self->length);
-    return retval;
-}
 
 void c11_vector__ctor(c11_vector* self, int elem_size){
     self->data = NULL;
@@ -71,12 +53,9 @@ bool c11_vector__contains(const c11_vector *self, void *elem){
     return false;
 }
 
-c11_array c11_vector__submit(c11_vector* self){
-    c11_array retval = {
-        .data = self->data,
-        .length = self->length,
-        .elem_size = self->elem_size
-    };
+void* c11_vector__submit(c11_vector* self, int* length){
+    void* retval = self->data;
+    *length = self->length;
     self->data = NULL;
     self->length = 0;
     self->capacity = 0;
diff --git a/src/compiler/compiler.c b/src/compiler/compiler.c
index 9f880dab..5572b570 100644
--- a/src/compiler/compiler.c
+++ b/src/compiler/compiler.c
@@ -445,23 +445,26 @@ static DictItemExpr* DictItemExpr__new(int line) {
 // ListExpr, DictExpr, SetExpr, TupleExpr
 typedef struct SequenceExpr {
     EXPR_COMMON_HEADER
-    c11_array /*T=Expr* */ items;
+    Expr** items;
+    int itemCount;
     Opcode opcode;
 } SequenceExpr;
 
 static void SequenceExpr__emit_(Expr* self_, Ctx* ctx) {
     SequenceExpr* self = (SequenceExpr*)self_;
-    for(int i = 0; i < self->items.length; i++) {
-        Expr* item = c11__getitem(Expr*, &self->items, i);
+    for(int i = 0; i < self->itemCount; i++) {
+        Expr* item = self->items[i];
         vtemit_(item, ctx);
     }
-    Ctx__emit_(ctx, self->opcode, self->items.length, self->line);
+    Ctx__emit_(ctx, self->opcode, self->itemCount, self->line);
 }
 
 void SequenceExpr__dtor(Expr* self_) {
     SequenceExpr* self = (SequenceExpr*)self_;
-    c11__foreach(Expr*, &self->items, e) vtdelete(*e);
-    c11_array__dtor(&self->items);
+    for(int i = 0; i < self->itemCount; i++) {
+        vtdelete(self->items[i]);
+    }
+    free(self->items);
 }
 
 bool TupleExpr__emit_store(Expr* self_, Ctx* ctx) {
@@ -469,8 +472,8 @@ bool TupleExpr__emit_store(Expr* self_, Ctx* ctx) {
     // TOS is an iterable
     // items may contain StarredExpr, we should check it
     int starred_i = -1;
-    for(int i = 0; i < self->items.length; i++) {
-        Expr* e = c11__getitem(Expr*, &self->items, i);
+    for(int i = 0; i < self->itemCount; i++) {
+        Expr* e = self->items[i];
         if(e->vt->is_starred) {
             if(((StarredExpr*)e)->level > 0) {
                 if(starred_i == -1)
@@ -483,24 +486,24 @@ bool TupleExpr__emit_store(Expr* self_, Ctx* ctx) {
     if(starred_i == -1) {
         Bytecode* prev = c11__at(Bytecode, &ctx->co->codes, ctx->co->codes.length - 1);
-        if(prev->op == OP_BUILD_TUPLE && prev->arg == self->items.length) {
+        if(prev->op == OP_BUILD_TUPLE && prev->arg == self->itemCount) {
             // build tuple and unpack it is meaningless
             Ctx__revert_last_emit_(ctx);
         } else {
-            Ctx__emit_(ctx, OP_UNPACK_SEQUENCE, self->items.length, self->line);
+            Ctx__emit_(ctx, OP_UNPACK_SEQUENCE, self->itemCount, self->line);
         }
     } else {
         // starred assignment target must be in a tuple
-        if(self->items.length == 1) return false;
+        if(self->itemCount == 1) return false;
         // starred assignment target must be the last one (differ from cpython)
-        if(starred_i != self->items.length - 1) return false;
+        if(starred_i != self->itemCount - 1) return false;
         // a,*b = [1,2,3]
         // stack is [1,2,3] -> [1,[2,3]]
-        Ctx__emit_(ctx, OP_UNPACK_EX, self->items.length - 1, self->line);
+        Ctx__emit_(ctx, OP_UNPACK_EX, self->itemCount - 1, self->line);
     }
 
     // do reverse emit
-    for(int i = self->items.length - 1; i >= 0; i--) {
-        Expr* e = c11__getitem(Expr*, &self->items, i);
+    for(int i = self->itemCount - 1; i >= 0; i--) {
+        Expr* e = self->items[i];
         bool ok = vtemit_store(e, ctx);
         if(!ok) return false;
     }
@@ -509,8 +512,9 @@ bool TupleExpr__emit_del(Expr* self_, Ctx* ctx) {
     SequenceExpr* self = (SequenceExpr*)self_;
-    c11__foreach(Expr*, &self->items, e) {
-        bool ok = vtemit_del(*e, ctx);
+    for(int i = 0; i < self->itemCount; i++) {
+        Expr* e = self->items[i];
+        bool ok = vtemit_del(e, ctx);
         if(!ok) return false;
     }
     return true;
 }
@@ -522,7 +526,8 @@ static SequenceExpr* SequenceExpr__new(int line, const ExprVt* vt, int count, Op
     self->vt = vt;
     self->line = line;
     self->opcode = opcode;
-    c11_array__ctor(&self->items, sizeof(Expr*), count);
+    self->items = malloc(sizeof(Expr*) * count);
+    self->itemCount = count;
     return self;
 }
@@ -1295,29 +1300,41 @@ const static PrattRule rules[TK__COUNT__];
 
 typedef struct Compiler {
     SourceData_ src; // weakref
-    TokenArray tokens;
-    int i;
+
+    Token* tokens;
+    int tokens_length;
+    int i; // current token index
     c11_vector /*T=CodeEmitContext*/ contexts;
 } Compiler;
 
-static void Compiler__ctor(Compiler* self, SourceData_ src, TokenArray tokens) {
+static void Compiler__ctor(Compiler* self, SourceData_ src, Token* tokens, int tokens_length) {
     self->src = src;
     self->tokens = tokens;
+    self->tokens_length = tokens_length;
     self->i = 0;
     c11_vector__ctor(&self->contexts, sizeof(Ctx));
 }
 
 static void Compiler__dtor(Compiler* self) {
-    TokenArray__dtor(&self->tokens);
+    // free tokens
+    for(int i = 0; i < self->tokens_length; i++) {
+        if(self->tokens[i].value.index == TokenValue_STR) {
+            // free internal string
+            c11_string__delete(self->tokens[i].value._str);
+        }
+    }
+    free(self->tokens);
+
+    // free contexts
     c11__foreach(Ctx, &self->contexts, ctx) Ctx__dtor(ctx);
     c11_vector__dtor(&self->contexts);
 }
 
 /**************************************/
 
-#define tk(i) c11__at(Token, &self->tokens, i)
-#define prev() tk(self->i - 1)
-#define curr() tk(self->i)
-#define next() tk(self->i + 1)
+#define tk(i) (&self->tokens[i])
+#define prev() (&self->tokens[self->i - 1])
+#define curr() (&self->tokens[self->i])
+#define next() (&self->tokens[self->i + 1])
 #define advance() self->i++
 #define mode() self->src->mode
@@ -1347,7 +1364,7 @@ Error* SyntaxError(Compiler* self, const char* fmt, ...) {
     Error* err = malloc(sizeof(Error));
     err->src = self->src;
     PK_INCREF(self->src);
-    Token* t = self->i == self->tokens.length ? prev() : curr();
+    Token* t = self->i == self->tokens_length ? prev() : curr();
     err->lineno = t->line;
     va_list args;
     va_start(args, fmt);
@@ -1424,8 +1441,7 @@ static Error* EXPR_TUPLE_ALLOW_SLICE(Compiler* self, bool allow_slice) {
     // pop `count` expressions from the stack and merge them into a TupleExpr
     SequenceExpr* e = TupleExpr__new(prev()->line, count);
     for(int i = count - 1; i >= 0; i--) {
-        Expr* item = Ctx__s_popx(ctx());
-        c11__setitem(Expr*, &e->items, i, item);
+        e->items[i] = Ctx__s_popx(ctx());
     }
     Ctx__s_push(ctx(), (Expr*)e);
     return NULL;
@@ -1450,8 +1466,7 @@ static Error* EXPR_VARS(Compiler* self) {
     if(count > 1) {
         SequenceExpr* e = TupleExpr__new(prev()->line, count);
         for(int i = count - 1; i >= 0; i--) {
-            Expr* item = Ctx__s_popx(ctx());
-            c11__setitem(Expr*, &e->items, i, item);
+            e->items[i] = Ctx__s_popx(ctx());
         }
         Ctx__s_push(ctx(), (Expr*)e);
     }
@@ -1556,8 +1571,7 @@ static Error* exprFString(Compiler* self) {
     if(match(TK_FSTR_END)) {
         SequenceExpr* e = FStringExpr__new(line, count);
         for(int i = count - 1; i >= 0; i--) {
-            Expr* item = Ctx__s_popx(ctx());
-            c11__setitem(Expr*, &e->items, i, item);
+            e->items[i] = Ctx__s_popx(ctx());
         }
         Ctx__s_push(ctx(), (Expr*)e);
         return NULL;
@@ -1790,7 +1804,7 @@ static Error* exprList(Compiler* self) {
     consume(TK_RBRACKET);
     SequenceExpr* e = ListExpr__new(line, count);
     for(int i = count - 1; i >= 0; i--) {
-        c11__setitem(Expr*, &e->items, i, Ctx__s_popx(ctx()));
+        e->items[i] = Ctx__s_popx(ctx());
     }
     Ctx__s_push(ctx(), (Expr*)e);
     return NULL;
@@ -1836,7 +1850,7 @@ static Error* exprMap(Compiler* self) {
         se = SetExpr__new(line, count);
     }
     for(int i = count - 1; i >= 0; i--) {
-        c11__setitem(Expr*, &se->items, i, Ctx__s_popx(ctx()));
+        se->items[i] = Ctx__s_popx(ctx());
     }
     Ctx__s_push(ctx(), (Expr*)se);
     return NULL;
@@ -2766,8 +2780,9 @@ Error* Compiler__compile(Compiler* self, CodeObject* out) {
 }
 
 Error* pk_compile(SourceData_ src, CodeObject* out) {
-    TokenArray tokens;
-    Error* err = Lexer__process(src, &tokens);
+    Token* tokens;
+    int tokens_length;
+    Error* err = Lexer__process(src, &tokens, &tokens_length);
     if(err) return err;
 
 #if 0
@@ -2791,7 +2806,7 @@ Error* pk_compile(SourceData_ src, CodeObject* out) {
 #endif
 
     Compiler compiler;
-    Compiler__ctor(&compiler, src, tokens);
+    Compiler__ctor(&compiler, src, tokens, tokens_length);
     CodeObject__ctor(out, src, c11_string__sv(src->filename));
     err = Compiler__compile(&compiler, out);
     if(err) {
diff --git a/src/compiler/lexer.c b/src/compiler/lexer.c
index ffa2f632..1d5dfde6 100644
--- a/src/compiler/lexer.c
+++ b/src/compiler/lexer.c
@@ -18,19 +18,6 @@ typedef struct Lexer {
     c11_vector /*T=int*/ indents;
 } Lexer;
 
-typedef struct TokenDeserializer {
-    const char* curr;
-    const char* source;
-} TokenDeserializer;
-
-void TokenDeserializer__ctor(TokenDeserializer* self, const char* source);
-bool TokenDeserializer__match_char(TokenDeserializer* self, char c);
-c11_sv TokenDeserializer__read_string(TokenDeserializer* self, char c);
-c11_string* TokenDeserializer__read_string_from_hex(TokenDeserializer* self, char c);
-int TokenDeserializer__read_count(TokenDeserializer* self);
-int64_t TokenDeserializer__read_uint(TokenDeserializer* self, char c);
-double TokenDeserializer__read_float(TokenDeserializer* self, char c);
-
 const static TokenValue EmptyTokenValue;
 
 static Error* lex_one_token(Lexer* self, bool* eof, bool is_fstring);
@@ -186,7 +173,7 @@ static bool eat_indentation(Lexer* self) {
 
 static bool is_possible_number_char(char c) {
     switch(c) {
-    // clang-format off
+        // clang-format off
         case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9':
         case 'a': case 'b': case 'c': case 'd': case 'e': case 'f':
         case 'A': case 'B': case 'C': case 'D': case 'E': case 'F':
@@ -593,7 +580,7 @@ static Error* lex_one_token(Lexer* self, bool* eof, bool is_fstring) {
     return NULL;
 }
 
-Error* Lexer__process(SourceData_ src, TokenArray* out_tokens) {
+Error* Lexer__process(SourceData_ src, Token** out_tokens, int* out_length) {
     Lexer lexer;
     Lexer__ctor(&lexer, src);
 
@@ -612,20 +599,12 @@ Error* Lexer__process(SourceData_ src, TokenArray* out_tokens) {
         }
     }
     // set out_tokens
-    *out_tokens = c11_vector__submit(&lexer.nexts);
+    *out_tokens = c11_vector__submit(&lexer.nexts, out_length);
 
     Lexer__dtor(&lexer);
     return NULL;
 }
 
-void TokenArray__dtor(TokenArray* self) {
-    Token* data = self->data;
-    for(int i = 0; i < self->length; i++) {
-        if(data[i].value.index == TokenValue_STR) { c11_string__delete(data[i].value._str); }
-    }
-    c11_array__dtor(self);
-}
-
 const char* TokenSymbols[] = {
     "@eof",
     "@eol",
@@ -729,76 +708,3 @@ const char* TokenSymbols[] = {
     "yield",
 };
 
-void TokenDeserializer__ctor(TokenDeserializer* self, const char* source) {
-    self->curr = source;
-    self->source = source;
-}
-
-bool TokenDeserializer__match_char(TokenDeserializer* self, char c) {
-    if(*self->curr == c) {
-        self->curr++;
-        return true;
-    }
-    return false;
-}
-
-c11_sv TokenDeserializer__read_string(TokenDeserializer* self, char c) {
-    const char* start = self->curr;
-    while(*self->curr != c)
-        self->curr++;
-    c11_sv retval = {start, (int)(self->curr - start)};
-    self->curr++; // skip the delimiter
-    return retval;
-}
-
-c11_string* TokenDeserializer__read_string_from_hex(TokenDeserializer* self, char c) {
-    c11_sv sv = TokenDeserializer__read_string(self, c);
-    const char* s = sv.data;
-    c11_sbuf ss;
-    c11_sbuf__ctor(&ss);
-    for(int i = 0; i < sv.size; i += 2) {
-        char c = 0;
-        if(s[i] >= '0' && s[i] <= '9')
-            c += s[i] - '0';
-        else if(s[i] >= 'a' && s[i] <= 'f')
-            c += s[i] - 'a' + 10;
-        else
-            assert(false);
-        c <<= 4;
-        if(s[i + 1] >= '0' && s[i + 1] <= '9')
-            c += s[i + 1] - '0';
-        else if(s[i + 1] >= 'a' && s[i + 1] <= 'f')
-            c += s[i + 1] - 'a' + 10;
-        else
-            assert(false);
-        c11_sbuf__write_char(&ss, c);
-    }
-    return c11_sbuf__submit(&ss);
-}
-
-int TokenDeserializer__read_count(TokenDeserializer* self) {
-    assert(*self->curr == '=');
-    self->curr++;
-    return TokenDeserializer__read_uint(self, '\n');
-}
-
-int64_t TokenDeserializer__read_uint(TokenDeserializer* self, char c) {
-    int64_t out = 0;
-    while(*self->curr != c) {
-        out = out * 10 + (*self->curr - '0');
-        self->curr++;
-    }
-    self->curr++; // skip the delimiter
-    return out;
-}
-
-double TokenDeserializer__read_float(TokenDeserializer* self, char c) {
-    c11_sv sv = TokenDeserializer__read_string(self, c);
-    // TODO: optimize this
-    c11_string* nullterm = c11_string__new2(sv.data, sv.size);
-    char* end;
-    double retval = strtod(nullterm->data, &end);
-    c11_string__delete(nullterm);
-    assert(*end == 0);
-    return retval;
-}