From 00ca9b9af6a9eb0842ad565494d5de6f115cce47 Mon Sep 17 00:00:00 2001 From: GrieferAtWork Date: Tue, 28 Nov 2023 18:52:36 +0100 Subject: [PATCH] Fix some problems with sequence type assumptions Also fix `[x...]` sometimes re-returning `x` when it should actually create a new copy in the form of a list. --- include/deemon/compiler/optimize.h | 6 + src/deemon/compiler/asm/genasm.c | 13 +- src/deemon/compiler/asm/gencall.c | 313 ++++++-------------------- src/deemon/compiler/optimize/traits.c | 127 ++++++++++- src/deemon/execute/code-exec.c.inl | 4 +- util/test-errors.dee | 4 +- util/test/compiler-call-non-tuple.dee | 66 ++++++ util/test/compiler-sequence-copy.dee | 30 +++ 8 files changed, 316 insertions(+), 247 deletions(-) create mode 100644 util/test/compiler-call-non-tuple.dee create mode 100644 util/test/compiler-sequence-copy.dee diff --git a/include/deemon/compiler/optimize.h b/include/deemon/compiler/optimize.h index bccb601e1..07364811f 100644 --- a/include/deemon/compiler/optimize.h +++ b/include/deemon/compiler/optimize.h @@ -368,6 +368,12 @@ ast_predict_type_ex(struct ast *__restrict self, unsigned int flags); /* Same as `ast_predict_type()', but don't take type annotations into account */ #define ast_predict_type_noanno(self) ast_predict_type_ex(self, AST_PREDICT_TYPE_F_NOANNO) +/* Predict the reference count of a given AST at runtime (if predictable) + * If not predictable, return `0' (which is never a valid reference count) */ INTDEF WUNUSED NONNULL((1)) Dee_refcnt_t DFCALL ast_predict_object_refcnt(struct ast *__restrict self); + +#define ast_predict_object_shared(self) (ast_predict_object_refcnt(self) != 1) #define CONSTEXPR_ILLEGAL 0 /* Use of this object is not allowed. 
*/ diff --git a/src/deemon/compiler/asm/genasm.c b/src/deemon/compiler/asm/genasm.c index bb06abb06..c99679ae0 100644 --- a/src/deemon/compiler/asm/genasm.c +++ b/src/deemon/compiler/asm/genasm.c @@ -242,7 +242,8 @@ struct seqops { STATIC_ASSERT((AST_FMULTIPLE_GENERIC & 3) <= 1); STATIC_ASSERT((AST_FMULTIPLE_GENERIC_KEYS & 3) == (AST_FMULTIPLE_DICT & 3)); -PRIVATE struct seqops seqops_info[4] = { +INTDEF struct seqops seqops_info[4]; +INTERN struct seqops seqops_info[4] = { /* [AST_FMULTIPLE_TUPLE & 3] = */ { &DeeTuple_Type, { ASM_PACK_TUPLE, ASM16_PACK_TUPLE }, ASM_CAST_TUPLE }, /* [AST_FMULTIPLE_LIST & 3] = */ { &DeeList_Type, { ASM_PACK_LIST, ASM16_PACK_LIST }, ASM_CAST_LIST }, /* [AST_FMULTIPLE_HASHSET & 3] = */ { &DeeHashSet_Type, { ASM_PACK_HASHSET, ASM16_PACK_HASHSET }, ASM_CAST_HASHSET }, @@ -499,8 +500,12 @@ INTERN WUNUSED NONNULL((1)) int /* The AST starts with an expand expression. * Because of that, we have to make sure that the entire * branch gets the correct type by casting now. 
*/ - if (ast_predict_type(elem->a_expand) != - seqops_info[self->a_flag & 3].so_typ) { + DeeTypeObject *expected_type = seqops_info[self->a_flag & 3].so_typ; + if ((expected_type == &DeeTuple_Type /* Immutable sequence type */ || + !ast_predict_object_shared(elem->a_expand)) && + (ast_predict_type(elem->a_expand) == expected_type)) { + /* Sequence type is immutable, or not shared */ + } else { if (asm_putddi(self)) goto err; if unlikely(cast_sequence(self->a_flag)) @@ -511,7 +516,7 @@ INTERN WUNUSED NONNULL((1)) int active_size = 0; } else { if (need_all) { - if unlikely(active_size == UINT16_MAX) { + if unlikely(active_size >= UINT16_MAX) { PERRAST(self, W_ASM_SEQUENCE_TOO_LONG); goto err; } diff --git a/src/deemon/compiler/asm/gencall.c b/src/deemon/compiler/asm/gencall.c index 4d477a8ac..699b2f59f 100644 --- a/src/deemon/compiler/asm/gencall.c +++ b/src/deemon/compiler/asm/gencall.c @@ -120,6 +120,25 @@ ast_assemble_function_refargs(struct ast *__restrict function_ast, return NULL; } +PRIVATE WUNUSED NONNULL((1)) int +(DCALL ast_genasm_one_as_tuple)(struct ast *__restrict self) { + if (ast_genasm_one(self, ASM_G_FPUSHRES)) + goto err; + /* DONT use ast_predict_type here: that one can only be used when + * the assumption not being met results in weak undefined behavior. However, + * if the args-operand in a call really isn't a tuple, the results are hard + * undefined behavior (and probably an interpreter crash) */ + if (ast_predict_type_noanno(self) != &DeeTuple_Type) { + if (asm_putddi(self)) + goto err; + if (asm_gcast_tuple()) + goto err; + } + return 0; +err: + return -1; +} + PRIVATE int DCALL asm_gcall_func(struct ast *__restrict func, struct ast *__restrict args, @@ -268,18 +287,8 @@ asm_gcall_func(struct ast *__restrict func, /* Fallback: Push the arguments as a tuple, then concat that with refargs. 
*/ generic_call: - if (ast_genasm_one(args, ASM_G_FPUSHRES)) + if (ast_genasm_one_as_tuple(args)) goto err_refargv; - /* DONT use ast_predict_type here: that one can only be used when - * the assumption not being met results in weak undefined behavior. However, - * if the args-operand in a call really isn't a tuple, the results are hard - * undefined behavior (and probably an interpreter crash) */ - if (ast_predict_type_noanno(args) != &DeeTuple_Type) { - if (asm_putddi(args)) - goto err_refargv; - if (asm_gcast_tuple()) - goto err_refargv; - } if (refargv) { if (refargc) { /* Push all of the reference argument symbols. */ @@ -883,42 +892,23 @@ asm_gcall_expr(struct ast *__restrict func, } else if (attr->ca_flag & CLASS_ATTRIBUTE_FMETHOD) { if (asm_gpush_symbol(class_sym, func)) goto err; /* func, class_sym */ - if (ast_genasm_one(args, ASM_G_FPUSHRES)) - goto err; /* func, class_sym, args */ - /* DONT use ast_predict_type here: that one can only be used when - * the assumption not being met results in weak undefined behavior. However, - * if the args-operand in a call really isn't a tuple, the results are hard - * undefined behavior (and probably an interpreter crash) */ - if (ast_predict_type_noanno(args) != &DeeTuple_Type) { - if (asm_putddi(args)) - goto err; - if (asm_gcast_tuple()) - goto err; - } + if (ast_genasm_one_as_tuple(args)) + goto err; /* func, class_sym, Tuple(args) */ if (asm_putddi(ddi_ast)) goto err; if (asm_gthiscall_tuple()) goto err; /* result */ goto pop_unused; } - if (ast_genasm_one(args, ASM_G_FPUSHRES)) - goto err; /* func, args */ - /* DONT use ast_predict_type here: that one can only be used when - * the assumption not being met results in weak undefined behavior. 
However, - * if the args-operand in a call really isn't a tuple, the results are hard - * undefined behavior (and probably an interpreter crash) */ - if (ast_predict_type_noanno(args) != &DeeTuple_Type) { - if (asm_putddi(args)) - goto err; - if (asm_gcast_tuple()) - goto err; - } + if (ast_genasm_one_as_tuple(args)) + goto err; /* func, Tuple(args) */ if (asm_putddi(ddi_ast)) goto err; if (asm_gcall_tuple()) goto err; /* result */ goto pop_unused; } + /* The attribute must be accessed as virtual. */ if unlikely(asm_check_thiscall(funsym, func)) goto err; @@ -929,8 +919,8 @@ asm_gcall_expr(struct ast *__restrict func, goto err; if (this_sym->s_type == SYMBOL_TYPE_THIS && !SYMBOL_MUST_REFERENCE_THIS(this_sym)) { - if (ast_genasm(args, ASM_G_FPUSHRES)) - goto err; /* args */ + if (ast_genasm_one_as_tuple(args)) + goto err; /* Tuple(args) */ if (asm_putddi(ddi_ast)) goto err; if (asm_gcallattr_this_const_tuple((uint16_t)symid)) @@ -941,14 +931,15 @@ asm_gcall_expr(struct ast *__restrict func, goto err; if (asm_gpush_symbol(this_sym, func)) goto err; /* this */ - if (ast_genasm_one(args, ASM_G_FPUSHRES)) - goto err; /* this, args */ + if (ast_genasm_one_as_tuple(args)) + goto err; /* this, Tuple(args) */ if (asm_putddi(ddi_ast)) goto err; if (asm_gcallattr_const_tuple((uint16_t)symid)) goto err; /* result */ goto pop_unused; } + /* Regular, old member variable. */ if (attr->ca_flag & CLASS_ATTRIBUTE_FCLASSMEM) { if (ASM_SYMBOL_MAY_REFERENCE(class_sym)) { @@ -1009,16 +1000,16 @@ asm_gcall_expr(struct ast *__restrict func, /* Access to an instance member function (must produce a bound method). 
*/ if (asm_gpush_symbol(this_sym, func)) goto err; /* func, this */ - if (ast_genasm_one(args, ASM_G_FPUSHRES)) - goto err; /* func, this, args */ + if (ast_genasm_one_as_tuple(args)) + goto err; /* func, this, Tuple(args) */ if (asm_putddi(ddi_ast)) goto err; if (asm_gthiscall_tuple()) goto err; /* result */ goto pop_unused; } - if (ast_genasm_one(args, ASM_G_FPUSHRES)) - goto err; /* func, args */ + if (ast_genasm_one_as_tuple(args)) + goto err; /* func, Tuple(args) */ if (asm_putddi(ddi_ast)) goto err; if (asm_gcall_tuple()) @@ -1042,18 +1033,8 @@ asm_gcall_expr(struct ast *__restrict func, goto err; if (asm_gpush_constexpr(DeeObjMethod_SELF(func->a_constexpr))) goto err; - if (ast_genasm_one(args, ASM_G_FPUSHRES)) - goto err; - /* DONT use ast_predict_type here: that one can only be used when - * the assumption not being met results in weak undefined behavior. However, - * if the args-operand in a call really isn't a tuple, the results are hard - * undefined behavior (and probably an interpreter crash) */ - if (ast_predict_type_noanno(args) != &DeeTuple_Type) { - if (asm_putddi(args)) - goto err; - if (asm_gcast_tuple()) - goto err; - } + if (ast_genasm_one_as_tuple(args)) + goto err; /* self, Tuple(args) */ if (asm_putddi(ddi_ast)) goto err; if (asm_gcallattr_const_tuple((uint16_t)attrid)) @@ -1108,18 +1089,8 @@ asm_gcall_expr(struct ast *__restrict func, case SYMBOL_TYPE_THIS: if (SYMBOL_MUST_REFERENCE_THIS(sym)) break; - if (ast_genasm(args, ASM_G_FPUSHRES)) + if (ast_genasm_one_as_tuple(args)) goto err; - /* DONT use ast_predict_type here: that one can only be used when - * the assumption not being met results in weak undefined behavior. 
However, - * if the args-operand in a call really isn't a tuple, the results are hard - * undefined behavior (and probably an interpreter crash) */ - if (ast_predict_type_noanno(args) != &DeeTuple_Type) { - if (asm_putddi(args)) - goto err; - if (asm_gcast_tuple()) - goto err; - } if (asm_putddi(ddi_ast)) goto err; if (asm_gcallattr_this_const_tuple((uint16_t)attrid)) @@ -1131,18 +1102,8 @@ asm_gcall_expr(struct ast *__restrict func, } if (ast_genasm(function_self, ASM_G_FPUSHRES)) goto err; - if (ast_genasm_one(args, ASM_G_FPUSHRES)) + if (ast_genasm_one_as_tuple(args)) goto err; - /* DONT use ast_predict_type here: that one can only be used when - * the assumption not being met results in weak undefined behavior. However, - * if the args-operand in a call really isn't a tuple, the results are hard - * undefined behavior (and probably an interpreter crash) */ - if (ast_predict_type_noanno(args) != &DeeTuple_Type) { - if (asm_putddi(args)) - goto err; - if (asm_gcast_tuple()) - goto err; - } if (asm_putddi(ddi_ast)) goto err; if (asm_gcallattr_const_tuple((uint16_t)attrid)) @@ -1153,18 +1114,8 @@ asm_gcall_expr(struct ast *__restrict func, goto err; if (ast_genasm_one(function_attr, ASM_G_FPUSHRES)) goto err; - if (ast_genasm_one(args, ASM_G_FPUSHRES)) + if (ast_genasm_one_as_tuple(args)) goto err; - /* DONT use ast_predict_type here: that one can only be used when - * the assumption not being met results in weak undefined behavior. 
However, - * if the args-operand in a call really isn't a tuple, the results are hard - * undefined behavior (and probably an interpreter crash) */ - if (ast_predict_type_noanno(args) != &DeeTuple_Type) { - if (asm_putddi(args)) - goto err; - if (asm_gcast_tuple()) - goto err; - } if (asm_putddi(ddi_ast)) goto err; if (asm_gcallattr_tuple()) @@ -1303,6 +1254,8 @@ asm_gcall_expr(struct ast *__restrict func, } #endif if (cxpr == (DeeObject *)&DeeBool_Type) { + if (ast_predict_type(arg0) == &DeeBool_Type) + goto pop_unused; if (ast_genasm(arg0, ASM_G_FPUSHRES)) goto err; if (asm_putddi(ddi_ast)) @@ -1312,6 +1265,8 @@ asm_gcall_expr(struct ast *__restrict func, goto pop_unused; } if (cxpr == (DeeObject *)&DeeString_Type) { + if (ast_predict_type(arg0) == &DeeString_Type) + goto pop_unused; if (ast_genasm(arg0, ASM_G_FPUSHRES)) goto err; if (asm_putddi(ddi_ast)) @@ -1321,6 +1276,8 @@ asm_gcall_expr(struct ast *__restrict func, goto pop_unused; } if (cxpr == (DeeObject *)&DeeTuple_Type) { + if (ast_predict_type(arg0) == &DeeTuple_Type) + goto pop_unused; if (ast_genasm(arg0, ASM_G_FPUSHRES)) goto err; if (asm_putddi(ddi_ast)) @@ -1329,18 +1286,29 @@ asm_gcall_expr(struct ast *__restrict func, goto err; goto pop_unused; } -#if 0 /* The real constructor has a special case for integer pre-sizing... */ if (cxpr == (DeeObject *)&DeeList_Type) { - if (ast_genasm(arg0, ASM_G_FPUSHRES)) - goto err; - if (asm_putddi(ddi_ast)) - goto err; - if (asm_gcast_list()) - goto err; - goto pop_unused; + DeeTypeObject *predict = ast_predict_type(arg0); + if (predict == &DeeList_Type) + goto pop_unused; + /* The constructor of `List()' has special functionality when + * given an integer, in which case the list is created with + * the given amount of pre-allocated space. + * + * As such, we can only use the cast operator if the argument + * type can be predicted to not be numerical. 
*/ + if (predict != NULL && !DeeType_Implements(predict, &DeeNumeric_Type)) { + if (ast_genasm(arg0, ASM_G_FPUSHRES)) + goto err; + if (asm_putddi(ddi_ast)) + goto err; + if (asm_gcast_list()) + goto err; + goto pop_unused; + } } -#endif if (cxpr == (DeeObject *)&DeeDict_Type) { + if (ast_predict_type(arg0) == &DeeDict_Type) + goto pop_unused; if (ast_genasm(arg0, ASM_G_FPUSHRES)) goto err; if (asm_putddi(ddi_ast)) @@ -1350,6 +1318,8 @@ asm_gcall_expr(struct ast *__restrict func, goto pop_unused; } if (cxpr == (DeeObject *)&DeeHashSet_Type) { + if (ast_predict_type(arg0) == &DeeHashSet_Type) + goto pop_unused; if (ast_genasm(arg0, ASM_G_FPUSHRES)) goto err; if (asm_putddi(ddi_ast)) @@ -1846,18 +1816,8 @@ asm_gcall_expr(struct ast *__restrict func, generic_call: if (ast_genasm(func, ASM_G_FPUSHRES)) goto err; - if (ast_genasm_one(args, ASM_G_FPUSHRES)) + if (ast_genasm_one_as_tuple(args)) goto err; - /* DONT use ast_predict_type here: that one can only be used when - * the assumption not being met results in weak undefined behavior. However, - * if the args-operand in a call really isn't a tuple, the results are hard - * undefined behavior (and probably an interpreter crash) */ - if (ast_predict_type_noanno(args) != &DeeTuple_Type) { - if (asm_putddi(args)) - goto err; - if (asm_gcast_tuple()) - goto err; - } if (asm_putddi(ddi_ast)) goto err; if (asm_gcall_tuple()) @@ -1903,16 +1863,6 @@ asm_gcall_kw_expr(struct ast *__restrict func, if unlikely(asm_gargv(args->a_multiple.m_astc, args->a_multiple.m_astv)) goto err; - /* DONT use ast_predict_type here: that one can only be used when - * the assumption not being met results in weak undefined behavior. 
However, - * if the args-operand in a call really isn't a tuple, the results are hard - * undefined behavior (and probably an interpreter crash) */ - if (ast_predict_type_noanno(args) != &DeeTuple_Type) { - if (asm_putddi(args)) - goto err; - if (asm_gcast_tuple()) - goto err; - } if (asm_putddi(ddi_ast)) goto err; if (asm_gcallattr_const_kw((uint16_t)attrid, @@ -1925,18 +1875,6 @@ asm_gcall_kw_expr(struct ast *__restrict func, args->a_constexpr == Dee_EmptyTuple) { if (asm_gpush_constexpr(DeeObjMethod_SELF(func->a_constexpr))) goto err; - if (ast_genasm_one(args, ASM_G_FPUSHRES)) - goto err; - /* DONT use ast_predict_type here: that one can only be used when - * the assumption not being met results in weak undefined behavior. However, - * if the args-operand in a call really isn't a tuple, the results are hard - * undefined behavior (and probably an interpreter crash) */ - if (ast_predict_type_noanno(args) != &DeeTuple_Type) { - if (asm_putddi(args)) - goto err; - if (asm_gcast_tuple()) - goto err; - } if (asm_putddi(ddi_ast)) goto err; if (asm_gcallattr_const_kw((uint16_t)attrid, 0, (uint16_t)kwd_cid)) @@ -1945,18 +1883,8 @@ asm_gcall_kw_expr(struct ast *__restrict func, } if (asm_gpush_constexpr(DeeObjMethod_SELF(func->a_constexpr))) goto err; - if (ast_genasm_one(args, ASM_G_FPUSHRES)) + if (ast_genasm_one_as_tuple(args)) goto err; - /* DONT use ast_predict_type here: that one can only be used when - * the assumption not being met results in weak undefined behavior. 
However, - * if the args-operand in a call really isn't a tuple, the results are hard - * undefined behavior (and probably an interpreter crash) */ - if (ast_predict_type_noanno(args) != &DeeTuple_Type) { - if (asm_putddi(args)) - goto err; - if (asm_gcast_tuple()) - goto err; - } if (asm_putddi(ddi_ast)) goto err; if (asm_gcallattr_const_tuple_kw((uint16_t)attrid, (uint16_t)kwd_cid)) @@ -1974,16 +1902,6 @@ asm_gcall_kw_expr(struct ast *__restrict func, if unlikely(asm_gargv(args->a_multiple.m_astc, args->a_multiple.m_astv)) goto err; - /* DONT use ast_predict_type here: that one can only be used when - * the assumption not being met results in weak undefined behavior. However, - * if the args-operand in a call really isn't a tuple, the results are hard - * undefined behavior (and probably an interpreter crash) */ - if (ast_predict_type_noanno(args) != &DeeTuple_Type) { - if (asm_putddi(args)) - goto err; - if (asm_gcast_tuple()) - goto err; - } if (asm_putddi(ddi_ast)) goto err; if (ast_genasm_one(kwds, ASM_G_FPUSHRES)) @@ -2010,18 +1928,8 @@ asm_gcall_kw_expr(struct ast *__restrict func, goto err; if (asm_gpush_const((uint16_t)attrid)) goto err; - if (ast_genasm_one(args, ASM_G_FPUSHRES)) + if (ast_genasm_one_as_tuple(args)) goto err; - /* DONT use ast_predict_type here: that one can only be used when - * the assumption not being met results in weak undefined behavior. 
However, - * if the args-operand in a call really isn't a tuple, the results are hard - * undefined behavior (and probably an interpreter crash) */ - if (ast_predict_type_noanno(args) != &DeeTuple_Type) { - if (asm_putddi(args)) - goto err; - if (asm_gcast_tuple()) - goto err; - } if (ast_genasm_one(kwds, ASM_G_FPUSHRES)) goto err; if (asm_putddi(ddi_ast)) @@ -2057,16 +1965,6 @@ asm_gcall_kw_expr(struct ast *__restrict func, if unlikely(asm_gargv(args->a_multiple.m_astc, args->a_multiple.m_astv)) goto err; - /* DONT use ast_predict_type here: that one can only be used when - * the assumption not being met results in weak undefined behavior. However, - * if the args-operand in a call really isn't a tuple, the results are hard - * undefined behavior (and probably an interpreter crash) */ - if (ast_predict_type_noanno(args) != &DeeTuple_Type) { - if (asm_putddi(args)) - goto err; - if (asm_gcast_tuple()) - goto err; - } if (asm_putddi(ddi_ast)) goto err; if (asm_gcallattr_const_kw((uint16_t)att_cid, @@ -2079,18 +1977,6 @@ asm_gcall_kw_expr(struct ast *__restrict func, args->a_constexpr == Dee_EmptyTuple) { if (ast_genasm(base, ASM_G_FPUSHRES)) goto err; - if (ast_genasm_one(args, ASM_G_FPUSHRES)) - goto err; - /* DONT use ast_predict_type here: that one can only be used when - * the assumption not being met results in weak undefined behavior. 
However, - * if the args-operand in a call really isn't a tuple, the results are hard - * undefined behavior (and probably an interpreter crash) */ - if (ast_predict_type_noanno(args) != &DeeTuple_Type) { - if (asm_putddi(args)) - goto err; - if (asm_gcast_tuple()) - goto err; - } if (asm_putddi(ddi_ast)) goto err; if (asm_gcallattr_const_kw((uint16_t)att_cid, 0, (uint16_t)kwd_cid)) @@ -2099,18 +1985,8 @@ asm_gcall_kw_expr(struct ast *__restrict func, } if (ast_genasm(base, ASM_G_FPUSHRES)) goto err; - if (ast_genasm_one(args, ASM_G_FPUSHRES)) + if (ast_genasm_one_as_tuple(args)) goto err; - /* DONT use ast_predict_type here: that one can only be used when - * the assumption not being met results in weak undefined behavior. However, - * if the args-operand in a call really isn't a tuple, the results are hard - * undefined behavior (and probably an interpreter crash) */ - if (ast_predict_type_noanno(args) != &DeeTuple_Type) { - if (asm_putddi(args)) - goto err; - if (asm_gcast_tuple()) - goto err; - } if (asm_putddi(ddi_ast)) goto err; if (asm_gcallattr_const_tuple_kw((uint16_t)att_cid, (uint16_t)kwd_cid)) @@ -2128,16 +2004,6 @@ asm_gcall_kw_expr(struct ast *__restrict func, if unlikely(asm_gargv(args->a_multiple.m_astc, args->a_multiple.m_astv)) goto err; - /* DONT use ast_predict_type here: that one can only be used when - * the assumption not being met results in weak undefined behavior. 
However, - * if the args-operand in a call really isn't a tuple, the results are hard - * undefined behavior (and probably an interpreter crash) */ - if (ast_predict_type_noanno(args) != &DeeTuple_Type) { - if (asm_putddi(args)) - goto err; - if (asm_gcast_tuple()) - goto err; - } if (asm_putddi(ddi_ast)) goto err; if (ast_genasm_one(kwds, ASM_G_FPUSHRES)) @@ -2164,18 +2030,8 @@ asm_gcall_kw_expr(struct ast *__restrict func, goto err; if (ast_genasm_one(name, ASM_G_FPUSHRES)) goto err; - if (ast_genasm_one(args, ASM_G_FPUSHRES)) + if (ast_genasm_one_as_tuple(args)) goto err; - /* DONT use ast_predict_type here: that one can only be used when - * the assumption not being met results in weak undefined behavior. However, - * if the args-operand in a call really isn't a tuple, the results are hard - * undefined behavior (and probably an interpreter crash) */ - if (ast_predict_type_noanno(args) != &DeeTuple_Type) { - if (asm_putddi(args)) - goto err; - if (asm_gcast_tuple()) - goto err; - } if (ast_genasm_one(kwds, ASM_G_FPUSHRES)) goto err; if (asm_putddi(ddi_ast)) @@ -2185,6 +2041,7 @@ asm_gcall_kw_expr(struct ast *__restrict func, goto pop_unused; } } + /* The object being called isn't an attribute. */ if (ast_genasm(func, ASM_G_FPUSHRES)) goto err; @@ -2212,18 +2069,8 @@ asm_gcall_kw_expr(struct ast *__restrict func, if (asm_gcall_kw(0, (uint16_t)kwd_cid)) goto err; } else { - if (ast_genasm_one(args, ASM_G_FPUSHRES)) + if (ast_genasm_one_as_tuple(args)) goto err; - /* DONT use ast_predict_type here: that one can only be used when - * the assumption not being met results in weak undefined behavior. 
However, - * if the args-operand in a call really isn't a tuple, the results are hard - * undefined behavior (and probably an interpreter crash) */ - if (ast_predict_type_noanno(args) != &DeeTuple_Type) { - if (asm_putddi(args)) - goto err; - if (asm_gcast_tuple()) - goto err; - } if (asm_putddi(ddi_ast)) goto err; if (asm_gcall_tuple_kw((uint16_t)kwd_cid)) @@ -2231,18 +2078,8 @@ asm_gcall_kw_expr(struct ast *__restrict func, } } else { /* Fallback: use the stack to pass all the arguments. */ - if (ast_genasm_one(args, ASM_G_FPUSHRES)) + if (ast_genasm_one_as_tuple(args)) goto err; - /* DONT use ast_predict_type here: that one can only be used when - * the assumption not being met results in weak undefined behavior. However, - * if the args-operand in a call really isn't a tuple, the results are hard - * undefined behavior (and probably an interpreter crash) */ - if (ast_predict_type_noanno(args) != &DeeTuple_Type) { - if (asm_putddi(args)) - goto err; - if (asm_gcast_tuple()) - goto err; - } if (ast_genasm_one(kwds, ASM_G_FPUSHRES)) goto err; if (asm_putddi(ddi_ast)) diff --git a/src/deemon/compiler/optimize/traits.c b/src/deemon/compiler/optimize/traits.c index d60288d5a..fc7d8d623 100644 --- a/src/deemon/compiler/optimize/traits.c +++ b/src/deemon/compiler/optimize/traits.c @@ -185,6 +185,17 @@ eval_decl_ast_type(struct decl_ast *__restrict self) { #endif /* CONFIG_LANGUAGE_DECLARATION_DOCUMENTATION */ +struct seqops { + /* Opcodes are encoded in big-endian. + * When the mask 0xff00 is ZERO, the opcode is a single byte long. */ + DeeTypeObject *so_typ; /* The deemon type for this sequence. */ + uint16_t so_pck[2]; /* Pack - [0]: 8-bit; [1]: 16-bit; */ + uint16_t so_cas; /* Cast */ +}; + +INTDEF struct seqops seqops_info[4]; + + /* Predict the typing of a given AST, or return NULL when unpredictable. * NOTE: When the `OPTIMIZE_FNOPREDICT' flag is set, this function always returns `NULL'. 
* @param: flags: Set of `AST_PREDICT_TYPE_F_*' */ @@ -206,6 +217,34 @@ ast_predict_type_ex(struct ast *__restrict self, unsigned int flags) { return ast_predict_type_ex(self->a_multiple.m_astv[self->a_multiple.m_astc - 1], flags); } +#ifdef CONFIG_LANGUAGE_DECLARATION_DOCUMENTATION + if (flags & AST_PREDICT_TYPE_F_NOANNO) { + /* Special case: the normal code generator is allowed to optimize + * based on type annotation, such that: + * >> local a: Tuple; + * >> local b: List; + * >> local c = (a...); // Cast-to-tuple can be optimized away + * >> local d = (b...); // Cast-to-tuple must be retained + * + * So if the caller is now asking us about the type of `c' when + * not considering type annotations, we mustn't respect the type + * annotation of `a' */ + if (self->a_multiple.m_astc >= 1) { + struct ast *e0 = self->a_multiple.m_astv[0]; + if (e0->a_type == AST_EXPAND) { + DeeTypeObject *expected_type = seqops_info[self->a_flag & 3].so_typ; + if ((expected_type == &DeeTuple_Type /* Immutable sequence type */ || + !ast_predict_object_shared(e0->a_expand)) && + (ast_predict_type(e0->a_expand) == expected_type)) { + /* In this case, the code generator will not have produced a cast operator. + * As such, it is our job to return the type of the sequence *without* + * taking type annotations into account. 
*/ + return ast_predict_type_noanno(e0->a_expand); + } + } + } + } +#endif /* CONFIG_LANGUAGE_DECLARATION_DOCUMENTATION */ if (self->a_flag == AST_FMULTIPLE_TUPLE) return &DeeTuple_Type; if (self->a_flag == AST_FMULTIPLE_LIST) @@ -287,7 +326,6 @@ ast_predict_type_ex(struct ast *__restrict self, unsigned int flags) { if (!(flags & AST_PREDICT_TYPE_F_NOANNO)) return eval_decl_ast_type(&sym->s_decltype); #endif /* CONFIG_LANGUAGE_DECLARATION_DOCUMENTATION */ - } break; case AST_BOOL: @@ -504,6 +542,93 @@ ast_predict_type_ex(struct ast *__restrict self, unsigned int flags) { } +/* Predict the reference count of a given AST at runtime (if predictable) + * If not predictable, return `0' (which is never a valid reference count) */ +INTERN WUNUSED NONNULL((1)) Dee_refcnt_t DFCALL +ast_predict_object_refcnt(struct ast *__restrict self) { + switch (self->a_type) { + + case AST_MULTIPLE: + if (self->a_flag == AST_FMULTIPLE_KEEPLAST) { + if (!self->a_multiple.m_astc) + goto nope; + return ast_predict_object_refcnt(self->a_multiple.m_astv[self->a_multiple.m_astc - 1]); + } + if (self->a_multiple.m_astc == 0 && (self->a_flag == AST_FMULTIPLE_TUPLE || + self->a_flag == AST_FMULTIPLE_GENERIC || + self->a_flag == AST_FMULTIPLE_GENERIC_KEYS)) + goto nope; /* These will generate to access global singletons (with unknown reference counts) */ + if (self->a_flag != AST_FMULTIPLE_TUPLE) + return 1; /* Anything but tuples must be created on the spot. */ + if (self->a_multiple.m_astc == 1) { + /* Tuples with at least 2 elements must be created on the spot. */ + } else { + /* Special case for 1-element tuples. + * Here, `(foo...)' can get optimized when `foo' is already known to have tuple + * typing (when not considering type annotations), so if that optimization is + * done, then the resulting expression won't represent a new tuple, and we need + * to return the reference count of `foo'. 
*/ + struct ast *e0 = self->a_multiple.m_astv[0]; + if (e0->a_type == AST_EXPAND) { + if (ast_predict_type(e0->a_expand) == &DeeTuple_Type) + return ast_predict_object_refcnt(e0->a_expand); + } + } + return 1; + + case AST_TRY: { + size_t i; + Dee_refcnt_t guard; + guard = ast_predict_object_refcnt(self->a_try.t_guard); + if (guard == 0) + goto nope; + for (i = 0; i < self->a_try.t_catchc; ++i) { + if (ast_predict_object_refcnt(self->a_try.t_catchv[i].ce_code) != guard) + goto nope; + } + return guard; + } break; + + case AST_CONDITIONAL: { + Dee_refcnt_t result; + if (self->a_flag & AST_FCOND_BOOL) + goto nope; /* Evaluates to a boolean singleton (which has unknown refcnt) */ + if (!self->a_conditional.c_tt || !self->a_conditional.c_ff) + goto nope; /* Possibly evaluates to `none' (which has unknown refcnt) */ + result = ast_predict_object_refcnt(self->a_conditional.c_tt); + if (result == 0) + goto nope; + if (ast_predict_object_refcnt(self->a_conditional.c_ff) != result) + goto nope; + return result; + } break; + + case AST_ACTION: { + switch (self->a_flag & AST_FACTION_KINDMASK) { +#define ACTION(x) case x &AST_FACTION_KINDMASK: + + ACTION(AST_FACTION_CELL0) + ACTION(AST_FACTION_CELL1) + ACTION(AST_FACTION_SUPEROF) + ACTION(AST_FACTION_AS) + return 1; + + default: break; +#undef ACTION + } + } break; + + case AST_CLASS: + return 1; + + default: + break; + } +nope: + return 0; +} + + INTERN WUNUSED NONNULL((1)) bool DCALL ast_has_sideeffects(struct ast *__restrict self) { diff --git a/src/deemon/execute/code-exec.c.inl b/src/deemon/execute/code-exec.c.inl index f9b5f3af7..44fd0c15a 100644 --- a/src/deemon/execute/code-exec.c.inl +++ b/src/deemon/execute/code-exec.c.inl @@ -992,8 +992,8 @@ inc_execsz_start: goto increase_stacksize; \ } #else /* EXEC_SAFE */ -#define ASSERT_TUPLE(ob) ASSERT(DeeTuple_CheckExact(ob)) -#define ASSERT_STRING(ob) ASSERT(DeeString_CheckExact(ob)) +#define ASSERT_TUPLE(ob) ASSERT_OBJECT_TYPE_EXACT(ob, &DeeTuple_Type) +#define 
ASSERT_STRING(ob) ASSERT_OBJECT_TYPE_EXACT(ob, &DeeString_Type) #define ASSERT_THISCALL() ASSERT(code->co_flags & CODE_FTHISCALL) #define CONST_LOCKREAD() (void)0 #define CONST_LOCKENDREAD() (void)0 diff --git a/util/test-errors.dee b/util/test-errors.dee index 7bc8c7043..eac262adc 100644 --- a/util/test-errors.dee +++ b/util/test-errors.dee @@ -324,12 +324,12 @@ function main() { //invokeTypeMembers(deemon.string); //invokeTypeMembers(deemon.Bytes); //invokeTypeMembers(deemon.int); - invokeTypeMembers(deemon.List); + //invokeTypeMembers(deemon.List); //invokeTypeMembers(deemon.Tuple); //invokeTypeMembers(deemon.Sequence); //invokeTypeMembers(deemon.Object); //invokeTypeMembers(deemon.bool); - //invokeTypeMembers(type(none)); + invokeTypeMembers(type(none)); //invokeTypeMembers(deemon.float); //invokeTypeMembers(deemon.Cell); //invokeTypeMembers(deemon.WeakRef); diff --git a/util/test/compiler-call-non-tuple.dee b/util/test/compiler-call-non-tuple.dee new file mode 100644 index 000000000..92a7a699c --- /dev/null +++ b/util/test/compiler-call-non-tuple.dee @@ -0,0 +1,66 @@ +#!/usr/bin/deemon +/* Copyright (c) 2018-2023 Griefer@Work * + * * + * This software is provided 'as-is', without any express or implied * + * warranty. In no event will the authors be held liable for any damages * + * arising from the use of this software. * + * * + * Permission is granted to anyone to use this software for any purpose, * + * including commercial applications, and to alter it and redistribute it * + * freely, subject to the following restrictions: * + * * + * 1. The origin of this software must not be misrepresented; you must not * + * claim that you wrote the original software. If you use this software * + * in a product, an acknowledgement (see the following) in the product * + * documentation is required: * + * Portions Copyright (c) 2018-2023 Griefer@Work * + * 2. 
Altered source versions must be plainly marked as such, and must not be * + * misrepresented as being the original software. * + * 3. This notice may not be removed or altered from any source distribution. * + */ +import * from deemon; +import * from errors; +import operators; + +function foo(args...) -> args.reversed() + ...; + +@@Important: even though @args is marked as a @Tuple, the compiler +@@ still has to generate a cast to @Tuple when using that +@@ argument in the contained call. This is because while +@@ type annotation can be used to optimize: +@@ >> local a: Tuple; +@@ >> local b = (a...); // No extra cast needed here +@@ It cannot be used to optimize places where the runtime +@@ *really* needs there to be an object of a certain type, +@@ as is the case when doing varargs calls. +function myCall(self, args: Tuple) -> self(args...); + + +local o = 42; +__asm__("" : "+X" (o)); + +/* NOTE: The NotImplemented errors here are due to deemon trying to + * cast an int to a sequence, which doesn't work because int + * doesn't implement `operator iter()` */ +local error = try foo(o...) catch (e...) e; +assert error is NotImplemented; + +local error = try foo.operator ()(o) catch (e...) e; +assert error is NotImplemented; + +local error = try operator ()(foo, o) catch (e...) e; +assert error is NotImplemented; + +local error = try operators.call(foo, o) catch (e...) e; +assert error is NotImplemented; + +local error = try myCall(foo, o) catch (e...) e; +assert error is NotImplemented; + +/* Also test all works when given something other than a tuple in varargs. */ +assert foo("abc") == "abc"; +assert foo("abc"...) 
== "cba"; +assert foo.operator ()("abc") == "cba"; +assert operator ()(foo, "abc") == "cba"; +assert operators.call(foo, "abc") == "cba"; +assert myCall(foo, "abc") == "cba"; diff --git a/util/test/compiler-sequence-copy.dee b/util/test/compiler-sequence-copy.dee new file mode 100644 index 000000000..0fff44f32 --- /dev/null +++ b/util/test/compiler-sequence-copy.dee @@ -0,0 +1,30 @@ +#!/usr/bin/deemon +/* Copyright (c) 2018-2023 Griefer@Work * + * * + * This software is provided 'as-is', without any express or implied * + * warranty. In no event will the authors be held liable for any damages * + * arising from the use of this software. * + * * + * Permission is granted to anyone to use this software for any purpose, * + * including commercial applications, and to alter it and redistribute it * + * freely, subject to the following restrictions: * + * * + * 1. The origin of this software must not be misrepresented; you must not * + * claim that you wrote the original software. If you use this software * + * in a product, an acknowledgement (see the following) in the product * + * documentation is required: * + * Portions Copyright (c) 2018-2023 Griefer@Work * + * 2. Altered source versions must be plainly marked as such, and must not be * + * misrepresented as being the original software. * + * 3. This notice may not be removed or altered from any source distribution. * + */ +import * from deemon; + +local a: List = [10, 20]; +local b: List = [a...]; +assert a !== b; + +local c: Tuple = (10, 20); +local d: Tuple = (c...); +assert c === d, "It would technically be OK for this to fail, " + "but GATW deemon always does this optimization";