
Commit 64b555f

vtjnash authored and KristofferC committed
fix missing gc root on store to iparams (#49820)
Try to optimize the order of this code a bit more, given that these checks are somewhat infrequently needed. Fix #49762.

(cherry picked from commit c245179)
1 parent d8291ed commit 64b555f
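
The hazard named in the title, in rough terms: normalize_unionalls can allocate a new type object, and if that result is stored only where the GC will not find it, a later allocation can free it while it is still referenced. In the reworked code below, the normalized value is stored both into iparams and, via jl_svecset, into the rooted svec p. A minimal sketch of that general pattern, assuming Julia's C API from julia.h; make_new_value is a hypothetical allocating helper, not part of the commit:

#include <julia.h>

extern jl_value_t *make_new_value(jl_value_t *v); // hypothetical; may allocate

static void store_params(jl_svec_t *p, jl_value_t **iparams, size_t ntp)
{
    for (size_t i = 0; i < ntp; i++) {
        jl_value_t *v = make_new_value(iparams[i]); // allocation can trigger GC
        iparams[i] = v;           // a bare C array is not a GC root on its own
        if (p != NULL)
            jl_svecset(p, i, v);  // also store into the rooted svec so the value
                                  // stays reachable; jl_svecset applies the
                                  // generational write barrier for us
    }
}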

File tree

1 file changed: +59, -16 lines

src/jltypes.c

Lines changed: 59 additions & 16 deletions
@@ -1562,13 +1562,8 @@ static jl_value_t *inst_datatype_inner(jl_datatype_t *dt, jl_svec_t *p, jl_value
     jl_typename_t *tn = dt->name;
     int istuple = (tn == jl_tuple_typename);
     int isnamedtuple = (tn == jl_namedtuple_typename);
-    if (tn != jl_type_typename) {
-        size_t i;
-        for (i = 0; i < ntp; i++)
-            iparams[i] = normalize_unionalls(iparams[i]);
-    }
 
-    // check type cache, if applicable
+    // check if type cache will be applicable
     int cacheable = 1;
     if (istuple) {
         size_t i;
@@ -1582,26 +1577,32 @@ static jl_value_t *inst_datatype_inner(jl_datatype_t *dt, jl_svec_t *p, jl_value
             if (jl_has_free_typevars(iparams[i]))
                 cacheable = 0;
     }
+    // if applicable, check the cache first for a match
     if (cacheable) {
+        jl_value_t *lkup = (jl_value_t*)lookup_type(tn, iparams, ntp);
+        if (lkup != NULL)
+            return lkup;
+    }
+    // if some normalization might be needed, do that now
+    // it is probably okay to mutate iparams, and we only store globally rooted objects here
+    if (check && cacheable) {
         size_t i;
         for (i = 0; i < ntp; i++) {
             jl_value_t *pi = iparams[i];
             if (pi == jl_bottom_type)
                 continue;
             if (jl_is_datatype(pi))
                 continue;
-            if (jl_is_vararg(pi)) {
-                pi = jl_unwrap_vararg(pi);
-                if (jl_has_free_typevars(pi))
-                    continue;
-            }
-            // normalize types equal to wrappers (prepare for wrapper_id)
+            if (jl_is_vararg(pi))
+                // This would require some special handling, but is not needed
+                // at the moment (and might be better handled in jl_wrap_vararg instead).
+                continue;
+            if (!cacheable && jl_has_free_typevars(pi))
+                continue;
+            // normalize types equal to wrappers (prepare for Typeofwrapper)
             jl_value_t *tw = extract_wrapper(pi);
             if (tw && tw != pi && (tn != jl_type_typename || jl_typeof(pi) == jl_typeof(tw)) &&
                     jl_types_equal(pi, tw)) {
-                // This would require some special handling, but is never used at
-                // the moment.
-                assert(!jl_is_vararg(iparams[i]));
                 iparams[i] = tw;
                 if (p) jl_gc_wb(p, tw);
             }
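
The pair of stores at the end of this hunk (iparams[i] = tw; if (p) jl_gc_wb(p, tw);) follows the generational-GC convention that a raw store of a heap pointer into another heap object must be announced to the collector. A minimal sketch of that convention, assuming Julia's C API; set_element is an illustrative helper, not code from this commit:

#include <julia.h>

static void set_element(jl_svec_t *parent, size_t i, jl_value_t *child)
{
    // raw store into the svec's backing array, bypassing jl_svecset
    jl_svec_data(parent)[i] = child;
    // record the parent-to-child edge for the generational GC; without this,
    // a minor collection could miss child if parent is already old
    jl_gc_wb(parent, child);
}
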
@@ -1611,6 +1612,9 @@ static jl_value_t *inst_datatype_inner(jl_datatype_t *dt, jl_svec_t *p, jl_value
             // normalize Type{Type{Union{}}} to Type{TypeofBottom}
             iparams[0] = (jl_value_t*)jl_typeofbottom_type;
         }
+    }
+    // then check the cache again, if applicable
+    if (cacheable) {
         jl_value_t *lkup = (jl_value_t*)lookup_type(tn, iparams, ntp);
         if (lkup != NULL)
             return lkup;
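
Taken together, the hunks above reorder the function into a cheap-path-first shape: try the cache, only then run the comparatively rare (and potentially allocating) normalizations, then consult the cache once more so a normalized spelling can still hit an existing entry. A rough sketch of that control flow, with lookup_cached, normalize_params, and build_instance as hypothetical stand-ins rather than the real static helpers in jltypes.c:

#include <julia.h>

// hypothetical stand-ins for the real helpers
extern jl_value_t *lookup_cached(jl_typename_t *tn, jl_value_t **p, size_t n);
extern void normalize_params(jl_value_t **p, size_t n);
extern jl_value_t *build_instance(jl_typename_t *tn, jl_value_t **p, size_t n);

jl_value_t *instantiate(jl_typename_t *tn, jl_value_t **iparams, size_t ntp,
                        int cacheable)
{
    if (cacheable) {
        jl_value_t *hit = lookup_cached(tn, iparams, ntp);  // common, cheap path
        if (hit != NULL)
            return hit;
    }
    normalize_params(iparams, ntp);                         // infrequently needed
    if (cacheable) {
        jl_value_t *hit = lookup_cached(tn, iparams, ntp);  // retry: the normalized
        if (hit != NULL)                                    // form may match a cached entry
            return hit;
    }
    return build_instance(tn, iparams, ntp);                // slow path: make a new type
}
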
@@ -1619,12 +1623,15 @@ static jl_value_t *inst_datatype_inner(jl_datatype_t *dt, jl_svec_t *p, jl_value
     if (stack_lkup)
         return stack_lkup;
 
+    // check parameters against bounds in type definition
+    // for whether this is even valid
     if (!istuple) {
-        // check parameters against bounds in type definition
+        assert(ntp > 0);
        check_datatype_parameters(tn, iparams, ntp);
     }
     else if (ntp == 0 && jl_emptytuple_type != NULL) {
         // empty tuple type case
+        assert(istuple);
         return (jl_value_t*)jl_emptytuple_type;
     }
 
@@ -1670,6 +1677,42 @@ static jl_value_t *inst_datatype_inner(jl_datatype_t *dt, jl_svec_t *p, jl_value
             jl_svecset(p, i, iparams[i]);
     }
 
+    // try to simplify some type parameters
+    if (check && tn != jl_type_typename) {
+        size_t i;
+        int changed = 0;
+        if (istuple) // normalization might change Tuple's, but not other types's, cacheable status
+            cacheable = 1;
+        for (i = 0; i < ntp; i++) {
+            jl_value_t *newp = normalize_unionalls(iparams[i]);
+            if (newp != iparams[i]) {
+                iparams[i] = newp;
+                jl_svecset(p, i, newp);
+                changed = 1;
+            }
+            if (istuple && cacheable && !jl_is_concrete_type(newp))
+                cacheable = 0;
+        }
+        if (changed) {
+            // If this changed something, we need to check the cache again, in
+            // case we missed the match earlier before the normalizations
+            //
+            // e.g. return inst_datatype_inner(dt, p, iparams, ntp, stack, env, 0);
+            if (cacheable) {
+                jl_value_t *lkup = (jl_value_t*)lookup_type(tn, iparams, ntp);
+                if (lkup != NULL) {
+                    JL_GC_POP();
+                    return lkup;
+                }
+            }
+            jl_value_t *stack_lkup = lookup_type_stack(stack, dt, ntp, iparams);
+            if (stack_lkup) {
+                JL_GC_POP();
+                return stack_lkup;
+            }
+        }
+    }
+
     // acquire the write lock now that we know we need a new object
     // since we're going to immediately leak it globally via the instantiation stack
     if (cacheable) {
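
One detail worth noting in the added block: both early returns happen while a GC frame is live, so each of them runs JL_GC_POP() first. A minimal sketch of that push/pop discipline, assuming Julia's rooting macros from julia.h; some_condition is a hypothetical predicate, not part of the commit:

#include <julia.h>

extern int some_condition(jl_value_t *v); // hypothetical predicate

jl_value_t *guarded(void)
{
    jl_value_t *tmp = NULL;
    JL_GC_PUSH1(&tmp);           // root tmp for the duration of this frame
    tmp = jl_box_int64(42);      // allocating is now safe; tmp stays reachable
    if (some_condition(tmp)) {
        JL_GC_POP();             // every early return must unwind the frame it pushed
        return tmp;
    }
    JL_GC_POP();
    return tmp;
}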
