@@ -19,11 +19,8 @@ import (
 	"cmd/compile/internal/types"
 	"cmd/internal/obj"
 	"cmd/internal/objabi"
-	"cmd/internal/src"
 	"crypto/md5"
-	"crypto/sha1"
 	"fmt"
-	"os"
 	"strings"
 )
 
@@ -632,7 +629,7 @@ func (lv *Liveness) pointerMap(liveout bvec, vars []*Node, args, locals bvec) {
 
 // markUnsafePoints finds unsafe points and computes lv.unsafePoints.
 func (lv *Liveness) markUnsafePoints() {
-	if compiling_runtime || lv.f.NoSplit || objabi.Clobberdead_enabled != 0 {
+	if compiling_runtime || lv.f.NoSplit {
 		// No complex analysis necessary. Do this on the fly
 		// in issafepoint.
 		return
@@ -791,7 +788,7 @@ func (lv *Liveness) issafepoint(v *ssa.Value) bool {
 	// go:nosplit functions are similar. Since safe points used to
 	// be coupled with stack checks, go:nosplit often actually
 	// means "no safe points in this function".
-	if compiling_runtime || lv.f.NoSplit || objabi.Clobberdead_enabled != 0 {
+	if compiling_runtime || lv.f.NoSplit {
 		return v.Op.IsCall()
 	}
 	switch v.Op {
@@ -1051,161 +1048,6 @@ func (lv *Liveness) epilogue() {
 	}
 }
 
-func (lv *Liveness) clobber() {
-	// The clobberdead experiment inserts code to clobber all the dead variables (locals and args)
-	// before and after every safepoint. This experiment is useful for debugging the generation
-	// of live pointer bitmaps.
-	if objabi.Clobberdead_enabled == 0 {
-		return
-	}
-	var varSize int64
-	for _, n := range lv.vars {
-		varSize += n.Type.Size()
-	}
-	if len(lv.stackMaps) > 1000 || varSize > 10000 {
-		// Be careful to avoid doing too much work.
-		// Bail if >1000 safepoints or >10000 bytes of variables.
-		// Otherwise, giant functions make this experiment generate too much code.
-		return
-	}
-	if h := os.Getenv("GOCLOBBERDEADHASH"); h != "" {
-		// Clobber only functions where the hash of the function name matches a pattern.
-		// Useful for binary searching for a miscompiled function.
-		hstr := ""
-		for _, b := range sha1.Sum([]byte(lv.fn.funcname())) {
-			hstr += fmt.Sprintf("%08b", b)
-		}
-		if !strings.HasSuffix(hstr, h) {
-			return
-		}
-		fmt.Printf("\t\t\tCLOBBERDEAD %s\n", lv.fn.funcname())
-	}
-	if lv.f.Name == "forkAndExecInChild" || lv.f.Name == "wbBufFlush" {
-		// forkAndExecInChild calls vfork (on linux/amd64, anyway).
-		// The code we add here clobbers parts of the stack in the child.
-		// When the parent resumes, it is using the same stack frame. But the
-		// child has clobbered stack variables that the parent needs. Boom!
-		// In particular, the sys argument gets clobbered.
-		// Note to self: GOCLOBBERDEADHASH=011100101110
-		//
-		// runtime.wbBufFlush must not modify its arguments. See the comments
-		// in runtime/mwbbuf.go:wbBufFlush.
-		return
-	}
-
-	var oldSched []*ssa.Value
-	for _, b := range lv.f.Blocks {
-		// Copy block's values to a temporary.
-		oldSched = append(oldSched[:0], b.Values...)
-		b.Values = b.Values[:0]
-
-		// Clobber all dead variables at entry.
-		if b == lv.f.Entry {
-			for len(oldSched) > 0 && len(oldSched[0].Args) == 0 {
-				// Skip argless ops. We need to skip at least
-				// the lowered ClosurePtr op, because it
-				// really wants to be first. This will also
-				// skip ops like InitMem and SP, which are ok.
-				b.Values = append(b.Values, oldSched[0])
-				oldSched = oldSched[1:]
-			}
-			clobber(lv, b, lv.stackMaps[0])
-		}
-
-		// Copy values into schedule, adding clobbering around safepoints.
-		for _, v := range oldSched {
-			if !lv.issafepoint(v) {
-				b.Values = append(b.Values, v)
-				continue
-			}
-			before := true
-			if v.Op.IsCall() && v.Aux != nil && v.Aux.(*obj.LSym) == typedmemmove {
-				// Can't put clobber code before the call to typedmemmove.
-				// The variable to-be-copied is marked as dead
-				// at the callsite. That is ok, though, as typedmemmove
-				// is marked as nosplit, and the first thing it does
-				// is to call memmove (also nosplit), after which
-				// the source value is dead.
-				// See issue 16026.
-				before = false
-			}
-			if before {
-				clobber(lv, b, lv.stackMaps[lv.livenessMap.Get(v).stackMapIndex])
-			}
-			b.Values = append(b.Values, v)
-			clobber(lv, b, lv.stackMaps[lv.livenessMap.Get(v).stackMapIndex])
-		}
-	}
-}
-
-// clobber generates code to clobber all dead variables (those not marked in live).
-// Clobbering instructions are added to the end of b.Values.
-func clobber(lv *Liveness, b *ssa.Block, live bvec) {
-	for i, n := range lv.vars {
-		if !live.Get(int32(i)) {
-			clobberVar(b, n)
-		}
-	}
-}
-
-// clobberVar generates code to trash the pointers in v.
-// Clobbering instructions are added to the end of b.Values.
-func clobberVar(b *ssa.Block, v *Node) {
-	clobberWalk(b, v, 0, v.Type)
-}
-
-// b = block to which we append instructions
-// v = variable
-// offset = offset of (sub-portion of) variable to clobber (in bytes)
-// t = type of sub-portion of v.
-func clobberWalk(b *ssa.Block, v *Node, offset int64, t *types.Type) {
-	if !types.Haspointers(t) {
-		return
-	}
-	switch t.Etype {
-	case TPTR,
-		TUNSAFEPTR,
-		TFUNC,
-		TCHAN,
-		TMAP:
-		clobberPtr(b, v, offset)
-
-	case TSTRING:
-		// struct { byte *str; int len; }
-		clobberPtr(b, v, offset)
-
-	case TINTER:
-		// struct { Itab *tab; void *data; }
-		// or, when isnilinter(t)==true:
-		// struct { Type *type; void *data; }
-		// Note: the first word isn't a pointer. See comment in plive.go:onebitwalktype1.
-		clobberPtr(b, v, offset+int64(Widthptr))
-
-	case TSLICE:
-		// struct { byte *array; int len; int cap; }
-		clobberPtr(b, v, offset)
-
-	case TARRAY:
-		for i := int64(0); i < t.NumElem(); i++ {
-			clobberWalk(b, v, offset+i*t.Elem().Size(), t.Elem())
-		}
-
-	case TSTRUCT:
-		for _, t1 := range t.Fields().Slice() {
-			clobberWalk(b, v, offset+t1.Offset, t1.Type)
-		}
-
-	default:
-		Fatalf("clobberWalk: unexpected type, %v", t)
-	}
-}
-
-// clobberPtr generates a clobber of the pointer at offset offset in v.
-// The clobber instruction is added at the end of b.
-func clobberPtr(b *ssa.Block, v *Node, offset int64) {
-	b.NewValue0IA(src.NoXPos, ssa.OpClobber, types.TypeVoid, offset, v)
-}
-
 // Compact coalesces identical bitmaps from lv.livevars into the sets
 // lv.stackMapSet and lv.regMaps.
 //
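For reference, the GOCLOBBERDEADHASH check deleted in the hunk above gated clobbering on a SHA-1 hash of the function name, which is what made it possible to binary-search for a single miscompiled function by growing the required bit suffix one bit at a time. The standalone sketch below reproduces just that matching scheme outside the compiler; the helper name matchesClobberHash and the function names in main are illustrative, not part of the original code.

package main

import (
	"crypto/sha1"
	"fmt"
	"os"
	"strings"
)

// matchesClobberHash mirrors the removed GOCLOBBERDEADHASH check: hash the
// function name with SHA-1, expand every byte to 8 binary digits, and require
// the given bit pattern to be a suffix of the resulting string.
func matchesClobberHash(fnName, pattern string) bool {
	hstr := ""
	for _, b := range sha1.Sum([]byte(fnName)) {
		hstr += fmt.Sprintf("%08b", b)
	}
	return strings.HasSuffix(hstr, pattern)
}

func main() {
	pattern := os.Getenv("GOCLOBBERDEADHASH")
	// Illustrative function names; the compiler used lv.fn.funcname().
	for _, fn := range []string{"main.main", "main.helper"} {
		fmt.Println(fn, matchesClobberHash(fn, pattern))
	}
}

Each extra bit in the pattern roughly halves the set of functions that still get clobbered, so a dozen or so steps of lengthening the suffix usually narrows a miscompilation down to a single function, as the "binary searching" comment in the removed code describes.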
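The deleted clobberWalk switch also documents which header words it treated as pointers: strings and slices carry their pointer in the first word, while interfaces are clobbered only in their second (data) word. As a rough illustration of which frame offsets that walk would have poisoned, here is a small reflect-based sketch that mimics the same traversal for an ordinary Go value. It is an approximation for a 64-bit target and is not how the compiler computes offsets (the real code walks *types.Type, not reflect.Type).

package main

import (
	"fmt"
	"reflect"
	"unsafe"
)

const widthPtr = int64(unsafe.Sizeof(uintptr(0)))

// pointerOffsets mimics the removed clobberWalk: it records the byte offsets
// within a value of type t at which that code would have emitted an OpClobber
// of a pointer word.
func pointerOffsets(t reflect.Type, off int64, out *[]int64) {
	switch t.Kind() {
	case reflect.Ptr, reflect.UnsafePointer, reflect.Func, reflect.Chan, reflect.Map:
		*out = append(*out, off)
	case reflect.String: // struct { byte *str; int len }
		*out = append(*out, off)
	case reflect.Interface: // { *itab or *type; void *data }: only the data word
		*out = append(*out, off+widthPtr)
	case reflect.Slice: // struct { byte *array; int len; int cap }
		*out = append(*out, off)
	case reflect.Array:
		for i := 0; i < t.Len(); i++ {
			pointerOffsets(t.Elem(), off+int64(i)*int64(t.Elem().Size()), out)
		}
	case reflect.Struct:
		for i := 0; i < t.NumField(); i++ {
			f := t.Field(i)
			pointerOffsets(f.Type, off+int64(f.Offset), out)
		}
	}
}

func main() {
	type sample struct {
		s string
		i interface{}
		b [2]*int
	}
	var offs []int64
	pointerOffsets(reflect.TypeOf(sample{}), 0, &offs)
	fmt.Println(offs) // on a 64-bit target: [0 24 32 40]
}

Running it on the sample struct reports the string's data pointer, the interface's data word, and the two array elements, matching the per-case comments in the removed switch.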
@@ -1553,7 +1395,6 @@ func liveness(e *ssafn, f *ssa.Func, pp *Progs) LivenessMap {
 	lv.prologue()
 	lv.solve()
 	lv.epilogue()
-	lv.clobber()
 	if debuglive > 0 {
 		lv.showlive(nil, lv.stackMaps[0])
 		for _, b := range f.Blocks {