github.com/megatontech/mynoteforgo@v0.0.0-20200507084910-5d0c6ea6e890/源码/cmd/compile/internal/ssa/nilcheck.go

// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package ssa

import (
	"cmd/internal/objabi"
	"cmd/internal/src"
)

// nilcheckelim eliminates unnecessary nil checks.
// runs on machine-independent code.
func nilcheckelim(f *Func) {
	// A nil check is redundant if the same nil check was successful in a
	// dominating block. The efficacy of this pass depends heavily on the
	// efficacy of the cse pass.
	sdom := f.sdom()

	// TODO: Eliminate more nil checks.
	// We can recursively remove any chain of fixed offset calculations,
	// i.e. struct fields and array elements, even with non-constant
	// indices: x is non-nil iff x.a.b[i].c is.

	type walkState int
	const (
		Work     walkState = iota // process nil checks and traverse to dominees
		ClearPtr                  // forget the fact that ptr is nil
	)

	type bp struct {
		block *Block // block, or nil in ClearPtr state
		ptr   *Value // if non-nil, ptr that is to be cleared in ClearPtr state
		op    walkState
	}

	work := make([]bp, 0, 256)
	work = append(work, bp{block: f.Entry})

	// map from value ID to bool indicating if value is known to be non-nil
	// in the current dominator path being walked. This slice is updated by
	// walkStates to maintain the known non-nil values.
	nonNilValues := make([]bool, f.NumValues())

	// make an initial pass identifying any non-nil values
	for _, b := range f.Blocks {
		for _, v := range b.Values {
			// a value resulting from taking the address of a
			// value, or a value constructed from an offset of a
			// non-nil ptr (OpAddPtr) implies it is non-nil.
			// We also assume unsafe pointer arithmetic generates non-nil pointers. See #27180.
			if v.Op == OpAddr || v.Op == OpLocalAddr || v.Op == OpAddPtr || v.Op == OpOffPtr || v.Op == OpAdd32 || v.Op == OpAdd64 || v.Op == OpSub32 || v.Op == OpSub64 {
				nonNilValues[v.ID] = true
			}
		}
	}

	for changed := true; changed; {
		changed = false
		for _, b := range f.Blocks {
			for _, v := range b.Values {
				// phis whose arguments are all non-nil
				// are non-nil
				if v.Op == OpPhi {
					argsNonNil := true
					for _, a := range v.Args {
						if !nonNilValues[a.ID] {
							argsNonNil = false
							break
						}
					}
					if argsNonNil {
						if !nonNilValues[v.ID] {
							changed = true
						}
						nonNilValues[v.ID] = true
					}
				}
			}
		}
	}

	// allocate auxiliary data structures for computing store order
	sset := f.newSparseSet(f.NumValues())
	defer f.retSparseSet(sset)
	storeNumber := make([]int32, f.NumValues())

	// perform a depth first walk of the dominee tree
	for len(work) > 0 {
		node := work[len(work)-1]
		work = work[:len(work)-1]

		switch node.op {
		case Work:
			b := node.block

			// First, see if we're dominated by an explicit nil check.
			if len(b.Preds) == 1 {
				p := b.Preds[0].b
				if p.Kind == BlockIf && p.Control.Op == OpIsNonNil && p.Succs[0].b == b {
					ptr := p.Control.Args[0]
					if !nonNilValues[ptr.ID] {
						nonNilValues[ptr.ID] = true
						work = append(work, bp{op: ClearPtr, ptr: ptr})
					}
				}
			}

			// Next, order values in the current block w.r.t. stores.
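			// storeOrder sorts b.Values so that each value appears after the
			// memory state it depends on; sset and storeNumber are the
			// scratch structures allocated above, reused for every block.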
			b.Values = storeOrder(b.Values, sset, storeNumber)

			pendingLines := f.cachedLineStarts // Holds statement boundaries that need to be moved to a new value/block
			pendingLines.clear()

			// Next, process values in the block.
			i := 0
			for _, v := range b.Values {
				b.Values[i] = v
				i++
				switch v.Op {
				case OpIsNonNil:
					ptr := v.Args[0]
					if nonNilValues[ptr.ID] {
						if v.Pos.IsStmt() == src.PosIsStmt { // Boolean true is a terrible statement boundary.
							pendingLines.add(v.Pos.Line())
							v.Pos = v.Pos.WithNotStmt()
						}
						// This is a redundant explicit nil check.
						v.reset(OpConstBool)
						v.AuxInt = 1 // true
					}
				case OpNilCheck:
					ptr := v.Args[0]
					if nonNilValues[ptr.ID] {
						// This is a redundant implicit nil check.
						// Logging in the style of the former compiler -- and omit line 1,
						// which is usually in generated code.
						if f.fe.Debug_checknil() && v.Pos.Line() > 1 {
							f.Warnl(v.Pos, "removed nil check")
						}
						if v.Pos.IsStmt() == src.PosIsStmt { // About to lose a statement boundary
							pendingLines.add(v.Pos.Line())
						}
						v.reset(OpUnknown)
						f.freeValue(v)
						i--
						continue
					}
					// Record the fact that we know ptr is non-nil, and remember to
					// undo that information when this dominator subtree is done.
					nonNilValues[ptr.ID] = true
					work = append(work, bp{op: ClearPtr, ptr: ptr})
					fallthrough // a non-eliminated nil check might be a good place for a statement boundary.
				default:
					if pendingLines.contains(v.Pos.Line()) && v.Pos.IsStmt() != src.PosNotStmt {
						v.Pos = v.Pos.WithIsStmt()
						pendingLines.remove(v.Pos.Line())
					}
				}
			}
			if pendingLines.contains(b.Pos.Line()) {
				b.Pos = b.Pos.WithIsStmt()
				pendingLines.remove(b.Pos.Line())
			}
			for j := i; j < len(b.Values); j++ {
				b.Values[j] = nil
			}
			b.Values = b.Values[:i]

			// Add all dominated blocks to the work list.
			for w := sdom[node.block.ID].child; w != nil; w = sdom[w.ID].sibling {
				work = append(work, bp{op: Work, block: w})
			}

		case ClearPtr:
			nonNilValues[node.ptr.ID] = false
			continue
		}
	}
}

// All platforms are guaranteed to fault if we load/store to anything smaller than this address.
//
// This should agree with minLegalPointer in the runtime.
const minZeroPage = 4096

// faultOnLoad is true if a load to an address below minZeroPage will trigger a SIGSEGV.
var faultOnLoad = objabi.GOOS != "aix"

// nilcheckelim2 eliminates unnecessary nil checks.
// Runs after lowering and scheduling.
func nilcheckelim2(f *Func) {
	unnecessary := f.newSparseSet(f.NumValues())
	defer f.retSparseSet(unnecessary)

	pendingLines := f.cachedLineStarts // Holds statement boundaries that need to be moved to a new value/block

	for _, b := range f.Blocks {
		// Walk the block backwards. Find instructions that will fault if their
		// input pointer is nil. Remove nil checks on those pointers, as the
		// faulting instruction effectively does the nil check for free.
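		// unnecessary collects the IDs of pointers that some instruction later
		// in this block is guaranteed to fault on if they are nil; nil checks
		// on those pointers found as the walk moves toward the start of the
		// block are redundant. firstToRemove tracks the lowest index clobbered
		// with OpUnknown so the compaction loop below can skip the untouched
		// prefix.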
		unnecessary.clear()
		pendingLines.clear()
		// Optimization: keep track of removed nilcheck with smallest index
		firstToRemove := len(b.Values)
		for i := len(b.Values) - 1; i >= 0; i-- {
			v := b.Values[i]
			if opcodeTable[v.Op].nilCheck && unnecessary.contains(v.Args[0].ID) {
				if f.fe.Debug_checknil() && v.Pos.Line() > 1 {
					f.Warnl(v.Pos, "removed nil check")
				}
				if v.Pos.IsStmt() == src.PosIsStmt {
					pendingLines.add(v.Pos.Line())
				}
				v.reset(OpUnknown)
				firstToRemove = i
				continue
			}
			if v.Type.IsMemory() || v.Type.IsTuple() && v.Type.FieldType(1).IsMemory() {
				if v.Op == OpVarDef || v.Op == OpVarKill || v.Op == OpVarLive {
					// These ops don't really change memory.
					continue
				}
				// This op changes memory. Any faulting instruction after v that
				// we've recorded in the unnecessary map is now obsolete.
				unnecessary.clear()
			}

			// Find any pointers that this op is guaranteed to fault on if nil.
			var ptrstore [2]*Value
			ptrs := ptrstore[:0]
			if opcodeTable[v.Op].faultOnNilArg0 && (faultOnLoad || v.Type.IsMemory()) {
				// On AIX, only writing will fault.
				ptrs = append(ptrs, v.Args[0])
			}
			if opcodeTable[v.Op].faultOnNilArg1 && (faultOnLoad || (v.Type.IsMemory() && v.Op != OpPPC64LoweredMove)) {
				// On AIX, only writing will fault.
				// LoweredMove is a special case because it's considered as a "mem" as it stores on arg0
				// but arg1 is accessed as a load and should be checked.
				ptrs = append(ptrs, v.Args[1])
			}

			for _, ptr := range ptrs {
				// Check to make sure the offset is small.
				switch opcodeTable[v.Op].auxType {
				case auxSymOff:
					if v.Aux != nil || v.AuxInt < 0 || v.AuxInt >= minZeroPage {
						continue
					}
				case auxSymValAndOff:
					off := ValAndOff(v.AuxInt).Off()
					if v.Aux != nil || off < 0 || off >= minZeroPage {
						continue
					}
				case auxInt32:
					// Mips uses this auxType for atomic add constant. It does not affect the effective address.
				case auxInt64:
					// ARM uses this auxType for duffcopy/duffzero/alignment info.
					// It does not affect the effective address.
				case auxNone:
					// offset is zero.
				default:
					v.Fatalf("can't handle aux %s (type %d) yet\n", v.auxString(), int(opcodeTable[v.Op].auxType))
				}
				// This instruction is guaranteed to fault if ptr is nil.
				// Any previous nil check op is unnecessary.
				unnecessary.add(ptr.ID)
			}
		}
		// Remove values we've clobbered with OpUnknown.
		i := firstToRemove
		for j := i; j < len(b.Values); j++ {
			v := b.Values[j]
			if v.Op != OpUnknown {
				if v.Pos.IsStmt() != src.PosNotStmt && pendingLines.contains(v.Pos.Line()) {
					v.Pos = v.Pos.WithIsStmt()
					pendingLines.remove(v.Pos.Line())
				}
				b.Values[i] = v
				i++
			}
		}

		if pendingLines.contains(b.Pos.Line()) {
			b.Pos = b.Pos.WithIsStmt()
		}

		for j := i; j < len(b.Values); j++ {
			b.Values[j] = nil
		}
		b.Values = b.Values[:i]

		// TODO: if b.Kind == BlockPlain, start the analysis in the subsequent block to find
		// more unnecessary nil checks. Would fix test/nilptr3.go:159.
	}
}
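
For a concrete sense of the redundancy nilcheckelim looks for, here is a minimal source-level sketch; the package, function, and variable names are illustrative only, and whether the check is actually removed in a given build also depends on later passes and the target:

	package example

	func get(p *int) int {
		if p == nil {
			return 0
		}
		// The implicit nil check generated for this dereference is dominated
		// by the explicit p == nil test above, so nilcheckelim treats it as
		// redundant (nilcheckelim2 can likewise fold a check into a faulting
		// load at a small offset).
		return *p
	}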