github.com/bir3/gocompiler@v0.9.2202/src/cmd/compile/internal/walk/builtin.go (about) 1 // Copyright 2009 The Go Authors. All rights reserved. 2 // Use of this source code is governed by a BSD-style 3 // license that can be found in the LICENSE file. 4 5 package walk 6 7 import ( 8 "fmt" 9 "github.com/bir3/gocompiler/src/go/constant" 10 "github.com/bir3/gocompiler/src/go/token" 11 "strings" 12 13 "github.com/bir3/gocompiler/src/cmd/compile/internal/base" 14 "github.com/bir3/gocompiler/src/cmd/compile/internal/escape" 15 "github.com/bir3/gocompiler/src/cmd/compile/internal/ir" 16 "github.com/bir3/gocompiler/src/cmd/compile/internal/reflectdata" 17 "github.com/bir3/gocompiler/src/cmd/compile/internal/typecheck" 18 "github.com/bir3/gocompiler/src/cmd/compile/internal/types" 19 ) 20 21 // Rewrite append(src, x, y, z) so that any side effects in 22 // x, y, z (including runtime panics) are evaluated in 23 // initialization statements before the append. 24 // For normal code generation, stop there and leave the 25 // rest to ssagen. 26 // 27 // For race detector, expand append(src, a [, b]* ) to 28 // 29 // init { 30 // s := src 31 // const argc = len(args) - 1 32 // newLen := s.len + argc 33 // if uint(newLen) <= uint(s.cap) { 34 // s = s[:newLen] 35 // } else { 36 // s = growslice(s.ptr, newLen, s.cap, argc, elemType) 37 // } 38 // s[s.len - argc] = a 39 // s[s.len - argc + 1] = b 40 // ... 41 // } 42 // s 43 func walkAppend(n *ir.CallExpr, init *ir.Nodes, dst ir.Node) ir.Node { 44 if !ir.SameSafeExpr(dst, n.Args[0]) { 45 n.Args[0] = safeExpr(n.Args[0], init) 46 n.Args[0] = walkExpr(n.Args[0], init) 47 } 48 walkExprListSafe(n.Args[1:], init) 49 50 nsrc := n.Args[0] 51 52 // walkExprListSafe will leave OINDEX (s[n]) alone if both s 53 // and n are name or literal, but those may index the slice we're 54 // modifying here. Fix explicitly. 55 // Using cheapExpr also makes sure that the evaluation 56 // of all arguments (and especially any panics) happen 57 // before we begin to modify the slice in a visible way. 58 ls := n.Args[1:] 59 for i, n := range ls { 60 n = cheapExpr(n, init) 61 if !types.Identical(n.Type(), nsrc.Type().Elem()) { 62 n = typecheck.AssignConv(n, nsrc.Type().Elem(), "append") 63 n = walkExpr(n, init) 64 } 65 ls[i] = n 66 } 67 68 argc := len(n.Args) - 1 69 if argc < 1 { 70 return nsrc 71 } 72 73 // General case, with no function calls left as arguments. 74 // Leave for ssagen, except that instrumentation requires the old form. 75 if !base.Flag.Cfg.Instrumenting || base.Flag.CompilingRuntime { 76 return n 77 } 78 79 var l []ir.Node 80 81 // s = slice to append to 82 s := typecheck.TempAt(base.Pos, ir.CurFunc, nsrc.Type()) 83 l = append(l, ir.NewAssignStmt(base.Pos, s, nsrc)) 84 85 // num = number of things to append 86 num := ir.NewInt(base.Pos, int64(argc)) 87 88 // newLen := s.len + num 89 newLen := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TINT]) 90 l = append(l, ir.NewAssignStmt(base.Pos, newLen, ir.NewBinaryExpr(base.Pos, ir.OADD, ir.NewUnaryExpr(base.Pos, ir.OLEN, s), num))) 91 92 // if uint(newLen) <= uint(s.cap) 93 nif := ir.NewIfStmt(base.Pos, nil, nil, nil) 94 nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OLE, typecheck.Conv(newLen, types.Types[types.TUINT]), typecheck.Conv(ir.NewUnaryExpr(base.Pos, ir.OCAP, s), types.Types[types.TUINT])) 95 nif.Likely = true 96 97 // then { s = s[:n] } 98 slice := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, nil, newLen, nil) 99 slice.SetBounded(true) 100 nif.Body = []ir.Node{ 101 ir.NewAssignStmt(base.Pos, s, slice), 102 } 103 104 // else { s = growslice(s.ptr, n, s.cap, a, T) } 105 nif.Else = []ir.Node{ 106 ir.NewAssignStmt(base.Pos, s, walkGrowslice(s, nif.PtrInit(), 107 ir.NewUnaryExpr(base.Pos, ir.OSPTR, s), 108 newLen, 109 ir.NewUnaryExpr(base.Pos, ir.OCAP, s), 110 num)), 111 } 112 113 l = append(l, nif) 114 115 ls = n.Args[1:] 116 for i, n := range ls { 117 // s[s.len-argc+i] = arg 118 ix := ir.NewIndexExpr(base.Pos, s, ir.NewBinaryExpr(base.Pos, ir.OSUB, newLen, ir.NewInt(base.Pos, int64(argc-i)))) 119 ix.SetBounded(true) 120 l = append(l, ir.NewAssignStmt(base.Pos, ix, n)) 121 } 122 123 typecheck.Stmts(l) 124 walkStmtList(l) 125 init.Append(l...) 126 return s 127 } 128 129 // growslice(ptr *T, newLen, oldCap, num int, <type>) (ret []T) 130 func walkGrowslice(slice *ir.Name, init *ir.Nodes, oldPtr, newLen, oldCap, num ir.Node) *ir.CallExpr { 131 elemtype := slice.Type().Elem() 132 fn := typecheck.LookupRuntime("growslice", elemtype, elemtype) 133 elemtypeptr := reflectdata.TypePtrAt(base.Pos, elemtype) 134 return mkcall1(fn, slice.Type(), init, oldPtr, newLen, oldCap, num, elemtypeptr) 135 } 136 137 // walkClear walks an OCLEAR node. 138 func walkClear(n *ir.UnaryExpr) ir.Node { 139 typ := n.X.Type() 140 switch { 141 case typ.IsSlice(): 142 if n := arrayClear(n.X.Pos(), n.X, nil); n != nil { 143 return n 144 } 145 // If n == nil, we are clearing an array which takes zero memory, do nothing. 146 return ir.NewBlockStmt(n.Pos(), nil) 147 case typ.IsMap(): 148 return mapClear(n.X, reflectdata.TypePtrAt(n.X.Pos(), n.X.Type())) 149 } 150 panic("unreachable") 151 } 152 153 // walkClose walks an OCLOSE node. 154 func walkClose(n *ir.UnaryExpr, init *ir.Nodes) ir.Node { 155 // cannot use chanfn - closechan takes any, not chan any 156 fn := typecheck.LookupRuntime("closechan", n.X.Type()) 157 return mkcall1(fn, nil, init, n.X) 158 } 159 160 // Lower copy(a, b) to a memmove call or a runtime call. 161 // 162 // init { 163 // n := len(a) 164 // if n > len(b) { n = len(b) } 165 // if a.ptr != b.ptr { memmove(a.ptr, b.ptr, n*sizeof(elem(a))) } 166 // } 167 // n; 168 // 169 // Also works if b is a string. 170 func walkCopy(n *ir.BinaryExpr, init *ir.Nodes, runtimecall bool) ir.Node { 171 if n.X.Type().Elem().HasPointers() { 172 ir.CurFunc.SetWBPos(n.Pos()) 173 fn := writebarrierfn("typedslicecopy", n.X.Type().Elem(), n.Y.Type().Elem()) 174 n.X = cheapExpr(n.X, init) 175 ptrL, lenL := backingArrayPtrLen(n.X) 176 n.Y = cheapExpr(n.Y, init) 177 ptrR, lenR := backingArrayPtrLen(n.Y) 178 return mkcall1(fn, n.Type(), init, reflectdata.CopyElemRType(base.Pos, n), ptrL, lenL, ptrR, lenR) 179 } 180 181 if runtimecall { 182 // rely on runtime to instrument: 183 // copy(n.Left, n.Right) 184 // n.Right can be a slice or string. 185 186 n.X = cheapExpr(n.X, init) 187 ptrL, lenL := backingArrayPtrLen(n.X) 188 n.Y = cheapExpr(n.Y, init) 189 ptrR, lenR := backingArrayPtrLen(n.Y) 190 191 fn := typecheck.LookupRuntime("slicecopy", ptrL.Type().Elem(), ptrR.Type().Elem()) 192 193 return mkcall1(fn, n.Type(), init, ptrL, lenL, ptrR, lenR, ir.NewInt(base.Pos, n.X.Type().Elem().Size())) 194 } 195 196 n.X = walkExpr(n.X, init) 197 n.Y = walkExpr(n.Y, init) 198 nl := typecheck.TempAt(base.Pos, ir.CurFunc, n.X.Type()) 199 nr := typecheck.TempAt(base.Pos, ir.CurFunc, n.Y.Type()) 200 var l []ir.Node 201 l = append(l, ir.NewAssignStmt(base.Pos, nl, n.X)) 202 l = append(l, ir.NewAssignStmt(base.Pos, nr, n.Y)) 203 204 nfrm := ir.NewUnaryExpr(base.Pos, ir.OSPTR, nr) 205 nto := ir.NewUnaryExpr(base.Pos, ir.OSPTR, nl) 206 207 nlen := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TINT]) 208 209 // n = len(to) 210 l = append(l, ir.NewAssignStmt(base.Pos, nlen, ir.NewUnaryExpr(base.Pos, ir.OLEN, nl))) 211 212 // if n > len(frm) { n = len(frm) } 213 nif := ir.NewIfStmt(base.Pos, nil, nil, nil) 214 215 nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OGT, nlen, ir.NewUnaryExpr(base.Pos, ir.OLEN, nr)) 216 nif.Body.Append(ir.NewAssignStmt(base.Pos, nlen, ir.NewUnaryExpr(base.Pos, ir.OLEN, nr))) 217 l = append(l, nif) 218 219 // if to.ptr != frm.ptr { memmove( ... ) } 220 ne := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.ONE, nto, nfrm), nil, nil) 221 ne.Likely = true 222 l = append(l, ne) 223 224 fn := typecheck.LookupRuntime("memmove", nl.Type().Elem(), nl.Type().Elem()) 225 nwid := ir.Node(typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TUINTPTR])) 226 setwid := ir.NewAssignStmt(base.Pos, nwid, typecheck.Conv(nlen, types.Types[types.TUINTPTR])) 227 ne.Body.Append(setwid) 228 nwid = ir.NewBinaryExpr(base.Pos, ir.OMUL, nwid, ir.NewInt(base.Pos, nl.Type().Elem().Size())) 229 call := mkcall1(fn, nil, init, nto, nfrm, nwid) 230 ne.Body.Append(call) 231 232 typecheck.Stmts(l) 233 walkStmtList(l) 234 init.Append(l...) 235 return nlen 236 } 237 238 // walkDelete walks an ODELETE node. 239 func walkDelete(init *ir.Nodes, n *ir.CallExpr) ir.Node { 240 init.Append(ir.TakeInit(n)...) 241 map_ := n.Args[0] 242 key := n.Args[1] 243 map_ = walkExpr(map_, init) 244 key = walkExpr(key, init) 245 246 t := map_.Type() 247 fast := mapfast(t) 248 key = mapKeyArg(fast, n, key, false) 249 return mkcall1(mapfndel(mapdelete[fast], t), nil, init, reflectdata.DeleteMapRType(base.Pos, n), map_, key) 250 } 251 252 // walkLenCap walks an OLEN or OCAP node. 253 func walkLenCap(n *ir.UnaryExpr, init *ir.Nodes) ir.Node { 254 if isRuneCount(n) { 255 // Replace len([]rune(string)) with runtime.countrunes(string). 256 return mkcall("countrunes", n.Type(), init, typecheck.Conv(n.X.(*ir.ConvExpr).X, types.Types[types.TSTRING])) 257 } 258 if isByteCount(n) { 259 conv := n.X.(*ir.ConvExpr) 260 walkStmtList(conv.Init()) 261 init.Append(ir.TakeInit(conv)...) 262 _, len := backingArrayPtrLen(cheapExpr(conv.X, init)) 263 return len 264 } 265 266 n.X = walkExpr(n.X, init) 267 268 // replace len(*[10]int) with 10. 269 // delayed until now to preserve side effects. 270 t := n.X.Type() 271 272 if t.IsPtr() { 273 t = t.Elem() 274 } 275 if t.IsArray() { 276 safeExpr(n.X, init) 277 con := ir.NewConstExpr(constant.MakeInt64(t.NumElem()), n) 278 con.SetTypecheck(1) 279 return con 280 } 281 return n 282 } 283 284 // walkMakeChan walks an OMAKECHAN node. 285 func walkMakeChan(n *ir.MakeExpr, init *ir.Nodes) ir.Node { 286 // When size fits into int, use makechan instead of 287 // makechan64, which is faster and shorter on 32 bit platforms. 288 size := n.Len 289 fnname := "makechan64" 290 argtype := types.Types[types.TINT64] 291 292 // Type checking guarantees that TIDEAL size is positive and fits in an int. 293 // The case of size overflow when converting TUINT or TUINTPTR to TINT 294 // will be handled by the negative range checks in makechan during runtime. 295 if size.Type().IsKind(types.TIDEAL) || size.Type().Size() <= types.Types[types.TUINT].Size() { 296 fnname = "makechan" 297 argtype = types.Types[types.TINT] 298 } 299 300 return mkcall1(chanfn(fnname, 1, n.Type()), n.Type(), init, reflectdata.MakeChanRType(base.Pos, n), typecheck.Conv(size, argtype)) 301 } 302 303 // walkMakeMap walks an OMAKEMAP node. 304 func walkMakeMap(n *ir.MakeExpr, init *ir.Nodes) ir.Node { 305 t := n.Type() 306 hmapType := reflectdata.MapType() 307 hint := n.Len 308 309 // var h *hmap 310 var h ir.Node 311 if n.Esc() == ir.EscNone { 312 // Allocate hmap on stack. 313 314 // var hv hmap 315 // h = &hv 316 h = stackTempAddr(init, hmapType) 317 318 // Allocate one bucket pointed to by hmap.buckets on stack if hint 319 // is not larger than BUCKETSIZE. In case hint is larger than 320 // BUCKETSIZE runtime.makemap will allocate the buckets on the heap. 321 // Maximum key and elem size is 128 bytes, larger objects 322 // are stored with an indirection. So max bucket size is 2048+eps. 323 if !ir.IsConst(hint, constant.Int) || 324 constant.Compare(hint.Val(), token.LEQ, constant.MakeInt64(reflectdata.BUCKETSIZE)) { 325 326 // In case hint is larger than BUCKETSIZE runtime.makemap 327 // will allocate the buckets on the heap, see #20184 328 // 329 // if hint <= BUCKETSIZE { 330 // var bv bmap 331 // b = &bv 332 // h.buckets = b 333 // } 334 335 nif := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OLE, hint, ir.NewInt(base.Pos, reflectdata.BUCKETSIZE)), nil, nil) 336 nif.Likely = true 337 338 // var bv bmap 339 // b = &bv 340 b := stackTempAddr(&nif.Body, reflectdata.MapBucketType(t)) 341 342 // h.buckets = b 343 bsym := hmapType.Field(5).Sym // hmap.buckets see reflect.go:hmap 344 na := ir.NewAssignStmt(base.Pos, ir.NewSelectorExpr(base.Pos, ir.ODOT, h, bsym), typecheck.ConvNop(b, types.Types[types.TUNSAFEPTR])) 345 nif.Body.Append(na) 346 appendWalkStmt(init, nif) 347 } 348 } 349 350 if ir.IsConst(hint, constant.Int) && constant.Compare(hint.Val(), token.LEQ, constant.MakeInt64(reflectdata.BUCKETSIZE)) { 351 // Handling make(map[any]any) and 352 // make(map[any]any, hint) where hint <= BUCKETSIZE 353 // special allows for faster map initialization and 354 // improves binary size by using calls with fewer arguments. 355 // For hint <= BUCKETSIZE overLoadFactor(hint, 0) is false 356 // and no buckets will be allocated by makemap. Therefore, 357 // no buckets need to be allocated in this code path. 358 if n.Esc() == ir.EscNone { 359 // Only need to initialize h.hash0 since 360 // hmap h has been allocated on the stack already. 361 // h.hash0 = rand32() 362 rand := mkcall("rand32", types.Types[types.TUINT32], init) 363 hashsym := hmapType.Field(4).Sym // hmap.hash0 see reflect.go:hmap 364 appendWalkStmt(init, ir.NewAssignStmt(base.Pos, ir.NewSelectorExpr(base.Pos, ir.ODOT, h, hashsym), rand)) 365 return typecheck.ConvNop(h, t) 366 } 367 // Call runtime.makehmap to allocate an 368 // hmap on the heap and initialize hmap's hash0 field. 369 fn := typecheck.LookupRuntime("makemap_small", t.Key(), t.Elem()) 370 return mkcall1(fn, n.Type(), init) 371 } 372 373 if n.Esc() != ir.EscNone { 374 h = typecheck.NodNil() 375 } 376 // Map initialization with a variable or large hint is 377 // more complicated. We therefore generate a call to 378 // runtime.makemap to initialize hmap and allocate the 379 // map buckets. 380 381 // When hint fits into int, use makemap instead of 382 // makemap64, which is faster and shorter on 32 bit platforms. 383 fnname := "makemap64" 384 argtype := types.Types[types.TINT64] 385 386 // Type checking guarantees that TIDEAL hint is positive and fits in an int. 387 // See checkmake call in TMAP case of OMAKE case in OpSwitch in typecheck1 function. 388 // The case of hint overflow when converting TUINT or TUINTPTR to TINT 389 // will be handled by the negative range checks in makemap during runtime. 390 if hint.Type().IsKind(types.TIDEAL) || hint.Type().Size() <= types.Types[types.TUINT].Size() { 391 fnname = "makemap" 392 argtype = types.Types[types.TINT] 393 } 394 395 fn := typecheck.LookupRuntime(fnname, hmapType, t.Key(), t.Elem()) 396 return mkcall1(fn, n.Type(), init, reflectdata.MakeMapRType(base.Pos, n), typecheck.Conv(hint, argtype), h) 397 } 398 399 // walkMakeSlice walks an OMAKESLICE node. 400 func walkMakeSlice(n *ir.MakeExpr, init *ir.Nodes) ir.Node { 401 l := n.Len 402 r := n.Cap 403 if r == nil { 404 r = safeExpr(l, init) 405 l = r 406 } 407 t := n.Type() 408 if t.Elem().NotInHeap() { 409 base.Errorf("%v can't be allocated in Go; it is incomplete (or unallocatable)", t.Elem()) 410 } 411 if n.Esc() == ir.EscNone { 412 if why := escape.HeapAllocReason(n); why != "" { 413 base.Fatalf("%v has EscNone, but %v", n, why) 414 } 415 // var arr [r]T 416 // n = arr[:l] 417 i := typecheck.IndexConst(r) 418 if i < 0 { 419 base.Fatalf("walkExpr: invalid index %v", r) 420 } 421 422 // cap is constrained to [0,2^31) or [0,2^63) depending on whether 423 // we're in 32-bit or 64-bit systems. So it's safe to do: 424 // 425 // if uint64(len) > cap { 426 // if len < 0 { panicmakeslicelen() } 427 // panicmakeslicecap() 428 // } 429 nif := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OGT, typecheck.Conv(l, types.Types[types.TUINT64]), ir.NewInt(base.Pos, i)), nil, nil) 430 niflen := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OLT, l, ir.NewInt(base.Pos, 0)), nil, nil) 431 niflen.Body = []ir.Node{mkcall("panicmakeslicelen", nil, init)} 432 nif.Body.Append(niflen, mkcall("panicmakeslicecap", nil, init)) 433 init.Append(typecheck.Stmt(nif)) 434 435 t = types.NewArray(t.Elem(), i) // [r]T 436 var_ := typecheck.TempAt(base.Pos, ir.CurFunc, t) 437 appendWalkStmt(init, ir.NewAssignStmt(base.Pos, var_, nil)) // zero temp 438 r := ir.NewSliceExpr(base.Pos, ir.OSLICE, var_, nil, l, nil) // arr[:l] 439 // The conv is necessary in case n.Type is named. 440 return walkExpr(typecheck.Expr(typecheck.Conv(r, n.Type())), init) 441 } 442 443 // n escapes; set up a call to makeslice. 444 // When len and cap can fit into int, use makeslice instead of 445 // makeslice64, which is faster and shorter on 32 bit platforms. 446 447 len, cap := l, r 448 449 fnname := "makeslice64" 450 argtype := types.Types[types.TINT64] 451 452 // Type checking guarantees that TIDEAL len/cap are positive and fit in an int. 453 // The case of len or cap overflow when converting TUINT or TUINTPTR to TINT 454 // will be handled by the negative range checks in makeslice during runtime. 455 if (len.Type().IsKind(types.TIDEAL) || len.Type().Size() <= types.Types[types.TUINT].Size()) && 456 (cap.Type().IsKind(types.TIDEAL) || cap.Type().Size() <= types.Types[types.TUINT].Size()) { 457 fnname = "makeslice" 458 argtype = types.Types[types.TINT] 459 } 460 fn := typecheck.LookupRuntime(fnname) 461 ptr := mkcall1(fn, types.Types[types.TUNSAFEPTR], init, reflectdata.MakeSliceElemRType(base.Pos, n), typecheck.Conv(len, argtype), typecheck.Conv(cap, argtype)) 462 ptr.MarkNonNil() 463 len = typecheck.Conv(len, types.Types[types.TINT]) 464 cap = typecheck.Conv(cap, types.Types[types.TINT]) 465 sh := ir.NewSliceHeaderExpr(base.Pos, t, ptr, len, cap) 466 return walkExpr(typecheck.Expr(sh), init) 467 } 468 469 // walkMakeSliceCopy walks an OMAKESLICECOPY node. 470 func walkMakeSliceCopy(n *ir.MakeExpr, init *ir.Nodes) ir.Node { 471 if n.Esc() == ir.EscNone { 472 base.Fatalf("OMAKESLICECOPY with EscNone: %v", n) 473 } 474 475 t := n.Type() 476 if t.Elem().NotInHeap() { 477 base.Errorf("%v can't be allocated in Go; it is incomplete (or unallocatable)", t.Elem()) 478 } 479 480 length := typecheck.Conv(n.Len, types.Types[types.TINT]) 481 copylen := ir.NewUnaryExpr(base.Pos, ir.OLEN, n.Cap) 482 copyptr := ir.NewUnaryExpr(base.Pos, ir.OSPTR, n.Cap) 483 484 if !t.Elem().HasPointers() && n.Bounded() { 485 // When len(to)==len(from) and elements have no pointers: 486 // replace make+copy with runtime.mallocgc+runtime.memmove. 487 488 // We do not check for overflow of len(to)*elem.Width here 489 // since len(from) is an existing checked slice capacity 490 // with same elem.Width for the from slice. 491 size := ir.NewBinaryExpr(base.Pos, ir.OMUL, typecheck.Conv(length, types.Types[types.TUINTPTR]), typecheck.Conv(ir.NewInt(base.Pos, t.Elem().Size()), types.Types[types.TUINTPTR])) 492 493 // instantiate mallocgc(size uintptr, typ *byte, needszero bool) unsafe.Pointer 494 fn := typecheck.LookupRuntime("mallocgc") 495 ptr := mkcall1(fn, types.Types[types.TUNSAFEPTR], init, size, typecheck.NodNil(), ir.NewBool(base.Pos, false)) 496 ptr.MarkNonNil() 497 sh := ir.NewSliceHeaderExpr(base.Pos, t, ptr, length, length) 498 499 s := typecheck.TempAt(base.Pos, ir.CurFunc, t) 500 r := typecheck.Stmt(ir.NewAssignStmt(base.Pos, s, sh)) 501 r = walkExpr(r, init) 502 init.Append(r) 503 504 // instantiate memmove(to *any, frm *any, size uintptr) 505 fn = typecheck.LookupRuntime("memmove", t.Elem(), t.Elem()) 506 ncopy := mkcall1(fn, nil, init, ir.NewUnaryExpr(base.Pos, ir.OSPTR, s), copyptr, size) 507 init.Append(walkExpr(typecheck.Stmt(ncopy), init)) 508 509 return s 510 } 511 // Replace make+copy with runtime.makeslicecopy. 512 // instantiate makeslicecopy(typ *byte, tolen int, fromlen int, from unsafe.Pointer) unsafe.Pointer 513 fn := typecheck.LookupRuntime("makeslicecopy") 514 ptr := mkcall1(fn, types.Types[types.TUNSAFEPTR], init, reflectdata.MakeSliceElemRType(base.Pos, n), length, copylen, typecheck.Conv(copyptr, types.Types[types.TUNSAFEPTR])) 515 ptr.MarkNonNil() 516 sh := ir.NewSliceHeaderExpr(base.Pos, t, ptr, length, length) 517 return walkExpr(typecheck.Expr(sh), init) 518 } 519 520 // walkNew walks an ONEW node. 521 func walkNew(n *ir.UnaryExpr, init *ir.Nodes) ir.Node { 522 t := n.Type().Elem() 523 if t.NotInHeap() { 524 base.Errorf("%v can't be allocated in Go; it is incomplete (or unallocatable)", n.Type().Elem()) 525 } 526 if n.Esc() == ir.EscNone { 527 if t.Size() > ir.MaxImplicitStackVarSize { 528 base.Fatalf("large ONEW with EscNone: %v", n) 529 } 530 return stackTempAddr(init, t) 531 } 532 types.CalcSize(t) 533 n.MarkNonNil() 534 return n 535 } 536 537 func walkMinMax(n *ir.CallExpr, init *ir.Nodes) ir.Node { 538 init.Append(ir.TakeInit(n)...) 539 walkExprList(n.Args, init) 540 return n 541 } 542 543 // generate code for print. 544 func walkPrint(nn *ir.CallExpr, init *ir.Nodes) ir.Node { 545 // Hoist all the argument evaluation up before the lock. 546 walkExprListCheap(nn.Args, init) 547 548 // For println, add " " between elements and "\n" at the end. 549 if nn.Op() == ir.OPRINTLN { 550 s := nn.Args 551 t := make([]ir.Node, 0, len(s)*2) 552 for i, n := range s { 553 if i != 0 { 554 t = append(t, ir.NewString(base.Pos, " ")) 555 } 556 t = append(t, n) 557 } 558 t = append(t, ir.NewString(base.Pos, "\n")) 559 nn.Args = t 560 } 561 562 // Collapse runs of constant strings. 563 s := nn.Args 564 t := make([]ir.Node, 0, len(s)) 565 for i := 0; i < len(s); { 566 var strs []string 567 for i < len(s) && ir.IsConst(s[i], constant.String) { 568 strs = append(strs, ir.StringVal(s[i])) 569 i++ 570 } 571 if len(strs) > 0 { 572 t = append(t, ir.NewString(base.Pos, strings.Join(strs, ""))) 573 } 574 if i < len(s) { 575 t = append(t, s[i]) 576 i++ 577 } 578 } 579 nn.Args = t 580 581 calls := []ir.Node{mkcall("printlock", nil, init)} 582 for i, n := range nn.Args { 583 if n.Op() == ir.OLITERAL { 584 if n.Type() == types.UntypedRune { 585 n = typecheck.DefaultLit(n, types.RuneType) 586 } 587 588 switch n.Val().Kind() { 589 case constant.Int: 590 n = typecheck.DefaultLit(n, types.Types[types.TINT64]) 591 592 case constant.Float: 593 n = typecheck.DefaultLit(n, types.Types[types.TFLOAT64]) 594 } 595 } 596 597 if n.Op() != ir.OLITERAL && n.Type() != nil && n.Type().Kind() == types.TIDEAL { 598 n = typecheck.DefaultLit(n, types.Types[types.TINT64]) 599 } 600 n = typecheck.DefaultLit(n, nil) 601 nn.Args[i] = n 602 if n.Type() == nil || n.Type().Kind() == types.TFORW { 603 continue 604 } 605 606 var on *ir.Name 607 switch n.Type().Kind() { 608 case types.TINTER: 609 if n.Type().IsEmptyInterface() { 610 on = typecheck.LookupRuntime("printeface", n.Type()) 611 } else { 612 on = typecheck.LookupRuntime("printiface", n.Type()) 613 } 614 case types.TPTR: 615 if n.Type().Elem().NotInHeap() { 616 on = typecheck.LookupRuntime("printuintptr") 617 n = ir.NewConvExpr(base.Pos, ir.OCONV, nil, n) 618 n.SetType(types.Types[types.TUNSAFEPTR]) 619 n = ir.NewConvExpr(base.Pos, ir.OCONV, nil, n) 620 n.SetType(types.Types[types.TUINTPTR]) 621 break 622 } 623 fallthrough 624 case types.TCHAN, types.TMAP, types.TFUNC, types.TUNSAFEPTR: 625 on = typecheck.LookupRuntime("printpointer", n.Type()) 626 case types.TSLICE: 627 on = typecheck.LookupRuntime("printslice", n.Type()) 628 case types.TUINT, types.TUINT8, types.TUINT16, types.TUINT32, types.TUINT64, types.TUINTPTR: 629 if types.RuntimeSymName(n.Type().Sym()) == "hex" { 630 on = typecheck.LookupRuntime("printhex") 631 } else { 632 on = typecheck.LookupRuntime("printuint") 633 } 634 case types.TINT, types.TINT8, types.TINT16, types.TINT32, types.TINT64: 635 on = typecheck.LookupRuntime("printint") 636 case types.TFLOAT32, types.TFLOAT64: 637 on = typecheck.LookupRuntime("printfloat") 638 case types.TCOMPLEX64, types.TCOMPLEX128: 639 on = typecheck.LookupRuntime("printcomplex") 640 case types.TBOOL: 641 on = typecheck.LookupRuntime("printbool") 642 case types.TSTRING: 643 cs := "" 644 if ir.IsConst(n, constant.String) { 645 cs = ir.StringVal(n) 646 } 647 switch cs { 648 case " ": 649 on = typecheck.LookupRuntime("printsp") 650 case "\n": 651 on = typecheck.LookupRuntime("printnl") 652 default: 653 on = typecheck.LookupRuntime("printstring") 654 } 655 default: 656 badtype(ir.OPRINT, n.Type(), nil) 657 continue 658 } 659 660 r := ir.NewCallExpr(base.Pos, ir.OCALL, on, nil) 661 if params := on.Type().Params(); len(params) > 0 { 662 t := params[0].Type 663 n = typecheck.Conv(n, t) 664 r.Args.Append(n) 665 } 666 calls = append(calls, r) 667 } 668 669 calls = append(calls, mkcall("printunlock", nil, init)) 670 671 typecheck.Stmts(calls) 672 walkExprList(calls, init) 673 674 r := ir.NewBlockStmt(base.Pos, nil) 675 r.List = calls 676 return walkStmt(typecheck.Stmt(r)) 677 } 678 679 // walkRecoverFP walks an ORECOVERFP node. 680 func walkRecoverFP(nn *ir.CallExpr, init *ir.Nodes) ir.Node { 681 return mkcall("gorecover", nn.Type(), init, walkExpr(nn.Args[0], init)) 682 } 683 684 // walkUnsafeData walks an OUNSAFESLICEDATA or OUNSAFESTRINGDATA expression. 685 func walkUnsafeData(n *ir.UnaryExpr, init *ir.Nodes) ir.Node { 686 slice := walkExpr(n.X, init) 687 res := typecheck.Expr(ir.NewUnaryExpr(n.Pos(), ir.OSPTR, slice)) 688 res.SetType(n.Type()) 689 return walkExpr(res, init) 690 } 691 692 func walkUnsafeSlice(n *ir.BinaryExpr, init *ir.Nodes) ir.Node { 693 ptr := safeExpr(n.X, init) 694 len := safeExpr(n.Y, init) 695 sliceType := n.Type() 696 697 lenType := types.Types[types.TINT64] 698 unsafePtr := typecheck.Conv(ptr, types.Types[types.TUNSAFEPTR]) 699 700 // If checkptr enabled, call runtime.unsafeslicecheckptr to check ptr and len. 701 // for simplicity, unsafeslicecheckptr always uses int64. 702 // Type checking guarantees that TIDEAL len/cap are positive and fit in an int. 703 // The case of len or cap overflow when converting TUINT or TUINTPTR to TINT 704 // will be handled by the negative range checks in unsafeslice during runtime. 705 if ir.ShouldCheckPtr(ir.CurFunc, 1) { 706 fnname := "unsafeslicecheckptr" 707 fn := typecheck.LookupRuntime(fnname) 708 init.Append(mkcall1(fn, nil, init, reflectdata.UnsafeSliceElemRType(base.Pos, n), unsafePtr, typecheck.Conv(len, lenType))) 709 } else { 710 // Otherwise, open code unsafe.Slice to prevent runtime call overhead. 711 // Keep this code in sync with runtime.unsafeslice{,64} 712 if len.Type().IsKind(types.TIDEAL) || len.Type().Size() <= types.Types[types.TUINT].Size() { 713 lenType = types.Types[types.TINT] 714 } else { 715 // len64 := int64(len) 716 // if int64(int(len64)) != len64 { 717 // panicunsafeslicelen() 718 // } 719 len64 := typecheck.Conv(len, lenType) 720 nif := ir.NewIfStmt(base.Pos, nil, nil, nil) 721 nif.Cond = ir.NewBinaryExpr(base.Pos, ir.ONE, typecheck.Conv(typecheck.Conv(len64, types.Types[types.TINT]), lenType), len64) 722 nif.Body.Append(mkcall("panicunsafeslicelen", nil, &nif.Body)) 723 appendWalkStmt(init, nif) 724 } 725 726 // if len < 0 { panicunsafeslicelen() } 727 nif := ir.NewIfStmt(base.Pos, nil, nil, nil) 728 nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OLT, typecheck.Conv(len, lenType), ir.NewInt(base.Pos, 0)) 729 nif.Body.Append(mkcall("panicunsafeslicelen", nil, &nif.Body)) 730 appendWalkStmt(init, nif) 731 732 if sliceType.Elem().Size() == 0 { 733 // if ptr == nil && len > 0 { 734 // panicunsafesliceptrnil() 735 // } 736 nifPtr := ir.NewIfStmt(base.Pos, nil, nil, nil) 737 isNil := ir.NewBinaryExpr(base.Pos, ir.OEQ, unsafePtr, typecheck.NodNil()) 738 gtZero := ir.NewBinaryExpr(base.Pos, ir.OGT, typecheck.Conv(len, lenType), ir.NewInt(base.Pos, 0)) 739 nifPtr.Cond = 740 ir.NewLogicalExpr(base.Pos, ir.OANDAND, isNil, gtZero) 741 nifPtr.Body.Append(mkcall("panicunsafeslicenilptr", nil, &nifPtr.Body)) 742 appendWalkStmt(init, nifPtr) 743 744 h := ir.NewSliceHeaderExpr(n.Pos(), sliceType, 745 typecheck.Conv(ptr, types.Types[types.TUNSAFEPTR]), 746 typecheck.Conv(len, types.Types[types.TINT]), 747 typecheck.Conv(len, types.Types[types.TINT])) 748 return walkExpr(typecheck.Expr(h), init) 749 } 750 751 // mem, overflow := math.mulUintptr(et.size, len) 752 mem := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TUINTPTR]) 753 overflow := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TBOOL]) 754 755 decl := types.NewSignature(nil, 756 []*types.Field{ 757 types.NewField(base.Pos, nil, types.Types[types.TUINTPTR]), 758 types.NewField(base.Pos, nil, types.Types[types.TUINTPTR]), 759 }, 760 []*types.Field{ 761 types.NewField(base.Pos, nil, types.Types[types.TUINTPTR]), 762 types.NewField(base.Pos, nil, types.Types[types.TBOOL]), 763 }) 764 765 fn := ir.NewFunc(n.Pos(), n.Pos(), math_MulUintptr, decl) 766 767 call := mkcall1(fn.Nname, fn.Type().ResultsTuple(), init, ir.NewInt(base.Pos, sliceType.Elem().Size()), typecheck.Conv(typecheck.Conv(len, lenType), types.Types[types.TUINTPTR])) 768 appendWalkStmt(init, ir.NewAssignListStmt(base.Pos, ir.OAS2, []ir.Node{mem, overflow}, []ir.Node{call})) 769 770 // if overflow || mem > -uintptr(ptr) { 771 // if ptr == nil { 772 // panicunsafesliceptrnil() 773 // } 774 // panicunsafeslicelen() 775 // } 776 nif = ir.NewIfStmt(base.Pos, nil, nil, nil) 777 memCond := ir.NewBinaryExpr(base.Pos, ir.OGT, mem, ir.NewUnaryExpr(base.Pos, ir.ONEG, typecheck.Conv(unsafePtr, types.Types[types.TUINTPTR]))) 778 nif.Cond = ir.NewLogicalExpr(base.Pos, ir.OOROR, overflow, memCond) 779 nifPtr := ir.NewIfStmt(base.Pos, nil, nil, nil) 780 nifPtr.Cond = ir.NewBinaryExpr(base.Pos, ir.OEQ, unsafePtr, typecheck.NodNil()) 781 nifPtr.Body.Append(mkcall("panicunsafeslicenilptr", nil, &nifPtr.Body)) 782 nif.Body.Append(nifPtr, mkcall("panicunsafeslicelen", nil, &nif.Body)) 783 appendWalkStmt(init, nif) 784 } 785 786 h := ir.NewSliceHeaderExpr(n.Pos(), sliceType, 787 typecheck.Conv(ptr, types.Types[types.TUNSAFEPTR]), 788 typecheck.Conv(len, types.Types[types.TINT]), 789 typecheck.Conv(len, types.Types[types.TINT])) 790 return walkExpr(typecheck.Expr(h), init) 791 } 792 793 var math_MulUintptr = &types.Sym{Pkg: types.NewPkg("runtime/internal/math", "math"), Name: "MulUintptr"} 794 795 func walkUnsafeString(n *ir.BinaryExpr, init *ir.Nodes) ir.Node { 796 ptr := safeExpr(n.X, init) 797 len := safeExpr(n.Y, init) 798 799 lenType := types.Types[types.TINT64] 800 unsafePtr := typecheck.Conv(ptr, types.Types[types.TUNSAFEPTR]) 801 802 // If checkptr enabled, call runtime.unsafestringcheckptr to check ptr and len. 803 // for simplicity, unsafestringcheckptr always uses int64. 804 // Type checking guarantees that TIDEAL len are positive and fit in an int. 805 if ir.ShouldCheckPtr(ir.CurFunc, 1) { 806 fnname := "unsafestringcheckptr" 807 fn := typecheck.LookupRuntime(fnname) 808 init.Append(mkcall1(fn, nil, init, unsafePtr, typecheck.Conv(len, lenType))) 809 } else { 810 // Otherwise, open code unsafe.String to prevent runtime call overhead. 811 // Keep this code in sync with runtime.unsafestring{,64} 812 if len.Type().IsKind(types.TIDEAL) || len.Type().Size() <= types.Types[types.TUINT].Size() { 813 lenType = types.Types[types.TINT] 814 } else { 815 // len64 := int64(len) 816 // if int64(int(len64)) != len64 { 817 // panicunsafestringlen() 818 // } 819 len64 := typecheck.Conv(len, lenType) 820 nif := ir.NewIfStmt(base.Pos, nil, nil, nil) 821 nif.Cond = ir.NewBinaryExpr(base.Pos, ir.ONE, typecheck.Conv(typecheck.Conv(len64, types.Types[types.TINT]), lenType), len64) 822 nif.Body.Append(mkcall("panicunsafestringlen", nil, &nif.Body)) 823 appendWalkStmt(init, nif) 824 } 825 826 // if len < 0 { panicunsafestringlen() } 827 nif := ir.NewIfStmt(base.Pos, nil, nil, nil) 828 nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OLT, typecheck.Conv(len, lenType), ir.NewInt(base.Pos, 0)) 829 nif.Body.Append(mkcall("panicunsafestringlen", nil, &nif.Body)) 830 appendWalkStmt(init, nif) 831 832 // if uintpr(len) > -uintptr(ptr) { 833 // if ptr == nil { 834 // panicunsafestringnilptr() 835 // } 836 // panicunsafeslicelen() 837 // } 838 nifLen := ir.NewIfStmt(base.Pos, nil, nil, nil) 839 nifLen.Cond = ir.NewBinaryExpr(base.Pos, ir.OGT, typecheck.Conv(len, types.Types[types.TUINTPTR]), ir.NewUnaryExpr(base.Pos, ir.ONEG, typecheck.Conv(unsafePtr, types.Types[types.TUINTPTR]))) 840 nifPtr := ir.NewIfStmt(base.Pos, nil, nil, nil) 841 nifPtr.Cond = ir.NewBinaryExpr(base.Pos, ir.OEQ, unsafePtr, typecheck.NodNil()) 842 nifPtr.Body.Append(mkcall("panicunsafestringnilptr", nil, &nifPtr.Body)) 843 nifLen.Body.Append(nifPtr, mkcall("panicunsafestringlen", nil, &nifLen.Body)) 844 appendWalkStmt(init, nifLen) 845 } 846 h := ir.NewStringHeaderExpr(n.Pos(), 847 typecheck.Conv(ptr, types.Types[types.TUNSAFEPTR]), 848 typecheck.Conv(len, types.Types[types.TINT]), 849 ) 850 return walkExpr(typecheck.Expr(h), init) 851 } 852 853 func badtype(op ir.Op, tl, tr *types.Type) { 854 var s string 855 if tl != nil { 856 s += fmt.Sprintf("\n\t%v", tl) 857 } 858 if tr != nil { 859 s += fmt.Sprintf("\n\t%v", tr) 860 } 861 862 // common mistake: *struct and *interface. 863 if tl != nil && tr != nil && tl.IsPtr() && tr.IsPtr() { 864 if tl.Elem().IsStruct() && tr.Elem().IsInterface() { 865 s += "\n\t(*struct vs *interface)" 866 } else if tl.Elem().IsInterface() && tr.Elem().IsStruct() { 867 s += "\n\t(*interface vs *struct)" 868 } 869 } 870 871 base.Errorf("illegal types for operand: %v%s", op, s) 872 } 873 874 func writebarrierfn(name string, l *types.Type, r *types.Type) ir.Node { 875 return typecheck.LookupRuntime(name, l, r) 876 } 877 878 // isRuneCount reports whether n is of the form len([]rune(string)). 879 // These are optimized into a call to runtime.countrunes. 880 func isRuneCount(n ir.Node) bool { 881 return base.Flag.N == 0 && !base.Flag.Cfg.Instrumenting && n.Op() == ir.OLEN && n.(*ir.UnaryExpr).X.Op() == ir.OSTR2RUNES 882 } 883 884 // isByteCount reports whether n is of the form len(string([]byte)). 885 func isByteCount(n ir.Node) bool { 886 return base.Flag.N == 0 && !base.Flag.Cfg.Instrumenting && n.Op() == ir.OLEN && 887 (n.(*ir.UnaryExpr).X.Op() == ir.OBYTES2STR || n.(*ir.UnaryExpr).X.Op() == ir.OBYTES2STRTMP) 888 }