github.com/bir3/gocompiler@v0.3.205/src/cmd/compile/internal/s390x/ssa.go

// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package s390x

import (
	"math"

	"github.com/bir3/gocompiler/src/cmd/compile/internal/base"
	"github.com/bir3/gocompiler/src/cmd/compile/internal/logopt"
	"github.com/bir3/gocompiler/src/cmd/compile/internal/ssa"
	"github.com/bir3/gocompiler/src/cmd/compile/internal/ssagen"
	"github.com/bir3/gocompiler/src/cmd/compile/internal/types"
	"github.com/bir3/gocompiler/src/cmd/internal/obj"
	"github.com/bir3/gocompiler/src/cmd/internal/obj/s390x"
)

// ssaMarkMoves marks any MOVXconst ops that need to avoid clobbering flags.
func ssaMarkMoves(s *ssagen.State, b *ssa.Block) {
	flive := b.FlagsLiveAtEnd
	for _, c := range b.ControlValues() {
		flive = c.Type.IsFlags() || flive
	}
	for i := len(b.Values) - 1; i >= 0; i-- {
		v := b.Values[i]
		if flive && v.Op == ssa.OpS390XMOVDconst {
			// The "mark" is any non-nil Aux value.
			v.Aux = v
		}
		if v.Type.IsFlags() {
			flive = false
		}
		for _, a := range v.Args {
			if a.Type.IsFlags() {
				flive = true
			}
		}
	}
}

// loadByType returns the load instruction of the given type.
func loadByType(t *types.Type) obj.As {
	if t.IsFloat() {
		switch t.Size() {
		case 4:
			return s390x.AFMOVS
		case 8:
			return s390x.AFMOVD
		}
	} else {
		switch t.Size() {
		case 1:
			if t.IsSigned() {
				return s390x.AMOVB
			} else {
				return s390x.AMOVBZ
			}
		case 2:
			if t.IsSigned() {
				return s390x.AMOVH
			} else {
				return s390x.AMOVHZ
			}
		case 4:
			if t.IsSigned() {
				return s390x.AMOVW
			} else {
				return s390x.AMOVWZ
			}
		case 8:
			return s390x.AMOVD
		}
	}
	panic("bad load type")
}

// storeByType returns the store instruction of the given type.
func storeByType(t *types.Type) obj.As {
	width := t.Size()
	if t.IsFloat() {
		switch width {
		case 4:
			return s390x.AFMOVS
		case 8:
			return s390x.AFMOVD
		}
	} else {
		switch width {
		case 1:
			return s390x.AMOVB
		case 2:
			return s390x.AMOVH
		case 4:
			return s390x.AMOVW
		case 8:
			return s390x.AMOVD
		}
	}
	panic("bad store type")
}

// moveByType returns the reg->reg move instruction of the given type.
func moveByType(t *types.Type) obj.As {
	if t.IsFloat() {
		return s390x.AFMOVD
	} else {
		switch t.Size() {
		case 1:
			if t.IsSigned() {
				return s390x.AMOVB
			} else {
				return s390x.AMOVBZ
			}
		case 2:
			if t.IsSigned() {
				return s390x.AMOVH
			} else {
				return s390x.AMOVHZ
			}
		case 4:
			if t.IsSigned() {
				return s390x.AMOVW
			} else {
				return s390x.AMOVWZ
			}
		case 8:
			return s390x.AMOVD
		}
	}
	panic("bad move type")
}

// opregreg emits instructions for
//
//	dest := dest(To) op src(From)
//
// and also returns the created obj.Prog so it
// may be further adjusted (offset, scale, etc).
func opregreg(s *ssagen.State, op obj.As, dest, src int16) *obj.Prog {
	p := s.Prog(op)
	p.From.Type = obj.TYPE_REG
	p.To.Type = obj.TYPE_REG
	p.To.Reg = dest
	p.From.Reg = src
	return p
}
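
// For example, opregreg(s, s390x.AADD, r1, r2) emits the two-operand form
// "ADD r2, r1" (that is, r1 += r2); callers can set p.Reg on the returned
// Prog afterwards to select the three-operand form.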

// opregregimm emits instructions for
//
//	dest := src op off
//
// and also returns the created obj.Prog so it
// may be further adjusted (offset, scale, etc).
func opregregimm(s *ssagen.State, op obj.As, dest, src int16, off int64) *obj.Prog {
	p := s.Prog(op)
	p.From.Type = obj.TYPE_CONST
	p.From.Offset = off
	p.Reg = src
	p.To.Reg = dest
	p.To.Type = obj.TYPE_REG
	return p
}

// ssaGenValue emits the machine instructions for the SSA value v.
func ssaGenValue(s *ssagen.State, v *ssa.Value) {
	switch v.Op {
	case ssa.OpS390XSLD, ssa.OpS390XSLW,
		ssa.OpS390XSRD, ssa.OpS390XSRW,
		ssa.OpS390XSRAD, ssa.OpS390XSRAW,
		ssa.OpS390XRLLG, ssa.OpS390XRLL:
		r := v.Reg()
		r1 := v.Args[0].Reg()
		r2 := v.Args[1].Reg()
		if r2 == s390x.REG_R0 {
			v.Fatalf("cannot use R0 as shift value %s", v.LongString())
		}
		p := opregreg(s, v.Op.Asm(), r, r2)
		if r != r1 {
			p.Reg = r1
		}
	case ssa.OpS390XRXSBG:
		r2 := v.Args[1].Reg()
		i := v.Aux.(s390x.RotateParams)
		p := s.Prog(v.Op.Asm())
		p.From = obj.Addr{Type: obj.TYPE_CONST, Offset: int64(i.Start)}
		p.SetRestArgs([]obj.Addr{
			{Type: obj.TYPE_CONST, Offset: int64(i.End)},
			{Type: obj.TYPE_CONST, Offset: int64(i.Amount)},
			{Type: obj.TYPE_REG, Reg: r2},
		})
		p.To = obj.Addr{Type: obj.TYPE_REG, Reg: v.Reg()}
	case ssa.OpS390XRISBGZ:
		r1 := v.Reg()
		r2 := v.Args[0].Reg()
		i := v.Aux.(s390x.RotateParams)
		p := s.Prog(v.Op.Asm())
		p.From = obj.Addr{Type: obj.TYPE_CONST, Offset: int64(i.Start)}
		p.SetRestArgs([]obj.Addr{
			{Type: obj.TYPE_CONST, Offset: int64(i.End)},
			{Type: obj.TYPE_CONST, Offset: int64(i.Amount)},
			{Type: obj.TYPE_REG, Reg: r2},
		})
		p.To = obj.Addr{Type: obj.TYPE_REG, Reg: r1}
	case ssa.OpS390XADD, ssa.OpS390XADDW,
		ssa.OpS390XSUB, ssa.OpS390XSUBW,
		ssa.OpS390XAND, ssa.OpS390XANDW,
		ssa.OpS390XOR, ssa.OpS390XORW,
		ssa.OpS390XXOR, ssa.OpS390XXORW:
		r := v.Reg()
		r1 := v.Args[0].Reg()
		r2 := v.Args[1].Reg()
		p := opregreg(s, v.Op.Asm(), r, r2)
		if r != r1 {
			p.Reg = r1
		}
	case ssa.OpS390XADDC:
		r1 := v.Reg0()
		r2 := v.Args[0].Reg()
		r3 := v.Args[1].Reg()
		if r1 == r2 {
			r2, r3 = r3, r2
		}
		p := opregreg(s, v.Op.Asm(), r1, r2)
		if r3 != r1 {
			p.Reg = r3
		}
	case ssa.OpS390XSUBC:
		r1 := v.Reg0()
		r2 := v.Args[0].Reg()
		r3 := v.Args[1].Reg()
		p := opregreg(s, v.Op.Asm(), r1, r3)
		if r1 != r2 {
			p.Reg = r2
		}
	case ssa.OpS390XADDE, ssa.OpS390XSUBE:
		r2 := v.Args[1].Reg()
		opregreg(s, v.Op.Asm(), v.Reg0(), r2)
	case ssa.OpS390XADDCconst:
		r1 := v.Reg0()
		r3 := v.Args[0].Reg()
		i2 := int64(int16(v.AuxInt))
		opregregimm(s, v.Op.Asm(), r1, r3, i2)
	// 2-address opcode arithmetic
	case ssa.OpS390XMULLD, ssa.OpS390XMULLW,
		ssa.OpS390XMULHD, ssa.OpS390XMULHDU,
		ssa.OpS390XFMULS, ssa.OpS390XFMUL, ssa.OpS390XFDIVS, ssa.OpS390XFDIV:
		opregreg(s, v.Op.Asm(), v.Reg(), v.Args[1].Reg())
	case ssa.OpS390XFSUBS, ssa.OpS390XFSUB,
		ssa.OpS390XFADDS, ssa.OpS390XFADD:
		opregreg(s, v.Op.Asm(), v.Reg0(), v.Args[1].Reg())
	case ssa.OpS390XMLGR:
		// MLGR Rx R3 -> R2:R3
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		if r1 != s390x.REG_R3 {
			v.Fatalf("we require the multiplicand to be stored in R3 for MLGR %s", v.LongString())
		}
		p := s.Prog(s390x.AMLGR)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r0
		p.To.Reg = s390x.REG_R2
		p.To.Type = obj.TYPE_REG
	case ssa.OpS390XFMADD, ssa.OpS390XFMADDS,
		ssa.OpS390XFMSUB, ssa.OpS390XFMSUBS:
		r1 := v.Args[1].Reg()
		r2 := v.Args[2].Reg()
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r1
		p.Reg = r2
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpS390XFIDBR:
		switch v.AuxInt {
		case 0, 1, 3, 4, 5, 6, 7:
			opregregimm(s, v.Op.Asm(), v.Reg(), v.Args[0].Reg(), v.AuxInt)
		default:
			v.Fatalf("invalid FIDBR mask: %v", v.AuxInt)
		}
	case ssa.OpS390XCPSDR:
		p := opregreg(s, v.Op.Asm(), v.Reg(), v.Args[1].Reg())
		p.Reg = v.Args[0].Reg()
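	// For the signed variants (DIVD, DIVW, MODD, MODW) the sequence emitted
	// below is, roughly (operands shown symbolically; DIVD stands for the
	// selected instruction):
	//
	//	CMP  divisor, $-1
	//	BEQ  fixup
	//	DIVD divisor, dividend
	//	BR   done
	// fixup:
	//	NEG  dividend // n / -1 == -n (XOR dividend, dividend for MOD: n % -1 == 0)
	// done:
	//
	// The unsigned variants cannot overflow and skip the fixup.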
	case ssa.OpS390XDIVD, ssa.OpS390XDIVW,
		ssa.OpS390XDIVDU, ssa.OpS390XDIVWU,
		ssa.OpS390XMODD, ssa.OpS390XMODW,
		ssa.OpS390XMODDU, ssa.OpS390XMODWU:

		// TODO(mundaym): use the temp registers every time like x86 does with AX?
		dividend := v.Args[0].Reg()
		divisor := v.Args[1].Reg()

		// CPU faults upon signed overflow, which occurs when the most
		// negative int is divided by -1.
		var j *obj.Prog
		if v.Op == ssa.OpS390XDIVD || v.Op == ssa.OpS390XDIVW ||
			v.Op == ssa.OpS390XMODD || v.Op == ssa.OpS390XMODW {

			var c *obj.Prog
			c = s.Prog(s390x.ACMP)
			j = s.Prog(s390x.ABEQ)

			c.From.Type = obj.TYPE_REG
			c.From.Reg = divisor
			c.To.Type = obj.TYPE_CONST
			c.To.Offset = -1

			j.To.Type = obj.TYPE_BRANCH

		}

		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = divisor
		p.Reg = 0
		p.To.Type = obj.TYPE_REG
		p.To.Reg = dividend

		// signed division, rest of the check for -1 case
		if j != nil {
			j2 := s.Prog(s390x.ABR)
			j2.To.Type = obj.TYPE_BRANCH

			var n *obj.Prog
			if v.Op == ssa.OpS390XDIVD || v.Op == ssa.OpS390XDIVW {
				// n * -1 = -n
				n = s.Prog(s390x.ANEG)
				n.To.Type = obj.TYPE_REG
				n.To.Reg = dividend
			} else {
				// n % -1 == 0
				n = s.Prog(s390x.AXOR)
				n.From.Type = obj.TYPE_REG
				n.From.Reg = dividend
				n.To.Type = obj.TYPE_REG
				n.To.Reg = dividend
			}

			j.To.SetTarget(n)
			j2.To.SetTarget(s.Pc())
		}
	case ssa.OpS390XADDconst, ssa.OpS390XADDWconst:
		opregregimm(s, v.Op.Asm(), v.Reg(), v.Args[0].Reg(), v.AuxInt)
	case ssa.OpS390XMULLDconst, ssa.OpS390XMULLWconst,
		ssa.OpS390XSUBconst, ssa.OpS390XSUBWconst,
		ssa.OpS390XANDconst, ssa.OpS390XANDWconst,
		ssa.OpS390XORconst, ssa.OpS390XORWconst,
		ssa.OpS390XXORconst, ssa.OpS390XXORWconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpS390XSLDconst, ssa.OpS390XSLWconst,
		ssa.OpS390XSRDconst, ssa.OpS390XSRWconst,
		ssa.OpS390XSRADconst, ssa.OpS390XSRAWconst,
		ssa.OpS390XRLLconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		r := v.Reg()
		r1 := v.Args[0].Reg()
		if r != r1 {
			p.Reg = r1
		}
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
	case ssa.OpS390XMOVDaddridx:
		r := v.Args[0].Reg()
		i := v.Args[1].Reg()
		p := s.Prog(s390x.AMOVD)
		p.From.Scale = 1
		if i == s390x.REGSP {
			r, i = i, r
		}
		p.From.Type = obj.TYPE_ADDR
		p.From.Reg = r
		p.From.Index = i
		ssagen.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpS390XMOVDaddr:
		p := s.Prog(s390x.AMOVD)
		p.From.Type = obj.TYPE_ADDR
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpS390XCMP, ssa.OpS390XCMPW, ssa.OpS390XCMPU, ssa.OpS390XCMPWU:
		opregreg(s, v.Op.Asm(), v.Args[1].Reg(), v.Args[0].Reg())
	case ssa.OpS390XFCMPS, ssa.OpS390XFCMP:
		opregreg(s, v.Op.Asm(), v.Args[1].Reg(), v.Args[0].Reg())
	case ssa.OpS390XCMPconst, ssa.OpS390XCMPWconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_CONST
		p.To.Offset = v.AuxInt
	case ssa.OpS390XCMPUconst, ssa.OpS390XCMPWUconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_CONST
		p.To.Offset = int64(uint32(v.AuxInt))
	case ssa.OpS390XMOVDconst:
		x := v.Reg()
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.To.Type = obj.TYPE_REG
		p.To.Reg = x
	case ssa.OpS390XFMOVSconst, ssa.OpS390XFMOVDconst:
		x := v.Reg()
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_FCONST
		p.From.Val = math.Float64frombits(uint64(v.AuxInt))
		p.To.Type = obj.TYPE_REG
		p.To.Reg = x
	case ssa.OpS390XADDWload, ssa.OpS390XADDload,
		ssa.OpS390XMULLWload, ssa.OpS390XMULLDload,
		ssa.OpS390XSUBWload, ssa.OpS390XSUBload,
		ssa.OpS390XANDWload, ssa.OpS390XANDload,
		ssa.OpS390XORWload, ssa.OpS390XORload,
		ssa.OpS390XXORWload, ssa.OpS390XXORload:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[1].Reg()
		ssagen.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpS390XMOVDload,
		ssa.OpS390XMOVWZload, ssa.OpS390XMOVHZload, ssa.OpS390XMOVBZload,
		ssa.OpS390XMOVDBRload, ssa.OpS390XMOVWBRload, ssa.OpS390XMOVHBRload,
		ssa.OpS390XMOVBload, ssa.OpS390XMOVHload, ssa.OpS390XMOVWload,
		ssa.OpS390XFMOVSload, ssa.OpS390XFMOVDload:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpS390XMOVBZloadidx, ssa.OpS390XMOVHZloadidx, ssa.OpS390XMOVWZloadidx,
		ssa.OpS390XMOVBloadidx, ssa.OpS390XMOVHloadidx, ssa.OpS390XMOVWloadidx, ssa.OpS390XMOVDloadidx,
		ssa.OpS390XMOVHBRloadidx, ssa.OpS390XMOVWBRloadidx, ssa.OpS390XMOVDBRloadidx,
		ssa.OpS390XFMOVSloadidx, ssa.OpS390XFMOVDloadidx:
		r := v.Args[0].Reg()
		i := v.Args[1].Reg()
		if i == s390x.REGSP {
			r, i = i, r
		}
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = r
		p.From.Scale = 1
		p.From.Index = i
		ssagen.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpS390XMOVBstore, ssa.OpS390XMOVHstore, ssa.OpS390XMOVWstore, ssa.OpS390XMOVDstore,
		ssa.OpS390XMOVHBRstore, ssa.OpS390XMOVWBRstore, ssa.OpS390XMOVDBRstore,
		ssa.OpS390XFMOVSstore, ssa.OpS390XFMOVDstore:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.To, v)
	case ssa.OpS390XMOVBstoreidx, ssa.OpS390XMOVHstoreidx, ssa.OpS390XMOVWstoreidx, ssa.OpS390XMOVDstoreidx,
		ssa.OpS390XMOVHBRstoreidx, ssa.OpS390XMOVWBRstoreidx, ssa.OpS390XMOVDBRstoreidx,
		ssa.OpS390XFMOVSstoreidx, ssa.OpS390XFMOVDstoreidx:
		r := v.Args[0].Reg()
		i := v.Args[1].Reg()
		if i == s390x.REGSP {
			r, i = i, r
		}
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[2].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = r
		p.To.Scale = 1
		p.To.Index = i
		ssagen.AddAux(&p.To, v)
	case ssa.OpS390XMOVDstoreconst, ssa.OpS390XMOVWstoreconst, ssa.OpS390XMOVHstoreconst, ssa.OpS390XMOVBstoreconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		sc := v.AuxValAndOff()
		p.From.Offset = sc.Val64()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		ssagen.AddAux2(&p.To, v, sc.Off64())
	case ssa.OpS390XMOVBreg, ssa.OpS390XMOVHreg, ssa.OpS390XMOVWreg,
		ssa.OpS390XMOVBZreg, ssa.OpS390XMOVHZreg, ssa.OpS390XMOVWZreg,
		ssa.OpS390XLDGR, ssa.OpS390XLGDR,
		ssa.OpS390XCEFBRA, ssa.OpS390XCDFBRA, ssa.OpS390XCEGBRA, ssa.OpS390XCDGBRA,
		ssa.OpS390XCFEBRA, ssa.OpS390XCFDBRA, ssa.OpS390XCGEBRA, ssa.OpS390XCGDBRA,
		ssa.OpS390XCELFBR, ssa.OpS390XCDLFBR, ssa.OpS390XCELGBR, ssa.OpS390XCDLGBR,
		ssa.OpS390XCLFEBR, ssa.OpS390XCLFDBR, ssa.OpS390XCLGEBR, ssa.OpS390XCLGDBR,
		ssa.OpS390XLDEBR, ssa.OpS390XLEDBR,
		ssa.OpS390XFNEG, ssa.OpS390XFNEGS,
		ssa.OpS390XLPDFR, ssa.OpS390XLNDFR:
		opregreg(s, v.Op.Asm(), v.Reg(), v.Args[0].Reg())
	case ssa.OpS390XCLEAR:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		sc := v.AuxValAndOff()
		p.From.Offset = sc.Val64()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		ssagen.AddAux2(&p.To, v, sc.Off64())
	case ssa.OpCopy:
		if v.Type.IsMemory() {
			return
		}
		x := v.Args[0].Reg()
		y := v.Reg()
		if x != y {
			opregreg(s, moveByType(v.Type), y, x)
		}
	case ssa.OpLoadReg:
		if v.Type.IsFlags() {
			v.Fatalf("load flags not implemented: %v", v.LongString())
			return
		}
		p := s.Prog(loadByType(v.Type))
		ssagen.AddrAuto(&p.From, v.Args[0])
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpStoreReg:
		if v.Type.IsFlags() {
			v.Fatalf("store flags not implemented: %v", v.LongString())
			return
		}
		p := s.Prog(storeByType(v.Type))
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddrAuto(&p.To, v)
	case ssa.OpS390XLoweredGetClosurePtr:
		// Closure pointer is R12 (already)
		ssagen.CheckLoweredGetClosurePtr(v)
	case ssa.OpS390XLoweredRound32F, ssa.OpS390XLoweredRound64F:
		// input is already rounded
	case ssa.OpS390XLoweredGetG:
		r := v.Reg()
		p := s.Prog(s390x.AMOVD)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = s390x.REGG
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
	case ssa.OpS390XLoweredGetCallerSP:
		// caller's SP is FixedFrameSize below the address of the first arg
		p := s.Prog(s390x.AMOVD)
		p.From.Type = obj.TYPE_ADDR
		p.From.Offset = -base.Ctxt.Arch.FixedFrameSize
		p.From.Name = obj.NAME_PARAM
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpS390XLoweredGetCallerPC:
		p := s.Prog(obj.AGETCALLERPC)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpS390XCALLstatic, ssa.OpS390XCALLclosure, ssa.OpS390XCALLinter:
		s.Call(v)
	case ssa.OpS390XCALLtail:
		s.TailCall(v)
	case ssa.OpS390XLoweredWB:
		p := s.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = v.Aux.(*obj.LSym)
	case ssa.OpS390XLoweredPanicBoundsA, ssa.OpS390XLoweredPanicBoundsB, ssa.OpS390XLoweredPanicBoundsC:
		p := s.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ssagen.BoundsCheckFunc[v.AuxInt]
		s.UseArgs(16) // space used in callee args area by assembly stubs
	case ssa.OpS390XFLOGR, ssa.OpS390XPOPCNT,
		ssa.OpS390XNEG, ssa.OpS390XNEGW,
		ssa.OpS390XMOVWBR, ssa.OpS390XMOVDBR:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpS390XNOT, ssa.OpS390XNOTW:
		v.Fatalf("NOT/NOTW generated %s", v.LongString())
	case ssa.OpS390XSumBytes2, ssa.OpS390XSumBytes4, ssa.OpS390XSumBytes8:
		v.Fatalf("SumBytes generated %s", v.LongString())
	case ssa.OpS390XLOCGR:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = int64(v.Aux.(s390x.CCMask))
		p.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpS390XFSQRTS, ssa.OpS390XFSQRT:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpS390XLTDBR, ssa.OpS390XLTEBR:
		opregreg(s, v.Op.Asm(), v.Args[0].Reg(), v.Args[0].Reg())
	case ssa.OpS390XInvertFlags:
		v.Fatalf("InvertFlags should never make it to codegen %v", v.LongString())
	case ssa.OpS390XFlagEQ, ssa.OpS390XFlagLT, ssa.OpS390XFlagGT, ssa.OpS390XFlagOV:
		v.Fatalf("Flag* ops should never make it to codegen %v", v.LongString())
	case ssa.OpS390XAddTupleFirst32, ssa.OpS390XAddTupleFirst64:
		v.Fatalf("AddTupleFirst* should never make it to codegen %v", v.LongString())
	case ssa.OpS390XLoweredNilCheck:
		// Issue a load which will fault if the input is nil.
		p := s.Prog(s390x.AMOVBZ)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = s390x.REGTMP
		if logopt.Enabled() {
			logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name)
		}
		if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
			base.WarnfAt(v.Pos, "generated nil check")
		}
	case ssa.OpS390XMVC:
		vo := v.AuxValAndOff()
		p := s.Prog(s390x.AMVC)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = vo.Val64()
		p.SetFrom3(obj.Addr{
			Type:   obj.TYPE_MEM,
			Reg:    v.Args[1].Reg(),
			Offset: vo.Off64(),
		})
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		p.To.Offset = vo.Off64()
	case ssa.OpS390XSTMG2, ssa.OpS390XSTMG3, ssa.OpS390XSTMG4,
		ssa.OpS390XSTM2, ssa.OpS390XSTM3, ssa.OpS390XSTM4:
		for i := 2; i < len(v.Args)-1; i++ {
			if v.Args[i].Reg() != v.Args[i-1].Reg()+1 {
				v.Fatalf("invalid store multiple %s", v.LongString())
			}
		}
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = v.Args[len(v.Args)-2].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.To, v)
	case ssa.OpS390XLoweredMove:
		// Inputs must be valid pointers to memory,
		// so adjust arg0 and arg1 as part of the expansion.
		// arg2 should be src+size.
		//
		// mvc:  MVC  $256, 0(R2), 0(R1)
		//       MOVD $256(R1), R1
		//       MOVD $256(R2), R2
		//       CMPU R2, Rarg2
		//       BLT  mvc
		//       MVC  $rem, 0(R2), 0(R1) // if rem > 0
		// arg2 is the last address to move in the loop + 256
		mvc := s.Prog(s390x.AMVC)
		mvc.From.Type = obj.TYPE_CONST
		mvc.From.Offset = 256
		mvc.SetFrom3(obj.Addr{Type: obj.TYPE_MEM, Reg: v.Args[1].Reg()})
		mvc.To.Type = obj.TYPE_MEM
		mvc.To.Reg = v.Args[0].Reg()

		for i := 0; i < 2; i++ {
			movd := s.Prog(s390x.AMOVD)
			movd.From.Type = obj.TYPE_ADDR
			movd.From.Reg = v.Args[i].Reg()
			movd.From.Offset = 256
			movd.To.Type = obj.TYPE_REG
			movd.To.Reg = v.Args[i].Reg()
		}

		cmpu := s.Prog(s390x.ACMPU)
		cmpu.From.Reg = v.Args[1].Reg()
		cmpu.From.Type = obj.TYPE_REG
		cmpu.To.Reg = v.Args[2].Reg()
		cmpu.To.Type = obj.TYPE_REG

		bne := s.Prog(s390x.ABLT)
		bne.To.Type = obj.TYPE_BRANCH
		bne.To.SetTarget(mvc)

		if v.AuxInt > 0 {
			mvc := s.Prog(s390x.AMVC)
			mvc.From.Type = obj.TYPE_CONST
			mvc.From.Offset = v.AuxInt
			mvc.SetFrom3(obj.Addr{Type: obj.TYPE_MEM, Reg: v.Args[1].Reg()})
			mvc.To.Type = obj.TYPE_MEM
			mvc.To.Reg = v.Args[0].Reg()
		}
	case ssa.OpS390XLoweredZero:
		// Input must be a valid pointer to memory,
		// so adjust arg0 as part of the expansion.
		// arg1 should be ptr+size.
		//
		// clear: CLEAR $256, 0(R1)
		//        MOVD  $256(R1), R1
		//        CMPU  R1, Rarg1
		//        BLT   clear
		//        CLEAR $rem, 0(R1) // if rem > 0
		// arg1 is the last address to zero in the loop + 256
		clear := s.Prog(s390x.ACLEAR)
		clear.From.Type = obj.TYPE_CONST
		clear.From.Offset = 256
		clear.To.Type = obj.TYPE_MEM
		clear.To.Reg = v.Args[0].Reg()

		movd := s.Prog(s390x.AMOVD)
		movd.From.Type = obj.TYPE_ADDR
		movd.From.Reg = v.Args[0].Reg()
		movd.From.Offset = 256
		movd.To.Type = obj.TYPE_REG
		movd.To.Reg = v.Args[0].Reg()

		cmpu := s.Prog(s390x.ACMPU)
		cmpu.From.Reg = v.Args[0].Reg()
		cmpu.From.Type = obj.TYPE_REG
		cmpu.To.Reg = v.Args[1].Reg()
		cmpu.To.Type = obj.TYPE_REG

		bne := s.Prog(s390x.ABLT)
		bne.To.Type = obj.TYPE_BRANCH
		bne.To.SetTarget(clear)

		if v.AuxInt > 0 {
			clear := s.Prog(s390x.ACLEAR)
			clear.From.Type = obj.TYPE_CONST
			clear.From.Offset = v.AuxInt
			clear.To.Type = obj.TYPE_MEM
			clear.To.Reg = v.Args[0].Reg()
		}
	case ssa.OpS390XMOVBZatomicload, ssa.OpS390XMOVWZatomicload, ssa.OpS390XMOVDatomicload:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
	case ssa.OpS390XMOVBatomicstore, ssa.OpS390XMOVWatomicstore, ssa.OpS390XMOVDatomicstore:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.To, v)
	case ssa.OpS390XLAN, ssa.OpS390XLAO:
		// LA(N|O) Ry, TMP, 0(Rx)
		op := s.Prog(v.Op.Asm())
		op.From.Type = obj.TYPE_REG
		op.From.Reg = v.Args[1].Reg()
		op.Reg = s390x.REGTMP
		op.To.Type = obj.TYPE_MEM
		op.To.Reg = v.Args[0].Reg()
	case ssa.OpS390XLANfloor, ssa.OpS390XLAOfloor:
		r := v.Args[0].Reg() // clobbered, assumed R1 in comments

		// Round ptr down to nearest multiple of 4.
		// ANDW $~3, R1
		ptr := s.Prog(s390x.AANDW)
		ptr.From.Type = obj.TYPE_CONST
		ptr.From.Offset = 0xfffffffc
		ptr.To.Type = obj.TYPE_REG
		ptr.To.Reg = r

		// Redirect output of LA(N|O) into R1 since it is clobbered anyway.
		// LA(N|O) Rx, R1, 0(R1)
		op := s.Prog(v.Op.Asm())
		op.From.Type = obj.TYPE_REG
		op.From.Reg = v.Args[1].Reg()
		op.Reg = r
		op.To.Type = obj.TYPE_MEM
		op.To.Reg = r
	case ssa.OpS390XLAA, ssa.OpS390XLAAG:
		p := s.Prog(v.Op.Asm())
		p.Reg = v.Reg0()
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.To, v)
	case ssa.OpS390XLoweredAtomicCas32, ssa.OpS390XLoweredAtomicCas64:
		// Convert the flags output of CS{,G} into a bool.
		//    CS{,G} arg1, arg2, arg0
		//    MOVD   $0, ret
		//    BNE    2(PC)
		//    MOVD   $1, ret
		//    NOP (so the BNE has somewhere to land)

		// CS{,G} arg1, arg2, arg0
		cs := s.Prog(v.Op.Asm())
		cs.From.Type = obj.TYPE_REG
		cs.From.Reg = v.Args[1].Reg() // old
		cs.Reg = v.Args[2].Reg()      // new
		cs.To.Type = obj.TYPE_MEM
		cs.To.Reg = v.Args[0].Reg()
		ssagen.AddAux(&cs.To, v)

		// MOVD $0, ret
		movd := s.Prog(s390x.AMOVD)
		movd.From.Type = obj.TYPE_CONST
		movd.From.Offset = 0
		movd.To.Type = obj.TYPE_REG
		movd.To.Reg = v.Reg0()

		// BNE 2(PC)
		bne := s.Prog(s390x.ABNE)
		bne.To.Type = obj.TYPE_BRANCH

		// MOVD $1, ret
		movd = s.Prog(s390x.AMOVD)
		movd.From.Type = obj.TYPE_CONST
		movd.From.Offset = 1
		movd.To.Type = obj.TYPE_REG
		movd.To.Reg = v.Reg0()

		// NOP (so the BNE has somewhere to land)
		nop := s.Prog(obj.ANOP)
		bne.To.SetTarget(nop)
	case ssa.OpS390XLoweredAtomicExchange32, ssa.OpS390XLoweredAtomicExchange64:
		// Loop until the CS{,G} succeeds.
		//     MOV{WZ,D} arg0, ret
		// cs: CS{,G}    ret, arg1, arg0
		//     BNE       cs

		// MOV{WZ,D} arg0, ret
		load := s.Prog(loadByType(v.Type.FieldType(0)))
		load.From.Type = obj.TYPE_MEM
		load.From.Reg = v.Args[0].Reg()
		load.To.Type = obj.TYPE_REG
		load.To.Reg = v.Reg0()
		ssagen.AddAux(&load.From, v)

		// CS{,G} ret, arg1, arg0
		cs := s.Prog(v.Op.Asm())
		cs.From.Type = obj.TYPE_REG
		cs.From.Reg = v.Reg0()   // old
		cs.Reg = v.Args[1].Reg() // new
		cs.To.Type = obj.TYPE_MEM
		cs.To.Reg = v.Args[0].Reg()
		ssagen.AddAux(&cs.To, v)

		// BNE cs
		bne := s.Prog(s390x.ABNE)
		bne.To.Type = obj.TYPE_BRANCH
		bne.To.SetTarget(cs)
	case ssa.OpS390XSYNC:
		s.Prog(s390x.ASYNC)
	case ssa.OpClobber, ssa.OpClobberReg:
		// TODO: implement for clobberdead experiment. Nop is ok for now.
	default:
		v.Fatalf("genValue not implemented: %s", v.LongString())
	}
}

// blockAsm returns the branch instruction used to implement the given
// block kind.
func blockAsm(b *ssa.Block) obj.As {
	switch b.Kind {
	case ssa.BlockS390XBRC:
		return s390x.ABRC
	case ssa.BlockS390XCRJ:
		return s390x.ACRJ
	case ssa.BlockS390XCGRJ:
		return s390x.ACGRJ
	case ssa.BlockS390XCLRJ:
		return s390x.ACLRJ
	case ssa.BlockS390XCLGRJ:
		return s390x.ACLGRJ
	case ssa.BlockS390XCIJ:
		return s390x.ACIJ
	case ssa.BlockS390XCGIJ:
		return s390x.ACGIJ
	case ssa.BlockS390XCLIJ:
		return s390x.ACLIJ
	case ssa.BlockS390XCLGIJ:
		return s390x.ACLGIJ
	}
	b.Fatalf("blockAsm not implemented: %s", b.LongString())
	panic("unreachable")
}
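
// ssaGenBlock emits the control-flow instructions that end block b. next is
// the block that will be laid out immediately after b, so branches to next
// can be omitted.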
func ssaGenBlock(s *ssagen.State, b, next *ssa.Block) {
	// Handle generic blocks first.
	switch b.Kind {
	case ssa.BlockPlain:
		if b.Succs[0].Block() != next {
			p := s.Prog(s390x.ABR)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[0].Block()})
		}
		return
	case ssa.BlockDefer:
		// defer returns in R3:
		// 0 if we should continue executing
		// 1 if we should jump to deferreturn call
		p := s.Br(s390x.ACIJ, b.Succs[1].Block())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = int64(s390x.NotEqual & s390x.NotUnordered) // unordered is not possible
		p.Reg = s390x.REG_R3
		p.SetFrom3Const(0)
		if b.Succs[0].Block() != next {
			s.Br(s390x.ABR, b.Succs[0].Block())
		}
		return
	case ssa.BlockExit, ssa.BlockRetJmp:
		return
	case ssa.BlockRet:
		s.Prog(obj.ARET)
		return
	}

	// Handle s390x-specific blocks. These blocks all have a
	// condition code mask in the Aux value and 2 successors.
	succs := [...]*ssa.Block{b.Succs[0].Block(), b.Succs[1].Block()}
	mask := b.Aux.(s390x.CCMask)

	// TODO: take into account Likely property for forward/backward
	// branches. We currently can't do this because we don't know
	// whether a block has already been emitted. In general forward
	// branches are assumed 'not taken' and backward branches are
	// assumed 'taken'.
	if next == succs[0] {
		succs[0], succs[1] = succs[1], succs[0]
		mask = mask.Inverse()
	}

	p := s.Br(blockAsm(b), succs[0])
	switch b.Kind {
	case ssa.BlockS390XBRC:
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = int64(mask)
	case ssa.BlockS390XCGRJ, ssa.BlockS390XCRJ,
		ssa.BlockS390XCLGRJ, ssa.BlockS390XCLRJ:
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = int64(mask & s390x.NotUnordered) // unordered is not possible
		p.Reg = b.Controls[0].Reg()
		p.SetFrom3Reg(b.Controls[1].Reg())
	case ssa.BlockS390XCGIJ, ssa.BlockS390XCIJ:
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = int64(mask & s390x.NotUnordered) // unordered is not possible
		p.Reg = b.Controls[0].Reg()
		p.SetFrom3Const(int64(int8(b.AuxInt)))
	case ssa.BlockS390XCLGIJ, ssa.BlockS390XCLIJ:
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = int64(mask & s390x.NotUnordered) // unordered is not possible
		p.Reg = b.Controls[0].Reg()
		p.SetFrom3Const(int64(uint8(b.AuxInt)))
	default:
		b.Fatalf("branch not implemented: %s", b.LongString())
	}
	if next != succs[1] {
		s.Br(s390x.ABR, succs[1])
	}
}