github.com/bir3/gocompiler@v0.3.205/src/cmd/compile/internal/loong64/ssa.go (about) 1 // Copyright 2022 The Go Authors. All rights reserved. 2 // Use of this source code is governed by a BSD-style 3 // license that can be found in the LICENSE file. 4 5 package loong64 6 7 import ( 8 "math" 9 10 "github.com/bir3/gocompiler/src/cmd/compile/internal/base" 11 "github.com/bir3/gocompiler/src/cmd/compile/internal/ir" 12 "github.com/bir3/gocompiler/src/cmd/compile/internal/logopt" 13 "github.com/bir3/gocompiler/src/cmd/compile/internal/ssa" 14 "github.com/bir3/gocompiler/src/cmd/compile/internal/ssagen" 15 "github.com/bir3/gocompiler/src/cmd/compile/internal/types" 16 "github.com/bir3/gocompiler/src/cmd/internal/obj" 17 "github.com/bir3/gocompiler/src/cmd/internal/obj/loong64" 18 ) 19 20 // isFPreg reports whether r is an FP register. 21 func isFPreg(r int16) bool { 22 return loong64.REG_F0 <= r && r <= loong64.REG_F31 23 } 24 25 // loadByType returns the load instruction of the given type. 26 func loadByType(t *types.Type, r int16) obj.As { 27 if isFPreg(r) { 28 if t.Size() == 4 { 29 return loong64.AMOVF 30 } else { 31 return loong64.AMOVD 32 } 33 } else { 34 switch t.Size() { 35 case 1: 36 if t.IsSigned() { 37 return loong64.AMOVB 38 } else { 39 return loong64.AMOVBU 40 } 41 case 2: 42 if t.IsSigned() { 43 return loong64.AMOVH 44 } else { 45 return loong64.AMOVHU 46 } 47 case 4: 48 if t.IsSigned() { 49 return loong64.AMOVW 50 } else { 51 return loong64.AMOVWU 52 } 53 case 8: 54 return loong64.AMOVV 55 } 56 } 57 panic("bad load type") 58 } 59 60 // storeByType returns the store instruction of the given type. 
func storeByType(t *types.Type, r int16) obj.As {
	if isFPreg(r) {
		if t.Size() == 4 {
			return loong64.AMOVF
		} else {
			return loong64.AMOVD
		}
	} else {
		// Stores do not extend, so unlike loadByType there is no
		// signed/unsigned distinction — width alone selects the op.
		switch t.Size() {
		case 1:
			return loong64.AMOVB
		case 2:
			return loong64.AMOVH
		case 4:
			return loong64.AMOVW
		case 8:
			return loong64.AMOVV
		}
	}
	panic("bad store type")
}

// ssaGenValue emits the machine instructions (obj.Progs) for a single
// SSA value v, appending them to the state s.
func ssaGenValue(s *ssagen.State, v *ssa.Value) {
	switch v.Op {
	case ssa.OpCopy, ssa.OpLOONG64MOVVreg:
		if v.Type.IsMemory() {
			return
		}
		x := v.Args[0].Reg()
		y := v.Reg()
		if x == y {
			return
		}
		as := loong64.AMOVV
		if isFPreg(x) && isFPreg(y) {
			as = loong64.AMOVD
		}
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = x
		p.To.Type = obj.TYPE_REG
		p.To.Reg = y
	case ssa.OpLOONG64MOVVnop:
		// nothing to do
	case ssa.OpLoadReg:
		if v.Type.IsFlags() {
			v.Fatalf("load flags not implemented: %v", v.LongString())
			return
		}
		r := v.Reg()
		p := s.Prog(loadByType(v.Type, r))
		ssagen.AddrAuto(&p.From, v.Args[0])
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
	case ssa.OpStoreReg:
		if v.Type.IsFlags() {
			v.Fatalf("store flags not implemented: %v", v.LongString())
			return
		}
		r := v.Args[0].Reg()
		p := s.Prog(storeByType(v.Type, r))
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r
		ssagen.AddrAuto(&p.To, v)
	case ssa.OpLOONG64ADDV,
		ssa.OpLOONG64SUBV,
		ssa.OpLOONG64AND,
		ssa.OpLOONG64OR,
		ssa.OpLOONG64XOR,
		ssa.OpLOONG64NOR,
		ssa.OpLOONG64SLLV,
		ssa.OpLOONG64SRLV,
		ssa.OpLOONG64SRAV,
		ssa.OpLOONG64ROTR,
		ssa.OpLOONG64ROTRV,
		ssa.OpLOONG64ADDF,
		ssa.OpLOONG64ADDD,
		ssa.OpLOONG64SUBF,
		ssa.OpLOONG64SUBD,
		ssa.OpLOONG64MULF,
		ssa.OpLOONG64MULD,
		ssa.OpLOONG64DIVF,
		ssa.OpLOONG64DIVD:
		// Two-operand register ops: op Args[1], Args[0] -> Reg.
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpLOONG64SGT,
		ssa.OpLOONG64SGTU:
		// Note: operand order is swapped relative to the case above
		// (From is Args[0], p.Reg is Args[1]).
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpLOONG64ADDVconst,
		ssa.OpLOONG64SUBVconst,
		ssa.OpLOONG64ANDconst,
		ssa.OpLOONG64ORconst,
		ssa.OpLOONG64XORconst,
		ssa.OpLOONG64NORconst,
		ssa.OpLOONG64SLLVconst,
		ssa.OpLOONG64SRLVconst,
		ssa.OpLOONG64SRAVconst,
		ssa.OpLOONG64ROTRconst,
		ssa.OpLOONG64ROTRVconst,
		ssa.OpLOONG64SGTconst,
		ssa.OpLOONG64SGTUconst:
		// Register-immediate ops: op $AuxInt, Args[0] -> Reg.
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpLOONG64MULV:
		// MULV writes the low half to Reg1, MULHV the high half to Reg0.
		p := s.Prog(loong64.AMULV)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg1()
		p1 := s.Prog(loong64.AMULHV)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = v.Args[1].Reg()
		p1.Reg = v.Args[0].Reg()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = v.Reg0()
	case ssa.OpLOONG64MULVU:
		p := s.Prog(loong64.AMULV)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg1()
		p1 := s.Prog(loong64.AMULHVU)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = v.Args[1].Reg()
		p1.Reg = v.Args[0].Reg()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = v.Reg0()
	case ssa.OpLOONG64DIVV:
		// DIVV puts the quotient in Reg1, REMV the remainder in Reg0.
		p := s.Prog(loong64.ADIVV)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg1()
		p1 := s.Prog(loong64.AREMV)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = v.Args[1].Reg()
		p1.Reg = v.Args[0].Reg()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = v.Reg0()
	case ssa.OpLOONG64DIVVU:
		p := s.Prog(loong64.ADIVVU)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg1()
		p1 := s.Prog(loong64.AREMVU)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = v.Args[1].Reg()
		p1.Reg = v.Args[0].Reg()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = v.Reg0()
	case ssa.OpLOONG64MOVVconst:
		r := v.Reg()
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
		if isFPreg(r) {
			// cannot move into FP or special registers, use TMP as intermediate
			p.To.Reg = loong64.REGTMP
			p = s.Prog(loong64.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = loong64.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = r
		}
	case ssa.OpLOONG64MOVFconst,
		ssa.OpLOONG64MOVDconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_FCONST
		p.From.Val = math.Float64frombits(uint64(v.AuxInt))
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpLOONG64CMPEQF,
		ssa.OpLOONG64CMPEQD,
		ssa.OpLOONG64CMPGEF,
		ssa.OpLOONG64CMPGED,
		ssa.OpLOONG64CMPGTF,
		ssa.OpLOONG64CMPGTD:
		// FP compares set the FP condition flag; there is no To operand.
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = v.Args[1].Reg()
	case ssa.OpLOONG64MOVVaddr:
		p := s.Prog(loong64.AMOVV)
		p.From.Type = obj.TYPE_ADDR
		p.From.Reg = v.Args[0].Reg()
		var wantreg string
		// MOVV $sym+off(base), R
		// the assembler expands it as the following:
		// - base is SP: add constant offset to SP (R3)
		// when constant is large, tmp register (R30) may be used
		// - base is SB: load external address with relocation
		switch v.Aux.(type) {
		default:
			v.Fatalf("aux is of unknown type %T", v.Aux)
		case *obj.LSym:
			wantreg = "SB"
			ssagen.AddAux(&p.From, v)
		case *ir.Name:
			wantreg = "SP"
			ssagen.AddAux(&p.From, v)
		case nil:
			// No sym, just MOVV $off(SP), R
			wantreg = "SP"
			p.From.Offset = v.AuxInt
		}
		if reg := v.Args[0].RegName(); reg != wantreg {
			v.Fatalf("bad reg %s for symbol type %T, want %s", reg, v.Aux, wantreg)
		}
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpLOONG64MOVBload,
		ssa.OpLOONG64MOVBUload,
		ssa.OpLOONG64MOVHload,
		ssa.OpLOONG64MOVHUload,
		ssa.OpLOONG64MOVWload,
		ssa.OpLOONG64MOVWUload,
		ssa.OpLOONG64MOVVload,
		ssa.OpLOONG64MOVFload,
		ssa.OpLOONG64MOVDload:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpLOONG64MOVBstore,
		ssa.OpLOONG64MOVHstore,
		ssa.OpLOONG64MOVWstore,
		ssa.OpLOONG64MOVVstore,
		ssa.OpLOONG64MOVFstore,
		ssa.OpLOONG64MOVDstore:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.To, v)
	case ssa.OpLOONG64MOVBstorezero,
		ssa.OpLOONG64MOVHstorezero,
		ssa.OpLOONG64MOVWstorezero,
		ssa.OpLOONG64MOVVstorezero:
		// Store the hardwired zero register instead of materializing 0.
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = loong64.REGZERO
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.To, v)
	case ssa.OpLOONG64MOVBreg,
		ssa.OpLOONG64MOVBUreg,
		ssa.OpLOONG64MOVHreg,
		ssa.OpLOONG64MOVHUreg,
		ssa.OpLOONG64MOVWreg,
		ssa.OpLOONG64MOVWUreg:
		// Look through register copies to find the value being extended.
		a := v.Args[0]
		for a.Op == ssa.OpCopy || a.Op == ssa.OpLOONG64MOVVreg {
			a = a.Args[0]
		}
		if a.Op == ssa.OpLoadReg && loong64.REG_R0 <= a.Reg() && a.Reg() <= loong64.REG_R31 {
			// LoadReg from a narrower type does an extension, except loading
			// to a floating point register. So only eliminate the extension
			// if it is loaded to an integer register.

			t := a.Type
			switch {
			case v.Op == ssa.OpLOONG64MOVBreg && t.Size() == 1 && t.IsSigned(),
				v.Op == ssa.OpLOONG64MOVBUreg && t.Size() == 1 && !t.IsSigned(),
				v.Op == ssa.OpLOONG64MOVHreg && t.Size() == 2 && t.IsSigned(),
				v.Op == ssa.OpLOONG64MOVHUreg && t.Size() == 2 && !t.IsSigned(),
				v.Op == ssa.OpLOONG64MOVWreg && t.Size() == 4 && t.IsSigned(),
				v.Op == ssa.OpLOONG64MOVWUreg && t.Size() == 4 && !t.IsSigned():
				// arg is a proper-typed load, already zero/sign-extended, don't extend again
				if v.Reg() == v.Args[0].Reg() {
					return
				}
				p := s.Prog(loong64.AMOVV)
				p.From.Type = obj.TYPE_REG
				p.From.Reg = v.Args[0].Reg()
				p.To.Type = obj.TYPE_REG
				p.To.Reg = v.Reg()
				return
			default:
			}
		}
		fallthrough
	case ssa.OpLOONG64MOVWF,
		ssa.OpLOONG64MOVWD,
		ssa.OpLOONG64TRUNCFW,
		ssa.OpLOONG64TRUNCDW,
		ssa.OpLOONG64MOVVF,
		ssa.OpLOONG64MOVVD,
		ssa.OpLOONG64TRUNCFV,
		ssa.OpLOONG64TRUNCDV,
		ssa.OpLOONG64MOVFD,
		ssa.OpLOONG64MOVDF,
		ssa.OpLOONG64NEGF,
		ssa.OpLOONG64NEGD,
		ssa.OpLOONG64SQRTD,
		ssa.OpLOONG64SQRTF:
		// Simple unary register-to-register ops.
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpLOONG64NEGV:
		// SUB from REGZERO
		p := s.Prog(loong64.ASUBVU)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = loong64.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpLOONG64DUFFZERO:
		// runtime.duffzero expects start address - 8 in R19
		p := s.Prog(loong64.ASUBVU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = 8
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = loong64.REG_R19
		p = s.Prog(obj.ADUFFZERO)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ir.Syms.Duffzero
		p.To.Offset = v.AuxInt
	case ssa.OpLOONG64LoweredZero:
		// SUBV	$8, R19
		// MOVV	R0, 8(R19)
		// ADDV	$8, R19
		// BNE	Rarg1, R19, -2(PC)
		// arg1 is the address of the last element to zero
		//
		// The element size is the largest power of two that divides
		// the alignment hint in AuxInt.
		var sz int64
		var mov obj.As
		switch {
		case v.AuxInt%8 == 0:
			sz = 8
			mov = loong64.AMOVV
		case v.AuxInt%4 == 0:
			sz = 4
			mov = loong64.AMOVW
		case v.AuxInt%2 == 0:
			sz = 2
			mov = loong64.AMOVH
		default:
			sz = 1
			mov = loong64.AMOVB
		}
		p := s.Prog(loong64.ASUBVU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = sz
		p.To.Type = obj.TYPE_REG
		p.To.Reg = loong64.REG_R19
		p2 := s.Prog(mov)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = loong64.REGZERO
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = loong64.REG_R19
		p2.To.Offset = sz
		p3 := s.Prog(loong64.AADDVU)
		p3.From.Type = obj.TYPE_CONST
		p3.From.Offset = sz
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = loong64.REG_R19
		p4 := s.Prog(loong64.ABNE)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = v.Args[1].Reg()
		p4.Reg = loong64.REG_R19
		p4.To.Type = obj.TYPE_BRANCH
		p4.To.SetTarget(p2)
	case ssa.OpLOONG64DUFFCOPY:
		p := s.Prog(obj.ADUFFCOPY)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ir.Syms.Duffcopy
		p.To.Offset = v.AuxInt
	case ssa.OpLOONG64LoweredMove:
		// SUBV	$8, R19
		// MOVV	8(R19), Rtmp
		// MOVV	Rtmp, (R4)
		// ADDV	$8, R19
		// ADDV	$8, R4
		// BNE	Rarg2, R19, -4(PC)
		// arg2 is the address of the last element of src
		var sz int64
		var mov obj.As
		switch {
		case v.AuxInt%8 == 0:
			sz = 8
			mov = loong64.AMOVV
		case v.AuxInt%4 == 0:
			sz = 4
			mov = loong64.AMOVW
		case v.AuxInt%2 == 0:
			sz = 2
			mov = loong64.AMOVH
		default:
			sz = 1
			mov = loong64.AMOVB
		}
		p := s.Prog(loong64.ASUBVU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = sz
		p.To.Type = obj.TYPE_REG
		p.To.Reg = loong64.REG_R19
		p2 := s.Prog(mov)
		p2.From.Type = obj.TYPE_MEM
		p2.From.Reg = loong64.REG_R19
		p2.From.Offset = sz
		p2.To.Type = obj.TYPE_REG
		p2.To.Reg = loong64.REGTMP
		p3 := s.Prog(mov)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = loong64.REGTMP
		p3.To.Type = obj.TYPE_MEM
		p3.To.Reg = loong64.REG_R4
		p4 := s.Prog(loong64.AADDVU)
		p4.From.Type = obj.TYPE_CONST
		p4.From.Offset = sz
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = loong64.REG_R19
		p5 := s.Prog(loong64.AADDVU)
		p5.From.Type = obj.TYPE_CONST
		p5.From.Offset = sz
		p5.To.Type = obj.TYPE_REG
		p5.To.Reg = loong64.REG_R4
		p6 := s.Prog(loong64.ABNE)
		p6.From.Type = obj.TYPE_REG
		p6.From.Reg = v.Args[2].Reg()
		p6.Reg = loong64.REG_R19
		p6.To.Type = obj.TYPE_BRANCH
		p6.To.SetTarget(p2)
	case ssa.OpLOONG64CALLstatic, ssa.OpLOONG64CALLclosure, ssa.OpLOONG64CALLinter:
		s.Call(v)
	case ssa.OpLOONG64CALLtail:
		s.TailCall(v)
	case ssa.OpLOONG64LoweredWB:
		// Call the write-barrier routine named by the aux symbol.
		p := s.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = v.Aux.(*obj.LSym)
	case ssa.OpLOONG64LoweredPanicBoundsA, ssa.OpLOONG64LoweredPanicBoundsB, ssa.OpLOONG64LoweredPanicBoundsC:
		p := s.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ssagen.BoundsCheckFunc[v.AuxInt]
		s.UseArgs(16) // space used in callee args area by assembly stubs
	case ssa.OpLOONG64LoweredAtomicLoad8, ssa.OpLOONG64LoweredAtomicLoad32, ssa.OpLOONG64LoweredAtomicLoad64:
		// Plain load bracketed by full memory barriers (DBAR).
		as := loong64.AMOVV
		switch v.Op {
		case ssa.OpLOONG64LoweredAtomicLoad8:
			as = loong64.AMOVB
		case ssa.OpLOONG64LoweredAtomicLoad32:
			as = loong64.AMOVW
		}
		s.Prog(loong64.ADBAR)
		p := s.Prog(as)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
		s.Prog(loong64.ADBAR)
	case ssa.OpLOONG64LoweredAtomicStore8, ssa.OpLOONG64LoweredAtomicStore32, ssa.OpLOONG64LoweredAtomicStore64:
		// Plain store bracketed by full memory barriers (DBAR).
		as := loong64.AMOVV
		switch v.Op {
		case ssa.OpLOONG64LoweredAtomicStore8:
			as = loong64.AMOVB
		case ssa.OpLOONG64LoweredAtomicStore32:
			as = loong64.AMOVW
		}
		s.Prog(loong64.ADBAR)
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		s.Prog(loong64.ADBAR)
	case ssa.OpLOONG64LoweredAtomicStorezero32, ssa.OpLOONG64LoweredAtomicStorezero64:
		as := loong64.AMOVV
		if v.Op == ssa.OpLOONG64LoweredAtomicStorezero32 {
			as = loong64.AMOVW
		}
		s.Prog(loong64.ADBAR)
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = loong64.REGZERO
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		s.Prog(loong64.ADBAR)
	case ssa.OpLOONG64LoweredAtomicExchange32, ssa.OpLOONG64LoweredAtomicExchange64:
		// DBAR
		// MOVV	Rarg1, Rtmp
		// LL	(Rarg0), Rout
		// SC	Rtmp, (Rarg0)
		// BEQ	Rtmp, -3(PC)
		// DBAR
		ll := loong64.ALLV
		sc := loong64.ASCV
		if v.Op == ssa.OpLOONG64LoweredAtomicExchange32 {
			ll = loong64.ALL
			sc = loong64.ASC
		}
		s.Prog(loong64.ADBAR)
		p := s.Prog(loong64.AMOVV)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = loong64.REGTMP
		p1 := s.Prog(ll)
		p1.From.Type = obj.TYPE_MEM
		p1.From.Reg = v.Args[0].Reg()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = v.Reg0()
		p2 := s.Prog(sc)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = loong64.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()
		p3 := s.Prog(loong64.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = loong64.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		p3.To.SetTarget(p)
		s.Prog(loong64.ADBAR)
	case ssa.OpLOONG64LoweredAtomicAdd32, ssa.OpLOONG64LoweredAtomicAdd64:
		// DBAR
		// LL	(Rarg0), Rout
		// ADDV	Rarg1, Rout, Rtmp
		// SC	Rtmp, (Rarg0)
		// BEQ	Rtmp, -3(PC)
		// DBAR
		// ADDV	Rarg1, Rout
		ll := loong64.ALLV
		sc := loong64.ASCV
		if v.Op == ssa.OpLOONG64LoweredAtomicAdd32 {
			ll = loong64.ALL
			sc = loong64.ASC
		}
		s.Prog(loong64.ADBAR)
		p := s.Prog(ll)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
		p1 := s.Prog(loong64.AADDVU)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = v.Args[1].Reg()
		p1.Reg = v.Reg0()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = loong64.REGTMP
		p2 := s.Prog(sc)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = loong64.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()
		p3 := s.Prog(loong64.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = loong64.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		p3.To.SetTarget(p)
		s.Prog(loong64.ADBAR)
		// Recompute the new value into Rout: the LL loaded the old value.
		p4 := s.Prog(loong64.AADDVU)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = v.Args[1].Reg()
		p4.Reg = v.Reg0()
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = v.Reg0()
	case ssa.OpLOONG64LoweredAtomicAddconst32, ssa.OpLOONG64LoweredAtomicAddconst64:
		// DBAR
		// LL	(Rarg0), Rout
		// ADDV	$auxint, Rout, Rtmp
		// SC	Rtmp, (Rarg0)
		// BEQ	Rtmp, -3(PC)
		// DBAR
		// ADDV	$auxint, Rout
		ll := loong64.ALLV
		sc := loong64.ASCV
		if v.Op == ssa.OpLOONG64LoweredAtomicAddconst32 {
			ll = loong64.ALL
			sc = loong64.ASC
		}
		s.Prog(loong64.ADBAR)
		p := s.Prog(ll)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
		p1 := s.Prog(loong64.AADDVU)
		p1.From.Type = obj.TYPE_CONST
		p1.From.Offset = v.AuxInt
		p1.Reg = v.Reg0()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = loong64.REGTMP
		p2 := s.Prog(sc)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = loong64.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()
		p3 := s.Prog(loong64.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = loong64.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		p3.To.SetTarget(p)
		s.Prog(loong64.ADBAR)
		p4 := s.Prog(loong64.AADDVU)
		p4.From.Type = obj.TYPE_CONST
		p4.From.Offset = v.AuxInt
		p4.Reg = v.Reg0()
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = v.Reg0()
	case ssa.OpLOONG64LoweredAtomicCas32, ssa.OpLOONG64LoweredAtomicCas64:
		// MOVV $0, Rout
		// DBAR
		// LL	(Rarg0), Rtmp
		// BNE	Rtmp, Rarg1, 4(PC)
		// MOVV Rarg2, Rout
		// SC	Rout, (Rarg0)
		// BEQ	Rout, -4(PC)
		// DBAR
		ll := loong64.ALLV
		sc := loong64.ASCV
		if v.Op == ssa.OpLOONG64LoweredAtomicCas32 {
			ll = loong64.ALL
			sc = loong64.ASC
		}
		p := s.Prog(loong64.AMOVV)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = loong64.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
		s.Prog(loong64.ADBAR)
		p1 := s.Prog(ll)
		p1.From.Type = obj.TYPE_MEM
		p1.From.Reg = v.Args[0].Reg()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = loong64.REGTMP
		p2 := s.Prog(loong64.ABNE)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = v.Args[1].Reg()
		p2.Reg = loong64.REGTMP
		p2.To.Type = obj.TYPE_BRANCH
		p3 := s.Prog(loong64.AMOVV)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = v.Args[2].Reg()
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = v.Reg0()
		p4 := s.Prog(sc)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = v.Reg0()
		p4.To.Type = obj.TYPE_MEM
		p4.To.Reg = v.Args[0].Reg()
		p5 := s.Prog(loong64.ABEQ)
		p5.From.Type = obj.TYPE_REG
		p5.From.Reg = v.Reg0()
		p5.To.Type = obj.TYPE_BRANCH
		p5.To.SetTarget(p1)
		// The mismatch branch (p2) skips past the SC to the final barrier.
		p6 := s.Prog(loong64.ADBAR)
		p2.To.SetTarget(p6)
	case ssa.OpLOONG64LoweredNilCheck:
		// Issue a load which will fault if arg is nil.
		p := s.Prog(loong64.AMOVB)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = loong64.REGTMP
		if logopt.Enabled() {
			logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name)
		}
		if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
			base.WarnfAt(v.Pos, "generated nil check")
		}
	case ssa.OpLOONG64FPFlagTrue,
		ssa.OpLOONG64FPFlagFalse:
		// MOVV	$0, r
		// BFPF	2(PC)
		// MOVV	$1, r
		branch := loong64.ABFPF
		if v.Op == ssa.OpLOONG64FPFlagFalse {
			branch = loong64.ABFPT
		}
		p := s.Prog(loong64.AMOVV)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = loong64.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
		p2 := s.Prog(branch)
		p2.To.Type = obj.TYPE_BRANCH
		p3 := s.Prog(loong64.AMOVV)
		p3.From.Type = obj.TYPE_CONST
		p3.From.Offset = 1
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = v.Reg()
		p4 := s.Prog(obj.ANOP) // not a machine instruction, for branch to land
		p2.To.SetTarget(p4)
	case ssa.OpLOONG64LoweredGetClosurePtr:
		// Closure pointer is R22 (loong64.REGCTXT).
		ssagen.CheckLoweredGetClosurePtr(v)
	case ssa.OpLOONG64LoweredGetCallerSP:
		// caller's SP is FixedFrameSize below the address of the first arg
		p := s.Prog(loong64.AMOVV)
		p.From.Type = obj.TYPE_ADDR
		p.From.Offset = -base.Ctxt.Arch.FixedFrameSize
		p.From.Name = obj.NAME_PARAM
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpLOONG64LoweredGetCallerPC:
		p := s.Prog(obj.AGETCALLERPC)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpLOONG64MASKEQZ, ssa.OpLOONG64MASKNEZ:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpClobber, ssa.OpClobberReg:
		// TODO: implement for clobberdead experiment. Nop is ok for now.
	default:
		v.Fatalf("genValue not implemented: %s", v.LongString())
	}
}

// blockJump maps each conditional block kind to the branch instruction
// taken when the condition holds (asm) and its inverse (invasm), used
// when the layout makes the true successor the fallthrough.
var blockJump = map[ssa.BlockKind]struct {
	asm, invasm obj.As
}{
	ssa.BlockLOONG64EQ:  {loong64.ABEQ, loong64.ABNE},
	ssa.BlockLOONG64NE:  {loong64.ABNE, loong64.ABEQ},
	ssa.BlockLOONG64LTZ: {loong64.ABLTZ, loong64.ABGEZ},
	ssa.BlockLOONG64GEZ: {loong64.ABGEZ, loong64.ABLTZ},
	ssa.BlockLOONG64LEZ: {loong64.ABLEZ, loong64.ABGTZ},
	ssa.BlockLOONG64GTZ: {loong64.ABGTZ, loong64.ABLEZ},
	ssa.BlockLOONG64FPT: {loong64.ABFPT, loong64.ABFPF},
	ssa.BlockLOONG64FPF: {loong64.ABFPF, loong64.ABFPT},
}

// ssaGenBlock emits the control-flow instructions ending block b;
// next is the block laid out immediately after b (nil-safe sentinel),
// so a jump to it can be omitted.
func ssaGenBlock(s *ssagen.State, b, next *ssa.Block) {
	switch b.Kind {
	case ssa.BlockPlain:
		if b.Succs[0].Block() != next {
			p := s.Prog(obj.AJMP)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[0].Block()})
		}
	case ssa.BlockDefer:
		// defer returns in R19:
		// 0 if we should continue executing
		// 1 if we should jump to deferreturn call
		p := s.Prog(loong64.ABNE)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = loong64.REGZERO
		p.Reg = loong64.REG_R19
		p.To.Type = obj.TYPE_BRANCH
		s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[1].Block()})
		if b.Succs[0].Block() != next {
			p := s.Prog(obj.AJMP)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[0].Block()})
		}
	case ssa.BlockExit, ssa.BlockRetJmp:
	case ssa.BlockRet:
		s.Prog(obj.ARET)
	case ssa.BlockLOONG64EQ, ssa.BlockLOONG64NE,
		ssa.BlockLOONG64LTZ, ssa.BlockLOONG64GEZ,
		ssa.BlockLOONG64LEZ, ssa.BlockLOONG64GTZ,
		ssa.BlockLOONG64FPT, ssa.BlockLOONG64FPF:
		jmp := blockJump[b.Kind]
		var p *obj.Prog
		switch next {
		case b.Succs[0].Block():
			// True successor falls through: branch on the inverted condition.
			p = s.Br(jmp.invasm, b.Succs[1].Block())
		case b.Succs[1].Block():
			p = s.Br(jmp.asm, b.Succs[0].Block())
		default:
			// Neither successor falls through: branch plus unconditional jump,
			// ordered so the likely path takes the conditional branch.
			if b.Likely != ssa.BranchUnlikely {
				p = s.Br(jmp.asm, b.Succs[0].Block())
				s.Br(obj.AJMP, b.Succs[1].Block())
			} else {
				p = s.Br(jmp.invasm, b.Succs[1].Block())
				s.Br(obj.AJMP, b.Succs[0].Block())
			}
		}
		if !b.Controls[0].Type.IsFlags() {
			p.From.Type = obj.TYPE_REG
			p.From.Reg = b.Controls[0].Reg()
		}
	default:
		b.Fatalf("branch not implemented: %s", b.LongString())
	}
}