github.com/cloudwego/frugal@v0.1.15/internal/atm/ssa/pass_abispec_amd64.go

/*
 * Copyright 2022 ByteDance Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package ssa

import (
    `github.com/cloudwego/iasm/x86_64`
    `github.com/cloudwego/frugal/internal/atm/abi`
    `github.com/cloudwego/frugal/internal/rt`
)

/* caller-saved registers under the C (System V AMD64) calling convention */
var _AbiClobbersC = []x86_64.Register64 {
    x86_64.RAX,
    x86_64.RCX,
    x86_64.RDX,
    x86_64.RSI,
    x86_64.RDI,
    x86_64.R8,
    x86_64.R9,
    x86_64.R10,
    x86_64.R11,
}

/* registers clobbered by a Go-ABI call */
var _AbiClobbersGo = []x86_64.Register64 {
    x86_64.RAX,
    x86_64.RCX,
    x86_64.RDX,
    x86_64.RBX,
    x86_64.RSI,
    x86_64.RDI,
    x86_64.R8,
    x86_64.R9,
    x86_64.R10,
    x86_64.R11,
    x86_64.R12,
    x86_64.R13,
    x86_64.R14,
    x86_64.R15,
}

/* System V AMD64 integer argument registers, in order */
var _NativeArgsOrder = [...]x86_64.Register64 {
    x86_64.RDI,
    x86_64.RSI,
    x86_64.RDX,
    x86_64.RCX,
    x86_64.R8,
    x86_64.R9,
}

func (ABILowering) abiCallFunc(cfg *CFG, bb *BasicBlock, p *IrCallFunc) {
    argc := len(p.In)
    retc := len(p.Out)

    /* check argument & return value count */
    if argc != len(p.Func.Args) || retc != len(p.Func.Rets) {
        panic("abi: gcall argument count mismatch: " + p.String())
    }

    /* register buffer */
    argv := make([]Reg, 0, argc)
    retv := make([]Reg, 0, retc)
    clob := make([]Reg, 0, len(_AbiClobbersGo))
    rmap := make(map[x86_64.Register64]bool, len(_AbiClobbersGo))

    /* add all arch registers */
    for _, r := range _AbiClobbersGo {
        rmap[r] = true
    }

    /* store each argument */
    for i, r := range p.In {
        if v := p.Func.Args[i]; !v.InRegister {
            mm := v.Mem
            bb.Ins = append(bb.Ins, IrArchStoreStack(r, mm, IrSlotCall))
        } else {
            rr := IrSetArch(cfg.CreateRegister(r.Ptr()), v.Reg)
            bb.Ins, argv = append(bb.Ins, IrArchCopy(rr, r)), append(argv, rr)
        }
    }

    /* convert each return register */
    for i, r := range p.Out {
        if v := p.Func.Rets[i]; v.InRegister && r.Kind() != K_zero {
            retv = append(retv, IrSetArch(cfg.CreateRegister(r.Ptr()), v.Reg))
            delete(rmap, v.Reg)
        }
    }

    /* exclude return values from clobbering list (they are implied) */
    for _, r := range _AbiClobbersGo {
        if rmap[r] {
            clob = append(clob, IrSetArch(cfg.CreateRegister(false), r))
        }
    }

    /* add the call instruction */
    bb.Ins = append(bb.Ins, &IrAMD64_CALL_reg {
        Fn   : p.R,
        In   : argv,
        Out  : retv,
        Clob : clob,
    })

    /* declare clobber list if any */
    if len(clob) != 0 {
        bb.Ins = append(bb.Ins, IrMarkClobber(clob...))
    }

    /* load each return value */
    for i, r := range p.Out {
        if r.Kind() != K_zero {
            if v := p.Func.Rets[i]; v.InRegister {
                bb.Ins = append(bb.Ins, IrArchCopy(r, retv[i]))
            } else {
                bb.Ins = append(bb.Ins, IrArchLoadStack(r, v.Mem, IrSlotCall))
            }
        }
    }
}
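The clobber set above is built by elimination: every Go-ABI caller-saved register is seeded into rmap, registers that carry return values are deleted, and whatever survives is declared clobbered, since a register that receives a result is already known to be overwritten. Below is a minimal standalone sketch of that idea, with plain strings standing in for frugal's Reg and x86_64.Register64 types; all names in it are illustrative, not part of the real API.

package main

import "fmt"

// computeClobbers mirrors the rmap logic above: seed the set with every
// caller-saved register, remove the ones that carry return values, and
// report the rest as clobbered by the call.
func computeClobbers(callerSaved []string, returnRegs []string) []string {
    rmap := make(map[string]bool, len(callerSaved))
    for _, r := range callerSaved {
        rmap[r] = true
    }
    for _, r := range returnRegs {
        delete(rmap, r)
    }
    clob := make([]string, 0, len(callerSaved))
    for _, r := range callerSaved {
        if rmap[r] { // iterate callerSaved to keep a deterministic order
            clob = append(clob, r)
        }
    }
    return clob
}

func main() {
    goABI := []string{"RAX", "RBX", "RCX", "RDX", "RSI", "RDI"}
    fmt.Println(computeClobbers(goABI, []string{"RAX", "RBX"}))
    // Output: [RCX RDX RSI RDI]
}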
func (ABILowering) abiCallNative(cfg *CFG, bb *BasicBlock, p *IrCallNative) {
    retv := Rz
    argc := len(p.In)

    /* check for argument count */
    if argc > len(_NativeArgsOrder) {
        panic("abi: too many native arguments: " + p.String())
    }

    /* register buffers */
    argv := make([]Reg, 0, argc)
    clob := make([]Reg, 0, len(_AbiClobbersC))
    rmap := make(map[x86_64.Register64]bool, len(_AbiClobbersC))

    /* add all arch registers */
    for _, r := range _AbiClobbersC {
        rmap[r] = true
    }

    /* convert each argument */
    for i, r := range p.In {
        argv = append(argv, IrSetArch(cfg.CreateRegister(r.Ptr()), _NativeArgsOrder[i]))
        bb.Ins = append(bb.Ins, IrArchCopy(argv[i], r))
    }

    /* allocate register for return value if needed */
    if p.Out.Kind() != K_zero {
        retv = IrSetArch(cfg.CreateRegister(p.Out.Ptr()), x86_64.RAX)
        delete(rmap, x86_64.RAX)
    }

    /* exclude return values from clobbering list (they are implied) */
    for _, r := range _AbiClobbersC {
        if rmap[r] {
            clob = append(clob, IrSetArch(cfg.CreateRegister(false), r))
        }
    }

    /* add the call instruction */
    bb.Ins = append(bb.Ins, &IrAMD64_CALL_reg {
        Fn   : p.R,
        In   : argv,
        Out  : []Reg { retv },
        Clob : clob,
    })

    /* declare clobber list if any */
    if len(clob) != 0 {
        bb.Ins = append(bb.Ins, IrMarkClobber(clob...))
    }

    /* copy the return value if needed */
    if p.Out.Kind() != K_zero {
        bb.Ins = append(bb.Ins, IrArchCopy(p.Out, retv))
    }
}
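abiCallNative handles only the register portion of the System V AMD64 convention: integer argument i is copied into _NativeArgsOrder[i] (RDI, RSI, RDX, RCX, R8, R9), the single result, if any, comes back in RAX, and any call that would need stack arguments is rejected up front. A small self-contained sketch of that assignment follows; the string register names and function are illustrative only.

package main

import "fmt"

// nativeArgsOrder lists the System V AMD64 integer argument registers in
// the same order as _NativeArgsOrder above.
var nativeArgsOrder = [...]string{"RDI", "RSI", "RDX", "RCX", "R8", "R9"}

// assignNativeArgs maps each argument to its register, refusing calls
// that would spill onto the stack, like the argc check in abiCallNative.
func assignNativeArgs(args []string) ([]string, error) {
    if len(args) > len(nativeArgsOrder) {
        return nil, fmt.Errorf("too many native arguments: %d", len(args))
    }
    out := make([]string, len(args))
    for i, a := range args {
        out[i] = a + " -> " + nativeArgsOrder[i]
    }
    return out, nil
}

func main() {
    regs, err := assignNativeArgs([]string{"buf", "len", "flags"})
    if err != nil {
        panic(err)
    }
    fmt.Println(regs) // [buf -> RDI len -> RSI flags -> RDX]
}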
func (ABILowering) abiCallMethod(cfg *CFG, bb *BasicBlock, p *IrCallMethod) {
    argc := len(p.In) + 1
    retc := len(p.Out)

    /* check argument & return value count */
    if argc != len(p.Func.Args) || retc != len(p.Func.Rets) {
        panic("abi: icall argument count mismatch: " + p.String())
    }

    /* register buffer */
    argv := make([]Reg, 0, argc)
    retv := make([]Reg, 0, retc)
    clob := make([]Reg, 0, len(_AbiClobbersGo))
    rmap := make(map[x86_64.Register64]bool, len(_AbiClobbersGo))

    /* add all arch registers */
    for _, r := range _AbiClobbersGo {
        rmap[r] = true
    }

    /* store the receiver */
    if rx := p.Func.Args[0]; !rx.InRegister {
        mm := p.Func.Args[0].Mem
        bb.Ins = append(bb.Ins, IrArchStoreStack(p.V, mm, IrSlotCall))
    } else {
        rr := IrSetArch(cfg.CreateRegister(p.V.Ptr()), rx.Reg)
        bb.Ins, argv = append(bb.Ins, IrArchCopy(rr, p.V)), append(argv, rr)
    }

    /* store each argument */
    for i, r := range p.In {
        if v := p.Func.Args[i+1]; !v.InRegister {
            mm := v.Mem
            bb.Ins = append(bb.Ins, IrArchStoreStack(r, mm, IrSlotCall))
        } else {
            rr := IrSetArch(cfg.CreateRegister(r.Ptr()), v.Reg)
            bb.Ins, argv = append(bb.Ins, IrArchCopy(rr, r)), append(argv, rr)
        }
    }

    /* convert each return register */
    for i, r := range p.Out {
        if v := p.Func.Rets[i]; v.InRegister && r.Kind() != K_zero {
            retv = append(retv, IrSetArch(cfg.CreateRegister(r.Ptr()), v.Reg))
            delete(rmap, v.Reg)
        }
    }

    /* exclude return values from clobbering list (they are implied) */
    for _, r := range _AbiClobbersGo {
        if rmap[r] {
            clob = append(clob, IrSetArch(cfg.CreateRegister(false), r))
        }
    }

    /* add the call instruction */
    bb.Ins = append(bb.Ins, &IrAMD64_CALL_mem {
        Fn   : Ptr(p.T, int32(rt.GoItabFuncBase) + int32(p.Slot) * abi.PtrSize),
        In   : argv,
        Out  : retv,
        Clob : clob,
    })

    /* declare clobber list if any */
    if len(clob) != 0 {
        bb.Ins = append(bb.Ins, IrMarkClobber(clob...))
    }

    /* load each return value */
    for i, r := range p.Out {
        if r.Kind() != K_zero {
            if v := p.Func.Rets[i]; v.InRegister {
                bb.Ins = append(bb.Ins, IrArchCopy(r, retv[i]))
            } else {
                bb.Ins = append(bb.Ins, IrArchLoadStack(r, v.Mem, IrSlotCall))
            }
        }
    }
}
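The notable difference in abiCallMethod is the call target: instead of a register, IrAMD64_CALL_mem loads the function pointer from memory at p.T plus rt.GoItabFuncBase + p.Slot*abi.PtrSize, i.e. the p.Slot-th entry of the itab's method table. Here is a standalone sketch of that offset computation, using a mock struct that mirrors the runtime's itab layout on amd64; the mock type and constant are assumptions for illustration, not the real runtime or frugal definitions.

package main

import (
    "fmt"
    "unsafe"
)

// itabLayout mimics the amd64 layout of runtime.itab for illustration.
type itabLayout struct {
    inter unsafe.Pointer // *interfacetype
    typ   unsafe.Pointer // *_type
    hash  uint32         // copy of typ.hash, used by type switches
    _     [4]byte        // padding
    fun   [1]uintptr     // method table; one entry per interface method
}

const ptrSize = 8 // abi.PtrSize on amd64

var layout itabLayout

// methodOffset computes the byte displacement of method slot n inside an
// itab, matching rt.GoItabFuncBase + slot*abi.PtrSize in the pass above.
func methodOffset(slot int) uintptr {
    return unsafe.Offsetof(layout.fun) + uintptr(slot)*ptrSize
}

func main() {
    for slot := 0; slot < 3; slot++ {
        fmt.Printf("method slot %d lives at itab+%d\n", slot, methodOffset(slot))
    }
    // Output:
    // method slot 0 lives at itab+24
    // method slot 1 lives at itab+32
    // method slot 2 lives at itab+40
}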