github.com/cloudwego/frugal@v0.1.15/internal/binary/encoder/compiler_measure.go

/*
 * Copyright 2022 ByteDance Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package encoder

import (
    `math`

    `github.com/cloudwego/frugal/internal/atm/abi`
    `github.com/cloudwego/frugal/internal/binary/defs`
)

func (self *Compiler) measure(p *Program, sp int, vt *defs.Type, startpc int) {
    rt := vt.S
    tt := vt.T

    /* only recurse on structs */
    if tt != defs.T_struct {
        self.measureOne(p, sp, vt, startpc)
        return
    }

    /* check for loops with inlining depth limit */
    if self.t[rt] || !self.o.CanInline(sp, (p.pc() - startpc) * 2) {
        p.rtt(OP_size_defer, rt)
        return
    }

    /* measure the type recursively */
    self.t[rt] = true
    self.measureOne(p, sp, vt, startpc)
    delete(self.t, rt)
}

func (self *Compiler) measureOne(p *Program, sp int, vt *defs.Type, startpc int) {
    switch vt.T {
        case defs.T_bool    : p.i64(OP_size_const, 1)
        case defs.T_i8      : p.i64(OP_size_const, 1)
        case defs.T_i16     : p.i64(OP_size_const, 2)
        case defs.T_i32     : p.i64(OP_size_const, 4)
        case defs.T_i64     : p.i64(OP_size_const, 8)
        case defs.T_enum    : p.i64(OP_size_const, 4)
        case defs.T_double  : p.i64(OP_size_const, 8)
        case defs.T_string  : p.i64(OP_size_const, 4); p.dyn(OP_size_dyn, abi.PtrSize, 1)
        case defs.T_binary  : p.i64(OP_size_const, 4); p.dyn(OP_size_dyn, abi.PtrSize, 1)
        case defs.T_map     : self.measureMap(p, sp, vt, startpc)
        case defs.T_set     : self.measureSeq(p, sp, vt, startpc)
        case defs.T_list    : self.measureSeq(p, sp, vt, startpc)
        case defs.T_struct  : self.measureStruct(p, sp, vt, startpc)
        case defs.T_pointer : self.measurePtr(p, sp, vt, startpc)
        default             : panic("measureOne: unreachable")
    }
}

func (self *Compiler) measurePtr(p *Program, sp int, vt *defs.Type, startpc int) {
    i := p.pc()
    p.tag(sp)
    p.add(OP_if_nil)
    p.add(OP_make_state)
    p.add(OP_deref)
    self.measure(p, sp + 1, vt.V, startpc)
    p.add(OP_drop_state)
    p.pin(i)
}

func (self *Compiler) measureMap(p *Program, sp int, vt *defs.Type, startpc int) {
    nk := defs.GetSize(vt.K.S)
    nv := defs.GetSize(vt.V.S)

    /* 6-byte map header */
    p.tag(sp)
    p.i64(OP_size_const, 6)

    /* check for nil maps */
    i := p.pc()
    p.add(OP_if_nil)

    /* key and value are both trivially measurable */
    if nk > 0 && nv > 0 {
        p.i64(OP_size_map, int64(nk + nv))
        p.pin(i)
        return
    }

    /* key or value is trivially measurable */
    if nk > 0 { p.i64(OP_size_map, int64(nk)) }
    if nv > 0 { p.i64(OP_size_map, int64(nv)) }

    /* complex maps */
    j := p.pc()
    p.add(OP_map_if_empty)
    p.add(OP_make_state)
    p.rtt(OP_map_begin, vt.S)
    k := p.pc()

    /* complex keys */
    if nk <= 0 {
        p.add(OP_map_key)
        self.measureItem(p, sp + 1, vt.K, startpc)
    }

    /* complex values */
    if nv <= 0 {
        p.add(OP_map_value)
        self.measureItem(p, sp + 1, vt.V, startpc)
    }
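
    /*
     * NOTE: judging by the opcode names (an assumption, not documented here),
     * each pass through the block above measures one key/value pair:
     * OP_map_next below advances the iterator opened by OP_map_begin, and
     * OP_map_if_next loops back to `k` while entries remain.
     */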

    /* move to the next state */
    p.add(OP_map_next)
    p.jmp(OP_map_if_next, k)
    p.add(OP_drop_state)
    p.pin(i)
    p.pin(j)
}

func (self *Compiler) measureSeq(p *Program, sp int, vt *defs.Type, startpc int) {
    et := vt.V
    nb := defs.GetSize(et.S)

    /* 5-byte list or set header */
    p.tag(sp)
    p.i64(OP_size_const, 5)

    /* check for nil slice */
    i := p.pc()
    p.add(OP_if_nil)

    /* element is trivially measurable */
    if nb > 0 {
        p.dyn(OP_size_dyn, abi.PtrSize, int64(nb))
        p.pin(i)
        return
    }

    /* complex lists or sets */
    j := p.pc()
    p.add(OP_list_if_empty)
    p.add(OP_make_state)
    p.add(OP_list_begin)
    k := p.pc()
    p.add(OP_goto)
    r := p.pc()
    p.i64(OP_seek, int64(et.S.Size()))
    p.pin(k)
    self.measureItem(p, sp + 1, et, startpc)
    p.add(OP_list_decr)
    p.jmp(OP_list_if_next, r)
    p.add(OP_drop_state)
    p.pin(i)
    p.pin(j)
}

func (self *Compiler) measureItem(p *Program, sp int, vt *defs.Type, startpc int) {
    tag  := vt.T
    elem := vt.V

    /* special handling for pointers */
    if tag != defs.T_pointer {
        self.measure(p, sp, vt, startpc)
        return
    }

    /* must be a struct pointer at this point */
    if elem.T != defs.T_struct {
        panic("fatal: non-struct pointers within container elements")
    }

    /* always add the STOP field for structs */
    i := p.pc()
    p.tag(sp)
    p.add(OP_if_nil)
    p.add(OP_make_state)
    p.add(OP_deref)
    self.measure(p, sp + 1, elem, startpc)
    p.add(OP_drop_state)
    j := p.pc()
    p.add(OP_goto)
    p.pin(i)
    p.i64(OP_size_const, 1)
    p.pin(j)
}

func (self *Compiler) measureStruct(p *Program, sp int, vt *defs.Type, startpc int) {
    var err error
    var fvs []defs.Field

    /* struct is trivially measurable */
    if nb := defs.GetSize(vt.S); nb > 0 {
        p.i64(OP_size_const, int64(nb))
        return
    }

    /* resolve the fields */
    if fvs, err = defs.ResolveFields(vt.S); err != nil {
        panic(err)
    }

    /* empty structs */
    if len(fvs) == 0 {
        p.i64(OP_size_const, 4)
        return
    }

    /* 1-byte stop field */
    p.tag(sp)
    p.i64(OP_size_const, 1)

    /* measure every field */
    for _, fv := range fvs {
        p.i64(OP_seek, int64(fv.F))
        self.measureField(p, sp + 1, fv, startpc)
        p.i64(OP_seek, -int64(fv.F))
    }
}

func (self *Compiler) measureField(p *Program, sp int, fv defs.Field, startpc int) {
    switch fv.Type.T {
        default: {
            panic("fatal: invalid field type: " + fv.Type.String())
        }

        /* non-pointer types */
        case defs.T_bool   : fallthrough
        case defs.T_i8     : fallthrough
        case defs.T_double : fallthrough
        case defs.T_i16    : fallthrough
        case defs.T_i32    : fallthrough
        case defs.T_i64    : fallthrough
        case defs.T_string : fallthrough
        case defs.T_enum   : fallthrough
        case defs.T_binary : {
            if fv.Default.IsValid() && fv.Spec == defs.Optional {
                self.measureStructDefault(p, sp, fv, startpc)
            } else {
                self.measureStructRequired(p, sp, fv, startpc)
            }
        }

        /* struct types, only available in hand-written structs */
        case defs.T_struct: {
            self.measureStructRequired(p, sp, fv, startpc)
        }

        /* sequential types */
        case defs.T_map  : fallthrough
        case defs.T_set  : fallthrough
        case defs.T_list : {
            if fv.Spec == defs.Optional {
                self.measureStructIterable(p, sp, fv, startpc)
            } else {
                self.measureStructRequired(p, sp, fv, startpc)
            }
        }

        /* pointers */
        case defs.T_pointer: {
            if fv.Spec == defs.Optional {
                self.measureStructOptional(p, sp, fv, startpc)
            } else if fv.Type.V.T == defs.T_struct {
                self.measureStructPointer(p, sp, fv, startpc)
            } else {
                panic("fatal: non-optional non-struct pointers")
            }
        }
    }
}

func (self *Compiler) measureStructDefault(p *Program, sp int, fv defs.Field, startpc int) {
    i := p.pc()
    t := fv.Type.T

    /* check for default values */
    switch t {
        case defs.T_bool   : p.dyn(OP_if_eq_imm, 1, bool2i64(fv.Default.Bool()))
        case defs.T_i8     : p.dyn(OP_if_eq_imm, 1, fv.Default.Int())
        case defs.T_double : p.dyn(OP_if_eq_imm, 8, int64(math.Float64bits(fv.Default.Float())))
        case defs.T_i16    : p.dyn(OP_if_eq_imm, 2, fv.Default.Int())
        case defs.T_i32    : p.dyn(OP_if_eq_imm, 4, fv.Default.Int())
        case defs.T_i64    : p.dyn(OP_if_eq_imm, 8, fv.Default.Int())
        case defs.T_string : p.str(OP_if_eq_str, fv.Default.String())
        case defs.T_enum   : p.dyn(OP_if_eq_imm, 4, fv.Default.Int())
        case defs.T_binary : p.str(OP_if_eq_str, mem2str(fv.Default.Bytes()))
        default            : panic("unreachable")
    }

    /* measure if it's not the default value */
    p.i64(OP_size_const, 3)
    self.measure(p, sp, fv.Type, startpc)
    p.pin(i)
}

func (self *Compiler) measureStructPointer(p *Program, sp int, fv defs.Field, startpc int) {
    i := p.pc()
    p.add(OP_if_nil)
    p.i64(OP_size_const, 3)
    p.add(OP_make_state)
    p.add(OP_deref)
    self.measure(p, sp + 1, fv.Type.V, startpc)
    p.add(OP_drop_state)
    j := p.pc()
    p.add(OP_goto)
    p.pin(i)
    p.i64(OP_size_const, 4)
    p.pin(j)
}

func (self *Compiler) measureStructIterable(p *Program, sp int, fv defs.Field, startpc int) {
    i := p.pc()
    p.add(OP_if_nil)
    p.i64(OP_size_const, 3)
    self.measure(p, sp, fv.Type, startpc)
    p.pin(i)
}

func (self *Compiler) measureStructOptional(p *Program, sp int, fv defs.Field, startpc int) {
    i := p.pc()
    p.add(OP_if_nil)
    p.i64(OP_size_const, 3)
    p.add(OP_make_state)
    p.add(OP_deref)
    self.measure(p, sp + 1, fv.Type.V, startpc)
    p.add(OP_drop_state)
    p.pin(i)
}

func (self *Compiler) measureStructRequired(p *Program, sp int, fv defs.Field, startpc int) {
    p.i64(OP_size_const, 3)
    self.measure(p, sp, fv.Type, startpc)
}
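
The constants emitted by this pass track the Thrift binary protocol wire format: a 3-byte field header (one type byte plus a 2-byte field id), a 1-byte STOP marker, a 4-byte length prefix for string/binary, a 6-byte map header, and a 5-byte list/set header. As a standalone, hypothetical sketch (not part of frugal; the function and struct names are invented for illustration), the snippet below adds up the same constants by hand for a struct with one required i32 field and one optional string field:

// exampleMeasuredSize is a hypothetical, standalone illustration: it sums the
// same constants the measure pass would emit for something like
//
//     struct Example { 1: required i32 A; 2: optional string B }
//
// under the Thrift binary protocol.
package main

import "fmt"

func exampleMeasuredSize(b *string) int {
    n := 1     // 1-byte STOP field of the enclosing struct
    n += 3 + 4 // 3-byte field header + fixed 4-byte i32 payload
    if b != nil {
        n += 3 + 4 + len(*b) // field header + 4-byte length prefix + string bytes
    }
    return n
}

func main() {
    s := "hello"
    fmt.Println(exampleMeasuredSize(&s)) // 20 bytes: 1 + 7 + 12
}

The same arithmetic appears to be the point of measureStructDefault above: judging by OP_if_eq_imm, an optional scalar field still holding its declared default is skipped and contributes nothing to the measured size.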