github.com/osdi23p228/fabric@v0.0.0-20221218062954-77808885f5db/common/viperutil/config_util.go

/*
Copyright IBM Corp. All Rights Reserved.

SPDX-License-Identifier: Apache-2.0
*/

package viperutil

import (
	"encoding/json"
	"encoding/pem"
	"fmt"
	"io/ioutil"
	"math"
	"reflect"
	"regexp"
	"strconv"
	"strings"
	"time"

	"github.com/Shopify/sarama"
	version "github.com/hashicorp/go-version"
	"github.com/mitchellh/mapstructure"
	"github.com/osdi23p228/fabric/bccsp/factory"
	"github.com/osdi23p228/fabric/common/flogging"
	"github.com/pkg/errors"
	"github.com/spf13/viper"
)

var logger = flogging.MustGetLogger("viperutil")

type viperGetter func(key string) interface{}

func getKeysRecursively(base string, getKey viperGetter, nodeKeys map[string]interface{}, oType reflect.Type) map[string]interface{} {
	subTypes := map[string]reflect.Type{}

	if oType != nil && oType.Kind() == reflect.Struct {
	outer:
		for i := 0; i < oType.NumField(); i++ {
			fieldName := oType.Field(i).Name
			fieldType := oType.Field(i).Type

			for key := range nodeKeys {
				if strings.EqualFold(fieldName, key) {
					subTypes[key] = fieldType
					continue outer
				}
			}

			subTypes[fieldName] = fieldType
			nodeKeys[fieldName] = nil
		}
	}

	result := make(map[string]interface{})
	for key := range nodeKeys {
		fqKey := base + key

		val := getKey(fqKey)
		if m, ok := val.(map[interface{}]interface{}); ok {
			logger.Debugf("Found map[interface{}]interface{} value for %s", fqKey)
			tmp := make(map[string]interface{})
			for ik, iv := range m {
				cik, ok := ik.(string)
				if !ok {
					panic("Non string key-entry")
				}
				tmp[cik] = iv
			}
			result[key] = getKeysRecursively(fqKey+".", getKey, tmp, subTypes[key])
		} else if m, ok := val.(map[string]interface{}); ok {
			logger.Debugf("Found map[string]interface{} value for %s", fqKey)
			result[key] = getKeysRecursively(fqKey+".", getKey, m, subTypes[key])
		} else if m, ok := unmarshalJSON(val); ok {
			logger.Debugf("Found real value for %s setting to map[string]string %v", fqKey, m)
			result[key] = m
		} else {
			if val == nil {
				fileSubKey := fqKey + ".File"
				fileVal := getKey(fileSubKey)
				if fileVal != nil {
					result[key] = map[string]interface{}{"File": fileVal}
					continue
				}
			}
			logger.Debugf("Found real value for %s setting to %T %v", fqKey, val, val)
			result[key] = val
		}
	}
	return result
}

func unmarshalJSON(val interface{}) (map[string]string, bool) {
	mp := map[string]string{}

	s, ok := val.(string)
	if !ok {
		logger.Debugf("Unmarshal JSON: value is not a string: %v", val)
		return nil, false
	}
	err := json.Unmarshal([]byte(s), &mp)
	if err != nil {
		logger.Debugf("Unmarshal JSON: value cannot be unmarshalled: %s", err)
		return nil, false
	}
	return mp, true
}

// customDecodeHook adds the additional functions of parsing durations from strings
// as well as parsing strings of the format "[thing1, thing2, thing3]" into string slices.
// Note that whitespace around slice elements is removed.
func customDecodeHook(f reflect.Type, t reflect.Type, data interface{}) (interface{}, error) {
	durationHook := mapstructure.StringToTimeDurationHookFunc()
	dur, err := mapstructure.DecodeHookExec(durationHook, f, t, data)
	if err == nil {
		if _, ok := dur.(time.Duration); ok {
			return dur, nil
		}
	}

	if f.Kind() != reflect.String {
		return data, nil
	}

	raw := data.(string)
	l := len(raw)
	if l > 1 && raw[0] == '[' && raw[l-1] == ']' {
		slice := strings.Split(raw[1:l-1], ",")
		for i, v := range slice {
			slice[i] = strings.TrimSpace(v)
		}
		return slice, nil
	}

	return data, nil
}
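// Illustrative sketch (added commentary, not part of the upstream file): the hook
// above is what lets a flat string in the config decode into a richer Go type once
// EnhancedExactUnmarshal (below) composes it into the decoder. With a hypothetical
// target struct and values such as the ones here, a duration string and a bracketed
// list would decode as follows:
//
//	type exampleTimeouts struct {
//		DialTimeout time.Duration // "5s" in the config -> 5 * time.Second
//		Admins      []string      // "[alice, bob, carol]" -> []string{"alice", "bob", "carol"}
//	}
//
// The field and key names are placeholders chosen for the example; whitespace around
// the slice elements is trimmed by customDecodeHook.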
func byteSizeDecodeHook(f reflect.Kind, t reflect.Kind, data interface{}) (interface{}, error) {
	if f != reflect.String || t != reflect.Uint32 {
		return data, nil
	}
	raw := data.(string)
	if raw == "" {
		return data, nil
	}
	var re = regexp.MustCompile(`^(?P<size>[0-9]+)\s*(?i)(?P<unit>(k|m|g))b?$`)
	if re.MatchString(raw) {
		size, err := strconv.ParseUint(re.ReplaceAllString(raw, "${size}"), 0, 64)
		if err != nil {
			return data, nil
		}
		unit := re.ReplaceAllString(raw, "${unit}")
		switch strings.ToLower(unit) {
		case "g":
			size = size << 10
			fallthrough
		case "m":
			size = size << 10
			fallthrough
		case "k":
			size = size << 10
		}
		if size > math.MaxUint32 {
			return size, fmt.Errorf("value '%s' overflows uint32", raw)
		}
		return size, nil
	}
	return data, nil
}

func stringFromFileDecodeHook(f reflect.Kind, t reflect.Kind, data interface{}) (interface{}, error) {
	// "to" type should be string
	if t != reflect.String {
		return data, nil
	}
	// "from" type should be map
	if f != reflect.Map {
		return data, nil
	}
	v := reflect.ValueOf(data)
	switch v.Kind() {
	case reflect.String:
		return data, nil
	case reflect.Map:
		d := data.(map[string]interface{})
		fileName, ok := d["File"]
		if !ok {
			fileName, ok = d["file"]
		}
		switch {
		case ok && fileName != nil:
			bytes, err := ioutil.ReadFile(fileName.(string))
			if err != nil {
				return data, err
			}
			return string(bytes), nil
		case ok:
			// fileName was nil
			return nil, fmt.Errorf("Value of File: was nil")
		}
	}
	return data, nil
}

func pemBlocksFromFileDecodeHook(f reflect.Kind, t reflect.Kind, data interface{}) (interface{}, error) {
	// "to" type should be a slice
	if t != reflect.Slice {
		return data, nil
	}
	// "from" type should be map
	if f != reflect.Map {
		return data, nil
	}
	v := reflect.ValueOf(data)
	switch v.Kind() {
	case reflect.String:
		return data, nil
	case reflect.Map:
		var fileName string
		var ok bool
		switch d := data.(type) {
		case map[string]string:
			fileName, ok = d["File"]
			if !ok {
				fileName, ok = d["file"]
			}
		case map[string]interface{}:
			var fileI interface{}
			fileI, ok = d["File"]
			if !ok {
				fileI = d["file"]
			}
			fileName, ok = fileI.(string)
		}

		switch {
		case ok && fileName != "":
			var result []string
			bytes, err := ioutil.ReadFile(fileName)
			if err != nil {
				return data, err
			}
			for len(bytes) > 0 {
				var block *pem.Block
				block, bytes = pem.Decode(bytes)
				if block == nil {
					break
				}
				if block.Type != "CERTIFICATE" || len(block.Headers) != 0 {
					continue
				}
				result = append(result, string(pem.EncodeToMemory(block)))
			}
			return result, nil
		case ok:
			// fileName was empty
			return nil, fmt.Errorf("Value of File: was nil")
		}
	}
	return data, nil
}
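// Illustrative sketch (added commentary, not part of the upstream file): the two
// hooks above implement the "File:" indirection used by the config. Assuming a
// hypothetical YAML fragment like
//
//	Certificate:
//	    File: /path/to/server.crt
//	RootCAs:
//	    File: /path/to/ca-bundle.pem
//
// stringFromFileDecodeHook would replace the Certificate map with the raw contents
// of server.crt when the target field is a string, and pemBlocksFromFileDecodeHook
// would replace the RootCAs map with a []string holding one PEM-encoded CERTIFICATE
// block per entry in ca-bundle.pem when the target field is a slice. The paths and
// key names are placeholders for illustration only.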
var kafkaVersionConstraints map[sarama.KafkaVersion]version.Constraints

func init() {
	kafkaVersionConstraints = make(map[sarama.KafkaVersion]version.Constraints)
	kafkaVersionConstraints[sarama.V0_8_2_0], _ = version.NewConstraint(">=0.8.2,<0.8.2.1")
	kafkaVersionConstraints[sarama.V0_8_2_1], _ = version.NewConstraint(">=0.8.2.1,<0.8.2.2")
	kafkaVersionConstraints[sarama.V0_8_2_2], _ = version.NewConstraint(">=0.8.2.2,<0.9.0.0")
	kafkaVersionConstraints[sarama.V0_9_0_0], _ = version.NewConstraint(">=0.9.0.0,<0.9.0.1")
	kafkaVersionConstraints[sarama.V0_9_0_1], _ = version.NewConstraint(">=0.9.0.1,<0.10.0.0")
	kafkaVersionConstraints[sarama.V0_10_0_0], _ = version.NewConstraint(">=0.10.0.0,<0.10.0.1")
	kafkaVersionConstraints[sarama.V0_10_0_1], _ = version.NewConstraint(">=0.10.0.1,<0.10.1.0")
	kafkaVersionConstraints[sarama.V0_10_1_0], _ = version.NewConstraint(">=0.10.1.0,<0.10.2.0")
	kafkaVersionConstraints[sarama.V0_10_2_0], _ = version.NewConstraint(">=0.10.2.0,<0.11.0.0")
	kafkaVersionConstraints[sarama.V0_11_0_0], _ = version.NewConstraint(">=0.11.0.0,<1.0.0")
	kafkaVersionConstraints[sarama.V1_0_0_0], _ = version.NewConstraint(">=1.0.0")
}

func kafkaVersionDecodeHook(f reflect.Type, t reflect.Type, data interface{}) (interface{}, error) {
	if f.Kind() != reflect.String || t != reflect.TypeOf(sarama.KafkaVersion{}) {
		return data, nil
	}

	v, err := version.NewVersion(data.(string))
	if err != nil {
		return nil, fmt.Errorf("Unable to parse Kafka version: %s", err)
	}

	for kafkaVersion, constraints := range kafkaVersionConstraints {
		if constraints.Check(v) {
			return kafkaVersion, nil
		}
	}

	return nil, fmt.Errorf("Unsupported Kafka version: '%s'", data)
}

func bccspHook(f reflect.Type, t reflect.Type, data interface{}) (interface{}, error) {
	if t != reflect.TypeOf(&factory.FactoryOpts{}) {
		return data, nil
	}

	config := factory.GetDefaultOpts()

	err := mapstructure.WeakDecode(data, config)
	if err != nil {
		return nil, errors.Wrap(err, "could not decode bccsp type")
	}

	return config, nil
}

// EnhancedExactUnmarshal is intended to unmarshal a config file into a structure,
// producing an error when extraneous variables are introduced and supporting
// the time.Duration type.
func EnhancedExactUnmarshal(v *viper.Viper, output interface{}) error {
	oType := reflect.TypeOf(output)
	if oType.Kind() != reflect.Ptr {
		return errors.Errorf("supplied output argument must be a pointer to a struct but is not a pointer")
	}
	eType := oType.Elem()
	if eType.Kind() != reflect.Struct {
		return errors.Errorf("supplied output argument must be a pointer to a struct, but it is a pointer to something else")
	}

	baseKeys := v.AllSettings()

	getterWithClass := func(key string) interface{} { return v.Get(key) } // hide receiver
	leafKeys := getKeysRecursively("", getterWithClass, baseKeys, eType)

	logger.Debugf("%+v", leafKeys)
	config := &mapstructure.DecoderConfig{
		ErrorUnused:      true,
		Metadata:         nil,
		Result:           output,
		WeaklyTypedInput: true,
		DecodeHook: mapstructure.ComposeDecodeHookFunc(
			bccspHook,
			customDecodeHook,
			byteSizeDecodeHook,
			stringFromFileDecodeHook,
			pemBlocksFromFileDecodeHook,
			kafkaVersionDecodeHook,
		),
	}

	decoder, err := mapstructure.NewDecoder(config)
	if err != nil {
		return err
	}
	return decoder.Decode(leafKeys)
}
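// Illustrative sketch (added commentary, not part of the upstream file): a minimal
// caller of EnhancedExactUnmarshal. The struct, file name, and keys are hypothetical;
// the point is that every decode hook registered above is applied and unknown keys in
// the file cause an error because ErrorUnused is set.
//
//	type exampleConfig struct {
//		General struct {
//			ListenAddress string
//			DialTimeout   time.Duration
//		}
//	}
//
//	v := viper.New()
//	v.SetConfigFile("example.yaml") // hypothetical path
//	if err := v.ReadInConfig(); err != nil {
//		// handle read error
//	}
//	var conf exampleConfig
//	if err := EnhancedExactUnmarshal(v, &conf); err != nil {
//		// handle decode error
//	}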