github.com/1aal/kubeblocks@v0.0.0-20231107070852-e1c03e598921/pkg/lorry/engines/kafka/metadata.go

/*
Copyright 2021 The Dapr Authors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
    http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package kafka

import (
	"errors"
	"fmt"
	"strconv"
	"strings"
	"time"

	"github.com/Shopify/sarama"
)

const (
	key                  = "partitionKey"
	skipVerify           = "skipVerify"
	caCert               = "caCert"
	clientCert           = "clientCert"
	clientKey            = "clientKey"
	consumeRetryEnabled  = "consumeRetryEnabled"
	consumeRetryInterval = "consumeRetryInterval"
	authType             = "authType"
	passwordAuthType     = "password"
	oidcAuthType         = "oidc"
	mtlsAuthType         = "mtls"
	noAuthType           = "none"
)

type kafkaMetadata struct {
	Brokers              []string
	ConsumerGroup        string
	ClientID             string
	AuthType             string
	SaslUsername         string
	SaslPassword         string
	SaslMechanism        string
	InitialOffset        int64
	MaxMessageBytes      int
	OidcTokenEndpoint    string
	OidcClientID         string
	OidcClientSecret     string
	OidcScopes           []string
	TLSDisable           bool
	TLSSkipVerify        bool
	TLSCaCert            string
	TLSClientCert        string
	TLSClientKey         string
	ConsumeRetryEnabled  bool
	ConsumeRetryInterval time.Duration
	Version              sarama.KafkaVersion
}

// upgradeMetadata updates metadata properties based on deprecated usage.
func (k *Kafka) upgradeMetadata(metadata map[string]string) (map[string]string, error) {
	authTypeVal, authTypePres := metadata[authType]
	authReqVal, authReqPres := metadata["authRequired"]
	saslPassVal, saslPassPres := metadata["saslPassword"]

	// If authType is not set, derive it from the deprecated authRequired.
	if (!authTypePres || authTypeVal == "") && authReqPres && authReqVal != "" {
		k.logger.Info("AuthRequired is deprecated, use AuthType instead.")
		validAuthRequired, err := strconv.ParseBool(authReqVal)
		if err == nil {
			if validAuthRequired {
				// If legacy authRequired was used, either SASL password auth or mTLS is the method.
				if saslPassPres && saslPassVal != "" {
					// The user specified saslPassword, so password auth is intended.
					metadata[authType] = passwordAuthType
				} else {
					metadata[authType] = mtlsAuthType
				}
			} else {
				metadata[authType] = noAuthType
			}
		} else {
			return metadata, errors.New("kafka error: invalid value for 'authRequired' attribute")
		}
	}

	// If consumeRetryEnabled is not present, use the component default value.
	consumeRetryEnabledVal, consumeRetryEnabledPres := metadata[consumeRetryEnabled]
	if !consumeRetryEnabledPres || consumeRetryEnabledVal == "" {
		metadata[consumeRetryEnabled] = strconv.FormatBool(k.DefaultConsumeRetryEnabled)
	}

	return metadata, nil
}
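// A minimal sketch of the legacy mapping performed above, with hypothetical
// values (not part of the original file): a truthy authRequired plus a
// saslPassword yields password auth; a truthy authRequired alone yields mTLS.
//
//	md := map[string]string{"authRequired": "true", "saslPassword": "secret"}
//	md, _ = k.upgradeMetadata(md) // md[authType] == "password"
//
//	md = map[string]string{"authRequired": "true"}
//	md, _ = k.upgradeMetadata(md) // md[authType] == "mtls"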
// getKafkaMetadata returns new Kafka metadata.
func (k *Kafka) getKafkaMetadata(metadata map[string]string) (*kafkaMetadata, error) {
	meta := kafkaMetadata{
		ConsumeRetryInterval: 100 * time.Millisecond,
	}
	// Use the runtimeConfig.ID as the consumer group so that each dapr runtime creates its own consumer group.
	if val, ok := metadata["consumerID"]; ok && val != "" {
		meta.ConsumerGroup = val
		k.logger.Info(fmt.Sprintf("Using %s as ConsumerGroup", meta.ConsumerGroup))
	}

	// An explicit consumerGroup, if present, takes precedence over consumerID.
	if val, ok := metadata["consumerGroup"]; ok && val != "" {
		meta.ConsumerGroup = val
		k.logger.Info(fmt.Sprintf("Using %s as ConsumerGroup", meta.ConsumerGroup))
	}

	if val, ok := metadata["clientID"]; ok && val != "" {
		meta.ClientID = val
		k.logger.Info(fmt.Sprintf("Using %s as ClientID", meta.ClientID))
	}

	if val, ok := metadata["saslMechanism"]; ok && val != "" {
		meta.SaslMechanism = val
		k.logger.Info(fmt.Sprintf("Using %s as saslMechanism", meta.SaslMechanism))
	}

	initialOffset, err := parseInitialOffset(metadata["initialOffset"])
	if err != nil {
		return nil, err
	}
	meta.InitialOffset = initialOffset

	if val, ok := metadata["brokers"]; ok && val != "" {
		meta.Brokers = strings.Split(val, ",")
	} else {
		return nil, errors.New("kafka error: missing 'brokers' attribute")
	}

	k.logger.Info("Found brokers", "brokers", meta.Brokers)

	val, ok := metadata["authType"]
	if !ok {
		return nil, errors.New("kafka error: missing 'authType' attribute")
	}
	if val == "" {
		return nil, errors.New("kafka error: 'authType' attribute was empty")
	}
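	// Illustrative metadata shapes for each supported authType handled below;
	// the values are hypothetical placeholders, not defaults:
	//   password: {"authType": "password", "saslUsername": "user", "saslPassword": "pass"}
	//   oidc:     {"authType": "oidc", "oidcTokenEndpoint": "https://idp.example/token",
	//              "oidcClientID": "id", "oidcClientSecret": "secret"}
	//   mtls:     {"authType": "mtls", "clientCert": "<PEM>", "clientKey": "<PEM>"}
	//   none:     {"authType": "none"}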
	switch strings.ToLower(val) {
	case passwordAuthType:
		meta.AuthType = val
		if val, ok = metadata["saslUsername"]; ok && val != "" {
			meta.SaslUsername = val
		} else {
			return nil, errors.New("kafka error: missing SASL Username for authType 'password'")
		}

		if val, ok = metadata["saslPassword"]; ok && val != "" {
			meta.SaslPassword = val
		} else {
			return nil, errors.New("kafka error: missing SASL Password for authType 'password'")
		}
		k.logger.Info("Configuring SASL password authentication.")
	case oidcAuthType:
		meta.AuthType = val
		if val, ok = metadata["oidcTokenEndpoint"]; ok && val != "" {
			meta.OidcTokenEndpoint = val
		} else {
			return nil, errors.New("kafka error: missing OIDC Token Endpoint for authType 'oidc'")
		}
		if val, ok = metadata["oidcClientID"]; ok && val != "" {
			meta.OidcClientID = val
		} else {
			return nil, errors.New("kafka error: missing OIDC Client ID for authType 'oidc'")
		}
		if val, ok = metadata["oidcClientSecret"]; ok && val != "" {
			meta.OidcClientSecret = val
		} else {
			return nil, errors.New("kafka error: missing OIDC Client Secret for authType 'oidc'")
		}
		if val, ok = metadata["oidcScopes"]; ok && val != "" {
			meta.OidcScopes = strings.Split(val, ",")
		} else {
			k.logger.Info("Warning: no OIDC scopes specified, using default 'openid' scope only. This is a security risk for token reuse.")
			meta.OidcScopes = []string{"openid"}
		}
		k.logger.Info("Configuring SASL token authentication via OIDC.")
	case mtlsAuthType:
		meta.AuthType = val
		if val, ok = metadata[clientCert]; ok && val != "" {
			if !isValidPEM(val) {
				return nil, errors.New("kafka error: invalid client certificate")
			}
			meta.TLSClientCert = val
		}
		if val, ok = metadata[clientKey]; ok && val != "" {
			if !isValidPEM(val) {
				return nil, errors.New("kafka error: invalid client key")
			}
			meta.TLSClientKey = val
		}
		// clientKey and clientCert must be specified together or not at all.
		if (meta.TLSClientKey == "") != (meta.TLSClientCert == "") {
			return nil, errors.New("kafka error: clientKey or clientCert is missing")
		}
		k.logger.Info("Configuring mTLS authentication.")
	case noAuthType:
		meta.AuthType = val
		k.logger.Info("No authentication configured.")
	default:
		return nil, errors.New("kafka error: invalid value for 'authType' attribute")
	}

	if val, ok := metadata["maxMessageBytes"]; ok && val != "" {
		maxBytes, err := strconv.Atoi(val)
		if err != nil {
			return nil, fmt.Errorf("kafka error: cannot parse maxMessageBytes: %w", err)
		}

		meta.MaxMessageBytes = maxBytes
	}

	if val, ok := metadata[caCert]; ok && val != "" {
		if !isValidPEM(val) {
			return nil, errors.New("kafka error: invalid ca certificate")
		}
		meta.TLSCaCert = val
	}

	if val, ok := metadata["disableTls"]; ok && val != "" {
		boolVal, err := strconv.ParseBool(val)
		if err != nil {
			return nil, fmt.Errorf("kafka: invalid value for 'disableTls' attribute: %w", err)
		}
		meta.TLSDisable = boolVal
		if meta.TLSDisable {
			k.logger.Info("kafka: TLS connectivity to broker disabled")
		}
	}

	if val, ok := metadata[skipVerify]; ok && val != "" {
		boolVal, err := strconv.ParseBool(val)
		if err != nil {
			return nil, fmt.Errorf("kafka error: invalid value for '%s' attribute: %w", skipVerify, err)
		}
		meta.TLSSkipVerify = boolVal
		if boolVal {
			k.logger.Info("kafka: you are using 'skipVerify' to skip server certificate verification, which is unsafe!")
		}
	}

	if val, ok := metadata[consumeRetryEnabled]; ok && val != "" {
		boolVal, err := strconv.ParseBool(val)
		if err != nil {
			return nil, fmt.Errorf("kafka error: invalid value for '%s' attribute: %w", consumeRetryEnabled, err)
		}
		meta.ConsumeRetryEnabled = boolVal
	}

	if val, ok := metadata[consumeRetryInterval]; ok && val != "" {
		durationVal, err := time.ParseDuration(val)
		if err != nil {
			// A bare integer that fails duration parsing is interpreted as milliseconds.
			intVal, err := strconv.ParseUint(val, 10, 32)
			if err != nil {
				return nil, fmt.Errorf("kafka error: invalid value for '%s' attribute: %w", consumeRetryInterval, err)
			}
			durationVal = time.Duration(intVal) * time.Millisecond
		}
		meta.ConsumeRetryInterval = durationVal
	}

	if val, ok := metadata["version"]; ok && val != "" {
		version, err := sarama.ParseKafkaVersion(val)
		if err != nil {
			return nil, errors.New("kafka error: invalid kafka version")
		}
		meta.Version = version
	} else {
		meta.Version = sarama.V2_0_0_0 //nolint:nosnakecase
	}

	return &meta, nil
}
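// Example usage (an illustrative sketch, not part of the original file): the
// smallest metadata map getKafkaMetadata accepts names the brokers and an
// explicit authType; everything else falls back to the defaults seen above.
// This assumes parseInitialOffset (defined elsewhere in the package) accepts
// an empty value.
//
//	meta, err := k.getKafkaMetadata(map[string]string{
//		"brokers":  "localhost:9092",
//		"authType": "none",
//	})
//	if err != nil { /* handle */ }
//	// meta.Brokers == []string{"localhost:9092"}
//	// meta.ConsumeRetryInterval == 100*time.Millisecond
//	// meta.Version == sarama.V2_0_0_0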