github.com/zhyoulun/cilium@v1.6.12/pkg/policy/api/kafka.go

// Copyright 2016-2017 Authors of Cilium
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package api

import (
	"fmt"
	"regexp"
	"strings"
)

// PortRuleKafka is a list of Kafka protocol constraints. All fields are
// optional; if all fields are empty or missing, the rule will match all
// Kafka messages.
type PortRuleKafka struct {
	// Role is a case-insensitive string that describes a group of API keys
	// necessary to perform certain higher-level Kafka operations such as "produce"
	// or "consume". A Role automatically expands into all APIKeys required
	// to perform the specified higher-level operation.
	//
	// The following values are supported:
	//  - "produce": Allow producing to the topics specified in the rule
	//  - "consume": Allow consuming from the topics specified in the rule
	//
	// This field is incompatible with the APIKey field, i.e. APIKey and Role
	// cannot both be specified in the same rule.
	//
	// If omitted or empty, and if APIKey is not specified, then all keys are
	// allowed.
	//
	// +optional
	Role string `json:"role,omitempty"`

	// APIKey is a case-insensitive string matched against the key of a
	// request, e.g. "produce", "fetch", "createtopic", "deletetopic", etc.
	// Reference: https://kafka.apache.org/protocol#protocol_api_keys
	//
	// If omitted or empty, and if Role is not specified, then all keys are allowed.
	//
	// +optional
	APIKey string `json:"apiKey,omitempty"`

	// APIVersion is the version matched against the api version of the
	// Kafka message. If set, it has to be a string representing a positive
	// integer.
	//
	// If omitted or empty, all versions are allowed.
	//
	// +optional
	APIVersion string `json:"apiVersion,omitempty"`

	// ClientID is the client identifier as provided in the request.
	//
	// From Kafka protocol documentation:
	// This is a user supplied identifier for the client application. The
	// user can use any identifier they like and it will be used when
	// logging errors, monitoring aggregates, etc. For example, one might
	// want to monitor not just the requests per second overall, but the
	// number coming from each client application (each of which could
	// reside on multiple servers). This id acts as a logical grouping
	// across all requests from a particular client.
	//
	// If omitted or empty, all client identifiers are allowed.
	//
	// +optional
	ClientID string `json:"clientID,omitempty"`

	// Topic is the topic name contained in the message. If a Kafka request
	// contains multiple topics, then all topics must be allowed or the
	// message will be rejected.
	//
	// This constraint is ignored if the matched request message type
	// doesn't contain any topic. The maximum length of a Topic is 249
	// characters as per the recent Kafka spec, and the allowed characters
	// are a-z, A-Z, 0-9, '-', '.' and '_'.
	// Older Kafka versions allowed topic names of up to 255 characters,
	// but in Kafka 0.10 the limit was reduced from 255 to 249. For
	// compatibility reasons we are using 255.
	//
	// If omitted or empty, all topics are allowed.
	//
	// +optional
	Topic string `json:"topic,omitempty"`

	// --------------------------------------------------------------------
	// Private fields. These fields are used internally and are not exposed
	// via the API.

	// apiKeyInt is the list of all low-level apiKeys into which the Role
	// of this rule expands.
	apiKeyInt KafkaRole

	// apiVersionInt is the integer representation of APIVersion
	apiVersionInt *int16
}

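// The sketch below is illustrative only and not part of the Cilium API: it
// shows how a PortRuleKafka could be populated in Go. The variable name and
// the topic "example-topic" are hypothetical. With Role set, the rule relies
// on MapRoleToAPIKey (defined further down) to expand into low-level apiKeys.
var examplePortRuleKafka = PortRuleKafka{
	Role:  ProduceRole, // expands to produce, metadata and apiversions
	Topic: "example-topic",
}
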
// List of Kafka apiKeys which have a topic in their
// request
const (
	ProduceKey              = 0
	FetchKey                = 1
	OffsetsKey              = 2
	MetadataKey             = 3
	LeaderAndIsr            = 4
	StopReplica             = 5
	UpdateMetadata          = 6
	OffsetCommitKey         = 8
	OffsetFetchKey          = 9
	FindCoordinatorKey      = 10
	JoinGroupKey            = 11
	CreateTopicsKey         = 19
	DeleteTopicsKey         = 20
	DeleteRecordsKey        = 21
	OffsetForLeaderEpochKey = 23
	AddPartitionsToTxnKey   = 24
	WriteTxnMarkersKey      = 27
	TxnOffsetCommitKey      = 28
	AlterReplicaLogDirsKey  = 34
	DescribeLogDirsKey      = 35
	CreatePartitionsKey     = 37
)

// List of Kafka apiKeys which are not associated with
// any topic
const (
	HeartbeatKey   = 12
	LeaveGroupKey  = 13
	SyncgroupKey   = 14
	APIVersionsKey = 18
)

// List of Kafka Roles
const (
	ProduceRole = "produce"
	ConsumeRole = "consume"
)

// KafkaAPIKeyMap is the map of all allowed Kafka API key names to their
// numeric key values.
// Reference: https://kafka.apache.org/protocol#protocol_api_keys
var KafkaAPIKeyMap = map[string]int16{
	"produce":              0,  /* Produce */
	"fetch":                1,  /* Fetch */
	"offsets":              2,  /* Offsets */
	"metadata":             3,  /* Metadata */
	"leaderandisr":         4,  /* LeaderAndIsr */
	"stopreplica":          5,  /* StopReplica */
	"updatemetadata":       6,  /* UpdateMetadata */
	"controlledshutdown":   7,  /* ControlledShutdown */
	"offsetcommit":         8,  /* OffsetCommit */
	"offsetfetch":          9,  /* OffsetFetch */
	"findcoordinator":      10, /* FindCoordinator */
	"joingroup":            11, /* JoinGroup */
	"heartbeat":            12, /* Heartbeat */
	"leavegroup":           13, /* LeaveGroup */
	"syncgroup":            14, /* SyncGroup */
	"describegroups":       15, /* DescribeGroups */
	"listgroups":           16, /* ListGroups */
	"saslhandshake":        17, /* SaslHandshake */
	"apiversions":          18, /* ApiVersions */
	"createtopics":         19, /* CreateTopics */
	"deletetopics":         20, /* DeleteTopics */
	"deleterecords":        21, /* DeleteRecords */
	"initproducerid":       22, /* InitProducerId */
	"offsetforleaderepoch": 23, /* OffsetForLeaderEpoch */
	"addpartitionstotxn":   24, /* AddPartitionsToTxn */
	"addoffsetstotxn":      25, /* AddOffsetsToTxn */
	"endtxn":               26, /* EndTxn */
	"writetxnmarkers":      27, /* WriteTxnMarkers */
	"txnoffsetcommit":      28, /* TxnOffsetCommit */
	"describeacls":         29, /* DescribeAcls */
	"createacls":           30, /* CreateAcls */
	"deleteacls":           31, /* DeleteAcls */
	"describeconfigs":      32, /* DescribeConfigs */
	"alterconfigs":         33, /* AlterConfigs */
}

// KafkaReverseAPIKeyMap is the map of all allowed Kafka API key values to
// their names.
// Reference: https://kafka.apache.org/protocol#protocol_api_keys
var KafkaReverseAPIKeyMap = map[int16]string{
	0:  "produce",              /* Produce */
	1:  "fetch",                /* Fetch */
	2:  "offsets",              /* Offsets */
	3:  "metadata",             /* Metadata */
	4:  "leaderandisr",         /* LeaderAndIsr */
	5:  "stopreplica",          /* StopReplica */
	6:  "updatemetadata",       /* UpdateMetadata */
	7:  "controlledshutdown",   /* ControlledShutdown */
	8:  "offsetcommit",         /* OffsetCommit */
	9:  "offsetfetch",          /* OffsetFetch */
	10: "findcoordinator",      /* FindCoordinator */
	11: "joingroup",            /* JoinGroup */
	12: "heartbeat",            /* Heartbeat */
	13: "leavegroup",           /* LeaveGroup */
	14: "syncgroup",            /* SyncGroup */
	15: "describegroups",       /* DescribeGroups */
	16: "listgroups",           /* ListGroups */
	17: "saslhandshake",        /* SaslHandshake */
	18: "apiversions",          /* ApiVersions */
	19: "createtopics",         /* CreateTopics */
	20: "deletetopics",         /* DeleteTopics */
	21: "deleterecords",        /* DeleteRecords */
	22: "initproducerid",       /* InitProducerId */
	23: "offsetforleaderepoch", /* OffsetForLeaderEpoch */
	24: "addpartitionstotxn",   /* AddPartitionsToTxn */
	25: "addoffsetstotxn",      /* AddOffsetsToTxn */
	26: "endtxn",               /* EndTxn */
	27: "writetxnmarkers",      /* WriteTxnMarkers */
	28: "txnoffsetcommit",      /* TxnOffsetCommit */
	29: "describeacls",         /* DescribeAcls */
	30: "createacls",           /* CreateAcls */
	31: "deleteacls",           /* DeleteAcls */
	32: "describeconfigs",      /* DescribeConfigs */
	33: "alterconfigs",         /* AlterConfigs */
}

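// Illustrative sketch, not part of the Cilium API: the two maps above allow a
// case-insensitive APIKey string from a rule to be resolved to its numeric
// wire value. exampleLookupAPIKey is a hypothetical helper.
func exampleLookupAPIKey(name string) (int16, bool) {
	key, ok := KafkaAPIKeyMap[strings.ToLower(name)]
	return key, ok
}
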
// KafkaRole is the list of all low-level apiKeys to
// be expanded as per the value of Role
type KafkaRole []int16

// KafkaMaxTopicLen is the maximum character length of a topic name.
// Older Kafka versions allowed topic names of up to 255 characters; in Kafka
// 0.10 the limit was reduced from 255 to 249. For compatibility reasons we
// are using 255.
const (
	KafkaMaxTopicLen = 255
)

// KafkaTopicValidChar is a one-time regex generation of all allowed characters
// in a Kafka topic name.
var KafkaTopicValidChar = regexp.MustCompile(`^[a-zA-Z0-9._-]+$`)

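// Illustrative sketch, not part of the Cilium API: a topic name taken from a
// rule could be checked against the limits above. exampleTopicIsValid is a
// hypothetical helper; the package's real validation lives elsewhere.
func exampleTopicIsValid(topic string) bool {
	return topic != "" &&
		len(topic) <= KafkaMaxTopicLen &&
		KafkaTopicValidChar.MatchString(topic)
}
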
// CheckAPIKeyRole checks the apiKey value in the request, and returns true if
// it is allowed, false otherwise.
func (kr *PortRuleKafka) CheckAPIKeyRole(kind int16) bool {
	// wildcard expression
	if len(kr.apiKeyInt) == 0 {
		return true
	}

	// Check kind
	for _, apiKey := range kr.apiKeyInt {
		if apiKey == kind {
			return true
		}
	}
	return false
}

// GetAPIVersion returns the APIVersion as an integer; the returned bool is
// true if any API version is allowed.
func (kr *PortRuleKafka) GetAPIVersion() (int16, bool) {
	if kr.apiVersionInt == nil {
		return 0, true
	}

	return *kr.apiVersionInt, false
}

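// Illustrative sketch, not part of the Cilium API: CheckAPIKeyRole and
// GetAPIVersion can be combined to decide whether a single request matches a
// rule. exampleRequestAllowed and its parameters are hypothetical.
func exampleRequestAllowed(rule *PortRuleKafka, reqAPIKey, reqAPIVersion int16) bool {
	if !rule.CheckAPIKeyRole(reqAPIKey) {
		return false
	}
	version, isWildcard := rule.GetAPIVersion()
	return isWildcard || version == reqAPIVersion
}
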
// MapRoleToAPIKey maps the Role to the low-level set of APIKeys for that role
func (kr *PortRuleKafka) MapRoleToAPIKey() error {
	// Expand the kr.apiKeyInt array based on the Role.
	// For the produce role, we need to add the mandatory apiKeys produce,
	// metadata and apiversions. For consume, we need to add the mandatory
	// apiKeys fetch, offsets, offsetcommit, offsetfetch, apiversions,
	// metadata, findcoordinator, joingroup, heartbeat, leavegroup and
	// syncgroup.
	switch strings.ToLower(kr.Role) {
	case ProduceRole:
		kr.apiKeyInt = KafkaRole{ProduceKey, MetadataKey, APIVersionsKey}
		return nil
	case ConsumeRole:
		kr.apiKeyInt = KafkaRole{FetchKey, OffsetsKey, MetadataKey,
			OffsetCommitKey, OffsetFetchKey, FindCoordinatorKey,
			JoinGroupKey, HeartbeatKey, LeaveGroupKey, SyncgroupKey, APIVersionsKey}
		return nil
	default:
		return fmt.Errorf("Invalid Kafka Role %s", kr.Role)
	}
}
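
// Illustrative sketch, not part of the Cilium API: a consume rule must first
// be expanded with MapRoleToAPIKey before CheckAPIKeyRole can be used; after
// expansion a "fetch" request (FetchKey) is allowed while "produce"
// (ProduceKey) is not. exampleConsumeRuleMatchesFetch is a hypothetical helper.
func exampleConsumeRuleMatchesFetch() (bool, error) {
	rule := PortRuleKafka{Role: ConsumeRole}
	if err := rule.MapRoleToAPIKey(); err != nil {
		return false, err
	}
	return rule.CheckAPIKeyRole(FetchKey) && !rule.CheckAPIKeyRole(ProduceKey), nil
}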