github.com/nats-io/nats-server/v2@v2.11.0-preview.2/server/subject_transform.go

     1  // Copyright 2023 The NATS Authors
     2  // Licensed under the Apache License, Version 2.0 (the "License");
     3  // you may not use this file except in compliance with the License.
     4  // You may obtain a copy of the License at
     5  //
     6  // http://www.apache.org/licenses/LICENSE-2.0
     7  //
     8  // Unless required by applicable law or agreed to in writing, software
     9  // distributed under the License is distributed on an "AS IS" BASIS,
    10  // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    11  // See the License for the specific language governing permissions and
    12  // limitations under the License.
    13  
    14  package server
    15  
    16  import (
    17  	"fmt"
    18  	"hash/fnv"
    19  	"regexp"
    20  	"strconv"
    21  	"strings"
    22  )
    23  
    24  // Subject mapping and transform setups.
    25  var (
    26  	commaSeparatorRegEx                = regexp.MustCompile(`,\s*`)
    27  	partitionMappingFunctionRegEx      = regexp.MustCompile(`{{\s*[pP]artition\s*\((.*)\)\s*}}`)
    28  	wildcardMappingFunctionRegEx       = regexp.MustCompile(`{{\s*[wW]ildcard\s*\((.*)\)\s*}}`)
    29  	splitFromLeftMappingFunctionRegEx  = regexp.MustCompile(`{{\s*[sS]plit[fF]rom[lL]eft\s*\((.*)\)\s*}}`)
    30  	splitFromRightMappingFunctionRegEx = regexp.MustCompile(`{{\s*[sS]plit[fF]rom[rR]ight\s*\((.*)\)\s*}}`)
    31  	sliceFromLeftMappingFunctionRegEx  = regexp.MustCompile(`{{\s*[sS]lice[fF]rom[lL]eft\s*\((.*)\)\s*}}`)
    32  	sliceFromRightMappingFunctionRegEx = regexp.MustCompile(`{{\s*[sS]lice[fF]rom[rR]ight\s*\((.*)\)\s*}}`)
    33  	splitMappingFunctionRegEx          = regexp.MustCompile(`{{\s*[sS]plit\s*\((.*)\)\s*}}`)
    34  )
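// For illustration, destination tokens these expressions are intended to match (the leading
// letter of each word in a mapping function name may be upper or lower case):
//
//	{{wildcard(1)}}          matches wildcardMappingFunctionRegEx, capturing "1"
//	{{partition(10,1,2)}}    matches partitionMappingFunctionRegEx, capturing "10,1,2"
//	{{splitFromLeft(1,4)}}   matches splitFromLeftMappingFunctionRegEx, capturing "1,4"
//	{{sliceFromRight(2,3)}}  matches sliceFromRightMappingFunctionRegEx, capturing "2,3"
//	{{split(1,-)}}           matches splitMappingFunctionRegEx, capturing "1,-"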
    35  
    36  // Enum for the subject mapping transform function types
    37  const (
    38  	NoTransform int16 = iota
    39  	BadTransform
    40  	Partition
    41  	Wildcard
    42  	SplitFromLeft
    43  	SplitFromRight
    44  	SliceFromLeft
    45  	SliceFromRight
    46  	Split
    47  )
    48  
    49  // Transforms for arbitrarily mapping subjects from one to another for maps, tees and filters.
    50  // These can also be used for proper mapping on wildcard exports/imports.
    51  // These will be grouped; caching and locking are assumed to be handled in the upper layers.
    52  type subjectTransform struct {
    53  	src, dest            string
    54  	dtoks                []string // destination tokens
    55  	stoks                []string // source tokens
    56  	dtokmftypes          []int16  // destination token mapping function types
    57  	dtokmftokindexesargs [][]int  // destination token mapping function array of source token index arguments
    58  	dtokmfintargs        []int32  // destination token mapping function int32 arguments
    59  	dtokmfstringargs     []string // destination token mapping function string arguments
    60  }
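// For illustration, a transform from "foo.*.*" to "bar.{{wildcard(2)}}.{{partition(5,1)}}"
// would roughly be stored as:
//
//	stoks:                ["foo", "*", "*"]
//	dtoks:                ["bar", "{{wildcard(2)}}", "{{partition(5,1)}}"]
//	dtokmftypes:          [NoTransform, Wildcard, Partition]
//	dtokmftokindexesargs: [[-1], [2], [1]]
//	dtokmfintargs:        [-1, -1, 5]
//	dtokmfstringargs:     ["", "", ""]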
    61  
    62  // SubjectTransformer transforms subjects using mappings
    63  //
    64  // This API is not part of the public API and not subject to SemVer protections
    65  type SubjectTransformer interface {
    66  	// TODO(dlc) - We could add in client here to allow for things like foo -> foo.$ACCOUNT
    67  	Match(string) (string, error)
    68  	TransformSubject(subject string) string
    69  	TransformTokenizedSubject(tokens []string) string
    70  }
    71  
    72  func NewSubjectTransformWithStrict(src, dest string, strict bool) (*subjectTransform, error) {
    73  	// strict = true for import subject mappings that need to be reversible
    74  	// (meaning can only use the Wildcard function and must use all the pwcs that are present in the source)
    75  	// No source given is equivalent to the source being ">"
    76  
    77  	if dest == _EMPTY_ {
    78  		return nil, nil
    79  	}
    80  
    81  	if src == _EMPTY_ {
    82  		src = fwcs
    83  	}
    84  
    85  	// Both entries need to be valid subjects.
    86  	sv, stokens, npwcs, hasFwc := subjectInfo(src)
    87  	dv, dtokens, dnpwcs, dHasFwc := subjectInfo(dest)
    88  
    89  	// Make sure both are valid, match fwc if present and there are no pwcs in the dest subject.
    90  	if !sv || !dv || dnpwcs > 0 || hasFwc != dHasFwc {
    91  		return nil, ErrBadSubject
    92  	}
    93  
    94  	var dtokMappingFunctionTypes []int16
    95  	var dtokMappingFunctionTokenIndexes [][]int
    96  	var dtokMappingFunctionIntArgs []int32
    97  	var dtokMappingFunctionStringArgs []string
    98  
    99  	// If the src has partial wildcards then the dest needs to have the token place markers.
   100  	if npwcs > 0 || hasFwc {
   101  		// We need to count to make sure that the dest has token holders for the pwcs.
   102  		sti := make(map[int]int)
   103  		for i, token := range stokens {
   104  			if len(token) == 1 && token[0] == pwc {
   105  				sti[len(sti)+1] = i
   106  			}
   107  		}
   108  
   109  		nphs := 0
   110  		for _, token := range dtokens {
   111  			transformType, transformArgWildcardIndexes, transformArgInt, transformArgString, err := indexPlaceHolders(token)
   112  			if err != nil {
   113  				return nil, err
   114  			}
   115  
   116  			if strict {
   117  				if transformType != NoTransform && transformType != Wildcard {
   118  					return nil, &mappingDestinationErr{token, ErrMappingDestinationNotSupportedForImport}
   119  				}
   120  			}
   121  
   122  			if npwcs == 0 {
   123  				if transformType != NoTransform {
   124  					return nil, &mappingDestinationErr{token, ErrMappingDestinationIndexOutOfRange}
   125  				}
   126  			}
   127  
   128  			if transformType == NoTransform {
   129  				dtokMappingFunctionTypes = append(dtokMappingFunctionTypes, NoTransform)
   130  				dtokMappingFunctionTokenIndexes = append(dtokMappingFunctionTokenIndexes, []int{-1})
   131  				dtokMappingFunctionIntArgs = append(dtokMappingFunctionIntArgs, -1)
   132  				dtokMappingFunctionStringArgs = append(dtokMappingFunctionStringArgs, _EMPTY_)
   133  			} else {
   134  				nphs += len(transformArgWildcardIndexes)
   135  				// Now build up our runtime mapping from dest to source tokens.
   136  				var stis []int
   137  				for _, wildcardIndex := range transformArgWildcardIndexes {
   138  					if wildcardIndex > npwcs {
   139  						return nil, &mappingDestinationErr{fmt.Sprintf("%s: [%d]", token, wildcardIndex), ErrMappingDestinationIndexOutOfRange}
   140  					}
   141  					stis = append(stis, sti[wildcardIndex])
   142  				}
   143  				dtokMappingFunctionTypes = append(dtokMappingFunctionTypes, transformType)
   144  				dtokMappingFunctionTokenIndexes = append(dtokMappingFunctionTokenIndexes, stis)
   145  				dtokMappingFunctionIntArgs = append(dtokMappingFunctionIntArgs, transformArgInt)
   146  				dtokMappingFunctionStringArgs = append(dtokMappingFunctionStringArgs, transformArgString)
   147  
   148  			}
   149  		}
   150  		if strict && nphs < npwcs {
   151  			// not all wildcards are being used in the destination
   152  			return nil, &mappingDestinationErr{dest, ErrMappingDestinationNotUsingAllWildcards}
   153  		}
   154  	} else {
   155  		// no wildcards used in the source: check that no transform functions are used in the destination
   156  		for _, token := range dtokens {
   157  			transformType, _, _, _, err := indexPlaceHolders(token)
   158  			if err != nil {
   159  				return nil, err
   160  			}
   161  
   162  			if transformType != NoTransform {
   163  				return nil, &mappingDestinationErr{token, ErrMappingDestinationIndexOutOfRange}
   164  			}
   165  		}
   166  	}
   167  
   168  	return &subjectTransform{
   169  		src:                  src,
   170  		dest:                 dest,
   171  		dtoks:                dtokens,
   172  		stoks:                stokens,
   173  		dtokmftypes:          dtokMappingFunctionTypes,
   174  		dtokmftokindexesargs: dtokMappingFunctionTokenIndexes,
   175  		dtokmfintargs:        dtokMappingFunctionIntArgs,
   176  		dtokmfstringargs:     dtokMappingFunctionStringArgs,
   177  	}, nil
   178  }
   179  
   180  func NewSubjectTransform(src, dest string) (*subjectTransform, error) {
   181  	return NewSubjectTransformWithStrict(src, dest, false)
   182  }
   183  
   184  func NewSubjectTransformStrict(src, dest string) (*subjectTransform, error) {
   185  	return NewSubjectTransformWithStrict(src, dest, true)
   186  }
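// exampleSubjectTransformUsage is an illustrative sketch of how the constructors above are
// typically used together with Match; the subjects and names here are hypothetical.
func exampleSubjectTransformUsage() {
	// Reorder the two wildcard tokens of the source subject.
	tr, err := NewSubjectTransform("orders.*.*", "orders.{{wildcard(2)}}.{{wildcard(1)}}")
	if err != nil {
		return
	}
	mapped, _ := tr.Match("orders.europe.shoes") // "orders.shoes.europe"
	_ = mapped

	// Strict transforms (used for reversible import mappings) may only use wildcard-style
	// references and must reference every wildcard present in the source.
	str, err := NewSubjectTransformStrict("events.*", "downstream.{{wildcard(1)}}")
	if err != nil {
		return
	}
	mapped, _ = str.Match("events.login") // "downstream.login"
	_ = mapped
}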
   187  
   188  func getMappingFunctionArgs(functionRegEx *regexp.Regexp, token string) []string {
   189  	commandStrings := functionRegEx.FindStringSubmatch(token)
   190  	if len(commandStrings) > 1 {
   191  		return commaSeparatorRegEx.Split(commandStrings[1], -1)
   192  	}
   193  	return nil
   194  }
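// For example, getMappingFunctionArgs(partitionMappingFunctionRegEx, "{{partition(10,1,2)}}")
// returns []string{"10", "1", "2"}; a token that does not match the regular expression returns nil.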
   195  
   196  // Helper for mapping functions that take a wildcard index and an integer as arguments
   197  func transformIndexIntArgsHelper(token string, args []string, transformType int16) (int16, []int, int32, string, error) {
   198  	if len(args) < 2 {
   199  		return BadTransform, []int{}, -1, _EMPTY_, &mappingDestinationErr{token, ErrMappingDestinationNotEnoughArgs}
   200  	}
   201  	if len(args) > 2 {
   202  		return BadTransform, []int{}, -1, _EMPTY_, &mappingDestinationErr{token, ErrMappingDestinationTooManyArgs}
   203  	}
   204  	i, err := strconv.Atoi(strings.Trim(args[0], " "))
   205  	if err != nil {
   206  		return BadTransform, []int{}, -1, _EMPTY_, &mappingDestinationErr{token, ErrMappingDestinationInvalidArg}
   207  	}
   208  	mappingFunctionIntArg, err := strconv.Atoi(strings.Trim(args[1], " "))
   209  	if err != nil {
   210  		return BadTransform, []int{}, -1, _EMPTY_, &mappingDestinationErr{token, ErrMappingDestinationInvalidArg}
   211  	}
   212  
   213  	return transformType, []int{i}, int32(mappingFunctionIntArg), _EMPTY_, nil
   214  }
   215  
   216  // Helper to ingest and index a subjectTransform destination token (e.g. $x or {{...}}).
   217  // Returns a transformation type and three function arguments: an array of source subject token indexes,
   218  // a single number (e.g. the number of partitions, or a slice size), and a string (e.g. a split delimiter).
   219  func indexPlaceHolders(token string) (int16, []int, int32, string, error) {
   220  	length := len(token)
   221  	if length > 1 {
   222  		// old $1, $2, etc... mapping format still supported to maintain backwards compatibility
   223  		if token[0] == '$' { // simple non-partition mapping
   224  			tp, err := strconv.Atoi(token[1:])
   225  			if err != nil {
   226  				// other things rely on tokens starting with $ so this is not an error, just leave it as is
   227  				return NoTransform, []int{-1}, -1, _EMPTY_, nil
   228  			}
   229  			return Wildcard, []int{tp}, -1, _EMPTY_, nil
   230  		}
   231  
   232  		// New 'mustache' style mapping
   233  		if length > 4 && token[0] == '{' && token[1] == '{' && token[length-2] == '}' && token[length-1] == '}' {
   234  			// wildcard(wildcard token index) (equivalent to $)
   235  			args := getMappingFunctionArgs(wildcardMappingFunctionRegEx, token)
   236  			if args != nil {
   237  				if len(args) == 1 && args[0] == _EMPTY_ {
   238  					return BadTransform, []int{}, -1, _EMPTY_, &mappingDestinationErr{token, ErrMappingDestinationNotEnoughArgs}
   239  				}
   240  				if len(args) == 1 {
   241  					tokenIndex, err := strconv.Atoi(strings.Trim(args[0], " "))
   242  					if err != nil {
   243  						return BadTransform, []int{}, -1, _EMPTY_, &mappingDestinationErr{token, ErrMappingDestinationInvalidArg}
   244  					}
   245  					return Wildcard, []int{tokenIndex}, -1, _EMPTY_, nil
   246  				} else {
   247  					return BadTransform, []int{}, -1, _EMPTY_, &mappingDestinationErr{token, ErrMappingDestinationTooManyArgs}
   248  				}
   249  			}
   250  
   251  			// partition(number of partitions, token1, token2, ...)
   252  			args = getMappingFunctionArgs(partitionMappingFunctionRegEx, token)
   253  			if args != nil {
   254  				if len(args) < 2 {
   255  					return BadTransform, []int{}, -1, _EMPTY_, &mappingDestinationErr{token, ErrMappingDestinationNotEnoughArgs}
   256  				}
   257  				if len(args) >= 2 {
   258  					mappingFunctionIntArg, err := strconv.Atoi(strings.Trim(args[0], " "))
   259  					if err != nil {
   260  						return BadTransform, []int{}, -1, _EMPTY_, &mappingDestinationErr{token, ErrMappingDestinationInvalidArg}
   261  					}
   262  					var numPositions = len(args[1:])
   263  					tokenIndexes := make([]int, numPositions)
   264  					for ti, t := range args[1:] {
   265  						i, err := strconv.Atoi(strings.Trim(t, " "))
   266  						if err != nil {
   267  							return BadTransform, []int{}, -1, _EMPTY_, &mappingDestinationErr{token, ErrMappingDestinationInvalidArg}
   268  						}
   269  						tokenIndexes[ti] = i
   270  					}
   271  
   272  					return Partition, tokenIndexes, int32(mappingFunctionIntArg), _EMPTY_, nil
   273  				}
   274  			}
   275  
   276  			// SplitFromLeft(token index, split position)
   277  			args = getMappingFunctionArgs(splitFromLeftMappingFunctionRegEx, token)
   278  			if args != nil {
   279  				return transformIndexIntArgsHelper(token, args, SplitFromLeft)
   280  			}
   281  
   282  			// SplitFromRight(token index, split position)
   283  			args = getMappingFunctionArgs(splitFromRightMappingFunctionRegEx, token)
   284  			if args != nil {
   285  				return transformIndexIntArgsHelper(token, args, SplitFromRight)
   286  			}
   287  
   288  			// SliceFromLeft(token index, slice size)
   289  			args = getMappingFunctionArgs(sliceFromLeftMappingFunctionRegEx, token)
   290  			if args != nil {
   291  				return transformIndexIntArgsHelper(token, args, SliceFromLeft)
   292  			}
   293  
   294  			// SliceFromRight(token index, slice size)
   295  			args = getMappingFunctionArgs(sliceFromRightMappingFunctionRegEx, token)
   296  			if args != nil {
   297  				return transformIndexIntArgsHelper(token, args, SliceFromRight)
   298  			}
   299  
   300  			// split(token index, delimiter)
   301  			args = getMappingFunctionArgs(splitMappingFunctionRegEx, token)
   302  			if args != nil {
   303  				if len(args) < 2 {
   304  					return BadTransform, []int{}, -1, _EMPTY_, &mappingDestinationErr{token, ErrMappingDestinationNotEnoughArgs}
   305  				}
   306  				if len(args) > 2 {
   307  					return BadTransform, []int{}, -1, _EMPTY_, &mappingDestinationErr{token, ErrMappingDestinationTooManyArgs}
   308  				}
   309  				i, err := strconv.Atoi(strings.Trim(args[0], " "))
   310  				if err != nil {
   311  					return BadTransform, []int{}, -1, _EMPTY_, &mappingDestinationErr{token, ErrMappingDestinationInvalidArg}
   312  				}
   313  				if strings.Contains(args[1], " ") || strings.Contains(args[1], tsep) {
   314  					return BadTransform, []int{}, -1, _EMPTY_, &mappingDestinationErr{token: token, err: ErrMappingDestinationInvalidArg}
   315  				}
   316  
   317  				return Split, []int{i}, -1, args[1], nil
   318  			}
   319  
   320  			return BadTransform, []int{}, -1, _EMPTY_, &mappingDestinationErr{token, ErrUnknownMappingDestinationFunction}
   321  		}
   322  	}
   323  	return NoTransform, []int{-1}, -1, _EMPTY_, nil
   324  }
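// Illustrative results of indexPlaceHolders, as (type, token indexes, int arg, string arg):
//
//	"foo"                     -> NoTransform, [-1], -1, ""
//	"$2"                      -> Wildcard, [2], -1, ""
//	"{{wildcard(1)}}"         -> Wildcard, [1], -1, ""
//	"{{partition(10,1,2)}}"   -> Partition, [1, 2], 10, ""
//	"{{splitFromLeft(1,4)}}"  -> SplitFromLeft, [1], 4, ""
//	"{{sliceFromRight(2,3)}}" -> SliceFromRight, [2], 3, ""
//	"{{split(1,-)}}"          -> Split, [1], -1, "-"
//	"{{bogus(1)}}"            -> BadTransform with ErrUnknownMappingDestinationFunction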
   325  
   326  // Helper function to tokenize subjects with partial wildcards into formal transform destinations.
   327  // e.g. "foo.*.*" -> "foo.$1.$2"
   328  func transformTokenize(subject string) string {
   329  	// We need to make the appropriate markers for the wildcards etc.
   330  	i := 1
   331  	var nda []string
   332  	for _, token := range strings.Split(subject, tsep) {
   333  		if token == pwcs {
   334  			nda = append(nda, fmt.Sprintf("$%d", i))
   335  			i++
   336  		} else {
   337  			nda = append(nda, token)
   338  		}
   339  	}
   340  	return strings.Join(nda, tsep)
   341  }
   342  
   343  // Helper function to go from a transform destination to a subject with partial wildcards and an ordered list of placeholders.
   344  // E.g.:
   345  //
   346  //		"bar" -> "bar", []
   347  //		"foo.$2.$1" -> "foo.*.*", ["$2","$1"]
   348  //		"foo.{{wildcard(2)}}.{{wildcard(1)}}" -> "foo.*.*", ["{{wildcard(2)}}","{{wildcard(1)}}"]
   349  func transformUntokenize(subject string) (string, []string) {
   350  	var phs []string
   351  	var nda []string
   352  
   353  	for _, token := range strings.Split(subject, tsep) {
   354  		if args := getMappingFunctionArgs(wildcardMappingFunctionRegEx, token); (len(token) > 1 && token[0] == '$' && token[1] >= '1' && token[1] <= '9') || (len(args) == 1 && args[0] != _EMPTY_) {
   355  			phs = append(phs, token)
   356  			nda = append(nda, pwcs)
   357  		} else {
   358  			nda = append(nda, token)
   359  		}
   360  	}
   361  	return strings.Join(nda, tsep), phs
   362  }
   363  
   364  func tokenizeSubject(subject string) []string {
   365  	// Tokenize the subject.
   366  	tsa := [32]string{}
   367  	tts := tsa[:0]
   368  	start := 0
   369  	for i := 0; i < len(subject); i++ {
   370  		if subject[i] == btsep {
   371  			tts = append(tts, subject[start:i])
   372  			start = i + 1
   373  		}
   374  	}
   375  	tts = append(tts, subject[start:])
   376  	return tts
   377  }
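// For example, tokenizeSubject("foo.bar.baz") returns ["foo", "bar", "baz"]; empty tokens are
// preserved, so "foo..baz" returns ["foo", "", "baz"].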
   378  
   379  // Match takes a literal published subject that is associated with a client and, if it matches the
   380  // transform's source, returns the transformed subject.
   381  //
   382  // This API is not part of the public API and not subject to SemVer protections
   383  func (tr *subjectTransform) Match(subject string) (string, error) {
   384  	// Special case: the source matches anything and the subjectTransform is a no-op. May not be a legal config
   385  	// for some features, but the specific validations are made at subjectTransform creation time.
   386  	if (tr.src == fwcs || tr.src == _EMPTY_) && (tr.dest == fwcs || tr.dest == _EMPTY_) {
   387  		return subject, nil
   388  	}
   389  
   390  	tts := tokenizeSubject(subject)
   391  
   392  	// TODO(jnm): optimization -> not sure this is actually needed but was there in initial code
   393  	if !isValidLiteralSubject(tts) {
   394  		return _EMPTY_, ErrBadSubject
   395  	}
   396  
   397  	if (tr.src == _EMPTY_ || tr.src == fwcs) || isSubsetMatch(tts, tr.src) {
   398  		return tr.TransformTokenizedSubject(tts), nil
   399  	}
   400  	return _EMPTY_, ErrNoTransforms
   401  }
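// For example, with a transform from "metrics.>" to "archive.>", Match("metrics.cpu.load")
// returns "archive.cpu.load", while Match("other.cpu.load") returns ErrNoTransforms.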
   402  
   403  // TransformSubject transforms a subject
   404  //
   405  // This API is not part of the public API and not subject to SemVer protection
   406  func (tr *subjectTransform) TransformSubject(subject string) string {
   407  	return tr.TransformTokenizedSubject(tokenizeSubject(subject))
   408  }
   409  
   410  func (tr *subjectTransform) getHashPartition(key []byte, numBuckets int) string {
   411  	h := fnv.New32a()
   412  	_, _ = h.Write(key)
   413  
   414  	return strconv.Itoa(int(h.Sum32() % uint32(numBuckets)))
   415  }
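// For example, with numBuckets = 5 the returned partition number is always one of "0" through "4",
// and the same key always maps to the same bucket (FNV-1a 32-bit hash modulo numBuckets).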
   416  
   417  // Do a subjectTransform on the tokenized subject to produce the dest subject.
   418  func (tr *subjectTransform) TransformTokenizedSubject(tokens []string) string {
   419  	if len(tr.dtokmftypes) == 0 {
   420  		return tr.dest
   421  	}
   422  
   423  	var b strings.Builder
   424  
   425  	// We need to walk the destination tokens and create the mapped subject, pulling in source tokens or applying mapping functions.
   426  	li := len(tr.dtokmftypes) - 1
   427  	for i, mfType := range tr.dtokmftypes {
   428  		if mfType == NoTransform {
   429  			// Break if fwc
   430  			if len(tr.dtoks[i]) == 1 && tr.dtoks[i][0] == fwc {
   431  				break
   432  			}
   433  			b.WriteString(tr.dtoks[i])
   434  		} else {
   435  			switch mfType {
   436  			case Partition:
   437  				var (
   438  					_buffer       [64]byte
   439  					keyForHashing = _buffer[:0]
   440  				)
   441  				for _, sourceToken := range tr.dtokmftokindexesargs[i] {
   442  					keyForHashing = append(keyForHashing, []byte(tokens[sourceToken])...)
   443  				}
   444  				b.WriteString(tr.getHashPartition(keyForHashing, int(tr.dtokmfintargs[i])))
   445  			case Wildcard: // simple substitution
   446  				b.WriteString(tokens[tr.dtokmftokindexesargs[i][0]])
   447  			case SplitFromLeft:
   448  				sourceToken := tokens[tr.dtokmftokindexesargs[i][0]]
   449  				sourceTokenLen := len(sourceToken)
   450  				position := int(tr.dtokmfintargs[i])
   451  				if position > 0 && position < sourceTokenLen {
   452  					b.WriteString(sourceToken[:position])
   453  					b.WriteString(tsep)
   454  					b.WriteString(sourceToken[position:])
   455  				} else { // too small to split at the requested position: don't split
   456  					b.WriteString(sourceToken)
   457  				}
   458  			case SplitFromRight:
   459  				sourceToken := tokens[tr.dtokmftokindexesargs[i][0]]
   460  				sourceTokenLen := len(sourceToken)
   461  				position := int(tr.dtokmfintargs[i])
   462  				if position > 0 && position < sourceTokenLen {
   463  					b.WriteString(sourceToken[:sourceTokenLen-position])
   464  					b.WriteString(tsep)
   465  					b.WriteString(sourceToken[sourceTokenLen-position:])
   466  				} else { // too small to split at the requested position: don't split
   467  					b.WriteString(sourceToken)
   468  				}
   469  			case SliceFromLeft:
   470  				sourceToken := tokens[tr.dtokmftokindexesargs[i][0]]
   471  				sourceTokenLen := len(sourceToken)
   472  				sliceSize := int(tr.dtokmfintargs[i])
   473  				if sliceSize > 0 && sliceSize < sourceTokenLen {
   474  					for i := 0; i+sliceSize <= sourceTokenLen; i += sliceSize {
   475  						if i != 0 {
   476  							b.WriteString(tsep)
   477  						}
   478  						b.WriteString(sourceToken[i : i+sliceSize])
   479  						if i+sliceSize != sourceTokenLen && i+sliceSize+sliceSize > sourceTokenLen {
   480  							b.WriteString(tsep)
   481  							b.WriteString(sourceToken[i+sliceSize:])
   482  							break
   483  						}
   484  					}
   485  				} else { // too small to slice at the requested size: don't slice
   486  					b.WriteString(sourceToken)
   487  				}
   488  			case SliceFromRight:
   489  				sourceToken := tokens[tr.dtokmftokindexesargs[i][0]]
   490  				sourceTokenLen := len(sourceToken)
   491  				sliceSize := int(tr.dtokmfintargs[i])
   492  				if sliceSize > 0 && sliceSize < sourceTokenLen {
   493  					remainder := sourceTokenLen % sliceSize
   494  					if remainder > 0 {
   495  						b.WriteString(sourceToken[:remainder])
   496  						b.WriteString(tsep)
   497  					}
   498  					for i := remainder; i+sliceSize <= sourceTokenLen; i += sliceSize {
   499  						b.WriteString(sourceToken[i : i+sliceSize])
   500  						if i+sliceSize < sourceTokenLen {
   501  							b.WriteString(tsep)
   502  						}
   503  					}
   504  				} else { // too small to slice at the requested size: don't slice
   505  					b.WriteString(sourceToken)
   506  				}
   507  			case Split:
   508  				sourceToken := tokens[tr.dtokmftokindexesargs[i][0]]
   509  				splits := strings.Split(sourceToken, tr.dtokmfstringargs[i])
   510  				for j, split := range splits {
   511  					if split != _EMPTY_ {
   512  						b.WriteString(split)
   513  					}
   514  					if j < len(splits)-1 && splits[j+1] != _EMPTY_ && !(j == 0 && split == _EMPTY_) {
   515  						b.WriteString(tsep)
   516  					}
   517  				}
   518  			}
   519  		}
   520  
   521  		if i < li {
   522  			b.WriteByte(btsep)
   523  		}
   524  	}
   525  
   526  	// We may have more source tokens available. This happens with ">".
   527  	if tr.dtoks[len(tr.dtoks)-1] == fwcs {
   528  		for sli, i := len(tokens)-1, len(tr.stoks)-1; i < len(tokens); i++ {
   529  			b.WriteString(tokens[i])
   530  			if i < sli {
   531  				b.WriteByte(btsep)
   532  			}
   533  		}
   534  	}
   535  	return b.String()
   536  }
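// Illustrative outputs, assuming the source is "foo.*" and the published subject is "foo.1234567890":
//
//	dest "foo.{{splitFromLeft(1,4)}}"  -> "foo.1234.567890"
//	dest "foo.{{splitFromRight(1,4)}}" -> "foo.123456.7890"
//	dest "foo.{{sliceFromLeft(1,3)}}"  -> "foo.123.456.789.0"
//	dest "foo.{{sliceFromRight(1,3)}}" -> "foo.1.234.567.890"
//	dest "foo.{{split(1,5)}}"          -> "foo.1234.67890"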
   537  
   538  // Reverse a subjectTransform.
   539  func (tr *subjectTransform) reverse() *subjectTransform {
   540  	if len(tr.dtokmftokindexesargs) == 0 {
   541  		rtr, _ := NewSubjectTransformStrict(tr.dest, tr.src)
   542  		return rtr
   543  	}
   544  	// If we are here we need to dynamically get the correct reverse
   545  	// of this subjectTransform.
   546  	nsrc, phs := transformUntokenize(tr.dest)
   547  	var nda []string
   548  	for _, token := range tr.stoks {
   549  		if token == pwcs {
   550  			if len(phs) == 0 {
   551  				// TODO(dlc) - Should not happen
   552  				return nil
   553  			}
   554  			nda = append(nda, phs[0])
   555  			phs = phs[1:]
   556  		} else {
   557  			nda = append(nda, token)
   558  		}
   559  	}
   560  	ndest := strings.Join(nda, tsep)
   561  	rtr, _ := NewSubjectTransformStrict(nsrc, ndest)
   562  	return rtr
   563  }
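// For example, reversing a transform from "foo.*.*" to "bar.$2.$1" yields a transform from
// "bar.*.*" to "foo.$2.$1", so a subject that was mapped to "bar.b.a" maps back to "foo.a.b".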
   564  
   565  // Returns relevant info regarding the subject.
   566  // Returns valid, tokens, num pwcs, has fwc.
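// For example, subjectInfo("foo.*.>") returns (true, ["foo", "*", ">"], 1, true), while
// subjectInfo("foo..bar") returns (false, nil, 0, false) because of the empty token.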
   567  func subjectInfo(subject string) (bool, []string, int, bool) {
   568  	if subject == "" {
   569  		return false, nil, 0, false
   570  	}
   571  	npwcs := 0
   572  	sfwc := false
   573  	tokens := strings.Split(subject, tsep)
   574  	for _, t := range tokens {
   575  		if len(t) == 0 || sfwc {
   576  			return false, nil, 0, false
   577  		}
   578  		if len(t) > 1 {
   579  			continue
   580  		}
   581  		switch t[0] {
   582  		case fwc:
   583  			sfwc = true
   584  		case pwc:
   585  			npwcs++
   586  		}
   587  	}
   588  	return true, tokens, npwcs, sfwc
   589  }