gitlab.com/thomasboni/go-enry/v2@v2.8.3-0.20220418031202-30b0d7a3de98/internal/tokenizer/tokenize_c.go (about)

//go:build flex
// +build flex
     2  
     3  package tokenizer
     4  
     5  import "gitlab.com/thomasboni/go-enry/v2/internal/tokenizer/flex"
     6  
     7  // Tokenize returns lexical tokens from content. The tokens returned match what
     8  // the Linguist library returns. At most the first ByteLimit bytes of content are tokenized.
     9  func Tokenize(content []byte) []string {
    10  	if len(content) > ByteLimit {
    11  		content = content[:ByteLimit]
    12  	}
    13  
    14  	return flex.TokenizeFlex(content)
    15  }