tartrazine/internal/tokenizer/tokenize_c.go
2020-03-19 17:31:29 +01:00

16 lines
417 B
Go

//go:build flex
// +build flex

package tokenizer
import "github.com/go-enry/go-enry/v2/internal/tokenizer/flex"
// Tokenize returns the lexical tokens found in content, matching what the
// Linguist library produces. Anything past the first ByteLimit bytes of
// content is ignored.
func Tokenize(content []byte) []string {
	// Truncate oversized input before handing it to the flex-based scanner.
	if limit := ByteLimit; len(content) > limit {
		content = content[:limit]
	}
	return flex.TokenizeFlex(content)
}