Mirror of https://github.com/ralsina/tartrazine.git (synced 2024-11-12 22:42:23 +00:00)
Commit 8756fbdcb4
Signed-off-by: Alexander Bezzubov <bzz@apache.org>
17 lines · 428 B · Go
// +build flex

package tokenizer

import "gopkg.in/src-d/enry.v1/internal/tokenizer/flex"

// Tokenize returns language-agnostic lexical tokens from content. The tokens
// returned should match what the Linguist library returns. At most the first
// 100KB of content are tokenized.
func Tokenize(content []byte) []string {
	if len(content) > byteLimit {
		content = content[:byteLimit]
	}

	return flex.TokenizeFlex(content)
}
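For context, a minimal usage sketch follows. It assumes the package is built with the flex build tag (e.g. go build -tags flex), that byteLimit is the roughly 100KB cap referenced in the doc comment, and that the caller lives inside the enry module itself, since internal packages cannot be imported from outside it. File and variable names below are illustrative, not part of the library.

// usage_sketch.go — illustrative only; assumes it sits inside the enry
// module and that the build runs with: go build -tags flex
package main

import (
	"fmt"

	"gopkg.in/src-d/enry.v1/internal/tokenizer"
)

func main() {
	src := []byte("package main\n\nfunc main() { println(\"hi\") }\n")

	// Tokenize truncates anything past the package's byteLimit (~100KB per
	// the doc comment) and, under the flex build tag, delegates to the
	// flex-generated scanner via flex.TokenizeFlex.
	for _, tok := range tokenizer.Tokenize(src) {
		fmt.Println(tok)
	}
}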