// +build flex
package tokenizer
import "gopkg.in/src-d/enry.v1/internal/tokenizer/flex"
// Tokenize returns language-agnostic lexical tokens from content. The tokens
|
|
|
|
// returned should match what the Linguist library returns. At most the first
|
|
|
|
// 100KB of content are tokenized.
|
|
|
|
func Tokenize(content []byte) []string {
|
|
|
|
if len(content) > byteLimit {
|
|
|
|
content = content[:byteLimit]
|
2019-03-24 01:06:19 +00:00
|
|
|
}
|
|
|
|
|
2019-03-24 17:55:05 +00:00
|
|
|
return flex.TokenizeFlex(content)
|
2019-03-24 01:06:19 +00:00
|
|
|
}