doc: improve API doc based on review feedback

Signed-off-by: Alexander Bezzubov <bzz@apache.org>
This commit is contained in:
Alexander Bezzubov 2019-04-16 13:05:45 +02:00
parent ada6f15c93
commit 6c7b91cb91
No known key found for this signature in database
GPG Key ID: 8039F5787EFCD05D
2 changed files with 5 additions and 5 deletions

View File

@@ -8,9 +8,11 @@ import (
"gopkg.in/src-d/enry.v1/regex"
)
// Tokenize returns lexical tokens from content. The tokens returned should match what
// the Linguist library returns (but they are not, until https://github.com/src-d/enry/issues/193).
// At most the first ByteLimit bytes of content are tokenized.
// Tokenize returns lexical tokens from content. The tokens returned match what
// the Linguist library returns. At most the first ByteLimit bytes of content are tokenized.
//
// BUG: Until https://github.com/src-d/enry/issues/193 is resolved, there are some
// differences between this function and the Linguist output.
func Tokenize(content []byte) []string {
if len(content) > ByteLimit {
content = content[:ByteLimit]

View File

@@ -6,8 +6,6 @@ import "gopkg.in/src-d/enry.v1/internal/tokenizer/flex"
// Tokenize returns lexical tokens from content. The tokens returned match what
// the Linguist library returns. At most the first ByteLimit bytes of content are tokenized.
// Splitting at a byte offset means it might partition a last multibyte unicode character
// in the middle of a token (but it should not affect results).
func Tokenize(content []byte) []string {
if len(content) > ByteLimit {
content = content[:ByteLimit]