Lines matching refs:normalize
74 func tokenizeStream(src io.Reader, normalize bool, dict *dictionary, updateDict bool) (*indexedDocu…
132 linebuf = append(linebuf, flushBuf(len(linebuf), obuf, normalize, ld))
137 appendToDoc(&doc, dict, line, linebuf, ld, normalize, updateDict, linebuf)
141 if !normalize {
161 if normalize {
183 linebuf = append(linebuf, flushBuf(len(linebuf), obuf, normalize, ld))
185 appendToDoc(&doc, dict, line, linebuf, ld, normalize, updateDict, linebuf)
227 linebuf = append(linebuf, flushBuf(len(linebuf), obuf, normalize, ld))
230 appendToDoc(&doc, dict, line, linebuf, ld, normalize, updateDict, linebuf)
240 …exedDocument, dict *dictionary, line int, in []tokenID, ld *dictionary, normalize bool, updateDict…
241 tokens, m := stringifyLineBuf(dict, line, linebuf, ld, normalize, updateDict)
249 func stringifyLineBuf(dict *dictionary, line int, in []tokenID, ld *dictionary, normalize bool, upd…
275 txt := cleanupToken(i, ld.getWord(r), normalize)
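
Taken together, the matches trace the normalize flag from tokenizeStream through the appendToDoc and stringifyLineBuf calls down to cleanupToken, which does the per-token cleanup on the word recovered via ld.getWord. Below is a minimal, self-contained sketch of that flow, not the library's implementation: dictionary and tokenID are simplified stand-ins, tokenizeStream returns plain per-line token IDs instead of the truncated *indexedDocument shown above, cleanupToken drops the positional argument seen at line 275, and its lower-casing/punctuation trimming is an assumed normalization rule.

package main

import (
	"bufio"
	"fmt"
	"io"
	"strings"
)

// tokenID and dictionary are simplified stand-ins for the types named in the
// matches above; the real dictionary is more elaborate.
type tokenID int

type dictionary struct {
	words map[string]tokenID // word -> interned ID
	ids   []string           // ID -> word
}

func newDictionary() *dictionary {
	return &dictionary{words: map[string]tokenID{}}
}

// add interns a word and returns its ID.
func (d *dictionary) add(w string) tokenID {
	if id, ok := d.words[w]; ok {
		return id
	}
	id := tokenID(len(d.ids))
	d.words[w] = id
	d.ids = append(d.ids, w)
	return id
}

// getWord reverses the mapping, mirroring the ld.getWord(r) call at line 275.
func (d *dictionary) getWord(id tokenID) string { return d.ids[id] }

// cleanupToken applies an assumed normalization rule (lower-case, strip
// trailing punctuation) when normalize is true; otherwise the token passes
// through untouched.
func cleanupToken(w string, normalize bool) string {
	if !normalize {
		return w
	}
	return strings.TrimRight(strings.ToLower(w), ".,;:")
}

// tokenizeStream reads src line by line, optionally normalizing each token
// before interning it. Unlike the real function, it returns plain token IDs
// per line rather than an *indexedDocument.
func tokenizeStream(src io.Reader, normalize bool, dict *dictionary, updateDict bool) ([][]tokenID, error) {
	var doc [][]tokenID
	scanner := bufio.NewScanner(src)
	for scanner.Scan() {
		var linebuf []tokenID
		for _, raw := range strings.Fields(scanner.Text()) {
			txt := cleanupToken(raw, normalize)
			if updateDict {
				linebuf = append(linebuf, dict.add(txt))
			} else if id, ok := dict.words[txt]; ok {
				// When not updating the dictionary, unknown tokens are dropped.
				linebuf = append(linebuf, id)
			}
		}
		doc = append(doc, linebuf)
	}
	return doc, scanner.Err()
}

func main() {
	dict := newDictionary()
	doc, err := tokenizeStream(strings.NewReader("The MIT License.\nPermission is hereby granted,"), true, dict, true)
	if err != nil {
		panic(err)
	}
	for line, ids := range doc {
		for _, id := range ids {
			fmt.Printf("line %d: %q\n", line+1, dict.getWord(id))
		}
	}
}

The point of the sketch is the plumbing: normalize is decided once by the caller and threaded through every level so the exact and normalized tokenizations share one code path, consistent with the branches at lines 141 and 161 above.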