diff --git a/dict_1.16.go b/dict_1.16.go
index 40e4d0c..1e0f5d8 100644
--- a/dict_1.16.go
+++ b/dict_1.16.go
@@ -93,7 +93,7 @@ func (seg *Segmenter) LoadDictStr(dict string) error {
 		// 将分词添加到字典中
 		words := seg.SplitTextToWords([]byte(text))
 		token := Token{text: words, frequency: frequency, pos: pos}
-		seg.Dict.addToken(token)
+		seg.Dict.AddToken(token)
 	}

 	seg.CalcToken()
diff --git a/dict_util.go b/dict_util.go
index b8600e0..66ef5ec 100644
--- a/dict_util.go
+++ b/dict_util.go
@@ -59,7 +59,7 @@ func (seg *Segmenter) AddToken(text string, frequency float64, pos ...string) er
 	words := seg.SplitTextToWords([]byte(text))
 	token := Token{text: words, frequency: frequency, pos: po}

-	return seg.Dict.addToken(token)
+	return seg.Dict.AddToken(token)
 }

 // AddTokenForce add new text to token and force
@@ -99,7 +99,7 @@ func (seg *Segmenter) LoadDictMap(dict []map[string]string) error {
 		words := seg.SplitTextToWords([]byte(d["text"]))
 		token := Token{text: words, frequency: frequency, pos: d["pos"]}

-		seg.Dict.addToken(token)
+		seg.Dict.AddToken(token)
 	}

 	seg.CalcToken()
@@ -311,7 +311,7 @@ func (seg *Segmenter) Reader(reader io.Reader, files ...string) error {
 		// 将分词添加到字典中
 		words := seg.SplitTextToWords([]byte(text))
 		token := Token{text: words, frequency: frequency, pos: pos}
-		seg.Dict.addToken(token)
+		seg.Dict.AddToken(token)
 	}

 	return nil
diff --git a/dictionary.go b/dictionary.go
index df03744..bc770b4 100755
--- a/dictionary.go
+++ b/dictionary.go
@@ -47,8 +47,8 @@ func (dict *Dictionary) TotalFreq() float64 {
 	return dict.totalFrequency
 }

-// addToken 向词典中加入一个分词
-func (dict *Dictionary) addToken(token Token) error {
+// AddToken 向词典中加入一个分词
+func (dict *Dictionary) AddToken(token Token) error {
 	bytes := textSliceToBytes(token.text)
 	val, err := dict.trie.Get(bytes)
 	if err == nil || val > 0 {
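
For context on what the rename enables: Dictionary.AddToken becomes part of the package's exported API, and the Segmenter-level loaders and helpers shown above now forward to it instead of the unexported addToken. Below is a minimal usage sketch, assuming this is the go-ego/gse package; the import path, the LoadDict call, and the sample word and part-of-speech tag are assumptions for illustration, not part of this diff.

package main

import (
	"fmt"

	"github.com/go-ego/gse" // assumed import path for the package being patched
)

func main() {
	var seg gse.Segmenter

	// Assumed helper: load the default dictionary so seg.Dict is initialized.
	// LoadDict is not part of this diff.
	if err := seg.LoadDict(); err != nil {
		fmt.Println("load dict:", err)
		return
	}

	// Segmenter.AddToken (see the dict_util.go hunk) splits the text into
	// words, builds a Token, and forwards it to the now-exported
	// Dict.AddToken instead of the old unexported addToken.
	if err := seg.AddToken("自定义词", 5, "n"); err != nil {
		fmt.Println("add token:", err)
	}
}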