
Sentence Splitting and Word Tokenization with NLTK

1. Take a paragraph as input and split it into sentences (Punkt sentence tokenizer)
import nltk.data

def splitSentence(paragraph):
    # Load the pre-trained Punkt sentence tokenizer for English
    tokenizer = nltk.data.load('tokenizers/punkt/english.pickle')
    # Split the paragraph into a list of sentences
    sentences = tokenizer.tokenize(paragraph)
    return sentences

if __name__ == '__main__':
    print(splitSentence("My name is Tom. I am a boy. I like soccer!"))

The result is ['My name is Tom.', 'I am a boy.', 'I like soccer!']
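
If the Punkt pickle has not been downloaded yet, nltk.data.load() will raise a LookupError; a one-time nltk.download('punkt') fetches it. As a minimal sketch (assuming a standard NLTK installation, where the higher-level nltk.sent_tokenize wraps the same Punkt model), the same split can be obtained without loading the pickle by hand:

import nltk

# Uncomment on first run if the Punkt data is missing:
# nltk.download('punkt')

def split_sentences(paragraph, language='english'):
    # sent_tokenize loads the Punkt tokenizer for the given language internally
    return nltk.sent_tokenize(paragraph, language=language)

if __name__ == '__main__':
    print(split_sentences("My name is Tom. I am a boy. I like soccer!"))
    # Expected: ['My name is Tom.', 'I am a boy.', 'I like soccer!']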

2. Take a sentence as input and split it into words

from nltk.tokenize import WordPunctTokenizer

def wordtokenizer(sentence):
    # Split the sentence into word and punctuation tokens
    words = WordPunctTokenizer().tokenize(sentence)
    return words

if __name__ == '__main__':
    print(wordtokenizer("My name is Tom."))
The result is ['My', 'name', 'is', 'Tom', '.']
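
WordPunctTokenizer splits on the regular expression \w+|[^\w\s]+, so every run of punctuation becomes its own token. A small comparison sketch (the sample sentence here is made up for illustration) against nltk.word_tokenize, which uses Treebank-style rules and keeps contractions such as "n't" together:

from nltk.tokenize import WordPunctTokenizer, word_tokenize

sentence = "I don't like soccer."

# WordPunctTokenizer treats the apostrophe as a separate punctuation token
print(WordPunctTokenizer().tokenize(sentence))
# ['I', 'don', "'", 't', 'like', 'soccer', '.']

# word_tokenize (Treebank-style, also requires the Punkt data) keeps the contraction
print(word_tokenize(sentence))
# ['I', 'do', "n't", 'like', 'soccer', '.']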