|
|
|
@ -5,13 +5,13 @@ from whoosh.index import create_in,open_dir
|
|
|
|
|
from whoosh.fields import *
|
|
|
|
|
from whoosh.qparser import QueryParser
|
|
|
|
|
|
|
|
|
|
from jieba.analyse import ChineseAnalyzer
|
|
|
|
|
from jieba.analyse.analyzer import ChineseAnalyzer
|
|
|
|
|
|
|
|
|
|
analyzer = ChineseAnalyzer()
|
|
|
|
|
|
|
|
|
|
schema = Schema(title=TEXT(stored=True), path=ID(stored=True), content=TEXT(stored=True, analyzer=analyzer))
|
|
|
|
|
ix = create_in("tmp", schema) # for create new index
|
|
|
|
|
#ix = open_dir("tmp") # for read only
|
|
|
|
|
#ix = open_dir("tmp", schema=schema) # for read only
|
|
|
|
|
writer = ix.writer()
|
|
|
|
|
|
|
|
|
|
writer.add_document(
|
|
|
|
@ -49,12 +49,12 @@ searcher = ix.searcher()
|
|
|
|
|
# Parse user keywords against the analyzed "content" field and print the
# highlighted matches for each query.
parser = QueryParser("content", schema=ix.schema)

for keyword in ("水果世博园", "你", "first", "中文", "交换机", "交换"):
    print("result of ", keyword)
    q = parser.parse(keyword)
    results = searcher.search(q)
    for hit in results:
        # highlights() returns the stored content with the matched
        # terms wrapped in emphasis markup.
        print(hit.highlights("content"))
    print("=" * 10)
|
|
|
|
for t in analyzer("我的好朋友是李明;我爱北京天安门;IBM和Microsoft; I have a dream."):
|
|
|
|
|
print t.text
|
|
|
|
|
print(t.text)
|
|
|
|
|