From eb37e048da73aa5016ab9da4d7bbb1e9b8e53176 Mon Sep 17 00:00:00 2001
From: fxsjy
Date: Wed, 8 Jan 2020 16:04:30 +0800
Subject: [PATCH] update version to 0.41

---
 Changelog         | 4 ++++
 README.md         | 0
 jieba/__init__.py | 0
 jieba/_compat.py  | 0
 setup.py          | 2 +-
 5 files changed, 5 insertions(+), 1 deletion(-)
 mode change 100755 => 100644 README.md
 mode change 100755 => 100644 jieba/__init__.py
 mode change 100755 => 100644 jieba/_compat.py
 mode change 100755 => 100644 setup.py

diff --git a/Changelog b/Changelog
index 7141e92..e385f0b 100644
--- a/Changelog
+++ b/Changelog
@@ -1,3 +1,7 @@
+2020-1-8: version 0.41
+1. Made enabling paddle mode more user-friendly
+2. Fixed the bug where cut_all mode did not support mixed Chinese-English words
+
 2019-12-25: version 0.40
 1. Added support for paddle-based deep learning segmentation mode (use_paddle=True); by @JesseyXujin, @xyzhou-puck
 2. Fixed the issue where the add_word method of a custom Tokenizer instance pointed to the global instance; by @linhx13
diff --git a/README.md b/README.md
old mode 100755
new mode 100644
diff --git a/jieba/__init__.py b/jieba/__init__.py
old mode 100755
new mode 100644
diff --git a/jieba/_compat.py b/jieba/_compat.py
old mode 100755
new mode 100644
diff --git a/setup.py b/setup.py
old mode 100755
new mode 100644
index 4bd4a5b..df926f5
--- a/setup.py
+++ b/setup.py
@@ -43,7 +43,7 @@ GitHub: https://github.com/fxsjy/jieba
 """
 
 setup(name='jieba',
-      version='0.40',
+      version='0.41',
       description='Chinese Words Segmentation Utilities',
       long_description=LONGDOC,
       author='Sun, Junyi',
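
A minimal usage sketch, not part of the patch, illustrating the features the 0.41 and 0.40 changelog entries above refer to: paddle-mode segmentation (use_paddle=True), full mode (cut_all=True) on mixed Chinese-English text, and add_word on a per-instance Tokenizer. It assumes jieba 0.41 is installed and, for paddle mode, that the paddlepaddle package is available; the sample sentences are arbitrary.

import jieba

# Enabling paddle mode; version 0.41 makes this step friendlier.
# Assumes paddlepaddle is installed in the environment.
jieba.enable_paddle()

text = "我来到北京清华大学studying NLP"

# Paddle-based deep-learning segmentation (added in 0.40).
print("/".join(jieba.cut(text, use_paddle=True)))

# Full mode; 0.41 fixes handling of mixed Chinese-English words here.
print("/".join(jieba.cut(text, cut_all=True)))

# A separate Tokenizer keeps its own dictionary; add_word on the instance
# no longer affects the global tokenizer (fixed in 0.40).
tk = jieba.Tokenizer()
tk.add_word("清华大学")
print("/".join(tk.cut("我来到北京清华大学")))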