Fixing encoding issue #4

maxirmx 2015-10-21 20:45:56 +03:00
parent fcbfff043f
commit f07e4accd7
1 changed file with 1 addition and 4 deletions


@@ -19,10 +19,7 @@ class Lemmatizer(object):
             index[pos] = read_index(path.join(data_dir, 'wordnet', 'index.%s' % pos))
             exc[pos] = read_exc(path.join(data_dir, 'wordnet', '%s.exc' % pos))
         if path.exists(path.join(data_dir, 'vocab', 'lemma_rules.json')):
-            if sys.version_info[0] == 3:
-                rules = json.load(open(path.join(data_dir, 'vocab', 'lemma_rules.json'), encoding="utf_8"))
-            else:
-                rules = json.load(open(path.join(data_dir, 'vocab', 'lemma_rules.json')))
+            rules = json.load(codecs.open(path.join(data_dir, 'vocab', 'lemma_rules.json'), encoding='utf_8'))
         else:
             rules = {}
         return cls(index, exc, rules)
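The change drops the sys.version_info branch in favour of codecs.open, which returns a decoded (unicode) stream on both Python 2 and Python 3, so json.load receives text either way. A minimal standalone sketch of the same pattern, with a hypothetical helper name and path layout for illustration:

import codecs
import json
from os import path

def load_lemma_rules(data_dir):
    # Hypothetical helper illustrating the pattern from this commit.
    rules_loc = path.join(data_dir, 'vocab', 'lemma_rules.json')
    if not path.exists(rules_loc):
        return {}
    # codecs.open(..., encoding='utf_8') yields decoded text on both
    # Python 2 and Python 3; the builtin open() only accepts an
    # encoding argument on Python 3.
    with codecs.open(rules_loc, encoding='utf_8') as file_:
        return json.load(file_)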