mirror of https://github.com/explosion/spaCy.git
Fix other open calls without context managers (#8245)
This commit is contained in:
parent
04239e94c7
commit
d54631f68b
|
@ -115,7 +115,8 @@ def convert(
|
|||
ner_map = srsly.read_json(ner_map) if ner_map is not None else None
|
||||
doc_files = []
|
||||
for input_loc in walk_directory(Path(input_path), converter):
|
||||
input_data = input_loc.open("r", encoding="utf-8").read()
|
||||
with input_loc.open("r", encoding="utf-8") as infile:
|
||||
input_data = infile.read()
|
||||
# Use converter function to convert data
|
||||
func = CONVERTERS[converter]
|
||||
docs = func(
|
||||
|
|
|
@ -439,7 +439,8 @@ class EntityLinker(TrainablePipe):
|
|||
|
||||
def load_model(p):
    """Deserialize the pipe's model from the file at path *p*.

    p (Path): location of the serialized model bytes on disk.
    RAISES (ValueError): Errors.E149 if the model has not been initialized
        (``from_bytes`` is missing), chained from None to hide the
        internal AttributeError.
    """
    try:
        # Context manager guarantees the file handle is closed even if
        # from_bytes raises (the old p.open("rb").read() leaked the handle).
        with p.open("rb") as infile:
            self.model.from_bytes(infile.read())
    except AttributeError:
        raise ValueError(Errors.E149) from None
|
||||
|
||||
|
|
|
@ -84,7 +84,8 @@ Phasellus tincidunt, augue quis porta finibus, massa sapien consectetur augue, n
|
|||
@pytest.mark.parametrize("file_name", ["sun.txt"])
def test_tokenizer_handle_text_from_file(tokenizer, file_name):
    """Tokenize a real text file and sanity-check the token count.

    Reads the fixture file next to this test module with an explicit
    encoding, via a context manager so the handle is closed deterministically
    (the old loc.open(...).read() left it to the garbage collector).
    """
    loc = ensure_path(__file__).parent / file_name
    with loc.open("r", encoding="utf8") as infile:
        text = infile.read()
    assert len(text) != 0
    tokens = tokenizer(text)
    assert len(tokens) > 100
|
||||
|
|
Loading…
Reference in New Issue