mirror of https://github.com/explosion/spaCy.git
bwd compat for pipe.begin_training
This commit is contained in:
parent 6965cdf16d
commit fb48de349c
@@ -85,7 +85,9 @@ class Warnings:
             "attribute or operator.")
 
     # TODO: fix numbering after merging develop into master
-    W089 = ("The nlp.begin_training method has been renamed to nlp.initialize.")
+    W089 = ("The 'begin_training' method has been renamed to 'initialize', "
+            "for calls to 'nlp' as well as for the individual pipeline "
+            "components.")
     W090 = ("Could not locate any {format} files in path '{path}'.")
     W091 = ("Could not clean/remove the temp directory at {dir}: {msg}.")
     W092 = ("Ignoring annotations for sentence starts, as dependency heads are set.")
@@ -1207,7 +1207,11 @@ class Language:
             )
             self.tokenizer.initialize(get_examples, nlp=self, **tok_settings)
         for name, proc in self.pipeline:
-            if hasattr(proc, "initialize"):
+            # backwards compatibility for older components
+            if hasattr(proc, "begin_training"):
+                warnings.warn(Warnings.W089, DeprecationWarning)
+                proc.begin_training(get_examples, pipeline=self.pipeline, sgd=self._optimizer)
+            elif hasattr(proc, "initialize"):
                 p_settings = I["components"].get(name, {})
                 p_settings = validate_init_settings(
                     proc.initialize, p_settings, section="components", name=name
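
For context, a minimal self-contained sketch of the fallback this hunk adds to Language.initialize: components that still define the deprecated begin_training() hook are trained through it with a DeprecationWarning, while components that define initialize() use the new hook. The names LegacyComponent, ModernComponent, and init_component are hypothetical stand-ins invented for illustration and are not part of spaCy's API; the sketch only mirrors the hasattr dispatch shown in the diff.

import warnings

class LegacyComponent:
    # Hypothetical old-style component: still exposes begin_training().
    def begin_training(self, get_examples, pipeline=None, sgd=None):
        print("legacy begin_training() called")

class ModernComponent:
    # Hypothetical new-style component: exposes initialize() instead.
    def initialize(self, get_examples, nlp=None):
        print("initialize() called")

def init_component(proc, get_examples, pipeline=None, sgd=None, nlp=None):
    # Mirrors the dispatch in the hunk above: prefer the deprecated
    # begin_training() hook if present, warn, otherwise call initialize().
    if hasattr(proc, "begin_training"):
        warnings.warn(
            "The 'begin_training' method has been renamed to 'initialize'.",
            DeprecationWarning,
        )
        proc.begin_training(get_examples, pipeline=pipeline, sgd=sgd)
    elif hasattr(proc, "initialize"):
        proc.initialize(get_examples, nlp=nlp)

if __name__ == "__main__":
    for proc in (LegacyComponent(), ModernComponent()):
        init_component(proc, get_examples=lambda: [])

Note that begin_training is checked first, so a component implementing both hooks still goes through the legacy path, matching the order of the branches added in the diff.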