Remove env_opt and simplify default Optimizer

Ines Montani 2020-08-14 14:59:54 +02:00
parent ab1d165bba
commit 37814b608d
3 changed files with 1 addition and 47 deletions
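In short: optimizer hyperparameters are no longer read from SPACY_*-prefixed environment variables through util.env_opt, and create_default_optimizer now returns a plain Adam() using thinc's own defaults. A minimal before/after sketch of the resulting behaviour (the thinc.api import path is an assumption; the diff below only shows the Adam and Optimizer names):

from thinc.api import Adam, Optimizer  # assumed import path


def create_default_optimizer() -> Optimizer:
    # Before this commit, each hyperparameter went through env_opt(), so it
    # could be overridden via environment variables such as SPACY_LEARN_RATE
    # or SPACY_OPTIMIZER_B1.
    # After this commit, the default optimizer is simply Adam with thinc's
    # own defaults.
    return Adam()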


@@ -60,7 +60,6 @@ def evaluate(
     fix_random_seed()
     if use_gpu >= 0:
         require_gpu(use_gpu)
-    util.set_env_log(False)
     data_path = util.ensure_path(data_path)
     output_path = util.ensure_path(output)
     displacy_path = util.ensure_path(displacy_path)


@@ -48,7 +48,6 @@ def train_cli(
     used to register custom functions and architectures that can then be
     referenced in the config.
     """
-    util.set_env_log(verbose)
     verify_cli_args(config_path, output_path)
     overrides = parse_config_overrides(ctx.args)
     import_code(code_path)


@@ -54,7 +54,6 @@ if TYPE_CHECKING:
     from .vocab import Vocab  # noqa: F401
-_PRINT_ENV = False
 OOV_RANK = numpy.iinfo(numpy.uint64).max
 LEXEME_NORM_LANGS = ["da", "de", "el", "en", "id", "lb", "pt", "ru", "sr", "ta", "th"]
@@ -109,11 +108,6 @@ class SimpleFrozenDict(dict):
         raise NotImplementedError(self.error)
-def set_env_log(value: bool) -> None:
-    global _PRINT_ENV
-    _PRINT_ENV = value
 def lang_class_is_loaded(lang: str) -> bool:
     """Check whether a Language class is already loaded. Language classes are
     loaded lazily, to avoid expensive setup code associated with the language
@@ -602,27 +596,6 @@ def get_async(stream, numpy_array):
     return array
-def env_opt(name: str, default: Optional[Any] = None) -> Optional[Any]:
-    if type(default) is float:
-        type_convert = float
-    else:
-        type_convert = int
-    if "SPACY_" + name.upper() in os.environ:
-        value = type_convert(os.environ["SPACY_" + name.upper()])
-        if _PRINT_ENV:
-            print(name, "=", repr(value), "via", "$SPACY_" + name.upper())
-        return value
-    elif name in os.environ:
-        value = type_convert(os.environ[name])
-        if _PRINT_ENV:
-            print(name, "=", repr(value), "via", "$" + name)
-        return value
-    else:
-        if _PRINT_ENV:
-            print(name, "=", repr(default), "by default")
-        return default
 def read_regex(path: Union[str, Path]) -> Pattern:
     path = ensure_path(path)
     with path.open(encoding="utf8") as file_:
@@ -1067,24 +1040,7 @@ class DummyTokenizer:
 def create_default_optimizer() -> Optimizer:
-    # TODO: Do we still want to allow env_opt?
-    learn_rate = env_opt("learn_rate", 0.001)
-    beta1 = env_opt("optimizer_B1", 0.9)
-    beta2 = env_opt("optimizer_B2", 0.999)
-    eps = env_opt("optimizer_eps", 1e-8)
-    L2 = env_opt("L2_penalty", 1e-6)
-    grad_clip = env_opt("grad_norm_clip", 10.0)
-    L2_is_weight_decay = env_opt("L2_is_weight_decay", False)
-    optimizer = Adam(
-        learn_rate,
-        L2=L2,
-        beta1=beta1,
-        beta2=beta2,
-        eps=eps,
-        grad_clip=grad_clip,
-        L2_is_weight_decay=L2_is_weight_decay,
-    )
-    return optimizer
+    return Adam()
 def minibatch(items, size):
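With the environment-variable overrides gone, code that relied on them would instead pass the hyperparameters explicitly when creating an optimizer. A hedged usage sketch that reproduces the old defaults from the removed create_default_optimizer; the keyword arguments are exactly those the removed code already passed to Adam, while the thinc.api import path is an assumption:

from thinc.api import Adam  # assumed import path

# Reproduce the previous spaCy defaults explicitly instead of relying on
# SPACY_* environment variables.
optimizer = Adam(
    0.001,  # learn_rate
    L2=1e-6,  # was "L2_penalty"
    beta1=0.9,  # was "optimizer_B1"
    beta2=0.999,  # was "optimizer_B2"
    eps=1e-8,  # was "optimizer_eps"
    grad_clip=10.0,  # was "grad_norm_clip"
    L2_is_weight_decay=False,
)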