Support use_pytorch_for_gpu_memory config

This commit is contained in:
Matthew Honnibal 2020-05-22 23:10:40 +02:00
parent 25d6ed3fb8
commit 2d9de8684d
1 changed file with 3 additions and 1 deletion

@@ -7,7 +7,7 @@ from pathlib import Path
 from wasabi import msg
 import thinc
 import thinc.schedules
-from thinc.api import Model
+from thinc.api import Model, use_pytorch_for_gpu_memory
 import random
 from ..gold import GoldCorpus
@@ -171,6 +171,8 @@ def train_from_config(
     msg.info(f"Loading config from: {config_path}")
     config = util.load_config(config_path, create_objects=False)
     util.fix_random_seed(config["training"]["seed"])
+    if config["training"]["use_pytorch_for_gpu_memory"]:
+        use_pytorch_for_gpu_memory()
     nlp_config = config["nlp"]
     config = util.load_config(config_path, create_objects=True)
     msg.info("Creating nlp from config")
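For reference, a minimal sketch of the corresponding [training] block in the config file this code reads, assuming the new flag sits alongside the existing seed key (the seed value shown is illustrative, not taken from this commit):

[training]
seed = 0
# Route cupy GPU allocations through PyTorch's memory pool via
# thinc.api.use_pytorch_for_gpu_memory, so the two libraries share memory.
use_pytorch_for_gpu_memory = true

With this setting present, the check added above calls use_pytorch_for_gpu_memory() once, before the nlp objects are created from the config.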