mirror of https://github.com/explosion/spaCy.git
Update pretrain docs and add unsupported loss_func error (#3860)
* Add error to `get_vectors_loss` for unsupported loss function of `pretrain`
* Add missing "--loss-func" argument to pretrain docs. Update pretrain plac annotations to match docs.
* Add missing quotation marks
parent 4866a7ee9e
commit ebf5a04d6c
@@ -23,18 +23,19 @@ from .train import _load_pretrained_tok2vec
 @plac.annotations(
-    texts_loc=("Path to jsonl file with texts to learn from", "positional", None, str),
-    vectors_model=("Name or path to vectors model to learn from"),
-    output_dir=("Directory to write models each epoch", "positional", None, str),
+    texts_loc=("Path to JSONL file with raw texts to learn from, with text provided as the key 'text' or tokens as the "
+               "key 'tokens'", "positional", None, str),
+    vectors_model=("Name or path to spaCy model with vectors to learn from"),
+    output_dir=("Directory to write models to on each epoch", "positional", None, str),
     width=("Width of CNN layers", "option", "cw", int),
     depth=("Depth of CNN layers", "option", "cd", int),
-    embed_rows=("Embedding rows", "option", "er", int),
-    loss_func=("Loss to use for the objective. L2 or cosine", "option", "L", str),
+    embed_rows=("Number of embedding rows", "option", "er", int),
+    loss_func=("Loss function to use for the objective. Either 'L2' or 'cosine'", "option", "L", str),
     use_vectors=("Whether to use the static vectors as input features", "flag", "uv"),
-    dropout=("Dropout", "option", "d", float),
+    dropout=("Dropout rate", "option", "d", float),
     batch_size=("Number of words per training batch", "option", "bs", int),
-    max_length=("Max words per example.", "option", "xw", int),
-    min_length=("Min words per example.", "option", "nw", int),
+    max_length=("Max words per example. Longer examples are discarded", "option", "xw", int),
+    min_length=("Min words per example. Shorter examples are discarded", "option", "nw", int),
     seed=("Seed for random number generators", "option", "s", int),
     n_iter=("Number of iterations to pretrain", "option", "i", int),
     n_save_every=("Save model every X batches.", "option", "se", int),
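For context, plac builds the `pretrain` command-line interface directly from these annotation tuples of `(help text, kind, abbreviation, type)`. Below is a minimal sketch of the pattern; the `demo` command and its arguments are hypothetical and only illustrate how an annotation such as `loss_func` surfaces as the `--loss-func`/`-L` option listed in the docs table:

```python
import plac

@plac.annotations(
    # Same tuple layout as in the annotations above: (help text, kind, abbreviation, type)
    texts_loc=("Path to JSONL file with raw texts to learn from", "positional", None, str),
    loss_func=("Loss function to use for the objective. Either 'L2' or 'cosine'", "option", "L", str),
)
def demo(texts_loc, loss_func="cosine"):
    # plac exposes `loss_func` as an optional `--loss-func` / `-L` argument,
    # falling back to the default given in the signature.
    print("texts:", texts_loc, "loss:", loss_func)

if __name__ == "__main__":
    plac.call(demo)  # e.g. `python demo.py texts.jsonl -L L2`
```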
@@ -250,6 +251,8 @@ def get_vectors_loss(ops, docs, prediction, objective="L2"):
         loss = (d_target ** 2).sum()
     elif objective == "cosine":
         loss, d_target = get_cossim_loss(prediction, target)
+    else:
+        raise ValueError(Errors.E139.format(loss_func=objective))
     return loss, d_target
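To make the dispatch above concrete, here is a minimal numpy sketch of the two supported objectives and the new failure path. It is illustrative only: spaCy's real `get_vectors_loss` also returns the gradient `d_target` and relies on its own `get_cossim_loss` helper, both of which are simplified away here.

```python
import numpy as np

def vectors_loss_sketch(prediction, target, objective="L2"):
    """Return only the loss value; gradients are omitted for brevity."""
    if objective == "L2":
        # Squared error between predicted and target vectors.
        d_target = prediction - target
        return (d_target ** 2).sum()
    elif objective == "cosine":
        # 1 - cosine similarity, summed over rows (a stand-in for get_cossim_loss).
        dots = (prediction * target).sum(axis=1)
        norms = np.linalg.norm(prediction, axis=1) * np.linalg.norm(target, axis=1)
        return (1.0 - dots / (norms + 1e-8)).sum()
    else:
        # Mirrors the behaviour added above via Errors.E139.
        raise ValueError(
            "Unsupported loss_function '{}'. Use either 'L2' or 'cosine'".format(objective)
        )
```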
@@ -399,6 +399,7 @@ class Errors(object):
     E138 = ("Invalid JSONL format for raw text '{text}'. Make sure the input includes either the "
             "`text` or `tokens` key. For more info, see the docs:\n"
             "https://spacy.io/api/cli#pretrain-jsonl")
+    E139 = ("Unsupported loss_function '{loss_func}'. Use either 'L2' or 'cosine'")


 @add_codes
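The new `E139` entry follows the existing pattern in the `Errors` class: a plain format string with a `loss_func` placeholder that `get_vectors_loss` fills in before raising. A rough, self-contained illustration follows; the `[E139]` prefix that spaCy's `@add_codes` decorator normally prepends is written out by hand here:

```python
E139 = "Unsupported loss_function '{loss_func}'. Use either 'L2' or 'cosine'"

def fail_for(objective):
    # In spaCy this is `raise ValueError(Errors.E139.format(loss_func=objective))`.
    raise ValueError("[E139] " + E139.format(loss_func=objective))

try:
    fail_for("hinge")
except ValueError as err:
    print(err)  # [E139] Unsupported loss_function 'hinge'. Use either 'L2' or 'cosine'
```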
@@ -285,18 +285,19 @@ improvement.

 ```bash
 $ python -m spacy pretrain [texts_loc] [vectors_model] [output_dir] [--width]
-[--depth] [--embed-rows] [--dropout] [--seed] [--n-iter] [--use-vectors]
+[--depth] [--embed-rows] [--loss_func] [--dropout] [--seed] [--n-iter] [--use-vectors]
 [--n-save_every]
 ```

 | Argument                | Type       | Description |
 | ----------------------- | ---------- | ----------- |
-| `texts_loc`             | positional | Path to JSONL file with raw texts to learn from, with text provided as the key `"text"` or tokens as the key `tokens`. [See here](#pretrain-jsonl) for details. |
+| `texts_loc`             | positional | Path to JSONL file with raw texts to learn from, with text provided as the key `"text"` or tokens as the key `"tokens"`. [See here](#pretrain-jsonl) for details. |
 | `vectors_model`         | positional | Name or path to spaCy model with vectors to learn from. |
 | `output_dir`            | positional | Directory to write models to on each epoch. |
 | `--width`, `-cw`        | option     | Width of CNN layers. |
 | `--depth`, `-cd`        | option     | Depth of CNN layers. |
 | `--embed-rows`, `-er`   | option     | Number of embedding rows. |
+| `--loss-func`, `-L`     | option     | Loss function to use for the objective. Either `"L2"` or `"cosine"`. |
 | `--dropout`, `-d`       | option     | Dropout rate. |
 | `--batch-size`, `-bs`   | option     | Number of words per training batch. |
 | `--max-length`, `-xw`   | option     | Maximum words per example. Longer examples are discarded. |
@@ -304,7 +305,7 @@ $ python -m spacy pretrain [texts_loc] [vectors_model] [output_dir] [--width]
 | `--seed`, `-s`          | option     | Seed for random number generators. |
 | `--n-iter`, `-i`        | option     | Number of iterations to pretrain. |
 | `--use-vectors`, `-uv`  | flag       | Whether to use the static vectors as input features. |
-| `--n-save_every`, `-se` | option     | Save model every X batches. |
+| `--n-save-every`, `-se` | option     | Save model every X batches. |
 | `--init-tok2vec`, `-t2v` <Tag variant="new">2.1</Tag> | option | Path to pretrained weights for the token-to-vector parts of the models. See `spacy pretrain`. Experimental. |
 | **CREATES**             | weights    | The pre-trained weights that can be used to initialize `spacy train`. |
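As a usage note, the `texts_loc` file described in the table is JSONL: one JSON object per line, carrying either a raw `"text"` value or a pre-tokenized `"tokens"` list. A small sketch of producing such a file (the `texts.jsonl` filename and the example sentences are made up):

```python
import json

examples = [
    {"text": "This is a raw text example for pretraining."},
    {"tokens": ["This", "one", "is", "already", "tokenized", "."]},
]

# Write one JSON object per line, as expected for `texts_loc`.
with open("texts.jsonl", "w", encoding="utf8") as f:
    for example in examples:
        f.write(json.dumps(example) + "\n")
```

The file would then be passed as the first positional argument, along the lines of `python -m spacy pretrain texts.jsonl en_vectors_web_lg ./output --loss-func cosine`, where the vectors model name and output directory are placeholders.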