mirror of https://github.com/explosion/spaCy.git
Updated explanation for classy classification (#10484)
* Update universe.json: added classy-classification to the spaCy universe
* Update universe.json: added classy-classification to the spaCy universe resources
* Update universe.json: corrected a small typo in the JSON
* Update website/meta/universe.json (Co-authored-by: Sofie Van Landeghem <svlandeg@users.noreply.github.com>)
* Update website/meta/universe.json (Co-authored-by: Sofie Van Landeghem <svlandeg@users.noreply.github.com>)
* Update website/meta/universe.json (Co-authored-by: Sofie Van Landeghem <svlandeg@users.noreply.github.com>)
* Update universe.json: processed merge feedback
* Update universe.json
* Updated information for Classy Classification: made the description more comprehensible and easier to read, based on feedback from Philip Vollet, to prepare for sharing
* Added a note about examples
* Corrected unintended formatting changes
* Update website/meta/universe.json with a small typo correction (Co-authored-by: Adriane Boyd <adrianeboyd@gmail.com>)
* Resolved another typo
* Update website/meta/universe.json (Co-authored-by: Sofie Van Landeghem <svlandeg@users.noreply.github.com>)

Co-authored-by: Sofie Van Landeghem <svlandeg@users.noreply.github.com>
Co-authored-by: Adriane Boyd <adrianeboyd@gmail.com>
This commit is contained in:
parent: e5debc68e4
commit: e021dc6279
website/meta/universe.json

@@ -2601,8 +2601,9 @@
    },
    {
        "id": "classyclassification",
-       "slogan": "A Python library for classy few-shot and zero-shot classification within spaCy.",
-       "description": "Huggingface does offer some nice models for few/zero-shot classification, but these are not tailored to multi-lingual approaches. Rasa NLU has a nice approach for this, but its too embedded in their codebase for easy usage outside of Rasa/chatbots. Additionally, it made sense to integrate sentence-transformers and Huggingface zero-shot, instead of default word embeddings. Finally, I decided to integrate with spaCy, since training a custom spaCy TextCategorizer seems like a lot of hassle if you want something quick and dirty.",
+       "title": "Classy Classification",
+       "slogan": "Have you ever struggled with needing a spaCy TextCategorizer but didn't have the time to train one from scratch? Classy Classification is the way to go!",
+       "description": "Have you ever struggled with needing a [spaCy TextCategorizer](https://spacy.io/api/textcategorizer) but didn't have the time to train one from scratch? Classy Classification is the way to go! For few-shot classification using [sentence-transformers](https://github.com/UKPLab/sentence-transformers) or [spaCy models](https://spacy.io/usage/models), provide a dictionary with labels and examples, or just provide a list of labels for zero shot-classification with [Huggingface zero-shot classifiers](https://huggingface.co/models?pipeline_tag=zero-shot-classification).",
        "github": "davidberenstein1957/classy-classification",
        "pip": "classy-classification",
        "code_example": [
@@ -2618,32 +2619,36 @@
            "        \"Do you also have some ovens.\"]",
            "}",
            "",
+           "# see github repo for examples on sentence-transformers and Huggingface",
            "nlp = spacy.load('en_core_web_md')",
-           "",
-           "classification_type = \"spacy_few_shot\"",
-           "if classification_type == \"spacy_few_shot\":",
-           "    nlp.add_pipe(\"text_categorizer\", ",
-           "        config={\"data\": data, \"model\": \"spacy\"}",
-           "    )",
-           "elif classification_type == \"sentence_transformer_few_shot\":",
-           "    nlp.add_pipe(\"text_categorizer\", ",
-           "        config={\"data\": data, \"model\": \"sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2\"}",
-           "    )",
-           "elif classification_type == \"huggingface_zero_shot\":",
-           "    nlp.add_pipe(\"text_categorizer\", ",
-           "        config={\"data\": list(data.keys()), \"cat_type\": \"zero\", \"model\": \"facebook/bart-large-mnli\"}",
-           "    )",
+           "nlp.add_pipe(\"text_categorizer\", ",
+           "    config={",
+           "        \"data\": data,",
+           "        \"model\": \"spacy\"",
+           "    }",
+           ")",
            "",
            "print(nlp(\"I am looking for kitchen appliances.\")._.cats)",
-           "print([doc._.cats for doc in nlp.pipe([\"I am looking for kitchen appliances.\"])])"
+           "# Output:",
+           "#",
+           "# [{\"label\": \"furniture\", \"score\": 0.21}, {\"label\": \"kitchen\", \"score\": 0.79}]"
        ],
        "author": "David Berenstein",
        "author_links": {
            "github": "davidberenstein1957",
            "website": "https://www.linkedin.com/in/david-berenstein-1bab11105/"
        },
-       "category": ["pipeline", "standalone"],
-       "tags": ["classification", "zero-shot", "few-shot", "sentence-transformers", "huggingface"],
+       "category": [
+           "pipeline",
+           "standalone"
+       ],
+       "tags": [
+           "classification",
+           "zero-shot",
+           "few-shot",
+           "sentence-transformers",
+           "huggingface"
+       ],
        "spacy_version": 3
    },
    {
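For reference, the few-shot example from the updated "code_example" can be assembled into a runnable script. This is a minimal sketch: the top of the data dictionary is cut off by the hunk boundary above, so every example sentence except "Do you also have some ovens." is an illustrative placeholder, and it assumes that importing the classy-classification package registers the "text_categorizer" factory, as the example implies.

import spacy
import classy_classification  # assumed to register the "text_categorizer" factory on import

# Few-shot training data: a few example sentences per label.
# Only "Do you also have some ovens." appears in the diff; the rest are placeholders.
data = {
    "furniture": [
        "I really need to get a new sofa.",         # illustrative placeholder
        "Couches, benches and chairs for sale.",    # illustrative placeholder
    ],
    "kitchen": [
        "I hope to be getting a new stove today.",  # illustrative placeholder
        "Do you also have some ovens.",
    ],
}

nlp = spacy.load("en_core_web_md")  # requires: python -m spacy download en_core_web_md
nlp.add_pipe(
    "text_categorizer",
    config={
        "data": data,
        "model": "spacy",  # use the spaCy model's vectors for few-shot classification
    },
)

doc = nlp("I am looking for kitchen appliances.")
print(doc._.cats)
# e.g. [{"label": "furniture", "score": 0.21}, {"label": "kitchen", "score": 0.79}]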
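The lines removed by this commit also showed a zero-shot variant that hands only the label names to a Huggingface zero-shot classifier. The sketch below is taken from that removed config ("cat_type": "zero", model "facebook/bart-large-mnli"); since those lines were dropped, the config keys may differ in current releases of classy-classification, so treat this as an assumption rather than the library's documented API.

import spacy
import classy_classification  # assumed to register the "text_categorizer" factory on import

# Zero-shot: only the label names are provided, no example sentences.
labels = ["furniture", "kitchen"]

nlp = spacy.load("en_core_web_md")  # requires: python -m spacy download en_core_web_md
nlp.add_pipe(
    "text_categorizer",
    config={
        "data": labels,                       # just the labels for zero-shot
        "cat_type": "zero",                   # key taken from the removed example
        "model": "facebook/bart-large-mnli",  # Huggingface zero-shot classifier
    },
)

print(nlp("I am looking for kitchen appliances.")._.cats)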