@@ -1,45 +1,19 @@
-from .extra.marian import SUPPORTED_HELSINKI_NLP_MODELS
-
-SUPPORTED_TASKS = {
-    # map tasks to automodels
-    'default': 'AutoModel',
-    'masked-lm': 'AutoModelForMaskedLM',
-    'causal-lm': 'AutoModelForCausalLM',
-    'seq2seq-lm': 'AutoModelForSeq2SeqLM',
-    'sequence-classification': 'AutoModelForSequenceClassification',
-    'token-classification': 'AutoModelForTokenClassification',
-    # 'multiple-choice': 'AutoModelForMultipleChoice',
-    'object-detection': 'AutoModelForObjectDetection',
-    'question-answering': 'AutoModelForQuestionAnswering',
-    'image-classification': 'AutoModelForImageClassification',
-    'image-segmentation': 'AutoModelForImageSegmentation',
-    # 'masked-im': 'AutoModelForMaskedImageModeling',
-    # 'semantic-segmentation': 'AutoModelForSemanticSegmentation',
-    'speech2seq-lm': 'AutoModelForSpeechSeq2Seq',
-    # 'audio-classification': 'AutoModelForAudioClassification',
-    # 'audio-frame-classification': 'AutoModelForAudioFrameClassification',
-    # 'audio-ctc': 'AutoModelForCTC',
-    # 'audio-xvector': 'AutoModelForAudioXVector',
-    'vision2seq-lm': 'AutoModelForVision2Seq',
-    # 'stable-diffusion': 'StableDiffusionPipeline',
-    'zero-shot-image-classification': 'AutoModelForZeroShotImageClassification',
-    'zero-shot-object-detection': 'AutoModelForZeroShotObjectDetection',
-}
+# from .extra.marian import SUPPORTED_HELSINKI_NLP_MODELS
 
 SUPPORTED_MODELS = {
-    'albert': {
+    'albert': [
         'albert-base-v2',
         'albert-large-v2',
         'sentence-transformers/paraphrase-albert-small-v2',
         'sentence-transformers/paraphrase-albert-base-v2',
-    },
+    ],
 
-    'bart': {
+    'bart': [
         'sshleifer/distilbart-cnn-6-6',
         'facebook/bart-large-cnn',
         'facebook/bart-large-mnli',
-    },
-    'bert': {
+    ],
+    'bert': [
         'bert-base-uncased',
         'bert-base-cased',
         'bert-base-multilingual-uncased',
@@ -63,26 +37,32 @@
         'ckiplab/bert-base-chinese-pos',
         'dslim/bert-base-NER',
         'dslim/bert-base-NER-uncased',
-    },
+
+        'allenai/scibert_scivocab_uncased',
+        'ProsusAI/finbert',
+        'emilyalsentzer/Bio_ClinicalBERT',
+        'SpanBERT/spanbert-large-cased',
+        'SpanBERT/spanbert-base-cased',
+    ],
     # TODO:
-    # 'blenderbot-small': {
+    # 'blenderbot-small': [
     #     'facebook/blenderbot_small-90M',
-    # },
-    'clip': {
+    # ],
+    'clip': [
         'openai/clip-vit-base-patch16',
         'openai/clip-vit-base-patch32',
-    },
-    'codegen': {
+    ],
+    'codegen': [
         'Salesforce/codegen-350M-mono',
         'Salesforce/codegen-350M-multi',
         'Salesforce/codegen-350M-nl',
-    },
-    'detr': {
+    ],
+    'detr': [
         'facebook/detr-resnet-50',
         'facebook/detr-resnet-101',
         'facebook/detr-resnet-50-panoptic',
-    },
-    'distilbert': {
+    ],
+    'distilbert': [
         'distilbert-base-uncased',
         'distilbert-base-cased',
         'distilbert-base-uncased-distilled-squad',
@@ -96,60 +76,64 @@
         'sentence-transformers/distilbert-base-nli-mean-tokens',
         'sentence-transformers/distilbert-base-nli-stsb-mean-tokens',
         'sentence-transformers/msmarco-distilbert-base-v4',
-    },
-    'gpt-neo': {
+    ],
+    'gpt-neo': [
         'EleutherAI/gpt-neo-125M',
         'MBZUAI/LaMini-Neo-125M',
-    },
-    'gpt2': {
+    ],
+    'gpt2': [
         'gpt2',
         'distilgpt2',
         'MBZUAI/LaMini-Cerebras-256M',
         'MBZUAI/LaMini-Cerebras-590M',
-    },
+        'MBZUAI/LaMini-GPT-124M',
+    ],
+    'm2m_100': [
+        'facebook/nllb-200-distilled-600M',
+    ],
     # TODO:
-    # 'marian': {
+    # 'marian': [
     #     f'Helsinki-NLP/opus-mt-{x}'
     #     for x in SUPPORTED_HELSINKI_NLP_MODELS
-    # },
-    'mobilebert': {
+    # ],
+    'mobilebert': [
         'typeform/mobilebert-uncased-mnli',
 
         # TODO:
         # https://github.com/huggingface/optimum/issues/1027
         # 'google/mobilebert-uncased',
-    },
-    'mpnet': {
+    ],
+    'mpnet': [
         'sentence-transformers/all-mpnet-base-v2',
         'sentence-transformers/nli-mpnet-base-v2',
         'sentence-transformers/paraphrase-mpnet-base-v2',
         'sentence-transformers/paraphrase-multilingual-mpnet-base-v2',
         'sentence-transformers/multi-qa-mpnet-base-cos-v1',
         'sentence-transformers/multi-qa-mpnet-base-dot-v1',
-    },
-    'mt5': {
+    ],
+    'mt5': [
         'google/mt5-small',
         'google/mt5-base',
-    },
-    'roberta': {
+    ],
+    'roberta': [
         'xlm-roberta-base',
         'roberta-base',
         'distilroberta-base',
         'roberta-large-mnli',
 
         'sentence-transformers/all-distilroberta-v1',
         'sentence-transformers/all-roberta-large-v1',
-    },
-    'sam': {
+    ],
+    'sam': [
         'facebook/sam-vit-base',
         'facebook/sam-vit-large',
         'facebook/sam-vit-huge',
-    },
-    'squeezebert': {
+    ],
+    'squeezebert': [
         'squeezebert/squeezebert-uncased',
         'squeezebert/squeezebert-mnli',
-    },
-    't5': {
+    ],
+    't5': [
         't5-small',
         't5-base',
         'google/t5-v1_1-small',
@@ -164,22 +148,30 @@
         'MBZUAI/LaMini-T5-61M',
         'MBZUAI/LaMini-T5-223M',
         'MBZUAI/LaMini-T5-738M',
-    },
-    'vision-encoder-decoder': {
+    ],
+    'vision-encoder-decoder': [
         'nlpconnect/vit-gpt2-image-captioning',
-    },
-    'vit': {
+    ],
+    'vit': [
         'google/vit-base-patch16-224-in21k',
         'google/vit-base-patch16-224',
-    },
-    'whisper': {
+        'facebook/dino-vitb16',
+        'facebook/dino-vits8',
+        'facebook/dino-vitb8',
+        'facebook/dino-vits16',
+    ],
+    'whisper': [
         'openai/whisper-tiny',
         'openai/whisper-tiny.en',
         'openai/whisper-base',
         'openai/whisper-base.en',
         'openai/whisper-small',
         'openai/whisper-small.en',
-    },
+        'openai/whisper-medium',
+        'openai/whisper-medium.en',
+        'openai/whisper-large',
+        'openai/whisper-large-v2',
+    ],
 }
 
 
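The removed SUPPORTED_TASKS block mapped each task name to the string name of a transformers Auto class. Below is a minimal sketch of how such a string-keyed mapping can be resolved at runtime; TASK_TO_AUTOMODEL mirrors only a few entries of the removed dict, and load_for_task is an illustrative helper that is not part of this commit.

# Illustrative sketch only: resolve a task name to a transformers Auto class.
# TASK_TO_AUTOMODEL copies a few entries of the removed SUPPORTED_TASKS dict;
# load_for_task is a hypothetical helper, not code from this repository.
import transformers

TASK_TO_AUTOMODEL = {
    'default': 'AutoModel',
    'seq2seq-lm': 'AutoModelForSeq2SeqLM',
    'question-answering': 'AutoModelForQuestionAnswering',
}

def load_for_task(model_id, task='default'):
    # The mapping stores class names as strings; look the class up on the package.
    auto_class = getattr(transformers, TASK_TO_AUTOMODEL[task])
    return auto_class.from_pretrained(model_id)

# e.g. load_for_task('t5-small', 'seq2seq-lm') loads the checkpoint with AutoModelForSeq2SeqLM.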
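In SUPPORTED_MODELS, the per-architecture containers change from set literals ({ ... }) to lists ([ ... ]), so each architecture now maps to an ordered list of Hugging Face Hub checkpoint ids. The sketch below shows how a conversion loop could consume that shape; convert_all and convert_model are placeholder names and do not appear in this commit.

# Illustrative sketch only: walk every (architecture, checkpoint) pair in a
# SUPPORTED_MODELS-style dict and hand each checkpoint id to an export routine.
# convert_model is a placeholder callable, not a real function from this repo.
def convert_all(supported_models, convert_model):
    for model_type, checkpoints in supported_models.items():
        for model_id in checkpoints:  # lists keep a deterministic order
            print(f'[{model_type}] converting {model_id}')
            convert_model(model_id)

# Usage (hypothetical): convert_all(SUPPORTED_MODELS, convert_model=export_to_onnx)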