Update with commit 0e4b7938d0e965362973797f47ad2b85f605a96a
See: https://github.com/huggingface/transformers/commit/0e4b7938d0e965362973797f47ad2b85f605a96a
- frameworks.json +1 -0
- pipeline_tags.json +3 -0
frameworks.json
CHANGED
@@ -196,6 +196,7 @@
 {"model_type":"mobilevit","pytorch":true,"tensorflow":true,"flax":false,"processor":"AutoImageProcessor"}
 {"model_type":"mobilevitv2","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoImageProcessor"}
 {"model_type":"modernbert","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
+{"model_type":"modernbert-decoder","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
 {"model_type":"moonshine","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoProcessor"}
 {"model_type":"moshi","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
 {"model_type":"mpnet","pytorch":true,"tensorflow":true,"flax":false,"processor":"AutoTokenizer"}
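Each row of frameworks.json is one JSON object per line (JSON Lines), so the added entry records that modernbert-decoder ships with PyTorch support only (no TensorFlow or Flax) and pairs with AutoTokenizer for preprocessing. A minimal sketch of reading the file and looking up that row; the load_frameworks helper and the local file path are illustrative, not part of the commit:

```python
import json

def load_frameworks(path="frameworks.json"):
    """Parse the JSON-Lines file: one object per line with
    model_type / pytorch / tensorflow / flax / processor fields."""
    entries = {}
    with open(path, encoding="utf-8") as f:
        for line in f:
            line = line.strip()
            if line:
                row = json.loads(line)
                entries[row["model_type"]] = row
    return entries

frameworks = load_frameworks()
row = frameworks["modernbert-decoder"]
print(row["pytorch"], row["tensorflow"], row["flax"])  # True False False
print(row["processor"])                                # AutoTokenizer
```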
pipeline_tags.json
CHANGED
@@ -697,6 +697,9 @@
 {"model_class":"MobileViTModel","pipeline_tag":"image-feature-extraction","auto_class":"AutoModel"}
 {"model_class":"MobileViTV2ForImageClassification","pipeline_tag":"image-classification","auto_class":"AutoModelForImageClassification"}
 {"model_class":"MobileViTV2Model","pipeline_tag":"image-feature-extraction","auto_class":"AutoModel"}
+{"model_class":"ModernBertDecoderForCausalLM","pipeline_tag":"text-generation","auto_class":"AutoModelForCausalLM"}
+{"model_class":"ModernBertDecoderForSequenceClassification","pipeline_tag":"text-classification","auto_class":"AutoModelForSequenceClassification"}
+{"model_class":"ModernBertDecoderModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
 {"model_class":"ModernBertForMaskedLM","pipeline_tag":"fill-mask","auto_class":"AutoModelForMaskedLM"}
 {"model_class":"ModernBertForQuestionAnswering","pipeline_tag":"question-answering","auto_class":"AutoModelForQuestionAnswering"}
 {"model_class":"ModernBertForSequenceClassification","pipeline_tag":"text-classification","auto_class":"AutoModelForSequenceClassification"}
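The three added rows map the new ModernBERT decoder classes to their auto classes and pipeline tags: ModernBertDecoderForCausalLM loads through AutoModelForCausalLM for text-generation, ModernBertDecoderForSequenceClassification through AutoModelForSequenceClassification for text-classification, and ModernBertDecoderModel through AutoModel for feature-extraction. A hedged sketch of what the causal-LM mapping implies on the transformers side; the checkpoint id below is a placeholder, not something this commit specifies:

```python
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

# Placeholder repo id: substitute a real ModernBERT decoder checkpoint.
checkpoint = "your-org/modernbert-decoder-checkpoint"

# Per the new pipeline_tags.json rows, the causal-LM head is exposed through
# AutoModelForCausalLM and served by the "text-generation" pipeline.
tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = AutoModelForCausalLM.from_pretrained(checkpoint)

generator = pipeline("text-generation", model=model, tokenizer=tokenizer)
print(generator("ModernBERT decoder models can", max_new_tokens=20))
```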