lysandre (HF staff) committed
Commit 7eb0ef4 (parent: 7730b20)

Update with commit 667ed5635e6fd7e2df4fc23012746b1c0cbb7575


See: https://github.com/huggingface/transformers/commit/667ed5635e6fd7e2df4fc23012746b1c0cbb7575

Files changed (2):
  1. frameworks.json +1 -0
  2. pipeline_tags.json +4 -0
frameworks.json CHANGED
@@ -149,6 +149,7 @@
  {"model_type":"mobilenet_v2","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoImageProcessor"}
  {"model_type":"mobilevit","pytorch":true,"tensorflow":true,"flax":false,"processor":"AutoImageProcessor"}
  {"model_type":"mobilevitv2","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoImageProcessor"}
+ {"model_type":"modernbert","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
  {"model_type":"moshi","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
  {"model_type":"mpnet","pytorch":true,"tensorflow":true,"flax":false,"processor":"AutoTokenizer"}
  {"model_type":"mpt","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
pipeline_tags.json CHANGED
@@ -590,6 +590,10 @@
  {"model_class":"MobileViTModel","pipeline_tag":"image-feature-extraction","auto_class":"AutoModel"}
  {"model_class":"MobileViTV2ForImageClassification","pipeline_tag":"image-classification","auto_class":"AutoModelForImageClassification"}
  {"model_class":"MobileViTV2Model","pipeline_tag":"image-feature-extraction","auto_class":"AutoModel"}
+ {"model_class":"ModernBertForMaskedLM","pipeline_tag":"fill-mask","auto_class":"AutoModelForMaskedLM"}
+ {"model_class":"ModernBertForSequenceClassification","pipeline_tag":"text-classification","auto_class":"AutoModelForSequenceClassification"}
+ {"model_class":"ModernBertForTokenClassification","pipeline_tag":"token-classification","auto_class":"AutoModelForTokenClassification"}
+ {"model_class":"ModernBertModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
  {"model_class":"MoshiForCausalLM","pipeline_tag":"text-generation","auto_class":"AutoModelForCausalLM"}
  {"model_class":"MoshiModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
  {"model_class":"MptForCausalLM","pipeline_tag":"text-generation","auto_class":"AutoModelForCausalLM"}