Update with commit f84e0dbd2afa580a1a1c3d8643144b43b2ec654a
Browse files. See: https://github.com/huggingface/transformers/commit/f84e0dbd2afa580a1a1c3d8643144b43b2ec654a
- frameworks.json +1 -0
- pipeline_tags.json +2 -0
frameworks.json
CHANGED
@@ -48,6 +48,7 @@
|
|
48 |
{"model_type":"openai-gpt","pytorch":true,"tensorflow":true,"flax":false,"processor":"AutoTokenizer"}
|
49 |
{"model_type":"pegasus","pytorch":true,"tensorflow":true,"flax":true,"processor":"AutoTokenizer"}
|
50 |
{"model_type":"perceiver","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
|
|
|
51 |
{"model_type":"prophetnet","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
|
52 |
{"model_type":"qdqbert","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
|
53 |
{"model_type":"rag","pytorch":true,"tensorflow":true,"flax":false,"processor":"AutoTokenizer"}
|
|
|
48 |
{"model_type":"openai-gpt","pytorch":true,"tensorflow":true,"flax":false,"processor":"AutoTokenizer"}
|
49 |
{"model_type":"pegasus","pytorch":true,"tensorflow":true,"flax":true,"processor":"AutoTokenizer"}
|
50 |
{"model_type":"perceiver","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
|
51 |
+
{"model_type":"poolformer","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoFeatureExtractor"}
|
52 |
{"model_type":"prophetnet","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
|
53 |
{"model_type":"qdqbert","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
|
54 |
{"model_type":"rag","pytorch":true,"tensorflow":true,"flax":false,"processor":"AutoTokenizer"}
|
pipeline_tags.json
CHANGED
@@ -304,6 +304,8 @@
|
|
304 |
{"model_class":"PerceiverForMaskedLM","pipeline_tag":"fill-mask","auto_class":"AutoModelForMaskedLM"}
|
305 |
{"model_class":"PerceiverForSequenceClassification","pipeline_tag":"text-classification","auto_class":"AutoModelForSequenceClassification"}
|
306 |
{"model_class":"PerceiverModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
|
|
|
|
|
307 |
{"model_class":"ProphetNetForCausalLM","pipeline_tag":"text-generation","auto_class":"AutoModelForCausalLM"}
|
308 |
{"model_class":"ProphetNetForConditionalGeneration","pipeline_tag":"text2text-generation","auto_class":"AutoModelForSeq2SeqLM"}
|
309 |
{"model_class":"ProphetNetModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
|
|
|
304 |
{"model_class":"PerceiverForMaskedLM","pipeline_tag":"fill-mask","auto_class":"AutoModelForMaskedLM"}
|
305 |
{"model_class":"PerceiverForSequenceClassification","pipeline_tag":"text-classification","auto_class":"AutoModelForSequenceClassification"}
|
306 |
{"model_class":"PerceiverModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
|
307 |
+
{"model_class":"PoolFormerForImageClassification","pipeline_tag":"image-classification","auto_class":"AutoModelForImageClassification"}
|
308 |
+
{"model_class":"PoolFormerModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
|
309 |
{"model_class":"ProphetNetForCausalLM","pipeline_tag":"text-generation","auto_class":"AutoModelForCausalLM"}
|
310 |
{"model_class":"ProphetNetForConditionalGeneration","pipeline_tag":"text2text-generation","auto_class":"AutoModelForSeq2SeqLM"}
|
311 |
{"model_class":"ProphetNetModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
|