metadata is None
python 3.10
transformers==4.46.2
torch==2.5.1
safetensors==0.4.5
venv/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py:559, in _BaseAutoModelClass.from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs)
557 cls.register(config.__class__, model_class, exist_ok=True)
558 model_class = add_generation_mixin_to_remote_model(model_class)
--> 559 return model_class.from_pretrained(
560 pretrained_model_name_or_path, *model_args, config=config, **hub_kwargs, **kwargs
561 )
562 elif type(config) in cls._model_mapping.keys():
563 model_class = _get_model_class(config, cls._model_mapping)
venv/lib/python3.10/site-packages/transformers/modeling_utils.py:3997, in PreTrainedModel.from_pretrained(cls, pretrained_model_name_or_path, config, cache_dir, ignore_mismatched_sizes, force_download, local_files_only, token, revision, use_safetensors, weights_only, *model_args, **kwargs)
3994 with safe_open(resolved_archive_file, framework="pt") as f:
3995 metadata = f.metadata()
-> 3997 if metadata.get("format") == "pt":
3998 pass
3999 elif metadata.get("format") == "tf":
AttributeError: 'NoneType' object has no attribute 'get'
I am having the same issue:
File "\Lib\site-packages\transformers\models\auto\auto_factory.py", line 559, in from_pretrained
return model_class.from_pretrained(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "\Lib\site-packages\transformers\modeling_utils.py", line 3738, in from_pretrained
if metadata.get("format") == "pt":
^^^^^^^^^^^^
AttributeError: 'NoneType' object has no attribute 'get'
It seems the latest model revision has something wrong with its safetensors metadata.
I can use the previous one without problem https://hf-mirror.com/Alibaba-NLP/gte-multilingual-reranker-base/tree/4e88bd5dec38b6b9a7e623755029fc124c319d67
model_name_or_path = "Alibaba-NLP/gte-multilingual-reranker-base"
revision = "4e88bd5dec38b6b9a7e623755029fc124c319d67"
tokenizer = AutoTokenizer.from_pretrained(model_name_or_path)
model = AutoModelForSequenceClassification.from_pretrained(
model_name_or_path, trust_remote_code=True,
torch_dtype=torch.float16,
revision=revision
)
The main branch file has been updated to fix this problem.