Bug in AutoModel

#26
by random-checkin - opened

NameError Traceback (most recent call last)
Cell In[6], line 5
2 from transformers import AutoProcessor, AutoModelForImageTextToText
4 processor = AutoProcessor.from_pretrained("meta-llama/Llama-4-Scout-17B-16E-Instruct")
----> 5 model = AutoModelForImageTextToText.from_pretrained("meta-llama/Llama-4-Scout-17B-16E-Instruct")

File /usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py:571, in _BaseAutoModelClass.from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs)
569 if model_class.config_class == config.sub_configs.get("text_config", None):
570 config = config.get_text_config()
--> 571 return model_class.from_pretrained(
572 pretrained_model_name_or_path, *model_args, config=config, **hub_kwargs, **kwargs
573 )
574 raise ValueError(
575 f"Unrecognized configuration class {config.__class__} for this kind of AutoModel: {cls.__name__}.\n"
576 f"Model type should be one of {', '.join(c.__name__ for c in cls._model_mapping.keys())}."
577 )

File /usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py:279, in restore_default_torch_dtype.<locals>._wrapper(*args, **kwargs)
277 old_dtype = torch.get_default_dtype()
278 try:
--> 279 return func(*args, **kwargs)
280 finally:
281 torch.set_default_dtype(old_dtype)

File /usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py:4333, in PreTrainedModel.from_pretrained(cls, pretrained_model_name_or_path, config, cache_dir, ignore_mismatched_sizes, force_download, local_files_only, token, revision, use_safetensors, weights_only, *model_args, **kwargs)
4330 config.name_or_path = pretrained_model_name_or_path
4332 # Instantiate model.
-> 4333 model_init_context = cls.get_init_context(is_quantized, _is_ds_init_called)
4335 config = copy.deepcopy(config) # We do not want to modify the config inplace in from_pretrained.
4336 if not getattr(config, "_attn_implementation_autoset", False):

File /usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py:3736, in PreTrainedModel.get_init_context(cls, is_quantized, _is_ds_init_called)
3734 init_contexts.append(set_quantized_state())
3735 else:
-> 3736 init_contexts = [no_init_weights(), init_empty_weights()]
3738 return init_contexts

NameError: name 'init_empty_weights' is not defined

It could be that you don't have 'accelerate' installed; try: pip install accelerate

Meta Llama org

We are patching this ASAP! should be out there in 2h or so

yes, pip install accelerate worked

You need to confirm your account before you can post a new comment.

Sign up or log in to comment