Unable to use from transformers

#59
by sraj - opened

I get the following error when I try to load the model through transformers:

import os
from PIL import Image
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch

model_id = "./moondream2"
revision="2025-01-09"  # Pin to specific version
# revision = "2024-08-26"
model = AutoModelForCausalLM.from_pretrained(
    model_id, trust_remote_code=True, revision=revision, device_map={"": "cuda"}, local_files_only=True,
)

ERROR

---------------------------------------------------------------------------
FileNotFoundError                         Traceback (most recent call last)
Cell In[2], line 14
     12 revision="2025-01-09"  # Pin to specific version
     13 # revision = "2024-08-26"
---> 14 model = AutoModelForCausalLM.from_pretrained(
     15     model_id, trust_remote_code=True, revision=revision, device_map={"": "cuda"}, local_files_only=True,
     16 )
     17 # model = AutoModelForCausalLM.from_pretrained(model_id, trust_remote_code=True, revision=revision,
     18 #     torch_dtype=torch.float16, attn_implementation="flash_attention_2", local_files_only=True, cache_dir=model_id
     19 # ).to("cuda")
     20 tokenizer = AutoTokenizer.from_pretrained(model_id, revision=revision, local_files_only=True,)

File ~\AppData\Local\Programs\Python\Python312\Lib\site-packages\transformers\models\auto\auto_factory.py:526, in _BaseAutoModelClass.from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs)
    523 if kwargs.get("quantization_config", None) is not None:
    524     _ = kwargs.pop("quantization_config")
--> 526 config, kwargs = AutoConfig.from_pretrained(
    527     pretrained_model_name_or_path,
    528     return_unused_kwargs=True,
    529     trust_remote_code=trust_remote_code,
    530     code_revision=code_revision,
    531     _commit_hash=commit_hash,
    532     **hub_kwargs,
    533     **kwargs,
    534 )
    536 # if torch_dtype=auto was passed here, ensure to pass it on
    537 if kwargs_orig.get("torch_dtype", None) == "auto":

File ~\AppData\Local\Programs\Python\Python312\Lib\site-packages\transformers\models\auto\configuration_auto.py:1063, in AutoConfig.from_pretrained(cls, pretrained_model_name_or_path, **kwargs)
   1061 if has_remote_code and trust_remote_code:
   1062     class_ref = config_dict["auto_map"]["AutoConfig"]
-> 1063     config_class = get_class_from_dynamic_module(
   1064         class_ref, pretrained_model_name_or_path, code_revision=code_revision, **kwargs
   1065     )
   1066     if os.path.isdir(pretrained_model_name_or_path):
   1067         config_class.register_for_auto_class()

File ~\AppData\Local\Programs\Python\Python312\Lib\site-packages\transformers\dynamic_module_utils.py:553, in get_class_from_dynamic_module(class_reference, pretrained_model_name_or_path, cache_dir, force_download, resume_download, proxies, token, revision, local_files_only, repo_type, code_revision, **kwargs)
    540 # And lastly we get the class inside our newly created module
    541 final_module = get_cached_module_file(
    542     repo_id,
    543     module_file + ".py",
   (...)
    551     repo_type=repo_type,
    552 )
--> 553 return get_class_in_module(class_name, final_module, force_reload=force_download)

File ~\AppData\Local\Programs\Python\Python312\Lib\site-packages\transformers\dynamic_module_utils.py:238, in get_class_in_module(class_name, module_path, force_reload)
    235 module_spec = importlib.util.spec_from_file_location(name, location=module_file)
    237 # Hash the module file and all its relative imports to check if we need to reload it
--> 238 module_files: List[Path] = [module_file] + sorted(map(Path, get_relative_import_files(module_file)))
    239 module_hash: str = hashlib.sha256(b"".join(bytes(f) + f.read_bytes() for f in module_files)).hexdigest()
    241 module: ModuleType

File ~\AppData\Local\Programs\Python\Python312\Lib\site-packages\transformers\dynamic_module_utils.py:128, in get_relative_import_files(module_file)
    126 new_imports = []
    127 for f in files_to_check:
--> 128     new_imports.extend(get_relative_imports(f))
    130 module_path = Path(module_file).parent
    131 new_import_files = [str(module_path / m) for m in new_imports]

File ~\AppData\Local\Programs\Python\Python312\Lib\site-packages\transformers\dynamic_module_utils.py:97, in get_relative_imports(module_file)
     87 def get_relative_imports(module_file: Union[str, os.PathLike]) -> List[str]:
     88     """
     89     Get the list of modules that are relatively imported in a module file.
     90 
   (...)
     95         `List[str]`: The list of relative imports in the module.
     96     """
---> 97     with open(module_file, "r", encoding="utf-8") as f:
     98         content = f.read()
    100     # Imports of the form `import .xxx`

FileNotFoundError: [Errno 2] No such file or directory: 'C:\\Users\\---\\.cache\\huggingface\\modules\\transformers_modules\\moondream2\\layers.py'

@sraj this could be an issue with the environment setup — could you try the following steps in a fresh Python environment, and let me know if it works for you?

Sign up or log in to comment