state_dict = torch.load(model_path, weights_only=True, map_location=lambda storage, loc: storage) f24abb2 verified Fabrice-TIERCELIN committed 5 days ago
snapshot_download(repo_id="openai/clip-vit-large-patch14", repo_type="model", local_dir="ckpts/text_encoder_2", force_download=True) 985a4fc verified Fabrice-TIERCELIN committed 5 days ago
from huggingface_hub import snapshot_download fea04d7 verified Fabrice-TIERCELIN committed 5 days ago
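The three commits above cover a download step and a checkpoint-loading step. Below is a minimal sketch of how they might fit together in one script; the surrounding code is not shown in the history, so the checkpoint filename (model_path) is a hypothetical placeholder, and only the snapshot_download arguments and the torch.load call come from the commit messages themselves.

```python
from huggingface_hub import snapshot_download
import torch

# Fetch the CLIP text encoder into the local checkpoint directory used by the repo.
snapshot_download(
    repo_id="openai/clip-vit-large-patch14",
    repo_type="model",
    local_dir="ckpts/text_encoder_2",
    force_download=True,
)

# Load a checkpoint defensively:
# - weights_only=True restricts unpickling to plain tensors/containers,
#   refusing arbitrary Python objects in the checkpoint.
# - the map_location lambda returns each storage unchanged, so tensors are
#   materialized on CPU rather than on whatever device saved them.
model_path = "ckpts/model.pt"  # hypothetical path; not shown in the commits
state_dict = torch.load(
    model_path,
    weights_only=True,
    map_location=lambda storage, loc: storage,
)
```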