linoyts HF Staff committed on
Commit
b5c3f40
·
verified ·
1 Parent(s): e5acb29

Update optimization.py

Browse files
Files changed (1) hide show
  1. optimization.py +3 -3
optimization.py CHANGED
@@ -48,12 +48,12 @@ def optimize_pipeline_(pipeline: Callable[P, Any], *args: P.args, **kwargs: P.kw
48
  weight_name="FusionX_LoRa/Phantom_Wan_14B_FusionX_LoRA.safetensors",
49
  adapter_name="phantom"
50
  )
51
- kwargs = {}
52
- kwargs["load_into_transformer_2"] = True
53
  pipeline.load_lora_weights(
54
  "vrgamedevgirl84/Wan14BT2VFusioniX",
55
  weight_name="FusionX_LoRa/Phantom_Wan_14B_FusionX_LoRA.safetensors",
56
- adapter_name="phantom_2", **kwargs
57
  )
58
  pipeline.set_adapters(["phantom", "phantom_2"], adapter_weights=[1., 1.])
59
  pipeline.fuse_lora(adapter_names=["phantom"], lora_scale=3., components=["transformer"])
 
48
  weight_name="FusionX_LoRa/Phantom_Wan_14B_FusionX_LoRA.safetensors",
49
  adapter_name="phantom"
50
  )
51
+ kwargs_lora = {}
52
+ kwargs_lora["load_into_transformer_2"] = True
53
  pipeline.load_lora_weights(
54
  "vrgamedevgirl84/Wan14BT2VFusioniX",
55
  weight_name="FusionX_LoRa/Phantom_Wan_14B_FusionX_LoRA.safetensors",
56
+ adapter_name="phantom_2", **kwargs_lora
57
  )
58
  pipeline.set_adapters(["phantom", "phantom_2"], adapter_weights=[1., 1.])
59
  pipeline.fuse_lora(adapter_names=["phantom"], lora_scale=3., components=["transformer"])