Spaces:
Runtime error
Runtime error
Update worker_runpod.py
Browse files — worker_runpod.py (+15 lines, −13 lines)
worker_runpod.py
CHANGED
@@ -18,6 +18,7 @@ from cogvideox.utils.lora_utils import merge_lora, unmerge_lora
|
|
18 |
from cogvideox.utils.utils import get_image_to_video_latent, save_videos_grid, ASPECT_RATIO_512, get_closest_ratio, to_pil
|
19 |
from huggingface_hub import HfApi, HfFolder
|
20 |
|
|
|
21 |
# Low GPU memory mode
|
22 |
low_gpu_memory_mode = False
|
23 |
|
@@ -68,10 +69,10 @@ else:
|
|
68 |
def generate(input):
|
69 |
values = input["input"]
|
70 |
prompt = values["prompt"]
|
71 |
-
negative_prompt = values.get("negative_prompt", "")
|
72 |
guidance_scale = values.get("guidance_scale", 6.0)
|
73 |
seed = values.get("seed", 42)
|
74 |
-
num_inference_steps = values.get("num_inference_steps",
|
75 |
base_resolution = values.get("base_resolution", 512)
|
76 |
|
77 |
video_length = values.get("video_length", 53)
|
@@ -120,18 +121,19 @@ def generate(input):
|
|
120 |
video_path = os.path.join(save_path, f"{prefix}.mp4")
|
121 |
save_videos_grid(sample, video_path, fps=fps)
|
122 |
|
123 |
-
# Upload final video to Hugging Face repository
|
124 |
-
#hf_api = HfApi()
|
125 |
-
#repo_id = values.get("repo_id", "your-username/your-repo") # Set your HF repo
|
126 |
-
#hf_api.upload_file(
|
127 |
-
# path_or_fileobj=video_path,
|
128 |
-
# path_in_repo=f"{prefix}.mp4",
|
129 |
-
# repo_id=repo_id,
|
130 |
-
# repo_type="model" # or "dataset" if using a dataset repo
|
131 |
-
#)
|
132 |
|
133 |
-
|
134 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
135 |
result_url = ""
|
136 |
job_id = values.get("job_id", "default-job-id") # For RunPod job tracking
|
137 |
return {"jobId": job_id, "result": result_url, "status": "DONE"}
|
|
|
18 |
from cogvideox.utils.utils import get_image_to_video_latent, save_videos_grid, ASPECT_RATIO_512, get_closest_ratio, to_pil
|
19 |
from huggingface_hub import HfApi, HfFolder
|
20 |
|
21 |
+
tokenxf = os.getenv("HF_API_TOKEN")
|
22 |
# Low GPU memory mode
|
23 |
low_gpu_memory_mode = False
|
24 |
|
|
|
69 |
def generate(input):
|
70 |
values = input["input"]
|
71 |
prompt = values["prompt"]
|
72 |
+
negative_prompt = values.get("negative_prompt", "blurry, blurred, blurry face")
|
73 |
guidance_scale = values.get("guidance_scale", 6.0)
|
74 |
seed = values.get("seed", 42)
|
75 |
+
num_inference_steps = values.get("num_inference_steps", 18)
|
76 |
base_resolution = values.get("base_resolution", 512)
|
77 |
|
78 |
video_length = values.get("video_length", 53)
|
|
|
121 |
video_path = os.path.join(save_path, f"{prefix}.mp4")
|
122 |
save_videos_grid(sample, video_path, fps=fps)
|
123 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
124 |
|
125 |
+
hf_api = HfApi()
|
126 |
+
repo_id = "meepmoo/h4h4jejdf" # Set your HF repo
|
127 |
+
hf_api.upload_file(
|
128 |
+
path_or_fileobj=video_path,
|
129 |
+
path_in_repo=f"{prefix}.mp4",
|
130 |
+
repo_id=repo_id,
|
131 |
+
token=tokenxf,
|
132 |
+
repo_type="model"
|
133 |
+
)
|
134 |
+
|
135 |
+
# Prepare output
|
136 |
+
result_url = f"https://huggingface.co/{repo_id}/blob/main/{prefix}.mp4"
|
137 |
result_url = ""
|
138 |
job_id = values.get("job_id", "default-job-id") # For RunPod job tracking
|
139 |
return {"jobId": job_id, "result": result_url, "status": "DONE"}
|