Update app.py
app.py CHANGED
@@ -100,10 +100,20 @@ def infer(genre_txt_content, lyrics_txt_content):
         "--max_new_tokens", "3000",
         "--disable_offload_model"
     ]
+
+    # Set up environment variables for CUDA
+    env = os.environ.copy()  # Copy current environment
+    env.update({
+        "CUDA_VISIBLE_DEVICES": "0",
+        "PYTORCH_CUDA_ALLOC_CONF": "max_split_size_mb:512",
+        "CUDA_HOME": "/usr/local/cuda",
+        "PATH": f"/usr/local/cuda/bin:{env.get('PATH', '')}",
+        "LD_LIBRARY_PATH": f"/usr/local/cuda/lib64:{env.get('LD_LIBRARY_PATH', '')}"
+    })
 
     # Execute the command
     try:
-        subprocess.run(command, check=True)
+        subprocess.run(command, check=True, env=env)
         print("Command executed successfully!")
 
         # Check and print the contents of the output folder
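
For reference, a minimal sketch of how the patched block reads once the commit is applied. Only the env-var block, the two trailing command arguments, and the subprocess call come from the diff; the entry point "infer.py" and the omitted genre/lyrics arguments are assumptions for illustration.

import os
import subprocess

def infer(genre_txt_content, lyrics_txt_content):
    # Only the last two arguments appear in the diff; the script name and the
    # genre/lyrics arguments are placeholders, not taken from the commit.
    command = [
        "python", "infer.py",            # assumed entry point
        "--max_new_tokens", "3000",
        "--disable_offload_model"
    ]

    # Set up environment variables for CUDA (the block added by this commit)
    env = os.environ.copy()  # Copy current environment
    env.update({
        "CUDA_VISIBLE_DEVICES": "0",
        "PYTORCH_CUDA_ALLOC_CONF": "max_split_size_mb:512",
        "CUDA_HOME": "/usr/local/cuda",
        "PATH": f"/usr/local/cuda/bin:{env.get('PATH', '')}",
        "LD_LIBRARY_PATH": f"/usr/local/cuda/lib64:{env.get('LD_LIBRARY_PATH', '')}"
    })

    # Execute the command with the CUDA-aware environment
    try:
        subprocess.run(command, check=True, env=env)
        print("Command executed successfully!")
        # Check and print the contents of the output folder (body not shown in the diff)
    except subprocess.CalledProcessError as e:
        print(f"Command failed with exit code {e.returncode}")

Copying os.environ before updating keeps the parent process's variables intact, and passing env= to subprocess.run applies the CUDA settings only to the child; prepending to PATH and LD_LIBRARY_PATH rather than overwriting them keeps any existing entries reachable.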