Upload modified model with logging
Browse files — modeling_modified.py (+2 -2)
modeling_modified.py
CHANGED
|
@@ -67,7 +67,7 @@ def get_env_info() -> Dict[str, Any]:
|
|
| 67 |
import torch
|
| 68 |
|
| 69 |
if torch.backends.mps.is_available():
|
| 70 |
-
env_info["gpu_info"].append({"type": "MPS"})
|
| 71 |
except Exception:
|
| 72 |
pass
|
| 73 |
|
|
@@ -76,7 +76,7 @@ def get_env_info() -> Dict[str, Any]:
|
|
| 76 |
if platform.system() == "Linux":
|
| 77 |
amd_gpu_info = subprocess.check_output(["lspci", "-nn", "|", "grep", "VGA"]).decode()
|
| 78 |
if "AMD" in amd_gpu_info:
|
| 79 |
-
env_info["gpu_info"].append({"type": "AMD", "info": amd_gpu_info})
|
| 80 |
except Exception:
|
| 81 |
pass
|
| 82 |
|
|
|
|
| 67 |
import torch
|
| 68 |
|
| 69 |
if torch.backends.mps.is_available():
|
| 70 |
+
env_info["gpu_info"].append(str({"type": "MPS"}))
|
| 71 |
except Exception:
|
| 72 |
pass
|
| 73 |
|
|
|
|
| 76 |
if platform.system() == "Linux":
|
| 77 |
amd_gpu_info = subprocess.check_output(["lspci", "-nn", "|", "grep", "VGA"]).decode()
|
| 78 |
if "AMD" in amd_gpu_info:
|
| 79 |
+
env_info["gpu_info"].append(str({"type": "AMD", "info": amd_gpu_info}))
|
| 80 |
except Exception:
|
| 81 |
pass
|
| 82 |
|