aiqtech committed on
Commit
64e25d4
·
verified ·
1 Parent(s): 5b55adf

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +10 -0
app.py CHANGED
@@ -87,6 +87,16 @@ def load_model_on_demand(model_type: str):
87
  # GPU 설정
88
  device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu") # 명시적으로 cuda:0 지정
89
 
 
 
 
 
 
 
 
 
 
 
90
  # GPU 설정을 try-except로 감싸기
91
  if torch.cuda.is_available():
92
  try:
 
87
  # GPU 설정
88
  device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu") # 명시적으로 cuda:0 지정
89
 
90
+ # 전역 설정
91
+ torch.backends.cudnn.benchmark = False
92
+ torch.backends.cuda.matmul.allow_tf32 = True
93
+ torch.set_float32_matmul_precision('medium')
94
+
95
+ # 캐시 크기 제한
96
+ os.environ['TRANSFORMERS_CACHE'] = '/tmp/transformers_cache'
97
+ os.environ['HF_HOME'] = '/tmp/hf_home'
98
+ os.environ['TORCH_HOME'] = '/tmp/torch_home'
99
+
100
  # GPU 설정을 try-except로 감싸기
101
  if torch.cuda.is_available():
102
  try: