Update app.py
app.py CHANGED
@@ -16,6 +16,9 @@ HF_TOKEN = os.getenv("HF_TOKEN")
 MAX_LENGTH_REQUEST = 1024
 MAX_NEW_TOKENS = 128
 MAX_LENGTH_RESPONSE = 100
+TEST_ENV=os.getenv("TEST_ENV")
+
+logger.info(f"TEST_ENV= {TEST_ENV}")
 
 # Logging setup
 logging.basicConfig(
@@ -40,7 +43,7 @@ sber_rugpt3small_based_on_gpt2_model_name = "sberbank-ai/rugpt3small_based_on_gp
 try:
     model_name = rugpt3large_based_on_gpt2_model_name  # Smaller model
     tokenizer = AutoTokenizer.from_pretrained(model_name, padding_side="left")
-    model = AutoModelForCausalLM.from_pretrained(model_name, device_map="auto"
+    model = AutoModelForCausalLM.from_pretrained(model_name, device_map="auto")
     logger.info("Model loaded successfully")
 except Exception as e:
     logger.error(f"Model loading error: {str(e)}")
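The substantive fix is the closing parenthesis on the AutoModelForCausalLM.from_pretrained(...) call, which was previously missing and would have raised a SyntaxError when app.py was loaded; the commit also reads a TEST_ENV environment variable and logs it at startup. Below is a minimal sketch of how the corrected region might read in context. The imports, the logger wiring, and the rugpt3large hub id are assumptions for illustration only (the diff does not show them), and the logging setup is moved ahead of the first logger.info call so the snippet runs on its own.

# Sketch only: imports, logger setup, and the model hub id below are assumed;
# the diff shows only the lines inside the two hunks.
import logging
import os

from transformers import AutoModelForCausalLM, AutoTokenizer

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

MAX_LENGTH_REQUEST = 1024
MAX_NEW_TOKENS = 128
MAX_LENGTH_RESPONSE = 100
TEST_ENV = os.getenv("TEST_ENV")        # added by this commit; None when the variable is unset
logger.info(f"TEST_ENV= {TEST_ENV}")    # added by this commit

# Assumed hub id; the diff only references the variable name.
rugpt3large_based_on_gpt2_model_name = "sberbank-ai/rugpt3large_based_on_gpt2"

try:
    model_name = rugpt3large_based_on_gpt2_model_name
    tokenizer = AutoTokenizer.from_pretrained(model_name, padding_side="left")
    # The fix: this call previously lacked its closing parenthesis.
    # device_map="auto" needs the accelerate package installed.
    model = AutoModelForCausalLM.from_pretrained(model_name, device_map="auto")
    logger.info("Model loaded successfully")
except Exception as e:
    logger.error(f"Model loading error: {str(e)}")

With accelerate available, device_map="auto" lets transformers place the weights on whatever hardware the Space provides; on a CPU-only Space the model simply loads on CPU.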