Update multit2i.py
multit2i.py CHANGED (+15 -0)
@@ -4,6 +4,21 @@ from threading import RLock
 from pathlib import Path
 from huggingface_hub import InferenceClient
 import os
+from huggingface_hub import login
+
+def set_token(token):
+    global HF_TOKEN
+    HF_TOKEN = token
+    os.environ["HF_TOKEN"] = token  # Store in the environment variable
+    login(HF_TOKEN)
+    return "Token set!"
+
+with gr.Blocks() as demo:
+    hf_token_input = gr.Textbox(label="Enter HF_TOKEN", type="password")
+    submit_button = gr.Button("Save")
+    output_text = gr.Textbox(label="Status")
+
+    submit_button.click(set_token, inputs=hf_token_input, outputs=output_text)
 
 
 HF_TOKEN = os.environ.get("HF_TOKEN") if os.environ.get("HF_TOKEN") else None  # If private or gated models aren't used, ENV setting is unnecessary.
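
The rest of multit2i.py is not shown in this diff. As a rough, hypothetical sketch of how the token stored by set_token could later be consumed, the snippet below builds an InferenceClient (already imported at the top of the file) from the HF_TOKEN environment variable. The get_client helper and the model name are illustrative assumptions, not part of this commit.

import os
from huggingface_hub import InferenceClient

def get_client(model_id: str) -> InferenceClient:
    # Read the token that set_token stored in the environment;
    # it stays None for public models, which InferenceClient accepts.
    token = os.environ.get("HF_TOKEN") or None
    return InferenceClient(model_id, token=token)

# Illustrative usage (model name is an example only):
# client = get_client("stabilityai/stable-diffusion-xl-base-1.0")
# image = client.text_to_image("a watercolor painting of a fox")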