Update app.py
app.py CHANGED
@@ -4,9 +4,6 @@ import openai
 import anthropic
 import os
 
-# Reference constant for the Cohere model ID (the previous model is not used)
-# COHERE_MODEL = "CohereForAI/c4ai-command-r-plus-08-2024"
-
 def get_client(model_name: str):
     """
     Create an InferenceClient matching the model name.
@@ -16,7 +13,9 @@ def get_client(model_name: str):
     if not hf_token:
         raise ValueError("A HuggingFace API token is required. (environment variable HF_TOKEN is not set)")
 
-    if model_name == "c4ai-command-r-08-2024":
+    if model_name == "c4ai-command-r-plus-08-2024":
+        model_id = "CohereForAI/c4ai-command-r-plus-08-2024"
+    elif model_name == "c4ai-command-r-08-2024":
         model_id = "CohereForAI/c4ai-command-r-08-2024"
     else:
         raise ValueError("Invalid model name.")
@@ -32,7 +31,7 @@ def cohere_respond(
     top_p,
 ):
     """
-    Response function for the Cohere model.
+    Response function for the Cohere Command R+ model.
     The HF token is loaded inside the function via os.environ.
     """
     model_name = cohere_model_choice
@@ -224,7 +223,7 @@ with gr.Blocks() as demo:
     # General-model UI/features removed (deleted per request)
     # --------------------------------------------------
 
-    # Cohere Command R+ tab (model selection
+    # Cohere Command R+ tab (model selection added)
     with gr.Tab("Cohere Command R+"):
         with gr.Row():
             cohere_system_message = gr.Textbox(
@@ -236,8 +235,8 @@ with gr.Blocks() as demo:
                 lines=3
             )
             cohere_model_choice = gr.Radio(
-                choices=["c4ai-command-r-08-2024"],
-                value="c4ai-command-r-08-2024",
+                choices=["c4ai-command-r-plus-08-2024", "c4ai-command-r-08-2024"],
+                value="c4ai-command-r-plus-08-2024",
                 label="Model selection"
             )
             cohere_max_tokens = gr.Slider(minimum=100, maximum=8000, value=2000, step=100, label="Max new tokens")
@@ -259,7 +258,7 @@ with gr.Blocks() as demo:
         inputs_for_cohere = [
            cohere_msg,
            cohere_chatbot,
-           cohere_system_message,
+           cohere_system_message,
            cohere_model_choice,
            cohere_max_tokens,
            cohere_temperature,
@@ -269,7 +268,7 @@ with gr.Blocks() as demo:
        cohere_submit_button.click(cohere_respond, inputs_for_cohere, cohere_chatbot)
        cohere_clear_button.click(clear_conversation, outputs=cohere_chatbot, queue=False)
 
-    # ChatGPT
+    # ChatGPT tab
     with gr.Tab("ChatGPT"):
         with gr.Row():
             chatgpt_system_message = gr.Textbox(
@@ -308,7 +307,7 @@ with gr.Blocks() as demo:
        chatgpt_submit_button.click(chatgpt_respond, inputs_for_chatgpt, chatgpt_chatbot)
        chatgpt_clear_button.click(clear_conversation, outputs=chatgpt_chatbot, queue=False)
 
-    # Claude
+    # Claude tab
     with gr.Tab("Claude"):
         with gr.Row():
             claude_system_message = gr.Textbox(
@@ -347,7 +346,7 @@ with gr.Blocks() as demo:
        claude_submit_button.click(claude_respond, inputs_for_claude, claude_chatbot)
        claude_clear_button.click(clear_conversation, outputs=claude_chatbot, queue=False)
 
-    # DeepSeek
+    # DeepSeek tab
     with gr.Tab("DeepSeek"):
         with gr.Row():
             deepseek_system_message = gr.Textbox(
@@ -388,7 +387,6 @@ with gr.Blocks() as demo:
            deepseek_temperature,
            deepseek_top_p
        ]
-       # Remove the stream argument from Textbox.submit.
        deepseek_msg.submit(deepseek_respond, inputs_for_deepseek, deepseek_chatbot)
        deepseek_submit_button.click(deepseek_respond, inputs_for_deepseek, deepseek_chatbot)
        deepseek_clear_button.click(clear_conversation, outputs=deepseek_chatbot, queue=False)
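
For reference, the hunks above imply that the updated get_client now resolves both the Command R+ and Command R model names. The sketch below is a minimal reconstruction under stated assumptions, not the file's actual code: the huggingface_hub InferenceClient import, the HF_TOKEN lookup via os.getenv, and the return statement are filled in around the lines the diff shows.

import os

from huggingface_hub import InferenceClient


def get_client(model_name: str):
    """Create an InferenceClient matching the model name (reconstruction)."""
    # Assumption: the token comes from the environment, as the error message implies.
    hf_token = os.getenv("HF_TOKEN")
    if not hf_token:
        raise ValueError("A HuggingFace API token is required. (environment variable HF_TOKEN is not set)")

    # The branch added by this commit makes Command R+ selectable again,
    # while keeping the existing Command R option.
    if model_name == "c4ai-command-r-plus-08-2024":
        model_id = "CohereForAI/c4ai-command-r-plus-08-2024"
    elif model_name == "c4ai-command-r-08-2024":
        model_id = "CohereForAI/c4ai-command-r-08-2024"
    else:
        raise ValueError("Invalid model name.")

    # Assumption: the app talks to the HF Inference API through InferenceClient.
    return InferenceClient(model_id, token=hf_token)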
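
Downstream, the Radio value travels to cohere_respond through inputs_for_cohere, so the handler only has to forward it to get_client. Below is a hedged sketch of such a handler, assuming the Chatbot history is a list of (user, assistant) tuples and that the app calls InferenceClient.chat_completion; the real function body sits outside the hunks shown above.

def cohere_respond(message, history, system_message, cohere_model_choice,
                   max_tokens, temperature, top_p):
    """Response function for the Cohere Command R+ / Command R models (sketch)."""
    model_name = cohere_model_choice  # "c4ai-command-r-plus-08-2024" or "c4ai-command-r-08-2024"
    client = get_client(model_name)   # get_client as sketched above

    # Rebuild the conversation for the chat endpoint (history format is an assumption).
    messages = [{"role": "system", "content": system_message}]
    for user_msg, assistant_msg in history or []:
        messages.append({"role": "user", "content": user_msg})
        messages.append({"role": "assistant", "content": assistant_msg})
    messages.append({"role": "user", "content": message})

    result = client.chat_completion(
        messages,
        max_tokens=max_tokens,
        temperature=temperature,
        top_p=top_p,
    )
    reply = result.choices[0].message.content

    # gr.Chatbot expects the updated history back.
    return (history or []) + [(message, reply)]

Because the same handler serves both radio choices, adding another Cohere checkpoint later would only require extending the choices list and the if/elif chain in get_client.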