Kims12 committed
Commit 813c9d5 · verified · 1 Parent(s): c757549

Update app.py

Files changed (1)
  1. app.py +15 -6
app.py CHANGED
@@ -4,7 +4,7 @@ import openai
 import anthropic
 import os
 
-# Define the Cohere Command R+ model ID
+# Define the Cohere Command R+ model ID (reference constant)
 COHERE_MODEL = "CohereForAI/c4ai-command-r-plus-08-2024"
 
 def get_client(model_name: str):
@@ -16,8 +16,10 @@ def get_client(model_name: str):
     if not hf_token:
         raise ValueError("A HuggingFace API token is required. (environment variable HF_TOKEN is not set)")
 
-    if model_name == "Cohere Command R+":
-        model_id = COHERE_MODEL
+    if model_name == "c4ai-command-r-plus-08-2024":
+        model_id = "CohereForAI/c4ai-command-r-plus-08-2024"
+    elif model_name == "c4ai-command-r7b-12-2024":
+        model_id = "CohereForAI/c4ai-command-r7b-12-2024"
     else:
         raise ValueError("Invalid model name.")
     return InferenceClient(model_id, token=hf_token)
@@ -26,6 +28,7 @@ def cohere_respond(
     message,
     chat_history,
     system_message,
+    cohere_model_choice,
     max_tokens,
     temperature,
     top_p,
@@ -34,7 +37,7 @@
     Response function for the Cohere Command R+ model.
     The HF token is read inside the function via os.environ.
     """
-    model_name = "Cohere Command R+"
+    model_name = cohere_model_choice
     try:
         client = get_client(model_name)
     except ValueError as e:
@@ -223,7 +226,7 @@ with gr.Blocks() as demo:
     # General-model UI/features removed (deleted as requested)
     # --------------------------------------------------
 
-    # Cohere Command R+
+    # Cohere Command R+ tab (model selection added)
     with gr.Tab("Cohere Command R+"):
         with gr.Row():
             cohere_system_message = gr.Textbox(
@@ -234,6 +237,11 @@
                 label="System Message",
                 lines=3
             )
+            cohere_model_choice = gr.Radio(
+                choices=["c4ai-command-r-plus-08-2024", "c4ai-command-r7b-12-2024"],
+                value="c4ai-command-r-plus-08-2024",
+                label="Model selection"
+            )
             cohere_max_tokens = gr.Slider(minimum=100, maximum=8000, value=2000, step=100, label="Max new tokens")
             cohere_temperature = gr.Slider(minimum=0.1, maximum=2.0, value=0.7, step=0.1, label="Temperature")
             cohere_top_p = gr.Slider(
@@ -253,7 +261,8 @@
     inputs_for_cohere = [
         cohere_msg,
         cohere_chatbot,
-        cohere_system_message,
+        cohere_system_message,
+        cohere_model_choice,
         cohere_max_tokens,
         cohere_temperature,
         cohere_top_p
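
For context, a minimal, self-contained sketch of the selection flow this commit introduces: the value of the new radio button is passed into cohere_respond, forwarded to get_client, mapped to a Hugging Face repo ID, and used to build an InferenceClient. The diff does not show how the client is called afterwards, so the chat_completion usage in the trailing comment is an assumption for illustration only.

import os
from huggingface_hub import InferenceClient

def get_client(model_name: str) -> InferenceClient:
    # Same branching as the version added in this commit: map the radio choice
    # to a Hugging Face repo ID and build an InferenceClient with the HF_TOKEN.
    hf_token = os.environ.get("HF_TOKEN")
    if not hf_token:
        raise ValueError("A HuggingFace API token is required. (environment variable HF_TOKEN is not set)")
    if model_name == "c4ai-command-r-plus-08-2024":
        model_id = "CohereForAI/c4ai-command-r-plus-08-2024"
    elif model_name == "c4ai-command-r7b-12-2024":
        model_id = "CohereForAI/c4ai-command-r7b-12-2024"
    else:
        raise ValueError("Invalid model name.")
    return InferenceClient(model_id, token=hf_token)

# Assumed usage inside cohere_respond (the call itself is not part of this diff):
# client = get_client(cohere_model_choice)
# resp = client.chat_completion(
#     messages=[{"role": "system", "content": system_message},
#               {"role": "user", "content": message}],
#     max_tokens=max_tokens,
#     temperature=temperature,
#     top_p=top_p,
# )
# reply = resp.choices[0].message.content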
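
On the UI side, the key detail is that the new gr.Radio value must sit in inputs_for_cohere at the same position as cohere_model_choice in the cohere_respond signature. A runnable sketch of that wiring follows; the top_p slider range, the chatbot and message components, the submit binding, and the stub respond function are assumptions, since none of them appear in this diff.

import gradio as gr

def cohere_respond(message, chat_history, system_message, cohere_model_choice,
                   max_tokens, temperature, top_p):
    # Stub: the real app calls the selected Cohere model through InferenceClient.
    chat_history = chat_history + [(message, f"[{cohere_model_choice}] echo: {message}")]
    return "", chat_history

with gr.Blocks() as demo:
    with gr.Tab("Cohere Command R+"):
        with gr.Row():
            cohere_system_message = gr.Textbox(label="System Message", lines=3)
            cohere_model_choice = gr.Radio(
                choices=["c4ai-command-r-plus-08-2024", "c4ai-command-r7b-12-2024"],
                value="c4ai-command-r-plus-08-2024",
                label="Model selection",
            )
            cohere_max_tokens = gr.Slider(minimum=100, maximum=8000, value=2000, step=100, label="Max new tokens")
            cohere_temperature = gr.Slider(minimum=0.1, maximum=2.0, value=0.7, step=0.1, label="Temperature")
            cohere_top_p = gr.Slider(minimum=0.1, maximum=1.0, value=0.9, step=0.05, label="Top-p")  # range assumed
        cohere_chatbot = gr.Chatbot()              # assumed component
        cohere_msg = gr.Textbox(label="Message")   # assumed component

        # Order must match the cohere_respond parameter list above.
        inputs_for_cohere = [
            cohere_msg,
            cohere_chatbot,
            cohere_system_message,
            cohere_model_choice,
            cohere_max_tokens,
            cohere_temperature,
            cohere_top_p,
        ]
        # Hypothetical binding: clear the input box and update the chat history.
        cohere_msg.submit(cohere_respond, inputs=inputs_for_cohere,
                          outputs=[cohere_msg, cohere_chatbot])

if __name__ == "__main__":
    demo.launch()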