Kims12 committed on
Commit f1d1009 · verified · 1 Parent(s): 8144da3

Update app.py

Files changed (1)
  1. app.py +77 -33
app.py CHANGED
@@ -17,10 +17,15 @@ MODELS = {
 # Define the Cohere Command R+ model ID
 COHERE_MODEL = "CohereForAI/c4ai-command-r-plus-08-2024"
 
-def get_client(model_name):
-    hf_token = os.getenv("HF_TOKEN")
+
+def get_client(model_name, hf_token):
+    """
+    Create an InferenceClient for the given model name.
+    Changed to use the hf_token entered in the UI.
+    """
     if not hf_token:
-        raise ValueError("HF_TOKEN 환경 변수가 필요합니다.")
+        raise ValueError("HuggingFace API 토큰이 필요합니다.")
+
     if model_name in MODELS:
         model_id = MODELS[model_name]
     elif model_name == "Cohere Command R+":
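For orientation, here is a minimal, self-contained sketch of the pattern this hunk introduces: the caller now supplies the HuggingFace token instead of the function reading HF_TOKEN from the environment. The MODELS entry below is a placeholder for illustration, not the app's actual model list.

from huggingface_hub import InferenceClient

# Placeholder model map; the real app defines its own MODELS dict above this hunk.
MODELS = {"Example Model": "meta-llama/Meta-Llama-3-8B-Instruct"}
COHERE_MODEL = "CohereForAI/c4ai-command-r-plus-08-2024"

def get_client(model_name, hf_token):
    """Build an InferenceClient from a UI-supplied token rather than an env var."""
    if not hf_token:
        raise ValueError("A HuggingFace API token is required.")
    if model_name in MODELS:
        model_id = MODELS[model_name]
    elif model_name == "Cohere Command R+":
        model_id = COHERE_MODEL
    else:
        raise ValueError("Invalid model name.")
    return InferenceClient(model_id, token=hf_token)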
@@ -29,6 +34,7 @@ def get_client(model_name):
         raise ValueError("유효하지 않은 모델 이름입니다.")
     return InferenceClient(model_id, token=hf_token)
 
+
 def respond(
     message,
     chat_history,
@@ -37,9 +43,10 @@ def respond(
     temperature,
     top_p,
     system_message,
+    hf_token,  # additionally receives the HF token
 ):
     try:
-        client = get_client(model_name)
+        client = get_client(model_name, hf_token)
     except ValueError as e:
         chat_history.append((message, str(e)))
         return chat_history
@@ -85,6 +92,7 @@ def respond(
         chat_history.append((message, error_message))
         yield chat_history
 
+
 def cohere_respond(
     message,
     chat_history,
@@ -92,10 +100,11 @@ def cohere_respond(
     max_tokens,
     temperature,
     top_p,
+    hf_token,  # HF token added
 ):
     model_name = "Cohere Command R+"
     try:
-        client = get_client(model_name)
+        client = get_client(model_name, hf_token)
     except ValueError as e:
         chat_history.append((message, str(e)))
         return chat_history
@@ -109,8 +118,6 @@ def cohere_respond(
 
     messages.append({"role": "user", "content": message})
 
-    response = ""
-
     try:
         # Non-streaming handling for the Cohere Command R+ model
         response_full = client.chat_completion(
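The non-streaming call that this hunk leads into returns the full completion in one response object. Below is a rough, self-contained sketch assuming huggingface_hub's InferenceClient.chat_completion API; the exact keyword arguments in app.py are outside this hunk, so the parameter values here are illustrative.

from huggingface_hub import InferenceClient

# Illustrative values only; in app.py the token and sampling settings come from the UI.
client = InferenceClient("CohereForAI/c4ai-command-r-plus-08-2024", token="hf_your_token_here")
messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello"},
]

# Non-streaming: no stream=True, so the whole reply arrives at once.
response_full = client.chat_completion(
    messages,
    max_tokens=512,
    temperature=0.7,
    top_p=0.9,
)
print(response_full.choices[0].message.content)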
@@ -127,6 +134,7 @@ def cohere_respond(
         chat_history.append((message, error_message))
         return chat_history
 
+
 def chatgpt_respond(
     message,
     chat_history,
@@ -134,12 +142,18 @@ def chatgpt_respond(
     max_tokens,
     temperature,
     top_p,
+    openai_token,  # OpenAI token added
 ):
-    openai.api_key = os.getenv("OPENAI_API_KEY")
-    if not openai.api_key:
-        chat_history.append((message, "OPENAI_API_KEY 환경 변수가 필요합니다."))
+    """
+    ChatGPT response. Changed to use an openai_token entered in the UI.
+    """
+    if not openai_token:
+        chat_history.append((message, "OpenAI API 토큰이 필요합니다."))
         return chat_history
 
+    # openai.api_key = os.getenv("OPENAI_API_KEY")  # previous code, kept as a comment
+    openai.api_key = openai_token  # use the token entered in the UI
+
     messages = [{"role": "system", "content": system_message}]
     for human, assistant in chat_history:
         messages.append({"role": "user", "content": human})
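The module-level openai.api_key assignment implies the pre-1.0 openai SDK. Under that assumption, here is a minimal sketch of how a UI-supplied token could flow into a chat completion call; the actual request in app.py is outside this hunk, and the helper name and model string below are hypothetical.

import openai  # assumes openai < 1.0, matching the module-level api_key usage above

def chatgpt_complete(messages, openai_token, max_tokens=512, temperature=0.7, top_p=0.9):
    """Hypothetical helper: authenticate with a caller-supplied token, not OPENAI_API_KEY."""
    if not openai_token:
        raise ValueError("An OpenAI API token is required.")
    openai.api_key = openai_token  # token comes from the UI, not the environment
    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",  # hypothetical model choice for the sketch
        messages=messages,
        max_tokens=max_tokens,
        temperature=temperature,
        top_p=top_p,
    )
    return response.choices[0].message["content"]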
@@ -162,13 +176,28 @@ def chatgpt_respond(
         chat_history.append((message, error_message))
         return chat_history
 
+
 def clear_conversation():
     return []
 
+
 with gr.Blocks() as demo:
     gr.Markdown("# Prompting AI Chatbot")
     gr.Markdown("언어모델별 프롬프트 테스트 챗봇입니다.")
-
+
+    # --- token input UI added ---
+    with gr.Row():
+        hf_token_box = gr.Textbox(
+            label="HuggingFace 토큰 (비공개)",
+            type="password",
+            placeholder="HuggingFace API 토큰을 입력하세요..."
+        )
+        openai_token_box = gr.Textbox(
+            label="OpenAI 토큰 (비공개)",
+            type="password",
+            placeholder="OpenAI API 토큰을 입력하세요..."
+        )
+
     with gr.Tab("일반 모델"):
         with gr.Row():
             with gr.Column(scale=1):
@@ -196,8 +225,19 @@ with gr.Blocks() as demo:
             submit_button = gr.Button("전송")
             clear_button = gr.Button("대화 내역 지우기")
 
-        msg.submit(respond, [msg, chatbot, model_name, max_tokens, temperature, top_p, system_message], chatbot)
-        submit_button.click(respond, [msg, chatbot, model_name, max_tokens, temperature, top_p, system_message], chatbot)
+        # changed to pass the hf_token argument to the respond function
+        inputs_for_normal = [
+            msg,
+            chatbot,
+            model_name,
+            max_tokens,
+            temperature,
+            top_p,
+            system_message,
+            hf_token_box
+        ]
+        msg.submit(respond, inputs_for_normal, chatbot)
+        submit_button.click(respond, inputs_for_normal, chatbot)
         clear_button.click(clear_conversation, outputs=chatbot, queue=False)
 
     with gr.Tab("Cohere Command R+"):
@@ -226,16 +266,18 @@ with gr.Blocks() as demo:
             cohere_submit_button = gr.Button("전송")
             cohere_clear_button = gr.Button("대화 내역 지우기")
 
-        cohere_msg.submit(
-            cohere_respond,
-            [cohere_msg, cohere_chatbot, cohere_system_message, cohere_max_tokens, cohere_temperature, cohere_top_p],
-            cohere_chatbot
-        )
-        cohere_submit_button.click(
-            cohere_respond,
-            [cohere_msg, cohere_chatbot, cohere_system_message, cohere_max_tokens, cohere_temperature, cohere_top_p],
-            cohere_chatbot
-        )
+        # changed to pass the hf_token argument to the cohere_respond function
+        inputs_for_cohere = [
+            cohere_msg,
+            cohere_chatbot,
+            cohere_system_message,
+            cohere_max_tokens,
+            cohere_temperature,
+            cohere_top_p,
+            hf_token_box
+        ]
+        cohere_msg.submit(cohere_respond, inputs_for_cohere, cohere_chatbot)
+        cohere_submit_button.click(cohere_respond, inputs_for_cohere, cohere_chatbot)
         cohere_clear_button.click(clear_conversation, outputs=cohere_chatbot, queue=False)
 
     with gr.Tab("ChatGPT"):
@@ -264,16 +306,18 @@ with gr.Blocks() as demo:
             chatgpt_submit_button = gr.Button("전송")
             chatgpt_clear_button = gr.Button("대화 내역 지우기")
 
-        chatgpt_msg.submit(
-            chatgpt_respond,
-            [chatgpt_msg, chatgpt_chatbot, chatgpt_system_message, chatgpt_max_tokens, chatgpt_temperature, chatgpt_top_p],
-            chatgpt_chatbot
-        )
-        chatgpt_submit_button.click(
-            chatgpt_respond,
-            [chatgpt_msg, chatgpt_chatbot, chatgpt_system_message, chatgpt_max_tokens, chatgpt_temperature, chatgpt_top_p],
-            chatgpt_chatbot
-        )
+        # changed to pass the openai_token argument to the chatgpt_respond function
+        inputs_for_chatgpt = [
+            chatgpt_msg,
+            chatgpt_chatbot,
+            chatgpt_system_message,
+            chatgpt_max_tokens,
+            chatgpt_temperature,
+            chatgpt_top_p,
+            openai_token_box
+        ]
+        chatgpt_msg.submit(chatgpt_respond, inputs_for_chatgpt, chatgpt_chatbot)
+        chatgpt_submit_button.click(chatgpt_respond, inputs_for_chatgpt, chatgpt_chatbot)
         chatgpt_clear_button.click(clear_conversation, outputs=chatgpt_chatbot, queue=False)
 
 if __name__ == "__main__":
 