Update app.py
app.py CHANGED
@@ -19,7 +19,10 @@ def get_model_response(client, messages, max_tokens, temperature, top_p):
     for message in response:
         if stop_event.is_set():
             break
-
+        if hasattr(message.choices[0], 'delta'):
+            token = message.choices[0].delta.content
+        else:
+            token = message.choices[0].text
         if token:
             full_response += token
         yield full_response
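The change above makes the token extraction tolerant of both streaming formats: chat-completion chunks carry the token in choices[0].delta.content, while plain text-completion chunks carry it in choices[0].text. A minimal, self-contained sketch of that branch, using hypothetical stand-in chunk objects rather than a real client response:

from types import SimpleNamespace

# Hypothetical stand-ins for the two chunk shapes a streaming client can yield.
chat_chunk = SimpleNamespace(choices=[SimpleNamespace(delta=SimpleNamespace(content="Hel"))])
text_chunk = SimpleNamespace(choices=[SimpleNamespace(text="lo")])

full_response = ""
for message in (chat_chunk, text_chunk):
    # The committed branch: prefer the chat-completion delta, fall back to .text.
    if hasattr(message.choices[0], 'delta'):
        token = message.choices[0].delta.content
    else:
        token = message.choices[0].text
    if token:
        full_response += token

print(full_response)  # prints "Hello"

In app.py the same branch runs inside get_model_response, where full_response accumulates the text that respond and continue_writing stream to the UI.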
@@ -35,10 +38,9 @@ def respond(message, history, system_message, max_tokens, temperature, top_p, se
         messages.extend([{"role": "user" if i % 2 == 0 else "assistant", "content": m} for h in history for i, m in enumerate(h) if m])
         messages.append({"role": "user", "content": message})

-
-        for
-
-            history.append((message, response))
+        history.append((message, ""))
+        for response in get_model_response(client, messages, max_tokens, temperature, top_p):
+            history[-1] = (message, response)
             yield "", history

     except Exception as e:
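respond now streams its reply: it reserves an empty assistant slot with history.append((message, "")), then overwrites history[-1] with each partial response and yields ("", history) so the textbox is cleared and the Chatbot refreshes as tokens arrive. A self-contained sketch of the pattern, where fake_stream and respond_demo are illustrative stand-ins for get_model_response and the real handler:

def fake_stream(text):
    # Stand-in for get_model_response: yields ever-longer partial responses.
    partial = ""
    for word in text.split():
        partial = (partial + " " + word).strip()
        yield partial

def respond_demo(message, history):
    history.append((message, ""))          # reserve the assistant slot once
    for response in fake_stream("a reply streamed word by word"):
        history[-1] = (message, response)  # overwrite with the latest partial
        yield "", history                  # clear the textbox, refresh the Chatbot

history = []
for _, h in respond_demo("hello", history):
    print(h[-1][1])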
@@ -59,16 +61,13 @@ def continue_writing(history, system_message, max_tokens, temperature, top_p, mo
         messages.extend([{"role": "user" if i % 2 == 0 else "assistant", "content": m} for h in history for i, m in enumerate(h)])
         messages.append({"role": "user", "content": prompt})

-        response
-
-
-
-            continued_response = last_assistant_message + " " + response
-            history[-1] = (last_user_message, continued_response)
-        return "", history
+        for response in get_model_response(client, messages, max_tokens, temperature, top_p):
+            continued_response = last_assistant_message + " " + response
+            history[-1] = (last_user_message, continued_response)
+            yield "", history
     except Exception as e:
         history.append(("시스템", f"계속 작성 중 오류 발생: {str(e)}"))
-
+        yield "", history

 def stop_generation():
     stop_event.set()
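continue_writing is reworked the same way: each partial continuation is spliced onto the previous assistant message, and the except branch now yields instead of returning. Because a handler containing yield is a generator, a plain return would end the stream without pushing the error message to the Chatbot. A compact sketch under those assumptions, with continue_demo and stream_continuation as illustrative stand-ins:

def stream_continuation():
    # Stand-in for get_model_response during a "continue writing" request.
    for partial in ("and then", "and then it keeps going"):
        yield partial

def continue_demo(history):
    try:
        last_user_message, last_assistant_message = history[-1]
        for response in stream_continuation():
            continued_response = last_assistant_message + " " + response
            history[-1] = (last_user_message, continued_response)
            yield "", history
    except Exception as e:
        # Yield, not return, so the error still reaches the UI from a generator handler.
        history.append(("시스템", f"계속 작성 중 오류 발생: {str(e)}"))
        yield "", history

history = [("질문", "첫 번째 답변")]
for _, h in continue_demo(history):
    print(h[-1][1])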
@@ -112,7 +111,6 @@ with gr.Blocks() as demo:
         top_p = gr.Slider(minimum=0.1, maximum=1.0, value=0.90, step=0.05, label="Top-p (핵 샘플링)")
         model = gr.Radio(list(models.keys()), value=list(models.keys())[0], label="언어 모델 선택", info="사용할 언어 모델을 선택하세요")

-    # JavaScript를 사용하여 키 입력 동작 커스터마이즈
     msg.javascript = """
     (x) => {
         const textbox = document.querySelector("#component-3");
@@ -130,7 +128,6 @@ with gr.Blocks() as demo:
     }
     """

-    # 이벤트 핸들러 설정
     send.click(respond, inputs=[msg, chatbot, system_message, max_tokens, temperature, top_p, model], outputs=[msg, chatbot])
     msg.submit(respond, inputs=[msg, chatbot, system_message, max_tokens, temperature, top_p, model], outputs=[msg, chatbot])
     continue_btn.click(continue_writing, inputs=[chatbot, system_message, max_tokens, temperature, top_p, model], outputs=[msg, chatbot])
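Because both handlers stream through get_model_response, stop_event is what lets the UI interrupt generation: the token loop checks stop_event.is_set() on every chunk and breaks out. The stop button's wiring is not part of the hunks shown above, so the following is only a plausible sketch of how stop_generation could be hooked up:

import threading

import gradio as gr

stop_event = threading.Event()

def stop_generation():
    stop_event.set()

with gr.Blocks() as demo:
    # Hypothetical button; app.py's actual stop control is not shown in this diff.
    stop_btn = gr.Button("생성 중지")
    stop_btn.click(stop_generation, inputs=None, outputs=None)

In a real handler the event would typically be cleared with stop_event.clear() at the start of each new request, so an earlier stop does not cancel the next generation.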