AIRider committed on
Commit
9a9e197
·
verified ·
1 Parent(s): 29e7d78

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +14 -15
app.py CHANGED
@@ -24,31 +24,30 @@ def respond(message, chat_history, system_message, max_tokens, temperature, top_
24
  messages.extend([{"role": "user" if i % 2 == 0 else "assistant", "content": m} for h in chat_history for i, m in enumerate(h) if m])
25
  messages.append({"role": "user", "content": message})
26
 
 
27
  try:
28
- full_response = ""
29
- for chunk in client.text_generation(
30
- prompt="\n".join([f"{m['role']}: {m['content']}" for m in messages]),
31
- max_new_tokens=max_tokens,
32
  temperature=temperature,
33
  top_p=top_p,
34
  stream=True
35
  ):
36
  if stop_event.is_set():
37
  break
38
- if chunk:
39
- full_response += chunk
40
- yield chat_history + [(message, full_response)]
41
-
42
- chat_history.append((message, full_response))
43
- yield chat_history
 
44
  except Exception as e:
45
- error_message = f"오류 발생: {str(e)}"
46
- chat_history.append((message, error_message))
47
- yield chat_history
48
 
49
  def continue_writing(message, chat_history, system_message, max_tokens, temperature, top_p, selected_model):
50
  if not chat_history:
51
- return [("시스템", "대화 내역이 없습니다.")]
52
  last_user_message = chat_history[-1][0]
53
  last_assistant_message = chat_history[-1][1]
54
  prompt = f"이전 대화를 계속 이어서 작성해주세요. 이전 응답: {last_assistant_message}"
@@ -60,7 +59,7 @@ def stop_generation():
60
 
61
def regenerate(chat_history, system_message, max_tokens, temperature, top_p, selected_model):
    """Regenerate the assistant's last answer by replaying the final user message.

    With an empty history there is nothing to regenerate, so a system-style
    chat tuple is returned instead of calling the model.
    """
    if not chat_history:
        # No prior turns: surface a notice in chat-tuple form, skip the model call.
        return [("시스템", "대화 내역이 없습니다.")]
    last_turn_user = chat_history[-1][0]
    trimmed_history = chat_history[:-1]
    # Re-ask the same user message against the history minus the last exchange.
    return respond(last_turn_user, trimmed_history, system_message,
                   max_tokens, temperature, top_p, selected_model)
66
 
 
24
  messages.extend([{"role": "user" if i % 2 == 0 else "assistant", "content": m} for h in chat_history for i, m in enumerate(h) if m])
25
  messages.append({"role": "user", "content": message})
26
 
27
+ response = ""
28
  try:
29
+ for chunk in client.chat_completion(
30
+ messages,
31
+ max_tokens=max_tokens,
 
32
  temperature=temperature,
33
  top_p=top_p,
34
  stream=True
35
  ):
36
  if stop_event.is_set():
37
  break
38
+ if hasattr(chunk.choices[0], 'delta'):
39
+ token = chunk.choices[0].delta.content
40
+ else:
41
+ token = chunk.choices[0].text
42
+ if token:
43
+ response += token
44
+ yield response
45
  except Exception as e:
46
+ yield f"오류 발생: {str(e)}"
 
 
47
 
48
  def continue_writing(message, chat_history, system_message, max_tokens, temperature, top_p, selected_model):
49
  if not chat_history:
50
+ return "대화 내역이 없습니다."
51
  last_user_message = chat_history[-1][0]
52
  last_assistant_message = chat_history[-1][1]
53
  prompt = f"이전 대화를 계속 이어서 작성해주세요. 이전 응답: {last_assistant_message}"
 
59
 
60
def regenerate(chat_history, system_message, max_tokens, temperature, top_p, selected_model):
    """Re-run the last user turn through `respond`, dropping the previous reply.

    Returns a plain notice string when there is no history to regenerate from.
    """
    # Guard: nothing to regenerate without at least one prior exchange.
    if not chat_history:
        return "대화 내역이 없습니다."
    previous_turn = chat_history[-1]
    # Replay the same user message with that final exchange removed.
    return respond(previous_turn[0], chat_history[:-1], system_message,
                   max_tokens, temperature, top_p, selected_model)
65