NCTCMumbai committed (verified)
Commit 222ff4e · Parent: 24c9a38

Update backend/query_llm.py

Files changed (1)
  1. backend/query_llm.py +9 -4
backend/query_llm.py CHANGED
@@ -106,14 +106,19 @@ def generate_hf(prompt: str, history: str, temperature: float = 0.5, max_new_tok
     return "I do not know what happened, but I couldn't understand you."
 
 def generate_qwen(formatted_prompt: str, history: str):
-    response = client.predict(
+    stream = client.predict(
         query=formatted_prompt,
-        history=history,
+        history=[],
         system="You are a helpful assistant.",
         api_name="/model_chat"
     )
-    print('Response:',response)
-    return response[1][0][1]
+    print('Response:',stream)
+    output = ""
+    for response in stream:
+        output += response.token.text
+        yield output
+    return output
+    #return response[1][0][1]
 
 
 
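The practical effect of this commit is that generate_qwen becomes a generator: it yields the accumulated reply as tokens arrive instead of returning a single string, and it now passes history=[] to the endpoint, so prior turns are no longer forwarded. Callers therefore have to iterate over it rather than call it once. Below is a minimal sketch of such a caller; the prompt string and variable names are illustrative assumptions, not part of the commit.

# Hypothetical caller for the streaming generate_qwen above. Each yielded
# value is the reply accumulated so far, so keeping only the last one
# recovers the full answer once the stream is exhausted.
final_reply = ""
for partial in generate_qwen("What does this repo do?", history=""):
    final_reply = partial
print(final_reply)

In a UI loop (e.g. a Gradio chat callback), one would instead forward each intermediate partial value to the interface so the reply renders progressively.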