Update app.py
app.py CHANGED
@@ -116,7 +116,6 @@ def stream_chat(message: str, history: list, model: str, temperature: float, max
     response = client.chat(
         model=model,
         messages=conversation,
-        stream=True,
         options={
             'num_predict': max_new_tokens,
             'temperature': temperature,
@@ -128,7 +127,7 @@ def stream_chat(message: str, history: list, model: str, temperature: float, max
         },
     )

-
+    yield response



@@ -142,6 +141,7 @@ def main(message: str, history: list, model: str, temperature: float, max_new_to
     else:
         if not process:
             launch()
+
         response = stream_chat(
             message,
             history,
@@ -153,6 +153,8 @@ def main(message: str, history: list, model: str, temperature: float, max_new_to
             penalty
         )

+        print(response)
+
         buffer = ""
         for chunk in response:
             buffer += chunk["message"]["content"]
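For context, not part of the commit: in the Ollama Python client, client.chat() returns a generator of incremental chunks when called with stream=True, and a single complete response otherwise. Because the updated stream_chat still yields that single response, the caller's for-loop over the result keeps working; it simply runs once with the full reply. A minimal sketch under those assumptions (the model name and prompt are illustrative, not taken from the app):

from ollama import Client

client = Client()  # assumes a local Ollama server on the default port

def stream_chat(message: str):
    # Without stream=True, client.chat returns one complete response
    # rather than a generator of incremental chunks.
    response = client.chat(
        model='llama3',  # illustrative model name, not from the app
        messages=[{'role': 'user', 'content': message}],
    )
    # Yielding keeps stream_chat a generator, so the caller can still
    # iterate over it; the loop just runs exactly once.
    yield response

buffer = ""
for chunk in stream_chat("Hello!"):
    buffer += chunk["message"]["content"]
print(buffer)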