Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -127,13 +127,14 @@ def stream_chat(message: str, history: list, model: str, temperature: float, max
             "keep_alive": "60s",
         },
     )
-
+
     buffer = ""
     for chunk in response:
         buffer += chunk["message"]["content"]
         yield buffer


+
 def main(message: str, history: list, model: str, temperature: float, max_new_tokens: int, top_p: float, top_k: int, penalty: float):
     if message.startswith("/"):
         resp = ollama_func(message)
@@ -144,7 +145,7 @@ def main(message: str, history: list, model: str, temperature: float, max_new_to
     else:
         if not process:
             launch()
-        stream_chat(
+        answer = stream_chat(
             message,
             history,
             model,
@@ -154,6 +155,7 @@ def main(message: str, history: list, model: str, temperature: float, max_new_to
             top_k,
             penalty
         )
+        yield answer


 chatbot = gr.Chatbot(height=600, placeholder=DESCRIPTION)
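For context on the change above: `stream_chat` is a generator that yields a progressively longer `buffer` string, so `answer = stream_chat(...)` only creates a generator object, and `yield answer` hands that object to Gradio in a single step instead of re-emitting each partial buffer. A minimal sketch of a dispatch that keeps the streaming behaviour, assuming `ollama_func`, `process`, and `launch` are the helpers defined elsewhere in this app.py (the diff hides what the original does with the slash-command result), could delegate with `yield from`:

# Sketch only, not the committed code. stream_chat, ollama_func, process and
# launch are assumed to be the helpers defined elsewhere in this app.py.
def main(message: str, history: list, model: str, temperature: float,
         max_new_tokens: int, top_p: float, top_k: int, penalty: float):
    if message.startswith("/"):
        # Assumption for this sketch: the slash-command reply is shown as-is;
        # the hidden diff context may handle it differently.
        yield ollama_func(message)
    else:
        if not process:
            launch()
        # Re-yield every partial buffer from the stream_chat generator so the
        # chatbot updates as tokens arrive from Ollama.
        yield from stream_chat(
            message,
            history,
            model,
            temperature,
            max_new_tokens,
            top_p,
            top_k,
            penalty,
        )

Gradio treats a generator handler as a streaming one and replaces the pending chatbot reply with each yielded string, which is why re-yielding the partial buffers rather than the generator object matters here.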