Update app.py
app.py CHANGED
@@ -136,7 +136,7 @@ async def stream_chat(message: str, history: list, model: str, temperature: floa
         yield buffer


-def main(message: str, history: list, model: str, temperature: float, max_new_tokens: int, top_p: float, top_k: int, penalty: float):
+async def main(message: str, history: list, model: str, temperature: float, max_new_tokens: int, top_p: float, top_k: int, penalty: float):
     if message.startswith("/"):
         resp = ollama_func(message)
         yield resp
@@ -147,7 +147,7 @@ def main(message: str, history: list, model: str, temperature: float, max_new_to
     if not process:
         launch()

-    response = stream_chat(
+    response = await stream_chat(
         message,
         history,
         model,
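For reference, a minimal self-contained sketch of the pattern this commit moves to: the chat handler becomes a coroutine (async def main) so it can await the asynchronous stream_chat helper. The helper body, the demo driver, and the argument values below are hypothetical stand-ins for illustration, not the Space's actual code.

import asyncio

# Hypothetical stand-in for the Space's async streaming helper.
async def stream_chat(message: str, history: list, model: str) -> str:
    await asyncio.sleep(0)  # placeholder for non-blocking model I/O
    return f"[{model}] echo: {message}"

# After the change: an async generator handler that can await coroutines.
async def main(message: str, history: list, model: str):
    if message.startswith("/"):
        yield f"command: {message}"  # mirrors the ollama_func branch in the diff
        return
    response = await stream_chat(message, history, model)
    yield response

# Minimal driver showing how an async generator handler is consumed.
async def demo():
    async for chunk in main("hello", [], "example-model"):
        print(chunk)

asyncio.run(demo())

A plain def main cannot contain await, so switching the handler to async def is what makes awaiting stream_chat possible here.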