Update app.py
Browse files
app.py
CHANGED
|
@@ -74,12 +74,12 @@ async def complete(gen:GenModel):
|
|
| 74 |
output = llama.create_chat_completion(
|
| 75 |
messages=[
|
| 76 |
{"role": "system", "content": gen.system},
|
| 77 |
-
{"role": "user", "content": gen.question},
|
| 78 |
],
|
| 79 |
temperature=gen.temperature,
|
| 80 |
seed=gen.seed,
|
| 81 |
#stream=True
|
| 82 |
)
|
|
|
|
| 83 |
print(output)
|
| 84 |
"""
|
| 85 |
for chunk in output:
|
|
@@ -94,6 +94,7 @@ async def complete(gen:GenModel):
|
|
| 94 |
"""
|
| 95 |
et = time()
|
| 96 |
output["time"] = et - st
|
|
|
|
| 97 |
return output
|
| 98 |
except Exception as e:
|
| 99 |
logger.error(f"Error in /complete endpoint: {e}")
|
|
|
|
| 74 |
output = llama.create_chat_completion(
|
| 75 |
messages=[
|
| 76 |
{"role": "system", "content": gen.system},
|
|
|
|
| 77 |
],
|
| 78 |
temperature=gen.temperature,
|
| 79 |
seed=gen.seed,
|
| 80 |
#stream=True
|
| 81 |
)
|
| 82 |
+
messages.append({"role": "user", "content": gen.question})
|
| 83 |
print(output)
|
| 84 |
"""
|
| 85 |
for chunk in output:
|
|
|
|
| 94 |
"""
|
| 95 |
et = time()
|
| 96 |
output["time"] = et - st
|
| 97 |
+
messages.append({"role": "assistant", "content": output['choices'][0]['message']})
|
| 98 |
return output
|
| 99 |
except Exception as e:
|
| 100 |
logger.error(f"Error in /complete endpoint: {e}")
|