Update app.py
Browse files
app.py
CHANGED
|
@@ -53,23 +53,24 @@ def get_assistant_aswer(st_model, st_messages, st_temp_value, st_max_tokens):
|
|
| 53 |
st.write ("model: " + st_model)
|
| 54 |
st.write ("temp: " + str(st_temp_value))
|
| 55 |
st.write ("max_tokens: " + str(st_max_tokens))
|
| 56 |
-
|
| 57 |
-
|
| 58 |
-
|
| 59 |
-
|
| 60 |
-
|
| 61 |
-
|
| 62 |
-
|
| 63 |
-
|
| 64 |
-
|
| 65 |
-
|
| 66 |
-
|
| 67 |
-
|
| 68 |
-
|
| 69 |
-
|
| 70 |
-
|
| 71 |
-
|
| 72 |
-
|
|
|
|
| 73 |
|
| 74 |
return response
|
| 75 |
|
|
|
|
| 53 |
st.write ("model: " + st_model)
|
| 54 |
st.write ("temp: " + str(st_temp_value))
|
| 55 |
st.write ("max_tokens: " + str(st_max_tokens))
|
| 56 |
+
|
| 57 |
+
#try:
|
| 58 |
+
stream = client.chat.completions.create(
|
| 59 |
+
model=st_model,
|
| 60 |
+
messages=[
|
| 61 |
+
{"role": m["role"], "content": m["content"]}
|
| 62 |
+
for m in st_messages
|
| 63 |
+
],
|
| 64 |
+
temperature=st_temp_value,
|
| 65 |
+
stream=True,
|
| 66 |
+
max_tokens=st_max_tokens,
|
| 67 |
+
)
|
| 68 |
+
|
| 69 |
+
for chunk in stream:
|
| 70 |
+
response += chunk.choices[0].delta.content
|
| 71 |
+
|
| 72 |
+
# except Exception as e:
|
| 73 |
+
# response = "😵💫 Looks like someone unplugged something!"
|
| 74 |
|
| 75 |
return response
|
| 76 |
|