Update utils/mistral.py
utils/mistral.py  CHANGED  (+3 -2)
@@ -66,7 +66,7 @@ def Model_ProfessionalDetails_Output(resume, client):
     }
 
     response = ""
-    for message in client.chat_completion(messages=[system_role, user_prompt], max_tokens=4096, stream=True, temperature=0.35):
+    for message in client.chat_completion(messages=[system_role, user_prompt], max_tokens=4096, stream=True, temperature=0.35, return_full_text=False):
         response += message.choices[0].delta.content
 
     try:
@@ -102,7 +102,7 @@ def Model_EducationalDetails_Output(resume, client):
     }
 
     response = ""
-    for message in client.chat_completion(messages=[system_role, user_prompt], max_tokens=4096, stream=True, temperature=0.35):
+    for message in client.chat_completion(messages=[system_role, user_prompt], max_tokens=4096, stream=True, temperature=0.35, return_full_text=False):
         response += message.choices[0].delta.content
 
     try:
@@ -144,6 +144,7 @@ def Model_PersonalDetails_Output(resume, client):
         max_tokens=3000,
         stream=True,
         temperature=0.35,
+        return_full_text=False
     ):
         response += message.choices[0].delta.content
 
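For context, all three helpers touched here (Model_ProfessionalDetails_Output, Model_EducationalDetails_Output, Model_PersonalDetails_Output) use the same streaming pattern: call client.chat_completion with stream=True and concatenate the delta chunks into a response string, which this commit extends by passing return_full_text=False. Below is a minimal sketch of that loop, assuming huggingface_hub's InferenceClient; the model id and prompt contents are placeholders, the None guard on the final chunk is an addition for robustness, and the commit's extra return_full_text=False keyword is omitted, so this illustrates the pattern rather than reproducing utils/mistral.py exactly.

# Minimal sketch of the streaming loop used by the Model_*_Output helpers.
# Assumptions: huggingface_hub's InferenceClient; the model id and prompt
# contents below are placeholders, not taken from the repository.
from huggingface_hub import InferenceClient

client = InferenceClient("mistralai/Mistral-7B-Instruct-v0.2")  # placeholder model id

system_role = {"role": "system", "content": "Extract the requested resume details as JSON."}
user_prompt = {"role": "user", "content": "<resume text goes here>"}

response = ""
# stream=True makes chat_completion yield chunks; each chunk carries an
# incremental piece of the answer in choices[0].delta.content.
for message in client.chat_completion(
    messages=[system_role, user_prompt],
    max_tokens=4096,
    stream=True,
    temperature=0.35,
):
    response += message.choices[0].delta.content or ""  # final chunk's content may be None

print(response)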