Update utils/mistral.py
utils/mistral.py  +6 −6  CHANGED
@@ -69,8 +69,8 @@ def Model_ProfessionalDetails_Output(resume, client):
     for message in client.chat_completion(messages=[system_role, user_prompt], max_tokens=4096, stream=True, temperature=0.35, response_format={"type": "json"}):
         response += message.choices[0].delta.content
 
-    data = client.chat_completion(messages=[system_role, user_prompt], max_tokens=3000, stream=False, temperature=0.35, response_format={"type": "json"})
-    print("This is without stream data ",data)
+    #data = client.chat_completion(messages=[system_role, user_prompt], max_tokens=3000, stream=False, temperature=0.35, response_format={"type": "json"})
+    #print("This is without stream data ",data)
 
     try:
         clean_response = Data_Cleaner(response)
@@ -107,8 +107,8 @@ def Model_EducationalDetails_Output(resume, client):
     response = ""
     for message in client.chat_completion(messages=[system_role, user_prompt], max_tokens=4096, stream=True, temperature=0.35, response_format={"type": "json"}):
         response += message.choices[0].delta.content
-    data = client.chat_completion(messages=[system_role, user_prompt], max_tokens=3000, stream=False, temperature=0.35, response_format={"type": "json"})
-    print("This is without stream data ",data)
+    #data = client.chat_completion(messages=[system_role, user_prompt], max_tokens=3000, stream=False, temperature=0.35, response_format={"type": "json"})
+    #print("This is without stream data ",data)
 
     try:
         clean_response = Data_Cleaner(response)
@@ -147,8 +147,8 @@ def Model_PersonalDetails_Output(resume, client):
     for message in client.chat_completion(messages=[system_role, user_prompt], max_tokens=3000, stream=True, temperature=0.35, response_format={"type": "json"}):
         response += message.choices[0].delta.content
 
-    data = client.chat_completion(messages=[system_role, user_prompt], max_tokens=3000, stream=False, temperature=0.35, response_format={"type": "json"})
-    print("This is without stream data ",data)
+    #data = client.chat_completion(messages=[system_role, user_prompt], max_tokens=3000, stream=False, temperature=0.35, response_format={"type": "json"})
+    #print("This is without stream data ",data)
 
     # Handle cases where the response might have formatting issues
     try: