WebashalarForML committed (verified)
Commit cecd0ec · Parent(s): 10c3633

Update utils/mistral.py

Files changed (1): utils/mistral.py (+11 -9)
utils/mistral.py CHANGED
@@ -66,8 +66,11 @@ def Model_ProfessionalDetails_Output(resume, client):
     }
 
     response = ""
-    for message in client.chat_completion(messages=[system_role, user_prompt], max_tokens=4096, stream=True, temperature=0.35, return_full_text=False):
+    for message in client.chat_completion(messages=[system_role, user_prompt], max_tokens=4096, stream=True, temperature=0.35, response_format={"type": "json"}):
         response += message.choices[0].delta.content
+
+    data = client.chat_completion(messages=[system_role, user_prompt], max_tokens=3000, stream=False, temperature=0.35, response_format={"type": "json"})
+    print("This is without stream data ",data)
 
     try:
         clean_response = Data_Cleaner(response)
@@ -102,8 +105,10 @@ def Model_EducationalDetails_Output(resume, client):
     }
 
     response = ""
-    for message in client.chat_completion(messages=[system_role, user_prompt], max_tokens=4096, stream=True, temperature=0.35, return_full_text=False):
+    for message in client.chat_completion(messages=[system_role, user_prompt], max_tokens=4096, stream=True, temperature=0.35, response_format={"type": "json"}):
         response += message.choices[0].delta.content
+    data = client.chat_completion(messages=[system_role, user_prompt], max_tokens=3000, stream=False, temperature=0.35, response_format={"type": "json"})
+    print("This is without stream data ",data)
 
     try:
         clean_response = Data_Cleaner(response)
@@ -139,15 +144,12 @@ def Model_PersonalDetails_Output(resume, client):
 
     # Response
    response = ""
-    for message in client.chat_completion(
-        messages=[system_role, user_prompt],
-        max_tokens=3000,
-        stream=True,
-        temperature=0.35,
-        return_full_text=False
-    ):
+    for message in client.chat_completion(messages=[system_role, user_prompt], max_tokens=3000, stream=True, temperature=0.35, response_format={"type": "json"}):
         response += message.choices[0].delta.content
 
+    data = client.chat_completion(messages=[system_role, user_prompt], max_tokens=3000, stream=False, temperature=0.35, response_format={"type": "json"})
+    print("This is without stream data ",data)
+
     # Handle cases where the response might have formatting issues
     try:
         #print('The Og response:-->',response)
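
For context, the streaming pattern this commit standardizes on can be sketched in isolation. The following is a minimal, self-contained version, assuming huggingface_hub's InferenceClient; the model ID and the two prompt messages are placeholders, and json.loads stands in for the repo's own Data_Cleaner helper. The switch away from return_full_text (a text-generation parameter that chat_completion does not take) in favor of response_format={"type": "json"} appears to be the point of the change.

import json
from huggingface_hub import InferenceClient

# Hypothetical model choice; the commit does not show how `client` is built.
client = InferenceClient(model="mistralai/Mistral-7B-Instruct-v0.3")

system_role = {"role": "system", "content": "Return the requested resume fields as JSON."}
user_prompt = {"role": "user", "content": "Resume text goes here."}

# Streaming call, mirroring the diff: each chunk carries a partial delta.
response = ""
for message in client.chat_completion(
    messages=[system_role, user_prompt],
    max_tokens=4096,
    stream=True,
    temperature=0.35,
    response_format={"type": "json"},  # JSON mode, as used throughout the diff
):
    # Some chunks can carry an empty delta; guard so the concatenation never sees None.
    response += message.choices[0].delta.content or ""

data = json.loads(response)  # the repo runs Data_Cleaner(response) first
print(data)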
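
The newly added non-streaming call returns one complete object rather than delta chunks; judging by the print, it looks like a debugging aid, though note it issues the same request a second time. Under the same assumptions as above, the generated text sits on the message rather than the delta:

# Non-streaming variant from the diff: stream=False yields a single
# completion object instead of an iterator of chunks.
data = client.chat_completion(
    messages=[system_role, user_prompt],
    max_tokens=3000,
    stream=False,
    temperature=0.35,
    response_format={"type": "json"},
)
print("This is without stream data ", data)  # whole object, as printed in the diff
print(data.choices[0].message.content)       # just the generated JSON text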