WebashalarForML committed · Commit b8149ed · verified · 1 Parent(s): 8ae4b48

Update utils/mistral.py

Files changed (1):
  1. utils/mistral.py +6 -6
utils/mistral.py CHANGED

@@ -66,10 +66,10 @@ def Model_ProfessionalDetails_Output(resume, client):
     }
 
     response = ""
-    for message in client.chat_completion(messages=[system_role, user_prompt], max_tokens=4096, stream=True, temperature=0.35):
+    for message in client.chat_completion(messages=[system_role, user_prompt], max_tokens=4096, stream=True):#, temperature=0.35):
         response += message.choices[0].delta.content
 
-    data = client.chat_completion(messages=[system_role, user_prompt], max_tokens=3000, stream=False, temperature=0.35)
+    data = client.chat_completion(messages=[system_role, user_prompt], max_tokens=3000, stream=False)#, temperature=0.35)
     print("This is without stream data ",data.choices[0].message.content)
 
     try:
@@ -105,9 +105,9 @@ def Model_EducationalDetails_Output(resume, client):
     }
 
     response = ""
-    for message in client.chat_completion(messages=[system_role, user_prompt], max_tokens=4096, stream=True, temperature=0.35):
+    for message in client.chat_completion(messages=[system_role, user_prompt], max_tokens=4096, stream=True):#, temperature=0.35):
         response += message.choices[0].delta.content
-    data = client.chat_completion(messages=[system_role, user_prompt], max_tokens=3000, stream=False, temperature=0.35)
+    data = client.chat_completion(messages=[system_role, user_prompt], max_tokens=3000, stream=False)#, temperature=0.35)
     print("This is without stream data ",data.choices[0].message.content)
 
     try:
@@ -144,10 +144,10 @@ def Model_PersonalDetails_Output(resume, client):
 
     # Response
     response = ""
-    for message in client.chat_completion(messages=[system_role, user_prompt], max_tokens=3000, stream=True, temperature=0.35):
+    for message in client.chat_completion(messages=[system_role, user_prompt], max_tokens=3000, stream=True):#, temperature=0.35):
         response += message.choices[0].delta.content
 
-    data = client.chat_completion(messages=[system_role, user_prompt], max_tokens=3000, stream=False, temperature=0.35)
+    data = client.chat_completion(messages=[system_role, user_prompt], max_tokens=3000, stream=False)#, temperature=0.35)
     print("This is without stream data ",data.choices[0].message.content)
 
     # Handle cases where the response might have formatting issues
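For context, below is a minimal sketch of the two call styles this commit touches, assuming the `client` here is huggingface_hub's `InferenceClient`. The model name, message contents, and the `None`-guard on streamed delta chunks are illustrative assumptions, not code from this repository.

from huggingface_hub import InferenceClient

# Assumed model; the repository's actual client configuration is not shown in this diff.
client = InferenceClient(model="mistralai/Mistral-7B-Instruct-v0.3")

system_role = {"role": "system", "content": "Extract details from the resume as JSON."}
user_prompt = {"role": "user", "content": "John Doe, Python developer, 5 years of experience..."}

# Streaming: each chunk carries an incremental delta. delta.content can be None
# on some chunks (e.g. the final one), so guard before concatenating.
response = ""
for message in client.chat_completion(
    messages=[system_role, user_prompt], max_tokens=4096, stream=True
):
    chunk = message.choices[0].delta.content
    if chunk:
        response += chunk

# Non-streaming: the full completion arrives in a single object.
data = client.chat_completion(
    messages=[system_role, user_prompt], max_tokens=3000, stream=False
)
print(data.choices[0].message.content)

Commenting out `temperature=0.35`, as the commit does in all three functions, simply omits the parameter so the endpoint falls back to its default sampling temperature; the rest of the call pattern is unchanged.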