Koomemartin committed on
Commit
da0656f
·
verified ·
1 Parent(s): e4ea8a4

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +4 -4
app.py CHANGED
@@ -116,22 +116,22 @@ if user_query:
116
  chat_streaming = client.chat.completions.create(
117
  messages=[
118
  {"role": "system", "content": "You are a helpful assistant specializing in extracting and analyzing website content. Provide information required by the user based on the website information provided. Ensure responses are clear, concise, and formatted in Markdown for better readability. use your knowledge to add relevant inforation to the users query"},
119
- {"role": "user", "content": f"{user_query} \n Here's the content to use:\n{website} \n Know respond appropriately"}
120
  ],
121
  model="llama3-groq-70b-8192-tool-use-preview",
122
  temperature=0.9,
123
  max_tokens=2042,
124
- top_p=1,
125
  stream=True,
126
  )
127
  # st.write('Passed model')
128
 
129
  except Exception as e:
130
- st.error(f"Failed to process query: {e}")
131
  response = ""
132
  try:
133
  for chunk in chat_streaming:
134
- content = chunk.choices[0].delta.content
135
  if content: # Ensure content is not None
136
  response += content
137
  st.write("🤖:")
 
116
  chat_streaming = client.chat.completions.create(
117
  messages=[
118
  {"role": "system", "content": "You are a helpful assistant specializing in extracting and analyzing website content. Provide information required by the user based on the website information provided. Ensure responses are clear, concise, and formatted in Markdown for better readability. use your knowledge to add relevant inforation to the users query"},
119
+ {"role": "user", "content": f" {user_query} \n Here's the content to use:\n {website} \n Know respond appropriately"}
120
  ],
121
  model="llama3-groq-70b-8192-tool-use-preview",
122
  temperature=0.9,
123
  max_tokens=2042,
124
+ top_p=0.6,
125
  stream=True,
126
  )
127
  # st.write('Passed model')
128
 
129
  except Exception as e:
130
+ st.error(f"Failed to process query to model: {e}")
131
  response = ""
132
  try:
133
  for chunk in chat_streaming:
134
+ content = chunk.get('choices', [{}])[0].get('delta', {}).get('content', None)
135
  if content: # Ensure content is not None
136
  response += content
137
  st.write("🤖:")