Shreyas094 committed (verified)
Commit fac37d6 · 1 Parent(s): a1bfede

Update app.py

Files changed (1): app.py (+1, -1)
app.py CHANGED
@@ -537,7 +537,7 @@ def get_response_from_llama(query, model, selected_docs, file_type, num_calls=1,
     # Generate content with streaming enabled
     for response in client.chat_completion(
         messages=messages,  # Pass messages in the required format
-        max_new_tokens=1000,  # Reduced to ensure we stay within token limits
+        max_tokens=1000,  # Reduced to ensure we stay within token limits
        temperature=temperature,
        stream=True,
        repetition_penalty=1.1,
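
For context, max_new_tokens is the length parameter used by InferenceClient.text_generation, whereas chat_completion expects max_tokens; the commit switches to the keyword that chat_completion actually accepts. Below is a minimal sketch of the corrected streaming call, assuming huggingface_hub's InferenceClient; the model id, prompt, and temperature value are illustrative placeholders, not taken from app.py.

# Sketch of the corrected streaming chat_completion call (placeholders noted below).
from huggingface_hub import InferenceClient

client = InferenceClient("meta-llama/Meta-Llama-3-8B-Instruct")  # hypothetical model id

messages = [{"role": "user", "content": "Summarize the selected documents."}]  # placeholder prompt

response_text = ""
for chunk in client.chat_completion(
    messages=messages,   # chat-format messages, as in the diff
    max_tokens=1000,     # chat_completion takes max_tokens, not max_new_tokens
    temperature=0.2,     # placeholder value; app.py passes a variable here
    stream=True,
):
    # Each streamed chunk carries an incremental delta of the assistant message.
    response_text += chunk.choices[0].delta.content or ""

print(response_text)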