Shreyas094 committed on
Commit
685135d
·
verified ·
1 Parent(s): eb21d5d

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +7 -7
app.py CHANGED
@@ -476,7 +476,7 @@ def get_response_from_llama(query, model, selected_docs, file_type, num_calls=1,
476
 
477
  client = InferenceClient(model, token=huggingface_token)
478
  logging.info("InferenceClient initialized")
479
-
480
  if file_type == "excel":
481
  # Excel functionality
482
  system_instruction = """You are a highly specialized Python programmer with deep expertise in data analysis and visualization using Excel spreadsheets.
@@ -496,7 +496,7 @@ def get_response_from_llama(query, model, selected_docs, file_type, num_calls=1,
496
  {"role": "system", "content": system_instruction},
497
  {"role": "user", "content": f"Based on the following data extracted from Excel spreadsheets:\n{context}\n\nPlease provide the Python code needed to execute the following task: '{query}'. Ensure that the code is derived directly from the dataset. If a chart is requested, use the matplotlib library to generate the appropriate visualization."}
498
  ]
499
-
500
  elif file_type == "pdf":
501
  # PDF functionality
502
  embed = get_embeddings()
@@ -519,15 +519,15 @@ def get_response_from_llama(query, model, selected_docs, file_type, num_calls=1,
519
  else:
520
  raise ValueError("Invalid file type. Use 'excel' or 'pdf'.")
521
 
522
- logging.info(f"Prepared messages: {messages}")
523
-
524
  full_response = ""
525
  for i in range(num_calls):
526
  logging.info(f"Starting API call {i+1}/{num_calls}")
527
  try:
528
- for message in client.chat.completions.create(
529
  messages=messages,
530
- max_tokens=2000,
531
  temperature=temperature,
532
  stream=True,
533
  ):
@@ -858,7 +858,7 @@ demo = gr.ChatInterface(
858
  ],
859
  title="AI-powered PDF Chat and Web Search Assistant",
860
  description="Chat with your PDFs or use web search to answer questions.",
861
- theme=gr.Theme.from_hub("JohnSmith9982/small_and_pretty"),
862
  css=css,
863
  examples=[
864
  ["Tell me about the contents of the uploaded PDFs."],
 
476
 
477
  client = InferenceClient(model, token=huggingface_token)
478
  logging.info("InferenceClient initialized")
479
+
480
  if file_type == "excel":
481
  # Excel functionality
482
  system_instruction = """You are a highly specialized Python programmer with deep expertise in data analysis and visualization using Excel spreadsheets.
 
496
  {"role": "system", "content": system_instruction},
497
  {"role": "user", "content": f"Based on the following data extracted from Excel spreadsheets:\n{context}\n\nPlease provide the Python code needed to execute the following task: '{query}'. Ensure that the code is derived directly from the dataset. If a chart is requested, use the matplotlib library to generate the appropriate visualization."}
498
  ]
499
+
500
  elif file_type == "pdf":
501
  # PDF functionality
502
  embed = get_embeddings()
 
519
  else:
520
  raise ValueError("Invalid file type. Use 'excel' or 'pdf'.")
521
 
522
+ # logging.info(f"Prepared messages: {messages}")
523
+
524
  full_response = ""
525
  for i in range(num_calls):
526
  logging.info(f"Starting API call {i+1}/{num_calls}")
527
  try:
528
+ for message in client.chat.completion(
529
  messages=messages,
530
+ max_tokens=2048,
531
  temperature=temperature,
532
  stream=True,
533
  ):
 
858
  ],
859
  title="AI-powered PDF Chat and Web Search Assistant",
860
  description="Chat with your PDFs or use web search to answer questions.",
861
+ theme=gr.Theme.from_hub("allenai/gradio-theme"),
862
  css=css,
863
  examples=[
864
  ["Tell me about the contents of the uploaded PDFs."],