Shreyas094 committed on
Commit
fdf4790
·
verified ·
1 Parent(s): be60a1d

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +5 -5
app.py CHANGED
@@ -337,16 +337,16 @@ def get_response_from_pdf(query, model, num_calls=3, temperature=0.2):
337
  relevant_docs = retriever.get_relevant_documents(query)
338
  context_str = "\n".join([doc.page_content for doc in relevant_docs])
339
 
340
- prompt = f"""Using the following context from the PDF documents:
341
- {context_str}
342
- Write a detailed and complete response that answers the following user question: '{query}'"""
343
-
344
  if model == "@cf/meta/llama-3.1-8b-instruct":
345
  # Use Cloudflare API with the retrieved context
346
- for response in get_response_from_cloudflare(prompt, context_str, query, num_calls, temperature):
347
  yield response
348
  else:
349
  # Use Hugging Face API
 
 
 
 
350
  client = InferenceClient(model, token=huggingface_token)
351
 
352
  response = ""
 
337
  relevant_docs = retriever.get_relevant_documents(query)
338
  context_str = "\n".join([doc.page_content for doc in relevant_docs])
339
 
 
 
 
 
340
  if model == "@cf/meta/llama-3.1-8b-instruct":
341
  # Use Cloudflare API with the retrieved context
342
+ for response in get_response_from_cloudflare(prompt="", context=context_str, query=query, num_calls=num_calls, temperature=temperature):
343
  yield response
344
  else:
345
  # Use Hugging Face API
346
+ prompt = f"""Using the following context from the PDF documents:
347
+ {context_str}
348
+ Write a detailed and complete response that answers the following user question: '{query}'"""
349
+
350
  client = InferenceClient(model, token=huggingface_token)
351
 
352
  response = ""