Shreyas094 committed on
Commit
28413b4
·
verified ·
1 Parent(s): caecf96

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +48 -4
app.py CHANGED
@@ -232,10 +232,18 @@ def respond(message, history, model, temperature, num_calls, use_web_search):
232
  logging.info(f"Generated Response (first line): {first_line}")
233
  yield response
234
  else:
235
- for partial_response in get_response_from_pdf(message, model, num_calls=num_calls, temperature=temperature):
236
- first_line = partial_response.split('\n')[0] if partial_response else ''
237
- logging.info(f"Generated Response (first line): {first_line}")
238
- yield partial_response
 
 
 
 
 
 
 
 
239
  except Exception as e:
240
  logging.error(f"Error with {model}: {str(e)}")
241
  if "microsoft/Phi-3-mini-4k-instruct" in model:
@@ -245,6 +253,42 @@ def respond(message, history, model, temperature, num_calls, use_web_search):
245
  else:
246
  yield f"An error occurred with the {model} model: {str(e)}. Please try again or select a different model."
247
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
248
  def get_response_with_search(query, model, num_calls=3, temperature=0.2):
249
  search_results = duckduckgo_search(query)
250
  context = "\n".join(f"{result['title']}\n{result['body']}\nSource: {result['href']}\n"
 
232
  logging.info(f"Generated Response (first line): {first_line}")
233
  yield response
234
  else:
235
+ if model == "@cf/meta/llama-3.1-8b-instruct":
236
+ # Use Cloudflare API
237
+ for partial_response in get_response_from_cloudflare(message, num_calls=num_calls, temperature=temperature):
238
+ first_line = partial_response.split('\n')[0] if partial_response else ''
239
+ logging.info(f"Generated Response (first line): {first_line}")
240
+ yield partial_response
241
+ else:
242
+ # Use Hugging Face API
243
+ for partial_response in get_response_from_pdf(message, model, num_calls=num_calls, temperature=temperature):
244
+ first_line = partial_response.split('\n')[0] if partial_response else ''
245
+ logging.info(f"Generated Response (first line): {first_line}")
246
+ yield partial_response
247
  except Exception as e:
248
  logging.error(f"Error with {model}: {str(e)}")
249
  if "microsoft/Phi-3-mini-4k-instruct" in model:
 
253
  else:
254
  yield f"An error occurred with the {model} model: {str(e)}. Please try again or select a different model."
255
 
256
def get_response_from_cloudflare(query, num_calls=3, temperature=0.2):
    """Stream answers to *query* from Cloudflare Workers AI (llama-3.1-8b-instruct).

    Makes ``num_calls`` independent streaming requests and, as SSE chunks
    arrive, yields the cumulative response text after each chunk (so the
    caller can render progressively, matching the other response generators
    in this file).

    Parameters:
        query: The user question to answer.
        num_calls: Number of independent generation attempts to stream.
        temperature: Sampling temperature forwarded to the model.
    """
    # Read both credentials from the environment. The original hard-coded a
    # "your-account-id" placeholder for the account id while already taking
    # the auth token from the environment; keep the placeholder only as a
    # fallback so behavior without the env var is unchanged.
    ACCOUNT_ID = os.environ.get("CLOUDFLARE_ACCOUNT_ID", "your-account-id")
    AUTH_TOKEN = os.environ.get("CLOUDFLARE_AUTH_TOKEN")

    prompt = f"Write a detailed and complete response that answers the following user question: '{query}'"

    for _ in range(num_calls):
        try:
            response = requests.post(
                f"https://api.cloudflare.com/client/v4/accounts/{ACCOUNT_ID}/ai/run/@cf/meta/llama-3.1-8b-instruct",
                headers={"Authorization": f"Bearer {AUTH_TOKEN}"},
                json={
                    "stream": True,
                    "messages": [
                        {"role": "system", "content": "You are a friendly assistant"},
                        {"role": "user", "content": prompt}
                    ],
                    "max_tokens": 1000,
                    "temperature": temperature
                },
                stream=True
            )

            partial_response = ""
            for line in response.iter_lines():
                if not line:
                    continue
                decoded = line.decode('utf-8')
                # SSE data frames look like "data: {...}". The original did
                # split('data: ')[1], which raises an uncaught IndexError on
                # any frame without that prefix — skip such frames instead.
                if not decoded.startswith('data: '):
                    continue
                payload = decoded[len('data: '):]
                try:
                    chunk = json.loads(payload)['response']
                except (json.JSONDecodeError, KeyError):
                    # e.g. the terminal "data: [DONE]" sentinel, or a frame
                    # without a 'response' key (KeyError was uncaught before).
                    continue
                partial_response += chunk
                yield partial_response
        except Exception as e:
            # Best-effort per call: log (consistently with the rest of the
            # file, which uses logging rather than print) and try the next.
            logging.error(f"Error in generating response from Cloudflare: {str(e)}")
292
  def get_response_with_search(query, model, num_calls=3, temperature=0.2):
293
  search_results = duckduckgo_search(query)
294
  context = "\n".join(f"{result['title']}\n{result['body']}\nSource: {result['href']}\n"