Mishal23 committed on
Commit
8b9d9d4
·
verified ·
1 Parent(s): c99a6c4

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +8 -50
app.py CHANGED
@@ -1,8 +1,4 @@
1
- # app.py
2
-
3
  import pandas as pd
4
- from fastapi import FastAPI
5
- from pydantic import BaseModel
6
  from sentence_transformers import SentenceTransformer
7
  import faiss
8
  from datasets import load_dataset
@@ -37,55 +33,17 @@ embeddings = model.encode(questions, convert_to_tensor=True)
37
  index = faiss.IndexFlatL2(embeddings.shape[1])
38
  index.add(embeddings.cpu().numpy())
39
 
40
- # Define FastAPI app and model
41
- app = FastAPI()
42
-
43
- class Query(BaseModel):
44
- question: str
45
-
46
- @app.post("/ask")
47
- def ask_bot(query: Query):
48
- question_embedding = model.encode([query.question], convert_to_tensor=True)
49
  question_embedding_np = question_embedding.cpu().numpy()
50
  _, closest_index = index.search(question_embedding_np, k=1)
51
  best_match_idx = closest_index[0][0]
52
  answer = combined_data.iloc[best_match_idx]['Answer']
53
- return {"answer": answer}
54
-
55
- # Gradio Interface
56
-
57
- import gradio as gr
58
- import requests
59
-
60
- # Define the URL of your FastAPI endpoint
61
- API_URL = "http://localhost:8000/ask" # Update to your deployed FastAPI URL if needed
62
-
63
- def respond(message, history: list[tuple[str, str]]):
64
- payload = {"question": message}
65
-
66
- try:
67
- response = requests.post(API_URL, json=payload)
68
- response.raise_for_status()
69
- response_data = response.json()
70
- answer = response_data.get("answer", "Sorry, I didn't get that.")
71
- except requests.exceptions.RequestException as e:
72
- answer = f"Request Error: {str(e)}"
73
-
74
- # Update history
75
- history.append((message, answer))
76
- return answer, history
77
-
78
- # Gradio Chat Interface
79
- demo = gr.ChatInterface(
80
- respond,
81
- )
82
 
 
83
  if __name__ == "__main__":
84
- import threading
85
- import uvicorn
86
-
87
- # Run FastAPI in a separate thread
88
- threading.Thread(target=uvicorn.run, args=(app,), kwargs={"host": "0.0.0.0", "port": 8000}).start()
89
-
90
- # Launch Gradio interface
91
- demo.launch()
 
 
 
1
  import pandas as pd
 
 
2
  from sentence_transformers import SentenceTransformer
3
  import faiss
4
  from datasets import load_dataset
 
33
  index = faiss.IndexFlatL2(embeddings.shape[1])
34
  index.add(embeddings.cpu().numpy())
35
 
36
def get_answer(question):
    """Return the stored answer whose indexed question is semantically closest to *question*.

    Encodes the query with the module-level SentenceTransformer ``model``,
    does a k=1 nearest-neighbour lookup in the FAISS ``index``, and returns
    the 'Answer' cell of the matching row in ``combined_data``.

    Parameters
    ----------
    question : str
        Free-text user question.

    Returns
    -------
    str
        The best-matching answer, or "Answer not found." when the stored
        cell is empty or missing.
    """
    question_embedding = model.encode([question], convert_to_tensor=True)
    question_embedding_np = question_embedding.cpu().numpy()
    # k=1: we only want the single closest stored question.
    _, closest_index = index.search(question_embedding_np, k=1)
    best_match_idx = closest_index[0][0]
    answer = combined_data.iloc[best_match_idx]['Answer']
    # Bug fix: a missing pandas cell is NaN, and float('nan') is truthy,
    # so the old ``answer if answer else ...`` let NaN leak through.
    if pd.isna(answer) or not answer:
        return "Answer not found."
    return answer
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
43
 
# Example usage: minimal command-line driver.
if __name__ == "__main__":
    # Swap this stdin prompt for any other front end (e.g. a chatbot UI) as needed.
    user_question = input("Ask your question: ")
    print("Response:", get_answer(user_question))