Upload 2 files
- app.py +58 -0
- requirements.txt +9 -0
app.py
ADDED
@@ -0,0 +1,58 @@
+import gradio as gr
+import pandas as pd
+import ast
+import torch
+import numpy as np
+from huggingface_hub import hf_hub_download
+from sentence_transformers import SentenceTransformer, util
+
+# Load the sentence-embedding model
+model = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2")
+
+# Download the precomputed book embeddings from the Hugging Face Hub
+repo_id = "AventIQ-AI/all-MiniLM-L6-v2-book-recommendation-system"
+filename = "book_embeddings.csv"
+csv_path = hf_hub_download(repo_id=repo_id, filename=filename)
+
+# Load the embeddings: each row stores a title and its embedding as a stringified list
+df_embeddings = pd.read_csv(csv_path)
+df_embeddings["embedding"] = df_embeddings["embedding"].apply(ast.literal_eval)
+book_embeddings = torch.tensor(df_embeddings["embedding"].tolist())
+
+# Return the top-k books whose embeddings are most similar to the query
+def get_book_recommendations(query, top_k=5):
+    query_embedding = model.encode(query, convert_to_tensor=True)
+
+    similarities = util.pytorch_cos_sim(query_embedding, book_embeddings).squeeze(0)
+    top_k_values, top_k_indices = torch.topk(similarities, k=top_k)
+
+    recommended_titles = df_embeddings.iloc[top_k_indices.cpu().numpy()]["title"].tolist()
+    recommended_scores = top_k_values.cpu().numpy().tolist()
+
+    return "\n".join(f"{title} - Score: {score:.4f}" for title, score in zip(recommended_titles, recommended_scores))
+
+# Define the Gradio UI
+with gr.Blocks(theme=gr.themes.Soft()) as demo:
+    gr.Markdown("## AI-Powered Book Recommendation System")
+    gr.Markdown("**Find your next favorite book!** Enter a description or a genre, and the AI will suggest books.")
+
+    with gr.Row():
+        query_input = gr.Textbox(label="Enter Book Description / Genre", placeholder="E.g. A thrilling mystery novel...")
+        recommend_button = gr.Button("Get Recommendations")
+
+    output = gr.Textbox(label="Recommended Books", lines=5)
+
+    examples = [
+        ["A horror novel with ghosts and dark nights"],
+        ["A sci-fi adventure with aliens and space travel"],
+        ["A romance story set in Paris"],
+        ["A detective novel solving crimes in the city"],
+        ["An inspiring self-help book for personal growth"]
+    ]
+
+    gr.Examples(examples, inputs=[query_input])
+
+    recommend_button.click(fn=get_book_recommendations, inputs=[query_input], outputs=[output])
+
+# Launch the Gradio app
+demo.launch()
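The app only consumes book_embeddings.csv from the AventIQ-AI repo. As a minimal sketch (not part of this commit), a compatible file could be produced with the same model along the lines below; the books list here is hypothetical, and the actual catalog behind the Hub repo may differ.

import pandas as pd
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2")

# Hypothetical titles; the real dataset used by the Hub repo is not shown here.
books = ["The Haunting of Hill House", "Dune", "Gone Girl"]

embeddings = model.encode(books)  # shape: (len(books), 384) for all-MiniLM-L6-v2
df = pd.DataFrame({
    "title": books,
    # Store each vector as a stringified Python list so app.py can
    # recover it with ast.literal_eval after pd.read_csv.
    "embedding": [emb.tolist() for emb in embeddings],
})
df.to_csv("book_embeddings.csv", index=False)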
requirements.txt
ADDED
@@ -0,0 +1,9 @@
+torch
+transformers
+gradio
+sentencepiece
+torchvision
+huggingface_hub
+pillow
+numpy
+sentence-transformers
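As a quick local sanity check (a sketch, not part of the Space), the downloaded file can be inspected before launching the app, assuming it exposes the title and embedding columns that app.py relies on:

import ast
import pandas as pd
from huggingface_hub import hf_hub_download

csv_path = hf_hub_download(
    repo_id="AventIQ-AI/all-MiniLM-L6-v2-book-recommendation-system",
    filename="book_embeddings.csv",
)
df = pd.read_csv(csv_path)
print(df.columns.tolist())                             # expect at least: title, embedding
print(len(ast.literal_eval(df["embedding"].iloc[0])))  # expect 384 (all-MiniLM-L6-v2 dimension)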