# AI Gift Recommender — Gradio app (Hugging Face Space)
# NOTE(review): replaced web-scrape status artifacts ("Spaces: / Running / Running")
# that were not valid Python.
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch
import requests  # NOTE(review): unused in this file — kept in case other code relies on it

# Load the DeepSeek chat model and its tokenizer once at import time.
# NOTE(review): the comment in the original calls this "DeepSeek R1", but the
# checkpoint id below is a DeepSeek-MoE chat model — verify the repo id exists
# on the Hub before deploying.
model_name = "deepseek-ai/deepseek-moe-8b-chat"
tokenizer = AutoTokenizer.from_pretrained(model_name)
# float16 + device_map="auto" lets accelerate place layers on GPU when present.
model = AutoModelForCausalLM.from_pretrained(
    model_name, torch_dtype=torch.float16, device_map="auto"
)
# Function to extract interests from user input
def extract_interests(text):
    """Ask the LLM to pull 3-5 interest keywords out of a free-text request.

    Args:
        text: The user's gift-request description.

    Returns:
        list[str]: Interest keywords parsed from the model's reply.
    """
    prompt = (
        f"Extract the main interests from this request: '{text}'. "
        "Provide only 3-5 relevant words."
    )
    # Send inputs to wherever the model actually lives (device_map="auto" may
    # have placed it on CPU) instead of hard-coding "cuda", which crashes on
    # GPU-less hosts.
    inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
    # max_new_tokens bounds only the generated text; max_length also counted
    # the prompt tokens and could error/truncate on long prompts.
    outputs = model.generate(**inputs, max_new_tokens=100)
    # Decode only the newly generated tokens — decoding outputs[0] in full
    # would echo the prompt back into the "interests".
    reply = tokenizer.decode(
        outputs[0][inputs["input_ids"].shape[1]:], skip_special_tokens=True
    )
    # Split on commas and strip whitespace; drop empty fragments.
    return [word.strip() for word in reply.split(",") if word.strip()]
# Build web-search links for gift suggestions
def search_gifts(interests):
    """Build search URLs for the interest keywords on three shopping sites.

    Args:
        interests: Iterable of keyword strings (may contain spaces).

    Returns:
        dict: Store name -> search-results URL.
    """
    from urllib.parse import quote_plus  # local import keeps module deps unchanged

    # URL-encode each keyword so spaces and special characters don't produce
    # broken links (the raw "+".join left spaces inside multi-word keywords).
    query = "+".join(quote_plus(term) for term in interests)
    return {
        "Amazon": f"https://www.amazon.in/s?k={query}",
        "IGP": f"https://www.igp.com/search?q={query}",
        "IndiaMart": f"https://dir.indiamart.com/search.mp?ss={query}",
    }
# Main entry point for gift recommendation
def recommend_gifts(text):
    """End-to-end pipeline: extract interests, then build shopping links.

    Args:
        text: Free-text description of the gift recipient (may be None).

    Returns:
        dict with "Predicted Interests" and "Gift Suggestions" on success,
        or an error string when the input is empty.
    """
    # Guard against None, empty, and whitespace-only input — the original
    # `if not text` let "   " through to an expensive (and useless) model call.
    if not text or not text.strip():
        return "Please enter a description."
    interests = extract_interests(text)  # LLM keyword extraction
    links = search_gifts(interests)      # shopping links per keyword set
    return {
        "Predicted Interests": interests,
        "Gift Suggestions": links,
    }
# Gradio front-end: one text box in, JSON out.
# NOTE(review): the title's leading characters look mojibake-garbled in the
# source (likely an emoji) — kept byte-for-byte; confirm the intended glyph.
_description = (
    "Enter details about the person you are buying a gift for, "
    "and get personalized suggestions with shopping links!"
)
demo = gr.Interface(
    fn=recommend_gifts,
    inputs="text",
    outputs="json",
    title="π AI Gift Recommender",
    description=_description,
)
# Start the Gradio server only when run as a script, not on import.
if __name__ == "__main__":
    demo.launch()