noddysnots committed
Commit 3f2c2f8 · verified · Parent: bf97bba

Update app.py

Files changed (1): app.py (+34 -26)
app.py CHANGED
@@ -3,46 +3,54 @@ from transformers import AutoModelForCausalLM, AutoTokenizer
 import torch
 import requests

-# Load DeepSeek-R1 model and tokenizer
-model_name = "deepseek-ai/DeepSeek-R1"
+# Load DeepSeek R1 model with Accelerate enabled
+model_name = "deepseek-ai/deepseek-moe-8b-chat"
 tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
-model = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype=torch.float16, device_map="auto", trust_remote_code=True)
+model = AutoModelForCausalLM.from_pretrained(
+    model_name,
+    torch_dtype=torch.float16,
+    device_map="auto",
+    trust_remote_code=True,
+    low_cpu_mem_usage=True  # Helps reduce memory consumption
+)
+
+# 🎯 Extract interests using DeepSeek R1
+def extract_interests(text):
+    prompt = f"Extract key interests from this request: '{text}'. Give 3-5 relevant words only."
+
+    inputs = tokenizer(prompt, return_tensors="pt").to("cuda")  # Runs on GPU if available
+    outputs = model.generate(**inputs, max_length=100)
+    interests = tokenizer.decode(outputs[0], skip_special_tokens=True)

-# 🎁 Web search function to get gift suggestions from various e-commerce sites
-def search_gifts(query):
-    search_query = query.replace(" ", "+")
+    return interests.split(", ")  # Return as a list of keywords
+
+# 🎁 Search for gift suggestions from various e-commerce sites
+def search_gifts(interests):
+    query = "+".join(interests)
     return {
-        "Amazon": f"https://www.amazon.in/s?k={search_query}",
-        "Flipkart": f"https://www.flipkart.com/search?q={search_query}",
-        "IGP": f"https://www.igp.com/search?q={search_query}",
-        "IndiaMart": f"https://dir.indiamart.com/search.mp?ss={search_query}",
+        "Amazon": f"https://www.amazon.in/s?k={query}",
+        "Flipkart": f"https://www.flipkart.com/search?q={query}",
+        "IGP": f"https://www.igp.com/search?q={query}",
+        "IndiaMart": f"https://dir.indiamart.com/search.mp?ss={query}"
     }

-# 🎯 Generate gift recommendations using AI
+# 🎯 Main function for generating gift recommendations
 def recommend_gifts(text):
     if not text:
         return "Please enter a description."

-    # Prepare input prompt for the model
-    prompt = f"Suggest the best gifts for: '{text}'"
-
-    # Generate response using the model
-    inputs = tokenizer(prompt, return_tensors="pt").to("cuda")
-    outputs = model.generate(inputs.input_ids, max_length=200, do_sample=True)
-    recommendation = tokenizer.decode(outputs[0], skip_special_tokens=True)
-
-    # Search for gifts on shopping websites
-    product_links = search_gifts(recommendation)
+    interests = extract_interests(text)  # Extract relevant interests
+    links = search_gifts(interests)  # Generate shopping links

     return {
-        "Predicted Gift": recommendation,
-        "Gift Suggestions": product_links
+        "Predicted Interests": interests,
+        "Gift Suggestions": links
     }

-# 🎨 Gradio UI for easy interaction
+# 🎨 Gradio Interface
 demo = gr.Interface(
-    fn=recommend_gifts,
-    inputs="text",
+    fn=recommend_gifts,
+    inputs="text",
     outputs="json",
     title="🎁 AI Gift Recommender",
     description="Enter details about the person you are buying a gift for, and get personalized suggestions with shopping links!",
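
For local testing, a minimal smoke test of the refactored flow could look like the sketch below. It is illustrative only: the file name smoke_test.py and the import from app are assumptions, it presumes app.py does not call demo.launch() unconditionally at import time, and importing the module runs the module-level model loading (so it needs the Space's GPU and the model weights). The dictionary keys it reads match what the new recommend_gifts returns.

# smoke_test.py (illustrative sketch; assumed to sit next to app.py in the Space)
from app import recommend_gifts  # importing app triggers the module-level model loading

result = recommend_gifts("Gift ideas for a friend who loves hiking and photography")
print(result["Predicted Interests"])           # keyword list produced by extract_interests
print(result["Gift Suggestions"]["Amazon"])    # pre-built Amazon search URL
print(result["Gift Suggestions"]["Flipkart"])  # pre-built Flipkart search URL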