noddysnots committed on
Commit df9ff98 · verified · 1 Parent(s): f303687

Update app.py

Files changed (1)
  1. app.py +18 -19
app.py CHANGED
@@ -3,51 +3,50 @@ from transformers import AutoModelForCausalLM, AutoTokenizer
 import torch
 import requests
 
-# Load DeepSeek R1 model with Accelerate enabled
-model_name = "deepseek-ai/deepseek-moe-8b-chat"
+# Load DeepSeek R1 model
+model_name = "deepseek-ai/DeepSeek-R1"
 tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
 model = AutoModelForCausalLM.from_pretrained(
-    model_name,
-    torch_dtype=torch.float16,
-    device_map="auto",
-    trust_remote_code=True,
-    low_cpu_mem_usage=True  # Helps reduce memory consumption
+    model_name, torch_dtype=torch.float16, device_map="auto", trust_remote_code=True
 )
 
-# 🎯 Extract interests using DeepSeek R1
+# 🎯 Function to extract interests from user input
 def extract_interests(text):
-    prompt = f"Extract key interests from this request: '{text}'. Give 3-5 relevant words only."
+    prompt = f"Extract 3-5 relevant gift-related interests from: '{text}'"
 
-    inputs = tokenizer(prompt, return_tensors="pt").to("cuda")  # Runs on GPU if available
+    inputs = tokenizer(prompt, return_tensors="pt").to("cuda")  # Use GPU if available
     outputs = model.generate(**inputs, max_length=100)
     interests = tokenizer.decode(outputs[0], skip_special_tokens=True)
 
     return interests.split(", ")  # Return as a list of keywords
 
-# 🎁 Search for gift suggestions from various e-commerce sites
+# 🎁 Web search for gift suggestions
 def search_gifts(interests):
     query = "+".join(interests)
+    amazon_url = f"https://www.amazon.in/s?k={query}"
+    igp_url = f"https://www.igp.com/search?q={query}"
+    indiamart_url = f"https://dir.indiamart.com/search.mp?ss={query}"
+
     return {
-        "Amazon": f"https://www.amazon.in/s?k={query}",
-        "Flipkart": f"https://www.flipkart.com/search?q={query}",
-        "IGP": f"https://www.igp.com/search?q={query}",
-        "IndiaMart": f"https://dir.indiamart.com/search.mp?ss={query}"
+        "Amazon": f"[Amazon]({amazon_url})",
+        "IGP": f"[IGP]({igp_url})",
+        "IndiaMart": f"[IndiaMart]({indiamart_url})"
     }
 
-# 🎯 Main function for generating gift recommendations
+# 🎯 Main function for gift recommendation
 def recommend_gifts(text):
     if not text:
         return "Please enter a description."
 
-    interests = extract_interests(text)  # Extract relevant interests
-    links = search_gifts(interests)  # Generate shopping links
+    interests = extract_interests(text)  # Use DeepSeek R1
+    links = search_gifts(interests)  # Get shopping links
 
     return {
         "Predicted Interests": interests,
         "Gift Suggestions": links
     }
 
-# 🎨 Gradio Interface
+# 🎨 Gradio UI for easy interaction
 demo = gr.Interface(
     fn=recommend_gifts,
     inputs="text",