noddysnots committed
Commit 4538963 · verified · 1 Parent(s): c435330

Update app.py

Files changed (1)
  1. app.py +16 -39
app.py CHANGED
@@ -1,58 +1,35 @@
 import gradio as gr
 from transformers import AutoModelForCausalLM, AutoTokenizer
 import torch
-import requests

-# Load DeepSeek R1 model
-model_name = "deepseek-ai/deepseek-moe-8b-chat"  # DeepSeek R1 model
+# Load DeepSeek-R1 model and tokenizer
+model_name = "deepseek-ai/DeepSeek-R1"
 tokenizer = AutoTokenizer.from_pretrained(model_name)
 model = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype=torch.float16, device_map="auto")

-# 🎯 Function to extract interests from user input
-def extract_interests(text):
-    prompt = f"Extract the main interests from this request: '{text}'. Provide only 3-5 relevant words."
-
-    inputs = tokenizer(prompt, return_tensors="pt").to("cuda")  # Run on GPU if available
-    outputs = model.generate(**inputs, max_length=100)
-    interests = tokenizer.decode(outputs[0], skip_special_tokens=True)
-
-    return interests.split(", ")  # Return as a list of keywords
-
-# 🎁 Web search for gift suggestions
-def search_gifts(interests):
-    query = "+".join(interests)
-    amazon_url = f"https://www.amazon.in/s?k={query}"
-    igp_url = f"https://www.igp.com/search?q={query}"
-    indiamart_url = f"https://dir.indiamart.com/search.mp?ss={query}"
-
-    return {
-        "Amazon": amazon_url,
-        "IGP": igp_url,
-        "IndiaMart": indiamart_url
-    }
-
-# 🎯 Main function for gift recommendation
+# Function to generate gift recommendations
 def recommend_gifts(text):
     if not text:
         return "Please enter a description."

-    interests = extract_interests(text)  # Use DeepSeek R1
-    links = search_gifts(interests)  # Get shopping links
+    # Prepare input prompt for the model
+    prompt = f"Based on the following description, suggest suitable gifts: '{text}'"
+
+    # Tokenize input and generate response
+    inputs = tokenizer(prompt, return_tensors="pt").to("cuda")
+    outputs = model.generate(inputs.input_ids, max_length=200, do_sample=True)
+    recommendation = tokenizer.decode(outputs[0], skip_special_tokens=True)

-    return {
-        "Predicted Interests": interests,
-        "Gift Suggestions": links
-    }
+    return {"Recommendation": recommendation}

-# 🎨 Gradio UI for easy interaction
+# Gradio interface
 demo = gr.Interface(
-    fn=recommend_gifts,
-    inputs="text",
+    fn=recommend_gifts,
+    inputs="text",
     outputs="json",
-    title="🎁 AI Gift Recommender",
-    description="Enter details about the person you are buying a gift for, and get personalized suggestions with shopping links!",
+    title="AI Gift Recommender",
+    description="Enter details about the person you are buying a gift for, and get personalized suggestions!",
 )

-# 🚀 Launch Gradio App
 if __name__ == "__main__":
     demo.launch()
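
For reference, a minimal sketch of the updated generation path that can be run outside the Space. It is not part of the commit: the model name is swapped for a small stand-in checkpoint ("sshleifer/tiny-gpt2", an assumption chosen only so the sketch runs on CPU), inputs are moved to model.device instead of the hard-coded "cuda", and max_new_tokens replaces max_length so the prompt length does not eat into the generation budget. The commit itself loads "deepseek-ai/DeepSeek-R1" in float16 with device_map="auto".

# Sketch only: a small stand-in checkpoint so this runs locally; the commit
# itself loads "deepseek-ai/DeepSeek-R1" with torch_dtype=torch.float16
# and device_map="auto".
from transformers import AutoModelForCausalLM, AutoTokenizer

model_name = "sshleifer/tiny-gpt2"  # hypothetical stand-in for local testing
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)


def recommend_gifts(text):
    if not text:
        return "Please enter a description."

    prompt = f"Based on the following description, suggest suitable gifts: '{text}'"

    # Move inputs to wherever the model was placed instead of hard-coding "cuda".
    inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
    # max_new_tokens bounds the generated text regardless of prompt length.
    outputs = model.generate(**inputs, max_new_tokens=200, do_sample=True)
    recommendation = tokenizer.decode(outputs[0], skip_special_tokens=True)

    return {"Recommendation": recommendation}


if __name__ == "__main__":
    print(recommend_gifts("They love hiking and photography"))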