noddysnots committed on
Commit
05478ea
·
verified ·
1 Parent(s): 3a3edda

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +4 -2
app.py CHANGED
@@ -9,7 +9,7 @@ try:
9
  except ImportError:
10
  raise RuntimeError("Missing required dependency: flash_attn. Install with `pip install flash-attn --no-build-isolation`")
11
 
12
- # Load DeepSeek-R1 model with trust_remote_code enabled
13
  model_name = "deepseek-ai/DeepSeek-R1"
14
  tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
15
 
@@ -26,7 +26,7 @@ model = AutoModelForCausalLM.from_pretrained(
26
  # Use a text-generation pipeline
27
  generator = pipeline("text-generation", model=model, tokenizer=tokenizer, device=0 if torch.cuda.is_available() else -1)
28
 
29
- # 🎯 Function to extract interests from user input
30
  def extract_interests(text):
31
  prompt = f"Extract 3-5 relevant interests from this request: '{text}'. Focus on hobbies and product preferences."
32
 
@@ -76,3 +76,5 @@ demo = gr.Interface(
76
  # 🚀 Launch Gradio App
77
  if __name__ == "__main__":
78
  demo.launch()
 
 
 
9
  except ImportError:
10
  raise RuntimeError("Missing required dependency: flash_attn. Install with `pip install flash-attn --no-build-isolation`")
11
 
12
+ # Load DeepSeek-R1 model
13
  model_name = "deepseek-ai/DeepSeek-R1"
14
  tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
15
 
 
26
  # Use a text-generation pipeline
27
  generator = pipeline("text-generation", model=model, tokenizer=tokenizer, device=0 if torch.cuda.is_available() else -1)
28
 
29
+ # 🎯 Extract interests from user input
30
  def extract_interests(text):
31
  prompt = f"Extract 3-5 relevant interests from this request: '{text}'. Focus on hobbies and product preferences."
32
 
 
76
  # 🚀 Launch Gradio App
77
  if __name__ == "__main__":
78
  demo.launch()
79
+
80
+