lucas-w committed on
Commit
65d9e8d
·
1 Parent(s): 8685186

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +6 -1
app.py CHANGED
@@ -12,7 +12,12 @@ if not torch.cuda.is_available():
12
  DEFAULT_SYSTEM_PROMPT += '\n<p>Running on CPU 🥶 This demo does not work on CPU.</p>'
13
 
14
  # Create a pipeline using the Hugging Face Llama-2-7b-chat model
15
- pipe = pipeline("chat", model="huggingface-projects/llama-2-7b-chat", tokenizer="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")
 
 
 
 
 
16
 
17
  # Define a function to interact with the pipeline using Gradio
18
  def llama_2_7b_chatbot(message):
 
12
  DEFAULT_SYSTEM_PROMPT += '\n<p>Running on CPU 🥶 This demo does not work on CPU.</p>'
13
 
14
  # Create a pipeline using the Hugging Face Llama-2-7b-chat model
15
+ pipe = pipeline(
16
+ "chat",
17
+ model="huggingface-projects/llama-2-7b-chat",
18
+ tokenizer="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",
19
+ use_auth_token=True,
20
+ )
21
 
22
  # Define a function to interact with the pipeline using Gradio
23
  def llama_2_7b_chatbot(message):