sahilmayekar committed
Commit 4032d8e · 1 Parent(s): b869a26

Code Updated

Files changed (1)
  1. app.py +7 -2
app.py CHANGED
@@ -1,6 +1,11 @@
 import streamlit as st
-from transformers import AutoModelForSeq2SeqLM, AutoTokenizer
+from transformers import AutoModelForCausalLM, AutoTokenizer
 from datasets import load_dataset
+from huggingface_hub import login
+
+# Authenticate to Hugging Face
+hugging_face_token = "your_hugging_face_api_token"  # Replace with your actual token
+login(hugging_face_token)
 
 # Load the dataset
 ds = load_dataset("Vezora/Open-Critic-GPT")
@@ -9,7 +14,7 @@ st.write("Dataset")
 
 # Load the model and tokenizer
 model_name = "meta-llama/Meta-Llama-3-8B"
-model = AutoModelForSeq2SeqLM.from_pretrained(model_name)
+model = AutoModelForCausalLM.from_pretrained(model_name)
 tokenizer = AutoTokenizer.from_pretrained(model_name)
 
 # Function to generate a response from the model
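The last context line introduces the response-generation helper, whose body lies outside this hunk. As a rough sketch only (the function name, signature, and generation settings below are assumptions, not part of the commit), a helper for a causal LM built on the model and tokenizer loaded above typically looks like:

# Sketch only: the real helper body is not shown in this diff; `model` and
# `tokenizer` are the objects loaded earlier in app.py.
def generate_response(prompt, max_new_tokens=128):
    # Tokenize the prompt and let the causal LM continue it
    inputs = tokenizer(prompt, return_tensors="pt")
    output_ids = model.generate(**inputs, max_new_tokens=max_new_tokens)
    # Decode the generated ids back to text, dropping special tokens
    return tokenizer.decode(output_ids[0], skip_special_tokens=True)

On the login step: the placeholder string still has to be replaced with a real access token for the gated meta-llama/Meta-Llama-3-8B checkpoint to download, and in practice the token would usually be read from an environment variable or Streamlit secrets rather than hardcoded in app.py.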