Muh113 committed on
Commit 37f1f30 · verified · 1 Parent(s): 378a935

Update app.py

Files changed (1): app.py +2 -2
app.py CHANGED
@@ -5,7 +5,7 @@ from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
 # Replace with your Hugging Face model repository path
 model_repo_path = 'Muh113/Minecraft_Query_Wizard'
 
-# Check for GPU availability and set device accordingly
+# Check for GPU availability and set the device accordingly
 device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
 
 # Load the model and tokenizer
@@ -13,7 +13,7 @@ tokenizer = AutoTokenizer.from_pretrained(model_repo_path)
 model = AutoModelForSeq2SeqLM.from_pretrained(model_repo_path).to(device)
 
 # Streamlit app layout
-st.title("Minecraft Question Answering App")
+st.title("Minecraft Query Wizard")
 
 # User input
 question_input = st.text_area("Enter a Minecraft-related question", height=150)
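
The commit only changes the comment wording and the app title; for context, here is a minimal sketch of how an app.py like this typically turns the text-area question into an answer. The button label, generation parameters, and output handling below are assumptions for illustration, not taken from this commit.

import streamlit as st
import torch
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

# Same loading steps as in the diff above
model_repo_path = 'Muh113/Minecraft_Query_Wizard'
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
tokenizer = AutoTokenizer.from_pretrained(model_repo_path)
model = AutoModelForSeq2SeqLM.from_pretrained(model_repo_path).to(device)

st.title("Minecraft Query Wizard")
question_input = st.text_area("Enter a Minecraft-related question", height=150)

# Hypothetical answer step: tokenize the question, run seq2seq generation,
# and decode the result back to text for display.
if st.button("Get answer") and question_input.strip():
    inputs = tokenizer(question_input, return_tensors="pt").to(device)
    with torch.no_grad():
        output_ids = model.generate(**inputs, max_new_tokens=128)
    answer = tokenizer.decode(output_ids[0], skip_special_tokens=True)
    st.write(answer)

Moving both the model and the tokenized inputs to the same device keeps the generate call from failing with a CPU/GPU tensor mismatch.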