research14 committed
Commit 477b22c · 1 Parent(s): 9430486

updated path

Files changed (1)
  1. app.py +2 -2
app.py CHANGED
@@ -4,8 +4,8 @@ from transformers import AutoModelForCausalLM, AutoTokenizer
 vicuna_model = AutoModelForCausalLM.from_pretrained("lmsys/vicuna-7b-v1.3")
 vicuna_tokenizer = AutoTokenizer.from_pretrained("lmsys/vicuna-7b-v1.3")

-llama_model = AutoModelForCausalLM.from_pretrained("./llama/hf/7B")
-llama_tokenizer = AutoTokenizer.from_pretrained("./llama/hf/7B")
+llama_model = AutoModelForCausalLM.from_pretrained("luodian/llama-7b-hf")
+llama_tokenizer = AutoTokenizer.from_pretrained("luodian/llama-7b-hf")

 # Define the function for generating responses
 def generate_response(model, tokenizer, prompt):
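
For reference, a minimal sketch (not part of the commit) of how the updated checkpoint id would be exercised. Only the "luodian/llama-7b-hf" repo id comes from the diff; the prompt and generation settings below are illustrative assumptions, since the body of generate_response is not shown here.

```python
# Minimal sketch (not from app.py): load the LLaMA checkpoint from the Hub repo id
# introduced by this commit instead of the old local "./llama/hf/7B" path.
from transformers import AutoModelForCausalLM, AutoTokenizer

llama_tokenizer = AutoTokenizer.from_pretrained("luodian/llama-7b-hf")
llama_model = AutoModelForCausalLM.from_pretrained("luodian/llama-7b-hf")

# Illustrative prompt and generation settings (assumptions, not from the commit).
prompt = "Hello, how are you?"
inputs = llama_tokenizer(prompt, return_tensors="pt")
outputs = llama_model.generate(**inputs, max_new_tokens=64)
print(llama_tokenizer.decode(outputs[0], skip_special_tokens=True))
```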