Garvitj committed
Commit 9263c0f · verified · 1 Parent(s): 68486a9

Update app.py

Files changed (1)
app.py: +8 -10
app.py CHANGED
@@ -8,25 +8,23 @@ import pytesseract as tess
 from sentence_transformers import SentenceTransformer, util
 import io
 
-model_name = "eachadea/vicuna-7b-1.1"
+save_directory="Garvitj/grader"
 
-# Check if CUDA is available, otherwise, fall back to CPU
-device = "cuda" if torch.cuda.is_available() else "cpu"
-print(f"Using device: {device}")
+# Load the tokenizer from the saved directory
+tokenizer = AutoTokenizer.from_pretrained(save_directory)
 
-# Load the tokenizer
-tokenizer = AutoTokenizer.from_pretrained(model_name)
-
-# Load the model
-# If CUDA is available, use float16, otherwise, use float32
+# Load the model from the saved directory
 model = AutoModelForCausalLM.from_pretrained(
-    model_name,
+    save_directory,
     torch_dtype=torch.float16 if device == "cuda" else torch.float32,
     device_map="auto" if device == "cuda" else None
 )
 
 # Move model to the appropriate device (CPU or CUDA)
 model.to(device)
+
+print(f"Model and tokenizer loaded from {save_directory}")
+
 tess.pytesseract.tesseract_cmd = r"/app/tesseract.exe"
 # Load a smaller version of Sentence-BERT model
 model1 = SentenceTransformer('all-MiniLM-L6-v2')
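
For readers skimming the diff, here is a minimal, self-contained sketch of the loading flow this commit sets up. It is not the file verbatim: the imports and the `device` detection are assumed to live elsewhere in app.py (the hunk removes the detection lines while the kept dtype/device_map lines still reference `device`), so the sketch re-includes them to stay runnable.

import torch
from transformers import AutoTokenizer, AutoModelForCausalLM
from sentence_transformers import SentenceTransformer

save_directory = "Garvitj/grader"

# Device detection, assumed to remain defined earlier in app.py, since the
# kept dtype/device_map expressions below still reference `device`.
device = "cuda" if torch.cuda.is_available() else "cpu"

# Tokenizer and model now come from the Garvitj/grader repository
tokenizer = AutoTokenizer.from_pretrained(save_directory)
model = AutoModelForCausalLM.from_pretrained(
    save_directory,
    torch_dtype=torch.float16 if device == "cuda" else torch.float32,  # half precision only on GPU
    device_map="auto" if device == "cuda" else None,                   # let accelerate place the weights
)

# With device_map="auto" the weights are already placed on the GPU; only the
# CPU path needs an explicit move in this sketch.
if device == "cpu":
    model.to(device)

print(f"Model and tokenizer loaded from {save_directory}")

# Sentence-BERT encoder loaded alongside the causal LM
model1 = SentenceTransformer('all-MiniLM-L6-v2')

Swapping the model identifier is the only change needed to the from_pretrained calls, since from_pretrained accepts either a local directory or a Hugging Face Hub repository id, so "Garvitj/grader" resolves the same way "eachadea/vicuna-7b-1.1" did.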