michailroussos committed
Commit 03dc554 · 1 Parent(s): 30b90d5

more changes to support no gpu

Files changed (2)
  1. app.py +5 -1
  2. requirements.txt +1 -1
app.py CHANGED
@@ -1,7 +1,11 @@
+import os
 import gradio as gr
 from unsloth import FastLanguageModel
 import torch
 
+# Disable CUDA explicitly by setting the environment variable
+os.environ["CUDA_VISIBLE_DEVICES"] = ""  # Disabling CUDA
+
 # Set device to CPU
 device = torch.device("cpu")
 
@@ -15,7 +19,7 @@ model, tokenizer = FastLanguageModel.from_pretrained(
     max_seq_length=max_seq_length,
     dtype=dtype,
     load_in_4bit=True,
-).to(device)  # Make sure the model is on CPU
+).to(device)  # Ensure the model is on CPU
 
 # Enable native faster inference if possible
 FastLanguageModel.for_inference(model)
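
For reference, a minimal standalone sketch of the CPU-forcing pattern this commit relies on, using plain torch only (no unsloth model loading, so the model name and other arguments from the diff are not repeated here). The key detail is that CUDA_VISIBLE_DEVICES is read when CUDA is first initialized, so setting it before any torch/unsloth import is the safest ordering; the committed app.py sets it after the imports, which is only effective if nothing in those imports has already initialized CUDA.

import os

# Hide all CUDA devices *before* importing torch so the setting is in place
# before anything can initialize CUDA.
os.environ["CUDA_VISIBLE_DEVICES"] = ""

import torch

# With CUDA hidden, torch reports no GPUs and everything runs on the CPU.
device = torch.device("cpu")
print("CUDA available:", torch.cuda.is_available())  # expected: False

# A tiny tensor op to confirm CPU-only execution works end to end.
x = torch.randn(2, 3, device=device)
print(x @ x.T)

One further note on the diff itself: FastLanguageModel.from_pretrained returns a (model, tokenizer) pair, so chaining .to(device) onto the call applies to that tuple; in a standalone script the model would typically be moved to the device after unpacking (model.to(device)).
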
requirements.txt CHANGED
@@ -2,4 +2,4 @@ unsloth==2024.12.4
 transformers==4.47.0
 gradio==5.8.0
 bitsandbytes==0.45.0
-torch==2.5.1
+torch==2.1.0+cpu
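
Note on the new torch pin: +cpu builds such as torch==2.1.0+cpu are normally served from the PyTorch CPU wheel index rather than PyPI, so installing this requirements.txt may require something like pip install -r requirements.txt --extra-index-url https://download.pytorch.org/whl/cpu (a standard pip/PyTorch setup is assumed here). Also note that load_in_4bit relies on bitsandbytes, which has historically required CUDA, so 4-bit loading may not work in a CPU-only environment.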