Akjava committed on
Commit 69110e0 · verified · 1 Parent(s): 2c05a28

Update app.py

Files changed (1)
  1. app.py +3 -2
app.py CHANGED
@@ -31,13 +31,14 @@ def init():
     model = AutoModelForCausalLM.from_pretrained(
         model_id, token=huggingface_token ,torch_dtype=dtype,device_map=device
     )
-    text_generator = pipeline("text-generation", model=model, tokenizer=tokenizer,torch_dtype=dtype,device_map=device,device=0 ) #pipeline has not to(device)
+    text_generator = pipeline("text-generation", model=model, tokenizer=tokenizer,torch_dtype=dtype,device_map=device ) #pipeline has not to(device)
 
     if next(model.parameters()).is_cuda:
         print("The model is on a GPU")
     else:
         print("The model is on a CPU")
-
+
+    print(text_generator.device)
     if text_generator.device == 'cuda':
         print("The pipeline is using a GPU")
     else:
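
For readers following along, below is a minimal, self-contained sketch of what the pipeline setup looks like after this commit. The model id, token, dtype and device values are placeholders (app.py defines its own earlier in init(), outside this hunk); only the pipeline call and the device check mirror the committed lines. Note that text_generator.device is a torch.device object, so checking its .type attribute is a more reliable GPU test than comparing the device itself to the string 'cuda' as the surrounding code does.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

# Placeholder values for illustration only; the real app.py defines its own
# model_id, huggingface_token, dtype and device before this hunk.
model_id = "gpt2"                 # any public causal LM works for the sketch
huggingface_token = None          # public models need no token
dtype = torch.float16 if torch.cuda.is_available() else torch.float32
device = "auto"                   # used as device_map; requires accelerate

tokenizer = AutoTokenizer.from_pretrained(model_id, token=huggingface_token)
model = AutoModelForCausalLM.from_pretrained(
    model_id, token=huggingface_token, torch_dtype=dtype, device_map=device
)

# As committed: no explicit device= argument, since device_map has already
# placed the weights and a pipeline object has no .to(device) method.
text_generator = pipeline(
    "text-generation",
    model=model,
    tokenizer=tokenizer,
    torch_dtype=dtype,
    device_map=device,
)

# text_generator.device is a torch.device; compare its .type attribute
# instead of comparing the object to the string 'cuda'.
print(text_generator.device)
if text_generator.device is not None and text_generator.device.type == "cuda":
    print("The pipeline is using a GPU")
else:
    print("The pipeline is using a CPU")

Dropping the explicit device=0 also avoids handing both device and device_map to pipeline(), a combination that recent transformers releases warn about or reject.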