Spaces: Runtime error
Update app.py
app.py CHANGED
@@ -2,6 +2,7 @@ import gradio as gr
 from huggingface_hub import InferenceClient
 import spaces
 import torch
+import os
 print(f"Is CUDA available: {torch.cuda.is_available()}")
 # True
 print(f"CUDA device: {torch.cuda.get_device_name(torch.cuda.current_device())}")
@@ -11,7 +12,7 @@ print(f"CUDA device: {torch.cuda.get_device_name(torch.cuda.current_device())}")
 For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
 """
 # client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
-client = InferenceClient("deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B")
+client = InferenceClient("deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B", token=os.getenv(hftoken))
 
 @spaces.GPU
 def respond(
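Note on the added line: `os.getenv(hftoken)` passes the bare name `hftoken`, which is not defined anywhere in the code shown, so this line would raise a NameError at import time unless `hftoken` is assigned elsewhere in app.py; `os.getenv` expects the environment-variable name as a string. A minimal sketch of the likely intent, assuming the token is stored as a Space secret named `hftoken` (the secret name here is an assumption, not confirmed by the diff):

import os
from huggingface_hub import InferenceClient

# Spaces exposes repository secrets as environment variables.
# "hftoken" is an assumed secret name; os.getenv returns None if it is unset.
hf_token = os.getenv("hftoken")

client = InferenceClient("deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B", token=hf_token)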