Spaces:
Running
on
Zero
Running
on
Zero
Update app.py
Browse files
app.py
CHANGED
@@ -17,6 +17,7 @@ print(f"CUDA version: {torch.version.cuda}")
|
|
17 |
print(f"Python version: {platform.python_version()}")
|
18 |
print(f"Pytorch version: {torch.__version__}")
|
19 |
print(f"Gradio version: {gr.__version__}")
|
|
|
20 |
|
21 |
|
22 |
"""
|
@@ -70,7 +71,7 @@ def respond(message, history: list[tuple[str, str]], model, system_message, max_
|
|
70 |
print(model)
|
71 |
model_name = choose_model(model)
|
72 |
|
73 |
-
client = InferenceClient(model_name, token=os.getenv('deepseekv2'),
|
74 |
|
75 |
messages = [{"role": "system", "content": system_message}]
|
76 |
|
|
|
17 |
print(f"Python version: {platform.python_version()}")
|
18 |
print(f"Pytorch version: {torch.__version__}")
|
19 |
print(f"Gradio version: {gr.__version__}")
|
20 |
+
print(f"HFhub version: {huggingface_hub.__version__}")
|
21 |
|
22 |
|
23 |
"""
|
|
|
71 |
print(model)
|
72 |
model_name = choose_model(model)
|
73 |
|
74 |
+
client = InferenceClient(model_name, provider="sambanova", token=os.getenv('deepseekv2'))
|
75 |
|
76 |
messages = [{"role": "system", "content": system_message}]
|
77 |
|