Update app.py
app.py
CHANGED
@@ -28,14 +28,14 @@ def choose_model(model_name):
     model = "meta-llama/Llama-3.1-8B-Instruct"
 
     client = InferenceClient(model, token=os.getenv('deepseekv2'))
-    return
+    return client
 
 
 @spaces.GPU(duration=1)
 def respond(message, history: list[tuple[str, str]], model, system_message, max_tokens, temperature, top_p):
 
     print(model)
-    choose_model(model)
+    client = choose_model(model)
     messages = [{"role": "system", "content": system_message}]
 
     for val in history:
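With choose_model now returning the InferenceClient, respond can use that client to generate the reply. A minimal sketch of how the two functions fit together, assuming the common huggingface_hub streaming chat_completion pattern; everything past the "for val in history:" line, the loop-variable names, and the chat_completion call are assumptions not shown in this commit, and the @spaces.GPU decorator is omitted for brevity.

    # Sketch only: assumed continuation of respond(), not part of the commit.
    import os
    from huggingface_hub import InferenceClient


    def choose_model(model_name):
        # The diff hard-codes the model; any selection based on model_name is not shown here.
        model = "meta-llama/Llama-3.1-8B-Instruct"
        client = InferenceClient(model, token=os.getenv('deepseekv2'))
        return client


    def respond(message, history, model, system_message, max_tokens, temperature, top_p):
        client = choose_model(model)
        messages = [{"role": "system", "content": system_message}]

        # Assumed history handling: history is a list of (user, assistant) pairs.
        for user_msg, bot_msg in history:
            if user_msg:
                messages.append({"role": "user", "content": user_msg})
            if bot_msg:
                messages.append({"role": "assistant", "content": bot_msg})
        messages.append({"role": "user", "content": message})

        # Assumed usage of the returned client: stream tokens back to the caller.
        response = ""
        for chunk in client.chat_completion(
            messages,
            max_tokens=max_tokens,
            stream=True,
            temperature=temperature,
            top_p=top_p,
        ):
            response += chunk.choices[0].delta.content or ""
            yield response

Before this change, choose_model returned None and respond discarded the result, so the client built inside choose_model was unreachable; returning it and binding it in respond is what makes the call above possible.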