Update app.py
app.py CHANGED
@@ -36,15 +36,18 @@ def choose_model(model_name):
     elif model_name == "Mixtral-8x7B-Instruct":
         model = "mistralai/Mixtral-8x7B-Instruct-v0.1"
 
-
-    return
+
+    return model
 
 
 @spaces.GPU(duration=1)
 def respond(message, history: list[tuple[str, str]], model, system_message, max_tokens, temperature, top_p):
 
     print(model)
-
+    model_name = choose_model(model)
+
+    client = InferenceClient(model_name, token=os.getenv('deepseekv2'))
+
     messages = [{"role": "system", "content": system_message}]
 
     for val in history:
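Taken together, the hunk makes two fixes: choose_model now returns the resolved Hub repo id (the old bare return handed callers None), and respond builds its InferenceClient from that id, authenticated with the token stored in the deepseekv2 secret. Below is a minimal sketch of the resulting flow; the branches of choose_model other than the one visible in the hunk, and the body of the history loop, are not shown in the diff and are filled in here as assumptions using the usual Gradio ChatInterface pattern.

import os

import spaces
from huggingface_hub import InferenceClient


def choose_model(model_name):
    # Only the Mixtral branch is visible in the hunk; a default is assumed
    # here so the sketch stays runnable for labels not shown in the diff.
    model = "mistralai/Mixtral-8x7B-Instruct-v0.1"
    if model_name == "Mixtral-8x7B-Instruct":
        model = "mistralai/Mixtral-8x7B-Instruct-v0.1"
    return model  # the fix: the previous bare `return` always yielded None


@spaces.GPU(duration=1)
def respond(message, history: list[tuple[str, str]], model, system_message, max_tokens, temperature, top_p):
    print(model)
    model_name = choose_model(model)

    # New in this commit: the client is created per request from the resolved
    # repo id, using the token kept in the Space secret named `deepseekv2`.
    client = InferenceClient(model_name, token=os.getenv('deepseekv2'))

    messages = [{"role": "system", "content": system_message}]
    for val in history:
        # Assumed continuation (standard ChatInterface history handling,
        # not part of the hunk).
        if val[0]:
            messages.append({"role": "user", "content": val[0]})
        if val[1]:
            messages.append({"role": "assistant", "content": val[1]})
    messages.append({"role": "user", "content": message})

    # The call to client (e.g. a streaming chat completion) that follows
    # is outside the hunk and omitted here.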