Spaces:
Running
on
Zero
Running
on
Zero
0.7 fixing case typo pharia to Pharia
Browse files
app.py
CHANGED
@@ -122,7 +122,7 @@ def generate_both(system_prompt, input_text, chatbot_a, chatbot_b, max_new_token
|
|
122 |
new_messages_a = system_prompt_list + chat_history_a + input_text_list
|
123 |
new_messages_b = system_prompt_list + chat_history_b + input_text_list
|
124 |
|
125 |
-
if "pharia" in model_id_a:
|
126 |
logging.debug("model a is pharia based, applying own template")
|
127 |
formatted_message_a = apply_chat_template(new_messages_a, add_generation_prompt=True)
|
128 |
input_ids_a = tokenizer_b(formatted_message_a, return_tensors="pt").input_ids.to(model_a.device)
|
@@ -133,8 +133,8 @@ def generate_both(system_prompt, input_text, chatbot_a, chatbot_b, max_new_token
|
|
133 |
return_tensors="pt"
|
134 |
).to(model_a.device)
|
135 |
|
136 |
-
if "pharia" in model_id_b:
|
137 |
-
logging.debug("model b is pharia based, applying own template")
|
138 |
formatted_message_b = apply_chat_template(new_messages_a, add_generation_prompt=True)
|
139 |
input_ids_b = tokenizer_b(formatted_message_b, return_tensors="pt").input_ids.to(model_a.device)
|
140 |
else:
|
|
|
122 |
new_messages_a = system_prompt_list + chat_history_a + input_text_list
|
123 |
new_messages_b = system_prompt_list + chat_history_b + input_text_list
|
124 |
|
125 |
+
if "Pharia" in model_id_a:
|
126 |
logging.debug("model a is pharia based, applying own template")
|
127 |
formatted_message_a = apply_chat_template(new_messages_a, add_generation_prompt=True)
|
128 |
input_ids_a = tokenizer_b(formatted_message_a, return_tensors="pt").input_ids.to(model_a.device)
|
|
|
133 |
return_tensors="pt"
|
134 |
).to(model_a.device)
|
135 |
|
136 |
+
if "Pharia" in model_id_b:
|
137 |
+
logging.debug("model b is Pharia based, applying own template")
|
138 |
formatted_message_b = apply_chat_template(new_messages_a, add_generation_prompt=True)
|
139 |
input_ids_b = tokenizer_b(formatted_message_b, return_tensors="pt").input_ids.to(model_a.device)
|
140 |
else:
|