Spaces:
Running
Running
Update app.py
Browse files
app.py
CHANGED
@@ -154,7 +154,7 @@ Have a conversation with an AI using your reference voice!
         chat_model_state = AutoModelForCausalLM.from_pretrained(
             model_name, torch_dtype="auto", device_map="auto"
         )
-        chat_tokenizer_state = AutoTokenizer.from_pretrained(model_name)
+        chat_tokenizer_state = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
         gr.Info("Chat model loaded.")
         return gr.update(visible=False), gr.update(visible=True)

@@ -162,7 +162,7 @@ Have a conversation with an AI using your reference voice!
     else:
         chat_interface_container = gr.Column()
         model_name = "deepseek-ai/DeepSeek-V3"
-        chat_model_state = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype="auto", device_map="auto")
+        chat_model_state = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype="auto", device_map="auto", trust_remote_code=True)
         chat_tokenizer_state = AutoTokenizer.from_pretrained(model_name)

     with chat_interface_container: