Update app.py
app.py CHANGED
@@ -3,8 +3,8 @@ import threading
 from transformers import AutoTokenizer, AutoModelForCausalLM, GenerationConfig, TextIteratorStreamer
 
 # Load the model and tokenizer
-tokenizer = AutoTokenizer.from_pretrained("SmallDoge/Doge-20M-
-model = AutoModelForCausalLM.from_pretrained("SmallDoge/Doge-20M-
+tokenizer = AutoTokenizer.from_pretrained("SmallDoge/Doge-20M-checkpoint")
+model = AutoModelForCausalLM.from_pretrained("SmallDoge/Doge-20M-checkpoint", trust_remote_code=True)
 
 # Generation configuration
 generation_config = GenerationConfig(
@@ -73,7 +73,7 @@ def chat(user_input, history):
 
 # Build the Gradio interface
 with gr.Blocks() as demo:
-    gr.Markdown("## Chat with SmallDoge/Doge-20M-
+    gr.Markdown("## Chat with SmallDoge/Doge-20M-checkpoint")
     chatbot = gr.Chatbot()  # displays the conversation as a list of (user, assistant) pairs
     with gr.Row():
         msg = gr.Textbox(show_label=False, placeholder="Type your message here...")
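
The hunks only show the context around def chat(user_input, history) and the opening of generation_config; their bodies are not part of this commit. For orientation, below is a minimal sketch of how such a streaming chat function is commonly wired with the threading and TextIteratorStreamer imports visible on line 3. The GenerationConfig values, the prompt handling, and the exact streaming loop are assumptions, not code taken from app.py; the added trust_remote_code=True is presumably there because the checkpoint relies on custom modeling code hosted on the Hub.

import threading

from transformers import (
    AutoModelForCausalLM,
    AutoTokenizer,
    GenerationConfig,
    TextIteratorStreamer,
)

# Same checkpoint the commit switches to.
tokenizer = AutoTokenizer.from_pretrained("SmallDoge/Doge-20M-checkpoint")
model = AutoModelForCausalLM.from_pretrained(
    "SmallDoge/Doge-20M-checkpoint", trust_remote_code=True
)

# Illustrative decoding settings; the real values in app.py are outside the diff hunks.
generation_config = GenerationConfig(
    max_new_tokens=128,
    do_sample=True,
    temperature=0.8,
    top_p=0.9,
)

def chat(user_input, history):
    """Generate a reply in a background thread and stream partial text to the UI."""
    inputs = tokenizer(user_input, return_tensors="pt")
    streamer = TextIteratorStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)

    # model.generate blocks, so it runs in its own thread while we consume the streamer.
    thread = threading.Thread(
        target=model.generate,
        kwargs=dict(**inputs, generation_config=generation_config, streamer=streamer),
    )
    thread.start()

    reply = ""
    for new_text in streamer:
        reply += new_text
        # gr.Chatbot displays a list of (user, assistant) pairs, as noted in app.py.
        yield history + [(user_input, reply)]
    thread.join()

Inside the Blocks context, a generator like this is typically connected with msg.submit(chat, [msg, chatbot], chatbot), so the Chatbot component re-renders on every yield as tokens arrive.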