Update app.py
app.py CHANGED

@@ -37,7 +37,7 @@ def generate_chat_response_streaming(
     input_text,
     chat_history=None,
     instruction_message=translation_instruction,
-    max_new_tokens=
+    max_new_tokens=1_024,
     temperature=0.0
 ):
     """Given some input from the user (and a chat history), generate a response"""
@@ -97,11 +97,11 @@ with gr.Blocks() as demo:
     ## Text translation using a "standard" Large Language Model (LLM)
     """)
     # User interface
-    chatbot = gr.Chatbot(height=
+    chatbot = gr.Chatbot(height=500, render=False)
     textbox = gr.Textbox(
         placeholder="Enter text to translate",
         container=False,
-        scale=7, lines=
+        scale=7, lines=6, render=False)

     # Additional inputs
     instruction_message = gr.Textbox(
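For context, below is a minimal sketch of how components created with render=False are typically wired into a Gradio chat app. Only the lines in the diff above come from the Space's app.py; the gr.ChatInterface call, the placeholder streaming logic, and the stand-in translation_instruction value are assumptions added for illustration, not the Space's actual code.

import gradio as gr

# Stand-in for the Space's translation_instruction constant (not shown in the diff).
translation_instruction = "Translate the user's text into English."

def generate_chat_response_streaming(
    input_text,
    chat_history=None,
    instruction_message=translation_instruction,
    max_new_tokens=1_024,
    temperature=0.0,
):
    """Given some input from the user (and a chat history), generate a response"""
    # Placeholder: the real app would stream tokens from an LLM here.
    yield f"[{instruction_message}] {input_text}"

with gr.Blocks() as demo:
    gr.Markdown("""
    ## Text translation using a "standard" Large Language Model (LLM)
    """)
    # User interface: render=False defers drawing these components so that
    # gr.ChatInterface can place them itself.
    chatbot = gr.Chatbot(height=500, render=False)
    textbox = gr.Textbox(
        placeholder="Enter text to translate",
        container=False,
        scale=7, lines=6, render=False)

    # Additional inputs
    instruction_message = gr.Textbox(
        label="Instruction", value=translation_instruction, render=False)

    # Assumed wiring: ChatInterface calls the function as
    # fn(message, history, *additional_inputs), which matches the signature above.
    gr.ChatInterface(
        fn=generate_chat_response_streaming,
        chatbot=chatbot,
        textbox=textbox,
        additional_inputs=[instruction_message],
    )

demo.launch()

The render=False pattern lets the chatbot and textbox be configured up front (height, placeholder, size) while the chat wrapper controls where they actually appear in the layout.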