Update app.py
Browse files
app.py
CHANGED
@@ -40,7 +40,7 @@ title = "flan-t5-large-grammar-synthesis Llama.cpp"
|
|
40 |
description = """
|
41 |
I'm using [fairydreaming/T5-branch](https://github.com/fairydreaming/llama-cpp-python/tree/t5); I'm not sure the current llama-cpp-python server supports t5
|
42 |
|
43 |
-
[Model-Q6_K-GGUF](flan-t5-large-grammar-synthesis), [Reference1](https://huggingface.co/spaces/sitammeur/Gemma-llamacpp)
|
44 |
"""
|
45 |
|
46 |
|
@@ -119,7 +119,7 @@ def respond(
|
|
119 |
# Create a chat interface
|
120 |
demo = gr.ChatInterface(
|
121 |
respond,
|
122 |
-
examples=[["What
|
123 |
additional_inputs_accordion=gr.Accordion(
|
124 |
label="⚙️ Parameters", open=False, render=False
|
125 |
),
|
|
|
40 |
description = """
|
41 |
I'm using [fairydreaming/T5-branch](https://github.com/fairydreaming/llama-cpp-python/tree/t5); I'm not sure the current llama-cpp-python server supports t5
|
42 |
|
43 |
+
[Model-Q6_K-GGUF](https://huggingface.co/pszemraj/flan-t5-large-grammar-synthesis-gguf), [Reference1](https://huggingface.co/spaces/sitammeur/Gemma-llamacpp)
|
44 |
"""
|
45 |
|
46 |
|
|
|
119 |
# Create a chat interface
|
120 |
demo = gr.ChatInterface(
|
121 |
respond,
|
122 |
+
examples=[["What are the capital of France?"], ["What real child was raise by wolves?"], ["What am gravity?"]],
|
123 |
additional_inputs_accordion=gr.Accordion(
|
124 |
label="⚙️ Parameters", open=False, render=False
|
125 |
),
|