Update app.py
Browse files
app.py
CHANGED
@@ -10,6 +10,15 @@ subprocess.run('pip install llama-cpp-agent==0.2.10', shell=True)
|
|
10 |
hf_hub_download(repo_id="bartowski/Meta-Llama-3-70B-Instruct-GGUF", filename="Meta-Llama-3-70B-Instruct-Q3_K_M.gguf", local_dir = "./models")
|
11 |
hf_hub_download(repo_id="bartowski/Llama-3-8B-Synthia-v3.5-GGUF", filename="Llama-3-8B-Synthia-v3.5-f16.gguf", local_dir = "./models")
|
12 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
13 |
@spaces.GPU(duration=120)
|
14 |
def respond(
|
15 |
message,
|
@@ -84,8 +93,10 @@ demo = gr.ChatInterface(
|
|
84 |
theme=gr.themes.Soft(primary_hue="green", secondary_hue="indigo", neutral_hue="zinc",font=[gr.themes.GoogleFont("Exo"), "ui-sans-serif", "system-ui", "sans-serif"]).set(
|
85 |
block_background_fill_dark="*neutral_950",
|
86 |
input_background_fill_dark="*neutral_950",
|
87 |
-
message_border_radius="*radius_md"
|
88 |
-
|
|
|
|
|
89 |
)
|
90 |
|
91 |
if __name__ == "__main__":
|
|
|
10 |
hf_hub_download(repo_id="bartowski/Meta-Llama-3-70B-Instruct-GGUF", filename="Meta-Llama-3-70B-Instruct-Q3_K_M.gguf", local_dir = "./models")
|
11 |
hf_hub_download(repo_id="bartowski/Llama-3-8B-Synthia-v3.5-GGUF", filename="Llama-3-8B-Synthia-v3.5-f16.gguf", local_dir = "./models")
|
12 |
|
13 |
+
css = """
|
14 |
+
.message-row {
|
15 |
+
justify-content: space-evenly;
|
16 |
+
}
|
17 |
+
.message .user .message-bubble-border {
|
18 |
+
border-radius: 6px;
|
19 |
+
}
|
20 |
+
"""
|
21 |
+
|
22 |
@spaces.GPU(duration=120)
|
23 |
def respond(
|
24 |
message,
|
|
|
93 |
theme=gr.themes.Soft(primary_hue="green", secondary_hue="indigo", neutral_hue="zinc",font=[gr.themes.GoogleFont("Exo"), "ui-sans-serif", "system-ui", "sans-serif"]).set(
|
94 |
block_background_fill_dark="*neutral_950",
|
95 |
input_background_fill_dark="*neutral_950",
|
96 |
+
message_border_radius="*radius_md",
|
97 |
+
border_color_accent_subdued="*neutral_900"
|
98 |
+
),
|
99 |
+
css=css
|
100 |
)
|
101 |
|
102 |
if __name__ == "__main__":
|