Spaces: Sleeping
Commit: Update app.py (Browse files)
File: app.py — CHANGED
@@ -1,6 +1,7 @@
|
|
1 |
import gradio as gr
|
2 |
from huggingface_hub import InferenceClient
|
3 |
import json
|
|
|
4 |
|
5 |
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
|
6 |
|
@@ -43,8 +44,9 @@ def respond(
|
|
43 |
data.append({"input": messages, "output": response})
|
44 |
|
45 |
# Write the data to a JSON file
|
46 |
-
with open('data.json', 'w') as f:
|
47 |
json.dump(data, f)
|
|
|
48 |
|
49 |
|
50 |
# Add a title to the UI
|
@@ -59,14 +61,14 @@ pre_prompt = gr.Textbox(
|
|
59 |
label="Pre-prompt",
|
60 |
interactive=False,
|
61 |
placeholder="Type here...",
|
|
|
62 |
)
|
63 |
|
64 |
demo = gr.ChatInterface(
|
65 |
respond,
|
66 |
title=title,
|
67 |
description=description,
|
68 |
-
additional_inputs=[
|
69 |
-
pre_prompt,
|
70 |
gr.Slider(minimum=256, maximum=8192, value=512, step=1, label="Max Gen tokens"),
|
71 |
gr.Slider(minimum=0.3, maximum=2.5, value=0.8, step=0.1, label="Creativity"),
|
72 |
gr.Slider(
|
@@ -81,5 +83,9 @@ demo = gr.ChatInterface(
|
|
81 |
|
82 |
|
83 |
if __name__ == "__main__":
|
84 |
-
|
85 |
-
|
|
|
|
|
|
|
|
|
|
1 |
import gradio as gr
|
2 |
from huggingface_hub import InferenceClient
|
3 |
import json
|
4 |
+
import torch
|
5 |
|
6 |
# Shared Hugging Face Inference API client for the zephyr-7b-beta chat model;
# generation runs remotely on the HF endpoint, not on this machine.
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
|
7 |
|
|
|
44 |
data.append({"input": messages, "output": response})
|
45 |
|
46 |
# Write the data to a JSON file
|
47 |
+
with open('./data.json', 'w') as f:
|
48 |
json.dump(data, f)
|
49 |
+
print("Data successfully saved to data.json")
|
50 |
|
51 |
|
52 |
# Add a title to the UI
|
|
|
61 |
label="Pre-prompt",
|
62 |
interactive=False,
|
63 |
placeholder="Type here...",
|
64 |
+
visible=False,
|
65 |
)
|
66 |
|
67 |
demo = gr.ChatInterface(
|
68 |
respond,
|
69 |
title=title,
|
70 |
description=description,
|
71 |
+
additional_inputs=[pre_prompt,
|
|
|
72 |
gr.Slider(minimum=256, maximum=8192, value=512, step=1, label="Max Gen tokens"),
|
73 |
gr.Slider(minimum=0.3, maximum=2.5, value=0.8, step=0.1, label="Creativity"),
|
74 |
gr.Slider(
|
|
|
83 |
|
84 |
|
85 |
if __name__ == "__main__":
    # Report whether PyTorch can see a CUDA device before starting the UI.
    # NOTE(review): inference is performed remotely via InferenceClient, so
    # the local GPU status printed here is informational only — confirm intent.
    cuda_ready = torch.cuda.is_available()
    message = (
        "CUDA GPU is installed. Running inference on GPU."
        if cuda_ready
        else "CUDA GPU is not installed. Running inference on CPU."
    )
    print(message)
    demo.launch()
|