Update app.py
app.py CHANGED

@@ -1,5 +1,5 @@
 import gradio as gr
-from huggingface_hub import InferenceClient
+from huggingface_hub import InferenceClient, HfApi, HfFolder
 import json
 
 client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
@@ -47,6 +47,14 @@ def respond(
         json.dump(data, f)
     print("Data successfully saved to data.json")
 
+# Save the file to Hugging Face space
+hf = HfApi()
+token = HfFolder.get_token()
+namespace = "Tech-Meld/HX-Mistral_Alpha"
+filename = "./data.json"
+hf.upload_file(filename, namespace, token=token)
+print("File successfully uploaded to Hugging Face space")
+
 
 # Add a title to the UI
 title = "<h1 style='text-align: center;'>Corenet</h1>"
@@ -68,7 +76,7 @@ demo = gr.ChatInterface(
     title=title,
     description=description,
     additional_inputs=[pre_prompt,
-                       gr.Slider(minimum=256, maximum=8192, value=512, step=
+                       gr.Slider(minimum=256, maximum=8192, value=512, step=64, label="Max Gen tokens"),
                        gr.Slider(minimum=0.3, maximum=2.5, value=0.8, step=0.1, label="Creativity"),
                        gr.Slider(
                            minimum=0.1,
@@ -82,4 +90,4 @@ demo = gr.ChatInterface(
 
 
 if __name__ == "__main__":
-    demo.launch()
+    demo.launch()
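
A note on the new upload call: in huggingface_hub, HfApi.upload_file takes keyword-only arguments (path_or_fileobj, path_in_repo, repo_id, ...), so the positional call hf.upload_file(filename, namespace, token=token) added above is likely to raise a TypeError on current releases. Below is a minimal sketch of how the same upload could be written, assuming the target repo id "Tech-Meld/HX-Mistral_Alpha" taken from the diff; repo_type="space" is an assumption based on the "Hugging Face space" comment and should be dropped or changed if the id actually points at a model or dataset repo.

from huggingface_hub import HfApi, HfFolder

api = HfApi()
# Cached token from `huggingface-cli login`; in a Space this is typically
# provided instead via an HF_TOKEN secret in the Space settings.
token = HfFolder.get_token()

api.upload_file(
    path_or_fileobj="./data.json",          # local file produced by json.dump above
    path_in_repo="data.json",               # destination path inside the repo
    repo_id="Tech-Meld/HX-Mistral_Alpha",   # target repo from the diff
    repo_type="space",                      # assumption; omit for a model repo
    token=token,
)

Newer huggingface_hub releases steer away from HfFolder in favour of login() and reading the token from the environment, so pulling the token from an HF_TOKEN Space secret may be the more durable choice here.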