Spaces:
Runtime error
Runtime error
Mathias Lux
committed on
Commit
·
f0a7153
1
Parent(s):
7b36e69
removed the additional parameters.
Browse files
app.py
CHANGED
@@ -51,10 +51,10 @@ def respond(
|
|
51 |
|
52 |
for message in client.chat_completion(
|
53 |
messages,
|
54 |
-
max_tokens=
|
55 |
stream=True,
|
56 |
-
temperature=
|
57 |
-
top_p=
|
58 |
):
|
59 |
token = message.choices[0].delta.content
|
60 |
|
@@ -69,22 +69,11 @@ with gr.Blocks(title="AI Biographical Interview Assistant") as demo:
|
|
69 |
an in-depth interview about your life experiences. The AI interviewer uses the Socratic method
|
70 |
to ask thoughtful questions and gather information for creating a biographical article.
|
71 |
|
72 |
-
Simply start
|
73 |
""")
|
74 |
|
75 |
chatbot = gr.ChatInterface(
|
76 |
respond,
|
77 |
-
additional_inputs=[
|
78 |
-
gr.Slider(minimum=1, maximum=2048, value=1024, step=1, label="Max new tokens"),
|
79 |
-
gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
|
80 |
-
gr.Slider(
|
81 |
-
minimum=0.1,
|
82 |
-
maximum=1.0,
|
83 |
-
value=0.95,
|
84 |
-
step=0.05,
|
85 |
-
label="Top-p (nucleus sampling)",
|
86 |
-
),
|
87 |
-
],
|
88 |
)
|
89 |
|
90 |
|
|
|
51 |
|
52 |
for message in client.chat_completion(
|
53 |
messages,
|
54 |
+
max_tokens=1024, # Fixed value
|
55 |
stream=True,
|
56 |
+
temperature=0.7, # Fixed value
|
57 |
+
top_p=0.95, # Fixed value
|
58 |
):
|
59 |
token = message.choices[0].delta.content
|
60 |
|
|
|
69 |
an in-depth interview about your life experiences. The AI interviewer uses the Socratic method
|
70 |
to ask thoughtful questions and gather information for creating a biographical article.
|
71 |
|
72 |
+
Simply start with stating your name and a few facts about your early childhood, and the AI will guide you through the interview process. When finished ask for the final article.
|
73 |
""")
|
74 |
|
75 |
chatbot = gr.ChatInterface(
|
76 |
respond,
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
77 |
)
|
78 |
|
79 |
|