Update app.py
app.py
CHANGED
@@ -33,8 +33,8 @@ def download_prompt_templates():
     choices = choices[:1] + sorted(choices[1:])
     return gr.update(value=choices[0], choices=choices)

-
-
+def on_token_change(user_token):
+    openai.api_key = user_token

 def on_prompt_template_change(prompt_template):
     if not isinstance(prompt_template, str): return
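The two deleted blank lines are replaced by an `on_token_change` handler. As a point of reference, here is a minimal sketch (assuming the app targets the pre-1.0 `openai` SDK, which reads a module-level `api_key`) of how such a handler feeds later completion calls; the `ask` helper and model name are illustrative, not part of the commit:

```python
import openai

def on_token_change(user_token):
    # The pre-1.0 SDK reads this module-level attribute on every request,
    # so updating it here is enough for later calls to use the new key.
    openai.api_key = user_token

def ask(prompt_text):
    # Illustrative helper; submit_message in app.py presumably does the
    # equivalent with the full conversation history.
    return openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": prompt_text}],
    )
```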
@@ -55,7 +55,7 @@ def submit_message( prompt, prompt_template, temperature, max_tokens, context_le

     prompt_msg = { "role": "user", "content": prompt }

-    if not
+    if not user_token:
         history.append(prompt_msg)
         history.append({
             "role": "system",
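Only the condition itself changes in this hunk; below is a hedged reconstruction of the guard it presumably completes. The error text and return shape are guesses, and the real `submit_message` takes more parameters (see the event wiring further down).

```python
def submit_message(user_token, prompt, history, state):
    # Hypothetical reconstruction -- only the `if not user_token:` line and the
    # surrounding history.append calls are visible in the diff.
    prompt_msg = {"role": "user", "content": prompt}

    if not user_token:
        # No key available: surface an error in the chat instead of calling the API.
        history.append(prompt_msg)
        history.append({
            "role": "system",
            "content": "Error: no OpenAI API key is set.",
        })
        return "", history, state
    ...
```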
@@ -125,7 +125,7 @@ with gr.Blocks(css=css) as demo:
             btn_clear_conversation = gr.Button("🔃 Start New Conversation")
         with gr.Column():
             #gr.Markdown("Enter your OpenAI API Key.", elem_id="label")
-
+            user_token = gr.Text(value=API)
             prompt_template = gr.Dropdown(label="Set a custom instruction for the chatbot:", choices=list(prompt_templates.keys()))
             prompt_template_preview = gr.Markdown(elem_id="prompt_template_preview", visible=False)
             with gr.Accordion("Advanced parameters", open=False, visible=False):
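`API` is not defined in this hunk; presumably it is a constant set near the top of app.py, for example from a Space secret. A sketch of that pattern, with the environment-variable name being a guess:

```python
import os
import gradio as gr

# Assumption: API is defined earlier in app.py, e.g. from a Space secret.
API = os.environ.get("OPENAI_API_KEY", "")

with gr.Blocks() as demo:
    with gr.Column():
        # gr.Text is an alias of gr.Textbox; prefilling it with API spares the
        # user from pasting a key by hand. type="password" (not used in the
        # diff) would keep the value masked in the UI.
        user_token = gr.Text(value=API)
```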
@@ -135,13 +135,13 @@ with gr.Blocks(css=css) as demo:

    print(user_token)

-    btn_submit.click(submit_message, [input_message, prompt_template, temperature, max_tokens, context_length, state], [input_message, chatbot, total_tokens_str, state])
-    input_message.submit(submit_message, [input_message, prompt_template, temperature, max_tokens, context_length, state], [input_message, chatbot, total_tokens_str, state])
+    btn_submit.click(submit_message, [user_token, input_message, prompt_template, temperature, max_tokens, context_length, state], [input_message, chatbot, total_tokens_str, state])
+    input_message.submit(submit_message, [user_token, input_message, prompt_template, temperature, max_tokens, context_length, state], [input_message, chatbot, total_tokens_str, state])
     btn_clear_conversation.click(clear_conversation, [], [input_message, chatbot, total_tokens_str, state])
     prompt_template.change(on_prompt_template_change, inputs=[prompt_template], outputs=[prompt_template_preview])
-
-
-
+    user_token.change(on_token_change, inputs=[user_token], outputs=[])
+
+
     demo.load(download_prompt_templates, inputs=None, outputs=[prompt_template], queue=False)

     print(user_token)
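Taken together, the hunks route the token component through both submit paths and hook its change event to `on_token_change`. A condensed, self-contained sketch of that wiring with the handler bodies stubbed out (component lists are shortened relative to the real app):

```python
import gradio as gr
import openai

def on_token_change(user_token):
    openai.api_key = user_token

def submit_message(user_token, message, history):
    # Stub: the real handler calls the OpenAI API and tracks token usage.
    history = history + [(message, "(reply placeholder)")]
    return "", history

with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    input_message = gr.Textbox(label="Message")
    btn_submit = gr.Button("Submit")
    user_token = gr.Text(label="OpenAI API Key", type="password")

    # Mirror of the diff: user_token is the first input on both submit paths,
    # and its change event keeps openai.api_key in sync.
    btn_submit.click(submit_message, [user_token, input_message, chatbot], [input_message, chatbot])
    input_message.submit(submit_message, [user_token, input_message, chatbot], [input_message, chatbot])
    user_token.change(on_token_change, inputs=[user_token], outputs=[])

if __name__ == "__main__":
    demo.launch()
```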