import gradio as gr
from haystack.nodes import PromptNode

from utils import lemmatizer_func
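
# Note: `lemmatizer_func` is provided by utils.py in this Space and is not shown here.
# A minimal sketch of what such a helper might look like, assuming an NLTK
# WordNetLemmatizer (the actual implementation in utils.py may differ):
#
#   import nltk
#   from nltk.stem import WordNetLemmatizer
#
#   def lemmatizer_func(text: str) -> str:
#       nltk.download("wordnet", quiet=True)  # make sure the WordNet data is available
#       lemmatizer = WordNetLemmatizer()
#       return " ".join(lemmatizer.lemmatize(token) for token in text.split())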

def run_prompt(prompt, api_key):
    # Query gpt-3.5-turbo with the plain prompt and with its lemmatized version,
    # returning each answer together with the token count reported for that call.
    prompt_node = PromptNode(model_name_or_path="gpt-3.5-turbo", api_key=api_key)
    lemmatized_prompt = lemmatizer_func(prompt)
    response_plain = prompt_node(prompt)
    response_lemmatized = prompt_node(lemmatized_prompt)
    return response_plain[0][0], response_plain[1]["total_tokens"], response_lemmatized[0][0], response_lemmatized[1]["total_tokens"]

# Gradio UI: collect the API key and prompt, then show each answer with its token count.
with gr.Blocks() as demo:
    api_key = gr.Textbox(label="Enter your OpenAI API key", type="password")
    prompt = gr.Textbox(label="Prompt", value="Rachel has 17 apples. She gives 9 to Sarah. How many apples does Rachel have now?")
    submit_btn = gr.Button("Submit")
    token_count_plain = gr.Number(label="Plain Text Token Count")
    token_count_lemmatized = gr.Number(label="Lemmatized Text Token Count")
    prompt_response = gr.Textbox(label="Answer", show_copy_button=True)
    lemmatized_prompt_response = gr.Textbox(label="Lemmatized Answer", show_copy_button=True)
    submit_btn.click(
        fn=run_prompt,
        inputs=[prompt, api_key],
        outputs=[prompt_response, token_count_plain, lemmatized_prompt_response, token_count_lemmatized],
    )

demo.launch()