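# Gradio demo: send a text prompt to the selected LLM API (OpenAI, Azure OpenAI,
# Google PaLM, or Llama 2) and show the completion next to the input.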
import os

import gradio as gr
import openai

#from dotenv import load_dotenv
#load_dotenv()

llm_api_options = ["OpenAI API","Azure OpenAI API","Google PaLM API", "Llama 2"]
TEST_MESSAGE = "My favorite TV shows are The Mentalist, The Blacklist, Designated Survivor, and Unforgettable. What are ten series that I should watch next?"
openai_models = ["gpt-4", "gpt-4-0613", "gpt-4-32k", "gpt-4-32k-0613", "gpt-3.5-turbo", 
                     "gpt-3.5-turbo-0613", "gpt-3.5-turbo-16k", "gpt-3.5-turbo-16k-0613", "text-davinci-003", 
                     "text-davinci-002", "text-curie-001", "text-babbage-001", "text-ada-001"]

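# Route the request to the selected provider and return a (status_info, response)
# pair matching the two Gradio outputs wired up below.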
def test_handler(optionSelection, prompt: str = "Write an introductory paragraph to explain Generative AI to the reader of this content."):
    match optionSelection:
        case "OpenAI API":
            try:
                #model = "gpt-35-turbo"
                model = "gpt-4"
                system_prompt = "Explain in detail to help student understand the concept."
                assistant_prompt = None

                # System message first, then the user prompt; only add an
                # assistant turn when one is actually supplied.
                messages = [
                    {"role": "system", "content": system_prompt},
                    {"role": "user", "content": prompt}
                ]
                if assistant_prompt:
                    messages.append({"role": "assistant", "content": assistant_prompt})

                openai.api_key = os.getenv("OPENAI_API_KEY")

                completion = openai.ChatCompletion.create(
                    model=model,
                    messages=messages,
                    temperature=0.7
                )
                response = completion["choices"][0]["message"]["content"]
                return "", response
            except openai.error.ServiceUnavailableError as error:
                return f"OpenAI service is unavailable: {error}", ""
            except openai.error.OpenAIError as error:
                return f"OpenAI API error: {error}", ""
        case  "Azure OpenAI API":
            return "", ""
        case  "Google PaLM API":
            return "", ""
        case  "Llama 2":
            return "", ""
        case _:
            if optionSelection not in llm_api_options:
                return ValueError("Invalid choice!"), ""
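
# A possible shape for the Azure OpenAI branch, kept here as a commented sketch.
# It is untested and assumes the pre-1.0 openai SDK used above; the endpoint and
# key environment variables and the deployment name are placeholders, not part
# of the original app.
#
#     openai.api_type = "azure"
#     openai.api_base = os.getenv("AZURE_OPENAI_ENDPOINT")
#     openai.api_version = "2023-05-15"
#     openai.api_key = os.getenv("AZURE_OPENAI_KEY")
#     completion = openai.ChatCompletion.create(
#         engine="<your-deployment-name>",  # Azure uses a deployment name, not a model name
#         messages=messages,
#         temperature=0.7
#     )
#     response = completion["choices"][0]["message"]["content"]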

        

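# Tabbed UI: a radio to pick the API, a model dropdown for OpenAI, and a
# "Try it" button that sends the test prompt to the handler above.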
with gr.Blocks() as LLMDemoTabbedScreen:
    with gr.Tab("Text-to-Text (Text Completion)"):
        llm_options = gr.Radio(llm_api_options, label="Select one", info="Which service do you want to use?", value="OpenAI API")
        with gr.Tab("Open AI"):
            openai_model = gr.Dropdown(openai_models, value="gpt-4", label="Model", info="Select one, for Natural language")

        with gr.Row():
            with gr.Column(): 
                test_string = gr.Textbox(label="Try String", value=TEST_MESSAGE, lines=2)
                test_string_response = gr.Textbox(label="Response")
                test_string_output_info = gr.Label(value="Output Info", label="Info")
                test_button = gr.Button("Try it")


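    # Clicking the button calls test_handler with the selected API and the prompt;
    # the returned (info, response) pair fills the Info label and Response box.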
    test_button.click(
            fn=test_handler,
            inputs=[llm_options, test_string],
            outputs=[test_string_output_info, test_string_response]
    )
    
if __name__ == "__main__":
    LLMDemoTabbedScreen.launch()