import os
import shutil
import time

import gradio as gr
import openai
import pytesseract
from PIL import Image

# Set up the OpenAI API key (read from the environment).
openai.api_key = os.environ["CHATGPT_API_KEY"]

Init_system_prompt = ("You are an AI Assistant that teaches kids various subjects. "
                      "You are given learning material, and your task is to ask questions about that material, "
                      "then grade the answers and give feedback on how to improve them.")
system_message = {"role": "system", "content": Init_system_prompt}

# Point pytesseract at the tesseract binary found on the PATH, if any.
tesseract_path = shutil.which("tesseract")
if tesseract_path:
    pytesseract.pytesseract.tesseract_cmd = tesseract_path

########### TAB 1 (UPLOAD) FUNCTIONS  #############################
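# print_files is a small debug helper that dumps metadata about the uploaded
# files to the console; create_data runs OCR on the uploaded images and builds
# the text context used later for question generation.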

def print_files(files):
    for file in files:
        print(file.__dir__())
        print(file.name)
        print(file.file)


def create_data(files):
    """OCR every uploaded .png/.jpg file (Finnish language pack) and
    concatenate the recognized text into a single context string."""
    question_context = ''
    for file in files:
        if file.name.lower().endswith(('.png', '.jpg')):
            try:
                question_context += pytesseract.image_to_string(Image.open(file.name), lang='fin') + '\n\n'
            except Exception as e:
                print(e)

    return question_context


########### TAB 2 (CHROMA + PLOT) FUNCTIONS  #############################
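# The Chroma indexing/retrieval helpers for this tab are not implemented yet.
# A minimal sketch of how the OCR context could be chunked and indexed with
# chromadb (assuming the `chromadb` package is installed; the names below are
# illustrative and not wired into the current app) might look like:
#
#   import chromadb
#
#   chroma_client = chromadb.Client()
#   collection = chroma_client.get_or_create_collection(name="materials")
#
#   def index_context(question_context):
#       # Split the OCR text into paragraph-sized chunks and store them.
#       chunks = [c for c in question_context.split('\n\n') if c.strip()]
#       collection.add(documents=chunks,
#                      ids=[f"chunk-{i}" for i in range(len(chunks))])
#
#   def retrieve_context(query, n_results=3):
#       # Return the chunks most similar to the query, joined into one string.
#       result = collection.query(query_texts=[query], n_results=n_results)
#       return '\n\n'.join(result["documents"][0])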

########### TAB 3 (CHAT) FUNCTIONS  #############################

def user(user_message, history):
    # Echo the user's message into the visible chat history; the assistant's
    # reply is filled in by bot() in the next step of the event chain.
    return "", history + [[user_message, None]]

def bot(history, messages_history, system_prompt, teksti_contexti, temperature, max_tokens, chatgpt_model):
    # Take the latest user message from the chat window, ask the model for a
    # reply, and append it to both the visible history and the message log.
    user_message = history[-1][0]

    bot_message, messages_history = ask_gpt(user_message, messages_history, system_prompt, teksti_contexti, temperature, max_tokens, chatgpt_model)
    messages_history += [{"role": "assistant", "content": bot_message}]
    history[-1][1] = bot_message
    time.sleep(0.2)
    return history, messages_history, str(messages_history)

def ask_gpt(message, messages_history, system_prompt, context, temperature, max_tokens, chatgpt_model):
    # Parameter order matches the call in bot(). Note that `message` itself is
    # currently unused: the model is always prompted with the OCR context.
    if len(messages_history) < 1:
        messages_history = [{"role": "system", "content": system_prompt}]
    messages_history += [{"role": "user", "content": context + '\nPlease ask a question about the previous paragraph.'}]
    print(messages_history)
    response = openai.ChatCompletion.create(
        model=chatgpt_model,
        messages=messages_history,
        temperature=temperature,
        max_tokens=max_tokens
    )
    return response['choices'][0]['message']['content'], messages_history

def init_history(messages_history, system_prompt):
    # Reset the conversation: keep only a fresh system message and clear the log.
    messages_history = [{"role": "system", "content": system_prompt}]
    msg_log = gr.Textbox.update(value="Message history will appear here")
    system_prompt = gr.Textbox.update(value=system_prompt, label='Insert system message here')
    return messages_history, system_prompt, msg_log


############# INTERFACE ##########################
with gr.Blocks() as demo:
    gr.Markdown("ChatGPT demo with RAG using Chromadb")
    
    
    
    ############# TAB 1 ##########################
    with gr.Tab("Upload documents"):
        with gr.Row():
            files = gr.File(file_count='multiple', file_types=['image'], interactive=True)
            gr.Markdown("")
            testi = gr.Button("Print file info (debug)")
        
    ############# TAB 2 ##########################
    with gr.Tab("Create questions"):
        
        with gr.Row():
            gr.Markdown("")
        with gr.Row():
            create_context_btn = gr.Button("Create context")
            teksti_contexti = gr.Textbox(value='Context will appear here', label='Question context')
    
    ############# TAB 3 ##########################

    with gr.Tab("Chat"):
        gr.Markdown("""<h1><center>ChatGPT 
        ChatBot with Gradio and OpenAI</center></h1>
        """)
        with gr.Row():
            system_prompt = gr.Textbox(value=Init_system_prompt, label='Insert system message here')
            chatgpt_model = gr.Dropdown(choices=["gpt-3.5-turbo", "gpt-3.5-turbo-0301", "gpt-3.5-turbo-0613"], value='gpt-3.5-turbo',label='ChatGPT model to use', interactive=True)
            temperature = gr.Slider(minimum=0.0, maximum=1.0, step=0.05, value=0.0, label='temperature')
            max_tokens = gr.Slider(minimum=10, maximum=600, step=10, value=100, label='Max tokens')
        with gr.Row():
            chatbot = gr.Chatbot(label='ChatGPT Chat')
            state = gr.State([])
        with gr.Row():
            msg = gr.Textbox()
        with gr.Row():
            clear = gr.Button("Clear")
        with gr.Row():
            msg_log = gr.Textbox("Message history will appear here", label='Message history')
                    
        
    with gr.Accordion("Click to open the instructions"):
        gr.Markdown("Instructions will go here")


    # TAB 1 (UPLOAD) Interactive elements:
    testi.click(print_files, [files])

    # TAB 2 (CHROMA + PLOT) Interactive elements:
    create_context_btn.click(create_data, files, teksti_contexti)


    # TAB 3 (CHAT) Interactive elements:
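    # Submitting a message first runs user() to echo it into the chat window,
    # then bot() to generate the assistant reply. The Clear button empties the
    # chat window and re-initializes the message history from the current
    # system prompt.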
    msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
        bot, [chatbot, state, system_prompt, teksti_contexti, temperature, max_tokens, chatgpt_model], [chatbot, state, msg_log]
    )
    clear.click(lambda: None, None, chatbot, queue=False).success(init_history, [state, system_prompt], [state, system_prompt, msg_log])


demo.launch(debug=True)