File size: 3,448 Bytes
155af2c
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
08c3cb4
155af2c
 
 
 
 
 
 
0f8aba4
155af2c
0f8aba4
155af2c
 
 
 
 
 
 
 
 
 
0f8aba4
155af2c
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
0f8aba4
155af2c
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
import streamlit as st
import random
import time

from openai import OpenAI
import pandas as pd

import elemeta.nlp.runners.metafeature_extractors_runner as metafeature_extractors_runner

from elemeta.nlp.runners.metafeature_extractors_runner import MetafeatureExtractorsRunner
from elemeta.nlp.extractors.high_level.text_length import TextLength
from elemeta.nlp.extractors.high_level.text_complexity import TextComplexity
from elemeta.nlp.extractors.high_level.word_count import WordCount
from elemeta.nlp.extractors.high_level.detect_language_langdetect import DetectLanguage
from elemeta.nlp.extractors.high_level.sentiment_polarity import SentimentPolarity
from elemeta.nlp.extractors.high_level.toxicity_extractor import ToxicityExtractor
# Metafeature extractors applied to every chat message (user and assistant alike).
runner = MetafeatureExtractorsRunner(
    metafeature_extractors=[
        TextLength(),
        WordCount(),
        DetectLanguage(),
        SentimentPolarity(),
        TextComplexity(),
        ToxicityExtractor(),
    ]
)


def ask_gpt(messages, model="gpt-3.5-turbo"):
    """Send the conversation to the OpenAI chat API and return the reply text.

    Args:
        messages: Conversation history as a list of ``{"role", "content"}``
            dicts in the OpenAI chat format.
        model: Chat model identifier; defaults to ``gpt-3.5-turbo``.

    Returns:
        The assistant's reply as a plain string.
    """
    # Uses the module-level `client` created below at import time.
    response = client.chat.completions.create(model=model, messages=messages)
    return response.choices[0].message.content

# OpenAI client used by ask_gpt; reads OPENAI_API_KEY from the environment.
client = OpenAI()

st.title("Elemeta Chat")

st.header("Chat")
# Initialize the conversation store once; it survives Streamlit reruns.
if "messages" not in st.session_state:
    st.session_state.messages = []

# Replay the stored conversation so the chat persists across each rerun.
for entry in st.session_state.messages:
    with st.chat_message(entry["role"]):
        st.markdown(entry["content"])

# Accept user input and, when present, produce one assistant reply.
if prompt := st.chat_input("Enter prompt to send to assistant"):
    # Record and display the user's message.
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)

    # Display the assistant response in a chat message container.
    with st.chat_message("assistant"):
        message_placeholder = st.empty()
        assistant_response = ask_gpt(messages=st.session_state.messages)
        # Simulate streaming: reveal one whitespace-delimited word at a time.
        # Building each frame with " ".join avoids the trailing space that the
        # old `chunk + " "` accumulator left in the stored history.
        words = assistant_response.split()
        full_response = ""
        for shown in range(1, len(words) + 1):
            full_response = " ".join(words[:shown])
            time.sleep(0.05)
            # Add a blinking cursor to simulate typing.
            message_placeholder.markdown(full_response + "▌")
        message_placeholder.markdown(full_response)
    # Persist the reply (no trailing space) so reruns replay it verbatim.
    st.session_state.messages.append({"role": "assistant", "content": full_response})


def _metafeature_df(texts, prefix):
    """Run the metafeature extractors over texts and return a prefixed DataFrame.

    Args:
        texts: List of message strings to analyze (may be empty).
        prefix: Column-name prefix, e.g. ``"user_"`` or ``"assistant_"``.

    Returns:
        DataFrame with one row per text: the extracted metafeature columns plus
        the raw ``prompt`` text, every column name prefixed with ``prefix``.
    """
    df = pd.DataFrame([runner.run(text) for text in texts])
    df["prompt"] = texts
    # Prefix avoids column collisions when user/assistant frames are concatenated.
    df.columns = prefix + df.columns.values
    return df


user_messages = [m["content"] for m in st.session_state.messages if m["role"] == "user"]
assistant_messages = [m["content"] for m in st.session_state.messages if m["role"] == "assistant"]

user_df = _metafeature_df(user_messages, "user_")
assistant_df = _metafeature_df(assistant_messages, "assistant_")

st.subheader("Chat Metafeatures")
# Side-by-side view: row i pairs user message i with assistant reply i.
st.dataframe(pd.concat([user_df, assistant_df], axis=1))