import streamlit as st
import pandas as pd
from transformers import pipeline

# Load the anomalies data and convert all cells to strings,
# since the TAPAS table-question-answering pipeline expects a table of strings
df = pd.read_csv('anomalies.csv', sep=',', decimal='.')
df = df.fillna("").astype(str)
print(df.head())


def response(user_question):
    if not isinstance(user_question, str):
        raise TypeError(f"Expected a string for the question, but got {type(user_question)}")

    # Initialize the table-question-answering pipeline
    tqa = pipeline(task="table-question-answering", model="google/tapas-large-finetuned-wtq")

    # Query the model with the table and the user question
    resposta = tqa(table=df, query=user_question)

    # Check whether any cell was returned
    if 'cells' not in resposta or len(resposta['cells']) == 0:
        raise IndexError("No cell was returned by the model.")

    # Return the first cell of the answer
    final_response = resposta['cells'][0]
    return final_response


# Streamlit interface
st.markdown("""
<h1 style='text-align: center;'>Chatbot do Tesouro RS</h1>
""", unsafe_allow_html=True)

# Chat history
if 'history' not in st.session_state:
    st.session_state['history'] = []

# Input box for user question
user_question = st.text_input("Escreva sua questão aqui:", "")

if user_question:
    # Add person emoji when typing the question
    st.session_state['history'].append(('👤', user_question))
    st.markdown(f"**👤 {user_question}**")

    # Generate the response
    bot_response = response(user_question)

    # Add robot emoji when generating the response and align it to the right
    st.session_state['history'].append(('🤖', bot_response))
    st.markdown(f"<div style='text-align: right;'>**🤖 {bot_response}**</div>",
                unsafe_allow_html=True)

# Clear history button
if st.button("Limpar"):
    st.session_state['history'] = []

# Display chat history
for sender, message in st.session_state['history']:
    if sender == '👤':
        st.markdown(f"**👤 {message}**")
    elif sender == '🤖':
        st.markdown(f"<div style='text-align: right;'>**🤖 {message}**</div>",
                    unsafe_allow_html=True)