import streamlit as st
import uuid
import sys
import requests
import pickle
import bitsandbytes as bnb
import pandas as pd
import torch
import torch.nn as nn
import transformers
from datasets import load_dataset
from huggingface_hub import notebook_login
from peft import (
    LoraConfig,
    PeftConfig,
    get_peft_model,
    prepare_model_for_kbit_training,
)
from transformers import (
    AutoConfig,
    AutoModelForCausalLM,
    AutoTokenizer,
    BitsAndBytesConfig,
)

USER_ICON = "images/user-icon.png"
AI_ICON = "images/ai-icon.png"
MAX_HISTORY_LENGTH = 5

# Assign a stable per-session user id
if 'user_id' in st.session_state:
    user_id = st.session_state['user_id']
else:
    user_id = str(uuid.uuid4())
    st.session_state['user_id'] = user_id

# Initialise conversation state in the Streamlit session
if 'chat_history' not in st.session_state:
    st.session_state['chat_history'] = []

if "chats" not in st.session_state:
    st.session_state.chats = [
        {
            'id': 0,
            'question': '',
            'answer': ''
        }
    ]

if "questions" not in st.session_state:
    st.session_state.questions = []

if "answers" not in st.session_state:
    st.session_state.answers = []

if "input" not in st.session_state:
    st.session_state.input = ""

st.markdown(""" """, unsafe_allow_html=True)

# Load the model outside the handle_input() function
with open('model_saved.pkl', 'rb') as f:
    model = pickle.load(f)

if not hasattr(model, 'predict'):
    st.error("The loaded model does not have a predict method.")

def write_top_bar():
    col1, col2, col3 = st.columns([1, 10, 2])
    with col1:
        st.image(AI_ICON, use_column_width='always')
    with col2:
        header = "Cogwise Intelligent Assistant"
        st.write(f"