import streamlit as st
from langchain_groq import ChatGroq
import yfinance as yf

# Initialize the ChatGroq model (replace "groq_api_key" with your actual Groq API key)
llm = ChatGroq(model_name="llama3-8b-8192", api_key="groq_api_key")

# Custom CSS for dark blue theme
st.markdown(
    """
    """,
    unsafe_allow_html=True,
)

# Initialize chat history in session state
if "messages" not in st.session_state:
    st.session_state.messages = [
        {"role": "assistant", "content": "Hello! How can I assist you with stock information today?"}
    ]

# Display chat messages from history
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.write(message["content"])

# Accept user input
if prompt := st.chat_input("Ask me about stocks..."):
    # Display user message in chat message container
    with st.chat_message("user"):
        st.write(prompt)
    # Add user message to chat history
    st.session_state.messages.append({"role": "user", "content": prompt})

    # Fetch stock data or generate a response based on the user input
    if "invest" in prompt.lower():
        company_name = prompt.split()[-1]  # Assuming the last word is the ticker symbol or company name
        stock_data = yf.Ticker(company_name).info
        response = f"Here is the data for {company_name}:\n"
        response += f"Current Price: {stock_data.get('currentPrice', 'N/A')}\n"
        response += f"Market Cap: {stock_data.get('marketCap', 'N/A')}\n"
        response += f"PE Ratio: {stock_data.get('trailingPE', 'N/A')}\n"
        response += f"Dividend Yield: {stock_data.get('dividendYield', 'N/A')}\n"
        # Add more insights or advice logic here if needed
    else:
        response = llm.invoke(prompt).content  # Use the LLM for general questions

    # Display assistant response in chat message container
    with st.chat_message("assistant"):
        st.write(response)
    # Add assistant response to chat history
    st.session_state.messages.append({"role": "assistant", "content": response})