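"""AI-Assistant / app.py: Streamlit chat assistant for Manyue's portfolio.

Assumed local usage (a sketch, not stated in the file itself): run with
`streamlit run app.py`, with knowledge_base.json available in the working
directory, since the app loads it by relative path.
"""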
import streamlit as st
import json
from typing import Optional
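
# Illustrative only: a minimal sketch of the knowledge_base.json shape that the
# handlers below assume, inferred from the dictionary accesses in this file.
# All values are placeholders, not Manyue's actual data; the app never reads
# this constant.
_EXAMPLE_KNOWLEDGE_BASE: dict = {
    "personal_details": {
        "online_presence": {"portfolio": "https://example.com/portfolio"},  # placeholder URL
    },
    "projects": {
        "major_projects": [
            {
                "name": "Sample project",                   # used by format_project_response
                "description": "One-line summary",          # used by format_project_response
                "skills_used": ["Python", "scikit-learn"],  # optional
                "status": "In development",                 # optional
                "confidentiality_note": "Details limited",  # optional
            },
        ],
        "algorithm_practice_projects": [],  # same per-project fields as above
    },
}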


def format_project_response(project: dict) -> str:
    """Format project details with clear separation"""
    response = [f"\n• {project['name']}:"]
    response.append(f"  Description: {project['description']}")
    if 'skills_used' in project:
        response.append("\n  Technologies Used:")
        response.append(f"  {', '.join(project['skills_used'])}")
    if 'status' in project:
        response.append(f"\n  Current Status: {project['status']}")
    if 'confidentiality_note' in project:
        response.append(f"  Note: {project['confidentiality_note']}")
    return '\n'.join(response) + '\n'


def get_philosophical_response(query: str, knowledge_base: dict) -> Optional[str]:
    """Handle philosophical or market-related queries; return None if not applicable."""
    query_lower = query.lower()

    # Market-related response
    if any(word in query_lower for word in ['market', 'job', 'opportunity', 'down']):
        return """I believe success in any market comes down to quality of effort and preparation. While the market may have cycles, I focus on:
• Continuous Skill Development:
  - Building practical projects that solve real problems
  - Staying updated with latest ML/AI trends
  - Enhancing my technical portfolio
• Value Proposition:
  - Unique combination of business and technical skills
  - Focus on practical implementation
  - Strong problem-solving approach
I see this period as an opportunity to strengthen my skills and build more impactful projects."""

    # Off-topic response
    if any(word in query_lower for word in ['weather', 'temperature', 'climate']):
        return """I'm focused on discussing my ML/AI journey and projects. For weather information, I'd recommend checking local weather services.
Would you like to know about:
• My ML projects and technical skills?
• My journey from commerce to tech?
• My approach to the current job market?"""

    return None  # Not a philosophical or off-topic query


def generate_response(query: str, knowledge_base: dict) -> str:
    """Enhanced response generation with better handling of various queries"""
    query_lower = query.lower()

    # First check for philosophical/off-topic queries
    philosophical_response = get_philosophical_response(query, knowledge_base)
    if philosophical_response:
        return philosophical_response

    # Handle project listing requests
    if any(word in query_lower for word in ['list', 'project', 'portfolio', 'built', 'created', 'developed']):
        response_parts = ["Here are my key projects:"]

        # Major Projects
        response_parts.append("\nMajor Projects (In Development):")
        for project in knowledge_base['projects']['major_projects']:
            response_parts.append(format_project_response(project))

        # Algorithm Projects
        response_parts.append("\nCompleted Algorithm Implementation Projects:")
        for project in knowledge_base['projects']['algorithm_practice_projects']:
            response_parts.append(format_project_response(project))

        response = '\n'.join(response_parts)

        # Add relevant links
        if 'online_presence' in knowledge_base.get('personal_details', {}):
            response += f"\n\nView my complete portfolio: {knowledge_base['personal_details']['online_presence']['portfolio']}"

        return response

    # [Rest of your existing response handlers]

    # Generic placeholder fallback so the function always returns a string
    return "I'd be happy to talk about my projects, skills, or my journey into ML/AI. What would you like to know?"
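
# Illustrative usage sketch (assumption: run outside Streamlit, e.g. in a REPL,
# with knowledge_base.json in the working directory):
#
#     with open('knowledge_base.json', encoding='utf-8') as f:
#         kb = json.load(f)
#     print(generate_response("Tell me about your projects", kb))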


def main():
    st.title("💬 Chat with Manyue's Portfolio")

    # Initialize session state
    if "messages" not in st.session_state:
        st.session_state.messages = []
    if "knowledge_base" not in st.session_state:
        try:
            with open('knowledge_base.json', 'r', encoding='utf-8') as f:
                st.session_state.knowledge_base = json.load(f)
        except FileNotFoundError:
            st.error("Knowledge base file not found.")
            return

    # Display welcome message
    if "displayed_welcome" not in st.session_state:
        st.write("""
Hi! I'm Manyue's AI assistant. I can tell you about:
- My journey from commerce to ML/AI
- My technical skills and projects
- My fit for ML/AI roles
- You can also paste job descriptions to see how my profile matches!
""")
        st.session_state.displayed_welcome = True

    # Create two columns with proper sizing
    col1, col2 = st.columns([3, 1])

    with col1:
        # Chat container for better scrolling
        chat_container = st.container()
        with chat_container:
            for message in st.session_state.messages:
                with st.chat_message(message["role"]):
                    st.markdown(message["content"])

        # Chat input
        if prompt := st.chat_input("Ask me anything or paste a job description...", key="chat_input"):
            # Add user message
            with st.chat_message("user"):
                st.markdown(prompt)
            st.session_state.messages.append({"role": "user", "content": prompt})

            # Generate and display response
            with st.chat_message("assistant"):
                try:
                    response = generate_response(prompt, st.session_state.knowledge_base)
                    st.markdown(response)
                    st.session_state.messages.append({"role": "assistant", "content": response})
                except Exception as e:
                    st.error(f"An error occurred: {str(e)}")

    with col2:
        st.subheader("Quick Questions")
        example_questions = [
            "Tell me about your ML projects",
            "What are your technical skills?",
            "What makes you stand out?",
            "What's your journey into ML?",
            "Your view on the current market?"
        ]

        # Handle quick questions with proper keys
        for i, question in enumerate(example_questions):
            if st.button(question, key=f"btn_{i}"):
                with st.chat_message("user"):
                    st.markdown(question)
                st.session_state.messages.append({"role": "user", "content": question})

                with st.chat_message("assistant"):
                    try:
                        response = generate_response(question, st.session_state.knowledge_base)
                        st.markdown(response)
                        st.session_state.messages.append({"role": "assistant", "content": response})
                    except Exception as e:
                        st.error(f"An error occurred: {str(e)}")
                st.rerun()

        st.markdown("---")
        if st.button("Clear Chat", key="clear_chat"):
            st.session_state.messages = []
            st.rerun()


if __name__ == "__main__":
    main()