import streamlit as st
import json
from typing import Optional
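# Expected shape of manny_knowledge_base.json, inferred from the lookups in this
# file (an illustrative sketch only, not the actual file contents):
#
# {
#   "personal_details": {"online_presence": {"portfolio": "<url>"}},
#   "projects": {
#     "major_projects": [
#       {"name": "...", "description": "...", "skills_used": ["..."],
#        "status": "...", "confidentiality_note": "..."}
#     ],
#     "algorithm_practice_projects": [{"name": "...", "description": "..."}]
#   }
# }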
def format_project_response(project: dict) -> str:
    """Format a single project's details with clear separation."""
    response = [f"\n• {project['name']}:"]
    response.append(f"  Description: {project['description']}")

    if 'skills_used' in project:
        response.append("\n  Technologies Used:")
        response.append(f"  {', '.join(project['skills_used'])}")

    if 'status' in project:
        response.append(f"\n  Current Status: {project['status']}")
    if 'confidentiality_note' in project:
        response.append(f"  Note: {project['confidentiality_note']}")

    return '\n'.join(response) + '\n'

def get_philosophical_response(query: str, knowledge_base: dict) -> Optional[str]:
    """Handle philosophical or market-related queries.

    Returns None when the query does not match any of these topics.
    """
    # knowledge_base is accepted for a consistent call signature but is not used here.
    query_lower = query.lower()

    if any(word in query_lower for word in ['market', 'job', 'opportunity', 'down']):
        return """I believe success in any market comes down to quality of effort and preparation. While the market may have cycles, I focus on:

• Continuous Skill Development:
- Building practical projects that solve real problems
- Staying updated with the latest ML/AI trends
- Enhancing my technical portfolio

• Value Proposition:
- Unique combination of business and technical skills
- Focus on practical implementation
- Strong problem-solving approach

I see this period as an opportunity to strengthen my skills and build more impactful projects."""

    if any(word in query_lower for word in ['weather', 'temperature', 'climate']):
        return """I'm focused on discussing my ML/AI journey and projects. For weather information, I'd recommend checking local weather services.

Would you like to know about:
• My ML projects and technical skills?
• My journey from commerce to tech?
• My approach to the current job market?"""

    return None

def generate_response(query: str, knowledge_base: dict) -> str:
    """Generate a response to the user's query from the knowledge base."""
    query_lower = query.lower()

    # Philosophical and off-topic queries are handled first.
    philosophical_response = get_philosophical_response(query, knowledge_base)
    if philosophical_response:
        return philosophical_response

    # Project and portfolio queries.
    if any(word in query_lower for word in ['list', 'project', 'portfolio', 'built', 'created', 'developed']):
        response_parts = ["Here are my key projects:"]

        response_parts.append("\nMajor Projects (In Development):")
        for project in knowledge_base['projects']['major_projects']:
            response_parts.append(format_project_response(project))

        response_parts.append("\nCompleted Algorithm Implementation Projects:")
        for project in knowledge_base['projects']['algorithm_practice_projects']:
            response_parts.append(format_project_response(project))

        response = '\n'.join(response_parts)

        if 'online_presence' in knowledge_base.get('personal_details', {}):
            response += f"\n\nView my complete portfolio: {knowledge_base['personal_details']['online_presence']['portfolio']}"

        return response

    # Fallback so callers never receive None for an unmatched query.
    return ("I can tell you about my ML/AI projects, my technical skills, and my journey "
            "from commerce to tech. You can also paste a job description to see how my profile matches.")

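# To try the app locally, run it through Streamlit's CLI, e.g.
# `streamlit run <this_file>.py` (use whatever name this script is saved under).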
def main():
    st.title("💬 Chat with Manyue's Portfolio")

    # Initialize chat history and load the knowledge base once per session.
    if "messages" not in st.session_state:
        st.session_state.messages = []
    if "knowledge_base" not in st.session_state:
        try:
            with open('manny_knowledge_base.json', 'r', encoding='utf-8') as f:
                st.session_state.knowledge_base = json.load(f)
        except FileNotFoundError:
            st.error("Knowledge base file not found.")
            return

    # The welcome text is shown only on the first script run of a session.
    if "displayed_welcome" not in st.session_state:
        st.write("""
        Hi! I'm Manyue's AI assistant. I can tell you about:
        - My journey from commerce to ML/AI
        - My technical skills and projects
        - My fit for ML/AI roles
        - You can also paste job descriptions to see how my profile matches!
        """)
        st.session_state.displayed_welcome = True

    col1, col2 = st.columns([3, 1])

    with col1:
        # Render the conversation history.
        chat_container = st.container()
        with chat_container:
            for message in st.session_state.messages:
                with st.chat_message(message["role"]):
                    st.markdown(message["content"])

        # Handle free-form questions and pasted job descriptions.
        if prompt := st.chat_input("Ask me anything or paste a job description...", key="chat_input"):
            with st.chat_message("user"):
                st.markdown(prompt)
            st.session_state.messages.append({"role": "user", "content": prompt})

            with st.chat_message("assistant"):
                try:
                    response = generate_response(prompt, st.session_state.knowledge_base)
                    st.markdown(response)
                    st.session_state.messages.append({"role": "assistant", "content": response})
                except Exception as e:
                    st.error(f"An error occurred: {str(e)}")

    with col2:
        st.subheader("Quick Questions")
        example_questions = [
            "Tell me about your ML projects",
            "What are your technical skills?",
            "What makes you stand out?",
            "What's your journey into ML?",
            "Your view on the current market?"
        ]

        for i, question in enumerate(example_questions):
            if st.button(question, key=f"btn_{i}"):
                with st.chat_message("user"):
                    st.markdown(question)
                st.session_state.messages.append({"role": "user", "content": question})

                with st.chat_message("assistant"):
                    try:
                        response = generate_response(question, st.session_state.knowledge_base)
                        st.markdown(response)
                        st.session_state.messages.append({"role": "assistant", "content": response})
                    except Exception as e:
                        st.error(f"An error occurred: {str(e)}")
                # Rerun so the new messages are rendered in the main chat column.
                st.rerun()

        st.markdown("---")
        if st.button("Clear Chat", key="clear_chat"):
            st.session_state.messages = []
            st.rerun()

if __name__ == "__main__":
    main()