Ali-Developments committed · verified
Commit fa8a216 · 1 Parent(s): e661229

Update src/streamlit_app.py

Files changed (1):
  1. src/streamlit_app.py +123 -34
src/streamlit_app.py CHANGED
@@ -1,40 +1,129 @@
- import altair as alt
- import numpy as np
- import pandas as pd
  import streamlit as st

- """
- # Welcome to Streamlit!

- Edit `/streamlit_app.py` to customize this app to your heart's desire :heart:.
- If you have any questions, checkout our [documentation](https://docs.streamlit.io) and [community
- forums](https://discuss.streamlit.io).

- In the meantime, below is an example of what you can do with just a few lines of code:
  """

- num_points = st.slider("Number of points in spiral", 1, 10000, 1100)
- num_turns = st.slider("Number of turns in spiral", 1, 300, 31)
-
- indices = np.linspace(0, 1, num_points)
- theta = 2 * np.pi * num_turns * indices
- radius = indices
-
- x = radius * np.cos(theta)
- y = radius * np.sin(theta)
-
- df = pd.DataFrame({
-     "x": x,
-     "y": y,
-     "idx": indices,
-     "rand": np.random.randn(num_points),
- })
-
- st.altair_chart(alt.Chart(df, height=700, width=700)
-     .mark_point(filled=True)
-     .encode(
-         x=alt.X("x", axis=None),
-         y=alt.Y("y", axis=None),
-         color=alt.Color("idx", legend=None, scale=alt.Scale()),
-         size=alt.Size("rand", legend=None, scale=alt.Scale(range=[1, 150])),
-     ))
 
+ # app.py
+ import os
  import streamlit as st
+ from dotenv import load_dotenv
+ from langchain.docstore.document import Document
+ from langchain_community.retrievers import BM25Retriever
+ from langchain.tools import Tool
+ from langgraph.graph.message import add_messages
+ from langgraph.graph import START, StateGraph
+ from langgraph.prebuilt import ToolNode, tools_condition
+ from langchain_core.messages import AnyMessage, HumanMessage
+ from langchain_groq import ChatGroq
+ from typing import TypedDict, Annotated
+ import fitz  # PyMuPDF

+ # Load .env vars
+ load_dotenv()
+ groq_api_key = os.getenv("GROQ_API_KEY")
+ os.environ["GROQ_API_KEY"] = groq_api_key or ""  # guard: avoids a TypeError when the key is missing from .env
+
+ # --- PDF uploader and parser ---
+ def parse_pdfs(uploaded_files):
+     pdf_docs = []
+     for uploaded_file in uploaded_files:
+         with fitz.open(stream=uploaded_file.read(), filetype="pdf") as doc:
+             text = ""
+             for page in doc:
+                 text += page.get_text()
+         pdf_docs.append(Document(page_content=text, metadata={"source": uploaded_file.name}))
+     return pdf_docs
+
+ # --- Guest info retrieval ---
+ def build_retriever(all_docs):
+     return BM25Retriever.from_documents(all_docs)  # NOTE: raises if all_docs is empty (no PDFs uploaded)
+
+ def extract_text(query: str, retriever):
+     results = retriever.invoke(query)
+     if results:
+         return "\n\n".join([doc.page_content for doc in results[:3]])
+     else:
+         return "لم يتم العثور على معلومات مطابقة في الملفات."  # "No matching information was found in the files."
+
+
+ # --- Streamlit UI ---
+ st.set_page_config(page_title="NINU Agent", page_icon="🏛️")
+ st.title("🏛️ NINU - Guest & PDF Assistant")
+
+ st.markdown("**Hint:** NINU can help summarize lectures and quiz you step-by-step in simple English.")
+
+ # Initialize session state to hold conversation history
+ if "conversation_history" not in st.session_state:
+     st.session_state.conversation_history = []
+
+
+ # User input area
+ query = st.text_area("📝 اكتب سؤالك أو كمل مذاكرتك هنا:")  # "Write your question here, or continue studying:"
+
+ uploaded_files = st.file_uploader("📄 ارفع ملفات PDF للمحاضرات", type=["pdf"], accept_multiple_files=True)  # "Upload lecture PDFs"

+ if st.button("Ask NINU") and query:
+     # 1. Parse PDF
+     user_docs = parse_pdfs(uploaded_files) if uploaded_files else []
+     bm25_retriever = build_retriever(user_docs)

+     # 2. Create Tool
+     NINU_tool = Tool(
+         name="NINU_Lec_retriever",
+         func=lambda q: extract_text(q, bm25_retriever),
+         description="Retrieves content from uploaded PDFs based on a query."
+     )
+
+     # 3. Create LLM with tools
+     llm = ChatGroq(model="deepseek-r1-distill-llama-70b", groq_api_key=groq_api_key)
+     tools = [NINU_tool]
+     llm_with_tools = llm.bind_tools(tools)
+
+     class AgentState(TypedDict):
+         messages: Annotated[list[AnyMessage], add_messages]
+
+     def assistant(state: AgentState):
+         return {
+             "messages": [llm_with_tools.invoke(state["messages"])]
+         }
+
+     # 4. Build Agent Graph
+     builder = StateGraph(AgentState)
+     builder.add_node("assistant", assistant)
+     builder.add_node("tools", ToolNode(tools))
+     builder.add_edge(START, "assistant")
+     builder.add_conditional_edges("assistant", tools_condition)
+     builder.add_edge("tools", "assistant")
+     NINU = builder.compile()
+
+     # 5. Prepare full conversation messages
+     if len(st.session_state.conversation_history) == 0:
+         # Add the custom prompt first
+         intro_prompt = """
+ I uploaded a lecture PDF. I want you to study it with me step by step.
+
+ - Summarize the lecture part by part.
+ - Explain each part in very simple English like you're teaching a friend.
+ - After each part, ask me 2-3 MCQ questions in English.
+ - Wait for my answer before moving to the next part.
+ - If I answer incorrectly, explain why.
+
+ Let's begin! 💪
  """
+         st.session_state.conversation_history.append(HumanMessage(content=intro_prompt))
+
+     # Add the new user message
+     st.session_state.conversation_history.append(HumanMessage(content=query))
+
+     # 6. Invoke agent with full conversation
+     response = NINU.invoke({"messages": st.session_state.conversation_history})
+
+     # 7. Add assistant response to history
+     assistant_reply = response["messages"][-1]
+     st.session_state.conversation_history.append(assistant_reply)
+
+     # 8. Show output
+     st.markdown("### NINU's Response:")
+     st.write(assistant_reply.content)

+     # 9. Show full conversation history (optional)
+     with st.expander("🧾 Show full conversation history"):
+         for msg in st.session_state.conversation_history:
+             role = "You" if msg.type == "human" else "NINU"
+             st.markdown(f"**{role}:** {msg.content}")
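The retrieval path introduced in this commit can be sanity-checked outside Streamlit. The sketch below is illustrative only and not part of the commit: it mirrors what parse_pdfs, build_retriever, and extract_text do, but reads a PDF straight from disk instead of an uploaded file. The file name lecture.pdf and the query string are placeholders, and it assumes PyMuPDF, langchain-community, and rank_bm25 are installed.

# retrieval_check.py: illustrative sketch, not part of this commit
import fitz  # PyMuPDF
from langchain.docstore.document import Document
from langchain_community.retrievers import BM25Retriever

# Read one PDF from disk (the app gets the same bytes from st.file_uploader instead).
with fitz.open("lecture.pdf") as doc:  # placeholder path
    text = "".join(page.get_text() for page in doc)

docs = [Document(page_content=text, metadata={"source": "lecture.pdf"})]
retriever = BM25Retriever.from_documents(docs)  # same call as build_retriever()

# Same "top three matches joined by blank lines" behaviour as extract_text().
results = retriever.invoke("main topic of the lecture")  # placeholder query
print("\n\n".join(d.page_content for d in results[:3]))

Running the full app additionally needs a GROQ_API_KEY entry in a .env file (picked up by load_dotenv) and would typically be launched with streamlit run src/streamlit_app.py.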