Spaces:
Runtime error
Runtime error
File size: 1,517 Bytes
8849fbc fb78408 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 |
"""MathLearn: a Streamlit chat app that tutors maths step by step via Groq (LangChain)."""
import os

import streamlit as st
# Fix: ConversationBufferMemory lives in langchain.memory, not
# langchain_community.memory (the original import raised at startup).
from langchain.chains import LLMChain
from langchain.memory import ConversationBufferMemory
from langchain_community.chat_message_histories import StreamlitChatMessageHistory
from langchain_core.prompts import PromptTemplate
from langchain_groq import ChatGroq

# SECURITY: the original hard-coded a live Groq API key in source. Never commit
# secrets — read the key from the environment (or st.secrets) instead.
groq_api_key = os.environ.get("GROQ_API_KEY", "")
llm = ChatGroq(model="gemma2-9b-it", api_key=groq_api_key)

# {history} injects the buffered conversation into the prompt. The original
# template omitted it, so the memory object was recorded but never reached the
# model; with it, the tutor can actually follow up on earlier questions.
template = """You are a professional Maths tutor; answer questions provided by the user in a step-by-step manner.
Use the provided context to answer the question.
Try to engage with the user and follow up on questions asked.
If you don't know the answer, say so. Explain your answer in detail.
Do not discuss the context in your response; just provide the answer directly.
Conversation so far:
{history}
Question: {question}
Answer:"""
rag_prompt = PromptTemplate.from_template(template)

# Message history persisted in Streamlit session state across reruns.
history = StreamlitChatMessageHistory(key="chat_messages")

# Memory backed by the Streamlit history; memory_key matches {history} above.
memory = ConversationBufferMemory(chat_memory=history, memory_key="history")

llm_chain = LLMChain(llm=llm, prompt=rag_prompt, memory=memory)

# NOTE(review): original title was mojibake ("π¦π..."); restored as the usual
# LangChain parrot+chain emoji — confirm against the intended branding.
st.title('🦜🔗 Welcome to the MathLearn 🦜🔗')

# Replay prior turns so the transcript survives Streamlit reruns.
for msg in history.messages:
    st.chat_message(msg.type).write(msg.content)

if user_question := st.chat_input():
    st.chat_message("human").write(user_question)
    # New messages are appended to StreamlitChatMessageHistory automatically
    # when the chain runs; pass the input as a dict keyed by the prompt
    # variable so it cannot be confused with the memory-supplied {history}.
    response = llm_chain.invoke({"question": user_question})
    st.chat_message("ai").write(response["text"])
|