```python
# my_memory_logic.py
from langchain.memory import ConversationBufferMemory
from langchain.chat_models import ChatOpenAI
from langchain.chains import LLMChain
from langchain.prompts.chat import (
    ChatPromptTemplate,
    SystemMessagePromptTemplate,
    MessagesPlaceholder,
    HumanMessagePromptTemplate,
)

# Buffer memory that returns the history as message objects; memory_key is set
# to "chat_history" so it matches the MessagesPlaceholder variable below.
memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)

# System instruction for restating the latest user question as a standalone question.
restatement_system_prompt = (
    "Given a chat history and the latest user question "
    "which might reference context in the chat history, ..."
)

restatement_prompt = ChatPromptTemplate.from_messages([
    SystemMessagePromptTemplate.from_template(restatement_system_prompt),
    MessagesPlaceholder(variable_name="chat_history"),
    HumanMessagePromptTemplate.from_template("{input}"),
])

# Deterministic (temperature 0) model for the restatement step.
restatement_llm = ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0.0)
restatement_chain = LLMChain(llm=restatement_llm, prompt=restatement_prompt)
```
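For context, here is a minimal sketch of how `memory` and `restatement_chain` could be wired together at call time. The snippet above does not attach the memory to the chain itself, so the load/predict/save_context flow, the sample follow-up question, and the `answer` variable below are illustrative assumptions, not part of the original file.

```python
# Minimal usage sketch (assumptions noted in comments; not from the original file).
history = memory.load_memory_variables({})["chat_history"]  # prior turns as message objects

# Rewrite a follow-up question into a standalone question using the history.
standalone_question = restatement_chain.predict(
    chat_history=history,
    input="What about its pricing?",  # hypothetical follow-up that relies on earlier context
)

# Once the standalone question has been answered downstream (e.g. by a retrieval
# chain), store the turn so later restatements can see it. `answer` is hypothetical.
answer = "..."
memory.save_context({"input": "What about its pricing?"}, {"output": answer})
```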