from langchain.prompts import ChatPromptTemplate

def make_standalone_question_chain(llm):
    """Build a prompt | llm chain that rewrites a follow-up question into a standalone question."""
    prompt = ChatPromptTemplate.from_messages([
        ("system", """You are a helpful assistant that transforms user questions into standalone questions 
        by incorporating context from the chat history if needed. The output should be a self-contained 
        question that can be understood without any additional context.
        
        Examples:
        Chat History: "Let's talk about renewable energy"
        User Input: "What about solar?"
        Output: "What are the key aspects of solar energy as a renewable energy source?"
        
        Chat History: "What causes global warming?"
        User Input: "And what are its effects?"
        Output: "What are the effects of global warming on the environment and society?"
        """),
        ("user", """Chat History: {chat_history}
        User Question: {question}
        
        Transform this into a standalone question:
        Make sure to keep the original language of the question.""")
    ])
    
    chain = prompt | llm
    return chain
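
# Example (sketch) of invoking the chain directly -- assumes `llm` is any LangChain
# chat model (e.g. ChatOpenAI from langchain_openai); the model choice is an
# assumption for illustration, not part of this module.
#
#   chain = make_standalone_question_chain(llm)
#   result = chain.invoke({
#       "chat_history": "Let's talk about renewable energy",
#       "question": "What about solar?",
#   })
#   print(result.content)  # a self-contained, rephrased question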

def make_standalone_question_node(llm):
    """Wrap the standalone-question chain as a graph node that updates `user_input`."""
    standalone_chain = make_standalone_question_chain(llm)

    def transform_to_standalone(state):
        chat_history = state.get("chat_history", "")
        # No chat history means the question already stands on its own: return no updates.
        if not chat_history:
            return {}
        output = standalone_chain.invoke({
            "chat_history": chat_history,
            "question": state["user_input"]
        })
        # Return only the updated key instead of mutating the incoming state dict.
        return {"user_input": output.content}

    return transform_to_standalone
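

if __name__ == "__main__":
    # Minimal wiring sketch, assuming the node is used inside a LangGraph StateGraph
    # and that ChatOpenAI is the chat model. The state schema (RAGState), the model
    # name, and the graph layout are illustrative assumptions, not part of the
    # original module.
    from typing import TypedDict

    from langchain_openai import ChatOpenAI
    from langgraph.graph import StateGraph, START, END

    class RAGState(TypedDict, total=False):
        chat_history: str
        user_input: str

    llm = ChatOpenAI(model="gpt-4o-mini", temperature=0)

    builder = StateGraph(RAGState)
    builder.add_node("standalone_question", make_standalone_question_node(llm))
    builder.add_edge(START, "standalone_question")
    builder.add_edge("standalone_question", END)
    graph = builder.compile()

    result = graph.invoke({
        "chat_history": "What causes global warming?",
        "user_input": "And what are its effects?",
    })
    print(result["user_input"])  # the rewritten standalone question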