File size: 1,470 Bytes
d064c90
 
 
 
 
 
 
34bee21
 
d064c90
 
 
 
 
 
 
 
 
 
 
 
9ae879b
d064c90
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
from langchain_openai import ChatOpenAI
from langchain.prompts import PromptTemplate
from langchain.chains.question_answering import load_qa_chain
from langchain.chains import LLMChain
from langchain.memory import ConversationBufferMemory
from langchain.chains import RetrievalQA

import os
api_key = os.getenv("OPENAI_API_KEY")

def get_conversational_chain():
    """Build a resume-QA chain for answering hiring-manager questions.

    Constructs an ``LLMChain`` whose prompt expects two inputs:
    ``context`` (the candidate's resume text) and ``question`` (the hiring
    manager's query). Conversation history is accumulated in a
    ``ConversationBufferMemory`` under the ``chat_history`` key.

    Returns:
        LLMChain: chain callable with ``{"context": ..., "question": ...}``.
    """
    prompt_template = """You are an expert and polite HR.
    In the context, a candidate's resume will be provided to you. Given a question the hiring manager wants to know about the candidate, i want you to give the answer with the most precision. Feel free to answer in sentences or bullet points whatever you find suitable.
    if there is some "\n" imagine things are written in separate lines. make your move accordingly 
    If the question has no answer present in the resume, 
    feel free to say, "try asking something else, this information is not available", don't provide the wrong answer no matter what is present in the question\n\n
    Context:\n {context}?\n
    Question: \n{question}\n

    Answer:
    """
    # Reads OPENAI_API_KEY via the module-level `api_key`; temperature 0.7
    # matches the original behavior.
    model = ChatOpenAI(temperature=0.7, api_key=api_key)

    # input_key must name one of the prompt variables so the memory stores
    # the user's question (not the resume context) in the history buffer.
    memory = ConversationBufferMemory(llm=model, input_key="question",
                                      memory_key="chat_history")

    # BUG FIX: the template references both {context} and {question}, but the
    # original declared only ["context"], which breaks PromptTemplate
    # validation/formatting for the missing "question" variable.
    prompt = PromptTemplate(template=prompt_template,
                            input_variables=["context", "question"])

    # NOTE(review): LLMChain is deprecated in newer LangChain releases in
    # favor of `prompt | model` runnables — kept here to preserve the
    # caller-facing return type.
    chain = LLMChain(llm=model, prompt=prompt, memory=memory)

    return chain