Joshua Sundance Bailey
committed on
Commit
·
3550ebd
1
Parent(s):
457889e
change variable name in prompt
Browse files
langchain-streamlit-demo/app.py
CHANGED
@@ -442,7 +442,7 @@ if st.session_state.llm:
|
|
442 |
config["max_concurrency"] = 5
|
443 |
# raw_results = st.session_state.doc_chain.batch(
|
444 |
# [
|
445 |
-
# {"
|
446 |
# for doc in st.session_state.texts
|
447 |
# ],
|
448 |
# config,
|
|
|
442 |
config["max_concurrency"] = 5
|
443 |
# raw_results = st.session_state.doc_chain.batch(
|
444 |
# [
|
445 |
+
# {"context": doc.page_content, "prompt": prompt}
|
446 |
# for doc in st.session_state.texts
|
447 |
# ],
|
448 |
# config,
|
langchain-streamlit-demo/qagen.py
CHANGED
@@ -36,7 +36,7 @@ Do not provide additional commentary and do not wrap your response in Markdown f
|
|
36 |
templ2 = """{prompt}
|
37 |
Please create question/answer pairs, in the specified JSON format, for the following text:
|
38 |
----------------
|
39 |
-
{
|
40 |
CHAT_PROMPT = ChatPromptTemplate.from_messages(
|
41 |
[
|
42 |
("system", templ1),
|
|
|
36 |
templ2 = """{prompt}
|
37 |
Please create question/answer pairs, in the specified JSON format, for the following text:
|
38 |
----------------
|
39 |
+
{context}"""
|
40 |
CHAT_PROMPT = ChatPromptTemplate.from_messages(
|
41 |
[
|
42 |
("system", templ1),
|