Update utils.py
utils.py CHANGED
@@ -360,7 +360,6 @@ def rag_chain(llm, prompt, retriever):
     extracted_docs = extract_document_info(relevant_docs)

     if (len(extracted_docs)>0):
-        print(extracted_docs)
         #llm_chain = LLMChain(llm = llm, prompt = RAG_CHAIN_PROMPT)
         #result = llm_chain.run({"context": relevant_docs, "question": prompt})
         # Erstelle ein PromptTemplate mit Platzhaltern für Kontext und Frage
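The hunk above drops a debug print(extracted_docs) from rag_chain; the surrounding context lines keep a commented-out LLMChain call and a German comment ("Erstelle ein PromptTemplate mit Platzhaltern für Kontext und Frage" = "create a PromptTemplate with placeholders for context and question"). As a hedged sketch only, this is roughly what that commented-out pattern looks like with classic LangChain; the template text and the run_rag_chain wrapper are assumptions, not code from utils.py:

# Hedged sketch (not the utils.py implementation): the commented-out pattern
# from the hunk, written out with classic LangChain PromptTemplate + LLMChain.
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain

# Assumed template text; only the {context}/{question} placeholders are implied
# by the commented-out run() call in the diff.
RAG_CHAIN_PROMPT = PromptTemplate(
    input_variables=["context", "question"],
    template="Answer the question using only the context below.\nContext: {context}\nQuestion: {question}",
)

def run_rag_chain(llm, prompt, relevant_docs):
    # Mirrors the two commented-out lines: build the chain, then run it with
    # the retrieved documents as context and the user prompt as the question.
    llm_chain = LLMChain(llm=llm, prompt=RAG_CHAIN_PROMPT)
    return llm_chain.run({"context": relevant_docs, "question": prompt})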
@@ -436,8 +435,6 @@ def generate_prompt_with_history(text, history, max_length=4048):
     for x in history[::-1]:
         history_text = x + history_text
         flag = True
-        print("hist+prompt: ")
-        print(history_text)
     if flag:
         return prompt+history_text
     else:
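The second hunk removes two debug prints from generate_prompt_with_history. What remains, per the context lines, is a loop that prepends past turns (newest first, which restores chronological order) onto history_text and then returns it behind prompt. A minimal sketch under stated assumptions, just to make the visible control flow concrete; the initial values and the else branch are guesses, and how text and max_length are used is not visible in this hunk:

# Minimal sketch, assuming the parts not shown in the hunk; not the real utils.py body.
def generate_prompt_with_history(text, history, max_length=4048):
    prompt = ""          # assumed: a prefix built earlier in the real function
    history_text = ""
    flag = False
    for x in history[::-1]:               # iterate past turns newest-first ...
        history_text = x + history_text   # ... and prepend, restoring original order
        flag = True
    if flag:
        return prompt + history_text
    else:
        return None                       # assumed fallback when history is empty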