Update app.py
app.py
CHANGED
@@ -47,21 +47,17 @@ retriever = retriever_from_chroma(docs, hf, "mmr", 6)
 
 callback_manager = CallbackManager([StreamingStdOutCallbackHandler()])
 
-
-
-
-
-
-
-
-
-
-
-
-    verbose=False,
-)
-
-llm = load_llm(model_path)
+llm = llamacpp.LlamaCpp(
+    model_path="/kaggle/working/phi-2-layla-v1-chatml-Q8_0.gguf",
+    n_gpu_layers=1,
+    temperature=0.1,
+    top_p = 0.9,
+    n_ctx=22000,
+    max_tokens=200,
+    repeat_penalty=1.7,
+    callback_manager = callback_manager,
+    verbose=False,
+)
 
 contextualize_q_system_prompt = """Given a context, chat history and the latest user question
 which maybe reference context in the chat history, formulate a standalone question
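
For context, a minimal sketch of how the newly configured Llama.cpp model could be loaded and invoked on its own. The langchain_community import path and the invoke() call are assumptions about the library API rather than part of this commit, and the prompt string is purely illustrative:

    # Sketch only: assumes the langchain-community Llama.cpp wrapper.
    from langchain_community.llms import LlamaCpp

    # Same configuration as the committed change, shown standalone
    # (the streaming callback_manager from app.py is omitted here).
    llm = LlamaCpp(
        model_path="/kaggle/working/phi-2-layla-v1-chatml-Q8_0.gguf",
        n_gpu_layers=1,
        temperature=0.1,
        top_p=0.9,
        n_ctx=22000,
        max_tokens=200,
        repeat_penalty=1.7,
        verbose=False,
    )

    # Illustrative call: returns the generated completion as a string.
    print(llm.invoke("Summarise the retrieved context in two sentences."))

With the CallbackManager([StreamingStdOutCallbackHandler()]) attached as in the diff, tokens would additionally be streamed to stdout as they are generated.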