Update app.py
app.py CHANGED
@@ -141,7 +141,8 @@ def to_query(provider,question):
     try:
         query_agent = LlamaCppAgent(
             provider,
-            system_prompt=f"{query_system}",
+            #system_prompt=f"{query_system}",
+            system_prompt="you are kind assistant",
             predefined_messages_formatter_type=MessagesFormatterType.GEMMA_2,
             debug_output=True,
         )
@@ -157,7 +158,7 @@ Search Query:
         settings = provider.get_provider_default_settings()
         messages = BasicChatHistory()
         result = query_agent.get_chat_response(
-            message,
+            query_system+message,
             llm_sampling_settings=settings,
             chat_history=messages,
             returns_streaming_generator=False,
@@ -232,7 +233,8 @@ Question: %s
     # Create the agent
     agent = LlamaCppAgent(
         provider,
-        system_prompt=f"{retriever_system}",
+        #system_prompt=f"{retriever_system}",
+        system_prompt="you are kind assistant",
         predefined_messages_formatter_type=MessagesFormatterType.GEMMA_2,
         debug_output=True,
     )
@@ -257,7 +259,7 @@ Question: %s
 
     # Get the response stream
     stream = agent.get_chat_response(
-        text,
+        retriever_system+text,
         llm_sampling_settings=settings,
         chat_history=messages,
         returns_streaming_generator=True,
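
In both agents this commit comments out the real system prompt, substitutes a generic placeholder, and prepends the original instructions to the user message instead, likely because the GEMMA_2 message format used here does not carry a separate system role. Below is a minimal sketch of that pattern, assuming the llama-cpp-agent classes shown in the diff; the helper names and import paths are assumptions, not part of the commit.

# Sketch only: fold the system instructions into the user turn, as the commit does.
# build_query_agent / run_query are hypothetical helper names; the import paths
# are assumed to match the ones app.py already uses.
from llama_cpp_agent import LlamaCppAgent, MessagesFormatterType
from llama_cpp_agent.chat_history import BasicChatHistory

def build_query_agent(provider):
    # Generic placeholder system prompt; the real instructions travel with
    # the message instead (see run_query below).
    return LlamaCppAgent(
        provider,
        system_prompt="you are kind assistant",
        predefined_messages_formatter_type=MessagesFormatterType.GEMMA_2,
        debug_output=True,
    )

def run_query(query_agent, provider, query_system, message):
    settings = provider.get_provider_default_settings()
    messages = BasicChatHistory()
    # Prepend the original system instructions to the user message,
    # mirroring the query_system+message change in the first hunks.
    return query_agent.get_chat_response(
        query_system + message,
        llm_sampling_settings=settings,
        chat_history=messages,
        returns_streaming_generator=False,
    )

Keeping the commented-out system_prompt=f"{query_system}" lines preserves the original wiring, so a true system message can be restored if a formatter with a system role is used later.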
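The retriever agent gets the same prepending treatment but still streams its answer (returns_streaming_generator=True). A small consumption sketch, reusing the imports above and assuming each item yielded by the generator is a plain text chunk:

# Sketch only: consume the streaming response from the retriever agent.
# agent, provider, retriever_system and text are assumed to be set up as in app.py.
def stream_answer(agent, provider, retriever_system, text):
    settings = provider.get_provider_default_settings()
    messages = BasicChatHistory()
    stream = agent.get_chat_response(
        retriever_system + text,
        llm_sampling_settings=settings,
        chat_history=messages,
        returns_streaming_generator=True,
    )
    output = ""
    for chunk in stream:
        output += chunk  # accumulate partial text as it arrives
        yield output     # yield the running answer for incremental display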