Update main.py
main.py
CHANGED
@@ -60,12 +60,12 @@ except Exception as e:
     raise RuntimeError("Initialization failed. Please check your embeddings or vectorstore setup.")
 
 # ==========================
-# Prompt Template (Context-Only
+# Prompt Template (Context-Only)
 # ==========================
 prompt_template = """<s>[INST]
-You are a legal assistant specializing in the Indian Penal Code (IPC).
+You are a legal assistant specializing in the Indian Penal Code (IPC). Provide precise, context-specific responses based solely on the given CONTEXT.
 If the information is not found in the CONTEXT, respond with: "I don't have enough information yet."
 
 CONTEXT: {context}
 USER QUERY: {question}
 RESPONSE:
@@ -90,7 +90,7 @@ except Exception as e:
     raise RuntimeError("Something went wrong with the Together API setup. Please verify your API key.")
 
 # ==========================
 # Chat Processing Function
 # ==========================
 def generate_response(user_query: str) -> str:
     try:
@@ -114,10 +114,10 @@ def generate_response(user_query: str) -> str:
         logger.debug(f"Payload sent to LLM: {prompt_input}")
 
         # Generate response using the LLM
         response = llm(prompt.format(**prompt_input))
 
         # Check if response is empty
-        if not response
+        if not response.strip():
             return "I don't have enough information yet."
 
         return response
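For context, here is a minimal sketch (not the actual main.py) of how the pieces touched by this commit fit together: the context-only prompt template, the prompt_input payload logged at line 114, and the empty-response guard fixed at line 120. The llm callable (the Together API wrapper configured around line 90), the retrieved context string, and the closing [/INST] of the template are assumptions, since they fall outside the hunks shown above.

import logging

from langchain.prompts import PromptTemplate

logger = logging.getLogger(__name__)

# Abbreviated copy of the template from lines 65-71; the closing [/INST] tag
# is assumed, as the hunk cuts off after "RESPONSE:".
prompt_template = """<s>[INST]
You are a legal assistant specializing in the Indian Penal Code (IPC). Provide precise, context-specific responses based solely on the given CONTEXT.
If the information is not found in the CONTEXT, respond with: "I don't have enough information yet."

CONTEXT: {context}
USER QUERY: {question}
RESPONSE: [/INST]"""

prompt = PromptTemplate(template=prompt_template, input_variables=["context", "question"])


def generate_response(user_query: str, context: str, llm) -> str:
    # In the real file, `context` comes from the vectorstore retrieval and
    # `llm` from the Together setup; both are passed in here to keep the
    # sketch self-contained.
    try:
        prompt_input = {"context": context, "question": user_query}
        logger.debug(f"Payload sent to LLM: {prompt_input}")

        # Generate response using the LLM
        response = llm(prompt.format(**prompt_input))

        # The commit's fix: the old `if not response` was missing its colon and
        # would also miss whitespace-only replies; .strip() handles both.
        if not response.strip():
            return "I don't have enough information yet."
        return response
    except Exception as e:
        logger.error(f"Failed to generate a response: {e}")
        return "I don't have enough information yet."

Returning the same fallback string for empty output keeps the function's behavior consistent with the instruction baked into the prompt, so callers see one uniform "no information" reply whether the model declines or produces nothing.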