fixed duplication
app.py
CHANGED
@@ -9,7 +9,6 @@ from utils.retriever import retrieve_paragraphs
 from utils.generator import generate
 
 # Sample questions for examples
-
 SAMPLE_QUESTIONS = {
     "Análisis de la deforestación": [
         "¿Cuáles son los principales puntos críticos de deforestación en Ecuador?",
@@ -28,8 +27,10 @@ SAMPLE_QUESTIONS = {
     ]
 }
 
-#
+# Global variable to cache API results and prevent double calls
+geojson_analysis_cache = {}
 
+# Initialize Chat
 def start_chat(query, history):
     """Start a new chat interaction"""
     history = history + [(query, None)]
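The module-level cache is what actually removes the duplicated WhispAPI call: both the upload handler and chat_response read from it. Below is a minimal, self-contained sketch of the pattern; `get_or_analyze` is a hypothetical helper, and the `analyze` callable stands in for the app's `handle_geojson_upload`, which the hunks further down treat as returning a tuple whose first element is the formatted stats.

```python
# Minimal sketch of the caching pattern, keyed by file name only for brevity.
from typing import Callable, Tuple

geojson_analysis_cache: dict = {}

def get_or_analyze(file_name: str, analyze: Callable[[str], Tuple]) -> str:
    """Return cached formatted stats for a file, calling the analyzer at most once."""
    if file_name not in geojson_analysis_cache:
        stats_result = analyze(file_name)                    # e.g. handle_geojson_upload
        geojson_analysis_cache[file_name] = stats_result[0]  # first element = formatted stats
    return geojson_analysis_cache[file_name]

calls = []
fake_analyze = lambda name: (f"stats for {name}", calls.append(name))
print(get_or_analyze("farm.geojson", fake_analyze))  # runs the analyzer
print(get_or_analyze("farm.geojson", fake_analyze))  # served from the cache
print(len(calls))                                    # 1 -> no duplicate API call
```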
@@ -42,6 +43,10 @@ def finish_chat():
 async def chat_response(query, history, method, country, uploaded_file):
     """Generate chat response based on method and inputs"""
 
+    # Skip processing if this is an auto-generated file analysis message
+    if query.startswith("📄 GeoJSON cargado"):
+        return
+
     # Validate inputs
     if method == "Subir GeoJson":
         if uploaded_file is None:
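Since chat_response yields its results (see the `yield history, ""` context in the next hunk), it is an async generator, so the bare `return` added here simply ends the stream without emitting anything for the synthetic upload message. A small illustration, with a hypothetical `stream` generator standing in for chat_response:

```python
import asyncio

async def stream(query: str):
    # In an async generator a bare `return` just ends iteration early;
    # nothing is yielded for the auto-generated upload message.
    if query.startswith("📄 GeoJSON cargado"):
        return
    yield f"answering: {query}"

async def main():
    print([c async for c in stream("📄 GeoJSON cargado - analizando...")])  # []
    print([c async for c in stream("¿Qué es la EUDR?")])                    # ['answering: ¿Qué es la EUDR?']

asyncio.run(main())
```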
@@ -56,20 +61,28 @@ async def chat_response(query, history, method, country, uploaded_file):
             yield history, ""
             return
 
-    # Handle GeoJSON upload →
+    # Handle GeoJSON upload → use cached results
     if method == "Subir GeoJson" and uploaded_file:
         try:
-            #
-
-
-
+            # Check if we have cached results for this file
+            file_key = f"{uploaded_file.name}_{uploaded_file.size if hasattr(uploaded_file, 'size') else 'unknown'}"
+
+            if file_key in geojson_analysis_cache:
+                # Use cached results
+                response = geojson_analysis_cache[file_key]
+            else:
+                # Call API and cache results
+                stats_result = handle_geojson_upload(uploaded_file)
+                formatted_stats = stats_result[0]
+                geojson_analysis_cache[file_key] = formatted_stats
+                response = formatted_stats
+
         except Exception as e:
             response = f"Error processing file: {str(e)}"
 
     # Handle "Talk to Reports"
     else:
         try:
-            # response = f"Based on EUDR reports for {country}, I can help you understand deforestation patterns and compliance requirements. Your question: '{query}' is being analyzed..."
             retrieved_paragraphs = retrieve_paragraphs(query)
             response = await generate(query=query, context=retrieved_paragraphs)
 
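The cache key leans on `uploaded_file.name` plus an optional `size` attribute (hence the `hasattr` guard). If the upload path is a temporary file whose name can vary between sessions, a content hash is a sturdier key; a sketch of that alternative, not what this commit does (`make_file_key` is a hypothetical helper and assumes `file.name` is a readable path):

```python
import hashlib
import os

def make_file_key(path: str) -> str:
    """Hypothetical alternative cache key: hash the uploaded file's contents."""
    try:
        with open(path, "rb") as fh:
            return hashlib.sha256(fh.read()).hexdigest()
    except OSError:
        # Fall back to path + on-disk size if the file cannot be read.
        size = os.path.getsize(path) if os.path.exists(path) else "unknown"
        return f"{path}_{size}"
```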
@@ -87,26 +100,26 @@ async def chat_response(query, history, method, country, uploaded_file):
 def auto_analyze_file(file, history):
     """Automatically analyze uploaded GeoJSON file and add results to chat"""
     if file is not None:
-        # Add a system message indicating file upload
-        analysis_query = "📄 GeoJSON cargado - analizando..."
-        history = history + [(analysis_query, None)]
-        return history, analysis_query
-    return history, ""
-
-def auto_display_results(query, history, uploaded_file):
-    """Display analysis results automatically after file upload"""
-    if uploaded_file is not None and query.startswith("📄 GeoJSON cargado"):
         try:
-            #
-
-
+            # Call API immediately and cache results
+            file_key = f"{file.name}_{file.size if hasattr(file, 'size') else 'unknown'}"
+
+            if file_key not in geojson_analysis_cache:
+                stats_result = handle_geojson_upload(file)
+                formatted_stats = stats_result[0]
+                geojson_analysis_cache[file_key] = formatted_stats
 
-            #
-
+            # Add analysis results directly to chat (no intermediate message)
+            analysis_query = "📄 Análisis del GeoJSON cargado"
+            cached_result = geojson_analysis_cache[file_key]
+
+            # Add both query and response to history
+            history = history + [(analysis_query, cached_result)]
             return history, "**Sources:** WhispAPI Analysis Results"
+
         except Exception as e:
             error_msg = f"❌ Error processing GeoJSON file: {str(e)}"
-            history
+            history = history + [("📄 Error en análisis GeoJSON", error_msg)]
             return history, ""
 
     return history, ""
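Worth noting in this hunk: the old except branch evaluated the bare expression `history` (a no-op), so upload errors never reached the chat; the new line appends an error turn instead. A self-contained illustration of the fixed behaviour:

```python
history = []
try:
    raise ValueError("invalid GeoJSON")  # stand-in for a failing WhispAPI call
except Exception as e:
    error_msg = f"❌ Error processing GeoJSON file: {str(e)}"
    history = history + [("📄 Error en análisis GeoJSON", error_msg)]

print(history[-1][1])  # ❌ Error processing GeoJSON file: invalid GeoJSON
```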
@@ -384,16 +397,12 @@ with gr.Blocks(title="EUDR Bot", theme=theme, css=custom_css) as demo:
         outputs=[geojson_section, reports_section, dropdown_country]
     )
 
-    # File upload - automatically analyze and display in chat
+    # File upload - automatically analyze and display in chat (SIMPLIFIED)
     uploaded_file.change(
         fn=auto_analyze_file,
         inputs=[uploaded_file, chatbot],
-        outputs=[chatbot,
+        outputs=[chatbot, sources_textbox],
         queue=False
-    ).then(
-        fn=auto_display_results,
-        inputs=[examples_hidden, chatbot, uploaded_file],
-        outputs=[chatbot, sources_textbox]
     )
 
     # Chat functionality
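With auto_display_results and the `.then()` chain gone, a single event handles the upload: auto_analyze_file updates both the chatbot and the sources box, and chat_response skips the synthetic upload message, so the WhispAPI analysis runs once per file. A stripped-down sketch of the resulting wiring, under the assumption of a Gradio version that still accepts tuple-style Chatbot history; component names mirror app.py and the handler body is a placeholder:

```python
import gradio as gr

def auto_analyze_file(file, history):
    # Placeholder body: in app.py this runs the cached WhispAPI analysis.
    history = (history or []) + [("📄 Análisis del GeoJSON cargado", "…")]
    return history, "**Sources:** WhispAPI Analysis Results"

with gr.Blocks() as demo:
    uploaded_file = gr.File(label="GeoJSON")
    chatbot = gr.Chatbot()
    sources_textbox = gr.Textbox(label="Sources")

    # One handler, two outputs; no .then() chain, so no second API call.
    uploaded_file.change(
        fn=auto_analyze_file,
        inputs=[uploaded_file, chatbot],
        outputs=[chatbot, sources_textbox],
        queue=False,
    )
```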