Update app.py
app.py (CHANGED)
@@ -1,3 +1,292 @@
 '''Copyright 2024 Ashok Kumar
 
 Licensed under the Apache License, Version 2.0 (the "License");
@@ -94,79 +383,35 @@ def fetch_flight_data(lat_min, lat_max, lon_min, lon_max):
         st.error(f"Unexpected error: {str(e)}")
         return {'states': [], 'time': 0}
 
 # Hugging Face model configuration
-HF_API_URL = "https://api-inference.huggingface.co/models/
-
-headers = {"Authorization": f"Bearer {HF_TOKEN}"}
 
 def query_llm(prompt):
     try:
         payload = {
             "inputs": prompt,
             "parameters": {
-                "
                 "temperature": 0.1,
                 "top_p": 0.95,
-                "
             }
         }
 
         response = requests.post(HF_API_URL, headers=headers, json=payload)
         response.raise_for_status()
         return response.json()[0]['generated_text']
-    except requests.exceptions.HTTPError as e:
-        if e.response.status_code == 403:
-            st.warning("Language model access is currently restricted. Using direct flight data display instead.")
-        else:
-            st.error(f"Error querying language model: {str(e)}")
-        return None
     except Exception as e:
         st.error(f"Error querying language model: {str(e)}")
         return None
 
-def create_flight_embeddings(geo_df):
-    """Create embeddings for flight data to enable semantic search"""
-    try:
-        from sentence_transformers import SentenceTransformer
-        model = SentenceTransformer('all-MiniLM-L6-v2')
-
-        # Create text representations of flight data
-        flight_texts = []
-        for _, row in geo_df.iterrows():
-            text = f"Flight {row['callsign']} from {row['origin_country']} "
-            text += f"at altitude {row['baro_altitude']}m, speed {row['velocity']}m/s, "
-            text += f"heading {row['true_track']}°"
-            flight_texts.append(text)
-
-        # Generate embeddings
-        embeddings = model.encode(flight_texts)
-        return embeddings, flight_texts
-    except Exception as e:
-        st.warning(f"Could not create embeddings: {str(e)}")
-        return None, None
-
-def find_similar_flights(identifier, geo_df, embeddings, flight_texts, threshold=0.7):
-    """Find similar flights using semantic search"""
-    try:
-        from sentence_transformers import SentenceTransformer
-        model = SentenceTransformer('all-MiniLM-L6-v2')
-
-        # Create query embedding
-        query_embedding = model.encode([identifier])
-
-        # Calculate similarities
-        from sklearn.metrics.pairwise import cosine_similarity
-        similarities = cosine_similarity(query_embedding, embeddings)[0]
-
-        # Find similar flights
-        similar_indices = [i for i, sim in enumerate(similarities) if sim > threshold]
-        if similar_indices:
-            return geo_df.iloc[similar_indices]
-        return None
-    except Exception as e:
-        st.warning(f"Error in semantic search: {str(e)}")
-        return None
-
 def query_flight_data(geo_df, question):
     # Preprocess the question to extract key information
     question = question.lower().strip()
@@ -198,61 +443,17 @@ def query_flight_data(geo_df, question):
     if not identifier:
         return "Please specify a flight identifier (callsign or ICAO code) in your question."
 
-    #
-    identifier = identifier.strip().upper()
-
-    # Try to find the flight by callsign or icao (case-insensitive)
     flight_data = None
 
-    # First try exact match
-    if identifier in geo_df['callsign'].str.upper().values:
-        flight_data = geo_df[geo_df['callsign'].str.upper() == identifier]
-    elif identifier in geo_df['icao24'].str.upper().values:
-        flight_data = geo_df[geo_df['icao24'].str.upper() == identifier]
-
-    # If no exact match, try partial match
     if flight_data is None or flight_data.empty:
-
-        clean_identifier = ''.join(filter(str.isalnum, identifier))
-        if not geo_df['callsign'].empty:
-            clean_callsigns = geo_df['callsign'].fillna('').apply(lambda x: ''.join(filter(str.isalnum, str(x).upper())))
-            matches = clean_callsigns == clean_identifier
-            if matches.any():
-                flight_data = geo_df[matches]
 
-    #
-    if flight_data is None or flight_data.empty:
-        try:
-            from difflib import get_close_matches
-            all_callsigns = geo_df['callsign'].fillna('').str.upper().unique()
-            close_matches = get_close_matches(identifier, all_callsigns, n=1, cutoff=0.8)
-            if close_matches:
-                flight_data = geo_df[geo_df['callsign'].str.upper() == close_matches[0]]
-        except:
-            pass
-
-    # If still no match, try semantic search using RAG
-    if flight_data is None or flight_data.empty:
-        try:
-            # Create embeddings for all flights
-            embeddings, flight_texts = create_flight_embeddings(geo_df)
-            if embeddings is not None:
-                # Try to find similar flights
-                similar_flights = find_similar_flights(identifier, geo_df, embeddings, flight_texts)
-                if similar_flights is not None and not similar_flights.empty:
-                    flight_data = similar_flights
-                    st.info(f"Found similar flight(s) to {identifier}")
-        except Exception as e:
-            st.warning(f"Semantic search failed: {str(e)}")
-
-    if flight_data is None or flight_data.empty:
-        # If still no match, show available flights
-        available_flights = geo_df['callsign'].dropna().unique()
-        if len(available_flights) > 0:
-            return f"Could not find flight {identifier}. Available flights: {', '.join(available_flights[:10])}..."
-        return f"Could not find flight {identifier}. No flights currently available in the selected area."
-
-    # Prepare flight data for display
     flight_info = {}
     for col in flight_data.columns:
         if col in flight_data.columns:
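Note on the fuzzy fallback removed above: difflib.get_close_matches ranks candidates by similarity ratio and discards anything below the cutoff, so a cutoff of 0.8 tolerates roughly a one-character slip in a six-character callsign. A minimal standalone sketch (the callsigns are made up for illustration):

from difflib import get_close_matches

# Hypothetical values standing in for geo_df['callsign'].
callsigns = ["SWR9XD", "DLH4AB", "AIC101", "UAL9"]

# n=1 returns at most one candidate; cutoff=0.8 rejects weak matches.
print(get_close_matches("SWR9XE", callsigns, n=1, cutoff=0.8))  # ['SWR9XD']
print(get_close_matches("XYZ123", callsigns, n=1, cutoff=0.8))  # []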
@@ -270,75 +471,33 @@ def query_flight_data(geo_df, question):
                 flight_info[col] = f"{value}° N"
             elif col == 'longitude':
                 flight_info[col] = f"{value}° E"
-
             flight_info[col] = str(value)
 
     if not flight_info:
         return f"No information available for flight {identifier}."
 
-    #
-
-
-
 
-
 {json.dumps(flight_info, indent=2)}
 
-
-
-        llm_response = query_llm(prompt)
-        if llm_response:
-            return llm_response
-    except:
-        pass
-
-    # Fallback to direct data display
-    response = f"Flight Information for {identifier}:\n"
-    for key, value in flight_info.items():
-        response += f"- {key.replace('_', ' ').title()}: {value}\n"
-    return response
 
-
-
-    # Get cached flight data
-    json_dict = fetch_flight_data(lat_min, lat_max, lon_min, lon_max)
 
-    if
-
-
-
-
-
-
-
-
-        # Optimize DataFrame creation
-        state_df = pd.DataFrame(json_dict["states"], columns=columns)
-        state_df['time'] = local_time
-
-        # Create GeoDataFrame more efficiently
-        gdf = gpd.GeoDataFrame(
-            state_df,
-            geometry=gpd.points_from_xy(state_df.longitude, state_df.latitude),
-            crs="EPSG:4326"
-        )
-
-        # Display information
-        st.title("Live Flight Tracker")
-        st.subheader('Flight Details', divider='rainbow')
-        st.write('Location: {0}'.format(loc))
-        st.write('Current Local Time: {0}-{1}:'.format(local_time, local_time_zone))
-        st.write("Minimum_latitude is {0} and Maximum_latitude is {1}".format(lat_min, lat_max))
-        st.write("Minimum_longitude is {0} and Maximum_longitude is {1}".format(lon_min, lon_max))
-        st.write('Number of Visible Flights: {}'.format(len(json_dict['states'])))
-        st.write('Plotting the flight: {}'.format(flight_info))
-        st.subheader('Map Visualization', divider='rainbow')
-        st.write('****Click ":orange[Update Map]" Button to Refresh the Map****')
-        return gdf
-
-    except Exception as e:
-        st.error(f"Error processing flight data: {str(e)}")
-        return None
 
 def flight_tracking(flight_view_level, country, local_time_zone, flight_info, airport, color):
     # Get cached location data
@@ -367,10 +526,51 @@ def flight_tracking(flight_view_level, country, local_time_zone, flight_info, airport, color):
         (airport_country_loc['Latitude'] >= lat_min) &
         (airport_country_loc['Latitude'] <= lat_max) &
         (airport_country_loc['Longitude'] >= lon_min) &
-
 
     # Get traffic data
-    geo_df = get_traffic_gdf(
     if geo_df is None:
         return
 
@@ -1,3 +1,292 @@
+# '''Copyright 2024 Ashok Kumar
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.'''
+
+# import os
+# import requests
+# import json
+# import pandas as pd
+# import numpy as np
+# import requests
+# import geopandas as gpd
+# import contextily as ctx
+# import tzlocal
+# import pytz
+# from PIL import Image
+# from datetime import datetime
+# import matplotlib.pyplot as plt
+# from geopy.exc import GeocoderTimedOut
+# from geopy.geocoders import Nominatim
+# import warnings
+# warnings.filterwarnings('ignore')
+# from plotly.graph_objs import Marker
+# import plotly.express as px
+# import streamlit as st
+# from data import flight_data
+# from huggingface_hub import InferenceApi, login, InferenceClient
+
+
+# hf_token = os.getenv("HF_TOKEN")
+# if hf_token is None:
+# raise ValueError("Hugging Face token not found. Please set the HF_TOKEN environment variable.")
+# login(hf_token)
+
+
+# API_URL = "https://api-inference.huggingface.co/models/google/tapas-base-finetuned-wtq"
+# headers = {"Authorization": f"Bearer {hf_token}"}
+
+# def query(payload):
+# response = requests.post(API_URL, headers=headers, json=payload)
+# return response.json()
+
+# def query_flight_data(geo_df, question):
+
+
+# table_data = {
+# "icao24": geo_df["icao24"].astype(str).iloc[:100].tolist(),
+# "callsign": geo_df["callsign"].astype(str).replace({np.nan: None, np.inf: '0'}).iloc[:100].tolist(),
+# "origin_country": geo_df["origin_country"].astype(str).replace({np.nan: None, np.inf: '0'}).iloc[:100].tolist(),
+# "time_position": geo_df["time_position"].astype(str).replace({np.nan: '0', np.inf: '0'}).iloc[:100].tolist(),
+# "last_contact": geo_df["last_contact"].astype(str).replace({np.nan: '0', np.inf: '0'}).iloc[:100].tolist(),
+# "longitude": geo_df["longitude"].astype(str).replace({np.nan: '0', np.inf: '0'}).iloc[:100].tolist(),
+# "latitude": geo_df["latitude"].astype(str).replace({np.nan: '0', np.inf: '0'}).iloc[:100].tolist(),
+# "baro_altitude": geo_df["baro_altitude"].astype(str).replace({np.nan: '0', np.inf: '0'}).iloc[:100].tolist(),
+# "on_ground": geo_df["on_ground"].astype(str).iloc[:100].tolist(), # Assuming on_ground is boolean or categorical
+# "velocity": geo_df["velocity"].astype(str).replace({np.nan: '0', np.inf: '0'}).iloc[:100].tolist(),
+# "true_track": geo_df["true_track"].astype(str).replace({np.nan: '0', np.inf: '0'}).iloc[:100].tolist(),
+# "vertical_rate": geo_df["vertical_rate"].astype(str).replace({np.nan: '0', np.inf: '0'}).iloc[:100].tolist(),
+# "sensors": geo_df["sensors"].astype(str).replace({np.nan: None, np.inf: '0'}).iloc[:100].tolist(), # Assuming sensors can be None
+# "geo_altitude": geo_df["geo_altitude"].astype(str).replace({np.nan: '0', np.inf: '0'}).iloc[:100].tolist(),
+# "squawk": geo_df["squawk"].astype(str).replace({np.nan: None, np.inf: '0'}).iloc[:100].tolist(), # Assuming squawk can be None
+# "spi": geo_df["spi"].astype(str).iloc[:100].tolist(), # Assuming spi is boolean or categorical
+# "position_source": geo_df["position_source"].astype(str).iloc[:100].tolist(), # Assuming position_source is categorical
+# "time": geo_df["time"].astype(str).replace({np.nan: '0', np.inf: '0'}).iloc[:100].tolist(),
+# "geometry": geo_df["geometry"].astype(str).replace({np.nan: None, np.inf: '0'}).iloc[:100].tolist() # Assuming geometry can be None
+# }
+
+
+# # Construct the payload
+# payload = {
+# "inputs": {
+# "query": question,
+# "table": table_data,
+# }
+# }
+
+# # Get the model response
+# response = query(payload)
+
+# # Check if 'answer' is in response and return it as a sentence
+# if 'answer' in response:
+# answer = response['answer']
+# return f"The answer to your question '{question}': :orange[{answer}]"
+# else:
+# return "The model could not find an answer to your question."
+
+
+# def flight_tracking(flight_view_level, country, local_time_zone, flight_info, airport, color):
+# geolocator = Nominatim(user_agent="flight_tracker")
+# loc = geolocator.geocode(country)
+# loc_box = loc[1]
+# extend_left =+12*flight_view_level
+# extend_right =+10*flight_view_level
+# extend_top =+10*flight_view_level
+# extend_bottom =+ 18*flight_view_level
+# lat_min, lat_max = (loc_box[0] - extend_left), loc_box[0]+extend_right
+# lon_min, lon_max = (loc_box[1] - extend_bottom), loc_box[1]+extend_top
+
+# tile_zoom = 8 # zoom of the map loaded by contextily
+# figsize = (15, 15)
+# columns = ["icao24","callsign","origin_country","time_position","last_contact","longitude","latitude",
+# "baro_altitude","on_ground","velocity","true_track","vertical_rate","sensors","geo_altitude",
+# "squawk","spi","position_source",]
+# data_url = "https://raw.githubusercontent.com/ashok2216-A/ashok_airport-data/main/data/airports.dat"
+# column_names = ["Airport ID", "Name", "City", "Country", "IATA/FAA", "ICAO", "Latitude", "Longitude",
+# "Altitude", "Timezone", "DST", "Tz database time zone", "Type", "Source"]
+# airport_df = pd.read_csv(data_url, header=None, names=column_names)
+# airport_locations = airport_df[["Name", "City", "Country", "IATA/FAA", "Latitude", "Longitude"]]
+# airport_country_loc = airport_locations[airport_locations['Country'] == str(loc)]
+# airport_country_loc = airport_country_loc[(airport_country_loc['Country'] == str(loc)) & (airport_country_loc['Latitude'] >= lat_min) &
+# (airport_country_loc['Latitude'] <= lat_max) & (airport_country_loc['Longitude'] >= lon_min) &
+# (airport_country_loc['Longitude'] <= lon_max)]
+# def get_traffic_gdf():
+# url_data = (
+# f"https://@opensky-network.org/api/states/all?"
+# f"lamin={str(lat_min)}"
+# f"&lomin={str(lon_min)}"
+# f"&lamax={str(lat_max)}"
+# f"&lomax={str(lon_max)}")
+# json_dict = requests.get(url_data).json()
+
+# unix_timestamp = int(json_dict["time"])
+# local_timezone = pytz.timezone(local_time_zone) # get pytz timezone
+# local_time = datetime.fromtimestamp(unix_timestamp, local_timezone).strftime('%Y-%m-%d %H:%M:%S')
+# time = []
+# for i in range(len(json_dict['states'])):
+# time.append(local_time)
+# df_time = pd.DataFrame(time,columns=['time'])
+# state_df = pd.DataFrame(json_dict["states"],columns=columns)
+# state_df['time'] = df_time
+# gdf = gpd.GeoDataFrame(
+# state_df,
+# geometry=gpd.points_from_xy(state_df.longitude, state_df.latitude),
+# crs={"init": "epsg:4326"}, # WGS84
+# )
+# # banner_image = Image.open('banner.png')
+# # st.image(banner_image, width=300)
+# st.title("Live Flight Tracker")
+# st.subheader('Flight Details', divider='rainbow')
+# st.write('Location: {0}'.format(loc))
+# st.write('Current Local Time: {0}-{1}:'.format(local_time, local_time_zone))
+# st.write("Minimum_latitude is {0} and Maximum_latitude is {1}".format(lat_min, lat_max))
+# st.write("Minimum_longitude is {0} and Maximum_longitude is {1}".format(lon_min, lon_max))
+# st.write('Number of Visible Flights: {}'.format(len(json_dict['states'])))
+# st.write('Plotting the flight: {}'.format(flight_info))
+# st.subheader('Map Visualization', divider='rainbow')
+# st.write('****Click ":orange[Update Map]" Button to Refresh the Map****')
+# return gdf
+
+# geo_df = get_traffic_gdf()
+# if airport == 0:
+# fig = px.scatter_mapbox(geo_df, lat="latitude", lon="longitude",color=flight_info,
+# color_continuous_scale=color, zoom=4,width=1200, height=600,opacity=1,
+# hover_name ='origin_country',hover_data=['callsign', 'baro_altitude',
+# 'on_ground', 'velocity', 'true_track', 'vertical_rate', 'geo_altitude'], template='plotly_dark')
+# elif airport == 1:
+# fig = px.scatter_mapbox(geo_df, lat="latitude", lon="longitude",color=flight_info,
+# color_continuous_scale=color, zoom=4,width=1200, height=600,opacity=1,
+# hover_name ='origin_country',hover_data=['callsign', 'baro_altitude',
+# 'on_ground', 'velocity', 'true_track', 'vertical_rate', 'geo_altitude'], template='plotly_dark')
+# fig.add_trace(px.scatter_mapbox(airport_country_loc, lat="Latitude", lon="Longitude",
+# hover_name ='Name', hover_data=["City", "Country", "IATA/FAA"]).data[0])
+# else: None
+# fig.update_layout(mapbox_style="carto-darkmatter")
+# fig.update_layout(margin={"r": 0, "t": 0, "l": 0, "b": 0})
+# # out = fig.show())
+# out = st.plotly_chart(fig, theme=None)
+# return out
+# st.set_page_config(
+# layout="wide"
+# )
+# image = Image.open('logo.png')
+# add_selectbox = st.sidebar.image(
+# image, width=150
+# )
+# add_selectbox = st.sidebar.subheader(
+# "Configure Map",divider='rainbow'
+# )
+# with st.sidebar:
+# Refresh = st.button('Update Map', key=1)
+# on = st.toggle('View Airports')
+# if on:
+# air_port = 1
+# st.write(':rainbow[Nice Work Buddy!]')
+# st.write('Now Airports are Visible')
+# else:
+# air_port=0
+# view = st.slider('Increase Flight Visibility',1,6,2)
+# st.write("You Selected:", view)
+# cou = st.text_input('Type Country Name', 'north america')
+# st.write('The current Country name is', cou)
+# time = st.text_input('Type Time Zone Name (Ex: America/Toronto, Europe/Berlin)', 'Asia/Kolkata')
+# st.write('The current Time Zone is', time)
+# info = st.selectbox(
+# 'Select Flight Information',
+# ('baro_altitude',
+# 'on_ground', 'velocity',
+# 'geo_altitude'))
+# st.write('Plotting the data of Flight:', info)
+# clr = st.radio('Pick A Color for Scatter Plot',["rainbow","ice","hot"])
+# if clr == "rainbow":
+# st.write('The current color is', "****:rainbow[Rainbow]****")
+# elif clr == 'ice':
+# st.write('The current color is', "****:blue[Ice]****")
+# elif clr == 'hot':
+# st.write('The current color is', "****:red[Hot]****")
+# else: None
+# # with st.spinner('Wait!, We Requesting API Data...'):
+# # try:
+# flight_tracking(flight_view_level=view, country=cou,flight_info=info,
+# local_time_zone=time, airport=air_port, color=clr)
+# st.subheader('Ask your Questions!', divider='rainbow')
+# st.write("Google's TAPAS base LLM model 🤖")
+# geo_df = flight_data(flight_view_level = view, country= cou, flight_info=info, local_time_zone=time, airport=1)
+# question = st.text_input('Type your questions here', "What is the squawk code for SWR9XD?")
+# result = query_flight_data(geo_df, question)
+# st.markdown(result)
+# # except TypeError:
+# # st.error(':red[Error: ] Please Re-run this page.', icon="🚨")
+# # st.button('Re-run', type="primary")
+# # st.snow()
+
+
+
+# # import streamlit as st
+# # from huggingface_hub import InferenceClient
+# # import os
+
+# # hf_token = os.getenv("HF_TOKEN")
+# # # Set up the Hugging Face Inference Client
+# # client = InferenceClient(
+# # provider="together", # Replace with the correct provider if needed
+# # api_key= hf_token # Replace with your Hugging Face API key
+# # )
+
+# # # Streamlit app title
+# # st.title("🤖 Deepseek R1 Chatbot")
+# # st.write("Chat with the Deepseek R1 model powered by Hugging Face Inference API.")
+
+# # # Initialize session state to store chat history
+# # if "messages" not in st.session_state:
+# # st.session_state.messages = []
+
+# # # Display chat history
+# # for message in st.session_state.messages:
+# # with st.chat_message(message["role"]):
+# # st.markdown(message["content"])
+
+# # # User input
+# # if prompt := st.chat_input("What would you like to ask?"):
+# # # Add user message to chat history
+# # st.session_state.messages.append({"role": "user", "content": prompt})
+# # with st.chat_message("user"):
+# # st.markdown(prompt)
+
+# # # Generate response from Deepseek R1 model
+# # with st.spinner("Thinking..."):
+# # try:
+# # # Prepare the messages for the model
+# # messages = [{"role": m["role"], "content": m["content"]} for m in st.session_state.messages]
+
+# # # Call the Hugging Face Inference API
+# # completion = client.chat.completions.create(
+# # model="deepseek-ai/DeepSeek-R1", # Replace with the correct model name
+# # messages=messages,
+# # max_tokens=500
+# # )
+
+# # # Extract the model's response
+# # response = completion.choices[0].message.content
+
+# # # Add model's response to chat history
+# # st.session_state.messages.append({"role": "assistant", "content": response})
+# # with st.chat_message("assistant"):
+# # st.markdown(response)
+
+# # except Exception as e:
+# # st.error(f"An error occurred: {e}")
+
+
+ '''Copyright 2024 Ashok Kumar
 
 Licensed under the Apache License, Version 2.0 (the "License");
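For context on the commented-out TAPAS path kept above: google/tapas-base-finetuned-wtq is a table question-answering model, so the request pairs a natural-language query with a column-oriented table of strings, and the reply carries an 'answer' field, which is what the legacy code checks. A minimal sketch of that request shape (token handling and the table values here are placeholders):

import os
import requests

API_URL = "https://api-inference.huggingface.co/models/google/tapas-base-finetuned-wtq"
headers = {"Authorization": f"Bearer {os.getenv('HF_TOKEN')}"}

# TAPAS expects every cell as a string; these two rows are illustrative only.
table = {
    "callsign": ["SWR9XD", "DLH4AB"],
    "squawk": ["1000", "7421"],
}
payload = {"inputs": {"query": "What is the squawk code for SWR9XD?", "table": table}}

response = requests.post(API_URL, headers=headers, json=payload).json()
print(response.get("answer"))  # present when the model finds an answer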
@@ -94,79 +383,35 @@ def fetch_flight_data(lat_min, lat_max, lon_min, lon_max):
         st.error(f"Unexpected error: {str(e)}")
         return {'states': [], 'time': 0}
 
+
+hf_token = os.getenv("HF_TOKEN")
+if hf_token is None:
+    raise ValueError("Hugging Face token not found. Please set the HF_TOKEN environment variable.")
+login(hf_token)
+
 # Hugging Face model configuration
+HF_API_URL = "https://api-inference.huggingface.co/models/mistralai/Mistral-7B-Instruct-v0.2"
+headers = {"Authorization": f"Bearer {hf_token}"}
 
 def query_llm(prompt):
     try:
         payload = {
             "inputs": prompt,
             "parameters": {
+                "max_new_tokens": 250,
                 "temperature": 0.1,
                 "top_p": 0.95,
+                "return_full_text": False
             }
         }
 
         response = requests.post(HF_API_URL, headers=headers, json=payload)
         response.raise_for_status()
         return response.json()[0]['generated_text']
     except Exception as e:
         st.error(f"Error querying language model: {str(e)}")
         return None
 
 def query_flight_data(geo_df, question):
     # Preprocess the question to extract key information
     question = question.lower().strip()
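The new query_llm targets the plain text-generation inference endpoint: the payload is a prompt string plus generation parameters, and the reply is a list whose first element holds the completion, which is what response.json()[0]['generated_text'] assumes. A standalone sketch with the same request shape (model URL and token handling as in the diff; the prompt is made up):

import os
import requests

HF_API_URL = "https://api-inference.huggingface.co/models/mistralai/Mistral-7B-Instruct-v0.2"
headers = {"Authorization": f"Bearer {os.getenv('HF_TOKEN')}"}

payload = {
    "inputs": "Summarise: flight SWR9XD is at 11000 m moving at 250 m/s.",
    "parameters": {
        "max_new_tokens": 250,      # cap on generated tokens
        "temperature": 0.1,         # near-deterministic sampling
        "top_p": 0.95,
        "return_full_text": False,  # return only the completion, not the echoed prompt
    },
}

resp = requests.post(HF_API_URL, headers=headers, json=payload)
resp.raise_for_status()
print(resp.json()[0]["generated_text"])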
@@ -198,61 +443,17 @@ def query_flight_data(geo_df, question):
     if not identifier:
         return "Please specify a flight identifier (callsign or ICAO code) in your question."
 
+    # Try to find the flight by callsign or icao
     flight_data = None
+    if identifier in geo_df['callsign'].values:
+        flight_data = geo_df[geo_df['callsign'] == identifier]
+    elif identifier in geo_df['icao24'].values:
+        flight_data = geo_df[geo_df['icao24'] == identifier]
 
     if flight_data is None or flight_data.empty:
+        return f"Could not find flight information for {identifier}. Please check the flight identifier and try again."
 
+    # Prepare flight data for the LLM
     flight_info = {}
     for col in flight_data.columns:
         if col in flight_data.columns:
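The lookup added in this hunk is a plain exact match against the callsign and icao24 columns; the case-insensitive, partial, fuzzy, and embedding fallbacks were all dropped, so an identifier must match the stored string verbatim. A small pandas sketch of that behaviour (rows are fabricated; OpenSky callsigns are often padded with trailing spaces, which exact matching misses):

import pandas as pd

geo_df = pd.DataFrame({
    "callsign": ["SWR9XD ", "DLH4AB"],   # first value padded, as OpenSky often returns it
    "icao24": ["4b1805", "3c6444"],
})

identifier = "DLH4AB"
flight_data = None
if identifier in geo_df["callsign"].values:
    flight_data = geo_df[geo_df["callsign"] == identifier]
elif identifier in geo_df["icao24"].values:
    flight_data = geo_df[geo_df["icao24"] == identifier]

print(flight_data)                            # the DLH4AB row
print("SWR9XD" in geo_df["callsign"].values)  # False: the trailing space defeats the match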
@@ -270,75 +471,33 @@ def query_flight_data(geo_df, question):
                 flight_info[col] = f"{value}° N"
             elif col == 'longitude':
                 flight_info[col] = f"{value}° E"
+            else:
                 flight_info[col] = str(value)
 
     if not flight_info:
         return f"No information available for flight {identifier}."
 
+    # Create a prompt for the LLM
+    prompt = f"""You are a flight information assistant. Answer the following question about flight {identifier} using the provided flight data.
+
+Question: {question}
 
+Flight Data:
 {json.dumps(flight_info, indent=2)}
 
+Please provide a clear and concise answer focusing on the specific information requested in the question. If the question asks for information not available in the data, say so clearly."""
 
+    # Get response from LLM
+    llm_response = query_llm(prompt)
 
+    if llm_response:
+        return llm_response
+    else:
+        # Fallback to direct data response if LLM fails
+        response = f"Flight Information for {identifier}:\n"
+        for key, value in flight_info.items():
+            response += f"- {key.replace('_', ' ').title()}: {value}\n"
+        return response
 
 def flight_tracking(flight_view_level, country, local_time_zone, flight_info, airport, color):
     # Get cached location data
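With this hunk the question is answered by prompting the instruct model with a JSON dump of one flight's fields, and the fallback branch is what users see whenever query_llm returns None. A quick sketch of that fallback output for a hypothetical record:

flight_info = {"callsign": "SWR9XD", "baro_altitude": "11000 meters", "velocity": "250 m/s"}
identifier = "SWR9XD"

response = f"Flight Information for {identifier}:\n"
for key, value in flight_info.items():
    response += f"- {key.replace('_', ' ').title()}: {value}\n"
print(response)
# Flight Information for SWR9XD:
# - Callsign: SWR9XD
# - Baro Altitude: 11000 meters
# - Velocity: 250 m/s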
@@ -367,10 +526,51 @@ def flight_tracking(flight_view_level, country, local_time_zone, flight_info, airport, color):
         (airport_country_loc['Latitude'] >= lat_min) &
         (airport_country_loc['Latitude'] <= lat_max) &
         (airport_country_loc['Longitude'] >= lon_min) &
+        (airport_country_loc['Longitude'] <= lon_max)]
 
+    def get_traffic_gdf():
+        # Get cached flight data
+        json_dict = fetch_flight_data(lat_min, lat_max, lon_min, lon_max)
+
+        if not json_dict or not json_dict.get('states'):
+            st.warning("No flight data available for the selected area.")
+            return None
+
+        try:
+            unix_timestamp = int(json_dict["time"])
+            local_timezone = pytz.timezone(local_time_zone)
+            local_time = datetime.fromtimestamp(unix_timestamp, local_timezone).strftime('%Y-%m-%d %H:%M:%S')
+
+            # Optimize DataFrame creation
+            state_df = pd.DataFrame(json_dict["states"], columns=columns)
+            state_df['time'] = local_time
+
+            # Create GeoDataFrame more efficiently
+            gdf = gpd.GeoDataFrame(
+                state_df,
+                geometry=gpd.points_from_xy(state_df.longitude, state_df.latitude),
+                crs="EPSG:4326"
+            )
+
+            # Display information
+            st.title("Live Flight Tracker")
+            st.subheader('Flight Details', divider='rainbow')
+            st.write('Location: {0}'.format(loc))
+            st.write('Current Local Time: {0}-{1}:'.format(local_time, local_time_zone))
+            st.write("Minimum_latitude is {0} and Maximum_latitude is {1}".format(lat_min, lat_max))
+            st.write("Minimum_longitude is {0} and Maximum_longitude is {1}".format(lon_min, lon_max))
+            st.write('Number of Visible Flights: {}'.format(len(json_dict['states'])))
+            st.write('Plotting the flight: {}'.format(flight_info))
+            st.subheader('Map Visualization', divider='rainbow')
+            st.write('****Click ":orange[Update Map]" Button to Refresh the Map****')
+            return gdf
+
+        except Exception as e:
+            st.error(f"Error processing flight data: {str(e)}")
+            return None
+
     # Get traffic data
+    geo_df = get_traffic_gdf()
     if geo_df is None:
         return
 
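The reworked get_traffic_gdf builds the GeoDataFrame in one pass: the state vectors go into a DataFrame, points_from_xy vectorises the geometry, and the CRS is given as the plain "EPSG:4326" string rather than the older {"init": "epsg:4326"} mapping used in the commented-out version. A self-contained sketch with two fabricated state rows:

import geopandas as gpd
import pandas as pd

# Two made-up state vectors; the real columns list also carries icao24, velocity, etc.
state_df = pd.DataFrame({
    "callsign": ["SWR9XD", "DLH4AB"],
    "longitude": [8.55, 13.29],
    "latitude": [47.46, 52.56],
})

gdf = gpd.GeoDataFrame(
    state_df,
    geometry=gpd.points_from_xy(state_df.longitude, state_df.latitude),
    crs="EPSG:4326",  # WGS84 longitude/latitude, matching the OpenSky coordinates
)
print(gdf.geometry.iloc[0])  # POINT (8.55 47.46)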