adding timesfm future metric prediction
Files changed:
- app.py (+2 -2)
- pag/monitor.py (+110 -15)
- pb/timesfm_pb2.py (+30 -0, added)
- pb/timesfm_pb2_grpc.py (+66 -0, added)
- process.py (+4 -3)
- requirements.txt (+3 -1)
- senHub.py (+8 -0)
app.py (CHANGED)

@@ -58,8 +58,8 @@ def authenticate_user():
 
 
 def main():
-    for x in os.walk("."):
-        print(x[0])
+    # for x in os.walk("."):
+    #     print(x[0])
 
     if "authenticated" not in st.session_state:
         st.session_state.authenticated = False
pag/monitor.py (CHANGED)

@@ -5,13 +5,18 @@ import streamlit as st
 import geopandas as gpd
 from authentication import greeting, check_password
 from senHub import SenHub
-from datetime import datetime
+from datetime import datetime, timedelta
 from sentinelhub import SHConfig
 import requests
 import process
 from zipfile import ZipFile
 import plotly.express as px
 import threading
+import pandas as pd
+import grpc
+import pb.timesfm_pb2_grpc
+import pb.timesfm_pb2
+import time
 
 def check_authentication():
     if not check_password():

@@ -20,9 +25,10 @@ def check_authentication():
 
 
     config = SHConfig()
-    config.instance_id = '
-    config.sh_client_id = '
-    config.sh_client_secret = '
+    config.instance_id = '352670fb-2edf-4abd-90c8-437485a2403e'
+    config.sh_client_id = 'ca95f10f-443c-4c60-9a36-98950292bb9b'
+    config.sh_client_secret = 'rNFGRxGNiNFrXJfGyHIkVRyGOrdWNsfI'
+    config.sh_timesfm_IP = "34.121.141.161"
 
 def select_field(gdf):
     st.markdown("""

@@ -78,35 +84,49 @@ def get_and_cache_available_dates(_df, field, year, start_date, end_date):
     # curated_df = gpd.read_file(curated_date_path)
     # return curated_df
 
-def get_cuarted_df_for_field(df, field, date, metric, clientName, dates):
+def get_cuarted_df_for_field(df, field, date, metric, clientName, dates=None):
     curated_date_path = utils.get_curated_location_img_path(clientName, metric, date, field)
     if curated_date_path is not None:
        curated_df = gpd.read_file(curated_date_path)
 
    else:
-
+        download_date_data(df, field, [date], metric, clientName,)
        curated_date_path = utils.get_curated_location_img_path(clientName, metric, date, field)
        print("curr selected date processed")
 
-
-
-        download_thread = threading.Thread(target=download_date_date, name="Downloader", args=(df, field, old_dates, metric, clientName))
-        download_thread.start()
+        if dates:
+            old_dates = [prev_date for prev_date in dates if prev_date != date]
+
+            download_thread = threading.Thread(target=download_date_data, name="Downloader", args=(df, field, old_dates, metric, clientName))
+            download_thread.start()
 
        curated_date_path = utils.get_curated_location_img_path(clientName, metric, date, field)
        curated_df = gpd.read_file(curated_date_path)
+
    return curated_df
 
-def download_date_date(df, field, dates, metric, clientName,):
+# def check_and_download_date_data(df, field, date, metric, clientName,):
+#     curated_date_path = utils.get_curated_location_img_path(clientName, metric, date, field)
+#     if curated_date_path is not None:
+#         curated_df = gpd.read_file(curated_date_path)
+#     else:
+#         process.Download_image_in_given_date(clientName, metric, df, field, date)
+#         process.mask_downladed_image(clientName, metric, df, field, date)
+#         process.convert_maske_image_to_geodataframe(clientName, metric, df, field, date, df.crs)
+#         curated_date_path = utils.get_curated_location_img_path(clientName, metric, date, field)
+#         curated_df = gpd.read_file(curated_date_path)
+#     return curated_df
+
+def download_date_data(df, field, dates, metric, clientName,):
    for date in dates:
        process.Download_image_in_given_date(clientName, metric, df, field, date)
        process.mask_downladed_image(clientName, metric, df, field, date)
        process.convert_maske_image_to_geodataframe(clientName, metric, df, field, date, df.crs)
-    print(f"finished downloading prev dates data")
+    # print(f"finished downloading prev dates data")
    return
 
 
+
 def track(metric, field_name, src_df, client_name):
    st.title(":green[Select Date and Start Monitoring]")
    dates = []

@@ -160,9 +180,9 @@ def track(metric, field_name, src_df, client_name):
 
    # Get the field data at the selected date
    with st.spinner('Loading Field Data...'):
-        # Get the metric data and cloud cover data for the selected field and date
-        metric_data = get_cuarted_df_for_field(src_df, field_name, date, metric, client_name, dates)
-        cloud_cover_data = get_cuarted_df_for_field(src_df, field_name, date, 'CLP', client_name, dates)
+        # Get the metric data and cloud cover data for the selected field and date, to enable background download set dates=dates
+        metric_data = get_cuarted_df_for_field(src_df, field_name, date, metric, client_name, dates=None)
+        cloud_cover_data = get_cuarted_df_for_field(src_df, field_name, date, 'CLP', client_name, dates=None)
 
    #Merge the metric and cloud cover data on the geometry column
    field_data = metric_data.merge(cloud_cover_data, on='geometry')

@@ -280,6 +300,81 @@ def monitor_fields():
    else:
        st.info("No Fields Added Yet!")
        return
+
+    if field_name != "Select Field":
+        st.title(":orange[Predict Metrics for Next Month]")
+        subcol1, subcol2, subcol3 = st.columns(3)
+        if subcol2.button(f'Predict {metric} for Next 3 Months'):
+            start_date = '2024-01-01'
+            today = datetime.today()
+            end_date = today.strftime('%Y-%m-%d')
+            year = '2024'
+
+            dates = get_and_cache_available_dates(gdf, field_name, year, start_date, end_date)
+            my_bar = st.progress(0, text=f"Downloading Data for the last {len(dates)//4} months ...")
+            counter = 0
+            downloaded_prev_metrics = []
+            for index, date in enumerate(dates):
+                # time.sleep(0.1)
+                metric_data = get_cuarted_df_for_field(gdf, field_name, date, metric, current_user, dates=None)
+                cloud_cover_data = get_cuarted_df_for_field(gdf, field_name, date, 'CLP', current_user, dates=None)
+                field_data = metric_data.merge(cloud_cover_data, on='geometry')
+                avg_metric = field_data[f'{metric}_{date}'].mean()
+                downloaded_prev_metrics.append((date, avg_metric))
+                counter = counter + 100/(len(dates))
+                my_bar.progress(round(counter), text=f"Downloading Data for the last {len(dates)//4} months: {round(counter)}%")
+
+            # chart_data = pd.DataFrame(
+            #     {
+            #         "date": [metric[0] for metric in downloaded_prev_metrics],
+            #         f"{metric}": [metric[1] for metric in downloaded_prev_metrics],
+            #     }
+            # )
+
+            # st.area_chart(chart_data, x="date", y=f"{metric}")
+            channel = grpc.insecure_channel(f"{config.sh_timesfm_IP}:50051")
+            print("runing client request")
+            stub = pb.timesfm_pb2_grpc.PredictAgriStub(channel)
+            features = stub.predict_metric(iter([pb.timesfm_pb2.prev_values(value=metric[1], date=metric[0]) for metric in downloaded_prev_metrics]))
+            print("server streaming:")
+            predictions = []
+            for feature in features:
+                predictions.append(feature.value)
+            # do something with the returned output
+            # print(predictions)
+            future_dates = []
+            # print(dates[0])
+            curr_date = datetime.today()
+            for pred in predictions:
+                curr_date = curr_date + timedelta(days=7)
+                future_dates.append(curr_date.strftime('%Y-%m-%d'))
+
+            prev_dates = [metric[0] for metric in downloaded_prev_metrics]
+            history_metric_data = [metric[1] for metric in downloaded_prev_metrics]
+            future_metric_data = predictions
+            interval_dates = prev_dates
+            interval_dates.extend(future_dates)
+            history_metric_data.extend([0 for i in range(len(predictions))])
+            masked_future_metric_data = [0 for i in range(len([metric[1] for metric in downloaded_prev_metrics]))]
+            masked_future_metric_data.extend(future_metric_data)
+            # print(f"interval_dates:{len(interval_dates)}")
+            # print(f"history_metric_data:{len(history_metric_data)}")
+            # print(f"masked_future_metric_data:{len(masked_future_metric_data)}")
+            print(predictions)
+
+            print(interval_dates)
+            prediction_chart_data = pd.DataFrame(
+                {
+                    f"history_{metric}_values": history_metric_data,
+                    f"predicted_{metric}_values": masked_future_metric_data,
+                    "date": interval_dates,
+                }
+            )
+
+            # print(prediction_chart_data)
+
+            st.area_chart(prediction_chart_data, x="date", y=[f"history_{metric}_values", f"predicted_{metric}_values"])
+
 
 
 
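Note: the prediction flow above boils down to a single bidirectional-streaming gRPC call. Below is a minimal standalone sketch of that client pattern, assuming a PredictAgri server is reachable on port 50051; HOST is a placeholder (monitor.py reads it from config.sh_timesfm_IP).

# Minimal sketch of the client call used above: stream the
# (date, average-metric) history to the TimesFM service and
# collect the streamed forecast values.
import grpc
import pb.timesfm_pb2 as timesfm_pb2
import pb.timesfm_pb2_grpc as timesfm_pb2_grpc

HOST = "127.0.0.1"  # placeholder; the app uses config.sh_timesfm_IP

def predict(history):
    """history: iterable of (date_str, value) pairs; returns forecast values."""
    channel = grpc.insecure_channel(f"{HOST}:50051")
    stub = timesfm_pb2_grpc.PredictAgriStub(channel)
    # predict_metric is a bidirectional-streaming RPC: requests go in as an
    # iterator of prev_values, responses come back as a stream of future_values.
    requests = (timesfm_pb2.prev_values(value=v, date=d) for d, v in history)
    return [resp.value for resp in stub.predict_metric(requests)]

if __name__ == "__main__":
    print(predict([("2024-01-01", 0.41), ("2024-01-08", 0.44), ("2024-01-15", 0.47)]))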
pb/timesfm_pb2.py (ADDED)

@@ -0,0 +1,30 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: timesfm.proto
+# Protobuf Python Version: 4.25.1
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf.internal import builder as _builder
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\rtimesfm.proto\"*\n\x0bprev_values\x12\r\n\x05value\x18\x01 \x01(\x02\x12\x0c\n\x04\x64\x61te\x18\x02 \x01(\t\",\n\rfuture_values\x12\r\n\x05value\x18\x01 \x01(\x02\x12\x0c\n\x04\x64\x61te\x18\x02 \x01(\t2A\n\x0bPredictAgri\x12\x32\n\x0epredict_metric\x12\x0c.prev_values\x1a\x0e.future_values(\x01\x30\x01\x62\x06proto3')
+
+_globals = globals()
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'timesfm_pb2', _globals)
+if _descriptor._USE_C_DESCRIPTORS == False:
+  DESCRIPTOR._options = None
+  _globals['_PREV_VALUES']._serialized_start=17
+  _globals['_PREV_VALUES']._serialized_end=59
+  _globals['_FUTURE_VALUES']._serialized_start=61
+  _globals['_FUTURE_VALUES']._serialized_end=105
+  _globals['_PREDICTAGRI']._serialized_start=107
+  _globals['_PREDICTAGRI']._serialized_end=172
+# @@protoc_insertion_point(module_scope)
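The serialized descriptor in this generated module fully encodes the schema, so the message and service layout can be read back out of it even though timesfm.proto itself is not in the commit. A quick introspection sketch (assumes the pb package is importable):

# Recover the schema from the generated module.
import pb.timesfm_pb2 as timesfm_pb2

for name in ("prev_values", "future_values"):
    desc = timesfm_pb2.DESCRIPTOR.message_types_by_name[name]
    fields = {f.name: f.type for f in desc.fields}  # type 2 = float, 9 = string
    print(name, fields)  # both carry value: float and date: string

svc = timesfm_pb2.DESCRIPTOR.services_by_name["PredictAgri"]
method = svc.methods_by_name["predict_metric"]
# Bidirectional streaming (the grpc stub registers it via channel.stream_stream):
# stream prev_values -> stream future_values
print(method.input_type.name, "->", method.output_type.name)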
pb/timesfm_pb2_grpc.py (ADDED)

@@ -0,0 +1,66 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
+import grpc
+
+import pb.timesfm_pb2 as timesfm__pb2
+
+
+class PredictAgriStub(object):
+    """Missing associated documentation comment in .proto file."""
+
+    def __init__(self, channel):
+        """Constructor.
+
+        Args:
+            channel: A grpc.Channel.
+        """
+        self.predict_metric = channel.stream_stream(
+                '/PredictAgri/predict_metric',
+                request_serializer=timesfm__pb2.prev_values.SerializeToString,
+                response_deserializer=timesfm__pb2.future_values.FromString,
+                )
+
+
+class PredictAgriServicer(object):
+    """Missing associated documentation comment in .proto file."""
+
+    def predict_metric(self, request_iterator, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+
+def add_PredictAgriServicer_to_server(servicer, server):
+    rpc_method_handlers = {
+            'predict_metric': grpc.stream_stream_rpc_method_handler(
+                    servicer.predict_metric,
+                    request_deserializer=timesfm__pb2.prev_values.FromString,
+                    response_serializer=timesfm__pb2.future_values.SerializeToString,
+            ),
+    }
+    generic_handler = grpc.method_handlers_generic_handler(
+            'PredictAgri', rpc_method_handlers)
+    server.add_generic_rpc_handlers((generic_handler,))
+
+
+# This class is part of an EXPERIMENTAL API.
+class PredictAgri(object):
+    """Missing associated documentation comment in .proto file."""
+
+    @staticmethod
+    def predict_metric(request_iterator,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.stream_stream(request_iterator, target, '/PredictAgri/predict_metric',
+            timesfm__pb2.prev_values.SerializeToString,
+            timesfm__pb2.future_values.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
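The commit ships only the client side; the server at config.sh_timesfm_IP:50051 is external. For reference, a hedged sketch of a servicer that would satisfy these stubs, with a trivial repeat-last-value placeholder standing in for the actual TimesFM model (not part of this commit):

# Hedged sketch of a server backing these stubs; forecast logic is a placeholder.
from concurrent import futures
import grpc
import pb.timesfm_pb2 as timesfm_pb2
import pb.timesfm_pb2_grpc as timesfm_pb2_grpc

HORIZON = 12  # number of future points to stream back (assumption)

class Forecaster(timesfm_pb2_grpc.PredictAgriServicer):
    def predict_metric(self, request_iterator, context):
        # Drain the request stream of (date, value) history points.
        history = [(req.date, req.value) for req in request_iterator]
        last = history[-1][1] if history else 0.0
        for _ in range(HORIZON):
            # date is left empty; monitor.py derives future dates itself.
            yield timesfm_pb2.future_values(value=last, date="")

def serve():
    server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
    timesfm_pb2_grpc.add_PredictAgriServicer_to_server(Forecaster(), server)
    server.add_insecure_port("[::]:50051")
    server.start()
    server.wait_for_termination()

if __name__ == "__main__":
    serve()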
process.py (CHANGED)

@@ -6,9 +6,10 @@ from sentinelhub import SHConfig, MimeType
 
 
 config = SHConfig()
-config.instance_id = '
-config.sh_client_id = '
-config.sh_client_secret = '
+config.instance_id = '352670fb-2edf-4abd-90c8-437485a2403e'
+config.sh_client_id = 'ca95f10f-443c-4c60-9a36-98950292bb9b'
+config.sh_client_secret = 'rNFGRxGNiNFrXJfGyHIkVRyGOrdWNsfI'
+config.sh_timesfm_IP = "34.121.141.161"
 
 def Download_image_in_given_date(clientName, metric, df, field, date, mime_type = MimeType.TIFF):
    sen_obj = SenHub(config, mime_type = mime_type)
requirements.txt (CHANGED)

@@ -9,4 +9,6 @@ sentinelhub==3.9.1
 shapely==2.0.3
 streamlit-folium==0.12.0
 streamlit==1.29.0
-geopy==2.4.1
+geopy==2.4.1
+grpcio
+grpcio-tools
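grpcio-tools is the package that regenerates pb/timesfm_pb2.py and pb/timesfm_pb2_grpc.py. A sketch of the regeneration step; timesfm.proto itself is not part of this commit, so its location below is an assumption, and since the committed _grpc file imports pb.timesfm_pb2 the generated import line was presumably adjusted by hand afterwards:

# Regenerate the pb/ modules from timesfm.proto (path assumed).
from grpc_tools import protoc

protoc.main([
    "grpc_tools.protoc",
    "-I.",                   # directory containing timesfm.proto (assumed)
    "--python_out=pb",       # emits timesfm_pb2.py
    "--grpc_python_out=pb",  # emits timesfm_pb2_grpc.py
    "timesfm.proto",
])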
senHub.py (CHANGED)

@@ -41,9 +41,17 @@ class SenHub:
        '''
        client_id = self.config.sh_client_id
        client_secret = self.config.sh_client_secret
+        # client_id = "ca95f10f-443c-4c60-9a36-98950292bb9b"
+        # client_secret = "rNFGRxGNiNFrXJfGyHIkVRyGOrdWNsfI"
+
        client = BackendApplicationClient(client_id=client_id)
+        print(f"{client_id=}")
+        print(f"{client_secret=}")
+        # client = BackendApplicationClient(client_id="35fd54ac-3084-423d-8c2c-a8c1309e4770")
+
        oauth = OAuth2Session(client=client)
        token = oauth.fetch_token(token_url='https://services.sentinel-hub.com/oauth/token',client_secret=client_secret)
+        # token = oauth.fetch_token(token_url='https://services.sentinel-hub.com/oauth/token',client_secret="s0a9ltSVkVi3aqhmmdE7kHWPF4p3Muus")
        self.token = token['access_token']
 
    def get_input_data(self, date):
|