import base64
import datetime
import json
import math
import os
import threading
import time
import traceback
from datetime import date
from io import BytesIO
from socket import *
from statistics import median

import fiona
import firebase_admin
import numpy as np
import pandas as pd
import requests
import utm
import xlrd
import xlsxwriter
from fastkml import kml
from firebase_admin import credentials, db, firestore, messaging
from geopy.geocoders import Nominatim
from google.cloud import storage
from google.oauth2 import service_account
from numba import jit, cuda
from oauth2client.service_account import ServiceAccountCredentials
from oct2py import octave
from sentinelhub import (
    WebFeatureService,
    BBox,
    CRS,
    MimeType,
    WmsRequest,
    DataCollection,
)

from gen_report_new2 import gen_report_new
from get_polygon_mask import get_polygon_mask
from make_on_the_go_farm_image import make_on_the_go_farm_image

# Cloud Storage and Firebase initialisation.
storage_client = storage.Client.from_service_account_json("servicekey.json")
bucket_name = "farmbase-b2f7e.appspot.com"
cred = service_account.Credentials.from_service_account_file("servicekey.json")
# cred = credentials.Certificate('servicekey.json')
bucket = storage_client.bucket(bucket_name)

try:
    firebase_admin.initialize_app(
        credentials.Certificate("servicekey.json"),
        {"databaseURL": "https://farmbase-b2f7e-31c0c.firebaseio.com/"},
    )
except:
    # The default app already exists (e.g. another worker initialised it first).
    print("fire running")

db_firestore = firestore.client()

# Sentinel Hub credentials and instance IDs stored in the Realtime Database.
sentinelSettings = db.reference("SentinelSettings4").get()
clientID = sentinelSettings["ClientID"]
clientSecret = sentinelSettings["ClientSecret"]
wmsID = sentinelSettings["WMSID"]
rviID = sentinelSettings["RVIID"]
demID = sentinelSettings["DEMID"]

# Image layers generated per polygon: true colour plus vegetation/water indices and radar layers.
images_arr = ["TCI", "NDVI", "NDWI", "NDRE", "RECL", "RVI", "RSM"]
day_gap = 5
today = date.today()
ndvi_val, ndwi_val = 0, 0
imageDate = today.strftime("%Y%m%d")


def get_from_to_dates(imageDate, day_gap):
    """Build an ISO from/to window ending on imageDate (YYYYMMDD) and spanning roughly day_gap days."""
    current_month = int(imageDate[4:6])
    current_year = int(imageDate[0:4])
    current_date = int(imageDate[6:])
    latestsensedday = 0
    start_year = current_year
    if current_date < day_gap and current_month > 1:
        # Window starts in the previous month of the same year.
        if latestsensedday == 0:
            start_date = abs(31 - current_date - day_gap)
        else:
            start_date = abs(31 - current_date - (day_gap - 1))
        start_month = current_month - 1
        start_year = current_year
    elif current_date < day_gap and current_month == 1:
        # Window starts in December of the previous year.
        if latestsensedday == 0:
            start_date = abs(31 - current_date - day_gap)
        else:
            start_date = abs(31 - current_date - (day_gap - 1))
        start_month = 12
        start_year = start_year - 1
    else:
        if latestsensedday == 0:
            start_date = current_date - day_gap
        else:
            start_date = current_date - (day_gap - 1)
        start_month = current_month
        start_year = current_year
    if start_date == 0:
        start_date = 1
    if current_date == 0:
        current_date = 1
    # Zero-pad month and day components.
    current_month = "0" + str(current_month) if current_month < 10 else str(current_month)
    start_month = "0" + str(start_month) if start_month < 10 else str(start_month)
    current_date = "0" + str(current_date) if current_date < 10 else str(current_date)
    start_date = "0" + str(start_date) if start_date < 10 else str(start_date)
    start_year = str(start_year)
    from_date = start_year + "-" + start_month + "-" + str(start_date)
    to_date = str(current_year) + "-" + current_month + "-" + str(current_date)
    l_date = str(current_year) + current_month + str(current_date)
    from_date = from_date + "T00:00:00"
    to_date = to_date + "T23:59:59"
    return from_date, to_date
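
# Illustrative sketch (an assumption, never called by this script): the same from/to
# window could be derived with datetime arithmetic instead of the manual month/day
# string handling in get_from_to_dates above. The name get_from_to_dates_simple is
# hypothetical and the function is not part of the pipeline.
def get_from_to_dates_simple(imageDate, day_gap):
    end_day = datetime.datetime.strptime(imageDate, "%Y%m%d")
    start_day = end_day - datetime.timedelta(days=day_gap)
    from_date = start_day.strftime("%Y-%m-%d") + "T00:00:00"
    to_date = end_day.strftime("%Y-%m-%d") + "T23:59:59"
    return from_date, to_date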

def get_bounds(points_obj):
    """Return the bounding box plus min/max/centre latitude and longitude of a polygon's coordinates."""
    lats, lngs = [], []
    for key, value in points_obj.items():
        lats.append(value["Latitude"])
        lngs.append(value["Longitude"])
    bounds = [min(lngs), min(lats), max(lngs), max(lats)]
    return (
        bounds,
        min(lats),
        max(lats),
        min(lngs),
        max(lngs),
        sum(lats) / len(lats),
        sum(lngs) / len(lngs),
    )


def upload_file(source_path, destination_path, expire_minutes=15):
    """Upload a local image to Cloud Storage; failures are logged and swallowed."""
    storage_client = storage.Client.from_service_account_json("servicekey.json")
    bucket_name = "farmbase-b2f7e.appspot.com"
    bucket = storage_client.get_bucket(bucket_name)
    blob = bucket.blob(destination_path)
    storage_url = "https://farmonaut.com/Images/data_not_found.jpg"
    try:
        blob.upload_from_filename(source_path)
        # storage_url = blob.generate_signed_url(version="v4", expiration=datetime.timedelta(minutes=expire_minutes), method="GET")
    except:
        print("not there")


def get_days_with_data(sensed_days_arr):
    """For each month group (July-October), pick the first sensed day from 2023, 2022 or 2021."""
    my_yyyymm = ["202407", "202408", "202409", "202410"]
    final_dates = []
    month_groups = (
        ("202307", "202207", "202107"),
        ("202308", "202208", "202108"),
        ("202309", "202209", "202109"),
        ("202310", "202210", "202110"),
    )
    for month_group in month_groups:
        for single_day in sensed_days_arr:
            str_day = str(single_day)
            if any(x in str_day for x in month_group):
                final_dates.append(single_day)
                break
    return final_dates
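
# Illustrative sketch (an assumption, never called by this script): a variant of
# upload_file that also returns a V4 signed download URL, based on the
# generate_signed_url call commented out above. It reuses the module-level bucket and
# the data_not_found placeholder as the fallback URL; the function name is hypothetical.
def upload_file_with_signed_url(source_path, destination_path, expire_minutes=15):
    storage_url = "https://farmonaut.com/Images/data_not_found.jpg"
    try:
        blob = bucket.blob(destination_path)
        blob.upload_from_filename(source_path)
        storage_url = blob.generate_signed_url(
            version="v4",
            expiration=datetime.timedelta(minutes=expire_minutes),
            method="GET",
        )
    except:
        print(traceback.format_exc())
    return storage_url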

def process_tmux_polygons(uid, polygons_arr):
    """Generate satellite images, index values and masks for each (fieldid, polygonid) pair."""
    # Older processing windows kept for reference.
    # n_dates = ['2021-07-11','2021-07-26', '2021-08-26', '2021-09-26', '2021-10-11', '2021-10-26', '2021-11-11', '2021-11-26',
    #     '2021-12-11', '2021-12-26', '2022-01-11', '2022-01-26', '2022-02-11', '2022-02-26', '2022-03-11',
    #     '2022-03-26', '2022-04-11', '2022-04-26', '2022-05-11', '2022-05-26', '2022-06-11', '2022-06-26',
    #     '2022-07-26', '2022-08-26', '2022-09-26', '2022-10-11', '2022-10-26', '2022-11-11', '2022-11-26',
    #     '2022-12-11', '2022-12-26', '2023-01-11', '2023-01-26', '2023-02-11', '2023-02-26', '2023-03-11',
    #     '2023-03-26', '2023-04-11', '2023-04-26', '2023-05-11', '2023-05-26', '2023-06-11', '2023-06-26', '2023-07-26']
    sst = 1
    # n_dates = ["2023-07-15", "2023-07-25", "2023-08-04", "2023-08-14", "2023-08-24", "2023-09-03", "2023-09-13", "2023-09-23",
    #     "2023-10-03", "2023-10-13", "2023-10-23", "2023-11-02", "2023-11-12", "2023-11-22", "2023-12-02", "2023-12-12",
    #     "2023-12-22", "2024-01-01", "2024-01-11", "2024-01-21", "2024-01-31", "2024-02-10", "2024-02-20", "2024-02-26"]
    # n_dates = ['2024-06-27', '2024-07-06', '2024-07-22']
    # Consecutive pairs of these dates define the from/to windows processed below.
    n_dates = ['2024-07-01', '2024-07-15', '2024-08-01', '2024-08-15', '2024-09-01', '2024-09-15', '2024-10-01', '2024-10-17']
    temp_n_dates = n_dates
    # n_dates = ['2023-08-01','2023-08-30','2023-09-01','2023-09-30']
    # n_dates = ['2022-07-01', '2022-07-31']
    # target_yyyymm_array = ['202207']
    poly_num = 0
    for single_polygon in polygons_arr:
        poly_num = poly_num + 1
        print(poly_num)
        fieldid, polygonid = single_polygon[0], single_polygon[1]
        print(fieldid, polygonid)
        is_data_already_processed = 0
        polygon_obj = db.reference("PaidMonitoredFields").child("PMF").child(uid).child(fieldid).child("Polygons").child(polygonid).get()
        # Collect the days that have already been sensed for this polygon.
        sensed_days_arr = []
        try:
            sensed_days_obj = polygon_obj["SensedDays"]
            temp_days_num = 0
            for temp_key, val in sensed_days_obj.items():
                sensed_days_arr.append(int(temp_key))
                temp_days_num = temp_days_num + 1
        except:
            temp_days_num = 0

        # One-off migration that copied old-season Health data onto 2024 dates, kept for reference.
        # try:
        #     final_dates = get_days_with_data(sensed_days_arr)
        #     for single_final_date in final_dates:
        #         str_final_date = str(single_final_date)
        #         str_new_date = '2024' + str_final_date[4:]
        #         print(final_dates, str_final_date, str_new_date)
        #         temp_health_obj = db.reference("PaidMonitoredFields").child("PMF").child(uid).child(fieldid).child("Polygons").child(polygonid).child("Health").child(str_final_date).get()
        #         db.reference("PaidMonitoredFields").child("PMF").child(uid).child(fieldid).child("Polygons").child(polygonid).child("Health").child(str_new_date).set(temp_health_obj)
        #         db.reference("PaidMonitoredFields").child("PMF").child(uid).child(fieldid).child("Polygons").child(polygonid).child("SensedDays").child(str_new_date).set("yes")
        #         time.sleep(1)
        # except:
        #     print(traceback.format_exc())
        #     time.sleep(2)
        #     continue

        # Dictionary to store matched dates for each target_yyyymm
        # matched_dates = {target: [] for target in target_yyyymm_array}
        # # Check each sensed day for every target_yyyymm
        # for target in target_yyyymm_array:
        #     matched_dates[target] = [day for day in sensed_days_arr if str(day).startswith(target)]
        # # Output the results
        # for target, dates in matched_dates.items():
        #     if dates:
        #         # continue
        #         # is_data_already_processed = 1
        #         print(f"Sensed days containing {target}: {dates}")
        #         # break
        #     else:
        #         print(f"No sensed days contain {target}")
        # # Flatten all matched dates and convert to 'YYYY-MM-DD' format for removal from n_dates
        # # Find which target_yyyymm keys had matches
        # keys_with_matches = [key for key, dates in matched_dates.items() if dates]
        # # Create a new array without the dates matching the matched YYYY-MM
        # temp_n_dates = [date for date in n_dates if not any(date.startswith(key[:4] + '-' + key[4:6]) for key in keys_with_matches)]
        # print(f"temp_n_dates: {temp_n_dates}")

        # if len(sensed_days_arr) == 0:
        #     sensed_days_arr = [0]

        if sst == 1:  # temp_days_num < 30:
            # poly_num = poly_num + 1
            # print(poly_num)
            # elif max(sensed_days_arr) > 20251231:
            try:
                # Treat every consecutive pair of dates as one sensing window.
                for ii in range((len(temp_n_dates) - 1)):
                    from_date = temp_n_dates[ii] + "T00:00:00"
                    to_date = temp_n_dates[(ii + 1)] + "T23:59:59"
                    print(from_date, to_date)
                    str_polygon_date = ""
                    polygon_date = None
                    try:
                        for imageType in images_arr:
                            time.sleep(2)
                            # Radar layers come from Sentinel-1; the rest come from Sentinel-2 L1C.
                            if imageType == "RVI" or imageType == "RSM":
                                satellite, INSTANCE_ID = DataCollection.SENTINEL1_IW, rviID
                                day_gap = 20
                            else:
                                satellite, INSTANCE_ID = DataCollection.SENTINEL2_L1C, wmsID
                                day_gap = 5
                            c_map = 1
                            # from_date, to_date = get_from_to_dates(imageDate, day_gap)
                            # print(polygon_obj["Coordinates"])
                            (
                                bounds,
                                polygon_min_lat,
                                polygon_max_lat,
                                polygon_min_long,
                                polygon_max_long,
                                polygon_center_lat,
                                polygon_center_long,
                            ) = get_bounds(polygon_obj["Coordinates"])
                            points_obj = polygon_obj["Coordinates"]
                            if imageType in ["NDVI", "NDWI", "RECL", "NDRE", "RVI", "RSM"]:
                                polygon_date, index_val = make_on_the_go_farm_image(
                                    c_map,
                                    uid,
                                    fieldid,
                                    polygonid,
                                    satellite,
                                    imageType,
                                    bounds,
                                    from_date,
                                    to_date,
                                    clientID,
                                    clientSecret,
                                    INSTANCE_ID,
                                )
                                # str_polygon_date is set by the TCI branch, which runs first in images_arr.
                                if str_polygon_date != "":
                                    str_index_val = str(index_val)
                                    # str_polygon_date = polygon_date.replace("-","")
                                    db.reference("PaidMonitoredFields").child("PMF").child(uid).child(
                                        fieldid
                                    ).child("Polygons").child(polygonid).child("Health").child(
                                        str_polygon_date
                                    ).child(imageType).set(str_index_val)
                                    get_polygon_mask(
                                        uid,
                                        polygonid,
                                        imageType,
                                        points_obj,
                                        bounds[3],
                                        bounds[1],
                                        bounds[2],
                                        bounds[0],
                                    )
                                    # upload_file((uid + '/' + polygonid + '/' + imageType + '.png'), ('PaidMonitoredFieldsPolygons/' + uid + '/' + fieldid + '/' + polygonid + '/' + str_polygon_date + '/' + imageType + 'png'))
                                    if imageType == "NDVI":
                                        # The second assignment overrides the scaled value.
                                        ndvi_val = round(index_val) / 100
                                        ndvi_val = index_val
                                    elif imageType == "NDWI":
                                        ndwi_val = round(index_val) / 100
                                        ndwi_val = index_val
                                    # print([ndvi_val, ndwi_val])
                            else:
                                temp_date, y = make_on_the_go_farm_image(
                                    c_map,
                                    uid,
                                    fieldid,
                                    polygonid,
                                    satellite,
                                    imageType,
                                    bounds,
                                    from_date,
                                    to_date,
                                    clientID,
                                    clientSecret,
                                    INSTANCE_ID,
                                )
                                if temp_date is not None:
                                    get_polygon_mask(
                                        uid,
                                        polygonid,
                                        imageType,
                                        points_obj,
                                        bounds[3],
                                        bounds[1],
                                        bounds[2],
                                        bounds[0],
                                    )
                                    str_polygon_date = temp_date.replace("-", "")
                                    # upload_file((uid + '/' + polygonid + '/' + imageType + '.png'), ('PaidMonitoredFieldsPolygons/' + uid + '/' + fieldid + '/' + polygonid + '/' + str_polygon_date + '/' + imageType + 'png'))
                            if str_polygon_date != "":
                                if imageType == "TCI":
                                    etci_file_name = uid + "/" + polygonid + "/TCI.png"
                                    octave.make_polygon_etci(etci_file_name, uid)
                                    get_polygon_mask(
                                        uid,
                                        polygonid,
                                        imageType,
                                        points_obj,
                                        bounds[3],
                                        bounds[1],
                                        bounds[2],
                                        bounds[0],
                                    )
                                upload_file(
                                    (uid + "/" + polygonid + "/" + imageType + ".png"),
                                    ("PaidMonitoredFieldsPolygons/" + uid + "/" + fieldid + "/" + polygonid + "/" + str_polygon_date + "/" + imageType + ".png"),
                                )
                        try:
                            if uid == "TCXcp5VIsfhHZrh0nm2VsgBtcGy2":
                                f_address = (
                                    "Farmer Code: "
                                    + polygon_obj["Name"]
                                    + ", Land ID: "
                                    + polygon_obj["PhoneNumber"]
                                )
                            else:
                                f_address = (
                                    "Farmer Name: "
                                    + polygon_obj["Name"]
                                    + ", Phone Number: "
                                    + polygon_obj["PhoneNumber"]
                                )
                        except:
                            f_address = "Farm"
                        # if str_polygon_date != '':
                        #     print(polygon_date)
                        #     polygon_date = polygon_date.replace("-", "")
                        #     get_polygon_mask(uid, polygonid, (imageType + "_CMAP2"), points_obj, bounds[3], bounds[1], bounds[2], bounds[0])
                        # gen_report_new(uid, field_address, center_lat, center_long, imagedate, fieldid, field_area, lang, failed, whitelabel, rangeLatLng, points, polygonID=None)
                        print(
                            [
                                polygon_center_lat,
                                polygon_center_long,
                                polygon_min_lat,
                                polygon_max_lat,
                                polygon_min_long,
                                polygon_max_long,
                            ]
                        )
                        # Mark the window as sensed for this polygon.
                        db.reference("PaidMonitoredFields").child("PMF").child(uid).child(
                            fieldid
                        ).child("Polygons").child(polygonid).child("SensedDays").child(
                            str_polygon_date
                        ).set(
                            "yes"
                        )
                        # time.sleep(80060)
                    except:
                        print(traceback.format_exc())
                        time.sleep(5)
            except:
                print(traceback.format_exc())
                time.sleep(0.1)
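
# Illustrative sketch (an assumption, never called by this script): read back the
# per-polygon nodes written above, to show the Realtime Database layout
# PaidMonitoredFields/PMF/{uid}/{fieldid}/Polygons/{polygonid}/{SensedDays, Health}.
# The function name print_polygon_summary is hypothetical.
def print_polygon_summary(uid, fieldid, polygonid):
    polygon_ref = (
        db.reference("PaidMonitoredFields")
        .child("PMF")
        .child(uid)
        .child(fieldid)
        .child("Polygons")
        .child(polygonid)
    )
    sensed_days = polygon_ref.child("SensedDays").get() or {}
    for sensed_day in sorted(sensed_days.keys()):
        # Health holds one index value per image type (e.g. NDVI, NDWI) keyed by YYYYMMDD.
        health = polygon_ref.child("Health").child(sensed_day).get() or {}
        print(sensed_day, health)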
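
# Illustrative sketch (an assumption, not executed): a driver along the lines of the
# commented-out loop above — list every (fieldid, polygonid) pair for one account,
# split the list into n_tmux roughly equal chunks and hand each chunk to
# process_tmux_polygons. Report generation and WhatsApp delivery from the notes above
# are out of scope here. The function name run_polygon_driver is hypothetical and the
# function is intentionally never called.
def run_polygon_driver(uid, n_tmux=28):
    all_polygons_arr = []
    # Shallow reads (get(False, True)) return only the keys, as in the reference driver.
    field_list = db.reference("PaidMonitoredFields").child("PMF").child(uid).get(False, True) or {}
    for fieldid in field_list:
        polygons_obj = (
            db.reference("PaidMonitoredFields")
            .child("PMF")
            .child(uid)
            .child(fieldid)
            .child("Polygons")
            .get(False, True)
        )
        if polygons_obj:
            for polygonid in polygons_obj:
                all_polygons_arr.append((fieldid, polygonid))
    # Chunk the polygon list so each tmux worker gets a contiguous slice.
    interval = max(1, round(len(all_polygons_arr) / n_tmux))
    for i in range(n_tmux):
        if i == n_tmux - 1:
            chunk = all_polygons_arr[i * interval:]
        else:
            chunk = all_polygons_arr[i * interval: (i + 1) * interval]
        if chunk:
            process_tmux_polygons(uid, chunk)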