from socket import * from oct2py import octave # from io import BytesIO # import base64 from google.cloud import storage # from oauth2client.service_account import ServiceAccountCredentials from send_webhook_data import send_webhook_data import os import time import firebase_admin from firebase_admin import credentials from firebase_admin import db from PIL import Image, ImageFilter # from send_notification import send_notification # from sendemail import sendemail import traceback # from sen_start_noti import sen_start_noti # from send_sar_email import send_sar_email # import json # import scipy # import random from gen_report_new2 import gen_report_new import time import datetime from merge_sar import merge_sar from find_sar import find_sar from merge_dem import merge_dem from find_dem import find_dem # from scipy import ndimage # from make_bigquery import make_bigquery from send_moni_noti import send_moni_noti from send_error_noti import send_error_noti from gmap_image import gmap_image from gmap_image_large import gmap_image_large from datetime import date, timedelta # from find_img import find_img # from find_img_large import find_img_large # from merge_img import merge_img # from all_proc import all_proc # from contour_images import contour_images from send_expiring_noti import send_expiring_noti from send_expired_noti import send_expired_noti # from make_trial_bigquery import make_trial_bigquery # from gen_geotiff import gen_geotiff # from sendgeotifs import sendgeotifs # from gen_report import gen_report from get_weather_data import get_weather_data # from sendonlyreport import sendonlyreport # from gen_failed_report import gen_failed_report from sendfailedreport import sendfailedreport from map_coords import map_coords from search_new_sentinel import search_new_sentinel # from convert_to_pdf import convert_to_pdf # from latlon_jp2_to_pixel import latlon_jp2_to_pixel from gen_geotiff2 import gen_geotiff2 # from search_sentinel_again import search_sentinel_again # from get_prev_date import get_prev_date # from make_bigquery_again import make_bigquery_again # import requests # import pdftotree # from convert_to_html import convert_to_html # from geopy.geocoders import Nominatim from firebase_admin import firestore from get_land_use import get_land_use from make_dir import make_dir # import threading import pytz # from create_interactive_html_report import make_interactive_html_report import process_field_flag # from send_whatsapp2024 import generate_jeevnai_whatsapp_image from satellite_visit_email import send_html_email def server2024(uid, fieldid, session_type): """ Function to process/generate a field satellite images :param uid: (string) user id :param fieldid: (string) field id :param session_type: (string) expected values - main, api, bulk :return: None """ # os.system("rm -rf AwsData") ## init firebase cred = credentials.Certificate("servicekey.json") try: firebase_admin.initialize_app( cred, {"databaseURL": "https://farmbase-b2f7e-31c0c.firebaseio.com/"} ) except Exception as e: print(e) dbF = firestore.client() # storage_client = storage.Client(project='farmbase-b2f7e') # bucket_name = "farmbase-b2f7e.appspot.com" ## init global variables today = date.today() d1 = today.strftime("%Y%m%d") # new_field = 0 ## fetch data for processing; get uid_list - uid to field_ids to True (constant value) sentinelSettings = db.reference("SentinelSettings4").get() disabledUIDs = [ "snQYQZqQx3SmVbRztmEqYn5Mkcz2", "KZQ7TZIYXnXN0b07OtrL1hlyYij1", "CeMGYvLXrGR5ZThxZ46iV7vY8sa2", "TCXcp5VIsfhHZrh0nm2VsgBtcGy2", "mFFHQdEtiSbn2hbYQAwwoIdYVi02", "4fPRPyszwLOjweG1qbpiCx3CFQo1", "KYThGMoIlISJG4mVXArK6Y5QPDh1", "hOV9C5CalZc9mIYCQQxplvgAfkG3", "ipRHhCOFIDV2pxgg7Nfz1ufZBmV2", "4fPRPyszwLOjweG1qbpiCx3CFQo1" ] if uid not in disabledUIDs: if uid == None and fieldid == None: # no uid and fieldid provided; process all except disabled uids temp_list = ( db.reference("PaidMonitoredFields").child("PMF").get(False, True) ) uid_list = {} # loop over uids to get it's field-IDs for m_uid, _ in temp_list.items(): if m_uid not in disabledUIDs: uid_list[m_uid] = ( db.reference("PaidMonitoredFields") .child("PMF") .child(m_uid) .get(False, True) ) elif uid != None and fieldid == None: # uid provided, but no fieldid; get all field_ids of this uid field_ids_obj = ( db.reference("PaidMonitoredFields") .child("PMF") .child(uid) .get(False, True) ) uid_list = {uid: field_ids_obj} else: # both uid and fieldid provided # field_obj_keys = ( # db.reference("PaidMonitoredFields") # .child("PMF") # .child(uid) # .child(fieldid) # .get(False, True) # ) # fieldList = {} # fieldList[fieldid] = field_obj_keys uid_list = {uid: {fieldid: True}} else: # given uid is disabled; process not uid or field uid_list = {} print("uid_list-len, uid, fieldid", len(uid_list), uid, fieldid) # img_count = 1 # lets_monitor = 1 # latest_rvi_day = 0 # last_latest_rvi_day = 0 # last_sensed_day = 0 # report_format = "" # wes1 = 1#print(uid) # try: # if uid not in disabledUIDs and uid not in ["M53J9SImW9My4vghlCaaWLYxsc22","ipRHhCOFIDV2pxgg7Nfz1ufZBmV2","D4xU2QGhooXfK6qiEeHdAlp0wk53"]: # make_interactive_html_report(uid) # bucket = storage_client.get_bucket(bucket_name) # blob = bucket.blob(("PaidMonitoredFields/" + uid + "/interactive_report.html")) # blob.upload_from_filename((uid + "/interactive_report.html")) # except Exception as e: # wes1 = 1#print(traceback.format_exc()) # def make_dir(uid, fieldid): # if not os.path.exists(uid): # os.makedirs(uid) # if not os.path.exists((uid + '/' + fieldid)): # os.makedirs((uid + '/' + fieldid)) ## FUNCTIONS TO BE USED IN PROCESSING def get_to_and_from_date_radar(requested_date, latestsensedday): current_date = int(requested_date[6:]) current_month = int(requested_date[4:6]) current_year = int(requested_date[:4]) start_year = current_year if current_date < 13 and current_month > 1: if latestsensedday == 0: start_date = 31 - current_date - 13 else: start_date = 31 - current_date - 12 start_month = current_month - 1 start_year = current_year elif current_date < 13 and current_month == 1: if latestsensedday == 0: start_date = 31 - current_date - 13 else: start_date = 31 - current_date - 12 start_month = 12 start_year = start_year - 1 else: if latestsensedday == 0: start_date = current_date - 13 else: start_date = current_date - 12 start_month = current_month start_year = current_year if start_date == 0: start_date = 1 if current_date == 0: current_date = 1 if current_month < 10: current_month = "0" + str(current_month) else: current_month = str(current_month) if start_month < 10: start_month = "0" + str(start_month) else: start_month = str(start_month) if current_date < 10: current_date = "0" + str(current_date) else: current_date = str(current_date) if start_date < 10: start_date = "0" + str(start_date) else: start_date = str(start_date) start_year = str(start_year) from_date = start_year + "-" + start_month + "-" + str(start_date) to_date = str(current_year) + "-" + current_month + "-" + str(current_date) l_date = str(current_year) + current_month + str(current_date) from_date = from_date + "T00:00:00" to_date = to_date + "T23:59:59" return from_date, to_date def get_to_and_from_date(requested_date, latestsensedday): current_date = int(requested_date[6:]) current_month = int(requested_date[4:6]) current_year = int(requested_date[:4]) start_year = current_year if current_date < 5 and current_month > 1: if latestsensedday == 0: start_date = 31 - current_date - 5 else: start_date = 31 - current_date - 4 start_month = current_month - 1 start_year = current_year elif current_date < 5 and current_month == 1: if latestsensedday == 0: start_date = 31 - current_date - 5 else: start_date = 31 - current_date - 4 start_month = 12 start_year = start_year - 1 else: if latestsensedday == 0: start_date = current_date - 5 else: start_date = current_date - 4 start_month = current_month start_year = current_year if start_date == 0: start_date = 1 if current_date == 0: current_date = 1 if current_month < 10: current_month = "0" + str(current_month) else: current_month = str(current_month) if start_month < 10: start_month = "0" + str(start_month) else: start_month = str(start_month) if current_date < 10: current_date = "0" + str(current_date) else: current_date = str(current_date) if start_date < 10: start_date = "0" + str(start_date) else: start_date = str(start_date) start_year = str(start_year) from_date = start_year + "-" + start_month + "-" + str(start_date) to_date = str(current_year) + "-" + current_month + "-" + str(current_date) l_date = str(current_year) + current_month + str(current_date) from_date = from_date + "T00:00:00" to_date = to_date + "T23:59:59" return from_date, to_date def format_date(yyyymmdd, time): start_year = yyyymmdd[:4] start_month = yyyymmdd[4:6] start_date = yyyymmdd[6:] new_date = start_year + "-" + start_month + "-" + start_date + time return new_date def is_data_already_processed(uid, fieldid): latest_day = ( db.reference("PaidMonitoredFields") .child("PMF") .child(uid) .child(fieldid) .child("LatestDay") .get() ) # fieldobj["Expired"] today = date.today() today_yyyymmdd = today.strftime("%Y%m%d") if latest_day is not None: day_diff = int(today_yyyymmdd) - int(latest_day) else: day_diff = 5 if day_diff < 5: print(("farm recently processed: ", latest_day, day_diff)) return True else: return False def shall_we_process_data(uid, fieldid): """ Tells if we should process a field based on its subscription, api credits, pause/expire status. :param UID (str): User ID :param FieldID (str): Field ID :return: (dict) {'process_field': bool, 'process_as': int, 'credits_needed': int*, 'visits_needed': int*, user_obj: dict, org_api_obj: dict} - 'process_field': True if we should process the field, False otherwise - 'process_as': 0 - pag, 1 - sub, 2 - api """ result = process_field_flag.process_field(uid, fieldid) print("should process data result", result) ##result["process_field"] = True # result["update_counter"] = False return result def satellite_data( uid, lang, fieldid, fromdate, todate, sentinelSettings, radar_from_date, radar_to_date, ): """ Main function of satellite data processing """ clientID = sentinelSettings["ClientID"] clientSecret = sentinelSettings["ClientSecret"] wmsID = sentinelSettings["WMSID"] rviID = sentinelSettings["RVIID"] demID = sentinelSettings["DEMID"] imagedate = None wes1 = 1 # print(fieldid) fie = ( db.reference("PaidMonitoredFields") .child("PMF") .child(uid) .child(fieldid) .get() ) latestsensedday = 0 latestfailedday = 0 latest_rvi_day = 0 latest_dem_day = 0 last_latest_rvi_day = 0 last_latest_dem_day = 0 report_return_obj = "yes" try: f_address = fie["FieldDescription"] except Exception as e: try: f_address = fie["FieldAddress"] except Exception as e: f_address = "Farm" whitelabel = "farmonaut" try: whitelabel = fie["Whitelabel"] except Exception as e: whitelabel = "farmonaut" wes1 = 1 # print(["whitelabel", whitelabel]) try: whitelabelObj = db.reference("WhitelabelEmails").child(whitelabel).get() whitelabelEmail = whitelabelObj["Email"] whitelabelPassword = whitelabelObj["Password"] except Exception as e: whitelabelEmail = None whitelabelPassword = None try: lang = fie["Language"] except Exception as e: lang = "en" if uid == "HC1KG5a2e1ZhXaPMpEcylZmeMYM2": lang = "ar" elif uid == "snQYQZqQx3SmVbRztmEqYn5Mkcz2": lang = "te" elif uid == "mFFHQdEtiSbn2hbYQAwwoIdYVi02": lang = "uz" elif uid == "8aGkNQm166bmk8cjHVHtwGli2DD2": lang = "pa,hi,en" elif "biopixel" in whitelabel: lang = "fr,ar" # elif uid == '8aGkNQm166bmk8cjHVHtwGli2DD2': # lang = 'pa' try: fieldDescription = fie["FieldDescription"] except Exception as e: fieldDescription = "not available" # wes1 = 1#print("fdd: " + fieldDescription) try: PlantDistance = fie["PlantDistance"] except Exception as e: PlantDistance = 0 try: field_paid = fie["Paid"] field_paid = field_paid.lower() if field_paid == "yes": has_paid = 1 else: has_paid = 0 imgdate_in_senseddays_flag = 0 imgdate_in_faileddays_flag = 0 senseddays = None big_query_performed = 0 paused = 0 expired = 0 faileddays = None wes1 = 1 # print("Made the payment") except Exception as e: wes1 = 1 # print("payment not done") has_paid = 0 imgdate_in_senseddays_flag = 0 imgdate_in_faileddays_flag = 0 senseddays = None big_query_performed = 0 paused = 0 expired = 0 faileddays = None trialprocessed = 1 if has_paid == 1: imgdate_in_senseddays_flag = 0 imgdate_in_faileddays_flag = 0 big_query_performed = 0 paused = 0 expired = 0 totalpaidmonths = 0 totalsenseddays = 0 try: senseddays = fie["SensedDays"] except Exception as e: senseddays = None try: faileddays = fie["FailedDays"] except Exception as e: faileddays = None coordinates = fie["Coordinates"] fieldarea = fie["FieldArea"] field_area = fieldarea try: pp = fie["Paused"] if pp == "yes": paused = 1 else: paused = 0 except Exception as e: paused = 0 try: ee = fie["Expired"] if int(ee) == 0: expired = 0 else: expired = 1 except Exception as e: expired = 0 # get total months paid try: tmp = fie["TotalPaidMonths"] totalpaidmonths = int(tpm) except Exception as e: try: tpm = fie["PaymentType"] totalpaidmonths = float(tpm) except Exception as e: tpm = 0 totalpaidmonths = 0 # get latest sensed day try: senseddays = fie["SensedDays"] for u, v in senseddays.items(): sensed_day = u totalsenseddays = totalsenseddays + 1 if int(sensed_day) > int(latestsensedday): latestsensedday = sensed_day last_sensed_day = latestsensedday # wes1 = 1#print("LSD") # wes1 = 1#print(latestsensedday) new_field = 0 except Exception as e: # wes1 = 1#print("new field") new_field = 1 totalsardays = 0 try: sar_days = fie["SARDays"] for u, v in senseddays.items(): sensed_day = u totalsardays = totalsardays + 1 if int(sensed_day) > int(latest_rvi_day): latest_rvi_day = sensed_day last_latest_rvi_day = latest_rvi_day # wes1 = 1#print("LSDR") # wes1 = 1#print(latest_rvi_day) new_field = 0 except Exception as e: # wes1 = 1#print("new field") new_field = 1 totaldemdays = 0 try: dem_days = fie["DEMDays"] for u, v in senseddays.items(): sensed_day = u totaldemdays = totaldemdays + 1 if int(sensed_day) > int(latest_dem_day): latest_dem_day = sensed_day last_latest_dem_day = latest_dem_day # wes1 = 1#print("LSDD") # wes1 = 1#print(latest_dem_day) new_field = 0 except Exception as e: # wes1 = 1#print("new field") new_field = 1 totalfaileddays = 0 try: faileddays = fie["FailedDays"] for u, v in faileddays.items(): failed_day = u totalfaileddays = totalfaileddays + 1 if int(failed_day) > int(latestfailedday): latestfailedday = failed_day # wes1 = 1#print("LFD") # wes1 = 1#print(latestfailedday) except Exception as e: aswadq = 1 ##wes1 = 1#print("no failed day") # wes1 = 1#print("tpm") # wes1 = 1#print(totalpaidmonths) trialenabled = "no" intfieldid = float(fieldid) intfieldid = intfieldid / 1000 orderdate = datetime.datetime.fromtimestamp(intfieldid).strftime( "%Y-%m-%d %H:%M:%S.%f" ) orderdate = orderdate[:10] orderyear = orderdate[:4] ordermonth = orderdate[5:7] orderday = orderdate[8:10] todayyear = d1[:4] todaymonth = d1[4:6] todayday = d1[6:] f_date = date(int(orderyear), int(ordermonth), int(orderday)) l_date = date(int(todayyear), int(todaymonth), int(todayday)) delta = l_date - f_date ndays = delta.days # wes1 = 1#print("ndays") # wes1 = 1#print(ndays) if ( (totalsenseddays >= 6 * totalpaidmonths + 1) and (ndays > 30 * totalpaidmonths) and (totalpaidmonths != -2) ): expired = 1 db.reference("PaidMonitoredFields").child("PMF").child(uid).child( fieldid ).child("Expired").set("1") send_expired_noti(uid) # wes1 = 1#print("expired") else: db.reference("PaidMonitoredFields").child("PMF").child(uid).child( fieldid ).child("Expired").delete() expired = 0 if ( (6 * totalpaidmonths - totalsenseddays < 3) and (30 * totalpaidmonths - ndays < 10) and (totalpaidmonths != -2) ): db.reference("PaidMonitoredFields").child("PMF").child(uid).child( fieldid ).child("Expiring").set("yes") else: db.reference("PaidMonitoredFields").child("PMF").child(uid).child( fieldid ).child("Expiring").set("no") # req_json_sar = find_sar(uid,fieldid,latest_rvi_day, expired, paused, fromdate, todate) # latest_rvi_day = req_json_sar["LatestDay"] if fromdate != None: latestsensedday = 0 latestfailedday = 0 latest_rvi_day = 0 latest_dem_day = 0 else: # latestsensedday = 0 latestfailedday = 0 latest_rvi_day = 0 latest_dem_day = 0 if latestsensedday == None: latestsensedday = 0 # ttg = "ttg" # paused = 0 try: ttgData = fie["TTGData"] except Exception as e: ttgData = None # wes1 = 1#print("dwdqw") # wes1 = 1# print(d1, latestsensedday, expired, paused, new_field, has_paid) if ( (int(d1) - int(latestsensedday) > 4) and (expired == 0) and (paused == 0) ): # last report build is older than 4 days & field not expired or paused; or historical req # wes1 = 1#print("inin") # db.reference('PaidMonitoredFields').child('PMF').child(uid).child(fieldid).child("Paused").set('no') if has_paid == 1 and expired == 0: # if new_field == 1 and payf == 1: # wes1 = 1#print("inin2") req_json_sar = find_sar( uid, fieldid, fie, latest_rvi_day, expired, paused, radar_from_date, radar_to_date, clientID, clientSecret, rviID, ) latest_rvi_day = req_json_sar["LatestDay"] req_json_land = get_land_use( uid, fieldid, fie, latestsensedday, latestfailedday, expired, paused, fromdate, todate, clientID, clientSecret, wmsID, ) req_json_dem = find_dem( uid, fieldid, fie, latest_dem_day, expired, paused, fromdate, todate, clientID, clientSecret, demID, ) latest_dem_day = req_json_dem["LatestDay"] req_json = search_new_sentinel( uid, fieldid, fie, latestsensedday, latestfailedday, expired, paused, fromdate, todate, clientID, clientSecret, wmsID, ) # elif new_field == 0 and payf == 1 and expired == 0: # #wes1 = 1#print("inin3") # req_json_sar = find_sar( # uid, # fieldid, # fie, # latest_rvi_day, # expired, # paused, # fromdate, # todate, # clientID, # clientSecret, # rviID, # ) # latest_rvi_day = req_json_sar["LatestDay"] # req_json_land = get_land_use( # uid, # fieldid, # fie, # latestsensedday, # latestfailedday, # expired, # paused, # fromdate, # todate, # clientID, # clientSecret, # wmsID, # ) # req_json_dem = find_dem( # uid, # fieldid, # fie, # latest_dem_day, # expired, # paused, # fromdate, # todate, # clientID, # clientSecret, # demID, # ) # latest_dem_day = req_json_dem["LatestDay"] # req_json = search_new_sentinel( # uid, # fieldid, # fie, # latestsensedday, # latestfailedday, # expired, # paused, # fromdate, # todate, # clientID, # clientSecret, # wmsID, # ) else: # wes1 = 1#print("notpaid") req_json = {} req_json["MGRS"] = "NotA" req_json["LatestDay"] = d1 new_string_json = req_json imagedate = str(req_json["LatestDay"]) # wes1 = 1#print("im_date") # wes1 = 1#print(imagedate) uid = current_uid # wes1 = 1#print(uid) # wes1 = 1#print(req_json) big_query_performed = 1 mgrs = req_json["MGRS"] # wes1 = 1#print("mgrs") # wes1 = 1#print(mgrs) try: previous_day = db.reference("LatestTileDates").child(mgrs).get() except Exception as e: db.reference("LatestTileDates").child(str(mgrs)).set(imagedate) if previous_day is None: previous_day = "11" # wes1 = 1#print("p_date") # wes1 = 1#print(previous_day) # if int(previous_day)= 0: result_status = uid + "failed4" else: result_status = "successful" if result_status.find("failed") < 0: # 'failed' not in result_status new_string_json["StartPixelLat"] = 180 new_string_json["EndPixelLat"] = 180 new_string_json["StartPixelLong"] = 180 new_string_json["EndPixelLong"] = 180 new_string_json["FieldID"] = fieldid new_string_json["PlantDistance"] = PlantDistance result_status = makeFieldImages(new_string_json) # try: # aqw2 = octave.check_clouds(uid) # except Exception as e: # aqw2 = 2 aqw2 = 0 diff_count = 0 else: # field doesn't follow basic checks result_status = "failed0" storage_client = storage.Client(project="farmbase-b2f7e") bucket_name = "farmbase-b2f7e.appspot.com" try: images_array = [ "tci", "etci", "ndvi", "evi", "rvi", "rsm", "ndwi", "ndre", "vari", "soc", "savi", "ndmi", "evapo", "avi", "bsi", "si", "dem", "hybrid_blind", "vssi", "lulc", "lai", ] images_array2 = [ "soc_cmap2", "ndre_cmap2", "ndvi_cmap2", "evi_cmap2", "ndwi_cmap2", "vari_cmap2", "savi_cmap2", "avi_cmap2", "bsi_cmap2", "si_cmap2", "ndmi_cmap2", "vssi_cmap2", "lai_cmap2", ] images_array3 = [ "tci.tif", "hybrid.tif", "etci.tif", "ndvi.tif", "evi.tif", "soc.tif" "ndre.tif", "vari.tif", ] images_array4 = [ "ndvi_pie", "evi_pie", "ndwi_pie", "ndre_pie", "vari_pie", "savi_pie", "avi_pie", "bsi_pie", "si_pie", "soc_pie", "ndmi_pie", "vssi_pie", "lai_pie", "ndvi_linegraph", "evi_linegraph", "ndwi_linegraph", "ndre_linegraph", "vari_linegraph", "savi_linegraph", "avi_linegraph", "bsi_linegraph", "si_linegraph", "soc_linegraph", "ndmi_linegraph", "vssi_linegraph", "weather_linegraph", "lai_linegraph", ] for imgName in images_array2: images_array.append(imgName) for imgName in images_array4: images_array.append(imgName) tif_array = [] for imgName in images_array: tif_array.append(imgName) for imgName in images_array2: tif_array.append(imgName) except Exception as e: print(e) wes1 = 1 ##wes1 = 1#print("problem in assigning storage address") print(latest_rvi_day,last_latest_rvi_day) latest_rvi_day = latest_rvi_day.replace("-","") if ( int(latest_rvi_day) > int(last_latest_rvi_day) and expired == 0 and paused == 0 ): try: fieldmaxlat = new_string_json["FieldMaxLat"] fieldminlat = new_string_json["FieldMinLat"] fieldminlong = new_string_json["FieldMinLong"] fieldmaxlong = new_string_json["FieldMaxLong"] aqpp = merge_dem( uid, fieldid, coordinates, latest_dem_day, fieldmaxlat, fieldminlat, fieldmaxlong, fieldminlong, ) destination_blob_name_dem = ( "PaidMonitoredFields/" + uid + "/" + fieldid + "/" + latest_rvi_day + "/dem" ) dem_file_name = uid + "/" + fieldid + "/dem.png" bucket = storage_client.get_bucket(bucket_name) blob = bucket.blob(destination_blob_name_dem) blob.upload_from_filename(dem_file_name) except Exception as e: print(traceback.format_exc()) wes1 = 1 # wes1 = 1#print("dem error") if ( int(latest_rvi_day) > int(last_latest_rvi_day) and expired == 0 and paused == 0 ): db.reference("PaidMonitoredFields").child("PMF").child(uid).child( fieldid ).child("SARDays").child(str(latest_rvi_day)).set("yes") try: fieldmaxlat = new_string_json["FieldMaxLat"] fieldminlat = new_string_json["FieldMinLat"] fieldminlong = new_string_json["FieldMinLong"] fieldmaxlong = new_string_json["FieldMaxLong"] aqpp = merge_sar( uid, fieldid, coordinates, latest_rvi_day, fieldmaxlat, fieldminlat, fieldmaxlong, fieldminlong, ) s1_imgs = ["rvi", "rsm"] for temp_img in s1_imgs: destination_blob_name_rvi = ( "PaidMonitoredFields/" + uid + "/" + fieldid + "/" + latest_rvi_day + "/" + temp_img ) rvi_file_name = uid + "/" + fieldid + "/" + temp_img + ".png" bucket = storage_client.get_bucket(bucket_name) blob = bucket.blob(destination_blob_name_rvi) blob.upload_from_filename(rvi_file_name) aqpp = merge_dem( uid, fieldid, coordinates, latest_dem_day, fieldmaxlat, fieldminlat, fieldmaxlong, fieldminlong, ) destination_blob_name_dem = ( "PaidMonitoredFields/" + uid + "/" + fieldid + "/" + latest_dem_day + "/dem" ) dem_file_name = uid + "/" + fieldid + "/dem.png" bucket = storage_client.get_bucket(bucket_name) blob = bucket.blob(destination_blob_name_dem) blob.upload_from_filename(dem_file_name) attachments = set() # attachments.add(rvi_file_name) useremail = fie.get("Email", "NA") # try: # f_address = fie["FieldAddress"] # except Exception as e: # f_address = "not available" except Exception as e: print(e) wes1 = 1 ##wes1 = 1#print(e) if result_status.find("successful") < 0: if result_status.find("failed0") > 0: uid = result_status.replace("failed0", "") # wes1 = 1#print(uid) db.reference("PaidMonitoredFields").child("PMF").child(uid).child( fieldid ).child("FailedDays").child(imagedate).set("yes") send_error_noti(uid) if result_status.find("failed1") > 0: uid = result_status.replace("failed1", "") if generate_tifs == 1: bucket = storage_client.get_bucket(bucket_name) useremail = fie.get("Email", "NA") # f_address = fie["FieldAddress"] report_return_obj = gen_report_new( uid, f_address, centerlat, centerlong, imagedate, fieldid, fieldarea, lang, 1, whitelabel, [fieldminlat, fieldminlong, fieldmaxlat, fieldmaxlong], coordinates, None, 0, 0, fie, ) ff = f_address ff = ff.replace(" ", "_") ff = ff.replace(",", "_") ff = ff.replace("/", "_") doc_name_html = uid + "/" + fieldid + "/report.html" blob = bucket.blob( ( "PaidMonitoredFields/" + uid + "/" + fieldid + "/" + imagedate + "/report.html" ) ) # blob.upload_from_filename(doc_name_html) upload_reports( lang, uid, whitelabel, imagedate, ff, fieldid, bucket ) # attachments.add(doc_name) sendfailedreport( attachments, useremail, f_address, imagedate, whitelabel, whitelabelEmail, whitelabelPassword, ) db.reference("PaidMonitoredFields").child("PMF").child(uid).child( fieldid ).child("FailedDays").child(imagedate).set(report_return_obj) send_error_noti(uid) if result_status.find("failed2") > 0: uid = result_status.replace("failed2", "") db.reference("PaidMonitoredFields").child("PMF").child(uid).child( fieldid ).child("FailedDays").child(imagedate).set("yes") send_error_noti(uid) if result_status.find("failed4") > 0: uid = result_status.replace("failed4", "") else: # result_status is successful uid = result_status.replace("successful", "") # aqw2 = octave.check_clouds(uid) aqw2 = 0 p_date = latestsensedday c_lat = None p_coords = 0 if aqw2 < 2: fieldlatlen = abs(float(fieldmaxlat) - float(fieldminlat)) fieldlonglen = abs(float(fieldmaxlong) - float(fieldminlong)) map_coords( uid, fieldid, coordinates, imagedate, fieldmaxlat, fieldminlat, fieldmaxlong, fieldminlong, cropType, tif_array, field_area, ) try: imgs = ["rvi", "rsm"] for img in imgs: destination_blob_name_rvi = ( "PaidMonitoredFields/" + uid + "/" + fieldid + "/" + latest_rvi_day + "/" + img ) rvi_file_name = uid + "/" + fieldid + "/" + img + ".png" bucket = storage_client.get_bucket(bucket_name) blob = bucket.blob(destination_blob_name_rvi) blob.upload_from_filename(rvi_file_name) except Exception as e: print(traceback.format_exc()) wes1 = 1#print("rvi error2") ndvi, ndwi = get_indices(uid, fieldid, imagedate) # f_address = db.reference('PaidMonitoredFields').child('PMF').child(uid).child(fieldid).child('FieldAddress').get() centerlat = centerlat_large centerlong = centerlong_large # if f_address is None: # f_address = 'not defined' if generate_tifs == 1: # FieldObj GenTiff is yes if f_address.find("India") < 0 or gen_tif == "abs_yes": # Is not in India or FieldObj GenTiff is abs_yes for imageName in tif_array: tifName = imageName + ".tif" gen_geotiff2(new_string_json, uid, imageName, tifName) try: for imageName in tif_array: if imageName == "tci.tif": imageName == "TCI.tif" elif imageName == "etci.tif": imageName = "ETCI.tif" imageAddress = uid + "/" + fieldid + "/" + imageName imageDestination = ( "PaidMonitoredFields/" + uid + "/" + fieldid + "/" + imagedate + "/" + imageName ) blob = bucket.blob(imageDestination) try: blob.upload_from_filename(imageAddress) except Exception as e: wes1 = 1 # print("unable to upload") except Exception as e: wes1 = 1 # print("tif error") useremail = fie.get("Email", "NA") report_return_obj = gen_report_new( uid, f_address, centerlat, centerlong, imagedate, fieldid, field_area, lang, 0, whitelabel, [fieldminlat, fieldminlong, fieldmaxlat, fieldmaxlong], coordinates, None, ndvi, ndwi, fie, ) bucket = storage_client.get_bucket(bucket_name) for imageName in images_array: originalName = imageName if imageName == "tci": imageName = "TCI" elif imageName == "etci": imageName = "ETCI" elif imageName.find("pie") > -1: imageName = imageName + "chart" elif imageName == "mask_img": originalName = "mask" imageAddress = ( uid + "/" + fieldid + "/" + imageName + ".png" ) imageDestination = ( "PaidMonitoredFields/" + uid + "/" + fieldid + "/" + imagedate + "/" + originalName ) blob = bucket.blob(imageDestination) try: blob.upload_from_filename(imageAddress) except Exception as e: wes1 = 1 # print(("unable to upload" + originalName)) imageAddress = uid + "/" + fieldid + "/mask_img.png" imageDestination = ( "PaidMonitoredFields/" + uid + "/" + fieldid + "/" + imagedate + "/mask" ) blob = bucket.blob(imageDestination) try: blob.upload_from_filename(imageAddress) except Exception as e: wes1 = 1 # print("unable to upload mask") ff = f_address ff = ff.replace(" ", "_") ff = ff.replace(",", "_") ff = ff.replace("/", "_") doc_name_html = uid + "/" + fieldid + "/report.html" blob = bucket.blob( ( "PaidMonitoredFields/" + uid + "/" + fieldid + "/" + imagedate + "/report.html" ) ) # blob.upload_from_filename(doc_name_html) doc_name = ( uid + "/" + whitelabel + "_report_" + str(imagedate) + "_" + ff + ".pdf" ) upload_reports( lang, uid, whitelabel, imagedate, ff, fieldid, bucket ) # attachments.add(doc_name) if whitelabel is None or whitelabel.lower() == "farmonaut": try: if report_return_obj is not None: db.reference("PaidMonitoredFields").child("PMF").child(uid).child(fieldid).child("SensedDays").child(imagedate).set(report_return_obj) # sendonlyreport(attachments,useremail,f_address,imagedate,whitelabel,whitelabelEmail,whitelabelPassword) send_html_email( useremail, "Farmer", f_address, " ", imagedate, " ", fie.get("PaymentType", 0), uid, fieldid, lang ) except Exception as e: print(traceback.format_exc()) else: # Is in India and FieldObj GenTiff is not abs_yes useremail = fie.get("Email", "NA") report_return_obj = gen_report_new( uid, f_address, centerlat, centerlong, imagedate, fieldid, field_area, lang, 0, whitelabel, [fieldminlat, fieldminlong, fieldmaxlat, fieldmaxlong], coordinates, None, ndvi, ndwi, fie, ) bucket = storage_client.get_bucket(bucket_name) for imageName in images_array: if imageName == "tci": imageName == "TCI" elif imageName == "etci": imageName = "ETCI" elif imageName.find("pie") > -1: imageName = imageName + "chart" imageAddress = ( uid + "/" + fieldid + "/" + imageName + ".png" ) imageDestination = ( "PaidMonitoredFields/" + uid + "/" + fieldid + "/" + imagedate + "/" + imageName ) blob = bucket.blob(imageDestination) try: blob.upload_from_filename(imageAddress) except Exception as e: wes1 = 1 # print(("unable to upload" + imageName)) imageAddress = uid + "/" + fieldid + "/mask_img.png" imageDestination = ( "PaidMonitoredFields/" + uid + "/" + fieldid + "/" + imagedate + "/mask" ) blob = bucket.blob(imageDestination) try: blob.upload_from_filename(imageAddress) except Exception as e: wes1 = 1 # print("unable to upload mask") ff = f_address ff = ff.replace(" ", "_") ff = ff.replace(",", "_") ff = ff.replace("/", "_") ff = ff.replace("''", "_") doc_name_html = uid + "/" + fieldid + "_report.html" blob = bucket.blob( ( "PaidMonitoredFields/" + uid + "/" + fieldid + "/" + imagedate + "/report.html" ) ) # blob.upload_from_filename(doc_name_html) upload_reports( lang, uid, whitelabel, imagedate, ff, fieldid, bucket ) # attachments.add(doc_name) # wes1 = 1#print('report_erro') try: # sendonlyreport(attachments,useremail,f_address,imagedate,whitelabel,whitelabelEmail,whitelabelPassword) # send_html_email(useremail,'Farmer',f_address, ' ',imagedate,' ') if report_return_obj is not None: db.reference("PaidMonitoredFields").child("PMF").child(uid).child(fieldid).child("SensedDays").child(imagedate).set(report_return_obj) send_html_email( useremail, "Farmer", f_address, " ", imagedate, " ", fie.get("PaymentType", 0), uid, fieldid, lang ) except Exception as e: print(traceback.format_exc()) db.reference("PaidMonitoredFields").child("PMF").child(uid).child( fieldid ).child("LatestDay").set(imagedate) if report_return_obj is not None: db.reference("PaidMonitoredFields").child("PMF").child(uid).child( fieldid ).child("SensedDays").child(imagedate).set(report_return_obj) # webhook call send_webhook_data(uid, fieldid, imagedate) try: fieldid = str(fieldid) imagedate = str(imagedate) city_ref = dbF.collection(uid).document(fieldid) # Set the capital field city_ref.update({"LatestSensedDay": imagedate}) except Exception as e: wywy = 1 if whitelabel is None or whitelabel.lower() == "farmonaut": send_moni_noti( uid, fie.get("FieldDescription", fie.get("FieldAddress", " ")), "", ) else: # would never come here if generate_tifs == 1: useremail = fie.get("Email", "NA") # try: # f_address = fie["FieldAddress"] # except Exception as e: # f_address = "Not Available" report_return_obj = gen_report_new( uid, f_address, centerlat, centerlong, imagedate, fieldid, fieldarea, lang, 1, whitelabel, [fieldminlat, fieldminlong, fieldmaxlat, fieldmaxlong], coordinates, None, ndvi, ndwi, fie, ) ff = f_address ff = ff.replace(" ", "_") ff = ff.replace(",", "_") ff = ff.replace("/", "_") doc_name_html = uid + "/" + fieldid + "/report.html" bucket = storage_client.get_bucket(bucket_name) blob = bucket.blob( ( "PaidMonitoredFields/" + uid + "/" + fieldid + "/" + imagedate + "/report.html" ) ) # blob.upload_from_filename(doc_name_html) upload_reports( lang, uid, whitelabel, imagedate, ff, fieldid, bucket ) # attachments.add(doc_name) sendfailedreport( attachments, useremail, f_address, imagedate, whitelabel, whitelabelEmail, whitelabelPassword, ) db.reference("PaidMonitoredFields").child("PMF").child(uid).child( fieldid ).child("FailedDays").child(imagedate).set(report_return_obj) send_error_noti(uid) def makeFieldImages(new_string_json): result_status = octave.monitored_field2022(new_string_json) # time.sleep(1000000) return result_status def get_indices(uid, fieldid, latestday): ndvi = ( db.reference("PaidMonitoredFields") .child("PMF") .child(uid) .child(fieldid) .child("Health") .child("ndvi") .child(latestday) .get() ) ndwi = ( db.reference("PaidMonitoredFields") .child("PMF") .child(uid) .child(fieldid) .child("Health") .child("ndwi") .child(latestday) .get() ) wes1 = 1 # print(["index", ndvi, ndwi, latestday]) ndvi = float(ndvi) / 100 ndwi = float(ndwi) / 100 return ndvi, ndwi def upload_reports(all_langs, uid, whitelabel, imagedate, ff, fieldid, bucket): return all_langs = all_langs.split(",") lang_num = 0 for single_lang in all_langs: if lang_num == 0: doc_name = ( uid + "/" + whitelabel + "_report_" + single_lang + "_" + str(imagedate) + "_" + ff + ".pdf" ) blob = bucket.blob( ( "PaidMonitoredFields/" + uid + "/" + fieldid + "/" + imagedate + "/report.pdf" ) ) blob.upload_from_filename(doc_name) else: doc_name = ( uid + "/" + whitelabel + "_report_" + single_lang + "_" + str(imagedate) + "_" + ff + ".pdf" ) blob = bucket.blob( ( "PaidMonitoredFields/" + uid + "/" + fieldid + "/" + imagedate + "/report" + single_lang + ".pdf" ) ) blob.upload_from_filename(doc_name) lang_num = lang_num + 1 def get_today_date_in_ist_as_yyyymmdd(): """ Returns the current date string as YYYYMMDD in IST (Indian Standard Time). """ # Get the current UTC time utc_now = datetime.datetime.utcnow() # Define the IST (Indian Standard Time) timezone with a UTC offset of GMT+5:30 # ist_timezone = pytz.timezone('Asia/Kolkata') # Convert UTC time to IST ist_now = utc_now + timedelta(minutes=330) # 330 minutes offset for GMT+5:30 # Format the date as YYYYMMDD formatted_date = ist_now.strftime("%Y%m%d") return formatted_date def should_delete_field(uid, fieldid, fieldobj): expired = ( db.reference("PaidMonitoredFields") .child("PMF") .child(uid) .child(fieldid) .child("Expired") .get() ) # fieldobj["Expired"] expired = expired or 0 try: expired = int(expired) except Exception as e: expired = 0 months_to_wait_before_deletion = 1 paymentType = ( db.reference("PaidMonitoredFields") .child("PMF") .child(uid) .child(fieldid) .child("PaymentType") .get() ) # fieldobj["Expired"] paymentType = paymentType or 0 try: paymentType = float(paymentType) except Exception as e: paymentType = 0 if paymentType != -2 and (int(expired) == 1 or paymentType == 0): if paymentType == 0: paymentMillis = 24 * 60 * 60 * 1000 else: try: paymentMillis = ( (int(paymentType) + months_to_wait_before_deletion) * 30 * 24 * 60 * 60 * 1000 ) except Exception as e: paymentMillis = ( months_to_wait_before_deletion * 30 * 24 * 60 * 60 * 100 ) try: wes1 = 1 # print([round(time.time() * 1000), paymentMillis, fieldid]) if (int(round(time.time() * 1000)) - int(fieldid)) > paymentMillis: wes1 = 1 # print(("deleting..." + str(uid) + ", " + str(fieldid))) fieldobj = ( db.reference("PaidMonitoredFields") .child("PMF") .child(uid) .child(fieldid) .get() ) try: db.reference("DeletedFields").child("PMF").child(uid).child( fieldid ).set(fieldobj) except Exception as e: wes1 = 1 # print("fieldobj not found") db.reference("PaidMonitoredFields").child("PMF").child(uid).child( fieldid ).delete() return 1 except Exception as e: wes1 = 1 # print(traceback.format_exc()) db.reference("PaidMonitoredFields").child("PMF").child(uid).child( fieldid ).delete() return 1 return 0 def process_data(uid, fieldid, lang, fields, sentinelSettings): toremove = "rm -rf " + uid + "/" + str(fieldid) os.system(toremove) make_dir(uid, fieldid) print("starting processing of", uid, fieldid) fie = fields[fieldid] # wes1 = 1#print(fie) is_deleted = 0 is_deleted = should_delete_field(uid, fieldid, fie) is_data_processing_needed = shall_we_process_data(uid, fieldid) print("is_data_processing_needed", is_data_processing_needed) if is_data_processing_needed["process_field"] == True: try: if is_deleted == 1: """field is deleted""" else: try: print( "is_data_already_processed", is_data_already_processed(uid, fieldid), ) previous_data_requests = ( db.reference("PaidMonitoredFields") .child("PMF") .child(uid) .child(fieldid) .child("PreviousDataRequests") .get() ) # fieldobj["Expired"] fie["PreviousDataRequests"] print("previous_data_requests", previous_data_requests) satellite_processed = False if ( is_data_already_processed(uid, fieldid) is False and session_type == "main" ): if previous_data_requests is None: print("going to process") satellite_data( uid, lang, fieldid, None, None, sentinelSettings, None, None, ) satellite_processed = True # in case of failure try processing data two more times if previous_data_requests is not None and session_type in [ "bulk", "api","main" ]: for requested_date, obj in previous_data_requests.items(): print("requested_date", requested_date) print("IST today", get_today_date_in_ist_as_yyyymmdd()) if True: # if int(get_today_date_in_ist()) - int(requested_date) >4: ( previous_from_date, previous_to_date, ) = get_to_and_from_date(requested_date, 0) (radar_from_date, radar_to_date) = ( get_to_and_from_date_radar(requested_date, 0) ) attempt = 0 print("attempting...") while attempt < 3: try: satellite_data( uid, lang, fieldid, previous_from_date, previous_to_date, sentinelSettings, radar_from_date, radar_to_date, ) # data processed successfully attempt = 3 satellite_processed = True except Exception as e: # data processing failed; retry in 10 seconds attempt = attempt + 1 print( "data processing failed - attempt, errTrace", attempt, traceback.format_exc(), ) time.sleep(10) # all attempts completed; remove previous data request db.reference("PaidMonitoredFields").child("PMF").child( uid ).child(fieldid).child("PreviousDataRequests").child( requested_date ).delete() # update satellite_visits/api_credits if satellite_processed: if is_data_processing_needed["process_as"] == 2: # process is sub based; update visits visits_needed = is_data_processing_needed.get( "visits_needed", 0 ) user_obj = is_data_processing_needed.get("user_obj", {}) pre_visits = user_obj.get("TotalSatelliteVisits", 0) str_ref_satellite_visits = ( "/" + uid + "/MyProfile/TotalSatelliteVisits" ) user_visits_ref = process_field_flag.db.reference( f"{str_ref_satellite_visits}", app=process_field_flag.default_app_2, ) user_visits_ref.set(pre_visits + visits_needed) elif is_data_processing_needed["process_as"] == 3: # process is api based; update credits if session_type == "bulk": credits_needed = is_data_processing_needed.get( "credits_needed", 0 ) org_api_obj = is_data_processing_needed.get( "org_api_obj", {} ) org_api_obj["UsedUnits"] = ( int(org_api_obj.get("UsedUnits", 0)) + credits_needed ) org_api_obj["remainingUnits"] = ( int(org_api_obj.get("remainingUnits", 0)) - credits_needed ) org_api_ref = db.reference(f"Organizations/{uid}") org_api_ref.set(org_api_obj) except Exception as e: print(traceback.format_exc()) except Exception as e: print(traceback.format_exc()) os.system(toremove) ## loop over all uids and field_ids to process fields one by one for m_uid, m_fields in uid_list.items(): current_uid = m_uid uid = current_uid lang = "en" fields = m_fields # all_threads = [] # thread_count = 0 # os.system("rm -rf AwsData") try: for fieldid, _ in fields.items(): # fieldid = fieldid#, args=(uid, fieldid, lang,fields,sentinelSettings,)))# process_data(uid, fieldid, lang, fields, sentinelSettings) except Exception as e: print("error point1", traceback.format_exc()) # r = requests.post('https://us-central1-farmbase-b2f7e.cloudfunctions.net/stopVM') # r.json() ############# testing ########### def __test(): # x = get_to_and_from_date("20250212", "20250217") today = date.today() d1 = today.strftime("%Y%m%d") print(int(d1)) # __test()