from socket import *
from oct2py import octave
from io import BytesIO
import base64
from google.cloud import storage
from oauth2client.service_account import ServiceAccountCredentials
from send_webhook_data import send_webhook_data
import os
import time
import firebase_admin
from firebase_admin import credentials
from firebase_admin import db
from PIL import Image, ImageFilter
from send_notification import send_notification
from sendemail import sendemail
import traceback
from sen_start_noti import sen_start_noti
from send_sar_email import send_sar_email
import json
import scipy
import random
from gen_report_new2 import gen_report_new
import datetime
from merge_sar import merge_sar
from find_sar import find_sar
from merge_dem import merge_dem
from find_dem import find_dem
from scipy import ndimage
from make_bigquery import make_bigquery
from send_moni_noti import send_moni_noti
from send_error_noti import send_error_noti
from gmap_image import gmap_image
from gmap_image_large import gmap_image_large
from datetime import date, timedelta
from find_img import find_img
from find_img_large import find_img_large
from merge_img import merge_img
from all_proc import all_proc
from contour_images import contour_images
from send_expiring_noti import send_expiring_noti
from send_expired_noti import send_expired_noti
from make_trial_bigquery import make_trial_bigquery
from gen_geotiff import gen_geotiff
from sendgeotifs import sendgeotifs
from gen_report import gen_report
from get_weather_data import get_weather_data
from sendonlyreport import sendonlyreport
from gen_failed_report import gen_failed_report
from sendfailedreport import sendfailedreport
from map_coords import map_coords
from search_new_sentinel import search_new_sentinel
from convert_to_pdf import convert_to_pdf
from latlon_jp2_to_pixel import latlon_jp2_to_pixel
from gen_geotiff2 import gen_geotiff2
from search_sentinel_again import search_sentinel_again
from get_prev_date import get_prev_date
from make_bigquery_again import make_bigquery_again
import requests
import pdftotree
from convert_to_html import convert_to_html
from geopy.geocoders import Nominatim
from firebase_admin import firestore
from get_land_use import get_land_use
from make_dir import make_dir
import threading
import pytz
from create_interactive_html_report import make_interactive_html_report


def server2023(uid, fieldid):
    today = date.today()
    d1 = today.strftime("%Y%m%d")
    new_field = 0
    aqw2 = 1
    cred = credentials.Certificate("servicekey.json")
    # os.system("rm -rf AwsData")
    try:
        firebase_admin.initialize_app(
            cred, {"databaseURL": "https://farmbase-b2f7e-31c0c.firebaseio.com/"}
        )
    except:
        # The app is already initialized on repeat calls.
        pass
    dbF = firestore.client()
    storage_client = storage.Client(project="farmbase-b2f7e")
    bucket_name = "farmbase-b2f7e.appspot.com"
    sentinelSettings = db.reference("SentinelSettings4").get()
    disabledUIDs = [
        "snQYQZqQx3SmVbRztmEqYn5Mkcz2",
        "KZQ7TZIYXnXN0b07OtrL1hlyYij1",
        "CeMGYvLXrGR5ZThxZ46iV7vY8sa2",
        "TCXcp5VIsfhHZrh0nm2VsgBtcGy2",
        "mFFHQdEtiSbn2hbYQAwwoIdYVi02",
    ]
    if uid not in disabledUIDs:
        if uid is None and fieldid is None:
            # No uid/fieldid given: shallow-fetch every UID under PMF.
            temp_list = db.reference("PaidMonitoredFields").child("PMF").get(False, True)
            uid_list = {}
            for rr, tt in temp_list.items():
                if rr not in disabledUIDs:
                    uid_list[rr] = (
                        db.reference("PaidMonitoredFields")
                        .child("PMF")
                        .child(rr)
                        .get(False, True)
                    )
        elif uid is not None and fieldid is None:
            uidobj = (
                db.reference("PaidMonitoredFields")
                .child("PMF")
                .child(uid)
                .get(False, True)
            )
            uid_list = {}
            uid_list[uid] = uidobj
        else:
            fieldobj = (
                db.reference("PaidMonitoredFields")
                .child("PMF")
                .child(uid)
                .child(fieldid)
                .get(False, True)
            )
            fieldList = {}
            fieldList[fieldid] = fieldobj
            uid_list = {}
            uid_list[uid] = fieldList
    else:
        uid_list = {}
    img_count = 1
    lets_monitor = 1
    latest_rvi_day = 0
    last_latest_rvi_day = 0
    last_sensed_day = 0
    report_format = ""
    # try:
    #     if uid not in disabledUIDs and uid not in ["M53J9SImW9My4vghlCaaWLYxsc22", "ipRHhCOFIDV2pxgg7Nfz1ufZBmV2", "D4xU2QGhooXfK6qiEeHdAlp0wk53"]:
    #         make_interactive_html_report(uid)
    #         bucket = storage_client.get_bucket(bucket_name)
    #         blob = bucket.blob("PaidMonitoredFields/" + uid + "/interactive_report.html")
    #         blob.upload_from_filename(uid + "/interactive_report.html")
    # except:
    #     print(traceback.format_exc())

    # def make_dir(uid, fieldid):
    #     if not os.path.exists(uid):
    #         os.makedirs(uid)
    #     if not os.path.exists(uid + '/' + fieldid):
    #         os.makedirs(uid + '/' + fieldid)
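    # Note on the fetches above: db.reference(...).get(False, True) uses the
    # Firebase Admin SDK signature Reference.get(etag=False, shallow=True).
    # A shallow get returns {key: True} placeholders instead of full child
    # objects, which is why field bodies are re-fetched one child at a time.
    # Illustrative shape only (keys are hypothetical):
    #
    #   db.reference("PaidMonitoredFields/PMF").get(False, True)
    #   # -> {"uidA": True, "uidB": True}
    #   db.reference("PaidMonitoredFields/PMF/uidA").get(False, True)
    #   # -> {"1680000000000": True, ...}   # fieldids only, no field bodies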
    def get_to_and_from_date(requested_date, latestsensedday):
        # Build a lookback window ending on requested_date (YYYYMMDD):
        # 5 days back for a brand-new field (latestsensedday == 0), else 4.
        # The original day/month arithmetic assumed 31-day months and could
        # produce invalid dates; plain datetime arithmetic avoids that.
        lookback = 5 if latestsensedday == 0 else 4
        end = datetime.datetime.strptime(requested_date, "%Y%m%d")
        start = end - timedelta(days=lookback)
        from_date = start.strftime("%Y-%m-%d") + "T00:00:00"
        to_date = end.strftime("%Y-%m-%d") + "T23:59:59"
        return from_date, to_date

    def format_date(yyyymmdd, time):
        start_year = yyyymmdd[:4]
        start_month = yyyymmdd[4:6]
        start_date = yyyymmdd[6:]
        new_date = start_year + "-" + start_month + "-" + start_date + time
        return new_date
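    # Usage sketch for the two helpers above (illustrative values, not
    # executed anywhere):
    #
    #   get_to_and_from_date("20230310", 0)
    #   # -> ("2023-03-05T00:00:00", "2023-03-10T23:59:59")
    #   format_date("20230310", "T00:00:00")
    #   # -> "2023-03-10T00:00:00"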
    def shall_we_process_data(uid, fieldid):
        expired = (
            db.reference("PaidMonitoredFields")
            .child("PMF")
            .child(uid)
            .child(fieldid)
            .child("Expired")
            .get()
        )
        expired = expired or 0
        try:
            expired = int(expired)
        except:
            expired = 0
        paused = (
            db.reference("PaidMonitoredFields")
            .child("PMF")
            .child(uid)
            .child(fieldid)
            .child("Paused")
            .get()
        )
        print(paused)
        if paused is not None:
            if paused.lower() == "yes":
                paused = 1
            else:
                paused = 0
        else:
            paused = 0
        latest_day = (
            db.reference("PaidMonitoredFields")
            .child("PMF")
            .child(uid)
            .child(fieldid)
            .child("LatestDay")
            .get()
        )
        today = date.today()
        today_yyyymmdd = today.strftime("%Y%m%d")
        if latest_day is not None:
            day_diff = int(today_yyyymmdd) - int(latest_day)
        else:
            day_diff = 5
        if expired != 0 or paused == 1 or day_diff < 5:
            print(("farm paused or expired or recently processed: ", expired, paused, day_diff))
            return False
        else:
            return True
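    # Gating rule in one line: a field is processed only when it is not
    # expired, not paused, and its LatestDay is at least 5 days old. A new
    # field with no LatestDay defaults to day_diff = 5, so it qualifies.
    #
    #   shall_we_process_data(uid, fid)  # True  -> fetch fresh imagery
    #                                    # False -> skip this field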
    def satellite_data(uid, lang, fieldid, fields, fromdate, todate, sentinelSettings):
        clientID = sentinelSettings["ClientID"]
        clientSecret = sentinelSettings["ClientSecret"]
        wmsID = sentinelSettings["WMSID"]
        rviID = sentinelSettings["RVIID"]
        demID = sentinelSettings["DEMID"]
        imagedate = None
        fie = fields[fieldid]
        latestsensedday = 0
        latestfailedday = 0
        latest_rvi_day = 0
        latest_dem_day = 0
        last_latest_rvi_day = 0
        last_latest_dem_day = 0
        report_return_obj = "yes"
        try:
            f_address = fie["FieldAddress"]
        except:
            try:
                f_address = fie["FieldDescription"]
            except:
                f_address = "Farm"
        whitelabel = "farmonaut"
        try:
            whitelabel = fie["Whitelabel"]
        except:
            whitelabel = "farmonaut"
        try:
            whitelabelObj = db.reference("WhitelabelEmails").child(whitelabel).get()
            whitelabelEmail = whitelabelObj["Email"]
            whitelabelPassword = whitelabelObj["Password"]
        except:
            whitelabelEmail = None
            whitelabelPassword = None
        try:
            lang = fie["Language"]
        except:
            lang = "en"
        if uid == "HC1KG5a2e1ZhXaPMpEcylZmeMYM2":
            lang = "ar"
        elif uid == "snQYQZqQx3SmVbRztmEqYn5Mkcz2":
            lang = "te"
        elif uid == "mFFHQdEtiSbn2hbYQAwwoIdYVi02":
            lang = "uz"
        elif uid == "8aGkNQm166bmk8cjHVHtwGli2DD2":
            lang = "pa,hi,en"
        elif "biopixel" in whitelabel:
            lang = "fr,ar"
        try:
            fieldDescription = fie["FieldDescription"]
        except:
            fieldDescription = "not available"
        try:
            PlantDistance = fie["PlantDistance"]
        except:
            PlantDistance = 0
        try:
            payy = fie["Paid"]
            if payy == "yes" or payy == "Yes":
                payf = 1
            else:
                payf = 0
            sensed_day_flag = 0
            failed_day_flag = 0
            senseddays = None
            big_query_performed = 0
            paused = 0
            expired = 0
            faileddays = None
        except:
            # Payment flag missing: treat as unpaid.
            payf = 0
            sensed_day_flag = 0
            failed_day_flag = 0
            senseddays = None
            big_query_performed = 0
            paused = 0
            expired = 0
            faileddays = None
        trialprocessed = 1
        if payf == 1:
            sensed_day_flag = 0
            failed_day_flag = 0
            big_query_performed = 0
            paused = 0
            expired = 0
            totalpaidmonths = 0
            totalsenseddays = 0
            try:
                senseddays = fie["SensedDays"]
            except:
                senseddays = None
            try:
                faileddays = fie["FailedDays"]
            except:
                faileddays = None
            coordinates = fie["Coordinates"]
            fieldarea = fie["FieldArea"]
            field_area = fieldarea
            try:
                pp = fie["Paused"]
                if pp == "yes":
                    paused = 1
                else:
                    paused = 0
            except:
                paused = 0
            try:
                ee = fie["Expired"]
                if int(ee) == 0:
                    expired = 0
                else:
                    expired = 1
            except:
                expired = 0
            try:
                tpm = fie["TotalPaidMonths"]
                totalpaidmonths = int(tpm)
            except:
                try:
                    tpm = fie["PaymentType"]
                    totalpaidmonths = float(tpm)
                except:
                    tpm = 0
                    totalpaidmonths = 0
            try:
                senseddays = fields[fieldid]["SensedDays"]
                for u, v in senseddays.items():
                    sensed_day = u
                    totalsenseddays = totalsenseddays + 1
                    if int(sensed_day) > int(latestsensedday):
                        latestsensedday = sensed_day
                        last_sensed_day = latestsensedday
                new_field = 0
            except:
                new_field = 1
            totalsardays = 0
            try:
                sar_days = fields[fieldid]["SARDays"]
                for u, v in sar_days.items():
                    sensed_day = u
                    totalsardays = totalsardays + 1
                    if int(sensed_day) > int(latest_rvi_day):
                        latest_rvi_day = sensed_day
                        last_latest_rvi_day = latest_rvi_day
                new_field = 0
            except:
                new_field = 1
            totaldemdays = 0
            try:
                dem_days = fields[fieldid]["DEMDays"]
                for u, v in dem_days.items():
                    sensed_day = u
                    totaldemdays = totaldemdays + 1
                    if int(sensed_day) > int(latest_dem_day):
                        latest_dem_day = sensed_day
                        last_latest_dem_day = latest_dem_day
                new_field = 0
            except:
                new_field = 1
            totalfaileddays = 0
            try:
                faileddays = fields[fieldid]["FailedDays"]
                for u, v in faileddays.items():
                    failed_day = u
                    totalfaileddays = totalfaileddays + 1
                    if int(failed_day) > int(latestfailedday):
                        latestfailedday = failed_day
            except:
                # No failed days recorded yet.
                pass
            trialenabled = "no"
            # The fieldid doubles as a creation timestamp in milliseconds;
            # recover the order date from it.
            intfieldid = float(fieldid)
            intfieldid = intfieldid / 1000
            orderdate = datetime.datetime.fromtimestamp(intfieldid).strftime(
                "%Y-%m-%d %H:%M:%S.%f"
            )
            orderdate = orderdate[:10]
            orderyear = orderdate[:4]
            ordermonth = orderdate[5:7]
            orderday = orderdate[8:10]
            todayyear = d1[:4]
            todaymonth = d1[4:6]
            todayday = d1[6:]
            f_date = date(int(orderyear), int(ordermonth), int(orderday))
            l_date = date(int(todayyear), int(todaymonth), int(todayday))
            delta = l_date - f_date
            ndays = delta.days
            if (
                (totalsenseddays >= 6 * totalpaidmonths + 1)
                and (ndays > 30 * totalpaidmonths)
                and (totalpaidmonths != -2)
            ):
                expired = 1
                db.reference("PaidMonitoredFields").child("PMF").child(uid).child(
                    fieldid
                ).child("Expired").set("1")
                send_expired_noti(uid)
            else:
                db.reference("PaidMonitoredFields").child("PMF").child(uid).child(
                    fieldid
                ).child("Expired").delete()
                expired = 0
            if (
                (6 * totalpaidmonths - totalsenseddays < 3)
                and (30 * totalpaidmonths - ndays < 10)
                and (totalpaidmonths != -2)
            ):
                db.reference("PaidMonitoredFields").child("PMF").child(uid).child(
                    fieldid
                ).child("Expiring").set("yes")
            else:
                db.reference("PaidMonitoredFields").child("PMF").child(uid).child(
                    fieldid
                ).child("Expiring").set("no")
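        # Worked example of the expiry arithmetic above (hypothetical values):
        # with totalpaidmonths = 3, a field expires once it has consumed
        # totalsenseddays >= 6*3 + 1 = 19 sensed days AND is older than
        # 30*3 = 90 days. It is flagged "Expiring" when fewer than 3 sensed
        # days and fewer than 10 calendar days remain. totalpaidmonths == -2
        # marks a never-expiring subscription and bypasses both checks.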
        # req_json_sar = find_sar(uid, fieldid, latest_rvi_day, expired, paused, fromdate, todate)
        # latest_rvi_day = req_json_sar["LatestDay"]
        if fromdate != None:
            # A historical window was requested: re-fetch everything.
            latestsensedday = 0
            latestfailedday = 0
            latest_rvi_day = 0
            latest_dem_day = 0
        else:
            # latestsensedday = 0
            latestfailedday = 0
            latest_rvi_day = 0
            latest_dem_day = 0
        if latestsensedday == None:
            latestsensedday = 0
        try:
            ttgData = fie["TTGData"]
        except:
            ttgData = None
        if (int(d1) - int(latestsensedday) > 4) and (expired == 0) and (paused == 0):
            # db.reference('PaidMonitoredFields').child('PMF').child(uid).child(fieldid).child("Paused").set('no')
            if new_field == 1 and payf == 1:
                req_json_sar = find_sar(
                    uid,
                    fieldid,
                    latest_rvi_day,
                    expired,
                    paused,
                    fromdate,
                    todate,
                    clientID,
                    clientSecret,
                    rviID,
                )
                latest_rvi_day = req_json_sar["LatestDay"]
                req_json_land = get_land_use(
                    uid,
                    fieldid,
                    latestsensedday,
                    latestfailedday,
                    expired,
                    paused,
                    fromdate,
                    todate,
                    clientID,
                    clientSecret,
                    wmsID,
                )
                req_json_dem = find_dem(
                    uid,
                    fieldid,
                    latest_dem_day,
                    expired,
                    paused,
                    fromdate,
                    todate,
                    clientID,
                    clientSecret,
                    demID,
                )
                latest_dem_day = req_json_dem["LatestDay"]
                req_json = search_new_sentinel(
                    uid,
                    fieldid,
                    latestsensedday,
                    latestfailedday,
                    expired,
                    paused,
                    fromdate,
                    todate,
                    clientID,
                    clientSecret,
                    wmsID,
                )
            elif new_field == 0 and payf == 1 and expired == 0:
                req_json_sar = find_sar(
                    uid,
                    fieldid,
                    latest_rvi_day,
                    expired,
                    paused,
                    fromdate,
                    todate,
                    clientID,
                    clientSecret,
                    rviID,
                )
                latest_rvi_day = req_json_sar["LatestDay"]
                req_json_land = get_land_use(
                    uid,
                    fieldid,
                    latestsensedday,
                    latestfailedday,
                    expired,
                    paused,
                    fromdate,
                    todate,
                    clientID,
                    clientSecret,
                    wmsID,
                )
                req_json_dem = find_dem(
                    uid,
                    fieldid,
                    latest_dem_day,
                    expired,
                    paused,
                    fromdate,
                    todate,
                    clientID,
                    clientSecret,
                    demID,
                )
                latest_dem_day = req_json_dem["LatestDay"]
                req_json = search_new_sentinel(
                    uid,
                    fieldid,
                    latestsensedday,
                    latestfailedday,
                    expired,
                    paused,
                    fromdate,
                    todate,
                    clientID,
                    clientSecret,
                    wmsID,
                )
            else:
                # Not paid: mark the tile as unavailable.
                req_json = {}
                req_json["MGRS"] = "NotA"
                req_json["LatestDay"] = d1
            new_string_json = req_json
            imagedate = str(req_json["LatestDay"])
            uid = current_uid
            big_query_performed = 1
            mgrs = req_json["MGRS"]
            try:
                previous_day = db.reference("LatestTileDates").child(mgrs).get()
            except:
                db.reference("LatestTileDates").child(str(mgrs)).set(imagedate)
                previous_day = None
            if previous_day is None:
                previous_day = "11"
            # if int(previous_day) ...
            if mgrs.find("NotA") >= 0:
                result_status = uid + "failed4"
            else:
                result_status = "successful"
            if result_status.find("failed") < 0:
                new_string_json["StartPixelLat"] = 180
                new_string_json["EndPixelLat"] = 180
                new_string_json["StartPixelLong"] = 180
                new_string_json["EndPixelLong"] = 180
                new_string_json["FieldID"] = fieldid
                new_string_json["PlantDistance"] = PlantDistance
                result_status = makeFieldImages(new_string_json)
                # try:
                #     aqw2 = octave.check_clouds(uid)
                # except:
                #     aqw2 = 2
                aqw2 = 0
                diff_count = 0
            else:
                result_status = "failed0"
            storage_client = storage.Client(project="farmbase-b2f7e")
            bucket_name = "farmbase-b2f7e.appspot.com"
            try:
                images_array = [
                    "tci", "etci", "hybrid", "ndvi", "evi", "rvi", "rsm",
                    "ndwi", "ndre", "vari", "soc", "savi", "ndmi", "evapo",
                    "avi", "bsi", "si", "dem", "hybrid_blind", "vssi",
                    "lulc", "lai",
                ]
                images_array2 = [
                    "soc_cmap2", "ndre_cmap2", "ndvi_cmap2", "evi_cmap2",
                    "ndwi_cmap2", "vari_cmap2", "savi_cmap2", "avi_cmap2",
                    "bsi_cmap2", "si_cmap2", "ndmi_cmap2", "vssi_cmap2",
                    "lai_cmap2",
                ]
                images_array3 = [
                    "tci.tif", "hybrid.tif", "etci.tif", "ndvi.tif",
                    "evi.tif", "soc.tif", "ndre.tif", "vari.tif",
                ]
                images_array4 = [
                    "ndvi_pie", "evi_pie", "ndwi_pie", "ndre_pie", "vari_pie",
                    "savi_pie", "avi_pie", "bsi_pie", "si_pie", "soc_pie",
                    "ndmi_pie", "vssi_pie", "lai_pie",
                    "ndvi_linegraph", "evi_linegraph", "ndwi_linegraph",
                    "ndre_linegraph", "vari_linegraph", "savi_linegraph",
                    "avi_linegraph", "bsi_linegraph", "si_linegraph",
                    "soc_linegraph", "ndmi_linegraph", "vssi_linegraph",
                    "weather_linegraph", "lai_linegraph",
                ]
                for imgName in images_array2:
                    images_array.append(imgName)
                for imgName in images_array4:
                    images_array.append(imgName)
                tif_array = []
                for imgName in images_array:
                    tif_array.append(imgName)
                for imgName in images_array2:
                    tif_array.append(imgName)
            except:
                # Problem in assigning the storage address.
                pass
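            # Status convention used below: makeFieldImages and the checks
            # above yield either "successful" or the uid concatenated with a
            # "failedN" suffix (e.g. "<uid>failed1"). Handlers recover the uid
            # with result_status.replace("failedN", "") and branch on
            # result_status.find("failedN") > 0.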
            if (
                int(latest_rvi_day) > int(last_latest_rvi_day)
                and expired == 0
                and paused == 0
            ):
                try:
                    fieldmaxlat = new_string_json["FieldMaxLat"]
                    fieldminlat = new_string_json["FieldMinLat"]
                    fieldminlong = new_string_json["FieldMinLong"]
                    fieldmaxlong = new_string_json["FieldMaxLong"]
                    aqpp = merge_dem(
                        uid,
                        fieldid,
                        coordinates,
                        latest_dem_day,
                        fieldmaxlat,
                        fieldminlat,
                        fieldmaxlong,
                        fieldminlong,
                    )
                    # Assets are uploaded under
                    # PaidMonitoredFields/{uid}/{fieldid}/{yyyymmdd}/{asset}.
                    destination_blob_name_dem = (
                        "PaidMonitoredFields/"
                        + uid
                        + "/"
                        + fieldid
                        + "/"
                        + latest_rvi_day
                        + "/dem"
                    )
                    dem_file_name = uid + "/" + fieldid + "/dem.png"
                    bucket = storage_client.get_bucket(bucket_name)
                    blob = bucket.blob(destination_blob_name_dem)
                    blob.upload_from_filename(dem_file_name)
                except:
                    pass  # dem error
            if (
                int(latest_rvi_day) > int(last_latest_rvi_day)
                and expired == 0
                and paused == 0
            ):
                db.reference("PaidMonitoredFields").child("PMF").child(uid).child(
                    fieldid
                ).child("SARDays").child(str(latest_rvi_day)).set("yes")
                try:
                    fieldmaxlat = new_string_json["FieldMaxLat"]
                    fieldminlat = new_string_json["FieldMinLat"]
                    fieldminlong = new_string_json["FieldMinLong"]
                    fieldmaxlong = new_string_json["FieldMaxLong"]
                    aqpp = merge_sar(
                        uid,
                        fieldid,
                        coordinates,
                        latest_rvi_day,
                        fieldmaxlat,
                        fieldminlat,
                        fieldmaxlong,
                        fieldminlong,
                    )
                    s1_imgs = ["rvi", "rsm"]
                    for temp_img in s1_imgs:
                        destination_blob_name_rvi = (
                            "PaidMonitoredFields/"
                            + uid
                            + "/"
                            + fieldid
                            + "/"
                            + latest_rvi_day
                            + "/"
                            + temp_img
                        )
                        rvi_file_name = uid + "/" + fieldid + "/" + temp_img + ".png"
                        bucket = storage_client.get_bucket(bucket_name)
                        blob = bucket.blob(destination_blob_name_rvi)
                        blob.upload_from_filename(rvi_file_name)
                    aqpp = merge_dem(
                        uid,
                        fieldid,
                        coordinates,
                        latest_dem_day,
                        fieldmaxlat,
                        fieldminlat,
                        fieldmaxlong,
                        fieldminlong,
                    )
                    destination_blob_name_dem = (
                        "PaidMonitoredFields/"
                        + uid
                        + "/"
                        + fieldid
                        + "/"
                        + latest_dem_day
                        + "/dem"
                    )
                    dem_file_name = uid + "/" + fieldid + "/dem.png"
                    bucket = storage_client.get_bucket(bucket_name)
                    blob = bucket.blob(destination_blob_name_dem)
                    blob.upload_from_filename(dem_file_name)
                    attachments = set()
                    # attachments.add(rvi_file_name)
                    useremail = fie["Email"]
                except Exception as e:
                    pass  # print(e)
            # Initialise before any branch below touches it; the assignment
            # inside the try above is skipped when merging fails early.
            attachments = set()
            if result_status.find("successful") < 0:
                if result_status.find("failed0") > 0:
                    uid = result_status.replace("failed0", "")
                    db.reference("PaidMonitoredFields").child("PMF").child(uid).child(
                        fieldid
                    ).child("FailedDays").child(imagedate).set("yes")
                    send_error_noti(uid)
                if result_status.find("failed1") > 0:
                    uid = result_status.replace("failed1", "")
                    if generate_tifs == 1:
                        # generate_tifs, centerlat/centerlong and the field
                        # bounds are expected to be set by the wider pipeline.
                        bucket = storage_client.get_bucket(bucket_name)
                        useremail = fie["Email"]
                        report_return_obj = gen_report_new(
                            uid,
                            f_address,
                            centerlat,
                            centerlong,
                            imagedate,
                            fieldid,
                            fieldarea,
                            lang,
                            1,
                            whitelabel,
                            [fieldminlat, fieldminlong, fieldmaxlat, fieldmaxlong],
                            coordinates,
                            None,
                            0,
                            0,
                            fie,
                        )
                        ff = f_address
                        ff = ff.replace(" ", "_")
                        ff = ff.replace(",", "_")
                        ff = ff.replace("/", "_")
                        doc_name_html = uid + "/" + fieldid + "/report.html"
                        blob = bucket.blob(
                            "PaidMonitoredFields/"
                            + uid
                            + "/"
                            + fieldid
                            + "/"
                            + imagedate
                            + "/report.html"
                        )
                        # blob.upload_from_filename(doc_name_html)
                        upload_reports(
                            lang, uid, whitelabel, imagedate, ff, fieldid, bucket
                        )
                        # attachments.add(doc_name)
                        sendfailedreport(
                            attachments,
                            useremail,
                            f_address,
                            imagedate,
                            whitelabel,
                            whitelabelEmail,
                            whitelabelPassword,
                        )
                    db.reference("PaidMonitoredFields").child("PMF").child(uid).child(
                        fieldid
                    ).child("FailedDays").child(imagedate).set(report_return_obj)
                    send_error_noti(uid)
                if result_status.find("failed2") > 0:
                    uid = result_status.replace("failed2", "")
                    db.reference("PaidMonitoredFields").child("PMF").child(uid).child(
                        fieldid
                    ).child("FailedDays").child(imagedate).set("yes")
                    send_error_noti(uid)
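                # failed4 is set before any rendering happens (the unpaid
                # "NotA" tile path above), so it needs no report and no
                # FailedDays entry; the uid is recovered and the field skipped.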
                if result_status.find("failed4") > 0:
                    uid = result_status.replace("failed4", "")
            else:
                # result_status is successful
                uid = result_status.replace("successful", "")
                # aqw2 = octave.check_clouds(uid)
                aqw2 = 0
                p_date = latestsensedday
                c_lat = None
                p_coords = 0
                # cropType, gen_tif, generate_tifs, centerlat_large /
                # centerlong_large and the field bounds are expected to be
                # populated by the wider pipeline.
                if aqw2 < 2:
                    fieldlatlen = abs(float(fieldmaxlat) - float(fieldminlat))
                    fieldlonglen = abs(float(fieldmaxlong) - float(fieldminlong))
                    map_coords(
                        uid,
                        fieldid,
                        coordinates,
                        imagedate,
                        fieldmaxlat,
                        fieldminlat,
                        fieldmaxlong,
                        fieldminlong,
                        cropType,
                        tif_array,
                        field_area,
                    )
                    try:
                        imgs = ["rvi", "rsm"]
                        for img in imgs:
                            destination_blob_name_rvi = (
                                "PaidMonitoredFields/"
                                + uid
                                + "/"
                                + fieldid
                                + "/"
                                + latest_rvi_day
                                + "/"
                                + img
                            )
                            rvi_file_name = uid + "/" + fieldid + "/" + img + ".png"
                            bucket = storage_client.get_bucket(bucket_name)
                            blob = bucket.blob(destination_blob_name_rvi)
                            blob.upload_from_filename(rvi_file_name)
                    except:
                        pass  # rvi error2
                    ndvi, ndwi = get_indices(uid, fieldid, imagedate)
                    centerlat = centerlat_large
                    centerlong = centerlong_large
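                    # GeoTIFF gating, per the branch below: when FieldObj
                    # GenTiff is "yes" (generate_tifs == 1), full GeoTIFF
                    # export plus the report runs only for fields outside
                    # India or fields that opt in with GenTiff == "abs_yes";
                    # Indian fields otherwise get PNGs and the report only.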
                    if generate_tifs == 1:
                        # FieldObj GenTiff is "yes".
                        if f_address.find("India") < 0 or gen_tif == "abs_yes":
                            # Not in India, or FieldObj GenTiff is "abs_yes".
                            for imageName in tif_array:
                                tifName = imageName + ".tif"
                                gen_geotiff2(new_string_json, uid, imageName, tifName)
                            try:
                                for imageName in tif_array:
                                    if imageName == "tci.tif":
                                        imageName = "TCI.tif"
                                    elif imageName == "etci.tif":
                                        imageName = "ETCI.tif"
                                    imageAddress = uid + "/" + fieldid + "/" + imageName
                                    imageDestination = (
                                        "PaidMonitoredFields/"
                                        + uid
                                        + "/"
                                        + fieldid
                                        + "/"
                                        + imagedate
                                        + "/"
                                        + imageName
                                    )
                                    blob = bucket.blob(imageDestination)
                                    try:
                                        blob.upload_from_filename(imageAddress)
                                    except:
                                        pass  # unable to upload
                            except:
                                pass  # tif error
                            useremail = fie["Email"]
                            report_return_obj = gen_report_new(
                                uid,
                                f_address,
                                centerlat,
                                centerlong,
                                imagedate,
                                fieldid,
                                field_area,
                                lang,
                                0,
                                whitelabel,
                                [fieldminlat, fieldminlong, fieldmaxlat, fieldmaxlong],
                                coordinates,
                                None,
                                ndvi,
                                ndwi,
                                fie,
                            )
                            bucket = storage_client.get_bucket(bucket_name)
                            for imageName in images_array:
                                originalName = imageName
                                if imageName == "tci":
                                    imageName = "TCI"
                                elif imageName == "etci":
                                    imageName = "ETCI"
                                elif imageName.find("pie") > -1:
                                    imageName = imageName + "chart"
                                elif imageName == "mask_img":
                                    originalName = "mask"
                                imageAddress = uid + "/" + fieldid + "/" + imageName + ".png"
                                imageDestination = (
                                    "PaidMonitoredFields/"
                                    + uid
                                    + "/"
                                    + fieldid
                                    + "/"
                                    + imagedate
                                    + "/"
                                    + originalName
                                )
                                blob = bucket.blob(imageDestination)
                                try:
                                    blob.upload_from_filename(imageAddress)
                                except:
                                    pass  # unable to upload originalName
                            imageAddress = uid + "/" + fieldid + "/mask_img.png"
                            imageDestination = (
                                "PaidMonitoredFields/"
                                + uid
                                + "/"
                                + fieldid
                                + "/"
                                + imagedate
                                + "/mask"
                            )
                            blob = bucket.blob(imageDestination)
                            try:
                                blob.upload_from_filename(imageAddress)
                            except:
                                pass  # unable to upload mask
                            ff = f_address
                            ff = ff.replace(" ", "_")
                            ff = ff.replace(",", "_")
                            ff = ff.replace("/", "_")
                            doc_name_html = uid + "/" + fieldid + "/report.html"
                            blob = bucket.blob(
                                "PaidMonitoredFields/"
                                + uid
                                + "/"
                                + fieldid
                                + "/"
                                + imagedate
                                + "/report.html"
                            )
                            # blob.upload_from_filename(doc_name_html)
                            doc_name = (
                                uid
                                + "/"
                                + whitelabel
                                + "_report_"
                                + str(imagedate)
                                + "_"
                                + ff
                                + ".pdf"
                            )
                            upload_reports(
                                lang, uid, whitelabel, imagedate, ff, fieldid, bucket
                            )
                            # attachments.add(doc_name)
                            try:
                                sendonlyreport(
                                    attachments,
                                    useremail,
                                    f_address,
                                    imagedate,
                                    whitelabel,
                                    whitelabelEmail,
                                    whitelabelPassword,
                                )
                            except:
                                print(traceback.format_exc())
                        else:
                            # In India and FieldObj GenTiff is not "abs_yes".
                            useremail = fie.get("Email", "NA")
                            report_return_obj = gen_report_new(
                                uid,
                                f_address,
                                centerlat,
                                centerlong,
                                imagedate,
                                fieldid,
                                field_area,
                                lang,
                                0,
                                whitelabel,
                                [fieldminlat, fieldminlong, fieldmaxlat, fieldmaxlong],
                                coordinates,
                                None,
                                ndvi,
                                ndwi,
                                fie,
                            )
                            bucket = storage_client.get_bucket(bucket_name)
                            for imageName in images_array:
                                if imageName == "tci":
                                    imageName = "TCI"
                                elif imageName == "etci":
                                    imageName = "ETCI"
                                elif imageName.find("pie") > -1:
                                    imageName = imageName + "chart"
                                imageAddress = uid + "/" + fieldid + "/" + imageName + ".png"
                                imageDestination = (
                                    "PaidMonitoredFields/"
                                    + uid
                                    + "/"
                                    + fieldid
                                    + "/"
                                    + imagedate
                                    + "/"
                                    + imageName
                                )
                                blob = bucket.blob(imageDestination)
                                try:
                                    blob.upload_from_filename(imageAddress)
                                except:
                                    pass  # unable to upload imageName
                            imageAddress = uid + "/" + fieldid + "/mask_img.png"
                            imageDestination = (
                                "PaidMonitoredFields/"
                                + uid
                                + "/"
                                + fieldid
                                + "/"
                                + imagedate
                                + "/mask"
                            )
                            blob = bucket.blob(imageDestination)
                            try:
                                blob.upload_from_filename(imageAddress)
                            except:
                                pass  # unable to upload mask
                            ff = f_address
                            ff = ff.replace(" ", "_")
                            ff = ff.replace(",", "_")
                            ff = ff.replace("/", "_")
                            ff = ff.replace("''", "_")
                            doc_name_html = uid + "/" + fieldid + "_report.html"
                            blob = bucket.blob(
                                "PaidMonitoredFields/"
                                + uid
                                + "/"
                                + fieldid
                                + "/"
                                + imagedate
                                + "/report.html"
                            )
                            # blob.upload_from_filename(doc_name_html)
                            upload_reports(
                                lang, uid, whitelabel, imagedate, ff, fieldid, bucket
                            )
                            # attachments.add(doc_name)
                            try:
                                sendonlyreport(
                                    attachments,
                                    useremail,
                                    f_address,
                                    imagedate,
                                    whitelabel,
                                    whitelabelEmail,
                                    whitelabelPassword,
                                )
                            except:
                                print(traceback.format_exc())
                    db.reference("PaidMonitoredFields").child("PMF").child(uid).child(
                        fieldid
                    ).child("LatestDay").set(imagedate)
                    if report_return_obj is not None:
                        db.reference("PaidMonitoredFields").child("PMF").child(
                            uid
                        ).child(fieldid).child("SensedDays").child(imagedate).set(
                            report_return_obj
                        )
                    if uid == "HC1KG5a2e1ZhXaPMpEcylZmeMYM2":
                        send_webhook_data(fieldid)
                    try:
                        fieldid = str(fieldid)
                        imagedate = str(imagedate)
                        city_ref = dbF.collection(uid).document(fieldid)
                        city_ref.update({"LatestSensedDay": imagedate})
                    except:
                        pass
                    send_moni_noti(
                        uid,
                        fie.get("FieldDescription", fie.get("FieldAddress", " ")),
                        "",
                    )
                else:
                    # Would never come here (aqw2 is hardcoded to 0 above).
                    if generate_tifs == 1:
                        useremail = fie["Email"]
                        report_return_obj = gen_report_new(
                            uid,
                            f_address,
                            centerlat,
                            centerlong,
                            imagedate,
                            fieldid,
                            fieldarea,
                            lang,
                            1,
                            whitelabel,
                            [fieldminlat, fieldminlong, fieldmaxlat, fieldmaxlong],
                            coordinates,
                            None,
                            ndvi,
                            ndwi,
                            fie,
                        )
                        ff = f_address
                        ff = ff.replace(" ", "_")
                        ff = ff.replace(",", "_")
                        ff = ff.replace("/", "_")
                        doc_name_html = uid + "/" + fieldid + "/report.html"
                        bucket = storage_client.get_bucket(bucket_name)
                        blob = bucket.blob(
                            "PaidMonitoredFields/"
                            + uid
                            + "/"
                            + fieldid
                            + "/"
                            + imagedate
                            + "/report.html"
                        )
                        # blob.upload_from_filename(doc_name_html)
                        upload_reports(
                            lang, uid, whitelabel, imagedate, ff, fieldid, bucket
                        )
                        # attachments.add(doc_name)
                        sendfailedreport(
                            attachments,
                            useremail,
                            f_address,
                            imagedate,
                            whitelabel,
                            whitelabelEmail,
                            whitelabelPassword,
                        )
                        db.reference("PaidMonitoredFields").child("PMF").child(
                            uid
                        ).child(fieldid).child("FailedDays").child(imagedate).set(
                            report_return_obj
                        )
                        send_error_noti(uid)
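    # Successful runs are recorded twice: in the Realtime Database
    # (PaidMonitoredFields/PMF/{uid}/{fieldid}/LatestDay plus a
    # SensedDays/{date} entry) and mirrored to Firestore (collection {uid},
    # document {fieldid}, field "LatestSensedDay").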
    def makeFieldImages(new_string_json):
        # Octave renders all field imagery from the request JSON.
        result_status = octave.monitored_field2022(new_string_json)
        # time.sleep(1000000)
        return result_status

    def get_indices(uid, fieldid, latestday):
        ndvi = (
            db.reference("PaidMonitoredFields")
            .child("PMF")
            .child(uid)
            .child(fieldid)
            .child("Health")
            .child("ndvi")
            .child(latestday)
            .get()
        )
        ndwi = (
            db.reference("PaidMonitoredFields")
            .child("PMF")
            .child(uid)
            .child(fieldid)
            .child("Health")
            .child("ndwi")
            .child(latestday)
            .get()
        )
        # Stored as integer percentages; convert to 0-1 fractions.
        ndvi = float(ndvi) / 100
        ndwi = float(ndwi) / 100
        return ndvi, ndwi

    def upload_reports(all_langs, uid, whitelabel, imagedate, ff, fieldid, bucket):
        # PDF report uploads are currently disabled.
        return
        all_langs = all_langs.split(",")
        lang_num = 0
        for single_lang in all_langs:
            doc_name = (
                uid
                + "/"
                + whitelabel
                + "_report_"
                + single_lang
                + "_"
                + str(imagedate)
                + "_"
                + ff
                + ".pdf"
            )
            if lang_num == 0:
                blob = bucket.blob(
                    "PaidMonitoredFields/"
                    + uid
                    + "/"
                    + fieldid
                    + "/"
                    + imagedate
                    + "/report.pdf"
                )
            else:
                blob = bucket.blob(
                    "PaidMonitoredFields/"
                    + uid
                    + "/"
                    + fieldid
                    + "/"
                    + imagedate
                    + "/report"
                    + single_lang
                    + ".pdf"
                )
            blob.upload_from_filename(doc_name)
            lang_num = lang_num + 1

    def get_today_date_in_ist():
        # Current date in Indian Standard Time (UTC+5:30), as YYYYMMDD.
        utc_now = datetime.datetime.utcnow()
        ist_now = utc_now + timedelta(minutes=330)
        formatted_date = ist_now.strftime("%Y%m%d")
        return formatted_date

    def should_delete_field(uid, fieldid, fieldobj):
        expired = (
            db.reference("PaidMonitoredFields")
            .child("PMF")
            .child(uid)
            .child(fieldid)
            .child("Expired")
            .get()
        )
        expired = expired or 0
        try:
            expired = int(expired)
        except:
            expired = 0
        paymentType = (
            db.reference("PaidMonitoredFields")
            .child("PMF")
            .child(uid)
            .child(fieldid)
            .child("PaymentType")
            .get()
        )
        paymentType = paymentType or 0
        try:
            paymentType = int(paymentType)
        except:
            paymentType = 0
        if paymentType != -2 and (int(expired) == 1 or paymentType == 0):
            if paymentType == 0:
                # Unpaid fields are retained for one day.
                paymentMillis = 24 * 60 * 60 * 1000
            else:
                try:
                    # Paid months plus a 6-month grace period.
                    paymentMillis = (int(paymentType) + 6) * 30 * 24 * 60 * 60 * 1000
                except:
                    paymentMillis = 6 * 30 * 24 * 60 * 60 * 1000
            try:
                # The fieldid doubles as the creation timestamp in millis.
                if (int(round(time.time() * 1000)) - int(fieldid)) > paymentMillis:
                    try:
                        db.reference("DeletedFields").child("PMF").child(uid).child(
                            fieldid
                        ).set(fieldobj)
                    except:
                        pass  # fieldobj not found
                    db.reference("PaidMonitoredFields").child("PMF").child(uid).child(
                        fieldid
                    ).delete()
                    return 1
            except:
                # print(traceback.format_exc())
                db.reference("PaidMonitoredFields").child("PMF").child(uid).child(
                    fieldid
                ).delete()
                return 1
        return 0
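    # Retention arithmetic above, worked through (values hypothetical): a
    # field created at fieldid = 1680000000000 with paymentType = 3 becomes
    # deletable (3 + 6) * 30 = 270 days after creation; an unpaid field
    # (paymentType == 0) is removed 24 hours after creation. paymentType ==
    # -2 is never deleted.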
    def process_data(uid, fieldid, lang, fields, sentinelSettings):
        toremove = "rm -rf " + uid + "/" + str(fieldid)
        os.system(toremove)
        make_dir(uid, fieldid)
        print(uid, fieldid)
        fie = fields[fieldid]
        is_deleted = 0
        is_deleted = should_delete_field(uid, fieldid, fie)
        if shall_we_process_data(uid, fieldid):
            try:
                if is_deleted == 1:
                    pass  # field deleted
                else:
                    try:
                        previous_data_requests = (
                            db.reference("PaidMonitoredFields")
                            .child("PMF")
                            .child(uid)
                            .child(fieldid)
                            .child("PreviousDataRequests")
                            .get()
                        )
                        # In case of failure, try processing the data up to
                        # two more times.
                        if previous_data_requests is not None:
                            for timestamp, obj in previous_data_requests.items():
                                requested_date = timestamp
                                if int(get_today_date_in_ist()) - int(requested_date) > 4:
                                    (
                                        previous_from_date,
                                        previous_to_date,
                                    ) = get_to_and_from_date(requested_date, 0)
                                    attempt = 0
                                    while attempt < 3:
                                        try:
                                            satellite_data(
                                                uid,
                                                lang,
                                                fieldid,
                                                fields,
                                                previous_from_date,
                                                previous_to_date,
                                                sentinelSettings,
                                            )
                                            attempt = 3
                                        except:
                                            # print(traceback.format_exc())
                                            attempt = attempt + 1
                                            time.sleep(10)
                                    db.reference("PaidMonitoredFields").child(
                                        "PMF"
                                    ).child(uid).child(fieldid).child(
                                        "PreviousDataRequests"
                                    ).child(
                                        requested_date
                                    ).delete()
                    except:
                        pass  # print(traceback.format_exc())
                    satellite_data(
                        uid, lang, fieldid, fields, None, None, sentinelSettings
                    )
            except:
                pass  # print(traceback.format_exc())
        os.system(toremove)

    for k, v in uid_list.items():
        current_uid = k
        uid = current_uid
        all_threads = []
        thread_count = 0
        if (
            uid != "CeMGYvLXrGR5ZThxZ46iV7vY8sa2"
            and uid != "TCXcp5VIsfhHZrh0nm2VsgBtcGy2"
        ):
            # os.system("rm -rf AwsData")
            lang = "en"
            # fields is a shallow map here ({fieldid: True}); the field bodies
            # are pulled inside process_data / satellite_data.
            fields = v
            try:
                for p, q in fields.items():
                    fieldid = p
                    # Threaded variant, kept for reference:
                    # if thread_count < 10:
                    #     all_threads.append(threading.Thread(target=process_data, args=(uid, fieldid, lang, fields, sentinelSettings)))
                    #     thread_count = thread_count + 1
                    # if thread_count == 10:
                    #     for single_thread in all_threads:
                    #         single_thread.start()
                    #     for single_thread in all_threads:
                    #         single_thread.join()
                    #     thread_count = 0
                    #     all_threads = []
                    process_data(uid, fieldid, lang, fields, sentinelSettings)
            except:
                pass  # print(traceback.format_exc())
    # r = requests.post('https://us-central1-farmbase-b2f7e.cloudfunctions.net/stopVM')
    # r.json()
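# Minimal invocation sketch (kept commented out: it talks to live Firebase,
# Cloud Storage and Octave; the uid/fieldid values below are placeholders):
#
#   server2023(None, None)                    # every non-disabled UID
#   server2023("someUid", None)               # all fields of one user
#   server2023("someUid", "1680000000000")    # a single field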