from socket import *
from oct2py import octave
from io import BytesIO
import base64
from google.cloud import storage
from oauth2client.service_account import ServiceAccountCredentials
from send_webhook_data import send_webhook_data
import os
import firebase_admin
from firebase_admin import credentials
from firebase_admin import db
from PIL import Image, ImageFilter
from send_notification import send_notification
from sendemail import sendemail
from sen_start_noti import sen_start_noti
from send_sar_email import send_sar_email
import json
import scipy
import random
from gen_report_new2 import gen_report_new
import time
import datetime
from merge_sar import merge_sar
from find_sar import find_sar
from merge_dem import merge_dem
from find_dem import find_dem
from scipy import ndimage
from make_bigquery import make_bigquery
from send_moni_noti import send_moni_noti
from send_error_noti import send_error_noti
from gmap_image import gmap_image
from gmap_image_large import gmap_image_large
from datetime import date
from find_img import find_img
from find_img_large import find_img_large
from merge_img import merge_img
from all_proc import all_proc
from contour_images import contour_images
from send_expiring_noti import send_expiring_noti
from send_expired_noti import send_expired_noti
from make_trial_bigquery import make_trial_bigquery
from gen_geotiff import gen_geotiff
from sendgeotifs import sendgeotifs
from gen_report import gen_report
from get_weather_data import get_weather_data
from sendonlyreport import sendonlyreport
from gen_failed_report import gen_failed_report
from sendfailedreport import sendfailedreport
from map_coords import map_coords
from search_new_sentinel import search_new_sentinel
from convert_to_pdf import convert_to_pdf
from latlon_jp2_to_pixel import latlon_jp2_to_pixel
from gen_geotiff2 import gen_geotiff2
from search_sentinel_again import search_sentinel_again
from get_prev_date import get_prev_date
from make_bigquery_again import make_bigquery_again
import requests
import pdftotree
from convert_to_html import convert_to_html
from geopy.geocoders import Nominatim
from firebase_admin import firestore
from get_land_use import get_land_use


def server2022(uid, fieldid):
    today = date.today()
    d1 = today.strftime('%Y%m%d')
    new_field = 0
    print(d1)
    aqw2 = 1
    cred = credentials.Certificate('servicekey.json')
    # os.system("rm -rf AwsData")
    try:
        firebase_admin.initialize_app(cred, {'databaseURL': 'https://farmbase-b2f7e-31c0c.firebaseio.com/'})
    except:
        qysy = 1
    dbF = firestore.client()
    sentinelSettings = db.reference('SentinelSettings').get()
    if uid is None and fieldid is None:
        temp_list = db.reference('PaidMonitoredFields').child('PMF').get(False, True)
        uid_list = {}
        disabledUIDs = ['snQYQZqQx3SmVbRztmEqYn5Mkcz2', 'KZQ7TZIYXnXN0b07OtrL1hlyYij1', 'CeMGYvLXrGR5ZThxZ46iV7vY8sa2', 'TCXcp5VIsfhHZrh0nm2VsgBtcGy2', 'mFFHQdEtiSbn2hbYQAwwoIdYVi02']
        for (rr, tt) in temp_list.items():
            if rr not in disabledUIDs:
                # uid_list[rr] = tt
                uid_list[rr] = db.reference('PaidMonitoredFields').child('PMF').child(rr).get()
    elif uid is not None and fieldid is None:
        uidobj = db.reference('PaidMonitoredFields').child('PMF').child(uid).get()
        uid_list = {}
        uid_list[uid] = uidobj
    else:
        fieldobj = db.reference('PaidMonitoredFields').child('PMF').child(uid).child(fieldid).get()
        fieldList = {}
        fieldList[fieldid] = fieldobj
        uid_list = {}
        uid_list[uid] = fieldList
    img_count = 1
    lets_monitor = 1
    latest_rvi_day = 0
    last_latest_rvi_day = 0
    last_sensed_day = 0
    report_format = ''

    def get_to_and_from_date(requested_date, latestsensedday):
        # Build an inclusive search window that ends on requested_date and starts
        # roughly five days earlier (four days when the field already has a sensed day).
        # Illustrative example: requested_date='20220310', latestsensedday=0
        # returns ('2022-03-05T00:00:00', '2022-03-10T23:59:59').
        current_date = int(requested_date[6:])
        current_month = int(requested_date[4:6])
        current_year = int(requested_date[:4])
        start_year = current_year
        if current_date < 5 and current_month > 1:
            if latestsensedday == 0:
                start_date = 31 - current_date - 5
            else:
                start_date = 31 - current_date - 4
            start_month = current_month - 1
            start_year = current_year
        elif current_date < 5 and current_month == 1:
            if latestsensedday == 0:
                start_date = 31 - current_date - 5
            else:
                start_date = 31 - current_date - 4
            start_month = 12
            start_year = start_year - 1
        else:
            if latestsensedday == 0:
                start_date = current_date - 5
            else:
                start_date = current_date - 4
            start_month = current_month
            start_year = current_year
        if start_date == 0:
            start_date = 1
        if current_date == 0:
            current_date = 1
        if current_month < 10:
            current_month = "0" + str(current_month)
        else:
            current_month = str(current_month)
        if start_month < 10:
            start_month = "0" + str(start_month)
        else:
            start_month = str(start_month)
        if current_date < 10:
            current_date = "0" + str(current_date)
        else:
            current_date = str(current_date)
        if start_date < 10:
            start_date = "0" + str(start_date)
        else:
            start_date = str(start_date)
        start_year = str(start_year)
        from_date = start_year + '-' + start_month + '-' + str(start_date)
        to_date = str(current_year) + '-' + current_month + '-' + str(current_date)
        l_date = str(current_year) + current_month + str(current_date)
        from_date = from_date + 'T00:00:00'
        to_date = to_date + 'T23:59:59'
        return from_date, to_date

    def format_date(yyyymmdd, time):
        start_year = yyyymmdd[:4]
        start_month = yyyymmdd[4:6]
        start_date = yyyymmdd[6:]
        new_date = start_year + '-' + start_month + '-' + start_date + time
        return new_date

    def satellite_data(uid, lang, fieldid, fields, fromdate, todate, sentinelSettings):
        # Process a single field: read its metadata and payment state, look for new
        # Sentinel imagery in the requested window, build the map layers and reports,
        # and record sensed/failed days back into Firebase.
        clientID = sentinelSettings["ClientID"]
        clientSecret = sentinelSettings["ClientSecret"]
        wmsID = sentinelSettings["WMSID"]
        rviID = sentinelSettings["RVIID"]
        demID = sentinelSettings["DEMID"]
        imagedate = None
        print(fieldid)
        fie = fields[fieldid]
        latestsensedday = 0
        latestfailedday = 0
        latest_rvi_day = 0
        latest_dem_day = 0
        last_latest_rvi_day = 0
        last_latest_dem_day = 0
        try:
            f_address = fie["FieldAddress"]
        except:
            try:
                f_address = fie["FieldDescription"]
            except:
                f_address = "Farm"
        whitelabel = "farmonaut"
        try:
            whitelabel = fie["Whitelabel"]
        except:
            whitelabel = "farmonaut"
        try:
            whitelabelObj = db.reference('WhitelabelEmails').child(whitelabel).get()
            whitelabelEmail = whitelabelObj["Email"]
            whitelabelPassword = whitelabelObj["Password"]
        except:
            whitelabelEmail = None
            whitelabelPassword = None
        try:
            lang = fie["Language"]
        except:
            lang = 'en'
            if uid == 'HC1KG5a2e1ZhXaPMpEcylZmeMYM2':
                lang = 'ar'
            elif uid == 'snQYQZqQx3SmVbRztmEqYn5Mkcz2':
                lang = 'te'
            elif uid == 'mFFHQdEtiSbn2hbYQAwwoIdYVi02':
                lang = 'uz'
            elif uid == '8aGkNQm166bmk8cjHVHtwGli2DD2':
                lang = 'pa'
        try:
            fieldDescription = fie["FieldDescription"]
        except:
            fieldDescription = "not available"
        print('fdd: ' + fieldDescription)
        try:
            PlantDistance = fie["PlantDistance"]
        except:
            PlantDistance = 0
        try:
            payy = fie["Paid"]
            if payy == "yes" or payy == "Yes":
                payf = 1
            else:
                payf = 0
            sensed_day_flag = 0
            failed_day_flag = 0
            senseddays = None
            big_query_performed = 0
            paused = 0
            expired = 0
            faileddays = None
            print("Made the payment")
        except:
            print("payment not done")
            payf = 0
            sensed_day_flag = 0
            failed_day_flag = 0
            senseddays = None
            big_query_performed = 0
            paused = 0
            expired = 0
            faileddays = None
        trialprocessed = 1
        if payf == 1:
            sensed_day_flag = 0
            failed_day_flag = 0
            big_query_performed = 0
            paused = 0
            expired = 0
            totalpaidmonths = 0
            totalsenseddays = 0
            try:
                senseddays = fie["SensedDays"]
            except:
                senseddays = None
            try:
                faileddays = fie["FailedDays"]
            except:
                faileddays = None
            coordinates = fie["Coordinates"]
            fieldarea = fie["FieldArea"]
            try:
                pp = fie["Paused"]
                if pp == 'yes':
                    paused = 1
                else:
                    paused = 0
            except:
                paused = 0
            try:
                ee = fie["Expired"]
                if int(ee) == 0:
                    expired = 0
                else:
                    expired = 1
            except:
                expired = 0
            try:
                tpm = fie["TotalPaidMonths"]
                totalpaidmonths = int(tpm)
            except:
                try:
                    tpm = fie["PaymentType"]
                    totalpaidmonths = float(tpm)
                except:
                    tpm = 0
                    totalpaidmonths = 0
            try:
                senseddays = fields[fieldid]["SensedDays"]
                for (u, v) in senseddays.items():
                    sensed_day = u
                    totalsenseddays = totalsenseddays + 1
                    if int(sensed_day) > int(latestsensedday):
                        latestsensedday = sensed_day
                        last_sensed_day = latestsensedday
                print('LSD')
                print(latestsensedday)
                new_field = 0
            except:
                print('new field')
                new_field = 1
            totalsardays = 0
            try:
                sar_days = fields[fieldid]["SARDays"]
                for (u, v) in sar_days.items():
                    sensed_day = u
                    totalsardays = totalsardays + 1
                    if int(sensed_day) > int(latest_rvi_day):
                        latest_rvi_day = sensed_day
                        last_latest_rvi_day = latest_rvi_day
                print('LSDR')
                print(latest_rvi_day)
                new_field = 0
            except:
                print('new field')
                new_field = 1
            totaldemdays = 0
            try:
                dem_days = fields[fieldid]["DEMDays"]
                for (u, v) in dem_days.items():
                    sensed_day = u
                    totaldemdays = totaldemdays + 1
                    if int(sensed_day) > int(latest_dem_day):
                        latest_dem_day = sensed_day
                        last_latest_dem_day = latest_dem_day
                print('LSDD')
                print(latest_dem_day)
                new_field = 0
            except:
                print('new field')
                new_field = 1
            totalfaileddays = 0
            try:
                faileddays = fields[fieldid]["FailedDays"]
                for (u, v) in faileddays.items():
                    failed_day = u
                    totalfaileddays = totalfaileddays + 1
                    if int(failed_day) > int(latestfailedday):
                        latestfailedday = failed_day
                print('LFD')
                print(latestfailedday)
            except:
                print('no failed day')
            print('tpm')
            print(totalpaidmonths)
            trialenabled = 'no'
            intfieldid = float(fieldid)
            intfieldid = intfieldid / 1000
            orderdate = datetime.datetime.fromtimestamp(intfieldid).strftime('%Y-%m-%d %H:%M:%S.%f')
            orderdate = orderdate[:10]
            orderyear = orderdate[:4]
            ordermonth = orderdate[5:7]
            orderday = orderdate[8:10]
            todayyear = d1[:4]
            todaymonth = d1[4:6]
            todayday = d1[6:]
            f_date = date(int(orderyear), int(ordermonth), int(orderday))
            l_date = date(int(todayyear), int(todaymonth), int(todayday))
            delta = l_date - f_date
            ndays = delta.days
            print('ndays')
            print(ndays)
            # Expire the field once it has consumed its sensed-day quota (6 sensed days
            # per paid month) and its paid period (30 days per paid month) has elapsed.
            if (totalsenseddays >= 6 * totalpaidmonths + 1) and ndays > 30 * totalpaidmonths:
                expired = 1
                db.reference('PaidMonitoredFields').child('PMF').child(uid).child(fieldid).child('Expired').set('1')
                send_expired_noti(uid)
                print('expired')
            else:
                db.reference('PaidMonitoredFields').child('PMF').child(uid).child(fieldid).child('Expired').delete()
                expired = 0
            if (6 * totalpaidmonths - totalsenseddays < 3) and (30 * totalpaidmonths - ndays < 10):
                db.reference('PaidMonitoredFields').child('PMF').child(uid).child(fieldid).child('Expiring').set('yes')
            else:
                db.reference('PaidMonitoredFields').child('PMF').child(uid).child(fieldid).child('Expiring').set('no')
            # req_json_sar = find_sar(uid,fieldid,latest_rvi_day, expired, paused, fromdate, todate)
            # latest_rvi_day = req_json_sar["LatestDay"]
            if fromdate is not None:
                latestsensedday = 0
                latestfailedday = 0
                latest_rvi_day = 0
                latest_dem_day = 0
            else:
                # latestsensedday = 0
                latestfailedday = 0
                latest_rvi_day = 0
                latest_dem_day = 0
            if latestsensedday is None:
                latestsensedday = 0
            # ttg = "ttg"
            # paused = 0
            try:
                ttgData = fie["TTGData"]
            except:
                ttgData = None
            print("dwdqw")
            print(d1, latestsensedday, expired, paused)
            if (int(d1) - int(latestsensedday) > 4) and (expired == 0) and (paused == 0):
                print('inin')
                # db.reference('PaidMonitoredFields').child('PMF').child(uid).child(fieldid).child("Paused").set('no')
                # Look up new Sentinel-1 (SAR/RVI), land-use, DEM and Sentinel-2 data for the window.
                if new_field == 1 and payf == 1:
                    print('inin2')
                    req_json_sar = find_sar(uid, fieldid, latest_rvi_day, expired, paused, fromdate, todate, clientID, clientSecret, rviID)
                    latest_rvi_day = req_json_sar["LatestDay"]
                    req_json_land = get_land_use(uid, fieldid, latestsensedday, latestfailedday, expired, paused, fromdate, todate, clientID, clientSecret, wmsID)
                    req_json_dem = find_dem(uid, fieldid, latest_dem_day, expired, paused, fromdate, todate, clientID, clientSecret, demID)
                    latest_dem_day = req_json_dem["LatestDay"]
                    req_json = search_new_sentinel(uid, fieldid, latestsensedday, latestfailedday, expired, paused, fromdate, todate, clientID, clientSecret, wmsID)
                elif new_field == 0 and payf == 1 and expired == 0:
                    print('inin3')
                    req_json_sar = find_sar(uid, fieldid, latest_rvi_day, expired, paused, fromdate, todate, clientID, clientSecret, rviID)
                    latest_rvi_day = req_json_sar["LatestDay"]
                    req_json_land = get_land_use(uid, fieldid, latestsensedday, latestfailedday, expired, paused, fromdate, todate, clientID, clientSecret, wmsID)
                    req_json_dem = find_dem(uid, fieldid, latest_dem_day, expired, paused, fromdate, todate, clientID, clientSecret, demID)
                    latest_dem_day = req_json_dem["LatestDay"]
                    req_json = search_new_sentinel(uid, fieldid, latestsensedday, latestfailedday, expired, paused, fromdate, todate, clientID, clientSecret, wmsID)
                else:
                    print('notpaid')
                    req_json = {}
                    req_json["MGRS"] = "NotA"
                    req_json["LatestDay"] = d1
                new_string_json = req_json
                imagedate = str(req_json["LatestDay"])
                print('im_date')
                print(imagedate)
                uid = current_uid
                print(uid)
                print(req_json)
                big_query_performed = 1
                mgrs = req_json["MGRS"]
                print('mgrs')
                print(mgrs)
                try:
                    previous_day = db.reference('LatestTileDates').child(mgrs).get()
                except:
                    previous_day = None
                    db.reference('LatestTileDates').child(str(mgrs)).set(imagedate)
                if previous_day is None:
                    previous_day = '11'
                print('p_date')
                print(previous_day)
                if int(previous_day) == 0:
                    result_status = uid + "failed4"
                else:
                    result_status = "successful"
                if result_status.find("failed") < 0:
                    new_string_json["StartPixelLat"] = 180
                    new_string_json["EndPixelLat"] = 180
                    new_string_json["StartPixelLong"] = 180
                    new_string_json["EndPixelLong"] = 180
                    new_string_json["FieldID"] = fieldid
                    new_string_json["PlantDistance"] = PlantDistance
                    result_status = makeFieldImages(new_string_json)
                    # try:
                    #     aqw2 = octave.check_clouds(uid)
                    # except:
                    #     aqw2 = 2
                    aqw2 = 0
                    diff_count = 0
                else:
                    result_status = "failed0"
                storage_client = storage.Client()
                bucket_name = 'farmbase-b2f7e.appspot.com'
                try:
                    images_array = ['tci', 'etci', 'hybrid', 'ndvi', 'evi', 'rvi', 'rsm', 'ndwi', 'ndre', 'vari', 'soc', 'savi', 'ndmi', 'evapo', 'avi', 'bsi', 'si', 'dem', 'hybrid_blind', 'vssi', 'lulc']
                    images_array2 = ['soc_cmap2', 'ndre_cmap2', 'ndvi_cmap2', 'evi_cmap2', 'ndwi_cmap2', 'vari_cmap2', 'savi_cmap2', 'avi_cmap2', 'bsi_cmap2', 'si_cmap2', 'ndmi_cmap2', 'vssi_cmap2']
                    images_array3 = ['tci.tif', 'hybrid.tif', 'etci.tif', 'ndvi.tif', 'evi.tif', 'soc.tif', 'ndre.tif', 'vari.tif']
                    images_array4 = ['ndvi_pie', 'evi_pie', 'ndwi_pie', 'ndre_pie', 'vari_pie', 'savi_pie', 'avi_pie', 'bsi_pie', 'si_pie', 'soc_pie', 'ndmi_pie', 'vssi_pie']
                    for imgName in images_array2:
                        images_array.append(imgName)
                    for imgName in images_array4:
                        images_array.append(imgName)
                    tif_array = []
                    for imgName in images_array:
                        tif_array.append(imgName)
                    for imgName in images_array2:
                        tif_array.append(imgName)
                except:
                    print('problem in assigning storage address')
                if int(latest_rvi_day) > int(last_latest_rvi_day) and expired == 0 and paused == 0:
                    try:
new_string_json["FieldMaxLat"] fieldminlat = new_string_json["FieldMinLat"] fieldminlong = new_string_json["FieldMinLong"] fieldmaxlong = new_string_json["FieldMaxLong"] aqpp = merge_dem(uid,fieldid,coordinates,latest_dem_day,fieldmaxlat,fieldminlat,fieldmaxlong,fieldminlong) destination_blob_name_dem = 'PaidMonitoredFields/'+uid+'/'+fieldid+'/'+latest_rvi_day+'/dem' dem_file_name = uid+'/dem.png' bucket = storage_client.get_bucket(bucket_name) blob = bucket.blob(destination_blob_name_dem) blob.upload_from_filename(dem_file_name) except: print('dem error') if int(latest_rvi_day) > int(last_latest_rvi_day) and expired == 0 and paused == 0: db.reference('PaidMonitoredFields').child('PMF').child(uid).child(fieldid).child('SARDays').child(str(latest_rvi_day)).set('yes') try: fieldmaxlat = new_string_json["FieldMaxLat"] fieldminlat = new_string_json["FieldMinLat"] fieldminlong = new_string_json["FieldMinLong"] fieldmaxlong = new_string_json["FieldMaxLong"] aqpp = merge_sar(uid,fieldid,coordinates,latest_rvi_day,fieldmaxlat,fieldminlat,fieldmaxlong,fieldminlong) s1_imgs = ['rvi', 'rsm'] for temp_img in s1_imgs: destination_blob_name_rvi = 'PaidMonitoredFields/'+uid+'/'+fieldid+'/'+latest_rvi_day+'/' + temp_img rvi_file_name = uid+'/' + temp_img + '.png' bucket = storage_client.get_bucket(bucket_name) blob = bucket.blob(destination_blob_name_rvi) blob.upload_from_filename(rvi_file_name) aqpp = merge_dem(uid,fieldid,coordinates,latest_dem_day,fieldmaxlat,fieldminlat,fieldmaxlong,fieldminlong) destination_blob_name_dem = 'PaidMonitoredFields/'+uid+'/'+fieldid+'/'+latest_dem_day+'/dem' dem_file_name = uid+'/dem.png' bucket = storage_client.get_bucket(bucket_name) blob = bucket.blob(destination_blob_name_dem) blob.upload_from_filename(dem_file_name) attachments = set() #attachments.add(rvi_file_name) useremail = fie["Email"] # try: # f_address = fie["FieldAddress"] # except: # f_address = "not available" except Exception as e: print(e) if result_status.find('successful') < 0 : if result_status.find('failed0')>0: uid = result_status.replace("failed0","") print(uid) db.reference('PaidMonitoredFields').child('PMF').child(uid).child(fieldid).child('FailedDays').child(imagedate).set('yes') send_error_noti(uid) if result_status.find('failed1')>0: uid = result_status.replace("failed1","") if generate_tifs==1: bucket = storage_client.get_bucket(bucket_name) useremail = fie["Email"] # f_address = fie["FieldAddress"] gen_report_new(uid,f_address,centerlat,centerlong,imagedate,fieldid,fieldarea,lang,1, whitelabel, [fieldminlat, fieldminlong, fieldmaxlat, fieldmaxlong], coordinates, None,0,0,fie) ff= f_address ff = ff.replace(" ","_") ff = ff.replace(",","_") ff = ff.replace("/","_") doc_name_html = uid+'report.html' blob = bucket.blob(('PaidMonitoredFields/'+uid+'/'+fieldid+'/'+imagedate+'/report.html')) blob.upload_from_filename(doc_name_html) upload_reports(lang,uid, whitelabel, imagedate, ff, fieldid, bucket) #attachments.add(doc_name) sendfailedreport(attachments,useremail,f_address,imagedate,whitelabel, whitelabelEmail, whitelabelPassword) db.reference('PaidMonitoredFields').child('PMF').child(uid).child(fieldid).child('FailedDays').child(imagedate).set('yes') send_error_noti(uid) if result_status.find('failed2')>0: uid = result_status.replace("failed2","") db.reference('PaidMonitoredFields').child('PMF').child(uid).child(fieldid).child('FailedDays').child(imagedate).set('yes') send_error_noti(uid) if result_status.find('failed4')>0: uid = result_status.replace("failed4","") else: uid = 
                    uid = result_status.replace("successful", "")
                    # Cloud check is disabled (octave.check_clouds is commented out), so aqw2
                    # stays 0 and processing always continues.
                    # aqw2 = octave.check_clouds(uid)
                    aqw2 = 0
                    p_date = latestsensedday
                    c_lat = None
                    p_coords = 0
                    if aqw2 < 2:
                        fieldlatlen = abs(float(fieldmaxlat) - float(fieldminlat))
                        fieldlonglen = abs(float(fieldmaxlong) - float(fieldminlong))
                        field_area = map_coords(uid, fieldid, coordinates, imagedate, fieldmaxlat, fieldminlat, fieldmaxlong, fieldminlong, cropType, tif_array)
                        try:
                            imgs = ['rvi', 'rsm']
                            for img in imgs:
                                destination_blob_name_rvi = 'PaidMonitoredFields/' + uid + '/' + fieldid + '/' + latest_rvi_day + '/' + img
                                rvi_file_name = uid + '/' + img + '.png'
                                bucket = storage_client.get_bucket(bucket_name)
                                blob = bucket.blob(destination_blob_name_rvi)
                                blob.upload_from_filename(rvi_file_name)
                        except:
                            print('rvi error2')
                        ndvi, ndwi = get_indices(uid, fieldid, imagedate)
                        # f_address = db.reference('PaidMonitoredFields').child('PMF').child(uid).child(fieldid).child('FieldAddress').get()
                        centerlat = centerlat_large
                        centerlong = centerlong_large
                        # if f_address is None:
                        #     f_address = 'not defined'
                        if generate_tifs == 1:
                            if f_address.find("India") < 0 or gen_tif == 'abs_yes':
                                for imageName in tif_array:
                                    tifName = imageName + '.tif'
                                    gen_geotiff2(new_string_json, uid, imageName, tifName)
                                try:
                                    for imageName in tif_array:
                                        if imageName == 'tci.tif':
                                            imageName = 'TCI.tif'
                                        elif imageName == 'etci.tif':
                                            imageName = 'ETCI.tif'
                                        imageAddress = uid + '/' + imageName
                                        imageDestination = 'PaidMonitoredFields/' + uid + '/' + fieldid + '/' + imagedate + '/' + imageName
                                        blob = bucket.blob(imageDestination)
                                        try:
                                            blob.upload_from_filename(imageAddress)
                                        except:
                                            print('unable to upload')
                                except:
                                    print('tif error')
                                useremail = fie["Email"]
                                gen_report_new(uid, f_address, centerlat, centerlong, imagedate, fieldid, field_area, lang, 0, whitelabel, [fieldminlat, fieldminlong, fieldmaxlat, fieldmaxlong], coordinates, None, ndvi, ndwi, fie)
                                bucket = storage_client.get_bucket(bucket_name)
                                for imageName in images_array:
                                    originalName = imageName
                                    if imageName == 'tci':
                                        imageName = 'TCI'
                                    elif imageName == 'etci':
                                        imageName = 'ETCI'
                                    elif imageName.find('pie') > -1:
                                        imageName = imageName + 'chart'
                                    elif imageName == 'mask_img':
                                        originalName = 'mask'
                                    imageAddress = uid + '/' + imageName + '.png'
                                    imageDestination = 'PaidMonitoredFields/' + uid + '/' + fieldid + '/' + imagedate + '/' + originalName
                                    blob = bucket.blob(imageDestination)
                                    try:
                                        blob.upload_from_filename(imageAddress)
                                    except:
                                        print(('unable to upload ' + originalName))
                                imageAddress = uid + '/mask_img.png'
                                imageDestination = 'PaidMonitoredFields/' + uid + '/' + fieldid + '/' + imagedate + '/mask'
                                blob = bucket.blob(imageDestination)
                                try:
                                    blob.upload_from_filename(imageAddress)
                                except:
                                    print('unable to upload mask')
                                ff = f_address
                                ff = ff.replace(" ", "_")
                                ff = ff.replace(",", "_")
                                ff = ff.replace("/", "_")
                                doc_name_html = uid + 'report.html'
                                blob = bucket.blob(('PaidMonitoredFields/' + uid + '/' + fieldid + '/' + imagedate + '/report.html'))
                                blob.upload_from_filename(doc_name_html)
                                doc_name = uid + '/' + whitelabel + '_report_' + str(imagedate) + '_' + ff + '.pdf'
                                upload_reports(lang, uid, whitelabel, imagedate, ff, fieldid, bucket)
                                # attachments.add(doc_name)
                                sendgeotifs(attachments, useremail, f_address, imagedate, whitelabel, whitelabelEmail, whitelabelPassword)
                            else:
                                useremail = fie["Email"]
                                gen_report_new(uid, f_address, centerlat, centerlong, imagedate, fieldid, field_area, lang, 0, whitelabel, [fieldminlat, fieldminlong, fieldmaxlat, fieldmaxlong], coordinates, None, ndvi, ndwi, fie)
                                bucket = storage_client.get_bucket(bucket_name)
                                for imageName in images_array:
                                    if imageName == 'tci':
                                        imageName = 'TCI'
                                    elif imageName == 'etci':
                                        imageName = 'ETCI'
                                    elif imageName.find('pie') > -1:
                                        imageName = imageName + 'chart'
                                    imageAddress = uid + '/' + imageName + '.png'
                                    imageDestination = 'PaidMonitoredFields/' + uid + '/' + fieldid + '/' + imagedate + '/' + imageName
                                    blob = bucket.blob(imageDestination)
                                    try:
                                        blob.upload_from_filename(imageAddress)
                                    except:
                                        print(('unable to upload ' + imageName))
                                imageAddress = uid + '/mask_img.png'
                                imageDestination = 'PaidMonitoredFields/' + uid + '/' + fieldid + '/' + imagedate + '/mask'
                                blob = bucket.blob(imageDestination)
                                try:
                                    blob.upload_from_filename(imageAddress)
                                except:
                                    print('unable to upload mask')
                                ff = f_address
                                ff = ff.replace(" ", "_")
                                ff = ff.replace(",", "_")
                                ff = ff.replace("/", "_")
                                ff = ff.replace("\\", "_")
                                doc_name_html = uid + 'report.html'
                                blob = bucket.blob(('PaidMonitoredFields/' + uid + '/' + fieldid + '/' + imagedate + '/report.html'))
                                blob.upload_from_filename(doc_name_html)
                                upload_reports(lang, uid, whitelabel, imagedate, ff, fieldid, bucket)
                                # attachments.add(doc_name)
                                # print('report_erro')
                                sendonlyreport(attachments, useremail, f_address, imagedate, whitelabel, whitelabelEmail, whitelabelPassword)
                        db.reference('PaidMonitoredFields').child('PMF').child(uid).child(fieldid).child('LatestDay').set(imagedate)
                        db.reference('PaidMonitoredFields').child('PMF').child(uid).child(fieldid).child('SensedDays').child(imagedate).set('yes')
                        if uid == 'HC1KG5a2e1ZhXaPMpEcylZmeMYM2':
                            send_webhook_data(fieldid)
                        try:
                            fieldid = str(fieldid)
                            imagedate = str(imagedate)
                            # Mirror the latest sensed day into this field's Firestore document.
                            city_ref = dbF.collection(uid).document(fieldid)
                            city_ref.update({"LatestSensedDay": imagedate})
                        except:
                            wywy = 1
                        send_moni_noti(uid)
                    else:
                        if generate_tifs == 1:
                            useremail = fie["Email"]
                            # try:
                            #     f_address = fie["FieldAddress"]
                            # except:
                            #     f_address = "Not Available"
                            gen_report_new(uid, f_address, centerlat, centerlong, imagedate, fieldid, fieldarea, lang, 1, whitelabel, [fieldminlat, fieldminlong, fieldmaxlat, fieldmaxlong], coordinates, None, ndvi, ndwi, fie)
                            ff = f_address
                            ff = ff.replace(" ", "_")
                            ff = ff.replace(",", "_")
                            ff = ff.replace("/", "_")
                            doc_name_html = uid + 'report.html'
                            bucket = storage_client.get_bucket(bucket_name)
                            blob = bucket.blob(('PaidMonitoredFields/' + uid + '/' + fieldid + '/' + imagedate + '/report.html'))
                            blob.upload_from_filename(doc_name_html)
                            upload_reports(lang, uid, whitelabel, imagedate, ff, fieldid, bucket)
                            # attachments.add(doc_name)
                            sendfailedreport(attachments, useremail, f_address, imagedate, whitelabel, whitelabelEmail, whitelabelPassword)
                        db.reference('PaidMonitoredFields').child('PMF').child(uid).child(fieldid).child('FailedDays').child(imagedate).set('yes')
                        send_error_noti(uid)

    def makeFieldImages(new_string_json):
        result_status = octave.monitored_field2022(new_string_json)
        return result_status

    def get_indices(uid, fieldid, latestday):
        ndvi = db.reference('PaidMonitoredFields').child('PMF').child(uid).child(fieldid).child('Health').child('ndvi').child(latestday).get()
        ndwi = db.reference('PaidMonitoredFields').child('PMF').child(uid).child(fieldid).child('Health').child('ndwi').child(latestday).get()
        print(['index', ndvi, ndwi])
        ndvi = float(ndvi) / 100
        ndwi = float(ndwi) / 100
        return ndvi, ndwi

    def upload_reports(all_langs, uid, whitelabel, imagedate, ff, fieldid, bucket):
        # NOTE: this early return short-circuits the function, so the per-language
        # PDF uploads below are currently skipped.
        return
        all_langs = all_langs.split(",")
        lang_num = 0
        for single_lang in all_langs:
            if lang_num == 0:
                doc_name = uid + '/' + whitelabel + '_report_' + single_lang + '_' + str(imagedate) + '_' + ff + '.pdf'
                blob = bucket.blob(('PaidMonitoredFields/' + uid + '/' + fieldid + '/' + imagedate + '/report.pdf'))
                blob.upload_from_filename(doc_name)
            else:
                doc_name = uid + '/' + whitelabel + '_report_' + single_lang + '_' + str(imagedate) + '_' + ff + '.pdf'
                blob = bucket.blob(('PaidMonitoredFields/' + uid + '/' + fieldid + '/' + imagedate + '/report' + single_lang + '.pdf'))
                blob.upload_from_filename(doc_name)
            lang_num = lang_num + 1

    def should_delete_field(uid, fieldid, fieldobj):
        try:
            expired = fieldobj["Expired"]
        except:
            expired = 0
        try:
            paymentType = fieldobj["PaymentType"]
        except:
            paymentType = 0
        if int(expired) == 1 or paymentType == 0:
            if paymentType == 0:
                paymentMillis = 24 * 60 * 60 * 1000
            else:
                try:
                    paymentMillis = (int(paymentType) + 6) * 30 * 24 * 60 * 60 * 1000
                except:
                    paymentMillis = 6 * 30 * 24 * 60 * 60 * 1000
            try:
                if (int(round(time.time() * 1000)) - int(fieldid)) > paymentMillis:
                    print(('deleting...' + str(uid) + ', ' + str(fieldid)))
                    try:
                        db.reference('DeletedFields').child('PMF').child(uid).child(fieldid).set(fieldobj)
                    except:
                        print('fieldobj not found')
                    db.reference('PaidMonitoredFields').child('PMF').child(uid).child(fieldid).delete()
                    return 1
            except:
                db.reference('PaidMonitoredFields').child('PMF').child(uid).child(fieldid).delete()
                return 1
        return 0

    for (k, v) in uid_list.items():
        current_uid = k
        uid = current_uid
        print(uid)
        if uid != 'CeMGYvLXrGR5ZThxZ46iV7vY8sa2' and uid != 'TCXcp5VIsfhHZrh0nm2VsgBtcGy2':
            # os.system("rm -rf AwsData")
            lang = 'en'
            fields = v
            for (p, q) in fields.items():
                toremove = "rm -rf " + uid
                os.system(toremove)
                fieldid = p
                fie = fields[fieldid]
                # print(fie)
                is_deleted = 0
                is_deleted = should_delete_field(uid, fieldid, fie)
                try:
                    if is_deleted == 1:
                        print('field deleted')
                    else:
                        try:
                            previous_data_requests = fie["PreviousDataRequests"]
                            print(previous_data_requests)
                            for (timestamp, obj) in previous_data_requests.items():
                                requested_date = timestamp
                                # print(timestamp)
                                previous_from_date, previous_to_date = get_to_and_from_date(requested_date, 0)
                                print(['dates: ', previous_from_date, previous_to_date])
                                satellite_data(uid, lang, fieldid, fields, previous_from_date, previous_to_date, sentinelSettings)
                                db.reference('PaidMonitoredFields').child('PMF').child(uid).child(fieldid).child('PreviousDataRequests').child(requested_date).delete()
                        except Exception as e:
                            print(e)
                        satellite_data(uid, lang, fieldid, fields, None, None, sentinelSettings)
                except Exception as e:
                    print(e)
    # r = requests.post('https://us-central1-farmbase-b2f7e.cloudfunctions.net/stopVM')
    # r.json()
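
# ---------------------------------------------------------------------------
# Hypothetical usage sketch (an assumption, not part of the original pipeline):
# server2022(None, None) walks every non-disabled account under
# PaidMonitoredFields/PMF; passing a specific uid (and optionally a fieldid)
# restricts processing to that account or field. The uid/fieldid strings below
# are placeholders. Uncomment to run this module directly.
# ---------------------------------------------------------------------------
# if __name__ == '__main__':
#     server2022(None, None)              # full pass over all paid fields
#     # server2022('<uid>', None)         # all fields of one account
#     # server2022('<uid>', '<fieldid>')  # re-process a single field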