def estimate_area_server(gt_uids, study_uids, from_date, to_date, max_num, latestDay, field_id):
    from tempfile import tempdir
    import firebase_admin
    from firebase_admin import credentials
    from firebase_admin import db
    import pandas as pd
    import statsmodels.api as sm
    import seaborn as sns
    import traceback
    import math
    import csv
    import time
    import json
    import cv2
    import numpy as np
    from sklearn.metrics import classification_report, confusion_matrix, accuracy_score
    from sklearn.ensemble import RandomForestRegressor
    from sklearn.model_selection import train_test_split
    from sklearn.cluster import KMeans
    from sentinelhub import WebFeatureService, BBox, CRS, MimeType, WmsRequest, DataCollection
    from PIL import Image
    from google.cloud import storage
    from find_s1_image2 import find_img_value
    from find_modis_ndvi import find_modis_ndvi
    from find_study_area_values import find_study_area_values
    from find_study_area_values3 import find_study_area_values3
    from find_study_area_values4 import find_study_area_values4
    # from make_area_estimate_image import make_area_estimate_image
    from make_egypt_estimate_image import make_egypt_estimate_image
    from make_area_estimation_image import make_area_estimation_image
    from get_mask import get_mask
    from make_dir import make_dir

    sns.set()

    cred = credentials.Certificate('servicekey.json')

    # Sampling / imaging configuration
    x = 0.0003
    y = 0.0003
    fromdate = '20220701'
    todate = '20220715'
    s1_images = ['IW-VH-DB']
    #s1_images = ['NDVI']
    s2_images = []
    # s1_images = ["B02", "B03", "B04", "B05"]
    #s2_images = ["RVI-NEW"]

    try:
        firebase_admin.initialize_app(cred, {'databaseURL': 'https://farmbase-b2f7e-31c0c.firebaseio.com/'})
    except Exception:
        print('firebase app already initialized')

    # get all non_gt farms
    storage_client = storage.Client()
    bucket_name = 'farmbase-b2f7e.appspot.com'
    sentinelSettings = db.reference('SentinelSettings').get()
    clientID = sentinelSettings["ClientID"]
    clientSecret = sentinelSettings["ClientSecret"]
    wmsID = sentinelSettings["WMSID"]
    rviID = sentinelSettings["RVIID"]
    demID = sentinelSettings["DEMID"]
    modisID = sentinelSettings["MODISID"]
    field_num = 0

    # UIDs of accounts whose fields serve as the non-crop ground-truth classes
    uid_obj = {}
    uid_obj["d5uMEizVYEZQMb7jgYfs34CrctM2"] = 'roads'        #0
    uid_obj["foLUOBL0TwfS4oogj0SArS0KKCs2"] = 'plantation'   #1
    uid_obj["NijZGGvjg4NxsI4Lq7XBYwHzvBi2"] = 'buildings'    #2
    uid_obj["78cHywt2XKSlkerZ8ovHDZwcdFR2"] = 'emptyspaces'  #3
    uid_obj["VXpuh3xsZjgkyFCVrqCsY3htfpw2"] = 'forests'      #4
    uid_obj["dXw569KpfOP5gc4zl7auUpljNkH2"] = 'deserts'      #5
    uid_obj["Tlop7Ixft6Qm1kMoxvIkmmkktg72"] = 'waterbodies'  #6
    #uid_obj['fLwCtzdi7GRqrV12bI1ULGGcwDo2'] = 'banana' #7
    # uid_obj['AvlbNpsuI7P1sva1NWMBUN4Fmbg1'] = 'coriander' #8
    # uid_obj['Jh71NGn1A4VSu3WOYwXhRiia0Cu1'] = 'jeera' #9
    #uid_obj["WdSPkBXkkXcEQGU8tSFhqfAGZUf1"] = 'sugarbeet' #7
    #uid_obj['58EOGX5hGOXlQaKjQ99c3JpZxbm2'] = 'wheat' #7
    #uid_obj["9fLMcJOY9pPOMWDiQDZrxxQlRS13"] = 'clover' #8
    #uid_obj["qL4tgUcl1ggqLRFFgxTNbTgP3uk1"] = 'barley' #9

    gt_img_width = 20
    study_img_width = 300
    threshold = 5.25
    day_gap = 2
    portion_params = [0.25, 0.25]   # max [latRange, lngRange] of one study-area portion
    max_gt_fields = 40

    # Fields belonging to the ground-truth UIDs form the "main" (crop) class.
    for gt_uid in gt_uids:
        uid_obj[gt_uid] = "main"

    i_num = 0
    #non_gt_uids = [roads, plantation, buildings, emptyspaces, forests, deserts, waterbodies]
    non_gt_signatures = []
    all_signature_values = []
    gt_wise_signature_values = {}
    gt_type_num = 0
    regressor = RandomForestRegressor(n_estimators=200, random_state=0)

    def append_img_vals(temp_arr, img_values):
        # Append every value from temp_arr onto the running signature list.
        if temp_arr is not None:
            for k in temp_arr:
                img_values.append(k)
        return img_values

    def get_field_bounds(field_obj):
        # Bounds are always treated as [minLng, minLat, maxLng, maxLat].
        fieldmaxlat = field_obj["FieldMaxLat"]
        fieldminlat = field_obj["FieldMinLat"]
        fieldmaxlong = field_obj["FieldMaxLong"]
        fieldminlong = field_obj["FieldMinLong"]
        bounds = [fieldminlong, fieldminlat, fieldmaxlong, fieldmaxlat]
        return bounds

    def train_classifier():
        # Train the random forest on the per-class signatures collected by
        # make_signature(); the class index is the regression target.
        X, Y = [], []
        for (num, temp_arrs) in gt_wise_signature_values.items():
            for temp_arr in temp_arrs:
                X.append(temp_arr[0:max_num])   # cap each signature at max_num values
                Y.append(num)
        X = np.asarray(X)
        Y = np.asarray(Y)

        # Persist the training data for later inspection.
        json_object = json.dumps(gt_wise_signature_values, indent=4)
        np.savetxt("new_all_fields_img_values.csv", X, delimiter=",")
        with open("new_crop_wise_img_values.json", "w") as outfile:
            outfile.write(json_object)

        X_train, X_test, y_train, y_test = train_test_split(X, Y, test_size=0.33, random_state=0)
        regressor.fit(X_train, y_train)
        y_pred = regressor.predict(X)
        print(y_pred)

    def getSectionBounds(mainBounds, maxSize):
        # ARGUMENTS:
        #   mainBounds: [minLng, minLat, maxLng, maxLat] in decimal degrees
        #   maxSize:    [latRange, lngRange] - maximum size of one section
        # RESULT:
        #   list of lists of bounds; the outer list is rows and the inner list
        #   is columns, starting from the top-left.
        minLng = mainBounds[0]
        minLat = mainBounds[1]
        maxLng = mainBounds[2]
        maxLat = mainBounds[3]
        mainLatRange = maxLat - minLat
        mainLngRange = maxLng - minLng
        rowsCount = max(1, math.ceil(mainLatRange / maxSize[0]))
        colsCount = max(1, math.ceil(mainLngRange / maxSize[1]))
        sectionLatRange = mainLatRange / rowsCount
        sectionLngRange = mainLngRange / colsCount
        print(rowsCount, colsCount, sectionLatRange, sectionLngRange)

        sections = []
        lastLat = maxLat
        for i in range(rowsCount):
            rowList = []
            isLastRow = i == rowsCount - 1
            lastLng = minLng
            rowMinLat = lastLat - sectionLatRange if not isLastRow else minLat
            for j in range(colsCount):
                isLastCol = j == colsCount - 1
                boundMaxLng = lastLng + sectionLngRange if not isLastCol else maxLng
                bounds = [lastLng, rowMinLat, boundMaxLng, lastLat]
                rowList.append(bounds)
                lastLng = boundMaxLng
            sections.append(rowList)
            lastLat = rowMinLat
        print(['sections: ', sections])
        return sections

    def classify_study_area(uid, field_id):
        # Run the trained classifier over every portion of every field of a
        # study-area UID, render per-portion estimate images, then stitch,
        # mask and upload the final area-estimate image.
        if field_id is None:
            fields = db.reference('PaidMonitoredFields').child('PMF').child(uid).get()
        else:
            fields = {}
            fields[field_id] = db.reference('PaidMonitoredFields').child('PMF').child(uid).child(field_id).get()
        field_num = 0
        for (field_id, field_obj) in fields.items():
            make_dir(uid, field_id)
            if True:  # field_id in ['1675573982690', '1673931611094', '1675574002503']:
                try:
                    field_num = field_num + 1
                    bounds = get_field_bounds(field_obj)
                    divided_bounds = getSectionBounds(bounds, portion_params)
                    portion_num = 0
                    row_num, col_num = 0, 0
                    for section_row in divided_bounds:
                        col_num = 0
                        for portion_bounds in section_row:
                            print(['rowcol:', row_num, col_num])
                            for image in s1_images:
                                temp_arr, w, h = find_study_area_values3(
                                    field_id, DataCollection.SENTINEL1_IW, image, portion_bounds,
                                    from_date, to_date, clientID, clientSecret, rviID,
                                    max_num, uid, study_img_width, day_gap)
                                try:
                                    y_pred = regressor.predict(temp_arr)
                                    find_modis_ndvi(uid, field_obj["FieldDescription"], DataCollection.SENTINEL1_IW,
                                                    "RVI", portion_bounds, from_date, to_date, clientID, clientSecret,
                                                    rviID, max_num, study_img_width, field_id)
                                    make_area_estimation_image(y_pred, uid, field_id, threshold, latestDay,
                                                               portion_num, row_num, col_num)
                                except Exception as e:
                                    print(e)
                                    # Fall back to a blank portion image so stitching still works.
                                    try:
                                        portion_img_name = (uid + "/" + field_id + "/area_estimate_ndvi_" +
                                                            field_obj["FieldDescription"] + "_" +
                                                            str(row_num) + str(col_num) + ".png")
                                        blank_img = Image.new("RGB", (w, h))
                                        blank_img.save(portion_img_name)
                                    except Exception as e:
                                        print(e)
                            portion_num = portion_num + 1
                            col_num = col_num + 1
                        row_num = row_num + 1
                    stitch_portion_images(field_obj["FieldDescription"], uid, divided_bounds, field_id)
                    mask_and_upload_final_img(uid, field_id, '10101010', field_obj["Coordinates"],
                                              field_obj["FieldMaxLat"], field_obj["FieldMinLat"],
                                              field_obj["FieldMaxLong"], field_obj["FieldMinLong"],
                                              field_obj["FieldDescription"], field_obj["FieldArea"])
                except Exception:
                    print(traceback.format_exc())

    def mask_and_upload_final_img(study_uid, study_field_id, latestDay, coordinates, fieldmaxlat, fieldminlat,
                                  fieldmaxlong, fieldminlong, field_name, field_area):
        # Mask the stitched estimate image with the field polygon, derive the
        # crop area from the pixel ratio and upload the result to cloud storage.
        img_name = study_uid + "/" + study_field_id + "/area_estimate_ndvi_" + field_name + ".png"
        destination_blob_name_dem = 'PaidMonitoredFields/' + study_uid + '/' + study_field_id + '/' + latestDay + '/hybrid'
        dem_file_name = img_name
        new_area_pixels, new_total_pixels = get_mask(study_uid, study_field_id, coordinates, fieldmaxlat,
                                                     fieldminlat, fieldmaxlong, fieldminlong, field_name)
        crop_area = (new_area_pixels / new_total_pixels) * int(field_area)
        crop_area = int(crop_area / 10000)   # square metres -> hectares
        print_text = (field_name + ' Area: ' + str(int(int(field_area) / 10000)) + ' Ha, Crop Area: ' +
                      str(crop_area) + ' Ha')
        print(print_text)
        bucket = storage_client.get_bucket(bucket_name)
        blob = bucket.blob(destination_blob_name_dem)
        blob.upload_from_filename(dem_file_name)

    def stitch_portion_images(field_name, study_uid, sectionBounds, study_field_id):
        # Portion images are expected to be named row-major, starting from the
        # top-left portion; portion sizes are assumed to be uniform.
        rowsCount = len(sectionBounds)
        colsCount = len(sectionBounds[0])
        all_imgs = []
        widths = []
        heights = []
        result_width = 0
        result_height = 0
        temp_width, temp_height = 0, 0
        for i in range(rowsCount):
            for j in range(colsCount):
                portion_img_name = (study_uid + "/" + study_field_id + "/area_estimate_ndvi_" +
                                    field_name + "_" + str(i) + str(j) + ".png")
                try:
                    portion_img = Image.open(portion_img_name)
                except Exception:
                    # Missing portion: write and use a blank placeholder of the
                    # last-seen portion size so the stitched grid stays aligned.
                    portion_img = Image.new("RGB", (temp_width, temp_height))
                    portion_img.save(portion_img_name)
                all_imgs.append(portion_img)
                (width, height) = portion_img.size
                temp_width = width
                temp_height = height
                if len(widths) < colsCount:
                    widths.append(width)
                    result_width += width
                if len(heights) < rowsCount:
                    heights.append(height)
                    result_height += height

        result = Image.new("RGB", (result_width, result_height))
        for i in range(rowsCount):
            start_height = i * heights[i]
            for j in range(colsCount):
                start_width = j * widths[j]
                print([i, j])
                result.paste(im=all_imgs[j + colsCount * i], box=(start_width, start_height))
        final_img_name = study_uid + "/" + study_field_id + "/area_estimate_ndvi_" + field_name + ".png"
        result.save(final_img_name)

    def make_signature(uid, gt_type_num):
        # Collect per-field image values ("signatures") for one ground-truth
        # class and store them under that class index.
        field_num = 0
        fields = db.reference('PaidMonitoredFields').child('PMF').child(uid).get()
        try:
            for (field_id, field_obj) in fields.items():
                if field_num < max_gt_fields:
                    field_num = field_num + 1
                    bounds = get_field_bounds(field_obj)
                    gt_type = uid_obj[uid]
                    img_values = []
                    for image in s1_images:
                        temp_arr = find_img_value(gt_type, DataCollection.SENTINEL1_IW, image, bounds,
                                                  from_date, to_date, clientID, clientSecret, rviID,
                                                  max_num, gt_img_width, day_gap)
                        print(temp_arr)
                        img_values = append_img_vals(temp_arr, img_values)
                    if len(img_values) > 0:
                        all_signature_values.append(img_values)
                        gt_wise_signature_values.setdefault(gt_type_num, []).append(img_values)
        except Exception:
            print(traceback.format_exc())

    # Build the per-class signatures (non-crop classes plus the "main" crop UIDs).
    for (temp_uid, gt_type) in uid_obj.items():
        make_signature(temp_uid, gt_type_num)
        gt_type_num = gt_type_num + 1

    # Train the classifier on all collected signatures.
    train_classifier()

    # Classify every requested study-area UID.
    for study_uid in study_uids:
        classify_study_area(study_uid, field_id)
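
# A minimal example invocation (a sketch only; the UID strings, the max_num
# value and the latestDay format below are placeholders, not real project data):
#
# estimate_area_server(
#     gt_uids=["<ground-truth-crop-uid>"],
#     study_uids=["<study-area-uid>"],
#     from_date="20220701",
#     to_date="20220715",
#     max_num=50,
#     latestDay="20220715",
#     field_id=None,   # None => classify every field under each study UID
# )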