from tempfile import tempdir  # NOTE(review): unused in this chunk — kept in case later code relies on it

import firebase_admin
from firebase_admin import credentials
from firebase_admin import db

import numpy as np
import pandas as pd
import seaborn as sns
import statsmodels.api as sm
from sklearn.cluster import KMeans
from sentinelhub import (
    BBox,
    CRS,
    DataCollection,
    DataSource,
    MimeType,
    WebFeatureService,
    WmsRequest,
)

from find_s1_image import find_img_value
from find_study_area_values import find_study_area_values
from make_area_estimate_image import make_area_estimate_image

sns.set()

cred = credentials.Certificate('servicekey.json')

# Half-extents (degrees) of the sampling window around each field centroid:
# x is the latitude half-height, y the longitude half-width.
x = 0.0002
y = 0.0002
fromdate = '20220701'
todate = '20220715'
s1_images = ['IW-VH-DB']
s2_images = []
# s1_images = ["B02", "B03", "B04", "B05"]
# s2_images = ["RVI-NEW"]

try:
    firebase_admin.initialize_app(
        cred, {'databaseURL': 'https://farmbase-b2f7e-31c0c.firebaseio.com/'})
except ValueError:
    # initialize_app raises ValueError when the default app already exists
    # (e.g. on module re-import); that is the only failure we tolerate here.
    print('fire running')

study_uid = "vPM6EKWHfHRxmRfc0YJ7PZ8J9tR2"
gt_uid = ['u5ZgAskSltX7dQdNFUlHsgVhAlf2', 'b2j92LjidjeGt2b60QHhSj4GeS63']
different_gt_uid = 'hxA3VYEFpaOluurIbHxdzd4gPlw1'
study_field_id = '1662365839021'

# Merge the monitored fields of every ground-truth user into one dict
# keyed by field id.
gt_fields = {}
for temp_id in gt_uid:
    temp_gt_fields = db.reference('PaidMonitoredFields').child('PMF').child(temp_id).get()
    for (p, q) in temp_gt_fields.items():
        gt_fields[p] = q

different_gt_fields = db.reference('PaidMonitoredFields').child('PMF').child(different_gt_uid).get()
study_field = db.reference('PaidMonitoredFields').child('PMF').child(study_uid).child(study_field_id).get()

sentinelSettings = db.reference('SentinelSettings').get()
clientID = sentinelSettings["ClientID"]
clientSecret = sentinelSettings["ClientSecret"]
wmsID = sentinelSettings["WMSID"]
rviID = sentinelSettings["RVIID"]
demID = sentinelSettings["DEMID"]

field_num = 0
from_date = '2022-06-25T00:00:00'
to_date = '2022-09-06T23:59:59'
max_num = 6
main_from_date = '20220105'
main_to_date = '20220502'


def append_img_vals(temp_arr, img_values):
    """Append every element of temp_arr to img_values, tolerating None.

    Mutates img_values in place and also returns it so call sites can
    reassign the result.
    """
    if temp_arr is not None:
        img_values.extend(temp_arr)
    return img_values


# bounds layout: [min_long, min_lat, max_long, max_lat]
# db_values, linear_values, ndvi_values, fcc_values, lswi_values
all_fields_img_values = []
gt_fields_img_values = []

# Collect image time-series values for up to 120 ground-truth fields.
# A field only counts toward the cap if it actually yielded values.
i = 0
for (fieldid, fieldobj) in gt_fields.items():
    if i >= 120:
        break
    img_values = []
    midlat = (float(fieldobj["FieldMinLat"]) + float(fieldobj["FieldMaxLat"])) / 2
    midlng = (float(fieldobj["FieldMinLong"]) + float(fieldobj["FieldMaxLong"])) / 2
    bounds = [(midlng - y), (midlat - x), (midlng + y), (midlat + x)]
    print(bounds)
    image_num = 0
    for image in s2_images:
        temp_arr = find_img_value(fieldid, DataCollection.SENTINEL2_L1C, image, bounds,
                                  from_date, to_date, clientID, clientSecret, wmsID, max_num)
        img_values = append_img_vals(temp_arr, img_values)
    for image in s1_images:
        temp_arr = find_img_value(fieldid, DataCollection.SENTINEL1_IW, image, bounds,
                                  from_date, to_date, clientID, clientSecret, rviID, max_num)
        img_values = append_img_vals(temp_arr, img_values)
    if len(img_values) > 0:
        all_fields_img_values.append(img_values)
        gt_fields_img_values.append(img_values)
        i = i + 1

# Same collection for up to 40 fields of the "different" user; these feed
# only the clustering corpus, not the ground-truth set.
i = 0
for (fieldid, fieldobj) in different_gt_fields.items():
    if i >= 40:
        break
    img_values = []
    midlat = (float(fieldobj["FieldMinLat"]) + float(fieldobj["FieldMaxLat"])) / 2
    midlng = (float(fieldobj["FieldMinLong"]) + float(fieldobj["FieldMaxLong"])) / 2
    bounds = [(midlng - y), (midlat - x), (midlng + y), (midlat + x)]
    print(bounds)
    image_num = 0
    for image in s2_images:
        # BUG FIX: the original omitted fieldid here while the loop above
        # passes it as the first argument — made both call sites consistent.
        # TODO(review): confirm against find_img_value's signature.
        temp_arr = find_img_value(fieldid, DataCollection.SENTINEL2_L1C, image, bounds,
                                  from_date, to_date, clientID, clientSecret, wmsID, max_num)
        img_values = append_img_vals(temp_arr, img_values)
    for image in s1_images:
        temp_arr = find_img_value(fieldid, DataCollection.SENTINEL1_IW, image, bounds,
                                  from_date, to_date, clientID, clientSecret, rviID, max_num)
        img_values = append_img_vals(temp_arr, img_values)
    if len(img_values) > 0:
        all_fields_img_values.append(img_values)
        i = i + 1

print(all_fields_img_values)
print(gt_fields_img_values)
np.savetxt("cotton_all_field_img_values.csv", all_fields_img_values, delimiter=",")
np.savetxt("cotton_gt_field_img_values.csv", gt_fields_img_values, delimiter=",")

# Fit K-Means on the full corpus, then see which clusters the known
# ground-truth fields fall into.
k_groups = 20
kmeans = KMeans(k_groups)
# kmeans = KMeans(n_clusters=2, random_state=0)
kmeans.fit(all_fields_img_values)
cluster_array = kmeans.predict(gt_fields_img_values)
unique, count_array = np.unique(cluster_array, return_counts=True)
result = np.column_stack((unique, count_array))
print(result)

# Pick the most populous clusters, in descending order of membership,
# until they cover at least 60% of the ground-truth samples; those cluster
# labels become the "crop" predictions.
# (renamed the sort-key lambda parameter: the original `x` shadowed the
# module-level latitude half-extent constant.)
new_count_array = sorted(result, key=lambda row: row[1], reverse=True)
print(new_count_array)
pred_vals = []
sum_count_array = np.sum(count_array)
j = 0  # running count of samples covered so far
for row in new_count_array:
    if j / sum_count_array < 0.6:
        pred_vals.append(row[0])
        j = j + row[1]
    else:
        break

study_pixel_values_arr = []
# Fetch per-pixel values for the whole study field (full bounding box,
# not a centroid window).
midlat = (float(study_field["FieldMinLat"]) + float(study_field["FieldMaxLat"])) / 2
midlng = (float(study_field["FieldMinLong"]) + float(study_field["FieldMaxLong"])) / 2
bounds = [float(study_field["FieldMinLong"]), float(study_field["FieldMinLat"]),
          float(study_field["FieldMaxLong"]), float(study_field["FieldMaxLat"])]
print(bounds)
image_num = 0
# NOTE(review): temp_arr/w/h keep only the LAST image's result; fine while
# s2_images is empty and s1_images has one entry, but fragile otherwise.
for image in s2_images:
    temp_arr, w, h = find_study_area_values(DataCollection.SENTINEL2_L1C, image, bounds,
                                            from_date, to_date, clientID, clientSecret,
                                            wmsID, max_num)
for image in s1_images:
    temp_arr, w, h = find_study_area_values(DataCollection.SENTINEL1_IW, image, bounds,
                                            from_date, to_date, clientID, clientSecret,
                                            rviID, max_num)

# BUG FIX: the original used kmeans.fit_predict(temp_arr), which REFITS the
# model on the study-area pixels. A fresh fit assigns arbitrary new labels,
# so they would no longer correspond to pred_vals computed from the model
# fit on all_fields_img_values above. predict() keeps the label space.
Identified_clusters = kmeans.predict(temp_arr)
# np.savetxt('identified_arr.csv', Identified_clusters, delimiter=",")
# Render the per-pixel cluster labels (w x h grid) as an area-estimate image
# for the study field; pred_vals tells the renderer which cluster labels to
# treat as the target class. Presumably writes/uploads the result keyed by
# study_uid/study_field_id — TODO confirm in make_area_estimate_image.
make_area_estimate_image(Identified_clusters, w, h, study_uid, study_field_id, pred_vals)