"""Collect Sentinel-1/2 image statistics for ground-truth paddy fields.

Part 1 of the area-estimation script: initializes Firebase, loads the
ground-truth field geometries (GeoJSON points plus Firebase bounding boxes),
and samples image-layer values around each field midpoint via the Sentinel
Hub helpers.  The per-field value vectors collected here are clustered by
the KMeans section that follows.
"""
from tempfile import tempdir  # NOTE(review): unused import — confirm before removing

import firebase_admin
from firebase_admin import credentials
from firebase_admin import db
import pandas as pd  # NOTE(review): unused in this script
import statsmodels.api as sm  # NOTE(review): unused in this script
import seaborn as sns
import json
import numpy as np
from sklearn.cluster import KMeans
from sentinelhub import (
    WebFeatureService,
    BBox,
    CRS,
    DataSource,
    MimeType,
    WmsRequest,
    DataCollection,
)

from find_s1_image import find_img_value
from find_study_area_values import find_study_area_values
from make_area_estimate_image import make_area_estimate_image

sns.set()

cred = credentials.Certificate('servicekey.json')

# Half-size (degrees) of the sampling box drawn around each field midpoint.
x = 0.0003  # latitude half-extent
y = 0.0003  # longitude half-extent

fromdate = '20220701'
todate = '20220715'

# Layer identifiers to sample.  The S2 list is currently empty, so only the
# Sentinel-1 IW VH backscatter (dB) layer is used.
s1_images = ['IW-VH-DB']
s2_images = []

try:
    firebase_admin.initialize_app(
        cred, {'databaseURL': 'https://farmbase-b2f7e-31c0c.firebaseio.com/'})
except ValueError:
    # initialize_app raises ValueError when the default app already exists
    # (e.g. on re-run in the same process); keep the best-effort message.
    print('fire running')

study_uid = "MBFshQgimkS6AwSDtqAyOpRXZW12"
gt_uid = 'hxA3VYEFpaOluurIbHxdzd4gPlw1'
different_gt_uid = 'joCrKyhfobbzgCxCQPHJfOOKKPF3'
study_field_id = '1599307314955'
gt_fields_dir = 'Downloads/PaddyGTpointsMP.geojson'

# Ground-truth paddy points (GeoJSON FeatureCollection).
with open(gt_fields_dir) as f:
    gt_fields = json.load(f)
gt_fields = gt_fields["features"]

different_gt_fields = db.reference('PaidMonitoredFields').child('PMF').child(different_gt_uid).get()
study_field = db.reference('PaidMonitoredFields').child('PMF').child(study_uid).child(study_field_id).get()

sentinelSettings = db.reference('SentinelSettings').get()
clientID = sentinelSettings["ClientID"]
clientSecret = sentinelSettings["ClientSecret"]
wmsID = sentinelSettings["WMSID"]
rviID = sentinelSettings["RVIID"]
demID = sentinelSettings["DEMID"]

field_num = 0
from_date = '2022-05-25T00:00:00'
to_date = '2022-08-26T23:59:59'
max_num = 7
main_from_date = '20220105'
main_to_date = '20220502'


def append_img_vals(temp_arr, img_values):
    """Append every value of *temp_arr* to *img_values* and return it.

    temp_arr may be None when the image request returned nothing; in that
    case img_values is returned unchanged.
    """
    if temp_arr is not None:
        img_values.extend(temp_arr)
    return img_values


def _collect_img_values(bounds):
    """Sample every configured S2 then S1 layer over *bounds*.

    Returns one flat list with the values of all layers concatenated in
    configuration order.  Uses the module-level date range and credentials.
    """
    img_values = []
    for image in s2_images:
        temp_arr = find_img_value(
            DataCollection.SENTINEL2_L1C, image, bounds, from_date, to_date,
            clientID, clientSecret, wmsID, max_num)
        img_values = append_img_vals(temp_arr, img_values)
    for image in s1_images:
        temp_arr = find_img_value(
            DataCollection.SENTINEL1_IW, image, bounds, from_date, to_date,
            clientID, clientSecret, rviID, max_num)
        img_values = append_img_vals(temp_arr, img_values)
    return img_values


# bounds layout everywhere below: [min_long, min_lat, max_long, max_lat]
all_fields_img_values = []
gt_fields_img_values = []

# Ground-truth point fields: sample a small box around each point, capped at
# 500 fields with non-empty results.
i = 0
for fieldobj in gt_fields:
    if i >= 500:
        break
    gt_field_points = fieldobj["geometry"]["coordinates"]
    print(gt_field_points)
    midlat = gt_field_points[1]
    midlng = gt_field_points[0]
    bounds = [(midlng - y), (midlat - x), (midlng + y), (midlat + x)]
    img_values = _collect_img_values(bounds)
    if len(img_values) > 0:
        all_fields_img_values.append(img_values)
        gt_fields_img_values.append(img_values)
        i = i + 1

# Additional monitored fields from Firebase: sample around the bounding-box
# midpoint, capped at 100 fields with non-empty results.
i = 0
for (fieldid, fieldobj) in different_gt_fields.items():
    if i >= 100:
        break
    midlat = (float(fieldobj["FieldMinLat"]) + float(fieldobj["FieldMaxLat"])) / 2
    midlng = (float(fieldobj["FieldMinLong"]) + float(fieldobj["FieldMaxLong"])) / 2
    bounds = [(midlng - y), (midlat - x), (midlng + y), (midlat + x)]
    print(bounds)
    img_values = _collect_img_values(bounds)
    if len(img_values) > 0:
        all_fields_img_values.append(img_values)
        i = i + 1

print(all_fields_img_values)
print(gt_fields_img_values)
# ---------------------------------------------------------------------------
# Cluster the sampled field vectors, pick the "paddy" clusters from the
# ground-truth distribution, then classify every pixel of the study area.
# ---------------------------------------------------------------------------
np.savetxt("paddy_all_field_img_values.csv", all_fields_img_values, delimiter=",")
np.savetxt("paddy_gt_field_img_values.csv", gt_fields_img_values, delimiter=",")

k_groups = 8
kmeans = KMeans(k_groups)
kmeans.fit(all_fields_img_values)

# Which clusters do the ground-truth (paddy) fields fall into, and how often?
cluster_array = kmeans.predict(gt_fields_img_values)
unique, count_array = np.unique(cluster_array, return_counts=True)
result = np.column_stack((unique, count_array))  # rows: (cluster_id, count)
print(result)

# Sort the (cluster_id, count) rows by count, descending, and keep the most
# frequent clusters until they cover at least 70% of the ground-truth samples.
new_count_array = sorted(result, key=lambda row: row[1], reverse=True)
print(new_count_array)

pred_vals = []  # cluster ids treated as "paddy"
sum_count_array = np.sum(count_array)
covered = 0  # ground-truth samples covered by the clusters chosen so far
for cluster_id, count in new_count_array:
    if covered / sum_count_array >= 0.7:
        break
    pred_vals.append(cluster_id)
    covered = covered + count
print(pred_vals)

# Per-pixel values over the whole study-area bounding box.
bounds = [float(study_field["FieldMinLong"]), float(study_field["FieldMinLat"]),
          float(study_field["FieldMaxLong"]), float(study_field["FieldMaxLat"])]
print(bounds)

# NOTE(review): each loop iteration overwrites temp_arr/w/h, so only the LAST
# layer requested is actually clustered below — confirm this is intended.
for image in s2_images:
    temp_arr, w, h = find_study_area_values(
        DataCollection.SENTINEL2_L1C, image, bounds, from_date, to_date,
        clientID, clientSecret, wmsID, max_num)
for image in s1_images:
    temp_arr, w, h = find_study_area_values(
        DataCollection.SENTINEL1_IW, image, bounds, from_date, to_date,
        clientID, clientSecret, rviID, max_num)

# Bug fix: use predict(), not fit_predict().  fit_predict() would REFIT the
# model on the study pixels and relabel the clusters arbitrarily, so the
# cluster ids in pred_vals (chosen from the fit above) would no longer
# correspond to the same clusters.
Identified_clusters = kmeans.predict(temp_arr)

make_area_estimate_image(Identified_clusters, w, h, study_uid, study_field_id, pred_vals)