"""Sample Sentinel-1/-2 image values for ground-truth and study fields.

Pulls field metadata from Firebase, samples image values in a small box
around each field's centroid via `find_img_value`, and accumulates rows
for the K-means clustering performed further down the script.
"""
from tempfile import tempdir  # NOTE(review): unused — candidate for removal

import firebase_admin
from firebase_admin import credentials
from firebase_admin import db
from find_s1_image2 import find_img_value
import pandas as pd
import statsmodels.api as sm
import seaborn as sns
import time
import json
import numpy as np
from sklearn.cluster import KMeans
from find_study_area_values import find_study_area_values
from make_area_estimate_image import make_area_estimate_image
# De-duplicated names (CRS/BBox were listed twice) and the second
# `import json` that appeared in the original.
from sentinelhub import (
    BBox,
    CRS,
    DataCollection,
    DataSource,
    MimeType,
    WebFeatureService,
    WmsRequest,
)

sns.set()

cred = credentials.Certificate('servicekey.json')

# Half-height (x, latitude) / half-width (y, longitude) in degrees of the
# sampling box built around each field centroid.
x = 0.0003
y = 0.0003
fromdate = '20220701'
todate = '20220715'
s1_images = ['IW-VH-DB']
s2_images = []
# s1_images = ["B02", "B03", "B04", "B05"]
# s2_images = ["RVI-NEW"]

try:
    firebase_admin.initialize_app(
        cred, {'databaseURL': 'https://farmbase-b2f7e-31c0c.firebaseio.com/'}
    )
except ValueError:
    # Narrowed from a bare `except:` — initialize_app raises ValueError when
    # the default app already exists (e.g. module re-import); other failures
    # (bad credentials) should propagate instead of being swallowed.
    print('fire running')

study_uid = "vPM6EKWHfHRxmRfc0YJ7PZ8J9tR2"
gt_uid = [
    'u5ZgAskSltX7dQdNFUlHsgVhAlf2',
    'b2j92LjidjeGt2b60QHhSj4GeS63',
    'hSoBF0u61YfFeBFzcjhUFU3dh0e2',
    '0AC0f8JWjUdg9fCm7f8QbXZTOp23',
]
different_gt_uid = 'hxA3VYEFpaOluurIbHxdzd4gPlw1'
study_field_id = '1662365839021'

# Merge the monitored fields of every ground-truth user into one dict
# keyed by field id.
gt_fields = {}
for temp_id in gt_uid:
    temp_gt_fields = (
        db.reference('PaidMonitoredFields').child('PMF').child(temp_id).get()
    )
    for (p, q) in temp_gt_fields.items():
        gt_fields[p] = q
# gt_fields = db.reference('PaidMonitoredFields').child('PMF').child(gt_uid).get()

different_gt_fields = (
    db.reference('PaidMonitoredFields').child('PMF').child(different_gt_uid).get()
)
study_fields = db.reference('PaidMonitoredFields').child('PMF').child(study_uid).get()

# Keeps only the LAST field of the study account — presumably a
# single-field account; TODO confirm against the database.
for (p, q) in study_fields.items():
    study_field = q

sentinelSettings = db.reference('SentinelSettings').get()
clientID = sentinelSettings["ClientID"]
clientSecret = sentinelSettings["ClientSecret"]
wmsID = sentinelSettings["WMSID"]
rviID = sentinelSettings["RVIID"]
demID = sentinelSettings["DEMID"]

field_num = 0
from_date = '2022-06-25T00:00:00'
to_date = '2022-09-17T23:59:59'
max_num = 7


def append_img_vals(temp_arr, img_values):
    """Append every entry of `temp_arr` to `img_values`; tolerates None.

    Returns `img_values` (also mutated in place) so call sites can rebind.
    """
    if temp_arr is not None:
        img_values.extend(temp_arr)
    return img_values


all_fields_img_values = []
gt_fields_img_values = []

# Sample up to 150 ground-truth fields.
i = 0
for (fieldid, fieldobj) in gt_fields.items():
    if i >= 150:
        break
    img_values = []
    midlat = (float(fieldobj["FieldMinLat"]) + float(fieldobj["FieldMaxLat"])) / 2
    midlng = (float(fieldobj["FieldMinLong"]) + float(fieldobj["FieldMaxLong"])) / 2
    bounds = [(midlng - y), (midlat - x), (midlng + y), (midlat + x)]
    print(bounds)
    for image in s2_images:
        temp_arr = find_img_value(
            fieldid, DataCollection.SENTINEL2_L1C, image, bounds,
            from_date, to_date, clientID, clientSecret, wmsID, max_num,
        )
        img_values = append_img_vals(temp_arr, img_values)
    for image in s1_images:
        temp_arr = find_img_value(
            fieldid, DataCollection.SENTINEL1_IW, image, bounds,
            from_date, to_date, clientID, clientSecret, rviID, max_num,
        )
        img_values = append_img_vals(temp_arr, img_values)
    if len(img_values) > 0:
        all_fields_img_values.append(img_values)
        gt_fields_img_values.append(img_values)
    i = i + 1

# Sample up to 40 fields of the "different" ground-truth user; these feed
# the clustering input but are NOT counted as ground truth.
i = 0
for (fieldid, fieldobj) in different_gt_fields.items():
    if i >= 40:
        break
    img_values = []
    midlat = (float(fieldobj["FieldMinLat"]) + float(fieldobj["FieldMaxLat"])) / 2
    midlng = (float(fieldobj["FieldMinLong"]) + float(fieldobj["FieldMaxLong"])) / 2
    bounds = [(midlng - y), (midlat - x), (midlng + y), (midlat + x)]
    print(bounds)
    # FIX(review): the original omitted `fieldid` here while the loop above
    # passed it as the first argument to find_img_value — made consistent
    # with the first loop. TODO confirm find_img_value's signature.
    for image in s2_images:
        temp_arr = find_img_value(
            fieldid, DataCollection.SENTINEL2_L1C, image, bounds,
            from_date, to_date, clientID, clientSecret, wmsID, max_num,
        )
        img_values = append_img_vals(temp_arr, img_values)
    for image in s1_images:
        temp_arr = find_img_value(
            fieldid, DataCollection.SENTINEL1_IW, image, bounds,
            from_date, to_date, clientID, clientSecret, rviID, max_num,
        )
        img_values = append_img_vals(temp_arr, img_values)
    if len(img_values) > 0:
        all_fields_img_values.append(img_values)
    i = i + 1

print(all_fields_img_values)
print(gt_fields_img_values)

# Persist the sampled rows for offline inspection / re-runs.
np.savetxt("paddy_all_field_img_values.csv", all_fields_img_values, delimiter=",")
np.savetxt("paddy_gt_field_img_values.csv", gt_fields_img_values, delimiter=",")
np.savetxt("all_fields_img_values.csv", all_fields_img_values, delimiter=",")

# NOTE(review): `crop_wise_img_values` is never defined anywhere in this
# file, so everything referencing it raises NameError at runtime. It
# presumably maps crop name -> list of per-field image-value rows and was
# meant to be built while sampling the fields above — TODO restore that
# construction before running this script.
json_object = json.dumps(crop_wise_img_values, indent=4)
with open("crop_wise_img_values.json", "w") as outfile:
    outfile.write(json_object)
# np.savetxt("crop_wise_img_values.csv", crop_wise_img_values, delimiter = ",")

# Fit K-means on ALL sampled field rows; cluster labels from this fit are
# what `pred_vals` below refers to.
k_groups = 14
kmeans = KMeans(k_groups)
# kmeans = KMeans(n_clusters =2, random_state = 0)
kmeans.fit(all_fields_img_values)

for (crop, crop_values) in crop_wise_img_values.items():
    cluster_array = kmeans.predict(crop_values)
    unique, count_array = np.unique(cluster_array, return_counts=True)
    result = np.column_stack((unique, count_array))
    print(result)
    # Rank clusters by how many of this crop's samples landed in each.
    new_count_array = sorted(result, key=lambda row: row[1], reverse=True)
    print(new_count_array)
    # Keep the most-populated clusters until they cover 70% of the samples.
    # NOTE: `pred_vals` keeps the value from the LAST crop iterated — it is
    # consumed after this loop (see make_area_estimate_image call).
    pred_vals = []
    sum_count_array = np.sum(count_array)
    covered = 0
    for row in new_count_array:
        if covered / sum_count_array < 0.7:
            pred_vals.append(row[0])
            covered = covered + row[1]
        else:
            break
    print(pred_vals)

# Label every pixel of the study field with the trained clustering.
study_pixel_values_arr = []
midlat = (float(study_field["FieldMinLat"]) + float(study_field["FieldMaxLat"])) / 2
midlng = (float(study_field["FieldMinLong"]) + float(study_field["FieldMaxLong"])) / 2
bounds = [
    float(study_field["FieldMinLong"]),
    float(study_field["FieldMinLat"]),
    float(study_field["FieldMaxLong"]),
    float(study_field["FieldMaxLat"]),
]
print(bounds)
for image in s2_images:
    # FIX(review): original called undefined `find_study_area_values3`; the
    # helper imported at the top of the file is `find_study_area_values`.
    temp_arr, w, h = find_study_area_values(
        DataCollection.SENTINEL2_L1C, image, bounds,
        from_date, to_date, clientID, clientSecret, wmsID, max_num,
    )
for image in s1_images:
    temp_arr, w, h = find_study_area_values(
        DataCollection.SENTINEL1_IW, image, bounds,
        from_date, to_date, clientID, clientSecret, rviID, max_num,
    )

# FIX(review): `fit_predict` would RE-FIT the model on the study pixels,
# producing fresh cluster labels that no longer correspond to `pred_vals`
# (derived from the fit on all_fields_img_values). Use `predict` so the
# study pixels are labeled by the already-trained model.
Identified_clusters = kmeans.predict(temp_arr)
# np.savetxt('identified_arr.csv',Identified_clusters, delimiter=",")
# Build the area-estimate image for the study field from the per-pixel
# cluster labels (w x h grid); `pred_vals` selects which clusters count as
# the crop. NOTE(review): `pred_vals` holds the value from the last crop
# iterated in the loop above — confirm that is the intended crop.
make_area_estimate_image(Identified_clusters, w, h, study_uid, study_field_id, pred_vals)