# Build a wheat / non-wheat training set from Sentinel image statistics of
# monitored fields stored in Firebase, then fit a random forest on the result.
import firebase_admin
from firebase_admin import credentials
from firebase_admin import db
from find_s1_image import find_img_value
import pandas as pd
import statsmodels.api as sm
import seaborn as sns
sns.set()
import json
from sklearn.metrics import classification_report, confusion_matrix, accuracy_score
from sklearn.ensemble import RandomForestRegressor
from sklearn.model_selection import train_test_split
import numpy as np
from sklearn.cluster import KMeans
from find_study_area_values import find_study_area_values
from make_area_estimate_image import make_area_estimate_image
from sentinelhub import WebFeatureService, BBox, CRS, DataSource, MimeType, WmsRequest, DataCollection

cred = credentials.Certificate('servicekey.json')

# Half-width (in degrees) of the bounding box placed around each field midpoint.
x = 0.0003
y = 0.0003
fromdate = '20221025'
todate = '20221205'

# Sentinel-1 / Sentinel-2 layers to sample per field.
s1_images = ['IW-VH-DB']
s2_images = []
# s1_images = ["B02", "B03", "B04", "B05"]
# s2_images = ["RVI-NEW"]

try:
    firebase_admin.initialize_app(cred, {'databaseURL': 'https://farmbase-b2f7e-31c0c.firebaseio.com/'})
except:
    # App was already initialized in a previous run.
    print('fire running')

# Account / field identifiers: gt_uids hold the ground-truth (wheat) fields,
# different_gt_uid holds the contrasting (non-wheat) fields, study_uid holds
# the field whose area is to be estimated.
study_uid = "MBFshQgimkS6AwSDtqAyOpRXZW12"
study_field_uid = '6HjLvQdSnUQAWPWL5ck4rVnpdfT2'
gt_uids = ['rF4qo7DGoxd3AeEC1Ux4vBGbsbl1', 'fSRlyKzCxxMJHJqDzpGhQImyyxf2']
different_gt_uid = 'joCrKyhfobbzgCxCQPHJfOOKKPF3'

# Collect all ground-truth fields across the ground-truth accounts.
gt_fields = {}
for gt_uid in gt_uids:
    temp_fields = db.reference('PaidMonitoredFields').child('PMF').child(gt_uid).get()
    for (fieldid, fieldobj) in temp_fields.items():
        gt_fields[fieldid] = fieldobj

different_gt_fields = db.reference('PaidMonitoredFields').child('PMF').child(different_gt_uid).get()
study_fields = db.reference('PaidMonitoredFields').child('PMF').child(study_uid).get()

# Keep the last field of the study account as the study field.
for (key, val) in study_fields.items():
    study_field = val
    study_field_id = key

# Sentinel Hub credentials and instance IDs stored in Firebase.
sentinelSettings = db.reference('SentinelSettings').get()
clientID = sentinelSettings["ClientID"]
clientSecret = sentinelSettings["ClientSecret"]
wmsID = sentinelSettings["WMSID"]
rviID = sentinelSettings["RVIID"]
demID = sentinelSettings["DEMID"]

field_num = 0
# ISO-formatted time window passed to find_img_value.
from_date = '2022-10-20T00:00:00'
to_date = '2022-12-05T23:59:59'
max_num = 4


def append_img_vals(temp_arr, img_values):
    # Extend img_values with the returned band values; find_img_value may return None.
    if temp_arr is not None:
        for k in temp_arr:
            img_values.append(k)
    return img_values


all_fields_img_values = []
gt_fields_img_values = []
i = 0
X = []

# Positive class: sample image values around each ground-truth field's midpoint (label 1).
for (fieldid, fieldobj) in gt_fields.items():
    if i < 500:
        # gt_field_points = fieldobj["geometry"]["coordinates"]
        # print(gt_field_points)
        img_values = []
        # midlat = gt_field_points[1]
        # midlng = gt_field_points[0]
        midlat = (float(fieldobj["FieldMinLat"]) + float(fieldobj["FieldMaxLat"])) / 2
        midlng = (float(fieldobj["FieldMinLong"]) + float(fieldobj["FieldMaxLong"])) / 2
        bounds = [(midlng - y), (midlat - x), (midlng + y), (midlat + x)]
        # print(bounds)
        image_num = 0
        for image in s2_images:
            temp_arr = find_img_value(fieldid, DataCollection.SENTINEL2_L1C, image, bounds, from_date, to_date, clientID, clientSecret, wmsID, max_num)
            img_values = append_img_vals(temp_arr, img_values)
        for image in s1_images:
            temp_arr = find_img_value(fieldid, DataCollection.SENTINEL1_IW, image, bounds, from_date, to_date, clientID, clientSecret, rviID, max_num)
            img_values = append_img_vals(temp_arr, img_values)
        if len(img_values) > 0:
            all_fields_img_values.append(img_values)
            gt_fields_img_values.append(img_values)
            X.append(1)
        i = i + 1
    else:
        break
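
# --- Optional refactor sketch (not wired into the loops above or below) ---
# The ground-truth loop above and the non-wheat loop below repeat the same
# per-field steps: compute the field midpoint, buffer it by (x, y) degrees into
# a small bounding box, and pull Sentinel-2 / Sentinel-1 values via
# find_img_value. A helper like the one below could remove that duplication.
# The helper name is illustrative; it only uses names already defined in this
# script and is a sketch, not part of the current pipeline.
def extract_field_features(fieldid, fieldobj):
    midlat = (float(fieldobj["FieldMinLat"]) + float(fieldobj["FieldMaxLat"])) / 2
    midlng = (float(fieldobj["FieldMinLong"]) + float(fieldobj["FieldMaxLong"])) / 2
    bounds = [(midlng - y), (midlat - x), (midlng + y), (midlat + x)]
    img_values = []
    for image in s2_images:
        temp_arr = find_img_value(fieldid, DataCollection.SENTINEL2_L1C, image, bounds,
                                  from_date, to_date, clientID, clientSecret, wmsID, max_num)
        img_values = append_img_vals(temp_arr, img_values)
    for image in s1_images:
        temp_arr = find_img_value(fieldid, DataCollection.SENTINEL1_IW, image, bounds,
                                  from_date, to_date, clientID, clientSecret, rviID, max_num)
        img_values = append_img_vals(temp_arr, img_values)
    return img_values
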
# Negative class: same sampling for fields of the contrasting account (label 0).
i = 0
for (fieldid, fieldobj) in different_gt_fields.items():
    if i < 100:
        img_values = []
        midlat = (float(fieldobj["FieldMinLat"]) + float(fieldobj["FieldMaxLat"])) / 2
        midlng = (float(fieldobj["FieldMinLong"]) + float(fieldobj["FieldMaxLong"])) / 2
        bounds = [(midlng - y), (midlat - x), (midlng + y), (midlat + x)]
        print(bounds)
        image_num = 0
        for image in s2_images:
            temp_arr = find_img_value(fieldid, DataCollection.SENTINEL2_L1C, image, bounds, from_date, to_date, clientID, clientSecret, wmsID, max_num)
            img_values = append_img_vals(temp_arr, img_values)
        for image in s1_images:
            temp_arr = find_img_value(fieldid, DataCollection.SENTINEL1_IW, image, bounds, from_date, to_date, clientID, clientSecret, rviID, max_num)
            img_values = append_img_vals(temp_arr, img_values)
        if len(img_values) > 0:
            all_fields_img_values.append(img_values)
            X.append(0)
        i = i + 1
    else:
        break

print(all_fields_img_values)
print(gt_fields_img_values)

# Dump the sampled image values (assumes every field produced the same number of values).
np.savetxt("wheat_all_field_img_values.csv", all_fields_img_values, delimiter=",")
np.savetxt("wheat_gt_field_img_values.csv", gt_fields_img_values, delimiter=",")

# Bounding box of the study field (full extent, not the midpoint buffer).
midlat = (float(study_field["FieldMinLat"]) + float(study_field["FieldMaxLat"])) / 2
midlng = (float(study_field["FieldMinLong"]) + float(study_field["FieldMaxLong"])) / 2
bounds = [float(study_field["FieldMinLong"]), float(study_field["FieldMinLat"]), float(study_field["FieldMaxLong"]), float(study_field["FieldMaxLat"])]
print(bounds)

# image_num = 0
# for image in s2_images:
#     temp_arr, w, h = find_study_area_values(DataCollection.SENTINEL2_L1C, image, bounds, from_date, to_date, clientID, clientSecret, wmsID, max_num)
# for image in s1_images:
#     temp_arr, w, h = find_study_area_values(DataCollection.SENTINEL1_IW, image, bounds, from_date, to_date, clientID, clientSecret, rviID, max_num)

# Features are the per-field image values; labels (X) mark wheat ground truth (1)
# vs the contrasting account's fields (0).
X_train, X_test, y_train, y_test = train_test_split(all_fields_img_values, X, test_size=0.25, random_state=0)

regressor = RandomForestRegressor(n_estimators=200, random_state=0)
regressor.fit(X_train, y_train)
# Score the held-out test fields.
y_pred = regressor.predict(X_test)

# make_area_estimate_image(Identified_clusters, w, h, study_uid, study_field_id, pred_vals)
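
# --- Evaluation sketch using the imported (but otherwise unused) metrics ---
# RandomForestRegressor returns continuous scores, so to reuse
# classification_report / confusion_matrix / accuracy_score on the held-out
# split, the test predictions can be thresholded into class labels
# (1 = wheat ground truth, 0 = other). The 0.5 cut-off below is an assumption,
# not something fixed by this script.
y_pred_label = [1 if val >= 0.5 else 0 for val in y_pred]
print(accuracy_score(y_test, y_pred_label))
print(confusion_matrix(y_test, y_pred_label))
print(classification_report(y_test, y_pred_label))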