def find_study_area_values(field_name, satellite, imageType, bounds, from_date, to_date,
                           CLIENT_ID, CLIENT_SECRET, INSTANCE_ID, max_num):
    from sentinelhub import (SHConfig, WebFeatureService, BBox, CRS, MimeType,
                             WmsRequest, DataCollection, BBoxSplitter)
    from firebase_admin import credentials
    from firebase_admin import db
    from statistics import mean
    from PIL import Image, ImageFilter
    from pathlib import Path
    from osgeo import gdal, osr
    import os
    import os.path
    import sys
    import time
    import csv
    import json
    import cv2
    import numpy as np
    import PIL
    from make_geotiff import make_geotiff

    config = SHConfig()
    print(imageType)

    img_values_arr = []
    cred = credentials.Certificate('servicekey2.json')

    # AWS credentials (unused in this function; avoid hardcoding secrets in source).
    access_key = '<AWS_ACCESS_KEY_ID>'
    secret_key = '<AWS_SECRET_ACCESS_KEY>'

    if CLIENT_ID and CLIENT_SECRET:
        config.sh_client_id = CLIENT_ID
        config.sh_client_secret = CLIENT_SECRET
        config.instance_id = INSTANCE_ID

    # Extent of the study area in degrees (currently unused).
    lat_len = bounds[3] - bounds[1]
    lng_len = bounds[2] - bounds[0]

    img_width = 500
    search_bbox = BBox(bbox=bounds, crs=CRS.WGS84)
    search_time_interval = (from_date, to_date)

    # Split the WGS84 bounding box into a 5x5 grid of sub-areas.
    bbox_splitter = BBoxSplitter([search_bbox.geometry], CRS.WGS84, (5, 5))
    bbox_list = bbox_splitter.get_bbox_list()

    date_list = []
    iter_num = 0
    for small_bbox in bbox_list:
        iterator_wms = WebFeatureService(small_bbox, search_time_interval,
                                         data_collection=satellite, maxcc=1.0, config=config)
        images_arr = []
        # Sentinel value larger than any yyyymmdd date, so the first acquisition
        # always starts a new date group.
        prev_date = '90101010'
        for tile_info in iterator_wms:
            s1_request = WmsRequest(data_collection=satellite, layer=imageType,
                                    bbox=small_bbox, time=tile_info["properties"]["date"],
                                    width=img_width, image_format=MimeType.TIFF, config=config)
            s1_data = s1_request.get_data()

            # Group acquisitions taken within a few days of each other into one folder.
            # The WFS iterates from newest to oldest and the comparison is done on the
            # dates as plain yyyymmdd integers, so gaps across month boundaries are
            # overestimated -- a crude but cheap grouping.
            new_date = tile_info["properties"]["date"]
            prev_day = prev_date.replace("-", "")
            new_day = new_date.replace("-", "")
            file_date = new_date
            if int(prev_day) - int(new_day) < 4:
                file_date = prev_date
            else:
                prev_date = new_date
            date_list.append(file_date)

            file_name = file_date + '/' + str(tile_info["properties"]["date"]) + str(iter_num) + '.tiff'
            print(file_name)
            os.makedirs(file_date, exist_ok=True)

            # Save the last band stack of this sub-area and georeference it.
            im = PIL.Image.fromarray(s1_data[-1])
            im.save(file_name)
            make_geotiff(file_name, small_bbox)
        iter_num = iter_num + 1

    # Mosaic the downloaded tiles per date: build a virtual raster over every tile
    # of a date folder and convert it to a JPEG preview.
    for single_date in sorted(set(date_list)):
        file_list = [f"{x}" for x in Path(single_date).iterdir()]
        print(file_list)
        folder_name = 'tif_' + single_date
        os.makedirs(folder_name, exist_ok=True)
        vrt = gdal.BuildVRT(folder_name + '/merged.vrt', file_list)
        gdal.Translate(folder_name + '/out.jpeg', vrt)
        vrt = None  # flush and close the VRT dataset

    # NOTE: execution ends here. Everything below is an older single-request path
    # that is currently unreachable.
    return

    i = 0
    iterator_wms = WebFeatureService(search_bbox, search_time_interval,
                                     data_collection=satellite, maxcc=1.0, config=config)
    images_arr = []
    prev_date = '90101010'
    for tile_info in iterator_wms:
        # Request the full study area; fall back to a fixed height if the
        # width-constrained request fails.
        try:
            width = 2500
            s1_request = WmsRequest(data_collection=satellite, layer=imageType,
                                    bbox=search_bbox, time=tile_info["properties"]["date"],
                                    width=width, image_format=MimeType.TIFF, config=config)
            s1_data = s1_request.get_data()
        except Exception:
            height = 2500
            s1_request = WmsRequest(data_collection=satellite, layer=imageType,
                                    bbox=search_bbox, time=tile_info["properties"]["date"],
                                    height=height, image_format=MimeType.TIFF, config=config)
            s1_data = s1_request.get_data()

        file_name = str(tile_info["properties"]["date"]) + '.tiff'
        new_date = tile_info["properties"]["date"]
        prev_day = prev_date.replace("-", "")
        new_day = new_date.replace("-", "")

        # Keep an acquisition only if it is more than four "days" (yyyymmdd integer
        # difference) older than the previously kept one.
        if int(prev_day) - int(new_day) > 4:
            im = PIL.Image.fromarray(s1_data[-1])
            os.makedirs(field_name, exist_ok=True)
            im.save(field_name + '/' + file_name)

            im = PIL.Image.open(field_name + '/' + file_name)
            im = np.array(im)
            w, h = len(im[0]), len(im)

            # Count zero-valued pixels in the first band (e.g. no-data areas).
            z_num = 0
            for xx in range(0, w):
                for yy in range(0, h):
                    if im[yy, xx, 0] == 0:
                        z_num = z_num + 1

            if z_num < int(w * h):
                images_arr.append(im[:, :, 0])
                if i < max_num:
                    prev_date = tile_info["properties"]["date"]
                    print(tile_info["properties"]["date"])
                    i = i + 1
                    print(i)
                else:
                    break

    # Re-arrange the stack of images into one time series per pixel:
    # new_arr[pixel] = [value on date 0, value on date 1, ...].
    new_arr = []
    try:
        for i in range(0, len(images_arr[0])):
            for j in range(0, len(images_arr[0][0])):
                temp_arr = []
                for k in range(0, max_num):
                    temp_arr.append(round(100 * images_arr[k][i][j]) / 100)
                new_arr.append(temp_arr)
    except IndexError:
        # Fewer than max_num usable images were collected; return what we have.
        pass

    time.sleep(0.25)
    return new_arr, w, h
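

# ---------------------------------------------------------------------------
# Illustrative usage (a minimal sketch, not part of the original module).
# The bounding box, date range, layer name, data collection, and Sentinel Hub
# credentials below are placeholder assumptions; substitute values from your
# own configuration. The call also assumes servicekey2.json and the
# make_geotiff module are available locally.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    from sentinelhub import DataCollection

    # Hypothetical study area in WGS84 as (min_lon, min_lat, max_lon, max_lat).
    example_bounds = (14.00, 45.00, 14.05, 45.05)

    result = find_study_area_values(
        field_name="example_field",              # folder for per-date TIFFs
        satellite=DataCollection.SENTINEL2_L1C,  # assumed data collection
        imageType="NDVI",                        # assumed WMS layer of the instance
        bounds=example_bounds,
        from_date="2022-06-01",
        to_date="2022-09-01",
        CLIENT_ID="<sentinel-hub-client-id>",
        CLIENT_SECRET="<sentinel-hub-client-secret>",
        INSTANCE_ID="<sentinel-hub-instance-id>",
        max_num=5,
    )

    # As written, the function returns None after writing the per-date mosaics
    # (merged.vrt / out.jpeg) to disk; the unreachable legacy path would return
    # (new_arr, w, h) instead.
    print(result)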